Add Android specific VideoCapturer.
The Java implementation of VideoCapturer is loosely based on the work in webrtc/modules/videocapturer. The capturer is now started asynchronously. The capturer supports easy camera switching.

BUG=
R=henrika@webrtc.org, magjed@webrtc.org

Review URL: https://webrtc-codereview.appspot.com/30849004

Cr-Commit-Position: refs/heads/master@{#8329}
git-svn-id: http://webrtc.googlecode.com/svn/trunk@8329 4adac7df-926f-26a2-2b94-8c16560cd09d
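For orientation, here is a short usage sketch (not part of the change itself) showing how an application would drive the new capturer, pieced together from the test code added below. The renderer callback object is the application's own VideoRenderer.Callbacks implementation, and PeerConnectionFactory.initializeAndroidGlobals() is assumed to have been called first:

    // Usage sketch based on VideoCapturerAndroidTest (illustrative only).
    PeerConnectionFactory factory = new PeerConnectionFactory();
    VideoCapturerAndroid capturer =
        VideoCapturerAndroid.create(VideoCapturerAndroid.getNameOfFrontFacingDevice());
    VideoSource source = factory.createVideoSource(capturer, new MediaConstraints());
    VideoTrack track = factory.createVideoTrack("video0", source);
    track.addRenderer(new VideoRenderer(rendererCallbacks));  // app-provided callbacks
    // While capturing, switch between the front and back camera:
    boolean switched = capturer.switchCamera();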
		| @@ -0,0 +1,222 @@ | |||||||
|  | /* | ||||||
|  |  * libjingle | ||||||
|  |  * Copyright 2015 Google Inc. | ||||||
|  |  * | ||||||
|  |  * Redistribution and use in source and binary forms, with or without | ||||||
|  |  * modification, are permitted provided that the following conditions are met: | ||||||
|  |  * | ||||||
|  |  *  1. Redistributions of source code must retain the above copyright notice, | ||||||
|  |  *     this list of conditions and the following disclaimer. | ||||||
|  |  *  2. Redistributions in binary form must reproduce the above copyright notice, | ||||||
|  |  *     this list of conditions and the following disclaimer in the documentation | ||||||
|  |  *     and/or other materials provided with the distribution. | ||||||
|  |  *  3. The name of the author may not be used to endorse or promote products | ||||||
|  |  *     derived from this software without specific prior written permission. | ||||||
|  |  * | ||||||
|  |  * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED | ||||||
|  |  * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF | ||||||
|  |  * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO | ||||||
|  |  * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, | ||||||
|  |  * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, | ||||||
|  |  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; | ||||||
|  |  * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, | ||||||
|  |  * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR | ||||||
|  |  * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF | ||||||
|  |  * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | ||||||
|  |  */ | ||||||
|  | package org.webrtc; | ||||||
|  |  | ||||||
|  | import android.hardware.Camera; | ||||||
|  | import android.test.ActivityTestCase; | ||||||
|  | import android.test.suitebuilder.annotation.SmallTest; | ||||||
|  |  | ||||||
|  | import org.webrtc.VideoCapturerAndroid.CaptureFormat; | ||||||
|  | import org.webrtc.VideoRenderer.I420Frame; | ||||||
|  |  | ||||||
|  | import java.util.ArrayList; | ||||||
|  |  | ||||||
|  | @SuppressWarnings("deprecation") | ||||||
|  | public class VideoCapturerAndroidTest extends ActivityTestCase { | ||||||
|  |   static class RendererCallbacks implements VideoRenderer.Callbacks { | ||||||
|  |     private int framesRendered = 0; | ||||||
|  |     private Object frameLock = 0; | ||||||
|  |  | ||||||
|  |     @Override | ||||||
|  |     public void setSize(int width, int height) { | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     @Override | ||||||
|  |     public void renderFrame(I420Frame frame) { | ||||||
|  |       synchronized (frameLock) { | ||||||
|  |         ++framesRendered; | ||||||
|  |         frameLock.notify(); | ||||||
|  |       } | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     public int WaitForNextFrameToRender() throws InterruptedException { | ||||||
|  |       synchronized (frameLock) { | ||||||
|  |         frameLock.wait(); | ||||||
|  |         return framesRendered; | ||||||
|  |       } | ||||||
|  |     } | ||||||
|  |   } | ||||||
|  |  | ||||||
|  |   static class FakeCapturerObserver implements | ||||||
|  |       VideoCapturerAndroid.CapturerObserver { | ||||||
|  |     private int framesCaptured = 0; | ||||||
|  |     private int frameSize = 0; | ||||||
|  |     private Object frameLock = 0; | ||||||
|  |     private Object capturerStartLock = 0; | ||||||
|  |     private boolean captureStartResult = false; | ||||||
|  |  | ||||||
|  |     @Override | ||||||
|  |     public void OnCapturerStarted(boolean success) { | ||||||
|  |       synchronized (capturerStartLock) { | ||||||
|  |         captureStartResult = success; | ||||||
|  |         capturerStartLock.notify(); | ||||||
|  |       } | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     @Override | ||||||
|  |     public void OnFrameCaptured(byte[] data, int rotation, long timeStamp) { | ||||||
|  |       synchronized (frameLock) { | ||||||
|  |         ++framesCaptured; | ||||||
|  |         frameSize = data.length; | ||||||
|  |         frameLock.notify(); | ||||||
|  |       } | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     public boolean WaitForCapturerToStart() throws InterruptedException { | ||||||
|  |       synchronized (capturerStartLock) { | ||||||
|  |         capturerStartLock.wait(); | ||||||
|  |         return captureStartResult; | ||||||
|  |       } | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     public int WaitForNextCapturedFrame() throws InterruptedException { | ||||||
|  |       synchronized (frameLock) { | ||||||
|  |         frameLock.wait(); | ||||||
|  |         return framesCaptured; | ||||||
|  |       } | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     int frameSize() { | ||||||
|  |       synchronized (frameLock) { | ||||||
|  |         return frameSize; | ||||||
|  |       } | ||||||
|  |     } | ||||||
|  |   } | ||||||
|  |  | ||||||
|  |   // Returns true if the device under test has at least two cameras. | ||||||
|  |   @SuppressWarnings("deprecation") | ||||||
|  |   boolean HaveTwoCameras() { | ||||||
|  |     return (Camera.getNumberOfCameras() >= 2); | ||||||
|  |   } | ||||||
|  |  | ||||||
|  |   void startCapturerAndRender(String deviceName) throws InterruptedException { | ||||||
|  |     PeerConnectionFactory factory = new PeerConnectionFactory(); | ||||||
|  |     VideoCapturerAndroid capturer = VideoCapturerAndroid.create(deviceName); | ||||||
|  |     VideoSource source = | ||||||
|  |         factory.createVideoSource(capturer, new MediaConstraints()); | ||||||
|  |     VideoTrack track = factory.createVideoTrack("dummy", source); | ||||||
|  |     RendererCallbacks callbacks = new RendererCallbacks(); | ||||||
|  |     track.addRenderer(new VideoRenderer(callbacks)); | ||||||
|  |     assertTrue(callbacks.WaitForNextFrameToRender() > 0); | ||||||
|  |   } | ||||||
|  |  | ||||||
|  |   @Override | ||||||
|  |   protected void setUp() { | ||||||
|  |     assertTrue(PeerConnectionFactory.initializeAndroidGlobals( | ||||||
|  |         getInstrumentation().getContext(), true, | ||||||
|  |         true, true, null)); | ||||||
|  |   } | ||||||
|  |  | ||||||
|  |   @SmallTest | ||||||
|  |   public void testCreateAndRelease() throws Exception { | ||||||
|  |     VideoCapturerAndroid capturer = VideoCapturerAndroid.create(""); | ||||||
|  |     assertNotNull(capturer); | ||||||
|  |     capturer.dispose(); | ||||||
|  |   } | ||||||
|  |  | ||||||
|  |   @SmallTest | ||||||
|  |   public void testCreateNonExistingCamera() throws Exception { | ||||||
|  |     VideoCapturerAndroid capturer = VideoCapturerAndroid.create( | ||||||
|  |         "non-existing camera"); | ||||||
|  |     assertNull(capturer); | ||||||
|  |   } | ||||||
|  |  | ||||||
|  |   @SmallTest | ||||||
|  |   // This test verifies that the camera can be started and that the frames | ||||||
|  |   // are forwarded to a Java video renderer using a "default" capturer. | ||||||
|  |   // It tests both the Java and the C++ layer. | ||||||
|  |   public void testStartVideoCapturer() throws Exception { | ||||||
|  |     startCapturerAndRender(""); | ||||||
|  |   } | ||||||
|  |  | ||||||
|  |   @SmallTest | ||||||
|  |   // This test verifies that the camera can be started and that the frames are | ||||||
|  |   // forwarded to a Java video renderer using the front-facing video capturer. | ||||||
|  |   // It tests both the Java and the C++ layer. | ||||||
|  |   public void testStartFrontFacingVideoCapturer() throws Exception { | ||||||
|  |     startCapturerAndRender(VideoCapturerAndroid.getNameOfFrontFacingDevice()); | ||||||
|  |   } | ||||||
|  |  | ||||||
|  |   @SmallTest | ||||||
|  |   // This test verifies that the camera can be started and that the frames are | ||||||
|  |   // forwarded to a Java video renderer using the back-facing video capturer. | ||||||
|  |   // It tests both the Java and the C++ layer. | ||||||
|  |   public void testStartBackFacingVideoCapturer() throws Exception { | ||||||
|  |     if (!HaveTwoCameras()) { | ||||||
|  |       return; | ||||||
|  |     } | ||||||
|  |     startCapturerAndRender(VideoCapturerAndroid.getNameOfBackFacingDevice()); | ||||||
|  |   } | ||||||
|  |  | ||||||
|  |   @SmallTest | ||||||
|  |   // This test verifies that the default camera can be started and that the | ||||||
|  |   // camera can later be switched to another camera. | ||||||
|  |   // It tests both the Java and the C++ layer. | ||||||
|  |   public void testSwitchVideoCapturer() throws Exception { | ||||||
|  |     PeerConnectionFactory factory = new PeerConnectionFactory(); | ||||||
|  |     VideoCapturerAndroid capturer = VideoCapturerAndroid.create(""); | ||||||
|  |     VideoSource source = | ||||||
|  |         factory.createVideoSource(capturer, new MediaConstraints()); | ||||||
|  |     VideoTrack track = factory.createVideoTrack("dummy", source); | ||||||
|  |  | ||||||
|  |     if (HaveTwoCameras()) | ||||||
|  |       assertTrue(capturer.switchCamera()); | ||||||
|  |     else | ||||||
|  |       assertFalse(capturer.switchCamera()); | ||||||
|  |  | ||||||
|  |     // Wait until the camera has been switched. | ||||||
|  |     capturer.runCameraThreadUntilIdle(); | ||||||
|  |  | ||||||
|  |     // Ensure that frames are received. | ||||||
|  |     RendererCallbacks callbacks = new RendererCallbacks(); | ||||||
|  |     track.addRenderer(new VideoRenderer(callbacks)); | ||||||
|  |     assertTrue(callbacks.WaitForNextFrameToRender() > 0); | ||||||
|  |   } | ||||||
|  |  | ||||||
|  |   @SmallTest | ||||||
|  |   // This test verifies that the camera can be started at different resolutions. | ||||||
|  |   // It does not test or use the C++ layer. | ||||||
|  |   public void testStartStopWithDifferentResolutions() throws Exception { | ||||||
|  |     FakeCapturerObserver observer = new FakeCapturerObserver(); | ||||||
|  |  | ||||||
|  |     String deviceName = VideoCapturerAndroid.getDeviceName(0); | ||||||
|  |     ArrayList<CaptureFormat> formats = | ||||||
|  |         VideoCapturerAndroid.getSupportedFormats(0); | ||||||
|  |     VideoCapturerAndroid capturer = VideoCapturerAndroid.create(deviceName); | ||||||
|  |  | ||||||
|  |     for (int i = 0; i < 3; ++i) { | ||||||
|  |       VideoCapturerAndroid.CaptureFormat format = formats.get(i); | ||||||
|  |       capturer.startCapture(format.width, format.height, format.maxFramerate, | ||||||
|  |           getInstrumentation().getContext(), observer); | ||||||
|  |       assertTrue(observer.WaitForCapturerToStart()); | ||||||
|  |       observer.WaitForNextCapturedFrame(); | ||||||
|  |       // Check the frame size. NV21 is assumed. | ||||||
|  |       assertEquals((format.width*format.height*3)/2, observer.frameSize()); | ||||||
|  |       assertTrue(capturer.stopCapture()); | ||||||
|  |     } | ||||||
|  |   } | ||||||
|  | } | ||||||
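A note on the frame-size assertion in testStartStopWithDifferentResolutions above: the capturer delivers NV21 buffers, i.e. a full-resolution Y plane followed by an interleaved VU plane whose chroma is subsampled 2x2, so the expected byte count is width * height * 3 / 2. A tiny helper spelling this out (illustrative, not part of the change):

    // NV21 frame size for even dimensions: Y plane plus interleaved VU plane.
    static int nv21FrameSize(int width, int height) {
      return width * height            // Y: one byte per pixel
           + (width * height) / 2;     // VU: 2x2-subsampled, interleaved
    }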
							
								
								
									
206  talk/app/webrtc/androidvideocapturer.cc  Normal file
							| @@ -0,0 +1,206 @@ | |||||||
|  | /* | ||||||
|  |  * libjingle | ||||||
|  |  * Copyright 2015 Google Inc. | ||||||
|  |  * | ||||||
|  |  * Redistribution and use in source and binary forms, with or without | ||||||
|  |  * modification, are permitted provided that the following conditions are met: | ||||||
|  |  * | ||||||
|  |  *  1. Redistributions of source code must retain the above copyright notice, | ||||||
|  |  *     this list of conditions and the following disclaimer. | ||||||
|  |  *  2. Redistributions in binary form must reproduce the above copyright notice, | ||||||
|  |  *     this list of conditions and the following disclaimer in the documentation | ||||||
|  |  *     and/or other materials provided with the distribution. | ||||||
|  |  *  3. The name of the author may not be used to endorse or promote products | ||||||
|  |  *     derived from this software without specific prior written permission. | ||||||
|  |  * | ||||||
|  |  * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED | ||||||
|  |  * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF | ||||||
|  |  * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO | ||||||
|  |  * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, | ||||||
|  |  * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, | ||||||
|  |  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; | ||||||
|  |  * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, | ||||||
|  |  * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR | ||||||
|  |  * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF | ||||||
|  |  * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | ||||||
|  |  */ | ||||||
|  | #include "talk/app/webrtc/androidvideocapturer.h" | ||||||
|  |  | ||||||
|  | #include "talk/media/webrtc/webrtcvideoframe.h" | ||||||
|  | #include "webrtc/base/bind.h" | ||||||
|  | #include "webrtc/base/common.h" | ||||||
|  | #include "webrtc/base/json.h" | ||||||
|  | #include "webrtc/base/timeutils.h" | ||||||
|  | #include "webrtc/base/thread.h" | ||||||
|  |  | ||||||
|  | namespace webrtc { | ||||||
|  |  | ||||||
|  | using cricket::WebRtcVideoFrame; | ||||||
|  | using rtc::scoped_ptr; | ||||||
|  |  | ||||||
|  | // An implementation of cricket::VideoFrameFactory for frames that are not | ||||||
|  | // guaranteed to outlive the created cricket::VideoFrame. | ||||||
|  | // A frame is injected using UpdateCapturedFrame, and converted into a | ||||||
|  | // cricket::VideoFrame with | ||||||
|  | // CreateAliasedFrame. UpdateCapturedFrame should be called before | ||||||
|  | // CreateAliasedFrame for every frame. | ||||||
|  | class AndroidVideoCapturer::FrameFactory : public cricket::VideoFrameFactory { | ||||||
|  |  public: | ||||||
|  |   FrameFactory(int width, int height) : start_time_(rtc::TimeNanos()) { | ||||||
|  |     // Create a CapturedFrame that only contains header information, not the | ||||||
|  |     // actual pixel data. | ||||||
|  |     captured_frame_.width = width; | ||||||
|  |     captured_frame_.height = height; | ||||||
|  |     captured_frame_.pixel_height = 1; | ||||||
|  |     captured_frame_.pixel_width = 1; | ||||||
|  |     captured_frame_.rotation = 0; | ||||||
|  |     captured_frame_.data = NULL; | ||||||
|  |     captured_frame_.data_size = cricket::CapturedFrame::kUnknownDataSize; | ||||||
|  |     captured_frame_.fourcc = static_cast<uint32>(cricket::FOURCC_ANY); | ||||||
|  |   } | ||||||
|  |  | ||||||
|  |   void UpdateCapturedFrame(signed char* frame_data, | ||||||
|  |                            int length, | ||||||
|  |                            int rotation, | ||||||
|  |                            int64 time_stamp_in_ms) { | ||||||
|  |     captured_frame_.fourcc = static_cast<uint32>(cricket::FOURCC_NV21); | ||||||
|  |     captured_frame_.data = frame_data; | ||||||
|  |     captured_frame_.elapsed_time = rtc::TimeNanos() - start_time_; | ||||||
|  |     captured_frame_.time_stamp = | ||||||
|  |         time_stamp_in_ms * rtc::kNumNanosecsPerMillisec; | ||||||
|  |     captured_frame_.rotation = rotation; | ||||||
|  |     captured_frame_.data_size = length; | ||||||
|  |   } | ||||||
|  |  | ||||||
|  |   const cricket::CapturedFrame* GetCapturedFrame() const { | ||||||
|  |     return &captured_frame_; | ||||||
|  |   } | ||||||
|  |  | ||||||
|  |   cricket::VideoFrame* CreateAliasedFrame( | ||||||
|  |       const cricket::CapturedFrame* captured_frame, | ||||||
|  |       int dst_width, | ||||||
|  |       int dst_height) const override { | ||||||
|  |     // This override of CreateAliasedFrame creates a copy of the frame since | ||||||
|  |     // |captured_frame_.data| is only guaranteed to be valid during the scope | ||||||
|  |     // of |AndroidVideoCapturer::OnIncomingFrame_w|. | ||||||
|  |     // Check that captured_frame is actually our frame. | ||||||
|  |     DCHECK(captured_frame == &captured_frame_); | ||||||
|  |     scoped_ptr<WebRtcVideoFrame> frame(new WebRtcVideoFrame()); | ||||||
|  |     frame->Init(captured_frame, dst_width, dst_height); | ||||||
|  |     return frame.release(); | ||||||
|  |   } | ||||||
|  |  | ||||||
|  |  private: | ||||||
|  |   uint64 start_time_; | ||||||
|  |   cricket::CapturedFrame captured_frame_; | ||||||
|  | }; | ||||||
|  |  | ||||||
|  | AndroidVideoCapturer::AndroidVideoCapturer( | ||||||
|  |     rtc::scoped_ptr<AndroidVideoCapturerDelegate> delegate) | ||||||
|  |     : running_(false), | ||||||
|  |       delegate_(delegate.Pass()), | ||||||
|  |       worker_thread_(NULL), | ||||||
|  |       frame_factory_(NULL) { | ||||||
|  |   std::string json_string = delegate_->GetSupportedFormats(); | ||||||
|  |   LOG(LS_INFO) << json_string; | ||||||
|  |  | ||||||
|  |   Json::Value json_values; | ||||||
|  |   Json::Reader reader(Json::Features::strictMode()); | ||||||
|  |   if (!reader.parse(json_string, json_values)) { | ||||||
|  |     LOG(LS_ERROR) << "Failed to parse formats."; | ||||||
|  |   } | ||||||
|  |  | ||||||
|  |   std::vector<cricket::VideoFormat> formats; | ||||||
|  |   for (Json::ArrayIndex i = 0; i < json_values.size(); ++i) { | ||||||
|  |       const Json::Value& json_value = json_values[i]; | ||||||
|  |       DCHECK(!json_value["width"].isNull() && !json_value["height"].isNull() && | ||||||
|  |              !json_value["framerate"].isNull()); | ||||||
|  |       cricket::VideoFormat format( | ||||||
|  |           json_value["width"].asInt(), | ||||||
|  |           json_value["height"].asInt(), | ||||||
|  |           cricket::VideoFormat::FpsToInterval(json_value["framerate"].asInt()), | ||||||
|  |           cricket::FOURCC_NV21); | ||||||
|  |       formats.push_back(format); | ||||||
|  |   } | ||||||
|  |   SetSupportedFormats(formats); | ||||||
|  | } | ||||||
|  |  | ||||||
|  | AndroidVideoCapturer::~AndroidVideoCapturer() { | ||||||
|  |   DCHECK(!running_); | ||||||
|  | } | ||||||
|  |  | ||||||
|  | cricket::CaptureState AndroidVideoCapturer::Start( | ||||||
|  |     const cricket::VideoFormat& capture_format) { | ||||||
|  |   DCHECK(!running_); | ||||||
|  |   DCHECK(worker_thread_ == nullptr); | ||||||
|  |   // TODO(perkj): Better way to get a handle to the worker thread? | ||||||
|  |   worker_thread_ = rtc::Thread::Current(); | ||||||
|  |  | ||||||
|  |   LOG(LS_INFO) << " AndroidVideoCapturer::Start w = " << capture_format.width | ||||||
|  |                << " h = " << capture_format.height; | ||||||
|  |   frame_factory_ = new AndroidVideoCapturer::FrameFactory( | ||||||
|  |       capture_format.width, capture_format.height); | ||||||
|  |   set_frame_factory(frame_factory_); | ||||||
|  |  | ||||||
|  |   running_ = true; | ||||||
|  |   delegate_->Start( | ||||||
|  |       capture_format.width, capture_format.height, | ||||||
|  |       cricket::VideoFormat::IntervalToFps(capture_format.interval), this); | ||||||
|  |   return cricket::CS_STARTING; | ||||||
|  | } | ||||||
|  |  | ||||||
|  | void AndroidVideoCapturer::Stop() { | ||||||
|  |   DCHECK(worker_thread_->IsCurrent()); | ||||||
|  |   LOG(LS_INFO) << " AndroidVideoCapturer::Stop "; | ||||||
|  |   DCHECK(running_); | ||||||
|  |   running_ = false; | ||||||
|  |   SetCaptureFormat(NULL); | ||||||
|  |  | ||||||
|  |   delegate_->Stop(); | ||||||
|  |   SignalStateChange(this, cricket::CS_STOPPED); | ||||||
|  | } | ||||||
|  |  | ||||||
|  | bool AndroidVideoCapturer::IsRunning() { | ||||||
|  |   return running_; | ||||||
|  | } | ||||||
|  |  | ||||||
|  | bool AndroidVideoCapturer::GetPreferredFourccs(std::vector<uint32>* fourccs) { | ||||||
|  |   fourccs->push_back(cricket::FOURCC_NV21); | ||||||
|  |   return true; | ||||||
|  | } | ||||||
|  |  | ||||||
|  | void AndroidVideoCapturer::OnCapturerStarted(bool success) { | ||||||
|  |   // This method is called from a Java thread. | ||||||
|  |   DCHECK(!worker_thread_->IsCurrent()); | ||||||
|  |   worker_thread_->Invoke<void>( | ||||||
|  |       rtc::Bind(&AndroidVideoCapturer::OnCapturerStarted_w, this, success)); | ||||||
|  | } | ||||||
|  |  | ||||||
|  | void AndroidVideoCapturer::OnCapturerStarted_w(bool success) { | ||||||
|  |   DCHECK(worker_thread_->IsCurrent()); | ||||||
|  |   cricket::CaptureState new_state = | ||||||
|  |       success ? cricket::CS_RUNNING : cricket::CS_FAILED; | ||||||
|  |   SetCaptureState(new_state); | ||||||
|  | } | ||||||
|  |  | ||||||
|  | void AndroidVideoCapturer::OnIncomingFrame(signed char* videoFrame, | ||||||
|  |                                            int length, | ||||||
|  |                                            int rotation, | ||||||
|  |                                            int64 time_stamp) { | ||||||
|  |   // This method is called from a Java thread. | ||||||
|  |   DCHECK(!worker_thread_->IsCurrent()); | ||||||
|  |   worker_thread_->Invoke<void>( | ||||||
|  |       rtc::Bind(&AndroidVideoCapturer::OnIncomingFrame_w, this, videoFrame, | ||||||
|  |                 length, rotation, time_stamp)); | ||||||
|  | } | ||||||
|  |  | ||||||
|  | void AndroidVideoCapturer::OnIncomingFrame_w(signed char* frame_data, | ||||||
|  |                                              int length, | ||||||
|  |                                              int rotation, | ||||||
|  |                                              int64 time_stamp) { | ||||||
|  |   DCHECK(worker_thread_->IsCurrent()); | ||||||
|  |   frame_factory_->UpdateCapturedFrame(frame_data, length, rotation, time_stamp); | ||||||
|  |   SignalFrameCaptured(this, frame_factory_->GetCapturedFrame()); | ||||||
|  | } | ||||||
|  |  | ||||||
|  | }  // namespace webrtc | ||||||
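The constructor above expects the delegate's GetSupportedFormats() string to be a JSON array whose entries carry "width", "height", and "framerate" keys. A hedged sketch of how the Java side (VideoCapturerAndroid, added later in this change, which already imports org.json and java.util.List) could produce that shape; the helper name is illustrative:

    // Illustrative only: builds e.g. [{"width":640,"height":480,"framerate":30}, ...],
    // the shape parsed by AndroidVideoCapturer's constructor.
    static String formatsToJson(List<CaptureFormat> formats) throws JSONException {
      JSONArray array = new JSONArray();
      for (CaptureFormat format : formats) {
        JSONObject entry = new JSONObject();
        entry.put("width", format.width);
        entry.put("height", format.height);
        entry.put("framerate", format.maxFramerate);
        array.put(entry);
      }
      return array.toString();
    }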
							
								
								
									
109  talk/app/webrtc/androidvideocapturer.h  Normal file
							| @@ -0,0 +1,109 @@ | |||||||
|  | /* | ||||||
|  |  * libjingle | ||||||
|  |  * Copyright 2015 Google Inc. | ||||||
|  |  * | ||||||
|  |  * Redistribution and use in source and binary forms, with or without | ||||||
|  |  * modification, are permitted provided that the following conditions are met: | ||||||
|  |  * | ||||||
|  |  *  1. Redistributions of source code must retain the above copyright notice, | ||||||
|  |  *     this list of conditions and the following disclaimer. | ||||||
|  |  *  2. Redistributions in binary form must reproduce the above copyright notice, | ||||||
|  |  *     this list of conditions and the following disclaimer in the documentation | ||||||
|  |  *     and/or other materials provided with the distribution. | ||||||
|  |  *  3. The name of the author may not be used to endorse or promote products | ||||||
|  |  *     derived from this software without specific prior written permission. | ||||||
|  |  * | ||||||
|  |  * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED | ||||||
|  |  * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF | ||||||
|  |  * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO | ||||||
|  |  * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, | ||||||
|  |  * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, | ||||||
|  |  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; | ||||||
|  |  * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, | ||||||
|  |  * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR | ||||||
|  |  * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF | ||||||
|  |  * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | ||||||
|  |  */ | ||||||
|  | #ifndef TALK_APP_WEBRTC_ANDROIDVIDEOCAPTURER_H_ | ||||||
|  | #define TALK_APP_WEBRTC_ANDROIDVIDEOCAPTURER_H_ | ||||||
|  |  | ||||||
|  | #include <string> | ||||||
|  | #include <vector> | ||||||
|  |  | ||||||
|  | #include "talk/media/base/videocapturer.h" | ||||||
|  |  | ||||||
|  | namespace webrtc { | ||||||
|  |  | ||||||
|  | class AndroidVideoCapturer; | ||||||
|  |  | ||||||
|  | class AndroidVideoCapturerDelegate { | ||||||
|  |  public: | ||||||
|  |   virtual ~AndroidVideoCapturerDelegate() {} | ||||||
|  |   // Start capturing. The implementation of the delegate must call | ||||||
|  |   // AndroidVideoCapturer::OnCapturerStarted with the result of this request. | ||||||
|  |   virtual void Start(int width, int height, int framerate, | ||||||
|  |                      AndroidVideoCapturer* capturer) = 0; | ||||||
|  |  | ||||||
|  |   // Stops capturing. The implementation must synchronously stop the capturer. | ||||||
|  |   // The delegate may not call into AndroidVideoCapturer after this call. | ||||||
|  |   virtual bool Stop() = 0; | ||||||
|  |  | ||||||
|  |   // Must return a JSON string "{{width=xxx, height=xxx, framerate = xxx}}" | ||||||
|  |   virtual std::string GetSupportedFormats() = 0; | ||||||
|  | }; | ||||||
|  |  | ||||||
|  | // Android implementation of cricket::VideoCapturer for use with WebRtc | ||||||
|  | // PeerConnection. | ||||||
|  | class AndroidVideoCapturer : public cricket::VideoCapturer { | ||||||
|  |  public: | ||||||
|  |   explicit AndroidVideoCapturer( | ||||||
|  |       rtc::scoped_ptr<AndroidVideoCapturerDelegate> delegate); | ||||||
|  |   virtual ~AndroidVideoCapturer(); | ||||||
|  |  | ||||||
|  |   // Called from JNI when the capturer has been started. Called from a Java | ||||||
|  |   // thread. | ||||||
|  |   void OnCapturerStarted(bool success); | ||||||
|  |  | ||||||
|  |   // Called from JNI when a new frame has been captured. Called from a Java | ||||||
|  |   // thread. | ||||||
|  |   void OnIncomingFrame(signed char* videoFrame, | ||||||
|  |                        int length, | ||||||
|  |                        int rotation, | ||||||
|  |                        int64 time_stamp); | ||||||
|  |  | ||||||
|  |   AndroidVideoCapturerDelegate* delegate() { return delegate_.get(); } | ||||||
|  |  | ||||||
|  |  private: | ||||||
|  |   void OnCapturerStarted_w(bool success); | ||||||
|  |  | ||||||
|  |   void OnIncomingFrame_w(signed char* frame_data, | ||||||
|  |                          int length, | ||||||
|  |                          int rotation, | ||||||
|  |                          int64 time_stamp); | ||||||
|  |  | ||||||
|  |   // cricket::VideoCapturer implementation. | ||||||
|  |   // Video frames will be delivered using | ||||||
|  |   // cricket::VideoCapturer::SignalFrameCaptured on the thread that calls Start. | ||||||
|  |   cricket::CaptureState Start( | ||||||
|  |       const cricket::VideoFormat& capture_format) override; | ||||||
|  |   void Stop() override; | ||||||
|  |   bool IsRunning() override; | ||||||
|  |   bool IsScreencast() const override { return false; } | ||||||
|  |   bool GetPreferredFourccs(std::vector<uint32>* fourccs) override; | ||||||
|  |  | ||||||
|  |   bool running_; | ||||||
|  |   rtc::scoped_ptr<AndroidVideoCapturerDelegate> delegate_; | ||||||
|  |  | ||||||
|  |   // |worker_thread_| is the thread that calls Start and is used for | ||||||
|  |   // communication with the Java capturer. | ||||||
|  |   // Video frames are delivered to cricket::VideoCapturer::SignalFrameCaptured | ||||||
|  |   // on this thread. | ||||||
|  |   rtc::Thread* worker_thread_; | ||||||
|  |  | ||||||
|  |   class FrameFactory; | ||||||
|  |   FrameFactory* frame_factory_;  // Owned by cricket::VideoCapturer. | ||||||
|  | }; | ||||||
|  |  | ||||||
|  | }  // namespace webrtc | ||||||
|  |  | ||||||
|  | #endif  // TALK_APP_WEBRTC_ANDROIDVIDEOCAPTURER_H_ | ||||||
| @@ -91,7 +91,7 @@ | |||||||
|  |  | ||||||
| #if defined(ANDROID) && !defined(WEBRTC_CHROMIUM_BUILD) | #if defined(ANDROID) && !defined(WEBRTC_CHROMIUM_BUILD) | ||||||
| #include <android/log.h> | #include <android/log.h> | ||||||
| #include "webrtc/modules/video_capture/video_capture_internal.h" | #include "talk/app/webrtc/androidvideocapturer.h" | ||||||
| #include "webrtc/modules/video_render/video_render_internal.h" | #include "webrtc/modules/video_render/video_render_internal.h" | ||||||
| #include "webrtc/system_wrappers/interface/logcat_trace_context.h" | #include "webrtc/system_wrappers/interface/logcat_trace_context.h" | ||||||
| #include "webrtc/system_wrappers/interface/tick_util.h" | #include "webrtc/system_wrappers/interface/tick_util.h" | ||||||
| @@ -279,6 +279,8 @@ class ClassReferenceHolder { | |||||||
|     LoadClass(jni, "org/webrtc/IceCandidate"); |     LoadClass(jni, "org/webrtc/IceCandidate"); | ||||||
| #if defined(ANDROID) && !defined(WEBRTC_CHROMIUM_BUILD) | #if defined(ANDROID) && !defined(WEBRTC_CHROMIUM_BUILD) | ||||||
|     LoadClass(jni, "android/graphics/SurfaceTexture"); |     LoadClass(jni, "android/graphics/SurfaceTexture"); | ||||||
|  |     LoadClass(jni, "org/webrtc/VideoCapturerAndroid"); | ||||||
|  |     LoadClass(jni, "org/webrtc/VideoCapturerAndroid$NativeFrameObserver"); | ||||||
|     LoadClass(jni, "org/webrtc/MediaCodecVideoEncoder"); |     LoadClass(jni, "org/webrtc/MediaCodecVideoEncoder"); | ||||||
|     LoadClass(jni, "org/webrtc/MediaCodecVideoEncoder$OutputBufferInfo"); |     LoadClass(jni, "org/webrtc/MediaCodecVideoEncoder$OutputBufferInfo"); | ||||||
|     LoadClass(jni, "org/webrtc/MediaCodecVideoDecoder"); |     LoadClass(jni, "org/webrtc/MediaCodecVideoDecoder"); | ||||||
| @@ -304,6 +306,7 @@ class ClassReferenceHolder { | |||||||
|     LoadClass(jni, "org/webrtc/StatsReport"); |     LoadClass(jni, "org/webrtc/StatsReport"); | ||||||
|     LoadClass(jni, "org/webrtc/StatsReport$Value"); |     LoadClass(jni, "org/webrtc/StatsReport$Value"); | ||||||
|     LoadClass(jni, "org/webrtc/VideoRenderer$I420Frame"); |     LoadClass(jni, "org/webrtc/VideoRenderer$I420Frame"); | ||||||
|  |     LoadClass(jni, "org/webrtc/VideoCapturer"); | ||||||
|     LoadClass(jni, "org/webrtc/VideoTrack"); |     LoadClass(jni, "org/webrtc/VideoTrack"); | ||||||
|   } |   } | ||||||
|  |  | ||||||
| @@ -425,6 +428,36 @@ void DeleteGlobalRef(JNIEnv* jni, jobject o) { | |||||||
|   CHECK_EXCEPTION(jni) << "error during DeleteGlobalRef"; |   CHECK_EXCEPTION(jni) << "error during DeleteGlobalRef"; | ||||||
| } | } | ||||||
|  |  | ||||||
|  | // Convenience macro defining JNI-accessible methods in the org.webrtc package. | ||||||
|  | // Eliminates unnecessary boilerplate and line-wraps, reducing visual clutter. | ||||||
|  | #define JOW(rettype, name) extern "C" rettype JNIEXPORT JNICALL \ | ||||||
|  |   Java_org_webrtc_##name | ||||||
|  |  | ||||||
|  | extern "C" jint JNIEXPORT JNICALL JNI_OnLoad(JavaVM *jvm, void *reserved) { | ||||||
|  |   CHECK(!g_jvm) << "JNI_OnLoad called more than once!"; | ||||||
|  |   g_jvm = jvm; | ||||||
|  |   CHECK(g_jvm) << "JNI_OnLoad handed NULL?"; | ||||||
|  |  | ||||||
|  |   CHECK(!pthread_once(&g_jni_ptr_once, &CreateJNIPtrKey)) << "pthread_once"; | ||||||
|  |  | ||||||
|  |   CHECK(rtc::InitializeSSL()) << "Failed to InitializeSSL()"; | ||||||
|  |  | ||||||
|  |   JNIEnv* jni; | ||||||
|  |   if (jvm->GetEnv(reinterpret_cast<void**>(&jni), JNI_VERSION_1_6) != JNI_OK) | ||||||
|  |     return -1; | ||||||
|  |   g_class_reference_holder = new ClassReferenceHolder(jni); | ||||||
|  |  | ||||||
|  |   return JNI_VERSION_1_6; | ||||||
|  | } | ||||||
|  |  | ||||||
|  | extern "C" void JNIEXPORT JNICALL JNI_OnUnLoad(JavaVM *jvm, void *reserved) { | ||||||
|  |   g_class_reference_holder->FreeReferences(AttachCurrentThreadIfNeeded()); | ||||||
|  |   delete g_class_reference_holder; | ||||||
|  |   g_class_reference_holder = NULL; | ||||||
|  |   CHECK(rtc::CleanupSSL()) << "Failed to CleanupSSL()"; | ||||||
|  |   g_jvm = NULL; | ||||||
|  | } | ||||||
|  |  | ||||||
| // Given a jweak reference, allocate a (strong) local reference scoped to the | // Given a jweak reference, allocate a (strong) local reference scoped to the | ||||||
| // lifetime of this object if the weak reference is still valid, or NULL | // lifetime of this object if the weak reference is still valid, or NULL | ||||||
| // otherwise. | // otherwise. | ||||||
| @@ -2634,46 +2667,113 @@ webrtc::VideoDecoder* MediaCodecVideoDecoderFactory::CreateVideoDecoder( | |||||||
|   return new MediaCodecVideoDecoder(AttachCurrentThreadIfNeeded()); |   return new MediaCodecVideoDecoder(AttachCurrentThreadIfNeeded()); | ||||||
| } | } | ||||||
|  |  | ||||||
|  |  | ||||||
| void MediaCodecVideoDecoderFactory::DestroyVideoDecoder( | void MediaCodecVideoDecoderFactory::DestroyVideoDecoder( | ||||||
|     webrtc::VideoDecoder* decoder) { |     webrtc::VideoDecoder* decoder) { | ||||||
|   delete decoder; |   delete decoder; | ||||||
| } | } | ||||||
|  |  | ||||||
|  | // AndroidVideoCapturerJni implements AndroidVideoCapturerDelegate. | ||||||
|  | // The purpose of the delegate is to hide the JNI specifics from the C++ only | ||||||
|  | // AndroidVideoCapturer. | ||||||
|  | // TODO(perkj): Refactor this to a separate file once the jni utility functions | ||||||
|  | // and classes have been moved. | ||||||
|  | class AndroidVideoCapturerJni : public webrtc::AndroidVideoCapturerDelegate { | ||||||
|  |  public: | ||||||
|  |   static int SetAndroidObjects(JNIEnv* jni, jobject application_context) { | ||||||
|  |     if (application_context_) { | ||||||
|  |       jni->DeleteGlobalRef(application_context_); | ||||||
|  |     } | ||||||
|  |     application_context_ = NewGlobalRef(jni, application_context); | ||||||
|  |  | ||||||
|  |     return 0; | ||||||
|  |   } | ||||||
|  |  | ||||||
|  |   AndroidVideoCapturerJni(JNIEnv* jni, jobject j_video_capturer) | ||||||
|  |       : j_capturer_global_(jni, j_video_capturer), | ||||||
|  |         j_video_capturer_class_( | ||||||
|  |             jni, FindClass(jni, "org/webrtc/VideoCapturerAndroid")), | ||||||
|  |         j_frame_observer_class_( | ||||||
|  |             jni, | ||||||
|  |             FindClass(jni, | ||||||
|  |                       "org/webrtc/VideoCapturerAndroid$NativeFrameObserver")) { | ||||||
|  |   } | ||||||
|  |  | ||||||
|  |   void Start(int width, int height, int framerate, | ||||||
|  |              webrtc::AndroidVideoCapturer* capturer) override { | ||||||
|  |     j_frame_observer_ = NewGlobalRef( | ||||||
|  |         jni(), | ||||||
|  |         jni()->NewObject(*j_frame_observer_class_, | ||||||
|  |                          GetMethodID(jni(), | ||||||
|  |                                      *j_frame_observer_class_, | ||||||
|  |                                      "<init>", | ||||||
|  |                                      "(J)V"), | ||||||
|  |                          jlongFromPointer(capturer))); | ||||||
|  |     CHECK_EXCEPTION(jni()) << "error during NewObject"; | ||||||
|  |  | ||||||
|  |     jmethodID m = GetMethodID( | ||||||
|  |         jni(), *j_video_capturer_class_, "startCapture", | ||||||
|  |         "(IIILandroid/content/Context;" | ||||||
|  |         "Lorg/webrtc/VideoCapturerAndroid$CapturerObserver;)V"); | ||||||
|  |     jni()->CallVoidMethod(*j_capturer_global_, | ||||||
|  |                           m, width, height, | ||||||
|  |                           framerate, | ||||||
|  |                           application_context_, | ||||||
|  |                           j_frame_observer_); | ||||||
|  |     CHECK_EXCEPTION(jni()) << "error during VideoCapturerAndroid.startCapture"; | ||||||
|  |   } | ||||||
|  |  | ||||||
|  |   bool Stop() override { | ||||||
|  |     jmethodID m = GetMethodID(jni(), *j_video_capturer_class_, | ||||||
|  |                               "stopCapture", "()Z"); | ||||||
|  |     jboolean result = jni()->CallBooleanMethod(*j_capturer_global_, m); | ||||||
|  |     CHECK_EXCEPTION(jni()) << "error during VideoCapturerAndroid.stopCapture"; | ||||||
|  |     DeleteGlobalRef(jni(), j_frame_observer_); | ||||||
|  |     return result; | ||||||
|  |   } | ||||||
|  |  | ||||||
|  |   std::string GetSupportedFormats() override { | ||||||
|  |     jmethodID m = | ||||||
|  |         GetMethodID(jni(), *j_video_capturer_class_, | ||||||
|  |                     "getSupportedFormatsAsJson", "()Ljava/lang/String;"); | ||||||
|  |     jstring j_json_caps = | ||||||
|  |         (jstring) jni()->CallObjectMethod(*j_capturer_global_, m); | ||||||
|  |     CHECK_EXCEPTION(jni()) << "error during supportedFormatsAsJson"; | ||||||
|  |     return JavaToStdString(jni(), j_json_caps); | ||||||
|  |   } | ||||||
|  |  | ||||||
|  |  private: | ||||||
|  |   JNIEnv* jni() { return AttachCurrentThreadIfNeeded(); } | ||||||
|  |  | ||||||
|  |   const ScopedGlobalRef<jobject> j_capturer_global_; | ||||||
|  |   const ScopedGlobalRef<jclass> j_video_capturer_class_; | ||||||
|  |   const ScopedGlobalRef<jclass> j_frame_observer_class_; | ||||||
|  |   jobject j_frame_observer_; | ||||||
|  |  | ||||||
|  |   static jobject application_context_; | ||||||
|  | }; | ||||||
|  |  | ||||||
|  | jobject AndroidVideoCapturerJni::application_context_ = nullptr; | ||||||
|  |  | ||||||
|  | JOW(void, VideoCapturerAndroid_00024NativeFrameObserver_nativeOnFrameCaptured) | ||||||
|  |     (JNIEnv* jni, jclass, jlong j_capturer, jbyteArray j_frame, | ||||||
|  |         jint rotation, jlong ts) { | ||||||
|  |   jbyte* bytes = jni->GetByteArrayElements(j_frame, NULL); | ||||||
|  |   reinterpret_cast<webrtc::AndroidVideoCapturer*>( | ||||||
|  |       j_capturer)->OnIncomingFrame(bytes, jni->GetArrayLength(j_frame), | ||||||
|  |                                    rotation, ts); | ||||||
|  |   jni->ReleaseByteArrayElements(j_frame, bytes, JNI_ABORT); | ||||||
|  | } | ||||||
|  |  | ||||||
|  | JOW(void, VideoCapturerAndroid_00024NativeFrameObserver_nativeCapturerStarted) | ||||||
|  |     (JNIEnv* jni, jclass, jlong j_capturer, jboolean j_success) { | ||||||
|  |   reinterpret_cast<webrtc::AndroidVideoCapturer*>( | ||||||
|  |       j_capturer)->OnCapturerStarted(j_success); | ||||||
|  | } | ||||||
|  |  | ||||||
| #endif  // #if defined(ANDROID) && !defined(WEBRTC_CHROMIUM_BUILD) | #endif  // #if defined(ANDROID) && !defined(WEBRTC_CHROMIUM_BUILD) | ||||||
|  |  | ||||||
| }  // anonymous namespace | }  // anonymous namespace | ||||||
|  |  | ||||||
| // Convenience macro defining JNI-accessible methods in the org.webrtc package. |  | ||||||
| // Eliminates unnecessary boilerplate and line-wraps, reducing visual clutter. |  | ||||||
| #define JOW(rettype, name) extern "C" rettype JNIEXPORT JNICALL \ |  | ||||||
|   Java_org_webrtc_##name |  | ||||||
|  |  | ||||||
| extern "C" jint JNIEXPORT JNICALL JNI_OnLoad(JavaVM *jvm, void *reserved) { |  | ||||||
|   CHECK(!g_jvm) << "JNI_OnLoad called more than once!"; |  | ||||||
|   g_jvm = jvm; |  | ||||||
|   CHECK(g_jvm) << "JNI_OnLoad handed NULL?"; |  | ||||||
|  |  | ||||||
|   CHECK(!pthread_once(&g_jni_ptr_once, &CreateJNIPtrKey)) << "pthread_once"; |  | ||||||
|  |  | ||||||
|   CHECK(rtc::InitializeSSL()) << "Failed to InitializeSSL()"; |  | ||||||
|  |  | ||||||
|   JNIEnv* jni; |  | ||||||
|   if (jvm->GetEnv(reinterpret_cast<void**>(&jni), JNI_VERSION_1_6) != JNI_OK) |  | ||||||
|     return -1; |  | ||||||
|   g_class_reference_holder = new ClassReferenceHolder(jni); |  | ||||||
|  |  | ||||||
|   return JNI_VERSION_1_6; |  | ||||||
| } |  | ||||||
|  |  | ||||||
| extern "C" void JNIEXPORT JNICALL JNI_OnUnLoad(JavaVM *jvm, void *reserved) { |  | ||||||
|   g_class_reference_holder->FreeReferences(AttachCurrentThreadIfNeeded()); |  | ||||||
|   delete g_class_reference_holder; |  | ||||||
|   g_class_reference_holder = NULL; |  | ||||||
|   CHECK(rtc::CleanupSSL()) << "Failed to CleanupSSL()"; |  | ||||||
|   g_jvm = NULL; |  | ||||||
| } |  | ||||||
|  |  | ||||||
| static DataChannelInterface* ExtractNativeDC(JNIEnv* jni, jobject j_dc) { | static DataChannelInterface* ExtractNativeDC(JNIEnv* jni, jobject j_dc) { | ||||||
|   jfieldID native_dc_id = GetFieldID(jni, |   jfieldID native_dc_id = GetFieldID(jni, | ||||||
|       GetObjectClass(jni, j_dc), "nativeDataChannel", "J"); |       GetObjectClass(jni, j_dc), "nativeDataChannel", "J"); | ||||||
| @@ -2828,16 +2928,17 @@ JOW(jboolean, PeerConnectionFactory_initializeAndroidGlobals)( | |||||||
|   vp8_hw_acceleration_enabled = vp8_hw_acceleration; |   vp8_hw_acceleration_enabled = vp8_hw_acceleration; | ||||||
|   if (!factory_static_initialized) { |   if (!factory_static_initialized) { | ||||||
|     if (initialize_video) { |     if (initialize_video) { | ||||||
|       failure |= webrtc::SetCaptureAndroidVM(g_jvm, context); |  | ||||||
|       failure |= webrtc::SetRenderAndroidVM(g_jvm); |       failure |= webrtc::SetRenderAndroidVM(g_jvm); | ||||||
|  |       failure |= AndroidVideoCapturerJni::SetAndroidObjects(jni, context); | ||||||
|     } |     } | ||||||
|     if (initialize_audio) |     if (initialize_audio) | ||||||
|       failure |= webrtc::VoiceEngine::SetAndroidObjects(g_jvm, jni, context); |       failure |= webrtc::VoiceEngine::SetAndroidObjects(g_jvm, jni, context); | ||||||
|     factory_static_initialized = true; |     factory_static_initialized = true; | ||||||
|   } |   } | ||||||
|   if (initialize_video) |   if (initialize_video) { | ||||||
|     failure |= MediaCodecVideoDecoder::SetAndroidObjects(jni, |     failure |= MediaCodecVideoDecoder::SetAndroidObjects(jni, | ||||||
|         render_egl_context); |         render_egl_context); | ||||||
|  |   } | ||||||
|   return !failure; |   return !failure; | ||||||
| } | } | ||||||
| #endif  // defined(ANDROID) && !defined(WEBRTC_CHROMIUM_BUILD) | #endif  // defined(ANDROID) && !defined(WEBRTC_CHROMIUM_BUILD) | ||||||
| @@ -3218,8 +3319,32 @@ JOW(jobject, MediaSource_nativeState)(JNIEnv* jni, jclass, jlong j_p) { | |||||||
|   return JavaEnumFromIndex(jni, "MediaSource$State", p->state()); |   return JavaEnumFromIndex(jni, "MediaSource$State", p->state()); | ||||||
| } | } | ||||||
|  |  | ||||||
| JOW(jlong, VideoCapturer_nativeCreateVideoCapturer)( | JOW(jobject, VideoCapturer_nativeCreateVideoCapturer)( | ||||||
|     JNIEnv* jni, jclass, jstring j_device_name) { |     JNIEnv* jni, jclass, jstring j_device_name) { | ||||||
|  | // Since we can't create the platform-specific Java implementation from | ||||||
|  | // generic Java code, we defer the creation to the C++ layer. | ||||||
|  | #if defined(ANDROID) | ||||||
|  |   jclass j_video_capturer_class( | ||||||
|  |       FindClass(jni, "org/webrtc/VideoCapturerAndroid")); | ||||||
|  |   const jmethodID j_videocapturer_ctor(GetMethodID( | ||||||
|  |       jni, j_video_capturer_class, "<init>", "()V")); | ||||||
|  |   jobject j_video_capturer = jni->NewObject(j_video_capturer_class, | ||||||
|  |                                             j_videocapturer_ctor); | ||||||
|  |   CHECK_EXCEPTION(jni) << "error during NewObject"; | ||||||
|  |  | ||||||
|  |   const jmethodID m(GetMethodID( | ||||||
|  |       jni, j_video_capturer_class, "Init", "(Ljava/lang/String;)Z")); | ||||||
|  |   if (!jni->CallBooleanMethod(j_video_capturer, m, j_device_name)) { | ||||||
|  |     return nullptr; | ||||||
|  |   } | ||||||
|  |   CHECK_EXCEPTION(jni) << "error during CallBooleanMethod"; | ||||||
|  |  | ||||||
|  |   rtc::scoped_ptr<webrtc::AndroidVideoCapturerDelegate> delegate( | ||||||
|  |       new AndroidVideoCapturerJni(jni, j_video_capturer)); | ||||||
|  |   rtc::scoped_ptr<webrtc::AndroidVideoCapturer> capturer( | ||||||
|  |       new webrtc::AndroidVideoCapturer(delegate.Pass())); | ||||||
|  |  | ||||||
|  | #else | ||||||
|   std::string device_name = JavaToStdString(jni, j_device_name); |   std::string device_name = JavaToStdString(jni, j_device_name); | ||||||
|   scoped_ptr<cricket::DeviceManagerInterface> device_manager( |   scoped_ptr<cricket::DeviceManagerInterface> device_manager( | ||||||
|       cricket::DeviceManagerFactory::Create()); |       cricket::DeviceManagerFactory::Create()); | ||||||
| @@ -3231,7 +3356,24 @@ JOW(jlong, VideoCapturer_nativeCreateVideoCapturer)( | |||||||
|   } |   } | ||||||
|   scoped_ptr<cricket::VideoCapturer> capturer( |   scoped_ptr<cricket::VideoCapturer> capturer( | ||||||
|       device_manager->CreateVideoCapturer(device)); |       device_manager->CreateVideoCapturer(device)); | ||||||
|   return (jlong)capturer.release(); |  | ||||||
|  |   jclass j_video_capturer_class( | ||||||
|  |       FindClass(jni, "org/webrtc/VideoCapturer")); | ||||||
|  |   const jmethodID j_videocapturer_ctor(GetMethodID( | ||||||
|  |       jni, j_video_capturer_class, "<init>", "()V")); | ||||||
|  |   jobject j_video_capturer = | ||||||
|  |       jni->NewObject(j_video_capturer_class, | ||||||
|  |                      j_videocapturer_ctor); | ||||||
|  |   CHECK_EXCEPTION(jni) << "error during creation of VideoCapturer"; | ||||||
|  |  | ||||||
|  | #endif | ||||||
|  |   const jmethodID j_videocapturer_set_native_capturer(GetMethodID( | ||||||
|  |       jni, j_video_capturer_class, "setNativeCapturer", "(J)V")); | ||||||
|  |   jni->CallVoidMethod(j_video_capturer, | ||||||
|  |                       j_videocapturer_set_native_capturer, | ||||||
|  |                       (jlong)capturer.release()); | ||||||
|  |   CHECK_EXCEPTION(jni) << "error during setNativeCapturer"; | ||||||
|  |   return j_video_capturer; | ||||||
| } | } | ||||||
|  |  | ||||||
| JOW(jlong, VideoRenderer_nativeCreateGuiVideoRenderer)( | JOW(jlong, VideoRenderer_nativeCreateGuiVideoRenderer)( | ||||||
|   | |||||||
| @@ -31,16 +31,19 @@ package org.webrtc; | |||||||
| public class VideoCapturer { | public class VideoCapturer { | ||||||
|   private long nativeVideoCapturer; |   private long nativeVideoCapturer; | ||||||
|  |  | ||||||
|   private VideoCapturer(long nativeVideoCapturer) { |   protected VideoCapturer() { | ||||||
|     this.nativeVideoCapturer = nativeVideoCapturer; |  | ||||||
|   } |   } | ||||||
|  |  | ||||||
|   public static VideoCapturer create(String deviceName) { |   public static VideoCapturer create(String deviceName) { | ||||||
|     long nativeVideoCapturer = nativeCreateVideoCapturer(deviceName); |     Object capturer = nativeCreateVideoCapturer(deviceName); | ||||||
|     if (nativeVideoCapturer == 0) { |     if (capturer != null) | ||||||
|       return null; |       return (VideoCapturer) (capturer); | ||||||
|     } |     return null; | ||||||
|     return new VideoCapturer(nativeVideoCapturer); |   } | ||||||
|  |  | ||||||
|  |   // Sets |nativeCapturer| to be owned by VideoCapturer. | ||||||
|  |   protected void setNativeCapturer(long nativeCapturer) { | ||||||
|  |     this.nativeVideoCapturer = nativeCapturer; | ||||||
|   } |   } | ||||||
|  |  | ||||||
|   // Package-visible for PeerConnectionFactory. |   // Package-visible for PeerConnectionFactory. | ||||||
| @@ -61,7 +64,7 @@ public class VideoCapturer { | |||||||
|     } |     } | ||||||
|   } |   } | ||||||
|  |  | ||||||
|   private static native long nativeCreateVideoCapturer(String deviceName); |   private static native Object nativeCreateVideoCapturer(String deviceName); | ||||||
|  |  | ||||||
|   private static native void free(long nativeVideoCapturer); |   private static native void free(long nativeVideoCapturer); | ||||||
| } | } | ||||||
|   | |||||||
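The VideoCapturerAndroid class added below also provides device-enumeration helpers on top of this generic factory. A brief usage sketch (illustrative; the logged name format follows getDeviceName(), and getNameOfBackFacingDevice() throws if no back camera exists):

    // List the available cameras and open the back-facing one.
    for (String name : VideoCapturerAndroid.getDeviceNames()) {
      Log.d("CapturerDemo", "Available: " + name);  // e.g. "Camera 0, Facing back, Orientation 90"
    }
    VideoCapturerAndroid capturer =
        VideoCapturerAndroid.create(VideoCapturerAndroid.getNameOfBackFacingDevice());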
							
								
								
									
605  talk/app/webrtc/java/src/org/webrtc/VideoCapturerAndroid.java  Normal file
							| @@ -0,0 +1,605 @@ | |||||||
|  | /* | ||||||
|  |  * libjingle | ||||||
|  |  * Copyright 2015 Google Inc. | ||||||
|  |  * | ||||||
|  |  * Redistribution and use in source and binary forms, with or without | ||||||
|  |  * modification, are permitted provided that the following conditions are met: | ||||||
|  |  * | ||||||
|  |  *  1. Redistributions of source code must retain the above copyright notice, | ||||||
|  |  *     this list of conditions and the following disclaimer. | ||||||
|  |  *  2. Redistributions in binary form must reproduce the above copyright notice, | ||||||
|  |  *     this list of conditions and the following disclaimer in the documentation | ||||||
|  |  *     and/or other materials provided with the distribution. | ||||||
|  |  *  3. The name of the author may not be used to endorse or promote products | ||||||
|  |  *     derived from this software without specific prior written permission. | ||||||
|  |  * | ||||||
|  |  * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED | ||||||
|  |  * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF | ||||||
|  |  * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO | ||||||
|  |  * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, | ||||||
|  |  * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, | ||||||
|  |  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; | ||||||
|  |  * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, | ||||||
|  |  * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR | ||||||
|  |  * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF | ||||||
|  |  * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | ||||||
|  |  */ | ||||||
|  |  | ||||||
|  | package org.webrtc; | ||||||
|  |  | ||||||
|  | import static java.lang.Math.abs; | ||||||
|  |  | ||||||
|  | import android.content.Context; | ||||||
|  | import android.graphics.ImageFormat; | ||||||
|  | import android.graphics.SurfaceTexture; | ||||||
|  | import android.hardware.Camera; | ||||||
|  | import android.hardware.Camera.PreviewCallback; | ||||||
|  | import android.opengl.GLES11Ext; | ||||||
|  | import android.opengl.GLES20; | ||||||
|  | import android.os.Handler; | ||||||
|  | import android.os.Looper; | ||||||
|  | import android.os.SystemClock; | ||||||
|  | import android.util.Log; | ||||||
|  | import android.view.Surface; | ||||||
|  | import android.view.WindowManager; | ||||||
|  |  | ||||||
|  | import org.json.JSONArray; | ||||||
|  | import org.json.JSONException; | ||||||
|  | import org.json.JSONObject; | ||||||
|  |  | ||||||
|  | import java.io.IOException; | ||||||
|  | import java.util.ArrayList; | ||||||
|  | import java.util.List; | ||||||
|  | import java.util.concurrent.Exchanger; | ||||||
|  |  | ||||||
|  | // Android-specific implementation of VideoCapturer. | ||||||
|  | // An instance of this class can be created by an application using | ||||||
|  | // VideoCapturerAndroid.create(); | ||||||
|  | // This class extends VideoCapturer with a method to easily switch between the | ||||||
|  | // front and back camera. It also provides methods for enumerating valid device | ||||||
|  | // names. | ||||||
|  | // | ||||||
|  | // Threading notes: this class is called from C++ code, and from Camera | ||||||
|  | // Java callbacks.  Since these calls happen on different threads, | ||||||
|  | // the entry points to this class are all synchronized.  This shouldn't present | ||||||
|  | // a performance bottleneck because only onPreviewFrame() is called more than | ||||||
|  | // once (and is called serially on a single thread), so the lock should be | ||||||
|  | // uncontended.  Note that each of these synchronized methods must check | ||||||
|  | // |camera| for null to account for having possibly waited for stopCapture() to | ||||||
|  | // complete. | ||||||
|  | @SuppressWarnings("deprecation") | ||||||
|  | public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallback { | ||||||
|  |   private final static String TAG = "VideoCapturerAndroid"; | ||||||
|  |  | ||||||
|  |   private Camera camera;  // Only non-null while capturing. | ||||||
|  |   private CameraThread cameraThread; | ||||||
|  |   private Handler cameraThreadHandler; | ||||||
|  |   private Context applicationContext; | ||||||
|  |   private int id; | ||||||
|  |   private Camera.CameraInfo info; | ||||||
|  |   private SurfaceTexture cameraSurfaceTexture; | ||||||
|  |   private int[] cameraGlTextures = null; | ||||||
|  |   // Arbitrary queue depth.  Higher number means more memory allocated & held, | ||||||
|  |   // lower number means more sensitivity to processing time in the client (and | ||||||
|  |   // potentially stalling the capturer if it runs out of buffers to write to). | ||||||
|  |   private final int numCaptureBuffers = 3; | ||||||
|  |   private int width; | ||||||
|  |   private int height; | ||||||
|  |   private int framerate; | ||||||
|  |   private CapturerObserver frameObserver = null; | ||||||
|  |   // List of formats supported by all cameras. This list is filled once in order | ||||||
|  |   // to be able to switch cameras. | ||||||
|  |   private static ArrayList<CaptureFormat>[] supportedFormats; | ||||||
|  |  | ||||||
|  |   // Returns device names that can be used to create a new VideoCapturerAndroid. | ||||||
|  |   public static String[] getDeviceNames() { | ||||||
|  |     String[] names = new String[Camera.getNumberOfCameras()]; | ||||||
|  |     for (int i = 0; i < Camera.getNumberOfCameras(); ++i) { | ||||||
|  |       names[i] = getDeviceName(i); | ||||||
|  |     } | ||||||
|  |     return names; | ||||||
|  |   } | ||||||
|  |  | ||||||
|  |   public static String getDeviceName(int index) { | ||||||
|  |     Camera.CameraInfo info = new Camera.CameraInfo(); | ||||||
|  |     Camera.getCameraInfo(index, info); | ||||||
|  |     String facing = | ||||||
|  |         (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) ? "front" : "back"; | ||||||
|  |     return "Camera " + index + ", Facing " + facing | ||||||
|  |         + ", Orientation " + info.orientation; | ||||||
|  |   } | ||||||
|  |  | ||||||
|  |   public static String getNameOfFrontFacingDevice() { | ||||||
|  |     for (int i = 0; i < Camera.getNumberOfCameras(); ++i) { | ||||||
|  |       Camera.CameraInfo info = new Camera.CameraInfo(); | ||||||
|  |       Camera.getCameraInfo(i, info); | ||||||
|  |       if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) | ||||||
|  |         return getDeviceName(i); | ||||||
|  |     } | ||||||
|  |     throw new RuntimeException("Front facing camera does not exist."); | ||||||
|  |   } | ||||||
|  |  | ||||||
|  |   public static String getNameOfBackFacingDevice() { | ||||||
|  |     for (int i = 0; i < Camera.getNumberOfCameras(); ++i) { | ||||||
|  |       Camera.CameraInfo info = new Camera.CameraInfo(); | ||||||
|  |       Camera.getCameraInfo(i, info); | ||||||
|  |       if (info.facing == Camera.CameraInfo.CAMERA_FACING_BACK) | ||||||
|  |         return getDeviceName(i); | ||||||
|  |     } | ||||||
|  |     throw new RuntimeException("Back facing camera does not exist."); | ||||||
|  |   } | ||||||
|  |  | ||||||
|  |   public static VideoCapturerAndroid create(String name) { | ||||||
|  |     VideoCapturer capturer = VideoCapturer.create(name); | ||||||
|  |     if (capturer != null) | ||||||
|  |       return (VideoCapturerAndroid) capturer; | ||||||
|  |     return null; | ||||||
|  |   } | ||||||
|  |  | ||||||
|  |   // Switches to the next valid camera id. This can only be called while the | ||||||
|  |   // camera is running. Returns true if the switch has been scheduled on the | ||||||
|  |   // camera thread; returns false if there is only one camera, if the capturer | ||||||
|  |   // has not been started, or if the next camera does not support the current | ||||||
|  |   // resolution. | ||||||
|  |   public synchronized boolean switchCamera() { | ||||||
|  |     if (Camera.getNumberOfCameras() < 2) | ||||||
|  |       return false; | ||||||
|  |  | ||||||
|  |     if (cameraThread == null) { | ||||||
|  |       Log.e(TAG, "Camera has not been started"); | ||||||
|  |       return false; | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     id = (id + 1) % Camera.getNumberOfCameras(); | ||||||
|  |  | ||||||
|  |     CaptureFormat formatToUse = null; | ||||||
|  |     for (CaptureFormat format : supportedFormats[id]) { | ||||||
|  |       if (format.width == width && format.height == height) { | ||||||
|  |         formatToUse = format; | ||||||
|  |         break; | ||||||
|  |       } | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     if (formatToUse == null) { | ||||||
|  |       Log.d(TAG, "No valid format found to switch camera."); | ||||||
|  |       return false; | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     cameraThreadHandler.post(new Runnable() { | ||||||
|  |       @Override public void run() { | ||||||
|  |         switchCameraOnCameraThread(); | ||||||
|  |       } | ||||||
|  |     }); | ||||||
|  |     return true; | ||||||
|  |   } | ||||||
|  |  | ||||||
|  |   private VideoCapturerAndroid() { | ||||||
|  |     Log.d(TAG, "VideoCapturerAndroid"); | ||||||
|  |   } | ||||||
|  |  | ||||||
|  |   // Called by native code. | ||||||
|  |   // Enumerates resolutions and frame rates for all cameras, to be able to switch | ||||||
|  |   // cameras. Initializes local variables for the camera named |deviceName|. | ||||||
|  |   // If deviceName is empty, the first available device is used in order to be | ||||||
|  |   // compatible with the generic VideoCapturer class. | ||||||
|  |   boolean Init(String deviceName) { | ||||||
|  |     Log.e(TAG, "Init " + deviceName); | ||||||
|  |     if (!InitStatics()) | ||||||
|  |       return false; | ||||||
|  |  | ||||||
|  |     if (deviceName.isEmpty()) { | ||||||
|  |       this.id = 0; | ||||||
|  |       return true; | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     boolean foundDevice = false; | ||||||
|  |     for (int i = 0; i < Camera.getNumberOfCameras(); ++i) { | ||||||
|  |       if (deviceName.equals(getDeviceName(i))) { | ||||||
|  |         this.id = i; | ||||||
|  |         foundDevice = true; | ||||||
|  |       } | ||||||
|  |     } | ||||||
|  |     return foundDevice; | ||||||
|  |   } | ||||||
|  |  | ||||||
|  |   private static boolean InitStatics() { | ||||||
|  |     if (supportedFormats != null) | ||||||
|  |       return true; | ||||||
|  |     try { | ||||||
|  |       supportedFormats = new ArrayList[Camera.getNumberOfCameras()]; | ||||||
|  |       for (int i = 0; i < Camera.getNumberOfCameras(); ++i) { | ||||||
|  |         supportedFormats[i] = getSupportedFormats(i); | ||||||
|  |       } | ||||||
|  |       return true; | ||||||
|  |     } catch (Exception e) { | ||||||
|  |       supportedFormats = null; | ||||||
|  |       Log.e(TAG, "InitStatics failed",e); | ||||||
|  |     } | ||||||
|  |     return false; | ||||||
|  |   } | ||||||
|  |  | ||||||
|  |   String getSupportedFormatsAsJson() throws JSONException { | ||||||
|  |     return getSupportedFormatsAsJson(id); | ||||||
|  |   } | ||||||
|  |  | ||||||
|  |   static class CaptureFormat { | ||||||
|  |     public final int width; | ||||||
|  |     public final int height; | ||||||
|  |     public final int maxFramerate; | ||||||
|  |     public final int minFramerate; | ||||||
|  |  | ||||||
|  |     public CaptureFormat(int width, int height, int minFramerate, | ||||||
|  |         int maxFramerate) { | ||||||
|  |       this.width = width; | ||||||
|  |       this.height = height; | ||||||
|  |       this.minFramerate = minFramerate; | ||||||
|  |       this.maxFramerate = maxFramerate; | ||||||
|  |     } | ||||||
|  |   } | ||||||
|  |  | ||||||
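|  |   // Produces a JSON array describing the supported formats, e.g. (illustrative | ||||||
|  |   // values) [{"width":1280,"height":720,"framerate":30}, ...]. The framerate | ||||||
|  |   // field is the maximum fps, rounded up from the Camera API's fps*1000 units. | ||||||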
|  |   private static String getSupportedFormatsAsJson(int id) throws JSONException { | ||||||
|  |     ArrayList<CaptureFormat> formats = supportedFormats[id]; | ||||||
|  |     JSONArray json_formats = new JSONArray(); | ||||||
|  |     for (CaptureFormat format : formats) { | ||||||
|  |       JSONObject json_format = new JSONObject(); | ||||||
|  |       json_format.put("width", format.width); | ||||||
|  |       json_format.put("height", format.height); | ||||||
|  |       json_format.put("framerate", (format.maxFramerate + 999) / 1000); | ||||||
|  |       json_formats.put(json_format); | ||||||
|  |     } | ||||||
|  |     Log.d(TAG, "Supported formats: " + json_formats.toString(2)); | ||||||
|  |     return json_formats.toString(); | ||||||
|  |   } | ||||||
|  |  | ||||||
|  |   // Returns a list of CaptureFormat for the camera with index id. | ||||||
|  |   static ArrayList<CaptureFormat> getSupportedFormats(int id) { | ||||||
|  |     Camera camera = Camera.open(id); | ||||||
|  |     Camera.Parameters parameters = camera.getParameters(); | ||||||
|  |  | ||||||
|  |     ArrayList<CaptureFormat> formatList = new ArrayList<CaptureFormat>(); | ||||||
|  |     // getSupportedPreviewFpsRange returns a sorted list. | ||||||
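|  |     // The reported values are in the Camera API's units of frames per second | ||||||
|  |     // * 1000, so the last (largest) entry has the highest maximum fps. | ||||||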
|  |     List<int[]> listFpsRange = parameters.getSupportedPreviewFpsRange(); | ||||||
|  |     int[] range = {0, 0}; | ||||||
|  |     if (listFpsRange != null) | ||||||
|  |       range = listFpsRange.get(listFpsRange.size() - 1); | ||||||
|  |  | ||||||
|  |     List<Camera.Size> supportedSizes = | ||||||
|  |         parameters.getSupportedPreviewSizes(); | ||||||
|  |     for (Camera.Size size : supportedSizes) { | ||||||
|  |       formatList.add(new CaptureFormat(size.width, size.height, | ||||||
|  |           range[Camera.Parameters.PREVIEW_FPS_MIN_INDEX], | ||||||
|  |           range[Camera.Parameters.PREVIEW_FPS_MAX_INDEX])); | ||||||
|  |     } | ||||||
|  |     camera.release(); | ||||||
|  |     return formatList; | ||||||
|  |   } | ||||||
|  |  | ||||||
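|  |   // Thread that runs a Looper so that camera work, and the Camera callbacks, | ||||||
|  |   // can be posted to it. Its Handler is handed back to the starting thread | ||||||
|  |   // through the Exchanger. | ||||||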
|  |   private class CameraThread extends Thread { | ||||||
|  |     private Exchanger<Handler> handlerExchanger; | ||||||
|  |     public CameraThread(Exchanger<Handler> handlerExchanger) { | ||||||
|  |       this.handlerExchanger = handlerExchanger; | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     @Override public void run() { | ||||||
|  |       Looper.prepare(); | ||||||
|  |       exchange(handlerExchanger, new Handler()); | ||||||
|  |       Looper.loop(); | ||||||
|  |     } | ||||||
|  |   } | ||||||
|  |  | ||||||
|  |   // Called by native code. Starts the capturer asynchronously. | ||||||
|  |   // | ||||||
|  |   // Note that this actually opens the camera, and Camera callbacks run on the | ||||||
|  |   // thread that calls open(), so the camera is opened on the CameraThread. The | ||||||
|  |   // result is reported asynchronously through frameObserver.OnCapturerStarted(). | ||||||
|  |   synchronized void startCapture( | ||||||
|  |       final int width, final int height, final int framerate, | ||||||
|  |       final Context applicationContext, final CapturerObserver frameObserver) { | ||||||
|  |     Log.d(TAG, "startCapture requested: " + width + "x" + height | ||||||
|  |         + "@" + framerate); | ||||||
|  |     if (cameraThread != null || cameraThreadHandler != null) { | ||||||
|  |       throw new RuntimeException("Camera thread already started!"); | ||||||
|  |     } | ||||||
|  |     if (applicationContext == null) { | ||||||
|  |       throw new RuntimeException("applicationContext not set."); | ||||||
|  |     } | ||||||
|  |     if (frameObserver == null) { | ||||||
|  |       throw new RuntimeException("frameObserver not set."); | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     Exchanger<Handler> handlerExchanger = new Exchanger<Handler>(); | ||||||
|  |     cameraThread = new CameraThread(handlerExchanger); | ||||||
|  |     cameraThread.start(); | ||||||
|  |     cameraThreadHandler = exchange(handlerExchanger, null); | ||||||
|  |  | ||||||
|  |     cameraThreadHandler.post(new Runnable() { | ||||||
|  |       @Override public void run() { | ||||||
|  |         startCaptureOnCameraThread(width, height, framerate, frameObserver, | ||||||
|  |             applicationContext); | ||||||
|  |       } | ||||||
|  |     }); | ||||||
|  |   } | ||||||
|  |  | ||||||
|  |   private void startCaptureOnCameraThread( | ||||||
|  |       int width, int height, int framerate, CapturerObserver frameObserver, | ||||||
|  |       Context applicationContext) { | ||||||
|  |     Throwable error = null; | ||||||
|  |     this.applicationContext = applicationContext; | ||||||
|  |     this.frameObserver = frameObserver; | ||||||
|  |     this.width = width; | ||||||
|  |     this.height = height; | ||||||
|  |     this.framerate = framerate; | ||||||
|  |     try { | ||||||
|  |       this.camera = Camera.open(id); | ||||||
|  |       this.info = new Camera.CameraInfo(); | ||||||
|  |       Camera.getCameraInfo(id, info); | ||||||
|  |  | ||||||
|  |       // No local renderer (we only care about onPreviewFrame() buffers, not a | ||||||
|  |       // directly-displayed UI element).  Camera won't capture without | ||||||
|  |       // setPreview{Texture,Display}, so we create a SurfaceTexture and hand | ||||||
|  |       // it over to Camera, but never listen for frame-ready callbacks, | ||||||
|  |       // and never call updateTexImage on it. | ||||||
|  |       try { | ||||||
|  |         cameraSurfaceTexture = null; | ||||||
|  |  | ||||||
|  |         cameraGlTextures = new int[1]; | ||||||
|  |         // Generate one texture pointer and bind it as an external texture. | ||||||
|  |         GLES20.glGenTextures(1, cameraGlTextures, 0); | ||||||
|  |         GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, | ||||||
|  |             cameraGlTextures[0]); | ||||||
|  |         GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, | ||||||
|  |             GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR); | ||||||
|  |         GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, | ||||||
|  |             GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR); | ||||||
|  |         GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, | ||||||
|  |             GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE); | ||||||
|  |         GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, | ||||||
|  |             GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE); | ||||||
|  |  | ||||||
|  |         cameraSurfaceTexture = new SurfaceTexture(cameraGlTextures[0]); | ||||||
|  |         cameraSurfaceTexture.setOnFrameAvailableListener(null); | ||||||
|  |  | ||||||
|  |         camera.setPreviewTexture(cameraSurfaceTexture); | ||||||
|  |       } catch (IOException e) { | ||||||
|  |         throw new RuntimeException(e); | ||||||
|  |       } | ||||||
|  |  | ||||||
|  |       Log.d(TAG, "Camera orientation: " + info.orientation + | ||||||
|  |           " .Device orientation: " + getDeviceOrientation()); | ||||||
|  |       Camera.Parameters parameters = camera.getParameters(); | ||||||
|  |       Log.d(TAG, "isVideoStabilizationSupported: " + | ||||||
|  |           parameters.isVideoStabilizationSupported()); | ||||||
|  |       if (parameters.isVideoStabilizationSupported()) { | ||||||
|  |         parameters.setVideoStabilization(true); | ||||||
|  |       } | ||||||
|  |  | ||||||
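|  |       // The Camera API expresses fps ranges in units of frames per second | ||||||
|  |       // * 1000. | ||||||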
|  |       int androidFramerate = framerate * 1000; | ||||||
|  |       int[] range = getFramerateRange(parameters, androidFramerate); | ||||||
|  |       if (range != null) { | ||||||
|  |         Log.d(TAG, "Start capturing: " + width + "x" + height + "@[" + | ||||||
|  |             range[Camera.Parameters.PREVIEW_FPS_MIN_INDEX]  + ":" + | ||||||
|  |             range[Camera.Parameters.PREVIEW_FPS_MAX_INDEX] + "]"); | ||||||
|  |         parameters.setPreviewFpsRange( | ||||||
|  |             range[Camera.Parameters.PREVIEW_FPS_MIN_INDEX], | ||||||
|  |             range[Camera.Parameters.PREVIEW_FPS_MAX_INDEX]); | ||||||
|  |       } | ||||||
|  |       parameters.setPictureSize(width, height); | ||||||
|  |       parameters.setPreviewSize(width, height); | ||||||
|  |       int format = ImageFormat.NV21; | ||||||
|  |       parameters.setPreviewFormat(format); | ||||||
|  |       camera.setParameters(parameters); | ||||||
|  |       // Note: setRecordingHint(true) actually decreases the frame rate on the | ||||||
|  |       // Nexus 5. | ||||||
|  |       // parameters.setRecordingHint(true); | ||||||
|  |  | ||||||
|  |       int bufSize = width * height * ImageFormat.getBitsPerPixel(format) / 8; | ||||||
|  |       for (int i = 0; i < numCaptureBuffers; i++) { | ||||||
|  |         camera.addCallbackBuffer(new byte[bufSize]); | ||||||
|  |       } | ||||||
|  |       camera.setPreviewCallbackWithBuffer(this); | ||||||
|  |  | ||||||
|  |       camera.startPreview(); | ||||||
|  |       frameObserver.OnCapturerStarted(true); | ||||||
|  |       return; | ||||||
|  |     } catch (RuntimeException e) { | ||||||
|  |       error = e; | ||||||
|  |     } | ||||||
|  |     Log.e(TAG, "startCapture failed", error); | ||||||
|  |     if (camera != null) { | ||||||
|  |       Exchanger<Boolean> resultDropper = new Exchanger<Boolean>(); | ||||||
|  |       stopCaptureOnCameraThread(resultDropper); | ||||||
|  |       frameObserver.OnCapturerStarted(false); | ||||||
|  |     } | ||||||
|  |     frameObserver.OnCapturerStarted(false); | ||||||
|  |     return; | ||||||
|  |   } | ||||||
|  |  | ||||||
|  |   // Called by native code.  Returns true when camera is known to be stopped. | ||||||
|  |   synchronized boolean stopCapture() { | ||||||
|  |     Log.d(TAG, "stopCapture"); | ||||||
|  |     final Exchanger<Boolean> result = new Exchanger<Boolean>(); | ||||||
|  |     cameraThreadHandler.post(new Runnable() { | ||||||
|  |         @Override public void run() { | ||||||
|  |           stopCaptureOnCameraThread(result); | ||||||
|  |         } | ||||||
|  |       }); | ||||||
|  |     boolean status = exchange(result, false);  // |false| is a dummy value here. | ||||||
|  |     try { | ||||||
|  |       cameraThread.join(); | ||||||
|  |     } catch (InterruptedException e) { | ||||||
|  |       throw new RuntimeException(e); | ||||||
|  |     } | ||||||
|  |     cameraThreadHandler = null; | ||||||
|  |     cameraThread = null; | ||||||
|  |     Log.d(TAG, "stopCapture done"); | ||||||
|  |     return status; | ||||||
|  |   } | ||||||
|  |  | ||||||
|  |   private void stopCaptureOnCameraThread(Exchanger<Boolean> result) { | ||||||
|  |     Log.d(TAG, "stopCaptureOnCameraThread"); | ||||||
|  |     if (camera == null) { | ||||||
|  |       throw new RuntimeException("Camera is already stopped!"); | ||||||
|  |     } | ||||||
|  |     frameObserver = null; | ||||||
|  |  | ||||||
|  |     doStopCaptureOnCameraThread(); | ||||||
|  |     exchange(result, true); | ||||||
|  |     Looper.myLooper().quit(); | ||||||
|  |     return; | ||||||
|  |   } | ||||||
|  |  | ||||||
|  |   private void doStopCaptureOnCameraThread() { | ||||||
|  |     try { | ||||||
|  |       camera.stopPreview(); | ||||||
|  |       camera.setPreviewCallbackWithBuffer(null); | ||||||
|  |  | ||||||
|  |       camera.setPreviewTexture(null); | ||||||
|  |       cameraSurfaceTexture = null; | ||||||
|  |       if (cameraGlTextures != null) { | ||||||
|  |         GLES20.glDeleteTextures(1, cameraGlTextures, 0); | ||||||
|  |         cameraGlTextures = null; | ||||||
|  |       } | ||||||
|  |  | ||||||
|  |       camera.release(); | ||||||
|  |       camera = null; | ||||||
|  |     } catch (IOException e) { | ||||||
|  |       Log.e(TAG, "Failed to stop camera", e); | ||||||
|  |     } | ||||||
|  |   } | ||||||
|  |  | ||||||
|  |   private void switchCameraOnCameraThread() { | ||||||
|  |     Log.d(TAG, "switchCameraOnCameraThread"); | ||||||
|  |  | ||||||
|  |     doStopCaptureOnCameraThread(); | ||||||
|  |     startCaptureOnCameraThread(width, height, framerate, frameObserver, | ||||||
|  |         applicationContext); | ||||||
|  |   } | ||||||
|  |  | ||||||
|  |   private int getDeviceOrientation() { | ||||||
|  |     int orientation = 0; | ||||||
|  |  | ||||||
|  |     WindowManager wm = (WindowManager) applicationContext.getSystemService( | ||||||
|  |         Context.WINDOW_SERVICE); | ||||||
|  |     switch(wm.getDefaultDisplay().getRotation()) { | ||||||
|  |       case Surface.ROTATION_90: | ||||||
|  |         orientation = 90; | ||||||
|  |         break; | ||||||
|  |       case Surface.ROTATION_180: | ||||||
|  |         orientation = 180; | ||||||
|  |         break; | ||||||
|  |       case Surface.ROTATION_270: | ||||||
|  |         orientation = 270; | ||||||
|  |         break; | ||||||
|  |       case Surface.ROTATION_0: | ||||||
|  |       default: | ||||||
|  |         orientation = 0; | ||||||
|  |         break; | ||||||
|  |     } | ||||||
|  |     return orientation; | ||||||
|  |   } | ||||||
|  |  | ||||||
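|  |   // Returns the supported fps range closest to |framerate| (in fps*1000 units), | ||||||
|  |   // minimizing the summed distance of the range's min and max to the target. | ||||||
|  |   // For example (illustrative values), with a 30000 target the range | ||||||
|  |   // [15000, 30000] scores 15000 while [30000, 30000] scores 0, so the latter | ||||||
|  |   // wins. | ||||||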
|  |   private static int[] getFramerateRange(Camera.Parameters parameters, | ||||||
|  |                                          int framerate) { | ||||||
|  |     List<int[]> listFpsRange = parameters.getSupportedPreviewFpsRange(); | ||||||
|  |     int[] bestRange = null; | ||||||
|  |     int bestRangeDiff = Integer.MAX_VALUE; | ||||||
|  |     for (int[] range : listFpsRange) { | ||||||
|  |       int rangeDiff = | ||||||
|  |           abs(framerate - range[Camera.Parameters.PREVIEW_FPS_MIN_INDEX]) | ||||||
|  |           + abs(range[Camera.Parameters.PREVIEW_FPS_MAX_INDEX] - framerate); | ||||||
|  |       if (bestRangeDiff > rangeDiff) { | ||||||
|  |         bestRange = range; | ||||||
|  |         bestRangeDiff = rangeDiff; | ||||||
|  |       } | ||||||
|  |     } | ||||||
|  |     return bestRange; | ||||||
|  |   } | ||||||
|  |  | ||||||
|  |   // Called on the camera thread, so it must not be "synchronized": stopCapture() | ||||||
|  |   // holds the lock while waiting for the camera thread, which would deadlock. | ||||||
|  |   @Override | ||||||
|  |   public void onPreviewFrame(byte[] data, Camera callbackCamera) { | ||||||
|  |     if (Thread.currentThread() != cameraThread) { | ||||||
|  |       throw new RuntimeException("Camera callback not on camera thread?!?"); | ||||||
|  |     } | ||||||
|  |     if (camera == null) { | ||||||
|  |       return; | ||||||
|  |     } | ||||||
|  |     if (camera != callbackCamera) { | ||||||
|  |       throw new RuntimeException("Unexpected camera in callback!"); | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     long captureTimeMs = SystemClock.elapsedRealtime(); | ||||||
|  |  | ||||||
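|  |     // Compute how much the frame must be rotated to appear upright, combining | ||||||
|  |     // the camera sensor orientation with the current display rotation. For | ||||||
|  |     // example (illustrative values): a back-facing camera with orientation 90 | ||||||
|  |     // on a device held in its natural position (rotation 0) yields | ||||||
|  |     // (90 + (360 - 0)) % 360 = 90. | ||||||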
|  |     int rotation = getDeviceOrientation(); | ||||||
|  |     if (info.facing == Camera.CameraInfo.CAMERA_FACING_BACK) { | ||||||
|  |       rotation = 360 - rotation; | ||||||
|  |     } | ||||||
|  |     rotation = (info.orientation + rotation) % 360; | ||||||
|  |  | ||||||
|  |     frameObserver.OnFrameCaptured(data, rotation, captureTimeMs); | ||||||
|  |     camera.addCallbackBuffer(data); | ||||||
|  |   } | ||||||
|  |  | ||||||
|  |   // runCameraThreadUntilIdle makes sure all messages posted to the camera | ||||||
|  |   // thread have been processed before returning. It does this by posting a | ||||||
|  |   // message to the camera thread's queue and waiting until it has been | ||||||
|  |   // processed. It is used in tests. | ||||||
|  |   void runCameraThreadUntilIdle() { | ||||||
|  |     if (cameraThreadHandler == null) | ||||||
|  |       return; | ||||||
|  |     final Exchanger<Boolean> result = new Exchanger<Boolean>(); | ||||||
|  |     cameraThreadHandler.post(new Runnable() { | ||||||
|  |       @Override public void run() { | ||||||
|  |         exchange(result, true); // |true| is a dummy here. | ||||||
|  |       } | ||||||
|  |     }); | ||||||
|  |     exchange(result, false);  // |false| is a dummy value here. | ||||||
|  |     return; | ||||||
|  |   } | ||||||
|  |  | ||||||
|  |   // Exchanges |value| with |exchanger|, converting InterruptedExceptions to | ||||||
|  |   // RuntimeExceptions (since we expect never to see these). | ||||||
|  |   private static <T> T exchange(Exchanger<T> exchanger, T value) { | ||||||
|  |     try { | ||||||
|  |       return exchanger.exchange(value); | ||||||
|  |     } catch (InterruptedException e) { | ||||||
|  |       throw new RuntimeException(e); | ||||||
|  |     } | ||||||
|  |   } | ||||||
|  |  | ||||||
|  |   // Interface used for providing callbacks to an observer. | ||||||
|  |   interface CapturerObserver { | ||||||
|  |     // Notifies whether the camera has been started successfully or not. | ||||||
|  |     // Called on a Java thread owned by VideoCapturerAndroid. | ||||||
|  |     abstract void OnCapturerStarted(boolean success); | ||||||
|  |     // Delivers a captured frame. Called on a Java thread owned by | ||||||
|  |     // VideoCapturerAndroid. | ||||||
|  |     abstract void OnFrameCaptured(byte[] data, int rotation, long timeStamp); | ||||||
|  |   } | ||||||
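|  |  | ||||||
|  |   // A trivial observer, e.g. for tests, might just count delivered frames | ||||||
|  |   // (sketch only, not used by the production code): | ||||||
|  |   // | ||||||
|  |   //   class CountingObserver implements CapturerObserver { | ||||||
|  |   //     public int framesCaptured = 0; | ||||||
|  |   //     public void OnCapturerStarted(boolean success) {} | ||||||
|  |   //     public void OnFrameCaptured(byte[] data, int rotation, long timeStamp) { | ||||||
|  |   //       ++framesCaptured; | ||||||
|  |   //     } | ||||||
|  |   //   } | ||||||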
|  |  | ||||||
|  |   // An implementation of CapturerObserver that forwards all calls from | ||||||
|  |   // Java to the C layer. | ||||||
|  |   public static class NativeFrameObserver implements CapturerObserver { | ||||||
|  |     private final long nativeCapturer; | ||||||
|  |  | ||||||
|  |     public NativeFrameObserver(long nativeCapturer) { | ||||||
|  |       this.nativeCapturer = nativeCapturer; | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     @Override | ||||||
|  |     public void OnFrameCaptured(byte[] data, int rotation, long timeStamp) { | ||||||
|  |       nativeOnFrameCaptured(nativeCapturer, data, rotation, timeStamp); | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     private native void nativeOnFrameCaptured( | ||||||
|  |         long captureObject, byte[] data, int rotation, long timeStamp); | ||||||
|  |  | ||||||
|  |     @Override | ||||||
|  |     public void OnCapturerStarted(boolean success) { | ||||||
|  |       nativeCapturerStarted(nativeCapturer, success); | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     private native void nativeCapturerStarted(long captureObject, | ||||||
|  |         boolean success); | ||||||
|  |   } | ||||||
|  | } | ||||||
| @@ -27,13 +27,12 @@ | |||||||
|  |  | ||||||
| package org.appspot.apprtc; | package org.appspot.apprtc; | ||||||
|  |  | ||||||
| import org.appspot.apprtc.AppRTCClient.SignalingParameters; |  | ||||||
| import org.appspot.apprtc.util.LooperExecutor; |  | ||||||
|  |  | ||||||
| import android.content.Context; | import android.content.Context; | ||||||
| import android.opengl.EGLContext; | import android.opengl.EGLContext; | ||||||
| import android.util.Log; | import android.util.Log; | ||||||
|  |  | ||||||
|  | import org.appspot.apprtc.AppRTCClient.SignalingParameters; | ||||||
|  | import org.appspot.apprtc.util.LooperExecutor; | ||||||
| import org.webrtc.DataChannel; | import org.webrtc.DataChannel; | ||||||
| import org.webrtc.IceCandidate; | import org.webrtc.IceCandidate; | ||||||
| import org.webrtc.MediaCodecVideoEncoder; | import org.webrtc.MediaCodecVideoEncoder; | ||||||
| @@ -47,7 +46,7 @@ import org.webrtc.SdpObserver; | |||||||
| import org.webrtc.SessionDescription; | import org.webrtc.SessionDescription; | ||||||
| import org.webrtc.StatsObserver; | import org.webrtc.StatsObserver; | ||||||
| import org.webrtc.StatsReport; | import org.webrtc.StatsReport; | ||||||
| import org.webrtc.VideoCapturer; | import org.webrtc.VideoCapturerAndroid; | ||||||
| import org.webrtc.VideoRenderer; | import org.webrtc.VideoRenderer; | ||||||
| import org.webrtc.VideoSource; | import org.webrtc.VideoSource; | ||||||
| import org.webrtc.VideoTrack; | import org.webrtc.VideoTrack; | ||||||
| @@ -109,6 +108,8 @@ public class PeerConnectionClient { | |||||||
|   private boolean useFrontFacingCamera = true; |   private boolean useFrontFacingCamera = true; | ||||||
|   private SessionDescription localSdp = null; // either offer or answer SDP |   private SessionDescription localSdp = null; // either offer or answer SDP | ||||||
|   private MediaStream mediaStream = null; |   private MediaStream mediaStream = null; | ||||||
|  |   private VideoCapturerAndroid videoCapturer = null; | ||||||
|  |   private Context context = null; | ||||||
|   // enableVideo is set to true if video should be rendered and sent. |   // enableVideo is set to true if video should be rendered and sent. | ||||||
|   private boolean renderVideo = true; |   private boolean renderVideo = true; | ||||||
|   private VideoTrack localVideoTrack = null; |   private VideoTrack localVideoTrack = null; | ||||||
| @@ -282,6 +283,7 @@ public class PeerConnectionClient { | |||||||
|       events.onPeerConnectionError("Failed to initializeAndroidGlobals"); |       events.onPeerConnectionError("Failed to initializeAndroidGlobals"); | ||||||
|     } |     } | ||||||
|     factory = new PeerConnectionFactory(); |     factory = new PeerConnectionFactory(); | ||||||
|  |     this.context = context; | ||||||
|     Log.d(TAG, "Peer connection factory created."); |     Log.d(TAG, "Peer connection factory created."); | ||||||
|   } |   } | ||||||
|  |  | ||||||
| @@ -317,7 +319,9 @@ public class PeerConnectionClient { | |||||||
|  |  | ||||||
|     mediaStream = factory.createLocalMediaStream("ARDAMS"); |     mediaStream = factory.createLocalMediaStream("ARDAMS"); | ||||||
|     if (videoConstraints != null) { |     if (videoConstraints != null) { | ||||||
|       mediaStream.addTrack(createVideoTrack(useFrontFacingCamera)); |       videoCapturer = VideoCapturerAndroid.create( | ||||||
|  |           VideoCapturerAndroid.getNameOfFrontFacingDevice()); | ||||||
|  |       mediaStream.addTrack(createVideoTrack(videoCapturer)); | ||||||
|     } |     } | ||||||
|  |  | ||||||
|     if (signalingParameters.audioConstraints != null) { |     if (signalingParameters.audioConstraints != null) { | ||||||
| @@ -529,45 +533,12 @@ public class PeerConnectionClient { | |||||||
|     }); |     }); | ||||||
|   } |   } | ||||||
|  |  | ||||||
|   // Cycle through likely device names for the camera and return the first |   private VideoTrack createVideoTrack(VideoCapturerAndroid capturer) { | ||||||
|   // capturer that works, or crash if none do. |     videoSource = factory.createVideoSource( | ||||||
|   private VideoCapturer getVideoCapturer(boolean useFrontFacing) { |         capturer, signalingParameters.videoConstraints); | ||||||
|     String[] cameraFacing = { "front", "back" }; |  | ||||||
|     if (!useFrontFacing) { |  | ||||||
|       cameraFacing[0] = "back"; |  | ||||||
|       cameraFacing[1] = "front"; |  | ||||||
|     } |  | ||||||
|     for (String facing : cameraFacing) { |  | ||||||
|       int[] cameraIndex = { 0, 1 }; |  | ||||||
|       int[] cameraOrientation = { 0, 90, 180, 270 }; |  | ||||||
|       for (int index : cameraIndex) { |  | ||||||
|         for (int orientation : cameraOrientation) { |  | ||||||
|           String name = "Camera " + index + ", Facing " + facing |  | ||||||
|               + ", Orientation " + orientation; |  | ||||||
|           VideoCapturer capturer = VideoCapturer.create(name); |  | ||||||
|           if (capturer != null) { |  | ||||||
|             Log.d(TAG, "Using camera: " + name); |  | ||||||
|             return capturer; |  | ||||||
|           } |  | ||||||
|         } |  | ||||||
|       } |  | ||||||
|     } |  | ||||||
|     reportError("Failed to open capturer"); |  | ||||||
|     return null; |  | ||||||
|   } |  | ||||||
|  |  | ||||||
|   private VideoTrack createVideoTrack(boolean frontFacing) { |  | ||||||
|     VideoCapturer capturer = getVideoCapturer(frontFacing); |  | ||||||
|     if (videoSource != null) { |  | ||||||
|       videoSource.stop(); |  | ||||||
|       videoSource.dispose(); |  | ||||||
|     } |  | ||||||
|  |  | ||||||
|     videoSource = factory.createVideoSource(capturer, videoConstraints); |  | ||||||
|     String trackExtension = frontFacing ? "frontFacing" : "backFacing"; |  | ||||||
|  |  | ||||||
|     localVideoTrack = |     localVideoTrack = | ||||||
|         factory.createVideoTrack(VIDEO_TRACK_ID + trackExtension, videoSource); |         factory.createVideoTrack(VIDEO_TRACK_ID, videoSource); | ||||||
|     localVideoTrack.setEnabled(renderVideo); |     localVideoTrack.setEnabled(renderVideo); | ||||||
|     localVideoTrack.addRenderer(new VideoRenderer(localRender)); |     localVideoTrack.addRenderer(new VideoRenderer(localRender)); | ||||||
|     return localVideoTrack; |     return localVideoTrack; | ||||||
| @@ -669,50 +640,8 @@ public class PeerConnectionClient { | |||||||
|     if (videoConstraints == null) { |     if (videoConstraints == null) { | ||||||
|       return;  // No video is sent. |       return;  // No video is sent. | ||||||
|     } |     } | ||||||
|     if (peerConnection.signalingState() |     videoCapturer.switchCamera(); | ||||||
|         != PeerConnection.SignalingState.STABLE) { |  | ||||||
|       Log.e(TAG, "Switching camera during negotiation is not handled."); |  | ||||||
|       return; |  | ||||||
|     } |  | ||||||
|  |  | ||||||
|     Log.d(TAG, "Switch camera"); |     Log.d(TAG, "Switch camera"); | ||||||
|     peerConnection.removeStream(mediaStream); |  | ||||||
|     VideoTrack currentTrack = mediaStream.videoTracks.get(0); |  | ||||||
|     mediaStream.removeTrack(currentTrack); |  | ||||||
|  |  | ||||||
|     String trackId = currentTrack.id(); |  | ||||||
|     // On Android, there can only be one camera open at the time and we |  | ||||||
|     // need to release our implicit references to the videoSource before the |  | ||||||
|     // PeerConnectionFactory is released. Since createVideoTrack creates a new |  | ||||||
|     // videoSource and frees the old one, we need to release the track here. |  | ||||||
|     currentTrack.dispose(); |  | ||||||
|  |  | ||||||
|     useFrontFacingCamera = !useFrontFacingCamera; |  | ||||||
|     VideoTrack newTrack = createVideoTrack(useFrontFacingCamera); |  | ||||||
|     mediaStream.addTrack(newTrack); |  | ||||||
|     peerConnection.addStream(mediaStream); |  | ||||||
|  |  | ||||||
|     SessionDescription remoteDesc = peerConnection.getRemoteDescription(); |  | ||||||
|     if (localSdp == null || remoteDesc == null) { |  | ||||||
|       Log.d(TAG, "Switching camera before the negotiation started."); |  | ||||||
|       return; |  | ||||||
|     } |  | ||||||
|  |  | ||||||
|     localSdp = new SessionDescription(localSdp.type, |  | ||||||
|         localSdp.description.replaceAll(trackId, newTrack.id())); |  | ||||||
|  |  | ||||||
|     if (isInitiator) { |  | ||||||
|       peerConnection.setLocalDescription( |  | ||||||
|           new SwitchCameraSdbObserver(), localSdp); |  | ||||||
|       peerConnection.setRemoteDescription( |  | ||||||
|           new SwitchCameraSdbObserver(), remoteDesc); |  | ||||||
|     } else { |  | ||||||
|       peerConnection.setRemoteDescription( |  | ||||||
|           new SwitchCameraSdbObserver(), remoteDesc); |  | ||||||
|       peerConnection.setLocalDescription( |  | ||||||
|           new SwitchCameraSdbObserver(), localSdp); |  | ||||||
|     } |  | ||||||
|     Log.d(TAG, "Switch camera done"); |  | ||||||
|   } |   } | ||||||
|  |  | ||||||
|   public void switchCamera() { |   public void switchCamera() { | ||||||
| @@ -893,24 +822,4 @@ public class PeerConnectionClient { | |||||||
|       reportError("setSDP error: " + error); |       reportError("setSDP error: " + error); | ||||||
|     } |     } | ||||||
|   } |   } | ||||||
|  |  | ||||||
|   private class SwitchCameraSdbObserver implements SdpObserver { |  | ||||||
|     @Override |  | ||||||
|     public void onCreateSuccess(SessionDescription sdp) { |  | ||||||
|     } |  | ||||||
|  |  | ||||||
|     @Override |  | ||||||
|     public void onSetSuccess() { |  | ||||||
|       Log.d(TAG, "Camera switch SDP set succesfully"); |  | ||||||
|     } |  | ||||||
|  |  | ||||||
|     @Override |  | ||||||
|     public void onCreateFailure(final String error) { |  | ||||||
|     } |  | ||||||
|  |  | ||||||
|     @Override |  | ||||||
|     public void onSetFailure(final String error) { |  | ||||||
|       reportError("setSDP error while switching camera: " + error); |  | ||||||
|     } |  | ||||||
|   } |  | ||||||
| } | } | ||||||
|   | |||||||
| @@ -110,9 +110,8 @@ | |||||||
|                   'app/webrtc/java/android/org/webrtc/VideoRendererGui.java', |                   'app/webrtc/java/android/org/webrtc/VideoRendererGui.java', | ||||||
|                   'app/webrtc/java/src/org/webrtc/MediaCodecVideoEncoder.java', |                   'app/webrtc/java/src/org/webrtc/MediaCodecVideoEncoder.java', | ||||||
|                   'app/webrtc/java/src/org/webrtc/MediaCodecVideoDecoder.java', |                   'app/webrtc/java/src/org/webrtc/MediaCodecVideoDecoder.java', | ||||||
|  |                   'app/webrtc/java/src/org/webrtc/VideoCapturerAndroid.java', | ||||||
|                   '<(webrtc_modules_dir)/audio_device/android/java/src/org/webrtc/voiceengine/AudioManagerAndroid.java', |                   '<(webrtc_modules_dir)/audio_device/android/java/src/org/webrtc/voiceengine/AudioManagerAndroid.java', | ||||||
|                   '<(webrtc_modules_dir)/video_capture/android/java/src/org/webrtc/videoengine/VideoCaptureAndroid.java', |  | ||||||
|                   '<(webrtc_modules_dir)/video_capture/android/java/src/org/webrtc/videoengine/VideoCaptureDeviceInfoAndroid.java', |  | ||||||
|                   '<(webrtc_modules_dir)/video_render/android/java/src/org/webrtc/videoengine/ViEAndroidGLES20.java', |                   '<(webrtc_modules_dir)/video_render/android/java/src/org/webrtc/videoengine/ViEAndroidGLES20.java', | ||||||
|                   '<(webrtc_modules_dir)/video_render/android/java/src/org/webrtc/videoengine/ViERenderer.java', |                   '<(webrtc_modules_dir)/video_render/android/java/src/org/webrtc/videoengine/ViERenderer.java', | ||||||
|                   '<(webrtc_modules_dir)/video_render/android/java/src/org/webrtc/videoengine/ViESurfaceRenderer.java', |                   '<(webrtc_modules_dir)/video_render/android/java/src/org/webrtc/videoengine/ViESurfaceRenderer.java', | ||||||
| @@ -694,6 +693,14 @@ | |||||||
|         'app/webrtc/webrtcsessiondescriptionfactory.cc', |         'app/webrtc/webrtcsessiondescriptionfactory.cc', | ||||||
|         'app/webrtc/webrtcsessiondescriptionfactory.h', |         'app/webrtc/webrtcsessiondescriptionfactory.h', | ||||||
|       ], |       ], | ||||||
|  |       'conditions': [ | ||||||
|  |         ['OS=="android" and build_with_chromium==0', { | ||||||
|  |           'sources': [ | ||||||
|  |             'app/webrtc/androidvideocapturer.h', | ||||||
|  |             'app/webrtc/androidvideocapturer.cc', | ||||||
|  |            ], | ||||||
|  |         }], | ||||||
|  |       ], | ||||||
|     },  # target libjingle_peerconnection |     },  # target libjingle_peerconnection | ||||||
|   ], |   ], | ||||||
| } | } | ||||||
|   | |||||||