Clean up webrtc external capture.

This CL removes the dependency on the external capture module when external capturing is used in WebRTC.
It also removes two external capture methods that are no longer needed.
Furthermore, it adds an I420VideoFrame::CreateFrame overload that takes a pointer to packed I420 memory as input.

R=magjed@webrtc.org, mflodman@webrtc.org, pbos@webrtc.org

Review URL: https://webrtc-codereview.appspot.com/43879004

Cr-Commit-Position: refs/heads/master@{#8804}
git-svn-id: http://webrtc.googlecode.com/svn/trunk@8804 4adac7df-926f-26a2-2b94-8c16560cd09d
perkj@webrtc.org 2015-03-20 10:55:15 +00:00
parent 443ad403f5
commit 9f9ea7e5ab
8 changed files with 47 additions and 121 deletions
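
As a quick orientation for the diffs below, a minimal sketch of the delivery path this CL converges on. The names |packed_i420|, |width|, |height| and |external_capture| are illustrative; |external_capture| stands for the ViEExternalCapture* the engine hands out for an external capture device, and the relevant ViE and video-frame headers are assumed to be included.

// Sketch only. Before this CL a caller could push raw memory directly:
//   external_capture->IncomingFrame(packed_i420, length, width, height,
//                                   webrtc::kVideoI420, capture_time);
// After this CL the caller wraps the memory in an I420VideoFrame first.
webrtc::I420VideoFrame frame;
if (frame.CreateFrame(packed_i420, width, height,
                      webrtc::kVideoRotation_0) != 0) {
  return;  // CreateFrame reports failure with -1.
}
frame.set_render_time_ms(webrtc::TickTime::MillisecondTimestamp());
external_capture->IncomingFrame(frame);  // The only remaining entry point.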

View File

@@ -376,20 +376,6 @@ class FakeWebRtcVideoEngine
int incoming_frame_num() const { return incoming_frame_num_; }
// From ViEExternalCapture
int IncomingFrame(unsigned char* videoFrame,
size_t videoFrameLength,
unsigned short width,
unsigned short height,
webrtc::RawVideoType videoType,
unsigned long long captureTime) override {
return 0;
}
int IncomingFrameI420(
const webrtc::ViEVideoFrameI420& video_frame,
unsigned long long captureTime) override {
return 0;
}
void IncomingFrame(const webrtc::I420VideoFrame& frame) override {
last_capture_time_ = frame.render_time_ms();
++incoming_frame_num_;

View File

@@ -118,6 +118,20 @@ int I420VideoFrame::CreateFrame(const uint8_t* buffer_y,
return 0;
}
int I420VideoFrame::CreateFrame(const uint8_t* buffer,
int width,
int height,
VideoRotation rotation) {
const int stride_y = width;
const int stride_uv = (width + 1) / 2;
const uint8_t* buffer_y = buffer;
const uint8_t* buffer_u = buffer_y + stride_y * height;
const uint8_t* buffer_v = buffer_u + stride_uv * ((height + 1) / 2);
return CreateFrame(buffer_y, buffer_u, buffer_v, width, height, stride_y,
stride_uv, stride_uv, rotation);
}
int I420VideoFrame::CopyFrame(const I420VideoFrame& videoFrame) {
if (videoFrame.IsZeroSize()) {
video_frame_buffer_ = nullptr;

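A side note on the offsets computed in the new overload above: U starts right after Y and V right after U, with chroma dimensions rounded up for odd sizes. A small self-contained sketch (the helper name is ours, not part of the CL):

#include <cstddef>

// Mirrors the offset math in CreateFrame(const uint8_t* buffer, ...) above.
static size_t PackedI420SizeBytes(int width, int height) {
  const size_t y_size = static_cast<size_t>(width) * height;
  const size_t chroma_size =
      static_cast<size_t>((width + 1) / 2) * ((height + 1) / 2);
  return y_size + 2 * chroma_size;  // Y plane, then U, then V, back to back.
}
// Example: 640x480 gives 307200 (Y) + 2 * 76800 (U, V) = 460800 bytes.
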
View File

@@ -181,17 +181,10 @@ bool VideoProcessorImpl::ProcessFrame(int frame_number) {
}
if (frame_reader_->ReadFrame(source_buffer_)) {
// Copy the source frame to the newly read frame data.
int size_y = config_.codec_settings->width * config_.codec_settings->height;
int half_width = (config_.codec_settings->width + 1) / 2;
int half_height = (config_.codec_settings->height + 1) / 2;
int size_uv = half_width * half_height;
source_frame_.CreateFrame(source_buffer_,
source_buffer_ + size_y,
source_buffer_ + size_y + size_uv,
config_.codec_settings->width,
config_.codec_settings->height,
config_.codec_settings->width,
half_width, half_width);
kVideoRotation_0);
// Ensure we have a new statistics data object we can fill:
FrameStatistic& stat = stats_->NewFrame(frame_number);

View File

@@ -61,32 +61,6 @@ enum CaptureAlarm {
AlarmCleared = 1
};
struct ViEVideoFrameI420 {
ViEVideoFrameI420() {
y_plane = NULL;
u_plane = NULL;
v_plane = NULL;
y_pitch = 0;
u_pitch = 0;
v_pitch = 0;
width = 0;
height = 0;
rotation = kVideoRotation_0;
}
unsigned char* y_plane;
unsigned char* u_plane;
unsigned char* v_plane;
int y_pitch;
int u_pitch;
int v_pitch;
unsigned short width;
unsigned short height;
VideoRotation rotation;
};
// This class declares an abstract interface to be used when implementing
// a user-defined capture device. This interface is not meant to be
// implemented by the user. Instead, the user should call AllocateCaptureDevice
@@ -100,21 +74,6 @@ class WEBRTC_DLLEXPORT ViEExternalCapture {
// This method is called by the user to deliver a new captured frame to
// VideoEngine.
// |capture_time| must be specified in the NTP time format in milliseconds.
virtual int IncomingFrame(unsigned char* video_frame,
size_t video_frame_length,
unsigned short width,
unsigned short height,
RawVideoType video_type,
unsigned long long capture_time = 0) = 0;
// This method is specifically for delivering a new captured I420 frame to
// VideoEngine.
// |capture_time| must be specified in the NTP time format in milliseconds.
virtual int IncomingFrameI420(
const ViEVideoFrameI420& video_frame,
unsigned long long capture_time = 0) = 0;
virtual void IncomingFrame(const I420VideoFrame& frame) = 0;
};

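Code that used to fill a ViEVideoFrameI420 and call IncomingFrameI420 can build the frame itself and use the remaining entry point. A hedged sketch; the plane pointers, pitches, |capture_time_ntp_ms| and |external_capture| stand in for whatever the caller already has:

// Sketch only; uses the pre-existing per-plane CreateFrame overload.
webrtc::I420VideoFrame frame;
if (frame.CreateFrame(y_plane, u_plane, v_plane, width, height,
                      y_pitch, u_pitch, v_pitch,
                      webrtc::kVideoRotation_0) != 0) {
  return;  // -1 on error, as with the other overloads.
}
frame.set_ntp_time_ms(capture_time_ntp_ms);  // NTP milliseconds, as before.
external_capture->IncomingFrame(frame);
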
View File

@@ -82,12 +82,13 @@ void ViEFileCaptureDevice::ReadFileFor(uint64_t time_slice_ms,
FramePacemaker pacemaker(max_fps);
size_t read = fread(frame_buffer, 1, frame_length_, input_file_);
if (feof(input_file_)) {
if (feof(input_file_) || read != frame_length_) {
rewind(input_file_);
}
input_sink_->IncomingFrame(frame_buffer, read, width_, height_,
webrtc::kVideoI420,
webrtc::TickTime::MillisecondTimestamp());
webrtc::I420VideoFrame frame;
frame.CreateFrame(frame_buffer, width_, height_, webrtc::kVideoRotation_0);
frame.set_render_time_ms(webrtc::TickTime::MillisecondTimestamp());
input_sink_->IncomingFrame(frame);
pacemaker.SleepIfNecessary(sleeper);
elapsed_ms = webrtc::TickTime::MillisecondTimestamp() - start_time_ms;

View File

@@ -64,7 +64,7 @@ ViECapturer::ViECapturer(int capture_id,
capture_cs_(CriticalSectionWrapper::CreateCriticalSection()),
effects_and_stats_cs_(CriticalSectionWrapper::CreateCriticalSection()),
capture_module_(NULL),
external_capture_module_(NULL),
use_external_capture_(false),
module_process_thread_(module_process_thread),
capture_id_(capture_id),
incoming_frame_cs_(CriticalSectionWrapper::CreateCriticalSection()),
@@ -170,8 +170,8 @@ int32_t ViECapturer::Init(const char* device_unique_idUTF8,
uint32_t device_unique_idUTF8Length) {
assert(capture_module_ == NULL);
if (device_unique_idUTF8 == NULL) {
capture_module_ = VideoCaptureFactory::Create(
ViEModuleId(engine_id_, capture_id_), external_capture_module_);
use_external_capture_ = true;
return 0;
} else {
capture_module_ = VideoCaptureFactory::Create(
ViEModuleId(engine_id_, capture_id_), device_unique_idUTF8);
@@ -187,6 +187,8 @@ int32_t ViECapturer::Init(const char* device_unique_idUTF8,
}
int ViECapturer::FrameCallbackChanged() {
if (use_external_capture_)
return -1;
if (Started() && !CaptureCapabilityFixed()) {
// Reconfigure the camera if a new size is required and the capture device
// does not provide encoded frames.
@@ -210,6 +212,8 @@ int ViECapturer::FrameCallbackChanged() {
}
int32_t ViECapturer::Start(const CaptureCapability& capture_capability) {
if (use_external_capture_)
return -1;
int width;
int height;
int frame_rate;
@@ -246,15 +250,21 @@ int32_t ViECapturer::Start(const CaptureCapability& capture_capability) {
}
int32_t ViECapturer::Stop() {
if (use_external_capture_)
return -1;
requested_capability_ = CaptureCapability();
return capture_module_->StopCapture();
}
bool ViECapturer::Started() {
if (use_external_capture_)
return false;
return capture_module_->CaptureStarted();
}
const char* ViECapturer::CurrentDeviceName() const {
if (use_external_capture_)
return "";
return capture_module_->CurrentDeviceName();
}
@@ -276,54 +286,18 @@ void ViECapturer::GetCpuOveruseMetrics(CpuOveruseMetrics* metrics) const {
}
int32_t ViECapturer::SetCaptureDelay(int32_t delay_ms) {
if (use_external_capture_)
return -1;
capture_module_->SetCaptureDelay(delay_ms);
return 0;
}
int32_t ViECapturer::SetVideoRotation(const VideoRotation rotation) {
if (use_external_capture_)
return -1;
return capture_module_->SetCaptureRotation(rotation);
}
int ViECapturer::IncomingFrame(unsigned char* video_frame,
size_t video_frame_length,
uint16_t width,
uint16_t height,
RawVideoType video_type,
unsigned long long capture_time) { // NOLINT
if (!external_capture_module_) {
return -1;
}
VideoCaptureCapability capability;
capability.width = width;
capability.height = height;
capability.rawType = video_type;
return external_capture_module_->IncomingFrame(video_frame,
video_frame_length,
capability, capture_time);
}
int ViECapturer::IncomingFrameI420(const ViEVideoFrameI420& video_frame,
unsigned long long capture_time) { // NOLINT
CriticalSectionScoped cs(incoming_frame_cs_.get());
int ret = incoming_frame_.CreateFrame(video_frame.y_plane,
video_frame.u_plane,
video_frame.v_plane,
video_frame.width,
video_frame.height,
video_frame.y_pitch,
video_frame.u_pitch,
video_frame.v_pitch,
video_frame.rotation);
if (ret < 0) {
LOG_F(LS_ERROR) << "Could not create I420Frame.";
return -1;
}
incoming_frame_.set_ntp_time_ms(capture_time);
OnIncomingCapturedFrame(-1, incoming_frame_);
return 0;
}
void ViECapturer::IncomingFrame(const I420VideoFrame& frame) {
OnIncomingCapturedFrame(-1, frame);
}

View File

@@ -69,16 +69,6 @@ class ViECapturer
int FrameCallbackChanged();
// Implements ExternalCapture.
int IncomingFrame(unsigned char* video_frame,
size_t video_frame_length,
uint16_t width,
uint16_t height,
RawVideoType video_type,
unsigned long long capture_time = 0) override;
int IncomingFrameI420(const ViEVideoFrameI420& video_frame,
unsigned long long capture_time = 0) override;
void IncomingFrame(const I420VideoFrame& frame) override;
// Start/Stop.
@@ -154,7 +144,7 @@ class ViECapturer
rtc::scoped_ptr<CriticalSectionWrapper> capture_cs_;
rtc::scoped_ptr<CriticalSectionWrapper> effects_and_stats_cs_;
VideoCaptureModule* capture_module_;
VideoCaptureExternal* external_capture_module_;
bool use_external_capture_;
ProcessThread& module_process_thread_;
const int capture_id_;

View File

@@ -69,6 +69,15 @@ class I420VideoFrame {
int stride_v,
VideoRotation rotation);
// CreateFrame: Sets the frame's members and buffers. If required size is
// bigger than allocated one, new buffers of adequate size will be allocated.
// |buffer| must be a packed I420 buffer.
// Return value: 0 on success, -1 on error.
int CreateFrame(const uint8_t* buffer,
int width,
int height,
VideoRotation rotation);
// Deep copy frame: If required size is bigger than allocated one, new
// buffers of adequate size will be allocated.
// Return value: 0 on success, -1 on error.
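
A short usage sketch for the packed-buffer overload declared above; |data|, |width| and |height| are illustrative, and |data| must hold at least width*height + 2*((width+1)/2)*((height+1)/2) bytes:

webrtc::I420VideoFrame frame;
// The parameter is const, so the pixels are copied and the caller keeps
// ownership of |data|.
if (frame.CreateFrame(data, width, height, webrtc::kVideoRotation_0) != 0) {
  // -1 indicates the frame could not be created.
}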