Removing functionality for inserting pre-encoded frames instead of raw
video frames. The functionality hasn't been used for a long time and
should be reimplemented properly if it is needed in the future.

This is a pre-step for implementing CPU overload control.

R=pbos@webrtc.org

Review URL: https://webrtc-codereview.appspot.com/1630004

git-svn-id: http://webrtc.googlecode.com/svn/trunk@4194 4adac7df-926f-26a2-2b94-8c16560cd09d
mflodman@webrtc.org 2013-06-07 13:57:57 +00:00
parent b69cc15467
commit 3ba883f0fc
9 changed files with 12 additions and 434 deletions
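
For context, the deleted path let a capture device hand the pipeline frames it had already encoded: the device-level callback sketched below sat next to the raw-frame callback, and ViECapturer additionally registered itself with ViEEncoder as an external encoder via PreEncodeToViEEncoder(). The sketch is condensed from the removed declarations in the hunks that follow; the stand-in types are illustrative only, not the real WebRTC definitions.

// Condensed sketch of the removed capture-side interface. Stand-in types are
// used so the snippet is self-contained; the real definitions live elsewhere
// in the WebRTC tree.
#include <cstdint>

class VideoFrame {};                             // stand-in for the encoded-frame buffer
enum VideoCodecType { kVideoCodecUnknown = 0 };  // stand-in for the codec-type enum

class VideoCaptureDataCallback {
 public:
  virtual ~VideoCaptureDataCallback() {}
  // Removed by this commit: delivery of frames the camera has already encoded.
  virtual void OnIncomingCapturedEncodedFrame(const int32_t id,
                                              VideoFrame& videoFrame,
                                              VideoCodecType codecType) = 0;
};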

@@ -134,9 +134,6 @@ class VideoCaptureDataCallback
 public:
     virtual void OnIncomingCapturedFrame(const int32_t id,
                                          I420VideoFrame& videoFrame) = 0;
-    virtual void OnIncomingCapturedEncodedFrame(const int32_t id,
-                                                VideoFrame& videoFrame,
-                                                VideoCodecType codecType) = 0;
     virtual void OnCaptureDelayChanged(const int32_t id,
                                        const int32_t delay) = 0;
 protected:

@@ -225,41 +225,6 @@ int32_t VideoCaptureImpl::DeliverCapturedFrame(I420VideoFrame& captureFrame,
     return 0;
 }
 
-int32_t VideoCaptureImpl::DeliverEncodedCapturedFrame(
-    VideoFrame& captureFrame, int64_t capture_time,
-    VideoCodecType codecType) {
-  UpdateFrameCount();  // frame count used for local frame rate callback.
-
-  const bool callOnCaptureDelayChanged = _setCaptureDelay != _captureDelay;
-  // Capture delay changed
-  if (_setCaptureDelay != _captureDelay) {
-    _setCaptureDelay = _captureDelay;
-  }
-
-  // Set the capture time
-  if (capture_time != 0) {
-    captureFrame.SetRenderTime(capture_time);
-  }
-  else {
-    captureFrame.SetRenderTime(TickTime::MillisecondTimestamp());
-  }
-
-  if (captureFrame.RenderTimeMs() == last_capture_time_) {
-    // We don't allow the same capture time for two frames, drop this one.
-    return -1;
-  }
-  last_capture_time_ = captureFrame.RenderTimeMs();
-
-  if (_dataCallBack) {
-    if (callOnCaptureDelayChanged) {
-      _dataCallBack->OnCaptureDelayChanged(_id, _captureDelay);
-    }
-    _dataCallBack->OnIncomingCapturedEncodedFrame(_id, captureFrame, codecType);
-  }
-
-  return 0;
-}
-
 int32_t VideoCaptureImpl::IncomingFrame(
     uint8_t* videoFrame,
     int32_t videoFrameLength,
@@ -336,14 +301,8 @@ int32_t VideoCaptureImpl::IncomingFrame(
     }
     else  // Encoded format
     {
-        if (_capture_encoded_frame.CopyFrame(videoFrameLength, videoFrame) != 0)
-        {
-            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
-                         "Failed to copy captured frame of length %d",
-                         static_cast<int>(videoFrameLength));
-        }
-        DeliverEncodedCapturedFrame(_capture_encoded_frame, captureTime,
-                                    frameInfo.codecType);
+        assert(false);
+        return -1;
     }
 
     const uint32_t processTime =

@@ -101,9 +101,6 @@ protected:
     virtual ~VideoCaptureImpl();
     int32_t DeliverCapturedFrame(I420VideoFrame& captureFrame,
                                  int64_t capture_time);
-    int32_t DeliverEncodedCapturedFrame(VideoFrame& captureFrame,
-                                        int64_t capture_time,
-                                        VideoCodecType codec_type);
 
     int32_t _id;  // Module ID
     char* _deviceUniqueId;  // current Device unique name;

@@ -98,10 +98,6 @@ void VcmCapturer::OnIncomingCapturedFrame(const int32_t id,
   last_timestamp_ = frame.timestamp();
 }
 
-void VcmCapturer::OnIncomingCapturedEncodedFrame(const int32_t id,
-                                                 VideoFrame& frame,
-                                                 VideoCodecType codec_type) {}
-
 void VcmCapturer::OnCaptureDelayChanged(const int32_t id, const int32_t delay) {
 }
 }  // test

@@ -29,10 +29,6 @@ class VcmCapturer : public VideoCapturer, public VideoCaptureDataCallback {
   virtual void OnIncomingCapturedFrame(
       const int32_t id, I420VideoFrame& frame) OVERRIDE;  // NOLINT
-  virtual void OnIncomingCapturedEncodedFrame(const int32_t id,
-                                              VideoFrame& frame,
-                                              VideoCodecType codec_type)
-      OVERRIDE;
   virtual void OnCaptureDelayChanged(const int32_t id, const int32_t delay)
       OVERRIDE;

@@ -197,19 +197,7 @@ int ViECaptureImpl::ConnectCaptureDevice(const int capture_id,
     shared_data_->SetLastError(kViECaptureDeviceAlreadyConnected);
     return -1;
   }
-  VideoCodec codec;
-  bool use_hardware_encoder = false;
-  if (vie_encoder->GetEncoder(&codec) == 0) {
-    // Try to provide the encoder with pre-encoded frames if possible.
-    if (vie_capture->PreEncodeToViEEncoder(codec, *vie_encoder,
-                                           video_channel) == 0) {
-      use_hardware_encoder = true;
-    }
-  }
-  // If we don't use the camera as hardware encoder, we register the vie_encoder
-  // for callbacks.
-  if (!use_hardware_encoder &&
-      vie_capture->RegisterFrameCallback(video_channel, vie_encoder) != 0) {
+  if (vie_capture->RegisterFrameCallback(video_channel, vie_encoder) != 0) {
     shared_data_->SetLastError(kViECaptureDeviceUnknownError);
     return -1;
   }

@@ -55,13 +55,7 @@ ViECapturer::ViECapturer(int capture_id,
       reported_brightness_level_(Normal),
       denoising_enabled_(false),
       observer_cs_(CriticalSectionWrapper::CreateCriticalSection()),
-      observer_(NULL),
-      encoding_cs_(CriticalSectionWrapper::CreateCriticalSection()),
-      capture_encoder_(NULL),
-      encode_complete_callback_(NULL),
-      vie_encoder_(NULL),
-      vcm_(NULL),
-      decoder_initialized_(false) {
+      observer_(NULL) {
   WEBRTC_TRACE(kTraceMemory, kTraceVideo, ViEId(engine_id, capture_id),
                "ViECapturer::ViECapturer(capture_id: %d, engine_id: %d)",
                capture_id, engine_id);
@@ -87,12 +81,6 @@ ViECapturer::~ViECapturer() {
   capture_cs_->Leave();
   deliver_cs_->Leave();
 
-  provider_cs_->Enter();
-  if (vie_encoder_) {
-    vie_encoder_->DeRegisterExternalEncoder(codec_.plType);
-  }
-  provider_cs_->Leave();
-
   // Stop the camera input.
   if (capture_module_) {
     module_process_thread_.DeRegisterModule(capture_module_);
@@ -121,9 +109,6 @@ ViECapturer::~ViECapturer() {
     deflicker_frame_stats_ = NULL;
   }
   delete brightness_frame_stats_;
-  if (vcm_) {
-    delete vcm_;
-  }
 }
 
 ViECapturer* ViECapturer::CreateViECapture(
@@ -170,9 +155,8 @@ ViECapturer* ViECapturer::CreateViECapture(
   return capture;
 }
 
-int32_t ViECapturer::Init(
-    const char* device_unique_idUTF8,
-    const uint32_t device_unique_idUTF8Length) {
+int32_t ViECapturer::Init(const char* device_unique_idUTF8,
+                          uint32_t device_unique_idUTF8Length) {
   assert(capture_module_ == NULL);
   if (device_unique_idUTF8 == NULL) {
     capture_module_ = VideoCaptureFactory::Create(
@@ -194,7 +178,7 @@ int32_t ViECapturer::Init(
 }
 
 int ViECapturer::FrameCallbackChanged() {
-  if (Started() && !EncoderActive() && !CaptureCapabilityFixed()) {
+  if (Started() && !CaptureCapabilityFixed()) {
     // Reconfigure the camera if a new size is required and the capture device
     // does not provide encoded frames.
     int best_width;
@@ -224,15 +208,8 @@ int32_t ViECapturer::Start(const CaptureCapability& capture_capability) {
   int frame_rate;
   VideoCaptureCapability capability;
   requested_capability_ = capture_capability;
-  if (EncoderActive()) {
-    CriticalSectionScoped cs(encoding_cs_.get());
-    capability.width = codec_.width;
-    capability.height = codec_.height;
-    capability.maxFPS = codec_.maxFramerate;
-    capability.codecType = codec_.codecType;
-    capability.rawType = kVideoI420;
-  } else if (!CaptureCapabilityFixed()) {
+
+  if (!CaptureCapabilityFixed()) {
     // Ask the observers for best size.
     GetBestFormat(&width, &height, &frame_rate);
     if (width == 0) {
@@ -366,40 +343,6 @@ void ViECapturer::OnIncomingCapturedFrame(const int32_t capture_id,
   return;
 }
 
-void ViECapturer::OnIncomingCapturedEncodedFrame(const int32_t capture_id,
-                                                 VideoFrame& video_frame,
-                                                 VideoCodecType codec_type) {
-  WEBRTC_TRACE(kTraceStream, kTraceVideo, ViEId(engine_id_, capture_id_),
-               "%s(capture_id: %d)", __FUNCTION__, capture_id);
-  CriticalSectionScoped cs(capture_cs_.get());
-  // Make sure we render this frame earlier since we know the render time set
-  // is slightly off since it's being set when the frame has been received from
-  // the camera, and not when the camera actually captured the frame.
-  video_frame.SetRenderTime(video_frame.RenderTimeMs() - FrameDelay());
-  TRACE_EVENT_INSTANT1("webrtc", "VC::OnIncomingCapturedEncodedFrame",
-                       "render_time", video_frame.RenderTimeMs());
-  assert(codec_type != kVideoCodecUnknown);
-  if (encoded_frame_.Length() != 0) {
-    // The last encoded frame has not been sent yet. Need to wait.
-    deliver_event_.Reset();
-    WEBRTC_TRACE(kTraceWarning, kTraceVideo, ViEId(engine_id_, capture_id_),
-                 "%s(capture_id: %d) Last encoded frame not yet delivered.",
-                 __FUNCTION__, capture_id);
-    capture_cs_->Leave();
-    // Wait for the coded frame to be sent before unblocking this.
-    deliver_event_.Wait(kMaxDeliverWaitTime);
-    assert(encoded_frame_.Length() == 0);
-    capture_cs_->Enter();
-  } else {
-    assert(false);
-  }
-  encoded_frame_.SwapFrame(video_frame);
-  capture_event_.Set();
-  return;
-}
-
 void ViECapturer::OnCaptureDelayChanged(const int32_t id,
                                         const int32_t delay) {
   WEBRTC_TRACE(kTraceStream, kTraceVideo, ViEId(engine_id_, capture_id_),
@@ -408,10 +351,6 @@ void ViECapturer::OnCaptureDelayChanged(const int32_t id,
   // Deliver the network delay to all registered callbacks.
   ViEFrameProviderBase::SetFrameDelay(delay);
-
-  CriticalSectionScoped cs(encoding_cs_.get());
-  if (vie_encoder_) {
-    vie_encoder_->DelayChanged(id, delay);
-  }
 }
 
 int32_t ViECapturer::RegisterEffectFilter(
@@ -569,14 +508,6 @@ bool ViECapturer::ViECaptureProcess() {
       capture_cs_->Leave();
       DeliverI420Frame(&deliver_frame_);
     }
-    if (encoded_frame_.Length() > 0) {
-      capture_cs_->Enter();
-      deliver_encoded_frame_.SwapFrame(encoded_frame_);
-      encoded_frame_.SetLength(0);
-      deliver_event_.Set();
-      capture_cs_->Leave();
-      DeliverCodedFrame(&deliver_encoded_frame_);
-    }
     deliver_cs_->Leave();
     if (current_brightness_level_ != reported_brightness_level_) {
       CriticalSectionScoped cs(observer_cs_.get());
@@ -641,239 +572,23 @@ void ViECapturer::DeliverI420Frame(I420VideoFrame* video_frame) {
   ViEFrameProviderBase::DeliverFrame(video_frame);
 }
 
-void ViECapturer::DeliverCodedFrame(VideoFrame* video_frame) {
-  if (encode_complete_callback_) {
-    EncodedImage encoded_image(video_frame->Buffer(), video_frame->Length(),
-                               video_frame->Size());
-    encoded_image._timeStamp =
-        90 * static_cast<uint32_t>(video_frame->RenderTimeMs());
-    encode_complete_callback_->Encoded(encoded_image);
-  }
-
-  if (NumberOfRegisteredFrameCallbacks() > 0 && decoder_initialized_) {
-    video_frame->Swap(decode_buffer_.payloadData, decode_buffer_.bufferSize,
-                      decode_buffer_.payloadSize);
-    decode_buffer_.encodedHeight = video_frame->Height();
-    decode_buffer_.encodedWidth = video_frame->Width();
-    decode_buffer_.renderTimeMs = video_frame->RenderTimeMs();
-    const int kMsToRtpTimestamp = 90;
-    decode_buffer_.timeStamp = kMsToRtpTimestamp *
-        static_cast<uint32_t>(video_frame->RenderTimeMs());
-    decode_buffer_.payloadType = codec_.plType;
-    vcm_->DecodeFromStorage(decode_buffer_);
-  }
-}
-
 int ViECapturer::DeregisterFrameCallback(
     const ViEFrameCallback* callbackObject) {
-  provider_cs_->Enter();
-  if (callbackObject == vie_encoder_) {
-    // Don't use this camera as encoder anymore. Need to tell the ViEEncoder.
-    ViEEncoder* vie_encoder = NULL;
-    vie_encoder = vie_encoder_;
-    vie_encoder_ = NULL;
-    provider_cs_->Leave();
-
-    // Need to take this here in order to avoid deadlock with VCM. The reason is
-    // that VCM will call ::Release and a deadlock can occur.
-    deliver_cs_->Enter();
-    vie_encoder->DeRegisterExternalEncoder(codec_.plType);
-    deliver_cs_->Leave();
-    return 0;
-  }
-  provider_cs_->Leave();
   return ViEFrameProviderBase::DeregisterFrameCallback(callbackObject);
 }
 
 bool ViECapturer::IsFrameCallbackRegistered(
     const ViEFrameCallback* callbackObject) {
   CriticalSectionScoped cs(provider_cs_.get());
-  if (callbackObject == vie_encoder_) {
-    return true;
-  }
   return ViEFrameProviderBase::IsFrameCallbackRegistered(callbackObject);
 }
 
-int32_t ViECapturer::PreEncodeToViEEncoder(const VideoCodec& codec,
-                                           ViEEncoder& vie_encoder,
-                                           int32_t vie_encoder_id) {
-  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_),
-               "%s(capture_device_id: %d)", __FUNCTION__, capture_id_);
-  if (vie_encoder_ && &vie_encoder != vie_encoder_) {
-    WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_),
-                 "%s(capture_device_id: %d Capture device already encoding)",
-                 __FUNCTION__, capture_id_);
-    return -1;
-  }
-
-  CriticalSectionScoped cs(encoding_cs_.get());
-  VideoCaptureModule::VideoCaptureEncodeInterface* capture_encoder =
-      capture_module_->GetEncodeInterface(codec);
-  if (!capture_encoder) {
-    // Encoding not supported?
-    return -1;
-  }
-  capture_encoder_ = capture_encoder;
-
-  // Create VCM module used for decoding frames if needed.
-  if (!vcm_) {
-    vcm_ = VideoCodingModule::Create(capture_id_);
-  }
-
-  if (vie_encoder.RegisterExternalEncoder(this, codec.plType, false) != 0) {
-    return -1;
-  }
-  if (vie_encoder.SetEncoder(codec) != 0) {
-    vie_encoder.DeRegisterExternalEncoder(codec.plType);
-    return -1;
-  }
-
-  // Make sure the encoder is not an I420 observer.
-  ViEFrameProviderBase::DeregisterFrameCallback(&vie_encoder);
-  // Store the vie_encoder using this capture device.
-  vie_encoder_ = &vie_encoder;
-  vie_encoder_id_ = vie_encoder_id;
-  memcpy(&codec_, &codec, sizeof(VideoCodec));
-  return 0;
-}
-
-bool ViECapturer::EncoderActive() {
-  return vie_encoder_ != NULL;
-}
-
 bool ViECapturer::CaptureCapabilityFixed() {
   return requested_capability_.width != 0 &&
       requested_capability_.height != 0 &&
       requested_capability_.maxFPS != 0;
 }
 
-int32_t ViECapturer::Version(char* version, int32_t length) const {
-  return 0;
-}
-
-int32_t ViECapturer::InitEncode(const VideoCodec* codec_settings,
-                                int32_t number_of_cores,
-                                uint32_t max_payload_size) {
-  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_),
-               "%s(capture_device_id: %d)", __FUNCTION__, capture_id_);
-  CriticalSectionScoped cs(encoding_cs_.get());
-  if (!capture_encoder_ || !codec_settings) {
-    return WEBRTC_VIDEO_CODEC_ERROR;
-  }
-
-  if (vcm_) {
-    // Initialize VCM to be able to decode frames if needed.
-    if (vcm_->InitializeReceiver() == 0) {
-      if (vcm_->RegisterReceiveCallback(this) == 0) {
-        if (vcm_->RegisterReceiveCodec(codec_settings, number_of_cores,
-                                       false) == 0) {
-          decoder_initialized_ = true;
-          WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_),
-                       "%s(capture_device_id: %d) VCM Decoder initialized",
-                       __FUNCTION__, capture_id_);
-        }
-      }
-    }
-  }
-  return capture_encoder_->ConfigureEncoder(*codec_settings, max_payload_size);
-}
-
-int32_t ViECapturer::Encode(
-    const I420VideoFrame& input_image,
-    const CodecSpecificInfo* codec_specific_info,
-    const std::vector<VideoFrameType>* frame_types) {
-  CriticalSectionScoped cs(encoding_cs_.get());
-  if (!capture_encoder_) {
-    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
-  }
-  if (frame_types == NULL) {
-    return capture_encoder_->EncodeFrameType(kVideoFrameDelta);
-  } else if ((*frame_types)[0] == kKeyFrame) {
-    return capture_encoder_->EncodeFrameType(kVideoFrameKey);
-  } else if ((*frame_types)[0] == kSkipFrame) {
-    return capture_encoder_->EncodeFrameType(kFrameEmpty);
-  }
-  return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
-}
-
-int32_t ViECapturer::RegisterEncodeCompleteCallback(
-    EncodedImageCallback* callback) {
-  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_),
-               "%s(capture_device_id: %d)", __FUNCTION__, capture_id_);
-  CriticalSectionScoped cs(deliver_cs_.get());
-  if (!capture_encoder_) {
-    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
-  }
-  encode_complete_callback_ = callback;
-  return 0;
-}
-
-int32_t ViECapturer::Release() {
-  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_),
-               "%s(capture_device_id: %d)", __FUNCTION__, capture_id_);
-  {
-    CriticalSectionScoped cs(deliver_cs_.get());
-    encode_complete_callback_ = NULL;
-  }
-  {
-    CriticalSectionScoped cs(encoding_cs_.get());
-    decoder_initialized_ = false;
-    codec_.codecType = kVideoCodecUnknown;
-    // Reset the camera to output I420.
-    capture_encoder_->ConfigureEncoder(codec_, 0);
-    if (vie_encoder_) {
-      // Need to add the encoder as an observer of I420.
-      ViEFrameProviderBase::RegisterFrameCallback(vie_encoder_id_,
-                                                  vie_encoder_);
-    }
-    vie_encoder_ = NULL;
-  }
-  return 0;
-}
-
-// Should reset the capture device to the state it was in after the InitEncode
-// function. Current implementation do nothing.
-int32_t ViECapturer::Reset() {
-  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_),
-               "%s(capture_device_id: %d)", __FUNCTION__, capture_id_);
-  return 0;
-}
-
-int32_t ViECapturer::SetChannelParameters(uint32_t packet_loss, int rtt) {
-  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_),
-               "%s(capture_device_id: %d)", __FUNCTION__, capture_id_);
-  CriticalSectionScoped cs(encoding_cs_.get());
-  if (!capture_encoder_) {
-    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
-  }
-  return capture_encoder_->SetChannelParameters(packet_loss, rtt);
-}
-
-int32_t ViECapturer::SetRates(uint32_t new_bit_rate, uint32_t frame_rate) {
-  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_),
-               "%s(capture_device_id: %d)", __FUNCTION__, capture_id_);
-  CriticalSectionScoped cs(encoding_cs_.get());
-  if (!capture_encoder_) {
-    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
-  }
-  return capture_encoder_->SetRates(new_bit_rate, frame_rate);
-}
-
-int32_t ViECapturer::FrameToRender(
-    I420VideoFrame& video_frame) {  // NOLINT
-  deliver_cs_->Enter();
-  DeliverI420Frame(&video_frame);
-  deliver_cs_->Leave();
-  return 0;
-}
-
 int32_t ViECapturer::RegisterObserver(ViECaptureObserver* observer) {
   if (observer_) {
     WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, capture_id_),
@@ -892,9 +607,7 @@ int32_t ViECapturer::RegisterObserver(ViECaptureObserver* observer) {
 int32_t ViECapturer::DeRegisterObserver() {
   CriticalSectionScoped cs(observer_cs_.get());
   if (!observer_) {
-    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, capture_id_),
-                 "%s No observer registered", __FUNCTION__, capture_id_);
-    return -1;
+    return 0;
   }
   capture_module_->EnableFrameRateCallback(false);
   capture_module_->EnableNoPictureAlarm(false);

@@ -39,10 +39,8 @@ struct ViEPicture;
 class ViECapturer
     : public ViEFrameProviderBase,
       public ViEExternalCapture,
-      protected VCMReceiveCallback,
       protected VideoCaptureDataCallback,
-      protected VideoCaptureFeedBack,
-      protected VideoEncoder {
+      protected VideoCaptureFeedBack {
  public:
   static ViECapturer* CreateViECapture(int capture_id,
                                        int engine_id,
@@ -76,12 +74,6 @@ class ViECapturer
   virtual int IncomingFrameI420(const ViEVideoFrameI420& video_frame,
                                 unsigned long long capture_time = 0);  // NOLINT
 
-  // Use this capture device as encoder.
-  // Returns 0 if the codec is supported by this capture device.
-  virtual int32_t PreEncodeToViEEncoder(const VideoCodec& codec,
-                                        ViEEncoder& vie_encoder,
-                                        int32_t vie_encoder_id);
-
   // Start/Stop.
   int32_t Start(
       const CaptureCapability& capture_capability = CaptureCapability());
@@ -116,19 +108,14 @@ class ViECapturer
   int32_t Init(VideoCaptureModule* capture_module);
   int32_t Init(const char* device_unique_idUTF8,
-               const uint32_t device_unique_idUTF8Length);
+               uint32_t device_unique_idUTF8Length);
 
   // Implements VideoCaptureDataCallback.
   virtual void OnIncomingCapturedFrame(const int32_t id,
                                        I420VideoFrame& video_frame);
-  virtual void OnIncomingCapturedEncodedFrame(const int32_t capture_id,
-                                              VideoFrame& video_frame,
-                                              VideoCodecType codec_type);
   virtual void OnCaptureDelayChanged(const int32_t id,
                                      const int32_t delay);
 
-  bool EncoderActive();
-
   // Returns true if the capture capability has been set in |StartCapture|
   // function and may not be changed.
   bool CaptureCapabilityFixed();
@@ -139,25 +126,6 @@ class ViECapturer
   int32_t IncImageProcRefCount();
   int32_t DecImageProcRefCount();
 
-  // Implements VideoEncoder.
-  virtual int32_t Version(char* version, int32_t length) const;
-  virtual int32_t InitEncode(const VideoCodec* codec_settings,
-                             int32_t number_of_cores,
-                             uint32_t max_payload_size);
-  virtual int32_t Encode(const I420VideoFrame& input_image,
-                         const CodecSpecificInfo* codec_specific_info,
-                         const std::vector<VideoFrameType>* frame_types);
-  virtual int32_t RegisterEncodeCompleteCallback(
-      EncodedImageCallback* callback);
-  virtual int32_t Release();
-  virtual int32_t Reset();
-  virtual int32_t SetChannelParameters(uint32_t packet_loss, int rtt);
-  virtual int32_t SetRates(uint32_t new_bit_rate, uint32_t frame_rate);
-
-  // Implements VCMReceiveCallback.
-  // TODO(mflodman) Change input argument to pointer.
-  virtual int32_t FrameToRender(I420VideoFrame& video_frame);  // NOLINT
-
   // Implements VideoCaptureFeedBack
   virtual void OnCaptureFrameRate(const int32_t id,
                                   const uint32_t frame_rate);
@@ -187,8 +155,6 @@ class ViECapturer
   I420VideoFrame captured_frame_;
   I420VideoFrame deliver_frame_;
-  VideoFrame deliver_encoded_frame_;
-  VideoFrame encoded_frame_;
 
   // Image processing.
   ViEEffectFilter* effect_filter_;
@@ -204,19 +170,6 @@ class ViECapturer
   scoped_ptr<CriticalSectionWrapper> observer_cs_;
   ViECaptureObserver* observer_;
 
-  // Encoding using encoding capable cameras.
-  scoped_ptr<CriticalSectionWrapper> encoding_cs_;
-  VideoCaptureModule::VideoCaptureEncodeInterface* capture_encoder_;
-  EncodedImageCallback* encode_complete_callback_;
-  VideoCodec codec_;
-  // The ViEEncoder we are encoding for.
-  ViEEncoder* vie_encoder_;
-  // ViEEncoder id we are encoding for.
-  int32_t vie_encoder_id_;
-  // Used for decoding preencoded frames.
-  VideoCodingModule* vcm_;
-  EncodedVideoData decode_buffer_;
-  bool decoder_initialized_;
-
   CaptureCapability requested_capability_;
 
   I420VideoFrame capture_device_image_;

@@ -193,28 +193,7 @@ int ViECodecImpl::SetSendCodec(const int video_channel,
   // Stop the media flow while reconfiguring.
   vie_encoder->Pause();
 
-  // Check if we have a frame provider that is a camera and can provide this
-  // codec for us.
-  bool use_capture_device_as_encoder = false;
-  frame_provider = is.FrameProvider(vie_encoder);
-  if (frame_provider) {
-    if (frame_provider->Id() >= kViECaptureIdBase &&
-        frame_provider->Id() <= kViECaptureIdMax) {
-      ViECapturer* vie_capture = static_cast<ViECapturer*>(frame_provider);
-      // Try to get preencoded. Nothing to do if it is not supported.
-      if (vie_capture && vie_capture->PreEncodeToViEEncoder(
-          video_codec_internal,
-          *vie_encoder,
-          video_channel) == 0) {
-        use_capture_device_as_encoder = true;
-      }
-    }
-  }
-
-  // Update the encoder settings if we are not using a capture device capable
-  // of this codec.
-  if (!use_capture_device_as_encoder &&
-      vie_encoder->SetEncoder(video_codec_internal) != 0) {
+  if (vie_encoder->SetEncoder(video_codec_internal) != 0) {
     WEBRTC_TRACE(kTraceError, kTraceVideo,
                  ViEId(shared_data_->instance_id(), video_channel),
                  "%s: Could not change encoder for channel %d", __FUNCTION__,