This CL is part of enabling cpplint check for video_engine uploads.

BUG=627
TEST=vie_auto_test

Review URL: https://webrtc-codereview.appspot.com/653006

git-svn-id: http://webrtc.googlecode.com/svn/trunk@2434 4adac7df-926f-26a2-2b94-8c16560cd09d
This commit is contained in:
mflodman@webrtc.org 2012-06-21 12:11:50 +00:00
parent 9ba151bdf9
commit 8baed51f6e
20 changed files with 200 additions and 210 deletions

View File

@ -162,7 +162,7 @@ int ViECaptureImpl::AllocateCaptureDevice(
return -1;
}
const WebRtc_Word32 result =
shared_data_->input_manager()->CreateCaptureDevice(capture_module,
shared_data_->input_manager()->CreateCaptureDevice(&capture_module,
capture_id);
if (result != 0) {
shared_data_->SetLastError(result);
@ -563,7 +563,7 @@ int ViECaptureImpl::RegisterObserver(const int capture_id,
shared_data_->SetLastError(kViECaptureObserverAlreadyRegistered);
return -1;
}
if (vie_capture->RegisterObserver(observer) != 0) {
if (vie_capture->RegisterObserver(&observer) != 0) {
shared_data_->SetLastError(kViECaptureDeviceUnknownError);
return -1;
}

View File

@ -126,7 +126,7 @@ ViECapturer::~ViECapturer() {
ViECapturer* ViECapturer::CreateViECapture(
int capture_id,
int engine_id,
VideoCaptureModule& capture_module,
VideoCaptureModule* capture_module,
ProcessThread& module_process_thread) {
ViECapturer* capture = new ViECapturer(capture_id, engine_id,
module_process_thread);
@ -137,9 +137,9 @@ ViECapturer* ViECapturer::CreateViECapture(
return capture;
}
WebRtc_Word32 ViECapturer::Init(VideoCaptureModule& capture_module) {
WebRtc_Word32 ViECapturer::Init(VideoCaptureModule* capture_module) {
assert(capture_module_ == NULL);
capture_module_ = &capture_module;
capture_module_ = capture_module;
capture_module_->RegisterCaptureDataCallback(*this);
capture_module_->AddRef();
if (module_process_thread_.RegisterModule(capture_module_) != 0) {
@ -197,7 +197,7 @@ int ViECapturer::FrameCallbackChanged() {
int best_frame_rate;
VideoCaptureCapability capture_settings;
capture_module_->CaptureSettings(capture_settings);
GetBestFormat(best_width, best_height, best_frame_rate);
GetBestFormat(&best_width, &best_height, &best_frame_rate);
if (best_width != 0 && best_height != 0 && best_frame_rate != 0) {
if (best_width != capture_settings.width ||
best_height != capture_settings.height ||
@ -229,7 +229,7 @@ WebRtc_Word32 ViECapturer::Start(const CaptureCapability& capture_capability) {
} else if (!CaptureCapabilityFixed()) {
// Ask the observers for best size.
GetBestFormat(width, height, frame_rate);
GetBestFormat(&width, &height, &frame_rate);
if (width == 0) {
width = kViECaptureDefaultWidth;
}
@ -299,10 +299,10 @@ WebRtc_Word32 ViECapturer::SetRotateCapturedFrames(
int ViECapturer::IncomingFrame(unsigned char* video_frame,
unsigned int video_frame_length,
unsigned short width,
unsigned short height,
uint16_t width,
uint16_t height,
RawVideoType video_type,
unsigned long long capture_time) {
unsigned long long capture_time) { // NOLINT
WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_),
"ExternalCapture::IncomingFrame width %d, height %d, "
"capture_time %u", width, height, capture_time);
@ -320,7 +320,7 @@ int ViECapturer::IncomingFrame(unsigned char* video_frame,
}
int ViECapturer::IncomingFrameI420(const ViEVideoFrameI420& video_frame,
unsigned long long capture_time) {
unsigned long long capture_time) { // NOLINT
WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_),
"ExternalCapture::IncomingFrame width %d, height %d, "
" capture_time %u", video_frame.width, video_frame.height,
@ -538,7 +538,7 @@ bool ViECapturer::ViECaptureProcess() {
deliver_frame_.SwapFrame(captured_frame_);
captured_frame_.SetLength(0);
capture_cs_->Leave();
DeliverI420Frame(deliver_frame_);
DeliverI420Frame(&deliver_frame_);
}
if (encoded_frame_.Length() > 0) {
capture_cs_->Enter();
@ -546,7 +546,7 @@ bool ViECapturer::ViECaptureProcess() {
encoded_frame_.SetLength(0);
deliver_event_.Set();
capture_cs_->Leave();
DeliverCodedFrame(deliver_frame_);
DeliverCodedFrame(&deliver_frame_);
}
deliver_cs_->Leave();
if (current_brightness_level_ != reported_brightness_level_) {
@ -561,12 +561,12 @@ bool ViECapturer::ViECaptureProcess() {
return true;
}
void ViECapturer::DeliverI420Frame(VideoFrame& video_frame) {
void ViECapturer::DeliverI420Frame(VideoFrame* video_frame) {
// Apply image enhancement and effect filter.
if (deflicker_frame_stats_) {
if (image_proc_module_->GetFrameStats(*deflicker_frame_stats_,
video_frame) == 0) {
image_proc_module_->Deflickering(video_frame, *deflicker_frame_stats_);
*video_frame) == 0) {
image_proc_module_->Deflickering(*video_frame, *deflicker_frame_stats_);
} else {
WEBRTC_TRACE(kTraceStream, kTraceVideo, ViEId(engine_id_, capture_id_),
"%s: could not get frame stats for captured frame",
@ -574,13 +574,13 @@ void ViECapturer::DeliverI420Frame(VideoFrame& video_frame) {
}
}
if (denoising_enabled_) {
image_proc_module_->Denoising(video_frame);
image_proc_module_->Denoising(*video_frame);
}
if (brightness_frame_stats_) {
if (image_proc_module_->GetFrameStats(*brightness_frame_stats_,
video_frame) == 0) {
*video_frame) == 0) {
WebRtc_Word32 brightness = image_proc_module_->BrightnessDetection(
video_frame, *brightness_frame_stats_);
*video_frame, *brightness_frame_stats_);
switch (brightness) {
case VideoProcessingModule::kNoWarning:
@ -599,29 +599,32 @@ void ViECapturer::DeliverI420Frame(VideoFrame& video_frame) {
}
}
if (effect_filter_) {
effect_filter_->Transform(video_frame.Length(), video_frame.Buffer(),
video_frame.TimeStamp(), video_frame.Width(),
video_frame.Height());
effect_filter_->Transform(video_frame->Length(), video_frame->Buffer(),
video_frame->TimeStamp(), video_frame->Width(),
video_frame->Height());
}
// Deliver the captured frame to all observers (channels, renderer or file).
ViEFrameProviderBase::DeliverFrame(video_frame);
}
void ViECapturer::DeliverCodedFrame(VideoFrame& video_frame) {
void ViECapturer::DeliverCodedFrame(VideoFrame* video_frame) {
if (encode_complete_callback_) {
EncodedImage encoded_image(video_frame.Buffer(), video_frame.Length(),
video_frame.Size());
encoded_image._timeStamp = 90 * (WebRtc_UWord32) video_frame.RenderTimeMs();
EncodedImage encoded_image(video_frame->Buffer(), video_frame->Length(),
video_frame->Size());
encoded_image._timeStamp =
90 * static_cast<WebRtc_UWord32>(video_frame->RenderTimeMs());
encode_complete_callback_->Encoded(encoded_image);
}
if (NumberOfRegisteredFrameCallbacks() > 0 && decoder_initialized_) {
video_frame.Swap(decode_buffer_.payloadData, decode_buffer_.bufferSize,
video_frame->Swap(decode_buffer_.payloadData, decode_buffer_.bufferSize,
decode_buffer_.payloadSize);
decode_buffer_.encodedHeight = video_frame.Height();
decode_buffer_.encodedWidth = video_frame.Width();
decode_buffer_.renderTimeMs = video_frame.RenderTimeMs();
decode_buffer_.timeStamp = 90 * (WebRtc_UWord32) video_frame.RenderTimeMs();
decode_buffer_.encodedHeight = video_frame->Height();
decode_buffer_.encodedWidth = video_frame->Width();
decode_buffer_.renderTimeMs = video_frame->RenderTimeMs();
const int kMsToRtpTimestamp = 90;
decode_buffer_.timeStamp = kMsToRtpTimestamp *
static_cast<WebRtc_UWord32>(video_frame->RenderTimeMs());
decode_buffer_.payloadType = codec_.plType;
vcm_->DecodeFromStorage(decode_buffer_);
}
@ -830,14 +833,14 @@ WebRtc_Word32 ViECapturer::SetRates(WebRtc_UWord32 new_bit_rate,
return capture_encoder_->SetRates(new_bit_rate, frame_rate);
}
WebRtc_Word32 ViECapturer::FrameToRender(VideoFrame& video_frame) {
WebRtc_Word32 ViECapturer::FrameToRender(VideoFrame& video_frame) { // NOLINT
deliver_cs_->Enter();
DeliverI420Frame(video_frame);
DeliverI420Frame(&video_frame);
deliver_cs_->Leave();
return 0;
}
WebRtc_Word32 ViECapturer::RegisterObserver(ViECaptureObserver& observer) {
WebRtc_Word32 ViECapturer::RegisterObserver(ViECaptureObserver* observer) {
if (observer_) {
WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, capture_id_),
"%s Observer already registered", __FUNCTION__, capture_id_);
@ -848,7 +851,7 @@ WebRtc_Word32 ViECapturer::RegisterObserver(ViECaptureObserver& observer) {
}
capture_module_->EnableFrameRateCallback(true);
capture_module_->EnableNoPictureAlarm(true);
observer_ = &observer;
observer_ = observer;
return 0;
}

View File

@ -8,17 +8,17 @@
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_VIDEO_ENGINE_VIE_CAPTURER_H_
#define WEBRTC_VIDEO_ENGINE_VIE_CAPTURER_H_
#ifndef WEBRTC_VIDEO_ENGINE_VIE_CAPTURER_H_ // NOLINT
#define WEBRTC_VIDEO_ENGINE_VIE_CAPTURER_H_ // NOLINT
#include "common_types.h"
#include "engine_configurations.h"
#include "common_types.h" // NOLINT
#include "engine_configurations.h" // NOLINT
#include "modules/video_capture/main/interface/video_capture.h"
#include "modules/video_coding/codecs/interface/video_codec_interface.h"
#include "modules/video_coding/main/interface/video_coding.h"
#include "modules/video_processing/main/interface/video_processing.h"
#include "system_wrappers/interface/scoped_ptr.h"
#include "typedefs.h"
#include "typedefs.h" // NOLINT
#include "video_engine/include/vie_capture.h"
#include "video_engine/vie_defines.h"
#include "video_engine/vie_frame_provider_base.h"
@ -43,7 +43,7 @@ class ViECapturer
public:
static ViECapturer* CreateViECapture(int capture_id,
int engine_id,
VideoCaptureModule& capture_module,
VideoCaptureModule* capture_module,
ProcessThread& module_process_thread);
static ViECapturer* CreateViECapture(
@ -63,12 +63,13 @@ class ViECapturer
// Implements ExternalCapture.
virtual int IncomingFrame(unsigned char* video_frame,
unsigned int video_frame_length,
unsigned short width, unsigned short height,
uint16_t width,
uint16_t height,
RawVideoType video_type,
unsigned long long capture_time = 0);
unsigned long long capture_time = 0); // NOLINT
virtual int IncomingFrameI420(const ViEVideoFrameI420& video_frame,
unsigned long long capture_time = 0);
unsigned long long capture_time = 0); // NOLINT
// Use this capture device as encoder.
// Returns 0 if the codec is supported by this capture device.
@ -95,7 +96,7 @@ class ViECapturer
WebRtc_Word32 EnableBrightnessAlarm(bool enable);
// Statistics observer.
WebRtc_Word32 RegisterObserver(ViECaptureObserver& observer);
WebRtc_Word32 RegisterObserver(ViECaptureObserver* observer);
WebRtc_Word32 DeRegisterObserver();
bool IsObserverRegistered();
@ -110,7 +111,7 @@ class ViECapturer
int engine_id,
ProcessThread& module_process_thread);
WebRtc_Word32 Init(VideoCaptureModule& capture_module);
WebRtc_Word32 Init(VideoCaptureModule* capture_module);
WebRtc_Word32 Init(const char* device_unique_idUTF8,
const WebRtc_UWord32 device_unique_idUTF8Length);
@ -152,7 +153,8 @@ class ViECapturer
WebRtc_UWord32 frame_rate);
// Implements VCMReceiveCallback.
virtual WebRtc_Word32 FrameToRender(VideoFrame& video_frame);
// TODO(mflodman) Change input argument to pointer.
virtual WebRtc_Word32 FrameToRender(VideoFrame& video_frame); // NOLINT
// Implements VideoCaptureFeedBack
virtual void OnCaptureFrameRate(const WebRtc_Word32 id,
@ -164,8 +166,8 @@ class ViECapturer
static bool ViECaptureThreadFunction(void* obj);
bool ViECaptureProcess();
void DeliverI420Frame(VideoFrame& video_frame);
void DeliverCodedFrame(VideoFrame& video_frame);
void DeliverI420Frame(VideoFrame* video_frame);
void DeliverCodedFrame(VideoFrame* video_frame);
private:
// Never take capture_cs_ before deliver_cs_!
@ -219,4 +221,4 @@ class ViECapturer
} // namespace webrtc
#endif // WEBRTC_VIDEO_ENGINE_VIE_CAPTURER_H_
#endif // WEBRTC_VIDEO_ENGINE_VIE_CAPTURER_H_ // NOLINT

View File

@ -2004,7 +2004,7 @@ WebRtc_Word32 ViEChannel::FrameToRender(VideoFrame& video_frame) {
}
WEBRTC_TRACE(kTraceStream, kTraceVideo, ViEId(engine_id_, channel_id_),
"%s(timestamp:%u)", __FUNCTION__, video_frame.TimeStamp());
DeliverFrame(video_frame, no_of_csrcs, arr_ofCSRC);
DeliverFrame(&video_frame, no_of_csrcs, arr_ofCSRC);
return 0;
}

View File

@ -415,12 +415,13 @@ RtpRtcp* ViEEncoder::SendRtpRtcpModule() {
return default_rtp_rtcp_.get();
}
void ViEEncoder::DeliverFrame(int id, webrtc::VideoFrame& video_frame,
void ViEEncoder::DeliverFrame(int id,
VideoFrame* video_frame,
int num_csrcs,
const WebRtc_UWord32 CSRC[kRtpCsrcSize]) {
WEBRTC_TRACE(webrtc::kTraceStream, webrtc::kTraceVideo,
ViEId(engine_id_, channel_id_), "%s: %llu", __FUNCTION__,
video_frame.TimeStamp());
video_frame->TimeStamp());
{
CriticalSectionScoped cs(data_cs_.get());
@ -433,26 +434,28 @@ void ViEEncoder::DeliverFrame(int id, webrtc::VideoFrame& video_frame,
WEBRTC_TRACE(webrtc::kTraceStream, webrtc::kTraceVideo,
ViEId(engine_id_, channel_id_),
"%s: Dropping frame %llu after a key fame", __FUNCTION__,
video_frame.TimeStamp());
video_frame->TimeStamp());
drop_next_frame_ = false;
return;
}
}
// Convert render time, in ms, to RTP timestamp.
const int kMsToRtpTimestamp = 90;
const WebRtc_UWord32 time_stamp =
90 * static_cast<WebRtc_UWord32>(video_frame.RenderTimeMs());
video_frame.SetTimeStamp(time_stamp);
kMsToRtpTimestamp *
static_cast<WebRtc_UWord32>(video_frame->RenderTimeMs());
video_frame->SetTimeStamp(time_stamp);
{
CriticalSectionScoped cs(callback_cs_.get());
if (effect_filter_) {
effect_filter_->Transform(video_frame.Length(), video_frame.Buffer(),
video_frame.TimeStamp(),
video_frame.Width(), video_frame.Height());
effect_filter_->Transform(video_frame->Length(), video_frame->Buffer(),
video_frame->TimeStamp(),
video_frame->Width(), video_frame->Height());
}
}
// Record raw frame.
file_recorder_.RecordVideoFrame(video_frame);
file_recorder_.RecordVideoFrame(*video_frame);
// Make sure the CSRC list is correct.
if (num_csrcs > 0) {
@ -486,7 +489,7 @@ void ViEEncoder::DeliverFrame(int id, webrtc::VideoFrame& video_frame,
has_received_rpsi_ = false;
}
VideoFrame* decimated_frame = NULL;
const int ret = vpm_.PreprocessFrame(&video_frame, &decimated_frame);
const int ret = vpm_.PreprocessFrame(video_frame, &decimated_frame);
if (ret == 1) {
// Drop this frame.
return;
@ -494,7 +497,7 @@ void ViEEncoder::DeliverFrame(int id, webrtc::VideoFrame& video_frame,
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
ViEId(engine_id_, channel_id_),
"%s: Error preprocessing frame %u", __FUNCTION__,
video_frame.TimeStamp());
video_frame->TimeStamp());
return;
}
@ -503,7 +506,7 @@ void ViEEncoder::DeliverFrame(int id, webrtc::VideoFrame& video_frame,
// Frame was not re-sampled => use original.
if (decimated_frame == NULL) {
decimated_frame = &video_frame;
decimated_frame = video_frame;
}
if (vcm_.AddVideoFrame(*decimated_frame, content_metrics,
@ -511,7 +514,7 @@ void ViEEncoder::DeliverFrame(int id, webrtc::VideoFrame& video_frame,
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
ViEId(engine_id_, channel_id_),
"%s: Error encoding frame %u", __FUNCTION__,
video_frame.TimeStamp());
video_frame->TimeStamp());
}
return;
}
@ -519,7 +522,7 @@ void ViEEncoder::DeliverFrame(int id, webrtc::VideoFrame& video_frame,
// TODO(mflodman) Rewrite this to use code common to VP8 case.
// Pass frame via preprocessor.
VideoFrame* decimated_frame = NULL;
const int ret = vpm_.PreprocessFrame(&video_frame, &decimated_frame);
const int ret = vpm_.PreprocessFrame(video_frame, &decimated_frame);
if (ret == 1) {
// Drop this frame.
return;
@ -527,18 +530,18 @@ void ViEEncoder::DeliverFrame(int id, webrtc::VideoFrame& video_frame,
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
ViEId(engine_id_, channel_id_),
"%s: Error preprocessing frame %u", __FUNCTION__,
video_frame.TimeStamp());
video_frame->TimeStamp());
return;
}
// Frame was not sampled => use original.
if (decimated_frame == NULL) {
decimated_frame = &video_frame;
decimated_frame = video_frame;
}
if (vcm_.AddVideoFrame(*decimated_frame) != VCM_OK) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
ViEId(engine_id_, channel_id_), "%s: Error encoding frame %u",
__FUNCTION__, video_frame.TimeStamp());
__FUNCTION__, video_frame->TimeStamp());
}
}
@ -551,9 +554,9 @@ void ViEEncoder::DelayChanged(int id, int frame_delay) {
file_recorder_.SetFrameDelay(frame_delay);
}
int ViEEncoder::GetPreferedFrameSettings(int& width,
int& height,
int& frame_rate) {
int ViEEncoder::GetPreferedFrameSettings(int* width,
int* height,
int* frame_rate) {
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo,
ViEId(engine_id_, channel_id_), "%s", __FUNCTION__);
@ -566,9 +569,9 @@ int ViEEncoder::GetPreferedFrameSettings(int& width,
return -1;
}
width = video_codec.width;
height = video_codec.height;
frame_rate = video_codec.maxFramerate;
*width = video_codec.width;
*height = video_codec.height;
*frame_rate = video_codec.maxFramerate;
return 0;
}

View File

@ -82,13 +82,13 @@ class ViEEncoder
// Implementing ViEFrameCallback.
virtual void DeliverFrame(int id,
VideoFrame& video_frame,
VideoFrame* video_frame,
int num_csrcs = 0,
const WebRtc_UWord32 CSRC[kRtpCsrcSize] = NULL);
virtual void DelayChanged(int id, int frame_delay);
virtual int GetPreferedFrameSettings(int& width,
int& height,
int& frame_rate);
virtual int GetPreferedFrameSettings(int* width,
int* height,
int* frame_rate);
virtual void ProviderDestroyed(int id) {
return;

View File

@ -9,13 +9,13 @@
*/
// Placed first to get WEBRTC_VIDEO_ENGINE_FILE_API.
#include "engine_configurations.h"
#include "engine_configurations.h" // NOLINT
#ifdef WEBRTC_VIDEO_ENGINE_FILE_API
#include "video_engine/vie_file_image.h"
#include <stdio.h>
#include <stdio.h> // NOLINT
#include "common_video/interface/video_image.h"
#include "common_video/jpeg/include/jpeg.h"
@ -25,7 +25,7 @@ namespace webrtc {
int ViEFileImage::ConvertJPEGToVideoFrame(int engine_id,
const char* file_nameUTF8,
VideoFrame& video_frame) {
VideoFrame* video_frame) {
// Read jpeg file into temporary buffer.
EncodedImage image_buffer;
@ -90,7 +90,7 @@ int ViEFileImage::ConvertJPEGToVideoFrame(int engine_id,
// Image length in I420.
WebRtc_UWord32 image_length = (WebRtc_UWord32)(decoded_image._width *
decoded_image._height * 1.5);
if (-1 == video_frame.Swap(decoded_image._buffer, image_length,
if (-1 == video_frame->Swap(decoded_image._buffer, image_length,
image_length)) {
WEBRTC_TRACE(kTraceDebug, kTraceVideo, engine_id,
"%s could not copy frame image_decoded_buffer to video_frame ",
@ -103,20 +103,20 @@ int ViEFileImage::ConvertJPEGToVideoFrame(int engine_id,
decoded_image._buffer = NULL;
}
video_frame.SetWidth(decoded_image._width);
video_frame.SetHeight(decoded_image._height);
video_frame->SetWidth(decoded_image._width);
video_frame->SetHeight(decoded_image._height);
return 0;
}
int ViEFileImage::ConvertPictureToVideoFrame(int engine_id,
const ViEPicture& picture,
VideoFrame& video_frame) {
VideoFrame* video_frame) {
WebRtc_UWord32 picture_length = (WebRtc_UWord32)(picture.width *
picture.height * 1.5);
video_frame.CopyFrame(picture_length, picture.data);
video_frame.SetWidth(picture.width);
video_frame.SetHeight(picture.height);
video_frame.SetLength(picture_length);
video_frame->CopyFrame(picture_length, picture.data);
video_frame->SetWidth(picture.width);
video_frame->SetHeight(picture.height);
video_frame->SetLength(picture_length);
return 0;
}

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
@ -8,11 +8,11 @@
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_VIDEO_ENGINE_VIE_FILE_IMAGE_H_
#define WEBRTC_VIDEO_ENGINE_VIE_FILE_IMAGE_H_
#ifndef WEBRTC_VIDEO_ENGINE_VIE_FILE_IMAGE_H_ // NOLINT
#define WEBRTC_VIDEO_ENGINE_VIE_FILE_IMAGE_H_ // NOLINT
#include "modules/interface/module_common_types.h"
#include "typedefs.h"
#include "typedefs.h" // NOLINT
#include "video_engine/include/vie_file.h"
namespace webrtc {
@ -21,12 +21,12 @@ class ViEFileImage {
public:
static int ConvertJPEGToVideoFrame(int engine_id,
const char* file_nameUTF8,
VideoFrame& video_frame);
VideoFrame* video_frame);
static int ConvertPictureToVideoFrame(int engine_id,
const ViEPicture& picture,
VideoFrame& video_frame);
VideoFrame* video_frame);
};
} // namespace webrtc
#endif // WEBRTC_VIDEO_ENGINE_VIE_FILE_IMAGE_H_
#endif // WEBRTC_VIDEO_ENGINE_VIE_FILE_IMAGE_H_ // NOLINT

View File

@ -143,7 +143,7 @@ int ViEFileImpl::RegisterObserver(int file_id,
shared_data_->SetLastError(kViEFileObserverAlreadyRegistered);
return -1;
}
if (vie_file_player->RegisterObserver(observer) != 0) {
if (vie_file_player->RegisterObserver(&observer) != 0) {
WEBRTC_TRACE(kTraceError, kTraceVideo,
ViEId(shared_data_->instance_id(), file_id),
"%s: Failed to register observer", __FUNCTION__, file_id);
@ -727,7 +727,7 @@ int ViEFileImpl::SetCaptureDeviceImage(const int capture_id,
VideoFrame capture_image;
if (ViEFileImage::ConvertJPEGToVideoFrame(
ViEId(shared_data_->instance_id(), capture_id), file_nameUTF8,
capture_image) != 0) {
&capture_image) != 0) {
WEBRTC_TRACE(kTraceError, kTraceVideo,
ViEId(shared_data_->instance_id(), capture_id),
"%s(capture_id: %d) Failed to open file.", __FUNCTION__,
@ -743,7 +743,7 @@ int ViEFileImpl::SetCaptureDeviceImage(const int capture_id,
}
int ViEFileImpl::SetCaptureDeviceImage(const int capture_id,
const ViEPicture& picture) {
const ViEPicture& picture) {
WEBRTC_TRACE(kTraceApiCall, kTraceVideo, shared_data_->instance_id(),
"%s(capture_id: %d)", __FUNCTION__, capture_id);
@ -765,7 +765,7 @@ const ViEPicture& picture) {
VideoFrame capture_image;
if (ViEFileImage::ConvertPictureToVideoFrame(
ViEId(shared_data_->instance_id(), capture_id), picture,
capture_image) != 0) {
&capture_image) != 0) {
WEBRTC_TRACE(kTraceError, kTraceVideo,
ViEId(shared_data_->instance_id(), capture_id),
"%s(capture_id: %d) Failed to use picture.", __FUNCTION__,
@ -781,7 +781,7 @@ const ViEPicture& picture) {
}
int ViEFileImpl::SetRenderStartImage(const int video_channel,
const char* file_nameUTF8) {
const char* file_nameUTF8) {
WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
ViEId(shared_data_->instance_id(), video_channel),
"%s(video_channel: %d)", __FUNCTION__, video_channel);
@ -796,7 +796,7 @@ const char* file_nameUTF8) {
VideoFrame start_image;
if (ViEFileImage::ConvertJPEGToVideoFrame(
ViEId(shared_data_->instance_id(), video_channel), file_nameUTF8,
start_image) != 0) {
&start_image) != 0) {
WEBRTC_TRACE(kTraceError, kTraceVideo,
ViEId(shared_data_->instance_id(), video_channel),
"%s(video_channel: %d) Failed to open file.", __FUNCTION__,
@ -835,7 +835,7 @@ int ViEFileImpl::SetRenderStartImage(const int video_channel,
VideoFrame start_image;
if (ViEFileImage::ConvertPictureToVideoFrame(
ViEId(shared_data_->instance_id(), video_channel), picture,
start_image) != 0) {
&start_image) != 0) {
WEBRTC_TRACE(kTraceError, kTraceVideo,
ViEId(shared_data_->instance_id(), video_channel),
"%s(video_channel: %d) Failed to use picture.",
@ -865,7 +865,7 @@ int ViEFileImpl::SetRenderTimeoutImage(const int video_channel,
VideoFrame timeout_image;
if (ViEFileImage::ConvertJPEGToVideoFrame(
ViEId(shared_data_->instance_id(), video_channel), file_nameUTF8,
timeout_image) != 0) {
&timeout_image) != 0) {
WEBRTC_TRACE(kTraceError, kTraceVideo,
ViEId(shared_data_->instance_id(), video_channel),
"%s(video_channel: %d) Failed to open file.", __FUNCTION__,
@ -920,7 +920,7 @@ const unsigned int timeout_ms) {
VideoFrame timeout_image;
if (ViEFileImage::ConvertPictureToVideoFrame(
ViEId(shared_data_->instance_id(), video_channel), picture,
timeout_image) != 0) {
&timeout_image) != 0) {
WEBRTC_TRACE(kTraceError, kTraceVideo,
ViEId(shared_data_->instance_id(), video_channel),
"%s(video_channel: %d) Failed to use picture.",
@ -1003,14 +1003,15 @@ bool ViECaptureSnapshot::GetSnapshot(unsigned int max_wait_time,
return false;
}
void ViECaptureSnapshot::DeliverFrame(int id, VideoFrame& video_frame,
void ViECaptureSnapshot::DeliverFrame(int id,
VideoFrame* video_frame,
int num_csrcs,
const WebRtc_UWord32 CSRC[kRtpCsrcSize]) {
CriticalSectionScoped cs(crit_.get());
if (!video_frame_) {
return;
}
video_frame_->SwapFrame(video_frame);
video_frame_->SwapFrame(*video_frame);
condition_varaible_->WakeAll();
return;
}

View File

@ -33,11 +33,14 @@ class ViECaptureSnapshot : public ViEFrameCallback {
bool GetSnapshot(unsigned int max_wait_time, VideoFrame* video_frame);
// Implements ViEFrameCallback.
virtual void DeliverFrame(int id, VideoFrame& video_frame, int num_csrcs = 0,
virtual void DeliverFrame(int id,
VideoFrame* video_frame,
int num_csrcs = 0,
const WebRtc_UWord32 CSRC[kRtpCsrcSize] = NULL);
virtual void DelayChanged(int id, int frame_delay) {}
virtual int GetPreferedFrameSettings(int& width, int& height,
int& frame_rate) {
virtual int GetPreferedFrameSettings(int* width,
int* height,
int* frame_rate) {
return -1;
}
virtual void ProviderDestroyed(int id) {}

View File

@ -17,7 +17,7 @@
#include "system_wrappers/interface/tick_util.h"
#include "system_wrappers/interface/trace.h"
#include "video_engine/include/vie_file.h"
#include "video_engine/vie_input_manager.h"
#include "video_engine/vie_defines.h"
#include "voice_engine/main/interface/voe_base.h"
#include "voice_engine/main/interface/voe_file.h"
#include "voice_engine/main/interface/voe_video_sync.h"
@ -32,9 +32,8 @@ ViEFilePlayer* ViEFilePlayer::CreateViEFilePlayer(
const char* file_nameUTF8,
const bool loop,
const FileFormats file_format,
ViEInputManager& input_manager,
VoiceEngine* voe_ptr) {
ViEFilePlayer* self = new ViEFilePlayer(file_id, engine_id, input_manager);
ViEFilePlayer* self = new ViEFilePlayer(file_id, engine_id);
if (!self || self->Init(file_nameUTF8, loop, file_format, voe_ptr) != 0) {
delete self;
self = NULL;
@ -43,8 +42,7 @@ ViEFilePlayer* ViEFilePlayer::CreateViEFilePlayer(
}
ViEFilePlayer::ViEFilePlayer(int Id,
int engine_id,
ViEInputManager& input_manager)
int engine_id)
: ViEFrameProviderBase(Id, engine_id),
play_back_started_(false),
feedback_cs_(NULL),
@ -226,7 +224,7 @@ bool ViEFilePlayer::FilePlayDecodeProcess() {
audio_delay);
}
}
DeliverFrame(decoded_video_);
DeliverFrame(&decoded_video_);
decoded_video_.SetLength(0);
}
}
@ -349,12 +347,12 @@ bool ViEFilePlayer::IsObserverRegistered() {
return observer_ != NULL;
}
int ViEFilePlayer::RegisterObserver(ViEFileObserver& observer) {
int ViEFilePlayer::RegisterObserver(ViEFileObserver* observer) {
CriticalSectionScoped lock(*feedback_cs_);
if (observer_) {
return -1;
}
observer_ = &observer;
observer_ = observer;
return 0;
}

View File

@ -8,16 +8,16 @@
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_VIDEO_ENGINE_VIE_FILE_PLAYER_H_
#define WEBRTC_VIDEO_ENGINE_VIE_FILE_PLAYER_H_
#ifndef WEBRTC_VIDEO_ENGINE_VIE_FILE_PLAYER_H_ // NOLINT
#define WEBRTC_VIDEO_ENGINE_VIE_FILE_PLAYER_H_ // NOLINT
#include <list>
#include <set>
#include "common_types.h"
#include "common_types.h" // NOLINT
#include "modules/media_file/interface/media_file_defines.h"
#include "system_wrappers/interface/file_wrapper.h"
#include "typedefs.h"
#include "typedefs.h" // NOLINT
#include "video_engine/vie_frame_provider_base.h"
namespace webrtc {
@ -26,7 +26,6 @@ class EventWrapper;
class FilePlayer;
class ThreadWrapper;
class ViEFileObserver;
class ViEInputManager;
class VoEFile;
class VoEVideoSync;
class VoiceEngine;
@ -41,7 +40,6 @@ class ViEFilePlayer
const char* file_nameUTF8,
const bool loop,
const FileFormats file_format,
ViEInputManager& input_manager,
VoiceEngine* voe_ptr);
static int GetFileInformation(const int engine_id,
@ -52,7 +50,7 @@ class ViEFilePlayer
~ViEFilePlayer();
bool IsObserverRegistered();
int RegisterObserver(ViEFileObserver& observer);
int RegisterObserver(ViEFileObserver* observer);
int DeRegisterObserver();
int SendAudioOnChannel(const int audio_channel,
bool mix_microphone,
@ -65,7 +63,7 @@ class ViEFilePlayer
virtual int FrameCallbackChanged();
protected:
ViEFilePlayer(int Id, int engine_id, ViEInputManager& input_manager);
ViEFilePlayer(int Id, int engine_id);
int Init(const char* file_nameUTF8,
const bool loop,
const FileFormats file_format,
@ -137,4 +135,4 @@ class ViEFilePlayer
} // namespace webrtc
#endif // WEBRTC_VIDEO_ENGINE_VIE_FILE_PLAYER_H_
#endif // WEBRTC_VIDEO_ENGINE_VIE_FILE_PLAYER_H_ // NOLINT

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
@ -8,11 +8,11 @@
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_VIDEO_ENGINE_VIE_FILE_RECORDER_H_
#define WEBRTC_VIDEO_ENGINE_VIE_FILE_RECORDER_H_
#ifndef WEBRTC_VIDEO_ENGINE_VIE_FILE_RECORDER_H_ // NOLINT
#define WEBRTC_VIDEO_ENGINE_VIE_FILE_RECORDER_H_ // NOLINT
#include "modules/utility/interface/file_recorder.h"
#include "typedefs.h"
#include "typedefs.h" // NOLINT
#include "video_engine/include/vie_file.h"
#include "voice_engine/main/interface/voe_file.h"
@ -62,4 +62,4 @@ class ViEFileRecorder : protected OutStream {
} // namespace webrtc
#endif // WEBRTC_VIDEO_ENGINE_VIE_FILE_RECORDER_H_
#endif // WEBRTC_VIDEO_ENGINE_VIE_FILE_RECORDER_H_ // NOLINT

View File

@ -46,7 +46,7 @@ int ViEFrameProviderBase::Id() {
}
void ViEFrameProviderBase::DeliverFrame(
VideoFrame& video_frame,
VideoFrame* video_frame,
int num_csrcs,
const WebRtc_UWord32 CSRC[kRtpCsrcSize]) {
#ifdef DEBUG_
@ -66,8 +66,8 @@ void ViEFrameProviderBase::DeliverFrame(
if (!extra_frame_.get()) {
extra_frame_.reset(new VideoFrame());
}
extra_frame_->CopyFrame(video_frame);
(*it)->DeliverFrame(id_, *(extra_frame_.get()), num_csrcs, CSRC);
extra_frame_->CopyFrame(*video_frame);
(*it)->DeliverFrame(id_, extra_frame_.get(), num_csrcs, CSRC);
}
}
}
@ -96,9 +96,9 @@ int ViEFrameProviderBase::FrameDelay() {
return frame_delay_;
}
int ViEFrameProviderBase::GetBestFormat(int& best_width,
int& best_height,
int& best_frame_rate) {
int ViEFrameProviderBase::GetBestFormat(int* best_width,
int* best_height,
int* best_frame_rate) {
int largest_width = 0;
int largest_height = 0;
int highest_frame_rate = 0;
@ -109,8 +109,8 @@ int ViEFrameProviderBase::GetBestFormat(int& best_width,
int prefered_width = 0;
int prefered_height = 0;
int prefered_frame_rate = 0;
if ((*it)->GetPreferedFrameSettings(prefered_width, prefered_height,
prefered_frame_rate) == 0) {
if ((*it)->GetPreferedFrameSettings(&prefered_width, &prefered_height,
&prefered_frame_rate) == 0) {
if (prefered_width > largest_width) {
largest_width = prefered_width;
}
@ -122,9 +122,9 @@ int ViEFrameProviderBase::GetBestFormat(int& best_width,
}
}
}
best_width = largest_width;
best_height = largest_height;
best_frame_rate = highest_frame_rate;
*best_width = largest_width;
*best_height = largest_height;
*best_frame_rate = highest_frame_rate;
return 0;
}

View File

@ -8,14 +8,14 @@
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_VIDEO_ENGINE_VIE_FRAME_PROVIDER_BASE_H_
#define WEBRTC_VIDEO_ENGINE_VIE_FRAME_PROVIDER_BASE_H_
#ifndef WEBRTC_VIDEO_ENGINE_VIE_FRAME_PROVIDER_BASE_H_ // NOLINT
#define WEBRTC_VIDEO_ENGINE_VIE_FRAME_PROVIDER_BASE_H_ // NOLINT
#include <vector>
#include "common_types.h"
#include "common_types.h" // NOLINT
#include "system_wrappers/interface/scoped_ptr.h"
#include "typedefs.h"
#include "typedefs.h" // NOLINT
namespace webrtc {
@ -28,7 +28,7 @@ class VideoFrame;
class ViEFrameCallback {
public:
virtual void DeliverFrame(int id,
VideoFrame& video_frame,
VideoFrame* video_frame,
int num_csrcs = 0,
const WebRtc_UWord32 CSRC[kRtpCsrcSize] = NULL) = 0;
@ -37,9 +37,9 @@ class ViEFrameCallback {
virtual void DelayChanged(int id, int frame_delay) = 0;
// Get the width, height and frame rate preferred by this observer.
virtual int GetPreferedFrameSettings(int& width,
int& height,
int& frame_rate) = 0;
virtual int GetPreferedFrameSettings(int* width,
int* height,
int* frame_rate) = 0;
// ProviderDestroyed is called when the frame is about to be destroyed. There
// must not be any more calls to the frame provider after this.
@ -75,14 +75,14 @@ class ViEFrameProviderBase {
virtual int FrameCallbackChanged() = 0;
protected:
void DeliverFrame(VideoFrame& video_frame,
void DeliverFrame(VideoFrame* video_frame,
int num_csrcs = 0,
const WebRtc_UWord32 CSRC[kRtpCsrcSize] = NULL);
void SetFrameDelay(int frame_delay);
int FrameDelay();
int GetBestFormat(int& best_width,
int& best_height,
int& best_frame_rate);
int GetBestFormat(int* best_width,
int* best_height,
int* best_frame_rate);
int id_;
int engine_id_;
@ -99,4 +99,4 @@ class ViEFrameProviderBase {
} // namespace webrtc
#endif // WEBRTC_VIDEO_ENGINE_VIE_FRAME_PROVIDER_BASE_H_
#endif // WEBRTC_VIDEO_ENGINE_VIE_FRAME_PROVIDER_BASE_H_ // NOLINT

View File

@ -12,7 +12,7 @@
#include <cassert>
#include "common_types.h"
#include "common_types.h" // NOLINT
#include "modules/video_capture/main/interface/video_capture_factory.h"
#include "modules/video_coding/main/interface/video_coding.h"
#include "modules/video_coding/main/interface/video_coding_defines.h"
@ -63,9 +63,9 @@ ViEInputManager::~ViEInputManager() {
}
}
void ViEInputManager::SetModuleProcessThread(
ProcessThread& module_process_thread) {
ProcessThread* module_process_thread) {
assert(!module_process_thread_);
module_process_thread_ = &module_process_thread;
module_process_thread_ = module_process_thread;
}
int ViEInputManager::NumberOfCaptureDevices() {
@ -224,7 +224,7 @@ int ViEInputManager::CreateCaptureDevice(
}
int newcapture_id = 0;
if (GetFreeCaptureId(newcapture_id) == false) {
if (GetFreeCaptureId(&newcapture_id) == false) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(engine_id_),
"%s: Maximum supported number of capture devices already in "
"use", __FUNCTION__);
@ -255,14 +255,14 @@ int ViEInputManager::CreateCaptureDevice(
return 0;
}
int ViEInputManager::CreateCaptureDevice(VideoCaptureModule& capture_module,
int ViEInputManager::CreateCaptureDevice(VideoCaptureModule* capture_module,
int& capture_id) {
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(engine_id_), "%s",
__FUNCTION__);
CriticalSectionScoped cs(map_cs_.get());
int newcapture_id = 0;
if (!GetFreeCaptureId(newcapture_id)) {
if (!GetFreeCaptureId(&newcapture_id)) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(engine_id_),
"%s: Maximum supported number of capture devices already in "
"use", __FUNCTION__);
@ -332,7 +332,7 @@ int ViEInputManager::CreateExternalCaptureDevice(
CriticalSectionScoped cs(map_cs_.get());
int newcapture_id = 0;
if (GetFreeCaptureId(newcapture_id) == false) {
if (GetFreeCaptureId(&newcapture_id) == false) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(engine_id_),
"%s: Maximum supported number of capture devices already in "
"use", __FUNCTION__);
@ -372,7 +372,7 @@ int ViEInputManager::CreateFilePlayer(const char* file_nameUTF8,
CriticalSectionScoped cs(map_cs_.get());
int new_file_id = 0;
if (GetFreeFileId(new_file_id) == false) {
if (GetFreeFileId(&new_file_id) == false) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(engine_id_),
"%s: Maximum supported number of file players already in use",
__FUNCTION__);
@ -380,8 +380,7 @@ int ViEInputManager::CreateFilePlayer(const char* file_nameUTF8,
}
ViEFilePlayer* vie_file_player = ViEFilePlayer::CreateViEFilePlayer(
new_file_id, engine_id_, file_nameUTF8, loop, file_format, *this,
voe_ptr);
new_file_id, engine_id_, file_nameUTF8, loop, file_format, voe_ptr);
if (!vie_file_player) {
ReturnFileId(new_file_id);
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(engine_id_),
@ -441,16 +440,16 @@ int ViEInputManager::DestroyFilePlayer(int file_id) {
return 0;
}
bool ViEInputManager::GetFreeCaptureId(int& freecapture_id) {
bool ViEInputManager::GetFreeCaptureId(int* freecapture_id) {
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(engine_id_), "%s",
__FUNCTION__);
for (int id = 0; id < kViEMaxCaptureDevices; id++) {
if (free_capture_device_id_[id]) {
// We found a free capture device id.
free_capture_device_id_[id] = false;
freecapture_id = id + kViECaptureIdBase;
*freecapture_id = id + kViECaptureIdBase;
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(engine_id_),
"%s: new id: %d", __FUNCTION__, freecapture_id);
"%s: new id: %d", __FUNCTION__, *freecapture_id);
return true;
}
}
@ -468,7 +467,7 @@ void ViEInputManager::ReturnCaptureId(int capture_id) {
return;
}
bool ViEInputManager::GetFreeFileId(int& free_file_id) {
bool ViEInputManager::GetFreeFileId(int* free_file_id) {
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(engine_id_), "%s",
__FUNCTION__);
@ -476,9 +475,9 @@ bool ViEInputManager::GetFreeFileId(int& free_file_id) {
if (free_file_id_[id]) {
// We found a free capture device id.
free_file_id_[id] = false;
free_file_id = id + kViEFileIdBase;
*free_file_id = id + kViEFileIdBase;
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(engine_id_),
"%s: new id: %d", __FUNCTION__, free_file_id);
"%s: new id: %d", __FUNCTION__, *free_file_id);
return true;
}
}
@ -542,20 +541,6 @@ ViECapturer* ViEInputManager::ViECapturePtr(int capture_id) const {
return vie_capture;
}
// Copies every (id, item) entry of |vie_frame_provider_map_| into the
// caller-supplied |vie_capture_map|, under the protection of |map_cs_|.
// Leaves |vie_capture_map| untouched when the provider map is empty.
// NOTE(review): despite the name, this iterates |vie_frame_provider_map_|,
// so any non-capture frame providers held in that map would be copied as
// well -- confirm all providers in this map are capture devices.
void ViEInputManager::GetViECaptures(MapWrapper& vie_capture_map) {
  // Hold the map lock for the whole copy so the snapshot is consistent.
  CriticalSectionScoped cs(map_cs_.get());
  if (vie_frame_provider_map_.Size() == 0) {
    return;
  }
  // Add all items to the map.
  for (MapItem* item = vie_frame_provider_map_.First(); item != NULL;
       item = vie_frame_provider_map_.Next(item)) {
    vie_capture_map.Insert(item->GetId(), item->GetItem());
  }
  return;
}
ViEFilePlayer* ViEInputManager::ViEFilePlayerPtr(int file_id) const {
if (file_id < kViEFileIdBase || file_id > kViEFileIdMax) {
return NULL;

View File

@ -8,13 +8,13 @@
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_VIDEO_ENGINE_VIE_INPUT_MANAGER_H_
#define WEBRTC_VIDEO_ENGINE_VIE_INPUT_MANAGER_H_
#ifndef WEBRTC_VIDEO_ENGINE_VIE_INPUT_MANAGER_H_ // NOLINT
#define WEBRTC_VIDEO_ENGINE_VIE_INPUT_MANAGER_H_ // NOLINT
#include "modules/video_capture/main/interface/video_capture.h"
#include "system_wrappers/interface/map_wrapper.h"
#include "system_wrappers/interface/scoped_ptr.h"
#include "typedefs.h"
#include "typedefs.h" // NOLINT
#include "video_engine/include/vie_capture.h"
#include "video_engine/vie_defines.h"
#include "video_engine/vie_frame_provider_base.h"
@ -36,7 +36,7 @@ class ViEInputManager : private ViEManagerBase {
explicit ViEInputManager(int engine_id);
~ViEInputManager();
void SetModuleProcessThread(ProcessThread& module_process_thread);
void SetModuleProcessThread(ProcessThread* module_process_thread);
// Returns number of capture devices.
int NumberOfCaptureDevices();
@ -71,7 +71,7 @@ class ViEInputManager : private ViEManagerBase {
int CreateCaptureDevice(const char* device_unique_idUTF8,
const WebRtc_UWord32 device_unique_idUTF8Length,
int& capture_id);
int CreateCaptureDevice(VideoCaptureModule& capture_module,
int CreateCaptureDevice(VideoCaptureModule* capture_module,
int& capture_id);
int CreateExternalCaptureDevice(ViEExternalCapture*& external_capture,
int& capture_id);
@ -85,13 +85,13 @@ class ViEInputManager : private ViEManagerBase {
private:
// Gets and allocates a free capture device id. Assumed protected by caller.
bool GetFreeCaptureId(int& freecapture_id);
bool GetFreeCaptureId(int* freecapture_id);
// Frees a capture id assigned in GetFreeCaptureId.
void ReturnCaptureId(int capture_id);
// Gets and allocates a free file id. Assumed protected by caller.
bool GetFreeFileId(int& free_file_id);
bool GetFreeFileId(int* free_file_id);
// Frees a file id assigned in GetFreeFileId.
void ReturnFileId(int file_id);
@ -106,9 +106,6 @@ class ViEInputManager : private ViEManagerBase {
// Gets the ViECapturer for the capture device id.
ViECapturer* ViECapturePtr(int capture_id) const;
// Gets the the entire map with GetViECaptures.
void GetViECaptures(MapWrapper& vie_capture_map);
// Gets the ViEFilePlayer for this file_id.
ViEFilePlayer* ViEFilePlayerPtr(int file_id) const;
@ -123,7 +120,7 @@ class ViEInputManager : private ViEManagerBase {
// File Players.
int free_file_id_[kViEMaxFilePlayers];
ProcessThread* module_process_thread_;
ProcessThread* module_process_thread_; // Weak.
};
// Provides protected access to ViEInputManater.
@ -140,4 +137,4 @@ class ViEInputManagerScoped: private ViEManagerScopedBase {
} // namespace webrtc
#endif // WEBRTC_VIDEO_ENGINE_VIE_INPUT_MANAGER_H_
#endif // WEBRTC_VIDEO_ENGINE_VIE_INPUT_MANAGER_H_ // NOLINT

View File

@ -126,17 +126,17 @@ WebRtc_Word32 ViERenderer::Init(const WebRtc_UWord32 z_order,
}
void ViERenderer::DeliverFrame(int id,
VideoFrame& video_frame,
VideoFrame* video_frame,
int num_csrcs,
const WebRtc_UWord32 CSRC[kRtpCsrcSize]) {
render_callback_->RenderFrame(render_id_, video_frame);
render_callback_->RenderFrame(render_id_, *video_frame);
}
void ViERenderer::DelayChanged(int id, int frame_delay) {}
int ViERenderer::GetPreferedFrameSettings(int& width,
int& height,
int& frame_rate) {
int ViERenderer::GetPreferedFrameSettings(int* width,
int* height,
int* frame_rate) {
return -1;
}

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
@ -95,13 +95,13 @@ class ViERenderer: public ViEFrameCallback {
// Implement ViEFrameCallback
virtual void DeliverFrame(int id,
VideoFrame& video_frame,
VideoFrame* video_frame,
int num_csrcs = 0,
const WebRtc_UWord32 CSRC[kRtpCsrcSize] = NULL);
virtual void DelayChanged(int id, int frame_delay);
virtual int GetPreferedFrameSettings(int& width,
int& height,
int& frame_rate);
virtual int GetPreferedFrameSettings(int* width,
int* height,
int* frame_rate);
virtual void ProviderDestroyed(int id);
WebRtc_UWord32 render_id_;

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
@ -35,7 +35,7 @@ ViESharedData::ViESharedData()
last_error_(0) {
Trace::CreateTrace();
channel_manager_.SetModuleProcessThread(*module_process_thread_);
input_manager_.SetModuleProcessThread(*module_process_thread_);
input_manager_.SetModuleProcessThread(module_process_thread_);
module_process_thread_->Start();
}