Remove usage of webrtc trace in video processing modules.

BUG=3153
R=mflodman@webrtc.org

Review URL: https://webrtc-codereview.appspot.com/11089005

git-svn-id: http://webrtc.googlecode.com/svn/trunk@5880 4adac7df-926f-26a2-2b94-8c16560cd09d
asapersson@webrtc.org 2014-04-10 11:30:49 +00:00
parent 0273fa98e0
commit 2a770828d8
9 changed files with 34 additions and 74 deletions
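For context, the pattern applied across these files is sketched below: error paths that previously reported through the WEBRTC_TRACE macro now either log through LOG(LS_ERROR) / LOG(LS_WARNING) from webrtc/system_wrappers/interface/logging.h or simply return the error code without any trace call. The sketch is illustrative only and is not part of the diff; CheckFrameSize is a hypothetical helper.

// Illustrative sketch, not part of the commit; CheckFrameSize is hypothetical.
#include "webrtc/modules/video_processing/main/interface/video_processing.h"
#include "webrtc/system_wrappers/interface/logging.h"

namespace webrtc {

int32_t CheckFrameSize(const I420VideoFrame& frame) {
  if (frame.IsZeroSize()) {
    // Previously: WEBRTC_TRACE(kTraceError, kTraceVideoPreocessing, -1, "zero size frame");
    LOG(LS_ERROR) << "Zero size frame.";
    return VPM_PARAMETER_ERROR;
  }
  return VPM_OK;
}

}  // namespace webrtc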

@@ -93,7 +93,6 @@ enum TraceModule
kTraceAudioDevice = 0x0012,
kTraceVideoRenderer = 0x0014,
kTraceVideoCapture = 0x0015,
kTraceVideoPreocessing = 0x0016,
kTraceRemoteBitrateEstimator = 0x0017,
};

@@ -12,22 +12,15 @@
#include <stdlib.h>
#include "webrtc/system_wrappers/interface/trace.h"
namespace webrtc {
namespace VideoProcessing {
int32_t Brighten(I420VideoFrame* frame, int delta) {
assert(frame);
if (frame->IsZeroSize()) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, -1,
"zero size frame");
return VPM_PARAMETER_ERROR;
}
if (frame->width() <= 0 || frame->height() <= 0) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, -1,
"Invalid frame size");
return VPM_PARAMETER_ERROR;
}

@@ -10,7 +10,6 @@
#include "webrtc/modules/video_processing/main/interface/video_processing.h"
#include "webrtc/modules/video_processing/main/source/brightness_detection.h"
#include "webrtc/system_wrappers/interface/trace.h"
#include <math.h>
@@ -37,16 +36,12 @@ int32_t VPMBrightnessDetection::ProcessFrame(
const I420VideoFrame& frame,
const VideoProcessingModule::FrameStats& stats) {
if (frame.IsZeroSize()) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, id_,
"Null frame pointer");
return VPM_PARAMETER_ERROR;
}
int width = frame.width();
int height = frame.height();
if (!VideoProcessingModule::ValidFrameStats(stats)) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, id_,
"Invalid frame stats");
return VPM_PARAMETER_ERROR;
}
@@ -58,7 +53,7 @@ int32_t VPMBrightnessDetection::ProcessFrame(
for (uint32_t i = 0; i < low_th; i++) {
prop_low += stats.hist[i];
}
prop_low /= stats.num_pixels;
prop_low /= stats.num_pixels;
// Get proportion in highest bins.
unsigned char high_th = 230;

@@ -12,44 +12,38 @@
#include "webrtc/modules/video_processing/main/source/color_enhancement.h"
#include "webrtc/modules/video_processing/main/source/color_enhancement_private.h"
#include "webrtc/system_wrappers/interface/trace.h"
namespace webrtc {
namespace VideoProcessing {
int32_t ColorEnhancement(I420VideoFrame* frame) {
assert(frame);
// Pointers to U and V color pixels.
uint8_t* ptr_u;
uint8_t* ptr_v;
uint8_t temp_chroma;
if (frame->IsZeroSize()) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing,
-1, "Null frame pointer");
return VPM_GENERAL_ERROR;
}
assert(frame);
// Pointers to U and V color pixels.
uint8_t* ptr_u;
uint8_t* ptr_v;
uint8_t temp_chroma;
if (frame->IsZeroSize()) {
return VPM_GENERAL_ERROR;
}
if (frame->width() == 0 || frame->height() == 0) {
return VPM_GENERAL_ERROR;
}
if (frame->width() == 0 || frame->height() == 0) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing,
-1, "Invalid frame size");
return VPM_GENERAL_ERROR;
}
// Set pointers to first U and V pixels (skip luminance).
ptr_u = frame->buffer(kUPlane);
ptr_v = frame->buffer(kVPlane);
int size_uv = ((frame->width() + 1) / 2) * ((frame->height() + 1) / 2);
// Set pointers to first U and V pixels (skip luminance).
ptr_u = frame->buffer(kUPlane);
ptr_v = frame->buffer(kVPlane);
int size_uv = ((frame->width() + 1) / 2) * ((frame->height() + 1) / 2);
// Loop through all chrominance pixels and modify color.
for (int ix = 0; ix < size_uv; ix++) {
temp_chroma = colorTable[*ptr_u][*ptr_v];
*ptr_v = colorTable[*ptr_v][*ptr_u];
*ptr_u = temp_chroma;
// Loop through all chrominance pixels and modify color.
for (int ix = 0; ix < size_uv; ix++) {
temp_chroma = colorTable[*ptr_u][*ptr_v];
*ptr_v = colorTable[*ptr_v][*ptr_u];
*ptr_u = temp_chroma;
ptr_u++;
ptr_v++;
}
return VPM_OK;
ptr_u++;
ptr_v++;
}
return VPM_OK;
}
} // namespace VideoProcessing

@@ -14,8 +14,8 @@
#include <stdlib.h>
#include "webrtc/common_audio/signal_processing/include/signal_processing_library.h"
#include "webrtc/system_wrappers/interface/logging.h"
#include "webrtc/system_wrappers/interface/sort.h"
#include "webrtc/system_wrappers/interface/trace.h"
namespace webrtc {
@@ -102,21 +102,16 @@ int32_t VPMDeflickering::ProcessFrame(I420VideoFrame* frame,
int height = frame->height();
if (frame->IsZeroSize()) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, id_,
"Null frame pointer");
return VPM_GENERAL_ERROR;
}
// Stricter height check due to subsampling size calculation below.
if (height < 2) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, id_,
"Invalid frame size");
LOG(LS_ERROR) << "Invalid frame size.";
return VPM_GENERAL_ERROR;
}
if (!VideoProcessingModule::ValidFrameStats(*stats)) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, id_,
"Invalid frame stats");
return VPM_GENERAL_ERROR;
}
@@ -152,8 +147,7 @@ int32_t VPMDeflickering::ProcessFrame(I420VideoFrame* frame,
// Ensure we won't get an overflow below.
// In practice, the number of subsampled pixels will not become this large.
if (y_sub_size > (1 << 21) - 1) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, id_,
"Subsampled number of pixels too large");
LOG(LS_ERROR) << "Subsampled number of pixels too large.";
return -1;
}

@@ -9,7 +9,6 @@
*/
#include "webrtc/modules/video_processing/main/source/denoising.h"
#include "webrtc/system_wrappers/interface/trace.h"
#include <string.h>
@@ -78,8 +77,6 @@ int32_t VPMDenoising::ProcessFrame(I420VideoFrame* frame) {
int32_t num_pixels_changed = 0;
if (frame->IsZeroSize()) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, id_,
"zero size frame");
return VPM_GENERAL_ERROR;
}

@@ -9,7 +9,6 @@
*/
#include "webrtc/modules/video_processing/main/source/frame_preprocessor.h"
#include "webrtc/system_wrappers/interface/trace.h"
namespace webrtc {
@@ -112,8 +111,6 @@ int32_t VPMFramePreprocessor::PreprocessFrame(const I420VideoFrame& frame,
vd_->UpdateIncomingframe_rate();
if (vd_->DropFrame()) {
WEBRTC_TRACE(webrtc::kTraceStream, webrtc::kTraceVideo, id_,
"Drop frame due to frame rate");
return 1; // drop 1 frame
}

@@ -11,7 +11,7 @@
#include "webrtc/modules/video_processing/main/source/video_processing_impl.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
#include "webrtc/system_wrappers/interface/trace.h"
#include "webrtc/system_wrappers/interface/logging.h"
#include <assert.h>
@@ -68,13 +68,9 @@ VideoProcessingModuleImpl::VideoProcessingModuleImpl(const int32_t id)
deflickering_.ChangeUniqueId(id);
denoising_.ChangeUniqueId(id);
frame_pre_processor_.ChangeUniqueId(id);
WEBRTC_TRACE(webrtc::kTraceMemory, webrtc::kTraceVideoPreocessing, id_,
"Created");
}
VideoProcessingModuleImpl::~VideoProcessingModuleImpl() {
WEBRTC_TRACE(webrtc::kTraceMemory, webrtc::kTraceVideoPreocessing, id_,
"Destroyed");
delete &mutex_;
}
@@ -89,8 +85,7 @@ void VideoProcessingModuleImpl::Reset() {
int32_t VideoProcessingModule::GetFrameStats(FrameStats* stats,
const I420VideoFrame& frame) {
if (frame.IsZeroSize()) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, -1,
"zero size frame");
LOG(LS_ERROR) << "Zero size frame.";
return VPM_PARAMETER_ERROR;
}
@@ -121,7 +116,10 @@ int32_t VideoProcessingModule::GetFrameStats(FrameStats* stats,
}
bool VideoProcessingModule::ValidFrameStats(const FrameStats& stats) {
if (stats.num_pixels == 0) return false;
if (stats.num_pixels == 0) {
LOG(LS_WARNING) << "Invalid frame stats.";
return false;
}
return true;
}

@@ -269,10 +269,6 @@ int32_t TraceImpl::AddModuleAndId(char* trace_message,
sprintf(trace_message, "VIDEO CAPTUR:%5ld %5ld;", id_engine,
id_channel);
break;
case kTraceVideoPreocessing:
sprintf(trace_message, " VIDEO PROC:%5ld %5ld;", id_engine,
id_channel);
break;
case kTraceRemoteBitrateEstimator:
sprintf(trace_message, " BWE RBE:%5ld %5ld;", id_engine,
id_channel);
@@ -333,9 +329,6 @@ int32_t TraceImpl::AddModuleAndId(char* trace_message,
case kTraceVideoCapture:
sprintf(trace_message, "VIDEO CAPTUR:%11ld;", idl);
break;
case kTraceVideoPreocessing:
sprintf(trace_message, " VIDEO PROC:%11ld;", idl);
break;
case kTraceRemoteBitrateEstimator:
sprintf(trace_message, " BWE RBE:%11ld;", idl);
break;