diff --git a/webrtc/common_types.h b/webrtc/common_types.h
index 5d63a61df..c566f6b85 100644
--- a/webrtc/common_types.h
+++ b/webrtc/common_types.h
@@ -93,7 +93,6 @@ enum TraceModule
   kTraceAudioDevice = 0x0012,
   kTraceVideoRenderer = 0x0014,
   kTraceVideoCapture = 0x0015,
-  kTraceVideoPreocessing = 0x0016,
   kTraceRemoteBitrateEstimator = 0x0017,
 };
 
diff --git a/webrtc/modules/video_processing/main/source/brighten.cc b/webrtc/modules/video_processing/main/source/brighten.cc
index ffabbf7ff..907a54906 100644
--- a/webrtc/modules/video_processing/main/source/brighten.cc
+++ b/webrtc/modules/video_processing/main/source/brighten.cc
@@ -12,22 +12,15 @@
 
 #include 
 
-#include "webrtc/system_wrappers/interface/trace.h"
-
 namespace webrtc {
 namespace VideoProcessing {
 
 int32_t Brighten(I420VideoFrame* frame, int delta) {
   assert(frame);
   if (frame->IsZeroSize()) {
-    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, -1,
-                 "zero size frame");
     return VPM_PARAMETER_ERROR;
   }
-
   if (frame->width() <= 0 || frame->height() <= 0) {
-    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, -1,
-                 "Invalid frame size");
     return VPM_PARAMETER_ERROR;
   }
 
diff --git a/webrtc/modules/video_processing/main/source/brightness_detection.cc b/webrtc/modules/video_processing/main/source/brightness_detection.cc
index 8817bac43..f33117d13 100644
--- a/webrtc/modules/video_processing/main/source/brightness_detection.cc
+++ b/webrtc/modules/video_processing/main/source/brightness_detection.cc
@@ -10,7 +10,6 @@
 
 #include "webrtc/modules/video_processing/main/interface/video_processing.h"
 #include "webrtc/modules/video_processing/main/source/brightness_detection.h"
-#include "webrtc/system_wrappers/interface/trace.h"
 
 #include 
 
@@ -37,16 +36,12 @@ int32_t VPMBrightnessDetection::ProcessFrame(
     const I420VideoFrame& frame,
     const VideoProcessingModule::FrameStats& stats) {
   if (frame.IsZeroSize()) {
-    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, id_,
-                 "Null frame pointer");
     return VPM_PARAMETER_ERROR;
   }
   int width = frame.width();
   int height = frame.height();
 
   if (!VideoProcessingModule::ValidFrameStats(stats)) {
-    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, id_,
-                 "Invalid frame stats");
     return VPM_PARAMETER_ERROR;
   }
 
@@ -58,7 +53,7 @@ int32_t VPMBrightnessDetection::ProcessFrame(
   for (uint32_t i = 0; i < low_th; i++) {
     prop_low += stats.hist[i];
   }
-prop_low /= stats.num_pixels;
+  prop_low /= stats.num_pixels;
 
   // Get proportion in highest bins.
   unsigned char high_th = 230;
diff --git a/webrtc/modules/video_processing/main/source/color_enhancement.cc b/webrtc/modules/video_processing/main/source/color_enhancement.cc
index eeec01659..aaa3a4622 100644
--- a/webrtc/modules/video_processing/main/source/color_enhancement.cc
+++ b/webrtc/modules/video_processing/main/source/color_enhancement.cc
@@ -12,44 +12,38 @@
 
 #include "webrtc/modules/video_processing/main/source/color_enhancement.h"
 #include "webrtc/modules/video_processing/main/source/color_enhancement_private.h"
-#include "webrtc/system_wrappers/interface/trace.h"
 
 namespace webrtc {
 namespace VideoProcessing {
 
 int32_t ColorEnhancement(I420VideoFrame* frame) {
-assert(frame);
-// Pointers to U and V color pixels.
-uint8_t* ptr_u;
-uint8_t* ptr_v;
-uint8_t temp_chroma;
-if (frame->IsZeroSize()) {
-  WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing,
-               -1, "Null frame pointer");
-  return VPM_GENERAL_ERROR;
-}
+  assert(frame);
+  // Pointers to U and V color pixels.
+  uint8_t* ptr_u;
+  uint8_t* ptr_v;
+  uint8_t temp_chroma;
+  if (frame->IsZeroSize()) {
+    return VPM_GENERAL_ERROR;
+  }
+  if (frame->width() == 0 || frame->height() == 0) {
+    return VPM_GENERAL_ERROR;
+  }
 
-if (frame->width() == 0 || frame->height() == 0) {
-  WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing,
-               -1, "Invalid frame size");
-  return VPM_GENERAL_ERROR;
-}
+  // Set pointers to first U and V pixels (skip luminance).
+  ptr_u = frame->buffer(kUPlane);
+  ptr_v = frame->buffer(kVPlane);
+  int size_uv = ((frame->width() + 1) / 2) * ((frame->height() + 1) / 2);
 
-// Set pointers to first U and V pixels (skip luminance).
-ptr_u = frame->buffer(kUPlane);
-ptr_v = frame->buffer(kVPlane);
-int size_uv = ((frame->width() + 1) / 2) * ((frame->height() + 1) / 2);
+  // Loop through all chrominance pixels and modify color.
+  for (int ix = 0; ix < size_uv; ix++) {
+    temp_chroma = colorTable[*ptr_u][*ptr_v];
+    *ptr_v = colorTable[*ptr_v][*ptr_u];
+    *ptr_u = temp_chroma;
 
-// Loop through all chrominance pixels and modify color.
-for (int ix = 0; ix < size_uv; ix++) {
-  temp_chroma = colorTable[*ptr_u][*ptr_v];
-  *ptr_v = colorTable[*ptr_v][*ptr_u];
-  *ptr_u = temp_chroma;
-
-  ptr_u++;
-  ptr_v++;
-}
-return VPM_OK;
+    ptr_u++;
+    ptr_v++;
+  }
+  return VPM_OK;
 }
 
 }  // namespace VideoProcessing
diff --git a/webrtc/modules/video_processing/main/source/deflickering.cc b/webrtc/modules/video_processing/main/source/deflickering.cc
index 898fd80f4..cdc617488 100644
--- a/webrtc/modules/video_processing/main/source/deflickering.cc
+++ b/webrtc/modules/video_processing/main/source/deflickering.cc
@@ -14,8 +14,8 @@
 #include 
 
 #include "webrtc/common_audio/signal_processing/include/signal_processing_library.h"
+#include "webrtc/system_wrappers/interface/logging.h"
 #include "webrtc/system_wrappers/interface/sort.h"
-#include "webrtc/system_wrappers/interface/trace.h"
 
 namespace webrtc {
 
@@ -102,21 +102,16 @@ int32_t VPMDeflickering::ProcessFrame(I420VideoFrame* frame,
   int height = frame->height();
 
   if (frame->IsZeroSize()) {
-    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, id_,
-                 "Null frame pointer");
     return VPM_GENERAL_ERROR;
   }
 
   // Stricter height check due to subsampling size calculation below.
   if (height < 2) {
-    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, id_,
-                 "Invalid frame size");
+    LOG(LS_ERROR) << "Invalid frame size.";
     return VPM_GENERAL_ERROR;
   }
 
   if (!VideoProcessingModule::ValidFrameStats(*stats)) {
-    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, id_,
-                 "Invalid frame stats");
     return VPM_GENERAL_ERROR;
   }
 
@@ -152,8 +147,7 @@ int32_t VPMDeflickering::ProcessFrame(I420VideoFrame* frame,
   // Ensure we won't get an overflow below.
   // In practice, the number of subsampled pixels will not become this large.
   if (y_sub_size > (1 << 21) - 1) {
-    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, id_,
-                 "Subsampled number of pixels too large");
+    LOG(LS_ERROR) << "Subsampled number of pixels too large.";
     return -1;
   }
 
diff --git a/webrtc/modules/video_processing/main/source/denoising.cc b/webrtc/modules/video_processing/main/source/denoising.cc
index 79c4bcc3d..4c8dcb439 100644
--- a/webrtc/modules/video_processing/main/source/denoising.cc
+++ b/webrtc/modules/video_processing/main/source/denoising.cc
@@ -9,7 +9,6 @@
  */
 
 #include "webrtc/modules/video_processing/main/source/denoising.h"
-#include "webrtc/system_wrappers/interface/trace.h"
 
 #include 
 
@@ -78,8 +77,6 @@ int32_t VPMDenoising::ProcessFrame(I420VideoFrame* frame) {
   int32_t num_pixels_changed = 0;
 
   if (frame->IsZeroSize()) {
-    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, id_,
-                 "zero size frame");
     return VPM_GENERAL_ERROR;
   }
 
diff --git a/webrtc/modules/video_processing/main/source/frame_preprocessor.cc b/webrtc/modules/video_processing/main/source/frame_preprocessor.cc
index de4907029..8050f7049 100644
--- a/webrtc/modules/video_processing/main/source/frame_preprocessor.cc
+++ b/webrtc/modules/video_processing/main/source/frame_preprocessor.cc
@@ -9,7 +9,6 @@
  */
 
 #include "webrtc/modules/video_processing/main/source/frame_preprocessor.h"
-#include "webrtc/system_wrappers/interface/trace.h"
 
 namespace webrtc {
 
@@ -112,8 +111,6 @@ int32_t VPMFramePreprocessor::PreprocessFrame(const I420VideoFrame& frame,
   vd_->UpdateIncomingframe_rate();
 
   if (vd_->DropFrame()) {
-    WEBRTC_TRACE(webrtc::kTraceStream, webrtc::kTraceVideo, id_,
-                 "Drop frame due to frame rate");
     return 1;  // drop 1 frame
   }
 
diff --git a/webrtc/modules/video_processing/main/source/video_processing_impl.cc b/webrtc/modules/video_processing/main/source/video_processing_impl.cc
index af1bfe1a4..999227ea2 100644
--- a/webrtc/modules/video_processing/main/source/video_processing_impl.cc
+++ b/webrtc/modules/video_processing/main/source/video_processing_impl.cc
@@ -11,7 +11,7 @@
 #include "webrtc/modules/video_processing/main/source/video_processing_impl.h"
 #include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
-#include "webrtc/system_wrappers/interface/trace.h"
+#include "webrtc/system_wrappers/interface/logging.h"
 
 #include 
 
@@ -68,13 +68,9 @@ VideoProcessingModuleImpl::VideoProcessingModuleImpl(const int32_t id)
   deflickering_.ChangeUniqueId(id);
   denoising_.ChangeUniqueId(id);
   frame_pre_processor_.ChangeUniqueId(id);
-  WEBRTC_TRACE(webrtc::kTraceMemory, webrtc::kTraceVideoPreocessing, id_,
-               "Created");
 }
 
 VideoProcessingModuleImpl::~VideoProcessingModuleImpl() {
-  WEBRTC_TRACE(webrtc::kTraceMemory, webrtc::kTraceVideoPreocessing, id_,
-               "Destroyed");
   delete &mutex_;
 }
 
@@ -89,8 +85,7 @@ void VideoProcessingModuleImpl::Reset() {
 int32_t VideoProcessingModule::GetFrameStats(FrameStats* stats,
                                              const I420VideoFrame& frame) {
   if (frame.IsZeroSize()) {
-    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, -1,
-                 "zero size frame");
+    LOG(LS_ERROR) << "Zero size frame.";
     return VPM_PARAMETER_ERROR;
   }
 
@@ -121,7 +116,10 @@ int32_t VideoProcessingModule::GetFrameStats(FrameStats* stats,
 }
 
 bool VideoProcessingModule::ValidFrameStats(const FrameStats& stats) {
-  if (stats.num_pixels == 0) return false;
+  if (stats.num_pixels == 0) {
+    LOG(LS_WARNING) << "Invalid frame stats.";
+    return false;
+  }
   return true;
 }
 
diff --git a/webrtc/system_wrappers/source/trace_impl.cc b/webrtc/system_wrappers/source/trace_impl.cc
index 8dbe76b11..13c63acff 100644
--- a/webrtc/system_wrappers/source/trace_impl.cc
+++ b/webrtc/system_wrappers/source/trace_impl.cc
@@ -269,10 +269,6 @@ int32_t TraceImpl::AddModuleAndId(char* trace_message,
       sprintf(trace_message, "VIDEO CAPTUR:%5ld %5ld;", id_engine,
               id_channel);
       break;
-    case kTraceVideoPreocessing:
-      sprintf(trace_message, " VIDEO PROC:%5ld %5ld;", id_engine,
-              id_channel);
-      break;
     case kTraceRemoteBitrateEstimator:
       sprintf(trace_message, " BWE RBE:%5ld %5ld;", id_engine,
               id_channel);
@@ -333,9 +329,6 @@
     case kTraceVideoCapture:
      sprintf(trace_message, "VIDEO CAPTUR:%11ld;", idl);
      break;
-    case kTraceVideoPreocessing:
-      sprintf(trace_message, " VIDEO PROC:%11ld;", idl);
-      break;
     case kTraceRemoteBitrateEstimator:
      sprintf(trace_message, " BWE RBE:%11ld;", idl);
      break;
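
Note on the pattern applied throughout this patch: per-module WEBRTC_TRACE calls, which required a trace level, a TraceModule enum value, and an id, are either dropped entirely or replaced with the severity-based LOG macros from webrtc/system_wrappers/interface/logging.h. A minimal sketch of the replacement is shown below, outside the patch itself; the helper function and its message are illustrative placeholders, not lines taken from the files above.

  #include "webrtc/system_wrappers/interface/logging.h"

  // Hypothetical helper, for illustration only: the new LOG macro takes a
  // severity and streams the message, with no module enum or id argument.
  static int CheckFrameHeight(int height) {
    if (height < 2) {
      // Old form (removed by this patch):
      //   WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, id_,
      //                "Invalid frame size");
      LOG(LS_ERROR) << "Invalid frame size.";
      return -1;
    }
    return 0;
  }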