Updating VPM APIs to use VideoFrame

Review URL: https://webrtc-codereview.appspot.com/879006

git-svn-id: http://webrtc.googlecode.com/svn/trunk@2956 4adac7df-926f-26a2-2b94-8c16560cd09d
mikhal@webrtc.org 2012-10-19 15:43:31 +00:00
parent aa220bf568
commit 0e196e1aa1
27 changed files with 281 additions and 431 deletions
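The change replaces raw WebRtc_UWord8 buffers plus explicit width/height/timestamp arguments with VideoFrame, which carries all three. A minimal before/after sketch of a call site (assumed names; not taken from the diff below):

    // Before: raw buffer, dimensions and timestamp passed separately.
    vpm->Deflickering(frame.Buffer(), frame.Width(), frame.Height(),
                      frame.TimeStamp(), stats);

    // After: mutating calls take VideoFrame*, read-only calls take
    // const VideoFrame&, and FrameStats moves to a pointer where written.
    vpm->Deflickering(&frame, &stats);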

View File

@@ -251,7 +251,7 @@ QualityModesTest::Perform()
     _timeStamp += (WebRtc_UWord32)(9e4 / static_cast<float>(codec.maxFramerate));
     sourceFrame.SetTimeStamp(_timeStamp);
-    ret = _vpm->PreprocessFrame(&sourceFrame, &decimatedFrame);
+    ret = _vpm->PreprocessFrame(sourceFrame, &decimatedFrame);
     if (ret == 1)
     {
         printf("VD: frame drop %d \n",_frameCnt);

View File

@@ -29,8 +29,8 @@
    concurrently processed stream. Similarly, it is recommended to call Reset()
    before switching to a new stream, but this is not absolutely required.

-   The module provides basic thread safety by permitting only a single function to
-   execute concurrently.
+   The module provides basic thread safety by permitting only a single function
+   to execute concurrently.
 */

 namespace webrtc {

@@ -57,8 +57,10 @@ public:
        WebRtc_UWord32 mean;      /**< Mean value of frame */
        WebRtc_UWord32 sum;       /**< Sum of frame */
        WebRtc_UWord32 numPixels; /**< Number of pixels */
-       WebRtc_UWord8 subSamplWidth; /**< Subsampling rate of width in powers of 2 */
-       WebRtc_UWord8 subSamplHeight; /**< Subsampling rate of height in powers of 2 */
+       WebRtc_UWord8 subSamplWidth; /**< Subsampling rate of width in powers
+                                         of 2 */
+       WebRtc_UWord8 subSamplHeight; /**< Subsampling rate of height in powers
+                                          of 2 */
    };

    /**
@@ -113,25 +115,11 @@ public:
          The frame statistics will be stored here on return.

      \param[in] frame
-        Pointer to the video frame.
-
-     \param[in] width
-        Frame width in pixels.
-
-     \param[in] height
-        Frame height in pixels.
+        Reference to the video frame.

      \return 0 on success, -1 on failure.
    */
-   static WebRtc_Word32 GetFrameStats(FrameStats& stats,
-                                      const WebRtc_UWord8* frame,
-                                      WebRtc_UWord32 width,
-                                      WebRtc_UWord32 height);
-
-   /**
-      \overload
-   */
-   static WebRtc_Word32 GetFrameStats(FrameStats& stats,
+   static WebRtc_Word32 GetFrameStats(FrameStats* stats,
                                       const VideoFrame& frame);

    /**
@@ -151,7 +139,7 @@ public:
      \param[in,out] stats
         Frame statistics.
    */
-   static void ClearFrameStats(FrameStats& stats);
+   static void ClearFrameStats(FrameStats* stats);

    /**
      Enhances the color of an image through a constant mapping. Only the
@@ -159,35 +147,14 @@ public:
      \param[in,out] frame
         Pointer to the video frame.

-     \param[in] width
-        Frame width in pixels.
-
-     \param[in] height
-        Frame height in pixels.
-
      \return 0 on success, -1 on failure.
    */
-   static WebRtc_Word32 ColorEnhancement(WebRtc_UWord8* frame,
-                                         WebRtc_UWord32 width,
-                                         WebRtc_UWord32 height);
-
-   /**
-      \overload
-   */
-   static WebRtc_Word32 ColorEnhancement(VideoFrame& frame);
+   static WebRtc_Word32 ColorEnhancement(VideoFrame* frame);

    /**
      Increases/decreases the luminance value.

      \param[in,out] frame
-        Pointer to the video frame buffer.
-
-     \param[in] width
-        Frame width in pixels.
-
-     \param[in] height
-        Frame height in pixels.
+        Pointer to the video frame.

      \param[in] delta
         The amount to change the chrominance value of every single pixel.
@@ -195,30 +162,16 @@ public:
      \return 0 on success, -1 on failure.
    */
-   static WebRtc_Word32 Brighten(WebRtc_UWord8* frame,
-                                 int width, int height, int delta);
-
-   /**
-      \overload
-   */
-   static WebRtc_Word32 Brighten(VideoFrame& frame, int delta);
+   static WebRtc_Word32 Brighten(VideoFrame* frame, int delta);

    /**
-      Detects and removes camera flicker from a video stream. Every frame from the
-      stream must be passed in. A frame will only be altered if flicker has been
-      detected. Has a fixed-point implementation.
+      Detects and removes camera flicker from a video stream. Every frame from
+      the stream must be passed in. A frame will only be altered if flicker has
+      been detected. Has a fixed-point implementation.

      \param[in,out] frame
         Pointer to the video frame.

-     \param[in] width
-        Frame width in pixels.
-
-     \param[in] height
-        Frame height in pixels.
-
-     \param[in] timestamp
-        Frame timestamp in 90 kHz format.
-
      \param[in,out] stats
         Frame statistics provided by GetFrameStats(). On return the stats will
         be reset to zero if the frame was altered. Call GetFrameStats() again
@@ -226,17 +179,8 @@ public:
      \return 0 on success, -1 on failure.
    */
-   virtual WebRtc_Word32 Deflickering(WebRtc_UWord8* frame,
-                                      WebRtc_UWord32 width,
-                                      WebRtc_UWord32 height,
-                                      WebRtc_UWord32 timestamp,
-                                      FrameStats& stats) = 0;
-
-   /**
-      \overload
-   */
-   virtual WebRtc_Word32 Deflickering(VideoFrame& frame,
-                                      FrameStats& stats) = 0;
+   virtual WebRtc_Word32 Deflickering(VideoFrame* frame,
+                                      FrameStats* stats) = 0;

    /**
      Denoises a video frame. Every frame from the stream should be passed in.
@@ -245,57 +189,30 @@ public:
      \param[in,out] frame
         Pointer to the video frame.

-     \param[in] width
-        Frame width in pixels.
-
-     \param[in] height
-        Frame height in pixels.
-
      \return The number of modified pixels on success, -1 on failure.
    */
-   virtual WebRtc_Word32 Denoising(WebRtc_UWord8* frame,
-                                   WebRtc_UWord32 width,
-                                   WebRtc_UWord32 height) = 0;
-
-   /**
-      \overload
-   */
-   virtual WebRtc_Word32 Denoising(VideoFrame& frame) = 0;
+   virtual WebRtc_Word32 Denoising(VideoFrame* frame) = 0;

    /**
-      Detects if a video frame is excessively bright or dark. Returns a warning if
-      this is the case. Multiple frames should be passed in before expecting a
-      warning. Has a floating-point implementation.
+      Detects if a video frame is excessively bright or dark. Returns a
+      warning if this is the case. Multiple frames should be passed in before
+      expecting a warning. Has a floating-point implementation.

      \param[in] frame
         Pointer to the video frame.

-     \param[in] width
-        Frame width in pixels.
-
-     \param[in] height
-        Frame height in pixels.
-
      \param[in] stats
         Frame statistics provided by GetFrameStats().

      \return A member of BrightnessWarning on success, -1 on error
    */
-   virtual WebRtc_Word32 BrightnessDetection(const WebRtc_UWord8* frame,
-                                             WebRtc_UWord32 width,
-                                             WebRtc_UWord32 height,
-                                             const FrameStats& stats) = 0;
-
-   /**
-      \overload
-   */
    virtual WebRtc_Word32 BrightnessDetection(const VideoFrame& frame,
                                              const FrameStats& stats) = 0;

    /**
-      The following functions refer to the pre-processor unit within VPM. The pre-processor
-      perfoms spatial/temporal decimation and content analysis on the frames prior to encoding.
+      The following functions refer to the pre-processor unit within VPM. The
+      pre-processor perfoms spatial/temporal decimation and content analysis on
+      the frames prior to encoding.
    */

    /**
@@ -320,7 +237,9 @@ public:
      \return VPM_OK on success, a negative value on error (see error codes)
    */
-   virtual WebRtc_Word32 SetTargetResolution(WebRtc_UWord32 width, WebRtc_UWord32 height, WebRtc_UWord32 frameRate) = 0;
+   virtual WebRtc_Word32 SetTargetResolution(WebRtc_UWord32 width,
+                                             WebRtc_UWord32 height,
+                                             WebRtc_UWord32 frameRate) = 0;

    /**
      Set max frame rate
@@ -352,7 +271,8 @@ public:
      \param[in] resamplingMode
         Set resampling mode (a member of VideoFrameResampling)
    */
-   virtual void SetInputFrameResampleMode(VideoFrameResampling resamplingMode) = 0;
+   virtual void SetInputFrameResampleMode(VideoFrameResampling
+                                          resamplingMode) = 0;

    /**
      Get Processed (decimated) frame
@@ -363,7 +283,8 @@ public:
      \return VPM_OK on success, a negative value on error (see error codes)
    */
-   virtual WebRtc_Word32 PreprocessFrame(const VideoFrame* frame, VideoFrame** processedFrame) = 0;
+   virtual WebRtc_Word32 PreprocessFrame(const VideoFrame& frame,
+                                         VideoFrame** processedFrame) = 0;

    /**
      Return content metrics for the last processed frame
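Putting the new interface together, a caller-side sketch (assuming an existing VideoProcessingModule* vpm and an I420 VideoFrame frame; ordering follows the GetFrameStats()/Deflickering() contract above):

    VideoProcessingModule::FrameStats stats;
    if (vpm->GetFrameStats(&stats, frame) == 0) {
        vpm->Deflickering(&frame, &stats);  // timestamp read from the frame
    }
    // Deflickering() zeroes the stats if it altered the frame, so refresh
    // them before the brightness analysis.
    if (vpm->GetFrameStats(&stats, frame) == 0) {
        vpm->BrightnessDetection(frame, stats);  // read-only: const VideoFrame&
    }
    vpm->Denoising(&frame);
    VideoProcessingModule::ColorEnhancement(&frame);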

View File

@@ -17,21 +17,21 @@
 namespace webrtc {
 namespace VideoProcessing {

-WebRtc_Word32 Brighten(WebRtc_UWord8* frame,
-                       int width, int height, int delta) {
-  if (frame == NULL) {
+WebRtc_Word32 Brighten(VideoFrame* frame, int delta) {
+  assert(frame);
+  if (frame->Buffer() == NULL) {
     WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, -1,
                  "Null frame pointer");
     return VPM_PARAMETER_ERROR;
   }

-  if (width <= 0 || height <= 0) {
+  if (frame->Width() <= 0 || frame->Height() <= 0) {
     WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, -1,
                  "Invalid frame size");
     return VPM_PARAMETER_ERROR;
   }

-  int numPixels = width * height;
+  int numPixels = frame->Width() * frame->Height();

   int lookUp[256];
   for (int i = 0; i < 256; i++) {
@@ -39,7 +39,7 @@ WebRtc_Word32 Brighten(WebRtc_UWord8* frame,
     lookUp[i] = ((((val < 0) ? 0 : val) > 255) ? 255 : val);
   }

-  WebRtc_UWord8* tempPtr = frame;
+  WebRtc_UWord8* tempPtr = frame->Buffer();

   for (int i = 0; i < numPixels; i++) {
     *tempPtr = static_cast<WebRtc_UWord8>(lookUp[*tempPtr]);
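For intuition, Brighten() precomputes clamp(i + delta, 0, 255) for all 256 possible luma values and then applies one table lookup per pixel; a standalone sketch of the same idea (hypothetical helper, not part of the commit):

    #include <algorithm>
    #include <cstdint>

    // lut[i] = clamp(i + delta, 0, 255); applying it is one lookup per byte.
    void BuildBrightenLut(int delta, uint8_t lut[256]) {
      for (int i = 0; i < 256; ++i) {
        lut[i] = static_cast<uint8_t>(std::min(255, std::max(0, i + delta)));
      }
    }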

View File

@@ -17,8 +17,7 @@
 namespace webrtc {
 namespace VideoProcessing {

-WebRtc_Word32 Brighten(WebRtc_UWord8* frame,
-                       int width, int height, int delta);
+WebRtc_Word32 Brighten(VideoFrame* frame, int delta);

 }  // namespace VideoProcessing
 }  // namespace webrtc

View File

@@ -41,26 +41,30 @@ VPMBrightnessDetection::Reset()
 }

 WebRtc_Word32
-VPMBrightnessDetection::ProcessFrame(const WebRtc_UWord8* frame,
-                                     const WebRtc_UWord32 width,
-                                     const WebRtc_UWord32 height,
-                                     const VideoProcessingModule::FrameStats& stats)
+VPMBrightnessDetection::ProcessFrame(const VideoFrame& frame,
+                                     const VideoProcessingModule::FrameStats&
+                                     stats)
 {
-    if (frame == NULL)
+    if (frame.Buffer() == NULL)
     {
-        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, _id, "Null frame pointer");
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, _id,
+                     "Null frame pointer");
         return VPM_PARAMETER_ERROR;
     }
+    int width = frame.Width();
+    int height = frame.Height();
     if (width == 0 || height == 0)
     {
-        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, _id, "Invalid frame size");
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, _id,
+                     "Invalid frame size");
         return VPM_PARAMETER_ERROR;
     }

     if (!VideoProcessingModule::ValidFrameStats(stats))
     {
-        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, _id, "Invalid frame stats");
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, _id,
+                     "Invalid frame stats");
         return VPM_PARAMETER_ERROR;
     }

@@ -90,12 +94,14 @@ VPMBrightnessDetection::ProcessFrame(const WebRtc_UWord8* frame,
     {
         // Standard deviation of Y
         float stdY = 0;
-        for (WebRtc_UWord32 h = 0; h < height; h += (1 << stats.subSamplHeight))
+        uint8_t* buffer = frame.Buffer();
+        for (int h = 0; h < height; h += (1 << stats.subSamplHeight))
         {
             WebRtc_UWord32 row = h*width;
-            for (WebRtc_UWord32 w = 0; w < width; w += (1 << stats.subSamplWidth))
+            for (int w = 0; w < width; w += (1 << stats.subSamplWidth))
             {
-                stdY += (frame[w + row] - stats.mean) * (frame[w + row] - stats.mean);
+                stdY += (buffer[w + row] - stats.mean) * (buffer[w + row] -
+                        stats.mean);
             }
         }
         stdY = sqrt(stdY / stats.numPixels);
@@ -133,7 +139,8 @@ VPMBrightnessDetection::ProcessFrame(const WebRtc_UWord8* frame,
         // Check if image is too dark
         if ((stdY < 55) && (perc05 < 50))
         {
-            if (medianY < 60 || stats.mean < 80 || perc95 < 130 || propLow > 0.20)
+            if (medianY < 60 || stats.mean < 80 || perc95 < 130 ||
+                propLow > 0.20)
             {
                 _frameCntDark++;
             }
@@ -150,7 +157,8 @@ VPMBrightnessDetection::ProcessFrame(const WebRtc_UWord8* frame,
         // Check if image is too bright
         if ((stdY < 52) && (perc95 > 200) && (medianY > 160))
         {
-            if (medianY > 185 || stats.mean > 185 || perc05 > 140 || propHigh > 0.25)
+            if (medianY > 185 || stats.mean > 185 || perc05 > 140 ||
+                propHigh > 0.25)
             {
                 _frameCntBright++;
             }

View File

@@ -29,9 +29,7 @@ public:
     void Reset();

-    WebRtc_Word32 ProcessFrame(const WebRtc_UWord8* frame,
-                               WebRtc_UWord32 width,
-                               WebRtc_UWord32 height,
+    WebRtc_Word32 ProcessFrame(const VideoFrame& frame,
                                const VideoProcessingModule::FrameStats& stats);

 private:

View File

@@ -18,39 +18,38 @@ namespace webrtc {
 namespace VideoProcessing
 {
     WebRtc_Word32
-    ColorEnhancement(WebRtc_UWord8* frame,
-                     const WebRtc_UWord32 width,
-                     const WebRtc_UWord32 height)
+    ColorEnhancement(VideoFrame* frame)
     {
+        assert(frame);
         // pointers to U and V color pixels
         WebRtc_UWord8* ptrU;
        WebRtc_UWord8* ptrV;
        WebRtc_UWord8 tempChroma;
-        const WebRtc_UWord32 numPixels = width * height;
+        const unsigned int size_y = frame->Width() * frame->Height();
+        const unsigned int size_uv = ((frame->Width() + 1) / 2) *
+                                     ((frame->Height() + 1 ) / 2);

-        if (frame == NULL)
+        if (frame->Buffer() == NULL)
         {
-            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, -1, "Null frame pointer");
+            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing,
+                         -1, "Null frame pointer");
             return VPM_GENERAL_ERROR;
         }

-        if (width == 0 || height == 0)
+        if (frame->Width() == 0 || frame->Height() == 0)
         {
-            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, -1, "Invalid frame size");
+            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing,
+                         -1, "Invalid frame size");
             return VPM_GENERAL_ERROR;
         }

-        // set pointers to first U and V pixels
-
-        // stream format:
-        // | numPixels bytes luminance | numPixels/4 bytes chroma U | numPixels/4 chroma V |
-
-        ptrU = frame + numPixels;  // skip luminance
-        ptrV = ptrU + (numPixels>>2);
+        // set pointers to first U and V pixels (skip luminance)
+        ptrU = frame->Buffer() + size_y;
+        ptrV = ptrU + size_uv;

         // loop through all chrominance pixels and modify color
-        for (WebRtc_UWord32 ix = 0; ix < (numPixels>>2); ix++)
+        for (unsigned int ix = 0; ix < size_uv; ix++)
         {
             tempChroma = colorTable[*ptrU][*ptrV];
             *ptrV = colorTable[*ptrV][*ptrU];
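The size_y/size_uv arithmetic encodes the I420 layout: chroma is subsampled 2x2 with odd dimensions rounded up, so for a 352x288 frame size_y = 352 * 288 = 101376 bytes, size_uv = 176 * 144 = 25344 bytes, and the U and V planes start at offsets size_y and size_y + size_uv. A standalone sketch (hypothetical helper, not part of the commit):

    #include <cstddef>

    struct I420Layout { size_t u_offset; size_t v_offset; size_t total; };

    // Plane offsets for a width x height I420 frame; (w + 1) / 2 mirrors the
    // rounding used above for odd dimensions.
    I420Layout ComputeI420Layout(int width, int height) {
      const size_t size_y = static_cast<size_t>(width) * height;
      const size_t size_uv = ((static_cast<size_t>(width) + 1) / 2) *
                             ((static_cast<size_t>(height) + 1) / 2);
      return { size_y, size_y + size_uv, size_y + 2 * size_uv };
    }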

View File

@@ -21,9 +21,7 @@ namespace webrtc {
 namespace VideoProcessing
 {
-    WebRtc_Word32 ColorEnhancement(WebRtc_UWord8* frame,
-                                   WebRtc_UWord32 width,
-                                   WebRtc_UWord32 height);
+    WebRtc_Word32 ColorEnhancement(VideoFrame* frame);
 }
 } //namespace

View File

@@ -56,25 +56,25 @@ VPMContentAnalysis::~VPMContentAnalysis()
 VideoContentMetrics*
-VPMContentAnalysis::ComputeContentMetrics(const VideoFrame* inputFrame)
+VPMContentAnalysis::ComputeContentMetrics(const VideoFrame& inputFrame)
 {
-    if (inputFrame == NULL)
+    if (inputFrame.Buffer() == NULL)
     {
         return NULL;
     }

     // Init if needed (native dimension change)
-    if (_width != static_cast<int>(inputFrame->Width()) ||
-        _height != static_cast<int>(inputFrame->Height()))
+    if (_width != static_cast<int>(inputFrame.Width()) ||
+        _height != static_cast<int>(inputFrame.Height()))
     {
-        if (VPM_OK != Initialize(static_cast<int>(inputFrame->Width()),
-                                 static_cast<int>(inputFrame->Height())))
+        if (VPM_OK != Initialize(static_cast<int>(inputFrame.Width()),
+                                 static_cast<int>(inputFrame.Height())))
         {
             return NULL;
         }
     }

-    _origFrame = inputFrame->Buffer();
+    _origFrame = inputFrame.Buffer();

     // compute spatial metrics: 3 spatial prediction errors
     (this->*ComputeSpatialMetrics)();

View File

@@ -35,7 +35,7 @@ public:
     // Input:  new frame
     // Return value: pointer to structure containing content Analysis
     //               metrics or NULL value upon error
-    VideoContentMetrics* ComputeContentMetrics(const VideoFrame* inputFrame);
+    VideoContentMetrics* ComputeContentMetrics(const VideoFrame& inputFrame);

     // Release all allocated memory
     // Output: 0 if OK, negative value upon error

View File

@@ -89,12 +89,10 @@ VPMDeflickering::Reset()
 }

 WebRtc_Word32
-VPMDeflickering::ProcessFrame(WebRtc_UWord8* frame,
-                              const WebRtc_UWord32 width,
-                              const WebRtc_UWord32 height,
-                              const WebRtc_UWord32 timestamp,
-                              VideoProcessingModule::FrameStats& stats)
+VPMDeflickering::ProcessFrame(VideoFrame* frame,
+                              VideoProcessingModule::FrameStats* stats)
 {
+    assert(frame);
     WebRtc_UWord32 frameMemory;
     WebRtc_UWord8 quantUW8[kNumQuants];
     WebRtc_UWord8 maxQuantUW8[kNumQuants];
@@ -105,27 +103,32 @@
     WebRtc_UWord16 tmpUW16;
     WebRtc_UWord32 tmpUW32;
+    int width = frame->Width();
+    int height = frame->Height();

-    if (frame == NULL)
+    if (frame->Buffer() == NULL)
     {
-        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, _id, "Null frame pointer");
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, _id,
+                     "Null frame pointer");
         return VPM_GENERAL_ERROR;
     }

     // Stricter height check due to subsampling size calculation below.
     if (width == 0 || height < 2)
     {
-        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, _id, "Invalid frame size");
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, _id,
+                     "Invalid frame size");
         return VPM_GENERAL_ERROR;
     }

-    if (!VideoProcessingModule::ValidFrameStats(stats))
+    if (!VideoProcessingModule::ValidFrameStats(*stats))
     {
-        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, _id, "Invalid frame stats");
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, _id,
+                     "Invalid frame stats");
         return VPM_GENERAL_ERROR;
     }

-    if (PreDetection(timestamp, stats) == -1)
+    if (PreDetection(frame->TimeStamp(), *stats) == -1)
     {
         return VPM_GENERAL_ERROR;
     }
@@ -148,9 +151,10 @@ VPMDeflickering::ProcessFrame(WebRtc_UWord8* frame,
                    kLog2OfDownsamplingFactor) + 1);
     WebRtc_UWord8* ySorted = new WebRtc_UWord8[ySubSize];
     WebRtc_UWord32 sortRowIdx = 0;
-    for (WebRtc_UWord32 i = 0; i < height; i += kDownsamplingFactor)
+    for (int i = 0; i < height; i += kDownsamplingFactor)
     {
-        memcpy(ySorted + sortRowIdx * width, frame + i * width, width);
+        memcpy(ySorted + sortRowIdx * width,
+               frame->Buffer() + i * width, width);
         sortRowIdx++;
     }
@@ -254,9 +258,10 @@ VPMDeflickering::ProcessFrame(WebRtc_UWord8* frame,
     }

     // Map to the output frame.
+    uint8_t* buffer = frame->Buffer();
     for (WebRtc_UWord32 i = 0; i < ySize; i++)
     {
-        frame[i] = mapUW8[frame[i]];
+        buffer[i] = mapUW8[buffer[i]];
     }

     // Frame was altered, so reset stats.
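Since the 90 kHz timestamp is now read via frame->TimeStamp() instead of a separate parameter, a caller-side sketch of the new flow (hypothetical wrapper, not part of the commit):

    // Deflicker one captured frame in place; stats must come from
    // GetFrameStats() first, and PreDetection() consumes frame->TimeStamp().
    WebRtc_Word32 DeflickerFrame(webrtc::VideoProcessingModule* vpm,
                                 webrtc::VideoFrame* frame) {
      webrtc::VideoProcessingModule::FrameStats stats;
      if (vpm->GetFrameStats(&stats, *frame) != 0)
        return -1;
      return vpm->Deflickering(frame, &stats);
    }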

View File

@@ -32,11 +32,8 @@ public:
     void Reset();

-    WebRtc_Word32 ProcessFrame(WebRtc_UWord8* frame,
-                               WebRtc_UWord32 width,
-                               WebRtc_UWord32 height,
-                               WebRtc_UWord32 timestamp,
-                               VideoProcessingModule::FrameStats& stats);
+    WebRtc_Word32 ProcessFrame(VideoFrame* frame,
+                               VideoProcessingModule::FrameStats* stats);

 private:
     WebRtc_Word32 PreDetection(WebRtc_UWord32 timestamp,
                                const VideoProcessingModule::FrameStats& stats);

View File

@@ -72,28 +72,31 @@ VPMDenoising::Reset()
 }

 WebRtc_Word32
-VPMDenoising::ProcessFrame(WebRtc_UWord8* frame,
-                           const WebRtc_UWord32 width,
-                           const WebRtc_UWord32 height)
+VPMDenoising::ProcessFrame(VideoFrame* frame)
 {
+    assert(frame);
     WebRtc_Word32     thevar;
-    WebRtc_UWord32    k;
-    WebRtc_UWord32    jsub, ksub;
+    int               k;
+    int               jsub, ksub;
     WebRtc_Word32     diff0;
     WebRtc_UWord32    tmpMoment1;
     WebRtc_UWord32    tmpMoment2;
     WebRtc_UWord32    tmp;
     WebRtc_Word32     numPixelsChanged = 0;

-    if (frame == NULL)
+    if (frame->Buffer() == NULL)
     {
-        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, _id, "Null frame pointer");
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, _id,
+                     "Null frame pointer");
         return VPM_GENERAL_ERROR;
     }

+    int width = frame->Width();
+    int height = frame->Height();
     if (width == 0 || height == 0)
     {
-        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, _id, "Invalid frame size");
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, _id,
+                     "Invalid frame size");
         return VPM_GENERAL_ERROR;
     }
@@ -124,31 +127,34 @@ VPMDenoising::ProcessFrame(WebRtc_UWord8* frame,
     }

     /* Apply de-noising on each pixel, but update variance sub-sampled */
-    for (WebRtc_UWord32 i = 0; i < height; i++)
+    uint8_t* buffer = frame->Buffer();
+    for (int i = 0; i < height; i++)
     { // Collect over height
         k = i * width;
         ksub = ((i >> kSubsamplingHeight) << kSubsamplingHeight) * width;
-        for (WebRtc_UWord32 j = 0; j < width; j++)
+        for (int j = 0; j < width; j++)
         { // Collect over width
             jsub = ((j >> kSubsamplingWidth) << kSubsamplingWidth);
             /* Update mean value for every pixel and every frame */
             tmpMoment1 = _moment1[k + j];
             tmpMoment1 *= kDenoiseFiltParam; // Q16
-            tmpMoment1 += ((kDenoiseFiltParamRec * ((WebRtc_UWord32)frame[k + j])) << 8);
+            tmpMoment1 += ((kDenoiseFiltParamRec *
+                          ((WebRtc_UWord32)buffer[k + j])) << 8);
             tmpMoment1 >>= 8; // Q8
             _moment1[k + j] = tmpMoment1;

             tmpMoment2 = _moment2[ksub + jsub];
             if ((ksub == k) && (jsub == j) && (_denoiseFrameCnt == 0))
             {
-                tmp = ((WebRtc_UWord32)frame[k + j] * (WebRtc_UWord32)frame[k + j]);
+                tmp = ((WebRtc_UWord32)buffer[k + j] *
+                       (WebRtc_UWord32)buffer[k + j]);
                 tmpMoment2 *= kDenoiseFiltParam; // Q16
                 tmpMoment2 += ((kDenoiseFiltParamRec * tmp)<<8);
                 tmpMoment2 >>= 8; // Q8
             }
             _moment2[k + j] = tmpMoment2;
             /* Current event = deviation from mean value */
-            diff0 = ((WebRtc_Word32)frame[k + j] << 8) - _moment1[k + j];
+            diff0 = ((WebRtc_Word32)buffer[k + j] << 8) - _moment1[k + j];
             /* Recent events = variance (variations over time) */
             thevar = _moment2[k + j];
             thevar -= ((_moment1[k + j] * _moment1[k + j]) >> 8);
@@ -161,7 +167,7 @@ VPMDenoising::ProcessFrame(WebRtc_UWord8* frame,
             if ((thevar < kDenoiseThreshold)
                 && ((diff0 * diff0 >> 8) < kDenoiseThreshold))
             { // Replace with mean
-                frame[k + j] = (WebRtc_UWord8)(_moment1[k + j] >> 8);
+                buffer[k + j] = (WebRtc_UWord8)(_moment1[k + j] >> 8);
                 numPixelsChanged++;
             }
         }

View File

@@ -29,9 +29,7 @@ public:
     void Reset();

-    WebRtc_Word32 ProcessFrame(WebRtc_UWord8* frame,
-                               WebRtc_UWord32 width,
-                               WebRtc_UWord32 height);
+    WebRtc_Word32 ProcessFrame(VideoFrame* frame);

 private:
     WebRtc_Word32 _id;
@@ -39,7 +37,7 @@ private:
     WebRtc_UWord32* _moment1;        // (Q8) First order moment (mean)
     WebRtc_UWord32* _moment2;        // (Q8) Second order moment
     WebRtc_UWord32  _frameSize;      // Size (# of pixels) of frame
-    WebRtc_Word32   _denoiseFrameCnt; // Counter for subsampling in time
+    int             _denoiseFrameCnt; // Counter for subsampling in time
 };

 } //namespace

View File

@@ -136,9 +136,10 @@ VPMFramePreprocessor::DecimatedHeight() const
 WebRtc_Word32
-VPMFramePreprocessor::PreprocessFrame(const VideoFrame* frame, VideoFrame** processedFrame)
+VPMFramePreprocessor::PreprocessFrame(const VideoFrame& frame,
+                                      VideoFrame** processedFrame)
 {
-    if (frame == NULL || frame->Height() == 0 || frame->Width() == 0)
+    if (frame.Buffer() == NULL || frame.Height() == 0 || frame.Width() == 0)
     {
         return VPM_PARAMETER_ERROR;
     }
@@ -147,7 +148,8 @@ VPMFramePreprocessor::PreprocessFrame(const VideoFrame* frame, VideoFrame** proc
     if (_vd->DropFrame())
     {
-        WEBRTC_TRACE(webrtc::kTraceStream, webrtc::kTraceVideo, _id, "Drop frame due to frame rate");
+        WEBRTC_TRACE(webrtc::kTraceStream, webrtc::kTraceVideo, _id,
+                     "Drop frame due to frame rate");
         return 1; // drop 1 frame
     }
@@ -155,8 +157,9 @@
     // Note that we must make a copy of it.
     // We are not allowed to resample the input frame.
     *processedFrame = NULL;
-    if (_spatialResampler->ApplyResample(frame->Width(), frame->Height())) {
-      WebRtc_Word32 ret = _spatialResampler->ResampleFrame(*frame, _resampledFrame);
+    if (_spatialResampler->ApplyResample(frame.Width(), frame.Height())) {
+      WebRtc_Word32 ret = _spatialResampler->ResampleFrame(frame,
+                                                           _resampledFrame);
       if (ret != VPM_OK)
         return ret;
       *processedFrame = &_resampledFrame;
@@ -171,7 +174,7 @@
       if (*processedFrame == NULL) {
         _contentMetrics = _ca->ComputeContentMetrics(frame);
       } else {
-        _contentMetrics = _ca->ComputeContentMetrics(&_resampledFrame);
+        _contentMetrics = _ca->ComputeContentMetrics(_resampledFrame);
       }
     }
     ++_frameCnt;
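The caller-side contract of the new PreprocessFrame(), as the tests further below exercise it: a return of 1 means the frame was dropped by temporal decimation, and *processedFrame stays NULL when no resampling took place. A sketch (hypothetical wrapper, not part of the commit):

    // Returns the frame to encode, or NULL on drop/error.
    const webrtc::VideoFrame* Preprocess(webrtc::VideoProcessingModule* vpm,
                                         const webrtc::VideoFrame& captured) {
      webrtc::VideoFrame* decimated = NULL;
      if (vpm->PreprocessFrame(captured, &decimated) != 0)
        return NULL;  // 1 == dropped by decimation, negative == error
      // NULL output means no resampling took place: encode the input as-is.
      return decimated != NULL ? decimated : &captured;
    }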

View File

@@ -46,12 +46,15 @@ public:
     WebRtc_Word32 SetMaxFrameRate(WebRtc_UWord32 maxFrameRate);

     //Set target resolution: frame rate and dimension
-    WebRtc_Word32 SetTargetResolution(WebRtc_UWord32 width, WebRtc_UWord32 height, WebRtc_UWord32 frameRate);
+    WebRtc_Word32 SetTargetResolution(WebRtc_UWord32 width,
+                                      WebRtc_UWord32 height,
+                                      WebRtc_UWord32 frameRate);

     //Update incoming frame rate/dimension
     void UpdateIncomingFrameRate();

-    WebRtc_Word32 updateIncomingFrameSize(WebRtc_UWord32 width, WebRtc_UWord32 height);
+    WebRtc_Word32 updateIncomingFrameSize(WebRtc_UWord32 width,
+                                          WebRtc_UWord32 height);

     //Set decimated values: frame rate/dimension
     WebRtc_UWord32 DecimatedFrameRate();
@@ -59,7 +62,8 @@ public:
     WebRtc_UWord32 DecimatedHeight() const;

     //Preprocess output:
-    WebRtc_Word32 PreprocessFrame(const VideoFrame* frame, VideoFrame** processedFrame);
+    WebRtc_Word32 PreprocessFrame(const VideoFrame& frame,
+                                  VideoFrame** processedFrame);
     VideoContentMetrics* ContentMetrics() const;

 private:

View File

@@ -19,29 +19,29 @@ namespace webrtc {
 namespace
 {
     void
-    SetSubSampling(VideoProcessingModule::FrameStats& stats,
+    SetSubSampling(VideoProcessingModule::FrameStats* stats,
                    const WebRtc_Word32 width,
                    const WebRtc_Word32 height)
     {
         if (width * height >= 640 * 480)
         {
-            stats.subSamplWidth = 3;
-            stats.subSamplHeight = 3;
+            stats->subSamplWidth = 3;
+            stats->subSamplHeight = 3;
         }
         else if (width * height >= 352 * 288)
         {
-            stats.subSamplWidth = 2;
-            stats.subSamplHeight = 2;
+            stats->subSamplWidth = 2;
+            stats->subSamplHeight = 2;
         }
         else if (width * height >= 176 * 144)
         {
-            stats.subSamplWidth = 1;
-            stats.subSamplHeight = 1;
+            stats->subSamplWidth = 1;
+            stats->subSamplHeight = 1;
         }
         else
         {
-            stats.subSamplWidth = 0;
-            stats.subSamplHeight = 0;
+            stats->subSamplWidth = 0;
+            stats->subSamplHeight = 0;
         }
     }
 }
@@ -89,13 +89,15 @@ VideoProcessingModuleImpl::VideoProcessingModuleImpl(const WebRtc_Word32 id) :
     _deflickering.ChangeUniqueId(id);
     _denoising.ChangeUniqueId(id);
     _framePreProcessor.ChangeUniqueId(id);
-    WEBRTC_TRACE(webrtc::kTraceMemory, webrtc::kTraceVideoPreocessing, _id, "Created");
+    WEBRTC_TRACE(webrtc::kTraceMemory, webrtc::kTraceVideoPreocessing, _id,
+                 "Created");
 }

 VideoProcessingModuleImpl::~VideoProcessingModuleImpl()
 {
-    WEBRTC_TRACE(webrtc::kTraceMemory, webrtc::kTraceVideoPreocessing, _id, "Destroyed");
+    WEBRTC_TRACE(webrtc::kTraceMemory, webrtc::kTraceVideoPreocessing, _id,
+                 "Destroyed");
     delete &_mutex;
 }
@@ -112,49 +114,47 @@ VideoProcessingModuleImpl::Reset()
 }

 WebRtc_Word32
-VideoProcessingModule::GetFrameStats(FrameStats& stats,
+VideoProcessingModule::GetFrameStats(FrameStats* stats,
                                      const VideoFrame& frame)
 {
-    return GetFrameStats(stats, frame.Buffer(), frame.Width(), frame.Height());
-}
-
-WebRtc_Word32
-VideoProcessingModule::GetFrameStats(FrameStats& stats,
-                                     const WebRtc_UWord8* frame,
-                                     const WebRtc_UWord32 width,
-                                     const WebRtc_UWord32 height)
-{
-    if (frame == NULL)
+    if (frame.Buffer() == NULL)
     {
-        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, -1, "Null frame pointer");
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, -1,
+                     "Null frame pointer");
         return VPM_PARAMETER_ERROR;
     }
+    int width = frame.Width();
+    int height = frame.Height();
     if (width == 0 || height == 0)
     {
-        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, -1, "Invalid frame size");
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, -1,
+                     "Invalid frame size");
         return VPM_PARAMETER_ERROR;
     }

     ClearFrameStats(stats); // The histogram needs to be zeroed out.
     SetSubSampling(stats, width, height);

+    uint8_t* buffer = frame.Buffer();
     // Compute histogram and sum of frame
-    for (WebRtc_UWord32 i = 0; i < height; i += (1 << stats.subSamplHeight))
+    for (int i = 0; i < height; i += (1 << stats->subSamplHeight))
     {
-        WebRtc_Word32 k = i * width;
-        for (WebRtc_UWord32 j = 0; j < width; j += (1 << stats.subSamplWidth))
+        int k = i * width;
+        for (int j = 0; j < width; j += (1 << stats->subSamplWidth))
         {
-            stats.hist[frame[k + j]]++;
-            stats.sum += frame[k + j];
+            stats->hist[buffer[k + j]]++;
+            stats->sum += buffer[k + j];
         }
     }

-    stats.numPixels = (width * height) / ((1 << stats.subSamplWidth) * (1 << stats.subSamplHeight));
-    assert(stats.numPixels > 0);
+    stats->numPixels = (width * height) / ((1 << stats->subSamplWidth) *
+                       (1 << stats->subSamplHeight));
+    assert(stats->numPixels > 0);

     // Compute mean value of frame
-    stats.mean = stats.sum / stats.numPixels;
+    stats->mean = stats->sum / stats->numPixels;

     return VPM_OK;
 }
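A quick sanity check of the sampling arithmetic above: subSamplWidth/Height are exponents, so a 640x480 frame is stepped by 1 << 3 = 8 in each direction and numPixels = (640 * 480) / (8 * 8) = 4800 histogram samples. As a sketch (hypothetical helper, not part of the commit):

    // Mirrors SetSubSampling(): number of samples GetFrameStats() takes.
    int HistogramSamples(int width, int height) {
      int e = 0;  // exponent: sampling step is 1 << e
      if (width * height >= 640 * 480)      e = 3;
      else if (width * height >= 352 * 288) e = 2;
      else if (width * height >= 176 * 144) e = 1;
      return (width * height) / ((1 << e) * (1 << e));  // 640x480 -> 4800
    }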
@@ -171,94 +171,48 @@ VideoProcessingModule::ValidFrameStats(const FrameStats& stats)
 }

 void
-VideoProcessingModule::ClearFrameStats(FrameStats& stats)
+VideoProcessingModule::ClearFrameStats(FrameStats* stats)
 {
-    stats.mean = 0;
-    stats.sum = 0;
-    stats.numPixels = 0;
-    stats.subSamplWidth = 0;
-    stats.subSamplHeight = 0;
-    memset(stats.hist, 0, sizeof(stats.hist));
+    stats->mean = 0;
+    stats->sum = 0;
+    stats->numPixels = 0;
+    stats->subSamplWidth = 0;
+    stats->subSamplHeight = 0;
+    memset(stats->hist, 0, sizeof(stats->hist));
 }

 WebRtc_Word32
-VideoProcessingModule::ColorEnhancement(VideoFrame& frame)
-{
-    return ColorEnhancement(frame.Buffer(), frame.Width(), frame.Height());
-}
-
-WebRtc_Word32
-VideoProcessingModule::ColorEnhancement(WebRtc_UWord8* frame,
-                                        const WebRtc_UWord32 width,
-                                        const WebRtc_UWord32 height)
+VideoProcessingModule::ColorEnhancement(VideoFrame* frame)
 {
-    return VideoProcessing::ColorEnhancement(frame, width, height);
+    return VideoProcessing::ColorEnhancement(frame);
 }

 WebRtc_Word32
-VideoProcessingModule::Brighten(VideoFrame& frame, int delta)
-{
-    return Brighten(frame.Buffer(), frame.Width(), frame.Height(), delta);
-}
-
-WebRtc_Word32
-VideoProcessingModule::Brighten(WebRtc_UWord8* frame,
-                                int width,
-                                int height,
-                                int delta)
+VideoProcessingModule::Brighten(VideoFrame* frame, int delta)
 {
-    return VideoProcessing::Brighten(frame, width, height, delta);
+    return VideoProcessing::Brighten(frame, delta);
 }

 WebRtc_Word32
-VideoProcessingModuleImpl::Deflickering(VideoFrame& frame,
-                                        FrameStats& stats)
-{
-    return Deflickering(frame.Buffer(), frame.Width(), frame.Height(),
-                        frame.TimeStamp(), stats);
-}
-
-WebRtc_Word32
-VideoProcessingModuleImpl::Deflickering(WebRtc_UWord8* frame,
-                                        const WebRtc_UWord32 width,
-                                        const WebRtc_UWord32 height,
-                                        const WebRtc_UWord32 timestamp,
-                                        FrameStats& stats)
+VideoProcessingModuleImpl::Deflickering(VideoFrame* frame, FrameStats* stats)
 {
     CriticalSectionScoped mutex(&_mutex);
-    return _deflickering.ProcessFrame(frame, width, height, timestamp, stats);
+    return _deflickering.ProcessFrame(frame, stats);
 }

 WebRtc_Word32
-VideoProcessingModuleImpl::Denoising(VideoFrame& frame)
-{
-    return Denoising(frame.Buffer(), frame.Width(), frame.Height());
-}
-
-WebRtc_Word32
-VideoProcessingModuleImpl::Denoising(WebRtc_UWord8* frame,
-                                     const WebRtc_UWord32 width,
-                                     const WebRtc_UWord32 height)
+VideoProcessingModuleImpl::Denoising(VideoFrame* frame)
 {
     CriticalSectionScoped mutex(&_mutex);
-    return _denoising.ProcessFrame(frame, width, height);
+    return _denoising.ProcessFrame(frame);
 }

 WebRtc_Word32
 VideoProcessingModuleImpl::BrightnessDetection(const VideoFrame& frame,
                                                const FrameStats& stats)
-{
-    return BrightnessDetection(frame.Buffer(), frame.Width(), frame.Height(), stats);
-}
-
-WebRtc_Word32
-VideoProcessingModuleImpl::BrightnessDetection(const WebRtc_UWord8* frame,
-                                               const WebRtc_UWord32 width,
-                                               const WebRtc_UWord32 height,
-                                               const FrameStats& stats)
 {
     CriticalSectionScoped mutex(&_mutex);
-    return _brightnessDetection.ProcessFrame(frame, width, height, stats);
+    return _brightnessDetection.ProcessFrame(frame, stats);
 }
@@ -271,7 +225,8 @@ VideoProcessingModuleImpl::EnableTemporalDecimation(bool enable)
 void
-VideoProcessingModuleImpl::SetInputFrameResampleMode(VideoFrameResampling resamplingMode)
+VideoProcessingModuleImpl::SetInputFrameResampleMode(VideoFrameResampling
+                                                     resamplingMode)
 {
     CriticalSectionScoped cs(&_mutex);
     _framePreProcessor.SetInputFrameResampleMode(resamplingMode);
@@ -286,7 +241,9 @@ VideoProcessingModuleImpl::SetMaxFrameRate(WebRtc_UWord32 maxFrameRate)
 }

 WebRtc_Word32
-VideoProcessingModuleImpl::SetTargetResolution(WebRtc_UWord32 width, WebRtc_UWord32 height, WebRtc_UWord32 frameRate)
+VideoProcessingModuleImpl::SetTargetResolution(WebRtc_UWord32 width,
+                                               WebRtc_UWord32 height,
+                                               WebRtc_UWord32 frameRate)
 {
     CriticalSectionScoped cs(&_mutex);
     return _framePreProcessor.SetTargetResolution(width, height, frameRate);
@@ -316,7 +273,8 @@ VideoProcessingModuleImpl::DecimatedHeight() const
 }

 WebRtc_Word32
-VideoProcessingModuleImpl::PreprocessFrame(const VideoFrame *frame, VideoFrame **processedFrame)
+VideoProcessingModuleImpl::PreprocessFrame(const VideoFrame& frame,
+                                           VideoFrame **processedFrame)
 {
     CriticalSectionScoped mutex(&_mutex);
     return _framePreProcessor.PreprocessFrame(frame, processedFrame);

View File

@@ -36,30 +36,14 @@ public:
     virtual void Reset();

-    virtual WebRtc_Word32 Deflickering(WebRtc_UWord8* frame,
-                                       WebRtc_UWord32 width,
-                                       WebRtc_UWord32 height,
-                                       WebRtc_UWord32 timestamp,
-                                       FrameStats& stats);
-
-    virtual WebRtc_Word32 Deflickering(VideoFrame& frame,
-                                       FrameStats& stats);
-
-    virtual WebRtc_Word32 Denoising(WebRtc_UWord8* frame,
-                                    WebRtc_UWord32 width,
-                                    WebRtc_UWord32 height);
-
-    virtual WebRtc_Word32 Denoising(VideoFrame& frame);
-
-    virtual WebRtc_Word32 BrightnessDetection(const WebRtc_UWord8* frame,
-                                              WebRtc_UWord32 width,
-                                              WebRtc_UWord32 height,
-                                              const FrameStats& stats);
+    virtual WebRtc_Word32 Deflickering(VideoFrame* frame,
+                                       FrameStats* stats);
+
+    virtual WebRtc_Word32 Denoising(VideoFrame* frame);

     virtual WebRtc_Word32 BrightnessDetection(const VideoFrame& frame,
                                               const FrameStats& stats);

     //Frame pre-processor functions
     //Enable temporal decimation
@@ -88,7 +72,7 @@ public:
     // Pre-process incoming frame: Sample when needed and compute content
     // metrics when enabled.
     // If no resampling takes place - processedFrame is set to NULL.
-    virtual WebRtc_Word32 PreprocessFrame(const VideoFrame* frame,
+    virtual WebRtc_Word32 PreprocessFrame(const VideoFrame& frame,
                                           VideoFrame** processedFrame);

     virtual VideoContentMetrics* ContentMetrics() const;

View File

@@ -18,12 +18,14 @@ TEST_F(VideoProcessingModuleTest, BrightnessDetection)
     WebRtc_UWord32 frameNum = 0;
     WebRtc_Word32 brightnessWarning = 0;
     WebRtc_UWord32 warningCount = 0;
-    while (fread(_videoFrame.Buffer(), 1, _frameLength, _sourceFile) == _frameLength)
+    while (fread(_videoFrame.Buffer(), 1, _frameLength, _sourceFile) ==
+           _frameLength)
     {
         frameNum++;
         VideoProcessingModule::FrameStats stats;
-        ASSERT_EQ(0, _vpm->GetFrameStats(stats, _videoFrame));
-        ASSERT_GE(brightnessWarning = _vpm->BrightnessDetection(_videoFrame, stats), 0);
+        ASSERT_EQ(0, _vpm->GetFrameStats(&stats, _videoFrame));
+        ASSERT_GE(brightnessWarning = _vpm->BrightnessDetection(_videoFrame,
+                                                                stats), 0);
         if (brightnessWarning != VideoProcessingModule::kNoWarning)
         {
             warningCount++;
@@ -40,7 +42,8 @@ TEST_F(VideoProcessingModuleTest, BrightnessDetection)
     rewind(_sourceFile);
     frameNum = 0;
     warningCount = 0;
-    while (fread(_videoFrame.Buffer(), 1, _frameLength, _sourceFile) == _frameLength &&
+    while (fread(_videoFrame.Buffer(), 1, _frameLength, _sourceFile) ==
+           _frameLength &&
            frameNum < 300)
     {
         frameNum++;
@@ -58,8 +61,9 @@ TEST_F(VideoProcessingModuleTest, BrightnessDetection)
         }

         VideoProcessingModule::FrameStats stats;
-        ASSERT_EQ(0, _vpm->GetFrameStats(stats, _videoFrame));
-        ASSERT_GE(brightnessWarning = _vpm->BrightnessDetection(_videoFrame, stats), 0);
+        ASSERT_EQ(0, _vpm->GetFrameStats(&stats, _videoFrame));
+        ASSERT_GE(brightnessWarning = _vpm->BrightnessDetection(_videoFrame,
+                                                                stats), 0);
         EXPECT_NE(VideoProcessingModule::kDarkWarning, brightnessWarning);
         if (brightnessWarning == VideoProcessingModule::kBrightWarning)
         {
@@ -90,8 +94,9 @@ TEST_F(VideoProcessingModuleTest, BrightnessDetection)
         }

         VideoProcessingModule::FrameStats stats;
-        ASSERT_EQ(0, _vpm->GetFrameStats(stats, _videoFrame));
-        ASSERT_GE(brightnessWarning = _vpm->BrightnessDetection(_videoFrame, stats), 0);
+        ASSERT_EQ(0, _vpm->GetFrameStats(&stats, _videoFrame));
+        ASSERT_GE(brightnessWarning = _vpm->BrightnessDetection(_videoFrame,
+                                                                stats), 0);
         EXPECT_NE(VideoProcessingModule::kBrightWarning, brightnessWarning);
         if (brightnessWarning == VideoProcessingModule::kDarkWarning)
         {

View File

@@ -11,6 +11,7 @@
 #include <cstdio>
 #include <cstdlib>

+#include "common_video/libyuv/include/webrtc_libyuv.h"
 #include "modules/video_processing/main/interface/video_processing.h"
 #include "modules/video_processing/main/test/unit_test/unit_test.h"
 #include "system_wrappers/interface/tick_util.h"
@@ -42,7 +43,7 @@ TEST_F(VideoProcessingModuleTest, ColorEnhancement)
     {
         frameNum++;
         t0 = TickTime::Now();
-        ASSERT_EQ(0, VideoProcessingModule::ColorEnhancement(_videoFrame));
+        ASSERT_EQ(0, VideoProcessingModule::ColorEnhancement(&_videoFrame));
         t1 = TickTime::Now();
         accTicks += t1 - t0;
         if (fwrite(_videoFrame.Buffer(), 1, _frameLength,
@@ -88,41 +89,31 @@ TEST_F(VideoProcessingModuleTest, ColorEnhancement)
     }
     ASSERT_NE(0, feof(_sourceFile)) << "Error reading source file";

-    // Verify that all color pixels are enhanced, that no luminance values are altered,
-    // and that the function does not write outside the vector.
-    WebRtc_UWord32 safeGuard = 1000;
-    WebRtc_UWord32 numPixels = 352*288; // CIF size
-    WebRtc_UWord8 *testFrame = new WebRtc_UWord8[numPixels + (numPixels / 2) + (2 * safeGuard)];
-    WebRtc_UWord8 *refFrame = new WebRtc_UWord8[numPixels + (numPixels / 2) + (2 * safeGuard)];
-
-    // use value 128 as probe value, since we know that this will be changed in the enhancement
-    memset(testFrame, 128, safeGuard);
-    memset(&testFrame[safeGuard], 128, numPixels);
-    memset(&testFrame[safeGuard + numPixels], 128, numPixels / 2);
-    memset(&testFrame[safeGuard + numPixels + (numPixels / 2)], 128, safeGuard);
-
-    memcpy(refFrame, testFrame, numPixels + (numPixels / 2) + (2 * safeGuard));
-
-    ASSERT_EQ(0, VideoProcessingModule::ColorEnhancement(&testFrame[safeGuard], 352, 288));
-
-    EXPECT_EQ(0, memcmp(testFrame, refFrame, safeGuard)) <<
-        "Function is writing outside the frame memory.";
-    EXPECT_EQ(0, memcmp(&testFrame[safeGuard + numPixels + (numPixels / 2)],
-                        &refFrame[safeGuard + numPixels + (numPixels / 2)], safeGuard)) <<
-        "Function is writing outside the frame memory.";
-    EXPECT_EQ(0, memcmp(&testFrame[safeGuard], &refFrame[safeGuard], numPixels)) <<
-        "Function is modifying the luminance.";
-    EXPECT_NE(0, memcmp(&testFrame[safeGuard + numPixels],
-                        &refFrame[safeGuard + numPixels], numPixels / 2)) <<
+    // Verify that all color pixels are enhanced, and no luminance values are
+    // altered.
+    WebRtc_UWord8 *testFrame = new WebRtc_UWord8[_frameLength];
+
+    // Use value 128 as probe value, since we know that this will be changed
+    // in the enhancement.
+    memset(testFrame, 128, _frameLength);
+
+    VideoFrame testVideoFrame;
+    testVideoFrame.CopyFrame(_frameLength, testFrame);
+    testVideoFrame.SetWidth(_width);
+    testVideoFrame.SetHeight(_height);
+    ASSERT_EQ(0, VideoProcessingModule::ColorEnhancement(&testVideoFrame));
+
+    EXPECT_EQ(0, memcmp(testVideoFrame.Buffer(), testFrame, _width * _height))
+        << "Function is modifying the luminance.";
+
+    EXPECT_NE(0, memcmp(testVideoFrame.Buffer() + _width * _height,
+                        &testFrame[_width * _height], _width * _height / 2)) <<
        "Function is not modifying all chrominance pixels";

     ASSERT_EQ(0, fclose(refFile));
     ASSERT_EQ(0, fclose(modFile));
     delete [] testFrame;
-    delete [] refFrame;
 }

 } // namespace webrtc

View File

@@ -26,8 +26,8 @@ TEST_F(VideoProcessingModuleTest, ContentAnalysis)
     while (fread(_videoFrame.Buffer(), 1, _frameLength, _sourceFile)
            == _frameLength)
     {
-        _cM_c   = _ca_c.ComputeContentMetrics(&_videoFrame);
-        _cM_SSE = _ca_sse.ComputeContentMetrics(&_videoFrame);
+        _cM_c   = _ca_c.ComputeContentMetrics(_videoFrame);
+        _cM_SSE = _ca_sse.ComputeContentMetrics(_videoFrame);

         ASSERT_EQ(_cM_c->spatial_pred_err, _cM_SSE->spatial_pred_err);
         ASSERT_EQ(_cM_c->spatial_pred_err_v, _cM_SSE->spatial_pred_err_v);

View File

@@ -57,8 +57,8 @@ TEST_F(VideoProcessingModuleTest, Deflickering)
         t0 = TickTime::Now();
         VideoProcessingModule::FrameStats stats;
-        ASSERT_EQ(0, _vpm->GetFrameStats(stats, _videoFrame));
-        ASSERT_EQ(0, _vpm->Deflickering(_videoFrame, stats));
+        ASSERT_EQ(0, _vpm->GetFrameStats(&stats, _videoFrame));
+        ASSERT_EQ(0, _vpm->Deflickering(&_videoFrame, &stats));
         t1 = TickTime::Now();
         accTicks += t1 - t0;

View File

@@ -99,7 +99,7 @@ TEST_F(VideoProcessingModuleTest, Denoising)
         }

         t0 = TickTime::Now();
-        ASSERT_GE(modifiedPixels = _vpm->Denoising(_videoFrame), 0);
+        ASSERT_GE(modifiedPixels = _vpm->Denoising(&_videoFrame), 0);
         t1 = TickTime::Now();
         accTicks += t1 - t0;

View File

@@ -68,28 +68,21 @@ void VideoProcessingModuleTest::TearDown()
 TEST_F(VideoProcessingModuleTest, HandleNullBuffer)
 {
     VideoProcessingModule::FrameStats stats;
-    ASSERT_EQ(0, _vpm->GetFrameStats(stats, _videoFrame));
+    ASSERT_EQ(0, _vpm->GetFrameStats(&stats, _videoFrame));

     // Video frame with unallocated buffer.
     VideoFrame videoFrame;
     videoFrame.SetWidth(_width);
     videoFrame.SetHeight(_height);

-    EXPECT_EQ(-3, _vpm->GetFrameStats(stats, NULL, _width, _height));
-    EXPECT_EQ(-3, _vpm->GetFrameStats(stats, videoFrame));
+    EXPECT_EQ(-3, _vpm->GetFrameStats(&stats, videoFrame));

-    EXPECT_EQ(-1, _vpm->ColorEnhancement(NULL, _width, _height));
-    EXPECT_EQ(-1, _vpm->ColorEnhancement(videoFrame));
+    EXPECT_EQ(-1, _vpm->ColorEnhancement(&videoFrame));

-    EXPECT_EQ(-1, _vpm->Deflickering(NULL, _width, _height, 0, stats));
-    EXPECT_EQ(-1, _vpm->Deflickering(videoFrame, stats));
+    EXPECT_EQ(-1, _vpm->Deflickering(&videoFrame, &stats));

-    EXPECT_EQ(-1, _vpm->Denoising(NULL, _width, _height));
-    EXPECT_EQ(-1, _vpm->Denoising(videoFrame));
+    EXPECT_EQ(-1, _vpm->Denoising(&videoFrame));

-    EXPECT_EQ(-3, _vpm->BrightnessDetection(NULL, _width, _height, stats));
     EXPECT_EQ(-3, _vpm->BrightnessDetection(videoFrame, stats));
-
-    EXPECT_EQ(VPM_PARAMETER_ERROR, _vpm->PreprocessFrame(NULL, NULL));
 }

 TEST_F(VideoProcessingModuleTest, HandleBadStats)
@@ -99,65 +92,48 @@ TEST_F(VideoProcessingModuleTest, HandleBadStats)
     ASSERT_EQ(_frameLength, fread(_videoFrame.Buffer(), 1, _frameLength,
                                   _sourceFile));

-    EXPECT_EQ(-1, _vpm->Deflickering(_videoFrame.Buffer(), _width, _height, 0,
-                                     stats));
-    EXPECT_EQ(-1, _vpm->Deflickering(_videoFrame, stats));
+    _videoFrame.SetWidth(_width);
+    _videoFrame.SetHeight(_height);
+    EXPECT_EQ(-1, _vpm->Deflickering(&_videoFrame, &stats));

-    EXPECT_EQ(-3, _vpm->BrightnessDetection(_videoFrame.Buffer(), _width,
-                                            _height, stats));
     EXPECT_EQ(-3, _vpm->BrightnessDetection(_videoFrame, stats));
 }

 TEST_F(VideoProcessingModuleTest, HandleBadSize)
 {
     VideoProcessingModule::FrameStats stats;
-    ASSERT_EQ(0, _vpm->GetFrameStats(stats, _videoFrame));
+    ASSERT_EQ(0, _vpm->GetFrameStats(&stats, _videoFrame));

     // Bad width
     _videoFrame.SetWidth(0);
-    EXPECT_EQ(-3, _vpm->GetFrameStats(stats, _videoFrame.Buffer(), 0, _height));
-    EXPECT_EQ(-3, _vpm->GetFrameStats(stats, _videoFrame));
+    EXPECT_EQ(-3, _vpm->GetFrameStats(&stats, _videoFrame));

-    EXPECT_EQ(-1, _vpm->ColorEnhancement(_videoFrame.Buffer(), 0, _height));
-    EXPECT_EQ(-1, _vpm->ColorEnhancement(_videoFrame));
+    EXPECT_EQ(-1, _vpm->ColorEnhancement(&_videoFrame));

-    EXPECT_EQ(-1, _vpm->Deflickering(_videoFrame.Buffer(), 0, _height, 0,
-                                     stats));
-    EXPECT_EQ(-1, _vpm->Deflickering(_videoFrame, stats));
+    EXPECT_EQ(-1, _vpm->Deflickering(&_videoFrame, &stats));

-    EXPECT_EQ(-1, _vpm->Denoising(_videoFrame.Buffer(), 0, _height));
-    EXPECT_EQ(-1, _vpm->Denoising(_videoFrame));
+    EXPECT_EQ(-1, _vpm->Denoising(&_videoFrame));

-    EXPECT_EQ(-3, _vpm->BrightnessDetection(_videoFrame.Buffer(), 0, _height,
-                                            stats));
     EXPECT_EQ(-3, _vpm->BrightnessDetection(_videoFrame, stats));

     // Bad height
     _videoFrame.SetWidth(_width);
     _videoFrame.SetHeight(0);
-    EXPECT_EQ(-3, _vpm->GetFrameStats(stats, _videoFrame.Buffer(), _width, 0));
-    EXPECT_EQ(-3, _vpm->GetFrameStats(stats, _videoFrame));
+    EXPECT_EQ(-3, _vpm->GetFrameStats(&stats, _videoFrame));

-    EXPECT_EQ(-1, _vpm->ColorEnhancement(_videoFrame.Buffer(), _width, 0));
-    EXPECT_EQ(-1, _vpm->ColorEnhancement(_videoFrame));
+    EXPECT_EQ(-1, _vpm->ColorEnhancement(&_videoFrame));

-    EXPECT_EQ(-1, _vpm->Deflickering(_videoFrame.Buffer(), _width, 0, 0,
-                                     stats));
-    EXPECT_EQ(-1, _vpm->Deflickering(_videoFrame, stats));
+    EXPECT_EQ(-1, _vpm->Deflickering(&_videoFrame, &stats));

-    EXPECT_EQ(-1, _vpm->Denoising(_videoFrame.Buffer(), _width, 0));
-    EXPECT_EQ(-1, _vpm->Denoising(_videoFrame));
+    EXPECT_EQ(-1, _vpm->Denoising(&_videoFrame));

-    EXPECT_EQ(-3, _vpm->BrightnessDetection(_videoFrame.Buffer(), _width, 0,
-                                            stats));
     EXPECT_EQ(-3, _vpm->BrightnessDetection(_videoFrame, stats));

     EXPECT_EQ(VPM_PARAMETER_ERROR, _vpm->SetTargetResolution(0,0,0));
     EXPECT_EQ(VPM_PARAMETER_ERROR, _vpm->SetMaxFrameRate(0));

     VideoFrame *outFrame = NULL;
-    EXPECT_EQ(VPM_PARAMETER_ERROR, _vpm->PreprocessFrame(&_videoFrame,
+    EXPECT_EQ(VPM_PARAMETER_ERROR, _vpm->PreprocessFrame(_videoFrame,
                                                          &outFrame));
 }
@@ -173,28 +149,28 @@ TEST_F(VideoProcessingModuleTest, IdenticalResultsAfterReset)
     // Only testing non-static functions here.
     ASSERT_EQ(_frameLength, fread(_videoFrame.Buffer(), 1, _frameLength,
                                   _sourceFile));
-    ASSERT_EQ(0, _vpm->GetFrameStats(stats, _videoFrame));
+    ASSERT_EQ(0, _vpm->GetFrameStats(&stats, _videoFrame));
     memcpy(videoFrame2.Buffer(), _videoFrame.Buffer(), _frameLength);
-    ASSERT_EQ(0, _vpm->Deflickering(_videoFrame, stats));
+    ASSERT_EQ(0, _vpm->Deflickering(&_videoFrame, &stats));
     _vpm->Reset();
     // Retrieve frame stats again in case Deflickering() has zeroed them.
-    ASSERT_EQ(0, _vpm->GetFrameStats(stats, videoFrame2));
-    ASSERT_EQ(0, _vpm->Deflickering(videoFrame2, stats));
+    ASSERT_EQ(0, _vpm->GetFrameStats(&stats, videoFrame2));
+    ASSERT_EQ(0, _vpm->Deflickering(&videoFrame2, &stats));
     EXPECT_EQ(0, memcmp(_videoFrame.Buffer(), videoFrame2.Buffer(),
                         _frameLength));

     ASSERT_EQ(_frameLength, fread(_videoFrame.Buffer(), 1, _frameLength,
                                   _sourceFile));
     memcpy(videoFrame2.Buffer(), _videoFrame.Buffer(), _frameLength);
-    ASSERT_GE(_vpm->Denoising(_videoFrame), 0);
+    ASSERT_GE(_vpm->Denoising(&_videoFrame), 0);
     _vpm->Reset();
-    ASSERT_GE(_vpm->Denoising(videoFrame2), 0);
+    ASSERT_GE(_vpm->Denoising(&videoFrame2), 0);
     EXPECT_EQ(0, memcmp(_videoFrame.Buffer(), videoFrame2.Buffer(),
                         _frameLength));

     ASSERT_EQ(_frameLength, fread(_videoFrame.Buffer(), 1, _frameLength,
                                   _sourceFile));
-    ASSERT_EQ(0, _vpm->GetFrameStats(stats, _videoFrame));
+    ASSERT_EQ(0, _vpm->GetFrameStats(&stats, _videoFrame));
     memcpy(videoFrame2.Buffer(), _videoFrame.Buffer(), _frameLength);
     ASSERT_EQ(0, _vpm->BrightnessDetection(_videoFrame, stats));
     _vpm->Reset();
@@ -210,7 +186,7 @@ TEST_F(VideoProcessingModuleTest, FrameStats)
                                   _sourceFile));

     EXPECT_FALSE(_vpm->ValidFrameStats(stats));
-    EXPECT_EQ(0, _vpm->GetFrameStats(stats, _videoFrame));
+    EXPECT_EQ(0, _vpm->GetFrameStats(&stats, _videoFrame));
     EXPECT_TRUE(_vpm->ValidFrameStats(stats));

     printf("\nFrameStats\n");
@@ -222,7 +198,7 @@ TEST_F(VideoProcessingModuleTest, FrameStats)
            static_cast<unsigned int>(stats.subSamplWidth),
            static_cast<unsigned int>(stats.sum));

-    _vpm->ClearFrameStats(stats);
+    _vpm->ClearFrameStats(&stats);
     EXPECT_FALSE(_vpm->ValidFrameStats(stats));
 }
@@ -239,7 +215,7 @@ TEST_F(VideoProcessingModuleTest, PreprocessorLogic)
     _vpm->SetInputFrameResampleMode(kNoRescaling);
     ASSERT_EQ(VPM_OK, _vpm->SetTargetResolution(100, 100, 30));
     VideoFrame *outFrame = NULL;
-    ASSERT_EQ(VPM_OK, _vpm->PreprocessFrame(&_videoFrame, &outFrame));
+    ASSERT_EQ(VPM_OK, _vpm->PreprocessFrame(_videoFrame, &outFrame));
     // No rescaling=> output frame = NULL
     ASSERT_TRUE(outFrame == NULL);
 }
@@ -324,7 +300,7 @@ void TestSize(const VideoFrame& source_frame, int target_width,
     VideoFrame* out_frame = NULL;

     ASSERT_EQ(VPM_OK, vpm->SetTargetResolution(target_width, target_height, 30));
-    ASSERT_EQ(VPM_OK, vpm->PreprocessFrame(&source_frame, &out_frame));
+    ASSERT_EQ(VPM_OK, vpm->PreprocessFrame(source_frame, &out_frame));

     // If the frame was resampled (scale changed) then:
     // (1) verify the new size and write out processed frame for viewing.
@@ -362,7 +338,7 @@ void TestSize(const VideoFrame& source_frame, int target_width,
     ASSERT_EQ(VPM_OK, vpm->SetTargetResolution(source_width,
                                                source_height,
                                                30));
-    ASSERT_EQ(VPM_OK, vpm->PreprocessFrame(&resampled_source_frame,
+    ASSERT_EQ(VPM_OK, vpm->PreprocessFrame(resampled_source_frame,
                                            &out_frame));

     // Write the processed frame to file for visual inspection.

View File

@@ -567,9 +567,9 @@ bool ViECapturer::ViECaptureProcess() {
 void ViECapturer::DeliverI420Frame(VideoFrame* video_frame) {
   // Apply image enhancement and effect filter.
   if (deflicker_frame_stats_) {
-    if (image_proc_module_->GetFrameStats(*deflicker_frame_stats_,
+    if (image_proc_module_->GetFrameStats(deflicker_frame_stats_,
                                           *video_frame) == 0) {
-      image_proc_module_->Deflickering(*video_frame, *deflicker_frame_stats_);
+      image_proc_module_->Deflickering(video_frame, deflicker_frame_stats_);
     } else {
       WEBRTC_TRACE(kTraceStream, kTraceVideo, ViEId(engine_id_, capture_id_),
                    "%s: could not get frame stats for captured frame",
@@ -577,10 +577,10 @@ void ViECapturer::DeliverI420Frame(VideoFrame* video_frame) {
     }
   }
   if (denoising_enabled_) {
-    image_proc_module_->Denoising(*video_frame);
+    image_proc_module_->Denoising(video_frame);
   }
   if (brightness_frame_stats_) {
-    if (image_proc_module_->GetFrameStats(*brightness_frame_stats_,
+    if (image_proc_module_->GetFrameStats(brightness_frame_stats_,
                                           *video_frame) == 0) {
       WebRtc_Word32 brightness = image_proc_module_->BrightnessDetection(
           *video_frame, *brightness_frame_stats_);

View File

@@ -2096,7 +2096,7 @@ WebRtc_Word32 ViEChannel::FrameToRender(VideoFrame& video_frame) {  // NOLINT
                             video_frame.Height());
   }
   if (color_enhancement_) {
-    VideoProcessingModule::ColorEnhancement(video_frame);
+    VideoProcessingModule::ColorEnhancement(&video_frame);
   }

   // Record videoframe.

View File

@@ -495,7 +495,7 @@ void ViEEncoder::DeliverFrame(int id,
     has_received_rpsi_ = false;
   }

   VideoFrame* decimated_frame = NULL;
-  const int ret = vpm_.PreprocessFrame(video_frame, &decimated_frame);
+  const int ret = vpm_.PreprocessFrame(*video_frame, &decimated_frame);
   if (ret == 1) {
     // Drop this frame.
     return;
@@ -528,7 +528,7 @@ void ViEEncoder::DeliverFrame(int id,
   // TODO(mflodman) Rewrite this to use code common to VP8 case.
   // Pass frame via preprocessor.
   VideoFrame* decimated_frame = NULL;
-  const int ret = vpm_.PreprocessFrame(video_frame, &decimated_frame);
+  const int ret = vpm_.PreprocessFrame(*video_frame, &decimated_frame);
   if (ret == 1) {
     // Drop this frame.
     return;