Updating VPM APIs to use VideoFrame

Review URL: https://webrtc-codereview.appspot.com/879006

git-svn-id: http://webrtc.googlecode.com/svn/trunk@2956 4adac7df-926f-26a2-2b94-8c16560cd09d
mikhal@webrtc.org 2012-10-19 15:43:31 +00:00
parent aa220bf568
commit 0e196e1aa1
27 changed files with 281 additions and 431 deletions
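
The pattern applied across these files is uniform: VPM entry points that previously took a raw WebRtc_UWord8* buffer plus explicit width/height (and, for Deflickering, a timestamp) now take a VideoFrame, passed by pointer when the function mutates the frame and by const reference when it only reads it. A minimal before/after sketch of a caller, using the Buffer()/Width()/Height()/TimeStamp() accessors that appear in this change:

// Before: raw buffer plus explicit dimensions and timestamp.
vpm->Deflickering(frame.Buffer(), frame.Width(), frame.Height(),
                  frame.TimeStamp(), stats);

// After: the frame carries its own dimensions and timestamp,
// and mutated arguments are now pointers.
vpm->Deflickering(&frame, &stats);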

View File

@@ -251,7 +251,7 @@ QualityModesTest::Perform()
_timeStamp += (WebRtc_UWord32)(9e4 / static_cast<float>(codec.maxFramerate));
sourceFrame.SetTimeStamp(_timeStamp);
ret = _vpm->PreprocessFrame(&sourceFrame, &decimatedFrame);
ret = _vpm->PreprocessFrame(sourceFrame, &decimatedFrame);
if (ret == 1)
{
printf("VD: frame drop %d \n",_frameCnt);

View File

@@ -29,8 +29,8 @@
concurrently processed stream. Similarly, it is recommended to call Reset()
before switching to a new stream, but this is not absolutely required.
The module provides basic thread safety by permitting only a single function to
execute concurrently.
The module provides basic thread safety by permitting only a single function
to execute concurrently.
*/
namespace webrtc {
@@ -57,8 +57,10 @@ public:
WebRtc_UWord32 mean; /**< Mean value of frame */
WebRtc_UWord32 sum; /**< Sum of frame */
WebRtc_UWord32 numPixels; /**< Number of pixels */
WebRtc_UWord8 subSamplWidth; /**< Subsampling rate of width in powers of 2 */
WebRtc_UWord8 subSamplHeight; /**< Subsampling rate of height in powers of 2 */
WebRtc_UWord8 subSamplWidth; /**< Subsampling rate of width in powers
of 2 */
WebRtc_UWord8 subSamplHeight; /**< Subsampling rate of height in powers
of 2 */
};
/**
@@ -113,26 +115,12 @@ public:
The frame statistics will be stored here on return.
\param[in] frame
Pointer to the video frame.
\param[in] width
Frame width in pixels.
\param[in] height
Frame height in pixels.
Reference to the video frame.
\return 0 on success, -1 on failure.
*/
static WebRtc_Word32 GetFrameStats(FrameStats& stats,
const WebRtc_UWord8* frame,
WebRtc_UWord32 width,
WebRtc_UWord32 height);
/**
\overload
*/
static WebRtc_Word32 GetFrameStats(FrameStats& stats,
const VideoFrame& frame);
static WebRtc_Word32 GetFrameStats(FrameStats* stats,
const VideoFrame& frame);
/**
Checks the validity of a FrameStats struct. Currently, valid implies only
@@ -151,7 +139,7 @@ public:
\param[in,out] stats
Frame statistics.
*/
static void ClearFrameStats(FrameStats& stats);
static void ClearFrameStats(FrameStats* stats);
/**
Enhances the color of an image through a constant mapping. Only the
@@ -159,35 +147,14 @@ public:
\param[in,out] frame
Pointer to the video frame.
\param[in] width
Frame width in pixels.
\param[in] height
Frame height in pixels.
\return 0 on success, -1 on failure.
*/
static WebRtc_Word32 ColorEnhancement(WebRtc_UWord8* frame,
WebRtc_UWord32 width,
WebRtc_UWord32 height);
/**
\overload
*/
static WebRtc_Word32 ColorEnhancement(VideoFrame& frame);
static WebRtc_Word32 ColorEnhancement(VideoFrame* frame);
/**
Increases/decreases the luminance value.
\param[in,out] frame
Pointer to the video frame buffer.
\param[in] width
Frame width in pixels.
\param[in] height
Frame height in pixels.
Pointer to the video frame.
\param[in] delta
The amount to change the chrominance value of every single pixel.
@@ -195,30 +162,16 @@ public:
\return 0 on success, -1 on failure.
*/
static WebRtc_Word32 Brighten(WebRtc_UWord8* frame,
int width, int height, int delta);
/**
\overload
*/
static WebRtc_Word32 Brighten(VideoFrame& frame, int delta);
static WebRtc_Word32 Brighten(VideoFrame* frame, int delta);
/**
Detects and removes camera flicker from a video stream. Every frame from the
stream must be passed in. A frame will only be altered if flicker has been
detected. Has a fixed-point implementation.
Detects and removes camera flicker from a video stream. Every frame from
the stream must be passed in. A frame will only be altered if flicker has
been detected. Has a fixed-point implementation.
\param[in,out] frame
Pointer to the video frame.
\param[in] width
Frame width in pixels.
\param[in] height
Frame height in pixels.
\param[in] timestamp
Frame timestamp in 90 kHz format.
\param[in,out] stats
Frame statistics provided by GetFrameStats(). On return the stats will
be reset to zero if the frame was altered. Call GetFrameStats() again
@@ -226,18 +179,9 @@ public:
\return 0 on success, -1 on failure.
*/
virtual WebRtc_Word32 Deflickering(WebRtc_UWord8* frame,
WebRtc_UWord32 width,
WebRtc_UWord32 height,
WebRtc_UWord32 timestamp,
FrameStats& stats) = 0;
virtual WebRtc_Word32 Deflickering(VideoFrame* frame,
FrameStats* stats) = 0;
/**
\overload
*/
virtual WebRtc_Word32 Deflickering(VideoFrame& frame,
FrameStats& stats) = 0;
/**
Denoises a video frame. Every frame from the stream should be passed in.
Has a fixed-point implementation.
@@ -245,57 +189,30 @@ public:
\param[in,out] frame
Pointer to the video frame.
\param[in] width
Frame width in pixels.
\param[in] height
Frame height in pixels.
\return The number of modified pixels on success, -1 on failure.
*/
virtual WebRtc_Word32 Denoising(WebRtc_UWord8* frame,
WebRtc_UWord32 width,
WebRtc_UWord32 height) = 0;
virtual WebRtc_Word32 Denoising(VideoFrame* frame) = 0;
/**
\overload
*/
virtual WebRtc_Word32 Denoising(VideoFrame& frame) = 0;
/**
Detects if a video frame is excessively bright or dark. Returns a warning if
this is the case. Multiple frames should be passed in before expecting a
warning. Has a floating-point implementation.
Detects if a video frame is excessively bright or dark. Returns a
warning if this is the case. Multiple frames should be passed in before
expecting a warning. Has a floating-point implementation.
\param[in] frame
Pointer to the video frame.
\param[in] width
Frame width in pixels.
\param[in] height
Frame height in pixels.
\param[in] stats
Frame statistics provided by GetFrameStats().
\return A member of BrightnessWarning on success, -1 on error
*/
virtual WebRtc_Word32 BrightnessDetection(const WebRtc_UWord8* frame,
WebRtc_UWord32 width,
WebRtc_UWord32 height,
const FrameStats& stats) = 0;
/**
\overload
*/
virtual WebRtc_Word32 BrightnessDetection(const VideoFrame& frame,
const FrameStats& stats) = 0;
const FrameStats& stats) = 0;
/**
The following functions refer to the pre-processor unit within VPM. The pre-processor
perfoms spatial/temporal decimation and content analysis on the frames prior to encoding.
The following functions refer to the pre-processor unit within VPM. The
pre-processor perfoms spatial/temporal decimation and content analysis on
the frames prior to encoding.
*/
/**
@@ -320,7 +237,9 @@ public:
\return VPM_OK on success, a negative value on error (see error codes)
*/
virtual WebRtc_Word32 SetTargetResolution(WebRtc_UWord32 width, WebRtc_UWord32 height, WebRtc_UWord32 frameRate) = 0;
virtual WebRtc_Word32 SetTargetResolution(WebRtc_UWord32 width,
WebRtc_UWord32 height,
WebRtc_UWord32 frameRate) = 0;
/**
Set max frame rate
@@ -352,7 +271,8 @@ public:
\param[in] resamplingMode
Set resampling mode (a member of VideoFrameResampling)
*/
virtual void SetInputFrameResampleMode(VideoFrameResampling resamplingMode) = 0;
virtual void SetInputFrameResampleMode(VideoFrameResampling
resamplingMode) = 0;
/**
Get Processed (decimated) frame
@@ -363,7 +283,8 @@ public:
\return VPM_OK on success, a negative value on error (see error codes)
*/
virtual WebRtc_Word32 PreprocessFrame(const VideoFrame* frame, VideoFrame** processedFrame) = 0;
virtual WebRtc_Word32 PreprocessFrame(const VideoFrame& frame,
VideoFrame** processedFrame) = 0;
/**
Return content metrics for the last processed frame
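
Taken together, the reworked header reads as the pipeline below. A hedged usage sketch of the new signatures (error handling abbreviated; toEncode is an illustrative name, not part of the API):

VideoProcessingModule::FrameStats stats;
if (vpm->GetFrameStats(&stats, frame) != 0)
  return;

// Read-only check first: const reference, stats still valid.
WebRtc_Word32 warning = vpm->BrightnessDetection(frame, stats);

// Deflickering mutates the frame and may zero the stats; pointer args.
vpm->Deflickering(&frame, &stats);

// PreprocessFrame sets *processedFrame to NULL when no resampling occurs.
VideoFrame* processedFrame = NULL;
if (vpm->PreprocessFrame(frame, &processedFrame) == VPM_OK) {
  VideoFrame* toEncode = (processedFrame != NULL) ? processedFrame : &frame;
}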

View File

@@ -17,21 +17,21 @@
namespace webrtc {
namespace VideoProcessing {
WebRtc_Word32 Brighten(WebRtc_UWord8* frame,
int width, int height, int delta) {
if (frame == NULL) {
WebRtc_Word32 Brighten(VideoFrame* frame, int delta) {
assert(frame);
if (frame->Buffer() == NULL) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, -1,
"Null frame pointer");
return VPM_PARAMETER_ERROR;
}
if (width <= 0 || height <= 0) {
if (frame->Width() <= 0 || frame->Height() <= 0) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, -1,
"Invalid frame size");
return VPM_PARAMETER_ERROR;
}
int numPixels = width * height;
int numPixels = frame->Width() * frame->Height();
int lookUp[256];
for (int i = 0; i < 256; i++) {
@@ -39,7 +39,7 @@ WebRtc_Word32 Brighten(WebRtc_UWord8* frame,
lookUp[i] = ((((val < 0) ? 0 : val) > 255) ? 255 : val);
}
WebRtc_UWord8* tempPtr = frame;
WebRtc_UWord8* tempPtr = frame->Buffer();
for (int i = 0; i < numPixels; i++) {
*tempPtr = static_cast<WebRtc_UWord8>(lookUp[*tempPtr]);
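
Brighten precomputes a 256-entry lookup table once per frame so the per-pixel work is a single indexed load. A standalone sketch of the same lookup-table idea, with the clamp spelled out via std::min/std::max (illustrative, not the module's exact code):

#include <algorithm>
#include <cstdint>

void BrightenSketch(uint8_t* luma, int num_pixels, int delta) {
  // Build the clamped mapping once...
  uint8_t look_up[256];
  for (int i = 0; i < 256; ++i)
    look_up[i] = static_cast<uint8_t>(std::min(255, std::max(0, i + delta)));
  // ...then apply it per pixel.
  for (int i = 0; i < num_pixels; ++i)
    luma[i] = look_up[luma[i]];
}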

View File

@@ -17,8 +17,7 @@
namespace webrtc {
namespace VideoProcessing {
WebRtc_Word32 Brighten(WebRtc_UWord8* frame,
int width, int height, int delta);
WebRtc_Word32 Brighten(VideoFrame* frame, int delta);
} // namespace VideoProcessing
} // namespace webrtc

View File

@@ -41,26 +41,30 @@ VPMBrightnessDetection::Reset()
}
WebRtc_Word32
VPMBrightnessDetection::ProcessFrame(const WebRtc_UWord8* frame,
const WebRtc_UWord32 width,
const WebRtc_UWord32 height,
const VideoProcessingModule::FrameStats& stats)
VPMBrightnessDetection::ProcessFrame(const VideoFrame& frame,
const VideoProcessingModule::FrameStats&
stats)
{
if (frame == NULL)
if (frame.Buffer() == NULL)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, _id, "Null frame pointer");
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, _id,
"Null frame pointer");
return VPM_PARAMETER_ERROR;
}
int width = frame.Width();
int height = frame.Height();
if (width == 0 || height == 0)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, _id, "Invalid frame size");
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, _id,
"Invalid frame size");
return VPM_PARAMETER_ERROR;
}
if (!VideoProcessingModule::ValidFrameStats(stats))
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, _id, "Invalid frame stats");
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, _id,
"Invalid frame stats");
return VPM_PARAMETER_ERROR;
}
@@ -90,12 +94,14 @@ VPMBrightnessDetection::ProcessFrame(const WebRtc_UWord8* frame,
{
// Standard deviation of Y
float stdY = 0;
for (WebRtc_UWord32 h = 0; h < height; h += (1 << stats.subSamplHeight))
uint8_t* buffer = frame.Buffer();
for (int h = 0; h < height; h += (1 << stats.subSamplHeight))
{
WebRtc_UWord32 row = h*width;
for (WebRtc_UWord32 w = 0; w < width; w += (1 << stats.subSamplWidth))
for (int w = 0; w < width; w += (1 << stats.subSamplWidth))
{
stdY += (frame[w + row] - stats.mean) * (frame[w + row] - stats.mean);
stdY += (buffer[w + row] - stats.mean) * (buffer[w + row] -
stats.mean);
}
}
stdY = sqrt(stdY / stats.numPixels);
@@ -133,7 +139,8 @@ VPMBrightnessDetection::ProcessFrame(const WebRtc_UWord8* frame,
// Check if image is too dark
if ((stdY < 55) && (perc05 < 50))
{
if (medianY < 60 || stats.mean < 80 || perc95 < 130 || propLow > 0.20)
if (medianY < 60 || stats.mean < 80 || perc95 < 130 ||
propLow > 0.20)
{
_frameCntDark++;
}
@@ -150,7 +157,8 @@ VPMBrightnessDetection::ProcessFrame(const WebRtc_UWord8* frame,
// Check if image is too bright
if ((stdY < 52) && (perc95 > 200) && (medianY > 160))
{
if (medianY > 185 || stats.mean > 185 || perc05 > 140 || propHigh > 0.25)
if (medianY > 185 || stats.mean > 185 || perc05 > 140 ||
propHigh > 0.25)
{
_frameCntBright++;
}

View File

@@ -29,10 +29,8 @@ public:
void Reset();
WebRtc_Word32 ProcessFrame(const WebRtc_UWord8* frame,
WebRtc_UWord32 width,
WebRtc_UWord32 height,
const VideoProcessingModule::FrameStats& stats);
WebRtc_Word32 ProcessFrame(const VideoFrame& frame,
const VideoProcessingModule::FrameStats& stats);
private:
WebRtc_Word32 _id;

View File

@@ -18,39 +18,38 @@ namespace webrtc {
namespace VideoProcessing
{
WebRtc_Word32
ColorEnhancement(WebRtc_UWord8* frame,
const WebRtc_UWord32 width,
const WebRtc_UWord32 height)
ColorEnhancement(VideoFrame* frame)
{
assert(frame);
// pointers to U and V color pixels
WebRtc_UWord8* ptrU;
WebRtc_UWord8* ptrV;
WebRtc_UWord8 tempChroma;
const WebRtc_UWord32 numPixels = width * height;
const unsigned int size_y = frame->Width() * frame->Height();
const unsigned int size_uv = ((frame->Width() + 1) / 2) *
((frame->Height() + 1 ) / 2);
if (frame == NULL)
if (frame->Buffer() == NULL)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, -1, "Null frame pointer");
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing,
-1, "Null frame pointer");
return VPM_GENERAL_ERROR;
}
if (width == 0 || height == 0)
if (frame->Width() == 0 || frame->Height() == 0)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, -1, "Invalid frame size");
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing,
-1, "Invalid frame size");
return VPM_GENERAL_ERROR;
}
// set pointers to first U and V pixels
// stream format:
// | numPixels bytes luminance | numPixels/4 bytes chroma U | numPixels/4 chroma V |
ptrU = frame + numPixels; // skip luminance
ptrV = ptrU + (numPixels>>2);
// set pointers to first U and V pixels (skip luminance)
ptrU = frame->Buffer() + size_y;
ptrV = ptrU + size_uv;
// loop through all chrominance pixels and modify color
for (WebRtc_UWord32 ix = 0; ix < (numPixels>>2); ix++)
for (unsigned int ix = 0; ix < size_uv; ix++)
{
tempChroma = colorTable[*ptrU][*ptrV];
*ptrV = colorTable[*ptrV][*ptrU];
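
The new plane arithmetic above derives the U and V offsets from the frame dimensions with ((w + 1) / 2) * ((h + 1) / 2), which rounds odd dimensions up, rather than the old numPixels >> 2, which assumed even sizes. A small sketch of the I420 layout this relies on (contiguous Y, then U, then V in one buffer, as used throughout this module):

#include <cstdint>

struct I420Planes {
  uint8_t* y;
  uint8_t* u;
  uint8_t* v;
};

// |<---- Y: w*h ---->|<- U: cw*ch ->|<- V: cw*ch ->| with
// cw = (w + 1) / 2 and ch = (h + 1) / 2 (2x2 chroma subsampling).
I420Planes GetPlanes(uint8_t* buffer, int width, int height) {
  const int size_y = width * height;
  const int size_uv = ((width + 1) / 2) * ((height + 1) / 2);
  I420Planes planes = { buffer, buffer + size_y, buffer + size_y + size_uv };
  return planes;
}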

View File

@@ -21,9 +21,7 @@ namespace webrtc {
namespace VideoProcessing
{
WebRtc_Word32 ColorEnhancement(WebRtc_UWord8* frame,
WebRtc_UWord32 width,
WebRtc_UWord32 height);
WebRtc_Word32 ColorEnhancement(VideoFrame* frame);
}
} //namespace

View File

@@ -56,25 +56,25 @@ VPMContentAnalysis::~VPMContentAnalysis()
VideoContentMetrics*
VPMContentAnalysis::ComputeContentMetrics(const VideoFrame* inputFrame)
VPMContentAnalysis::ComputeContentMetrics(const VideoFrame& inputFrame)
{
if (inputFrame == NULL)
if (inputFrame.Buffer() == NULL)
{
return NULL;
}
// Init if needed (native dimension change)
if (_width != static_cast<int>(inputFrame->Width()) ||
_height != static_cast<int>(inputFrame->Height()))
if (_width != static_cast<int>(inputFrame.Width()) ||
_height != static_cast<int>(inputFrame.Height()))
{
if (VPM_OK != Initialize(static_cast<int>(inputFrame->Width()),
static_cast<int>(inputFrame->Height())))
if (VPM_OK != Initialize(static_cast<int>(inputFrame.Width()),
static_cast<int>(inputFrame.Height())))
{
return NULL;
}
}
_origFrame = inputFrame->Buffer();
_origFrame = inputFrame.Buffer();
// compute spatial metrics: 3 spatial prediction errors
(this->*ComputeSpatialMetrics)();

View File

@@ -35,7 +35,7 @@ public:
// Input: new frame
// Return value: pointer to structure containing content Analysis
// metrics or NULL value upon error
VideoContentMetrics* ComputeContentMetrics(const VideoFrame* inputFrame);
VideoContentMetrics* ComputeContentMetrics(const VideoFrame& inputFrame);
// Release all allocated memory
// Output: 0 if OK, negative value upon error

View File

@@ -89,12 +89,10 @@ VPMDeflickering::Reset()
}
WebRtc_Word32
VPMDeflickering::ProcessFrame(WebRtc_UWord8* frame,
const WebRtc_UWord32 width,
const WebRtc_UWord32 height,
const WebRtc_UWord32 timestamp,
VideoProcessingModule::FrameStats& stats)
VPMDeflickering::ProcessFrame(VideoFrame* frame,
VideoProcessingModule::FrameStats* stats)
{
assert(frame);
WebRtc_UWord32 frameMemory;
WebRtc_UWord8 quantUW8[kNumQuants];
WebRtc_UWord8 maxQuantUW8[kNumQuants];
@@ -105,27 +103,32 @@ VPMDeflickering::ProcessFrame(WebRtc_UWord8* frame,
WebRtc_UWord16 tmpUW16;
WebRtc_UWord32 tmpUW32;
int width = frame->Width();
int height = frame->Height();
if (frame == NULL)
if (frame->Buffer() == NULL)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, _id, "Null frame pointer");
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, _id,
"Null frame pointer");
return VPM_GENERAL_ERROR;
}
// Stricter height check due to subsampling size calculation below.
if (width == 0 || height < 2)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, _id, "Invalid frame size");
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, _id,
"Invalid frame size");
return VPM_GENERAL_ERROR;
}
if (!VideoProcessingModule::ValidFrameStats(stats))
if (!VideoProcessingModule::ValidFrameStats(*stats))
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, _id, "Invalid frame stats");
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, _id,
"Invalid frame stats");
return VPM_GENERAL_ERROR;
}
if (PreDetection(timestamp, stats) == -1)
if (PreDetection(frame->TimeStamp(), *stats) == -1)
{
return VPM_GENERAL_ERROR;
}
@@ -148,9 +151,10 @@ VPMDeflickering::ProcessFrame(WebRtc_UWord8* frame,
kLog2OfDownsamplingFactor) + 1);
WebRtc_UWord8* ySorted = new WebRtc_UWord8[ySubSize];
WebRtc_UWord32 sortRowIdx = 0;
for (WebRtc_UWord32 i = 0; i < height; i += kDownsamplingFactor)
for (int i = 0; i < height; i += kDownsamplingFactor)
{
memcpy(ySorted + sortRowIdx * width, frame + i * width, width);
memcpy(ySorted + sortRowIdx * width,
frame->Buffer() + i * width, width);
sortRowIdx++;
}
@@ -254,9 +258,10 @@ VPMDeflickering::ProcessFrame(WebRtc_UWord8* frame,
}
// Map to the output frame.
uint8_t* buffer = frame->Buffer();
for (WebRtc_UWord32 i = 0; i < ySize; i++)
{
frame[i] = mapUW8[frame[i]];
buffer[i] = mapUW8[buffer[i]];
}
// Frame was altered, so reset stats.

View File

@@ -32,14 +32,11 @@ public:
void Reset();
WebRtc_Word32 ProcessFrame(WebRtc_UWord8* frame,
WebRtc_UWord32 width,
WebRtc_UWord32 height,
WebRtc_UWord32 timestamp,
VideoProcessingModule::FrameStats& stats);
WebRtc_Word32 ProcessFrame(VideoFrame* frame,
VideoProcessingModule::FrameStats* stats);
private:
WebRtc_Word32 PreDetection(WebRtc_UWord32 timestamp,
const VideoProcessingModule::FrameStats& stats);
const VideoProcessingModule::FrameStats& stats);
WebRtc_Word32 DetectFlicker();

View File

@@ -72,28 +72,31 @@ VPMDenoising::Reset()
}
WebRtc_Word32
VPMDenoising::ProcessFrame(WebRtc_UWord8* frame,
const WebRtc_UWord32 width,
const WebRtc_UWord32 height)
VPMDenoising::ProcessFrame(VideoFrame* frame)
{
assert(frame);
WebRtc_Word32 thevar;
WebRtc_UWord32 k;
WebRtc_UWord32 jsub, ksub;
int k;
int jsub, ksub;
WebRtc_Word32 diff0;
WebRtc_UWord32 tmpMoment1;
WebRtc_UWord32 tmpMoment2;
WebRtc_UWord32 tmp;
WebRtc_Word32 numPixelsChanged = 0;
if (frame == NULL)
if (frame->Buffer() == NULL)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, _id, "Null frame pointer");
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, _id,
"Null frame pointer");
return VPM_GENERAL_ERROR;
}
int width = frame->Width();
int height = frame->Height();
if (width == 0 || height == 0)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, _id, "Invalid frame size");
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, _id,
"Invalid frame size");
return VPM_GENERAL_ERROR;
}
@@ -124,31 +127,34 @@ VPMDenoising::ProcessFrame(WebRtc_UWord8* frame,
}
/* Apply de-noising on each pixel, but update variance sub-sampled */
for (WebRtc_UWord32 i = 0; i < height; i++)
uint8_t* buffer = frame->Buffer();
for (int i = 0; i < height; i++)
{ // Collect over height
k = i * width;
ksub = ((i >> kSubsamplingHeight) << kSubsamplingHeight) * width;
for (WebRtc_UWord32 j = 0; j < width; j++)
for (int j = 0; j < width; j++)
{ // Collect over width
jsub = ((j >> kSubsamplingWidth) << kSubsamplingWidth);
/* Update mean value for every pixel and every frame */
tmpMoment1 = _moment1[k + j];
tmpMoment1 *= kDenoiseFiltParam; // Q16
tmpMoment1 += ((kDenoiseFiltParamRec * ((WebRtc_UWord32)frame[k + j])) << 8);
tmpMoment1 += ((kDenoiseFiltParamRec *
((WebRtc_UWord32)buffer[k + j])) << 8);
tmpMoment1 >>= 8; // Q8
_moment1[k + j] = tmpMoment1;
tmpMoment2 = _moment2[ksub + jsub];
if ((ksub == k) && (jsub == j) && (_denoiseFrameCnt == 0))
{
tmp = ((WebRtc_UWord32)frame[k + j] * (WebRtc_UWord32)frame[k + j]);
tmp = ((WebRtc_UWord32)buffer[k + j] *
(WebRtc_UWord32)buffer[k + j]);
tmpMoment2 *= kDenoiseFiltParam; // Q16
tmpMoment2 += ((kDenoiseFiltParamRec * tmp)<<8);
tmpMoment2 >>= 8; // Q8
}
_moment2[k + j] = tmpMoment2;
/* Current event = deviation from mean value */
diff0 = ((WebRtc_Word32)frame[k + j] << 8) - _moment1[k + j];
diff0 = ((WebRtc_Word32)buffer[k + j] << 8) - _moment1[k + j];
/* Recent events = variance (variations over time) */
thevar = _moment2[k + j];
thevar -= ((_moment1[k + j] * _moment1[k + j]) >> 8);
@@ -161,7 +167,7 @@ VPMDenoising::ProcessFrame(WebRtc_UWord8* frame,
if ((thevar < kDenoiseThreshold)
&& ((diff0 * diff0 >> 8) < kDenoiseThreshold))
{ // Replace with mean
frame[k + j] = (WebRtc_UWord8)(_moment1[k + j] >> 8);
buffer[k + j] = (WebRtc_UWord8)(_moment1[k + j] >> 8);
numPixelsChanged++;
}
}
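
The denoiser above keeps two running moments per pixel in Q8 fixed point: _moment1 approximates the temporal mean and _moment2 the mean of squares, each updated by an IIR filter, and a pixel is replaced by its mean only when both the variance (_moment2 - _moment1^2) and the instantaneous deviation fall below kDenoiseThreshold. A floating-point sketch of the same update, to make the Q8 shifts easier to follow (alpha and threshold here are illustrative, not the module's constants):

#include <stdint.h>

void DenoisePixelSketch(float* moment1, float* moment2, uint8_t* pixel,
                        float alpha /* smoothing, e.g. 0.95f */,
                        float threshold) {
  const float x = static_cast<float>(*pixel);
  *moment1 = alpha * *moment1 + (1.0f - alpha) * x;      // running mean
  *moment2 = alpha * *moment2 + (1.0f - alpha) * x * x;  // running mean of squares
  const float variance = *moment2 - *moment1 * *moment1;
  const float diff = x - *moment1;
  if (variance < threshold && diff * diff < threshold)
    *pixel = static_cast<uint8_t>(*moment1 + 0.5f);      // replace with mean
}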

View File

@@ -29,9 +29,7 @@ public:
void Reset();
WebRtc_Word32 ProcessFrame(WebRtc_UWord8* frame,
WebRtc_UWord32 width,
WebRtc_UWord32 height);
WebRtc_Word32 ProcessFrame(VideoFrame* frame);
private:
WebRtc_Word32 _id;
@@ -39,7 +37,7 @@ private:
WebRtc_UWord32* _moment1; // (Q8) First order moment (mean)
WebRtc_UWord32* _moment2; // (Q8) Second order moment
WebRtc_UWord32 _frameSize; // Size (# of pixels) of frame
WebRtc_Word32 _denoiseFrameCnt; // Counter for subsampling in time
int _denoiseFrameCnt; // Counter for subsampling in time
};
} //namespace

View File

@@ -136,9 +136,10 @@ VPMFramePreprocessor::DecimatedHeight() const
WebRtc_Word32
VPMFramePreprocessor::PreprocessFrame(const VideoFrame* frame, VideoFrame** processedFrame)
VPMFramePreprocessor::PreprocessFrame(const VideoFrame& frame,
VideoFrame** processedFrame)
{
if (frame == NULL || frame->Height() == 0 || frame->Width() == 0)
if (frame.Buffer() == NULL || frame.Height() == 0 || frame.Width() == 0)
{
return VPM_PARAMETER_ERROR;
}
@@ -147,7 +148,8 @@ VPMFramePreprocessor::PreprocessFrame(const VideoFrame* frame, VideoFrame** proc
if (_vd->DropFrame())
{
WEBRTC_TRACE(webrtc::kTraceStream, webrtc::kTraceVideo, _id, "Drop frame due to frame rate");
WEBRTC_TRACE(webrtc::kTraceStream, webrtc::kTraceVideo, _id,
"Drop frame due to frame rate");
return 1; // drop 1 frame
}
@@ -155,8 +157,9 @@ VPMFramePreprocessor::PreprocessFrame(const VideoFrame* frame, VideoFrame** proc
// Note that we must make a copy of it.
// We are not allowed to resample the input frame.
*processedFrame = NULL;
if (_spatialResampler->ApplyResample(frame->Width(), frame->Height())) {
WebRtc_Word32 ret = _spatialResampler->ResampleFrame(*frame, _resampledFrame);
if (_spatialResampler->ApplyResample(frame.Width(), frame.Height())) {
WebRtc_Word32 ret = _spatialResampler->ResampleFrame(frame,
_resampledFrame);
if (ret != VPM_OK)
return ret;
*processedFrame = &_resampledFrame;
@@ -171,7 +174,7 @@ VPMFramePreprocessor::PreprocessFrame(const VideoFrame* frame, VideoFrame** proc
if (*processedFrame == NULL) {
_contentMetrics = _ca->ComputeContentMetrics(frame);
} else {
_contentMetrics = _ca->ComputeContentMetrics(&_resampledFrame);
_contentMetrics = _ca->ComputeContentMetrics(_resampledFrame);
}
}
++_frameCnt;

View File

@@ -46,12 +46,15 @@ public:
WebRtc_Word32 SetMaxFrameRate(WebRtc_UWord32 maxFrameRate);
//Set target resolution: frame rate and dimension
WebRtc_Word32 SetTargetResolution(WebRtc_UWord32 width, WebRtc_UWord32 height, WebRtc_UWord32 frameRate);
WebRtc_Word32 SetTargetResolution(WebRtc_UWord32 width,
WebRtc_UWord32 height,
WebRtc_UWord32 frameRate);
//Update incoming frame rate/dimension
void UpdateIncomingFrameRate();
WebRtc_Word32 updateIncomingFrameSize(WebRtc_UWord32 width, WebRtc_UWord32 height);
WebRtc_Word32 updateIncomingFrameSize(WebRtc_UWord32 width,
WebRtc_UWord32 height);
//Set decimated values: frame rate/dimension
WebRtc_UWord32 DecimatedFrameRate();
@@ -59,7 +62,8 @@ public:
WebRtc_UWord32 DecimatedHeight() const;
//Preprocess output:
WebRtc_Word32 PreprocessFrame(const VideoFrame* frame, VideoFrame** processedFrame);
WebRtc_Word32 PreprocessFrame(const VideoFrame& frame,
VideoFrame** processedFrame);
VideoContentMetrics* ContentMetrics() const;
private:

View File

@@ -19,29 +19,29 @@ namespace webrtc {
namespace
{
void
SetSubSampling(VideoProcessingModule::FrameStats& stats,
SetSubSampling(VideoProcessingModule::FrameStats* stats,
const WebRtc_Word32 width,
const WebRtc_Word32 height)
{
if (width * height >= 640 * 480)
{
stats.subSamplWidth = 3;
stats.subSamplHeight = 3;
stats->subSamplWidth = 3;
stats->subSamplHeight = 3;
}
else if (width * height >= 352 * 288)
{
stats.subSamplWidth = 2;
stats.subSamplHeight = 2;
stats->subSamplWidth = 2;
stats->subSamplHeight = 2;
}
else if (width * height >= 176 * 144)
{
stats.subSamplWidth = 1;
stats.subSamplHeight = 1;
stats->subSamplWidth = 1;
stats->subSamplHeight = 1;
}
else
{
stats.subSamplWidth = 0;
stats.subSamplHeight = 0;
stats->subSamplWidth = 0;
stats->subSamplHeight = 0;
}
}
}
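
SetSubSampling picks a power-of-two sampling stride from the frame area, so larger frames are sampled more sparsely when statistics are gathered: VGA and above uses a stride of 1 << 3 = 8 in each dimension, CIF 4, QCIF 2, and smaller frames every pixel. A small sketch of the resulting sample count, matching the numPixels formula in GetFrameStats below:

// Pixels visited by GetFrameStats for a given subsampling exponent.
// E.g. 640x480 with subSampl == 3: (640 * 480) / (8 * 8) = 4800 samples.
int SampledPixels(int width, int height, int subSampl) {
  const int stride = 1 << subSampl;
  return (width * height) / (stride * stride);
}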
@@ -89,13 +89,15 @@ VideoProcessingModuleImpl::VideoProcessingModuleImpl(const WebRtc_Word32 id) :
_deflickering.ChangeUniqueId(id);
_denoising.ChangeUniqueId(id);
_framePreProcessor.ChangeUniqueId(id);
WEBRTC_TRACE(webrtc::kTraceMemory, webrtc::kTraceVideoPreocessing, _id, "Created");
WEBRTC_TRACE(webrtc::kTraceMemory, webrtc::kTraceVideoPreocessing, _id,
"Created");
}
VideoProcessingModuleImpl::~VideoProcessingModuleImpl()
{
WEBRTC_TRACE(webrtc::kTraceMemory, webrtc::kTraceVideoPreocessing, _id, "Destroyed");
WEBRTC_TRACE(webrtc::kTraceMemory, webrtc::kTraceVideoPreocessing, _id,
"Destroyed");
delete &_mutex;
}
@@ -112,49 +114,47 @@ VideoProcessingModuleImpl::Reset()
}
WebRtc_Word32
VideoProcessingModule::GetFrameStats(FrameStats& stats,
const VideoFrame& frame)
VideoProcessingModule::GetFrameStats(FrameStats* stats,
const VideoFrame& frame)
{
return GetFrameStats(stats, frame.Buffer(), frame.Width(), frame.Height());
}
WebRtc_Word32
VideoProcessingModule::GetFrameStats(FrameStats& stats,
const WebRtc_UWord8* frame,
const WebRtc_UWord32 width,
const WebRtc_UWord32 height)
{
if (frame == NULL)
if (frame.Buffer() == NULL)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, -1, "Null frame pointer");
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, -1,
"Null frame pointer");
return VPM_PARAMETER_ERROR;
}
int width = frame.Width();
int height = frame.Height();
if (width == 0 || height == 0)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, -1, "Invalid frame size");
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, -1,
"Invalid frame size");
return VPM_PARAMETER_ERROR;
}
ClearFrameStats(stats); // The histogram needs to be zeroed out.
SetSubSampling(stats, width, height);
uint8_t* buffer = frame.Buffer();
// Compute histogram and sum of frame
for (WebRtc_UWord32 i = 0; i < height; i += (1 << stats.subSamplHeight))
for (int i = 0; i < height; i += (1 << stats->subSamplHeight))
{
WebRtc_Word32 k = i * width;
for (WebRtc_UWord32 j = 0; j < width; j += (1 << stats.subSamplWidth))
int k = i * width;
for (int j = 0; j < width; j += (1 << stats->subSamplWidth))
{
stats.hist[frame[k + j]]++;
stats.sum += frame[k + j];
stats->hist[buffer[k + j]]++;
stats->sum += buffer[k + j];
}
}
stats.numPixels = (width * height) / ((1 << stats.subSamplWidth) * (1 << stats.subSamplHeight));
assert(stats.numPixels > 0);
stats->numPixels = (width * height) / ((1 << stats->subSamplWidth) *
(1 << stats->subSamplHeight));
assert(stats->numPixels > 0);
// Compute mean value of frame
stats.mean = stats.sum / stats.numPixels;
stats->mean = stats->sum / stats->numPixels;
return VPM_OK;
}
@@ -171,94 +171,48 @@ VideoProcessingModule::ValidFrameStats(const FrameStats& stats)
}
void
VideoProcessingModule::ClearFrameStats(FrameStats& stats)
VideoProcessingModule::ClearFrameStats(FrameStats* stats)
{
stats.mean = 0;
stats.sum = 0;
stats.numPixels = 0;
stats.subSamplWidth = 0;
stats.subSamplHeight = 0;
memset(stats.hist, 0, sizeof(stats.hist));
stats->mean = 0;
stats->sum = 0;
stats->numPixels = 0;
stats->subSamplWidth = 0;
stats->subSamplHeight = 0;
memset(stats->hist, 0, sizeof(stats->hist));
}
WebRtc_Word32
VideoProcessingModule::ColorEnhancement(VideoFrame& frame)
VideoProcessingModule::ColorEnhancement(VideoFrame* frame)
{
return ColorEnhancement(frame.Buffer(), frame.Width(), frame.Height());
return VideoProcessing::ColorEnhancement(frame);
}
WebRtc_Word32
VideoProcessingModule::ColorEnhancement(WebRtc_UWord8* frame,
const WebRtc_UWord32 width,
const WebRtc_UWord32 height)
VideoProcessingModule::Brighten(VideoFrame* frame, int delta)
{
return VideoProcessing::ColorEnhancement(frame, width, height);
return VideoProcessing::Brighten(frame, delta);
}
WebRtc_Word32
VideoProcessingModule::Brighten(VideoFrame& frame, int delta)
{
return Brighten(frame.Buffer(), frame.Width(), frame.Height(), delta);
}
WebRtc_Word32
VideoProcessingModule::Brighten(WebRtc_UWord8* frame,
int width,
int height,
int delta)
{
return VideoProcessing::Brighten(frame, width, height, delta);
}
WebRtc_Word32
VideoProcessingModuleImpl::Deflickering(VideoFrame& frame,
FrameStats& stats)
{
return Deflickering(frame.Buffer(), frame.Width(), frame.Height(),
frame.TimeStamp(), stats);
}
WebRtc_Word32
VideoProcessingModuleImpl::Deflickering(WebRtc_UWord8* frame,
const WebRtc_UWord32 width,
const WebRtc_UWord32 height,
const WebRtc_UWord32 timestamp,
FrameStats& stats)
VideoProcessingModuleImpl::Deflickering(VideoFrame* frame, FrameStats* stats)
{
CriticalSectionScoped mutex(&_mutex);
return _deflickering.ProcessFrame(frame, width, height, timestamp, stats);
return _deflickering.ProcessFrame(frame, stats);
}
WebRtc_Word32
VideoProcessingModuleImpl::Denoising(VideoFrame& frame)
{
return Denoising(frame.Buffer(), frame.Width(), frame.Height());
}
WebRtc_Word32
VideoProcessingModuleImpl::Denoising(WebRtc_UWord8* frame,
const WebRtc_UWord32 width,
const WebRtc_UWord32 height)
VideoProcessingModuleImpl::Denoising(VideoFrame* frame)
{
CriticalSectionScoped mutex(&_mutex);
return _denoising.ProcessFrame(frame, width, height);
return _denoising.ProcessFrame(frame);
}
WebRtc_Word32
VideoProcessingModuleImpl::BrightnessDetection(const VideoFrame& frame,
const FrameStats& stats)
{
return BrightnessDetection(frame.Buffer(), frame.Width(), frame.Height(), stats);
}
WebRtc_Word32
VideoProcessingModuleImpl::BrightnessDetection(const WebRtc_UWord8* frame,
const WebRtc_UWord32 width,
const WebRtc_UWord32 height,
const FrameStats& stats)
const FrameStats& stats)
{
CriticalSectionScoped mutex(&_mutex);
return _brightnessDetection.ProcessFrame(frame, width, height, stats);
return _brightnessDetection.ProcessFrame(frame, stats);
}
@@ -271,7 +225,8 @@ VideoProcessingModuleImpl::EnableTemporalDecimation(bool enable)
void
VideoProcessingModuleImpl::SetInputFrameResampleMode(VideoFrameResampling resamplingMode)
VideoProcessingModuleImpl::SetInputFrameResampleMode(VideoFrameResampling
resamplingMode)
{
CriticalSectionScoped cs(&_mutex);
_framePreProcessor.SetInputFrameResampleMode(resamplingMode);
@@ -286,7 +241,9 @@ VideoProcessingModuleImpl::SetMaxFrameRate(WebRtc_UWord32 maxFrameRate)
}
WebRtc_Word32
VideoProcessingModuleImpl::SetTargetResolution(WebRtc_UWord32 width, WebRtc_UWord32 height, WebRtc_UWord32 frameRate)
VideoProcessingModuleImpl::SetTargetResolution(WebRtc_UWord32 width,
WebRtc_UWord32 height,
WebRtc_UWord32 frameRate)
{
CriticalSectionScoped cs(&_mutex);
return _framePreProcessor.SetTargetResolution(width, height, frameRate);
@@ -316,7 +273,8 @@ VideoProcessingModuleImpl::DecimatedHeight() const
}
WebRtc_Word32
VideoProcessingModuleImpl::PreprocessFrame(const VideoFrame *frame, VideoFrame **processedFrame)
VideoProcessingModuleImpl::PreprocessFrame(const VideoFrame& frame,
VideoFrame **processedFrame)
{
CriticalSectionScoped mutex(&_mutex);
return _framePreProcessor.PreprocessFrame(frame, processedFrame);

View File

@@ -36,30 +36,14 @@ public:
virtual void Reset();
virtual WebRtc_Word32 Deflickering(WebRtc_UWord8* frame,
WebRtc_UWord32 width,
WebRtc_UWord32 height,
WebRtc_UWord32 timestamp,
FrameStats& stats);
virtual WebRtc_Word32 Deflickering(VideoFrame* frame,
FrameStats* stats);
virtual WebRtc_Word32 Deflickering(VideoFrame& frame,
FrameStats& stats);
virtual WebRtc_Word32 Denoising(WebRtc_UWord8* frame,
WebRtc_UWord32 width,
WebRtc_UWord32 height);
virtual WebRtc_Word32 Denoising(VideoFrame& frame);
virtual WebRtc_Word32 BrightnessDetection(const WebRtc_UWord8* frame,
WebRtc_UWord32 width,
WebRtc_UWord32 height,
const FrameStats& stats);
virtual WebRtc_Word32 Denoising(VideoFrame* frame);
virtual WebRtc_Word32 BrightnessDetection(const VideoFrame& frame,
const FrameStats& stats);
//Frame pre-processor functions
//Enable temporal decimation
@@ -88,7 +72,7 @@ public:
// Pre-process incoming frame: Sample when needed and compute content
// metrics when enabled.
// If no resampling takes place - processedFrame is set to NULL.
virtual WebRtc_Word32 PreprocessFrame(const VideoFrame* frame,
virtual WebRtc_Word32 PreprocessFrame(const VideoFrame& frame,
VideoFrame** processedFrame);
virtual VideoContentMetrics* ContentMetrics() const;

View File

@@ -18,12 +18,14 @@ TEST_F(VideoProcessingModuleTest, BrightnessDetection)
WebRtc_UWord32 frameNum = 0;
WebRtc_Word32 brightnessWarning = 0;
WebRtc_UWord32 warningCount = 0;
while (fread(_videoFrame.Buffer(), 1, _frameLength, _sourceFile) == _frameLength)
while (fread(_videoFrame.Buffer(), 1, _frameLength, _sourceFile) ==
_frameLength)
{
frameNum++;
VideoProcessingModule::FrameStats stats;
ASSERT_EQ(0, _vpm->GetFrameStats(stats, _videoFrame));
ASSERT_GE(brightnessWarning = _vpm->BrightnessDetection(_videoFrame, stats), 0);
ASSERT_EQ(0, _vpm->GetFrameStats(&stats, _videoFrame));
ASSERT_GE(brightnessWarning = _vpm->BrightnessDetection(_videoFrame,
stats), 0);
if (brightnessWarning != VideoProcessingModule::kNoWarning)
{
warningCount++;
@@ -40,7 +42,8 @@ TEST_F(VideoProcessingModuleTest, BrightnessDetection)
rewind(_sourceFile);
frameNum = 0;
warningCount = 0;
while (fread(_videoFrame.Buffer(), 1, _frameLength, _sourceFile) == _frameLength &&
while (fread(_videoFrame.Buffer(), 1, _frameLength, _sourceFile) ==
_frameLength &&
frameNum < 300)
{
frameNum++;
@@ -58,8 +61,9 @@ TEST_F(VideoProcessingModuleTest, BrightnessDetection)
}
VideoProcessingModule::FrameStats stats;
ASSERT_EQ(0, _vpm->GetFrameStats(stats, _videoFrame));
ASSERT_GE(brightnessWarning = _vpm->BrightnessDetection(_videoFrame, stats), 0);
ASSERT_EQ(0, _vpm->GetFrameStats(&stats, _videoFrame));
ASSERT_GE(brightnessWarning = _vpm->BrightnessDetection(_videoFrame,
stats), 0);
EXPECT_NE(VideoProcessingModule::kDarkWarning, brightnessWarning);
if (brightnessWarning == VideoProcessingModule::kBrightWarning)
{
@@ -90,8 +94,9 @@ TEST_F(VideoProcessingModuleTest, BrightnessDetection)
}
VideoProcessingModule::FrameStats stats;
ASSERT_EQ(0, _vpm->GetFrameStats(stats, _videoFrame));
ASSERT_GE(brightnessWarning = _vpm->BrightnessDetection(_videoFrame, stats), 0);
ASSERT_EQ(0, _vpm->GetFrameStats(&stats, _videoFrame));
ASSERT_GE(brightnessWarning = _vpm->BrightnessDetection(_videoFrame,
stats), 0);
EXPECT_NE(VideoProcessingModule::kBrightWarning, brightnessWarning);
if (brightnessWarning == VideoProcessingModule::kDarkWarning)
{

View File

@@ -11,6 +11,7 @@
#include <cstdio>
#include <cstdlib>
#include "common_video/libyuv/include/webrtc_libyuv.h"
#include "modules/video_processing/main/interface/video_processing.h"
#include "modules/video_processing/main/test/unit_test/unit_test.h"
#include "system_wrappers/interface/tick_util.h"
@@ -42,7 +43,7 @@ TEST_F(VideoProcessingModuleTest, ColorEnhancement)
{
frameNum++;
t0 = TickTime::Now();
ASSERT_EQ(0, VideoProcessingModule::ColorEnhancement(_videoFrame));
ASSERT_EQ(0, VideoProcessingModule::ColorEnhancement(&_videoFrame));
t1 = TickTime::Now();
accTicks += t1 - t0;
if (fwrite(_videoFrame.Buffer(), 1, _frameLength,
@@ -88,41 +89,31 @@ TEST_F(VideoProcessingModuleTest, ColorEnhancement)
}
ASSERT_NE(0, feof(_sourceFile)) << "Error reading source file";
// Verify that all color pixels are enhanced, that no luminance values are altered,
// and that the function does not write outside the vector.
WebRtc_UWord32 safeGuard = 1000;
WebRtc_UWord32 numPixels = 352*288; // CIF size
WebRtc_UWord8 *testFrame = new WebRtc_UWord8[numPixels + (numPixels / 2) + (2 * safeGuard)];
WebRtc_UWord8 *refFrame = new WebRtc_UWord8[numPixels + (numPixels / 2) + (2 * safeGuard)];
// Verify that all color pixels are enhanced, and no luminance values are
// altered.
// use value 128 as probe value, since we know that this will be changed in the enhancement
memset(testFrame, 128, safeGuard);
memset(&testFrame[safeGuard], 128, numPixels);
memset(&testFrame[safeGuard + numPixels], 128, numPixels / 2);
memset(&testFrame[safeGuard + numPixels + (numPixels / 2)], 128, safeGuard);
WebRtc_UWord8 *testFrame = new WebRtc_UWord8[_frameLength];
memcpy(refFrame, testFrame, numPixels + (numPixels / 2) + (2 * safeGuard));
// Use value 128 as probe value, since we know that this will be changed
// in the enhancement.
memset(testFrame, 128, _frameLength);
ASSERT_EQ(0, VideoProcessingModule::ColorEnhancement(&testFrame[safeGuard], 352, 288));
VideoFrame testVideoFrame;
testVideoFrame.CopyFrame(_frameLength, testFrame);
testVideoFrame.SetWidth(_width);
testVideoFrame.SetHeight(_height);
ASSERT_EQ(0, VideoProcessingModule::ColorEnhancement(&testVideoFrame));
EXPECT_EQ(0, memcmp(testFrame, refFrame, safeGuard)) <<
"Function is writing outside the frame memory.";
EXPECT_EQ(0, memcmp(&testFrame[safeGuard + numPixels + (numPixels / 2)],
&refFrame[safeGuard + numPixels + (numPixels / 2)], safeGuard)) <<
"Function is writing outside the frame memory.";
EXPECT_EQ(0, memcmp(testVideoFrame.Buffer(), testFrame, _width * _height))
<< "Function is modifying the luminance.";
EXPECT_EQ(0, memcmp(&testFrame[safeGuard], &refFrame[safeGuard], numPixels)) <<
"Function is modifying the luminance.";
EXPECT_NE(0, memcmp(&testFrame[safeGuard + numPixels],
&refFrame[safeGuard + numPixels], numPixels / 2)) <<
EXPECT_NE(0, memcmp(testVideoFrame.Buffer() + _width * _height,
&testFrame[_width * _height], _width * _height / 2)) <<
"Function is not modifying all chrominance pixels";
ASSERT_EQ(0, fclose(refFile));
ASSERT_EQ(0, fclose(modFile));
delete [] testFrame;
delete [] refFrame;
}
} // namespace webrtc

View File

@@ -26,8 +26,8 @@ TEST_F(VideoProcessingModuleTest, ContentAnalysis)
while (fread(_videoFrame.Buffer(), 1, _frameLength, _sourceFile)
== _frameLength)
{
_cM_c = _ca_c.ComputeContentMetrics(&_videoFrame);
_cM_SSE = _ca_sse.ComputeContentMetrics(&_videoFrame);
_cM_c = _ca_c.ComputeContentMetrics(_videoFrame);
_cM_SSE = _ca_sse.ComputeContentMetrics(_videoFrame);
ASSERT_EQ(_cM_c->spatial_pred_err, _cM_SSE->spatial_pred_err);
ASSERT_EQ(_cM_c->spatial_pred_err_v, _cM_SSE->spatial_pred_err_v);

View File

@@ -57,8 +57,8 @@ TEST_F(VideoProcessingModuleTest, Deflickering)
t0 = TickTime::Now();
VideoProcessingModule::FrameStats stats;
ASSERT_EQ(0, _vpm->GetFrameStats(stats, _videoFrame));
ASSERT_EQ(0, _vpm->Deflickering(_videoFrame, stats));
ASSERT_EQ(0, _vpm->GetFrameStats(&stats, _videoFrame));
ASSERT_EQ(0, _vpm->Deflickering(&_videoFrame, &stats));
t1 = TickTime::Now();
accTicks += t1 - t0;

View File

@@ -99,7 +99,7 @@ TEST_F(VideoProcessingModuleTest, Denoising)
}
t0 = TickTime::Now();
ASSERT_GE(modifiedPixels = _vpm->Denoising(_videoFrame), 0);
ASSERT_GE(modifiedPixels = _vpm->Denoising(&_videoFrame), 0);
t1 = TickTime::Now();
accTicks += t1 - t0;

View File

@@ -68,28 +68,21 @@ void VideoProcessingModuleTest::TearDown()
TEST_F(VideoProcessingModuleTest, HandleNullBuffer)
{
VideoProcessingModule::FrameStats stats;
ASSERT_EQ(0, _vpm->GetFrameStats(stats, _videoFrame));
ASSERT_EQ(0, _vpm->GetFrameStats(&stats, _videoFrame));
// Video frame with unallocated buffer.
VideoFrame videoFrame;
videoFrame.SetWidth(_width);
videoFrame.SetHeight(_height);
EXPECT_EQ(-3, _vpm->GetFrameStats(stats, NULL, _width, _height));
EXPECT_EQ(-3, _vpm->GetFrameStats(stats, videoFrame));
EXPECT_EQ(-3, _vpm->GetFrameStats(&stats, videoFrame));
EXPECT_EQ(-1, _vpm->ColorEnhancement(NULL, _width, _height));
EXPECT_EQ(-1, _vpm->ColorEnhancement(videoFrame));
EXPECT_EQ(-1, _vpm->ColorEnhancement(&videoFrame));
EXPECT_EQ(-1, _vpm->Deflickering(NULL, _width, _height, 0, stats));
EXPECT_EQ(-1, _vpm->Deflickering(videoFrame, stats));
EXPECT_EQ(-1, _vpm->Deflickering(&videoFrame, &stats));
EXPECT_EQ(-1, _vpm->Denoising(NULL, _width, _height));
EXPECT_EQ(-1, _vpm->Denoising(videoFrame));
EXPECT_EQ(-1, _vpm->Denoising(&videoFrame));
EXPECT_EQ(-3, _vpm->BrightnessDetection(NULL, _width, _height, stats));
EXPECT_EQ(-3, _vpm->BrightnessDetection(videoFrame, stats));
EXPECT_EQ(VPM_PARAMETER_ERROR, _vpm->PreprocessFrame(NULL, NULL));
}
TEST_F(VideoProcessingModuleTest, HandleBadStats)
@@ -99,65 +92,48 @@ TEST_F(VideoProcessingModuleTest, HandleBadStats)
ASSERT_EQ(_frameLength, fread(_videoFrame.Buffer(), 1, _frameLength,
_sourceFile));
EXPECT_EQ(-1, _vpm->Deflickering(_videoFrame.Buffer(), _width, _height, 0,
stats));
EXPECT_EQ(-1, _vpm->Deflickering(_videoFrame, stats));
_videoFrame.SetWidth(_width);
_videoFrame.SetHeight(_height);
EXPECT_EQ(-1, _vpm->Deflickering(&_videoFrame, &stats));
EXPECT_EQ(-3, _vpm->BrightnessDetection(_videoFrame.Buffer(), _width,
_height, stats));
EXPECT_EQ(-3, _vpm->BrightnessDetection(_videoFrame, stats));
}
TEST_F(VideoProcessingModuleTest, HandleBadSize)
{
VideoProcessingModule::FrameStats stats;
ASSERT_EQ(0, _vpm->GetFrameStats(stats, _videoFrame));
ASSERT_EQ(0, _vpm->GetFrameStats(&stats, _videoFrame));
// Bad width
_videoFrame.SetWidth(0);
EXPECT_EQ(-3, _vpm->GetFrameStats(stats, _videoFrame.Buffer(), 0, _height));
EXPECT_EQ(-3, _vpm->GetFrameStats(stats, _videoFrame));
EXPECT_EQ(-3, _vpm->GetFrameStats(&stats, _videoFrame));
EXPECT_EQ(-1, _vpm->ColorEnhancement(_videoFrame.Buffer(), 0, _height));
EXPECT_EQ(-1, _vpm->ColorEnhancement(_videoFrame));
EXPECT_EQ(-1, _vpm->ColorEnhancement(&_videoFrame));
EXPECT_EQ(-1, _vpm->Deflickering(_videoFrame.Buffer(), 0, _height, 0,
stats));
EXPECT_EQ(-1, _vpm->Deflickering(_videoFrame, stats));
EXPECT_EQ(-1, _vpm->Deflickering(&_videoFrame, &stats));
EXPECT_EQ(-1, _vpm->Denoising(_videoFrame.Buffer(), 0, _height));
EXPECT_EQ(-1, _vpm->Denoising(_videoFrame));
EXPECT_EQ(-1, _vpm->Denoising(&_videoFrame));
EXPECT_EQ(-3, _vpm->BrightnessDetection(_videoFrame.Buffer(), 0, _height,
stats));
EXPECT_EQ(-3, _vpm->BrightnessDetection(_videoFrame, stats));
// Bad height
_videoFrame.SetWidth(_width);
_videoFrame.SetHeight(0);
EXPECT_EQ(-3, _vpm->GetFrameStats(stats, _videoFrame.Buffer(), _width, 0));
EXPECT_EQ(-3, _vpm->GetFrameStats(stats, _videoFrame));
EXPECT_EQ(-3, _vpm->GetFrameStats(&stats, _videoFrame));
EXPECT_EQ(-1, _vpm->ColorEnhancement(_videoFrame.Buffer(), _width, 0));
EXPECT_EQ(-1, _vpm->ColorEnhancement(_videoFrame));
EXPECT_EQ(-1, _vpm->ColorEnhancement(&_videoFrame));
EXPECT_EQ(-1, _vpm->Deflickering(_videoFrame.Buffer(), _width, 0, 0,
stats));
EXPECT_EQ(-1, _vpm->Deflickering(_videoFrame, stats));
EXPECT_EQ(-1, _vpm->Deflickering(&_videoFrame, &stats));
EXPECT_EQ(-1, _vpm->Denoising(_videoFrame.Buffer(), _width, 0));
EXPECT_EQ(-1, _vpm->Denoising(_videoFrame));
EXPECT_EQ(-1, _vpm->Denoising(&_videoFrame));
EXPECT_EQ(-3, _vpm->BrightnessDetection(_videoFrame.Buffer(), _width, 0,
stats));
EXPECT_EQ(-3, _vpm->BrightnessDetection(_videoFrame, stats));
EXPECT_EQ(VPM_PARAMETER_ERROR, _vpm->SetTargetResolution(0,0,0));
EXPECT_EQ(VPM_PARAMETER_ERROR, _vpm->SetMaxFrameRate(0));
VideoFrame *outFrame = NULL;
EXPECT_EQ(VPM_PARAMETER_ERROR, _vpm->PreprocessFrame(&_videoFrame,
EXPECT_EQ(VPM_PARAMETER_ERROR, _vpm->PreprocessFrame(_videoFrame,
&outFrame));
}
@@ -173,28 +149,28 @@ TEST_F(VideoProcessingModuleTest, IdenticalResultsAfterReset)
// Only testing non-static functions here.
ASSERT_EQ(_frameLength, fread(_videoFrame.Buffer(), 1, _frameLength,
_sourceFile));
ASSERT_EQ(0, _vpm->GetFrameStats(stats, _videoFrame));
ASSERT_EQ(0, _vpm->GetFrameStats(&stats, _videoFrame));
memcpy(videoFrame2.Buffer(), _videoFrame.Buffer(), _frameLength);
ASSERT_EQ(0, _vpm->Deflickering(_videoFrame, stats));
ASSERT_EQ(0, _vpm->Deflickering(&_videoFrame, &stats));
_vpm->Reset();
// Retrieve frame stats again in case Deflickering() has zeroed them.
ASSERT_EQ(0, _vpm->GetFrameStats(stats, videoFrame2));
ASSERT_EQ(0, _vpm->Deflickering(videoFrame2, stats));
ASSERT_EQ(0, _vpm->GetFrameStats(&stats, videoFrame2));
ASSERT_EQ(0, _vpm->Deflickering(&videoFrame2, &stats));
EXPECT_EQ(0, memcmp(_videoFrame.Buffer(), videoFrame2.Buffer(),
_frameLength));
ASSERT_EQ(_frameLength, fread(_videoFrame.Buffer(), 1, _frameLength,
_sourceFile));
memcpy(videoFrame2.Buffer(), _videoFrame.Buffer(), _frameLength);
ASSERT_GE(_vpm->Denoising(_videoFrame), 0);
ASSERT_GE(_vpm->Denoising(&_videoFrame), 0);
_vpm->Reset();
ASSERT_GE(_vpm->Denoising(videoFrame2), 0);
ASSERT_GE(_vpm->Denoising(&videoFrame2), 0);
EXPECT_EQ(0, memcmp(_videoFrame.Buffer(), videoFrame2.Buffer(),
_frameLength));
ASSERT_EQ(_frameLength, fread(_videoFrame.Buffer(), 1, _frameLength,
_sourceFile));
ASSERT_EQ(0, _vpm->GetFrameStats(stats, _videoFrame));
ASSERT_EQ(0, _vpm->GetFrameStats(&stats, _videoFrame));
memcpy(videoFrame2.Buffer(), _videoFrame.Buffer(), _frameLength);
ASSERT_EQ(0, _vpm->BrightnessDetection(_videoFrame, stats));
_vpm->Reset();
@@ -210,7 +186,7 @@ TEST_F(VideoProcessingModuleTest, FrameStats)
_sourceFile));
EXPECT_FALSE(_vpm->ValidFrameStats(stats));
EXPECT_EQ(0, _vpm->GetFrameStats(stats, _videoFrame));
EXPECT_EQ(0, _vpm->GetFrameStats(&stats, _videoFrame));
EXPECT_TRUE(_vpm->ValidFrameStats(stats));
printf("\nFrameStats\n");
@@ -222,7 +198,7 @@ TEST_F(VideoProcessingModuleTest, FrameStats)
static_cast<unsigned int>(stats.subSamplWidth),
static_cast<unsigned int>(stats.sum));
_vpm->ClearFrameStats(stats);
_vpm->ClearFrameStats(&stats);
EXPECT_FALSE(_vpm->ValidFrameStats(stats));
}
@@ -239,7 +215,7 @@ TEST_F(VideoProcessingModuleTest, PreprocessorLogic)
_vpm->SetInputFrameResampleMode(kNoRescaling);
ASSERT_EQ(VPM_OK, _vpm->SetTargetResolution(100, 100, 30));
VideoFrame *outFrame = NULL;
ASSERT_EQ(VPM_OK, _vpm->PreprocessFrame(&_videoFrame, &outFrame));
ASSERT_EQ(VPM_OK, _vpm->PreprocessFrame(_videoFrame, &outFrame));
// No rescaling=> output frame = NULL
ASSERT_TRUE(outFrame == NULL);
}
@@ -324,7 +300,7 @@ void TestSize(const VideoFrame& source_frame, int target_width,
VideoFrame* out_frame = NULL;
ASSERT_EQ(VPM_OK, vpm->SetTargetResolution(target_width, target_height, 30));
ASSERT_EQ(VPM_OK, vpm->PreprocessFrame(&source_frame, &out_frame));
ASSERT_EQ(VPM_OK, vpm->PreprocessFrame(source_frame, &out_frame));
// If the frame was resampled (scale changed) then:
// (1) verify the new size and write out processed frame for viewing.
@@ -362,7 +338,7 @@ void TestSize(const VideoFrame& source_frame, int target_width,
ASSERT_EQ(VPM_OK, vpm->SetTargetResolution(source_width,
source_height,
30));
ASSERT_EQ(VPM_OK, vpm->PreprocessFrame(&resampled_source_frame,
ASSERT_EQ(VPM_OK, vpm->PreprocessFrame(resampled_source_frame,
&out_frame));
// Write the processed frame to file for visual inspection.

View File

@@ -567,9 +567,9 @@ bool ViECapturer::ViECaptureProcess() {
void ViECapturer::DeliverI420Frame(VideoFrame* video_frame) {
// Apply image enhancement and effect filter.
if (deflicker_frame_stats_) {
if (image_proc_module_->GetFrameStats(*deflicker_frame_stats_,
if (image_proc_module_->GetFrameStats(deflicker_frame_stats_,
*video_frame) == 0) {
image_proc_module_->Deflickering(*video_frame, *deflicker_frame_stats_);
image_proc_module_->Deflickering(video_frame, deflicker_frame_stats_);
} else {
WEBRTC_TRACE(kTraceStream, kTraceVideo, ViEId(engine_id_, capture_id_),
"%s: could not get frame stats for captured frame",
@@ -577,10 +577,10 @@ void ViECapturer::DeliverI420Frame(VideoFrame* video_frame) {
}
}
if (denoising_enabled_) {
image_proc_module_->Denoising(*video_frame);
image_proc_module_->Denoising(video_frame);
}
if (brightness_frame_stats_) {
if (image_proc_module_->GetFrameStats(*brightness_frame_stats_,
if (image_proc_module_->GetFrameStats(brightness_frame_stats_,
*video_frame) == 0) {
WebRtc_Word32 brightness = image_proc_module_->BrightnessDetection(
*video_frame, *brightness_frame_stats_);

View File

@@ -2096,7 +2096,7 @@ WebRtc_Word32 ViEChannel::FrameToRender(VideoFrame& video_frame) { // NOLINT
video_frame.Height());
}
if (color_enhancement_) {
VideoProcessingModule::ColorEnhancement(video_frame);
VideoProcessingModule::ColorEnhancement(&video_frame);
}
// Record videoframe.

View File

@@ -495,7 +495,7 @@ void ViEEncoder::DeliverFrame(int id,
has_received_rpsi_ = false;
}
VideoFrame* decimated_frame = NULL;
const int ret = vpm_.PreprocessFrame(video_frame, &decimated_frame);
const int ret = vpm_.PreprocessFrame(*video_frame, &decimated_frame);
if (ret == 1) {
// Drop this frame.
return;
@@ -528,7 +528,7 @@
// TODO(mflodman) Rewrite this to use code common to VP8 case.
// Pass frame via preprocessor.
VideoFrame* decimated_frame = NULL;
const int ret = vpm_.PreprocessFrame(video_frame, &decimated_frame);
const int ret = vpm_.PreprocessFrame(*video_frame, &decimated_frame);
if (ret == 1) {
// Drop this frame.
return;