From 54006d1a1b5ac3f31d6f6dd0336239229d666599 Mon Sep 17 00:00:00 2001
From: Dmitry Kovalev
Date: Tue, 22 Apr 2014 17:45:30 -0700
Subject: [PATCH] Adding macro to define vp9_variance_WxH_c() functions.

Change-Id: I983e3831938d4ae055de20b8d257d223e7916b1f
---
 vp9/encoder/vp9_variance.c | 191 +++++--------------------------------
 1 file changed, 23 insertions(+), 168 deletions(-)

diff --git a/vp9/encoder/vp9_variance.c b/vp9/encoder/vp9_variance.c
index 696faf114..1399bfb7e 100644
--- a/vp9/encoder/vp9_variance.c
+++ b/vp9/encoder/vp9_variance.c
@@ -109,17 +109,16 @@ unsigned int vp9_get_mb_ss_c(const int16_t *src_ptr) {
   return sum;
 }
 
-unsigned int vp9_variance64x32_c(const uint8_t *src_ptr,
-                                 int source_stride,
-                                 const uint8_t *ref_ptr,
-                                 int recon_stride,
-                                 unsigned int *sse) {
-  unsigned int var;
-  int avg;
-
-  variance(src_ptr, source_stride, ref_ptr, recon_stride, 64, 32, &var, &avg);
-  *sse = var;
-  return (var - (((int64_t)avg * avg) >> 11));
+#define VAR(W, H) \
+unsigned int vp9_variance##W##x##H##_c(const uint8_t *a, int a_stride, \
+                                       const uint8_t *b, int b_stride, \
+                                       unsigned int *sse) { \
+  unsigned int var; \
+  int avg; \
+\
+  variance(a, a_stride, b, b_stride, W, H, &var, &avg); \
+  *sse = var; \
+  return var - (((int64_t)avg * avg) / (W * H)); \
 }
 
 #define SUBPIX_VAR(W, H) \
@@ -161,175 +160,18 @@ unsigned int vp9_sub_pixel_avg_variance##W##x##H##_c( \
 }
 
-unsigned int vp9_variance32x64_c(const uint8_t *src_ptr,
-                                 int source_stride,
-                                 const uint8_t *ref_ptr,
-                                 int recon_stride,
-                                 unsigned int *sse) {
-  unsigned int var;
-  int avg;
-
-  variance(src_ptr, source_stride, ref_ptr, recon_stride, 32, 64, &var, &avg);
-  *sse = var;
-  return (var - (((int64_t)avg * avg) >> 11));
-}
-
-unsigned int vp9_variance32x16_c(const uint8_t *src_ptr,
-                                 int source_stride,
-                                 const uint8_t *ref_ptr,
-                                 int recon_stride,
-                                 unsigned int *sse) {
-  unsigned int var;
-  int avg;
-
-  variance(src_ptr, source_stride, ref_ptr, recon_stride, 32, 16, &var, &avg);
-  *sse = var;
-  return (var - (((int64_t)avg * avg) >> 9));
-}
-
-unsigned int vp9_variance16x32_c(const uint8_t *src_ptr,
-                                 int source_stride,
-                                 const uint8_t *ref_ptr,
-                                 int recon_stride,
-                                 unsigned int *sse) {
-  unsigned int var;
-  int avg;
-
-  variance(src_ptr, source_stride, ref_ptr, recon_stride, 16, 32, &var, &avg);
-  *sse = var;
-  return (var - (((int64_t)avg * avg) >> 9));
-}
-
-unsigned int vp9_variance64x64_c(const uint8_t *src_ptr,
-                                 int source_stride,
-                                 const uint8_t *ref_ptr,
-                                 int recon_stride,
-                                 unsigned int *sse) {
-  unsigned int var;
-  int avg;
-
-  variance(src_ptr, source_stride, ref_ptr, recon_stride, 64, 64, &var, &avg);
-  *sse = var;
-  return (var - (((int64_t)avg * avg) >> 12));
-}
-
-unsigned int vp9_variance32x32_c(const uint8_t *src_ptr,
-                                 int source_stride,
-                                 const uint8_t *ref_ptr,
-                                 int recon_stride,
-                                 unsigned int *sse) {
-  unsigned int var;
-  int avg;
-
-  variance(src_ptr, source_stride, ref_ptr, recon_stride, 32, 32, &var, &avg);
-  *sse = var;
-  return (var - (((int64_t)avg * avg) >> 10));
-}
-
 void vp9_get_sse_sum_16x16_c(const uint8_t *src_ptr, int source_stride,
                              const uint8_t *ref_ptr, int ref_stride,
                              unsigned int *sse, int *sum) {
   variance(src_ptr, source_stride, ref_ptr, ref_stride, 16, 16, sse, sum);
 }
 
-unsigned int vp9_variance16x16_c(const uint8_t *src_ptr,
-                                 int source_stride,
-                                 const uint8_t *ref_ptr,
-                                 int recon_stride,
-                                 unsigned int *sse) {
-  unsigned int var;
-  int avg;
-
-  variance(src_ptr, source_stride, ref_ptr, recon_stride, 16, 16, &var, &avg);
-  *sse = var;
-  return (var - (((unsigned int)avg * avg) >> 8));
-}
-
-unsigned int vp9_variance8x16_c(const uint8_t *src_ptr,
-                                int source_stride,
-                                const uint8_t *ref_ptr,
-                                int recon_stride,
-                                unsigned int *sse) {
-  unsigned int var;
-  int avg;
-
-  variance(src_ptr, source_stride, ref_ptr, recon_stride, 8, 16, &var, &avg);
-  *sse = var;
-  return (var - (((unsigned int)avg * avg) >> 7));
-}
-
-unsigned int vp9_variance16x8_c(const uint8_t *src_ptr,
-                                int source_stride,
-                                const uint8_t *ref_ptr,
-                                int recon_stride,
-                                unsigned int *sse) {
-  unsigned int var;
-  int avg;
-
-  variance(src_ptr, source_stride, ref_ptr, recon_stride, 16, 8, &var, &avg);
-  *sse = var;
-  return (var - (((unsigned int)avg * avg) >> 7));
-}
-
 void vp9_get_sse_sum_8x8_c(const uint8_t *src_ptr, int source_stride,
                            const uint8_t *ref_ptr, int ref_stride,
                            unsigned int *sse, int *sum) {
   variance(src_ptr, source_stride, ref_ptr, ref_stride, 8, 8, sse, sum);
 }
 
-unsigned int vp9_variance8x8_c(const uint8_t *src_ptr,
-                               int source_stride,
-                               const uint8_t *ref_ptr,
-                               int recon_stride,
-                               unsigned int *sse) {
-  unsigned int var;
-  int avg;
-
-  variance(src_ptr, source_stride, ref_ptr, recon_stride, 8, 8, &var, &avg);
-  *sse = var;
-  return (var - (((unsigned int)avg * avg) >> 6));
-}
-
-unsigned int vp9_variance8x4_c(const uint8_t *src_ptr,
-                               int source_stride,
-                               const uint8_t *ref_ptr,
-                               int recon_stride,
-                               unsigned int *sse) {
-  unsigned int var;
-  int avg;
-
-  variance(src_ptr, source_stride, ref_ptr, recon_stride, 8, 4, &var, &avg);
-  *sse = var;
-  return (var - (((unsigned int)avg * avg) >> 5));
-}
-
-unsigned int vp9_variance4x8_c(const uint8_t *src_ptr,
-                               int source_stride,
-                               const uint8_t *ref_ptr,
-                               int recon_stride,
-                               unsigned int *sse) {
-  unsigned int var;
-  int avg;
-
-  variance(src_ptr, source_stride, ref_ptr, recon_stride, 4, 8, &var, &avg);
-  *sse = var;
-  return (var - (((unsigned int)avg * avg) >> 5));
-}
-
-unsigned int vp9_variance4x4_c(const uint8_t *src_ptr,
-                               int source_stride,
-                               const uint8_t *ref_ptr,
-                               int recon_stride,
-                               unsigned int *sse) {
-  unsigned int var;
-  int avg;
-
-  variance(src_ptr, source_stride, ref_ptr, recon_stride, 4, 4, &var, &avg);
-  *sse = var;
-  return (var - (((unsigned int)avg * avg) >> 4));
-}
-
-
 unsigned int vp9_mse16x16_c(const uint8_t *src_ptr,
                             int source_stride,
                             const uint8_t *ref_ptr,
@@ -382,42 +224,55 @@ unsigned int vp9_mse8x8_c(const uint8_t *src_ptr,
   return var;
 }
 
+VAR(4, 4)
 SUBPIX_VAR(4, 4)
 SUBPIX_AVG_VAR(4, 4)
 
+VAR(4, 8)
 SUBPIX_VAR(4, 8)
 SUBPIX_AVG_VAR(4, 8)
 
+VAR(8, 4)
 SUBPIX_VAR(8, 4)
 SUBPIX_AVG_VAR(8, 4)
 
+VAR(8, 8)
 SUBPIX_VAR(8, 8)
 SUBPIX_AVG_VAR(8, 8)
 
+VAR(8, 16)
 SUBPIX_VAR(8, 16)
 SUBPIX_AVG_VAR(8, 16)
 
+VAR(16, 8)
 SUBPIX_VAR(16, 8)
 SUBPIX_AVG_VAR(16, 8)
 
+VAR(16, 16)
 SUBPIX_VAR(16, 16)
 SUBPIX_AVG_VAR(16, 16)
 
+VAR(16, 32)
 SUBPIX_VAR(16, 32)
 SUBPIX_AVG_VAR(16, 32)
 
+VAR(32, 16)
 SUBPIX_VAR(32, 16)
 SUBPIX_AVG_VAR(32, 16)
 
+VAR(32, 32)
 SUBPIX_VAR(32, 32)
 SUBPIX_AVG_VAR(32, 32)
 
+VAR(32, 64)
 SUBPIX_VAR(32, 64)
 SUBPIX_AVG_VAR(32, 64)
 
+VAR(64, 32)
 SUBPIX_VAR(64, 32)
 SUBPIX_AVG_VAR(64, 32)
 
+VAR(64, 64)
 SUBPIX_VAR(64, 64)
 SUBPIX_AVG_VAR(64, 64)
 
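Note (editorial, not part of the patch): the VAR macro stamps out the same body the removed hand-written functions had, replacing each per-size shift with a division by the block area. Because W and H are powers of two and the squared sum is non-negative, ((int64_t)avg * avg) / (W * H) gives the same result as the old shifts (for example >> 8 for 16x16, since 16 * 16 == 256). As an illustrative sketch, VAR(16, 16) expands to the function below; it relies on the variance() helper already used throughout vp9_variance.c.

/* Hand expansion of VAR(16, 16) -- illustration only, not part of the patch.
 * Assumes the variance() helper defined earlier in vp9_variance.c. */
unsigned int vp9_variance16x16_c(const uint8_t *a, int a_stride,
                                 const uint8_t *b, int b_stride,
                                 unsigned int *sse) {
  unsigned int var;
  int avg;

  /* variance() fills var with the sum of squared differences and avg
   * with the sum of differences over the 16x16 block. */
  variance(a, a_stride, b, b_stride, 16, 16, &var, &avg);
  *sse = var;
  /* 16 * 16 == 256, so this division matches the removed >> 8. */
  return var - (((int64_t)avg * avg) / (16 * 16));
}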