vpx/vp9/common/vp9_entropy.c

/*
* Copyright (c) 2010 The WebM project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include <stdio.h>
#include "vp9/common/vp9_entropy.h"
#include "string.h"
#include "vp9/common/vp9_blockd.h"
#include "vp9/common/vp9_onyxc_int.h"
#include "vp9/common/vp9_entropymode.h"
#include "vpx_mem/vpx_mem.h"
#include "vpx/vpx_integer.h"
#include "vp9/common/vp9_coefupdateprobs.h"
const int vp9_i8x8_block[4] = {0, 2, 8, 10};
DECLARE_ALIGNED(16, const uint8_t, vp9_norm[256]) = {
0, 7, 6, 6, 5, 5, 5, 5, 4, 4, 4, 4, 4, 4, 4, 4,
3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
};
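/* Illustration added for clarity (not part of the original file): vp9_norm[r]
 * is the left shift that brings an 8-bit range value r back into [128, 255],
 * so an arithmetic (bool) decoder can renormalize with one table lookup
 * instead of a loop.  A minimal sketch, assuming the caller refills the
 * value register afterwards: */
#if 0
static void sketch_renormalize(unsigned int *range, unsigned int *value) {
  const int shift = vp9_norm[*range];  /* e.g. vp9_norm[64] == 1          */
  *range <<= shift;                    /* 64 << 1 == 128, back in range   */
  *value <<= shift;                    /* low bits refilled by the caller */
}
#endif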
// Unified coefficient band structure used by all block sizes
DECLARE_ALIGNED(16, const int, vp9_coef_bands8x8[64]) = {
0, 1, 2, 3, 4, 4, 5, 5,
1, 2, 3, 4, 4, 5, 5, 5,
2, 3, 4, 4, 5, 5, 5, 5,
3, 4, 4, 5, 5, 5, 5, 5,
4, 4, 5, 5, 5, 5, 5, 5,
4, 5, 5, 5, 5, 5, 5, 5,
5, 5, 5, 5, 5, 5, 5, 5,
5, 5, 5, 5, 5, 5, 5, 5
};
DECLARE_ALIGNED(16, const int, vp9_coef_bands4x4[16]) = {
0, 1, 2, 3,
1, 2, 3, 4,
2, 3, 4, 5,
3, 4, 5, 5
};
DECLARE_ALIGNED(16, const uint8_t, vp9_pt_energy_class[MAX_ENTROPY_TOKENS]) = {
0, 1, 2, 3, 3, 4, 4, 5, 5, 5, 5, 5
};
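/* Illustration added for clarity (not part of the original file): the energy
 * class table collapses the 12 coefficient tokens into 6 classes.  The sketch
 * below assumes it is used to derive a coefficient context from a previously
 * coded token; that usage is an assumption here, not taken from this file. */
#if 0
static int sketch_token_context(int prev_token) {
  return vp9_pt_energy_class[prev_token];  /* e.g. FOUR_TOKEN maps to class 3 */
}
#endif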
#if CONFIG_SCATTERSCAN
DECLARE_ALIGNED(16, const int, vp9_default_zig_zag1d_4x4[16]) = {
0, 4, 1, 5,
8, 2, 12, 9,
3, 6, 13, 10,
7, 14, 11, 15,
};
DECLARE_ALIGNED(16, const int, vp9_col_scan_4x4[16]) = {
0, 4, 8, 1,
12, 5, 9, 2,
13, 6, 10, 3,
7, 14, 11, 15,
};
DECLARE_ALIGNED(16, const int, vp9_row_scan_4x4[16]) = {
0, 1, 4, 2,
5, 3, 6, 8,
9, 7, 12, 10,
13, 11, 14, 15,
};
DECLARE_ALIGNED(64, const int, vp9_default_zig_zag1d_8x8[64]) = {
0, 8, 1, 16, 9, 2, 17, 24,
10, 3, 18, 25, 32, 11, 4, 26,
33, 19, 40, 12, 34, 27, 5, 41,
20, 48, 13, 35, 42, 28, 21, 6,
49, 56, 36, 43, 29, 7, 14, 50,
57, 44, 22, 37, 15, 51, 58, 30,
45, 23, 52, 59, 38, 31, 60, 53,
46, 39, 61, 54, 47, 62, 55, 63,
};
DECLARE_ALIGNED(16, const int, vp9_col_scan_8x8[64]) = {
0, 8, 16, 1, 24, 9, 32, 17,
2, 40, 25, 10, 33, 18, 48, 3,
26, 41, 11, 56, 19, 34, 4, 49,
27, 42, 12, 35, 20, 57, 50, 28,
5, 43, 13, 36, 58, 51, 21, 44,
6, 29, 59, 37, 14, 52, 22, 7,
45, 60, 30, 15, 38, 53, 23, 46,
31, 61, 39, 54, 47, 62, 55, 63,
};
DECLARE_ALIGNED(16, const int, vp9_row_scan_8x8[64]) = {
0, 1, 2, 8, 9, 3, 16, 10,
4, 17, 11, 24, 5, 18, 25, 12,
19, 26, 32, 6, 13, 20, 33, 27,
7, 34, 40, 21, 28, 41, 14, 35,
48, 42, 29, 36, 49, 22, 43, 15,
56, 37, 50, 44, 30, 57, 23, 51,
58, 45, 38, 52, 31, 59, 53, 46,
60, 39, 61, 47, 54, 55, 62, 63,
};
DECLARE_ALIGNED(16, const int, vp9_default_zig_zag1d_16x16[256]) = {
0, 16, 1, 32, 17, 2, 48, 33, 18, 3, 64, 34, 49, 19, 65, 80,
50, 4, 35, 66, 20, 81, 96, 51, 5, 36, 82, 97, 67, 112, 21, 52,
98, 37, 83, 113, 6, 68, 128, 53, 22, 99, 114, 84, 7, 129, 38, 69,
100, 115, 144, 130, 85, 54, 23, 8, 145, 39, 70, 116, 101, 131, 160, 146,
55, 86, 24, 71, 132, 117, 161, 40, 9, 102, 147, 176, 162, 87, 56, 25,
133, 118, 177, 148, 72, 103, 41, 163, 10, 192, 178, 88, 57, 134, 149, 119,
26, 164, 73, 104, 193, 42, 179, 208, 11, 135, 89, 165, 120, 150, 58, 194,
180, 27, 74, 209, 105, 151, 136, 43, 90, 224, 166, 195, 181, 121, 210, 59,
12, 152, 106, 167, 196, 75, 137, 225, 211, 240, 182, 122, 91, 28, 197, 13,
226, 168, 183, 153, 44, 212, 138, 107, 241, 60, 29, 123, 198, 184, 227, 169,
242, 76, 213, 154, 45, 92, 14, 199, 139, 61, 228, 214, 170, 185, 243, 108,
77, 155, 30, 15, 200, 229, 124, 215, 244, 93, 46, 186, 171, 201, 109, 140,
230, 62, 216, 245, 31, 125, 78, 156, 231, 47, 187, 202, 217, 94, 246, 141,
63, 232, 172, 110, 247, 157, 79, 218, 203, 126, 233, 188, 248, 95, 173, 142,
219, 111, 249, 234, 158, 127, 189, 204, 250, 235, 143, 174, 220, 205, 159, 251,
190, 221, 175, 236, 237, 191, 206, 252, 222, 253, 207, 238, 223, 254, 239, 255,
};
DECLARE_ALIGNED(16, const int, vp9_col_scan_16x16[256]) = {
0, 16, 32, 48, 1, 64, 17, 80, 33, 96, 49, 2, 65, 112, 18, 81,
34, 128, 50, 97, 3, 66, 144, 19, 113, 35, 82, 160, 98, 51, 129, 4,
67, 176, 20, 114, 145, 83, 36, 99, 130, 52, 192, 5, 161, 68, 115, 21,
146, 84, 208, 177, 37, 131, 100, 53, 162, 224, 69, 6, 116, 193, 147, 85,
22, 240, 132, 38, 178, 101, 163, 54, 209, 117, 70, 7, 148, 194, 86, 179,
225, 23, 133, 39, 164, 8, 102, 210, 241, 55, 195, 118, 149, 71, 180, 24,
87, 226, 134, 165, 211, 40, 103, 56, 72, 150, 196, 242, 119, 9, 181, 227,
88, 166, 25, 135, 41, 104, 212, 57, 151, 197, 120, 73, 243, 182, 136, 167,
213, 89, 10, 228, 105, 152, 198, 26, 42, 121, 183, 244, 168, 58, 137, 229,
74, 214, 90, 153, 199, 184, 11, 106, 245, 27, 122, 230, 169, 43, 215, 59,
200, 138, 185, 246, 75, 12, 91, 154, 216, 231, 107, 28, 44, 201, 123, 170,
60, 247, 232, 76, 139, 13, 92, 217, 186, 248, 155, 108, 29, 124, 45, 202,
233, 171, 61, 14, 77, 140, 15, 249, 93, 30, 187, 156, 218, 46, 109, 125,
62, 172, 78, 203, 31, 141, 234, 94, 47, 188, 63, 157, 110, 250, 219, 79,
126, 204, 173, 142, 95, 189, 111, 235, 158, 220, 251, 127, 174, 143, 205, 236,
159, 190, 221, 252, 175, 206, 237, 191, 253, 222, 238, 207, 254, 223, 239, 255,
};
DECLARE_ALIGNED(16, const int, vp9_row_scan_16x16[256]) = {
0, 1, 2, 16, 3, 17, 4, 18, 32, 5, 33, 19, 6, 34, 48, 20,
49, 7, 35, 21, 50, 64, 8, 36, 65, 22, 51, 37, 80, 9, 66, 52,
23, 38, 81, 67, 10, 53, 24, 82, 68, 96, 39, 11, 54, 83, 97, 69,
25, 98, 84, 40, 112, 55, 12, 70, 99, 113, 85, 26, 41, 56, 114, 100,
13, 71, 128, 86, 27, 115, 101, 129, 42, 57, 72, 116, 14, 87, 130, 102,
144, 73, 131, 117, 28, 58, 15, 88, 43, 145, 103, 132, 146, 118, 74, 160,
89, 133, 104, 29, 59, 147, 119, 44, 161, 148, 90, 105, 134, 162, 120, 176,
75, 135, 149, 30, 60, 163, 177, 45, 121, 91, 106, 164, 178, 150, 192, 136,
165, 179, 31, 151, 193, 76, 122, 61, 137, 194, 107, 152, 180, 208, 46, 166,
167, 195, 92, 181, 138, 209, 123, 153, 224, 196, 77, 168, 210, 182, 240, 108,
197, 62, 154, 225, 183, 169, 211, 47, 139, 93, 184, 226, 212, 241, 198, 170,
124, 155, 199, 78, 213, 185, 109, 227, 200, 63, 228, 242, 140, 214, 171, 186,
156, 229, 243, 125, 94, 201, 244, 215, 216, 230, 141, 187, 202, 79, 172, 110,
157, 245, 217, 231, 95, 246, 232, 126, 203, 247, 233, 173, 218, 142, 111, 158,
188, 248, 127, 234, 219, 249, 189, 204, 143, 174, 159, 250, 235, 205, 220, 175,
190, 251, 221, 191, 206, 236, 207, 237, 252, 222, 253, 223, 238, 239, 254, 255,
};
DECLARE_ALIGNED(16, const int, vp9_default_zig_zag1d_32x32[1024]) = {
0, 32, 1, 64, 33, 2, 96, 65, 34, 128, 3, 97, 66, 160, 129, 35, 98, 4, 67, 130, 161, 192, 36, 99, 224, 5, 162, 193, 68, 131, 37, 100,
225, 194, 256, 163, 69, 132, 6, 226, 257, 288, 195, 101, 164, 38, 258, 7, 227, 289, 133, 320, 70, 196, 165, 290, 259, 228, 39, 321, 102, 352, 8, 197,
71, 134, 322, 291, 260, 353, 384, 229, 166, 103, 40, 354, 323, 292, 135, 385, 198, 261, 72, 9, 416, 167, 386, 355, 230, 324, 104, 293, 41, 417, 199, 136,
262, 387, 448, 325, 356, 10, 73, 418, 231, 168, 449, 294, 388, 105, 419, 263, 42, 200, 357, 450, 137, 480, 74, 326, 232, 11, 389, 169, 295, 420, 106, 451,
481, 358, 264, 327, 201, 43, 138, 512, 482, 390, 296, 233, 170, 421, 75, 452, 359, 12, 513, 265, 483, 328, 107, 202, 514, 544, 422, 391, 453, 139, 44, 234,
484, 297, 360, 171, 76, 515, 545, 266, 329, 454, 13, 423, 392, 203, 108, 546, 485, 576, 298, 235, 140, 361, 516, 330, 172, 547, 45, 424, 455, 267, 393, 577,
486, 77, 204, 517, 362, 548, 608, 14, 456, 299, 578, 109, 236, 425, 394, 487, 609, 331, 141, 579, 518, 46, 268, 15, 173, 549, 610, 640, 363, 78, 519, 488,
300, 205, 16, 457, 580, 426, 550, 395, 110, 237, 611, 641, 332, 672, 142, 642, 269, 458, 47, 581, 427, 489, 174, 364, 520, 612, 551, 673, 79, 206, 301, 643,
704, 17, 111, 490, 674, 238, 582, 48, 521, 613, 333, 396, 459, 143, 270, 552, 644, 705, 736, 365, 80, 675, 583, 175, 428, 706, 112, 302, 207, 614, 553, 49,
645, 522, 737, 397, 768, 144, 334, 18, 676, 491, 239, 615, 707, 584, 81, 460, 176, 271, 738, 429, 113, 800, 366, 208, 523, 708, 646, 554, 677, 769, 19, 145,
585, 739, 240, 303, 50, 461, 616, 398, 647, 335, 492, 177, 82, 770, 832, 555, 272, 430, 678, 209, 709, 114, 740, 801, 617, 51, 304, 679, 524, 367, 586, 241,
20, 146, 771, 864, 83, 802, 648, 493, 399, 273, 336, 710, 178, 462, 833, 587, 741, 115, 305, 711, 368, 525, 618, 803, 210, 896, 680, 834, 772, 52, 649, 147,
431, 494, 556, 242, 400, 865, 337, 21, 928, 179, 742, 84, 463, 274, 369, 804, 650, 557, 743, 960, 835, 619, 773, 306, 211, 526, 432, 992, 588, 712, 116, 243,
866, 495, 681, 558, 805, 589, 401, 897, 53, 338, 148, 682, 867, 464, 275, 22, 370, 433, 307, 620, 527, 836, 774, 651, 713, 744, 85, 180, 621, 465, 929, 775,
496, 898, 212, 339, 244, 402, 590, 117, 559, 714, 434, 23, 868, 930, 806, 683, 528, 652, 371, 961, 149, 837, 54, 899, 745, 276, 993, 497, 403, 622, 181, 776,
746, 529, 560, 435, 86, 684, 466, 308, 591, 653, 715, 807, 340, 869, 213, 962, 245, 838, 561, 931, 808, 592, 118, 498, 372, 623, 685, 994, 467, 654, 747, 900,
716, 277, 150, 55, 24, 404, 530, 839, 777, 655, 182, 963, 840, 686, 778, 309, 870, 341, 87, 499, 809, 624, 593, 436, 717, 932, 214, 246, 995, 718, 625, 373,
562, 25, 119, 901, 531, 468, 964, 748, 810, 278, 779, 500, 563, 656, 405, 687, 871, 872, 594, 151, 933, 749, 841, 310, 657, 626, 595, 437, 688, 183, 996, 965,
902, 811, 342, 750, 689, 719, 532, 56, 215, 469, 934, 374, 247, 720, 780, 564, 781, 842, 406, 26, 751, 903, 873, 57, 279, 627, 501, 658, 843, 997, 812, 904,
88, 813, 438, 752, 935, 936, 311, 596, 533, 690, 343, 966, 874, 89, 120, 470, 721, 875, 659, 782, 565, 998, 375, 844, 845, 27, 628, 967, 121, 905, 968, 152,
937, 814, 753, 502, 691, 783, 184, 153, 722, 407, 58, 815, 999, 660, 597, 723, 534, 906, 216, 439, 907, 248, 185, 876, 846, 692, 784, 629, 90, 969, 280, 754,
938, 939, 217, 847, 566, 471, 785, 816, 877, 1000, 249, 878, 661, 503, 312, 970, 755, 122, 817, 281, 344, 786, 598, 724, 28, 59, 29, 154, 535, 630, 376, 1001,
313, 908, 186, 91, 848, 849, 345, 909, 940, 879, 408, 818, 693, 1002, 971, 941, 567, 377, 218, 756, 910, 787, 440, 123, 880, 725, 662, 250, 819, 1003, 282, 972,
850, 599, 472, 409, 155, 441, 942, 757, 788, 694, 911, 881, 314, 631, 973, 504, 187, 1004, 346, 473, 851, 943, 820, 726, 60, 505, 219, 378, 912, 974, 30, 31,
536, 882, 1005, 92, 251, 663, 944, 913, 283, 695, 883, 568, 1006, 975, 410, 442, 945, 789, 852, 537, 1007, 124, 315, 61, 758, 821, 600, 914, 976, 569, 474, 347,
156, 1008, 915, 93, 977, 506, 946, 727, 379, 884, 188, 632, 601, 1009, 790, 853, 978, 947, 220, 411, 125, 633, 664, 759, 252, 443, 916, 538, 157, 822, 62, 570,
979, 284, 1010, 885, 948, 189, 475, 94, 316, 665, 696, 1011, 854, 791, 980, 221, 348, 63, 917, 602, 380, 507, 253, 126, 697, 823, 634, 285, 728, 949, 886, 95,
158, 539, 1012, 317, 412, 444, 760, 571, 190, 981, 729, 918, 127, 666, 349, 381, 476, 855, 761, 1013, 603, 222, 159, 698, 950, 508, 254, 792, 286, 635, 887, 793,
413, 191, 982, 445, 540, 318, 730, 667, 223, 824, 919, 1014, 350, 477, 572, 255, 825, 951, 762, 509, 604, 856, 382, 699, 287, 319, 636, 983, 794, 414, 541, 731,
857, 888, 351, 446, 573, 1015, 668, 889, 478, 826, 383, 763, 605, 920, 510, 637, 415, 700, 921, 858, 447, 952, 542, 795, 479, 953, 732, 890, 669, 574, 511, 984,
827, 985, 922, 1016, 764, 606, 543, 701, 859, 638, 1017, 575, 796, 954, 733, 891, 670, 607, 828, 986, 765, 923, 639, 1018, 702, 860, 955, 671, 892, 734, 797, 703,
987, 829, 1019, 766, 924, 735, 861, 956, 988, 893, 767, 798, 830, 1020, 925, 957, 799, 862, 831, 989, 894, 1021, 863, 926, 895, 958, 990, 1022, 927, 959, 991, 1023,
};
#else // CONFIG_SCATTERSCAN
DECLARE_ALIGNED(16, const int, vp9_default_zig_zag1d_4x4[16]) = {
0, 1, 4, 8,
5, 2, 3, 6,
9, 12, 13, 10,
7, 11, 14, 15,
};
DECLARE_ALIGNED(16, const int, vp9_col_scan_4x4[16]) = {
0, 4, 8, 12,
1, 5, 9, 13,
2, 6, 10, 14,
3, 7, 11, 15
};
DECLARE_ALIGNED(16, const int, vp9_row_scan_4x4[16]) = {
0, 1, 2, 3,
4, 5, 6, 7,
8, 9, 10, 11,
12, 13, 14, 15
};
DECLARE_ALIGNED(64, const int, vp9_default_zig_zag1d_8x8[64]) = {
0, 1, 8, 16, 9, 2, 3, 10, 17, 24, 32, 25, 18, 11, 4, 5,
12, 19, 26, 33, 40, 48, 41, 34, 27, 20, 13, 6, 7, 14, 21, 28,
35, 42, 49, 56, 57, 50, 43, 36, 29, 22, 15, 23, 30, 37, 44, 51,
58, 59, 52, 45, 38, 31, 39, 46, 53, 60, 61, 54, 47, 55, 62, 63,
};
DECLARE_ALIGNED(16, const int, vp9_col_scan_8x8[64]) = {
0, 8, 16, 24, 32, 40, 48, 56,
1, 9, 17, 25, 33, 41, 49, 57,
2, 10, 18, 26, 34, 42, 50, 58,
3, 11, 19, 27, 35, 43, 51, 59,
4, 12, 20, 28, 36, 44, 52, 60,
5, 13, 21, 29, 37, 45, 53, 61,
6, 14, 22, 30, 38, 46, 54, 62,
7, 15, 23, 31, 39, 47, 55, 63,
};
DECLARE_ALIGNED(16, const int, vp9_row_scan_8x8[64]) = {
0, 1, 2, 3, 4, 5, 6, 7,
8, 9, 10, 11, 12, 13, 14, 15,
16, 17, 18, 19, 20, 21, 22, 23,
24, 25, 26, 27, 28, 29, 30, 31,
32, 33, 34, 35, 36, 37, 38, 39,
40, 41, 42, 43, 44, 45, 46, 47,
48, 49, 50, 51, 52, 53, 54, 55,
56, 57, 58, 59, 60, 61, 62, 63,
};
DECLARE_ALIGNED(16, const int, vp9_default_zig_zag1d_16x16[256]) = {
0, 1, 16, 32, 17, 2, 3, 18,
33, 48, 64, 49, 34, 19, 4, 5,
20, 35, 50, 65, 80, 96, 81, 66,
51, 36, 21, 6, 7, 22, 37, 52,
67, 82, 97, 112, 128, 113, 98, 83,
68, 53, 38, 23, 8, 9, 24, 39,
54, 69, 84, 99, 114, 129, 144, 160,
145, 130, 115, 100, 85, 70, 55, 40,
25, 10, 11, 26, 41, 56, 71, 86,
101, 116, 131, 146, 161, 176, 192, 177,
162, 147, 132, 117, 102, 87, 72, 57,
42, 27, 12, 13, 28, 43, 58, 73,
88, 103, 118, 133, 148, 163, 178, 193,
208, 224, 209, 194, 179, 164, 149, 134,
119, 104, 89, 74, 59, 44, 29, 14,
15, 30, 45, 60, 75, 90, 105, 120,
135, 150, 165, 180, 195, 210, 225, 240,
241, 226, 211, 196, 181, 166, 151, 136,
121, 106, 91, 76, 61, 46, 31, 47,
62, 77, 92, 107, 122, 137, 152, 167,
182, 197, 212, 227, 242, 243, 228, 213,
198, 183, 168, 153, 138, 123, 108, 93,
78, 63, 79, 94, 109, 124, 139, 154,
169, 184, 199, 214, 229, 244, 245, 230,
215, 200, 185, 170, 155, 140, 125, 110,
95, 111, 126, 141, 156, 171, 186, 201,
216, 231, 246, 247, 232, 217, 202, 187,
172, 157, 142, 127, 143, 158, 173, 188,
203, 218, 233, 248, 249, 234, 219, 204,
189, 174, 159, 175, 190, 205, 220, 235,
250, 251, 236, 221, 206, 191, 207, 222,
237, 252, 253, 238, 223, 239, 254, 255,
};
DECLARE_ALIGNED(16, const int, vp9_col_scan_16x16[256]) = {
0, 16, 32, 48, 64, 80, 96, 112, 128, 144, 160, 176, 192, 208, 224, 240,
1, 17, 33, 49, 65, 81, 97, 113, 129, 145, 161, 177, 193, 209, 225, 241,
2, 18, 34, 50, 66, 82, 98, 114, 130, 146, 162, 178, 194, 210, 226, 242,
3, 19, 35, 51, 67, 83, 99, 115, 131, 147, 163, 179, 195, 211, 227, 243,
4, 20, 36, 52, 68, 84, 100, 116, 132, 148, 164, 180, 196, 212, 228, 244,
5, 21, 37, 53, 69, 85, 101, 117, 133, 149, 165, 181, 197, 213, 229, 245,
6, 22, 38, 54, 70, 86, 102, 118, 134, 150, 166, 182, 198, 214, 230, 246,
7, 23, 39, 55, 71, 87, 103, 119, 135, 151, 167, 183, 199, 215, 231, 247,
8, 24, 40, 56, 72, 88, 104, 120, 136, 152, 168, 184, 200, 216, 232, 248,
9, 25, 41, 57, 73, 89, 105, 121, 137, 153, 169, 185, 201, 217, 233, 249,
10, 26, 42, 58, 74, 90, 106, 122, 138, 154, 170, 186, 202, 218, 234, 250,
11, 27, 43, 59, 75, 91, 107, 123, 139, 155, 171, 187, 203, 219, 235, 251,
12, 28, 44, 60, 76, 92, 108, 124, 140, 156, 172, 188, 204, 220, 236, 252,
13, 29, 45, 61, 77, 93, 109, 125, 141, 157, 173, 189, 205, 221, 237, 253,
14, 30, 46, 62, 78, 94, 110, 126, 142, 158, 174, 190, 206, 222, 238, 254,
15, 31, 47, 63, 79, 95, 111, 127, 143, 159, 175, 191, 207, 223, 239, 255,
};
DECLARE_ALIGNED(16, const int, vp9_row_scan_16x16[256]) = {
0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15,
16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31,
32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47,
48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63,
64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79,
80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95,
96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111,
112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127,
128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143,
144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159,
160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175,
176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191,
192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207,
208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223,
224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239,
240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255,
};
DECLARE_ALIGNED(16, const int, vp9_default_zig_zag1d_32x32[1024]) = {
0, 1, 32, 64, 33, 2, 3, 34, 65, 96, 128, 97, 66, 35, 4, 5, 36, 67, 98, 129, 160, 192, 161, 130, 99, 68, 37, 6, 7, 38, 69, 100,
131, 162, 193, 224, 256, 225, 194, 163, 132, 101, 70, 39, 8, 9, 40, 71, 102, 133, 164, 195, 226, 257, 288, 320, 289, 258, 227, 196, 165, 134, 103, 72,
41, 10, 11, 42, 73, 104, 135, 166, 197, 228, 259, 290, 321, 352, 384, 353, 322, 291, 260, 229, 198, 167, 136, 105, 74, 43, 12, 13, 44, 75, 106, 137,
168, 199, 230, 261, 292, 323, 354, 385, 416, 448, 417, 386, 355, 324, 293, 262, 231, 200, 169, 138, 107, 76, 45, 14, 15, 46, 77, 108, 139, 170, 201, 232,
263, 294, 325, 356, 387, 418, 449, 480, 512, 481, 450, 419, 388, 357, 326, 295, 264, 233, 202, 171, 140, 109, 78, 47, 16, 17, 48, 79, 110, 141, 172, 203,
234, 265, 296, 327, 358, 389, 420, 451, 482, 513, 544, 576, 545, 514, 483, 452, 421, 390, 359, 328, 297, 266, 235, 204, 173, 142, 111, 80, 49, 18, 19, 50,
81, 112, 143, 174, 205, 236, 267, 298, 329, 360, 391, 422, 453, 484, 515, 546, 577, 608, 640, 609, 578, 547, 516, 485, 454, 423, 392, 361, 330, 299, 268, 237,
206, 175, 144, 113, 82, 51, 20, 21, 52, 83, 114, 145, 176, 207, 238, 269, 300, 331, 362, 393, 424, 455, 486, 517, 548, 579, 610, 641, 672, 704, 673, 642,
611, 580, 549, 518, 487, 456, 425, 394, 363, 332, 301, 270, 239, 208, 177, 146, 115, 84, 53, 22, 23, 54, 85, 116, 147, 178, 209, 240, 271, 302, 333, 364,
395, 426, 457, 488, 519, 550, 581, 612, 643, 674, 705, 736, 768, 737, 706, 675, 644, 613, 582, 551, 520, 489, 458, 427, 396, 365, 334, 303, 272, 241, 210, 179,
148, 117, 86, 55, 24, 25, 56, 87, 118, 149, 180, 211, 242, 273, 304, 335, 366, 397, 428, 459, 490, 521, 552, 583, 614, 645, 676, 707, 738, 769, 800, 832,
801, 770, 739, 708, 677, 646, 615, 584, 553, 522, 491, 460, 429, 398, 367, 336, 305, 274, 243, 212, 181, 150, 119, 88, 57, 26, 27, 58, 89, 120, 151, 182,
213, 244, 275, 306, 337, 368, 399, 430, 461, 492, 523, 554, 585, 616, 647, 678, 709, 740, 771, 802, 833, 864, 896, 865, 834, 803, 772, 741, 710, 679, 648, 617,
586, 555, 524, 493, 462, 431, 400, 369, 338, 307, 276, 245, 214, 183, 152, 121, 90, 59, 28, 29, 60, 91, 122, 153, 184, 215, 246, 277, 308, 339, 370, 401,
432, 463, 494, 525, 556, 587, 618, 649, 680, 711, 742, 773, 804, 835, 866, 897, 928, 960, 929, 898, 867, 836, 805, 774, 743, 712, 681, 650, 619, 588, 557, 526,
495, 464, 433, 402, 371, 340, 309, 278, 247, 216, 185, 154, 123, 92, 61, 30, 31, 62, 93, 124, 155, 186, 217, 248, 279, 310, 341, 372, 403, 434, 465, 496,
527, 558, 589, 620, 651, 682, 713, 744, 775, 806, 837, 868, 899, 930, 961, 992, 993, 962, 931, 900, 869, 838, 807, 776, 745, 714, 683, 652, 621, 590, 559, 528,
497, 466, 435, 404, 373, 342, 311, 280, 249, 218, 187, 156, 125, 94, 63, 95, 126, 157, 188, 219, 250, 281, 312, 343, 374, 405, 436, 467, 498, 529, 560, 591,
622, 653, 684, 715, 746, 777, 808, 839, 870, 901, 932, 963, 994, 995, 964, 933, 902, 871, 840, 809, 778, 747, 716, 685, 654, 623, 592, 561, 530, 499, 468, 437,
406, 375, 344, 313, 282, 251, 220, 189, 158, 127, 159, 190, 221, 252, 283, 314, 345, 376, 407, 438, 469, 500, 531, 562, 593, 624, 655, 686, 717, 748, 779, 810,
841, 872, 903, 934, 965, 996, 997, 966, 935, 904, 873, 842, 811, 780, 749, 718, 687, 656, 625, 594, 563, 532, 501, 470, 439, 408, 377, 346, 315, 284, 253, 222,
191, 223, 254, 285, 316, 347, 378, 409, 440, 471, 502, 533, 564, 595, 626, 657, 688, 719, 750, 781, 812, 843, 874, 905, 936, 967, 998, 999, 968, 937, 906, 875,
844, 813, 782, 751, 720, 689, 658, 627, 596, 565, 534, 503, 472, 441, 410, 379, 348, 317, 286, 255, 287, 318, 349, 380, 411, 442, 473, 504, 535, 566, 597, 628,
659, 690, 721, 752, 783, 814, 845, 876, 907, 938, 969, 1000, 1001, 970, 939, 908, 877, 846, 815, 784, 753, 722, 691, 660, 629, 598, 567, 536, 505, 474, 443, 412,
381, 350, 319, 351, 382, 413, 444, 475, 506, 537, 568, 599, 630, 661, 692, 723, 754, 785, 816, 847, 878, 909, 940, 971, 1002, 1003, 972, 941, 910, 879, 848, 817,
786, 755, 724, 693, 662, 631, 600, 569, 538, 507, 476, 445, 414, 383, 415, 446, 477, 508, 539, 570, 601, 632, 663, 694, 725, 756, 787, 818, 849, 880, 911, 942,
973, 1004, 1005, 974, 943, 912, 881, 850, 819, 788, 757, 726, 695, 664, 633, 602, 571, 540, 509, 478, 447, 479, 510, 541, 572, 603, 634, 665, 696, 727, 758, 789,
820, 851, 882, 913, 944, 975, 1006, 1007, 976, 945, 914, 883, 852, 821, 790, 759, 728, 697, 666, 635, 604, 573, 542, 511, 543, 574, 605, 636, 667, 698, 729, 760,
791, 822, 853, 884, 915, 946, 977, 1008, 1009, 978, 947, 916, 885, 854, 823, 792, 761, 730, 699, 668, 637, 606, 575, 607, 638, 669, 700, 731, 762, 793, 824, 855,
886, 917, 948, 979, 1010, 1011, 980, 949, 918, 887, 856, 825, 794, 763, 732, 701, 670, 639, 671, 702, 733, 764, 795, 826, 857, 888, 919, 950, 981, 1012, 1013, 982,
951, 920, 889, 858, 827, 796, 765, 734, 703, 735, 766, 797, 828, 859, 890, 921, 952, 983, 1014, 1015, 984, 953, 922, 891, 860, 829, 798, 767, 799, 830, 861, 892,
923, 954, 985, 1016, 1017, 986, 955, 924, 893, 862, 831, 863, 894, 925, 956, 987, 1018, 1019, 988, 957, 926, 895, 927, 958, 989, 1020, 1021, 990, 959, 991, 1022, 1023,
};
#endif // CONFIG_SCATTERSCAN
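/* Illustration added for clarity (not part of the original file): each scan
 * table maps the n-th coefficient in coding order to its raster index inside
 * the block.  A minimal tokenizer-style walk over a 4x4 block, with qcoeff
 * and eob as hypothetical inputs: */
#if 0
static void sketch_walk_scan_4x4(const int16_t *qcoeff, int eob) {
  int n;
  for (n = 0; n < eob; ++n) {
    const int rc = vp9_default_zig_zag1d_4x4[n];  /* raster index in block */
    const int row = rc >> 2, col = rc & 3;        /* 2-D position          */
    (void)row; (void)col; (void)qcoeff[rc];       /* token would be emitted here */
  }
}
#endif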
/* Array indices are identical to previously-existing CONTEXT_NODE indices */
const vp9_tree_index vp9_coef_tree[ 22] = /* corresponding _CONTEXT_NODEs */
2010-05-18 17:58:33 +02:00
{
-DCT_EOB_TOKEN, 2, /* 0 = EOB */
-ZERO_TOKEN, 4, /* 1 = ZERO */
-ONE_TOKEN, 6, /* 2 = ONE */
8, 12, /* 3 = LOW_VAL */
-TWO_TOKEN, 10, /* 4 = TWO */
-THREE_TOKEN, -FOUR_TOKEN, /* 5 = THREE */
14, 16, /* 6 = HIGH_LOW */
-DCT_VAL_CATEGORY1, -DCT_VAL_CATEGORY2, /* 7 = CAT_ONE */
18, 20, /* 8 = CAT_THREEFOUR */
-DCT_VAL_CATEGORY3, -DCT_VAL_CATEGORY4, /* 9 = CAT_THREE */
-DCT_VAL_CATEGORY5, -DCT_VAL_CATEGORY6 /* 10 = CAT_FIVE */
};
struct vp9_token_struct vp9_coef_encodings[MAX_ENTROPY_TOKENS];
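/* Illustration added for clarity (not part of the original file): a
 * vp9_tree_index array encodes a binary tree in which non-negative entries
 * index the next node pair and negative entries are negated leaf (token)
 * values.  A minimal sketch of token decoding, with read_bool() standing in
 * for the real arithmetic-decoder call: */
#if 0
static int sketch_read_token(void *r, const vp9_tree_index *tree,
                             const vp9_prob *probs) {
  vp9_tree_index i = 0;
  do {
    i = tree[i + read_bool(r, probs[i >> 1])];
  } while (i > 0);
  return -i;  /* e.g. DCT_EOB_TOKEN, ZERO_TOKEN, ONE_TOKEN, ... */
}
#endif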
/* Trees for extra bits. Probabilities are constant and
do not depend on previously encoded bits */
static const vp9_prob Pcat1[] = { 159};
static const vp9_prob Pcat2[] = { 165, 145};
static const vp9_prob Pcat3[] = { 173, 148, 140};
static const vp9_prob Pcat4[] = { 176, 155, 140, 135};
static const vp9_prob Pcat5[] = { 180, 157, 141, 134, 130};
static const vp9_prob Pcat6[] = {
254, 254, 254, 252, 249, 243, 230, 196, 177, 153, 140, 133, 130, 129
};
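/* Illustration added for clarity (not part of the original file): the extra
 * bits of a high-value category token are coded one at a time, each with its
 * own constant probability from the Pcat table for that category (Pcat6
 * carries 14 extra bits), and the result is added to the category's base
 * value.  A minimal sketch, with read_bool() and base_val as stand-ins: */
#if 0
static int sketch_read_cat_extra(void *r, const vp9_prob *probs, int bits,
                                 int base_val) {
  int v = 0;
  do {
    v = (v << 1) | read_bool(r, *probs++);  /* most-significant bit first */
  } while (--bits);
  return base_val + v;
}
#endif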
#if CONFIG_CODE_NONZEROCOUNT
const vp9_tree_index vp9_nzc4x4_tree[2 * NZC4X4_NODES] = {
-NZC_0, 2,
4, 6,
-NZC_1, -NZC_2,
-NZC_3TO4, 8,
-NZC_5TO8, -NZC_9TO16,
};
struct vp9_token_struct vp9_nzc4x4_encodings[NZC4X4_TOKENS];
const vp9_tree_index vp9_nzc8x8_tree[2 * NZC8X8_NODES] = {
-NZC_0, 2,
4, 6,
-NZC_1, -NZC_2,
8, 10,
-NZC_3TO4, -NZC_5TO8,
-NZC_9TO16, 12,
-NZC_17TO32, -NZC_33TO64,
};
struct vp9_token_struct vp9_nzc8x8_encodings[NZC8X8_TOKENS];
const vp9_tree_index vp9_nzc16x16_tree[2 * NZC16X16_NODES] = {
-NZC_0, 2,
4, 6,
-NZC_1, -NZC_2,
8, 10,
-NZC_3TO4, -NZC_5TO8,
12, 14,
-NZC_9TO16, -NZC_17TO32,
-NZC_33TO64, 16,
-NZC_65TO128, -NZC_129TO256,
};
struct vp9_token_struct vp9_nzc16x16_encodings[NZC16X16_TOKENS];
const vp9_tree_index vp9_nzc32x32_tree[2 * NZC32X32_NODES] = {
-NZC_0, 2,
4, 6,
-NZC_1, -NZC_2,
8, 10,
-NZC_3TO4, -NZC_5TO8,
12, 14,
-NZC_9TO16, -NZC_17TO32,
16, 18,
-NZC_33TO64, -NZC_65TO128,
-NZC_129TO256, 20,
-NZC_257TO512, -NZC_513TO1024,
};
struct vp9_token_struct vp9_nzc32x32_encodings[NZC32X32_TOKENS];
const int vp9_extranzcbits[NZC32X32_TOKENS] = {
0, 0, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9
};
const int vp9_basenzcvalue[NZC32X32_TOKENS] = {
0, 1, 2, 3, 5, 9, 17, 33, 65, 129, 257, 513
};
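/* Illustration added for clarity (not part of the original file): a coded
 * nonzero count is the token's base value plus its extra bits, so NZC_5TO8
 * (base 5, 2 extra bits) covers counts 5..8.  The bit order and the uniform
 * probability below are illustrative assumptions, as is read_bool(). */
#if 0
static int sketch_read_nzc(void *r, int tok) {
  int bits = vp9_extranzcbits[tok];
  int nzc = vp9_basenzcvalue[tok];
  while (bits--)
    nzc += read_bool(r, 128) << bits;
  return nzc;
}
#endif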
#endif // CONFIG_CODE_NONZEROCOUNT
#if CONFIG_MODELCOEFPROB
const vp9_prob vp9_modelcoefprobs_gg875[COEFPROB_MODELS][ENTROPY_NODES - 1] = {
// Probs generated with a Generalized Gaussian (with shape parameter 0.875)
// source model with varying quantizer step size for a uniform quantizer
{0, 0, 0, 0, 0, 0, 0, 0, 0, 0,}, // do not use
{1, 2, 6, 86, 129, 11, 87, 42, 92, 52,},
{2, 4, 12, 87, 129, 22, 89, 75, 97, 91,},
{3, 6, 17, 88, 130, 32, 90, 102, 102, 121,},
{4, 8, 22, 89, 131, 41, 91, 125, 107, 145,},
{5, 10, 28, 90, 131, 50, 93, 144, 112, 164,},
{6, 12, 33, 90, 132, 59, 94, 160, 117, 180,},
{7, 14, 38, 91, 132, 67, 95, 173, 122, 193,},
{8, 15, 42, 92, 133, 75, 97, 185, 126, 204,},
{9, 17, 47, 92, 133, 82, 98, 195, 131, 212,},
{10, 19, 52, 93, 134, 89, 99, 203, 135, 220,},
{11, 21, 56, 94, 134, 96, 101, 211, 140, 226,},
{12, 23, 60, 95, 135, 102, 102, 217, 144, 231,},
{13, 25, 65, 95, 135, 109, 103, 222, 148, 235,},
{14, 26, 69, 96, 136, 115, 105, 227, 153, 238,},
{15, 28, 73, 97, 136, 120, 106, 231, 157, 241,},
{16, 30, 77, 97, 137, 126, 107, 234, 161, 244,},
{17, 32, 81, 98, 138, 131, 108, 237, 164, 246,},
{18, 34, 85, 99, 138, 136, 110, 240, 168, 247,},
{19, 35, 89, 100, 139, 141, 111, 242, 172, 249,},
{20, 37, 92, 100, 139, 145, 112, 244, 175, 250,},
{21, 39, 96, 101, 140, 150, 113, 246, 179, 251,},
{22, 41, 99, 102, 140, 154, 115, 247, 182, 252,},
{23, 42, 103, 102, 141, 158, 116, 248, 185, 252,},
{24, 44, 106, 103, 141, 162, 117, 249, 188, 253,},
{25, 46, 110, 104, 142, 166, 118, 250, 191, 253,},
{26, 48, 113, 104, 142, 170, 120, 251, 194, 254,},
{27, 49, 116, 105, 143, 173, 121, 252, 197, 254,},
{28, 51, 119, 106, 143, 176, 122, 252, 200, 254,},
{29, 53, 122, 107, 144, 180, 123, 253, 202, 255,},
{30, 54, 125, 107, 144, 183, 125, 253, 205, 255,},
{31, 56, 128, 108, 145, 186, 126, 254, 207, 255,},
{32, 58, 131, 109, 145, 189, 127, 254, 209, 255,},
{33, 59, 134, 109, 146, 191, 128, 254, 212, 255,},
{34, 61, 137, 110, 146, 194, 130, 254, 214, 255,},
{35, 62, 139, 111, 147, 196, 131, 255, 216, 255,},
{36, 64, 142, 112, 147, 199, 132, 255, 218, 255,},
{37, 66, 145, 112, 148, 201, 134, 255, 220, 255,},
{38, 67, 147, 113, 148, 203, 135, 255, 221, 255,},
{39, 69, 150, 114, 149, 206, 136, 255, 223, 255,},
{40, 70, 152, 114, 149, 208, 137, 255, 225, 255,},
{41, 72, 155, 115, 150, 210, 138, 255, 226, 255,},
{42, 74, 157, 116, 150, 212, 140, 255, 228, 255,},
{43, 75, 159, 117, 151, 213, 141, 255, 229, 255,},
{44, 77, 161, 117, 151, 215, 142, 255, 230, 255,},
{45, 78, 164, 118, 152, 217, 143, 255, 232, 255,},
{46, 80, 166, 119, 152, 219, 145, 255, 233, 255,},
{47, 81, 168, 120, 153, 220, 146, 255, 234, 255,},
{48, 83, 170, 120, 153, 222, 147, 255, 235, 255,},
{49, 84, 172, 121, 154, 223, 148, 255, 236, 255,},
{50, 86, 174, 122, 154, 225, 150, 255, 237, 255,},
{51, 87, 176, 123, 155, 226, 151, 255, 238, 255,},
{52, 89, 178, 123, 155, 227, 152, 255, 239, 255,},
{53, 90, 180, 124, 156, 228, 153, 255, 240, 255,},
{54, 92, 182, 125, 156, 230, 154, 255, 241, 255,},
{55, 93, 183, 126, 157, 231, 156, 255, 242, 255,},
{56, 95, 185, 126, 157, 232, 157, 255, 242, 255,},
{57, 96, 187, 127, 158, 233, 158, 255, 243, 255,},
{58, 98, 189, 128, 158, 234, 159, 255, 244, 255,},
{59, 99, 190, 129, 159, 235, 160, 255, 244, 255,},
{60, 101, 192, 129, 159, 236, 162, 255, 245, 255,},
{61, 102, 193, 130, 160, 237, 163, 255, 246, 255,},
{62, 104, 195, 131, 160, 238, 164, 255, 246, 255,},
{63, 105, 197, 132, 161, 238, 165, 255, 247, 255,},
{64, 106, 198, 132, 162, 239, 166, 255, 247, 255,},
{65, 108, 199, 133, 162, 240, 167, 255, 248, 255,},
{66, 109, 201, 134, 163, 241, 169, 255, 248, 255,},
{67, 111, 202, 135, 163, 241, 170, 255, 249, 255,},
{68, 112, 204, 135, 164, 242, 171, 255, 249, 255,},
{69, 113, 205, 136, 164, 243, 172, 255, 249, 255,},
{70, 115, 206, 137, 165, 243, 173, 255, 250, 255,},
{71, 116, 208, 138, 165, 244, 174, 255, 250, 255,},
{72, 117, 209, 138, 166, 244, 175, 255, 250, 255,},
{73, 119, 210, 139, 166, 245, 177, 255, 251, 255,},
{74, 120, 211, 140, 167, 245, 178, 255, 251, 255,},
{75, 121, 212, 141, 167, 246, 179, 255, 251, 255,},
{76, 123, 214, 142, 168, 246, 180, 255, 252, 255,},
{77, 124, 215, 142, 168, 247, 181, 255, 252, 255,},
{78, 125, 216, 143, 169, 247, 182, 255, 252, 255,},
{79, 127, 217, 144, 170, 248, 183, 255, 252, 255,},
{80, 128, 218, 145, 170, 248, 184, 255, 253, 255,},
{81, 129, 219, 146, 171, 248, 185, 255, 253, 255,},
{82, 131, 220, 146, 171, 249, 186, 255, 253, 255,},
{83, 132, 221, 147, 172, 249, 187, 255, 253, 255,},
{84, 133, 222, 148, 172, 249, 188, 255, 253, 255,},
{85, 134, 223, 149, 173, 250, 189, 255, 253, 255,},
{86, 136, 224, 149, 173, 250, 190, 255, 254, 255,},
{87, 137, 225, 150, 174, 250, 191, 255, 254, 255,},
{88, 138, 226, 151, 174, 251, 192, 255, 254, 255,},
{89, 139, 226, 152, 175, 251, 193, 255, 254, 255,},
{90, 141, 227, 153, 175, 251, 194, 255, 254, 255,},
{91, 142, 228, 153, 176, 251, 195, 255, 254, 255,},
{92, 143, 229, 154, 177, 252, 196, 255, 254, 255,},
{93, 144, 230, 155, 177, 252, 197, 255, 254, 255,},
{94, 146, 230, 156, 178, 252, 198, 255, 255, 255,},
{95, 147, 231, 157, 178, 252, 199, 255, 255, 255,},
{96, 148, 232, 157, 179, 252, 200, 255, 255, 255,},
{97, 149, 233, 158, 179, 253, 201, 255, 255, 255,},
{98, 150, 233, 159, 180, 253, 202, 255, 255, 255,},
{99, 152, 234, 160, 180, 253, 203, 255, 255, 255,},
{100, 153, 235, 161, 181, 253, 204, 255, 255, 255,},
{101, 154, 235, 161, 182, 253, 205, 255, 255, 255,},
{102, 155, 236, 162, 182, 253, 206, 255, 255, 255,},
{103, 156, 236, 163, 183, 254, 207, 255, 255, 255,},
{104, 157, 237, 164, 183, 254, 207, 255, 255, 255,},
{105, 159, 238, 165, 184, 254, 208, 255, 255, 255,},
{106, 160, 238, 166, 184, 254, 209, 255, 255, 255,},
{107, 161, 239, 166, 185, 254, 210, 255, 255, 255,},
{108, 162, 239, 167, 185, 254, 211, 255, 255, 255,},
{109, 163, 240, 168, 186, 254, 212, 255, 255, 255,},
{110, 164, 240, 169, 187, 254, 212, 255, 255, 255,},
{111, 165, 241, 170, 187, 254, 213, 255, 255, 255,},
{112, 166, 241, 170, 188, 255, 214, 255, 255, 255,},
{113, 167, 242, 171, 188, 255, 215, 255, 255, 255,},
{114, 169, 242, 172, 189, 255, 216, 255, 255, 255,},
{115, 170, 243, 173, 189, 255, 216, 255, 255, 255,},
{116, 171, 243, 174, 190, 255, 217, 255, 255, 255,},
{117, 172, 244, 174, 190, 255, 218, 255, 255, 255,},
{118, 173, 244, 175, 191, 255, 219, 255, 255, 255,},
{119, 174, 244, 176, 192, 255, 219, 255, 255, 255,},
{120, 175, 245, 177, 192, 255, 220, 255, 255, 255,},
{121, 176, 245, 178, 193, 255, 221, 255, 255, 255,},
{122, 177, 245, 178, 193, 255, 222, 255, 255, 255,},
{123, 178, 246, 179, 194, 255, 222, 255, 255, 255,},
{124, 179, 246, 180, 194, 255, 223, 255, 255, 255,},
{125, 180, 247, 181, 195, 255, 224, 255, 255, 255,},
{126, 181, 247, 182, 196, 255, 224, 255, 255, 255,},
{127, 182, 247, 182, 196, 255, 225, 255, 255, 255,},
{128, 183, 247, 183, 197, 255, 226, 255, 255, 255,},
{129, 184, 248, 184, 197, 255, 226, 255, 255, 255,},
{130, 185, 248, 185, 198, 255, 227, 255, 255, 255,},
{131, 186, 248, 186, 198, 255, 228, 255, 255, 255,},
{132, 187, 249, 186, 199, 255, 228, 255, 255, 255,},
{133, 188, 249, 187, 200, 255, 229, 255, 255, 255,},
{134, 189, 249, 188, 200, 255, 230, 255, 255, 255,},
{135, 190, 249, 189, 201, 255, 230, 255, 255, 255,},
{136, 191, 250, 190, 201, 255, 231, 255, 255, 255,},
{137, 192, 250, 190, 202, 255, 231, 255, 255, 255,},
{138, 193, 250, 191, 202, 255, 232, 255, 255, 255,},
{139, 194, 250, 192, 203, 255, 232, 255, 255, 255,},
{140, 195, 251, 193, 204, 255, 233, 255, 255, 255,},
{141, 195, 251, 194, 204, 255, 234, 255, 255, 255,},
{142, 196, 251, 194, 205, 255, 234, 255, 255, 255,},
{143, 197, 251, 195, 205, 255, 235, 255, 255, 255,},
{144, 198, 251, 196, 206, 255, 235, 255, 255, 255,},
{145, 199, 252, 197, 206, 255, 236, 255, 255, 255,},
{146, 200, 252, 197, 207, 255, 236, 255, 255, 255,},
{147, 201, 252, 198, 208, 255, 237, 255, 255, 255,},
{148, 202, 252, 199, 208, 255, 237, 255, 255, 255,},
{149, 203, 252, 200, 209, 255, 238, 255, 255, 255,},
{150, 203, 252, 201, 209, 255, 238, 255, 255, 255,},
{151, 204, 253, 201, 210, 255, 239, 255, 255, 255,},
{152, 205, 253, 202, 210, 255, 239, 255, 255, 255,},
{153, 206, 253, 203, 211, 255, 239, 255, 255, 255,},
{154, 207, 253, 204, 212, 255, 240, 255, 255, 255,},
{155, 208, 253, 204, 212, 255, 240, 255, 255, 255,},
{156, 209, 253, 205, 213, 255, 241, 255, 255, 255,},
{157, 209, 253, 206, 213, 255, 241, 255, 255, 255,},
{158, 210, 254, 207, 214, 255, 242, 255, 255, 255,},
{159, 211, 254, 207, 214, 255, 242, 255, 255, 255,},
{160, 212, 254, 208, 215, 255, 242, 255, 255, 255,},
{161, 213, 254, 209, 215, 255, 243, 255, 255, 255,},
{162, 213, 254, 210, 216, 255, 243, 255, 255, 255,},
{163, 214, 254, 210, 217, 255, 244, 255, 255, 255,},
{164, 215, 254, 211, 217, 255, 244, 255, 255, 255,},
{165, 216, 254, 212, 218, 255, 244, 255, 255, 255,},
{166, 216, 254, 212, 218, 255, 245, 255, 255, 255,},
{167, 217, 254, 213, 219, 255, 245, 255, 255, 255,},
{168, 218, 254, 214, 219, 255, 245, 255, 255, 255,},
{169, 219, 255, 215, 220, 255, 246, 255, 255, 255,},
{170, 219, 255, 215, 221, 255, 246, 255, 255, 255,},
{171, 220, 255, 216, 221, 255, 246, 255, 255, 255,},
{172, 221, 255, 217, 222, 255, 247, 255, 255, 255,},
{173, 222, 255, 217, 222, 255, 247, 255, 255, 255,},
{174, 222, 255, 218, 223, 255, 247, 255, 255, 255,},
{175, 223, 255, 219, 223, 255, 248, 255, 255, 255,},
{176, 224, 255, 220, 224, 255, 248, 255, 255, 255,},
{177, 224, 255, 220, 224, 255, 248, 255, 255, 255,},
{178, 225, 255, 221, 225, 255, 248, 255, 255, 255,},
{179, 226, 255, 222, 225, 255, 249, 255, 255, 255,},
{180, 226, 255, 222, 226, 255, 249, 255, 255, 255,},
{181, 227, 255, 223, 227, 255, 249, 255, 255, 255,},
{182, 228, 255, 224, 227, 255, 249, 255, 255, 255,},
{183, 228, 255, 224, 228, 255, 250, 255, 255, 255,},
{184, 229, 255, 225, 228, 255, 250, 255, 255, 255,},
{185, 230, 255, 226, 229, 255, 250, 255, 255, 255,},
{186, 230, 255, 226, 229, 255, 250, 255, 255, 255,},
{187, 231, 255, 227, 230, 255, 251, 255, 255, 255,},
{188, 232, 255, 228, 230, 255, 251, 255, 255, 255,},
{189, 232, 255, 228, 231, 255, 251, 255, 255, 255,},
{190, 233, 255, 229, 231, 255, 251, 255, 255, 255,},
{191, 233, 255, 229, 232, 255, 251, 255, 255, 255,},
{192, 234, 255, 230, 232, 255, 252, 255, 255, 255,},
{193, 234, 255, 231, 233, 255, 252, 255, 255, 255,},
{194, 235, 255, 231, 233, 255, 252, 255, 255, 255,},
{195, 236, 255, 232, 234, 255, 252, 255, 255, 255,},
{196, 236, 255, 232, 234, 255, 252, 255, 255, 255,},
{197, 237, 255, 233, 235, 255, 252, 255, 255, 255,},
{198, 237, 255, 234, 235, 255, 253, 255, 255, 255,},
{199, 238, 255, 234, 236, 255, 253, 255, 255, 255,},
{200, 238, 255, 235, 236, 255, 253, 255, 255, 255,},
{201, 239, 255, 235, 237, 255, 253, 255, 255, 255,},
{202, 239, 255, 236, 237, 255, 253, 255, 255, 255,},
{203, 240, 255, 237, 238, 255, 253, 255, 255, 255,},
{204, 240, 255, 237, 238, 255, 254, 255, 255, 255,},
{205, 241, 255, 238, 239, 255, 254, 255, 255, 255,},
{206, 241, 255, 238, 239, 255, 254, 255, 255, 255,},
{207, 242, 255, 239, 240, 255, 254, 255, 255, 255,},
{208, 242, 255, 239, 240, 255, 254, 255, 255, 255,},
{209, 243, 255, 240, 241, 255, 254, 255, 255, 255,},
{210, 243, 255, 240, 241, 255, 254, 255, 255, 255,},
{211, 244, 255, 241, 242, 255, 254, 255, 255, 255,},
{212, 244, 255, 241, 242, 255, 254, 255, 255, 255,},
{213, 245, 255, 242, 243, 255, 255, 255, 255, 255,},
{214, 245, 255, 242, 243, 255, 255, 255, 255, 255,},
{215, 246, 255, 243, 244, 255, 255, 255, 255, 255,},
{216, 246, 255, 243, 244, 255, 255, 255, 255, 255,},
{217, 246, 255, 244, 244, 255, 255, 255, 255, 255,},
{218, 247, 255, 244, 245, 255, 255, 255, 255, 255,},
{219, 247, 255, 245, 245, 255, 255, 255, 255, 255,},
{220, 248, 255, 245, 246, 255, 255, 255, 255, 255,},
{221, 248, 255, 246, 246, 255, 255, 255, 255, 255,},
{222, 248, 255, 246, 247, 255, 255, 255, 255, 255,},
{223, 249, 255, 247, 247, 255, 255, 255, 255, 255,},
{224, 249, 255, 247, 247, 255, 255, 255, 255, 255,},
{225, 250, 255, 247, 248, 255, 255, 255, 255, 255,},
{226, 250, 255, 248, 248, 255, 255, 255, 255, 255,},
{227, 250, 255, 248, 249, 255, 255, 255, 255, 255,},
{228, 251, 255, 249, 249, 255, 255, 255, 255, 255,},
{229, 251, 255, 249, 249, 255, 255, 255, 255, 255,},
{230, 251, 255, 249, 250, 255, 255, 255, 255, 255,},
{231, 251, 255, 250, 250, 255, 255, 255, 255, 255,},
{232, 252, 255, 250, 250, 255, 255, 255, 255, 255,},
{233, 252, 255, 251, 251, 255, 255, 255, 255, 255,},
{234, 252, 255, 251, 251, 255, 255, 255, 255, 255,},
{235, 253, 255, 251, 251, 255, 255, 255, 255, 255,},
{236, 253, 255, 252, 252, 255, 255, 255, 255, 255,},
{237, 253, 255, 252, 252, 255, 255, 255, 255, 255,},
{238, 253, 255, 252, 252, 255, 255, 255, 255, 255,},
{239, 254, 255, 253, 253, 255, 255, 255, 255, 255,},
{240, 254, 255, 253, 253, 255, 255, 255, 255, 255,},
{241, 254, 255, 253, 253, 255, 255, 255, 255, 255,},
{242, 254, 255, 253, 254, 255, 255, 255, 255, 255,},
{243, 254, 255, 254, 254, 255, 255, 255, 255, 255,},
{244, 255, 255, 254, 254, 255, 255, 255, 255, 255,},
{245, 255, 255, 254, 254, 255, 255, 255, 255, 255,},
{246, 255, 255, 254, 254, 255, 255, 255, 255, 255,},
{247, 255, 255, 255, 255, 255, 255, 255, 255, 255,},
{248, 255, 255, 255, 255, 255, 255, 255, 255, 255,},
{249, 255, 255, 255, 255, 255, 255, 255, 255, 255,},
{250, 255, 255, 255, 255, 255, 255, 255, 255, 255,},
{251, 255, 255, 255, 255, 255, 255, 255, 255, 255,},
{252, 255, 255, 255, 255, 255, 255, 255, 255, 255,},
{253, 255, 255, 255, 255, 255, 255, 255, 255, 255,},
{254, 255, 255, 255, 255, 255, 255, 255, 255, 255,},
{255, 255, 255, 255, 255, 255, 255, 255, 255, 255,},
};
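/* Illustration added for clarity (not part of the original file): each row of
 * the codebook is addressed by a single model parameter (note that the first
 * entry of row i is i) and supplies the remaining non-EOB node probabilities,
 * so a full probability set can be derived from one value.  The exact node
 * layout and EOB handling are assumptions here. */
#if 0
static void sketch_expand_model_gg875(int model_index,
                                      vp9_prob probs[ENTROPY_NODES - 1]) {
  int n;
  for (n = 0; n < ENTROPY_NODES - 1; ++n)
    probs[n] = vp9_modelcoefprobs_gg875[model_index][n];
}
#endif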
const vp9_prob vp9_modelcoefprobs_gg75[COEFPROB_MODELS][ENTROPY_NODES - 1] = {
// Probs generated with a Generalized Gaussian (with shape parameter 0.75)
// source model with varying quantizer step size for a uniform quantizer
{0, 0, 0, 0, 0, 0, 0, 0, 0, 0,}, // do not use
{1, 2, 6, 87, 129, 11, 88, 39, 93, 47,},
{2, 4, 11, 88, 130, 21, 89, 68, 98, 79,},
{3, 6, 16, 89, 131, 30, 91, 92, 103, 105,},
{4, 8, 21, 90, 131, 38, 92, 112, 107, 126,},
{5, 10, 26, 90, 132, 46, 94, 129, 111, 143,},
{6, 11, 31, 91, 133, 54, 95, 143, 115, 157,},
{7, 13, 35, 92, 133, 61, 96, 156, 119, 170,},
{8, 15, 40, 93, 134, 68, 97, 167, 123, 180,},
{9, 17, 44, 94, 134, 74, 98, 177, 126, 189,},
{10, 19, 48, 94, 135, 80, 100, 185, 130, 197,},
{11, 20, 52, 95, 135, 86, 101, 192, 133, 204,},
{12, 22, 56, 96, 136, 92, 102, 199, 137, 210,},
{13, 24, 60, 96, 136, 97, 103, 205, 140, 215,},
{14, 26, 64, 97, 137, 103, 104, 210, 143, 219,},
{15, 27, 68, 98, 137, 108, 105, 215, 146, 223,},
{16, 29, 71, 98, 138, 112, 106, 219, 149, 227,},
{17, 31, 75, 99, 138, 117, 107, 223, 152, 230,},
{18, 32, 78, 100, 139, 121, 108, 226, 155, 233,},
{19, 34, 82, 100, 139, 126, 109, 229, 158, 235,},
{20, 36, 85, 101, 140, 130, 110, 231, 161, 238,},
{21, 37, 88, 102, 140, 134, 111, 234, 164, 239,},
{22, 39, 91, 102, 141, 138, 112, 236, 167, 241,},
{23, 40, 94, 103, 141, 141, 113, 238, 169, 243,},
{24, 42, 97, 104, 142, 145, 114, 240, 172, 244,},
{25, 44, 100, 104, 142, 149, 115, 241, 174, 245,},
{26, 45, 103, 105, 143, 152, 116, 243, 177, 246,},
{27, 47, 106, 105, 143, 155, 117, 244, 179, 247,},
{28, 48, 109, 106, 143, 158, 118, 245, 182, 248,},
{29, 50, 112, 107, 144, 161, 119, 246, 184, 249,},
{30, 52, 115, 107, 144, 164, 120, 247, 186, 250,},
{31, 53, 117, 108, 145, 167, 121, 248, 188, 250,},
{32, 55, 120, 109, 145, 170, 122, 249, 190, 251,},
{33, 56, 122, 109, 146, 173, 123, 249, 192, 252,},
{34, 58, 125, 110, 146, 175, 124, 250, 194, 252,},
{35, 59, 127, 110, 147, 178, 125, 251, 196, 252,},
{36, 61, 130, 111, 147, 180, 126, 251, 198, 253,},
{37, 62, 132, 112, 147, 183, 127, 251, 200, 253,},
{38, 64, 135, 112, 148, 185, 128, 252, 202, 253,},
{39, 65, 137, 113, 148, 187, 129, 252, 204, 254,},
{40, 67, 139, 114, 149, 189, 130, 253, 205, 254,},
{41, 68, 141, 114, 149, 191, 131, 253, 207, 254,},
{42, 70, 144, 115, 150, 193, 132, 253, 209, 254,},
{43, 71, 146, 115, 150, 195, 133, 254, 210, 254,},
{44, 72, 148, 116, 151, 197, 134, 254, 212, 255,},
{45, 74, 150, 117, 151, 199, 135, 254, 213, 255,},
{46, 75, 152, 117, 151, 201, 136, 254, 215, 255,},
{47, 77, 154, 118, 152, 202, 137, 254, 216, 255,},
{48, 78, 156, 119, 152, 204, 138, 254, 217, 255,},
{49, 80, 158, 119, 153, 206, 139, 255, 219, 255,},
{50, 81, 160, 120, 153, 207, 140, 255, 220, 255,},
{51, 82, 162, 120, 154, 209, 141, 255, 221, 255,},
{52, 84, 164, 121, 154, 210, 142, 255, 222, 255,},
{53, 85, 165, 122, 155, 212, 143, 255, 224, 255,},
{54, 87, 167, 122, 155, 213, 144, 255, 225, 255,},
{55, 88, 169, 123, 155, 215, 145, 255, 226, 255,},
{56, 89, 171, 124, 156, 216, 146, 255, 227, 255,},
{57, 91, 172, 124, 156, 217, 146, 255, 228, 255,},
{58, 92, 174, 125, 157, 218, 147, 255, 229, 255,},
{59, 93, 176, 126, 157, 220, 148, 255, 230, 255,},
{60, 95, 177, 126, 158, 221, 149, 255, 231, 255,},
{61, 96, 179, 127, 158, 222, 150, 255, 232, 255,},
{62, 97, 180, 127, 159, 223, 151, 255, 232, 255,},
{63, 99, 182, 128, 159, 224, 152, 255, 233, 255,},
{64, 100, 183, 129, 159, 225, 153, 255, 234, 255,},
{65, 101, 185, 129, 160, 226, 154, 255, 235, 255,},
{66, 103, 186, 130, 160, 227, 155, 255, 236, 255,},
{67, 104, 188, 131, 161, 228, 156, 255, 236, 255,},
{68, 105, 189, 131, 161, 229, 157, 255, 237, 255,},
{69, 106, 190, 132, 162, 230, 158, 255, 238, 255,},
{70, 108, 192, 133, 162, 231, 159, 255, 238, 255,},
{71, 109, 193, 133, 162, 231, 159, 255, 239, 255,},
{72, 110, 194, 134, 163, 232, 160, 255, 240, 255,},
{73, 111, 196, 134, 163, 233, 161, 255, 240, 255,},
{74, 113, 197, 135, 164, 234, 162, 255, 241, 255,},
{75, 114, 198, 136, 164, 235, 163, 255, 241, 255,},
{76, 115, 199, 136, 165, 235, 164, 255, 242, 255,},
{77, 116, 200, 137, 165, 236, 165, 255, 243, 255,},
{78, 118, 202, 138, 166, 237, 166, 255, 243, 255,},
{79, 119, 203, 138, 166, 237, 167, 255, 244, 255,},
{80, 120, 204, 139, 167, 238, 168, 255, 244, 255,},
{81, 121, 205, 140, 167, 239, 168, 255, 244, 255,},
{82, 123, 206, 140, 167, 239, 169, 255, 245, 255,},
{83, 124, 207, 141, 168, 240, 170, 255, 245, 255,},
{84, 125, 208, 142, 168, 240, 171, 255, 246, 255,},
{85, 126, 209, 142, 169, 241, 172, 255, 246, 255,},
{86, 127, 210, 143, 169, 241, 173, 255, 247, 255,},
{87, 129, 211, 144, 170, 242, 174, 255, 247, 255,},
{88, 130, 212, 144, 170, 242, 175, 255, 247, 255,},
{89, 131, 213, 145, 171, 243, 175, 255, 248, 255,},
{90, 132, 214, 146, 171, 243, 176, 255, 248, 255,},
{91, 133, 215, 146, 171, 244, 177, 255, 248, 255,},
{92, 134, 216, 147, 172, 244, 178, 255, 249, 255,},
{93, 136, 217, 148, 172, 245, 179, 255, 249, 255,},
{94, 137, 218, 148, 173, 245, 180, 255, 249, 255,},
{95, 138, 219, 149, 173, 245, 181, 255, 249, 255,},
{96, 139, 220, 150, 174, 246, 181, 255, 250, 255,},
{97, 140, 220, 150, 174, 246, 182, 255, 250, 255,},
{98, 141, 221, 151, 175, 247, 183, 255, 250, 255,},
{99, 142, 222, 152, 175, 247, 184, 255, 250, 255,},
{100, 144, 223, 152, 176, 247, 185, 255, 251, 255,},
{101, 145, 224, 153, 176, 248, 186, 255, 251, 255,},
{102, 146, 224, 154, 177, 248, 186, 255, 251, 255,},
{103, 147, 225, 154, 177, 248, 187, 255, 251, 255,},
{104, 148, 226, 155, 177, 248, 188, 255, 252, 255,},
{105, 149, 226, 156, 178, 249, 189, 255, 252, 255,},
{106, 150, 227, 156, 178, 249, 190, 255, 252, 255,},
{107, 151, 228, 157, 179, 249, 191, 255, 252, 255,},
{108, 152, 229, 158, 179, 250, 191, 255, 252, 255,},
{109, 153, 229, 158, 180, 250, 192, 255, 252, 255,},
{110, 154, 230, 159, 180, 250, 193, 255, 253, 255,},
{111, 155, 231, 160, 181, 250, 194, 255, 253, 255,},
{112, 157, 231, 160, 181, 251, 195, 255, 253, 255,},
{113, 158, 232, 161, 182, 251, 195, 255, 253, 255,},
{114, 159, 232, 162, 182, 251, 196, 255, 253, 255,},
{115, 160, 233, 162, 183, 251, 197, 255, 253, 255,},
{116, 161, 234, 163, 183, 251, 198, 255, 253, 255,},
{117, 162, 234, 164, 184, 252, 198, 255, 254, 255,},
{118, 163, 235, 165, 184, 252, 199, 255, 254, 255,},
{119, 164, 235, 165, 185, 252, 200, 255, 254, 255,},
{120, 165, 236, 166, 185, 252, 201, 255, 254, 255,},
{121, 166, 236, 167, 186, 252, 201, 255, 254, 255,},
{122, 167, 237, 167, 186, 252, 202, 255, 254, 255,},
{123, 168, 237, 168, 186, 253, 203, 255, 254, 255,},
{124, 169, 238, 169, 187, 253, 204, 255, 254, 255,},
{125, 170, 238, 169, 187, 253, 204, 255, 254, 255,},
{126, 171, 239, 170, 188, 253, 205, 255, 254, 255,},
{127, 172, 239, 171, 188, 253, 206, 255, 254, 255,},
{128, 173, 240, 171, 189, 253, 207, 255, 255, 255,},
{129, 174, 240, 172, 189, 253, 207, 255, 255, 255,},
{130, 175, 241, 173, 190, 253, 208, 255, 255, 255,},
{131, 176, 241, 174, 190, 254, 209, 255, 255, 255,},
{132, 177, 241, 174, 191, 254, 209, 255, 255, 255,},
{133, 178, 242, 175, 191, 254, 210, 255, 255, 255,},
{134, 179, 242, 176, 192, 254, 211, 255, 255, 255,},
{135, 180, 243, 176, 192, 254, 212, 255, 255, 255,},
{136, 180, 243, 177, 193, 254, 212, 255, 255, 255,},
{137, 181, 243, 178, 193, 254, 213, 255, 255, 255,},
{138, 182, 244, 179, 194, 254, 214, 255, 255, 255,},
{139, 183, 244, 179, 194, 254, 214, 255, 255, 255,},
{140, 184, 244, 180, 195, 254, 215, 255, 255, 255,},
{141, 185, 245, 181, 195, 254, 216, 255, 255, 255,},
{142, 186, 245, 181, 196, 255, 216, 255, 255, 255,},
{143, 187, 245, 182, 196, 255, 217, 255, 255, 255,},
{144, 188, 246, 183, 197, 255, 218, 255, 255, 255,},
{145, 189, 246, 183, 197, 255, 218, 255, 255, 255,},
{146, 190, 246, 184, 198, 255, 219, 255, 255, 255,},
{147, 191, 247, 185, 198, 255, 220, 255, 255, 255,},
{148, 191, 247, 186, 199, 255, 220, 255, 255, 255,},
{149, 192, 247, 186, 199, 255, 221, 255, 255, 255,},
{150, 193, 248, 187, 200, 255, 221, 255, 255, 255,},
{151, 194, 248, 188, 200, 255, 222, 255, 255, 255,},
{152, 195, 248, 188, 201, 255, 223, 255, 255, 255,},
{153, 196, 248, 189, 201, 255, 223, 255, 255, 255,},
{154, 197, 249, 190, 202, 255, 224, 255, 255, 255,},
{155, 198, 249, 191, 202, 255, 224, 255, 255, 255,},
{156, 198, 249, 191, 203, 255, 225, 255, 255, 255,},
{157, 199, 249, 192, 203, 255, 226, 255, 255, 255,},
{158, 200, 250, 193, 204, 255, 226, 255, 255, 255,},
{159, 201, 250, 193, 204, 255, 227, 255, 255, 255,},
{160, 202, 250, 194, 205, 255, 227, 255, 255, 255,},
{161, 203, 250, 195, 206, 255, 228, 255, 255, 255,},
{162, 203, 250, 196, 206, 255, 228, 255, 255, 255,},
{163, 204, 251, 196, 207, 255, 229, 255, 255, 255,},
{164, 205, 251, 197, 207, 255, 229, 255, 255, 255,},
{165, 206, 251, 198, 208, 255, 230, 255, 255, 255,},
{166, 207, 251, 198, 208, 255, 231, 255, 255, 255,},
{167, 207, 251, 199, 209, 255, 231, 255, 255, 255,},
{168, 208, 252, 200, 209, 255, 232, 255, 255, 255,},
{169, 209, 252, 201, 210, 255, 232, 255, 255, 255,},
{170, 210, 252, 201, 210, 255, 233, 255, 255, 255,},
{171, 211, 252, 202, 211, 255, 233, 255, 255, 255,},
{172, 211, 252, 203, 211, 255, 234, 255, 255, 255,},
{173, 212, 252, 203, 212, 255, 234, 255, 255, 255,},
{174, 213, 252, 204, 212, 255, 235, 255, 255, 255,},
{175, 214, 253, 205, 213, 255, 235, 255, 255, 255,},
{176, 214, 253, 206, 213, 255, 236, 255, 255, 255,},
{177, 215, 253, 206, 214, 255, 236, 255, 255, 255,},
{178, 216, 253, 207, 214, 255, 237, 255, 255, 255,},
{179, 217, 253, 208, 215, 255, 237, 255, 255, 255,},
{180, 217, 253, 208, 216, 255, 237, 255, 255, 255,},
{181, 218, 253, 209, 216, 255, 238, 255, 255, 255,},
{182, 219, 254, 210, 217, 255, 238, 255, 255, 255,},
{183, 220, 254, 211, 217, 255, 239, 255, 255, 255,},
{184, 220, 254, 211, 218, 255, 239, 255, 255, 255,},
{185, 221, 254, 212, 218, 255, 240, 255, 255, 255,},
{186, 222, 254, 213, 219, 255, 240, 255, 255, 255,},
{187, 222, 254, 213, 219, 255, 241, 255, 255, 255,},
{188, 223, 254, 214, 220, 255, 241, 255, 255, 255,},
{189, 224, 254, 215, 220, 255, 241, 255, 255, 255,},
{190, 225, 254, 215, 221, 255, 242, 255, 255, 255,},
{191, 225, 254, 216, 221, 255, 242, 255, 255, 255,},
{192, 226, 254, 217, 222, 255, 243, 255, 255, 255,},
{193, 227, 255, 218, 223, 255, 243, 255, 255, 255,},
{194, 227, 255, 218, 223, 255, 243, 255, 255, 255,},
{195, 228, 255, 219, 224, 255, 244, 255, 255, 255,},
{196, 229, 255, 220, 224, 255, 244, 255, 255, 255,},
{197, 229, 255, 220, 225, 255, 244, 255, 255, 255,},
{198, 230, 255, 221, 225, 255, 245, 255, 255, 255,},
{199, 230, 255, 222, 226, 255, 245, 255, 255, 255,},
{200, 231, 255, 222, 226, 255, 246, 255, 255, 255,},
{201, 232, 255, 223, 227, 255, 246, 255, 255, 255,},
{202, 232, 255, 224, 228, 255, 246, 255, 255, 255,},
{203, 233, 255, 224, 228, 255, 247, 255, 255, 255,},
{204, 234, 255, 225, 229, 255, 247, 255, 255, 255,},
{205, 234, 255, 226, 229, 255, 247, 255, 255, 255,},
{206, 235, 255, 227, 230, 255, 248, 255, 255, 255,},
{207, 235, 255, 227, 230, 255, 248, 255, 255, 255,},
{208, 236, 255, 228, 231, 255, 248, 255, 255, 255,},
{209, 237, 255, 229, 231, 255, 248, 255, 255, 255,},
{210, 237, 255, 229, 232, 255, 249, 255, 255, 255,},
{211, 238, 255, 230, 233, 255, 249, 255, 255, 255,},
{212, 238, 255, 231, 233, 255, 249, 255, 255, 255,},
{213, 239, 255, 231, 234, 255, 250, 255, 255, 255,},
{214, 239, 255, 232, 234, 255, 250, 255, 255, 255,},
{215, 240, 255, 233, 235, 255, 250, 255, 255, 255,},
{216, 241, 255, 233, 235, 255, 250, 255, 255, 255,},
{217, 241, 255, 234, 236, 255, 251, 255, 255, 255,},
{218, 242, 255, 235, 236, 255, 251, 255, 255, 255,},
{219, 242, 255, 235, 237, 255, 251, 255, 255, 255,},
{220, 243, 255, 236, 237, 255, 251, 255, 255, 255,},
{221, 243, 255, 236, 238, 255, 252, 255, 255, 255,},
{222, 244, 255, 237, 239, 255, 252, 255, 255, 255,},
{223, 244, 255, 238, 239, 255, 252, 255, 255, 255,},
{224, 245, 255, 238, 240, 255, 252, 255, 255, 255,},
{225, 245, 255, 239, 240, 255, 252, 255, 255, 255,},
{226, 246, 255, 240, 241, 255, 253, 255, 255, 255,},
{227, 246, 255, 240, 241, 255, 253, 255, 255, 255,},
{228, 247, 255, 241, 242, 255, 253, 255, 255, 255,},
{229, 247, 255, 242, 242, 255, 253, 255, 255, 255,},
{230, 248, 255, 242, 243, 255, 253, 255, 255, 255,},
{231, 248, 255, 243, 244, 255, 254, 255, 255, 255,},
{232, 248, 255, 243, 244, 255, 254, 255, 255, 255,},
{233, 249, 255, 244, 245, 255, 254, 255, 255, 255,},
{234, 249, 255, 245, 245, 255, 254, 255, 255, 255,},
{235, 250, 255, 245, 246, 255, 254, 255, 255, 255,},
{236, 250, 255, 246, 246, 255, 254, 255, 255, 255,},
{237, 251, 255, 246, 247, 255, 255, 255, 255, 255,},
{238, 251, 255, 247, 247, 255, 255, 255, 255, 255,},
{239, 251, 255, 248, 248, 255, 255, 255, 255, 255,},
{240, 252, 255, 248, 248, 255, 255, 255, 255, 255,},
{241, 252, 255, 249, 249, 255, 255, 255, 255, 255,},
{242, 252, 255, 249, 249, 255, 255, 255, 255, 255,},
{243, 253, 255, 250, 250, 255, 255, 255, 255, 255,},
{244, 253, 255, 250, 250, 255, 255, 255, 255, 255,},
{245, 253, 255, 251, 251, 255, 255, 255, 255, 255,},
{246, 254, 255, 251, 251, 255, 255, 255, 255, 255,},
{247, 254, 255, 252, 252, 255, 255, 255, 255, 255,},
{248, 254, 255, 252, 252, 255, 255, 255, 255, 255,},
{249, 255, 255, 253, 253, 255, 255, 255, 255, 255,},
{250, 255, 255, 253, 253, 255, 255, 255, 255, 255,},
{251, 255, 255, 254, 254, 255, 255, 255, 255, 255,},
{252, 255, 255, 254, 254, 255, 255, 255, 255, 255,},
{253, 255, 255, 255, 255, 255, 255, 255, 255, 255,},
{254, 255, 255, 255, 255, 255, 255, 255, 255, 255,},
{255, 255, 255, 255, 255, 255, 255, 255, 255, 255,}
};
const vp9_prob vp9_modelcoefprobs_gg625[COEFPROB_MODELS][ENTROPY_NODES - 1] = {
// Probs generated with a Generalized Gaussian (with shape parameter 0.625)
// source model with varying quantizer step size for a uniform quantizer
{0, 0, 0, 0, 0, 0, 0, 0, 0, 0,}, // do not use
{1, 2, 6, 88, 130, 10, 88, 35, 94, 40,},
{2, 4, 11, 89, 131, 19, 90, 60, 99, 67,},
{3, 6, 15, 90, 132, 27, 92, 80, 103, 88,},
{4, 7, 20, 91, 132, 34, 93, 97, 107, 105,},
{5, 9, 24, 92, 133, 41, 94, 112, 110, 120,},
{6, 11, 28, 93, 134, 48, 95, 125, 113, 132,},
{7, 13, 33, 93, 134, 54, 97, 136, 116, 143,},
{8, 14, 36, 94, 135, 60, 98, 146, 119, 152,},
{9, 16, 40, 95, 135, 65, 99, 155, 122, 161,},
{10, 18, 44, 95, 136, 70, 100, 163, 125, 168,},
{11, 19, 48, 96, 136, 75, 101, 170, 127, 175,},
{12, 21, 51, 97, 137, 80, 102, 176, 130, 181,},
{13, 23, 55, 97, 137, 85, 102, 182, 132, 187,},
{14, 24, 58, 98, 138, 89, 103, 188, 135, 192,},
{15, 26, 61, 99, 138, 94, 104, 193, 137, 196,},
{16, 27, 64, 99, 139, 98, 105, 197, 140, 201,},
{17, 29, 67, 100, 139, 102, 106, 201, 142, 205,},
{18, 30, 70, 101, 140, 106, 107, 205, 144, 208,},
{19, 32, 73, 101, 140, 109, 108, 209, 146, 211,},
{20, 34, 76, 102, 140, 113, 109, 212, 148, 214,},
{21, 35, 79, 102, 141, 116, 109, 215, 151, 217,},
{22, 37, 82, 103, 141, 120, 110, 218, 153, 220,},
{23, 38, 85, 103, 142, 123, 111, 220, 155, 222,},
{24, 40, 87, 104, 142, 126, 112, 223, 157, 224,},
{25, 41, 90, 105, 143, 129, 113, 225, 159, 226,},
{26, 42, 93, 105, 143, 132, 113, 227, 161, 228,},
{27, 44, 95, 106, 143, 135, 114, 229, 162, 230,},
{28, 45, 98, 106, 144, 138, 115, 230, 164, 232,},
{29, 47, 100, 107, 144, 141, 116, 232, 166, 233,},
{30, 48, 103, 107, 145, 144, 117, 234, 168, 235,},
{31, 50, 105, 108, 145, 146, 117, 235, 170, 236,},
{32, 51, 107, 108, 145, 149, 118, 236, 171, 237,},
{33, 52, 110, 109, 146, 151, 119, 238, 173, 238,},
{34, 54, 112, 110, 146, 154, 120, 239, 175, 239,},
{35, 55, 114, 110, 147, 156, 120, 240, 176, 240,},
{36, 57, 116, 111, 147, 158, 121, 241, 178, 241,},
{37, 58, 119, 111, 147, 161, 122, 242, 180, 242,},
{38, 59, 121, 112, 148, 163, 123, 243, 181, 243,},
{39, 61, 123, 112, 148, 165, 123, 244, 183, 244,},
{40, 62, 125, 113, 148, 167, 124, 244, 184, 245,},
{41, 63, 127, 113, 149, 169, 125, 245, 186, 245,},
{42, 65, 129, 114, 149, 171, 126, 246, 187, 246,},
{43, 66, 131, 114, 150, 173, 126, 246, 188, 247,},
{44, 67, 133, 115, 150, 175, 127, 247, 190, 247,},
{45, 69, 135, 115, 150, 177, 128, 247, 191, 248,},
{46, 70, 136, 116, 151, 178, 129, 248, 193, 248,},
{47, 71, 138, 116, 151, 180, 129, 248, 194, 249,},
{48, 73, 140, 117, 151, 182, 130, 249, 195, 249,},
{49, 74, 142, 118, 152, 184, 131, 249, 197, 250,},
{50, 75, 144, 118, 152, 185, 131, 250, 198, 250,},
{51, 76, 145, 119, 153, 187, 132, 250, 199, 250,},
{52, 78, 147, 119, 153, 188, 133, 251, 200, 251,},
{53, 79, 149, 120, 153, 190, 134, 251, 201, 251,},
{54, 80, 151, 120, 154, 192, 134, 251, 203, 251,},
{55, 82, 152, 121, 154, 193, 135, 251, 204, 252,},
{56, 83, 154, 121, 154, 194, 136, 252, 205, 252,},
{57, 84, 155, 122, 155, 196, 136, 252, 206, 252,},
{58, 85, 157, 122, 155, 197, 137, 252, 207, 252,},
{59, 86, 158, 123, 156, 199, 138, 252, 208, 252,},
{60, 88, 160, 123, 156, 200, 139, 253, 209, 253,},
{61, 89, 162, 124, 156, 201, 139, 253, 210, 253,},
{62, 90, 163, 124, 157, 202, 140, 253, 211, 253,},
{63, 91, 164, 125, 157, 204, 141, 253, 212, 253,},
{64, 93, 166, 125, 157, 205, 141, 253, 213, 253,},
{65, 94, 167, 126, 158, 206, 142, 254, 214, 254,},
{66, 95, 169, 126, 158, 207, 143, 254, 215, 254,},
{67, 96, 170, 127, 158, 208, 143, 254, 216, 254,},
{68, 97, 172, 127, 159, 209, 144, 254, 217, 254,},
{69, 98, 173, 128, 159, 210, 145, 254, 218, 254,},
{70, 100, 174, 128, 160, 212, 146, 254, 219, 254,},
{71, 101, 176, 129, 160, 213, 146, 254, 220, 254,},
{72, 102, 177, 130, 160, 214, 147, 254, 220, 254,},
{73, 103, 178, 130, 161, 215, 148, 255, 221, 255,},
{74, 104, 179, 131, 161, 216, 148, 255, 222, 255,},
{75, 105, 181, 131, 161, 217, 149, 255, 223, 255,},
{76, 107, 182, 132, 162, 217, 150, 255, 224, 255,},
{77, 108, 183, 132, 162, 218, 150, 255, 224, 255,},
{78, 109, 184, 133, 163, 219, 151, 255, 225, 255,},
{79, 110, 185, 133, 163, 220, 152, 255, 226, 255,},
{80, 111, 187, 134, 163, 221, 153, 255, 227, 255,},
{81, 112, 188, 134, 164, 222, 153, 255, 227, 255,},
{82, 113, 189, 135, 164, 223, 154, 255, 228, 255,},
{83, 115, 190, 135, 164, 223, 155, 255, 229, 255,},
{84, 116, 191, 136, 165, 224, 155, 255, 229, 255,},
{85, 117, 192, 136, 165, 225, 156, 255, 230, 255,},
{86, 118, 193, 137, 165, 226, 157, 255, 231, 255,},
{87, 119, 194, 137, 166, 226, 157, 255, 231, 255,},
{88, 120, 195, 138, 166, 227, 158, 255, 232, 255,},
{89, 121, 196, 139, 167, 228, 159, 255, 232, 255,},
{90, 122, 197, 139, 167, 229, 159, 255, 233, 255,},
{91, 123, 198, 140, 167, 229, 160, 255, 234, 255,},
{92, 124, 199, 140, 168, 230, 161, 255, 234, 255,},
{93, 125, 200, 141, 168, 231, 162, 255, 235, 255,},
{94, 127, 201, 141, 168, 231, 162, 255, 235, 255,},
{95, 128, 202, 142, 169, 232, 163, 255, 236, 255,},
{96, 129, 203, 142, 169, 232, 164, 255, 236, 255,},
{97, 130, 204, 143, 170, 233, 164, 255, 237, 255,},
{98, 131, 205, 143, 170, 234, 165, 255, 237, 255,},
{99, 132, 206, 144, 170, 234, 166, 255, 238, 255,},
{100, 133, 207, 144, 171, 235, 166, 255, 238, 255,},
{101, 134, 208, 145, 171, 235, 167, 255, 239, 255,},
{102, 135, 209, 146, 171, 236, 168, 255, 239, 255,},
{103, 136, 209, 146, 172, 236, 168, 255, 240, 255,},
{104, 137, 210, 147, 172, 237, 169, 255, 240, 255,},
{105, 138, 211, 147, 173, 237, 170, 255, 240, 255,},
{106, 139, 212, 148, 173, 238, 170, 255, 241, 255,},
{107, 140, 213, 148, 173, 238, 171, 255, 241, 255,},
{108, 141, 213, 149, 174, 239, 172, 255, 242, 255,},
{109, 142, 214, 149, 174, 239, 172, 255, 242, 255,},
{110, 143, 215, 150, 175, 240, 173, 255, 242, 255,},
{111, 144, 216, 151, 175, 240, 174, 255, 243, 255,},
{112, 145, 217, 151, 175, 240, 174, 255, 243, 255,},
{113, 146, 217, 152, 176, 241, 175, 255, 244, 255,},
{114, 147, 218, 152, 176, 241, 176, 255, 244, 255,},
{115, 148, 219, 153, 176, 242, 177, 255, 244, 255,},
{116, 149, 219, 153, 177, 242, 177, 255, 245, 255,},
{117, 150, 220, 154, 177, 242, 178, 255, 245, 255,},
{118, 151, 221, 155, 178, 243, 179, 255, 245, 255,},
{119, 152, 222, 155, 178, 243, 179, 255, 245, 255,},
{120, 153, 222, 156, 178, 244, 180, 255, 246, 255,},
{121, 154, 223, 156, 179, 244, 181, 255, 246, 255,},
{122, 155, 224, 157, 179, 244, 181, 255, 246, 255,},
{123, 156, 224, 157, 180, 245, 182, 255, 247, 255,},
{124, 157, 225, 158, 180, 245, 183, 255, 247, 255,},
{125, 158, 225, 159, 180, 245, 183, 255, 247, 255,},
{126, 159, 226, 159, 181, 246, 184, 255, 247, 255,},
{127, 160, 227, 160, 181, 246, 185, 255, 248, 255,},
{128, 161, 227, 160, 182, 246, 185, 255, 248, 255,},
{129, 162, 228, 161, 182, 246, 186, 255, 248, 255,},
{130, 163, 228, 161, 182, 247, 187, 255, 248, 255,},
{131, 164, 229, 162, 183, 247, 187, 255, 249, 255,},
{132, 165, 230, 163, 183, 247, 188, 255, 249, 255,},
{133, 166, 230, 163, 184, 248, 189, 255, 249, 255,},
{134, 166, 231, 164, 184, 248, 189, 255, 249, 255,},
{135, 167, 231, 164, 184, 248, 190, 255, 250, 255,},
{136, 168, 232, 165, 185, 248, 191, 255, 250, 255,},
{137, 169, 232, 166, 185, 248, 191, 255, 250, 255,},
{138, 170, 233, 166, 186, 249, 192, 255, 250, 255,},
{139, 171, 233, 167, 186, 249, 192, 255, 250, 255,},
{140, 172, 234, 167, 187, 249, 193, 255, 251, 255,},
{141, 173, 234, 168, 187, 249, 194, 255, 251, 255,},
{142, 174, 235, 169, 187, 250, 194, 255, 251, 255,},
{143, 175, 235, 169, 188, 250, 195, 255, 251, 255,},
{144, 176, 236, 170, 188, 250, 196, 255, 251, 255,},
{145, 177, 236, 170, 189, 250, 196, 255, 251, 255,},
{146, 177, 237, 171, 189, 250, 197, 255, 252, 255,},
{147, 178, 237, 172, 189, 251, 198, 255, 252, 255,},
{148, 179, 238, 172, 190, 251, 198, 255, 252, 255,},
{149, 180, 238, 173, 190, 251, 199, 255, 252, 255,},
{150, 181, 238, 173, 191, 251, 200, 255, 252, 255,},
{151, 182, 239, 174, 191, 251, 200, 255, 252, 255,},
{152, 183, 239, 175, 192, 251, 201, 255, 252, 255,},
{153, 184, 240, 175, 192, 252, 202, 255, 252, 255,},
{154, 184, 240, 176, 193, 252, 202, 255, 253, 255,},
{155, 185, 240, 177, 193, 252, 203, 255, 253, 255,},
{156, 186, 241, 177, 193, 252, 203, 255, 253, 255,},
{157, 187, 241, 178, 194, 252, 204, 255, 253, 255,},
{158, 188, 242, 178, 194, 252, 205, 255, 253, 255,},
{159, 189, 242, 179, 195, 252, 205, 255, 253, 255,},
{160, 190, 242, 180, 195, 253, 206, 255, 253, 255,},
{161, 190, 243, 180, 196, 253, 207, 255, 253, 255,},
{162, 191, 243, 181, 196, 253, 207, 255, 254, 255,},
{163, 192, 243, 182, 197, 253, 208, 255, 254, 255,},
{164, 193, 244, 182, 197, 253, 209, 255, 254, 255,},
{165, 194, 244, 183, 197, 253, 209, 255, 254, 255,},
{166, 195, 244, 184, 198, 253, 210, 255, 254, 255,},
{167, 196, 245, 184, 198, 253, 210, 255, 254, 255,},
{168, 196, 245, 185, 199, 253, 211, 255, 254, 255,},
{169, 197, 245, 186, 199, 254, 212, 255, 254, 255,},
{170, 198, 246, 186, 200, 254, 212, 255, 254, 255,},
{171, 199, 246, 187, 200, 254, 213, 255, 254, 255,},
{172, 200, 246, 188, 201, 254, 214, 255, 254, 255,},
{173, 200, 246, 188, 201, 254, 214, 255, 254, 255,},
{174, 201, 247, 189, 202, 254, 215, 255, 254, 255,},
{175, 202, 247, 189, 202, 254, 215, 255, 255, 255,},
{176, 203, 247, 190, 203, 254, 216, 255, 255, 255,},
{177, 204, 248, 191, 203, 254, 217, 255, 255, 255,},
{178, 204, 248, 191, 204, 254, 217, 255, 255, 255,},
{179, 205, 248, 192, 204, 254, 218, 255, 255, 255,},
{180, 206, 248, 193, 204, 254, 218, 255, 255, 255,},
{181, 207, 249, 194, 205, 255, 219, 255, 255, 255,},
{182, 208, 249, 194, 205, 255, 220, 255, 255, 255,},
{183, 208, 249, 195, 206, 255, 220, 255, 255, 255,},
{184, 209, 249, 196, 206, 255, 221, 255, 255, 255,},
{185, 210, 250, 196, 207, 255, 221, 255, 255, 255,},
{186, 211, 250, 197, 207, 255, 222, 255, 255, 255,},
{187, 211, 250, 198, 208, 255, 223, 255, 255, 255,},
{188, 212, 250, 198, 208, 255, 223, 255, 255, 255,},
{189, 213, 250, 199, 209, 255, 224, 255, 255, 255,},
{190, 214, 251, 200, 209, 255, 224, 255, 255, 255,},
{191, 215, 251, 200, 210, 255, 225, 255, 255, 255,},
{192, 215, 251, 201, 211, 255, 225, 255, 255, 255,},
{193, 216, 251, 202, 211, 255, 226, 255, 255, 255,},
{194, 217, 251, 203, 212, 255, 227, 255, 255, 255,},
{195, 218, 252, 203, 212, 255, 227, 255, 255, 255,},
{196, 218, 252, 204, 213, 255, 228, 255, 255, 255,},
{197, 219, 252, 205, 213, 255, 228, 255, 255, 255,},
{198, 220, 252, 205, 214, 255, 229, 255, 255, 255,},
{199, 221, 252, 206, 214, 255, 229, 255, 255, 255,},
{200, 221, 252, 207, 215, 255, 230, 255, 255, 255,},
{201, 222, 252, 208, 215, 255, 231, 255, 255, 255,},
{202, 223, 253, 208, 216, 255, 231, 255, 255, 255,},
{203, 223, 253, 209, 216, 255, 232, 255, 255, 255,},
{204, 224, 253, 210, 217, 255, 232, 255, 255, 255,},
{205, 225, 253, 211, 218, 255, 233, 255, 255, 255,},
{206, 226, 253, 211, 218, 255, 233, 255, 255, 255,},
{207, 226, 253, 212, 219, 255, 234, 255, 255, 255,},
{208, 227, 253, 213, 219, 255, 234, 255, 255, 255,},
{209, 228, 254, 214, 220, 255, 235, 255, 255, 255,},
{210, 228, 254, 214, 220, 255, 236, 255, 255, 255,},
{211, 229, 254, 215, 221, 255, 236, 255, 255, 255,},
{212, 230, 254, 216, 222, 255, 237, 255, 255, 255,},
{213, 230, 254, 217, 222, 255, 237, 255, 255, 255,},
{214, 231, 254, 217, 223, 255, 238, 255, 255, 255,},
{215, 232, 254, 218, 223, 255, 238, 255, 255, 255,},
{216, 233, 254, 219, 224, 255, 239, 255, 255, 255,},
{217, 233, 254, 220, 225, 255, 239, 255, 255, 255,},
{218, 234, 255, 220, 225, 255, 240, 255, 255, 255,},
{219, 235, 255, 221, 226, 255, 240, 255, 255, 255,},
{220, 235, 255, 222, 226, 255, 241, 255, 255, 255,},
{221, 236, 255, 223, 227, 255, 241, 255, 255, 255,},
{222, 237, 255, 224, 228, 255, 242, 255, 255, 255,},
{223, 237, 255, 224, 228, 255, 242, 255, 255, 255,},
{224, 238, 255, 225, 229, 255, 243, 255, 255, 255,},
{225, 238, 255, 226, 230, 255, 243, 255, 255, 255,},
{226, 239, 255, 227, 230, 255, 244, 255, 255, 255,},
{227, 240, 255, 228, 231, 255, 244, 255, 255, 255,},
{228, 240, 255, 228, 232, 255, 245, 255, 255, 255,},
{229, 241, 255, 229, 232, 255, 245, 255, 255, 255,},
{230, 242, 255, 230, 233, 255, 246, 255, 255, 255,},
{231, 242, 255, 231, 234, 255, 246, 255, 255, 255,},
{232, 243, 255, 232, 234, 255, 247, 255, 255, 255,},
{233, 243, 255, 233, 235, 255, 247, 255, 255, 255,},
{234, 244, 255, 233, 236, 255, 247, 255, 255, 255,},
{235, 245, 255, 234, 236, 255, 248, 255, 255, 255,},
{236, 245, 255, 235, 237, 255, 248, 255, 255, 255,},
{237, 246, 255, 236, 238, 255, 249, 255, 255, 255,},
{238, 247, 255, 237, 239, 255, 249, 255, 255, 255,},
{239, 247, 255, 238, 239, 255, 250, 255, 255, 255,},
{240, 248, 255, 239, 240, 255, 250, 255, 255, 255,},
{241, 248, 255, 240, 241, 255, 251, 255, 255, 255,},
{242, 249, 255, 241, 242, 255, 251, 255, 255, 255,},
{243, 249, 255, 241, 243, 255, 251, 255, 255, 255,},
{244, 250, 255, 242, 243, 255, 252, 255, 255, 255,},
{245, 251, 255, 243, 244, 255, 252, 255, 255, 255,},
{246, 251, 255, 244, 245, 255, 253, 255, 255, 255,},
{247, 252, 255, 245, 246, 255, 253, 255, 255, 255,},
{248, 252, 255, 246, 247, 255, 253, 255, 255, 255,},
{249, 253, 255, 247, 248, 255, 254, 255, 255, 255,},
{250, 253, 255, 248, 249, 255, 254, 255, 255, 255,},
{251, 254, 255, 249, 250, 255, 254, 255, 255, 255,},
{252, 254, 255, 251, 251, 255, 255, 255, 255, 255,},
{253, 255, 255, 252, 252, 255, 255, 255, 255, 255,},
{254, 255, 255, 253, 253, 255, 255, 255, 255, 255,},
{255, 255, 255, 254, 254, 255, 255, 255, 255, 255,},
};
const vp9_prob vp9_modelcoefprobs_gg875p1[COEFPROB_MODELS][ENTROPY_NODES - 1] = {
// Probs generated with a Generalized Gaussian (with shape parameter 0.875)
// source model with varying quantizer step size for a uniform quantizer
{0, 0, 0, 0, 0, 0, 0, 0, 0, 0,}, // do not use
{1, 1, 3, 86, 128, 6, 86, 22, 89, 28,},
{1, 2, 6, 86, 129, 11, 87, 42, 92, 52,},
{2, 3, 9, 87, 129, 17, 88, 59, 94, 73,},
{2, 4, 12, 87, 129, 22, 89, 75, 97, 92,},
{3, 5, 14, 88, 130, 27, 89, 90, 100, 108,},
{3, 6, 17, 88, 130, 33, 90, 103, 102, 122,},
{4, 7, 20, 88, 130, 37, 91, 115, 105, 135,},
{4, 8, 23, 89, 131, 42, 92, 126, 108, 147,},
{5, 9, 25, 89, 131, 47, 92, 137, 110, 157,},
{5, 10, 28, 90, 131, 52, 93, 146, 113, 167,},
{6, 11, 31, 90, 132, 56, 94, 154, 115, 175,},
{6, 12, 33, 90, 132, 60, 94, 162, 118, 183,},
{7, 13, 36, 91, 132, 65, 95, 170, 120, 190,},
{7, 14, 39, 91, 132, 69, 96, 176, 123, 196,},
{8, 15, 41, 92, 133, 73, 96, 182, 125, 201,},
{8, 16, 44, 92, 133, 77, 97, 188, 128, 206,},
{9, 17, 46, 92, 133, 81, 98, 193, 130, 211,},
{9, 18, 49, 93, 134, 85, 99, 198, 133, 215,},
{10, 19, 51, 93, 134, 89, 99, 203, 135, 219,},
{10, 20, 54, 93, 134, 92, 100, 207, 137, 222,},
{11, 21, 56, 94, 134, 96, 101, 211, 140, 226,},
{12, 22, 58, 94, 135, 100, 101, 214, 142, 228,},
{12, 23, 61, 95, 135, 103, 102, 217, 145, 231,},
{13, 24, 63, 95, 135, 106, 103, 220, 147, 233,},
{13, 25, 66, 95, 136, 110, 103, 223, 149, 235,},
{14, 26, 68, 96, 136, 113, 104, 226, 151, 237,},
{14, 27, 70, 96, 136, 116, 105, 228, 154, 239,},
{15, 28, 72, 97, 136, 119, 106, 230, 156, 241,},
{15, 29, 75, 97, 137, 122, 106, 232, 158, 242,},
{16, 30, 77, 97, 137, 125, 107, 234, 160, 243,},
{17, 31, 79, 98, 137, 128, 108, 236, 163, 245,},
{17, 32, 81, 98, 138, 131, 108, 237, 165, 246,},
{18, 33, 83, 99, 138, 134, 109, 239, 167, 247,},
{18, 34, 86, 99, 138, 137, 110, 240, 169, 248,},
{19, 35, 88, 99, 138, 140, 111, 242, 171, 248,},
{19, 36, 90, 100, 139, 142, 111, 243, 173, 249,},
{20, 37, 92, 100, 139, 145, 112, 244, 175, 250,},
{20, 38, 94, 101, 139, 148, 113, 245, 177, 250,},
{21, 39, 96, 101, 140, 150, 113, 246, 179, 251,},
{22, 40, 98, 101, 140, 153, 114, 246, 181, 251,},
{22, 41, 100, 102, 140, 155, 115, 247, 183, 252,},
{23, 42, 102, 102, 140, 157, 116, 248, 185, 252,},
{23, 43, 104, 103, 141, 160, 116, 249, 186, 253,},
{24, 44, 106, 103, 141, 162, 117, 249, 188, 253,},
{25, 45, 108, 103, 141, 164, 118, 250, 190, 253,},
{25, 46, 110, 104, 142, 166, 119, 250, 192, 253,},
{26, 47, 112, 104, 142, 168, 119, 251, 193, 254,},
{26, 48, 114, 105, 142, 171, 120, 251, 195, 254,},
{27, 49, 116, 105, 143, 173, 121, 252, 197, 254,},
{27, 50, 118, 105, 143, 175, 122, 252, 198, 254,},
{28, 51, 119, 106, 143, 177, 122, 252, 200, 254,},
{29, 52, 121, 106, 143, 179, 123, 253, 201, 255,},
{29, 53, 123, 107, 144, 180, 124, 253, 203, 255,},
{30, 54, 125, 107, 144, 182, 125, 253, 204, 255,},
{30, 55, 127, 108, 144, 184, 125, 253, 206, 255,},
{31, 56, 128, 108, 145, 186, 126, 254, 207, 255,},
{32, 57, 130, 108, 145, 188, 127, 254, 209, 255,},
{32, 58, 132, 109, 145, 189, 128, 254, 210, 255,},
{33, 59, 134, 109, 146, 191, 128, 254, 211, 255,},
{33, 60, 135, 110, 146, 193, 129, 254, 213, 255,},
{34, 61, 137, 110, 146, 194, 130, 254, 214, 255,},
{35, 62, 139, 111, 146, 196, 131, 255, 215, 255,},
{35, 63, 140, 111, 147, 197, 131, 255, 216, 255,},
{36, 64, 142, 112, 147, 199, 132, 255, 218, 255,},
{37, 65, 144, 112, 147, 200, 133, 255, 219, 255,},
{37, 66, 145, 112, 148, 202, 134, 255, 220, 255,},
{38, 67, 147, 113, 148, 203, 135, 255, 221, 255,},
{38, 68, 148, 113, 148, 204, 135, 255, 222, 255,},
{39, 69, 150, 114, 149, 206, 136, 255, 223, 255,},
{40, 70, 151, 114, 149, 207, 137, 255, 224, 255,},
{40, 71, 153, 115, 149, 208, 138, 255, 225, 255,},
{41, 72, 154, 115, 150, 210, 138, 255, 226, 255,},
{42, 73, 156, 116, 150, 211, 139, 255, 227, 255,},
{42, 74, 157, 116, 150, 212, 140, 255, 228, 255,},
{43, 75, 159, 117, 151, 213, 141, 255, 229, 255,},
{44, 76, 160, 117, 151, 214, 142, 255, 230, 255,},
{44, 77, 162, 117, 151, 216, 142, 255, 231, 255,},
{45, 78, 163, 118, 152, 217, 143, 255, 231, 255,},
{45, 79, 165, 118, 152, 218, 144, 255, 232, 255,},
{46, 80, 166, 119, 152, 219, 145, 255, 233, 255,},
{47, 81, 167, 119, 153, 220, 146, 255, 234, 255,},
{47, 82, 169, 120, 153, 221, 146, 255, 235, 255,},
{48, 83, 170, 120, 153, 222, 147, 255, 235, 255,},
{49, 84, 171, 121, 154, 223, 148, 255, 236, 255,},
{49, 85, 173, 121, 154, 224, 149, 255, 237, 255,},
{50, 86, 174, 122, 154, 225, 150, 255, 237, 255,},
{51, 87, 175, 122, 155, 225, 150, 255, 238, 255,},
{51, 88, 177, 123, 155, 226, 151, 255, 239, 255,},
{52, 89, 178, 123, 155, 227, 152, 255, 239, 255,},
{53, 90, 179, 124, 156, 228, 153, 255, 240, 255,},
{53, 91, 180, 124, 156, 229, 154, 255, 240, 255,},
{54, 92, 182, 125, 156, 230, 154, 255, 241, 255,},
{55, 93, 183, 125, 157, 230, 155, 255, 241, 255,},
{55, 94, 184, 126, 157, 231, 156, 255, 242, 255,},
{56, 95, 185, 126, 157, 232, 157, 255, 242, 255,},
{57, 96, 187, 127, 158, 233, 158, 255, 243, 255,},
{57, 97, 188, 127, 158, 233, 159, 255, 243, 255,},
{58, 98, 189, 128, 158, 234, 159, 255, 244, 255,},
{59, 99, 190, 128, 159, 235, 160, 255, 244, 255,},
{60, 100, 191, 129, 159, 235, 161, 255, 245, 255,},
{60, 101, 192, 129, 160, 236, 162, 255, 245, 255,},
{61, 102, 193, 130, 160, 237, 163, 255, 246, 255,},
{62, 103, 194, 131, 160, 237, 164, 255, 246, 255,},
{62, 104, 196, 131, 161, 238, 164, 255, 246, 255,},
{63, 105, 197, 132, 161, 238, 165, 255, 247, 255,},
{64, 106, 198, 132, 161, 239, 166, 255, 247, 255,},
{64, 107, 199, 133, 162, 239, 167, 255, 247, 255,},
{65, 108, 200, 133, 162, 240, 168, 255, 248, 255,},
{66, 109, 201, 134, 163, 241, 168, 255, 248, 255,},
{67, 110, 202, 134, 163, 241, 169, 255, 248, 255,},
{67, 111, 203, 135, 163, 242, 170, 255, 249, 255,},
{68, 112, 204, 135, 164, 242, 171, 255, 249, 255,},
{69, 113, 205, 136, 164, 242, 172, 255, 249, 255,},
{69, 114, 206, 137, 164, 243, 173, 255, 250, 255,},
{70, 115, 207, 137, 165, 243, 173, 255, 250, 255,},
{71, 116, 208, 138, 165, 244, 174, 255, 250, 255,},
{72, 117, 208, 138, 166, 244, 175, 255, 250, 255,},
{72, 118, 209, 139, 166, 245, 176, 255, 251, 255,},
{73, 119, 210, 139, 166, 245, 177, 255, 251, 255,},
{74, 120, 211, 140, 167, 245, 178, 255, 251, 255,},
{75, 121, 212, 141, 167, 246, 178, 255, 251, 255,},
{75, 122, 213, 141, 168, 246, 179, 255, 251, 255,},
{76, 123, 214, 142, 168, 246, 180, 255, 252, 255,},
{77, 124, 215, 142, 168, 247, 181, 255, 252, 255,},
{78, 125, 215, 143, 169, 247, 182, 255, 252, 255,},
{78, 126, 216, 144, 169, 247, 182, 255, 252, 255,},
{79, 127, 217, 144, 170, 248, 183, 255, 252, 255,},
{80, 128, 218, 145, 170, 248, 184, 255, 253, 255,},
{81, 129, 219, 145, 170, 248, 185, 255, 253, 255,},
{82, 130, 219, 146, 171, 249, 186, 255, 253, 255,},
{82, 131, 220, 147, 171, 249, 187, 255, 253, 255,},
{83, 132, 221, 147, 172, 249, 187, 255, 253, 255,},
{84, 133, 222, 148, 172, 249, 188, 255, 253, 255,},
{85, 134, 222, 148, 173, 250, 189, 255, 253, 255,},
{85, 135, 223, 149, 173, 250, 190, 255, 254, 255,},
{86, 136, 224, 150, 173, 250, 191, 255, 254, 255,},
{87, 137, 225, 150, 174, 250, 191, 255, 254, 255,},
{88, 138, 225, 151, 174, 251, 192, 255, 254, 255,},
{89, 139, 226, 152, 175, 251, 193, 255, 254, 255,},
{89, 140, 227, 152, 175, 251, 194, 255, 254, 255,},
{90, 141, 227, 153, 176, 251, 195, 255, 254, 255,},
{91, 142, 228, 153, 176, 251, 195, 255, 254, 255,},
{92, 143, 229, 154, 176, 252, 196, 255, 254, 255,},
{93, 144, 229, 155, 177, 252, 197, 255, 254, 255,},
{93, 145, 230, 155, 177, 252, 198, 255, 255, 255,},
{94, 146, 231, 156, 178, 252, 199, 255, 255, 255,},
{95, 147, 231, 157, 178, 252, 199, 255, 255, 255,},
{96, 148, 232, 157, 179, 252, 200, 255, 255, 255,},
{97, 149, 232, 158, 179, 253, 201, 255, 255, 255,},
{98, 150, 233, 159, 180, 253, 202, 255, 255, 255,},
{99, 151, 234, 159, 180, 253, 202, 255, 255, 255,},
{99, 152, 234, 160, 181, 253, 203, 255, 255, 255,},
{100, 153, 235, 161, 181, 253, 204, 255, 255, 255,},
{101, 154, 235, 162, 182, 253, 205, 255, 255, 255,},
{102, 155, 236, 162, 182, 253, 206, 255, 255, 255,},
{103, 156, 236, 163, 183, 254, 206, 255, 255, 255,},
{104, 157, 237, 164, 183, 254, 207, 255, 255, 255,},
{105, 158, 237, 164, 183, 254, 208, 255, 255, 255,},
{105, 159, 238, 165, 184, 254, 209, 255, 255, 255,},
{106, 160, 238, 166, 184, 254, 209, 255, 255, 255,},
{107, 161, 239, 166, 185, 254, 210, 255, 255, 255,},
{108, 162, 239, 167, 185, 254, 211, 255, 255, 255,},
{109, 163, 240, 168, 186, 254, 212, 255, 255, 255,},
{110, 164, 240, 169, 186, 254, 212, 255, 255, 255,},
{111, 165, 241, 169, 187, 254, 213, 255, 255, 255,},
{112, 166, 241, 170, 187, 255, 214, 255, 255, 255,},
{113, 167, 242, 171, 188, 255, 215, 255, 255, 255,},
{114, 168, 242, 172, 189, 255, 215, 255, 255, 255,},
{114, 169, 242, 172, 189, 255, 216, 255, 255, 255,},
{115, 170, 243, 173, 190, 255, 217, 255, 255, 255,},
{116, 171, 243, 174, 190, 255, 217, 255, 255, 255,},
{117, 172, 244, 175, 191, 255, 218, 255, 255, 255,},
{118, 173, 244, 175, 191, 255, 219, 255, 255, 255,},
{119, 174, 244, 176, 192, 255, 220, 255, 255, 255,},
{120, 175, 245, 177, 192, 255, 220, 255, 255, 255,},
{121, 176, 245, 178, 193, 255, 221, 255, 255, 255,},
{122, 177, 245, 178, 193, 255, 222, 255, 255, 255,},
{123, 178, 246, 179, 194, 255, 222, 255, 255, 255,},
{124, 179, 246, 180, 194, 255, 223, 255, 255, 255,},
{125, 180, 247, 181, 195, 255, 224, 255, 255, 255,},
{126, 181, 247, 182, 196, 255, 224, 255, 255, 255,},
{127, 182, 247, 182, 196, 255, 225, 255, 255, 255,},
{128, 183, 247, 183, 197, 255, 226, 255, 255, 255,},
{129, 184, 248, 184, 197, 255, 226, 255, 255, 255,},
{130, 185, 248, 185, 198, 255, 227, 255, 255, 255,},
{131, 186, 248, 186, 198, 255, 228, 255, 255, 255,},
{132, 187, 249, 186, 199, 255, 228, 255, 255, 255,},
{133, 188, 249, 187, 200, 255, 229, 255, 255, 255,},
{134, 189, 249, 188, 200, 255, 230, 255, 255, 255,},
{135, 190, 249, 189, 201, 255, 230, 255, 255, 255,},
{136, 191, 250, 190, 201, 255, 231, 255, 255, 255,},
{137, 192, 250, 191, 202, 255, 231, 255, 255, 255,},
{138, 193, 250, 191, 203, 255, 232, 255, 255, 255,},
{139, 194, 250, 192, 203, 255, 233, 255, 255, 255,},
{140, 195, 251, 193, 204, 255, 233, 255, 255, 255,},
{142, 196, 251, 194, 204, 255, 234, 255, 255, 255,},
{143, 197, 251, 195, 205, 255, 234, 255, 255, 255,},
{144, 198, 251, 196, 206, 255, 235, 255, 255, 255,},
{145, 199, 252, 197, 206, 255, 236, 255, 255, 255,},
{146, 200, 252, 197, 207, 255, 236, 255, 255, 255,},
{147, 201, 252, 198, 208, 255, 237, 255, 255, 255,},
{148, 202, 252, 199, 208, 255, 237, 255, 255, 255,},
{149, 203, 252, 200, 209, 255, 238, 255, 255, 255,},
{151, 204, 253, 201, 210, 255, 238, 255, 255, 255,},
{152, 205, 253, 202, 210, 255, 239, 255, 255, 255,},
{153, 206, 253, 203, 211, 255, 239, 255, 255, 255,},
{154, 207, 253, 204, 212, 255, 240, 255, 255, 255,},
{155, 208, 253, 205, 212, 255, 241, 255, 255, 255,},
{157, 209, 253, 206, 213, 255, 241, 255, 255, 255,},
{158, 210, 253, 206, 214, 255, 242, 255, 255, 255,},
{159, 211, 254, 207, 214, 255, 242, 255, 255, 255,},
{160, 212, 254, 208, 215, 255, 243, 255, 255, 255,},
{162, 213, 254, 209, 216, 255, 243, 255, 255, 255,},
{163, 214, 254, 210, 217, 255, 244, 255, 255, 255,},
{164, 215, 254, 211, 217, 255, 244, 255, 255, 255,},
{165, 216, 254, 212, 218, 255, 244, 255, 255, 255,},
{167, 217, 254, 213, 219, 255, 245, 255, 255, 255,},
{168, 218, 254, 214, 219, 255, 245, 255, 255, 255,},
{169, 219, 255, 215, 220, 255, 246, 255, 255, 255,},
{171, 220, 255, 216, 221, 255, 246, 255, 255, 255,},
{172, 221, 255, 217, 222, 255, 247, 255, 255, 255,},
{174, 222, 255, 218, 223, 255, 247, 255, 255, 255,},
{175, 223, 255, 219, 223, 255, 248, 255, 255, 255,},
{177, 224, 255, 220, 224, 255, 248, 255, 255, 255,},
{178, 225, 255, 221, 225, 255, 248, 255, 255, 255,},
{179, 226, 255, 222, 226, 255, 249, 255, 255, 255,},
{181, 227, 255, 223, 227, 255, 249, 255, 255, 255,},
{182, 228, 255, 224, 227, 255, 250, 255, 255, 255,},
{184, 229, 255, 225, 228, 255, 250, 255, 255, 255,},
{186, 230, 255, 226, 229, 255, 250, 255, 255, 255,},
{187, 231, 255, 227, 230, 255, 251, 255, 255, 255,},
{189, 232, 255, 228, 231, 255, 251, 255, 255, 255,},
{190, 233, 255, 229, 232, 255, 251, 255, 255, 255,},
{192, 234, 255, 230, 232, 255, 252, 255, 255, 255,},
{194, 235, 255, 231, 233, 255, 252, 255, 255, 255,},
{196, 236, 255, 232, 234, 255, 252, 255, 255, 255,},
{197, 237, 255, 233, 235, 255, 253, 255, 255, 255,},
{199, 238, 255, 234, 236, 255, 253, 255, 255, 255,},
{201, 239, 255, 235, 237, 255, 253, 255, 255, 255,},
{203, 240, 255, 237, 238, 255, 253, 255, 255, 255,},
{205, 241, 255, 238, 239, 255, 254, 255, 255, 255,},
{207, 242, 255, 239, 240, 255, 254, 255, 255, 255,},
{209, 243, 255, 240, 241, 255, 254, 255, 255, 255,},
{211, 244, 255, 241, 242, 255, 254, 255, 255, 255,},
{214, 245, 255, 242, 243, 255, 255, 255, 255, 255,},
{216, 246, 255, 243, 244, 255, 255, 255, 255, 255,},
{218, 247, 255, 244, 245, 255, 255, 255, 255, 255,},
{221, 248, 255, 246, 246, 255, 255, 255, 255, 255,},
{224, 249, 255, 247, 247, 255, 255, 255, 255, 255,},
{226, 250, 255, 248, 248, 255, 255, 255, 255, 255,},
{229, 251, 255, 249, 249, 255, 255, 255, 255, 255,},
{233, 252, 255, 251, 251, 255, 255, 255, 255, 255,},
{236, 253, 255, 252, 252, 255, 255, 255, 255, 255,},
{241, 254, 255, 253, 253, 255, 255, 255, 255, 255,},
{246, 255, 255, 254, 254, 255, 255, 255, 255, 255,},
};
const vp9_prob vp9_modelcoefprobs_gg75p1[COEFPROB_MODELS][ENTROPY_NODES - 1] = {
// Probs generated with a Generalized Gaussian (with shape parameter 0.75)
// source model with varying quantizer step size for a uniform quantizer
{0, 0, 0, 0, 0, 0, 0, 0, 0, 0,}, // do not use
{1, 1, 3, 86, 129, 6, 87, 21, 90, 26,},
{1, 2, 6, 87, 129, 11, 88, 39, 93, 47,},
{2, 3, 9, 87, 130, 16, 89, 55, 96, 65,},
{2, 4, 11, 88, 130, 21, 89, 69, 98, 81,},
{3, 5, 14, 88, 130, 26, 90, 82, 101, 95,},
{3, 6, 17, 89, 131, 31, 91, 94, 103, 107,},
{4, 7, 20, 89, 131, 35, 92, 105, 105, 119,},
{4, 8, 22, 90, 131, 40, 92, 115, 108, 129,},
{5, 9, 25, 90, 132, 44, 93, 124, 110, 138,},
{5, 10, 27, 91, 132, 48, 94, 133, 112, 147,},
{6, 11, 30, 91, 132, 52, 95, 141, 114, 155,},
{6, 12, 32, 92, 133, 56, 95, 148, 116, 162,},
{7, 13, 35, 92, 133, 60, 96, 155, 118, 168,},
{7, 14, 37, 92, 133, 64, 97, 161, 121, 174,},
{8, 15, 40, 93, 134, 68, 97, 167, 123, 180,},
{9, 16, 42, 93, 134, 71, 98, 173, 125, 185,},
{9, 17, 44, 94, 134, 75, 99, 178, 127, 190,},
{10, 18, 47, 94, 135, 78, 99, 182, 129, 195,},
{10, 19, 49, 94, 135, 82, 100, 187, 131, 199,},
{11, 20, 51, 95, 135, 85, 100, 191, 133, 202,},
{11, 21, 54, 95, 135, 88, 101, 195, 135, 206,},
{12, 22, 56, 96, 136, 92, 102, 199, 137, 209,},
{13, 23, 58, 96, 136, 95, 102, 202, 138, 213,},
{13, 24, 61, 96, 136, 98, 103, 206, 140, 215,},
{14, 25, 63, 97, 137, 101, 104, 209, 142, 218,},
{14, 26, 65, 97, 137, 104, 104, 211, 144, 221,},
{15, 27, 67, 98, 137, 107, 105, 214, 146, 223,},
{15, 28, 69, 98, 138, 110, 106, 217, 148, 225,},
{16, 29, 71, 98, 138, 113, 106, 219, 150, 227,},
{17, 30, 73, 99, 138, 115, 107, 221, 151, 229,},
{17, 31, 76, 99, 138, 118, 107, 223, 153, 231,},
{18, 32, 78, 100, 139, 121, 108, 225, 155, 232,},
{18, 33, 80, 100, 139, 123, 109, 227, 157, 234,},
{19, 34, 82, 100, 139, 126, 109, 229, 158, 235,},
{20, 35, 84, 101, 140, 128, 110, 231, 160, 237,},
{20, 36, 86, 101, 140, 131, 111, 232, 162, 238,},
{21, 37, 88, 102, 140, 133, 111, 234, 164, 239,},
{21, 38, 90, 102, 140, 136, 112, 235, 165, 240,},
{22, 39, 92, 102, 141, 138, 112, 236, 167, 241,},
{23, 40, 94, 103, 141, 140, 113, 237, 169, 242,},
{23, 41, 95, 103, 141, 143, 114, 238, 170, 243,},
{24, 42, 97, 103, 142, 145, 114, 240, 172, 244,},
{25, 43, 99, 104, 142, 147, 115, 241, 173, 245,},
{25, 44, 101, 104, 142, 149, 116, 242, 175, 246,},
{26, 45, 103, 105, 142, 151, 116, 242, 176, 246,},
{26, 46, 105, 105, 143, 153, 117, 243, 178, 247,},
{27, 47, 107, 105, 143, 156, 117, 244, 180, 248,},
{28, 48, 108, 106, 143, 158, 118, 245, 181, 248,},
{28, 49, 110, 106, 144, 159, 119, 245, 182, 249,},
{29, 50, 112, 107, 144, 161, 119, 246, 184, 249,},
{30, 51, 114, 107, 144, 163, 120, 247, 185, 250,},
{30, 52, 115, 108, 144, 165, 121, 247, 187, 250,},
{31, 53, 117, 108, 145, 167, 121, 248, 188, 250,},
{32, 54, 119, 108, 145, 169, 122, 248, 190, 251,},
{32, 55, 121, 109, 145, 171, 123, 249, 191, 251,},
{33, 56, 122, 109, 146, 172, 123, 249, 192, 251,},
{34, 57, 124, 110, 146, 174, 124, 250, 194, 252,},
{34, 58, 126, 110, 146, 176, 125, 250, 195, 252,},
{35, 59, 127, 110, 147, 177, 125, 250, 196, 252,},
{36, 60, 129, 111, 147, 179, 126, 251, 197, 253,},
{36, 61, 130, 111, 147, 181, 127, 251, 199, 253,},
{37, 62, 132, 112, 147, 182, 127, 251, 200, 253,},
{38, 63, 134, 112, 148, 184, 128, 252, 201, 253,},
{38, 64, 135, 112, 148, 185, 128, 252, 202, 253,},
{39, 65, 137, 113, 148, 187, 129, 252, 204, 254,},
{40, 66, 138, 113, 149, 188, 130, 253, 205, 254,},
{40, 67, 140, 114, 149, 190, 130, 253, 206, 254,},
{41, 68, 141, 114, 149, 191, 131, 253, 207, 254,},
{42, 69, 143, 115, 150, 192, 132, 253, 208, 254,},
{42, 70, 144, 115, 150, 194, 132, 253, 209, 254,},
{43, 71, 146, 115, 150, 195, 133, 254, 210, 254,},
{44, 72, 147, 116, 150, 197, 134, 254, 211, 255,},
{44, 73, 149, 116, 151, 198, 134, 254, 212, 255,},
{45, 74, 150, 117, 151, 199, 135, 254, 213, 255,},
{46, 75, 152, 117, 151, 200, 136, 254, 214, 255,},
{46, 76, 153, 118, 152, 202, 136, 254, 215, 255,},
{47, 77, 154, 118, 152, 203, 137, 254, 216, 255,},
{48, 78, 156, 119, 152, 204, 138, 254, 217, 255,},
{49, 79, 157, 119, 153, 205, 139, 255, 218, 255,},
{49, 80, 159, 119, 153, 206, 139, 255, 219, 255,},
{50, 81, 160, 120, 153, 207, 140, 255, 220, 255,},
{51, 82, 161, 120, 154, 208, 141, 255, 221, 255,},
{51, 83, 163, 121, 154, 210, 141, 255, 222, 255,},
{52, 84, 164, 121, 154, 211, 142, 255, 223, 255,},
{53, 85, 165, 122, 154, 212, 143, 255, 223, 255,},
{54, 86, 166, 122, 155, 213, 143, 255, 224, 255,},
{54, 87, 168, 123, 155, 214, 144, 255, 225, 255,},
{55, 88, 169, 123, 155, 215, 145, 255, 226, 255,},
{56, 89, 170, 123, 156, 216, 145, 255, 227, 255,},
{57, 90, 172, 124, 156, 217, 146, 255, 227, 255,},
{57, 91, 173, 124, 156, 218, 147, 255, 228, 255,},
{58, 92, 174, 125, 157, 218, 147, 255, 229, 255,},
{59, 93, 175, 125, 157, 219, 148, 255, 230, 255,},
{60, 94, 176, 126, 157, 220, 149, 255, 230, 255,},
{60, 95, 178, 126, 158, 221, 150, 255, 231, 255,},
{61, 96, 179, 127, 158, 222, 150, 255, 232, 255,},
{62, 97, 180, 127, 158, 223, 151, 255, 232, 255,},
{63, 98, 181, 128, 159, 224, 152, 255, 233, 255,},
{63, 99, 182, 128, 159, 224, 152, 255, 234, 255,},
{64, 100, 183, 129, 159, 225, 153, 255, 234, 255,},
{65, 101, 184, 129, 160, 226, 154, 255, 235, 255,},
{66, 102, 186, 130, 160, 227, 154, 255, 235, 255,},
{66, 103, 187, 130, 160, 227, 155, 255, 236, 255,},
{67, 104, 188, 131, 161, 228, 156, 255, 236, 255,},
{68, 105, 189, 131, 161, 229, 157, 255, 237, 255,},
{69, 106, 190, 132, 161, 230, 157, 255, 238, 255,},
{69, 107, 191, 132, 162, 230, 158, 255, 238, 255,},
{70, 108, 192, 133, 162, 231, 159, 255, 239, 255,},
{71, 109, 193, 133, 163, 232, 159, 255, 239, 255,},
{72, 110, 194, 134, 163, 232, 160, 255, 240, 255,},
{73, 111, 195, 134, 163, 233, 161, 255, 240, 255,},
{73, 112, 196, 135, 164, 233, 162, 255, 241, 255,},
{74, 113, 197, 135, 164, 234, 162, 255, 241, 255,},
{75, 114, 198, 136, 164, 235, 163, 255, 241, 255,},
{76, 115, 199, 136, 165, 235, 164, 255, 242, 255,},
{77, 116, 200, 137, 165, 236, 165, 255, 242, 255,},
{77, 117, 201, 137, 165, 236, 165, 255, 243, 255,},
{78, 118, 202, 138, 166, 237, 166, 255, 243, 255,},
{79, 119, 203, 138, 166, 237, 167, 255, 244, 255,},
{80, 120, 204, 139, 166, 238, 167, 255, 244, 255,},
{81, 121, 205, 139, 167, 238, 168, 255, 244, 255,},
{82, 122, 206, 140, 167, 239, 169, 255, 245, 255,},
{82, 123, 206, 141, 168, 239, 170, 255, 245, 255,},
{83, 124, 207, 141, 168, 240, 170, 255, 245, 255,},
{84, 125, 208, 142, 168, 240, 171, 255, 246, 255,},
{85, 126, 209, 142, 169, 241, 172, 255, 246, 255,},
{86, 127, 210, 143, 169, 241, 173, 255, 246, 255,},
{87, 128, 211, 143, 169, 242, 173, 255, 247, 255,},
{87, 129, 212, 144, 170, 242, 174, 255, 247, 255,},
{88, 130, 212, 144, 170, 242, 175, 255, 247, 255,},
{89, 131, 213, 145, 171, 243, 176, 255, 248, 255,},
{90, 132, 214, 146, 171, 243, 176, 255, 248, 255,},
{91, 133, 215, 146, 171, 244, 177, 255, 248, 255,},
{92, 134, 216, 147, 172, 244, 178, 255, 248, 255,},
{93, 135, 216, 147, 172, 244, 179, 255, 249, 255,},
{93, 136, 217, 148, 173, 245, 179, 255, 249, 255,},
{94, 137, 218, 148, 173, 245, 180, 255, 249, 255,},
{95, 138, 219, 149, 173, 245, 181, 255, 249, 255,},
{96, 139, 220, 150, 174, 246, 181, 255, 250, 255,},
{97, 140, 220, 150, 174, 246, 182, 255, 250, 255,},
{98, 141, 221, 151, 175, 246, 183, 255, 250, 255,},
{99, 142, 222, 151, 175, 247, 184, 255, 250, 255,},
{100, 143, 222, 152, 175, 247, 184, 255, 251, 255,},
{100, 144, 223, 153, 176, 247, 185, 255, 251, 255,},
{101, 145, 224, 153, 176, 248, 186, 255, 251, 255,},
{102, 146, 224, 154, 177, 248, 187, 255, 251, 255,},
{103, 147, 225, 154, 177, 248, 187, 255, 251, 255,},
{104, 148, 226, 155, 178, 248, 188, 255, 252, 255,},
{105, 149, 226, 156, 178, 249, 189, 255, 252, 255,},
{106, 150, 227, 156, 178, 249, 190, 255, 252, 255,},
{107, 151, 228, 157, 179, 249, 190, 255, 252, 255,},
{108, 152, 228, 158, 179, 249, 191, 255, 252, 255,},
{109, 153, 229, 158, 180, 250, 192, 255, 252, 255,},
{110, 154, 230, 159, 180, 250, 193, 255, 253, 255,},
{111, 155, 230, 159, 181, 250, 193, 255, 253, 255,},
{111, 156, 231, 160, 181, 250, 194, 255, 253, 255,},
{112, 157, 231, 161, 181, 251, 195, 255, 253, 255,},
{113, 158, 232, 161, 182, 251, 196, 255, 253, 255,},
{114, 159, 233, 162, 182, 251, 196, 255, 253, 255,},
{115, 160, 233, 163, 183, 251, 197, 255, 253, 255,},
{116, 161, 234, 163, 183, 251, 198, 255, 253, 255,},
{117, 162, 234, 164, 184, 252, 199, 255, 254, 255,},
{118, 163, 235, 165, 184, 252, 199, 255, 254, 255,},
{119, 164, 235, 165, 185, 252, 200, 255, 254, 255,},
{120, 165, 236, 166, 185, 252, 201, 255, 254, 255,},
{121, 166, 236, 167, 186, 252, 202, 255, 254, 255,},
{122, 167, 237, 167, 186, 252, 202, 255, 254, 255,},
{123, 168, 237, 168, 187, 253, 203, 255, 254, 255,},
{124, 169, 238, 169, 187, 253, 204, 255, 254, 255,},
{125, 170, 238, 169, 188, 253, 205, 255, 254, 255,},
{126, 171, 239, 170, 188, 253, 205, 255, 254, 255,},
{127, 172, 239, 171, 189, 253, 206, 255, 254, 255,},
{128, 173, 240, 172, 189, 253, 207, 255, 255, 255,},
{129, 174, 240, 172, 190, 253, 208, 255, 255, 255,},
{130, 175, 241, 173, 190, 253, 208, 255, 255, 255,},
{131, 176, 241, 174, 191, 254, 209, 255, 255, 255,},
{132, 177, 242, 175, 191, 254, 210, 255, 255, 255,},
{133, 178, 242, 175, 192, 254, 210, 255, 255, 255,},
{134, 179, 242, 176, 192, 254, 211, 255, 255, 255,},
{135, 180, 243, 177, 193, 254, 212, 255, 255, 255,},
{137, 181, 243, 177, 193, 254, 213, 255, 255, 255,},
{138, 182, 244, 178, 194, 254, 213, 255, 255, 255,},
{139, 183, 244, 179, 194, 254, 214, 255, 255, 255,},
{140, 184, 244, 180, 195, 254, 215, 255, 255, 255,},
{141, 185, 245, 181, 195, 254, 216, 255, 255, 255,},
{142, 186, 245, 181, 196, 255, 216, 255, 255, 255,},
{143, 187, 245, 182, 196, 255, 217, 255, 255, 255,},
{144, 188, 246, 183, 197, 255, 218, 255, 255, 255,},
{145, 189, 246, 184, 197, 255, 218, 255, 255, 255,},
{146, 190, 247, 184, 198, 255, 219, 255, 255, 255,},
{147, 191, 247, 185, 199, 255, 220, 255, 255, 255,},
{149, 192, 247, 186, 199, 255, 221, 255, 255, 255,},
{150, 193, 247, 187, 200, 255, 221, 255, 255, 255,},
{151, 194, 248, 188, 200, 255, 222, 255, 255, 255,},
{152, 195, 248, 188, 201, 255, 223, 255, 255, 255,},
{153, 196, 248, 189, 201, 255, 223, 255, 255, 255,},
{154, 197, 249, 190, 202, 255, 224, 255, 255, 255,},
{156, 198, 249, 191, 203, 255, 225, 255, 255, 255,},
{157, 199, 249, 192, 203, 255, 225, 255, 255, 255,},
{158, 200, 250, 193, 204, 255, 226, 255, 255, 255,},
{159, 201, 250, 193, 205, 255, 227, 255, 255, 255,},
{160, 202, 250, 194, 205, 255, 227, 255, 255, 255,},
{162, 203, 250, 195, 206, 255, 228, 255, 255, 255,},
{163, 204, 251, 196, 206, 255, 229, 255, 255, 255,},
{164, 205, 251, 197, 207, 255, 229, 255, 255, 255,},
{165, 206, 251, 198, 208, 255, 230, 255, 255, 255,},
{166, 207, 251, 199, 208, 255, 231, 255, 255, 255,},
{168, 208, 251, 200, 209, 255, 231, 255, 255, 255,},
{169, 209, 252, 201, 210, 255, 232, 255, 255, 255,},
{170, 210, 252, 201, 210, 255, 233, 255, 255, 255,},
{172, 211, 252, 202, 211, 255, 233, 255, 255, 255,},
{173, 212, 252, 203, 212, 255, 234, 255, 255, 255,},
{174, 213, 252, 204, 212, 255, 235, 255, 255, 255,},
{175, 214, 253, 205, 213, 255, 235, 255, 255, 255,},
{177, 215, 253, 206, 214, 255, 236, 255, 255, 255,},
{178, 216, 253, 207, 215, 255, 237, 255, 255, 255,},
{179, 217, 253, 208, 215, 255, 237, 255, 255, 255,},
{181, 218, 253, 209, 216, 255, 238, 255, 255, 255,},
{182, 219, 254, 210, 217, 255, 238, 255, 255, 255,},
{184, 220, 254, 211, 217, 255, 239, 255, 255, 255,},
{185, 221, 254, 212, 218, 255, 240, 255, 255, 255,},
{186, 222, 254, 213, 219, 255, 240, 255, 255, 255,},
{188, 223, 254, 214, 220, 255, 241, 255, 255, 255,},
{189, 224, 254, 215, 221, 255, 241, 255, 255, 255,},
{191, 225, 254, 216, 221, 255, 242, 255, 255, 255,},
{192, 226, 254, 217, 222, 255, 243, 255, 255, 255,},
{194, 227, 255, 218, 223, 255, 243, 255, 255, 255,},
{195, 228, 255, 219, 224, 255, 244, 255, 255, 255,},
{197, 229, 255, 220, 225, 255, 244, 255, 255, 255,},
{198, 230, 255, 221, 225, 255, 245, 255, 255, 255,},
{200, 231, 255, 222, 226, 255, 245, 255, 255, 255,},
{201, 232, 255, 223, 227, 255, 246, 255, 255, 255,},
{203, 233, 255, 224, 228, 255, 247, 255, 255, 255,},
{205, 234, 255, 226, 229, 255, 247, 255, 255, 255,},
{206, 235, 255, 227, 230, 255, 248, 255, 255, 255,},
{208, 236, 255, 228, 231, 255, 248, 255, 255, 255,},
{210, 237, 255, 229, 232, 255, 249, 255, 255, 255,},
{211, 238, 255, 230, 233, 255, 249, 255, 255, 255,},
{213, 239, 255, 231, 234, 255, 250, 255, 255, 255,},
{215, 240, 255, 233, 235, 255, 250, 255, 255, 255,},
{217, 241, 255, 234, 236, 255, 251, 255, 255, 255,},
{219, 242, 255, 235, 237, 255, 251, 255, 255, 255,},
{221, 243, 255, 236, 238, 255, 252, 255, 255, 255,},
{223, 244, 255, 237, 239, 255, 252, 255, 255, 255,},
{225, 245, 255, 239, 240, 255, 252, 255, 255, 255,},
{227, 246, 255, 240, 241, 255, 253, 255, 255, 255,},
{229, 247, 255, 241, 242, 255, 253, 255, 255, 255,},
{231, 248, 255, 243, 244, 255, 254, 255, 255, 255,},
{233, 249, 255, 244, 245, 255, 254, 255, 255, 255,},
{236, 250, 255, 246, 246, 255, 254, 255, 255, 255,},
{238, 251, 255, 247, 247, 255, 255, 255, 255, 255,},
{241, 252, 255, 249, 249, 255, 255, 255, 255, 255,},
{244, 253, 255, 250, 250, 255, 255, 255, 255, 255,},
{247, 254, 255, 252, 252, 255, 255, 255, 255, 255,},
{251, 255, 255, 254, 254, 255, 255, 255, 255, 255,},
};
const vp9_prob vp9_modelcoefprobs_gg625p1[COEFPROB_MODELS][ENTROPY_NODES - 1] = {
// Probs generated with a Generalized Gaussian (with shape parameter 0.625)
// source model with varying quantizer step size for a uniform quantizer
{0, 0, 0, 0, 0, 0, 0, 0, 0, 0,}, // do not use
{1, 1, 3, 87, 129, 6, 87, 20, 91, 24,},
{1, 2, 6, 88, 130, 11, 89, 36, 94, 41,},
{2, 3, 8, 88, 130, 15, 90, 50, 97, 56,},
{2, 4, 11, 89, 131, 20, 90, 62, 99, 70,},
{3, 5, 14, 90, 131, 24, 91, 74, 102, 81,},
{3, 6, 16, 90, 132, 29, 92, 84, 104, 92,},
{4, 7, 19, 91, 132, 33, 93, 93, 106, 101,},
{4, 8, 21, 91, 132, 37, 93, 102, 108, 110,},
{5, 9, 24, 92, 133, 40, 94, 110, 110, 118,},
{5, 10, 26, 92, 133, 44, 95, 118, 111, 125,},
{6, 11, 29, 93, 134, 48, 96, 125, 113, 132,},
{7, 12, 31, 93, 134, 51, 96, 132, 115, 139,},
{7, 13, 33, 93, 134, 55, 97, 138, 117, 145,},
{8, 14, 36, 94, 135, 58, 97, 144, 119, 150,},
{8, 15, 38, 94, 135, 62, 98, 149, 120, 155,},
{9, 16, 40, 95, 135, 65, 99, 154, 122, 160,},
{10, 17, 42, 95, 136, 68, 99, 159, 124, 165,},
{10, 18, 45, 96, 136, 71, 100, 164, 125, 169,},
{11, 19, 47, 96, 136, 74, 100, 168, 127, 174,},
{11, 20, 49, 96, 136, 77, 101, 173, 128, 177,},
{12, 21, 51, 97, 137, 80, 102, 176, 130, 181,},
{13, 22, 53, 97, 137, 83, 102, 180, 131, 185,},
{13, 23, 55, 98, 137, 86, 103, 184, 133, 188,},
{14, 24, 57, 98, 138, 89, 103, 187, 135, 191,},
{14, 25, 59, 98, 138, 91, 104, 190, 136, 194,},
{15, 26, 61, 99, 138, 94, 104, 193, 138, 197,},
{16, 27, 64, 99, 139, 97, 105, 196, 139, 200,},
{16, 28, 66, 100, 139, 99, 106, 199, 141, 202,},
{17, 29, 68, 100, 139, 102, 106, 201, 142, 205,},
{18, 30, 69, 100, 139, 104, 107, 204, 143, 207,},
{18, 31, 71, 101, 140, 107, 107, 206, 145, 209,},
{19, 32, 73, 101, 140, 109, 108, 209, 146, 211,},
{20, 33, 75, 102, 140, 112, 108, 211, 148, 213,},
{20, 34, 77, 102, 141, 114, 109, 213, 149, 215,},
{21, 35, 79, 102, 141, 116, 109, 215, 150, 217,},
{22, 36, 81, 103, 141, 119, 110, 217, 152, 219,},
{22, 37, 83, 103, 141, 121, 110, 218, 153, 220,},
{23, 38, 85, 103, 142, 123, 111, 220, 155, 222,},
{24, 39, 87, 104, 142, 125, 112, 222, 156, 224,},
{24, 40, 88, 104, 142, 127, 112, 223, 157, 225,},
{25, 41, 90, 105, 143, 129, 113, 225, 159, 226,},
{26, 42, 92, 105, 143, 131, 113, 226, 160, 228,},
{26, 43, 94, 105, 143, 133, 114, 227, 161, 229,},
{27, 44, 95, 106, 143, 135, 114, 229, 162, 230,},
{28, 45, 97, 106, 144, 137, 115, 230, 164, 231,},
{28, 46, 99, 107, 144, 139, 115, 231, 165, 232,},
{29, 47, 101, 107, 144, 141, 116, 232, 166, 233,},
{30, 48, 102, 107, 145, 143, 116, 233, 168, 234,},
{31, 49, 104, 108, 145, 145, 117, 234, 169, 235,},
{31, 50, 106, 108, 145, 147, 118, 235, 170, 236,},
{32, 51, 107, 108, 145, 149, 118, 236, 171, 237,},
{33, 52, 109, 109, 146, 150, 119, 237, 172, 238,},
{33, 53, 111, 109, 146, 152, 119, 238, 174, 239,},
{34, 54, 112, 110, 146, 154, 120, 239, 175, 240,},
{35, 55, 114, 110, 146, 156, 120, 240, 176, 240,},
{36, 56, 115, 110, 147, 157, 121, 240, 177, 241,},
{36, 57, 117, 111, 147, 159, 121, 241, 178, 242,},
{37, 58, 119, 111, 147, 161, 122, 242, 180, 242,},
{38, 59, 120, 112, 148, 162, 122, 242, 181, 243,},
{38, 60, 122, 112, 148, 164, 123, 243, 182, 244,},
{39, 61, 123, 112, 148, 165, 124, 244, 183, 244,},
{40, 62, 125, 113, 148, 167, 124, 244, 184, 245,},
{41, 63, 126, 113, 149, 168, 125, 245, 185, 245,},
{41, 64, 128, 114, 149, 170, 125, 245, 186, 246,},
{42, 65, 129, 114, 149, 171, 126, 246, 187, 246,},
{43, 66, 131, 114, 150, 173, 126, 246, 188, 247,},
{44, 67, 132, 115, 150, 174, 127, 247, 189, 247,},
{44, 68, 134, 115, 150, 176, 127, 247, 191, 247,},
{45, 69, 135, 116, 150, 177, 128, 248, 192, 248,},
{46, 70, 136, 116, 151, 178, 129, 248, 193, 248,},
{47, 71, 138, 116, 151, 180, 129, 248, 194, 249,},
{48, 72, 139, 117, 151, 181, 130, 249, 195, 249,},
{48, 73, 141, 117, 152, 183, 130, 249, 196, 249,},
{49, 74, 142, 118, 152, 184, 131, 249, 197, 250,},
{50, 75, 143, 118, 152, 185, 131, 250, 198, 250,},
{51, 76, 145, 118, 152, 186, 132, 250, 199, 250,},
{51, 77, 146, 119, 153, 188, 132, 250, 200, 250,},
{52, 78, 148, 119, 153, 189, 133, 251, 201, 251,},
{53, 79, 149, 120, 153, 190, 134, 251, 201, 251,},
{54, 80, 150, 120, 154, 191, 134, 251, 202, 251,},
{55, 81, 151, 120, 154, 192, 135, 251, 203, 251,},
{55, 82, 153, 121, 154, 194, 135, 252, 204, 252,},
{56, 83, 154, 121, 155, 195, 136, 252, 205, 252,},
{57, 84, 155, 122, 155, 196, 136, 252, 206, 252,},
{58, 85, 157, 122, 155, 197, 137, 252, 207, 252,},
{59, 86, 158, 123, 155, 198, 138, 252, 208, 252,},
{59, 87, 159, 123, 156, 199, 138, 253, 209, 253,},
{60, 88, 160, 123, 156, 200, 139, 253, 210, 253,},
{61, 89, 162, 124, 156, 201, 139, 253, 210, 253,},
{62, 90, 163, 124, 157, 202, 140, 253, 211, 253,},
{63, 91, 164, 125, 157, 203, 140, 253, 212, 253,},
{64, 92, 165, 125, 157, 204, 141, 253, 213, 253,},
{64, 93, 166, 126, 158, 205, 142, 254, 214, 253,},
{65, 94, 168, 126, 158, 206, 142, 254, 214, 254,},
{66, 95, 169, 126, 158, 207, 143, 254, 215, 254,},
{67, 96, 170, 127, 158, 208, 143, 254, 216, 254,},
{68, 97, 171, 127, 159, 209, 144, 254, 217, 254,},
{69, 98, 172, 128, 159, 210, 145, 254, 218, 254,},
{69, 99, 173, 128, 159, 211, 145, 254, 218, 254,},
{70, 100, 175, 129, 160, 212, 146, 254, 219, 254,},
{71, 101, 176, 129, 160, 213, 146, 254, 220, 254,},
{72, 102, 177, 130, 160, 214, 147, 254, 220, 254,},
{73, 103, 178, 130, 161, 214, 148, 255, 221, 255,},
{74, 104, 179, 130, 161, 215, 148, 255, 222, 255,},
{75, 105, 180, 131, 161, 216, 149, 255, 223, 255,},
{75, 106, 181, 131, 162, 217, 149, 255, 223, 255,},
{76, 107, 182, 132, 162, 218, 150, 255, 224, 255,},
{77, 108, 183, 132, 162, 219, 151, 255, 225, 255,},
{78, 109, 184, 133, 163, 219, 151, 255, 225, 255,},
{79, 110, 185, 133, 163, 220, 152, 255, 226, 255,},
{80, 111, 186, 134, 163, 221, 152, 255, 226, 255,},
{81, 112, 187, 134, 164, 222, 153, 255, 227, 255,},
{82, 113, 188, 135, 164, 222, 154, 255, 228, 255,},
{83, 114, 189, 135, 164, 223, 154, 255, 228, 255,},
{83, 115, 190, 136, 165, 224, 155, 255, 229, 255,},
{84, 116, 191, 136, 165, 224, 156, 255, 230, 255,},
{85, 117, 192, 137, 165, 225, 156, 255, 230, 255,},
{86, 118, 193, 137, 166, 226, 157, 255, 231, 255,},
{87, 119, 194, 137, 166, 226, 157, 255, 231, 255,},
{88, 120, 195, 138, 166, 227, 158, 255, 232, 255,},
{89, 121, 196, 138, 167, 228, 159, 255, 232, 255,},
{90, 122, 197, 139, 167, 228, 159, 255, 233, 255,},
{91, 123, 198, 139, 167, 229, 160, 255, 233, 255,},
{92, 124, 199, 140, 168, 230, 161, 255, 234, 255,},
{93, 125, 200, 140, 168, 230, 161, 255, 234, 255,},
{93, 126, 201, 141, 168, 231, 162, 255, 235, 255,},
{94, 127, 202, 141, 169, 231, 163, 255, 235, 255,},
{95, 128, 203, 142, 169, 232, 163, 255, 236, 255,},
{96, 129, 203, 142, 169, 233, 164, 255, 236, 255,},
{97, 130, 204, 143, 170, 233, 164, 255, 237, 255,},
{98, 131, 205, 143, 170, 234, 165, 255, 237, 255,},
{99, 132, 206, 144, 170, 234, 166, 255, 238, 255,},
{100, 133, 207, 145, 171, 235, 166, 255, 238, 255,},
{101, 134, 208, 145, 171, 235, 167, 255, 239, 255,},
{102, 135, 209, 146, 171, 236, 168, 255, 239, 255,},
{103, 136, 209, 146, 172, 236, 168, 255, 240, 255,},
{104, 137, 210, 147, 172, 237, 169, 255, 240, 255,},
{105, 138, 211, 147, 173, 237, 170, 255, 240, 255,},
{106, 139, 212, 148, 173, 238, 170, 255, 241, 255,},
{107, 140, 213, 148, 173, 238, 171, 255, 241, 255,},
{108, 141, 213, 149, 174, 239, 172, 255, 242, 255,},
{109, 142, 214, 149, 174, 239, 172, 255, 242, 255,},
{110, 143, 215, 150, 174, 240, 173, 255, 242, 255,},
{111, 144, 216, 150, 175, 240, 174, 255, 243, 255,},
{112, 145, 216, 151, 175, 240, 174, 255, 243, 255,},
{113, 146, 217, 152, 176, 241, 175, 255, 243, 255,},
{114, 147, 218, 152, 176, 241, 176, 255, 244, 255,},
{115, 148, 219, 153, 176, 242, 176, 255, 244, 255,},
{116, 149, 219, 153, 177, 242, 177, 255, 244, 255,},
{117, 150, 220, 154, 177, 242, 178, 255, 245, 255,},
{118, 151, 221, 154, 178, 243, 178, 255, 245, 255,},
{119, 152, 221, 155, 178, 243, 179, 255, 245, 255,},
{120, 153, 222, 156, 178, 244, 180, 255, 246, 255,},
{121, 154, 223, 156, 179, 244, 180, 255, 246, 255,},
{122, 155, 223, 157, 179, 244, 181, 255, 246, 255,},
{123, 156, 224, 157, 180, 245, 182, 255, 247, 255,},
{124, 157, 225, 158, 180, 245, 183, 255, 247, 255,},
{125, 158, 225, 159, 180, 245, 183, 255, 247, 255,},
{126, 159, 226, 159, 181, 246, 184, 255, 247, 255,},
{127, 160, 227, 160, 181, 246, 185, 255, 248, 255,},
{128, 161, 227, 160, 182, 246, 185, 255, 248, 255,},
{129, 162, 228, 161, 182, 246, 186, 255, 248, 255,},
{130, 163, 229, 162, 183, 247, 187, 255, 248, 255,},
{131, 164, 229, 162, 183, 247, 187, 255, 249, 255,},
{132, 165, 230, 163, 183, 247, 188, 255, 249, 255,},
{133, 166, 230, 163, 184, 248, 189, 255, 249, 255,},
{135, 167, 231, 164, 184, 248, 190, 255, 249, 255,},
{136, 168, 232, 165, 185, 248, 190, 255, 250, 255,},
{137, 169, 232, 165, 185, 248, 191, 255, 250, 255,},
{138, 170, 233, 166, 186, 249, 192, 255, 250, 255,},
{139, 171, 233, 167, 186, 249, 192, 255, 250, 255,},
{140, 172, 234, 167, 187, 249, 193, 255, 251, 255,},
{141, 173, 234, 168, 187, 249, 194, 255, 251, 255,},
{142, 174, 235, 169, 187, 250, 195, 255, 251, 255,},
{143, 175, 235, 169, 188, 250, 195, 255, 251, 255,},
{144, 176, 236, 170, 188, 250, 196, 255, 251, 255,},
{146, 177, 236, 171, 189, 250, 197, 255, 251, 255,},
{147, 178, 237, 171, 189, 251, 197, 255, 252, 255,},
{148, 179, 237, 172, 190, 251, 198, 255, 252, 255,},
{149, 180, 238, 173, 190, 251, 199, 255, 252, 255,},
{150, 181, 238, 173, 191, 251, 200, 255, 252, 255,},
{151, 182, 239, 174, 191, 251, 200, 255, 252, 255,},
{152, 183, 239, 175, 192, 251, 201, 255, 252, 255,},
{153, 184, 240, 176, 192, 252, 202, 255, 253, 255,},
{155, 185, 240, 176, 193, 252, 203, 255, 253, 255,},
{156, 186, 241, 177, 193, 252, 203, 255, 253, 255,},
{157, 187, 241, 178, 194, 252, 204, 255, 253, 255,},
{158, 188, 242, 179, 194, 252, 205, 255, 253, 255,},
{159, 189, 242, 179, 195, 252, 206, 255, 253, 255,},
{160, 190, 242, 180, 195, 253, 206, 255, 253, 255,},
{162, 191, 243, 181, 196, 253, 207, 255, 253, 255,},
{163, 192, 243, 182, 196, 253, 208, 255, 254, 255,},
{164, 193, 244, 182, 197, 253, 209, 255, 254, 255,},
{165, 194, 244, 183, 198, 253, 209, 255, 254, 255,},
{166, 195, 244, 184, 198, 253, 210, 255, 254, 255,},
{168, 196, 245, 185, 199, 253, 211, 255, 254, 255,},
{169, 197, 245, 185, 199, 254, 212, 255, 254, 255,},
{170, 198, 246, 186, 200, 254, 212, 255, 254, 255,},
{171, 199, 246, 187, 200, 254, 213, 255, 254, 255,},
{172, 200, 246, 188, 201, 254, 214, 255, 254, 255,},
{174, 201, 247, 189, 201, 254, 215, 255, 254, 255,},
{175, 202, 247, 189, 202, 254, 215, 255, 255, 255,},
{176, 203, 247, 190, 203, 254, 216, 255, 255, 255,},
{177, 204, 248, 191, 203, 254, 217, 255, 255, 255,},
{179, 205, 248, 192, 204, 254, 218, 255, 255, 255,},
{180, 206, 248, 193, 204, 254, 218, 255, 255, 255,},
{181, 207, 249, 194, 205, 255, 219, 255, 255, 255,},
{183, 208, 249, 195, 206, 255, 220, 255, 255, 255,},
{184, 209, 249, 195, 206, 255, 221, 255, 255, 255,},
{185, 210, 250, 196, 207, 255, 221, 255, 255, 255,},
{186, 211, 250, 197, 208, 255, 222, 255, 255, 255,},
{188, 212, 250, 198, 208, 255, 223, 255, 255, 255,},
{189, 213, 250, 199, 209, 255, 224, 255, 255, 255,},
{190, 214, 251, 200, 210, 255, 224, 255, 255, 255,},
{192, 215, 251, 201, 210, 255, 225, 255, 255, 255,},
{193, 216, 251, 202, 211, 255, 226, 255, 255, 255,},
{194, 217, 251, 203, 212, 255, 227, 255, 255, 255,},
{196, 218, 252, 204, 212, 255, 228, 255, 255, 255,},
{197, 219, 252, 205, 213, 255, 228, 255, 255, 255,},
{198, 220, 252, 206, 214, 255, 229, 255, 255, 255,},
{200, 221, 252, 207, 215, 255, 230, 255, 255, 255,},
{201, 222, 252, 208, 215, 255, 231, 255, 255, 255,},
{202, 223, 253, 209, 216, 255, 231, 255, 255, 255,},
{204, 224, 253, 210, 217, 255, 232, 255, 255, 255,},
{205, 225, 253, 211, 218, 255, 233, 255, 255, 255,},
{207, 226, 253, 212, 218, 255, 234, 255, 255, 255,},
{208, 227, 253, 213, 219, 255, 234, 255, 255, 255,},
{209, 228, 254, 214, 220, 255, 235, 255, 255, 255,},
{211, 229, 254, 215, 221, 255, 236, 255, 255, 255,},
{212, 230, 254, 216, 222, 255, 237, 255, 255, 255,},
{214, 231, 254, 217, 223, 255, 238, 255, 255, 255,},
{215, 232, 254, 218, 223, 255, 238, 255, 255, 255,},
{217, 233, 254, 219, 224, 255, 239, 255, 255, 255,},
{218, 234, 255, 221, 225, 255, 240, 255, 255, 255,},
{220, 235, 255, 222, 226, 255, 241, 255, 255, 255,},
{221, 236, 255, 223, 227, 255, 241, 255, 255, 255,},
{223, 237, 255, 224, 228, 255, 242, 255, 255, 255,},
{224, 238, 255, 225, 229, 255, 243, 255, 255, 255,},
{226, 239, 255, 227, 230, 255, 244, 255, 255, 255,},
{227, 240, 255, 228, 231, 255, 244, 255, 255, 255,},
{229, 241, 255, 229, 232, 255, 245, 255, 255, 255,},
{231, 242, 255, 231, 233, 255, 246, 255, 255, 255,},
{232, 243, 255, 232, 234, 255, 247, 255, 255, 255,},
{234, 244, 255, 233, 236, 255, 247, 255, 255, 255,},
{235, 245, 255, 235, 237, 255, 248, 255, 255, 255,},
{237, 246, 255, 236, 238, 255, 249, 255, 255, 255,},
{239, 247, 255, 238, 239, 255, 250, 255, 255, 255,},
{241, 248, 255, 239, 241, 255, 250, 255, 255, 255,},
{242, 249, 255, 241, 242, 255, 251, 255, 255, 255,},
{244, 250, 255, 243, 243, 255, 252, 255, 255, 255,},
{246, 251, 255, 244, 245, 255, 253, 255, 255, 255,},
{248, 252, 255, 246, 247, 255, 253, 255, 255, 255,},
{250, 253, 255, 248, 248, 255, 254, 255, 255, 255,},
{252, 254, 255, 250, 250, 255, 255, 255, 255, 255,},
{254, 255, 255, 253, 253, 255, 255, 255, 255, 255,},
};
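#if 0
// Illustrative sketch only, not built with the library: one way to turn a
// Generalized Gaussian source model plus a uniform quantizer into 8-bit
// probabilities, in the spirit of the table comments above.  It computes
// only the probability of the zero bin as the quantizer step varies; the
// actual node mapping, scale parameter and fitting used to generate the
// tables in this file are not reproduced here, and all names below are
// hypothetical.  Compile separately, e.g.:  gcc gg_sketch.c -lm
#include <math.h>
#include <stdio.h>

// Unnormalized generalized Gaussian density exp(-|x / alpha| ^ beta).
static double gg_density(double x, double alpha, double beta) {
  return exp(-pow(fabs(x) / alpha, beta));
}

// Mass of the density over [lo, hi), midpoint rule with n sub-intervals.
static double gg_mass(double lo, double hi, double alpha, double beta) {
  const int n = 200;
  double sum = 0.0;
  int i;
  for (i = 0; i < n; ++i) {
    const double x = lo + (hi - lo) * (i + 0.5) / n;
    sum += gg_density(x, alpha, beta) * (hi - lo) / n;
  }
  return sum;
}

int main(void) {
  const double beta = 0.625;   // shape parameter, as in the comments above
  const double alpha = 1.0;    // scale of the source (assumed fixed here)
  const int nbins = 4096;      // enough bins to capture nearly all the mass
  double step;
  for (step = 0.25; step <= 4.0; step *= 2.0) {
    double zero_mass = 0.0, total = 0.0;
    int k, prob;
    for (k = 0; k < nbins; ++k) {
      // Bin k of a uniform quantizer on the magnitude |x|: bin 0 is
      // [0, step / 2), bin k >= 1 is [(k - 0.5) * step, (k + 0.5) * step).
      const double lo = (k == 0) ? 0.0 : (k - 0.5) * step;
      const double hi = (k + 0.5) * step;
      const double m = gg_mass(lo, hi, alpha, beta);
      if (k == 0) zero_mass = m;
      total += m;
    }
    // Map P(coefficient quantizes to zero) onto the [1, 255] scale of a
    // vp9_prob.
    prob = (int)(zero_mass / total * 256.0 + 0.5);
    if (prob < 1) prob = 1;
    if (prob > 255) prob = 255;
    printf("step %4.2f: P(zero bin) ~ %3d / 256\n", step, prob);
  }
  return 0;
}
#endif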
void vp9_get_model_distribution(vp9_prob p, vp9_prob *tree_probs,
                                int b, int r) {
  const vp9_prob (*model)[ENTROPY_NODES - 1];
  // Note: every branch below currently selects the same table, so the plane
  // type (b) and reference type (r) only take effect once distinct models
  // are supplied.
#if UNCONSTRAINED_NODES == 2
  if (r != INTRA_FRAME && b == PLANE_TYPE_UV)
    model = vp9_modelcoefprobs_gg75;
  else if (r == INTRA_FRAME && b == PLANE_TYPE_UV)
    model = vp9_modelcoefprobs_gg75;
  else if (r != INTRA_FRAME && b == PLANE_TYPE_Y_WITH_DC)
    model = vp9_modelcoefprobs_gg75;
  else
    model = vp9_modelcoefprobs_gg75;
#else
  if (r != INTRA_FRAME && b == PLANE_TYPE_UV)
    model = vp9_modelcoefprobs_gg75p1;
  else if (r == INTRA_FRAME && b == PLANE_TYPE_UV)
    model = vp9_modelcoefprobs_gg75p1;
  else if (r != INTRA_FRAME && b == PLANE_TYPE_Y_WITH_DC)
    model = vp9_modelcoefprobs_gg75p1;
  else
    model = vp9_modelcoefprobs_gg75p1;
#endif
  // Copy the model-derived probabilities for the remaining (constrained)
  // nodes; entries [0, UNCONSTRAINED_NODES) of tree_probs are left for the
  // caller to fill in explicitly.
  vpx_memcpy(tree_probs + UNCONSTRAINED_NODES,
             model[p] + UNCONSTRAINED_NODES - 1,
             (ENTROPY_NODES - UNCONSTRAINED_NODES) * sizeof(vp9_prob));
}
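#if 0
// Illustrative usage sketch only (not built): shows how a caller might
// combine explicitly coded node probabilities with the model table.  The
// helper name, the stand-in value 128, the choice of model index and the
// plane/reference arguments here are hypothetical, not taken from the
// library.
static void example_fill_coef_probs(vp9_prob probs[ENTROPY_NODES]) {
  int i;
  // The first UNCONSTRAINED_NODES probabilities are assumed to be coded
  // explicitly elsewhere; 128 is just a placeholder value.
  for (i = 0; i < UNCONSTRAINED_NODES; ++i)
    probs[i] = 128;
  // The remaining tree nodes are filled in from the fitted model, indexed
  // here by the last explicitly coded probability.
  vp9_get_model_distribution(probs[UNCONSTRAINED_NODES - 1], probs,
                             PLANE_TYPE_Y_WITH_DC, INTRA_FRAME);
}
#endif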
#endif
static vp9_tree_index cat1[2], cat2[4], cat3[6], cat4[8], cat5[10], cat6[28];
// Build a chain of n tree nodes in which both branches of every node lead
// to the next node, so that walking the tree reads exactly n extra bits,
// each coded with its own probability from the corresponding Pcat table.
static void init_bit_tree(vp9_tree_index *p, int n) {
  int i = 0;

  while (++i < n) {
    p[0] = p[1] = i << 1;
    p += 2;
  }

  p[0] = p[1] = 0;
}
static void init_bit_trees() {
init_bit_tree(cat1, 1);
init_bit_tree(cat2, 2);
init_bit_tree(cat3, 3);
init_bit_tree(cat4, 4);
init_bit_tree(cat5, 5);
init_bit_tree(cat6, 14);
}
vp9_extra_bit_struct vp9_extra_bits[12] = {
{ 0, 0, 0, 0},
{ 0, 0, 0, 1},
{ 0, 0, 0, 2},
{ 0, 0, 0, 3},
{ 0, 0, 0, 4},
{ cat1, Pcat1, 1, 5},
{ cat2, Pcat2, 2, 7},
{ cat3, Pcat3, 3, 11},
{ cat4, Pcat4, 4, 19},
{ cat5, Pcat5, 5, 35},
{ cat6, Pcat6, 14, 67},
{ 0, 0, 0, 0}
};
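// Worked example (editor's note): each row above is {tree, probabilities,
// number of extra bits, base value}. A coefficient magnitude of 40 falls in
// category 5 (base value 35, 5 extra bits), so the extra-bit payload is
// 40 - 35 = 5, coded bit by bit against Pcat5. Category 6 carries 14 extra
// bits to cover the extended coefficient range of the 32x32 transform.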
#include "vp9/common/vp9_default_coef_probs.h"
// This function returns an AC coefficient context for the coefficient at
// scan position c. It is currently a placeholder to allow experimentation
// with various context models based on the energy of earlier tokens within
// the current block.
//
// For now it returns the energy class of the (rounded) average of up to two
// previously coded neighbors in the scan order, or 0 at the end-of-block
// position. (An illustrative usage sketch follows the function below.)
#define MAX_NEIGHBORS 2
int vp9_get_coef_context(const int *scan, const int *neighbors,
int nb_pad, uint8_t *token_cache, int c, int l) {
int eob = l;
assert(nb_pad == MAX_NEIGHBORS);
if (c == eob) {
return 0;
} else {
int ctx;
assert(neighbors[MAX_NEIGHBORS * c + 0] >= 0);
if (neighbors[MAX_NEIGHBORS * c + 1] >= 0) {
ctx = (1 + token_cache[neighbors[MAX_NEIGHBORS * c + 0]] +
token_cache[neighbors[MAX_NEIGHBORS * c + 1]]) >> 1;
} else {
ctx = token_cache[neighbors[MAX_NEIGHBORS * c + 0]];
}
return vp9_pt_energy_class[ctx];
}
}
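// Illustrative usage sketch (editor's comment only, not compiled): how a
// caller might derive the context for the coefficient at scan position c,
// assuming caller-local variables token_cache[] (previously coded token
// indices), c and eob:
//
//   int pad;
//   const int *scan = vp9_default_zig_zag1d_4x4;
//   const int *nb = vp9_get_coef_neighbors_handle(scan, &pad);
//   int ctx = vp9_get_coef_context(scan, nb, pad, token_cache, c, eob);
//   /* ctx is 0 when c == eob, otherwise the energy class of the rounded
//      average of up to two already-coded neighboring tokens. */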
void vp9_default_coef_probs(VP9_COMMON *pc) {
#if CONFIG_MODELCOEFPROB
int b, r, c, p;
#endif
#if CONFIG_CODE_NONZEROCOUNT
#ifdef NZC_DEFAULT_COUNTS
int h, g;
for (h = 0; h < MAX_NZC_CONTEXTS; ++h) {
for (g = 0; g < REF_TYPES; ++g) {
int i;
unsigned int branch_ct4x4[NZC4X4_NODES][2];
unsigned int branch_ct8x8[NZC8X8_NODES][2];
unsigned int branch_ct16x16[NZC16X16_NODES][2];
unsigned int branch_ct32x32[NZC32X32_NODES][2];
for (i = 0; i < BLOCK_TYPES; ++i) {
vp9_tree_probs_from_distribution(
vp9_nzc4x4_tree,
pc->fc.nzc_probs_4x4[h][g][i], branch_ct4x4,
default_nzc_counts_4x4[h][g][i], 0);
}
for (i = 0; i < BLOCK_TYPES; ++i) {
vp9_tree_probs_from_distribution(
vp9_nzc8x8_tree,
pc->fc.nzc_probs_8x8[h][g][i], branch_ct8x8,
default_nzc_counts_8x8[h][g][i], 0);
}
for (i = 0; i < BLOCK_TYPES; ++i) {
vp9_tree_probs_from_distribution(
vp9_nzc16x16_tree,
pc->fc.nzc_probs_16x16[h][g][i], branch_ct16x16,
default_nzc_counts_16x16[h][g][i], 0);
}
for (i = 0; i < BLOCK_TYPES; ++i) {
vp9_tree_probs_from_distribution(
vp9_nzc32x32_tree,
pc->fc.nzc_probs_32x32[h][g][i], branch_ct32x32,
default_nzc_counts_32x32[h][g][i], 0);
}
}
}
#else
vpx_memcpy(pc->fc.nzc_probs_4x4, default_nzc_probs_4x4,
sizeof(pc->fc.nzc_probs_4x4));
vpx_memcpy(pc->fc.nzc_probs_8x8, default_nzc_probs_8x8,
sizeof(pc->fc.nzc_probs_8x8));
vpx_memcpy(pc->fc.nzc_probs_16x16, default_nzc_probs_16x16,
sizeof(pc->fc.nzc_probs_16x16));
vpx_memcpy(pc->fc.nzc_probs_32x32, default_nzc_probs_32x32,
sizeof(pc->fc.nzc_probs_32x32));
#endif
vpx_memcpy(pc->fc.nzc_pcat_probs, default_nzc_pcat_probs,
sizeof(pc->fc.nzc_pcat_probs));
#endif // CONFIG_CODE_NONZEROCOUNT
#if CONFIG_MODELCOEFPROB
for (b = 0; b < BLOCK_TYPES; ++b)
for (r = 0; r < REF_TYPES; ++r)
for (c = 0; c < COEF_BANDS; ++c)
for (p = 0; p < PREV_COEF_CONTEXTS; ++p) {
int t;
for (t = 0; t < UNCONSTRAINED_NODES; t++)
pc->fc.coef_probs_4x4[b][r][c][p][t] =
default_coef_probs_4x4[b][r][c][p][t];
vp9_get_model_distribution(
default_coef_probs_4x4[b][r][c][p][UNCONSTRAINED_NODES - 1],
pc->fc.coef_probs_4x4[b][r][c][p], b, r);
for (t = 0; t < UNCONSTRAINED_NODES; t++)
pc->fc.coef_probs_8x8[b][r][c][p][t] =
default_coef_probs_8x8[b][r][c][p][t];
vp9_get_model_distribution(
default_coef_probs_8x8[b][r][c][p][UNCONSTRAINED_NODES - 1],
pc->fc.coef_probs_8x8[b][r][c][p], b, r);
for (t = 0; t < UNCONSTRAINED_NODES; t++)
pc->fc.coef_probs_16x16[b][r][c][p][t] =
default_coef_probs_16x16[b][r][c][p][t];
vp9_get_model_distribution(
default_coef_probs_16x16[b][r][c][p][UNCONSTRAINED_NODES - 1],
pc->fc.coef_probs_16x16[b][r][c][p], b, r);
for (t = 0; t < UNCONSTRAINED_NODES; t++)
pc->fc.coef_probs_32x32[b][r][c][p][t] =
default_coef_probs_32x32[b][r][c][p][t];
vp9_get_model_distribution(
default_coef_probs_32x32[b][r][c][p][UNCONSTRAINED_NODES - 1],
pc->fc.coef_probs_32x32[b][r][c][p], b, r);
}
#else
vpx_memcpy(pc->fc.coef_probs_4x4, default_coef_probs_4x4,
sizeof(pc->fc.coef_probs_4x4));
vpx_memcpy(pc->fc.coef_probs_8x8, default_coef_probs_8x8,
sizeof(pc->fc.coef_probs_8x8));
vpx_memcpy(pc->fc.coef_probs_16x16, default_coef_probs_16x16,
sizeof(pc->fc.coef_probs_16x16));
vpx_memcpy(pc->fc.coef_probs_32x32, default_coef_probs_32x32,
sizeof(pc->fc.coef_probs_32x32));
#endif
}
#if CONFIG_MODELCOEFPROB
// This is a placeholder function that enables the default coef probs to
// change for key frames based on the base_qindex. If base_qindex is large,
// we can expect the probabilities of zeros to be bigger, and vice versa. The
// rest of the probabilities are then derived from the model.
void vp9_adjust_default_coef_probs(VP9_COMMON *cm) {
static const int factor_bits = 4;
static const int factor_rnd = 8; // (1 << (factor_bits - 1))
int b, r, c, p;
int factor = (1 << factor_bits);
/*
if (cm->base_qindex < 32)
factor -= ((32 - cm->base_qindex) >> 4);
*/
if (cm->base_qindex > 128)
factor += ((cm->base_qindex - 128) >> 4);
// printf(" Q %d factor %d\n", cm->base_qindex, factor);
for (b = 0; b < BLOCK_TYPES; ++b)
for (r = 0; r < REF_TYPES; ++r)
for (c = 0; c < COEF_BANDS; ++c)
for (p = 0; p < PREV_COEF_CONTEXTS; ++p) {
int t, x;
vp9_prob prob;
for (t = 0; t < UNCONSTRAINED_NODES; t++) {
x = (default_coef_probs_4x4[b][r][c][p][t] * factor + factor_rnd)
>> factor_bits;
prob = (x > 255 ? 255 : (x < 1 ? 1 : x));
cm->fc.coef_probs_4x4[b][r][c][p][t] = prob;
}
vp9_get_model_distribution(
prob, cm->fc.coef_probs_4x4[b][r][c][p], b, r);
for (t = 0; t < UNCONSTRAINED_NODES; t++) {
x = (default_coef_probs_8x8[b][r][c][p][t] * factor + factor_rnd)
>> factor_bits;
prob = (x > 255 ? 255 : (x < 1 ? 1 : x));
cm->fc.coef_probs_8x8[b][r][c][p][t] = prob;
}
vp9_get_model_distribution(
prob, cm->fc.coef_probs_8x8[b][r][c][p], b, r);
for (t = 0; t < UNCONSTRAINED_NODES; t++) {
x = (default_coef_probs_16x16[b][r][c][p][t] * factor + factor_rnd)
>> factor_bits;
prob = (x > 255 ? 255 : (x < 1 ? 1 : x));
cm->fc.coef_probs_16x16[b][r][c][p][t] = prob;
}
vp9_get_model_distribution(
prob, cm->fc.coef_probs_16x16[b][r][c][p], b, r);
for (t = 0; t < UNCONSTRAINED_NODES; t++) {
x = (default_coef_probs_32x32[b][r][c][p][t] * factor + factor_rnd)
>> factor_bits;
prob = (x > 255 ? 255 : (x < 1 ? 1 : x));
cm->fc.coef_probs_32x32[b][r][c][p][t] = prob;
}
vp9_get_model_distribution(
prob, cm->fc.coef_probs_32x32[b][r][c][p], b, r);
}
}
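// Worked example (editor's note): with factor_bits == 4 the nominal factor is
// 16 (i.e. 1.0). For base_qindex == 192 the factor becomes
// 16 + ((192 - 128) >> 4) == 20, so a default probability of 128 is scaled to
// (128 * 20 + 8) >> 4 == 160 (then clamped to [1, 255]) before the constrained
// nodes are re-derived from the model.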
#endif
// Neighborhood 2-tuples for the various scans and block sizes,
// in {above, left} order for each position in the scan order.
// -1 indicates the neighbor does not exist.
DECLARE_ALIGNED(16, int,
vp9_default_zig_zag1d_4x4_neighbors[16 * MAX_NEIGHBORS]);
DECLARE_ALIGNED(16, int,
vp9_col_scan_4x4_neighbors[16 * MAX_NEIGHBORS]);
DECLARE_ALIGNED(16, int,
vp9_row_scan_4x4_neighbors[16 * MAX_NEIGHBORS]);
DECLARE_ALIGNED(16, int,
vp9_col_scan_8x8_neighbors[64 * MAX_NEIGHBORS]);
DECLARE_ALIGNED(16, int,
vp9_row_scan_8x8_neighbors[64 * MAX_NEIGHBORS]);
DECLARE_ALIGNED(16, int,
vp9_default_zig_zag1d_8x8_neighbors[64 * MAX_NEIGHBORS]);
DECLARE_ALIGNED(16, int,
vp9_col_scan_16x16_neighbors[256 * MAX_NEIGHBORS]);
DECLARE_ALIGNED(16, int,
vp9_row_scan_16x16_neighbors[256 * MAX_NEIGHBORS]);
DECLARE_ALIGNED(16, int,
vp9_default_zig_zag1d_16x16_neighbors[256 * MAX_NEIGHBORS]);
DECLARE_ALIGNED(16, int,
vp9_default_zig_zag1d_32x32_neighbors[1024 * MAX_NEIGHBORS]);
static int find_in_scan(const int *scan, int l, int idx) {
int n, l2 = l * l;
for (n = 0; n < l2; n++) {
int rc = scan[n];
if (rc == idx)
return n;
}
assert(0);
return -1;
}
static void init_scan_neighbors(const int *scan, int l, int *neighbors,
int max_neighbors) {
int l2 = l * l;
int n, i, j;
for (n = 0; n < l2; n++) {
int rc = scan[n];
assert(max_neighbors == MAX_NEIGHBORS);
i = rc / l;
j = rc % l;
if (i > 0 && j > 0) {
// col/row scan is used for adst/dct, and generally means that
// energy decreases to zero much faster in the dimension in
// which ADST is used compared to the direction in which DCT
// is used. Likewise, we find much higher correlation between
// coefficients within the direction in which DCT is used.
// Therefore, if we use ADST/DCT, prefer the DCT neighbor coeff
// as a context. If ADST or DCT is used in both directions, we
// use the combination of the two as a context.
int a = find_in_scan(scan, l, (i - 1) * l + j);
int b = find_in_scan(scan, l, i * l + j - 1);
if (scan == vp9_col_scan_4x4 || scan == vp9_col_scan_8x8 ||
scan == vp9_col_scan_16x16) {
neighbors[max_neighbors * n + 0] = a;
neighbors[max_neighbors * n + 1] = -1;
} else if (scan == vp9_row_scan_4x4 || scan == vp9_row_scan_8x8 ||
scan == vp9_row_scan_16x16) {
neighbors[max_neighbors * n + 0] = b;
neighbors[max_neighbors * n + 1] = -1;
} else {
neighbors[max_neighbors * n + 0] = a;
neighbors[max_neighbors * n + 1] = b;
}
} else if (i > 0) {
neighbors[max_neighbors * n + 0] = find_in_scan(scan, l, (i - 1) * l + j);
neighbors[max_neighbors * n + 1] = -1;
} else if (j > 0) {
neighbors[max_neighbors * n + 0] =
find_in_scan(scan, l, i * l + j - 1);
neighbors[max_neighbors * n + 1] = -1;
} else {
assert(n == 0);
// dc predictor doesn't use previous tokens
neighbors[max_neighbors * n + 0] = -1;
}
assert(neighbors[max_neighbors * n + 0] < n);
}
}
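// Worked example (editor's note): vp9_default_zig_zag1d_4x4 starts
// {0, 4, 1, 5, ...}, so scan position 3 is coefficient (row 1, col 1). Its
// above neighbor, coefficient 1, sits at scan position 2, and its left
// neighbor, coefficient 4, at scan position 1. After initialization
// vp9_default_zig_zag1d_4x4_neighbors[2 * 3 + 0] == 2 and
// vp9_default_zig_zag1d_4x4_neighbors[2 * 3 + 1] == 1, both of which precede
// position 3 as the assertion above requires.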
void vp9_init_neighbors() {
init_scan_neighbors(vp9_default_zig_zag1d_4x4, 4,
vp9_default_zig_zag1d_4x4_neighbors, MAX_NEIGHBORS);
init_scan_neighbors(vp9_row_scan_4x4, 4,
vp9_row_scan_4x4_neighbors, MAX_NEIGHBORS);
init_scan_neighbors(vp9_col_scan_4x4, 4,
vp9_col_scan_4x4_neighbors, MAX_NEIGHBORS);
init_scan_neighbors(vp9_default_zig_zag1d_8x8, 8,
vp9_default_zig_zag1d_8x8_neighbors, MAX_NEIGHBORS);
init_scan_neighbors(vp9_row_scan_8x8, 8,
vp9_row_scan_8x8_neighbors, MAX_NEIGHBORS);
init_scan_neighbors(vp9_col_scan_8x8, 8,
vp9_col_scan_8x8_neighbors, MAX_NEIGHBORS);
init_scan_neighbors(vp9_default_zig_zag1d_16x16, 16,
vp9_default_zig_zag1d_16x16_neighbors, MAX_NEIGHBORS);
init_scan_neighbors(vp9_row_scan_16x16, 16,
vp9_row_scan_16x16_neighbors, MAX_NEIGHBORS);
init_scan_neighbors(vp9_col_scan_16x16, 16,
vp9_col_scan_16x16_neighbors, MAX_NEIGHBORS);
init_scan_neighbors(vp9_default_zig_zag1d_32x32, 32,
vp9_default_zig_zag1d_32x32_neighbors, MAX_NEIGHBORS);
}
const int *vp9_get_coef_neighbors_handle(const int *scan, int *pad) {
if (scan == vp9_default_zig_zag1d_4x4) {
*pad = MAX_NEIGHBORS;
return vp9_default_zig_zag1d_4x4_neighbors;
} else if (scan == vp9_row_scan_4x4) {
*pad = MAX_NEIGHBORS;
return vp9_row_scan_4x4_neighbors;
} else if (scan == vp9_col_scan_4x4) {
*pad = MAX_NEIGHBORS;
return vp9_col_scan_4x4_neighbors;
} else if (scan == vp9_default_zig_zag1d_8x8) {
*pad = MAX_NEIGHBORS;
return vp9_default_zig_zag1d_8x8_neighbors;
} else if (scan == vp9_row_scan_8x8) {
    *pad = MAX_NEIGHBORS;
return vp9_row_scan_8x8_neighbors;
} else if (scan == vp9_col_scan_8x8) {
    *pad = MAX_NEIGHBORS;
return vp9_col_scan_8x8_neighbors;
} else if (scan == vp9_default_zig_zag1d_16x16) {
*pad = MAX_NEIGHBORS;
return vp9_default_zig_zag1d_16x16_neighbors;
} else if (scan == vp9_row_scan_16x16) {
    *pad = MAX_NEIGHBORS;
return vp9_row_scan_16x16_neighbors;
} else if (scan == vp9_col_scan_16x16) {
    *pad = MAX_NEIGHBORS;
return vp9_col_scan_16x16_neighbors;
} else if (scan == vp9_default_zig_zag1d_32x32) {
*pad = MAX_NEIGHBORS;
return vp9_default_zig_zag1d_32x32_neighbors;
} else {
assert(0);
return NULL;
}
}
void vp9_coef_tree_initialize() {
vp9_init_neighbors();
init_bit_trees();
vp9_tokens_from_tree(vp9_coef_encodings, vp9_coef_tree);
#if CONFIG_CODE_NONZEROCOUNT
vp9_tokens_from_tree(vp9_nzc4x4_encodings, vp9_nzc4x4_tree);
vp9_tokens_from_tree(vp9_nzc8x8_encodings, vp9_nzc8x8_tree);
vp9_tokens_from_tree(vp9_nzc16x16_encodings, vp9_nzc16x16_tree);
vp9_tokens_from_tree(vp9_nzc32x32_encodings, vp9_nzc32x32_tree);
#endif
}
#if CONFIG_CODE_NONZEROCOUNT
#define mb_in_cur_tile(cm, mb_row, mb_col) \
((mb_col) >= (cm)->cur_tile_mb_col_start && \
(mb_col) <= (cm)->cur_tile_mb_col_end && \
(mb_row) >= 0)
#define choose_nzc_context(nzc_exp, t2, t1) \
((nzc_exp) >= (t2) ? 2 : (nzc_exp) >= (t1) ? 1 : 0)
#define NZC_T2_32X32 (16 << 6)
#define NZC_T1_32X32 (4 << 6)
#define NZC_T2_16X16 (12 << 6)
#define NZC_T1_16X16 (3 << 6)
#define NZC_T2_8X8 (8 << 6)
#define NZC_T1_8X8 (2 << 6)
#define NZC_T2_4X4 (4 << 6)
#define NZC_T1_4X4 (1 << 6)
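// Worked example (editor's note): nzc_exp carries 64 times the expected
// nonzero count at the given transform scale, so for 4x4 blocks the
// thresholds NZC_T2_4X4 == (4 << 6) and NZC_T1_4X4 == (1 << 6) mean that an
// expected count of at least 4 maps to context 2, at least 1 maps to
// context 1, and anything smaller maps to context 0.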
// Transforms a mb16 block index to a sb64 block index
static inline int mb16_to_sb64_index(int mb_row, int mb_col, int block) {
int r = (mb_row & 3);
int c = (mb_col & 3);
int b;
if (block < 16) { // Y
int ib = block >> 2;
int jb = block & 3;
ib += r * 4;
jb += c * 4;
b = ib * 16 + jb;
assert(b < 256);
return b;
} else { // UV
int base = block - (block & 3);
int ib = (block - base) >> 1;
int jb = (block - base) & 1;
ib += r * 2;
jb += c * 2;
b = base * 16 + ib * 8 + jb;
assert(b >= 256 && b < 384);
return b;
}
}
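// Worked example (editor's note): for the 16x16 macroblock at
// (mb_row, mb_col) == (2, 1) within its 64x64 superblock, Y block 5
// (ib == 1, jb == 1) lands at row 2 * 4 + 1 == 9 and column 1 * 4 + 1 == 5 of
// the superblock's 16x16 grid of 4x4 blocks, i.e. index 9 * 16 + 5 == 149.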
// Transforms a mb16 block index to a sb32 block index
static inline int mb16_to_sb32_index(int mb_row, int mb_col, int block) {
int r = (mb_row & 1);
int c = (mb_col & 1);
int b;
if (block < 16) { // Y
int ib = block >> 2;
int jb = block & 3;
ib += r * 4;
jb += c * 4;
b = ib * 8 + jb;
assert(b < 64);
return b;
} else { // UV
int base = block - (block & 3);
int ib = (block - base) >> 1;
int jb = (block - base) & 1;
ib += r * 2;
jb += c * 2;
b = base * 4 + ib * 4 + jb;
assert(b >= 64 && b < 96);
return b;
}
}
static inline int block_to_txfm_index(int block, TX_SIZE tx_size, int s) {
  // s is the log2 of the number of 4x4 blocks in each row/col of the
  // larger block
int b, ib, jb, nb;
ib = block >> s;
jb = block - (ib << s);
ib >>= tx_size;
jb >>= tx_size;
nb = 1 << (s - tx_size);
b = (ib * nb + jb) << (2 * tx_size);
return b;
}
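// Worked example (editor's note): in a 16x16 macroblock (s == 2, a 4x4 grid
// of 4x4 blocks) coded with 8x8 transforms (tx_size == TX_8X8, i.e. 1),
// raster block 10 sits in the bottom-right 8x8 transform block (ib == 1,
// jb == 1 after the shifts), so it maps to b == (1 * 2 + 1) << 2 == 12, the
// nzcs[] slot used for that transform block.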
/* BEGIN - Helper functions to get the y nzcs */
static unsigned int get_nzc_4x4_y_sb64(MB_MODE_INFO *mi, int block) {
int b;
assert(block < 256);
b = block_to_txfm_index(block, mi->txfm_size, 4);
assert(b < 256);
return mi->nzcs[b] << (6 - 2 * mi->txfm_size);
}
static unsigned int get_nzc_4x4_y_sb32(MB_MODE_INFO *mi, int block) {
int b;
assert(block < 64);
b = block_to_txfm_index(block, mi->txfm_size, 3);
assert(b < 64);
return mi->nzcs[b] << (6 - 2 * mi->txfm_size);
}
static unsigned int get_nzc_4x4_y_mb16(MB_MODE_INFO *mi, int block) {
int b;
assert(block < 16);
b = block_to_txfm_index(block, mi->txfm_size, 2);
assert(b < 16);
return mi->nzcs[b] << (6 - 2 * mi->txfm_size);
}
/* END - Helper functions to get the y nzcs */
/* Function to get y nzc where block index is in mb16 terms */
static unsigned int get_nzc_4x4_y(VP9_COMMON *cm, MODE_INFO *m,
int mb_row, int mb_col, int block) {
  // NOTE: All values returned are 64 times the true value at the 4x4 scale
MB_MODE_INFO *const mi = &m->mbmi;
const int mis = cm->mode_info_stride;
if (mi->mb_skip_coeff || !mb_in_cur_tile(cm, mb_row, mb_col))
return 0;
if (mi->sb_type == BLOCK_SIZE_SB64X64) {
int r = mb_row & 3;
int c = mb_col & 3;
m -= c + r * mis;
if (m->mbmi.mb_skip_coeff || !mb_in_cur_tile(cm, mb_row - r, mb_col - c))
return 0;
else
return get_nzc_4x4_y_sb64(
&m->mbmi, mb16_to_sb64_index(mb_row, mb_col, block));
} else if (mi->sb_type == BLOCK_SIZE_SB32X32) {
int r = mb_row & 1;
int c = mb_col & 1;
m -= c + r * mis;
if (m->mbmi.mb_skip_coeff || !mb_in_cur_tile(cm, mb_row - r, mb_col - c))
return 0;
else
return get_nzc_4x4_y_sb32(
&m->mbmi, mb16_to_sb32_index(mb_row, mb_col, block));
} else {
if (m->mbmi.mb_skip_coeff || !mb_in_cur_tile(cm, mb_row, mb_col))
return 0;
return get_nzc_4x4_y_mb16(mi, block);
}
}
/* BEGIN - Helper functions to get the uv nzcs */
static unsigned int get_nzc_4x4_uv_sb64(MB_MODE_INFO *mi, int block) {
int b;
int base, uvtxfm_size;
assert(block >= 256 && block < 384);
uvtxfm_size = mi->txfm_size;
base = 256 + (block & 64);
block -= base;
b = base + block_to_txfm_index(block, uvtxfm_size, 3);
assert(b >= 256 && b < 384);
return mi->nzcs[b] << (6 - 2 * uvtxfm_size);
}
static unsigned int get_nzc_4x4_uv_sb32(MB_MODE_INFO *mi, int block) {
int b;
int base, uvtxfm_size;
assert(block >= 64 && block < 96);
if (mi->txfm_size == TX_32X32)
uvtxfm_size = TX_16X16;
else
uvtxfm_size = mi->txfm_size;
base = 64 + (block & 16);
block -= base;
b = base + block_to_txfm_index(block, uvtxfm_size, 2);
assert(b >= 64 && b < 96);
return mi->nzcs[b] << (6 - 2 * uvtxfm_size);
}
static unsigned int get_nzc_4x4_uv_mb16(MB_MODE_INFO *mi, int block) {
int b;
int base, uvtxfm_size;
assert(block >= 16 && block < 24);
if (mi->txfm_size == TX_8X8 &&
(mi->mode == SPLITMV || mi->mode == I8X8_PRED))
uvtxfm_size = TX_4X4;
else if (mi->txfm_size == TX_16X16)
uvtxfm_size = TX_8X8;
else
uvtxfm_size = mi->txfm_size;
base = 16 + (block & 4);
block -= base;
b = base + block_to_txfm_index(block, uvtxfm_size, 1);
assert(b >= 16 && b < 24);
return mi->nzcs[b] << (6 - 2 * uvtxfm_size);
}
/* END - Helper functions to get the uv nzcs */
/* Function to get uv nzc where block index is in mb16 terms */
static unsigned int get_nzc_4x4_uv(VP9_COMMON *cm, MODE_INFO *m,
int mb_row, int mb_col, int block) {
  // NOTE: All values returned are 64 times the true value at the 4x4 scale
MB_MODE_INFO *const mi = &m->mbmi;
const int mis = cm->mode_info_stride;
if (mi->mb_skip_coeff || !mb_in_cur_tile(cm, mb_row, mb_col))
return 0;
if (mi->sb_type == BLOCK_SIZE_SB64X64) {
int r = mb_row & 3;
int c = mb_col & 3;
m -= c + r * mis;
if (m->mbmi.mb_skip_coeff || !mb_in_cur_tile(cm, mb_row - r, mb_col - c))
return 0;
else
return get_nzc_4x4_uv_sb64(
&m->mbmi, mb16_to_sb64_index(mb_row, mb_col, block));
} else if (mi->sb_type == BLOCK_SIZE_SB32X32) {
int r = mb_row & 1;
int c = mb_col & 1;
m -= c + r * mis;
if (m->mbmi.mb_skip_coeff || !mb_in_cur_tile(cm, mb_row - r, mb_col - c))
return 0;
else
return get_nzc_4x4_uv_sb32(
&m->mbmi, mb16_to_sb32_index(mb_row, mb_col, block));
} else {
return get_nzc_4x4_uv_mb16(mi, block);
}
}
int vp9_get_nzc_context_y_sb64(VP9_COMMON *cm, MODE_INFO *cur,
int mb_row, int mb_col, int block) {
// returns an index in [0, MAX_NZC_CONTEXTS - 1] to reflect how busy
// neighboring blocks are
int mis = cm->mode_info_stride;
int nzc_exp = 0;
TX_SIZE txfm_size = cur->mbmi.txfm_size;
assert(block < 256);
switch (txfm_size) {
case TX_32X32:
assert((block & 63) == 0);
if (block < 128) {
int o = (block >> 6) * 2;
nzc_exp =
get_nzc_4x4_y(cm, cur - mis + o, mb_row - 1, mb_col + o, 12) +
get_nzc_4x4_y(cm, cur - mis + o, mb_row - 1, mb_col + o, 13) +
get_nzc_4x4_y(cm, cur - mis + o, mb_row - 1, mb_col + o, 14) +
get_nzc_4x4_y(cm, cur - mis + o, mb_row - 1, mb_col + o, 15) +
get_nzc_4x4_y(cm, cur - mis + o + 1,
mb_row - 1, mb_col + o + 1, 12) +
get_nzc_4x4_y(cm, cur - mis + o + 1,
mb_row - 1, mb_col + o + 1, 13) +
get_nzc_4x4_y(cm, cur - mis + o + 1,
mb_row - 1, mb_col + o + 1, 14) +
get_nzc_4x4_y(cm, cur - mis + o + 1,
mb_row - 1, mb_col + o + 1, 15);
} else {
nzc_exp = cur->mbmi.nzcs[block - 128] << 3;
}
if ((block & 127) == 0) {
int o = (block >> 7) * 2;
nzc_exp +=
get_nzc_4x4_y(cm, cur - 1 + o * mis, mb_row + o, mb_col - 1, 3) +
get_nzc_4x4_y(cm, cur - 1 + o * mis, mb_row + o, mb_col - 1, 7) +
get_nzc_4x4_y(cm, cur - 1 + o * mis, mb_row + o, mb_col - 1, 11) +
get_nzc_4x4_y(cm, cur - 1 + o * mis, mb_row + o, mb_col - 1, 15) +
get_nzc_4x4_y(cm, cur - 1 + o * mis + mis,
mb_row + o + 1, mb_col - 1, 3) +
get_nzc_4x4_y(cm, cur - 1 + o * mis + mis,
mb_row + o + 1, mb_col - 1, 7) +
get_nzc_4x4_y(cm, cur - 1 + o * mis + mis,
mb_row + o + 1, mb_col - 1, 11) +
get_nzc_4x4_y(cm, cur - 1 + o * mis + mis,
mb_row + o + 1, mb_col - 1, 15);
} else {
nzc_exp += cur->mbmi.nzcs[block - 64] << 3;
}
nzc_exp <<= 2;
// Note nzc_exp is 64 times the average value expected at 32x32 scale
return choose_nzc_context(nzc_exp, NZC_T2_32X32, NZC_T1_32X32);
break;
case TX_16X16:
assert((block & 15) == 0);
if (block < 64) {
int o = block >> 4;
nzc_exp =
get_nzc_4x4_y(cm, cur - mis + o, mb_row - 1, mb_col + o, 12) +
get_nzc_4x4_y(cm, cur - mis + o, mb_row - 1, mb_col + o, 13) +
get_nzc_4x4_y(cm, cur - mis + o, mb_row - 1, mb_col + o, 14) +
get_nzc_4x4_y(cm, cur - mis + o, mb_row - 1, mb_col + o, 15);
} else {
nzc_exp = cur->mbmi.nzcs[block - 64] << 4;
}
if ((block & 63) == 0) {
int o = block >> 6;
nzc_exp +=
get_nzc_4x4_y(cm, cur - 1 + o * mis, mb_row + o, mb_col - 1, 3) +
get_nzc_4x4_y(cm, cur - 1 + o * mis, mb_row + o, mb_col - 1, 7) +
get_nzc_4x4_y(cm, cur - 1 + o * mis, mb_row + o, mb_col - 1, 11) +
get_nzc_4x4_y(cm, cur - 1 + o * mis, mb_row + o, mb_col - 1, 15);
} else {
nzc_exp += cur->mbmi.nzcs[block - 16] << 4;
}
nzc_exp <<= 1;
// Note nzc_exp is 64 times the average value expected at 16x16 scale
return choose_nzc_context(nzc_exp, NZC_T2_16X16, NZC_T1_16X16);
break;
case TX_8X8:
assert((block & 3) == 0);
if (block < 32) {
int o = block >> 3;
int p = ((block >> 2) & 1) ? 14 : 12;
nzc_exp =
get_nzc_4x4_y(cm, cur - mis + o, mb_row - 1, mb_col + o, p) +
get_nzc_4x4_y(cm, cur - mis + o, mb_row - 1, mb_col + o, p + 1);
} else {
nzc_exp = cur->mbmi.nzcs[block - 32] << 5;
}
if ((block & 31) == 0) {
int o = block >> 6;
int p = ((block >> 5) & 1) ? 11 : 3;
nzc_exp +=
get_nzc_4x4_y(cm, cur - 1 + o * mis, mb_row + o, mb_col - 1, p) +
get_nzc_4x4_y(cm, cur - 1 + o * mis, mb_row + o, mb_col - 1, p + 4);
} else {
nzc_exp += cur->mbmi.nzcs[block - 4] << 5;
}
// Note nzc_exp is 64 times the average value expected at 8x8 scale
return choose_nzc_context(nzc_exp, NZC_T2_8X8, NZC_T1_8X8);
break;
case TX_4X4:
if (block < 16) {
int o = block >> 2;
int p = block & 3;
nzc_exp = get_nzc_4x4_y(cm, cur - mis + o, mb_row - 1, mb_col + o,
12 + p);
} else {
nzc_exp = (cur->mbmi.nzcs[block - 16] << 6);
}
if ((block & 15) == 0) {
int o = block >> 6;
int p = (block >> 4) & 3;
nzc_exp += get_nzc_4x4_y(cm, cur - 1 + o * mis, mb_row + o, mb_col - 1,
3 + 4 * p);
} else {
nzc_exp += (cur->mbmi.nzcs[block - 1] << 6);
}
nzc_exp >>= 1;
// Note nzc_exp is 64 times the average value expected at 4x4 scale
return choose_nzc_context(nzc_exp, NZC_T2_4X4, NZC_T1_4X4);
break;
default:
return 0;
}
}
int vp9_get_nzc_context_y_sb32(VP9_COMMON *cm, MODE_INFO *cur,
int mb_row, int mb_col, int block) {
// returns an index in [0, MAX_NZC_CONTEXTS - 1] to reflect how busy
// neighboring blocks are
int mis = cm->mode_info_stride;
int nzc_exp = 0;
TX_SIZE txfm_size = cur->mbmi.txfm_size;
assert(block < 64);
switch (txfm_size) {
case TX_32X32:
assert(block == 0);
nzc_exp =
(get_nzc_4x4_y(cm, cur - mis, mb_row - 1, mb_col, 12) +
get_nzc_4x4_y(cm, cur - mis, mb_row - 1, mb_col, 13) +
get_nzc_4x4_y(cm, cur - mis, mb_row - 1, mb_col, 14) +
get_nzc_4x4_y(cm, cur - mis, mb_row - 1, mb_col, 15) +
get_nzc_4x4_y(cm, cur - mis + 1, mb_row - 1, mb_col + 1, 12) +
get_nzc_4x4_y(cm, cur - mis + 1, mb_row - 1, mb_col + 1, 13) +
get_nzc_4x4_y(cm, cur - mis + 1, mb_row - 1, mb_col + 1, 14) +
get_nzc_4x4_y(cm, cur - mis + 1, mb_row - 1, mb_col + 1, 15) +
get_nzc_4x4_y(cm, cur - 1, mb_row, mb_col - 1, 3) +
get_nzc_4x4_y(cm, cur - 1, mb_row, mb_col - 1, 7) +
get_nzc_4x4_y(cm, cur - 1, mb_row, mb_col - 1, 11) +
get_nzc_4x4_y(cm, cur - 1, mb_row, mb_col - 1, 15) +
get_nzc_4x4_y(cm, cur - 1 + mis, mb_row + 1, mb_col - 1, 3) +
get_nzc_4x4_y(cm, cur - 1 + mis, mb_row + 1, mb_col - 1, 7) +
get_nzc_4x4_y(cm, cur - 1 + mis, mb_row + 1, mb_col - 1, 11) +
get_nzc_4x4_y(cm, cur - 1 + mis, mb_row + 1, mb_col - 1, 15)) << 2;
// Note nzc_exp is 64 times the average value expected at 32x32 scale
return choose_nzc_context(nzc_exp, NZC_T2_32X32, NZC_T1_32X32);
break;
case TX_16X16:
assert((block & 15) == 0);
if (block < 32) {
int o = (block >> 4) & 1;
nzc_exp =
get_nzc_4x4_y(cm, cur - mis + o, mb_row - 1, mb_col + o, 12) +
get_nzc_4x4_y(cm, cur - mis + o, mb_row - 1, mb_col + o, 13) +
get_nzc_4x4_y(cm, cur - mis + o, mb_row - 1, mb_col + o, 14) +
get_nzc_4x4_y(cm, cur - mis + o, mb_row - 1, mb_col + o, 15);
} else {
nzc_exp = cur->mbmi.nzcs[block - 32] << 4;
}
if ((block & 31) == 0) {
int o = block >> 5;
nzc_exp +=
get_nzc_4x4_y(cm, cur - 1 + o * mis, mb_row + o, mb_col - 1, 3) +
get_nzc_4x4_y(cm, cur - 1 + o * mis, mb_row + o, mb_col - 1, 7) +
get_nzc_4x4_y(cm, cur - 1 + o * mis, mb_row + o, mb_col - 1, 11) +
get_nzc_4x4_y(cm, cur - 1 + o * mis, mb_row + o, mb_col - 1, 15);
} else {
nzc_exp += cur->mbmi.nzcs[block - 16] << 4;
}
nzc_exp <<= 1;
// Note nzc_exp is 64 times the average value expected at 16x16 scale
return choose_nzc_context(nzc_exp, NZC_T2_16X16, NZC_T1_16X16);
break;
case TX_8X8:
assert((block & 3) == 0);
if (block < 16) {
int o = block >> 3;
int p = ((block >> 2) & 1) ? 14 : 12;
nzc_exp =
get_nzc_4x4_y(cm, cur - mis + o, mb_row - 1, mb_col + o, p) +
get_nzc_4x4_y(cm, cur - mis + o, mb_row - 1, mb_col + o, p + 1);
} else {
nzc_exp = cur->mbmi.nzcs[block - 16] << 5;
}
if ((block & 15) == 0) {
int o = block >> 5;
int p = ((block >> 4) & 1) ? 11 : 3;
nzc_exp +=
get_nzc_4x4_y(cm, cur - 1 + o * mis, mb_row + o, mb_col - 1, p) +
get_nzc_4x4_y(cm, cur - 1 + o * mis, mb_row + o, mb_col - 1, p + 4);
} else {
nzc_exp += cur->mbmi.nzcs[block - 4] << 5;
}
// Note nzc_exp is 64 times the average value expected at 8x8 scale
return choose_nzc_context(nzc_exp, NZC_T2_8X8, NZC_T1_8X8);
break;
case TX_4X4:
if (block < 8) {
int o = block >> 2;
int p = block & 3;
nzc_exp = get_nzc_4x4_y(cm, cur - mis + o, mb_row - 1, mb_col + o,
12 + p);
} else {
nzc_exp = (cur->mbmi.nzcs[block - 8] << 6);
}
if ((block & 7) == 0) {
int o = block >> 5;
int p = (block >> 3) & 3;
nzc_exp += get_nzc_4x4_y(cm, cur - 1 + o * mis, mb_row + o, mb_col - 1,
3 + 4 * p);
} else {
nzc_exp += (cur->mbmi.nzcs[block - 1] << 6);
}
nzc_exp >>= 1;
// Note nzc_exp is 64 times the average value expected at 4x4 scale
return choose_nzc_context(nzc_exp, NZC_T2_4X4, NZC_T1_4X4);
break;
default:
return 0;
break;
}
}
int vp9_get_nzc_context_y_mb16(VP9_COMMON *cm, MODE_INFO *cur,
int mb_row, int mb_col, int block) {
// returns an index in [0, MAX_NZC_CONTEXTS - 1] to reflect how busy
// neighboring blocks are
int mis = cm->mode_info_stride;
int nzc_exp = 0;
TX_SIZE txfm_size = cur->mbmi.txfm_size;
assert(block < 16);
switch (txfm_size) {
case TX_16X16:
assert(block == 0);
nzc_exp =
get_nzc_4x4_y(cm, cur - mis, mb_row - 1, mb_col, 12) +
get_nzc_4x4_y(cm, cur - mis, mb_row - 1, mb_col, 13) +
get_nzc_4x4_y(cm, cur - mis, mb_row - 1, mb_col, 14) +
get_nzc_4x4_y(cm, cur - mis, mb_row - 1, mb_col, 15) +
get_nzc_4x4_y(cm, cur - 1, mb_row, mb_col - 1, 3) +
get_nzc_4x4_y(cm, cur - 1, mb_row, mb_col - 1, 7) +
get_nzc_4x4_y(cm, cur - 1, mb_row, mb_col - 1, 11) +
get_nzc_4x4_y(cm, cur - 1, mb_row, mb_col - 1, 15);
nzc_exp <<= 1;
// Note nzc_exp is 64 times the average value expected at 16x16 scale
return choose_nzc_context(nzc_exp, NZC_T2_16X16, NZC_T1_16X16);
case TX_8X8:
assert((block & 3) == 0);
if (block < 8) {
int p = ((block >> 2) & 1) ? 14 : 12;
nzc_exp =
get_nzc_4x4_y(cm, cur - mis, mb_row - 1, mb_col, p) +
get_nzc_4x4_y(cm, cur - mis, mb_row - 1, mb_col, p + 1);
} else {
nzc_exp = cur->mbmi.nzcs[block - 8] << 5;
}
if ((block & 7) == 0) {
int p = ((block >> 3) & 1) ? 11 : 3;
nzc_exp +=
get_nzc_4x4_y(cm, cur - 1, mb_row, mb_col - 1, p) +
get_nzc_4x4_y(cm, cur - 1, mb_row, mb_col - 1, p + 4);
} else {
nzc_exp += cur->mbmi.nzcs[block - 4] << 5;
}
// Note nzc_exp is 64 times the average value expected at 8x8 scale
return choose_nzc_context(nzc_exp, NZC_T2_8X8, NZC_T1_8X8);
case TX_4X4:
if (block < 4) {
int p = block & 3;
nzc_exp = get_nzc_4x4_y(cm, cur - mis, mb_row - 1, mb_col,
12 + p);
} else {
nzc_exp = (cur->mbmi.nzcs[block - 4] << 6);
}
if ((block & 3) == 0) {
int p = (block >> 2) & 3;
nzc_exp += get_nzc_4x4_y(cm, cur - 1, mb_row, mb_col - 1,
3 + 4 * p);
} else {
nzc_exp += (cur->mbmi.nzcs[block - 1] << 6);
}
nzc_exp >>= 1;
// Note nzc_exp is 64 times the average value expected at 4x4 scale
return choose_nzc_context(nzc_exp, NZC_T2_4X4, NZC_T1_4X4);
default:
return 0;
break;
}
}
int vp9_get_nzc_context_uv_sb64(VP9_COMMON *cm, MODE_INFO *cur,
int mb_row, int mb_col, int block) {
// returns an index in [0, MAX_NZC_CONTEXTS - 1] to reflect how busy
// neighboring blocks are
int mis = cm->mode_info_stride;
int nzc_exp = 0;
const int base = block - (block & 63);
const int boff = (block & 63);
const int base_mb16 = base >> 4;
TX_SIZE txfm_size = cur->mbmi.txfm_size;
TX_SIZE txfm_size_uv;
assert(block >= 256 && block < 384);
txfm_size_uv = txfm_size;
switch (txfm_size_uv) {
case TX_32X32:
assert(block == 256 || block == 320);
nzc_exp =
get_nzc_4x4_uv(cm, cur - mis, mb_row - 1, mb_col,
base_mb16 + 2) +
get_nzc_4x4_uv(cm, cur - mis, mb_row - 1, mb_col,
base_mb16 + 3) +
get_nzc_4x4_uv(cm, cur - mis + 1, mb_row - 1, mb_col + 1,
base_mb16 + 2) +
get_nzc_4x4_uv(cm, cur - mis + 1, mb_row - 1, mb_col + 1,
base_mb16 + 3) +
get_nzc_4x4_uv(cm, cur - mis + 2, mb_row - 1, mb_col + 2,
base_mb16 + 2) +
get_nzc_4x4_uv(cm, cur - mis + 2, mb_row - 1, mb_col + 2,
base_mb16 + 3) +
get_nzc_4x4_uv(cm, cur - mis + 3, mb_row - 1, mb_col + 3,
base_mb16 + 2) +
get_nzc_4x4_uv(cm, cur - mis + 3, mb_row - 1, mb_col + 3,
base_mb16 + 3) +
get_nzc_4x4_uv(cm, cur - 1, mb_row, mb_col - 1,
base_mb16 + 1) +
get_nzc_4x4_uv(cm, cur - 1, mb_row, mb_col - 1,
base_mb16 + 3) +
get_nzc_4x4_uv(cm, cur - 1 + mis, mb_row + 1, mb_col - 1,
base_mb16 + 1) +
get_nzc_4x4_uv(cm, cur - 1 + mis, mb_row + 1, mb_col - 1,
base_mb16 + 3) +
get_nzc_4x4_uv(cm, cur - 1 + 2 * mis, mb_row + 2, mb_col - 1,
base_mb16 + 1) +
get_nzc_4x4_uv(cm, cur - 1 + 2 * mis, mb_row + 2, mb_col - 1,
base_mb16 + 3) +
get_nzc_4x4_uv(cm, cur - 1 + 3 * mis, mb_row + 3, mb_col - 1,
base_mb16 + 1) +
get_nzc_4x4_uv(cm, cur - 1 + 3 * mis, mb_row + 3, mb_col - 1,
base_mb16 + 3);
nzc_exp <<= 2;
// Note nzc_exp is 64 times the average value expected at 32x32 scale
return choose_nzc_context(nzc_exp, NZC_T2_32X32, NZC_T1_32X32);
case TX_16X16:
// uv txfm_size 16x16
assert((block & 15) == 0);
if (boff < 32) {
int o = (boff >> 4) & 1;
nzc_exp =
get_nzc_4x4_uv(cm, cur - mis + o, mb_row - 1, mb_col + o,
base_mb16 + 2) +
get_nzc_4x4_uv(cm, cur - mis + o, mb_row - 1, mb_col + o,
base_mb16 + 3) +
get_nzc_4x4_uv(cm, cur - mis + o + 1, mb_row - 1, mb_col + o + 1,
base_mb16 + 2) +
get_nzc_4x4_uv(cm, cur - mis + o + 1, mb_row - 1, mb_col + o + 1,
base_mb16 + 3);
} else {
nzc_exp = cur->mbmi.nzcs[block - 32] << 4;
}
if ((boff & 31) == 0) {
int o = boff >> 5;
nzc_exp +=
get_nzc_4x4_uv(cm, cur - 1 + o * mis,
mb_row + o, mb_col - 1, base_mb16 + 1) +
get_nzc_4x4_uv(cm, cur - 1 + o * mis,
mb_row + o, mb_col - 1, base_mb16 + 3) +
get_nzc_4x4_uv(cm, cur - 1 + o * mis + mis,
mb_row + o + 1, mb_col - 1, base_mb16 + 1) +
get_nzc_4x4_uv(cm, cur - 1 + o * mis + mis,
mb_row + o + 1, mb_col - 1, base_mb16 + 3);
} else {
nzc_exp += cur->mbmi.nzcs[block - 16] << 4;
}
nzc_exp <<= 1;
// Note nzc_exp is 64 times the average value expected at 16x16 scale
return choose_nzc_context(nzc_exp, NZC_T2_16X16, NZC_T1_16X16);
case TX_8X8:
assert((block & 3) == 0);
if (boff < 16) {
int o = boff >> 2;
nzc_exp =
get_nzc_4x4_uv(cm, cur - mis + o, mb_row - 1, mb_col + o,
base_mb16 + 2) +
get_nzc_4x4_uv(cm, cur - mis + o, mb_row - 1, mb_col + o,
base_mb16 + 3);
} else {
nzc_exp = cur->mbmi.nzcs[block - 16] << 5;
}
if ((boff & 15) == 0) {
int o = boff >> 4;
nzc_exp +=
get_nzc_4x4_uv(cm, cur - 1 + o * mis, mb_row + o, mb_col - 1,
base_mb16 + 1) +
get_nzc_4x4_uv(cm, cur - 1 + o * mis, mb_row + o, mb_col - 1,
base_mb16 + 3);
} else {
nzc_exp += cur->mbmi.nzcs[block - 4] << 5;
}
// Note nzc_exp is 64 times the average value expected at 8x8 scale
return choose_nzc_context(nzc_exp, NZC_T2_8X8, NZC_T1_8X8);
case TX_4X4:
if (boff < 8) {
int o = boff >> 1;
int p = boff & 1;
nzc_exp = get_nzc_4x4_uv(cm, cur - mis + o, mb_row - 1, mb_col + o,
base_mb16 + 2 + p);
} else {
nzc_exp = (cur->mbmi.nzcs[block - 8] << 6);
}
if ((boff & 7) == 0) {
int o = boff >> 4;
int p = (boff >> 3) & 1;
nzc_exp += get_nzc_4x4_uv(cm, cur - 1 + o * mis, mb_row + o, mb_col - 1,
base_mb16 + 1 + 2 * p);
} else {
nzc_exp += (cur->mbmi.nzcs[block - 1] << 6);
}
nzc_exp >>= 1;
// Note nzc_exp is 64 times the average value expected at 4x4 scale
return choose_nzc_context(nzc_exp, NZC_T2_4X4, NZC_T1_4X4);
default:
return 0;
}
}
int vp9_get_nzc_context_uv_sb32(VP9_COMMON *cm, MODE_INFO *cur,
int mb_row, int mb_col, int block) {
// returns an index in [0, MAX_NZC_CONTEXTS - 1] to reflect how busy
// neighboring blocks are
int mis = cm->mode_info_stride;
int nzc_exp = 0;
const int base = block - (block & 15);
const int boff = (block & 15);
const int base_mb16 = base >> 2;
TX_SIZE txfm_size = cur->mbmi.txfm_size;
TX_SIZE txfm_size_uv;
assert(block >= 64 && block < 96);
if (txfm_size == TX_32X32)
txfm_size_uv = TX_16X16;
else
txfm_size_uv = txfm_size;
switch (txfm_size_uv) {
case TX_16X16:
// uv txfm_size 16x16
assert(block == 64 || block == 80);
nzc_exp =
get_nzc_4x4_uv(cm, cur - mis, mb_row - 1, mb_col,
base_mb16 + 2) +
get_nzc_4x4_uv(cm, cur - mis, mb_row - 1, mb_col,
base_mb16 + 3) +
get_nzc_4x4_uv(cm, cur - mis + 1, mb_row - 1, mb_col + 1,
base_mb16 + 2) +
get_nzc_4x4_uv(cm, cur - mis + 1, mb_row - 1, mb_col + 1,
base_mb16 + 3) +
        get_nzc_4x4_uv(cm, cur - 1, mb_row, mb_col - 1,
                       base_mb16 + 1) +
        get_nzc_4x4_uv(cm, cur - 1, mb_row, mb_col - 1,
                       base_mb16 + 3) +
get_nzc_4x4_uv(cm, cur - 1 + mis, mb_row + 1, mb_col - 1,
base_mb16 + 1) +
get_nzc_4x4_uv(cm, cur - 1 + mis, mb_row + 1, mb_col - 1,
base_mb16 + 3);
nzc_exp <<= 1;
// Note nzc_exp is 64 times the average value expected at 16x16 scale
return choose_nzc_context(nzc_exp, NZC_T2_16X16, NZC_T1_16X16);
break;
case TX_8X8:
assert((block & 3) == 0);
if (boff < 8) {
int o = boff >> 2;
nzc_exp =
get_nzc_4x4_uv(cm, cur - mis + o, mb_row - 1, mb_col + o,
base_mb16 + 2) +
get_nzc_4x4_uv(cm, cur - mis + o, mb_row - 1, mb_col + o,
base_mb16 + 3);
} else {
nzc_exp = cur->mbmi.nzcs[block - 8] << 5;
}
if ((boff & 7) == 0) {
int o = boff >> 3;
nzc_exp +=
get_nzc_4x4_uv(cm, cur - 1 + o * mis, mb_row + o, mb_col - 1,
base_mb16 + 1) +
get_nzc_4x4_uv(cm, cur - 1 + o * mis, mb_row + o, mb_col - 1,
base_mb16 + 3);
} else {
nzc_exp += cur->mbmi.nzcs[block - 4] << 5;
}
// Note nzc_exp is 64 times the average value expected at 8x8 scale
return choose_nzc_context(nzc_exp, NZC_T2_8X8, NZC_T1_8X8);
case TX_4X4:
if (boff < 4) {
int o = boff >> 1;
int p = boff & 1;
nzc_exp = get_nzc_4x4_uv(cm, cur - mis + o, mb_row - 1, mb_col + o,
base_mb16 + 2 + p);
} else {
nzc_exp = (cur->mbmi.nzcs[block - 4] << 6);
}
if ((boff & 3) == 0) {
int o = boff >> 3;
int p = (boff >> 2) & 1;
nzc_exp += get_nzc_4x4_uv(cm, cur - 1 + o * mis, mb_row + o, mb_col - 1,
base_mb16 + 1 + 2 * p);
} else {
nzc_exp += (cur->mbmi.nzcs[block - 1] << 6);
}
nzc_exp >>= 1;
// Note nzc_exp is 64 times the average value expected at 4x4 scale
return choose_nzc_context(nzc_exp, NZC_T2_4X4, NZC_T1_4X4);
default:
return 0;
}
}
int vp9_get_nzc_context_uv_mb16(VP9_COMMON *cm, MODE_INFO *cur,
int mb_row, int mb_col, int block) {
// returns an index in [0, MAX_NZC_CONTEXTS - 1] to reflect how busy
// neighboring blocks are
int mis = cm->mode_info_stride;
int nzc_exp = 0;
const int base = block - (block & 3);
const int boff = (block & 3);
const int base_mb16 = base;
TX_SIZE txfm_size = cur->mbmi.txfm_size;
TX_SIZE txfm_size_uv;
assert(block >= 16 && block < 24);
if (txfm_size == TX_16X16)
txfm_size_uv = TX_8X8;
else if (txfm_size == TX_8X8 &&
(cur->mbmi.mode == I8X8_PRED || cur->mbmi.mode == SPLITMV))
txfm_size_uv = TX_4X4;
else
txfm_size_uv = txfm_size;
switch (txfm_size_uv) {
case TX_8X8:
assert((block & 3) == 0);
nzc_exp =
get_nzc_4x4_uv(cm, cur - mis, mb_row - 1, mb_col, base_mb16 + 2) +
get_nzc_4x4_uv(cm, cur - mis, mb_row - 1, mb_col, base_mb16 + 3) +
get_nzc_4x4_uv(cm, cur - 1, mb_row, mb_col - 1, base_mb16 + 1) +
get_nzc_4x4_uv(cm, cur - 1, mb_row, mb_col - 1, base_mb16 + 3);
// Note nzc_exp is 64 times the average value expected at 8x8 scale
return choose_nzc_context(nzc_exp, NZC_T2_8X8, NZC_T1_8X8);
case TX_4X4:
if (boff < 2) {
int p = boff & 1;
nzc_exp = get_nzc_4x4_uv(cm, cur - mis, mb_row - 1, mb_col,
base_mb16 + 2 + p);
} else {
nzc_exp = (cur->mbmi.nzcs[block - 2] << 6);
}
if ((boff & 1) == 0) {
int p = (boff >> 1) & 1;
nzc_exp += get_nzc_4x4_uv(cm, cur - 1, mb_row, mb_col - 1,
base_mb16 + 1 + 2 * p);
} else {
nzc_exp += (cur->mbmi.nzcs[block - 1] << 6);
}
nzc_exp >>= 1;
// Note nzc_exp is 64 times the average value expected at 4x4 scale
return choose_nzc_context(nzc_exp, NZC_T2_4X4, NZC_T1_4X4);
default:
return 0;
}
}
int vp9_get_nzc_context(VP9_COMMON *cm, MACROBLOCKD *xd, int block) {
if (xd->mode_info_context->mbmi.sb_type == BLOCK_SIZE_SB64X64) {
assert(block < 384);
if (block < 256)
return vp9_get_nzc_context_y_sb64(cm, xd->mode_info_context,
get_mb_row(xd), get_mb_col(xd), block);
else
return vp9_get_nzc_context_uv_sb64(cm, xd->mode_info_context,
get_mb_row(xd), get_mb_col(xd), block);
} else if (xd->mode_info_context->mbmi.sb_type == BLOCK_SIZE_SB32X32) {
assert(block < 96);
if (block < 64)
return vp9_get_nzc_context_y_sb32(cm, xd->mode_info_context,
get_mb_row(xd), get_mb_col(xd), block);
else
return vp9_get_nzc_context_uv_sb32(cm, xd->mode_info_context,
get_mb_row(xd), get_mb_col(xd), block);
} else {
assert(block < 64);
if (block < 16)
return vp9_get_nzc_context_y_mb16(cm, xd->mode_info_context,
get_mb_row(xd), get_mb_col(xd), block);
else
return vp9_get_nzc_context_uv_mb16(cm, xd->mode_info_context,
get_mb_row(xd), get_mb_col(xd), block);
}
}
static void update_nzc(VP9_COMMON *cm,
uint16_t nzc,
int nzc_context,
TX_SIZE tx_size,
int ref,
int type) {
int e, c;
if (!get_nzc_used(tx_size)) return;
c = codenzc(nzc);
if (tx_size == TX_32X32)
cm->fc.nzc_counts_32x32[nzc_context][ref][type][c]++;
else if (tx_size == TX_16X16)
cm->fc.nzc_counts_16x16[nzc_context][ref][type][c]++;
else if (tx_size == TX_8X8)
cm->fc.nzc_counts_8x8[nzc_context][ref][type][c]++;
else if (tx_size == TX_4X4)
cm->fc.nzc_counts_4x4[nzc_context][ref][type][c]++;
else
assert(0);
if ((e = vp9_extranzcbits[c])) {
int x = nzc - vp9_basenzcvalue[c];
while (e--) {
int b = (x >> e) & 1;
cm->fc.nzc_pcat_counts[nzc_context][c - NZC_TOKENS_NOEXTRA][e][b]++;
}
}
}
static void update_nzcs_sb64(VP9_COMMON *cm,
MACROBLOCKD *xd,
int mb_row,
int mb_col) {
MODE_INFO *m = xd->mode_info_context;
MB_MODE_INFO *const mi = &m->mbmi;
int j, nzc_context;
const int ref = m->mbmi.ref_frame != INTRA_FRAME;
assert(mb_col == get_mb_col(xd));
assert(mb_row == get_mb_row(xd));
if (mi->mb_skip_coeff)
return;
switch (mi->txfm_size) {
case TX_32X32:
for (j = 0; j < 256; j += 64) {
nzc_context = vp9_get_nzc_context_y_sb64(cm, m, mb_row, mb_col, j);
update_nzc(cm, m->mbmi.nzcs[j], nzc_context, TX_32X32, ref, 0);
}
for (j = 256; j < 384; j += 64) {
nzc_context = vp9_get_nzc_context_uv_sb64(cm, m, mb_row, mb_col, j);
update_nzc(cm, m->mbmi.nzcs[j], nzc_context, TX_32X32, ref, 1);
}
break;
case TX_16X16:
for (j = 0; j < 256; j += 16) {
nzc_context = vp9_get_nzc_context_y_sb64(cm, m, mb_row, mb_col, j);
update_nzc(cm, m->mbmi.nzcs[j], nzc_context, TX_16X16, ref, 0);
}
for (j = 256; j < 384; j += 16) {
nzc_context = vp9_get_nzc_context_uv_sb64(cm, m, mb_row, mb_col, j);
update_nzc(cm, m->mbmi.nzcs[j], nzc_context, TX_16X16, ref, 1);
}
break;
case TX_8X8:
for (j = 0; j < 256; j += 4) {
nzc_context = vp9_get_nzc_context_y_sb64(cm, m, mb_row, mb_col, j);
update_nzc(cm, m->mbmi.nzcs[j], nzc_context, TX_8X8, ref, 0);
}
for (j = 256; j < 384; j += 4) {
nzc_context = vp9_get_nzc_context_uv_sb64(cm, m, mb_row, mb_col, j);
update_nzc(cm, m->mbmi.nzcs[j], nzc_context, TX_8X8, ref, 1);
}
break;
case TX_4X4:
for (j = 0; j < 256; ++j) {
nzc_context = vp9_get_nzc_context_y_sb64(cm, m, mb_row, mb_col, j);
update_nzc(cm, m->mbmi.nzcs[j], nzc_context, TX_4X4, ref, 0);
}
for (j = 256; j < 384; ++j) {
nzc_context = vp9_get_nzc_context_uv_sb64(cm, m, mb_row, mb_col, j);
update_nzc(cm, m->mbmi.nzcs[j], nzc_context, TX_4X4, ref, 1);
}
break;
default:
break;
}
}
static void update_nzcs_sb32(VP9_COMMON *cm,
MACROBLOCKD *xd,
int mb_row,
int mb_col) {
MODE_INFO *m = xd->mode_info_context;
MB_MODE_INFO *const mi = &m->mbmi;
int j, nzc_context;
const int ref = m->mbmi.ref_frame != INTRA_FRAME;
assert(mb_col == get_mb_col(xd));
assert(mb_row == get_mb_row(xd));
if (mi->mb_skip_coeff)
return;
switch (mi->txfm_size) {
case TX_32X32:
for (j = 0; j < 64; j += 64) {
nzc_context = vp9_get_nzc_context_y_sb32(cm, m, mb_row, mb_col, j);
update_nzc(cm, m->mbmi.nzcs[j], nzc_context, TX_32X32, ref, 0);
}
for (j = 64; j < 96; j += 16) {
nzc_context = vp9_get_nzc_context_uv_sb32(cm, m, mb_row, mb_col, j);
update_nzc(cm, m->mbmi.nzcs[j], nzc_context, TX_16X16, ref, 1);
}
break;
case TX_16X16:
for (j = 0; j < 64; j += 16) {
nzc_context = vp9_get_nzc_context_y_sb32(cm, m, mb_row, mb_col, j);
update_nzc(cm, m->mbmi.nzcs[j], nzc_context, TX_16X16, ref, 0);
}
for (j = 64; j < 96; j += 16) {
nzc_context = vp9_get_nzc_context_uv_sb32(cm, m, mb_row, mb_col, j);
update_nzc(cm, m->mbmi.nzcs[j], nzc_context, TX_16X16, ref, 1);
}
break;
case TX_8X8:
for (j = 0; j < 64; j += 4) {
nzc_context = vp9_get_nzc_context_y_sb32(cm, m, mb_row, mb_col, j);
update_nzc(cm, m->mbmi.nzcs[j], nzc_context, TX_8X8, ref, 0);
}
for (j = 64; j < 96; j += 4) {
nzc_context = vp9_get_nzc_context_uv_sb32(cm, m, mb_row, mb_col, j);
update_nzc(cm, m->mbmi.nzcs[j], nzc_context, TX_8X8, ref, 1);
}
break;
case TX_4X4:
for (j = 0; j < 64; ++j) {
nzc_context = vp9_get_nzc_context_y_sb32(cm, m, mb_row, mb_col, j);
update_nzc(cm, m->mbmi.nzcs[j], nzc_context, TX_4X4, ref, 0);
}
for (j = 64; j < 96; ++j) {
nzc_context = vp9_get_nzc_context_uv_sb32(cm, m, mb_row, mb_col, j);
update_nzc(cm, m->mbmi.nzcs[j], nzc_context, TX_4X4, ref, 1);
}
break;
default:
break;
}
2010-05-18 17:58:33 +02:00
}
static void update_nzcs_mb16(VP9_COMMON *cm,
MACROBLOCKD *xd,
int mb_row,
int mb_col) {
MODE_INFO *m = xd->mode_info_context;
MB_MODE_INFO *const mi = &m->mbmi;
int j, nzc_context;
const int ref = m->mbmi.ref_frame != INTRA_FRAME;
assert(mb_col == get_mb_col(xd));
assert(mb_row == get_mb_row(xd));
if (mi->mb_skip_coeff)
return;
switch (mi->txfm_size) {
case TX_16X16:
for (j = 0; j < 16; j += 16) {
nzc_context = vp9_get_nzc_context_y_mb16(cm, m, mb_row, mb_col, j);
update_nzc(cm, m->mbmi.nzcs[j], nzc_context, TX_16X16, ref, 0);
}
for (j = 16; j < 24; j += 4) {
nzc_context = vp9_get_nzc_context_uv_mb16(cm, m, mb_row, mb_col, j);
update_nzc(cm, m->mbmi.nzcs[j], nzc_context, TX_8X8, ref, 1);
}
break;
case TX_8X8:
for (j = 0; j < 16; j += 4) {
nzc_context = vp9_get_nzc_context_y_mb16(cm, m, mb_row, mb_col, j);
update_nzc(cm, m->mbmi.nzcs[j], nzc_context, TX_8X8, ref, 0);
}
if (mi->mode == I8X8_PRED || mi->mode == SPLITMV) {
for (j = 16; j < 24; ++j) {
nzc_context = vp9_get_nzc_context_uv_mb16(cm, m, mb_row, mb_col, j);
update_nzc(cm, m->mbmi.nzcs[j], nzc_context, TX_4X4, ref, 1);
}
} else {
for (j = 16; j < 24; j += 4) {
nzc_context = vp9_get_nzc_context_uv_mb16(cm, m, mb_row, mb_col, j);
update_nzc(cm, m->mbmi.nzcs[j], nzc_context, TX_8X8, ref, 1);
}
}
break;
case TX_4X4:
for (j = 0; j < 16; ++j) {
nzc_context = vp9_get_nzc_context_y_mb16(cm, m, mb_row, mb_col, j);
update_nzc(cm, m->mbmi.nzcs[j], nzc_context, TX_4X4, ref, 0);
}
for (j = 16; j < 24; ++j) {
nzc_context = vp9_get_nzc_context_uv_mb16(cm, m, mb_row, mb_col, j);
update_nzc(cm, m->mbmi.nzcs[j], nzc_context, TX_4X4, ref, 1);
}
break;
default:
break;
}
}
void vp9_update_nzc_counts(VP9_COMMON *cm,
MACROBLOCKD *xd,
int mb_row,
int mb_col) {
if (xd->mode_info_context->mbmi.sb_type == BLOCK_SIZE_SB64X64)
update_nzcs_sb64(cm, xd, mb_row, mb_col);
else if (xd->mode_info_context->mbmi.sb_type == BLOCK_SIZE_SB32X32)
update_nzcs_sb32(cm, xd, mb_row, mb_col);
else
update_nzcs_mb16(cm, xd, mb_row, mb_col);
}
#endif // CONFIG_CODE_NONZEROCOUNT
// #define COEF_COUNT_TESTING
#define COEF_COUNT_SAT 24
#define COEF_MAX_UPDATE_FACTOR 112
#define COEF_COUNT_SAT_KEY 24
#define COEF_MAX_UPDATE_FACTOR_KEY 112
#define COEF_COUNT_SAT_AFTER_KEY 24
#define COEF_MAX_UPDATE_FACTOR_AFTER_KEY 128
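// Worked example (editor's note): adaptation blends the previous probability
// with the newly measured one using
//   factor = update_factor * min(count, count_sat) / count_sat.
// With COEF_COUNT_SAT == 24 and COEF_MAX_UPDATE_FACTOR == 112, a branch
// observed 12 times gets factor == 56, i.e. the new estimate contributes
// roughly 56/256 of the blended probability; 24 or more observations saturate
// the weight at 112/256.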
static void adapt_coef_probs(vp9_coeff_probs *dst_coef_probs,
vp9_coeff_probs *pre_coef_probs,
int block_types, vp9_coeff_count *coef_counts,
unsigned int (*eob_branch_count)[REF_TYPES]
[COEF_BANDS]
[PREV_COEF_CONTEXTS],
int count_sat, int update_factor) {
int t, i, j, k, l, count;
unsigned int branch_ct[ENTROPY_NODES][2];
vp9_prob coef_probs[ENTROPY_NODES];
int factor;
#if CONFIG_MODELCOEFPROB && MODEL_BASED_ADAPT
int entropy_nodes_adapt = UNCONSTRAINED_ADAPT_NODES;
#else
int entropy_nodes_adapt = ENTROPY_NODES;
#endif
for (i = 0; i < block_types; ++i)
for (j = 0; j < REF_TYPES; ++j)
for (k = 0; k < COEF_BANDS; ++k)
for (l = 0; l < PREV_COEF_CONTEXTS; ++l) {
if (l >= 3 && k == 0)
continue;
vp9_tree_probs_from_distribution(vp9_coef_tree,
coef_probs, branch_ct,
coef_counts[i][j][k][l], 0);
branch_ct[0][1] = eob_branch_count[i][j][k][l] - branch_ct[0][0];
coef_probs[0] = get_binary_prob(branch_ct[0][0], branch_ct[0][1]);
for (t = 0; t < entropy_nodes_adapt; ++t) {
count = branch_ct[t][0] + branch_ct[t][1];
count = count > count_sat ? count_sat : count;
factor = (update_factor * count / count_sat);
dst_coef_probs[i][j][k][l][t] =
weighted_prob(pre_coef_probs[i][j][k][l][t],
coef_probs[t], factor);
#if CONFIG_MODELCOEFPROB && MODEL_BASED_ADAPT
if (t == UNCONSTRAINED_NODES - 1)
vp9_get_model_distribution(
dst_coef_probs[i][j][k][l][UNCONSTRAINED_NODES - 1],
dst_coef_probs[i][j][k][l], i, j);
#endif
}
}
}
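
// Backward-adapt the coefficient probabilities for every transform size.
// The saturation count and maximum update factor depend on whether this
// frame is, or immediately follows, a key frame.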
void vp9_adapt_coef_probs(VP9_COMMON *cm) {
int count_sat;
int update_factor; /* denominator 256 */
if (cm->frame_type == KEY_FRAME) {
update_factor = COEF_MAX_UPDATE_FACTOR_KEY;
count_sat = COEF_COUNT_SAT_KEY;
} else if (cm->last_frame_type == KEY_FRAME) {
update_factor = COEF_MAX_UPDATE_FACTOR_AFTER_KEY; /* adapt quickly */
count_sat = COEF_COUNT_SAT_AFTER_KEY;
} else {
update_factor = COEF_MAX_UPDATE_FACTOR;
count_sat = COEF_COUNT_SAT;
}
adapt_coef_probs(cm->fc.coef_probs_4x4, cm->fc.pre_coef_probs_4x4,
BLOCK_TYPES, cm->fc.coef_counts_4x4,
cm->fc.eob_branch_counts[TX_4X4],
count_sat, update_factor);
adapt_coef_probs(cm->fc.coef_probs_8x8, cm->fc.pre_coef_probs_8x8,
BLOCK_TYPES, cm->fc.coef_counts_8x8,
cm->fc.eob_branch_counts[TX_8X8],
count_sat, update_factor);
adapt_coef_probs(cm->fc.coef_probs_16x16, cm->fc.pre_coef_probs_16x16,
BLOCK_TYPES, cm->fc.coef_counts_16x16,
cm->fc.eob_branch_counts[TX_16X16],
count_sat, update_factor);
adapt_coef_probs(cm->fc.coef_probs_32x32, cm->fc.pre_coef_probs_32x32,
BLOCK_TYPES, cm->fc.coef_counts_32x32,
cm->fc.eob_branch_counts[TX_32X32],
count_sat, update_factor);
}
#if CONFIG_CODE_NONZEROCOUNT
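// Adapt the nonzero-count (NZC) token probabilities for one transform size,
// using the same count-saturated blending as the coefficient probabilities.
// Does nothing if NZC coding is not used for this transform size.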
static void adapt_nzc_probs_common(VP9_COMMON *cm,
TX_SIZE tx_size,
int count_sat,
int update_factor) {
int c, r, b, n;
int count, factor;
unsigned int nzc_branch_ct[NZC32X32_NODES][2];
vp9_prob nzc_probs[NZC32X32_NODES];
int tokens, nodes;
const vp9_tree_index *nzc_tree;
vp9_prob *dst_nzc_probs;
vp9_prob *pre_nzc_probs;
unsigned int *nzc_counts;
if (!get_nzc_used(tx_size)) return;
if (tx_size == TX_32X32) {
tokens = NZC32X32_TOKENS;
nzc_tree = vp9_nzc32x32_tree;
dst_nzc_probs = cm->fc.nzc_probs_32x32[0][0][0];
pre_nzc_probs = cm->fc.pre_nzc_probs_32x32[0][0][0];
nzc_counts = cm->fc.nzc_counts_32x32[0][0][0];
} else if (tx_size == TX_16X16) {
tokens = NZC16X16_TOKENS;
nzc_tree = vp9_nzc16x16_tree;
dst_nzc_probs = cm->fc.nzc_probs_16x16[0][0][0];
pre_nzc_probs = cm->fc.pre_nzc_probs_16x16[0][0][0];
nzc_counts = cm->fc.nzc_counts_16x16[0][0][0];
} else if (tx_size == TX_8X8) {
tokens = NZC8X8_TOKENS;
nzc_tree = vp9_nzc8x8_tree;
dst_nzc_probs = cm->fc.nzc_probs_8x8[0][0][0];
pre_nzc_probs = cm->fc.pre_nzc_probs_8x8[0][0][0];
nzc_counts = cm->fc.nzc_counts_8x8[0][0][0];
} else {
nzc_tree = vp9_nzc4x4_tree;
tokens = NZC4X4_TOKENS;
dst_nzc_probs = cm->fc.nzc_probs_4x4[0][0][0];
pre_nzc_probs = cm->fc.pre_nzc_probs_4x4[0][0][0];
nzc_counts = cm->fc.nzc_counts_4x4[0][0][0];
}
nodes = tokens - 1;
for (c = 0; c < MAX_NZC_CONTEXTS; ++c)
for (r = 0; r < REF_TYPES; ++r)
for (b = 0; b < BLOCK_TYPES; ++b) {
int offset = c * REF_TYPES * BLOCK_TYPES + r * BLOCK_TYPES + b;
int offset_nodes = offset * nodes;
int offset_tokens = offset * tokens;
vp9_tree_probs_from_distribution(nzc_tree,
nzc_probs, nzc_branch_ct,
nzc_counts + offset_tokens, 0);
for (n = 0; n < nodes; ++n) {
count = nzc_branch_ct[n][0] + nzc_branch_ct[n][1];
count = count > count_sat ? count_sat : count;
factor = (update_factor * count / count_sat);
dst_nzc_probs[offset_nodes + n] =
weighted_prob(pre_nzc_probs[offset_nodes + n],
nzc_probs[n], factor);
}
}
}
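
// Adapt the probabilities of the extra bits carried by the larger NZC tokens.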
static void adapt_nzc_pcat(VP9_COMMON *cm, int count_sat, int update_factor) {
int c, t;
int count, factor;
if (!(get_nzc_used(TX_4X4) || get_nzc_used(TX_8X8) ||
get_nzc_used(TX_16X16) || get_nzc_used(TX_32X32)))
return;
for (c = 0; c < MAX_NZC_CONTEXTS; ++c) {
for (t = 0; t < NZC_TOKENS_EXTRA; ++t) {
int bits = vp9_extranzcbits[t + NZC_TOKENS_NOEXTRA];
int b;
for (b = 0; b < bits; ++b) {
vp9_prob prob = get_binary_prob(cm->fc.nzc_pcat_counts[c][t][b][0],
cm->fc.nzc_pcat_counts[c][t][b][1]);
count = cm->fc.nzc_pcat_counts[c][t][b][0] +
cm->fc.nzc_pcat_counts[c][t][b][1];
count = count > count_sat ? count_sat : count;
factor = (update_factor * count / count_sat);
cm->fc.nzc_pcat_probs[c][t][b] = weighted_prob(
cm->fc.pre_nzc_pcat_probs[c][t][b], prob, factor);
}
}
}
}
// #define NZC_COUNT_TESTING
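// Adapt all NZC probabilities: the per-transform-size token trees and the
// extra-bit probabilities, using the same frame-type-dependent saturation
// count and update factor as the coefficient adaptation.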
void vp9_adapt_nzc_probs(VP9_COMMON *cm) {
int count_sat;
int update_factor; /* denominator 256 */
#ifdef NZC_COUNT_TESTING
int c, r, b, t;
printf("\n");
for (c = 0; c < MAX_NZC_CONTEXTS; ++c)
for (r = 0; r < REF_TYPES; ++r) {
for (b = 0; b < BLOCK_TYPES; ++b) {
printf(" {");
for (t = 0; t < NZC4X4_TOKENS; ++t) {
printf(" %d,", cm->fc.nzc_counts_4x4[c][r][b][t]);
}
printf("}\n");
}
printf("\n");
}
#endif
if (cm->frame_type == KEY_FRAME) {
update_factor = COEF_MAX_UPDATE_FACTOR_KEY;
count_sat = COEF_COUNT_SAT_KEY;
} else if (cm->last_frame_type == KEY_FRAME) {
update_factor = COEF_MAX_UPDATE_FACTOR_AFTER_KEY; /* adapt quickly */
count_sat = COEF_COUNT_SAT_AFTER_KEY;
} else {
update_factor = COEF_MAX_UPDATE_FACTOR;
count_sat = COEF_COUNT_SAT;
}
adapt_nzc_probs_common(cm, TX_4X4, count_sat, update_factor);
adapt_nzc_probs_common(cm, TX_8X8, count_sat, update_factor);
adapt_nzc_probs_common(cm, TX_16X16, count_sat, update_factor);
adapt_nzc_probs_common(cm, TX_32X32, count_sat, update_factor);
adapt_nzc_pcat(cm, count_sat, update_factor);
}
#endif // CONFIG_CODE_NONZEROCOUNT