/*
 *  Copyright (c) 2012 The WebM project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */
#include <stdlib.h>
#include <new>

#include "third_party/googletest/src/include/gtest/gtest.h"

#include "test/clear_system_state.h"
#include "test/register_state_check.h"

#include "vpx/vpx_integer.h"
#include "./vpx_config.h"
extern "C" {
#include "vpx_mem/vpx_mem.h"
#if CONFIG_VP8_ENCODER
# include "vp8/common/variance.h"
# include "./vp8_rtcd.h"
#endif
#if CONFIG_VP9_ENCODER
# include "vp9/encoder/vp9_variance.h"
# include "./vp9_rtcd.h"
#endif
}
#include "test/acm_random.h"

namespace {

using ::std::tr1::get;
using ::std::tr1::make_tuple;
using ::std::tr1::tuple;
using libvpx_test::ACMRandom;

static unsigned int variance_ref(const uint8_t *ref, const uint8_t *src,
                                 int l2w, int l2h, unsigned int *sse_ptr) {
  int se = 0;
  unsigned int sse = 0;
  const int w = 1 << l2w, h = 1 << l2h;
  for (int y = 0; y < h; y++) {
    for (int x = 0; x < w; x++) {
      int diff = ref[w * y + x] - src[w * y + x];
      se += diff;
      sse += diff * diff;
    }
  }
  *sse_ptr = sse;
  return sse - (((int64_t) se * se) >> (l2w + l2h));
}

static unsigned int subpel_variance_ref(const uint8_t *ref, const uint8_t *src,
                                        int l2w, int l2h, int xoff, int yoff,
                                        unsigned int *sse_ptr) {
  int se = 0;
  unsigned int sse = 0;
  const int w = 1 << l2w, h = 1 << l2h;
  for (int y = 0; y < h; y++) {
    for (int x = 0; x < w; x++) {
      // bilinear interpolation at a 16th pel step
      const int a1 = ref[(w + 1) * (y + 0) + x + 0];
      const int a2 = ref[(w + 1) * (y + 0) + x + 1];
      const int b1 = ref[(w + 1) * (y + 1) + x + 0];
      const int b2 = ref[(w + 1) * (y + 1) + x + 1];
      const int a = a1 + (((a2 - a1) * xoff + 8) >> 4);
      const int b = b1 + (((b2 - b1) * xoff + 8) >> 4);
      const int r = a + (((b - a) * yoff + 8) >> 4);
      int diff = r - src[w * y + x];
      se += diff;
      sse += diff * diff;
    }
  }
  *sse_ptr = sse;
  return sse - (((int64_t) se * se) >> (l2w + l2h));
}
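
// Both reference implementations above compute the variance through the
// integer identity
//   variance = SSE - SE^2 / N,  where N = 2^(l2w + l2h) is the pixel count.
// For example, on a 4x4 block (N = 16) where every pixel differs by exactly
// +2: SE = 32, SSE = 64, and 64 - (32 * 32) / 16 = 0, i.e. a constant offset
// between the two blocks contributes nothing to the variance.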

template<typename VarianceFunctionType>
class VarianceTest
    : public ::testing::TestWithParam<tuple<int, int, VarianceFunctionType> > {
 public:
  virtual void SetUp() {
    const tuple<int, int, VarianceFunctionType>& params = this->GetParam();
    log2width_ = get<0>(params);
    width_ = 1 << log2width_;
    log2height_ = get<1>(params);
    height_ = 1 << log2height_;
    variance_ = get<2>(params);

    rnd(ACMRandom::DeterministicSeed());
    block_size_ = width_ * height_;
    src_ = new uint8_t[block_size_];
    ref_ = new uint8_t[block_size_];
    ASSERT_TRUE(src_ != NULL);
    ASSERT_TRUE(ref_ != NULL);
  }

  virtual void TearDown() {
    delete[] src_;
    delete[] ref_;
    libvpx_test::ClearSystemState();
  }

 protected:
  void ZeroTest();
  void RefTest();
  void OneQuarterTest();

  ACMRandom rnd;
  uint8_t* src_;
  uint8_t* ref_;
  int width_, log2width_;
  int height_, log2height_;
  int block_size_;
  VarianceFunctionType variance_;
};

template<typename VarianceFunctionType>
void VarianceTest<VarianceFunctionType>::ZeroTest() {
  for (int i = 0; i <= 255; ++i) {
    memset(src_, i, block_size_);
    for (int j = 0; j <= 255; ++j) {
      memset(ref_, j, block_size_);
      unsigned int sse;
      unsigned int var;
      REGISTER_STATE_CHECK(var = variance_(src_, width_, ref_, width_, &sse));
      EXPECT_EQ(0u, var) << "src values: " << i << " ref values: " << j;
    }
  }
}

template<typename VarianceFunctionType>
void VarianceTest<VarianceFunctionType>::RefTest() {
  for (int i = 0; i < 10; ++i) {
    for (int j = 0; j < block_size_; j++) {
      src_[j] = rnd.Rand8();
      ref_[j] = rnd.Rand8();
    }
    unsigned int sse1, sse2;
    unsigned int var1;
    REGISTER_STATE_CHECK(var1 = variance_(src_, width_, ref_, width_, &sse1));
    const unsigned int var2 = variance_ref(src_, ref_, log2width_,
                                           log2height_, &sse2);
    EXPECT_EQ(sse1, sse2);
    EXPECT_EQ(var1, var2);
  }
}

template<typename VarianceFunctionType>
void VarianceTest<VarianceFunctionType>::OneQuarterTest() {
  memset(src_, 255, block_size_);
  const int half = block_size_ / 2;
  memset(ref_, 255, half);
  memset(ref_ + half, 0, half);
  unsigned int sse;
  unsigned int var;
  REGISTER_STATE_CHECK(var = variance_(src_, width_, ref_, width_, &sse));
  const unsigned int expected = block_size_ * 255 * 255 / 4;
  EXPECT_EQ(expected, var);
}
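
// OneQuarterTest expectation: src is constant 255 while half of ref is 255
// and half is 0, so half of the N pixels have |diff| = 255. That gives
// SSE = (N / 2) * 255^2 and SE = +/-(N / 2) * 255, hence
// variance = SSE - SE^2 / N = (N / 4) * 255^2 = block_size_ * 255 * 255 / 4.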

#if CONFIG_VP9_ENCODER

unsigned int subpel_avg_variance_ref(const uint8_t *ref,
                                     const uint8_t *src,
                                     const uint8_t *second_pred,
                                     int l2w, int l2h,
                                     int xoff, int yoff,
                                     unsigned int *sse_ptr) {
  int se = 0;
  unsigned int sse = 0;
  const int w = 1 << l2w, h = 1 << l2h;
  for (int y = 0; y < h; y++) {
    for (int x = 0; x < w; x++) {
      // bilinear interpolation at a 16th pel step
      const int a1 = ref[(w + 1) * (y + 0) + x + 0];
      const int a2 = ref[(w + 1) * (y + 0) + x + 1];
      const int b1 = ref[(w + 1) * (y + 1) + x + 0];
      const int b2 = ref[(w + 1) * (y + 1) + x + 1];
      const int a = a1 + (((a2 - a1) * xoff + 8) >> 4);
      const int b = b1 + (((b2 - b1) * xoff + 8) >> 4);
      const int r = a + (((b - a) * yoff + 8) >> 4);
      int diff = ((r + second_pred[w * y + x] + 1) >> 1) - src[w * y + x];
      se += diff;
      sse += diff * diff;
    }
  }
  *sse_ptr = sse;
  return sse - (((int64_t) se * se) >> (l2w + l2h));
}

template<typename SubpelVarianceFunctionType>
class SubpelVarianceTest
    : public ::testing::TestWithParam<tuple<int, int,
                                            SubpelVarianceFunctionType> > {
 public:
  virtual void SetUp() {
    const tuple<int, int, SubpelVarianceFunctionType>& params =
        this->GetParam();
    log2width_ = get<0>(params);
    width_ = 1 << log2width_;
    log2height_ = get<1>(params);
    height_ = 1 << log2height_;
    subpel_variance_ = get<2>(params);

    rnd(ACMRandom::DeterministicSeed());
    block_size_ = width_ * height_;
    src_ = reinterpret_cast<uint8_t *>(vpx_memalign(16, block_size_));
    sec_ = reinterpret_cast<uint8_t *>(vpx_memalign(16, block_size_));
    ref_ = new uint8_t[block_size_ + width_ + height_ + 1];
    ASSERT_TRUE(src_ != NULL);
    ASSERT_TRUE(sec_ != NULL);
    ASSERT_TRUE(ref_ != NULL);
  }

  virtual void TearDown() {
    vpx_free(src_);
    delete[] ref_;
    vpx_free(sec_);
    libvpx_test::ClearSystemState();
  }

 protected:
  void RefTest();

  ACMRandom rnd;
  uint8_t *src_;
  uint8_t *ref_;
  uint8_t *sec_;
  int width_, log2width_;
  int height_, log2height_;
  int block_size_;
  SubpelVarianceFunctionType subpel_variance_;
};
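
// The ref_ buffer is allocated with block_size_ + width_ + height_ + 1 bytes,
// i.e. (width_ + 1) * (height_ + 1) samples, which is exactly the footprint
// the bilinear filter above needs when it reads one extra row and column at
// sub-pixel offsets. The RefTest implementations below sweep all 16 x 16
// (xoff, yoff) combinations at 1/16-pel precision against the C references.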

template<typename SubpelVarianceFunctionType>
void SubpelVarianceTest<SubpelVarianceFunctionType>::RefTest() {
  for (int x = 0; x < 16; ++x) {
    for (int y = 0; y < 16; ++y) {
      for (int j = 0; j < block_size_; j++) {
        src_[j] = rnd.Rand8();
      }
      for (int j = 0; j < block_size_ + width_ + height_ + 1; j++) {
        ref_[j] = rnd.Rand8();
      }
      unsigned int sse1, sse2;
      unsigned int var1;
      REGISTER_STATE_CHECK(var1 = subpel_variance_(ref_, width_ + 1, x, y,
                                                   src_, width_, &sse1));
      const unsigned int var2 = subpel_variance_ref(ref_, src_, log2width_,
                                                    log2height_, x, y, &sse2);
      EXPECT_EQ(sse1, sse2) << "at position " << x << ", " << y;
      EXPECT_EQ(var1, var2) << "at position " << x << ", " << y;
    }
  }
}

template<>
void SubpelVarianceTest<vp9_subp_avg_variance_fn_t>::RefTest() {
  for (int x = 0; x < 16; ++x) {
    for (int y = 0; y < 16; ++y) {
      for (int j = 0; j < block_size_; j++) {
        src_[j] = rnd.Rand8();
        sec_[j] = rnd.Rand8();
      }
      for (int j = 0; j < block_size_ + width_ + height_ + 1; j++) {
        ref_[j] = rnd.Rand8();
      }
      unsigned int sse1, sse2;
      unsigned int var1;
      REGISTER_STATE_CHECK(var1 = subpel_variance_(ref_, width_ + 1, x, y,
                                                   src_, width_, &sse1, sec_));
      const unsigned int var2 = subpel_avg_variance_ref(ref_, src_, sec_,
                                                        log2width_,
                                                        log2height_,
                                                        x, y, &sse2);
      EXPECT_EQ(sse1, sse2) << "at position " << x << ", " << y;
      EXPECT_EQ(var1, var2) << "at position " << x << ", " << y;
    }
  }
}

#endif  // CONFIG_VP9_ENCODER

// -----------------------------------------------------------------------------
// VP8 test cases.

namespace vp8 {

#if CONFIG_VP8_ENCODER
typedef VarianceTest<vp8_variance_fn_t> VP8VarianceTest;

TEST_P(VP8VarianceTest, Zero) { ZeroTest(); }
TEST_P(VP8VarianceTest, Ref) { RefTest(); }
TEST_P(VP8VarianceTest, OneQuarter) { OneQuarterTest(); }

const vp8_variance_fn_t variance4x4_c = vp8_variance4x4_c;
const vp8_variance_fn_t variance8x8_c = vp8_variance8x8_c;
const vp8_variance_fn_t variance8x16_c = vp8_variance8x16_c;
const vp8_variance_fn_t variance16x8_c = vp8_variance16x8_c;
const vp8_variance_fn_t variance16x16_c = vp8_variance16x16_c;
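// Each test tuple is (log2(width), log2(height), function under test), so
// make_tuple(3, 4, variance8x16_c) runs the 8x16 kernel; the same convention
// applies to every instantiation below.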
INSTANTIATE_TEST_CASE_P(
    C, VP8VarianceTest,
    ::testing::Values(make_tuple(2, 2, variance4x4_c),
                      make_tuple(3, 3, variance8x8_c),
                      make_tuple(3, 4, variance8x16_c),
                      make_tuple(4, 3, variance16x8_c),
                      make_tuple(4, 4, variance16x16_c)));

#if HAVE_MMX
const vp8_variance_fn_t variance4x4_mmx = vp8_variance4x4_mmx;
const vp8_variance_fn_t variance8x8_mmx = vp8_variance8x8_mmx;
const vp8_variance_fn_t variance8x16_mmx = vp8_variance8x16_mmx;
const vp8_variance_fn_t variance16x8_mmx = vp8_variance16x8_mmx;
const vp8_variance_fn_t variance16x16_mmx = vp8_variance16x16_mmx;
INSTANTIATE_TEST_CASE_P(
    MMX, VP8VarianceTest,
    ::testing::Values(make_tuple(2, 2, variance4x4_mmx),
                      make_tuple(3, 3, variance8x8_mmx),
                      make_tuple(3, 4, variance8x16_mmx),
                      make_tuple(4, 3, variance16x8_mmx),
                      make_tuple(4, 4, variance16x16_mmx)));
#endif

#if HAVE_SSE2
const vp8_variance_fn_t variance4x4_wmt = vp8_variance4x4_wmt;
const vp8_variance_fn_t variance8x8_wmt = vp8_variance8x8_wmt;
const vp8_variance_fn_t variance8x16_wmt = vp8_variance8x16_wmt;
const vp8_variance_fn_t variance16x8_wmt = vp8_variance16x8_wmt;
const vp8_variance_fn_t variance16x16_wmt = vp8_variance16x16_wmt;
INSTANTIATE_TEST_CASE_P(
    SSE2, VP8VarianceTest,
    ::testing::Values(make_tuple(2, 2, variance4x4_wmt),
                      make_tuple(3, 3, variance8x8_wmt),
                      make_tuple(3, 4, variance8x16_wmt),
                      make_tuple(4, 3, variance16x8_wmt),
                      make_tuple(4, 4, variance16x16_wmt)));
#endif
#endif  // CONFIG_VP8_ENCODER

}  // namespace vp8

// -----------------------------------------------------------------------------
// VP9 test cases.

namespace vp9 {

#if CONFIG_VP9_ENCODER
typedef VarianceTest<vp9_variance_fn_t> VP9VarianceTest;
typedef SubpelVarianceTest<vp9_subpixvariance_fn_t> VP9SubpelVarianceTest;
typedef SubpelVarianceTest<vp9_subp_avg_variance_fn_t> VP9SubpelAvgVarianceTest;

TEST_P(VP9VarianceTest, Zero) { ZeroTest(); }
TEST_P(VP9VarianceTest, Ref) { RefTest(); }
TEST_P(VP9SubpelVarianceTest, Ref) { RefTest(); }
TEST_P(VP9SubpelAvgVarianceTest, Ref) { RefTest(); }
TEST_P(VP9VarianceTest, OneQuarter) { OneQuarterTest(); }

const vp9_variance_fn_t variance4x4_c = vp9_variance4x4_c;
const vp9_variance_fn_t variance4x8_c = vp9_variance4x8_c;
const vp9_variance_fn_t variance8x4_c = vp9_variance8x4_c;
const vp9_variance_fn_t variance8x8_c = vp9_variance8x8_c;
const vp9_variance_fn_t variance8x16_c = vp9_variance8x16_c;
const vp9_variance_fn_t variance16x8_c = vp9_variance16x8_c;
const vp9_variance_fn_t variance16x16_c = vp9_variance16x16_c;
const vp9_variance_fn_t variance16x32_c = vp9_variance16x32_c;
const vp9_variance_fn_t variance32x16_c = vp9_variance32x16_c;
const vp9_variance_fn_t variance32x32_c = vp9_variance32x32_c;
const vp9_variance_fn_t variance32x64_c = vp9_variance32x64_c;
const vp9_variance_fn_t variance64x32_c = vp9_variance64x32_c;
const vp9_variance_fn_t variance64x64_c = vp9_variance64x64_c;
INSTANTIATE_TEST_CASE_P(
    C, VP9VarianceTest,
    ::testing::Values(make_tuple(2, 2, variance4x4_c),
                      make_tuple(2, 3, variance4x8_c),
                      make_tuple(3, 2, variance8x4_c),
                      make_tuple(3, 3, variance8x8_c),
                      make_tuple(3, 4, variance8x16_c),
                      make_tuple(4, 3, variance16x8_c),
                      make_tuple(4, 4, variance16x16_c),
                      make_tuple(4, 5, variance16x32_c),
                      make_tuple(5, 4, variance32x16_c),
                      make_tuple(5, 5, variance32x32_c),
                      make_tuple(5, 6, variance32x64_c),
                      make_tuple(6, 5, variance64x32_c),
                      make_tuple(6, 6, variance64x64_c)));

const vp9_subpixvariance_fn_t subpel_variance4x4_c =
    vp9_sub_pixel_variance4x4_c;
const vp9_subpixvariance_fn_t subpel_variance4x8_c =
    vp9_sub_pixel_variance4x8_c;
const vp9_subpixvariance_fn_t subpel_variance8x4_c =
    vp9_sub_pixel_variance8x4_c;
const vp9_subpixvariance_fn_t subpel_variance8x8_c =
    vp9_sub_pixel_variance8x8_c;
const vp9_subpixvariance_fn_t subpel_variance8x16_c =
    vp9_sub_pixel_variance8x16_c;
const vp9_subpixvariance_fn_t subpel_variance16x8_c =
    vp9_sub_pixel_variance16x8_c;
const vp9_subpixvariance_fn_t subpel_variance16x16_c =
    vp9_sub_pixel_variance16x16_c;
const vp9_subpixvariance_fn_t subpel_variance16x32_c =
    vp9_sub_pixel_variance16x32_c;
const vp9_subpixvariance_fn_t subpel_variance32x16_c =
    vp9_sub_pixel_variance32x16_c;
const vp9_subpixvariance_fn_t subpel_variance32x32_c =
    vp9_sub_pixel_variance32x32_c;
const vp9_subpixvariance_fn_t subpel_variance32x64_c =
    vp9_sub_pixel_variance32x64_c;
const vp9_subpixvariance_fn_t subpel_variance64x32_c =
    vp9_sub_pixel_variance64x32_c;
const vp9_subpixvariance_fn_t subpel_variance64x64_c =
    vp9_sub_pixel_variance64x64_c;
INSTANTIATE_TEST_CASE_P(
    C, VP9SubpelVarianceTest,
    ::testing::Values(make_tuple(2, 2, subpel_variance4x4_c),
                      make_tuple(2, 3, subpel_variance4x8_c),
                      make_tuple(3, 2, subpel_variance8x4_c),
                      make_tuple(3, 3, subpel_variance8x8_c),
                      make_tuple(3, 4, subpel_variance8x16_c),
                      make_tuple(4, 3, subpel_variance16x8_c),
                      make_tuple(4, 4, subpel_variance16x16_c),
                      make_tuple(4, 5, subpel_variance16x32_c),
                      make_tuple(5, 4, subpel_variance32x16_c),
                      make_tuple(5, 5, subpel_variance32x32_c),
                      make_tuple(5, 6, subpel_variance32x64_c),
                      make_tuple(6, 5, subpel_variance64x32_c),
                      make_tuple(6, 6, subpel_variance64x64_c)));

const vp9_subp_avg_variance_fn_t subpel_avg_variance4x4_c =
    vp9_sub_pixel_avg_variance4x4_c;
const vp9_subp_avg_variance_fn_t subpel_avg_variance4x8_c =
    vp9_sub_pixel_avg_variance4x8_c;
const vp9_subp_avg_variance_fn_t subpel_avg_variance8x4_c =
    vp9_sub_pixel_avg_variance8x4_c;
const vp9_subp_avg_variance_fn_t subpel_avg_variance8x8_c =
    vp9_sub_pixel_avg_variance8x8_c;
const vp9_subp_avg_variance_fn_t subpel_avg_variance8x16_c =
    vp9_sub_pixel_avg_variance8x16_c;
const vp9_subp_avg_variance_fn_t subpel_avg_variance16x8_c =
    vp9_sub_pixel_avg_variance16x8_c;
const vp9_subp_avg_variance_fn_t subpel_avg_variance16x16_c =
    vp9_sub_pixel_avg_variance16x16_c;
const vp9_subp_avg_variance_fn_t subpel_avg_variance16x32_c =
    vp9_sub_pixel_avg_variance16x32_c;
const vp9_subp_avg_variance_fn_t subpel_avg_variance32x16_c =
    vp9_sub_pixel_avg_variance32x16_c;
const vp9_subp_avg_variance_fn_t subpel_avg_variance32x32_c =
    vp9_sub_pixel_avg_variance32x32_c;
const vp9_subp_avg_variance_fn_t subpel_avg_variance32x64_c =
    vp9_sub_pixel_avg_variance32x64_c;
const vp9_subp_avg_variance_fn_t subpel_avg_variance64x32_c =
    vp9_sub_pixel_avg_variance64x32_c;
const vp9_subp_avg_variance_fn_t subpel_avg_variance64x64_c =
    vp9_sub_pixel_avg_variance64x64_c;
INSTANTIATE_TEST_CASE_P(
    C, VP9SubpelAvgVarianceTest,
    ::testing::Values(make_tuple(2, 2, subpel_avg_variance4x4_c),
                      make_tuple(2, 3, subpel_avg_variance4x8_c),
                      make_tuple(3, 2, subpel_avg_variance8x4_c),
                      make_tuple(3, 3, subpel_avg_variance8x8_c),
                      make_tuple(3, 4, subpel_avg_variance8x16_c),
                      make_tuple(4, 3, subpel_avg_variance16x8_c),
                      make_tuple(4, 4, subpel_avg_variance16x16_c),
                      make_tuple(4, 5, subpel_avg_variance16x32_c),
                      make_tuple(5, 4, subpel_avg_variance32x16_c),
                      make_tuple(5, 5, subpel_avg_variance32x32_c),
                      make_tuple(5, 6, subpel_avg_variance32x64_c),
                      make_tuple(6, 5, subpel_avg_variance64x32_c),
                      make_tuple(6, 6, subpel_avg_variance64x64_c)));

#if HAVE_MMX
const vp9_variance_fn_t variance4x4_mmx = vp9_variance4x4_mmx;
const vp9_variance_fn_t variance8x8_mmx = vp9_variance8x8_mmx;
const vp9_variance_fn_t variance8x16_mmx = vp9_variance8x16_mmx;
const vp9_variance_fn_t variance16x8_mmx = vp9_variance16x8_mmx;
const vp9_variance_fn_t variance16x16_mmx = vp9_variance16x16_mmx;
INSTANTIATE_TEST_CASE_P(
    MMX, VP9VarianceTest,
    ::testing::Values(make_tuple(2, 2, variance4x4_mmx),
                      make_tuple(3, 3, variance8x8_mmx),
                      make_tuple(3, 4, variance8x16_mmx),
                      make_tuple(4, 3, variance16x8_mmx),
                      make_tuple(4, 4, variance16x16_mmx)));
#endif

#if HAVE_SSE2
#if CONFIG_USE_X86INC
const vp9_variance_fn_t variance4x4_sse2 = vp9_variance4x4_sse2;
const vp9_variance_fn_t variance4x8_sse2 = vp9_variance4x8_sse2;
const vp9_variance_fn_t variance8x4_sse2 = vp9_variance8x4_sse2;
const vp9_variance_fn_t variance8x8_sse2 = vp9_variance8x8_sse2;
const vp9_variance_fn_t variance8x16_sse2 = vp9_variance8x16_sse2;
const vp9_variance_fn_t variance16x8_sse2 = vp9_variance16x8_sse2;
const vp9_variance_fn_t variance16x16_sse2 = vp9_variance16x16_sse2;
const vp9_variance_fn_t variance16x32_sse2 = vp9_variance16x32_sse2;
const vp9_variance_fn_t variance32x16_sse2 = vp9_variance32x16_sse2;
const vp9_variance_fn_t variance32x32_sse2 = vp9_variance32x32_sse2;
const vp9_variance_fn_t variance32x64_sse2 = vp9_variance32x64_sse2;
const vp9_variance_fn_t variance64x32_sse2 = vp9_variance64x32_sse2;
const vp9_variance_fn_t variance64x64_sse2 = vp9_variance64x64_sse2;
INSTANTIATE_TEST_CASE_P(
    SSE2, VP9VarianceTest,
    ::testing::Values(make_tuple(2, 2, variance4x4_sse2),
                      make_tuple(2, 3, variance4x8_sse2),
                      make_tuple(3, 2, variance8x4_sse2),
                      make_tuple(3, 3, variance8x8_sse2),
                      make_tuple(3, 4, variance8x16_sse2),
                      make_tuple(4, 3, variance16x8_sse2),
                      make_tuple(4, 4, variance16x16_sse2),
                      make_tuple(4, 5, variance16x32_sse2),
                      make_tuple(5, 4, variance32x16_sse2),
                      make_tuple(5, 5, variance32x32_sse2),
                      make_tuple(5, 6, variance32x64_sse2),
                      make_tuple(6, 5, variance64x32_sse2),
                      make_tuple(6, 6, variance64x64_sse2)));
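
// Note that in the sub-pixel tables below the 4-pixel-wide kernels are the
// _sse variants while all wider block sizes use _sse2; both are registered
// under the same SSE2 instantiations.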

const vp9_subpixvariance_fn_t subpel_variance4x4_sse =
    vp9_sub_pixel_variance4x4_sse;
const vp9_subpixvariance_fn_t subpel_variance4x8_sse =
    vp9_sub_pixel_variance4x8_sse;
const vp9_subpixvariance_fn_t subpel_variance8x4_sse2 =
    vp9_sub_pixel_variance8x4_sse2;
const vp9_subpixvariance_fn_t subpel_variance8x8_sse2 =
    vp9_sub_pixel_variance8x8_sse2;
const vp9_subpixvariance_fn_t subpel_variance8x16_sse2 =
    vp9_sub_pixel_variance8x16_sse2;
const vp9_subpixvariance_fn_t subpel_variance16x8_sse2 =
    vp9_sub_pixel_variance16x8_sse2;
const vp9_subpixvariance_fn_t subpel_variance16x16_sse2 =
    vp9_sub_pixel_variance16x16_sse2;
const vp9_subpixvariance_fn_t subpel_variance16x32_sse2 =
    vp9_sub_pixel_variance16x32_sse2;
const vp9_subpixvariance_fn_t subpel_variance32x16_sse2 =
    vp9_sub_pixel_variance32x16_sse2;
const vp9_subpixvariance_fn_t subpel_variance32x32_sse2 =
    vp9_sub_pixel_variance32x32_sse2;
const vp9_subpixvariance_fn_t subpel_variance32x64_sse2 =
    vp9_sub_pixel_variance32x64_sse2;
const vp9_subpixvariance_fn_t subpel_variance64x32_sse2 =
    vp9_sub_pixel_variance64x32_sse2;
const vp9_subpixvariance_fn_t subpel_variance64x64_sse2 =
    vp9_sub_pixel_variance64x64_sse2;
INSTANTIATE_TEST_CASE_P(
    SSE2, VP9SubpelVarianceTest,
    ::testing::Values(make_tuple(2, 2, subpel_variance4x4_sse),
                      make_tuple(2, 3, subpel_variance4x8_sse),
                      make_tuple(3, 2, subpel_variance8x4_sse2),
                      make_tuple(3, 3, subpel_variance8x8_sse2),
                      make_tuple(3, 4, subpel_variance8x16_sse2),
                      make_tuple(4, 3, subpel_variance16x8_sse2),
                      make_tuple(4, 4, subpel_variance16x16_sse2),
                      make_tuple(4, 5, subpel_variance16x32_sse2),
                      make_tuple(5, 4, subpel_variance32x16_sse2),
                      make_tuple(5, 5, subpel_variance32x32_sse2),
                      make_tuple(5, 6, subpel_variance32x64_sse2),
                      make_tuple(6, 5, subpel_variance64x32_sse2),
                      make_tuple(6, 6, subpel_variance64x64_sse2)));

const vp9_subp_avg_variance_fn_t subpel_avg_variance4x4_sse =
    vp9_sub_pixel_avg_variance4x4_sse;
const vp9_subp_avg_variance_fn_t subpel_avg_variance4x8_sse =
    vp9_sub_pixel_avg_variance4x8_sse;
const vp9_subp_avg_variance_fn_t subpel_avg_variance8x4_sse2 =
    vp9_sub_pixel_avg_variance8x4_sse2;
const vp9_subp_avg_variance_fn_t subpel_avg_variance8x8_sse2 =
    vp9_sub_pixel_avg_variance8x8_sse2;
const vp9_subp_avg_variance_fn_t subpel_avg_variance8x16_sse2 =
    vp9_sub_pixel_avg_variance8x16_sse2;
const vp9_subp_avg_variance_fn_t subpel_avg_variance16x8_sse2 =
    vp9_sub_pixel_avg_variance16x8_sse2;
const vp9_subp_avg_variance_fn_t subpel_avg_variance16x16_sse2 =
    vp9_sub_pixel_avg_variance16x16_sse2;
const vp9_subp_avg_variance_fn_t subpel_avg_variance16x32_sse2 =
    vp9_sub_pixel_avg_variance16x32_sse2;
const vp9_subp_avg_variance_fn_t subpel_avg_variance32x16_sse2 =
    vp9_sub_pixel_avg_variance32x16_sse2;
const vp9_subp_avg_variance_fn_t subpel_avg_variance32x32_sse2 =
    vp9_sub_pixel_avg_variance32x32_sse2;
const vp9_subp_avg_variance_fn_t subpel_avg_variance32x64_sse2 =
    vp9_sub_pixel_avg_variance32x64_sse2;
const vp9_subp_avg_variance_fn_t subpel_avg_variance64x32_sse2 =
    vp9_sub_pixel_avg_variance64x32_sse2;
const vp9_subp_avg_variance_fn_t subpel_avg_variance64x64_sse2 =
    vp9_sub_pixel_avg_variance64x64_sse2;
INSTANTIATE_TEST_CASE_P(
    SSE2, VP9SubpelAvgVarianceTest,
    ::testing::Values(make_tuple(2, 2, subpel_avg_variance4x4_sse),
                      make_tuple(2, 3, subpel_avg_variance4x8_sse),
                      make_tuple(3, 2, subpel_avg_variance8x4_sse2),
                      make_tuple(3, 3, subpel_avg_variance8x8_sse2),
                      make_tuple(3, 4, subpel_avg_variance8x16_sse2),
                      make_tuple(4, 3, subpel_avg_variance16x8_sse2),
                      make_tuple(4, 4, subpel_avg_variance16x16_sse2),
                      make_tuple(4, 5, subpel_avg_variance16x32_sse2),
                      make_tuple(5, 4, subpel_avg_variance32x16_sse2),
                      make_tuple(5, 5, subpel_avg_variance32x32_sse2),
                      make_tuple(5, 6, subpel_avg_variance32x64_sse2),
                      make_tuple(6, 5, subpel_avg_variance64x32_sse2),
                      make_tuple(6, 6, subpel_avg_variance64x64_sse2)));
#endif
#endif

#if HAVE_SSSE3
#if CONFIG_USE_X86INC

const vp9_subpixvariance_fn_t subpel_variance4x4_ssse3 =
    vp9_sub_pixel_variance4x4_ssse3;
const vp9_subpixvariance_fn_t subpel_variance4x8_ssse3 =
    vp9_sub_pixel_variance4x8_ssse3;
const vp9_subpixvariance_fn_t subpel_variance8x4_ssse3 =
    vp9_sub_pixel_variance8x4_ssse3;
const vp9_subpixvariance_fn_t subpel_variance8x8_ssse3 =
    vp9_sub_pixel_variance8x8_ssse3;
const vp9_subpixvariance_fn_t subpel_variance8x16_ssse3 =
    vp9_sub_pixel_variance8x16_ssse3;
const vp9_subpixvariance_fn_t subpel_variance16x8_ssse3 =
    vp9_sub_pixel_variance16x8_ssse3;
const vp9_subpixvariance_fn_t subpel_variance16x16_ssse3 =
    vp9_sub_pixel_variance16x16_ssse3;
const vp9_subpixvariance_fn_t subpel_variance16x32_ssse3 =
    vp9_sub_pixel_variance16x32_ssse3;
const vp9_subpixvariance_fn_t subpel_variance32x16_ssse3 =
    vp9_sub_pixel_variance32x16_ssse3;
const vp9_subpixvariance_fn_t subpel_variance32x32_ssse3 =
    vp9_sub_pixel_variance32x32_ssse3;
const vp9_subpixvariance_fn_t subpel_variance32x64_ssse3 =
    vp9_sub_pixel_variance32x64_ssse3;
const vp9_subpixvariance_fn_t subpel_variance64x32_ssse3 =
    vp9_sub_pixel_variance64x32_ssse3;
const vp9_subpixvariance_fn_t subpel_variance64x64_ssse3 =
    vp9_sub_pixel_variance64x64_ssse3;
INSTANTIATE_TEST_CASE_P(
    SSSE3, VP9SubpelVarianceTest,
    ::testing::Values(make_tuple(2, 2, subpel_variance4x4_ssse3),
                      make_tuple(2, 3, subpel_variance4x8_ssse3),
                      make_tuple(3, 2, subpel_variance8x4_ssse3),
                      make_tuple(3, 3, subpel_variance8x8_ssse3),
                      make_tuple(3, 4, subpel_variance8x16_ssse3),
                      make_tuple(4, 3, subpel_variance16x8_ssse3),
                      make_tuple(4, 4, subpel_variance16x16_ssse3),
                      make_tuple(4, 5, subpel_variance16x32_ssse3),
                      make_tuple(5, 4, subpel_variance32x16_ssse3),
                      make_tuple(5, 5, subpel_variance32x32_ssse3),
                      make_tuple(5, 6, subpel_variance32x64_ssse3),
                      make_tuple(6, 5, subpel_variance64x32_ssse3),
                      make_tuple(6, 6, subpel_variance64x64_ssse3)));

const vp9_subp_avg_variance_fn_t subpel_avg_variance4x4_ssse3 =
    vp9_sub_pixel_avg_variance4x4_ssse3;
const vp9_subp_avg_variance_fn_t subpel_avg_variance4x8_ssse3 =
    vp9_sub_pixel_avg_variance4x8_ssse3;
const vp9_subp_avg_variance_fn_t subpel_avg_variance8x4_ssse3 =
    vp9_sub_pixel_avg_variance8x4_ssse3;
const vp9_subp_avg_variance_fn_t subpel_avg_variance8x8_ssse3 =
    vp9_sub_pixel_avg_variance8x8_ssse3;
const vp9_subp_avg_variance_fn_t subpel_avg_variance8x16_ssse3 =
    vp9_sub_pixel_avg_variance8x16_ssse3;
const vp9_subp_avg_variance_fn_t subpel_avg_variance16x8_ssse3 =
    vp9_sub_pixel_avg_variance16x8_ssse3;
const vp9_subp_avg_variance_fn_t subpel_avg_variance16x16_ssse3 =
    vp9_sub_pixel_avg_variance16x16_ssse3;
const vp9_subp_avg_variance_fn_t subpel_avg_variance16x32_ssse3 =
    vp9_sub_pixel_avg_variance16x32_ssse3;
const vp9_subp_avg_variance_fn_t subpel_avg_variance32x16_ssse3 =
    vp9_sub_pixel_avg_variance32x16_ssse3;
const vp9_subp_avg_variance_fn_t subpel_avg_variance32x32_ssse3 =
    vp9_sub_pixel_avg_variance32x32_ssse3;
const vp9_subp_avg_variance_fn_t subpel_avg_variance32x64_ssse3 =
    vp9_sub_pixel_avg_variance32x64_ssse3;
const vp9_subp_avg_variance_fn_t subpel_avg_variance64x32_ssse3 =
    vp9_sub_pixel_avg_variance64x32_ssse3;
const vp9_subp_avg_variance_fn_t subpel_avg_variance64x64_ssse3 =
    vp9_sub_pixel_avg_variance64x64_ssse3;
INSTANTIATE_TEST_CASE_P(
    SSSE3, VP9SubpelAvgVarianceTest,
    ::testing::Values(make_tuple(2, 2, subpel_avg_variance4x4_ssse3),
                      make_tuple(2, 3, subpel_avg_variance4x8_ssse3),
                      make_tuple(3, 2, subpel_avg_variance8x4_ssse3),
                      make_tuple(3, 3, subpel_avg_variance8x8_ssse3),
                      make_tuple(3, 4, subpel_avg_variance8x16_ssse3),
                      make_tuple(4, 3, subpel_avg_variance16x8_ssse3),
                      make_tuple(4, 4, subpel_avg_variance16x16_ssse3),
                      make_tuple(4, 5, subpel_avg_variance16x32_ssse3),
                      make_tuple(5, 4, subpel_avg_variance32x16_ssse3),
                      make_tuple(5, 5, subpel_avg_variance32x32_ssse3),
                      make_tuple(5, 6, subpel_avg_variance32x64_ssse3),
                      make_tuple(6, 5, subpel_avg_variance64x32_ssse3),
                      make_tuple(6, 6, subpel_avg_variance64x64_ssse3)));
#endif
#endif
#endif  // CONFIG_VP9_ENCODER

}  // namespace vp9

}  // namespace