Home | History | Annotate | Download | only in x86

Search results: lines matching references to "rtcd" (the encoder's run-time CPU detection dispatch table)

/*
 * NOTE(review): this is a non-contiguous search-result excerpt (original file
 * line numbers retained at the start of each line).  The enclosing function is
 * not visible here -- presumably the VP8 encoder's x86 RTCD initializer
 * (e.g. vp8_arch_x86_encoder_init) in libvpx; confirm against the full file.
 *
 * Each statement installs an ISA-specific implementation into the encoder's
 * per-instance run-time-CPU-detection (rtcd) function-pointer table reached
 * through cpi.  Gaps in the line numbering (227->229, 256->258, 326->333,
 * 346->353, 356->364, ...) hide the CPU-feature checks and preprocessor
 * guards that gate each group below -- do not assume the groups run
 * unconditionally or in sequence.
 */

/* ---- MMX group: SAD kernels ---- */
223         cpi->rtcd.variance.sad16x16              = vp8_sad16x16_mmx;
224 cpi->rtcd.variance.sad16x8 = vp8_sad16x8_mmx;
225 cpi->rtcd.variance.sad8x16 = vp8_sad8x16_mmx;
226 cpi->rtcd.variance.sad8x8 = vp8_sad8x8_mmx;
227 cpi->rtcd.variance.sad4x4 = vp8_sad4x4_mmx;
/* ---- MMX group: full-pixel variance ---- */
229 cpi->rtcd.variance.var4x4 = vp8_variance4x4_mmx;
230 cpi->rtcd.variance.var8x8 = vp8_variance8x8_mmx;
231 cpi->rtcd.variance.var8x16 = vp8_variance8x16_mmx;
232 cpi->rtcd.variance.var16x8 = vp8_variance16x8_mmx;
233 cpi->rtcd.variance.var16x16 = vp8_variance16x16_mmx;
/* ---- MMX group: sub-pixel / half-pixel variance and MSE ---- */
235 cpi->rtcd.variance.subpixvar4x4 = vp8_sub_pixel_variance4x4_mmx;
236 cpi->rtcd.variance.subpixvar8x8 = vp8_sub_pixel_variance8x8_mmx;
237 cpi->rtcd.variance.subpixvar8x16 = vp8_sub_pixel_variance8x16_mmx;
238 cpi->rtcd.variance.subpixvar16x8 = vp8_sub_pixel_variance16x8_mmx;
239 cpi->rtcd.variance.subpixvar16x16 = vp8_sub_pixel_variance16x16_mmx;
240 cpi->rtcd.variance.halfpixvar16x16_h = vp8_variance_halfpixvar16x16_h_mmx;
241 cpi->rtcd.variance.halfpixvar16x16_v = vp8_variance_halfpixvar16x16_v_mmx;
242 cpi->rtcd.variance.halfpixvar16x16_hv = vp8_variance_halfpixvar16x16_hv_mmx;
243 cpi->rtcd.variance.subpixmse16x16 = vp8_sub_pixel_mse16x16_mmx;
245 cpi->rtcd.variance.mse16x16 = vp8_mse16x16_mmx;
246 cpi->rtcd.variance.getmbss = vp8_get_mb_ss_mmx;
/* ---- MMX group: prediction-error / block-statistics helpers ---- */
248 cpi->rtcd.variance.get16x16prederror = vp8_get16x16pred_error_mmx;
249 cpi->rtcd.variance.get8x8var = vp8_get8x8var_mmx;
250 cpi->rtcd.variance.get16x16var = vp8_get16x16var_mmx;
251 cpi->rtcd.variance.get4x4sse_cs = vp8_get4x4sse_cs_mmx;
/* ---- MMX forward DCT; note fast4x4/fast8x4 alias the short (accurate)
 * MMX kernels rather than a separate fast path ---- */
253 cpi->rtcd.fdct.short4x4 = vp8_short_fdct4x4_mmx;
254 cpi->rtcd.fdct.short8x4 = vp8_short_fdct8x4_mmx;
255 cpi->rtcd.fdct.fast4x4 = vp8_short_fdct4x4_mmx;
256 cpi->rtcd.fdct.fast8x4 = vp8_short_fdct8x4_mmx;
/* ---- Plain-C fdct fallback; the gap 256->258 presumably hides the
 * #else / else branch selecting it -- confirm in the full file ---- */
258 cpi->rtcd.fdct.short4x4 = vp8_short_fdct4x4_c;
259 cpi->rtcd.fdct.short8x4 = vp8_short_fdct8x4_c;
260 cpi->rtcd.fdct.fast4x4 = vp8_short_fdct4x4_c;
261 cpi->rtcd.fdct.fast8x4 = vp8_short_fdct8x4_c;
/* Walsh-Hadamard transform stays plain C in this group. */
265 cpi->rtcd.fdct.walsh_short4x4 = vp8_short_walsh4x4_c;
/* ---- MMX group: macroblock encode helpers (error + subtraction) ---- */
267 cpi->rtcd.encodemb.berr = vp8_block_error_mmx;
268 cpi->rtcd.encodemb.mberr = vp8_mbblock_error_mmx;
269 cpi->rtcd.encodemb.mbuverr = vp8_mbuverror_mmx;
270 cpi->rtcd.encodemb.subb = vp8_subtract_b_mmx;
271 cpi->rtcd.encodemb.submby = vp8_subtract_mby_mmx;
272 cpi->rtcd.encodemb.submbuv = vp8_subtract_mbuv_mmx;
/* MMX fast quantizer deliberately disabled (commented out upstream). */
274 /*cpi->rtcd.quantize.fastquantb = vp8_fast_quantize_b_mmx;*/
/* ---- SSE2 group ("wmt" suffix appears alongside explicit _sse2 names,
 * e.g. line 304, so wmt evidently denotes SSE2-class code -- Willamette;
 * confirm against the function definitions).  These overwrite the MMX
 * pointers when the hidden feature check passes. ---- */
281 cpi->rtcd.variance.sad16x16 = vp8_sad16x16_wmt;
282 cpi->rtcd.variance.sad16x8 = vp8_sad16x8_wmt;
283 cpi->rtcd.variance.sad8x16 = vp8_sad8x16_wmt;
284 cpi->rtcd.variance.sad8x8 = vp8_sad8x8_wmt;
285 cpi->rtcd.variance.sad4x4 = vp8_sad4x4_wmt;
287 cpi->rtcd.variance.var4x4 = vp8_variance4x4_wmt;
288 cpi->rtcd.variance.var8x8 = vp8_variance8x8_wmt;
289 cpi->rtcd.variance.var8x16 = vp8_variance8x16_wmt;
290 cpi->rtcd.variance.var16x8 = vp8_variance16x8_wmt;
291 cpi->rtcd.variance.var16x16 = vp8_variance16x16_wmt;
293 cpi->rtcd.variance.subpixvar4x4 = vp8_sub_pixel_variance4x4_wmt;
294 cpi->rtcd.variance.subpixvar8x8 = vp8_sub_pixel_variance8x8_wmt;
295 cpi->rtcd.variance.subpixvar8x16 = vp8_sub_pixel_variance8x16_wmt;
296 cpi->rtcd.variance.subpixvar16x8 = vp8_sub_pixel_variance16x8_wmt;
297 cpi->rtcd.variance.subpixvar16x16 = vp8_sub_pixel_variance16x16_wmt;
298 cpi->rtcd.variance.halfpixvar16x16_h = vp8_variance_halfpixvar16x16_h_wmt;
299 cpi->rtcd.variance.halfpixvar16x16_v = vp8_variance_halfpixvar16x16_v_wmt;
300 cpi->rtcd.variance.halfpixvar16x16_hv = vp8_variance_halfpixvar16x16_hv_wmt;
301 cpi->rtcd.variance.subpixmse16x16 = vp8_sub_pixel_mse16x16_wmt;
303 cpi->rtcd.variance.mse16x16 = vp8_mse16x16_wmt;
304 cpi->rtcd.variance.getmbss = vp8_get_mb_ss_sse2;
306 cpi->rtcd.variance.get16x16prederror = vp8_get16x16pred_error_sse2;
307 cpi->rtcd.variance.get8x8var = vp8_get8x8var_sse2;
308 cpi->rtcd.variance.get16x16var = vp8_get16x16var_sse2;
/* get4x4sse_cs keeps its MMX pointer here -- no SSE2 version exists. */
309 /* cpi->rtcd.variance.get4x4sse_cs not implemented for wmt */;
/* SSE2 fdct: again fast* aliases the short kernels. */
311 cpi->rtcd.fdct.short4x4 = vp8_short_fdct4x4_sse2;
312 cpi->rtcd.fdct.short8x4 = vp8_short_fdct8x4_sse2;
313 cpi->rtcd.fdct.fast4x4 = vp8_short_fdct4x4_sse2;
314 cpi->rtcd.fdct.fast8x4 = vp8_short_fdct8x4_sse2;
316 cpi->rtcd.fdct.walsh_short4x4 = vp8_short_walsh4x4_sse2 ;
/* "xmm" error kernels installed alongside the other SSE2 routines. */
318 cpi->rtcd.encodemb.berr = vp8_block_error_xmm;
319 cpi->rtcd.encodemb.mberr = vp8_mbblock_error_xmm;
320 cpi->rtcd.encodemb.mbuverr = vp8_mbuverror_xmm;
321 cpi->rtcd.encodemb.subb = vp8_subtract_b_sse2;
322 cpi->rtcd.encodemb.submby = vp8_subtract_mby_sse2;
323 cpi->rtcd.encodemb.submbuv = vp8_subtract_mbuv_sse2;
/* SSE2 regular quantizer disabled upstream; only the fast path is live. */
325 /*cpi->rtcd.quantize.quantb = vp8_regular_quantize_b_sse2;*/
326 cpi->rtcd.quantize.fastquantb = vp8_fast_quantize_b_sse2;
/* ---- SSE3 group: single + 3-candidate (x3) SAD, and the x3-aware
 * full-search motion estimator that consumes them ---- */
333 cpi->rtcd.variance.sad16x16 = vp8_sad16x16_sse3;
334 cpi->rtcd.variance.sad16x16x3 = vp8_sad16x16x3_sse3;
335 cpi->rtcd.variance.sad16x8x3 = vp8_sad16x8x3_sse3;
336 cpi->rtcd.variance.sad8x16x3 = vp8_sad8x16x3_sse3;
337 cpi->rtcd.variance.sad8x8x3 = vp8_sad8x8x3_sse3;
338 cpi->rtcd.variance.sad4x4x3 = vp8_sad4x4x3_sse3;
339 cpi->rtcd.search.full_search = vp8_full_search_sadx3;
/* 4-candidate (x4d) SAD variants plus the matching diamond search. */
341 cpi->rtcd.variance.sad16x16x4d = vp8_sad16x16x4d_sse3;
342 cpi->rtcd.variance.sad16x8x4d = vp8_sad16x8x4d_sse3;
343 cpi->rtcd.variance.sad8x16x4d = vp8_sad8x16x4d_sse3;
344 cpi->rtcd.variance.sad8x8x4d = vp8_sad8x8x4d_sse3;
345 cpi->rtcd.variance.sad4x4x4d = vp8_sad4x4x4d_sse3;
346 cpi->rtcd.search.diamond_search = vp8_diamond_search_sadx4;
/* ---- SSSE3 group: overrides only the 16-wide x3 SADs + fast quantizer ---- */
353 cpi->rtcd.variance.sad16x16x3 = vp8_sad16x16x3_ssse3;
354 cpi->rtcd.variance.sad16x8x3 = vp8_sad16x8x3_ssse3;
356 cpi->rtcd.quantize.fastquantb = vp8_fast_quantize_b_ssse3;
/* ---- SSE4 group: 8-candidate (x8) SADs and the x8-aware full search,
 * superseding the SSE3 full_search set at line 339 ---- */
364 cpi->rtcd.variance.sad16x16x8 = vp8_sad16x16x8_sse4;
365 cpi->rtcd.variance.sad16x8x8 = vp8_sad16x8x8_sse4;
366 cpi->rtcd.variance.sad8x16x8 = vp8_sad8x16x8_sse4;
367 cpi->rtcd.variance.sad8x8x8 = vp8_sad8x8x8_sse4;
368 cpi->rtcd.variance.sad4x4x8 = vp8_sad4x4x8_sse4;
369 cpi->rtcd.search.full_search = vp8_full_search_sadx8;