
Lines Matching refs:cpi

177 void vp8_arch_x86_encoder_init(VP8_COMP *cpi)
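The listing shows only the lines that mention cpi, so the CPU-capability guards separating the blocks below are elided. As a rough sketch of the surrounding structure (x86_simd_caps() and the HAS_* flags are libvpx's run-time CPU detection helpers, but the exact guard layout here is an assumption inferred from the function-name suffixes, not a verbatim reconstruction):

    void vp8_arch_x86_encoder_init(VP8_COMP *cpi)
    {
    #if CONFIG_RUNTIME_CPU_DETECT
        int flags = x86_simd_caps();   /* query CPUID once */

        if (flags & HAS_MMX)
        {
            /* ..._mmx assignments... */
        }

        if (flags & HAS_SSE2)          /* "wmt" = Willamette, the first SSE2 core */
        {
            /* ..._wmt / _sse2 assignments... */
        }

        if (flags & HAS_SSE3)
        {
            /* ..._sse3 assignments... */
        }

        if (flags & HAS_SSSE3)
        {
            /* ..._ssse3 assignments... */
        }

        if (flags & HAS_SSE4_1)
        {
            /* ..._sse4 assignments... */
        }
    #endif
    }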
199 cpi->rtcd.variance.sad16x16 = vp8_sad16x16_mmx;
200 cpi->rtcd.variance.sad16x8 = vp8_sad16x8_mmx;
201 cpi->rtcd.variance.sad8x16 = vp8_sad8x16_mmx;
202 cpi->rtcd.variance.sad8x8 = vp8_sad8x8_mmx;
203 cpi->rtcd.variance.sad4x4 = vp8_sad4x4_mmx;
205 cpi->rtcd.variance.var4x4 = vp8_variance4x4_mmx;
206 cpi->rtcd.variance.var8x8 = vp8_variance8x8_mmx;
207 cpi->rtcd.variance.var8x16 = vp8_variance8x16_mmx;
208 cpi->rtcd.variance.var16x8 = vp8_variance16x8_mmx;
209 cpi->rtcd.variance.var16x16 = vp8_variance16x16_mmx;
211 cpi->rtcd.variance.subpixvar4x4 = vp8_sub_pixel_variance4x4_mmx;
212 cpi->rtcd.variance.subpixvar8x8 = vp8_sub_pixel_variance8x8_mmx;
213 cpi->rtcd.variance.subpixvar8x16 = vp8_sub_pixel_variance8x16_mmx;
214 cpi->rtcd.variance.subpixvar16x8 = vp8_sub_pixel_variance16x8_mmx;
215 cpi->rtcd.variance.subpixvar16x16 = vp8_sub_pixel_variance16x16_mmx;
216 cpi->rtcd.variance.halfpixvar16x16_h = vp8_variance_halfpixvar16x16_h_mmx;
217 cpi->rtcd.variance.halfpixvar16x16_v = vp8_variance_halfpixvar16x16_v_mmx;
218 cpi->rtcd.variance.halfpixvar16x16_hv = vp8_variance_halfpixvar16x16_hv_mmx;
219 cpi->rtcd.variance.subpixmse16x16 = vp8_sub_pixel_mse16x16_mmx;
221 cpi->rtcd.variance.mse16x16 = vp8_mse16x16_mmx;
222 cpi->rtcd.variance.getmbss = vp8_get_mb_ss_mmx;
224 cpi->rtcd.variance.get16x16prederror = vp8_get16x16pred_error_mmx;
225 cpi->rtcd.variance.get8x8var = vp8_get8x8var_mmx;
226 cpi->rtcd.variance.get16x16var = vp8_get16x16var_mmx;
227 cpi->rtcd.variance.get4x4sse_cs = vp8_get4x4sse_cs_mmx;
229 cpi->rtcd.fdct.short4x4 = vp8_short_fdct4x4_mmx;
230 cpi->rtcd.fdct.short8x4 = short_fdct8x4_mmx;
231 cpi->rtcd.fdct.fast4x4 = vp8_short_fdct4x4_mmx;
232 cpi->rtcd.fdct.fast8x4 = short_fdct8x4_mmx;
234 cpi->rtcd.fdct.walsh_short4x4 = vp8_short_walsh4x4_c;
236 cpi->rtcd.encodemb.berr = vp8_block_error_mmx;
237 cpi->rtcd.encodemb.mberr = mbblock_error_mmx;
238 cpi->rtcd.encodemb.mbuverr = mbuverror_mmx;
239 cpi->rtcd.encodemb.subb = subtract_b_mmx;
240 cpi->rtcd.encodemb.submby = vp8_subtract_mby_mmx;
241 cpi->rtcd.encodemb.submbuv = vp8_subtract_mbuv_mmx;
243 /*cpi->rtcd.quantize.fastquantb = fast_quantize_b_mmx;*/
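Note that source lines 230 and 232 assign short_fdct8x4_mmx without the vp8_ prefix: MMX has no dedicated 8x4 forward DCT, so the file presumably wires in a file-local wrapper that runs the 4x4 kernel on both halves of the block. A minimal sketch of such a wrapper, consistent with the names above but not copied from the source:

    /* Assumed shape of the local 8x4 fdct wrapper: MMX provides only
     * the 4x4 kernel, so an 8x4 block is transformed as two adjacent
     * 4x4 blocks (second input half starts 4 samples in, second
     * output half 16 coefficients in). */
    static void short_fdct8x4_mmx(short *input, short *output, int pitch)
    {
        vp8_short_fdct4x4_mmx(input,     output,      pitch);
        vp8_short_fdct4x4_mmx(input + 4, output + 16, pitch);
    }

The same pattern would explain the other unprefixed names in this block (mbblock_error_mmx, mbuverror_mmx, subtract_b_mmx): thin static wrappers adapting the encoder's block structures to the assembly implementations.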
250 cpi->rtcd.variance.sad16x16 = vp8_sad16x16_wmt;
251 cpi->rtcd.variance.sad16x8 = vp8_sad16x8_wmt;
252 cpi->rtcd.variance.sad8x16 = vp8_sad8x16_wmt;
253 cpi->rtcd.variance.sad8x8 = vp8_sad8x8_wmt;
254 cpi->rtcd.variance.sad4x4 = vp8_sad4x4_wmt;
256 cpi->rtcd.variance.var4x4 = vp8_variance4x4_wmt;
257 cpi->rtcd.variance.var8x8 = vp8_variance8x8_wmt;
258 cpi->rtcd.variance.var8x16 = vp8_variance8x16_wmt;
259 cpi->rtcd.variance.var16x8 = vp8_variance16x8_wmt;
260 cpi->rtcd.variance.var16x16 = vp8_variance16x16_wmt;
262 cpi->rtcd.variance.subpixvar4x4 = vp8_sub_pixel_variance4x4_wmt;
263 cpi->rtcd.variance.subpixvar8x8 = vp8_sub_pixel_variance8x8_wmt;
264 cpi->rtcd.variance.subpixvar8x16 = vp8_sub_pixel_variance8x16_wmt;
265 cpi->rtcd.variance.subpixvar16x8 = vp8_sub_pixel_variance16x8_wmt;
266 cpi->rtcd.variance.subpixvar16x16 = vp8_sub_pixel_variance16x16_wmt;
267 cpi->rtcd.variance.halfpixvar16x16_h = vp8_variance_halfpixvar16x16_h_wmt;
268 cpi->rtcd.variance.halfpixvar16x16_v = vp8_variance_halfpixvar16x16_v_wmt;
269 cpi->rtcd.variance.halfpixvar16x16_hv = vp8_variance_halfpixvar16x16_hv_wmt;
270 cpi->rtcd.variance.subpixmse16x16 = vp8_sub_pixel_mse16x16_wmt;
272 cpi->rtcd.variance.mse16x16 = vp8_mse16x16_wmt;
273 cpi->rtcd.variance.getmbss = vp8_get_mb_ss_sse2;
275 cpi->rtcd.variance.get16x16prederror = vp8_get16x16pred_error_sse2;
276 cpi->rtcd.variance.get8x8var = vp8_get8x8var_sse2;
277 cpi->rtcd.variance.get16x16var = vp8_get16x16var_sse2;
280 /* cpi->rtcd.variance.get4x4sse_cs not implemented for wmt */
282 cpi->rtcd.fdct.short4x4 = vp8_short_fdct4x4_sse2;
283 cpi->rtcd.fdct.short8x4 = vp8_short_fdct8x4_sse2;
284 cpi->rtcd.fdct.fast4x4 = vp8_short_fdct4x4_sse2;
285 cpi->rtcd.fdct.fast8x4 = vp8_short_fdct8x4_sse2;
287 cpi->rtcd.fdct.walsh_short4x4 = vp8_short_walsh4x4_sse2;
289 cpi->rtcd.encodemb.berr = vp8_block_error_xmm;
290 cpi->rtcd.encodemb.mberr = mbblock_error_xmm;
291 cpi->rtcd.encodemb.mbuverr = mbuverror_xmm;
292 cpi->rtcd.encodemb.subb = subtract_b_sse2;
293 cpi->rtcd.encodemb.submby = vp8_subtract_mby_sse2;
294 cpi->rtcd.encodemb.submbuv = vp8_subtract_mbuv_sse2;
296 cpi->rtcd.quantize.quantb = vp8_regular_quantize_b_sse2;
297 cpi->rtcd.quantize.fastquantb = fast_quantize_b_sse2;
300 cpi->rtcd.temporal.apply = vp8_temporal_filter_apply_sse2;
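These assignments only pay off on the consuming side, where the encoder calls through the table instead of binding one implementation at link time. A minimal sketch of that dispatch convention, assuming the *_INVOKE macro shape libvpx uses for its rtcd tables (the else-branch token pasting and the call site are illustrative, not quoted):

    /* With run-time CPU detection the macro indexes the table that
     * vp8_arch_x86_encoder_init filled in; without it, the macro
     * collapses to a name resolved directly at build time. */
    #if CONFIG_RUNTIME_CPU_DETECT
    #define VARIANCE_INVOKE(ctx, fn) (ctx)->fn
    #else
    #define VARIANCE_INVOKE(ctx, fn) vp8_variance_##fn
    #endif

    /* Illustrative call site, e.g. inside motion estimation:
     * best_sad = VARIANCE_INVOKE(&cpi->rtcd.variance, sad16x16)
     *                (src, src_stride, ref, ref_stride, best_sad);
     */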
308 cpi->rtcd.variance.sad16x16 = vp8_sad16x16_sse3;
309 cpi->rtcd.variance.sad16x16x3 = vp8_sad16x16x3_sse3;
310 cpi->rtcd.variance.sad16x8x3 = vp8_sad16x8x3_sse3;
311 cpi->rtcd.variance.sad8x16x3 = vp8_sad8x16x3_sse3;
312 cpi->rtcd.variance.sad8x8x3 = vp8_sad8x8x3_sse3;
313 cpi->rtcd.variance.sad4x4x3 = vp8_sad4x4x3_sse3;
315 cpi->rtcd.search.full_search = vp8_full_search_sadx3;
317 cpi->rtcd.variance.sad16x16x4d = vp8_sad16x16x4d_sse3;
318 cpi->rtcd.variance.sad16x8x4d = vp8_sad16x8x4d_sse3;
319 cpi->rtcd.variance.sad8x16x4d = vp8_sad8x16x4d_sse3;
320 cpi->rtcd.variance.sad8x8x4d = vp8_sad8x8x4d_sse3;
321 cpi->rtcd.variance.sad4x4x4d = vp8_sad4x4x4d_sse3;
322 cpi->rtcd.search.diamond_search = vp8_diamond_search_sadx4;
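The x3 and x4d kernels batch several SAD evaluations into one call, so vp8_full_search_sadx3 and vp8_diamond_search_sadx4 can score multiple candidate positions while the 16x16 source block stays loaded. A reference-style sketch of the x3 contract (the exact libvpx parameter list is an assumption):

    #include <stdlib.h>

    /* Scores three horizontally adjacent 16x16 candidates in one call,
     * writing the results to sad_array[0..2]; the SIMD versions exploit
     * the overlap between the three reference windows. */
    static void sad16x16x3_ref(const unsigned char *src, int src_stride,
                               const unsigned char *ref, int ref_stride,
                               unsigned int *sad_array)
    {
        int i, r, c;
        for (i = 0; i < 3; i++)
        {
            unsigned int sad = 0;
            for (r = 0; r < 16; r++)
                for (c = 0; c < 16; c++)
                    sad += abs(src[r * src_stride + c] -
                               ref[r * ref_stride + c + i]);
            sad_array[i] = sad;
        }
    }

The x4d variants instead take four independent reference pointers, matching the four points probed per diamond-search step; that detail is likewise stated here as a hedged reading of the names, not quoted from the headers.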
329 cpi->rtcd.variance.sad16x16x3 = vp8_sad16x16x3_ssse3;
330 cpi->rtcd.variance.sad16x8x3 = vp8_sad16x8x3_ssse3;
332 cpi->rtcd.variance.subpixvar16x8 = vp8_sub_pixel_variance16x8_ssse3;
333 cpi->rtcd.variance.subpixvar16x16 = vp8_sub_pixel_variance16x16_ssse3;
335 cpi->rtcd.quantize.fastquantb = fast_quantize_b_ssse3;
339 cpi->rtcd.variance.ssimpf_8x8 = vp8_ssim_parms_8x8_sse3;
340 cpi->rtcd.variance.ssimpf = vp8_ssim_parms_16x16_sse3;
352 cpi->rtcd.variance.sad16x16x8 = vp8_sad16x16x8_sse4;
353 cpi->rtcd.variance.sad16x8x8 = vp8_sad16x8x8_sse4;
354 cpi->rtcd.variance.sad8x16x8 = vp8_sad8x16x8_sse4;
355 cpi->rtcd.variance.sad8x8x8 = vp8_sad8x8x8_sse4;
356 cpi->rtcd.variance.sad4x4x8 = vp8_sad4x4x8_sse4;
358 cpi->rtcd.search.full_search = vp8_full_search_sadx8;
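The SSE4.1 kernels push the same idea to eight candidates per call, and vp8_full_search_sadx8 walks the exhaustive search window in eight-column strides. An illustrative helper for consuming such a batch (the unsigned short result width and this helper are assumptions, not code from the search itself):

    /* Picks the winner out of one eight-candidate batch, updating the
     * running best SAD; returns the winning offset within the batch,
     * or -1 if no candidate improved on the current best. */
    static int best_of_eight(const unsigned short sads[8],
                             unsigned int *best_sad)
    {
        int i, best_i = -1;
        for (i = 0; i < 8; i++)
            if (sads[i] < *best_sad)
            {
                *best_sad = sads[i];
                best_i = i;
            }
        return best_i;
    }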