Lines matching refs:vuc (every use of the vector unsigned char variable vuc, each prefixed with its source line number):
8 vector unsigned char vuc = { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16 };
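
The companion operands and result variables referenced below fall outside this match. Presumably the source declares them alongside vuc; a minimal sketch of the recurring ones (initializer values assumed, and the same pattern presumably extends to the short/int/float variants vs, vus, vbs, vi, vui, vbi, vp, vf and their res_ counterparts). All sketches in this listing assume <altivec.h> on a PowerPC target compiled with -maltivec.

    #include <altivec.h>

    vector bool char vbc = { 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0 };
    vector signed char vsc = { -1, 2, -3, 4, -5, 6, -7, 8,
                               -9, 10, -11, 12, -13, 14, -15, 16 };
    vector unsigned char res_vuc;
    vector bool char res_vbc;
    unsigned char param_uc, res_uc;
    int param_i, res_i;
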
104 res_vuc = vec_add(vuc, vuc);
108 res_vuc = vec_add(vbc, vuc);
112 res_vuc = vec_add(vuc, vbc);
180 res_vuc = vec_vaddubm(vuc, vuc);
184 res_vuc = vec_vaddubm(vbc, vuc);
188 res_vuc = vec_vaddubm(vuc, vbc);
266 res_vuc = vec_adds(vuc, vuc);
270 res_vuc = vec_adds(vbc, vuc);
274 res_vuc = vec_adds(vuc, vbc);
338 res_vuc = vec_vaddubs(vuc, vuc);
342 res_vuc = vec_vaddubs(vbc, vuc);
346 res_vuc = vec_vaddubs(vuc, vbc);
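
vec_add/vec_vaddubm above are modulo additions while vec_adds/vec_vaddubs saturate; the vaddubm/vaddubs names are the byte-specific instructions behind the generic forms. A minimal sketch of the difference (the values and the add_demo helper are illustrative):

    #include <altivec.h>

    void add_demo(void) {
      vector unsigned char a = vec_splats((unsigned char)250);
      vector unsigned char b = vec_splats((unsigned char)10);
      vector unsigned char m = vec_add(a, b);  /* modulo: (250 + 10) & 0xFF == 4 in every lane */
      vector unsigned char s = vec_adds(a, b); /* saturating: every lane clamps to 255 */
      (void)m; (void)s;
    }
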
411 res_vuc = vec_and(vuc, vuc);
415 res_vuc = vec_and(vbc, vuc);
419 res_vuc = vec_and(vuc, vbc);
495 res_vuc = vec_vand(vuc, vuc);
499 res_vuc = vec_vand(vbc, vuc);
503 res_vuc = vec_vand(vuc, vbc);
586 res_vuc = vec_andc(vuc, vuc);
592 res_vuc = vec_andc(vbc, vuc);
598 res_vuc = vec_andc(vuc, vbc);
722 res_vuc = vec_vandc(vuc, vuc);
728 res_vuc = vec_vandc(vbc, vuc);
734 res_vuc = vec_vandc(vuc, vbc);
851 res_vuc = vec_avg(vuc, vuc);
875 res_vuc = vec_vavgub(vuc, vuc);
918 res_vbc = vec_cmpeq(vuc, vuc);
947 res_vbc = vec_cmpge(vuc, vuc);
984 res_vbc = vec_cmpgt(vuc, vuc);
1012 res_vbc = vec_vcmpgtub(vuc, vuc);
1041 res_vbc = vec_cmple(vuc, vuc);
1073 res_vbc = vec_cmplt(vuc, vuc);
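
Each comparison above yields a vector bool char lane mask (0xFF where the relation holds, 0x00 where it does not), hence the res_vbc destination; vec_vcmpgtub is the byte-specific form of vec_cmpgt. A minimal sketch of using the mask (cmp_demo is illustrative):

    #include <altivec.h>

    void cmp_demo(vector unsigned char a, vector unsigned char b) {
      vector bool char gt = vec_cmpgt(a, b);       /* 0xFF where a[i] > b[i] */
      vector unsigned char mx = vec_sel(b, a, gt); /* per-lane max via the mask */
      (void)mx;
    }
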
1137 res_vuc = vec_div(vuc, vuc);
1214 res_vuc = vec_ld(0, &vuc);
1286 res_vuc = vec_lvx(0, &vuc);
1416 res_vuc = vec_ldl(0, &vuc);
1488 res_vuc = vec_lvxl(0, &vuc);
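
vec_ld/vec_lvx load 16 bytes from an effective address truncated to a 16-byte boundary; vec_ldl/vec_lvxl are the same load with a least-recently-used cache hint. A minimal sketch (load_demo is illustrative):

    #include <altivec.h>

    void load_demo(const unsigned char *p) {
      vector unsigned char v = vec_ld(0, p);  /* loads the 16 bytes at (p + 0) & ~15 */
      vector unsigned char w = vec_ldl(0, p); /* same, but marks the line least recently used */
      (void)v; (void)w;
    }
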
1602 res_vuc = vec_max(vuc, vuc);
1606 res_vuc = vec_max(vbc, vuc);
1610 res_vuc = vec_max(vuc, vbc);
1678 res_vuc = vec_vmaxub(vuc, vuc);
1682 res_vuc = vec_vmaxub(vbc, vuc);
1686 res_vuc = vec_vmaxub(vuc, vbc);
1747 res_vuc = vec_mergeh(vuc, vuc);
1791 res_vuc = vec_vmrghb(vuc, vuc);
1836 res_vuc = vec_mergel(vuc, vuc);
1880 res_vuc = vec_vmrglb(vuc, vuc);
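
vec_mergeh/vec_vmrghb interleave the high halves of the two operands byte by byte, vec_mergel/vec_vmrglb the low halves. A minimal sketch of the result shape (big-endian element order assumed):

    #include <altivec.h>

    void merge_demo(void) {
      vector unsigned char a = { 0, 1, 2, 3, 4, 5, 6, 7,
                                 8, 9, 10, 11, 12, 13, 14, 15 };
      vector unsigned char b = { 16, 17, 18, 19, 20, 21, 22, 23,
                                 24, 25, 26, 27, 28, 29, 30, 31 };
      vector unsigned char hi = vec_mergeh(a, b); /* { 0,16, 1,17, ..., 7,23 } */
      vector unsigned char lo = vec_mergel(a, b); /* { 8,24, 9,25, ..., 15,31 } */
      (void)hi; (void)lo;
    }
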
1938 res_vuc = vec_min(vuc, vuc);
1942 res_vuc = vec_min(vbc, vuc);
1946 res_vuc = vec_min(vuc, vbc);
2014 res_vuc = vec_vminub(vuc, vuc);
2018 res_vuc = vec_vminub(vbc, vuc);
2022 res_vuc = vec_vminub(vuc, vbc);
2113 res_vi = vec_msum(vsc, vuc, vi);
2117 res_vui = vec_msum(vuc, vuc, vui);
2129 res_vi = vec_vmsummbm(vsc, vuc, vi);
2133 res_vui = vec_vmsumubm(vuc, vuc, vui);
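
vec_msum/vec_vmsumubm multiply corresponding bytes and sum each group of four products into a 32-bit lane, adding the third operand as an accumulator. A minimal sketch (msum_demo is illustrative):

    #include <altivec.h>

    void msum_demo(vector unsigned char a, vector unsigned char b,
                   vector unsigned int acc) {
      /* lane i: acc[i] + a[4i]*b[4i] + a[4i+1]*b[4i+1]
                        + a[4i+2]*b[4i+2] + a[4i+3]*b[4i+3] */
      vector unsigned int r = vec_msum(a, b, acc);
      (void)r;
    }
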
2167 vec_mtvscr(vuc);
2208 res_vuc = vec_mul(vuc, vuc);
2233 res_vus = vec_mule(vuc, vuc);
2249 res_vus = vec_vmuleub(vuc, vuc);
2266 res_vus = vec_mulo(vuc, vuc);
2282 res_vus = vec_vmuloub(vuc, vuc);
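
vec_mule/vec_mulo are widening multiplies of the even- and odd-numbered byte lanes, which is why the results above are vector unsigned short. A minimal sketch (big-endian lane numbering assumed; mul_demo is illustrative):

    #include <altivec.h>

    void mul_demo(vector unsigned char a, vector unsigned char b) {
      vector unsigned short even = vec_mule(a, b); /* a[0]*b[0], a[2]*b[2], ... as 16-bit lanes */
      vector unsigned short odd  = vec_mulo(a, b); /* a[1]*b[1], a[3]*b[3], ... */
      (void)even; (void)odd;
    }
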
2310 res_vuc = vec_nor(vuc, vuc);
2370 res_vuc = vec_vnor(vuc, vuc);
2437 res_vuc = vec_or(vuc, vuc);
2441 res_vuc = vec_or(vbc, vuc);
2445 res_vuc = vec_or(vuc, vbc);
2533 res_vuc = vec_vor(vuc, vuc);
2537 res_vuc = vec_vor(vbc, vuc);
2541 res_vuc = vec_vor(vuc, vbc);
2742 res_vsc = vec_perm(vsc, vsc, vuc);
2746 res_vuc = vec_perm(vuc, vuc, vuc);
2750 res_vbc = vec_perm(vbc, vbc, vuc);
2754 res_vs = vec_perm(vs, vs, vuc);
2758 res_vus = vec_perm(vus, vus, vuc);
2762 res_vbs = vec_perm(vbs, vbs, vuc);
2766 res_vp = vec_perm(vp, vp, vuc);
2770 res_vi = vec_perm(vi, vi, vuc);
2774 res_vui = vec_perm(vui, vui, vuc);
2778 res_vbi = vec_perm(vbi, vbi, vuc);
2782 res_vf = vec_perm(vf, vf, vuc);
2786 res_vsc = vec_vperm(vsc, vsc, vuc);
2790 res_vuc = vec_vperm(vuc, vuc, vuc);
2794 res_vbc = vec_vperm(vbc, vbc, vuc);
2798 res_vs = vec_vperm(vs, vs, vuc);
2802 res_vus = vec_vperm(vus, vus, vuc);
2806 res_vbs = vec_vperm(vbs, vbs, vuc);
2810 res_vp = vec_vperm(vp, vp, vuc);
2814 res_vi = vec_vperm(vi, vi, vuc);
2818 res_vui = vec_vperm(vui, vui, vuc);
2822 res_vbi = vec_vperm(vbi, vbi, vuc);
2826 res_vf = vec_vperm(vf, vf, vuc);
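
In every vec_perm/vec_vperm call above, vuc is the permute control: each result byte is chosen from the 32-byte concatenation of the first two operands by the low five bits of the corresponding control byte. A minimal sketch (big-endian numbering assumed; perm_demo is illustrative):

    #include <altivec.h>

    void perm_demo(vector unsigned char a, vector unsigned char b) {
      /* control indices 0-15 select from a, 16-31 from b */
      vector unsigned char pattern =
          { 0, 16, 1, 17, 2, 18, 3, 19, 4, 20, 5, 21, 6, 22, 7, 23 };
      vector unsigned char r = vec_perm(a, b, pattern); /* same shape as vec_mergeh(a, b) */
      (void)r;
    }
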
2840 res_vsc = vec_rl(vsc, vuc);
2844 res_vuc = vec_rl(vuc, vuc);
2864 res_vsc = vec_vrlb(vsc, vuc);
2868 res_vuc = vec_vrlb(vuc, vuc);
2907 res_vsc = vec_sel(vsc, vsc, vuc);
2927 res_vuc = vec_sel(vuc, vuc, vuc);
2937 res_vuc = vec_sel(vuc, vuc, vbc);
2947 res_vbc = vec_sel(vbc, vbc, vuc);
3107 res_vsc = vec_vsel(vsc, vsc, vuc);
3127 res_vuc = vec_vsel(vuc, vuc, vuc);
3137 res_vuc = vec_vsel(vuc, vuc, vbc);
3147 res_vbc = vec_vsel(vbc, vbc, vuc);
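
vec_sel/vec_vsel is a pure bitwise select, (a & ~mask) | (b & mask), so the mask may be either unsigned char or bool char as the calls above show. A minimal sketch (sel_demo is illustrative):

    #include <altivec.h>

    void sel_demo(vector unsigned char a, vector unsigned char b,
                  vector unsigned char mask) {
      vector unsigned char r = vec_sel(a, b, mask); /* mask bits set pick from b */
      (void)r;
    }
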
3308 res_vsc = vec_sl(vsc, vuc);
3312 res_vuc = vec_sl(vuc, vuc);
3332 res_vsc = vec_vslb(vsc, vuc);
3336 res_vuc = vec_vslb(vuc, vuc);
3369 res_vuc = vec_sld(vuc, vuc, 0);
3495 res_vuc = vec_vsldoi(vuc, vuc, 0);
3580 res_vsc = vec_sll(vsc, vuc);
3592 res_vuc = vec_sll(vuc, vuc);
3596 res_vuc = vec_sll(vuc, vus);
3600 res_vuc = vec_sll(vuc, vui);
3604 res_vbc = vec_sll(vbc, vuc);
3616 res_vs = vec_sll(vs, vuc);
3628 res_vus = vec_sll(vus, vuc);
3640 res_vbs = vec_sll(vbs, vuc);
3652 res_vp = vec_sll(vp, vuc);
3664 res_vi = vec_sll(vi, vuc);
3676 res_vui = vec_sll(vui, vuc);
3688 res_vbi = vec_sll(vbi, vuc);
3700 res_vsc = vec_vsl(vsc, vuc);
3712 res_vuc = vec_vsl(vuc, vuc);
3716 res_vuc = vec_vsl(vuc, vus);
3720 res_vuc = vec_vsl(vuc, vui);
3724 res_vbc = vec_vsl(vbc, vuc);
3736 res_vs = vec_vsl(vs, vuc);
3748 res_vus = vec_vsl(vus, vuc);
3760 res_vbs = vec_vsl(vbs, vuc);
3772 res_vp = vec_vsl(vp, vuc);
3784 res_vi = vec_vsl(vi, vuc);
3796 res_vui = vec_vsl(vui, vuc);
3808 res_vbi = vec_vsl(vbi, vuc);
3825 res_vsc = vec_slo(vsc, vuc);
3829 res_vuc = vec_slo(vuc, vsc);
3833 res_vuc = vec_slo(vuc, vuc);
3841 res_vs = vec_slo(vs, vuc);
3849 res_vus = vec_slo(vus, vuc);
3857 res_vp = vec_slo(vp, vuc);
3865 res_vi = vec_slo(vi, vuc);
3873 res_vui = vec_slo(vui, vuc);
3881 res_vf = vec_slo(vf, vuc);
3889 res_vsc = vec_vslo(vsc, vuc);
3893 res_vuc = vec_vslo(vuc, vsc);
3897 res_vuc = vec_vslo(vuc, vuc);
3905 res_vs = vec_vslo(vs, vuc);
3913 res_vus = vec_vslo(vus, vuc);
3921 res_vp = vec_vslo(vp, vuc);
3929 res_vi = vec_vslo(vi, vuc);
3937 res_vui = vec_vslo(vui, vuc);
3945 res_vf = vec_vslo(vf, vuc);
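
Three distinct left shifts appear above: vec_sl shifts each byte by the matching byte of the count (modulo 8), while vec_sll shifts the whole 128-bit register by 0-7 bits and vec_slo by whole octets. Splatting a single bit count serves the usual slo+sll pairing, since vec_slo reads the byte part of the count and vec_sll the bit part. A minimal sketch (shift_demo is illustrative):

    #include <altivec.h>

    void shift_demo(vector unsigned char v) {
      vector unsigned char per_lane = vec_sl(v, vec_splats((unsigned char)3)); /* each byte << 3 */
      vector unsigned char n = vec_splats((unsigned char)19);
      /* whole register left by 19 bits: vec_slo uses 19/8 = 2 bytes, vec_sll 19%8 = 3 bits */
      vector unsigned char whole = vec_sll(vec_slo(v, n), n);
      (void)per_lane; (void)whole;
    }
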
3954 res_vuc = vec_splat(vuc, 0);
3998 res_vuc = vec_vspltb(vuc, 0);
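
vec_splat/vec_vspltb broadcast one element into all lanes; the index must be a compile-time constant in 0..15. A minimal sketch (splat_demo is illustrative):

    #include <altivec.h>

    void splat_demo(vector unsigned char v) {
      vector unsigned char r = vec_splat(v, 0); /* all 16 lanes become v's element 0 */
      (void)r;
    }
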
4060 res_vsc = vec_sr(vsc, vuc);
4064 res_vuc = vec_sr(vuc, vuc);
4084 res_vsc = vec_vsrb(vsc, vuc);
4088 res_vuc = vec_vsrb(vuc, vuc);
4109 res_vsc = vec_sra(vsc, vuc);
4113 res_vuc = vec_sra(vuc, vuc);
4133 res_vsc = vec_vsrab(vsc, vuc);
4137 res_vuc = vec_vsrab(vuc, vuc);
4158 res_vsc = vec_srl(vsc, vuc);
4170 res_vuc = vec_srl(vuc, vuc);
4174 res_vuc = vec_srl(vuc, vus);
4178 res_vuc = vec_srl(vuc, vui);
4182 res_vbc = vec_srl(vbc, vuc);
4194 res_vs = vec_srl(vs, vuc);
4206 res_vus = vec_srl(vus, vuc);
4218 res_vbs = vec_srl(vbs, vuc);
4230 res_vp = vec_srl(vp, vuc);
4242 res_vi = vec_srl(vi, vuc);
4254 res_vui = vec_srl(vui, vuc);
4266 res_vbi = vec_srl(vbi, vuc);
4278 res_vsc = vec_vsr(vsc, vuc);
4290 res_vuc = vec_vsr(vuc, vuc);
4294 res_vuc = vec_vsr(vuc, vus);
4298 res_vuc = vec_vsr(vuc, vui);
4302 res_vbc = vec_vsr(vbc, vuc);
4314 res_vs = vec_vsr(vs, vuc);
4326 res_vus = vec_vsr(vus, vuc);
4338 res_vbs = vec_vsr(vbs, vuc);
4350 res_vp = vec_vsr(vp, vuc);
4362 res_vi = vec_vsr(vi, vuc);
4374 res_vui = vec_vsr(vui, vuc);
4386 res_vbi = vec_vsr(vbi, vuc);
4403 res_vsc = vec_sro(vsc, vuc);
4407 res_vuc = vec_sro(vuc, vsc);
4411 res_vuc = vec_sro(vuc, vuc);
4419 res_vs = vec_sro(vs, vuc);
4427 res_vus = vec_sro(vus, vuc);
4435 res_vp = vec_sro(vp, vuc);
4443 res_vi = vec_sro(vi, vuc);
4451 res_vui = vec_sro(vui, vuc);
4459 res_vf = vec_sro(vf, vuc);
4467 res_vsc = vec_vsro(vsc, vuc);
4471 res_vuc = vec_vsro(vuc, vsc);
4475 res_vuc = vec_vsro(vuc, vuc);
4483 res_vs = vec_vsro(vs, vuc);
4491 res_vus = vec_vsro(vus, vuc);
4499 res_vp = vec_vsro(vp, vuc);
4507 res_vi = vec_vsro(vi, vuc);
4515 res_vui = vec_vsro(vui, vuc);
4523 res_vf = vec_vsro(vf, vuc);
4536 vec_st(vuc, 0, &vuc);
4540 vec_st(vuc, 0, &param_uc);
4640 vec_stvx(vuc, 0, &vuc);
4644 vec_stvx(vuc, 0, &param_uc);
4741 vec_ste(vuc, 0, &param_uc);
4801 vec_stvebx(vuc, 0, &param_uc);
4866 vec_stl(vuc, 0, &vuc);
4870 vec_stl(vuc, 0, &param_uc);
4970 vec_stvxl(vuc, 0, &vuc);
4974 vec_stvxl(vuc, 0, &param_uc);
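
vec_st/vec_stvx store 16 bytes to an effective address truncated to a 16-byte boundary, vec_ste/vec_stvebx store a single element, and vec_stl/vec_stvxl add the least-recently-used cache hint. A minimal sketch (store_demo is illustrative):

    #include <altivec.h>

    void store_demo(vector unsigned char v, unsigned char *p) {
      vec_st(v, 0, p);  /* 16-byte store at (p + 0) & ~15 */
      vec_ste(v, 3, p); /* stores one byte: the lane selected by (p + 3) & 15, at p + 3 */
      vec_stl(v, 0, p); /* like vec_st, plus a least-recently-used hint */
    }
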
5079 res_vuc = vec_sub(vuc, vuc);
5083 res_vuc = vec_sub(vbc, vuc);
5087 res_vuc = vec_sub(vuc, vbc);
5155 res_vuc = vec_vsububm(vuc, vuc);
5159 res_vuc = vec_vsububm(vbc, vuc);
5163 res_vuc = vec_vsububm(vuc, vbc);
5241 res_vuc = vec_subs(vuc, vuc);
5245 res_vuc = vec_subs(vbc, vuc);
5249 res_vuc = vec_subs(vuc, vbc);
5313 res_vuc = vec_vsububs(vuc, vuc);
5317 res_vuc = vec_vsububs(vbc, vuc);
5321 res_vuc = vec_vsububs(vuc, vbc);
5378 res_vui = vec_sum4s(vuc, vui);
5390 res_vui = vec_vsum4ubs(vuc, vui);
5526 res_vuc = vec_xor(vuc, vuc);
5530 res_vuc = vec_xor(vbc, vuc);
5534 res_vuc = vec_xor(vuc, vbc);
5622 res_vuc = vec_vxor(vuc, vuc);
5626 res_vuc = vec_vxor(vbc, vuc);
5630 res_vuc = vec_vxor(vuc, vbc);
5713 res_uc = vec_extract(vuc, param_i);
5754 res_vuc = vec_insert(param_uc, vuc, param_i);
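
vec_extract and vec_insert move single elements between vector and scalar form; the index is taken modulo 16, and the param_i argument above shows it need not be a constant. A minimal sketch (access_demo is illustrative):

    #include <altivec.h>

    void access_demo(vector unsigned char v, unsigned char x, int i) {
      unsigned char e = vec_extract(v, i);          /* read element i & 15 */
      vector unsigned char r = vec_insert(x, v, i); /* copy of v with element i & 15 replaced */
      (void)e; (void)r;
    }
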
5821 res_vuc = vec_lvlx(0, &vuc);
5992 res_vuc = vec_lvlxl(0, &vuc);
6163 res_vuc = vec_lvrx(0, &vuc);
6334 res_vuc = vec_lvrxl(0, &vuc);
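
vec_lvlx loads the bytes from the address to the end of its 16-byte block, left-aligned, and vec_lvrx the bytes of a block up to (not including) the address, right-aligned; the l-suffixed forms add the LRU hint. Presumably these back the classic unaligned-load idiom, sketched below (unaligned_load_demo is illustrative):

    #include <altivec.h>

    void unaligned_load_demo(const unsigned char *p) {
      /* OR the left part of the first block with the right part of the next */
      vector unsigned char v = vec_or(vec_lvlx(0, p), vec_lvrx(0, p + 16));
      (void)v;
    }
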
6507 vec_stvlx(vuc, 0, &param_uc);
6523 vec_stvlx(vuc, 0, &vuc);
6780 vec_stvlxl(vuc, 0, &param_uc);
6796 vec_stvlxl(vuc, 0, &vuc);
7053 vec_stvrx(vuc, 0, &param_uc);
7069 vec_stvrx(vuc, 0, &vuc);
7326 vec_stvrxl(vuc, 0, &param_uc);
7342 vec_stvrxl(vuc, 0, &vuc);
7649 res_i = vec_all_eq(vuc, vuc);
7653 res_i = vec_all_eq(vuc, vbc);
7661 res_i = vec_all_eq(vbc, vuc);
7742 res_i = vec_all_ge(vuc, vuc);
7746 res_i = vec_all_ge(vuc, vbc);
7754 res_i = vec_all_ge(vbc, vuc);
7831 res_i = vec_all_gt(vuc, vuc);
7835 res_i = vec_all_gt(vuc, vbc);
7843 res_i = vec_all_gt(vbc, vuc);
7925 res_i = vec_all_le(vuc, vuc);
7929 res_i = vec_all_le(vuc, vbc);
7937 res_i = vec_all_le(vbc, vuc);
8014 res_i = vec_all_lt(vuc, vuc);
8018 res_i = vec_all_lt(vuc, vbc);
8026 res_i = vec_all_lt(vbc, vuc);
8108 res_i = vec_all_ne(vuc, vuc);
8112 res_i = vec_all_ne(vuc, vbc);
8120 res_i = vec_all_ne(vbc, vuc);
8226 res_i = vec_any_eq(vuc, vuc);
8230 res_i = vec_any_eq(vuc, vbc);
8238 res_i = vec_any_eq(vbc, vuc);
8319 res_i = vec_any_ge(vuc, vuc);
8323 res_i = vec_any_ge(vuc, vbc);
8331 res_i = vec_any_ge(vbc, vuc);
8408 res_i = vec_any_gt(vuc, vuc);
8412 res_i = vec_any_gt(vuc, vbc);
8420 res_i = vec_any_gt(vbc, vuc);
8497 res_i = vec_any_le(vuc, vuc);
8501 res_i = vec_any_le(vuc, vbc);
8509 res_i = vec_any_le(vbc, vuc);
8586 res_i = vec_any_lt(vuc, vuc);
8590 res_i = vec_any_lt(vuc, vbc);
8598 res_i = vec_any_lt(vbc, vuc);
8680 res_i = vec_any_ne(vuc, vuc);
8684 res_i = vec_any_ne(vuc, vbc);
8692 res_i = vec_any_ne(vbc, vuc);
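
Unlike the vec_cmp* mask forms earlier, the vec_all_*/vec_any_* predicates above collapse the comparison to a single int (1 or 0), hence the res_i destinations: vec_all_* requires the relation in every lane, vec_any_* in at least one. A minimal sketch (pred_demo is illustrative):

    #include <altivec.h>

    void pred_demo(vector unsigned char a, vector unsigned char b) {
      if (vec_all_eq(a, b)) { /* every lane equal */ }
      if (vec_any_gt(a, b)) { /* some lane with a[i] > b[i] */ }
    }
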