
Lines matching refs: vss (see the usage sketch after the listing)

9 volatile vector signed short vss;
82 ss = vec_extract(vss, idx);
109 vss = vec_insert(ss, vss, idx);
134 vss = vec_promote(ss, idx);
153 vss = vec_insert_and_zero(cptrss);
174 vss = vec_perm(vss, vss, vuc);
234 vss = vec_sel(vss, vss, vus);
235 vss = vec_sel(vss, vss, vbs);
299 vss = vec_xld2(idx, cptrss);
309 vss = vec_xlw4(idx, cptrss);
316 vec_xstd2(vss, idx, ptrss);
326 vec_xstw4(vss, idx, ptrss);
335 vss = vec_load_bndry(cptrss, 64);
366 vss = vec_load_len(cptrss, idx);
385 vec_store_len(vss, ptrss, idx);
447 vss = vec_splat(vss, 0);
449 vss = vec_splat(vss, 7);
496 vss = vec_splat_s16(-32768);
498 vss = vec_splat_s16(32767);
525 vss = vec_splats(ss);
541 vsl = vec_extend_s64(vss);
550 vss = vec_mergeh(vss, vss);
577 vss = vec_mergel(vss, vss);
598 vsc = vec_pack(vss, vss);
604 vss = vec_pack(vsi, vsi);
617 vsc = vec_packs(vss, vss);
621 vss = vec_packs(vsi, vsi);
630 vsc = vec_packs_cc(vss, vss, &cc);
634 vss = vec_packs_cc(vsi, vsi, &cc);
643 vuc = vec_packsu(vss, vss);
663 vss = vec_unpackh(vsc);
669 vsi = vec_unpackh(vss);
682 vss = vec_unpackl(vsc);
688 vsi = vec_unpackl(vss);
709 vbs = vec_cmpeq(vss, vss);
734 vbs = vec_cmpge(vss, vss);
753 vbs = vec_cmpgt(vss, vss);
772 vbs = vec_cmple(vss, vss);
791 vbs = vec_cmplt(vss, vss);
820 idx = vec_all_eq(vss, vss);
822 idx = vec_all_eq(vss, vbs);
824 idx = vec_all_eq(vbs, vss);
879 idx = vec_all_ne(vss, vss);
881 idx = vec_all_ne(vss, vbs);
883 idx = vec_all_ne(vbs, vss);
938 idx = vec_all_ge(vss, vss);
940 idx = vec_all_ge(vss, vbs);
942 idx = vec_all_ge(vbs, vss);
997 idx = vec_all_gt(vss, vss);
999 idx = vec_all_gt(vss, vbs);
1001 idx = vec_all_gt(vbs, vss);
1056 idx = vec_all_le(vss, vss);
1058 idx = vec_all_le(vss, vbs);
1060 idx = vec_all_le(vbs, vss);
1115 idx = vec_all_lt(vss, vss);
1117 idx = vec_all_lt(vss, vbs);
1119 idx = vec_all_lt(vbs, vss);
1188 idx = vec_any_eq(vss, vss);
1190 idx = vec_any_eq(vss, vbs);
1192 idx = vec_any_eq(vbs, vss);
1247 idx = vec_any_ne(vss, vss);
1249 idx = vec_any_ne(vss, vbs);
1251 idx = vec_any_ne(vbs, vss);
1306 idx = vec_any_ge(vss, vss);
1308 idx = vec_any_ge(vss, vbs);
1310 idx = vec_any_ge(vbs, vss);
1365 idx = vec_any_gt(vss, vss);
1367 idx = vec_any_gt(vss, vbs);
1369 idx = vec_any_gt(vbs, vss);
1424 idx = vec_any_le(vss, vss);
1426 idx = vec_any_le(vss, vbs);
1428 idx = vec_any_le(vbs, vss);
1483 idx = vec_any_lt(vss, vss);
1485 idx = vec_any_lt(vss, vbs);
1487 idx = vec_any_lt(vbs, vss);
1551 vss = vec_andc(vss, vss);
1552 vss = vec_andc(vss, vbs);
1553 vss = vec_andc(vbs, vss);
1583 vss = vec_nor(vss, vss);
1584 vss = vec_nor(vss, vbs);
1585 vss = vec_nor(vbs, vss);
1612 vus = vec_cntlz(vss);
1629 vus = vec_cnttz(vss);
1646 vus = vec_popcnt(vss);
1663 vss = vec_rl(vss, vus);
1680 vss = vec_rli(vss, ul);
1701 vss = vec_rl_mask(vss, vus, 0);
1703 vss = vec_rl_mask(vss, vus, 255);
1744 vss = vec_sll(vss, vuc);
1746 vss = vec_sll(vss, vus);
1748 vss = vec_sll(vss, vui);
1807 vss = vec_slb(vss, vss);
1809 vss = vec_slb(vss, vus);
1811 vus = vec_slb(vus, vss);
1844 vss = vec_sld(vss, vss, 0);
1846 vss = vec_sld(vss, vss, 15);
1881 vss = vec_sldw(vss, vss, 0);
1883 vss = vec_sldw(vss, vss, 3);
1928 vss = vec_sral(vss, vuc);
1930 vss = vec_sral(vss, vus);
1932 vss = vec_sral(vss, vui);
1991 vss = vec_srab(vss, vss);
1993 vss = vec_srab(vss, vus);
1995 vus = vec_srab(vus, vss);
2038 vss = vec_srl(vss, vuc);
2040 vss = vec_srl(vss, vus);
2042 vss = vec_srl(vss, vui);
2101 vss = vec_srb(vss, vss);
2103 vss = vec_srb(vss, vus);
2105 vus = vec_srb(vus, vss);
2131 vss = vec_abs(vss);
2141 vss = vec_max(vss, vss);
2142 vss = vec_max(vss, vbs);
2143 vss = vec_max(vbs, vss);
2167 vss = vec_min(vss, vss);
2168 vss = vec_min(vss, vbs);
2169 vss = vec_min(vbs, vss);
2209 vss = vec_avg(vss, vss);
2247 vss = vec_mladd(vss, vss, vss);
2248 vss = vec_mladd(vus, vss, vss);
2249 vss = vec_mladd(vss, vus, vus);
2260 vss = vec_mhadd(vss, vss, vss);
2269 vss = vec_meadd(vsc, vsc, vss);
2273 vsi = vec_meadd(vss, vss, vsi);
2282 vss = vec_moadd(vsc, vsc, vss);
2286 vsi = vec_moadd(vss, vss, vsi);
2299 vss = vec_mulh(vss, vss);
2308 vss = vec_mule(vsc, vsc);
2312 vsi = vec_mule(vss, vss);
2321 vss = vec_mulo(vsc, vsc);
2325 vsi = vec_mulo(vss, vss);
2369 idx = vec_test_mask(vss, vus);
2392 vss = vec_cp_until_zero(vss);
2411 vss = vec_cp_until_zero_cc(vss, &cc);
2430 vss = vec_cmpeq_idx(vss, vss);
2449 vss = vec_cmpeq_idx_cc(vss, vss, &cc);
2468 vss = vec_cmpeq_or_0_idx(vss, vss);
2487 vss = vec_cmpeq_or_0_idx_cc(vss, vss, &cc);
2506 vss = vec_cmpne_idx(vss, vss);
2525 vss = vec_cmpne_idx_cc(vss, vss, &cc);
2544 vss = vec_cmpne_or_0_idx(vss, vss);
2563 vss = vec_cmpne_or_0_idx_cc(vss, vss, &cc);
2666 vbs = vec_find_any_eq(vss, vss);
2685 vbs = vec_find_any_eq_cc(vss, vss, &cc);
2704 vss = vec_find_any_eq_idx(vss, vss);
2723 vss = vec_find_any_eq_idx_cc(vss, vss, &cc);
2742 vss = vec_find_any_eq_or_0_idx(vss, vss);
2761 vss = vec_find_any_eq_or_0_idx_cc(vss, vss, &cc);
2780 vbs = vec_find_any_ne(vss, vss);
2799 vbs = vec_find_any_ne_cc(vss, vss, &cc);
2818 vss = vec_find_any_ne_idx(vss, vss);
2837 vss = vec_find_any_ne_idx_cc(vss, vss, &cc);
2856 vss = vec_find_any_ne_or_0_idx(vss, vss);
2875 vss = vec_find_any_ne_or_0_idx_cc(vss, vss, &cc);
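The matches above come from a Clang CodeGen test exercising the SystemZ z/Vector intrinsics from <vecintrin.h> on a `vector signed short` operand. As a rough illustration of how a few of the listed intrinsics fit together, here is a small sketch of my own (not part of the test file); it assumes a SystemZ compiler with the z/Vector language extension enabled, e.g. `clang -target s390x-linux-gnu -march=z13 -mzvector`.

/* Minimal sketch (illustrative only, not from the test): a handful of the
 * intrinsics matched above, applied to vector signed short.
 * Build assumption: SystemZ target with z/Vector enabled, e.g.
 *   clang -target s390x-linux-gnu -march=z13 -mzvector demo.c
 */
#include <vecintrin.h>
#include <stdio.h>

int main(void) {
  /* vec_splats: replicate a scalar into all eight short elements. */
  vector signed short a = vec_splats((signed short)3);

  /* vec_insert / vec_extract: element access by index. */
  a = vec_insert((signed short)-7, a, 5);
  signed short e = vec_extract(a, 5);          /* e == -7 */

  /* vec_splat: broadcast element 5 of a across the whole vector. */
  vector signed short b = vec_splat(a, 5);     /* all elements == -7 */

  /* vec_cmpeq yields a vector bool short mask; vec_sel merges the two
   * operands element-wise under that mask. */
  vector bool short eq = vec_cmpeq(a, b);
  vector signed short c = vec_sel(a, b, eq);

  /* vec_max / vec_abs from the arithmetic group in the listing. */
  vector signed short d = vec_abs(vec_max(a, c));

  printf("%d %d\n", (int)e, (int)vec_extract(d, 0));
  return 0;
}

In the test itself the calls are made on volatile globals (see the declaration of vss at the top of the listing) so that Clang emits a separately checkable instruction sequence for each intrinsic; the sketch above only illustrates the C-level signatures.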