Lines Matching refs:vsc (all references to the volatile vector signed char variable vsc declared below; a short usage sketch follows the listing)
8 volatile vector signed char vsc;
76 sc = vec_extract(vsc, idx);
103 vsc = vec_insert(sc, vsc, idx);
130 vsc = vec_promote(sc, idx);
149 vsc = vec_insert_and_zero(cptrsc);
168 vsc = vec_perm(vsc, vsc, vuc);
228 vsc = vec_sel(vsc, vsc, vuc);
229 vsc = vec_sel(vsc, vsc, vbc);
297 vsc = vec_xld2(idx, cptrsc);
307 vsc = vec_xlw4(idx, cptrsc);
314 vec_xstd2(vsc, idx, ptrsc);
324 vec_xstw4(vsc, idx, ptrsc);
331 vsc = vec_load_bndry(cptrsc, 64);
349 vsc = vec_load_bndry(cptrsc, 128);
351 vsc = vec_load_bndry(cptrsc, 256);
353 vsc = vec_load_bndry(cptrsc, 512);
355 vsc = vec_load_bndry(cptrsc, 1024);
357 vsc = vec_load_bndry(cptrsc, 2048);
359 vsc = vec_load_bndry(cptrsc, 4096);
362 vsc = vec_load_len(cptrsc, idx);
381 vec_store_len(vsc, ptrsc, idx);
435 vsc = vec_splat(vsc, 0);
437 vsc = vec_splat(vsc, 15);
488 vsc = vec_splat_s8(-128);
490 vsc = vec_splat_s8(127);
521 vsc = vec_splats(sc);
540 vsl = vec_extend_s64(vsc);
544 vsc = vec_mergeh(vsc, vsc);
571 vsc = vec_mergel(vsc, vsc);
598 vsc = vec_pack(vss, vss);
617 vsc = vec_packs(vss, vss);
630 vsc = vec_packs_cc(vss, vss, &cc);
663 vss = vec_unpackh(vsc);
682 vss = vec_unpackl(vsc);
703 vbc = vec_cmpeq(vsc, vsc);
730 vbc = vec_cmpge(vsc, vsc);
749 vbc = vec_cmpgt(vsc, vsc);
768 vbc = vec_cmple(vsc, vsc);
787 vbc = vec_cmplt(vsc, vsc);
806 idx = vec_all_eq(vsc, vsc);
808 idx = vec_all_eq(vsc, vbc);
810 idx = vec_all_eq(vbc, vsc);
865 idx = vec_all_ne(vsc, vsc);
867 idx = vec_all_ne(vsc, vbc);
869 idx = vec_all_ne(vbc, vsc);
924 idx = vec_all_ge(vsc, vsc);
926 idx = vec_all_ge(vsc, vbc);
928 idx = vec_all_ge(vbc, vsc);
983 idx = vec_all_gt(vsc, vsc);
985 idx = vec_all_gt(vsc, vbc);
987 idx = vec_all_gt(vbc, vsc);
1042 idx = vec_all_le(vsc, vsc);
1044 idx = vec_all_le(vsc, vbc);
1046 idx = vec_all_le(vbc, vsc);
1101 idx = vec_all_lt(vsc, vsc);
1103 idx = vec_all_lt(vsc, vbc);
1105 idx = vec_all_lt(vbc, vsc);
1174 idx = vec_any_eq(vsc, vsc);
1176 idx = vec_any_eq(vsc, vbc);
1178 idx = vec_any_eq(vbc, vsc);
1233 idx = vec_any_ne(vsc, vsc);
1235 idx = vec_any_ne(vsc, vbc);
1237 idx = vec_any_ne(vbc, vsc);
1292 idx = vec_any_ge(vsc, vsc);
1294 idx = vec_any_ge(vsc, vbc);
1296 idx = vec_any_ge(vbc, vsc);
1351 idx = vec_any_gt(vsc, vsc);
1353 idx = vec_any_gt(vsc, vbc);
1355 idx = vec_any_gt(vbc, vsc);
1410 idx = vec_any_le(vsc, vsc);
1412 idx = vec_any_le(vsc, vbc);
1414 idx = vec_any_le(vbc, vsc);
1469 idx = vec_any_lt(vsc, vsc);
1471 idx = vec_any_lt(vsc, vbc);
1473 idx = vec_any_lt(vbc, vsc);
1544 vsc = vec_andc(vsc, vsc);
1545 vsc = vec_andc(vsc, vbc);
1546 vsc = vec_andc(vbc, vsc);
1576 vsc = vec_nor(vsc, vsc);
1577 vsc = vec_nor(vsc, vbc);
1578 vsc = vec_nor(vbc, vsc);
1608 vuc = vec_cntlz(vsc);
1625 vuc = vec_cnttz(vsc);
1642 vuc = vec_popcnt(vsc);
1659 vsc = vec_rl(vsc, vuc);
1676 vsc = vec_rli(vsc, ul);
1693 vsc = vec_rl_mask(vsc, vuc, 0);
1695 vsc = vec_rl_mask(vsc, vuc, 255);
1726 vsc = vec_sll(vsc, vuc);
1728 vsc = vec_sll(vsc, vus);
1730 vsc = vec_sll(vsc, vui);
1799 vsc = vec_slb(vsc, vsc);
1801 vsc = vec_slb(vsc, vuc);
1803 vuc = vec_slb(vuc, vsc);
1836 vsc = vec_sld(vsc, vsc, 0);
1838 vsc = vec_sld(vsc, vsc, 15);
1873 vsc = vec_sldw(vsc, vsc, 0);
1875 vsc = vec_sldw(vsc, vsc, 3);
1910 vsc = vec_sral(vsc, vuc);
1912 vsc = vec_sral(vsc, vus);
1914 vsc = vec_sral(vsc, vui);
1983 vsc = vec_srab(vsc, vsc);
1985 vsc = vec_srab(vsc, vuc);
1987 vuc = vec_srab(vuc, vsc);
2020 vsc = vec_srl(vsc, vuc);
2022 vsc = vec_srl(vsc, vus);
2024 vsc = vec_srl(vsc, vui);
2093 vsc = vec_srb(vsc, vsc);
2095 vsc = vec_srb(vsc, vuc);
2097 vuc = vec_srb(vuc, vsc);
2130 vsc = vec_abs(vsc);
2135 vsc = vec_max(vsc, vsc);
2136 vsc = vec_max(vsc, vbc);
2137 vsc = vec_max(vbc, vsc);
2161 vsc = vec_min(vsc, vsc);
2162 vsc = vec_min(vsc, vbc);
2163 vsc = vec_min(vbc, vsc);
2205 vsc = vec_avg(vsc, vsc);
2243 vsc = vec_mladd(vsc, vsc, vsc);
2244 vsc = vec_mladd(vuc, vsc, vsc);
2245 vsc = vec_mladd(vsc, vuc, vuc);
2256 vsc = vec_mhadd(vsc, vsc, vsc);
2269 vss = vec_meadd(vsc, vsc, vss);
2282 vss = vec_moadd(vsc, vsc, vss);
2295 vsc = vec_mulh(vsc, vsc);
2308 vss = vec_mule(vsc, vsc);
2321 vss = vec_mulo(vsc, vsc);
2365 idx = vec_test_mask(vsc, vuc);
2386 vsc = vec_cp_until_zero(vsc);
2405 vsc = vec_cp_until_zero_cc(vsc, &cc);
2424 vsc = vec_cmpeq_idx(vsc, vsc);
2443 vsc = vec_cmpeq_idx_cc(vsc, vsc, &cc);
2462 vsc = vec_cmpeq_or_0_idx(vsc, vsc);
2481 vsc = vec_cmpeq_or_0_idx_cc(vsc, vsc, &cc);
2500 vsc = vec_cmpne_idx(vsc, vsc);
2519 vsc = vec_cmpne_idx_cc(vsc, vsc, &cc);
2538 vsc = vec_cmpne_or_0_idx(vsc, vsc);
2557 vsc = vec_cmpne_or_0_idx_cc(vsc, vsc, &cc);
2660 vbc = vec_find_any_eq(vsc, vsc);
2679 vbc = vec_find_any_eq_cc(vsc, vsc, &cc);
2698 vsc = vec_find_any_eq_idx(vsc, vsc);
2717 vsc = vec_find_any_eq_idx_cc(vsc, vsc, &cc);
2736 vsc = vec_find_any_eq_or_0_idx(vsc, vsc);
2755 vsc = vec_find_any_eq_or_0_idx_cc(vsc, vsc, &cc);
2774 vbc = vec_find_any_ne(vsc, vsc);
2793 vbc = vec_find_any_ne_cc(vsc, vsc, &cc);
2812 vsc = vec_find_any_ne_idx(vsc, vsc);
2831 vsc = vec_find_any_ne_idx_cc(vsc, vsc, &cc);
2850 vsc = vec_find_any_ne_or_0_idx(vsc, vsc);
2869 vsc = vec_find_any_ne_or_0_idx_cc(vsc, vsc, &cc);
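The built-ins matched above (vec_splats, vec_cmpeq, vec_sel, vec_all_gt, and the rest) are the IBM z/Architecture vector intrinsics from <vecintrin.h>. As a rough usage sketch, the fragment below combines a few of them the way ordinary code would, rather than as isolated assignments to volatile globals. It assumes an s390x target built with the zvector language extension (e.g. clang or gcc with -march=z13 -mzvector); the function names replace_zero_bytes and all_positive are illustrative only and do not appear in the listing.

/* Minimal sketch, assuming -march=z13 -mzvector on s390x. */
#include <vecintrin.h>

/* Replace every zero byte in v with the byte 'fill', using the same
 * intrinsics exercised above: vec_splats, vec_cmpeq and vec_sel. */
static vector signed char replace_zero_bytes(vector signed char v, signed char fill)
{
    vector signed char zeros = vec_splats((signed char)0);
    vector signed char fills = vec_splats(fill);
    /* vec_cmpeq yields a vector bool char mask: all bits set in each
     * lane where v[i] == 0, all bits clear elsewhere. */
    vector bool char is_zero = vec_cmpeq(v, zeros);
    /* vec_sel takes bits from the third operand's set positions,
     * so zero lanes are replaced by 'fill' and the rest pass through. */
    return vec_sel(v, fills, is_zero);
}

static int all_positive(vector signed char v)
{
    /* The vec_all_* predicates return a plain int:
     * 1 if the comparison holds in every one of the 16 lanes. */
    return vec_all_gt(v, vec_splats((signed char)0));
}

The same pattern carries over to the other element widths in the listing (short, int, long long); only the vector type and the splatted scalar change.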