
Lines Matching refs:v4i32

44 v4i32 zero_m = {0}; \
56 v4i32 max_m = __msa_ldi_w(0xFF); \
76 v4i32 reg0_m, reg1_m, reg2_m, reg3_m, reg4_m; \
77 v4i32 reg5_m, reg6_m, reg7_m; \
82 reg0_m = (v4i32)__msa_ilvr_h((v8i16)zero_m, (v8i16)vec0_m); \
83 reg1_m = (v4i32)__msa_ilvl_h((v8i16)zero_m, (v8i16)vec0_m); \
84 reg2_m = (v4i32)__msa_ilvr_h((v8i16)zero_m, (v8i16)vec1_m); \
85 reg3_m = (v4i32)__msa_ilvl_h((v8i16)zero_m, (v8i16)vec1_m); \
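
The matches at 44-85 are fragments of one row-processing macro: __msa_ldi_w splats the 0xFF clamp constant, and interleaving a zero register with packed halfwords widens each 16-bit lane to 32 bits (ilvr_h yields the low four lanes, ilvl_h the high four). A minimal sketch of that widening step, assuming a little-endian MIPS target; the helper name is invented, and later sketches assume the same header:

    #include <msa.h>

    /* Sketch: zero-extend eight u16 lanes into two v4i32 registers,
       as the macro above does with reg0_m..reg3_m. Interleaving zero
       as the first operand places it in the high halfword of each
       32-bit lane on a little-endian target. */
    static inline void widen_u16x8(v8i16 in, v4i32* lo4, v4i32* hi4) {
      v8i16 zero_m = {0};
      *lo4 = (v4i32)__msa_ilvr_h(zero_m, in); /* lanes 0..3 */
      *hi4 = (v4i32)__msa_ilvl_h(zero_m, in); /* lanes 4..7 */
    }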
379 v4i32 vec_ub, vec_vr, vec_ug, vec_vg, vec_bb, vec_bg, vec_br, vec_yg;
380 v4i32 vec_ubvr, vec_ugvg;
386 vec_ugvg = (v4i32)__msa_ilvev_h((v8i16)vec_vg, (v8i16)vec_ug);
410 v4i32 vec_ub, vec_vr, vec_ug, vec_vg, vec_bb, vec_bg, vec_br, vec_yg;
411 v4i32 vec_ubvr, vec_ugvg;
417 vec_ugvg = (v4i32)__msa_ilvev_h((v8i16)vec_vg, (v8i16)vec_ug);
443 v4i32 vec_ub, vec_vr, vec_ug, vec_vg, vec_bb, vec_bg, vec_br, vec_yg;
444 v4i32 vec_ubvr, vec_ugvg;
445 v4i32 zero = {0};
450 vec_ugvg = (v4i32)__msa_ilvev_h((v8i16)vec_vg, (v8i16)vec_ug);
479 v4i32 vec_ub, vec_vr, vec_ug, vec_vg, vec_bb, vec_bg, vec_br, vec_yg;
480 v4i32 vec_ubvr, vec_ugvg;
491 vec_ugvg = (v4i32)__msa_ilvev_h((v8i16)vec_vg, (v8i16)vec_ug);
532 v4i32 vec_ub, vec_vr, vec_ug, vec_vg, vec_bb, vec_bg, vec_br, vec_yg;
533 v4i32 vec_ubvr, vec_ugvg;
538 vec_ugvg = (v4i32)__msa_ilvev_h((v8i16)vec_vg, (v8i16)vec_ug);
571 v4i32 vec_ub, vec_vr, vec_ug, vec_vg, vec_bb, vec_bg, vec_br, vec_yg;
572 v4i32 vec_ubvr, vec_ugvg;
578 vec_ugvg = (v4i32)__msa_ilvev_h((v8i16)vec_vg, (v8i16)vec_ug);
611 v4i32 vec_ub, vec_vr, vec_ug, vec_vg, vec_bb, vec_bg, vec_br, vec_yg;
612 v4i32 vec_ubvr, vec_ugvg;
618 vec_ugvg = (v4i32)__msa_ilvev_h((v8i16)vec_vg, (v8i16)vec_ug);
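
Each converter block above (379-386, 410-417, ..., 611-618) sets up its YUV-to-RGB coefficients the same way: ilvev_h pairs the U-green and V-green terms so even halfword lanes hold one and odd lanes the other, matching interleaved U/V samples. A hedged sketch; the UG/VG values are illustrative BT.601-style magnitudes, not taken from this listing:

    enum { UG = 25, VG = 52 }; /* illustrative BT.601-style magnitudes */
    v8i16 vec_ug = __msa_fill_h(UG);
    v8i16 vec_vg = __msa_fill_h(VG);
    /* Halfword lanes now read UG, VG, UG, VG, ... so one widening
       multiply against interleaved U/V samples applies both terms. */
    v4i32 vec_ugvg = (v4i32)__msa_ilvev_h(vec_vg, vec_ug);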
1177 reg0 = (v4u32)__msa_srai_w((v4i32)reg0, 16);
1178 reg1 = (v4u32)__msa_srai_w((v4i32)reg1, 16);
1179 reg2 = (v4u32)__msa_srai_w((v4i32)reg2, 16);
1180 reg3 = (v4u32)__msa_srai_w((v4i32)reg3, 16);
1276 reg0 = (v4u32)__msa_srai_w((v4i32)reg0, 24);
1277 reg1 = (v4u32)__msa_srai_w((v4i32)reg1, 24);
1278 reg2 = (v4u32)__msa_srai_w((v4i32)reg2, 24);
1279 reg3 = (v4u32)__msa_srai_w((v4i32)reg3, 24);
1280 reg4 = (v4u32)__msa_srai_w((v4i32)reg4, 24);
1281 reg5 = (v4u32)__msa_srai_w((v4i32)reg5, 24);
1282 reg6 = (v4u32)__msa_srai_w((v4i32)reg6, 24);
1283 reg7 = (v4u32)__msa_srai_w((v4i32)reg7, 24);
1392 reg0 = (v4u32)__msa_srai_w((v4i32)reg0, 24);
1393 reg1 = (v4u32)__msa_srai_w((v4i32)reg1, 24);
1394 reg2 = (v4u32)__msa_srai_w((v4i32)reg2, 24);
1395 reg3 = (v4u32)__msa_srai_w((v4i32)reg3, 24);
1752 res0 = (v4u32)__msa_srai_w((v4i32)res0, 8);
1753 res1 = (v4u32)__msa_srai_w((v4i32)res1, 8);
1754 res2 = (v4u32)__msa_srai_w((v4i32)res2, 8);
1755 res3 = (v4u32)__msa_srai_w((v4i32)res3, 8);
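
All of the srai_w matches (1177-1180, 1276-1283, 1392-1395, 1752-1755) are the descale half of fixed-point arithmetic: the shift count, 8, 16, or 24, equals the number of fractional bits the preceding multiply produced. A sketch of the Q8 case; the scale value is illustrative, and the signed/unsigned casts mirror the reg0..reg7 usage above:

    /* Sketch: scale four 32-bit lanes by a Q8 fixed-point factor,
       then arithmetic-shift right by 8 to drop the fraction. */
    static inline v4u32 scale_q8(v4u32 reg, int factor_q8) {
      v4i32 f = __msa_fill_w(factor_q8); /* e.g. 179 ~ 0.7 * 256 */
      return (v4u32)__msa_srai_w(__msa_mulv_w((v4i32)reg, f), 8);
    }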
2227 v4i32 vec_ub, vec_vr, vec_ug, vec_vg, vec_bb, vec_bg, vec_br, vec_yg;
2228 v4i32 vec_ubvr, vec_ugvg;
2235 vec_ugvg = (v4i32)__msa_ilvev_h((v8i16)vec_vg, (v8i16)vec_ug);
2264 v4i32 vec_ub, vec_vr, vec_ug, vec_vg, vec_bb, vec_bg, vec_br, vec_yg;
2265 v4i32 vec_ubvr, vec_ugvg;
2271 vec_ugvg = (v4i32)__msa_ilvev_h((v8i16)vec_vg, (v8i16)vec_ug);
2300 v4i32 vec_ub, vec_vr, vec_ug, vec_vg, vec_bb, vec_bg, vec_br, vec_yg;
2301 v4i32 vec_ubvr, vec_ugvg;
2309 vec_ugvg = (v4i32)__msa_ilvev_h((v8i16)vec_vg, (v8i16)vec_ug);
2521 src4 = (v16u8)__msa_pckev_w((v4i32)src1, (v4i32)src0);
2522 src5 = (v16u8)__msa_pckev_w((v4i32)src3, (v4i32)src2);
2523 src6 = (v16u8)__msa_pckod_w((v4i32)src1, (v4i32)src0);
2524 src7 = (v16u8)__msa_pckod_w((v4i32)src3, (v4i32)src2);
2539 src4 = (v16u8)__msa_pckev_w((v4i32)src1, (v4i32)src0);
2540 src5 = (v16u8)__msa_pckev_w((v4i32)src3, (v4i32)src2);
2541 src6 = (v16u8)__msa_pckod_w((v4i32)src1, (v4i32)src0);
2542 src7 = (v16u8)__msa_pckod_w((v4i32)src3, (v4i32)src2);
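
In the two blocks above (2521-2524 and 2539-2542) each 32-bit word holds one ARGB pixel, so pckev_w/pckod_w perform a 2:1 pixel deinterleave: even-indexed pixels land in one register, odd-indexed pixels in the other. A sketch under that assumption, with an invented helper name:

    /* Sketch: split eight pixels (two registers, one pixel per word)
       into even- and odd-indexed halves. pckev_w keeps words 0 and 2
       of each source; pckod_w keeps words 1 and 3. */
    static inline void deinterleave_px(v16u8 src0, v16u8 src1,
                                       v16u8* even, v16u8* odd) {
      *even = (v16u8)__msa_pckev_w((v4i32)src1, (v4i32)src0);
      *odd  = (v16u8)__msa_pckod_w((v4i32)src1, (v4i32)src0);
    }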
2666 v4i32 reg0, reg1, reg2, reg3, reg4, reg5, reg6, reg7, reg8, reg9;
2667 v4i32 vec_ub, vec_vr, vec_ug, vec_vg, vec_bb, vec_bg, vec_br, vec_yg;
2677 reg0 = (v4i32)__msa_ilvr_h((v8i16)zero, (v8i16)vec0);
2678 reg1 = (v4i32)__msa_ilvl_h((v8i16)zero, (v8i16)vec0);
2691 reg6 = (v4i32)__msa_ilvr_h((v8i16)zero, (v8i16)vec0);
2692 reg7 = (v4i32)__msa_ilvl_h((v8i16)zero, (v8i16)vec0);
2693 reg8 = (v4i32)__msa_ilvr_h((v8i16)zero, (v8i16)vec1);
2694 reg9 = (v4i32)__msa_ilvl_h((v8i16)zero, (v8i16)vec1);
2729 v4i32 reg0, reg1, reg2, reg3;
2730 v4i32 vec_yg = __msa_fill_w(0x4A35);
2740 reg0 = (v4i32)__msa_ilvr_h(zero, vec0);
2741 reg1 = (v4i32)__msa_ilvl_h(zero, vec0);
2742 reg2 = (v4i32)__msa_ilvr_h(zero, vec1);
2743 reg3 = (v4i32)__msa_ilvl_h(zero, vec1);
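
The 2729-2743 block combines the two earlier patterns: halfwords are widened with the zero-interleave trick, then weighted by a single replicated constant built with fill_w. The value 0x4A35 (18997) matches libyuv's BT.601 luma scale YG, though the multiply itself is outside this listing. A sketch of that shape; the multiply and the Q16 descale are assumptions carried over from the srai_w pattern:

    /* Sketch: widen the low four u16 lanes and apply one fixed-point
       weight. The constant is from the listing; the >> 16 descale is
       assumed, based on the srai_w matches above. */
    static inline v4i32 weigh_lo4(v8i16 vec0) {
      v4i32 vec_yg = __msa_fill_w(0x4A35);
      v8i16 zero = {0};
      v4i32 reg0 = (v4i32)__msa_ilvr_h(zero, vec0);
      return __msa_srai_w(__msa_mulv_w(reg0, vec_yg), 16);
    }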
2805 v4i32 vec_ub, vec_vr, vec_ug, vec_vg, vec_bb, vec_bg, vec_br, vec_yg;
2806 v4i32 vec_ubvr, vec_ugvg;
2812 vec_ugvg = (v4i32)__msa_ilvev_h((v8i16)vec_vg, (v8i16)vec_ug);
2833 v4i32 vec_ub, vec_vr, vec_ug, vec_vg, vec_bb, vec_bg, vec_br, vec_yg;
2834 v4i32 vec_ubvr, vec_ugvg;
2840 vec_ugvg = (v4i32)__msa_ilvev_h((v8i16)vec_vg, (v8i16)vec_ug);