Home | History | Annotate | Download | only in priv

Lines Matching refs:yy

142 static inline Int qadd32S ( Int xx, Int yy ) 
144 Long t = ((Long)xx) + ((Long)yy);
152 static inline Short qadd16S ( Short xx, Short yy )
154 Int t = ((Int)xx) + ((Int)yy);
160 static inline Char qadd8S ( Char xx, Char yy )
162 Int t = ((Int)xx) + ((Int)yy);
168 static inline UShort qadd16U ( UShort xx, UShort yy )
170 UInt t = ((UInt)xx) + ((UInt)yy);
175 static inline UChar qadd8U ( UChar xx, UChar yy )
177 UInt t = ((UInt)xx) + ((UInt)yy);
182 static inline Int qsub32S ( Int xx, Int yy )
184 Long t = ((Long)xx) - ((Long)yy);
192 static inline Short qsub16S ( Short xx, Short yy )
194 Int t = ((Int)xx) - ((Int)yy);
200 static inline Char qsub8S ( Char xx, Char yy )
202 Int t = ((Int)xx) - ((Int)yy);
208 static inline UShort qsub16U ( UShort xx, UShort yy )
210 Int t = ((Int)xx) - ((Int)yy);
216 static inline UChar qsub8U ( UChar xx, UChar yy )
218 Int t = ((Int)xx) - ((Int)yy);
224 static inline Short mul16 ( Short xx, Short yy )
226 Int t = ((Int)xx) * ((Int)yy);
230 static inline Int mul32 ( Int xx, Int yy )
232 Int t = ((Int)xx) * ((Int)yy);
236 static inline Short mulhi16S ( Short xx, Short yy )
238 Int t = ((Int)xx) * ((Int)yy);
243 static inline UShort mulhi16U ( UShort xx, UShort yy )
245 UInt t = ((UInt)xx) * ((UInt)yy);
250 static inline UInt cmpeq32 ( UInt xx, UInt yy )
252 return xx==yy ? 0xFFFFFFFF : 0;
255 static inline UShort cmpeq16 ( UShort xx, UShort yy )
257 return toUShort(xx==yy ? 0xFFFF : 0);
260 static inline UChar cmpeq8 ( UChar xx, UChar yy )
262 return toUChar(xx==yy ? 0xFF : 0);
265 static inline UInt cmpgt32S ( Int xx, Int yy )
267 return xx>yy ? 0xFFFFFFFF : 0;
270 static inline UShort cmpgt16S ( Short xx, Short yy )
272 return toUShort(xx>yy ? 0xFFFF : 0);
275 static inline UChar cmpgt8S ( Char xx, Char yy )
277 return toUChar(xx>yy ? 0xFF : 0);
372 static inline UChar avg8U ( UChar xx, UChar yy )
375 UInt yyi = (UInt)yy;
380 static inline UShort avg16U ( UShort xx, UShort yy )
383 UInt yyi = (UInt)yy;
388 static inline Short max16S ( Short xx, Short yy )
390 return toUShort((xx > yy) ? xx : yy);
393 static inline UChar max8U ( UChar xx, UChar yy )
395 return toUChar((xx > yy) ? xx : yy);
398 static inline Short min16S ( Short xx, Short yy )
400 return toUShort((xx < yy) ? xx : yy);
403 static inline UChar min8U ( UChar xx, UChar yy )
405 return toUChar((xx < yy) ? xx : yy);
408 static inline UShort hadd16U ( UShort xx, UShort yy )
411 UInt yyi = (UInt)yy;
416 static inline Short hadd16S ( Short xx, Short yy )
419 Int yyi = (Int)yy;
424 static inline UShort hsub16U ( UShort xx, UShort yy )
427 UInt yyi = (UInt)yy;
432 static inline Short hsub16S ( Short xx, Short yy )
435 Int yyi = (Int)yy;
440 static inline UChar hadd8U ( UChar xx, UChar yy )
443 UInt yyi = (UInt)yy;
448 static inline Char hadd8S ( Char xx, Char yy )
451 Int yyi = (Int)yy;
456 static inline UChar hsub8U ( UChar xx, UChar yy )
459 UInt yyi = (UInt)yy;
464 static inline Char hsub8S ( Char xx, Char yy )
467 Int yyi = (Int)yy;
472 static inline UInt absdiff8U ( UChar xx, UChar yy )
475 UInt yyu = (UChar)yy;
486 ULong h_generic_calc_Add32x2 ( ULong xx, ULong yy )
489 sel32x2_1(xx) + sel32x2_1(yy),
490 sel32x2_0(xx) + sel32x2_0(yy)
494 ULong h_generic_calc_Add16x4 ( ULong xx, ULong yy )
497 toUShort( sel16x4_3(xx) + sel16x4_3(yy) ),
498 toUShort( sel16x4_2(xx) + sel16x4_2(yy) ),
499 toUShort( sel16x4_1(xx) + sel16x4_1(yy) ),
500 toUShort( sel16x4_0(xx) + sel16x4_0(yy) )
504 ULong h_generic_calc_Add8x8 ( ULong xx, ULong yy )
507 toUChar( sel8x8_7(xx) + sel8x8_7(yy) ),
508 toUChar( sel8x8_6(xx) + sel8x8_6(yy) ),
509 toUChar( sel8x8_5(xx) + sel8x8_5(yy) ),
510 toUChar( sel8x8_4(xx) + sel8x8_4(yy) ),
511 toUChar( sel8x8_3(xx) + sel8x8_3(yy) ),
512 toUChar( sel8x8_2(xx) + sel8x8_2(yy) ),
513 toUChar( sel8x8_1(xx) + sel8x8_1(yy) ),
514 toUChar( sel8x8_0(xx) + sel8x8_0(yy) )
520 ULong h_generic_calc_QAdd16Sx4 ( ULong xx, ULong yy )
523 qadd16S( sel16x4_3(xx), sel16x4_3(yy) ),
524 qadd16S( sel16x4_2(xx), sel16x4_2(yy) ),
525 qadd16S( sel16x4_1(xx), sel16x4_1(yy) ),
526 qadd16S( sel16x4_0(xx), sel16x4_0(yy) )
530 ULong h_generic_calc_QAdd8Sx8 ( ULong xx, ULong yy )
533 qadd8S( sel8x8_7(xx), sel8x8_7(yy) ),
534 qadd8S( sel8x8_6(xx), sel8x8_6(yy) ),
535 qadd8S( sel8x8_5(xx), sel8x8_5(yy) ),
536 qadd8S( sel8x8_4(xx), sel8x8_4(yy) ),
537 qadd8S( sel8x8_3(xx), sel8x8_3(yy) ),
538 qadd8S( sel8x8_2(xx), sel8x8_2(yy) ),
539 qadd8S( sel8x8_1(xx), sel8x8_1(yy) ),
540 qadd8S( sel8x8_0(xx), sel8x8_0(yy) )
544 ULong h_generic_calc_QAdd16Ux4 ( ULong xx, ULong yy )
547 qadd16U( sel16x4_3(xx), sel16x4_3(yy) ),
548 qadd16U( sel16x4_2(xx), sel16x4_2(yy) ),
549 qadd16U( sel16x4_1(xx), sel16x4_1(yy) ),
550 qadd16U( sel16x4_0(xx), sel16x4_0(yy) )
554 ULong h_generic_calc_QAdd8Ux8 ( ULong xx, ULong yy )
557 qadd8U( sel8x8_7(xx), sel8x8_7(yy) ),
558 qadd8U( sel8x8_6(xx), sel8x8_6(yy) ),
559 qadd8U( sel8x8_5(xx), sel8x8_5(yy) ),
560 qadd8U( sel8x8_4(xx), sel8x8_4(yy) ),
561 qadd8U( sel8x8_3(xx), sel8x8_3(yy) ),
562 qadd8U( sel8x8_2(xx), sel8x8_2(yy) ),
563 qadd8U( sel8x8_1(xx), sel8x8_1(yy) ),
564 qadd8U( sel8x8_0(xx), sel8x8_0(yy) )
570 ULong h_generic_calc_Sub32x2 ( ULong xx, ULong yy )
573 sel32x2_1(xx) - sel32x2_1(yy),
574 sel32x2_0(xx) - sel32x2_0(yy)
578 ULong h_generic_calc_Sub16x4 ( ULong xx, ULong yy )
581 toUShort( sel16x4_3(xx) - sel16x4_3(yy) ),
582 toUShort( sel16x4_2(xx) - sel16x4_2(yy) ),
583 toUShort( sel16x4_1(xx) - sel16x4_1(yy) ),
584 toUShort( sel16x4_0(xx) - sel16x4_0(yy) )
588 ULong h_generic_calc_Sub8x8 ( ULong xx, ULong yy )
591 toUChar( sel8x8_7(xx) - sel8x8_7(yy) ),
592 toUChar( sel8x8_6(xx) - sel8x8_6(yy) ),
593 toUChar( sel8x8_5(xx) - sel8x8_5(yy) ),
594 toUChar( sel8x8_4(xx) - sel8x8_4(yy) ),
595 toUChar( sel8x8_3(xx) - sel8x8_3(yy) ),
596 toUChar( sel8x8_2(xx) - sel8x8_2(yy) ),
597 toUChar( sel8x8_1(xx) - sel8x8_1(yy) ),
598 toUChar( sel8x8_0(xx) - sel8x8_0(yy) )
604 ULong h_generic_calc_QSub16Sx4 ( ULong xx, ULong yy )
607 qsub16S( sel16x4_3(xx), sel16x4_3(yy) ),
608 qsub16S( sel16x4_2(xx), sel16x4_2(yy) ),
609 qsub16S( sel16x4_1(xx), sel16x4_1(yy) ),
610 qsub16S( sel16x4_0(xx), sel16x4_0(yy) )
614 ULong h_generic_calc_QSub8Sx8 ( ULong xx, ULong yy )
617 qsub8S( sel8x8_7(xx), sel8x8_7(yy) ),
618 qsub8S( sel8x8_6(xx), sel8x8_6(yy) ),
619 qsub8S( sel8x8_5(xx), sel8x8_5(yy) ),
620 qsub8S( sel8x8_4(xx), sel8x8_4(yy) ),
621 qsub8S( sel8x8_3(xx), sel8x8_3(yy) ),
622 qsub8S( sel8x8_2(xx), sel8x8_2(yy) ),
623 qsub8S( sel8x8_1(xx), sel8x8_1(yy) ),
624 qsub8S( sel8x8_0(xx), sel8x8_0(yy) )
628 ULong h_generic_calc_QSub16Ux4 ( ULong xx, ULong yy )
631 qsub16U( sel16x4_3(xx), sel16x4_3(yy) ),
632 qsub16U( sel16x4_2(xx), sel16x4_2(yy) ),
633 qsub16U( sel16x4_1(xx), sel16x4_1(yy) ),
634 qsub16U( sel16x4_0(xx), sel16x4_0(yy) )
638 ULong h_generic_calc_QSub8Ux8 ( ULong xx, ULong yy )
641 qsub8U( sel8x8_7(xx), sel8x8_7(yy) ),
642 qsub8U( sel8x8_6(xx), sel8x8_6(yy) ),
643 qsub8U( sel8x8_5(xx), sel8x8_5(yy) ),
644 qsub8U( sel8x8_4(xx), sel8x8_4(yy) ),
645 qsub8U( sel8x8_3(xx), sel8x8_3(yy) ),
646 qsub8U( sel8x8_2(xx), sel8x8_2(yy) ),
647 qsub8U( sel8x8_1(xx), sel8x8_1(yy) ),
648 qsub8U( sel8x8_0(xx), sel8x8_0(yy) )
654 ULong h_generic_calc_Mul16x4 ( ULong xx, ULong yy )
657 mul16( sel16x4_3(xx), sel16x4_3(yy) ),
658 mul16( sel16x4_2(xx), sel16x4_2(yy) ),
659 mul16( sel16x4_1(xx), sel16x4_1(yy) ),
660 mul16( sel16x4_0(xx), sel16x4_0(yy) )
664 ULong h_generic_calc_Mul32x2 ( ULong xx, ULong yy )
667 mul32( sel32x2_1(xx), sel32x2_1(yy) ),
668 mul32( sel32x2_0(xx), sel32x2_0(yy) )
672 ULong h_generic_calc_MulHi16Sx4 ( ULong xx, ULong yy )
675 mulhi16S( sel16x4_3(xx), sel16x4_3(yy) ),
676 mulhi16S( sel16x4_2(xx), sel16x4_2(yy) ),
677 mulhi16S( sel16x4_1(xx), sel16x4_1(yy) ),
678 mulhi16S( sel16x4_0(xx), sel16x4_0(yy) )
682 ULong h_generic_calc_MulHi16Ux4 ( ULong xx, ULong yy )
685 mulhi16U( sel16x4_3(xx), sel16x4_3(yy) ),
686 mulhi16U( sel16x4_2(xx), sel16x4_2(yy) ),
687 mulhi16U( sel16x4_1(xx), sel16x4_1(yy) ),
688 mulhi16U( sel16x4_0(xx), sel16x4_0(yy) )
694 ULong h_generic_calc_CmpEQ32x2 ( ULong xx, ULong yy )
697 cmpeq32( sel32x2_1(xx), sel32x2_1(yy) ),
698 cmpeq32( sel32x2_0(xx), sel32x2_0(yy) )
702 ULong h_generic_calc_CmpEQ16x4 ( ULong xx, ULong yy )
705 cmpeq16( sel16x4_3(xx), sel16x4_3(yy) ),
706 cmpeq16( sel16x4_2(xx), sel16x4_2(yy) ),
707 cmpeq16( sel16x4_1(xx), sel16x4_1(yy) ),
708 cmpeq16( sel16x4_0(xx), sel16x4_0(yy) )
712 ULong h_generic_calc_CmpEQ8x8 ( ULong xx, ULong yy )
715 cmpeq8( sel8x8_7(xx), sel8x8_7(yy) ),
716 cmpeq8( sel8x8_6(xx), sel8x8_6(yy) ),
717 cmpeq8( sel8x8_5(xx), sel8x8_5(yy) ),
718 cmpeq8( sel8x8_4(xx), sel8x8_4(yy) ),
719 cmpeq8( sel8x8_3(xx), sel8x8_3(yy) ),
720 cmpeq8( sel8x8_2(xx), sel8x8_2(yy) ),
721 cmpeq8( sel8x8_1(xx), sel8x8_1(yy) ),
722 cmpeq8( sel8x8_0(xx), sel8x8_0(yy) )
726 ULong h_generic_calc_CmpGT32Sx2 ( ULong xx, ULong yy )
729 cmpgt32S( sel32x2_1(xx), sel32x2_1(yy) ),
730 cmpgt32S( sel32x2_0(xx), sel32x2_0(yy) )
734 ULong h_generic_calc_CmpGT16Sx4 ( ULong xx, ULong yy )
737 cmpgt16S( sel16x4_3(xx), sel16x4_3(yy) ),
738 cmpgt16S( sel16x4_2(xx), sel16x4_2(yy) ),
739 cmpgt16S( sel16x4_1(xx), sel16x4_1(yy) ),
740 cmpgt16S( sel16x4_0(xx), sel16x4_0(yy) )
744 ULong h_generic_calc_CmpGT8Sx8 ( ULong xx, ULong yy )
747 cmpgt8S( sel8x8_7(xx), sel8x8_7(yy) ),
748 cmpgt8S( sel8x8_6(xx), sel8x8_6(yy) ),
749 cmpgt8S( sel8x8_5(xx), sel8x8_5(yy) ),
750 cmpgt8S( sel8x8_4(xx), sel8x8_4(yy) ),
751 cmpgt8S( sel8x8_3(xx), sel8x8_3(yy) ),
752 cmpgt8S( sel8x8_2(xx), sel8x8_2(yy) ),
753 cmpgt8S( sel8x8_1(xx), sel8x8_1(yy) ),
754 cmpgt8S( sel8x8_0(xx), sel8x8_0(yy) )
1098 ULong h_generic_calc_Avg8Ux8 ( ULong xx, ULong yy )
1101 avg8U( sel8x8_7(xx), sel8x8_7(yy) ),
1102 avg8U( sel8x8_6(xx), sel8x8_6(yy) ),
1103 avg8U( sel8x8_5(xx), sel8x8_5(yy) ),
1104 avg8U( sel8x8_4(xx), sel8x8_4(yy) ),
1105 avg8U( sel8x8_3(xx), sel8x8_3(yy) ),
1106 avg8U( sel8x8_2(xx), sel8x8_2(yy) ),
1107 avg8U( sel8x8_1(xx), sel8x8_1(yy) ),
1108 avg8U( sel8x8_0(xx), sel8x8_0(yy) )
1112 ULong h_generic_calc_Avg16Ux4 ( ULong xx, ULong yy )
1115 avg16U( sel16x4_3(xx), sel16x4_3(yy) ),
1116 avg16U( sel16x4_2(xx), sel16x4_2(yy) ),
1117 avg16U( sel16x4_1(xx), sel16x4_1(yy) ),
1118 avg16U( sel16x4_0(xx), sel16x4_0(yy) )
1124 ULong h_generic_calc_Max16Sx4 ( ULong xx, ULong yy )
1127 max16S( sel16x4_3(xx), sel16x4_3(yy) ),
1128 max16S( sel16x4_2(xx), sel16x4_2(yy) ),
1129 max16S( sel16x4_1(xx), sel16x4_1(yy) ),
1130 max16S( sel16x4_0(xx), sel16x4_0(yy) )
1134 ULong h_generic_calc_Max8Ux8 ( ULong xx, ULong yy )
1137 max8U( sel8x8_7(xx), sel8x8_7(yy) ),
1138 max8U( sel8x8_6(xx), sel8x8_6(yy) ),
1139 max8U( sel8x8_5(xx), sel8x8_5(yy) ),
1140 max8U( sel8x8_4(xx), sel8x8_4(yy) ),
1141 max8U( sel8x8_3(xx), sel8x8_3(yy) ),
1142 max8U( sel8x8_2(xx), sel8x8_2(yy) ),
1143 max8U( sel8x8_1(xx), sel8x8_1(yy) ),
1144 max8U( sel8x8_0(xx), sel8x8_0(yy) )
1148 ULong h_generic_calc_Min16Sx4 ( ULong xx, ULong yy )
1151 min16S( sel16x4_3(xx), sel16x4_3(yy) ),
1152 min16S( sel16x4_2(xx), sel16x4_2(yy) ),
1153 min16S( sel16x4_1(xx), sel16x4_1(yy) ),
1154 min16S( sel16x4_0(xx), sel16x4_0(yy) )
1158 ULong h_generic_calc_Min8Ux8 ( ULong xx, ULong yy )
1161 min8U( sel8x8_7(xx), sel8x8_7(yy) ),
1162 min8U( sel8x8_6(xx), sel8x8_6(yy) ),
1163 min8U( sel8x8_5(xx), sel8x8_5(yy) ),
1164 min8U( sel8x8_4(xx), sel8x8_4(yy) ),
1165 min8U( sel8x8_3(xx), sel8x8_3(yy) ),
1166 min8U( sel8x8_2(xx), sel8x8_2(yy) ),
1167 min8U( sel8x8_1(xx), sel8x8_1(yy) ),
1168 min8U( sel8x8_0(xx), sel8x8_0(yy) )
1214 UInt h_generic_calc_Add16x2 ( UInt xx, UInt yy )
1216 return mk16x2( sel16x2_1(xx) + sel16x2_1(yy),
1217 sel16x2_0(xx) + sel16x2_0(yy) );
1220 UInt h_generic_calc_Sub16x2 ( UInt xx, UInt yy )
1222 return mk16x2( sel16x2_1(xx) - sel16x2_1(yy),
1223 sel16x2_0(xx) - sel16x2_0(yy) );
1226 UInt h_generic_calc_HAdd16Ux2 ( UInt xx, UInt yy )
1228 return mk16x2( hadd16U( sel16x2_1(xx), sel16x2_1(yy) ),
1229 hadd16U( sel16x2_0(xx), sel16x2_0(yy) ) );
1232 UInt h_generic_calc_HAdd16Sx2 ( UInt xx, UInt yy )
1234 return mk16x2( hadd16S( sel16x2_1(xx), sel16x2_1(yy) ),
1235 hadd16S( sel16x2_0(xx), sel16x2_0(yy) ) );
1238 UInt h_generic_calc_HSub16Ux2 ( UInt xx, UInt yy )
1240 return mk16x2( hsub16U( sel16x2_1(xx), sel16x2_1(yy) ),
1241 hsub16U( sel16x2_0(xx), sel16x2_0(yy) ) );
1244 UInt h_generic_calc_HSub16Sx2 ( UInt xx, UInt yy )
1246 return mk16x2( hsub16S( sel16x2_1(xx), sel16x2_1(yy) ),
1247 hsub16S( sel16x2_0(xx), sel16x2_0(yy) ) );
1250 UInt h_generic_calc_QAdd16Ux2 ( UInt xx, UInt yy )
1252 return mk16x2( qadd16U( sel16x2_1(xx), sel16x2_1(yy) ),
1253 qadd16U( sel16x2_0(xx), sel16x2_0(yy) ) );
1256 UInt h_generic_calc_QAdd16Sx2 ( UInt xx, UInt yy )
1258 return mk16x2( qadd16S( sel16x2_1(xx), sel16x2_1(yy) ),
1259 qadd16S( sel16x2_0(xx), sel16x2_0(yy) ) );
1262 UInt h_generic_calc_QSub16Ux2 ( UInt xx, UInt yy )
1264 return mk16x2( qsub16U( sel16x2_1(xx), sel16x2_1(yy) ),
1265 qsub16U( sel16x2_0(xx), sel16x2_0(yy) ) );
1268 UInt h_generic_calc_QSub16Sx2 ( UInt xx, UInt yy )
1270 return mk16x2( qsub16S( sel16x2_1(xx), sel16x2_1(yy) ),
1271 qsub16S( sel16x2_0(xx), sel16x2_0(yy) ) );
1276 UInt h_generic_calc_Add8x4 ( UInt xx, UInt yy )
1279 sel8x4_3(xx) + sel8x4_3(yy),
1280 sel8x4_2(xx) + sel8x4_2(yy),
1281 sel8x4_1(xx) + sel8x4_1(yy),
1282 sel8x4_0(xx) + sel8x4_0(yy)
1286 UInt h_generic_calc_Sub8x4 ( UInt xx, UInt yy )
1289 sel8x4_3(xx) - sel8x4_3(yy),
1290 sel8x4_2(xx) - sel8x4_2(yy),
1291 sel8x4_1(xx) - sel8x4_1(yy),
1292 sel8x4_0(xx) - sel8x4_0(yy)
1296 UInt h_generic_calc_HAdd8Ux4 ( UInt xx, UInt yy )
1299 hadd8U( sel8x4_3(xx), sel8x4_3(yy) ),
1300 hadd8U( sel8x4_2(xx), sel8x4_2(yy) ),
1301 hadd8U( sel8x4_1(xx), sel8x4_1(yy) ),
1302 hadd8U( sel8x4_0(xx), sel8x4_0(yy) )
1306 UInt h_generic_calc_HAdd8Sx4 ( UInt xx, UInt yy )
1309 hadd8S( sel8x4_3(xx), sel8x4_3(yy) ),
1310 hadd8S( sel8x4_2(xx), sel8x4_2(yy) ),
1311 hadd8S( sel8x4_1(xx), sel8x4_1(yy) ),
1312 hadd8S( sel8x4_0(xx), sel8x4_0(yy) )
1316 UInt h_generic_calc_HSub8Ux4 ( UInt xx, UInt yy )
1319 hsub8U( sel8x4_3(xx), sel8x4_3(yy) ),
1320 hsub8U( sel8x4_2(xx), sel8x4_2(yy) ),
1321 hsub8U( sel8x4_1(xx), sel8x4_1(yy) ),
1322 hsub8U( sel8x4_0(xx), sel8x4_0(yy) )
1326 UInt h_generic_calc_HSub8Sx4 ( UInt xx, UInt yy )
1329 hsub8S( sel8x4_3(xx), sel8x4_3(yy) ),
1330 hsub8S( sel8x4_2(xx), sel8x4_2(yy) ),
1331 hsub8S( sel8x4_1(xx), sel8x4_1(yy) ),
1332 hsub8S( sel8x4_0(xx), sel8x4_0(yy) )
1336 UInt h_generic_calc_QAdd8Ux4 ( UInt xx, UInt yy )
1339 qadd8U( sel8x4_3(xx), sel8x4_3(yy) ),
1340 qadd8U( sel8x4_2(xx), sel8x4_2(yy) ),
1341 qadd8U( sel8x4_1(xx), sel8x4_1(yy) ),
1342 qadd8U( sel8x4_0(xx), sel8x4_0(yy) )
1346 UInt h_generic_calc_QAdd8Sx4 ( UInt xx, UInt yy )
1349 qadd8S( sel8x4_3(xx), sel8x4_3(yy) ),
1350 qadd8S( sel8x4_2(xx), sel8x4_2(yy) ),
1351 qadd8S( sel8x4_1(xx), sel8x4_1(yy) ),
1352 qadd8S( sel8x4_0(xx), sel8x4_0(yy) )
1356 UInt h_generic_calc_QSub8Ux4 ( UInt xx, UInt yy )
1359 qsub8U( sel8x4_3(xx), sel8x4_3(yy) ),
1360 qsub8U( sel8x4_2(xx), sel8x4_2(yy) ),
1361 qsub8U( sel8x4_1(xx), sel8x4_1(yy) ),
1362 qsub8U( sel8x4_0(xx), sel8x4_0(yy) )
1366 UInt h_generic_calc_QSub8Sx4 ( UInt xx, UInt yy )
1369 qsub8S( sel8x4_3(xx), sel8x4_3(yy) ),
1370 qsub8S( sel8x4_2(xx), sel8x4_2(yy) ),
1371 qsub8S( sel8x4_1(xx), sel8x4_1(yy) ),
1372 qsub8S( sel8x4_0(xx), sel8x4_0(yy) )
1394 UInt h_generic_calc_Sad8Ux4 ( UInt xx, UInt yy )
1396 return absdiff8U( sel8x4_3(xx), sel8x4_3(yy) )
1397 + absdiff8U( sel8x4_2(xx), sel8x4_2(yy) )
1398 + absdiff8U( sel8x4_1(xx), sel8x4_1(yy) )
1399 + absdiff8U( sel8x4_0(xx), sel8x4_0(yy) );
1402 UInt h_generic_calc_QAdd32S ( UInt xx, UInt yy )
1404 return qadd32S( xx, yy );
1407 UInt h_generic_calc_QSub32S ( UInt xx, UInt yy )
1409 return qsub32S( xx, yy );