Lines Matching refs:r8
231 #.set r8,8
275 # Freely use registers r5,r6,r7,r8,r9,r10,r11 as follows:
278 # r7,r8 are the results of the 32x32 giving 64 bit multiply.
298 $UMULH r8,r5,r6
300 addc r7,r7,r7 # compute (r7,r8)=2*(r7,r8)
301 adde r8,r8,r8
306 addze r11,r8 # r8 added to r11, which is 0
312 $UMULH r8,r6,r6
314 adde r9,r8,r9
319 $UMULH r8,r5,r6
322 adde r8,r8,r8
326 adde r9,r8,r9
332 $UMULH r8,r5,r6
334 adde r8,r8,r8
338 adde r10,r8,r10
344 $UMULH r8,r5,r6
347 adde r8,r8,r8
350 adde r10,r8,r10
355 $UMULH r8,r6,r6
357 adde r11,r8,r11
362 $UMULH r8,r5,r6
364 adde r8,r8,r8
368 adde r11,r8,r11
374 $UMULH r8,r5,r6
376 adde r8,r8,r8
380 adde r9,r8,r9
385 $UMULH r8,r6,r6
387 adde r10,r8,r10
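
The lines above all come from bn_sqr_comba4. The repeated pattern (for example lines 298-306) is the off-diagonal squaring step: $UMULL/$UMULH place the 64-bit product of two words in the (r7,r8) pair, addc/adde double it in place, and the result is folded into the three-word accumulator rotating through r9, r10, r11. The interleaved $UMULH r8,r6,r6 lines (e.g. 312-314) are the diagonal terms, which are added once without doubling. A minimal C sketch of the doubled step, assuming 32-bit words; the helper name sqr_add_c2 and the c1/c2/c3 parameters are illustrative, not taken from the matched lines:

    #include <stdint.h>

    /* Double the off-diagonal product a*b and add it into the
     * three-word column accumulator (c1,c2,c3). */
    static void sqr_add_c2(uint32_t a, uint32_t b,
                           uint32_t *c1, uint32_t *c2, uint32_t *c3)
    {
        uint64_t t  = (uint64_t)a * b;      /* $UMULL r7 / $UMULH r8 */
        uint32_t lo = (uint32_t)t;          /* r7                    */
        uint32_t hi = (uint32_t)(t >> 32);  /* r8                    */
        uint32_t c  = hi >> 31;             /* bit lost by doubling  */

        hi = (hi << 1) | (lo >> 31);        /* adde r8,r8,r8         */
        lo <<= 1;                           /* addc r7,r7,r7         */

        uint64_t s = (uint64_t)*c1 + lo;    /* add low word          */
        *c1 = (uint32_t)s;
        s = (uint64_t)*c2 + hi + (s >> 32); /* add high word + carry */
        *c2 = (uint32_t)s;
        *c3 += c + (uint32_t)(s >> 32);     /* top carries (addze)   */
    }
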
412 # Freely use registers r5,r6,r7,r8,r9,r10,r11 as follows:
415 # r7,r8 are the results of the 32x32 giving 64 bit multiply.
433 $UMULH r8,r5,r6
436 adde r11,r8,r11 # (r8,r7) to the three register accumulator
440 adde r11,r8,r11 # (r8,r7) to the three register accumulator
447 $UMULH r8,r6,r6
449 adde r9,r8,r9
454 $UMULH r8,r5,r6
457 adde r9,r8,r9
461 adde r9,r8,r9
468 $UMULH r8,r5,r6
471 adde r10,r8,r10
475 adde r10,r8,r10
481 $UMULH r8,r5,r6
484 adde r10,r8,r10
488 adde r10,r8,r10
494 $UMULH r8,r6,r6
497 adde r11,r8,r11
502 $UMULH r8,r5,r6
505 adde r11,r8,r11
509 adde r11,r8,r11
515 $UMULH r8,r5,r6
518 adde r11,r8,r11
522 adde r11,r8,r11
528 $UMULH r8,r5,r6
531 adde r9,r8,r9
535 adde r9,r8,r9
541 $UMULH r8,r5,r6
544 adde r9,r8,r9
548 adde r9,r8,r9
554 $UMULH r8,r5,r6
557 adde r9,r8,r9
561 adde r9,r8,r9
566 $UMULH r8,r6,r6
568 adde r10,r8,r10
573 $UMULH r8,r5,r6
576 adde r10,r8,r10
580 adde r10,r8,r10
586 $UMULH r8,r5,r6
589 adde r10,r8,r10
593 adde r10,r8,r10
599 $UMULH r8,r5,r6
601 adde r10,r8,r10
604 adde r10,r8,r10
610 $UMULH r8,r5,r6
613 adde r11,r8,r11
616 adde r11,r8,r11
622 $UMULH r8,r5,r6
625 adde r11,r8,r11
628 adde r11,r8,r11
634 $UMULH r8,r5,r6
636 adde r11,r8,r11
639 adde r11,r8,r11
645 $UMULH r8,r5,r6
648 adde r11,r8,r11
651 adde r11,r8,r11
656 $UMULH r8,r6,r6
658 adde r9,r8,r9
663 $UMULH r8,r5,r6
665 adde r9,r8,r9
668 adde r9,r8,r9
674 $UMULH r8,r5,r6
676 adde r9,r8,r9
680 adde r9,r8,r9
686 $UMULH r8,r5,r6
688 adde r9,r8,r9
691 adde r9,r8,r9
697 $UMULH r8,r5,r6
700 adde r10,r8,r10
703 adde r10,r8,r10
709 $UMULH r8,r5,r6
711 adde r10,r8,r10
714 adde r10,r8,r10
720 $UMULH r8,r5,r6
722 adde r10,r8,r10
725 adde r10,r8,r10
730 $UMULH r8,r6,r6
732 adde r11,r8,r11
737 $UMULH r8,r5,r6
739 adde r11,r8,r11
742 adde r11,r8,r11
748 $UMULH r8,r5,r6
750 adde r11,r8,r11
753 adde r11,r8,r11
759 $UMULH r8,r5,r6
761 adde r9,r8,r9
764 adde r9,r8,r9
770 $UMULH r8,r5,r6
772 adde r9,r8,r9
775 adde r9,r8,r9
780 $UMULH r8,r6,r6
782 adde r10,r8,r10
787 $UMULH r8,r5,r6
789 adde r10,r8,r10
792 adde r10,r8,r10
799 $UMULH r8,r5,r6
801 adde r11,r8,r11
804 adde r11,r8,r11
809 $UMULH r8,r6,r6
811 adde r9,r8,r9
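
bn_sqr_comba8, which the lines above come from, reaches the same doubled term differently: as the comments at lines 436 and 440 indicate, it adds the (r8,r7) product into the three-register accumulator twice instead of doubling it in place. A sketch under the same assumptions (32-bit words, illustrative names):

    /* Add the product a*b into (c1,c2,c3) twice; two carried adds
     * replace the shift-and-carry doubling used in bn_sqr_comba4. */
    static void sqr_add_c2_twice(uint32_t a, uint32_t b,
                                 uint32_t *c1, uint32_t *c2, uint32_t *c3)
    {
        uint64_t t = (uint64_t)a * b;           /* $UMULL r7 / $UMULH r8 */
        for (int k = 0; k < 2; k++) {
            uint64_t s = (uint64_t)*c1 + (uint32_t)t;             /* addc  */
            *c1 = (uint32_t)s;
            s = (uint64_t)*c2 + (uint32_t)(t >> 32) + (s >> 32);  /* adde  */
            *c2 = (uint32_t)s;
            *c3 += (uint32_t)(s >> 32);                           /* addze */
        }
    }
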
837 # r8, r9 are the results of the 32x32 giving 64 bit multiply.
849 $UMULL r8,r6,r7
851 addc r11,r8,r11
857 $UMULL r8,r6,r7
859 addc r11,r8,r11
865 $UMULL r8,r6,r7
867 addc r12,r8,r12
873 $UMULL r8,r6,r7
875 addc r12,r8,r12
881 $UMULL r8,r6,r7
883 addc r12,r8,r12
889 $UMULL r8,r6,r7
891 addc r10,r8,r10
897 $UMULL r8,r6,r7
899 addc r10,r8,r10
905 $UMULL r8,r6,r7
907 addc r10,r8,r10
913 $UMULL r8,r6,r7
915 addc r10,r8,r10
921 $UMULL r8,r6,r7
923 addc r11,r8,r11
929 $UMULL r8,r6,r7
931 addc r11,r8,r11
937 $UMULL r8,r6,r7
939 addc r11,r8,r11
945 $UMULL r8,r6,r7
947 addc r12,r8,r12
953 $UMULL r8,r6,r7
955 addc r12,r8,r12
961 $UMULL r8,r6,r7
963 addc r10,r8,r10
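
Every $UMULL/addc pair matched in bn_mul_comba4 above, and again in bn_mul_comba8 below, is one column term of the comba multiply: the low product word is added into the current column with addc, while the $UMULH half and the carry ripple into the next two words on adde/addze lines that do not mention r8 and so are not shown here. A hedged sketch of one term (the name mul_add_c is conventional, not from the listing):

    /* Accumulate one 32x32 product into the three-word column
     * accumulator (c1,c2,c3). */
    static void mul_add_c(uint32_t a, uint32_t b,
                          uint32_t *c1, uint32_t *c2, uint32_t *c3)
    {
        uint64_t t = (uint64_t)a * b;              /* $UMULL r8 / $UMULH r9 */
        uint64_t s = (uint64_t)*c1 + (uint32_t)t;  /* addc rX,r8,rX         */
        *c1 = (uint32_t)s;
        s = (uint64_t)*c2 + (uint32_t)(t >> 32) + (s >> 32);       /* adde  */
        *c2 = (uint32_t)s;
        *c3 += (uint32_t)(s >> 32);                                /* addze */
    }

The only visible difference between the two routines is the operand order of addc (addc rX,r8,rX versus addc rX,rX,r8), which does not change the sum.
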
988 # r8, r9 are the results of the 32x32 giving 64 bit multiply.
1001 $UMULL r8,r6,r7
1003 addc r11,r11,r8
1009 $UMULL r8,r6,r7
1011 addc r11,r11,r8
1017 $UMULL r8,r6,r7
1019 addc r12,r12,r8
1025 $UMULL r8,r6,r7
1027 addc r12,r12,r8
1033 $UMULL r8,r6,r7
1035 addc r12,r12,r8
1041 $UMULL r8,r6,r7
1043 addc r10,r10,r8
1049 $UMULL r8,r6,r7
1051 addc r10,r10,r8
1058 $UMULL r8,r6,r7
1060 addc r10,r10,r8
1066 $UMULL r8,r6,r7
1068 addc r10,r10,r8
1074 $UMULL r8,r6,r7
1076 addc r11,r11,r8
1082 $UMULL r8,r6,r7
1084 addc r11,r11,r8
1090 $UMULL r8,r6,r7
1092 addc r11,r11,r8
1098 $UMULL r8,r6,r7
1100 addc r11,r11,r8
1106 $UMULL r8,r6,r7
1108 addc r11,r11,r8
1114 $UMULL r8,r6,r7
1116 addc r12,r12,r8
1122 $UMULL r8,r6,r7
1124 addc r12,r12,r8
1130 $UMULL r8,r6,r7
1132 addc r12,r12,r8
1138 $UMULL r8,r6,r7
1140 addc r12,r12,r8
1146 $UMULL r8,r6,r7
1148 addc r12,r12,r8
1154 $UMULL r8,r6,r7
1156 addc r12,r12,r8
1162 $UMULL r8,r6,r7
1164 addc r10,r10,r8
1170 $UMULL r8,r6,r7
1172 addc r10,r10,r8
1178 $UMULL r8,r6,r7
1180 addc r10,r10,r8
1186 $UMULL r8,r6,r7
1188 addc r10,r10,r8
1194 $UMULL r8,r6,r7
1196 addc r10,r10,r8
1202 $UMULL r8,r6,r7
1204 addc r10,r10,r8
1210 $UMULL r8,r6,r7
1212 addc r10,r10,r8
1218 $UMULL r8,r6,r7
1220 addc r11,r11,r8
1226 $UMULL r8,r6,r7
1228 addc r11,r11,r8
1234 $UMULL r8,r6,r7
1236 addc r11,r11,r8
1242 $UMULL r8,r6,r7
1244 addc r11,r11,r8
1250 $UMULL r8,r6,r7
1252 addc r11,r11,r8
1258 $UMULL r8,r6,r7
1260 addc r11,r11,r8
1266 $UMULL r8,r6,r7
1268 addc r11,r11,r8
1274 $UMULL r8,r6,r7
1276 addc r11,r11,r8
1282 $UMULL r8,r6,r7
1284 addc r12,r12,r8
1290 $UMULL r8,r6,r7
1292 addc r12,r12,r8
1298 $UMULL r8,r6,r7
1300 addc r12,r12,r8
1306 $UMULL r8,r6,r7
1308 addc r12,r12,r8
1314 $UMULL r8,r6,r7
1316 addc r12,r12,r8
1322 $UMULL r8,r6,r7
1324 addc r12,r12,r8
1330 $UMULL r8,r6,r7
1332 addc r12,r12,r8
1338 $UMULL r8,r6,r7
1340 addc r10,r10,r8
1346 $UMULL r8,r6,r7
1348 addc r10,r10,r8
1354 $UMULL r8,r6,r7
1356 addc r10,r10,r8
1362 $UMULL r8,r6,r7
1364 addc r10,r10,r8
1370 $UMULL r8,r6,r7
1372 addc r10,r10,r8
1378 $UMULL r8,r6,r7
1380 addc r10,r10,r8
1386 $UMULL r8,r6,r7
1388 addc r11,r11,r8
1394 $UMULL r8,r6,r7
1396 addc r11,r11,r8
1402 $UMULL r8,r6,r7
1404 addc r11,r11,r8
1410 $UMULL r8,r6,r7
1412 addc r11,r11,r8
1418 $UMULL r8,r6,r7
1420 addc r11,r11,r8
1426 $UMULL r8,r6,r7
1428 addc r12,r12,r8
1434 $UMULL r8,r6,r7
1436 addc r12,r12,r8
1442 $UMULL r8,r6,r7
1444 addc r12,r12,r8
1450 $UMULL r8,r6,r7
1452 addc r12,r12,r8
1458 $UMULL r8,r6,r7
1460 addc r10,r10,r8
1466 $UMULL r8,r6,r7
1468 addc r10,r10,r8
1474 $UMULL r8,r6,r7
1476 addc r10,r10,r8
1482 $UMULL r8,r6,r7
1484 addc r11,r11,r8
1490 $UMULL r8,r6,r7
1492 addc r11,r11,r8
1498 $UMULL r8,r6,r7
1500 addc r12,r12,r8
1543 $LDU r8,$BNSZ(r5)
1544 subfe r6,r8,r7 # r6 = r7+carry bit + onescomplement(r8)
1545 # if carry = 1 this is r7-r8. Else it
1546 # is r7-r8 -1 as we need.
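
The subfe at line 1544 is the word step of bn_sub_words' borrow-propagating subtraction: subfe rD,rA,rB computes rB + CA + ~rA, which is rB - rA when the incoming carry bit is 1 and rB - rA - 1 when it is 0, exactly as the comment explains. A C sketch of the loop, assuming 32-bit words (the function name is illustrative):

    #include <stdint.h>

    /* Subtract b from a word by word, propagating the borrow;
     * returns the final borrow. */
    static uint32_t sub_words_sketch(uint32_t *r, const uint32_t *a,
                                     const uint32_t *b, int n)
    {
        uint32_t borrow = 0;                 /* inverse of the CA bit */
        for (int i = 0; i < n; i++) {
            uint64_t d = (uint64_t)a[i] - b[i] - borrow; /* subfe r6,r8,r7 */
            r[i] = (uint32_t)d;
            borrow = (uint32_t)(d >> 63);    /* 1 if the word wrapped */
        }
        return borrow;
    }
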
1590 $LDU r8,$BNSZ(r5)
1591 adde r8,r7,r8
1592 $STU r8,$BNSZ(r3)
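
Lines 1590-1592 are the matching step of bn_add_words: adde adds the two loaded words plus the incoming CA bit, and $STU stores the sum while advancing the result pointer. The equivalent loop, under the same assumptions:

    /* Add a and b word by word, propagating the carry; returns the
     * final carry. */
    static uint32_t add_words_sketch(uint32_t *r, const uint32_t *a,
                                     const uint32_t *b, int n)
    {
        uint32_t carry = 0;                              /* the CA bit    */
        for (int i = 0; i < n; i++) {
            uint64_t s = (uint64_t)a[i] + b[i] + carry;  /* adde r8,r7,r8 */
            r[i] = (uint32_t)s;                          /* $STU r8,...   */
            carry = (uint32_t)(s >> 32);
        }
        return carry;
    }
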
1625 li r8,$BITS
1628 subf r8,r7,r8 #r8 = BN_num_bits_word(d)
1629 $SHR. r9,r3,r8 #are there any bits above r8'th?
1639 $SHR r8,r4,r8 # r8 = (l >> (BN_BITS2-i))
1641 or r3,r3,r8 # h = (h<<i)|(l>>(BN_BITS2-i))
1650 $SHRI r8,r3,`$BITS/2` #r8 = (h>>BN_BITS4)
1653 $UCMP 0,r8,r9 # is (h>>BN_BITS4)==dh
1656 li r8,-1
1657 $CLRU r8,r8,`$BITS/2` #q = BN_MASK2l
1660 $UDIV r8,r3,r9 #q = h/dh
1662 $UMULL r12,r9,r8 #th = q*dh
1664 $UMULL r6,r8,r10 #tl = q*dl
1678 addi r8,r8,-1 #q--
1694 addi r8,r8,-1 # q--
1705 $SHLI r0,r8,`$BITS/2` #ret =q<<BN_BITS4
1708 or r3,r8,r0
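
The matches from line 1625 onward outline bn_div_words, which divides the two-word value (h:l) by a one-word divisor d. The divisor is first normalized so its top bit is set (1625-1641); each half-word of the quotient is then estimated as q = h/dh (1660), or capped at BN_MASK2l when the top halves collide (1653-1657), and the addi r8,r8,-1 lines (1678, 1694) walk an overestimate back down before the two halves are combined at 1705-1708. A hedged C rendering of the estimation, assuming 32-bit words and a pre-normalized divisor with h < d; it mirrors the portable bn_div_words rather than this exact instruction sequence:

    #include <stdint.h>

    #define BN_BITS4  16
    #define BN_MASK2l 0x0000ffffu
    #define BN_MASK2h 0xffff0000u

    /* Divide the two-word value (h:l) by the normalized divisor d. */
    static uint32_t div_words_sketch(uint32_t h, uint32_t l, uint32_t d)
    {
        uint32_t dh = d >> BN_BITS4;                 /* r9  */
        uint32_t dl = d & BN_MASK2l;                 /* r10 */
        uint32_t q = 0, ret = 0;

        for (int count = 2; count > 0; count--) {
            /* Estimate q = h/dh, capped when the top halves match. */
            q = ((h >> BN_BITS4) == dh) ? BN_MASK2l  /* li r8,-1; $CLRU  */
                                        : h / dh;    /* $UDIV r8,r3,r9   */
            uint32_t th = q * dh;                    /* $UMULL r12,r9,r8 */
            uint32_t tl = q * dl;                    /* $UMULL r6,r8,r10 */
            for (;;) {                               /* addi r8,r8,-1    */
                uint32_t t = h - th;
                if ((t & BN_MASK2h) ||
                    tl <= ((t << BN_BITS4) | ((l & BN_MASK2h) >> BN_BITS4)))
                    break;
                q--; th -= dh; tl -= dl;
            }
            th += tl >> BN_BITS4;                    /* subtract q*d      */
            tl = (tl << BN_BITS4) & BN_MASK2h;
            if (l < tl)
                th++;
            l -= tl;
            if (h < th) {                            /* q still 1 too big */
                h += d;
                q--;
            }
            h -= th;

            if (count == 2) {                        /* move to low half  */
                ret = q << BN_BITS4;                 /* $SHLI r0,r8,16    */
                h = (h << BN_BITS4) | (l >> BN_BITS4);
                l <<= BN_BITS4;
            }
        }
        return ret | q;                              /* or r3,r8,r0       */
    }
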
1730 # r7,r8 = product.
1743 $UMULH r8,r6,r6
1745 $STU r8,$BNSZ(r3)
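
bn_sqr_words (matches at 1730-1745) is the simple case: each input word is squared into a double-width product and both halves are stored, the high half via $STU with pointer update. Sketch, same assumptions:

    /* Square each word of a into a double-length result r. */
    static void sqr_words_sketch(uint32_t *r, const uint32_t *a, int n)
    {
        for (int i = 0; i < n; i++) {
            uint64_t t = (uint64_t)a[i] * a[i]; /* $UMULL r7 / $UMULH r8 */
            r[2*i]     = (uint32_t)t;
            r[2*i + 1] = (uint32_t)(t >> 32);   /* $STU r8,$BNSZ(r3)     */
        }
    }
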
1775 $LD r8,`0*$BNSZ`(r4)
1776 $UMULL r9,r6,r8
1777 $UMULH r10,r6,r8
1785 $LD r8,`1*$BNSZ`(r4)
1786 $UMULL r11,r6,r8
1787 $UMULH r12,r6,r8
1792 $LD r8,`2*$BNSZ`(r4)
1793 $UMULL r9,r6,r8
1794 $UMULH r10,r6,r8
1799 $LD r8,`3*$BNSZ`(r4)
1800 $UMULL r11,r6,r8
1801 $UMULH r12,r6,r8
1815 $LD r8,`0*$BNSZ`(r4)
1816 $UMULL r9,r6,r8
1817 $UMULH r10,r6,r8
1829 $LD r8,`1*$BNSZ`(r4)
1830 $UMULL r9,r6,r8
1831 $UMULH r10,r6,r8
1842 $LD r8,`2*$BNSZ`(r4)
1843 $UMULL r9,r6,r8
1844 $UMULH r10,r6,r8
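
The loads at 1775-1801 are the four-way unrolled body of bn_mul_words and 1815-1844 handle the one-to-three leftover words: every step multiplies a loaded word (r8) by the fixed multiplier in r6 and carries the $UMULH half into the next product. The un-unrolled loop, same assumptions:

    /* r[i] = a[i] * w, returning the final carry word. */
    static uint32_t mul_words_sketch(uint32_t *r, const uint32_t *a,
                                     int n, uint32_t w)
    {
        uint32_t carry = 0;
        for (int i = 0; i < n; i++) {
            uint64_t t = (uint64_t)a[i] * w + carry; /* $UMULL r9 / $UMULH r10 */
            r[i] = (uint32_t)t;
            carry = (uint32_t)(t >> 32);
        }
        return carry;
    }
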
1881 $LD r8,`0*$BNSZ`(r4)
1883 $UMULL r9,r6,r8
1884 $UMULH r10,r6,r8
1897 $LD r8,`1*$BNSZ`(r4)
1899 $UMULL r11,r6,r8
1900 $UMULH r12,r6,r8
1908 $LD r8,`2*$BNSZ`(r4)
1909 $UMULL r9,r6,r8
1911 $UMULH r10,r6,r8
1919 $LD r8,`3*$BNSZ`(r4)
1920 $UMULL r11,r6,r8
1922 $UMULH r12,r6,r8
1939 $LDU r8,$BNSZ(r4)
1940 $UMULL r9,r6,r8
1941 $UMULH r10,r6,r8
1951 $LDU r8,$BNSZ(r4)
1952 $UMULL r9,r6,r8
1953 $UMULH r10,r6,r8
1963 $LDU r8,$BNSZ(r4)
1964 $UMULL r9,r6,r8
1965 $UMULH r10,r6,r8
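
The remaining matches (1881 onward) are bn_mul_add_words: the same multiply-by-one-word loop, except each product is also added into the existing result word. The unrolled body loads at fixed offsets with $LD (1881-1922) while the tail uses $LDU with pointer update (1939-1965). Sketch, same assumptions; note that a[i]*w + r[i] + carry cannot overflow 64 bits:

    /* r[i] += a[i] * w, returning the final carry word. */
    static uint32_t mul_add_words_sketch(uint32_t *r, const uint32_t *a,
                                         int n, uint32_t w)
    {
        uint32_t carry = 0;
        for (int i = 0; i < n; i++) {
            uint64_t t = (uint64_t)a[i] * w + r[i] + carry;
            r[i] = (uint32_t)t;
            carry = (uint32_t)(t >> 32);
        }
        return carry;
    }
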