
Lines Matching refs:r8

231 #.set r8,8
275 # Freely use registers r5,r6,r7,r8,r9,r10,r11 as follows:
278 # r7,r8 are the results of the 32x32 giving 64 bit multiply.
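$UMULL and $UMULH are the script's macros for the PowerPC multiply-low/multiply-high pair (mullw/mulhwu on 32-bit builds, mulld/mulhdu on 64-bit), so r7 holds the low word and r8 the high word of the full double-width product. A minimal C sketch of what the pair computes, assuming the 32-bit configuration:

    #include <stdint.h>

    /* Sketch of the $UMULL/$UMULH pair for 32-bit words: the low and
     * high halves of the full 32x32 -> 64 bit product. */
    static inline void umull_umulh(uint32_t a, uint32_t b,
                                   uint32_t *lo, uint32_t *hi)
    {
        uint64_t p = (uint64_t)a * b;   /* full double-width product */
        *lo = (uint32_t)p;              /* $UMULL: low word  (r7) */
        *hi = (uint32_t)(p >> 32);      /* $UMULH: high word (r8) */
    }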
298 $UMULH r8,r5,r6
300 addc r7,r7,r7 # compute (r7,r8)=2*(r7,r8)
301 adde r8,r8,r8
306 addze r11,r8 # r8 added to r11 which is 0
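The addc/adde pair at lines 300-301 doubles the 64-bit partial product in place, since each off-diagonal cross product a[i]*a[j] appears twice in a square; the bit shifted out of the top is then caught by a following addze into a spare register. A sketch of that step in C (double_partial is my name, not the file's), assuming 32-bit words:

    #include <stdint.h>

    /* Double the partial product (hi,lo) in place and return the bit
     * that falls off the top, as addc r7,r7,r7 / adde r8,r8,r8 and a
     * following addze do. */
    static inline uint32_t double_partial(uint32_t *lo, uint32_t *hi)
    {
        uint32_t caught = *hi >> 31;     /* bit the addze catches     */
        *hi = (*hi << 1) | (*lo >> 31);  /* adde r8,r8,r8             */
        *lo <<= 1;                       /* addc r7,r7,r7             */
        return caught;
    }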
312 $UMULH r8,r6,r6
314 adde r9,r8,r9
319 $UMULH r8,r5,r6
322 adde r8,r8,r8
326 adde r9,r8,r9
332 $UMULH r8,r5,r6
334 adde r8,r8,r8
338 adde r10,r8,r10
344 $UMULH r8,r5,r6
347 adde r8,r8,r8
350 adde r10,r8,r10
355 $UMULH r8,r6,r6
357 adde r11,r8,r11
362 $UMULH r8,r5,r6
364 adde r8,r8,r8
368 adde r11,r8,r11
374 $UMULH r8,r5,r6
376 adde r8,r8,r8
380 adde r9,r8,r9
385 $UMULH r8,r6,r6
387 adde r10,r8,r10
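Taken together, lines 275-387 are a fully unrolled 4-word comba squaring: the result is produced one column at a time, diagonal squares are added once, doubled cross products twice, and each finished column is retired from a rolling three-word accumulator. A loop-form C sketch of the same technique, with hypothetical names, assuming 32-bit words:

    #include <stdint.h>

    /* Fold a 64-bit partial product into a three-word column
     * accumulator, mirroring the addc/adde/addze chains above. */
    static void acc_add(uint32_t *c0, uint32_t *c1, uint32_t *c2,
                        uint64_t p)
    {
        uint64_t t = (uint64_t)*c0 + (uint32_t)p;
        *c0 = (uint32_t)t;
        t = (t >> 32) + *c1 + (uint32_t)(p >> 32);
        *c1 = (uint32_t)t;
        *c2 += (uint32_t)(t >> 32);
    }

    /* 4-word comba squaring: one result column per outer step,
     * off-diagonal products counted twice. */
    static void sqr_comba4_sketch(uint32_t r[8], const uint32_t a[4])
    {
        uint32_t c0 = 0, c1 = 0, c2 = 0;
        for (int k = 0; k < 7; k++) {
            for (int i = (k > 3 ? k - 3 : 0); i <= k / 2; i++) {
                uint64_t p = (uint64_t)a[i] * a[k - i];
                acc_add(&c0, &c1, &c2, p);
                if (i != k - i)
                    acc_add(&c0, &c1, &c2, p); /* a[i]*a[j] twice */
            }
            r[k] = c0;                 /* retire the finished column */
            c0 = c1; c1 = c2; c2 = 0;  /* rotate the accumulator     */
        }
        r[7] = c0;
    }

Lines 414-813 below are the same construction unrolled for 8 words.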
414 # Freely use registers r5,r6,r7,r8,r9,r10,r11 as follows:
417 # r7,r8 are the results of the 32x32 giving 64 bit multiply.
435 $UMULH r8,r5,r6
438 adde r11,r8,r0 # (r8,r7) to the three register
442 adde r11,r8,r11 # (r8,r7) to the three register
449 $UMULH r8,r6,r6
451 adde r9,r8,r9
456 $UMULH r8,r5,r6
459 adde r9,r8,r9
463 adde r9,r8,r9
470 $UMULH r8,r5,r6
473 adde r10,r8,r10
477 adde r10,r8,r10
483 $UMULH r8,r5,r6
486 adde r10,r8,r10
490 adde r10,r8,r10
496 $UMULH r8,r6,r6
499 adde r11,r8,r11
504 $UMULH r8,r5,r6
507 adde r11,r8,r11
511 adde r11,r8,r11
517 $UMULH r8,r5,r6
520 adde r11,r8,r11
524 adde r11,r8,r11
530 $UMULH r8,r5,r6
533 adde r9,r8,r9
537 adde r9,r8,r9
543 $UMULH r8,r5,r6
546 adde r9,r8,r9
550 adde r9,r8,r9
556 $UMULH r8,r5,r6
559 adde r9,r8,r9
563 adde r9,r8,r9
568 $UMULH r8,r6,r6
570 adde r10,r8,r10
575 $UMULH r8,r5,r6
578 adde r10,r8,r10
582 adde r10,r8,r10
588 $UMULH r8,r5,r6
591 adde r10,r8,r10
595 adde r10,r8,r10
601 $UMULH r8,r5,r6
603 adde r10,r8,r10
606 adde r10,r8,r10
612 $UMULH r8,r5,r6
615 adde r11,r8,r11
618 adde r11,r8,r11
624 $UMULH r8,r5,r6
627 adde r11,r8,r11
630 adde r11,r8,r11
636 $UMULH r8,r5,r6
638 adde r11,r8,r11
641 adde r11,r8,r11
647 $UMULH r8,r5,r6
650 adde r11,r8,r11
653 adde r11,r8,r11
658 $UMULH r8,r6,r6
660 adde r9,r8,r9
665 $UMULH r8,r5,r6
667 adde r9,r8,r9
670 adde r9,r8,r9
676 $UMULH r8,r5,r6
678 adde r9,r8,r9
682 adde r9,r8,r9
688 $UMULH r8,r5,r6
690 adde r9,r8,r9
693 adde r9,r8,r9
699 $UMULH r8,r5,r6
702 adde r10,r8,r10
705 adde r10,r8,r10
711 $UMULH r8,r5,r6
713 adde r10,r8,r10
716 adde r10,r8,r10
722 $UMULH r8,r5,r6
724 adde r10,r8,r10
727 adde r10,r8,r10
732 $UMULH r8,r6,r6
734 adde r11,r8,r11
739 $UMULH r8,r5,r6
741 adde r11,r8,r11
744 adde r11,r8,r11
750 $UMULH r8,r5,r6
752 adde r11,r8,r11
755 adde r11,r8,r11
761 $UMULH r8,r5,r6
763 adde r9,r8,r9
766 adde r9,r8,r9
772 $UMULH r8,r5,r6
774 adde r9,r8,r9
777 adde r9,r8,r9
782 $UMULH r8,r6,r6
784 adde r10,r8,r10
789 $UMULH r8,r5,r6
791 adde r10,r8,r10
794 adde r10,r8,r10
801 $UMULH r8,r5,r6
803 adde r11,r8,r11
806 adde r11,r8,r11
811 $UMULH r8,r6,r6
813 adde r9,r8,r9
840 # r8, r9 are the results of the 32x32 giving 64 bit multiply.
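From line 840 (and again from line 993 for the 8-word version) the same accumulator idiom drives a comba multiplication; nothing is doubled here, each product a[i]*b[j] is folded into the current column once. A C sketch of one step, assuming 32-bit words and hypothetical names:

    #include <stdint.h>

    /* One comba multiply step: form the double-width product ($UMULL
     * into r8, $UMULH into r9) and add it to the three-word column
     * accumulator (addc / adde / addze). */
    static inline void mul_add_c(uint32_t a, uint32_t b,
                                 uint32_t *c0, uint32_t *c1,
                                 uint32_t *c2)
    {
        uint64_t p = (uint64_t)a * b;
        uint64_t t = (uint64_t)*c0 + (uint32_t)p;   /* addc  c0,r8,c0 */
        *c0 = (uint32_t)t;
        t = (t >> 32) + *c1 + (uint32_t)(p >> 32);  /* adde  c1,r9,c1 */
        *c1 = (uint32_t)t;
        *c2 += (uint32_t)(t >> 32);                 /* addze c2,c2    */
    }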
852 $UMULL r8,r6,r7
854 addc r11,r8,r11
860 $UMULL r8,r6,r7
862 addc r11,r8,r11
868 $UMULL r8,r6,r7
870 addc r12,r8,r12
876 $UMULL r8,r6,r7
878 addc r12,r8,r12
884 $UMULL r8,r6,r7
886 addc r12,r8,r12
892 $UMULL r8,r6,r7
894 addc r10,r8,r10
900 $UMULL r8,r6,r7
902 addc r10,r8,r10
908 $UMULL r8,r6,r7
910 addc r10,r8,r10
916 $UMULL r8,r6,r7
918 addc r10,r8,r10
924 $UMULL r8,r6,r7
926 addc r11,r8,r11
932 $UMULL r8,r6,r7
934 addc r11,r8,r11
940 $UMULL r8,r6,r7
942 addc r11,r8,r11
948 $UMULL r8,r6,r7
950 addc r12,r8,r12
956 $UMULL r8,r6,r7
958 addc r12,r8,r12
964 $UMULL r8,r6,r7
966 addc r10,r8,r10
993 # r8, r9 are the results of the 32x32 giving 64 bit multiply.
1006 $UMULL r8,r6,r7
1008 addc r11,r11,r8
1014 $UMULL r8,r6,r7
1016 addc r11,r11,r8
1022 $UMULL r8,r6,r7
1024 addc r12,r12,r8
1030 $UMULL r8,r6,r7
1032 addc r12,r12,r8
1038 $UMULL r8,r6,r7
1040 addc r12,r12,r8
1046 $UMULL r8,r6,r7
1048 addc r10,r10,r8
1054 $UMULL r8,r6,r7
1056 addc r10,r10,r8
1063 $UMULL r8,r6,r7
1065 addc r10,r10,r8
1071 $UMULL r8,r6,r7
1073 addc r10,r10,r8
1079 $UMULL r8,r6,r7
1081 addc r11,r11,r8
1087 $UMULL r8,r6,r7
1089 addc r11,r11,r8
1095 $UMULL r8,r6,r7
1097 addc r11,r11,r8
1103 $UMULL r8,r6,r7
1105 addc r11,r11,r8
1111 $UMULL r8,r6,r7
1113 addc r11,r11,r8
1119 $UMULL r8,r6,r7
1121 addc r12,r12,r8
1127 $UMULL r8,r6,r7
1129 addc r12,r12,r8
1135 $UMULL r8,r6,r7
1137 addc r12,r12,r8
1143 $UMULL r8,r6,r7
1145 addc r12,r12,r8
1151 $UMULL r8,r6,r7
1153 addc r12,r12,r8
1159 $UMULL r8,r6,r7
1161 addc r12,r12,r8
1167 $UMULL r8,r6,r7
1169 addc r10,r10,r8
1175 $UMULL r8,r6,r7
1177 addc r10,r10,r8
1183 $UMULL r8,r6,r7
1185 addc r10,r10,r8
1191 $UMULL r8,r6,r7
1193 addc r10,r10,r8
1199 $UMULL r8,r6,r7
1201 addc r10,r10,r8
1207 $UMULL r8,r6,r7
1209 addc r10,r10,r8
1215 $UMULL r8,r6,r7
1217 addc r10,r10,r8
1223 $UMULL r8,r6,r7
1225 addc r11,r11,r8
1231 $UMULL r8,r6,r7
1233 addc r11,r11,r8
1239 $UMULL r8,r6,r7
1241 addc r11,r11,r8
1247 $UMULL r8,r6,r7
1249 addc r11,r11,r8
1255 $UMULL r8,r6,r7
1257 addc r11,r11,r8
1263 $UMULL r8,r6,r7
1265 addc r11,r11,r8
1271 $UMULL r8,r6,r7
1273 addc r11,r11,r8
1279 $UMULL r8,r6,r7
1281 addc r11,r11,r8
1287 $UMULL r8,r6,r7
1289 addc r12,r12,r8
1295 $UMULL r8,r6,r7
1297 addc r12,r12,r8
1303 $UMULL r8,r6,r7
1305 addc r12,r12,r8
1311 $UMULL r8,r6,r7
1313 addc r12,r12,r8
1319 $UMULL r8,r6,r7
1321 addc r12,r12,r8
1327 $UMULL r8,r6,r7
1329 addc r12,r12,r8
1335 $UMULL r8,r6,r7
1337 addc r12,r12,r8
1343 $UMULL r8,r6,r7
1345 addc r10,r10,r8
1351 $UMULL r8,r6,r7
1353 addc r10,r10,r8
1359 $UMULL r8,r6,r7
1361 addc r10,r10,r8
1367 $UMULL r8,r6,r7
1369 addc r10,r10,r8
1375 $UMULL r8,r6,r7
1377 addc r10,r10,r8
1383 $UMULL r8,r6,r7
1385 addc r10,r10,r8
1391 $UMULL r8,r6,r7
1393 addc r11,r11,r8
1399 $UMULL r8,r6,r7
1401 addc r11,r11,r8
1407 $UMULL r8,r6,r7
1409 addc r11,r11,r8
1415 $UMULL r8,r6,r7
1417 addc r11,r11,r8
1423 $UMULL r8,r6,r7
1425 addc r11,r11,r8
1431 $UMULL r8,r6,r7
1433 addc r12,r12,r8
1439 $UMULL r8,r6,r7
1441 addc r12,r12,r8
1447 $UMULL r8,r6,r7
1449 addc r12,r12,r8
1455 $UMULL r8,r6,r7
1457 addc r12,r12,r8
1463 $UMULL r8,r6,r7
1465 addc r10,r10,r8
1471 $UMULL r8,r6,r7
1473 addc r10,r10,r8
1479 $UMULL r8,r6,r7
1481 addc r10,r10,r8
1487 $UMULL r8,r6,r7
1489 addc r11,r11,r8
1495 $UMULL r8,r6,r7
1497 addc r11,r11,r8
1503 $UMULL r8,r6,r7
1505 addc r12,r12,r8
1550 $LDU r8,$BNSZ(r5)
1551 subfe r6,r8,r7 # r6 = r7 + carry bit + ones' complement of r8.
1552 # If carry = 1 this is r7-r8; else it
1553 # is r7-r8-1, as we need.
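subfe rD,rA,rB computes rB + ~rA + CA, so with the carry bit playing the role of "no borrow" the line above yields r7 - r8 - borrow, exactly one word of a multi-precision subtraction. A C sketch of the whole borrow-propagating loop (names are mine), assuming 32-bit words:

    #include <stdint.h>

    /* Word-by-word subtraction with borrow, as the subfe chain does. */
    static uint32_t sub_words_sketch(uint32_t *r, const uint32_t *a,
                                     const uint32_t *b, int n)
    {
        uint32_t borrow = 0;
        for (int i = 0; i < n; i++) {
            uint64_t t = (uint64_t)a[i] - b[i] - borrow;
            r[i] = (uint32_t)t;
            borrow = (uint32_t)(t >> 32) & 1; /* 1 if we wrapped */
        }
        return borrow;                        /* final borrow out */
    }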
1598 $LDU r8,$BNSZ(r5)
1599 adde r8,r7,r8
1600 $STU r8,$BNSZ(r3)
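Lines 1598-1600 are the matching add-with-carry word step: load the next word of each operand, let adde fold in the carry left by the previous word, and store the sum with an update-form store. The loop in C, under the same 32-bit assumption:

    #include <stdint.h>

    /* Word-by-word addition with carry, as the adde chain does. */
    static uint32_t add_words_sketch(uint32_t *r, const uint32_t *a,
                                     const uint32_t *b, int n)
    {
        uint32_t carry = 0;
        for (int i = 0; i < n; i++) {
            uint64_t t = (uint64_t)a[i] + b[i] + carry;
            r[i] = (uint32_t)t;           /* $STU r8,$BNSZ(r3) */
            carry = (uint32_t)(t >> 32);
        }
        return carry;
    }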
1635 li r8,$BITS
1638 subf r8,r7,r8 #r8 = BN_num_bits_word(d)
1639 $SHR. r9,r3,r8 #are there any bits above r8'th?
1649 $SHR r8,r4,r8 # r8 = (l >> BN_BITS2 -i)
1651 or r3,r3,r8 # h = (h<<i)|(l>>(BN_BITS2-i))
1660 $SHRI r8,r3,`$BITS/2` #r8 = (h>>BN_BITS4)
1663 $UCMP 0,r8,r9 # is (h>>BN_BITS4)==dh
1666 li r8,-1
1667 $CLRU r8,r8,`$BITS/2` #q = BN_MASK2l
1670 $UDIV r8,r3,r9 #q = h/dh
1672 $UMULL r12,r9,r8 #th = q*dh
1674 $UMULL r6,r8,r10 #tl = q*dl
1688 addi r8,r8,-1 #q--
1704 addi r8,r8,-1 # q--
1715 $SHLI r0,r8,`$BITS/2` #ret =q<<BN_BITS4
1718 or r3,r8,r0
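Lines 1635-1718 implement the estimate-and-correct quotient step of word division: count the divisor's bits, normalize so its top bit is set, guess a half-word digit q as h divided by the divisor's top half (falling back to the all-ones mask BN_MASK2l when the top halves are equal), and decrement q while it overshoots. A deliberately simplified C sketch of one digit, assuming 32-bit words, h < d, and d already normalized; the routine in the file runs this twice, once per half of the final quotient:

    #include <stdint.h>

    /* One quotient-digit estimate, Knuth style: with d normalized the
     * first guess is never too small and at most 2 too large, so a
     * short decrement loop lands on the exact digit. */
    static uint32_t div_digit_sketch(uint32_t h, uint32_t l, uint32_t d)
    {
        uint64_t rem = ((uint64_t)h << 16) | (l >> 16); /* top 48 bits */
        uint32_t dh  = d >> 16;                 /* divisor's top half  */
        uint32_t q   = (h >> 16) == dh ? 0xffff /* q = BN_MASK2l       */
                                       : h / dh;/* $UDIV r8,r3,r9      */
        while ((uint64_t)q * d > rem)           /* overshoot?          */
            q--;                                /* addi r8,r8,-1       */
        return q;
    }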
1742 # r7,r8 = product.
1755 $UMULH r8,r6,r6
1757 $STU r8,$BNSZ(r3)
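Lines 1742-1757 square the input one word at a time: $UMULL/$UMULH leave the double-width square in (r7,r8) and both halves are stored to consecutive result words. In C, assuming 32-bit words:

    #include <stdint.h>

    /* Each word squared into a double-width result, stored as two
     * consecutive output words. */
    static void sqr_words_sketch(uint32_t *r, const uint32_t *a, int n)
    {
        for (int i = 0; i < n; i++) {
            uint64_t p = (uint64_t)a[i] * a[i];
            r[2 * i]     = (uint32_t)p;         /* low half  (r7) */
            r[2 * i + 1] = (uint32_t)(p >> 32); /* high half (r8) */
        }
    }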
1788 $LD r8,`0*$BNSZ`(r4)
1789 $UMULL r9,r6,r8
1790 $UMULH r10,r6,r8
1798 $LD r8,`1*$BNSZ`(r4)
1799 $UMULL r11,r6,r8
1800 $UMULH r12,r6,r8
1805 $LD r8,`2*$BNSZ`(r4)
1806 $UMULL r9,r6,r8
1807 $UMULH r10,r6,r8
1812 $LD r8,`3*$BNSZ`(r4)
1813 $UMULL r11,r6,r8
1814 $UMULH r12,r6,r8
1828 $LD r8,`0*$BNSZ`(r4)
1829 $UMULL r9,r6,r8
1830 $UMULH r10,r6,r8
1842 $LD r8,`1*$BNSZ`(r4)
1843 $UMULL r9,r6,r8
1844 $UMULH r10,r6,r8
1855 $LD r8,`2*$BNSZ`(r4)
1856 $UMULL r9,r6,r8
1857 $UMULH r10,r6,r8
1896 $LD r8,`0*$BNSZ`(r4)
1898 $UMULL r9,r6,r8
1899 $UMULH r10,r6,r8
1912 $LD r8,`1*$BNSZ`(r4)
1914 $UMULL r11,r6,r8
1915 $UMULH r12,r6,r8
1923 $LD r8,`2*$BNSZ`(r4)
1924 $UMULL r9,r6,r8
1926 $UMULH r10,r6,r8
1934 $LD r8,`3*$BNSZ`(r4)
1935 $UMULL r11,r6,r8
1937 $UMULH r12,r6,r8
1954 $LDU r8,$BNSZ(r4)
1955 $UMULL r9,r6,r8
1956 $UMULH r10,r6,r8
1966 $LDU r8,$BNSZ(r4)
1967 $UMULL r9,r6,r8
1968 $UMULH r10,r6,r8
1978 $LDU r8,$BNSZ(r4)
1979 $UMULL r9,r6,r8
1980 $UMULH r10,r6,r8
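The unrolled loads from line 1788 onward feed single-word multiply loops: every word of the array is multiplied by the same word, and the high half of each product ($UMULH) becomes the carry into the next word; the second group, from line 1896, looks like the multiply-accumulate variant that also adds in the existing result words (bn_mul_words and bn_mul_add_words in OpenSSL's bn library, if this is its ppc.pl). Sketches of both loops in C, assuming 32-bit words and my own names:

    #include <stdint.h>

    /* r = a * w, propagating the high half of each product as the
     * carry into the next word. */
    static uint32_t mul_words_sketch(uint32_t *r, const uint32_t *a,
                                     int n, uint32_t w)
    {
        uint32_t carry = 0;
        for (int i = 0; i < n; i++) {
            uint64_t p = (uint64_t)a[i] * w + carry;
            r[i] = (uint32_t)p;
            carry = (uint32_t)(p >> 32);
        }
        return carry;
    }

    /* r += a * w, the accumulate variant; the extra additions cannot
     * overflow 64 bits even in the worst case. */
    static uint32_t mul_add_words_sketch(uint32_t *r, const uint32_t *a,
                                         int n, uint32_t w)
    {
        uint32_t carry = 0;
        for (int i = 0; i < n; i++) {
            uint64_t p = (uint64_t)a[i] * w + r[i] + carry;
            r[i] = (uint32_t)p;
            carry = (uint32_t)(p >> 32);
        }
        return carry;
    }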