
Lines Matching refs:r10

233 #.set r10,10
275 # Freely use registers r5,r6,r7,r8,r9,r10,r11 as follows:
279 # r9,r10, r11 are the equivalents of c1,c2, c3.
289 $UMULH r10,r5,r5 #in first iteration. No need
305 addc r10,r7,r10 # now add to temp result.
309 $ST r10,`1*$BNSZ`(r3) #r[1]=c2;
315 addze r10,r0
323 addze r10,r10
327 addze r10,r10
338 adde r10,r8,r10
350 adde r10,r8,r10
356 addc r10,r7,r10
367 addc r10,r7,r10
370 $ST r10,`4*$BNSZ`(r3) #r[4]=c2
377 addze r10,r0
381 addze r10,r10
387 adde r10,r8,r10
390 $ST r10,`7*$BNSZ`(r3) #r[7]=c2
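The matches above are the squaring routine's use of r9/r10/r11 as the running three-word column accumulator (c1, c2, c3): each column's partial products are added into the low word with addc, carries ripple into the next two words with adde/addze, and the finished low word is written out with $ST before the accumulator roles rotate. A rough C equivalent of one diagonal step, assuming a 64-bit BN_ULONG and a compiler with unsigned __int128 (this mirrors a portable sqr_add_c-style helper rather than the exact assembly):

#include <stdint.h>

typedef uint64_t BN_ULONG;   /* assumption: 64-bit limbs */

/* Add a[i]*a[i] into the three-word column accumulator (c1,c2,c3).
 * Which of r9/r10/r11 holds the "low" word rotates from column to
 * column in the unrolled code; the comparison-based carries below
 * stand in for addc/adde/addze.  Off-diagonal terms are handled the
 * same way but added twice (the sqr_add_c2 variant). */
static void sqr_add_c(const BN_ULONG *a, int i,
                      BN_ULONG *c1, BN_ULONG *c2, BN_ULONG *c3)
{
    unsigned __int128 t = (unsigned __int128)a[i] * a[i];
    BN_ULONG lo = (BN_ULONG)t;          /* $UMULL result */
    BN_ULONG hi = (BN_ULONG)(t >> 64);  /* $UMULH result */

    *c1 += lo; hi  += (*c1 < lo);       /* addc  */
    *c2 += hi; *c3 += (*c2 < hi);       /* adde / addze */
}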
414 # Freely use registers r5,r6,r7,r8,r9,r10,r11 as follows:
418 # r9,r10, r11 are the equivalents of c1,c2, c3.
430 $UMULH r10,r5,r5
437 addc r10,r7,r10 #add the two register number
439 addze r9,r0 # number (r9,r11,r10).NOTE:r0=0
441 addc r10,r7,r10 #add the two register number
443 addze r9,r9 # number (r9,r11,r10).
445 $ST r10,`1*$BNSZ`(r3) # r[1]=c2
452 addze r10,r0
460 addze r10,r10
464 addze r10,r10
473 adde r10,r8,r10
477 adde r10,r8,r10
486 adde r10,r8,r10
490 adde r10,r8,r10
498 addc r10,r7,r10
506 addc r10,r7,r10
510 addc r10,r7,r10
519 addc r10,r7,r10
523 addc r10,r7,r10
526 $ST r10,`4*$BNSZ`(r3) #r[4]=c2;
534 addze r10,r0
538 addze r10,r10
547 addze r10,r10
551 addze r10,r10
560 addze r10,r10
564 addze r10,r10
570 adde r10,r8,r10
578 adde r10,r8,r10
582 adde r10,r8,r10
591 adde r10,r8,r10
595 adde r10,r8,r10
603 adde r10,r8,r10
606 adde r10,r8,r10
614 addc r10,r7,r10
617 addc r10,r7,r10
626 addc r10,r7,r10
629 addc r10,r7,r10
637 addc r10,r7,r10
640 addc r10,r7,r10
649 addc r10,r7,r10
652 addc r10,r7,r10
655 $ST r10,`7*$BNSZ`(r3) #r[7]=c2;
661 addze r10,r0
668 addze r10,r10
671 addze r10,r10
679 addze r10,r10
683 addze r10,r10
691 addze r10,r10
694 addze r10,r10
702 adde r10,r8,r10
705 adde r10,r8,r10
713 adde r10,r8,r10
716 adde r10,r8,r10
724 adde r10,r8,r10
727 adde r10,r8,r10
733 addc r10,r7,r10
740 addc r10,r7,r10
743 addc r10,r7,r10
751 addc r10,r7,r10
754 addc r10,r7,r10
757 $ST r10,`10*$BNSZ`(r3) #r[10]=c2;
764 addze r10,r0
767 addze r10,r10
775 addze r10,r10
778 addze r10,r10
784 adde r10,r8,r10
791 adde r10,r8,r10
794 adde r10,r8,r10
802 addc r10,r7,r10
805 addc r10,r7,r10
808 $ST r10,`13*$BNSZ`(r3) #r[13]=c2;
841 # r10, r11, r12 are the equivalents of c1, c2, and c3.
847 $UMULL r10,r6,r7
849 $ST r10,`0*$BNSZ`(r3) #r[0]=c1
856 addze r10,r0
864 addze r10,r10
871 adde r10,r9,r10
879 adde r10,r9,r10
887 adde r10,r9,r10
894 addc r10,r8,r10
902 addc r10,r8,r10
910 addc r10,r8,r10
918 addc r10,r8,r10
921 $ST r10,`3*$BNSZ`(r3) #r[3]=c1
928 addze r10,r0
936 addze r10,r10
944 addze r10,r10
951 adde r10,r9,r10
959 adde r10,r9,r10
966 addc r10,r8,r10
969 $ST r10,`6*$BNSZ`(r3) #r[6]=c1
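The mul_comba matches follow the same accumulator pattern with r10/r11/r12 as c1/c2/c3, except each output column sums cross products a[i]*b[j] with i+j fixed, stores c1 (the "#r[k]=c1" stores above), and shifts the accumulators down one word. A sketch of the column step and of how a 4x4 comba multiply walks the columns, again assuming 64-bit limbs and unsigned __int128; the real routine is fully unrolled:

#include <stdint.h>

typedef uint64_t BN_ULONG;   /* assumption: 64-bit limbs */

/* Add one cross product into the column accumulator (mul_add_c role). */
static void mul_add_c(BN_ULONG ai, BN_ULONG bj,
                      BN_ULONG *c1, BN_ULONG *c2, BN_ULONG *c3)
{
    unsigned __int128 t = (unsigned __int128)ai * bj;
    BN_ULONG lo = (BN_ULONG)t, hi = (BN_ULONG)(t >> 64);
    *c1 += lo; hi  += (*c1 < lo);
    *c2 += hi; *c3 += (*c2 < hi);
}

/* Column-by-column 4x4 multiply: for output word k, sum every
 * a[i]*b[j] with i+j == k, store the low accumulator, then shift the
 * accumulators down one word.  The unrolled assembly performs the
 * shift by renaming which register is "c1" instead of moving data. */
static void bn_mul_comba4_ref(BN_ULONG r[8],
                              const BN_ULONG a[4], const BN_ULONG b[4])
{
    BN_ULONG c1 = 0, c2 = 0, c3 = 0;
    for (int k = 0; k < 7; k++) {
        int first = k < 4 ? 0 : k - 3;
        int last  = k < 4 ? k : 3;
        for (int i = first; i <= last; i++)
            mul_add_c(a[i], b[k - i], &c1, &c2, &c3);
        r[k] = c1;
        c1 = c2; c2 = c3; c3 = 0;
    }
    r[7] = c1;   /* final carry word */
}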
994 # r10, r11, r12 are the equivalents of c1, c2, and c3.
1001 $UMULL r10,r6,r7
1003 $ST r10,`0*$BNSZ`(r3) #r[0]=c1;
1010 addze r10,r0
1018 addze r10,r10
1025 adde r10,r10,r9
1033 adde r10,r10,r9
1041 adde r10,r10,r9
1048 addc r10,r10,r8
1056 addc r10,r10,r8
1065 addc r10,r10,r8
1073 addc r10,r10,r8
1076 $ST r10,`3*$BNSZ`(r3) #r[3]=c1;
1083 addze r10,r0
1091 addze r10,r10
1099 addze r10,r10
1107 addze r10,r10
1115 addze r10,r10
1122 adde r10,r10,r9
1130 adde r10,r10,r9
1138 adde r10,r10,r9
1146 adde r10,r10,r9
1154 adde r10,r10,r9
1162 adde r10,r10,r9
1169 addc r10,r10,r8
1177 addc r10,r10,r8
1185 addc r10,r10,r8
1193 addc r10,r10,r8
1201 addc r10,r10,r8
1209 addc r10,r10,r8
1217 addc r10,r10,r8
1220 $ST r10,`6*$BNSZ`(r3) #r[6]=c1;
1227 addze r10,r0
1235 addze r10,r10
1243 addze r10,r10
1251 addze r10,r10
1259 addze r10,r10
1267 addze r10,r10
1275 addze r10,r10
1283 addze r10,r10
1290 adde r10,r10,r9
1298 adde r10,r10,r9
1306 adde r10,r10,r9
1314 adde r10,r10,r9
1322 adde r10,r10,r9
1330 adde r10,r10,r9
1338 adde r10,r10,r9
1345 addc r10,r10,r8
1353 addc r10,r10,r8
1361 addc r10,r10,r8
1369 addc r10,r10,r8
1377 addc r10,r10,r8
1385 addc r10,r10,r8
1388 $ST r10,`9*$BNSZ`(r3) #r[9]=c1;
1395 addze r10,r0
1403 addze r10,r10
1411 addze r10,r10
1419 addze r10,r10
1427 addze r10,r10
1434 adde r10,r10,r9
1442 adde r10,r10,r9
1450 adde r10,r10,r9
1458 adde r10,r10,r9
1465 addc r10,r10,r8
1473 addc r10,r10,r8
1481 addc r10,r10,r8
1484 $ST r10,`12*$BNSZ`(r3) #r[12]=c1;
1491 addze r10,r0
1499 addze r10,r10
1506 adde r10,r10,r9
1508 $ST r10,`15*$BNSZ`(r3) #r[15]=c1;
1673 $CLRU r10,r5,`$BITS/2` #r10=dl
1674 $UMULL r6,r8,r10 #tl = q*dl
1677 subf r10,r12,r3 #t = h -th
1678 $SHRI r7,r10,`$BITS/2` #r7= (t &BN_MASK2H), sort of...
1683 $SHLI r7,r10,`$BITS/2` # r7 = (t<<BN_BITS4)
1690 $CLRU r10,r5,`$BITS/2` #r10=dl. t is no longer needed in loop.
1691 subf r6,r10,r6 #tl -=dl
1694 $SHRI r10,r6,`$BITS/2` #t=(tl>>BN_BITS4)
1697 add r12,r12,r10 # th+=t
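These matches appear to come from the division helper: r12 carries the running high half (th), and the $CLRU/$SHRI/$SHLI operations split words into BN_BITS4-bit halves (dl, dh) so each half-quotient can be estimated as h/dh and corrected downward until q*dl fits, since there is no double-width divide instruction. Functionally the routine yields a one-word quotient of a two-word numerator; a minimal C reference of that result, under the assumption of a 64-bit BN_ULONG and unsigned __int128 support (the name div_two_words is hypothetical):

#include <stdint.h>

typedef uint64_t BN_ULONG;   /* assumption: 64-bit limbs */

/* floor(((h << 64) | l) / d), with the caller ensuring h < d so the
 * quotient fits in one word.  The PPC code reaches the same value
 * half a word at a time with the estimate-and-correct steps shown in
 * the matched lines (q*dl, t = h - th, th += t, ...). */
static BN_ULONG div_two_words(BN_ULONG h, BN_ULONG l, BN_ULONG d)
{
    unsigned __int128 n = ((unsigned __int128)h << 64) | l;
    return (BN_ULONG)(n / d);
}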
1790 $UMULH r10,r6,r8
1792 #addze r10,r10 #carry is NOT ignored.
1801 adde r11,r11,r10
1807 $UMULH r10,r6,r8
1809 #addze r10,r10
1815 adde r11,r11,r10
1830 $UMULH r10,r6,r8
1832 addze r10,r10
1834 addi r12,r10,0
1844 $UMULH r10,r6,r8
1846 addze r10,r10
1848 addi r12,r10,0
1857 $UMULH r10,r6,r8
1859 addze r10,r10
1861 addi r12,r10,0
1899 $UMULH r10,r6,r8
1901 addze r10,r10
1903 #addze r10,r10
1916 adde r11,r11,r10 #r10 is carry.
1926 $UMULH r10,r6,r8
1928 addze r10,r10
1930 #addze r10,r10
1938 adde r11,r11,r10
1956 $UMULH r10,r6,r8
1959 addze r10,r10
1961 addze r12,r10
1968 $UMULH r10,r6,r8
1971 addze r10,r10
1973 addze r12,r10
1980 $UMULH r10,r6,r8
1983 addze r10,r10
1985 addze r12,r10
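The trailing matches appear to be the word-by-word multiply loops: $UMULL/$UMULH produce the low and high halves of a[i]*w, addc adds the low half into the result word, and addze/adde fold the resulting carry into the high half, which becomes the carry for the next word. A C sketch of that multiply-accumulate step, assuming 64-bit limbs and unsigned __int128 (mirroring a portable mul_add_words-style loop rather than this unrolled assembly):

#include <stdint.h>

typedef uint64_t BN_ULONG;   /* assumption: 64-bit limbs */

/* r[i] = r[i] + a[i]*w + carry for each word, returning the final
 * carry.  The $UMULH result plus the addc/addze carries is exactly
 * the "t >> 64" part below. */
static BN_ULONG mul_add_words_ref(BN_ULONG *r, const BN_ULONG *a,
                                  int num, BN_ULONG w)
{
    BN_ULONG carry = 0;
    for (int i = 0; i < num; i++) {
        unsigned __int128 t = (unsigned __int128)a[i] * w + r[i] + carry;
        r[i]  = (BN_ULONG)t;         /* low word:  $UMULL + addc        */
        carry = (BN_ULONG)(t >> 64); /* high word: $UMULH + addze/adde  */
    }
    return carry;
}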