
Lines Matching refs:r10

233 #.set r10,10
275 # Freely use registers r5,r6,r7,r8,r9,r10,r11 as follows:
279 # r9,r10, r11 are the equivalents of c1,c2, c3.
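The two comment lines above describe what looks like a 4-word comba squaring routine: three accumulators (the roles of r9, r10, r11) collect one output column at a time. A minimal C sketch of that scheme, assuming 32-bit words; the function name is illustrative, not the generated symbol, and the real code schedules the loads and multiplies by hand rather than looping:

#include <stdint.h>

/* Illustrative only: three accumulators (c1,c2,c3 = the r9/r10/r11 roles)
 * collect each output column of the square; off-diagonal products
 * a[i]*a[j] are added twice.  32-bit words assumed. */
static void sqr_comba4_sketch(uint32_t r[8], const uint32_t a[4])
{
    uint32_t c1 = 0, c2 = 0, c3 = 0;

    for (int k = 0; k < 7; k++) {               /* output column k          */
        for (int i = 0; i <= k && i < 4; i++) {
            int j = k - i;
            if (j >= 4 || i > j)
                continue;                       /* count each (i,j) once    */
            uint64_t p = (uint64_t)a[i] * a[j]; /* $UMULL/$UMULH pair       */
            int reps = (i == j) ? 1 : 2;        /* cross terms appear twice */
            while (reps--) {
                uint64_t t = (uint64_t)c1 + (uint32_t)p;            /* addc  */
                c1 = (uint32_t)t;
                t = (uint64_t)c2 + (uint32_t)(p >> 32) + (t >> 32); /* adde  */
                c2 = (uint32_t)t;
                c3 += (uint32_t)(t >> 32);                          /* addze */
            }
        }
        r[k] = c1;                              /* $ST: write the column    */
        c1 = c2; c2 = c3; c3 = 0;               /* rotate accumulator roles */
    }
    r[7] = c1;                                  /* top word is pure carry   */
}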
289 $UMULH r10,r5,r5 #in first iteration. No need
305 addc r10,r7,r10 # now add to temp result.
309 $ST r10,`1*$BNSZ`(r3) #r[1]=c2;
315 addze r10,r0
323 addze r10,r10
327 addze r10,r10
338 adde r10,r8,r10
350 adde r10,r8,r10
356 addc r10,r7,r10
367 addc r10,r7,r10
370 $ST r10,`4*$BNSZ`(r3) #r[4]=c2
377 addze r10,r0
381 addze r10,r10
387 adde r10,r8,r10
390 $ST r10,`7*$BNSZ`(r3) #r[7]=c2
412 # Freely use registers r5,r6,r7,r8,r9,r10,r11 as follows:
416 # r9,r10, r11 are the equivalents of c1,c2, c3.
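The same register assignment reappears here for what appears to be the 8-word variant, and most of the r10 matches below are the addc/adde/addze triplets that fold one UMULL/UMULH product pair into the three accumulators; doubled off-diagonal square terms simply run the triplet twice. A hedged C equivalent of one triplet, with the carry made explicit (names and 32-bit word size are assumptions for illustration):

#include <stdint.h>

/* One product (lo,hi) folded into the three column accumulators:
 *     addc   cA,lo,cA     add low half, set carry
 *     adde   cB,hi,cB     add high half plus carry
 *     addze  cC,cC        absorb the final carry                      */
static void add_product(uint32_t *cA, uint32_t *cB, uint32_t *cC,
                        uint32_t lo, uint32_t hi)
{
    uint64_t t = (uint64_t)*cA + lo;         /* addc  */
    *cA = (uint32_t)t;
    t = (uint64_t)*cB + hi + (t >> 32);      /* adde  */
    *cB = (uint32_t)t;
    *cC += (uint32_t)(t >> 32);              /* addze */
}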
428 $UMULH r10,r5,r5
435 addc r10,r7,r10 #add the two register number
437 addze r9,r0 # number (r9,r11,r10).NOTE:r0=0
439 addc r10,r7,r10 #add the two register number
441 addze r9,r9 # number (r9,r11,r10).
443 $ST r10,`1*$BNSZ`(r3) # r[1]=c2
450 addze r10,r0
458 addze r10,r10
462 addze r10,r10
471 adde r10,r8,r10
475 adde r10,r8,r10
484 adde r10,r8,r10
488 adde r10,r8,r10
496 addc r10,r7,r10
504 addc r10,r7,r10
508 addc r10,r7,r10
517 addc r10,r7,r10
521 addc r10,r7,r10
524 $ST r10,`4*$BNSZ`(r3) #r[4]=c2;
532 addze r10,r0
536 addze r10,r10
545 addze r10,r10
549 addze r10,r10
558 addze r10,r10
562 addze r10,r10
568 adde r10,r8,r10
576 adde r10,r8,r10
580 adde r10,r8,r10
589 adde r10,r8,r10
593 adde r10,r8,r10
601 adde r10,r8,r10
604 adde r10,r8,r10
612 addc r10,r7,r10
615 addc r10,r7,r10
624 addc r10,r7,r10
627 addc r10,r7,r10
635 addc r10,r7,r10
638 addc r10,r7,r10
647 addc r10,r7,r10
650 addc r10,r7,r10
653 $ST r10,`7*$BNSZ`(r3) #r[7]=c2;
659 addze r10,r0
666 addze r10,r10
669 addze r10,r10
677 addze r10,r10
681 addze r10,r10
689 addze r10,r10
692 addze r10,r10
700 adde r10,r8,r10
703 adde r10,r8,r10
711 adde r10,r8,r10
714 adde r10,r8,r10
722 adde r10,r8,r10
725 adde r10,r8,r10
731 addc r10,r7,r10
738 addc r10,r7,r10
741 addc r10,r7,r10
749 addc r10,r7,r10
752 addc r10,r7,r10
755 $ST r10,`10*$BNSZ`(r3) #r[10]=c2;
762 addze r10,r0
765 addze r10,r10
773 addze r10,r10
776 addze r10,r10
782 adde r10,r8,r10
789 adde r10,r8,r10
792 adde r10,r8,r10
800 addc r10,r7,r10
803 addc r10,r7,r10
806 $ST r10,`13*$BNSZ`(r3) #r[13]=c2;
838 # r10, r11, r12 are the equivalents of c1, c2, and c3.
844 $UMULL r10,r6,r7
846 $ST r10,`0*$BNSZ`(r3) #r[0]=c1
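Here the comment above assigns r10/r11/r12 to c1/c2/c3 for what looks like a 4-word comba multiplication: every a[i]*b[j] with i+j == k goes into column k. A hedged C sketch of that structure, assuming 32-bit words; it mirrors the column logic, not the exact instruction scheduling:

#include <stdint.h>

static void mul_comba4_sketch(uint32_t r[8],
                              const uint32_t a[4], const uint32_t b[4])
{
    uint32_t c1 = 0, c2 = 0, c3 = 0;            /* the r10/r11/r12 roles    */

    for (int k = 0; k < 7; k++) {
        for (int i = 0; i <= k && i < 4; i++) {
            int j = k - i;
            if (j >= 4)
                continue;
            uint64_t p = (uint64_t)a[i] * b[j]; /* $UMULL/$UMULH pair       */
            uint64_t t = (uint64_t)c1 + (uint32_t)p;            /* addc     */
            c1 = (uint32_t)t;
            t = (uint64_t)c2 + (uint32_t)(p >> 32) + (t >> 32); /* adde     */
            c2 = (uint32_t)t;
            c3 += (uint32_t)(t >> 32);                          /* addze    */
        }
        r[k] = c1;              /* in the asm the register roles rotate     */
        c1 = c2; c2 = c3; c3 = 0;   /* instead of the values being moved    */
    }
    r[7] = c1;
}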
853 addze r10,r0
861 addze r10,r10
868 adde r10,r9,r10
876 adde r10,r9,r10
884 adde r10,r9,r10
891 addc r10,r8,r10
899 addc r10,r8,r10
907 addc r10,r8,r10
915 addc r10,r8,r10
918 $ST r10,`3*$BNSZ`(r3) #r[3]=c1
925 addze r10,r0
933 addze r10,r10
941 addze r10,r10
948 adde r10,r9,r10
956 adde r10,r9,r10
963 addc r10,r8,r10
966 $ST r10,`6*$BNSZ`(r3) #r[6]=c1
989 # r10, r11, r12 are the equivalents of c1, c2, and c3.
996 $UMULL r10,r6,r7
998 $ST r10,`0*$BNSZ`(r3) #r[0]=c1;
1005 addze r10,r0
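The $ST/addze pair above shows the bookkeeping between columns in the 8-word multiply: the accumulator that was just written out is immediately reseeded with the pending carry (addze r10,r0, where r0 is zero), so the same register comes back as the live accumulator three columns later. That is why, in this group, r10's stores land on r[0], r[3], r[6], r[9], r[12] and r[15]. A small, purely illustrative demo of that rotation, assuming r11 and r12 follow the same pattern:

#include <stdio.h>

int main(void)
{
    const char *acc[3] = { "r10 (c1)", "r11 (c2)", "r12 (c3)" };
    for (int k = 0; k <= 15; k++)                 /* one output word per column */
        printf("r[%2d] is stored from %s\n", k, acc[k % 3]);
    return 0;
}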
1013 addze r10,r10
1020 adde r10,r10,r9
1028 adde r10,r10,r9
1036 adde r10,r10,r9
1043 addc r10,r10,r8
1051 addc r10,r10,r8
1060 addc r10,r10,r8
1068 addc r10,r10,r8
1071 $ST r10,`3*$BNSZ`(r3) #r[3]=c1;
1078 addze r10,r0
1086 addze r10,r10
1094 addze r10,r10
1102 addze r10,r10
1110 addze r10,r10
1117 adde r10,r10,r9
1125 adde r10,r10,r9
1133 adde r10,r10,r9
1141 adde r10,r10,r9
1149 adde r10,r10,r9
1157 adde r10,r10,r9
1164 addc r10,r10,r8
1172 addc r10,r10,r8
1180 addc r10,r10,r8
1188 addc r10,r10,r8
1196 addc r10,r10,r8
1204 addc r10,r10,r8
1212 addc r10,r10,r8
1215 $ST r10,`6*$BNSZ`(r3) #r[6]=c1;
1222 addze r10,r0
1230 addze r10,r10
1238 addze r10,r10
1246 addze r10,r10
1254 addze r10,r10
1262 addze r10,r10
1270 addze r10,r10
1278 addze r10,r10
1285 adde r10,r10,r9
1293 adde r10,r10,r9
1301 adde r10,r10,r9
1309 adde r10,r10,r9
1317 adde r10,r10,r9
1325 adde r10,r10,r9
1333 adde r10,r10,r9
1340 addc r10,r10,r8
1348 addc r10,r10,r8
1356 addc r10,r10,r8
1364 addc r10,r10,r8
1372 addc r10,r10,r8
1380 addc r10,r10,r8
1383 $ST r10,`9*$BNSZ`(r3) #r[9]=c1;
1390 addze r10,r0
1398 addze r10,r10
1406 addze r10,r10
1414 addze r10,r10
1422 addze r10,r10
1429 adde r10,r10,r9
1437 adde r10,r10,r9
1445 adde r10,r10,r9
1453 adde r10,r10,r9
1460 addc r10,r10,r8
1468 addc r10,r10,r8
1476 addc r10,r10,r8
1479 $ST r10,`12*$BNSZ`(r3) #r[12]=c1;
1486 addze r10,r0
1494 addze r10,r10
1501 adde r10,r10,r9
1503 $ST r10,`15*$BNSZ`(r3) #r[15]=c1;
1663 $CLRU r10,r5,`$BITS/2` #r10=dl
1664 $UMULL r6,r8,r10 #tl = q*dl
1667 subf r10,r12,r3 #t = h -th
1668 $SHRI r7,r10,`$BITS/2` #r7= (t &BN_MASK2H), sort of...
1673 $SHLI r7,r10,`$BITS/2` # r7 = (t<<BN_BITS4)
1680 $CLRU r10,r5,`$BITS/2` #r10=dl. t is no longer needed in loop.
1681 subf r6,r10,r6 #tl -=dl
1684 $SHRI r10,r6,`$BITS/2` #t=(tl>>BN_BITS4)
1687 add r12,r12,r10 # th+=t
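The comments on these lines (dl, tl = q*dl, t = h - th, tl -= dl, th += t) describe the classic correction of a half-word quotient-digit estimate in a two-word-by-one-word division: after estimating q from the high halves, q is decremented until q times the full divisor no longer exceeds the dividend fragment. A hedged C sketch of that inner correction, assuming 32-bit words (so a half word is 16 bits); parameter names follow the comments above, not a real prototype:

#include <stdint.h>

static uint32_t refine_qdigit(uint32_t h, uint32_t l,
                              uint32_t dh, uint32_t dl, uint32_t q)
{
    uint32_t th = q * dh;              /* th = q*dh                         */
    uint32_t tl = q * dl;              /* $UMULL r6,r8,r10 : tl = q*dl      */

    for (;;) {
        uint32_t t = h - th;           /* subf r10,r12,r3  : t = h - th     */
        if ((t & 0xffff0000u) ||       /* high half of t set -> q is fine   */
            tl <= ((t << 16) | ((l & 0xffff0000u) >> 16)))
            break;
        q--;                           /* estimate was one too big          */
        th -= dh;
        tl -= dl;                      /* subf r6,r10,r6   : tl -= dl       */
    }
    /* the caller then folds tl's high half back into th: th += tl >> 16    */
    return q;
}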
1777 $UMULH r10,r6,r8
1779 #addze r10,r10 #carry is NOT ignored.
1788 adde r11,r11,r10
1794 $UMULH r10,r6,r8
1796 #addze r10,r10
1802 adde r11,r11,r10
1817 $UMULH r10,r6,r8
1819 addze r10,r10
1821 addi r12,r10,0
1831 $UMULH r10,r6,r8
1833 addze r10,r10
1835 addi r12,r10,0
1844 $UMULH r10,r6,r8
1846 addze r10,r10
1848 addi r12,r10,0
1884 $UMULH r10,r6,r8
1886 addze r10,r10
1888 #addze r10,r10
1901 adde r11,r11,r10 #r10 is carry.
1911 $UMULH r10,r6,r8
1913 addze r10,r10
1915 #addze r10,r10
1923 adde r11,r11,r10
1941 $UMULH r10,r6,r8
1944 addze r10,r10
1946 addze r12,r10
1953 $UMULH r10,r6,r8
1956 addze r10,r10
1958 addze r12,r10
1965 $UMULH r10,r6,r8
1968 addze r10,r10
1970 addze r12,r10
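The remaining matches, with comments like "carry is NOT ignored" and "r10 is carry", look like word-by-word multiply(-and-add) loops: each step multiplies a[i] by a fixed word, adds the running carry (and, in the add variant, the existing result word), stores the low half, and keeps the high half as the next carry. A hedged C sketch of that loop, assuming 32-bit words; the function name is illustrative:

#include <stdint.h>

static uint32_t mul_add_words_sketch(uint32_t *r, const uint32_t *a,
                                     int n, uint32_t w)
{
    uint32_t c = 0;                          /* running carry word          */
    for (int i = 0; i < n; i++) {
        uint64_t t = (uint64_t)a[i] * w;     /* $UMULL / $UMULH pair        */
        t += r[i];                           /* addc of the existing word   */
        t += c;                              /* plus the carry-in           */
        r[i] = (uint32_t)t;                  /* store the low half          */
        c = (uint32_t)(t >> 32);             /* adde/addze keep the carry   */
    }
    return c;                                /* final carry word            */
}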