Lines Matching refs:r8

231 #.set r8,8
275 # Freely use registers r5,r6,r7,r8,r9,r10,r11 as follows:
278 # r7,r8 are the results of the 32x32 giving 64 bit multiply.
298 $UMULH r8,r5,r6
300 addc r7,r7,r7 # compute (r7,r8)=2*(r7,r8)
301 adde r8,r8,r8
306 addze r11,r8 # r11 = r8 + carry
312 $UMULH r8,r6,r6
314 adde r9,r8,r9
319 $UMULH r8,r5,r6
322 adde r8,r8,r8
326 adde r9,r8,r9
332 $UMULH r8,r5,r6
334 adde r8,r8,r8
338 adde r10,r8,r10
344 $UMULH r8,r5,r6
347 adde r8,r8,r8
350 adde r10,r8,r10
355 $UMULH r8,r6,r6
357 adde r11,r8,r11
362 $UMULH r8,r5,r6
364 adde r8,r8,r8
368 adde r11,r8,r11
374 $UMULH r8,r5,r6
376 adde r8,r8,r8
380 adde r9,r8,r9
385 $UMULH r8,r6,r6
387 adde r10,r8,r10
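
The run above (source lines 275-390) is a comba-style squaring step: each off-diagonal product is computed once ($UMULL/$UMULH into r7,r8), doubled in place by the addc/adde pair at 300-301, and folded into a three-word column accumulator, with addze catching the carry out. A minimal C sketch of that step, assuming 32-bit words; the name sqr_add_c2 and the pointer interface are illustrative, not the routine's actual API:

    #include <stdint.h>

    /* Add 2*a*b into the three-word accumulator (c3,c2,c1), c1 lowest. */
    static void sqr_add_c2(uint32_t a, uint32_t b,
                           uint32_t *c1, uint32_t *c2, uint32_t *c3)
    {
        uint64_t t   = (uint64_t)a * b;      /* $UMULL -> r7, $UMULH -> r8 */
        uint32_t top = (uint32_t)(t >> 63);  /* bit shifted out by the x2  */
        uint64_t hl  = t << 1;               /* addc/adde at 300-301:      */
                                             /* (r7,r8) = 2*(r7,r8)        */
        uint64_t s = (uint64_t)*c1 + (uint32_t)hl;                /* addc  */
        *c1 = (uint32_t)s;
        s = (uint64_t)*c2 + (uint32_t)(hl >> 32) + (uint32_t)(s >> 32);
        *c2 = (uint32_t)s;                                        /* adde  */
        *c3 += top + (uint32_t)(s >> 32);    /* addze: absorb the carries  */
    }

Doubling the 64-bit pair first, instead of adding the product twice, costs two instructions but saves a second full carry chain per off-diagonal term.
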
415 # Freely use registers r5,r6,r7,r8,r9,r10,r11 as follows:
418 # r7,r8 are the results of the 32x32 giving 64 bit multiply.
436 $UMULH r8,r5,r6
439 adde r11,r8,r0 # add (r8,r7) to the three-register accumulator
443 adde r11,r8,r11 # add (r8,r7) to the three-register accumulator
450 $UMULH r8,r6,r6
452 adde r9,r8,r9
457 $UMULH r8,r5,r6
460 adde r9,r8,r9
464 adde r9,r8,r9
471 $UMULH r8,r5,r6
474 adde r10,r8,r10
478 adde r10,r8,r10
484 $UMULH r8,r5,r6
487 adde r10,r8,r10
491 adde r10,r8,r10
497 $UMULH r8,r6,r6
500 adde r11,r8,r11
505 $UMULH r8,r5,r6
508 adde r11,r8,r11
512 adde r11,r8,r11
518 $UMULH r8,r5,r6
521 adde r11,r8,r11
525 adde r11,r8,r11
531 $UMULH r8,r5,r6
534 adde r9,r8,r9
538 adde r9,r8,r9
544 $UMULH r8,r5,r6
547 adde r9,r8,r9
551 adde r9,r8,r9
557 $UMULH r8,r5,r6
560 adde r9,r8,r9
564 adde r9,r8,r9
569 $UMULH r8,r6,r6
571 adde r10,r8,r10
576 $UMULH r8,r5,r6
579 adde r10,r8,r10
583 adde r10,r8,r10
589 $UMULH r8,r5,r6
592 adde r10,r8,r10
596 adde r10,r8,r10
602 $UMULH r8,r5,r6
604 adde r10,r8,r10
607 adde r10,r8,r10
613 $UMULH r8,r5,r6
616 adde r11,r8,r11
619 adde r11,r8,r11
625 $UMULH r8,r5,r6
628 adde r11,r8,r11
631 adde r11,r8,r11
637 $UMULH r8,r5,r6
639 adde r11,r8,r11
642 adde r11,r8,r11
648 $UMULH r8,r5,r6
651 adde r11,r8,r11
654 adde r11,r8,r11
659 $UMULH r8,r6,r6
661 adde r9,r8,r9
666 $UMULH r8,r5,r6
668 adde r9,r8,r9
671 adde r9,r8,r9
677 $UMULH r8,r5,r6
679 adde r9,r8,r9
683 adde r9,r8,r9
689 $UMULH r8,r5,r6
691 adde r9,r8,r9
694 adde r9,r8,r9
700 $UMULH r8,r5,r6
703 adde r10,r8,r10
706 adde r10,r8,r10
712 $UMULH r8,r5,r6
714 adde r10,r8,r10
717 adde r10,r8,r10
723 $UMULH r8,r5,r6
725 adde r10,r8,r10
728 adde r10,r8,r10
733 $UMULH r8,r6,r6
735 adde r11,r8,r11
740 $UMULH r8,r5,r6
742 adde r11,r8,r11
745 adde r11,r8,r11
751 $UMULH r8,r5,r6
753 adde r11,r8,r11
756 adde r11,r8,r11
762 $UMULH r8,r5,r6
764 adde r9,r8,r9
767 adde r9,r8,r9
773 $UMULH r8,r5,r6
775 adde r9,r8,r9
778 adde r9,r8,r9
783 $UMULH r8,r6,r6
785 adde r10,r8,r10
790 $UMULH r8,r5,r6
792 adde r10,r8,r10
795 adde r10,r8,r10
802 $UMULH r8,r5,r6
804 adde r11,r8,r11
807 adde r11,r8,r11
812 $UMULH r8,r6,r6
814 adde r9,r8,r9
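
The 415-830 block repeats the same squaring at width 8. The comments at 439/443 name the three-register accumulator: r9, r10, r11 hold the current column and the two above it, and the roles rotate as each column completes and is stored. A hedged sketch of that rotation as a generic comba squaring driver (32-bit words, illustrative names; the off-diagonal product is simply added twice here, which is arithmetically equivalent to the addc/adde doubling):

    #include <stdint.h>
    #include <stddef.h>

    /* Square the n-word number a into the 2n-word result r, n >= 1. */
    static void sqr_comba(uint32_t *r, const uint32_t *a, size_t n)
    {
        uint32_t c1 = 0, c2 = 0, c3 = 0;          /* the r9,r10,r11 roles */
        for (size_t k = 0; k + 1 < 2 * n; k++) {  /* columns 0 .. 2n-2    */
            size_t i = k < n ? 0 : k - n + 1;
            size_t j = k - i;
            for (; i <= j; i++, j--) {
                uint64_t t = (uint64_t)a[i] * a[j];
                int reps = (i == j) ? 1 : 2;      /* off-diagonal: twice  */
                while (reps--) {
                    uint64_t s = (uint64_t)c1 + (uint32_t)t;
                    c1 = (uint32_t)s;
                    s  = (uint64_t)c2 + (uint32_t)(t >> 32)
                       + (uint32_t)(s >> 32);
                    c2 = (uint32_t)s;
                    c3 += (uint32_t)(s >> 32);
                }
            }
            r[k] = c1;                            /* store the column,    */
            c1 = c2; c2 = c3; c3 = 0;             /* rotate the registers */
        }
        r[2 * n - 1] = c1;                        /* top word             */
    }
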
842 # r8, r9 are the results of the 32x32 giving 64 bit multiply.
854 $UMULL r8,r6,r7
856 addc r11,r8,r11
862 $UMULL r8,r6,r7
864 addc r11,r8,r11
870 $UMULL r8,r6,r7
872 addc r12,r8,r12
878 $UMULL r8,r6,r7
880 addc r12,r8,r12
886 $UMULL r8,r6,r7
888 addc r12,r8,r12
894 $UMULL r8,r6,r7
896 addc r10,r8,r10
902 $UMULL r8,r6,r7
904 addc r10,r8,r10
910 $UMULL r8,r6,r7
912 addc r10,r8,r10
918 $UMULL r8,r6,r7
920 addc r10,r8,r10
926 $UMULL r8,r6,r7
928 addc r11,r8,r11
934 $UMULL r8,r6,r7
936 addc r11,r8,r11
942 $UMULL r8,r6,r7
944 addc r11,r8,r11
950 $UMULL r8,r6,r7
952 addc r12,r8,r12
958 $UMULL r8,r6,r7
960 addc r12,r8,r12
966 $UMULL r8,r6,r7
968 addc r10,r8,r10
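
Source lines 842-970 are the width-4 multiply: each $UMULL result is folded into the current column with addc. Only the r8 instructions appear in this listing; the matching $UMULH/adde/addze lines land in r9 and are filtered out. A sketch of the full step, again assuming 32-bit words, with mul_add_c as an illustrative name:

    #include <stdint.h>

    /* Add a*b into the three-word accumulator (c3,c2,c1), c1 lowest. */
    static void mul_add_c(uint32_t a, uint32_t b,
                          uint32_t *c1, uint32_t *c2, uint32_t *c3)
    {
        uint64_t t = (uint64_t)a * b;             /* $UMULL / $UMULH   */
        uint64_t s = (uint64_t)*c1 + (uint32_t)t; /* addc r10,r8,r10   */
        *c1 = (uint32_t)s;
        s = (uint64_t)*c2 + (uint32_t)(t >> 32)   /* adde on the r9    */
          + (uint32_t)(s >> 32);                  /* line, not listed  */
        *c2 = (uint32_t)s;
        *c3 += (uint32_t)(s >> 32);               /* addze: last carry */
    }

The 996-1508 block is the width-8 version of the same step; only the addc operand order differs (r11,r11,r8 versus r11,r8,r11), which does not change the result.
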
996 # r8, r9 are the results of the 32x32 giving 64 bit multiply.
1009 $UMULL r8,r6,r7
1011 addc r11,r11,r8
1017 $UMULL r8,r6,r7
1019 addc r11,r11,r8
1025 $UMULL r8,r6,r7
1027 addc r12,r12,r8
1033 $UMULL r8,r6,r7
1035 addc r12,r12,r8
1041 $UMULL r8,r6,r7
1043 addc r12,r12,r8
1049 $UMULL r8,r6,r7
1051 addc r10,r10,r8
1057 $UMULL r8,r6,r7
1059 addc r10,r10,r8
1066 $UMULL r8,r6,r7
1068 addc r10,r10,r8
1074 $UMULL r8,r6,r7
1076 addc r10,r10,r8
1082 $UMULL r8,r6,r7
1084 addc r11,r11,r8
1090 $UMULL r8,r6,r7
1092 addc r11,r11,r8
1098 $UMULL r8,r6,r7
1100 addc r11,r11,r8
1106 $UMULL r8,r6,r7
1108 addc r11,r11,r8
1114 $UMULL r8,r6,r7
1116 addc r11,r11,r8
1122 $UMULL r8,r6,r7
1124 addc r12,r12,r8
1130 $UMULL r8,r6,r7
1132 addc r12,r12,r8
1138 $UMULL r8,r6,r7
1140 addc r12,r12,r8
1146 $UMULL r8,r6,r7
1148 addc r12,r12,r8
1154 $UMULL r8,r6,r7
1156 addc r12,r12,r8
1162 $UMULL r8,r6,r7
1164 addc r12,r12,r8
1170 $UMULL r8,r6,r7
1172 addc r10,r10,r8
1178 $UMULL r8,r6,r7
1180 addc r10,r10,r8
1186 $UMULL r8,r6,r7
1188 addc r10,r10,r8
1194 $UMULL r8,r6,r7
1196 addc r10,r10,r8
1202 $UMULL r8,r6,r7
1204 addc r10,r10,r8
1210 $UMULL r8,r6,r7
1212 addc r10,r10,r8
1218 $UMULL r8,r6,r7
1220 addc r10,r10,r8
1226 $UMULL r8,r6,r7
1228 addc r11,r11,r8
1234 $UMULL r8,r6,r7
1236 addc r11,r11,r8
1242 $UMULL r8,r6,r7
1244 addc r11,r11,r8
1250 $UMULL r8,r6,r7
1252 addc r11,r11,r8
1258 $UMULL r8,r6,r7
1260 addc r11,r11,r8
1266 $UMULL r8,r6,r7
1268 addc r11,r11,r8
1274 $UMULL r8,r6,r7
1276 addc r11,r11,r8
1282 $UMULL r8,r6,r7
1284 addc r11,r11,r8
1290 $UMULL r8,r6,r7
1292 addc r12,r12,r8
1298 $UMULL r8,r6,r7
1300 addc r12,r12,r8
1306 $UMULL r8,r6,r7
1308 addc r12,r12,r8
1314 $UMULL r8,r6,r7
1316 addc r12,r12,r8
1322 $UMULL r8,r6,r7
1324 addc r12,r12,r8
1330 $UMULL r8,r6,r7
1332 addc r12,r12,r8
1338 $UMULL r8,r6,r7
1340 addc r12,r12,r8
1346 $UMULL r8,r6,r7
1348 addc r10,r10,r8
1354 $UMULL r8,r6,r7
1356 addc r10,r10,r8
1362 $UMULL r8,r6,r7
1364 addc r10,r10,r8
1370 $UMULL r8,r6,r7
1372 addc r10,r10,r8
1378 $UMULL r8,r6,r7
1380 addc r10,r10,r8
1386 $UMULL r8,r6,r7
1388 addc r10,r10,r8
1394 $UMULL r8,r6,r7
1396 addc r11,r11,r8
1402 $UMULL r8,r6,r7
1404 addc r11,r11,r8
1410 $UMULL r8,r6,r7
1412 addc r11,r11,r8
1418 $UMULL r8,r6,r7
1420 addc r11,r11,r8
1426 $UMULL r8,r6,r7
1428 addc r11,r11,r8
1434 $UMULL r8,r6,r7
1436 addc r12,r12,r8
1442 $UMULL r8,r6,r7
1444 addc r12,r12,r8
1450 $UMULL r8,r6,r7
1452 addc r12,r12,r8
1458 $UMULL r8,r6,r7
1460 addc r12,r12,r8
1466 $UMULL r8,r6,r7
1468 addc r10,r10,r8
1474 $UMULL r8,r6,r7
1476 addc r10,r10,r8
1482 $UMULL r8,r6,r7
1484 addc r10,r10,r8
1490 $UMULL r8,r6,r7
1492 addc r11,r11,r8
1498 $UMULL r8,r6,r7
1500 addc r11,r11,r8
1506 $UMULL r8,r6,r7
1508 addc r12,r12,r8
1554 $LDU r8,$BNSZ(r5)
1555 subfe r6,r8,r7 # r6 = r7 + carry bit + ones-complement(r8)
1556 # if carry = 1 this is r7-r8. Else it
1557 # is r7-r8-1, as we need.
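
subfe computes r6 = r7 + ~r8 + CA, so PowerPC carries an inverted borrow: CA = 1 means no borrow pending. A C sketch of the word-subtract loop this $LDU/subfe pair sits in, assuming 32-bit words; sub_words and its signature are illustrative:

    #include <stdint.h>
    #include <stddef.h>

    /* r = a - b over n words; returns 1 if the subtraction borrowed. */
    static uint32_t sub_words(uint32_t *r, const uint32_t *a,
                              const uint32_t *b, size_t n)
    {
        uint32_t ca = 1;                   /* CA=1: no borrow pending   */
        for (size_t i = 0; i < n; i++) {
            uint64_t t = (uint64_t)a[i] + (uint32_t)~b[i] + ca; /* subfe */
            r[i] = (uint32_t)t;
            ca   = (uint32_t)(t >> 32);    /* carry out -> next CA      */
        }
        return ca ^ 1;                     /* invert back to a borrow   */
    }
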
1603 $LDU r8,$BNSZ(r5)
1604 adde r8,r7,r8
1605 $STU r8,$BNSZ(r3)
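
The $LDU/adde/$STU triple at 1603-1605 is the body of a word-add loop: load the next word of each operand with update addressing, add with the incoming CA bit, store, and let the carry out feed the next iteration. A sketch under the same 32-bit-word assumption, with add_words as an illustrative name:

    #include <stdint.h>
    #include <stddef.h>

    /* r = a + b over n words; returns the final carry. */
    static uint32_t add_words(uint32_t *r, const uint32_t *a,
                              const uint32_t *b, size_t n)
    {
        uint32_t ca = 0;
        for (size_t i = 0; i < n; i++) {
            uint64_t t = (uint64_t)a[i] + b[i] + ca;  /* adde r8,r7,r8 */
            r[i] = (uint32_t)t;                       /* $STU          */
            ca   = (uint32_t)(t >> 32);
        }
        return ca;
    }
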
1641 li r8,$BITS
1644 subf r8,r7,r8 #r8 = BN_num_bits_word(d)
1645 $SHR. r9,r3,r8 # are there any bits above the r8'th?
1655 $SHR r8,r4,r8 # r8 = (l >> (BN_BITS2-i))
1657 or r3,r3,r8 # h = (h<<i)|(l>>(BN_BITS2-i))
1666 $SHRI r8,r3,`$BITS/2` #r8 = (h>>BN_BITS4)
1669 $UCMP 0,r8,r9 # is (h>>BN_BITS4)==dh
1672 li r8,-1
1673 $CLRU r8,r8,`$BITS/2` #q = BN_MASK2l
1676 $UDIV r8,r3,r9 #q = h/dh
1678 $UMULL r12,r9,r8 #th = q*dh
1680 $UMULL r6,r8,r10 #tl = q*dl
1694 addi r8,r8,-1 #q--
1710 addi r8,r8,-1 # q--
1721 $SHLI r0,r8,`$BITS/2` #ret = q<<BN_BITS4
1724 or r3,r8,r0
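
Source lines 1641-1724 implement the classic two-half word division: count the divisor's bits (1641-1644) and normalize it so its top bit is set (the shifts at 1655-1657), split it into halves dh and dl, estimate each half-quotient as q = h/dh, clamp q to the half-word mask when the top halves are equal (1669-1673), then decrement q (1694, 1710) until q*dh and q*dl fit under the running remainder. One estimate-and-correct round in C, assuming 32-bit words so BN_BITS4 is 16; div_estimate is an illustrative name, not the routine's interface:

    #include <stdint.h>

    /* Estimate the quotient of the 48-bit remainder (h, l>>16) by the
     * normalized divisor d, then correct it downward.  q <= 0xffff.  */
    static uint32_t div_estimate(uint32_t h, uint32_t l, uint32_t d)
    {
        uint32_t dh = d >> 16, dl = d & 0xffff;   /* divisor halves    */
        uint32_t q  = ((h >> 16) == dh)           /* $UCMP at 1669     */
                    ? 0xffff                      /* q = BN_MASK2l     */
                    : h / dh;                     /* $UDIV at 1676     */

        for (;;) {
            uint32_t th  = q * dh;                /* th = q*dh (1678)  */
            uint32_t tl  = q * dl;                /* tl = q*dl (1680)  */
            uint32_t rem = h - th;
            if (rem >> 16)                        /* remainder large:  */
                break;                            /* q*dl must fit     */
            if (tl <= ((rem << 16) | (l >> 16)))  /* q*dl fits too     */
                break;
            q--;                                  /* addi r8,r8,-1     */
        }
        return q;
    }

The routine runs two such rounds, one per half of the quotient, and joins them with the shift-and-or at 1721-1724.
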
1749 # r7,r8 = product.
1762 $UMULH r8,r6,r6
1764 $STU r8,$BNSZ(r3)
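
The pair at 1762-1764 is per-word squaring: each input word's 64-bit square comes out of $UMULL/$UMULH and is stored as two consecutive output words. A sketch (32-bit words, illustrative sqr_words name):

    #include <stdint.h>
    #include <stddef.h>

    /* r[2i], r[2i+1] = low and high words of a[i]^2. */
    static void sqr_words(uint32_t *r, const uint32_t *a, size_t n)
    {
        for (size_t i = 0; i < n; i++) {
            uint64_t t = (uint64_t)a[i] * a[i];  /* $UMULL r7, $UMULH r8 */
            r[2 * i]     = (uint32_t)t;          /* $STU of r7           */
            r[2 * i + 1] = (uint32_t)(t >> 32);  /* $STU of r8           */
        }
    }
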
1796 $LD r8,`0*$BNSZ`(r4)
1797 $UMULL r9,r6,r8
1798 $UMULH r10,r6,r8
1806 $LD r8,`1*$BNSZ`(r4)
1807 $UMULL r11,r6,r8
1808 $UMULH r12,r6,r8
1813 $LD r8,`2*$BNSZ`(r4)
1814 $UMULL r9,r6,r8
1815 $UMULH r10,r6,r8
1820 $LD r8,`3*$BNSZ`(r4)
1821 $UMULL r11,r6,r8
1822 $UMULH r12,r6,r8
1836 $LD r8,`0*$BNSZ`(r4)
1837 $UMULL r9,r6,r8
1838 $UMULH r10,r6,r8
1850 $LD r8,`1*$BNSZ`(r4)
1851 $UMULL r9,r6,r8
1852 $UMULH r10,r6,r8
1863 $LD r8,`2*$BNSZ`(r4)
1864 $UMULL r9,r6,r8
1865 $UMULH r10,r6,r8
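
Source lines 1796-1865 multiply an array by a single word: $UMULL/$UMULH produce each 64-bit partial product (the unrolled body alternates between the r9/r10 and r11/r12 pairs), and each step's high half is carried into the next step's low half. The equivalent loop, with 32-bit words and an illustrative mul_words name assumed:

    #include <stdint.h>
    #include <stddef.h>

    /* r = a * w over n words; returns the top word of the product. */
    static uint32_t mul_words(uint32_t *r, const uint32_t *a,
                              size_t n, uint32_t w)
    {
        uint32_t carry = 0;
        for (size_t i = 0; i < n; i++) {
            uint64_t t = (uint64_t)a[i] * w + carry;  /* $UMULL/$UMULH */
            r[i]  = (uint32_t)t;
            carry = (uint32_t)(t >> 32);              /* ripple upward */
        }
        return carry;
    }
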
1905 $LD r8,`0*$BNSZ`(r4)
1907 $UMULL r9,r6,r8
1908 $UMULH r10,r6,r8
1921 $LD r8,`1*$BNSZ`(r4)
1923 $UMULL r11,r6,r8
1924 $UMULH r12,r6,r8
1932 $LD r8,`2*$BNSZ`(r4)
1933 $UMULL r9,r6,r8
1935 $UMULH r10,r6,r8
1943 $LD r8,`3*$BNSZ`(r4)
1944 $UMULL r11,r6,r8
1946 $UMULH r12,r6,r8
1963 $LDU r8,$BNSZ(r4)
1964 $UMULL r9,r6,r8
1965 $UMULH r10,r6,r8
1975 $LDU r8,$BNSZ(r4)
1976 $UMULL r9,r6,r8
1977 $UMULH r10,r6,r8
1987 $LDU r8,$BNSZ(r4)
1988 $UMULL r9,r6,r8
1989 $UMULH r10,r6,r8
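
The final region, 1905-1989, is the multiply-accumulate variant: the same per-word product, but the existing destination word is added in as well, so the effect is r += a*w with the top-word carry returned. A sketch under the same assumptions, mul_add_words being an illustrative name:

    #include <stdint.h>
    #include <stddef.h>

    /* r += a * w over n words; returns the final carry word.
     * a[i]*w + r[i] + carry <= 2^64 - 1, so a uint64_t suffices. */
    static uint32_t mul_add_words(uint32_t *r, const uint32_t *a,
                                  size_t n, uint32_t w)
    {
        uint32_t carry = 0;
        for (size_t i = 0; i < n; i++) {
            uint64_t t = (uint64_t)a[i] * w + r[i] + carry;
            r[i]  = (uint32_t)t;                 /* low half back to r  */
            carry = (uint32_t)(t >> 32);         /* high half moves on  */
        }
        return carry;
    }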