Directory: /netgear-WNDR4500v2-V1.0.0.60_1.0.38/ap/gpl/timemachine/openssl-0.9.8e/crypto/bn/asm/

Lines Matching refs:r8

308 .set r8,8
365 # Freely use registers r5,r6,r7,r8,r9,r10,r11 as follows:
368 # r7,r8 are the results of the 32x32 giving 64 bit multiply.
388 $UMULH r8,r5,r6
390 addc r7,r7,r7 # compute (r7,r8)=2*(r7,r8)
391 adde r8,r8,r8
396 addze r11,r8 # r8 added to r11 which is 0
402 $UMULH r8,r6,r6
404 adde r9,r8,r9
409 $UMULH r8,r5,r6
412 adde r8,r8,r8
416 adde r9,r8,r9
422 $UMULH r8,r5,r6
424 adde r8,r8,r8
428 adde r10,r8,r10
434 $UMULH r8,r5,r6
437 adde r8,r8,r8
440 adde r10,r8,r10
445 $UMULH r8,r6,r6
447 adde r11,r8,r11
452 $UMULH r8,r5,r6
454 adde r8,r8,r8
458 adde r11,r8,r11
464 $UMULH r8,r5,r6
466 adde r8,r8,r8
470 adde r9,r8,r9
475 $UMULH r8,r6,r6
477 adde r10,r8,r10
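
Note: the matches above (source lines 365-477) come from a comba-style squaring routine. As the comments at 365-368 say, each $UMULL/$UMULH pair produces the low and high halves of a 32x32 giving 64-bit product in r7/r8; cross products are doubled in place with addc/adde (lines 390-391), and the carry is folded into the next accumulator word with addze (line 396). A minimal C sketch of the same arithmetic, assuming 32-bit words; mul32x32 is an illustrative helper, not something from this source tree:

    #include <stdint.h>
    #include <stdio.h>

    /* Illustrative helper: the C analogue of a $UMULL/$UMULH pair.
     * *lo gets the low 32 bits of a*b and *hi the high 32 bits,
     * just as r7/r8 do in the comments above. */
    static void mul32x32(uint32_t a, uint32_t b, uint32_t *lo, uint32_t *hi)
    {
        uint64_t p = (uint64_t)a * (uint64_t)b;   /* full 64-bit product */
        *lo = (uint32_t)p;
        *hi = (uint32_t)(p >> 32);
    }

    int main(void)
    {
        uint32_t lo, hi;

        /* Largest 32-bit square: 0xFFFFFFFF^2 = 0xFFFFFFFE00000001. */
        mul32x32(0xFFFFFFFFu, 0xFFFFFFFFu, &lo, &hi);
        printf("hi=%08x lo=%08x\n", hi, lo);        /* hi=fffffffe lo=00000001 */

        /* Lines 390-391 double such a (hi,lo) pair in place: addc adds the
         * low word to itself and records the carry, adde adds the high word
         * to itself plus that carry, and addze catches any carry that falls
         * out of the high word. */
        uint32_t lo2  = lo + lo;                    /* addc r7,r7,r7 */
        uint32_t cin  = (lo2 < lo);                 /* carry out of the low word */
        uint32_t hi2  = hi + hi + cin;              /* adde r8,r8,r8 */
        uint32_t cout = hi >> 31;                   /* what addze would pick up */
        printf("2x: carry=%u hi=%08x lo=%08x\n", cout, hi2, lo2);
        return 0;
    }
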
502 # Freely use registers r5,r6,r7,r8,r9,r10,r11 as follows:
505 # r7,r8 are the results of the 32x32 giving 64 bit multiply.
523 $UMULH r8,r5,r6
526 adde r11,r8,r0 # (r8,r7) to the three register
530 adde r11,r8,r11 # (r8,r7) to the three register
537 $UMULH r8,r6,r6
539 adde r9,r8,r9
544 $UMULH r8,r5,r6
547 adde r9,r8,r9
551 adde r9,r8,r9
558 $UMULH r8,r5,r6
561 adde r10,r8,r10
565 adde r10,r8,r10
571 $UMULH r8,r5,r6
574 adde r10,r8,r10
578 adde r10,r8,r10
584 $UMULH r8,r6,r6
587 adde r11,r8,r11
592 $UMULH r8,r5,r6
595 adde r11,r8,r11
599 adde r11,r8,r11
605 $UMULH r8,r5,r6
608 adde r11,r8,r11
612 adde r11,r8,r11
618 $UMULH r8,r5,r6
621 adde r9,r8,r9
625 adde r9,r8,r9
631 $UMULH r8,r5,r6
634 adde r9,r8,r9
638 adde r9,r8,r9
644 $UMULH r8,r5,r6
647 adde r9,r8,r9
651 adde r9,r8,r9
656 $UMULH r8,r6,r6
658 adde r10,r8,r10
663 $UMULH r8,r5,r6
666 adde r10,r8,r10
670 adde r10,r8,r10
676 $UMULH r8,r5,r6
679 adde r10,r8,r10
683 adde r10,r8,r10
689 $UMULH r8,r5,r6
691 adde r10,r8,r10
694 adde r10,r8,r10
700 $UMULH r8,r5,r6
703 adde r11,r8,r11
706 adde r11,r8,r11
712 $UMULH r8,r5,r6
715 adde r11,r8,r11
718 adde r11,r8,r11
724 $UMULH r8,r5,r6
726 adde r11,r8,r11
729 adde r11,r8,r11
735 $UMULH r8,r5,r6
738 adde r11,r8,r11
741 adde r11,r8,r11
746 $UMULH r8,r6,r6
748 adde r9,r8,r9
753 $UMULH r8,r5,r6
755 adde r9,r8,r9
758 adde r9,r8,r9
764 $UMULH r8,r5,r6
766 adde r9,r8,r9
770 adde r9,r8,r9
776 $UMULH r8,r5,r6
778 adde r9,r8,r9
781 adde r9,r8,r9
787 $UMULH r8,r5,r6
790 adde r10,r8,r10
793 adde r10,r8,r10
799 $UMULH r8,r5,r6
801 adde r10,r8,r10
804 adde r10,r8,r10
810 $UMULH r8,r5,r6
812 adde r10,r8,r10
815 adde r10,r8,r10
820 $UMULH r8,r6,r6
822 adde r11,r8,r11
827 $UMULH r8,r5,r6
829 adde r11,r8,r11
832 adde r11,r8,r11
838 $UMULH r8,r5,r6
840 adde r11,r8,r11
843 adde r11,r8,r11
849 $UMULH r8,r5,r6
851 adde r9,r8,r9
854 adde r9,r8,r9
860 $UMULH r8,r5,r6
862 adde r9,r8,r9
865 adde r9,r8,r9
870 $UMULH r8,r6,r6
872 adde r10,r8,r10
877 $UMULH r8,r5,r6
879 adde r10,r8,r10
882 adde r10,r8,r10
889 $UMULH r8,r5,r6
891 adde r11,r8,r11
894 adde r11,r8,r11
899 $UMULH r8,r6,r6
901 adde r9,r8,r9
927 # r8, r9 are the results of the 32x32 giving 64 multiply.
939 $UMULL r8,r6,r7
941 addc r11,r8,r11
947 $UMULL r8,r6,r7
949 addc r11,r8,r11
955 $UMULL r8,r6,r7
957 addc r12,r8,r12
963 $UMULL r8,r6,r7
965 addc r12,r8,r12
971 $UMULL r8,r6,r7
973 addc r12,r8,r12
979 $UMULL r8,r6,r7
981 addc r10,r8,r10
987 $UMULL r8,r6,r7
989 addc r10,r8,r10
995 $UMULL r8,r6,r7
997 addc r10,r8,r10
1003 $UMULL r8,r6,r7
1005 addc r10,r8,r10
1011 $UMULL r8,r6,r7
1013 addc r11,r8,r11
1019 $UMULL r8,r6,r7
1021 addc r11,r8,r11
1027 $UMULL r8,r6,r7
1029 addc r11,r8,r11
1035 $UMULL r8,r6,r7
1037 addc r12,r8,r12
1043 $UMULL r8,r6,r7
1045 addc r12,r8,r12
1051 $UMULL r8,r6,r7
1053 addc r10,r8,r10
1078 # r8, r9 are the results of the 32x32 giving 64 multiply.
1091 $UMULL r8,r6,r7
1093 addc r11,r11,r8
1099 $UMULL r8,r6,r7
1101 addc r11,r11,r8
1107 $UMULL r8,r6,r7
1109 addc r12,r12,r8
1115 $UMULL r8,r6,r7
1117 addc r12,r12,r8
1123 $UMULL r8,r6,r7
1125 addc r12,r12,r8
1131 $UMULL r8,r6,r7
1133 addc r10,r10,r8
1139 $UMULL r8,r6,r7
1141 addc r10,r10,r8
1148 $UMULL r8,r6,r7
1150 addc r10,r10,r8
1156 $UMULL r8,r6,r7
1158 addc r10,r10,r8
1164 $UMULL r8,r6,r7
1166 addc r11,r11,r8
1172 $UMULL r8,r6,r7
1174 addc r11,r11,r8
1180 $UMULL r8,r6,r7
1182 addc r11,r11,r8
1188 $UMULL r8,r6,r7
1190 addc r11,r11,r8
1196 $UMULL r8,r6,r7
1198 addc r11,r11,r8
1204 $UMULL r8,r6,r7
1206 addc r12,r12,r8
1212 $UMULL r8,r6,r7
1214 addc r12,r12,r8
1220 $UMULL r8,r6,r7
1222 addc r12,r12,r8
1228 $UMULL r8,r6,r7
1230 addc r12,r12,r8
1236 $UMULL r8,r6,r7
1238 addc r12,r12,r8
1244 $UMULL r8,r6,r7
1246 addc r12,r12,r8
1252 $UMULL r8,r6,r7
1254 addc r10,r10,r8
1260 $UMULL r8,r6,r7
1262 addc r10,r10,r8
1268 $UMULL r8,r6,r7
1270 addc r10,r10,r8
1276 $UMULL r8,r6,r7
1278 addc r10,r10,r8
1284 $UMULL r8,r6,r7
1286 addc r10,r10,r8
1292 $UMULL r8,r6,r7
1294 addc r10,r10,r8
1300 $UMULL r8,r6,r7
1302 addc r10,r10,r8
1308 $UMULL r8,r6,r7
1310 addc r11,r11,r8
1316 $UMULL r8,r6,r7
1318 addc r11,r11,r8
1324 $UMULL r8,r6,r7
1326 addc r11,r11,r8
1332 $UMULL r8,r6,r7
1334 addc r11,r11,r8
1340 $UMULL r8,r6,r7
1342 addc r11,r11,r8
1348 $UMULL r8,r6,r7
1350 addc r11,r11,r8
1356 $UMULL r8,r6,r7
1358 addc r11,r11,r8
1364 $UMULL r8,r6,r7
1366 addc r11,r11,r8
1372 $UMULL r8,r6,r7
1374 addc r12,r12,r8
1380 $UMULL r8,r6,r7
1382 addc r12,r12,r8
1388 $UMULL r8,r6,r7
1390 addc r12,r12,r8
1396 $UMULL r8,r6,r7
1398 addc r12,r12,r8
1404 $UMULL r8,r6,r7
1406 addc r12,r12,r8
1412 $UMULL r8,r6,r7
1414 addc r12,r12,r8
1420 $UMULL r8,r6,r7
1422 addc r12,r12,r8
1428 $UMULL r8,r6,r7
1430 addc r10,r10,r8
1436 $UMULL r8,r6,r7
1438 addc r10,r10,r8
1444 $UMULL r8,r6,r7
1446 addc r10,r10,r8
1452 $UMULL r8,r6,r7
1454 addc r10,r10,r8
1460 $UMULL r8,r6,r7
1462 addc r10,r10,r8
1468 $UMULL r8,r6,r7
1470 addc r10,r10,r8
1476 $UMULL r8,r6,r7
1478 addc r11,r11,r8
1484 $UMULL r8,r6,r7
1486 addc r11,r11,r8
1492 $UMULL r8,r6,r7
1494 addc r11,r11,r8
1500 $UMULL r8,r6,r7
1502 addc r11,r11,r8
1508 $UMULL r8,r6,r7
1510 addc r11,r11,r8
1516 $UMULL r8,r6,r7
1518 addc r12,r12,r8
1524 $UMULL r8,r6,r7
1526 addc r12,r12,r8
1532 $UMULL r8,r6,r7
1534 addc r12,r12,r8
1540 $UMULL r8,r6,r7
1542 addc r12,r12,r8
1548 $UMULL r8,r6,r7
1550 addc r10,r10,r8
1556 $UMULL r8,r6,r7
1558 addc r10,r10,r8
1564 $UMULL r8,r6,r7
1566 addc r10,r10,r8
1572 $UMULL r8,r6,r7
1574 addc r11,r11,r8
1580 $UMULL r8,r6,r7
1582 addc r11,r11,r8
1588 $UMULL r8,r6,r7
1590 addc r12,r12,r8
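
Note: source lines 927-1590 above are the comba multiply routines. Per the comment at 927, each $UMULL/$UMULH pair leaves a 32x32 giving 64-bit partial product in r8/r9, and the addc instructions fold the low half (r8) into a three-word accumulator that rotates through r10, r11 and r12; the high half and carry ripple onward via adde/addze on lines that do not mention r8 and so are not listed here. A rough C sketch of one such accumulation step, assuming 32-bit words; acc96 and acc96_add_mul are illustrative names:

    #include <stdint.h>
    #include <stdio.h>

    /* Three 32-bit words, acc0 lowest, standing in for the rotating
     * r10/r11/r12 accumulator of the assembly above. */
    typedef struct { uint32_t acc0, acc1, acc2; } acc96;

    /* Fold one 32x32 -> 64-bit partial product into the accumulator,
     * mimicking the addc (add low, set carry) / adde (add high plus
     * carry) / addze (absorb the last carry) sequence. */
    static void acc96_add_mul(acc96 *acc, uint32_t a, uint32_t b)
    {
        uint64_t p  = (uint64_t)a * (uint64_t)b;
        uint32_t lo = (uint32_t)p, hi = (uint32_t)(p >> 32);

        uint64_t s0 = (uint64_t)acc->acc0 + lo;               /* addc  */
        uint64_t s1 = (uint64_t)acc->acc1 + hi + (s0 >> 32);  /* adde  */
        uint64_t s2 = (uint64_t)acc->acc2 + (s1 >> 32);       /* addze */

        acc->acc0 = (uint32_t)s0;
        acc->acc1 = (uint32_t)s1;
        acc->acc2 = (uint32_t)s2;
    }

    int main(void)
    {
        /* Two worst-case partial products landing in the same column. */
        acc96 acc = {0, 0, 0};
        acc96_add_mul(&acc, 0xFFFFFFFFu, 0xFFFFFFFFu);
        acc96_add_mul(&acc, 0xFFFFFFFFu, 0xFFFFFFFFu);
        printf("%08x %08x %08x\n", acc.acc2, acc.acc1, acc.acc0);
        /* prints 00000001 fffffffc 00000002 */
        return 0;
    }
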
1633 $LDU r8,$BNSZ(r5)
1634 subfe r6,r8,r7 # r6 = r7+carry bit + onescomplement(r8)
1635 # if carry = 1 this is r7-r8. Else it
1636 # is r7-r8 -1 as we need.
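
Note: source lines 1633-1636 describe the borrow trick used in word-array subtraction: subfe computes r7 + CA + ~r8, which is r7 - r8 when the incoming carry bit is 1 (no borrow pending) and r7 - r8 - 1 when it is 0, so the carry bit threads the borrow from word to word. A hedged C sketch of the same borrow propagation, assuming 32-bit words; sub_words_sketch is an illustrative name, not the routine in this file:

    #include <stdint.h>
    #include <stddef.h>
    #include <stdio.h>

    /* Subtract b[] from a[] word by word, threading the borrow the same
     * way the carry bit threads through the subfe chain above.
     * Returns the final borrow: 1 when b is larger than a. */
    static uint32_t sub_words_sketch(uint32_t *r, const uint32_t *a,
                                     const uint32_t *b, size_t n)
    {
        uint32_t borrow = 0;   /* the assembly enters with CA set, i.e. no borrow */
        for (size_t i = 0; i < n; i++) {
            uint64_t d = (uint64_t)a[i] - b[i] - borrow;
            r[i]   = (uint32_t)d;
            borrow = (uint32_t)(d >> 63);   /* 1 if this word had to borrow */
        }
        return borrow;
    }

    int main(void)
    {
        uint32_t a[2] = { 0x00000000u, 0x00000001u };   /* 0x1_00000000 */
        uint32_t b[2] = { 0x00000001u, 0x00000000u };   /* 0x0_00000001 */
        uint32_t r[2];
        uint32_t borrow = sub_words_sketch(r, a, b, 2);
        printf("borrow=%u r=%08x%08x\n", borrow, r[1], r[0]);
        /* prints borrow=0 r=00000000ffffffff */
        return 0;
    }
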
1680 $LDU r8,$BNSZ(r5)
1681 adde r8,r7,r8
1682 $STU r8,$BNSZ(r3)
1715 li r8,$BITS
1718 subf r8,r7,r8 #r8 = BN_num_bits_word(d)
1719 $SHR. r9,r3,r8 #are there any bits above r8'th?
1729 $SHR r8,r4,r8 # r8 = (l >> BN_BITS2 -i)
1731 or r3,r3,r8 # h = (h<<i)|(l>>(BN_BITS2-i))
1740 $SHRI r8,r3,`$BITS/2` #r8 = (h>>BN_BITS4)
1743 $UCMP 0,r8,r9 # is (h>>BN_BITS4)==dh
1746 li r8,-1
1747 $CLRU r8,r8,`$BITS/2` #q = BN_MASK2l
1750 $UDIV r8,r3,r9 #q = h/dh
1752 $UMULL r12,r9,r8 #th = q*dh
1754 $UMULL r6,r8,r10 #tl = q*dl
1768 addi r8,r8,-1 #q--
1784 addi r8,r8,-1 # q--
1795 $SHLI r0,r8,`$BITS/2` #ret =q<<BN_BITS4
1798 or r3,r8,r0
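
Note: source lines 1715-1798 appear to divide a two-word value by a single word using half-word digits: the inputs are normalized (lines 1715-1731), a quotient digit is estimated as h/dh from the top halves, or saturated to BN_MASK2l when the top half of h equals dh (lines 1740-1750), then decremented (the addi r8,r8,-1 lines) until q*dl fits under what remains after subtracting q*dh, and the two half-word digits are finally packed as (q << BN_BITS4) | q_low (lines 1795-1798). A rough C illustration of that estimate-and-correct step, assuming 32-bit words and a normalized divisor; quotient_digit is an illustrative name, not the routine in this file:

    #include <stdint.h>
    #include <stdio.h>

    /* One half-word quotient digit, in the spirit of the assembly above:
     * h:l is the two-word numerator being consumed from the top,
     * d is the normalized one-word divisor. */
    static uint32_t quotient_digit(uint32_t h, uint32_t l, uint32_t d)
    {
        uint32_t dh = d >> 16;       /* top half of the divisor              */
        uint32_t dl = d & 0xFFFFu;   /* bottom half                          */
        uint32_t q;

        if ((h >> 16) == dh)         /* $UCMP/$CLRU path: saturate the guess */
            q = 0xFFFFu;             /*   q = BN_MASK2l                      */
        else
            q = h / dh;              /* $UDIV r8,r3,r9:  q = h/dh            */

        for (;;) {
            uint32_t th = q * dh;    /* $UMULL r12,r9,r8:  th = q*dh         */
            uint32_t tl = q * dl;    /* $UMULL r6,r8,r10:  tl = q*dl         */
            uint32_t t  = h - th;    /* what is left after removing q*dh     */
            if ((t & 0xFFFF0000u) || /* plenty of slack left, or ...         */
                tl <= ((t << 16) | (l >> 16)))  /* ... q*dl still fits       */
                break;
            q--;                     /* addi r8,r8,-1  # q--                 */
        }
        return q;
    }

    int main(void)
    {
        /* 0x4000000000000000 / 0x80000000 = 0x80000000, so the first
         * half-word digit of the quotient is 0x8000. */
        printf("%04x\n", quotient_digit(0x40000000u, 0x00000000u, 0x80000000u));
        return 0;
    }
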
1820 # r7,r8 = product.
1833 $UMULH r8,r6,r6
1835 $STU r8,$BNSZ(r3)
1865 $LD r8,`0*$BNSZ`(r4)
1866 $UMULL r9,r6,r8
1867 $UMULH r10,r6,r8
1875 $LD r8,`1*$BNSZ`(r4)
1876 $UMULL r11,r6,r8
1877 $UMULH r12,r6,r8
1882 $LD r8,`2*$BNSZ`(r4)
1883 $UMULL r9,r6,r8
1884 $UMULH r10,r6,r8
1889 $LD r8,`3*$BNSZ`(r4)
1890 $UMULL r11,r6,r8
1891 $UMULH r12,r6,r8
1905 $LD r8,`0*$BNSZ`(r4)
1906 $UMULL r9,r6,r8
1907 $UMULH r10,r6,r8
1919 $LD r8,`1*$BNSZ`(r4)
1920 $UMULL r9,r6,r8
1921 $UMULH r10,r6,r8
1932 $LD r8,`2*$BNSZ`(r4)
1933 $UMULL r9,r6,r8
1934 $UMULH r10,r6,r8
1971 $LD r8,`0*$BNSZ`(r4)
1973 $UMULL r9,r6,r8
1974 $UMULH r10,r6,r8
1987 $LD r8,`1*$BNSZ`(r4)
1989 $UMULL r11,r6,r8
1990 $UMULH r12,r6,r8
1998 $LD r8,`2*$BNSZ`(r4)
1999 $UMULL r9,r6,r8
2001 $UMULH r10,r6,r8
2009 $LD r8,`3*$BNSZ`(r4)
2010 $UMULL r11,r6,r8
2012 $UMULH r12,r6,r8
2029 $LDU r8,$BNSZ(r4)
2030 $UMULL r9,r6,r8
2031 $UMULH r10,r6,r8
2041 $LDU r8,$BNSZ(r4)
2042 $UMULL r9,r6,r8
2043 $UMULH r10,r6,r8
2053 $LDU r8,$BNSZ(r4)
2054 $UMULL r9,r6,r8
2055 $UMULH r10,r6,r8