• Home
  • History
  • Annotate
  • Raw
  • Download
  • only in /netgear-WNDR4500v2-V1.0.0.60_1.0.38/ap/gpl/openssl/crypto/bn/asm/

Lines Matching refs:r8

306 .set r8,8
363 # Freely use registers r5,r6,r7,r8,r9,r10,r11 as follows:
366 # r7,r8 are the results of the 32x32 giving 64 bit multiply.
386 $UMULH r8,r5,r6
388 addc r7,r7,r7 # compute (r7,r8)=2*(r7,r8)
389 adde r8,r8,r8
394 addze r11,r8 # r8 added to r11 which is 0
400 $UMULH r8,r6,r6
402 adde r9,r8,r9
407 $UMULH r8,r5,r6
410 adde r8,r8,r8
414 adde r9,r8,r9
420 $UMULH r8,r5,r6
422 adde r8,r8,r8
426 adde r10,r8,r10
432 $UMULH r8,r5,r6
435 adde r8,r8,r8
438 adde r10,r8,r10
443 $UMULH r8,r6,r6
445 adde r11,r8,r11
450 $UMULH r8,r5,r6
452 adde r8,r8,r8
456 adde r11,r8,r11
462 $UMULH r8,r5,r6
464 adde r8,r8,r8
468 adde r9,r8,r9
473 $UMULH r8,r6,r6
475 adde r10,r8,r10
500 # Freely use registers r5,r6,r7,r8,r9,r10,r11 as follows:
503 # r7,r8 are the results of the 32x32 giving 64 bit multiply.
521 $UMULH r8,r5,r6
524 adde r11,r8,r0 # (r8,r7) to the three register
528 adde r11,r8,r11 # (r8,r7) to the three register
535 $UMULH r8,r6,r6
537 adde r9,r8,r9
542 $UMULH r8,r5,r6
545 adde r9,r8,r9
549 adde r9,r8,r9
556 $UMULH r8,r5,r6
559 adde r10,r8,r10
563 adde r10,r8,r10
569 $UMULH r8,r5,r6
572 adde r10,r8,r10
576 adde r10,r8,r10
582 $UMULH r8,r6,r6
585 adde r11,r8,r11
590 $UMULH r8,r5,r6
593 adde r11,r8,r11
597 adde r11,r8,r11
603 $UMULH r8,r5,r6
606 adde r11,r8,r11
610 adde r11,r8,r11
616 $UMULH r8,r5,r6
619 adde r9,r8,r9
623 adde r9,r8,r9
629 $UMULH r8,r5,r6
632 adde r9,r8,r9
636 adde r9,r8,r9
642 $UMULH r8,r5,r6
645 adde r9,r8,r9
649 adde r9,r8,r9
654 $UMULH r8,r6,r6
656 adde r10,r8,r10
661 $UMULH r8,r5,r6
664 adde r10,r8,r10
668 adde r10,r8,r10
674 $UMULH r8,r5,r6
677 adde r10,r8,r10
681 adde r10,r8,r10
687 $UMULH r8,r5,r6
689 adde r10,r8,r10
692 adde r10,r8,r10
698 $UMULH r8,r5,r6
701 adde r11,r8,r11
704 adde r11,r8,r11
710 $UMULH r8,r5,r6
713 adde r11,r8,r11
716 adde r11,r8,r11
722 $UMULH r8,r5,r6
724 adde r11,r8,r11
727 adde r11,r8,r11
733 $UMULH r8,r5,r6
736 adde r11,r8,r11
739 adde r11,r8,r11
744 $UMULH r8,r6,r6
746 adde r9,r8,r9
751 $UMULH r8,r5,r6
753 adde r9,r8,r9
756 adde r9,r8,r9
762 $UMULH r8,r5,r6
764 adde r9,r8,r9
768 adde r9,r8,r9
774 $UMULH r8,r5,r6
776 adde r9,r8,r9
779 adde r9,r8,r9
785 $UMULH r8,r5,r6
788 adde r10,r8,r10
791 adde r10,r8,r10
797 $UMULH r8,r5,r6
799 adde r10,r8,r10
802 adde r10,r8,r10
808 $UMULH r8,r5,r6
810 adde r10,r8,r10
813 adde r10,r8,r10
818 $UMULH r8,r6,r6
820 adde r11,r8,r11
825 $UMULH r8,r5,r6
827 adde r11,r8,r11
830 adde r11,r8,r11
836 $UMULH r8,r5,r6
838 adde r11,r8,r11
841 adde r11,r8,r11
847 $UMULH r8,r5,r6
849 adde r9,r8,r9
852 adde r9,r8,r9
858 $UMULH r8,r5,r6
860 adde r9,r8,r9
863 adde r9,r8,r9
868 $UMULH r8,r6,r6
870 adde r10,r8,r10
875 $UMULH r8,r5,r6
877 adde r10,r8,r10
880 adde r10,r8,r10
887 $UMULH r8,r5,r6
889 adde r11,r8,r11
892 adde r11,r8,r11
897 $UMULH r8,r6,r6
899 adde r9,r8,r9
925 # r8, r9 are the results of the 32x32 giving 64 bit multiply.
937 $UMULL r8,r6,r7
939 addc r11,r8,r11
945 $UMULL r8,r6,r7
947 addc r11,r8,r11
953 $UMULL r8,r6,r7
955 addc r12,r8,r12
961 $UMULL r8,r6,r7
963 addc r12,r8,r12
969 $UMULL r8,r6,r7
971 addc r12,r8,r12
977 $UMULL r8,r6,r7
979 addc r10,r8,r10
985 $UMULL r8,r6,r7
987 addc r10,r8,r10
993 $UMULL r8,r6,r7
995 addc r10,r8,r10
1001 $UMULL r8,r6,r7
1003 addc r10,r8,r10
1009 $UMULL r8,r6,r7
1011 addc r11,r8,r11
1017 $UMULL r8,r6,r7
1019 addc r11,r8,r11
1025 $UMULL r8,r6,r7
1027 addc r11,r8,r11
1033 $UMULL r8,r6,r7
1035 addc r12,r8,r12
1041 $UMULL r8,r6,r7
1043 addc r12,r8,r12
1049 $UMULL r8,r6,r7
1051 addc r10,r8,r10
1076 # r8, r9 are the results of the 32x32 giving 64 bit multiply.
1089 $UMULL r8,r6,r7
1091 addc r11,r11,r8
1097 $UMULL r8,r6,r7
1099 addc r11,r11,r8
1105 $UMULL r8,r6,r7
1107 addc r12,r12,r8
1113 $UMULL r8,r6,r7
1115 addc r12,r12,r8
1121 $UMULL r8,r6,r7
1123 addc r12,r12,r8
1129 $UMULL r8,r6,r7
1131 addc r10,r10,r8
1137 $UMULL r8,r6,r7
1139 addc r10,r10,r8
1146 $UMULL r8,r6,r7
1148 addc r10,r10,r8
1154 $UMULL r8,r6,r7
1156 addc r10,r10,r8
1162 $UMULL r8,r6,r7
1164 addc r11,r11,r8
1170 $UMULL r8,r6,r7
1172 addc r11,r11,r8
1178 $UMULL r8,r6,r7
1180 addc r11,r11,r8
1186 $UMULL r8,r6,r7
1188 addc r11,r11,r8
1194 $UMULL r8,r6,r7
1196 addc r11,r11,r8
1202 $UMULL r8,r6,r7
1204 addc r12,r12,r8
1210 $UMULL r8,r6,r7
1212 addc r12,r12,r8
1218 $UMULL r8,r6,r7
1220 addc r12,r12,r8
1226 $UMULL r8,r6,r7
1228 addc r12,r12,r8
1234 $UMULL r8,r6,r7
1236 addc r12,r12,r8
1242 $UMULL r8,r6,r7
1244 addc r12,r12,r8
1250 $UMULL r8,r6,r7
1252 addc r10,r10,r8
1258 $UMULL r8,r6,r7
1260 addc r10,r10,r8
1266 $UMULL r8,r6,r7
1268 addc r10,r10,r8
1274 $UMULL r8,r6,r7
1276 addc r10,r10,r8
1282 $UMULL r8,r6,r7
1284 addc r10,r10,r8
1290 $UMULL r8,r6,r7
1292 addc r10,r10,r8
1298 $UMULL r8,r6,r7
1300 addc r10,r10,r8
1306 $UMULL r8,r6,r7
1308 addc r11,r11,r8
1314 $UMULL r8,r6,r7
1316 addc r11,r11,r8
1322 $UMULL r8,r6,r7
1324 addc r11,r11,r8
1330 $UMULL r8,r6,r7
1332 addc r11,r11,r8
1338 $UMULL r8,r6,r7
1340 addc r11,r11,r8
1346 $UMULL r8,r6,r7
1348 addc r11,r11,r8
1354 $UMULL r8,r6,r7
1356 addc r11,r11,r8
1362 $UMULL r8,r6,r7
1364 addc r11,r11,r8
1370 $UMULL r8,r6,r7
1372 addc r12,r12,r8
1378 $UMULL r8,r6,r7
1380 addc r12,r12,r8
1386 $UMULL r8,r6,r7
1388 addc r12,r12,r8
1394 $UMULL r8,r6,r7
1396 addc r12,r12,r8
1402 $UMULL r8,r6,r7
1404 addc r12,r12,r8
1410 $UMULL r8,r6,r7
1412 addc r12,r12,r8
1418 $UMULL r8,r6,r7
1420 addc r12,r12,r8
1426 $UMULL r8,r6,r7
1428 addc r10,r10,r8
1434 $UMULL r8,r6,r7
1436 addc r10,r10,r8
1442 $UMULL r8,r6,r7
1444 addc r10,r10,r8
1450 $UMULL r8,r6,r7
1452 addc r10,r10,r8
1458 $UMULL r8,r6,r7
1460 addc r10,r10,r8
1466 $UMULL r8,r6,r7
1468 addc r10,r10,r8
1474 $UMULL r8,r6,r7
1476 addc r11,r11,r8
1482 $UMULL r8,r6,r7
1484 addc r11,r11,r8
1490 $UMULL r8,r6,r7
1492 addc r11,r11,r8
1498 $UMULL r8,r6,r7
1500 addc r11,r11,r8
1506 $UMULL r8,r6,r7
1508 addc r11,r11,r8
1514 $UMULL r8,r6,r7
1516 addc r12,r12,r8
1522 $UMULL r8,r6,r7
1524 addc r12,r12,r8
1530 $UMULL r8,r6,r7
1532 addc r12,r12,r8
1538 $UMULL r8,r6,r7
1540 addc r12,r12,r8
1546 $UMULL r8,r6,r7
1548 addc r10,r10,r8
1554 $UMULL r8,r6,r7
1556 addc r10,r10,r8
1562 $UMULL r8,r6,r7
1564 addc r10,r10,r8
1570 $UMULL r8,r6,r7
1572 addc r11,r11,r8
1578 $UMULL r8,r6,r7
1580 addc r11,r11,r8
1586 $UMULL r8,r6,r7
1588 addc r12,r12,r8
1631 $LDU r8,$BNSZ(r5)
1632 subfe r6,r8,r7 # r6 = r7 + carry bit + one's complement(r8)
1633 # if carry = 1 this is r7-r8. Else it
1634 # is r7-r8-1, as we need.
1678 $LDU r8,$BNSZ(r5)
1679 adde r8,r7,r8
1680 $STU r8,$BNSZ(r3)
1714 subfic r8,r7,$BITS #r8 = BN_num_bits_word(d)
1715 cmpi 0,0,r8,$BITS #
1716 bc BO_IF,CR0_EQ,Lppcasm_div2 #proceed if (r8==$BITS)
1718 $SHL r10,r9,r8 # r9<<=r8
1720 bc BO_IF,CR0_GT,Lppcasm_div2 #or if (h > (1<<r8))
1732 $SHR r8,r4,r8 # r8 = (l >> BN_BITS2 -i)
1734 or r3,r3,r8 # h = (h<<i)|(l>>(BN_BITS2-i))
1743 $SHRI r8,r3,`$BITS/2` #r8 = (h>>BN_BITS4)
1746 $UCMP 0,r8,r9 # is (h>>BN_BITS4)==dh
1749 li r8,-1
1750 $CLRU r8,r8,`$BITS/2` #q = BN_MASK2l
1753 $UDIV r8,r3,r9 #q = h/dh
1755 $UMULL r12,r9,r8 #th = q*dh
1757 $UMULL r6,r8,r10 #tl = q*dl
1771 addi r8,r8,-1 #q--
1787 addi r8,r8,-1 # q--
1798 $SHLI r0,r8,`$BITS/2` #ret =q<<BN_BITS4
1801 or r3,r8,r0
1823 # r7,r8 = product.
1836 $UMULH r8,r6,r6
1838 $STU r8,$BNSZ(r3)
1868 $LD r8,`0*$BNSZ`(r4)
1869 $UMULL r9,r6,r8
1870 $UMULH r10,r6,r8
1878 $LD r8,`1*$BNSZ`(r4)
1879 $UMULL r11,r6,r8
1880 $UMULH r12,r6,r8
1885 $LD r8,`2*$BNSZ`(r4)
1886 $UMULL r9,r6,r8
1887 $UMULH r10,r6,r8
1892 $LD r8,`3*$BNSZ`(r4)
1893 $UMULL r11,r6,r8
1894 $UMULH r12,r6,r8
1908 $LD r8,`0*$BNSZ`(r4)
1909 $UMULL r9,r6,r8
1910 $UMULH r10,r6,r8
1922 $LD r8,`1*$BNSZ`(r4)
1923 $UMULL r9,r6,r8
1924 $UMULH r10,r6,r8
1935 $LD r8,`2*$BNSZ`(r4)
1936 $UMULL r9,r6,r8
1937 $UMULH r10,r6,r8
1974 $LD r8,`0*$BNSZ`(r4)
1976 $UMULL r9,r6,r8
1977 $UMULH r10,r6,r8
1990 $LD r8,`1*$BNSZ`(r4)
1992 $UMULL r11,r6,r8
1993 $UMULH r12,r6,r8
2001 $LD r8,`2*$BNSZ`(r4)
2002 $UMULL r9,r6,r8
2004 $UMULH r10,r6,r8
2012 $LD r8,`3*$BNSZ`(r4)
2013 $UMULL r11,r6,r8
2015 $UMULH r12,r6,r8
2032 $LDU r8,$BNSZ(r4)
2033 $UMULL r9,r6,r8
2034 $UMULH r10,r6,r8
2044 $LDU r8,$BNSZ(r4)
2045 $UMULL r9,r6,r8
2046 $UMULH r10,r6,r8
2056 $LDU r8,$BNSZ(r4)
2057 $UMULL r9,r6,r8
2058 $UMULH r10,r6,r8