/external/openssl/crypto/bn/asm/

  alpha-mont.pl
    18    # int bn_mul_mont(
    56    .globl bn_mul_mont
    58    .ent bn_mul_mont
    59    bn_mul_mont:  (label)
    316   .end bn_mul_mont

  armv4-mont.s
    3     .global bn_mul_mont
    4     .type bn_mul_mont,%function
    7     bn_mul_mont:  (label)
    145   .size bn_mul_mont,.-bn_mul_mont

  mips3-mont.pl
    21    # int bn_mul_mont(
    59    .globl bn_mul_mont
    60    .ent bn_mul_mont
    61    bn_mul_mont:  (label)
    322   END(bn_mul_mont)

  s390x-mont.pl
    19    # and _strictly_ in-order execution, while bn_mul_mont is more or less
    23    # the cost of other operations increase, bn_mul_mont aim to neatly
    38    # int bn_mul_mont(
    60    .globl bn_mul_mont
    61    .type bn_mul_mont,\@function
    62    bn_mul_mont:  (label)
    220   .size bn_mul_mont,.-bn_mul_mont

  armv4-mont.pl
    17    # +115-80% on Intel IXP425. This is compared to pre-bn_mul_mont code
    54    .global bn_mul_mont
    55    .type bn_mul_mont,%function
    58    bn_mul_mont:  (label)
    196   .size bn_mul_mont,.-bn_mul_mont

  x86_64-mont.pl
    31    # int bn_mul_mont(
    50    .globl bn_mul_mont
    51    .type bn_mul_mont,\@function,6
    53    bn_mul_mont:  (label)
    216   .size bn_mul_mont,.-bn_mul_mont
/external/openssl/crypto/

  sparcv9cap.c
    17    int bn_mul_mont(BN_ULONG *rp, const BN_ULONG *ap, const BN_ULONG *bp, const BN_ULONG *np,const BN_ULONG *n0, int num)  (function)
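The one C-level hit outside crypto/bn/ is in the CPU-capability file sparcv9cap.c, where bn_mul_mont acts as a small run-time front end: the SPARC build ships more than one assembly Montgomery kernel, and the C wrapper picks one based on the detected processor features. The sketch below shows only that dispatch pattern; the flag word, capability bit, size threshold, and kernel names are hypothetical stand-ins, not the OpenSSL identifiers.

/*
 * Sketch of a capability-dispatching Montgomery front end, in the spirit of
 * crypto/sparcv9cap.c.  Every name ending in _sketch or _kernel below is
 * hypothetical; the real file routes to kernels emitted by the SPARC perlasm
 * modules.
 */
typedef unsigned long BN_ULONG;            /* stand-in limb type for the sketch */

static unsigned int cpu_caps_sketch;       /* hypothetical: filled in once at startup */
#define CAP_FPU_KERNEL (1u << 0)           /* hypothetical capability bit */

/* Stubs standing in for the assembly kernels. */
static int fpu_kernel(BN_ULONG *rp, const BN_ULONG *ap, const BN_ULONG *bp,
                      const BN_ULONG *np, const BN_ULONG *n0, int num)
{ (void)rp; (void)ap; (void)bp; (void)np; (void)n0; (void)num; return 1; }

static int int_kernel(BN_ULONG *rp, const BN_ULONG *ap, const BN_ULONG *bp,
                      const BN_ULONG *np, const BN_ULONG *n0, int num)
{ (void)rp; (void)ap; (void)bp; (void)np; (void)n0; (void)num; return 1; }

/* Same parameter layout as the bn_mul_mont hits above: choose a kernel at run
 * time.  The "even num above a threshold" precondition is an illustrative
 * guess at the kind of restriction an FPU path might impose. */
int mont_mul_dispatch_sketch(BN_ULONG *rp, const BN_ULONG *ap, const BN_ULONG *bp,
                             const BN_ULONG *np, const BN_ULONG *n0, int num)
{
    if ((cpu_caps_sketch & CAP_FPU_KERNEL) && num >= 8 && (num & 1) == 0)
        return fpu_kernel(rp, ap, bp, np, n0, num);
    return int_kernel(rp, ap, bp, np, n0, num);
}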
/external/openssl/crypto/bn/

  bn_asm.c
    847   int bn_mul_mont(BN_ULONG *rp, const BN_ULONG *ap, const BN_ULONG *bp, const BN_ULONG *np,const BN_ULONG *n0p, int num)  (function)
    941   int bn_mul_mont(BN_ULONG *rp, const BN_ULONG *ap, const BN_ULONG *bp, const BN_ULONG *np,const BN_ULONG *n0, int num)  (function)
    986   int bn_mul_mont(BN_ULONG *rp, const BN_ULONG *ap, const BN_ULONG *bp, const BN_ULONG *np,const BN_ULONG *n0p, int num)  (function)
    1025  int bn_mul_mont(BN_ULONG *rp, const BN_ULONG *ap, const BN_ULONG *bp, const BN_ULONG *np,const BN_ULONG *n0, int num)  (function)
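bn_asm.c holds the portable C fallbacks; since the same external function cannot be defined four times in one translation unit, the hits at 847, 941, 986 and 1025 are alternative versions selected by preprocessor configuration, all with the same contract: rp = ap * bp * R^-1 mod np, where R = 2^(BN_BITS2 * num) and n0 points at the Montgomery constant (-np^-1 mod 2^BN_BITS2) that BN_MONT_CTX caches. As a reading aid, here is a minimal word-level Montgomery multiplication in the CIOS style with the same parameter layout. It is an independent sketch assuming fixed 32-bit limbs and a size cap, not the OpenSSL code.

/*
 * Illustrative sketch only, not bn_asm.c: CIOS Montgomery multiplication,
 * computing rp = ap * bp * R^-1 mod np with R = 2^(32*num).  Assumes 32-bit
 * limbs, num <= MAX_WORDS_SKETCH, np odd, and n0[0] == -np[0]^-1 mod 2^32.
 */
#include <stdint.h>
#include <string.h>

#define MAX_WORDS_SKETCH 64            /* enough for a 2048-bit modulus */

int mont_mul_sketch(uint32_t *rp, const uint32_t *ap, const uint32_t *bp,
                    const uint32_t *np, const uint32_t *n0, int num)
{
    uint32_t t[MAX_WORDS_SKETCH + 2];
    int i, j;

    if (num <= 0 || num > MAX_WORDS_SKETCH)
        return 0;                      /* decline sizes the sketch cannot handle */

    memset(t, 0, sizeof(t[0]) * (size_t)(num + 2));

    for (i = 0; i < num; i++) {
        uint64_t acc = 0;

        /* t += ap * bp[i], one limb of bp per outer iteration */
        for (j = 0; j < num; j++) {
            acc += (uint64_t)ap[j] * bp[i] + t[j];
            t[j] = (uint32_t)acc;
            acc >>= 32;
        }
        acc += t[num];
        t[num] = (uint32_t)acc;
        t[num + 1] = (uint32_t)(acc >> 32);

        /* add m*np so the low limb cancels, then shift t right by one limb */
        {
            uint32_t m = t[0] * n0[0];          /* n0[0] = -np[0]^-1 mod 2^32 */
            acc = (uint64_t)m * np[0] + t[0];   /* low 32 bits become zero    */
            acc >>= 32;
            for (j = 1; j < num; j++) {
                acc += (uint64_t)m * np[j] + t[j];
                t[j - 1] = (uint32_t)acc;
                acc >>= 32;
            }
            acc += t[num];
            t[num - 1] = (uint32_t)acc;
            t[num] = t[num + 1] + (uint32_t)(acc >> 32);
        }
    }

    /* final conditional subtraction: the loop keeps t < 2*np */
    {
        uint32_t sub[MAX_WORDS_SKETCH];
        uint64_t borrow = 0;

        for (j = 0; j < num; j++) {
            uint64_t d = (uint64_t)t[j] - np[j] - borrow;
            sub[j] = (uint32_t)d;
            borrow = d >> 63;          /* 1 iff the subtraction wrapped */
        }
        if (t[num] || !borrow)         /* t >= np: use the reduced copy */
            memcpy(rp, sub, sizeof(sub[0]) * (size_t)num);
        else
            memcpy(rp, t, sizeof(t[0]) * (size_t)num);
    }
    return 1;
}

The int return type mirrored from the signatures above lets a platform-specific bn_mul_mont decline (return 0) and leave the work to the generic path in bn_mont.c; the sketch follows the same convention.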