# crypto_scalarmult/curve25519/athlon/fromdouble.s — i386, AT&T syntax
- .text
- .p2align 5
- .globl _crypto_scalarmult_curve25519_athlon_fromdouble
- .globl crypto_scalarmult_curve25519_athlon_fromdouble
- _crypto_scalarmult_curve25519_athlon_fromdouble:
- crypto_scalarmult_curve25519_athlon_fromdouble:
- mov %esp,%eax
- and $31,%eax
- add $192,%eax
- sub %eax,%esp
- movl %ebp,0(%esp)
- movl 8(%esp,%eax),%ecx
- fldl 0(%ecx)
- faddl crypto_scalarmult_curve25519_athlon_out0offset
- fstpl 96(%esp)
- fldl 8(%ecx)
- faddl crypto_scalarmult_curve25519_athlon_out1offset
- fstpl 104(%esp)
- fldl 16(%ecx)
- faddl crypto_scalarmult_curve25519_athlon_out2offset
- fstpl 112(%esp)
- fldl 24(%ecx)
- faddl crypto_scalarmult_curve25519_athlon_out3offset
- fstpl 120(%esp)
- fldl 32(%ecx)
- faddl crypto_scalarmult_curve25519_athlon_out4offset
- fstpl 128(%esp)
- fldl 40(%ecx)
- faddl crypto_scalarmult_curve25519_athlon_out5offset
- fstpl 136(%esp)
- fldl 48(%ecx)
- faddl crypto_scalarmult_curve25519_athlon_out6offset
- fstpl 144(%esp)
- fldl 56(%ecx)
- faddl crypto_scalarmult_curve25519_athlon_out7offset
- fstpl 152(%esp)
- fldl 64(%ecx)
- faddl crypto_scalarmult_curve25519_athlon_out8offset
- fstpl 160(%esp)
- fldl 72(%ecx)
- faddl crypto_scalarmult_curve25519_athlon_out9offset
- fstpl 168(%esp)
- movl 96(%esp),%ecx
- movl %ecx,4(%esp)
- movl 104(%esp),%ecx
- shl $26,%ecx
- movl %ecx,40(%esp)
- movl 104(%esp),%ecx
- shr $6,%ecx
- movl %ecx,8(%esp)
- movl 112(%esp),%ecx
- shl $19,%ecx
- movl %ecx,44(%esp)
- movl 112(%esp),%ecx
- shr $13,%ecx
- movl %ecx,12(%esp)
- movl 120(%esp),%ecx
- shl $13,%ecx
- movl %ecx,48(%esp)
- movl 120(%esp),%ecx
- shr $19,%ecx
- movl %ecx,16(%esp)
- movl 128(%esp),%ecx
- shl $6,%ecx
- movl %ecx,52(%esp)
- movl 128(%esp),%ecx
- shr $26,%ecx
- movl 136(%esp),%edx
- add %edx,%ecx
- movl %ecx,20(%esp)
- movl 144(%esp),%ecx
- shl $25,%ecx
- movl %ecx,56(%esp)
- movl 144(%esp),%ecx
- shr $7,%ecx
- movl %ecx,24(%esp)
- movl 152(%esp),%ecx
- shl $19,%ecx
- movl %ecx,60(%esp)
- movl 152(%esp),%ecx
- shr $13,%ecx
- movl %ecx,28(%esp)
- movl 160(%esp),%ecx
- shl $12,%ecx
- movl %ecx,64(%esp)
- movl 160(%esp),%ecx
- shr $20,%ecx
- movl %ecx,32(%esp)
- movl 168(%esp),%ecx
- shl $6,%ecx
- movl %ecx,68(%esp)
- movl 168(%esp),%ecx
- shr $26,%ecx
- movl %ecx,36(%esp)
- mov $0,%ecx
- movl %ecx,72(%esp)
- movl 4(%esp),%ecx
- addl 40(%esp),%ecx
- movl %ecx,4(%esp)
- movl 8(%esp),%ecx
- adcl 44(%esp),%ecx
- movl %ecx,8(%esp)
- movl 12(%esp),%ecx
- adcl 48(%esp),%ecx
- movl %ecx,12(%esp)
- movl 16(%esp),%ecx
- adcl 52(%esp),%ecx
- movl %ecx,16(%esp)
- movl 20(%esp),%ecx
- adcl 56(%esp),%ecx
- movl %ecx,20(%esp)
- movl 24(%esp),%ecx
- adcl 60(%esp),%ecx
- movl %ecx,24(%esp)
- movl 28(%esp),%ecx
- adcl 64(%esp),%ecx
- movl %ecx,28(%esp)
- movl 32(%esp),%ecx
- adcl 68(%esp),%ecx
- movl %ecx,32(%esp)
- movl 36(%esp),%ecx
- adcl 72(%esp),%ecx
- movl %ecx,36(%esp)
- movl 4(%esp),%ecx
- adc $0x13,%ecx
- movl %ecx,40(%esp)
- movl 8(%esp),%ecx
- adc $0,%ecx
- movl %ecx,44(%esp)
- movl 12(%esp),%ecx
- adc $0,%ecx
- movl %ecx,48(%esp)
- movl 16(%esp),%ecx
- adc $0,%ecx
- movl %ecx,52(%esp)
- movl 20(%esp),%ecx
- adc $0,%ecx
- movl %ecx,56(%esp)
- movl 24(%esp),%ecx
- adc $0,%ecx
- movl %ecx,60(%esp)
- movl 28(%esp),%ecx
- adc $0,%ecx
- movl %ecx,64(%esp)
- movl 32(%esp),%ecx
- adc $0x80000000,%ecx
- movl %ecx,68(%esp)
- movl 36(%esp),%ebp
- adc $0xffffffff,%ebp
- and $0x80000000,%ebp
- sar $31,%ebp
- movl 4(%esp,%eax),%ecx
- movl 4(%esp),%edx
- xorl 40(%esp),%edx
- and %ebp,%edx
- xorl 40(%esp),%edx
- movl %edx,0(%ecx)
- movl 8(%esp),%edx
- xorl 44(%esp),%edx
- and %ebp,%edx
- xorl 44(%esp),%edx
- movl %edx,4(%ecx)
- movl 12(%esp),%edx
- xorl 48(%esp),%edx
- and %ebp,%edx
- xorl 48(%esp),%edx
- movl %edx,8(%ecx)
- movl 16(%esp),%edx
- xorl 52(%esp),%edx
- and %ebp,%edx
- xorl 52(%esp),%edx
- movl %edx,12(%ecx)
- movl 20(%esp),%edx
- xorl 56(%esp),%edx
- and %ebp,%edx
- xorl 56(%esp),%edx
- movl %edx,16(%ecx)
- movl 24(%esp),%edx
- xorl 60(%esp),%edx
- and %ebp,%edx
- xorl 60(%esp),%edx
- movl %edx,20(%ecx)
- movl 28(%esp),%edx
- xorl 64(%esp),%edx
- and %ebp,%edx
- xorl 64(%esp),%edx
- movl %edx,24(%ecx)
- movl 32(%esp),%edx
- xorl 68(%esp),%edx
- and %ebp,%edx
- xorl 68(%esp),%edx
- movl %edx,28(%ecx)
- movl 0(%esp),%ebp
- add %eax,%esp
- ret