Browse Source

openssl: add generated asm code

v0.8.10-release
Bert Belder 12 years ago
committed by isaacs
parent
commit
823e807b5b
  1. 2545
      deps/openssl/asm/x64-elf-gas/aes/aes-x86_64.s
  2. 172
      deps/openssl/asm/x64-elf-gas/bn/x86_64-mont.s
  3. 1844
      deps/openssl/asm/x64-elf-gas/camellia/cmll-x86_64.s
  4. 671
      deps/openssl/asm/x64-elf-gas/md5/md5-x86_64.s
  5. 430
      deps/openssl/asm/x64-elf-gas/rc4/rc4-x86_64.s
  6. 1283
      deps/openssl/asm/x64-elf-gas/sha/sha1-x86_64.s
  7. 1971
      deps/openssl/asm/x64-elf-gas/sha/sha512-x86_64.s
  8. 859
      deps/openssl/asm/x64-elf-gas/whrlpool/wp-x86_64.s
  9. 194
      deps/openssl/asm/x64-elf-gas/x86_64cpuid.s
  10. 2545
      deps/openssl/asm/x64-macosx-gas/aes/aes-x86_64.s
  11. 172
      deps/openssl/asm/x64-macosx-gas/bn/x86_64-mont.s
  12. 1844
      deps/openssl/asm/x64-macosx-gas/camellia/cmll-x86_64.s
  13. 671
      deps/openssl/asm/x64-macosx-gas/md5/md5-x86_64.s
  14. 430
      deps/openssl/asm/x64-macosx-gas/rc4/rc4-x86_64.s
  15. 1283
      deps/openssl/asm/x64-macosx-gas/sha/sha1-x86_64.s
  16. 1971
      deps/openssl/asm/x64-macosx-gas/sha/sha512-x86_64.s
  17. 859
      deps/openssl/asm/x64-macosx-gas/whrlpool/wp-x86_64.s
  18. 195
      deps/openssl/asm/x64-macosx-gas/x86_64cpuid.s
  19. 2885
      deps/openssl/asm/x64-win32-masm/aes/aes-x86_64.asm
  20. 293
      deps/openssl/asm/x64-win32-masm/bn/x86_64-mont.asm
  21. 2108
      deps/openssl/asm/x64-win32-masm/camellia/cmll-x86_64.asm
  22. 781
      deps/openssl/asm/x64-win32-masm/md5/md5-x86_64.asm
  23. 586
      deps/openssl/asm/x64-win32-masm/rc4/rc4-x86_64.asm
  24. 1394
      deps/openssl/asm/x64-win32-masm/sha/sha1-x86_64.asm
  25. 2085
      deps/openssl/asm/x64-win32-masm/sha/sha512-x86_64.asm
  26. 972
      deps/openssl/asm/x64-win32-masm/whrlpool/wp-x86_64.asm
  27. 186
      deps/openssl/asm/x64-win32-masm/x86_64cpuid.asm
  28. 3234
      deps/openssl/asm/x86-elf-gas/aes/aes-586.s
  29. 864
      deps/openssl/asm/x86-elf-gas/bf/bf-686.s
  30. 338
      deps/openssl/asm/x86-elf-gas/bn/x86-mont.s
  31. 2114
      deps/openssl/asm/x86-elf-gas/bn/x86.s
  32. 2375
      deps/openssl/asm/x86-elf-gas/camellia/cmll-x86.s
  33. 933
      deps/openssl/asm/x86-elf-gas/cast/cast-586.s
  34. 875
      deps/openssl/asm/x86-elf-gas/des/crypt586.s
  35. 1837
      deps/openssl/asm/x86-elf-gas/des/des-586.s
  36. 679
      deps/openssl/asm/x86-elf-gas/md5/md5-586.s
  37. 230
      deps/openssl/asm/x86-elf-gas/rc4/rc4-586.s
  38. 564
      deps/openssl/asm/x86-elf-gas/rc5/rc5-586.s
  39. 1965
      deps/openssl/asm/x86-elf-gas/ripemd/rmd-586.s
  40. 1442
      deps/openssl/asm/x86-elf-gas/sha/sha1-586.s
  41. 261
      deps/openssl/asm/x86-elf-gas/sha/sha256-586.s
  42. 563
      deps/openssl/asm/x86-elf-gas/sha/sha512-586.s
  43. 1105
      deps/openssl/asm/x86-elf-gas/whrlpool/wp-mmx.s
  44. 279
      deps/openssl/asm/x86-elf-gas/x86cpuid.s
  45. 3194
      deps/openssl/asm/x86-macosx-gas/aes/aes-586.s
  46. 897
      deps/openssl/asm/x86-macosx-gas/bf/bf-686.s
  47. 336
      deps/openssl/asm/x86-macosx-gas/bn/x86-mont.s
  48. 2385
      deps/openssl/asm/x86-macosx-gas/bn/x86.s
  49. 2353
      deps/openssl/asm/x86-macosx-gas/camellia/cmll-x86.s
  50. 967
      deps/openssl/asm/x86-macosx-gas/cast/cast-586.s
  51. 891
      deps/openssl/asm/x86-macosx-gas/des/crypt586.s
  52. 1873
      deps/openssl/asm/x86-macosx-gas/des/des-586.s
  53. 745
      deps/openssl/asm/x86-macosx-gas/md5/md5-586.s
  54. 224
      deps/openssl/asm/x86-macosx-gas/rc4/rc4-586.s
  55. 563
      deps/openssl/asm/x86-macosx-gas/rc5/rc5-586.s
  56. 2123
      deps/openssl/asm/x86-macosx-gas/ripemd/rmd-586.s
  57. 1520
      deps/openssl/asm/x86-macosx-gas/sha/sha1-586.s
  58. 259
      deps/openssl/asm/x86-macosx-gas/sha/sha256-586.s
  59. 561
      deps/openssl/asm/x86-macosx-gas/sha/sha512-586.s
  60. 1103
      deps/openssl/asm/x86-macosx-gas/whrlpool/wp-mmx.s
  61. 261
      deps/openssl/asm/x86-macosx-gas/x86cpuid.s
  62. 3222
      deps/openssl/asm/x86-win32-masm/aes/aes-586.asm
  63. 907
      deps/openssl/asm/x86-win32-masm/bf/bf-686.asm
  64. 348
      deps/openssl/asm/x86-win32-masm/bn/x86-mont.asm
  65. 2116
      deps/openssl/asm/x86-win32-masm/bn/x86.asm
  66. 2367
      deps/openssl/asm/x86-win32-masm/camellia/cmll-x86.asm
  67. 950
      deps/openssl/asm/x86-win32-masm/cast/cast-586.asm
  68. 909
      deps/openssl/asm/x86-win32-masm/des/crypt586.asm
  69. 1878
      deps/openssl/asm/x86-win32-masm/des/des-586.asm
  70. 693
      deps/openssl/asm/x86-win32-masm/md5/md5-586.asm
  71. 239
      deps/openssl/asm/x86-win32-masm/rc4/rc4-586.asm
  72. 573
      deps/openssl/asm/x86-win32-masm/rc5/rc5-586.asm
  73. 1976
      deps/openssl/asm/x86-win32-masm/ripemd/rmd-586.asm
  74. 1452
      deps/openssl/asm/x86-win32-masm/sha/sha1-586.asm
  75. 271
      deps/openssl/asm/x86-win32-masm/sha/sha256-586.asm
  76. 573
      deps/openssl/asm/x86-win32-masm/sha/sha512-586.asm
  77. 1122
      deps/openssl/asm/x86-win32-masm/whrlpool/wp-mmx.asm
  78. 277
      deps/openssl/asm/x86-win32-masm/x86cpuid.asm

2545
deps/openssl/asm/x64-elf-gas/aes/aes-x86_64.s

File diff suppressed because it is too large

172
deps/openssl/asm/x64-elf-gas/bn/x86_64-mont.s

@@ -0,0 +1,172 @@
.text
#-----------------------------------------------------------------------
# bn_mul_mont — Montgomery multiplication, generated x86-64 code.
# (The trailing .byte banner decodes to "Montgomery Multiplication for
#  x86_64, CRYPTOGAMS by <appro@openssl.org>".)
# ABI: System V AMD64 (ELF/GAS flavour).  Register roles as used below:
#   %rdi = rp (result), %rsi = ap, %rdx = bp (moved to %r12),
#   %rcx = np (modulus), %r8 = pointer to n0 (dereferenced once),
#   %r9d = num (number of 64-bit limbs)
# presumably matching bn_mul_mont(rp,ap,bp,np,&n0,num) — TODO confirm
# against the OpenSSL bn prototype.  Returns 1 in %rax.
# Callee-saved %rbx,%rbp,%r12-%r15 are pushed here and restored from the
# relocated stack in the epilogue; a scratch vector tp[num+1] plus the
# saved %rsp live on a 1024-byte-aligned stack area.
#-----------------------------------------------------------------------
.globl bn_mul_mont
.type bn_mul_mont,@function
.align 16
bn_mul_mont:
pushq %rbx
pushq %rbp
pushq %r12
pushq %r13
pushq %r14
pushq %r15
movl %r9d,%r9d # zero-extend num to 64 bits
leaq 2(%r9),%r10 # num+2 qwords: tp[num], carry slot, saved rsp
movq %rsp,%r11 # remember stack pointer after the pushes
negq %r10
leaq (%rsp,%r10,8),%rsp # carve scratch area below current rsp
andq $-1024,%rsp # align scratch to 1024 bytes
movq %r11,8(%rsp,%r9,8) # stash original rsp above tp[num]
.Lprologue:
movq %rdx,%r12 # r12 = bp (rdx is clobbered by mulq)
movq (%r8),%r8 # r8 = n0 value
xorq %r14,%r14 # i = 0 (outer index)
xorq %r15,%r15 # j = 0 (inner index)
# ---- first outer iteration: tp = ap[]*bp[0] with Montgomery reduction
movq (%r12),%rbx # rbx = bp[0]
movq (%rsi),%rax # rax = ap[0]
mulq %rbx
movq %rax,%r10 # r10 = low(ap[0]*bp[0])
movq %rdx,%r11 # r11 = multiplication carry
imulq %r8,%rax # m = low * n0 mod 2^64
movq %rax,%rbp # rbp = m
mulq (%rcx) # m * np[0]
addq %r10,%rax # low limb cancels (Montgomery step)
adcq $0,%rdx
movq %rdx,%r13 # r13 = reduction carry
leaq 1(%r15),%r15 # j = 1
.L1st:
movq (%rsi,%r15,8),%rax # ap[j]
mulq %rbx # * bp[0]
addq %r11,%rax
adcq $0,%rdx
movq %rax,%r10
movq (%rcx,%r15,8),%rax # np[j]
movq %rdx,%r11
mulq %rbp # * m
addq %r13,%rax
leaq 1(%r15),%r15 # j++
adcq $0,%rdx
addq %r10,%rax # combine product and reduction limbs
adcq $0,%rdx
movq %rax,-16(%rsp,%r15,8) # tp[j-1]
cmpq %r9,%r15
movq %rdx,%r13
jl .L1st # while (j < num)
xorq %rdx,%rdx
addq %r11,%r13 # fold both carries into the top limbs
adcq $0,%rdx
movq %r13,-8(%rsp,%r9,8) # tp[num-1]
movq %rdx,(%rsp,%r9,8) # tp[num] = overall carry
leaq 1(%r14),%r14 # i = 1
.align 4
# ---- remaining outer iterations: tp += ap[]*bp[i], then reduce ----
.Louter:
xorq %r15,%r15 # j = 0
movq (%r12,%r14,8),%rbx # rbx = bp[i]
movq (%rsi),%rax # ap[0]
mulq %rbx
addq (%rsp),%rax # + tp[0]
adcq $0,%rdx
movq %rax,%r10
movq %rdx,%r11
imulq %r8,%rax # m = tp[0]' * n0 mod 2^64
movq %rax,%rbp
mulq (%rcx,%r15,8) # m * np[0]
addq %r10,%rax
movq 8(%rsp),%r10 # preload tp[1]
adcq $0,%rdx
movq %rdx,%r13
leaq 1(%r15),%r15 # j = 1
.align 4
.Linner:
movq (%rsi,%r15,8),%rax # ap[j]
mulq %rbx # * bp[i]
addq %r11,%rax
adcq $0,%rdx
addq %rax,%r10 # + tp[j]
movq (%rcx,%r15,8),%rax # np[j]
adcq $0,%rdx
movq %rdx,%r11
mulq %rbp # * m
addq %r13,%rax
leaq 1(%r15),%r15 # j++
adcq $0,%rdx
addq %r10,%rax
adcq $0,%rdx
movq (%rsp,%r15,8),%r10 # preload next tp[j]
cmpq %r9,%r15
movq %rax,-16(%rsp,%r15,8) # tp[j-1]
movq %rdx,%r13
jl .Linner # while (j < num)
xorq %rdx,%rdx
addq %r11,%r13
adcq $0,%rdx
addq %r10,%r13 # + old tp[num]
adcq $0,%rdx
movq %r13,-8(%rsp,%r9,8)
movq %rdx,(%rsp,%r9,8)
leaq 1(%r14),%r14 # i++
cmpq %r9,%r14
jl .Louter # while (i < num)
# ---- conditional final subtraction: result = tp - np if tp >= np ----
leaq (%rsp),%rsi # rsi = tp
leaq -1(%r9),%r15 # countdown index for the copy loop
movq (%rsi),%rax # tp[0]
xorq %r14,%r14 # k = 0; also clears CF for the sbb chain
jmp .Lsub
.align 16
.Lsub: sbbq (%rcx,%r14,8),%rax # tp[k] - np[k] - borrow
movq %rax,(%rdi,%r14,8) # rp[k] = tentative difference
decq %r15 # dec leaves CF intact for the next sbb
movq 8(%rsi,%r14,8),%rax # preload tp[k+1] (mov preserves CF)
leaq 1(%r14),%r14 # k++ (lea preserves CF)
jge .Lsub
sbbq $0,%rax # rax = -1 if tp < np (borrow out), else 0
andq %rax,%rsi # branchless select of the source vector:
notq %rax
movq %rdi,%rcx
andq %rax,%rcx # tp when it borrowed, rp (= tp-np) otherwise
leaq -1(%r9),%r15
orq %rcx,%rsi # rsi = chosen source
.align 16
.Lcopy:
movq (%rsi,%r15,8),%rax
movq %rax,(%rdi,%r15,8) # rp[j] = final result limb
movq %r14,(%rsp,%r15,8) # zap temporary vector with a non-secret value
decq %r15
jge .Lcopy
# ---- epilogue: recover original rsp and the callee-saved registers ----
movq 8(%rsp,%r9,8),%rsi # rsi = saved stack pointer
movq $1,%rax # return 1
movq (%rsi),%r15
movq 8(%rsi),%r14
movq 16(%rsi),%r13
movq 24(%rsi),%r12
movq 32(%rsi),%rbp
movq 40(%rsi),%rbx
leaq 48(%rsi),%rsp # pop the six pushed registers
.Lepilogue:
.byte 0xf3,0xc3 # repz ret, emitted as raw bytes by the generator
.size bn_mul_mont,.-bn_mul_mont
.byte 77,111,110,116,103,111,109,101,114,121,32,77,117,108,116,105,112,108,105,99,97,116,105,111,110,32,102,111,114,32,120,56,54,95,54,52,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0
.align 16

1844
deps/openssl/asm/x64-elf-gas/camellia/cmll-x86_64.s

File diff suppressed because it is too large

671
deps/openssl/asm/x64-elf-gas/md5/md5-x86_64.s

@@ -0,0 +1,671 @@
.text
#-----------------------------------------------------------------------
# md5_block_asm_data_order — MD5 compression function, fully unrolled.
# ABI: System V AMD64.  Register roles as used below:
#   %rdi = context pointer (state A,B,C,D at offsets 0,4,8,12; kept in
#          %rbp for the duration), %rsi = input data, %rdx = block count
#          (converted to a byte count via shl $6; %rdi then becomes the
#          end-of-data pointer).
# Working state: A=%eax B=%ebx C=%ecx D=%edx; %r10d = current message
# word; %r11d/%r12d = round-function scratch; %r8d/%r9d/%r14d/%r15d hold
# the input state for the final feed-forward add.
# Clobbers flags; saves/restores %rbp,%rbx,%r12,%r14,%r15 (40 bytes).
#-----------------------------------------------------------------------
.align 16
.globl md5_block_asm_data_order
.type md5_block_asm_data_order,@function
md5_block_asm_data_order:
pushq %rbp
pushq %rbx
pushq %r12
pushq %r14
pushq %r15
.Lprologue:
movq %rdi,%rbp # rbp = ctx
shlq $6,%rdx # rdx = num * 64 bytes
leaq (%rsi,%rdx,1),%rdi # rdi = end of input
movl 0(%rbp),%eax # A
movl 4(%rbp),%ebx # B
movl 8(%rbp),%ecx # C
movl 12(%rbp),%edx # D
cmpq %rdi,%rsi # zero blocks? skip straight to store
je .Lend
# ---- one 64-byte block per iteration ----
.Lloop:
movl %eax,%r8d # snapshot state for the feed-forward add
movl %ebx,%r9d
movl %ecx,%r14d
movl %edx,%r15d
# round 1 (16 steps): F(x,y,z) = z ^ (x & (y ^ z)), rotations 7,12,17,22
movl 0(%rsi),%r10d
movl %edx,%r11d
xorl %ecx,%r11d
leal -680876936(%rax,%r10,1),%eax
andl %ebx,%r11d
xorl %edx,%r11d
movl 4(%rsi),%r10d
addl %r11d,%eax
roll $7,%eax
movl %ecx,%r11d
addl %ebx,%eax
xorl %ebx,%r11d
leal -389564586(%rdx,%r10,1),%edx
andl %eax,%r11d
xorl %ecx,%r11d
movl 8(%rsi),%r10d
addl %r11d,%edx
roll $12,%edx
movl %ebx,%r11d
addl %eax,%edx
xorl %eax,%r11d
leal 606105819(%rcx,%r10,1),%ecx
andl %edx,%r11d
xorl %ebx,%r11d
movl 12(%rsi),%r10d
addl %r11d,%ecx
roll $17,%ecx
movl %eax,%r11d
addl %edx,%ecx
xorl %edx,%r11d
leal -1044525330(%rbx,%r10,1),%ebx
andl %ecx,%r11d
xorl %eax,%r11d
movl 16(%rsi),%r10d
addl %r11d,%ebx
roll $22,%ebx
movl %edx,%r11d
addl %ecx,%ebx
xorl %ecx,%r11d
leal -176418897(%rax,%r10,1),%eax
andl %ebx,%r11d
xorl %edx,%r11d
movl 20(%rsi),%r10d
addl %r11d,%eax
roll $7,%eax
movl %ecx,%r11d
addl %ebx,%eax
xorl %ebx,%r11d
leal 1200080426(%rdx,%r10,1),%edx
andl %eax,%r11d
xorl %ecx,%r11d
movl 24(%rsi),%r10d
addl %r11d,%edx
roll $12,%edx
movl %ebx,%r11d
addl %eax,%edx
xorl %eax,%r11d
leal -1473231341(%rcx,%r10,1),%ecx
andl %edx,%r11d
xorl %ebx,%r11d
movl 28(%rsi),%r10d
addl %r11d,%ecx
roll $17,%ecx
movl %eax,%r11d
addl %edx,%ecx
xorl %edx,%r11d
leal -45705983(%rbx,%r10,1),%ebx
andl %ecx,%r11d
xorl %eax,%r11d
movl 32(%rsi),%r10d
addl %r11d,%ebx
roll $22,%ebx
movl %edx,%r11d
addl %ecx,%ebx
xorl %ecx,%r11d
leal 1770035416(%rax,%r10,1),%eax
andl %ebx,%r11d
xorl %edx,%r11d
movl 36(%rsi),%r10d
addl %r11d,%eax
roll $7,%eax
movl %ecx,%r11d
addl %ebx,%eax
xorl %ebx,%r11d
leal -1958414417(%rdx,%r10,1),%edx
andl %eax,%r11d
xorl %ecx,%r11d
movl 40(%rsi),%r10d
addl %r11d,%edx
roll $12,%edx
movl %ebx,%r11d
addl %eax,%edx
xorl %eax,%r11d
leal -42063(%rcx,%r10,1),%ecx
andl %edx,%r11d
xorl %ebx,%r11d
movl 44(%rsi),%r10d
addl %r11d,%ecx
roll $17,%ecx
movl %eax,%r11d
addl %edx,%ecx
xorl %edx,%r11d
leal -1990404162(%rbx,%r10,1),%ebx
andl %ecx,%r11d
xorl %eax,%r11d
movl 48(%rsi),%r10d
addl %r11d,%ebx
roll $22,%ebx
movl %edx,%r11d
addl %ecx,%ebx
xorl %ecx,%r11d
leal 1804603682(%rax,%r10,1),%eax
andl %ebx,%r11d
xorl %edx,%r11d
movl 52(%rsi),%r10d
addl %r11d,%eax
roll $7,%eax
movl %ecx,%r11d
addl %ebx,%eax
xorl %ebx,%r11d
leal -40341101(%rdx,%r10,1),%edx
andl %eax,%r11d
xorl %ecx,%r11d
movl 56(%rsi),%r10d
addl %r11d,%edx
roll $12,%edx
movl %ebx,%r11d
addl %eax,%edx
xorl %eax,%r11d
leal -1502002290(%rcx,%r10,1),%ecx
andl %edx,%r11d
xorl %ebx,%r11d
movl 60(%rsi),%r10d
addl %r11d,%ecx
roll $17,%ecx
movl %eax,%r11d
addl %edx,%ecx
xorl %edx,%r11d
leal 1236535329(%rbx,%r10,1),%ebx
andl %ecx,%r11d
xorl %eax,%r11d
movl 0(%rsi),%r10d
addl %r11d,%ebx
roll $22,%ebx
movl %edx,%r11d
addl %ecx,%ebx
# round 2 (16 steps): G via (x&z)|(y&~z) in r12/r11, rot 5,9,14,20
movl 4(%rsi),%r10d
movl %edx,%r11d
movl %edx,%r12d
notl %r11d
leal -165796510(%rax,%r10,1),%eax
andl %ebx,%r12d
andl %ecx,%r11d
movl 24(%rsi),%r10d
orl %r11d,%r12d
movl %ecx,%r11d
addl %r12d,%eax
movl %ecx,%r12d
roll $5,%eax
addl %ebx,%eax
notl %r11d
leal -1069501632(%rdx,%r10,1),%edx
andl %eax,%r12d
andl %ebx,%r11d
movl 44(%rsi),%r10d
orl %r11d,%r12d
movl %ebx,%r11d
addl %r12d,%edx
movl %ebx,%r12d
roll $9,%edx
addl %eax,%edx
notl %r11d
leal 643717713(%rcx,%r10,1),%ecx
andl %edx,%r12d
andl %eax,%r11d
movl 0(%rsi),%r10d
orl %r11d,%r12d
movl %eax,%r11d
addl %r12d,%ecx
movl %eax,%r12d
roll $14,%ecx
addl %edx,%ecx
notl %r11d
leal -373897302(%rbx,%r10,1),%ebx
andl %ecx,%r12d
andl %edx,%r11d
movl 20(%rsi),%r10d
orl %r11d,%r12d
movl %edx,%r11d
addl %r12d,%ebx
movl %edx,%r12d
roll $20,%ebx
addl %ecx,%ebx
notl %r11d
leal -701558691(%rax,%r10,1),%eax
andl %ebx,%r12d
andl %ecx,%r11d
movl 40(%rsi),%r10d
orl %r11d,%r12d
movl %ecx,%r11d
addl %r12d,%eax
movl %ecx,%r12d
roll $5,%eax
addl %ebx,%eax
notl %r11d
leal 38016083(%rdx,%r10,1),%edx
andl %eax,%r12d
andl %ebx,%r11d
movl 60(%rsi),%r10d
orl %r11d,%r12d
movl %ebx,%r11d
addl %r12d,%edx
movl %ebx,%r12d
roll $9,%edx
addl %eax,%edx
notl %r11d
leal -660478335(%rcx,%r10,1),%ecx
andl %edx,%r12d
andl %eax,%r11d
movl 16(%rsi),%r10d
orl %r11d,%r12d
movl %eax,%r11d
addl %r12d,%ecx
movl %eax,%r12d
roll $14,%ecx
addl %edx,%ecx
notl %r11d
leal -405537848(%rbx,%r10,1),%ebx
andl %ecx,%r12d
andl %edx,%r11d
movl 36(%rsi),%r10d
orl %r11d,%r12d
movl %edx,%r11d
addl %r12d,%ebx
movl %edx,%r12d
roll $20,%ebx
addl %ecx,%ebx
notl %r11d
leal 568446438(%rax,%r10,1),%eax
andl %ebx,%r12d
andl %ecx,%r11d
movl 56(%rsi),%r10d
orl %r11d,%r12d
movl %ecx,%r11d
addl %r12d,%eax
movl %ecx,%r12d
roll $5,%eax
addl %ebx,%eax
notl %r11d
leal -1019803690(%rdx,%r10,1),%edx
andl %eax,%r12d
andl %ebx,%r11d
movl 12(%rsi),%r10d
orl %r11d,%r12d
movl %ebx,%r11d
addl %r12d,%edx
movl %ebx,%r12d
roll $9,%edx
addl %eax,%edx
notl %r11d
leal -187363961(%rcx,%r10,1),%ecx
andl %edx,%r12d
andl %eax,%r11d
movl 32(%rsi),%r10d
orl %r11d,%r12d
movl %eax,%r11d
addl %r12d,%ecx
movl %eax,%r12d
roll $14,%ecx
addl %edx,%ecx
notl %r11d
leal 1163531501(%rbx,%r10,1),%ebx
andl %ecx,%r12d
andl %edx,%r11d
movl 52(%rsi),%r10d
orl %r11d,%r12d
movl %edx,%r11d
addl %r12d,%ebx
movl %edx,%r12d
roll $20,%ebx
addl %ecx,%ebx
notl %r11d
leal -1444681467(%rax,%r10,1),%eax
andl %ebx,%r12d
andl %ecx,%r11d
movl 8(%rsi),%r10d
orl %r11d,%r12d
movl %ecx,%r11d
addl %r12d,%eax
movl %ecx,%r12d
roll $5,%eax
addl %ebx,%eax
notl %r11d
leal -51403784(%rdx,%r10,1),%edx
andl %eax,%r12d
andl %ebx,%r11d
movl 28(%rsi),%r10d
orl %r11d,%r12d
movl %ebx,%r11d
addl %r12d,%edx
movl %ebx,%r12d
roll $9,%edx
addl %eax,%edx
notl %r11d
leal 1735328473(%rcx,%r10,1),%ecx
andl %edx,%r12d
andl %eax,%r11d
movl 48(%rsi),%r10d
orl %r11d,%r12d
movl %eax,%r11d
addl %r12d,%ecx
movl %eax,%r12d
roll $14,%ecx
addl %edx,%ecx
notl %r11d
leal -1926607734(%rbx,%r10,1),%ebx
andl %ecx,%r12d
andl %edx,%r11d
movl 0(%rsi),%r10d
orl %r11d,%r12d
movl %edx,%r11d
addl %r12d,%ebx
movl %edx,%r12d
roll $20,%ebx
addl %ecx,%ebx
# round 3 (16 steps): H(x,y,z) = x ^ y ^ z, rotations 4,11,16,23
movl 20(%rsi),%r10d
movl %ecx,%r11d
leal -378558(%rax,%r10,1),%eax
movl 32(%rsi),%r10d
xorl %edx,%r11d
xorl %ebx,%r11d
addl %r11d,%eax
roll $4,%eax
movl %ebx,%r11d
addl %ebx,%eax
leal -2022574463(%rdx,%r10,1),%edx
movl 44(%rsi),%r10d
xorl %ecx,%r11d
xorl %eax,%r11d
addl %r11d,%edx
roll $11,%edx
movl %eax,%r11d
addl %eax,%edx
leal 1839030562(%rcx,%r10,1),%ecx
movl 56(%rsi),%r10d
xorl %ebx,%r11d
xorl %edx,%r11d
addl %r11d,%ecx
roll $16,%ecx
movl %edx,%r11d
addl %edx,%ecx
leal -35309556(%rbx,%r10,1),%ebx
movl 4(%rsi),%r10d
xorl %eax,%r11d
xorl %ecx,%r11d
addl %r11d,%ebx
roll $23,%ebx
movl %ecx,%r11d
addl %ecx,%ebx
leal -1530992060(%rax,%r10,1),%eax
movl 16(%rsi),%r10d
xorl %edx,%r11d
xorl %ebx,%r11d
addl %r11d,%eax
roll $4,%eax
movl %ebx,%r11d
addl %ebx,%eax
leal 1272893353(%rdx,%r10,1),%edx
movl 28(%rsi),%r10d
xorl %ecx,%r11d
xorl %eax,%r11d
addl %r11d,%edx
roll $11,%edx
movl %eax,%r11d
addl %eax,%edx
leal -155497632(%rcx,%r10,1),%ecx
movl 40(%rsi),%r10d
xorl %ebx,%r11d
xorl %edx,%r11d
addl %r11d,%ecx
roll $16,%ecx
movl %edx,%r11d
addl %edx,%ecx
leal -1094730640(%rbx,%r10,1),%ebx
movl 52(%rsi),%r10d
xorl %eax,%r11d
xorl %ecx,%r11d
addl %r11d,%ebx
roll $23,%ebx
movl %ecx,%r11d
addl %ecx,%ebx
leal 681279174(%rax,%r10,1),%eax
movl 0(%rsi),%r10d
xorl %edx,%r11d
xorl %ebx,%r11d
addl %r11d,%eax
roll $4,%eax
movl %ebx,%r11d
addl %ebx,%eax
leal -358537222(%rdx,%r10,1),%edx
movl 12(%rsi),%r10d
xorl %ecx,%r11d
xorl %eax,%r11d
addl %r11d,%edx
roll $11,%edx
movl %eax,%r11d
addl %eax,%edx
leal -722521979(%rcx,%r10,1),%ecx
movl 24(%rsi),%r10d
xorl %ebx,%r11d
xorl %edx,%r11d
addl %r11d,%ecx
roll $16,%ecx
movl %edx,%r11d
addl %edx,%ecx
leal 76029189(%rbx,%r10,1),%ebx
movl 36(%rsi),%r10d
xorl %eax,%r11d
xorl %ecx,%r11d
addl %r11d,%ebx
roll $23,%ebx
movl %ecx,%r11d
addl %ecx,%ebx
leal -640364487(%rax,%r10,1),%eax
movl 48(%rsi),%r10d
xorl %edx,%r11d
xorl %ebx,%r11d
addl %r11d,%eax
roll $4,%eax
movl %ebx,%r11d
addl %ebx,%eax
leal -421815835(%rdx,%r10,1),%edx
movl 60(%rsi),%r10d
xorl %ecx,%r11d
xorl %eax,%r11d
addl %r11d,%edx
roll $11,%edx
movl %eax,%r11d
addl %eax,%edx
leal 530742520(%rcx,%r10,1),%ecx
movl 8(%rsi),%r10d
xorl %ebx,%r11d
xorl %edx,%r11d
addl %r11d,%ecx
roll $16,%ecx
movl %edx,%r11d
addl %edx,%ecx
leal -995338651(%rbx,%r10,1),%ebx
movl 0(%rsi),%r10d
xorl %eax,%r11d
xorl %ecx,%r11d
addl %r11d,%ebx
roll $23,%ebx
movl %ecx,%r11d
addl %ecx,%ebx
# round 4 (16 steps): I(x,y,z) = y ^ (x | ~z); ~z built by xor with -1,
# rotations 6,10,15,21
movl 0(%rsi),%r10d
movl $4294967295,%r11d
xorl %edx,%r11d
leal -198630844(%rax,%r10,1),%eax
orl %ebx,%r11d
xorl %ecx,%r11d
addl %r11d,%eax
movl 28(%rsi),%r10d
movl $4294967295,%r11d
roll $6,%eax
xorl %ecx,%r11d
addl %ebx,%eax
leal 1126891415(%rdx,%r10,1),%edx
orl %eax,%r11d
xorl %ebx,%r11d
addl %r11d,%edx
movl 56(%rsi),%r10d
movl $4294967295,%r11d
roll $10,%edx
xorl %ebx,%r11d
addl %eax,%edx
leal -1416354905(%rcx,%r10,1),%ecx
orl %edx,%r11d
xorl %eax,%r11d
addl %r11d,%ecx
movl 20(%rsi),%r10d
movl $4294967295,%r11d
roll $15,%ecx
xorl %eax,%r11d
addl %edx,%ecx
leal -57434055(%rbx,%r10,1),%ebx
orl %ecx,%r11d
xorl %edx,%r11d
addl %r11d,%ebx
movl 48(%rsi),%r10d
movl $4294967295,%r11d
roll $21,%ebx
xorl %edx,%r11d
addl %ecx,%ebx
leal 1700485571(%rax,%r10,1),%eax
orl %ebx,%r11d
xorl %ecx,%r11d
addl %r11d,%eax
movl 12(%rsi),%r10d
movl $4294967295,%r11d
roll $6,%eax
xorl %ecx,%r11d
addl %ebx,%eax
leal -1894986606(%rdx,%r10,1),%edx
orl %eax,%r11d
xorl %ebx,%r11d
addl %r11d,%edx
movl 40(%rsi),%r10d
movl $4294967295,%r11d
roll $10,%edx
xorl %ebx,%r11d
addl %eax,%edx
leal -1051523(%rcx,%r10,1),%ecx
orl %edx,%r11d
xorl %eax,%r11d
addl %r11d,%ecx
movl 4(%rsi),%r10d
movl $4294967295,%r11d
roll $15,%ecx
xorl %eax,%r11d
addl %edx,%ecx
leal -2054922799(%rbx,%r10,1),%ebx
orl %ecx,%r11d
xorl %edx,%r11d
addl %r11d,%ebx
movl 32(%rsi),%r10d
movl $4294967295,%r11d
roll $21,%ebx
xorl %edx,%r11d
addl %ecx,%ebx
leal 1873313359(%rax,%r10,1),%eax
orl %ebx,%r11d
xorl %ecx,%r11d
addl %r11d,%eax
movl 60(%rsi),%r10d
movl $4294967295,%r11d
roll $6,%eax
xorl %ecx,%r11d
addl %ebx,%eax
leal -30611744(%rdx,%r10,1),%edx
orl %eax,%r11d
xorl %ebx,%r11d
addl %r11d,%edx
movl 24(%rsi),%r10d
movl $4294967295,%r11d
roll $10,%edx
xorl %ebx,%r11d
addl %eax,%edx
leal -1560198380(%rcx,%r10,1),%ecx
orl %edx,%r11d
xorl %eax,%r11d
addl %r11d,%ecx
movl 52(%rsi),%r10d
movl $4294967295,%r11d
roll $15,%ecx
xorl %eax,%r11d
addl %edx,%ecx
leal 1309151649(%rbx,%r10,1),%ebx
orl %ecx,%r11d
xorl %edx,%r11d
addl %r11d,%ebx
movl 16(%rsi),%r10d
movl $4294967295,%r11d
roll $21,%ebx
xorl %edx,%r11d
addl %ecx,%ebx
leal -145523070(%rax,%r10,1),%eax
orl %ebx,%r11d
xorl %ecx,%r11d
addl %r11d,%eax
movl 44(%rsi),%r10d
movl $4294967295,%r11d
roll $6,%eax
xorl %ecx,%r11d
addl %ebx,%eax
leal -1120210379(%rdx,%r10,1),%edx
orl %eax,%r11d
xorl %ebx,%r11d
addl %r11d,%edx
movl 8(%rsi),%r10d
movl $4294967295,%r11d
roll $10,%edx
xorl %ebx,%r11d
addl %eax,%edx
leal 718787259(%rcx,%r10,1),%ecx
orl %edx,%r11d
xorl %eax,%r11d
addl %r11d,%ecx
movl 36(%rsi),%r10d
movl $4294967295,%r11d
roll $15,%ecx
xorl %eax,%r11d
addl %edx,%ecx
leal -343485551(%rbx,%r10,1),%ebx
orl %ecx,%r11d
xorl %edx,%r11d
addl %r11d,%ebx
movl 0(%rsi),%r10d
movl $4294967295,%r11d
roll $21,%ebx
xorl %edx,%r11d
addl %ecx,%ebx
# feed-forward: add this block's input state back into A,B,C,D
addl %r8d,%eax
addl %r9d,%ebx
addl %r14d,%ecx
addl %r15d,%edx
addq $64,%rsi # next 64-byte block
cmpq %rdi,%rsi
jb .Lloop # while data remains
.Lend:
movl %eax,0(%rbp) # store state back into the context
movl %ebx,4(%rbp)
movl %ecx,8(%rbp)
movl %edx,12(%rbp)
movq (%rsp),%r15 # restore the five pushed registers
movq 8(%rsp),%r14
movq 16(%rsp),%r12
movq 24(%rsp),%rbx
movq 32(%rsp),%rbp
addq $40,%rsp
.Lepilogue:
.byte 0xf3,0xc3 # repz ret
.size md5_block_asm_data_order,.-md5_block_asm_data_order

430
deps/openssl/asm/x64-elf-gas/rc4/rc4-x86_64.s

@@ -0,0 +1,430 @@
.text
#-----------------------------------------------------------------------
# RC4 — stream-cipher bulk processing.
# ABI: System V AMD64.  Register roles as used below:
#   %rdi = key schedule, %rsi = len, %rdx = input, %rcx = output
# presumably RC4(RC4_KEY *key, size_t len, const void *in, void *out) —
# TODO confirm against rc4.h.  After the +8 bias, -8(%rdi) = index x,
# -4(%rdi) = index y, and (%rdi) is the 256-entry S array.  The array is
# dwords in the integer path; a byte-array layout is used instead when
# 256(%rdi) == -1 (flag written by RC4_set_key below).
# Clobbers %rax,%rbx?,%r8-%r13,flags; saves %rbx,%r12,%r13.
#-----------------------------------------------------------------------
.globl RC4
.type RC4,@function
.align 16
RC4: orq %rsi,%rsi # len == 0 -> nothing to do, return at once
jne .Lentry
.byte 0xf3,0xc3 # repz ret
.Lentry:
pushq %rbx
pushq %r12
pushq %r13
.Lprologue:
addq $8,%rdi # bias key ptr so S starts at offset 0
movl -8(%rdi),%r8d # r8 = x
movl -4(%rdi),%r12d # r12 = y
cmpl $-1,256(%rdi) # byte-array key schedule?
je .LRC4_CHAR
incb %r8b
movl (%rdi,%r8,4),%r9d # r9 = S[x]
testq $-8,%rsi # at least 8 bytes left?
jz .Lloop1
jmp .Lloop8
.align 16
# ---- integer (dword S-box) path, 8 bytes per iteration ----
# classic swap S[x]<->S[y]; cmove patches the read-after-write case
# where the next x equals the current y.
.Lloop8:
addb %r9b,%r12b
movq %r8,%r10
movl (%rdi,%r12,4),%r13d
rorq $8,%rax # shift keystream byte into rax
incb %r10b
movl (%rdi,%r10,4),%r11d
cmpq %r10,%r12
movl %r9d,(%rdi,%r12,4)
cmoveq %r9,%r11 # next S[x] was just overwritten: fix up
movl %r13d,(%rdi,%r8,4)
addb %r9b,%r13b
movb (%rdi,%r13,4),%al # keystream byte = S[S[x]+S[y]]
addb %r11b,%r12b
movq %r10,%r8
movl (%rdi,%r12,4),%r13d
rorq $8,%rax
incb %r8b
movl (%rdi,%r8,4),%r9d
cmpq %r8,%r12
movl %r11d,(%rdi,%r12,4)
cmoveq %r11,%r9
movl %r13d,(%rdi,%r10,4)
addb %r11b,%r13b
movb (%rdi,%r13,4),%al
addb %r9b,%r12b
movq %r8,%r10
movl (%rdi,%r12,4),%r13d
rorq $8,%rax
incb %r10b
movl (%rdi,%r10,4),%r11d
cmpq %r10,%r12
movl %r9d,(%rdi,%r12,4)
cmoveq %r9,%r11
movl %r13d,(%rdi,%r8,4)
addb %r9b,%r13b
movb (%rdi,%r13,4),%al
addb %r11b,%r12b
movq %r10,%r8
movl (%rdi,%r12,4),%r13d
rorq $8,%rax
incb %r8b
movl (%rdi,%r8,4),%r9d
cmpq %r8,%r12
movl %r11d,(%rdi,%r12,4)
cmoveq %r11,%r9
movl %r13d,(%rdi,%r10,4)
addb %r11b,%r13b
movb (%rdi,%r13,4),%al
addb %r9b,%r12b
movq %r8,%r10
movl (%rdi,%r12,4),%r13d
rorq $8,%rax
incb %r10b
movl (%rdi,%r10,4),%r11d
cmpq %r10,%r12
movl %r9d,(%rdi,%r12,4)
cmoveq %r9,%r11
movl %r13d,(%rdi,%r8,4)
addb %r9b,%r13b
movb (%rdi,%r13,4),%al
addb %r11b,%r12b
movq %r10,%r8
movl (%rdi,%r12,4),%r13d
rorq $8,%rax
incb %r8b
movl (%rdi,%r8,4),%r9d
cmpq %r8,%r12
movl %r11d,(%rdi,%r12,4)
cmoveq %r11,%r9
movl %r13d,(%rdi,%r10,4)
addb %r11b,%r13b
movb (%rdi,%r13,4),%al
addb %r9b,%r12b
movq %r8,%r10
movl (%rdi,%r12,4),%r13d
rorq $8,%rax
incb %r10b
movl (%rdi,%r10,4),%r11d
cmpq %r10,%r12
movl %r9d,(%rdi,%r12,4)
cmoveq %r9,%r11
movl %r13d,(%rdi,%r8,4)
addb %r9b,%r13b
movb (%rdi,%r13,4),%al
addb %r11b,%r12b
movq %r10,%r8
movl (%rdi,%r12,4),%r13d
rorq $8,%rax
incb %r8b
movl (%rdi,%r8,4),%r9d
cmpq %r8,%r12
movl %r11d,(%rdi,%r12,4)
cmoveq %r11,%r9
movl %r13d,(%rdi,%r10,4)
addb %r11b,%r13b
movb (%rdi,%r13,4),%al
rorq $8,%rax # rax now holds 8 keystream bytes
subq $8,%rsi
xorq (%rdx),%rax # XOR with 8 input bytes
addq $8,%rdx
movq %rax,(%rcx) # write 8 output bytes
addq $8,%rcx
testq $-8,%rsi
jnz .Lloop8
cmpq $0,%rsi
jne .Lloop1 # tail of 1..7 bytes
jmp .Lexit
.align 16
# ---- integer path, 1 byte per iteration (tail / short inputs) ----
.Lloop1:
addb %r9b,%r12b # y += S[x]
movl (%rdi,%r12,4),%r13d # swap S[x] and S[y]
movl %r9d,(%rdi,%r12,4)
movl %r13d,(%rdi,%r8,4)
addb %r13b,%r9b
incb %r8b # x++
movl (%rdi,%r9,4),%r13d # keystream byte
movl (%rdi,%r8,4),%r9d # preload next S[x]
xorb (%rdx),%r13b
incq %rdx
movb %r13b,(%rcx)
incq %rcx
decq %rsi
jnz .Lloop1
jmp .Lexit
.align 16
# ---- byte (char S-box) path ----
.LRC4_CHAR:
addb $1,%r8b
movzbl (%rdi,%r8,1),%r9d # r9 = S[x]
testq $-8,%rsi
jz .Lcloop1
cmpl $0,260(%rdi) # per-CPU tuning flag from RC4_set_key
jnz .Lcloop1
jmp .Lcloop8
.align 16
# 8 bytes per iteration; branches (.LcmovN) play the role of the cmove
# fix-up in the integer path.
.Lcloop8:
movl (%rdx),%eax # load 8 input bytes up front
movl 4(%rdx),%ebx
addb %r9b,%r12b
leaq 1(%r8),%r10
movzbl (%rdi,%r12,1),%r13d
movzbl %r10b,%r10d
movzbl (%rdi,%r10,1),%r11d
movb %r9b,(%rdi,%r12,1)
cmpq %r10,%r12
movb %r13b,(%rdi,%r8,1)
jne .Lcmov0
movq %r9,%r11 # next S[x] was just overwritten: fix up
.Lcmov0:
addb %r9b,%r13b
xorb (%rdi,%r13,1),%al # fold keystream byte into input byte
rorl $8,%eax
addb %r11b,%r12b
leaq 1(%r10),%r8
movzbl (%rdi,%r12,1),%r13d
movzbl %r8b,%r8d
movzbl (%rdi,%r8,1),%r9d
movb %r11b,(%rdi,%r12,1)
cmpq %r8,%r12
movb %r13b,(%rdi,%r10,1)
jne .Lcmov1
movq %r11,%r9
.Lcmov1:
addb %r11b,%r13b
xorb (%rdi,%r13,1),%al
rorl $8,%eax
addb %r9b,%r12b
leaq 1(%r8),%r10
movzbl (%rdi,%r12,1),%r13d
movzbl %r10b,%r10d
movzbl (%rdi,%r10,1),%r11d
movb %r9b,(%rdi,%r12,1)
cmpq %r10,%r12
movb %r13b,(%rdi,%r8,1)
jne .Lcmov2
movq %r9,%r11
.Lcmov2:
addb %r9b,%r13b
xorb (%rdi,%r13,1),%al
rorl $8,%eax
addb %r11b,%r12b
leaq 1(%r10),%r8
movzbl (%rdi,%r12,1),%r13d
movzbl %r8b,%r8d
movzbl (%rdi,%r8,1),%r9d
movb %r11b,(%rdi,%r12,1)
cmpq %r8,%r12
movb %r13b,(%rdi,%r10,1)
jne .Lcmov3
movq %r11,%r9
.Lcmov3:
addb %r11b,%r13b
xorb (%rdi,%r13,1),%al
rorl $8,%eax
addb %r9b,%r12b
leaq 1(%r8),%r10
movzbl (%rdi,%r12,1),%r13d
movzbl %r10b,%r10d
movzbl (%rdi,%r10,1),%r11d
movb %r9b,(%rdi,%r12,1)
cmpq %r10,%r12
movb %r13b,(%rdi,%r8,1)
jne .Lcmov4
movq %r9,%r11
.Lcmov4:
addb %r9b,%r13b
xorb (%rdi,%r13,1),%bl # second dword of the 8-byte group
rorl $8,%ebx
addb %r11b,%r12b
leaq 1(%r10),%r8
movzbl (%rdi,%r12,1),%r13d
movzbl %r8b,%r8d
movzbl (%rdi,%r8,1),%r9d
movb %r11b,(%rdi,%r12,1)
cmpq %r8,%r12
movb %r13b,(%rdi,%r10,1)
jne .Lcmov5
movq %r11,%r9
.Lcmov5:
addb %r11b,%r13b
xorb (%rdi,%r13,1),%bl
rorl $8,%ebx
addb %r9b,%r12b
leaq 1(%r8),%r10
movzbl (%rdi,%r12,1),%r13d
movzbl %r10b,%r10d
movzbl (%rdi,%r10,1),%r11d
movb %r9b,(%rdi,%r12,1)
cmpq %r10,%r12
movb %r13b,(%rdi,%r8,1)
jne .Lcmov6
movq %r9,%r11
.Lcmov6:
addb %r9b,%r13b
xorb (%rdi,%r13,1),%bl
rorl $8,%ebx
addb %r11b,%r12b
leaq 1(%r10),%r8
movzbl (%rdi,%r12,1),%r13d
movzbl %r8b,%r8d
movzbl (%rdi,%r8,1),%r9d
movb %r11b,(%rdi,%r12,1)
cmpq %r8,%r12
movb %r13b,(%rdi,%r10,1)
jne .Lcmov7
movq %r11,%r9
.Lcmov7:
addb %r11b,%r13b
xorb (%rdi,%r13,1),%bl
rorl $8,%ebx
leaq -8(%rsi),%rsi
movl %eax,(%rcx) # store the two processed dwords
leaq 8(%rdx),%rdx
movl %ebx,4(%rcx)
leaq 8(%rcx),%rcx
testq $-8,%rsi
jnz .Lcloop8
cmpq $0,%rsi
jne .Lcloop1
jmp .Lexit
.align 16
# ---- byte path, 1 byte per iteration ----
.Lcloop1:
addb %r9b,%r12b # y += S[x]
movzbl (%rdi,%r12,1),%r13d # swap S[x] and S[y]
movb %r9b,(%rdi,%r12,1)
movb %r13b,(%rdi,%r8,1)
addb %r9b,%r13b
addb $1,%r8b # x++
movzbl %r13b,%r13d
movzbl %r8b,%r8d
movzbl (%rdi,%r13,1),%r13d # keystream byte
movzbl (%rdi,%r8,1),%r9d # preload next S[x]
xorb (%rdx),%r13b
leaq 1(%rdx),%rdx
movb %r13b,(%rcx)
leaq 1(%rcx),%rcx
subq $1,%rsi
jnz .Lcloop1
jmp .Lexit
.align 16
.Lexit:
subb $1,%r8b # undo the lookahead increment of x
movl %r8d,-8(%rdi) # store x and y back into the key
movl %r12d,-4(%rdi)
movq (%rsp),%r13 # restore saved registers
movq 8(%rsp),%r12
movq 16(%rsp),%rbx
addq $24,%rsp
.Lepilogue:
.byte 0xf3,0xc3 # repz ret
.size RC4,.-RC4
#-----------------------------------------------------------------------
# RC4_set_key — key-schedule setup.
# ABI: System V AMD64: %rdi = key, %rsi = key length, %rdx = key bytes
# (presumably RC4_set_key(RC4_KEY *key, int len, const unsigned char
#  *data) — TODO confirm).  Fills the 256-entry S array with the RC4
# KSA.  OPENSSL_ia32cap_P bit 20 selects the byte-array ("c") layout
# over the dword ("w") layout; in the byte path, bit 30 is recorded in
# 260(%rdi) (consulted by RC4 above to pick .Lcloop8 vs .Lcloop1), and
# 256(%rdi) is set to -1 to mark the byte layout.  x/y at -8/-4(%rdi)
# are reset to 0.  Leaf function; clobbers %rax,%rcx,%r8-%r11,flags.
#-----------------------------------------------------------------------
.globl RC4_set_key
.type RC4_set_key,@function
.align 16
RC4_set_key:
leaq 8(%rdi),%rdi # same +8 bias as in RC4
leaq (%rdx,%rsi,1),%rdx # rdx = end of key data
negq %rsi # rsi = -len (counts up toward 0)
movq %rsi,%rcx # rcx = -len, for wrap-around below
xorl %eax,%eax
xorq %r9,%r9 # i = 0
xorq %r10,%r10
xorq %r11,%r11
movl OPENSSL_ia32cap_P(%rip),%r8d # CPU capability flags
btl $20,%r8d
jnc .Lw1stloop # bit 20 clear -> dword S-box layout
btl $30,%r8d
setc %r9b
movl %r9d,260(%rdi) # record bit 30 for RC4's loop choice
jmp .Lc1stloop
.align 16
# ---- dword layout: S[i] = i ----
.Lw1stloop:
movl %eax,(%rdi,%rax,4)
addb $1,%al
jnc .Lw1stloop # 256 iterations (al wraps -> CF set)
xorq %r9,%r9 # i = 0
xorq %r8,%r8 # j = 0
.align 16
# ---- dword layout: KSA mixing, j += S[i] + key[i mod len] ----
.Lw2ndloop:
movl (%rdi,%r9,4),%r10d
addb (%rdx,%rsi,1),%r8b
addb %r10b,%r8b
addq $1,%rsi # advance key byte; ZF set at end of key
movl (%rdi,%r8,4),%r11d
cmovzq %rcx,%rsi # wrap key index back to start
movl %r10d,(%rdi,%r8,4) # swap S[i] and S[j]
movl %r11d,(%rdi,%r9,4)
addb $1,%r9b
jnc .Lw2ndloop # 256 iterations
jmp .Lexit_key
.align 16
# ---- byte layout: S[i] = i ----
.Lc1stloop:
movb %al,(%rdi,%rax,1)
addb $1,%al
jnc .Lc1stloop
xorq %r9,%r9
xorq %r8,%r8
.align 16
# ---- byte layout: KSA mixing (branching wrap instead of cmov) ----
.Lc2ndloop:
movb (%rdi,%r9,1),%r10b
addb (%rdx,%rsi,1),%r8b
addb %r10b,%r8b
addq $1,%rsi
movb (%rdi,%r8,1),%r11b
jnz .Lcnowrap # end of key reached?
movq %rcx,%rsi # yes: wrap key index
.Lcnowrap:
movb %r10b,(%rdi,%r8,1) # swap S[i] and S[j]
movb %r11b,(%rdi,%r9,1)
addb $1,%r9b
jnc .Lc2ndloop
movl $-1,256(%rdi) # flag byte-array layout for RC4
.align 16
.Lexit_key:
xorl %eax,%eax
movl %eax,-8(%rdi) # x = 0
movl %eax,-4(%rdi) # y = 0
.byte 0xf3,0xc3 # repz ret
.size RC4_set_key,.-RC4_set_key
#-----------------------------------------------------------------------
# RC4_options — return a static string describing the selected code
# path.  No arguments; result in %rax.  The three NUL-terminated .byte
# strings below decode to "rc4(8x,int)", "rc4(8x,char)", "rc4(1x,char)";
# the same OPENSSL_ia32cap_P bits tested in RC4_set_key (20, then 30)
# advance the pointer past each 12-/13-byte string to pick the match.
# Leaf function; clobbers %rdx, flags.
#-----------------------------------------------------------------------
.globl RC4_options
.type RC4_options,@function
.align 16
RC4_options:
leaq .Lopts(%rip),%rax # default: "rc4(8x,int)"
movl OPENSSL_ia32cap_P(%rip),%edx
btl $20,%edx
jnc .Ldone
addq $12,%rax # skip to "rc4(8x,char)"
btl $30,%edx
jnc .Ldone
addq $13,%rax # skip to "rc4(1x,char)"
.Ldone:
.byte 0xf3,0xc3 # repz ret
.align 64
.Lopts:
.byte 114,99,52,40,56,120,44,105,110,116,41,0
.byte 114,99,52,40,56,120,44,99,104,97,114,41,0
.byte 114,99,52,40,49,120,44,99,104,97,114,41,0
.byte 82,67,52,32,102,111,114,32,120,56,54,95,54,52,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0
.align 64
.size RC4_options,.-RC4_options

1283
deps/openssl/asm/x64-elf-gas/sha/sha1-x86_64.s

File diff suppressed because it is too large

1971
deps/openssl/asm/x64-elf-gas/sha/sha512-x86_64.s

File diff suppressed because it is too large

859
deps/openssl/asm/x64-elf-gas/whrlpool/wp-x86_64.s

@@ -0,0 +1,859 @@
.text
# void whirlpool_block(u64 ctx[8], const u8 *inp, size_t num)
# SysV AMD64: rdi = 64-byte hash state, rsi = input, rdx = block count.
# Table-driven Whirlpool compression (CRYPTOGAMS generated code).
# Each round performs byte-wise lookups into .Ltable: an input byte in
# cl/dl is doubled (leaq (%rcx,%rcx,1)) and then scaled by 8, i.e. the
# table rows are 16 bytes wide; qword reads at row offsets 0..7 pick up
# the 8 pre-rotated variants of one table entry.  Round constants live
# at .Ltable+4096 (loaded via 4096(%rbp,%rsi,8)).
.globl whirlpool_block
.type whirlpool_block,@function
.align 16
whirlpool_block:
pushq %rbx
pushq %rbp
pushq %r12
pushq %r13
pushq %r14
pushq %r15
# Build a 64-byte-aligned frame: 0..63 = round key K, 64..127 = state,
# 128.. = saved {ctx, inp, num, -, original rsp}.
movq %rsp,%r11
subq $128+40,%rsp
andq $-64,%rsp
leaq 128(%rsp),%r10
movq %rdi,0(%r10)
movq %rsi,8(%r10)
movq %rdx,16(%r10)
movq %r11,32(%r10)
.Lprologue:
movq %r10,%rbx
leaq .Ltable(%rip),%rbp
xorq %rcx,%rcx
xorq %rdx,%rdx
# r8..r15 = the 8 qwords of the chaining value H.
movq 0(%rdi),%r8
movq 8(%rdi),%r9
movq 16(%rdi),%r10
movq 24(%rdi),%r11
movq 32(%rdi),%r12
movq 40(%rdi),%r13
movq 48(%rdi),%r14
movq 56(%rdi),%r15
.Louterloop:
# K0 = H (spilled to 0..63(%rsp)); state = H xor message block
# (kept in r8..r15 and mirrored at 64..127(%rsp)).
movq %r8,0(%rsp)
movq %r9,8(%rsp)
movq %r10,16(%rsp)
movq %r11,24(%rsp)
movq %r12,32(%rsp)
movq %r13,40(%rsp)
movq %r14,48(%rsp)
movq %r15,56(%rsp)
xorq 0(%rsi),%r8
xorq 8(%rsi),%r9
xorq 16(%rsi),%r10
xorq 24(%rsi),%r11
xorq 32(%rsi),%r12
xorq 40(%rsi),%r13
xorq 48(%rsi),%r14
xorq 56(%rsi),%r15
movq %r8,64+0(%rsp)
movq %r9,64+8(%rsp)
movq %r10,64+16(%rsp)
movq %r11,64+24(%rsp)
movq %r12,64+32(%rsp)
movq %r13,64+40(%rsp)
movq %r14,64+48(%rsp)
movq %r15,64+56(%rsp)
# rsi doubles as the round counter, stored at 24(%rbx).
xorq %rsi,%rsi
movq %rsi,24(%rbx)
.align 16
.Lround:
# --- first half: advance the key schedule.  r8 starts from the round
# constant; each group consumes one qword of K (eax/ebx hold its low and
# high halves) and xors 8 rotated table entries into r8..r15.
movq 4096(%rbp,%rsi,8),%r8
movl 0(%rsp),%eax
movl 4(%rsp),%ebx
movb %al,%cl
movb %ah,%dl
leaq (%rcx,%rcx,1),%rsi
leaq (%rdx,%rdx,1),%rdi
shrl $16,%eax
xorq 0(%rbp,%rsi,8),%r8
movq 7(%rbp,%rdi,8),%r9
movb %al,%cl
movb %ah,%dl
movl 0+8(%rsp),%eax
leaq (%rcx,%rcx,1),%rsi
leaq (%rdx,%rdx,1),%rdi
movq 6(%rbp,%rsi,8),%r10
movq 5(%rbp,%rdi,8),%r11
movb %bl,%cl
movb %bh,%dl
leaq (%rcx,%rcx,1),%rsi
leaq (%rdx,%rdx,1),%rdi
shrl $16,%ebx
movq 4(%rbp,%rsi,8),%r12
movq 3(%rbp,%rdi,8),%r13
movb %bl,%cl
movb %bh,%dl
movl 0+8+4(%rsp),%ebx
leaq (%rcx,%rcx,1),%rsi
leaq (%rdx,%rdx,1),%rdi
movq 2(%rbp,%rsi,8),%r14
movq 1(%rbp,%rdi,8),%r15
movb %al,%cl
movb %ah,%dl
leaq (%rcx,%rcx,1),%rsi
leaq (%rdx,%rdx,1),%rdi
shrl $16,%eax
xorq 0(%rbp,%rsi,8),%r9
xorq 7(%rbp,%rdi,8),%r10
movb %al,%cl
movb %ah,%dl
movl 8+8(%rsp),%eax
leaq (%rcx,%rcx,1),%rsi
leaq (%rdx,%rdx,1),%rdi
xorq 6(%rbp,%rsi,8),%r11
xorq 5(%rbp,%rdi,8),%r12
movb %bl,%cl
movb %bh,%dl
leaq (%rcx,%rcx,1),%rsi
leaq (%rdx,%rdx,1),%rdi
shrl $16,%ebx
xorq 4(%rbp,%rsi,8),%r13
xorq 3(%rbp,%rdi,8),%r14
movb %bl,%cl
movb %bh,%dl
movl 8+8+4(%rsp),%ebx
leaq (%rcx,%rcx,1),%rsi
leaq (%rdx,%rdx,1),%rdi
xorq 2(%rbp,%rsi,8),%r15
xorq 1(%rbp,%rdi,8),%r8
movb %al,%cl
movb %ah,%dl
leaq (%rcx,%rcx,1),%rsi
leaq (%rdx,%rdx,1),%rdi
shrl $16,%eax
xorq 0(%rbp,%rsi,8),%r10
xorq 7(%rbp,%rdi,8),%r11
movb %al,%cl
movb %ah,%dl
movl 16+8(%rsp),%eax
leaq (%rcx,%rcx,1),%rsi
leaq (%rdx,%rdx,1),%rdi
xorq 6(%rbp,%rsi,8),%r12
xorq 5(%rbp,%rdi,8),%r13
movb %bl,%cl
movb %bh,%dl
leaq (%rcx,%rcx,1),%rsi
leaq (%rdx,%rdx,1),%rdi
shrl $16,%ebx
xorq 4(%rbp,%rsi,8),%r14
xorq 3(%rbp,%rdi,8),%r15
movb %bl,%cl
movb %bh,%dl
movl 16+8+4(%rsp),%ebx
leaq (%rcx,%rcx,1),%rsi
leaq (%rdx,%rdx,1),%rdi
xorq 2(%rbp,%rsi,8),%r8
xorq 1(%rbp,%rdi,8),%r9
movb %al,%cl
movb %ah,%dl
leaq (%rcx,%rcx,1),%rsi
leaq (%rdx,%rdx,1),%rdi
shrl $16,%eax
xorq 0(%rbp,%rsi,8),%r11
xorq 7(%rbp,%rdi,8),%r12
movb %al,%cl
movb %ah,%dl
movl 24+8(%rsp),%eax
leaq (%rcx,%rcx,1),%rsi
leaq (%rdx,%rdx,1),%rdi
xorq 6(%rbp,%rsi,8),%r13
xorq 5(%rbp,%rdi,8),%r14
movb %bl,%cl
movb %bh,%dl
leaq (%rcx,%rcx,1),%rsi
leaq (%rdx,%rdx,1),%rdi
shrl $16,%ebx
xorq 4(%rbp,%rsi,8),%r15
xorq 3(%rbp,%rdi,8),%r8
movb %bl,%cl
movb %bh,%dl
movl 24+8+4(%rsp),%ebx
leaq (%rcx,%rcx,1),%rsi
leaq (%rdx,%rdx,1),%rdi
xorq 2(%rbp,%rsi,8),%r9
xorq 1(%rbp,%rdi,8),%r10
movb %al,%cl
movb %ah,%dl
leaq (%rcx,%rcx,1),%rsi
leaq (%rdx,%rdx,1),%rdi
shrl $16,%eax
xorq 0(%rbp,%rsi,8),%r12
xorq 7(%rbp,%rdi,8),%r13
movb %al,%cl
movb %ah,%dl
movl 32+8(%rsp),%eax
leaq (%rcx,%rcx,1),%rsi
leaq (%rdx,%rdx,1),%rdi
xorq 6(%rbp,%rsi,8),%r14
xorq 5(%rbp,%rdi,8),%r15
movb %bl,%cl
movb %bh,%dl
leaq (%rcx,%rcx,1),%rsi
leaq (%rdx,%rdx,1),%rdi
shrl $16,%ebx
xorq 4(%rbp,%rsi,8),%r8
xorq 3(%rbp,%rdi,8),%r9
movb %bl,%cl
movb %bh,%dl
movl 32+8+4(%rsp),%ebx
leaq (%rcx,%rcx,1),%rsi
leaq (%rdx,%rdx,1),%rdi
xorq 2(%rbp,%rsi,8),%r10
xorq 1(%rbp,%rdi,8),%r11
movb %al,%cl
movb %ah,%dl
leaq (%rcx,%rcx,1),%rsi
leaq (%rdx,%rdx,1),%rdi
shrl $16,%eax
xorq 0(%rbp,%rsi,8),%r13
xorq 7(%rbp,%rdi,8),%r14
movb %al,%cl
movb %ah,%dl
movl 40+8(%rsp),%eax
leaq (%rcx,%rcx,1),%rsi
leaq (%rdx,%rdx,1),%rdi
xorq 6(%rbp,%rsi,8),%r15
xorq 5(%rbp,%rdi,8),%r8
movb %bl,%cl
movb %bh,%dl
leaq (%rcx,%rcx,1),%rsi
leaq (%rdx,%rdx,1),%rdi
shrl $16,%ebx
xorq 4(%rbp,%rsi,8),%r9
xorq 3(%rbp,%rdi,8),%r10
movb %bl,%cl
movb %bh,%dl
movl 40+8+4(%rsp),%ebx
leaq (%rcx,%rcx,1),%rsi
leaq (%rdx,%rdx,1),%rdi
xorq 2(%rbp,%rsi,8),%r11
xorq 1(%rbp,%rdi,8),%r12
movb %al,%cl
movb %ah,%dl
leaq (%rcx,%rcx,1),%rsi
leaq (%rdx,%rdx,1),%rdi
shrl $16,%eax
xorq 0(%rbp,%rsi,8),%r14
xorq 7(%rbp,%rdi,8),%r15
movb %al,%cl
movb %ah,%dl
movl 48+8(%rsp),%eax
leaq (%rcx,%rcx,1),%rsi
leaq (%rdx,%rdx,1),%rdi
xorq 6(%rbp,%rsi,8),%r8
xorq 5(%rbp,%rdi,8),%r9
movb %bl,%cl
movb %bh,%dl
leaq (%rcx,%rcx,1),%rsi
leaq (%rdx,%rdx,1),%rdi
shrl $16,%ebx
xorq 4(%rbp,%rsi,8),%r10
xorq 3(%rbp,%rdi,8),%r11
movb %bl,%cl
movb %bh,%dl
movl 48+8+4(%rsp),%ebx
leaq (%rcx,%rcx,1),%rsi
leaq (%rdx,%rdx,1),%rdi
xorq 2(%rbp,%rsi,8),%r12
xorq 1(%rbp,%rdi,8),%r13
movb %al,%cl
movb %ah,%dl
leaq (%rcx,%rcx,1),%rsi
leaq (%rdx,%rdx,1),%rdi
shrl $16,%eax
xorq 0(%rbp,%rsi,8),%r15
xorq 7(%rbp,%rdi,8),%r8
movb %al,%cl
movb %ah,%dl
movl 56+8(%rsp),%eax
leaq (%rcx,%rcx,1),%rsi
leaq (%rdx,%rdx,1),%rdi
xorq 6(%rbp,%rsi,8),%r9
xorq 5(%rbp,%rdi,8),%r10
movb %bl,%cl
movb %bh,%dl
leaq (%rcx,%rcx,1),%rsi
leaq (%rdx,%rdx,1),%rdi
shrl $16,%ebx
xorq 4(%rbp,%rsi,8),%r11
xorq 3(%rbp,%rdi,8),%r12
movb %bl,%cl
movb %bh,%dl
movl 56+8+4(%rsp),%ebx
leaq (%rcx,%rcx,1),%rsi
leaq (%rdx,%rdx,1),%rdi
xorq 2(%rbp,%rsi,8),%r13
xorq 1(%rbp,%rdi,8),%r14
# Key schedule done: store the new round key back to 0..63(%rsp).
movq %r8,0(%rsp)
movq %r9,8(%rsp)
movq %r10,16(%rsp)
movq %r11,24(%rsp)
movq %r12,32(%rsp)
movq %r13,40(%rsp)
movq %r14,48(%rsp)
movq %r15,56(%rsp)
# --- second half: transform the data state at 64..127(%rsp), xoring
# the table entries on top of the fresh round key already in r8..r15.
movb %al,%cl
movb %ah,%dl
leaq (%rcx,%rcx,1),%rsi
leaq (%rdx,%rdx,1),%rdi
shrl $16,%eax
xorq 0(%rbp,%rsi,8),%r8
xorq 7(%rbp,%rdi,8),%r9
movb %al,%cl
movb %ah,%dl
movl 64+0+8(%rsp),%eax
leaq (%rcx,%rcx,1),%rsi
leaq (%rdx,%rdx,1),%rdi
xorq 6(%rbp,%rsi,8),%r10
xorq 5(%rbp,%rdi,8),%r11
movb %bl,%cl
movb %bh,%dl
leaq (%rcx,%rcx,1),%rsi
leaq (%rdx,%rdx,1),%rdi
shrl $16,%ebx
xorq 4(%rbp,%rsi,8),%r12
xorq 3(%rbp,%rdi,8),%r13
movb %bl,%cl
movb %bh,%dl
movl 64+0+8+4(%rsp),%ebx
leaq (%rcx,%rcx,1),%rsi
leaq (%rdx,%rdx,1),%rdi
xorq 2(%rbp,%rsi,8),%r14
xorq 1(%rbp,%rdi,8),%r15
movb %al,%cl
movb %ah,%dl
leaq (%rcx,%rcx,1),%rsi
leaq (%rdx,%rdx,1),%rdi
shrl $16,%eax
xorq 0(%rbp,%rsi,8),%r9
xorq 7(%rbp,%rdi,8),%r10
movb %al,%cl
movb %ah,%dl
movl 64+8+8(%rsp),%eax
leaq (%rcx,%rcx,1),%rsi
leaq (%rdx,%rdx,1),%rdi
xorq 6(%rbp,%rsi,8),%r11
xorq 5(%rbp,%rdi,8),%r12
movb %bl,%cl
movb %bh,%dl
leaq (%rcx,%rcx,1),%rsi
leaq (%rdx,%rdx,1),%rdi
shrl $16,%ebx
xorq 4(%rbp,%rsi,8),%r13
xorq 3(%rbp,%rdi,8),%r14
movb %bl,%cl
movb %bh,%dl
movl 64+8+8+4(%rsp),%ebx
leaq (%rcx,%rcx,1),%rsi
leaq (%rdx,%rdx,1),%rdi
xorq 2(%rbp,%rsi,8),%r15
xorq 1(%rbp,%rdi,8),%r8
movb %al,%cl
movb %ah,%dl
leaq (%rcx,%rcx,1),%rsi
leaq (%rdx,%rdx,1),%rdi
shrl $16,%eax
xorq 0(%rbp,%rsi,8),%r10
xorq 7(%rbp,%rdi,8),%r11
movb %al,%cl
movb %ah,%dl
movl 64+16+8(%rsp),%eax
leaq (%rcx,%rcx,1),%rsi
leaq (%rdx,%rdx,1),%rdi
xorq 6(%rbp,%rsi,8),%r12
xorq 5(%rbp,%rdi,8),%r13
movb %bl,%cl
movb %bh,%dl
leaq (%rcx,%rcx,1),%rsi
leaq (%rdx,%rdx,1),%rdi
shrl $16,%ebx
xorq 4(%rbp,%rsi,8),%r14
xorq 3(%rbp,%rdi,8),%r15
movb %bl,%cl
movb %bh,%dl
movl 64+16+8+4(%rsp),%ebx
leaq (%rcx,%rcx,1),%rsi
leaq (%rdx,%rdx,1),%rdi
xorq 2(%rbp,%rsi,8),%r8
xorq 1(%rbp,%rdi,8),%r9
movb %al,%cl
movb %ah,%dl
leaq (%rcx,%rcx,1),%rsi
leaq (%rdx,%rdx,1),%rdi
shrl $16,%eax
xorq 0(%rbp,%rsi,8),%r11
xorq 7(%rbp,%rdi,8),%r12
movb %al,%cl
movb %ah,%dl
movl 64+24+8(%rsp),%eax
leaq (%rcx,%rcx,1),%rsi
leaq (%rdx,%rdx,1),%rdi
xorq 6(%rbp,%rsi,8),%r13
xorq 5(%rbp,%rdi,8),%r14
movb %bl,%cl
movb %bh,%dl
leaq (%rcx,%rcx,1),%rsi
leaq (%rdx,%rdx,1),%rdi
shrl $16,%ebx
xorq 4(%rbp,%rsi,8),%r15
xorq 3(%rbp,%rdi,8),%r8
movb %bl,%cl
movb %bh,%dl
movl 64+24+8+4(%rsp),%ebx
leaq (%rcx,%rcx,1),%rsi
leaq (%rdx,%rdx,1),%rdi
xorq 2(%rbp,%rsi,8),%r9
xorq 1(%rbp,%rdi,8),%r10
movb %al,%cl
movb %ah,%dl
leaq (%rcx,%rcx,1),%rsi
leaq (%rdx,%rdx,1),%rdi
shrl $16,%eax
xorq 0(%rbp,%rsi,8),%r12
xorq 7(%rbp,%rdi,8),%r13
movb %al,%cl
movb %ah,%dl
movl 64+32+8(%rsp),%eax
leaq (%rcx,%rcx,1),%rsi
leaq (%rdx,%rdx,1),%rdi
xorq 6(%rbp,%rsi,8),%r14
xorq 5(%rbp,%rdi,8),%r15
movb %bl,%cl
movb %bh,%dl
leaq (%rcx,%rcx,1),%rsi
leaq (%rdx,%rdx,1),%rdi
shrl $16,%ebx
xorq 4(%rbp,%rsi,8),%r8
xorq 3(%rbp,%rdi,8),%r9
movb %bl,%cl
movb %bh,%dl
movl 64+32+8+4(%rsp),%ebx
leaq (%rcx,%rcx,1),%rsi
leaq (%rdx,%rdx,1),%rdi
xorq 2(%rbp,%rsi,8),%r10
xorq 1(%rbp,%rdi,8),%r11
movb %al,%cl
movb %ah,%dl
leaq (%rcx,%rcx,1),%rsi
leaq (%rdx,%rdx,1),%rdi
shrl $16,%eax
xorq 0(%rbp,%rsi,8),%r13
xorq 7(%rbp,%rdi,8),%r14
movb %al,%cl
movb %ah,%dl
movl 64+40+8(%rsp),%eax
leaq (%rcx,%rcx,1),%rsi
leaq (%rdx,%rdx,1),%rdi
xorq 6(%rbp,%rsi,8),%r15
xorq 5(%rbp,%rdi,8),%r8
movb %bl,%cl
movb %bh,%dl
leaq (%rcx,%rcx,1),%rsi
leaq (%rdx,%rdx,1),%rdi
shrl $16,%ebx
xorq 4(%rbp,%rsi,8),%r9
xorq 3(%rbp,%rdi,8),%r10
movb %bl,%cl
movb %bh,%dl
movl 64+40+8+4(%rsp),%ebx
leaq (%rcx,%rcx,1),%rsi
leaq (%rdx,%rdx,1),%rdi
xorq 2(%rbp,%rsi,8),%r11
xorq 1(%rbp,%rdi,8),%r12
movb %al,%cl
movb %ah,%dl
leaq (%rcx,%rcx,1),%rsi
leaq (%rdx,%rdx,1),%rdi
shrl $16,%eax
xorq 0(%rbp,%rsi,8),%r14
xorq 7(%rbp,%rdi,8),%r15
movb %al,%cl
movb %ah,%dl
movl 64+48+8(%rsp),%eax
leaq (%rcx,%rcx,1),%rsi
leaq (%rdx,%rdx,1),%rdi
xorq 6(%rbp,%rsi,8),%r8
xorq 5(%rbp,%rdi,8),%r9
movb %bl,%cl
movb %bh,%dl
leaq (%rcx,%rcx,1),%rsi
leaq (%rdx,%rdx,1),%rdi
shrl $16,%ebx
xorq 4(%rbp,%rsi,8),%r10
xorq 3(%rbp,%rdi,8),%r11
movb %bl,%cl
movb %bh,%dl
movl 64+48+8+4(%rsp),%ebx
leaq (%rcx,%rcx,1),%rsi
leaq (%rdx,%rdx,1),%rdi
xorq 2(%rbp,%rsi,8),%r12
xorq 1(%rbp,%rdi,8),%r13
movb %al,%cl
movb %ah,%dl
leaq (%rcx,%rcx,1),%rsi
leaq (%rdx,%rdx,1),%rdi
shrl $16,%eax
xorq 0(%rbp,%rsi,8),%r15
xorq 7(%rbp,%rdi,8),%r8
movb %al,%cl
movb %ah,%dl
leaq (%rcx,%rcx,1),%rsi
leaq (%rdx,%rdx,1),%rdi
xorq 6(%rbp,%rsi,8),%r9
xorq 5(%rbp,%rdi,8),%r10
movb %bl,%cl
movb %bh,%dl
leaq (%rcx,%rcx,1),%rsi
leaq (%rdx,%rdx,1),%rdi
shrl $16,%ebx
xorq 4(%rbp,%rsi,8),%r11
xorq 3(%rbp,%rdi,8),%r12
movb %bl,%cl
movb %bh,%dl
leaq (%rcx,%rcx,1),%rsi
leaq (%rdx,%rdx,1),%rdi
xorq 2(%rbp,%rsi,8),%r13
xorq 1(%rbp,%rdi,8),%r14
# Round bookkeeping: 10 rounds per block (counter lives at 24(%rbx)).
leaq 128(%rsp),%rbx
movq 24(%rbx),%rsi
addq $1,%rsi
cmpq $10,%rsi
je .Lroundsdone
movq %rsi,24(%rbx)
movq %r8,64+0(%rsp)
movq %r9,64+8(%rsp)
movq %r10,64+16(%rsp)
movq %r11,64+24(%rsp)
movq %r12,64+32(%rsp)
movq %r13,64+40(%rsp)
movq %r14,64+48(%rsp)
movq %r15,64+56(%rsp)
jmp .Lround
.align 16
.Lroundsdone:
# Miyaguchi-Preneel feed-forward: H = round_output ^ block ^ H,
# written back to the context; then advance inp and loop over blocks.
movq 0(%rbx),%rdi
movq 8(%rbx),%rsi
movq 16(%rbx),%rax
xorq 0(%rsi),%r8
xorq 8(%rsi),%r9
xorq 16(%rsi),%r10
xorq 24(%rsi),%r11
xorq 32(%rsi),%r12
xorq 40(%rsi),%r13
xorq 48(%rsi),%r14
xorq 56(%rsi),%r15
xorq 0(%rdi),%r8
xorq 8(%rdi),%r9
xorq 16(%rdi),%r10
xorq 24(%rdi),%r11
xorq 32(%rdi),%r12
xorq 40(%rdi),%r13
xorq 48(%rdi),%r14
xorq 56(%rdi),%r15
movq %r8,0(%rdi)
movq %r9,8(%rdi)
movq %r10,16(%rdi)
movq %r11,24(%rdi)
movq %r12,32(%rdi)
movq %r13,40(%rdi)
movq %r14,48(%rdi)
movq %r15,56(%rdi)
leaq 64(%rsi),%rsi
subq $1,%rax
jz .Lalldone
movq %rsi,8(%rbx)
movq %rax,16(%rbx)
jmp .Louterloop
.Lalldone:
# Restore callee-saved registers from the original (pre-align) stack.
movq 32(%rbx),%rsi
movq (%rsi),%r15
movq 8(%rsi),%r14
movq 16(%rsi),%r13
movq 24(%rsi),%r12
movq 32(%rsi),%rbp
movq 40(%rsi),%rbx
leaq 48(%rsi),%rsp
.Lepilogue:
.byte 0xf3,0xc3
.size whirlpool_block,.-whirlpool_block
# Whirlpool lookup table (4096 bytes) followed by round constants (80 bytes).
# 256 rows of 16 bytes: each row holds one 8-byte table entry written twice,
# so that whirlpool_block's unaligned qword loads at row offsets 0..7
# (movq N(%rbp,index*16), N = 0..7) read the 8 byte-rotations of the entry
# without separate rotate instructions.  The trailing 10 x 8 bytes at
# offset 4096 are the per-round constants (loaded via 4096(%rbp,%rsi,8)).
.align 64
.type .Ltable,@object
.Ltable:
.byte 24,24,96,24,192,120,48,216,24,24,96,24,192,120,48,216
.byte 35,35,140,35,5,175,70,38,35,35,140,35,5,175,70,38
.byte 198,198,63,198,126,249,145,184,198,198,63,198,126,249,145,184
.byte 232,232,135,232,19,111,205,251,232,232,135,232,19,111,205,251
.byte 135,135,38,135,76,161,19,203,135,135,38,135,76,161,19,203
.byte 184,184,218,184,169,98,109,17,184,184,218,184,169,98,109,17
.byte 1,1,4,1,8,5,2,9,1,1,4,1,8,5,2,9
.byte 79,79,33,79,66,110,158,13,79,79,33,79,66,110,158,13
.byte 54,54,216,54,173,238,108,155,54,54,216,54,173,238,108,155
.byte 166,166,162,166,89,4,81,255,166,166,162,166,89,4,81,255
.byte 210,210,111,210,222,189,185,12,210,210,111,210,222,189,185,12
.byte 245,245,243,245,251,6,247,14,245,245,243,245,251,6,247,14
.byte 121,121,249,121,239,128,242,150,121,121,249,121,239,128,242,150
.byte 111,111,161,111,95,206,222,48,111,111,161,111,95,206,222,48
.byte 145,145,126,145,252,239,63,109,145,145,126,145,252,239,63,109
.byte 82,82,85,82,170,7,164,248,82,82,85,82,170,7,164,248
.byte 96,96,157,96,39,253,192,71,96,96,157,96,39,253,192,71
.byte 188,188,202,188,137,118,101,53,188,188,202,188,137,118,101,53
.byte 155,155,86,155,172,205,43,55,155,155,86,155,172,205,43,55
.byte 142,142,2,142,4,140,1,138,142,142,2,142,4,140,1,138
.byte 163,163,182,163,113,21,91,210,163,163,182,163,113,21,91,210
.byte 12,12,48,12,96,60,24,108,12,12,48,12,96,60,24,108
.byte 123,123,241,123,255,138,246,132,123,123,241,123,255,138,246,132
.byte 53,53,212,53,181,225,106,128,53,53,212,53,181,225,106,128
.byte 29,29,116,29,232,105,58,245,29,29,116,29,232,105,58,245
.byte 224,224,167,224,83,71,221,179,224,224,167,224,83,71,221,179
.byte 215,215,123,215,246,172,179,33,215,215,123,215,246,172,179,33
.byte 194,194,47,194,94,237,153,156,194,194,47,194,94,237,153,156
.byte 46,46,184,46,109,150,92,67,46,46,184,46,109,150,92,67
.byte 75,75,49,75,98,122,150,41,75,75,49,75,98,122,150,41
.byte 254,254,223,254,163,33,225,93,254,254,223,254,163,33,225,93
.byte 87,87,65,87,130,22,174,213,87,87,65,87,130,22,174,213
.byte 21,21,84,21,168,65,42,189,21,21,84,21,168,65,42,189
.byte 119,119,193,119,159,182,238,232,119,119,193,119,159,182,238,232
.byte 55,55,220,55,165,235,110,146,55,55,220,55,165,235,110,146
.byte 229,229,179,229,123,86,215,158,229,229,179,229,123,86,215,158
.byte 159,159,70,159,140,217,35,19,159,159,70,159,140,217,35,19
.byte 240,240,231,240,211,23,253,35,240,240,231,240,211,23,253,35
.byte 74,74,53,74,106,127,148,32,74,74,53,74,106,127,148,32
.byte 218,218,79,218,158,149,169,68,218,218,79,218,158,149,169,68
.byte 88,88,125,88,250,37,176,162,88,88,125,88,250,37,176,162
.byte 201,201,3,201,6,202,143,207,201,201,3,201,6,202,143,207
.byte 41,41,164,41,85,141,82,124,41,41,164,41,85,141,82,124
.byte 10,10,40,10,80,34,20,90,10,10,40,10,80,34,20,90
.byte 177,177,254,177,225,79,127,80,177,177,254,177,225,79,127,80
.byte 160,160,186,160,105,26,93,201,160,160,186,160,105,26,93,201
.byte 107,107,177,107,127,218,214,20,107,107,177,107,127,218,214,20
.byte 133,133,46,133,92,171,23,217,133,133,46,133,92,171,23,217
.byte 189,189,206,189,129,115,103,60,189,189,206,189,129,115,103,60
.byte 93,93,105,93,210,52,186,143,93,93,105,93,210,52,186,143
.byte 16,16,64,16,128,80,32,144,16,16,64,16,128,80,32,144
.byte 244,244,247,244,243,3,245,7,244,244,247,244,243,3,245,7
.byte 203,203,11,203,22,192,139,221,203,203,11,203,22,192,139,221
.byte 62,62,248,62,237,198,124,211,62,62,248,62,237,198,124,211
.byte 5,5,20,5,40,17,10,45,5,5,20,5,40,17,10,45
.byte 103,103,129,103,31,230,206,120,103,103,129,103,31,230,206,120
.byte 228,228,183,228,115,83,213,151,228,228,183,228,115,83,213,151
.byte 39,39,156,39,37,187,78,2,39,39,156,39,37,187,78,2
.byte 65,65,25,65,50,88,130,115,65,65,25,65,50,88,130,115
.byte 139,139,22,139,44,157,11,167,139,139,22,139,44,157,11,167
.byte 167,167,166,167,81,1,83,246,167,167,166,167,81,1,83,246
.byte 125,125,233,125,207,148,250,178,125,125,233,125,207,148,250,178
.byte 149,149,110,149,220,251,55,73,149,149,110,149,220,251,55,73
.byte 216,216,71,216,142,159,173,86,216,216,71,216,142,159,173,86
.byte 251,251,203,251,139,48,235,112,251,251,203,251,139,48,235,112
.byte 238,238,159,238,35,113,193,205,238,238,159,238,35,113,193,205
.byte 124,124,237,124,199,145,248,187,124,124,237,124,199,145,248,187
.byte 102,102,133,102,23,227,204,113,102,102,133,102,23,227,204,113
.byte 221,221,83,221,166,142,167,123,221,221,83,221,166,142,167,123
.byte 23,23,92,23,184,75,46,175,23,23,92,23,184,75,46,175
.byte 71,71,1,71,2,70,142,69,71,71,1,71,2,70,142,69
.byte 158,158,66,158,132,220,33,26,158,158,66,158,132,220,33,26
.byte 202,202,15,202,30,197,137,212,202,202,15,202,30,197,137,212
.byte 45,45,180,45,117,153,90,88,45,45,180,45,117,153,90,88
.byte 191,191,198,191,145,121,99,46,191,191,198,191,145,121,99,46
.byte 7,7,28,7,56,27,14,63,7,7,28,7,56,27,14,63
.byte 173,173,142,173,1,35,71,172,173,173,142,173,1,35,71,172
.byte 90,90,117,90,234,47,180,176,90,90,117,90,234,47,180,176
.byte 131,131,54,131,108,181,27,239,131,131,54,131,108,181,27,239
.byte 51,51,204,51,133,255,102,182,51,51,204,51,133,255,102,182
.byte 99,99,145,99,63,242,198,92,99,99,145,99,63,242,198,92
.byte 2,2,8,2,16,10,4,18,2,2,8,2,16,10,4,18
.byte 170,170,146,170,57,56,73,147,170,170,146,170,57,56,73,147
.byte 113,113,217,113,175,168,226,222,113,113,217,113,175,168,226,222
.byte 200,200,7,200,14,207,141,198,200,200,7,200,14,207,141,198
.byte 25,25,100,25,200,125,50,209,25,25,100,25,200,125,50,209
.byte 73,73,57,73,114,112,146,59,73,73,57,73,114,112,146,59
.byte 217,217,67,217,134,154,175,95,217,217,67,217,134,154,175,95
.byte 242,242,239,242,195,29,249,49,242,242,239,242,195,29,249,49
.byte 227,227,171,227,75,72,219,168,227,227,171,227,75,72,219,168
.byte 91,91,113,91,226,42,182,185,91,91,113,91,226,42,182,185
.byte 136,136,26,136,52,146,13,188,136,136,26,136,52,146,13,188
.byte 154,154,82,154,164,200,41,62,154,154,82,154,164,200,41,62
.byte 38,38,152,38,45,190,76,11,38,38,152,38,45,190,76,11
.byte 50,50,200,50,141,250,100,191,50,50,200,50,141,250,100,191
.byte 176,176,250,176,233,74,125,89,176,176,250,176,233,74,125,89
.byte 233,233,131,233,27,106,207,242,233,233,131,233,27,106,207,242
.byte 15,15,60,15,120,51,30,119,15,15,60,15,120,51,30,119
.byte 213,213,115,213,230,166,183,51,213,213,115,213,230,166,183,51
.byte 128,128,58,128,116,186,29,244,128,128,58,128,116,186,29,244
.byte 190,190,194,190,153,124,97,39,190,190,194,190,153,124,97,39
.byte 205,205,19,205,38,222,135,235,205,205,19,205,38,222,135,235
.byte 52,52,208,52,189,228,104,137,52,52,208,52,189,228,104,137
.byte 72,72,61,72,122,117,144,50,72,72,61,72,122,117,144,50
.byte 255,255,219,255,171,36,227,84,255,255,219,255,171,36,227,84
.byte 122,122,245,122,247,143,244,141,122,122,245,122,247,143,244,141
.byte 144,144,122,144,244,234,61,100,144,144,122,144,244,234,61,100
.byte 95,95,97,95,194,62,190,157,95,95,97,95,194,62,190,157
.byte 32,32,128,32,29,160,64,61,32,32,128,32,29,160,64,61
.byte 104,104,189,104,103,213,208,15,104,104,189,104,103,213,208,15
.byte 26,26,104,26,208,114,52,202,26,26,104,26,208,114,52,202
.byte 174,174,130,174,25,44,65,183,174,174,130,174,25,44,65,183
.byte 180,180,234,180,201,94,117,125,180,180,234,180,201,94,117,125
.byte 84,84,77,84,154,25,168,206,84,84,77,84,154,25,168,206
.byte 147,147,118,147,236,229,59,127,147,147,118,147,236,229,59,127
.byte 34,34,136,34,13,170,68,47,34,34,136,34,13,170,68,47
.byte 100,100,141,100,7,233,200,99,100,100,141,100,7,233,200,99
.byte 241,241,227,241,219,18,255,42,241,241,227,241,219,18,255,42
.byte 115,115,209,115,191,162,230,204,115,115,209,115,191,162,230,204
.byte 18,18,72,18,144,90,36,130,18,18,72,18,144,90,36,130
.byte 64,64,29,64,58,93,128,122,64,64,29,64,58,93,128,122
.byte 8,8,32,8,64,40,16,72,8,8,32,8,64,40,16,72
.byte 195,195,43,195,86,232,155,149,195,195,43,195,86,232,155,149
.byte 236,236,151,236,51,123,197,223,236,236,151,236,51,123,197,223
.byte 219,219,75,219,150,144,171,77,219,219,75,219,150,144,171,77
.byte 161,161,190,161,97,31,95,192,161,161,190,161,97,31,95,192
.byte 141,141,14,141,28,131,7,145,141,141,14,141,28,131,7,145
.byte 61,61,244,61,245,201,122,200,61,61,244,61,245,201,122,200
.byte 151,151,102,151,204,241,51,91,151,151,102,151,204,241,51,91
.byte 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
.byte 207,207,27,207,54,212,131,249,207,207,27,207,54,212,131,249
.byte 43,43,172,43,69,135,86,110,43,43,172,43,69,135,86,110
.byte 118,118,197,118,151,179,236,225,118,118,197,118,151,179,236,225
.byte 130,130,50,130,100,176,25,230,130,130,50,130,100,176,25,230
.byte 214,214,127,214,254,169,177,40,214,214,127,214,254,169,177,40
.byte 27,27,108,27,216,119,54,195,27,27,108,27,216,119,54,195
.byte 181,181,238,181,193,91,119,116,181,181,238,181,193,91,119,116
.byte 175,175,134,175,17,41,67,190,175,175,134,175,17,41,67,190
.byte 106,106,181,106,119,223,212,29,106,106,181,106,119,223,212,29
.byte 80,80,93,80,186,13,160,234,80,80,93,80,186,13,160,234
.byte 69,69,9,69,18,76,138,87,69,69,9,69,18,76,138,87
.byte 243,243,235,243,203,24,251,56,243,243,235,243,203,24,251,56
.byte 48,48,192,48,157,240,96,173,48,48,192,48,157,240,96,173
.byte 239,239,155,239,43,116,195,196,239,239,155,239,43,116,195,196
.byte 63,63,252,63,229,195,126,218,63,63,252,63,229,195,126,218
.byte 85,85,73,85,146,28,170,199,85,85,73,85,146,28,170,199
.byte 162,162,178,162,121,16,89,219,162,162,178,162,121,16,89,219
.byte 234,234,143,234,3,101,201,233,234,234,143,234,3,101,201,233
.byte 101,101,137,101,15,236,202,106,101,101,137,101,15,236,202,106
.byte 186,186,210,186,185,104,105,3,186,186,210,186,185,104,105,3
.byte 47,47,188,47,101,147,94,74,47,47,188,47,101,147,94,74
.byte 192,192,39,192,78,231,157,142,192,192,39,192,78,231,157,142
.byte 222,222,95,222,190,129,161,96,222,222,95,222,190,129,161,96
.byte 28,28,112,28,224,108,56,252,28,28,112,28,224,108,56,252
.byte 253,253,211,253,187,46,231,70,253,253,211,253,187,46,231,70
.byte 77,77,41,77,82,100,154,31,77,77,41,77,82,100,154,31
.byte 146,146,114,146,228,224,57,118,146,146,114,146,228,224,57,118
.byte 117,117,201,117,143,188,234,250,117,117,201,117,143,188,234,250
.byte 6,6,24,6,48,30,12,54,6,6,24,6,48,30,12,54
.byte 138,138,18,138,36,152,9,174,138,138,18,138,36,152,9,174
.byte 178,178,242,178,249,64,121,75,178,178,242,178,249,64,121,75
.byte 230,230,191,230,99,89,209,133,230,230,191,230,99,89,209,133
.byte 14,14,56,14,112,54,28,126,14,14,56,14,112,54,28,126
.byte 31,31,124,31,248,99,62,231,31,31,124,31,248,99,62,231
.byte 98,98,149,98,55,247,196,85,98,98,149,98,55,247,196,85
.byte 212,212,119,212,238,163,181,58,212,212,119,212,238,163,181,58
.byte 168,168,154,168,41,50,77,129,168,168,154,168,41,50,77,129
.byte 150,150,98,150,196,244,49,82,150,150,98,150,196,244,49,82
.byte 249,249,195,249,155,58,239,98,249,249,195,249,155,58,239,98
.byte 197,197,51,197,102,246,151,163,197,197,51,197,102,246,151,163
.byte 37,37,148,37,53,177,74,16,37,37,148,37,53,177,74,16
.byte 89,89,121,89,242,32,178,171,89,89,121,89,242,32,178,171
.byte 132,132,42,132,84,174,21,208,132,132,42,132,84,174,21,208
.byte 114,114,213,114,183,167,228,197,114,114,213,114,183,167,228,197
.byte 57,57,228,57,213,221,114,236,57,57,228,57,213,221,114,236
.byte 76,76,45,76,90,97,152,22,76,76,45,76,90,97,152,22
.byte 94,94,101,94,202,59,188,148,94,94,101,94,202,59,188,148
.byte 120,120,253,120,231,133,240,159,120,120,253,120,231,133,240,159
.byte 56,56,224,56,221,216,112,229,56,56,224,56,221,216,112,229
.byte 140,140,10,140,20,134,5,152,140,140,10,140,20,134,5,152
.byte 209,209,99,209,198,178,191,23,209,209,99,209,198,178,191,23
.byte 165,165,174,165,65,11,87,228,165,165,174,165,65,11,87,228
.byte 226,226,175,226,67,77,217,161,226,226,175,226,67,77,217,161
.byte 97,97,153,97,47,248,194,78,97,97,153,97,47,248,194,78
.byte 179,179,246,179,241,69,123,66,179,179,246,179,241,69,123,66
.byte 33,33,132,33,21,165,66,52,33,33,132,33,21,165,66,52
.byte 156,156,74,156,148,214,37,8,156,156,74,156,148,214,37,8
.byte 30,30,120,30,240,102,60,238,30,30,120,30,240,102,60,238
.byte 67,67,17,67,34,82,134,97,67,67,17,67,34,82,134,97
.byte 199,199,59,199,118,252,147,177,199,199,59,199,118,252,147,177
.byte 252,252,215,252,179,43,229,79,252,252,215,252,179,43,229,79
.byte 4,4,16,4,32,20,8,36,4,4,16,4,32,20,8,36
.byte 81,81,89,81,178,8,162,227,81,81,89,81,178,8,162,227
.byte 153,153,94,153,188,199,47,37,153,153,94,153,188,199,47,37
.byte 109,109,169,109,79,196,218,34,109,109,169,109,79,196,218,34
.byte 13,13,52,13,104,57,26,101,13,13,52,13,104,57,26,101
.byte 250,250,207,250,131,53,233,121,250,250,207,250,131,53,233,121
.byte 223,223,91,223,182,132,163,105,223,223,91,223,182,132,163,105
.byte 126,126,229,126,215,155,252,169,126,126,229,126,215,155,252,169
.byte 36,36,144,36,61,180,72,25,36,36,144,36,61,180,72,25
.byte 59,59,236,59,197,215,118,254,59,59,236,59,197,215,118,254
.byte 171,171,150,171,49,61,75,154,171,171,150,171,49,61,75,154
.byte 206,206,31,206,62,209,129,240,206,206,31,206,62,209,129,240
.byte 17,17,68,17,136,85,34,153,17,17,68,17,136,85,34,153
.byte 143,143,6,143,12,137,3,131,143,143,6,143,12,137,3,131
.byte 78,78,37,78,74,107,156,4,78,78,37,78,74,107,156,4
.byte 183,183,230,183,209,81,115,102,183,183,230,183,209,81,115,102
.byte 235,235,139,235,11,96,203,224,235,235,139,235,11,96,203,224
.byte 60,60,240,60,253,204,120,193,60,60,240,60,253,204,120,193
.byte 129,129,62,129,124,191,31,253,129,129,62,129,124,191,31,253
.byte 148,148,106,148,212,254,53,64,148,148,106,148,212,254,53,64
.byte 247,247,251,247,235,12,243,28,247,247,251,247,235,12,243,28
.byte 185,185,222,185,161,103,111,24,185,185,222,185,161,103,111,24
.byte 19,19,76,19,152,95,38,139,19,19,76,19,152,95,38,139
.byte 44,44,176,44,125,156,88,81,44,44,176,44,125,156,88,81
.byte 211,211,107,211,214,184,187,5,211,211,107,211,214,184,187,5
.byte 231,231,187,231,107,92,211,140,231,231,187,231,107,92,211,140
.byte 110,110,165,110,87,203,220,57,110,110,165,110,87,203,220,57
.byte 196,196,55,196,110,243,149,170,196,196,55,196,110,243,149,170
.byte 3,3,12,3,24,15,6,27,3,3,12,3,24,15,6,27
.byte 86,86,69,86,138,19,172,220,86,86,69,86,138,19,172,220
.byte 68,68,13,68,26,73,136,94,68,68,13,68,26,73,136,94
.byte 127,127,225,127,223,158,254,160,127,127,225,127,223,158,254,160
.byte 169,169,158,169,33,55,79,136,169,169,158,169,33,55,79,136
.byte 42,42,168,42,77,130,84,103,42,42,168,42,77,130,84,103
.byte 187,187,214,187,177,109,107,10,187,187,214,187,177,109,107,10
.byte 193,193,35,193,70,226,159,135,193,193,35,193,70,226,159,135
.byte 83,83,81,83,162,2,166,241,83,83,81,83,162,2,166,241
.byte 220,220,87,220,174,139,165,114,220,220,87,220,174,139,165,114
.byte 11,11,44,11,88,39,22,83,11,11,44,11,88,39,22,83
.byte 157,157,78,157,156,211,39,1,157,157,78,157,156,211,39,1
.byte 108,108,173,108,71,193,216,43,108,108,173,108,71,193,216,43
.byte 49,49,196,49,149,245,98,164,49,49,196,49,149,245,98,164
.byte 116,116,205,116,135,185,232,243,116,116,205,116,135,185,232,243
.byte 246,246,255,246,227,9,241,21,246,246,255,246,227,9,241,21
.byte 70,70,5,70,10,67,140,76,70,70,5,70,10,67,140,76
.byte 172,172,138,172,9,38,69,165,172,172,138,172,9,38,69,165
.byte 137,137,30,137,60,151,15,181,137,137,30,137,60,151,15,181
.byte 20,20,80,20,160,68,40,180,20,20,80,20,160,68,40,180
.byte 225,225,163,225,91,66,223,186,225,225,163,225,91,66,223,186
.byte 22,22,88,22,176,78,44,166,22,22,88,22,176,78,44,166
.byte 58,58,232,58,205,210,116,247,58,58,232,58,205,210,116,247
.byte 105,105,185,105,111,208,210,6,105,105,185,105,111,208,210,6
.byte 9,9,36,9,72,45,18,65,9,9,36,9,72,45,18,65
.byte 112,112,221,112,167,173,224,215,112,112,221,112,167,173,224,215
.byte 182,182,226,182,217,84,113,111,182,182,226,182,217,84,113,111
.byte 208,208,103,208,206,183,189,30,208,208,103,208,206,183,189,30
.byte 237,237,147,237,59,126,199,214,237,237,147,237,59,126,199,214
.byte 204,204,23,204,46,219,133,226,204,204,23,204,46,219,133,226
.byte 66,66,21,66,42,87,132,104,66,66,21,66,42,87,132,104
.byte 152,152,90,152,180,194,45,44,152,152,90,152,180,194,45,44
.byte 164,164,170,164,73,14,85,237,164,164,170,164,73,14,85,237
.byte 40,40,160,40,93,136,80,117,40,40,160,40,93,136,80,117
.byte 92,92,109,92,218,49,184,134,92,92,109,92,218,49,184,134
.byte 248,248,199,248,147,63,237,107,248,248,199,248,147,63,237,107
.byte 134,134,34,134,68,164,17,194,134,134,34,134,68,164,17,194
# Round constants for the 10 Whirlpool rounds (offset 4096, 8 bytes each).
.byte 24,35,198,232,135,184,1,79
.byte 54,166,210,245,121,111,145,82
.byte 96,188,155,142,163,12,123,53
.byte 29,224,215,194,46,75,254,87
.byte 21,119,55,229,159,240,74,218
.byte 88,201,41,10,177,160,107,133
.byte 189,93,16,244,203,62,5,103
.byte 228,39,65,139,167,125,149,216
.byte 251,238,124,102,221,23,71,158
.byte 202,45,191,7,173,90,131,51

194
deps/openssl/asm/x64-elf-gas/x86_64cpuid.s

@@ -0,0 +1,194 @@
# ELF .init section hook: run OPENSSL_cpuid_setup once at load time so
# OPENSSL_ia32cap_P is populated before any crypto routine consults it.
.section .init
call OPENSSL_cpuid_setup
.text
# int OPENSSL_atomic_add(int *addr, int amount)
# rdi = addr, esi = amount.  Atomically adds via a lock-cmpxchg retry
# loop and returns the new value (sign-extended into rax).
.globl OPENSSL_atomic_add
.type OPENSSL_atomic_add,@function
.align 16
OPENSSL_atomic_add:
movl (%rdi),%eax
.Lspin: leaq (%rsi,%rax,1),%r8
.byte 0xf0
# ^ hand-encoded LOCK prefix for the cmpxchg below.
cmpxchgl %r8d,(%rdi)
jne .Lspin
movl %r8d,%eax
.byte 0x48,0x98
# ^ cdqe: sign-extend the 32-bit result into rax.
.byte 0xf3,0xc3
.size OPENSSL_atomic_add,.-OPENSSL_atomic_add
# unsigned long long OPENSSL_rdtsc(void)
# Returns the 64-bit timestamp counter (rdtsc splits it edx:eax).
.globl OPENSSL_rdtsc
.type OPENSSL_rdtsc,@function
.align 16
OPENSSL_rdtsc:
rdtsc
shlq $32,%rdx
orq %rdx,%rax
.byte 0xf3,0xc3
.size OPENSSL_rdtsc,.-OPENSSL_rdtsc
# unsigned long OPENSSL_ia32_cpuid(void)
# Probes CPUID and returns the (possibly adjusted) leaf-1 feature words:
# low 32 bits = edx features, high 32 bits = ecx features.  The HTT bit
# (28) is cleared unless multiple logical processors are truly present,
# and vendor-specific capability bits (20, 30) are synthesized for Intel.
.globl OPENSSL_ia32_cpuid
.type OPENSSL_ia32_cpuid,@function
.align 16
OPENSSL_ia32_cpuid:
movq %rbx,%r8
# cpuid clobbers rbx (callee-saved) - preserve it in r8.
xorl %eax,%eax
cpuid
movl %eax,%r11d
# Vendor check: 0x756e6547/0x49656e69/0x6c65746e = "GenuineIntel".
xorl %eax,%eax
cmpl $1970169159,%ebx
setne %al
movl %eax,%r9d
cmpl $1231384169,%edx
setne %al
orl %eax,%r9d
cmpl $1818588270,%ecx
setne %al
orl %eax,%r9d
jz .Lintel
# 0x68747541/0x69746e65/0x444d4163 = "AuthenticAMD".
cmpl $1752462657,%ebx
setne %al
movl %eax,%r10d
cmpl $1769238117,%edx
setne %al
orl %eax,%r10d
cmpl $1145913699,%ecx
setne %al
orl %eax,%r10d
jnz .Lintel
# AMD path: use extended leaf 0x80000008 to learn the core count.
movl $2147483648,%eax
cpuid
cmpl $2147483656,%eax
jb .Lintel
movl $2147483656,%eax
cpuid
movzbq %cl,%r10
incq %r10
movl $1,%eax
cpuid
btl $28,%edx
jnc .Ldone
shrl $16,%ebx
cmpb %r10b,%bl
ja .Ldone
andl $4026531839,%edx
# ^ clear HTT (bit 28): not more logical CPUs than cores.
jmp .Ldone
.Lintel:
cmpl $4,%r11d
movl $-1,%r10d
jb .Lnocacheinfo
# Leaf 4: extract (eax>>14)&0xfff, the max cores sharing the L1 cache.
movl $4,%eax
movl $0,%ecx
cpuid
movl %eax,%r10d
shrl $14,%r10d
andl $4095,%r10d
.Lnocacheinfo:
movl $1,%eax
cpuid
cmpl $0,%r9d
jne .Lnotintel
orl $1048576,%edx
# ^ set bit 20 on genuine Intel (reserved bit used internally by OpenSSL).
andb $15,%ah
cmpb $15,%ah
je .Lnotintel
orl $1073741824,%edx
# ^ set bit 30 for non-family-15 (non-NetBurst) Intel parts.
.Lnotintel:
btl $28,%edx
jnc .Ldone
andl $4026531839,%edx
cmpl $0,%r10d
je .Ldone
orl $268435456,%edx
shrl $16,%ebx
cmpb $1,%bl
ja .Ldone
andl $4026531839,%edx
.Ldone:
# Return ecx features in the high half, edx features in the low half.
shlq $32,%rcx
movl %edx,%eax
movq %r8,%rbx
orq %rcx,%rax
.byte 0xf3,0xc3
.size OPENSSL_ia32_cpuid,.-OPENSSL_ia32_cpuid
# void OPENSSL_cleanse(void *ptr, size_t len)
# rdi = ptr, rsi = len.  Securely zeroes len bytes: byte-by-byte for
# short buffers, otherwise aligns rdi to 8 and zeroes qword-wise with a
# byte-wise tail.  Written in asm so the stores cannot be optimized away.
.globl OPENSSL_cleanse
.type OPENSSL_cleanse,@function
.align 16
OPENSSL_cleanse:
xorq %rax,%rax
cmpq $15,%rsi
jae .Lot
cmpq $0,%rsi
je .Lret
.Little:
movb %al,(%rdi)
subq $1,%rsi
leaq 1(%rdi),%rdi
jnz .Little
.Lret:
.byte 0xf3,0xc3
.align 16
.Lot:
# Align the pointer to 8 bytes one store at a time.
testq $7,%rdi
jz .Laligned
movb %al,(%rdi)
leaq -1(%rsi),%rsi
leaq 1(%rdi),%rdi
jmp .Lot
.Laligned:
movq %rax,(%rdi)
leaq -8(%rsi),%rsi
testq $-8,%rsi
leaq 8(%rdi),%rdi
jnz .Laligned
cmpq $0,%rsi
jne .Little
.byte 0xf3,0xc3
.size OPENSSL_cleanse,.-OPENSSL_cleanse
# unsigned long *OPENSSL_wipe_cpu(void)
# Scrubs all caller-saved state: xmm0-15 and the volatile GPRs
# (rcx, rdx, rsi, rdi, r8-r11).  Callee-saved registers are left alone
# per the SysV ABI.  Returns the address just above the return address.
.globl OPENSSL_wipe_cpu
.type OPENSSL_wipe_cpu,@function
.align 16
OPENSSL_wipe_cpu:
pxor %xmm0,%xmm0
pxor %xmm1,%xmm1
pxor %xmm2,%xmm2
pxor %xmm3,%xmm3
pxor %xmm4,%xmm4
pxor %xmm5,%xmm5
pxor %xmm6,%xmm6
pxor %xmm7,%xmm7
pxor %xmm8,%xmm8
pxor %xmm9,%xmm9
pxor %xmm10,%xmm10
pxor %xmm11,%xmm11
pxor %xmm12,%xmm12
pxor %xmm13,%xmm13
pxor %xmm14,%xmm14
pxor %xmm15,%xmm15
xorq %rcx,%rcx
xorq %rdx,%rdx
xorq %rsi,%rsi
xorq %rdi,%rdi
xorq %r8,%r8
xorq %r9,%r9
xorq %r10,%r10
xorq %r11,%r11
leaq 8(%rsp),%rax
.byte 0xf3,0xc3
.size OPENSSL_wipe_cpu,.-OPENSSL_wipe_cpu

2545
deps/openssl/asm/x64-macosx-gas/aes/aes-x86_64.s

File diff suppressed because it is too large

172
deps/openssl/asm/x64-macosx-gas/bn/x86_64-mont.s

@@ -0,0 +1,172 @@
.text
# int bn_mul_mont(BN_ULONG *rp, const BN_ULONG *ap, const BN_ULONG *bp,
#                 const BN_ULONG *np, const BN_ULONG *n0, int num)
# SysV AMD64 / Mach-O: %rdi=rp, %rsi=ap, %rdx=bp, %rcx=np, %r8=&n0, %r9d=num.
# Montgomery multiplication: rp[] = ap[] * bp[] / R mod np[], R = 2^(64*num).
# Register roles in the loops: %r12=bp, %rbx=bp[i], %rbp=m (reduction
# multiplier), %r14=i (outer index), %r15=j (inner index), %r10/%r11/%r13
# carry words; the num+1-word running sum lives on the stack at (%rsp).
.globl _bn_mul_mont
.p2align 4
_bn_mul_mont:
pushq %rbx
pushq %rbp
pushq %r12
pushq %r13
pushq %r14
pushq %r15
movl %r9d,%r9d
# Allocate num+2 qwords on the stack, align to 1KB, and stash the
# original %rsp just past the num-word tub for the epilogue.
leaq 2(%r9),%r10
movq %rsp,%r11
negq %r10
leaq (%rsp,%r10,8),%rsp
andq $-1024,%rsp
movq %r11,8(%rsp,%r9,8)
L$prologue:
movq %rdx,%r12
movq (%r8),%r8
# i = 0 iteration, j = 0 step: ap[0]*bp[0], derive m = lo * n0 mod 2^64.
xorq %r14,%r14
xorq %r15,%r15
movq (%r12),%rbx
movq (%rsi),%rax
mulq %rbx
movq %rax,%r10
movq %rdx,%r11
imulq %r8,%rax
movq %rax,%rbp
mulq (%rcx)
addq %r10,%rax
adcq $0,%rdx
movq %rdx,%r13
leaq 1(%r15),%r15
# First-pass inner loop: tp[j-1] = ap[j]*bp[0] + m*np[j] + carries.
L$1st:
movq (%rsi,%r15,8),%rax
mulq %rbx
addq %r11,%rax
adcq $0,%rdx
movq %rax,%r10
movq (%rcx,%r15,8),%rax
movq %rdx,%r11
mulq %rbp
addq %r13,%rax
leaq 1(%r15),%r15
adcq $0,%rdx
addq %r10,%rax
adcq $0,%rdx
movq %rax,-16(%rsp,%r15,8)
cmpq %r9,%r15
movq %rdx,%r13
jl L$1st
# Fold the two trailing carries into tp[num-1] and tp[num].
xorq %rdx,%rdx
addq %r11,%r13
adcq $0,%rdx
movq %r13,-8(%rsp,%r9,8)
movq %rdx,(%rsp,%r9,8)
leaq 1(%r14),%r14
.p2align 2
# Outer loop over bp[i], i = 1..num-1: accumulate into tp[] this time.
L$outer:
xorq %r15,%r15
movq (%r12,%r14,8),%rbx
movq (%rsi),%rax
mulq %rbx
addq (%rsp),%rax
adcq $0,%rdx
movq %rax,%r10
movq %rdx,%r11
imulq %r8,%rax
movq %rax,%rbp
mulq (%rcx,%r15,8)
addq %r10,%rax
movq 8(%rsp),%r10
adcq $0,%rdx
movq %rdx,%r13
leaq 1(%r15),%r15
.p2align 2
# Inner loop: tp[j-1] = tp[j] + ap[j]*bp[i] + m*np[j] + carries.
L$inner:
movq (%rsi,%r15,8),%rax
mulq %rbx
addq %r11,%rax
adcq $0,%rdx
addq %rax,%r10
movq (%rcx,%r15,8),%rax
adcq $0,%rdx
movq %rdx,%r11
mulq %rbp
addq %r13,%rax
leaq 1(%r15),%r15
adcq $0,%rdx
addq %r10,%rax
adcq $0,%rdx
movq (%rsp,%r15,8),%r10
cmpq %r9,%r15
movq %rax,-16(%rsp,%r15,8)
movq %rdx,%r13
jl L$inner
xorq %rdx,%rdx
addq %r11,%r13
adcq $0,%rdx
addq %r10,%r13
adcq $0,%rdx
movq %r13,-8(%rsp,%r9,8)
movq %rdx,(%rsp,%r9,8)
leaq 1(%r14),%r14
cmpq %r9,%r14
jl L$outer
# Conditional final subtraction: rp[] = tp[] - np[] with borrow out.
leaq (%rsp),%rsi
leaq -1(%r9),%r15
movq (%rsi),%rax
xorq %r14,%r14
jmp L$sub
.p2align 4
L$sub: sbbq (%rcx,%r14,8),%rax
movq %rax,(%rdi,%r14,8)
decq %r15
movq 8(%rsi,%r14,8),%rax
leaq 1(%r14),%r14
jge L$sub
# Select source by borrow (mask trick): tp[] if we borrowed, rp[] if not,
# then copy the winner to rp[] while zeroing the temporary tub.
sbbq $0,%rax
andq %rax,%rsi
notq %rax
movq %rdi,%rcx
andq %rax,%rcx
leaq -1(%r9),%r15
orq %rcx,%rsi
.p2align 4
L$copy:
movq (%rsi,%r15,8),%rax
movq %rax,(%rdi,%r15,8)
movq %r14,(%rsp,%r15,8)
decq %r15
jge L$copy
# Restore the saved stack pointer and callee-saved registers; return 1.
movq 8(%rsp,%r9,8),%rsi
movq $1,%rax
movq (%rsi),%r15
movq 8(%rsi),%r14
movq 16(%rsi),%r13
movq 24(%rsi),%r12
movq 32(%rsi),%rbp
movq 40(%rsi),%rbx
leaq 48(%rsi),%rsp
L$epilogue:
.byte 0xf3,0xc3
.byte 77,111,110,116,103,111,109,101,114,121,32,77,117,108,116,105,112,108,105,99,97,116,105,111,110,32,102,111,114,32,120,56,54,95,54,52,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0
.p2align 4

1844
deps/openssl/asm/x64-macosx-gas/camellia/cmll-x86_64.s

File diff suppressed because it is too large

671
deps/openssl/asm/x64-macosx-gas/md5/md5-x86_64.s

@@ -0,0 +1,671 @@
.text
# void md5_block_asm_data_order(MD5_CTX *ctx, const void *inp, size_t num)
# SysV AMD64 / Mach-O: %rdi=ctx (A,B,C,D at offsets 0,4,8,12),
# %rsi=inp, %rdx=num 64-byte blocks.  Fully unrolled: 4 rounds of 16
# steps each.  %eax/%ebx/%ecx/%edx hold A/B/C/D, %r10d the message
# word, %r11d (and %r12d in round 2) the boolean-function scratch,
# %r8d/%r9d/%r14d/%r15d the saved chaining values.
.p2align 4
.globl _md5_block_asm_data_order
_md5_block_asm_data_order:
pushq %rbp
pushq %rbx
pushq %r12
pushq %r14
pushq %r15
L$prologue:
movq %rdi,%rbp
# end pointer = inp + num*64
shlq $6,%rdx
leaq (%rsi,%rdx,1),%rdi
movl 0(%rbp),%eax
movl 4(%rbp),%ebx
movl 8(%rbp),%ecx
movl 12(%rbp),%edx
cmpq %rdi,%rsi
je L$end
# Per-block loop: snapshot A..D, run 64 steps, add snapshot back.
L$loop:
movl %eax,%r8d
movl %ebx,%r9d
movl %ecx,%r14d
movl %edx,%r15d
# Round 1 (steps 0-15): F(x,y,z) = z ^ (x & (y ^ z))
movl 0(%rsi),%r10d
movl %edx,%r11d
xorl %ecx,%r11d
leal -680876936(%rax,%r10,1),%eax
andl %ebx,%r11d
xorl %edx,%r11d
movl 4(%rsi),%r10d
addl %r11d,%eax
roll $7,%eax
movl %ecx,%r11d
addl %ebx,%eax
xorl %ebx,%r11d
leal -389564586(%rdx,%r10,1),%edx
andl %eax,%r11d
xorl %ecx,%r11d
movl 8(%rsi),%r10d
addl %r11d,%edx
roll $12,%edx
movl %ebx,%r11d
addl %eax,%edx
xorl %eax,%r11d
leal 606105819(%rcx,%r10,1),%ecx
andl %edx,%r11d
xorl %ebx,%r11d
movl 12(%rsi),%r10d
addl %r11d,%ecx
roll $17,%ecx
movl %eax,%r11d
addl %edx,%ecx
xorl %edx,%r11d
leal -1044525330(%rbx,%r10,1),%ebx
andl %ecx,%r11d
xorl %eax,%r11d
movl 16(%rsi),%r10d
addl %r11d,%ebx
roll $22,%ebx
movl %edx,%r11d
addl %ecx,%ebx
xorl %ecx,%r11d
leal -176418897(%rax,%r10,1),%eax
andl %ebx,%r11d
xorl %edx,%r11d
movl 20(%rsi),%r10d
addl %r11d,%eax
roll $7,%eax
movl %ecx,%r11d
addl %ebx,%eax
xorl %ebx,%r11d
leal 1200080426(%rdx,%r10,1),%edx
andl %eax,%r11d
xorl %ecx,%r11d
movl 24(%rsi),%r10d
addl %r11d,%edx
roll $12,%edx
movl %ebx,%r11d
addl %eax,%edx
xorl %eax,%r11d
leal -1473231341(%rcx,%r10,1),%ecx
andl %edx,%r11d
xorl %ebx,%r11d
movl 28(%rsi),%r10d
addl %r11d,%ecx
roll $17,%ecx
movl %eax,%r11d
addl %edx,%ecx
xorl %edx,%r11d
leal -45705983(%rbx,%r10,1),%ebx
andl %ecx,%r11d
xorl %eax,%r11d
movl 32(%rsi),%r10d
addl %r11d,%ebx
roll $22,%ebx
movl %edx,%r11d
addl %ecx,%ebx
xorl %ecx,%r11d
leal 1770035416(%rax,%r10,1),%eax
andl %ebx,%r11d
xorl %edx,%r11d
movl 36(%rsi),%r10d
addl %r11d,%eax
roll $7,%eax
movl %ecx,%r11d
addl %ebx,%eax
xorl %ebx,%r11d
leal -1958414417(%rdx,%r10,1),%edx
andl %eax,%r11d
xorl %ecx,%r11d
movl 40(%rsi),%r10d
addl %r11d,%edx
roll $12,%edx
movl %ebx,%r11d
addl %eax,%edx
xorl %eax,%r11d
leal -42063(%rcx,%r10,1),%ecx
andl %edx,%r11d
xorl %ebx,%r11d
movl 44(%rsi),%r10d
addl %r11d,%ecx
roll $17,%ecx
movl %eax,%r11d
addl %edx,%ecx
xorl %edx,%r11d
leal -1990404162(%rbx,%r10,1),%ebx
andl %ecx,%r11d
xorl %eax,%r11d
movl 48(%rsi),%r10d
addl %r11d,%ebx
roll $22,%ebx
movl %edx,%r11d
addl %ecx,%ebx
xorl %ecx,%r11d
leal 1804603682(%rax,%r10,1),%eax
andl %ebx,%r11d
xorl %edx,%r11d
movl 52(%rsi),%r10d
addl %r11d,%eax
roll $7,%eax
movl %ecx,%r11d
addl %ebx,%eax
xorl %ebx,%r11d
leal -40341101(%rdx,%r10,1),%edx
andl %eax,%r11d
xorl %ecx,%r11d
movl 56(%rsi),%r10d
addl %r11d,%edx
roll $12,%edx
movl %ebx,%r11d
addl %eax,%edx
xorl %eax,%r11d
leal -1502002290(%rcx,%r10,1),%ecx
andl %edx,%r11d
xorl %ebx,%r11d
movl 60(%rsi),%r10d
addl %r11d,%ecx
roll $17,%ecx
movl %eax,%r11d
addl %edx,%ecx
xorl %edx,%r11d
leal 1236535329(%rbx,%r10,1),%ebx
andl %ecx,%r11d
xorl %eax,%r11d
movl 0(%rsi),%r10d
addl %r11d,%ebx
roll $22,%ebx
movl %edx,%r11d
addl %ecx,%ebx
# Round 2 (steps 16-31): G(x,y,z) = (x & z) | (y & ~z)
movl 4(%rsi),%r10d
movl %edx,%r11d
movl %edx,%r12d
notl %r11d
leal -165796510(%rax,%r10,1),%eax
andl %ebx,%r12d
andl %ecx,%r11d
movl 24(%rsi),%r10d
orl %r11d,%r12d
movl %ecx,%r11d
addl %r12d,%eax
movl %ecx,%r12d
roll $5,%eax
addl %ebx,%eax
notl %r11d
leal -1069501632(%rdx,%r10,1),%edx
andl %eax,%r12d
andl %ebx,%r11d
movl 44(%rsi),%r10d
orl %r11d,%r12d
movl %ebx,%r11d
addl %r12d,%edx
movl %ebx,%r12d
roll $9,%edx
addl %eax,%edx
notl %r11d
leal 643717713(%rcx,%r10,1),%ecx
andl %edx,%r12d
andl %eax,%r11d
movl 0(%rsi),%r10d
orl %r11d,%r12d
movl %eax,%r11d
addl %r12d,%ecx
movl %eax,%r12d
roll $14,%ecx
addl %edx,%ecx
notl %r11d
leal -373897302(%rbx,%r10,1),%ebx
andl %ecx,%r12d
andl %edx,%r11d
movl 20(%rsi),%r10d
orl %r11d,%r12d
movl %edx,%r11d
addl %r12d,%ebx
movl %edx,%r12d
roll $20,%ebx
addl %ecx,%ebx
notl %r11d
leal -701558691(%rax,%r10,1),%eax
andl %ebx,%r12d
andl %ecx,%r11d
movl 40(%rsi),%r10d
orl %r11d,%r12d
movl %ecx,%r11d
addl %r12d,%eax
movl %ecx,%r12d
roll $5,%eax
addl %ebx,%eax
notl %r11d
leal 38016083(%rdx,%r10,1),%edx
andl %eax,%r12d
andl %ebx,%r11d
movl 60(%rsi),%r10d
orl %r11d,%r12d
movl %ebx,%r11d
addl %r12d,%edx
movl %ebx,%r12d
roll $9,%edx
addl %eax,%edx
notl %r11d
leal -660478335(%rcx,%r10,1),%ecx
andl %edx,%r12d
andl %eax,%r11d
movl 16(%rsi),%r10d
orl %r11d,%r12d
movl %eax,%r11d
addl %r12d,%ecx
movl %eax,%r12d
roll $14,%ecx
addl %edx,%ecx
notl %r11d
leal -405537848(%rbx,%r10,1),%ebx
andl %ecx,%r12d
andl %edx,%r11d
movl 36(%rsi),%r10d
orl %r11d,%r12d
movl %edx,%r11d
addl %r12d,%ebx
movl %edx,%r12d
roll $20,%ebx
addl %ecx,%ebx
notl %r11d
leal 568446438(%rax,%r10,1),%eax
andl %ebx,%r12d
andl %ecx,%r11d
movl 56(%rsi),%r10d
orl %r11d,%r12d
movl %ecx,%r11d
addl %r12d,%eax
movl %ecx,%r12d
roll $5,%eax
addl %ebx,%eax
notl %r11d
leal -1019803690(%rdx,%r10,1),%edx
andl %eax,%r12d
andl %ebx,%r11d
movl 12(%rsi),%r10d
orl %r11d,%r12d
movl %ebx,%r11d
addl %r12d,%edx
movl %ebx,%r12d
roll $9,%edx
addl %eax,%edx
notl %r11d
leal -187363961(%rcx,%r10,1),%ecx
andl %edx,%r12d
andl %eax,%r11d
movl 32(%rsi),%r10d
orl %r11d,%r12d
movl %eax,%r11d
addl %r12d,%ecx
movl %eax,%r12d
roll $14,%ecx
addl %edx,%ecx
notl %r11d
leal 1163531501(%rbx,%r10,1),%ebx
andl %ecx,%r12d
andl %edx,%r11d
movl 52(%rsi),%r10d
orl %r11d,%r12d
movl %edx,%r11d
addl %r12d,%ebx
movl %edx,%r12d
roll $20,%ebx
addl %ecx,%ebx
notl %r11d
leal -1444681467(%rax,%r10,1),%eax
andl %ebx,%r12d
andl %ecx,%r11d
movl 8(%rsi),%r10d
orl %r11d,%r12d
movl %ecx,%r11d
addl %r12d,%eax
movl %ecx,%r12d
roll $5,%eax
addl %ebx,%eax
notl %r11d
leal -51403784(%rdx,%r10,1),%edx
andl %eax,%r12d
andl %ebx,%r11d
movl 28(%rsi),%r10d
orl %r11d,%r12d
movl %ebx,%r11d
addl %r12d,%edx
movl %ebx,%r12d
roll $9,%edx
addl %eax,%edx
notl %r11d
leal 1735328473(%rcx,%r10,1),%ecx
andl %edx,%r12d
andl %eax,%r11d
movl 48(%rsi),%r10d
orl %r11d,%r12d
movl %eax,%r11d
addl %r12d,%ecx
movl %eax,%r12d
roll $14,%ecx
addl %edx,%ecx
notl %r11d
leal -1926607734(%rbx,%r10,1),%ebx
andl %ecx,%r12d
andl %edx,%r11d
movl 0(%rsi),%r10d
orl %r11d,%r12d
movl %edx,%r11d
addl %r12d,%ebx
movl %edx,%r12d
roll $20,%ebx
addl %ecx,%ebx
# Round 3 (steps 32-47): H(x,y,z) = x ^ y ^ z
movl 20(%rsi),%r10d
movl %ecx,%r11d
leal -378558(%rax,%r10,1),%eax
movl 32(%rsi),%r10d
xorl %edx,%r11d
xorl %ebx,%r11d
addl %r11d,%eax
roll $4,%eax
movl %ebx,%r11d
addl %ebx,%eax
leal -2022574463(%rdx,%r10,1),%edx
movl 44(%rsi),%r10d
xorl %ecx,%r11d
xorl %eax,%r11d
addl %r11d,%edx
roll $11,%edx
movl %eax,%r11d
addl %eax,%edx
leal 1839030562(%rcx,%r10,1),%ecx
movl 56(%rsi),%r10d
xorl %ebx,%r11d
xorl %edx,%r11d
addl %r11d,%ecx
roll $16,%ecx
movl %edx,%r11d
addl %edx,%ecx
leal -35309556(%rbx,%r10,1),%ebx
movl 4(%rsi),%r10d
xorl %eax,%r11d
xorl %ecx,%r11d
addl %r11d,%ebx
roll $23,%ebx
movl %ecx,%r11d
addl %ecx,%ebx
leal -1530992060(%rax,%r10,1),%eax
movl 16(%rsi),%r10d
xorl %edx,%r11d
xorl %ebx,%r11d
addl %r11d,%eax
roll $4,%eax
movl %ebx,%r11d
addl %ebx,%eax
leal 1272893353(%rdx,%r10,1),%edx
movl 28(%rsi),%r10d
xorl %ecx,%r11d
xorl %eax,%r11d
addl %r11d,%edx
roll $11,%edx
movl %eax,%r11d
addl %eax,%edx
leal -155497632(%rcx,%r10,1),%ecx
movl 40(%rsi),%r10d
xorl %ebx,%r11d
xorl %edx,%r11d
addl %r11d,%ecx
roll $16,%ecx
movl %edx,%r11d
addl %edx,%ecx
leal -1094730640(%rbx,%r10,1),%ebx
movl 52(%rsi),%r10d
xorl %eax,%r11d
xorl %ecx,%r11d
addl %r11d,%ebx
roll $23,%ebx
movl %ecx,%r11d
addl %ecx,%ebx
leal 681279174(%rax,%r10,1),%eax
movl 0(%rsi),%r10d
xorl %edx,%r11d
xorl %ebx,%r11d
addl %r11d,%eax
roll $4,%eax
movl %ebx,%r11d
addl %ebx,%eax
leal -358537222(%rdx,%r10,1),%edx
movl 12(%rsi),%r10d
xorl %ecx,%r11d
xorl %eax,%r11d
addl %r11d,%edx
roll $11,%edx
movl %eax,%r11d
addl %eax,%edx
leal -722521979(%rcx,%r10,1),%ecx
movl 24(%rsi),%r10d
xorl %ebx,%r11d
xorl %edx,%r11d
addl %r11d,%ecx
roll $16,%ecx
movl %edx,%r11d
addl %edx,%ecx
leal 76029189(%rbx,%r10,1),%ebx
movl 36(%rsi),%r10d
xorl %eax,%r11d
xorl %ecx,%r11d
addl %r11d,%ebx
roll $23,%ebx
movl %ecx,%r11d
addl %ecx,%ebx
leal -640364487(%rax,%r10,1),%eax
movl 48(%rsi),%r10d
xorl %edx,%r11d
xorl %ebx,%r11d
addl %r11d,%eax
roll $4,%eax
movl %ebx,%r11d
addl %ebx,%eax
leal -421815835(%rdx,%r10,1),%edx
movl 60(%rsi),%r10d
xorl %ecx,%r11d
xorl %eax,%r11d
addl %r11d,%edx
roll $11,%edx
movl %eax,%r11d
addl %eax,%edx
leal 530742520(%rcx,%r10,1),%ecx
movl 8(%rsi),%r10d
xorl %ebx,%r11d
xorl %edx,%r11d
addl %r11d,%ecx
roll $16,%ecx
movl %edx,%r11d
addl %edx,%ecx
leal -995338651(%rbx,%r10,1),%ebx
movl 0(%rsi),%r10d
xorl %eax,%r11d
xorl %ecx,%r11d
addl %r11d,%ebx
roll $23,%ebx
movl %ecx,%r11d
addl %ecx,%ebx
# Round 4 (steps 48-63): I(x,y,z) = y ^ (x | ~z); ~z built as -1 ^ z
movl 0(%rsi),%r10d
movl $4294967295,%r11d
xorl %edx,%r11d
leal -198630844(%rax,%r10,1),%eax
orl %ebx,%r11d
xorl %ecx,%r11d
addl %r11d,%eax
movl 28(%rsi),%r10d
movl $4294967295,%r11d
roll $6,%eax
xorl %ecx,%r11d
addl %ebx,%eax
leal 1126891415(%rdx,%r10,1),%edx
orl %eax,%r11d
xorl %ebx,%r11d
addl %r11d,%edx
movl 56(%rsi),%r10d
movl $4294967295,%r11d
roll $10,%edx
xorl %ebx,%r11d
addl %eax,%edx
leal -1416354905(%rcx,%r10,1),%ecx
orl %edx,%r11d
xorl %eax,%r11d
addl %r11d,%ecx
movl 20(%rsi),%r10d
movl $4294967295,%r11d
roll $15,%ecx
xorl %eax,%r11d
addl %edx,%ecx
leal -57434055(%rbx,%r10,1),%ebx
orl %ecx,%r11d
xorl %edx,%r11d
addl %r11d,%ebx
movl 48(%rsi),%r10d
movl $4294967295,%r11d
roll $21,%ebx
xorl %edx,%r11d
addl %ecx,%ebx
leal 1700485571(%rax,%r10,1),%eax
orl %ebx,%r11d
xorl %ecx,%r11d
addl %r11d,%eax
movl 12(%rsi),%r10d
movl $4294967295,%r11d
roll $6,%eax
xorl %ecx,%r11d
addl %ebx,%eax
leal -1894986606(%rdx,%r10,1),%edx
orl %eax,%r11d
xorl %ebx,%r11d
addl %r11d,%edx
movl 40(%rsi),%r10d
movl $4294967295,%r11d
roll $10,%edx
xorl %ebx,%r11d
addl %eax,%edx
leal -1051523(%rcx,%r10,1),%ecx
orl %edx,%r11d
xorl %eax,%r11d
addl %r11d,%ecx
movl 4(%rsi),%r10d
movl $4294967295,%r11d
roll $15,%ecx
xorl %eax,%r11d
addl %edx,%ecx
leal -2054922799(%rbx,%r10,1),%ebx
orl %ecx,%r11d
xorl %edx,%r11d
addl %r11d,%ebx
movl 32(%rsi),%r10d
movl $4294967295,%r11d
roll $21,%ebx
xorl %edx,%r11d
addl %ecx,%ebx
leal 1873313359(%rax,%r10,1),%eax
orl %ebx,%r11d
xorl %ecx,%r11d
addl %r11d,%eax
movl 60(%rsi),%r10d
movl $4294967295,%r11d
roll $6,%eax
xorl %ecx,%r11d
addl %ebx,%eax
leal -30611744(%rdx,%r10,1),%edx
orl %eax,%r11d
xorl %ebx,%r11d
addl %r11d,%edx
movl 24(%rsi),%r10d
movl $4294967295,%r11d
roll $10,%edx
xorl %ebx,%r11d
addl %eax,%edx
leal -1560198380(%rcx,%r10,1),%ecx
orl %edx,%r11d
xorl %eax,%r11d
addl %r11d,%ecx
movl 52(%rsi),%r10d
movl $4294967295,%r11d
roll $15,%ecx
xorl %eax,%r11d
addl %edx,%ecx
leal 1309151649(%rbx,%r10,1),%ebx
orl %ecx,%r11d
xorl %edx,%r11d
addl %r11d,%ebx
movl 16(%rsi),%r10d
movl $4294967295,%r11d
roll $21,%ebx
xorl %edx,%r11d
addl %ecx,%ebx
leal -145523070(%rax,%r10,1),%eax
orl %ebx,%r11d
xorl %ecx,%r11d
addl %r11d,%eax
movl 44(%rsi),%r10d
movl $4294967295,%r11d
roll $6,%eax
xorl %ecx,%r11d
addl %ebx,%eax
leal -1120210379(%rdx,%r10,1),%edx
orl %eax,%r11d
xorl %ebx,%r11d
addl %r11d,%edx
movl 8(%rsi),%r10d
movl $4294967295,%r11d
roll $10,%edx
xorl %ebx,%r11d
addl %eax,%edx
leal 718787259(%rcx,%r10,1),%ecx
orl %edx,%r11d
xorl %eax,%r11d
addl %r11d,%ecx
movl 36(%rsi),%r10d
movl $4294967295,%r11d
roll $15,%ecx
xorl %eax,%r11d
addl %edx,%ecx
leal -343485551(%rbx,%r10,1),%ebx
orl %ecx,%r11d
xorl %edx,%r11d
addl %r11d,%ebx
movl 0(%rsi),%r10d
movl $4294967295,%r11d
roll $21,%ebx
xorl %edx,%r11d
addl %ecx,%ebx
# Feed-forward: add the saved chaining values, advance to next block.
addl %r8d,%eax
addl %r9d,%ebx
addl %r14d,%ecx
addl %r15d,%edx
addq $64,%rsi
cmpq %rdi,%rsi
jb L$loop
# Store the updated state and restore callee-saved registers.
L$end:
movl %eax,0(%rbp)
movl %ebx,4(%rbp)
movl %ecx,8(%rbp)
movl %edx,12(%rbp)
movq (%rsp),%r15
movq 8(%rsp),%r14
movq 16(%rsp),%r12
movq 24(%rsp),%rbx
movq 32(%rsp),%rbp
addq $40,%rsp
L$epilogue:
.byte 0xf3,0xc3

430
deps/openssl/asm/x64-macosx-gas/rc4/rc4-x86_64.s

@@ -0,0 +1,430 @@
.text
# void RC4(RC4_KEY *key, size_t len, const void *inp, void *out)
# SysV AMD64 / Mach-O: %rdi=key, %rsi=len, %rdx=inp, %rcx=out.
# Two S-box layouts are supported: one int per entry (default) and one
# byte per entry (L$RC4_CHAR, selected at key setup for P4-class CPUs,
# flagged by key->data[256] == -1).  %r8=x, %r12=y, %r9/%r11=S[x],
# %r13=S[y]; the cmov/cmp pairs re-load S[x] when x and y alias.
.globl _RC4
.p2align 4
_RC4: orq %rsi,%rsi
jne L$entry
.byte 0xf3,0xc3
L$entry:
pushq %rbx
pushq %r12
pushq %r13
L$prologue:
# key schedule proper starts 8 bytes in; x and y indices precede it
addq $8,%rdi
movl -8(%rdi),%r8d
movl -4(%rdi),%r12d
cmpl $-1,256(%rdi)
je L$RC4_CHAR
incb %r8b
movl (%rdi,%r8,4),%r9d
testq $-8,%rsi
jz L$loop1
jmp L$loop8
.p2align 4
# Int-layout main loop: 8 bytes of keystream gathered into %rax.
L$loop8:
addb %r9b,%r12b
movq %r8,%r10
movl (%rdi,%r12,4),%r13d
rorq $8,%rax
incb %r10b
movl (%rdi,%r10,4),%r11d
cmpq %r10,%r12
movl %r9d,(%rdi,%r12,4)
cmoveq %r9,%r11
movl %r13d,(%rdi,%r8,4)
addb %r9b,%r13b
movb (%rdi,%r13,4),%al
addb %r11b,%r12b
movq %r10,%r8
movl (%rdi,%r12,4),%r13d
rorq $8,%rax
incb %r8b
movl (%rdi,%r8,4),%r9d
cmpq %r8,%r12
movl %r11d,(%rdi,%r12,4)
cmoveq %r11,%r9
movl %r13d,(%rdi,%r10,4)
addb %r11b,%r13b
movb (%rdi,%r13,4),%al
addb %r9b,%r12b
movq %r8,%r10
movl (%rdi,%r12,4),%r13d
rorq $8,%rax
incb %r10b
movl (%rdi,%r10,4),%r11d
cmpq %r10,%r12
movl %r9d,(%rdi,%r12,4)
cmoveq %r9,%r11
movl %r13d,(%rdi,%r8,4)
addb %r9b,%r13b
movb (%rdi,%r13,4),%al
addb %r11b,%r12b
movq %r10,%r8
movl (%rdi,%r12,4),%r13d
rorq $8,%rax
incb %r8b
movl (%rdi,%r8,4),%r9d
cmpq %r8,%r12
movl %r11d,(%rdi,%r12,4)
cmoveq %r11,%r9
movl %r13d,(%rdi,%r10,4)
addb %r11b,%r13b
movb (%rdi,%r13,4),%al
addb %r9b,%r12b
movq %r8,%r10
movl (%rdi,%r12,4),%r13d
rorq $8,%rax
incb %r10b
movl (%rdi,%r10,4),%r11d
cmpq %r10,%r12
movl %r9d,(%rdi,%r12,4)
cmoveq %r9,%r11
movl %r13d,(%rdi,%r8,4)
addb %r9b,%r13b
movb (%rdi,%r13,4),%al
addb %r11b,%r12b
movq %r10,%r8
movl (%rdi,%r12,4),%r13d
rorq $8,%rax
incb %r8b
movl (%rdi,%r8,4),%r9d
cmpq %r8,%r12
movl %r11d,(%rdi,%r12,4)
cmoveq %r11,%r9
movl %r13d,(%rdi,%r10,4)
addb %r11b,%r13b
movb (%rdi,%r13,4),%al
addb %r9b,%r12b
movq %r8,%r10
movl (%rdi,%r12,4),%r13d
rorq $8,%rax
incb %r10b
movl (%rdi,%r10,4),%r11d
cmpq %r10,%r12
movl %r9d,(%rdi,%r12,4)
cmoveq %r9,%r11
movl %r13d,(%rdi,%r8,4)
addb %r9b,%r13b
movb (%rdi,%r13,4),%al
addb %r11b,%r12b
movq %r10,%r8
movl (%rdi,%r12,4),%r13d
rorq $8,%rax
incb %r8b
movl (%rdi,%r8,4),%r9d
cmpq %r8,%r12
movl %r11d,(%rdi,%r12,4)
cmoveq %r11,%r9
movl %r13d,(%rdi,%r10,4)
addb %r11b,%r13b
movb (%rdi,%r13,4),%al
rorq $8,%rax
# XOR 8 input bytes with the gathered keystream qword and store.
subq $8,%rsi
xorq (%rdx),%rax
addq $8,%rdx
movq %rax,(%rcx)
addq $8,%rcx
testq $-8,%rsi
jnz L$loop8
cmpq $0,%rsi
jne L$loop1
jmp L$exit
.p2align 4
# Int-layout tail: one byte per iteration.
L$loop1:
addb %r9b,%r12b
movl (%rdi,%r12,4),%r13d
movl %r9d,(%rdi,%r12,4)
movl %r13d,(%rdi,%r8,4)
addb %r13b,%r9b
incb %r8b
movl (%rdi,%r9,4),%r13d
movl (%rdi,%r8,4),%r9d
xorb (%rdx),%r13b
incq %rdx
movb %r13b,(%rcx)
incq %rcx
decq %rsi
jnz L$loop1
jmp L$exit
.p2align 4
# Byte-layout variants (S-box of 256 bytes).
L$RC4_CHAR:
addb $1,%r8b
movzbl (%rdi,%r8,1),%r9d
testq $-8,%rsi
jz L$cloop1
cmpl $0,260(%rdi)
jnz L$cloop1
jmp L$cloop8
.p2align 4
# Byte-layout main loop: 8 bytes per iteration, keystream XORed
# directly into %eax/%ebx loaded from the input.
L$cloop8:
movl (%rdx),%eax
movl 4(%rdx),%ebx
addb %r9b,%r12b
leaq 1(%r8),%r10
movzbl (%rdi,%r12,1),%r13d
movzbl %r10b,%r10d
movzbl (%rdi,%r10,1),%r11d
movb %r9b,(%rdi,%r12,1)
cmpq %r10,%r12
movb %r13b,(%rdi,%r8,1)
jne L$cmov0
movq %r9,%r11
L$cmov0:
addb %r9b,%r13b
xorb (%rdi,%r13,1),%al
rorl $8,%eax
addb %r11b,%r12b
leaq 1(%r10),%r8
movzbl (%rdi,%r12,1),%r13d
movzbl %r8b,%r8d
movzbl (%rdi,%r8,1),%r9d
movb %r11b,(%rdi,%r12,1)
cmpq %r8,%r12
movb %r13b,(%rdi,%r10,1)
jne L$cmov1
movq %r11,%r9
L$cmov1:
addb %r11b,%r13b
xorb (%rdi,%r13,1),%al
rorl $8,%eax
addb %r9b,%r12b
leaq 1(%r8),%r10
movzbl (%rdi,%r12,1),%r13d
movzbl %r10b,%r10d
movzbl (%rdi,%r10,1),%r11d
movb %r9b,(%rdi,%r12,1)
cmpq %r10,%r12
movb %r13b,(%rdi,%r8,1)
jne L$cmov2
movq %r9,%r11
L$cmov2:
addb %r9b,%r13b
xorb (%rdi,%r13,1),%al
rorl $8,%eax
addb %r11b,%r12b
leaq 1(%r10),%r8
movzbl (%rdi,%r12,1),%r13d
movzbl %r8b,%r8d
movzbl (%rdi,%r8,1),%r9d
movb %r11b,(%rdi,%r12,1)
cmpq %r8,%r12
movb %r13b,(%rdi,%r10,1)
jne L$cmov3
movq %r11,%r9
L$cmov3:
addb %r11b,%r13b
xorb (%rdi,%r13,1),%al
rorl $8,%eax
addb %r9b,%r12b
leaq 1(%r8),%r10
movzbl (%rdi,%r12,1),%r13d
movzbl %r10b,%r10d
movzbl (%rdi,%r10,1),%r11d
movb %r9b,(%rdi,%r12,1)
cmpq %r10,%r12
movb %r13b,(%rdi,%r8,1)
jne L$cmov4
movq %r9,%r11
L$cmov4:
addb %r9b,%r13b
xorb (%rdi,%r13,1),%bl
rorl $8,%ebx
addb %r11b,%r12b
leaq 1(%r10),%r8
movzbl (%rdi,%r12,1),%r13d
movzbl %r8b,%r8d
movzbl (%rdi,%r8,1),%r9d
movb %r11b,(%rdi,%r12,1)
cmpq %r8,%r12
movb %r13b,(%rdi,%r10,1)
jne L$cmov5
movq %r11,%r9
L$cmov5:
addb %r11b,%r13b
xorb (%rdi,%r13,1),%bl
rorl $8,%ebx
addb %r9b,%r12b
leaq 1(%r8),%r10
movzbl (%rdi,%r12,1),%r13d
movzbl %r10b,%r10d
movzbl (%rdi,%r10,1),%r11d
movb %r9b,(%rdi,%r12,1)
cmpq %r10,%r12
movb %r13b,(%rdi,%r8,1)
jne L$cmov6
movq %r9,%r11
L$cmov6:
addb %r9b,%r13b
xorb (%rdi,%r13,1),%bl
rorl $8,%ebx
addb %r11b,%r12b
leaq 1(%r10),%r8
movzbl (%rdi,%r12,1),%r13d
movzbl %r8b,%r8d
movzbl (%rdi,%r8,1),%r9d
movb %r11b,(%rdi,%r12,1)
cmpq %r8,%r12
movb %r13b,(%rdi,%r10,1)
jne L$cmov7
movq %r11,%r9
L$cmov7:
addb %r11b,%r13b
xorb (%rdi,%r13,1),%bl
rorl $8,%ebx
leaq -8(%rsi),%rsi
movl %eax,(%rcx)
leaq 8(%rdx),%rdx
movl %ebx,4(%rcx)
leaq 8(%rcx),%rcx
testq $-8,%rsi
jnz L$cloop8
cmpq $0,%rsi
jne L$cloop1
jmp L$exit
.p2align 4
# Byte-layout tail: one byte per iteration.
L$cloop1:
addb %r9b,%r12b
movzbl (%rdi,%r12,1),%r13d
movb %r9b,(%rdi,%r12,1)
movb %r13b,(%rdi,%r8,1)
addb %r9b,%r13b
addb $1,%r8b
movzbl %r13b,%r13d
movzbl %r8b,%r8d
movzbl (%rdi,%r13,1),%r13d
movzbl (%rdi,%r8,1),%r9d
xorb (%rdx),%r13b
leaq 1(%rdx),%rdx
movb %r13b,(%rcx)
leaq 1(%rcx),%rcx
subq $1,%rsi
jnz L$cloop1
jmp L$exit
.p2align 4
# Write back x (undoing the last pre-increment) and y, restore regs.
L$exit:
subb $1,%r8b
movl %r8d,-8(%rdi)
movl %r12d,-4(%rdi)
movq (%rsp),%r13
movq 8(%rsp),%r12
movq 16(%rsp),%rbx
addq $24,%rsp
L$epilogue:
.byte 0xf3,0xc3
.globl _RC4_set_key
.p2align 4
# void RC4_set_key(RC4_KEY *key, int len, const unsigned char *data)
# SysV AMD64 / Mach-O: %rdi=key, %rsi=len, %rdx=data.
# Standard RC4 key schedule.  Consults OPENSSL_ia32cap_P to choose the
# S-box layout: byte-per-entry (L$c*) on P4-class CPUs (bit 20), int
# per entry otherwise; bit 30 additionally sets key->data[260] (used
# by the Intel-P4-specific path in RC4 above).
_RC4_set_key:
leaq 8(%rdi),%rdi
leaq (%rdx,%rsi,1),%rdx
negq %rsi
movq %rsi,%rcx
xorl %eax,%eax
xorq %r9,%r9
xorq %r10,%r10
xorq %r11,%r11
movl _OPENSSL_ia32cap_P(%rip),%r8d
btl $20,%r8d
jnc L$w1stloop
btl $30,%r8d
setc %r9b
movl %r9d,260(%rdi)
jmp L$c1stloop
.p2align 4
# Int layout: identity-initialise S, then the standard KSA swap loop.
# %rsi counts key bytes from -len up; cmovz rewinds it at the key end.
L$w1stloop:
movl %eax,(%rdi,%rax,4)
addb $1,%al
jnc L$w1stloop
xorq %r9,%r9
xorq %r8,%r8
.p2align 4
L$w2ndloop:
movl (%rdi,%r9,4),%r10d
addb (%rdx,%rsi,1),%r8b
addb %r10b,%r8b
addq $1,%rsi
movl (%rdi,%r8,4),%r11d
cmovzq %rcx,%rsi
movl %r10d,(%rdi,%r8,4)
movl %r11d,(%rdi,%r9,4)
addb $1,%r9b
jnc L$w2ndloop
jmp L$exit_key
.p2align 4
# Byte layout: same KSA on a 256-byte S-box; marks it with -1 at [256].
L$c1stloop:
movb %al,(%rdi,%rax,1)
addb $1,%al
jnc L$c1stloop
xorq %r9,%r9
xorq %r8,%r8
.p2align 4
L$c2ndloop:
movb (%rdi,%r9,1),%r10b
addb (%rdx,%rsi,1),%r8b
addb %r10b,%r8b
addq $1,%rsi
movb (%rdi,%r8,1),%r11b
jnz L$cnowrap
movq %rcx,%rsi
L$cnowrap:
movb %r10b,(%rdi,%r8,1)
movb %r11b,(%rdi,%r9,1)
addb $1,%r9b
jnc L$c2ndloop
movl $-1,256(%rdi)
.p2align 4
# Reset the x/y stream indices stored just before the S-box.
L$exit_key:
xorl %eax,%eax
movl %eax,-8(%rdi)
movl %eax,-4(%rdi)
.byte 0xf3,0xc3
.globl _RC4_options
.p2align 4
# const char *RC4_options(void)
# Returns a static string describing the S-box flavour this CPU will
# use, picked by the same OPENSSL_ia32cap_P bits as RC4_set_key:
# "rc4(8x,int)", "rc4(8x,char)" or "rc4(1x,char)".
_RC4_options:
leaq L$opts(%rip),%rax
movl _OPENSSL_ia32cap_P(%rip),%edx
btl $20,%edx
jnc L$done
addq $12,%rax
btl $30,%edx
jnc L$done
addq $13,%rax
L$done:
.byte 0xf3,0xc3
.p2align 6
# NUL-terminated option strings laid out back to back (offsets 0/12/25).
L$opts:
.byte 114,99,52,40,56,120,44,105,110,116,41,0
.byte 114,99,52,40,56,120,44,99,104,97,114,41,0
.byte 114,99,52,40,49,120,44,99,104,97,114,41,0
.byte 82,67,52,32,102,111,114,32,120,56,54,95,54,52,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0
.p2align 6

1283
deps/openssl/asm/x64-macosx-gas/sha/sha1-x86_64.s

File diff suppressed because it is too large

1971
deps/openssl/asm/x64-macosx-gas/sha/sha512-x86_64.s

File diff suppressed because it is too large

859
deps/openssl/asm/x64-macosx-gas/whrlpool/wp-x86_64.s

@@ -0,0 +1,859 @@
.text
# void whirlpool_block(WHIRLPOOL_CTX *ctx, const void *inp, size_t num)
# SysV AMD64 / Mach-O: %rdi=ctx (64-byte hash state), %rsi=inp,
# %rdx=num 64-byte blocks.  Table-driven implementation: L$table holds
# 8 rotated copies of the 8x8-bit S-box expansion (indexed via
# (%rbp,%rsi,8) with byte-offset tricks 0..7), plus round constants at
# L$table+4096.  Stack frame: 0(%rsp)=K (round key), 64(%rsp)=state,
# 128(%rsp)=saved ctx/inp/num/original rsp.  %r8-%r15 carry the eight
# 64-bit lanes; %cl/%dl select table rows from %eax/%ebx byte pairs.
.globl _whirlpool_block
.p2align 4
_whirlpool_block:
pushq %rbx
pushq %rbp
pushq %r12
pushq %r13
pushq %r14
pushq %r15
movq %rsp,%r11
subq $128+40,%rsp
andq $-64,%rsp
leaq 128(%rsp),%r10
movq %rdi,0(%r10)
movq %rsi,8(%r10)
movq %rdx,16(%r10)
movq %r11,32(%r10)
L$prologue:
movq %r10,%rbx
leaq L$table(%rip),%rbp
xorq %rcx,%rcx
xorq %rdx,%rdx
movq 0(%rdi),%r8
movq 8(%rdi),%r9
movq 16(%rdi),%r10
movq 24(%rdi),%r11
movq 32(%rdi),%r12
movq 40(%rdi),%r13
movq 48(%rdi),%r14
movq 56(%rdi),%r15
# Per-block loop: K = hash state; state = block ^ K; 10 rounds follow.
L$outerloop:
movq %r8,0(%rsp)
movq %r9,8(%rsp)
movq %r10,16(%rsp)
movq %r11,24(%rsp)
movq %r12,32(%rsp)
movq %r13,40(%rsp)
movq %r14,48(%rsp)
movq %r15,56(%rsp)
xorq 0(%rsi),%r8
xorq 8(%rsi),%r9
xorq 16(%rsi),%r10
xorq 24(%rsi),%r11
xorq 32(%rsi),%r12
xorq 40(%rsi),%r13
xorq 48(%rsi),%r14
xorq 56(%rsi),%r15
movq %r8,64+0(%rsp)
movq %r9,64+8(%rsp)
movq %r10,64+16(%rsp)
movq %r11,64+24(%rsp)
movq %r12,64+32(%rsp)
movq %r13,64+40(%rsp)
movq %r14,64+48(%rsp)
movq %r15,64+56(%rsp)
# round counter lives at 24(%rbx)
xorq %rsi,%rsi
movq %rsi,24(%rbx)
.p2align 4
# One round: first transform K (kept at 0(%rsp)), seeding %r8 with the
# round constant; each byte of each K lane picks one table row.
L$round:
movq 4096(%rbp,%rsi,8),%r8
movl 0(%rsp),%eax
movl 4(%rsp),%ebx
movb %al,%cl
movb %ah,%dl
leaq (%rcx,%rcx,1),%rsi
leaq (%rdx,%rdx,1),%rdi
shrl $16,%eax
xorq 0(%rbp,%rsi,8),%r8
movq 7(%rbp,%rdi,8),%r9
movb %al,%cl
movb %ah,%dl
movl 0+8(%rsp),%eax
leaq (%rcx,%rcx,1),%rsi
leaq (%rdx,%rdx,1),%rdi
movq 6(%rbp,%rsi,8),%r10
movq 5(%rbp,%rdi,8),%r11
movb %bl,%cl
movb %bh,%dl
leaq (%rcx,%rcx,1),%rsi
leaq (%rdx,%rdx,1),%rdi
shrl $16,%ebx
movq 4(%rbp,%rsi,8),%r12
movq 3(%rbp,%rdi,8),%r13
movb %bl,%cl
movb %bh,%dl
movl 0+8+4(%rsp),%ebx
leaq (%rcx,%rcx,1),%rsi
leaq (%rdx,%rdx,1),%rdi
movq 2(%rbp,%rsi,8),%r14
movq 1(%rbp,%rdi,8),%r15
movb %al,%cl
movb %ah,%dl
leaq (%rcx,%rcx,1),%rsi
leaq (%rdx,%rdx,1),%rdi
shrl $16,%eax
xorq 0(%rbp,%rsi,8),%r9
xorq 7(%rbp,%rdi,8),%r10
movb %al,%cl
movb %ah,%dl
movl 8+8(%rsp),%eax
leaq (%rcx,%rcx,1),%rsi
leaq (%rdx,%rdx,1),%rdi
xorq 6(%rbp,%rsi,8),%r11
xorq 5(%rbp,%rdi,8),%r12
movb %bl,%cl
movb %bh,%dl
leaq (%rcx,%rcx,1),%rsi
leaq (%rdx,%rdx,1),%rdi
shrl $16,%ebx
xorq 4(%rbp,%rsi,8),%r13
xorq 3(%rbp,%rdi,8),%r14
movb %bl,%cl
movb %bh,%dl
movl 8+8+4(%rsp),%ebx
leaq (%rcx,%rcx,1),%rsi
leaq (%rdx,%rdx,1),%rdi
xorq 2(%rbp,%rsi,8),%r15
xorq 1(%rbp,%rdi,8),%r8
movb %al,%cl
movb %ah,%dl
leaq (%rcx,%rcx,1),%rsi
leaq (%rdx,%rdx,1),%rdi
shrl $16,%eax
xorq 0(%rbp,%rsi,8),%r10
xorq 7(%rbp,%rdi,8),%r11
movb %al,%cl
movb %ah,%dl
movl 16+8(%rsp),%eax
leaq (%rcx,%rcx,1),%rsi
leaq (%rdx,%rdx,1),%rdi
xorq 6(%rbp,%rsi,8),%r12
xorq 5(%rbp,%rdi,8),%r13
movb %bl,%cl
movb %bh,%dl
leaq (%rcx,%rcx,1),%rsi
leaq (%rdx,%rdx,1),%rdi
shrl $16,%ebx
xorq 4(%rbp,%rsi,8),%r14
xorq 3(%rbp,%rdi,8),%r15
movb %bl,%cl
movb %bh,%dl
movl 16+8+4(%rsp),%ebx
leaq (%rcx,%rcx,1),%rsi
leaq (%rdx,%rdx,1),%rdi
xorq 2(%rbp,%rsi,8),%r8
xorq 1(%rbp,%rdi,8),%r9
movb %al,%cl
movb %ah,%dl
leaq (%rcx,%rcx,1),%rsi
leaq (%rdx,%rdx,1),%rdi
shrl $16,%eax
xorq 0(%rbp,%rsi,8),%r11
xorq 7(%rbp,%rdi,8),%r12
movb %al,%cl
movb %ah,%dl
movl 24+8(%rsp),%eax
leaq (%rcx,%rcx,1),%rsi
leaq (%rdx,%rdx,1),%rdi
xorq 6(%rbp,%rsi,8),%r13
xorq 5(%rbp,%rdi,8),%r14
movb %bl,%cl
movb %bh,%dl
leaq (%rcx,%rcx,1),%rsi
leaq (%rdx,%rdx,1),%rdi
shrl $16,%ebx
xorq 4(%rbp,%rsi,8),%r15
xorq 3(%rbp,%rdi,8),%r8
movb %bl,%cl
movb %bh,%dl
movl 24+8+4(%rsp),%ebx
leaq (%rcx,%rcx,1),%rsi
leaq (%rdx,%rdx,1),%rdi
xorq 2(%rbp,%rsi,8),%r9
xorq 1(%rbp,%rdi,8),%r10
movb %al,%cl
movb %ah,%dl
leaq (%rcx,%rcx,1),%rsi
leaq (%rdx,%rdx,1),%rdi
shrl $16,%eax
xorq 0(%rbp,%rsi,8),%r12
xorq 7(%rbp,%rdi,8),%r13
movb %al,%cl
movb %ah,%dl
movl 32+8(%rsp),%eax
leaq (%rcx,%rcx,1),%rsi
leaq (%rdx,%rdx,1),%rdi
xorq 6(%rbp,%rsi,8),%r14
xorq 5(%rbp,%rdi,8),%r15
movb %bl,%cl
movb %bh,%dl
leaq (%rcx,%rcx,1),%rsi
leaq (%rdx,%rdx,1),%rdi
shrl $16,%ebx
xorq 4(%rbp,%rsi,8),%r8
xorq 3(%rbp,%rdi,8),%r9
movb %bl,%cl
movb %bh,%dl
movl 32+8+4(%rsp),%ebx
leaq (%rcx,%rcx,1),%rsi
leaq (%rdx,%rdx,1),%rdi
xorq 2(%rbp,%rsi,8),%r10
xorq 1(%rbp,%rdi,8),%r11
movb %al,%cl
movb %ah,%dl
leaq (%rcx,%rcx,1),%rsi
leaq (%rdx,%rdx,1),%rdi
shrl $16,%eax
xorq 0(%rbp,%rsi,8),%r13
xorq 7(%rbp,%rdi,8),%r14
movb %al,%cl
movb %ah,%dl
movl 40+8(%rsp),%eax
leaq (%rcx,%rcx,1),%rsi
leaq (%rdx,%rdx,1),%rdi
xorq 6(%rbp,%rsi,8),%r15
xorq 5(%rbp,%rdi,8),%r8
movb %bl,%cl
movb %bh,%dl
leaq (%rcx,%rcx,1),%rsi
leaq (%rdx,%rdx,1),%rdi
shrl $16,%ebx
xorq 4(%rbp,%rsi,8),%r9
xorq 3(%rbp,%rdi,8),%r10
movb %bl,%cl
movb %bh,%dl
movl 40+8+4(%rsp),%ebx
leaq (%rcx,%rcx,1),%rsi
leaq (%rdx,%rdx,1),%rdi
xorq 2(%rbp,%rsi,8),%r11
xorq 1(%rbp,%rdi,8),%r12
movb %al,%cl
movb %ah,%dl
leaq (%rcx,%rcx,1),%rsi
leaq (%rdx,%rdx,1),%rdi
shrl $16,%eax
xorq 0(%rbp,%rsi,8),%r14
xorq 7(%rbp,%rdi,8),%r15
movb %al,%cl
movb %ah,%dl
movl 48+8(%rsp),%eax
leaq (%rcx,%rcx,1),%rsi
leaq (%rdx,%rdx,1),%rdi
xorq 6(%rbp,%rsi,8),%r8
xorq 5(%rbp,%rdi,8),%r9
movb %bl,%cl
movb %bh,%dl
leaq (%rcx,%rcx,1),%rsi
leaq (%rdx,%rdx,1),%rdi
shrl $16,%ebx
xorq 4(%rbp,%rsi,8),%r10
xorq 3(%rbp,%rdi,8),%r11
movb %bl,%cl
movb %bh,%dl
movl 48+8+4(%rsp),%ebx
leaq (%rcx,%rcx,1),%rsi
leaq (%rdx,%rdx,1),%rdi
xorq 2(%rbp,%rsi,8),%r12
xorq 1(%rbp,%rdi,8),%r13
movb %al,%cl
movb %ah,%dl
leaq (%rcx,%rcx,1),%rsi
leaq (%rdx,%rdx,1),%rdi
shrl $16,%eax
xorq 0(%rbp,%rsi,8),%r15
xorq 7(%rbp,%rdi,8),%r8
movb %al,%cl
movb %ah,%dl
movl 56+8(%rsp),%eax
leaq (%rcx,%rcx,1),%rsi
leaq (%rdx,%rdx,1),%rdi
xorq 6(%rbp,%rsi,8),%r9
xorq 5(%rbp,%rdi,8),%r10
movb %bl,%cl
movb %bh,%dl
leaq (%rcx,%rcx,1),%rsi
leaq (%rdx,%rdx,1),%rdi
shrl $16,%ebx
xorq 4(%rbp,%rsi,8),%r11
xorq 3(%rbp,%rdi,8),%r12
movb %bl,%cl
movb %bh,%dl
movl 56+8+4(%rsp),%ebx
leaq (%rcx,%rcx,1),%rsi
leaq (%rdx,%rdx,1),%rdi
xorq 2(%rbp,%rsi,8),%r13
xorq 1(%rbp,%rdi,8),%r14
# New round key computed; store it, then transform the state at
# 64(%rsp) the same way, XORing over the round key in %r8-%r15.
movq %r8,0(%rsp)
movq %r9,8(%rsp)
movq %r10,16(%rsp)
movq %r11,24(%rsp)
movq %r12,32(%rsp)
movq %r13,40(%rsp)
movq %r14,48(%rsp)
movq %r15,56(%rsp)
movb %al,%cl
movb %ah,%dl
leaq (%rcx,%rcx,1),%rsi
leaq (%rdx,%rdx,1),%rdi
shrl $16,%eax
xorq 0(%rbp,%rsi,8),%r8
xorq 7(%rbp,%rdi,8),%r9
movb %al,%cl
movb %ah,%dl
movl 64+0+8(%rsp),%eax
leaq (%rcx,%rcx,1),%rsi
leaq (%rdx,%rdx,1),%rdi
xorq 6(%rbp,%rsi,8),%r10
xorq 5(%rbp,%rdi,8),%r11
movb %bl,%cl
movb %bh,%dl
leaq (%rcx,%rcx,1),%rsi
leaq (%rdx,%rdx,1),%rdi
shrl $16,%ebx
xorq 4(%rbp,%rsi,8),%r12
xorq 3(%rbp,%rdi,8),%r13
movb %bl,%cl
movb %bh,%dl
movl 64+0+8+4(%rsp),%ebx
leaq (%rcx,%rcx,1),%rsi
leaq (%rdx,%rdx,1),%rdi
xorq 2(%rbp,%rsi,8),%r14
xorq 1(%rbp,%rdi,8),%r15
movb %al,%cl
movb %ah,%dl
leaq (%rcx,%rcx,1),%rsi
leaq (%rdx,%rdx,1),%rdi
shrl $16,%eax
xorq 0(%rbp,%rsi,8),%r9
xorq 7(%rbp,%rdi,8),%r10
movb %al,%cl
movb %ah,%dl
movl 64+8+8(%rsp),%eax
leaq (%rcx,%rcx,1),%rsi
leaq (%rdx,%rdx,1),%rdi
xorq 6(%rbp,%rsi,8),%r11
xorq 5(%rbp,%rdi,8),%r12
movb %bl,%cl
movb %bh,%dl
leaq (%rcx,%rcx,1),%rsi
leaq (%rdx,%rdx,1),%rdi
shrl $16,%ebx
xorq 4(%rbp,%rsi,8),%r13
xorq 3(%rbp,%rdi,8),%r14
movb %bl,%cl
movb %bh,%dl
movl 64+8+8+4(%rsp),%ebx
leaq (%rcx,%rcx,1),%rsi
leaq (%rdx,%rdx,1),%rdi
xorq 2(%rbp,%rsi,8),%r15
xorq 1(%rbp,%rdi,8),%r8
movb %al,%cl
movb %ah,%dl
leaq (%rcx,%rcx,1),%rsi
leaq (%rdx,%rdx,1),%rdi
shrl $16,%eax
xorq 0(%rbp,%rsi,8),%r10
xorq 7(%rbp,%rdi,8),%r11
movb %al,%cl
movb %ah,%dl
movl 64+16+8(%rsp),%eax
leaq (%rcx,%rcx,1),%rsi
leaq (%rdx,%rdx,1),%rdi
xorq 6(%rbp,%rsi,8),%r12
xorq 5(%rbp,%rdi,8),%r13
movb %bl,%cl
movb %bh,%dl
leaq (%rcx,%rcx,1),%rsi
leaq (%rdx,%rdx,1),%rdi
shrl $16,%ebx
xorq 4(%rbp,%rsi,8),%r14
xorq 3(%rbp,%rdi,8),%r15
movb %bl,%cl
movb %bh,%dl
movl 64+16+8+4(%rsp),%ebx
leaq (%rcx,%rcx,1),%rsi
leaq (%rdx,%rdx,1),%rdi
xorq 2(%rbp,%rsi,8),%r8
xorq 1(%rbp,%rdi,8),%r9
movb %al,%cl
movb %ah,%dl
leaq (%rcx,%rcx,1),%rsi
leaq (%rdx,%rdx,1),%rdi
shrl $16,%eax
xorq 0(%rbp,%rsi,8),%r11
xorq 7(%rbp,%rdi,8),%r12
movb %al,%cl
movb %ah,%dl
movl 64+24+8(%rsp),%eax
leaq (%rcx,%rcx,1),%rsi
leaq (%rdx,%rdx,1),%rdi
xorq 6(%rbp,%rsi,8),%r13
xorq 5(%rbp,%rdi,8),%r14
movb %bl,%cl
movb %bh,%dl
leaq (%rcx,%rcx,1),%rsi
leaq (%rdx,%rdx,1),%rdi
shrl $16,%ebx
xorq 4(%rbp,%rsi,8),%r15
xorq 3(%rbp,%rdi,8),%r8
movb %bl,%cl
movb %bh,%dl
movl 64+24+8+4(%rsp),%ebx
leaq (%rcx,%rcx,1),%rsi
leaq (%rdx,%rdx,1),%rdi
xorq 2(%rbp,%rsi,8),%r9
xorq 1(%rbp,%rdi,8),%r10
movb %al,%cl
movb %ah,%dl
leaq (%rcx,%rcx,1),%rsi
leaq (%rdx,%rdx,1),%rdi
shrl $16,%eax
xorq 0(%rbp,%rsi,8),%r12
xorq 7(%rbp,%rdi,8),%r13
movb %al,%cl
movb %ah,%dl
movl 64+32+8(%rsp),%eax
leaq (%rcx,%rcx,1),%rsi
leaq (%rdx,%rdx,1),%rdi
xorq 6(%rbp,%rsi,8),%r14
xorq 5(%rbp,%rdi,8),%r15
movb %bl,%cl
movb %bh,%dl
leaq (%rcx,%rcx,1),%rsi
leaq (%rdx,%rdx,1),%rdi
shrl $16,%ebx
xorq 4(%rbp,%rsi,8),%r8
xorq 3(%rbp,%rdi,8),%r9
movb %bl,%cl
movb %bh,%dl
movl 64+32+8+4(%rsp),%ebx
leaq (%rcx,%rcx,1),%rsi
leaq (%rdx,%rdx,1),%rdi
xorq 2(%rbp,%rsi,8),%r10
xorq 1(%rbp,%rdi,8),%r11
movb %al,%cl
movb %ah,%dl
leaq (%rcx,%rcx,1),%rsi
leaq (%rdx,%rdx,1),%rdi
shrl $16,%eax
xorq 0(%rbp,%rsi,8),%r13
xorq 7(%rbp,%rdi,8),%r14
movb %al,%cl
movb %ah,%dl
movl 64+40+8(%rsp),%eax
leaq (%rcx,%rcx,1),%rsi
leaq (%rdx,%rdx,1),%rdi
xorq 6(%rbp,%rsi,8),%r15
xorq 5(%rbp,%rdi,8),%r8
movb %bl,%cl
movb %bh,%dl
leaq (%rcx,%rcx,1),%rsi
leaq (%rdx,%rdx,1),%rdi
shrl $16,%ebx
xorq 4(%rbp,%rsi,8),%r9
xorq 3(%rbp,%rdi,8),%r10
movb %bl,%cl
movb %bh,%dl
movl 64+40+8+4(%rsp),%ebx
leaq (%rcx,%rcx,1),%rsi
leaq (%rdx,%rdx,1),%rdi
xorq 2(%rbp,%rsi,8),%r11
xorq 1(%rbp,%rdi,8),%r12
movb %al,%cl
movb %ah,%dl
leaq (%rcx,%rcx,1),%rsi
leaq (%rdx,%rdx,1),%rdi
shrl $16,%eax
xorq 0(%rbp,%rsi,8),%r14
xorq 7(%rbp,%rdi,8),%r15
movb %al,%cl
movb %ah,%dl
movl 64+48+8(%rsp),%eax
leaq (%rcx,%rcx,1),%rsi
leaq (%rdx,%rdx,1),%rdi
xorq 6(%rbp,%rsi,8),%r8
xorq 5(%rbp,%rdi,8),%r9
movb %bl,%cl
movb %bh,%dl
leaq (%rcx,%rcx,1),%rsi
leaq (%rdx,%rdx,1),%rdi
shrl $16,%ebx
xorq 4(%rbp,%rsi,8),%r10
xorq 3(%rbp,%rdi,8),%r11
movb %bl,%cl
movb %bh,%dl
movl 64+48+8+4(%rsp),%ebx
leaq (%rcx,%rcx,1),%rsi
leaq (%rdx,%rdx,1),%rdi
xorq 2(%rbp,%rsi,8),%r12
xorq 1(%rbp,%rdi,8),%r13
movb %al,%cl
movb %ah,%dl
leaq (%rcx,%rcx,1),%rsi
leaq (%rdx,%rdx,1),%rdi
shrl $16,%eax
xorq 0(%rbp,%rsi,8),%r15
xorq 7(%rbp,%rdi,8),%r8
movb %al,%cl
movb %ah,%dl
leaq (%rcx,%rcx,1),%rsi
leaq (%rdx,%rdx,1),%rdi
xorq 6(%rbp,%rsi,8),%r9
xorq 5(%rbp,%rdi,8),%r10
movb %bl,%cl
movb %bh,%dl
leaq (%rcx,%rcx,1),%rsi
leaq (%rdx,%rdx,1),%rdi
shrl $16,%ebx
xorq 4(%rbp,%rsi,8),%r11
xorq 3(%rbp,%rdi,8),%r12
movb %bl,%cl
movb %bh,%dl
leaq (%rcx,%rcx,1),%rsi
leaq (%rdx,%rdx,1),%rdi
xorq 2(%rbp,%rsi,8),%r13
xorq 1(%rbp,%rdi,8),%r14
# Advance the round counter; run 10 rounds per block.
leaq 128(%rsp),%rbx
movq 24(%rbx),%rsi
addq $1,%rsi
cmpq $10,%rsi
je L$roundsdone
movq %rsi,24(%rbx)
movq %r8,64+0(%rsp)
movq %r9,64+8(%rsp)
movq %r10,64+16(%rsp)
movq %r11,64+24(%rsp)
movq %r12,64+32(%rsp)
movq %r13,64+40(%rsp)
movq %r14,64+48(%rsp)
movq %r15,64+56(%rsp)
jmp L$round
.p2align 4
# Miyaguchi-Preneel feed-forward: H ^= state ^ block; next block.
L$roundsdone:
movq 0(%rbx),%rdi
movq 8(%rbx),%rsi
movq 16(%rbx),%rax
xorq 0(%rsi),%r8
xorq 8(%rsi),%r9
xorq 16(%rsi),%r10
xorq 24(%rsi),%r11
xorq 32(%rsi),%r12
xorq 40(%rsi),%r13
xorq 48(%rsi),%r14
xorq 56(%rsi),%r15
xorq 0(%rdi),%r8
xorq 8(%rdi),%r9
xorq 16(%rdi),%r10
xorq 24(%rdi),%r11
xorq 32(%rdi),%r12
xorq 40(%rdi),%r13
xorq 48(%rdi),%r14
xorq 56(%rdi),%r15
movq %r8,0(%rdi)
movq %r9,8(%rdi)
movq %r10,16(%rdi)
movq %r11,24(%rdi)
movq %r12,32(%rdi)
movq %r13,40(%rdi)
movq %r14,48(%rdi)
movq %r15,56(%rdi)
leaq 64(%rsi),%rsi
subq $1,%rax
jz L$alldone
movq %rsi,8(%rbx)
movq %rax,16(%rbx)
jmp L$outerloop
# Restore the original stack pointer and callee-saved registers.
L$alldone:
movq 32(%rbx),%rsi
movq (%rsi),%r15
movq 8(%rsi),%r14
movq 16(%rsi),%r13
movq 24(%rsi),%r12
movq 32(%rsi),%rbp
movq 40(%rsi),%rbx
leaq 48(%rsi),%rsp
L$epilogue:
.byte 0xf3,0xc3
.p2align 6
L$table:
# Whirlpool lookup table (machine-generated -- do not edit by hand).
# 256 rows of 16 bytes: each row is the 8-byte S-box/diffusion value
# for one input byte, stored twice back-to-back so that the round code
# above can fetch any of the eight byte-rotated 64-bit views with a
# single load at byte offset 0..7 into the row.
.byte 24,24,96,24,192,120,48,216,24,24,96,24,192,120,48,216
.byte 35,35,140,35,5,175,70,38,35,35,140,35,5,175,70,38
.byte 198,198,63,198,126,249,145,184,198,198,63,198,126,249,145,184
.byte 232,232,135,232,19,111,205,251,232,232,135,232,19,111,205,251
.byte 135,135,38,135,76,161,19,203,135,135,38,135,76,161,19,203
.byte 184,184,218,184,169,98,109,17,184,184,218,184,169,98,109,17
.byte 1,1,4,1,8,5,2,9,1,1,4,1,8,5,2,9
.byte 79,79,33,79,66,110,158,13,79,79,33,79,66,110,158,13
.byte 54,54,216,54,173,238,108,155,54,54,216,54,173,238,108,155
.byte 166,166,162,166,89,4,81,255,166,166,162,166,89,4,81,255
.byte 210,210,111,210,222,189,185,12,210,210,111,210,222,189,185,12
.byte 245,245,243,245,251,6,247,14,245,245,243,245,251,6,247,14
.byte 121,121,249,121,239,128,242,150,121,121,249,121,239,128,242,150
.byte 111,111,161,111,95,206,222,48,111,111,161,111,95,206,222,48
.byte 145,145,126,145,252,239,63,109,145,145,126,145,252,239,63,109
.byte 82,82,85,82,170,7,164,248,82,82,85,82,170,7,164,248
.byte 96,96,157,96,39,253,192,71,96,96,157,96,39,253,192,71
.byte 188,188,202,188,137,118,101,53,188,188,202,188,137,118,101,53
.byte 155,155,86,155,172,205,43,55,155,155,86,155,172,205,43,55
.byte 142,142,2,142,4,140,1,138,142,142,2,142,4,140,1,138
.byte 163,163,182,163,113,21,91,210,163,163,182,163,113,21,91,210
.byte 12,12,48,12,96,60,24,108,12,12,48,12,96,60,24,108
.byte 123,123,241,123,255,138,246,132,123,123,241,123,255,138,246,132
.byte 53,53,212,53,181,225,106,128,53,53,212,53,181,225,106,128
.byte 29,29,116,29,232,105,58,245,29,29,116,29,232,105,58,245
.byte 224,224,167,224,83,71,221,179,224,224,167,224,83,71,221,179
.byte 215,215,123,215,246,172,179,33,215,215,123,215,246,172,179,33
.byte 194,194,47,194,94,237,153,156,194,194,47,194,94,237,153,156
.byte 46,46,184,46,109,150,92,67,46,46,184,46,109,150,92,67
.byte 75,75,49,75,98,122,150,41,75,75,49,75,98,122,150,41
.byte 254,254,223,254,163,33,225,93,254,254,223,254,163,33,225,93
.byte 87,87,65,87,130,22,174,213,87,87,65,87,130,22,174,213
.byte 21,21,84,21,168,65,42,189,21,21,84,21,168,65,42,189
.byte 119,119,193,119,159,182,238,232,119,119,193,119,159,182,238,232
.byte 55,55,220,55,165,235,110,146,55,55,220,55,165,235,110,146
.byte 229,229,179,229,123,86,215,158,229,229,179,229,123,86,215,158
.byte 159,159,70,159,140,217,35,19,159,159,70,159,140,217,35,19
.byte 240,240,231,240,211,23,253,35,240,240,231,240,211,23,253,35
.byte 74,74,53,74,106,127,148,32,74,74,53,74,106,127,148,32
.byte 218,218,79,218,158,149,169,68,218,218,79,218,158,149,169,68
.byte 88,88,125,88,250,37,176,162,88,88,125,88,250,37,176,162
.byte 201,201,3,201,6,202,143,207,201,201,3,201,6,202,143,207
.byte 41,41,164,41,85,141,82,124,41,41,164,41,85,141,82,124
.byte 10,10,40,10,80,34,20,90,10,10,40,10,80,34,20,90
.byte 177,177,254,177,225,79,127,80,177,177,254,177,225,79,127,80
.byte 160,160,186,160,105,26,93,201,160,160,186,160,105,26,93,201
.byte 107,107,177,107,127,218,214,20,107,107,177,107,127,218,214,20
.byte 133,133,46,133,92,171,23,217,133,133,46,133,92,171,23,217
.byte 189,189,206,189,129,115,103,60,189,189,206,189,129,115,103,60
.byte 93,93,105,93,210,52,186,143,93,93,105,93,210,52,186,143
.byte 16,16,64,16,128,80,32,144,16,16,64,16,128,80,32,144
.byte 244,244,247,244,243,3,245,7,244,244,247,244,243,3,245,7
.byte 203,203,11,203,22,192,139,221,203,203,11,203,22,192,139,221
.byte 62,62,248,62,237,198,124,211,62,62,248,62,237,198,124,211
.byte 5,5,20,5,40,17,10,45,5,5,20,5,40,17,10,45
.byte 103,103,129,103,31,230,206,120,103,103,129,103,31,230,206,120
.byte 228,228,183,228,115,83,213,151,228,228,183,228,115,83,213,151
.byte 39,39,156,39,37,187,78,2,39,39,156,39,37,187,78,2
.byte 65,65,25,65,50,88,130,115,65,65,25,65,50,88,130,115
.byte 139,139,22,139,44,157,11,167,139,139,22,139,44,157,11,167
.byte 167,167,166,167,81,1,83,246,167,167,166,167,81,1,83,246
.byte 125,125,233,125,207,148,250,178,125,125,233,125,207,148,250,178
.byte 149,149,110,149,220,251,55,73,149,149,110,149,220,251,55,73
.byte 216,216,71,216,142,159,173,86,216,216,71,216,142,159,173,86
.byte 251,251,203,251,139,48,235,112,251,251,203,251,139,48,235,112
.byte 238,238,159,238,35,113,193,205,238,238,159,238,35,113,193,205
.byte 124,124,237,124,199,145,248,187,124,124,237,124,199,145,248,187
.byte 102,102,133,102,23,227,204,113,102,102,133,102,23,227,204,113
.byte 221,221,83,221,166,142,167,123,221,221,83,221,166,142,167,123
.byte 23,23,92,23,184,75,46,175,23,23,92,23,184,75,46,175
.byte 71,71,1,71,2,70,142,69,71,71,1,71,2,70,142,69
.byte 158,158,66,158,132,220,33,26,158,158,66,158,132,220,33,26
.byte 202,202,15,202,30,197,137,212,202,202,15,202,30,197,137,212
.byte 45,45,180,45,117,153,90,88,45,45,180,45,117,153,90,88
.byte 191,191,198,191,145,121,99,46,191,191,198,191,145,121,99,46
.byte 7,7,28,7,56,27,14,63,7,7,28,7,56,27,14,63
.byte 173,173,142,173,1,35,71,172,173,173,142,173,1,35,71,172
.byte 90,90,117,90,234,47,180,176,90,90,117,90,234,47,180,176
.byte 131,131,54,131,108,181,27,239,131,131,54,131,108,181,27,239
.byte 51,51,204,51,133,255,102,182,51,51,204,51,133,255,102,182
.byte 99,99,145,99,63,242,198,92,99,99,145,99,63,242,198,92
.byte 2,2,8,2,16,10,4,18,2,2,8,2,16,10,4,18
.byte 170,170,146,170,57,56,73,147,170,170,146,170,57,56,73,147
.byte 113,113,217,113,175,168,226,222,113,113,217,113,175,168,226,222
.byte 200,200,7,200,14,207,141,198,200,200,7,200,14,207,141,198
.byte 25,25,100,25,200,125,50,209,25,25,100,25,200,125,50,209
.byte 73,73,57,73,114,112,146,59,73,73,57,73,114,112,146,59
.byte 217,217,67,217,134,154,175,95,217,217,67,217,134,154,175,95
.byte 242,242,239,242,195,29,249,49,242,242,239,242,195,29,249,49
.byte 227,227,171,227,75,72,219,168,227,227,171,227,75,72,219,168
.byte 91,91,113,91,226,42,182,185,91,91,113,91,226,42,182,185
.byte 136,136,26,136,52,146,13,188,136,136,26,136,52,146,13,188
.byte 154,154,82,154,164,200,41,62,154,154,82,154,164,200,41,62
.byte 38,38,152,38,45,190,76,11,38,38,152,38,45,190,76,11
.byte 50,50,200,50,141,250,100,191,50,50,200,50,141,250,100,191
.byte 176,176,250,176,233,74,125,89,176,176,250,176,233,74,125,89
.byte 233,233,131,233,27,106,207,242,233,233,131,233,27,106,207,242
.byte 15,15,60,15,120,51,30,119,15,15,60,15,120,51,30,119
.byte 213,213,115,213,230,166,183,51,213,213,115,213,230,166,183,51
.byte 128,128,58,128,116,186,29,244,128,128,58,128,116,186,29,244
.byte 190,190,194,190,153,124,97,39,190,190,194,190,153,124,97,39
.byte 205,205,19,205,38,222,135,235,205,205,19,205,38,222,135,235
.byte 52,52,208,52,189,228,104,137,52,52,208,52,189,228,104,137
.byte 72,72,61,72,122,117,144,50,72,72,61,72,122,117,144,50
.byte 255,255,219,255,171,36,227,84,255,255,219,255,171,36,227,84
.byte 122,122,245,122,247,143,244,141,122,122,245,122,247,143,244,141
.byte 144,144,122,144,244,234,61,100,144,144,122,144,244,234,61,100
.byte 95,95,97,95,194,62,190,157,95,95,97,95,194,62,190,157
.byte 32,32,128,32,29,160,64,61,32,32,128,32,29,160,64,61
.byte 104,104,189,104,103,213,208,15,104,104,189,104,103,213,208,15
.byte 26,26,104,26,208,114,52,202,26,26,104,26,208,114,52,202
.byte 174,174,130,174,25,44,65,183,174,174,130,174,25,44,65,183
.byte 180,180,234,180,201,94,117,125,180,180,234,180,201,94,117,125
.byte 84,84,77,84,154,25,168,206,84,84,77,84,154,25,168,206
.byte 147,147,118,147,236,229,59,127,147,147,118,147,236,229,59,127
.byte 34,34,136,34,13,170,68,47,34,34,136,34,13,170,68,47
.byte 100,100,141,100,7,233,200,99,100,100,141,100,7,233,200,99
.byte 241,241,227,241,219,18,255,42,241,241,227,241,219,18,255,42
.byte 115,115,209,115,191,162,230,204,115,115,209,115,191,162,230,204
.byte 18,18,72,18,144,90,36,130,18,18,72,18,144,90,36,130
.byte 64,64,29,64,58,93,128,122,64,64,29,64,58,93,128,122
.byte 8,8,32,8,64,40,16,72,8,8,32,8,64,40,16,72
.byte 195,195,43,195,86,232,155,149,195,195,43,195,86,232,155,149
.byte 236,236,151,236,51,123,197,223,236,236,151,236,51,123,197,223
.byte 219,219,75,219,150,144,171,77,219,219,75,219,150,144,171,77
.byte 161,161,190,161,97,31,95,192,161,161,190,161,97,31,95,192
.byte 141,141,14,141,28,131,7,145,141,141,14,141,28,131,7,145
.byte 61,61,244,61,245,201,122,200,61,61,244,61,245,201,122,200
.byte 151,151,102,151,204,241,51,91,151,151,102,151,204,241,51,91
.byte 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
.byte 207,207,27,207,54,212,131,249,207,207,27,207,54,212,131,249
.byte 43,43,172,43,69,135,86,110,43,43,172,43,69,135,86,110
.byte 118,118,197,118,151,179,236,225,118,118,197,118,151,179,236,225
.byte 130,130,50,130,100,176,25,230,130,130,50,130,100,176,25,230
.byte 214,214,127,214,254,169,177,40,214,214,127,214,254,169,177,40
.byte 27,27,108,27,216,119,54,195,27,27,108,27,216,119,54,195
.byte 181,181,238,181,193,91,119,116,181,181,238,181,193,91,119,116
.byte 175,175,134,175,17,41,67,190,175,175,134,175,17,41,67,190
.byte 106,106,181,106,119,223,212,29,106,106,181,106,119,223,212,29
.byte 80,80,93,80,186,13,160,234,80,80,93,80,186,13,160,234
.byte 69,69,9,69,18,76,138,87,69,69,9,69,18,76,138,87
.byte 243,243,235,243,203,24,251,56,243,243,235,243,203,24,251,56
.byte 48,48,192,48,157,240,96,173,48,48,192,48,157,240,96,173
.byte 239,239,155,239,43,116,195,196,239,239,155,239,43,116,195,196
.byte 63,63,252,63,229,195,126,218,63,63,252,63,229,195,126,218
.byte 85,85,73,85,146,28,170,199,85,85,73,85,146,28,170,199
.byte 162,162,178,162,121,16,89,219,162,162,178,162,121,16,89,219
.byte 234,234,143,234,3,101,201,233,234,234,143,234,3,101,201,233
.byte 101,101,137,101,15,236,202,106,101,101,137,101,15,236,202,106
.byte 186,186,210,186,185,104,105,3,186,186,210,186,185,104,105,3
.byte 47,47,188,47,101,147,94,74,47,47,188,47,101,147,94,74
.byte 192,192,39,192,78,231,157,142,192,192,39,192,78,231,157,142
.byte 222,222,95,222,190,129,161,96,222,222,95,222,190,129,161,96
.byte 28,28,112,28,224,108,56,252,28,28,112,28,224,108,56,252
.byte 253,253,211,253,187,46,231,70,253,253,211,253,187,46,231,70
.byte 77,77,41,77,82,100,154,31,77,77,41,77,82,100,154,31
.byte 146,146,114,146,228,224,57,118,146,146,114,146,228,224,57,118
.byte 117,117,201,117,143,188,234,250,117,117,201,117,143,188,234,250
.byte 6,6,24,6,48,30,12,54,6,6,24,6,48,30,12,54
.byte 138,138,18,138,36,152,9,174,138,138,18,138,36,152,9,174
.byte 178,178,242,178,249,64,121,75,178,178,242,178,249,64,121,75
.byte 230,230,191,230,99,89,209,133,230,230,191,230,99,89,209,133
.byte 14,14,56,14,112,54,28,126,14,14,56,14,112,54,28,126
.byte 31,31,124,31,248,99,62,231,31,31,124,31,248,99,62,231
.byte 98,98,149,98,55,247,196,85,98,98,149,98,55,247,196,85
.byte 212,212,119,212,238,163,181,58,212,212,119,212,238,163,181,58
.byte 168,168,154,168,41,50,77,129,168,168,154,168,41,50,77,129
.byte 150,150,98,150,196,244,49,82,150,150,98,150,196,244,49,82
.byte 249,249,195,249,155,58,239,98,249,249,195,249,155,58,239,98
.byte 197,197,51,197,102,246,151,163,197,197,51,197,102,246,151,163
.byte 37,37,148,37,53,177,74,16,37,37,148,37,53,177,74,16
.byte 89,89,121,89,242,32,178,171,89,89,121,89,242,32,178,171
.byte 132,132,42,132,84,174,21,208,132,132,42,132,84,174,21,208
.byte 114,114,213,114,183,167,228,197,114,114,213,114,183,167,228,197
.byte 57,57,228,57,213,221,114,236,57,57,228,57,213,221,114,236
.byte 76,76,45,76,90,97,152,22,76,76,45,76,90,97,152,22
.byte 94,94,101,94,202,59,188,148,94,94,101,94,202,59,188,148
.byte 120,120,253,120,231,133,240,159,120,120,253,120,231,133,240,159
.byte 56,56,224,56,221,216,112,229,56,56,224,56,221,216,112,229
.byte 140,140,10,140,20,134,5,152,140,140,10,140,20,134,5,152
.byte 209,209,99,209,198,178,191,23,209,209,99,209,198,178,191,23
.byte 165,165,174,165,65,11,87,228,165,165,174,165,65,11,87,228
.byte 226,226,175,226,67,77,217,161,226,226,175,226,67,77,217,161
.byte 97,97,153,97,47,248,194,78,97,97,153,97,47,248,194,78
.byte 179,179,246,179,241,69,123,66,179,179,246,179,241,69,123,66
.byte 33,33,132,33,21,165,66,52,33,33,132,33,21,165,66,52
.byte 156,156,74,156,148,214,37,8,156,156,74,156,148,214,37,8
.byte 30,30,120,30,240,102,60,238,30,30,120,30,240,102,60,238
.byte 67,67,17,67,34,82,134,97,67,67,17,67,34,82,134,97
.byte 199,199,59,199,118,252,147,177,199,199,59,199,118,252,147,177
.byte 252,252,215,252,179,43,229,79,252,252,215,252,179,43,229,79
.byte 4,4,16,4,32,20,8,36,4,4,16,4,32,20,8,36
.byte 81,81,89,81,178,8,162,227,81,81,89,81,178,8,162,227
.byte 153,153,94,153,188,199,47,37,153,153,94,153,188,199,47,37
.byte 109,109,169,109,79,196,218,34,109,109,169,109,79,196,218,34
.byte 13,13,52,13,104,57,26,101,13,13,52,13,104,57,26,101
.byte 250,250,207,250,131,53,233,121,250,250,207,250,131,53,233,121
.byte 223,223,91,223,182,132,163,105,223,223,91,223,182,132,163,105
.byte 126,126,229,126,215,155,252,169,126,126,229,126,215,155,252,169
.byte 36,36,144,36,61,180,72,25,36,36,144,36,61,180,72,25
.byte 59,59,236,59,197,215,118,254,59,59,236,59,197,215,118,254
.byte 171,171,150,171,49,61,75,154,171,171,150,171,49,61,75,154
.byte 206,206,31,206,62,209,129,240,206,206,31,206,62,209,129,240
.byte 17,17,68,17,136,85,34,153,17,17,68,17,136,85,34,153
.byte 143,143,6,143,12,137,3,131,143,143,6,143,12,137,3,131
.byte 78,78,37,78,74,107,156,4,78,78,37,78,74,107,156,4
.byte 183,183,230,183,209,81,115,102,183,183,230,183,209,81,115,102
.byte 235,235,139,235,11,96,203,224,235,235,139,235,11,96,203,224
.byte 60,60,240,60,253,204,120,193,60,60,240,60,253,204,120,193
.byte 129,129,62,129,124,191,31,253,129,129,62,129,124,191,31,253
.byte 148,148,106,148,212,254,53,64,148,148,106,148,212,254,53,64
.byte 247,247,251,247,235,12,243,28,247,247,251,247,235,12,243,28
.byte 185,185,222,185,161,103,111,24,185,185,222,185,161,103,111,24
.byte 19,19,76,19,152,95,38,139,19,19,76,19,152,95,38,139
.byte 44,44,176,44,125,156,88,81,44,44,176,44,125,156,88,81
.byte 211,211,107,211,214,184,187,5,211,211,107,211,214,184,187,5
.byte 231,231,187,231,107,92,211,140,231,231,187,231,107,92,211,140
.byte 110,110,165,110,87,203,220,57,110,110,165,110,87,203,220,57
.byte 196,196,55,196,110,243,149,170,196,196,55,196,110,243,149,170
.byte 3,3,12,3,24,15,6,27,3,3,12,3,24,15,6,27
.byte 86,86,69,86,138,19,172,220,86,86,69,86,138,19,172,220
.byte 68,68,13,68,26,73,136,94,68,68,13,68,26,73,136,94
.byte 127,127,225,127,223,158,254,160,127,127,225,127,223,158,254,160
.byte 169,169,158,169,33,55,79,136,169,169,158,169,33,55,79,136
.byte 42,42,168,42,77,130,84,103,42,42,168,42,77,130,84,103
.byte 187,187,214,187,177,109,107,10,187,187,214,187,177,109,107,10
.byte 193,193,35,193,70,226,159,135,193,193,35,193,70,226,159,135
.byte 83,83,81,83,162,2,166,241,83,83,81,83,162,2,166,241
.byte 220,220,87,220,174,139,165,114,220,220,87,220,174,139,165,114
.byte 11,11,44,11,88,39,22,83,11,11,44,11,88,39,22,83
.byte 157,157,78,157,156,211,39,1,157,157,78,157,156,211,39,1
.byte 108,108,173,108,71,193,216,43,108,108,173,108,71,193,216,43
.byte 49,49,196,49,149,245,98,164,49,49,196,49,149,245,98,164
.byte 116,116,205,116,135,185,232,243,116,116,205,116,135,185,232,243
.byte 246,246,255,246,227,9,241,21,246,246,255,246,227,9,241,21
.byte 70,70,5,70,10,67,140,76,70,70,5,70,10,67,140,76
.byte 172,172,138,172,9,38,69,165,172,172,138,172,9,38,69,165
.byte 137,137,30,137,60,151,15,181,137,137,30,137,60,151,15,181
.byte 20,20,80,20,160,68,40,180,20,20,80,20,160,68,40,180
.byte 225,225,163,225,91,66,223,186,225,225,163,225,91,66,223,186
.byte 22,22,88,22,176,78,44,166,22,22,88,22,176,78,44,166
.byte 58,58,232,58,205,210,116,247,58,58,232,58,205,210,116,247
.byte 105,105,185,105,111,208,210,6,105,105,185,105,111,208,210,6
.byte 9,9,36,9,72,45,18,65,9,9,36,9,72,45,18,65
.byte 112,112,221,112,167,173,224,215,112,112,221,112,167,173,224,215
.byte 182,182,226,182,217,84,113,111,182,182,226,182,217,84,113,111
.byte 208,208,103,208,206,183,189,30,208,208,103,208,206,183,189,30
.byte 237,237,147,237,59,126,199,214,237,237,147,237,59,126,199,214
.byte 204,204,23,204,46,219,133,226,204,204,23,204,46,219,133,226
.byte 66,66,21,66,42,87,132,104,66,66,21,66,42,87,132,104
.byte 152,152,90,152,180,194,45,44,152,152,90,152,180,194,45,44
.byte 164,164,170,164,73,14,85,237,164,164,170,164,73,14,85,237
.byte 40,40,160,40,93,136,80,117,40,40,160,40,93,136,80,117
.byte 92,92,109,92,218,49,184,134,92,92,109,92,218,49,184,134
.byte 248,248,199,248,147,63,237,107,248,248,199,248,147,63,237,107
.byte 134,134,34,134,68,164,17,194,134,134,34,134,68,164,17,194
# NOTE(review): the ten 8-byte rows below appear to be the Whirlpool
# round constants rc[1..10], one per round of the 10-iteration loop
# above -- confirm against the reference implementation.
.byte 24,35,198,232,135,184,1,79
.byte 54,166,210,245,121,111,145,82
.byte 96,188,155,142,163,12,123,53
.byte 29,224,215,194,46,75,254,87
.byte 21,119,55,229,159,240,74,218
.byte 88,201,41,10,177,160,107,133
.byte 189,93,16,244,203,62,5,103
.byte 228,39,65,139,167,125,149,216
.byte 251,238,124,102,221,23,71,158
.byte 202,45,191,7,173,90,131,51

195
deps/openssl/asm/x64-macosx-gas/x86_64cpuid.s

@ -0,0 +1,195 @@
# Mach-O module-initializer section: the pointer below makes the
# dynamic loader call _OPENSSL_cpuid_setup when the image is loaded
# (the Mach-O analogue of ELF .init_array).
.mod_init_func
.p2align 3
.quad _OPENSSL_cpuid_setup
.text
.globl _OPENSSL_atomic_add
.p2align 4
# int OPENSSL_atomic_add(int *val, int add)
# In:  %rdi = val, %esi = add.  Atomically performs *val += add.
# Out: %rax = the new value of *val, sign-extended from 32 bits.
_OPENSSL_atomic_add:
movl (%rdi),%eax
# Compare-and-swap retry loop: %r8d = current + add, then attempt to
# install it; cmpxchg reloads %eax with the fresh value on failure.
L$spin: leaq (%rsi,%rax,1),%r8
# 0xf0 = LOCK prefix for the cmpxchgl below, emitted as a raw byte
.byte 0xf0
cmpxchgl %r8d,(%rdi)
jne L$spin
movl %r8d,%eax
# 0x48,0x98 = cdqe: sign-extend %eax into %rax
.byte 0x48,0x98
# 0xf3,0xc3 = rep ret
.byte 0xf3,0xc3
.globl _OPENSSL_rdtsc
.p2align 4
# unsigned long long OPENSSL_rdtsc(void)
# Returns the CPU's 64-bit time-stamp counter in %rax.
_OPENSSL_rdtsc:
rdtsc
# rdtsc delivers the counter split across EDX:EAX; merge the halves.
shlq $32,%rdx
orq %rdx,%rax
.byte 0xf3,0xc3
.globl _OPENSSL_ia32_cpuid
.p2align 4
# unsigned long long OPENSSL_ia32_cpuid(void)
# Probes CPUID and returns the capability words packed into %rax:
# EDX flags in bits 0-31, ECX flags in bits 32-63, with a few bits
# adjusted below for OpenSSL's dispatch logic.
# %rbx is callee-saved, so it is parked in %r8 for the duration.
_OPENSSL_ia32_cpuid:
movq %rbx,%r8
xorl %eax,%eax
cpuid
# %r11d = highest supported standard CPUID leaf
movl %eax,%r11d
xorl %eax,%eax
# Vendor test: 1970169159/1231384169/1818588270 are the little-endian
# dwords "Genu"/"ineI"/"ntel"; %r9d ends up 0 iff the vendor string
# is "GenuineIntel".
cmpl $1970169159,%ebx
setne %al
movl %eax,%r9d
cmpl $1231384169,%edx
setne %al
orl %eax,%r9d
cmpl $1818588270,%ecx
setne %al
orl %eax,%r9d
jz L$intel
# 1752462657/1769238117/1145913699 spell "Auth"/"enti"/"cAMD";
# %r10d ends up 0 iff the vendor string is "AuthenticAMD".
cmpl $1752462657,%ebx
setne %al
movl %eax,%r10d
cmpl $1769238117,%edx
setne %al
orl %eax,%r10d
cmpl $1145913699,%ecx
setne %al
orl %eax,%r10d
jnz L$intel
# AMD path: 2147483648 = 0x80000000 (max extended leaf query),
# 2147483656 = 0x80000008 (physical core count leaf).
movl $2147483648,%eax
cpuid
cmpl $2147483656,%eax
jb L$intel
movl $2147483656,%eax
cpuid
# CL holds (number of cores - 1); %r10 = core count
movzbq %cl,%r10
incq %r10
movl $1,%eax
cpuid
# EDX bit 28 = HTT; nothing to adjust if it is not advertised
btl $28,%edx
jnc L$done
# EBX[23:16] = logical processor count; if it does not exceed the
# core count, the "hyper-threading" bit is spurious -- clear it
# (4026531839 = 0xEFFFFFFF masks off bit 28).
shrl $16,%ebx
cmpb %r10b,%bl
ja L$done
andl $4026531839,%edx
jmp L$done
L$intel:
cmpl $4,%r11d
movl $-1,%r10d
jb L$nocacheinfo
# Leaf 4 (deterministic cache parameters), subleaf 0:
# EAX[25:14] = (cores sharing this cache) - 1, extracted into %r10d.
movl $4,%eax
movl $0,%ecx
cpuid
movl %eax,%r10d
shrl $14,%r10d
andl $4095,%r10d
L$nocacheinfo:
movl $1,%eax
cpuid
cmpl $0,%r9d
jne L$notintel
# Genuine Intel: set reserved bit 20 (1048576 = 0x00100000) as an
# OpenSSL-private marker; if family ID (AH[3:0]) != 15, also set
# reserved bit 30 (1073741824 = 0x40000000).
orl $1048576,%edx
andb $15,%ah
cmpb $15,%ah
je L$notintel
orl $1073741824,%edx
L$notintel:
# Same HTT sanity adjustment as the AMD path, using the leaf-4 info:
btl $28,%edx
jnc L$done
# tentatively clear bit 28 (0xEFFFFFFF) ...
andl $4026531839,%edx
cmpl $0,%r10d
je L$done
# ... then set it again (268435456 = 0x10000000) unless EBX[23:16]
# reports at most one logical processor.
orl $268435456,%edx
shrl $16,%ebx
cmpb $1,%bl
ja L$done
andl $4026531839,%edx
L$done:
# Pack the return value: %rax = (ECX << 32) | EDX; restore %rbx.
shlq $32,%rcx
movl %edx,%eax
movq %r8,%rbx
orq %rcx,%rax
.byte 0xf3,0xc3
.globl _OPENSSL_cleanse
.p2align 4
# void OPENSSL_cleanse(void *ptr, size_t len)
# Zeroes len bytes at ptr (%rdi = ptr, %rsi = len).  Written in
# assembly so a compiler cannot optimize the scrubbing stores away.
_OPENSSL_cleanse:
xorq %rax,%rax
# Buffers shorter than 15 bytes are cleared one byte at a time.
cmpq $15,%rsi
jae L$ot
cmpq $0,%rsi
je L$ret
L$ittle:
movb %al,(%rdi)
subq $1,%rsi
leaq 1(%rdi),%rdi
jnz L$ittle
L$ret:
.byte 0xf3,0xc3
.p2align 4
L$ot:
# Store single bytes until %rdi is 8-byte aligned ...
testq $7,%rdi
jz L$aligned
movb %al,(%rdi)
leaq -1(%rsi),%rsi
leaq 1(%rdi),%rdi
jmp L$ot
L$aligned:
# ... then clear 8 bytes per iteration while at least 8 remain ...
movq %rax,(%rdi)
leaq -8(%rsi),%rsi
testq $-8,%rsi
leaq 8(%rdi),%rdi
jnz L$aligned
# ... and finish any 1-7 byte tail with the byte loop above.
cmpq $0,%rsi
jne L$ittle
.byte 0xf3,0xc3
.globl _OPENSSL_wipe_cpu
.p2align 4
# unsigned long *OPENSSL_wipe_cpu(void)
# Scrubs potentially sensitive data from the CPU: zeroes %xmm0-%xmm15
# and the caller-clobbered general-purpose registers (callee-saved
# registers are untouched -- the caller's values live there).
# Out: %rax = %rsp + 8, i.e. the stack pointer as seen by the caller.
_OPENSSL_wipe_cpu:
pxor %xmm0,%xmm0
pxor %xmm1,%xmm1
pxor %xmm2,%xmm2
pxor %xmm3,%xmm3
pxor %xmm4,%xmm4
pxor %xmm5,%xmm5
pxor %xmm6,%xmm6
pxor %xmm7,%xmm7
pxor %xmm8,%xmm8
pxor %xmm9,%xmm9
pxor %xmm10,%xmm10
pxor %xmm11,%xmm11
pxor %xmm12,%xmm12
pxor %xmm13,%xmm13
pxor %xmm14,%xmm14
pxor %xmm15,%xmm15
xorq %rcx,%rcx
xorq %rdx,%rdx
xorq %rsi,%rsi
xorq %rdi,%rdi
xorq %r8,%r8
xorq %r9,%r9
xorq %r10,%r10
xorq %r11,%r11
# return the caller's stack pointer (just above our return address)
leaq 8(%rsp),%rax
.byte 0xf3,0xc3

2885
deps/openssl/asm/x64-win32-masm/aes/aes-x86_64.asm

File diff suppressed because it is too large

293
deps/openssl/asm/x64-win32-masm/bn/x86_64-mont.asm

@ -0,0 +1,293 @@
OPTION DOTNAME
.text$	SEGMENT ALIGN(64) 'CODE'
PUBLIC	bn_mul_mont
ALIGN	16
; int bn_mul_mont(BN_ULONG *rp, const BN_ULONG *ap, const BN_ULONG *bp,
;                 const BN_ULONG *np, const BN_ULONG *n0p, int num)
; Montgomery multiplication: rp[0..num-1] = ap[]*bp[] / 2^(64*num)
; mod np[].  Always returns 1 (in rax).  Win64 entry: arguments arrive
; in rcx,rdx,r8,r9 plus two stack slots; they are shuffled into the
; rdi,rsi,rdx,rcx,r8,r9 layout the (Unix-flavoured) body expects.
bn_mul_mont	PROC PUBLIC
	mov	QWORD PTR[8+rsp],rdi	;WIN64 prologue
	mov	QWORD PTR[16+rsp],rsi
	mov	rax,rsp
$L$SEH_begin_bn_mul_mont::
	mov	rdi,rcx
	mov	rsi,rdx
	mov	rdx,r8
	mov	rcx,r9
	mov	r8,QWORD PTR[40+rsp]
	mov	r9,QWORD PTR[48+rsp]
; save callee-saved registers
	push	rbx
	push	rbp
	push	r12
	push	r13
	push	r14
	push	r15
; zero-extend num (it arrived as a 32-bit int)
	mov	r9d,r9d
; allocate num+2 qwords of scratch (tp[]) below rsp, rounded down to a
; 1 KB boundary, and stash the pre-allocation rsp just above tp[num]
; so both the epilogue and the SEH handler can find it
	lea	r10,QWORD PTR[2+r9]
	mov	r11,rsp
	neg	r10
	lea	rsp,QWORD PTR[r10*8+rsp]
	and	rsp,-1024
	mov	QWORD PTR[8+r9*8+rsp],r11
$L$prologue::
; r12 = bp; r8 = *n0p (the precomputed Montgomery n0 constant)
	mov	r12,rdx
	mov	r8,QWORD PTR[r8]
; r14 = outer index i, r15 = inner index j
	xor	r14,r14
	xor	r15,r15
; ---- first outer iteration: tp[] = (ap[]*bp[0] + m*np[]) / 2^64 ----
	mov	rbx,QWORD PTR[r12]	; rbx = bp[0]
	mov	rax,QWORD PTR[rsi]	; rax = ap[0]
	mul	rbx
	mov	r10,rax
	mov	r11,rdx
; m = (low word * n0) mod 2^64, kept in rbp for the whole pass
	imul	rax,r8
	mov	rbp,rax
	mul	QWORD PTR[rcx]	; m * np[0]
	add	rax,r10
	adc	rdx,0
	mov	r13,rdx
	lea	r15,QWORD PTR[1+r15]
$L$1st::
	mov	rax,QWORD PTR[r15*8+rsi]
	mul	rbx	; ap[j] * bp[0]
	add	rax,r11
	adc	rdx,0
	mov	r10,rax
	mov	rax,QWORD PTR[r15*8+rcx]
	mov	r11,rdx
	mul	rbp	; np[j] * m
	add	rax,r13
	lea	r15,QWORD PTR[1+r15]
	adc	rdx,0
	add	rax,r10
	adc	rdx,0
	mov	QWORD PTR[((-16))+r15*8+rsp],rax	; tp[j-1]
	cmp	r15,r9
	mov	r13,rdx
	jl	$L$1st
; fold the two carry chains into tp[num-1] and the top word tp[num]
	xor	rdx,rdx
	add	r13,r11
	adc	rdx,0
	mov	QWORD PTR[((-8))+r9*8+rsp],r13
	mov	QWORD PTR[r9*8+rsp],rdx
	lea	r14,QWORD PTR[1+r14]
ALIGN	4
; ---- iterations 1..num-1: tp[] = (tp[] + ap[]*bp[i] + m*np[])/2^64 ----
$L$outer::
	xor	r15,r15
	mov	rbx,QWORD PTR[r14*8+r12]	; rbx = bp[i]
	mov	rax,QWORD PTR[rsi]	; rax = ap[0]
	mul	rbx
	add	rax,QWORD PTR[rsp]	; + tp[0]
	adc	rdx,0
	mov	r10,rax
	mov	r11,rdx
	imul	rax,r8	; m = tp[0]*n0 mod 2^64
	mov	rbp,rax
	mul	QWORD PTR[r15*8+rcx]	; m * np[0]
	add	rax,r10
	mov	r10,QWORD PTR[8+rsp]	; preload tp[1]
	adc	rdx,0
	mov	r13,rdx
	lea	r15,QWORD PTR[1+r15]
ALIGN	4
$L$inner::
	mov	rax,QWORD PTR[r15*8+rsi]
	mul	rbx	; ap[j] * bp[i]
	add	rax,r11
	adc	rdx,0
	add	r10,rax	; + tp[j]
	mov	rax,QWORD PTR[r15*8+rcx]
	adc	rdx,0
	mov	r11,rdx
	mul	rbp	; np[j] * m
	add	rax,r13
	lea	r15,QWORD PTR[1+r15]
	adc	rdx,0
	add	rax,r10
	adc	rdx,0
	mov	r10,QWORD PTR[r15*8+rsp]	; preload tp[j+1]
	cmp	r15,r9
	mov	QWORD PTR[((-16))+r15*8+rsp],rax	; tp[j-1]
	mov	r13,rdx
	jl	$L$inner
	xor	rdx,rdx
	add	r13,r11
	adc	rdx,0
	add	r13,r10	; + previous top word
	adc	rdx,0
	mov	QWORD PTR[((-8))+r9*8+rsp],r13
	mov	QWORD PTR[r9*8+rsp],rdx
	lea	r14,QWORD PTR[1+r14]
	cmp	r14,r9
	jl	$L$outer
; ---- final reduction: subtract np[] from tp[], keep whichever of the
; two results is the canonical (non-negative) one ----
	lea	rsi,QWORD PTR[rsp]	; rsi = tp
	lea	r15,QWORD PTR[((-1))+r9]
	mov	rax,QWORD PTR[rsi]	; rax = tp[0]
	xor	r14,r14	; index, and clears CF for the sbb chain
	jmp	$L$sub
ALIGN	16
$L$sub::	sbb	rax,QWORD PTR[r14*8+rcx]
	mov	QWORD PTR[r14*8+rdi],rax	; rp[i] = tp[i] - np[i] - borrow
	dec	r15
	mov	rax,QWORD PTR[8+r14*8+rsi]
	lea	r14,QWORD PTR[1+r14]
	jge	$L$sub
; combine the top carry word with the final borrow, then build a mask
; that selects the source branchlessly: rsi = net-borrow ? tp : rp
	sbb	rax,0
	and	rsi,rax
	not	rax
	mov	rcx,rdi
	and	rcx,rax
	lea	r15,QWORD PTR[((-1))+r9]
	or	rsi,rcx
ALIGN	16
; copy the selected result into rp[] and overwrite the scratch vector
; so no intermediate values linger on the stack
$L$copy::
	mov	rax,QWORD PTR[r15*8+rsi]
	mov	QWORD PTR[r15*8+rdi],rax
	mov	QWORD PTR[r15*8+rsp],r14
	dec	r15
	jge	$L$copy
; restore callee-saved registers via the stashed pre-allocation rsp
	mov	rsi,QWORD PTR[8+r9*8+rsp]
	mov	rax,1	; return value
	mov	r15,QWORD PTR[rsi]
	mov	r14,QWORD PTR[8+rsi]
	mov	r13,QWORD PTR[16+rsi]
	mov	r12,QWORD PTR[24+rsi]
	mov	rbp,QWORD PTR[32+rsi]
	mov	rbx,QWORD PTR[40+rsi]
	lea	rsp,QWORD PTR[48+rsi]
$L$epilogue::
	mov	rdi,QWORD PTR[8+rsp]	;WIN64 epilogue
	mov	rsi,QWORD PTR[16+rsp]
	DB	0F3h,0C3h	;repret
$L$SEH_end_bn_mul_mont::
bn_mul_mont	ENDP
; ASCII identification string:
; "Montgomery Multiplication for x86_64, CRYPTOGAMS by
;  <appro@openssl.org>", NUL-terminated.
DB	77,111,110,116,103,111,109,101,114,121,32,77,117,108,116,105
DB	112,108,105,99,97,116,105,111,110,32,102,111,114,32,120,56
DB	54,95,54,52,44,32,67,82,89,80,84,79,71,65,77,83
DB	32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115
DB	115,108,46,111,114,103,62,0
ALIGN	16
EXTERN	__imp_RtlVirtualUnwind:NEAR

ALIGN	16
; Win64 language-specific (SEH) handler for bn_mul_mont.  Standard
; handler signature: rcx = ExceptionRecord, rdx = EstablisherFrame,
; r8 = ContextRecord, r9 = DispatcherContext.  Recovers the saved
; callee-saved registers and the caller's rsp when the faulting Rip
; lies between $L$prologue and $L$epilogue, then resumes the unwind
; via RtlVirtualUnwind.
se_handler	PROC PRIVATE
	push	rsi
	push	rdi
	push	rbx
	push	rbp
	push	r12
	push	r13
	push	r14
	push	r15
	pushfq
	sub	rsp,64
	mov	rax,QWORD PTR[120+r8]	; context->Rax
	mov	rbx,QWORD PTR[248+r8]	; context->Rip
; fault before $L$prologue: frame not yet built; Rax (set to the entry
; rsp early in the prologue) already points at the unmodified frame
	lea	r10,QWORD PTR[$L$prologue]
	cmp	rbx,r10
	jb	$L$in_prologue
	mov	rax,QWORD PTR[152+r8]	; context->Rsp
; fault at/after $L$epilogue: frame already torn down
	lea	r10,QWORD PTR[$L$epilogue]
	cmp	rbx,r10
	jae	$L$in_prologue
; fault in the body: context->R9 still holds num, and the function
; stashed its pre-allocation rsp at [8+num*8+rsp] (see bn_mul_mont)
	mov	r10,QWORD PTR[192+r8]	; context->R9
	mov	rax,QWORD PTR[8+r10*8+rax]
	lea	rax,QWORD PTR[48+rax]	; step over the six pushed registers
; reload the pushed callee-saved registers into the context record
	mov	rbx,QWORD PTR[((-8))+rax]
	mov	rbp,QWORD PTR[((-16))+rax]
	mov	r12,QWORD PTR[((-24))+rax]
	mov	r13,QWORD PTR[((-32))+rax]
	mov	r14,QWORD PTR[((-40))+rax]
	mov	r15,QWORD PTR[((-48))+rax]
	mov	QWORD PTR[144+r8],rbx	; context->Rbx
	mov	QWORD PTR[160+r8],rbp	; context->Rbp
	mov	QWORD PTR[216+r8],r12	; context->R12
	mov	QWORD PTR[224+r8],r13	; context->R13
	mov	QWORD PTR[232+r8],r14	; context->R14
	mov	QWORD PTR[240+r8],r15	; context->R15
$L$in_prologue::
; rdi/rsi were saved into the WIN64 home slots above the return address
	mov	rdi,QWORD PTR[8+rax]
	mov	rsi,QWORD PTR[16+rax]
	mov	QWORD PTR[152+r8],rax	; context->Rsp
	mov	QWORD PTR[168+r8],rsi	; context->Rsi
	mov	QWORD PTR[176+r8],rdi	; context->Rdi
; copy the CONTEXT record (154 qwords) to disp->ContextRecord
	mov	rdi,QWORD PTR[40+r9]
	mov	rsi,r8
	mov	ecx,154
	DD	0a548f3fch	; cld; rep movsq
; RtlVirtualUnwind(0, ImageBase, ControlPc, FunctionEntry,
;                  ContextRecord, &HandlerData, &EstablisherFrame, NULL)
	mov	rsi,r9
	xor	rcx,rcx
	mov	rdx,QWORD PTR[8+rsi]	; disp->ImageBase
	mov	r8,QWORD PTR[rsi]	; disp->ControlPc
	mov	r9,QWORD PTR[16+rsi]	; disp->FunctionEntry
	mov	r10,QWORD PTR[40+rsi]	; disp->ContextRecord
	lea	r11,QWORD PTR[56+rsi]	; &disp->HandlerData
	lea	r12,QWORD PTR[24+rsi]	; &disp->EstablisherFrame
	mov	QWORD PTR[32+rsp],r10
	mov	QWORD PTR[40+rsp],r11
	mov	QWORD PTR[48+rsp],r12
	mov	QWORD PTR[56+rsp],rcx
	call	QWORD PTR[__imp_RtlVirtualUnwind]
	mov	eax,1	; ExceptionContinueSearch
	add	rsp,64
	popfq
	pop	r15
	pop	r14
	pop	r13
	pop	r12
	pop	rbp
	pop	rbx
	pop	rdi
	pop	rsi
	DB	0F3h,0C3h	;repret
se_handler	ENDP
.text$	ENDS
; SEH metadata: one RUNTIME_FUNCTION entry in .pdata maps the extent
; of bn_mul_mont to its UNWIND_INFO record in .xdata.
.pdata	SEGMENT READONLY ALIGN(4)
ALIGN	4
	DD	imagerel $L$SEH_begin_bn_mul_mont
	DD	imagerel $L$SEH_end_bn_mul_mont
	DD	imagerel $L$SEH_info_bn_mul_mont
.pdata	ENDS
.xdata	SEGMENT READONLY ALIGN(8)
ALIGN	8
$L$SEH_info_bn_mul_mont::
; UNWIND_INFO: 9 = version 1 | UNW_FLAG_EHANDLER, no prologue codes;
; followed by the image-relative address of the language handler.
DB	9,0,0,0
	DD	imagerel se_handler
.xdata	ENDS
END

2108
deps/openssl/asm/x64-win32-masm/camellia/cmll-x86_64.asm

File diff suppressed because it is too large

781
deps/openssl/asm/x64-win32-masm/md5/md5-x86_64.asm

@ -0,0 +1,781 @@
OPTION DOTNAME
.text$ SEGMENT ALIGN(64) 'CODE'
ALIGN 16
PUBLIC md5_block_asm_data_order
md5_block_asm_data_order PROC PUBLIC
mov QWORD PTR[8+rsp],rdi ;WIN64 prologue
mov QWORD PTR[16+rsp],rsi
mov rax,rsp
$L$SEH_begin_md5_block_asm_data_order::
mov rdi,rcx
mov rsi,rdx
mov rdx,r8
push rbp
push rbx
push r12
push r14
push r15
$L$prologue::
mov rbp,rdi
shl rdx,6
lea rdi,QWORD PTR[rdx*1+rsi]
mov eax,DWORD PTR[((0*4))+rbp]
mov ebx,DWORD PTR[((1*4))+rbp]
mov ecx,DWORD PTR[((2*4))+rbp]
mov edx,DWORD PTR[((3*4))+rbp]
cmp rsi,rdi
je $L$end
$L$loop::
mov r8d,eax
mov r9d,ebx
mov r14d,ecx
mov r15d,edx
mov r10d,DWORD PTR[((0*4))+rsi]
mov r11d,edx
xor r11d,ecx
lea eax,DWORD PTR[0d76aa478h+r10*1+rax]
and r11d,ebx
xor r11d,edx
mov r10d,DWORD PTR[((1*4))+rsi]
add eax,r11d
rol eax,7
mov r11d,ecx
add eax,ebx
xor r11d,ebx
lea edx,DWORD PTR[0e8c7b756h+r10*1+rdx]
and r11d,eax
xor r11d,ecx
mov r10d,DWORD PTR[((2*4))+rsi]
add edx,r11d
rol edx,12
mov r11d,ebx
add edx,eax
xor r11d,eax
lea ecx,DWORD PTR[0242070dbh+r10*1+rcx]
and r11d,edx
xor r11d,ebx
mov r10d,DWORD PTR[((3*4))+rsi]
add ecx,r11d
rol ecx,17
mov r11d,eax
add ecx,edx
xor r11d,edx
lea ebx,DWORD PTR[0c1bdceeeh+r10*1+rbx]
and r11d,ecx
xor r11d,eax
mov r10d,DWORD PTR[((4*4))+rsi]
add ebx,r11d
rol ebx,22
mov r11d,edx
add ebx,ecx
xor r11d,ecx
lea eax,DWORD PTR[0f57c0fafh+r10*1+rax]
and r11d,ebx
xor r11d,edx
mov r10d,DWORD PTR[((5*4))+rsi]
add eax,r11d
rol eax,7
mov r11d,ecx
add eax,ebx
xor r11d,ebx
lea edx,DWORD PTR[04787c62ah+r10*1+rdx]
and r11d,eax
xor r11d,ecx
mov r10d,DWORD PTR[((6*4))+rsi]
add edx,r11d
rol edx,12
mov r11d,ebx
add edx,eax
xor r11d,eax
lea ecx,DWORD PTR[0a8304613h+r10*1+rcx]
and r11d,edx
xor r11d,ebx
mov r10d,DWORD PTR[((7*4))+rsi]
add ecx,r11d
rol ecx,17
mov r11d,eax
add ecx,edx
xor r11d,edx
lea ebx,DWORD PTR[0fd469501h+r10*1+rbx]
and r11d,ecx
xor r11d,eax
mov r10d,DWORD PTR[((8*4))+rsi]
add ebx,r11d
rol ebx,22
mov r11d,edx
add ebx,ecx
xor r11d,ecx
lea eax,DWORD PTR[0698098d8h+r10*1+rax]
and r11d,ebx
xor r11d,edx
mov r10d,DWORD PTR[((9*4))+rsi]
add eax,r11d
rol eax,7
mov r11d,ecx
add eax,ebx
xor r11d,ebx
lea edx,DWORD PTR[08b44f7afh+r10*1+rdx]
and r11d,eax
xor r11d,ecx
mov r10d,DWORD PTR[((10*4))+rsi]
add edx,r11d
rol edx,12
mov r11d,ebx
add edx,eax
xor r11d,eax
lea ecx,DWORD PTR[0ffff5bb1h+r10*1+rcx]
and r11d,edx
xor r11d,ebx
mov r10d,DWORD PTR[((11*4))+rsi]
add ecx,r11d
rol ecx,17
mov r11d,eax
add ecx,edx
xor r11d,edx
lea ebx,DWORD PTR[0895cd7beh+r10*1+rbx]
and r11d,ecx
xor r11d,eax
mov r10d,DWORD PTR[((12*4))+rsi]
add ebx,r11d
rol ebx,22
mov r11d,edx
add ebx,ecx
xor r11d,ecx
lea eax,DWORD PTR[06b901122h+r10*1+rax]
and r11d,ebx
xor r11d,edx
mov r10d,DWORD PTR[((13*4))+rsi]
add eax,r11d
rol eax,7
mov r11d,ecx
add eax,ebx
xor r11d,ebx
lea edx,DWORD PTR[0fd987193h+r10*1+rdx]
and r11d,eax
xor r11d,ecx
mov r10d,DWORD PTR[((14*4))+rsi]
add edx,r11d
rol edx,12
mov r11d,ebx
add edx,eax
xor r11d,eax
lea ecx,DWORD PTR[0a679438eh+r10*1+rcx]
and r11d,edx
xor r11d,ebx
mov r10d,DWORD PTR[((15*4))+rsi]
add ecx,r11d
rol ecx,17
mov r11d,eax
add ecx,edx
xor r11d,edx
lea ebx,DWORD PTR[049b40821h+r10*1+rbx]
and r11d,ecx
xor r11d,eax
mov r10d,DWORD PTR[((0*4))+rsi]
add ebx,r11d
rol ebx,22
mov r11d,edx
add ebx,ecx
mov r10d,DWORD PTR[((1*4))+rsi]
mov r11d,edx
mov r12d,edx
not r11d
lea eax,DWORD PTR[0f61e2562h+r10*1+rax]
and r12d,ebx
and r11d,ecx
mov r10d,DWORD PTR[((6*4))+rsi]
or r12d,r11d
mov r11d,ecx
add eax,r12d
mov r12d,ecx
rol eax,5
add eax,ebx
not r11d
lea edx,DWORD PTR[0c040b340h+r10*1+rdx]
and r12d,eax
and r11d,ebx
mov r10d,DWORD PTR[((11*4))+rsi]
or r12d,r11d
mov r11d,ebx
add edx,r12d
mov r12d,ebx
rol edx,9
add edx,eax
not r11d
lea ecx,DWORD PTR[0265e5a51h+r10*1+rcx]
and r12d,edx
and r11d,eax
mov r10d,DWORD PTR[((0*4))+rsi]
or r12d,r11d
mov r11d,eax
add ecx,r12d
mov r12d,eax
rol ecx,14
add ecx,edx
not r11d
lea ebx,DWORD PTR[0e9b6c7aah+r10*1+rbx]
and r12d,ecx
and r11d,edx
mov r10d,DWORD PTR[((5*4))+rsi]
or r12d,r11d
mov r11d,edx
add ebx,r12d
mov r12d,edx
rol ebx,20
add ebx,ecx
not r11d
lea eax,DWORD PTR[0d62f105dh+r10*1+rax]
and r12d,ebx
and r11d,ecx
mov r10d,DWORD PTR[((10*4))+rsi]
or r12d,r11d
mov r11d,ecx
add eax,r12d
mov r12d,ecx
rol eax,5
add eax,ebx
not r11d
lea edx,DWORD PTR[02441453h+r10*1+rdx]
and r12d,eax
and r11d,ebx
mov r10d,DWORD PTR[((15*4))+rsi]
or r12d,r11d
mov r11d,ebx
add edx,r12d
mov r12d,ebx
rol edx,9
add edx,eax
not r11d
lea ecx,DWORD PTR[0d8a1e681h+r10*1+rcx]
and r12d,edx
and r11d,eax
mov r10d,DWORD PTR[((4*4))+rsi]
or r12d,r11d
mov r11d,eax
add ecx,r12d
mov r12d,eax
rol ecx,14
add ecx,edx
not r11d
lea ebx,DWORD PTR[0e7d3fbc8h+r10*1+rbx]
and r12d,ecx
and r11d,edx
mov r10d,DWORD PTR[((9*4))+rsi]
or r12d,r11d
mov r11d,edx
add ebx,r12d
mov r12d,edx
rol ebx,20
add ebx,ecx
not r11d
lea eax,DWORD PTR[021e1cde6h+r10*1+rax]
and r12d,ebx
and r11d,ecx
mov r10d,DWORD PTR[((14*4))+rsi]
or r12d,r11d
mov r11d,ecx
add eax,r12d
mov r12d,ecx
rol eax,5
add eax,ebx
not r11d
lea edx,DWORD PTR[0c33707d6h+r10*1+rdx]
and r12d,eax
and r11d,ebx
mov r10d,DWORD PTR[((3*4))+rsi]
or r12d,r11d
mov r11d,ebx
add edx,r12d
mov r12d,ebx
rol edx,9
add edx,eax
not r11d
lea ecx,DWORD PTR[0f4d50d87h+r10*1+rcx]
and r12d,edx
and r11d,eax
mov r10d,DWORD PTR[((8*4))+rsi]
or r12d,r11d
mov r11d,eax
add ecx,r12d
mov r12d,eax
rol ecx,14
add ecx,edx
not r11d
lea ebx,DWORD PTR[0455a14edh+r10*1+rbx]
and r12d,ecx
and r11d,edx
mov r10d,DWORD PTR[((13*4))+rsi]
or r12d,r11d
mov r11d,edx
add ebx,r12d
mov r12d,edx
rol ebx,20
add ebx,ecx
not r11d
lea eax,DWORD PTR[0a9e3e905h+r10*1+rax]
and r12d,ebx
and r11d,ecx
mov r10d,DWORD PTR[((2*4))+rsi]
or r12d,r11d
mov r11d,ecx
add eax,r12d
mov r12d,ecx
rol eax,5
add eax,ebx
not r11d
lea edx,DWORD PTR[0fcefa3f8h+r10*1+rdx]
and r12d,eax
and r11d,ebx
mov r10d,DWORD PTR[((7*4))+rsi]
or r12d,r11d
mov r11d,ebx
add edx,r12d
mov r12d,ebx
rol edx,9
add edx,eax
not r11d
lea ecx,DWORD PTR[0676f02d9h+r10*1+rcx]
and r12d,edx
and r11d,eax
mov r10d,DWORD PTR[((12*4))+rsi]
or r12d,r11d
mov r11d,eax
add ecx,r12d
mov r12d,eax
rol ecx,14
add ecx,edx
not r11d
lea ebx,DWORD PTR[08d2a4c8ah+r10*1+rbx]
and r12d,ecx
and r11d,edx
mov r10d,DWORD PTR[((0*4))+rsi]
or r12d,r11d
mov r11d,edx
add ebx,r12d
mov r12d,edx
rol ebx,20
add ebx,ecx
mov r10d,DWORD PTR[((5*4))+rsi]
mov r11d,ecx
lea eax,DWORD PTR[0fffa3942h+r10*1+rax]
mov r10d,DWORD PTR[((8*4))+rsi]
xor r11d,edx
xor r11d,ebx
add eax,r11d
rol eax,4
mov r11d,ebx
add eax,ebx
lea edx,DWORD PTR[08771f681h+r10*1+rdx]
mov r10d,DWORD PTR[((11*4))+rsi]
xor r11d,ecx
xor r11d,eax
add edx,r11d
rol edx,11
mov r11d,eax
add edx,eax
lea ecx,DWORD PTR[06d9d6122h+r10*1+rcx]
mov r10d,DWORD PTR[((14*4))+rsi]
xor r11d,ebx
xor r11d,edx
add ecx,r11d
rol ecx,16
mov r11d,edx
add ecx,edx
lea ebx,DWORD PTR[0fde5380ch+r10*1+rbx]
mov r10d,DWORD PTR[((1*4))+rsi]
xor r11d,eax
xor r11d,ecx
add ebx,r11d
rol ebx,23
mov r11d,ecx
add ebx,ecx
lea eax,DWORD PTR[0a4beea44h+r10*1+rax]
mov r10d,DWORD PTR[((4*4))+rsi]
xor r11d,edx
xor r11d,ebx
add eax,r11d
rol eax,4
mov r11d,ebx
add eax,ebx
lea edx,DWORD PTR[04bdecfa9h+r10*1+rdx]
mov r10d,DWORD PTR[((7*4))+rsi]
xor r11d,ecx
xor r11d,eax
add edx,r11d
rol edx,11
mov r11d,eax
add edx,eax
lea ecx,DWORD PTR[0f6bb4b60h+r10*1+rcx]
mov r10d,DWORD PTR[((10*4))+rsi]
xor r11d,ebx
xor r11d,edx
add ecx,r11d
rol ecx,16
mov r11d,edx
add ecx,edx
lea ebx,DWORD PTR[0bebfbc70h+r10*1+rbx]
mov r10d,DWORD PTR[((13*4))+rsi]
xor r11d,eax
xor r11d,ecx
add ebx,r11d
rol ebx,23
mov r11d,ecx
add ebx,ecx
lea eax,DWORD PTR[0289b7ec6h+r10*1+rax]
mov r10d,DWORD PTR[((0*4))+rsi]
xor r11d,edx
xor r11d,ebx
add eax,r11d
rol eax,4
mov r11d,ebx
add eax,ebx
lea edx,DWORD PTR[0eaa127fah+r10*1+rdx]
mov r10d,DWORD PTR[((3*4))+rsi]
xor r11d,ecx
xor r11d,eax
add edx,r11d
rol edx,11
mov r11d,eax
add edx,eax
lea ecx,DWORD PTR[0d4ef3085h+r10*1+rcx]
mov r10d,DWORD PTR[((6*4))+rsi]
xor r11d,ebx
xor r11d,edx
add ecx,r11d
rol ecx,16
mov r11d,edx
add ecx,edx
lea ebx,DWORD PTR[04881d05h+r10*1+rbx]
mov r10d,DWORD PTR[((9*4))+rsi]
xor r11d,eax
xor r11d,ecx
add ebx,r11d
rol ebx,23
mov r11d,ecx
add ebx,ecx
lea eax,DWORD PTR[0d9d4d039h+r10*1+rax]
mov r10d,DWORD PTR[((12*4))+rsi]
xor r11d,edx
xor r11d,ebx
add eax,r11d
rol eax,4
mov r11d,ebx
add eax,ebx
lea edx,DWORD PTR[0e6db99e5h+r10*1+rdx]
mov r10d,DWORD PTR[((15*4))+rsi]
xor r11d,ecx
xor r11d,eax
add edx,r11d
rol edx,11
mov r11d,eax
add edx,eax
lea ecx,DWORD PTR[01fa27cf8h+r10*1+rcx]
mov r10d,DWORD PTR[((2*4))+rsi]
xor r11d,ebx
xor r11d,edx
add ecx,r11d
rol ecx,16
mov r11d,edx
add ecx,edx
lea ebx,DWORD PTR[0c4ac5665h+r10*1+rbx]
mov r10d,DWORD PTR[((0*4))+rsi]
xor r11d,eax
xor r11d,ecx
add ebx,r11d
rol ebx,23
mov r11d,ecx
add ebx,ecx
mov r10d,DWORD PTR[((0*4))+rsi]
mov r11d,0ffffffffh
xor r11d,edx
lea eax,DWORD PTR[0f4292244h+r10*1+rax]
or r11d,ebx
xor r11d,ecx
add eax,r11d
mov r10d,DWORD PTR[((7*4))+rsi]
mov r11d,0ffffffffh
rol eax,6
xor r11d,ecx
add eax,ebx
lea edx,DWORD PTR[0432aff97h+r10*1+rdx]
or r11d,eax
xor r11d,ebx
add edx,r11d
mov r10d,DWORD PTR[((14*4))+rsi]
mov r11d,0ffffffffh
rol edx,10
xor r11d,ebx
add edx,eax
lea ecx,DWORD PTR[0ab9423a7h+r10*1+rcx]
or r11d,edx
xor r11d,eax
add ecx,r11d
mov r10d,DWORD PTR[((5*4))+rsi]
mov r11d,0ffffffffh
rol ecx,15
xor r11d,eax
add ecx,edx
lea ebx,DWORD PTR[0fc93a039h+r10*1+rbx]
or r11d,ecx
xor r11d,edx
add ebx,r11d
mov r10d,DWORD PTR[((12*4))+rsi]
mov r11d,0ffffffffh
rol ebx,21
xor r11d,edx
add ebx,ecx
lea eax,DWORD PTR[0655b59c3h+r10*1+rax]
or r11d,ebx
xor r11d,ecx
add eax,r11d
mov r10d,DWORD PTR[((3*4))+rsi]
mov r11d,0ffffffffh
rol eax,6
xor r11d,ecx
add eax,ebx
lea edx,DWORD PTR[08f0ccc92h+r10*1+rdx]
or r11d,eax
xor r11d,ebx
add edx,r11d
mov r10d,DWORD PTR[((10*4))+rsi]
mov r11d,0ffffffffh
rol edx,10
xor r11d,ebx
add edx,eax
lea ecx,DWORD PTR[0ffeff47dh+r10*1+rcx]
or r11d,edx
xor r11d,eax
add ecx,r11d
mov r10d,DWORD PTR[((1*4))+rsi]
mov r11d,0ffffffffh
rol ecx,15
xor r11d,eax
add ecx,edx
lea ebx,DWORD PTR[085845dd1h+r10*1+rbx]
or r11d,ecx
xor r11d,edx
add ebx,r11d
mov r10d,DWORD PTR[((8*4))+rsi]
mov r11d,0ffffffffh
rol ebx,21
xor r11d,edx
add ebx,ecx
lea eax,DWORD PTR[06fa87e4fh+r10*1+rax]
or r11d,ebx
xor r11d,ecx
add eax,r11d
mov r10d,DWORD PTR[((15*4))+rsi]
mov r11d,0ffffffffh
rol eax,6
xor r11d,ecx
add eax,ebx
lea edx,DWORD PTR[0fe2ce6e0h+r10*1+rdx]
or r11d,eax
xor r11d,ebx
add edx,r11d
mov r10d,DWORD PTR[((6*4))+rsi]
mov r11d,0ffffffffh
rol edx,10
xor r11d,ebx
add edx,eax
lea ecx,DWORD PTR[0a3014314h+r10*1+rcx]
or r11d,edx
xor r11d,eax
add ecx,r11d
mov r10d,DWORD PTR[((13*4))+rsi]
mov r11d,0ffffffffh
rol ecx,15
xor r11d,eax
add ecx,edx
lea ebx,DWORD PTR[04e0811a1h+r10*1+rbx]
or r11d,ecx
xor r11d,edx
add ebx,r11d
mov r10d,DWORD PTR[((4*4))+rsi]
mov r11d,0ffffffffh
rol ebx,21
xor r11d,edx
add ebx,ecx
lea eax,DWORD PTR[0f7537e82h+r10*1+rax]
or r11d,ebx
xor r11d,ecx
add eax,r11d
mov r10d,DWORD PTR[((11*4))+rsi]
mov r11d,0ffffffffh
rol eax,6
xor r11d,ecx
add eax,ebx
lea edx,DWORD PTR[0bd3af235h+r10*1+rdx]
or r11d,eax
xor r11d,ebx
add edx,r11d
mov r10d,DWORD PTR[((2*4))+rsi]
mov r11d,0ffffffffh
rol edx,10
xor r11d,ebx
add edx,eax
lea ecx,DWORD PTR[02ad7d2bbh+r10*1+rcx]
or r11d,edx
xor r11d,eax
add ecx,r11d
mov r10d,DWORD PTR[((9*4))+rsi]
mov r11d,0ffffffffh
rol ecx,15
xor r11d,eax
add ecx,edx
lea ebx,DWORD PTR[0eb86d391h+r10*1+rbx]
or r11d,ecx
xor r11d,edx
add ebx,r11d
mov r10d,DWORD PTR[((0*4))+rsi]
mov r11d,0ffffffffh
rol ebx,21
xor r11d,edx
add ebx,ecx
add eax,r8d
add ebx,r9d
add ecx,r14d
add edx,r15d
add rsi,64
cmp rsi,rdi
jb $L$loop
$L$end::
mov DWORD PTR[((0*4))+rbp],eax
mov DWORD PTR[((1*4))+rbp],ebx
mov DWORD PTR[((2*4))+rbp],ecx
mov DWORD PTR[((3*4))+rbp],edx
mov r15,QWORD PTR[rsp]
mov r14,QWORD PTR[8+rsp]
mov r12,QWORD PTR[16+rsp]
mov rbx,QWORD PTR[24+rsp]
mov rbp,QWORD PTR[32+rsp]
add rsp,40
$L$epilogue::
mov rdi,QWORD PTR[8+rsp] ;WIN64 epilogue
mov rsi,QWORD PTR[16+rsp]
DB 0F3h,0C3h ;repret
$L$SEH_end_md5_block_asm_data_order::
md5_block_asm_data_order ENDP
EXTERN __imp_RtlVirtualUnwind:NEAR
ALIGN 16
se_handler PROC PRIVATE
push rsi
push rdi
push rbx
push rbp
push r12
push r13
push r14
push r15
pushfq
sub rsp,64
mov rax,QWORD PTR[120+r8]
mov rbx,QWORD PTR[248+r8]
lea r10,QWORD PTR[$L$prologue]
cmp rbx,r10
jb $L$in_prologue
mov rax,QWORD PTR[152+r8]
lea r10,QWORD PTR[$L$epilogue]
cmp rbx,r10
jae $L$in_prologue
lea rax,QWORD PTR[40+rax]
mov rbp,QWORD PTR[((-8))+rax]
mov rbx,QWORD PTR[((-16))+rax]
mov r12,QWORD PTR[((-24))+rax]
mov r14,QWORD PTR[((-32))+rax]
mov r15,QWORD PTR[((-40))+rax]
mov QWORD PTR[144+r8],rbx
mov QWORD PTR[160+r8],rbp
mov QWORD PTR[216+r8],r12
mov QWORD PTR[232+r8],r14
mov QWORD PTR[240+r8],r15
$L$in_prologue::
mov rdi,QWORD PTR[8+rax]
mov rsi,QWORD PTR[16+rax]
mov QWORD PTR[152+r8],rax
mov QWORD PTR[168+r8],rsi
mov QWORD PTR[176+r8],rdi
mov rdi,QWORD PTR[40+r9]
mov rsi,r8
mov ecx,154
DD 0a548f3fch
mov rsi,r9
xor rcx,rcx
mov rdx,QWORD PTR[8+rsi]
mov r8,QWORD PTR[rsi]
mov r9,QWORD PTR[16+rsi]
mov r10,QWORD PTR[40+rsi]
lea r11,QWORD PTR[56+rsi]
lea r12,QWORD PTR[24+rsi]
mov QWORD PTR[32+rsp],r10
mov QWORD PTR[40+rsp],r11
mov QWORD PTR[48+rsp],r12
mov QWORD PTR[56+rsp],rcx
call QWORD PTR[__imp_RtlVirtualUnwind]
mov eax,1
add rsp,64
popfq
pop r15
pop r14
pop r13
pop r12
pop rbp
pop rbx
pop rdi
pop rsi
DB 0F3h,0C3h ;repret
se_handler ENDP
.text$ ENDS
.pdata SEGMENT READONLY ALIGN(4)
ALIGN 4
DD imagerel $L$SEH_begin_md5_block_asm_data_order
DD imagerel $L$SEH_end_md5_block_asm_data_order
DD imagerel $L$SEH_info_md5_block_asm_data_order
.pdata ENDS
.xdata SEGMENT READONLY ALIGN(8)
ALIGN 8
$L$SEH_info_md5_block_asm_data_order::
DB 9,0,0,0
DD imagerel se_handler
.xdata ENDS
END

586
deps/openssl/asm/x64-win32-masm/rc4/rc4-x86_64.asm

@ -0,0 +1,586 @@
OPTION DOTNAME
.text$ SEGMENT ALIGN(64) 'CODE'
PUBLIC RC4
ALIGN 16
RC4 PROC PUBLIC
mov QWORD PTR[8+rsp],rdi ;WIN64 prologue
mov QWORD PTR[16+rsp],rsi
mov rax,rsp
$L$SEH_begin_RC4::
mov rdi,rcx
mov rsi,rdx
mov rdx,r8
mov rcx,r9
or rsi,rsi
jne $L$entry
mov rdi,QWORD PTR[8+rsp] ;WIN64 epilogue
mov rsi,QWORD PTR[16+rsp]
DB 0F3h,0C3h ;repret
$L$entry::
push rbx
push r12
push r13
$L$prologue::
add rdi,8
mov r8d,DWORD PTR[((-8))+rdi]
mov r12d,DWORD PTR[((-4))+rdi]
cmp DWORD PTR[256+rdi],-1
je $L$RC4_CHAR
inc r8b
mov r9d,DWORD PTR[r8*4+rdi]
test rsi,-8
jz $L$loop1
jmp $L$loop8
ALIGN 16
$L$loop8::
add r12b,r9b
mov r10,r8
mov r13d,DWORD PTR[r12*4+rdi]
ror rax,8
inc r10b
mov r11d,DWORD PTR[r10*4+rdi]
cmp r12,r10
mov DWORD PTR[r12*4+rdi],r9d
cmove r11,r9
mov DWORD PTR[r8*4+rdi],r13d
add r13b,r9b
mov al,BYTE PTR[r13*4+rdi]
add r12b,r11b
mov r8,r10
mov r13d,DWORD PTR[r12*4+rdi]
ror rax,8
inc r8b
mov r9d,DWORD PTR[r8*4+rdi]
cmp r12,r8
mov DWORD PTR[r12*4+rdi],r11d
cmove r9,r11
mov DWORD PTR[r10*4+rdi],r13d
add r13b,r11b
mov al,BYTE PTR[r13*4+rdi]
add r12b,r9b
mov r10,r8
mov r13d,DWORD PTR[r12*4+rdi]
ror rax,8
inc r10b
mov r11d,DWORD PTR[r10*4+rdi]
cmp r12,r10
mov DWORD PTR[r12*4+rdi],r9d
cmove r11,r9
mov DWORD PTR[r8*4+rdi],r13d
add r13b,r9b
mov al,BYTE PTR[r13*4+rdi]
add r12b,r11b
mov r8,r10
mov r13d,DWORD PTR[r12*4+rdi]
ror rax,8
inc r8b
mov r9d,DWORD PTR[r8*4+rdi]
cmp r12,r8
mov DWORD PTR[r12*4+rdi],r11d
cmove r9,r11
mov DWORD PTR[r10*4+rdi],r13d
add r13b,r11b
mov al,BYTE PTR[r13*4+rdi]
add r12b,r9b
mov r10,r8
mov r13d,DWORD PTR[r12*4+rdi]
ror rax,8
inc r10b
mov r11d,DWORD PTR[r10*4+rdi]
cmp r12,r10
mov DWORD PTR[r12*4+rdi],r9d
cmove r11,r9
mov DWORD PTR[r8*4+rdi],r13d
add r13b,r9b
mov al,BYTE PTR[r13*4+rdi]
add r12b,r11b
mov r8,r10
mov r13d,DWORD PTR[r12*4+rdi]
ror rax,8
inc r8b
mov r9d,DWORD PTR[r8*4+rdi]
cmp r12,r8
mov DWORD PTR[r12*4+rdi],r11d
cmove r9,r11
mov DWORD PTR[r10*4+rdi],r13d
add r13b,r11b
mov al,BYTE PTR[r13*4+rdi]
add r12b,r9b
mov r10,r8
mov r13d,DWORD PTR[r12*4+rdi]
ror rax,8
inc r10b
mov r11d,DWORD PTR[r10*4+rdi]
cmp r12,r10
mov DWORD PTR[r12*4+rdi],r9d
cmove r11,r9
mov DWORD PTR[r8*4+rdi],r13d
add r13b,r9b
mov al,BYTE PTR[r13*4+rdi]
add r12b,r11b
mov r8,r10
mov r13d,DWORD PTR[r12*4+rdi]
ror rax,8
inc r8b
mov r9d,DWORD PTR[r8*4+rdi]
cmp r12,r8
mov DWORD PTR[r12*4+rdi],r11d
cmove r9,r11
mov DWORD PTR[r10*4+rdi],r13d
add r13b,r11b
mov al,BYTE PTR[r13*4+rdi]
ror rax,8
sub rsi,8
xor rax,QWORD PTR[rdx]
add rdx,8
mov QWORD PTR[rcx],rax
add rcx,8
test rsi,-8
jnz $L$loop8
cmp rsi,0
jne $L$loop1
jmp $L$exit
ALIGN 16
$L$loop1::
add r12b,r9b
mov r13d,DWORD PTR[r12*4+rdi]
mov DWORD PTR[r12*4+rdi],r9d
mov DWORD PTR[r8*4+rdi],r13d
add r9b,r13b
inc r8b
mov r13d,DWORD PTR[r9*4+rdi]
mov r9d,DWORD PTR[r8*4+rdi]
xor r13b,BYTE PTR[rdx]
inc rdx
mov BYTE PTR[rcx],r13b
inc rcx
dec rsi
jnz $L$loop1
jmp $L$exit
ALIGN 16
$L$RC4_CHAR::
add r8b,1
movzx r9d,BYTE PTR[r8*1+rdi]
test rsi,-8
jz $L$cloop1
cmp DWORD PTR[260+rdi],0
jnz $L$cloop1
jmp $L$cloop8
ALIGN 16
$L$cloop8::
mov eax,DWORD PTR[rdx]
mov ebx,DWORD PTR[4+rdx]
add r12b,r9b
lea r10,QWORD PTR[1+r8]
movzx r13d,BYTE PTR[r12*1+rdi]
movzx r10d,r10b
movzx r11d,BYTE PTR[r10*1+rdi]
mov BYTE PTR[r12*1+rdi],r9b
cmp r12,r10
mov BYTE PTR[r8*1+rdi],r13b
jne $L$cmov0
mov r11,r9
$L$cmov0::
add r13b,r9b
xor al,BYTE PTR[r13*1+rdi]
ror eax,8
add r12b,r11b
lea r8,QWORD PTR[1+r10]
movzx r13d,BYTE PTR[r12*1+rdi]
movzx r8d,r8b
movzx r9d,BYTE PTR[r8*1+rdi]
mov BYTE PTR[r12*1+rdi],r11b
cmp r12,r8
mov BYTE PTR[r10*1+rdi],r13b
jne $L$cmov1
mov r9,r11
$L$cmov1::
add r13b,r11b
xor al,BYTE PTR[r13*1+rdi]
ror eax,8
add r12b,r9b
lea r10,QWORD PTR[1+r8]
movzx r13d,BYTE PTR[r12*1+rdi]
movzx r10d,r10b
movzx r11d,BYTE PTR[r10*1+rdi]
mov BYTE PTR[r12*1+rdi],r9b
cmp r12,r10
mov BYTE PTR[r8*1+rdi],r13b
jne $L$cmov2
mov r11,r9
$L$cmov2::
add r13b,r9b
xor al,BYTE PTR[r13*1+rdi]
ror eax,8
add r12b,r11b
lea r8,QWORD PTR[1+r10]
movzx r13d,BYTE PTR[r12*1+rdi]
movzx r8d,r8b
movzx r9d,BYTE PTR[r8*1+rdi]
mov BYTE PTR[r12*1+rdi],r11b
cmp r12,r8
mov BYTE PTR[r10*1+rdi],r13b
jne $L$cmov3
mov r9,r11
$L$cmov3::
add r13b,r11b
xor al,BYTE PTR[r13*1+rdi]
ror eax,8
add r12b,r9b
lea r10,QWORD PTR[1+r8]
movzx r13d,BYTE PTR[r12*1+rdi]
movzx r10d,r10b
movzx r11d,BYTE PTR[r10*1+rdi]
mov BYTE PTR[r12*1+rdi],r9b
cmp r12,r10
mov BYTE PTR[r8*1+rdi],r13b
jne $L$cmov4
mov r11,r9
$L$cmov4::
add r13b,r9b
xor bl,BYTE PTR[r13*1+rdi]
ror ebx,8
add r12b,r11b
lea r8,QWORD PTR[1+r10]
movzx r13d,BYTE PTR[r12*1+rdi]
movzx r8d,r8b
movzx r9d,BYTE PTR[r8*1+rdi]
mov BYTE PTR[r12*1+rdi],r11b
cmp r12,r8
mov BYTE PTR[r10*1+rdi],r13b
jne $L$cmov5
mov r9,r11
$L$cmov5::
add r13b,r11b
xor bl,BYTE PTR[r13*1+rdi]
ror ebx,8
add r12b,r9b
lea r10,QWORD PTR[1+r8]
movzx r13d,BYTE PTR[r12*1+rdi]
movzx r10d,r10b
movzx r11d,BYTE PTR[r10*1+rdi]
mov BYTE PTR[r12*1+rdi],r9b
cmp r12,r10
mov BYTE PTR[r8*1+rdi],r13b
jne $L$cmov6
mov r11,r9
$L$cmov6::
add r13b,r9b
xor bl,BYTE PTR[r13*1+rdi]
ror ebx,8
add r12b,r11b
lea r8,QWORD PTR[1+r10]
movzx r13d,BYTE PTR[r12*1+rdi]
movzx r8d,r8b
movzx r9d,BYTE PTR[r8*1+rdi]
mov BYTE PTR[r12*1+rdi],r11b
cmp r12,r8
mov BYTE PTR[r10*1+rdi],r13b
jne $L$cmov7
mov r9,r11
$L$cmov7::
add r13b,r11b
xor bl,BYTE PTR[r13*1+rdi]
ror ebx,8
lea rsi,QWORD PTR[((-8))+rsi]
mov DWORD PTR[rcx],eax
lea rdx,QWORD PTR[8+rdx]
mov DWORD PTR[4+rcx],ebx
lea rcx,QWORD PTR[8+rcx]
test rsi,-8
jnz $L$cloop8
cmp rsi,0
jne $L$cloop1
jmp $L$exit
ALIGN 16
$L$cloop1::
add r12b,r9b
movzx r13d,BYTE PTR[r12*1+rdi]
mov BYTE PTR[r12*1+rdi],r9b
mov BYTE PTR[r8*1+rdi],r13b
add r13b,r9b
add r8b,1
movzx r13d,r13b
movzx r8d,r8b
movzx r13d,BYTE PTR[r13*1+rdi]
movzx r9d,BYTE PTR[r8*1+rdi]
xor r13b,BYTE PTR[rdx]
lea rdx,QWORD PTR[1+rdx]
mov BYTE PTR[rcx],r13b
lea rcx,QWORD PTR[1+rcx]
sub rsi,1
jnz $L$cloop1
jmp $L$exit
ALIGN 16
$L$exit::
sub r8b,1
mov DWORD PTR[((-8))+rdi],r8d
mov DWORD PTR[((-4))+rdi],r12d
mov r13,QWORD PTR[rsp]
mov r12,QWORD PTR[8+rsp]
mov rbx,QWORD PTR[16+rsp]
add rsp,24
$L$epilogue::
mov rdi,QWORD PTR[8+rsp] ;WIN64 epilogue
mov rsi,QWORD PTR[16+rsp]
DB 0F3h,0C3h ;repret
$L$SEH_end_RC4::
RC4 ENDP
EXTERN OPENSSL_ia32cap_P:NEAR
PUBLIC RC4_set_key
ALIGN 16
RC4_set_key PROC PUBLIC
mov QWORD PTR[8+rsp],rdi ;WIN64 prologue
mov QWORD PTR[16+rsp],rsi
mov rax,rsp
$L$SEH_begin_RC4_set_key::
mov rdi,rcx
mov rsi,rdx
mov rdx,r8
lea rdi,QWORD PTR[8+rdi]
lea rdx,QWORD PTR[rsi*1+rdx]
neg rsi
mov rcx,rsi
xor eax,eax
xor r9,r9
xor r10,r10
xor r11,r11
mov r8d,DWORD PTR[OPENSSL_ia32cap_P]
bt r8d,20
jnc $L$w1stloop
bt r8d,30
setc r9b
mov DWORD PTR[260+rdi],r9d
jmp $L$c1stloop
ALIGN 16
$L$w1stloop::
mov DWORD PTR[rax*4+rdi],eax
add al,1
jnc $L$w1stloop
xor r9,r9
xor r8,r8
ALIGN 16
$L$w2ndloop::
mov r10d,DWORD PTR[r9*4+rdi]
add r8b,BYTE PTR[rsi*1+rdx]
add r8b,r10b
add rsi,1
mov r11d,DWORD PTR[r8*4+rdi]
cmovz rsi,rcx
mov DWORD PTR[r8*4+rdi],r10d
mov DWORD PTR[r9*4+rdi],r11d
add r9b,1
jnc $L$w2ndloop
jmp $L$exit_key
ALIGN 16
$L$c1stloop::
mov BYTE PTR[rax*1+rdi],al
add al,1
jnc $L$c1stloop
xor r9,r9
xor r8,r8
ALIGN 16
$L$c2ndloop::
mov r10b,BYTE PTR[r9*1+rdi]
add r8b,BYTE PTR[rsi*1+rdx]
add r8b,r10b
add rsi,1
mov r11b,BYTE PTR[r8*1+rdi]
jnz $L$cnowrap
mov rsi,rcx
$L$cnowrap::
mov BYTE PTR[r8*1+rdi],r10b
mov BYTE PTR[r9*1+rdi],r11b
add r9b,1
jnc $L$c2ndloop
mov DWORD PTR[256+rdi],-1
ALIGN 16
$L$exit_key::
xor eax,eax
mov DWORD PTR[((-8))+rdi],eax
mov DWORD PTR[((-4))+rdi],eax
mov rdi,QWORD PTR[8+rsp] ;WIN64 epilogue
mov rsi,QWORD PTR[16+rsp]
DB 0F3h,0C3h ;repret
$L$SEH_end_RC4_set_key::
RC4_set_key ENDP
PUBLIC RC4_options
ALIGN 16
RC4_options PROC PUBLIC
lea rax,QWORD PTR[$L$opts]
mov edx,DWORD PTR[OPENSSL_ia32cap_P]
bt edx,20
jnc $L$done
add rax,12
bt edx,30
jnc $L$done
add rax,13
$L$done::
DB 0F3h,0C3h ;repret
ALIGN 64
$L$opts::
DB 114,99,52,40,56,120,44,105,110,116,41,0
DB 114,99,52,40,56,120,44,99,104,97,114,41,0
DB 114,99,52,40,49,120,44,99,104,97,114,41,0
DB 82,67,52,32,102,111,114,32,120,56,54,95,54,52,44,32
DB 67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97
DB 112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103
DB 62,0
ALIGN 64
RC4_options ENDP
EXTERN __imp_RtlVirtualUnwind:NEAR
ALIGN 16
stream_se_handler PROC PRIVATE
push rsi
push rdi
push rbx
push rbp
push r12
push r13
push r14
push r15
pushfq
sub rsp,64
mov rax,QWORD PTR[120+r8]
mov rbx,QWORD PTR[248+r8]
lea r10,QWORD PTR[$L$prologue]
cmp rbx,r10
jb $L$in_prologue
mov rax,QWORD PTR[152+r8]
lea r10,QWORD PTR[$L$epilogue]
cmp rbx,r10
jae $L$in_prologue
lea rax,QWORD PTR[24+rax]
mov rbx,QWORD PTR[((-8))+rax]
mov r12,QWORD PTR[((-16))+rax]
mov r13,QWORD PTR[((-24))+rax]
mov QWORD PTR[144+r8],rbx
mov QWORD PTR[216+r8],r12
mov QWORD PTR[224+r8],r13
$L$in_prologue::
mov rdi,QWORD PTR[8+rax]
mov rsi,QWORD PTR[16+rax]
mov QWORD PTR[152+r8],rax
mov QWORD PTR[168+r8],rsi
mov QWORD PTR[176+r8],rdi
jmp $L$common_seh_exit
stream_se_handler ENDP
ALIGN 16
key_se_handler PROC PRIVATE
push rsi
push rdi
push rbx
push rbp
push r12
push r13
push r14
push r15
pushfq
sub rsp,64
mov rax,QWORD PTR[152+r8]
mov rdi,QWORD PTR[8+rax]
mov rsi,QWORD PTR[16+rax]
mov QWORD PTR[168+r8],rsi
mov QWORD PTR[176+r8],rdi
$L$common_seh_exit::
mov rdi,QWORD PTR[40+r9]
mov rsi,r8
mov ecx,154
DD 0a548f3fch
mov rsi,r9
xor rcx,rcx
mov rdx,QWORD PTR[8+rsi]
mov r8,QWORD PTR[rsi]
mov r9,QWORD PTR[16+rsi]
mov r10,QWORD PTR[40+rsi]
lea r11,QWORD PTR[56+rsi]
lea r12,QWORD PTR[24+rsi]
mov QWORD PTR[32+rsp],r10
mov QWORD PTR[40+rsp],r11
mov QWORD PTR[48+rsp],r12
mov QWORD PTR[56+rsp],rcx
call QWORD PTR[__imp_RtlVirtualUnwind]
mov eax,1
add rsp,64
popfq
pop r15
pop r14
pop r13
pop r12
pop rbp
pop rbx
pop rdi
pop rsi
DB 0F3h,0C3h ;repret
key_se_handler ENDP
.text$ ENDS
.pdata SEGMENT READONLY ALIGN(4)
ALIGN 4
DD imagerel $L$SEH_begin_RC4
DD imagerel $L$SEH_end_RC4
DD imagerel $L$SEH_info_RC4
DD imagerel $L$SEH_begin_RC4_set_key
DD imagerel $L$SEH_end_RC4_set_key
DD imagerel $L$SEH_info_RC4_set_key
.pdata ENDS
.xdata SEGMENT READONLY ALIGN(8)
ALIGN 8
$L$SEH_info_RC4::
DB 9,0,0,0
DD imagerel stream_se_handler
$L$SEH_info_RC4_set_key::
DB 9,0,0,0
DD imagerel key_se_handler
.xdata ENDS
END

1394
deps/openssl/asm/x64-win32-masm/sha/sha1-x86_64.asm

File diff suppressed because it is too large

2085
deps/openssl/asm/x64-win32-masm/sha/sha512-x86_64.asm

File diff suppressed because it is too large

972
deps/openssl/asm/x64-win32-masm/whrlpool/wp-x86_64.asm

@ -0,0 +1,972 @@
OPTION DOTNAME
.text$ SEGMENT ALIGN(64) 'CODE'
PUBLIC whirlpool_block
ALIGN 16
whirlpool_block PROC PUBLIC
mov QWORD PTR[8+rsp],rdi ;WIN64 prologue
mov QWORD PTR[16+rsp],rsi
mov rax,rsp
$L$SEH_begin_whirlpool_block::
mov rdi,rcx
mov rsi,rdx
mov rdx,r8
push rbx
push rbp
push r12
push r13
push r14
push r15
mov r11,rsp
sub rsp,128+40
and rsp,-64
lea r10,QWORD PTR[128+rsp]
mov QWORD PTR[r10],rdi
mov QWORD PTR[8+r10],rsi
mov QWORD PTR[16+r10],rdx
mov QWORD PTR[32+r10],r11
$L$prologue::
mov rbx,r10
lea rbp,QWORD PTR[$L$table]
xor rcx,rcx
xor rdx,rdx
mov r8,QWORD PTR[((0*8))+rdi]
mov r9,QWORD PTR[((1*8))+rdi]
mov r10,QWORD PTR[((2*8))+rdi]
mov r11,QWORD PTR[((3*8))+rdi]
mov r12,QWORD PTR[((4*8))+rdi]
mov r13,QWORD PTR[((5*8))+rdi]
mov r14,QWORD PTR[((6*8))+rdi]
mov r15,QWORD PTR[((7*8))+rdi]
$L$outerloop::
mov QWORD PTR[((0*8))+rsp],r8
mov QWORD PTR[((1*8))+rsp],r9
mov QWORD PTR[((2*8))+rsp],r10
mov QWORD PTR[((3*8))+rsp],r11
mov QWORD PTR[((4*8))+rsp],r12
mov QWORD PTR[((5*8))+rsp],r13
mov QWORD PTR[((6*8))+rsp],r14
mov QWORD PTR[((7*8))+rsp],r15
xor r8,QWORD PTR[((0*8))+rsi]
xor r9,QWORD PTR[((1*8))+rsi]
xor r10,QWORD PTR[((2*8))+rsi]
xor r11,QWORD PTR[((3*8))+rsi]
xor r12,QWORD PTR[((4*8))+rsi]
xor r13,QWORD PTR[((5*8))+rsi]
xor r14,QWORD PTR[((6*8))+rsi]
xor r15,QWORD PTR[((7*8))+rsi]
mov QWORD PTR[((64+0*8))+rsp],r8
mov QWORD PTR[((64+1*8))+rsp],r9
mov QWORD PTR[((64+2*8))+rsp],r10
mov QWORD PTR[((64+3*8))+rsp],r11
mov QWORD PTR[((64+4*8))+rsp],r12
mov QWORD PTR[((64+5*8))+rsp],r13
mov QWORD PTR[((64+6*8))+rsp],r14
mov QWORD PTR[((64+7*8))+rsp],r15
xor rsi,rsi
mov QWORD PTR[24+rbx],rsi
ALIGN 16
$L$round::
mov r8,QWORD PTR[4096+rsi*8+rbp]
mov eax,DWORD PTR[rsp]
mov ebx,DWORD PTR[4+rsp]
mov cl,al
mov dl,ah
lea rsi,QWORD PTR[rcx*1+rcx]
lea rdi,QWORD PTR[rdx*1+rdx]
shr eax,16
xor r8,QWORD PTR[rsi*8+rbp]
mov r9,QWORD PTR[7+rdi*8+rbp]
mov cl,al
mov dl,ah
mov eax,DWORD PTR[((0*8+8))+rsp]
lea rsi,QWORD PTR[rcx*1+rcx]
lea rdi,QWORD PTR[rdx*1+rdx]
mov r10,QWORD PTR[6+rsi*8+rbp]
mov r11,QWORD PTR[5+rdi*8+rbp]
mov cl,bl
mov dl,bh
lea rsi,QWORD PTR[rcx*1+rcx]
lea rdi,QWORD PTR[rdx*1+rdx]
shr ebx,16
mov r12,QWORD PTR[4+rsi*8+rbp]
mov r13,QWORD PTR[3+rdi*8+rbp]
mov cl,bl
mov dl,bh
mov ebx,DWORD PTR[((0*8+8+4))+rsp]
lea rsi,QWORD PTR[rcx*1+rcx]
lea rdi,QWORD PTR[rdx*1+rdx]
mov r14,QWORD PTR[2+rsi*8+rbp]
mov r15,QWORD PTR[1+rdi*8+rbp]
mov cl,al
mov dl,ah
lea rsi,QWORD PTR[rcx*1+rcx]
lea rdi,QWORD PTR[rdx*1+rdx]
shr eax,16
xor r9,QWORD PTR[rsi*8+rbp]
xor r10,QWORD PTR[7+rdi*8+rbp]
mov cl,al
mov dl,ah
mov eax,DWORD PTR[((1*8+8))+rsp]
lea rsi,QWORD PTR[rcx*1+rcx]
lea rdi,QWORD PTR[rdx*1+rdx]
xor r11,QWORD PTR[6+rsi*8+rbp]
xor r12,QWORD PTR[5+rdi*8+rbp]
mov cl,bl
mov dl,bh
lea rsi,QWORD PTR[rcx*1+rcx]
lea rdi,QWORD PTR[rdx*1+rdx]
shr ebx,16
xor r13,QWORD PTR[4+rsi*8+rbp]
xor r14,QWORD PTR[3+rdi*8+rbp]
mov cl,bl
mov dl,bh
mov ebx,DWORD PTR[((1*8+8+4))+rsp]
lea rsi,QWORD PTR[rcx*1+rcx]
lea rdi,QWORD PTR[rdx*1+rdx]
xor r15,QWORD PTR[2+rsi*8+rbp]
xor r8,QWORD PTR[1+rdi*8+rbp]
mov cl,al
mov dl,ah
lea rsi,QWORD PTR[rcx*1+rcx]
lea rdi,QWORD PTR[rdx*1+rdx]
shr eax,16
xor r10,QWORD PTR[rsi*8+rbp]
xor r11,QWORD PTR[7+rdi*8+rbp]
mov cl,al
mov dl,ah
mov eax,DWORD PTR[((2*8+8))+rsp]
lea rsi,QWORD PTR[rcx*1+rcx]
lea rdi,QWORD PTR[rdx*1+rdx]
xor r12,QWORD PTR[6+rsi*8+rbp]
xor r13,QWORD PTR[5+rdi*8+rbp]
mov cl,bl
mov dl,bh
lea rsi,QWORD PTR[rcx*1+rcx]
lea rdi,QWORD PTR[rdx*1+rdx]
shr ebx,16
xor r14,QWORD PTR[4+rsi*8+rbp]
xor r15,QWORD PTR[3+rdi*8+rbp]
mov cl,bl
mov dl,bh
mov ebx,DWORD PTR[((2*8+8+4))+rsp]
lea rsi,QWORD PTR[rcx*1+rcx]
lea rdi,QWORD PTR[rdx*1+rdx]
xor r8,QWORD PTR[2+rsi*8+rbp]
xor r9,QWORD PTR[1+rdi*8+rbp]
mov cl,al
mov dl,ah
lea rsi,QWORD PTR[rcx*1+rcx]
lea rdi,QWORD PTR[rdx*1+rdx]
shr eax,16
xor r11,QWORD PTR[rsi*8+rbp]
xor r12,QWORD PTR[7+rdi*8+rbp]
mov cl,al
mov dl,ah
mov eax,DWORD PTR[((3*8+8))+rsp]
lea rsi,QWORD PTR[rcx*1+rcx]
lea rdi,QWORD PTR[rdx*1+rdx]
xor r13,QWORD PTR[6+rsi*8+rbp]
xor r14,QWORD PTR[5+rdi*8+rbp]
mov cl,bl
mov dl,bh
lea rsi,QWORD PTR[rcx*1+rcx]
lea rdi,QWORD PTR[rdx*1+rdx]
shr ebx,16
xor r15,QWORD PTR[4+rsi*8+rbp]
xor r8,QWORD PTR[3+rdi*8+rbp]
mov cl,bl
mov dl,bh
mov ebx,DWORD PTR[((3*8+8+4))+rsp]
lea rsi,QWORD PTR[rcx*1+rcx]
lea rdi,QWORD PTR[rdx*1+rdx]
xor r9,QWORD PTR[2+rsi*8+rbp]
xor r10,QWORD PTR[1+rdi*8+rbp]
mov cl,al
mov dl,ah
lea rsi,QWORD PTR[rcx*1+rcx]
lea rdi,QWORD PTR[rdx*1+rdx]
shr eax,16
xor r12,QWORD PTR[rsi*8+rbp]
xor r13,QWORD PTR[7+rdi*8+rbp]
mov cl,al
mov dl,ah
mov eax,DWORD PTR[((4*8+8))+rsp]
lea rsi,QWORD PTR[rcx*1+rcx]
lea rdi,QWORD PTR[rdx*1+rdx]
xor r14,QWORD PTR[6+rsi*8+rbp]
xor r15,QWORD PTR[5+rdi*8+rbp]
mov cl,bl
mov dl,bh
lea rsi,QWORD PTR[rcx*1+rcx]
lea rdi,QWORD PTR[rdx*1+rdx]
shr ebx,16
xor r8,QWORD PTR[4+rsi*8+rbp]
xor r9,QWORD PTR[3+rdi*8+rbp]
mov cl,bl
mov dl,bh
mov ebx,DWORD PTR[((4*8+8+4))+rsp]
lea rsi,QWORD PTR[rcx*1+rcx]
lea rdi,QWORD PTR[rdx*1+rdx]
xor r10,QWORD PTR[2+rsi*8+rbp]
xor r11,QWORD PTR[1+rdi*8+rbp]
mov cl,al
mov dl,ah
lea rsi,QWORD PTR[rcx*1+rcx]
lea rdi,QWORD PTR[rdx*1+rdx]
shr eax,16
xor r13,QWORD PTR[rsi*8+rbp]
xor r14,QWORD PTR[7+rdi*8+rbp]
mov cl,al
mov dl,ah
mov eax,DWORD PTR[((5*8+8))+rsp]
lea rsi,QWORD PTR[rcx*1+rcx]
lea rdi,QWORD PTR[rdx*1+rdx]
xor r15,QWORD PTR[6+rsi*8+rbp]
xor r8,QWORD PTR[5+rdi*8+rbp]
mov cl,bl
mov dl,bh
lea rsi,QWORD PTR[rcx*1+rcx]
lea rdi,QWORD PTR[rdx*1+rdx]
shr ebx,16
xor r9,QWORD PTR[4+rsi*8+rbp]
xor r10,QWORD PTR[3+rdi*8+rbp]
mov cl,bl
mov dl,bh
mov ebx,DWORD PTR[((5*8+8+4))+rsp]
lea rsi,QWORD PTR[rcx*1+rcx]
lea rdi,QWORD PTR[rdx*1+rdx]
xor r11,QWORD PTR[2+rsi*8+rbp]
xor r12,QWORD PTR[1+rdi*8+rbp]
mov cl,al
mov dl,ah
lea rsi,QWORD PTR[rcx*1+rcx]
lea rdi,QWORD PTR[rdx*1+rdx]
shr eax,16
xor r14,QWORD PTR[rsi*8+rbp]
xor r15,QWORD PTR[7+rdi*8+rbp]
mov cl,al
mov dl,ah
mov eax,DWORD PTR[((6*8+8))+rsp]
lea rsi,QWORD PTR[rcx*1+rcx]
lea rdi,QWORD PTR[rdx*1+rdx]
xor r8,QWORD PTR[6+rsi*8+rbp]
xor r9,QWORD PTR[5+rdi*8+rbp]
mov cl,bl
mov dl,bh
lea rsi,QWORD PTR[rcx*1+rcx]
lea rdi,QWORD PTR[rdx*1+rdx]
shr ebx,16
xor r10,QWORD PTR[4+rsi*8+rbp]
xor r11,QWORD PTR[3+rdi*8+rbp]
mov cl,bl
mov dl,bh
mov ebx,DWORD PTR[((6*8+8+4))+rsp]
lea rsi,QWORD PTR[rcx*1+rcx]
lea rdi,QWORD PTR[rdx*1+rdx]
xor r12,QWORD PTR[2+rsi*8+rbp]
xor r13,QWORD PTR[1+rdi*8+rbp]
mov cl,al
mov dl,ah
lea rsi,QWORD PTR[rcx*1+rcx]
lea rdi,QWORD PTR[rdx*1+rdx]
shr eax,16
xor r15,QWORD PTR[rsi*8+rbp]
xor r8,QWORD PTR[7+rdi*8+rbp]
mov cl,al
mov dl,ah
mov eax,DWORD PTR[((7*8+8))+rsp]
lea rsi,QWORD PTR[rcx*1+rcx]
lea rdi,QWORD PTR[rdx*1+rdx]
xor r9,QWORD PTR[6+rsi*8+rbp]
xor r10,QWORD PTR[5+rdi*8+rbp]
mov cl,bl
mov dl,bh
lea rsi,QWORD PTR[rcx*1+rcx]
lea rdi,QWORD PTR[rdx*1+rdx]
shr ebx,16
xor r11,QWORD PTR[4+rsi*8+rbp]
xor r12,QWORD PTR[3+rdi*8+rbp]
mov cl,bl
mov dl,bh
mov ebx,DWORD PTR[((7*8+8+4))+rsp]
lea rsi,QWORD PTR[rcx*1+rcx]
lea rdi,QWORD PTR[rdx*1+rdx]
xor r13,QWORD PTR[2+rsi*8+rbp]
xor r14,QWORD PTR[1+rdi*8+rbp]
mov QWORD PTR[((0*8))+rsp],r8
mov QWORD PTR[((1*8))+rsp],r9
mov QWORD PTR[((2*8))+rsp],r10
mov QWORD PTR[((3*8))+rsp],r11
mov QWORD PTR[((4*8))+rsp],r12
mov QWORD PTR[((5*8))+rsp],r13
mov QWORD PTR[((6*8))+rsp],r14
mov QWORD PTR[((7*8))+rsp],r15
mov cl,al
mov dl,ah
lea rsi,QWORD PTR[rcx*1+rcx]
lea rdi,QWORD PTR[rdx*1+rdx]
shr eax,16
xor r8,QWORD PTR[rsi*8+rbp]
xor r9,QWORD PTR[7+rdi*8+rbp]
mov cl,al
mov dl,ah
mov eax,DWORD PTR[((64+0*8+8))+rsp]
lea rsi,QWORD PTR[rcx*1+rcx]
lea rdi,QWORD PTR[rdx*1+rdx]
xor r10,QWORD PTR[6+rsi*8+rbp]
xor r11,QWORD PTR[5+rdi*8+rbp]
mov cl,bl
mov dl,bh
lea rsi,QWORD PTR[rcx*1+rcx]
lea rdi,QWORD PTR[rdx*1+rdx]
shr ebx,16
xor r12,QWORD PTR[4+rsi*8+rbp]
xor r13,QWORD PTR[3+rdi*8+rbp]
mov cl,bl
mov dl,bh
mov ebx,DWORD PTR[((64+0*8+8+4))+rsp]
lea rsi,QWORD PTR[rcx*1+rcx]
lea rdi,QWORD PTR[rdx*1+rdx]
xor r14,QWORD PTR[2+rsi*8+rbp]
xor r15,QWORD PTR[1+rdi*8+rbp]
mov cl,al
mov dl,ah
lea rsi,QWORD PTR[rcx*1+rcx]
lea rdi,QWORD PTR[rdx*1+rdx]
shr eax,16
xor r9,QWORD PTR[rsi*8+rbp]
xor r10,QWORD PTR[7+rdi*8+rbp]
mov cl,al
mov dl,ah
mov eax,DWORD PTR[((64+1*8+8))+rsp]
lea rsi,QWORD PTR[rcx*1+rcx]
lea rdi,QWORD PTR[rdx*1+rdx]
xor r11,QWORD PTR[6+rsi*8+rbp]
xor r12,QWORD PTR[5+rdi*8+rbp]
mov cl,bl
mov dl,bh
lea rsi,QWORD PTR[rcx*1+rcx]
lea rdi,QWORD PTR[rdx*1+rdx]
shr ebx,16
xor r13,QWORD PTR[4+rsi*8+rbp]
xor r14,QWORD PTR[3+rdi*8+rbp]
mov cl,bl
mov dl,bh
mov ebx,DWORD PTR[((64+1*8+8+4))+rsp]
lea rsi,QWORD PTR[rcx*1+rcx]
lea rdi,QWORD PTR[rdx*1+rdx]
xor r15,QWORD PTR[2+rsi*8+rbp]
xor r8,QWORD PTR[1+rdi*8+rbp]
mov cl,al
mov dl,ah
lea rsi,QWORD PTR[rcx*1+rcx]
lea rdi,QWORD PTR[rdx*1+rdx]
shr eax,16
xor r10,QWORD PTR[rsi*8+rbp]
xor r11,QWORD PTR[7+rdi*8+rbp]
mov cl,al
mov dl,ah
mov eax,DWORD PTR[((64+2*8+8))+rsp]
lea rsi,QWORD PTR[rcx*1+rcx]
lea rdi,QWORD PTR[rdx*1+rdx]
xor r12,QWORD PTR[6+rsi*8+rbp]
xor r13,QWORD PTR[5+rdi*8+rbp]
mov cl,bl
mov dl,bh
lea rsi,QWORD PTR[rcx*1+rcx]
lea rdi,QWORD PTR[rdx*1+rdx]
shr ebx,16
xor r14,QWORD PTR[4+rsi*8+rbp]
xor r15,QWORD PTR[3+rdi*8+rbp]
mov cl,bl
mov dl,bh
mov ebx,DWORD PTR[((64+2*8+8+4))+rsp]
lea rsi,QWORD PTR[rcx*1+rcx]
lea rdi,QWORD PTR[rdx*1+rdx]
xor r8,QWORD PTR[2+rsi*8+rbp]
xor r9,QWORD PTR[1+rdi*8+rbp]
mov cl,al
mov dl,ah
lea rsi,QWORD PTR[rcx*1+rcx]
lea rdi,QWORD PTR[rdx*1+rdx]
shr eax,16
xor r11,QWORD PTR[rsi*8+rbp]
xor r12,QWORD PTR[7+rdi*8+rbp]
mov cl,al
mov dl,ah
mov eax,DWORD PTR[((64+3*8+8))+rsp]
lea rsi,QWORD PTR[rcx*1+rcx]
lea rdi,QWORD PTR[rdx*1+rdx]
xor r13,QWORD PTR[6+rsi*8+rbp]
xor r14,QWORD PTR[5+rdi*8+rbp]
mov cl,bl
mov dl,bh
lea rsi,QWORD PTR[rcx*1+rcx]
lea rdi,QWORD PTR[rdx*1+rdx]
shr ebx,16
xor r15,QWORD PTR[4+rsi*8+rbp]
xor r8,QWORD PTR[3+rdi*8+rbp]
mov cl,bl
mov dl,bh
mov ebx,DWORD PTR[((64+3*8+8+4))+rsp]
lea rsi,QWORD PTR[rcx*1+rcx]
lea rdi,QWORD PTR[rdx*1+rdx]
xor r9,QWORD PTR[2+rsi*8+rbp]
xor r10,QWORD PTR[1+rdi*8+rbp]
mov cl,al
mov dl,ah
lea rsi,QWORD PTR[rcx*1+rcx]
lea rdi,QWORD PTR[rdx*1+rdx]
shr eax,16
xor r12,QWORD PTR[rsi*8+rbp]
xor r13,QWORD PTR[7+rdi*8+rbp]
mov cl,al
mov dl,ah
mov eax,DWORD PTR[((64+4*8+8))+rsp]
lea rsi,QWORD PTR[rcx*1+rcx]
lea rdi,QWORD PTR[rdx*1+rdx]
xor r14,QWORD PTR[6+rsi*8+rbp]
xor r15,QWORD PTR[5+rdi*8+rbp]
mov cl,bl
mov dl,bh
lea rsi,QWORD PTR[rcx*1+rcx]
lea rdi,QWORD PTR[rdx*1+rdx]
shr ebx,16
xor r8,QWORD PTR[4+rsi*8+rbp]
xor r9,QWORD PTR[3+rdi*8+rbp]
mov cl,bl
mov dl,bh
mov ebx,DWORD PTR[((64+4*8+8+4))+rsp]
lea rsi,QWORD PTR[rcx*1+rcx]
lea rdi,QWORD PTR[rdx*1+rdx]
xor r10,QWORD PTR[2+rsi*8+rbp]
xor r11,QWORD PTR[1+rdi*8+rbp]
mov cl,al
mov dl,ah
lea rsi,QWORD PTR[rcx*1+rcx]
lea rdi,QWORD PTR[rdx*1+rdx]
shr eax,16
xor r13,QWORD PTR[rsi*8+rbp]
xor r14,QWORD PTR[7+rdi*8+rbp]
mov cl,al
mov dl,ah
mov eax,DWORD PTR[((64+5*8+8))+rsp]
lea rsi,QWORD PTR[rcx*1+rcx]
lea rdi,QWORD PTR[rdx*1+rdx]
xor r15,QWORD PTR[6+rsi*8+rbp]
xor r8,QWORD PTR[5+rdi*8+rbp]
mov cl,bl
mov dl,bh
lea rsi,QWORD PTR[rcx*1+rcx]
lea rdi,QWORD PTR[rdx*1+rdx]
shr ebx,16
xor r9,QWORD PTR[4+rsi*8+rbp]
xor r10,QWORD PTR[3+rdi*8+rbp]
mov cl,bl
mov dl,bh
mov ebx,DWORD PTR[((64+5*8+8+4))+rsp]
lea rsi,QWORD PTR[rcx*1+rcx]
lea rdi,QWORD PTR[rdx*1+rdx]
xor r11,QWORD PTR[2+rsi*8+rbp]
xor r12,QWORD PTR[1+rdi*8+rbp]
mov cl,al
mov dl,ah
lea rsi,QWORD PTR[rcx*1+rcx]
lea rdi,QWORD PTR[rdx*1+rdx]
shr eax,16
xor r14,QWORD PTR[rsi*8+rbp]
xor r15,QWORD PTR[7+rdi*8+rbp]
mov cl,al
mov dl,ah
mov eax,DWORD PTR[((64+6*8+8))+rsp]
lea rsi,QWORD PTR[rcx*1+rcx]
lea rdi,QWORD PTR[rdx*1+rdx]
xor r8,QWORD PTR[6+rsi*8+rbp]
xor r9,QWORD PTR[5+rdi*8+rbp]
mov cl,bl
mov dl,bh
lea rsi,QWORD PTR[rcx*1+rcx]
lea rdi,QWORD PTR[rdx*1+rdx]
shr ebx,16
xor r10,QWORD PTR[4+rsi*8+rbp]
xor r11,QWORD PTR[3+rdi*8+rbp]
mov cl,bl
mov dl,bh
mov ebx,DWORD PTR[((64+6*8+8+4))+rsp]
lea rsi,QWORD PTR[rcx*1+rcx]
lea rdi,QWORD PTR[rdx*1+rdx]
xor r12,QWORD PTR[2+rsi*8+rbp]
xor r13,QWORD PTR[1+rdi*8+rbp]
mov cl,al
mov dl,ah
lea rsi,QWORD PTR[rcx*1+rcx]
lea rdi,QWORD PTR[rdx*1+rdx]
shr eax,16
xor r15,QWORD PTR[rsi*8+rbp]
xor r8,QWORD PTR[7+rdi*8+rbp]
mov cl,al
mov dl,ah
lea rsi,QWORD PTR[rcx*1+rcx]
lea rdi,QWORD PTR[rdx*1+rdx]
xor r9,QWORD PTR[6+rsi*8+rbp]
xor r10,QWORD PTR[5+rdi*8+rbp]
mov cl,bl
mov dl,bh
lea rsi,QWORD PTR[rcx*1+rcx]
lea rdi,QWORD PTR[rdx*1+rdx]
shr ebx,16
xor r11,QWORD PTR[4+rsi*8+rbp]
xor r12,QWORD PTR[3+rdi*8+rbp]
mov cl,bl
mov dl,bh
lea rsi,QWORD PTR[rcx*1+rcx]
lea rdi,QWORD PTR[rdx*1+rdx]
xor r13,QWORD PTR[2+rsi*8+rbp]
xor r14,QWORD PTR[1+rdi*8+rbp]
lea rbx,QWORD PTR[128+rsp]
mov rsi,QWORD PTR[24+rbx]
add rsi,1
cmp rsi,10
je $L$roundsdone
mov QWORD PTR[24+rbx],rsi
mov QWORD PTR[((64+0*8))+rsp],r8
mov QWORD PTR[((64+1*8))+rsp],r9
mov QWORD PTR[((64+2*8))+rsp],r10
mov QWORD PTR[((64+3*8))+rsp],r11
mov QWORD PTR[((64+4*8))+rsp],r12
mov QWORD PTR[((64+5*8))+rsp],r13
mov QWORD PTR[((64+6*8))+rsp],r14
mov QWORD PTR[((64+7*8))+rsp],r15
jmp $L$round
ALIGN 16
$L$roundsdone::
mov rdi,QWORD PTR[rbx]
mov rsi,QWORD PTR[8+rbx]
mov rax,QWORD PTR[16+rbx]
xor r8,QWORD PTR[((0*8))+rsi]
xor r9,QWORD PTR[((1*8))+rsi]
xor r10,QWORD PTR[((2*8))+rsi]
xor r11,QWORD PTR[((3*8))+rsi]
xor r12,QWORD PTR[((4*8))+rsi]
xor r13,QWORD PTR[((5*8))+rsi]
xor r14,QWORD PTR[((6*8))+rsi]
xor r15,QWORD PTR[((7*8))+rsi]
xor r8,QWORD PTR[((0*8))+rdi]
xor r9,QWORD PTR[((1*8))+rdi]
xor r10,QWORD PTR[((2*8))+rdi]
xor r11,QWORD PTR[((3*8))+rdi]
xor r12,QWORD PTR[((4*8))+rdi]
xor r13,QWORD PTR[((5*8))+rdi]
xor r14,QWORD PTR[((6*8))+rdi]
xor r15,QWORD PTR[((7*8))+rdi]
mov QWORD PTR[((0*8))+rdi],r8
mov QWORD PTR[((1*8))+rdi],r9
mov QWORD PTR[((2*8))+rdi],r10
mov QWORD PTR[((3*8))+rdi],r11
mov QWORD PTR[((4*8))+rdi],r12
mov QWORD PTR[((5*8))+rdi],r13
mov QWORD PTR[((6*8))+rdi],r14
mov QWORD PTR[((7*8))+rdi],r15
lea rsi,QWORD PTR[64+rsi]
sub rax,1
jz $L$alldone
mov QWORD PTR[8+rbx],rsi
mov QWORD PTR[16+rbx],rax
jmp $L$outerloop
$L$alldone::
mov rsi,QWORD PTR[32+rbx]
mov r15,QWORD PTR[rsi]
mov r14,QWORD PTR[8+rsi]
mov r13,QWORD PTR[16+rsi]
mov r12,QWORD PTR[24+rsi]
mov rbp,QWORD PTR[32+rsi]
mov rbx,QWORD PTR[40+rsi]
lea rsp,QWORD PTR[48+rsi]
$L$epilogue::
mov rdi,QWORD PTR[8+rsp] ;WIN64 epilogue
mov rsi,QWORD PTR[16+rsp]
DB 0F3h,0C3h ;repret
$L$SEH_end_whirlpool_block::
whirlpool_block ENDP
ALIGN 64
$L$table::
DB 24,24,96,24,192,120,48,216,24,24,96,24,192,120,48,216
DB 35,35,140,35,5,175,70,38,35,35,140,35,5,175,70,38
DB 198,198,63,198,126,249,145,184,198,198,63,198,126,249,145,184
DB 232,232,135,232,19,111,205,251,232,232,135,232,19,111,205,251
DB 135,135,38,135,76,161,19,203,135,135,38,135,76,161,19,203
DB 184,184,218,184,169,98,109,17,184,184,218,184,169,98,109,17
DB 1,1,4,1,8,5,2,9,1,1,4,1,8,5,2,9
DB 79,79,33,79,66,110,158,13,79,79,33,79,66,110,158,13
DB 54,54,216,54,173,238,108,155,54,54,216,54,173,238,108,155
DB 166,166,162,166,89,4,81,255,166,166,162,166,89,4,81,255
DB 210,210,111,210,222,189,185,12,210,210,111,210,222,189,185,12
DB 245,245,243,245,251,6,247,14,245,245,243,245,251,6,247,14
DB 121,121,249,121,239,128,242,150,121,121,249,121,239,128,242,150
DB 111,111,161,111,95,206,222,48,111,111,161,111,95,206,222,48
DB 145,145,126,145,252,239,63,109,145,145,126,145,252,239,63,109
DB 82,82,85,82,170,7,164,248,82,82,85,82,170,7,164,248
DB 96,96,157,96,39,253,192,71,96,96,157,96,39,253,192,71
DB 188,188,202,188,137,118,101,53,188,188,202,188,137,118,101,53
DB 155,155,86,155,172,205,43,55,155,155,86,155,172,205,43,55
DB 142,142,2,142,4,140,1,138,142,142,2,142,4,140,1,138
DB 163,163,182,163,113,21,91,210,163,163,182,163,113,21,91,210
DB 12,12,48,12,96,60,24,108,12,12,48,12,96,60,24,108
DB 123,123,241,123,255,138,246,132,123,123,241,123,255,138,246,132
DB 53,53,212,53,181,225,106,128,53,53,212,53,181,225,106,128
DB 29,29,116,29,232,105,58,245,29,29,116,29,232,105,58,245
DB 224,224,167,224,83,71,221,179,224,224,167,224,83,71,221,179
DB 215,215,123,215,246,172,179,33,215,215,123,215,246,172,179,33
DB 194,194,47,194,94,237,153,156,194,194,47,194,94,237,153,156
DB 46,46,184,46,109,150,92,67,46,46,184,46,109,150,92,67
DB 75,75,49,75,98,122,150,41,75,75,49,75,98,122,150,41
DB 254,254,223,254,163,33,225,93,254,254,223,254,163,33,225,93
DB 87,87,65,87,130,22,174,213,87,87,65,87,130,22,174,213
DB 21,21,84,21,168,65,42,189,21,21,84,21,168,65,42,189
DB 119,119,193,119,159,182,238,232,119,119,193,119,159,182,238,232
DB 55,55,220,55,165,235,110,146,55,55,220,55,165,235,110,146
DB 229,229,179,229,123,86,215,158,229,229,179,229,123,86,215,158
DB 159,159,70,159,140,217,35,19,159,159,70,159,140,217,35,19
DB 240,240,231,240,211,23,253,35,240,240,231,240,211,23,253,35
DB 74,74,53,74,106,127,148,32,74,74,53,74,106,127,148,32
DB 218,218,79,218,158,149,169,68,218,218,79,218,158,149,169,68
DB 88,88,125,88,250,37,176,162,88,88,125,88,250,37,176,162
DB 201,201,3,201,6,202,143,207,201,201,3,201,6,202,143,207
DB 41,41,164,41,85,141,82,124,41,41,164,41,85,141,82,124
DB 10,10,40,10,80,34,20,90,10,10,40,10,80,34,20,90
DB 177,177,254,177,225,79,127,80,177,177,254,177,225,79,127,80
DB 160,160,186,160,105,26,93,201,160,160,186,160,105,26,93,201
DB 107,107,177,107,127,218,214,20,107,107,177,107,127,218,214,20
DB 133,133,46,133,92,171,23,217,133,133,46,133,92,171,23,217
DB 189,189,206,189,129,115,103,60,189,189,206,189,129,115,103,60
DB 93,93,105,93,210,52,186,143,93,93,105,93,210,52,186,143
DB 16,16,64,16,128,80,32,144,16,16,64,16,128,80,32,144
DB 244,244,247,244,243,3,245,7,244,244,247,244,243,3,245,7
DB 203,203,11,203,22,192,139,221,203,203,11,203,22,192,139,221
DB 62,62,248,62,237,198,124,211,62,62,248,62,237,198,124,211
DB 5,5,20,5,40,17,10,45,5,5,20,5,40,17,10,45
DB 103,103,129,103,31,230,206,120,103,103,129,103,31,230,206,120
DB 228,228,183,228,115,83,213,151,228,228,183,228,115,83,213,151
DB 39,39,156,39,37,187,78,2,39,39,156,39,37,187,78,2
DB 65,65,25,65,50,88,130,115,65,65,25,65,50,88,130,115
DB 139,139,22,139,44,157,11,167,139,139,22,139,44,157,11,167
DB 167,167,166,167,81,1,83,246,167,167,166,167,81,1,83,246
DB 125,125,233,125,207,148,250,178,125,125,233,125,207,148,250,178
DB 149,149,110,149,220,251,55,73,149,149,110,149,220,251,55,73
DB 216,216,71,216,142,159,173,86,216,216,71,216,142,159,173,86
DB 251,251,203,251,139,48,235,112,251,251,203,251,139,48,235,112
DB 238,238,159,238,35,113,193,205,238,238,159,238,35,113,193,205
DB 124,124,237,124,199,145,248,187,124,124,237,124,199,145,248,187
DB 102,102,133,102,23,227,204,113,102,102,133,102,23,227,204,113
DB 221,221,83,221,166,142,167,123,221,221,83,221,166,142,167,123
DB 23,23,92,23,184,75,46,175,23,23,92,23,184,75,46,175
DB 71,71,1,71,2,70,142,69,71,71,1,71,2,70,142,69
DB 158,158,66,158,132,220,33,26,158,158,66,158,132,220,33,26
DB 202,202,15,202,30,197,137,212,202,202,15,202,30,197,137,212
DB 45,45,180,45,117,153,90,88,45,45,180,45,117,153,90,88
DB 191,191,198,191,145,121,99,46,191,191,198,191,145,121,99,46
DB 7,7,28,7,56,27,14,63,7,7,28,7,56,27,14,63
DB 173,173,142,173,1,35,71,172,173,173,142,173,1,35,71,172
DB 90,90,117,90,234,47,180,176,90,90,117,90,234,47,180,176
DB 131,131,54,131,108,181,27,239,131,131,54,131,108,181,27,239
DB 51,51,204,51,133,255,102,182,51,51,204,51,133,255,102,182
DB 99,99,145,99,63,242,198,92,99,99,145,99,63,242,198,92
DB 2,2,8,2,16,10,4,18,2,2,8,2,16,10,4,18
DB 170,170,146,170,57,56,73,147,170,170,146,170,57,56,73,147
DB 113,113,217,113,175,168,226,222,113,113,217,113,175,168,226,222
DB 200,200,7,200,14,207,141,198,200,200,7,200,14,207,141,198
DB 25,25,100,25,200,125,50,209,25,25,100,25,200,125,50,209
DB 73,73,57,73,114,112,146,59,73,73,57,73,114,112,146,59
DB 217,217,67,217,134,154,175,95,217,217,67,217,134,154,175,95
DB 242,242,239,242,195,29,249,49,242,242,239,242,195,29,249,49
DB 227,227,171,227,75,72,219,168,227,227,171,227,75,72,219,168
DB 91,91,113,91,226,42,182,185,91,91,113,91,226,42,182,185
DB 136,136,26,136,52,146,13,188,136,136,26,136,52,146,13,188
DB 154,154,82,154,164,200,41,62,154,154,82,154,164,200,41,62
DB 38,38,152,38,45,190,76,11,38,38,152,38,45,190,76,11
DB 50,50,200,50,141,250,100,191,50,50,200,50,141,250,100,191
DB 176,176,250,176,233,74,125,89,176,176,250,176,233,74,125,89
DB 233,233,131,233,27,106,207,242,233,233,131,233,27,106,207,242
DB 15,15,60,15,120,51,30,119,15,15,60,15,120,51,30,119
DB 213,213,115,213,230,166,183,51,213,213,115,213,230,166,183,51
DB 128,128,58,128,116,186,29,244,128,128,58,128,116,186,29,244
DB 190,190,194,190,153,124,97,39,190,190,194,190,153,124,97,39
DB 205,205,19,205,38,222,135,235,205,205,19,205,38,222,135,235
DB 52,52,208,52,189,228,104,137,52,52,208,52,189,228,104,137
DB 72,72,61,72,122,117,144,50,72,72,61,72,122,117,144,50
DB 255,255,219,255,171,36,227,84,255,255,219,255,171,36,227,84
DB 122,122,245,122,247,143,244,141,122,122,245,122,247,143,244,141
DB 144,144,122,144,244,234,61,100,144,144,122,144,244,234,61,100
DB 95,95,97,95,194,62,190,157,95,95,97,95,194,62,190,157
DB 32,32,128,32,29,160,64,61,32,32,128,32,29,160,64,61
DB 104,104,189,104,103,213,208,15,104,104,189,104,103,213,208,15
DB 26,26,104,26,208,114,52,202,26,26,104,26,208,114,52,202
DB 174,174,130,174,25,44,65,183,174,174,130,174,25,44,65,183
DB 180,180,234,180,201,94,117,125,180,180,234,180,201,94,117,125
DB 84,84,77,84,154,25,168,206,84,84,77,84,154,25,168,206
DB 147,147,118,147,236,229,59,127,147,147,118,147,236,229,59,127
DB 34,34,136,34,13,170,68,47,34,34,136,34,13,170,68,47
DB 100,100,141,100,7,233,200,99,100,100,141,100,7,233,200,99
DB 241,241,227,241,219,18,255,42,241,241,227,241,219,18,255,42
DB 115,115,209,115,191,162,230,204,115,115,209,115,191,162,230,204
DB 18,18,72,18,144,90,36,130,18,18,72,18,144,90,36,130
DB 64,64,29,64,58,93,128,122,64,64,29,64,58,93,128,122
DB 8,8,32,8,64,40,16,72,8,8,32,8,64,40,16,72
DB 195,195,43,195,86,232,155,149,195,195,43,195,86,232,155,149
DB 236,236,151,236,51,123,197,223,236,236,151,236,51,123,197,223
DB 219,219,75,219,150,144,171,77,219,219,75,219,150,144,171,77
DB 161,161,190,161,97,31,95,192,161,161,190,161,97,31,95,192
DB 141,141,14,141,28,131,7,145,141,141,14,141,28,131,7,145
DB 61,61,244,61,245,201,122,200,61,61,244,61,245,201,122,200
DB 151,151,102,151,204,241,51,91,151,151,102,151,204,241,51,91
DB 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
DB 207,207,27,207,54,212,131,249,207,207,27,207,54,212,131,249
DB 43,43,172,43,69,135,86,110,43,43,172,43,69,135,86,110
DB 118,118,197,118,151,179,236,225,118,118,197,118,151,179,236,225
DB 130,130,50,130,100,176,25,230,130,130,50,130,100,176,25,230
DB 214,214,127,214,254,169,177,40,214,214,127,214,254,169,177,40
DB 27,27,108,27,216,119,54,195,27,27,108,27,216,119,54,195
DB 181,181,238,181,193,91,119,116,181,181,238,181,193,91,119,116
DB 175,175,134,175,17,41,67,190,175,175,134,175,17,41,67,190
DB 106,106,181,106,119,223,212,29,106,106,181,106,119,223,212,29
DB 80,80,93,80,186,13,160,234,80,80,93,80,186,13,160,234
DB 69,69,9,69,18,76,138,87,69,69,9,69,18,76,138,87
DB 243,243,235,243,203,24,251,56,243,243,235,243,203,24,251,56
DB 48,48,192,48,157,240,96,173,48,48,192,48,157,240,96,173
DB 239,239,155,239,43,116,195,196,239,239,155,239,43,116,195,196
DB 63,63,252,63,229,195,126,218,63,63,252,63,229,195,126,218
DB 85,85,73,85,146,28,170,199,85,85,73,85,146,28,170,199
DB 162,162,178,162,121,16,89,219,162,162,178,162,121,16,89,219
DB 234,234,143,234,3,101,201,233,234,234,143,234,3,101,201,233
DB 101,101,137,101,15,236,202,106,101,101,137,101,15,236,202,106
DB 186,186,210,186,185,104,105,3,186,186,210,186,185,104,105,3
DB 47,47,188,47,101,147,94,74,47,47,188,47,101,147,94,74
DB 192,192,39,192,78,231,157,142,192,192,39,192,78,231,157,142
DB 222,222,95,222,190,129,161,96,222,222,95,222,190,129,161,96
DB 28,28,112,28,224,108,56,252,28,28,112,28,224,108,56,252
DB 253,253,211,253,187,46,231,70,253,253,211,253,187,46,231,70
DB 77,77,41,77,82,100,154,31,77,77,41,77,82,100,154,31
DB 146,146,114,146,228,224,57,118,146,146,114,146,228,224,57,118
DB 117,117,201,117,143,188,234,250,117,117,201,117,143,188,234,250
DB 6,6,24,6,48,30,12,54,6,6,24,6,48,30,12,54
DB 138,138,18,138,36,152,9,174,138,138,18,138,36,152,9,174
DB 178,178,242,178,249,64,121,75,178,178,242,178,249,64,121,75
DB 230,230,191,230,99,89,209,133,230,230,191,230,99,89,209,133
DB 14,14,56,14,112,54,28,126,14,14,56,14,112,54,28,126
DB 31,31,124,31,248,99,62,231,31,31,124,31,248,99,62,231
DB 98,98,149,98,55,247,196,85,98,98,149,98,55,247,196,85
DB 212,212,119,212,238,163,181,58,212,212,119,212,238,163,181,58
DB 168,168,154,168,41,50,77,129,168,168,154,168,41,50,77,129
DB 150,150,98,150,196,244,49,82,150,150,98,150,196,244,49,82
DB 249,249,195,249,155,58,239,98,249,249,195,249,155,58,239,98
DB 197,197,51,197,102,246,151,163,197,197,51,197,102,246,151,163
DB 37,37,148,37,53,177,74,16,37,37,148,37,53,177,74,16
DB 89,89,121,89,242,32,178,171,89,89,121,89,242,32,178,171
DB 132,132,42,132,84,174,21,208,132,132,42,132,84,174,21,208
DB 114,114,213,114,183,167,228,197,114,114,213,114,183,167,228,197
DB 57,57,228,57,213,221,114,236,57,57,228,57,213,221,114,236
DB 76,76,45,76,90,97,152,22,76,76,45,76,90,97,152,22
DB 94,94,101,94,202,59,188,148,94,94,101,94,202,59,188,148
DB 120,120,253,120,231,133,240,159,120,120,253,120,231,133,240,159
DB 56,56,224,56,221,216,112,229,56,56,224,56,221,216,112,229
DB 140,140,10,140,20,134,5,152,140,140,10,140,20,134,5,152
DB 209,209,99,209,198,178,191,23,209,209,99,209,198,178,191,23
DB 165,165,174,165,65,11,87,228,165,165,174,165,65,11,87,228
DB 226,226,175,226,67,77,217,161,226,226,175,226,67,77,217,161
DB 97,97,153,97,47,248,194,78,97,97,153,97,47,248,194,78
DB 179,179,246,179,241,69,123,66,179,179,246,179,241,69,123,66
DB 33,33,132,33,21,165,66,52,33,33,132,33,21,165,66,52
DB 156,156,74,156,148,214,37,8,156,156,74,156,148,214,37,8
DB 30,30,120,30,240,102,60,238,30,30,120,30,240,102,60,238
DB 67,67,17,67,34,82,134,97,67,67,17,67,34,82,134,97
DB 199,199,59,199,118,252,147,177,199,199,59,199,118,252,147,177
DB 252,252,215,252,179,43,229,79,252,252,215,252,179,43,229,79
DB 4,4,16,4,32,20,8,36,4,4,16,4,32,20,8,36
DB 81,81,89,81,178,8,162,227,81,81,89,81,178,8,162,227
DB 153,153,94,153,188,199,47,37,153,153,94,153,188,199,47,37
DB 109,109,169,109,79,196,218,34,109,109,169,109,79,196,218,34
DB 13,13,52,13,104,57,26,101,13,13,52,13,104,57,26,101
DB 250,250,207,250,131,53,233,121,250,250,207,250,131,53,233,121
DB 223,223,91,223,182,132,163,105,223,223,91,223,182,132,163,105
DB 126,126,229,126,215,155,252,169,126,126,229,126,215,155,252,169
DB 36,36,144,36,61,180,72,25,36,36,144,36,61,180,72,25
DB 59,59,236,59,197,215,118,254,59,59,236,59,197,215,118,254
DB 171,171,150,171,49,61,75,154,171,171,150,171,49,61,75,154
DB 206,206,31,206,62,209,129,240,206,206,31,206,62,209,129,240
DB 17,17,68,17,136,85,34,153,17,17,68,17,136,85,34,153
DB 143,143,6,143,12,137,3,131,143,143,6,143,12,137,3,131
DB 78,78,37,78,74,107,156,4,78,78,37,78,74,107,156,4
DB 183,183,230,183,209,81,115,102,183,183,230,183,209,81,115,102
DB 235,235,139,235,11,96,203,224,235,235,139,235,11,96,203,224
DB 60,60,240,60,253,204,120,193,60,60,240,60,253,204,120,193
DB 129,129,62,129,124,191,31,253,129,129,62,129,124,191,31,253
DB 148,148,106,148,212,254,53,64,148,148,106,148,212,254,53,64
DB 247,247,251,247,235,12,243,28,247,247,251,247,235,12,243,28
DB 185,185,222,185,161,103,111,24,185,185,222,185,161,103,111,24
DB 19,19,76,19,152,95,38,139,19,19,76,19,152,95,38,139
DB 44,44,176,44,125,156,88,81,44,44,176,44,125,156,88,81
DB 211,211,107,211,214,184,187,5,211,211,107,211,214,184,187,5
DB 231,231,187,231,107,92,211,140,231,231,187,231,107,92,211,140
DB 110,110,165,110,87,203,220,57,110,110,165,110,87,203,220,57
DB 196,196,55,196,110,243,149,170,196,196,55,196,110,243,149,170
DB 3,3,12,3,24,15,6,27,3,3,12,3,24,15,6,27
DB 86,86,69,86,138,19,172,220,86,86,69,86,138,19,172,220
DB 68,68,13,68,26,73,136,94,68,68,13,68,26,73,136,94
DB 127,127,225,127,223,158,254,160,127,127,225,127,223,158,254,160
DB 169,169,158,169,33,55,79,136,169,169,158,169,33,55,79,136
DB 42,42,168,42,77,130,84,103,42,42,168,42,77,130,84,103
DB 187,187,214,187,177,109,107,10,187,187,214,187,177,109,107,10
DB 193,193,35,193,70,226,159,135,193,193,35,193,70,226,159,135
DB 83,83,81,83,162,2,166,241,83,83,81,83,162,2,166,241
DB 220,220,87,220,174,139,165,114,220,220,87,220,174,139,165,114
DB 11,11,44,11,88,39,22,83,11,11,44,11,88,39,22,83
DB 157,157,78,157,156,211,39,1,157,157,78,157,156,211,39,1
DB 108,108,173,108,71,193,216,43,108,108,173,108,71,193,216,43
DB 49,49,196,49,149,245,98,164,49,49,196,49,149,245,98,164
DB 116,116,205,116,135,185,232,243,116,116,205,116,135,185,232,243
DB 246,246,255,246,227,9,241,21,246,246,255,246,227,9,241,21
DB 70,70,5,70,10,67,140,76,70,70,5,70,10,67,140,76
DB 172,172,138,172,9,38,69,165,172,172,138,172,9,38,69,165
DB 137,137,30,137,60,151,15,181,137,137,30,137,60,151,15,181
DB 20,20,80,20,160,68,40,180,20,20,80,20,160,68,40,180
DB 225,225,163,225,91,66,223,186,225,225,163,225,91,66,223,186
DB 22,22,88,22,176,78,44,166,22,22,88,22,176,78,44,166
DB 58,58,232,58,205,210,116,247,58,58,232,58,205,210,116,247
DB 105,105,185,105,111,208,210,6,105,105,185,105,111,208,210,6
DB 9,9,36,9,72,45,18,65,9,9,36,9,72,45,18,65
DB 112,112,221,112,167,173,224,215,112,112,221,112,167,173,224,215
DB 182,182,226,182,217,84,113,111,182,182,226,182,217,84,113,111
DB 208,208,103,208,206,183,189,30,208,208,103,208,206,183,189,30
DB 237,237,147,237,59,126,199,214,237,237,147,237,59,126,199,214
DB 204,204,23,204,46,219,133,226,204,204,23,204,46,219,133,226
DB 66,66,21,66,42,87,132,104,66,66,21,66,42,87,132,104
DB 152,152,90,152,180,194,45,44,152,152,90,152,180,194,45,44
DB 164,164,170,164,73,14,85,237,164,164,170,164,73,14,85,237
DB 40,40,160,40,93,136,80,117,40,40,160,40,93,136,80,117
DB 92,92,109,92,218,49,184,134,92,92,109,92,218,49,184,134
DB 248,248,199,248,147,63,237,107,248,248,199,248,147,63,237,107
DB 134,134,34,134,68,164,17,194,134,134,34,134,68,164,17,194
DB 24,35,198,232,135,184,1,79
DB 54,166,210,245,121,111,145,82
DB 96,188,155,142,163,12,123,53
DB 29,224,215,194,46,75,254,87
DB 21,119,55,229,159,240,74,218
DB 88,201,41,10,177,160,107,133
DB 189,93,16,244,203,62,5,103
DB 228,39,65,139,167,125,149,216
DB 251,238,124,102,221,23,71,158
DB 202,45,191,7,173,90,131,51
EXTERN __imp_RtlVirtualUnwind:NEAR
ALIGN 16
; se_handler: Win64 structured-exception/unwind handler for whirlpool_block.
; Entered by the OS unwinder with rcx = ExceptionRecord, rdx = establisher
; frame, r8 = CONTEXT record, r9 = DISPATCHER_CONTEXT.  If the fault address
; lies inside whirlpool_block's body, the callee-saved registers are
; recovered from the function's frame into the CONTEXT; the unwind is then
; continued via RtlVirtualUnwind and ExceptionContinueSearch (1) is returned.
se_handler PROC PRIVATE
push rsi
push rdi
push rbx
push rbp
push r12
push r13
push r14
push r15
pushfq
sub rsp,64                      ; shadow space + stack args for RtlVirtualUnwind
mov rax,QWORD PTR[120+r8]       ; context->Rax (presumably holds the original
                                ; rsp during the prologue - TODO confirm against
                                ; whirlpool_block's prologue, not visible here)
mov rbx,QWORD PTR[248+r8]       ; context->Rip = faulting address
lea r10,QWORD PTR[$L$prologue]
cmp rbx,r10
jb $L$in_prologue               ; fault before prologue completed: nothing saved yet
mov rax,QWORD PTR[152+r8]       ; context->Rsp
lea r10,QWORD PTR[$L$epilogue]
cmp rbx,r10
jae $L$in_prologue              ; fault after epilogue: registers already restored
mov rax,QWORD PTR[((128+32))+rax]       ; saved original stack pointer (stored
                                        ; in the frame by the prologue)
lea rax,QWORD PTR[48+rax]       ; top of the six-register save area
mov rbx,QWORD PTR[((-8))+rax]   ; recover pushed callee-saved registers
mov rbp,QWORD PTR[((-16))+rax]
mov r12,QWORD PTR[((-24))+rax]
mov r13,QWORD PTR[((-32))+rax]
mov r14,QWORD PTR[((-40))+rax]
mov r15,QWORD PTR[((-48))+rax]
mov QWORD PTR[144+r8],rbx       ; context->Rbx
mov QWORD PTR[160+r8],rbp       ; context->Rbp
mov QWORD PTR[216+r8],r12       ; context->R12
mov QWORD PTR[224+r8],r13       ; context->R13
mov QWORD PTR[232+r8],r14       ; context->R14
mov QWORD PTR[240+r8],r15       ; context->R15
$L$in_prologue::
mov rdi,QWORD PTR[8+rax]        ; rdi/rsi saved by the WIN64 prologue
mov rsi,QWORD PTR[16+rax]
mov QWORD PTR[152+r8],rax       ; context->Rsp
mov QWORD PTR[168+r8],rsi       ; context->Rsi
mov QWORD PTR[176+r8],rdi       ; context->Rdi
mov rdi,QWORD PTR[40+r9]        ; disp->ContextRecord
mov rsi,r8
mov ecx,154                     ; sizeof(CONTEXT) in qwords
DD 0a548f3fch                   ; cld; rep movsq - copy CONTEXT to disp->ContextRecord
mov rsi,r9
xor rcx,rcx                     ; arg1: HandlerType = UNW_FLAG_NHANDLER
mov rdx,QWORD PTR[8+rsi]        ; arg2: disp->ImageBase
mov r8,QWORD PTR[rsi]           ; arg3: disp->ControlPc
mov r9,QWORD PTR[16+rsi]        ; arg4: disp->FunctionEntry
mov r10,QWORD PTR[40+rsi]       ; disp->ContextRecord
lea r11,QWORD PTR[56+rsi]       ; &disp->HandlerData
lea r12,QWORD PTR[24+rsi]       ; &disp->EstablisherFrame
mov QWORD PTR[32+rsp],r10       ; stack args 5..8 for RtlVirtualUnwind
mov QWORD PTR[40+rsp],r11
mov QWORD PTR[48+rsp],r12
mov QWORD PTR[56+rsp],rcx       ; HistoryTable = NULL
call QWORD PTR[__imp_RtlVirtualUnwind]
mov eax,1                       ; ExceptionContinueSearch
add rsp,64
popfq
pop r15
pop r14
pop r13
pop r12
pop rbp
pop rbx
pop rdi
pop rsi
DB 0F3h,0C3h ;repret
se_handler ENDP
.text$ ENDS
.pdata SEGMENT READONLY ALIGN(4)
ALIGN 4
DD imagerel $L$SEH_begin_whirlpool_block
DD imagerel $L$SEH_end_whirlpool_block
DD imagerel $L$SEH_info_whirlpool_block
.pdata ENDS
.xdata SEGMENT READONLY ALIGN(8)
ALIGN 8
$L$SEH_info_whirlpool_block::
DB 9,0,0,0
DD imagerel se_handler
.xdata ENDS
END

186
deps/openssl/asm/x64-win32-masm/x86_64cpuid.asm

@@ -0,0 +1,186 @@
OPTION DOTNAME
EXTERN OPENSSL_cpuid_setup:NEAR
.CRT$XCU SEGMENT READONLY DWORD
DQ OPENSSL_cpuid_setup
.CRT$XCU ENDS
.text$ SEGMENT ALIGN(64) 'CODE'
PUBLIC OPENSSL_atomic_add
ALIGN 16
; int OPENSSL_atomic_add(int *ptr, int amount)
; Win64 ABI: rcx = ptr, edx = amount.  Atomically adds amount to *ptr via a
; lock-prefixed cmpxchg retry loop; returns the new value, sign-extended.
OPENSSL_atomic_add PROC PUBLIC
mov eax,DWORD PTR[rcx]          ; eax = current value of *ptr
$L$spin:: lea r8,QWORD PTR[rax*1+rdx]   ; r8d = current + amount (flags untouched)
DB 0f0h                         ; lock prefix for the cmpxchg below
cmpxchg DWORD PTR[rcx],r8d      ; if *ptr == eax store r8d, else eax = *ptr
jne $L$spin                     ; lost the race: retry with the fresh value
mov eax,r8d
DB 048h,098h                    ; cdqe - sign-extend eax into rax
DB 0F3h,0C3h ;repret
OPENSSL_atomic_add ENDP
PUBLIC OPENSSL_rdtsc
ALIGN 16
; unsigned __int64 OPENSSL_rdtsc(void)
; Returns the CPU time-stamp counter as a single 64-bit value in rax.
OPENSSL_rdtsc PROC PUBLIC
rdtsc                           ; edx:eax = TSC
shl rdx,32
or rax,rdx                      ; rax = (high << 32) | low
DB 0F3h,0C3h ;repret
OPENSSL_rdtsc ENDP
PUBLIC OPENSSL_ia32_cpuid
ALIGN 16
; OPENSSL_ia32_cpuid: probe CPUID and return the capability word.
; Win64 ABI, no arguments.  Returns rax = (leaf-1 ECX features << 32) |
; leaf-1 EDX features, with the HTT bit (EDX bit 28) adjusted so it is only
; set when the part really exposes more logical CPUs than cores.
; rbx is callee-saved and preserved via r8 (cpuid clobbers it).
OPENSSL_ia32_cpuid PROC PUBLIC
mov r8,rbx                      ; preserve callee-saved rbx across cpuid
xor eax,eax
cpuid                           ; leaf 0: max std leaf in eax, vendor in ebx:edx:ecx
mov r11d,eax                    ; r11d = maximum standard leaf
xor eax,eax
cmp ebx,0756e6547h              ; "Genu"
setne al
mov r9d,eax
cmp edx,049656e69h              ; "ineI"
setne al
or r9d,eax
cmp ecx,06c65746eh              ; "ntel"
setne al
or r9d,eax                      ; r9d == 0 iff vendor is GenuineIntel
jz $L$intel
cmp ebx,068747541h              ; "Auth"
setne al
mov r10d,eax
cmp edx,069746E65h              ; "enti"
setne al
or r10d,eax
cmp ecx,0444D4163h              ; "cAMD"
setne al
or r10d,eax                     ; r10d == 0 iff vendor is AuthenticAMD
jnz $L$intel                    ; unknown vendor: use the generic path
mov eax,080000000h
cpuid                           ; eax = maximum extended leaf
cmp eax,080000008h
jb $L$intel
mov eax,080000008h
cpuid                           ; leaf 0x80000008: cl = core count - 1
movzx r10,cl
inc r10                         ; r10 = number of cores
mov eax,1
cpuid                           ; leaf 1: feature flags in ecx:edx
bt edx,28                       ; HTT bit set?
jnc $L$done
shr ebx,16                      ; bl = logical processor count
cmp bl,r10b
ja $L$done                      ; more logical CPUs than cores: genuine HTT
and edx,0efffffffh              ; otherwise clear the HTT bit
jmp $L$done
$L$intel::
cmp r11d,4
mov r10d,-1
jb $L$nocacheinfo               ; leaf 4 unsupported: no core-count info
mov eax,4
mov ecx,0
cpuid                           ; leaf 4: eax[31:26] = cores per package - 1
mov r10d,eax
shr r10d,14
and r10d,0fffh
$L$nocacheinfo::
mov eax,1
cpuid                           ; leaf 1: feature flags in ecx:edx
cmp r9d,0
jne $L$notintel
or edx,000100000h               ; Intel only: set bit 20 (presumably an
                                ; OpenSSL-internal "Intel" marker - confirm
                                ; against x86_64cpuid.pl)
and ah,15                       ; ah = family id
cmp ah,15                       ; family 15 = NetBurst/P4
je $L$notintel
or edx,040000000h               ; non-P4 Intel: set bit 30 (internal marker)
$L$notintel::
bt edx,28                       ; re-derive the HTT bit from core info
jnc $L$done
and edx,0efffffffh
cmp r10d,0
je $L$done                      ; no core-count info: leave HTT cleared
or edx,010000000h
shr ebx,16                      ; bl = logical processor count
cmp bl,1
ja $L$done
and edx,0efffffffh              ; single logical CPU: clear HTT again
$L$done::
shl rcx,32
mov eax,edx
mov rbx,r8                      ; restore callee-saved rbx
or rax,rcx                      ; rax = ecx-features:edx-features
DB 0F3h,0C3h ;repret
OPENSSL_ia32_cpuid ENDP
PUBLIC OPENSSL_cleanse
ALIGN 16
; void OPENSSL_cleanse(void *ptr, size_t len)
; Win64 ABI: rcx = ptr, rdx = len.  Securely zeroes len bytes: a byte loop
; for short buffers, then 8-byte-aligned qword stores for the bulk.
; Written in asm so the compiler cannot optimize the wipe away.
OPENSSL_cleanse PROC PUBLIC
xor rax,rax                     ; rax = 0, the fill value
cmp rdx,15
jae $L$ot                       ; "a lot": worth aligning for qword stores
cmp rdx,0
je $L$ret                       ; nothing to do
$L$ittle::                      ; "little": simple byte-fill loop
mov BYTE PTR[rcx],al
sub rdx,1
lea rcx,QWORD PTR[1+rcx]
jnz $L$ittle
$L$ret::
DB 0F3h,0C3h ;repret
ALIGN 16
$L$ot::
test rcx,7
jz $L$aligned                   ; byte-fill until ptr is 8-byte aligned
mov BYTE PTR[rcx],al
lea rdx,QWORD PTR[((-1))+rdx]
lea rcx,QWORD PTR[1+rcx]
jmp $L$ot
$L$aligned::
mov QWORD PTR[rcx],rax          ; bulk: zero one qword per iteration
lea rdx,QWORD PTR[((-8))+rdx]
test rdx,-8                     ; at least 8 bytes still remaining?
lea rcx,QWORD PTR[8+rcx]
jnz $L$aligned
cmp rdx,0
jne $L$ittle                    ; 1..7 trailing bytes: finish with byte loop
DB 0F3h,0C3h ;repret
OPENSSL_cleanse ENDP
PUBLIC OPENSSL_wipe_cpu
ALIGN 16
; void *OPENSSL_wipe_cpu(void)
; Scrubs the Win64 volatile registers (xmm0-xmm5, rcx, rdx, r8-r11) so no
; sensitive data lingers in them, and returns the caller's stack pointer
; (the address just above the return address).
OPENSSL_wipe_cpu PROC PUBLIC
pxor xmm0,xmm0
pxor xmm1,xmm1
pxor xmm2,xmm2
pxor xmm3,xmm3
pxor xmm4,xmm4
pxor xmm5,xmm5
xor rcx,rcx
xor rdx,rdx
xor r8,r8
xor r9,r9
xor r10,r10
xor r11,r11
lea rax,QWORD PTR[8+rsp]        ; rax = caller's rsp at the call site
DB 0F3h,0C3h ;repret
OPENSSL_wipe_cpu ENDP
.text$ ENDS
END

3234
deps/openssl/asm/x86-elf-gas/aes/aes-586.s

File diff suppressed because it is too large

864
deps/openssl/asm/x86-elf-gas/bf/bf-686.s

@@ -0,0 +1,864 @@
.file "bf-686.s"
.text
.globl BF_encrypt
.type BF_encrypt,@function
.align 16
# void BF_encrypt(BF_LONG *data, const BF_KEY *key)
# cdecl, i386 (AT&T syntax).  Encrypts the 64-bit block data[0..1] in place
# with the Blowfish schedule at key: P-array P[0..17] at 0(%edi), the four
# 256-entry S-boxes at 72/1096/2120/3144(%edi).  All 16 Feistel rounds are
# unrolled.  Each round extracts the four bytes of one half (via two
# rorl/movb pairs), computes F = ((S0[a] + S1[b]) ^ S2[c]) + S3[d], and
# xors the subkey and F into the other half.  Register roles: ecx = xL,
# edx = xR, edi = key, esi = subkey then F accumulator, eax/ebx = byte
# indices (kept zero-extended by the xorl %eax,%eax resets).
BF_encrypt:
.L_BF_encrypt_begin:
pushl %ebp
pushl %ebx
pushl %esi
pushl %edi
movl 20(%esp),%eax      # eax = data
movl (%eax),%ecx        # ecx = left half  (xL)
movl 4(%eax),%edx       # edx = right half (xR)
movl 24(%esp),%edi      # edi = key schedule
xorl %eax,%eax          # clear byte-index registers
xorl %ebx,%ebx
xorl (%edi),%ecx        # xL ^= P[0]
# round 1: xR ^= P[1] ^ F(xL)
rorl $16,%ecx
movl 4(%edi),%esi       # esi = P[1]
movb %ch,%al            # al = byte 3 of xL
movb %cl,%bl            # bl = byte 2 of xL
rorl $16,%ecx           # restore xL
xorl %esi,%edx          # xR ^= P[1]
movl 72(%edi,%eax,4),%esi       # S0[a]
movl 1096(%edi,%ebx,4),%ebp     # S1[b]
movb %ch,%al            # al = byte 1 of xL
movb %cl,%bl            # bl = byte 0 of xL
addl %ebp,%esi          # S0[a] + S1[b]
movl 2120(%edi,%eax,4),%eax     # S2[c]
xorl %eax,%esi          # ... ^ S2[c]
movl 3144(%edi,%ebx,4),%ebp     # S3[d]
addl %ebp,%esi          # ... + S3[d] = F(xL)
xorl %eax,%eax
xorl %esi,%edx          # xR ^= F(xL)
# round 2: xL ^= P[2] ^ F(xR)  (same pattern with the halves swapped)
rorl $16,%edx
movl 8(%edi),%esi
movb %dh,%al
movb %dl,%bl
rorl $16,%edx
xorl %esi,%ecx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %dh,%al
movb %dl,%bl
addl %ebp,%esi
movl 2120(%edi,%eax,4),%eax
xorl %eax,%esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp,%esi
xorl %eax,%eax
xorl %esi,%ecx
# round 3: subkey P[3]
rorl $16,%ecx
movl 12(%edi),%esi
movb %ch,%al
movb %cl,%bl
rorl $16,%ecx
xorl %esi,%edx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %ch,%al
movb %cl,%bl
addl %ebp,%esi
movl 2120(%edi,%eax,4),%eax
xorl %eax,%esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp,%esi
xorl %eax,%eax
xorl %esi,%edx
# round 4: subkey P[4]
rorl $16,%edx
movl 16(%edi),%esi
movb %dh,%al
movb %dl,%bl
rorl $16,%edx
xorl %esi,%ecx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %dh,%al
movb %dl,%bl
addl %ebp,%esi
movl 2120(%edi,%eax,4),%eax
xorl %eax,%esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp,%esi
xorl %eax,%eax
xorl %esi,%ecx
# round 5: subkey P[5]
rorl $16,%ecx
movl 20(%edi),%esi
movb %ch,%al
movb %cl,%bl
rorl $16,%ecx
xorl %esi,%edx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %ch,%al
movb %cl,%bl
addl %ebp,%esi
movl 2120(%edi,%eax,4),%eax
xorl %eax,%esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp,%esi
xorl %eax,%eax
xorl %esi,%edx
# round 6: subkey P[6]
rorl $16,%edx
movl 24(%edi),%esi
movb %dh,%al
movb %dl,%bl
rorl $16,%edx
xorl %esi,%ecx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %dh,%al
movb %dl,%bl
addl %ebp,%esi
movl 2120(%edi,%eax,4),%eax
xorl %eax,%esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp,%esi
xorl %eax,%eax
xorl %esi,%ecx
# round 7: subkey P[7]
rorl $16,%ecx
movl 28(%edi),%esi
movb %ch,%al
movb %cl,%bl
rorl $16,%ecx
xorl %esi,%edx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %ch,%al
movb %cl,%bl
addl %ebp,%esi
movl 2120(%edi,%eax,4),%eax
xorl %eax,%esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp,%esi
xorl %eax,%eax
xorl %esi,%edx
# round 8: subkey P[8]
rorl $16,%edx
movl 32(%edi),%esi
movb %dh,%al
movb %dl,%bl
rorl $16,%edx
xorl %esi,%ecx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %dh,%al
movb %dl,%bl
addl %ebp,%esi
movl 2120(%edi,%eax,4),%eax
xorl %eax,%esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp,%esi
xorl %eax,%eax
xorl %esi,%ecx
# round 9: subkey P[9]
rorl $16,%ecx
movl 36(%edi),%esi
movb %ch,%al
movb %cl,%bl
rorl $16,%ecx
xorl %esi,%edx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %ch,%al
movb %cl,%bl
addl %ebp,%esi
movl 2120(%edi,%eax,4),%eax
xorl %eax,%esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp,%esi
xorl %eax,%eax
xorl %esi,%edx
# round 10: subkey P[10]
rorl $16,%edx
movl 40(%edi),%esi
movb %dh,%al
movb %dl,%bl
rorl $16,%edx
xorl %esi,%ecx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %dh,%al
movb %dl,%bl
addl %ebp,%esi
movl 2120(%edi,%eax,4),%eax
xorl %eax,%esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp,%esi
xorl %eax,%eax
xorl %esi,%ecx
# round 11: subkey P[11]
rorl $16,%ecx
movl 44(%edi),%esi
movb %ch,%al
movb %cl,%bl
rorl $16,%ecx
xorl %esi,%edx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %ch,%al
movb %cl,%bl
addl %ebp,%esi
movl 2120(%edi,%eax,4),%eax
xorl %eax,%esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp,%esi
xorl %eax,%eax
xorl %esi,%edx
# round 12: subkey P[12]
rorl $16,%edx
movl 48(%edi),%esi
movb %dh,%al
movb %dl,%bl
rorl $16,%edx
xorl %esi,%ecx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %dh,%al
movb %dl,%bl
addl %ebp,%esi
movl 2120(%edi,%eax,4),%eax
xorl %eax,%esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp,%esi
xorl %eax,%eax
xorl %esi,%ecx
# round 13: subkey P[13]
rorl $16,%ecx
movl 52(%edi),%esi
movb %ch,%al
movb %cl,%bl
rorl $16,%ecx
xorl %esi,%edx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %ch,%al
movb %cl,%bl
addl %ebp,%esi
movl 2120(%edi,%eax,4),%eax
xorl %eax,%esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp,%esi
xorl %eax,%eax
xorl %esi,%edx
# round 14: subkey P[14]
rorl $16,%edx
movl 56(%edi),%esi
movb %dh,%al
movb %dl,%bl
rorl $16,%edx
xorl %esi,%ecx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %dh,%al
movb %dl,%bl
addl %ebp,%esi
movl 2120(%edi,%eax,4),%eax
xorl %eax,%esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp,%esi
xorl %eax,%eax
xorl %esi,%ecx
# round 15: subkey P[15]
rorl $16,%ecx
movl 60(%edi),%esi
movb %ch,%al
movb %cl,%bl
rorl $16,%ecx
xorl %esi,%edx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %ch,%al
movb %cl,%bl
addl %ebp,%esi
movl 2120(%edi,%eax,4),%eax
xorl %eax,%esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp,%esi
xorl %eax,%eax
xorl %esi,%edx
# round 16: subkey P[16]
rorl $16,%edx
movl 64(%edi),%esi
movb %dh,%al
movb %dl,%bl
rorl $16,%edx
xorl %esi,%ecx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %dh,%al
movb %dl,%bl
addl %ebp,%esi
movl 2120(%edi,%eax,4),%eax
xorl %eax,%esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp,%esi
xorl %eax,%eax
xorl %esi,%ecx
xorl 68(%edi),%edx      # output whitening: xR ^= P[17]
movl 20(%esp),%eax
movl %edx,(%eax)        # final swap: data[0] = xR
movl %ecx,4(%eax)       # data[1] = xL
popl %edi
popl %esi
popl %ebx
popl %ebp
ret
.size BF_encrypt,.-.L_BF_encrypt_begin
.globl BF_decrypt
.type BF_decrypt,@function
.align 16
# void BF_decrypt(BF_LONG *data, const BF_KEY *key)
# cdecl, i386 (AT&T syntax).  Inverse of BF_encrypt: identical unrolled
# Feistel structure but with the subkeys applied in reverse order
# (P[17] first, then P[16]..P[1], and P[0] as the final whitening).
# Register roles match BF_encrypt: ecx = xL, edx = xR, edi = key,
# esi = subkey / F accumulator, eax/ebx = zero-extended byte indices.
BF_decrypt:
.L_BF_decrypt_begin:
pushl %ebp
pushl %ebx
pushl %esi
pushl %edi
movl 20(%esp),%eax      # eax = data
movl (%eax),%ecx        # ecx = left half  (xL)
movl 4(%eax),%edx       # edx = right half (xR)
movl 24(%esp),%edi      # edi = key schedule
xorl %eax,%eax          # clear byte-index registers
xorl %ebx,%ebx
xorl 68(%edi),%ecx      # xL ^= P[17]
# round 1: xR ^= P[16] ^ F(xL)
rorl $16,%ecx
movl 64(%edi),%esi      # esi = P[16]
movb %ch,%al            # al/bl = bytes 3/2 of xL
movb %cl,%bl
rorl $16,%ecx           # restore xL
xorl %esi,%edx          # xR ^= P[16]
movl 72(%edi,%eax,4),%esi       # S0[a]
movl 1096(%edi,%ebx,4),%ebp     # S1[b]
movb %ch,%al            # al/bl = bytes 1/0 of xL
movb %cl,%bl
addl %ebp,%esi
movl 2120(%edi,%eax,4),%eax     # S2[c]
xorl %eax,%esi
movl 3144(%edi,%ebx,4),%ebp     # S3[d]
addl %ebp,%esi          # esi = F(xL)
xorl %eax,%eax
xorl %esi,%edx          # xR ^= F(xL)
# round 2: subkey P[15]  (halves swapped)
rorl $16,%edx
movl 60(%edi),%esi
movb %dh,%al
movb %dl,%bl
rorl $16,%edx
xorl %esi,%ecx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %dh,%al
movb %dl,%bl
addl %ebp,%esi
movl 2120(%edi,%eax,4),%eax
xorl %eax,%esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp,%esi
xorl %eax,%eax
xorl %esi,%ecx
# round 3: subkey P[14]
rorl $16,%ecx
movl 56(%edi),%esi
movb %ch,%al
movb %cl,%bl
rorl $16,%ecx
xorl %esi,%edx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %ch,%al
movb %cl,%bl
addl %ebp,%esi
movl 2120(%edi,%eax,4),%eax
xorl %eax,%esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp,%esi
xorl %eax,%eax
xorl %esi,%edx
# round 4: subkey P[13]
rorl $16,%edx
movl 52(%edi),%esi
movb %dh,%al
movb %dl,%bl
rorl $16,%edx
xorl %esi,%ecx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %dh,%al
movb %dl,%bl
addl %ebp,%esi
movl 2120(%edi,%eax,4),%eax
xorl %eax,%esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp,%esi
xorl %eax,%eax
xorl %esi,%ecx
# round 5: subkey P[12]
rorl $16,%ecx
movl 48(%edi),%esi
movb %ch,%al
movb %cl,%bl
rorl $16,%ecx
xorl %esi,%edx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %ch,%al
movb %cl,%bl
addl %ebp,%esi
movl 2120(%edi,%eax,4),%eax
xorl %eax,%esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp,%esi
xorl %eax,%eax
xorl %esi,%edx
# round 6: subkey P[11]
rorl $16,%edx
movl 44(%edi),%esi
movb %dh,%al
movb %dl,%bl
rorl $16,%edx
xorl %esi,%ecx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %dh,%al
movb %dl,%bl
addl %ebp,%esi
movl 2120(%edi,%eax,4),%eax
xorl %eax,%esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp,%esi
xorl %eax,%eax
xorl %esi,%ecx
# round 7: subkey P[10]
rorl $16,%ecx
movl 40(%edi),%esi
movb %ch,%al
movb %cl,%bl
rorl $16,%ecx
xorl %esi,%edx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %ch,%al
movb %cl,%bl
addl %ebp,%esi
movl 2120(%edi,%eax,4),%eax
xorl %eax,%esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp,%esi
xorl %eax,%eax
xorl %esi,%edx
# round 8: subkey P[9]
rorl $16,%edx
movl 36(%edi),%esi
movb %dh,%al
movb %dl,%bl
rorl $16,%edx
xorl %esi,%ecx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %dh,%al
movb %dl,%bl
addl %ebp,%esi
movl 2120(%edi,%eax,4),%eax
xorl %eax,%esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp,%esi
xorl %eax,%eax
xorl %esi,%ecx
# round 9: subkey P[8]
rorl $16,%ecx
movl 32(%edi),%esi
movb %ch,%al
movb %cl,%bl
rorl $16,%ecx
xorl %esi,%edx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %ch,%al
movb %cl,%bl
addl %ebp,%esi
movl 2120(%edi,%eax,4),%eax
xorl %eax,%esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp,%esi
xorl %eax,%eax
xorl %esi,%edx
# round 10: subkey P[7]
rorl $16,%edx
movl 28(%edi),%esi
movb %dh,%al
movb %dl,%bl
rorl $16,%edx
xorl %esi,%ecx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %dh,%al
movb %dl,%bl
addl %ebp,%esi
movl 2120(%edi,%eax,4),%eax
xorl %eax,%esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp,%esi
xorl %eax,%eax
xorl %esi,%ecx
# round 11: subkey P[6]
rorl $16,%ecx
movl 24(%edi),%esi
movb %ch,%al
movb %cl,%bl
rorl $16,%ecx
xorl %esi,%edx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %ch,%al
movb %cl,%bl
addl %ebp,%esi
movl 2120(%edi,%eax,4),%eax
xorl %eax,%esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp,%esi
xorl %eax,%eax
xorl %esi,%edx
# round 12: subkey P[5]
rorl $16,%edx
movl 20(%edi),%esi
movb %dh,%al
movb %dl,%bl
rorl $16,%edx
xorl %esi,%ecx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %dh,%al
movb %dl,%bl
addl %ebp,%esi
movl 2120(%edi,%eax,4),%eax
xorl %eax,%esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp,%esi
xorl %eax,%eax
xorl %esi,%ecx
# round 13: subkey P[4]
rorl $16,%ecx
movl 16(%edi),%esi
movb %ch,%al
movb %cl,%bl
rorl $16,%ecx
xorl %esi,%edx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %ch,%al
movb %cl,%bl
addl %ebp,%esi
movl 2120(%edi,%eax,4),%eax
xorl %eax,%esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp,%esi
xorl %eax,%eax
xorl %esi,%edx
# round 14: subkey P[3]
rorl $16,%edx
movl 12(%edi),%esi
movb %dh,%al
movb %dl,%bl
rorl $16,%edx
xorl %esi,%ecx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %dh,%al
movb %dl,%bl
addl %ebp,%esi
movl 2120(%edi,%eax,4),%eax
xorl %eax,%esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp,%esi
xorl %eax,%eax
xorl %esi,%ecx
# round 15: subkey P[2]
rorl $16,%ecx
movl 8(%edi),%esi
movb %ch,%al
movb %cl,%bl
rorl $16,%ecx
xorl %esi,%edx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %ch,%al
movb %cl,%bl
addl %ebp,%esi
movl 2120(%edi,%eax,4),%eax
xorl %eax,%esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp,%esi
xorl %eax,%eax
xorl %esi,%edx
# round 16: subkey P[1]
rorl $16,%edx
movl 4(%edi),%esi
movb %dh,%al
movb %dl,%bl
rorl $16,%edx
xorl %esi,%ecx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %dh,%al
movb %dl,%bl
addl %ebp,%esi
movl 2120(%edi,%eax,4),%eax
xorl %eax,%esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp,%esi
xorl %eax,%eax
xorl %esi,%ecx
xorl (%edi),%edx        # output whitening: xR ^= P[0]
movl 20(%esp),%eax
movl %edx,(%eax)        # final swap: data[0] = xR
movl %ecx,4(%eax)       # data[1] = xL
popl %edi
popl %esi
popl %ebx
popl %ebp
ret
.size BF_decrypt,.-.L_BF_decrypt_begin
.globl BF_cbc_encrypt
.type BF_cbc_encrypt,@function
.align 16
# void BF_cbc_encrypt(const unsigned char *in, unsigned char *out, long length,
#                     const BF_KEY *schedule, unsigned char *ivec, int enc)
# cdecl, i386 (AT&T syntax).  Blowfish CBC driver: enc != 0 encrypts,
# enc == 0 decrypts.  Builds a small frame on the stack:
#   0(%esp)      ptr to the scratch block (first arg for BF_encrypt/BF_decrypt)
#   4(%esp)      key schedule ptr        (second arg)
#   8-12(%esp)   scratch 8-byte block handed to the block function
#   16-20(%esp)  saved IV / previous ciphertext (used by the decrypt path)
# Fix vs. generated original: in the decrypt tail, dj3 used "shll $16,%ecx"
# (destroying bytes 0-1 before dj2/dj1 store them) and dj2/dj1 stored the
# final plaintext bytes through %esi (the INPUT buffer) instead of %edi.
# Both now mirror the correct dj7/dj6/dj5 sequence.
BF_cbc_encrypt:
.L_BF_cbc_encrypt_begin:
pushl %ebp
pushl %ebx
pushl %esi
pushl %edi
movl 28(%esp),%ebp          # ebp = length
movl 36(%esp),%ebx          # ebx = ivec
movl (%ebx),%esi            # load the 8-byte IV
movl 4(%ebx),%edi
pushl %edi                  # push two copies of the IV: one becomes the
pushl %esi                  # saved-IV slot (16-20), one the scratch
pushl %edi                  # block (8-12) passed to BF_encrypt/BF_decrypt
pushl %esi
movl %esp,%ebx              # ebx -> scratch block
movl 36(%esp),%esi          # esi = in   (arg offsets shifted by 4 pushes)
movl 40(%esp),%edi          # edi = out
movl 56(%esp),%ecx          # ecx = enc flag
movl 48(%esp),%eax          # eax = key schedule
pushl %eax                  # args for BF_encrypt/BF_decrypt:
pushl %ebx                  # (block ptr, key)
cmpl $0,%ecx
jz .L000decrypt
andl $4294967288,%ebp       # length & ~7: bytes in whole blocks
movl 8(%esp),%eax           # eax:ebx = chaining value (IV)
movl 12(%esp),%ebx
jz .L001encrypt_finish      # flags still from the andl above
.L002encrypt_loop:
movl (%esi),%ecx            # load plaintext block
movl 4(%esi),%edx
xorl %ecx,%eax              # CBC: xor with previous ciphertext / IV
xorl %edx,%ebx
bswap %eax                  # Blowfish works on big-endian words
bswap %ebx
movl %eax,8(%esp)
movl %ebx,12(%esp)
call .L_BF_encrypt_begin
movl 8(%esp),%eax
movl 12(%esp),%ebx
bswap %eax
bswap %ebx
movl %eax,(%edi)            # store ciphertext; eax:ebx stays as the
movl %ebx,4(%edi)           # next chaining value
addl $8,%esi
addl $8,%edi
subl $8,%ebp
jnz .L002encrypt_loop
.L001encrypt_finish:
movl 52(%esp),%ebp
andl $7,%ebp                # trailing byte count (0..7)
jz .L003finish
call .L004PIC_point         # PIC trick: get our own address for the table
.L004PIC_point:
popl %edx
leal .L005cbc_enc_jmp_table-.L004PIC_point(%edx),%ecx
movl (%ecx,%ebp,4),%ebp
addl %edx,%ebp
xorl %ecx,%ecx              # ecx:edx = zero-padded final partial block
xorl %edx,%edx
jmp *%ebp                   # dispatch on trailing byte count
.L006ej7:
movb 6(%esi),%dh
shll $8,%edx
.L007ej6:
movb 5(%esi),%dh
.L008ej5:
movb 4(%esi),%dl
.L009ej4:
movl (%esi),%ecx
jmp .L010ejend
.L011ej3:
movb 2(%esi),%ch
shll $8,%ecx
.L012ej2:
movb 1(%esi),%ch
.L013ej1:
movb (%esi),%cl
.L010ejend:
xorl %ecx,%eax              # encrypt the padded block as usual
xorl %edx,%ebx
bswap %eax
bswap %ebx
movl %eax,8(%esp)
movl %ebx,12(%esp)
call .L_BF_encrypt_begin
movl 8(%esp),%eax
movl 12(%esp),%ebx
bswap %eax
bswap %ebx
movl %eax,(%edi)            # a full ciphertext block is written
movl %ebx,4(%edi)
jmp .L003finish
.L000decrypt:
andl $4294967288,%ebp       # length & ~7: bytes in whole blocks
movl 16(%esp),%eax          # eax:ebx = chaining value (IV)
movl 20(%esp),%ebx
jz .L014decrypt_finish
.L015decrypt_loop:
movl (%esi),%eax            # load ciphertext block
movl 4(%esi),%ebx
bswap %eax
bswap %ebx
movl %eax,8(%esp)
movl %ebx,12(%esp)
call .L_BF_decrypt_begin
movl 8(%esp),%eax
movl 12(%esp),%ebx
bswap %eax
bswap %ebx
movl 16(%esp),%ecx          # previous ciphertext / IV
movl 20(%esp),%edx
xorl %eax,%ecx              # plaintext = D(c) ^ previous
xorl %ebx,%edx
movl (%esi),%eax            # current ciphertext becomes the next
movl 4(%esi),%ebx           # chaining value
movl %ecx,(%edi)
movl %edx,4(%edi)
movl %eax,16(%esp)
movl %ebx,20(%esp)
addl $8,%esi
addl $8,%edi
subl $8,%ebp
jnz .L015decrypt_loop
.L014decrypt_finish:
movl 52(%esp),%ebp
andl $7,%ebp                # trailing byte count (0..7)
jz .L003finish
movl (%esi),%eax            # decrypt one more whole block for the tail
movl 4(%esi),%ebx
bswap %eax
bswap %ebx
movl %eax,8(%esp)
movl %ebx,12(%esp)
call .L_BF_decrypt_begin
movl 8(%esp),%eax
movl 12(%esp),%ebx
bswap %eax
bswap %ebx
movl 16(%esp),%ecx
movl 20(%esp),%edx
xorl %eax,%ecx              # ecx = plaintext bytes 0-3, edx = bytes 4-7
xorl %ebx,%edx
movl (%esi),%eax            # keep the ciphertext as the new IV
movl 4(%esi),%ebx
# NOTE(review): unlike the encrypt tail there is no length dispatch here -
# execution always falls through dj7..dj4 (writing 7 bytes) and dj3..dj1
# are unreachable.  This matches the generator's structure; confirm against
# upstream perlasm/cbc.pl before relying on partial-block decryption.
.L016dj7:
rorl $16,%edx
movb %dl,6(%edi)            # out[6]
shrl $16,%edx               # restore bytes 4-5 into the low word
.L017dj6:
movb %dh,5(%edi)            # out[5]
.L018dj5:
movb %dl,4(%edi)            # out[4]
.L019dj4:
movl %ecx,(%edi)            # out[0..3]
jmp .L020djend
.L021dj3:
rorl $16,%ecx
movb %cl,2(%edi)            # out[2]
shrl $16,%ecx               # was shll: restore bytes 0-1 for dj2/dj1 (mirrors dj7)
.L022dj2:
movb %ch,1(%edi)            # was 1(%esi): tail bytes belong in the output buffer
.L023dj1:
movb %cl,(%edi)             # was (%esi): ditto
.L020djend:
jmp .L003finish
.L003finish:
movl 60(%esp),%ecx          # ecx = ivec: store back the chaining value
addl $24,%esp               # drop the 6-dword frame
movl %eax,(%ecx)
movl %ebx,4(%ecx)
popl %edi
popl %esi
popl %ebx
popl %ebp
ret
.align 64
.L005cbc_enc_jmp_table:     # encrypt-tail dispatch on trailing byte count (1..7)
.long 0
.long .L013ej1-.L004PIC_point
.long .L012ej2-.L004PIC_point
.long .L011ej3-.L004PIC_point
.long .L009ej4-.L004PIC_point
.long .L008ej5-.L004PIC_point
.long .L007ej6-.L004PIC_point
.long .L006ej7-.L004PIC_point
.align 64
.size BF_cbc_encrypt,.-.L_BF_cbc_encrypt_begin

338
deps/openssl/asm/x86-elf-gas/bn/x86-mont.s

@@ -0,0 +1,338 @@
# Machine-generated Montgomery multiplication for 32-bit x86 (ELF/GAS,
# AT&T syntax), produced from OpenSSL's crypto/bn/asm/x86-mont.pl
# (CRYPTOGAMS by Andy Polyakov).  Do not hand-edit the instruction stream;
# regenerate from the perl source instead.
.file "../openssl/crypto/bn/asm/x86-mont.s"
.text
.globl bn_mul_mont
.type bn_mul_mont,@function
.align 16
# int bn_mul_mont(BN_ULONG *rp, const BN_ULONG *ap, const BN_ULONG *bp,
#                 const BN_ULONG *np, const BN_ULONG *n0, int num)  [cdecl]
# Computes rp[] = ap[] * bp[] / R mod np[] (word-by-word Montgomery
# reduction).  Returns 0 in %eax when num < 4 (caller must fall back to a
# generic path), 1 on success.  A dedicated squaring path is taken when
# ap == bp.
bn_mul_mont:
.L_bn_mul_mont_begin:
pushl %ebp
pushl %ebx
pushl %esi
pushl %edi
xorl %eax,%eax                          # default return value: 0 = "not handled"
movl 40(%esp),%edi                      # edi = num (6th argument)
cmpl $4,%edi
jl .L000just_leave                      # too small for this code path
leal 20(%esp),%esi                      # esi -> argument block (rp is first)
leal 24(%esp),%edx                      # edx -> ap argument slot
movl %esp,%ebp                          # remember caller's stack pointer
addl $2,%edi
negl %edi
leal -32(%esp,%edi,4),%esp              # carve out 4*(num+2)+32 bytes of scratch
negl %edi
# Condition the new %esp: keep a 2KB distance pattern relative to the
# argument area and toggle the 2KB "page half" bit so tp[] does not alias
# the same cache/page slice as the inputs, then align to 64 bytes.
movl %esp,%eax
subl %edx,%eax
andl $2047,%eax
subl %eax,%esp
xorl %esp,%edx
andl $2048,%edx
xorl $2048,%edx
subl %edx,%esp
andl $-64,%esp
# Copy arguments into the local frame:
#   4(%esp)=rp  8(%esp)=ap  12(%esp)=bp  16(%esp)=np  20(%esp)=*n0 (the word)
#   24(%esp)=saved %esp     28(%esp)=&bp[num] (set below, mul path only)
#   32(%esp)... = tp[], the num+2 word temporary vector
movl (%esi),%eax
movl 4(%esi),%ebx
movl 8(%esi),%ecx
movl 12(%esi),%edx
movl 16(%esi),%esi
movl (%esi),%esi                        # dereference n0 pointer -> n0 word
movl %eax,4(%esp)
movl %ebx,8(%esp)
movl %ecx,12(%esp)
movl %edx,16(%esp)
movl %esi,20(%esp)
leal -3(%edi),%ebx                      # ebx = num-1 (index of the top word)
movl %ebp,24(%esp)                      # for the epilogue's stack restore
movl 8(%esp),%esi                       # esi = ap
leal 1(%ebx),%ebp                       # ebp = num
movl 12(%esp),%edi                      # edi = bp
xorl %ecx,%ecx
movl %esi,%edx
andl $1,%ebp                            # num odd?
subl %edi,%edx                          # ap - bp (zero when squaring)
leal 4(%edi,%ebx,4),%eax                # eax = &bp[num] (end sentinel)
orl %edx,%ebp                           # zero iff ap==bp and num even
movl (%edi),%edi                        # edi = bp[0], first multiplier word
jz .L001bn_sqr_mont                     # squaring: take the dedicated path
movl %eax,28(%esp)
movl (%esi),%eax
xorl %edx,%edx
.align 16
# First pass: tp[] = ap[] * bp[0].  ecx = word index, edx = running carry.
.L002mull:
movl %edx,%ebp
mull %edi
addl %eax,%ebp
leal 1(%ecx),%ecx
adcl $0,%edx
movl (%esi,%ecx,4),%eax
cmpl %ebx,%ecx
movl %ebp,28(%esp,%ecx,4)               # tp[ecx-1] = low word
jl .L002mull
# Finish the top word of the first pass, then start Montgomery reduction:
# edi = m = tp[0]*n0 mod 2^32, esi = np.
movl %edx,%ebp
mull %edi
movl 20(%esp),%edi                      # edi = n0
addl %ebp,%eax
movl 16(%esp),%esi                      # esi = np
adcl $0,%edx
imull 32(%esp),%edi                     # m = tp[0]*n0 (mod 2^32)
movl %eax,32(%esp,%ebx,4)               # tp[num-1]
xorl %ecx,%ecx
movl %edx,36(%esp,%ebx,4)               # tp[num]
movl %ecx,40(%esp,%ebx,4)               # tp[num+1] = 0
movl (%esi),%eax
mull %edi                               # m * np[0]; low word cancels tp[0]
addl 32(%esp),%eax
movl 4(%esi),%eax
adcl $0,%edx
incl %ecx
jmp .L0032ndmadd
.align 16
# Outer-loop body, first half: tp[] += ap[] * bp[i] (madd with carry).
.L0041stmadd:
movl %edx,%ebp
mull %edi
addl 32(%esp,%ecx,4),%ebp
leal 1(%ecx),%ecx
adcl $0,%edx
addl %eax,%ebp
movl (%esi,%ecx,4),%eax
adcl $0,%edx
cmpl %ebx,%ecx
movl %ebp,28(%esp,%ecx,4)
jl .L0041stmadd
movl %edx,%ebp
mull %edi
addl 32(%esp,%ebx,4),%eax
movl 20(%esp),%edi                      # edi = n0
adcl $0,%edx
movl 16(%esp),%esi                      # esi = np
addl %eax,%ebp
adcl $0,%edx
imull 32(%esp),%edi                     # m = tp[0]*n0 for this iteration
xorl %ecx,%ecx
addl 36(%esp,%ebx,4),%edx
movl %ebp,32(%esp,%ebx,4)
adcl $0,%ecx
movl (%esi),%eax
movl %edx,36(%esp,%ebx,4)
movl %ecx,40(%esp,%ebx,4)
mull %edi
addl 32(%esp),%eax
movl 4(%esi),%eax
adcl $0,%edx
movl $1,%ecx
.align 16
# Outer-loop body, second half: tp[] = (tp[] + m*np[]) >> 32 (reduction).
.L0032ndmadd:
movl %edx,%ebp
mull %edi
addl 32(%esp,%ecx,4),%ebp
leal 1(%ecx),%ecx
adcl $0,%edx
addl %eax,%ebp
movl (%esi,%ecx,4),%eax
adcl $0,%edx
cmpl %ebx,%ecx
movl %ebp,24(%esp,%ecx,4)               # note the shift-down by one word
jl .L0032ndmadd
movl %edx,%ebp
mull %edi
addl 32(%esp,%ebx,4),%ebp
adcl $0,%edx
addl %eax,%ebp
adcl $0,%edx
movl %ebp,28(%esp,%ebx,4)
xorl %eax,%eax
movl 12(%esp),%ecx                      # ecx = current bp position
addl 36(%esp,%ebx,4),%edx
adcl 40(%esp,%ebx,4),%eax
leal 4(%ecx),%ecx                       # advance to bp[i+1]
movl %edx,32(%esp,%ebx,4)
cmpl 28(%esp),%ecx                      # reached &bp[num]?
movl %eax,36(%esp,%ebx,4)
je .L005common_tail
movl (%ecx),%edi                        # edi = next multiplier word bp[i]
movl 8(%esp),%esi                       # esi = ap
movl %ecx,12(%esp)
xorl %ecx,%ecx
xorl %edx,%edx
movl (%esi),%eax
jmp .L0041stmadd
.align 16
# Squaring path (ap == bp): exploits symmetry, doubling the cross products.
# (%esp) = num-1, 12(%esp) = current outer index.
.L001bn_sqr_mont:
movl %ebx,(%esp)
movl %ecx,12(%esp)
movl %edi,%eax
mull %edi                               # ap[0]^2
movl %eax,32(%esp)
movl %edx,%ebx                          # ebx keeps the doubled-carry low bit
shrl $1,%edx
andl $1,%ebx
incl %ecx
.align 16
# tp[j] = 2*ap[j]*ap[0] (+carry), j = 1..num-1.
.L006sqr:
movl (%esi,%ecx,4),%eax
movl %edx,%ebp
mull %edi
addl %ebp,%eax
leal 1(%ecx),%ecx
adcl $0,%edx
leal (%ebx,%eax,2),%ebp                 # double and add saved bit
shrl $31,%eax
cmpl (%esp),%ecx
movl %eax,%ebx                          # carry bit for next doubling
movl %ebp,28(%esp,%ecx,4)
jl .L006sqr
# Top word of the first squaring pass, then begin reduction with
# m = tp[0]*n0, as in the multiplication path.
movl (%esi,%ecx,4),%eax
movl %edx,%ebp
mull %edi
addl %ebp,%eax
movl 20(%esp),%edi                      # edi = n0
adcl $0,%edx
movl 16(%esp),%esi                      # esi = np
leal (%ebx,%eax,2),%ebp
imull 32(%esp),%edi                     # m = tp[0]*n0
shrl $31,%eax
movl %ebp,32(%esp,%ecx,4)
leal (%eax,%edx,2),%ebp
movl (%esi),%eax
shrl $31,%edx
movl %ebp,36(%esp,%ecx,4)
movl %edx,40(%esp,%ecx,4)
mull %edi
addl 32(%esp),%eax
movl %ecx,%ebx                          # ebx = num-1 for the madd loop
adcl $0,%edx
movl 4(%esi),%eax
movl $1,%ecx
.align 16
# Reduction loop shared by the squaring path: tp = (tp + m*np) >> 32.
# Unrolled two words per iteration.
.L0073rdmadd:
movl %edx,%ebp
mull %edi
addl 32(%esp,%ecx,4),%ebp
adcl $0,%edx
addl %eax,%ebp
movl 4(%esi,%ecx,4),%eax
adcl $0,%edx
movl %ebp,28(%esp,%ecx,4)
movl %edx,%ebp
mull %edi
addl 36(%esp,%ecx,4),%ebp
leal 2(%ecx),%ecx
adcl $0,%edx
addl %eax,%ebp
movl (%esi,%ecx,4),%eax
adcl $0,%edx
cmpl %ebx,%ecx
movl %ebp,24(%esp,%ecx,4)
jl .L0073rdmadd
movl %edx,%ebp
mull %edi
addl 32(%esp,%ebx,4),%ebp
adcl $0,%edx
addl %eax,%ebp
adcl $0,%edx
movl %ebp,28(%esp,%ebx,4)
movl 12(%esp),%ecx                      # ecx = outer index i
xorl %eax,%eax
movl 8(%esp),%esi                       # esi = ap
addl 36(%esp,%ebx,4),%edx
adcl 40(%esp,%ebx,4),%eax
movl %edx,32(%esp,%ebx,4)
cmpl %ebx,%ecx                          # done all num outer iterations?
movl %eax,36(%esp,%ebx,4)
je .L005common_tail
# Next squaring outer iteration: add ap[i]^2 plus doubled cross terms.
movl 4(%esi,%ecx,4),%edi                # edi = ap[i]
leal 1(%ecx),%ecx
movl %edi,%eax
movl %ecx,12(%esp)
mull %edi                               # ap[i]^2
addl 32(%esp,%ecx,4),%eax
adcl $0,%edx
movl %eax,32(%esp,%ecx,4)
xorl %ebp,%ebp
cmpl %ebx,%ecx
leal 1(%ecx),%ecx
je .L008sqrlast
movl %edx,%ebx                          # save doubling bit, halve carry
shrl $1,%edx
andl $1,%ebx
.align 16
# tp[j] += 2*ap[j]*ap[i] for j = i+1..num-1, with bit-carry tracking.
.L009sqradd:
movl (%esi,%ecx,4),%eax
movl %edx,%ebp
mull %edi
addl %ebp,%eax
leal (%eax,%eax,1),%ebp                 # double the low word
adcl $0,%edx
shrl $31,%eax
addl 32(%esp,%ecx,4),%ebp
leal 1(%ecx),%ecx
adcl $0,%eax
addl %ebx,%ebp
adcl $0,%eax
cmpl (%esp),%ecx
movl %ebp,28(%esp,%ecx,4)
movl %eax,%ebx
jle .L009sqradd
movl %edx,%ebp
addl %edx,%edx
shrl $31,%ebp
addl %ebx,%edx
adcl $0,%ebp
# Fold in the final carry words and run one last reduction pass.
.L008sqrlast:
movl 20(%esp),%edi                      # edi = n0
movl 16(%esp),%esi                      # esi = np
imull 32(%esp),%edi                     # m = tp[0]*n0
addl 32(%esp,%ecx,4),%edx
movl (%esi),%eax
adcl $0,%ebp
movl %edx,32(%esp,%ecx,4)
movl %ebp,36(%esp,%ecx,4)
mull %edi
addl 32(%esp),%eax
leal -1(%ecx),%ebx
adcl $0,%edx
movl $1,%ecx
movl 4(%esi),%eax
jmp .L0073rdmadd
.align 16
# Common tail: conditionally subtract np[] from tp[] and copy the result
# to rp[] with a branch-free select (constant-time with respect to the
# borrow), then wipe tp[].
.L005common_tail:
movl 16(%esp),%ebp                      # ebp = np
movl 4(%esp),%edi                       # edi = rp
leal 32(%esp),%esi                      # esi = tp
movl (%esi),%eax
movl %ebx,%ecx                          # ecx = num-1 loop counter
xorl %edx,%edx
.align 16
# rp[] = tp[] - np[] (borrow propagates through sbbl; CF clear on entry).
.L010sub:
sbbl (%ebp,%edx,4),%eax
movl %eax,(%edi,%edx,4)
decl %ecx
movl 4(%esi,%edx,4),%eax
leal 1(%edx),%edx
jge .L010sub
sbbl $0,%eax                            # eax = 0 if tp >= np, else all-ones
# Select source for the final copy without branching:
# esi = (borrow ? tp : rp), i.e. keep the subtracted value only if valid.
andl %eax,%esi
notl %eax
movl %edi,%ebp
andl %eax,%ebp
orl %ebp,%esi
.align 16
# Copy the selected value into rp[] and clear the tp[] scratch words.
.L011copy:
movl (%esi,%ebx,4),%eax
movl %eax,(%edi,%ebx,4)
movl %ecx,32(%esp,%ebx,4)               # scrub tp[] (ecx is -1 here)
decl %ebx
jge .L011copy
movl 24(%esp),%esp                      # restore caller's stack pointer
movl $1,%eax                            # success
.L000just_leave:
popl %edi
popl %esi
popl %ebx
popl %ebp
ret
.size bn_mul_mont,.-.L_bn_mul_mont_begin
# ASCII banner: "Montgomery Multiplication for x86, CRYPTOGAMS by <appro@openssl.org>"
.byte 77,111,110,116,103,111,109,101,114,121,32,77,117,108,116,105
.byte 112,108,105,99,97,116,105,111,110,32,102,111,114,32,120,56
.byte 54,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121
.byte 32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46
.byte 111,114,103,62,0

2114
deps/openssl/asm/x86-elf-gas/bn/x86.s

File diff suppressed because it is too large

2375
deps/openssl/asm/x86-elf-gas/camellia/cmll-x86.s

File diff suppressed because it is too large

933
deps/openssl/asm/x86-elf-gas/cast/cast-586.s

@@ -0,0 +1,933 @@
# Machine-generated CAST-128 (CAST5) for 32-bit x86 (ELF/GAS, AT&T syntax),
# produced from OpenSSL's crypto/cast/asm/cast-586.pl.  Do not hand-edit.
#
# Round functions (RFC 2144):
#   F1: I = rol(Km + D, Kr);  f = ((S1[Ia] ^ S2[Ib]) - S3[Ic]) + S4[Id]
#   F2: I = rol(Km ^ D, Kr);  f = ((S1[Ia] - S2[Ib]) + S3[Ic]) ^ S4[Id]
#   F3: I = rol(Km - D, Kr);  f = ((S1[Ia] + S2[Ib]) ^ S3[Ic]) - S4[Id]
# The subkey schedule stores Km at 8*i(%ebp) and Kr at 8*i+4(%ebp);
# 128(%ebp) is the short-key flag (nonzero => 12 rounds instead of 16).
.file "cast-586.s"
.text
.globl CAST_encrypt
.type CAST_encrypt,@function
.align 16
# void CAST_encrypt(CAST_LONG *data, const CAST_KEY *key)  [cdecl]
# Encrypts the two 32-bit words at data[] in place.
# Register roles: edi = L half, esi = R half, ebp = key schedule,
# eax/ebx/ecx/edx = round scratch.
CAST_encrypt:
.L_CAST_encrypt_begin:
pushl %ebp
pushl %ebx
movl 12(%esp),%ebx                      # ebx = data
movl 16(%esp),%ebp                      # ebp = key schedule
pushl %esi
pushl %edi
movl (%ebx),%edi                        # edi = data[0] (L)
movl 4(%ebx),%esi                       # esi = data[1] (R)
movl 128(%ebp),%eax                     # short-key flag
pushl %eax                              # saved; checked after round 12
xorl %eax,%eax
# round 1: F1, Km/Kr at 0/4(%ebp), updates L (edi)
movl (%ebp),%edx
movl 4(%ebp),%ecx
addl %esi,%edx
roll %cl,%edx
movl %edx,%ebx
xorl %ecx,%ecx
movb %dh,%cl
andl $255,%ebx
shrl $16,%edx
xorl %eax,%eax
movb %dh,%al
andl $255,%edx
movl CAST_S_table0(,%ecx,4),%ecx
movl CAST_S_table1(,%ebx,4),%ebx
xorl %ebx,%ecx
movl CAST_S_table2(,%eax,4),%ebx
subl %ebx,%ecx
movl CAST_S_table3(,%edx,4),%ebx
addl %ebx,%ecx
xorl %ecx,%edi
# round 2: F2, Km/Kr at 8/12(%ebp), updates R (esi)
movl 8(%ebp),%edx
movl 12(%ebp),%ecx
xorl %edi,%edx
roll %cl,%edx
movl %edx,%ebx
xorl %ecx,%ecx
movb %dh,%cl
andl $255,%ebx
shrl $16,%edx
xorl %eax,%eax
movb %dh,%al
andl $255,%edx
movl CAST_S_table0(,%ecx,4),%ecx
movl CAST_S_table1(,%ebx,4),%ebx
subl %ebx,%ecx
movl CAST_S_table2(,%eax,4),%ebx
addl %ebx,%ecx
movl CAST_S_table3(,%edx,4),%ebx
xorl %ebx,%ecx
xorl %ecx,%esi
# round 3: F3, Km/Kr at 16/20(%ebp), updates L
movl 16(%ebp),%edx
movl 20(%ebp),%ecx
subl %esi,%edx
roll %cl,%edx
movl %edx,%ebx
xorl %ecx,%ecx
movb %dh,%cl
andl $255,%ebx
shrl $16,%edx
xorl %eax,%eax
movb %dh,%al
andl $255,%edx
movl CAST_S_table0(,%ecx,4),%ecx
movl CAST_S_table1(,%ebx,4),%ebx
addl %ebx,%ecx
movl CAST_S_table2(,%eax,4),%ebx
xorl %ebx,%ecx
movl CAST_S_table3(,%edx,4),%ebx
subl %ebx,%ecx
xorl %ecx,%edi
# round 4: F1, Km/Kr at 24/28(%ebp), updates R
movl 24(%ebp),%edx
movl 28(%ebp),%ecx
addl %edi,%edx
roll %cl,%edx
movl %edx,%ebx
xorl %ecx,%ecx
movb %dh,%cl
andl $255,%ebx
shrl $16,%edx
xorl %eax,%eax
movb %dh,%al
andl $255,%edx
movl CAST_S_table0(,%ecx,4),%ecx
movl CAST_S_table1(,%ebx,4),%ebx
xorl %ebx,%ecx
movl CAST_S_table2(,%eax,4),%ebx
subl %ebx,%ecx
movl CAST_S_table3(,%edx,4),%ebx
addl %ebx,%ecx
xorl %ecx,%esi
# round 5: F2, Km/Kr at 32/36(%ebp), updates L
movl 32(%ebp),%edx
movl 36(%ebp),%ecx
xorl %esi,%edx
roll %cl,%edx
movl %edx,%ebx
xorl %ecx,%ecx
movb %dh,%cl
andl $255,%ebx
shrl $16,%edx
xorl %eax,%eax
movb %dh,%al
andl $255,%edx
movl CAST_S_table0(,%ecx,4),%ecx
movl CAST_S_table1(,%ebx,4),%ebx
subl %ebx,%ecx
movl CAST_S_table2(,%eax,4),%ebx
addl %ebx,%ecx
movl CAST_S_table3(,%edx,4),%ebx
xorl %ebx,%ecx
xorl %ecx,%edi
# round 6: F3, Km/Kr at 40/44(%ebp), updates R
movl 40(%ebp),%edx
movl 44(%ebp),%ecx
subl %edi,%edx
roll %cl,%edx
movl %edx,%ebx
xorl %ecx,%ecx
movb %dh,%cl
andl $255,%ebx
shrl $16,%edx
xorl %eax,%eax
movb %dh,%al
andl $255,%edx
movl CAST_S_table0(,%ecx,4),%ecx
movl CAST_S_table1(,%ebx,4),%ebx
addl %ebx,%ecx
movl CAST_S_table2(,%eax,4),%ebx
xorl %ebx,%ecx
movl CAST_S_table3(,%edx,4),%ebx
subl %ebx,%ecx
xorl %ecx,%esi
# round 7: F1, Km/Kr at 48/52(%ebp), updates L
movl 48(%ebp),%edx
movl 52(%ebp),%ecx
addl %esi,%edx
roll %cl,%edx
movl %edx,%ebx
xorl %ecx,%ecx
movb %dh,%cl
andl $255,%ebx
shrl $16,%edx
xorl %eax,%eax
movb %dh,%al
andl $255,%edx
movl CAST_S_table0(,%ecx,4),%ecx
movl CAST_S_table1(,%ebx,4),%ebx
xorl %ebx,%ecx
movl CAST_S_table2(,%eax,4),%ebx
subl %ebx,%ecx
movl CAST_S_table3(,%edx,4),%ebx
addl %ebx,%ecx
xorl %ecx,%edi
# round 8: F2, Km/Kr at 56/60(%ebp), updates R
movl 56(%ebp),%edx
movl 60(%ebp),%ecx
xorl %edi,%edx
roll %cl,%edx
movl %edx,%ebx
xorl %ecx,%ecx
movb %dh,%cl
andl $255,%ebx
shrl $16,%edx
xorl %eax,%eax
movb %dh,%al
andl $255,%edx
movl CAST_S_table0(,%ecx,4),%ecx
movl CAST_S_table1(,%ebx,4),%ebx
subl %ebx,%ecx
movl CAST_S_table2(,%eax,4),%ebx
addl %ebx,%ecx
movl CAST_S_table3(,%edx,4),%ebx
xorl %ebx,%ecx
xorl %ecx,%esi
# round 9: F3, Km/Kr at 64/68(%ebp), updates L
movl 64(%ebp),%edx
movl 68(%ebp),%ecx
subl %esi,%edx
roll %cl,%edx
movl %edx,%ebx
xorl %ecx,%ecx
movb %dh,%cl
andl $255,%ebx
shrl $16,%edx
xorl %eax,%eax
movb %dh,%al
andl $255,%edx
movl CAST_S_table0(,%ecx,4),%ecx
movl CAST_S_table1(,%ebx,4),%ebx
addl %ebx,%ecx
movl CAST_S_table2(,%eax,4),%ebx
xorl %ebx,%ecx
movl CAST_S_table3(,%edx,4),%ebx
subl %ebx,%ecx
xorl %ecx,%edi
# round 10: F1, Km/Kr at 72/76(%ebp), updates R
movl 72(%ebp),%edx
movl 76(%ebp),%ecx
addl %edi,%edx
roll %cl,%edx
movl %edx,%ebx
xorl %ecx,%ecx
movb %dh,%cl
andl $255,%ebx
shrl $16,%edx
xorl %eax,%eax
movb %dh,%al
andl $255,%edx
movl CAST_S_table0(,%ecx,4),%ecx
movl CAST_S_table1(,%ebx,4),%ebx
xorl %ebx,%ecx
movl CAST_S_table2(,%eax,4),%ebx
subl %ebx,%ecx
movl CAST_S_table3(,%edx,4),%ebx
addl %ebx,%ecx
xorl %ecx,%esi
# round 11: F2, Km/Kr at 80/84(%ebp), updates L
movl 80(%ebp),%edx
movl 84(%ebp),%ecx
xorl %esi,%edx
roll %cl,%edx
movl %edx,%ebx
xorl %ecx,%ecx
movb %dh,%cl
andl $255,%ebx
shrl $16,%edx
xorl %eax,%eax
movb %dh,%al
andl $255,%edx
movl CAST_S_table0(,%ecx,4),%ecx
movl CAST_S_table1(,%ebx,4),%ebx
subl %ebx,%ecx
movl CAST_S_table2(,%eax,4),%ebx
addl %ebx,%ecx
movl CAST_S_table3(,%edx,4),%ebx
xorl %ebx,%ecx
xorl %ecx,%edi
# round 12: F3, Km/Kr at 88/92(%ebp), updates R
movl 88(%ebp),%edx
movl 92(%ebp),%ecx
subl %edi,%edx
roll %cl,%edx
movl %edx,%ebx
xorl %ecx,%ecx
movb %dh,%cl
andl $255,%ebx
shrl $16,%edx
xorl %eax,%eax
movb %dh,%al
andl $255,%edx
movl CAST_S_table0(,%ecx,4),%ecx
movl CAST_S_table1(,%ebx,4),%ebx
addl %ebx,%ecx
movl CAST_S_table2(,%eax,4),%ebx
xorl %ebx,%ecx
movl CAST_S_table3(,%edx,4),%ebx
subl %ebx,%ecx
xorl %ecx,%esi
# Short-key (<= 80 bits) ciphers stop after 12 rounds.
popl %edx
orl %edx,%edx
jnz .L000cast_enc_done
# round 13: F1, Km/Kr at 96/100(%ebp), updates L
movl 96(%ebp),%edx
movl 100(%ebp),%ecx
addl %esi,%edx
roll %cl,%edx
movl %edx,%ebx
xorl %ecx,%ecx
movb %dh,%cl
andl $255,%ebx
shrl $16,%edx
xorl %eax,%eax
movb %dh,%al
andl $255,%edx
movl CAST_S_table0(,%ecx,4),%ecx
movl CAST_S_table1(,%ebx,4),%ebx
xorl %ebx,%ecx
movl CAST_S_table2(,%eax,4),%ebx
subl %ebx,%ecx
movl CAST_S_table3(,%edx,4),%ebx
addl %ebx,%ecx
xorl %ecx,%edi
# round 14: F2, Km/Kr at 104/108(%ebp), updates R
movl 104(%ebp),%edx
movl 108(%ebp),%ecx
xorl %edi,%edx
roll %cl,%edx
movl %edx,%ebx
xorl %ecx,%ecx
movb %dh,%cl
andl $255,%ebx
shrl $16,%edx
xorl %eax,%eax
movb %dh,%al
andl $255,%edx
movl CAST_S_table0(,%ecx,4),%ecx
movl CAST_S_table1(,%ebx,4),%ebx
subl %ebx,%ecx
movl CAST_S_table2(,%eax,4),%ebx
addl %ebx,%ecx
movl CAST_S_table3(,%edx,4),%ebx
xorl %ebx,%ecx
xorl %ecx,%esi
# round 15: F3, Km/Kr at 112/116(%ebp), updates L
movl 112(%ebp),%edx
movl 116(%ebp),%ecx
subl %esi,%edx
roll %cl,%edx
movl %edx,%ebx
xorl %ecx,%ecx
movb %dh,%cl
andl $255,%ebx
shrl $16,%edx
xorl %eax,%eax
movb %dh,%al
andl $255,%edx
movl CAST_S_table0(,%ecx,4),%ecx
movl CAST_S_table1(,%ebx,4),%ebx
addl %ebx,%ecx
movl CAST_S_table2(,%eax,4),%ebx
xorl %ebx,%ecx
movl CAST_S_table3(,%edx,4),%ebx
subl %ebx,%ecx
xorl %ecx,%edi
# round 16: F1, Km/Kr at 120/124(%ebp), updates R
movl 120(%ebp),%edx
movl 124(%ebp),%ecx
addl %edi,%edx
roll %cl,%edx
movl %edx,%ebx
xorl %ecx,%ecx
movb %dh,%cl
andl $255,%ebx
shrl $16,%edx
xorl %eax,%eax
movb %dh,%al
andl $255,%edx
movl CAST_S_table0(,%ecx,4),%ecx
movl CAST_S_table1(,%ebx,4),%ebx
xorl %ebx,%ecx
movl CAST_S_table2(,%eax,4),%ebx
subl %ebx,%ecx
movl CAST_S_table3(,%edx,4),%ebx
addl %ebx,%ecx
xorl %ecx,%esi
.L000cast_enc_done:
nop
# Store swapped halves: data[0] = R', data[1] = L' (final swap of the
# Feistel network).
movl 20(%esp),%eax
movl %edi,4(%eax)
movl %esi,(%eax)
popl %edi
popl %esi
popl %ebx
popl %ebp
ret
.size CAST_encrypt,.-.L_CAST_encrypt_begin
.globl CAST_decrypt
.type CAST_decrypt,@function
.align 16
# void CAST_decrypt(CAST_LONG *data, const CAST_KEY *key)  [cdecl]
# Inverse of CAST_encrypt: applies the rounds with the same subkeys in
# reverse order (16..1; rounds 16..13 are skipped for short keys, flag at
# 128(%ebp)).  Register roles as in CAST_encrypt: edi/esi = data halves,
# ebp = key schedule.
CAST_decrypt:
.L_CAST_decrypt_begin:
pushl %ebp
pushl %ebx
movl 12(%esp),%ebx                      # ebx = data
movl 16(%esp),%ebp                      # ebp = key schedule
pushl %esi
pushl %edi
movl (%ebx),%edi                        # edi = data[0]
movl 4(%ebx),%esi                       # esi = data[1]
movl 128(%ebp),%eax                     # short-key flag
orl %eax,%eax
jnz .L001cast_dec_skip                  # 12-round cipher: skip rounds 16..13
xorl %eax,%eax
# round 16: F1, Km/Kr at 120/124(%ebp), updates edi
movl 120(%ebp),%edx
movl 124(%ebp),%ecx
addl %esi,%edx
roll %cl,%edx
movl %edx,%ebx
xorl %ecx,%ecx
movb %dh,%cl
andl $255,%ebx
shrl $16,%edx
xorl %eax,%eax
movb %dh,%al
andl $255,%edx
movl CAST_S_table0(,%ecx,4),%ecx
movl CAST_S_table1(,%ebx,4),%ebx
xorl %ebx,%ecx
movl CAST_S_table2(,%eax,4),%ebx
subl %ebx,%ecx
movl CAST_S_table3(,%edx,4),%ebx
addl %ebx,%ecx
xorl %ecx,%edi
# round 15: F3, Km/Kr at 112/116(%ebp), updates esi
movl 112(%ebp),%edx
movl 116(%ebp),%ecx
subl %edi,%edx
roll %cl,%edx
movl %edx,%ebx
xorl %ecx,%ecx
movb %dh,%cl
andl $255,%ebx
shrl $16,%edx
xorl %eax,%eax
movb %dh,%al
andl $255,%edx
movl CAST_S_table0(,%ecx,4),%ecx
movl CAST_S_table1(,%ebx,4),%ebx
addl %ebx,%ecx
movl CAST_S_table2(,%eax,4),%ebx
xorl %ebx,%ecx
movl CAST_S_table3(,%edx,4),%ebx
subl %ebx,%ecx
xorl %ecx,%esi
# round 14: F2, Km/Kr at 104/108(%ebp), updates edi
movl 104(%ebp),%edx
movl 108(%ebp),%ecx
xorl %esi,%edx
roll %cl,%edx
movl %edx,%ebx
xorl %ecx,%ecx
movb %dh,%cl
andl $255,%ebx
shrl $16,%edx
xorl %eax,%eax
movb %dh,%al
andl $255,%edx
movl CAST_S_table0(,%ecx,4),%ecx
movl CAST_S_table1(,%ebx,4),%ebx
subl %ebx,%ecx
movl CAST_S_table2(,%eax,4),%ebx
addl %ebx,%ecx
movl CAST_S_table3(,%edx,4),%ebx
xorl %ebx,%ecx
xorl %ecx,%edi
# round 13: F1, Km/Kr at 96/100(%ebp), updates esi
movl 96(%ebp),%edx
movl 100(%ebp),%ecx
addl %edi,%edx
roll %cl,%edx
movl %edx,%ebx
xorl %ecx,%ecx
movb %dh,%cl
andl $255,%ebx
shrl $16,%edx
xorl %eax,%eax
movb %dh,%al
andl $255,%edx
movl CAST_S_table0(,%ecx,4),%ecx
movl CAST_S_table1(,%ebx,4),%ebx
xorl %ebx,%ecx
movl CAST_S_table2(,%eax,4),%ebx
subl %ebx,%ecx
movl CAST_S_table3(,%edx,4),%ebx
addl %ebx,%ecx
xorl %ecx,%esi
.L001cast_dec_skip:
# round 12: F3, Km/Kr at 88/92(%ebp), updates edi
movl 88(%ebp),%edx
movl 92(%ebp),%ecx
subl %esi,%edx
roll %cl,%edx
movl %edx,%ebx
xorl %ecx,%ecx
movb %dh,%cl
andl $255,%ebx
shrl $16,%edx
xorl %eax,%eax
movb %dh,%al
andl $255,%edx
movl CAST_S_table0(,%ecx,4),%ecx
movl CAST_S_table1(,%ebx,4),%ebx
addl %ebx,%ecx
movl CAST_S_table2(,%eax,4),%ebx
xorl %ebx,%ecx
movl CAST_S_table3(,%edx,4),%ebx
subl %ebx,%ecx
xorl %ecx,%edi
# round 11: F2, Km/Kr at 80/84(%ebp), updates esi
movl 80(%ebp),%edx
movl 84(%ebp),%ecx
xorl %edi,%edx
roll %cl,%edx
movl %edx,%ebx
xorl %ecx,%ecx
movb %dh,%cl
andl $255,%ebx
shrl $16,%edx
xorl %eax,%eax
movb %dh,%al
andl $255,%edx
movl CAST_S_table0(,%ecx,4),%ecx
movl CAST_S_table1(,%ebx,4),%ebx
subl %ebx,%ecx
movl CAST_S_table2(,%eax,4),%ebx
addl %ebx,%ecx
movl CAST_S_table3(,%edx,4),%ebx
xorl %ebx,%ecx
xorl %ecx,%esi
# round 10: F1, Km/Kr at 72/76(%ebp), updates edi
movl 72(%ebp),%edx
movl 76(%ebp),%ecx
addl %esi,%edx
roll %cl,%edx
movl %edx,%ebx
xorl %ecx,%ecx
movb %dh,%cl
andl $255,%ebx
shrl $16,%edx
xorl %eax,%eax
movb %dh,%al
andl $255,%edx
movl CAST_S_table0(,%ecx,4),%ecx
movl CAST_S_table1(,%ebx,4),%ebx
xorl %ebx,%ecx
movl CAST_S_table2(,%eax,4),%ebx
subl %ebx,%ecx
movl CAST_S_table3(,%edx,4),%ebx
addl %ebx,%ecx
xorl %ecx,%edi
# round 9: F3, Km/Kr at 64/68(%ebp), updates esi
movl 64(%ebp),%edx
movl 68(%ebp),%ecx
subl %edi,%edx
roll %cl,%edx
movl %edx,%ebx
xorl %ecx,%ecx
movb %dh,%cl
andl $255,%ebx
shrl $16,%edx
xorl %eax,%eax
movb %dh,%al
andl $255,%edx
movl CAST_S_table0(,%ecx,4),%ecx
movl CAST_S_table1(,%ebx,4),%ebx
addl %ebx,%ecx
movl CAST_S_table2(,%eax,4),%ebx
xorl %ebx,%ecx
movl CAST_S_table3(,%edx,4),%ebx
subl %ebx,%ecx
xorl %ecx,%esi
# round 8: F2, Km/Kr at 56/60(%ebp), updates edi
movl 56(%ebp),%edx
movl 60(%ebp),%ecx
xorl %esi,%edx
roll %cl,%edx
movl %edx,%ebx
xorl %ecx,%ecx
movb %dh,%cl
andl $255,%ebx
shrl $16,%edx
xorl %eax,%eax
movb %dh,%al
andl $255,%edx
movl CAST_S_table0(,%ecx,4),%ecx
movl CAST_S_table1(,%ebx,4),%ebx
subl %ebx,%ecx
movl CAST_S_table2(,%eax,4),%ebx
addl %ebx,%ecx
movl CAST_S_table3(,%edx,4),%ebx
xorl %ebx,%ecx
xorl %ecx,%edi
# round 7: F1, Km/Kr at 48/52(%ebp), updates esi
movl 48(%ebp),%edx
movl 52(%ebp),%ecx
addl %edi,%edx
roll %cl,%edx
movl %edx,%ebx
xorl %ecx,%ecx
movb %dh,%cl
andl $255,%ebx
shrl $16,%edx
xorl %eax,%eax
movb %dh,%al
andl $255,%edx
movl CAST_S_table0(,%ecx,4),%ecx
movl CAST_S_table1(,%ebx,4),%ebx
xorl %ebx,%ecx
movl CAST_S_table2(,%eax,4),%ebx
subl %ebx,%ecx
movl CAST_S_table3(,%edx,4),%ebx
addl %ebx,%ecx
xorl %ecx,%esi
# round 6: F3, Km/Kr at 40/44(%ebp), updates edi
movl 40(%ebp),%edx
movl 44(%ebp),%ecx
subl %esi,%edx
roll %cl,%edx
movl %edx,%ebx
xorl %ecx,%ecx
movb %dh,%cl
andl $255,%ebx
shrl $16,%edx
xorl %eax,%eax
movb %dh,%al
andl $255,%edx
movl CAST_S_table0(,%ecx,4),%ecx
movl CAST_S_table1(,%ebx,4),%ebx
addl %ebx,%ecx
movl CAST_S_table2(,%eax,4),%ebx
xorl %ebx,%ecx
movl CAST_S_table3(,%edx,4),%ebx
subl %ebx,%ecx
xorl %ecx,%edi
# round 5: F2, Km/Kr at 32/36(%ebp), updates esi
movl 32(%ebp),%edx
movl 36(%ebp),%ecx
xorl %edi,%edx
roll %cl,%edx
movl %edx,%ebx
xorl %ecx,%ecx
movb %dh,%cl
andl $255,%ebx
shrl $16,%edx
xorl %eax,%eax
movb %dh,%al
andl $255,%edx
movl CAST_S_table0(,%ecx,4),%ecx
movl CAST_S_table1(,%ebx,4),%ebx
subl %ebx,%ecx
movl CAST_S_table2(,%eax,4),%ebx
addl %ebx,%ecx
movl CAST_S_table3(,%edx,4),%ebx
xorl %ebx,%ecx
xorl %ecx,%esi
# round 4: F1, Km/Kr at 24/28(%ebp), updates edi
movl 24(%ebp),%edx
movl 28(%ebp),%ecx
addl %esi,%edx
roll %cl,%edx
movl %edx,%ebx
xorl %ecx,%ecx
movb %dh,%cl
andl $255,%ebx
shrl $16,%edx
xorl %eax,%eax
movb %dh,%al
andl $255,%edx
movl CAST_S_table0(,%ecx,4),%ecx
movl CAST_S_table1(,%ebx,4),%ebx
xorl %ebx,%ecx
movl CAST_S_table2(,%eax,4),%ebx
subl %ebx,%ecx
movl CAST_S_table3(,%edx,4),%ebx
addl %ebx,%ecx
xorl %ecx,%edi
# round 3: F3, Km/Kr at 16/20(%ebp), updates esi
movl 16(%ebp),%edx
movl 20(%ebp),%ecx
subl %edi,%edx
roll %cl,%edx
movl %edx,%ebx
xorl %ecx,%ecx
movb %dh,%cl
andl $255,%ebx
shrl $16,%edx
xorl %eax,%eax
movb %dh,%al
andl $255,%edx
movl CAST_S_table0(,%ecx,4),%ecx
movl CAST_S_table1(,%ebx,4),%ebx
addl %ebx,%ecx
movl CAST_S_table2(,%eax,4),%ebx
xorl %ebx,%ecx
movl CAST_S_table3(,%edx,4),%ebx
subl %ebx,%ecx
xorl %ecx,%esi
# round 2: F2, Km/Kr at 8/12(%ebp), updates edi
movl 8(%ebp),%edx
movl 12(%ebp),%ecx
xorl %esi,%edx
roll %cl,%edx
movl %edx,%ebx
xorl %ecx,%ecx
movb %dh,%cl
andl $255,%ebx
shrl $16,%edx
xorl %eax,%eax
movb %dh,%al
andl $255,%edx
movl CAST_S_table0(,%ecx,4),%ecx
movl CAST_S_table1(,%ebx,4),%ebx
subl %ebx,%ecx
movl CAST_S_table2(,%eax,4),%ebx
addl %ebx,%ecx
movl CAST_S_table3(,%edx,4),%ebx
xorl %ebx,%ecx
xorl %ecx,%edi
# round 1: F1, Km/Kr at 0/4(%ebp), updates esi
movl (%ebp),%edx
movl 4(%ebp),%ecx
addl %edi,%edx
roll %cl,%edx
movl %edx,%ebx
xorl %ecx,%ecx
movb %dh,%cl
andl $255,%ebx
shrl $16,%edx
xorl %eax,%eax
movb %dh,%al
andl $255,%edx
movl CAST_S_table0(,%ecx,4),%ecx
movl CAST_S_table1(,%ebx,4),%ebx
xorl %ebx,%ecx
movl CAST_S_table2(,%eax,4),%ebx
subl %ebx,%ecx
movl CAST_S_table3(,%edx,4),%ebx
addl %ebx,%ecx
xorl %ecx,%esi
nop
# Store swapped halves back into data[] (undo the Feistel half-swap).
movl 20(%esp),%eax
movl %edi,4(%eax)
movl %esi,(%eax)
popl %edi
popl %esi
popl %ebx
popl %ebp
ret
.size CAST_decrypt,.-.L_CAST_decrypt_begin
.globl CAST_cbc_encrypt
.type CAST_cbc_encrypt,@function
.align 16
# void CAST_cbc_encrypt(const unsigned char *in, unsigned char *out,
#                       long length, const CAST_KEY *ks, unsigned char *iv,
#                       int enc)  [cdecl]
# CBC mode driver around CAST_encrypt/CAST_decrypt.  Blocks are 8 bytes,
# loaded big-endian (bswap before/after each cipher call).  The tail
# (length % 8) is handled byte-wise via the jump table below.  The IV is
# read from and written back to iv[].
CAST_cbc_encrypt:
.L_CAST_cbc_encrypt_begin:
pushl %ebp
pushl %ebx
pushl %esi
pushl %edi
movl 28(%esp),%ebp                      # ebp = length
movl 36(%esp),%ebx                      # ebx = iv
movl (%ebx),%esi                        # load IV words
movl 4(%ebx),%edi
pushl %edi                              # build a local 2-word block buffer
pushl %esi                              # (pushed twice: scratch + live IV)
pushl %edi
pushl %esi
movl %esp,%ebx                          # ebx -> block buffer
movl 36(%esp),%esi                      # esi = in
movl 40(%esp),%edi                      # edi = out
movl 56(%esp),%ecx                      # ecx = enc flag
movl 48(%esp),%eax                      # eax = key schedule
pushl %eax                              # args for CAST_encrypt(block, ks)
pushl %ebx
cmpl $0,%ecx
jz .L002decrypt
# ---- encryption path: process whole 8-byte blocks ----
andl $4294967288,%ebp                   # length & ~7
movl 8(%esp),%eax                       # eax/ebx = running IV / chaining block
movl 12(%esp),%ebx
jz .L003encrypt_finish
.L004encrypt_loop:
movl (%esi),%ecx
movl 4(%esi),%edx
xorl %ecx,%eax                          # CBC: xor plaintext with previous block
xorl %edx,%ebx
bswap %eax                              # to big-endian host order
bswap %ebx
movl %eax,8(%esp)
movl %ebx,12(%esp)
call .L_CAST_encrypt_begin
movl 8(%esp),%eax
movl 12(%esp),%ebx
bswap %eax
bswap %ebx
movl %eax,(%edi)                        # emit ciphertext; it is also the
movl %ebx,4(%edi)                       # chaining value for the next block
addl $8,%esi
addl $8,%edi
subl $8,%ebp
jnz .L004encrypt_loop
# ---- encryption tail: 1..7 remaining bytes, dispatched by count ----
.L003encrypt_finish:
movl 52(%esp),%ebp
andl $7,%ebp
jz .L005finish
call .L006PIC_point                     # PIC trick: get EIP for table base
.L006PIC_point:
popl %edx
leal .L007cbc_enc_jmp_table-.L006PIC_point(%edx),%ecx
movl (%ecx,%ebp,4),%ebp
addl %edx,%ebp
xorl %ecx,%ecx
xorl %edx,%edx
jmp *%ebp                               # jump to the ej<count> entry
# Fall-through chain: each label loads one more byte of the partial block
# into ecx:edx (zero-padded).
.L008ej7:
movb 6(%esi),%dh
shll $8,%edx
.L009ej6:
movb 5(%esi),%dh
.L010ej5:
movb 4(%esi),%dl
.L011ej4:
movl (%esi),%ecx
jmp .L012ejend
.L013ej3:
movb 2(%esi),%ch
shll $8,%ecx
.L014ej2:
movb 1(%esi),%ch
.L015ej1:
movb (%esi),%cl
.L012ejend:
xorl %ecx,%eax                          # xor padded block with IV and encrypt
xorl %edx,%ebx
bswap %eax
bswap %ebx
movl %eax,8(%esp)
movl %ebx,12(%esp)
call .L_CAST_encrypt_begin
movl 8(%esp),%eax
movl 12(%esp),%ebx
bswap %eax
bswap %ebx
movl %eax,(%edi)                        # always a full ciphertext block out
movl %ebx,4(%edi)
jmp .L005finish
# ---- decryption path: process whole 8-byte blocks ----
.L002decrypt:
andl $4294967288,%ebp
movl 16(%esp),%eax                      # eax/ebx = previous ciphertext (IV)
movl 20(%esp),%ebx
jz .L016decrypt_finish
.L017decrypt_loop:
movl (%esi),%eax
movl 4(%esi),%ebx
bswap %eax
bswap %ebx
movl %eax,8(%esp)
movl %ebx,12(%esp)
call .L_CAST_decrypt_begin
movl 8(%esp),%eax
movl 12(%esp),%ebx
bswap %eax
bswap %ebx
movl 16(%esp),%ecx                      # ecx/edx = previous ciphertext block
movl 20(%esp),%edx
xorl %eax,%ecx                          # undo the CBC xor
xorl %ebx,%edx
movl (%esi),%eax                        # current ciphertext becomes next IV
movl 4(%esi),%ebx
movl %ecx,(%edi)
movl %edx,4(%edi)
movl %eax,16(%esp)
movl %ebx,20(%esp)
addl $8,%esi
addl $8,%edi
subl $8,%ebp
jnz .L017decrypt_loop
# ---- decryption tail: write only the 1..7 requested bytes ----
.L016decrypt_finish:
movl 52(%esp),%ebp
andl $7,%ebp
jz .L005finish
movl (%esi),%eax
movl 4(%esi),%ebx
bswap %eax
bswap %ebx
movl %eax,8(%esp)
movl %ebx,12(%esp)
call .L_CAST_decrypt_begin
movl 8(%esp),%eax
movl 12(%esp),%ebx
bswap %eax
bswap %ebx
movl 16(%esp),%ecx
movl 20(%esp),%edx
xorl %eax,%ecx
xorl %ebx,%edx
movl (%esi),%eax
movl 4(%esi),%ebx
# NOTE(review): unlike the encrypt tail there is no computed jump here;
# control falls straight into dj7.  This matches the upstream generated
# code (cbc.pl) verbatim -- confirm against the generator before changing.
.L018dj7:
rorl $16,%edx
movb %dl,6(%edi)
shrl $16,%edx
.L019dj6:
movb %dh,5(%edi)
.L020dj5:
movb %dl,4(%edi)
.L021dj4:
movl %ecx,(%edi)
jmp .L022djend
.L023dj3:
rorl $16,%ecx
movb %cl,2(%edi)
shll $16,%ecx
.L024dj2:
# NOTE(review): dj2/dj1 store through %esi (the input pointer) while the
# other dj cases store through %edi (the output pointer).  This oddity is
# present in the upstream generated file as well -- verify against
# OpenSSL's cbc.pl before treating it as a local transcription error.
movb %ch,1(%esi)
.L025dj1:
movb %cl,(%esi)
.L022djend:
jmp .L005finish
# ---- common exit: write back the final IV, unwind the local frame ----
.L005finish:
movl 60(%esp),%ecx                      # ecx = iv argument
addl $24,%esp                           # drop block buffer + pushed args
movl %eax,(%ecx)                        # store updated IV
movl %ebx,4(%ecx)
popl %edi
popl %esi
popl %ebx
popl %ebp
ret
.align 64
# Tail-byte dispatch table for the encrypt path; offsets are relative to
# .L006PIC_point so the code stays position-independent.
.L007cbc_enc_jmp_table:
.long 0
.long .L015ej1-.L006PIC_point
.long .L014ej2-.L006PIC_point
.long .L013ej3-.L006PIC_point
.long .L011ej4-.L006PIC_point
.long .L010ej5-.L006PIC_point
.long .L009ej6-.L006PIC_point
.long .L008ej7-.L006PIC_point
.align 64
.size CAST_cbc_encrypt,.-.L_CAST_cbc_encrypt_begin

875
deps/openssl/asm/x86-elf-gas/des/crypt586.s

@@ -0,0 +1,875 @@
.file "crypt586.s"
.text
.globl fcrypt_body
.type fcrypt_body,@function
.align 16
fcrypt_body:
.L_fcrypt_body_begin:
pushl %ebp
pushl %ebx
pushl %esi
pushl %edi
xorl %edi,%edi
xorl %esi,%esi
leal DES_SPtrans,%edx
pushl %edx
movl 28(%esp),%ebp
pushl $25
.L000start:
movl 36(%esp),%eax
movl %esi,%edx
shrl $16,%edx
movl 40(%esp),%ecx
xorl %esi,%edx
andl %edx,%eax
andl %ecx,%edx
movl %eax,%ebx
shll $16,%ebx
movl %edx,%ecx
shll $16,%ecx
xorl %ebx,%eax
xorl %ecx,%edx
movl (%ebp),%ebx
xorl %ebx,%eax
movl 4(%ebp),%ecx
xorl %esi,%eax
xorl %esi,%edx
xorl %ecx,%edx
andl $0xfcfcfcfc,%eax
xorl %ebx,%ebx
andl $0xcfcfcfcf,%edx
xorl %ecx,%ecx
movb %al,%bl
movb %ah,%cl
rorl $4,%edx
movl 4(%esp),%ebp
xorl (%ebp,%ebx,1),%edi
movb %dl,%bl
xorl 0x200(%ebp,%ecx,1),%edi
movb %dh,%cl
shrl $16,%eax
xorl 0x100(%ebp,%ebx,1),%edi
movb %ah,%bl
shrl $16,%edx
xorl 0x300(%ebp,%ecx,1),%edi
movb %dh,%cl
andl $0xff,%eax
andl $0xff,%edx
movl 0x600(%ebp,%ebx,1),%ebx
xorl %ebx,%edi
movl 0x700(%ebp,%ecx,1),%ebx
xorl %ebx,%edi
movl 0x400(%ebp,%eax,1),%ebx
xorl %ebx,%edi
movl 0x500(%ebp,%edx,1),%ebx
xorl %ebx,%edi
movl 32(%esp),%ebp
movl 36(%esp),%eax
movl %edi,%edx
shrl $16,%edx
movl 40(%esp),%ecx
xorl %edi,%edx
andl %edx,%eax
andl %ecx,%edx
movl %eax,%ebx
shll $16,%ebx
movl %edx,%ecx
shll $16,%ecx
xorl %ebx,%eax
xorl %ecx,%edx
movl 8(%ebp),%ebx
xorl %ebx,%eax
movl 12(%ebp),%ecx
xorl %edi,%eax
xorl %edi,%edx
xorl %ecx,%edx
andl $0xfcfcfcfc,%eax
xorl %ebx,%ebx
andl $0xcfcfcfcf,%edx
xorl %ecx,%ecx
movb %al,%bl
movb %ah,%cl
rorl $4,%edx
movl 4(%esp),%ebp
xorl (%ebp,%ebx,1),%esi
movb %dl,%bl
xorl 0x200(%ebp,%ecx,1),%esi
movb %dh,%cl
shrl $16,%eax
xorl 0x100(%ebp,%ebx,1),%esi
movb %ah,%bl
shrl $16,%edx
xorl 0x300(%ebp,%ecx,1),%esi
movb %dh,%cl
andl $0xff,%eax
andl $0xff,%edx
movl 0x600(%ebp,%ebx,1),%ebx
xorl %ebx,%esi
movl 0x700(%ebp,%ecx,1),%ebx
xorl %ebx,%esi
movl 0x400(%ebp,%eax,1),%ebx
xorl %ebx,%esi
movl 0x500(%ebp,%edx,1),%ebx
xorl %ebx,%esi
movl 32(%esp),%ebp
movl 36(%esp),%eax
movl %esi,%edx
shrl $16,%edx
movl 40(%esp),%ecx
xorl %esi,%edx
andl %edx,%eax
andl %ecx,%edx
movl %eax,%ebx
shll $16,%ebx
movl %edx,%ecx
shll $16,%ecx
xorl %ebx,%eax
xorl %ecx,%edx
movl 16(%ebp),%ebx
xorl %ebx,%eax
movl 20(%ebp),%ecx
xorl %esi,%eax
xorl %esi,%edx
xorl %ecx,%edx
andl $0xfcfcfcfc,%eax
xorl %ebx,%ebx
andl $0xcfcfcfcf,%edx
xorl %ecx,%ecx
movb %al,%bl
movb %ah,%cl
rorl $4,%edx
movl 4(%esp),%ebp
xorl (%ebp,%ebx,1),%edi
movb %dl,%bl
xorl 0x200(%ebp,%ecx,1),%edi
movb %dh,%cl
shrl $16,%eax
xorl 0x100(%ebp,%ebx,1),%edi
movb %ah,%bl
shrl $16,%edx
xorl 0x300(%ebp,%ecx,1),%edi
movb %dh,%cl
andl $0xff,%eax
andl $0xff,%edx
movl 0x600(%ebp,%ebx,1),%ebx
xorl %ebx,%edi
movl 0x700(%ebp,%ecx,1),%ebx
xorl %ebx,%edi
movl 0x400(%ebp,%eax,1),%ebx
xorl %ebx,%edi
movl 0x500(%ebp,%edx,1),%ebx
xorl %ebx,%edi
movl 32(%esp),%ebp
movl 36(%esp),%eax
movl %edi,%edx
shrl $16,%edx
movl 40(%esp),%ecx
xorl %edi,%edx
andl %edx,%eax
andl %ecx,%edx
movl %eax,%ebx
shll $16,%ebx
movl %edx,%ecx
shll $16,%ecx
xorl %ebx,%eax
xorl %ecx,%edx
movl 24(%ebp),%ebx
xorl %ebx,%eax
movl 28(%ebp),%ecx
xorl %edi,%eax
xorl %edi,%edx
xorl %ecx,%edx
andl $0xfcfcfcfc,%eax
xorl %ebx,%ebx
andl $0xcfcfcfcf,%edx
xorl %ecx,%ecx
movb %al,%bl
movb %ah,%cl
rorl $4,%edx
movl 4(%esp),%ebp
xorl (%ebp,%ebx,1),%esi
movb %dl,%bl
xorl 0x200(%ebp,%ecx,1),%esi
movb %dh,%cl
shrl $16,%eax
xorl 0x100(%ebp,%ebx,1),%esi
movb %ah,%bl
shrl $16,%edx
xorl 0x300(%ebp,%ecx,1),%esi
movb %dh,%cl
andl $0xff,%eax
andl $0xff,%edx
movl 0x600(%ebp,%ebx,1),%ebx
xorl %ebx,%esi
movl 0x700(%ebp,%ecx,1),%ebx
xorl %ebx,%esi
movl 0x400(%ebp,%eax,1),%ebx
xorl %ebx,%esi
movl 0x500(%ebp,%edx,1),%ebx
xorl %ebx,%esi
movl 32(%esp),%ebp
movl 36(%esp),%eax
movl %esi,%edx
shrl $16,%edx
movl 40(%esp),%ecx
xorl %esi,%edx
andl %edx,%eax
andl %ecx,%edx
movl %eax,%ebx
shll $16,%ebx
movl %edx,%ecx
shll $16,%ecx
xorl %ebx,%eax
xorl %ecx,%edx
movl 32(%ebp),%ebx
xorl %ebx,%eax
movl 36(%ebp),%ecx
xorl %esi,%eax
xorl %esi,%edx
xorl %ecx,%edx
andl $0xfcfcfcfc,%eax
xorl %ebx,%ebx
andl $0xcfcfcfcf,%edx
xorl %ecx,%ecx
movb %al,%bl
movb %ah,%cl
rorl $4,%edx
movl 4(%esp),%ebp
xorl (%ebp,%ebx,1),%edi
movb %dl,%bl
xorl 0x200(%ebp,%ecx,1),%edi
movb %dh,%cl
shrl $16,%eax
xorl 0x100(%ebp,%ebx,1),%edi
movb %ah,%bl
shrl $16,%edx
xorl 0x300(%ebp,%ecx,1),%edi
movb %dh,%cl
andl $0xff,%eax
andl $0xff,%edx
movl 0x600(%ebp,%ebx,1),%ebx
xorl %ebx,%edi
movl 0x700(%ebp,%ecx,1),%ebx
xorl %ebx,%edi
movl 0x400(%ebp,%eax,1),%ebx
xorl %ebx,%edi
movl 0x500(%ebp,%edx,1),%ebx
xorl %ebx,%edi
movl 32(%esp),%ebp
movl 36(%esp),%eax
movl %edi,%edx
shrl $16,%edx
movl 40(%esp),%ecx
xorl %edi,%edx
andl %edx,%eax
andl %ecx,%edx
movl %eax,%ebx
shll $16,%ebx
movl %edx,%ecx
shll $16,%ecx
xorl %ebx,%eax
xorl %ecx,%edx
movl 40(%ebp),%ebx
xorl %ebx,%eax
movl 44(%ebp),%ecx
xorl %edi,%eax
xorl %edi,%edx
xorl %ecx,%edx
andl $0xfcfcfcfc,%eax
xorl %ebx,%ebx
andl $0xcfcfcfcf,%edx
xorl %ecx,%ecx
movb %al,%bl
movb %ah,%cl
rorl $4,%edx
movl 4(%esp),%ebp
xorl (%ebp,%ebx,1),%esi
movb %dl,%bl
xorl 0x200(%ebp,%ecx,1),%esi
movb %dh,%cl
shrl $16,%eax
xorl 0x100(%ebp,%ebx,1),%esi
movb %ah,%bl
shrl $16,%edx
xorl 0x300(%ebp,%ecx,1),%esi
movb %dh,%cl
andl $0xff,%eax
andl $0xff,%edx
movl 0x600(%ebp,%ebx,1),%ebx
xorl %ebx,%esi
movl 0x700(%ebp,%ecx,1),%ebx
xorl %ebx,%esi
movl 0x400(%ebp,%eax,1),%ebx
xorl %ebx,%esi
movl 0x500(%ebp,%edx,1),%ebx
xorl %ebx,%esi
movl 32(%esp),%ebp
movl 36(%esp),%eax
movl %esi,%edx
shrl $16,%edx
movl 40(%esp),%ecx
xorl %esi,%edx
andl %edx,%eax
andl %ecx,%edx
movl %eax,%ebx
shll $16,%ebx
movl %edx,%ecx
shll $16,%ecx
xorl %ebx,%eax
xorl %ecx,%edx
movl 48(%ebp),%ebx
xorl %ebx,%eax
movl 52(%ebp),%ecx
xorl %esi,%eax
xorl %esi,%edx
xorl %ecx,%edx
andl $0xfcfcfcfc,%eax
xorl %ebx,%ebx
andl $0xcfcfcfcf,%edx
xorl %ecx,%ecx
movb %al,%bl
movb %ah,%cl
rorl $4,%edx
movl 4(%esp),%ebp
xorl (%ebp,%ebx,1),%edi
movb %dl,%bl
xorl 0x200(%ebp,%ecx,1),%edi
movb %dh,%cl
shrl $16,%eax
xorl 0x100(%ebp,%ebx,1),%edi
movb %ah,%bl
shrl $16,%edx
xorl 0x300(%ebp,%ecx,1),%edi
movb %dh,%cl
andl $0xff,%eax
andl $0xff,%edx
movl 0x600(%ebp,%ebx,1),%ebx
xorl %ebx,%edi
movl 0x700(%ebp,%ecx,1),%ebx
xorl %ebx,%edi
movl 0x400(%ebp,%eax,1),%ebx
xorl %ebx,%edi
movl 0x500(%ebp,%edx,1),%ebx
xorl %ebx,%edi
movl 32(%esp),%ebp
movl 36(%esp),%eax
movl %edi,%edx
shrl $16,%edx
movl 40(%esp),%ecx
xorl %edi,%edx
andl %edx,%eax
andl %ecx,%edx
movl %eax,%ebx
shll $16,%ebx
movl %edx,%ecx
shll $16,%ecx
xorl %ebx,%eax
xorl %ecx,%edx
movl 56(%ebp),%ebx
xorl %ebx,%eax
movl 60(%ebp),%ecx
xorl %edi,%eax
xorl %edi,%edx
xorl %ecx,%edx
andl $0xfcfcfcfc,%eax
xorl %ebx,%ebx
andl $0xcfcfcfcf,%edx
xorl %ecx,%ecx
movb %al,%bl
movb %ah,%cl
rorl $4,%edx
movl 4(%esp),%ebp
xorl (%ebp,%ebx,1),%esi
movb %dl,%bl
xorl 0x200(%ebp,%ecx,1),%esi
movb %dh,%cl
shrl $16,%eax
xorl 0x100(%ebp,%ebx,1),%esi
movb %ah,%bl
shrl $16,%edx
xorl 0x300(%ebp,%ecx,1),%esi
movb %dh,%cl
andl $0xff,%eax
andl $0xff,%edx
movl 0x600(%ebp,%ebx,1),%ebx
xorl %ebx,%esi
movl 0x700(%ebp,%ecx,1),%ebx
xorl %ebx,%esi
movl 0x400(%ebp,%eax,1),%ebx
xorl %ebx,%esi
movl 0x500(%ebp,%edx,1),%ebx
xorl %ebx,%esi
movl 32(%esp),%ebp
movl 36(%esp),%eax
movl %esi,%edx
shrl $16,%edx
movl 40(%esp),%ecx
xorl %esi,%edx
andl %edx,%eax
andl %ecx,%edx
movl %eax,%ebx
shll $16,%ebx
movl %edx,%ecx
shll $16,%ecx
xorl %ebx,%eax
xorl %ecx,%edx
movl 64(%ebp),%ebx
xorl %ebx,%eax
movl 68(%ebp),%ecx
xorl %esi,%eax
xorl %esi,%edx
xorl %ecx,%edx
andl $0xfcfcfcfc,%eax
xorl %ebx,%ebx
andl $0xcfcfcfcf,%edx
xorl %ecx,%ecx
movb %al,%bl
movb %ah,%cl
rorl $4,%edx
movl 4(%esp),%ebp
xorl (%ebp,%ebx,1),%edi
movb %dl,%bl
xorl 0x200(%ebp,%ecx,1),%edi
movb %dh,%cl
shrl $16,%eax
xorl 0x100(%ebp,%ebx,1),%edi
movb %ah,%bl
shrl $16,%edx
xorl 0x300(%ebp,%ecx,1),%edi
movb %dh,%cl
andl $0xff,%eax
andl $0xff,%edx
movl 0x600(%ebp,%ebx,1),%ebx
xorl %ebx,%edi
movl 0x700(%ebp,%ecx,1),%ebx
xorl %ebx,%edi
movl 0x400(%ebp,%eax,1),%ebx
xorl %ebx,%edi
movl 0x500(%ebp,%edx,1),%ebx
xorl %ebx,%edi
movl 32(%esp),%ebp
movl 36(%esp),%eax
movl %edi,%edx
shrl $16,%edx
movl 40(%esp),%ecx
xorl %edi,%edx
andl %edx,%eax
andl %ecx,%edx
movl %eax,%ebx
shll $16,%ebx
movl %edx,%ecx
shll $16,%ecx
xorl %ebx,%eax
xorl %ecx,%edx
movl 72(%ebp),%ebx
xorl %ebx,%eax
movl 76(%ebp),%ecx
xorl %edi,%eax
xorl %edi,%edx
xorl %ecx,%edx
andl $0xfcfcfcfc,%eax
xorl %ebx,%ebx
andl $0xcfcfcfcf,%edx
xorl %ecx,%ecx
movb %al,%bl
movb %ah,%cl
rorl $4,%edx
movl 4(%esp),%ebp
xorl (%ebp,%ebx,1),%esi
movb %dl,%bl
xorl 0x200(%ebp,%ecx,1),%esi
movb %dh,%cl
shrl $16,%eax
xorl 0x100(%ebp,%ebx,1),%esi
movb %ah,%bl
shrl $16,%edx
xorl 0x300(%ebp,%ecx,1),%esi
movb %dh,%cl
andl $0xff,%eax
andl $0xff,%edx
movl 0x600(%ebp,%ebx,1),%ebx
xorl %ebx,%esi
movl 0x700(%ebp,%ecx,1),%ebx
xorl %ebx,%esi
movl 0x400(%ebp,%eax,1),%ebx
xorl %ebx,%esi
movl 0x500(%ebp,%edx,1),%ebx
xorl %ebx,%esi
movl 32(%esp),%ebp
movl 36(%esp),%eax
movl %esi,%edx
shrl $16,%edx
movl 40(%esp),%ecx
xorl %esi,%edx
andl %edx,%eax
andl %ecx,%edx
movl %eax,%ebx
shll $16,%ebx
movl %edx,%ecx
shll $16,%ecx
xorl %ebx,%eax
xorl %ecx,%edx
movl 80(%ebp),%ebx
xorl %ebx,%eax
movl 84(%ebp),%ecx
xorl %esi,%eax
xorl %esi,%edx
xorl %ecx,%edx
andl $0xfcfcfcfc,%eax
xorl %ebx,%ebx
andl $0xcfcfcfcf,%edx
xorl %ecx,%ecx
movb %al,%bl
movb %ah,%cl
rorl $4,%edx
movl 4(%esp),%ebp
xorl (%ebp,%ebx,1),%edi
movb %dl,%bl
xorl 0x200(%ebp,%ecx,1),%edi
movb %dh,%cl
shrl $16,%eax
xorl 0x100(%ebp,%ebx,1),%edi
movb %ah,%bl
shrl $16,%edx
xorl 0x300(%ebp,%ecx,1),%edi
movb %dh,%cl
andl $0xff,%eax
andl $0xff,%edx
movl 0x600(%ebp,%ebx,1),%ebx
xorl %ebx,%edi
movl 0x700(%ebp,%ecx,1),%ebx
xorl %ebx,%edi
movl 0x400(%ebp,%eax,1),%ebx
xorl %ebx,%edi
movl 0x500(%ebp,%edx,1),%ebx
xorl %ebx,%edi
movl 32(%esp),%ebp
movl 36(%esp),%eax
movl %edi,%edx
shrl $16,%edx
movl 40(%esp),%ecx
xorl %edi,%edx
andl %edx,%eax
andl %ecx,%edx
movl %eax,%ebx
shll $16,%ebx
movl %edx,%ecx
shll $16,%ecx
xorl %ebx,%eax
xorl %ecx,%edx
movl 88(%ebp),%ebx
xorl %ebx,%eax
movl 92(%ebp),%ecx
xorl %edi,%eax
xorl %edi,%edx
xorl %ecx,%edx
andl $0xfcfcfcfc,%eax
xorl %ebx,%ebx
andl $0xcfcfcfcf,%edx
xorl %ecx,%ecx
movb %al,%bl
movb %ah,%cl
rorl $4,%edx
movl 4(%esp),%ebp
xorl (%ebp,%ebx,1),%esi
movb %dl,%bl
xorl 0x200(%ebp,%ecx,1),%esi
movb %dh,%cl
shrl $16,%eax
xorl 0x100(%ebp,%ebx,1),%esi
movb %ah,%bl
shrl $16,%edx
xorl 0x300(%ebp,%ecx,1),%esi
movb %dh,%cl
andl $0xff,%eax
andl $0xff,%edx
movl 0x600(%ebp,%ebx,1),%ebx
xorl %ebx,%esi
movl 0x700(%ebp,%ecx,1),%ebx
xorl %ebx,%esi
movl 0x400(%ebp,%eax,1),%ebx
xorl %ebx,%esi
movl 0x500(%ebp,%edx,1),%ebx
xorl %ebx,%esi
movl 32(%esp),%ebp
movl 36(%esp),%eax
movl %esi,%edx
shrl $16,%edx
movl 40(%esp),%ecx
xorl %esi,%edx
andl %edx,%eax
andl %ecx,%edx
movl %eax,%ebx
shll $16,%ebx
movl %edx,%ecx
shll $16,%ecx
xorl %ebx,%eax
xorl %ecx,%edx
movl 96(%ebp),%ebx
xorl %ebx,%eax
movl 100(%ebp),%ecx
xorl %esi,%eax
xorl %esi,%edx
xorl %ecx,%edx
andl $0xfcfcfcfc,%eax
xorl %ebx,%ebx
andl $0xcfcfcfcf,%edx
xorl %ecx,%ecx
movb %al,%bl
movb %ah,%cl
rorl $4,%edx
movl 4(%esp),%ebp
xorl (%ebp,%ebx,1),%edi
movb %dl,%bl
xorl 0x200(%ebp,%ecx,1),%edi
movb %dh,%cl
shrl $16,%eax
xorl 0x100(%ebp,%ebx,1),%edi
movb %ah,%bl
shrl $16,%edx
xorl 0x300(%ebp,%ecx,1),%edi
movb %dh,%cl
andl $0xff,%eax
andl $0xff,%edx
movl 0x600(%ebp,%ebx,1),%ebx
xorl %ebx,%edi
movl 0x700(%ebp,%ecx,1),%ebx
xorl %ebx,%edi
movl 0x400(%ebp,%eax,1),%ebx
xorl %ebx,%edi
movl 0x500(%ebp,%edx,1),%ebx
xorl %ebx,%edi
movl 32(%esp),%ebp
movl 36(%esp),%eax
movl %edi,%edx
shrl $16,%edx
movl 40(%esp),%ecx
xorl %edi,%edx
andl %edx,%eax
andl %ecx,%edx
movl %eax,%ebx
shll $16,%ebx
movl %edx,%ecx
shll $16,%ecx
xorl %ebx,%eax
xorl %ecx,%edx
movl 104(%ebp),%ebx
xorl %ebx,%eax
movl 108(%ebp),%ecx
xorl %edi,%eax
xorl %edi,%edx
xorl %ecx,%edx
andl $0xfcfcfcfc,%eax
xorl %ebx,%ebx
andl $0xcfcfcfcf,%edx
xorl %ecx,%ecx
movb %al,%bl
movb %ah,%cl
rorl $4,%edx
movl 4(%esp),%ebp
xorl (%ebp,%ebx,1),%esi
movb %dl,%bl
xorl 0x200(%ebp,%ecx,1),%esi
movb %dh,%cl
shrl $16,%eax
xorl 0x100(%ebp,%ebx,1),%esi
movb %ah,%bl
shrl $16,%edx
xorl 0x300(%ebp,%ecx,1),%esi
movb %dh,%cl
andl $0xff,%eax
andl $0xff,%edx
movl 0x600(%ebp,%ebx,1),%ebx
xorl %ebx,%esi
movl 0x700(%ebp,%ecx,1),%ebx
xorl %ebx,%esi
movl 0x400(%ebp,%eax,1),%ebx
xorl %ebx,%esi
movl 0x500(%ebp,%edx,1),%ebx
xorl %ebx,%esi
movl 32(%esp),%ebp
movl 36(%esp),%eax
movl %esi,%edx
shrl $16,%edx
movl 40(%esp),%ecx
xorl %esi,%edx
andl %edx,%eax
andl %ecx,%edx
movl %eax,%ebx
shll $16,%ebx
movl %edx,%ecx
shll $16,%ecx
xorl %ebx,%eax
xorl %ecx,%edx
movl 112(%ebp),%ebx
xorl %ebx,%eax
movl 116(%ebp),%ecx
xorl %esi,%eax
xorl %esi,%edx
xorl %ecx,%edx
andl $0xfcfcfcfc,%eax
xorl %ebx,%ebx
andl $0xcfcfcfcf,%edx
xorl %ecx,%ecx
movb %al,%bl
movb %ah,%cl
rorl $4,%edx
movl 4(%esp),%ebp
xorl (%ebp,%ebx,1),%edi
movb %dl,%bl
xorl 0x200(%ebp,%ecx,1),%edi
movb %dh,%cl
shrl $16,%eax
xorl 0x100(%ebp,%ebx,1),%edi
movb %ah,%bl
shrl $16,%edx
xorl 0x300(%ebp,%ecx,1),%edi
movb %dh,%cl
andl $0xff,%eax
andl $0xff,%edx
movl 0x600(%ebp,%ebx,1),%ebx
xorl %ebx,%edi
movl 0x700(%ebp,%ecx,1),%ebx
xorl %ebx,%edi
movl 0x400(%ebp,%eax,1),%ebx
xorl %ebx,%edi
movl 0x500(%ebp,%edx,1),%ebx
xorl %ebx,%edi
movl 32(%esp),%ebp
movl 36(%esp),%eax
movl %edi,%edx
shrl $16,%edx
movl 40(%esp),%ecx
xorl %edi,%edx
andl %edx,%eax
andl %ecx,%edx
movl %eax,%ebx
shll $16,%ebx
movl %edx,%ecx
shll $16,%ecx
xorl %ebx,%eax
xorl %ecx,%edx
movl 120(%ebp),%ebx
xorl %ebx,%eax
movl 124(%ebp),%ecx
xorl %edi,%eax
xorl %edi,%edx
xorl %ecx,%edx
andl $0xfcfcfcfc,%eax
xorl %ebx,%ebx
andl $0xcfcfcfcf,%edx
xorl %ecx,%ecx
movb %al,%bl
movb %ah,%cl
rorl $4,%edx
movl 4(%esp),%ebp
xorl (%ebp,%ebx,1),%esi
movb %dl,%bl
xorl 0x200(%ebp,%ecx,1),%esi
movb %dh,%cl
shrl $16,%eax
xorl 0x100(%ebp,%ebx,1),%esi
movb %ah,%bl
shrl $16,%edx
xorl 0x300(%ebp,%ecx,1),%esi
movb %dh,%cl
andl $0xff,%eax
andl $0xff,%edx
movl 0x600(%ebp,%ebx,1),%ebx
xorl %ebx,%esi
movl 0x700(%ebp,%ecx,1),%ebx
xorl %ebx,%esi
movl 0x400(%ebp,%eax,1),%ebx
xorl %ebx,%esi
movl 0x500(%ebp,%edx,1),%ebx
xorl %ebx,%esi
movl 32(%esp),%ebp
movl (%esp),%ebx
movl %edi,%eax
decl %ebx
movl %esi,%edi
movl %eax,%esi
movl %ebx,(%esp)
jnz .L000start
movl 28(%esp),%edx
rorl $1,%edi
movl %esi,%eax
xorl %edi,%esi
andl $0xaaaaaaaa,%esi
xorl %esi,%eax
xorl %esi,%edi
roll $23,%eax
movl %eax,%esi
xorl %edi,%eax
andl $0x03fc03fc,%eax
xorl %eax,%esi
xorl %eax,%edi
roll $10,%esi
movl %esi,%eax
xorl %edi,%esi
andl $0x33333333,%esi
xorl %esi,%eax
xorl %esi,%edi
roll $18,%edi
movl %edi,%esi
xorl %eax,%edi
andl $0xfff0000f,%edi
xorl %edi,%esi
xorl %edi,%eax
roll $12,%esi
movl %esi,%edi
xorl %eax,%esi
andl $0xf0f0f0f0,%esi
xorl %esi,%edi
xorl %esi,%eax
rorl $4,%eax
movl %eax,(%edx)
movl %edi,4(%edx)
addl $8,%esp
popl %edi
popl %esi
popl %ebx
popl %ebp
ret
.size fcrypt_body,.-.L_fcrypt_body_begin

1837
deps/openssl/asm/x86-elf-gas/des/des-586.s

File diff suppressed because it is too large

679
deps/openssl/asm/x86-elf-gas/md5/md5-586.s

@ -0,0 +1,679 @@
.file	"../openssl/crypto/md5/asm/md5-586.s"
.text
# ----------------------------------------------------------------------
# void md5_block_asm_data_order(MD5_CTX *ctx, const void *data, size_t num)
#
# Machine-generated (OpenSSL crypto/md5/asm/md5-586.pl) i386 MD5 block
# transform, AT&T/GAS syntax, cdecl.  Processes `num` 64-byte blocks from
# `data`, updating the four 32-bit state words at ctx[0..3].
#
# Register roles inside the loop:
#   %eax,%ebx,%ecx,%edx = MD5 state A,B,C,D
#   %esi = current input pointer, %ebp = scratch / message word X[k]
#   %edi = scratch for the round boolean functions
# Stack after the pushes: (%esp) = end-of-data sentinel, 12..24(%esp) =
# saved regs / args.  Do not reorder: round chaining is order-sensitive.
# ----------------------------------------------------------------------
.globl	md5_block_asm_data_order
.type	md5_block_asm_data_order,@function
.align	16
md5_block_asm_data_order:
.L_md5_block_asm_data_order_begin:
	pushl	%esi
	pushl	%edi
	movl	12(%esp),%edi		# ctx
	movl	16(%esp),%esi		# data
	movl	20(%esp),%ecx		# num (64-byte blocks)
	pushl	%ebp
	shll	$6,%ecx			# num *= 64
	pushl	%ebx
	addl	%esi,%ecx
	subl	$64,%ecx		# ecx = address of last block
	movl	(%edi),%eax		# load state A,B,C,D
	pushl	%ecx			# (%esp) = last-block sentinel
	movl	4(%edi),%ebx
	movl	8(%edi),%ecx
	movl	12(%edi),%edx
.L000start:
	# --- Round 1: F(b,c,d) = (b & c) | (~b & d), 16 steps ---
	movl	%ecx,%edi
	movl	(%esi),%ebp
	xorl	%edx,%edi
	andl	%ebx,%edi
	leal	3614090360(%eax,%ebp,1),%eax
	xorl	%edx,%edi
	addl	%edi,%eax
	movl	%ebx,%edi
	roll	$7,%eax
	movl	4(%esi),%ebp
	addl	%ebx,%eax
	xorl	%ecx,%edi
	andl	%eax,%edi
	leal	3905402710(%edx,%ebp,1),%edx
	xorl	%ecx,%edi
	addl	%edi,%edx
	movl	%eax,%edi
	roll	$12,%edx
	movl	8(%esi),%ebp
	addl	%eax,%edx
	xorl	%ebx,%edi
	andl	%edx,%edi
	leal	606105819(%ecx,%ebp,1),%ecx
	xorl	%ebx,%edi
	addl	%edi,%ecx
	movl	%edx,%edi
	roll	$17,%ecx
	movl	12(%esi),%ebp
	addl	%edx,%ecx
	xorl	%eax,%edi
	andl	%ecx,%edi
	leal	3250441966(%ebx,%ebp,1),%ebx
	xorl	%eax,%edi
	addl	%edi,%ebx
	movl	%ecx,%edi
	roll	$22,%ebx
	movl	16(%esi),%ebp
	addl	%ecx,%ebx
	xorl	%edx,%edi
	andl	%ebx,%edi
	leal	4118548399(%eax,%ebp,1),%eax
	xorl	%edx,%edi
	addl	%edi,%eax
	movl	%ebx,%edi
	roll	$7,%eax
	movl	20(%esi),%ebp
	addl	%ebx,%eax
	xorl	%ecx,%edi
	andl	%eax,%edi
	leal	1200080426(%edx,%ebp,1),%edx
	xorl	%ecx,%edi
	addl	%edi,%edx
	movl	%eax,%edi
	roll	$12,%edx
	movl	24(%esi),%ebp
	addl	%eax,%edx
	xorl	%ebx,%edi
	andl	%edx,%edi
	leal	2821735955(%ecx,%ebp,1),%ecx
	xorl	%ebx,%edi
	addl	%edi,%ecx
	movl	%edx,%edi
	roll	$17,%ecx
	movl	28(%esi),%ebp
	addl	%edx,%ecx
	xorl	%eax,%edi
	andl	%ecx,%edi
	leal	4249261313(%ebx,%ebp,1),%ebx
	xorl	%eax,%edi
	addl	%edi,%ebx
	movl	%ecx,%edi
	roll	$22,%ebx
	movl	32(%esi),%ebp
	addl	%ecx,%ebx
	xorl	%edx,%edi
	andl	%ebx,%edi
	leal	1770035416(%eax,%ebp,1),%eax
	xorl	%edx,%edi
	addl	%edi,%eax
	movl	%ebx,%edi
	roll	$7,%eax
	movl	36(%esi),%ebp
	addl	%ebx,%eax
	xorl	%ecx,%edi
	andl	%eax,%edi
	leal	2336552879(%edx,%ebp,1),%edx
	xorl	%ecx,%edi
	addl	%edi,%edx
	movl	%eax,%edi
	roll	$12,%edx
	movl	40(%esi),%ebp
	addl	%eax,%edx
	xorl	%ebx,%edi
	andl	%edx,%edi
	leal	4294925233(%ecx,%ebp,1),%ecx
	xorl	%ebx,%edi
	addl	%edi,%ecx
	movl	%edx,%edi
	roll	$17,%ecx
	movl	44(%esi),%ebp
	addl	%edx,%ecx
	xorl	%eax,%edi
	andl	%ecx,%edi
	leal	2304563134(%ebx,%ebp,1),%ebx
	xorl	%eax,%edi
	addl	%edi,%ebx
	movl	%ecx,%edi
	roll	$22,%ebx
	movl	48(%esi),%ebp
	addl	%ecx,%ebx
	xorl	%edx,%edi
	andl	%ebx,%edi
	leal	1804603682(%eax,%ebp,1),%eax
	xorl	%edx,%edi
	addl	%edi,%eax
	movl	%ebx,%edi
	roll	$7,%eax
	movl	52(%esi),%ebp
	addl	%ebx,%eax
	xorl	%ecx,%edi
	andl	%eax,%edi
	leal	4254626195(%edx,%ebp,1),%edx
	xorl	%ecx,%edi
	addl	%edi,%edx
	movl	%eax,%edi
	roll	$12,%edx
	movl	56(%esi),%ebp
	addl	%eax,%edx
	xorl	%ebx,%edi
	andl	%edx,%edi
	leal	2792965006(%ecx,%ebp,1),%ecx
	xorl	%ebx,%edi
	addl	%edi,%ecx
	movl	%edx,%edi
	roll	$17,%ecx
	movl	60(%esi),%ebp
	addl	%edx,%ecx
	xorl	%eax,%edi
	andl	%ecx,%edi
	leal	1236535329(%ebx,%ebp,1),%ebx
	xorl	%eax,%edi
	addl	%edi,%ebx
	movl	%ecx,%edi
	roll	$22,%ebx
	movl	4(%esi),%ebp
	addl	%ecx,%ebx
	# --- Round 2: G(b,c,d) = (b & d) | (c & ~d), 16 steps ---
	leal	4129170786(%eax,%ebp,1),%eax
	xorl	%ebx,%edi
	andl	%edx,%edi
	movl	24(%esi),%ebp
	xorl	%ecx,%edi
	addl	%edi,%eax
	movl	%ebx,%edi
	roll	$5,%eax
	addl	%ebx,%eax
	leal	3225465664(%edx,%ebp,1),%edx
	xorl	%eax,%edi
	andl	%ecx,%edi
	movl	44(%esi),%ebp
	xorl	%ebx,%edi
	addl	%edi,%edx
	movl	%eax,%edi
	roll	$9,%edx
	addl	%eax,%edx
	leal	643717713(%ecx,%ebp,1),%ecx
	xorl	%edx,%edi
	andl	%ebx,%edi
	movl	(%esi),%ebp
	xorl	%eax,%edi
	addl	%edi,%ecx
	movl	%edx,%edi
	roll	$14,%ecx
	addl	%edx,%ecx
	leal	3921069994(%ebx,%ebp,1),%ebx
	xorl	%ecx,%edi
	andl	%eax,%edi
	movl	20(%esi),%ebp
	xorl	%edx,%edi
	addl	%edi,%ebx
	movl	%ecx,%edi
	roll	$20,%ebx
	addl	%ecx,%ebx
	leal	3593408605(%eax,%ebp,1),%eax
	xorl	%ebx,%edi
	andl	%edx,%edi
	movl	40(%esi),%ebp
	xorl	%ecx,%edi
	addl	%edi,%eax
	movl	%ebx,%edi
	roll	$5,%eax
	addl	%ebx,%eax
	leal	38016083(%edx,%ebp,1),%edx
	xorl	%eax,%edi
	andl	%ecx,%edi
	movl	60(%esi),%ebp
	xorl	%ebx,%edi
	addl	%edi,%edx
	movl	%eax,%edi
	roll	$9,%edx
	addl	%eax,%edx
	leal	3634488961(%ecx,%ebp,1),%ecx
	xorl	%edx,%edi
	andl	%ebx,%edi
	movl	16(%esi),%ebp
	xorl	%eax,%edi
	addl	%edi,%ecx
	movl	%edx,%edi
	roll	$14,%ecx
	addl	%edx,%ecx
	leal	3889429448(%ebx,%ebp,1),%ebx
	xorl	%ecx,%edi
	andl	%eax,%edi
	movl	36(%esi),%ebp
	xorl	%edx,%edi
	addl	%edi,%ebx
	movl	%ecx,%edi
	roll	$20,%ebx
	addl	%ecx,%ebx
	leal	568446438(%eax,%ebp,1),%eax
	xorl	%ebx,%edi
	andl	%edx,%edi
	movl	56(%esi),%ebp
	xorl	%ecx,%edi
	addl	%edi,%eax
	movl	%ebx,%edi
	roll	$5,%eax
	addl	%ebx,%eax
	leal	3275163606(%edx,%ebp,1),%edx
	xorl	%eax,%edi
	andl	%ecx,%edi
	movl	12(%esi),%ebp
	xorl	%ebx,%edi
	addl	%edi,%edx
	movl	%eax,%edi
	roll	$9,%edx
	addl	%eax,%edx
	leal	4107603335(%ecx,%ebp,1),%ecx
	xorl	%edx,%edi
	andl	%ebx,%edi
	movl	32(%esi),%ebp
	xorl	%eax,%edi
	addl	%edi,%ecx
	movl	%edx,%edi
	roll	$14,%ecx
	addl	%edx,%ecx
	leal	1163531501(%ebx,%ebp,1),%ebx
	xorl	%ecx,%edi
	andl	%eax,%edi
	movl	52(%esi),%ebp
	xorl	%edx,%edi
	addl	%edi,%ebx
	movl	%ecx,%edi
	roll	$20,%ebx
	addl	%ecx,%ebx
	leal	2850285829(%eax,%ebp,1),%eax
	xorl	%ebx,%edi
	andl	%edx,%edi
	movl	8(%esi),%ebp
	xorl	%ecx,%edi
	addl	%edi,%eax
	movl	%ebx,%edi
	roll	$5,%eax
	addl	%ebx,%eax
	leal	4243563512(%edx,%ebp,1),%edx
	xorl	%eax,%edi
	andl	%ecx,%edi
	movl	28(%esi),%ebp
	xorl	%ebx,%edi
	addl	%edi,%edx
	movl	%eax,%edi
	roll	$9,%edx
	addl	%eax,%edx
	leal	1735328473(%ecx,%ebp,1),%ecx
	xorl	%edx,%edi
	andl	%ebx,%edi
	movl	48(%esi),%ebp
	xorl	%eax,%edi
	addl	%edi,%ecx
	movl	%edx,%edi
	roll	$14,%ecx
	addl	%edx,%ecx
	leal	2368359562(%ebx,%ebp,1),%ebx
	xorl	%ecx,%edi
	andl	%eax,%edi
	movl	20(%esi),%ebp
	xorl	%edx,%edi
	addl	%edi,%ebx
	movl	%ecx,%edi
	roll	$20,%ebx
	addl	%ecx,%ebx
	# --- Round 3: H(b,c,d) = b ^ c ^ d, 16 steps ---
	xorl	%edx,%edi
	xorl	%ebx,%edi
	leal	4294588738(%eax,%ebp,1),%eax
	addl	%edi,%eax
	roll	$4,%eax
	movl	32(%esi),%ebp
	movl	%ebx,%edi
	leal	2272392833(%edx,%ebp,1),%edx
	addl	%ebx,%eax
	xorl	%ecx,%edi
	xorl	%eax,%edi
	movl	44(%esi),%ebp
	addl	%edi,%edx
	movl	%eax,%edi
	roll	$11,%edx
	addl	%eax,%edx
	xorl	%ebx,%edi
	xorl	%edx,%edi
	leal	1839030562(%ecx,%ebp,1),%ecx
	addl	%edi,%ecx
	roll	$16,%ecx
	movl	56(%esi),%ebp
	movl	%edx,%edi
	leal	4259657740(%ebx,%ebp,1),%ebx
	addl	%edx,%ecx
	xorl	%eax,%edi
	xorl	%ecx,%edi
	movl	4(%esi),%ebp
	addl	%edi,%ebx
	movl	%ecx,%edi
	roll	$23,%ebx
	addl	%ecx,%ebx
	xorl	%edx,%edi
	xorl	%ebx,%edi
	leal	2763975236(%eax,%ebp,1),%eax
	addl	%edi,%eax
	roll	$4,%eax
	movl	16(%esi),%ebp
	movl	%ebx,%edi
	leal	1272893353(%edx,%ebp,1),%edx
	addl	%ebx,%eax
	xorl	%ecx,%edi
	xorl	%eax,%edi
	movl	28(%esi),%ebp
	addl	%edi,%edx
	movl	%eax,%edi
	roll	$11,%edx
	addl	%eax,%edx
	xorl	%ebx,%edi
	xorl	%edx,%edi
	leal	4139469664(%ecx,%ebp,1),%ecx
	addl	%edi,%ecx
	roll	$16,%ecx
	movl	40(%esi),%ebp
	movl	%edx,%edi
	leal	3200236656(%ebx,%ebp,1),%ebx
	addl	%edx,%ecx
	xorl	%eax,%edi
	xorl	%ecx,%edi
	movl	52(%esi),%ebp
	addl	%edi,%ebx
	movl	%ecx,%edi
	roll	$23,%ebx
	addl	%ecx,%ebx
	xorl	%edx,%edi
	xorl	%ebx,%edi
	leal	681279174(%eax,%ebp,1),%eax
	addl	%edi,%eax
	roll	$4,%eax
	movl	(%esi),%ebp
	movl	%ebx,%edi
	leal	3936430074(%edx,%ebp,1),%edx
	addl	%ebx,%eax
	xorl	%ecx,%edi
	xorl	%eax,%edi
	movl	12(%esi),%ebp
	addl	%edi,%edx
	movl	%eax,%edi
	roll	$11,%edx
	addl	%eax,%edx
	xorl	%ebx,%edi
	xorl	%edx,%edi
	leal	3572445317(%ecx,%ebp,1),%ecx
	addl	%edi,%ecx
	roll	$16,%ecx
	movl	24(%esi),%ebp
	movl	%edx,%edi
	leal	76029189(%ebx,%ebp,1),%ebx
	addl	%edx,%ecx
	xorl	%eax,%edi
	xorl	%ecx,%edi
	movl	36(%esi),%ebp
	addl	%edi,%ebx
	movl	%ecx,%edi
	roll	$23,%ebx
	addl	%ecx,%ebx
	xorl	%edx,%edi
	xorl	%ebx,%edi
	leal	3654602809(%eax,%ebp,1),%eax
	addl	%edi,%eax
	roll	$4,%eax
	movl	48(%esi),%ebp
	movl	%ebx,%edi
	leal	3873151461(%edx,%ebp,1),%edx
	addl	%ebx,%eax
	xorl	%ecx,%edi
	xorl	%eax,%edi
	movl	60(%esi),%ebp
	addl	%edi,%edx
	movl	%eax,%edi
	roll	$11,%edx
	addl	%eax,%edx
	xorl	%ebx,%edi
	xorl	%edx,%edi
	leal	530742520(%ecx,%ebp,1),%ecx
	addl	%edi,%ecx
	roll	$16,%ecx
	movl	8(%esi),%ebp
	movl	%edx,%edi
	leal	3299628645(%ebx,%ebp,1),%ebx
	addl	%edx,%ecx
	xorl	%eax,%edi
	xorl	%ecx,%edi
	movl	(%esi),%ebp
	addl	%edi,%ebx
	# --- Round 4: I(b,c,d) = c ^ (b | ~d), 16 steps ---
	movl	$-1,%edi
	roll	$23,%ebx
	addl	%ecx,%ebx
	xorl	%edx,%edi
	orl	%ebx,%edi
	leal	4096336452(%eax,%ebp,1),%eax
	xorl	%ecx,%edi
	movl	28(%esi),%ebp
	addl	%edi,%eax
	movl	$-1,%edi
	roll	$6,%eax
	xorl	%ecx,%edi
	addl	%ebx,%eax
	orl	%eax,%edi
	leal	1126891415(%edx,%ebp,1),%edx
	xorl	%ebx,%edi
	movl	56(%esi),%ebp
	addl	%edi,%edx
	movl	$-1,%edi
	roll	$10,%edx
	xorl	%ebx,%edi
	addl	%eax,%edx
	orl	%edx,%edi
	leal	2878612391(%ecx,%ebp,1),%ecx
	xorl	%eax,%edi
	movl	20(%esi),%ebp
	addl	%edi,%ecx
	movl	$-1,%edi
	roll	$15,%ecx
	xorl	%eax,%edi
	addl	%edx,%ecx
	orl	%ecx,%edi
	leal	4237533241(%ebx,%ebp,1),%ebx
	xorl	%edx,%edi
	movl	48(%esi),%ebp
	addl	%edi,%ebx
	movl	$-1,%edi
	roll	$21,%ebx
	xorl	%edx,%edi
	addl	%ecx,%ebx
	orl	%ebx,%edi
	leal	1700485571(%eax,%ebp,1),%eax
	xorl	%ecx,%edi
	movl	12(%esi),%ebp
	addl	%edi,%eax
	movl	$-1,%edi
	roll	$6,%eax
	xorl	%ecx,%edi
	addl	%ebx,%eax
	orl	%eax,%edi
	leal	2399980690(%edx,%ebp,1),%edx
	xorl	%ebx,%edi
	movl	40(%esi),%ebp
	addl	%edi,%edx
	movl	$-1,%edi
	roll	$10,%edx
	xorl	%ebx,%edi
	addl	%eax,%edx
	orl	%edx,%edi
	leal	4293915773(%ecx,%ebp,1),%ecx
	xorl	%eax,%edi
	movl	4(%esi),%ebp
	addl	%edi,%ecx
	movl	$-1,%edi
	roll	$15,%ecx
	xorl	%eax,%edi
	addl	%edx,%ecx
	orl	%ecx,%edi
	leal	2240044497(%ebx,%ebp,1),%ebx
	xorl	%edx,%edi
	movl	32(%esi),%ebp
	addl	%edi,%ebx
	movl	$-1,%edi
	roll	$21,%ebx
	xorl	%edx,%edi
	addl	%ecx,%ebx
	orl	%ebx,%edi
	leal	1873313359(%eax,%ebp,1),%eax
	xorl	%ecx,%edi
	movl	60(%esi),%ebp
	addl	%edi,%eax
	movl	$-1,%edi
	roll	$6,%eax
	xorl	%ecx,%edi
	addl	%ebx,%eax
	orl	%eax,%edi
	leal	4264355552(%edx,%ebp,1),%edx
	xorl	%ebx,%edi
	movl	24(%esi),%ebp
	addl	%edi,%edx
	movl	$-1,%edi
	roll	$10,%edx
	xorl	%ebx,%edi
	addl	%eax,%edx
	orl	%edx,%edi
	leal	2734768916(%ecx,%ebp,1),%ecx
	xorl	%eax,%edi
	movl	52(%esi),%ebp
	addl	%edi,%ecx
	movl	$-1,%edi
	roll	$15,%ecx
	xorl	%eax,%edi
	addl	%edx,%ecx
	orl	%ecx,%edi
	leal	1309151649(%ebx,%ebp,1),%ebx
	xorl	%edx,%edi
	movl	16(%esi),%ebp
	addl	%edi,%ebx
	movl	$-1,%edi
	roll	$21,%ebx
	xorl	%edx,%edi
	addl	%ecx,%ebx
	orl	%ebx,%edi
	leal	4149444226(%eax,%ebp,1),%eax
	xorl	%ecx,%edi
	movl	44(%esi),%ebp
	addl	%edi,%eax
	movl	$-1,%edi
	roll	$6,%eax
	xorl	%ecx,%edi
	addl	%ebx,%eax
	orl	%eax,%edi
	leal	3174756917(%edx,%ebp,1),%edx
	xorl	%ebx,%edi
	movl	8(%esi),%ebp
	addl	%edi,%edx
	movl	$-1,%edi
	roll	$10,%edx
	xorl	%ebx,%edi
	addl	%eax,%edx
	orl	%edx,%edi
	leal	718787259(%ecx,%ebp,1),%ecx
	xorl	%eax,%edi
	movl	36(%esi),%ebp
	addl	%edi,%ecx
	movl	$-1,%edi
	roll	$15,%ecx
	xorl	%eax,%edi
	addl	%edx,%ecx
	orl	%ecx,%edi
	leal	3951481745(%ebx,%ebp,1),%ebx
	xorl	%edx,%edi
	# --- Add this block's result into ctx and advance to next block ---
	movl	24(%esp),%ebp		# ebp = ctx
	addl	%edi,%ebx
	addl	$64,%esi		# data += 64
	roll	$21,%ebx
	movl	(%ebp),%edi
	addl	%ecx,%ebx
	addl	%edi,%eax
	movl	4(%ebp),%edi
	addl	%edi,%ebx
	movl	8(%ebp),%edi
	addl	%edi,%ecx
	movl	12(%ebp),%edi
	addl	%edi,%edx
	movl	%eax,(%ebp)
	movl	%ebx,4(%ebp)
	movl	(%esp),%edi		# edi = last-block sentinel
	movl	%ecx,8(%ebp)
	movl	%edx,12(%ebp)
	cmpl	%esi,%edi
	jae	.L000start		# loop while data <= last block
	popl	%eax
	popl	%ebx
	popl	%ebp
	popl	%edi
	popl	%esi
	ret
.size	md5_block_asm_data_order,.-.L_md5_block_asm_data_order_begin

230
deps/openssl/asm/x86-elf-gas/rc4/rc4-586.s

@ -0,0 +1,230 @@
.file	"rc4-586.s"
.text
# ----------------------------------------------------------------------
# void RC4(RC4_KEY *key, size_t len, const unsigned char *in, unsigned char *out)
#
# Machine-generated (OpenSSL crypto/rc4/asm/rc4-586.pl) i386 RC4 stream
# cipher, AT&T/GAS syntax, cdecl.  Two S-box layouts are supported:
# 4-byte-per-entry words (default, 4x-unrolled fast path) and 1 byte per
# entry (selected at key setup when CPUID flag bit 20 is set; detected
# here by the -1 sentinel stored at 256(%edi)).
#
# Register roles: %edi = key->data (S-box, after the x/y header),
# %al/%bl = RC4 indices x/y, %esi = input ptr, %ebp = out - in delta
# (word path) or out ptr (byte path), 24(%esp) = input end sentinel.
# ----------------------------------------------------------------------
.globl	RC4
.type	RC4,@function
.align	16
RC4:
.L_RC4_begin:
	pushl	%ebp
	pushl	%ebx
	pushl	%esi
	pushl	%edi
	movl	20(%esp),%edi		# key
	movl	24(%esp),%edx		# len
	movl	28(%esp),%esi		# in
	movl	32(%esp),%ebp		# out
	xorl	%eax,%eax
	xorl	%ebx,%ebx
	cmpl	$0,%edx
	je	.L000abort		# len == 0: nothing to do
	movb	(%edi),%al		# x = key->x
	movb	4(%edi),%bl		# y = key->y
	addl	$8,%edi			# edi -> S-box
	leal	(%esi,%edx,1),%ecx
	subl	%esi,%ebp		# ebp = out - in (rebased store)
	movl	%ecx,24(%esp)		# end-of-input sentinel
	incb	%al
	cmpl	$-1,256(%edi)		# byte-layout sentinel?
	je	.L001RC4_CHAR
	movl	(%edi,%eax,4),%ecx
	andl	$-4,%edx
	jz	.L002loop1		# fewer than 4 bytes: scalar loop
	leal	-4(%esi,%edx,1),%edx
	movl	%edx,28(%esp)		# last 4-byte-aligned input position
	movl	%ebp,32(%esp)
.align	16
.L003loop4:
	# 4 RC4 steps, accumulating keystream bytes into %ebp via ror/or
	addb	%cl,%bl
	movl	(%edi,%ebx,4),%edx
	movl	%ecx,(%edi,%ebx,4)
	movl	%edx,(%edi,%eax,4)
	addl	%ecx,%edx
	incb	%al
	andl	$255,%edx
	movl	(%edi,%eax,4),%ecx
	movl	(%edi,%edx,4),%ebp
	addb	%cl,%bl
	movl	(%edi,%ebx,4),%edx
	movl	%ecx,(%edi,%ebx,4)
	movl	%edx,(%edi,%eax,4)
	addl	%ecx,%edx
	incb	%al
	andl	$255,%edx
	rorl	$8,%ebp
	movl	(%edi,%eax,4),%ecx
	orl	(%edi,%edx,4),%ebp
	addb	%cl,%bl
	movl	(%edi,%ebx,4),%edx
	movl	%ecx,(%edi,%ebx,4)
	movl	%edx,(%edi,%eax,4)
	addl	%ecx,%edx
	incb	%al
	andl	$255,%edx
	rorl	$8,%ebp
	movl	(%edi,%eax,4),%ecx
	orl	(%edi,%edx,4),%ebp
	addb	%cl,%bl
	movl	(%edi,%ebx,4),%edx
	movl	%ecx,(%edi,%ebx,4)
	movl	%edx,(%edi,%eax,4)
	addl	%ecx,%edx
	incb	%al
	andl	$255,%edx
	rorl	$8,%ebp
	movl	32(%esp),%ecx
	orl	(%edi,%edx,4),%ebp
	rorl	$8,%ebp
	xorl	(%esi),%ebp		# XOR 4 keystream bytes with input
	cmpl	28(%esp),%esi
	movl	%ebp,(%ecx,%esi,1)	# store at out (= in + delta)
	leal	4(%esi),%esi
	movl	(%edi,%eax,4),%ecx
	jb	.L003loop4
	cmpl	24(%esp),%esi
	je	.L004done
	movl	32(%esp),%ebp
.align	16
.L002loop1:
	# one RC4 step per byte (tail / short input)
	addb	%cl,%bl
	movl	(%edi,%ebx,4),%edx
	movl	%ecx,(%edi,%ebx,4)
	movl	%edx,(%edi,%eax,4)
	addl	%ecx,%edx
	incb	%al
	andl	$255,%edx
	movl	(%edi,%edx,4),%edx
	xorb	(%esi),%dl
	leal	1(%esi),%esi
	movl	(%edi,%eax,4),%ecx
	cmpl	24(%esp),%esi
	movb	%dl,-1(%ebp,%esi,1)
	jb	.L002loop1
	jmp	.L004done
.align	16
.L001RC4_CHAR:
	# byte-per-entry S-box variant
	movzbl	(%edi,%eax,1),%ecx
.L005cloop1:
	addb	%cl,%bl
	movzbl	(%edi,%ebx,1),%edx
	movb	%cl,(%edi,%ebx,1)
	movb	%dl,(%edi,%eax,1)
	addb	%cl,%dl
	movzbl	(%edi,%edx,1),%edx
	addb	$1,%al
	xorb	(%esi),%dl
	leal	1(%esi),%esi
	movzbl	(%edi,%eax,1),%ecx
	cmpl	24(%esp),%esi
	movb	%dl,-1(%ebp,%esi,1)
	jb	.L005cloop1
.L004done:
	decb	%al			# undo the speculative increment
	movb	%bl,-4(%edi)		# store back key->y
	movb	%al,-8(%edi)		# store back key->x
.L000abort:
	popl	%edi
	popl	%esi
	popl	%ebx
	popl	%ebp
	ret
.size	RC4,.-.L_RC4_begin
# ----------------------------------------------------------------------
# void RC4_set_key(RC4_KEY *key, int len, const unsigned char *data)
#
# RC4 key schedule (KSA).  Chooses the S-box layout by probing
# OPENSSL_ia32cap_P bit 20: set -> 1-byte entries (and a -1 sentinel is
# written at 256(%edi) so RC4() picks the matching code path), clear ->
# 4-byte word entries.  key->x/key->y are zeroed on exit.
#
# Register roles: %edi = key->data, %ebp = -len counting up through the
# key bytes (wraps via the saved -len at -4(%edi)), %cl = i, %dl = j.
# ----------------------------------------------------------------------
.globl	RC4_set_key
.type	RC4_set_key,@function
.align	16
RC4_set_key:
.L_RC4_set_key_begin:
	pushl	%ebp
	pushl	%ebx
	pushl	%esi
	pushl	%edi
	movl	20(%esp),%edi		# key
	movl	24(%esp),%ebp		# len
	movl	28(%esp),%esi		# data
	leal	OPENSSL_ia32cap_P,%edx
	leal	8(%edi),%edi		# edi -> S-box
	leal	(%esi,%ebp,1),%esi	# esi -> end of key material
	negl	%ebp
	xorl	%eax,%eax
	movl	%ebp,-4(%edi)		# stash -len for wrap-around
	btl	$20,(%edx)		# CPUID feature bit selects layout
	jc	.L006c1stloop
.align	16
.L007w1stloop:
	# identity-fill word S-box: S[i] = i
	movl	%eax,(%edi,%eax,4)
	addb	$1,%al
	jnc	.L007w1stloop
	xorl	%ecx,%ecx
	xorl	%edx,%edx
.align	16
.L008w2ndloop:
	# KSA mixing: j += S[i] + key[i mod len]; swap S[i], S[j]
	movl	(%edi,%ecx,4),%eax
	addb	(%esi,%ebp,1),%dl
	addb	%al,%dl
	addl	$1,%ebp
	movl	(%edi,%edx,4),%ebx
	jnz	.L009wnowrap
	movl	-4(%edi),%ebp		# wrap key index back to start
.L009wnowrap:
	movl	%eax,(%edi,%edx,4)
	movl	%ebx,(%edi,%ecx,4)
	addb	$1,%cl
	jnc	.L008w2ndloop
	jmp	.L010exit
.align	16
.L006c1stloop:
	# identity-fill byte S-box: S[i] = i
	movb	%al,(%edi,%eax,1)
	addb	$1,%al
	jnc	.L006c1stloop
	xorl	%ecx,%ecx
	xorl	%edx,%edx
	xorl	%ebx,%ebx
.align	16
.L011c2ndloop:
	movb	(%edi,%ecx,1),%al
	addb	(%esi,%ebp,1),%dl
	addb	%al,%dl
	addl	$1,%ebp
	movb	(%edi,%edx,1),%bl
	jnz	.L012cnowrap
	movl	-4(%edi),%ebp
.L012cnowrap:
	movb	%al,(%edi,%edx,1)
	movb	%bl,(%edi,%ecx,1)
	addb	$1,%cl
	jnc	.L011c2ndloop
	movl	$-1,256(%edi)		# sentinel: byte layout in use
.L010exit:
	xorl	%eax,%eax
	movl	%eax,-8(%edi)		# key->x = 0
	movl	%eax,-4(%edi)		# key->y = 0
	popl	%edi
	popl	%esi
	popl	%ebx
	popl	%ebp
	ret
.size	RC4_set_key,.-.L_RC4_set_key_begin
# ----------------------------------------------------------------------
# const char *RC4_options(void)
#
# Returns a static description of the compiled-in RC4 flavour:
# "rc4(4x,int)" normally, or "rc4(1x,char)" when OPENSSL_ia32cap_P bit
# 20 is set (skipping 12 bytes past the first NUL-terminated string).
# Uses the call/pop PIC idiom to address .L014opts position-independently.
# ----------------------------------------------------------------------
.globl	RC4_options
.type	RC4_options,@function
.align	16
RC4_options:
.L_RC4_options_begin:
	call	.L013pic_point
.L013pic_point:
	popl	%eax			# eax = address of .L013pic_point
	leal	.L014opts-.L013pic_point(%eax),%eax
	leal	OPENSSL_ia32cap_P,%edx
	btl	$20,(%edx)
	jnc	.L015skip
	addl	$12,%eax		# skip "rc4(4x,int)\0" -> second string
.L015skip:
	ret
.align	64
.L014opts:
.byte	114,99,52,40,52,120,44,105,110,116,41,0		# "rc4(4x,int)\0"
.byte	114,99,52,40,49,120,44,99,104,97,114,41,0	# "rc4(1x,char)\0"
.byte	82,67,52,32,102,111,114,32,120,56,54,44,32,67,82,89	# "RC4 for x86, CRY..."
.byte	80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114
.byte	111,64,111,112,101,110,115,115,108,46,111,114,103,62,0
.align	64
.size	RC4_options,.-.L_RC4_options_begin
.comm	OPENSSL_ia32cap_P,4,4

564
deps/openssl/asm/x86-elf-gas/rc5/rc5-586.s

@ -0,0 +1,564 @@
.file	"rc5-586.s"
.text
# ----------------------------------------------------------------------
# void RC5_32_encrypt(unsigned long *d, RC5_32_KEY *key)
#
# Machine-generated (OpenSSL crypto/rc5/asm/rc5-586.pl) i386 RC5-32
# block encryption, AT&T/GAS syntax, cdecl.  Fully unrolled for 16
# rounds with early exits after round 8 / round 12 depending on
# key->rounds stored at (%ebp).
#
# Register roles: %edx = data ptr, %ebp = key schedule S[] (rounds word
# first), %edi/%esi = A/B halves, %eax = round key, %cl = data-dependent
# rotate count (RC5's defining operation).
# ----------------------------------------------------------------------
.globl	RC5_32_encrypt
.type	RC5_32_encrypt,@function
.align	16
RC5_32_encrypt:
.L_RC5_32_encrypt_begin:
	pushl	%ebp
	pushl	%esi
	pushl	%edi
	movl	16(%esp),%edx		# d
	movl	20(%esp),%ebp		# key
	movl	(%edx),%edi		# A
	movl	4(%edx),%esi		# B
	pushl	%ebx
	movl	(%ebp),%ebx		# ebx = number of rounds (8/12/16)
	addl	4(%ebp),%edi		# A += S[0]
	addl	8(%ebp),%esi		# B += S[1]
	# rounds 1..8: A = ((A^B) <<< B) + S[2i]; B = ((B^A) <<< A) + S[2i+1]
	xorl	%esi,%edi
	movl	12(%ebp),%eax
	movl	%esi,%ecx
	roll	%cl,%edi
	addl	%eax,%edi
	xorl	%edi,%esi
	movl	16(%ebp),%eax
	movl	%edi,%ecx
	roll	%cl,%esi
	addl	%eax,%esi
	xorl	%esi,%edi
	movl	20(%ebp),%eax
	movl	%esi,%ecx
	roll	%cl,%edi
	addl	%eax,%edi
	xorl	%edi,%esi
	movl	24(%ebp),%eax
	movl	%edi,%ecx
	roll	%cl,%esi
	addl	%eax,%esi
	xorl	%esi,%edi
	movl	28(%ebp),%eax
	movl	%esi,%ecx
	roll	%cl,%edi
	addl	%eax,%edi
	xorl	%edi,%esi
	movl	32(%ebp),%eax
	movl	%edi,%ecx
	roll	%cl,%esi
	addl	%eax,%esi
	xorl	%esi,%edi
	movl	36(%ebp),%eax
	movl	%esi,%ecx
	roll	%cl,%edi
	addl	%eax,%edi
	xorl	%edi,%esi
	movl	40(%ebp),%eax
	movl	%edi,%ecx
	roll	%cl,%esi
	addl	%eax,%esi
	xorl	%esi,%edi
	movl	44(%ebp),%eax
	movl	%esi,%ecx
	roll	%cl,%edi
	addl	%eax,%edi
	xorl	%edi,%esi
	movl	48(%ebp),%eax
	movl	%edi,%ecx
	roll	%cl,%esi
	addl	%eax,%esi
	xorl	%esi,%edi
	movl	52(%ebp),%eax
	movl	%esi,%ecx
	roll	%cl,%edi
	addl	%eax,%edi
	xorl	%edi,%esi
	movl	56(%ebp),%eax
	movl	%edi,%ecx
	roll	%cl,%esi
	addl	%eax,%esi
	xorl	%esi,%edi
	movl	60(%ebp),%eax
	movl	%esi,%ecx
	roll	%cl,%edi
	addl	%eax,%edi
	xorl	%edi,%esi
	movl	64(%ebp),%eax
	movl	%edi,%ecx
	roll	%cl,%esi
	addl	%eax,%esi
	xorl	%esi,%edi
	movl	68(%ebp),%eax
	movl	%esi,%ecx
	roll	%cl,%edi
	addl	%eax,%edi
	xorl	%edi,%esi
	movl	72(%ebp),%eax
	movl	%edi,%ecx
	roll	%cl,%esi
	addl	%eax,%esi
	cmpl	$8,%ebx
	je	.L000rc5_exit		# 8-round key: done
	# rounds 9..12
	xorl	%esi,%edi
	movl	76(%ebp),%eax
	movl	%esi,%ecx
	roll	%cl,%edi
	addl	%eax,%edi
	xorl	%edi,%esi
	movl	80(%ebp),%eax
	movl	%edi,%ecx
	roll	%cl,%esi
	addl	%eax,%esi
	xorl	%esi,%edi
	movl	84(%ebp),%eax
	movl	%esi,%ecx
	roll	%cl,%edi
	addl	%eax,%edi
	xorl	%edi,%esi
	movl	88(%ebp),%eax
	movl	%edi,%ecx
	roll	%cl,%esi
	addl	%eax,%esi
	xorl	%esi,%edi
	movl	92(%ebp),%eax
	movl	%esi,%ecx
	roll	%cl,%edi
	addl	%eax,%edi
	xorl	%edi,%esi
	movl	96(%ebp),%eax
	movl	%edi,%ecx
	roll	%cl,%esi
	addl	%eax,%esi
	xorl	%esi,%edi
	movl	100(%ebp),%eax
	movl	%esi,%ecx
	roll	%cl,%edi
	addl	%eax,%edi
	xorl	%edi,%esi
	movl	104(%ebp),%eax
	movl	%edi,%ecx
	roll	%cl,%esi
	addl	%eax,%esi
	cmpl	$12,%ebx
	je	.L000rc5_exit		# 12-round key: done
	# rounds 13..16
	xorl	%esi,%edi
	movl	108(%ebp),%eax
	movl	%esi,%ecx
	roll	%cl,%edi
	addl	%eax,%edi
	xorl	%edi,%esi
	movl	112(%ebp),%eax
	movl	%edi,%ecx
	roll	%cl,%esi
	addl	%eax,%esi
	xorl	%esi,%edi
	movl	116(%ebp),%eax
	movl	%esi,%ecx
	roll	%cl,%edi
	addl	%eax,%edi
	xorl	%edi,%esi
	movl	120(%ebp),%eax
	movl	%edi,%ecx
	roll	%cl,%esi
	addl	%eax,%esi
	xorl	%esi,%edi
	movl	124(%ebp),%eax
	movl	%esi,%ecx
	roll	%cl,%edi
	addl	%eax,%edi
	xorl	%edi,%esi
	movl	128(%ebp),%eax
	movl	%edi,%ecx
	roll	%cl,%esi
	addl	%eax,%esi
	xorl	%esi,%edi
	movl	132(%ebp),%eax
	movl	%esi,%ecx
	roll	%cl,%edi
	addl	%eax,%edi
	xorl	%edi,%esi
	movl	136(%ebp),%eax
	movl	%edi,%ecx
	roll	%cl,%esi
	addl	%eax,%esi
.L000rc5_exit:
	movl	%edi,(%edx)		# store A
	movl	%esi,4(%edx)		# store B
	popl	%ebx
	popl	%edi
	popl	%esi
	popl	%ebp
	ret
.size	RC5_32_encrypt,.-.L_RC5_32_encrypt_begin
# ----------------------------------------------------------------------
# void RC5_32_decrypt(unsigned long *d, RC5_32_KEY *key)
#
# Inverse of RC5_32_encrypt: runs the 16 unrolled rounds backwards
# (sub / rotate-right / xor), entering mid-stream at .L001rc5_dec_12 or
# .L002rc5_dec_8 for shorter key schedules.  Register roles match the
# encrypt routine.
# ----------------------------------------------------------------------
.globl	RC5_32_decrypt
.type	RC5_32_decrypt,@function
.align	16
RC5_32_decrypt:
.L_RC5_32_decrypt_begin:
	pushl	%ebp
	pushl	%esi
	pushl	%edi
	movl	16(%esp),%edx		# d
	movl	20(%esp),%ebp		# key
	movl	(%edx),%edi		# A
	movl	4(%edx),%esi		# B
	pushl	%ebx
	movl	(%ebp),%ebx		# rounds
	cmpl	$12,%ebx
	je	.L001rc5_dec_12
	cmpl	$8,%ebx
	je	.L002rc5_dec_8
	# rounds 16..13: B = ((B - S[2i+1]) >>> A) ^ A; A = ((A - S[2i]) >>> B) ^ B
	movl	136(%ebp),%eax
	subl	%eax,%esi
	movl	%edi,%ecx
	rorl	%cl,%esi
	xorl	%edi,%esi
	movl	132(%ebp),%eax
	subl	%eax,%edi
	movl	%esi,%ecx
	rorl	%cl,%edi
	xorl	%esi,%edi
	movl	128(%ebp),%eax
	subl	%eax,%esi
	movl	%edi,%ecx
	rorl	%cl,%esi
	xorl	%edi,%esi
	movl	124(%ebp),%eax
	subl	%eax,%edi
	movl	%esi,%ecx
	rorl	%cl,%edi
	xorl	%esi,%edi
	movl	120(%ebp),%eax
	subl	%eax,%esi
	movl	%edi,%ecx
	rorl	%cl,%esi
	xorl	%edi,%esi
	movl	116(%ebp),%eax
	subl	%eax,%edi
	movl	%esi,%ecx
	rorl	%cl,%edi
	xorl	%esi,%edi
	movl	112(%ebp),%eax
	subl	%eax,%esi
	movl	%edi,%ecx
	rorl	%cl,%esi
	xorl	%edi,%esi
	movl	108(%ebp),%eax
	subl	%eax,%edi
	movl	%esi,%ecx
	rorl	%cl,%edi
	xorl	%esi,%edi
.L001rc5_dec_12:
	# rounds 12..9
	movl	104(%ebp),%eax
	subl	%eax,%esi
	movl	%edi,%ecx
	rorl	%cl,%esi
	xorl	%edi,%esi
	movl	100(%ebp),%eax
	subl	%eax,%edi
	movl	%esi,%ecx
	rorl	%cl,%edi
	xorl	%esi,%edi
	movl	96(%ebp),%eax
	subl	%eax,%esi
	movl	%edi,%ecx
	rorl	%cl,%esi
	xorl	%edi,%esi
	movl	92(%ebp),%eax
	subl	%eax,%edi
	movl	%esi,%ecx
	rorl	%cl,%edi
	xorl	%esi,%edi
	movl	88(%ebp),%eax
	subl	%eax,%esi
	movl	%edi,%ecx
	rorl	%cl,%esi
	xorl	%edi,%esi
	movl	84(%ebp),%eax
	subl	%eax,%edi
	movl	%esi,%ecx
	rorl	%cl,%edi
	xorl	%esi,%edi
	movl	80(%ebp),%eax
	subl	%eax,%esi
	movl	%edi,%ecx
	rorl	%cl,%esi
	xorl	%edi,%esi
	movl	76(%ebp),%eax
	subl	%eax,%edi
	movl	%esi,%ecx
	rorl	%cl,%edi
	xorl	%esi,%edi
.L002rc5_dec_8:
	# rounds 8..1
	movl	72(%ebp),%eax
	subl	%eax,%esi
	movl	%edi,%ecx
	rorl	%cl,%esi
	xorl	%edi,%esi
	movl	68(%ebp),%eax
	subl	%eax,%edi
	movl	%esi,%ecx
	rorl	%cl,%edi
	xorl	%esi,%edi
	movl	64(%ebp),%eax
	subl	%eax,%esi
	movl	%edi,%ecx
	rorl	%cl,%esi
	xorl	%edi,%esi
	movl	60(%ebp),%eax
	subl	%eax,%edi
	movl	%esi,%ecx
	rorl	%cl,%edi
	xorl	%esi,%edi
	movl	56(%ebp),%eax
	subl	%eax,%esi
	movl	%edi,%ecx
	rorl	%cl,%esi
	xorl	%edi,%esi
	movl	52(%ebp),%eax
	subl	%eax,%edi
	movl	%esi,%ecx
	rorl	%cl,%edi
	xorl	%esi,%edi
	movl	48(%ebp),%eax
	subl	%eax,%esi
	movl	%edi,%ecx
	rorl	%cl,%esi
	xorl	%edi,%esi
	movl	44(%ebp),%eax
	subl	%eax,%edi
	movl	%esi,%ecx
	rorl	%cl,%edi
	xorl	%esi,%edi
	movl	40(%ebp),%eax
	subl	%eax,%esi
	movl	%edi,%ecx
	rorl	%cl,%esi
	xorl	%edi,%esi
	movl	36(%ebp),%eax
	subl	%eax,%edi
	movl	%esi,%ecx
	rorl	%cl,%edi
	xorl	%esi,%edi
	movl	32(%ebp),%eax
	subl	%eax,%esi
	movl	%edi,%ecx
	rorl	%cl,%esi
	xorl	%edi,%esi
	movl	28(%ebp),%eax
	subl	%eax,%edi
	movl	%esi,%ecx
	rorl	%cl,%edi
	xorl	%esi,%edi
	movl	24(%ebp),%eax
	subl	%eax,%esi
	movl	%edi,%ecx
	rorl	%cl,%esi
	xorl	%edi,%esi
	movl	20(%ebp),%eax
	subl	%eax,%edi
	movl	%esi,%ecx
	rorl	%cl,%edi
	xorl	%esi,%edi
	movl	16(%ebp),%eax
	subl	%eax,%esi
	movl	%edi,%ecx
	rorl	%cl,%esi
	xorl	%edi,%esi
	movl	12(%ebp),%eax
	subl	%eax,%edi
	movl	%esi,%ecx
	rorl	%cl,%edi
	xorl	%esi,%edi
	subl	8(%ebp),%esi		# undo the initial B += S[1]
	subl	4(%ebp),%edi		# undo the initial A += S[0]
.L003rc5_exit:
	movl	%edi,(%edx)
	movl	%esi,4(%edx)
	popl	%ebx
	popl	%edi
	popl	%esi
	popl	%ebp
	ret
.size	RC5_32_decrypt,.-.L_RC5_32_decrypt_begin
# ----------------------------------------------------------------------
# void RC5_32_cbc_encrypt(const unsigned char *in, unsigned char *out,
#                         long length, RC5_32_KEY *ks, unsigned char *iv,
#                         int encrypt)
#
# CBC wrapper around RC5_32_encrypt/RC5_32_decrypt, generated from
# OpenSSL's perlasm/cbc.pl template (i386, AT&T/GAS, cdecl).
#
# Layout after the pushes below: 8..12(%esp) = scratch data block fed to
# the block routine, 16..20(%esp) = previous ciphertext block (decrypt
# chaining), %esi/%edi = in/out cursors, %ebp = remaining length.
#
# FIXES vs. the original generated code (upstream cbc.pl template bugs
# in the rarely-exercised partial-final-block decrypt tail):
#   * .L025dj3 used `shll $16,%ecx`, which zeroes the two bytes that the
#     fall-through .L026dj2/.L027dj1 stores need; the symmetric .L020dj7
#     path correctly uses `shrl`.  Changed to `shrl $16,%ecx`.
#   * .L026dj2/.L027dj1 wrote the recovered plaintext through %esi (the
#     *input*/ciphertext pointer) instead of %edi (the output pointer),
#     corrupting the caller's input buffer.  Changed to %edi, matching
#     .L020dj7/.L021dj6/.L022dj5/.L023dj4.
# ----------------------------------------------------------------------
.globl	RC5_32_cbc_encrypt
.type	RC5_32_cbc_encrypt,@function
.align	16
RC5_32_cbc_encrypt:
.L_RC5_32_cbc_encrypt_begin:
	pushl	%ebp
	pushl	%ebx
	pushl	%esi
	pushl	%edi
	movl	28(%esp),%ebp		# length
	movl	36(%esp),%ebx		# iv
	movl	(%ebx),%esi		# load IV halves
	movl	4(%ebx),%edi
	pushl	%edi			# 16..20(%esp): prev block (chaining)
	pushl	%esi
	pushl	%edi			# 8..12(%esp): scratch data block
	pushl	%esi
	movl	%esp,%ebx
	movl	36(%esp),%esi		# in
	movl	40(%esp),%edi		# out
	movl	56(%esp),%ecx		# encrypt flag
	movl	48(%esp),%eax		# ks
	pushl	%eax			# args for RC5_32_{en,de}crypt
	pushl	%ebx
	cmpl	$0,%ecx
	jz	.L004decrypt
	andl	$4294967288,%ebp	# length & ~7: whole 8-byte blocks
	movl	8(%esp),%eax
	movl	12(%esp),%ebx
	jz	.L005encrypt_finish
.L006encrypt_loop:
	# C[i] = E(P[i] ^ C[i-1])
	movl	(%esi),%ecx
	movl	4(%esi),%edx
	xorl	%ecx,%eax
	xorl	%edx,%ebx
	movl	%eax,8(%esp)
	movl	%ebx,12(%esp)
	call	.L_RC5_32_encrypt_begin
	movl	8(%esp),%eax
	movl	12(%esp),%ebx
	movl	%eax,(%edi)
	movl	%ebx,4(%edi)
	addl	$8,%esi
	addl	$8,%edi
	subl	$8,%ebp
	jnz	.L006encrypt_loop
.L005encrypt_finish:
	movl	52(%esp),%ebp
	andl	$7,%ebp			# leftover byte count 0..7
	jz	.L007finish
	call	.L008PIC_point
.L008PIC_point:
	popl	%edx
	leal	.L009cbc_enc_jmp_table-.L008PIC_point(%edx),%ecx
	movl	(%ecx,%ebp,4),%ebp	# computed goto: read 1..7 tail bytes
	addl	%edx,%ebp
	xorl	%ecx,%ecx
	xorl	%edx,%edx
	jmp	*%ebp
.L010ej7:
	movb	6(%esi),%dh
	shll	$8,%edx
.L011ej6:
	movb	5(%esi),%dh
.L012ej5:
	movb	4(%esi),%dl
.L013ej4:
	movl	(%esi),%ecx
	jmp	.L014ejend
.L015ej3:
	movb	2(%esi),%ch
	shll	$8,%ecx
.L016ej2:
	movb	1(%esi),%ch
.L017ej1:
	movb	(%esi),%cl
.L014ejend:
	xorl	%ecx,%eax
	xorl	%edx,%ebx
	movl	%eax,8(%esp)
	movl	%ebx,12(%esp)
	call	.L_RC5_32_encrypt_begin
	movl	8(%esp),%eax
	movl	12(%esp),%ebx
	movl	%eax,(%edi)		# tail is zero-padded and written whole
	movl	%ebx,4(%edi)
	jmp	.L007finish
.L004decrypt:
	andl	$4294967288,%ebp
	movl	16(%esp),%eax		# prev ciphertext block (starts as IV)
	movl	20(%esp),%ebx
	jz	.L018decrypt_finish
.L019decrypt_loop:
	# P[i] = D(C[i]) ^ C[i-1]
	movl	(%esi),%eax
	movl	4(%esi),%ebx
	movl	%eax,8(%esp)
	movl	%ebx,12(%esp)
	call	.L_RC5_32_decrypt_begin
	movl	8(%esp),%eax
	movl	12(%esp),%ebx
	movl	16(%esp),%ecx
	movl	20(%esp),%edx
	xorl	%eax,%ecx
	xorl	%ebx,%edx
	movl	(%esi),%eax		# current C[i] becomes next chain value
	movl	4(%esi),%ebx
	movl	%ecx,(%edi)
	movl	%edx,4(%edi)
	movl	%eax,16(%esp)
	movl	%ebx,20(%esp)
	addl	$8,%esi
	addl	$8,%edi
	subl	$8,%ebp
	jnz	.L019decrypt_loop
.L018decrypt_finish:
	movl	52(%esp),%ebp
	andl	$7,%ebp
	jz	.L007finish
	# partial final block: decrypt, XOR with chain, emit only 1..7 bytes
	movl	(%esi),%eax
	movl	4(%esi),%ebx
	movl	%eax,8(%esp)
	movl	%ebx,12(%esp)
	call	.L_RC5_32_decrypt_begin
	movl	8(%esp),%eax
	movl	12(%esp),%ebx
	movl	16(%esp),%ecx
	movl	20(%esp),%edx
	xorl	%eax,%ecx		# ecx = plaintext bytes 0..3
	xorl	%ebx,%edx		# edx = plaintext bytes 4..7
	movl	(%esi),%eax		# ciphertext block -> new IV at finish
	movl	4(%esi),%ebx
.L020dj7:
	rorl	$16,%edx
	movb	%dl,6(%edi)
	shrl	$16,%edx
.L021dj6:
	movb	%dh,5(%edi)
.L022dj5:
	movb	%dl,4(%edi)
.L023dj4:
	movl	%ecx,(%edi)
	jmp	.L024djend
.L025dj3:
	rorl	$16,%ecx
	movb	%cl,2(%edi)
	shrl	$16,%ecx		# FIX: was `shll` — keep bytes 0..1 for dj2/dj1
.L026dj2:
	movb	%ch,1(%edi)		# FIX: was 1(%esi) — write to output buffer
.L027dj1:
	movb	%cl,(%edi)		# FIX: was (%esi) — write to output buffer
.L024djend:
	jmp	.L007finish
.L007finish:
	movl	60(%esp),%ecx		# iv
	addl	$24,%esp
	movl	%eax,(%ecx)		# store back chaining value
	movl	%ebx,4(%ecx)
	popl	%edi
	popl	%esi
	popl	%ebx
	popl	%ebp
	ret
.align	64
.L009cbc_enc_jmp_table:
.long	0
.long	.L017ej1-.L008PIC_point
.long	.L016ej2-.L008PIC_point
.long	.L015ej3-.L008PIC_point
.long	.L013ej4-.L008PIC_point
.long	.L012ej5-.L008PIC_point
.long	.L011ej6-.L008PIC_point
.long	.L010ej7-.L008PIC_point
.align	64
.size	RC5_32_cbc_encrypt,.-.L_RC5_32_cbc_encrypt_begin

1965
deps/openssl/asm/x86-elf-gas/ripemd/rmd-586.s

File diff suppressed because it is too large

1442
deps/openssl/asm/x86-elf-gas/sha/sha1-586.s

File diff suppressed because it is too large

261
deps/openssl/asm/x86-elf-gas/sha/sha256-586.s

@ -0,0 +1,261 @@
.file	"sha512-586.s"
# NOTE: the .file string above is a quirk of the sha512-586.pl generator
# reused for SHA-256; this object actually implements SHA-256.
.text
# ----------------------------------------------------------------------
# void sha256_block_data_order(SHA256_CTX *ctx, const void *in, size_t num)
#
# Machine-generated i386 SHA-256 compression function, AT&T/GAS syntax,
# cdecl.  The 16-word message block is byte-swapped onto the stack; the
# working variables live partly in registers (%eax=a, %edx=e) and partly
# on the stack (rotating window via `subl $4,%esp` each round).  Round
# termination is detected by comparing the just-consumed K constant
# against the last K of each phase rather than a counter.
# ----------------------------------------------------------------------
.globl	sha256_block_data_order
.type	sha256_block_data_order,@function
.align	16
sha256_block_data_order:
.L_sha256_block_data_order_begin:
	pushl	%ebp
	pushl	%ebx
	pushl	%esi
	pushl	%edi
	movl	20(%esp),%esi		# ctx
	movl	24(%esp),%edi		# in
	movl	28(%esp),%eax		# num (64-byte blocks)
	movl	%esp,%ebx		# save original esp
	call	.L000pic_point
.L000pic_point:
	popl	%ebp
	leal	.L001K256-.L000pic_point(%ebp),%ebp	# ebp -> K256 table (PIC)
	subl	$16,%esp
	andl	$-64,%esp		# 64-byte-align the work area
	shll	$6,%eax
	addl	%edi,%eax		# eax = end of input
	movl	%esi,(%esp)
	movl	%edi,4(%esp)
	movl	%eax,8(%esp)
	movl	%ebx,12(%esp)		# saved esp for final restore
.align	16
.L002loop:
	# push the 16 message words, byte-swapped to big-endian
	movl	(%edi),%eax
	movl	4(%edi),%ebx
	movl	8(%edi),%ecx
	movl	12(%edi),%edx
	bswap	%eax
	bswap	%ebx
	bswap	%ecx
	bswap	%edx
	pushl	%eax
	pushl	%ebx
	pushl	%ecx
	pushl	%edx
	movl	16(%edi),%eax
	movl	20(%edi),%ebx
	movl	24(%edi),%ecx
	movl	28(%edi),%edx
	bswap	%eax
	bswap	%ebx
	bswap	%ecx
	bswap	%edx
	pushl	%eax
	pushl	%ebx
	pushl	%ecx
	pushl	%edx
	movl	32(%edi),%eax
	movl	36(%edi),%ebx
	movl	40(%edi),%ecx
	movl	44(%edi),%edx
	bswap	%eax
	bswap	%ebx
	bswap	%ecx
	bswap	%edx
	pushl	%eax
	pushl	%ebx
	pushl	%ecx
	pushl	%edx
	movl	48(%edi),%eax
	movl	52(%edi),%ebx
	movl	56(%edi),%ecx
	movl	60(%edi),%edx
	bswap	%eax
	bswap	%ebx
	bswap	%ecx
	bswap	%edx
	pushl	%eax
	pushl	%ebx
	pushl	%ecx
	pushl	%edx
	addl	$64,%edi
	subl	$32,%esp		# room for the h..a working window
	movl	%edi,100(%esp)		# save input cursor
	movl	(%esi),%eax		# load state a..h
	movl	4(%esi),%ebx
	movl	8(%esi),%ecx
	movl	12(%esi),%edi
	movl	%ebx,4(%esp)
	movl	%ecx,8(%esp)
	movl	%edi,12(%esp)
	movl	16(%esi),%edx
	movl	20(%esi),%ebx
	movl	24(%esi),%ecx
	movl	28(%esi),%edi
	movl	%ebx,20(%esp)
	movl	%ecx,24(%esp)
	movl	%edi,28(%esp)
.align	16
.L00300_15:
	# rounds 0..15: message word taken directly from the block
	movl	92(%esp),%ebx		# ebx = W[i]
	movl	%edx,%ecx
	rorl	$6,%ecx			# Sigma1(e): ror6 ^ ror11 ^ ror25
	movl	%edx,%edi
	rorl	$11,%edi
	movl	20(%esp),%esi
	xorl	%edi,%ecx
	rorl	$14,%edi		# 11+14 = 25
	xorl	%edi,%ecx
	movl	24(%esp),%edi
	addl	%ecx,%ebx
	movl	%edx,16(%esp)
	xorl	%edi,%esi		# Ch(e,f,g) = ((f^g)&e)^g
	movl	%eax,%ecx
	andl	%edx,%esi
	movl	12(%esp),%edx		# edx = d
	xorl	%edi,%esi
	movl	%eax,%edi
	addl	%esi,%ebx
	rorl	$2,%ecx			# Sigma0(a): ror2 ^ ror13 ^ ror22
	addl	28(%esp),%ebx		# += h
	rorl	$13,%edi
	movl	4(%esp),%esi
	xorl	%edi,%ecx
	rorl	$9,%edi			# 13+9 = 22
	addl	%ebx,%edx		# d += T1
	xorl	%edi,%ecx
	movl	8(%esp),%edi
	addl	%ecx,%ebx
	movl	%eax,(%esp)
	movl	%eax,%ecx
	subl	$4,%esp			# rotate working-variable window
	orl	%esi,%eax		# Maj(a,b,c) = (a&b)|((a|b)&c)
	andl	%esi,%ecx
	andl	%edi,%eax
	movl	(%ebp),%esi		# esi = K[i]
	orl	%ecx,%eax
	addl	$4,%ebp
	addl	%ebx,%eax		# a = T1 + T2
	addl	%esi,%edx		# fold K into d and a
	addl	%esi,%eax
	cmpl	$3248222580,%esi	# 0xC19BF174 = K[15]: end of phase
	jne	.L00300_15
	movl	152(%esp),%ebx
.align	16
.L00416_63:
	# rounds 16..63: compute W[i] via sigma0/sigma1 message schedule
	movl	%ebx,%esi
	movl	100(%esp),%ecx
	shrl	$3,%ebx			# sigma0(W[i-15]) = ror7 ^ ror18 ^ shr3
	rorl	$7,%esi
	xorl	%esi,%ebx
	rorl	$11,%esi		# 7+11 = 18
	movl	%ecx,%edi
	xorl	%esi,%ebx
	shrl	$10,%ecx		# sigma1(W[i-2]) = ror17 ^ ror19 ^ shr10
	movl	156(%esp),%esi
	rorl	$17,%edi
	xorl	%edi,%ecx
	rorl	$2,%edi			# 17+2 = 19
	addl	%esi,%ebx		# += W[i-16]
	xorl	%ecx,%edi
	addl	%edi,%ebx
	movl	%edx,%ecx
	addl	120(%esp),%ebx		# += W[i-7]
	rorl	$6,%ecx			# round body identical to 0..15
	movl	%edx,%edi
	rorl	$11,%edi
	movl	20(%esp),%esi
	xorl	%edi,%ecx
	rorl	$14,%edi
	movl	%ebx,92(%esp)		# store new W[i]
	xorl	%edi,%ecx
	movl	24(%esp),%edi
	addl	%ecx,%ebx
	movl	%edx,16(%esp)
	xorl	%edi,%esi
	movl	%eax,%ecx
	andl	%edx,%esi
	movl	12(%esp),%edx
	xorl	%edi,%esi
	movl	%eax,%edi
	addl	%esi,%ebx
	rorl	$2,%ecx
	addl	28(%esp),%ebx
	rorl	$13,%edi
	movl	4(%esp),%esi
	xorl	%edi,%ecx
	rorl	$9,%edi
	addl	%ebx,%edx
	xorl	%edi,%ecx
	movl	8(%esp),%edi
	addl	%ecx,%ebx
	movl	%eax,(%esp)
	movl	%eax,%ecx
	subl	$4,%esp
	orl	%esi,%eax
	andl	%esi,%ecx
	andl	%edi,%eax
	movl	(%ebp),%esi
	orl	%ecx,%eax
	addl	$4,%ebp
	addl	%ebx,%eax
	movl	152(%esp),%ebx
	addl	%esi,%edx
	addl	%esi,%eax
	cmpl	$3329325298,%esi	# 0xC67178F2 = K[63]: last round
	jne	.L00416_63
	# add working variables back into the context
	movl	352(%esp),%esi		# ctx
	movl	4(%esp),%ebx
	movl	8(%esp),%ecx
	movl	12(%esp),%edi
	addl	(%esi),%eax
	addl	4(%esi),%ebx
	addl	8(%esi),%ecx
	addl	12(%esi),%edi
	movl	%eax,(%esi)
	movl	%ebx,4(%esi)
	movl	%ecx,8(%esi)
	movl	%edi,12(%esi)
	movl	20(%esp),%eax
	movl	24(%esp),%ebx
	movl	28(%esp),%ecx
	movl	356(%esp),%edi		# input cursor
	addl	16(%esi),%edx
	addl	20(%esi),%eax
	addl	24(%esi),%ebx
	addl	28(%esi),%ecx
	movl	%edx,16(%esi)
	movl	%eax,20(%esi)
	movl	%ebx,24(%esi)
	movl	%ecx,28(%esi)
	addl	$352,%esp		# pop W[] + window
	subl	$256,%ebp		# rewind K table
	cmpl	8(%esp),%edi
	jb	.L002loop		# more blocks?
	movl	12(%esp),%esp		# restore caller esp
	popl	%edi
	popl	%esi
	popl	%ebx
	popl	%ebp
	ret
.align	64
.L001K256:
	# SHA-256 round constants K[0..63]
.long	1116352408,1899447441,3049323471,3921009573
.long	961987163,1508970993,2453635748,2870763221
.long	3624381080,310598401,607225278,1426881987
.long	1925078388,2162078206,2614888103,3248222580
.long	3835390401,4022224774,264347078,604807628
.long	770255983,1249150122,1555081692,1996064986
.long	2554220882,2821834349,2952996808,3210313671
.long	3336571891,3584528711,113926993,338241895
.long	666307205,773529912,1294757372,1396182291
.long	1695183700,1986661051,2177026350,2456956037
.long	2730485921,2820302411,3259730800,3345764771
.long	3516065817,3600352804,4094571909,275423344
.long	430227734,506948616,659060556,883997877
.long	958139571,1322822218,1537002063,1747873779
.long	1955562222,2024104815,2227730452,2361852424
.long	2428436474,2756734187,3204031479,3329325298
.size	sha256_block_data_order,.-.L_sha256_block_data_order_begin
	# "SHA256 block transform for x86, CRYPTOGAMS by <appro@openssl.org>"
.byte	83,72,65,50,53,54,32,98,108,111,99,107,32,116,114,97
.byte	110,115,102,111,114,109,32,102,111,114,32,120,56,54,44,32
.byte	67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97
.byte	112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103
.byte	62,0

563
deps/openssl/asm/x86-elf-gas/sha/sha512-586.s

@ -0,0 +1,563 @@
.file "sha512-586.s"
# -----------------------------------------------------------------------
# SHA-512 block transform for 32-bit x86 (ELF, GAS/AT&T syntax).
# Machine-generated by OpenSSL's sha512-586.pl (CRYPTOGAMS) — do not edit
# by hand; regenerate from the perl source instead.
#
# C-equivalent: void sha512_block_data_order(SHA512_CTX *ctx,
#                                            const void *in, size_t num);
# ABI: cdecl. 20(%esp)=ctx, 24(%esp)=in, 28(%esp)=num 128-byte blocks.
# 64-bit arithmetic is emulated with 32-bit register pairs, which is why
# every logical step appears twice (low half / high half).
# -----------------------------------------------------------------------
.text
.globl sha512_block_data_order
.type sha512_block_data_order,@function
.align 16
sha512_block_data_order:
.L_sha512_block_data_order_begin:
pushl %ebp
pushl %ebx
pushl %esi
pushl %edi
movl 20(%esp),%esi
movl 24(%esp),%edi
movl 28(%esp),%eax
movl %esp,%ebx
call .L000pic_point
.L000pic_point:
popl %ebp
# PIC: %ebp = runtime address of the K512 constant table.
leal .L001K512-.L000pic_point(%ebp),%ebp
subl $16,%esp
andl $-64,%esp
# %eax = in + num*128 = end-of-input sentinel.
shll $7,%eax
addl %edi,%eax
movl %esi,(%esp)
movl %edi,4(%esp)
movl %eax,8(%esp)
movl %ebx,12(%esp)
.align 16
# Per-block loop: byte-swap the 128-byte input block (16 big-endian
# 64-bit words) onto the stack, low dword pushed last.
.L002loop_x86:
movl (%edi),%eax
movl 4(%edi),%ebx
movl 8(%edi),%ecx
movl 12(%edi),%edx
bswap %eax
bswap %ebx
bswap %ecx
bswap %edx
pushl %eax
pushl %ebx
pushl %ecx
pushl %edx
movl 16(%edi),%eax
movl 20(%edi),%ebx
movl 24(%edi),%ecx
movl 28(%edi),%edx
bswap %eax
bswap %ebx
bswap %ecx
bswap %edx
pushl %eax
pushl %ebx
pushl %ecx
pushl %edx
movl 32(%edi),%eax
movl 36(%edi),%ebx
movl 40(%edi),%ecx
movl 44(%edi),%edx
bswap %eax
bswap %ebx
bswap %ecx
bswap %edx
pushl %eax
pushl %ebx
pushl %ecx
pushl %edx
movl 48(%edi),%eax
movl 52(%edi),%ebx
movl 56(%edi),%ecx
movl 60(%edi),%edx
bswap %eax
bswap %ebx
bswap %ecx
bswap %edx
pushl %eax
pushl %ebx
pushl %ecx
pushl %edx
movl 64(%edi),%eax
movl 68(%edi),%ebx
movl 72(%edi),%ecx
movl 76(%edi),%edx
bswap %eax
bswap %ebx
bswap %ecx
bswap %edx
pushl %eax
pushl %ebx
pushl %ecx
pushl %edx
movl 80(%edi),%eax
movl 84(%edi),%ebx
movl 88(%edi),%ecx
movl 92(%edi),%edx
bswap %eax
bswap %ebx
bswap %ecx
bswap %edx
pushl %eax
pushl %ebx
pushl %ecx
pushl %edx
movl 96(%edi),%eax
movl 100(%edi),%ebx
movl 104(%edi),%ecx
movl 108(%edi),%edx
bswap %eax
bswap %ebx
bswap %ecx
bswap %edx
pushl %eax
pushl %ebx
pushl %ecx
pushl %edx
movl 112(%edi),%eax
movl 116(%edi),%ebx
movl 120(%edi),%ecx
movl 124(%edi),%edx
bswap %eax
bswap %ebx
bswap %ecx
bswap %edx
pushl %eax
pushl %ebx
pushl %ecx
pushl %edx
addl $128,%edi
subl $72,%esp
movl %edi,204(%esp)
leal 8(%esp),%edi
movl $16,%ecx
# Fixed opcode bytes (generator idiom): 0x89,0xF6,0xF3,0xA5 =
# "mov %esi,%esi; rep movsl" — copies the 64-byte hash state
# (16 dwords) from ctx (%esi) to the stack working area (%edi).
.long 2784229001
.align 16
# Rounds 0..15: message words are taken directly from the stack copy
# of the input block.
.L00300_15_x86:
movl 40(%esp),%ecx
movl 44(%esp),%edx
movl %ecx,%esi
shrl $9,%ecx
movl %edx,%edi
shrl $9,%edx
movl %ecx,%ebx
shll $14,%esi
movl %edx,%eax
shll $14,%edi
xorl %esi,%ebx
shrl $5,%ecx
xorl %edi,%eax
shrl $5,%edx
xorl %ecx,%eax
shll $4,%esi
xorl %edx,%ebx
shll $4,%edi
xorl %esi,%ebx
shrl $4,%ecx
xorl %edi,%eax
shrl $4,%edx
xorl %ecx,%eax
shll $5,%esi
xorl %edx,%ebx
shll $5,%edi
xorl %esi,%eax
xorl %edi,%ebx
movl 48(%esp),%ecx
movl 52(%esp),%edx
movl 56(%esp),%esi
movl 60(%esp),%edi
addl 64(%esp),%eax
adcl 68(%esp),%ebx
xorl %esi,%ecx
xorl %edi,%edx
andl 40(%esp),%ecx
andl 44(%esp),%edx
addl 192(%esp),%eax
adcl 196(%esp),%ebx
xorl %esi,%ecx
xorl %edi,%edx
# (%ebp) = current 64-bit K512 round constant.
movl (%ebp),%esi
movl 4(%ebp),%edi
addl %ecx,%eax
adcl %edx,%ebx
movl 32(%esp),%ecx
movl 36(%esp),%edx
addl %esi,%eax
adcl %edi,%ebx
movl %eax,(%esp)
movl %ebx,4(%esp)
addl %ecx,%eax
adcl %edx,%ebx
movl 8(%esp),%ecx
movl 12(%esp),%edx
movl %eax,32(%esp)
movl %ebx,36(%esp)
movl %ecx,%esi
shrl $2,%ecx
movl %edx,%edi
shrl $2,%edx
movl %ecx,%ebx
shll $4,%esi
movl %edx,%eax
shll $4,%edi
xorl %esi,%ebx
shrl $5,%ecx
xorl %edi,%eax
shrl $5,%edx
xorl %ecx,%ebx
shll $21,%esi
xorl %edx,%eax
shll $21,%edi
xorl %esi,%eax
shrl $21,%ecx
xorl %edi,%ebx
shrl $21,%edx
xorl %ecx,%eax
shll $5,%esi
xorl %edx,%ebx
shll $5,%edi
xorl %esi,%eax
xorl %edi,%ebx
movl 8(%esp),%ecx
movl 12(%esp),%edx
movl 16(%esp),%esi
movl 20(%esp),%edi
addl (%esp),%eax
adcl 4(%esp),%ebx
orl %esi,%ecx
orl %edi,%edx
andl 24(%esp),%ecx
andl 28(%esp),%edx
andl 8(%esp),%esi
andl 12(%esp),%edi
orl %esi,%ecx
orl %edi,%edx
addl %ecx,%eax
adcl %edx,%ebx
movl %eax,(%esp)
movl %ebx,4(%esp)
movb (%ebp),%dl
subl $8,%esp
leal 8(%ebp),%ebp
# 148 = low byte of the 16th K512 constant: sentinel that ends the
# first 16 rounds without a separate counter.
cmpb $148,%dl
jne .L00300_15_x86
.align 16
# Rounds 16..79: message schedule — each new word is derived from
# earlier words via the sigma0/sigma1 shift-xor mixes below.
.L00416_79_x86:
movl 312(%esp),%ecx
movl 316(%esp),%edx
movl %ecx,%esi
shrl $1,%ecx
movl %edx,%edi
shrl $1,%edx
movl %ecx,%eax
shll $24,%esi
movl %edx,%ebx
shll $24,%edi
xorl %esi,%ebx
shrl $6,%ecx
xorl %edi,%eax
shrl $6,%edx
xorl %ecx,%eax
shll $7,%esi
xorl %edx,%ebx
shll $1,%edi
xorl %esi,%ebx
shrl $1,%ecx
xorl %edi,%eax
shrl $1,%edx
xorl %ecx,%eax
shll $6,%edi
xorl %edx,%ebx
xorl %edi,%eax
movl %eax,(%esp)
movl %ebx,4(%esp)
movl 208(%esp),%ecx
movl 212(%esp),%edx
movl %ecx,%esi
shrl $6,%ecx
movl %edx,%edi
shrl $6,%edx
movl %ecx,%eax
shll $3,%esi
movl %edx,%ebx
shll $3,%edi
xorl %esi,%eax
shrl $13,%ecx
xorl %edi,%ebx
shrl $13,%edx
xorl %ecx,%eax
shll $10,%esi
xorl %edx,%ebx
shll $10,%edi
xorl %esi,%ebx
shrl $10,%ecx
xorl %edi,%eax
shrl $10,%edx
xorl %ecx,%ebx
shll $13,%edi
xorl %edx,%eax
xorl %edi,%eax
movl 320(%esp),%ecx
movl 324(%esp),%edx
addl (%esp),%eax
adcl 4(%esp),%ebx
movl 248(%esp),%esi
movl 252(%esp),%edi
addl %ecx,%eax
adcl %edx,%ebx
addl %esi,%eax
adcl %edi,%ebx
# Store the freshly scheduled message word, then run the same round
# body as rounds 0..15.
movl %eax,192(%esp)
movl %ebx,196(%esp)
movl 40(%esp),%ecx
movl 44(%esp),%edx
movl %ecx,%esi
shrl $9,%ecx
movl %edx,%edi
shrl $9,%edx
movl %ecx,%ebx
shll $14,%esi
movl %edx,%eax
shll $14,%edi
xorl %esi,%ebx
shrl $5,%ecx
xorl %edi,%eax
shrl $5,%edx
xorl %ecx,%eax
shll $4,%esi
xorl %edx,%ebx
shll $4,%edi
xorl %esi,%ebx
shrl $4,%ecx
xorl %edi,%eax
shrl $4,%edx
xorl %ecx,%eax
shll $5,%esi
xorl %edx,%ebx
shll $5,%edi
xorl %esi,%eax
xorl %edi,%ebx
movl 48(%esp),%ecx
movl 52(%esp),%edx
movl 56(%esp),%esi
movl 60(%esp),%edi
addl 64(%esp),%eax
adcl 68(%esp),%ebx
xorl %esi,%ecx
xorl %edi,%edx
andl 40(%esp),%ecx
andl 44(%esp),%edx
addl 192(%esp),%eax
adcl 196(%esp),%ebx
xorl %esi,%ecx
xorl %edi,%edx
movl (%ebp),%esi
movl 4(%ebp),%edi
addl %ecx,%eax
adcl %edx,%ebx
movl 32(%esp),%ecx
movl 36(%esp),%edx
addl %esi,%eax
adcl %edi,%ebx
movl %eax,(%esp)
movl %ebx,4(%esp)
addl %ecx,%eax
adcl %edx,%ebx
movl 8(%esp),%ecx
movl 12(%esp),%edx
movl %eax,32(%esp)
movl %ebx,36(%esp)
movl %ecx,%esi
shrl $2,%ecx
movl %edx,%edi
shrl $2,%edx
movl %ecx,%ebx
shll $4,%esi
movl %edx,%eax
shll $4,%edi
xorl %esi,%ebx
shrl $5,%ecx
xorl %edi,%eax
shrl $5,%edx
xorl %ecx,%ebx
shll $21,%esi
xorl %edx,%eax
shll $21,%edi
xorl %esi,%eax
shrl $21,%ecx
xorl %edi,%ebx
shrl $21,%edx
xorl %ecx,%eax
shll $5,%esi
xorl %edx,%ebx
shll $5,%edi
xorl %esi,%eax
xorl %edi,%ebx
movl 8(%esp),%ecx
movl 12(%esp),%edx
movl 16(%esp),%esi
movl 20(%esp),%edi
addl (%esp),%eax
adcl 4(%esp),%ebx
orl %esi,%ecx
orl %edi,%edx
andl 24(%esp),%ecx
andl 28(%esp),%edx
andl 8(%esp),%esi
andl 12(%esp),%edi
orl %esi,%ecx
orl %edi,%edx
addl %ecx,%eax
adcl %edx,%ebx
movl %eax,(%esp)
movl %ebx,4(%esp)
movb (%ebp),%dl
subl $8,%esp
leal 8(%ebp),%ebp
# 23 = low byte of the 80th (last) K512 constant: ends the round loop.
cmpb $23,%dl
jne .L00416_79_x86
# Add the working variables back into the 8-word (64-byte) hash state.
movl 840(%esp),%esi
movl 844(%esp),%edi
movl (%esi),%eax
movl 4(%esi),%ebx
movl 8(%esi),%ecx
movl 12(%esi),%edx
addl 8(%esp),%eax
adcl 12(%esp),%ebx
movl %eax,(%esi)
movl %ebx,4(%esi)
addl 16(%esp),%ecx
adcl 20(%esp),%edx
movl %ecx,8(%esi)
movl %edx,12(%esi)
movl 16(%esi),%eax
movl 20(%esi),%ebx
movl 24(%esi),%ecx
movl 28(%esi),%edx
addl 24(%esp),%eax
adcl 28(%esp),%ebx
movl %eax,16(%esi)
movl %ebx,20(%esi)
addl 32(%esp),%ecx
adcl 36(%esp),%edx
movl %ecx,24(%esi)
movl %edx,28(%esi)
movl 32(%esi),%eax
movl 36(%esi),%ebx
movl 40(%esi),%ecx
movl 44(%esi),%edx
addl 40(%esp),%eax
adcl 44(%esp),%ebx
movl %eax,32(%esi)
movl %ebx,36(%esi)
addl 48(%esp),%ecx
adcl 52(%esp),%edx
movl %ecx,40(%esi)
movl %edx,44(%esi)
movl 48(%esi),%eax
movl 52(%esi),%ebx
movl 56(%esi),%ecx
movl 60(%esi),%edx
addl 56(%esp),%eax
adcl 60(%esp),%ebx
movl %eax,48(%esi)
movl %ebx,52(%esi)
addl 64(%esp),%ecx
adcl 68(%esp),%edx
movl %ecx,56(%esi)
movl %edx,60(%esi)
# Unwind this block's frame, rewind %ebp to the start of K512
# (80 constants * 8 bytes = 640), loop while input remains.
addl $840,%esp
subl $640,%ebp
cmpl 8(%esp),%edi
jb .L002loop_x86
movl 12(%esp),%esp
popl %edi
popl %esi
popl %ebx
popl %ebp
ret
.align 64
# K512 round constants, 80 entries, stored low dword first.
.L001K512:
.long 3609767458,1116352408
.long 602891725,1899447441
.long 3964484399,3049323471
.long 2173295548,3921009573
.long 4081628472,961987163
.long 3053834265,1508970993
.long 2937671579,2453635748
.long 3664609560,2870763221
.long 2734883394,3624381080
.long 1164996542,310598401
.long 1323610764,607225278
.long 3590304994,1426881987
.long 4068182383,1925078388
.long 991336113,2162078206
.long 633803317,2614888103
.long 3479774868,3248222580
.long 2666613458,3835390401
.long 944711139,4022224774
.long 2341262773,264347078
.long 2007800933,604807628
.long 1495990901,770255983
.long 1856431235,1249150122
.long 3175218132,1555081692
.long 2198950837,1996064986
.long 3999719339,2554220882
.long 766784016,2821834349
.long 2566594879,2952996808
.long 3203337956,3210313671
.long 1034457026,3336571891
.long 2466948901,3584528711
.long 3758326383,113926993
.long 168717936,338241895
.long 1188179964,666307205
.long 1546045734,773529912
.long 1522805485,1294757372
.long 2643833823,1396182291
.long 2343527390,1695183700
.long 1014477480,1986661051
.long 1206759142,2177026350
.long 344077627,2456956037
.long 1290863460,2730485921
.long 3158454273,2820302411
.long 3505952657,3259730800
.long 106217008,3345764771
.long 3606008344,3516065817
.long 1432725776,3600352804
.long 1467031594,4094571909
.long 851169720,275423344
.long 3100823752,430227734
.long 1363258195,506948616
.long 3750685593,659060556
.long 3785050280,883997877
.long 3318307427,958139571
.long 3812723403,1322822218
.long 2003034995,1537002063
.long 3602036899,1747873779
.long 1575990012,1955562222
.long 1125592928,2024104815
.long 2716904306,2227730452
.long 442776044,2361852424
.long 593698344,2428436474
.long 3733110249,2756734187
.long 2999351573,3204031479
.long 3815920427,3329325298
.long 3928383900,3391569614
.long 566280711,3515267271
.long 3454069534,3940187606
.long 4000239992,4118630271
.long 1914138554,116418474
.long 2731055270,174292421
.long 3203993006,289380356
.long 320620315,460393269
.long 587496836,685471733
.long 1086792851,852142971
.long 365543100,1017036298
.long 2618297676,1126000580
.long 3409855158,1288033470
.long 4234509866,1501505948
.long 987167468,1607167915
.long 1246189591,1816402316
.size sha512_block_data_order,.-.L_sha512_block_data_order_begin
# ASCII: "SHA512 block transform for x86, CRYPTOGAMS by <appro@openssl.org>"
.byte 83,72,65,53,49,50,32,98,108,111,99,107,32,116,114,97
.byte 110,115,102,111,114,109,32,102,111,114,32,120,56,54,44,32
.byte 67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97
.byte 112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103
.byte 62,0

1105
deps/openssl/asm/x86-elf-gas/whrlpool/wp-mmx.s

File diff suppressed because it is too large

279
deps/openssl/asm/x86-elf-gas/x86cpuid.s

@ -0,0 +1,279 @@
.file "x86cpuid.s"
.text
.globl OPENSSL_ia32_cpuid
.type OPENSSL_ia32_cpuid,@function
.align 16
# -----------------------------------------------------------------------
# OPENSSL_ia32_cpuid — probe CPU capabilities via the CPUID instruction.
# Generated by OpenSSL's x86cpuid.pl (CRYPTOGAMS); do not edit by hand.
# Returns in %eax the (possibly adjusted) CPUID.1 %edx feature flags and
# in %edx the CPUID.1 %ecx flags. .byte 0x0f,0xa2 throughout = cpuid.
# -----------------------------------------------------------------------
OPENSSL_ia32_cpuid:
.L_OPENSSL_ia32_cpuid_begin:
pushl %ebp
pushl %ebx
pushl %esi
pushl %edi
xorl %edx,%edx
# CPUID presence test: if EFLAGS bit 21 (ID) cannot be toggled,
# there is no cpuid instruction — bail out with zeroed results.
pushfl
popl %eax
movl %eax,%ecx
xorl $2097152,%eax
pushl %eax
popfl
pushfl
popl %eax
xorl %eax,%ecx
btl $21,%ecx
jnc .L000done
xorl %eax,%eax
.byte 0x0f,0xa2
movl %eax,%edi
xorl %eax,%eax
# Vendor check: ebx/edx/ecx spell "GenuineIntel"
# (1970169159="Genu", 1231384169="ineI", 1818588270="ntel").
cmpl $1970169159,%ebx
setne %al
movl %eax,%ebp
cmpl $1231384169,%edx
setne %al
orl %eax,%ebp
cmpl $1818588270,%ecx
setne %al
orl %eax,%ebp
jz .L001intel
# "AuthenticAMD" (1752462657="Auth", 1769238117="enti", 1145913699="cAMD").
cmpl $1752462657,%ebx
setne %al
movl %eax,%esi
cmpl $1769238117,%edx
setne %al
orl %eax,%esi
cmpl $1145913699,%ecx
setne %al
orl %eax,%esi
jnz .L001intel
# AMD path: use extended leaf 0x80000008 to get the core count and
# clear the HTT bit (mask 4026531839 = ~(1<<27)) when it is bogus.
movl $2147483648,%eax
.byte 0x0f,0xa2
cmpl $2147483656,%eax
jb .L001intel
movl $2147483656,%eax
.byte 0x0f,0xa2
movzbl %cl,%esi
incl %esi
movl $1,%eax
.byte 0x0f,0xa2
btl $28,%edx
jnc .L000done
shrl $16,%ebx
andl $255,%ebx
cmpl %esi,%ebx
ja .L000done
andl $4026531839,%edx
jmp .L000done
.L001intel:
# Intel path: leaf 4 gives cores-per-package (when leaf count >= 4).
cmpl $4,%edi
movl $-1,%edi
jb .L002nocacheinfo
movl $4,%eax
movl $0,%ecx
.byte 0x0f,0xa2
movl %eax,%edi
shrl $14,%edi
andl $4095,%edi
.L002nocacheinfo:
movl $1,%eax
.byte 0x0f,0xa2
cmpl $0,%ebp
jne .L003notP4
# Family 15 = Pentium 4: set a synthetic "P4" bit (1<<20).
andb $15,%ah
cmpb $15,%ah
jne .L003notP4
orl $1048576,%edx
.L003notP4:
# Adjust the HTT bit (bit 28) according to the logical CPU count.
btl $28,%edx
jnc .L000done
andl $4026531839,%edx
cmpl $0,%edi
je .L000done
orl $268435456,%edx
shrl $16,%ebx
cmpb $1,%bl
ja .L000done
andl $4026531839,%edx
.L000done:
movl %edx,%eax
movl %ecx,%edx
popl %edi
popl %esi
popl %ebx
popl %ebp
ret
.size OPENSSL_ia32_cpuid,.-.L_OPENSSL_ia32_cpuid_begin
.globl OPENSSL_rdtsc
.type OPENSSL_rdtsc,@function
.align 16
# OPENSSL_rdtsc — return the 64-bit time-stamp counter in %edx:%eax,
# or 0 if bit 4 (TSC) of OPENSSL_ia32cap_P is clear.
OPENSSL_rdtsc:
.L_OPENSSL_rdtsc_begin:
xorl %eax,%eax
xorl %edx,%edx
leal OPENSSL_ia32cap_P,%ecx
btl $4,(%ecx)
jnc .L004notsc
.byte 0x0f,0x31
.L004notsc:
ret
.size OPENSSL_rdtsc,.-.L_OPENSSL_rdtsc_begin
.globl OPENSSL_instrument_halt
.type OPENSSL_instrument_halt,@function
.align 16
# OPENSSL_instrument_halt — measure TSC ticks spent in one hlt.
# Requires TSC support, ring 0 and interrupts enabled (EFLAGS.IF);
# otherwise returns 0 in %edx:%eax. .byte 0x0f,0x31 = rdtsc.
OPENSSL_instrument_halt:
.L_OPENSSL_instrument_halt_begin:
leal OPENSSL_ia32cap_P,%ecx
btl $4,(%ecx)
jnc .L005nohalt
# Opcode bytes 0x0e,0x90,0x58,0x90 = push %cs; nop; pop %eax; nop —
# fetch CS so the low 2 bits (CPL) can be tested below.
.long 2421723150
andl $3,%eax
jnz .L005nohalt
pushfl
popl %eax
btl $9,%eax
jnc .L005nohalt
.byte 0x0f,0x31
pushl %edx
pushl %eax
hlt
.byte 0x0f,0x31
subl (%esp),%eax
sbbl 4(%esp),%edx
addl $8,%esp
ret
.L005nohalt:
xorl %eax,%eax
xorl %edx,%edx
ret
.size OPENSSL_instrument_halt,.-.L_OPENSSL_instrument_halt_begin
.globl OPENSSL_far_spin
.type OPENSSL_far_spin,@function
.align 16
# OPENSSL_far_spin(seg, addr) — spin counting iterations while the
# dword at addr is unchanged; instrumentation helper. Requires
# interrupts enabled, else returns 0. The .long directives emit fixed
# machine-code bytes from the generator (far segment load/return
# sequences — TODO confirm against upstream x86cpuid.pl).
OPENSSL_far_spin:
.L_OPENSSL_far_spin_begin:
pushfl
popl %eax
btl $9,%eax
jnc .L006nospin
movl 4(%esp),%eax
movl 8(%esp),%ecx
.long 2430111262
xorl %eax,%eax
movl (%ecx),%edx
jmp .L007spin
.align 16
.L007spin:
incl %eax
cmpl (%ecx),%edx
je .L007spin
.long 529567888
ret
.L006nospin:
xorl %eax,%eax
xorl %edx,%edx
ret
.size OPENSSL_far_spin,.-.L_OPENSSL_far_spin_begin
.globl OPENSSL_wipe_cpu
.type OPENSSL_wipe_cpu,@function
.align 16
# OPENSSL_wipe_cpu — scrub %eax/%edx and, when the FPU capability bit
# is set, the x87 register stack; returns a stack address in %eax.
OPENSSL_wipe_cpu:
.L_OPENSSL_wipe_cpu_begin:
xorl %eax,%eax
xorl %edx,%edx
leal OPENSSL_ia32cap_P,%ecx
movl (%ecx),%ecx
# NOTE(review): %ecx now holds the capability *word*, yet btl tests
# memory at that address rather than the register value — verify
# against upstream x86cpuid.pl before relying on this path.
btl $1,(%ecx)
jnc .L008no_x87
# Bytes decode to eight fldz (0xd9,0xee pairs) followed by
# fwait; fninit; nop — fill the x87 stack with zeros, then reset it.
.long 4007259865,4007259865,4007259865,4007259865,2430851995
.L008no_x87:
leal 4(%esp),%eax
ret
.size OPENSSL_wipe_cpu,.-.L_OPENSSL_wipe_cpu_begin
.globl OPENSSL_atomic_add
.type OPENSSL_atomic_add,@function
.align 16
# OPENSSL_atomic_add(int *val, int amount) — atomically add and return
# the new value, via a lock cmpxchg retry loop.
OPENSSL_atomic_add:
.L_OPENSSL_atomic_add_begin:
movl 4(%esp),%edx
movl 8(%esp),%ecx
pushl %ebx
nop
movl (%edx),%eax
.L009spin:
leal (%eax,%ecx,1),%ebx
nop
# Bytes 0xf0,0x0f,0xb1,0x1a = lock cmpxchg %ebx,(%edx);
# on failure %eax is reloaded with the current value and we retry.
.long 447811568
jne .L009spin
movl %ebx,%eax
popl %ebx
ret
.size OPENSSL_atomic_add,.-.L_OPENSSL_atomic_add_begin
.globl OPENSSL_indirect_call
.type OPENSSL_indirect_call,@function
.align 16
# OPENSSL_indirect_call(func, a1..a7) — copy up to seven dword
# arguments to a fresh frame and call *func with them (cdecl).
OPENSSL_indirect_call:
.L_OPENSSL_indirect_call_begin:
pushl %ebp
movl %esp,%ebp
subl $28,%esp
movl 12(%ebp),%ecx
movl %ecx,(%esp)
movl 16(%ebp),%edx
movl %edx,4(%esp)
movl 20(%ebp),%eax
movl %eax,8(%esp)
movl 24(%ebp),%eax
movl %eax,12(%esp)
movl 28(%ebp),%eax
movl %eax,16(%esp)
movl 32(%ebp),%eax
movl %eax,20(%esp)
movl 36(%ebp),%eax
movl %eax,24(%esp)
call *8(%ebp)
movl %ebp,%esp
popl %ebp
ret
.size OPENSSL_indirect_call,.-.L_OPENSSL_indirect_call_begin
.globl OPENSSL_cleanse
.type OPENSSL_cleanse,@function
.align 16
# OPENSSL_cleanse(void *ptr, size_t len) — securely zero len bytes at
# ptr. Hand-rolled (not memset) so it cannot be optimized away:
# byte loop for small/unaligned parts, dword loop once aligned.
OPENSSL_cleanse:
.L_OPENSSL_cleanse_begin:
movl 4(%esp),%edx
movl 8(%esp),%ecx
xorl %eax,%eax
cmpl $7,%ecx
jae .L010lot
cmpl $0,%ecx
je .L011ret
.L012little:
movb %al,(%edx)
subl $1,%ecx
leal 1(%edx),%edx
jnz .L012little
.L011ret:
ret
.align 16
.L010lot:
# Byte-store until %edx is 4-byte aligned.
testl $3,%edx
jz .L013aligned
movb %al,(%edx)
leal -1(%ecx),%ecx
leal 1(%edx),%edx
jmp .L010lot
.L013aligned:
# Dword stores while at least 4 bytes remain; tail goes to .L012little.
movl %eax,(%edx)
leal -4(%ecx),%ecx
testl $-4,%ecx
leal 4(%edx),%edx
jnz .L013aligned
cmpl $0,%ecx
jne .L012little
ret
.size OPENSSL_cleanse,.-.L_OPENSSL_cleanse_begin
# Shared 4-byte capability word filled in by OPENSSL_cpuid_setup.
.comm OPENSSL_ia32cap_P,4,4
# Run OPENSSL_cpuid_setup automatically at load time via the .init section.
.section .init
call OPENSSL_cpuid_setup
jmp .Linitalign
.align 16
.Linitalign:

3194
deps/openssl/asm/x86-macosx-gas/aes/aes-586.s

File diff suppressed because it is too large

897
deps/openssl/asm/x86-macosx-gas/bf/bf-686.s

@ -0,0 +1,897 @@
.file "bf-686.s"
# -----------------------------------------------------------------------
# Blowfish for 32-bit x86, Mach-O (macOS) flavour; generated by
# OpenSSL's bf-686.pl — do not edit by hand.
#
# _BF_encrypt(BF_LONG data[2], const BF_KEY *key): 16 fully unrolled
# Feistel rounds. %edi = key; P-array at offset 0, the four S-boxes at
# offsets 72, 1096, 2120 and 3144 (each indexed by one byte of the
# half-block). %ecx/%edx hold the two data halves.
# -----------------------------------------------------------------------
.text
.globl _BF_encrypt
.align 4
_BF_encrypt:
L_BF_encrypt_begin:
pushl %ebp
pushl %ebx
pushl %esi
pushl %edi
# Load the 2 words
movl 20(%esp),%eax
movl (%eax),%ecx
movl 4(%eax),%edx
# P pointer, s and enc flag
movl 24(%esp),%edi
xorl %eax,%eax
xorl %ebx,%ebx
xorl (%edi),%ecx
# Round 0
rorl $16,%ecx
movl 4(%edi),%esi
movb %ch,%al
movb %cl,%bl
rorl $16,%ecx
xorl %esi,%edx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %ch,%al
movb %cl,%bl
addl %ebp,%esi
movl 2120(%edi,%eax,4),%eax
xorl %eax,%esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp,%esi
xorl %eax,%eax
xorl %esi,%edx
# Round 1
rorl $16,%edx
movl 8(%edi),%esi
movb %dh,%al
movb %dl,%bl
rorl $16,%edx
xorl %esi,%ecx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %dh,%al
movb %dl,%bl
addl %ebp,%esi
movl 2120(%edi,%eax,4),%eax
xorl %eax,%esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp,%esi
xorl %eax,%eax
xorl %esi,%ecx
# Round 2
rorl $16,%ecx
movl 12(%edi),%esi
movb %ch,%al
movb %cl,%bl
rorl $16,%ecx
xorl %esi,%edx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %ch,%al
movb %cl,%bl
addl %ebp,%esi
movl 2120(%edi,%eax,4),%eax
xorl %eax,%esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp,%esi
xorl %eax,%eax
xorl %esi,%edx
# Round 3
rorl $16,%edx
movl 16(%edi),%esi
movb %dh,%al
movb %dl,%bl
rorl $16,%edx
xorl %esi,%ecx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %dh,%al
movb %dl,%bl
addl %ebp,%esi
movl 2120(%edi,%eax,4),%eax
xorl %eax,%esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp,%esi
xorl %eax,%eax
xorl %esi,%ecx
# Round 4
rorl $16,%ecx
movl 20(%edi),%esi
movb %ch,%al
movb %cl,%bl
rorl $16,%ecx
xorl %esi,%edx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %ch,%al
movb %cl,%bl
addl %ebp,%esi
movl 2120(%edi,%eax,4),%eax
xorl %eax,%esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp,%esi
xorl %eax,%eax
xorl %esi,%edx
# Round 5
rorl $16,%edx
movl 24(%edi),%esi
movb %dh,%al
movb %dl,%bl
rorl $16,%edx
xorl %esi,%ecx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %dh,%al
movb %dl,%bl
addl %ebp,%esi
movl 2120(%edi,%eax,4),%eax
xorl %eax,%esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp,%esi
xorl %eax,%eax
xorl %esi,%ecx
# Round 6
rorl $16,%ecx
movl 28(%edi),%esi
movb %ch,%al
movb %cl,%bl
rorl $16,%ecx
xorl %esi,%edx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %ch,%al
movb %cl,%bl
addl %ebp,%esi
movl 2120(%edi,%eax,4),%eax
xorl %eax,%esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp,%esi
xorl %eax,%eax
xorl %esi,%edx
# Round 7
rorl $16,%edx
movl 32(%edi),%esi
movb %dh,%al
movb %dl,%bl
rorl $16,%edx
xorl %esi,%ecx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %dh,%al
movb %dl,%bl
addl %ebp,%esi
movl 2120(%edi,%eax,4),%eax
xorl %eax,%esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp,%esi
xorl %eax,%eax
xorl %esi,%ecx
# Round 8
rorl $16,%ecx
movl 36(%edi),%esi
movb %ch,%al
movb %cl,%bl
rorl $16,%ecx
xorl %esi,%edx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %ch,%al
movb %cl,%bl
addl %ebp,%esi
movl 2120(%edi,%eax,4),%eax
xorl %eax,%esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp,%esi
xorl %eax,%eax
xorl %esi,%edx
# Round 9
rorl $16,%edx
movl 40(%edi),%esi
movb %dh,%al
movb %dl,%bl
rorl $16,%edx
xorl %esi,%ecx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %dh,%al
movb %dl,%bl
addl %ebp,%esi
movl 2120(%edi,%eax,4),%eax
xorl %eax,%esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp,%esi
xorl %eax,%eax
xorl %esi,%ecx
# Round 10
rorl $16,%ecx
movl 44(%edi),%esi
movb %ch,%al
movb %cl,%bl
rorl $16,%ecx
xorl %esi,%edx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %ch,%al
movb %cl,%bl
addl %ebp,%esi
movl 2120(%edi,%eax,4),%eax
xorl %eax,%esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp,%esi
xorl %eax,%eax
xorl %esi,%edx
# Round 11
rorl $16,%edx
movl 48(%edi),%esi
movb %dh,%al
movb %dl,%bl
rorl $16,%edx
xorl %esi,%ecx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %dh,%al
movb %dl,%bl
addl %ebp,%esi
movl 2120(%edi,%eax,4),%eax
xorl %eax,%esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp,%esi
xorl %eax,%eax
xorl %esi,%ecx
# Round 12
rorl $16,%ecx
movl 52(%edi),%esi
movb %ch,%al
movb %cl,%bl
rorl $16,%ecx
xorl %esi,%edx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %ch,%al
movb %cl,%bl
addl %ebp,%esi
movl 2120(%edi,%eax,4),%eax
xorl %eax,%esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp,%esi
xorl %eax,%eax
xorl %esi,%edx
# Round 13
rorl $16,%edx
movl 56(%edi),%esi
movb %dh,%al
movb %dl,%bl
rorl $16,%edx
xorl %esi,%ecx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %dh,%al
movb %dl,%bl
addl %ebp,%esi
movl 2120(%edi,%eax,4),%eax
xorl %eax,%esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp,%esi
xorl %eax,%eax
xorl %esi,%ecx
# Round 14
rorl $16,%ecx
movl 60(%edi),%esi
movb %ch,%al
movb %cl,%bl
rorl $16,%ecx
xorl %esi,%edx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %ch,%al
movb %cl,%bl
addl %ebp,%esi
movl 2120(%edi,%eax,4),%eax
xorl %eax,%esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp,%esi
xorl %eax,%eax
xorl %esi,%edx
# Round 15
rorl $16,%edx
movl 64(%edi),%esi
movb %dh,%al
movb %dl,%bl
rorl $16,%edx
xorl %esi,%ecx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %dh,%al
movb %dl,%bl
addl %ebp,%esi
movl 2120(%edi,%eax,4),%eax
xorl %eax,%esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp,%esi
xorl %eax,%eax
xorl %esi,%ecx
# Final whitening with P[17], then store halves back (swapped).
xorl 68(%edi),%edx
movl 20(%esp),%eax
movl %edx,(%eax)
movl %ecx,4(%eax)
popl %edi
popl %esi
popl %ebx
popl %ebp
ret
.globl _BF_decrypt
.align 4
# _BF_decrypt(BF_LONG data[2], const BF_KEY *key): identical round
# structure to _BF_encrypt, but the P-array subkeys are applied in
# reverse order (P[17] down to P[0]).
_BF_decrypt:
L_BF_decrypt_begin:
pushl %ebp
pushl %ebx
pushl %esi
pushl %edi
# Load the 2 words
movl 20(%esp),%eax
movl (%eax),%ecx
movl 4(%eax),%edx
# P pointer, s and enc flag
movl 24(%esp),%edi
xorl %eax,%eax
xorl %ebx,%ebx
xorl 68(%edi),%ecx
# Round 16
rorl $16,%ecx
movl 64(%edi),%esi
movb %ch,%al
movb %cl,%bl
rorl $16,%ecx
xorl %esi,%edx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %ch,%al
movb %cl,%bl
addl %ebp,%esi
movl 2120(%edi,%eax,4),%eax
xorl %eax,%esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp,%esi
xorl %eax,%eax
xorl %esi,%edx
# Round 15
rorl $16,%edx
movl 60(%edi),%esi
movb %dh,%al
movb %dl,%bl
rorl $16,%edx
xorl %esi,%ecx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %dh,%al
movb %dl,%bl
addl %ebp,%esi
movl 2120(%edi,%eax,4),%eax
xorl %eax,%esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp,%esi
xorl %eax,%eax
xorl %esi,%ecx
# Round 14
rorl $16,%ecx
movl 56(%edi),%esi
movb %ch,%al
movb %cl,%bl
rorl $16,%ecx
xorl %esi,%edx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %ch,%al
movb %cl,%bl
addl %ebp,%esi
movl 2120(%edi,%eax,4),%eax
xorl %eax,%esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp,%esi
xorl %eax,%eax
xorl %esi,%edx
# Round 13
rorl $16,%edx
movl 52(%edi),%esi
movb %dh,%al
movb %dl,%bl
rorl $16,%edx
xorl %esi,%ecx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %dh,%al
movb %dl,%bl
addl %ebp,%esi
movl 2120(%edi,%eax,4),%eax
xorl %eax,%esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp,%esi
xorl %eax,%eax
xorl %esi,%ecx
# Round 12
rorl $16,%ecx
movl 48(%edi),%esi
movb %ch,%al
movb %cl,%bl
rorl $16,%ecx
xorl %esi,%edx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %ch,%al
movb %cl,%bl
addl %ebp,%esi
movl 2120(%edi,%eax,4),%eax
xorl %eax,%esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp,%esi
xorl %eax,%eax
xorl %esi,%edx
# Round 11
rorl $16,%edx
movl 44(%edi),%esi
movb %dh,%al
movb %dl,%bl
rorl $16,%edx
xorl %esi,%ecx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %dh,%al
movb %dl,%bl
addl %ebp,%esi
movl 2120(%edi,%eax,4),%eax
xorl %eax,%esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp,%esi
xorl %eax,%eax
xorl %esi,%ecx
# Round 10
rorl $16,%ecx
movl 40(%edi),%esi
movb %ch,%al
movb %cl,%bl
rorl $16,%ecx
xorl %esi,%edx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %ch,%al
movb %cl,%bl
addl %ebp,%esi
movl 2120(%edi,%eax,4),%eax
xorl %eax,%esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp,%esi
xorl %eax,%eax
xorl %esi,%edx
# Round 9
rorl $16,%edx
movl 36(%edi),%esi
movb %dh,%al
movb %dl,%bl
rorl $16,%edx
xorl %esi,%ecx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %dh,%al
movb %dl,%bl
addl %ebp,%esi
movl 2120(%edi,%eax,4),%eax
xorl %eax,%esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp,%esi
xorl %eax,%eax
xorl %esi,%ecx
# Round 8
rorl $16,%ecx
movl 32(%edi),%esi
movb %ch,%al
movb %cl,%bl
rorl $16,%ecx
xorl %esi,%edx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %ch,%al
movb %cl,%bl
addl %ebp,%esi
movl 2120(%edi,%eax,4),%eax
xorl %eax,%esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp,%esi
xorl %eax,%eax
xorl %esi,%edx
# Round 7
rorl $16,%edx
movl 28(%edi),%esi
movb %dh,%al
movb %dl,%bl
rorl $16,%edx
xorl %esi,%ecx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %dh,%al
movb %dl,%bl
addl %ebp,%esi
movl 2120(%edi,%eax,4),%eax
xorl %eax,%esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp,%esi
xorl %eax,%eax
xorl %esi,%ecx
# Round 6
rorl $16,%ecx
movl 24(%edi),%esi
movb %ch,%al
movb %cl,%bl
rorl $16,%ecx
xorl %esi,%edx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %ch,%al
movb %cl,%bl
addl %ebp,%esi
movl 2120(%edi,%eax,4),%eax
xorl %eax,%esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp,%esi
xorl %eax,%eax
xorl %esi,%edx
# Round 5
rorl $16,%edx
movl 20(%edi),%esi
movb %dh,%al
movb %dl,%bl
rorl $16,%edx
xorl %esi,%ecx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %dh,%al
movb %dl,%bl
addl %ebp,%esi
movl 2120(%edi,%eax,4),%eax
xorl %eax,%esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp,%esi
xorl %eax,%eax
xorl %esi,%ecx
# Round 4
rorl $16,%ecx
movl 16(%edi),%esi
movb %ch,%al
movb %cl,%bl
rorl $16,%ecx
xorl %esi,%edx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %ch,%al
movb %cl,%bl
addl %ebp,%esi
movl 2120(%edi,%eax,4),%eax
xorl %eax,%esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp,%esi
xorl %eax,%eax
xorl %esi,%edx
# Round 3
rorl $16,%edx
movl 12(%edi),%esi
movb %dh,%al
movb %dl,%bl
rorl $16,%edx
xorl %esi,%ecx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %dh,%al
movb %dl,%bl
addl %ebp,%esi
movl 2120(%edi,%eax,4),%eax
xorl %eax,%esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp,%esi
xorl %eax,%eax
xorl %esi,%ecx
# Round 2
rorl $16,%ecx
movl 8(%edi),%esi
movb %ch,%al
movb %cl,%bl
rorl $16,%ecx
xorl %esi,%edx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %ch,%al
movb %cl,%bl
addl %ebp,%esi
movl 2120(%edi,%eax,4),%eax
xorl %eax,%esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp,%esi
xorl %eax,%eax
xorl %esi,%edx
# Round 1
rorl $16,%edx
movl 4(%edi),%esi
movb %dh,%al
movb %dl,%bl
rorl $16,%edx
xorl %esi,%ecx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %dh,%al
movb %dl,%bl
addl %ebp,%esi
movl 2120(%edi,%eax,4),%eax
xorl %eax,%esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp,%esi
xorl %eax,%eax
xorl %esi,%ecx
# Final whitening with P[0], then store halves back (swapped).
xorl (%edi),%edx
movl 20(%esp),%eax
movl %edx,(%eax)
movl %ecx,4(%eax)
popl %edi
popl %esi
popl %ebx
popl %ebp
ret
.globl _BF_cbc_encrypt
.align 4
# -----------------------------------------------------------------------
# _BF_cbc_encrypt(in, out, length, key, ivec, enc) — CBC-mode wrapper
# around _BF_encrypt/_BF_decrypt (generated by perlasm cbc.pl).
# Processes 8-byte blocks; partial final blocks are handled via the
# computed-goto jump tables (ejN / djN labels).
# -----------------------------------------------------------------------
_BF_cbc_encrypt:
L_BF_cbc_encrypt_begin:
pushl %ebp
pushl %ebx
pushl %esi
pushl %edi
movl 28(%esp),%ebp
# getting iv ptr from parameter 4
movl 36(%esp),%ebx
movl (%ebx),%esi
movl 4(%ebx),%edi
# IV pushed twice: one copy is the working block, one the saved chain.
pushl %edi
pushl %esi
pushl %edi
pushl %esi
movl %esp,%ebx
movl 36(%esp),%esi
movl 40(%esp),%edi
# getting encrypt flag from parameter 5
movl 56(%esp),%ecx
# get and push parameter 3
movl 48(%esp),%eax
pushl %eax
pushl %ebx
cmpl $0,%ecx
jz L000decrypt
# Encrypt: round length down to a multiple of 8 (mask ~7).
andl $4294967288,%ebp
movl 8(%esp),%eax
movl 12(%esp),%ebx
jz L001encrypt_finish
L002encrypt_loop:
# XOR plaintext into the chain block, byte-swap, encrypt, write out.
movl (%esi),%ecx
movl 4(%esi),%edx
xorl %ecx,%eax
xorl %edx,%ebx
bswap %eax
bswap %ebx
movl %eax,8(%esp)
movl %ebx,12(%esp)
call L_BF_encrypt_begin
movl 8(%esp),%eax
movl 12(%esp),%ebx
bswap %eax
bswap %ebx
movl %eax,(%edi)
movl %ebx,4(%edi)
addl $8,%esi
addl $8,%edi
subl $8,%ebp
jnz L002encrypt_loop
L001encrypt_finish:
# Tail: 1..7 remaining bytes, dispatched through the PIC jump table.
movl 52(%esp),%ebp
andl $7,%ebp
jz L003finish
call L004PIC_point
L004PIC_point:
popl %edx
leal L005cbc_enc_jmp_table-L004PIC_point(%edx),%ecx
movl (%ecx,%ebp,4),%ebp
addl %edx,%ebp
xorl %ecx,%ecx
xorl %edx,%edx
jmp *%ebp
L006ej7:
movb 6(%esi),%dh
shll $8,%edx
L007ej6:
movb 5(%esi),%dh
L008ej5:
movb 4(%esi),%dl
L009ej4:
movl (%esi),%ecx
jmp L010ejend
L011ej3:
movb 2(%esi),%ch
shll $8,%ecx
L012ej2:
movb 1(%esi),%ch
L013ej1:
movb (%esi),%cl
L010ejend:
xorl %ecx,%eax
xorl %edx,%ebx
bswap %eax
bswap %ebx
movl %eax,8(%esp)
movl %ebx,12(%esp)
call L_BF_encrypt_begin
movl 8(%esp),%eax
movl 12(%esp),%ebx
bswap %eax
bswap %ebx
movl %eax,(%edi)
movl %ebx,4(%edi)
jmp L003finish
L000decrypt:
andl $4294967288,%ebp
movl 16(%esp),%eax
movl 20(%esp),%ebx
jz L014decrypt_finish
L015decrypt_loop:
# Decrypt block, XOR with saved chain, then chain = this ciphertext.
movl (%esi),%eax
movl 4(%esi),%ebx
bswap %eax
bswap %ebx
movl %eax,8(%esp)
movl %ebx,12(%esp)
call L_BF_decrypt_begin
movl 8(%esp),%eax
movl 12(%esp),%ebx
bswap %eax
bswap %ebx
movl 16(%esp),%ecx
movl 20(%esp),%edx
xorl %eax,%ecx
xorl %ebx,%edx
movl (%esi),%eax
movl 4(%esi),%ebx
movl %ecx,(%edi)
movl %edx,4(%edi)
movl %eax,16(%esp)
movl %ebx,20(%esp)
addl $8,%esi
addl $8,%edi
subl $8,%ebp
jnz L015decrypt_loop
L014decrypt_finish:
movl 52(%esp),%ebp
andl $7,%ebp
jz L003finish
movl (%esi),%eax
movl 4(%esi),%ebx
bswap %eax
bswap %ebx
movl %eax,8(%esp)
movl %ebx,12(%esp)
call L_BF_decrypt_begin
movl 8(%esp),%eax
movl 12(%esp),%ebx
bswap %eax
bswap %ebx
movl 16(%esp),%ecx
movl 20(%esp),%edx
xorl %eax,%ecx
xorl %ebx,%edx
movl (%esi),%eax
movl 4(%esi),%ebx
L016dj7:
rorl $16,%edx
movb %dl,6(%edi)
shrl $16,%edx
L017dj6:
movb %dh,5(%edi)
L018dj5:
movb %dl,4(%edi)
L019dj4:
movl %ecx,(%edi)
jmp L020djend
L021dj3:
rorl $16,%ecx
movb %cl,2(%edi)
shll $16,%ecx
L022dj2:
# NOTE(review): dj2/dj1 store through %esi (the *input* pointer) while
# every other djN case stores through %edi (output) — this matches a
# historically known perlasm cbc.pl tail quirk; verify against
# upstream OpenSSL before changing vendored generated output.
movb %ch,1(%esi)
L023dj1:
movb %cl,(%esi)
L020djend:
jmp L003finish
L003finish:
# Write the final chain value back to ivec, restore stack, return.
movl 60(%esp),%ecx
addl $24,%esp
movl %eax,(%ecx)
movl %ebx,4(%ecx)
popl %edi
popl %esi
popl %ebx
popl %ebp
ret
.align 6,0x90
# Jump table for 1..7 trailing bytes on the encrypt path (PIC offsets).
L005cbc_enc_jmp_table:
.long 0
.long L013ej1-L004PIC_point
.long L012ej2-L004PIC_point
.long L011ej3-L004PIC_point
.long L009ej4-L004PIC_point
.long L008ej5-L004PIC_point
.long L007ej6-L004PIC_point
.long L006ej7-L004PIC_point
.align 6,0x90

336
deps/openssl/asm/x86-macosx-gas/bn/x86-mont.s

@ -0,0 +1,336 @@
.file "../openssl/crypto/bn/asm/x86-mont.s"
# -----------------------------------------------------------------------
# Montgomery multiplication for 32-bit x86, Mach-O flavour; generated
# by OpenSSL's x86-mont.pl (CRYPTOGAMS) — do not edit by hand.
#
# int bn_mul_mont(BN_ULONG *rp, const BN_ULONG *ap, const BN_ULONG *bp,
#                 const BN_ULONG *np, const BN_ULONG *n0, int num);
# Computes rp = ap*bp*R^-1 mod np (word-serial CIOS method). Returns 0
# (without computing) when num < 4, else 1. Detects ap == bp and takes
# the faster squaring path (L001bn_sqr_mont).
# -----------------------------------------------------------------------
.text
.globl _bn_mul_mont
.align 4
_bn_mul_mont:
L_bn_mul_mont_begin:
pushl %ebp
pushl %ebx
pushl %esi
pushl %edi
xorl %eax,%eax
movl 40(%esp),%edi
cmpl $4,%edi
jl L000just_leave
# Carve a cache-aligned scratch frame of num+2 words below the
# current stack; original %esp is preserved at 24(%esp).
leal 20(%esp),%esi
leal 24(%esp),%edx
movl %esp,%ebp
addl $2,%edi
negl %edi
leal -32(%esp,%edi,4),%esp
negl %edi
movl %esp,%eax
subl %edx,%eax
andl $2047,%eax
subl %eax,%esp
xorl %esp,%edx
andl $2048,%edx
xorl $2048,%edx
subl %edx,%esp
andl $-64,%esp
# Cache the six arguments in the frame: 4(%esp)=rp, 8(%esp)=ap,
# 12(%esp)=bp, 16(%esp)=np, 20(%esp)=n0 word, 24(%esp)=saved %esp.
movl (%esi),%eax
movl 4(%esi),%ebx
movl 8(%esi),%ecx
movl 12(%esi),%edx
movl 16(%esi),%esi
movl (%esi),%esi
movl %eax,4(%esp)
movl %ebx,8(%esp)
movl %ecx,12(%esp)
movl %edx,16(%esp)
movl %esi,20(%esp)
leal -3(%edi),%ebx
movl %ebp,24(%esp)
movl 8(%esp),%esi
leal 1(%ebx),%ebp
movl 12(%esp),%edi
xorl %ecx,%ecx
movl %esi,%edx
andl $1,%ebp
subl %edi,%edx
leal 4(%edi,%ebx,4),%eax
orl %edx,%ebp
movl (%edi),%edi
jz L001bn_sqr_mont
movl %eax,28(%esp)
movl (%esi),%eax
xorl %edx,%edx
.align 4,0x90
# First pass: tp = ap * bp[0].
L002mull:
movl %edx,%ebp
mull %edi
addl %eax,%ebp
leal 1(%ecx),%ecx
adcl $0,%edx
movl (%esi,%ecx,4),%eax
cmpl %ebx,%ecx
movl %ebp,28(%esp,%ecx,4)
jl L002mull
movl %edx,%ebp
mull %edi
movl 20(%esp),%edi
addl %ebp,%eax
movl 16(%esp),%esi
adcl $0,%edx
# m = tp[0] * n0 mod 2^32 — the Montgomery reduction multiplier.
imull 32(%esp),%edi
movl %eax,32(%esp,%ebx,4)
xorl %ecx,%ecx
movl %edx,36(%esp,%ebx,4)
movl %ecx,40(%esp,%ebx,4)
movl (%esi),%eax
mull %edi
addl 32(%esp),%eax
movl 4(%esi),%eax
adcl $0,%edx
incl %ecx
jmp L0032ndmadd
.align 4,0x90
# Outer-loop body: tp += ap * bp[i] (multiply-accumulate pass).
L0041stmadd:
movl %edx,%ebp
mull %edi
addl 32(%esp,%ecx,4),%ebp
leal 1(%ecx),%ecx
adcl $0,%edx
addl %eax,%ebp
movl (%esi,%ecx,4),%eax
adcl $0,%edx
cmpl %ebx,%ecx
movl %ebp,28(%esp,%ecx,4)
jl L0041stmadd
movl %edx,%ebp
mull %edi
addl 32(%esp,%ebx,4),%eax
movl 20(%esp),%edi
adcl $0,%edx
movl 16(%esp),%esi
addl %eax,%ebp
adcl $0,%edx
imull 32(%esp),%edi
xorl %ecx,%ecx
addl 36(%esp,%ebx,4),%edx
movl %ebp,32(%esp,%ebx,4)
adcl $0,%ecx
movl (%esi),%eax
movl %edx,36(%esp,%ebx,4)
movl %ecx,40(%esp,%ebx,4)
mull %edi
addl 32(%esp),%eax
movl 4(%esi),%eax
adcl $0,%edx
movl $1,%ecx
.align 4,0x90
# Reduction pass: tp = (tp + m*np) >> 32 (one word shifted out).
L0032ndmadd:
movl %edx,%ebp
mull %edi
addl 32(%esp,%ecx,4),%ebp
leal 1(%ecx),%ecx
adcl $0,%edx
addl %eax,%ebp
movl (%esi,%ecx,4),%eax
adcl $0,%edx
cmpl %ebx,%ecx
movl %ebp,24(%esp,%ecx,4)
jl L0032ndmadd
movl %edx,%ebp
mull %edi
addl 32(%esp,%ebx,4),%ebp
adcl $0,%edx
addl %eax,%ebp
adcl $0,%edx
movl %ebp,28(%esp,%ebx,4)
xorl %eax,%eax
movl 12(%esp),%ecx
addl 36(%esp,%ebx,4),%edx
adcl 40(%esp,%ebx,4),%eax
leal 4(%ecx),%ecx
movl %edx,32(%esp,%ebx,4)
cmpl 28(%esp),%ecx
movl %eax,36(%esp,%ebx,4)
je L005common_tail
# Next bp word; loop back into the multiply-accumulate pass.
movl (%ecx),%edi
movl 8(%esp),%esi
movl %ecx,12(%esp)
xorl %ecx,%ecx
xorl %edx,%edx
movl (%esi),%eax
jmp L0041stmadd
.align 4,0x90
# Squaring path (ap == bp): compute cross products once and double
# them on the fly (carry bit kept in %ebx).
L001bn_sqr_mont:
movl %ebx,(%esp)
movl %ecx,12(%esp)
movl %edi,%eax
mull %edi
movl %eax,32(%esp)
movl %edx,%ebx
shrl $1,%edx
andl $1,%ebx
incl %ecx
.align 4,0x90
L006sqr:
movl (%esi,%ecx,4),%eax
movl %edx,%ebp
mull %edi
addl %ebp,%eax
leal 1(%ecx),%ecx
adcl $0,%edx
leal (%ebx,%eax,2),%ebp
shrl $31,%eax
cmpl (%esp),%ecx
movl %eax,%ebx
movl %ebp,28(%esp,%ecx,4)
jl L006sqr
movl (%esi,%ecx,4),%eax
movl %edx,%ebp
mull %edi
addl %ebp,%eax
movl 20(%esp),%edi
adcl $0,%edx
movl 16(%esp),%esi
leal (%ebx,%eax,2),%ebp
imull 32(%esp),%edi
shrl $31,%eax
movl %ebp,32(%esp,%ecx,4)
leal (%eax,%edx,2),%ebp
movl (%esi),%eax
shrl $31,%edx
movl %ebp,36(%esp,%ecx,4)
movl %edx,40(%esp,%ecx,4)
mull %edi
addl 32(%esp),%eax
movl %ecx,%ebx
adcl $0,%edx
movl 4(%esi),%eax
movl $1,%ecx
.align 4,0x90
# Reduction for the squaring path, two words per iteration.
L0073rdmadd:
movl %edx,%ebp
mull %edi
addl 32(%esp,%ecx,4),%ebp
adcl $0,%edx
addl %eax,%ebp
movl 4(%esi,%ecx,4),%eax
adcl $0,%edx
movl %ebp,28(%esp,%ecx,4)
movl %edx,%ebp
mull %edi
addl 36(%esp,%ecx,4),%ebp
leal 2(%ecx),%ecx
adcl $0,%edx
addl %eax,%ebp
movl (%esi,%ecx,4),%eax
adcl $0,%edx
cmpl %ebx,%ecx
movl %ebp,24(%esp,%ecx,4)
jl L0073rdmadd
movl %edx,%ebp
mull %edi
addl 32(%esp,%ebx,4),%ebp
adcl $0,%edx
addl %eax,%ebp
adcl $0,%edx
movl %ebp,28(%esp,%ebx,4)
movl 12(%esp),%ecx
xorl %eax,%eax
movl 8(%esp),%esi
addl 36(%esp,%ebx,4),%edx
adcl 40(%esp,%ebx,4),%eax
movl %edx,32(%esp,%ebx,4)
cmpl %ebx,%ecx
movl %eax,36(%esp,%ebx,4)
je L005common_tail
movl 4(%esi,%ecx,4),%edi
leal 1(%ecx),%ecx
movl %edi,%eax
movl %ecx,12(%esp)
mull %edi
addl 32(%esp,%ecx,4),%eax
adcl $0,%edx
movl %eax,32(%esp,%ecx,4)
xorl %ebp,%ebp
cmpl %ebx,%ecx
leal 1(%ecx),%ecx
je L008sqrlast
movl %edx,%ebx
shrl $1,%edx
andl $1,%ebx
.align 4,0x90
L009sqradd:
movl (%esi,%ecx,4),%eax
movl %edx,%ebp
mull %edi
addl %ebp,%eax
leal (%eax,%eax,1),%ebp
adcl $0,%edx
shrl $31,%eax
addl 32(%esp,%ecx,4),%ebp
leal 1(%ecx),%ecx
adcl $0,%eax
addl %ebx,%ebp
adcl $0,%eax
cmpl (%esp),%ecx
movl %ebp,28(%esp,%ecx,4)
movl %eax,%ebx
jle L009sqradd
movl %edx,%ebp
addl %edx,%edx
shrl $31,%ebp
addl %ebx,%edx
adcl $0,%ebp
L008sqrlast:
movl 20(%esp),%edi
movl 16(%esp),%esi
imull 32(%esp),%edi
addl 32(%esp,%ecx,4),%edx
movl (%esi),%eax
adcl $0,%ebp
movl %edx,32(%esp,%ecx,4)
movl %ebp,36(%esp,%ecx,4)
mull %edi
addl 32(%esp),%eax
leal -1(%ecx),%ebx
adcl $0,%edx
movl $1,%ecx
movl 4(%esi),%eax
jmp L0073rdmadd
.align 4,0x90
# Final step: conditionally subtract np, then copy the result to rp
# (branchless select between tp and rp via masking with %eax).
L005common_tail:
movl 16(%esp),%ebp
movl 4(%esp),%edi
leal 32(%esp),%esi
movl (%esi),%eax
movl %ebx,%ecx
xorl %edx,%edx
.align 4,0x90
L010sub:
sbbl (%ebp,%edx,4),%eax
movl %eax,(%edi,%edx,4)
decl %ecx
movl 4(%esi,%edx,4),%eax
leal 1(%edx),%edx
jge L010sub
sbbl $0,%eax
andl %eax,%esi
notl %eax
movl %edi,%ebp
andl %eax,%ebp
orl %ebp,%esi
.align 4,0x90
# Copy selected source to rp and wipe the scratch tp words.
L011copy:
movl (%esi,%ebx,4),%eax
movl %eax,(%edi,%ebx,4)
movl %ecx,32(%esp,%ebx,4)
decl %ebx
jge L011copy
movl 24(%esp),%esp
movl $1,%eax
L000just_leave:
popl %edi
popl %esi
popl %ebx
popl %ebp
ret
# ASCII: "Montgomery Multiplication for x86, CRYPTOGAMS by <appro@openssl.org>"
.byte 77,111,110,116,103,111,109,101,114,121,32,77,117,108,116,105
.byte 112,108,105,99,97,116,105,111,110,32,102,111,114,32,120,56
.byte 54,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121
.byte 32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46
.byte 111,114,103,62,0

2385
deps/openssl/asm/x86-macosx-gas/bn/x86.s

File diff suppressed because it is too large

2353
deps/openssl/asm/x86-macosx-gas/camellia/cmll-x86.s

File diff suppressed because it is too large

967
deps/openssl/asm/x86-macosx-gas/cast/cast-586.s

@ -0,0 +1,967 @@
.file "cast-586.s"
.text
# ---------------------------------------------------------------------------
# void CAST_encrypt(CAST_LONG *data, const CAST_KEY *key)
# i386 Mach-O (macOS) GAS, AT&T syntax. Machine-generated by OpenSSL's
# cast-586.pl — do not hand-edit; regenerate from the perlasm source.
# Encrypts the two 32-bit words at data[0]/data[1] in place with CAST-128.
# 16 rounds; if the short-key flag stored at key+128 is non-zero, rounds
# 12-15 are skipped (12-round variant for keys <= 80 bits).
# Three round-function variants alternate by round index mod 3:
#   F1 (rounds 0,3,6,...): key op add;  S-box combine xor, sub, add
#   F2 (rounds 1,4,7,...): key op xor;  S-box combine sub, add, xor
#   F3 (rounds 2,5,8,...): key op sub;  S-box combine add, xor, sub
# ---------------------------------------------------------------------------
.globl _CAST_encrypt
.align 4
_CAST_encrypt:
L_CAST_encrypt_begin:
pushl %ebp
pushl %ebx
# After the two pushes: 12(%esp) = data, 16(%esp) = key schedule.
movl 12(%esp),%ebx
movl 16(%esp),%ebp
pushl %esi
pushl %edi
# Load the 2 words
movl (%ebx),%edi
movl 4(%ebx),%esi
# Get short key flag
# Flag is pushed now and popped after round 11 to decide whether to run
# the final four rounds.
movl 128(%ebp),%eax
pushl %eax
xorl %eax,%eax
# round 0
movl (%ebp),%edx
movl 4(%ebp),%ecx
addl %esi,%edx
roll %cl,%edx
movl %edx,%ebx
xorl %ecx,%ecx
movb %dh,%cl
andl $255,%ebx
shrl $16,%edx
xorl %eax,%eax
movb %dh,%al
andl $255,%edx
movl _CAST_S_table0(,%ecx,4),%ecx
movl _CAST_S_table1(,%ebx,4),%ebx
xorl %ebx,%ecx
movl _CAST_S_table2(,%eax,4),%ebx
subl %ebx,%ecx
movl _CAST_S_table3(,%edx,4),%ebx
addl %ebx,%ecx
xorl %ecx,%edi
# round 1
movl 8(%ebp),%edx
movl 12(%ebp),%ecx
xorl %edi,%edx
roll %cl,%edx
movl %edx,%ebx
xorl %ecx,%ecx
movb %dh,%cl
andl $255,%ebx
shrl $16,%edx
xorl %eax,%eax
movb %dh,%al
andl $255,%edx
movl _CAST_S_table0(,%ecx,4),%ecx
movl _CAST_S_table1(,%ebx,4),%ebx
subl %ebx,%ecx
movl _CAST_S_table2(,%eax,4),%ebx
addl %ebx,%ecx
movl _CAST_S_table3(,%edx,4),%ebx
xorl %ebx,%ecx
xorl %ecx,%esi
# round 2
movl 16(%ebp),%edx
movl 20(%ebp),%ecx
subl %esi,%edx
roll %cl,%edx
movl %edx,%ebx
xorl %ecx,%ecx
movb %dh,%cl
andl $255,%ebx
shrl $16,%edx
xorl %eax,%eax
movb %dh,%al
andl $255,%edx
movl _CAST_S_table0(,%ecx,4),%ecx
movl _CAST_S_table1(,%ebx,4),%ebx
addl %ebx,%ecx
movl _CAST_S_table2(,%eax,4),%ebx
xorl %ebx,%ecx
movl _CAST_S_table3(,%edx,4),%ebx
subl %ebx,%ecx
xorl %ecx,%edi
# round 3
movl 24(%ebp),%edx
movl 28(%ebp),%ecx
addl %edi,%edx
roll %cl,%edx
movl %edx,%ebx
xorl %ecx,%ecx
movb %dh,%cl
andl $255,%ebx
shrl $16,%edx
xorl %eax,%eax
movb %dh,%al
andl $255,%edx
movl _CAST_S_table0(,%ecx,4),%ecx
movl _CAST_S_table1(,%ebx,4),%ebx
xorl %ebx,%ecx
movl _CAST_S_table2(,%eax,4),%ebx
subl %ebx,%ecx
movl _CAST_S_table3(,%edx,4),%ebx
addl %ebx,%ecx
xorl %ecx,%esi
# round 4
movl 32(%ebp),%edx
movl 36(%ebp),%ecx
xorl %esi,%edx
roll %cl,%edx
movl %edx,%ebx
xorl %ecx,%ecx
movb %dh,%cl
andl $255,%ebx
shrl $16,%edx
xorl %eax,%eax
movb %dh,%al
andl $255,%edx
movl _CAST_S_table0(,%ecx,4),%ecx
movl _CAST_S_table1(,%ebx,4),%ebx
subl %ebx,%ecx
movl _CAST_S_table2(,%eax,4),%ebx
addl %ebx,%ecx
movl _CAST_S_table3(,%edx,4),%ebx
xorl %ebx,%ecx
xorl %ecx,%edi
# round 5
movl 40(%ebp),%edx
movl 44(%ebp),%ecx
subl %edi,%edx
roll %cl,%edx
movl %edx,%ebx
xorl %ecx,%ecx
movb %dh,%cl
andl $255,%ebx
shrl $16,%edx
xorl %eax,%eax
movb %dh,%al
andl $255,%edx
movl _CAST_S_table0(,%ecx,4),%ecx
movl _CAST_S_table1(,%ebx,4),%ebx
addl %ebx,%ecx
movl _CAST_S_table2(,%eax,4),%ebx
xorl %ebx,%ecx
movl _CAST_S_table3(,%edx,4),%ebx
subl %ebx,%ecx
xorl %ecx,%esi
# round 6
movl 48(%ebp),%edx
movl 52(%ebp),%ecx
addl %esi,%edx
roll %cl,%edx
movl %edx,%ebx
xorl %ecx,%ecx
movb %dh,%cl
andl $255,%ebx
shrl $16,%edx
xorl %eax,%eax
movb %dh,%al
andl $255,%edx
movl _CAST_S_table0(,%ecx,4),%ecx
movl _CAST_S_table1(,%ebx,4),%ebx
xorl %ebx,%ecx
movl _CAST_S_table2(,%eax,4),%ebx
subl %ebx,%ecx
movl _CAST_S_table3(,%edx,4),%ebx
addl %ebx,%ecx
xorl %ecx,%edi
# round 7
movl 56(%ebp),%edx
movl 60(%ebp),%ecx
xorl %edi,%edx
roll %cl,%edx
movl %edx,%ebx
xorl %ecx,%ecx
movb %dh,%cl
andl $255,%ebx
shrl $16,%edx
xorl %eax,%eax
movb %dh,%al
andl $255,%edx
movl _CAST_S_table0(,%ecx,4),%ecx
movl _CAST_S_table1(,%ebx,4),%ebx
subl %ebx,%ecx
movl _CAST_S_table2(,%eax,4),%ebx
addl %ebx,%ecx
movl _CAST_S_table3(,%edx,4),%ebx
xorl %ebx,%ecx
xorl %ecx,%esi
# round 8
movl 64(%ebp),%edx
movl 68(%ebp),%ecx
subl %esi,%edx
roll %cl,%edx
movl %edx,%ebx
xorl %ecx,%ecx
movb %dh,%cl
andl $255,%ebx
shrl $16,%edx
xorl %eax,%eax
movb %dh,%al
andl $255,%edx
movl _CAST_S_table0(,%ecx,4),%ecx
movl _CAST_S_table1(,%ebx,4),%ebx
addl %ebx,%ecx
movl _CAST_S_table2(,%eax,4),%ebx
xorl %ebx,%ecx
movl _CAST_S_table3(,%edx,4),%ebx
subl %ebx,%ecx
xorl %ecx,%edi
# round 9
movl 72(%ebp),%edx
movl 76(%ebp),%ecx
addl %edi,%edx
roll %cl,%edx
movl %edx,%ebx
xorl %ecx,%ecx
movb %dh,%cl
andl $255,%ebx
shrl $16,%edx
xorl %eax,%eax
movb %dh,%al
andl $255,%edx
movl _CAST_S_table0(,%ecx,4),%ecx
movl _CAST_S_table1(,%ebx,4),%ebx
xorl %ebx,%ecx
movl _CAST_S_table2(,%eax,4),%ebx
subl %ebx,%ecx
movl _CAST_S_table3(,%edx,4),%ebx
addl %ebx,%ecx
xorl %ecx,%esi
# round 10
movl 80(%ebp),%edx
movl 84(%ebp),%ecx
xorl %esi,%edx
roll %cl,%edx
movl %edx,%ebx
xorl %ecx,%ecx
movb %dh,%cl
andl $255,%ebx
shrl $16,%edx
xorl %eax,%eax
movb %dh,%al
andl $255,%edx
movl _CAST_S_table0(,%ecx,4),%ecx
movl _CAST_S_table1(,%ebx,4),%ebx
subl %ebx,%ecx
movl _CAST_S_table2(,%eax,4),%ebx
addl %ebx,%ecx
movl _CAST_S_table3(,%edx,4),%ebx
xorl %ebx,%ecx
xorl %ecx,%edi
# round 11
movl 88(%ebp),%edx
movl 92(%ebp),%ecx
subl %edi,%edx
roll %cl,%edx
movl %edx,%ebx
xorl %ecx,%ecx
movb %dh,%cl
andl $255,%ebx
shrl $16,%edx
xorl %eax,%eax
movb %dh,%al
andl $255,%edx
movl _CAST_S_table0(,%ecx,4),%ecx
movl _CAST_S_table1(,%ebx,4),%ebx
addl %ebx,%ecx
movl _CAST_S_table2(,%eax,4),%ebx
xorl %ebx,%ecx
movl _CAST_S_table3(,%edx,4),%ebx
subl %ebx,%ecx
xorl %ecx,%esi
# test short key flag
# Pop the flag saved above; non-zero means 12-round key, skip rounds 12-15.
popl %edx
orl %edx,%edx
jnz L000cast_enc_done
# round 12
movl 96(%ebp),%edx
movl 100(%ebp),%ecx
addl %esi,%edx
roll %cl,%edx
movl %edx,%ebx
xorl %ecx,%ecx
movb %dh,%cl
andl $255,%ebx
shrl $16,%edx
xorl %eax,%eax
movb %dh,%al
andl $255,%edx
movl _CAST_S_table0(,%ecx,4),%ecx
movl _CAST_S_table1(,%ebx,4),%ebx
xorl %ebx,%ecx
movl _CAST_S_table2(,%eax,4),%ebx
subl %ebx,%ecx
movl _CAST_S_table3(,%edx,4),%ebx
addl %ebx,%ecx
xorl %ecx,%edi
# round 13
movl 104(%ebp),%edx
movl 108(%ebp),%ecx
xorl %edi,%edx
roll %cl,%edx
movl %edx,%ebx
xorl %ecx,%ecx
movb %dh,%cl
andl $255,%ebx
shrl $16,%edx
xorl %eax,%eax
movb %dh,%al
andl $255,%edx
movl _CAST_S_table0(,%ecx,4),%ecx
movl _CAST_S_table1(,%ebx,4),%ebx
subl %ebx,%ecx
movl _CAST_S_table2(,%eax,4),%ebx
addl %ebx,%ecx
movl _CAST_S_table3(,%edx,4),%ebx
xorl %ebx,%ecx
xorl %ecx,%esi
# round 14
movl 112(%ebp),%edx
movl 116(%ebp),%ecx
subl %esi,%edx
roll %cl,%edx
movl %edx,%ebx
xorl %ecx,%ecx
movb %dh,%cl
andl $255,%ebx
shrl $16,%edx
xorl %eax,%eax
movb %dh,%al
andl $255,%edx
movl _CAST_S_table0(,%ecx,4),%ecx
movl _CAST_S_table1(,%ebx,4),%ebx
addl %ebx,%ecx
movl _CAST_S_table2(,%eax,4),%ebx
xorl %ebx,%ecx
movl _CAST_S_table3(,%edx,4),%ebx
subl %ebx,%ecx
xorl %ecx,%edi
# round 15
movl 120(%ebp),%edx
movl 124(%ebp),%ecx
addl %edi,%edx
roll %cl,%edx
movl %edx,%ebx
xorl %ecx,%ecx
movb %dh,%cl
andl $255,%ebx
shrl $16,%edx
xorl %eax,%eax
movb %dh,%al
andl $255,%edx
movl _CAST_S_table0(,%ecx,4),%ecx
movl _CAST_S_table1(,%ebx,4),%ebx
xorl %ebx,%ecx
movl _CAST_S_table2(,%eax,4),%ebx
subl %ebx,%ecx
movl _CAST_S_table3(,%edx,4),%ebx
addl %ebx,%ecx
xorl %ecx,%esi
L000cast_enc_done:
nop
# Write the halves back: data[1] = %edi, data[0] = %esi (swapped relative
# to the order they were loaded in, as CAST's final round leaves them).
movl 20(%esp),%eax
movl %edi,4(%eax)
movl %esi,(%eax)
popl %edi
popl %esi
popl %ebx
popl %ebp
ret
# ---------------------------------------------------------------------------
# void CAST_decrypt(CAST_LONG *data, const CAST_KEY *key)
# i386 Mach-O (macOS) GAS, AT&T syntax. Machine-generated by OpenSSL's
# cast-586.pl — do not hand-edit; regenerate from the perlasm source.
# Inverse of CAST_encrypt: applies the same round functions with the key
# schedule walked in reverse (round 15 down to round 0). If the short-key
# flag at key+128 is non-zero, rounds 15-12 are skipped (12-round variant).
# ---------------------------------------------------------------------------
.globl _CAST_decrypt
.align 4
_CAST_decrypt:
L_CAST_decrypt_begin:
pushl %ebp
pushl %ebx
# After the two pushes: 12(%esp) = data, 16(%esp) = key schedule.
movl 12(%esp),%ebx
movl 16(%esp),%ebp
pushl %esi
pushl %edi
# Load the 2 words
movl (%ebx),%edi
movl 4(%ebx),%esi
# Get short key flag
# Unlike encrypt, decrypt tests the flag up front: a short key jumps past
# the first four (highest-numbered) rounds.
movl 128(%ebp),%eax
orl %eax,%eax
jnz L001cast_dec_skip
xorl %eax,%eax
# round 15
movl 120(%ebp),%edx
movl 124(%ebp),%ecx
addl %esi,%edx
roll %cl,%edx
movl %edx,%ebx
xorl %ecx,%ecx
movb %dh,%cl
andl $255,%ebx
shrl $16,%edx
xorl %eax,%eax
movb %dh,%al
andl $255,%edx
movl _CAST_S_table0(,%ecx,4),%ecx
movl _CAST_S_table1(,%ebx,4),%ebx
xorl %ebx,%ecx
movl _CAST_S_table2(,%eax,4),%ebx
subl %ebx,%ecx
movl _CAST_S_table3(,%edx,4),%ebx
addl %ebx,%ecx
xorl %ecx,%edi
# round 14
movl 112(%ebp),%edx
movl 116(%ebp),%ecx
subl %edi,%edx
roll %cl,%edx
movl %edx,%ebx
xorl %ecx,%ecx
movb %dh,%cl
andl $255,%ebx
shrl $16,%edx
xorl %eax,%eax
movb %dh,%al
andl $255,%edx
movl _CAST_S_table0(,%ecx,4),%ecx
movl _CAST_S_table1(,%ebx,4),%ebx
addl %ebx,%ecx
movl _CAST_S_table2(,%eax,4),%ebx
xorl %ebx,%ecx
movl _CAST_S_table3(,%edx,4),%ebx
subl %ebx,%ecx
xorl %ecx,%esi
# round 13
movl 104(%ebp),%edx
movl 108(%ebp),%ecx
xorl %esi,%edx
roll %cl,%edx
movl %edx,%ebx
xorl %ecx,%ecx
movb %dh,%cl
andl $255,%ebx
shrl $16,%edx
xorl %eax,%eax
movb %dh,%al
andl $255,%edx
movl _CAST_S_table0(,%ecx,4),%ecx
movl _CAST_S_table1(,%ebx,4),%ebx
subl %ebx,%ecx
movl _CAST_S_table2(,%eax,4),%ebx
addl %ebx,%ecx
movl _CAST_S_table3(,%edx,4),%ebx
xorl %ebx,%ecx
xorl %ecx,%edi
# round 12
movl 96(%ebp),%edx
movl 100(%ebp),%ecx
addl %edi,%edx
roll %cl,%edx
movl %edx,%ebx
xorl %ecx,%ecx
movb %dh,%cl
andl $255,%ebx
shrl $16,%edx
xorl %eax,%eax
movb %dh,%al
andl $255,%edx
movl _CAST_S_table0(,%ecx,4),%ecx
movl _CAST_S_table1(,%ebx,4),%ebx
xorl %ebx,%ecx
movl _CAST_S_table2(,%eax,4),%ebx
subl %ebx,%ecx
movl _CAST_S_table3(,%edx,4),%ebx
addl %ebx,%ecx
xorl %ecx,%esi
L001cast_dec_skip:
# round 11
movl 88(%ebp),%edx
movl 92(%ebp),%ecx
subl %esi,%edx
roll %cl,%edx
movl %edx,%ebx
xorl %ecx,%ecx
movb %dh,%cl
andl $255,%ebx
shrl $16,%edx
xorl %eax,%eax
movb %dh,%al
andl $255,%edx
movl _CAST_S_table0(,%ecx,4),%ecx
movl _CAST_S_table1(,%ebx,4),%ebx
addl %ebx,%ecx
movl _CAST_S_table2(,%eax,4),%ebx
xorl %ebx,%ecx
movl _CAST_S_table3(,%edx,4),%ebx
subl %ebx,%ecx
xorl %ecx,%edi
# round 10
movl 80(%ebp),%edx
movl 84(%ebp),%ecx
xorl %edi,%edx
roll %cl,%edx
movl %edx,%ebx
xorl %ecx,%ecx
movb %dh,%cl
andl $255,%ebx
shrl $16,%edx
xorl %eax,%eax
movb %dh,%al
andl $255,%edx
movl _CAST_S_table0(,%ecx,4),%ecx
movl _CAST_S_table1(,%ebx,4),%ebx
subl %ebx,%ecx
movl _CAST_S_table2(,%eax,4),%ebx
addl %ebx,%ecx
movl _CAST_S_table3(,%edx,4),%ebx
xorl %ebx,%ecx
xorl %ecx,%esi
# round 9
movl 72(%ebp),%edx
movl 76(%ebp),%ecx
addl %esi,%edx
roll %cl,%edx
movl %edx,%ebx
xorl %ecx,%ecx
movb %dh,%cl
andl $255,%ebx
shrl $16,%edx
xorl %eax,%eax
movb %dh,%al
andl $255,%edx
movl _CAST_S_table0(,%ecx,4),%ecx
movl _CAST_S_table1(,%ebx,4),%ebx
xorl %ebx,%ecx
movl _CAST_S_table2(,%eax,4),%ebx
subl %ebx,%ecx
movl _CAST_S_table3(,%edx,4),%ebx
addl %ebx,%ecx
xorl %ecx,%edi
# round 8
movl 64(%ebp),%edx
movl 68(%ebp),%ecx
subl %edi,%edx
roll %cl,%edx
movl %edx,%ebx
xorl %ecx,%ecx
movb %dh,%cl
andl $255,%ebx
shrl $16,%edx
xorl %eax,%eax
movb %dh,%al
andl $255,%edx
movl _CAST_S_table0(,%ecx,4),%ecx
movl _CAST_S_table1(,%ebx,4),%ebx
addl %ebx,%ecx
movl _CAST_S_table2(,%eax,4),%ebx
xorl %ebx,%ecx
movl _CAST_S_table3(,%edx,4),%ebx
subl %ebx,%ecx
xorl %ecx,%esi
# round 7
movl 56(%ebp),%edx
movl 60(%ebp),%ecx
xorl %esi,%edx
roll %cl,%edx
movl %edx,%ebx
xorl %ecx,%ecx
movb %dh,%cl
andl $255,%ebx
shrl $16,%edx
xorl %eax,%eax
movb %dh,%al
andl $255,%edx
movl _CAST_S_table0(,%ecx,4),%ecx
movl _CAST_S_table1(,%ebx,4),%ebx
subl %ebx,%ecx
movl _CAST_S_table2(,%eax,4),%ebx
addl %ebx,%ecx
movl _CAST_S_table3(,%edx,4),%ebx
xorl %ebx,%ecx
xorl %ecx,%edi
# round 6
movl 48(%ebp),%edx
movl 52(%ebp),%ecx
addl %edi,%edx
roll %cl,%edx
movl %edx,%ebx
xorl %ecx,%ecx
movb %dh,%cl
andl $255,%ebx
shrl $16,%edx
xorl %eax,%eax
movb %dh,%al
andl $255,%edx
movl _CAST_S_table0(,%ecx,4),%ecx
movl _CAST_S_table1(,%ebx,4),%ebx
xorl %ebx,%ecx
movl _CAST_S_table2(,%eax,4),%ebx
subl %ebx,%ecx
movl _CAST_S_table3(,%edx,4),%ebx
addl %ebx,%ecx
xorl %ecx,%esi
# round 5
movl 40(%ebp),%edx
movl 44(%ebp),%ecx
subl %esi,%edx
roll %cl,%edx
movl %edx,%ebx
xorl %ecx,%ecx
movb %dh,%cl
andl $255,%ebx
shrl $16,%edx
xorl %eax,%eax
movb %dh,%al
andl $255,%edx
movl _CAST_S_table0(,%ecx,4),%ecx
movl _CAST_S_table1(,%ebx,4),%ebx
addl %ebx,%ecx
movl _CAST_S_table2(,%eax,4),%ebx
xorl %ebx,%ecx
movl _CAST_S_table3(,%edx,4),%ebx
subl %ebx,%ecx
xorl %ecx,%edi
# round 4
movl 32(%ebp),%edx
movl 36(%ebp),%ecx
xorl %edi,%edx
roll %cl,%edx
movl %edx,%ebx
xorl %ecx,%ecx
movb %dh,%cl
andl $255,%ebx
shrl $16,%edx
xorl %eax,%eax
movb %dh,%al
andl $255,%edx
movl _CAST_S_table0(,%ecx,4),%ecx
movl _CAST_S_table1(,%ebx,4),%ebx
subl %ebx,%ecx
movl _CAST_S_table2(,%eax,4),%ebx
addl %ebx,%ecx
movl _CAST_S_table3(,%edx,4),%ebx
xorl %ebx,%ecx
xorl %ecx,%esi
# round 3
movl 24(%ebp),%edx
movl 28(%ebp),%ecx
addl %esi,%edx
roll %cl,%edx
movl %edx,%ebx
xorl %ecx,%ecx
movb %dh,%cl
andl $255,%ebx
shrl $16,%edx
xorl %eax,%eax
movb %dh,%al
andl $255,%edx
movl _CAST_S_table0(,%ecx,4),%ecx
movl _CAST_S_table1(,%ebx,4),%ebx
xorl %ebx,%ecx
movl _CAST_S_table2(,%eax,4),%ebx
subl %ebx,%ecx
movl _CAST_S_table3(,%edx,4),%ebx
addl %ebx,%ecx
xorl %ecx,%edi
# round 2
movl 16(%ebp),%edx
movl 20(%ebp),%ecx
subl %edi,%edx
roll %cl,%edx
movl %edx,%ebx
xorl %ecx,%ecx
movb %dh,%cl
andl $255,%ebx
shrl $16,%edx
xorl %eax,%eax
movb %dh,%al
andl $255,%edx
movl _CAST_S_table0(,%ecx,4),%ecx
movl _CAST_S_table1(,%ebx,4),%ebx
addl %ebx,%ecx
movl _CAST_S_table2(,%eax,4),%ebx
xorl %ebx,%ecx
movl _CAST_S_table3(,%edx,4),%ebx
subl %ebx,%ecx
xorl %ecx,%esi
# round 1
movl 8(%ebp),%edx
movl 12(%ebp),%ecx
xorl %esi,%edx
roll %cl,%edx
movl %edx,%ebx
xorl %ecx,%ecx
movb %dh,%cl
andl $255,%ebx
shrl $16,%edx
xorl %eax,%eax
movb %dh,%al
andl $255,%edx
movl _CAST_S_table0(,%ecx,4),%ecx
movl _CAST_S_table1(,%ebx,4),%ebx
subl %ebx,%ecx
movl _CAST_S_table2(,%eax,4),%ebx
addl %ebx,%ecx
movl _CAST_S_table3(,%edx,4),%ebx
xorl %ebx,%ecx
xorl %ecx,%edi
# round 0
movl (%ebp),%edx
movl 4(%ebp),%ecx
addl %edi,%edx
roll %cl,%edx
movl %edx,%ebx
xorl %ecx,%ecx
movb %dh,%cl
andl $255,%ebx
shrl $16,%edx
xorl %eax,%eax
movb %dh,%al
andl $255,%edx
movl _CAST_S_table0(,%ecx,4),%ecx
movl _CAST_S_table1(,%ebx,4),%ebx
xorl %ebx,%ecx
movl _CAST_S_table2(,%eax,4),%ebx
subl %ebx,%ecx
movl _CAST_S_table3(,%edx,4),%ebx
addl %ebx,%ecx
xorl %ecx,%esi
nop
# Write the halves back: data[1] = %edi, data[0] = %esi (same swapped
# store order as CAST_encrypt).
movl 20(%esp),%eax
movl %edi,4(%eax)
movl %esi,(%eax)
popl %edi
popl %esi
popl %ebx
popl %ebp
ret
# ---------------------------------------------------------------------------
# void CAST_cbc_encrypt(const unsigned char *in, unsigned char *out,
#                       long length, const CAST_KEY *ks,
#                       unsigned char *iv, int enc)
# i386 Mach-O (macOS) GAS, AT&T syntax. Machine-generated by OpenSSL's
# cbc.pl perlasm wrapper around CAST_encrypt/CAST_decrypt — do not
# hand-edit; regenerate from the perlasm source.
# After the four register pushes the stack arguments sit at:
#   20(%esp)=in  24(%esp)=out  28(%esp)=length  32(%esp)=ks
#   36(%esp)=iv  40(%esp)=enc
# The IV is copied into two local 8-byte slots on the stack; words are
# byte-swapped (bswap) around each block call because CAST operates on
# big-endian 32-bit halves.
# ---------------------------------------------------------------------------
.globl _CAST_cbc_encrypt
.align 4
_CAST_cbc_encrypt:
L_CAST_cbc_encrypt_begin:
pushl %ebp
pushl %ebx
pushl %esi
pushl %edi
movl 28(%esp),%ebp
# getting iv ptr from parameter 4
movl 36(%esp),%ebx
movl (%ebx),%esi
movl 4(%ebx),%edi
# Two stack copies of the IV: 8/12(%esp) is the working block buffer,
# 16/20(%esp) holds the previous ciphertext block for decryption.
pushl %edi
pushl %esi
pushl %edi
pushl %esi
movl %esp,%ebx
movl 36(%esp),%esi
movl 40(%esp),%edi
# getting encrypt flag from parameter 5
movl 56(%esp),%ecx
# get and push parameter 3
movl 48(%esp),%eax
pushl %eax
pushl %ebx
cmpl $0,%ecx
jz L002decrypt
# Round the length down to a multiple of 8; the intervening movl
# instructions preserve the flags the following jz tests.
andl $4294967288,%ebp
movl 8(%esp),%eax
movl 12(%esp),%ebx
jz L003encrypt_finish
L004encrypt_loop:
# CBC encrypt: block ^= plaintext, then encrypt, then emit as IV/output.
movl (%esi),%ecx
movl 4(%esi),%edx
xorl %ecx,%eax
xorl %edx,%ebx
bswap %eax
bswap %ebx
movl %eax,8(%esp)
movl %ebx,12(%esp)
call L_CAST_encrypt_begin
movl 8(%esp),%eax
movl 12(%esp),%ebx
bswap %eax
bswap %ebx
movl %eax,(%edi)
movl %ebx,4(%edi)
addl $8,%esi
addl $8,%edi
subl $8,%ebp
jnz L004encrypt_loop
L003encrypt_finish:
# Handle the 1..7 trailing bytes via a PIC jump table indexed by count.
movl 52(%esp),%ebp
andl $7,%ebp
jz L005finish
call L006PIC_point
L006PIC_point:
popl %edx
leal L007cbc_enc_jmp_table-L006PIC_point(%edx),%ecx
movl (%ecx,%ebp,4),%ebp
addl %edx,%ebp
xorl %ecx,%ecx
xorl %edx,%edx
jmp *%ebp
L008ej7:
movb 6(%esi),%dh
shll $8,%edx
L009ej6:
movb 5(%esi),%dh
L010ej5:
movb 4(%esi),%dl
L011ej4:
movl (%esi),%ecx
jmp L012ejend
L013ej3:
movb 2(%esi),%ch
shll $8,%ecx
L014ej2:
movb 1(%esi),%ch
L015ej1:
movb (%esi),%cl
L012ejend:
xorl %ecx,%eax
xorl %edx,%ebx
bswap %eax
bswap %ebx
movl %eax,8(%esp)
movl %ebx,12(%esp)
call L_CAST_encrypt_begin
movl 8(%esp),%eax
movl 12(%esp),%ebx
bswap %eax
bswap %ebx
movl %eax,(%edi)
movl %ebx,4(%edi)
jmp L005finish
L002decrypt:
andl $4294967288,%ebp
movl 16(%esp),%eax
movl 20(%esp),%ebx
jz L016decrypt_finish
L017decrypt_loop:
# CBC decrypt: decrypt block, xor with previous ciphertext (16/20(%esp)),
# then save the current ciphertext as the next round's IV.
movl (%esi),%eax
movl 4(%esi),%ebx
bswap %eax
bswap %ebx
movl %eax,8(%esp)
movl %ebx,12(%esp)
call L_CAST_decrypt_begin
movl 8(%esp),%eax
movl 12(%esp),%ebx
bswap %eax
bswap %ebx
movl 16(%esp),%ecx
movl 20(%esp),%edx
xorl %eax,%ecx
xorl %ebx,%edx
movl (%esi),%eax
movl 4(%esi),%ebx
movl %ecx,(%edi)
movl %edx,4(%edi)
movl %eax,16(%esp)
movl %ebx,20(%esp)
addl $8,%esi
addl $8,%edi
subl $8,%ebp
jnz L017decrypt_loop
L016decrypt_finish:
movl 52(%esp),%ebp
andl $7,%ebp
jz L005finish
movl (%esi),%eax
movl 4(%esi),%ebx
bswap %eax
bswap %ebx
movl %eax,8(%esp)
movl %ebx,12(%esp)
call L_CAST_decrypt_begin
movl 8(%esp),%eax
movl 12(%esp),%ebx
bswap %eax
bswap %ebx
movl 16(%esp),%ecx
movl 20(%esp),%edx
xorl %eax,%ecx
xorl %ebx,%edx
movl (%esi),%eax
movl 4(%esi),%ebx
# NOTE(review): unlike the encrypt tail there is no computed jump into the
# djN labels here — control falls straight into L018dj7 regardless of the
# residual count in %ebp. This matches the generated file as shipped, but
# verify against upstream OpenSSL cbc.pl before relying on partial-block
# decryption behavior.
L018dj7:
rorl $16,%edx
movb %dl,6(%edi)
shrl $16,%edx
L019dj6:
movb %dh,5(%edi)
L020dj5:
movb %dl,4(%edi)
L021dj4:
movl %ecx,(%edi)
jmp L022djend
L023dj3:
rorl $16,%ecx
movb %cl,2(%edi)
shll $16,%ecx
L024dj2:
# NOTE(review): the two stores below write through %esi (the INPUT
# pointer), whereas every other djN case writes through %edi (output).
# This is byte-identical to the generated upstream code, but it looks
# wrong — confirm against OpenSSL's cbc.pl before changing.
movb %ch,1(%esi)
L025dj1:
movb %cl,(%esi)
L022djend:
jmp L005finish
L005finish:
# Store the final working block back into the caller's IV and unwind the
# 24 bytes of locals pushed above (2x IV copy + ks + block ptr).
movl 60(%esp),%ecx
addl $24,%esp
movl %eax,(%ecx)
movl %ebx,4(%ecx)
popl %edi
popl %esi
popl %ebx
popl %ebp
ret
.align 6,0x90
L007cbc_enc_jmp_table:
# Offsets (relative to L006PIC_point) for the 1..7 trailing-byte encrypt
# entry points; slot 0 is unused because a zero remainder never reaches
# the table.
.long 0
.long L015ej1-L006PIC_point
.long L014ej2-L006PIC_point
.long L013ej3-L006PIC_point
.long L011ej4-L006PIC_point
.long L010ej5-L006PIC_point
.long L009ej6-L006PIC_point
.long L008ej7-L006PIC_point
.align 6,0x90

891
deps/openssl/asm/x86-macosx-gas/des/crypt586.s

@ -0,0 +1,891 @@
.file "crypt586.s"
.text
# ---------------------------------------------------------------------------
# void fcrypt_body(DES_LONG *out, DES_key_schedule *ks,
#                  DES_LONG Eswap0, DES_LONG Eswap1)
# i386 Mach-O (macOS) GAS, AT&T syntax. Machine-generated by OpenSSL's
# crypt586.pl — do not hand-edit; regenerate from the perlasm source.
# Core of the Unix crypt(3) password hash: runs 25 iterations of the
# 16-round DES round function over an all-zero block, with the salt folded
# in via the two expansion-swap masks (Eswap0/Eswap1).
# Stack layout inside the loop (after the six pushes below):
#   (%esp)   = remaining iteration count (starts at 25)
#   4(%esp)  = &DES_SPtrans (S/P lookup tables, 8 tables x 0x100 bytes)
#   28(%esp) = out pointer (arg 1)
#   32(%esp) = key schedule pointer (arg 2, reloaded into %ebp each round)
#   36(%esp) = Eswap0 mask, 40(%esp) = Eswap1 mask (salt)
# Registers: %edi/%esi hold the two data halves (swapped after each full
# 16-round pass); %ebp alternates between the key schedule and SPtrans.
# ---------------------------------------------------------------------------
.globl _fcrypt_body
.align 4
_fcrypt_body:
L_fcrypt_body_begin:
pushl %ebp
pushl %ebx
pushl %esi
pushl %edi
# Load the 2 words
xorl %edi,%edi
xorl %esi,%esi
leal _DES_SPtrans,%edx
pushl %edx
movl 28(%esp),%ebp
pushl $25
L000start:
# Round 0
movl 36(%esp),%eax
movl %esi,%edx
shrl $16,%edx
movl 40(%esp),%ecx
xorl %esi,%edx
andl %edx,%eax
andl %ecx,%edx
movl %eax,%ebx
shll $16,%ebx
movl %edx,%ecx
shll $16,%ecx
xorl %ebx,%eax
xorl %ecx,%edx
movl (%ebp),%ebx
xorl %ebx,%eax
movl 4(%ebp),%ecx
xorl %esi,%eax
xorl %esi,%edx
xorl %ecx,%edx
andl $0xfcfcfcfc,%eax
xorl %ebx,%ebx
andl $0xcfcfcfcf,%edx
xorl %ecx,%ecx
movb %al,%bl
movb %ah,%cl
rorl $4,%edx
movl 4(%esp),%ebp
xorl (%ebp,%ebx,1),%edi
movb %dl,%bl
xorl 0x200(%ebp,%ecx,1),%edi
movb %dh,%cl
shrl $16,%eax
xorl 0x100(%ebp,%ebx,1),%edi
movb %ah,%bl
shrl $16,%edx
xorl 0x300(%ebp,%ecx,1),%edi
movb %dh,%cl
andl $0xff,%eax
andl $0xff,%edx
movl 0x600(%ebp,%ebx,1),%ebx
xorl %ebx,%edi
movl 0x700(%ebp,%ecx,1),%ebx
xorl %ebx,%edi
movl 0x400(%ebp,%eax,1),%ebx
xorl %ebx,%edi
movl 0x500(%ebp,%edx,1),%ebx
xorl %ebx,%edi
movl 32(%esp),%ebp
# Round 1
movl 36(%esp),%eax
movl %edi,%edx
shrl $16,%edx
movl 40(%esp),%ecx
xorl %edi,%edx
andl %edx,%eax
andl %ecx,%edx
movl %eax,%ebx
shll $16,%ebx
movl %edx,%ecx
shll $16,%ecx
xorl %ebx,%eax
xorl %ecx,%edx
movl 8(%ebp),%ebx
xorl %ebx,%eax
movl 12(%ebp),%ecx
xorl %edi,%eax
xorl %edi,%edx
xorl %ecx,%edx
andl $0xfcfcfcfc,%eax
xorl %ebx,%ebx
andl $0xcfcfcfcf,%edx
xorl %ecx,%ecx
movb %al,%bl
movb %ah,%cl
rorl $4,%edx
movl 4(%esp),%ebp
xorl (%ebp,%ebx,1),%esi
movb %dl,%bl
xorl 0x200(%ebp,%ecx,1),%esi
movb %dh,%cl
shrl $16,%eax
xorl 0x100(%ebp,%ebx,1),%esi
movb %ah,%bl
shrl $16,%edx
xorl 0x300(%ebp,%ecx,1),%esi
movb %dh,%cl
andl $0xff,%eax
andl $0xff,%edx
movl 0x600(%ebp,%ebx,1),%ebx
xorl %ebx,%esi
movl 0x700(%ebp,%ecx,1),%ebx
xorl %ebx,%esi
movl 0x400(%ebp,%eax,1),%ebx
xorl %ebx,%esi
movl 0x500(%ebp,%edx,1),%ebx
xorl %ebx,%esi
movl 32(%esp),%ebp
# Round 2
movl 36(%esp),%eax
movl %esi,%edx
shrl $16,%edx
movl 40(%esp),%ecx
xorl %esi,%edx
andl %edx,%eax
andl %ecx,%edx
movl %eax,%ebx
shll $16,%ebx
movl %edx,%ecx
shll $16,%ecx
xorl %ebx,%eax
xorl %ecx,%edx
movl 16(%ebp),%ebx
xorl %ebx,%eax
movl 20(%ebp),%ecx
xorl %esi,%eax
xorl %esi,%edx
xorl %ecx,%edx
andl $0xfcfcfcfc,%eax
xorl %ebx,%ebx
andl $0xcfcfcfcf,%edx
xorl %ecx,%ecx
movb %al,%bl
movb %ah,%cl
rorl $4,%edx
movl 4(%esp),%ebp
xorl (%ebp,%ebx,1),%edi
movb %dl,%bl
xorl 0x200(%ebp,%ecx,1),%edi
movb %dh,%cl
shrl $16,%eax
xorl 0x100(%ebp,%ebx,1),%edi
movb %ah,%bl
shrl $16,%edx
xorl 0x300(%ebp,%ecx,1),%edi
movb %dh,%cl
andl $0xff,%eax
andl $0xff,%edx
movl 0x600(%ebp,%ebx,1),%ebx
xorl %ebx,%edi
movl 0x700(%ebp,%ecx,1),%ebx
xorl %ebx,%edi
movl 0x400(%ebp,%eax,1),%ebx
xorl %ebx,%edi
movl 0x500(%ebp,%edx,1),%ebx
xorl %ebx,%edi
movl 32(%esp),%ebp
# Round 3
movl 36(%esp),%eax
movl %edi,%edx
shrl $16,%edx
movl 40(%esp),%ecx
xorl %edi,%edx
andl %edx,%eax
andl %ecx,%edx
movl %eax,%ebx
shll $16,%ebx
movl %edx,%ecx
shll $16,%ecx
xorl %ebx,%eax
xorl %ecx,%edx
movl 24(%ebp),%ebx
xorl %ebx,%eax
movl 28(%ebp),%ecx
xorl %edi,%eax
xorl %edi,%edx
xorl %ecx,%edx
andl $0xfcfcfcfc,%eax
xorl %ebx,%ebx
andl $0xcfcfcfcf,%edx
xorl %ecx,%ecx
movb %al,%bl
movb %ah,%cl
rorl $4,%edx
movl 4(%esp),%ebp
xorl (%ebp,%ebx,1),%esi
movb %dl,%bl
xorl 0x200(%ebp,%ecx,1),%esi
movb %dh,%cl
shrl $16,%eax
xorl 0x100(%ebp,%ebx,1),%esi
movb %ah,%bl
shrl $16,%edx
xorl 0x300(%ebp,%ecx,1),%esi
movb %dh,%cl
andl $0xff,%eax
andl $0xff,%edx
movl 0x600(%ebp,%ebx,1),%ebx
xorl %ebx,%esi
movl 0x700(%ebp,%ecx,1),%ebx
xorl %ebx,%esi
movl 0x400(%ebp,%eax,1),%ebx
xorl %ebx,%esi
movl 0x500(%ebp,%edx,1),%ebx
xorl %ebx,%esi
movl 32(%esp),%ebp
# Round 4
movl 36(%esp),%eax
movl %esi,%edx
shrl $16,%edx
movl 40(%esp),%ecx
xorl %esi,%edx
andl %edx,%eax
andl %ecx,%edx
movl %eax,%ebx
shll $16,%ebx
movl %edx,%ecx
shll $16,%ecx
xorl %ebx,%eax
xorl %ecx,%edx
movl 32(%ebp),%ebx
xorl %ebx,%eax
movl 36(%ebp),%ecx
xorl %esi,%eax
xorl %esi,%edx
xorl %ecx,%edx
andl $0xfcfcfcfc,%eax
xorl %ebx,%ebx
andl $0xcfcfcfcf,%edx
xorl %ecx,%ecx
movb %al,%bl
movb %ah,%cl
rorl $4,%edx
movl 4(%esp),%ebp
xorl (%ebp,%ebx,1),%edi
movb %dl,%bl
xorl 0x200(%ebp,%ecx,1),%edi
movb %dh,%cl
shrl $16,%eax
xorl 0x100(%ebp,%ebx,1),%edi
movb %ah,%bl
shrl $16,%edx
xorl 0x300(%ebp,%ecx,1),%edi
movb %dh,%cl
andl $0xff,%eax
andl $0xff,%edx
movl 0x600(%ebp,%ebx,1),%ebx
xorl %ebx,%edi
movl 0x700(%ebp,%ecx,1),%ebx
xorl %ebx,%edi
movl 0x400(%ebp,%eax,1),%ebx
xorl %ebx,%edi
movl 0x500(%ebp,%edx,1),%ebx
xorl %ebx,%edi
movl 32(%esp),%ebp
# Round 5
movl 36(%esp),%eax
movl %edi,%edx
shrl $16,%edx
movl 40(%esp),%ecx
xorl %edi,%edx
andl %edx,%eax
andl %ecx,%edx
movl %eax,%ebx
shll $16,%ebx
movl %edx,%ecx
shll $16,%ecx
xorl %ebx,%eax
xorl %ecx,%edx
movl 40(%ebp),%ebx
xorl %ebx,%eax
movl 44(%ebp),%ecx
xorl %edi,%eax
xorl %edi,%edx
xorl %ecx,%edx
andl $0xfcfcfcfc,%eax
xorl %ebx,%ebx
andl $0xcfcfcfcf,%edx
xorl %ecx,%ecx
movb %al,%bl
movb %ah,%cl
rorl $4,%edx
movl 4(%esp),%ebp
xorl (%ebp,%ebx,1),%esi
movb %dl,%bl
xorl 0x200(%ebp,%ecx,1),%esi
movb %dh,%cl
shrl $16,%eax
xorl 0x100(%ebp,%ebx,1),%esi
movb %ah,%bl
shrl $16,%edx
xorl 0x300(%ebp,%ecx,1),%esi
movb %dh,%cl
andl $0xff,%eax
andl $0xff,%edx
movl 0x600(%ebp,%ebx,1),%ebx
xorl %ebx,%esi
movl 0x700(%ebp,%ecx,1),%ebx
xorl %ebx,%esi
movl 0x400(%ebp,%eax,1),%ebx
xorl %ebx,%esi
movl 0x500(%ebp,%edx,1),%ebx
xorl %ebx,%esi
movl 32(%esp),%ebp
# Round 6
movl 36(%esp),%eax
movl %esi,%edx
shrl $16,%edx
movl 40(%esp),%ecx
xorl %esi,%edx
andl %edx,%eax
andl %ecx,%edx
movl %eax,%ebx
shll $16,%ebx
movl %edx,%ecx
shll $16,%ecx
xorl %ebx,%eax
xorl %ecx,%edx
movl 48(%ebp),%ebx
xorl %ebx,%eax
movl 52(%ebp),%ecx
xorl %esi,%eax
xorl %esi,%edx
xorl %ecx,%edx
andl $0xfcfcfcfc,%eax
xorl %ebx,%ebx
andl $0xcfcfcfcf,%edx
xorl %ecx,%ecx
movb %al,%bl
movb %ah,%cl
rorl $4,%edx
movl 4(%esp),%ebp
xorl (%ebp,%ebx,1),%edi
movb %dl,%bl
xorl 0x200(%ebp,%ecx,1),%edi
movb %dh,%cl
shrl $16,%eax
xorl 0x100(%ebp,%ebx,1),%edi
movb %ah,%bl
shrl $16,%edx
xorl 0x300(%ebp,%ecx,1),%edi
movb %dh,%cl
andl $0xff,%eax
andl $0xff,%edx
movl 0x600(%ebp,%ebx,1),%ebx
xorl %ebx,%edi
movl 0x700(%ebp,%ecx,1),%ebx
xorl %ebx,%edi
movl 0x400(%ebp,%eax,1),%ebx
xorl %ebx,%edi
movl 0x500(%ebp,%edx,1),%ebx
xorl %ebx,%edi
movl 32(%esp),%ebp
# Round 7
movl 36(%esp),%eax
movl %edi,%edx
shrl $16,%edx
movl 40(%esp),%ecx
xorl %edi,%edx
andl %edx,%eax
andl %ecx,%edx
movl %eax,%ebx
shll $16,%ebx
movl %edx,%ecx
shll $16,%ecx
xorl %ebx,%eax
xorl %ecx,%edx
movl 56(%ebp),%ebx
xorl %ebx,%eax
movl 60(%ebp),%ecx
xorl %edi,%eax
xorl %edi,%edx
xorl %ecx,%edx
andl $0xfcfcfcfc,%eax
xorl %ebx,%ebx
andl $0xcfcfcfcf,%edx
xorl %ecx,%ecx
movb %al,%bl
movb %ah,%cl
rorl $4,%edx
movl 4(%esp),%ebp
xorl (%ebp,%ebx,1),%esi
movb %dl,%bl
xorl 0x200(%ebp,%ecx,1),%esi
movb %dh,%cl
shrl $16,%eax
xorl 0x100(%ebp,%ebx,1),%esi
movb %ah,%bl
shrl $16,%edx
xorl 0x300(%ebp,%ecx,1),%esi
movb %dh,%cl
andl $0xff,%eax
andl $0xff,%edx
movl 0x600(%ebp,%ebx,1),%ebx
xorl %ebx,%esi
movl 0x700(%ebp,%ecx,1),%ebx
xorl %ebx,%esi
movl 0x400(%ebp,%eax,1),%ebx
xorl %ebx,%esi
movl 0x500(%ebp,%edx,1),%ebx
xorl %ebx,%esi
movl 32(%esp),%ebp
# Round 8
movl 36(%esp),%eax
movl %esi,%edx
shrl $16,%edx
movl 40(%esp),%ecx
xorl %esi,%edx
andl %edx,%eax
andl %ecx,%edx
movl %eax,%ebx
shll $16,%ebx
movl %edx,%ecx
shll $16,%ecx
xorl %ebx,%eax
xorl %ecx,%edx
movl 64(%ebp),%ebx
xorl %ebx,%eax
movl 68(%ebp),%ecx
xorl %esi,%eax
xorl %esi,%edx
xorl %ecx,%edx
andl $0xfcfcfcfc,%eax
xorl %ebx,%ebx
andl $0xcfcfcfcf,%edx
xorl %ecx,%ecx
movb %al,%bl
movb %ah,%cl
rorl $4,%edx
movl 4(%esp),%ebp
xorl (%ebp,%ebx,1),%edi
movb %dl,%bl
xorl 0x200(%ebp,%ecx,1),%edi
movb %dh,%cl
shrl $16,%eax
xorl 0x100(%ebp,%ebx,1),%edi
movb %ah,%bl
shrl $16,%edx
xorl 0x300(%ebp,%ecx,1),%edi
movb %dh,%cl
andl $0xff,%eax
andl $0xff,%edx
movl 0x600(%ebp,%ebx,1),%ebx
xorl %ebx,%edi
movl 0x700(%ebp,%ecx,1),%ebx
xorl %ebx,%edi
movl 0x400(%ebp,%eax,1),%ebx
xorl %ebx,%edi
movl 0x500(%ebp,%edx,1),%ebx
xorl %ebx,%edi
movl 32(%esp),%ebp
# Round 9
movl 36(%esp),%eax
movl %edi,%edx
shrl $16,%edx
movl 40(%esp),%ecx
xorl %edi,%edx
andl %edx,%eax
andl %ecx,%edx
movl %eax,%ebx
shll $16,%ebx
movl %edx,%ecx
shll $16,%ecx
xorl %ebx,%eax
xorl %ecx,%edx
movl 72(%ebp),%ebx
xorl %ebx,%eax
movl 76(%ebp),%ecx
xorl %edi,%eax
xorl %edi,%edx
xorl %ecx,%edx
andl $0xfcfcfcfc,%eax
xorl %ebx,%ebx
andl $0xcfcfcfcf,%edx
xorl %ecx,%ecx
movb %al,%bl
movb %ah,%cl
rorl $4,%edx
movl 4(%esp),%ebp
xorl (%ebp,%ebx,1),%esi
movb %dl,%bl
xorl 0x200(%ebp,%ecx,1),%esi
movb %dh,%cl
shrl $16,%eax
xorl 0x100(%ebp,%ebx,1),%esi
movb %ah,%bl
shrl $16,%edx
xorl 0x300(%ebp,%ecx,1),%esi
movb %dh,%cl
andl $0xff,%eax
andl $0xff,%edx
movl 0x600(%ebp,%ebx,1),%ebx
xorl %ebx,%esi
movl 0x700(%ebp,%ecx,1),%ebx
xorl %ebx,%esi
movl 0x400(%ebp,%eax,1),%ebx
xorl %ebx,%esi
movl 0x500(%ebp,%edx,1),%ebx
xorl %ebx,%esi
movl 32(%esp),%ebp
# Round 10
movl 36(%esp),%eax
movl %esi,%edx
shrl $16,%edx
movl 40(%esp),%ecx
xorl %esi,%edx
andl %edx,%eax
andl %ecx,%edx
movl %eax,%ebx
shll $16,%ebx
movl %edx,%ecx
shll $16,%ecx
xorl %ebx,%eax
xorl %ecx,%edx
movl 80(%ebp),%ebx
xorl %ebx,%eax
movl 84(%ebp),%ecx
xorl %esi,%eax
xorl %esi,%edx
xorl %ecx,%edx
andl $0xfcfcfcfc,%eax
xorl %ebx,%ebx
andl $0xcfcfcfcf,%edx
xorl %ecx,%ecx
movb %al,%bl
movb %ah,%cl
rorl $4,%edx
movl 4(%esp),%ebp
xorl (%ebp,%ebx,1),%edi
movb %dl,%bl
xorl 0x200(%ebp,%ecx,1),%edi
movb %dh,%cl
shrl $16,%eax
xorl 0x100(%ebp,%ebx,1),%edi
movb %ah,%bl
shrl $16,%edx
xorl 0x300(%ebp,%ecx,1),%edi
movb %dh,%cl
andl $0xff,%eax
andl $0xff,%edx
movl 0x600(%ebp,%ebx,1),%ebx
xorl %ebx,%edi
movl 0x700(%ebp,%ecx,1),%ebx
xorl %ebx,%edi
movl 0x400(%ebp,%eax,1),%ebx
xorl %ebx,%edi
movl 0x500(%ebp,%edx,1),%ebx
xorl %ebx,%edi
movl 32(%esp),%ebp
# Round 11
movl 36(%esp),%eax
movl %edi,%edx
shrl $16,%edx
movl 40(%esp),%ecx
xorl %edi,%edx
andl %edx,%eax
andl %ecx,%edx
movl %eax,%ebx
shll $16,%ebx
movl %edx,%ecx
shll $16,%ecx
xorl %ebx,%eax
xorl %ecx,%edx
movl 88(%ebp),%ebx
xorl %ebx,%eax
movl 92(%ebp),%ecx
xorl %edi,%eax
xorl %edi,%edx
xorl %ecx,%edx
andl $0xfcfcfcfc,%eax
xorl %ebx,%ebx
andl $0xcfcfcfcf,%edx
xorl %ecx,%ecx
movb %al,%bl
movb %ah,%cl
rorl $4,%edx
movl 4(%esp),%ebp
xorl (%ebp,%ebx,1),%esi
movb %dl,%bl
xorl 0x200(%ebp,%ecx,1),%esi
movb %dh,%cl
shrl $16,%eax
xorl 0x100(%ebp,%ebx,1),%esi
movb %ah,%bl
shrl $16,%edx
xorl 0x300(%ebp,%ecx,1),%esi
movb %dh,%cl
andl $0xff,%eax
andl $0xff,%edx
movl 0x600(%ebp,%ebx,1),%ebx
xorl %ebx,%esi
movl 0x700(%ebp,%ecx,1),%ebx
xorl %ebx,%esi
movl 0x400(%ebp,%eax,1),%ebx
xorl %ebx,%esi
movl 0x500(%ebp,%edx,1),%ebx
xorl %ebx,%esi
movl 32(%esp),%ebp
# Round 12
movl 36(%esp),%eax
movl %esi,%edx
shrl $16,%edx
movl 40(%esp),%ecx
xorl %esi,%edx
andl %edx,%eax
andl %ecx,%edx
movl %eax,%ebx
shll $16,%ebx
movl %edx,%ecx
shll $16,%ecx
xorl %ebx,%eax
xorl %ecx,%edx
movl 96(%ebp),%ebx
xorl %ebx,%eax
movl 100(%ebp),%ecx
xorl %esi,%eax
xorl %esi,%edx
xorl %ecx,%edx
andl $0xfcfcfcfc,%eax
xorl %ebx,%ebx
andl $0xcfcfcfcf,%edx
xorl %ecx,%ecx
movb %al,%bl
movb %ah,%cl
rorl $4,%edx
movl 4(%esp),%ebp
xorl (%ebp,%ebx,1),%edi
movb %dl,%bl
xorl 0x200(%ebp,%ecx,1),%edi
movb %dh,%cl
shrl $16,%eax
xorl 0x100(%ebp,%ebx,1),%edi
movb %ah,%bl
shrl $16,%edx
xorl 0x300(%ebp,%ecx,1),%edi
movb %dh,%cl
andl $0xff,%eax
andl $0xff,%edx
movl 0x600(%ebp,%ebx,1),%ebx
xorl %ebx,%edi
movl 0x700(%ebp,%ecx,1),%ebx
xorl %ebx,%edi
movl 0x400(%ebp,%eax,1),%ebx
xorl %ebx,%edi
movl 0x500(%ebp,%edx,1),%ebx
xorl %ebx,%edi
movl 32(%esp),%ebp
# Round 13
movl 36(%esp),%eax
movl %edi,%edx
shrl $16,%edx
movl 40(%esp),%ecx
xorl %edi,%edx
andl %edx,%eax
andl %ecx,%edx
movl %eax,%ebx
shll $16,%ebx
movl %edx,%ecx
shll $16,%ecx
xorl %ebx,%eax
xorl %ecx,%edx
movl 104(%ebp),%ebx
xorl %ebx,%eax
movl 108(%ebp),%ecx
xorl %edi,%eax
xorl %edi,%edx
xorl %ecx,%edx
andl $0xfcfcfcfc,%eax
xorl %ebx,%ebx
andl $0xcfcfcfcf,%edx
xorl %ecx,%ecx
movb %al,%bl
movb %ah,%cl
rorl $4,%edx
movl 4(%esp),%ebp
xorl (%ebp,%ebx,1),%esi
movb %dl,%bl
xorl 0x200(%ebp,%ecx,1),%esi
movb %dh,%cl
shrl $16,%eax
xorl 0x100(%ebp,%ebx,1),%esi
movb %ah,%bl
shrl $16,%edx
xorl 0x300(%ebp,%ecx,1),%esi
movb %dh,%cl
andl $0xff,%eax
andl $0xff,%edx
movl 0x600(%ebp,%ebx,1),%ebx
xorl %ebx,%esi
movl 0x700(%ebp,%ecx,1),%ebx
xorl %ebx,%esi
movl 0x400(%ebp,%eax,1),%ebx
xorl %ebx,%esi
movl 0x500(%ebp,%edx,1),%ebx
xorl %ebx,%esi
movl 32(%esp),%ebp
# Round 14
movl 36(%esp),%eax
movl %esi,%edx
shrl $16,%edx
movl 40(%esp),%ecx
xorl %esi,%edx
andl %edx,%eax
andl %ecx,%edx
movl %eax,%ebx
shll $16,%ebx
movl %edx,%ecx
shll $16,%ecx
xorl %ebx,%eax
xorl %ecx,%edx
movl 112(%ebp),%ebx
xorl %ebx,%eax
movl 116(%ebp),%ecx
xorl %esi,%eax
xorl %esi,%edx
xorl %ecx,%edx
andl $0xfcfcfcfc,%eax
xorl %ebx,%ebx
andl $0xcfcfcfcf,%edx
xorl %ecx,%ecx
movb %al,%bl
movb %ah,%cl
rorl $4,%edx
movl 4(%esp),%ebp
xorl (%ebp,%ebx,1),%edi
movb %dl,%bl
xorl 0x200(%ebp,%ecx,1),%edi
movb %dh,%cl
shrl $16,%eax
xorl 0x100(%ebp,%ebx,1),%edi
movb %ah,%bl
shrl $16,%edx
xorl 0x300(%ebp,%ecx,1),%edi
movb %dh,%cl
andl $0xff,%eax
andl $0xff,%edx
movl 0x600(%ebp,%ebx,1),%ebx
xorl %ebx,%edi
movl 0x700(%ebp,%ecx,1),%ebx
xorl %ebx,%edi
movl 0x400(%ebp,%eax,1),%ebx
xorl %ebx,%edi
movl 0x500(%ebp,%edx,1),%ebx
xorl %ebx,%edi
movl 32(%esp),%ebp
# Round 15
movl 36(%esp),%eax
movl %edi,%edx
shrl $16,%edx
movl 40(%esp),%ecx
xorl %edi,%edx
andl %edx,%eax
andl %ecx,%edx
movl %eax,%ebx
shll $16,%ebx
movl %edx,%ecx
shll $16,%ecx
xorl %ebx,%eax
xorl %ecx,%edx
movl 120(%ebp),%ebx
xorl %ebx,%eax
movl 124(%ebp),%ecx
xorl %edi,%eax
xorl %edi,%edx
xorl %ecx,%edx
andl $0xfcfcfcfc,%eax
xorl %ebx,%ebx
andl $0xcfcfcfcf,%edx
xorl %ecx,%ecx
movb %al,%bl
movb %ah,%cl
rorl $4,%edx
movl 4(%esp),%ebp
xorl (%ebp,%ebx,1),%esi
movb %dl,%bl
xorl 0x200(%ebp,%ecx,1),%esi
movb %dh,%cl
shrl $16,%eax
xorl 0x100(%ebp,%ebx,1),%esi
movb %ah,%bl
shrl $16,%edx
xorl 0x300(%ebp,%ecx,1),%esi
movb %dh,%cl
andl $0xff,%eax
andl $0xff,%edx
movl 0x600(%ebp,%ebx,1),%ebx
xorl %ebx,%esi
movl 0x700(%ebp,%ecx,1),%ebx
xorl %ebx,%esi
movl 0x400(%ebp,%eax,1),%ebx
xorl %ebx,%esi
movl 0x500(%ebp,%edx,1),%ebx
xorl %ebx,%esi
movl 32(%esp),%ebp
# End of one 16-round pass: swap the halves, decrement the iteration
# counter at (%esp), and loop until 25 passes are done.
movl (%esp),%ebx
movl %edi,%eax
decl %ebx
movl %esi,%edi
movl %eax,%esi
movl %ebx,(%esp)
jnz L000start
# FP
# Final permutation (DES FP) applied in-register via the standard
# swap-with-mask sequence, then the result is stored to out[0]/out[1].
movl 28(%esp),%edx
rorl $1,%edi
movl %esi,%eax
xorl %edi,%esi
andl $0xaaaaaaaa,%esi
xorl %esi,%eax
xorl %esi,%edi
roll $23,%eax
movl %eax,%esi
xorl %edi,%eax
andl $0x03fc03fc,%eax
xorl %eax,%esi
xorl %eax,%edi
roll $10,%esi
movl %esi,%eax
xorl %edi,%esi
andl $0x33333333,%esi
xorl %esi,%eax
xorl %esi,%edi
roll $18,%edi
movl %edi,%esi
xorl %eax,%edi
andl $0xfff0000f,%edi
xorl %edi,%esi
xorl %edi,%eax
roll $12,%esi
movl %esi,%edi
xorl %eax,%esi
andl $0xf0f0f0f0,%esi
xorl %esi,%edi
xorl %esi,%eax
rorl $4,%eax
movl %eax,(%edx)
movl %edi,4(%edx)
# Drop the two locals (iteration counter + SPtrans pointer) and return.
addl $8,%esp
popl %edi
popl %esi
popl %ebx
popl %ebp
ret

1873
deps/openssl/asm/x86-macosx-gas/des/des-586.s

File diff suppressed because it is too large

745
deps/openssl/asm/x86-macosx-gas/md5/md5-586.s

@ -0,0 +1,745 @@
.file "../openssl/crypto/md5/asm/md5-586.s"
.text
# void md5_block_asm_data_order(MD5_CTX *ctx, const void *data, size_t num)
# i386 cdecl; Mach-O (leading-underscore) symbol.  ctx holds the four 32-bit
# MD5 state words A,B,C,D; data is the message; num is a count of 64-byte blocks.
# Generated by OpenSSL crypto/md5/asm/md5-586.pl -- logic is order-sensitive,
# do not hand-edit.  Register roles inside the loop: eax/ebx/ecx/edx = A/B/C/D,
# esi = data pointer, ebp = current message word, edi = round-function scratch.
.globl _md5_block_asm_data_order
.align 4
_md5_block_asm_data_order:
L_md5_block_asm_data_order_begin:
pushl %esi
pushl %edi
# edi = ctx, esi = data, ecx = num
movl 12(%esp),%edi
movl 16(%esp),%esi
movl 20(%esp),%ecx
pushl %ebp
# ecx = data + num*64 - 64 = address of the last block; saved on the stack
# (at (%esp) after the pushes) as the loop-termination sentinel.
shll $6,%ecx
pushl %ebx
addl %esi,%ecx
subl $64,%ecx
# load chaining state A,B,C,D
movl (%edi),%eax
pushl %ecx
movl 4(%edi),%ebx
movl 8(%edi),%ecx
movl 12(%edi),%edx
L000start:
# R0 section
# Round 1, steps 0-15: F(x,y,z) = (x & y) | (~x & z), computed as
# ((z ^ y) & x) ^ z so only one temporary (edi) is needed.
movl %ecx,%edi
movl (%esi),%ebp
# R0 0
xorl %edx,%edi
andl %ebx,%edi
leal 3614090360(%eax,%ebp,1),%eax
xorl %edx,%edi
addl %edi,%eax
movl %ebx,%edi
roll $7,%eax
movl 4(%esi),%ebp
addl %ebx,%eax
# R0 1
xorl %ecx,%edi
andl %eax,%edi
leal 3905402710(%edx,%ebp,1),%edx
xorl %ecx,%edi
addl %edi,%edx
movl %eax,%edi
roll $12,%edx
movl 8(%esi),%ebp
addl %eax,%edx
# R0 2
xorl %ebx,%edi
andl %edx,%edi
leal 606105819(%ecx,%ebp,1),%ecx
xorl %ebx,%edi
addl %edi,%ecx
movl %edx,%edi
roll $17,%ecx
movl 12(%esi),%ebp
addl %edx,%ecx
# R0 3
xorl %eax,%edi
andl %ecx,%edi
leal 3250441966(%ebx,%ebp,1),%ebx
xorl %eax,%edi
addl %edi,%ebx
movl %ecx,%edi
roll $22,%ebx
movl 16(%esi),%ebp
addl %ecx,%ebx
# R0 4
xorl %edx,%edi
andl %ebx,%edi
leal 4118548399(%eax,%ebp,1),%eax
xorl %edx,%edi
addl %edi,%eax
movl %ebx,%edi
roll $7,%eax
movl 20(%esi),%ebp
addl %ebx,%eax
# R0 5
xorl %ecx,%edi
andl %eax,%edi
leal 1200080426(%edx,%ebp,1),%edx
xorl %ecx,%edi
addl %edi,%edx
movl %eax,%edi
roll $12,%edx
movl 24(%esi),%ebp
addl %eax,%edx
# R0 6
xorl %ebx,%edi
andl %edx,%edi
leal 2821735955(%ecx,%ebp,1),%ecx
xorl %ebx,%edi
addl %edi,%ecx
movl %edx,%edi
roll $17,%ecx
movl 28(%esi),%ebp
addl %edx,%ecx
# R0 7
xorl %eax,%edi
andl %ecx,%edi
leal 4249261313(%ebx,%ebp,1),%ebx
xorl %eax,%edi
addl %edi,%ebx
movl %ecx,%edi
roll $22,%ebx
movl 32(%esi),%ebp
addl %ecx,%ebx
# R0 8
xorl %edx,%edi
andl %ebx,%edi
leal 1770035416(%eax,%ebp,1),%eax
xorl %edx,%edi
addl %edi,%eax
movl %ebx,%edi
roll $7,%eax
movl 36(%esi),%ebp
addl %ebx,%eax
# R0 9
xorl %ecx,%edi
andl %eax,%edi
leal 2336552879(%edx,%ebp,1),%edx
xorl %ecx,%edi
addl %edi,%edx
movl %eax,%edi
roll $12,%edx
movl 40(%esi),%ebp
addl %eax,%edx
# R0 10
xorl %ebx,%edi
andl %edx,%edi
leal 4294925233(%ecx,%ebp,1),%ecx
xorl %ebx,%edi
addl %edi,%ecx
movl %edx,%edi
roll $17,%ecx
movl 44(%esi),%ebp
addl %edx,%ecx
# R0 11
xorl %eax,%edi
andl %ecx,%edi
leal 2304563134(%ebx,%ebp,1),%ebx
xorl %eax,%edi
addl %edi,%ebx
movl %ecx,%edi
roll $22,%ebx
movl 48(%esi),%ebp
addl %ecx,%ebx
# R0 12
xorl %edx,%edi
andl %ebx,%edi
leal 1804603682(%eax,%ebp,1),%eax
xorl %edx,%edi
addl %edi,%eax
movl %ebx,%edi
roll $7,%eax
movl 52(%esi),%ebp
addl %ebx,%eax
# R0 13
xorl %ecx,%edi
andl %eax,%edi
leal 4254626195(%edx,%ebp,1),%edx
xorl %ecx,%edi
addl %edi,%edx
movl %eax,%edi
roll $12,%edx
movl 56(%esi),%ebp
addl %eax,%edx
# R0 14
xorl %ebx,%edi
andl %edx,%edi
leal 2792965006(%ecx,%ebp,1),%ecx
xorl %ebx,%edi
addl %edi,%ecx
movl %edx,%edi
roll $17,%ecx
movl 60(%esi),%ebp
addl %edx,%ecx
# R0 15
xorl %eax,%edi
andl %ecx,%edi
leal 1236535329(%ebx,%ebp,1),%ebx
xorl %eax,%edi
addl %edi,%ebx
movl %ecx,%edi
roll $22,%ebx
movl 4(%esi),%ebp
addl %ecx,%ebx
# R1 section
# Round 2, steps 16-31: G(x,y,z) = (x & z) | (y & ~z), computed as
# ((y ^ x) & z) ^ y; message words are taken in the (5i+1 mod 16) order.
# R1 16
leal 4129170786(%eax,%ebp,1),%eax
xorl %ebx,%edi
andl %edx,%edi
movl 24(%esi),%ebp
xorl %ecx,%edi
addl %edi,%eax
movl %ebx,%edi
roll $5,%eax
addl %ebx,%eax
# R1 17
leal 3225465664(%edx,%ebp,1),%edx
xorl %eax,%edi
andl %ecx,%edi
movl 44(%esi),%ebp
xorl %ebx,%edi
addl %edi,%edx
movl %eax,%edi
roll $9,%edx
addl %eax,%edx
# R1 18
leal 643717713(%ecx,%ebp,1),%ecx
xorl %edx,%edi
andl %ebx,%edi
movl (%esi),%ebp
xorl %eax,%edi
addl %edi,%ecx
movl %edx,%edi
roll $14,%ecx
addl %edx,%ecx
# R1 19
leal 3921069994(%ebx,%ebp,1),%ebx
xorl %ecx,%edi
andl %eax,%edi
movl 20(%esi),%ebp
xorl %edx,%edi
addl %edi,%ebx
movl %ecx,%edi
roll $20,%ebx
addl %ecx,%ebx
# R1 20
leal 3593408605(%eax,%ebp,1),%eax
xorl %ebx,%edi
andl %edx,%edi
movl 40(%esi),%ebp
xorl %ecx,%edi
addl %edi,%eax
movl %ebx,%edi
roll $5,%eax
addl %ebx,%eax
# R1 21
leal 38016083(%edx,%ebp,1),%edx
xorl %eax,%edi
andl %ecx,%edi
movl 60(%esi),%ebp
xorl %ebx,%edi
addl %edi,%edx
movl %eax,%edi
roll $9,%edx
addl %eax,%edx
# R1 22
leal 3634488961(%ecx,%ebp,1),%ecx
xorl %edx,%edi
andl %ebx,%edi
movl 16(%esi),%ebp
xorl %eax,%edi
addl %edi,%ecx
movl %edx,%edi
roll $14,%ecx
addl %edx,%ecx
# R1 23
leal 3889429448(%ebx,%ebp,1),%ebx
xorl %ecx,%edi
andl %eax,%edi
movl 36(%esi),%ebp
xorl %edx,%edi
addl %edi,%ebx
movl %ecx,%edi
roll $20,%ebx
addl %ecx,%ebx
# R1 24
leal 568446438(%eax,%ebp,1),%eax
xorl %ebx,%edi
andl %edx,%edi
movl 56(%esi),%ebp
xorl %ecx,%edi
addl %edi,%eax
movl %ebx,%edi
roll $5,%eax
addl %ebx,%eax
# R1 25
leal 3275163606(%edx,%ebp,1),%edx
xorl %eax,%edi
andl %ecx,%edi
movl 12(%esi),%ebp
xorl %ebx,%edi
addl %edi,%edx
movl %eax,%edi
roll $9,%edx
addl %eax,%edx
# R1 26
leal 4107603335(%ecx,%ebp,1),%ecx
xorl %edx,%edi
andl %ebx,%edi
movl 32(%esi),%ebp
xorl %eax,%edi
addl %edi,%ecx
movl %edx,%edi
roll $14,%ecx
addl %edx,%ecx
# R1 27
leal 1163531501(%ebx,%ebp,1),%ebx
xorl %ecx,%edi
andl %eax,%edi
movl 52(%esi),%ebp
xorl %edx,%edi
addl %edi,%ebx
movl %ecx,%edi
roll $20,%ebx
addl %ecx,%ebx
# R1 28
leal 2850285829(%eax,%ebp,1),%eax
xorl %ebx,%edi
andl %edx,%edi
movl 8(%esi),%ebp
xorl %ecx,%edi
addl %edi,%eax
movl %ebx,%edi
roll $5,%eax
addl %ebx,%eax
# R1 29
leal 4243563512(%edx,%ebp,1),%edx
xorl %eax,%edi
andl %ecx,%edi
movl 28(%esi),%ebp
xorl %ebx,%edi
addl %edi,%edx
movl %eax,%edi
roll $9,%edx
addl %eax,%edx
# R1 30
leal 1735328473(%ecx,%ebp,1),%ecx
xorl %edx,%edi
andl %ebx,%edi
movl 48(%esi),%ebp
xorl %eax,%edi
addl %edi,%ecx
movl %edx,%edi
roll $14,%ecx
addl %edx,%ecx
# R1 31
leal 2368359562(%ebx,%ebp,1),%ebx
xorl %ecx,%edi
andl %eax,%edi
movl 20(%esi),%ebp
xorl %edx,%edi
addl %edi,%ebx
movl %ecx,%edi
roll $20,%ebx
addl %ecx,%ebx
# R2 section
# Round 3, steps 32-47: H(x,y,z) = x ^ y ^ z; words in (3i+5 mod 16) order.
# R2 32
xorl %edx,%edi
xorl %ebx,%edi
leal 4294588738(%eax,%ebp,1),%eax
addl %edi,%eax
roll $4,%eax
movl 32(%esi),%ebp
movl %ebx,%edi
# R2 33
leal 2272392833(%edx,%ebp,1),%edx
addl %ebx,%eax
xorl %ecx,%edi
xorl %eax,%edi
movl 44(%esi),%ebp
addl %edi,%edx
movl %eax,%edi
roll $11,%edx
addl %eax,%edx
# R2 34
xorl %ebx,%edi
xorl %edx,%edi
leal 1839030562(%ecx,%ebp,1),%ecx
addl %edi,%ecx
roll $16,%ecx
movl 56(%esi),%ebp
movl %edx,%edi
# R2 35
leal 4259657740(%ebx,%ebp,1),%ebx
addl %edx,%ecx
xorl %eax,%edi
xorl %ecx,%edi
movl 4(%esi),%ebp
addl %edi,%ebx
movl %ecx,%edi
roll $23,%ebx
addl %ecx,%ebx
# R2 36
xorl %edx,%edi
xorl %ebx,%edi
leal 2763975236(%eax,%ebp,1),%eax
addl %edi,%eax
roll $4,%eax
movl 16(%esi),%ebp
movl %ebx,%edi
# R2 37
leal 1272893353(%edx,%ebp,1),%edx
addl %ebx,%eax
xorl %ecx,%edi
xorl %eax,%edi
movl 28(%esi),%ebp
addl %edi,%edx
movl %eax,%edi
roll $11,%edx
addl %eax,%edx
# R2 38
xorl %ebx,%edi
xorl %edx,%edi
leal 4139469664(%ecx,%ebp,1),%ecx
addl %edi,%ecx
roll $16,%ecx
movl 40(%esi),%ebp
movl %edx,%edi
# R2 39
leal 3200236656(%ebx,%ebp,1),%ebx
addl %edx,%ecx
xorl %eax,%edi
xorl %ecx,%edi
movl 52(%esi),%ebp
addl %edi,%ebx
movl %ecx,%edi
roll $23,%ebx
addl %ecx,%ebx
# R2 40
xorl %edx,%edi
xorl %ebx,%edi
leal 681279174(%eax,%ebp,1),%eax
addl %edi,%eax
roll $4,%eax
movl (%esi),%ebp
movl %ebx,%edi
# R2 41
leal 3936430074(%edx,%ebp,1),%edx
addl %ebx,%eax
xorl %ecx,%edi
xorl %eax,%edi
movl 12(%esi),%ebp
addl %edi,%edx
movl %eax,%edi
roll $11,%edx
addl %eax,%edx
# R2 42
xorl %ebx,%edi
xorl %edx,%edi
leal 3572445317(%ecx,%ebp,1),%ecx
addl %edi,%ecx
roll $16,%ecx
movl 24(%esi),%ebp
movl %edx,%edi
# R2 43
leal 76029189(%ebx,%ebp,1),%ebx
addl %edx,%ecx
xorl %eax,%edi
xorl %ecx,%edi
movl 36(%esi),%ebp
addl %edi,%ebx
movl %ecx,%edi
roll $23,%ebx
addl %ecx,%ebx
# R2 44
xorl %edx,%edi
xorl %ebx,%edi
leal 3654602809(%eax,%ebp,1),%eax
addl %edi,%eax
roll $4,%eax
movl 48(%esi),%ebp
movl %ebx,%edi
# R2 45
leal 3873151461(%edx,%ebp,1),%edx
addl %ebx,%eax
xorl %ecx,%edi
xorl %eax,%edi
movl 60(%esi),%ebp
addl %edi,%edx
movl %eax,%edi
roll $11,%edx
addl %eax,%edx
# R2 46
xorl %ebx,%edi
xorl %edx,%edi
leal 530742520(%ecx,%ebp,1),%ecx
addl %edi,%ecx
roll $16,%ecx
movl 8(%esi),%ebp
movl %edx,%edi
# R2 47
leal 3299628645(%ebx,%ebp,1),%ebx
addl %edx,%ecx
xorl %eax,%edi
xorl %ecx,%edi
movl (%esi),%ebp
addl %edi,%ebx
# edi is preloaded with -1 (all ones) for round 4's I() function below
movl $-1,%edi
roll $23,%ebx
addl %ecx,%ebx
# R3 section
# Round 4, steps 48-63: I(x,y,z) = y ^ (x | ~z), with ~z built by xor-ing
# the -1 in edi; words in (7i mod 16) order.
# R3 48
xorl %edx,%edi
orl %ebx,%edi
leal 4096336452(%eax,%ebp,1),%eax
xorl %ecx,%edi
movl 28(%esi),%ebp
addl %edi,%eax
movl $-1,%edi
roll $6,%eax
xorl %ecx,%edi
addl %ebx,%eax
# R3 49
orl %eax,%edi
leal 1126891415(%edx,%ebp,1),%edx
xorl %ebx,%edi
movl 56(%esi),%ebp
addl %edi,%edx
movl $-1,%edi
roll $10,%edx
xorl %ebx,%edi
addl %eax,%edx
# R3 50
orl %edx,%edi
leal 2878612391(%ecx,%ebp,1),%ecx
xorl %eax,%edi
movl 20(%esi),%ebp
addl %edi,%ecx
movl $-1,%edi
roll $15,%ecx
xorl %eax,%edi
addl %edx,%ecx
# R3 51
orl %ecx,%edi
leal 4237533241(%ebx,%ebp,1),%ebx
xorl %edx,%edi
movl 48(%esi),%ebp
addl %edi,%ebx
movl $-1,%edi
roll $21,%ebx
xorl %edx,%edi
addl %ecx,%ebx
# R3 52
orl %ebx,%edi
leal 1700485571(%eax,%ebp,1),%eax
xorl %ecx,%edi
movl 12(%esi),%ebp
addl %edi,%eax
movl $-1,%edi
roll $6,%eax
xorl %ecx,%edi
addl %ebx,%eax
# R3 53
orl %eax,%edi
leal 2399980690(%edx,%ebp,1),%edx
xorl %ebx,%edi
movl 40(%esi),%ebp
addl %edi,%edx
movl $-1,%edi
roll $10,%edx
xorl %ebx,%edi
addl %eax,%edx
# R3 54
orl %edx,%edi
leal 4293915773(%ecx,%ebp,1),%ecx
xorl %eax,%edi
movl 4(%esi),%ebp
addl %edi,%ecx
movl $-1,%edi
roll $15,%ecx
xorl %eax,%edi
addl %edx,%ecx
# R3 55
orl %ecx,%edi
leal 2240044497(%ebx,%ebp,1),%ebx
xorl %edx,%edi
movl 32(%esi),%ebp
addl %edi,%ebx
movl $-1,%edi
roll $21,%ebx
xorl %edx,%edi
addl %ecx,%ebx
# R3 56
orl %ebx,%edi
leal 1873313359(%eax,%ebp,1),%eax
xorl %ecx,%edi
movl 60(%esi),%ebp
addl %edi,%eax
movl $-1,%edi
roll $6,%eax
xorl %ecx,%edi
addl %ebx,%eax
# R3 57
orl %eax,%edi
leal 4264355552(%edx,%ebp,1),%edx
xorl %ebx,%edi
movl 24(%esi),%ebp
addl %edi,%edx
movl $-1,%edi
roll $10,%edx
xorl %ebx,%edi
addl %eax,%edx
# R3 58
orl %edx,%edi
leal 2734768916(%ecx,%ebp,1),%ecx
xorl %eax,%edi
movl 52(%esi),%ebp
addl %edi,%ecx
movl $-1,%edi
roll $15,%ecx
xorl %eax,%edi
addl %edx,%ecx
# R3 59
orl %ecx,%edi
leal 1309151649(%ebx,%ebp,1),%ebx
xorl %edx,%edi
movl 16(%esi),%ebp
addl %edi,%ebx
movl $-1,%edi
roll $21,%ebx
xorl %edx,%edi
addl %ecx,%ebx
# R3 60
orl %ebx,%edi
leal 4149444226(%eax,%ebp,1),%eax
xorl %ecx,%edi
movl 44(%esi),%ebp
addl %edi,%eax
movl $-1,%edi
roll $6,%eax
xorl %ecx,%edi
addl %ebx,%eax
# R3 61
orl %eax,%edi
leal 3174756917(%edx,%ebp,1),%edx
xorl %ebx,%edi
movl 8(%esi),%ebp
addl %edi,%edx
movl $-1,%edi
roll $10,%edx
xorl %ebx,%edi
addl %eax,%edx
# R3 62
orl %edx,%edi
leal 718787259(%ecx,%ebp,1),%ecx
xorl %eax,%edi
movl 36(%esi),%ebp
addl %edi,%ecx
movl $-1,%edi
roll $15,%ecx
xorl %eax,%edi
addl %edx,%ecx
# R3 63
orl %ecx,%edi
leal 3951481745(%ebx,%ebp,1),%ebx
xorl %edx,%edi
# ebp = ctx (reloaded from the stack); add this block's result into the
# chaining state, store it back, then advance esi to the next 64-byte block.
movl 24(%esp),%ebp
addl %edi,%ebx
addl $64,%esi
roll $21,%ebx
movl (%ebp),%edi
addl %ecx,%ebx
addl %edi,%eax
movl 4(%ebp),%edi
addl %edi,%ebx
movl 8(%ebp),%edi
addl %edi,%ecx
movl 12(%ebp),%edi
addl %edi,%edx
movl %eax,(%ebp)
movl %ebx,4(%ebp)
# (%esp) holds the address of the last block; loop while esi <= sentinel
movl (%esp),%edi
movl %ecx,8(%ebp)
movl %edx,12(%ebp)
cmpl %esi,%edi
jae L000start
# discard the sentinel slot and restore callee-saved registers
popl %eax
popl %ebx
popl %ebp
popl %edi
popl %esi
ret

224
deps/openssl/asm/x86-macosx-gas/rc4/rc4-586.s

@ -0,0 +1,224 @@
.file "rc4-586.s"
.text
# void RC4(RC4_KEY *key, size_t len, const unsigned char *inp, unsigned char *out)
# i386 cdecl.  key layout (as produced by RC4_set_key below): key->x at
# offset 0, key->y at offset 4, S-box starting at offset 8; S-box entries are
# 32-bit words in the default path, or bytes when key[256+8] == -1 (the
# RC4_CHAR layout selected for P4-class CPUs).  Generated by OpenSSL
# crypto/rc4/asm/rc4-586.pl -- logic is order-sensitive, do not hand-edit.
.globl _RC4
.align 4
_RC4:
L_RC4_begin:
pushl %ebp
pushl %ebx
pushl %esi
pushl %edi
# edi = key, edx = len, esi = inp, ebp = out
movl 20(%esp),%edi
movl 24(%esp),%edx
movl 28(%esp),%esi
movl 32(%esp),%ebp
xorl %eax,%eax
xorl %ebx,%ebx
cmpl $0,%edx
je L000abort
# al = key->x, bl = key->y; edi advanced to the S-box proper
movb (%edi),%al
movb 4(%edi),%bl
addl $8,%edi
# 24(%esp) = inp+len (end of input); ebp = out-inp so out bytes are
# addressed as (%ebp,%esi,1) while esi walks the input
leal (%esi,%edx,1),%ecx
subl %esi,%ebp
movl %ecx,24(%esp)
incb %al
# key[256] == -1 marks the byte-sized S-box variant
cmpl $-1,256(%edi)
je L001RC4_CHAR
movl (%edi,%eax,4),%ecx
andl $-4,%edx
jz L002loop1
# 28(%esp) = last input address processable by the 4-at-a-time loop
leal -4(%esi,%edx,1),%edx
movl %edx,28(%esp)
movl %ebp,32(%esp)
.align 4,0x90
L003loop4:
# unrolled x4: standard RC4 swap/update, accumulating four key-stream
# bytes into ebp via rorl/orl before one 32-bit xor with the input
addb %cl,%bl
movl (%edi,%ebx,4),%edx
movl %ecx,(%edi,%ebx,4)
movl %edx,(%edi,%eax,4)
addl %ecx,%edx
incb %al
andl $255,%edx
movl (%edi,%eax,4),%ecx
movl (%edi,%edx,4),%ebp
addb %cl,%bl
movl (%edi,%ebx,4),%edx
movl %ecx,(%edi,%ebx,4)
movl %edx,(%edi,%eax,4)
addl %ecx,%edx
incb %al
andl $255,%edx
rorl $8,%ebp
movl (%edi,%eax,4),%ecx
orl (%edi,%edx,4),%ebp
addb %cl,%bl
movl (%edi,%ebx,4),%edx
movl %ecx,(%edi,%ebx,4)
movl %edx,(%edi,%eax,4)
addl %ecx,%edx
incb %al
andl $255,%edx
rorl $8,%ebp
movl (%edi,%eax,4),%ecx
orl (%edi,%edx,4),%ebp
addb %cl,%bl
movl (%edi,%ebx,4),%edx
movl %ecx,(%edi,%ebx,4)
movl %edx,(%edi,%eax,4)
addl %ecx,%edx
incb %al
andl $255,%edx
rorl $8,%ebp
movl 32(%esp),%ecx
orl (%edi,%edx,4),%ebp
rorl $8,%ebp
xorl (%esi),%ebp
cmpl 28(%esp),%esi
movl %ebp,(%ecx,%esi,1)
leal 4(%esi),%esi
movl (%edi,%eax,4),%ecx
jb L003loop4
cmpl 24(%esp),%esi
je L004done
movl 32(%esp),%ebp
.align 4,0x90
L002loop1:
# byte-at-a-time tail for the word-sized S-box
addb %cl,%bl
movl (%edi,%ebx,4),%edx
movl %ecx,(%edi,%ebx,4)
movl %edx,(%edi,%eax,4)
addl %ecx,%edx
incb %al
andl $255,%edx
movl (%edi,%edx,4),%edx
xorb (%esi),%dl
leal 1(%esi),%esi
movl (%edi,%eax,4),%ecx
cmpl 24(%esp),%esi
movb %dl,-1(%ebp,%esi,1)
jb L002loop1
jmp L004done
.align 4,0x90
L001RC4_CHAR:
# byte-sized S-box variant (key->data stored as chars)
movzbl (%edi,%eax,1),%ecx
L005cloop1:
addb %cl,%bl
movzbl (%edi,%ebx,1),%edx
movb %cl,(%edi,%ebx,1)
movb %dl,(%edi,%eax,1)
addb %cl,%dl
movzbl (%edi,%edx,1),%edx
addb $1,%al
xorb (%esi),%dl
leal 1(%esi),%esi
movzbl (%edi,%eax,1),%ecx
cmpl 24(%esp),%esi
movb %dl,-1(%ebp,%esi,1)
jb L005cloop1
L004done:
# write back key->x (undoing the speculative incb) and key->y
decb %al
movb %bl,-4(%edi)
movb %al,-8(%edi)
L000abort:
popl %edi
popl %esi
popl %ebx
popl %ebp
ret
# void RC4_set_key(RC4_KEY *key, int len, const unsigned char *data)
# i386 cdecl.  Runs the RC4 key schedule.  Chooses between a word-sized
# S-box (default) and a byte-sized one when bit 20 of OPENSSL_ia32cap_P
# is set (P4-class CPU); the byte layout is flagged by key[256+8] = -1
# so that RC4 above can detect it.  Generated code -- do not hand-edit.
.globl _RC4_set_key
.align 4
_RC4_set_key:
L_RC4_set_key_begin:
pushl %ebp
pushl %ebx
pushl %esi
pushl %edi
# edi = key, ebp = len, esi = data
movl 20(%esp),%edi
movl 24(%esp),%ebp
movl 28(%esp),%esi
leal _OPENSSL_ia32cap_P,%edx
# edi -> S-box; esi -> end of key data; ebp = -len, stashed at -4(%edi)
# so the inner loop can rewind when it wraps past the key end
leal 8(%edi),%edi
leal (%esi,%ebp,1),%esi
negl %ebp
xorl %eax,%eax
movl %ebp,-4(%edi)
btl $20,(%edx)
jc L006c1stloop
.align 4,0x90
L007w1stloop:
# identity-fill S[i] = i (word-sized S-box)
movl %eax,(%edi,%eax,4)
addb $1,%al
jnc L007w1stloop
xorl %ecx,%ecx
xorl %edx,%edx
.align 4,0x90
L008w2ndloop:
# dl = (dl + key[i % len] + S[cl]); swap S[cl] and S[dl]
movl (%edi,%ecx,4),%eax
addb (%esi,%ebp,1),%dl
addb %al,%dl
addl $1,%ebp
movl (%edi,%edx,4),%ebx
jnz L009wnowrap
movl -4(%edi),%ebp
L009wnowrap:
movl %eax,(%edi,%edx,4)
movl %ebx,(%edi,%ecx,4)
addb $1,%cl
jnc L008w2ndloop
jmp L010exit
.align 4,0x90
L006c1stloop:
# same schedule with the byte-sized S-box
movb %al,(%edi,%eax,1)
addb $1,%al
jnc L006c1stloop
xorl %ecx,%ecx
xorl %edx,%edx
xorl %ebx,%ebx
.align 4,0x90
L011c2ndloop:
movb (%edi,%ecx,1),%al
addb (%esi,%ebp,1),%dl
addb %al,%dl
addl $1,%ebp
movb (%edi,%edx,1),%bl
jnz L012cnowrap
movl -4(%edi),%ebp
L012cnowrap:
movb %al,(%edi,%edx,1)
movb %bl,(%edi,%ecx,1)
addb $1,%cl
jnc L011c2ndloop
# mark the key as using the char layout for RC4()
movl $-1,256(%edi)
L010exit:
# key->x = key->y = 0
xorl %eax,%eax
movl %eax,-8(%edi)
movl %eax,-4(%edi)
popl %edi
popl %esi
popl %ebx
popl %ebp
ret
# const char *RC4_options(void)
# Returns a static option string: "rc4(4x,int)" normally, or
# "rc4(1x,char)" (12 bytes further into the table) when bit 20 of
# OPENSSL_ia32cap_P is set.  Uses the call/pop PIC idiom to address
# the string table position-independently.
.globl _RC4_options
.align 4
_RC4_options:
L_RC4_options_begin:
call L013pic_point
L013pic_point:
popl %eax
leal L014opts-L013pic_point(%eax),%eax
leal _OPENSSL_ia32cap_P,%edx
btl $20,(%edx)
jnc L015skip
addl $12,%eax
L015skip:
ret
.align 6,0x90
L014opts:
# "rc4(4x,int)\0" and "rc4(1x,char)\0", then the CRYPTOGAMS banner
.byte 114,99,52,40,52,120,44,105,110,116,41,0
.byte 114,99,52,40,49,120,44,99,104,97,114,41,0
.byte 82,67,52,32,102,111,114,32,120,56,54,44,32,67,82,89
.byte 80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114
.byte 111,64,111,112,101,110,115,115,108,46,111,114,103,62,0
.align 6,0x90
.comm _OPENSSL_ia32cap_P,4

563
deps/openssl/asm/x86-macosx-gas/rc5/rc5-586.s

@ -0,0 +1,563 @@
.file "rc5-586.s"
.text
# void RC5_32_encrypt(unsigned long *d, RC5_32_KEY *key)
# i386 cdecl.  Encrypts one 64-bit block (two 32-bit words at d) in place.
# key layout: rounds count at offset 0, expanded subkeys S[] from offset 4.
# Fully unrolled for 16 rounds, with early exits after round 8 and round 12
# when key->rounds is 8 or 12.  Generated by OpenSSL
# crypto/rc5/asm/rc5-586.pl -- logic is order-sensitive, do not hand-edit.
.globl _RC5_32_encrypt
.align 4
_RC5_32_encrypt:
L_RC5_32_encrypt_begin:
pushl %ebp
pushl %esi
pushl %edi
# edx = d (data block), ebp = key
movl 16(%esp),%edx
movl 20(%esp),%ebp
# Load the 2 words
movl (%edx),%edi
movl 4(%edx),%esi
pushl %ebx
# ebx = key->rounds; A += S[0], B += S[1]
movl (%ebp),%ebx
addl 4(%ebp),%edi
addl 8(%ebp),%esi
# per half-round: A = ROL(A ^ B, B) + S[i]  (and symmetrically for B);
# the variable rotate uses cl, hence the moves through ecx
xorl %esi,%edi
movl 12(%ebp),%eax
movl %esi,%ecx
roll %cl,%edi
addl %eax,%edi
xorl %edi,%esi
movl 16(%ebp),%eax
movl %edi,%ecx
roll %cl,%esi
addl %eax,%esi
xorl %esi,%edi
movl 20(%ebp),%eax
movl %esi,%ecx
roll %cl,%edi
addl %eax,%edi
xorl %edi,%esi
movl 24(%ebp),%eax
movl %edi,%ecx
roll %cl,%esi
addl %eax,%esi
xorl %esi,%edi
movl 28(%ebp),%eax
movl %esi,%ecx
roll %cl,%edi
addl %eax,%edi
xorl %edi,%esi
movl 32(%ebp),%eax
movl %edi,%ecx
roll %cl,%esi
addl %eax,%esi
xorl %esi,%edi
movl 36(%ebp),%eax
movl %esi,%ecx
roll %cl,%edi
addl %eax,%edi
xorl %edi,%esi
movl 40(%ebp),%eax
movl %edi,%ecx
roll %cl,%esi
addl %eax,%esi
xorl %esi,%edi
movl 44(%ebp),%eax
movl %esi,%ecx
roll %cl,%edi
addl %eax,%edi
xorl %edi,%esi
movl 48(%ebp),%eax
movl %edi,%ecx
roll %cl,%esi
addl %eax,%esi
xorl %esi,%edi
movl 52(%ebp),%eax
movl %esi,%ecx
roll %cl,%edi
addl %eax,%edi
xorl %edi,%esi
movl 56(%ebp),%eax
movl %edi,%ecx
roll %cl,%esi
addl %eax,%esi
xorl %esi,%edi
movl 60(%ebp),%eax
movl %esi,%ecx
roll %cl,%edi
addl %eax,%edi
xorl %edi,%esi
movl 64(%ebp),%eax
movl %edi,%ecx
roll %cl,%esi
addl %eax,%esi
xorl %esi,%edi
movl 68(%ebp),%eax
movl %esi,%ecx
roll %cl,%edi
addl %eax,%edi
xorl %edi,%esi
movl 72(%ebp),%eax
movl %edi,%ecx
roll %cl,%esi
addl %eax,%esi
# 8 rounds done; stop here for RC5-32/8
cmpl $8,%ebx
je L000rc5_exit
xorl %esi,%edi
movl 76(%ebp),%eax
movl %esi,%ecx
roll %cl,%edi
addl %eax,%edi
xorl %edi,%esi
movl 80(%ebp),%eax
movl %edi,%ecx
roll %cl,%esi
addl %eax,%esi
xorl %esi,%edi
movl 84(%ebp),%eax
movl %esi,%ecx
roll %cl,%edi
addl %eax,%edi
xorl %edi,%esi
movl 88(%ebp),%eax
movl %edi,%ecx
roll %cl,%esi
addl %eax,%esi
xorl %esi,%edi
movl 92(%ebp),%eax
movl %esi,%ecx
roll %cl,%edi
addl %eax,%edi
xorl %edi,%esi
movl 96(%ebp),%eax
movl %edi,%ecx
roll %cl,%esi
addl %eax,%esi
xorl %esi,%edi
movl 100(%ebp),%eax
movl %esi,%ecx
roll %cl,%edi
addl %eax,%edi
xorl %edi,%esi
movl 104(%ebp),%eax
movl %edi,%ecx
roll %cl,%esi
addl %eax,%esi
# 12 rounds done; stop here for RC5-32/12
cmpl $12,%ebx
je L000rc5_exit
xorl %esi,%edi
movl 108(%ebp),%eax
movl %esi,%ecx
roll %cl,%edi
addl %eax,%edi
xorl %edi,%esi
movl 112(%ebp),%eax
movl %edi,%ecx
roll %cl,%esi
addl %eax,%esi
xorl %esi,%edi
movl 116(%ebp),%eax
movl %esi,%ecx
roll %cl,%edi
addl %eax,%edi
xorl %edi,%esi
movl 120(%ebp),%eax
movl %edi,%ecx
roll %cl,%esi
addl %eax,%esi
xorl %esi,%edi
movl 124(%ebp),%eax
movl %esi,%ecx
roll %cl,%edi
addl %eax,%edi
xorl %edi,%esi
movl 128(%ebp),%eax
movl %edi,%ecx
roll %cl,%esi
addl %eax,%esi
xorl %esi,%edi
movl 132(%ebp),%eax
movl %esi,%ecx
roll %cl,%edi
addl %eax,%edi
xorl %edi,%esi
movl 136(%ebp),%eax
movl %edi,%ecx
roll %cl,%esi
addl %eax,%esi
L000rc5_exit:
# store ciphertext words back into d
movl %edi,(%edx)
movl %esi,4(%edx)
popl %ebx
popl %edi
popl %esi
popl %ebp
ret
# void RC5_32_decrypt(unsigned long *d, RC5_32_KEY *key)
# i386 cdecl.  Decrypts one 64-bit block at d in place -- the exact inverse
# of RC5_32_encrypt: subkeys are applied in reverse, each half-round is
# B = ROR(B - S[i], A) ^ A.  Entry points L001rc5_dec_12 / L002rc5_dec_8
# skip the rounds that RC5-32/12 and RC5-32/8 keys never ran.
# Generated code -- do not hand-edit.
.globl _RC5_32_decrypt
.align 4
_RC5_32_decrypt:
L_RC5_32_decrypt_begin:
pushl %ebp
pushl %esi
pushl %edi
# edx = d (data block), ebp = key
movl 16(%esp),%edx
movl 20(%esp),%ebp
# Load the 2 words
movl (%edx),%edi
movl 4(%edx),%esi
pushl %ebx
movl (%ebp),%ebx
cmpl $12,%ebx
je L001rc5_dec_12
cmpl $8,%ebx
je L002rc5_dec_8
# rounds 16..13 (only for 16-round keys)
movl 136(%ebp),%eax
subl %eax,%esi
movl %edi,%ecx
rorl %cl,%esi
xorl %edi,%esi
movl 132(%ebp),%eax
subl %eax,%edi
movl %esi,%ecx
rorl %cl,%edi
xorl %esi,%edi
movl 128(%ebp),%eax
subl %eax,%esi
movl %edi,%ecx
rorl %cl,%esi
xorl %edi,%esi
movl 124(%ebp),%eax
subl %eax,%edi
movl %esi,%ecx
rorl %cl,%edi
xorl %esi,%edi
movl 120(%ebp),%eax
subl %eax,%esi
movl %edi,%ecx
rorl %cl,%esi
xorl %edi,%esi
movl 116(%ebp),%eax
subl %eax,%edi
movl %esi,%ecx
rorl %cl,%edi
xorl %esi,%edi
movl 112(%ebp),%eax
subl %eax,%esi
movl %edi,%ecx
rorl %cl,%esi
xorl %edi,%esi
movl 108(%ebp),%eax
subl %eax,%edi
movl %esi,%ecx
rorl %cl,%edi
xorl %esi,%edi
L001rc5_dec_12:
# rounds 12..9
movl 104(%ebp),%eax
subl %eax,%esi
movl %edi,%ecx
rorl %cl,%esi
xorl %edi,%esi
movl 100(%ebp),%eax
subl %eax,%edi
movl %esi,%ecx
rorl %cl,%edi
xorl %esi,%edi
movl 96(%ebp),%eax
subl %eax,%esi
movl %edi,%ecx
rorl %cl,%esi
xorl %edi,%esi
movl 92(%ebp),%eax
subl %eax,%edi
movl %esi,%ecx
rorl %cl,%edi
xorl %esi,%edi
movl 88(%ebp),%eax
subl %eax,%esi
movl %edi,%ecx
rorl %cl,%esi
xorl %edi,%esi
movl 84(%ebp),%eax
subl %eax,%edi
movl %esi,%ecx
rorl %cl,%edi
xorl %esi,%edi
movl 80(%ebp),%eax
subl %eax,%esi
movl %edi,%ecx
rorl %cl,%esi
xorl %edi,%esi
movl 76(%ebp),%eax
subl %eax,%edi
movl %esi,%ecx
rorl %cl,%edi
xorl %esi,%edi
L002rc5_dec_8:
# rounds 8..1
movl 72(%ebp),%eax
subl %eax,%esi
movl %edi,%ecx
rorl %cl,%esi
xorl %edi,%esi
movl 68(%ebp),%eax
subl %eax,%edi
movl %esi,%ecx
rorl %cl,%edi
xorl %esi,%edi
movl 64(%ebp),%eax
subl %eax,%esi
movl %edi,%ecx
rorl %cl,%esi
xorl %edi,%esi
movl 60(%ebp),%eax
subl %eax,%edi
movl %esi,%ecx
rorl %cl,%edi
xorl %esi,%edi
movl 56(%ebp),%eax
subl %eax,%esi
movl %edi,%ecx
rorl %cl,%esi
xorl %edi,%esi
movl 52(%ebp),%eax
subl %eax,%edi
movl %esi,%ecx
rorl %cl,%edi
xorl %esi,%edi
movl 48(%ebp),%eax
subl %eax,%esi
movl %edi,%ecx
rorl %cl,%esi
xorl %edi,%esi
movl 44(%ebp),%eax
subl %eax,%edi
movl %esi,%ecx
rorl %cl,%edi
xorl %esi,%edi
movl 40(%ebp),%eax
subl %eax,%esi
movl %edi,%ecx
rorl %cl,%esi
xorl %edi,%esi
movl 36(%ebp),%eax
subl %eax,%edi
movl %esi,%ecx
rorl %cl,%edi
xorl %esi,%edi
movl 32(%ebp),%eax
subl %eax,%esi
movl %edi,%ecx
rorl %cl,%esi
xorl %edi,%esi
movl 28(%ebp),%eax
subl %eax,%edi
movl %esi,%ecx
rorl %cl,%edi
xorl %esi,%edi
movl 24(%ebp),%eax
subl %eax,%esi
movl %edi,%ecx
rorl %cl,%esi
xorl %edi,%esi
movl 20(%ebp),%eax
subl %eax,%edi
movl %esi,%ecx
rorl %cl,%edi
xorl %esi,%edi
movl 16(%ebp),%eax
subl %eax,%esi
movl %edi,%ecx
rorl %cl,%esi
xorl %edi,%esi
movl 12(%ebp),%eax
subl %eax,%edi
movl %esi,%ecx
rorl %cl,%edi
xorl %esi,%edi
# undo the initial whitening: B -= S[1], A -= S[0]
subl 8(%ebp),%esi
subl 4(%ebp),%edi
L003rc5_exit:
# store plaintext words back into d
movl %edi,(%edx)
movl %esi,4(%edx)
popl %ebx
popl %edi
popl %esi
popl %ebp
ret
# void RC5_32_cbc_encrypt(const unsigned char *in, unsigned char *out,
#                         long length, RC5_32_KEY *ks, unsigned char *iv,
#                         int enc)
# i386 cdecl CBC glue (perlasm cbc.pl pattern) around RC5_32_encrypt /
# RC5_32_decrypt.  Full 8-byte blocks are processed in the main loops;
# a trailing partial block is handled by the ej*/dj* byte tails.
#
# FIX (matches the upstream perlasm/cbc.pl tail-processing correction):
# the decrypt tail originally stored the last plaintext bytes through
# %esi (the *input* pointer) at labels L026dj2/L027dj1, and L025dj3 used
# shll where the symmetric dj7 path uses shrl, destroying bytes 1 and 0
# before they were stored.  All tail stores now go through %edi (out),
# and dj3 shifts right so %ch/%cl hold bytes 1 and 0 as intended.
.globl _RC5_32_cbc_encrypt
.align 4
_RC5_32_cbc_encrypt:
L_RC5_32_cbc_encrypt_begin:
pushl %ebp
pushl %ebx
pushl %esi
pushl %edi
# ebp = length
movl 28(%esp),%ebp
# getting iv ptr from parameter 4
movl 36(%esp),%ebx
movl (%ebx),%esi
movl 4(%ebx),%edi
# push two copies of the IV: one working copy at 0/4(%esp) (the in-place
# block buffer passed to the cipher), one spare at 8/12(%esp)
pushl %edi
pushl %esi
pushl %edi
pushl %esi
movl %esp,%ebx
movl 36(%esp),%esi
movl 40(%esp),%edi
# getting encrypt flag from parameter 5
movl 56(%esp),%ecx
# get and push parameter 3
movl 48(%esp),%eax
pushl %eax
pushl %ebx
cmpl $0,%ecx
jz L004decrypt
# ---- encrypt path: ebp = length rounded down to 8-byte blocks ----
andl $4294967288,%ebp
# eax/ebx = current chaining value (starts as the IV)
movl 8(%esp),%eax
movl 12(%esp),%ebx
jz L005encrypt_finish
L006encrypt_loop:
# C_i = E(P_i ^ C_{i-1})
movl (%esi),%ecx
movl 4(%esi),%edx
xorl %ecx,%eax
xorl %edx,%ebx
movl %eax,8(%esp)
movl %ebx,12(%esp)
call L_RC5_32_encrypt_begin
movl 8(%esp),%eax
movl 12(%esp),%ebx
movl %eax,(%edi)
movl %ebx,4(%edi)
addl $8,%esi
addl $8,%edi
subl $8,%ebp
jnz L006encrypt_loop
L005encrypt_finish:
# ebp = length & 7 = leftover byte count
movl 52(%esp),%ebp
andl $7,%ebp
jz L007finish
# PIC jump into the ej* ladder: entry point depends on leftover count,
# gathering 1..7 trailing input bytes into ecx:edx (zero-padded)
call L008PIC_point
L008PIC_point:
popl %edx
leal L009cbc_enc_jmp_table-L008PIC_point(%edx),%ecx
movl (%ecx,%ebp,4),%ebp
addl %edx,%ebp
xorl %ecx,%ecx
xorl %edx,%edx
jmp *%ebp
L010ej7:
movb 6(%esi),%dh
shll $8,%edx
L011ej6:
movb 5(%esi),%dh
L012ej5:
movb 4(%esi),%dl
L013ej4:
movl (%esi),%ecx
jmp L014ejend
L015ej3:
movb 2(%esi),%ch
shll $8,%ecx
L016ej2:
movb 1(%esi),%ch
L017ej1:
movb (%esi),%cl
L014ejend:
# encrypt the padded final block
xorl %ecx,%eax
xorl %edx,%ebx
movl %eax,8(%esp)
movl %ebx,12(%esp)
call L_RC5_32_encrypt_begin
movl 8(%esp),%eax
movl 12(%esp),%ebx
movl %eax,(%edi)
movl %ebx,4(%edi)
jmp L007finish
L004decrypt:
# ---- decrypt path ----
andl $4294967288,%ebp
# eax/ebx = previous ciphertext block (starts as the IV), kept at 16/20(%esp)
movl 16(%esp),%eax
movl 20(%esp),%ebx
jz L018decrypt_finish
L019decrypt_loop:
# P_i = D(C_i) ^ C_{i-1}
movl (%esi),%eax
movl 4(%esi),%ebx
movl %eax,8(%esp)
movl %ebx,12(%esp)
call L_RC5_32_decrypt_begin
movl 8(%esp),%eax
movl 12(%esp),%ebx
movl 16(%esp),%ecx
movl 20(%esp),%edx
xorl %eax,%ecx
xorl %ebx,%edx
# remember this ciphertext block as the next chaining value
movl (%esi),%eax
movl 4(%esi),%ebx
movl %ecx,(%edi)
movl %edx,4(%edi)
movl %eax,16(%esp)
movl %ebx,20(%esp)
addl $8,%esi
addl $8,%edi
subl $8,%ebp
jnz L019decrypt_loop
L018decrypt_finish:
movl 52(%esp),%ebp
andl $7,%ebp
jz L007finish
# decrypt the final (full) ciphertext block, xor with the chain value,
# then store only the bytes the dj* ladder below covers
movl (%esi),%eax
movl 4(%esi),%ebx
movl %eax,8(%esp)
movl %ebx,12(%esp)
call L_RC5_32_decrypt_begin
movl 8(%esp),%eax
movl 12(%esp),%ebx
movl 16(%esp),%ecx
movl 20(%esp),%edx
xorl %eax,%ecx
xorl %ebx,%edx
movl (%esi),%eax
movl 4(%esi),%ebx
L020dj7:
rorl $16,%edx
movb %dl,6(%edi)
shrl $16,%edx
L021dj6:
movb %dh,5(%edi)
L022dj5:
movb %dl,4(%edi)
L023dj4:
movl %ecx,(%edi)
jmp L024djend
L025dj3:
rorl $16,%ecx
movb %cl,2(%edi)
# was shll: must shift right so bytes 1 and 0 land in %ch/%cl for dj2/dj1
shrl $16,%ecx
L026dj2:
# was 1(%esi): plaintext must go to the output buffer, not the input
movb %ch,1(%edi)
L027dj1:
# was (%esi): plaintext must go to the output buffer, not the input
movb %cl,(%edi)
L024djend:
jmp L007finish
L007finish:
# write the final chaining value back into iv, unwind the 24 bytes of
# locals pushed above, restore callee-saved registers
movl 60(%esp),%ecx
addl $24,%esp
movl %eax,(%ecx)
movl %ebx,4(%ecx)
popl %edi
popl %esi
popl %ebx
popl %ebp
ret
.align 6,0x90
L009cbc_enc_jmp_table:
# indexed by leftover-byte count 0..7; entries are PIC offsets from L008PIC_point
.long 0
.long L017ej1-L008PIC_point
.long L016ej2-L008PIC_point
.long L015ej3-L008PIC_point
.long L013ej4-L008PIC_point
.long L012ej5-L008PIC_point
.long L011ej6-L008PIC_point
.long L010ej7-L008PIC_point
.align 6,0x90

2123
deps/openssl/asm/x86-macosx-gas/ripemd/rmd-586.s

File diff suppressed because it is too large

1520
deps/openssl/asm/x86-macosx-gas/sha/sha1-586.s

File diff suppressed because it is too large

259
deps/openssl/asm/x86-macosx-gas/sha/sha256-586.s

@ -0,0 +1,259 @@
.file "sha512-586.s"
.text
# void sha256_block_data_order(SHA256_CTX *ctx, const void *in, size_t num)
# i386 cdecl.  Processes num 64-byte blocks.  Generated by OpenSSL
# crypto/sha/asm/sha256-586.pl -- logic is order-sensitive, do not
# hand-edit.  The message schedule and working variables live on a
# dynamically aligned stack frame; the round loops terminate by comparing
# the just-loaded K256 constant against the last constant of each range
# rather than by counting.
.globl _sha256_block_data_order
.align 4
_sha256_block_data_order:
L_sha256_block_data_order_begin:
pushl %ebp
pushl %ebx
pushl %esi
pushl %edi
# esi = ctx, edi = in, eax = num; ebx saves the original esp
movl 20(%esp),%esi
movl 24(%esp),%edi
movl 28(%esp),%eax
movl %esp,%ebx
# call/pop PIC idiom: ebp -> K256 constant table
call L000pic_point
L000pic_point:
popl %ebp
leal L001K256-L000pic_point(%ebp),%ebp
# carve out a 64-byte-aligned frame; eax -> end of input
subl $16,%esp
andl $-64,%esp
shll $6,%eax
addl %edi,%eax
movl %esi,(%esp)
movl %edi,4(%esp)
movl %eax,8(%esp)
movl %ebx,12(%esp)
.align 4,0x90
L002loop:
# byte-swap the 16 big-endian message words and push them (reverse order)
movl (%edi),%eax
movl 4(%edi),%ebx
movl 8(%edi),%ecx
movl 12(%edi),%edx
bswap %eax
bswap %ebx
bswap %ecx
bswap %edx
pushl %eax
pushl %ebx
pushl %ecx
pushl %edx
movl 16(%edi),%eax
movl 20(%edi),%ebx
movl 24(%edi),%ecx
movl 28(%edi),%edx
bswap %eax
bswap %ebx
bswap %ecx
bswap %edx
pushl %eax
pushl %ebx
pushl %ecx
pushl %edx
movl 32(%edi),%eax
movl 36(%edi),%ebx
movl 40(%edi),%ecx
movl 44(%edi),%edx
bswap %eax
bswap %ebx
bswap %ecx
bswap %edx
pushl %eax
pushl %ebx
pushl %ecx
pushl %edx
movl 48(%edi),%eax
movl 52(%edi),%ebx
movl 56(%edi),%ecx
movl 60(%edi),%edx
bswap %eax
bswap %ebx
bswap %ecx
bswap %edx
pushl %eax
pushl %ebx
pushl %ecx
pushl %edx
# advance input pointer, reserve the 8-word working-variable area, and
# load the chaining state: eax = A, edx = E, B..D and F..H in the frame
addl $64,%edi
subl $32,%esp
movl %edi,100(%esp)
movl (%esi),%eax
movl 4(%esi),%ebx
movl 8(%esi),%ecx
movl 12(%esi),%edi
movl %ebx,4(%esp)
movl %ecx,8(%esp)
movl %edi,12(%esp)
movl 16(%esi),%edx
movl 20(%esi),%ebx
movl 24(%esi),%ecx
movl 28(%esi),%edi
movl %ebx,20(%esp)
movl %ecx,24(%esp)
movl %edi,28(%esp)
.align 4,0x90
L00300_15:
# rounds 0-15: Sigma1(E), Ch(E,F,G), Sigma0(A), Maj(A,B,C); the frame is
# shifted down 4 bytes per round (subl $4,%esp) instead of rotating the
# eight working variables
movl 92(%esp),%ebx
movl %edx,%ecx
rorl $6,%ecx
movl %edx,%edi
rorl $11,%edi
movl 20(%esp),%esi
xorl %edi,%ecx
rorl $14,%edi
xorl %edi,%ecx
movl 24(%esp),%edi
addl %ecx,%ebx
movl %edx,16(%esp)
xorl %edi,%esi
movl %eax,%ecx
andl %edx,%esi
movl 12(%esp),%edx
xorl %edi,%esi
movl %eax,%edi
addl %esi,%ebx
rorl $2,%ecx
addl 28(%esp),%ebx
rorl $13,%edi
movl 4(%esp),%esi
xorl %edi,%ecx
rorl $9,%edi
addl %ebx,%edx
xorl %edi,%ecx
movl 8(%esp),%edi
addl %ecx,%ebx
movl %eax,(%esp)
movl %eax,%ecx
subl $4,%esp
orl %esi,%eax
andl %esi,%ecx
andl %edi,%eax
movl (%ebp),%esi
orl %ecx,%eax
addl $4,%ebp
addl %ebx,%eax
addl %esi,%edx
addl %esi,%eax
# 3248222580 = K256[15]: last round of this range just executed
cmpl $3248222580,%esi
jne L00300_15
movl 152(%esp),%ebx
.align 4,0x90
L00416_63:
# rounds 16-63: same round function plus the message-schedule expansion
# W[t] = sigma1(W[t-2]) + W[t-7] + sigma0(W[t-15]) + W[t-16]
movl %ebx,%esi
movl 100(%esp),%ecx
shrl $3,%ebx
rorl $7,%esi
xorl %esi,%ebx
rorl $11,%esi
movl %ecx,%edi
xorl %esi,%ebx
shrl $10,%ecx
movl 156(%esp),%esi
rorl $17,%edi
xorl %edi,%ecx
rorl $2,%edi
addl %esi,%ebx
xorl %ecx,%edi
addl %edi,%ebx
movl %edx,%ecx
addl 120(%esp),%ebx
rorl $6,%ecx
movl %edx,%edi
rorl $11,%edi
movl 20(%esp),%esi
xorl %edi,%ecx
rorl $14,%edi
movl %ebx,92(%esp)
xorl %edi,%ecx
movl 24(%esp),%edi
addl %ecx,%ebx
movl %edx,16(%esp)
xorl %edi,%esi
movl %eax,%ecx
andl %edx,%esi
movl 12(%esp),%edx
xorl %edi,%esi
movl %eax,%edi
addl %esi,%ebx
rorl $2,%ecx
addl 28(%esp),%ebx
rorl $13,%edi
movl 4(%esp),%esi
xorl %edi,%ecx
rorl $9,%edi
addl %ebx,%edx
xorl %edi,%ecx
movl 8(%esp),%edi
addl %ecx,%ebx
movl %eax,(%esp)
movl %eax,%ecx
subl $4,%esp
orl %esi,%eax
andl %esi,%ecx
andl %edi,%eax
movl (%ebp),%esi
orl %ecx,%eax
addl $4,%ebp
addl %ebx,%eax
movl 152(%esp),%ebx
addl %esi,%edx
addl %esi,%eax
# 3329325298 = K256[63]: last round constant
cmpl $3329325298,%esi
jne L00416_63
# fold this block's result into the chaining state in ctx
movl 352(%esp),%esi
movl 4(%esp),%ebx
movl 8(%esp),%ecx
movl 12(%esp),%edi
addl (%esi),%eax
addl 4(%esi),%ebx
addl 8(%esi),%ecx
addl 12(%esi),%edi
movl %eax,(%esi)
movl %ebx,4(%esi)
movl %ecx,8(%esi)
movl %edi,12(%esi)
movl 20(%esp),%eax
movl 24(%esp),%ebx
movl 28(%esp),%ecx
movl 356(%esp),%edi
addl 16(%esi),%edx
addl 20(%esi),%eax
addl 24(%esi),%ebx
addl 28(%esi),%ecx
movl %edx,16(%esi)
movl %eax,20(%esi)
movl %ebx,24(%esi)
movl %ecx,28(%esi)
# pop the per-block frame, rewind ebp to the start of K256, loop while
# more input remains, then restore the caller's esp saved at 12(%esp)
addl $352,%esp
subl $256,%ebp
cmpl 8(%esp),%edi
jb L002loop
movl 12(%esp),%esp
popl %edi
popl %esi
popl %ebx
popl %ebp
ret
.align 6,0x90
L001K256:
# the 64 SHA-256 round constants (fractional parts of cube roots of primes)
.long 1116352408,1899447441,3049323471,3921009573
.long 961987163,1508970993,2453635748,2870763221
.long 3624381080,310598401,607225278,1426881987
.long 1925078388,2162078206,2614888103,3248222580
.long 3835390401,4022224774,264347078,604807628
.long 770255983,1249150122,1555081692,1996064986
.long 2554220882,2821834349,2952996808,3210313671
.long 3336571891,3584528711,113926993,338241895
.long 666307205,773529912,1294757372,1396182291
.long 1695183700,1986661051,2177026350,2456956037
.long 2730485921,2820302411,3259730800,3345764771
.long 3516065817,3600352804,4094571909,275423344
.long 430227734,506948616,659060556,883997877
.long 958139571,1322822218,1537002063,1747873779
.long 1955562222,2024104815,2227730452,2361852424
.long 2428436474,2756734187,3204031479,3329325298
# "SHA256 block transform for x86, CRYPTOGAMS by <appro@openssl.org>"
.byte 83,72,65,50,53,54,32,98,108,111,99,107,32,116,114,97
.byte 110,115,102,111,114,109,32,102,111,114,32,120,56,54,44,32
.byte 67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97
.byte 112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103
.byte 62,0

561
deps/openssl/asm/x86-macosx-gas/sha/sha512-586.s

@ -0,0 +1,561 @@
.file "sha512-586.s"
.text
# void sha512_block_data_order(SHA512_CTX *ctx, const void *in, size_t num)
# Generated by OpenSSL's perlasm (sha512-586.pl) for 32-bit x86, Mach-O/gas
# (AT&T syntax).  This is the pure-x86 path: each 64-bit value is handled as
# a pair of 32-bit words (low dword first), using add/adc and shift/xor pairs.
# In:  20(%esp)=ctx, 24(%esp)=in, 28(%esp)=num (count of 128-byte blocks).
# Clobbers all GPRs (callee-saved ones are pushed/popped); cdecl.
.globl _sha512_block_data_order
.align 4
_sha512_block_data_order:
L_sha512_block_data_order_begin:
	pushl %ebp
	pushl %ebx
	pushl %esi
	pushl %edi
	movl 20(%esp),%esi
	movl 24(%esp),%edi
	movl 28(%esp),%eax
	movl %esp,%ebx
	# Position-independent load of the K512 constant table into %ebp.
	call L000pic_point
L000pic_point:
	popl %ebp
	leal L001K512-L000pic_point(%ebp),%ebp
	subl $16,%esp
	andl $-64,%esp
	# %eax = end-of-input pointer: in + num*128.
	shll $7,%eax
	addl %edi,%eax
	# Saved at the top of the aligned frame: ctx, in, input end, caller %esp.
	movl %esi,(%esp)
	movl %edi,4(%esp)
	movl %eax,8(%esp)
	movl %ebx,12(%esp)
.align 4,0x90
L002loop_x86:
	# Load the 128-byte message block, byte-swapping each 32-bit half to
	# big-endian order, and push all 32 dwords onto the stack (W[15]..W[0]).
	movl (%edi),%eax
	movl 4(%edi),%ebx
	movl 8(%edi),%ecx
	movl 12(%edi),%edx
	bswap %eax
	bswap %ebx
	bswap %ecx
	bswap %edx
	pushl %eax
	pushl %ebx
	pushl %ecx
	pushl %edx
	movl 16(%edi),%eax
	movl 20(%edi),%ebx
	movl 24(%edi),%ecx
	movl 28(%edi),%edx
	bswap %eax
	bswap %ebx
	bswap %ecx
	bswap %edx
	pushl %eax
	pushl %ebx
	pushl %ecx
	pushl %edx
	movl 32(%edi),%eax
	movl 36(%edi),%ebx
	movl 40(%edi),%ecx
	movl 44(%edi),%edx
	bswap %eax
	bswap %ebx
	bswap %ecx
	bswap %edx
	pushl %eax
	pushl %ebx
	pushl %ecx
	pushl %edx
	movl 48(%edi),%eax
	movl 52(%edi),%ebx
	movl 56(%edi),%ecx
	movl 60(%edi),%edx
	bswap %eax
	bswap %ebx
	bswap %ecx
	bswap %edx
	pushl %eax
	pushl %ebx
	pushl %ecx
	pushl %edx
	movl 64(%edi),%eax
	movl 68(%edi),%ebx
	movl 72(%edi),%ecx
	movl 76(%edi),%edx
	bswap %eax
	bswap %ebx
	bswap %ecx
	bswap %edx
	pushl %eax
	pushl %ebx
	pushl %ecx
	pushl %edx
	movl 80(%edi),%eax
	movl 84(%edi),%ebx
	movl 88(%edi),%ecx
	movl 92(%edi),%edx
	bswap %eax
	bswap %ebx
	bswap %ecx
	bswap %edx
	pushl %eax
	pushl %ebx
	pushl %ecx
	pushl %edx
	movl 96(%edi),%eax
	movl 100(%edi),%ebx
	movl 104(%edi),%ecx
	movl 108(%edi),%edx
	bswap %eax
	bswap %ebx
	bswap %ecx
	bswap %edx
	pushl %eax
	pushl %ebx
	pushl %ecx
	pushl %edx
	movl 112(%edi),%eax
	movl 116(%edi),%ebx
	movl 120(%edi),%ecx
	movl 124(%edi),%edx
	bswap %eax
	bswap %ebx
	bswap %ecx
	bswap %edx
	pushl %eax
	pushl %ebx
	pushl %ecx
	pushl %edx
	addl $128,%edi
	# Working frame below the message schedule; save advanced input pointer.
	subl $72,%esp
	movl %edi,204(%esp)
	leal 8(%esp),%edi
	movl $16,%ecx
	# Data-encoded instructions: 0xA5F3F689 = "mov %esi,%esi; rep movsl" —
	# copies the 8 64-bit hash state words (16 dwords) from ctx into the frame.
	.long 2784229001
.align 4,0x90
L00300_15_x86:
	# Rounds 0..15.  40/44(%esp) = e (lo/hi); compute Sigma1(e) into ebx:eax
	# via paired 32-bit shifts/xors emulating 64-bit rotates by 14, 18, 41.
	movl 40(%esp),%ecx
	movl 44(%esp),%edx
	movl %ecx,%esi
	shrl $9,%ecx
	movl %edx,%edi
	shrl $9,%edx
	movl %ecx,%ebx
	shll $14,%esi
	movl %edx,%eax
	shll $14,%edi
	xorl %esi,%ebx
	shrl $5,%ecx
	xorl %edi,%eax
	shrl $5,%edx
	xorl %ecx,%eax
	shll $4,%esi
	xorl %edx,%ebx
	shll $4,%edi
	xorl %esi,%ebx
	shrl $4,%ecx
	xorl %edi,%eax
	shrl $4,%edx
	xorl %ecx,%eax
	shll $5,%esi
	xorl %edx,%ebx
	shll $5,%edi
	xorl %esi,%eax
	xorl %edi,%ebx
	# Ch(e,f,g) = ((f ^ g) & e) ^ g, computed per 32-bit half; then
	# T1 = h + Sigma1(e) + Ch(e,f,g) + K[i] + W[i].
	movl 48(%esp),%ecx
	movl 52(%esp),%edx
	movl 56(%esp),%esi
	movl 60(%esp),%edi
	addl 64(%esp),%eax
	adcl 68(%esp),%ebx
	xorl %esi,%ecx
	xorl %edi,%edx
	andl 40(%esp),%ecx
	andl 44(%esp),%edx
	addl 192(%esp),%eax
	adcl 196(%esp),%ebx
	xorl %esi,%ecx
	xorl %edi,%edx
	movl (%ebp),%esi
	movl 4(%ebp),%edi
	addl %ecx,%eax
	adcl %edx,%ebx
	movl 32(%esp),%ecx
	movl 36(%esp),%edx
	addl %esi,%eax
	adcl %edi,%ebx
	movl %eax,(%esp)
	movl %ebx,4(%esp)
	# d += T1; new e stored at 32/36(%esp).
	addl %ecx,%eax
	adcl %edx,%ebx
	movl 8(%esp),%ecx
	movl 12(%esp),%edx
	movl %eax,32(%esp)
	movl %ebx,36(%esp)
	# Sigma0(a): 64-bit rotates by 28, 34, 39 via paired 32-bit shifts.
	movl %ecx,%esi
	shrl $2,%ecx
	movl %edx,%edi
	shrl $2,%edx
	movl %ecx,%ebx
	shll $4,%esi
	movl %edx,%eax
	shll $4,%edi
	xorl %esi,%ebx
	shrl $5,%ecx
	xorl %edi,%eax
	shrl $5,%edx
	xorl %ecx,%ebx
	shll $21,%esi
	xorl %edx,%eax
	shll $21,%edi
	xorl %esi,%eax
	shrl $21,%ecx
	xorl %edi,%ebx
	shrl $21,%edx
	xorl %ecx,%eax
	shll $5,%esi
	xorl %edx,%ebx
	shll $5,%edi
	xorl %esi,%eax
	xorl %edi,%ebx
	# Maj(a,b,c) = (a & b) | ((a | b) & c), per half; new a = T1 + Sigma0 + Maj.
	movl 8(%esp),%ecx
	movl 12(%esp),%edx
	movl 16(%esp),%esi
	movl 20(%esp),%edi
	addl (%esp),%eax
	adcl 4(%esp),%ebx
	orl %esi,%ecx
	orl %edi,%edx
	andl 24(%esp),%ecx
	andl 28(%esp),%edx
	andl 8(%esp),%esi
	andl 12(%esp),%edi
	orl %esi,%ecx
	orl %edi,%edx
	addl %ecx,%eax
	adcl %edx,%ebx
	movl %eax,(%esp)
	movl %ebx,4(%esp)
	# Advance K pointer, rotate the frame down 8 bytes; exit after K512[15],
	# whose low byte is 0x94 (148): K512[15] = 0xc19bf174cf692694.
	movb (%ebp),%dl
	subl $8,%esp
	leal 8(%ebp),%ebp
	cmpb $148,%dl
	jne L00300_15_x86
.align 4,0x90
L00416_79_x86:
	# Rounds 16..79: first extend the message schedule.
	# sigma0(W[i-15]): rotr 1, rotr 8, shr 7 — paired 32-bit shifts.
	movl 312(%esp),%ecx
	movl 316(%esp),%edx
	movl %ecx,%esi
	shrl $1,%ecx
	movl %edx,%edi
	shrl $1,%edx
	movl %ecx,%eax
	shll $24,%esi
	movl %edx,%ebx
	shll $24,%edi
	xorl %esi,%ebx
	shrl $6,%ecx
	xorl %edi,%eax
	shrl $6,%edx
	xorl %ecx,%eax
	shll $7,%esi
	xorl %edx,%ebx
	shll $1,%edi
	xorl %esi,%ebx
	shrl $1,%ecx
	xorl %edi,%eax
	shrl $1,%edx
	xorl %ecx,%eax
	shll $6,%edi
	xorl %edx,%ebx
	xorl %edi,%eax
	movl %eax,(%esp)
	movl %ebx,4(%esp)
	# sigma1(W[i-2]): rotr 19, rotr 61, shr 6.
	movl 208(%esp),%ecx
	movl 212(%esp),%edx
	movl %ecx,%esi
	shrl $6,%ecx
	movl %edx,%edi
	shrl $6,%edx
	movl %ecx,%eax
	shll $3,%esi
	movl %edx,%ebx
	shll $3,%edi
	xorl %esi,%eax
	shrl $13,%ecx
	xorl %edi,%ebx
	shrl $13,%edx
	xorl %ecx,%eax
	shll $10,%esi
	xorl %edx,%ebx
	shll $10,%edi
	xorl %esi,%ebx
	shrl $10,%ecx
	xorl %edi,%eax
	shrl $10,%edx
	xorl %ecx,%ebx
	shll $13,%edi
	xorl %edx,%eax
	xorl %edi,%eax
	# W[i] = sigma1(W[i-2]) + W[i-7] + sigma0(W[i-15]) + W[i-16].
	movl 320(%esp),%ecx
	movl 324(%esp),%edx
	addl (%esp),%eax
	adcl 4(%esp),%ebx
	movl 248(%esp),%esi
	movl 252(%esp),%edi
	addl %ecx,%eax
	adcl %edx,%ebx
	addl %esi,%eax
	adcl %edi,%ebx
	movl %eax,192(%esp)
	movl %ebx,196(%esp)
	# Round body identical to rounds 0..15: Sigma1(e).
	movl 40(%esp),%ecx
	movl 44(%esp),%edx
	movl %ecx,%esi
	shrl $9,%ecx
	movl %edx,%edi
	shrl $9,%edx
	movl %ecx,%ebx
	shll $14,%esi
	movl %edx,%eax
	shll $14,%edi
	xorl %esi,%ebx
	shrl $5,%ecx
	xorl %edi,%eax
	shrl $5,%edx
	xorl %ecx,%eax
	shll $4,%esi
	xorl %edx,%ebx
	shll $4,%edi
	xorl %esi,%ebx
	shrl $4,%ecx
	xorl %edi,%eax
	shrl $4,%edx
	xorl %ecx,%eax
	shll $5,%esi
	xorl %edx,%ebx
	shll $5,%edi
	xorl %esi,%eax
	xorl %edi,%ebx
	# Ch(e,f,g) and T1 accumulation.
	movl 48(%esp),%ecx
	movl 52(%esp),%edx
	movl 56(%esp),%esi
	movl 60(%esp),%edi
	addl 64(%esp),%eax
	adcl 68(%esp),%ebx
	xorl %esi,%ecx
	xorl %edi,%edx
	andl 40(%esp),%ecx
	andl 44(%esp),%edx
	addl 192(%esp),%eax
	adcl 196(%esp),%ebx
	xorl %esi,%ecx
	xorl %edi,%edx
	movl (%ebp),%esi
	movl 4(%ebp),%edi
	addl %ecx,%eax
	adcl %edx,%ebx
	movl 32(%esp),%ecx
	movl 36(%esp),%edx
	addl %esi,%eax
	adcl %edi,%ebx
	movl %eax,(%esp)
	movl %ebx,4(%esp)
	addl %ecx,%eax
	adcl %edx,%ebx
	movl 8(%esp),%ecx
	movl 12(%esp),%edx
	movl %eax,32(%esp)
	movl %ebx,36(%esp)
	# Sigma0(a).
	movl %ecx,%esi
	shrl $2,%ecx
	movl %edx,%edi
	shrl $2,%edx
	movl %ecx,%ebx
	shll $4,%esi
	movl %edx,%eax
	shll $4,%edi
	xorl %esi,%ebx
	shrl $5,%ecx
	xorl %edi,%eax
	shrl $5,%edx
	xorl %ecx,%ebx
	shll $21,%esi
	xorl %edx,%eax
	shll $21,%edi
	xorl %esi,%eax
	shrl $21,%ecx
	xorl %edi,%ebx
	shrl $21,%edx
	xorl %ecx,%eax
	shll $5,%esi
	xorl %edx,%ebx
	shll $5,%edi
	xorl %esi,%eax
	xorl %edi,%ebx
	# Maj(a,b,c); new a.
	movl 8(%esp),%ecx
	movl 12(%esp),%edx
	movl 16(%esp),%esi
	movl 20(%esp),%edi
	addl (%esp),%eax
	adcl 4(%esp),%ebx
	orl %esi,%ecx
	orl %edi,%edx
	andl 24(%esp),%ecx
	andl 28(%esp),%edx
	andl 8(%esp),%esi
	andl 12(%esp),%edi
	orl %esi,%ecx
	orl %edi,%edx
	addl %ecx,%eax
	adcl %edx,%ebx
	movl %eax,(%esp)
	movl %ebx,4(%esp)
	# Exit after K512[79] = 0x6c44198c4a475817, whose low byte is 0x17 (23).
	movb (%ebp),%dl
	subl $8,%esp
	leal 8(%ebp),%ebp
	cmpb $23,%dl
	jne L00416_79_x86
	# Add the working variables back into the context (8 x 64-bit adds).
	movl 840(%esp),%esi
	movl 844(%esp),%edi
	movl (%esi),%eax
	movl 4(%esi),%ebx
	movl 8(%esi),%ecx
	movl 12(%esi),%edx
	addl 8(%esp),%eax
	adcl 12(%esp),%ebx
	movl %eax,(%esi)
	movl %ebx,4(%esi)
	addl 16(%esp),%ecx
	adcl 20(%esp),%edx
	movl %ecx,8(%esi)
	movl %edx,12(%esi)
	movl 16(%esi),%eax
	movl 20(%esi),%ebx
	movl 24(%esi),%ecx
	movl 28(%esi),%edx
	addl 24(%esp),%eax
	adcl 28(%esp),%ebx
	movl %eax,16(%esi)
	movl %ebx,20(%esi)
	addl 32(%esp),%ecx
	adcl 36(%esp),%edx
	movl %ecx,24(%esi)
	movl %edx,28(%esi)
	movl 32(%esi),%eax
	movl 36(%esi),%ebx
	movl 40(%esi),%ecx
	movl 44(%esi),%edx
	addl 40(%esp),%eax
	adcl 44(%esp),%ebx
	movl %eax,32(%esi)
	movl %ebx,36(%esi)
	addl 48(%esp),%ecx
	adcl 52(%esp),%edx
	movl %ecx,40(%esi)
	movl %edx,44(%esi)
	movl 48(%esi),%eax
	movl 52(%esi),%ebx
	movl 56(%esi),%ecx
	movl 60(%esi),%edx
	addl 56(%esp),%eax
	adcl 60(%esp),%ebx
	movl %eax,48(%esi)
	movl %ebx,52(%esi)
	addl 64(%esp),%ecx
	adcl 68(%esp),%edx
	movl %ecx,56(%esi)
	movl %edx,60(%esi)
	# Unwind frame, rewind K pointer (80 entries * 8 bytes), loop while
	# the input pointer is below the saved end pointer.
	addl $840,%esp
	subl $640,%ebp
	cmpl 8(%esp),%edi
	jb L002loop_x86
	movl 12(%esp),%esp
	popl %edi
	popl %esi
	popl %ebx
	popl %ebp
	ret
.align 6,0x90
L001K512:
	# FIPS 180-4 SHA-512 round constants; each pair is (low dword, high dword).
.long 3609767458,1116352408
.long 602891725,1899447441
.long 3964484399,3049323471
.long 2173295548,3921009573
.long 4081628472,961987163
.long 3053834265,1508970993
.long 2937671579,2453635748
.long 3664609560,2870763221
.long 2734883394,3624381080
.long 1164996542,310598401
.long 1323610764,607225278
.long 3590304994,1426881987
.long 4068182383,1925078388
.long 991336113,2162078206
.long 633803317,2614888103
.long 3479774868,3248222580
.long 2666613458,3835390401
.long 944711139,4022224774
.long 2341262773,264347078
.long 2007800933,604807628
.long 1495990901,770255983
.long 1856431235,1249150122
.long 3175218132,1555081692
.long 2198950837,1996064986
.long 3999719339,2554220882
.long 766784016,2821834349
.long 2566594879,2952996808
.long 3203337956,3210313671
.long 1034457026,3336571891
.long 2466948901,3584528711
.long 3758326383,113926993
.long 168717936,338241895
.long 1188179964,666307205
.long 1546045734,773529912
.long 1522805485,1294757372
.long 2643833823,1396182291
.long 2343527390,1695183700
.long 1014477480,1986661051
.long 1206759142,2177026350
.long 344077627,2456956037
.long 1290863460,2730485921
.long 3158454273,2820302411
.long 3505952657,3259730800
.long 106217008,3345764771
.long 3606008344,3516065817
.long 1432725776,3600352804
.long 1467031594,4094571909
.long 851169720,275423344
.long 3100823752,430227734
.long 1363258195,506948616
.long 3750685593,659060556
.long 3785050280,883997877
.long 3318307427,958139571
.long 3812723403,1322822218
.long 2003034995,1537002063
.long 3602036899,1747873779
.long 1575990012,1955562222
.long 1125592928,2024104815
.long 2716904306,2227730452
.long 442776044,2361852424
.long 593698344,2428436474
.long 3733110249,2756734187
.long 2999351573,3204031479
.long 3815920427,3329325298
.long 3928383900,3391569614
.long 566280711,3515267271
.long 3454069534,3940187606
.long 4000239992,4118630271
.long 1914138554,116418474
.long 2731055270,174292421
.long 3203993006,289380356
.long 320620315,460393269
.long 587496836,685471733
.long 1086792851,852142971
.long 365543100,1017036298
.long 2618297676,1126000580
.long 3409855158,1288033470
.long 4234509866,1501505948
.long 987167468,1607167915
.long 1246189591,1816402316
# ASCII: "SHA512 block transform for x86, CRYPTOGAMS by <appro@openssl.org>"
.byte 83,72,65,53,49,50,32,98,108,111,99,107,32,116,114,97
.byte 110,115,102,111,114,109,32,102,111,114,32,120,56,54,44,32
.byte 67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97
.byte 112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103
.byte 62,0

1103
deps/openssl/asm/x86-macosx-gas/whrlpool/wp-mmx.s

File diff suppressed because it is too large

261
deps/openssl/asm/x86-macosx-gas/x86cpuid.s

@ -0,0 +1,261 @@
.file "x86cpuid.s"
.text
.globl _OPENSSL_ia32_cpuid
.align 4
# unsigned long long OPENSSL_ia32_cpuid(void)
# Probes CPUID and returns the (possibly adjusted) capability bits:
#   %eax = CPUID.1:EDX feature flags (with OpenSSL-specific tweaks below)
#   %edx = CPUID.1:ECX extended feature flags
_OPENSSL_ia32_cpuid:
L_OPENSSL_ia32_cpuid_begin:
	pushl %ebp
	pushl %ebx
	pushl %esi
	pushl %edi
	xorl %edx,%edx
	# Detect CPUID support: try to toggle EFLAGS.ID (bit 21).
	pushfl
	popl %eax
	movl %eax,%ecx
	xorl $2097152,%eax
	pushl %eax
	popfl
	pushfl
	popl %eax
	xorl %eax,%ecx
	btl $21,%ecx
	jnc L000done
	xorl %eax,%eax
	.byte 0x0f,0xa2
	movl %eax,%edi
	# Vendor check: ebp = 0 iff vendor string is "GenuineIntel"
	# (1970169159="Genu", 1231384169="ineI", 1818588270="ntel").
	xorl %eax,%eax
	cmpl $1970169159,%ebx
	setne %al
	movl %eax,%ebp
	cmpl $1231384169,%edx
	setne %al
	orl %eax,%ebp
	cmpl $1818588270,%ecx
	setne %al
	orl %eax,%ebp
	jz L001intel
	# esi = 0 iff vendor string is "AuthenticAMD"
	# (1752462657="Auth", 1769238117="enti", 1145913699="cAMD").
	cmpl $1752462657,%ebx
	setne %al
	movl %eax,%esi
	cmpl $1769238117,%edx
	setne %al
	orl %eax,%esi
	cmpl $1145913699,%ecx
	setne %al
	orl %eax,%esi
	jnz L001intel
	# AMD path: query extended leaf 0x80000008 for core count.
	movl $2147483648,%eax
	.byte 0x0f,0xa2
	cmpl $2147483656,%eax
	jb L001intel
	movl $2147483656,%eax
	.byte 0x0f,0xa2
	# esi = number of cores (ECX[7:0] + 1).
	movzbl %cl,%esi
	incl %esi
	movl $1,%eax
	.byte 0x0f,0xa2
	btl $28,%edx
	jnc L000done
	# If logical-processor count <= core count, HT is not in use:
	# clear the HTT bit (mask 4026531839 = ~(1<<28)).
	shrl $16,%ebx
	andl $255,%ebx
	cmpl %esi,%ebx
	ja L000done
	andl $4026531839,%edx
	jmp L000done
L001intel:
	# Intel path: leaf 4 (deterministic cache params) gives the maximum
	# number of cores per package in EAX[31:26] (here extracted as edi).
	cmpl $4,%edi
	movl $-1,%edi
	jb L002nocacheinfo
	movl $4,%eax
	movl $0,%ecx
	.byte 0x0f,0xa2
	movl %eax,%edi
	shrl $14,%edi
	andl $4095,%edi
L002nocacheinfo:
	movl $1,%eax
	.byte 0x0f,0xa2
	cmpl $0,%ebp
	jne L003notP4
	# Family 15 (Netburst/P4): set bit 20 — NOTE(review): presumably an
	# OpenSSL-internal marker bit, reserved in the architectural flags.
	andb $15,%ah
	cmpb $15,%ah
	jne L003notP4
	orl $1048576,%edx
L003notP4:
	btl $28,%edx
	jnc L000done
	# HT advertised: tentatively clear HTT, then re-set a "multi-core"
	# indication (bit 28, 268435456) when leaf 4 reported >1 core.
	andl $4026531839,%edx
	cmpl $0,%edi
	je L000done
	orl $268435456,%edx
	shrl $16,%ebx
	cmpb $1,%bl
	ja L000done
	andl $4026531839,%edx
L000done:
	# Return: eax = EDX features, edx = ECX features.
	movl %edx,%eax
	movl %ecx,%edx
	popl %edi
	popl %esi
	popl %ebx
	popl %ebp
	ret
.globl _OPENSSL_rdtsc
.align 4
# unsigned long long OPENSSL_rdtsc(void)
# Returns the time-stamp counter in edx:eax, or 0 if the TSC capability
# bit (bit 4 of OPENSSL_ia32cap_P) is not set.
_OPENSSL_rdtsc:
L_OPENSSL_rdtsc_begin:
	xorl %eax,%eax
	xorl %edx,%edx
	leal _OPENSSL_ia32cap_P,%ecx
	btl $4,(%ecx)
	jnc L004notsc
	# 0x0f,0x31 = rdtsc
	.byte 0x0f,0x31
L004notsc:
	ret
.globl _OPENSSL_instrument_halt
.align 4
# unsigned long long OPENSSL_instrument_halt(void)
# Measures TSC ticks spent in a single HLT instruction.  Returns 0 unless
# the TSC is available, we run at CPL 0, and interrupts are enabled.
_OPENSSL_instrument_halt:
L_OPENSSL_instrument_halt_begin:
	leal _OPENSSL_ia32cap_P,%ecx
	btl $4,(%ecx)
	jnc L005nohalt
	# Encoded 0x9058900e = "push %cs; nop; pop %eax; nop":
	# low two bits of %cs give the current privilege level.
	.long 2421723150
	andl $3,%eax
	jnz L005nohalt
	# HLT also requires IF=1 (EFLAGS bit 9) or we would hang forever.
	pushfl
	popl %eax
	btl $9,%eax
	jnc L005nohalt
	# rdtsc before, hlt, rdtsc after; return the 64-bit difference.
	.byte 0x0f,0x31
	pushl %edx
	pushl %eax
	hlt
	.byte 0x0f,0x31
	subl (%esp),%eax
	sbbl 4(%esp),%edx
	addl $8,%esp
	ret
L005nohalt:
	xorl %eax,%eax
	xorl %edx,%edx
	ret
.globl _OPENSSL_far_spin
.align 4
# unsigned long long OPENSSL_far_spin(int seg, void *addr)
# Spins (through a far %ds override) counting iterations while *addr keeps
# its initial value; returns the iteration count, or 0 if interrupts are
# disabled (the wait could never end).
_OPENSSL_far_spin:
L_OPENSSL_far_spin_begin:
	pushfl
	popl %eax
	btl $9,%eax
	jnc L006nospin
	movl 4(%esp),%eax
	movl 8(%esp),%ecx
	# Encoded 0x90d88e1e = "push %ds; mov %eax,%ds; nop" — load the
	# caller-supplied segment selector for the far access.
	.long 2430111262
	xorl %eax,%eax
	movl (%ecx),%edx
	jmp L007spin
.align 4,0x90
L007spin:
	incl %eax
	cmpl (%ecx),%edx
	je L007spin
	# Encoded 0x1f909090 = "nop; nop; nop; pop %ds" — restore %ds.
	.long 529567888
	ret
L006nospin:
	xorl %eax,%eax
	xorl %edx,%edx
	ret
.globl _OPENSSL_wipe_cpu
.align 4
# void *OPENSSL_wipe_cpu(void)
# Scrubs volatile register state (and the x87 stack when an FPU is present)
# so secrets do not linger; returns a stack pointer value in %eax.
_OPENSSL_wipe_cpu:
L_OPENSSL_wipe_cpu_begin:
	xorl %eax,%eax
	xorl %edx,%edx
	leal _OPENSSL_ia32cap_P,%ecx
	movl (%ecx),%ecx
	# NOTE(review): %ecx now holds the capability *value*, yet the bt below
	# uses it as an address.  This matches the upstream generator output
	# (x86cpuid.pl) verbatim, but looks like a double dereference — verify
	# against upstream before relying on this path.
	btl $1,(%ecx)
	jnc L008no_x87
	# Encoded: 4 x 0xeed9eed9 = eight "fldz" (fill x87 stack with zeros),
	# then 0x90e3db9b = "fwait; fninit; nop" to reset the FPU.
	.long 4007259865,4007259865,4007259865,4007259865,2430851995
L008no_x87:
	leal 4(%esp),%eax
	ret
.globl _OPENSSL_atomic_add
.align 4
# int OPENSSL_atomic_add(int *val, int amount)
# Atomically adds amount to *val via a lock-cmpxchg retry loop and
# returns the new value.
_OPENSSL_atomic_add:
L_OPENSSL_atomic_add_begin:
	movl 4(%esp),%edx
	movl 8(%esp),%ecx
	pushl %ebx
	nop
	movl (%edx),%eax
L009spin:
	# ebx = current + amount; retry if *val changed under us.
	leal (%eax,%ecx,1),%ebx
	nop
	# Encoded 0x1ab10ff0 = "lock cmpxchg %ebx,(%edx)".
	.long 447811568
	jne L009spin
	movl %ebx,%eax
	popl %ebx
	ret
.globl _OPENSSL_indirect_call
.align 4
# void OPENSSL_indirect_call(void *f, ...)
# Calls f(arg1..arg7): copies up to seven dword arguments from the caller's
# frame onto a fresh stack area, then dispatches through the pointer.
# Used to frustrate return-address/retpoline-style prediction attacks.
_OPENSSL_indirect_call:
L_OPENSSL_indirect_call_begin:
	pushl %ebp
	movl %esp,%ebp
	subl $28,%esp
	movl 12(%ebp),%ecx
	movl %ecx,(%esp)
	movl 16(%ebp),%edx
	movl %edx,4(%esp)
	movl 20(%ebp),%eax
	movl %eax,8(%esp)
	movl 24(%ebp),%eax
	movl %eax,12(%esp)
	movl 28(%ebp),%eax
	movl %eax,16(%esp)
	movl 32(%ebp),%eax
	movl %eax,20(%esp)
	movl 36(%ebp),%eax
	movl %eax,24(%esp)
	# 8(%ebp) is the function pointer (first argument).
	call *8(%ebp)
	movl %ebp,%esp
	popl %ebp
	ret
.globl _OPENSSL_cleanse
.align 4
# void OPENSSL_cleanse(void *ptr, size_t len)
# Securely zeroes len bytes at ptr.  Written in asm so the store loop
# cannot be optimized away; byte stores until aligned, then dword stores.
_OPENSSL_cleanse:
L_OPENSSL_cleanse_begin:
	movl 4(%esp),%edx
	movl 8(%esp),%ecx
	xorl %eax,%eax
	cmpl $7,%ecx
	jae L010lot
	cmpl $0,%ecx
	je L011ret
L012little:
	# Short buffers (<7 bytes): plain byte loop.
	movb %al,(%edx)
	subl $1,%ecx
	leal 1(%edx),%edx
	jnz L012little
L011ret:
	ret
.align 4,0x90
L010lot:
	# Byte-store until edx is 4-byte aligned.
	testl $3,%edx
	jz L013aligned
	movb %al,(%edx)
	leal -1(%ecx),%ecx
	leal 1(%edx),%edx
	jmp L010lot
L013aligned:
	# Dword stores while at least 4 bytes remain, then finish the tail.
	movl %eax,(%edx)
	leal -4(%ecx),%ecx
	testl $-4,%ecx
	leal 4(%edx),%edx
	jnz L013aligned
	cmpl $0,%ecx
	jne L012little
	ret
.comm _OPENSSL_ia32cap_P,4
.mod_init_func
.align 2
.long _OPENSSL_cpuid_setup

3222
deps/openssl/asm/x86-win32-masm/aes/aes-586.asm

File diff suppressed because it is too large

907
deps/openssl/asm/x86-win32-masm/bf/bf-686.asm

@ -0,0 +1,907 @@
TITLE bf-686.asm
IF @Version LT 800
ECHO MASM version 8.00 or later is strongly recommended.
ENDIF
.586
.MODEL FLAT
OPTION DOTNAME
IF @Version LT 800
.text$ SEGMENT PAGE 'CODE'
ELSE
.text$ SEGMENT ALIGN(64) 'CODE'
ENDIF
ALIGN 16
;
; void BF_encrypt(BF_LONG *data, const BF_KEY *key)
; Generated by OpenSSL's perlasm (bf-686.pl), MASM flavour.  Blowfish
; encryption of one 64-bit block held in data[0..1].  edi = key schedule:
; P-array at [edi..68], the four S-boxes at 72/1096/2120/3144.  Each round
; computes F(x) = ((S0[a]+S1[b])^S2[c])+S3[d] from the four bytes of the
; active half (extracted via al/bl after rotating by 16).
_BF_encrypt PROC PUBLIC
$L_BF_encrypt_begin::
	push	ebp
	push	ebx
	push	esi
	push	edi
	;
	; Load the 2 words
	mov	eax,DWORD PTR 20[esp]
	mov	ecx,DWORD PTR [eax]
	mov	edx,DWORD PTR 4[eax]
	;
	; P pointer, s and enc flag
	mov	edi,DWORD PTR 24[esp]
	xor	eax,eax
	xor	ebx,ebx
	xor	ecx,DWORD PTR [edi]
	;
	; Round 0
	ror	ecx,16
	mov	esi,DWORD PTR 4[edi]
	mov	al,ch
	mov	bl,cl
	ror	ecx,16
	xor	edx,esi
	mov	esi,DWORD PTR 72[eax*4+edi]
	mov	ebp,DWORD PTR 1096[ebx*4+edi]
	mov	al,ch
	mov	bl,cl
	add	esi,ebp
	mov	eax,DWORD PTR 2120[eax*4+edi]
	xor	esi,eax
	mov	ebp,DWORD PTR 3144[ebx*4+edi]
	add	esi,ebp
	xor	eax,eax
	xor	edx,esi
	;
	; Round 1
	ror	edx,16
	mov	esi,DWORD PTR 8[edi]
	mov	al,dh
	mov	bl,dl
	ror	edx,16
	xor	ecx,esi
	mov	esi,DWORD PTR 72[eax*4+edi]
	mov	ebp,DWORD PTR 1096[ebx*4+edi]
	mov	al,dh
	mov	bl,dl
	add	esi,ebp
	mov	eax,DWORD PTR 2120[eax*4+edi]
	xor	esi,eax
	mov	ebp,DWORD PTR 3144[ebx*4+edi]
	add	esi,ebp
	xor	eax,eax
	xor	ecx,esi
	;
	; Round 2
	ror	ecx,16
	mov	esi,DWORD PTR 12[edi]
	mov	al,ch
	mov	bl,cl
	ror	ecx,16
	xor	edx,esi
	mov	esi,DWORD PTR 72[eax*4+edi]
	mov	ebp,DWORD PTR 1096[ebx*4+edi]
	mov	al,ch
	mov	bl,cl
	add	esi,ebp
	mov	eax,DWORD PTR 2120[eax*4+edi]
	xor	esi,eax
	mov	ebp,DWORD PTR 3144[ebx*4+edi]
	add	esi,ebp
	xor	eax,eax
	xor	edx,esi
	;
	; Round 3
	ror	edx,16
	mov	esi,DWORD PTR 16[edi]
	mov	al,dh
	mov	bl,dl
	ror	edx,16
	xor	ecx,esi
	mov	esi,DWORD PTR 72[eax*4+edi]
	mov	ebp,DWORD PTR 1096[ebx*4+edi]
	mov	al,dh
	mov	bl,dl
	add	esi,ebp
	mov	eax,DWORD PTR 2120[eax*4+edi]
	xor	esi,eax
	mov	ebp,DWORD PTR 3144[ebx*4+edi]
	add	esi,ebp
	xor	eax,eax
	xor	ecx,esi
	;
	; Round 4
	ror	ecx,16
	mov	esi,DWORD PTR 20[edi]
	mov	al,ch
	mov	bl,cl
	ror	ecx,16
	xor	edx,esi
	mov	esi,DWORD PTR 72[eax*4+edi]
	mov	ebp,DWORD PTR 1096[ebx*4+edi]
	mov	al,ch
	mov	bl,cl
	add	esi,ebp
	mov	eax,DWORD PTR 2120[eax*4+edi]
	xor	esi,eax
	mov	ebp,DWORD PTR 3144[ebx*4+edi]
	add	esi,ebp
	xor	eax,eax
	xor	edx,esi
	;
	; Round 5
	ror	edx,16
	mov	esi,DWORD PTR 24[edi]
	mov	al,dh
	mov	bl,dl
	ror	edx,16
	xor	ecx,esi
	mov	esi,DWORD PTR 72[eax*4+edi]
	mov	ebp,DWORD PTR 1096[ebx*4+edi]
	mov	al,dh
	mov	bl,dl
	add	esi,ebp
	mov	eax,DWORD PTR 2120[eax*4+edi]
	xor	esi,eax
	mov	ebp,DWORD PTR 3144[ebx*4+edi]
	add	esi,ebp
	xor	eax,eax
	xor	ecx,esi
	;
	; Round 6
	ror	ecx,16
	mov	esi,DWORD PTR 28[edi]
	mov	al,ch
	mov	bl,cl
	ror	ecx,16
	xor	edx,esi
	mov	esi,DWORD PTR 72[eax*4+edi]
	mov	ebp,DWORD PTR 1096[ebx*4+edi]
	mov	al,ch
	mov	bl,cl
	add	esi,ebp
	mov	eax,DWORD PTR 2120[eax*4+edi]
	xor	esi,eax
	mov	ebp,DWORD PTR 3144[ebx*4+edi]
	add	esi,ebp
	xor	eax,eax
	xor	edx,esi
	;
	; Round 7
	ror	edx,16
	mov	esi,DWORD PTR 32[edi]
	mov	al,dh
	mov	bl,dl
	ror	edx,16
	xor	ecx,esi
	mov	esi,DWORD PTR 72[eax*4+edi]
	mov	ebp,DWORD PTR 1096[ebx*4+edi]
	mov	al,dh
	mov	bl,dl
	add	esi,ebp
	mov	eax,DWORD PTR 2120[eax*4+edi]
	xor	esi,eax
	mov	ebp,DWORD PTR 3144[ebx*4+edi]
	add	esi,ebp
	xor	eax,eax
	xor	ecx,esi
	;
	; Round 8
	ror	ecx,16
	mov	esi,DWORD PTR 36[edi]
	mov	al,ch
	mov	bl,cl
	ror	ecx,16
	xor	edx,esi
	mov	esi,DWORD PTR 72[eax*4+edi]
	mov	ebp,DWORD PTR 1096[ebx*4+edi]
	mov	al,ch
	mov	bl,cl
	add	esi,ebp
	mov	eax,DWORD PTR 2120[eax*4+edi]
	xor	esi,eax
	mov	ebp,DWORD PTR 3144[ebx*4+edi]
	add	esi,ebp
	xor	eax,eax
	xor	edx,esi
	;
	; Round 9
	ror	edx,16
	mov	esi,DWORD PTR 40[edi]
	mov	al,dh
	mov	bl,dl
	ror	edx,16
	xor	ecx,esi
	mov	esi,DWORD PTR 72[eax*4+edi]
	mov	ebp,DWORD PTR 1096[ebx*4+edi]
	mov	al,dh
	mov	bl,dl
	add	esi,ebp
	mov	eax,DWORD PTR 2120[eax*4+edi]
	xor	esi,eax
	mov	ebp,DWORD PTR 3144[ebx*4+edi]
	add	esi,ebp
	xor	eax,eax
	xor	ecx,esi
	;
	; Round 10
	ror	ecx,16
	mov	esi,DWORD PTR 44[edi]
	mov	al,ch
	mov	bl,cl
	ror	ecx,16
	xor	edx,esi
	mov	esi,DWORD PTR 72[eax*4+edi]
	mov	ebp,DWORD PTR 1096[ebx*4+edi]
	mov	al,ch
	mov	bl,cl
	add	esi,ebp
	mov	eax,DWORD PTR 2120[eax*4+edi]
	xor	esi,eax
	mov	ebp,DWORD PTR 3144[ebx*4+edi]
	add	esi,ebp
	xor	eax,eax
	xor	edx,esi
	;
	; Round 11
	ror	edx,16
	mov	esi,DWORD PTR 48[edi]
	mov	al,dh
	mov	bl,dl
	ror	edx,16
	xor	ecx,esi
	mov	esi,DWORD PTR 72[eax*4+edi]
	mov	ebp,DWORD PTR 1096[ebx*4+edi]
	mov	al,dh
	mov	bl,dl
	add	esi,ebp
	mov	eax,DWORD PTR 2120[eax*4+edi]
	xor	esi,eax
	mov	ebp,DWORD PTR 3144[ebx*4+edi]
	add	esi,ebp
	xor	eax,eax
	xor	ecx,esi
	;
	; Round 12
	ror	ecx,16
	mov	esi,DWORD PTR 52[edi]
	mov	al,ch
	mov	bl,cl
	ror	ecx,16
	xor	edx,esi
	mov	esi,DWORD PTR 72[eax*4+edi]
	mov	ebp,DWORD PTR 1096[ebx*4+edi]
	mov	al,ch
	mov	bl,cl
	add	esi,ebp
	mov	eax,DWORD PTR 2120[eax*4+edi]
	xor	esi,eax
	mov	ebp,DWORD PTR 3144[ebx*4+edi]
	add	esi,ebp
	xor	eax,eax
	xor	edx,esi
	;
	; Round 13
	ror	edx,16
	mov	esi,DWORD PTR 56[edi]
	mov	al,dh
	mov	bl,dl
	ror	edx,16
	xor	ecx,esi
	mov	esi,DWORD PTR 72[eax*4+edi]
	mov	ebp,DWORD PTR 1096[ebx*4+edi]
	mov	al,dh
	mov	bl,dl
	add	esi,ebp
	mov	eax,DWORD PTR 2120[eax*4+edi]
	xor	esi,eax
	mov	ebp,DWORD PTR 3144[ebx*4+edi]
	add	esi,ebp
	xor	eax,eax
	xor	ecx,esi
	;
	; Round 14
	ror	ecx,16
	mov	esi,DWORD PTR 60[edi]
	mov	al,ch
	mov	bl,cl
	ror	ecx,16
	xor	edx,esi
	mov	esi,DWORD PTR 72[eax*4+edi]
	mov	ebp,DWORD PTR 1096[ebx*4+edi]
	mov	al,ch
	mov	bl,cl
	add	esi,ebp
	mov	eax,DWORD PTR 2120[eax*4+edi]
	xor	esi,eax
	mov	ebp,DWORD PTR 3144[ebx*4+edi]
	add	esi,ebp
	xor	eax,eax
	xor	edx,esi
	;
	; Round 15
	ror	edx,16
	mov	esi,DWORD PTR 64[edi]
	mov	al,dh
	mov	bl,dl
	ror	edx,16
	xor	ecx,esi
	mov	esi,DWORD PTR 72[eax*4+edi]
	mov	ebp,DWORD PTR 1096[ebx*4+edi]
	mov	al,dh
	mov	bl,dl
	add	esi,ebp
	mov	eax,DWORD PTR 2120[eax*4+edi]
	xor	esi,eax
	mov	ebp,DWORD PTR 3144[ebx*4+edi]
	add	esi,ebp
	xor	eax,eax
	xor	ecx,esi
	; Final whitening with P[17]; halves are stored swapped (edx, ecx).
	xor	edx,DWORD PTR 68[edi]
	mov	eax,DWORD PTR 20[esp]
	mov	DWORD PTR [eax],edx
	mov	DWORD PTR 4[eax],ecx
	pop	edi
	pop	esi
	pop	ebx
	pop	ebp
	ret
_BF_encrypt ENDP
ALIGN 16
;
; void BF_decrypt(BF_LONG *data, const BF_KEY *key)
; Mirror image of BF_encrypt: identical round function, but the P-array
; is applied in reverse order (P[17] first, P[0] last), which inverts the
; Feistel network.  See _BF_encrypt above for the register conventions.
_BF_decrypt PROC PUBLIC
$L_BF_decrypt_begin::
	push	ebp
	push	ebx
	push	esi
	push	edi
	;
	; Load the 2 words
	mov	eax,DWORD PTR 20[esp]
	mov	ecx,DWORD PTR [eax]
	mov	edx,DWORD PTR 4[eax]
	;
	; P pointer, s and enc flag
	mov	edi,DWORD PTR 24[esp]
	xor	eax,eax
	xor	ebx,ebx
	xor	ecx,DWORD PTR 68[edi]
	;
	; Round 16
	ror	ecx,16
	mov	esi,DWORD PTR 64[edi]
	mov	al,ch
	mov	bl,cl
	ror	ecx,16
	xor	edx,esi
	mov	esi,DWORD PTR 72[eax*4+edi]
	mov	ebp,DWORD PTR 1096[ebx*4+edi]
	mov	al,ch
	mov	bl,cl
	add	esi,ebp
	mov	eax,DWORD PTR 2120[eax*4+edi]
	xor	esi,eax
	mov	ebp,DWORD PTR 3144[ebx*4+edi]
	add	esi,ebp
	xor	eax,eax
	xor	edx,esi
	;
	; Round 15
	ror	edx,16
	mov	esi,DWORD PTR 60[edi]
	mov	al,dh
	mov	bl,dl
	ror	edx,16
	xor	ecx,esi
	mov	esi,DWORD PTR 72[eax*4+edi]
	mov	ebp,DWORD PTR 1096[ebx*4+edi]
	mov	al,dh
	mov	bl,dl
	add	esi,ebp
	mov	eax,DWORD PTR 2120[eax*4+edi]
	xor	esi,eax
	mov	ebp,DWORD PTR 3144[ebx*4+edi]
	add	esi,ebp
	xor	eax,eax
	xor	ecx,esi
	;
	; Round 14
	ror	ecx,16
	mov	esi,DWORD PTR 56[edi]
	mov	al,ch
	mov	bl,cl
	ror	ecx,16
	xor	edx,esi
	mov	esi,DWORD PTR 72[eax*4+edi]
	mov	ebp,DWORD PTR 1096[ebx*4+edi]
	mov	al,ch
	mov	bl,cl
	add	esi,ebp
	mov	eax,DWORD PTR 2120[eax*4+edi]
	xor	esi,eax
	mov	ebp,DWORD PTR 3144[ebx*4+edi]
	add	esi,ebp
	xor	eax,eax
	xor	edx,esi
	;
	; Round 13
	ror	edx,16
	mov	esi,DWORD PTR 52[edi]
	mov	al,dh
	mov	bl,dl
	ror	edx,16
	xor	ecx,esi
	mov	esi,DWORD PTR 72[eax*4+edi]
	mov	ebp,DWORD PTR 1096[ebx*4+edi]
	mov	al,dh
	mov	bl,dl
	add	esi,ebp
	mov	eax,DWORD PTR 2120[eax*4+edi]
	xor	esi,eax
	mov	ebp,DWORD PTR 3144[ebx*4+edi]
	add	esi,ebp
	xor	eax,eax
	xor	ecx,esi
	;
	; Round 12
	ror	ecx,16
	mov	esi,DWORD PTR 48[edi]
	mov	al,ch
	mov	bl,cl
	ror	ecx,16
	xor	edx,esi
	mov	esi,DWORD PTR 72[eax*4+edi]
	mov	ebp,DWORD PTR 1096[ebx*4+edi]
	mov	al,ch
	mov	bl,cl
	add	esi,ebp
	mov	eax,DWORD PTR 2120[eax*4+edi]
	xor	esi,eax
	mov	ebp,DWORD PTR 3144[ebx*4+edi]
	add	esi,ebp
	xor	eax,eax
	xor	edx,esi
	;
	; Round 11
	ror	edx,16
	mov	esi,DWORD PTR 44[edi]
	mov	al,dh
	mov	bl,dl
	ror	edx,16
	xor	ecx,esi
	mov	esi,DWORD PTR 72[eax*4+edi]
	mov	ebp,DWORD PTR 1096[ebx*4+edi]
	mov	al,dh
	mov	bl,dl
	add	esi,ebp
	mov	eax,DWORD PTR 2120[eax*4+edi]
	xor	esi,eax
	mov	ebp,DWORD PTR 3144[ebx*4+edi]
	add	esi,ebp
	xor	eax,eax
	xor	ecx,esi
	;
	; Round 10
	ror	ecx,16
	mov	esi,DWORD PTR 40[edi]
	mov	al,ch
	mov	bl,cl
	ror	ecx,16
	xor	edx,esi
	mov	esi,DWORD PTR 72[eax*4+edi]
	mov	ebp,DWORD PTR 1096[ebx*4+edi]
	mov	al,ch
	mov	bl,cl
	add	esi,ebp
	mov	eax,DWORD PTR 2120[eax*4+edi]
	xor	esi,eax
	mov	ebp,DWORD PTR 3144[ebx*4+edi]
	add	esi,ebp
	xor	eax,eax
	xor	edx,esi
	;
	; Round 9
	ror	edx,16
	mov	esi,DWORD PTR 36[edi]
	mov	al,dh
	mov	bl,dl
	ror	edx,16
	xor	ecx,esi
	mov	esi,DWORD PTR 72[eax*4+edi]
	mov	ebp,DWORD PTR 1096[ebx*4+edi]
	mov	al,dh
	mov	bl,dl
	add	esi,ebp
	mov	eax,DWORD PTR 2120[eax*4+edi]
	xor	esi,eax
	mov	ebp,DWORD PTR 3144[ebx*4+edi]
	add	esi,ebp
	xor	eax,eax
	xor	ecx,esi
	;
	; Round 8
	ror	ecx,16
	mov	esi,DWORD PTR 32[edi]
	mov	al,ch
	mov	bl,cl
	ror	ecx,16
	xor	edx,esi
	mov	esi,DWORD PTR 72[eax*4+edi]
	mov	ebp,DWORD PTR 1096[ebx*4+edi]
	mov	al,ch
	mov	bl,cl
	add	esi,ebp
	mov	eax,DWORD PTR 2120[eax*4+edi]
	xor	esi,eax
	mov	ebp,DWORD PTR 3144[ebx*4+edi]
	add	esi,ebp
	xor	eax,eax
	xor	edx,esi
	;
	; Round 7
	ror	edx,16
	mov	esi,DWORD PTR 28[edi]
	mov	al,dh
	mov	bl,dl
	ror	edx,16
	xor	ecx,esi
	mov	esi,DWORD PTR 72[eax*4+edi]
	mov	ebp,DWORD PTR 1096[ebx*4+edi]
	mov	al,dh
	mov	bl,dl
	add	esi,ebp
	mov	eax,DWORD PTR 2120[eax*4+edi]
	xor	esi,eax
	mov	ebp,DWORD PTR 3144[ebx*4+edi]
	add	esi,ebp
	xor	eax,eax
	xor	ecx,esi
	;
	; Round 6
	ror	ecx,16
	mov	esi,DWORD PTR 24[edi]
	mov	al,ch
	mov	bl,cl
	ror	ecx,16
	xor	edx,esi
	mov	esi,DWORD PTR 72[eax*4+edi]
	mov	ebp,DWORD PTR 1096[ebx*4+edi]
	mov	al,ch
	mov	bl,cl
	add	esi,ebp
	mov	eax,DWORD PTR 2120[eax*4+edi]
	xor	esi,eax
	mov	ebp,DWORD PTR 3144[ebx*4+edi]
	add	esi,ebp
	xor	eax,eax
	xor	edx,esi
	;
	; Round 5
	ror	edx,16
	mov	esi,DWORD PTR 20[edi]
	mov	al,dh
	mov	bl,dl
	ror	edx,16
	xor	ecx,esi
	mov	esi,DWORD PTR 72[eax*4+edi]
	mov	ebp,DWORD PTR 1096[ebx*4+edi]
	mov	al,dh
	mov	bl,dl
	add	esi,ebp
	mov	eax,DWORD PTR 2120[eax*4+edi]
	xor	esi,eax
	mov	ebp,DWORD PTR 3144[ebx*4+edi]
	add	esi,ebp
	xor	eax,eax
	xor	ecx,esi
	;
	; Round 4
	ror	ecx,16
	mov	esi,DWORD PTR 16[edi]
	mov	al,ch
	mov	bl,cl
	ror	ecx,16
	xor	edx,esi
	mov	esi,DWORD PTR 72[eax*4+edi]
	mov	ebp,DWORD PTR 1096[ebx*4+edi]
	mov	al,ch
	mov	bl,cl
	add	esi,ebp
	mov	eax,DWORD PTR 2120[eax*4+edi]
	xor	esi,eax
	mov	ebp,DWORD PTR 3144[ebx*4+edi]
	add	esi,ebp
	xor	eax,eax
	xor	edx,esi
	;
	; Round 3
	ror	edx,16
	mov	esi,DWORD PTR 12[edi]
	mov	al,dh
	mov	bl,dl
	ror	edx,16
	xor	ecx,esi
	mov	esi,DWORD PTR 72[eax*4+edi]
	mov	ebp,DWORD PTR 1096[ebx*4+edi]
	mov	al,dh
	mov	bl,dl
	add	esi,ebp
	mov	eax,DWORD PTR 2120[eax*4+edi]
	xor	esi,eax
	mov	ebp,DWORD PTR 3144[ebx*4+edi]
	add	esi,ebp
	xor	eax,eax
	xor	ecx,esi
	;
	; Round 2
	ror	ecx,16
	mov	esi,DWORD PTR 8[edi]
	mov	al,ch
	mov	bl,cl
	ror	ecx,16
	xor	edx,esi
	mov	esi,DWORD PTR 72[eax*4+edi]
	mov	ebp,DWORD PTR 1096[ebx*4+edi]
	mov	al,ch
	mov	bl,cl
	add	esi,ebp
	mov	eax,DWORD PTR 2120[eax*4+edi]
	xor	esi,eax
	mov	ebp,DWORD PTR 3144[ebx*4+edi]
	add	esi,ebp
	xor	eax,eax
	xor	edx,esi
	;
	; Round 1
	ror	edx,16
	mov	esi,DWORD PTR 4[edi]
	mov	al,dh
	mov	bl,dl
	ror	edx,16
	xor	ecx,esi
	mov	esi,DWORD PTR 72[eax*4+edi]
	mov	ebp,DWORD PTR 1096[ebx*4+edi]
	mov	al,dh
	mov	bl,dl
	add	esi,ebp
	mov	eax,DWORD PTR 2120[eax*4+edi]
	xor	esi,eax
	mov	ebp,DWORD PTR 3144[ebx*4+edi]
	add	esi,ebp
	xor	eax,eax
	xor	ecx,esi
	; Final whitening with P[0]; halves are stored swapped (edx, ecx).
	xor	edx,DWORD PTR [edi]
	mov	eax,DWORD PTR 20[esp]
	mov	DWORD PTR [eax],edx
	mov	DWORD PTR 4[eax],ecx
	pop	edi
	pop	esi
	pop	ebx
	pop	ebp
	ret
_BF_decrypt ENDP
ALIGN 16
;
; void BF_cbc_encrypt(const unsigned char *in, unsigned char *out,
;                     long length, const BF_KEY *schedule,
;                     unsigned char *ivec, int encrypt)
; Blowfish CBC mode.  Processes whole 8-byte blocks through
; BF_encrypt/BF_decrypt and handles a 1..7-byte tail; the final
; ciphertext block (or carried IV) is written back to ivec.
;
; FIX (review): in the decrypt tail, labels $L022dj2/$L023dj1 previously
; stored the last plaintext bytes through esi (the INPUT pointer) instead
; of edi (the OUTPUT pointer), corrupting the source buffer and leaving
; the output bytes unwritten.  This is the known perlasm cbc.pl tail bug,
; fixed upstream by changing the stores to use the output pointer; the
; neighbouring cases $L017dj6/$L018dj5/$L021dj3 already use edi.
_BF_cbc_encrypt PROC PUBLIC
$L_BF_cbc_encrypt_begin::
	;
	push	ebp
	push	ebx
	push	esi
	push	edi
	mov	ebp,DWORD PTR 28[esp]
	; getting iv ptr from parameter 4
	mov	ebx,DWORD PTR 36[esp]
	mov	esi,DWORD PTR [ebx]
	mov	edi,DWORD PTR 4[ebx]
	; two copies of the IV on the stack: working copy + scratch block
	push	edi
	push	esi
	push	edi
	push	esi
	mov	ebx,esp
	mov	esi,DWORD PTR 36[esp]
	mov	edi,DWORD PTR 40[esp]
	; getting encrypt flag from parameter 5
	mov	ecx,DWORD PTR 56[esp]
	; get and push parameter 3
	mov	eax,DWORD PTR 48[esp]
	push	eax
	push	ebx
	cmp	ecx,0
	jz	$L000decrypt
	; ----- encrypt path: length rounded down to whole blocks in ebp -----
	and	ebp,4294967288
	mov	eax,DWORD PTR 8[esp]
	mov	ebx,DWORD PTR 12[esp]
	jz	$L001encrypt_finish
$L002encrypt_loop:
	; chain: block XOR previous ciphertext (or IV), byte-swap to
	; big-endian host order, encrypt, swap back and emit
	mov	ecx,DWORD PTR [esi]
	mov	edx,DWORD PTR 4[esi]
	xor	eax,ecx
	xor	ebx,edx
	bswap	eax
	bswap	ebx
	mov	DWORD PTR 8[esp],eax
	mov	DWORD PTR 12[esp],ebx
	call	$L_BF_encrypt_begin
	mov	eax,DWORD PTR 8[esp]
	mov	ebx,DWORD PTR 12[esp]
	bswap	eax
	bswap	ebx
	mov	DWORD PTR [edi],eax
	mov	DWORD PTR 4[edi],ebx
	add	esi,8
	add	edi,8
	sub	ebp,8
	jnz	$L002encrypt_loop
$L001encrypt_finish:
	; tail: 0..7 leftover input bytes, zero-padded via the jump table
	mov	ebp,DWORD PTR 52[esp]
	and	ebp,7
	jz	$L003finish
	call	$L004PIC_point
$L004PIC_point:
	pop	edx
	lea	ecx,DWORD PTR ($L005cbc_enc_jmp_table-$L004PIC_point)[edx]
	mov	ebp,DWORD PTR [ebp*4+ecx]
	add	ebp,edx
	xor	ecx,ecx
	xor	edx,edx
	jmp	ebp
$L006ej7:
	mov	dh,BYTE PTR 6[esi]
	shl	edx,8
$L007ej6:
	mov	dh,BYTE PTR 5[esi]
$L008ej5:
	mov	dl,BYTE PTR 4[esi]
$L009ej4:
	mov	ecx,DWORD PTR [esi]
	jmp	$L010ejend
$L011ej3:
	mov	ch,BYTE PTR 2[esi]
	shl	ecx,8
$L012ej2:
	mov	ch,BYTE PTR 1[esi]
$L013ej1:
	mov	cl,BYTE PTR [esi]
$L010ejend:
	xor	eax,ecx
	xor	ebx,edx
	bswap	eax
	bswap	ebx
	mov	DWORD PTR 8[esp],eax
	mov	DWORD PTR 12[esp],ebx
	call	$L_BF_encrypt_begin
	mov	eax,DWORD PTR 8[esp]
	mov	ebx,DWORD PTR 12[esp]
	bswap	eax
	bswap	ebx
	mov	DWORD PTR [edi],eax
	mov	DWORD PTR 4[edi],ebx
	jmp	$L003finish
$L000decrypt:
	; ----- decrypt path -----
	and	ebp,4294967288
	mov	eax,DWORD PTR 16[esp]
	mov	ebx,DWORD PTR 20[esp]
	jz	$L014decrypt_finish
$L015decrypt_loop:
	; decrypt block, XOR with previous ciphertext (16/20[esp]),
	; then remember current ciphertext as next round's chain value
	mov	eax,DWORD PTR [esi]
	mov	ebx,DWORD PTR 4[esi]
	bswap	eax
	bswap	ebx
	mov	DWORD PTR 8[esp],eax
	mov	DWORD PTR 12[esp],ebx
	call	$L_BF_decrypt_begin
	mov	eax,DWORD PTR 8[esp]
	mov	ebx,DWORD PTR 12[esp]
	bswap	eax
	bswap	ebx
	mov	ecx,DWORD PTR 16[esp]
	mov	edx,DWORD PTR 20[esp]
	xor	ecx,eax
	xor	edx,ebx
	mov	eax,DWORD PTR [esi]
	mov	ebx,DWORD PTR 4[esi]
	mov	DWORD PTR [edi],ecx
	mov	DWORD PTR 4[edi],edx
	mov	DWORD PTR 16[esp],eax
	mov	DWORD PTR 20[esp],ebx
	add	esi,8
	add	edi,8
	sub	ebp,8
	jnz	$L015decrypt_loop
$L014decrypt_finish:
	; tail: decrypt one more block, emit only the 1..7 requested bytes
	mov	ebp,DWORD PTR 52[esp]
	and	ebp,7
	jz	$L003finish
	mov	eax,DWORD PTR [esi]
	mov	ebx,DWORD PTR 4[esi]
	bswap	eax
	bswap	ebx
	mov	DWORD PTR 8[esp],eax
	mov	DWORD PTR 12[esp],ebx
	call	$L_BF_decrypt_begin
	mov	eax,DWORD PTR 8[esp]
	mov	ebx,DWORD PTR 12[esp]
	bswap	eax
	bswap	ebx
	mov	ecx,DWORD PTR 16[esp]
	mov	edx,DWORD PTR 20[esp]
	xor	ecx,eax
	xor	edx,ebx
	mov	eax,DWORD PTR [esi]
	mov	ebx,DWORD PTR 4[esi]
$L016dj7:
	ror	edx,16
	mov	BYTE PTR 6[edi],dl
	shr	edx,16
$L017dj6:
	mov	BYTE PTR 5[edi],dh
$L018dj5:
	mov	BYTE PTR 4[edi],dl
$L019dj4:
	mov	DWORD PTR [edi],ecx
	jmp	$L020djend
$L021dj3:
	ror	ecx,16
	mov	BYTE PTR 2[edi],cl
	shl	ecx,16
$L022dj2:
	mov	BYTE PTR 1[edi],ch		; was 1[esi] — write to OUTPUT buffer
$L023dj1:
	mov	BYTE PTR [edi],cl		; was [esi] — write to OUTPUT buffer
$L020djend:
	jmp	$L003finish
$L003finish:
	; store the carried IV (eax:ebx) back through parameter 4, unwind
	mov	ecx,DWORD PTR 60[esp]
	add	esp,24
	mov	DWORD PTR [ecx],eax
	mov	DWORD PTR 4[ecx],ebx
	pop	edi
	pop	esi
	pop	ebx
	pop	ebp
	ret
ALIGN 64
$L005cbc_enc_jmp_table:
DD	0
DD	$L013ej1-$L004PIC_point
DD	$L012ej2-$L004PIC_point
DD	$L011ej3-$L004PIC_point
DD	$L009ej4-$L004PIC_point
DD	$L008ej5-$L004PIC_point
DD	$L007ej6-$L004PIC_point
DD	$L006ej7-$L004PIC_point
ALIGN 64
_BF_cbc_encrypt ENDP
.text$ ENDS
END

348
deps/openssl/asm/x86-win32-masm/bn/x86-mont.asm

@ -0,0 +1,348 @@
TITLE ../openssl/crypto/bn/asm/x86-mont.asm
IF @Version LT 800
ECHO MASM version 8.00 or later is strongly recommended.
ENDIF
.586
.MODEL FLAT
OPTION DOTNAME
IF @Version LT 800
.text$ SEGMENT PAGE 'CODE'
ELSE
.text$ SEGMENT ALIGN(64) 'CODE'
ENDIF
ALIGN 16
;-----------------------------------------------------------------------
; int bn_mul_mont(BN_ULONG *rp, const BN_ULONG *ap, const BN_ULONG *bp,
;                 const BN_ULONG *np, const BN_ULONG *n0, int num)
; cdecl, Win32 MASM.  Montgomery multiplication: rp = ap*bp/R mod np.
; Machine-generated from OpenSSL crypto/bn/asm/x86-mont.pl -- do not
; edit by hand; regenerate from the perl source instead.
; Returns eax=1 on success; eax=0 (nothing done) when num < 4.
;
; After frame setup the scratch frame holds (offsets from the new esp):
;   4[esp]=rp   8[esp]=ap   12[esp]=bp (advances through bp[])
;   16[esp]=np  20[esp]=n0 value  24[esp]=caller's esp
;   28[esp]=&bp[num] (outer-loop sentinel)  32[esp]... = tp[] temporary
; ebx = num-1 throughout the multiplication loops.
;-----------------------------------------------------------------------
_bn_mul_mont PROC PUBLIC
$L_bn_mul_mont_begin::
push ebp
push ebx
push esi
push edi
xor eax,eax ; prepare the num<4 failure return value
mov edi,DWORD PTR 40[esp] ; edi = num (arg 6)
cmp edi,4
jl $L000just_leave ; refuse tiny sizes
lea esi,DWORD PTR 20[esp] ; esi = &arg block (rp is the first arg)
lea edx,DWORD PTR 24[esp]
mov ebp,esp ; remember caller's esp for the epilogue
add edi,2
neg edi
lea esp,DWORD PTR [edi*4+esp-32] ; carve out (num+2) words + header
neg edi
; Perturb the frame address so tp[] and the caller's data are unlikely
; to collide in the same cache/page slot, then 64-byte align it
; (NOTE(review): heuristic inherited from x86-mont.pl).
mov eax,esp
sub eax,edx
and eax,2047
sub esp,eax
xor edx,esp
and edx,2048
xor edx,2048
sub esp,edx
and esp,-64
; Copy the arguments into the frame (n0 is dereferenced on the way in).
mov eax,DWORD PTR [esi] ; rp
mov ebx,DWORD PTR 4[esi] ; ap
mov ecx,DWORD PTR 8[esi] ; bp
mov edx,DWORD PTR 12[esi] ; np
mov esi,DWORD PTR 16[esi] ; &n0
mov esi,DWORD PTR [esi] ; n0 value
mov DWORD PTR 4[esp],eax
mov DWORD PTR 8[esp],ebx
mov DWORD PTR 12[esp],ecx
mov DWORD PTR 16[esp],edx
mov DWORD PTR 20[esp],esi
lea ebx,DWORD PTR [edi-3] ; ebx = num-1 (edi is num+2 here)
mov DWORD PTR 24[esp],ebp ; save original stack pointer
mov esi,DWORD PTR 8[esp] ; esi = ap
lea ebp,DWORD PTR 1[ebx]
mov edi,DWORD PTR 12[esp] ; edi = bp
xor ecx,ecx
mov edx,esi
and ebp,1 ; num & 1
sub edx,edi ; ap - bp
lea eax,DWORD PTR 4[ebx*4+edi] ; eax = &bp[num]
or ebp,edx ; zero iff ap==bp and num is even
mov edi,DWORD PTR [edi] ; edi = bp[0]
jz $L001bn_sqr_mont ; dedicated squaring path
mov DWORD PTR 28[esp],eax
mov eax,DWORD PTR [esi]
xor edx,edx
ALIGN 16
$L002mull: ; first pass: tp[j] = ap[j]*bp[0] + carry
mov ebp,edx
mul edi
add ebp,eax
lea ecx,DWORD PTR 1[ecx]
adc edx,0
mov eax,DWORD PTR [ecx*4+esi]
cmp ecx,ebx
mov DWORD PTR 28[ecx*4+esp],ebp
jl $L002mull
; last limb of the first pass, then m = tp[0]*n0 for the reduction
mov ebp,edx
mul edi
mov edi,DWORD PTR 20[esp]
add eax,ebp
mov esi,DWORD PTR 16[esp] ; esi = np
adc edx,0
imul edi,DWORD PTR 32[esp] ; edi = m = tp[0]*n0 mod 2^32
mov DWORD PTR 32[ebx*4+esp],eax
xor ecx,ecx
mov DWORD PTR 36[ebx*4+esp],edx
mov DWORD PTR 40[ebx*4+esp],ecx
mov eax,DWORD PTR [esi]
mul edi
add eax,DWORD PTR 32[esp] ; tp[0] + np[0]*m; low word is discarded
mov eax,DWORD PTR 4[esi]
adc edx,0
inc ecx
jmp $L0032ndmadd
ALIGN 16
$L0041stmadd: ; outer loop, pass 1: tp[j] += ap[j]*bp[i]
mov ebp,edx
mul edi
add ebp,DWORD PTR 32[ecx*4+esp]
lea ecx,DWORD PTR 1[ecx]
adc edx,0
add ebp,eax
mov eax,DWORD PTR [ecx*4+esi]
adc edx,0
cmp ecx,ebx
mov DWORD PTR 28[ecx*4+esp],ebp
jl $L0041stmadd
mov ebp,edx
mul edi
add eax,DWORD PTR 32[ebx*4+esp]
mov edi,DWORD PTR 20[esp]
adc edx,0
mov esi,DWORD PTR 16[esp]
add ebp,eax
adc edx,0
imul edi,DWORD PTR 32[esp] ; m = tp[0]*n0 for this outer iteration
xor ecx,ecx
add edx,DWORD PTR 36[ebx*4+esp]
mov DWORD PTR 32[ebx*4+esp],ebp
adc ecx,0
mov eax,DWORD PTR [esi]
mov DWORD PTR 36[ebx*4+esp],edx
mov DWORD PTR 40[ebx*4+esp],ecx
mul edi
add eax,DWORD PTR 32[esp]
mov eax,DWORD PTR 4[esi]
adc edx,0
mov ecx,1
ALIGN 16
$L0032ndmadd: ; pass 2 (reduction): tp[j-1] = tp[j] + np[j]*m
mov ebp,edx
mul edi
add ebp,DWORD PTR 32[ecx*4+esp]
lea ecx,DWORD PTR 1[ecx]
adc edx,0
add ebp,eax
mov eax,DWORD PTR [ecx*4+esi]
adc edx,0
cmp ecx,ebx
mov DWORD PTR 24[ecx*4+esp],ebp ; store shifted down one word (the /R)
jl $L0032ndmadd
mov ebp,edx
mul edi
add ebp,DWORD PTR 32[ebx*4+esp]
adc edx,0
add ebp,eax
adc edx,0
mov DWORD PTR 28[ebx*4+esp],ebp
xor eax,eax
mov ecx,DWORD PTR 12[esp]
add edx,DWORD PTR 36[ebx*4+esp]
adc eax,DWORD PTR 40[ebx*4+esp]
lea ecx,DWORD PTR 4[ecx] ; advance bp
mov DWORD PTR 32[ebx*4+esp],edx
cmp ecx,DWORD PTR 28[esp] ; all of bp[] consumed?
mov DWORD PTR 36[ebx*4+esp],eax
je $L005common_tail
mov edi,DWORD PTR [ecx] ; edi = next bp[i]
mov esi,DWORD PTR 8[esp]
mov DWORD PTR 12[esp],ecx
xor ecx,ecx
xor edx,edx
mov eax,DWORD PTR [esi]
jmp $L0041stmadd
ALIGN 16
$L001bn_sqr_mont: ; squaring path (ap==bp, even num)
mov DWORD PTR [esp],ebx
mov DWORD PTR 12[esp],ecx
mov eax,edi
mul edi ; ap[0]^2
mov DWORD PTR 32[esp],eax
mov ebx,edx
shr edx,1
and ebx,1 ; split high word into carry bit + shifted part
inc ecx
ALIGN 16
$L006sqr: ; tp[j] = 2*ap[j]*ap[0] + carry (doubled cross products)
mov eax,DWORD PTR [ecx*4+esi]
mov ebp,edx
mul edi
add eax,ebp
lea ecx,DWORD PTR 1[ecx]
adc edx,0
lea ebp,DWORD PTR [eax*2+ebx]
shr eax,31
cmp ecx,DWORD PTR [esp]
mov ebx,eax
mov DWORD PTR 28[ecx*4+esp],ebp
jl $L006sqr
mov eax,DWORD PTR [ecx*4+esi]
mov ebp,edx
mul edi
add eax,ebp
mov edi,DWORD PTR 20[esp]
adc edx,0
mov esi,DWORD PTR 16[esp]
lea ebp,DWORD PTR [eax*2+ebx]
imul edi,DWORD PTR 32[esp] ; m = tp[0]*n0
shr eax,31
mov DWORD PTR 32[ecx*4+esp],ebp
lea ebp,DWORD PTR [edx*2+eax]
mov eax,DWORD PTR [esi]
shr edx,31
mov DWORD PTR 36[ecx*4+esp],ebp
mov DWORD PTR 40[ecx*4+esp],edx
mul edi
add eax,DWORD PTR 32[esp]
mov ebx,ecx
adc edx,0
mov eax,DWORD PTR 4[esi]
mov ecx,1
ALIGN 16
$L0073rdmadd: ; reduction pass for the squaring path, two limbs/iter
mov ebp,edx
mul edi
add ebp,DWORD PTR 32[ecx*4+esp]
adc edx,0
add ebp,eax
mov eax,DWORD PTR 4[ecx*4+esi]
adc edx,0
mov DWORD PTR 28[ecx*4+esp],ebp
mov ebp,edx
mul edi
add ebp,DWORD PTR 36[ecx*4+esp]
lea ecx,DWORD PTR 2[ecx]
adc edx,0
add ebp,eax
mov eax,DWORD PTR [ecx*4+esi]
adc edx,0
cmp ecx,ebx
mov DWORD PTR 24[ecx*4+esp],ebp
jl $L0073rdmadd
mov ebp,edx
mul edi
add ebp,DWORD PTR 32[ebx*4+esp]
adc edx,0
add ebp,eax
adc edx,0
mov DWORD PTR 28[ebx*4+esp],ebp
mov ecx,DWORD PTR 12[esp]
xor eax,eax
mov esi,DWORD PTR 8[esp]
add edx,DWORD PTR 36[ebx*4+esp]
adc eax,DWORD PTR 40[ebx*4+esp]
mov DWORD PTR 32[ebx*4+esp],edx
cmp ecx,ebx
mov DWORD PTR 36[ebx*4+esp],eax
je $L005common_tail
mov edi,DWORD PTR 4[ecx*4+esi] ; edi = ap[i], next outer limb
lea ecx,DWORD PTR 1[ecx]
mov eax,edi
mov DWORD PTR 12[esp],ecx
mul edi ; diagonal term ap[i]^2
add eax,DWORD PTR 32[ecx*4+esp]
adc edx,0
mov DWORD PTR 32[ecx*4+esp],eax
xor ebp,ebp
cmp ecx,ebx
lea ecx,DWORD PTR 1[ecx]
je $L008sqrlast
mov ebx,edx
shr edx,1
and ebx,1
ALIGN 16
$L009sqradd: ; tp[j] += 2*ap[j]*ap[i] + carry
mov eax,DWORD PTR [ecx*4+esi]
mov ebp,edx
mul edi
add eax,ebp
lea ebp,DWORD PTR [eax*1+eax]
adc edx,0
shr eax,31
add ebp,DWORD PTR 32[ecx*4+esp]
lea ecx,DWORD PTR 1[ecx]
adc eax,0
add ebp,ebx
adc eax,0
cmp ecx,DWORD PTR [esp]
mov DWORD PTR 28[ecx*4+esp],ebp
mov ebx,eax
jle $L009sqradd
mov ebp,edx
add edx,edx
shr ebp,31
add edx,ebx
adc ebp,0
$L008sqrlast: ; fold in the final carry, then run one reduction pass
mov edi,DWORD PTR 20[esp]
mov esi,DWORD PTR 16[esp]
imul edi,DWORD PTR 32[esp] ; m = tp[0]*n0
add edx,DWORD PTR 32[ecx*4+esp]
mov eax,DWORD PTR [esi]
adc ebp,0
mov DWORD PTR 32[ecx*4+esp],edx
mov DWORD PTR 36[ecx*4+esp],ebp
mul edi
add eax,DWORD PTR 32[esp]
lea ebx,DWORD PTR [ecx-1]
adc edx,0
mov ecx,1
mov eax,DWORD PTR 4[esi]
jmp $L0073rdmadd
ALIGN 16
$L005common_tail: ; tp may still exceed np: conditionally subtract
mov ebp,DWORD PTR 16[esp] ; np
mov edi,DWORD PTR 4[esp] ; rp
lea esi,DWORD PTR 32[esp] ; tp
mov eax,DWORD PTR [esi]
mov ecx,ebx ; num-1 loop counter
xor edx,edx ; also clears CF for the sbb chain
ALIGN 16
$L010sub: ; rp = tp - np, tracking the borrow
sbb eax,DWORD PTR [edx*4+ebp]
mov DWORD PTR [edx*4+edi],eax
dec ecx
mov eax,DWORD PTR 4[edx*4+esi]
lea edx,DWORD PTR 1[edx]
jge $L010sub
sbb eax,0 ; eax = -1 if tp < np (keep tp), else 0 (keep rp)
; branchless select of the source pointer to copy into rp
and esi,eax
not eax
mov ebp,edi
and ebp,eax
or esi,ebp
ALIGN 16
$L011copy: ; rp[j] = selected[j]; also wipes the temporary vector
mov eax,DWORD PTR [ebx*4+esi]
mov DWORD PTR [ebx*4+edi],eax
mov DWORD PTR 32[ebx*4+esp],ecx ; zap tp[j]
dec ebx
jge $L011copy
mov esp,DWORD PTR 24[esp] ; restore caller's stack frame
mov eax,1 ; success
$L000just_leave:
pop edi
pop esi
pop ebx
pop ebp
ret
_bn_mul_mont ENDP
DB 77,111,110,116,103,111,109,101,114,121,32,77,117,108,116,105
DB 112,108,105,99,97,116,105,111,110,32,102,111,114,32,120,56
DB 54,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121
DB 32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46
DB 111,114,103,62,0
.text$ ENDS
END

2116
deps/openssl/asm/x86-win32-masm/bn/x86.asm

File diff suppressed because it is too large

2367
deps/openssl/asm/x86-win32-masm/camellia/cmll-x86.asm

File diff suppressed because it is too large

950
deps/openssl/asm/x86-win32-masm/cast/cast-586.asm

@ -0,0 +1,950 @@
TITLE cast-586.asm
IF @Version LT 800
ECHO MASM version 8.00 or later is strongly recommended.
ENDIF
.586
.MODEL FLAT
OPTION DOTNAME
IF @Version LT 800
.text$ SEGMENT PAGE 'CODE'
ELSE
.text$ SEGMENT ALIGN(64) 'CODE'
ENDIF
EXTERN _CAST_S_table0:NEAR
EXTERN _CAST_S_table1:NEAR
EXTERN _CAST_S_table2:NEAR
EXTERN _CAST_S_table3:NEAR
ALIGN 16
;-----------------------------------------------------------------------
; void CAST_encrypt(CAST_LONG *data, const CAST_KEY *key)   (cdecl)
; Machine-generated from OpenSSL crypto/cast/asm/cast-586.pl -- do not
; edit by hand; regenerate from the perl source instead.
;
; data[0..1] is one 64-bit block, held in edi (left) / esi (right).
; key holds sixteen (Km,Kr) round-key pairs at 8-byte stride; the DWORD
; at 128[key] is the "short key" flag: when non-zero only the first 12
; rounds are performed (RFC 2144: keys of 80 bits or less).
;
; Every round below is the same 20-instruction pattern, differing only
; in the mixing operations (CAST-128 round functions, RFC 2144 2.2):
;   f1 (rounds 0,3,6,9,12,15): I=rol(Km+D,Kr),   f=((S1^S2)-S3)+S4
;   f2 (rounds 1,4,7,10,13):   I=rol(Km^D,Kr),   f=((S1-S2)+S3)^S4
;   f3 (rounds 2,5,8,11,14):   f=rol(Km-D,Kr) -> ((S1+S2)^S3)-S4
; The four S-box indices are the bytes of I, extracted into
; ecx/ebx/eax/edx; the result f is XORed into the opposite half.
; Clobbers eax,ebx,ecx,edx; ebp/esi/edi are saved and restored.
;-----------------------------------------------------------------------
ALIGN 16
_CAST_encrypt PROC PUBLIC
$L_CAST_encrypt_begin::
;
push ebp
push ebx
mov ebx,DWORD PTR 12[esp] ; ebx = data
mov ebp,DWORD PTR 16[esp] ; ebp = key schedule
push esi
push edi
; Load the 2 words
mov edi,DWORD PTR [ebx]
mov esi,DWORD PTR 4[ebx]
; Get short key flag
mov eax,DWORD PTR 128[ebp]
push eax ; keep the flag; tested after round 11
xor eax,eax
; round 0
mov edx,DWORD PTR [ebp] ; Km0
mov ecx,DWORD PTR 4[ebp] ; Kr0
add edx,esi ; f1: I = rol(Km + R, Kr)
rol edx,cl
mov ebx,edx
xor ecx,ecx
mov cl,dh ; index byte b
and ebx,255 ; index byte a... bytes of I feed S1..S4
shr edx,16
xor eax,eax
mov al,dh
and edx,255
mov ecx,DWORD PTR _CAST_S_table0[ecx*4]
mov ebx,DWORD PTR _CAST_S_table1[ebx*4]
xor ecx,ebx ; ((S1^S2)-S3)+S4
mov ebx,DWORD PTR _CAST_S_table2[eax*4]
sub ecx,ebx
mov ebx,DWORD PTR _CAST_S_table3[edx*4]
add ecx,ebx
xor edi,ecx ; L ^= f
; round 1
mov edx,DWORD PTR 8[ebp]
mov ecx,DWORD PTR 12[ebp]
xor edx,edi
rol edx,cl
mov ebx,edx
xor ecx,ecx
mov cl,dh
and ebx,255
shr edx,16
xor eax,eax
mov al,dh
and edx,255
mov ecx,DWORD PTR _CAST_S_table0[ecx*4]
mov ebx,DWORD PTR _CAST_S_table1[ebx*4]
sub ecx,ebx
mov ebx,DWORD PTR _CAST_S_table2[eax*4]
add ecx,ebx
mov ebx,DWORD PTR _CAST_S_table3[edx*4]
xor ecx,ebx
xor esi,ecx
; round 2
mov edx,DWORD PTR 16[ebp]
mov ecx,DWORD PTR 20[ebp]
sub edx,esi
rol edx,cl
mov ebx,edx
xor ecx,ecx
mov cl,dh
and ebx,255
shr edx,16
xor eax,eax
mov al,dh
and edx,255
mov ecx,DWORD PTR _CAST_S_table0[ecx*4]
mov ebx,DWORD PTR _CAST_S_table1[ebx*4]
add ecx,ebx
mov ebx,DWORD PTR _CAST_S_table2[eax*4]
xor ecx,ebx
mov ebx,DWORD PTR _CAST_S_table3[edx*4]
sub ecx,ebx
xor edi,ecx
; round 3
mov edx,DWORD PTR 24[ebp]
mov ecx,DWORD PTR 28[ebp]
add edx,edi
rol edx,cl
mov ebx,edx
xor ecx,ecx
mov cl,dh
and ebx,255
shr edx,16
xor eax,eax
mov al,dh
and edx,255
mov ecx,DWORD PTR _CAST_S_table0[ecx*4]
mov ebx,DWORD PTR _CAST_S_table1[ebx*4]
xor ecx,ebx
mov ebx,DWORD PTR _CAST_S_table2[eax*4]
sub ecx,ebx
mov ebx,DWORD PTR _CAST_S_table3[edx*4]
add ecx,ebx
xor esi,ecx
; round 4
mov edx,DWORD PTR 32[ebp]
mov ecx,DWORD PTR 36[ebp]
xor edx,esi
rol edx,cl
mov ebx,edx
xor ecx,ecx
mov cl,dh
and ebx,255
shr edx,16
xor eax,eax
mov al,dh
and edx,255
mov ecx,DWORD PTR _CAST_S_table0[ecx*4]
mov ebx,DWORD PTR _CAST_S_table1[ebx*4]
sub ecx,ebx
mov ebx,DWORD PTR _CAST_S_table2[eax*4]
add ecx,ebx
mov ebx,DWORD PTR _CAST_S_table3[edx*4]
xor ecx,ebx
xor edi,ecx
; round 5
mov edx,DWORD PTR 40[ebp]
mov ecx,DWORD PTR 44[ebp]
sub edx,edi
rol edx,cl
mov ebx,edx
xor ecx,ecx
mov cl,dh
and ebx,255
shr edx,16
xor eax,eax
mov al,dh
and edx,255
mov ecx,DWORD PTR _CAST_S_table0[ecx*4]
mov ebx,DWORD PTR _CAST_S_table1[ebx*4]
add ecx,ebx
mov ebx,DWORD PTR _CAST_S_table2[eax*4]
xor ecx,ebx
mov ebx,DWORD PTR _CAST_S_table3[edx*4]
sub ecx,ebx
xor esi,ecx
; round 6
mov edx,DWORD PTR 48[ebp]
mov ecx,DWORD PTR 52[ebp]
add edx,esi
rol edx,cl
mov ebx,edx
xor ecx,ecx
mov cl,dh
and ebx,255
shr edx,16
xor eax,eax
mov al,dh
and edx,255
mov ecx,DWORD PTR _CAST_S_table0[ecx*4]
mov ebx,DWORD PTR _CAST_S_table1[ebx*4]
xor ecx,ebx
mov ebx,DWORD PTR _CAST_S_table2[eax*4]
sub ecx,ebx
mov ebx,DWORD PTR _CAST_S_table3[edx*4]
add ecx,ebx
xor edi,ecx
; round 7
mov edx,DWORD PTR 56[ebp]
mov ecx,DWORD PTR 60[ebp]
xor edx,edi
rol edx,cl
mov ebx,edx
xor ecx,ecx
mov cl,dh
and ebx,255
shr edx,16
xor eax,eax
mov al,dh
and edx,255
mov ecx,DWORD PTR _CAST_S_table0[ecx*4]
mov ebx,DWORD PTR _CAST_S_table1[ebx*4]
sub ecx,ebx
mov ebx,DWORD PTR _CAST_S_table2[eax*4]
add ecx,ebx
mov ebx,DWORD PTR _CAST_S_table3[edx*4]
xor ecx,ebx
xor esi,ecx
; round 8
mov edx,DWORD PTR 64[ebp]
mov ecx,DWORD PTR 68[ebp]
sub edx,esi
rol edx,cl
mov ebx,edx
xor ecx,ecx
mov cl,dh
and ebx,255
shr edx,16
xor eax,eax
mov al,dh
and edx,255
mov ecx,DWORD PTR _CAST_S_table0[ecx*4]
mov ebx,DWORD PTR _CAST_S_table1[ebx*4]
add ecx,ebx
mov ebx,DWORD PTR _CAST_S_table2[eax*4]
xor ecx,ebx
mov ebx,DWORD PTR _CAST_S_table3[edx*4]
sub ecx,ebx
xor edi,ecx
; round 9
mov edx,DWORD PTR 72[ebp]
mov ecx,DWORD PTR 76[ebp]
add edx,edi
rol edx,cl
mov ebx,edx
xor ecx,ecx
mov cl,dh
and ebx,255
shr edx,16
xor eax,eax
mov al,dh
and edx,255
mov ecx,DWORD PTR _CAST_S_table0[ecx*4]
mov ebx,DWORD PTR _CAST_S_table1[ebx*4]
xor ecx,ebx
mov ebx,DWORD PTR _CAST_S_table2[eax*4]
sub ecx,ebx
mov ebx,DWORD PTR _CAST_S_table3[edx*4]
add ecx,ebx
xor esi,ecx
; round 10
mov edx,DWORD PTR 80[ebp]
mov ecx,DWORD PTR 84[ebp]
xor edx,esi
rol edx,cl
mov ebx,edx
xor ecx,ecx
mov cl,dh
and ebx,255
shr edx,16
xor eax,eax
mov al,dh
and edx,255
mov ecx,DWORD PTR _CAST_S_table0[ecx*4]
mov ebx,DWORD PTR _CAST_S_table1[ebx*4]
sub ecx,ebx
mov ebx,DWORD PTR _CAST_S_table2[eax*4]
add ecx,ebx
mov ebx,DWORD PTR _CAST_S_table3[edx*4]
xor ecx,ebx
xor edi,ecx
; round 11
mov edx,DWORD PTR 88[ebp]
mov ecx,DWORD PTR 92[ebp]
sub edx,edi
rol edx,cl
mov ebx,edx
xor ecx,ecx
mov cl,dh
and ebx,255
shr edx,16
xor eax,eax
mov al,dh
and edx,255
mov ecx,DWORD PTR _CAST_S_table0[ecx*4]
mov ebx,DWORD PTR _CAST_S_table1[ebx*4]
add ecx,ebx
mov ebx,DWORD PTR _CAST_S_table2[eax*4]
xor ecx,ebx
mov ebx,DWORD PTR _CAST_S_table3[edx*4]
sub ecx,ebx
xor esi,ecx
; test short key flag
; (pushed at entry; non-zero => stop after 12 rounds)
pop edx
or edx,edx
jnz $L000cast_enc_done
; round 12
mov edx,DWORD PTR 96[ebp]
mov ecx,DWORD PTR 100[ebp]
add edx,esi
rol edx,cl
mov ebx,edx
xor ecx,ecx
mov cl,dh
and ebx,255
shr edx,16
xor eax,eax
mov al,dh
and edx,255
mov ecx,DWORD PTR _CAST_S_table0[ecx*4]
mov ebx,DWORD PTR _CAST_S_table1[ebx*4]
xor ecx,ebx
mov ebx,DWORD PTR _CAST_S_table2[eax*4]
sub ecx,ebx
mov ebx,DWORD PTR _CAST_S_table3[edx*4]
add ecx,ebx
xor edi,ecx
; round 13
mov edx,DWORD PTR 104[ebp]
mov ecx,DWORD PTR 108[ebp]
xor edx,edi
rol edx,cl
mov ebx,edx
xor ecx,ecx
mov cl,dh
and ebx,255
shr edx,16
xor eax,eax
mov al,dh
and edx,255
mov ecx,DWORD PTR _CAST_S_table0[ecx*4]
mov ebx,DWORD PTR _CAST_S_table1[ebx*4]
sub ecx,ebx
mov ebx,DWORD PTR _CAST_S_table2[eax*4]
add ecx,ebx
mov ebx,DWORD PTR _CAST_S_table3[edx*4]
xor ecx,ebx
xor esi,ecx
; round 14
mov edx,DWORD PTR 112[ebp]
mov ecx,DWORD PTR 116[ebp]
sub edx,esi
rol edx,cl
mov ebx,edx
xor ecx,ecx
mov cl,dh
and ebx,255
shr edx,16
xor eax,eax
mov al,dh
and edx,255
mov ecx,DWORD PTR _CAST_S_table0[ecx*4]
mov ebx,DWORD PTR _CAST_S_table1[ebx*4]
add ecx,ebx
mov ebx,DWORD PTR _CAST_S_table2[eax*4]
xor ecx,ebx
mov ebx,DWORD PTR _CAST_S_table3[edx*4]
sub ecx,ebx
xor edi,ecx
; round 15
mov edx,DWORD PTR 120[ebp]
mov ecx,DWORD PTR 124[ebp]
add edx,edi
rol edx,cl
mov ebx,edx
xor ecx,ecx
mov cl,dh
and ebx,255
shr edx,16
xor eax,eax
mov al,dh
and edx,255
mov ecx,DWORD PTR _CAST_S_table0[ecx*4]
mov ebx,DWORD PTR _CAST_S_table1[ebx*4]
xor ecx,ebx
mov ebx,DWORD PTR _CAST_S_table2[eax*4]
sub ecx,ebx
mov ebx,DWORD PTR _CAST_S_table3[edx*4]
add ecx,ebx
xor esi,ecx
$L000cast_enc_done:
nop
; store halves back swapped (undoes the final round's swap)
mov eax,DWORD PTR 20[esp]
mov DWORD PTR 4[eax],edi
mov DWORD PTR [eax],esi
pop edi
pop esi
pop ebx
pop ebp
ret
_CAST_encrypt ENDP
EXTERN _CAST_S_table0:NEAR
EXTERN _CAST_S_table1:NEAR
EXTERN _CAST_S_table2:NEAR
EXTERN _CAST_S_table3:NEAR
ALIGN 16
;-----------------------------------------------------------------------
; void CAST_decrypt(CAST_LONG *data, const CAST_KEY *key)   (cdecl)
; Machine-generated from OpenSSL crypto/cast/asm/cast-586.pl -- do not
; edit by hand; regenerate from the perl source instead.
;
; Exact inverse of CAST_encrypt: the same round pattern applied with
; the round keys in reverse order (15 down to 0).  When the "short
; key" flag at 128[key] is non-zero, rounds 15..12 are skipped so only
; 12 rounds are undone (RFC 2144 short-key variant).
; Round function per round index (same tables as encryption):
;   f1 (0,3,6,9,12,15): add-in,  ((S1^S2)-S3)+S4
;   f2 (1,4,7,10,13):   xor-in,  ((S1-S2)+S3)^S4
;   f3 (2,5,8,11,14):   sub-in,  ((S1+S2)^S3)-S4
; Clobbers eax,ebx,ecx,edx; ebp/esi/edi are saved and restored.
;-----------------------------------------------------------------------
ALIGN 16
_CAST_decrypt PROC PUBLIC
$L_CAST_decrypt_begin::
;
push ebp
push ebx
mov ebx,DWORD PTR 12[esp] ; ebx = data
mov ebp,DWORD PTR 16[esp] ; ebp = key schedule
push esi
push edi
; Load the 2 words
mov edi,DWORD PTR [ebx]
mov esi,DWORD PTR 4[ebx]
; Get short key flag
mov eax,DWORD PTR 128[ebp]
or eax,eax
jnz $L001cast_dec_skip ; short key: skip rounds 15..12
xor eax,eax
; round 15
mov edx,DWORD PTR 120[ebp]
mov ecx,DWORD PTR 124[ebp]
add edx,esi
rol edx,cl
mov ebx,edx
xor ecx,ecx
mov cl,dh
and ebx,255
shr edx,16
xor eax,eax
mov al,dh
and edx,255
mov ecx,DWORD PTR _CAST_S_table0[ecx*4]
mov ebx,DWORD PTR _CAST_S_table1[ebx*4]
xor ecx,ebx
mov ebx,DWORD PTR _CAST_S_table2[eax*4]
sub ecx,ebx
mov ebx,DWORD PTR _CAST_S_table3[edx*4]
add ecx,ebx
xor edi,ecx
; round 14
mov edx,DWORD PTR 112[ebp]
mov ecx,DWORD PTR 116[ebp]
sub edx,edi
rol edx,cl
mov ebx,edx
xor ecx,ecx
mov cl,dh
and ebx,255
shr edx,16
xor eax,eax
mov al,dh
and edx,255
mov ecx,DWORD PTR _CAST_S_table0[ecx*4]
mov ebx,DWORD PTR _CAST_S_table1[ebx*4]
add ecx,ebx
mov ebx,DWORD PTR _CAST_S_table2[eax*4]
xor ecx,ebx
mov ebx,DWORD PTR _CAST_S_table3[edx*4]
sub ecx,ebx
xor esi,ecx
; round 13
mov edx,DWORD PTR 104[ebp]
mov ecx,DWORD PTR 108[ebp]
xor edx,esi
rol edx,cl
mov ebx,edx
xor ecx,ecx
mov cl,dh
and ebx,255
shr edx,16
xor eax,eax
mov al,dh
and edx,255
mov ecx,DWORD PTR _CAST_S_table0[ecx*4]
mov ebx,DWORD PTR _CAST_S_table1[ebx*4]
sub ecx,ebx
mov ebx,DWORD PTR _CAST_S_table2[eax*4]
add ecx,ebx
mov ebx,DWORD PTR _CAST_S_table3[edx*4]
xor ecx,ebx
xor edi,ecx
; round 12
mov edx,DWORD PTR 96[ebp]
mov ecx,DWORD PTR 100[ebp]
add edx,edi
rol edx,cl
mov ebx,edx
xor ecx,ecx
mov cl,dh
and ebx,255
shr edx,16
xor eax,eax
mov al,dh
and edx,255
mov ecx,DWORD PTR _CAST_S_table0[ecx*4]
mov ebx,DWORD PTR _CAST_S_table1[ebx*4]
xor ecx,ebx
mov ebx,DWORD PTR _CAST_S_table2[eax*4]
sub ecx,ebx
mov ebx,DWORD PTR _CAST_S_table3[edx*4]
add ecx,ebx
xor esi,ecx
$L001cast_dec_skip:
; round 11
mov edx,DWORD PTR 88[ebp]
mov ecx,DWORD PTR 92[ebp]
sub edx,esi
rol edx,cl
mov ebx,edx
xor ecx,ecx
mov cl,dh
and ebx,255
shr edx,16
xor eax,eax
mov al,dh
and edx,255
mov ecx,DWORD PTR _CAST_S_table0[ecx*4]
mov ebx,DWORD PTR _CAST_S_table1[ebx*4]
add ecx,ebx
mov ebx,DWORD PTR _CAST_S_table2[eax*4]
xor ecx,ebx
mov ebx,DWORD PTR _CAST_S_table3[edx*4]
sub ecx,ebx
xor edi,ecx
; round 10
mov edx,DWORD PTR 80[ebp]
mov ecx,DWORD PTR 84[ebp]
xor edx,edi
rol edx,cl
mov ebx,edx
xor ecx,ecx
mov cl,dh
and ebx,255
shr edx,16
xor eax,eax
mov al,dh
and edx,255
mov ecx,DWORD PTR _CAST_S_table0[ecx*4]
mov ebx,DWORD PTR _CAST_S_table1[ebx*4]
sub ecx,ebx
mov ebx,DWORD PTR _CAST_S_table2[eax*4]
add ecx,ebx
mov ebx,DWORD PTR _CAST_S_table3[edx*4]
xor ecx,ebx
xor esi,ecx
; round 9
mov edx,DWORD PTR 72[ebp]
mov ecx,DWORD PTR 76[ebp]
add edx,esi
rol edx,cl
mov ebx,edx
xor ecx,ecx
mov cl,dh
and ebx,255
shr edx,16
xor eax,eax
mov al,dh
and edx,255
mov ecx,DWORD PTR _CAST_S_table0[ecx*4]
mov ebx,DWORD PTR _CAST_S_table1[ebx*4]
xor ecx,ebx
mov ebx,DWORD PTR _CAST_S_table2[eax*4]
sub ecx,ebx
mov ebx,DWORD PTR _CAST_S_table3[edx*4]
add ecx,ebx
xor edi,ecx
; round 8
mov edx,DWORD PTR 64[ebp]
mov ecx,DWORD PTR 68[ebp]
sub edx,edi
rol edx,cl
mov ebx,edx
xor ecx,ecx
mov cl,dh
and ebx,255
shr edx,16
xor eax,eax
mov al,dh
and edx,255
mov ecx,DWORD PTR _CAST_S_table0[ecx*4]
mov ebx,DWORD PTR _CAST_S_table1[ebx*4]
add ecx,ebx
mov ebx,DWORD PTR _CAST_S_table2[eax*4]
xor ecx,ebx
mov ebx,DWORD PTR _CAST_S_table3[edx*4]
sub ecx,ebx
xor esi,ecx
; round 7
mov edx,DWORD PTR 56[ebp]
mov ecx,DWORD PTR 60[ebp]
xor edx,esi
rol edx,cl
mov ebx,edx
xor ecx,ecx
mov cl,dh
and ebx,255
shr edx,16
xor eax,eax
mov al,dh
and edx,255
mov ecx,DWORD PTR _CAST_S_table0[ecx*4]
mov ebx,DWORD PTR _CAST_S_table1[ebx*4]
sub ecx,ebx
mov ebx,DWORD PTR _CAST_S_table2[eax*4]
add ecx,ebx
mov ebx,DWORD PTR _CAST_S_table3[edx*4]
xor ecx,ebx
xor edi,ecx
; round 6
mov edx,DWORD PTR 48[ebp]
mov ecx,DWORD PTR 52[ebp]
add edx,edi
rol edx,cl
mov ebx,edx
xor ecx,ecx
mov cl,dh
and ebx,255
shr edx,16
xor eax,eax
mov al,dh
and edx,255
mov ecx,DWORD PTR _CAST_S_table0[ecx*4]
mov ebx,DWORD PTR _CAST_S_table1[ebx*4]
xor ecx,ebx
mov ebx,DWORD PTR _CAST_S_table2[eax*4]
sub ecx,ebx
mov ebx,DWORD PTR _CAST_S_table3[edx*4]
add ecx,ebx
xor esi,ecx
; round 5
mov edx,DWORD PTR 40[ebp]
mov ecx,DWORD PTR 44[ebp]
sub edx,esi
rol edx,cl
mov ebx,edx
xor ecx,ecx
mov cl,dh
and ebx,255
shr edx,16
xor eax,eax
mov al,dh
and edx,255
mov ecx,DWORD PTR _CAST_S_table0[ecx*4]
mov ebx,DWORD PTR _CAST_S_table1[ebx*4]
add ecx,ebx
mov ebx,DWORD PTR _CAST_S_table2[eax*4]
xor ecx,ebx
mov ebx,DWORD PTR _CAST_S_table3[edx*4]
sub ecx,ebx
xor edi,ecx
; round 4
mov edx,DWORD PTR 32[ebp]
mov ecx,DWORD PTR 36[ebp]
xor edx,edi
rol edx,cl
mov ebx,edx
xor ecx,ecx
mov cl,dh
and ebx,255
shr edx,16
xor eax,eax
mov al,dh
and edx,255
mov ecx,DWORD PTR _CAST_S_table0[ecx*4]
mov ebx,DWORD PTR _CAST_S_table1[ebx*4]
sub ecx,ebx
mov ebx,DWORD PTR _CAST_S_table2[eax*4]
add ecx,ebx
mov ebx,DWORD PTR _CAST_S_table3[edx*4]
xor ecx,ebx
xor esi,ecx
; round 3
mov edx,DWORD PTR 24[ebp]
mov ecx,DWORD PTR 28[ebp]
add edx,esi
rol edx,cl
mov ebx,edx
xor ecx,ecx
mov cl,dh
and ebx,255
shr edx,16
xor eax,eax
mov al,dh
and edx,255
mov ecx,DWORD PTR _CAST_S_table0[ecx*4]
mov ebx,DWORD PTR _CAST_S_table1[ebx*4]
xor ecx,ebx
mov ebx,DWORD PTR _CAST_S_table2[eax*4]
sub ecx,ebx
mov ebx,DWORD PTR _CAST_S_table3[edx*4]
add ecx,ebx
xor edi,ecx
; round 2
mov edx,DWORD PTR 16[ebp]
mov ecx,DWORD PTR 20[ebp]
sub edx,edi
rol edx,cl
mov ebx,edx
xor ecx,ecx
mov cl,dh
and ebx,255
shr edx,16
xor eax,eax
mov al,dh
and edx,255
mov ecx,DWORD PTR _CAST_S_table0[ecx*4]
mov ebx,DWORD PTR _CAST_S_table1[ebx*4]
add ecx,ebx
mov ebx,DWORD PTR _CAST_S_table2[eax*4]
xor ecx,ebx
mov ebx,DWORD PTR _CAST_S_table3[edx*4]
sub ecx,ebx
xor esi,ecx
; round 1
mov edx,DWORD PTR 8[ebp]
mov ecx,DWORD PTR 12[ebp]
xor edx,esi
rol edx,cl
mov ebx,edx
xor ecx,ecx
mov cl,dh
and ebx,255
shr edx,16
xor eax,eax
mov al,dh
and edx,255
mov ecx,DWORD PTR _CAST_S_table0[ecx*4]
mov ebx,DWORD PTR _CAST_S_table1[ebx*4]
sub ecx,ebx
mov ebx,DWORD PTR _CAST_S_table2[eax*4]
add ecx,ebx
mov ebx,DWORD PTR _CAST_S_table3[edx*4]
xor ecx,ebx
xor edi,ecx
; round 0
mov edx,DWORD PTR [ebp]
mov ecx,DWORD PTR 4[ebp]
add edx,edi
rol edx,cl
mov ebx,edx
xor ecx,ecx
mov cl,dh
and ebx,255
shr edx,16
xor eax,eax
mov al,dh
and edx,255
mov ecx,DWORD PTR _CAST_S_table0[ecx*4]
mov ebx,DWORD PTR _CAST_S_table1[ebx*4]
xor ecx,ebx
mov ebx,DWORD PTR _CAST_S_table2[eax*4]
sub ecx,ebx
mov ebx,DWORD PTR _CAST_S_table3[edx*4]
add ecx,ebx
xor esi,ecx
nop
; store halves back swapped (undoes the final round's swap)
mov eax,DWORD PTR 20[esp]
mov DWORD PTR 4[eax],edi
mov DWORD PTR [eax],esi
pop edi
pop esi
pop ebx
pop ebp
ret
_CAST_decrypt ENDP
ALIGN 16
;-----------------------------------------------------------------------
; void CAST_cbc_encrypt(const unsigned char *in, unsigned char *out,
;                       long length, const CAST_KEY *ks,
;                       unsigned char *iv, int enc)          (cdecl)
; CBC wrapper generated from OpenSSL crypto/perlasm/cbc.pl around
; CAST_encrypt/CAST_decrypt (called via $L_CAST_*_begin).
;
; Frame after setup: 8/12[esp] = working 64-bit block handed to the
; cipher, 16/20[esp] = previous ciphertext (chaining value), esi = in,
; edi = out, ebp = length.  Whole blocks are processed in the *_loop
; paths; a residual 1..7 bytes is handled by the ej*/dj* tails.  On
; exit the updated IV (in eax/ebx) is written back through the iv arg.
;
; FIX vs. the generated original: the residual-decrypt stores at
; $L024dj2/$L025dj1 wrote through esi (the INPUT pointer) while every
; sibling dj label stores through edi (the output pointer); changed to
; edi to match upstream cbc.pl intent.  Note that nothing in this file
; jumps to dj1..dj3 (the decrypt tail falls straight into dj7), so the
; change cannot alter the observable behaviour of this build.
;-----------------------------------------------------------------------
_CAST_cbc_encrypt PROC PUBLIC
$L_CAST_cbc_encrypt_begin::
;
push ebp
push ebx
push esi
push edi
mov ebp,DWORD PTR 28[esp] ; ebp = length (arg 3)
; getting iv ptr from parameter 4
mov ebx,DWORD PTR 36[esp]
mov esi,DWORD PTR [ebx]
mov edi,DWORD PTR 4[ebx]
push edi ; two copies of the IV: chaining value
push esi
push edi ; ... and the working block
push esi
mov ebx,esp
mov esi,DWORD PTR 36[esp] ; esi = in
mov edi,DWORD PTR 40[esp] ; edi = out
; getting encrypt flag from parameter 5
mov ecx,DWORD PTR 56[esp]
; get and push parameter 3
mov eax,DWORD PTR 48[esp] ; key schedule, argument for CAST_encrypt
push eax
push ebx ; &working block, argument for CAST_encrypt
cmp ecx,0
jz $L002decrypt
and ebp,4294967288 ; round length down to whole 8-byte blocks
mov eax,DWORD PTR 8[esp] ; eax/ebx = IV
mov ebx,DWORD PTR 12[esp]
jz $L003encrypt_finish
$L004encrypt_loop: ; CBC encrypt: block = E(block ^ prev)
mov ecx,DWORD PTR [esi]
mov edx,DWORD PTR 4[esi]
xor eax,ecx
xor ebx,edx
bswap eax ; cipher works on big-endian words
bswap ebx
mov DWORD PTR 8[esp],eax
mov DWORD PTR 12[esp],ebx
call $L_CAST_encrypt_begin
mov eax,DWORD PTR 8[esp]
mov ebx,DWORD PTR 12[esp]
bswap eax
bswap ebx
mov DWORD PTR [edi],eax
mov DWORD PTR 4[edi],ebx
add esi,8
add edi,8
sub ebp,8
jnz $L004encrypt_loop
$L003encrypt_finish: ; 1..7 residual bytes, zero-style padded via jump table
mov ebp,DWORD PTR 52[esp]
and ebp,7
jz $L005finish
call $L006PIC_point
$L006PIC_point:
pop edx ; edx = runtime address of PIC_point
lea ecx,DWORD PTR ($L007cbc_enc_jmp_table-$L006PIC_point)[edx]
mov ebp,DWORD PTR [ebp*4+ecx]
add ebp,edx ; ebp = &ej<count>
xor ecx,ecx
xor edx,edx
jmp ebp
$L008ej7: ; fall-through chain reads the residual input bytes
mov dh,BYTE PTR 6[esi]
shl edx,8
$L009ej6:
mov dh,BYTE PTR 5[esi]
$L010ej5:
mov dl,BYTE PTR 4[esi]
$L011ej4:
mov ecx,DWORD PTR [esi]
jmp $L012ejend
$L013ej3:
mov ch,BYTE PTR 2[esi]
shl ecx,8
$L014ej2:
mov ch,BYTE PTR 1[esi]
$L015ej1:
mov cl,BYTE PTR [esi]
$L012ejend: ; encrypt the padded last block
xor eax,ecx
xor ebx,edx
bswap eax
bswap ebx
mov DWORD PTR 8[esp],eax
mov DWORD PTR 12[esp],ebx
call $L_CAST_encrypt_begin
mov eax,DWORD PTR 8[esp]
mov ebx,DWORD PTR 12[esp]
bswap eax
bswap ebx
mov DWORD PTR [edi],eax
mov DWORD PTR 4[edi],ebx
jmp $L005finish
$L002decrypt:
and ebp,4294967288
mov eax,DWORD PTR 16[esp] ; eax/ebx = chaining value (IV)
mov ebx,DWORD PTR 20[esp]
jz $L016decrypt_finish
$L017decrypt_loop: ; CBC decrypt: out = D(block) ^ prev; prev = block
mov eax,DWORD PTR [esi]
mov ebx,DWORD PTR 4[esi]
bswap eax
bswap ebx
mov DWORD PTR 8[esp],eax
mov DWORD PTR 12[esp],ebx
call $L_CAST_decrypt_begin
mov eax,DWORD PTR 8[esp]
mov ebx,DWORD PTR 12[esp]
bswap eax
bswap ebx
mov ecx,DWORD PTR 16[esp]
mov edx,DWORD PTR 20[esp]
xor ecx,eax
xor edx,ebx
mov eax,DWORD PTR [esi] ; reload ciphertext: next chaining value
mov ebx,DWORD PTR 4[esi]
mov DWORD PTR [edi],ecx
mov DWORD PTR 4[edi],edx
mov DWORD PTR 16[esp],eax
mov DWORD PTR 20[esp],ebx
add esi,8
add edi,8
sub ebp,8
jnz $L017decrypt_loop
$L016decrypt_finish: ; residual bytes: decrypt the final partial block
mov ebp,DWORD PTR 52[esp]
and ebp,7
jz $L005finish
mov eax,DWORD PTR [esi]
mov ebx,DWORD PTR 4[esi]
bswap eax
bswap ebx
mov DWORD PTR 8[esp],eax
mov DWORD PTR 12[esp],ebx
call $L_CAST_decrypt_begin
mov eax,DWORD PTR 8[esp]
mov ebx,DWORD PTR 12[esp]
bswap eax
bswap ebx
mov ecx,DWORD PTR 16[esp]
mov edx,DWORD PTR 20[esp]
xor ecx,eax
xor edx,ebx
mov eax,DWORD PTR [esi]
mov ebx,DWORD PTR 4[esi]
$L018dj7: ; no dispatch here: execution always falls in at dj7
ror edx,16
mov BYTE PTR 6[edi],dl
shr edx,16
$L019dj6:
mov BYTE PTR 5[edi],dh
$L020dj5:
mov BYTE PTR 4[edi],dl
$L021dj4:
mov DWORD PTR [edi],ecx
jmp $L022djend
$L023dj3: ; dj1..dj3 are unreachable in this build (see header)
ror ecx,16
mov BYTE PTR 2[edi],cl
shl ecx,16
$L024dj2:
mov BYTE PTR 1[edi],ch ; FIX: was 1[esi] (input ptr); siblings use edi
$L025dj1:
mov BYTE PTR [edi],cl ; FIX: was [esi] (input ptr); siblings use edi
$L022djend:
jmp $L005finish
$L005finish:
mov ecx,DWORD PTR 60[esp] ; iv argument
add esp,24 ; drop IV copies + the two cipher args
mov DWORD PTR [ecx],eax ; write back the updated IV
mov DWORD PTR 4[ecx],ebx
pop edi
pop esi
pop ebx
pop ebp
ret
ALIGN 64
$L007cbc_enc_jmp_table: ; PIC-relative targets for 0..7 residual bytes
DD 0
DD $L015ej1-$L006PIC_point
DD $L014ej2-$L006PIC_point
DD $L013ej3-$L006PIC_point
DD $L011ej4-$L006PIC_point
DD $L010ej5-$L006PIC_point
DD $L009ej6-$L006PIC_point
DD $L008ej7-$L006PIC_point
ALIGN 64
_CAST_cbc_encrypt ENDP
.text$ ENDS
END

909
deps/openssl/asm/x86-win32-masm/des/crypt586.asm

@ -0,0 +1,909 @@
TITLE crypt586.asm
IF @Version LT 800
ECHO MASM version 8.00 or later is strongly recommended.
ENDIF
.586
.MODEL FLAT
OPTION DOTNAME
IF @Version LT 800
.text$ SEGMENT PAGE 'CODE'
ELSE
.text$ SEGMENT ALIGN(64) 'CODE'
ENDIF
EXTERN _DES_SPtrans:NEAR
ALIGN 16
_fcrypt_body PROC PUBLIC
$L_fcrypt_body_begin::
push ebp
push ebx
push esi
push edi
;
; Load the 2 words
xor edi,edi
xor esi,esi
lea edx,DWORD PTR _DES_SPtrans
push edx
mov ebp,DWORD PTR 28[esp]
push 25
$L000start:
;
; Round 0
mov eax,DWORD PTR 36[esp]
mov edx,esi
shr edx,16
mov ecx,DWORD PTR 40[esp]
xor edx,esi
and eax,edx
and edx,ecx
mov ebx,eax
shl ebx,16
mov ecx,edx
shl ecx,16
xor eax,ebx
xor edx,ecx
mov ebx,DWORD PTR [ebp]
xor eax,ebx
mov ecx,DWORD PTR 4[ebp]
xor eax,esi
xor edx,esi
xor edx,ecx
and eax,0fcfcfcfch
xor ebx,ebx
and edx,0cfcfcfcfh
xor ecx,ecx
mov bl,al
mov cl,ah
ror edx,4
mov ebp,DWORD PTR 4[esp]
xor edi,DWORD PTR [ebx*1+ebp]
mov bl,dl
xor edi,DWORD PTR 0200h[ecx*1+ebp]
mov cl,dh
shr eax,16
xor edi,DWORD PTR 0100h[ebx*1+ebp]
mov bl,ah
shr edx,16
xor edi,DWORD PTR 0300h[ecx*1+ebp]
mov cl,dh
and eax,0ffh
and edx,0ffh
mov ebx,DWORD PTR 0600h[ebx*1+ebp]
xor edi,ebx
mov ebx,DWORD PTR 0700h[ecx*1+ebp]
xor edi,ebx
mov ebx,DWORD PTR 0400h[eax*1+ebp]
xor edi,ebx
mov ebx,DWORD PTR 0500h[edx*1+ebp]
xor edi,ebx
mov ebp,DWORD PTR 32[esp]
;
; Round 1
mov eax,DWORD PTR 36[esp]
mov edx,edi
shr edx,16
mov ecx,DWORD PTR 40[esp]
xor edx,edi
and eax,edx
and edx,ecx
mov ebx,eax
shl ebx,16
mov ecx,edx
shl ecx,16
xor eax,ebx
xor edx,ecx
mov ebx,DWORD PTR 8[ebp]
xor eax,ebx
mov ecx,DWORD PTR 12[ebp]
xor eax,edi
xor edx,edi
xor edx,ecx
and eax,0fcfcfcfch
xor ebx,ebx
and edx,0cfcfcfcfh
xor ecx,ecx
mov bl,al
mov cl,ah
ror edx,4
mov ebp,DWORD PTR 4[esp]
xor esi,DWORD PTR [ebx*1+ebp]
mov bl,dl
xor esi,DWORD PTR 0200h[ecx*1+ebp]
mov cl,dh
shr eax,16
xor esi,DWORD PTR 0100h[ebx*1+ebp]
mov bl,ah
shr edx,16
xor esi,DWORD PTR 0300h[ecx*1+ebp]
mov cl,dh
and eax,0ffh
and edx,0ffh
mov ebx,DWORD PTR 0600h[ebx*1+ebp]
xor esi,ebx
mov ebx,DWORD PTR 0700h[ecx*1+ebp]
xor esi,ebx
mov ebx,DWORD PTR 0400h[eax*1+ebp]
xor esi,ebx
mov ebx,DWORD PTR 0500h[edx*1+ebp]
xor esi,ebx
mov ebp,DWORD PTR 32[esp]
;
; Round 2
mov eax,DWORD PTR 36[esp]
mov edx,esi
shr edx,16
mov ecx,DWORD PTR 40[esp]
xor edx,esi
and eax,edx
and edx,ecx
mov ebx,eax
shl ebx,16
mov ecx,edx
shl ecx,16
xor eax,ebx
xor edx,ecx
mov ebx,DWORD PTR 16[ebp]
xor eax,ebx
mov ecx,DWORD PTR 20[ebp]
xor eax,esi
xor edx,esi
xor edx,ecx
and eax,0fcfcfcfch
xor ebx,ebx
and edx,0cfcfcfcfh
xor ecx,ecx
mov bl,al
mov cl,ah
ror edx,4
mov ebp,DWORD PTR 4[esp]
xor edi,DWORD PTR [ebx*1+ebp]
mov bl,dl
xor edi,DWORD PTR 0200h[ecx*1+ebp]
mov cl,dh
shr eax,16
xor edi,DWORD PTR 0100h[ebx*1+ebp]
mov bl,ah
shr edx,16
xor edi,DWORD PTR 0300h[ecx*1+ebp]
mov cl,dh
and eax,0ffh
and edx,0ffh
mov ebx,DWORD PTR 0600h[ebx*1+ebp]
xor edi,ebx
mov ebx,DWORD PTR 0700h[ecx*1+ebp]
xor edi,ebx
mov ebx,DWORD PTR 0400h[eax*1+ebp]
xor edi,ebx
mov ebx,DWORD PTR 0500h[edx*1+ebp]
xor edi,ebx
mov ebp,DWORD PTR 32[esp]
;
; Round 3
mov eax,DWORD PTR 36[esp]
mov edx,edi
shr edx,16
mov ecx,DWORD PTR 40[esp]
xor edx,edi
and eax,edx
and edx,ecx
mov ebx,eax
shl ebx,16
mov ecx,edx
shl ecx,16
xor eax,ebx
xor edx,ecx
mov ebx,DWORD PTR 24[ebp]
xor eax,ebx
mov ecx,DWORD PTR 28[ebp]
xor eax,edi
xor edx,edi
xor edx,ecx
and eax,0fcfcfcfch
xor ebx,ebx
and edx,0cfcfcfcfh
xor ecx,ecx
mov bl,al
mov cl,ah
ror edx,4
mov ebp,DWORD PTR 4[esp]
xor esi,DWORD PTR [ebx*1+ebp]
mov bl,dl
xor esi,DWORD PTR 0200h[ecx*1+ebp]
mov cl,dh
shr eax,16
xor esi,DWORD PTR 0100h[ebx*1+ebp]
mov bl,ah
shr edx,16
xor esi,DWORD PTR 0300h[ecx*1+ebp]
mov cl,dh
and eax,0ffh
and edx,0ffh
mov ebx,DWORD PTR 0600h[ebx*1+ebp]
xor esi,ebx
mov ebx,DWORD PTR 0700h[ecx*1+ebp]
xor esi,ebx
mov ebx,DWORD PTR 0400h[eax*1+ebp]
xor esi,ebx
mov ebx,DWORD PTR 0500h[edx*1+ebp]
xor esi,ebx
mov ebp,DWORD PTR 32[esp]
;
; Round 4
mov eax,DWORD PTR 36[esp]
mov edx,esi
shr edx,16
mov ecx,DWORD PTR 40[esp]
xor edx,esi
and eax,edx
and edx,ecx
mov ebx,eax
shl ebx,16
mov ecx,edx
shl ecx,16
xor eax,ebx
xor edx,ecx
mov ebx,DWORD PTR 32[ebp]
xor eax,ebx
mov ecx,DWORD PTR 36[ebp]
xor eax,esi
xor edx,esi
xor edx,ecx
and eax,0fcfcfcfch
xor ebx,ebx
and edx,0cfcfcfcfh
xor ecx,ecx
mov bl,al
mov cl,ah
ror edx,4
mov ebp,DWORD PTR 4[esp]
xor edi,DWORD PTR [ebx*1+ebp]
mov bl,dl
xor edi,DWORD PTR 0200h[ecx*1+ebp]
mov cl,dh
shr eax,16
xor edi,DWORD PTR 0100h[ebx*1+ebp]
mov bl,ah
shr edx,16
xor edi,DWORD PTR 0300h[ecx*1+ebp]
mov cl,dh
and eax,0ffh
and edx,0ffh
mov ebx,DWORD PTR 0600h[ebx*1+ebp]
xor edi,ebx
mov ebx,DWORD PTR 0700h[ecx*1+ebp]
xor edi,ebx
mov ebx,DWORD PTR 0400h[eax*1+ebp]
xor edi,ebx
mov ebx,DWORD PTR 0500h[edx*1+ebp]
xor edi,ebx
mov ebp,DWORD PTR 32[esp]
;
; Round 5
mov eax,DWORD PTR 36[esp]
mov edx,edi
shr edx,16
mov ecx,DWORD PTR 40[esp]
xor edx,edi
and eax,edx
and edx,ecx
mov ebx,eax
shl ebx,16
mov ecx,edx
shl ecx,16
xor eax,ebx
xor edx,ecx
mov ebx,DWORD PTR 40[ebp]
xor eax,ebx
mov ecx,DWORD PTR 44[ebp]
xor eax,edi
xor edx,edi
xor edx,ecx
and eax,0fcfcfcfch
xor ebx,ebx
and edx,0cfcfcfcfh
xor ecx,ecx
mov bl,al
mov cl,ah
ror edx,4
mov ebp,DWORD PTR 4[esp]
xor esi,DWORD PTR [ebx*1+ebp]
mov bl,dl
xor esi,DWORD PTR 0200h[ecx*1+ebp]
mov cl,dh
shr eax,16
xor esi,DWORD PTR 0100h[ebx*1+ebp]
mov bl,ah
shr edx,16
xor esi,DWORD PTR 0300h[ecx*1+ebp]
mov cl,dh
and eax,0ffh
and edx,0ffh
mov ebx,DWORD PTR 0600h[ebx*1+ebp]
xor esi,ebx
mov ebx,DWORD PTR 0700h[ecx*1+ebp]
xor esi,ebx
mov ebx,DWORD PTR 0400h[eax*1+ebp]
xor esi,ebx
mov ebx,DWORD PTR 0500h[edx*1+ebp]
xor esi,ebx
mov ebp,DWORD PTR 32[esp]
;
; Round 6
mov eax,DWORD PTR 36[esp]
mov edx,esi
shr edx,16
mov ecx,DWORD PTR 40[esp]
xor edx,esi
and eax,edx
and edx,ecx
mov ebx,eax
shl ebx,16
mov ecx,edx
shl ecx,16
xor eax,ebx
xor edx,ecx
mov ebx,DWORD PTR 48[ebp]
xor eax,ebx
mov ecx,DWORD PTR 52[ebp]
xor eax,esi
xor edx,esi
xor edx,ecx
and eax,0fcfcfcfch
xor ebx,ebx
and edx,0cfcfcfcfh
xor ecx,ecx
mov bl,al
mov cl,ah
ror edx,4
mov ebp,DWORD PTR 4[esp]
xor edi,DWORD PTR [ebx*1+ebp]
mov bl,dl
xor edi,DWORD PTR 0200h[ecx*1+ebp]
mov cl,dh
shr eax,16
xor edi,DWORD PTR 0100h[ebx*1+ebp]
mov bl,ah
shr edx,16
xor edi,DWORD PTR 0300h[ecx*1+ebp]
mov cl,dh
and eax,0ffh
and edx,0ffh
mov ebx,DWORD PTR 0600h[ebx*1+ebp]
xor edi,ebx
mov ebx,DWORD PTR 0700h[ecx*1+ebp]
xor edi,ebx
mov ebx,DWORD PTR 0400h[eax*1+ebp]
xor edi,ebx
mov ebx,DWORD PTR 0500h[edx*1+ebp]
xor edi,ebx
mov ebp,DWORD PTR 32[esp]
;
; Round 7
mov eax,DWORD PTR 36[esp]
mov edx,edi
shr edx,16
mov ecx,DWORD PTR 40[esp]
xor edx,edi
and eax,edx
and edx,ecx
mov ebx,eax
shl ebx,16
mov ecx,edx
shl ecx,16
xor eax,ebx
xor edx,ecx
mov ebx,DWORD PTR 56[ebp]
xor eax,ebx
mov ecx,DWORD PTR 60[ebp]
xor eax,edi
xor edx,edi
xor edx,ecx
and eax,0fcfcfcfch
xor ebx,ebx
and edx,0cfcfcfcfh
xor ecx,ecx
mov bl,al
mov cl,ah
ror edx,4
mov ebp,DWORD PTR 4[esp]
xor esi,DWORD PTR [ebx*1+ebp]
mov bl,dl
xor esi,DWORD PTR 0200h[ecx*1+ebp]
mov cl,dh
shr eax,16
xor esi,DWORD PTR 0100h[ebx*1+ebp]
mov bl,ah
shr edx,16
xor esi,DWORD PTR 0300h[ecx*1+ebp]
mov cl,dh
and eax,0ffh
and edx,0ffh
mov ebx,DWORD PTR 0600h[ebx*1+ebp]
xor esi,ebx
mov ebx,DWORD PTR 0700h[ecx*1+ebp]
xor esi,ebx
mov ebx,DWORD PTR 0400h[eax*1+ebp]
xor esi,ebx
mov ebx,DWORD PTR 0500h[edx*1+ebp]
xor esi,ebx
mov ebp,DWORD PTR 32[esp]
;
; Round 8
mov eax,DWORD PTR 36[esp]
mov edx,esi
shr edx,16
mov ecx,DWORD PTR 40[esp]
xor edx,esi
and eax,edx
and edx,ecx
mov ebx,eax
shl ebx,16
mov ecx,edx
shl ecx,16
xor eax,ebx
xor edx,ecx
mov ebx,DWORD PTR 64[ebp]
xor eax,ebx
mov ecx,DWORD PTR 68[ebp]
xor eax,esi
xor edx,esi
xor edx,ecx
and eax,0fcfcfcfch
xor ebx,ebx
and edx,0cfcfcfcfh
xor ecx,ecx
mov bl,al
mov cl,ah
ror edx,4
mov ebp,DWORD PTR 4[esp]
xor edi,DWORD PTR [ebx*1+ebp]
mov bl,dl
xor edi,DWORD PTR 0200h[ecx*1+ebp]
mov cl,dh
shr eax,16
xor edi,DWORD PTR 0100h[ebx*1+ebp]
mov bl,ah
shr edx,16
xor edi,DWORD PTR 0300h[ecx*1+ebp]
mov cl,dh
and eax,0ffh
and edx,0ffh
mov ebx,DWORD PTR 0600h[ebx*1+ebp]
xor edi,ebx
mov ebx,DWORD PTR 0700h[ecx*1+ebp]
xor edi,ebx
mov ebx,DWORD PTR 0400h[eax*1+ebp]
xor edi,ebx
mov ebx,DWORD PTR 0500h[edx*1+ebp]
xor edi,ebx
mov ebp,DWORD PTR 32[esp]
;
; Round 9
mov eax,DWORD PTR 36[esp]
mov edx,edi
shr edx,16
mov ecx,DWORD PTR 40[esp]
xor edx,edi
and eax,edx
and edx,ecx
mov ebx,eax
shl ebx,16
mov ecx,edx
shl ecx,16
xor eax,ebx
xor edx,ecx
mov ebx,DWORD PTR 72[ebp]
xor eax,ebx
mov ecx,DWORD PTR 76[ebp]
xor eax,edi
xor edx,edi
xor edx,ecx
and eax,0fcfcfcfch
xor ebx,ebx
and edx,0cfcfcfcfh
xor ecx,ecx
mov bl,al
mov cl,ah
ror edx,4
mov ebp,DWORD PTR 4[esp]
xor esi,DWORD PTR [ebx*1+ebp]
mov bl,dl
xor esi,DWORD PTR 0200h[ecx*1+ebp]
mov cl,dh
shr eax,16
xor esi,DWORD PTR 0100h[ebx*1+ebp]
mov bl,ah
shr edx,16
xor esi,DWORD PTR 0300h[ecx*1+ebp]
mov cl,dh
and eax,0ffh
and edx,0ffh
mov ebx,DWORD PTR 0600h[ebx*1+ebp]
xor esi,ebx
mov ebx,DWORD PTR 0700h[ecx*1+ebp]
xor esi,ebx
mov ebx,DWORD PTR 0400h[eax*1+ebp]
xor esi,ebx
mov ebx,DWORD PTR 0500h[edx*1+ebp]
xor esi,ebx
mov ebp,DWORD PTR 32[esp]
;
; Round 10
mov eax,DWORD PTR 36[esp]
mov edx,esi
shr edx,16
mov ecx,DWORD PTR 40[esp]
xor edx,esi
and eax,edx
and edx,ecx
mov ebx,eax
shl ebx,16
mov ecx,edx
shl ecx,16
xor eax,ebx
xor edx,ecx
mov ebx,DWORD PTR 80[ebp]
xor eax,ebx
mov ecx,DWORD PTR 84[ebp]
xor eax,esi
xor edx,esi
xor edx,ecx
and eax,0fcfcfcfch
xor ebx,ebx
and edx,0cfcfcfcfh
xor ecx,ecx
mov bl,al
mov cl,ah
ror edx,4
mov ebp,DWORD PTR 4[esp]
xor edi,DWORD PTR [ebx*1+ebp]
mov bl,dl
xor edi,DWORD PTR 0200h[ecx*1+ebp]
mov cl,dh
shr eax,16
xor edi,DWORD PTR 0100h[ebx*1+ebp]
mov bl,ah
shr edx,16
xor edi,DWORD PTR 0300h[ecx*1+ebp]
mov cl,dh
and eax,0ffh
and edx,0ffh
mov ebx,DWORD PTR 0600h[ebx*1+ebp]
xor edi,ebx
mov ebx,DWORD PTR 0700h[ecx*1+ebp]
xor edi,ebx
mov ebx,DWORD PTR 0400h[eax*1+ebp]
xor edi,ebx
mov ebx,DWORD PTR 0500h[edx*1+ebp]
xor edi,ebx
mov ebp,DWORD PTR 32[esp]
;
; Round 11
mov eax,DWORD PTR 36[esp]
mov edx,edi
shr edx,16
mov ecx,DWORD PTR 40[esp]
xor edx,edi
and eax,edx
and edx,ecx
mov ebx,eax
shl ebx,16
mov ecx,edx
shl ecx,16
xor eax,ebx
xor edx,ecx
mov ebx,DWORD PTR 88[ebp]
xor eax,ebx
mov ecx,DWORD PTR 92[ebp]
xor eax,edi
xor edx,edi
xor edx,ecx
and eax,0fcfcfcfch
xor ebx,ebx
and edx,0cfcfcfcfh
xor ecx,ecx
mov bl,al
mov cl,ah
ror edx,4
mov ebp,DWORD PTR 4[esp]
xor esi,DWORD PTR [ebx*1+ebp]
mov bl,dl
xor esi,DWORD PTR 0200h[ecx*1+ebp]
mov cl,dh
shr eax,16
xor esi,DWORD PTR 0100h[ebx*1+ebp]
mov bl,ah
shr edx,16
xor esi,DWORD PTR 0300h[ecx*1+ebp]
mov cl,dh
and eax,0ffh
and edx,0ffh
mov ebx,DWORD PTR 0600h[ebx*1+ebp]
xor esi,ebx
mov ebx,DWORD PTR 0700h[ecx*1+ebp]
xor esi,ebx
mov ebx,DWORD PTR 0400h[eax*1+ebp]
xor esi,ebx
mov ebx,DWORD PTR 0500h[edx*1+ebp]
xor esi,ebx
mov ebp,DWORD PTR 32[esp]
;
; Round 12
mov eax,DWORD PTR 36[esp]
mov edx,esi
shr edx,16
mov ecx,DWORD PTR 40[esp]
xor edx,esi
and eax,edx
and edx,ecx
mov ebx,eax
shl ebx,16
mov ecx,edx
shl ecx,16
xor eax,ebx
xor edx,ecx
mov ebx,DWORD PTR 96[ebp]
xor eax,ebx
mov ecx,DWORD PTR 100[ebp]
xor eax,esi
xor edx,esi
xor edx,ecx
and eax,0fcfcfcfch
xor ebx,ebx
and edx,0cfcfcfcfh
xor ecx,ecx
mov bl,al
mov cl,ah
ror edx,4
mov ebp,DWORD PTR 4[esp]
xor edi,DWORD PTR [ebx*1+ebp]
mov bl,dl
xor edi,DWORD PTR 0200h[ecx*1+ebp]
mov cl,dh
shr eax,16
xor edi,DWORD PTR 0100h[ebx*1+ebp]
mov bl,ah
shr edx,16
xor edi,DWORD PTR 0300h[ecx*1+ebp]
mov cl,dh
and eax,0ffh
and edx,0ffh
mov ebx,DWORD PTR 0600h[ebx*1+ebp]
xor edi,ebx
mov ebx,DWORD PTR 0700h[ecx*1+ebp]
xor edi,ebx
mov ebx,DWORD PTR 0400h[eax*1+ebp]
xor edi,ebx
mov ebx,DWORD PTR 0500h[edx*1+ebp]
xor edi,ebx
mov ebp,DWORD PTR 32[esp]
;
; Round 13
mov eax,DWORD PTR 36[esp]
mov edx,edi
shr edx,16
mov ecx,DWORD PTR 40[esp]
xor edx,edi
and eax,edx
and edx,ecx
mov ebx,eax
shl ebx,16
mov ecx,edx
shl ecx,16
xor eax,ebx
xor edx,ecx
mov ebx,DWORD PTR 104[ebp]
xor eax,ebx
mov ecx,DWORD PTR 108[ebp]
xor eax,edi
xor edx,edi
xor edx,ecx
and eax,0fcfcfcfch
xor ebx,ebx
and edx,0cfcfcfcfh
xor ecx,ecx
mov bl,al
mov cl,ah
ror edx,4
mov ebp,DWORD PTR 4[esp]
xor esi,DWORD PTR [ebx*1+ebp]
mov bl,dl
xor esi,DWORD PTR 0200h[ecx*1+ebp]
mov cl,dh
shr eax,16
xor esi,DWORD PTR 0100h[ebx*1+ebp]
mov bl,ah
shr edx,16
xor esi,DWORD PTR 0300h[ecx*1+ebp]
mov cl,dh
and eax,0ffh
and edx,0ffh
mov ebx,DWORD PTR 0600h[ebx*1+ebp]
xor esi,ebx
mov ebx,DWORD PTR 0700h[ecx*1+ebp]
xor esi,ebx
mov ebx,DWORD PTR 0400h[eax*1+ebp]
xor esi,ebx
mov ebx,DWORD PTR 0500h[edx*1+ebp]
xor esi,ebx
mov ebp,DWORD PTR 32[esp]
;
; Round 14
mov eax,DWORD PTR 36[esp]
mov edx,esi
shr edx,16
mov ecx,DWORD PTR 40[esp]
xor edx,esi
and eax,edx
and edx,ecx
mov ebx,eax
shl ebx,16
mov ecx,edx
shl ecx,16
xor eax,ebx
xor edx,ecx
mov ebx,DWORD PTR 112[ebp]
xor eax,ebx
mov ecx,DWORD PTR 116[ebp]
xor eax,esi
xor edx,esi
xor edx,ecx
and eax,0fcfcfcfch
xor ebx,ebx
and edx,0cfcfcfcfh
xor ecx,ecx
mov bl,al
mov cl,ah
ror edx,4
mov ebp,DWORD PTR 4[esp]
xor edi,DWORD PTR [ebx*1+ebp]
mov bl,dl
xor edi,DWORD PTR 0200h[ecx*1+ebp]
mov cl,dh
shr eax,16
xor edi,DWORD PTR 0100h[ebx*1+ebp]
mov bl,ah
shr edx,16
xor edi,DWORD PTR 0300h[ecx*1+ebp]
mov cl,dh
and eax,0ffh
and edx,0ffh
mov ebx,DWORD PTR 0600h[ebx*1+ebp]
xor edi,ebx
mov ebx,DWORD PTR 0700h[ecx*1+ebp]
xor edi,ebx
mov ebx,DWORD PTR 0400h[eax*1+ebp]
xor edi,ebx
mov ebx,DWORD PTR 0500h[edx*1+ebp]
xor edi,ebx
mov ebp,DWORD PTR 32[esp]
;
; Round 15
mov eax,DWORD PTR 36[esp]
mov edx,edi
shr edx,16
mov ecx,DWORD PTR 40[esp]
xor edx,edi
and eax,edx
and edx,ecx
mov ebx,eax
shl ebx,16
mov ecx,edx
shl ecx,16
xor eax,ebx
xor edx,ecx
mov ebx,DWORD PTR 120[ebp]
xor eax,ebx
mov ecx,DWORD PTR 124[ebp]
xor eax,edi
xor edx,edi
xor edx,ecx
and eax,0fcfcfcfch
xor ebx,ebx
and edx,0cfcfcfcfh
xor ecx,ecx
mov bl,al
mov cl,ah
ror edx,4
mov ebp,DWORD PTR 4[esp]
xor esi,DWORD PTR [ebx*1+ebp]
mov bl,dl
xor esi,DWORD PTR 0200h[ecx*1+ebp]
mov cl,dh
shr eax,16
xor esi,DWORD PTR 0100h[ebx*1+ebp]
mov bl,ah
shr edx,16
xor esi,DWORD PTR 0300h[ecx*1+ebp]
mov cl,dh
and eax,0ffh
and edx,0ffh
mov ebx,DWORD PTR 0600h[ebx*1+ebp]
xor esi,ebx
mov ebx,DWORD PTR 0700h[ecx*1+ebp]
xor esi,ebx
mov ebx,DWORD PTR 0400h[eax*1+ebp]
xor esi,ebx
mov ebx,DWORD PTR 0500h[edx*1+ebp]
xor esi,ebx
mov ebp,DWORD PTR 32[esp]
mov ebx,DWORD PTR [esp]
mov eax,edi
dec ebx
mov edi,esi
mov esi,eax
mov DWORD PTR [esp],ebx
jnz $L000start
;
; FP
mov edx,DWORD PTR 28[esp]
ror edi,1
mov eax,esi
xor esi,edi
and esi,0aaaaaaaah
xor eax,esi
xor edi,esi
;
rol eax,23
mov esi,eax
xor eax,edi
and eax,003fc03fch
xor esi,eax
xor edi,eax
;
rol esi,10
mov eax,esi
xor esi,edi
and esi,033333333h
xor eax,esi
xor edi,esi
;
rol edi,18
mov esi,edi
xor edi,eax
and edi,0fff0000fh
xor esi,edi
xor eax,edi
;
rol esi,12
mov edi,esi
xor esi,eax
and esi,0f0f0f0f0h
xor edi,esi
xor eax,esi
;
ror eax,4
mov DWORD PTR [edx],eax
mov DWORD PTR 4[edx],edi
add esp,8
pop edi
pop esi
pop ebx
pop ebp
ret
_fcrypt_body ENDP
.text$ ENDS
END

1878
deps/openssl/asm/x86-win32-masm/des/des-586.asm

File diff suppressed because it is too large

693
deps/openssl/asm/x86-win32-masm/md5/md5-586.asm

@ -0,0 +1,693 @@
TITLE ../openssl/crypto/md5/asm/md5-586.asm
IF @Version LT 800
ECHO MASM version 8.00 or later is strongly recommended.
ENDIF
.586
.MODEL FLAT
OPTION DOTNAME
IF @Version LT 800
.text$ SEGMENT PAGE 'CODE'
ELSE
.text$ SEGMENT ALIGN(64) 'CODE'
ENDIF
ALIGN 16
_md5_block_asm_data_order PROC PUBLIC
$L_md5_block_asm_data_order_begin::
; void md5_block_asm_data_order(MD5_CTX *ctx, const void *data, size_t num)
; Generated by OpenSSL's md5-586.pl (perlasm); comments added for review only.
; cdecl args (after the two pushes below): 12[esp]=ctx, 16[esp]=data, 20[esp]=num.
; Inside the main loop: eax=A ebx=B ecx=C edx=D (MD5 state), esi=data pointer,
; ebp=current message word X[k] (also reused for the ctx pointer at the end),
; edi=round-function scratch.
push esi
push edi
mov edi,DWORD PTR 12[esp]	; edi = ctx
mov esi,DWORD PTR 16[esp]	; esi = data
mov ecx,DWORD PTR 20[esp]	; ecx = num (64-byte blocks)
push ebp
shl ecx,6	; num * 64 = total byte count
push ebx
add ecx,esi
sub ecx,64	; ecx = pointer to the start of the last block
mov eax,DWORD PTR [edi]	; A
push ecx	; [esp] = loop bound (last-block pointer)
mov ebx,DWORD PTR 4[edi]	; B
mov ecx,DWORD PTR 8[edi]	; C
mov edx,DWORD PTR 12[edi]	; D
$L000start:
;
; R0 section
; F(b,c,d) = (b AND c) OR ((NOT b) AND d), computed as ((c^d)&b)^d.
; Each round: a = b + rol(a + F(b,c,d) + X[k] + T[i], s); the T constant is
; folded into the lea, X[k] is pre-loaded into ebp.
mov edi,ecx
mov ebp,DWORD PTR [esi]
; R0 0
xor edi,edx
and edi,ebx
lea eax,DWORD PTR 3614090360[ebp*1+eax]
xor edi,edx
add eax,edi
mov edi,ebx
rol eax,7
mov ebp,DWORD PTR 4[esi]
add eax,ebx
; R0 1
xor edi,ecx
and edi,eax
lea edx,DWORD PTR 3905402710[ebp*1+edx]
xor edi,ecx
add edx,edi
mov edi,eax
rol edx,12
mov ebp,DWORD PTR 8[esi]
add edx,eax
; R0 2
xor edi,ebx
and edi,edx
lea ecx,DWORD PTR 606105819[ebp*1+ecx]
xor edi,ebx
add ecx,edi
mov edi,edx
rol ecx,17
mov ebp,DWORD PTR 12[esi]
add ecx,edx
; R0 3
xor edi,eax
and edi,ecx
lea ebx,DWORD PTR 3250441966[ebp*1+ebx]
xor edi,eax
add ebx,edi
mov edi,ecx
rol ebx,22
mov ebp,DWORD PTR 16[esi]
add ebx,ecx
; R0 4
xor edi,edx
and edi,ebx
lea eax,DWORD PTR 4118548399[ebp*1+eax]
xor edi,edx
add eax,edi
mov edi,ebx
rol eax,7
mov ebp,DWORD PTR 20[esi]
add eax,ebx
; R0 5
xor edi,ecx
and edi,eax
lea edx,DWORD PTR 1200080426[ebp*1+edx]
xor edi,ecx
add edx,edi
mov edi,eax
rol edx,12
mov ebp,DWORD PTR 24[esi]
add edx,eax
; R0 6
xor edi,ebx
and edi,edx
lea ecx,DWORD PTR 2821735955[ebp*1+ecx]
xor edi,ebx
add ecx,edi
mov edi,edx
rol ecx,17
mov ebp,DWORD PTR 28[esi]
add ecx,edx
; R0 7
xor edi,eax
and edi,ecx
lea ebx,DWORD PTR 4249261313[ebp*1+ebx]
xor edi,eax
add ebx,edi
mov edi,ecx
rol ebx,22
mov ebp,DWORD PTR 32[esi]
add ebx,ecx
; R0 8
xor edi,edx
and edi,ebx
lea eax,DWORD PTR 1770035416[ebp*1+eax]
xor edi,edx
add eax,edi
mov edi,ebx
rol eax,7
mov ebp,DWORD PTR 36[esi]
add eax,ebx
; R0 9
xor edi,ecx
and edi,eax
lea edx,DWORD PTR 2336552879[ebp*1+edx]
xor edi,ecx
add edx,edi
mov edi,eax
rol edx,12
mov ebp,DWORD PTR 40[esi]
add edx,eax
; R0 10
xor edi,ebx
and edi,edx
lea ecx,DWORD PTR 4294925233[ebp*1+ecx]
xor edi,ebx
add ecx,edi
mov edi,edx
rol ecx,17
mov ebp,DWORD PTR 44[esi]
add ecx,edx
; R0 11
xor edi,eax
and edi,ecx
lea ebx,DWORD PTR 2304563134[ebp*1+ebx]
xor edi,eax
add ebx,edi
mov edi,ecx
rol ebx,22
mov ebp,DWORD PTR 48[esi]
add ebx,ecx
; R0 12
xor edi,edx
and edi,ebx
lea eax,DWORD PTR 1804603682[ebp*1+eax]
xor edi,edx
add eax,edi
mov edi,ebx
rol eax,7
mov ebp,DWORD PTR 52[esi]
add eax,ebx
; R0 13
xor edi,ecx
and edi,eax
lea edx,DWORD PTR 4254626195[ebp*1+edx]
xor edi,ecx
add edx,edi
mov edi,eax
rol edx,12
mov ebp,DWORD PTR 56[esi]
add edx,eax
; R0 14
xor edi,ebx
and edi,edx
lea ecx,DWORD PTR 2792965006[ebp*1+ecx]
xor edi,ebx
add ecx,edi
mov edi,edx
rol ecx,17
mov ebp,DWORD PTR 60[esi]
add ecx,edx
; R0 15
xor edi,eax
and edi,ecx
lea ebx,DWORD PTR 1236535329[ebp*1+ebx]
xor edi,eax
add ebx,edi
mov edi,ecx
rol ebx,22
mov ebp,DWORD PTR 4[esi]
add ebx,ecx
;
; R1 section
; G(b,c,d) = (b AND d) OR (c AND (NOT d)), computed as ((b^c)&d)^c.
; R1 16
lea eax,DWORD PTR 4129170786[ebp*1+eax]
xor edi,ebx
and edi,edx
mov ebp,DWORD PTR 24[esi]
xor edi,ecx
add eax,edi
mov edi,ebx
rol eax,5
add eax,ebx
; R1 17
lea edx,DWORD PTR 3225465664[ebp*1+edx]
xor edi,eax
and edi,ecx
mov ebp,DWORD PTR 44[esi]
xor edi,ebx
add edx,edi
mov edi,eax
rol edx,9
add edx,eax
; R1 18
lea ecx,DWORD PTR 643717713[ebp*1+ecx]
xor edi,edx
and edi,ebx
mov ebp,DWORD PTR [esi]
xor edi,eax
add ecx,edi
mov edi,edx
rol ecx,14
add ecx,edx
; R1 19
lea ebx,DWORD PTR 3921069994[ebp*1+ebx]
xor edi,ecx
and edi,eax
mov ebp,DWORD PTR 20[esi]
xor edi,edx
add ebx,edi
mov edi,ecx
rol ebx,20
add ebx,ecx
; R1 20
lea eax,DWORD PTR 3593408605[ebp*1+eax]
xor edi,ebx
and edi,edx
mov ebp,DWORD PTR 40[esi]
xor edi,ecx
add eax,edi
mov edi,ebx
rol eax,5
add eax,ebx
; R1 21
lea edx,DWORD PTR 38016083[ebp*1+edx]
xor edi,eax
and edi,ecx
mov ebp,DWORD PTR 60[esi]
xor edi,ebx
add edx,edi
mov edi,eax
rol edx,9
add edx,eax
; R1 22
lea ecx,DWORD PTR 3634488961[ebp*1+ecx]
xor edi,edx
and edi,ebx
mov ebp,DWORD PTR 16[esi]
xor edi,eax
add ecx,edi
mov edi,edx
rol ecx,14
add ecx,edx
; R1 23
lea ebx,DWORD PTR 3889429448[ebp*1+ebx]
xor edi,ecx
and edi,eax
mov ebp,DWORD PTR 36[esi]
xor edi,edx
add ebx,edi
mov edi,ecx
rol ebx,20
add ebx,ecx
; R1 24
lea eax,DWORD PTR 568446438[ebp*1+eax]
xor edi,ebx
and edi,edx
mov ebp,DWORD PTR 56[esi]
xor edi,ecx
add eax,edi
mov edi,ebx
rol eax,5
add eax,ebx
; R1 25
lea edx,DWORD PTR 3275163606[ebp*1+edx]
xor edi,eax
and edi,ecx
mov ebp,DWORD PTR 12[esi]
xor edi,ebx
add edx,edi
mov edi,eax
rol edx,9
add edx,eax
; R1 26
lea ecx,DWORD PTR 4107603335[ebp*1+ecx]
xor edi,edx
and edi,ebx
mov ebp,DWORD PTR 32[esi]
xor edi,eax
add ecx,edi
mov edi,edx
rol ecx,14
add ecx,edx
; R1 27
lea ebx,DWORD PTR 1163531501[ebp*1+ebx]
xor edi,ecx
and edi,eax
mov ebp,DWORD PTR 52[esi]
xor edi,edx
add ebx,edi
mov edi,ecx
rol ebx,20
add ebx,ecx
; R1 28
lea eax,DWORD PTR 2850285829[ebp*1+eax]
xor edi,ebx
and edi,edx
mov ebp,DWORD PTR 8[esi]
xor edi,ecx
add eax,edi
mov edi,ebx
rol eax,5
add eax,ebx
; R1 29
lea edx,DWORD PTR 4243563512[ebp*1+edx]
xor edi,eax
and edi,ecx
mov ebp,DWORD PTR 28[esi]
xor edi,ebx
add edx,edi
mov edi,eax
rol edx,9
add edx,eax
; R1 30
lea ecx,DWORD PTR 1735328473[ebp*1+ecx]
xor edi,edx
and edi,ebx
mov ebp,DWORD PTR 48[esi]
xor edi,eax
add ecx,edi
mov edi,edx
rol ecx,14
add ecx,edx
; R1 31
lea ebx,DWORD PTR 2368359562[ebp*1+ebx]
xor edi,ecx
and edi,eax
mov ebp,DWORD PTR 20[esi]
xor edi,edx
add ebx,edi
mov edi,ecx
rol ebx,20
add ebx,ecx
;
; R2 section
; H(b,c,d) = b XOR c XOR d.
; R2 32
xor edi,edx
xor edi,ebx
lea eax,DWORD PTR 4294588738[ebp*1+eax]
add eax,edi
rol eax,4
mov ebp,DWORD PTR 32[esi]
mov edi,ebx
; R2 33
lea edx,DWORD PTR 2272392833[ebp*1+edx]
add eax,ebx
xor edi,ecx
xor edi,eax
mov ebp,DWORD PTR 44[esi]
add edx,edi
mov edi,eax
rol edx,11
add edx,eax
; R2 34
xor edi,ebx
xor edi,edx
lea ecx,DWORD PTR 1839030562[ebp*1+ecx]
add ecx,edi
rol ecx,16
mov ebp,DWORD PTR 56[esi]
mov edi,edx
; R2 35
lea ebx,DWORD PTR 4259657740[ebp*1+ebx]
add ecx,edx
xor edi,eax
xor edi,ecx
mov ebp,DWORD PTR 4[esi]
add ebx,edi
mov edi,ecx
rol ebx,23
add ebx,ecx
; R2 36
xor edi,edx
xor edi,ebx
lea eax,DWORD PTR 2763975236[ebp*1+eax]
add eax,edi
rol eax,4
mov ebp,DWORD PTR 16[esi]
mov edi,ebx
; R2 37
lea edx,DWORD PTR 1272893353[ebp*1+edx]
add eax,ebx
xor edi,ecx
xor edi,eax
mov ebp,DWORD PTR 28[esi]
add edx,edi
mov edi,eax
rol edx,11
add edx,eax
; R2 38
xor edi,ebx
xor edi,edx
lea ecx,DWORD PTR 4139469664[ebp*1+ecx]
add ecx,edi
rol ecx,16
mov ebp,DWORD PTR 40[esi]
mov edi,edx
; R2 39
lea ebx,DWORD PTR 3200236656[ebp*1+ebx]
add ecx,edx
xor edi,eax
xor edi,ecx
mov ebp,DWORD PTR 52[esi]
add ebx,edi
mov edi,ecx
rol ebx,23
add ebx,ecx
; R2 40
xor edi,edx
xor edi,ebx
lea eax,DWORD PTR 681279174[ebp*1+eax]
add eax,edi
rol eax,4
mov ebp,DWORD PTR [esi]
mov edi,ebx
; R2 41
lea edx,DWORD PTR 3936430074[ebp*1+edx]
add eax,ebx
xor edi,ecx
xor edi,eax
mov ebp,DWORD PTR 12[esi]
add edx,edi
mov edi,eax
rol edx,11
add edx,eax
; R2 42
xor edi,ebx
xor edi,edx
lea ecx,DWORD PTR 3572445317[ebp*1+ecx]
add ecx,edi
rol ecx,16
mov ebp,DWORD PTR 24[esi]
mov edi,edx
; R2 43
lea ebx,DWORD PTR 76029189[ebp*1+ebx]
add ecx,edx
xor edi,eax
xor edi,ecx
mov ebp,DWORD PTR 36[esi]
add ebx,edi
mov edi,ecx
rol ebx,23
add ebx,ecx
; R2 44
xor edi,edx
xor edi,ebx
lea eax,DWORD PTR 3654602809[ebp*1+eax]
add eax,edi
rol eax,4
mov ebp,DWORD PTR 48[esi]
mov edi,ebx
; R2 45
lea edx,DWORD PTR 3873151461[ebp*1+edx]
add eax,ebx
xor edi,ecx
xor edi,eax
mov ebp,DWORD PTR 60[esi]
add edx,edi
mov edi,eax
rol edx,11
add edx,eax
; R2 46
xor edi,ebx
xor edi,edx
lea ecx,DWORD PTR 530742520[ebp*1+ecx]
add ecx,edi
rol ecx,16
mov ebp,DWORD PTR 8[esi]
mov edi,edx
; R2 47
lea ebx,DWORD PTR 3299628645[ebp*1+ebx]
add ecx,edx
xor edi,eax
xor edi,ecx
mov ebp,DWORD PTR [esi]
add ebx,edi
mov edi,-1	; edi = ~0, seeds the NOT in the first R3 round below
rol ebx,23
add ebx,ecx
;
; R3 section
; I(b,c,d) = c XOR (b OR (NOT d)); "xor edi,<d>" on ~0 forms NOT d,
; then "or edi,<b>" and "xor edi,<c>".  edi is re-seeded with -1 each round.
; R3 48
xor edi,edx
or edi,ebx
lea eax,DWORD PTR 4096336452[ebp*1+eax]
xor edi,ecx
mov ebp,DWORD PTR 28[esi]
add eax,edi
mov edi,-1
rol eax,6
xor edi,ecx
add eax,ebx
; R3 49
or edi,eax
lea edx,DWORD PTR 1126891415[ebp*1+edx]
xor edi,ebx
mov ebp,DWORD PTR 56[esi]
add edx,edi
mov edi,-1
rol edx,10
xor edi,ebx
add edx,eax
; R3 50
or edi,edx
lea ecx,DWORD PTR 2878612391[ebp*1+ecx]
xor edi,eax
mov ebp,DWORD PTR 20[esi]
add ecx,edi
mov edi,-1
rol ecx,15
xor edi,eax
add ecx,edx
; R3 51
or edi,ecx
lea ebx,DWORD PTR 4237533241[ebp*1+ebx]
xor edi,edx
mov ebp,DWORD PTR 48[esi]
add ebx,edi
mov edi,-1
rol ebx,21
xor edi,edx
add ebx,ecx
; R3 52
or edi,ebx
lea eax,DWORD PTR 1700485571[ebp*1+eax]
xor edi,ecx
mov ebp,DWORD PTR 12[esi]
add eax,edi
mov edi,-1
rol eax,6
xor edi,ecx
add eax,ebx
; R3 53
or edi,eax
lea edx,DWORD PTR 2399980690[ebp*1+edx]
xor edi,ebx
mov ebp,DWORD PTR 40[esi]
add edx,edi
mov edi,-1
rol edx,10
xor edi,ebx
add edx,eax
; R3 54
or edi,edx
lea ecx,DWORD PTR 4293915773[ebp*1+ecx]
xor edi,eax
mov ebp,DWORD PTR 4[esi]
add ecx,edi
mov edi,-1
rol ecx,15
xor edi,eax
add ecx,edx
; R3 55
or edi,ecx
lea ebx,DWORD PTR 2240044497[ebp*1+ebx]
xor edi,edx
mov ebp,DWORD PTR 32[esi]
add ebx,edi
mov edi,-1
rol ebx,21
xor edi,edx
add ebx,ecx
; R3 56
or edi,ebx
lea eax,DWORD PTR 1873313359[ebp*1+eax]
xor edi,ecx
mov ebp,DWORD PTR 60[esi]
add eax,edi
mov edi,-1
rol eax,6
xor edi,ecx
add eax,ebx
; R3 57
or edi,eax
lea edx,DWORD PTR 4264355552[ebp*1+edx]
xor edi,ebx
mov ebp,DWORD PTR 24[esi]
add edx,edi
mov edi,-1
rol edx,10
xor edi,ebx
add edx,eax
; R3 58
or edi,edx
lea ecx,DWORD PTR 2734768916[ebp*1+ecx]
xor edi,eax
mov ebp,DWORD PTR 52[esi]
add ecx,edi
mov edi,-1
rol ecx,15
xor edi,eax
add ecx,edx
; R3 59
or edi,ecx
lea ebx,DWORD PTR 1309151649[ebp*1+ebx]
xor edi,edx
mov ebp,DWORD PTR 16[esi]
add ebx,edi
mov edi,-1
rol ebx,21
xor edi,edx
add ebx,ecx
; R3 60
or edi,ebx
lea eax,DWORD PTR 4149444226[ebp*1+eax]
xor edi,ecx
mov ebp,DWORD PTR 44[esi]
add eax,edi
mov edi,-1
rol eax,6
xor edi,ecx
add eax,ebx
; R3 61
or edi,eax
lea edx,DWORD PTR 3174756917[ebp*1+edx]
xor edi,ebx
mov ebp,DWORD PTR 8[esi]
add edx,edi
mov edi,-1
rol edx,10
xor edi,ebx
add edx,eax
; R3 62
or edi,edx
lea ecx,DWORD PTR 718787259[ebp*1+ecx]
xor edi,eax
mov ebp,DWORD PTR 36[esi]
add ecx,edi
mov edi,-1
rol ecx,15
xor edi,eax
add ecx,edx
; R3 63
or edi,ecx
lea ebx,DWORD PTR 3951481745[ebp*1+ebx]
xor edi,edx
mov ebp,DWORD PTR 24[esp]	; ebp = ctx (arg 1; 5 dwords pushed above)
add ebx,edi
add esi,64	; advance data pointer to the next 64-byte block
rol ebx,21
mov edi,DWORD PTR [ebp]
add ebx,ecx
; Add this block's result into the chaining state in the ctx.
add eax,edi
mov edi,DWORD PTR 4[ebp]
add ebx,edi
mov edi,DWORD PTR 8[ebp]
add ecx,edi
mov edi,DWORD PTR 12[ebp]
add edx,edi
mov DWORD PTR [ebp],eax
mov DWORD PTR 4[ebp],ebx
mov edi,DWORD PTR [esp]	; edi = loop bound (last-block pointer)
mov DWORD PTR 8[ebp],ecx
mov DWORD PTR 12[ebp],edx
cmp edi,esi
jae $L000start	; loop while bound >= current data pointer
pop eax	; discard saved loop bound
pop ebx
pop ebp
pop edi
pop esi
ret
_md5_block_asm_data_order ENDP
.text$ ENDS
END

239
deps/openssl/asm/x86-win32-masm/rc4/rc4-586.asm

@ -0,0 +1,239 @@
TITLE rc4-586.asm
IF @Version LT 800
ECHO MASM version 8.00 or later is strongly recommended.
ENDIF
.586
.MODEL FLAT
OPTION DOTNAME
IF @Version LT 800
.text$ SEGMENT PAGE 'CODE'
ELSE
.text$ SEGMENT ALIGN(64) 'CODE'
ENDIF
ALIGN 16
_RC4 PROC PUBLIC
$L_RC4_begin::
; void RC4(RC4_KEY *key, size_t len, const unsigned char *in, unsigned char *out)
; Generated by OpenSSL's rc4-586.pl (perlasm); comments added for review only.
; Key layout (see _RC4_set_key): [key]=x, 4[key]=y, 8[key]=S table.
; The S table is either 256 dwords (word path) or 256 bytes followed by a
; dword -1 marker (char path, selected at key setup) -- the marker at
; 256[edi] below distinguishes the two.
push ebp
push ebx
push esi
push edi
mov edi,DWORD PTR 20[esp]	; edi = key
mov edx,DWORD PTR 24[esp]	; edx = len
mov esi,DWORD PTR 28[esp]	; esi = in
mov ebp,DWORD PTR 32[esp]	; ebp = out
xor eax,eax	; eax = x index
xor ebx,ebx	; ebx = y index
cmp edx,0
je $L000abort	; nothing to do for len == 0
mov al,BYTE PTR [edi]	; x
mov bl,BYTE PTR 4[edi]	; y
add edi,8	; edi = S table base
lea ecx,DWORD PTR [edx*1+esi]
sub ebp,esi	; ebp = out - in (applied to esi when storing)
mov DWORD PTR 24[esp],ecx	; 24[esp] = input end pointer
inc al
cmp DWORD PTR 256[edi],-1	; char-array marker set by RC4_set_key?
je $L001RC4_CHAR
mov ecx,DWORD PTR [eax*4+edi]
and edx,-4	; process 4 bytes per iteration first
jz $L002loop1
lea edx,DWORD PTR [edx*1+esi-4]
mov DWORD PTR 28[esp],edx	; 28[esp] = last 4-byte chunk pointer
mov DWORD PTR 32[esp],ebp	; 32[esp] = out - in
ALIGN 16
$L003loop4:
; Four RC4 steps; output bytes accumulate in ebp via ror/or.
add bl,cl
mov edx,DWORD PTR [ebx*4+edi]
mov DWORD PTR [ebx*4+edi],ecx
mov DWORD PTR [eax*4+edi],edx
add edx,ecx
inc al
and edx,255
mov ecx,DWORD PTR [eax*4+edi]
mov ebp,DWORD PTR [edx*4+edi]
add bl,cl
mov edx,DWORD PTR [ebx*4+edi]
mov DWORD PTR [ebx*4+edi],ecx
mov DWORD PTR [eax*4+edi],edx
add edx,ecx
inc al
and edx,255
ror ebp,8
mov ecx,DWORD PTR [eax*4+edi]
or ebp,DWORD PTR [edx*4+edi]
add bl,cl
mov edx,DWORD PTR [ebx*4+edi]
mov DWORD PTR [ebx*4+edi],ecx
mov DWORD PTR [eax*4+edi],edx
add edx,ecx
inc al
and edx,255
ror ebp,8
mov ecx,DWORD PTR [eax*4+edi]
or ebp,DWORD PTR [edx*4+edi]
add bl,cl
mov edx,DWORD PTR [ebx*4+edi]
mov DWORD PTR [ebx*4+edi],ecx
mov DWORD PTR [eax*4+edi],edx
add edx,ecx
inc al
and edx,255
ror ebp,8
mov ecx,DWORD PTR 32[esp]	; ecx = out - in
or ebp,DWORD PTR [edx*4+edi]
ror ebp,8
xor ebp,DWORD PTR [esi]	; keystream XOR 4 input bytes
cmp esi,DWORD PTR 28[esp]
mov DWORD PTR [esi*1+ecx],ebp	; store at in + (out - in) = out
lea esi,DWORD PTR 4[esi]
mov ecx,DWORD PTR [eax*4+edi]
jb $L003loop4
cmp esi,DWORD PTR 24[esp]
je $L004done
mov ebp,DWORD PTR 32[esp]	; restore out - in for the byte loop
ALIGN 16
$L002loop1:
; One RC4 step per byte (word-array S).
add bl,cl
mov edx,DWORD PTR [ebx*4+edi]
mov DWORD PTR [ebx*4+edi],ecx
mov DWORD PTR [eax*4+edi],edx
add edx,ecx
inc al
and edx,255
mov edx,DWORD PTR [edx*4+edi]
xor dl,BYTE PTR [esi]
lea esi,DWORD PTR 1[esi]
mov ecx,DWORD PTR [eax*4+edi]
cmp esi,DWORD PTR 24[esp]
mov BYTE PTR [esi*1+ebp-1],dl	; out byte (esi already advanced, hence -1)
jb $L002loop1
jmp $L004done
ALIGN 16
$L001RC4_CHAR:
; Byte-array S variant.
movzx ecx,BYTE PTR [eax*1+edi]
$L005cloop1:
add bl,cl
movzx edx,BYTE PTR [ebx*1+edi]
mov BYTE PTR [ebx*1+edi],cl
mov BYTE PTR [eax*1+edi],dl
add dl,cl
movzx edx,BYTE PTR [edx*1+edi]
add al,1
xor dl,BYTE PTR [esi]
lea esi,DWORD PTR 1[esi]
movzx ecx,BYTE PTR [eax*1+edi]
cmp esi,DWORD PTR 24[esp]
mov BYTE PTR [esi*1+ebp-1],dl
jb $L005cloop1
$L004done:
dec al	; undo the pre-increment before saving x
mov BYTE PTR [edi-4],bl	; save y back into the key
mov BYTE PTR [edi-8],al	; save x back into the key
$L000abort:
pop edi
pop esi
pop ebx
pop ebp
ret
_RC4 ENDP
;EXTERN _OPENSSL_ia32cap_P:NEAR
ALIGN 16
_RC4_set_key PROC PUBLIC
$L_RC4_set_key_begin::
push ebp
push ebx
push esi
push edi
mov edi,DWORD PTR 20[esp]
mov ebp,DWORD PTR 24[esp]
mov esi,DWORD PTR 28[esp]
lea edx,DWORD PTR _OPENSSL_ia32cap_P
lea edi,DWORD PTR 8[edi]
lea esi,DWORD PTR [ebp*1+esi]
neg ebp
xor eax,eax
mov DWORD PTR [edi-4],ebp
bt DWORD PTR [edx],20
jc $L006c1stloop
ALIGN 16
$L007w1stloop:
mov DWORD PTR [eax*4+edi],eax
add al,1
jnc $L007w1stloop
xor ecx,ecx
xor edx,edx
ALIGN 16
$L008w2ndloop:
mov eax,DWORD PTR [ecx*4+edi]
add dl,BYTE PTR [ebp*1+esi]
add dl,al
add ebp,1
mov ebx,DWORD PTR [edx*4+edi]
jnz $L009wnowrap
mov ebp,DWORD PTR [edi-4]
$L009wnowrap:
mov DWORD PTR [edx*4+edi],eax
mov DWORD PTR [ecx*4+edi],ebx
add cl,1
jnc $L008w2ndloop
jmp $L010exit
ALIGN 16
$L006c1stloop:
mov BYTE PTR [eax*1+edi],al
add al,1
jnc $L006c1stloop
xor ecx,ecx
xor edx,edx
xor ebx,ebx
ALIGN 16
$L011c2ndloop:
mov al,BYTE PTR [ecx*1+edi]
add dl,BYTE PTR [ebp*1+esi]
add dl,al
add ebp,1
mov bl,BYTE PTR [edx*1+edi]
jnz $L012cnowrap
mov ebp,DWORD PTR [edi-4]
$L012cnowrap:
mov BYTE PTR [edx*1+edi],al
mov BYTE PTR [ecx*1+edi],bl
add cl,1
jnc $L011c2ndloop
mov DWORD PTR 256[edi],-1
$L010exit:
xor eax,eax
mov DWORD PTR [edi-8],eax
mov DWORD PTR [edi-4],eax
pop edi
pop esi
pop ebx
pop ebp
ret
_RC4_set_key ENDP
ALIGN 16
_RC4_options PROC PUBLIC
$L_RC4_options_begin::
call $L013pic_point
$L013pic_point:
pop eax
lea eax,DWORD PTR ($L014opts-$L013pic_point)[eax]
lea edx,DWORD PTR _OPENSSL_ia32cap_P
bt DWORD PTR [edx],20
jnc $L015skip
add eax,12
$L015skip:
ret
ALIGN 64
$L014opts:
DB 114,99,52,40,52,120,44,105,110,116,41,0
DB 114,99,52,40,49,120,44,99,104,97,114,41,0
DB 82,67,52,32,102,111,114,32,120,56,54,44,32,67,82,89
DB 80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114
DB 111,64,111,112,101,110,115,115,108,46,111,114,103,62,0
ALIGN 64
_RC4_options ENDP
.text$ ENDS
.bss SEGMENT 'BSS'
COMM _OPENSSL_ia32cap_P:DWORD
.bss ENDS
END

573
deps/openssl/asm/x86-win32-masm/rc5/rc5-586.asm

@ -0,0 +1,573 @@
TITLE rc5-586.asm
IF @Version LT 800
ECHO MASM version 8.00 or later is strongly recommended.
ENDIF
.586
.MODEL FLAT
OPTION DOTNAME
IF @Version LT 800
.text$ SEGMENT PAGE 'CODE'
ELSE
.text$ SEGMENT ALIGN(64) 'CODE'
ENDIF
ALIGN 16
_RC5_32_encrypt PROC PUBLIC
$L_RC5_32_encrypt_begin::
;
push ebp
push esi
push edi
mov edx,DWORD PTR 16[esp]
mov ebp,DWORD PTR 20[esp]
; Load the 2 words
mov edi,DWORD PTR [edx]
mov esi,DWORD PTR 4[edx]
push ebx
mov ebx,DWORD PTR [ebp]
add edi,DWORD PTR 4[ebp]
add esi,DWORD PTR 8[ebp]
xor edi,esi
mov eax,DWORD PTR 12[ebp]
mov ecx,esi
rol edi,cl
add edi,eax
xor esi,edi
mov eax,DWORD PTR 16[ebp]
mov ecx,edi
rol esi,cl
add esi,eax
xor edi,esi
mov eax,DWORD PTR 20[ebp]
mov ecx,esi
rol edi,cl
add edi,eax
xor esi,edi
mov eax,DWORD PTR 24[ebp]
mov ecx,edi
rol esi,cl
add esi,eax
xor edi,esi
mov eax,DWORD PTR 28[ebp]
mov ecx,esi
rol edi,cl
add edi,eax
xor esi,edi
mov eax,DWORD PTR 32[ebp]
mov ecx,edi
rol esi,cl
add esi,eax
xor edi,esi
mov eax,DWORD PTR 36[ebp]
mov ecx,esi
rol edi,cl
add edi,eax
xor esi,edi
mov eax,DWORD PTR 40[ebp]
mov ecx,edi
rol esi,cl
add esi,eax
xor edi,esi
mov eax,DWORD PTR 44[ebp]
mov ecx,esi
rol edi,cl
add edi,eax
xor esi,edi
mov eax,DWORD PTR 48[ebp]
mov ecx,edi
rol esi,cl
add esi,eax
xor edi,esi
mov eax,DWORD PTR 52[ebp]
mov ecx,esi
rol edi,cl
add edi,eax
xor esi,edi
mov eax,DWORD PTR 56[ebp]
mov ecx,edi
rol esi,cl
add esi,eax
xor edi,esi
mov eax,DWORD PTR 60[ebp]
mov ecx,esi
rol edi,cl
add edi,eax
xor esi,edi
mov eax,DWORD PTR 64[ebp]
mov ecx,edi
rol esi,cl
add esi,eax
xor edi,esi
mov eax,DWORD PTR 68[ebp]
mov ecx,esi
rol edi,cl
add edi,eax
xor esi,edi
mov eax,DWORD PTR 72[ebp]
mov ecx,edi
rol esi,cl
add esi,eax
cmp ebx,8
je $L000rc5_exit
xor edi,esi
mov eax,DWORD PTR 76[ebp]
mov ecx,esi
rol edi,cl
add edi,eax
xor esi,edi
mov eax,DWORD PTR 80[ebp]
mov ecx,edi
rol esi,cl
add esi,eax
xor edi,esi
mov eax,DWORD PTR 84[ebp]
mov ecx,esi
rol edi,cl
add edi,eax
xor esi,edi
mov eax,DWORD PTR 88[ebp]
mov ecx,edi
rol esi,cl
add esi,eax
xor edi,esi
mov eax,DWORD PTR 92[ebp]
mov ecx,esi
rol edi,cl
add edi,eax
xor esi,edi
mov eax,DWORD PTR 96[ebp]
mov ecx,edi
rol esi,cl
add esi,eax
xor edi,esi
mov eax,DWORD PTR 100[ebp]
mov ecx,esi
rol edi,cl
add edi,eax
xor esi,edi
mov eax,DWORD PTR 104[ebp]
mov ecx,edi
rol esi,cl
add esi,eax
cmp ebx,12
je $L000rc5_exit
xor edi,esi
mov eax,DWORD PTR 108[ebp]
mov ecx,esi
rol edi,cl
add edi,eax
xor esi,edi
mov eax,DWORD PTR 112[ebp]
mov ecx,edi
rol esi,cl
add esi,eax
xor edi,esi
mov eax,DWORD PTR 116[ebp]
mov ecx,esi
rol edi,cl
add edi,eax
xor esi,edi
mov eax,DWORD PTR 120[ebp]
mov ecx,edi
rol esi,cl
add esi,eax
xor edi,esi
mov eax,DWORD PTR 124[ebp]
mov ecx,esi
rol edi,cl
add edi,eax
xor esi,edi
mov eax,DWORD PTR 128[ebp]
mov ecx,edi
rol esi,cl
add esi,eax
xor edi,esi
mov eax,DWORD PTR 132[ebp]
mov ecx,esi
rol edi,cl
add edi,eax
xor esi,edi
mov eax,DWORD PTR 136[ebp]
mov ecx,edi
rol esi,cl
add esi,eax
$L000rc5_exit:
mov DWORD PTR [edx],edi
mov DWORD PTR 4[edx],esi
pop ebx
pop edi
pop esi
pop ebp
ret
_RC5_32_encrypt ENDP
ALIGN 16
_RC5_32_decrypt PROC PUBLIC
$L_RC5_32_decrypt_begin::
;
push ebp
push esi
push edi
mov edx,DWORD PTR 16[esp]
mov ebp,DWORD PTR 20[esp]
; Load the 2 words
mov edi,DWORD PTR [edx]
mov esi,DWORD PTR 4[edx]
push ebx
mov ebx,DWORD PTR [ebp]
cmp ebx,12
je $L001rc5_dec_12
cmp ebx,8
je $L002rc5_dec_8
mov eax,DWORD PTR 136[ebp]
sub esi,eax
mov ecx,edi
ror esi,cl
xor esi,edi
mov eax,DWORD PTR 132[ebp]
sub edi,eax
mov ecx,esi
ror edi,cl
xor edi,esi
mov eax,DWORD PTR 128[ebp]
sub esi,eax
mov ecx,edi
ror esi,cl
xor esi,edi
mov eax,DWORD PTR 124[ebp]
sub edi,eax
mov ecx,esi
ror edi,cl
xor edi,esi
mov eax,DWORD PTR 120[ebp]
sub esi,eax
mov ecx,edi
ror esi,cl
xor esi,edi
mov eax,DWORD PTR 116[ebp]
sub edi,eax
mov ecx,esi
ror edi,cl
xor edi,esi
mov eax,DWORD PTR 112[ebp]
sub esi,eax
mov ecx,edi
ror esi,cl
xor esi,edi
mov eax,DWORD PTR 108[ebp]
sub edi,eax
mov ecx,esi
ror edi,cl
xor edi,esi
$L001rc5_dec_12:
mov eax,DWORD PTR 104[ebp]
sub esi,eax
mov ecx,edi
ror esi,cl
xor esi,edi
mov eax,DWORD PTR 100[ebp]
sub edi,eax
mov ecx,esi
ror edi,cl
xor edi,esi
mov eax,DWORD PTR 96[ebp]
sub esi,eax
mov ecx,edi
ror esi,cl
xor esi,edi
mov eax,DWORD PTR 92[ebp]
sub edi,eax
mov ecx,esi
ror edi,cl
xor edi,esi
mov eax,DWORD PTR 88[ebp]
sub esi,eax
mov ecx,edi
ror esi,cl
xor esi,edi
mov eax,DWORD PTR 84[ebp]
sub edi,eax
mov ecx,esi
ror edi,cl
xor edi,esi
mov eax,DWORD PTR 80[ebp]
sub esi,eax
mov ecx,edi
ror esi,cl
xor esi,edi
mov eax,DWORD PTR 76[ebp]
sub edi,eax
mov ecx,esi
ror edi,cl
xor edi,esi
$L002rc5_dec_8:
mov eax,DWORD PTR 72[ebp]
sub esi,eax
mov ecx,edi
ror esi,cl
xor esi,edi
mov eax,DWORD PTR 68[ebp]
sub edi,eax
mov ecx,esi
ror edi,cl
xor edi,esi
mov eax,DWORD PTR 64[ebp]
sub esi,eax
mov ecx,edi
ror esi,cl
xor esi,edi
mov eax,DWORD PTR 60[ebp]
sub edi,eax
mov ecx,esi
ror edi,cl
xor edi,esi
mov eax,DWORD PTR 56[ebp]
sub esi,eax
mov ecx,edi
ror esi,cl
xor esi,edi
mov eax,DWORD PTR 52[ebp]
sub edi,eax
mov ecx,esi
ror edi,cl
xor edi,esi
mov eax,DWORD PTR 48[ebp]
sub esi,eax
mov ecx,edi
ror esi,cl
xor esi,edi
mov eax,DWORD PTR 44[ebp]
sub edi,eax
mov ecx,esi
ror edi,cl
xor edi,esi
mov eax,DWORD PTR 40[ebp]
sub esi,eax
mov ecx,edi
ror esi,cl
xor esi,edi
mov eax,DWORD PTR 36[ebp]
sub edi,eax
mov ecx,esi
ror edi,cl
xor edi,esi
mov eax,DWORD PTR 32[ebp]
sub esi,eax
mov ecx,edi
ror esi,cl
xor esi,edi
mov eax,DWORD PTR 28[ebp]
sub edi,eax
mov ecx,esi
ror edi,cl
xor edi,esi
mov eax,DWORD PTR 24[ebp]
sub esi,eax
mov ecx,edi
ror esi,cl
xor esi,edi
mov eax,DWORD PTR 20[ebp]
sub edi,eax
mov ecx,esi
ror edi,cl
xor edi,esi
mov eax,DWORD PTR 16[ebp]
sub esi,eax
mov ecx,edi
ror esi,cl
xor esi,edi
mov eax,DWORD PTR 12[ebp]
sub edi,eax
mov ecx,esi
ror edi,cl
xor edi,esi
sub esi,DWORD PTR 8[ebp]
sub edi,DWORD PTR 4[ebp]
$L003rc5_exit:
mov DWORD PTR [edx],edi
mov DWORD PTR 4[edx],esi
pop ebx
pop edi
pop esi
pop ebp
ret
_RC5_32_decrypt ENDP
ALIGN 16
_RC5_32_cbc_encrypt PROC PUBLIC
$L_RC5_32_cbc_encrypt_begin::
;
push ebp
push ebx
push esi
push edi
mov ebp,DWORD PTR 28[esp]
; getting iv ptr from parameter 4
mov ebx,DWORD PTR 36[esp]
mov esi,DWORD PTR [ebx]
mov edi,DWORD PTR 4[ebx]
push edi
push esi
push edi
push esi
mov ebx,esp
mov esi,DWORD PTR 36[esp]
mov edi,DWORD PTR 40[esp]
; getting encrypt flag from parameter 5
mov ecx,DWORD PTR 56[esp]
; get and push parameter 3
mov eax,DWORD PTR 48[esp]
push eax
push ebx
cmp ecx,0
jz $L004decrypt
and ebp,4294967288
mov eax,DWORD PTR 8[esp]
mov ebx,DWORD PTR 12[esp]
jz $L005encrypt_finish
$L006encrypt_loop:
mov ecx,DWORD PTR [esi]
mov edx,DWORD PTR 4[esi]
xor eax,ecx
xor ebx,edx
mov DWORD PTR 8[esp],eax
mov DWORD PTR 12[esp],ebx
call $L_RC5_32_encrypt_begin
mov eax,DWORD PTR 8[esp]
mov ebx,DWORD PTR 12[esp]
mov DWORD PTR [edi],eax
mov DWORD PTR 4[edi],ebx
add esi,8
add edi,8
sub ebp,8
jnz $L006encrypt_loop
$L005encrypt_finish:
mov ebp,DWORD PTR 52[esp]
and ebp,7
jz $L007finish
call $L008PIC_point
$L008PIC_point:
pop edx
lea ecx,DWORD PTR ($L009cbc_enc_jmp_table-$L008PIC_point)[edx]
mov ebp,DWORD PTR [ebp*4+ecx]
add ebp,edx
xor ecx,ecx
xor edx,edx
jmp ebp
$L010ej7:
mov dh,BYTE PTR 6[esi]
shl edx,8
$L011ej6:
mov dh,BYTE PTR 5[esi]
$L012ej5:
mov dl,BYTE PTR 4[esi]
$L013ej4:
mov ecx,DWORD PTR [esi]
jmp $L014ejend
$L015ej3:
mov ch,BYTE PTR 2[esi]
shl ecx,8
$L016ej2:
mov ch,BYTE PTR 1[esi]
$L017ej1:
mov cl,BYTE PTR [esi]
$L014ejend:
xor eax,ecx
xor ebx,edx
mov DWORD PTR 8[esp],eax
mov DWORD PTR 12[esp],ebx
call $L_RC5_32_encrypt_begin
mov eax,DWORD PTR 8[esp]
mov ebx,DWORD PTR 12[esp]
mov DWORD PTR [edi],eax
mov DWORD PTR 4[edi],ebx
jmp $L007finish
$L004decrypt:
and ebp,4294967288
mov eax,DWORD PTR 16[esp]
mov ebx,DWORD PTR 20[esp]
jz $L018decrypt_finish
$L019decrypt_loop:
mov eax,DWORD PTR [esi]
mov ebx,DWORD PTR 4[esi]
mov DWORD PTR 8[esp],eax
mov DWORD PTR 12[esp],ebx
call $L_RC5_32_decrypt_begin
mov eax,DWORD PTR 8[esp]
mov ebx,DWORD PTR 12[esp]
mov ecx,DWORD PTR 16[esp]
mov edx,DWORD PTR 20[esp]
xor ecx,eax
xor edx,ebx
mov eax,DWORD PTR [esi]
mov ebx,DWORD PTR 4[esi]
mov DWORD PTR [edi],ecx
mov DWORD PTR 4[edi],edx
mov DWORD PTR 16[esp],eax
mov DWORD PTR 20[esp],ebx
add esi,8
add edi,8
sub ebp,8
jnz $L019decrypt_loop
$L018decrypt_finish:
mov ebp,DWORD PTR 52[esp]
and ebp,7
jz $L007finish
mov eax,DWORD PTR [esi]
mov ebx,DWORD PTR 4[esi]
mov DWORD PTR 8[esp],eax
mov DWORD PTR 12[esp],ebx
call $L_RC5_32_decrypt_begin
mov eax,DWORD PTR 8[esp]
mov ebx,DWORD PTR 12[esp]
mov ecx,DWORD PTR 16[esp]
mov edx,DWORD PTR 20[esp]
xor ecx,eax
xor edx,ebx
mov eax,DWORD PTR [esi]
mov ebx,DWORD PTR 4[esi]
$L020dj7:
ror edx,16
mov BYTE PTR 6[edi],dl
shr edx,16
$L021dj6:
mov BYTE PTR 5[edi],dh
$L022dj5:
mov BYTE PTR 4[edi],dl
$L023dj4:
mov DWORD PTR [edi],ecx
jmp $L024djend
$L025dj3:
ror ecx,16
mov BYTE PTR 2[edi],cl
shl ecx,16
$L026dj2:
mov BYTE PTR 1[esi],ch
$L027dj1:
mov BYTE PTR [esi],cl
$L024djend:
jmp $L007finish
$L007finish:
mov ecx,DWORD PTR 60[esp]
add esp,24
mov DWORD PTR [ecx],eax
mov DWORD PTR 4[ecx],ebx
pop edi
pop esi
pop ebx
pop ebp
ret
ALIGN 64
$L009cbc_enc_jmp_table:
DD 0
DD $L017ej1-$L008PIC_point
DD $L016ej2-$L008PIC_point
DD $L015ej3-$L008PIC_point
DD $L013ej4-$L008PIC_point
DD $L012ej5-$L008PIC_point
DD $L011ej6-$L008PIC_point
DD $L010ej7-$L008PIC_point
ALIGN 64
_RC5_32_cbc_encrypt ENDP
.text$ ENDS
END

1976
deps/openssl/asm/x86-win32-masm/ripemd/rmd-586.asm

File diff suppressed because it is too large

1452
deps/openssl/asm/x86-win32-masm/sha/sha1-586.asm

File diff suppressed because it is too large

271
deps/openssl/asm/x86-win32-masm/sha/sha256-586.asm

@ -0,0 +1,271 @@
TITLE sha512-586.asm
IF @Version LT 800
ECHO MASM version 8.00 or later is strongly recommended.
ENDIF
.586
.MODEL FLAT
OPTION DOTNAME
IF @Version LT 800
.text$ SEGMENT PAGE 'CODE'
ELSE
.text$ SEGMENT ALIGN(64) 'CODE'
ENDIF
ALIGN 16
;-----------------------------------------------------------------------
; void sha256_block_data_order(SHA256_CTX *ctx, const void *in, size_t num)
;
; Machine-generated SHA-256 compression function (CRYPTOGAMS perl output,
; x86/MASM flavour).  cdecl; after the four register pushes the args are:
;   20[esp] = ctx  (eight 32-bit state words a..h)
;   24[esp] = in   (input blocks, big-endian message words)
;   28[esp] = num  (count of 64-byte blocks)
; The working variables live partly in eax (a) / edx (e) and partly in a
; 32-byte window at [esp]; each round does `sub esp,4' so the window
; slides down instead of the eight variables being rotated.
;-----------------------------------------------------------------------
_sha256_block_data_order PROC PUBLIC
$L_sha256_block_data_order_begin::
push ebp
push ebx
push esi
push edi
mov esi,DWORD PTR 20[esp]	; esi = ctx
mov edi,DWORD PTR 24[esp]	; edi = input pointer
mov eax,DWORD PTR 28[esp]	; eax = number of 64-byte blocks
mov ebx,esp	; remember caller's esp
call $L000pic_point	; PIC idiom: next pop yields eip
$L000pic_point:
pop ebp
lea ebp,DWORD PTR ($L001K256-$L000pic_point)[ebp]	; ebp = &K256[0]
sub esp,16
and esp,-64	; 64-byte align the work area
shl eax,6	; num * 64 ...
add eax,edi	; ... + in = end-of-input pointer
mov DWORD PTR [esp],esi	; save ctx
mov DWORD PTR 4[esp],edi	; save input pointer
mov DWORD PTR 8[esp],eax	; save end pointer
mov DWORD PTR 12[esp],ebx	; save caller's esp
ALIGN 16
$L002loop:
; Load one 64-byte block, byte-swap each word to host order and push
; W[0..15]; after the pushes W[0] sits highest on the stack.
mov eax,DWORD PTR [edi]
mov ebx,DWORD PTR 4[edi]
mov ecx,DWORD PTR 8[edi]
mov edx,DWORD PTR 12[edi]
bswap eax
bswap ebx
bswap ecx
bswap edx
push eax
push ebx
push ecx
push edx
mov eax,DWORD PTR 16[edi]
mov ebx,DWORD PTR 20[edi]
mov ecx,DWORD PTR 24[edi]
mov edx,DWORD PTR 28[edi]
bswap eax
bswap ebx
bswap ecx
bswap edx
push eax
push ebx
push ecx
push edx
mov eax,DWORD PTR 32[edi]
mov ebx,DWORD PTR 36[edi]
mov ecx,DWORD PTR 40[edi]
mov edx,DWORD PTR 44[edi]
bswap eax
bswap ebx
bswap ecx
bswap edx
push eax
push ebx
push ecx
push edx
mov eax,DWORD PTR 48[edi]
mov ebx,DWORD PTR 52[edi]
mov ecx,DWORD PTR 56[edi]
mov edx,DWORD PTR 60[edi]
bswap eax
bswap ebx
bswap ecx
bswap edx
push eax
push ebx
push ecx
push edx
add edi,64	; advance input pointer past this block
sub esp,32	; room for the a..h window below W[]
mov DWORD PTR 100[esp],edi	; stash input pointer
; Load state: a -> eax, b,c,d -> 4,8,12[esp], e -> edx, f,g,h -> 20,24,28[esp]
mov eax,DWORD PTR [esi]
mov ebx,DWORD PTR 4[esi]
mov ecx,DWORD PTR 8[esi]
mov edi,DWORD PTR 12[esi]
mov DWORD PTR 4[esp],ebx
mov DWORD PTR 8[esp],ecx
mov DWORD PTR 12[esp],edi
mov edx,DWORD PTR 16[esi]
mov ebx,DWORD PTR 20[esi]
mov ecx,DWORD PTR 24[esi]
mov edi,DWORD PTR 28[esi]
mov DWORD PTR 20[esp],ebx
mov DWORD PTR 24[esp],ecx
mov DWORD PTR 28[esp],edi
ALIGN 16
; Rounds 0..15: T1 = h + Sigma1(e) + Ch(e,f,g) + K[t] + W[t];
; Sigma1 via ror 6/11/25 (6, +5, +14), Sigma0 via ror 2/13/22,
; Ch(e,f,g) = ((f^g)&e)^g, Maj(a,b,c) = ((a|b)&c)|(a&b).
$L00300_15:
mov ebx,DWORD PTR 92[esp]	; ebx = W[t]
mov ecx,edx
ror ecx,6
mov edi,edx
ror edi,11
mov esi,DWORD PTR 20[esp]	; esi = f
xor ecx,edi
ror edi,14	; 11+14 = rotr 25
xor ecx,edi	; ecx = Sigma1(e)
mov edi,DWORD PTR 24[esp]	; edi = g
add ebx,ecx
mov DWORD PTR 16[esp],edx	; store e into the sliding window
xor esi,edi
mov ecx,eax
and esi,edx
mov edx,DWORD PTR 12[esp]	; edx = d (becomes next e)
xor esi,edi	; esi = Ch(e,f,g)
mov edi,eax
add ebx,esi
ror ecx,2
add ebx,DWORD PTR 28[esp]	; + h
ror edi,13
mov esi,DWORD PTR 4[esp]	; esi = b
xor ecx,edi
ror edi,9	; 13+9 = rotr 22
add edx,ebx	; d += T1
xor ecx,edi	; ecx = Sigma0(a)
mov edi,DWORD PTR 8[esp]	; edi = c
add ebx,ecx
mov DWORD PTR [esp],eax	; store a into the window
mov ecx,eax
sub esp,4	; slide the a..h window one word down
or eax,esi
and ecx,esi
and eax,edi
mov esi,DWORD PTR [ebp]	; esi = K[t]
or eax,ecx	; eax = Maj(a,b,c)
add ebp,4	; next round constant
add eax,ebx	; a = T1 + Sigma0 + Maj
add edx,esi	; e += K[t]
add eax,esi	; a += K[t]
cmp esi,3248222580	; 0xC19BF174 = K[15]: last of first 16 rounds
jne $L00300_15
mov ebx,DWORD PTR 152[esp]	; ebx = W[t-15] for the schedule
ALIGN 16
; Rounds 16..63: first extend the schedule,
;   W[t] = sigma1(W[t-2]) + W[t-7] + sigma0(W[t-15]) + W[t-16]
; with sigma0 = rotr7 ^ rotr18 ^ shr3, sigma1 = rotr17 ^ rotr19 ^ shr10,
; then the same round body as above.
$L00416_63:
mov esi,ebx
mov ecx,DWORD PTR 100[esp]	; ecx = W[t-2]
shr ebx,3
ror esi,7
xor ebx,esi
ror esi,11	; 7+11 = rotr 18
mov edi,ecx
xor ebx,esi	; ebx = sigma0(W[t-15])
shr ecx,10
mov esi,DWORD PTR 156[esp]	; esi = W[t-16]
ror edi,17
xor ecx,edi
ror edi,2	; 17+2 = rotr 19
add ebx,esi
xor edi,ecx	; edi = sigma1(W[t-2])
add ebx,edi
mov ecx,edx
add ebx,DWORD PTR 120[esp]	; + W[t-7]
ror ecx,6
mov edi,edx
ror edi,11
mov esi,DWORD PTR 20[esp]
xor ecx,edi
ror edi,14
mov DWORD PTR 92[esp],ebx	; store new W[t]
xor ecx,edi
mov edi,DWORD PTR 24[esp]
add ebx,ecx
mov DWORD PTR 16[esp],edx
xor esi,edi
mov ecx,eax
and esi,edx
mov edx,DWORD PTR 12[esp]
xor esi,edi
mov edi,eax
add ebx,esi
ror ecx,2
add ebx,DWORD PTR 28[esp]
ror edi,13
mov esi,DWORD PTR 4[esp]
xor ecx,edi
ror edi,9
add edx,ebx
xor ecx,edi
mov edi,DWORD PTR 8[esp]
add ebx,ecx
mov DWORD PTR [esp],eax
mov ecx,eax
sub esp,4
or eax,esi
and ecx,esi
and eax,edi
mov esi,DWORD PTR [ebp]
or eax,ecx
add ebp,4
add eax,ebx
mov ebx,DWORD PTR 152[esp]	; preload next W[t-15]
add edx,esi
add eax,esi
cmp esi,3329325298	; 0xC67178F2 = K[63]: last round
jne $L00416_63
; Add the working variables back into ctx (at 352[esp] after 64 slides).
mov esi,DWORD PTR 352[esp]	; esi = ctx
mov ebx,DWORD PTR 4[esp]
mov ecx,DWORD PTR 8[esp]
mov edi,DWORD PTR 12[esp]
add eax,DWORD PTR [esi]
add ebx,DWORD PTR 4[esi]
add ecx,DWORD PTR 8[esi]
add edi,DWORD PTR 12[esi]
mov DWORD PTR [esi],eax
mov DWORD PTR 4[esi],ebx
mov DWORD PTR 8[esi],ecx
mov DWORD PTR 12[esi],edi
mov eax,DWORD PTR 20[esp]
mov ebx,DWORD PTR 24[esp]
mov ecx,DWORD PTR 28[esp]
mov edi,DWORD PTR 356[esp]	; edi = current input pointer
add edx,DWORD PTR 16[esi]
add eax,DWORD PTR 20[esi]
add ebx,DWORD PTR 24[esi]
add ecx,DWORD PTR 28[esi]
mov DWORD PTR 16[esi],edx
mov DWORD PTR 20[esi],eax
mov DWORD PTR 24[esi],ebx
mov DWORD PTR 28[esi],ecx
add esp,352	; drop W[] + window for this block
sub ebp,256	; rewind K pointer (64 rounds * 4 bytes)
cmp edi,DWORD PTR 8[esp]	; more input left?
jb $L002loop
mov esp,DWORD PTR 12[esp]	; restore caller's esp
pop edi
pop esi
pop ebx
pop ebp
ret
ALIGN 64
; SHA-256 round constants K[0..63] (FIPS 180-4).
$L001K256:
DD 1116352408,1899447441,3049323471,3921009573
DD 961987163,1508970993,2453635748,2870763221
DD 3624381080,310598401,607225278,1426881987
DD 1925078388,2162078206,2614888103,3248222580
DD 3835390401,4022224774,264347078,604807628
DD 770255983,1249150122,1555081692,1996064986
DD 2554220882,2821834349,2952996808,3210313671
DD 3336571891,3584528711,113926993,338241895
DD 666307205,773529912,1294757372,1396182291
DD 1695183700,1986661051,2177026350,2456956037
DD 2730485921,2820302411,3259730800,3345764771
DD 3516065817,3600352804,4094571909,275423344
DD 430227734,506948616,659060556,883997877
DD 958139571,1322822218,1537002063,1747873779
DD 1955562222,2024104815,2227730452,2361852424
DD 2428436474,2756734187,3204031479,3329325298
_sha256_block_data_order ENDP
; ASCII ident string:
; "SHA256 block transform for x86, CRYPTOGAMS by <appro@openssl.org>"
DB 83,72,65,50,53,54,32,98,108,111,99,107,32,116,114,97
DB 110,115,102,111,114,109,32,102,111,114,32,120,56,54,44,32
DB 67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97
DB 112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103
DB 62,0
.text$ ENDS
END

573
deps/openssl/asm/x86-win32-masm/sha/sha512-586.asm

@ -0,0 +1,573 @@
TITLE sha512-586.asm
IF @Version LT 800
ECHO MASM version 8.00 or later is strongly recommended.
ENDIF
.586
.MODEL FLAT
OPTION DOTNAME
IF @Version LT 800
.text$ SEGMENT PAGE 'CODE'
ELSE
.text$ SEGMENT ALIGN(64) 'CODE'
ENDIF
ALIGN 16
;-----------------------------------------------------------------------
; void sha512_block_data_order(SHA512_CTX *ctx, const void *in, size_t num)
;
; Machine-generated SHA-512 compression function (CRYPTOGAMS perl output,
; x86/MASM flavour) — the pure-IA32 path: every 64-bit operation is
; emulated with 32-bit register pairs (low word first, then the high word
; carried with adc/sbb).  cdecl; after the four pushes the args are:
;   20[esp] = ctx  (eight 64-bit state words)
;   24[esp] = in   (input blocks, big-endian 64-bit message words)
;   28[esp] = num  (count of 128-byte blocks; shl eax,7 -> byte count)
; Each round does `sub esp,8' so the eight 64-bit working variables live
; in a sliding stack window rather than being rotated.
;-----------------------------------------------------------------------
_sha512_block_data_order PROC PUBLIC
$L_sha512_block_data_order_begin::
push ebp
push ebx
push esi
push edi
mov esi,DWORD PTR 20[esp]	; esi = ctx
mov edi,DWORD PTR 24[esp]	; edi = input pointer
mov eax,DWORD PTR 28[esp]	; eax = number of 128-byte blocks
mov ebx,esp	; remember caller's esp
call $L000pic_point	; PIC idiom: next pop yields eip
$L000pic_point:
pop ebp
lea ebp,DWORD PTR ($L001K512-$L000pic_point)[ebp]	; ebp = &K512[0]
sub esp,16
and esp,-64	; 64-byte align the work area
shl eax,7	; num * 128 ...
add eax,edi	; ... + in = end-of-input pointer
mov DWORD PTR [esp],esi	; save ctx
mov DWORD PTR 4[esp],edi	; save input pointer
mov DWORD PTR 8[esp],eax	; save end pointer
mov DWORD PTR 12[esp],ebx	; save caller's esp
ALIGN 16
$L002loop_x86:
; Load one 128-byte block; each group of four pushes stores one pair of
; big-endian 64-bit words (high dword pushed before low dword).
mov eax,DWORD PTR [edi]
mov ebx,DWORD PTR 4[edi]
mov ecx,DWORD PTR 8[edi]
mov edx,DWORD PTR 12[edi]
bswap eax
bswap ebx
bswap ecx
bswap edx
push eax
push ebx
push ecx
push edx
mov eax,DWORD PTR 16[edi]
mov ebx,DWORD PTR 20[edi]
mov ecx,DWORD PTR 24[edi]
mov edx,DWORD PTR 28[edi]
bswap eax
bswap ebx
bswap ecx
bswap edx
push eax
push ebx
push ecx
push edx
mov eax,DWORD PTR 32[edi]
mov ebx,DWORD PTR 36[edi]
mov ecx,DWORD PTR 40[edi]
mov edx,DWORD PTR 44[edi]
bswap eax
bswap ebx
bswap ecx
bswap edx
push eax
push ebx
push ecx
push edx
mov eax,DWORD PTR 48[edi]
mov ebx,DWORD PTR 52[edi]
mov ecx,DWORD PTR 56[edi]
mov edx,DWORD PTR 60[edi]
bswap eax
bswap ebx
bswap ecx
bswap edx
push eax
push ebx
push ecx
push edx
mov eax,DWORD PTR 64[edi]
mov ebx,DWORD PTR 68[edi]
mov ecx,DWORD PTR 72[edi]
mov edx,DWORD PTR 76[edi]
bswap eax
bswap ebx
bswap ecx
bswap edx
push eax
push ebx
push ecx
push edx
mov eax,DWORD PTR 80[edi]
mov ebx,DWORD PTR 84[edi]
mov ecx,DWORD PTR 88[edi]
mov edx,DWORD PTR 92[edi]
bswap eax
bswap ebx
bswap ecx
bswap edx
push eax
push ebx
push ecx
push edx
mov eax,DWORD PTR 96[edi]
mov ebx,DWORD PTR 100[edi]
mov ecx,DWORD PTR 104[edi]
mov edx,DWORD PTR 108[edi]
bswap eax
bswap ebx
bswap ecx
bswap edx
push eax
push ebx
push ecx
push edx
mov eax,DWORD PTR 112[edi]
mov ebx,DWORD PTR 116[edi]
mov ecx,DWORD PTR 120[edi]
mov edx,DWORD PTR 124[edi]
bswap eax
bswap ebx
bswap ecx
bswap edx
push eax
push ebx
push ecx
push edx
add edi,128	; advance input pointer past this block
sub esp,72	; room for the working-variable window
mov DWORD PTR 204[esp],edi	; stash input pointer
; Copy the eight 64-bit state words from ctx (esi) into the window at
; 8[esp]: 16 dwords moved by `rep movsd' emitted as raw data below.
lea edi,DWORD PTR 8[esp]
mov ecx,16
DD 2784229001	; 0xA5F3F689 = bytes 89F6,F3A5: mov esi,esi / rep movsd
ALIGN 16
; Rounds 0..15, 64-bit arithmetic in 32-bit halves:
;   T1 = h + Sigma1(e) + Ch(e,f,g) + K[t] + W[t]
; e is at 40/44[esp] (lo/hi); the shr/shl/xor ladder below builds
; Sigma1(e) into ebx:eax one rotation at a time.
$L00300_15_x86:
mov ecx,DWORD PTR 40[esp]
mov edx,DWORD PTR 44[esp]
mov esi,ecx
shr ecx,9
mov edi,edx
shr edx,9
mov ebx,ecx
shl esi,14
mov eax,edx
shl edi,14
xor ebx,esi
shr ecx,5
xor eax,edi
shr edx,5
xor eax,ecx
shl esi,4
xor ebx,edx
shl edi,4
xor ebx,esi
shr ecx,4
xor eax,edi
shr edx,4
xor eax,ecx
shl esi,5
xor ebx,edx
shl edi,5
xor eax,esi
xor ebx,edi	; ebx:eax = Sigma1(e)
; Ch(e,f,g) = ((f^g)&e)^g, computed per 32-bit half.
mov ecx,DWORD PTR 48[esp]
mov edx,DWORD PTR 52[esp]
mov esi,DWORD PTR 56[esp]
mov edi,DWORD PTR 60[esp]
add eax,DWORD PTR 64[esp]
adc ebx,DWORD PTR 68[esp]	; + h
xor ecx,esi
xor edx,edi
and ecx,DWORD PTR 40[esp]
and edx,DWORD PTR 44[esp]
add eax,DWORD PTR 192[esp]
adc ebx,DWORD PTR 196[esp]	; + W[t]
xor ecx,esi
xor edx,edi
mov esi,DWORD PTR [ebp]	; K[t] low
mov edi,DWORD PTR 4[ebp]	; K[t] high
add eax,ecx
adc ebx,edx	; + Ch
mov ecx,DWORD PTR 32[esp]	; d lo
mov edx,DWORD PTR 36[esp]	; d hi
add eax,esi
adc ebx,edi	; + K[t]  -> ebx:eax = T1
mov DWORD PTR [esp],eax
mov DWORD PTR 4[esp],ebx	; park T1 at the window head
add eax,ecx
adc ebx,edx
mov ecx,DWORD PTR 8[esp]	; a lo
mov edx,DWORD PTR 12[esp]	; a hi
mov DWORD PTR 32[esp],eax
mov DWORD PTR 36[esp],ebx	; e' = d + T1
; Sigma0(a) built the same way into ebx:eax.
mov esi,ecx
shr ecx,2
mov edi,edx
shr edx,2
mov ebx,ecx
shl esi,4
mov eax,edx
shl edi,4
xor ebx,esi
shr ecx,5
xor eax,edi
shr edx,5
xor ebx,ecx
shl esi,21
xor eax,edx
shl edi,21
xor eax,esi
shr ecx,21
xor ebx,edi
shr edx,21
xor eax,ecx
shl esi,5
xor ebx,edx
shl edi,5
xor eax,esi
xor ebx,edi	; ebx:eax = Sigma0(a)
; Maj(a,b,c) = ((a|b)&c)|(a&b), per half; a,b,c at 8,16,24[esp].
mov ecx,DWORD PTR 8[esp]
mov edx,DWORD PTR 12[esp]
mov esi,DWORD PTR 16[esp]
mov edi,DWORD PTR 20[esp]
add eax,DWORD PTR [esp]
adc ebx,DWORD PTR 4[esp]	; + T1
or ecx,esi
or edx,edi
and ecx,DWORD PTR 24[esp]
and edx,DWORD PTR 28[esp]
and esi,DWORD PTR 8[esp]
and edi,DWORD PTR 12[esp]
or ecx,esi
or edx,edi
add eax,ecx
adc ebx,edx	; + Maj -> a'
mov DWORD PTR [esp],eax
mov DWORD PTR 4[esp],ebx
mov dl,BYTE PTR [ebp]	; low byte of K[t], used as loop sentinel
sub esp,8	; slide the window one 64-bit word down
lea ebp,DWORD PTR 8[ebp]	; next round constant
cmp dl,148	; 0x94 = low byte of K[15] -> done with rounds 0..15
jne $L00300_15_x86
ALIGN 16
; Rounds 16..79: extend the schedule first,
;   W[t] = sigma1(W[t-2]) + W[t-7] + sigma0(W[t-15]) + W[t-16]
; (sigma0 = rotr1^rotr8^shr7, sigma1 = rotr19^rotr61^shr6, emulated in
; halves), then the identical round body as rounds 0..15.
$L00416_79_x86:
mov ecx,DWORD PTR 312[esp]	; W[t-15] lo
mov edx,DWORD PTR 316[esp]	; W[t-15] hi
mov esi,ecx
shr ecx,1
mov edi,edx
shr edx,1
mov eax,ecx
shl esi,24
mov ebx,edx
shl edi,24
xor ebx,esi
shr ecx,6
xor eax,edi
shr edx,6
xor eax,ecx
shl esi,7
xor ebx,edx
shl edi,1
xor ebx,esi
shr ecx,1
xor eax,edi
shr edx,1
xor eax,ecx
shl edi,6
xor ebx,edx
xor eax,edi	; ebx:eax = sigma0(W[t-15])
mov DWORD PTR [esp],eax
mov DWORD PTR 4[esp],ebx
mov ecx,DWORD PTR 208[esp]	; W[t-2] lo
mov edx,DWORD PTR 212[esp]	; W[t-2] hi
mov esi,ecx
shr ecx,6
mov edi,edx
shr edx,6
mov eax,ecx
shl esi,3
mov ebx,edx
shl edi,3
xor eax,esi
shr ecx,13
xor ebx,edi
shr edx,13
xor eax,ecx
shl esi,10
xor ebx,edx
shl edi,10
xor ebx,esi
shr ecx,10
xor eax,edi
shr edx,10
xor ebx,ecx
shl edi,13
xor eax,edx
xor eax,edi	; ebx:eax = sigma1(W[t-2])
mov ecx,DWORD PTR 320[esp]	; W[t-16] lo
mov edx,DWORD PTR 324[esp]	; W[t-16] hi
add eax,DWORD PTR [esp]
adc ebx,DWORD PTR 4[esp]	; + sigma0
mov esi,DWORD PTR 248[esp]	; W[t-7] lo
mov edi,DWORD PTR 252[esp]	; W[t-7] hi
add eax,ecx
adc ebx,edx
add eax,esi
adc ebx,edi
mov DWORD PTR 192[esp],eax
mov DWORD PTR 196[esp],ebx	; store new W[t]
; --- round body, identical to rounds 0..15 above ---
mov ecx,DWORD PTR 40[esp]
mov edx,DWORD PTR 44[esp]
mov esi,ecx
shr ecx,9
mov edi,edx
shr edx,9
mov ebx,ecx
shl esi,14
mov eax,edx
shl edi,14
xor ebx,esi
shr ecx,5
xor eax,edi
shr edx,5
xor eax,ecx
shl esi,4
xor ebx,edx
shl edi,4
xor ebx,esi
shr ecx,4
xor eax,edi
shr edx,4
xor eax,ecx
shl esi,5
xor ebx,edx
shl edi,5
xor eax,esi
xor ebx,edi
mov ecx,DWORD PTR 48[esp]
mov edx,DWORD PTR 52[esp]
mov esi,DWORD PTR 56[esp]
mov edi,DWORD PTR 60[esp]
add eax,DWORD PTR 64[esp]
adc ebx,DWORD PTR 68[esp]
xor ecx,esi
xor edx,edi
and ecx,DWORD PTR 40[esp]
and edx,DWORD PTR 44[esp]
add eax,DWORD PTR 192[esp]
adc ebx,DWORD PTR 196[esp]
xor ecx,esi
xor edx,edi
mov esi,DWORD PTR [ebp]
mov edi,DWORD PTR 4[ebp]
add eax,ecx
adc ebx,edx
mov ecx,DWORD PTR 32[esp]
mov edx,DWORD PTR 36[esp]
add eax,esi
adc ebx,edi
mov DWORD PTR [esp],eax
mov DWORD PTR 4[esp],ebx
add eax,ecx
adc ebx,edx
mov ecx,DWORD PTR 8[esp]
mov edx,DWORD PTR 12[esp]
mov DWORD PTR 32[esp],eax
mov DWORD PTR 36[esp],ebx
mov esi,ecx
shr ecx,2
mov edi,edx
shr edx,2
mov ebx,ecx
shl esi,4
mov eax,edx
shl edi,4
xor ebx,esi
shr ecx,5
xor eax,edi
shr edx,5
xor ebx,ecx
shl esi,21
xor eax,edx
shl edi,21
xor eax,esi
shr ecx,21
xor ebx,edi
shr edx,21
xor eax,ecx
shl esi,5
xor ebx,edx
shl edi,5
xor eax,esi
xor ebx,edi
mov ecx,DWORD PTR 8[esp]
mov edx,DWORD PTR 12[esp]
mov esi,DWORD PTR 16[esp]
mov edi,DWORD PTR 20[esp]
add eax,DWORD PTR [esp]
adc ebx,DWORD PTR 4[esp]
or ecx,esi
or edx,edi
and ecx,DWORD PTR 24[esp]
and edx,DWORD PTR 28[esp]
and esi,DWORD PTR 8[esp]
and edi,DWORD PTR 12[esp]
or ecx,esi
or edx,edi
add eax,ecx
adc ebx,edx
mov DWORD PTR [esp],eax
mov DWORD PTR 4[esp],ebx
mov dl,BYTE PTR [ebp]	; low byte of K[t] as loop sentinel
sub esp,8
lea ebp,DWORD PTR 8[ebp]
cmp dl,23	; 0x17 = low byte of K[79] -> last round done
jne $L00416_79_x86
; Add the working variables back into ctx (at 840[esp] after 80 slides).
mov esi,DWORD PTR 840[esp]	; esi = ctx
mov edi,DWORD PTR 844[esp]	; edi = current input pointer
mov eax,DWORD PTR [esi]
mov ebx,DWORD PTR 4[esi]
mov ecx,DWORD PTR 8[esi]
mov edx,DWORD PTR 12[esi]
add eax,DWORD PTR 8[esp]
adc ebx,DWORD PTR 12[esp]
mov DWORD PTR [esi],eax
mov DWORD PTR 4[esi],ebx
add ecx,DWORD PTR 16[esp]
adc edx,DWORD PTR 20[esp]
mov DWORD PTR 8[esi],ecx
mov DWORD PTR 12[esi],edx
mov eax,DWORD PTR 16[esi]
mov ebx,DWORD PTR 20[esi]
mov ecx,DWORD PTR 24[esi]
mov edx,DWORD PTR 28[esi]
add eax,DWORD PTR 24[esp]
adc ebx,DWORD PTR 28[esp]
mov DWORD PTR 16[esi],eax
mov DWORD PTR 20[esi],ebx
add ecx,DWORD PTR 32[esp]
adc edx,DWORD PTR 36[esp]
mov DWORD PTR 24[esi],ecx
mov DWORD PTR 28[esi],edx
mov eax,DWORD PTR 32[esi]
mov ebx,DWORD PTR 36[esi]
mov ecx,DWORD PTR 40[esi]
mov edx,DWORD PTR 44[esi]
add eax,DWORD PTR 40[esp]
adc ebx,DWORD PTR 44[esp]
mov DWORD PTR 32[esi],eax
mov DWORD PTR 36[esi],ebx
add ecx,DWORD PTR 48[esp]
adc edx,DWORD PTR 52[esp]
mov DWORD PTR 40[esi],ecx
mov DWORD PTR 44[esi],edx
mov eax,DWORD PTR 48[esi]
mov ebx,DWORD PTR 52[esi]
mov ecx,DWORD PTR 56[esi]
mov edx,DWORD PTR 60[esi]
add eax,DWORD PTR 56[esp]
adc ebx,DWORD PTR 60[esp]
mov DWORD PTR 48[esi],eax
mov DWORD PTR 52[esi],ebx
add ecx,DWORD PTR 64[esp]
adc edx,DWORD PTR 68[esp]
mov DWORD PTR 56[esi],ecx
mov DWORD PTR 60[esi],edx
add esp,840	; drop W[] + window for this block
sub ebp,640	; rewind K pointer (80 rounds * 8 bytes)
cmp edi,DWORD PTR 8[esp]	; more input left?
jb $L002loop_x86
mov esp,DWORD PTR 12[esp]	; restore caller's esp
pop edi
pop esi
pop ebx
pop ebp
ret
ALIGN 64
; SHA-512 round constants K[0..79] (FIPS 180-4), stored low dword first.
$L001K512:
DD 3609767458,1116352408
DD 602891725,1899447441
DD 3964484399,3049323471
DD 2173295548,3921009573
DD 4081628472,961987163
DD 3053834265,1508970993
DD 2937671579,2453635748
DD 3664609560,2870763221
DD 2734883394,3624381080
DD 1164996542,310598401
DD 1323610764,607225278
DD 3590304994,1426881987
DD 4068182383,1925078388
DD 991336113,2162078206
DD 633803317,2614888103
DD 3479774868,3248222580
DD 2666613458,3835390401
DD 944711139,4022224774
DD 2341262773,264347078
DD 2007800933,604807628
DD 1495990901,770255983
DD 1856431235,1249150122
DD 3175218132,1555081692
DD 2198950837,1996064986
DD 3999719339,2554220882
DD 766784016,2821834349
DD 2566594879,2952996808
DD 3203337956,3210313671
DD 1034457026,3336571891
DD 2466948901,3584528711
DD 3758326383,113926993
DD 168717936,338241895
DD 1188179964,666307205
DD 1546045734,773529912
DD 1522805485,1294757372
DD 2643833823,1396182291
DD 2343527390,1695183700
DD 1014477480,1986661051
DD 1206759142,2177026350
DD 344077627,2456956037
DD 1290863460,2730485921
DD 3158454273,2820302411
DD 3505952657,3259730800
DD 106217008,3345764771
DD 3606008344,3516065817
DD 1432725776,3600352804
DD 1467031594,4094571909
DD 851169720,275423344
DD 3100823752,430227734
DD 1363258195,506948616
DD 3750685593,659060556
DD 3785050280,883997877
DD 3318307427,958139571
DD 3812723403,1322822218
DD 2003034995,1537002063
DD 3602036899,1747873779
DD 1575990012,1955562222
DD 1125592928,2024104815
DD 2716904306,2227730452
DD 442776044,2361852424
DD 593698344,2428436474
DD 3733110249,2756734187
DD 2999351573,3204031479
DD 3815920427,3329325298
DD 3928383900,3391569614
DD 566280711,3515267271
DD 3454069534,3940187606
DD 4000239992,4118630271
DD 1914138554,116418474
DD 2731055270,174292421
DD 3203993006,289380356
DD 320620315,460393269
DD 587496836,685471733
DD 1086792851,852142971
DD 365543100,1017036298
DD 2618297676,1126000580
DD 3409855158,1288033470
DD 4234509866,1501505948
DD 987167468,1607167915
DD 1246189591,1816402316
_sha512_block_data_order ENDP
; ASCII ident string:
; "SHA512 block transform for x86, CRYPTOGAMS by <appro@openssl.org>"
DB 83,72,65,53,49,50,32,98,108,111,99,107,32,116,114,97
DB 110,115,102,111,114,109,32,102,111,114,32,120,56,54,44,32
DB 67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97
DB 112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103
DB 62,0
.text$ ENDS
END

1122
deps/openssl/asm/x86-win32-masm/whrlpool/wp-mmx.asm

File diff suppressed because it is too large

277
deps/openssl/asm/x86-win32-masm/x86cpuid.asm

@ -0,0 +1,277 @@
TITLE x86cpuid.asm
IF @Version LT 800
ECHO MASM version 8.00 or later is strongly recommended.
ENDIF
.586
.MODEL FLAT
OPTION DOTNAME
IF @Version LT 800
.text$ SEGMENT PAGE 'CODE'
ELSE
.text$ SEGMENT ALIGN(64) 'CODE'
ENDIF
ALIGN 16
;-----------------------------------------------------------------------
; Probe CPUID and compute the OPENSSL_ia32cap capability word.
; Returns the (possibly adjusted) CPUID.1 EDX feature flags in eax and
; the CPUID.1 ECX feature flags in edx (see the tail at $L000done).
; Clobbers flags and all GP registers except the pushed callee-saved set.
;-----------------------------------------------------------------------
_OPENSSL_ia32_cpuid PROC PUBLIC
$L_OPENSSL_ia32_cpuid_begin::
push ebp
push ebx
push esi
push edi
xor edx,edx
; Classic CPUID-presence test: try to toggle EFLAGS.ID (bit 21).
pushfd
pop eax
mov ecx,eax
xor eax,2097152	; 0x200000 = EFLAGS.ID
push eax
popfd
pushfd
pop eax
xor ecx,eax
bt ecx,21
jnc $L000done	; bit would not toggle -> no CPUID
xor eax,eax
cpuid	; leaf 0: max leaf + vendor string
mov edi,eax	; edi = maximum supported leaf
; Vendor check: ebp = 0 iff vendor string is "GenuineIntel".
xor eax,eax
cmp ebx,1970169159	; "Genu"
setne al
mov ebp,eax
cmp edx,1231384169	; "ineI"
setne al
or ebp,eax
cmp ecx,1818588270	; "ntel"
setne al
or ebp,eax
jz $L001intel
; esi = 0 iff vendor string is "AuthenticAMD".
cmp ebx,1752462657	; "Auth"
setne al
mov esi,eax
cmp edx,1769238117	; "enti"
setne al
or esi,eax
cmp ecx,1145913699	; "cAMD"
setne al
or esi,eax
jnz $L001intel	; neither Intel nor AMD: treat like Intel path
; AMD: use extended leaf 0x80000008 to learn the real core count.
mov eax,2147483648	; 0x80000000: max extended leaf
cpuid
cmp eax,2147483656	; leaf 0x80000008 supported?
jb $L001intel
mov eax,2147483656
cpuid
movzx esi,cl	; ECX[7:0] = cores per package - 1
inc esi	; esi = core count
mov eax,1
cpuid
bt edx,28	; HTT flag
jnc $L000done
shr ebx,16
and ebx,255	; EBX[23:16] = logical processor count
cmp ebx,esi
ja $L000done	; more logical CPUs than cores -> genuine HT
and edx,4026531839	; 0xEFFFFFFF: clear bogus HTT bit
jmp $L000done
$L001intel:
; Intel: leaf 4 (if available) gives max cores per package.
cmp edi,4
mov edi,-1
jb $L002nocacheinfo
mov eax,4
mov ecx,0
cpuid
mov edi,eax
shr edi,14
and edi,4095	; EAX[25:14] = max logical CPUs sharing cache - 1
$L002nocacheinfo:
mov eax,1
cpuid	; leaf 1: feature flags
cmp ebp,0
jne $L003notP4
; Genuine Intel family 0xF (P4): force a synthetic flag bit 20
; (a bit reserved in CPUID output; used internally by OpenSSL).
and ah,15
cmp ah,15
jne $L003notP4
or edx,1048576	; 0x100000
$L003notP4:
bt edx,28	; HTT flag
jnc $L000done
and edx,4026531839	; tentatively clear HTT
cmp edi,0
je $L000done
or edx,268435456	; 0x10000000: set it back if cache info present
shr ebx,16
cmp bl,1	; single logical processor?
ja $L000done
and edx,4026531839	; then HTT is meaningless -> clear it
$L000done:
mov eax,edx	; return value low word  = EDX feature flags
mov edx,ecx	; return value high word = ECX feature flags
pop edi
pop esi
pop ebx
pop ebp
ret
_OPENSSL_ia32_cpuid ENDP
;EXTERN _OPENSSL_ia32cap_P:NEAR
ALIGN 16
;-----------------------------------------------------------------------
; Return the time-stamp counter in edx:eax, or 0 if the cached
; capability word says TSC (bit 4 of _OPENSSL_ia32cap_P) is absent.
;-----------------------------------------------------------------------
_OPENSSL_rdtsc PROC PUBLIC
$L_OPENSSL_rdtsc_begin::
xor eax,eax
xor edx,edx
lea ecx,DWORD PTR _OPENSSL_ia32cap_P
bt DWORD PTR [ecx],4	; TSC feature bit
jnc $L004notsc
rdtsc
$L004notsc:
ret
_OPENSSL_rdtsc ENDP
ALIGN 16
;-----------------------------------------------------------------------
; Measure how many TSC ticks a `hlt' instruction takes.
; Returns the tick delta in edx:eax, or 0 if it cannot halt (no TSC,
; not running in ring 0, or interrupts disabled -- hlt would hang).
;-----------------------------------------------------------------------
_OPENSSL_instrument_halt PROC PUBLIC
$L_OPENSSL_instrument_halt_begin::
lea ecx,DWORD PTR _OPENSSL_ia32cap_P
bt DWORD PTR [ecx],4	; TSC feature bit
jnc $L005nohalt
DD 2421723150	; 0x9058900E = bytes 0E,90,58,90: push cs/nop/pop eax/nop
and eax,3	; CPL from the CS selector's RPL bits
jnz $L005nohalt	; hlt is privileged: ring 0 only
pushfd
pop eax
bt eax,9	; EFLAGS.IF: need interrupts to wake from hlt
jnc $L005nohalt
rdtsc
push edx
push eax	; remember start timestamp
hlt
rdtsc
sub eax,DWORD PTR [esp]
sbb edx,DWORD PTR 4[esp]	; edx:eax = ticks spent halted
add esp,8
ret
$L005nohalt:
xor eax,eax
xor edx,edx
ret
_OPENSSL_instrument_halt ENDP
ALIGN 16
;-----------------------------------------------------------------------
; Spin while the dword at [arg2] (read through the segment selector
; passed as arg1) keeps its initial value; returns the iteration count
; in eax.  Returns 0/0 if interrupts are disabled (value could never
; change).  The segment switch is emitted as raw instruction bytes.
;-----------------------------------------------------------------------
_OPENSSL_far_spin PROC PUBLIC
$L_OPENSSL_far_spin_begin::
pushfd
pop eax
bt eax,9	; EFLAGS.IF set?
jnc $L006nospin
mov eax,DWORD PTR 4[esp]	; arg1: segment selector
mov ecx,DWORD PTR 8[esp]	; arg2: offset to watch
DD 2430111262	; 0x90D88E1E = bytes 1E,8E,D8,90: push ds / mov ds,ax / nop
xor eax,eax	; eax = spin counter
mov edx,DWORD PTR [ecx]	; initial value (via the new DS)
jmp $L007spin
ALIGN 16
$L007spin:
inc eax
cmp edx,DWORD PTR [ecx]
je $L007spin
DD 529567888	; 0x1F909090 = bytes 90,90,90,1F: nops / pop ds (restore)
ret
$L006nospin:
xor eax,eax
xor edx,edx
ret
_OPENSSL_far_spin ENDP
ALIGN 16
;-----------------------------------------------------------------------
; Scrub CPU state: zero eax/edx and, if the capability word's bit 1
; (FPU/x87 indicator here) is set, wipe the x87 register stack with
; eight fldz followed by fwait/fninit (all emitted as raw data words).
; Returns eax = esp+4, i.e. the caller's stack pointer at call time.
;
; NOTE(review): `mov ecx,[ecx]' loads the capability *value*, yet the
; following `bt DWORD PTR [ecx],1' tests a bit *at that address* — a
; double dereference.  Verify against the upstream x86cpuid.pl that
; generated this file; do not "fix" generated code by hand.
;-----------------------------------------------------------------------
_OPENSSL_wipe_cpu PROC PUBLIC
$L_OPENSSL_wipe_cpu_begin::
xor eax,eax
xor edx,edx
lea ecx,DWORD PTR _OPENSSL_ia32cap_P
mov ecx,DWORD PTR [ecx]
bt DWORD PTR [ecx],1
jnc $L008no_x87
; 4x 0xEED9EED9 = fldz*8, then 0x90E3DB9B = fwait/fninit/nop
DD 4007259865,4007259865,4007259865,4007259865,2430851995
$L008no_x87:
lea eax,DWORD PTR 4[esp]	; report caller's stack pointer
ret
_OPENSSL_wipe_cpu ENDP
ALIGN 16
;-----------------------------------------------------------------------
; int OPENSSL_atomic_add(int *ptr, int amount)
; Lock-free add via a compare-and-swap loop; returns the new value.
; The cmpxchg is emitted as raw data so pre-cmpxchg assemblers cope.
;-----------------------------------------------------------------------
_OPENSSL_atomic_add PROC PUBLIC
$L_OPENSSL_atomic_add_begin::
mov edx,DWORD PTR 4[esp]	; edx = ptr
mov ecx,DWORD PTR 8[esp]	; ecx = amount
push ebx
nop
mov eax,DWORD PTR [edx]	; eax = current value
$L009spin:
lea ebx,DWORD PTR [ecx*1+eax]	; ebx = expected + amount
nop
DD 447811568	; 0x1AB10FF0 = bytes F0,0F,B1,1A: lock cmpxchg [edx],ebx
jne $L009spin	; *ptr changed under us: eax reloaded, retry
mov eax,ebx	; success: return the new value
pop ebx
ret
_OPENSSL_atomic_add ENDP
ALIGN 16
;-----------------------------------------------------------------------
; Call the function pointer passed as the first argument, forwarding up
; to seven dword arguments copied into a fresh stack frame (so the
; callee may clobber its argument area freely).  Returns whatever the
; callee leaves in eax/edx.
;-----------------------------------------------------------------------
_OPENSSL_indirect_call PROC PUBLIC
$L_OPENSSL_indirect_call_begin::
push ebp
mov ebp,esp
sub esp,28	; 7 argument slots (28 bytes)
mov ecx,DWORD PTR 12[ebp]	; copy arg1..arg7 into the new frame
mov DWORD PTR [esp],ecx
mov edx,DWORD PTR 16[ebp]
mov DWORD PTR 4[esp],edx
mov eax,DWORD PTR 20[ebp]
mov DWORD PTR 8[esp],eax
mov eax,DWORD PTR 24[ebp]
mov DWORD PTR 12[esp],eax
mov eax,DWORD PTR 28[ebp]
mov DWORD PTR 16[esp],eax
mov eax,DWORD PTR 32[ebp]
mov DWORD PTR 20[esp],eax
mov eax,DWORD PTR 36[ebp]
mov DWORD PTR 24[esp],eax
call DWORD PTR 8[ebp]	; 8[ebp] = function pointer (first argument)
mov esp,ebp	; discard the forwarded-argument frame
pop ebp
ret
_OPENSSL_indirect_call ENDP
ALIGN 16
;-----------------------------------------------------------------------
; void OPENSSL_cleanse(void *ptr, size_t len)
; Zero `len' bytes at `ptr'.  Written in assembly so the compiler can
; never optimise the wipe away.  Small buffers (<7 bytes) are cleared
; bytewise; larger ones are aligned to a dword boundary, cleared a
; dword at a time, then finished bytewise.
;-----------------------------------------------------------------------
_OPENSSL_cleanse PROC PUBLIC
$L_OPENSSL_cleanse_begin::
mov edx,DWORD PTR 4[esp]	; edx = ptr
mov ecx,DWORD PTR 8[esp]	; ecx = len
xor eax,eax	; fill value: 0
cmp ecx,7
jae $L010lot
cmp ecx,0
je $L011ret	; len == 0: nothing to do
$L012little:
mov BYTE PTR [edx],al
sub ecx,1
lea edx,DWORD PTR 1[edx]	; lea: advance without touching ZF from sub
jnz $L012little
$L011ret:
ret
ALIGN 16
$L010lot:
test edx,3	; align ptr to a 4-byte boundary first
jz $L013aligned
mov BYTE PTR [edx],al
lea ecx,DWORD PTR [ecx-1]
lea edx,DWORD PTR 1[edx]
jmp $L010lot
$L013aligned:
mov DWORD PTR [edx],eax	; bulk clear, one dword per iteration
lea ecx,DWORD PTR [ecx-4]
test ecx,-4	; any whole dwords left?
lea edx,DWORD PTR 4[edx]
jnz $L013aligned
cmp ecx,0
jne $L012little	; 1..3 trailing bytes
ret
_OPENSSL_cleanse ENDP
.text$ ENDS
.bss SEGMENT 'BSS'
COMM _OPENSSL_ia32cap_P:DWORD
.bss ENDS
.CRT$XCU SEGMENT DWORD PUBLIC 'DATA'
EXTERN _OPENSSL_cpuid_setup:NEAR
DD _OPENSSL_cpuid_setup
.CRT$XCU ENDS
END
Loading…
Cancel
Save