author    Ralf S. Engelschall <rse@openssl.org>  1998-12-21 10:56:39 +0000
committer Ralf S. Engelschall <rse@openssl.org>  1998-12-21 10:56:39 +0000
commit    58964a492275ca9a59a0cd9c8155cb2491b4b909 (patch)
tree      c7b16876a5789463bbbb468ef4829c8129b3d718 /crypto/md5/asm
parent    d02b48c63a58ea4367a0e905979f140b7d090f86 (diff)
download  openssl-58964a492275ca9a59a0cd9c8155cb2491b4b909.zip
          openssl-58964a492275ca9a59a0cd9c8155cb2491b4b909.tar.gz
          openssl-58964a492275ca9a59a0cd9c8155cb2491b4b909.tar.bz2
Import of old SSLeay release: SSLeay 0.9.0b
Diffstat (limited to 'crypto/md5/asm')
-rw-r--r--  crypto/md5/asm/m5-win32.asm   686
-rw-r--r--  crypto/md5/asm/md5-586.pl     304
-rw-r--r--  crypto/md5/asm/mx86unix.cpp   730
3 files changed, 1720 insertions, 0 deletions
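
For orientation only, before the generated code: all three files below unroll the MD5 block function of RFC 1321, sixty-four steps per 64-byte block. A minimal C sketch (not part of this commit) of the four auxiliary functions and of one round-0 step, which the "R0 0" .. "R3 63" sections compute register by register:

    #include <stdint.h>

    /* MD5 auxiliary functions (RFC 1321); the R0..R3 sections correspond
     * to F, G, H and I respectively. */
    #define F(b,c,d) (((b) & (c)) | (~(b) & (d)))
    #define G(b,c,d) (((b) & (d)) | ((c) & ~(d)))
    #define H(b,c,d) ((b) ^ (c) ^ (d))
    #define I(b,c,d) ((c) ^ ((b) | ~(d)))

    #define ROTL32(x,n) (((x) << (n)) | ((x) >> (32 - (n))))

    /* One round-0 step, e.g. "R0 0": a = b + ((a + F(b,c,d) + X[k] + t) <<< s).
     * The assembly folds "a + X[k] + t" into a single lea instruction and
     * computes F as ((c ^ d) & b) ^ d, which is equivalent but saves an
     * instruction. */
    static uint32_t md5_step_r0(uint32_t a, uint32_t b, uint32_t c,
                                uint32_t d, uint32_t xk, uint32_t t, int s)
    {
        a += F(b, c, d) + xk + t;
        return b + ROTL32(a, s);
    }
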
diff --git a/crypto/md5/asm/m5-win32.asm b/crypto/md5/asm/m5-win32.asm
new file mode 100644
index 0000000..c2081da
--- /dev/null
+++ b/crypto/md5/asm/m5-win32.asm
@@ -0,0 +1,686 @@
+ ; Don't even think of reading this code
+ ; It was automatically generated by md5-586.pl
+	; Which is a perl program used to generate the x86 assembler for
+	; any of elf, a.out, BSDI, Win32, or Solaris
+ ; eric <eay@cryptsoft.com>
+ ;
+ TITLE md5-586.asm
+ .386
+.model FLAT
+_TEXT SEGMENT
+PUBLIC _md5_block_x86
+
+_md5_block_x86 PROC NEAR
+ push esi
+ push edi
+ mov edi, DWORD PTR 12[esp]
+ mov esi, DWORD PTR 16[esp]
+ mov ecx, DWORD PTR 20[esp]
+ push ebp
+ push ebx
+ add ecx, esi
+ sub ecx, 64
+ mov eax, DWORD PTR [edi]
+ push ecx
+ mov ebx, DWORD PTR 4[edi]
+ mov ecx, DWORD PTR 8[edi]
+ mov edx, DWORD PTR 12[edi]
+L000start:
+ ;
+ ; R0 section
+ mov edi, ecx
+ mov ebp, DWORD PTR [esi]
+ ; R0 0
+ xor edi, edx
+ and edi, ebx
+ lea eax, DWORD PTR 3614090360[ebp*1+eax]
+ mov ebp, DWORD PTR 4[esi]
+ xor edi, edx
+ add eax, edi
+ mov edi, ebx
+ rol eax, 7
+ add eax, ebx
+ ; R0 1
+ xor edi, ecx
+ and edi, eax
+ lea edx, DWORD PTR 3905402710[ebp*1+edx]
+ mov ebp, DWORD PTR 8[esi]
+ xor edi, ecx
+ add edx, edi
+ mov edi, eax
+ rol edx, 12
+ add edx, eax
+ ; R0 2
+ xor edi, ebx
+ and edi, edx
+ lea ecx, DWORD PTR 606105819[ebp*1+ecx]
+ mov ebp, DWORD PTR 12[esi]
+ xor edi, ebx
+ add ecx, edi
+ mov edi, edx
+ rol ecx, 17
+ add ecx, edx
+ ; R0 3
+ xor edi, eax
+ and edi, ecx
+ lea ebx, DWORD PTR 3250441966[ebp*1+ebx]
+ mov ebp, DWORD PTR 16[esi]
+ xor edi, eax
+ add ebx, edi
+ mov edi, ecx
+ rol ebx, 22
+ add ebx, ecx
+ ; R0 4
+ xor edi, edx
+ and edi, ebx
+ lea eax, DWORD PTR 4118548399[ebp*1+eax]
+ mov ebp, DWORD PTR 20[esi]
+ xor edi, edx
+ add eax, edi
+ mov edi, ebx
+ rol eax, 7
+ add eax, ebx
+ ; R0 5
+ xor edi, ecx
+ and edi, eax
+ lea edx, DWORD PTR 1200080426[ebp*1+edx]
+ mov ebp, DWORD PTR 24[esi]
+ xor edi, ecx
+ add edx, edi
+ mov edi, eax
+ rol edx, 12
+ add edx, eax
+ ; R0 6
+ xor edi, ebx
+ and edi, edx
+ lea ecx, DWORD PTR 2821735955[ebp*1+ecx]
+ mov ebp, DWORD PTR 28[esi]
+ xor edi, ebx
+ add ecx, edi
+ mov edi, edx
+ rol ecx, 17
+ add ecx, edx
+ ; R0 7
+ xor edi, eax
+ and edi, ecx
+ lea ebx, DWORD PTR 4249261313[ebp*1+ebx]
+ mov ebp, DWORD PTR 32[esi]
+ xor edi, eax
+ add ebx, edi
+ mov edi, ecx
+ rol ebx, 22
+ add ebx, ecx
+ ; R0 8
+ xor edi, edx
+ and edi, ebx
+ lea eax, DWORD PTR 1770035416[ebp*1+eax]
+ mov ebp, DWORD PTR 36[esi]
+ xor edi, edx
+ add eax, edi
+ mov edi, ebx
+ rol eax, 7
+ add eax, ebx
+ ; R0 9
+ xor edi, ecx
+ and edi, eax
+ lea edx, DWORD PTR 2336552879[ebp*1+edx]
+ mov ebp, DWORD PTR 40[esi]
+ xor edi, ecx
+ add edx, edi
+ mov edi, eax
+ rol edx, 12
+ add edx, eax
+ ; R0 10
+ xor edi, ebx
+ and edi, edx
+ lea ecx, DWORD PTR 4294925233[ebp*1+ecx]
+ mov ebp, DWORD PTR 44[esi]
+ xor edi, ebx
+ add ecx, edi
+ mov edi, edx
+ rol ecx, 17
+ add ecx, edx
+ ; R0 11
+ xor edi, eax
+ and edi, ecx
+ lea ebx, DWORD PTR 2304563134[ebp*1+ebx]
+ mov ebp, DWORD PTR 48[esi]
+ xor edi, eax
+ add ebx, edi
+ mov edi, ecx
+ rol ebx, 22
+ add ebx, ecx
+ ; R0 12
+ xor edi, edx
+ and edi, ebx
+ lea eax, DWORD PTR 1804603682[ebp*1+eax]
+ mov ebp, DWORD PTR 52[esi]
+ xor edi, edx
+ add eax, edi
+ mov edi, ebx
+ rol eax, 7
+ add eax, ebx
+ ; R0 13
+ xor edi, ecx
+ and edi, eax
+ lea edx, DWORD PTR 4254626195[ebp*1+edx]
+ mov ebp, DWORD PTR 56[esi]
+ xor edi, ecx
+ add edx, edi
+ mov edi, eax
+ rol edx, 12
+ add edx, eax
+ ; R0 14
+ xor edi, ebx
+ and edi, edx
+ lea ecx, DWORD PTR 2792965006[ebp*1+ecx]
+ mov ebp, DWORD PTR 60[esi]
+ xor edi, ebx
+ add ecx, edi
+ mov edi, edx
+ rol ecx, 17
+ add ecx, edx
+ ; R0 15
+ xor edi, eax
+ and edi, ecx
+ lea ebx, DWORD PTR 1236535329[ebp*1+ebx]
+ mov ebp, DWORD PTR 4[esi]
+ xor edi, eax
+ add ebx, edi
+ mov edi, ecx
+ rol ebx, 22
+ add ebx, ecx
+ ;
+ ; R1 section
+ ; R1 16
+ lea eax, DWORD PTR 4129170786[ebp*1+eax]
+ xor edi, ebx
+ and edi, edx
+ mov ebp, DWORD PTR 24[esi]
+ xor edi, ecx
+ add eax, edi
+ mov edi, ebx
+ rol eax, 5
+ add eax, ebx
+ ; R1 17
+ lea edx, DWORD PTR 3225465664[ebp*1+edx]
+ xor edi, eax
+ and edi, ecx
+ mov ebp, DWORD PTR 44[esi]
+ xor edi, ebx
+ add edx, edi
+ mov edi, eax
+ rol edx, 9
+ add edx, eax
+ ; R1 18
+ lea ecx, DWORD PTR 643717713[ebp*1+ecx]
+ xor edi, edx
+ and edi, ebx
+ mov ebp, DWORD PTR [esi]
+ xor edi, eax
+ add ecx, edi
+ mov edi, edx
+ rol ecx, 14
+ add ecx, edx
+ ; R1 19
+ lea ebx, DWORD PTR 3921069994[ebp*1+ebx]
+ xor edi, ecx
+ and edi, eax
+ mov ebp, DWORD PTR 20[esi]
+ xor edi, edx
+ add ebx, edi
+ mov edi, ecx
+ rol ebx, 20
+ add ebx, ecx
+ ; R1 20
+ lea eax, DWORD PTR 3593408605[ebp*1+eax]
+ xor edi, ebx
+ and edi, edx
+ mov ebp, DWORD PTR 40[esi]
+ xor edi, ecx
+ add eax, edi
+ mov edi, ebx
+ rol eax, 5
+ add eax, ebx
+ ; R1 21
+ lea edx, DWORD PTR 38016083[ebp*1+edx]
+ xor edi, eax
+ and edi, ecx
+ mov ebp, DWORD PTR 60[esi]
+ xor edi, ebx
+ add edx, edi
+ mov edi, eax
+ rol edx, 9
+ add edx, eax
+ ; R1 22
+ lea ecx, DWORD PTR 3634488961[ebp*1+ecx]
+ xor edi, edx
+ and edi, ebx
+ mov ebp, DWORD PTR 16[esi]
+ xor edi, eax
+ add ecx, edi
+ mov edi, edx
+ rol ecx, 14
+ add ecx, edx
+ ; R1 23
+ lea ebx, DWORD PTR 3889429448[ebp*1+ebx]
+ xor edi, ecx
+ and edi, eax
+ mov ebp, DWORD PTR 36[esi]
+ xor edi, edx
+ add ebx, edi
+ mov edi, ecx
+ rol ebx, 20
+ add ebx, ecx
+ ; R1 24
+ lea eax, DWORD PTR 568446438[ebp*1+eax]
+ xor edi, ebx
+ and edi, edx
+ mov ebp, DWORD PTR 56[esi]
+ xor edi, ecx
+ add eax, edi
+ mov edi, ebx
+ rol eax, 5
+ add eax, ebx
+ ; R1 25
+ lea edx, DWORD PTR 3275163606[ebp*1+edx]
+ xor edi, eax
+ and edi, ecx
+ mov ebp, DWORD PTR 12[esi]
+ xor edi, ebx
+ add edx, edi
+ mov edi, eax
+ rol edx, 9
+ add edx, eax
+ ; R1 26
+ lea ecx, DWORD PTR 4107603335[ebp*1+ecx]
+ xor edi, edx
+ and edi, ebx
+ mov ebp, DWORD PTR 32[esi]
+ xor edi, eax
+ add ecx, edi
+ mov edi, edx
+ rol ecx, 14
+ add ecx, edx
+ ; R1 27
+ lea ebx, DWORD PTR 1163531501[ebp*1+ebx]
+ xor edi, ecx
+ and edi, eax
+ mov ebp, DWORD PTR 52[esi]
+ xor edi, edx
+ add ebx, edi
+ mov edi, ecx
+ rol ebx, 20
+ add ebx, ecx
+ ; R1 28
+ lea eax, DWORD PTR 2850285829[ebp*1+eax]
+ xor edi, ebx
+ and edi, edx
+ mov ebp, DWORD PTR 8[esi]
+ xor edi, ecx
+ add eax, edi
+ mov edi, ebx
+ rol eax, 5
+ add eax, ebx
+ ; R1 29
+ lea edx, DWORD PTR 4243563512[ebp*1+edx]
+ xor edi, eax
+ and edi, ecx
+ mov ebp, DWORD PTR 28[esi]
+ xor edi, ebx
+ add edx, edi
+ mov edi, eax
+ rol edx, 9
+ add edx, eax
+ ; R1 30
+ lea ecx, DWORD PTR 1735328473[ebp*1+ecx]
+ xor edi, edx
+ and edi, ebx
+ mov ebp, DWORD PTR 48[esi]
+ xor edi, eax
+ add ecx, edi
+ mov edi, edx
+ rol ecx, 14
+ add ecx, edx
+ ; R1 31
+ lea ebx, DWORD PTR 2368359562[ebp*1+ebx]
+ xor edi, ecx
+ and edi, eax
+ mov ebp, DWORD PTR 20[esi]
+ xor edi, edx
+ add ebx, edi
+ mov edi, ecx
+ rol ebx, 20
+ add ebx, ecx
+ ;
+ ; R2 section
+ ; R2 32
+ xor edi, edx
+ xor edi, ebx
+ lea eax, DWORD PTR 4294588738[ebp*1+eax]
+ add eax, edi
+ mov ebp, DWORD PTR 32[esi]
+ rol eax, 4
+ mov edi, ebx
+ ; R2 33
+ lea edx, DWORD PTR 2272392833[ebp*1+edx]
+ add eax, ebx
+ xor edi, ecx
+ xor edi, eax
+ mov ebp, DWORD PTR 44[esi]
+ add edx, edi
+ mov edi, eax
+ rol edx, 11
+ add edx, eax
+ ; R2 34
+ xor edi, ebx
+ xor edi, edx
+ lea ecx, DWORD PTR 1839030562[ebp*1+ecx]
+ add ecx, edi
+ mov ebp, DWORD PTR 56[esi]
+ rol ecx, 16
+ mov edi, edx
+ ; R2 35
+ lea ebx, DWORD PTR 4259657740[ebp*1+ebx]
+ add ecx, edx
+ xor edi, eax
+ xor edi, ecx
+ mov ebp, DWORD PTR 4[esi]
+ add ebx, edi
+ mov edi, ecx
+ rol ebx, 23
+ add ebx, ecx
+ ; R2 36
+ xor edi, edx
+ xor edi, ebx
+ lea eax, DWORD PTR 2763975236[ebp*1+eax]
+ add eax, edi
+ mov ebp, DWORD PTR 16[esi]
+ rol eax, 4
+ mov edi, ebx
+ ; R2 37
+ lea edx, DWORD PTR 1272893353[ebp*1+edx]
+ add eax, ebx
+ xor edi, ecx
+ xor edi, eax
+ mov ebp, DWORD PTR 28[esi]
+ add edx, edi
+ mov edi, eax
+ rol edx, 11
+ add edx, eax
+ ; R2 38
+ xor edi, ebx
+ xor edi, edx
+ lea ecx, DWORD PTR 4139469664[ebp*1+ecx]
+ add ecx, edi
+ mov ebp, DWORD PTR 40[esi]
+ rol ecx, 16
+ mov edi, edx
+ ; R2 39
+ lea ebx, DWORD PTR 3200236656[ebp*1+ebx]
+ add ecx, edx
+ xor edi, eax
+ xor edi, ecx
+ mov ebp, DWORD PTR 52[esi]
+ add ebx, edi
+ mov edi, ecx
+ rol ebx, 23
+ add ebx, ecx
+ ; R2 40
+ xor edi, edx
+ xor edi, ebx
+ lea eax, DWORD PTR 681279174[ebp*1+eax]
+ add eax, edi
+ mov ebp, DWORD PTR [esi]
+ rol eax, 4
+ mov edi, ebx
+ ; R2 41
+ lea edx, DWORD PTR 3936430074[ebp*1+edx]
+ add eax, ebx
+ xor edi, ecx
+ xor edi, eax
+ mov ebp, DWORD PTR 12[esi]
+ add edx, edi
+ mov edi, eax
+ rol edx, 11
+ add edx, eax
+ ; R2 42
+ xor edi, ebx
+ xor edi, edx
+ lea ecx, DWORD PTR 3572445317[ebp*1+ecx]
+ add ecx, edi
+ mov ebp, DWORD PTR 24[esi]
+ rol ecx, 16
+ mov edi, edx
+ ; R2 43
+ lea ebx, DWORD PTR 76029189[ebp*1+ebx]
+ add ecx, edx
+ xor edi, eax
+ xor edi, ecx
+ mov ebp, DWORD PTR 36[esi]
+ add ebx, edi
+ mov edi, ecx
+ rol ebx, 23
+ add ebx, ecx
+ ; R2 44
+ xor edi, edx
+ xor edi, ebx
+ lea eax, DWORD PTR 3654602809[ebp*1+eax]
+ add eax, edi
+ mov ebp, DWORD PTR 48[esi]
+ rol eax, 4
+ mov edi, ebx
+ ; R2 45
+ lea edx, DWORD PTR 3873151461[ebp*1+edx]
+ add eax, ebx
+ xor edi, ecx
+ xor edi, eax
+ mov ebp, DWORD PTR 60[esi]
+ add edx, edi
+ mov edi, eax
+ rol edx, 11
+ add edx, eax
+ ; R2 46
+ xor edi, ebx
+ xor edi, edx
+ lea ecx, DWORD PTR 530742520[ebp*1+ecx]
+ add ecx, edi
+ mov ebp, DWORD PTR 8[esi]
+ rol ecx, 16
+ mov edi, edx
+ ; R2 47
+ lea ebx, DWORD PTR 3299628645[ebp*1+ebx]
+ add ecx, edx
+ xor edi, eax
+ xor edi, ecx
+ mov ebp, DWORD PTR [esi]
+ add ebx, edi
+ mov edi, -1
+ rol ebx, 23
+ add ebx, ecx
+ ;
+ ; R3 section
+ ; R3 48
+ xor edi, edx
+ or edi, ebx
+ lea eax, DWORD PTR 4096336452[ebp*1+eax]
+ xor edi, ecx
+ mov ebp, DWORD PTR 28[esi]
+ add eax, edi
+ mov edi, -1
+ rol eax, 6
+ xor edi, ecx
+ add eax, ebx
+ ; R3 49
+ or edi, eax
+ lea edx, DWORD PTR 1126891415[ebp*1+edx]
+ xor edi, ebx
+ mov ebp, DWORD PTR 56[esi]
+ add edx, edi
+ mov edi, -1
+ rol edx, 10
+ xor edi, ebx
+ add edx, eax
+ ; R3 50
+ or edi, edx
+ lea ecx, DWORD PTR 2878612391[ebp*1+ecx]
+ xor edi, eax
+ mov ebp, DWORD PTR 20[esi]
+ add ecx, edi
+ mov edi, -1
+ rol ecx, 15
+ xor edi, eax
+ add ecx, edx
+ ; R3 51
+ or edi, ecx
+ lea ebx, DWORD PTR 4237533241[ebp*1+ebx]
+ xor edi, edx
+ mov ebp, DWORD PTR 48[esi]
+ add ebx, edi
+ mov edi, -1
+ rol ebx, 21
+ xor edi, edx
+ add ebx, ecx
+ ; R3 52
+ or edi, ebx
+ lea eax, DWORD PTR 1700485571[ebp*1+eax]
+ xor edi, ecx
+ mov ebp, DWORD PTR 12[esi]
+ add eax, edi
+ mov edi, -1
+ rol eax, 6
+ xor edi, ecx
+ add eax, ebx
+ ; R3 53
+ or edi, eax
+ lea edx, DWORD PTR 2399980690[ebp*1+edx]
+ xor edi, ebx
+ mov ebp, DWORD PTR 40[esi]
+ add edx, edi
+ mov edi, -1
+ rol edx, 10
+ xor edi, ebx
+ add edx, eax
+ ; R3 54
+ or edi, edx
+ lea ecx, DWORD PTR 4293915773[ebp*1+ecx]
+ xor edi, eax
+ mov ebp, DWORD PTR 4[esi]
+ add ecx, edi
+ mov edi, -1
+ rol ecx, 15
+ xor edi, eax
+ add ecx, edx
+ ; R3 55
+ or edi, ecx
+ lea ebx, DWORD PTR 2240044497[ebp*1+ebx]
+ xor edi, edx
+ mov ebp, DWORD PTR 32[esi]
+ add ebx, edi
+ mov edi, -1
+ rol ebx, 21
+ xor edi, edx
+ add ebx, ecx
+ ; R3 56
+ or edi, ebx
+ lea eax, DWORD PTR 1873313359[ebp*1+eax]
+ xor edi, ecx
+ mov ebp, DWORD PTR 60[esi]
+ add eax, edi
+ mov edi, -1
+ rol eax, 6
+ xor edi, ecx
+ add eax, ebx
+ ; R3 57
+ or edi, eax
+ lea edx, DWORD PTR 4264355552[ebp*1+edx]
+ xor edi, ebx
+ mov ebp, DWORD PTR 24[esi]
+ add edx, edi
+ mov edi, -1
+ rol edx, 10
+ xor edi, ebx
+ add edx, eax
+ ; R3 58
+ or edi, edx
+ lea ecx, DWORD PTR 2734768916[ebp*1+ecx]
+ xor edi, eax
+ mov ebp, DWORD PTR 52[esi]
+ add ecx, edi
+ mov edi, -1
+ rol ecx, 15
+ xor edi, eax
+ add ecx, edx
+ ; R3 59
+ or edi, ecx
+ lea ebx, DWORD PTR 1309151649[ebp*1+ebx]
+ xor edi, edx
+ mov ebp, DWORD PTR 16[esi]
+ add ebx, edi
+ mov edi, -1
+ rol ebx, 21
+ xor edi, edx
+ add ebx, ecx
+ ; R3 60
+ or edi, ebx
+ lea eax, DWORD PTR 4149444226[ebp*1+eax]
+ xor edi, ecx
+ mov ebp, DWORD PTR 44[esi]
+ add eax, edi
+ mov edi, -1
+ rol eax, 6
+ xor edi, ecx
+ add eax, ebx
+ ; R3 61
+ or edi, eax
+ lea edx, DWORD PTR 3174756917[ebp*1+edx]
+ xor edi, ebx
+ mov ebp, DWORD PTR 8[esi]
+ add edx, edi
+ mov edi, -1
+ rol edx, 10
+ xor edi, ebx
+ add edx, eax
+ ; R3 62
+ or edi, edx
+ lea ecx, DWORD PTR 718787259[ebp*1+ecx]
+ xor edi, eax
+ mov ebp, DWORD PTR 36[esi]
+ add ecx, edi
+ mov edi, -1
+ rol ecx, 15
+ xor edi, eax
+ add ecx, edx
+ ; R3 63
+ or edi, ecx
+ lea ebx, DWORD PTR 3951481745[ebp*1+ebx]
+ xor edi, edx
+ mov ebp, DWORD PTR 24[esp]
+ add ebx, edi
+ add esi, 64
+ rol ebx, 21
+ mov edi, DWORD PTR [ebp]
+ add ebx, ecx
+ add eax, edi
+ mov edi, DWORD PTR 4[ebp]
+ add ebx, edi
+ mov edi, DWORD PTR 8[ebp]
+ add ecx, edi
+ mov edi, DWORD PTR 12[ebp]
+ add edx, edi
+ mov DWORD PTR [ebp],eax
+ mov DWORD PTR 4[ebp],ebx
+ mov edi, DWORD PTR [esp]
+ mov DWORD PTR 8[ebp],ecx
+ mov DWORD PTR 12[ebp],edx
+ cmp edi, esi
+ jge L000start
+ pop eax
+ pop ebx
+ pop ebp
+ pop edi
+ pop esi
+ ret
+_md5_block_x86 ENDP
+_TEXT ENDS
+END
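
A note on the large decimal constants above (3614090360, 3905402710, and so on): they are the standard MD5 sine table, which the Perl source that follows writes in hex (0xd76aa478, 0xe8c7b756, ...). A small self-contained C program (not part of this commit; link with -lm) that reproduces the table:

    #include <math.h>
    #include <stdint.h>
    #include <stdio.h>

    /* T[i] = floor(2^32 * |sin(i + 1)|) for i = 0..63 (RFC 1321).
     * T[0] = 3614090360 = 0xd76aa478, the constant used in step "R0 0". */
    int main(void)
    {
        for (int i = 0; i < 64; i++) {
            uint32_t t = (uint32_t)(4294967296.0 * fabs(sin((double)(i + 1))));
            printf("T[%2d] = %10u  0x%08x\n", i, t, t);
        }
        return 0;
    }
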
diff --git a/crypto/md5/asm/md5-586.pl b/crypto/md5/asm/md5-586.pl
new file mode 100644
index 0000000..b1238e0
--- /dev/null
+++ b/crypto/md5/asm/md5-586.pl
@@ -0,0 +1,304 @@
+#!/usr/local/bin/perl
+
+# Normal is the
+# md5_block_x86(MD5_CTX *c, ULONG *X);
+# version, non-normal is the
+# md5_block_x86(MD5_CTX *c, ULONG *X,int blocks);
+
+$normal=0;
+
+push(@INC,"perlasm","../../perlasm");
+require "x86asm.pl";
+
+&asm_init($ARGV[0],$0);
+
+$A="eax";
+$B="ebx";
+$C="ecx";
+$D="edx";
+$tmp1="edi";
+$tmp2="ebp";
+$X="esi";
+
+# What we need to load into $tmp for the next round
+%Ltmp1=("R0",&Np($C), "R1",&Np($C), "R2",&Np($C), "R3",&Np($D));
+@xo=(
+ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, # R0
+ 1, 6, 11, 0, 5, 10, 15, 4, 9, 14, 3, 8, 13, 2, 7, 12, # R1
+ 5, 8, 11, 14, 1, 4, 7, 10, 13, 0, 3, 6, 9, 12, 15, 2, # R2
+ 0, 7, 14, 5, 12, 3, 10, 1, 8, 15, 6, 13, 4, 11, 2, 9, # R3
+ );
+
+&md5_block("md5_block_x86");
+&asm_finish();
+
+sub Np
+ {
+ local($p)=@_;
+ local(%n)=($A,$D,$B,$A,$C,$B,$D,$C);
+ return($n{$p});
+ }
+
+sub R0
+ {
+ local($pos,$a,$b,$c,$d,$K,$ki,$s,$t)=@_;
+
+ &mov($tmp1,$C) if $pos < 0;
+ &mov($tmp2,&DWP($xo[$ki]*4,$K,"",0)) if $pos < 0; # very first one
+
+ # body proper
+
+ &comment("R0 $ki");
+ &xor($tmp1,$d); # F function - part 2
+
+ &and($tmp1,$b); # F function - part 3
+ &lea($a,&DWP($t,$a,$tmp2,1));
+
+ &mov($tmp2,&DWP($xo[$ki+1]*4,$K,"",0)) if ($pos != 2);
+ &xor($tmp1,$d); # F function - part 4
+
+ &add($a,$tmp1);
+ &mov($tmp1,&Np($c)) if $pos < 1; # next tmp1 for R0
+ &mov($tmp1,&Np($c)) if $pos == 1; # next tmp1 for R1
+
+ &rotl($a,$s);
+ &add($a,$b);
+
+ }
+
+sub R1
+ {
+ local($pos,$a,$b,$c,$d,$K,$ki,$s,$t)=@_;
+
+ &comment("R1 $ki");
+
+ &lea($a,&DWP($t,$a,$tmp2,1));
+
+ &xor($tmp1,$b); # G function - part 2
+ &and($tmp1,$d); # G function - part 3
+
+ &mov($tmp2,&DWP($xo[$ki+1]*4,$K,"",0)) if ($pos != 2);
+ &xor($tmp1,$c); # G function - part 4
+
+ &add($a,$tmp1);
+ &mov($tmp1,&Np($c)) if $pos < 1; # G function - part 1
+ &mov($tmp1,&Np($c)) if $pos == 1; # G function - part 1
+
+ &rotl($a,$s);
+
+ &add($a,$b);
+ }
+
+sub R2
+ {
+ local($n,$pos,$a,$b,$c,$d,$K,$ki,$s,$t)=@_;
+ # This one is different, only 3 logical operations
+
+if (($n & 1) == 0)
+ {
+ &comment("R2 $ki");
+ # make sure to do 'D' first, not 'B', else we clash with
+ # the last add from the previous round.
+
+ &xor($tmp1,$d); # H function - part 2
+
+ &xor($tmp1,$b); # H function - part 3
+ &lea($a,&DWP($t,$a,$tmp2,1));
+
+ &add($a,$tmp1);
+ &mov($tmp2,&DWP($xo[$ki+1]*4,$K,"",0));
+
+ &rotl($a,$s);
+
+ &mov($tmp1,&Np($c));
+ }
+else
+ {
+ &comment("R2 $ki");
+ # make sure to do 'D' first, not 'B', else we clash with
+ # the last add from the previous round.
+
+ &lea($a,&DWP($t,$a,$tmp2,1));
+
+ &add($b,$c); # MOVED FORWARD
+ &xor($tmp1,$d); # H function - part 2
+
+ &xor($tmp1,$b); # H function - part 3
+ &mov($tmp2,&DWP($xo[$ki+1]*4,$K,"",0)) if ($pos != 2);
+
+ &add($a,$tmp1);
+ &mov($tmp1,&Np($c)) if $pos < 1; # H function - part 1
+ &mov($tmp1,-1) if $pos == 1; # I function - part 1
+
+ &rotl($a,$s);
+
+ &add($a,$b);
+ }
+ }
+
+sub R3
+ {
+ local($pos,$a,$b,$c,$d,$K,$ki,$s,$t)=@_;
+
+ &comment("R3 $ki");
+
+ # &not($tmp1)
+ &xor($tmp1,$d) if $pos < 0; # I function - part 2
+
+ &or($tmp1,$b); # I function - part 3
+ &lea($a,&DWP($t,$a,$tmp2,1));
+
+ &xor($tmp1,$c); # I function - part 4
+ &mov($tmp2,&DWP($xo[$ki+1]*4,$K,"",0)) if $pos != 2; # load X/k value
+ &mov($tmp2,&wparam(0)) if $pos == 2;
+
+ &add($a,$tmp1);
+ &mov($tmp1,-1) if $pos < 1; # H function - part 1
+ &add($K,64) if $pos >=1 && !$normal;
+
+ &rotl($a,$s);
+
+ &xor($tmp1,&Np($d)) if $pos <= 0; # I function - part = first time
+ &mov($tmp1,&DWP( 0,$tmp2,"",0)) if $pos > 0;
+ &add($a,$b);
+ }
+
+
+sub md5_block
+ {
+ local($name)=@_;
+
+ &function_begin_B($name,"",3);
+
+ # parameter 1 is the MD5_CTX structure.
+ # A 0
+ # B 4
+ # C 8
+ # D 12
+
+ &push("esi");
+ &push("edi");
+ &mov($tmp1, &wparam(0)); # edi
+ &mov($X, &wparam(1)); # esi
+ &mov($C, &wparam(2));
+ &push("ebp");
+ &push("ebx");
+ &add($C, $X); # offset we end at
+ &sub($C, 64);
+ &mov($A, &DWP( 0,$tmp1,"",0));
+ &push($C); # Put on the TOS
+ &mov($B, &DWP( 4,$tmp1,"",0));
+ &mov($C, &DWP( 8,$tmp1,"",0));
+ &mov($D, &DWP(12,$tmp1,"",0));
+
+ &set_label("start") unless $normal;
+ &comment("");
+ &comment("R0 section");
+
+ &R0(-2,$A,$B,$C,$D,$X, 0, 7,0xd76aa478);
+ &R0( 0,$D,$A,$B,$C,$X, 1,12,0xe8c7b756);
+ &R0( 0,$C,$D,$A,$B,$X, 2,17,0x242070db);
+ &R0( 0,$B,$C,$D,$A,$X, 3,22,0xc1bdceee);
+ &R0( 0,$A,$B,$C,$D,$X, 4, 7,0xf57c0faf);
+ &R0( 0,$D,$A,$B,$C,$X, 5,12,0x4787c62a);
+ &R0( 0,$C,$D,$A,$B,$X, 6,17,0xa8304613);
+ &R0( 0,$B,$C,$D,$A,$X, 7,22,0xfd469501);
+ &R0( 0,$A,$B,$C,$D,$X, 8, 7,0x698098d8);
+ &R0( 0,$D,$A,$B,$C,$X, 9,12,0x8b44f7af);
+ &R0( 0,$C,$D,$A,$B,$X,10,17,0xffff5bb1);
+ &R0( 0,$B,$C,$D,$A,$X,11,22,0x895cd7be);
+ &R0( 0,$A,$B,$C,$D,$X,12, 7,0x6b901122);
+ &R0( 0,$D,$A,$B,$C,$X,13,12,0xfd987193);
+ &R0( 0,$C,$D,$A,$B,$X,14,17,0xa679438e);
+ &R0( 1,$B,$C,$D,$A,$X,15,22,0x49b40821);
+
+ &comment("");
+ &comment("R1 section");
+ &R1(-1,$A,$B,$C,$D,$X,16, 5,0xf61e2562);
+ &R1( 0,$D,$A,$B,$C,$X,17, 9,0xc040b340);
+ &R1( 0,$C,$D,$A,$B,$X,18,14,0x265e5a51);
+ &R1( 0,$B,$C,$D,$A,$X,19,20,0xe9b6c7aa);
+ &R1( 0,$A,$B,$C,$D,$X,20, 5,0xd62f105d);
+ &R1( 0,$D,$A,$B,$C,$X,21, 9,0x02441453);
+ &R1( 0,$C,$D,$A,$B,$X,22,14,0xd8a1e681);
+ &R1( 0,$B,$C,$D,$A,$X,23,20,0xe7d3fbc8);
+ &R1( 0,$A,$B,$C,$D,$X,24, 5,0x21e1cde6);
+ &R1( 0,$D,$A,$B,$C,$X,25, 9,0xc33707d6);
+ &R1( 0,$C,$D,$A,$B,$X,26,14,0xf4d50d87);
+ &R1( 0,$B,$C,$D,$A,$X,27,20,0x455a14ed);
+ &R1( 0,$A,$B,$C,$D,$X,28, 5,0xa9e3e905);
+ &R1( 0,$D,$A,$B,$C,$X,29, 9,0xfcefa3f8);
+ &R1( 0,$C,$D,$A,$B,$X,30,14,0x676f02d9);
+ &R1( 1,$B,$C,$D,$A,$X,31,20,0x8d2a4c8a);
+
+ &comment("");
+ &comment("R2 section");
+ &R2( 0,-1,$A,$B,$C,$D,$X,32, 4,0xfffa3942);
+ &R2( 1, 0,$D,$A,$B,$C,$X,33,11,0x8771f681);
+ &R2( 2, 0,$C,$D,$A,$B,$X,34,16,0x6d9d6122);
+ &R2( 3, 0,$B,$C,$D,$A,$X,35,23,0xfde5380c);
+ &R2( 4, 0,$A,$B,$C,$D,$X,36, 4,0xa4beea44);
+ &R2( 5, 0,$D,$A,$B,$C,$X,37,11,0x4bdecfa9);
+ &R2( 6, 0,$C,$D,$A,$B,$X,38,16,0xf6bb4b60);
+ &R2( 7, 0,$B,$C,$D,$A,$X,39,23,0xbebfbc70);
+ &R2( 8, 0,$A,$B,$C,$D,$X,40, 4,0x289b7ec6);
+ &R2( 9, 0,$D,$A,$B,$C,$X,41,11,0xeaa127fa);
+ &R2(10, 0,$C,$D,$A,$B,$X,42,16,0xd4ef3085);
+ &R2(11, 0,$B,$C,$D,$A,$X,43,23,0x04881d05);
+ &R2(12, 0,$A,$B,$C,$D,$X,44, 4,0xd9d4d039);
+ &R2(13, 0,$D,$A,$B,$C,$X,45,11,0xe6db99e5);
+ &R2(14, 0,$C,$D,$A,$B,$X,46,16,0x1fa27cf8);
+ &R2(15, 1,$B,$C,$D,$A,$X,47,23,0xc4ac5665);
+
+ &comment("");
+ &comment("R3 section");
+ &R3(-1,$A,$B,$C,$D,$X,48, 6,0xf4292244);
+ &R3( 0,$D,$A,$B,$C,$X,49,10,0x432aff97);
+ &R3( 0,$C,$D,$A,$B,$X,50,15,0xab9423a7);
+ &R3( 0,$B,$C,$D,$A,$X,51,21,0xfc93a039);
+ &R3( 0,$A,$B,$C,$D,$X,52, 6,0x655b59c3);
+ &R3( 0,$D,$A,$B,$C,$X,53,10,0x8f0ccc92);
+ &R3( 0,$C,$D,$A,$B,$X,54,15,0xffeff47d);
+ &R3( 0,$B,$C,$D,$A,$X,55,21,0x85845dd1);
+ &R3( 0,$A,$B,$C,$D,$X,56, 6,0x6fa87e4f);
+ &R3( 0,$D,$A,$B,$C,$X,57,10,0xfe2ce6e0);
+ &R3( 0,$C,$D,$A,$B,$X,58,15,0xa3014314);
+ &R3( 0,$B,$C,$D,$A,$X,59,21,0x4e0811a1);
+ &R3( 0,$A,$B,$C,$D,$X,60, 6,0xf7537e82);
+ &R3( 0,$D,$A,$B,$C,$X,61,10,0xbd3af235);
+ &R3( 0,$C,$D,$A,$B,$X,62,15,0x2ad7d2bb);
+ &R3( 2,$B,$C,$D,$A,$X,63,21,0xeb86d391);
+
+ # &mov($tmp2,&wparam(0)); # done in the last R3
+	# &mov($tmp1, &DWP( 0,$tmp2,"",0)); # done in the last R3
+
+ &add($A,$tmp1);
+ &mov($tmp1, &DWP( 4,$tmp2,"",0));
+
+ &add($B,$tmp1);
+ &mov($tmp1, &DWP( 8,$tmp2,"",0));
+
+ &add($C,$tmp1);
+ &mov($tmp1, &DWP(12,$tmp2,"",0));
+
+ &add($D,$tmp1);
+ &mov(&DWP( 0,$tmp2,"",0),$A);
+
+ &mov(&DWP( 4,$tmp2,"",0),$B);
+ &mov($tmp1,&swtmp(0)) unless $normal;
+
+ &mov(&DWP( 8,$tmp2,"",0),$C);
+ &mov(&DWP(12,$tmp2,"",0),$D);
+
+ &cmp($tmp1,$X) unless $normal; # check count
+ &jge(&label("start")) unless $normal;
+
+ &pop("eax"); # pop the temp variable off the stack
+ &pop("ebx");
+ &pop("ebp");
+ &pop("edi");
+ &pop("esi");
+ &ret();
+ &function_end_B($name);
+ }
+
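
The script above is built with $normal=0, so the emitted entry point is the three-argument form named in its header comment, md5_block_x86(MD5_CTX *c, ULONG *X, int blocks). Judging from the generated prologue (the end pointer is computed as X plus the third argument minus 64, and X advances by 64 each iteration), that third argument is consumed as a byte count that must be a multiple of 64. A hedged caller-side sketch (not part of this commit; the struct below declares only the four chaining words the assembly actually touches, at offsets 0, 4, 8 and 12):

    /* Hypothetical caller sketch; md5_state_sketch stands in for the real
     * MD5_CTX, of which only the first 16 bytes are read and written here. */
    typedef struct {
        unsigned long A, B, C, D;          /* chaining state, offsets 0..12 */
    } md5_state_sketch;

    /* Generated entry point.  The header comment calls the last parameter
     * "blocks", but the prologue treats it as a length in bytes. */
    extern void md5_block_x86(md5_state_sketch *c, unsigned long *X, int len);

    static void hash_blocks(md5_state_sketch *c, unsigned long *data,
                            int nblocks)
    {
        md5_block_x86(c, data, nblocks * 64);    /* 64 bytes per MD5 block */
    }
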
diff --git a/crypto/md5/asm/mx86unix.cpp b/crypto/md5/asm/mx86unix.cpp
new file mode 100644
index 0000000..5d39912
--- /dev/null
+++ b/crypto/md5/asm/mx86unix.cpp
@@ -0,0 +1,730 @@
+/* Run the C pre-processor over this file with one of the following defined
+ * ELF - elf object files,
+ * OUT - a.out object files,
+ * BSDI - BSDI style a.out object files
+ * SOL - Solaris style elf
+ */
+
+#define TYPE(a,b) .type a,b
+#define SIZE(a,b) .size a,b
+
+#if defined(OUT) || defined(BSDI)
+#define md5_block_x86 _md5_block_x86
+
+#endif
+
+#ifdef OUT
+#define OK 1
+#define ALIGN 4
+#endif
+
+#ifdef BSDI
+#define OK 1
+#define ALIGN 4
+#undef SIZE
+#undef TYPE
+#define SIZE(a,b)
+#define TYPE(a,b)
+#endif
+
+#if defined(ELF) || defined(SOL)
+#define OK 1
+#define ALIGN 16
+#endif
+
+#ifndef OK
+You need to define one of
+ELF - elf systems - linux-elf, NetBSD and DG-UX
+OUT - a.out systems - linux-a.out and FreeBSD
+SOL - solaris systems, which are elf with strange comment lines
+BSDI - a.out with a very primitive version of as.
+#endif
+
+/* Let the Assembler begin :-) */
+ /* Don't even think of reading this code */
+ /* It was automatically generated by md5-586.pl */
+	/* Which is a perl program used to generate the x86 assembler for */
+	/* any of elf, a.out, BSDI, Win32, or Solaris */
+ /* eric <eay@cryptsoft.com> */
+
+ .file "md5-586.s"
+ .version "01.01"
+gcc2_compiled.:
+.text
+ .align ALIGN
+.globl md5_block_x86
+ TYPE(md5_block_x86,@function)
+md5_block_x86:
+ pushl %esi
+ pushl %edi
+ movl 12(%esp), %edi
+ movl 16(%esp), %esi
+ movl 20(%esp), %ecx
+ pushl %ebp
+ pushl %ebx
+ addl %esi, %ecx
+ subl $64, %ecx
+ movl (%edi), %eax
+ pushl %ecx
+ movl 4(%edi), %ebx
+ movl 8(%edi), %ecx
+ movl 12(%edi), %edx
+.L000start:
+
+ /* R0 section */
+ movl %ecx, %edi
+ movl (%esi), %ebp
+ /* R0 0 */
+ xorl %edx, %edi
+ andl %ebx, %edi
+ leal 3614090360(%eax,%ebp,1),%eax
+ movl 4(%esi), %ebp
+ xorl %edx, %edi
+ addl %edi, %eax
+ movl %ebx, %edi
+ roll $7, %eax
+ addl %ebx, %eax
+ /* R0 1 */
+ xorl %ecx, %edi
+ andl %eax, %edi
+ leal 3905402710(%edx,%ebp,1),%edx
+ movl 8(%esi), %ebp
+ xorl %ecx, %edi
+ addl %edi, %edx
+ movl %eax, %edi
+ roll $12, %edx
+ addl %eax, %edx
+ /* R0 2 */
+ xorl %ebx, %edi
+ andl %edx, %edi
+ leal 606105819(%ecx,%ebp,1),%ecx
+ movl 12(%esi), %ebp
+ xorl %ebx, %edi
+ addl %edi, %ecx
+ movl %edx, %edi
+ roll $17, %ecx
+ addl %edx, %ecx
+ /* R0 3 */
+ xorl %eax, %edi
+ andl %ecx, %edi
+ leal 3250441966(%ebx,%ebp,1),%ebx
+ movl 16(%esi), %ebp
+ xorl %eax, %edi
+ addl %edi, %ebx
+ movl %ecx, %edi
+ roll $22, %ebx
+ addl %ecx, %ebx
+ /* R0 4 */
+ xorl %edx, %edi
+ andl %ebx, %edi
+ leal 4118548399(%eax,%ebp,1),%eax
+ movl 20(%esi), %ebp
+ xorl %edx, %edi
+ addl %edi, %eax
+ movl %ebx, %edi
+ roll $7, %eax
+ addl %ebx, %eax
+ /* R0 5 */
+ xorl %ecx, %edi
+ andl %eax, %edi
+ leal 1200080426(%edx,%ebp,1),%edx
+ movl 24(%esi), %ebp
+ xorl %ecx, %edi
+ addl %edi, %edx
+ movl %eax, %edi
+ roll $12, %edx
+ addl %eax, %edx
+ /* R0 6 */
+ xorl %ebx, %edi
+ andl %edx, %edi
+ leal 2821735955(%ecx,%ebp,1),%ecx
+ movl 28(%esi), %ebp
+ xorl %ebx, %edi
+ addl %edi, %ecx
+ movl %edx, %edi
+ roll $17, %ecx
+ addl %edx, %ecx
+ /* R0 7 */
+ xorl %eax, %edi
+ andl %ecx, %edi
+ leal 4249261313(%ebx,%ebp,1),%ebx
+ movl 32(%esi), %ebp
+ xorl %eax, %edi
+ addl %edi, %ebx
+ movl %ecx, %edi
+ roll $22, %ebx
+ addl %ecx, %ebx
+ /* R0 8 */
+ xorl %edx, %edi
+ andl %ebx, %edi
+ leal 1770035416(%eax,%ebp,1),%eax
+ movl 36(%esi), %ebp
+ xorl %edx, %edi
+ addl %edi, %eax
+ movl %ebx, %edi
+ roll $7, %eax
+ addl %ebx, %eax
+ /* R0 9 */
+ xorl %ecx, %edi
+ andl %eax, %edi
+ leal 2336552879(%edx,%ebp,1),%edx
+ movl 40(%esi), %ebp
+ xorl %ecx, %edi
+ addl %edi, %edx
+ movl %eax, %edi
+ roll $12, %edx
+ addl %eax, %edx
+ /* R0 10 */
+ xorl %ebx, %edi
+ andl %edx, %edi
+ leal 4294925233(%ecx,%ebp,1),%ecx
+ movl 44(%esi), %ebp
+ xorl %ebx, %edi
+ addl %edi, %ecx
+ movl %edx, %edi
+ roll $17, %ecx
+ addl %edx, %ecx
+ /* R0 11 */
+ xorl %eax, %edi
+ andl %ecx, %edi
+ leal 2304563134(%ebx,%ebp,1),%ebx
+ movl 48(%esi), %ebp
+ xorl %eax, %edi
+ addl %edi, %ebx
+ movl %ecx, %edi
+ roll $22, %ebx
+ addl %ecx, %ebx
+ /* R0 12 */
+ xorl %edx, %edi
+ andl %ebx, %edi
+ leal 1804603682(%eax,%ebp,1),%eax
+ movl 52(%esi), %ebp
+ xorl %edx, %edi
+ addl %edi, %eax
+ movl %ebx, %edi
+ roll $7, %eax
+ addl %ebx, %eax
+ /* R0 13 */
+ xorl %ecx, %edi
+ andl %eax, %edi
+ leal 4254626195(%edx,%ebp,1),%edx
+ movl 56(%esi), %ebp
+ xorl %ecx, %edi
+ addl %edi, %edx
+ movl %eax, %edi
+ roll $12, %edx
+ addl %eax, %edx
+ /* R0 14 */
+ xorl %ebx, %edi
+ andl %edx, %edi
+ leal 2792965006(%ecx,%ebp,1),%ecx
+ movl 60(%esi), %ebp
+ xorl %ebx, %edi
+ addl %edi, %ecx
+ movl %edx, %edi
+ roll $17, %ecx
+ addl %edx, %ecx
+ /* R0 15 */
+ xorl %eax, %edi
+ andl %ecx, %edi
+ leal 1236535329(%ebx,%ebp,1),%ebx
+ movl 4(%esi), %ebp
+ xorl %eax, %edi
+ addl %edi, %ebx
+ movl %ecx, %edi
+ roll $22, %ebx
+ addl %ecx, %ebx
+
+ /* R1 section */
+ /* R1 16 */
+ leal 4129170786(%eax,%ebp,1),%eax
+ xorl %ebx, %edi
+ andl %edx, %edi
+ movl 24(%esi), %ebp
+ xorl %ecx, %edi
+ addl %edi, %eax
+ movl %ebx, %edi
+ roll $5, %eax
+ addl %ebx, %eax
+ /* R1 17 */
+ leal 3225465664(%edx,%ebp,1),%edx
+ xorl %eax, %edi
+ andl %ecx, %edi
+ movl 44(%esi), %ebp
+ xorl %ebx, %edi
+ addl %edi, %edx
+ movl %eax, %edi
+ roll $9, %edx
+ addl %eax, %edx
+ /* R1 18 */
+ leal 643717713(%ecx,%ebp,1),%ecx
+ xorl %edx, %edi
+ andl %ebx, %edi
+ movl (%esi), %ebp
+ xorl %eax, %edi
+ addl %edi, %ecx
+ movl %edx, %edi
+ roll $14, %ecx
+ addl %edx, %ecx
+ /* R1 19 */
+ leal 3921069994(%ebx,%ebp,1),%ebx
+ xorl %ecx, %edi
+ andl %eax, %edi
+ movl 20(%esi), %ebp
+ xorl %edx, %edi
+ addl %edi, %ebx
+ movl %ecx, %edi
+ roll $20, %ebx
+ addl %ecx, %ebx
+ /* R1 20 */
+ leal 3593408605(%eax,%ebp,1),%eax
+ xorl %ebx, %edi
+ andl %edx, %edi
+ movl 40(%esi), %ebp
+ xorl %ecx, %edi
+ addl %edi, %eax
+ movl %ebx, %edi
+ roll $5, %eax
+ addl %ebx, %eax
+ /* R1 21 */
+ leal 38016083(%edx,%ebp,1),%edx
+ xorl %eax, %edi
+ andl %ecx, %edi
+ movl 60(%esi), %ebp
+ xorl %ebx, %edi
+ addl %edi, %edx
+ movl %eax, %edi
+ roll $9, %edx
+ addl %eax, %edx
+ /* R1 22 */
+ leal 3634488961(%ecx,%ebp,1),%ecx
+ xorl %edx, %edi
+ andl %ebx, %edi
+ movl 16(%esi), %ebp
+ xorl %eax, %edi
+ addl %edi, %ecx
+ movl %edx, %edi
+ roll $14, %ecx
+ addl %edx, %ecx
+ /* R1 23 */
+ leal 3889429448(%ebx,%ebp,1),%ebx
+ xorl %ecx, %edi
+ andl %eax, %edi
+ movl 36(%esi), %ebp
+ xorl %edx, %edi
+ addl %edi, %ebx
+ movl %ecx, %edi
+ roll $20, %ebx
+ addl %ecx, %ebx
+ /* R1 24 */
+ leal 568446438(%eax,%ebp,1),%eax
+ xorl %ebx, %edi
+ andl %edx, %edi
+ movl 56(%esi), %ebp
+ xorl %ecx, %edi
+ addl %edi, %eax
+ movl %ebx, %edi
+ roll $5, %eax
+ addl %ebx, %eax
+ /* R1 25 */
+ leal 3275163606(%edx,%ebp,1),%edx
+ xorl %eax, %edi
+ andl %ecx, %edi
+ movl 12(%esi), %ebp
+ xorl %ebx, %edi
+ addl %edi, %edx
+ movl %eax, %edi
+ roll $9, %edx
+ addl %eax, %edx
+ /* R1 26 */
+ leal 4107603335(%ecx,%ebp,1),%ecx
+ xorl %edx, %edi
+ andl %ebx, %edi
+ movl 32(%esi), %ebp
+ xorl %eax, %edi
+ addl %edi, %ecx
+ movl %edx, %edi
+ roll $14, %ecx
+ addl %edx, %ecx
+ /* R1 27 */
+ leal 1163531501(%ebx,%ebp,1),%ebx
+ xorl %ecx, %edi
+ andl %eax, %edi
+ movl 52(%esi), %ebp
+ xorl %edx, %edi
+ addl %edi, %ebx
+ movl %ecx, %edi
+ roll $20, %ebx
+ addl %ecx, %ebx
+ /* R1 28 */
+ leal 2850285829(%eax,%ebp,1),%eax
+ xorl %ebx, %edi
+ andl %edx, %edi
+ movl 8(%esi), %ebp
+ xorl %ecx, %edi
+ addl %edi, %eax
+ movl %ebx, %edi
+ roll $5, %eax
+ addl %ebx, %eax
+ /* R1 29 */
+ leal 4243563512(%edx,%ebp,1),%edx
+ xorl %eax, %edi
+ andl %ecx, %edi
+ movl 28(%esi), %ebp
+ xorl %ebx, %edi
+ addl %edi, %edx
+ movl %eax, %edi
+ roll $9, %edx
+ addl %eax, %edx
+ /* R1 30 */
+ leal 1735328473(%ecx,%ebp,1),%ecx
+ xorl %edx, %edi
+ andl %ebx, %edi
+ movl 48(%esi), %ebp
+ xorl %eax, %edi
+ addl %edi, %ecx
+ movl %edx, %edi
+ roll $14, %ecx
+ addl %edx, %ecx
+ /* R1 31 */
+ leal 2368359562(%ebx,%ebp,1),%ebx
+ xorl %ecx, %edi
+ andl %eax, %edi
+ movl 20(%esi), %ebp
+ xorl %edx, %edi
+ addl %edi, %ebx
+ movl %ecx, %edi
+ roll $20, %ebx
+ addl %ecx, %ebx
+
+ /* R2 section */
+ /* R2 32 */
+ xorl %edx, %edi
+ xorl %ebx, %edi
+ leal 4294588738(%eax,%ebp,1),%eax
+ addl %edi, %eax
+ movl 32(%esi), %ebp
+ roll $4, %eax
+ movl %ebx, %edi
+ /* R2 33 */
+ leal 2272392833(%edx,%ebp,1),%edx
+ addl %ebx, %eax
+ xorl %ecx, %edi
+ xorl %eax, %edi
+ movl 44(%esi), %ebp
+ addl %edi, %edx
+ movl %eax, %edi
+ roll $11, %edx
+ addl %eax, %edx
+ /* R2 34 */
+ xorl %ebx, %edi
+ xorl %edx, %edi
+ leal 1839030562(%ecx,%ebp,1),%ecx
+ addl %edi, %ecx
+ movl 56(%esi), %ebp
+ roll $16, %ecx
+ movl %edx, %edi
+ /* R2 35 */
+ leal 4259657740(%ebx,%ebp,1),%ebx
+ addl %edx, %ecx
+ xorl %eax, %edi
+ xorl %ecx, %edi
+ movl 4(%esi), %ebp
+ addl %edi, %ebx
+ movl %ecx, %edi
+ roll $23, %ebx
+ addl %ecx, %ebx
+ /* R2 36 */
+ xorl %edx, %edi
+ xorl %ebx, %edi
+ leal 2763975236(%eax,%ebp,1),%eax
+ addl %edi, %eax
+ movl 16(%esi), %ebp
+ roll $4, %eax
+ movl %ebx, %edi
+ /* R2 37 */
+ leal 1272893353(%edx,%ebp,1),%edx
+ addl %ebx, %eax
+ xorl %ecx, %edi
+ xorl %eax, %edi
+ movl 28(%esi), %ebp
+ addl %edi, %edx
+ movl %eax, %edi
+ roll $11, %edx
+ addl %eax, %edx
+ /* R2 38 */
+ xorl %ebx, %edi
+ xorl %edx, %edi
+ leal 4139469664(%ecx,%ebp,1),%ecx
+ addl %edi, %ecx
+ movl 40(%esi), %ebp
+ roll $16, %ecx
+ movl %edx, %edi
+ /* R2 39 */
+ leal 3200236656(%ebx,%ebp,1),%ebx
+ addl %edx, %ecx
+ xorl %eax, %edi
+ xorl %ecx, %edi
+ movl 52(%esi), %ebp
+ addl %edi, %ebx
+ movl %ecx, %edi
+ roll $23, %ebx
+ addl %ecx, %ebx
+ /* R2 40 */
+ xorl %edx, %edi
+ xorl %ebx, %edi
+ leal 681279174(%eax,%ebp,1),%eax
+ addl %edi, %eax
+ movl (%esi), %ebp
+ roll $4, %eax
+ movl %ebx, %edi
+ /* R2 41 */
+ leal 3936430074(%edx,%ebp,1),%edx
+ addl %ebx, %eax
+ xorl %ecx, %edi
+ xorl %eax, %edi
+ movl 12(%esi), %ebp
+ addl %edi, %edx
+ movl %eax, %edi
+ roll $11, %edx
+ addl %eax, %edx
+ /* R2 42 */
+ xorl %ebx, %edi
+ xorl %edx, %edi
+ leal 3572445317(%ecx,%ebp,1),%ecx
+ addl %edi, %ecx
+ movl 24(%esi), %ebp
+ roll $16, %ecx
+ movl %edx, %edi
+ /* R2 43 */
+ leal 76029189(%ebx,%ebp,1),%ebx
+ addl %edx, %ecx
+ xorl %eax, %edi
+ xorl %ecx, %edi
+ movl 36(%esi), %ebp
+ addl %edi, %ebx
+ movl %ecx, %edi
+ roll $23, %ebx
+ addl %ecx, %ebx
+ /* R2 44 */
+ xorl %edx, %edi
+ xorl %ebx, %edi
+ leal 3654602809(%eax,%ebp,1),%eax
+ addl %edi, %eax
+ movl 48(%esi), %ebp
+ roll $4, %eax
+ movl %ebx, %edi
+ /* R2 45 */
+ leal 3873151461(%edx,%ebp,1),%edx
+ addl %ebx, %eax
+ xorl %ecx, %edi
+ xorl %eax, %edi
+ movl 60(%esi), %ebp
+ addl %edi, %edx
+ movl %eax, %edi
+ roll $11, %edx
+ addl %eax, %edx
+ /* R2 46 */
+ xorl %ebx, %edi
+ xorl %edx, %edi
+ leal 530742520(%ecx,%ebp,1),%ecx
+ addl %edi, %ecx
+ movl 8(%esi), %ebp
+ roll $16, %ecx
+ movl %edx, %edi
+ /* R2 47 */
+ leal 3299628645(%ebx,%ebp,1),%ebx
+ addl %edx, %ecx
+ xorl %eax, %edi
+ xorl %ecx, %edi
+ movl (%esi), %ebp
+ addl %edi, %ebx
+ movl $-1, %edi
+ roll $23, %ebx
+ addl %ecx, %ebx
+
+ /* R3 section */
+ /* R3 48 */
+ xorl %edx, %edi
+ orl %ebx, %edi
+ leal 4096336452(%eax,%ebp,1),%eax
+ xorl %ecx, %edi
+ movl 28(%esi), %ebp
+ addl %edi, %eax
+ movl $-1, %edi
+ roll $6, %eax
+ xorl %ecx, %edi
+ addl %ebx, %eax
+ /* R3 49 */
+ orl %eax, %edi
+ leal 1126891415(%edx,%ebp,1),%edx
+ xorl %ebx, %edi
+ movl 56(%esi), %ebp
+ addl %edi, %edx
+ movl $-1, %edi
+ roll $10, %edx
+ xorl %ebx, %edi
+ addl %eax, %edx
+ /* R3 50 */
+ orl %edx, %edi
+ leal 2878612391(%ecx,%ebp,1),%ecx
+ xorl %eax, %edi
+ movl 20(%esi), %ebp
+ addl %edi, %ecx
+ movl $-1, %edi
+ roll $15, %ecx
+ xorl %eax, %edi
+ addl %edx, %ecx
+ /* R3 51 */
+ orl %ecx, %edi
+ leal 4237533241(%ebx,%ebp,1),%ebx
+ xorl %edx, %edi
+ movl 48(%esi), %ebp
+ addl %edi, %ebx
+ movl $-1, %edi
+ roll $21, %ebx
+ xorl %edx, %edi
+ addl %ecx, %ebx
+ /* R3 52 */
+ orl %ebx, %edi
+ leal 1700485571(%eax,%ebp,1),%eax
+ xorl %ecx, %edi
+ movl 12(%esi), %ebp
+ addl %edi, %eax
+ movl $-1, %edi
+ roll $6, %eax
+ xorl %ecx, %edi
+ addl %ebx, %eax
+ /* R3 53 */
+ orl %eax, %edi
+ leal 2399980690(%edx,%ebp,1),%edx
+ xorl %ebx, %edi
+ movl 40(%esi), %ebp
+ addl %edi, %edx
+ movl $-1, %edi
+ roll $10, %edx
+ xorl %ebx, %edi
+ addl %eax, %edx
+ /* R3 54 */
+ orl %edx, %edi
+ leal 4293915773(%ecx,%ebp,1),%ecx
+ xorl %eax, %edi
+ movl 4(%esi), %ebp
+ addl %edi, %ecx
+ movl $-1, %edi
+ roll $15, %ecx
+ xorl %eax, %edi
+ addl %edx, %ecx
+ /* R3 55 */
+ orl %ecx, %edi
+ leal 2240044497(%ebx,%ebp,1),%ebx
+ xorl %edx, %edi
+ movl 32(%esi), %ebp
+ addl %edi, %ebx
+ movl $-1, %edi
+ roll $21, %ebx
+ xorl %edx, %edi
+ addl %ecx, %ebx
+ /* R3 56 */
+ orl %ebx, %edi
+ leal 1873313359(%eax,%ebp,1),%eax
+ xorl %ecx, %edi
+ movl 60(%esi), %ebp
+ addl %edi, %eax
+ movl $-1, %edi
+ roll $6, %eax
+ xorl %ecx, %edi
+ addl %ebx, %eax
+ /* R3 57 */
+ orl %eax, %edi
+ leal 4264355552(%edx,%ebp,1),%edx
+ xorl %ebx, %edi
+ movl 24(%esi), %ebp
+ addl %edi, %edx
+ movl $-1, %edi
+ roll $10, %edx
+ xorl %ebx, %edi
+ addl %eax, %edx
+ /* R3 58 */
+ orl %edx, %edi
+ leal 2734768916(%ecx,%ebp,1),%ecx
+ xorl %eax, %edi
+ movl 52(%esi), %ebp
+ addl %edi, %ecx
+ movl $-1, %edi
+ roll $15, %ecx
+ xorl %eax, %edi
+ addl %edx, %ecx
+ /* R3 59 */
+ orl %ecx, %edi
+ leal 1309151649(%ebx,%ebp,1),%ebx
+ xorl %edx, %edi
+ movl 16(%esi), %ebp
+ addl %edi, %ebx
+ movl $-1, %edi
+ roll $21, %ebx
+ xorl %edx, %edi
+ addl %ecx, %ebx
+ /* R3 60 */
+ orl %ebx, %edi
+ leal 4149444226(%eax,%ebp,1),%eax
+ xorl %ecx, %edi
+ movl 44(%esi), %ebp
+ addl %edi, %eax
+ movl $-1, %edi
+ roll $6, %eax
+ xorl %ecx, %edi
+ addl %ebx, %eax
+ /* R3 61 */
+ orl %eax, %edi
+ leal 3174756917(%edx,%ebp,1),%edx
+ xorl %ebx, %edi
+ movl 8(%esi), %ebp
+ addl %edi, %edx
+ movl $-1, %edi
+ roll $10, %edx
+ xorl %ebx, %edi
+ addl %eax, %edx
+ /* R3 62 */
+ orl %edx, %edi
+ leal 718787259(%ecx,%ebp,1),%ecx
+ xorl %eax, %edi
+ movl 36(%esi), %ebp
+ addl %edi, %ecx
+ movl $-1, %edi
+ roll $15, %ecx
+ xorl %eax, %edi
+ addl %edx, %ecx
+ /* R3 63 */
+ orl %ecx, %edi
+ leal 3951481745(%ebx,%ebp,1),%ebx
+ xorl %edx, %edi
+ movl 24(%esp), %ebp
+ addl %edi, %ebx
+ addl $64, %esi
+ roll $21, %ebx
+ movl (%ebp), %edi
+ addl %ecx, %ebx
+ addl %edi, %eax
+ movl 4(%ebp), %edi
+ addl %edi, %ebx
+ movl 8(%ebp), %edi
+ addl %edi, %ecx
+ movl 12(%ebp), %edi
+ addl %edi, %edx
+ movl %eax, (%ebp)
+ movl %ebx, 4(%ebp)
+ movl (%esp), %edi
+ movl %ecx, 8(%ebp)
+ movl %edx, 12(%ebp)
+ cmpl %esi, %edi
+ jge .L000start
+ popl %eax
+ popl %ebx
+ popl %ebp
+ popl %edi
+ popl %esi
+ ret
+.md5_block_x86_end:
+ SIZE(md5_block_x86,.md5_block_x86_end-md5_block_x86)
+.ident "desasm.pl"
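
As its header comment says, mx86unix.cpp above is not assembled directly: the C pre-processor is run over it with exactly one of ELF, OUT, BSDI or SOL defined, and the output goes to the assembler. A miniature, compilable sketch (not part of this commit) of that selection mechanism; build it with, for example, -DELF or -DOUT to see which symbol name and alignment value the real file would use, and with no target defined it fails just as the real file's "#ifndef OK" block does:

    #include <stdio.h>

    #if defined(OUT) || defined(BSDI)
    #define SYMBOL "_md5_block_x86"     /* a.out symbols carry a '_' prefix */
    #define OK 1
    #define ALIGN 4
    #endif

    #if defined(ELF) || defined(SOL)
    #define SYMBOL "md5_block_x86"
    #define OK 1
    #define ALIGN 16
    #endif

    #ifndef OK
    #error "define one of ELF, OUT, BSDI or SOL"
    #endif

    int main(void)
    {
        printf("symbol %s, .align %d\n", SYMBOL, ALIGN);
        return 0;
    }
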