path: root/src/lib/libcrypto/rc4/asm
author    cvs2svn <admin@example.com>  1998-10-05 20:13:17 +0000
committer cvs2svn <admin@example.com>  1998-10-05 20:13:17 +0000
commit    e82f18fab47b698d93971f576f962a3068132912 (patch)
tree      681519717892864935c3d0533cf171098afa649a /src/lib/libcrypto/rc4/asm
parent    536c76cbb863bab152f19842ab88772c01e922c7 (diff)
This commit was manufactured by cvs2git to create tag 'SSLeay_0_9_0b'.
Diffstat (limited to 'src/lib/libcrypto/rc4/asm')
-rw-r--r--  src/lib/libcrypto/rc4/asm/r4-win32.asm   314
-rw-r--r--  src/lib/libcrypto/rc4/asm/rx86unix.cpp   358
2 files changed, 672 insertions(+), 0 deletions(-)
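Both new files carry the same machine-generated RC4 routine: the main loop produces eight key-stream bytes per pass and XORs them against the input. For orientation only, here is a minimal C sketch (not part of this commit) of the byte-at-a-time loop the assembly below unrolls; the struct layout mirrors the state the code expects (two indices, then a 256-entry word table), and all names here are illustrative.

#include <stddef.h>

/* Illustrative state layout: two indices, then the permutation table,
 * one 32-bit word per byte (hence the *4 scaling in the assembly). */
typedef struct {
    unsigned int x, y;
    unsigned int data[256];
} rc4_state;

/* One RC4 byte per iteration; the generated code unrolls this x8. */
static void rc4_sketch(rc4_state *key, size_t len,
                       const unsigned char *in, unsigned char *out)
{
    unsigned int x = key->x, y = key->y, tx, ty;

    while (len--) {
        x = (x + 1) & 255;
        tx = key->data[x];
        y = (y + tx) & 255;        /* y += S[x] */
        ty = key->data[y];
        key->data[y] = tx;         /* swap S[x] and S[y] */
        key->data[x] = ty;
        *out++ = *in++ ^ (unsigned char)key->data[(tx + ty) & 255];
    }
    key->x = x;
    key->y = y;
}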
diff --git a/src/lib/libcrypto/rc4/asm/r4-win32.asm b/src/lib/libcrypto/rc4/asm/r4-win32.asm
new file mode 100644
index 0000000000..70b0f7484c
--- /dev/null
+++ b/src/lib/libcrypto/rc4/asm/r4-win32.asm
@@ -0,0 +1,314 @@
	; Don't even think of reading this code
	; It was automatically generated by rc4-586.pl
	; Which is a perl program used to generate the x86 assembler for
	; any of elf, a.out, BSDI, Win32, or Solaris
	; eric <eay@cryptsoft.com>
	;
	TITLE rc4-586.asm
	.386
.model FLAT
_TEXT SEGMENT
PUBLIC _RC4

_RC4 PROC NEAR
	;
	push ebp
	push ebx
	mov ebp, DWORD PTR 12[esp]
	mov ebx, DWORD PTR 16[esp]
	push esi
	push edi
	mov ecx, DWORD PTR [ebp]
	mov edx, DWORD PTR 4[ebp]
	mov esi, DWORD PTR 28[esp]
	inc ecx
	sub esp, 12
	add ebp, 8
	and ecx, 255
	lea ebx, DWORD PTR [esi+ebx-8]
	mov edi, DWORD PTR 44[esp]
	mov DWORD PTR 8[esp],ebx
	mov eax, DWORD PTR [ecx*4+ebp]
	cmp ebx, esi
	jl $L000end
L001start:
	add esi, 8
	; Round 0
	add edx, eax
	and edx, 255
	inc ecx
	mov ebx, DWORD PTR [edx*4+ebp]
	mov DWORD PTR [ecx*4+ebp-4],ebx
	add ebx, eax
	and ecx, 255
	and ebx, 255
	mov DWORD PTR [edx*4+ebp],eax
	nop
	mov ebx, DWORD PTR [ebx*4+ebp]
	mov eax, DWORD PTR [ecx*4+ebp]
	mov BYTE PTR [esp], bl
	; Round 1
	add edx, eax
	and edx, 255
	inc ecx
	mov ebx, DWORD PTR [edx*4+ebp]
	mov DWORD PTR [ecx*4+ebp-4],ebx
	add ebx, eax
	and ecx, 255
	and ebx, 255
	mov DWORD PTR [edx*4+ebp],eax
	nop
	mov ebx, DWORD PTR [ebx*4+ebp]
	mov eax, DWORD PTR [ecx*4+ebp]
	mov BYTE PTR 1[esp],bl
	; Round 2
	add edx, eax
	and edx, 255
	inc ecx
	mov ebx, DWORD PTR [edx*4+ebp]
	mov DWORD PTR [ecx*4+ebp-4],ebx
	add ebx, eax
	and ecx, 255
	and ebx, 255
	mov DWORD PTR [edx*4+ebp],eax
	nop
	mov ebx, DWORD PTR [ebx*4+ebp]
	mov eax, DWORD PTR [ecx*4+ebp]
	mov BYTE PTR 2[esp],bl
	; Round 3
	add edx, eax
	and edx, 255
	inc ecx
	mov ebx, DWORD PTR [edx*4+ebp]
	mov DWORD PTR [ecx*4+ebp-4],ebx
	add ebx, eax
	and ecx, 255
	and ebx, 255
	mov DWORD PTR [edx*4+ebp],eax
	nop
	mov ebx, DWORD PTR [ebx*4+ebp]
	mov eax, DWORD PTR [ecx*4+ebp]
	mov BYTE PTR 3[esp],bl
	; Round 4
	add edx, eax
	and edx, 255
	inc ecx
	mov ebx, DWORD PTR [edx*4+ebp]
	mov DWORD PTR [ecx*4+ebp-4],ebx
	add ebx, eax
	and ecx, 255
	and ebx, 255
	mov DWORD PTR [edx*4+ebp],eax
	nop
	mov ebx, DWORD PTR [ebx*4+ebp]
	mov eax, DWORD PTR [ecx*4+ebp]
	mov BYTE PTR 4[esp],bl
	; Round 5
	add edx, eax
	and edx, 255
	inc ecx
	mov ebx, DWORD PTR [edx*4+ebp]
	mov DWORD PTR [ecx*4+ebp-4],ebx
	add ebx, eax
	and ecx, 255
	and ebx, 255
	mov DWORD PTR [edx*4+ebp],eax
	nop
	mov ebx, DWORD PTR [ebx*4+ebp]
	mov eax, DWORD PTR [ecx*4+ebp]
	mov BYTE PTR 5[esp],bl
	; Round 6
	add edx, eax
	and edx, 255
	inc ecx
	mov ebx, DWORD PTR [edx*4+ebp]
	mov DWORD PTR [ecx*4+ebp-4],ebx
	add ebx, eax
	and ecx, 255
	and ebx, 255
	mov DWORD PTR [edx*4+ebp],eax
	nop
	mov ebx, DWORD PTR [ebx*4+ebp]
	mov eax, DWORD PTR [ecx*4+ebp]
	mov BYTE PTR 6[esp],bl
	; Round 7
	add edx, eax
	and edx, 255
	inc ecx
	mov ebx, DWORD PTR [edx*4+ebp]
	mov DWORD PTR [ecx*4+ebp-4],ebx
	add ebx, eax
	and ecx, 255
	and ebx, 255
	mov DWORD PTR [edx*4+ebp],eax
	nop
	mov ebx, DWORD PTR [ebx*4+ebp]
	add edi, 8
	mov BYTE PTR 7[esp],bl
	; apply the cipher text
	mov eax, DWORD PTR [esp]
	mov ebx, DWORD PTR [esi-8]
	xor eax, ebx
	mov ebx, DWORD PTR [esi-4]
	mov DWORD PTR [edi-8],eax
	mov eax, DWORD PTR 4[esp]
	xor eax, ebx
	mov ebx, DWORD PTR 8[esp]
	mov DWORD PTR [edi-4],eax
	mov eax, DWORD PTR [ecx*4+ebp]
	cmp esi, ebx
	jle L001start
$L000end:
	; Round 0
	add ebx, 8
	inc esi
	cmp ebx, esi
	jl $L002finished
	mov DWORD PTR 8[esp],ebx
	add edx, eax
	and edx, 255
	inc ecx
	mov ebx, DWORD PTR [edx*4+ebp]
	mov DWORD PTR [ecx*4+ebp-4],ebx
	add ebx, eax
	and ecx, 255
	and ebx, 255
	mov DWORD PTR [edx*4+ebp],eax
	nop
	mov ebx, DWORD PTR [ebx*4+ebp]
	mov eax, DWORD PTR [ecx*4+ebp]
	mov bh, BYTE PTR [esi-1]
	xor bl, bh
	mov BYTE PTR [edi], bl
	; Round 1
	mov ebx, DWORD PTR 8[esp]
	cmp ebx, esi
	jle $L002finished
	inc esi
	add edx, eax
	and edx, 255
	inc ecx
	mov ebx, DWORD PTR [edx*4+ebp]
	mov DWORD PTR [ecx*4+ebp-4],ebx
	add ebx, eax
	and ecx, 255
	and ebx, 255
	mov DWORD PTR [edx*4+ebp],eax
	nop
	mov ebx, DWORD PTR [ebx*4+ebp]
	mov eax, DWORD PTR [ecx*4+ebp]
	mov bh, BYTE PTR [esi-1]
	xor bl, bh
	mov BYTE PTR 1[edi],bl
	; Round 2
	mov ebx, DWORD PTR 8[esp]
	cmp ebx, esi
	jle $L002finished
	inc esi
	add edx, eax
	and edx, 255
	inc ecx
	mov ebx, DWORD PTR [edx*4+ebp]
	mov DWORD PTR [ecx*4+ebp-4],ebx
	add ebx, eax
	and ecx, 255
	and ebx, 255
	mov DWORD PTR [edx*4+ebp],eax
	nop
	mov ebx, DWORD PTR [ebx*4+ebp]
	mov eax, DWORD PTR [ecx*4+ebp]
	mov bh, BYTE PTR [esi-1]
	xor bl, bh
	mov BYTE PTR 2[edi],bl
	; Round 3
	mov ebx, DWORD PTR 8[esp]
	cmp ebx, esi
	jle $L002finished
	inc esi
	add edx, eax
	and edx, 255
	inc ecx
	mov ebx, DWORD PTR [edx*4+ebp]
	mov DWORD PTR [ecx*4+ebp-4],ebx
	add ebx, eax
	and ecx, 255
	and ebx, 255
	mov DWORD PTR [edx*4+ebp],eax
	nop
	mov ebx, DWORD PTR [ebx*4+ebp]
	mov eax, DWORD PTR [ecx*4+ebp]
	mov bh, BYTE PTR [esi-1]
	xor bl, bh
	mov BYTE PTR 3[edi],bl
	; Round 4
	mov ebx, DWORD PTR 8[esp]
	cmp ebx, esi
	jle $L002finished
	inc esi
	add edx, eax
	and edx, 255
	inc ecx
	mov ebx, DWORD PTR [edx*4+ebp]
	mov DWORD PTR [ecx*4+ebp-4],ebx
	add ebx, eax
	and ecx, 255
	and ebx, 255
	mov DWORD PTR [edx*4+ebp],eax
	nop
	mov ebx, DWORD PTR [ebx*4+ebp]
	mov eax, DWORD PTR [ecx*4+ebp]
	mov bh, BYTE PTR [esi-1]
	xor bl, bh
	mov BYTE PTR 4[edi],bl
	; Round 5
	mov ebx, DWORD PTR 8[esp]
	cmp ebx, esi
	jle $L002finished
	inc esi
	add edx, eax
	and edx, 255
	inc ecx
	mov ebx, DWORD PTR [edx*4+ebp]
	mov DWORD PTR [ecx*4+ebp-4],ebx
	add ebx, eax
	and ecx, 255
	and ebx, 255
	mov DWORD PTR [edx*4+ebp],eax
	nop
	mov ebx, DWORD PTR [ebx*4+ebp]
	mov eax, DWORD PTR [ecx*4+ebp]
	mov bh, BYTE PTR [esi-1]
	xor bl, bh
	mov BYTE PTR 5[edi],bl
	; Round 6
	mov ebx, DWORD PTR 8[esp]
	cmp ebx, esi
	jle $L002finished
	inc esi
	add edx, eax
	and edx, 255
	inc ecx
	mov ebx, DWORD PTR [edx*4+ebp]
	mov DWORD PTR [ecx*4+ebp-4],ebx
	add ebx, eax
	and ecx, 255
	and ebx, 255
	mov DWORD PTR [edx*4+ebp],eax
	nop
	mov ebx, DWORD PTR [ebx*4+ebp]
	mov bh, BYTE PTR [esi-1]
	xor bl, bh
	mov BYTE PTR 6[edi],bl
$L002finished:
	dec ecx
	add esp, 12
	mov DWORD PTR [ebp-4],edx
	mov BYTE PTR [ebp-8],cl
	pop edi
	pop esi
	pop ebx
	pop ebp
	ret
_RC4 ENDP
_TEXT ENDS
END
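The argument fetches at the top of _RC4 (12[esp] and 16[esp] before esi/edi are saved, 28[esp] and 44[esp] afterwards) line up with a plain four-argument cdecl call. Assuming the prototype SSLeay's rc4.h declares for this routine, with RC4_INT taken as a 32-bit integer on x86 (hence the *4 scaling on every table access):

/* Assumed C-side view of the routine above; the x/y words are what
 * $L002finished writes back through [ebp-8] and [ebp-4] once ebp has
 * been advanced to the table. */
typedef struct rc4_key_st {
    unsigned int x, y;        /* loaded from [ebp] and 4[ebp]        */
    unsigned int data[256];   /* table base is ebp+8 after the setup */
} RC4_KEY;

void RC4(RC4_KEY *key, unsigned long len,
         unsigned char *indata, unsigned char *outdata);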
diff --git a/src/lib/libcrypto/rc4/asm/rx86unix.cpp b/src/lib/libcrypto/rc4/asm/rx86unix.cpp
new file mode 100644
index 0000000000..ec1d72a110
--- /dev/null
+++ b/src/lib/libcrypto/rc4/asm/rx86unix.cpp
@@ -0,0 +1,358 @@
/* Run the C pre-processor over this file with one of the following defined
 * ELF - elf object files,
 * OUT - a.out object files,
 * BSDI - BSDI style a.out object files
 * SOL - Solaris style elf
 */

#define TYPE(a,b) .type a,b
#define SIZE(a,b) .size a,b

#if defined(OUT) || defined(BSDI)
#define RC4 _RC4

#endif

#ifdef OUT
#define OK 1
#define ALIGN 4
#endif

#ifdef BSDI
#define OK 1
#define ALIGN 4
#undef SIZE
#undef TYPE
#define SIZE(a,b)
#define TYPE(a,b)
#endif

#if defined(ELF) || defined(SOL)
#define OK 1
#define ALIGN 16
#endif

#ifndef OK
You need to define one of
ELF - elf systems - linux-elf, NetBSD and DG-UX
OUT - a.out systems - linux-a.out and FreeBSD
SOL - solaris systems, which are elf with strange comment lines
BSDI - a.out with a very primitive version of as.
#endif

/* Let the Assembler begin :-) */
	/* Don't even think of reading this code */
	/* It was automatically generated by rc4-586.pl */
	/* Which is a perl program used to generate the x86 assembler for */
	/* any of elf, a.out, BSDI, Win32, or Solaris */
	/* eric <eay@cryptsoft.com> */

	.file "rc4-586.s"
	.version "01.01"
gcc2_compiled.:
.text
	.align ALIGN
.globl RC4
	TYPE(RC4,@function)
RC4:

	pushl %ebp
	pushl %ebx
	movl 12(%esp), %ebp
	movl 16(%esp), %ebx
	pushl %esi
	pushl %edi
	movl (%ebp), %ecx
	movl 4(%ebp), %edx
	movl 28(%esp), %esi
	incl %ecx
	subl $12, %esp
	addl $8, %ebp
	andl $255, %ecx
	leal -8(%ebx,%esi,), %ebx
	movl 44(%esp), %edi
	movl %ebx, 8(%esp)
	movl (%ebp,%ecx,4), %eax
	cmpl %esi, %ebx
	jl .L000end
.L001start:
	addl $8, %esi
	/* Round 0 */
	addl %eax, %edx
	andl $255, %edx
	incl %ecx
	movl (%ebp,%edx,4), %ebx
	movl %ebx, -4(%ebp,%ecx,4)
	addl %eax, %ebx
	andl $255, %ecx
	andl $255, %ebx
	movl %eax, (%ebp,%edx,4)
	nop
	movl (%ebp,%ebx,4), %ebx
	movl (%ebp,%ecx,4), %eax
	movb %bl, (%esp)
	/* Round 1 */
	addl %eax, %edx
	andl $255, %edx
	incl %ecx
	movl (%ebp,%edx,4), %ebx
	movl %ebx, -4(%ebp,%ecx,4)
	addl %eax, %ebx
	andl $255, %ecx
	andl $255, %ebx
	movl %eax, (%ebp,%edx,4)
	nop
	movl (%ebp,%ebx,4), %ebx
	movl (%ebp,%ecx,4), %eax
	movb %bl, 1(%esp)
	/* Round 2 */
	addl %eax, %edx
	andl $255, %edx
	incl %ecx
	movl (%ebp,%edx,4), %ebx
	movl %ebx, -4(%ebp,%ecx,4)
	addl %eax, %ebx
	andl $255, %ecx
	andl $255, %ebx
	movl %eax, (%ebp,%edx,4)
	nop
	movl (%ebp,%ebx,4), %ebx
	movl (%ebp,%ecx,4), %eax
	movb %bl, 2(%esp)
	/* Round 3 */
	addl %eax, %edx
	andl $255, %edx
	incl %ecx
	movl (%ebp,%edx,4), %ebx
	movl %ebx, -4(%ebp,%ecx,4)
	addl %eax, %ebx
	andl $255, %ecx
	andl $255, %ebx
	movl %eax, (%ebp,%edx,4)
	nop
	movl (%ebp,%ebx,4), %ebx
	movl (%ebp,%ecx,4), %eax
	movb %bl, 3(%esp)
	/* Round 4 */
	addl %eax, %edx
	andl $255, %edx
	incl %ecx
	movl (%ebp,%edx,4), %ebx
	movl %ebx, -4(%ebp,%ecx,4)
	addl %eax, %ebx
	andl $255, %ecx
	andl $255, %ebx
	movl %eax, (%ebp,%edx,4)
	nop
	movl (%ebp,%ebx,4), %ebx
	movl (%ebp,%ecx,4), %eax
	movb %bl, 4(%esp)
	/* Round 5 */
	addl %eax, %edx
	andl $255, %edx
	incl %ecx
	movl (%ebp,%edx,4), %ebx
	movl %ebx, -4(%ebp,%ecx,4)
	addl %eax, %ebx
	andl $255, %ecx
	andl $255, %ebx
	movl %eax, (%ebp,%edx,4)
	nop
	movl (%ebp,%ebx,4), %ebx
	movl (%ebp,%ecx,4), %eax
	movb %bl, 5(%esp)
	/* Round 6 */
	addl %eax, %edx
	andl $255, %edx
	incl %ecx
	movl (%ebp,%edx,4), %ebx
	movl %ebx, -4(%ebp,%ecx,4)
	addl %eax, %ebx
	andl $255, %ecx
	andl $255, %ebx
	movl %eax, (%ebp,%edx,4)
	nop
	movl (%ebp,%ebx,4), %ebx
	movl (%ebp,%ecx,4), %eax
	movb %bl, 6(%esp)
	/* Round 7 */
	addl %eax, %edx
	andl $255, %edx
	incl %ecx
	movl (%ebp,%edx,4), %ebx
	movl %ebx, -4(%ebp,%ecx,4)
	addl %eax, %ebx
	andl $255, %ecx
	andl $255, %ebx
	movl %eax, (%ebp,%edx,4)
	nop
	movl (%ebp,%ebx,4), %ebx
	addl $8, %edi
	movb %bl, 7(%esp)
	/* apply the cipher text */
	movl (%esp), %eax
	movl -8(%esi), %ebx
	xorl %ebx, %eax
	movl -4(%esi), %ebx
	movl %eax, -8(%edi)
	movl 4(%esp), %eax
	xorl %ebx, %eax
	movl 8(%esp), %ebx
	movl %eax, -4(%edi)
	movl (%ebp,%ecx,4), %eax
	cmpl %ebx, %esi
	jle .L001start
.L000end:
	/* Round 0 */
	addl $8, %ebx
	incl %esi
	cmpl %esi, %ebx
	jl .L002finished
	movl %ebx, 8(%esp)
	addl %eax, %edx
	andl $255, %edx
	incl %ecx
	movl (%ebp,%edx,4), %ebx
	movl %ebx, -4(%ebp,%ecx,4)
	addl %eax, %ebx
	andl $255, %ecx
	andl $255, %ebx
	movl %eax, (%ebp,%edx,4)
	nop
	movl (%ebp,%ebx,4), %ebx
	movl (%ebp,%ecx,4), %eax
	movb -1(%esi), %bh
	xorb %bh, %bl
	movb %bl, (%edi)
	/* Round 1 */
	movl 8(%esp), %ebx
	cmpl %esi, %ebx
	jle .L002finished
	incl %esi
	addl %eax, %edx
	andl $255, %edx
	incl %ecx
	movl (%ebp,%edx,4), %ebx
	movl %ebx, -4(%ebp,%ecx,4)
	addl %eax, %ebx
	andl $255, %ecx
	andl $255, %ebx
	movl %eax, (%ebp,%edx,4)
	nop
	movl (%ebp,%ebx,4), %ebx
	movl (%ebp,%ecx,4), %eax
	movb -1(%esi), %bh
	xorb %bh, %bl
	movb %bl, 1(%edi)
	/* Round 2 */
	movl 8(%esp), %ebx
	cmpl %esi, %ebx
	jle .L002finished
	incl %esi
	addl %eax, %edx
	andl $255, %edx
	incl %ecx
	movl (%ebp,%edx,4), %ebx
	movl %ebx, -4(%ebp,%ecx,4)
	addl %eax, %ebx
	andl $255, %ecx
	andl $255, %ebx
	movl %eax, (%ebp,%edx,4)
	nop
	movl (%ebp,%ebx,4), %ebx
	movl (%ebp,%ecx,4), %eax
	movb -1(%esi), %bh
	xorb %bh, %bl
	movb %bl, 2(%edi)
	/* Round 3 */
	movl 8(%esp), %ebx
	cmpl %esi, %ebx
	jle .L002finished
	incl %esi
	addl %eax, %edx
	andl $255, %edx
	incl %ecx
	movl (%ebp,%edx,4), %ebx
	movl %ebx, -4(%ebp,%ecx,4)
	addl %eax, %ebx
	andl $255, %ecx
	andl $255, %ebx
	movl %eax, (%ebp,%edx,4)
	nop
	movl (%ebp,%ebx,4), %ebx
	movl (%ebp,%ecx,4), %eax
	movb -1(%esi), %bh
	xorb %bh, %bl
	movb %bl, 3(%edi)
	/* Round 4 */
	movl 8(%esp), %ebx
	cmpl %esi, %ebx
	jle .L002finished
	incl %esi
	addl %eax, %edx
	andl $255, %edx
	incl %ecx
	movl (%ebp,%edx,4), %ebx
	movl %ebx, -4(%ebp,%ecx,4)
	addl %eax, %ebx
	andl $255, %ecx
	andl $255, %ebx
	movl %eax, (%ebp,%edx,4)
	nop
	movl (%ebp,%ebx,4), %ebx
	movl (%ebp,%ecx,4), %eax
	movb -1(%esi), %bh
	xorb %bh, %bl
	movb %bl, 4(%edi)
	/* Round 5 */
	movl 8(%esp), %ebx
	cmpl %esi, %ebx
	jle .L002finished
	incl %esi
	addl %eax, %edx
	andl $255, %edx
	incl %ecx
	movl (%ebp,%edx,4), %ebx
	movl %ebx, -4(%ebp,%ecx,4)
	addl %eax, %ebx
	andl $255, %ecx
	andl $255, %ebx
	movl %eax, (%ebp,%edx,4)
	nop
	movl (%ebp,%ebx,4), %ebx
	movl (%ebp,%ecx,4), %eax
	movb -1(%esi), %bh
	xorb %bh, %bl
	movb %bl, 5(%edi)
	/* Round 6 */
	movl 8(%esp), %ebx
	cmpl %esi, %ebx
	jle .L002finished
	incl %esi
	addl %eax, %edx
	andl $255, %edx
	incl %ecx
	movl (%ebp,%edx,4), %ebx
	movl %ebx, -4(%ebp,%ecx,4)
	addl %eax, %ebx
	andl $255, %ecx
	andl $255, %ebx
	movl %eax, (%ebp,%edx,4)
	nop
	movl (%ebp,%ebx,4), %ebx
	movb -1(%esi), %bh
	xorb %bh, %bl
	movb %bl, 6(%edi)
.L002finished:
	decl %ecx
	addl $12, %esp
	movl %edx, -4(%ebp)
	movb %cl, -8(%ebp)
	popl %edi
	popl %esi
	popl %ebx
	popl %ebp
	ret
.RC4_end:
	SIZE(RC4,.RC4_end-RC4)
.ident "RC4"
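As the preamble of rx86unix.cpp says, the file only becomes real assembler after a pass through the C pre-processor with one of the four flavor macros defined. The actual build invocation is not part of this diff; an illustrative ELF run would be something like cpp -DELF rx86unix.cpp > rc4-586.s, after which the preamble has resolved to:

/* Resolved flavor macros under -DELF (illustrative, per the
 * #if blocks at the top of the file): */
#define TYPE(a,b) .type a,b   /* ELF keeps symbol-type directives */
#define SIZE(a,b) .size a,b   /* and symbol-size directives       */
#define ALIGN 16              /* 16-byte alignment for .align     */
/* -DOUT or -DBSDI instead rename RC4 to _RC4 and drop ALIGN to 4,
 * and -DBSDI additionally defines TYPE/SIZE away entirely. */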