path: root/src/lib/libcrypto/rc4/asm/rx86unix.cpp
author		cvs2svn <admin@example.com>	1998-10-05 20:13:17 +0000
committer	cvs2svn <admin@example.com>	1998-10-05 20:13:17 +0000
commit		e82f18fab47b698d93971f576f962a3068132912 (patch)
tree		681519717892864935c3d0533cf171098afa649a	/src/lib/libcrypto/rc4/asm/rx86unix.cpp
parent		536c76cbb863bab152f19842ab88772c01e922c7 (diff)
download	openbsd-SSLeay_0_9_0b.tar.gz
		openbsd-SSLeay_0_9_0b.tar.bz2
		openbsd-SSLeay_0_9_0b.zip
This commit was manufactured by cvs2git to create tag 'SSLeay_0_9_0b'.
Diffstat (limited to 'src/lib/libcrypto/rc4/asm/rx86unix.cpp')
-rw-r--r--	src/lib/libcrypto/rc4/asm/rx86unix.cpp	358
1 file changed, 358 insertions, 0 deletions
diff --git a/src/lib/libcrypto/rc4/asm/rx86unix.cpp b/src/lib/libcrypto/rc4/asm/rx86unix.cpp
new file mode 100644
index 0000000000..ec1d72a110
--- /dev/null
+++ b/src/lib/libcrypto/rc4/asm/rx86unix.cpp
@@ -0,0 +1,358 @@
/* Run the C pre-processor over this file with one of the following defined
 * ELF - elf object files,
 * OUT - a.out object files,
 * BSDI - BSDI style a.out object files
 * SOL - Solaris style elf
 */

#define TYPE(a,b) .type a,b
#define SIZE(a,b) .size a,b

#if defined(OUT) || defined(BSDI)
#define RC4 _RC4

#endif

#ifdef OUT
#define OK 1
#define ALIGN 4
#endif

#ifdef BSDI
#define OK 1
#define ALIGN 4
#undef SIZE
#undef TYPE
#define SIZE(a,b)
#define TYPE(a,b)
#endif

#if defined(ELF) || defined(SOL)
#define OK 1
#define ALIGN 16
#endif

#ifndef OK
You need to define one of
ELF - elf systems - linux-elf, NetBSD and DG-UX
OUT - a.out systems - linux-a.out and FreeBSD
SOL - solaris systems, which are elf with strange comment lines
BSDI - a.out with a very primitive version of as.
#endif

/* Let the Assembler begin :-) */
	/* Don't even think of reading this code */
	/* It was automatically generated by rc4-586.pl */
	/* Which is a perl program used to generate the x86 assembler for */
	/* any of elf, a.out, BSDI, Win32, or Solaris */
	/* eric <eay@cryptsoft.com> */

	.file "rc4-586.s"
	.version "01.01"
gcc2_compiled.:
.text
	.align ALIGN
.globl RC4
	TYPE(RC4,@function)
RC4:

	pushl %ebp
	pushl %ebx
	movl 12(%esp), %ebp
	movl 16(%esp), %ebx
	pushl %esi
	pushl %edi
	movl (%ebp), %ecx
	movl 4(%ebp), %edx
	movl 28(%esp), %esi
	incl %ecx
	subl $12, %esp
	addl $8, %ebp
	andl $255, %ecx
	leal -8(%ebx,%esi,), %ebx
	movl 44(%esp), %edi
	movl %ebx, 8(%esp)
	movl (%ebp,%ecx,4), %eax
	cmpl %esi, %ebx
	jl .L000end
.L001start:
	addl $8, %esi
	/* Round 0 */
	addl %eax, %edx
	andl $255, %edx
	incl %ecx
	movl (%ebp,%edx,4), %ebx
	movl %ebx, -4(%ebp,%ecx,4)
	addl %eax, %ebx
	andl $255, %ecx
	andl $255, %ebx
	movl %eax, (%ebp,%edx,4)
	nop
	movl (%ebp,%ebx,4), %ebx
	movl (%ebp,%ecx,4), %eax
	movb %bl, (%esp)
	/* Round 1 */
	addl %eax, %edx
	andl $255, %edx
	incl %ecx
	movl (%ebp,%edx,4), %ebx
	movl %ebx, -4(%ebp,%ecx,4)
	addl %eax, %ebx
	andl $255, %ecx
	andl $255, %ebx
	movl %eax, (%ebp,%edx,4)
	nop
	movl (%ebp,%ebx,4), %ebx
	movl (%ebp,%ecx,4), %eax
	movb %bl, 1(%esp)
	/* Round 2 */
	addl %eax, %edx
	andl $255, %edx
	incl %ecx
	movl (%ebp,%edx,4), %ebx
	movl %ebx, -4(%ebp,%ecx,4)
	addl %eax, %ebx
	andl $255, %ecx
	andl $255, %ebx
	movl %eax, (%ebp,%edx,4)
	nop
	movl (%ebp,%ebx,4), %ebx
	movl (%ebp,%ecx,4), %eax
	movb %bl, 2(%esp)
	/* Round 3 */
	addl %eax, %edx
	andl $255, %edx
	incl %ecx
	movl (%ebp,%edx,4), %ebx
	movl %ebx, -4(%ebp,%ecx,4)
	addl %eax, %ebx
	andl $255, %ecx
	andl $255, %ebx
	movl %eax, (%ebp,%edx,4)
	nop
	movl (%ebp,%ebx,4), %ebx
	movl (%ebp,%ecx,4), %eax
	movb %bl, 3(%esp)
	/* Round 4 */
	addl %eax, %edx
	andl $255, %edx
	incl %ecx
	movl (%ebp,%edx,4), %ebx
	movl %ebx, -4(%ebp,%ecx,4)
	addl %eax, %ebx
	andl $255, %ecx
	andl $255, %ebx
	movl %eax, (%ebp,%edx,4)
	nop
	movl (%ebp,%ebx,4), %ebx
	movl (%ebp,%ecx,4), %eax
	movb %bl, 4(%esp)
	/* Round 5 */
	addl %eax, %edx
	andl $255, %edx
	incl %ecx
	movl (%ebp,%edx,4), %ebx
	movl %ebx, -4(%ebp,%ecx,4)
	addl %eax, %ebx
	andl $255, %ecx
	andl $255, %ebx
	movl %eax, (%ebp,%edx,4)
	nop
	movl (%ebp,%ebx,4), %ebx
	movl (%ebp,%ecx,4), %eax
	movb %bl, 5(%esp)
	/* Round 6 */
	addl %eax, %edx
	andl $255, %edx
	incl %ecx
	movl (%ebp,%edx,4), %ebx
	movl %ebx, -4(%ebp,%ecx,4)
	addl %eax, %ebx
	andl $255, %ecx
	andl $255, %ebx
	movl %eax, (%ebp,%edx,4)
	nop
	movl (%ebp,%ebx,4), %ebx
	movl (%ebp,%ecx,4), %eax
	movb %bl, 6(%esp)
	/* Round 7 */
	addl %eax, %edx
	andl $255, %edx
	incl %ecx
	movl (%ebp,%edx,4), %ebx
	movl %ebx, -4(%ebp,%ecx,4)
	addl %eax, %ebx
	andl $255, %ecx
	andl $255, %ebx
	movl %eax, (%ebp,%edx,4)
	nop
	movl (%ebp,%ebx,4), %ebx
	addl $8, %edi
	movb %bl, 7(%esp)
	/* apply the cipher text */
	movl (%esp), %eax
	movl -8(%esi), %ebx
	xorl %ebx, %eax
	movl -4(%esi), %ebx
	movl %eax, -8(%edi)
	movl 4(%esp), %eax
	xorl %ebx, %eax
	movl 8(%esp), %ebx
	movl %eax, -4(%edi)
	movl (%ebp,%ecx,4), %eax
	cmpl %ebx, %esi
	jle .L001start
.L000end:
	/* Round 0 */
	addl $8, %ebx
	incl %esi
	cmpl %esi, %ebx
	jl .L002finished
	movl %ebx, 8(%esp)
	addl %eax, %edx
	andl $255, %edx
	incl %ecx
	movl (%ebp,%edx,4), %ebx
	movl %ebx, -4(%ebp,%ecx,4)
	addl %eax, %ebx
	andl $255, %ecx
	andl $255, %ebx
	movl %eax, (%ebp,%edx,4)
	nop
	movl (%ebp,%ebx,4), %ebx
	movl (%ebp,%ecx,4), %eax
	movb -1(%esi), %bh
	xorb %bh, %bl
	movb %bl, (%edi)
	/* Round 1 */
	movl 8(%esp), %ebx
	cmpl %esi, %ebx
	jle .L002finished
	incl %esi
	addl %eax, %edx
	andl $255, %edx
	incl %ecx
	movl (%ebp,%edx,4), %ebx
	movl %ebx, -4(%ebp,%ecx,4)
	addl %eax, %ebx
	andl $255, %ecx
	andl $255, %ebx
	movl %eax, (%ebp,%edx,4)
	nop
	movl (%ebp,%ebx,4), %ebx
	movl (%ebp,%ecx,4), %eax
	movb -1(%esi), %bh
	xorb %bh, %bl
	movb %bl, 1(%edi)
	/* Round 2 */
	movl 8(%esp), %ebx
	cmpl %esi, %ebx
	jle .L002finished
	incl %esi
	addl %eax, %edx
	andl $255, %edx
	incl %ecx
	movl (%ebp,%edx,4), %ebx
	movl %ebx, -4(%ebp,%ecx,4)
	addl %eax, %ebx
	andl $255, %ecx
	andl $255, %ebx
	movl %eax, (%ebp,%edx,4)
	nop
	movl (%ebp,%ebx,4), %ebx
	movl (%ebp,%ecx,4), %eax
	movb -1(%esi), %bh
	xorb %bh, %bl
	movb %bl, 2(%edi)
	/* Round 3 */
	movl 8(%esp), %ebx
	cmpl %esi, %ebx
	jle .L002finished
	incl %esi
	addl %eax, %edx
	andl $255, %edx
	incl %ecx
	movl (%ebp,%edx,4), %ebx
	movl %ebx, -4(%ebp,%ecx,4)
	addl %eax, %ebx
	andl $255, %ecx
	andl $255, %ebx
	movl %eax, (%ebp,%edx,4)
	nop
	movl (%ebp,%ebx,4), %ebx
	movl (%ebp,%ecx,4), %eax
	movb -1(%esi), %bh
	xorb %bh, %bl
	movb %bl, 3(%edi)
	/* Round 4 */
	movl 8(%esp), %ebx
	cmpl %esi, %ebx
	jle .L002finished
	incl %esi
	addl %eax, %edx
	andl $255, %edx
	incl %ecx
	movl (%ebp,%edx,4), %ebx
	movl %ebx, -4(%ebp,%ecx,4)
	addl %eax, %ebx
	andl $255, %ecx
	andl $255, %ebx
	movl %eax, (%ebp,%edx,4)
	nop
	movl (%ebp,%ebx,4), %ebx
	movl (%ebp,%ecx,4), %eax
	movb -1(%esi), %bh
	xorb %bh, %bl
	movb %bl, 4(%edi)
	/* Round 5 */
	movl 8(%esp), %ebx
	cmpl %esi, %ebx
	jle .L002finished
	incl %esi
	addl %eax, %edx
	andl $255, %edx
	incl %ecx
	movl (%ebp,%edx,4), %ebx
	movl %ebx, -4(%ebp,%ecx,4)
	addl %eax, %ebx
	andl $255, %ecx
	andl $255, %ebx
	movl %eax, (%ebp,%edx,4)
	nop
	movl (%ebp,%ebx,4), %ebx
	movl (%ebp,%ecx,4), %eax
	movb -1(%esi), %bh
	xorb %bh, %bl
	movb %bl, 5(%edi)
	/* Round 6 */
	movl 8(%esp), %ebx
	cmpl %esi, %ebx
	jle .L002finished
	incl %esi
	addl %eax, %edx
	andl $255, %edx
	incl %ecx
	movl (%ebp,%edx,4), %ebx
	movl %ebx, -4(%ebp,%ecx,4)
	addl %eax, %ebx
	andl $255, %ecx
	andl $255, %ebx
	movl %eax, (%ebp,%edx,4)
	nop
	movl (%ebp,%ebx,4), %ebx
	movb -1(%esi), %bh
	xorb %bh, %bl
	movb %bl, 6(%edi)
.L002finished:
	decl %ecx
	addl $12, %esp
	movl %edx, -4(%ebp)
	movb %cl, -8(%ebp)
	popl %edi
	popl %esi
	popl %ebx
	popl %ebp
	ret
.RC4_end:
	SIZE(RC4,.RC4_end-RC4)
.ident "RC4"
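
For readers tracing the prologue of RC4 above: the stack offsets resolve to a four-argument C call in the order key structure, byte count, input pointer, output pointer, with the two state indices read from offsets 0 and 4 of the key and the 256-word table starting at offset 8 (the addl $8, %ebp). The sketch below is not part of the commit; the prototype and RC4_KEY layout are assumed from the rc4.h header elsewhere in this SSLeay tree, and the key and buffer names are invented for illustration.

#include "rc4.h"   /* header from the same SSLeay tree (assumed include path) */

/* Hypothetical caller of the routine assembled from rx86unix.cpp.
 * RC4_KEY is roughly { RC4_INT x, y; RC4_INT data[256]; }, which is why the
 * assembly reads (%ebp) and 4(%ebp) and then adds 8 to reach the table. */
static void rc4_example(void)
{
	static unsigned char key_bytes[16];     /* made-up key material */
	unsigned char in[64] = {0}, out[64];    /* made-up data buffers */
	RC4_KEY key;

	RC4_set_key(&key, sizeof(key_bytes), key_bytes); /* key schedule, plain C elsewhere in libcrypto */
	RC4(&key, sizeof(in), in, out);                  /* the routine defined above */
}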
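
Each unrolled "Round" block in the main loop, and each single-byte round after .L000end, is one step of the standard RC4 keystream generator: the main loop stashes eight keystream bytes at (%esp)..7(%esp) and applies them with the two 32-bit XORs under "apply the cipher text", the tail consumes the remaining 0-7 bytes one movb/xorb at a time, and .L002finished undoes the pre-increment of x (decl %ecx) before writing x and y back to the key at -8(%ebp) and -4(%ebp). The C below is a reader's sketch of one such step, keyed to the register roles (%ecx = x, %edx = y, %eax = d[x], %ebp = the table d); it is not code from the commit, and the assembly interleaves the increment and the d[x] load with the previous round rather than executing them in this textbook order.

/* One RC4 PRGA step, as computed by every "Round" block above (sketch).
 * d[] keeps one byte of state per 32-bit word, matching the *4 scaling
 * used in the addressing modes. */
static unsigned int rc4_step(unsigned int *d, unsigned int *xp, unsigned int *yp)
{
	unsigned int x = *xp, y = *yp, tx, ty;

	x = (x + 1) & 0xff;          /* incl %ecx ; andl $255,%ecx       */
	tx = d[x];                   /* movl (%ebp,%ecx,4),%eax          */
	y = (y + tx) & 0xff;         /* addl %eax,%edx ; andl $255,%edx  */
	ty = d[y];                   /* movl (%ebp,%edx,4),%ebx          */
	d[x] = ty;                   /* movl %ebx,-4(%ebp,%ecx,4)        */
	d[y] = tx;                   /* movl %eax,(%ebp,%edx,4)          */
	*xp = x;
	*yp = y;
	return d[(tx + ty) & 0xff];  /* movl (%ebp,%ebx,4),%ebx: keystream byte, XORed into the data */
}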