path: root/src/lib/libcrypto/bf/asm/bx86unix.cpp
author    cvs2svn <admin@example.com> 1998-10-05 20:13:17 +0000
committer cvs2svn <admin@example.com> 1998-10-05 20:13:17 +0000
commit    e82f18fab47b698d93971f576f962a3068132912 (patch)
tree      681519717892864935c3d0533cf171098afa649a /src/lib/libcrypto/bf/asm/bx86unix.cpp
parent    536c76cbb863bab152f19842ab88772c01e922c7 (diff)
This commit was manufactured by cvs2git to create tag 'SSLeay_0_9_0b'.
Diffstat (limited to 'src/lib/libcrypto/bf/asm/bx86unix.cpp')
-rw-r--r-- src/lib/libcrypto/bf/asm/bx86unix.cpp | 976
1 file changed, 976 insertions, 0 deletions
diff --git a/src/lib/libcrypto/bf/asm/bx86unix.cpp b/src/lib/libcrypto/bf/asm/bx86unix.cpp
new file mode 100644
index 0000000000..cdaa269378
--- /dev/null
+++ b/src/lib/libcrypto/bf/asm/bx86unix.cpp
@@ -0,0 +1,976 @@
1/* Run the C pre-processor over this file with one of the following defined
2 * ELF - elf object files,
3 * OUT - a.out object files,
4 * BSDI - BSDI style a.out object files
5 * SOL - Solaris style elf
6 */
7
8#define TYPE(a,b) .type a,b
9#define SIZE(a,b) .size a,b
10
11#if defined(OUT) || defined(BSDI)
12#define BF_encrypt _BF_encrypt
13#define BF_decrypt _BF_decrypt
14#define BF_cbc_encrypt _BF_cbc_encrypt
15
16#endif
17
18#ifdef OUT
19#define OK 1
20#define ALIGN 4
21#endif
22
23#ifdef BSDI
24#define OK 1
25#define ALIGN 4
26#undef SIZE
27#undef TYPE
28#define SIZE(a,b)
29#define TYPE(a,b)
30#endif
31
32#if defined(ELF) || defined(SOL)
33#define OK 1
34#define ALIGN 16
35#endif
36
37#ifndef OK
38You need to define one of
39ELF - elf systems - linux-elf, NetBSD and DG-UX
40OUT - a.out systems - linux-a.out and FreeBSD
41SOL - solaris systems, which are elf with strange comment lines
42BSDI - a.out with a very primitive version of as.
43#endif
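/*
 * Illustration (a sketch, not part of the generated output): with ELF or
 * SOL defined, ALIGN is 16 and the TYPE/SIZE macros above expand to real
 * assembler directives, so the directives around BF_encrypt further down
 * are seen by the assembler roughly as
 *
 *	.align 16
 *	.globl BF_encrypt
 *		.type BF_encrypt,@function
 *	BF_encrypt:
 *		...
 *	.BF_encrypt_end:
 *		.size BF_encrypt,.BF_encrypt_end-BF_encrypt
 *
 * With OUT or BSDI defined the symbol is instead assembled as _BF_encrypt
 * and ALIGN drops to 4; BSDI additionally redefines TYPE and SIZE to
 * expand to nothing, so neither directive is emitted there.
 */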
44
45/* Let the Assembler begin :-) */
46 /* Don't even think of reading this code */
47 /* It was automatically generated by bf-586.pl */
48 /* Which is a perl program used to generate the x86 assembler for */
49 /* any of elf, a.out, BSDI, Win32, or Solaris */
50 /* eric <eay@cryptsoft.com> */
51
52 .file "bf-586.s"
53 .version "01.01"
54gcc2_compiled.:
55.text
56 .align ALIGN
57.globl BF_encrypt
58 TYPE(BF_encrypt,@function)
59BF_encrypt:
60
61 pushl %ebp
62 pushl %ebx
63 movl 12(%esp), %ebx
64 movl 16(%esp), %ebp
65 pushl %esi
66 pushl %edi
67 /* Load the 2 words */
68 movl (%ebx), %edi
69 movl 4(%ebx), %esi
70 xorl %eax, %eax
71 movl (%ebp), %ebx
72 xorl %ecx, %ecx
73 xorl %ebx, %edi
74
75 /* Round 0 */
76 movl 4(%ebp), %edx
77 movl %edi, %ebx
78 xorl %edx, %esi
79 shrl $16, %ebx
80 movl %edi, %edx
81 movb %bh, %al
82 andl $255, %ebx
83 movb %dh, %cl
84 andl $255, %edx
85 movl 72(%ebp,%eax,4),%eax
86 movl 1096(%ebp,%ebx,4),%ebx
87 addl %eax, %ebx
88 movl 2120(%ebp,%ecx,4),%eax
89 xorl %eax, %ebx
90 movl 3144(%ebp,%edx,4),%edx
91 addl %edx, %ebx
92 xorl %eax, %eax
93 xorl %ebx, %esi
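/*
 * The block of table lookups above is one evaluation of the Blowfish F
 * function: with the four bytes of %edi taken as a,b,c,d from most to
 * least significant, it computes ((S0[a] + S1[b]) ^ S2[c]) + S3[d] using
 * the S-boxes that start at offsets 72, 1096, 2120 and 3144 in the key
 * schedule, then xors the result into the other half in %esi.  Rounds
 * 1-15 below repeat the same pattern with the halves swapped each round
 * and the next P-array entry mixed in first.
 */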
94
95 /* Round 1 */
96 movl 8(%ebp), %edx
97 movl %esi, %ebx
98 xorl %edx, %edi
99 shrl $16, %ebx
100 movl %esi, %edx
101 movb %bh, %al
102 andl $255, %ebx
103 movb %dh, %cl
104 andl $255, %edx
105 movl 72(%ebp,%eax,4),%eax
106 movl 1096(%ebp,%ebx,4),%ebx
107 addl %eax, %ebx
108 movl 2120(%ebp,%ecx,4),%eax
109 xorl %eax, %ebx
110 movl 3144(%ebp,%edx,4),%edx
111 addl %edx, %ebx
112 xorl %eax, %eax
113 xorl %ebx, %edi
114
115 /* Round 2 */
116 movl 12(%ebp), %edx
117 movl %edi, %ebx
118 xorl %edx, %esi
119 shrl $16, %ebx
120 movl %edi, %edx
121 movb %bh, %al
122 andl $255, %ebx
123 movb %dh, %cl
124 andl $255, %edx
125 movl 72(%ebp,%eax,4),%eax
126 movl 1096(%ebp,%ebx,4),%ebx
127 addl %eax, %ebx
128 movl 2120(%ebp,%ecx,4),%eax
129 xorl %eax, %ebx
130 movl 3144(%ebp,%edx,4),%edx
131 addl %edx, %ebx
132 xorl %eax, %eax
133 xorl %ebx, %esi
134
135 /* Round 3 */
136 movl 16(%ebp), %edx
137 movl %esi, %ebx
138 xorl %edx, %edi
139 shrl $16, %ebx
140 movl %esi, %edx
141 movb %bh, %al
142 andl $255, %ebx
143 movb %dh, %cl
144 andl $255, %edx
145 movl 72(%ebp,%eax,4),%eax
146 movl 1096(%ebp,%ebx,4),%ebx
147 addl %eax, %ebx
148 movl 2120(%ebp,%ecx,4),%eax
149 xorl %eax, %ebx
150 movl 3144(%ebp,%edx,4),%edx
151 addl %edx, %ebx
152 xorl %eax, %eax
153 xorl %ebx, %edi
154
155 /* Round 4 */
156 movl 20(%ebp), %edx
157 movl %edi, %ebx
158 xorl %edx, %esi
159 shrl $16, %ebx
160 movl %edi, %edx
161 movb %bh, %al
162 andl $255, %ebx
163 movb %dh, %cl
164 andl $255, %edx
165 movl 72(%ebp,%eax,4),%eax
166 movl 1096(%ebp,%ebx,4),%ebx
167 addl %eax, %ebx
168 movl 2120(%ebp,%ecx,4),%eax
169 xorl %eax, %ebx
170 movl 3144(%ebp,%edx,4),%edx
171 addl %edx, %ebx
172 xorl %eax, %eax
173 xorl %ebx, %esi
174
175 /* Round 5 */
176 movl 24(%ebp), %edx
177 movl %esi, %ebx
178 xorl %edx, %edi
179 shrl $16, %ebx
180 movl %esi, %edx
181 movb %bh, %al
182 andl $255, %ebx
183 movb %dh, %cl
184 andl $255, %edx
185 movl 72(%ebp,%eax,4),%eax
186 movl 1096(%ebp,%ebx,4),%ebx
187 addl %eax, %ebx
188 movl 2120(%ebp,%ecx,4),%eax
189 xorl %eax, %ebx
190 movl 3144(%ebp,%edx,4),%edx
191 addl %edx, %ebx
192 xorl %eax, %eax
193 xorl %ebx, %edi
194
195 /* Round 6 */
196 movl 28(%ebp), %edx
197 movl %edi, %ebx
198 xorl %edx, %esi
199 shrl $16, %ebx
200 movl %edi, %edx
201 movb %bh, %al
202 andl $255, %ebx
203 movb %dh, %cl
204 andl $255, %edx
205 movl 72(%ebp,%eax,4),%eax
206 movl 1096(%ebp,%ebx,4),%ebx
207 addl %eax, %ebx
208 movl 2120(%ebp,%ecx,4),%eax
209 xorl %eax, %ebx
210 movl 3144(%ebp,%edx,4),%edx
211 addl %edx, %ebx
212 xorl %eax, %eax
213 xorl %ebx, %esi
214
215 /* Round 7 */
216 movl 32(%ebp), %edx
217 movl %esi, %ebx
218 xorl %edx, %edi
219 shrl $16, %ebx
220 movl %esi, %edx
221 movb %bh, %al
222 andl $255, %ebx
223 movb %dh, %cl
224 andl $255, %edx
225 movl 72(%ebp,%eax,4),%eax
226 movl 1096(%ebp,%ebx,4),%ebx
227 addl %eax, %ebx
228 movl 2120(%ebp,%ecx,4),%eax
229 xorl %eax, %ebx
230 movl 3144(%ebp,%edx,4),%edx
231 addl %edx, %ebx
232 xorl %eax, %eax
233 xorl %ebx, %edi
234
235 /* Round 8 */
236 movl 36(%ebp), %edx
237 movl %edi, %ebx
238 xorl %edx, %esi
239 shrl $16, %ebx
240 movl %edi, %edx
241 movb %bh, %al
242 andl $255, %ebx
243 movb %dh, %cl
244 andl $255, %edx
245 movl 72(%ebp,%eax,4),%eax
246 movl 1096(%ebp,%ebx,4),%ebx
247 addl %eax, %ebx
248 movl 2120(%ebp,%ecx,4),%eax
249 xorl %eax, %ebx
250 movl 3144(%ebp,%edx,4),%edx
251 addl %edx, %ebx
252 xorl %eax, %eax
253 xorl %ebx, %esi
254
255 /* Round 9 */
256 movl 40(%ebp), %edx
257 movl %esi, %ebx
258 xorl %edx, %edi
259 shrl $16, %ebx
260 movl %esi, %edx
261 movb %bh, %al
262 andl $255, %ebx
263 movb %dh, %cl
264 andl $255, %edx
265 movl 72(%ebp,%eax,4),%eax
266 movl 1096(%ebp,%ebx,4),%ebx
267 addl %eax, %ebx
268 movl 2120(%ebp,%ecx,4),%eax
269 xorl %eax, %ebx
270 movl 3144(%ebp,%edx,4),%edx
271 addl %edx, %ebx
272 xorl %eax, %eax
273 xorl %ebx, %edi
274
275 /* Round 10 */
276 movl 44(%ebp), %edx
277 movl %edi, %ebx
278 xorl %edx, %esi
279 shrl $16, %ebx
280 movl %edi, %edx
281 movb %bh, %al
282 andl $255, %ebx
283 movb %dh, %cl
284 andl $255, %edx
285 movl 72(%ebp,%eax,4),%eax
286 movl 1096(%ebp,%ebx,4),%ebx
287 addl %eax, %ebx
288 movl 2120(%ebp,%ecx,4),%eax
289 xorl %eax, %ebx
290 movl 3144(%ebp,%edx,4),%edx
291 addl %edx, %ebx
292 xorl %eax, %eax
293 xorl %ebx, %esi
294
295 /* Round 11 */
296 movl 48(%ebp), %edx
297 movl %esi, %ebx
298 xorl %edx, %edi
299 shrl $16, %ebx
300 movl %esi, %edx
301 movb %bh, %al
302 andl $255, %ebx
303 movb %dh, %cl
304 andl $255, %edx
305 movl 72(%ebp,%eax,4),%eax
306 movl 1096(%ebp,%ebx,4),%ebx
307 addl %eax, %ebx
308 movl 2120(%ebp,%ecx,4),%eax
309 xorl %eax, %ebx
310 movl 3144(%ebp,%edx,4),%edx
311 addl %edx, %ebx
312 xorl %eax, %eax
313 xorl %ebx, %edi
314
315 /* Round 12 */
316 movl 52(%ebp), %edx
317 movl %edi, %ebx
318 xorl %edx, %esi
319 shrl $16, %ebx
320 movl %edi, %edx
321 movb %bh, %al
322 andl $255, %ebx
323 movb %dh, %cl
324 andl $255, %edx
325 movl 72(%ebp,%eax,4),%eax
326 movl 1096(%ebp,%ebx,4),%ebx
327 addl %eax, %ebx
328 movl 2120(%ebp,%ecx,4),%eax
329 xorl %eax, %ebx
330 movl 3144(%ebp,%edx,4),%edx
331 addl %edx, %ebx
332 xorl %eax, %eax
333 xorl %ebx, %esi
334
335 /* Round 13 */
336 movl 56(%ebp), %edx
337 movl %esi, %ebx
338 xorl %edx, %edi
339 shrl $16, %ebx
340 movl %esi, %edx
341 movb %bh, %al
342 andl $255, %ebx
343 movb %dh, %cl
344 andl $255, %edx
345 movl 72(%ebp,%eax,4),%eax
346 movl 1096(%ebp,%ebx,4),%ebx
347 addl %eax, %ebx
348 movl 2120(%ebp,%ecx,4),%eax
349 xorl %eax, %ebx
350 movl 3144(%ebp,%edx,4),%edx
351 addl %edx, %ebx
352 xorl %eax, %eax
353 xorl %ebx, %edi
354
355 /* Round 14 */
356 movl 60(%ebp), %edx
357 movl %edi, %ebx
358 xorl %edx, %esi
359 shrl $16, %ebx
360 movl %edi, %edx
361 movb %bh, %al
362 andl $255, %ebx
363 movb %dh, %cl
364 andl $255, %edx
365 movl 72(%ebp,%eax,4),%eax
366 movl 1096(%ebp,%ebx,4),%ebx
367 addl %eax, %ebx
368 movl 2120(%ebp,%ecx,4),%eax
369 xorl %eax, %ebx
370 movl 3144(%ebp,%edx,4),%edx
371 addl %edx, %ebx
372 xorl %eax, %eax
373 xorl %ebx, %esi
374
375 /* Round 15 */
376 movl 64(%ebp), %edx
377 movl %esi, %ebx
378 xorl %edx, %edi
379 shrl $16, %ebx
380 movl %esi, %edx
381 movb %bh, %al
382 andl $255, %ebx
383 movb %dh, %cl
384 andl $255, %edx
385 movl 72(%ebp,%eax,4),%eax
386 movl 1096(%ebp,%ebx,4),%ebx
387 addl %eax, %ebx
388 movl 2120(%ebp,%ecx,4),%eax
389 xorl %eax, %ebx
390 movl 3144(%ebp,%edx,4),%edx
391 addl %edx, %ebx
392 /* Load parameter 0 (16) enc=1 */
393 movl 20(%esp), %eax
394 xorl %ebx, %edi
395 movl 68(%ebp), %edx
396 xorl %edx, %esi
397 movl %edi, 4(%eax)
398 movl %esi, (%eax)
399 popl %edi
400 popl %esi
401 popl %ebx
402 popl %ebp
403 ret
404.BF_encrypt_end:
405 SIZE(BF_encrypt,.BF_encrypt_end-BF_encrypt)
406.ident "BF_encrypt"
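/*
 * Calling convention, as read off the stack offsets above: after the two
 * initial pushes, 12(%esp) points at the two 32-bit data words and
 * 16(%esp) at the key schedule (P-array at offset 0, the four 1024-byte
 * S-boxes at 72, 1096, 2120 and 3144).  A hedged C-level sketch, using
 * the names from SSLeay's blowfish.h (exact qualifiers may differ in this
 * vintage):
 *
 *	void BF_encrypt(BF_LONG *data, BF_KEY *key);
 *	void BF_decrypt(BF_LONG *data, BF_KEY *key);
 *
 * BF_decrypt below is the same routine with the P-array entries applied
 * in reverse order.
 */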
407.text
408 .align ALIGN
409.globl BF_decrypt
410 TYPE(BF_decrypt,@function)
411BF_decrypt:
412
413 pushl %ebp
414 pushl %ebx
415 movl 12(%esp), %ebx
416 movl 16(%esp), %ebp
417 pushl %esi
418 pushl %edi
419 /* Load the 2 words */
420 movl (%ebx), %edi
421 movl 4(%ebx), %esi
422 xorl %eax, %eax
423 movl 68(%ebp), %ebx
424 xorl %ecx, %ecx
425 xorl %ebx, %edi
426
427 /* Round 16 */
428 movl 64(%ebp), %edx
429 movl %edi, %ebx
430 xorl %edx, %esi
431 shrl $16, %ebx
432 movl %edi, %edx
433 movb %bh, %al
434 andl $255, %ebx
435 movb %dh, %cl
436 andl $255, %edx
437 movl 72(%ebp,%eax,4),%eax
438 movl 1096(%ebp,%ebx,4),%ebx
439 addl %eax, %ebx
440 movl 2120(%ebp,%ecx,4),%eax
441 xorl %eax, %ebx
442 movl 3144(%ebp,%edx,4),%edx
443 addl %edx, %ebx
444 xorl %eax, %eax
445 xorl %ebx, %esi
446
447 /* Round 15 */
448 movl 60(%ebp), %edx
449 movl %esi, %ebx
450 xorl %edx, %edi
451 shrl $16, %ebx
452 movl %esi, %edx
453 movb %bh, %al
454 andl $255, %ebx
455 movb %dh, %cl
456 andl $255, %edx
457 movl 72(%ebp,%eax,4),%eax
458 movl 1096(%ebp,%ebx,4),%ebx
459 addl %eax, %ebx
460 movl 2120(%ebp,%ecx,4),%eax
461 xorl %eax, %ebx
462 movl 3144(%ebp,%edx,4),%edx
463 addl %edx, %ebx
464 xorl %eax, %eax
465 xorl %ebx, %edi
466
467 /* Round 14 */
468 movl 56(%ebp), %edx
469 movl %edi, %ebx
470 xorl %edx, %esi
471 shrl $16, %ebx
472 movl %edi, %edx
473 movb %bh, %al
474 andl $255, %ebx
475 movb %dh, %cl
476 andl $255, %edx
477 movl 72(%ebp,%eax,4),%eax
478 movl 1096(%ebp,%ebx,4),%ebx
479 addl %eax, %ebx
480 movl 2120(%ebp,%ecx,4),%eax
481 xorl %eax, %ebx
482 movl 3144(%ebp,%edx,4),%edx
483 addl %edx, %ebx
484 xorl %eax, %eax
485 xorl %ebx, %esi
486
487 /* Round 13 */
488 movl 52(%ebp), %edx
489 movl %esi, %ebx
490 xorl %edx, %edi
491 shrl $16, %ebx
492 movl %esi, %edx
493 movb %bh, %al
494 andl $255, %ebx
495 movb %dh, %cl
496 andl $255, %edx
497 movl 72(%ebp,%eax,4),%eax
498 movl 1096(%ebp,%ebx,4),%ebx
499 addl %eax, %ebx
500 movl 2120(%ebp,%ecx,4),%eax
501 xorl %eax, %ebx
502 movl 3144(%ebp,%edx,4),%edx
503 addl %edx, %ebx
504 xorl %eax, %eax
505 xorl %ebx, %edi
506
507 /* Round 12 */
508 movl 48(%ebp), %edx
509 movl %edi, %ebx
510 xorl %edx, %esi
511 shrl $16, %ebx
512 movl %edi, %edx
513 movb %bh, %al
514 andl $255, %ebx
515 movb %dh, %cl
516 andl $255, %edx
517 movl 72(%ebp,%eax,4),%eax
518 movl 1096(%ebp,%ebx,4),%ebx
519 addl %eax, %ebx
520 movl 2120(%ebp,%ecx,4),%eax
521 xorl %eax, %ebx
522 movl 3144(%ebp,%edx,4),%edx
523 addl %edx, %ebx
524 xorl %eax, %eax
525 xorl %ebx, %esi
526
527 /* Round 11 */
528 movl 44(%ebp), %edx
529 movl %esi, %ebx
530 xorl %edx, %edi
531 shrl $16, %ebx
532 movl %esi, %edx
533 movb %bh, %al
534 andl $255, %ebx
535 movb %dh, %cl
536 andl $255, %edx
537 movl 72(%ebp,%eax,4),%eax
538 movl 1096(%ebp,%ebx,4),%ebx
539 addl %eax, %ebx
540 movl 2120(%ebp,%ecx,4),%eax
541 xorl %eax, %ebx
542 movl 3144(%ebp,%edx,4),%edx
543 addl %edx, %ebx
544 xorl %eax, %eax
545 xorl %ebx, %edi
546
547 /* Round 10 */
548 movl 40(%ebp), %edx
549 movl %edi, %ebx
550 xorl %edx, %esi
551 shrl $16, %ebx
552 movl %edi, %edx
553 movb %bh, %al
554 andl $255, %ebx
555 movb %dh, %cl
556 andl $255, %edx
557 movl 72(%ebp,%eax,4),%eax
558 movl 1096(%ebp,%ebx,4),%ebx
559 addl %eax, %ebx
560 movl 2120(%ebp,%ecx,4),%eax
561 xorl %eax, %ebx
562 movl 3144(%ebp,%edx,4),%edx
563 addl %edx, %ebx
564 xorl %eax, %eax
565 xorl %ebx, %esi
566
567 /* Round 9 */
568 movl 36(%ebp), %edx
569 movl %esi, %ebx
570 xorl %edx, %edi
571 shrl $16, %ebx
572 movl %esi, %edx
573 movb %bh, %al
574 andl $255, %ebx
575 movb %dh, %cl
576 andl $255, %edx
577 movl 72(%ebp,%eax,4),%eax
578 movl 1096(%ebp,%ebx,4),%ebx
579 addl %eax, %ebx
580 movl 2120(%ebp,%ecx,4),%eax
581 xorl %eax, %ebx
582 movl 3144(%ebp,%edx,4),%edx
583 addl %edx, %ebx
584 xorl %eax, %eax
585 xorl %ebx, %edi
586
587 /* Round 8 */
588 movl 32(%ebp), %edx
589 movl %edi, %ebx
590 xorl %edx, %esi
591 shrl $16, %ebx
592 movl %edi, %edx
593 movb %bh, %al
594 andl $255, %ebx
595 movb %dh, %cl
596 andl $255, %edx
597 movl 72(%ebp,%eax,4),%eax
598 movl 1096(%ebp,%ebx,4),%ebx
599 addl %eax, %ebx
600 movl 2120(%ebp,%ecx,4),%eax
601 xorl %eax, %ebx
602 movl 3144(%ebp,%edx,4),%edx
603 addl %edx, %ebx
604 xorl %eax, %eax
605 xorl %ebx, %esi
606
607 /* Round 7 */
608 movl 28(%ebp), %edx
609 movl %esi, %ebx
610 xorl %edx, %edi
611 shrl $16, %ebx
612 movl %esi, %edx
613 movb %bh, %al
614 andl $255, %ebx
615 movb %dh, %cl
616 andl $255, %edx
617 movl 72(%ebp,%eax,4),%eax
618 movl 1096(%ebp,%ebx,4),%ebx
619 addl %eax, %ebx
620 movl 2120(%ebp,%ecx,4),%eax
621 xorl %eax, %ebx
622 movl 3144(%ebp,%edx,4),%edx
623 addl %edx, %ebx
624 xorl %eax, %eax
625 xorl %ebx, %edi
626
627 /* Round 6 */
628 movl 24(%ebp), %edx
629 movl %edi, %ebx
630 xorl %edx, %esi
631 shrl $16, %ebx
632 movl %edi, %edx
633 movb %bh, %al
634 andl $255, %ebx
635 movb %dh, %cl
636 andl $255, %edx
637 movl 72(%ebp,%eax,4),%eax
638 movl 1096(%ebp,%ebx,4),%ebx
639 addl %eax, %ebx
640 movl 2120(%ebp,%ecx,4),%eax
641 xorl %eax, %ebx
642 movl 3144(%ebp,%edx,4),%edx
643 addl %edx, %ebx
644 xorl %eax, %eax
645 xorl %ebx, %esi
646
647 /* Round 5 */
648 movl 20(%ebp), %edx
649 movl %esi, %ebx
650 xorl %edx, %edi
651 shrl $16, %ebx
652 movl %esi, %edx
653 movb %bh, %al
654 andl $255, %ebx
655 movb %dh, %cl
656 andl $255, %edx
657 movl 72(%ebp,%eax,4),%eax
658 movl 1096(%ebp,%ebx,4),%ebx
659 addl %eax, %ebx
660 movl 2120(%ebp,%ecx,4),%eax
661 xorl %eax, %ebx
662 movl 3144(%ebp,%edx,4),%edx
663 addl %edx, %ebx
664 xorl %eax, %eax
665 xorl %ebx, %edi
666
667 /* Round 4 */
668 movl 16(%ebp), %edx
669 movl %edi, %ebx
670 xorl %edx, %esi
671 shrl $16, %ebx
672 movl %edi, %edx
673 movb %bh, %al
674 andl $255, %ebx
675 movb %dh, %cl
676 andl $255, %edx
677 movl 72(%ebp,%eax,4),%eax
678 movl 1096(%ebp,%ebx,4),%ebx
679 addl %eax, %ebx
680 movl 2120(%ebp,%ecx,4),%eax
681 xorl %eax, %ebx
682 movl 3144(%ebp,%edx,4),%edx
683 addl %edx, %ebx
684 xorl %eax, %eax
685 xorl %ebx, %esi
686
687 /* Round 3 */
688 movl 12(%ebp), %edx
689 movl %esi, %ebx
690 xorl %edx, %edi
691 shrl $16, %ebx
692 movl %esi, %edx
693 movb %bh, %al
694 andl $255, %ebx
695 movb %dh, %cl
696 andl $255, %edx
697 movl 72(%ebp,%eax,4),%eax
698 movl 1096(%ebp,%ebx,4),%ebx
699 addl %eax, %ebx
700 movl 2120(%ebp,%ecx,4),%eax
701 xorl %eax, %ebx
702 movl 3144(%ebp,%edx,4),%edx
703 addl %edx, %ebx
704 xorl %eax, %eax
705 xorl %ebx, %edi
706
707 /* Round 2 */
708 movl 8(%ebp), %edx
709 movl %edi, %ebx
710 xorl %edx, %esi
711 shrl $16, %ebx
712 movl %edi, %edx
713 movb %bh, %al
714 andl $255, %ebx
715 movb %dh, %cl
716 andl $255, %edx
717 movl 72(%ebp,%eax,4),%eax
718 movl 1096(%ebp,%ebx,4),%ebx
719 addl %eax, %ebx
720 movl 2120(%ebp,%ecx,4),%eax
721 xorl %eax, %ebx
722 movl 3144(%ebp,%edx,4),%edx
723 addl %edx, %ebx
724 xorl %eax, %eax
725 xorl %ebx, %esi
726
727 /* Round 1 */
728 movl 4(%ebp), %edx
729 movl %esi, %ebx
730 xorl %edx, %edi
731 shrl $16, %ebx
732 movl %esi, %edx
733 movb %bh, %al
734 andl $255, %ebx
735 movb %dh, %cl
736 andl $255, %edx
737 movl 72(%ebp,%eax,4),%eax
738 movl 1096(%ebp,%ebx,4),%ebx
739 addl %eax, %ebx
740 movl 2120(%ebp,%ecx,4),%eax
741 xorl %eax, %ebx
742 movl 3144(%ebp,%edx,4),%edx
743 addl %edx, %ebx
744 /* Load parameter 0 (1) enc=0 */
745 movl 20(%esp), %eax
746 xorl %ebx, %edi
747 movl (%ebp), %edx
748 xorl %edx, %esi
749 movl %edi, 4(%eax)
750 movl %esi, (%eax)
751 popl %edi
752 popl %esi
753 popl %ebx
754 popl %ebp
755 ret
756.BF_decrypt_end:
757 SIZE(BF_decrypt,.BF_decrypt_end-BF_decrypt)
758.ident "BF_decrypt"
759.text
760 .align ALIGN
761.globl BF_cbc_encrypt
762 TYPE(BF_cbc_encrypt,@function)
763BF_cbc_encrypt:
764
765 pushl %ebp
766 pushl %ebx
767 pushl %esi
768 pushl %edi
769 movl 28(%esp), %ebp
770 /* getting iv ptr from parameter 4 */
771 movl 36(%esp), %ebx
772 movl (%ebx), %esi
773 movl 4(%ebx), %edi
774 pushl %edi
775 pushl %esi
776 pushl %edi
777 pushl %esi
778 movl %esp, %ebx
779 movl 36(%esp), %esi
780 movl 40(%esp), %edi
781 /* getting encrypt flag from parameter 5 */
782 movl 56(%esp), %ecx
783 /* get and push parameter 3 */
784 movl 48(%esp), %eax
785 pushl %eax
786 pushl %ebx
787 cmpl $0, %ecx
788 jz .L000decrypt
789 andl $4294967288, %ebp
790 movl 8(%esp), %eax
791 movl 12(%esp), %ebx
792 jz .L001encrypt_finish
793.L002encrypt_loop:
794 movl (%esi), %ecx
795 movl 4(%esi), %edx
796 xorl %ecx, %eax
797 xorl %edx, %ebx
798.byte 15
799.byte 200 /* bswapl %eax */
800.byte 15
801.byte 203 /* bswapl %ebx */
802 movl %eax, 8(%esp)
803 movl %ebx, 12(%esp)
804 call BF_encrypt
805 movl 8(%esp), %eax
806 movl 12(%esp), %ebx
807.byte 15
808.byte 200 /* bswapl %eax */
809.byte 15
810.byte 203 /* bswapl %ebx */
811 movl %eax, (%edi)
812 movl %ebx, 4(%edi)
813 addl $8, %esi
814 addl $8, %edi
815 subl $8, %ebp
816 jnz .L002encrypt_loop
817.L001encrypt_finish:
818 movl 52(%esp), %ebp
819 andl $7, %ebp
820 jz .L003finish
821 xorl %ecx, %ecx
822 xorl %edx, %edx
823 movl .L004cbc_enc_jmp_table(,%ebp,4),%ebp
824 jmp *%ebp
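/*
 * The indirect jump dispatches on the 1-7 leftover input bytes
 * (length & 7) through .L004cbc_enc_jmp_table at the bottom of the file:
 * the ejN labels below gather exactly that many bytes into %ecx/%edx,
 * which are then xored with the previous ciphertext block and encrypted
 * as the final block.
 */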
825.L005ej7:
826 movb 6(%esi), %dh
827 sall $8, %edx
828.L006ej6:
829 movb 5(%esi), %dh
830.L007ej5:
831 movb 4(%esi), %dl
832.L008ej4:
833 movl (%esi), %ecx
834 jmp .L009ejend
835.L010ej3:
836 movb 2(%esi), %ch
837 sall $8, %ecx
838.L011ej2:
839 movb 1(%esi), %ch
840.L012ej1:
841 movb (%esi), %cl
842.L009ejend:
843 xorl %ecx, %eax
844 xorl %edx, %ebx
845.byte 15
846.byte 200 /* bswapl %eax */
847.byte 15
848.byte 203 /* bswapl %ebx */
849 movl %eax, 8(%esp)
850 movl %ebx, 12(%esp)
851 call BF_encrypt
852 movl 8(%esp), %eax
853 movl 12(%esp), %ebx
854.byte 15
855.byte 200 /* bswapl %eax */
856.byte 15
857.byte 203 /* bswapl %ebx */
858 movl %eax, (%edi)
859 movl %ebx, 4(%edi)
860 jmp .L003finish
861.align ALIGN
862.L000decrypt:
863 andl $4294967288, %ebp
864 movl 16(%esp), %eax
865 movl 20(%esp), %ebx
866 jz .L013decrypt_finish
867.L014decrypt_loop:
868 movl (%esi), %eax
869 movl 4(%esi), %ebx
870.byte 15
871.byte 200 /* bswapl %eax */
872.byte 15
873.byte 203 /* bswapl %ebx */
874 movl %eax, 8(%esp)
875 movl %ebx, 12(%esp)
876 call BF_decrypt
877 movl 8(%esp), %eax
878 movl 12(%esp), %ebx
879.byte 15
880.byte 200 /* bswapl %eax */
881.byte 15
882.byte 203 /* bswapl %ebx */
883 movl 16(%esp), %ecx
884 movl 20(%esp), %edx
885 xorl %eax, %ecx
886 xorl %ebx, %edx
887 movl (%esi), %eax
888 movl 4(%esi), %ebx
889 movl %ecx, (%edi)
890 movl %edx, 4(%edi)
891 movl %eax, 16(%esp)
892 movl %ebx, 20(%esp)
893 addl $8, %esi
894 addl $8, %edi
895 subl $8, %ebp
896 jnz .L014decrypt_loop
897.L013decrypt_finish:
898 movl 52(%esp), %ebp
899 andl $7, %ebp
900 jz .L003finish
901 movl (%esi), %eax
902 movl 4(%esi), %ebx
903.byte 15
904.byte 200 /* bswapl %eax */
905.byte 15
906.byte 203 /* bswapl %ebx */
907 movl %eax, 8(%esp)
908 movl %ebx, 12(%esp)
909 call BF_decrypt
910 movl 8(%esp), %eax
911 movl 12(%esp), %ebx
912.byte 15
913.byte 200 /* bswapl %eax */
914.byte 15
915.byte 203 /* bswapl %ebx */
916 movl 16(%esp), %ecx
917 movl 20(%esp), %edx
918 xorl %eax, %ecx
919 xorl %ebx, %edx
920 movl (%esi), %eax
921 movl 4(%esi), %ebx
922.L015dj7:
923 rorl $16, %edx
924 movb %dl, 6(%edi)
925 shrl $16, %edx
926.L016dj6:
927 movb %dh, 5(%edi)
928.L017dj5:
929 movb %dl, 4(%edi)
930.L018dj4:
931 movl %ecx, (%edi)
932 jmp .L019djend
933.L020dj3:
934 rorl $16, %ecx
935 movb %cl, 2(%edi)
936 sall $16, %ecx
937.L021dj2:
938 movb %ch, 1(%esi)
939.L022dj1:
940 movb %cl, (%esi)
941.L019djend:
942 jmp .L003finish
943.align ALIGN
944.L003finish:
945 movl 60(%esp), %ecx
946 addl $24, %esp
947 movl %eax, (%ecx)
948 movl %ebx, 4(%ecx)
949 popl %edi
950 popl %esi
951 popl %ebx
952 popl %ebp
953 ret
954.align ALIGN
955.L004cbc_enc_jmp_table:
956 .long 0
957 .long .L012ej1
958 .long .L011ej2
959 .long .L010ej3
960 .long .L008ej4
961 .long .L007ej5
962 .long .L006ej6
963 .long .L005ej7
964.align ALIGN
965.L023cbc_dec_jmp_table:
966 .long 0
967 .long .L022dj1
968 .long .L021dj2
969 .long .L020dj3
970 .long .L018dj4
971 .long .L017dj5
972 .long .L016dj6
973 .long .L015dj7
974.BF_cbc_encrypt_end:
975 SIZE(BF_cbc_encrypt,.BF_cbc_encrypt_end-BF_cbc_encrypt)
976.ident "desasm.pl"
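
The CBC wrapper above takes its six arguments in the usual SSLeay order
(input, output, byte count, key schedule, IV, encrypt flag), as the
"parameter 3/4/5" comments indicate, and writes the updated IV back through
the IV pointer on return. A minimal caller, sketched against the
declarations in SSLeay's blowfish.h (the header name and the
BF_ENCRYPT/BF_DECRYPT constants are assumed from that header, not taken
from this diff):

	#include <stdio.h>
	#include <string.h>
	#include "blowfish.h"		/* <openssl/blowfish.h> in later trees */

	int main(void)
	{
		static unsigned char key[] = "0123456789abcdef";
		unsigned char iv[8], in[16] = "two blocks here", out[16], back[16];
		BF_KEY ks;

		BF_set_key(&ks, 16, key);

		memset(iv, 0, sizeof(iv));	/* the IV is updated in place */
		BF_cbc_encrypt(in, out, sizeof(in), &ks, iv, BF_ENCRYPT);

		memset(iv, 0, sizeof(iv));	/* reset it before decrypting */
		BF_cbc_encrypt(out, back, sizeof(out), &ks, iv, BF_DECRYPT);

		printf("%s\n", memcmp(in, back, sizeof(in)) ? "mismatch" : "round trip ok");
		return 0;
	}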