Diffstat (limited to 'src/lib/libcrypto/md5/asm/md5-sparcv9.S')
-rw-r--r--    src/lib/libcrypto/md5/asm/md5-sparcv9.S    1029
1 files changed, 1029 insertions, 0 deletions
diff --git a/src/lib/libcrypto/md5/asm/md5-sparcv9.S b/src/lib/libcrypto/md5/asm/md5-sparcv9.S
new file mode 100644
index 0000000000..ca4257f134
--- /dev/null
+++ b/src/lib/libcrypto/md5/asm/md5-sparcv9.S
@@ -0,0 +1,1029 @@
1.ident "md5-sparcv9.S, Version 1.0"
2.ident "SPARC V9 ISA artwork by Andy Polyakov <appro@fy.chalmers.se>"
3.file "md5-sparcv9.S"
4
5/*
6 * ====================================================================
7 * Copyright (c) 1999 Andy Polyakov <appro@fy.chalmers.se>.
8 *
9 * Rights for redistribution and usage in source and binary forms are
10 * granted as long as above copyright notices are retained. Warranty
11 * of any kind is (of course:-) disclaimed.
12 * ====================================================================
13 */
14
15/*
16 * This is my modest contribution to the OpenSSL project (see
17 * http://www.openssl.org/ for more information about it) and is an
18 * assembler implementation of the MD5 block hash function. I've
19 * hand-coded this for the sole reason of reaching the UltraSPARC-specific
20 * "load in little-endian byte order" instruction. This gives up to 15%
21 * performance improvement in cases where the input message is aligned
22 * on a 32-bit boundary. The module was tested under both 32- *and*
23 * 64-bit kernels. For updates see http://fy.chalmers.se/~appro/hpe/.
24 *
25 * To compile with SC4.x/SC5.x:
26 *
27 * cc -xarch=v[9|8plus] -DULTRASPARC -DMD5_BLOCK_DATA_ORDER \
28 * -c md5-sparcv9.S
29 *
30 * and with gcc:
31 *
32 * gcc -mcpu=ultrasparc -DULTRASPARC -DMD5_BLOCK_DATA_ORDER \
33 * -c md5-sparcv9.S
34 *
35 * or, if the above fails (it does if you have gas):
36 *
37 *	gcc -E -DULTRASPARC -DMD5_BLOCK_DATA_ORDER md5-sparcv9.S | \
38 * as -xarch=v8plus /dev/fd/0 -o md5-sparcv9.o
39 */
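/*
 * Calling convention implied by the register usage below (a sketch
 * inferred from the code, not a declaration taken from any header):
 * roughly
 *
 *	void md5_block(MD5_LONG state[4], const void *data, int num);
 *
 * where %i0 points at the four 32-bit chaining values A,B,C,D (at
 * offsets 0, 4, 8 and 12), %i1 points at the input and %i2 holds the
 * number of 64-byte blocks to process.  md5_block itself is renamed to
 * md5_block_asm_data_order_aligned or md5_block_asm_host_order by the
 * preprocessor logic further down.
 */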
40
41#define A %o0
42#define B %o1
43#define C %o2
44#define D %o3
45#define T1 %o4
46#define T2 %o5
47
48#define R0 %l0
49#define R1 %l1
50#define R2 %l2
51#define R3 %l3
52#define R4 %l4
53#define R5 %l5
54#define R6 %l6
55#define R7 %l7
56#define R8 %i3
57#define R9 %i4
58#define R10 %i5
59#define R11 %g1
60#define R12 %g2
61#define R13 %g3
62#define RX %g4
63
64#define Aptr %i0+0
65#define Bptr %i0+4
66#define Cptr %i0+8
67#define Dptr %i0+12
68
69#define Aval R5	/* aliases for registers no longer used at the end of the last round */
70#define Bval R6
71#define Cval R7
72#define Dval R8
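/*
 * Register allocation: the four working variables A..D and the two
 * scratch values T1/T2 live in %o registers; message words X[0..13]
 * are cached in R0..R13, and RX is (re)loaded on demand for X[14] and
 * X[15].  Aval..Dval alias registers that are free near the end of the
 * last round, so the previous chaining values can be loaded and folded
 * in without extra spills.
 */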
73
74#if defined(MD5_BLOCK_DATA_ORDER)
75# if defined(ULTRASPARC)
76# define LOAD lda
77# define X(i) [%i1+i*4]%asi
78# define md5_block md5_block_asm_data_order_aligned
79# define ASI_PRIMARY_LITTLE 0x88
80# else
81# error "MD5_BLOCK_DATA_ORDER is supported only on UltraSPARC!"
82# endif
83#else
84# define LOAD ld
85# define X(i) [%i1+i*4]
86# define md5_block md5_block_asm_host_order
87#endif
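/*
 * With MD5_BLOCK_DATA_ORDER on UltraSPARC, LOAD X(0),R0 expands to
 *
 *	lda	[%i1+0*4]%asi,%l0
 *
 * i.e. a load through the %asi register, which the prologue sets to
 * ASI_PRIMARY_LITTLE so that 32-bit message words are fetched in
 * little-endian byte order regardless of the host's endianness.
 * Without MD5_BLOCK_DATA_ORDER, LOAD is a plain host-order ld.
 */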
88
89.section ".text",#alloc,#execinstr
90
91#if defined(__SUNPRO_C) && defined(__sparcv9)
92	/* They've said -xarch=v9 on the command line */
93 .register %g2,#scratch
94 .register %g3,#scratch
95# define FRAME -192
96#elif defined(__GNUC__) && defined(__arch64__)
97	/* They've said -m64 on the command line */
98 .register %g2,#scratch
99 .register %g3,#scratch
100# define FRAME -192
101#else
102# define FRAME -96
103#endif
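/*
 * FRAME is the stack frame size handed to save: 96 bytes is the minimal
 * 32-bit (v8/v8plus) frame, while the 64-bit configurations reserve 192
 * bytes, comfortably above the 64-bit ABI's larger minimum frame.
 */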
104
105.align 32
106
107.global md5_block
108md5_block:
109 save %sp,FRAME,%sp
110
111 ld [Dptr],D
112 ld [Cptr],C
113 ld [Bptr],B
114 ld [Aptr],A
115#ifdef ASI_PRIMARY_LITTLE
116 rd %asi,%o7 ! How dare I? Well, I just do:-)
117 wr %g0,ASI_PRIMARY_LITTLE,%asi
118#endif
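	/* The caller's %asi is stashed in %o7 (free here: this function
	 * makes no calls, so %o7 is not needed for a return address) and
	 * replaced with ASI_PRIMARY_LITTLE, making the lda loads in the
	 * rounds below fetch their words little-endian.  The original
	 * value is restored just before the ret at the end. */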
119 LOAD X(0),R0
120
121.Lmd5_block_loop:
122
123!!!!!!!!Round 0
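/*
 * Each round-0 step computes a = b + ((a + F(b,c,d) + X[k] + T[i]) <<< s)
 * with F(x,y,z) = (x & y) | (~x & z), evaluated here as ((y ^ z) & x) ^ z
 * to save an instruction.  The 32-bit constants T[i] are built with
 * sethi/or (%hi/%lo) pairs, the next message word is loaded one step
 * ahead, and the trailing "!=" comments appear to mark intended
 * 4-instruction dispatch groups for UltraSPARC's grouping rules.
 */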
124
125 xor C,D,T1
126 sethi %hi(0xd76aa478),T2
127 and T1,B,T1
128 or T2,%lo(0xd76aa478),T2 !=
129 xor T1,D,T1
130 add T1,R0,T1
131 LOAD X(1),R1
132 add T1,T2,T1 !=
133 add A,T1,A
134 sll A,7,T2
135 srl A,32-7,A
136 or A,T2,A !=
137 xor B,C,T1
138 add A,B,A
139
140 sethi %hi(0xe8c7b756),T2
141 and T1,A,T1 !=
142 or T2,%lo(0xe8c7b756),T2
143 xor T1,C,T1
144 LOAD X(2),R2
145 add T1,R1,T1 !=
146 add T1,T2,T1
147 add D,T1,D
148 sll D,12,T2
149 srl D,32-12,D !=
150 or D,T2,D
151 xor A,B,T1
152 add D,A,D
153
154 sethi %hi(0x242070db),T2 !=
155 and T1,D,T1
156 or T2,%lo(0x242070db),T2
157 xor T1,B,T1
158 add T1,R2,T1 !=
159 LOAD X(3),R3
160 add T1,T2,T1
161 add C,T1,C
162 sll C,17,T2 !=
163 srl C,32-17,C
164 or C,T2,C
165 xor D,A,T1
166 add C,D,C !=
167
168 sethi %hi(0xc1bdceee),T2
169 and T1,C,T1
170 or T2,%lo(0xc1bdceee),T2
171 xor T1,A,T1 !=
172 add T1,R3,T1
173 LOAD X(4),R4
174 add T1,T2,T1
175 add B,T1,B !=
176 sll B,22,T2
177 srl B,32-22,B
178 or B,T2,B
179 xor C,D,T1 !=
180 add B,C,B
181
182 sethi %hi(0xf57c0faf),T2
183 and T1,B,T1
184 or T2,%lo(0xf57c0faf),T2 !=
185 xor T1,D,T1
186 add T1,R4,T1
187 LOAD X(5),R5
188 add T1,T2,T1 !=
189 add A,T1,A
190 sll A,7,T2
191 srl A,32-7,A
192 or A,T2,A !=
193 xor B,C,T1
194 add A,B,A
195
196 sethi %hi(0x4787c62a),T2
197 and T1,A,T1 !=
198 or T2,%lo(0x4787c62a),T2
199 xor T1,C,T1
200 LOAD X(6),R6
201 add T1,R5,T1 !=
202 add T1,T2,T1
203 add D,T1,D
204 sll D,12,T2
205 srl D,32-12,D !=
206 or D,T2,D
207 xor A,B,T1
208 add D,A,D
209
210 sethi %hi(0xa8304613),T2 !=
211 and T1,D,T1
212 or T2,%lo(0xa8304613),T2
213 xor T1,B,T1
214 add T1,R6,T1 !=
215 LOAD X(7),R7
216 add T1,T2,T1
217 add C,T1,C
218 sll C,17,T2 !=
219 srl C,32-17,C
220 or C,T2,C
221 xor D,A,T1
222 add C,D,C !=
223
224 sethi %hi(0xfd469501),T2
225 and T1,C,T1
226 or T2,%lo(0xfd469501),T2
227 xor T1,A,T1 !=
228 add T1,R7,T1
229 LOAD X(8),R8
230 add T1,T2,T1
231 add B,T1,B !=
232 sll B,22,T2
233 srl B,32-22,B
234 or B,T2,B
235 xor C,D,T1 !=
236 add B,C,B
237
238 sethi %hi(0x698098d8),T2
239 and T1,B,T1
240 or T2,%lo(0x698098d8),T2 !=
241 xor T1,D,T1
242 add T1,R8,T1
243 LOAD X(9),R9
244 add T1,T2,T1 !=
245 add A,T1,A
246 sll A,7,T2
247 srl A,32-7,A
248 or A,T2,A !=
249 xor B,C,T1
250 add A,B,A
251
252 sethi %hi(0x8b44f7af),T2
253 and T1,A,T1 !=
254 or T2,%lo(0x8b44f7af),T2
255 xor T1,C,T1
256 LOAD X(10),R10
257 add T1,R9,T1 !=
258 add T1,T2,T1
259 add D,T1,D
260 sll D,12,T2
261 srl D,32-12,D !=
262 or D,T2,D
263 xor A,B,T1
264 add D,A,D
265
266 sethi %hi(0xffff5bb1),T2 !=
267 and T1,D,T1
268 or T2,%lo(0xffff5bb1),T2
269 xor T1,B,T1
270 add T1,R10,T1 !=
271 LOAD X(11),R11
272 add T1,T2,T1
273 add C,T1,C
274 sll C,17,T2 !=
275 srl C,32-17,C
276 or C,T2,C
277 xor D,A,T1
278 add C,D,C !=
279
280 sethi %hi(0x895cd7be),T2
281 and T1,C,T1
282 or T2,%lo(0x895cd7be),T2
283 xor T1,A,T1 !=
284 add T1,R11,T1
285 LOAD X(12),R12
286 add T1,T2,T1
287 add B,T1,B !=
288 sll B,22,T2
289 srl B,32-22,B
290 or B,T2,B
291 xor C,D,T1 !=
292 add B,C,B
293
294 sethi %hi(0x6b901122),T2
295 and T1,B,T1
296 or T2,%lo(0x6b901122),T2 !=
297 xor T1,D,T1
298 add T1,R12,T1
299 LOAD X(13),R13
300 add T1,T2,T1 !=
301 add A,T1,A
302 sll A,7,T2
303 srl A,32-7,A
304 or A,T2,A !=
305 xor B,C,T1
306 add A,B,A
307
308 sethi %hi(0xfd987193),T2
309 and T1,A,T1 !=
310 or T2,%lo(0xfd987193),T2
311 xor T1,C,T1
312 LOAD X(14),RX
313 add T1,R13,T1 !=
314 add T1,T2,T1
315 add D,T1,D
316 sll D,12,T2
317 srl D,32-12,D !=
318 or D,T2,D
319 xor A,B,T1
320 add D,A,D
321
322 sethi %hi(0xa679438e),T2 !=
323 and T1,D,T1
324 or T2,%lo(0xa679438e),T2
325 xor T1,B,T1
326 add T1,RX,T1 !=
327 LOAD X(15),RX
328 add T1,T2,T1
329 add C,T1,C
330 sll C,17,T2 !=
331 srl C,32-17,C
332 or C,T2,C
333 xor D,A,T1
334 add C,D,C !=
335
336 sethi %hi(0x49b40821),T2
337 and T1,C,T1
338 or T2,%lo(0x49b40821),T2
339 xor T1,A,T1 !=
340 add T1,RX,T1
341 !pre-LOADed X(1),R1
342 add T1,T2,T1
343 add B,T1,B
344 sll B,22,T2 !=
345 srl B,32-22,B
346 or B,T2,B
347 add B,C,B
348
349!!!!!!!!Round 1
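/*
 * Round 1 uses G(x,y,z) = (x & z) | (y & ~z), computed as ((x ^ y) & z) ^ y;
 * the steps otherwise follow the same add-rotate-add pattern as round 0,
 * with shift amounts 5, 9, 14 and 20.
 */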
350
351 xor B,C,T1 !=
352 sethi %hi(0xf61e2562),T2
353 and T1,D,T1
354 or T2,%lo(0xf61e2562),T2
355 xor T1,C,T1 !=
356 add T1,R1,T1
357 !pre-LOADed X(6),R6
358 add T1,T2,T1
359 add A,T1,A
360 sll A,5,T2 !=
361 srl A,32-5,A
362 or A,T2,A
363 add A,B,A
364
365 xor A,B,T1 !=
366 sethi %hi(0xc040b340),T2
367 and T1,C,T1
368 or T2,%lo(0xc040b340),T2
369 xor T1,B,T1 !=
370 add T1,R6,T1
371 !pre-LOADed X(11),R11
372 add T1,T2,T1
373 add D,T1,D
374 sll D,9,T2 !=
375 srl D,32-9,D
376 or D,T2,D
377 add D,A,D
378
379 xor D,A,T1 !=
380 sethi %hi(0x265e5a51),T2
381 and T1,B,T1
382 or T2,%lo(0x265e5a51),T2
383 xor T1,A,T1 !=
384 add T1,R11,T1
385 !pre-LOADed X(0),R0
386 add T1,T2,T1
387 add C,T1,C
388 sll C,14,T2 !=
389 srl C,32-14,C
390 or C,T2,C
391 add C,D,C
392
393 xor C,D,T1 !=
394 sethi %hi(0xe9b6c7aa),T2
395 and T1,A,T1
396 or T2,%lo(0xe9b6c7aa),T2
397 xor T1,D,T1 !=
398 add T1,R0,T1
399 !pre-LOADed X(5),R5
400 add T1,T2,T1
401 add B,T1,B
402 sll B,20,T2 !=
403 srl B,32-20,B
404 or B,T2,B
405 add B,C,B
406
407 xor B,C,T1 !=
408 sethi %hi(0xd62f105d),T2
409 and T1,D,T1
410 or T2,%lo(0xd62f105d),T2
411 xor T1,C,T1 !=
412 add T1,R5,T1
413 !pre-LOADed X(10),R10
414 add T1,T2,T1
415 add A,T1,A
416 sll A,5,T2 !=
417 srl A,32-5,A
418 or A,T2,A
419 add A,B,A
420
421 xor A,B,T1 !=
422 sethi %hi(0x02441453),T2
423 and T1,C,T1
424 or T2,%lo(0x02441453),T2
425 xor T1,B,T1 !=
426 add T1,R10,T1
427 LOAD X(15),RX
428 add T1,T2,T1
429 add D,T1,D !=
430 sll D,9,T2
431 srl D,32-9,D
432 or D,T2,D
433 add D,A,D !=
434
435 xor D,A,T1
436 sethi %hi(0xd8a1e681),T2
437 and T1,B,T1
438 or T2,%lo(0xd8a1e681),T2 !=
439 xor T1,A,T1
440 add T1,RX,T1
441 !pre-LOADed X(4),R4
442 add T1,T2,T1
443 add C,T1,C !=
444 sll C,14,T2
445 srl C,32-14,C
446 or C,T2,C
447 add C,D,C !=
448
449 xor C,D,T1
450 sethi %hi(0xe7d3fbc8),T2
451 and T1,A,T1
452 or T2,%lo(0xe7d3fbc8),T2 !=
453 xor T1,D,T1
454 add T1,R4,T1
455 !pre-LOADed X(9),R9
456 add T1,T2,T1
457 add B,T1,B !=
458 sll B,20,T2
459 srl B,32-20,B
460 or B,T2,B
461 add B,C,B !=
462
463 xor B,C,T1
464 sethi %hi(0x21e1cde6),T2
465 and T1,D,T1
466 or T2,%lo(0x21e1cde6),T2 !=
467 xor T1,C,T1
468 add T1,R9,T1
469 LOAD X(14),RX
470 add T1,T2,T1 !=
471 add A,T1,A
472 sll A,5,T2
473 srl A,32-5,A
474 or A,T2,A !=
475 add A,B,A
476
477 xor A,B,T1
478 sethi %hi(0xc33707d6),T2
479 and T1,C,T1 !=
480 or T2,%lo(0xc33707d6),T2
481 xor T1,B,T1
482 add T1,RX,T1
483 !pre-LOADed X(3),R3
484 add T1,T2,T1 !=
485 add D,T1,D
486 sll D,9,T2
487 srl D,32-9,D
488 or D,T2,D !=
489 add D,A,D
490
491 xor D,A,T1
492 sethi %hi(0xf4d50d87),T2
493 and T1,B,T1 !=
494 or T2,%lo(0xf4d50d87),T2
495 xor T1,A,T1
496 add T1,R3,T1
497 !pre-LOADed X(8),R8
498 add T1,T2,T1 !=
499 add C,T1,C
500 sll C,14,T2
501 srl C,32-14,C
502 or C,T2,C !=
503 add C,D,C
504
505 xor C,D,T1
506 sethi %hi(0x455a14ed),T2
507 and T1,A,T1 !=
508 or T2,%lo(0x455a14ed),T2
509 xor T1,D,T1
510 add T1,R8,T1
511 !pre-LOADed X(13),R13
512 add T1,T2,T1 !=
513 add B,T1,B
514 sll B,20,T2
515 srl B,32-20,B
516 or B,T2,B !=
517 add B,C,B
518
519 xor B,C,T1
520 sethi %hi(0xa9e3e905),T2
521 and T1,D,T1 !=
522 or T2,%lo(0xa9e3e905),T2
523 xor T1,C,T1
524 add T1,R13,T1
525 !pre-LOADed X(2),R2
526 add T1,T2,T1 !=
527 add A,T1,A
528 sll A,5,T2
529 srl A,32-5,A
530 or A,T2,A !=
531 add A,B,A
532
533 xor A,B,T1
534 sethi %hi(0xfcefa3f8),T2
535 and T1,C,T1 !=
536 or T2,%lo(0xfcefa3f8),T2
537 xor T1,B,T1
538 add T1,R2,T1
539 !pre-LOADed X(7),R7
540 add T1,T2,T1 !=
541 add D,T1,D
542 sll D,9,T2
543 srl D,32-9,D
544 or D,T2,D !=
545 add D,A,D
546
547 xor D,A,T1
548 sethi %hi(0x676f02d9),T2
549 and T1,B,T1 !=
550 or T2,%lo(0x676f02d9),T2
551 xor T1,A,T1
552 add T1,R7,T1
553 !pre-LOADed X(12),R12
554 add T1,T2,T1 !=
555 add C,T1,C
556 sll C,14,T2
557 srl C,32-14,C
558 or C,T2,C !=
559 add C,D,C
560
561 xor C,D,T1
562 sethi %hi(0x8d2a4c8a),T2
563 and T1,A,T1 !=
564 or T2,%lo(0x8d2a4c8a),T2
565 xor T1,D,T1
566 add T1,R12,T1
567 !pre-LOADed X(5),R5
568 add T1,T2,T1 !=
569 add B,T1,B
570 sll B,20,T2
571 srl B,32-20,B
572 or B,T2,B !=
573 add B,C,B
574
575!!!!!!!!Round 2
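/*
 * Round 2 uses H(x,y,z) = x ^ y ^ z (two xors, no and), with shift
 * amounts 4, 11, 16 and 23.
 */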
576
577 xor B,C,T1
578 sethi %hi(0xfffa3942),T2
579 xor T1,D,T1 !=
580 or T2,%lo(0xfffa3942),T2
581 add T1,R5,T1
582 !pre-LOADed X(8),R8
583 add T1,T2,T1
584 add A,T1,A !=
585 sll A,4,T2
586 srl A,32-4,A
587 or A,T2,A
588 add A,B,A !=
589
590 xor A,B,T1
591 sethi %hi(0x8771f681),T2
592 xor T1,C,T1
593 or T2,%lo(0x8771f681),T2 !=
594 add T1,R8,T1
595 !pre-LOADed X(11),R11
596 add T1,T2,T1
597 add D,T1,D
598 sll D,11,T2 !=
599 srl D,32-11,D
600 or D,T2,D
601 add D,A,D
602
603 xor D,A,T1 !=
604 sethi %hi(0x6d9d6122),T2
605 xor T1,B,T1
606 or T2,%lo(0x6d9d6122),T2
607 add T1,R11,T1 !=
608 LOAD X(14),RX
609 add T1,T2,T1
610 add C,T1,C
611 sll C,16,T2 !=
612 srl C,32-16,C
613 or C,T2,C
614 add C,D,C
615
616 xor C,D,T1 !=
617 sethi %hi(0xfde5380c),T2
618 xor T1,A,T1
619 or T2,%lo(0xfde5380c),T2
620 add T1,RX,T1 !=
621 !pre-LOADed X(1),R1
622 add T1,T2,T1
623 add B,T1,B
624 sll B,23,T2
625 srl B,32-23,B !=
626 or B,T2,B
627 add B,C,B
628
629 xor B,C,T1
630 sethi %hi(0xa4beea44),T2 !=
631 xor T1,D,T1
632 or T2,%lo(0xa4beea44),T2
633 add T1,R1,T1
634 !pre-LOADed X(4),R4
635 add T1,T2,T1 !=
636 add A,T1,A
637 sll A,4,T2
638 srl A,32-4,A
639 or A,T2,A !=
640 add A,B,A
641
642 xor A,B,T1
643 sethi %hi(0x4bdecfa9),T2
644 xor T1,C,T1 !=
645 or T2,%lo(0x4bdecfa9),T2
646 add T1,R4,T1
647 !pre-LOADed X(7),R7
648 add T1,T2,T1
649 add D,T1,D !=
650 sll D,11,T2
651 srl D,32-11,D
652 or D,T2,D
653 add D,A,D !=
654
655 xor D,A,T1
656 sethi %hi(0xf6bb4b60),T2
657 xor T1,B,T1
658 or T2,%lo(0xf6bb4b60),T2 !=
659 add T1,R7,T1
660 !pre-LOADed X(10),R10
661 add T1,T2,T1
662 add C,T1,C
663 sll C,16,T2 !=
664 srl C,32-16,C
665 or C,T2,C
666 add C,D,C
667
668 xor C,D,T1 !=
669 sethi %hi(0xbebfbc70),T2
670 xor T1,A,T1
671 or T2,%lo(0xbebfbc70),T2
672 add T1,R10,T1 !=
673 !pre-LOADed X(13),R13
674 add T1,T2,T1
675 add B,T1,B
676 sll B,23,T2
677 srl B,32-23,B !=
678 or B,T2,B
679 add B,C,B
680
681 xor B,C,T1
682 sethi %hi(0x289b7ec6),T2 !=
683 xor T1,D,T1
684 or T2,%lo(0x289b7ec6),T2
685 add T1,R13,T1
686 !pre-LOADed X(0),R0
687 add T1,T2,T1 !=
688 add A,T1,A
689 sll A,4,T2
690 srl A,32-4,A
691 or A,T2,A !=
692 add A,B,A
693
694 xor A,B,T1
695 sethi %hi(0xeaa127fa),T2
696 xor T1,C,T1 !=
697 or T2,%lo(0xeaa127fa),T2
698 add T1,R0,T1
699 !pre-LOADed X(3),R3
700 add T1,T2,T1
701 add D,T1,D !=
702 sll D,11,T2
703 srl D,32-11,D
704 or D,T2,D
705 add D,A,D !=
706
707 xor D,A,T1
708 sethi %hi(0xd4ef3085),T2
709 xor T1,B,T1
710 or T2,%lo(0xd4ef3085),T2 !=
711 add T1,R3,T1
712 !pre-LOADed X(6),R6
713 add T1,T2,T1
714 add C,T1,C
715 sll C,16,T2 !=
716 srl C,32-16,C
717 or C,T2,C
718 add C,D,C
719
720 xor C,D,T1 !=
721 sethi %hi(0x04881d05),T2
722 xor T1,A,T1
723 or T2,%lo(0x04881d05),T2
724 add T1,R6,T1 !=
725 !pre-LOADed X(9),R9
726 add T1,T2,T1
727 add B,T1,B
728 sll B,23,T2
729 srl B,32-23,B !=
730 or B,T2,B
731 add B,C,B
732
733 xor B,C,T1
734 sethi %hi(0xd9d4d039),T2 !=
735 xor T1,D,T1
736 or T2,%lo(0xd9d4d039),T2
737 add T1,R9,T1
738 !pre-LOADed X(12),R12
739 add T1,T2,T1 !=
740 add A,T1,A
741 sll A,4,T2
742 srl A,32-4,A
743 or A,T2,A !=
744 add A,B,A
745
746 xor A,B,T1
747 sethi %hi(0xe6db99e5),T2
748 xor T1,C,T1 !=
749 or T2,%lo(0xe6db99e5),T2
750 add T1,R12,T1
751 LOAD X(15),RX
752 add T1,T2,T1 !=
753 add D,T1,D
754 sll D,11,T2
755 srl D,32-11,D
756 or D,T2,D !=
757 add D,A,D
758
759 xor D,A,T1
760 sethi %hi(0x1fa27cf8),T2
761 xor T1,B,T1 !=
762 or T2,%lo(0x1fa27cf8),T2
763 add T1,RX,T1
764 !pre-LOADed X(2),R2
765 add T1,T2,T1
766 add C,T1,C !=
767 sll C,16,T2
768 srl C,32-16,C
769 or C,T2,C
770 add C,D,C !=
771
772 xor C,D,T1
773 sethi %hi(0xc4ac5665),T2
774 xor T1,A,T1
775 or T2,%lo(0xc4ac5665),T2 !=
776 add T1,R2,T1
777 !pre-LOADed X(0),R0
778 add T1,T2,T1
779 add B,T1,B
780 sll B,23,T2 !=
781 srl B,32-23,B
782 or B,T2,B
783 add B,C,B
784
785!!!!!!!!Round 3
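/*
 * Round 3 uses I(x,y,z) = y ^ (x | ~z), which is where SPARC's orn
 * (or-not) instruction earns its keep; shift amounts are 6, 10, 15 and 21.
 * The last few steps also interleave loading the previous chaining values
 * (ld [Aptr],Aval etc.) and storing the updated ones, presumably to hide
 * memory latency behind the remaining arithmetic.
 */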
786
787 orn B,D,T1 !=
788 sethi %hi(0xf4292244),T2
789 xor T1,C,T1
790 or T2,%lo(0xf4292244),T2
791 add T1,R0,T1 !=
792 !pre-LOADed X(7),R7
793 add T1,T2,T1
794 add A,T1,A
795 sll A,6,T2
796 srl A,32-6,A !=
797 or A,T2,A
798 add A,B,A
799
800 orn A,C,T1
801 sethi %hi(0x432aff97),T2 !=
802 xor T1,B,T1
803 or T2,%lo(0x432aff97),T2
804 LOAD X(14),RX
805 add T1,R7,T1 !=
806 add T1,T2,T1
807 add D,T1,D
808 sll D,10,T2
809 srl D,32-10,D !=
810 or D,T2,D
811 add D,A,D
812
813 orn D,B,T1
814 sethi %hi(0xab9423a7),T2 !=
815 xor T1,A,T1
816 or T2,%lo(0xab9423a7),T2
817 add T1,RX,T1
818 !pre-LOADed X(5),R5
819 add T1,T2,T1 !=
820 add C,T1,C
821 sll C,15,T2
822 srl C,32-15,C
823 or C,T2,C !=
824 add C,D,C
825
826 orn C,A,T1
827 sethi %hi(0xfc93a039),T2
828 xor T1,D,T1 !=
829 or T2,%lo(0xfc93a039),T2
830 add T1,R5,T1
831 !pre-LOADed X(12),R12
832 add T1,T2,T1
833 add B,T1,B !=
834 sll B,21,T2
835 srl B,32-21,B
836 or B,T2,B
837 add B,C,B !=
838
839 orn B,D,T1
840 sethi %hi(0x655b59c3),T2
841 xor T1,C,T1
842 or T2,%lo(0x655b59c3),T2 !=
843 add T1,R12,T1
844 !pre-LOADed X(3),R3
845 add T1,T2,T1
846 add A,T1,A
847 sll A,6,T2 !=
848 srl A,32-6,A
849 or A,T2,A
850 add A,B,A
851
852 orn A,C,T1 !=
853 sethi %hi(0x8f0ccc92),T2
854 xor T1,B,T1
855 or T2,%lo(0x8f0ccc92),T2
856 add T1,R3,T1 !=
857 !pre-LOADed X(10),R10
858 add T1,T2,T1
859 add D,T1,D
860 sll D,10,T2
861 srl D,32-10,D !=
862 or D,T2,D
863 add D,A,D
864
865 orn D,B,T1
866 sethi %hi(0xffeff47d),T2 !=
867 xor T1,A,T1
868 or T2,%lo(0xffeff47d),T2
869 add T1,R10,T1
870 !pre-LOADed X(1),R1
871 add T1,T2,T1 !=
872 add C,T1,C
873 sll C,15,T2
874 srl C,32-15,C
875 or C,T2,C !=
876 add C,D,C
877
878 orn C,A,T1
879 sethi %hi(0x85845dd1),T2
880 xor T1,D,T1 !=
881 or T2,%lo(0x85845dd1),T2
882 add T1,R1,T1
883 !pre-LOADed X(8),R8
884 add T1,T2,T1
885 add B,T1,B !=
886 sll B,21,T2
887 srl B,32-21,B
888 or B,T2,B
889 add B,C,B !=
890
891 orn B,D,T1
892 sethi %hi(0x6fa87e4f),T2
893 xor T1,C,T1
894 or T2,%lo(0x6fa87e4f),T2 !=
895 add T1,R8,T1
896 LOAD X(15),RX
897 add T1,T2,T1
898 add A,T1,A !=
899 sll A,6,T2
900 srl A,32-6,A
901 or A,T2,A
902 add A,B,A !=
903
904 orn A,C,T1
905 sethi %hi(0xfe2ce6e0),T2
906 xor T1,B,T1
907 or T2,%lo(0xfe2ce6e0),T2 !=
908 add T1,RX,T1
909 !pre-LOADed X(6),R6
910 add T1,T2,T1
911 add D,T1,D
912 sll D,10,T2 !=
913 srl D,32-10,D
914 or D,T2,D
915 add D,A,D
916
917 orn D,B,T1 !=
918 sethi %hi(0xa3014314),T2
919 xor T1,A,T1
920 or T2,%lo(0xa3014314),T2
921 add T1,R6,T1 !=
922 !pre-LOADed X(13),R13
923 add T1,T2,T1
924 add C,T1,C
925 sll C,15,T2
926 srl C,32-15,C !=
927 or C,T2,C
928 add C,D,C
929
930 orn C,A,T1
931 sethi %hi(0x4e0811a1),T2 !=
932 xor T1,D,T1
933 or T2,%lo(0x4e0811a1),T2
934 !pre-LOADed X(4),R4
935 ld [Aptr],Aval
936 add T1,R13,T1 !=
937 add T1,T2,T1
938 add B,T1,B
939 sll B,21,T2
940 srl B,32-21,B !=
941 or B,T2,B
942 add B,C,B
943
944 orn B,D,T1
945 sethi %hi(0xf7537e82),T2 !=
946 xor T1,C,T1
947 or T2,%lo(0xf7537e82),T2
948 !pre-LOADed X(11),R11
949 ld [Dptr],Dval
950 add T1,R4,T1 !=
951 add T1,T2,T1
952 add A,T1,A
953 sll A,6,T2
954 srl A,32-6,A !=
955 or A,T2,A
956 add A,B,A
957
958 orn A,C,T1
959 sethi %hi(0xbd3af235),T2 !=
960 xor T1,B,T1
961 or T2,%lo(0xbd3af235),T2
962 !pre-LOADed X(2),R2
963 ld [Cptr],Cval
964 add T1,R11,T1 !=
965 add T1,T2,T1
966 add D,T1,D
967 sll D,10,T2
968 srl D,32-10,D !=
969 or D,T2,D
970 add D,A,D
971
972 orn D,B,T1
973 sethi %hi(0x2ad7d2bb),T2 !=
974 xor T1,A,T1
975 or T2,%lo(0x2ad7d2bb),T2
976 !pre-LOADed X(9),R9
977 ld [Bptr],Bval
978 add T1,R2,T1 !=
979 add Aval,A,Aval
980 add T1,T2,T1
981 st Aval,[Aptr]
982 add C,T1,C !=
983 sll C,15,T2
984 add Dval,D,Dval
985 srl C,32-15,C
986 or C,T2,C !=
987 st Dval,[Dptr]
988 add C,D,C
989
990 orn C,A,T1
991 sethi %hi(0xeb86d391),T2 !=
992 xor T1,D,T1
993 or T2,%lo(0xeb86d391),T2
994 add T1,R9,T1
995 !pre-LOADed X(0),R0
996 mov Aval,A !=
997 add T1,T2,T1
998 mov Dval,D
999 add B,T1,B
1000 sll B,21,T2 !=
1001 add Cval,C,Cval
1002 srl B,32-21,B
1003 st Cval,[Cptr]
1004 or B,T2,B !=
1005 add B,C,B
1006
1007 deccc %i2
1008 mov Cval,C
1009 add B,Bval,B !=
1010 inc 64,%i1
1011 nop
1012 st B,[Bptr]
1013 nop !=
1014
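	/* Loop control: deccc has decremented the block counter in %i2
	 * (setting the condition codes) and inc 64,%i1 has advanced the
	 * data pointer to the next block.  The branch below uses the
	 * annul bit (",a"), so the LOAD X(0),R0 in its delay slot is
	 * executed only when another iteration is taken. */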
1015#ifdef ULTRASPARC
1016 bg,a,pt %icc,.Lmd5_block_loop
1017#else
1018 bg,a .Lmd5_block_loop
1019#endif
1020 LOAD X(0),R0
1021
1022#ifdef ASI_PRIMARY_LITTLE
1023 wr %g0,%o7,%asi
1024#endif
1025 ret
1026 restore %g0,0,%o0
1027
1028.type md5_block,#function
1029.size md5_block,(.-md5_block)