author		kettenis <>	2018-01-07 12:35:52 +0000
committer	kettenis <>	2018-01-07 12:35:52 +0000
commit		b5565d9f20514e6aee34a8beb128ad836d739bf9 (patch)
tree		d4ae0f75dc0e9b838b63ab32052ba4154f16910d /src/lib/libcrypto/aes
parent		bf59c7f1f430d5203f39c3206e7df506fe74264b (diff)
download	openbsd-b5565d9f20514e6aee34a8beb128ad836d739bf9.tar.gz
		openbsd-b5565d9f20514e6aee34a8beb128ad836d739bf9.tar.bz2
		openbsd-b5565d9f20514e6aee34a8beb128ad836d739bf9.zip
On OpenBSD/armv7 we deliberately trap unaligned access. Unfortunately
the assembly code in libcrypto assumes unaligned access is allowed for
ARMv7. Make these paths conditional on __STRICT_ALIGNMENT not being
defined and define __STRICT_ALIGNMENT in arm_arch.h for OpenBSD.
ok tom@
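
For context, the arm_arch.h side of the change mentioned above is outside this diffstat (which is limited to the aes directory). A minimal sketch of what such a definition could look like, assuming the header simply keys off the OpenBSD compiler predefine:

/* Illustrative sketch only -- the exact arm_arch.h hunk is not shown in
 * this diffstat.  The idea is that OpenBSD opts into strict alignment so
 * the Perl-generated assembly falls back to byte-wise loads. */
#if defined(__OpenBSD__)
# define __STRICT_ALIGNMENT
#endif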
Diffstat (limited to 'src/lib/libcrypto/aes')
-rw-r--r--	src/lib/libcrypto/aes/asm/aes-armv4.pl	14
1 file changed, 7 insertions, 7 deletions
diff --git a/src/lib/libcrypto/aes/asm/aes-armv4.pl b/src/lib/libcrypto/aes/asm/aes-armv4.pl
index 717cc1ed7f..1cb9586d4b 100644
--- a/src/lib/libcrypto/aes/asm/aes-armv4.pl
+++ b/src/lib/libcrypto/aes/asm/aes-armv4.pl
@@ -172,7 +172,7 @@ AES_encrypt:
 	mov	$rounds,r0		@ inp
 	mov	$key,r2
 	sub	$tbl,r3,#AES_encrypt-AES_Te	@ Te
-#if __ARM_ARCH__<7
+#if __ARM_ARCH__<7 || defined(__STRICT_ALIGNMENT)
 	ldrb	$s0,[$rounds,#3]	@ load input data in endian-neutral
 	ldrb	$t1,[$rounds,#2]	@ manner...
 	ldrb	$t2,[$rounds,#1]
@@ -216,7 +216,7 @@ AES_encrypt:
 	bl	_armv4_AES_encrypt
 
 	ldr	$rounds,[sp],#4		@ pop out
-#if __ARM_ARCH__>=7
+#if __ARM_ARCH__>=7 && !defined(__STRICT_ALIGNMENT)
 #ifdef __ARMEL__
 	rev	$s0,$s0
 	rev	$s1,$s1
@@ -432,7 +432,7 @@ _armv4_AES_set_encrypt_key:
 	mov	lr,r1			@ bits
 	mov	$key,r2			@ key
 
-#if __ARM_ARCH__<7
+#if __ARM_ARCH__<7 || defined(__STRICT_ALIGNMENT)
 	ldrb	$s0,[$rounds,#3]	@ load input data in endian-neutral
 	ldrb	$t1,[$rounds,#2]	@ manner...
 	ldrb	$t2,[$rounds,#1]
@@ -517,7 +517,7 @@ _armv4_AES_set_encrypt_key:
 	b	.Ldone
 
 .Lnot128:
-#if __ARM_ARCH__<7
+#if __ARM_ARCH__<7 || defined(__STRICT_ALIGNMENT)
 	ldrb	$i2,[$rounds,#19]
 	ldrb	$t1,[$rounds,#18]
 	ldrb	$t2,[$rounds,#17]
@@ -588,7 +588,7 @@ _armv4_AES_set_encrypt_key:
 	b	.L192_loop
 
 .Lnot192:
-#if __ARM_ARCH__<7
+#if __ARM_ARCH__<7 || defined(__STRICT_ALIGNMENT)
 	ldrb	$i2,[$rounds,#27]
 	ldrb	$t1,[$rounds,#26]
 	ldrb	$t2,[$rounds,#25]
@@ -888,7 +888,7 @@ AES_decrypt:
 	mov	$rounds,r0		@ inp
 	mov	$key,r2
 	sub	$tbl,r3,#AES_decrypt-AES_Td	@ Td
-#if __ARM_ARCH__<7
+#if __ARM_ARCH__<7 || defined(__STRICT_ALIGNMENT)
 	ldrb	$s0,[$rounds,#3]	@ load input data in endian-neutral
 	ldrb	$t1,[$rounds,#2]	@ manner...
 	ldrb	$t2,[$rounds,#1]
@@ -932,7 +932,7 @@ AES_decrypt:
 	bl	_armv4_AES_decrypt
 
 	ldr	$rounds,[sp],#4		@ pop out
-#if __ARM_ARCH__>=7
+#if __ARM_ARCH__>=7 && !defined(__STRICT_ALIGNMENT)
 #ifdef __ARMEL__
 	rev	$s0,$s0
 	rev	$s1,$s1
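
The guard ultimately selects between two ways of reading each 32-bit input word. Not part of the commit, but as a rough C sketch of the two paths, using hypothetical helper names (load_be32_bytewise, load_be32_word) where memcpy and __builtin_bswap32 stand in for the raw ldr and rev instructions:

#include <stdint.h>
#include <string.h>

/* Strict-alignment path: assemble the big-endian word byte by byte,
 * mirroring the ldrb/orr sequence kept when __STRICT_ALIGNMENT is defined. */
static uint32_t
load_be32_bytewise(const unsigned char *p)
{
	return ((uint32_t)p[0] << 24) | ((uint32_t)p[1] << 16) |
	    ((uint32_t)p[2] << 8) | (uint32_t)p[3];
}

/* ARMv7 fast path assumed by the original code: one (possibly unaligned)
 * 32-bit load, byte-swapped on little-endian ARM (__ARMEL__), which is
 * what traps on OpenBSD/armv7. */
static uint32_t
load_be32_word(const unsigned char *p)
{
	uint32_t w;

	memcpy(&w, p, sizeof(w));	/* stands in for the raw ldr */
#ifdef __ARMEL__
	w = __builtin_bswap32(w);	/* stands in for rev */
#endif
	return w;
}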