path: root/src/lib/libcrypto/sha
author	kettenis <>	2018-01-07 12:35:52 +0000
committer	kettenis <>	2018-01-07 12:35:52 +0000
commit	b5565d9f20514e6aee34a8beb128ad836d739bf9 (patch)
tree	d4ae0f75dc0e9b838b63ab32052ba4154f16910d /src/lib/libcrypto/sha
parent	bf59c7f1f430d5203f39c3206e7df506fe74264b (diff)
On OpenBSD/armv7 we deliberately trap unaligned access. Unfortunately the
assembly code in libcrypto assumes unaligned access is allowed for ARMv7.
Make these paths conditional on __STRICT_ALIGNMENT not being defined and
define __STRICT_ALIGNMENT in arm_arch.h for OpenBSD.

ok tom@
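
For illustration only, a minimal C sketch of the two load strategies the added
preprocessor guards choose between; load_be32 is a hypothetical helper, not
code from libcrypto, and the assembly below implements the same idea directly
with ldrb versus ldr/rev.

    /*
     * Hypothetical sketch: assemble a 32-bit big-endian message word
     * from the input pointer under strict-alignment rules.
     */
    #include <stdint.h>

    static uint32_t
    load_be32(const unsigned char *p)
    {
    #if defined(__STRICT_ALIGNMENT)
        /* Byte loads (the ldrb path): valid for any alignment of p. */
        return (uint32_t)p[0] << 24 | (uint32_t)p[1] << 16 |
            (uint32_t)p[2] << 8 | (uint32_t)p[3];
    #else
        /* Direct word load, byte-swapped on little-endian (the ldr/rev
         * path); assumes unaligned word loads are allowed, which is
         * exactly the assumption that traps on OpenBSD/armv7. */
        uint32_t w = *(const uint32_t *)p;
    #if defined(__ARMEL__)
        w = __builtin_bswap32(w);
    #endif
        return w;
    #endif
    }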
Diffstat (limited to 'src/lib/libcrypto/sha')
 src/lib/libcrypto/sha/asm/sha1-armv4-large.pl | 2 +-
 src/lib/libcrypto/sha/asm/sha256-armv4.pl     | 4 ++--
 src/lib/libcrypto/sha/asm/sha512-armv4.pl     | 2 +-
 3 files changed, 4 insertions(+), 4 deletions(-)
diff --git a/src/lib/libcrypto/sha/asm/sha1-armv4-large.pl b/src/lib/libcrypto/sha/asm/sha1-armv4-large.pl
index 33da3e0e3c..8f0cdaf83c 100644
--- a/src/lib/libcrypto/sha/asm/sha1-armv4-large.pl
+++ b/src/lib/libcrypto/sha/asm/sha1-armv4-large.pl
@@ -95,7 +95,7 @@ ___
 sub BODY_00_15 {
 my ($a,$b,$c,$d,$e)=@_;
 $code.=<<___;
-#if __ARM_ARCH__<7
+#if __ARM_ARCH__<7 || defined(__STRICT_ALIGNMENT)
 	ldrb	$t1,[$inp,#2]
 	ldrb	$t0,[$inp,#3]
 	ldrb	$t2,[$inp,#1]
diff --git a/src/lib/libcrypto/sha/asm/sha256-armv4.pl b/src/lib/libcrypto/sha/asm/sha256-armv4.pl
index 9c84e8d93c..292520731c 100644
--- a/src/lib/libcrypto/sha/asm/sha256-armv4.pl
+++ b/src/lib/libcrypto/sha/asm/sha256-armv4.pl
@@ -51,7 +51,7 @@ sub BODY_00_15 {
 my ($i,$a,$b,$c,$d,$e,$f,$g,$h) = @_;
 
 $code.=<<___ if ($i<16);
-#if __ARM_ARCH__>=7
+#if __ARM_ARCH__>=7 && !defined(__STRICT_ALIGNMENT)
 	ldr	$T1,[$inp],#4
 #else
 	ldrb	$T1,[$inp,#3]	@ $i
@@ -70,7 +70,7 @@ $code.=<<___;
 	eor	$t1,$f,$g
 #if $i>=16
 	add	$T1,$T1,$t3		@ from BODY_16_xx
-#elif __ARM_ARCH__>=7 && defined(__ARMEL__)
+#elif __ARM_ARCH__>=7 && defined(__ARMEL__) && !defined(__STRICT_ALIGNMENT)
 	rev	$T1,$T1
 #endif
 #if $i==15
diff --git a/src/lib/libcrypto/sha/asm/sha512-armv4.pl b/src/lib/libcrypto/sha/asm/sha512-armv4.pl
index 7faf37b147..28ae155f4b 100644
--- a/src/lib/libcrypto/sha/asm/sha512-armv4.pl
+++ b/src/lib/libcrypto/sha/asm/sha512-armv4.pl
@@ -270,7 +270,7 @@ sha512_block_data_order:
 	str	$Thi,[sp,#$Foff+4]
 
 .L00_15:
-#if __ARM_ARCH__<7
+#if __ARM_ARCH__<7 || defined(__STRICT_ALIGNMENT)
 	ldrb	$Tlo,[$inp,#7]
 	ldrb	$t0, [$inp,#6]
 	ldrb	$t1, [$inp,#5]