From: kettenis
Date: Sun, 7 Jan 2018 12:35:52 +0000 (+0000)
Subject: On OpenBSD/armv7 we deliberately trap unaligned access. Unfortunately
X-Git-Url: http://artulab.com/gitweb/?a=commitdiff_plain;h=404fe9f236d7a657e87bf80c27852df3619bb656;p=openbsd

On OpenBSD/armv7 we deliberately trap unaligned access. Unfortunately
the assembly code in libcrypto assumes unaligned access is allowed for
ARMv7. Make these paths conditional on __STRICT_ALIGNMENT not being
defined and define __STRICT_ALIGNMENT in arm_arch.h for OpenBSD.

ok tom@
---

diff --git a/lib/libcrypto/aes/asm/aes-armv4.pl b/lib/libcrypto/aes/asm/aes-armv4.pl
index 717cc1ed7f0..1cb9586d4b9 100644
--- a/lib/libcrypto/aes/asm/aes-armv4.pl
+++ b/lib/libcrypto/aes/asm/aes-armv4.pl
@@ -172,7 +172,7 @@ AES_encrypt:
 	mov	$rounds,r0		@ inp
 	mov	$key,r2
 	sub	$tbl,r3,#AES_encrypt-AES_Te	@ Te
-#if __ARM_ARCH__<7
+#if __ARM_ARCH__<7 || defined(__STRICT_ALIGNMENT)
 	ldrb	$s0,[$rounds,#3]	@ load input data in endian-neutral
 	ldrb	$t1,[$rounds,#2]	@ manner...
 	ldrb	$t2,[$rounds,#1]
@@ -216,7 +216,7 @@ AES_encrypt:
 	bl	_armv4_AES_encrypt

 	ldr	$rounds,[sp],#4		@ pop out
-#if __ARM_ARCH__>=7
+#if __ARM_ARCH__>=7 && !defined(__STRICT_ALIGNMENT)
 #ifdef __ARMEL__
 	rev	$s0,$s0
 	rev	$s1,$s1
@@ -432,7 +432,7 @@ _armv4_AES_set_encrypt_key:
 	mov	lr,r1			@ bits
 	mov	$key,r2			@ key

-#if __ARM_ARCH__<7
+#if __ARM_ARCH__<7 || defined(__STRICT_ALIGNMENT)
 	ldrb	$s0,[$rounds,#3]	@ load input data in endian-neutral
 	ldrb	$t1,[$rounds,#2]	@ manner...
 	ldrb	$t2,[$rounds,#1]
@@ -517,7 +517,7 @@ _armv4_AES_set_encrypt_key:
 	b	.Ldone

 .Lnot128:
-#if __ARM_ARCH__<7
+#if __ARM_ARCH__<7 || defined(__STRICT_ALIGNMENT)
 	ldrb	$i2,[$rounds,#19]
 	ldrb	$t1,[$rounds,#18]
 	ldrb	$t2,[$rounds,#17]
@@ -588,7 +588,7 @@ _armv4_AES_set_encrypt_key:
 	b	.L192_loop

 .Lnot192:
-#if __ARM_ARCH__<7
+#if __ARM_ARCH__<7 || defined(__STRICT_ALIGNMENT)
 	ldrb	$i2,[$rounds,#27]
 	ldrb	$t1,[$rounds,#26]
 	ldrb	$t2,[$rounds,#25]
@@ -888,7 +888,7 @@ AES_decrypt:
 	mov	$rounds,r0		@ inp
 	mov	$key,r2
 	sub	$tbl,r3,#AES_decrypt-AES_Td	@ Td
-#if __ARM_ARCH__<7
+#if __ARM_ARCH__<7 || defined(__STRICT_ALIGNMENT)
 	ldrb	$s0,[$rounds,#3]	@ load input data in endian-neutral
 	ldrb	$t1,[$rounds,#2]	@ manner...
 	ldrb	$t2,[$rounds,#1]
@@ -932,7 +932,7 @@ AES_decrypt:
 	bl	_armv4_AES_decrypt

 	ldr	$rounds,[sp],#4		@ pop out
-#if __ARM_ARCH__>=7
+#if __ARM_ARCH__>=7 && !defined(__STRICT_ALIGNMENT)
 #ifdef __ARMEL__
 	rev	$s0,$s0
 	rev	$s1,$s1
diff --git a/lib/libcrypto/arm_arch.h b/lib/libcrypto/arm_arch.h
index 3304be81ab1..8b8a05b5f71 100644
--- a/lib/libcrypto/arm_arch.h
+++ b/lib/libcrypto/arm_arch.h
@@ -1,4 +1,4 @@
-/* $OpenBSD: arm_arch.h,v 1.7 2015/06/29 06:40:06 jsg Exp $ */
+/* $OpenBSD: arm_arch.h,v 1.8 2018/01/07 12:35:52 kettenis Exp $ */
 #ifndef __ARM_ARCH_H__
 #define __ARM_ARCH_H__

@@ -44,4 +44,8 @@ extern unsigned int OPENSSL_armcap_P;
 #define ARMV7_NEON	(1<<0)
 #endif

+#if defined(__OpenBSD__)
+#define __STRICT_ALIGNMENT
+#endif
+
 #endif
diff --git a/lib/libcrypto/sha/asm/sha1-armv4-large.pl b/lib/libcrypto/sha/asm/sha1-armv4-large.pl
index 33da3e0e3c0..8f0cdaf83c8 100644
--- a/lib/libcrypto/sha/asm/sha1-armv4-large.pl
+++ b/lib/libcrypto/sha/asm/sha1-armv4-large.pl
@@ -95,7 +95,7 @@ ___
 sub BODY_00_15 {
 my ($a,$b,$c,$d,$e)=@_;
 $code.=<<___;
-#if __ARM_ARCH__<7
+#if __ARM_ARCH__<7 || defined(__STRICT_ALIGNMENT)
 	ldrb	$t1,[$inp,#2]
 	ldrb	$t0,[$inp,#3]
 	ldrb	$t2,[$inp,#1]
diff --git a/lib/libcrypto/sha/asm/sha256-armv4.pl b/lib/libcrypto/sha/asm/sha256-armv4.pl
index 9c84e8d93c3..292520731cd 100644
--- a/lib/libcrypto/sha/asm/sha256-armv4.pl
+++ b/lib/libcrypto/sha/asm/sha256-armv4.pl
@@ -51,7 +51,7 @@ sub BODY_00_15 {
 my ($i,$a,$b,$c,$d,$e,$f,$g,$h) = @_;

 $code.=<<___ if ($i<16);
-#if __ARM_ARCH__>=7
+#if __ARM_ARCH__>=7 && !defined(__STRICT_ALIGNMENT)
 	ldr	$T1,[$inp],#4
 #else
 	ldrb	$T1,[$inp,#3]			@ $i
@@ -70,7 +70,7 @@ $code.=<<___;
 	eor	$t1,$f,$g
 #if $i>=16
 	add	$T1,$T1,$t3			@ from BODY_16_xx
-#elif __ARM_ARCH__>=7 && defined(__ARMEL__)
+#elif __ARM_ARCH__>=7 && defined(__ARMEL__) && !defined(__STRICT_ALIGNMENT)
 	rev	$T1,$T1
 #endif
 #if $i==15
diff --git a/lib/libcrypto/sha/asm/sha512-armv4.pl b/lib/libcrypto/sha/asm/sha512-armv4.pl
index 7faf37b1479..28ae155f4b4 100644
--- a/lib/libcrypto/sha/asm/sha512-armv4.pl
+++ b/lib/libcrypto/sha/asm/sha512-armv4.pl
@@ -270,7 +270,7 @@ sha512_block_data_order:
 	str	$Thi,[sp,#$Foff+4]

 .L00_15:
-#if __ARM_ARCH__<7
+#if __ARM_ARCH__<7 || defined(__STRICT_ALIGNMENT)
 	ldrb	$Tlo,[$inp,#7]
 	ldrb	$t0, [$inp,#6]
 	ldrb	$t1, [$inp,#5]
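
For context on the two paths the patch toggles: the fast path does a single
word load (ldr) and, on little-endian, a byte swap (rev), which only works
if the hardware tolerates unaligned addresses; the strict-alignment path
assembles each word from individual bytes (ldrb), which is safe at any
alignment. A minimal C sketch of the same trade-off follows; the helper
load_be32 and its exact shape are illustrative, not code from libcrypto.

	#include <stdint.h>

	/*
	 * Big-endian load of 4 bytes, mirroring what the generated
	 * assembly does.  load_be32 is a hypothetical helper.
	 */
	static uint32_t
	load_be32(const unsigned char *in)
	{
	#if defined(__STRICT_ALIGNMENT)
		/*
		 * Byte-wise load, equivalent to the ldrb sequences in
		 * the patched code: correct for any alignment and any
		 * endianness.
		 */
		return (uint32_t)in[0] << 24 | (uint32_t)in[1] << 16 |
		    (uint32_t)in[2] << 8 | (uint32_t)in[3];
	#else
		/*
		 * Single word load, then swap on little-endian; this is
		 * what the __ARM_ARCH__>=7 path (ldr + rev) relies on.
		 * It traps on OpenBSD/armv7 when `in` is unaligned (and
		 * an unaligned dereference is undefined behaviour in C).
		 */
		uint32_t w = *(const uint32_t *)(const void *)in;
	#ifdef __ARMEL__
		w = __builtin_bswap32(w);	/* like the rev instruction */
	#endif
		return w;
	#endif
	}

OpenBSD/armv7 takes the byte-wise branch because the word load traps on
unaligned input pointers there; platforms that permit unaligned access
keep the cheaper single load.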