From a5910b4fbcaa3dd48faff38aeadcb22c953af71e Mon Sep 17 00:00:00 2001 From: deraadt Date: Tue, 10 Jul 2018 16:01:26 +0000 Subject: [PATCH] In asm.h ensure NENTRY uses the old-school nop-sled align, but change the standard ENTRY to be a trapsled. Fix a few functions which fall-through into an ENTRY macro. amd64 binaries now are free of double+-nop sequences (except for one assembler nit in aes-586.pl). Previous changes by guenther got us here. ok mortimer kettenis --- lib/libc/arch/amd64/string/memmove.S | 2 +- sys/arch/amd64/amd64/locore.S | 4 +--- sys/arch/amd64/amd64/locore0.S | 4 +--- sys/arch/amd64/amd64/spl.S | 4 +--- sys/arch/amd64/amd64/vector.S | 4 +--- sys/arch/amd64/include/asm.h | 8 +++++--- sys/lib/libkern/arch/amd64/htonl.S | 4 ++-- sys/lib/libkern/arch/amd64/htons.S | 4 ++-- sys/lib/libkern/arch/amd64/memmove.S | 2 +- 9 files changed, 15 insertions(+), 21 deletions(-) diff --git a/lib/libc/arch/amd64/string/memmove.S b/lib/libc/arch/amd64/string/memmove.S index 496a0a37930..9df41afb096 100644 --- a/lib/libc/arch/amd64/string/memmove.S +++ b/lib/libc/arch/amd64/string/memmove.S @@ -44,7 +44,7 @@ ENTRY(bcopy) xchgq %rdi,%rsi /* fall into memmove */ -ENTRY(memmove) +NENTRY(memmove) RETGUARD_SETUP(memmove, r10) movq %rdi,%r11 /* save dest */ movq %rdx,%rcx diff --git a/sys/arch/amd64/amd64/locore.S b/sys/arch/amd64/amd64/locore.S index b1f85ab6137..05ec5fb2d37 100644 --- a/sys/arch/amd64/amd64/locore.S +++ b/sys/arch/amd64/amd64/locore.S @@ -1,4 +1,4 @@ -/* $OpenBSD: locore.S,v 1.103 2018/07/03 23:21:15 mortimer Exp $ */ +/* $OpenBSD: locore.S,v 1.104 2018/07/10 16:01:26 deraadt Exp $ */ /* $NetBSD: locore.S,v 1.13 2004/03/25 18:33:17 drochner Exp $ */ /* @@ -128,8 +128,6 @@ * override user-land alignment before including asm.h */ #define ALIGN_DATA .align 8,0xcc -#define ALIGN_TEXT .align 16,0x90 -#define _ALIGN_TEXT ALIGN_TEXT #include diff --git a/sys/arch/amd64/amd64/locore0.S b/sys/arch/amd64/amd64/locore0.S index 662b58fb5bc..67f6f75ea99 100644 --- 
a/sys/arch/amd64/amd64/locore0.S +++ b/sys/arch/amd64/amd64/locore0.S @@ -1,4 +1,4 @@ -/* $OpenBSD: locore0.S,v 1.8 2018/05/22 15:55:30 guenther Exp $ */ +/* $OpenBSD: locore0.S,v 1.9 2018/07/10 16:01:26 deraadt Exp $ */ /* $NetBSD: locore.S,v 1.13 2004/03/25 18:33:17 drochner Exp $ */ /* @@ -116,8 +116,6 @@ * override user-land alignment before including asm.h */ #define ALIGN_DATA .align 8,0xcc -#define ALIGN_TEXT .align 16,0x90 -#define _ALIGN_TEXT ALIGN_TEXT #include diff --git a/sys/arch/amd64/amd64/spl.S b/sys/arch/amd64/amd64/spl.S index 063dbe7db81..c4782552d5c 100644 --- a/sys/arch/amd64/amd64/spl.S +++ b/sys/arch/amd64/amd64/spl.S @@ -1,4 +1,4 @@ -/* $OpenBSD: spl.S,v 1.16 2018/07/03 23:21:15 mortimer Exp $ */ +/* $OpenBSD: spl.S,v 1.17 2018/07/10 16:01:26 deraadt Exp $ */ /* $NetBSD: spl.S,v 1.3 2004/06/28 09:13:11 fvdl Exp $ */ /* @@ -65,8 +65,6 @@ * POSSIBILITY OF SUCH DAMAGE. */ -#define ALIGN_TEXT .align 16,0x90 - #include #include #include diff --git a/sys/arch/amd64/amd64/vector.S b/sys/arch/amd64/amd64/vector.S index d9c42d5b03c..0dc7f2e5dd7 100644 --- a/sys/arch/amd64/amd64/vector.S +++ b/sys/arch/amd64/amd64/vector.S @@ -1,4 +1,4 @@ -/* $OpenBSD: vector.S,v 1.70 2018/07/10 08:57:44 guenther Exp $ */ +/* $OpenBSD: vector.S,v 1.71 2018/07/10 16:01:26 deraadt Exp $ */ /* $NetBSD: vector.S,v 1.5 2004/06/28 09:13:11 fvdl Exp $ */ /* @@ -65,8 +65,6 @@ * POSSIBILITY OF SUCH DAMAGE. 
*/ -#define ALIGN_TEXT .align 16,0x90 - #include #include #include diff --git a/sys/arch/amd64/include/asm.h b/sys/arch/amd64/include/asm.h index 110beb7c5c3..0ef11752c02 100644 --- a/sys/arch/amd64/include/asm.h +++ b/sys/arch/amd64/include/asm.h @@ -1,4 +1,4 @@ -/* $OpenBSD: asm.h,v 1.13 2018/07/01 16:02:12 guenther Exp $ */ +/* $OpenBSD: asm.h,v 1.14 2018/07/10 16:01:26 deraadt Exp $ */ /* $NetBSD: asm.h,v 1.2 2003/05/02 18:05:47 yamt Exp $ */ /*- @@ -66,6 +66,8 @@ #define _ALIGN_TRAPS .align 16, 0xcc #define _ENTRY(x) \ + .text; _ALIGN_TRAPS; .globl x; .type x,@function; x: +#define _NENTRY(x) \ .text; _ALIGN_TEXT; .globl x; .type x,@function; x: #ifdef _KERNEL @@ -137,8 +139,8 @@ #endif #define ENTRY(y) _ENTRY(_C_LABEL(y)); _PROF_PROLOGUE -#define NENTRY(y) _ENTRY(_C_LABEL(y)) -#define ASENTRY(y) _ENTRY(_ASM_LABEL(y)); _PROF_PROLOGUE +#define NENTRY(y) _NENTRY(_C_LABEL(y)) +#define ASENTRY(y) _NENTRY(_ASM_LABEL(y)); _PROF_PROLOGUE #define END(y) .size y, . - y #define STRONG_ALIAS(alias,sym) \ diff --git a/sys/lib/libkern/arch/amd64/htonl.S b/sys/lib/libkern/arch/amd64/htonl.S index f7d640521d1..07965cd1fd9 100644 --- a/sys/lib/libkern/arch/amd64/htonl.S +++ b/sys/lib/libkern/arch/amd64/htonl.S @@ -41,8 +41,8 @@ #include _ENTRY(_C_LABEL(htonl)) -_ENTRY(_C_LABEL(ntohl)) -_ENTRY(_C_LABEL(bswap32)) +_NENTRY(_C_LABEL(ntohl)) +_NENTRY(_C_LABEL(bswap32)) _PROF_PROLOGUE RETGUARD_SETUP(htonl, r11) movl %edi,%eax diff --git a/sys/lib/libkern/arch/amd64/htons.S b/sys/lib/libkern/arch/amd64/htons.S index 15016f5736e..3a702594733 100644 --- a/sys/lib/libkern/arch/amd64/htons.S +++ b/sys/lib/libkern/arch/amd64/htons.S @@ -41,8 +41,8 @@ #include _ENTRY(_C_LABEL(htons)) -_ENTRY(_C_LABEL(ntohs)) -_ENTRY(_C_LABEL(bswap16)) +_NENTRY(_C_LABEL(ntohs)) +_NENTRY(_C_LABEL(bswap16)) _PROF_PROLOGUE RETGUARD_SETUP(htons, r11) movl %edi,%eax diff --git a/sys/lib/libkern/arch/amd64/memmove.S b/sys/lib/libkern/arch/amd64/memmove.S index 3a0bed88790..71d5b007f41 100644 --- 
a/sys/lib/libkern/arch/amd64/memmove.S +++ b/sys/lib/libkern/arch/amd64/memmove.S @@ -44,7 +44,7 @@ ENTRY(bcopy) xchgq %rdi,%rsi /* fall into memmove */ -ENTRY(memmove) +NENTRY(memmove) RETGUARD_SETUP(memmove, r10) movq %rdi,%r11 /* save dest */ movq %rdx,%rcx -- 2.20.1