From: Peter Zijlstra <peterz@infradead.org>
Date: Sat, 4 Dec 2021 14:43:44 +0100
Subject: x86: Add straight-line-speculation mitigation
Git-commit: e463a09af2f0677b9485a7e8e4e70b396b2ffb6f
Patch-mainline: v5.17-rc1
References: bsc#1201050 CVE-2021-26341

Make use of an upcoming GCC feature to mitigate
straight-line-speculation for x86:

  https://gcc.gnu.org/g:53a643f8568067d7700a9f2facc8ba39974973d3
  https://gcc.gnu.org/bugzilla/show_bug.cgi?id=102952
  https://bugs.llvm.org/show_bug.cgi?id=52323

It's build-tested on x86_64-allyesconfig using GCC-12 and GCC-11.

Maintenance overhead of this should be fairly low due to objtool
validation.
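
For illustration only (not part of this patch, and exact output varies
by compiler version), a minimal sketch of what -mharden-sls=all does to
generated code:

  /* sls_demo.c -- a made-up example; build with:
   *   gcc -O2 -mharden-sls=all -S sls_demo.c
   */
  int sls_demo(int x)
  {
  	return x + 1;
  }

  /* Expected assembly, roughly: an int3 trap follows the ret, so a
   * CPU speculating straight past the return hits the trapping
   * instruction instead of executing whatever bytes come next:
   *
   *   sls_demo:
   *   	leal	1(%rdi), %eax
   *   	ret
   *   	int3
   */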

Size overhead of all these additional int3 instructions comes to:

     text	   data	    bss	    dec	    hex	filename
  22267751	6933356	2011368	31212475	1dc43bb	defconfig-build/vmlinux
  22804126	6933356	1470696	31208178	1dc32f2	defconfig-build/vmlinux.sls

Or roughly 2.4% additional text.

Signed-off-by: Peter Zijlstra (Intel) <peterz@infradead.org>
Signed-off-by: Borislav Petkov <bp@suse.de>
Link: https://lore.kernel.org/r/20211204134908.140103474@infradead.org
---
 arch/x86/Kconfig                     |    9 +++++++++
 arch/x86/Makefile                    |    4 ++++
 arch/x86/include/asm/linkage.h       |   10 ++++++++++
 arch/x86/include/asm/nospec-branch.h |    2 +-
 arch/x86/lib/memmove_64.S            |    2 +-
 5 files changed, 25 insertions(+), 2 deletions(-)

--- a/arch/x86/include/asm/linkage.h
+++ b/arch/x86/include/asm/linkage.h
@@ -21,9 +21,19 @@
 #define __ALIGN_STR	__stringify(__ALIGN)
 #endif
 
+#ifdef CONFIG_SLS
+#define RET	ret; int3
+#else
+#define RET	ret
+#endif
+
 #else /* __ASSEMBLY__ */
 
+#ifdef CONFIG_SLS
+#define ASM_RET	"ret; int3\n\t"
+#else
 #define ASM_RET	"ret\n\t"
+#endif
 
 #endif /* __ASSEMBLY__ */
 
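As a usage sketch (hypothetical code, not part of this patch): assembly
sources spell their returns RET, and C code that builds instructions in
strings uses ASM_RET, so both pick up the int3 automatically when
CONFIG_SLS=y:

  #include <asm/linkage.h>	/* for ASM_RET */

  /* In a .S file, a return is written with the new macro:
   *
   *   ENTRY(my_asm_func)		(hypothetical function)
   *   	xorl	%eax, %eax
   *   	RET				("ret; int3" under CONFIG_SLS)
   *   ENDPROC(my_asm_func)
   *
   * In C, inline assembly that ends in a return uses ASM_RET so the
   * emitted string carries the same int3:
   */
  asm(".globl my_c_stub\n"
      "my_c_stub:\n\t"
      "xorl %eax, %eax\n\t"
      ASM_RET);
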
--- a/arch/x86/include/asm/nospec-branch.h
+++ b/arch/x86/include/asm/nospec-branch.h
@@ -119,7 +119,7 @@
 	ANNOTATE_NOSPEC_ALTERNATIVE
 	ALTERNATIVE_2 __stringify(ANNOTATE_RETPOLINE_SAFE; jmp *\reg),	\
 		__stringify(RETPOLINE_JMP \reg), X86_FEATURE_RETPOLINE,	\
-		__stringify(lfence; ANNOTATE_RETPOLINE_SAFE; jmp *\reg), X86_FEATURE_RETPOLINE_AMD
+		__stringify(lfence; ANNOTATE_RETPOLINE_SAFE; jmp *\reg; int3), X86_FEATURE_RETPOLINE_AMD
 #else
 	jmp	*\reg
 #endif
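
For illustration (pseudo-assembly, not from the patch), the three
bodies this ALTERNATIVE_2 selects between:

  /* Alternative bodies for an indirect jump through \reg, sketched:
   *
   *   default (no retpoline):	jmp	*%reg
   *   X86_FEATURE_RETPOLINE:		RETPOLINE_JMP thunk sequence
   *   X86_FEATURE_RETPOLINE_AMD:	lfence; jmp *%reg; int3
   *
   * Only the lfence variant ends in a bare indirect jmp, so it is the
   * one that gains a trailing int3 to trap straight-line speculation
   * past the jump.
   */
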
--- a/arch/x86/Kconfig
+++ b/arch/x86/Kconfig
@@ -433,6 +433,15 @@ config RETPOLINE
 	  branches. Requires a compiler with -mindirect-branch=thunk-extern
 	  support for full protection. The kernel may run slower.
 
+config SLS
+	bool "Mitigate Straight-Line-Speculation"
+	depends on X86_64
+	default n
+	help
+	  Compile the kernel with straight-line-speculation options to guard
+	  against straight line speculation. The kernel image might be slightly
+	  larger.
+
 config X86_CPU_RESCTRL
 	bool "x86 CPU resource control support"
 	depends on X86 && (CPU_SUP_INTEL || CPU_SUP_AMD)
--- a/arch/x86/lib/memmove_64.S
+++ b/arch/x86/lib/memmove_64.S
@@ -42,7 +42,7 @@ ENTRY(__memmove)
 	jg 2f
 
 .Lmemmove_begin_forward:
-	ALTERNATIVE "", "movq %rdx, %rcx; rep movsb; RET", X86_FEATURE_ERMS
+	ALTERNATIVE "", __stringify(movq %rdx, %rcx; rep movsb; RET), X86_FEATURE_ERMS
 
 	/*
 	 * movsq instruction have many startup latency
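
The switch from a quoted string to __stringify() is what lets the new
RET macro expand; an illustration of the difference (the __stringify
definitions are the kernel's own, from include/linux/stringify.h):

  #define __stringify_1(x...)	#x
  #define __stringify(x...)	__stringify_1(x)

  /* The preprocessor never expands macros inside string literals, so
   * with CONFIG_SLS=y:
   *
   *   "movq %rdx, %rcx; rep movsb; RET"
   *       -> the assembler sees the unexpanded text RET and no int3
   *          is ever emitted
   *   __stringify(movq %rdx, %rcx; rep movsb; RET)
   *       -> RET expands first, yielding
   *          "movq %rdx, %rcx; rep movsb; ret; int3"
   */
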
--- a/arch/x86/Makefile
+++ b/arch/x86/Makefile
@@ -233,6 +233,10 @@ ifdef CONFIG_RETPOLINE
   KBUILD_CFLAGS += $(RETPOLINE_CFLAGS) -DRETPOLINE
 endif
 
+ifdef CONFIG_SLS
+  KBUILD_CFLAGS += $(call cc-option,-mharden-sls=all)
+endif
+
 archscripts: scripts_basic
 	$(Q)$(MAKE) $(build)=arch/x86/tools relocs
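
Note that $(call cc-option,-mharden-sls=all) only adds the flag when
the compiler accepts it, so enabling CONFIG_SLS with a toolchain that
lacks the option quietly builds an unhardened kernel rather than
failing the build.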