Diffstat (limited to 'security/Kconfig.hardening')
-rw-r--r--  security/Kconfig.hardening  71
1 files changed, 51 insertions, 20 deletions
diff --git a/security/Kconfig.hardening b/security/Kconfig.hardening
index a56c36470cb1..90cbaff86e13 100644
--- a/security/Kconfig.hardening
+++ b/security/Kconfig.hardening
@@ -29,6 +29,7 @@ choice
prompt "Initialize kernel stack variables at function entry"
default GCC_PLUGIN_STRUCTLEAK_BYREF_ALL if COMPILE_TEST && GCC_PLUGINS
default INIT_STACK_ALL_PATTERN if COMPILE_TEST && CC_HAS_AUTO_VAR_INIT_PATTERN
+ default INIT_STACK_ALL_ZERO if CC_HAS_AUTO_VAR_INIT_PATTERN
default INIT_STACK_NONE
help
This option enables initialization of stack variables at
@@ -39,11 +40,11 @@ choice
syscalls.
This chooses the level of coverage over classes of potentially
- uninitialized variables. The selected class will be
+ uninitialized variables. The selected class of variable will be
initialized before use in a function.
config INIT_STACK_NONE
- bool "no automatic initialization (weakest)"
+ bool "no automatic stack variable initialization (weakest)"
help
Disable automatic stack variable initialization.
This leaves the kernel vulnerable to the standard
@@ -80,7 +81,7 @@ choice
and is disallowed.
config GCC_PLUGIN_STRUCTLEAK_BYREF_ALL
- bool "zero-init anything passed by reference (very strong)"
+ bool "zero-init everything passed by reference (very strong)"
depends on GCC_PLUGINS
depends on !(KASAN && KASAN_STACK)
select GCC_PLUGIN_STRUCTLEAK
@@ -91,33 +92,44 @@ choice
of uninitialized stack variable exploits and information
exposures.
+ As a side-effect, this keeps a lot of variables on the
+ stack that can otherwise be optimized out, so combining
+ this with CONFIG_KASAN_STACK can lead to a stack overflow
+ and is disallowed.
+
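
To illustrate the by-reference case this option covers, here is a minimal sketch (the struct and function names are hypothetical, not taken from this patch): only the address of the local escapes, so the compiler cannot prove every field is written before the struct is copied out, and the plugin therefore forces the whole variable to zero at function entry.

    #include <string.h>

    struct reply {
        int id;
        int status;
    };

    /* Hypothetical callee that only fills in one field. */
    static void fill_reply(struct reply *r)
    {
        r->id = 42;        /* r->status is left untouched */
    }

    /* 'r' is never initialized directly, only passed by reference. */
    static int send_reply(char *out)
    {
        struct reply r;

        fill_reply(&r);
        /*
         * Without help, this copies stale stack bytes in r.status.
         * With GCC_PLUGIN_STRUCTLEAK_BYREF_ALL, 'r' is zeroed at
         * function entry, so no old stack contents can leak here.
         */
        memcpy(out, &r, sizeof(r));
        return 0;
    }

    int main(void)
    {
        char buf[sizeof(struct reply)];
        return send_reply(buf);
    }
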
config INIT_STACK_ALL_PATTERN
- bool "0xAA-init everything on the stack (strongest)"
+ bool "pattern-init everything (strongest)"
depends on CC_HAS_AUTO_VAR_INIT_PATTERN
help
- Initializes everything on the stack with a 0xAA
- pattern. This is intended to eliminate all classes
- of uninitialized stack variable exploits and information
- exposures, even variables that were warned to have been
- left uninitialized.
+ Initializes everything on the stack (including padding)
+ with a specific debug value. This is intended to eliminate
+ all classes of uninitialized stack variable exploits and
+ information exposures, even variables that were warned about
+ having been left uninitialized.
Pattern initialization is known to provoke many existing bugs
related to uninitialized locals, e.g. pointers receive
- non-NULL values, buffer sizes and indices are very big.
+ non-NULL values, buffer sizes and indices are very big. The
+ pattern is situation-specific; Clang on 64-bit uses 0xAA
+ repeating for all types and padding except float and double
+ which use 0xFF repeating (-NaN). Clang on 32-bit uses 0xFF
+ repeating for all types and padding.
config INIT_STACK_ALL_ZERO
- bool "zero-init everything on the stack (strongest and safest)"
+ bool "zero-init everything (strongest and safest)"
depends on CC_HAS_AUTO_VAR_INIT_ZERO
help
- Initializes everything on the stack with a zero
- value. This is intended to eliminate all classes
- of uninitialized stack variable exploits and information
- exposures, even variables that were warned to have been
- left uninitialized.
-
- Zero initialization provides safe defaults for strings,
- pointers, indices and sizes, and is therefore
- more suitable as a security mitigation measure.
+ Initializes everything on the stack (including padding)
+ with a zero value. This is intended to eliminate all
+ classes of uninitialized stack variable exploits and
+ information exposures, even variables that were warned
+ about having been left uninitialized.
+
+ Zero initialization provides safe defaults for strings
+ (immediately NUL-terminated), pointers (NULL), indices
+ (index 0), and sizes (0 length), so it is therefore more
+ suitable as a production security mitigation than pattern
+ initialization.
endchoice
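
As a rough userspace illustration of the difference between the two modes above (a sketch, not kernel code; it assumes Clang and the -ftrivial-auto-var-init= flags these options select, and the function name is made up):

    #include <stdio.h>

    static void show_uninitialized_local(void)
    {
        unsigned char buf[8];    /* deliberately left uninitialized */

        for (unsigned int i = 0; i < sizeof(buf); i++)
            printf("%02x ", buf[i]);
        printf("\n");
    }

    int main(void)
    {
        /*
         * Built plainly, this prints whatever happened to be on the
         * stack. Built with the flag behind INIT_STACK_ALL_PATTERN
         * (clang -ftrivial-auto-var-init=pattern) the bytes come out
         * as the fixed debug pattern, e.g. "aa aa aa ..." on 64-bit.
         * Built with the flag behind INIT_STACK_ALL_ZERO
         * (-ftrivial-auto-var-init=zero) they come out as "00 00 ...".
         */
        show_uninitialized_local();
        return 0;
    }

The zero case is what provides the safe NUL/NULL/0 defaults described in the help text, which is why it is the one recommended for production use.
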
@@ -217,6 +229,25 @@ config INIT_ON_FREE_DEFAULT_ON
touching "cold" memory areas. Most cases see 3-5% impact. Some
synthetic workloads have measured as high as 8%.
+config CC_HAS_ZERO_CALL_USED_REGS
+ def_bool $(cc-option,-fzero-call-used-regs=used-gpr)
+
+config ZERO_CALL_USED_REGS
+ bool "Enable register zeroing on function exit"
+ depends on CC_HAS_ZERO_CALL_USED_REGS
+ help
+ At the end of functions, always zero any caller-used register
+ contents. This helps ensure that temporary values are not
+ leaked beyond the function boundary. This means that register
+ contents are less likely to be available for side channels
+ and information exposures. Additionally, this helps reduce the
+ number of useful ROP gadgets by about 20% (and removes compiler
+ generated "write-what-where" gadgets) in the resulting kernel
+ image. This has a less than 1% performance impact on most
+ workloads. Image size growth depends on architecture, and should
+ be evaluated for suitability. For example, x86_64 grows by less
+ than 1%, and arm64 grows by about 5%.
+
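
For a concrete sense of what the new option changes, here is a small sketch (the function is made up; the flag is the one probed by CC_HAS_ZERO_CALL_USED_REGS above). Compiling it to assembly with and without the flag and diffing the output shows extra register-clearing instructions in the epilogue:

    /*
     * Try, with a GCC or Clang new enough to support the flag:
     *   cc -O2 -S -o scrub-plain.s  scrub.c
     *   cc -O2 -S -fzero-call-used-regs=used-gpr -o scrub-zeroed.s scrub.c
     *
     * With the flag, the call-used general-purpose registers the
     * function actually touched (other than the one carrying the
     * return value) are cleared just before 'ret', so intermediate
     * values do not linger for later code, ROP gadgets, or side
     * channels to observe.
     */
    unsigned long mix_secret(unsigned long secret)
    {
        return secret * 2654435761ul + 0x9e3779b9ul;
    }
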
endmenu
endmenu