On Wed, Nov 7, 2018 at 5:54 PM, Mark Rutland <mark.rutland@xxxxxxx> wrote:

[...]

>> --- a/arch/arm64/Makefile
>> +++ b/arch/arm64/Makefile
>> @@ -94,7 +94,7 @@ endif
>>  # KASAN_SHADOW_OFFSET = VA_START + (1 << (VA_BITS - KASAN_SHADOW_SCALE_SHIFT))
>>  #                               - (1 << (64 - KASAN_SHADOW_SCALE_SHIFT))
>>  # in 32-bit arithmetic
>> -KASAN_SHADOW_SCALE_SHIFT := 3
>> +KASAN_SHADOW_SCALE_SHIFT := $(if $(CONFIG_KASAN_SW_TAGS), 4, 3)
>
>
> We could make this something like:
>
> ifeq ($(CONFIG_KASAN_SW_TAGS), y)
> KASAN_SHADOW_SCALE_SHIFT := 4
> else
> KASAN_SHADOW_SCALE_SHIFT := 3
> endif
>
> KBUILD_CFLAGS += -DKASAN_SHADOW_SCALE_SHIFT=$(KASAN_SHADOW_SCALE_SHIFT)

Seems that we need the same for KBUILD_CPPFLAGS and KBUILD_AFLAGS.

>> diff --git a/arch/arm64/include/asm/memory.h b/arch/arm64/include/asm/memory.h
>> index b96442960aea..0f1e024a951f 100644
>> --- a/arch/arm64/include/asm/memory.h
>> +++ b/arch/arm64/include/asm/memory.h
>> @@ -74,12 +74,17 @@
>>  #define KERNEL_END        _end
>>
>>  /*
>> - * KASAN requires 1/8th of the kernel virtual address space for the shadow
>> - * region. KASAN can bloat the stack significantly, so double the (minimum)
>> - * stack size when KASAN is in use.
>> + * Generic and tag-based KASAN require 1/8th and 1/16th of the kernel virtual
>> + * address space for the shadow region respectively. They can bloat the stack
>> + * significantly, so double the (minimum) stack size when they are in use.
>>   */
>> -#ifdef CONFIG_KASAN
>> +#ifdef CONFIG_KASAN_GENERIC
>>  #define KASAN_SHADOW_SCALE_SHIFT 3
>> +#endif
>> +#ifdef CONFIG_KASAN_SW_TAGS
>> +#define KASAN_SHADOW_SCALE_SHIFT 4
>> +#endif
>> +#ifdef CONFIG_KASAN
>
> ... and remove the constant entirely here, avoiding duplication.
>
> Maybe factor that into a Makefile.kasan if things are going to get much
> more complicated.

Will do in v11, thanks!
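
For reference, a rough sketch of what the combined Makefile change could look
like, with the define passed to the C, preprocessor and assembly flags as
mentioned above (the exact placement in arch/arm64/Makefile, and whether this
ends up in a separate Makefile.kasan, is an assumption rather than the final
v11 patch):

    # Choose the shadow scale based on the KASAN mode:
    # generic KASAN uses 1 shadow byte per 8 bytes (shift 3),
    # software tag-based KASAN uses 1 shadow byte per 16 bytes (shift 4).
    ifeq ($(CONFIG_KASAN_SW_TAGS), y)
    KASAN_SHADOW_SCALE_SHIFT := 4
    else
    KASAN_SHADOW_SCALE_SHIFT := 3
    endif

    # Export the value to C, preprocessed headers and assembly alike,
    # so it is defined in a single place.
    KBUILD_CFLAGS   += -DKASAN_SHADOW_SCALE_SHIFT=$(KASAN_SHADOW_SCALE_SHIFT)
    KBUILD_CPPFLAGS += -DKASAN_SHADOW_SCALE_SHIFT=$(KASAN_SHADOW_SCALE_SHIFT)
    KBUILD_AFLAGS   += -DKASAN_SHADOW_SCALE_SHIFT=$(KASAN_SHADOW_SCALE_SHIFT)

With the constant coming in on the command line for every translation unit,
the duplicated #define blocks in memory.h could then be dropped entirely, as
suggested above.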