For the hardware tag-based mode, kasan_poison_memory() already rounds up
the size to KASAN_GRANULE_SIZE. Do the same for the software modes and
remove round_up() from the common code.

Signed-off-by: Andrey Konovalov <andreyknvl@xxxxxxxxxx>
Link: https://linux-review.googlesource.com/id/Ib397128fac6eba874008662b4964d65352db4aa4
---
 mm/kasan/common.c | 8 ++------
 mm/kasan/shadow.c | 1 +
 2 files changed, 3 insertions(+), 6 deletions(-)

diff --git a/mm/kasan/common.c b/mm/kasan/common.c
index 5622b0ec0907..983383ebe32a 100644
--- a/mm/kasan/common.c
+++ b/mm/kasan/common.c
@@ -215,9 +215,7 @@ void __kasan_unpoison_object_data(struct kmem_cache *cache, void *object)
 
 void __kasan_poison_object_data(struct kmem_cache *cache, void *object)
 {
-	kasan_poison_memory(object,
-			round_up(cache->object_size, KASAN_GRANULE_SIZE),
-			KASAN_KMALLOC_REDZONE);
+	kasan_poison_memory(object, cache->object_size, KASAN_KMALLOC_REDZONE);
 }
 
 /*
@@ -290,7 +288,6 @@ static bool ____kasan_slab_free(struct kmem_cache *cache, void *object,
 {
 	u8 tag;
 	void *tagged_object;
-	unsigned long rounded_up_size;
 
 	tag = get_tag(object);
 	tagged_object = object;
@@ -311,8 +308,7 @@ static bool ____kasan_slab_free(struct kmem_cache *cache, void *object,
 		return true;
 	}
 
-	rounded_up_size = round_up(cache->object_size, KASAN_GRANULE_SIZE);
-	kasan_poison_memory(object, rounded_up_size, KASAN_KMALLOC_FREE);
+	kasan_poison_memory(object, cache->object_size, KASAN_KMALLOC_FREE);
 
 	if (static_branch_unlikely(&kasan_stack)) {
 		if ((IS_ENABLED(CONFIG_KASAN_GENERIC) && !quarantine) ||
diff --git a/mm/kasan/shadow.c b/mm/kasan/shadow.c
index 616ac64c4a21..ab1d39c566b9 100644
--- a/mm/kasan/shadow.c
+++ b/mm/kasan/shadow.c
@@ -82,6 +82,7 @@ void kasan_poison_memory(const void *address, size_t size, u8 value)
 	 * addresses to this function.
 	 */
 	address = reset_tag(address);
+	size = round_up(size, KASAN_GRANULE_SIZE);
 
 	shadow_start = kasan_mem_to_shadow(address);
 	shadow_end = kasan_mem_to_shadow(address + size);

-- 
2.29.0.rc1.297.gfa9743e501-goog
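
For illustration, here is a minimal user-space sketch of the rounding that
kasan_poison_memory() now performs internally. It is not kernel code:
round_up_size() is a simplified stand-in for the kernel's round_up(), and
the 8-byte granule used below is an assumption matching KASAN_GRANULE_SIZE
for the software modes.

/* Standalone sketch, not kernel code: shows how an object size is
 * rounded up to the KASAN granule before the shadow is poisoned.
 */
#include <stdio.h>
#include <stddef.h>

#define GRANULE_SIZE 8	/* assumed KASAN_GRANULE_SIZE for software modes */

/* Simplified stand-in for round_up(); assumes align is a power of two. */
static size_t round_up_size(size_t size, size_t align)
{
	return (size + align - 1) & ~(align - 1);
}

int main(void)
{
	/* e.g. a 13-byte object is poisoned as two full 8-byte granules */
	size_t object_size = 13;

	printf("object_size=%zu poisoned=%zu\n",
	       object_size, round_up_size(object_size, GRANULE_SIZE));
	return 0;
}

With the rounding centralized in kasan_poison_memory(), callers such as
__kasan_poison_object_data() and ____kasan_slab_free() can pass
cache->object_size directly, as the hunks above do.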