From: Peter Zijlstra <peterz@xxxxxxxxxxxxx>

Provide inline memcpy and memset functions that can be used instead
of the GCC builtins whenever necessary.

Originally-by: Peter Zijlstra <peterz@xxxxxxxxxxxxx>
Link: https://lore.kernel.org/lkml/Y759AJ%2F0N9fqwDED@xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx/
Signed-off-by: Peter Zijlstra (Intel) <peterz@xxxxxxxxxxxxx>
Signed-off-by: Sohil Mehta <sohil.mehta@xxxxxxxxx>
Signed-off-by: Alexander Shishkin <alexander.shishkin@xxxxxxxxxxxxxxx>
---
 arch/x86/include/asm/string.h | 26 ++++++++++++++++++++++++++
 1 file changed, 26 insertions(+)

diff --git a/arch/x86/include/asm/string.h b/arch/x86/include/asm/string.h
index c3c2c1914d65..9cb5aae7fba9 100644
--- a/arch/x86/include/asm/string.h
+++ b/arch/x86/include/asm/string.h
@@ -1,6 +1,32 @@
 /* SPDX-License-Identifier: GPL-2.0 */
+#ifndef _ASM_X86_STRING_H
+#define _ASM_X86_STRING_H
+
 #ifdef CONFIG_X86_32
 # include <asm/string_32.h>
 #else
 # include <asm/string_64.h>
 #endif
+
+static __always_inline void *__inline_memcpy(void *to, const void *from, size_t len)
+{
+	void *ret = to;
+
+	asm volatile("rep movsb"
+		     : "+D" (to), "+S" (from), "+c" (len)
+		     : : "memory");
+	return ret;
+}
+
+static __always_inline void *__inline_memset(void *s, int v, size_t n)
+{
+	void *ret = s;
+
+	asm volatile("rep stosb"
+		     : "+D" (s), "+c" (n)
+		     : "a" ((uint8_t)v)
+		     : "memory");
+	return ret;
+}
+
+#endif /* _ASM_X86_STRING_H */
-- 
2.45.2
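
[Not part of the patch: for reviewers unfamiliar with the pattern, below is a
minimal standalone userspace sketch that exercises the same rep movsb /
rep stosb sequences the new helpers use. The inline_memcpy/inline_memset
names and the test harness are illustrative only, not the kernel API.]

#include <stdint.h>
#include <stdio.h>
#include <string.h>

/* Same pattern as the patch: byte-wise copy via rep movsb. */
static inline void *inline_memcpy(void *to, const void *from, size_t len)
{
	void *ret = to;

	asm volatile("rep movsb"
		     : "+D" (to), "+S" (from), "+c" (len)
		     : : "memory");
	return ret;
}

/* Same pattern as the patch: byte-wise fill via rep stosb. */
static inline void *inline_memset(void *s, int v, size_t n)
{
	void *ret = s;

	asm volatile("rep stosb"
		     : "+D" (s), "+c" (n)
		     : "a" ((uint8_t)v)
		     : "memory");
	return ret;
}

int main(void)
{
	char src[16] = "hello, world!";
	char dst[16];

	inline_memset(dst, 0, sizeof(dst));
	inline_memcpy(dst, src, sizeof(src));
	printf("%s\n", dst);	/* prints "hello, world!" */
	return memcmp(dst, src, sizeof(src)) != 0;
}

Because the helpers are __always_inline and open-coded asm, callers get a
copy/fill with no out-of-line call to the compiler's memcpy/memset, which is
the point of using them instead of the GCC builtins where such calls are
undesirable.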