Use more consistent, unambiguous types (u8 rather than char) for the
source and destination buffer pointer arguments of the asm functions.
Declare their C prototypes with asmlinkage as well.

Signed-off-by: Robert Elliott <elliott@xxxxxxx>
---
 arch/x86/crypto/ghash-clmulni-intel_asm.S  | 6 +++---
 arch/x86/crypto/ghash-clmulni-intel_glue.c | 6 +++---
 2 files changed, 6 insertions(+), 6 deletions(-)

diff --git a/arch/x86/crypto/ghash-clmulni-intel_asm.S b/arch/x86/crypto/ghash-clmulni-intel_asm.S
index 09cf9271b83a..ad860836f75b 100644
--- a/arch/x86/crypto/ghash-clmulni-intel_asm.S
+++ b/arch/x86/crypto/ghash-clmulni-intel_asm.S
@@ -96,7 +96,7 @@ SYM_FUNC_END(__clmul_gf128mul_ble)
  * This supports 64-bit CPUs.
  *
  * Return: none (but @dst is updated)
- * Prototype: asmlinkage void clmul_ghash_mul(char *dst, const u128 *shash)
+ * Prototype: asmlinkage void clmul_ghash_mul(u8 *dst, const u128 *shash)
  */
 SYM_FUNC_START(clmul_ghash_mul)
 	FRAME_BEGIN
@@ -122,8 +122,8 @@ SYM_FUNC_END(clmul_ghash_mul)
  * This supports 64-bit CPUs.
  *
  * Return: none (but @dst is updated)
- * Prototype: asmlinkage clmul_ghash_update(char *dst, const char *src,
- *                                          unsigned int srclen, const u128 *shash);
+ * Prototype: asmlinkage void clmul_ghash_update(u8 *dst, const u8 *src,
+ *                                               unsigned int srclen, const u128 *shash);
  */
 SYM_FUNC_START(clmul_ghash_update)
 	FRAME_BEGIN
diff --git a/arch/x86/crypto/ghash-clmulni-intel_glue.c b/arch/x86/crypto/ghash-clmulni-intel_glue.c
index 1f1a95f3dd0c..beac4b2eddf6 100644
--- a/arch/x86/crypto/ghash-clmulni-intel_glue.c
+++ b/arch/x86/crypto/ghash-clmulni-intel_glue.c
@@ -23,10 +23,10 @@
 #define GHASH_BLOCK_SIZE	16
 #define GHASH_DIGEST_SIZE	16

-void clmul_ghash_mul(char *dst, const u128 *shash);
+asmlinkage void clmul_ghash_mul(u8 *dst, const u128 *shash);

-void clmul_ghash_update(char *dst, const char *src, unsigned int srclen,
-			const u128 *shash);
+asmlinkage void clmul_ghash_update(u8 *dst, const u8 *src, unsigned int srclen,
+				   const u128 *shash);

 struct ghash_async_ctx {
 	struct cryptd_ahash *cryptd_tfm;
-- 
2.38.1
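
For context only (not part of this patch): a minimal sketch of how a caller
in the glue code might invoke the asm routine with the u8 buffer types and
asmlinkage declaration above. The helper name is hypothetical; the real
ghash_update() in ghash-clmulni-intel_glue.c additionally buffers partial
blocks.

/* Hypothetical helper, shown only to illustrate the u8/asmlinkage usage. */
static void example_ghash_blocks(u8 *dst, const u8 *src, unsigned int srclen,
				 const u128 *shash)
{
	/* srclen is assumed to be a multiple of GHASH_BLOCK_SIZE here */
	kernel_fpu_begin();	/* asm code uses SSE/PCLMULQDQ registers */
	clmul_ghash_update(dst, src, srclen, shash);
	kernel_fpu_end();
}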