On Mon, Jun 28, 2021 at 05:22:30PM +0100, Robin Murphy wrote:
> diff --git a/arch/arm64/lib/copy_to_user.S b/arch/arm64/lib/copy_to_user.S
> index 043da90f5dd7..cfb598ae4812 100644
> --- a/arch/arm64/lib/copy_to_user.S
> +++ b/arch/arm64/lib/copy_to_user.S
> @@ -32,7 +32,7 @@
>  	.endm
> 
>  	.macro strh1 reg, ptr, val
> -	user_ldst 9998f, sttrh, \reg, \ptr, \val
> +	user_ldst 9997f, sttrh, \reg, \ptr, \val
>  	.endm
> 
>  	.macro ldr1 reg, ptr, val
> @@ -40,7 +40,7 @@
>  	.endm
> 
>  	.macro str1 reg, ptr, val
> -	user_ldst 9998f, sttr, \reg, \ptr, \val
> +	user_ldst 9997f, sttr, \reg, \ptr, \val
>  	.endm
> 
>  	.macro ldp1 reg1, reg2, ptr, val
> @@ -48,12 +48,14 @@
>  	.endm
> 
>  	.macro stp1 reg1, reg2, ptr, val
> -	user_stp 9998f, \reg1, \reg2, \ptr, \val
> +	user_stp 9997f, \reg1, \reg2, \ptr, \val
>  	.endm
> 
>  end	.req	x5
> +srcin	.req	x15
>  SYM_FUNC_START(__arch_copy_to_user)
>  	add	end, x0, x2
> +	mov	srcin, x1
>  #include "copy_template.S"
>  	mov	x0, #0
>  	ret
> @@ -62,6 +64,12 @@ EXPORT_SYMBOL(__arch_copy_to_user)
> 
>  	.section .fixup,"ax"
>  	.align	2
> +9997:	cmp	dst, dstin
> +	b.ne	9998f
> +	// Before being absolutely sure we couldn't copy anything, try harder
> +	ldrb	tmp1w, [srcin]
> +USER(9998f, sttrb tmp1w, [dstin])
> +	add	dst, dstin, #1
>  9998:	sub	x0, end, dst			// bytes not copied
>  	ret
>  	.previous

I think it's worth doing the copy_to_user() fallback in a loop until it
faults or hits the end of the buffer. This would solve the problem we
currently have with writing more bytes than actually reported.

For copy_from_user() the loop is not necessary; a single byte would
suffice.
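Something along these lines for the copy_to_user() fixup, completely
untested. It reuses the dst/dstin/end/tmp1/tmp1w aliases from
copy_template.S and the srcin alias added by your patch; 9996 is just a
new arbitrary local label, and srcin gets clobbered since nothing needs
the original value afterwards:

	.section .fixup,"ax"
	.align	2
	// Catch up the byte-wise source pointer with however far the
	// block copy already got, then keep storing single bytes until
	// we either fault again or reach the end of the buffer.
9997:	sub	tmp1, dst, dstin	// bytes already accounted for
	add	srcin, srcin, tmp1	// corresponding source position
9996:	ldrb	tmp1w, [srcin]
USER(9998f, sttrb tmp1w, [dst])
	add	dst, dst, #1
	add	srcin, srcin, #1
	cmp	dst, end
	b.lo	9996b
9998:	sub	x0, end, dst			// bytes not copied
	ret
	.previous

With the loop, every byte up to the address that actually faults ends
up both written and accounted for in dst, so the return value can no
longer claim that bytes which did reach userspace were not copied.

-- 
Catalin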