The __copy_user*() functions have thus far returned the number of
uncopied bytes in the $a2 register, which is also the register used to
pass the length of the memory region to be copied. As part of moving to
the standard calling convention, return the number of uncopied bytes in
$v0 instead.

Signed-off-by: Paul Burton <paul.burton@xxxxxxxxxx>
---
 arch/mips/cavium-octeon/octeon-memcpy.S | 18 +++++++++---------
 arch/mips/include/asm/uaccess.h         | 30 ++++++++++++++++++++----------
 arch/mips/lib/memcpy.S                  | 26 +++++++++++++-------------
 3 files changed, 42 insertions(+), 32 deletions(-)

diff --git a/arch/mips/cavium-octeon/octeon-memcpy.S b/arch/mips/cavium-octeon/octeon-memcpy.S
index 944f8f5..6f312a2 100644
--- a/arch/mips/cavium-octeon/octeon-memcpy.S
+++ b/arch/mips/cavium-octeon/octeon-memcpy.S
@@ -141,7 +141,7 @@
 	.set	noreorder
 	.set	noat
 
-	.macro __BUILD_COPY_USER mode
+	.macro __BUILD_COPY_USER mode, uncopied
 /*
  * Note: dst & src may be unaligned, len may be 0
  * Temps
@@ -358,12 +358,12 @@ EXC(	 sb	t0, N(dst), s_exc_p1)
 	COPY_BYTE(4)
 	COPY_BYTE(5)
 EXC(	lb	t0, NBYTES-2(src), l_exc)
-	SUB	len, len, 1
+	SUB	\uncopied, len, 1
 	jr	ra
 EXC(	 sb	t0, NBYTES-2(dst), s_exc_p1)
 .Ldone\@:
 	jr	ra
-	 nop
+	 move	\uncopied, len
 
 	/* memcpy shouldn't generate exceptions */
 	.if \mode != MEMCPY_MODE
@@ -410,13 +410,13 @@ l_exc:
 	bnez	src, 1b
 	 SUB	src, src, 1
 2:	jr	ra
-	 nop
+	 move	\uncopied, len
 
 
 #define SEXC(n)				\
 s_exc_p ## n ## u:			\
 	jr	ra;			\
-	 ADD	len, len, n*NBYTES
+	 ADD	\uncopied, len, n*NBYTES
 
 SEXC(16)
 SEXC(15)
@@ -437,10 +437,10 @@ SEXC(1)
 
 s_exc_p1:
 	jr	ra
-	 ADD	len, len, 1
+	 ADD	\uncopied, len, 1
 s_exc:
 	jr	ra
-	 nop
+	 move	\uncopied, len
 	.endif /* \mode != MEMCPY_MODE */
 	.endm
@@ -458,7 +458,7 @@ s_exc:
 LEAF(memcpy)					/* a0=dst a1=src a2=len */
 EXPORT_SYMBOL(memcpy)
 	move	v0, dst				/* return value */
-	__BUILD_COPY_USER MEMCPY_MODE
+	__BUILD_COPY_USER MEMCPY_MODE len
 END(memcpy)
 
 /*
@@ -475,7 +475,7 @@
 LEAF(__copy_user)
 EXPORT_SYMBOL(__copy_user)
 	li	t7, 0	/* not inatomic */
 __copy_user_common:
-	__BUILD_COPY_USER COPY_USER_MODE
+	__BUILD_COPY_USER COPY_USER_MODE v0
 END(__copy_user)
 /*
diff --git a/arch/mips/include/asm/uaccess.h b/arch/mips/include/asm/uaccess.h
index 89fa5c0b..81d632f 100644
--- a/arch/mips/include/asm/uaccess.h
+++ b/arch/mips/include/asm/uaccess.h
@@ -814,6 +814,7 @@ extern size_t __copy_user(void *__to, const void *__from, size_t __n);
 #ifndef CONFIG_EVA
 #define __invoke_copy_to_user(to, from, n)				\
 ({									\
+	register long __cu_ret_r __asm__("$2");				\
 	register void __user *__cu_to_r __asm__("$4");			\
 	register const void *__cu_from_r __asm__("$5");			\
 	register long __cu_len_r __asm__("$6");				\
@@ -823,11 +824,12 @@ extern size_t __copy_user(void *__to, const void *__from, size_t __n);
 	__cu_len_r = (n);						\
 	__asm__ __volatile__(						\
 	__MODULE_JAL(__copy_user)					\
-	: "+r" (__cu_to_r), "+r" (__cu_from_r), "+r" (__cu_len_r)	\
+	: "=r"(__cu_ret_r), "+r" (__cu_to_r),				\
+	  "+r" (__cu_from_r), "+r" (__cu_len_r)				\
 	:								\
 	: "$8", "$9", "$10", "$11", "$12", "$14", "$15", "$24", "$31",	\
 	  DADDI_SCRATCH, "memory");					\
-	__cu_len_r;							\
+	__cu_ret_r;							\
 })
 
 #define __invoke_copy_to_kernel(to, from, n)				\
@@ -963,6 +965,7 @@ extern size_t __copy_user_inatomic(void *__to, const void *__from, size_t __n);
 
 #define __invoke_copy_from_user(to, from, n)				\
 ({									\
+	register long __cu_ret_r __asm__("$2");				\
 	register void *__cu_to_r __asm__("$4");				\
 	register const void __user *__cu_from_r __asm__("$5");		\
 	register long __cu_len_r __asm__("$6");				\
@@ -977,11 +980,12 @@ extern size_t __copy_user_inatomic(void *__to, const void *__from, size_t __n);
 	__UA_ADDU "\t$1, %1, %2\n\t"					\
 	".set\tat\n\t"							\
 	".set\treorder"							\
-	: "+r" (__cu_to_r), "+r" (__cu_from_r), "+r" (__cu_len_r)	\
+	: "=r"(__cu_ret_r), "+r" (__cu_to_r),				\
+	  "+r" (__cu_from_r), "+r" (__cu_len_r)				\
 	:								\
 	: "$8", "$9", "$10", "$11", "$12", "$14", "$15", "$24", "$31",	\
 	  DADDI_SCRATCH, "memory");					\
-	__cu_len_r;							\
+	__cu_ret_r;							\
 })
 
 #define __invoke_copy_from_kernel(to, from, n)				\
@@ -997,6 +1001,7 @@ extern size_t __copy_user_inatomic(void *__to, const void *__from, size_t __n);
 
 #define __invoke_copy_from_user_inatomic(to, from, n)			\
 ({									\
+	register long __cu_ret_r __asm__("$2");				\
 	register void *__cu_to_r __asm__("$4");				\
 	register const void __user *__cu_from_r __asm__("$5");		\
 	register long __cu_len_r __asm__("$6");				\
@@ -1011,11 +1016,12 @@ extern size_t __copy_user_inatomic(void *__to, const void *__from, size_t __n);
 	__UA_ADDU "\t$1, %1, %2\n\t"					\
 	".set\tat\n\t"							\
 	".set\treorder"							\
-	: "+r" (__cu_to_r), "+r" (__cu_from_r), "+r" (__cu_len_r)	\
+	: "=r"(__cu_ret_r), "+r" (__cu_to_r),				\
+	  "+r" (__cu_from_r), "+r" (__cu_len_r)				\
 	:								\
 	: "$8", "$9", "$10", "$11", "$12", "$14", "$15", "$24", "$31",	\
 	  DADDI_SCRATCH, "memory");					\
-	__cu_len_r;							\
+	__cu_ret_r;							\
 })
 
 #define __invoke_copy_from_kernel_inatomic(to, from, n)			\
@@ -1035,6 +1041,7 @@ extern size_t __copy_in_user_eva(void *__to, const void *__from, size_t __n);
 
 #define __invoke_copy_from_user_eva_generic(to, from, n, func_ptr)	\
 ({									\
+	register long __cu_ret_r __asm__("$2");				\
 	register void *__cu_to_r __asm__("$4");				\
 	register const void __user *__cu_from_r __asm__("$5");		\
 	register long __cu_len_r __asm__("$6");				\
@@ -1049,15 +1056,17 @@ extern size_t __copy_in_user_eva(void *__to, const void *__from, size_t __n);
 	__UA_ADDU "\t$1, %1, %2\n\t"					\
 	".set\tat\n\t"							\
 	".set\treorder"							\
-	: "+r" (__cu_to_r), "+r" (__cu_from_r), "+r" (__cu_len_r)	\
+	: "=r"(__cu_ret_r), "+r" (__cu_to_r),				\
+	  "+r" (__cu_from_r), "+r" (__cu_len_r)				\
 	:								\
 	: "$8", "$9", "$10", "$11", "$12", "$14", "$15", "$24", "$31",	\
 	  DADDI_SCRATCH, "memory");					\
-	__cu_len_r;							\
+	__cu_ret_r;							\
 })
 
 #define __invoke_copy_to_user_eva_generic(to, from, n, func_ptr)	\
 ({									\
+	register long __cu_ret_r __asm__("$2");				\
 	register void *__cu_to_r __asm__("$4");				\
 	register const void __user *__cu_from_r __asm__("$5");		\
 	register long __cu_len_r __asm__("$6");				\
@@ -1067,11 +1076,12 @@ extern size_t __copy_in_user_eva(void *__to, const void *__from, size_t __n);
 	__cu_len_r = (n);						\
 	__asm__ __volatile__(						\
 	__MODULE_JAL(func_ptr)						\
-	: "+r" (__cu_to_r), "+r" (__cu_from_r), "+r" (__cu_len_r)	\
+	: "=r"(__cu_ret_r), "+r" (__cu_to_r),				\
+	  "+r" (__cu_from_r), "+r" (__cu_len_r)				\
 	:								\
 	: "$8", "$9", "$10", "$11", "$12", "$14", "$15", "$24", "$31",	\
 	  DADDI_SCRATCH, "memory");					\
-	__cu_len_r;							\
+	__cu_ret_r;							\
 })
 
 /*
diff --git a/arch/mips/lib/memcpy.S b/arch/mips/lib/memcpy.S
index bfbe23c..052f7a1 100644
--- a/arch/mips/lib/memcpy.S
+++ b/arch/mips/lib/memcpy.S
@@ -262,7 +262,7 @@
  * from : Source operand. USEROP or KERNELOP
  * to   : Destination operand. USEROP or KERNELOP
  */
-	.macro __BUILD_COPY_USER mode, from, to
+	.macro __BUILD_COPY_USER mode, from, to, uncopied
 
 	/*
 	 * Note: dst & src may be unaligned, len may be 0
@@ -398,7 +398,7 @@
 	SHIFT_DISCARD t0, t0, bits
 	STREST(t0, -1(t1), .Ls_exc\@)
 	jr	ra
-	 move	len, zero
+	 move	\uncopied, zero
 .Ldst_unaligned\@:
 	/*
 	 * dst is unaligned
@@ -500,12 +500,12 @@
 	COPY_BYTE(5)
 #endif
 	LOADB(t0, NBYTES-2(src), .Ll_exc\@)
-	SUB	len, len, 1
+	SUB	\uncopied, len, 1
 	jr	ra
 	 STOREB(t0, NBYTES-2(dst), .Ls_exc_p1\@)
 .Ldone\@:
 	jr	ra
-	 nop
+	 move	\uncopied, len
 
 #ifdef CONFIG_CPU_MIPSR6
 .Lcopy_unaligned_bytes\@:
@@ -584,13 +584,13 @@
 	.set	pop
 #endif
 	jr	ra
-	 nop
+	 move	\uncopied, len
 
 
 #define SEXC(n)							\
 	.set	reorder;			/* DADDI_WAR */	\
 .Ls_exc_p ## n ## u\@:						\
-	ADD	len, len, n*NBYTES;				\
+	ADD	\uncopied, len, n*NBYTES;			\
 	jr	ra;						\
 	.set	noreorder
@@ -605,12 +605,12 @@ SEXC(1)
 
 .Ls_exc_p1\@:
 	.set	reorder				/* DADDI_WAR */
-	ADD	len, len, 1
+	ADD	\uncopied, len, 1
 	jr	ra
 	.set	noreorder
 .Ls_exc\@:
 	jr	ra
-	 nop
+	 move	\uncopied, len
 	.endif /* \mode != MEMCPY_MODE */
 	.endm
@@ -632,7 +632,7 @@ EXPORT_SYMBOL(memcpy)
 .L__memcpy:
 	li	t6, 0	/* not inatomic */
 	/* Legacy Mode, user <-> user */
-	__BUILD_COPY_USER MEMCPY_MODE USEROP USEROP
+	__BUILD_COPY_USER MEMCPY_MODE USEROP USEROP len
 END(memcpy)
 
 /*
@@ -651,7 +651,7 @@ EXPORT_SYMBOL(__copy_user)
 	li	t6, 0	/* not inatomic */
 __copy_user_common:
 	/* Legacy Mode, user <-> user */
-	__BUILD_COPY_USER LEGACY_MODE USEROP USEROP
+	__BUILD_COPY_USER LEGACY_MODE USEROP USEROP v0
 END(__copy_user)
 
 /*
@@ -686,7 +686,7 @@
 
 LEAF(__copy_from_user_eva)
 EXPORT_SYMBOL(__copy_from_user_eva)
	li	t6, 0	/* not inatomic */
 __copy_from_user_common:
-	__BUILD_COPY_USER EVA_MODE USEROP KERNELOP
+	__BUILD_COPY_USER EVA_MODE USEROP KERNELOP v0
 END(__copy_from_user_eva)
@@ -697,7 +697,7 @@ END(__copy_from_user_eva)
 
 LEAF(__copy_to_user_eva)
 EXPORT_SYMBOL(__copy_to_user_eva)
-__BUILD_COPY_USER EVA_MODE KERNELOP USEROP
+__BUILD_COPY_USER EVA_MODE KERNELOP USEROP v0
 END(__copy_to_user_eva)
 
 /*
@@ -706,7 +706,7 @@ END(__copy_to_user_eva)
 
 LEAF(__copy_in_user_eva)
 EXPORT_SYMBOL(__copy_in_user_eva)
-__BUILD_COPY_USER EVA_MODE USEROP USEROP
+__BUILD_COPY_USER EVA_MODE USEROP USEROP v0
 END(__copy_in_user_eva)
 
 #endif
-- 
2.10.2
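
[Editorial illustration, not part of the patch: a minimal caller-side
sketch of the new convention. It mirrors the __invoke_copy_* macros
above, but the function name invoke_copy_user is hypothetical, a plain
jal stands in for __MODULE_JAL, and the EVA, inatomic and DADDI_SCRATCH
details are omitted, so treat it as an assumption-laden sketch rather
than kernel code. The point it shows: the uncopied-byte count is now an
"=r" output tied to $2 ($v0), instead of being read back out of the
$6 ($a2) length register.]

#include <stddef.h>

/*
 * Copy n bytes from 'from' to 'to' via the assembly __copy_user
 * routine; returns the number of bytes left uncopied, which after
 * this patch arrives in $2 (v0) per the standard calling convention.
 */
static inline size_t invoke_copy_user(void *to, const void *from, size_t n)
{
	register long ret __asm__("$2");	/* v0: uncopied byte count */
	register void *to_r __asm__("$4");	/* a0: destination */
	register const void *from_r __asm__("$5"); /* a1: source */
	register long len_r __asm__("$6");	/* a2: length */

	to_r = to;
	from_r = from;
	len_r = n;

	__asm__ __volatile__(
		"jal	__copy_user"
		/* ret is a pure output now; the argument registers may
		 * still be clobbered by the callee, hence "+r". */
		: "=r"(ret), "+r"(to_r), "+r"(from_r), "+r"(len_r)
		:
		: "$8", "$9", "$10", "$11", "$12", "$14", "$15", "$24",
		  "$31", "memory");

	return ret;	/* previously this expression was len_r */
}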