Replace the shifting and masking of x with a rotation. This generates
better assembly.

Signed-off-by: Charlie Jenkins <charlie@xxxxxxxxxxxx>
---
 arch/parisc/lib/checksum.c | 7 ++-----
 1 file changed, 2 insertions(+), 5 deletions(-)

diff --git a/arch/parisc/lib/checksum.c b/arch/parisc/lib/checksum.c
index eaa660491e24..1ae8cc730d13 100644
--- a/arch/parisc/lib/checksum.c
+++ b/arch/parisc/lib/checksum.c
@@ -27,11 +27,8 @@
 
 static inline unsigned short from32to16(unsigned int x)
 {
-	/* 32 bits --> 16 bits + carry */
-	x = (x & 0xffff) + (x >> 16);
-	/* 16 bits + carry --> 16 bits including carry */
-	x = (x & 0xffff) + (x >> 16);
-	return (unsigned short)x;
+	x += ror32(x, 16);
+	return (unsigned short)(x >> 16);
 }
 
 unsigned int do_csum(const unsigned char *buff, int len)
-- 
2.34.1
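
A quick way to sanity-check the equivalence (a userspace sketch, not part of
the patch; ror32 below is a local stand-in for the kernel helper from
<linux/bitops.h>): after x += ror32(x, 16), the high half of x holds
(x >> 16) + (x & 0xffff) plus the carry from the low half, which is exactly
what the old double fold computed.

/* Userspace sketch: check the rotate-based fold against the old one. */
#include <assert.h>
#include <stdint.h>
#include <stdio.h>

/* Local stand-in for the kernel's ror32() */
static uint32_t ror32(uint32_t x, unsigned int n)
{
	return (x >> n) | (x << (32 - n));
}

static uint16_t from32to16_old(uint32_t x)
{
	x = (x & 0xffff) + (x >> 16);	/* 32 bits -> 16 bits + carry */
	x = (x & 0xffff) + (x >> 16);	/* fold the carry back in */
	return (uint16_t)x;
}

static uint16_t from32to16_new(uint32_t x)
{
	x += ror32(x, 16);		/* high half now holds hi + lo + carry */
	return (uint16_t)(x >> 16);
}

int main(void)
{
	uint32_t samples[] = { 0, 1, 0xffff, 0x10000, 0xffff0001,
			       0x12345678, 0xffffffff };

	for (unsigned int i = 0; i < sizeof(samples) / sizeof(samples[0]); i++)
		assert(from32to16_old(samples[i]) == from32to16_new(samples[i]));

	printf("old and new from32to16 agree on all samples\n");
	return 0;
}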