The new assembly code works on both 32-bit and 64-bit CPUs and allows
for more compiler optimisations by not requiring smm_regs to be packed.
Also, since the SMM handler seems to modify the carry flag, the new code
informs the compiler that the flags register needs to be saved/restored.

Tested with 32-bit and 64-bit kernels on a Dell Inspiron 3505.

Signed-off-by: Armin Wolf <W_Armin@xxxxxx>
---
 drivers/hwmon/dell-smm-hwmon.c | 71 ++++++++--------------------------
 1 file changed, 17 insertions(+), 54 deletions(-)

diff --git a/drivers/hwmon/dell-smm-hwmon.c b/drivers/hwmon/dell-smm-hwmon.c
index b7016971bb2e..04a41d59da60 100644
--- a/drivers/hwmon/dell-smm-hwmon.c
+++ b/drivers/hwmon/dell-smm-hwmon.c
@@ -123,7 +123,7 @@ struct smm_regs {
 	unsigned int edx;
 	unsigned int esi;
 	unsigned int edi;
-} __packed;
+};
 
 static const char * const temp_labels[] = {
 	"CPU",
@@ -175,59 +175,22 @@ static int i8k_smm_func(void *par)
 	if (smp_processor_id() != 0)
 		return -EBUSY;
 
-#if defined(CONFIG_X86_64)
-	asm volatile("pushq %%rax\n\t"
-		"movl 0(%%rax),%%edx\n\t"
-		"pushq %%rdx\n\t"
-		"movl 4(%%rax),%%ebx\n\t"
-		"movl 8(%%rax),%%ecx\n\t"
-		"movl 12(%%rax),%%edx\n\t"
-		"movl 16(%%rax),%%esi\n\t"
-		"movl 20(%%rax),%%edi\n\t"
-		"popq %%rax\n\t"
-		"out %%al,$0xb2\n\t"
-		"out %%al,$0x84\n\t"
-		"xchgq %%rax,(%%rsp)\n\t"
-		"movl %%ebx,4(%%rax)\n\t"
-		"movl %%ecx,8(%%rax)\n\t"
-		"movl %%edx,12(%%rax)\n\t"
-		"movl %%esi,16(%%rax)\n\t"
-		"movl %%edi,20(%%rax)\n\t"
-		"popq %%rdx\n\t"
-		"movl %%edx,0(%%rax)\n\t"
-		"pushfq\n\t"
-		"popq %%rax\n\t"
-		"andl $1,%%eax\n"
-		: "=a"(rc)
-		: "a"(regs)
-		: "%ebx", "%ecx", "%edx", "%esi", "%edi", "memory");
-#else
-	asm volatile("pushl %%eax\n\t"
-		"movl 0(%%eax),%%edx\n\t"
-		"push %%edx\n\t"
-		"movl 4(%%eax),%%ebx\n\t"
-		"movl 8(%%eax),%%ecx\n\t"
-		"movl 12(%%eax),%%edx\n\t"
-		"movl 16(%%eax),%%esi\n\t"
-		"movl 20(%%eax),%%edi\n\t"
-		"popl %%eax\n\t"
-		"out %%al,$0xb2\n\t"
-		"out %%al,$0x84\n\t"
-		"xchgl %%eax,(%%esp)\n\t"
-		"movl %%ebx,4(%%eax)\n\t"
-		"movl %%ecx,8(%%eax)\n\t"
-		"movl %%edx,12(%%eax)\n\t"
-		"movl %%esi,16(%%eax)\n\t"
-		"movl %%edi,20(%%eax)\n\t"
-		"popl %%edx\n\t"
-		"movl %%edx,0(%%eax)\n\t"
-		"lahf\n\t"
-		"shrl $8,%%eax\n\t"
-		"andl $1,%%eax\n"
-		: "=a"(rc)
-		: "a"(regs)
-		: "%ebx", "%ecx", "%edx", "%esi", "%edi", "memory");
-#endif
+	asm volatile("out %%al,$0xb2\n\t"
+		     "out %%al,$0x84\n"
+		     : "=a" (regs->eax),
+		       "=b" (regs->ebx),
+		       "=c" (regs->ecx),
+		       "=d" (regs->edx),
+		       "=S" (regs->esi),
+		       "=D" (regs->edi),
+		       CC_OUT(c) (rc)
+		     : "a" (regs->eax),
+		       "b" (regs->ebx),
+		       "c" (regs->ecx),
+		       "d" (regs->edx),
+		       "S" (regs->esi),
+		       "D" (regs->edi));
+
 	if (rc != 0 || (regs->eax & 0xffff) == 0xffff || regs->eax == eax)
 		rc = -EINVAL;
 
--
2.30.2
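
Background note for reviewers: the CC_OUT(c) output used above comes from
arch/x86/include/asm/asm.h and, on compilers that define
__GCC_ASM_FLAG_OUTPUTS__, expands to the "=@ccc" flag-output constraint,
so the compiler reads the carry flag directly and knows EFLAGS is
clobbered, replacing the old pushfq/lahf sequences. Below is a minimal,
stand-alone user-space sketch of that constraint (x86 only, GCC 6+ or a
recent Clang); the function and variable names are illustrative and not
taken from the driver:

#include <stdbool.h>
#include <stdio.h>

/*
 * Add two 32-bit values and report the resulting carry flag via the
 * GCC/Clang "=@ccc" asm flag-output constraint. The constraint also
 * tells the compiler that the flags register is modified, so no
 * manual save/restore of EFLAGS is needed.
 */
static bool add_with_carry(unsigned int a, unsigned int b, unsigned int *sum)
{
	bool carry;

	asm("addl %[b], %[res]"
	    : [res] "=r" (*sum), "=@ccc" (carry)
	    : [b] "r" (b), "[res]" (a));

	return carry;
}

int main(void)
{
	unsigned int sum;
	bool carry = add_with_carry(0xffffffffu, 1, &sum);

	/* 0xffffffff + 1 wraps to 0 and sets the carry flag */
	printf("sum=%u carry=%d\n", sum, carry);
	return 0;
}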