On Thu, 2023-07-13 at 10:07 +0200, Peter Zijlstra wrote:
> On Wed, Jul 12, 2023 at 08:55:24PM +1200, Kai Huang wrote:
> > @@ -85,6 +86,7 @@
> >  	.endif	/* \saved */
> > 
> >  	.if \host
> > +1:
> >  	seamcall
> >  	/*
> >  	 * SEAMCALL instruction is essentially a VMExit from VMX root
> > @@ -99,6 +101,7 @@
> >  	 */
> >  	mov $TDX_SEAMCALL_VMFAILINVALID, %rdi
> >  	cmovc %rdi, %rax
> > +2:
> >  	.else
> >  	tdcall
> >  	.endif
> 
> This is just wrong, if the thing traps you should not do the return
> registers. And at this point the mov/cmovc thing doesn't make much sense
> either.

OK will do. Yes, writing back the return registers isn't necessary there.
I thought we could keep doing it just to keep the code simple, since the
trap/VMFAILINVALID code path isn't a performance path anyway. This is a
problem in the current upstream code too, so I'll fix it first in a
separate patch.

> 
> > @@ -185,4 +188,21 @@
> > 
> >  	FRAME_END
> >  	RET
> > +
> > +	.if \host
> > +3:
> > +	/*
> > +	 * SEAMCALL caused #GP or #UD. By reaching here %eax contains
> > +	 * the trap number. Convert the trap number to the TDX error
> > +	 * code by setting TDX_SW_ERROR to the high 32-bits of %rax.
> > +	 *
> > +	 * Note cannot OR TDX_SW_ERROR directly to %rax as OR instruction
> > +	 * only accepts 32-bit immediate at most.
> > +	 */
> > +	movq $TDX_SW_ERROR, %r12
> > +	orq %r12, %rax
> > +	jmp 2b
> > +
> > +	_ASM_EXTABLE_FAULT(1b, 3b)
> > +	.endif	/* \host */
> >  .endm
> 
> Also, please use named labels where possible and *please* keep asm
> directives unindented.

Yes, will do.

> 
> --- a/arch/x86/virt/vmx/tdx/tdxcall.S
> +++ b/arch/x86/virt/vmx/tdx/tdxcall.S
> @@ -56,7 +56,7 @@
>  	movq TDX_MODULE_r10(%rsi), %r10
>  	movq TDX_MODULE_r11(%rsi), %r11
> 
> -	.if \saved
> +.if \saved
>  	/*
>  	 * Move additional input regs from the structure. For simplicity
>  	 * assume that anything needs the callee-saved regs also tramples
> @@ -75,18 +75,18 @@
>  	movq TDX_MODULE_r15(%rsi), %r15
>  	movq TDX_MODULE_rbx(%rsi), %rbx
> 
> -	.if \ret
> +.if \ret
>  	/* Save the structure pointer as %rsi is about to be clobbered */
>  	pushq %rsi
> -	.endif
> +.endif
> 
>  	movq TDX_MODULE_rdi(%rsi), %rdi
>  	/* %rsi needs to be done at last */
>  	movq TDX_MODULE_rsi(%rsi), %rsi
> -	.endif	/* \saved */
> +.endif	/* \saved */
> 
> -	.if \host
> -1:
> +.if \host
> +.Lseamcall:
>  	seamcall
>  	/*
>  	 * SEAMCALL instruction is essentially a VMExit from VMX root
> @@ -99,15 +99,13 @@
>  	 * This value will never be used as actual SEAMCALL error code as
>  	 * it is from the Reserved status code class.
>  	 */
> -	mov $TDX_SEAMCALL_VMFAILINVALID, %rdi
> -	cmovc %rdi, %rax
> -2:
> -	.else
> +	jc .Lseamfail
> +.else
>  	tdcall
> -	.endif
> +.endif
> 
> -	.if \ret
> -	.if \saved
> +.if \ret
> +.if \saved
>  	/*
>  	 * Restore the structure from stack to save the output registers
>  	 *
> @@ -136,7 +134,7 @@
>  	movq %r15, TDX_MODULE_r15(%rsi)
>  	movq %rbx, TDX_MODULE_rbx(%rsi)
>  	movq %rdi, TDX_MODULE_rdi(%rsi)
> -	.endif	/* \saved */
> +.endif	/* \saved */
> 
>  	/* Copy output regs to the structure */
>  	movq %rcx, TDX_MODULE_rcx(%rsi)
> @@ -145,10 +143,11 @@
>  	movq %r9, TDX_MODULE_r9(%rsi)
>  	movq %r10, TDX_MODULE_r10(%rsi)
>  	movq %r11, TDX_MODULE_r11(%rsi)
> -	.endif	/* \ret */
> +.endif	/* \ret */
> 
> -	.if \saved
> -	.if \ret
> +.Lout:
> +.if \saved
> +.if \ret
>  	/*
>  	 * Clear registers shared by guest for VP.ENTER and VP.VMCALL to
>  	 * prevent speculative use of values from guest/VMM, including
> @@ -170,13 +169,8 @@
>  	xorq %r9, %r9
>  	xorq %r10, %r10
>  	xorq %r11, %r11
> -	xorq %r12, %r12
> -	xorq %r13, %r13
> -	xorq %r14, %r14
> -	xorq %r15, %r15
> -	xorq %rbx, %rbx
>  	xorq %rdi, %rdi
> -	.endif	/* \ret */
> +.endif	/* \ret */
> 
>  	/* Restore callee-saved GPRs as mandated by the x86_64 ABI */
>  	popq %r15
> @@ -184,13 +178,17 @@
>  	popq %r13
>  	popq %r12
>  	popq %rbx
> -	.endif	/* \saved */
> +.endif	/* \saved */
> 
>  	FRAME_END
>  	RET
> 
> -	.if \host
> -3:
> +.if \host
> +.Lseamfail:
> +	mov $TDX_SEAMCALL_VMFAILINVALID, %rax
> +	jmp .Lout
> +
> +.Lseamtrap:
>  	/*
>  	 * SEAMCALL caused #GP or #UD. By reaching here %eax contains
>  	 * the trap number. Convert the trap number to the TDX error
> @@ -201,8 +199,8 @@
>  	 */
>  	movq $TDX_SW_ERROR, %r12
>  	orq %r12, %rax
> -	jmp 2b
> +	jmp .Lout

Thanks for the code. There might be a stack balancing issue here; I'll
double check when updating this patch. Thanks!

> 
> -	_ASM_EXTABLE_FAULT(1b, 3b)
> -	.endif	/* \host */
> +	_ASM_EXTABLE_FAULT(.Lseamcall, .Lseamtrap)
> +.endif	/* \host */
>  .endm
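
For reference, a minimal sketch of where the imbalance could come from,
assuming the elided part of the hunk above is where the structure pointer
pushed before SEAMCALL normally gets popped back: with \saved and \ret
both set, the error labels jump straight to .Lout and skip that pop, so
the callee-saved pops and the RET run with the stack off by one slot.
Something along these lines could rebalance it (illustrative only, not
the actual fix; the .Lerror_out label is made up for the sketch):

.if \host
.Lseamfail:
	mov $TDX_SEAMCALL_VMFAILINVALID, %rax
	jmp .Lerror_out

.Lseamtrap:
	/* Fold TDX_SW_ERROR into the trap number returned in %rax */
	movq $TDX_SW_ERROR, %r12
	orq %r12, %rax

.Lerror_out:
.if \saved && \ret
	/*
	 * The structure pointer pushed before SEAMCALL is normally popped
	 * when the outputs are copied back; that path is skipped on error,
	 * so discard the pointer here to keep the stack balanced for the
	 * callee-saved pops at .Lout.
	 */
	popq %rsi
.endif
	jmp .Lout

	_ASM_EXTABLE_FAULT(.Lseamcall, .Lseamtrap)
.endif	/* \host */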