Tianyu Lan <ltykernel@xxxxxxxxx> writes: > From: Tianyu Lan <tiala@xxxxxxxxxxxxx> > > In an SEV-SNP enlightened guest, a Hyper-V hypercall needs > to use vmmcall to trigger a vmexit and notify the hypervisor > to handle the hypercall request. > > There is no x86 SEV-SNP feature flag support so far, and > hardware provides the MSR_AMD64_SEV register to check SEV-SNP > capability via the MSR_AMD64_SEV_ENABLED bit. ALTERNATIVE can't > work without the SEV-SNP x86 feature flag. It may be added later when > the associated flag is introduced. > > Signed-off-by: Tianyu Lan <tiala@xxxxxxxxxxxxx> > --- > arch/x86/include/asm/mshyperv.h | 44 ++++++++++++++++++++++++--------- > 1 file changed, 33 insertions(+), 11 deletions(-) > > diff --git a/arch/x86/include/asm/mshyperv.h b/arch/x86/include/asm/mshyperv.h > index 31c476f4e656..d859d7c5f5e8 100644 > --- a/arch/x86/include/asm/mshyperv.h > +++ b/arch/x86/include/asm/mshyperv.h > @@ -61,16 +61,25 @@ static inline u64 hv_do_hypercall(u64 control, void *input, void *output) > u64 hv_status; > > #ifdef CONFIG_X86_64 > - if (!hv_hypercall_pg) > - return U64_MAX; > + if (hv_isolation_type_en_snp()) { Would it be possible to redo 'hv_isolation_type_en_snp()' into a static inline doing static_branch_unlikely() so we avoid the function call penalty here?
> + __asm__ __volatile__("mov %4, %%r8\n" > + "vmmcall" > + : "=a" (hv_status), ASM_CALL_CONSTRAINT, > + "+c" (control), "+d" (input_address) > + : "r" (output_address) > + : "cc", "memory", "r8", "r9", "r10", "r11"); > + } else { > + if (!hv_hypercall_pg) > + return U64_MAX; > > - __asm__ __volatile__("mov %4, %%r8\n" > - CALL_NOSPEC > - : "=a" (hv_status), ASM_CALL_CONSTRAINT, > - "+c" (control), "+d" (input_address) > - : "r" (output_address), > - THUNK_TARGET(hv_hypercall_pg) > - : "cc", "memory", "r8", "r9", "r10", "r11"); > + __asm__ __volatile__("mov %4, %%r8\n" > + CALL_NOSPEC > + : "=a" (hv_status), ASM_CALL_CONSTRAINT, > + "+c" (control), "+d" (input_address) > + : "r" (output_address), > + THUNK_TARGET(hv_hypercall_pg) > + : "cc", "memory", "r8", "r9", "r10", "r11"); > + } > #else > u32 input_address_hi = upper_32_bits(input_address); > u32 input_address_lo = lower_32_bits(input_address); > @@ -104,7 +113,13 @@ static inline u64 _hv_do_fast_hypercall8(u64 control, u64 input1) > u64 hv_status; > > #ifdef CONFIG_X86_64 > - { > + if (hv_isolation_type_en_snp()) { > + __asm__ __volatile__( > + "vmmcall" > + : "=a" (hv_status), ASM_CALL_CONSTRAINT, > + "+c" (control), "+d" (input1) > + :: "cc", "r8", "r9", "r10", "r11"); > + } else { > __asm__ __volatile__(CALL_NOSPEC > : "=a" (hv_status), ASM_CALL_CONSTRAINT, > "+c" (control), "+d" (input1) > @@ -149,7 +164,14 @@ static inline u64 _hv_do_fast_hypercall16(u64 control, u64 input1, u64 input2) > u64 hv_status; > > #ifdef CONFIG_X86_64 > - { > + if (hv_isolation_type_en_snp()) { > + __asm__ __volatile__("mov %4, %%r8\n" > + "vmmcall" > + : "=a" (hv_status), ASM_CALL_CONSTRAINT, > + "+c" (control), "+d" (input1) > + : "r" (input2) > + : "cc", "r8", "r9", "r10", "r11"); > + } else { > __asm__ __volatile__("mov %4, %%r8\n" > CALL_NOSPEC > : "=a" (hv_status), ASM_CALL_CONSTRAINT, -- Vitaly