According to section "Canonicalization and Consistency Checks" in APM vol. 2,
the following guest state is illegal:

	"Any MBZ bit of CR3 is set."
	"Any MBZ bit of CR4 is set."

Signed-off-by: Krish Sadhukhan <krish.sadhukhan@xxxxxxxxxx>
---
 x86/svm.h       |  5 +++
 x86/svm_tests.c | 95 ++++++++++++++++++++++++++++++++++++++++++++++++++++++---
 2 files changed, 96 insertions(+), 4 deletions(-)

diff --git a/x86/svm.h b/x86/svm.h
index 457ce3c..f6b9a31 100644
--- a/x86/svm.h
+++ b/x86/svm.h
@@ -325,6 +325,11 @@ struct __attribute__ ((__packed__)) vmcb {
 #define SVM_CR0_SELECTIVE_MASK (X86_CR0_TS | X86_CR0_MP)
 
 #define SVM_CR0_RESERVED_MASK 0xffffffff00000000U
+#define SVM_CR3_LEGACY_RESERVED_MASK 0xfe7U
+#define SVM_CR3_LEGACY_PAE_RESERVED_MASK 0x7U
+#define SVM_CR3_LONG_RESERVED_MASK 0xfff0000000000fe7U
+#define SVM_CR4_LEGACY_RESERVED_MASK 0xffcaf000U
+#define SVM_CR4_RESERVED_MASK 0xffffffffffcaf000U
 #define SVM_DR6_RESERVED_MASK 0xffffffffffff1ff0U
 #define SVM_DR7_RESERVED_MASK 0xffffffff0000cc00U
 #define SVM_EFER_RESERVED_MASK 0xffffffffffff0200U
diff --git a/x86/svm_tests.c b/x86/svm_tests.c
index d4d130f..c59e7eb 100644
--- a/x86/svm_tests.c
+++ b/x86/svm_tests.c
@@ -1913,6 +1913,32 @@ static void basic_guest_main(struct svm_test *test)
 	}						\
 }
 
+#define SVM_TEST_CR_RESERVED_BITS(start, end, inc, cr, val, resv_mask)	\
+{									\
+	u64 tmp, mask;							\
+	int i;								\
+									\
+	for (i = start; i <= end; i = i + inc) {			\
+		mask = 1ull << i;					\
+		if (!(mask & resv_mask))				\
+			continue;					\
+		tmp = val | mask;					\
+		switch (cr) {						\
+		case 0:							\
+			vmcb->save.cr0 = tmp;				\
+			break;						\
+		case 3:							\
+			vmcb->save.cr3 = tmp;				\
+			break;						\
+		case 4:							\
+			vmcb->save.cr4 = tmp;				\
+			break;						\
+		}							\
+		report(svm_vmrun() == SVM_EXIT_ERR, "Test CR%d %d:%d: %lx",\
+		    cr, end, start, tmp);				\
+	}								\
+}
+
 static void svm_guest_state_test(void)
 {
 	test_set_guest(basic_guest_main);
@@ -1938,17 +1964,21 @@ static void svm_guest_state_test(void)
 	cr0 |= X86_CR0_CD;
 	cr0 &= ~X86_CR0_NW;
 	vmcb->save.cr0 = cr0;
-	report (svm_vmrun() == SVM_EXIT_VMMCALL, "CR0: %lx", cr0);
+	report (svm_vmrun() == SVM_EXIT_VMMCALL, "Test CR0 CD=1,NW=0: %lx",
+	    cr0);
 	cr0 |= X86_CR0_NW;
 	vmcb->save.cr0 = cr0;
-	report (svm_vmrun() == SVM_EXIT_VMMCALL, "CR0: %lx", cr0);
+	report (svm_vmrun() == SVM_EXIT_VMMCALL, "Test CR0 CD=1,NW=1: %lx",
+	    cr0);
 	cr0 &= ~X86_CR0_NW;
 	cr0 &= ~X86_CR0_CD;
 	vmcb->save.cr0 = cr0;
-	report (svm_vmrun() == SVM_EXIT_VMMCALL, "CR0: %lx", cr0);
+	report (svm_vmrun() == SVM_EXIT_VMMCALL, "Test CR0 CD=0,NW=0: %lx",
+	    cr0);
 	cr0 |= X86_CR0_NW;
 	vmcb->save.cr0 = cr0;
-	report (svm_vmrun() == SVM_EXIT_ERR, "CR0: %lx", cr0);
+	report (svm_vmrun() == SVM_EXIT_ERR, "Test CR0 CD=0,NW=1: %lx",
+	    cr0);
 	vmcb->save.cr0 = cr0_saved;
 
 	/*
@@ -1961,6 +1991,63 @@ static void svm_guest_state_test(void)
 	vmcb->save.cr0 = cr0_saved;
 
 	/*
+	 * CR3 MBZ bits based on different modes:
+	 *   [2:0] - legacy PAE
+	 *   [2:0], [11:5] - legacy non-PAE
+	 *   [2:0], [11:5], [63:52] - long mode
+	 */
+	u64 cr3_saved = vmcb->save.cr3;
+	u64 cr4_saved = vmcb->save.cr4;
+	u64 cr4 = cr4_saved;
+	efer_saved = vmcb->save.efer;
+	efer = efer_saved;
+
+	efer &= ~EFER_LMA;
+	vmcb->save.efer = efer;
+	cr4 |= X86_CR4_PAE;
+	vmcb->save.cr4 = cr4;
+	SVM_TEST_CR_RESERVED_BITS(0, 2, 1, 3, cr3_saved,
+	    SVM_CR3_LEGACY_PAE_RESERVED_MASK);
+
+	cr4 = cr4_saved & ~X86_CR4_PAE;
+	vmcb->save.cr4 = cr4;
+	SVM_TEST_CR_RESERVED_BITS(0, 11, 1, 3, cr3_saved,
+	    SVM_CR3_LEGACY_RESERVED_MASK);
+
+	cr4 |= X86_CR4_PAE;
+	vmcb->save.cr4 = cr4;
+	efer |= EFER_LMA;
+	vmcb->save.efer = efer;
+	SVM_TEST_CR_RESERVED_BITS(0, 63, 1, 3, cr3_saved,
+	    SVM_CR3_LONG_RESERVED_MASK);
+
+	vmcb->save.cr4 = cr4_saved;
+	vmcb->save.cr3 = cr3_saved;
+	vmcb->save.efer = efer_saved;
+
+	/*
+	 * CR4 MBZ bits based on different modes:
+	 *   [15:12], 17, 19, [31:22] - legacy mode
+	 *   [15:12], 17, 19, [63:22] - long mode
+	 */
+	cr4_saved = vmcb->save.cr4;
+	efer_saved = vmcb->save.efer;
+	efer &= ~EFER_LMA;
+	vmcb->save.efer = efer;
+	SVM_TEST_CR_RESERVED_BITS(12, 31, 1, 4, cr4_saved,
+	    SVM_CR4_LEGACY_RESERVED_MASK);
+
+	efer |= EFER_LMA;
+	vmcb->save.efer = efer;
+	SVM_TEST_CR_RESERVED_BITS(12, 31, 1, 4, cr4_saved,
+	    SVM_CR4_RESERVED_MASK);
+	SVM_TEST_CR_RESERVED_BITS(32, 63, 1, 4, cr4_saved,
+	    SVM_CR4_RESERVED_MASK);
+
+	vmcb->save.cr4 = cr4_saved;
+	vmcb->save.efer = efer_saved;
+
+	/*
 	 * DR6[63:32] and DR7[63:32] are MBZ
 	 */
 	u64 dr_saved = vmcb->save.dr6;
-- 
1.8.3.1