Instructions on s390 must be halfword aligned.
Introducing an odd instruction address into the PSW leads to a
specification exception when attempting to execute the instruction at
the odd address.
Add a test for this.

Signed-off-by: Nina Schoetterl-Glausch <nsg@xxxxxxxxxxxxx>
---
 s390x/spec_ex.c | 73 ++++++++++++++++++++++++++++++++++++++++++++++---
 1 file changed, 69 insertions(+), 4 deletions(-)

diff --git a/s390x/spec_ex.c b/s390x/spec_ex.c
index 42ecaed3..b6764677 100644
--- a/s390x/spec_ex.c
+++ b/s390x/spec_ex.c
@@ -44,9 +44,10 @@ static void fixup_invalid_psw(struct stack_frame_int *stack)
 /*
  * Load possibly invalid psw, but setup fixup_psw before,
  * so that fixup_invalid_psw() can bring us back onto the right track.
+ * The provided argument is loaded into register 1.
  * Also acts as compiler barrier, -> none required in expect/check_invalid_psw
  */
-static void load_psw(struct psw psw)
+static void load_psw_with_arg(struct psw psw, uint64_t arg)
 {
 	uint64_t scratch;
 
@@ -57,15 +58,22 @@ static void load_psw(struct psw psw)
 	fixup_psw.mask = extract_psw_mask();
 	asm volatile ( "larl	%[scratch],0f\n"
 		"	stg	%[scratch],%[fixup_addr]\n"
+		"	lgr	%%r1,%[arg]\n"
 		"	lpswe	%[psw]\n"
 		"0:	nop\n"
 		: [scratch] "=&d" (scratch),
 		  [fixup_addr] "=&T" (fixup_psw.addr)
-		: [psw] "Q" (psw)
-		: "cc", "memory"
+		: [psw] "Q" (psw),
+		  [arg] "d" (arg)
+		: "cc", "memory", "%r1"
 	);
 }
 
+static void load_psw(struct psw psw)
+{
+	load_psw_with_arg(psw, 0);
+}
+
 static void load_short_psw(struct short_psw psw)
 {
 	uint64_t scratch;
@@ -88,12 +96,18 @@ static void expect_invalid_psw(struct psw psw)
 	invalid_psw_expected = true;
 }
 
+static void clear_invalid_psw(void)
+{
+	expected_psw = (struct psw){0};
+	invalid_psw_expected = false;
+}
+
 static int check_invalid_psw(void)
 {
 	/* Since the fixup sets this to false we check for false here. */
 	if (!invalid_psw_expected) {
 		if (expected_psw.mask == invalid_psw.mask &&
-		    expected_psw.addr == invalid_psw.addr)
+		    expected_psw.addr == invalid_psw.addr - lowcore.pgm_int_id)
 			return 0;
 		report_fail("Wrong invalid PSW");
 	} else {
@@ -115,6 +129,56 @@ static int psw_bit_12_is_1(void)
 	return check_invalid_psw();
 }
 
+static int psw_odd_address(void)
+{
+	struct psw odd = {
+		.mask = extract_psw_mask(),
+	};
+	uint64_t regs[16];
+	int r;
+
+	/*
+	 * This asm is reentered at an odd address, which should cause a specification
+	 * exception before the first unaligned instruction is executed.
+	 * In this case, the interrupt handler fixes the address and the test succeeds.
+	 * If, however, unaligned instructions *are* executed, they are jumped to
+	 * from somewhere, with unknown registers, so save and restore those before.
+	 */
+	asm volatile ( "stmg	%%r0,%%r15,%[regs]\n"
+		//can only offset by even number when using larl -> increment by one
+		"	larl	%[r],0f\n"
+		"	aghi	%[r],1\n"
+		"	stg	%[r],%[addr]\n"
+		"	xr	%[r],%[r]\n"
+		"	brc	0xf,1f\n"
+		"0:	. = . + 1\n"
+		"	lmg	%%r0,%%r15,0(%%r1)\n"
+		//address of the instruction itself, should be odd, store for assert
+		"	larl	%[r],0\n"
+		"	stg	%[r],%[addr]\n"
+		"	larl	%[r],0f\n"
+		"	aghi	%[r],1\n"
+		"	bcr	0xf,%[r]\n"
+		"0:	. = . + 1\n"
+		"1:\n"
+		: [addr] "=T" (odd.addr),
+		  [regs] "=Q" (regs),
+		  [r] "=d" (r)
+		: : "cc", "memory"
+	);
+
+	if (!r) {
+		expect_invalid_psw(odd);
+		load_psw_with_arg(odd, (uint64_t)&regs);
+		return check_invalid_psw();
+	} else {
+		assert(odd.addr & 1);
+		clear_invalid_psw();
+		report_fail("executed unaligned instructions");
+		return 1;
+	}
+}
+
 /* A short PSW needs to have bit 12 set to be valid. */
 static int short_psw_bit_12_is_0(void)
 {
@@ -176,6 +240,7 @@ struct spec_ex_trigger {
 static const struct spec_ex_trigger spec_ex_triggers[] = {
 	{ "psw_bit_12_is_1", &psw_bit_12_is_1, false, &fixup_invalid_psw },
 	{ "short_psw_bit_12_is_0", &short_psw_bit_12_is_0, false, &fixup_invalid_psw },
+	{ "psw_odd_address", &psw_odd_address, false, &fixup_invalid_psw },
 	{ "bad_alignment", &bad_alignment, true, NULL },
 	{ "not_even", &not_even, true, NULL },
 	{ NULL, NULL, false, NULL },
-- 
2.36.1
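
For readers not looking at the full spec_ex.c while reviewing the last hunk:
spec_ex_triggers[] is a NULL-terminated table that the test harness walks,
calling each trigger function in turn, so registering the new test only takes
the single added line. The standalone sketch below illustrates that dispatch
pattern; it is hypothetical, not part of the patch or of kvm-unit-tests, and
the entry layout and trigger functions are simplified stand-ins.

/* Hypothetical, simplified sketch of walking a NULL-terminated trigger table. */
#include <stddef.h>
#include <stdio.h>

struct trigger {
	const char *name;
	int (*func)(void);	/* returns 0 on success, like the triggers above */
};

static int trigger_ok(void)   { return 0; }
static int trigger_fail(void) { return 1; }

static const struct trigger triggers[] = {
	{ "trigger_ok", &trigger_ok },
	{ "trigger_fail", &trigger_fail },
	{ NULL, NULL },		/* sentinel terminates the walk */
};

int main(void)
{
	const struct trigger *t;

	/* walk the table until the NULL sentinel and report each result */
	for (t = triggers; t->name; t++)
		printf("%s: %s\n", t->name, t->func() ? "fail" : "ok");
	return 0;
}

The real entries in spec_ex.c carry two additional fields (a flag and the
fixup handler pointer), as the hunk above shows.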