On Tue, 4 Apr 2023 12:24:37 +0200
Thomas Huth <thuth@xxxxxxxxxx> wrote:

> stpx, spx, stap and stidp use addressing via "base register", i.e.
> if register 0 is used, the base address will be 0, independent from
> the value of the register. Thus we must not use the "r" constraint
> here to avoid register 0. This fixes test failures when compiling
> with Clang instead of GCC, since Clang apparently prefers to use
> register 0 in some cases where GCC never uses register 0.
>
> Signed-off-by: Thomas Huth <thuth@xxxxxxxxxx>

maybe you can also add a couple of Fixes tags:

Fixes: 2667b05e ("s390x: Interception tests")
Fixes: 484a3a57 ("s390x: add stidp interception test")

in any case:

Reviewed-by: Claudio Imbrenda <imbrenda@xxxxxxxxxxxxx>

> ---
>  s390x/intercept.c | 22 +++++++++++-----------
>  1 file changed, 11 insertions(+), 11 deletions(-)
>
> diff --git a/s390x/intercept.c b/s390x/intercept.c
> index 9e826b6c..faa74bbb 100644
> --- a/s390x/intercept.c
> +++ b/s390x/intercept.c
> @@ -36,16 +36,16 @@ static void test_stpx(void)
>
>  	expect_pgm_int();
>  	low_prot_enable();
> -	asm volatile(" stpx 0(%0) " : : "r"(8));
> +	asm volatile(" stpx 0(%0) " : : "a"(8));
>  	low_prot_disable();
>  	check_pgm_int_code(PGM_INT_CODE_PROTECTION);
>
>  	expect_pgm_int();
> -	asm volatile(" stpx 0(%0) " : : "r"(1));
> +	asm volatile(" stpx 0(%0) " : : "a"(1));
>  	check_pgm_int_code(PGM_INT_CODE_SPECIFICATION);
>
>  	expect_pgm_int();
> -	asm volatile(" stpx 0(%0) " : : "r"(-8L));
> +	asm volatile(" stpx 0(%0) " : : "a"(-8L));
>  	check_pgm_int_code(PGM_INT_CODE_ADDRESSING);
>  }
>
> @@ -70,13 +70,13 @@ static void test_spx(void)
>
>  	report_prefix_push("operand not word aligned");
>  	expect_pgm_int();
> -	asm volatile(" spx 0(%0) " : : "r"(1));
> +	asm volatile(" spx 0(%0) " : : "a"(1));
>  	check_pgm_int_code(PGM_INT_CODE_SPECIFICATION);
>  	report_prefix_pop();
>
>  	report_prefix_push("operand outside memory");
>  	expect_pgm_int();
> -	asm volatile(" spx 0(%0) " : : "r"(-8L));
> +	asm volatile(" spx 0(%0) " : : "a"(-8L));
>  	check_pgm_int_code(PGM_INT_CODE_ADDRESSING);
>  	report_prefix_pop();
>
> @@ -113,16 +113,16 @@ static void test_stap(void)
>
>  	expect_pgm_int();
>  	low_prot_enable();
> -	asm volatile ("stap 0(%0)\n" : : "r"(8));
> +	asm volatile ("stap 0(%0)\n" : : "a"(8));
>  	low_prot_disable();
>  	check_pgm_int_code(PGM_INT_CODE_PROTECTION);
>
>  	expect_pgm_int();
> -	asm volatile ("stap 0(%0)\n" : : "r"(1));
> +	asm volatile ("stap 0(%0)\n" : : "a"(1));
>  	check_pgm_int_code(PGM_INT_CODE_SPECIFICATION);
>
>  	expect_pgm_int();
> -	asm volatile ("stap 0(%0)\n" : : "r"(-8L));
> +	asm volatile ("stap 0(%0)\n" : : "a"(-8L));
>  	check_pgm_int_code(PGM_INT_CODE_ADDRESSING);
>  }
>
> @@ -138,16 +138,16 @@ static void test_stidp(void)
>
>  	expect_pgm_int();
>  	low_prot_enable();
> -	asm volatile ("stidp 0(%0)\n" : : "r"(8));
> +	asm volatile ("stidp 0(%0)\n" : : "a"(8));
>  	low_prot_disable();
>  	check_pgm_int_code(PGM_INT_CODE_PROTECTION);
>
>  	expect_pgm_int();
> -	asm volatile ("stidp 0(%0)\n" : : "r"(1));
> +	asm volatile ("stidp 0(%0)\n" : : "a"(1));
>  	check_pgm_int_code(PGM_INT_CODE_SPECIFICATION);
>
>  	expect_pgm_int();
> -	asm volatile ("stidp 0(%0)\n" : : "r"(-8L));
> +	asm volatile ("stidp 0(%0)\n" : : "a"(-8L));
>  	check_pgm_int_code(PGM_INT_CODE_ADDRESSING);
>  }
>
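
A short note on why the constraint letter matters here: GCC's documented
s390 machine constraint "a" stands for an address register, i.e. any
general purpose register except %r0, whereas "r" also allows %r0. The
affected instructions take a D(B)-form operand, and a base field of 0
means "no base register", so if the compiler happens to pick %r0 the
effective address silently collapses to just the displacement. Below is
a minimal sketch of the fixed pattern, assuming those documented
constraint semantics; the helper is hypothetical and not part of
intercept.c:

#include <stdint.h>

/*
 * Hypothetical illustration only (not from s390x/intercept.c):
 * store the 4-byte prefix value at the address held in 'addr'.
 *
 * With the "r" constraint the compiler may allocate %r0 for %0; the
 * CPU then treats the base field as "no base register" and the store
 * goes to absolute address 0 (the displacement) instead of *addr.
 * The "a" constraint limits the operand to %r1-%r15, so *addr is
 * always the target.
 */
static inline void store_prefix_to(uint32_t *addr)
{
	asm volatile("stpx 0(%0)\n" : : "a"(addr) : "memory");
}

The tests deliberately pass raw integer values (8, 1, -8L) to provoke
protection, specification and addressing exceptions; if Clang
materializes one of those constants in %r0 under the "r" constraint,
the effective address becomes 0 and the expected specification or
addressing exception never occurs.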