Re: [kvm-unit-tests PATCH v2 1/3] s390x/spec_ex: Use PSW macro

Messed up the recipients on the cover letter...


Instructions on s390 must be halfword aligned, i.e. reside at even addresses.
Add two tests for that: one introduces an odd address into the PSW, the other
targets EXECUTE at an odd address.
Both currently fail when using TCG.
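
For context, both tests boil down to checking that a specification exception
is recognized before anything at an odd address gets executed. Below is a
rough sketch of the EXECUTE variant, written against the generic
expect_pgm_int()/check_pgm_int_code() helpers from lib/s390x; the actual
patches hook into spec_ex.c's own expect/check machinery instead, so take
this as an illustration only:

	/* assumes the usual spec_ex.c includes: libcflat.h, asm/interrupt.h */
	static void sketch_odd_ex_target(void)
	{
		uint64_t pre_target_addr;
		int to = 0, from = 0x0dd;

		expect_pgm_int();
		asm volatile ( ".pushsection .rodata\n"
			"sketch_pre_target:\n"
			"	.balign	2\n"
			/* pad by one byte so the lr below sits at an odd address */
			"	. = . + 1\n"
			"	lr	%[to],%[from]\n"
			"	.popsection\n"

			/* larl can only reach even addresses, so load the even
			   label and add the odd byte via the displacement of ex */
			"	larl	%[pre_target_addr],sketch_pre_target\n"
			"	ex	0,1(%[pre_target_addr])\n"
			: [pre_target_addr] "=&a" (pre_target_addr),
			  [to] "+d" (to)
			: [from] "d" (from)
		);

		/* the exception must have kept the target lr from running */
		check_pgm_int_code(PGM_INT_CODE_SPECIFICATION);
		report(to != from, "did not perform ex with odd target");
	}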

v1 -> v2:
 * rebase
 * use PSW macros
 * simplify odd psw test (thanks Claudio)
 * rename some identifiers
 * pick up R-b (thanks Claudio)

Nina Schoetterl-Glausch (3):
  s390x/spec_ex: Use PSW macro
  s390x/spec_ex: Add test introducing odd address into PSW
  s390x/spec_ex: Add test of EXECUTE with odd target address

 s390x/spec_ex.c | 85 +++++++++++++++++++++++++++++++++++++++++++------
 1 file changed, 76 insertions(+), 9 deletions(-)

Range-diff against v1:
-:  -------- > 1:  d82f4fb6 s390x/spec_ex: Use PSW macro
1:  62f61c07 ! 2:  e537797f s390x/spec_ex: Add test introducing odd address into PSW
    @@ Commit message
         Signed-off-by: Nina Schoetterl-Glausch <nsg@xxxxxxxxxxxxx>
     
      ## s390x/spec_ex.c ##
    -@@ s390x/spec_ex.c: static void fixup_invalid_psw(struct stack_frame_int *stack)
    - /*
    -  * Load possibly invalid psw, but setup fixup_psw before,
    -  * so that fixup_invalid_psw() can bring us back onto the right track.
    -+ * The provided argument is loaded into register 1.
    -  * Also acts as compiler barrier, -> none required in expect/check_invalid_psw
    -  */
    --static void load_psw(struct psw psw)
    -+static void load_psw_with_arg(struct psw psw, uint64_t arg)
    - {
    - 	uint64_t scratch;
    - 
    -@@ s390x/spec_ex.c: static void load_psw(struct psw psw)
    - 	fixup_psw.mask = extract_psw_mask();
    - 	asm volatile ( "larl	%[scratch],0f\n"
    - 		"	stg	%[scratch],%[fixup_addr]\n"
    -+		"	lgr	%%r1,%[arg]\n"
    - 		"	lpswe	%[psw]\n"
    - 		"0:	nop\n"
    - 		: [scratch] "=&d" (scratch),
    - 		  [fixup_addr] "=&T" (fixup_psw.addr)
    --		: [psw] "Q" (psw)
    --		: "cc", "memory"
    -+		: [psw] "Q" (psw),
    -+		  [arg] "d" (arg)
    -+		: "cc", "memory", "%r1"
    - 	);
    - }
    - 
    -+static void load_psw(struct psw psw)
    -+{
    -+	load_psw_with_arg(psw, 0);
    -+}
    -+
    - static void load_short_psw(struct short_psw psw)
    - {
    - 	uint64_t scratch;
     @@ s390x/spec_ex.c: static void expect_invalid_psw(struct psw psw)
      	invalid_psw_expected = true;
      }
      
     +static void clear_invalid_psw(void)
     +{
    -+	expected_psw = (struct psw){0};
    ++	expected_psw = PSW(0, 0);
     +	invalid_psw_expected = false;
     +}
     +
    @@ s390x/spec_ex.c: static void expect_invalid_psw(struct psw psw)
      {
      	/* Since the fixup sets this to false we check for false here. */
      	if (!invalid_psw_expected) {
    ++		/*
    ++		 * Early exception recognition: pgm_int_id == 0.
    ++		 * Late exception recognition: psw address has been
    ++		 *	incremented by pgm_int_id (unpredictable value)
    ++		 */
      		if (expected_psw.mask == invalid_psw.mask &&
     -		    expected_psw.addr == invalid_psw.addr)
     +		    expected_psw.addr == invalid_psw.addr - lowcore.pgm_int_id)
    @@ s390x/spec_ex.c: static int psw_bit_12_is_1(void)
      	return check_invalid_psw();
      }
      
    ++extern char misaligned_code[];
    ++asm (  ".balign	2\n"
    ++"	. = . + 1\n"
    ++"misaligned_code:\n"
    ++"	larl	%r0,0\n"
    ++"	bcr	0xf,%r1\n"
    ++);
    ++
     +static int psw_odd_address(void)
     +{
    -+	struct psw odd = {
    -+		.mask = extract_psw_mask(),
    -+	};
    -+	uint64_t regs[16];
    -+	int r;
    ++	struct psw odd = PSW_WITH_CUR_MASK((uint64_t)&misaligned_code);
    ++	uint64_t executed_addr;
     +
    -+	/*
    -+	 * This asm is reentered at an odd address, which should cause a specification
    -+	 * exception before the first unaligned instruction is executed.
    -+	 * In this case, the interrupt handler fixes the address and the test succeeds.
    -+	 * If, however, unaligned instructions *are* executed, they are jumped to
    -+	 * from somewhere, with unknown registers, so save and restore those before.
    -+	 */
    -+	asm volatile ( "stmg	%%r0,%%r15,%[regs]\n"
    -+		//can only offset by even number when using larl -> increment by one
    -+		"	larl	%[r],0f\n"
    -+		"	aghi	%[r],1\n"
    -+		"	stg	%[r],%[addr]\n"
    -+		"	xr	%[r],%[r]\n"
    -+		"	brc	0xf,1f\n"
    -+		"0:	. = . + 1\n"
    -+		"	lmg	%%r0,%%r15,0(%%r1)\n"
    -+		//address of the instruction itself, should be odd, store for assert
    -+		"	larl	%[r],0\n"
    -+		"	stg	%[r],%[addr]\n"
    -+		"	larl	%[r],0f\n"
    -+		"	aghi	%[r],1\n"
    -+		"	bcr	0xf,%[r]\n"
    -+		"0:	. = . + 1\n"
    -+		"1:\n"
    -+	: [addr] "=T" (odd.addr),
    -+	  [regs] "=Q" (regs),
    -+	  [r] "=d" (r)
    -+	: : "cc", "memory"
    ++	expect_invalid_psw(odd);
    ++	fixup_psw.mask = extract_psw_mask();
    ++	asm volatile ( "xr	%%r0,%%r0\n"
    ++		"	larl	%%r1,0f\n"
    ++		"	stg	%%r1,%[fixup_addr]\n"
    ++		"	lpswe	%[odd_psw]\n"
    ++		"0:	lr	%[executed_addr],%%r0\n"
    ++	: [fixup_addr] "=&T" (fixup_psw.addr),
    ++	  [executed_addr] "=d" (executed_addr)
    ++	: [odd_psw] "Q" (odd)
    ++	: "cc", "%r0", "%r1"
     +	);
     +
    -+	if (!r) {
    -+		expect_invalid_psw(odd);
    -+		load_psw_with_arg(odd, (uint64_t)&regs);
    ++	if (!executed_addr) {
     +		return check_invalid_psw();
     +	} else {
    -+		assert(odd.addr & 1);
    ++		assert(executed_addr == odd.addr);
     +		clear_invalid_psw();
    -+		report_fail("executed unaligned instructions");
    ++		report_fail("did not execute unaligned instructions");
     +		return 1;
     +	}
     +}
2:  30075863 ! 3:  dc552880 s390x/spec_ex: Add test of EXECUTE with odd target address
    @@ s390x/spec_ex.c: static int short_psw_bit_12_is_0(void)
      
     +static int odd_ex_target(void)
     +{
    -+	uint64_t target_addr_pre;
    ++	uint64_t pre_target_addr;
     +	int to = 0, from = 0x0dd;
     +
     +	asm volatile ( ".pushsection .rodata\n"
    -+		"odd_ex_target_pre_insn:\n"
    -+		"	.balign 2\n"
    ++		"pre_odd_ex_target:\n"
    ++		"	.balign	2\n"
     +		"	. = . + 1\n"
     +		"	lr	%[to],%[from]\n"
     +		"	.popsection\n"
     +
    -+		"	larl	%[target_addr_pre],odd_ex_target_pre_insn\n"
    -+		"	ex	0,1(%[target_addr_pre])\n"
    -+		: [target_addr_pre] "=&a" (target_addr_pre),
    ++		"	larl	%[pre_target_addr],pre_odd_ex_target\n"
    ++		"	ex	0,1(%[pre_target_addr])\n"
    ++		: [pre_target_addr] "=&a" (pre_target_addr),
     +		  [to] "+d" (to)
     +		: [from] "d" (from)
     +	);
     +
    -+	assert((target_addr_pre + 1) & 1);
    ++	assert((pre_target_addr + 1) & 1);
     +	report(to != from, "did not perform ex with odd target");
     +	return 0;
     +}

base-commit: e3c5c3ef2524c58023073c0fadde2e8ae3c04ec6
-- 
2.36.1




