From 0fbbf07ce67d2edb869572bf41b507a5f6851c72 Mon Sep 17 00:00:00 2001
From: Mathieu Desnoyers
Date: Tue, 27 Jun 2023 11:29:22 -0400
Subject: selftests/rseq: Fix arm64 buggy load-acquire/store-release macros

The arm64 load-acquire/store-release macros from the Linux kernel rseq
selftests are buggy: the load-acquire macros direct their asm output
operand at the source memory location *p instead of at the local
____p1 result variable, so the macro returns an uninitialized value.
Replace them with a working implementation.

Signed-off-by: Mathieu Desnoyers
Cc: Catalin Marinas
Cc: Will Deacon
Cc: Peter Zijlstra
Signed-off-by: Shuah Khan
---
 tools/testing/selftests/rseq/rseq-arm64.h | 58 ++++++++++++++++---------------
 1 file changed, 30 insertions(+), 28 deletions(-)

(limited to 'tools/testing/selftests/rseq')

diff --git a/tools/testing/selftests/rseq/rseq-arm64.h b/tools/testing/selftests/rseq/rseq-arm64.h
index 85b90977e7e6..21e1626a7235 100644
--- a/tools/testing/selftests/rseq/rseq-arm64.h
+++ b/tools/testing/selftests/rseq/rseq-arm64.h
@@ -27,59 +27,61 @@
 
 #define rseq_smp_load_acquire(p)                               \
 __extension__ ({                                               \
-        __typeof(*p) ____p1;                                   \
-        switch (sizeof(*p)) {                                  \
+        union { rseq_unqual_scalar_typeof(*(p)) __val; char __c[sizeof(*(p))]; } __u; \
+        switch (sizeof(*(p))) {                                \
         case 1:                                                \
-                asm volatile ("ldarb %w0, %1"                  \
-                        : "=r" (*(__u8 *)p)                    \
-                        : "Q" (*p) : "memory");                \
+                __asm__ __volatile__ ("ldarb %w0, %1"          \
+                        : "=r" (*(__u8 *)__u.__c)              \
+                        : "Q" (*(p)) : "memory");              \
                 break;                                         \
         case 2:                                                \
-                asm volatile ("ldarh %w0, %1"                  \
-                        : "=r" (*(__u16 *)p)                   \
-                        : "Q" (*p) : "memory");                \
+                __asm__ __volatile__ ("ldarh %w0, %1"          \
+                        : "=r" (*(__u16 *)__u.__c)             \
+                        : "Q" (*(p)) : "memory");              \
                 break;                                         \
         case 4:                                                \
-                asm volatile ("ldar %w0, %1"                   \
-                        : "=r" (*(__u32 *)p)                   \
-                        : "Q" (*p) : "memory");                \
+                __asm__ __volatile__ ("ldar %w0, %1"           \
+                        : "=r" (*(__u32 *)__u.__c)             \
+                        : "Q" (*(p)) : "memory");              \
                 break;                                         \
         case 8:                                                \
-                asm volatile ("ldar %0, %1"                    \
-                        : "=r" (*(__u64 *)p)                   \
-                        : "Q" (*p) : "memory");                \
+                __asm__ __volatile__ ("ldar %0, %1"            \
+                        : "=r" (*(__u64 *)__u.__c)             \
+                        : "Q" (*(p)) : "memory");              \
                 break;                                         \
         }                                                      \
-        ____p1;                                                \
+        (rseq_unqual_scalar_typeof(*(p)))__u.__val;            \
 })
 
 #define rseq_smp_acquire__after_ctrl_dep()     rseq_smp_rmb()
 
 #define rseq_smp_store_release(p, v)                           \
 do {                                                           \
-        switch (sizeof(*p)) {                                  \
+        union { rseq_unqual_scalar_typeof(*(p)) __val; char __c[sizeof(*(p))]; } __u = \
+                { .__val = (rseq_unqual_scalar_typeof(*(p))) (v) }; \
+        switch (sizeof(*(p))) {                                \
         case 1:                                                \
-                asm volatile ("stlrb %w1, %0"                  \
-                        : "=Q" (*p)                            \
-                        : "r" ((__u8)v)                        \
+                __asm__ __volatile__ ("stlrb %w1, %0"          \
+                        : "=Q" (*(p))                          \
+                        : "r" (*(__u8 *)__u.__c)               \
                         : "memory");                           \
                 break;                                         \
         case 2:                                                \
-                asm volatile ("stlrh %w1, %0"                  \
-                        : "=Q" (*p)                            \
-                        : "r" ((__u16)v)                       \
+                __asm__ __volatile__ ("stlrh %w1, %0"          \
+                        : "=Q" (*(p))                          \
+                        : "r" (*(__u16 *)__u.__c)              \
                         : "memory");                           \
                 break;                                         \
         case 4:                                                \
-                asm volatile ("stlr %w1, %0"                   \
-                        : "=Q" (*p)                            \
-                        : "r" ((__u32)v)                       \
+                __asm__ __volatile__ ("stlr %w1, %0"           \
+                        : "=Q" (*(p))                          \
+                        : "r" (*(__u32 *)__u.__c)              \
                         : "memory");                           \
                 break;                                         \
         case 8:                                                \
-                asm volatile ("stlr %1, %0"                    \
-                        : "=Q" (*p)                            \
-                        : "r" ((__u64)v)                       \
+                __asm__ __volatile__ ("stlr %1, %0"            \
+                        : "=Q" (*(p))                          \
+                        : "r" (*(__u64 *)__u.__c)              \
                         : "memory");                           \
                 break;                                         \
         }                                                      \
 } while (0)
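
The patched macros route the loaded/stored bytes through a union of the
unqualified scalar type and a char array: the asm reads or writes via
__u.__c (char may alias any object), and rseq_unqual_scalar_typeof()
strips volatile/const so __u.__val stays assignable even when *p is
qualified. For reference, a minimal usage sketch of the two macros in a
message-passing pattern; this is illustrative, not part of the patch,
and assumes an arm64 build with the rseq selftests directory on the
include path (the file name and build line below are hypothetical,
e.g. gcc -I tools/testing/selftests/rseq usage-sketch.c -lpthread):

/* usage-sketch.c — hypothetical example, not part of the patch. */
#include <pthread.h>
#include <stdio.h>
#include "rseq.h"	/* pulls in rseq-arm64.h on __aarch64__ */

static int payload;
static int ready;

static void *producer(void *arg)
{
	(void)arg;
	payload = 42;				/* plain store */
	rseq_smp_store_release(&ready, 1);	/* publish with stlr */
	return NULL;
}

static void *consumer(void *arg)
{
	(void)arg;
	/*
	 * Busy-wait until the release store is visible (ldar). The
	 * acquire load orders the later payload read after it, so
	 * payload must read 42, not a stale value.
	 */
	while (!rseq_smp_load_acquire(&ready))
		;
	printf("payload = %d\n", payload);
	return NULL;
}

int main(void)
{
	pthread_t prod, cons;

	pthread_create(&cons, NULL, consumer, NULL);
	pthread_create(&prod, NULL, producer, NULL);
	pthread_join(prod, NULL);
	pthread_join(cons, NULL);
	return 0;
}

With the pre-fix macros, the while condition above would have evaluated
the uninitialized ____p1 rather than the value actually loaded from
ready, so the loop could exit before the release store or never exit at
all.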