arm64: barrier: Use '__unqual_scalar_typeof' for acquire/release macros
Passing volatile-qualified pointers to the arm64 implementations of the
load-acquire/store-release macros results in a re-load from the stack and
a bunch of associated stack-protector churn due to the temporary result
variable inheriting the volatile semantics thanks to the use of 'typeof()'.

Define these temporary variables using '__unqual_scalar_typeof' to drop
the volatile qualifier in the case that they are scalar types.

Cc: Linus Torvalds <torvalds@linux-foundation.org>
Cc: Peter Zijlstra <peterz@infradead.org>
Cc: Arnd Bergmann <arnd@arndb.de>
Acked-by: Mark Rutland <mark.rutland@arm.com>
Signed-off-by: Will Deacon <will@kernel.org>
parent 549887271a
commit 10223c5286
--- a/arch/arm64/include/asm/barrier.h
+++ b/arch/arm64/include/asm/barrier.h
@@ -76,8 +76,8 @@ static inline unsigned long array_index_mask_nospec(unsigned long idx,
 #define __smp_store_release(p, v)                                      \
 do {                                                                   \
        typeof(p) __p = (p);                                            \
-       union { typeof(*p) __val; char __c[1]; } __u =                  \
-               { .__val = (__force typeof(*p)) (v) };                  \
+       union { __unqual_scalar_typeof(*p) __val; char __c[1]; } __u =  \
+               { .__val = (__force __unqual_scalar_typeof(*p)) (v) };  \
        compiletime_assert_atomic_type(*p);                             \
        kasan_check_write(__p, sizeof(*p));                             \
        switch (sizeof(*p)) {                                           \
@@ -110,7 +110,7 @@ do {                                                                   \
 
 #define __smp_load_acquire(p)                                          \
 ({                                                                     \
-       union { typeof(*p) __val; char __c[1]; } __u;                   \
+       union { __unqual_scalar_typeof(*p) __val; char __c[1]; } __u;   \
        typeof(p) __p = (p);                                            \
        compiletime_assert_atomic_type(*p);                             \
        kasan_check_read(__p, sizeof(*p));                              \
@@ -136,33 +136,33 @@ do {                                                                   \
                : "Q" (*__p) : "memory");                               \
                break;                                                  \
        }                                                               \
-       __u.__val;                                                      \
+       (typeof(*p))__u.__val;                                          \
 })
 
 #define smp_cond_load_relaxed(ptr, cond_expr)                          \
 ({                                                                     \
        typeof(ptr) __PTR = (ptr);                                      \
-       typeof(*ptr) VAL;                                               \
+       __unqual_scalar_typeof(*ptr) VAL;                               \
        for (;;) {                                                      \
                VAL = READ_ONCE(*__PTR);                                \
                if (cond_expr)                                          \
                        break;                                          \
                __cmpwait_relaxed(__PTR, VAL);                          \
        }                                                               \
-       VAL;                                                            \
+       (typeof(*ptr))VAL;                                              \
 })
 
 #define smp_cond_load_acquire(ptr, cond_expr)                          \
 ({                                                                     \
        typeof(ptr) __PTR = (ptr);                                      \
-       typeof(*ptr) VAL;                                               \
+       __unqual_scalar_typeof(*ptr) VAL;                               \
        for (;;) {                                                      \
                VAL = smp_load_acquire(__PTR);                          \
                if (cond_expr)                                          \
                        break;                                          \
                __cmpwait_relaxed(__PTR, VAL);                          \
        }                                                               \
-       VAL;                                                            \
+       (typeof(*ptr))VAL;                                              \
 })
 
 #include <asm-generic/barrier.h>
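For illustration only (not part of the patch): the qualifier-dropping trick that '__unqual_scalar_typeof' relies on can be shown in a few lines of userspace C. The sketch below is a simplified stand-in for the kernel's definition in <linux/compiler_types.h>; the macro name 'demo_unqual_scalar_typeof' and its selection list are assumptions made for the demo. It shows why a temporary declared with plain 'typeof()' inherits the volatile qualifier, while the _Generic-based form yields an ordinary scalar.

#include <stdio.h>

/*
 * Simplified demo macro (not the kernel's exact definition): _Generic
 * ignores qualifiers on the controlling expression, so selecting a plain
 * scalar constant of the matching type gives typeof() an unqualified type.
 * Non-scalar expressions fall through to 'default' and keep their type.
 */
#define demo_unqual_scalar_typeof(x) typeof(                    \
        _Generic((x),                                           \
                 char:               (char)0,                   \
                 signed char:        (signed char)0,            \
                 unsigned char:      (unsigned char)0,          \
                 short:              (short)0,                  \
                 unsigned short:     (unsigned short)0,         \
                 int:                (int)0,                    \
                 unsigned int:       (unsigned int)0,           \
                 long:               (long)0,                   \
                 unsigned long:      (unsigned long)0,          \
                 long long:          (long long)0,              \
                 unsigned long long: (unsigned long long)0,     \
                 default:            (x)))

static volatile int shared = 42;

int main(void)
{
        volatile int *p = &shared;

        /* typeof(*p) is 'volatile int': every use of tmp_v is a forced
         * memory access, which is the source of the stack re-loads the
         * commit message describes. */
        typeof(*p) tmp_v = *p;

        /* The _Generic form yields plain 'int': the temporary may live in
         * a register. */
        demo_unqual_scalar_typeof(*p) tmp = *p;

        printf("%d %d\n", tmp_v, tmp);
        return 0;
}

Built with GCC or Clang (GNU 'typeof' plus C11 _Generic), the second temporary loses the volatile qualifier while non-scalar types are left untouched, which is the behaviour the __val and VAL temporaries in the macros above depend on.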