author    Pavel Kozlov <pavel.kozlov@synopsys.com>    2023-11-09 15:01:53 +0400
committer Waldemar Brodkorb <wbx@openadk.org>         2023-11-10 16:14:19 +0100
commit    dfa72192f19f130d73e6587d0c3f7ef4e22c52c7 (patch)
tree      1d828f1ce8e629f6560f0646fe06cc300380e870 /libc/sysdeps
parent    4bf3912213ed8d0fa937fb4784a3e8e0c8fd8c3c (diff)
arc: add acq/rel variants for atomic cmpxchg/xchg
Add acquire/release variants for the atomic functions cmpxchg/xchg and
provide a memory barrier after/before the exchange. For cmpxchg, use
compiler builtins. For the xchg functions, add the memory barrier
explicitly. These barriers are required to keep memory consistency on
ARCv3 CPU cores in SMP. For ARC700 the barriers are not required, and the
compiler doesn't provide __atomic_compare_exchange*, so keep the current
asm insertion without acquire/release variants on ARC700.

Signed-off-by: Pavel Kozlov <pavel.kozlov@synopsys.com>
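For reference, here is a minimal standalone sketch (not part of the commit) of what the builtin-based CAS path expands to on non-ARC700 cores; the names cas_acq and cas_rel are illustrative only:

#include <stdint.h>

/* Acquire CAS: success order acquire, failure order relaxed,
   matching the macro in the patch. */
static inline uint32_t cas_acq(uint32_t *mem, uint32_t newval, uint32_t oldval)
{
    uint32_t expected = oldval;
    __atomic_compare_exchange_n(mem, &expected, newval, 0,
                                __ATOMIC_ACQUIRE, __ATOMIC_RELAXED);
    return expected; /* previous value of *mem, as the macros return */
}

/* Release CAS: same shape with __ATOMIC_RELEASE on success. */
static inline uint32_t cas_rel(uint32_t *mem, uint32_t newval, uint32_t oldval)
{
    uint32_t expected = oldval;
    __atomic_compare_exchange_n(mem, &expected, newval, 0,
                                __ATOMIC_RELEASE, __ATOMIC_RELAXED);
    return expected;
}

On success, __atomic_compare_exchange_n leaves expected untouched (it already equals the old value); on failure, it stores the observed value there, so returning expected yields the previous contents either way.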
Diffstat (limited to 'libc/sysdeps')
-rw-r--r--  libc/sysdeps/linux/arc/bits/atomic.h  76
1 file changed, 74 insertions(+), 2 deletions(-)
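The barrier placement used for xchg in the diff below — barrier after the exchange for acquire, barrier before it for release — roughly mirrors what the compiler emits for the corresponding builtins. A short sketch for illustration (not code from the patch):

/* Acquire: later loads/stores may not be reordered before the exchange. */
static inline unsigned xchg_acq(unsigned *mem, unsigned val)
{
    return __atomic_exchange_n(mem, val, __ATOMIC_ACQUIRE);
}

/* Release: earlier loads/stores must complete before the exchange. */
static inline unsigned xchg_rel(unsigned *mem, unsigned val)
{
    return __atomic_exchange_n(mem, val, __ATOMIC_RELEASE);
}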
diff --git a/libc/sysdeps/linux/arc/bits/atomic.h b/libc/sysdeps/linux/arc/bits/atomic.h
index 587860964..610b3c7c7 100644
--- a/libc/sysdeps/linux/arc/bits/atomic.h
+++ b/libc/sysdeps/linux/arc/bits/atomic.h
@@ -26,8 +26,10 @@ void __arc_link_error (void);
#ifdef __A7__
#define atomic_full_barrier() __asm__ __volatile__("": : :"memory")
+#define ARC_BARRIER_INSTR ""
#else
#define atomic_full_barrier() __asm__ __volatile__("dmb 3": : :"memory")
+#define ARC_BARRIER_INSTR "dmb 3"
#endif
/* Atomic compare and exchange. */
@@ -38,11 +40,12 @@ void __arc_link_error (void);
#define __arch_compare_and_exchange_val_16_acq(mem, newval, oldval) \
({ __arc_link_error (); oldval; })
-#define __arch_compare_and_exchange_val_64_acq(mem, newval, oldval) \
+#define __arch_compare_and_exchange_val_64_acq(mem, newval, oldval) \
({ __arc_link_error (); oldval; })
#ifdef __CONFIG_ARC_HAS_ATOMICS__
+#ifdef __A7__
#define __arch_compare_and_exchange_val_32_acq(mem, newval, oldval) \
({ \
__typeof(oldval) prev; \
@@ -60,8 +63,55 @@ void __arc_link_error (void);
\
prev; \
})
+#else /* !__A7__ */
+#define USE_ATOMIC_COMPILER_BUILTINS 1
-#else
+#define __arch_compare_and_exchange_val_32_acq(mem, newval, oldval) \
+ ({ \
+ __typeof(*mem) __oldval = (oldval); \
+ __atomic_compare_exchange_n(mem, (void *) &__oldval, newval, 0, \
+ __ATOMIC_ACQUIRE, __ATOMIC_RELAXED); \
+ __oldval; \
+ })
+
+#define __arch_compare_and_exchange_val_8_rel(mem, newval, oldval) \
+ ({ __arc_link_error (); oldval; })
+
+#define __arch_compare_and_exchange_val_16_rel(mem, newval, oldval) \
+ ({ __arc_link_error (); oldval; })
+
+#define __arch_compare_and_exchange_val_64_rel(mem, newval, oldval) \
+ ({ __arc_link_error (); oldval; })
+
+#define __arch_compare_and_exchange_val_32_rel(mem, newval, oldval) \
+ ({ \
+ __typeof(*mem) __oldval = (oldval); \
+ __atomic_compare_exchange_n(mem, (void *) &__oldval, newval, 0, \
+ __ATOMIC_RELEASE, __ATOMIC_RELAXED); \
+ __oldval; \
+ })
+
+/* Compare and exchange with "acquire" semantics, ie barrier after */
+#define atomic_compare_and_exchange_val_acq(mem, new, old) \
+ __atomic_val_bysize(__arch_compare_and_exchange_val, acq, \
+ mem, new, old)
+
+/* Compare and exchange with "release" semantics, ie barrier before */
+#define atomic_compare_and_exchange_val_rel(mem, new, old) \
+ __atomic_val_bysize(__arch_compare_and_exchange_val, rel, \
+ mem, new, old)
+
+/* Explicitly define here to use release semantics */
+#define atomic_compare_and_exchange_bool_rel(mem, newval, oldval) \
+ ({ \
+ __typeof (oldval) __atg3_old = (oldval); \
+ atomic_compare_and_exchange_val_rel (mem, newval, __atg3_old) \
+ != __atg3_old; \
+ })
+
+#endif /* __A7__ */
+
+#else /* !__CONFIG_ARC_HAS_ATOMICS__ */
#ifndef __NR_arc_usr_cmpxchg
#error "__NR_arc_usr_cmpxchg missing: Please upgrade to kernel 4.9+ headers"
@@ -101,6 +151,21 @@ void __arc_link_error (void);
__typeof__(*(mem)) val = newval; \
\
__asm__ __volatile__( \
+ "ex %0, [%1]\n" \
+ ARC_BARRIER_INSTR \
+ : "+r" (val) \
+ : "r" (mem) \
+ : "memory" ); \
+ \
+ val; \
+ })
+
+#define __arch_exchange_32_rel(mem, newval) \
+ ({ \
+ __typeof__(*(mem)) val = newval; \
+ \
+ __asm__ __volatile__( \
+ ARC_BARRIER_INSTR"\n" \
"ex %0, [%1]" \
: "+r" (val) \
: "r" (mem) \
@@ -115,3 +180,10 @@ void __arc_link_error (void);
abort(); \
__arch_exchange_32_acq(mem, newval); \
})
+
+#define atomic_exchange_rel(mem, newval) \
+ ({ \
+ if (sizeof(*(mem)) != 4) \
+ abort(); \
+ __arch_exchange_32_rel(mem, newval); \
+ })
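
As a usage note, the intended pairing of the new variants is acquire on lock acquisition and release on lock release. A hypothetical test-and-set spinlock built on the macros above (spin_lock, spin_unlock, and lock_word are illustrative, not part of the patch):

static int lock_word; /* 0 = free, 1 = held; int is 4 bytes on ARC */

static void spin_lock(void)
{
    /* Acquire: the barrier after the exchange keeps the critical
       section from being reordered before the lock is taken. */
    while (atomic_exchange_acq(&lock_word, 1) != 0)
        ; /* spin until the previous value was 0 (lock was free) */
}

static void spin_unlock(void)
{
    /* Release: the barrier before the exchange makes all stores in
       the critical section visible before the lock reads as free. */
    atomic_exchange_rel(&lock_word, 0);
}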