Diffstat (limited to 'libc/sysdeps/linux/arc/bits')
-rw-r--r--  libc/sysdeps/linux/arc/bits/atomic.h | 76
1 file changed, 74 insertions(+), 2 deletions(-)
diff --git a/libc/sysdeps/linux/arc/bits/atomic.h b/libc/sysdeps/linux/arc/bits/atomic.h
index 587860964..610b3c7c7 100644
--- a/libc/sysdeps/linux/arc/bits/atomic.h
+++ b/libc/sysdeps/linux/arc/bits/atomic.h
@@ -26,8 +26,10 @@ void __arc_link_error (void);
#ifdef __A7__
#define atomic_full_barrier() __asm__ __volatile__("": : :"memory")
+#define ARC_BARRIER_INSTR ""
#else
#define atomic_full_barrier() __asm__ __volatile__("dmb 3": : :"memory")
+#define ARC_BARRIER_INSTR "dmb 3"
#endif
/* Atomic compare and exchange. */
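ARC_BARRIER_INSTR is a string rather than a statement so that the asm templates later in this file can splice the barrier in through C's adjacent-string-literal concatenation; on ARC700 it collapses to nothing. A throwaway host-side illustration of that mechanism (not part of the patch, the main() is purely hypothetical):

#include <stdio.h>

#define ARC_BARRIER_INSTR "dmb 3"	/* value chosen when !__A7__ */

int main(void)
{
	/* Adjacent string literals merge at compile time, so the asm
	   template ends up as "ex %0, [%1]\ndmb 3" on ARC HS and as
	   plain "ex %0, [%1]\n" on ARC700.  */
	puts("ex %0, [%1]\n" ARC_BARRIER_INSTR);
	return 0;
}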
@@ -38,11 +40,12 @@ void __arc_link_error (void);
#define __arch_compare_and_exchange_val_16_acq(mem, newval, oldval) \
({ __arc_link_error (); oldval; })
-#define __arch_compare_and_exchange_val_64_acq(mem, newval, oldval) \
+#define __arch_compare_and_exchange_val_64_acq(mem, newval, oldval) \
({ __arc_link_error (); oldval; })
#ifdef __CONFIG_ARC_HAS_ATOMICS__
+#ifdef __A7__
#define __arch_compare_and_exchange_val_32_acq(mem, newval, oldval) \
({ \
__typeof(oldval) prev; \
@@ -60,8 +63,55 @@ void __arc_link_error (void);
\
prev; \
})
+#else /* !__A7__ */
+#define USE_ATOMIC_COMPILER_BUILTINS 1
-#else
+#define __arch_compare_and_exchange_val_32_acq(mem, newval, oldval) \
+ ({ \
+ __typeof(*mem) __oldval = (oldval); \
+ __atomic_compare_exchange_n(mem, (void *) &__oldval, newval, 0, \
+ __ATOMIC_ACQUIRE, __ATOMIC_RELAXED); \
+ __oldval; \
+ })
+
+#define __arch_compare_and_exchange_val_8_rel(mem, newval, oldval) \
+ ({ __arc_link_error (); oldval; })
+
+#define __arch_compare_and_exchange_val_16_rel(mem, newval, oldval) \
+ ({ __arc_link_error (); oldval; })
+
+#define __arch_compare_and_exchange_val_64_rel(mem, newval, oldval) \
+ ({ __arc_link_error (); oldval; })
+
+#define __arch_compare_and_exchange_val_32_rel(mem, newval, oldval) \
+ ({ \
+ __typeof(*mem) __oldval = (oldval); \
+ __atomic_compare_exchange_n(mem, (void *) &__oldval, newval, 0, \
+ __ATOMIC_RELEASE, __ATOMIC_RELAXED); \
+ __oldval; \
+ })
+
+/* Compare and exchange with "acquire" semantics, i.e. barrier after */
+#define atomic_compare_and_exchange_val_acq(mem, new, old) \
+ __atomic_val_bysize(__arch_compare_and_exchange_val, acq, \
+ mem, new, old)
+
+/* Compare and exchange with "release" semantics, i.e. barrier before */
+#define atomic_compare_and_exchange_val_rel(mem, new, old) \
+ __atomic_val_bysize(__arch_compare_and_exchange_val, rel, \
+ mem, new, old)
+
+/* Explicitly define here to use release semantics */
+#define atomic_compare_and_exchange_bool_rel(mem, newval, oldval) \
+ ({ \
+ __typeof (oldval) __atg3_old = (oldval); \
+ atomic_compare_and_exchange_val_rel (mem, newval, __atg3_old) \
+ != __atg3_old; \
+ })
+
+#endif /* __A7__ */
+
+#else /* !__CONFIG_ARC_HAS_ATOMICS__ */
#ifndef __NR_arc_usr_cmpxchg
#error "__NR_arc_usr_cmpxchg missing: Please upgrade to kernel 4.9+ headers"
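For non-ARC700 cores the patch stops open-coding LLOCK/SCOND and leans on GCC's __atomic builtins. Read as standalone functions, the two 32-bit compare-and-exchange macros above amount to roughly the following (a sketch; the function names are illustrative, only __atomic_compare_exchange_n is the real builtin):

#include <stdint.h>

/* Sketch of what __arch_compare_and_exchange_val_32_acq boils down to:
   replace *mem with newval if it still holds oldval, with acquire
   ordering on success, and return the value actually found in *mem.  */
static inline uint32_t
cas32_acq(uint32_t *mem, uint32_t newval, uint32_t oldval)
{
	uint32_t expected = oldval;

	/* weak=0: no spurious failure; the failure order is relaxed
	   because no ordering is needed when the compare misses.  */
	__atomic_compare_exchange_n(mem, &expected, newval, 0,
				    __ATOMIC_ACQUIRE, __ATOMIC_RELAXED);
	return expected;	/* oldval on success, current value on failure */
}

/* Release-ordered variant, mirroring __arch_compare_and_exchange_val_32_rel.  */
static inline uint32_t
cas32_rel(uint32_t *mem, uint32_t newval, uint32_t oldval)
{
	uint32_t expected = oldval;

	__atomic_compare_exchange_n(mem, &expected, newval, 0,
				    __ATOMIC_RELEASE, __ATOMIC_RELAXED);
	return expected;
}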
@@ -101,6 +151,21 @@ void __arc_link_error (void);
__typeof__(*(mem)) val = newval; \
\
__asm__ __volatile__( \
+ "ex %0, [%1]\n" \
+ ARC_BARRIER_INSTR \
+ : "+r" (val) \
+ : "r" (mem) \
+ : "memory" ); \
+ \
+ val; \
+ })
+
+#define __arch_exchange_32_rel(mem, newval) \
+ ({ \
+ __typeof__(*(mem)) val = newval; \
+ \
+ __asm__ __volatile__( \
+ ARC_BARRIER_INSTR"\n" \
"ex %0, [%1]" \
: "+r" (val) \
: "r" (mem) \
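The two exchange flavours wrap the same ARC "ex" instruction and differ only in whether ARC_BARRIER_INSTR is emitted after it (acquire) or before it (release). On a toolchain where the builtins are acceptable, an equivalent formulation would be the following (a hedged equivalent using GCC's __atomic_exchange_n, not what the patch emits):

#include <stdint.h>

/* Functionally equivalent sketches of the two exchange flavours; the
   patch itself keeps the "ex" instruction and only moves the dmb
   barrier to the other side of it.  */
static inline uint32_t xchg32_acq(uint32_t *mem, uint32_t newval)
{
	return __atomic_exchange_n(mem, newval, __ATOMIC_ACQUIRE);
}

static inline uint32_t xchg32_rel(uint32_t *mem, uint32_t newval)
{
	return __atomic_exchange_n(mem, newval, __ATOMIC_RELEASE);
}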
@@ -115,3 +180,10 @@ void __arc_link_error (void);
abort(); \
__arch_exchange_32_acq(mem, newval); \
})
+
+#define atomic_exchange_rel(mem, newval) \
+ ({ \
+ if (sizeof(*(mem)) != 4) \
+ abort(); \
+ __arch_exchange_32_rel(mem, newval); \
+ })
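The acquire/release pairing is what a plain test-and-set lock wants: acquire ordering when taking the lock, release ordering when handing it back. A minimal sketch of a caller of the new atomic_exchange_rel, assuming uClibc's internal atomic.h is in scope (the lock variable and functions are hypothetical, not part of the patch):

/* Hypothetical user of the new macros: a test-and-set spinlock.
   Acquire semantics on lock entry keep later accesses from moving
   above the exchange; release semantics on unlock keep earlier
   accesses from moving below it.  */
static int lock_word;	/* 0 = free, 1 = held */

static void spin_lock(void)
{
	/* Spin until the swap of 1 reports that the old value was 0.  */
	while (atomic_exchange_acq(&lock_word, 1) != 0)
		/* busy wait */;
}

static void spin_unlock(void)
{
	/* Barrier before the swap: all prior writes become visible
	   before the lock is seen as free again.  */
	atomic_exchange_rel(&lock_word, 0);
}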