-rw-r--r--  include/sbi/riscv_atomic.h |  3 +++
-rw-r--r--  lib/sbi/riscv_atomic.c     | 16 ++++++++++++++++
2 files changed, 19 insertions(+), 0 deletions(-)
diff --git a/include/sbi/riscv_atomic.h b/include/sbi/riscv_atomic.h
index 40efb68..8543f4d 100644
--- a/include/sbi/riscv_atomic.h
+++ b/include/sbi/riscv_atomic.h
@@ -35,6 +35,9 @@ long arch_atomic_xchg(atomic_t *atom, long newval);
 unsigned int atomic_raw_xchg_uint(volatile unsigned int *ptr,
 				  unsigned int newval);
+
+unsigned long atomic_raw_xchg_ulong(volatile unsigned long *ptr,
+				    unsigned long newval);
 
 /**
  * Set a bit in an atomic variable and return the new value.
  * @nr : Bit to set.
diff --git a/lib/sbi/riscv_atomic.c b/lib/sbi/riscv_atomic.c
index 34bf522..996e893 100644
--- a/lib/sbi/riscv_atomic.c
+++ b/lib/sbi/riscv_atomic.c
@@ -175,6 +175,22 @@ unsigned int atomic_raw_xchg_uint(volatile unsigned int *ptr,
 #endif
 }
 
+unsigned long atomic_raw_xchg_ulong(volatile unsigned long *ptr,
+				    unsigned long newval)
+{
+	/* Atomically set new value and return old value. */
+#ifdef __riscv_atomic
+	/*
+	 * The name of GCC built-in macro __sync_lock_test_and_set()
+	 * is misleading. A more appropriate name for GCC built-in
+	 * macro would be __sync_val_exchange().
+	 */
+	return __sync_lock_test_and_set(ptr, newval);
+#else
+	return xchg(ptr, newval);
+#endif
+}
+
 #if (BITS_PER_LONG == 64)
 #define __AMO(op)	"amo" #op ".d"
 #elif (BITS_PER_LONG == 32)
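
For reference, a minimal caller sketch using the new atomic_raw_xchg_ulong() helper; the demo_flag variable and the demo_acquire()/demo_release() functions are hypothetical names used only for illustration and are not part of this patch:

#include <sbi/riscv_atomic.h>

/* Hypothetical flag claimed and released via atomic exchange. */
static volatile unsigned long demo_flag;

static void demo_acquire(void)
{
	/* Spin until the returned old value is 0, i.e. we took the flag. */
	while (atomic_raw_xchg_ulong(&demo_flag, 1UL))
		;
}

static void demo_release(void)
{
	/* Store 0 back; the returned old value (1) is ignored here. */
	atomic_raw_xchg_ulong(&demo_flag, 0UL);
}

The helper mirrors the existing atomic_raw_xchg_uint() but operates on unsigned long, so it exchanges 32-bit values on RV32 and 64-bit values on RV64.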