summaryrefslogtreecommitdiff
path: root/scripts/atomic/fallbacks
diff options
context:
space:
mode:
authorThomas Gleixner <tglx@linutronix.de>2023-03-23 23:55:30 +0300
committerPeter Zijlstra <peterz@infradead.org>2023-03-28 11:39:29 +0300
commite5ab9eff46b04c5a04778e40d7092fed3fda52ca (patch)
tree87cac8205ddaf1e0e81fba3be4f0e8d7dcf7f584 /scripts/atomic/fallbacks
parentfe15c26ee26efa11741a7b632e9f23b01aca4cc6 (diff)
downloadlinux-e5ab9eff46b04c5a04778e40d7092fed3fda52ca.tar.xz
atomics: Provide atomic_add_negative() variants
atomic_add_negative() does not provide the relaxed/acquire/release variants. Provide them in preparation for a new scalable reference count algorithm. Signed-off-by: Thomas Gleixner <tglx@linutronix.de> Signed-off-by: Peter Zijlstra (Intel) <peterz@infradead.org> Acked-by: Mark Rutland <mark.rutland@arm.com> Link: https://lore.kernel.org/r/20230323102800.101763813@linutronix.de
Diffstat (limited to 'scripts/atomic/fallbacks')
-rwxr-xr-xscripts/atomic/fallbacks/add_negative11
1 file changed, 5 insertions, 6 deletions
diff --git a/scripts/atomic/fallbacks/add_negative b/scripts/atomic/fallbacks/add_negative
index 15caa2eb2371..e5980abf5904 100755
--- a/scripts/atomic/fallbacks/add_negative
+++ b/scripts/atomic/fallbacks/add_negative
@@ -1,16 +1,15 @@
cat <<EOF
/**
- * arch_${atomic}_add_negative - add and test if negative
+ * arch_${atomic}_add_negative${order} - Add and test if negative
* @i: integer value to add
* @v: pointer of type ${atomic}_t
*
- * Atomically adds @i to @v and returns true
- * if the result is negative, or false when
- * result is greater than or equal to zero.
+ * Atomically adds @i to @v and returns true if the result is negative,
+ * or false when the result is greater than or equal to zero.
*/
static __always_inline bool
-arch_${atomic}_add_negative(${int} i, ${atomic}_t *v)
+arch_${atomic}_add_negative${order}(${int} i, ${atomic}_t *v)
{
- return arch_${atomic}_add_return(i, v) < 0;
+ return arch_${atomic}_add_return${order}(i, v) < 0;
}
EOF