author    Linus Torvalds <torvalds@linux-foundation.org>  2012-12-19 19:52:48 +0400
committer Linus Torvalds <torvalds@linux-foundation.org>  2012-12-19 19:52:48 +0400
commit    7f2de8171ddf28fdb2ca7f9a683ee1207849f718 (patch)
tree      d89da981ac762de3fd32e1c08ddc8041f3c37519 /include/uapi
parent    59771079c18c44e39106f0f30054025acafadb41 (diff)
parent    cf66bb93e0f75e0a4ba1ec070692618fa028e994 (diff)
download  linux-7f2de8171ddf28fdb2ca7f9a683ee1207849f718.tar.xz
Merge tag 'byteswap-for-linus-20121219' of git://git.infradead.org/users/dwmw2/byteswap

Pull preparatory gcc intrinsics bswap patch from David Woodhouse:

 "This single patch is effectively a no-op for now. It enables
  architectures to opt in to using GCC's __builtin_bswapXX() intrinsics
  for byteswapping, and if we merge this now then the architecture
  maintainers can enable it for their arch during the next cycle without
  dependency issues.

  It's worth making it a per-arch opt-in, because although in *theory*
  the compiler should never do worse than hand-coded assembler (and of
  course it also ought to do a lot better on platforms like Atom and
  PowerPC which have load-and-swap or store-and-swap instructions), that
  isn't always the case. See
  http://gcc.gnu.org/bugzilla/show_bug.cgi?id=46453 for example."

* tag 'byteswap-for-linus-20121219' of git://git.infradead.org/users/dwmw2/byteswap:
  byteorder: allow arch to opt to use GCC intrinsics for byteswapping
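The opt-in mechanism is small but easy to misread: an architecture signals that the compiler's builtin is safe by defining __HAVE_BUILTIN_BSWAP32__ (and the 16/64-bit variants), and the generic __fswabXX() helpers then prefer __builtin_bswapXX() over the arch's hand-coded assembler. The standalone userspace sketch below mirrors that dispatch; fswab32 and the mask-and-shift fallback are illustrative stand-ins for the kernel's __fswab32()/___constant_swab32(), not kernel code:

#include <stdint.h>
#include <stdio.h>

/* Pretend the architecture opted in, as a port maintainer would do in
 * its byteorder headers once this merge landed. (Defining a reserved
 * double-underscore name is acceptable here only because we imitate
 * the kernel macro for illustration.) */
#define __HAVE_BUILTIN_BSWAP32__

static inline uint32_t fswab32(uint32_t val)
{
#ifdef __HAVE_BUILTIN_BSWAP32__
	return __builtin_bswap32(val);        /* one bswap/rev instruction */
#else
	return ((val & 0x000000ffU) << 24) |  /* portable mask-and-shift */
	       ((val & 0x0000ff00U) <<  8) |
	       ((val & 0x00ff0000U) >>  8) |
	       ((val & 0xff000000U) >> 24);
#endif
}

int main(void)
{
	printf("%08x\n", fswab32(0x12345678)); /* prints 78563412 */
	return 0;
}

With the macro defined, GCC typically emits a single bswap (x86) or rev (ARM) instruction; without it, the fallback compiles to the familiar four-mask sequence unless the optimizer recognizes the idiom, which is exactly the uncertainty the per-arch opt-in guards against.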
Diffstat (limited to 'include/uapi')
-rw-r--r--  include/uapi/linux/swab.h | 12 +++++++++---
1 file changed, 9 insertions(+), 3 deletions(-)
diff --git a/include/uapi/linux/swab.h b/include/uapi/linux/swab.h
index e811474724c2..0e011eb91b5d 100644
--- a/include/uapi/linux/swab.h
+++ b/include/uapi/linux/swab.h
@@ -45,7 +45,9 @@
 
 static inline __attribute_const__ __u16 __fswab16(__u16 val)
 {
-#ifdef __arch_swab16
+#ifdef __HAVE_BUILTIN_BSWAP16__
+	return __builtin_bswap16(val);
+#elif defined (__arch_swab16)
 	return __arch_swab16(val);
 #else
 	return ___constant_swab16(val);
@@ -54,7 +56,9 @@ static inline __attribute_const__ __u16 __fswab16(__u16 val)
 
 static inline __attribute_const__ __u32 __fswab32(__u32 val)
 {
-#ifdef __arch_swab32
+#ifdef __HAVE_BUILTIN_BSWAP32__
+	return __builtin_bswap32(val);
+#elif defined(__arch_swab32)
 	return __arch_swab32(val);
 #else
 	return ___constant_swab32(val);
@@ -63,7 +67,9 @@ static inline __attribute_const__ __u32 __fswab32(__u32 val)
 
 static inline __attribute_const__ __u64 __fswab64(__u64 val)
 {
-#ifdef __arch_swab64
+#ifdef __HAVE_BUILTIN_BSWAP64__
+	return __builtin_bswap64(val);
+#elif defined (__arch_swab64)
 	return __arch_swab64(val);
 #elif defined(__SWAB_64_THRU_32__)
 	__u32 h = val >> 32;
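The pre-existing __SWAB_64_THRU_32__ branch, whose first line closes the final hunk above, is the fallback for architectures with no 64-bit swab primitive: split the value into halves, byteswap each half, and exchange their positions. Below is a standalone sketch of that composition, using the GCC builtin for the 32-bit step; fswab64_thru_32 is an illustrative name, not the kernel's:

#include <stdint.h>
#include <stdio.h>

/* Swap a 64-bit value using only 32-bit byteswaps, as the kernel's
 * __SWAB_64_THRU_32__ path does: each half is byteswapped, then the
 * two halves trade places. */
static inline uint64_t fswab64_thru_32(uint64_t val)
{
	uint32_t h = val >> 32;                /* high half, as in the hunk above */
	uint32_t l = val & ((1ULL << 32) - 1); /* low half */

	return (((uint64_t)__builtin_bswap32(l)) << 32) |
	       ((uint64_t)__builtin_bswap32(h));
}

int main(void)
{
	/* 0x1122334455667788 byteswapped is 0x8877665544332211. */
	printf("%016llx\n",
	       (unsigned long long)fswab64_thru_32(0x1122334455667788ULL));
	return 0;
}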