author     Al Viro <viro@zeniv.linux.org.uk>  2020-07-20 01:31:07 +0300
committer  Al Viro <viro@zeniv.linux.org.uk>  2020-08-20 22:45:21 +0300
commit     fdf8bee96f9aeaac4559725c2dfae6e1bd7b7043 (patch)
tree       eab26412446bb22b6402576093874a315ef021da /arch/sparc/include
parent     2a5d2bd159f33ef34484ee14705dcf8634061f2c (diff)
download   linux-fdf8bee96f9aeaac4559725c2dfae6e1bd7b7043.tar.xz
sparc64: propagate the calling convention changes down to __csum_partial_copy_...()
... and rename them into csum_and_copy_...() - the wrappers become pointless.

[braino fixed]

Signed-off-by: Al Viro <viro@zeniv.linux.org.uk>
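For context, a rough before/after sketch of the sparc64 user-copy helper, pieced together from the hunks below. The #ifdef guard is purely illustrative, and the assumption that the renamed assembly entry point now returns 0 on a fault comes from the generic calling convention rather than from anything shown in this patch.

/* Before this patch (sparc64): an inline wrapper translated the old
 * calling convention - long return, negative on fault, caller-supplied
 * initial sum - into the generic one. */
#ifdef SPARC64_BEFORE_THIS_PATCH	/* illustrative guard, not in the tree */
long __csum_partial_copy_from_user(const void __user *src, void *dst,
				   int len, __wsum sum);

static inline __wsum
csum_and_copy_from_user(const void __user *src, void *dst, int len)
{
	long ret = __csum_partial_copy_from_user(src, dst, len, ~0U);

	if (ret < 0)		/* a fault was signalled by a negative return */
		return 0;
	return (__force __wsum)ret;
}
#else
/* After: the assembly routine is renamed and is assumed to follow the
 * generic convention itself (checksum on success, 0 on fault), so the
 * wrapper and the extra sum argument disappear. */
__wsum csum_and_copy_from_user(const void __user *src, void *dst, int len);
#endif

The _to_user and _nocheck variants in the diff below change in exactly the same way.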
Diffstat (limited to 'arch/sparc/include')
-rw-r--r--   arch/sparc/include/asm/checksum.h      1
-rw-r--r--   arch/sparc/include/asm/checksum_32.h   2
-rw-r--r--   arch/sparc/include/asm/checksum_64.h  41
3 files changed, 4 insertions, 40 deletions
diff --git a/arch/sparc/include/asm/checksum.h b/arch/sparc/include/asm/checksum.h
index deb4fe5aeafd..f2ac13323b6d 100644
--- a/arch/sparc/include/asm/checksum.h
+++ b/arch/sparc/include/asm/checksum.h
@@ -3,6 +3,7 @@
#define ___ASM_SPARC_CHECKSUM_H
#define _HAVE_ARCH_CSUM_AND_COPY
#define _HAVE_ARCH_COPY_AND_CSUM_FROM_USER
+#define HAVE_CSUM_COPY_USER
#if defined(__sparc__) && defined(__arch64__)
#include <asm/checksum_64.h>
#else
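Moving HAVE_CSUM_COPY_USER into the shared header advertises, for both the 32-bit and 64-bit builds, that the architecture provides its own csum_and_copy_to_user(). A hedged sketch of the pattern such a macro usually enables in generic code follows; the fallback body is an assumption about the generic checksum helpers, not part of this diff.

#ifndef HAVE_CSUM_COPY_USER
/* Generic fallback, used only when the architecture does not supply its
 * own csum_and_copy_to_user(): checksum in kernel space, then copy out. */
static __always_inline __wsum
csum_and_copy_to_user(const void *src, void __user *dst, int len)
{
	__wsum sum = csum_partial(src, len, ~0U);

	if (copy_to_user(dst, src, len) == 0)
		return sum;
	return 0;	/* fault: same 0-on-error convention as the arch routines */
}
#endif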
diff --git a/arch/sparc/include/asm/checksum_32.h b/arch/sparc/include/asm/checksum_32.h
index d55e480172a6..ce11e0ad80c7 100644
--- a/arch/sparc/include/asm/checksum_32.h
+++ b/arch/sparc/include/asm/checksum_32.h
@@ -67,8 +67,6 @@ csum_and_copy_from_user(const void __user *src, void *dst, int len)
return csum_partial_copy_nocheck((__force void *)src, dst, len);
}
-#define HAVE_CSUM_COPY_USER
-
static inline __wsum
csum_and_copy_to_user(const void *src, void __user *dst, int len)
{
diff --git a/arch/sparc/include/asm/checksum_64.h b/arch/sparc/include/asm/checksum_64.h
index 4d0bbff43e62..d6b59461e064 100644
--- a/arch/sparc/include/asm/checksum_64.h
+++ b/arch/sparc/include/asm/checksum_64.h
@@ -38,44 +38,9 @@ __wsum csum_partial(const void * buff, int len, __wsum sum);
* here even more important to align src and dst on a 32-bit (or even
* better 64-bit) boundary
*/
-__wsum __csum_partial_copy_nocheck(const void *src, void *dst, int len, __wsum sum);
-
-static inline __wsum csum_partial_copy_nocheck(const void *src, void *dst, int len)
-{
- return __csum_partial_copy_nocheck(src, dst, len, 0);
-}
-
-long __csum_partial_copy_from_user(const void __user *src,
- void *dst, int len,
- __wsum sum);
-
-static inline __wsum
-csum_and_copy_from_user(const void __user *src,
- void *dst, int len)
-{
- long ret = __csum_partial_copy_from_user(src, dst, len, ~0U);
- if (ret < 0)
- return 0;
- return (__force __wsum) ret;
-}
-
-/*
- * Copy and checksum to user
- */
-#define HAVE_CSUM_COPY_USER
-long __csum_partial_copy_to_user(const void *src,
- void __user *dst, int len,
- __wsum sum);
-
-static inline __wsum
-csum_and_copy_to_user(const void *src,
- void __user *dst, int len)
-{
- long ret = __csum_partial_copy_to_user(src, dst, len, ~0U);
- if (ret < 0)
- return 0;
- return (__force __wsum) ret;
-}
+__wsum csum_partial_copy_nocheck(const void *src, void *dst, int len);
+__wsum csum_and_copy_from_user(const void __user *src, void *dst, int len);
+__wsum csum_and_copy_to_user(const void *src, void __user *dst, int len);
/* ihl is always 5 or greater, almost always is 5, and iph is word aligned
* the majority of the time.