author     Herbert Xu <herbert@gondor.apana.org.au>  2007-12-17 15:07:31 +0300
committer  Herbert Xu <herbert@gondor.apana.org.au>  2008-01-11 00:16:42 +0300
commit     378f4f51f9fdd8df80ea875320e2bf1d7c6e6e77
tree       f38f23fa09ec7971546a0a3aa42a79cc055c455f  /include/linux/crypto.h
parent     84c911523020a2e39b307a2da26ee1886b7214fe
[CRYPTO] skcipher: Add crypto_grab_skcipher interface
Note: From now on the collective of ablkcipher/blkcipher/givcipher will be known as skcipher, i.e., symmetric key cipher. The name blkcipher has always been something of a misnomer, since it supports stream ciphers too.

This patch adds the function crypto_grab_skcipher as a new way of getting an ablkcipher spawn. Previously this was done in two steps: first getting the algorithm and then calling crypto_init_spawn. That meant each spawn user had to know which type and mask to use for both steps, which is awkward and also presents a problem when the type/mask changes, as it is about to for IV generators. The new interface performs both steps together, just like crypto_alloc_ablkcipher.

As a side effect this also allows us to enforce types more strictly for spawns. For now this is only done for ablkcipher, but it is trivial to extend to other types.

This patch also moves the type/mask logic for skcipher into the helpers crypto_skcipher_type and crypto_skcipher_mask.

Finally, this patch introduces the function crypto_requires_sync to determine whether the user is specifically requesting a sync algorithm.

Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
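Since this page is limited to the include/linux/crypto.h hunks, crypto_grab_skcipher itself is not visible here. As a rough sketch only (the spawn helpers are assumed to come from the companion <crypto/internal/skcipher.h> changes, and the surrounding template plumbing is elided), a template that used to look up the algorithm and then call crypto_init_spawn might now grab its spawn in a single step:

	#include <crypto/algapi.h>
	#include <crypto/internal/skcipher.h>

	/* Hypothetical helper inside a template's instance-allocation path. */
	static int example_grab(struct crypto_instance *inst, const char *name)
	{
		struct crypto_skcipher_spawn *spawn = crypto_instance_ctx(inst);

		/* Bind the spawn to this instance, then look up the algorithm
		 * and initialise the spawn in one call; the type/mask
		 * normalisation now happens inside the API. */
		crypto_set_skcipher_spawn(spawn, inst);
		return crypto_grab_skcipher(spawn, name, 0, 0);
	}

On the error and free paths the matching release helper from the same header (crypto_drop_skcipher) would drop the reference, mirroring crypto_drop_spawn in the old two-step scheme.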
Diffstat (limited to 'include/linux/crypto.h')
-rw-r--r--  include/linux/crypto.h  26
1 file changed, 16 insertions(+), 10 deletions(-)
diff --git a/include/linux/crypto.h b/include/linux/crypto.h
index ef7642ed3e42..d6962b409489 100644
--- a/include/linux/crypto.h
+++ b/include/linux/crypto.h
@@ -528,16 +528,26 @@ static inline struct crypto_ablkcipher *__crypto_ablkcipher_cast(
 	return (struct crypto_ablkcipher *)tfm;
 }
 
-static inline struct crypto_ablkcipher *crypto_alloc_ablkcipher(
-	const char *alg_name, u32 type, u32 mask)
+static inline u32 crypto_skcipher_type(u32 type)
 {
 	type &= ~CRYPTO_ALG_TYPE_MASK;
-	mask &= ~CRYPTO_ALG_TYPE_MASK;
 	type |= CRYPTO_ALG_TYPE_BLKCIPHER;
+	return type;
+}
+
+static inline u32 crypto_skcipher_mask(u32 mask)
+{
+	mask &= ~CRYPTO_ALG_TYPE_MASK;
 	mask |= CRYPTO_ALG_TYPE_BLKCIPHER_MASK;
+	return mask;
+}
 
+static inline struct crypto_ablkcipher *crypto_alloc_ablkcipher(
+	const char *alg_name, u32 type, u32 mask)
+{
 	return __crypto_ablkcipher_cast(
-		crypto_alloc_base(alg_name, type, mask));
+		crypto_alloc_base(alg_name, crypto_skcipher_type(type),
+				  crypto_skcipher_mask(mask)));
 }
 
 static inline struct crypto_tfm *crypto_ablkcipher_tfm(
@@ -554,12 +564,8 @@ static inline void crypto_free_ablkcipher(struct crypto_ablkcipher *tfm)
 static inline int crypto_has_ablkcipher(const char *alg_name, u32 type,
 					u32 mask)
 {
-	type &= ~CRYPTO_ALG_TYPE_MASK;
-	mask &= ~CRYPTO_ALG_TYPE_MASK;
-	type |= CRYPTO_ALG_TYPE_BLKCIPHER;
-	mask |= CRYPTO_ALG_TYPE_BLKCIPHER_MASK;
-
-	return crypto_has_alg(alg_name, type, mask);
+	return crypto_has_alg(alg_name, crypto_skcipher_type(type),
+			      crypto_skcipher_mask(mask));
 }
 
 static inline struct ablkcipher_tfm *crypto_ablkcipher_crt(
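
For reference, a minimal caller-side sketch (not part of the patch; "cbc(aes)" is just an example algorithm name). Passing 0 for both type and mask still works because crypto_alloc_ablkcipher now routes them through the new crypto_skcipher_type() and crypto_skcipher_mask() helpers before calling crypto_alloc_base():

	#include <linux/crypto.h>
	#include <linux/err.h>

	static int example_use_ablkcipher(void)
	{
		struct crypto_ablkcipher *tfm;

		/* type/mask of 0 are normalised to the blkcipher type/mask
		 * by the new helpers before the lookup runs. */
		tfm = crypto_alloc_ablkcipher("cbc(aes)", 0, 0);
		if (IS_ERR(tfm))
			return PTR_ERR(tfm);

		/* ... crypto_ablkcipher_setkey(), submit requests, ... */

		crypto_free_ablkcipher(tfm);
		return 0;
	}

A caller that needs a synchronous implementation would instead pass type = 0 and mask = CRYPTO_ALG_ASYNC (i.e. require the CRYPTO_ALG_ASYNC bit to be clear), which is the condition the new crypto_requires_sync() helper is meant to detect on the template side.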