path: root/arch/x86/crypto/cast6_avx_glue.c
author    Eric Biggers <ebiggers@google.com>    2018-02-20 10:48:14 +0300
committer Herbert Xu <herbert@gondor.apana.org.au>    2018-03-02 19:03:27 +0300
commit    f51a1fa43972c93e08a608df51182d90ab8d7594 (patch)
tree      29d38cbe2a211bf2323c6a43cd113ff605ff52b9 /arch/x86/crypto/cast6_avx_glue.c
parent    1e63183a203dba8333677c9490455df48f937ea0 (diff)
crypto: x86/cast6-avx - remove LRW algorithm
The LRW template now wraps an ECB mode algorithm rather than the block
cipher directly.  Therefore it is now redundant for crypto modules to
wrap their ECB code with generic LRW code themselves via lrw_crypt().

Remove the lrw-cast6-avx algorithm which did this.  Users who request
lrw(cast6) and previously would have gotten lrw-cast6-avx will now get
lrw(ecb-cast6-avx) instead, which is just as fast.

Signed-off-by: Eric Biggers <ebiggers@google.com>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
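As context for the user-visible effect described above, here is a minimal
sketch (not part of this patch; demo_lrw_cast6 is a hypothetical helper) of
how a kernel user might request lrw(cast6) through the skcipher API.  After
this change the crypto core instantiates the generic LRW template around the
ECB driver, so the resolved driver name is expected to be
"lrw(ecb-cast6-avx)" rather than the removed "lrw-cast6-avx":

#include <crypto/skcipher.h>
#include <linux/err.h>
#include <linux/printk.h>

/* Hypothetical demo, not part of this patch. */
static int demo_lrw_cast6(void)
{
	struct crypto_skcipher *tfm;

	/* Resolved via the generic LRW template after this patch. */
	tfm = crypto_alloc_skcipher("lrw(cast6)", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	/* Expected to report "lrw(ecb-cast6-avx)" once lrw-cast6-avx is gone. */
	pr_info("lrw(cast6) driver: %s\n",
		crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm)));

	crypto_free_skcipher(tfm);
	return 0;
}

The allocation itself is unchanged for callers; only the driver that backs
the "lrw(cast6)" name differs, which is why the commit can drop the glue
code without breaking users.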
Diffstat (limited to 'arch/x86/crypto/cast6_avx_glue.c')
-rw-r--r--    arch/x86/crypto/cast6_avx_glue.c    180
1 file changed, 1 insertion(+), 179 deletions(-)
diff --git a/arch/x86/crypto/cast6_avx_glue.c b/arch/x86/crypto/cast6_avx_glue.c
index 50e684768c55..d2fbf2be771e 100644
--- a/arch/x86/crypto/cast6_avx_glue.c
+++ b/arch/x86/crypto/cast6_avx_glue.c
@@ -34,9 +34,7 @@
#include <crypto/cryptd.h>
#include <crypto/b128ops.h>
#include <crypto/ctr.h>
-#include <crypto/lrw.h>
#include <crypto/xts.h>
-#include <asm/fpu/api.h>
#include <asm/crypto/glue_helper.h>
#define CAST6_PARALLEL_BLOCKS 8
@@ -189,134 +187,6 @@ static int ctr_crypt(struct blkcipher_desc *desc, struct scatterlist *dst,
return glue_ctr_crypt_128bit(&cast6_ctr, desc, dst, src, nbytes);
}
-static inline bool cast6_fpu_begin(bool fpu_enabled, unsigned int nbytes)
-{
- return glue_fpu_begin(CAST6_BLOCK_SIZE, CAST6_PARALLEL_BLOCKS,
- NULL, fpu_enabled, nbytes);
-}
-
-static inline void cast6_fpu_end(bool fpu_enabled)
-{
- glue_fpu_end(fpu_enabled);
-}
-
-struct crypt_priv {
- struct cast6_ctx *ctx;
- bool fpu_enabled;
-};
-
-static void encrypt_callback(void *priv, u8 *srcdst, unsigned int nbytes)
-{
- const unsigned int bsize = CAST6_BLOCK_SIZE;
- struct crypt_priv *ctx = priv;
- int i;
-
- ctx->fpu_enabled = cast6_fpu_begin(ctx->fpu_enabled, nbytes);
-
- if (nbytes == bsize * CAST6_PARALLEL_BLOCKS) {
- cast6_ecb_enc_8way(ctx->ctx, srcdst, srcdst);
- return;
- }
-
- for (i = 0; i < nbytes / bsize; i++, srcdst += bsize)
- __cast6_encrypt(ctx->ctx, srcdst, srcdst);
-}
-
-static void decrypt_callback(void *priv, u8 *srcdst, unsigned int nbytes)
-{
- const unsigned int bsize = CAST6_BLOCK_SIZE;
- struct crypt_priv *ctx = priv;
- int i;
-
- ctx->fpu_enabled = cast6_fpu_begin(ctx->fpu_enabled, nbytes);
-
- if (nbytes == bsize * CAST6_PARALLEL_BLOCKS) {
- cast6_ecb_dec_8way(ctx->ctx, srcdst, srcdst);
- return;
- }
-
- for (i = 0; i < nbytes / bsize; i++, srcdst += bsize)
- __cast6_decrypt(ctx->ctx, srcdst, srcdst);
-}
-
-struct cast6_lrw_ctx {
- struct lrw_table_ctx lrw_table;
- struct cast6_ctx cast6_ctx;
-};
-
-static int lrw_cast6_setkey(struct crypto_tfm *tfm, const u8 *key,
- unsigned int keylen)
-{
- struct cast6_lrw_ctx *ctx = crypto_tfm_ctx(tfm);
- int err;
-
- err = __cast6_setkey(&ctx->cast6_ctx, key, keylen - CAST6_BLOCK_SIZE,
- &tfm->crt_flags);
- if (err)
- return err;
-
- return lrw_init_table(&ctx->lrw_table, key + keylen - CAST6_BLOCK_SIZE);
-}
-
-static int lrw_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
- struct scatterlist *src, unsigned int nbytes)
-{
- struct cast6_lrw_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
- be128 buf[CAST6_PARALLEL_BLOCKS];
- struct crypt_priv crypt_ctx = {
- .ctx = &ctx->cast6_ctx,
- .fpu_enabled = false,
- };
- struct lrw_crypt_req req = {
- .tbuf = buf,
- .tbuflen = sizeof(buf),
-
- .table_ctx = &ctx->lrw_table,
- .crypt_ctx = &crypt_ctx,
- .crypt_fn = encrypt_callback,
- };
- int ret;
-
- desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
- ret = lrw_crypt(desc, dst, src, nbytes, &req);
- cast6_fpu_end(crypt_ctx.fpu_enabled);
-
- return ret;
-}
-
-static int lrw_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
- struct scatterlist *src, unsigned int nbytes)
-{
- struct cast6_lrw_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
- be128 buf[CAST6_PARALLEL_BLOCKS];
- struct crypt_priv crypt_ctx = {
- .ctx = &ctx->cast6_ctx,
- .fpu_enabled = false,
- };
- struct lrw_crypt_req req = {
- .tbuf = buf,
- .tbuflen = sizeof(buf),
-
- .table_ctx = &ctx->lrw_table,
- .crypt_ctx = &crypt_ctx,
- .crypt_fn = decrypt_callback,
- };
- int ret;
-
- desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
- ret = lrw_crypt(desc, dst, src, nbytes, &req);
- cast6_fpu_end(crypt_ctx.fpu_enabled);
-
- return ret;
-}
-
-static void lrw_exit_tfm(struct crypto_tfm *tfm)
-{
- struct cast6_lrw_ctx *ctx = crypto_tfm_ctx(tfm);
-
- lrw_free_table(&ctx->lrw_table);
-}
-
struct cast6_xts_ctx {
struct cast6_ctx tweak_ctx;
struct cast6_ctx crypt_ctx;
@@ -363,7 +233,7 @@ static int xts_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
&ctx->tweak_ctx, &ctx->crypt_ctx);
}
-static struct crypto_alg cast6_algs[10] = { {
+static struct crypto_alg cast6_algs[] = { {
.cra_name = "__ecb-cast6-avx",
.cra_driver_name = "__driver-ecb-cast6-avx",
.cra_priority = 0,
@@ -425,30 +295,6 @@ static struct crypto_alg cast6_algs[10] = { {
},
},
}, {
- .cra_name = "__lrw-cast6-avx",
- .cra_driver_name = "__driver-lrw-cast6-avx",
- .cra_priority = 0,
- .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER |
- CRYPTO_ALG_INTERNAL,
- .cra_blocksize = CAST6_BLOCK_SIZE,
- .cra_ctxsize = sizeof(struct cast6_lrw_ctx),
- .cra_alignmask = 0,
- .cra_type = &crypto_blkcipher_type,
- .cra_module = THIS_MODULE,
- .cra_exit = lrw_exit_tfm,
- .cra_u = {
- .blkcipher = {
- .min_keysize = CAST6_MIN_KEY_SIZE +
- CAST6_BLOCK_SIZE,
- .max_keysize = CAST6_MAX_KEY_SIZE +
- CAST6_BLOCK_SIZE,
- .ivsize = CAST6_BLOCK_SIZE,
- .setkey = lrw_cast6_setkey,
- .encrypt = lrw_encrypt,
- .decrypt = lrw_decrypt,
- },
- },
-}, {
.cra_name = "__xts-cast6-avx",
.cra_driver_name = "__driver-xts-cast6-avx",
.cra_priority = 0,
@@ -536,30 +382,6 @@ static struct crypto_alg cast6_algs[10] = { {
},
},
}, {
- .cra_name = "lrw(cast6)",
- .cra_driver_name = "lrw-cast6-avx",
- .cra_priority = 200,
- .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
- .cra_blocksize = CAST6_BLOCK_SIZE,
- .cra_ctxsize = sizeof(struct async_helper_ctx),
- .cra_alignmask = 0,
- .cra_type = &crypto_ablkcipher_type,
- .cra_module = THIS_MODULE,
- .cra_init = ablk_init,
- .cra_exit = ablk_exit,
- .cra_u = {
- .ablkcipher = {
- .min_keysize = CAST6_MIN_KEY_SIZE +
- CAST6_BLOCK_SIZE,
- .max_keysize = CAST6_MAX_KEY_SIZE +
- CAST6_BLOCK_SIZE,
- .ivsize = CAST6_BLOCK_SIZE,
- .setkey = ablk_set_key,
- .encrypt = ablk_encrypt,
- .decrypt = ablk_decrypt,
- },
- },
-}, {
.cra_name = "xts(cast6)",
.cra_driver_name = "xts-cast6-avx",
.cra_priority = 200,