From 738206d325a936d048bb66b5e0c70e3b1a8692be Mon Sep 17 00:00:00 2001 From: Jussi Kivilinna Date: Wed, 11 Jul 2012 14:19:55 +0300 Subject: crypto: tea - use crypto_[un]register_algs Combine all crypto_alg to be registered and use new crypto_[un]register_algs functions. This simplifies init/exit code. Signed-off-by: Jussi Kivilinna Signed-off-by: Herbert Xu --- crypto/tea.c | 41 ++++++----------------------------------- 1 file changed, 6 insertions(+), 35 deletions(-) (limited to 'crypto') diff --git a/crypto/tea.c b/crypto/tea.c index 412bc74f8179..0a572323ee4a 100644 --- a/crypto/tea.c +++ b/crypto/tea.c @@ -219,84 +219,55 @@ static void xeta_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src) out[1] = cpu_to_le32(z); } -static struct crypto_alg tea_alg = { +static struct crypto_alg tea_algs[3] = { { .cra_name = "tea", .cra_flags = CRYPTO_ALG_TYPE_CIPHER, .cra_blocksize = TEA_BLOCK_SIZE, .cra_ctxsize = sizeof (struct tea_ctx), .cra_alignmask = 3, .cra_module = THIS_MODULE, - .cra_list = LIST_HEAD_INIT(tea_alg.cra_list), .cra_u = { .cipher = { .cia_min_keysize = TEA_KEY_SIZE, .cia_max_keysize = TEA_KEY_SIZE, .cia_setkey = tea_setkey, .cia_encrypt = tea_encrypt, .cia_decrypt = tea_decrypt } } -}; - -static struct crypto_alg xtea_alg = { +}, { .cra_name = "xtea", .cra_flags = CRYPTO_ALG_TYPE_CIPHER, .cra_blocksize = XTEA_BLOCK_SIZE, .cra_ctxsize = sizeof (struct xtea_ctx), .cra_alignmask = 3, .cra_module = THIS_MODULE, - .cra_list = LIST_HEAD_INIT(xtea_alg.cra_list), .cra_u = { .cipher = { .cia_min_keysize = XTEA_KEY_SIZE, .cia_max_keysize = XTEA_KEY_SIZE, .cia_setkey = xtea_setkey, .cia_encrypt = xtea_encrypt, .cia_decrypt = xtea_decrypt } } -}; - -static struct crypto_alg xeta_alg = { +}, { .cra_name = "xeta", .cra_flags = CRYPTO_ALG_TYPE_CIPHER, .cra_blocksize = XTEA_BLOCK_SIZE, .cra_ctxsize = sizeof (struct xtea_ctx), .cra_alignmask = 3, .cra_module = THIS_MODULE, - .cra_list = LIST_HEAD_INIT(xtea_alg.cra_list), .cra_u = { .cipher = { .cia_min_keysize = XTEA_KEY_SIZE, .cia_max_keysize = XTEA_KEY_SIZE, .cia_setkey = xtea_setkey, .cia_encrypt = xeta_encrypt, .cia_decrypt = xeta_decrypt } } -}; +} }; static int __init tea_mod_init(void) { - int ret = 0; - - ret = crypto_register_alg(&tea_alg); - if (ret < 0) - goto out; - - ret = crypto_register_alg(&xtea_alg); - if (ret < 0) { - crypto_unregister_alg(&tea_alg); - goto out; - } - - ret = crypto_register_alg(&xeta_alg); - if (ret < 0) { - crypto_unregister_alg(&tea_alg); - crypto_unregister_alg(&xtea_alg); - goto out; - } - -out: - return ret; + return crypto_register_algs(tea_algs, ARRAY_SIZE(tea_algs)); } static void __exit tea_mod_fini(void) { - crypto_unregister_alg(&tea_alg); - crypto_unregister_alg(&xtea_alg); - crypto_unregister_alg(&xeta_alg); + crypto_unregister_algs(tea_algs, ARRAY_SIZE(tea_algs)); } MODULE_ALIAS("xtea"); -- cgit v1.2.3 From 70a03bff6c3f9d2983f90b74e40b0769ceb4cbd7 Mon Sep 17 00:00:00 2001 From: Jussi Kivilinna Date: Wed, 11 Jul 2012 14:20:00 +0300 Subject: crypto: crypto_null - use crypto_[un]register_algs Combine all crypto_alg to be registered and use new crypto_[un]register_algs functions. This simplifies init/exit code. 
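For readers new to the batch-registration API that this whole series converts to, here is a minimal sketch of the pattern, using a hypothetical "foo" module rather than any of the files touched below; the elided cipher fields and callbacks are placeholders, not real kernel symbols.

#include <linux/module.h>
#include <linux/crypto.h>

/* Two algorithm definitions collected into one array. */
static struct crypto_alg foo_algs[2] = { {
	.cra_name	= "foo",
	.cra_flags	= CRYPTO_ALG_TYPE_CIPHER,
	.cra_module	= THIS_MODULE,
	/* ... blocksize, ctxsize and .cra_u cipher callbacks as usual ... */
}, {
	.cra_name	= "foo64",
	.cra_flags	= CRYPTO_ALG_TYPE_CIPHER,
	.cra_module	= THIS_MODULE,
	/* ... */
} };

static int __init foo_mod_init(void)
{
	/* One call registers every entry and unwinds on failure. */
	return crypto_register_algs(foo_algs, ARRAY_SIZE(foo_algs));
}

static void __exit foo_mod_fini(void)
{
	crypto_unregister_algs(foo_algs, ARRAY_SIZE(foo_algs));
}

module_init(foo_mod_init);
module_exit(foo_mod_fini);
MODULE_LICENSE("GPL");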
Signed-off-by: Jussi Kivilinna Signed-off-by: Herbert Xu --- crypto/crypto_null.c | 57 +++++++++++++++++----------------------------------- 1 file changed, 18 insertions(+), 39 deletions(-) (limited to 'crypto') diff --git a/crypto/crypto_null.c b/crypto/crypto_null.c index 07a8a96d46fc..fee7265cd35d 100644 --- a/crypto/crypto_null.c +++ b/crypto/crypto_null.c @@ -94,18 +94,6 @@ static int skcipher_null_crypt(struct blkcipher_desc *desc, return err; } -static struct crypto_alg compress_null = { - .cra_name = "compress_null", - .cra_flags = CRYPTO_ALG_TYPE_COMPRESS, - .cra_blocksize = NULL_BLOCK_SIZE, - .cra_ctxsize = 0, - .cra_module = THIS_MODULE, - .cra_list = LIST_HEAD_INIT(compress_null.cra_list), - .cra_u = { .compress = { - .coa_compress = null_compress, - .coa_decompress = null_compress } } -}; - static struct shash_alg digest_null = { .digestsize = NULL_DIGEST_SIZE, .setkey = null_hash_setkey, @@ -122,22 +110,19 @@ static struct shash_alg digest_null = { } }; -static struct crypto_alg cipher_null = { +static struct crypto_alg null_algs[3] = { { .cra_name = "cipher_null", .cra_flags = CRYPTO_ALG_TYPE_CIPHER, .cra_blocksize = NULL_BLOCK_SIZE, .cra_ctxsize = 0, .cra_module = THIS_MODULE, - .cra_list = LIST_HEAD_INIT(cipher_null.cra_list), .cra_u = { .cipher = { .cia_min_keysize = NULL_KEY_SIZE, .cia_max_keysize = NULL_KEY_SIZE, .cia_setkey = null_setkey, .cia_encrypt = null_crypt, .cia_decrypt = null_crypt } } -}; - -static struct crypto_alg skcipher_null = { +}, { .cra_name = "ecb(cipher_null)", .cra_driver_name = "ecb-cipher_null", .cra_priority = 100, @@ -146,7 +131,6 @@ static struct crypto_alg skcipher_null = { .cra_type = &crypto_blkcipher_type, .cra_ctxsize = 0, .cra_module = THIS_MODULE, - .cra_list = LIST_HEAD_INIT(skcipher_null.cra_list), .cra_u = { .blkcipher = { .min_keysize = NULL_KEY_SIZE, .max_keysize = NULL_KEY_SIZE, @@ -154,7 +138,16 @@ static struct crypto_alg skcipher_null = { .setkey = null_setkey, .encrypt = skcipher_null_crypt, .decrypt = skcipher_null_crypt } } -}; +}, { + .cra_name = "compress_null", + .cra_flags = CRYPTO_ALG_TYPE_COMPRESS, + .cra_blocksize = NULL_BLOCK_SIZE, + .cra_ctxsize = 0, + .cra_module = THIS_MODULE, + .cra_u = { .compress = { + .coa_compress = null_compress, + .coa_decompress = null_compress } } +} }; MODULE_ALIAS("compress_null"); MODULE_ALIAS("digest_null"); @@ -164,40 +157,26 @@ static int __init crypto_null_mod_init(void) { int ret = 0; - ret = crypto_register_alg(&cipher_null); + ret = crypto_register_algs(null_algs, ARRAY_SIZE(null_algs)); if (ret < 0) goto out; - ret = crypto_register_alg(&skcipher_null); - if (ret < 0) - goto out_unregister_cipher; - ret = crypto_register_shash(&digest_null); if (ret < 0) - goto out_unregister_skcipher; + goto out_unregister_algs; - ret = crypto_register_alg(&compress_null); - if (ret < 0) - goto out_unregister_digest; + return 0; +out_unregister_algs: + crypto_unregister_algs(null_algs, ARRAY_SIZE(null_algs)); out: return ret; - -out_unregister_digest: - crypto_unregister_shash(&digest_null); -out_unregister_skcipher: - crypto_unregister_alg(&skcipher_null); -out_unregister_cipher: - crypto_unregister_alg(&cipher_null); - goto out; } static void __exit crypto_null_mod_fini(void) { - crypto_unregister_alg(&compress_null); crypto_unregister_shash(&digest_null); - crypto_unregister_alg(&skcipher_null); - crypto_unregister_alg(&cipher_null); + crypto_unregister_algs(null_algs, ARRAY_SIZE(null_algs)); } module_init(crypto_null_mod_init); -- cgit v1.2.3 From 9935e6d2f3b5670550e48a55172cab546ae5d096 
Mon Sep 17 00:00:00 2001 From: Jussi Kivilinna Date: Wed, 11 Jul 2012 14:20:05 +0300 Subject: crypto: des - use crypto_[un]register_algs Combine all crypto_alg to be registered and use new crypto_[un]register_algs functions. This simplifies init/exit code. Signed-off-by: Jussi Kivilinna Signed-off-by: Herbert Xu --- crypto/des_generic.c | 25 +++++-------------------- 1 file changed, 5 insertions(+), 20 deletions(-) (limited to 'crypto') diff --git a/crypto/des_generic.c b/crypto/des_generic.c index 873818d48e86..f6cf63f88468 100644 --- a/crypto/des_generic.c +++ b/crypto/des_generic.c @@ -943,59 +943,44 @@ static void des3_ede_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src) d[1] = cpu_to_le32(L); } -static struct crypto_alg des_alg = { +static struct crypto_alg des_algs[2] = { { .cra_name = "des", .cra_flags = CRYPTO_ALG_TYPE_CIPHER, .cra_blocksize = DES_BLOCK_SIZE, .cra_ctxsize = sizeof(struct des_ctx), .cra_module = THIS_MODULE, .cra_alignmask = 3, - .cra_list = LIST_HEAD_INIT(des_alg.cra_list), .cra_u = { .cipher = { .cia_min_keysize = DES_KEY_SIZE, .cia_max_keysize = DES_KEY_SIZE, .cia_setkey = des_setkey, .cia_encrypt = des_encrypt, .cia_decrypt = des_decrypt } } -}; - -static struct crypto_alg des3_ede_alg = { +}, { .cra_name = "des3_ede", .cra_flags = CRYPTO_ALG_TYPE_CIPHER, .cra_blocksize = DES3_EDE_BLOCK_SIZE, .cra_ctxsize = sizeof(struct des3_ede_ctx), .cra_module = THIS_MODULE, .cra_alignmask = 3, - .cra_list = LIST_HEAD_INIT(des3_ede_alg.cra_list), .cra_u = { .cipher = { .cia_min_keysize = DES3_EDE_KEY_SIZE, .cia_max_keysize = DES3_EDE_KEY_SIZE, .cia_setkey = des3_ede_setkey, .cia_encrypt = des3_ede_encrypt, .cia_decrypt = des3_ede_decrypt } } -}; +} }; MODULE_ALIAS("des3_ede"); static int __init des_generic_mod_init(void) { - int ret = 0; - - ret = crypto_register_alg(&des_alg); - if (ret < 0) - goto out; - - ret = crypto_register_alg(&des3_ede_alg); - if (ret < 0) - crypto_unregister_alg(&des_alg); -out: - return ret; + return crypto_register_algs(des_algs, ARRAY_SIZE(des_algs)); } static void __exit des_generic_mod_fini(void) { - crypto_unregister_alg(&des3_ede_alg); - crypto_unregister_alg(&des_alg); + crypto_unregister_algs(des_algs, ARRAY_SIZE(des_algs)); } module_init(des_generic_mod_init); -- cgit v1.2.3 From bbc406b9d2de4182a4b2990efcd1754ae9e2c483 Mon Sep 17 00:00:00 2001 From: Jussi Kivilinna Date: Wed, 11 Jul 2012 14:20:10 +0300 Subject: crypto: serpent - use crypto_[un]register_algs Combine all crypto_alg to be registered and use new crypto_[un]register_algs functions. This simplifies init/exit code. 
Signed-off-by: Jussi Kivilinna Signed-off-by: Herbert Xu --- crypto/serpent_generic.c | 53 +++++++++++++++++------------------------------- 1 file changed, 19 insertions(+), 34 deletions(-) (limited to 'crypto') diff --git a/crypto/serpent_generic.c b/crypto/serpent_generic.c index 8f32cf35e5ce..7ddbd7e88859 100644 --- a/crypto/serpent_generic.c +++ b/crypto/serpent_generic.c @@ -567,24 +567,6 @@ static void serpent_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src) __serpent_decrypt(ctx, dst, src); } -static struct crypto_alg serpent_alg = { - .cra_name = "serpent", - .cra_driver_name = "serpent-generic", - .cra_priority = 100, - .cra_flags = CRYPTO_ALG_TYPE_CIPHER, - .cra_blocksize = SERPENT_BLOCK_SIZE, - .cra_ctxsize = sizeof(struct serpent_ctx), - .cra_alignmask = 3, - .cra_module = THIS_MODULE, - .cra_list = LIST_HEAD_INIT(serpent_alg.cra_list), - .cra_u = { .cipher = { - .cia_min_keysize = SERPENT_MIN_KEY_SIZE, - .cia_max_keysize = SERPENT_MAX_KEY_SIZE, - .cia_setkey = serpent_setkey, - .cia_encrypt = serpent_encrypt, - .cia_decrypt = serpent_decrypt } } -}; - static int tnepres_setkey(struct crypto_tfm *tfm, const u8 *key, unsigned int keylen) { @@ -637,41 +619,44 @@ static void tnepres_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src) d[3] = swab32(rd[0]); } -static struct crypto_alg tnepres_alg = { +static struct crypto_alg srp_algs[2] = { { + .cra_name = "serpent", + .cra_driver_name = "serpent-generic", + .cra_priority = 100, + .cra_flags = CRYPTO_ALG_TYPE_CIPHER, + .cra_blocksize = SERPENT_BLOCK_SIZE, + .cra_ctxsize = sizeof(struct serpent_ctx), + .cra_alignmask = 3, + .cra_module = THIS_MODULE, + .cra_u = { .cipher = { + .cia_min_keysize = SERPENT_MIN_KEY_SIZE, + .cia_max_keysize = SERPENT_MAX_KEY_SIZE, + .cia_setkey = serpent_setkey, + .cia_encrypt = serpent_encrypt, + .cia_decrypt = serpent_decrypt } } +}, { .cra_name = "tnepres", .cra_flags = CRYPTO_ALG_TYPE_CIPHER, .cra_blocksize = SERPENT_BLOCK_SIZE, .cra_ctxsize = sizeof(struct serpent_ctx), .cra_alignmask = 3, .cra_module = THIS_MODULE, - .cra_list = LIST_HEAD_INIT(serpent_alg.cra_list), .cra_u = { .cipher = { .cia_min_keysize = SERPENT_MIN_KEY_SIZE, .cia_max_keysize = SERPENT_MAX_KEY_SIZE, .cia_setkey = tnepres_setkey, .cia_encrypt = tnepres_encrypt, .cia_decrypt = tnepres_decrypt } } -}; +} }; static int __init serpent_mod_init(void) { - int ret = crypto_register_alg(&serpent_alg); - - if (ret) - return ret; - - ret = crypto_register_alg(&tnepres_alg); - - if (ret) - crypto_unregister_alg(&serpent_alg); - - return ret; + return crypto_register_algs(srp_algs, ARRAY_SIZE(srp_algs)); } static void __exit serpent_mod_fini(void) { - crypto_unregister_alg(&tnepres_alg); - crypto_unregister_alg(&serpent_alg); + crypto_unregister_algs(srp_algs, ARRAY_SIZE(srp_algs)); } module_init(serpent_mod_init); -- cgit v1.2.3 From 8fc229a51b0e10f4ceb794e8b99fa0a427a7ba41 Mon Sep 17 00:00:00 2001 From: Jussi Kivilinna Date: Wed, 11 Jul 2012 14:20:15 +0300 Subject: crypto: ansi_cprng - use crypto_[un]register_algs Combine all crypto_alg to be registered and use new crypto_[un]register_algs functions. This simplifies init/exit code. 
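One detail worth noting before the diff: unlike the earlier conversions, ansi_cprng leaves the rng_algs[] array unsized and wraps the FIPS entry in #ifdef CONFIG_CRYPTO_FIPS inside the initializer, so ARRAY_SIZE() automatically reflects whether the second entry is compiled in. A stripped-down sketch of that idiom, with hypothetical entries rather than the real ones:

static struct crypto_alg qux_algs[] = { {
	.cra_name = "qux",
	/* ... */
#ifdef CONFIG_CRYPTO_FIPS
}, {
	.cra_name = "fips(qux)",
	/* ... */
#endif
} };

/* ARRAY_SIZE(qux_algs) is 2 with CONFIG_CRYPTO_FIPS, 1 without,
 * so the init/exit code needs no #ifdef of its own. */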
Cc: Neil Horman Signed-off-by: Jussi Kivilinna Signed-off-by: Herbert Xu --- crypto/ansi_cprng.c | 63 +++++++++++++++++++---------------------------------- 1 file changed, 23 insertions(+), 40 deletions(-) (limited to 'crypto') diff --git a/crypto/ansi_cprng.c b/crypto/ansi_cprng.c index 6ddd99e6114b..c0bb3778f1ae 100644 --- a/crypto/ansi_cprng.c +++ b/crypto/ansi_cprng.c @@ -382,26 +382,6 @@ static int cprng_reset(struct crypto_rng *tfm, u8 *seed, unsigned int slen) return 0; } -static struct crypto_alg rng_alg = { - .cra_name = "stdrng", - .cra_driver_name = "ansi_cprng", - .cra_priority = 100, - .cra_flags = CRYPTO_ALG_TYPE_RNG, - .cra_ctxsize = sizeof(struct prng_context), - .cra_type = &crypto_rng_type, - .cra_module = THIS_MODULE, - .cra_list = LIST_HEAD_INIT(rng_alg.cra_list), - .cra_init = cprng_init, - .cra_exit = cprng_exit, - .cra_u = { - .rng = { - .rng_make_random = cprng_get_random, - .rng_reset = cprng_reset, - .seedsize = DEFAULT_PRNG_KSZ + 2*DEFAULT_BLK_SZ, - } - } -}; - #ifdef CONFIG_CRYPTO_FIPS static int fips_cprng_get_random(struct crypto_rng *tfm, u8 *rdata, unsigned int dlen) @@ -438,8 +418,27 @@ static int fips_cprng_reset(struct crypto_rng *tfm, u8 *seed, unsigned int slen) out: return rc; } +#endif -static struct crypto_alg fips_rng_alg = { +static struct crypto_alg rng_algs[] = { { + .cra_name = "stdrng", + .cra_driver_name = "ansi_cprng", + .cra_priority = 100, + .cra_flags = CRYPTO_ALG_TYPE_RNG, + .cra_ctxsize = sizeof(struct prng_context), + .cra_type = &crypto_rng_type, + .cra_module = THIS_MODULE, + .cra_init = cprng_init, + .cra_exit = cprng_exit, + .cra_u = { + .rng = { + .rng_make_random = cprng_get_random, + .rng_reset = cprng_reset, + .seedsize = DEFAULT_PRNG_KSZ + 2*DEFAULT_BLK_SZ, + } + } +#ifdef CONFIG_CRYPTO_FIPS +}, { .cra_name = "fips(ansi_cprng)", .cra_driver_name = "fips_ansi_cprng", .cra_priority = 300, @@ -447,7 +446,6 @@ static struct crypto_alg fips_rng_alg = { .cra_ctxsize = sizeof(struct prng_context), .cra_type = &crypto_rng_type, .cra_module = THIS_MODULE, - .cra_list = LIST_HEAD_INIT(rng_alg.cra_list), .cra_init = cprng_init, .cra_exit = cprng_exit, .cra_u = { @@ -457,33 +455,18 @@ static struct crypto_alg fips_rng_alg = { .seedsize = DEFAULT_PRNG_KSZ + 2*DEFAULT_BLK_SZ, } } -}; #endif +} }; /* Module initalization */ static int __init prng_mod_init(void) { - int rc = 0; - - rc = crypto_register_alg(&rng_alg); -#ifdef CONFIG_CRYPTO_FIPS - if (rc) - goto out; - - rc = crypto_register_alg(&fips_rng_alg); - -out: -#endif - return rc; + return crypto_register_algs(rng_algs, ARRAY_SIZE(rng_algs)); } static void __exit prng_mod_fini(void) { - crypto_unregister_alg(&rng_alg); -#ifdef CONFIG_CRYPTO_FIPS - crypto_unregister_alg(&fips_rng_alg); -#endif - return; + crypto_unregister_algs(rng_algs, ARRAY_SIZE(rng_algs)); } MODULE_LICENSE("GPL"); -- cgit v1.2.3 From 50fc3e8d2c9d1ee72c67b751e5ac5d76ebc5a12e Mon Sep 17 00:00:00 2001 From: Jussi Kivilinna Date: Wed, 11 Jul 2012 14:20:20 +0300 Subject: crypto: add crypto_[un]register_shashes for [un]registering multiple shash entries at once Add crypto_[un]register_shashes() to allow simplifying init/exit code of shash crypto modules that register multiple algorithms. 
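The helpers added here mirror crypto_[un]register_algs() for shash algorithms; the following patches in this series (tgr192, sha256, sha512, wp512) all switch to them. A caller-side sketch, assuming a hypothetical "bar" digest module whose bar_init/bar_update/bar_final and struct bar_state are defined elsewhere:

static struct shash_alg bar_algs[2] = { {
	.digestsize	= 32,
	.init		= bar_init,
	.update		= bar_update,
	.final		= bar_final,
	.descsize	= sizeof(struct bar_state),
	.base		= {
		.cra_name	= "bar256",
		.cra_blocksize	= 64,
		.cra_module	= THIS_MODULE,
	}
}, {
	/* second variant, typically sharing init/update and differing
	 * only in digestsize and final() */
} };

static int __init bar_mod_init(void)
{
	return crypto_register_shashes(bar_algs, ARRAY_SIZE(bar_algs));
}

static void __exit bar_mod_fini(void)
{
	crypto_unregister_shashes(bar_algs, ARRAY_SIZE(bar_algs));
}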
Signed-off-by: Jussi Kivilinna Signed-off-by: Herbert Xu --- crypto/shash.c | 36 ++++++++++++++++++++++++++++++++++++ include/crypto/internal/hash.h | 2 ++ 2 files changed, 38 insertions(+) (limited to 'crypto') diff --git a/crypto/shash.c b/crypto/shash.c index 32067f47e6c7..f426330f1017 100644 --- a/crypto/shash.c +++ b/crypto/shash.c @@ -629,6 +629,42 @@ int crypto_unregister_shash(struct shash_alg *alg) } EXPORT_SYMBOL_GPL(crypto_unregister_shash); +int crypto_register_shashes(struct shash_alg *algs, int count) +{ + int i, ret; + + for (i = 0; i < count; i++) { + ret = crypto_register_shash(&algs[i]); + if (ret) + goto err; + } + + return 0; + +err: + for (--i; i >= 0; --i) + crypto_unregister_shash(&algs[i]); + + return ret; +} +EXPORT_SYMBOL_GPL(crypto_register_shashes); + +int crypto_unregister_shashes(struct shash_alg *algs, int count) +{ + int i, ret; + + for (i = count - 1; i >= 0; --i) { + ret = crypto_unregister_shash(&algs[i]); + if (ret) + pr_err("Failed to unregister %s %s: %d\n", + algs[i].base.cra_driver_name, + algs[i].base.cra_name, ret); + } + + return 0; +} +EXPORT_SYMBOL_GPL(crypto_unregister_shashes); + int shash_register_instance(struct crypto_template *tmpl, struct shash_instance *inst) { diff --git a/include/crypto/internal/hash.h b/include/crypto/internal/hash.h index 5bfad8c80595..821eae8cbd8c 100644 --- a/include/crypto/internal/hash.h +++ b/include/crypto/internal/hash.h @@ -83,6 +83,8 @@ struct hash_alg_common *ahash_attr_alg(struct rtattr *rta, u32 type, u32 mask); int crypto_register_shash(struct shash_alg *alg); int crypto_unregister_shash(struct shash_alg *alg); +int crypto_register_shashes(struct shash_alg *algs, int count); +int crypto_unregister_shashes(struct shash_alg *algs, int count); int shash_register_instance(struct crypto_template *tmpl, struct shash_instance *inst); void shash_free_instance(struct crypto_instance *inst); -- cgit v1.2.3 From a5e7a2dcfcf360f285db9edd479491b1e2207b4f Mon Sep 17 00:00:00 2001 From: Jussi Kivilinna Date: Wed, 11 Jul 2012 14:20:25 +0300 Subject: crypto: tiger - use crypto_[un]register_shashes Combine all shash algs to be registered and use new crypto_[un]register_shashes functions. This simplifies init/exit code. 
Signed-off-by: Jussi Kivilinna Signed-off-by: Herbert Xu --- crypto/tgr192.c | 38 ++++++-------------------------------- 1 file changed, 6 insertions(+), 32 deletions(-) (limited to 'crypto') diff --git a/crypto/tgr192.c b/crypto/tgr192.c index cbca4f208c9f..87403556fd0b 100644 --- a/crypto/tgr192.c +++ b/crypto/tgr192.c @@ -628,7 +628,7 @@ static int tgr128_final(struct shash_desc *desc, u8 * out) return 0; } -static struct shash_alg tgr192 = { +static struct shash_alg tgr_algs[3] = { { .digestsize = TGR192_DIGEST_SIZE, .init = tgr192_init, .update = tgr192_update, @@ -640,9 +640,7 @@ static struct shash_alg tgr192 = { .cra_blocksize = TGR192_BLOCK_SIZE, .cra_module = THIS_MODULE, } -}; - -static struct shash_alg tgr160 = { +}, { .digestsize = TGR160_DIGEST_SIZE, .init = tgr192_init, .update = tgr192_update, @@ -654,9 +652,7 @@ static struct shash_alg tgr160 = { .cra_blocksize = TGR192_BLOCK_SIZE, .cra_module = THIS_MODULE, } -}; - -static struct shash_alg tgr128 = { +}, { .digestsize = TGR128_DIGEST_SIZE, .init = tgr192_init, .update = tgr192_update, @@ -668,38 +664,16 @@ static struct shash_alg tgr128 = { .cra_blocksize = TGR192_BLOCK_SIZE, .cra_module = THIS_MODULE, } -}; +} }; static int __init tgr192_mod_init(void) { - int ret = 0; - - ret = crypto_register_shash(&tgr192); - - if (ret < 0) { - goto out; - } - - ret = crypto_register_shash(&tgr160); - if (ret < 0) { - crypto_unregister_shash(&tgr192); - goto out; - } - - ret = crypto_register_shash(&tgr128); - if (ret < 0) { - crypto_unregister_shash(&tgr192); - crypto_unregister_shash(&tgr160); - } - out: - return ret; + return crypto_register_shashes(tgr_algs, ARRAY_SIZE(tgr_algs)); } static void __exit tgr192_mod_fini(void) { - crypto_unregister_shash(&tgr192); - crypto_unregister_shash(&tgr160); - crypto_unregister_shash(&tgr128); + crypto_unregister_shashes(tgr_algs, ARRAY_SIZE(tgr_algs)); } MODULE_ALIAS("tgr160"); -- cgit v1.2.3 From 6aeb49bc5a6fffe2f8ba0668cf7459b6a4b672dc Mon Sep 17 00:00:00 2001 From: Jussi Kivilinna Date: Wed, 11 Jul 2012 14:20:30 +0300 Subject: crypto: sha256 - use crypto_[un]register_shashes Combine all shash algs to be registered and use new crypto_[un]register_shashes functions. This simplifies init/exit code. 
Signed-off-by: Jussi Kivilinna Signed-off-by: Herbert Xu --- crypto/sha256_generic.c | 25 +++++-------------------- 1 file changed, 5 insertions(+), 20 deletions(-) (limited to 'crypto') diff --git a/crypto/sha256_generic.c b/crypto/sha256_generic.c index c48459ebf05b..c3ed4ec924e1 100644 --- a/crypto/sha256_generic.c +++ b/crypto/sha256_generic.c @@ -336,7 +336,7 @@ static int sha256_import(struct shash_desc *desc, const void *in) return 0; } -static struct shash_alg sha256 = { +static struct shash_alg sha256_algs[2] = { { .digestsize = SHA256_DIGEST_SIZE, .init = sha256_init, .update = sha256_update, @@ -352,9 +352,7 @@ static struct shash_alg sha256 = { .cra_blocksize = SHA256_BLOCK_SIZE, .cra_module = THIS_MODULE, } -}; - -static struct shash_alg sha224 = { +}, { .digestsize = SHA224_DIGEST_SIZE, .init = sha224_init, .update = sha256_update, @@ -367,29 +365,16 @@ static struct shash_alg sha224 = { .cra_blocksize = SHA224_BLOCK_SIZE, .cra_module = THIS_MODULE, } -}; +} }; static int __init sha256_generic_mod_init(void) { - int ret = 0; - - ret = crypto_register_shash(&sha224); - - if (ret < 0) - return ret; - - ret = crypto_register_shash(&sha256); - - if (ret < 0) - crypto_unregister_shash(&sha224); - - return ret; + return crypto_register_shashes(sha256_algs, ARRAY_SIZE(sha256_algs)); } static void __exit sha256_generic_mod_fini(void) { - crypto_unregister_shash(&sha224); - crypto_unregister_shash(&sha256); + crypto_unregister_shashes(sha256_algs, ARRAY_SIZE(sha256_algs)); } module_init(sha256_generic_mod_init); -- cgit v1.2.3 From 648b2a102d268d41d8116abde9081327c1be82e8 Mon Sep 17 00:00:00 2001 From: Jussi Kivilinna Date: Wed, 11 Jul 2012 14:20:36 +0300 Subject: crypto: sha512 - use crypto_[un]register_shashes Combine all shash algs to be registered and use new crypto_[un]register_shashes functions. This simplifies init/exit code. 
Signed-off-by: Jussi Kivilinna Signed-off-by: Herbert Xu --- crypto/sha512_generic.c | 20 +++++--------------- 1 file changed, 5 insertions(+), 15 deletions(-) (limited to 'crypto') diff --git a/crypto/sha512_generic.c b/crypto/sha512_generic.c index dd30f40af9f5..71fcf361102d 100644 --- a/crypto/sha512_generic.c +++ b/crypto/sha512_generic.c @@ -242,7 +242,7 @@ static int sha384_final(struct shash_desc *desc, u8 *hash) return 0; } -static struct shash_alg sha512 = { +static struct shash_alg sha512_algs[2] = { { .digestsize = SHA512_DIGEST_SIZE, .init = sha512_init, .update = sha512_update, @@ -254,9 +254,7 @@ static struct shash_alg sha512 = { .cra_blocksize = SHA512_BLOCK_SIZE, .cra_module = THIS_MODULE, } -}; - -static struct shash_alg sha384 = { +}, { .digestsize = SHA384_DIGEST_SIZE, .init = sha384_init, .update = sha512_update, @@ -268,24 +266,16 @@ static struct shash_alg sha384 = { .cra_blocksize = SHA384_BLOCK_SIZE, .cra_module = THIS_MODULE, } -}; +} }; static int __init sha512_generic_mod_init(void) { - int ret = 0; - - if ((ret = crypto_register_shash(&sha384)) < 0) - goto out; - if ((ret = crypto_register_shash(&sha512)) < 0) - crypto_unregister_shash(&sha384); -out: - return ret; + return crypto_register_shashes(sha512_algs, ARRAY_SIZE(sha512_algs)); } static void __exit sha512_generic_mod_fini(void) { - crypto_unregister_shash(&sha384); - crypto_unregister_shash(&sha512); + crypto_unregister_shashes(sha512_algs, ARRAY_SIZE(sha512_algs)); } module_init(sha512_generic_mod_init); -- cgit v1.2.3 From f4b0277e7ef435733b888a62cf9c4c12b219e7c5 Mon Sep 17 00:00:00 2001 From: Jussi Kivilinna Date: Wed, 11 Jul 2012 14:20:41 +0300 Subject: crypto: whirlpool - use crypto_[un]register_shashes Combine all shash algs to be registered and use new crypto_[un]register_shashes functions. This simplifies init/exit code. 
Signed-off-by: Jussi Kivilinna Signed-off-by: Herbert Xu --- crypto/wp512.c | 39 ++++++--------------------------------- 1 file changed, 6 insertions(+), 33 deletions(-) (limited to 'crypto') diff --git a/crypto/wp512.c b/crypto/wp512.c index 71719a2be25a..180f1d6e03f4 100644 --- a/crypto/wp512.c +++ b/crypto/wp512.c @@ -1119,7 +1119,7 @@ static int wp256_final(struct shash_desc *desc, u8 *out) return 0; } -static struct shash_alg wp512 = { +static struct shash_alg wp_algs[3] = { { .digestsize = WP512_DIGEST_SIZE, .init = wp512_init, .update = wp512_update, @@ -1131,9 +1131,7 @@ static struct shash_alg wp512 = { .cra_blocksize = WP512_BLOCK_SIZE, .cra_module = THIS_MODULE, } -}; - -static struct shash_alg wp384 = { +}, { .digestsize = WP384_DIGEST_SIZE, .init = wp512_init, .update = wp512_update, @@ -1145,9 +1143,7 @@ static struct shash_alg wp384 = { .cra_blocksize = WP512_BLOCK_SIZE, .cra_module = THIS_MODULE, } -}; - -static struct shash_alg wp256 = { +}, { .digestsize = WP256_DIGEST_SIZE, .init = wp512_init, .update = wp512_update, @@ -1159,39 +1155,16 @@ static struct shash_alg wp256 = { .cra_blocksize = WP512_BLOCK_SIZE, .cra_module = THIS_MODULE, } -}; +} }; static int __init wp512_mod_init(void) { - int ret = 0; - - ret = crypto_register_shash(&wp512); - - if (ret < 0) - goto out; - - ret = crypto_register_shash(&wp384); - if (ret < 0) - { - crypto_unregister_shash(&wp512); - goto out; - } - - ret = crypto_register_shash(&wp256); - if (ret < 0) - { - crypto_unregister_shash(&wp512); - crypto_unregister_shash(&wp384); - } -out: - return ret; + return crypto_register_shashes(wp_algs, ARRAY_SIZE(wp_algs)); } static void __exit wp512_mod_fini(void) { - crypto_unregister_shash(&wp512); - crypto_unregister_shash(&wp384); - crypto_unregister_shash(&wp256); + crypto_unregister_shashes(wp_algs, ARRAY_SIZE(wp_algs)); } MODULE_ALIAS("wp384"); -- cgit v1.2.3 From 77ec2e734d4820c51cbabe1257e9311df5868160 Mon Sep 17 00:00:00 2001 From: Jussi Kivilinna Date: Wed, 11 Jul 2012 14:20:46 +0300 Subject: crypto: cleanup - remove unneeded crypto_alg.cra_list initializations Initialization of cra_list is currently mixed, most ciphers initialize this field and most shashes do not. Initialization however is not needed at all since cra_list is initialized/overwritten in __crypto_register_alg() with list_add(). Therefore perform cleanup to remove all unneeded initializations of this field in 'crypto/'. 
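The reasoning behind this cleanup is visible in the registration path itself: __crypto_register_alg() links cra_list unconditionally, so a static LIST_HEAD_INIT() in an algorithm definition is overwritten before the list is ever walked. Roughly paraphrased from crypto/algapi.c of this era (surrounding checks and larval handling elided; not a verbatim quote):

static struct crypto_larval *__crypto_register_alg(struct crypto_alg *alg)
{
	struct crypto_larval *larval;

	/* ... duplicate/validity checks and larval allocation elided ... */

	/* cra_list is (re)initialized here, making static initialization
	 * in the individual modules dead code. */
	list_add(&alg->cra_list, &crypto_alg_list);

	/* ... larval registration elided ... */
	return larval;
}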
Signed-off-by: Jussi Kivilinna Signed-off-by: Herbert Xu --- crypto/aes_generic.c | 1 - crypto/anubis.c | 1 - crypto/blowfish_generic.c | 1 - crypto/camellia_generic.c | 1 - crypto/cast5.c | 1 - crypto/cast6.c | 1 - crypto/deflate.c | 1 - crypto/fcrypt.c | 1 - crypto/ghash-generic.c | 1 - crypto/khazad.c | 1 - crypto/krng.c | 1 - crypto/lzo.c | 1 - crypto/salsa20_generic.c | 1 - crypto/seed.c | 1 - crypto/twofish_generic.c | 1 - 15 files changed, 15 deletions(-) (limited to 'crypto') diff --git a/crypto/aes_generic.c b/crypto/aes_generic.c index a68c73dae15a..47f2e5c71759 100644 --- a/crypto/aes_generic.c +++ b/crypto/aes_generic.c @@ -1448,7 +1448,6 @@ static struct crypto_alg aes_alg = { .cra_ctxsize = sizeof(struct crypto_aes_ctx), .cra_alignmask = 3, .cra_module = THIS_MODULE, - .cra_list = LIST_HEAD_INIT(aes_alg.cra_list), .cra_u = { .cipher = { .cia_min_keysize = AES_MIN_KEY_SIZE, diff --git a/crypto/anubis.c b/crypto/anubis.c index 77530d571c96..008c8a4fb67c 100644 --- a/crypto/anubis.c +++ b/crypto/anubis.c @@ -678,7 +678,6 @@ static struct crypto_alg anubis_alg = { .cra_ctxsize = sizeof (struct anubis_ctx), .cra_alignmask = 3, .cra_module = THIS_MODULE, - .cra_list = LIST_HEAD_INIT(anubis_alg.cra_list), .cra_u = { .cipher = { .cia_min_keysize = ANUBIS_MIN_KEY_SIZE, .cia_max_keysize = ANUBIS_MAX_KEY_SIZE, diff --git a/crypto/blowfish_generic.c b/crypto/blowfish_generic.c index 6f269b5cfa3b..8baf5447d35b 100644 --- a/crypto/blowfish_generic.c +++ b/crypto/blowfish_generic.c @@ -115,7 +115,6 @@ static struct crypto_alg alg = { .cra_ctxsize = sizeof(struct bf_ctx), .cra_alignmask = 3, .cra_module = THIS_MODULE, - .cra_list = LIST_HEAD_INIT(alg.cra_list), .cra_u = { .cipher = { .cia_min_keysize = BF_MIN_KEY_SIZE, .cia_max_keysize = BF_MAX_KEY_SIZE, diff --git a/crypto/camellia_generic.c b/crypto/camellia_generic.c index f7aaaaf86982..75efa2052305 100644 --- a/crypto/camellia_generic.c +++ b/crypto/camellia_generic.c @@ -1072,7 +1072,6 @@ static struct crypto_alg camellia_alg = { .cra_ctxsize = sizeof(struct camellia_ctx), .cra_alignmask = 3, .cra_module = THIS_MODULE, - .cra_list = LIST_HEAD_INIT(camellia_alg.cra_list), .cra_u = { .cipher = { .cia_min_keysize = CAMELLIA_MIN_KEY_SIZE, diff --git a/crypto/cast5.c b/crypto/cast5.c index 4a230ddec877..fffcb37dec11 100644 --- a/crypto/cast5.c +++ b/crypto/cast5.c @@ -779,7 +779,6 @@ static struct crypto_alg alg = { .cra_ctxsize = sizeof(struct cast5_ctx), .cra_alignmask = 3, .cra_module = THIS_MODULE, - .cra_list = LIST_HEAD_INIT(alg.cra_list), .cra_u = { .cipher = { .cia_min_keysize = CAST5_MIN_KEY_SIZE, diff --git a/crypto/cast6.c b/crypto/cast6.c index e0c15a6c7c34..04264f574601 100644 --- a/crypto/cast6.c +++ b/crypto/cast6.c @@ -519,7 +519,6 @@ static struct crypto_alg alg = { .cra_ctxsize = sizeof(struct cast6_ctx), .cra_alignmask = 3, .cra_module = THIS_MODULE, - .cra_list = LIST_HEAD_INIT(alg.cra_list), .cra_u = { .cipher = { .cia_min_keysize = CAST6_MIN_KEY_SIZE, diff --git a/crypto/deflate.c b/crypto/deflate.c index b0165ecad0c5..b57d70eb156b 100644 --- a/crypto/deflate.c +++ b/crypto/deflate.c @@ -199,7 +199,6 @@ static struct crypto_alg alg = { .cra_flags = CRYPTO_ALG_TYPE_COMPRESS, .cra_ctxsize = sizeof(struct deflate_ctx), .cra_module = THIS_MODULE, - .cra_list = LIST_HEAD_INIT(alg.cra_list), .cra_init = deflate_init, .cra_exit = deflate_exit, .cra_u = { .compress = { diff --git a/crypto/fcrypt.c b/crypto/fcrypt.c index c33107e340b6..3b2cf569c684 100644 --- a/crypto/fcrypt.c +++ b/crypto/fcrypt.c @@ -396,7 +396,6 @@ static 
struct crypto_alg fcrypt_alg = { .cra_ctxsize = sizeof(struct fcrypt_ctx), .cra_module = THIS_MODULE, .cra_alignmask = 3, - .cra_list = LIST_HEAD_INIT(fcrypt_alg.cra_list), .cra_u = { .cipher = { .cia_min_keysize = 8, .cia_max_keysize = 8, diff --git a/crypto/ghash-generic.c b/crypto/ghash-generic.c index 7835b8fc94db..9d3f0c69a86f 100644 --- a/crypto/ghash-generic.c +++ b/crypto/ghash-generic.c @@ -153,7 +153,6 @@ static struct shash_alg ghash_alg = { .cra_blocksize = GHASH_BLOCK_SIZE, .cra_ctxsize = sizeof(struct ghash_ctx), .cra_module = THIS_MODULE, - .cra_list = LIST_HEAD_INIT(ghash_alg.base.cra_list), .cra_exit = ghash_exit_tfm, }, }; diff --git a/crypto/khazad.c b/crypto/khazad.c index 527e4e395fc3..60e7cd66facc 100644 --- a/crypto/khazad.c +++ b/crypto/khazad.c @@ -853,7 +853,6 @@ static struct crypto_alg khazad_alg = { .cra_ctxsize = sizeof (struct khazad_ctx), .cra_alignmask = 7, .cra_module = THIS_MODULE, - .cra_list = LIST_HEAD_INIT(khazad_alg.cra_list), .cra_u = { .cipher = { .cia_min_keysize = KHAZAD_KEY_SIZE, .cia_max_keysize = KHAZAD_KEY_SIZE, diff --git a/crypto/krng.c b/crypto/krng.c index 4328bb3430ed..a2d2b72fc135 100644 --- a/crypto/krng.c +++ b/crypto/krng.c @@ -35,7 +35,6 @@ static struct crypto_alg krng_alg = { .cra_ctxsize = 0, .cra_type = &crypto_rng_type, .cra_module = THIS_MODULE, - .cra_list = LIST_HEAD_INIT(krng_alg.cra_list), .cra_u = { .rng = { .rng_make_random = krng_get_random, diff --git a/crypto/lzo.c b/crypto/lzo.c index b5e77077d751..1c2aa69c54b8 100644 --- a/crypto/lzo.c +++ b/crypto/lzo.c @@ -81,7 +81,6 @@ static struct crypto_alg alg = { .cra_flags = CRYPTO_ALG_TYPE_COMPRESS, .cra_ctxsize = sizeof(struct lzo_ctx), .cra_module = THIS_MODULE, - .cra_list = LIST_HEAD_INIT(alg.cra_list), .cra_init = lzo_init, .cra_exit = lzo_exit, .cra_u = { .compress = { diff --git a/crypto/salsa20_generic.c b/crypto/salsa20_generic.c index eac10c11685c..9a4770c02284 100644 --- a/crypto/salsa20_generic.c +++ b/crypto/salsa20_generic.c @@ -221,7 +221,6 @@ static struct crypto_alg alg = { .cra_ctxsize = sizeof(struct salsa20_ctx), .cra_alignmask = 3, .cra_module = THIS_MODULE, - .cra_list = LIST_HEAD_INIT(alg.cra_list), .cra_u = { .blkcipher = { .setkey = setkey, diff --git a/crypto/seed.c b/crypto/seed.c index d3e422f60556..9c904d6d2151 100644 --- a/crypto/seed.c +++ b/crypto/seed.c @@ -449,7 +449,6 @@ static struct crypto_alg seed_alg = { .cra_ctxsize = sizeof(struct seed_ctx), .cra_alignmask = 3, .cra_module = THIS_MODULE, - .cra_list = LIST_HEAD_INIT(seed_alg.cra_list), .cra_u = { .cipher = { .cia_min_keysize = SEED_KEY_SIZE, diff --git a/crypto/twofish_generic.c b/crypto/twofish_generic.c index 1f07b843e07c..2d5000552d0f 100644 --- a/crypto/twofish_generic.c +++ b/crypto/twofish_generic.c @@ -188,7 +188,6 @@ static struct crypto_alg alg = { .cra_ctxsize = sizeof(struct twofish_ctx), .cra_alignmask = 3, .cra_module = THIS_MODULE, - .cra_list = LIST_HEAD_INIT(alg.cra_list), .cra_u = { .cipher = { .cia_min_keysize = TF_MIN_KEY_SIZE, .cia_max_keysize = TF_MAX_KEY_SIZE, -- cgit v1.2.3 From 270b0c6b406a0ae7673ee880d1d7cc6bd6c904de Mon Sep 17 00:00:00 2001 From: Johannes Goetzfried Date: Wed, 11 Jul 2012 19:37:04 +0200 Subject: crypto: cast5 - prepare generic module for optimized implementations Rename cast5 module to cast5_generic to allow autoloading of optimized implementations. Generic functions and s-boxes are exported to be able to use them within optimized implementations. 
Signed-off-by: Johannes Goetzfried Signed-off-by: Herbert Xu --- crypto/Makefile | 2 +- crypto/cast5.c | 808 ------------------------------------------------ crypto/cast5_generic.c | 821 +++++++++++++++++++++++++++++++++++++++++++++++++ include/crypto/cast5.h | 22 ++ 4 files changed, 844 insertions(+), 809 deletions(-) delete mode 100644 crypto/cast5.c create mode 100644 crypto/cast5_generic.c create mode 100644 include/crypto/cast5.h (limited to 'crypto') diff --git a/crypto/Makefile b/crypto/Makefile index 30f33d675330..a56821e5d573 100644 --- a/crypto/Makefile +++ b/crypto/Makefile @@ -68,7 +68,7 @@ obj-$(CONFIG_CRYPTO_TWOFISH_COMMON) += twofish_common.o obj-$(CONFIG_CRYPTO_SERPENT) += serpent_generic.o obj-$(CONFIG_CRYPTO_AES) += aes_generic.o obj-$(CONFIG_CRYPTO_CAMELLIA) += camellia_generic.o -obj-$(CONFIG_CRYPTO_CAST5) += cast5.o +obj-$(CONFIG_CRYPTO_CAST5) += cast5_generic.o obj-$(CONFIG_CRYPTO_CAST6) += cast6.o obj-$(CONFIG_CRYPTO_ARC4) += arc4.o obj-$(CONFIG_CRYPTO_TEA) += tea.o diff --git a/crypto/cast5.c b/crypto/cast5.c deleted file mode 100644 index fffcb37dec11..000000000000 --- a/crypto/cast5.c +++ /dev/null @@ -1,808 +0,0 @@ -/* Kernel cryptographic api. -* cast5.c - Cast5 cipher algorithm (rfc2144). -* -* Derived from GnuPG implementation of cast5. -* -* Major Changes. -* Complete conformance to rfc2144. -* Supports key size from 40 to 128 bits. -* -* Copyright (C) 1998, 1999, 2000, 2001 Free Software Foundation, Inc. -* Copyright (C) 2003 Kartikey Mahendra Bhatt . -* -* This program is free software; you can redistribute it and/or modify it -* under the terms of GNU General Public License as published by the Free -* Software Foundation; either version 2 of the License, or (at your option) -* any later version. -* -* You should have received a copy of the GNU General Public License -* along with this program; if not, write to the Free Software -* Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA -*/ - - -#include -#include -#include -#include -#include -#include -#include - -#define CAST5_BLOCK_SIZE 8 -#define CAST5_MIN_KEY_SIZE 5 -#define CAST5_MAX_KEY_SIZE 16 - -struct cast5_ctx { - u32 Km[16]; - u8 Kr[16]; - int rr; /* rr?number of rounds = 16:number of rounds = 12; (rfc 2144) */ -}; - - -static const u32 s1[256] = { - 0x30fb40d4, 0x9fa0ff0b, 0x6beccd2f, 0x3f258c7a, 0x1e213f2f, - 0x9c004dd3, 0x6003e540, 0xcf9fc949, - 0xbfd4af27, 0x88bbbdb5, 0xe2034090, 0x98d09675, 0x6e63a0e0, - 0x15c361d2, 0xc2e7661d, 0x22d4ff8e, - 0x28683b6f, 0xc07fd059, 0xff2379c8, 0x775f50e2, 0x43c340d3, - 0xdf2f8656, 0x887ca41a, 0xa2d2bd2d, - 0xa1c9e0d6, 0x346c4819, 0x61b76d87, 0x22540f2f, 0x2abe32e1, - 0xaa54166b, 0x22568e3a, 0xa2d341d0, - 0x66db40c8, 0xa784392f, 0x004dff2f, 0x2db9d2de, 0x97943fac, - 0x4a97c1d8, 0x527644b7, 0xb5f437a7, - 0xb82cbaef, 0xd751d159, 0x6ff7f0ed, 0x5a097a1f, 0x827b68d0, - 0x90ecf52e, 0x22b0c054, 0xbc8e5935, - 0x4b6d2f7f, 0x50bb64a2, 0xd2664910, 0xbee5812d, 0xb7332290, - 0xe93b159f, 0xb48ee411, 0x4bff345d, - 0xfd45c240, 0xad31973f, 0xc4f6d02e, 0x55fc8165, 0xd5b1caad, - 0xa1ac2dae, 0xa2d4b76d, 0xc19b0c50, - 0x882240f2, 0x0c6e4f38, 0xa4e4bfd7, 0x4f5ba272, 0x564c1d2f, - 0xc59c5319, 0xb949e354, 0xb04669fe, - 0xb1b6ab8a, 0xc71358dd, 0x6385c545, 0x110f935d, 0x57538ad5, - 0x6a390493, 0xe63d37e0, 0x2a54f6b3, - 0x3a787d5f, 0x6276a0b5, 0x19a6fcdf, 0x7a42206a, 0x29f9d4d5, - 0xf61b1891, 0xbb72275e, 0xaa508167, - 0x38901091, 0xc6b505eb, 0x84c7cb8c, 0x2ad75a0f, 0x874a1427, - 0xa2d1936b, 0x2ad286af, 0xaa56d291, - 0xd7894360, 0x425c750d, 0x93b39e26, 0x187184c9, 
0x6c00b32d, - 0x73e2bb14, 0xa0bebc3c, 0x54623779, - 0x64459eab, 0x3f328b82, 0x7718cf82, 0x59a2cea6, 0x04ee002e, - 0x89fe78e6, 0x3fab0950, 0x325ff6c2, - 0x81383f05, 0x6963c5c8, 0x76cb5ad6, 0xd49974c9, 0xca180dcf, - 0x380782d5, 0xc7fa5cf6, 0x8ac31511, - 0x35e79e13, 0x47da91d0, 0xf40f9086, 0xa7e2419e, 0x31366241, - 0x051ef495, 0xaa573b04, 0x4a805d8d, - 0x548300d0, 0x00322a3c, 0xbf64cddf, 0xba57a68e, 0x75c6372b, - 0x50afd341, 0xa7c13275, 0x915a0bf5, - 0x6b54bfab, 0x2b0b1426, 0xab4cc9d7, 0x449ccd82, 0xf7fbf265, - 0xab85c5f3, 0x1b55db94, 0xaad4e324, - 0xcfa4bd3f, 0x2deaa3e2, 0x9e204d02, 0xc8bd25ac, 0xeadf55b3, - 0xd5bd9e98, 0xe31231b2, 0x2ad5ad6c, - 0x954329de, 0xadbe4528, 0xd8710f69, 0xaa51c90f, 0xaa786bf6, - 0x22513f1e, 0xaa51a79b, 0x2ad344cc, - 0x7b5a41f0, 0xd37cfbad, 0x1b069505, 0x41ece491, 0xb4c332e6, - 0x032268d4, 0xc9600acc, 0xce387e6d, - 0xbf6bb16c, 0x6a70fb78, 0x0d03d9c9, 0xd4df39de, 0xe01063da, - 0x4736f464, 0x5ad328d8, 0xb347cc96, - 0x75bb0fc3, 0x98511bfb, 0x4ffbcc35, 0xb58bcf6a, 0xe11f0abc, - 0xbfc5fe4a, 0xa70aec10, 0xac39570a, - 0x3f04442f, 0x6188b153, 0xe0397a2e, 0x5727cb79, 0x9ceb418f, - 0x1cacd68d, 0x2ad37c96, 0x0175cb9d, - 0xc69dff09, 0xc75b65f0, 0xd9db40d8, 0xec0e7779, 0x4744ead4, - 0xb11c3274, 0xdd24cb9e, 0x7e1c54bd, - 0xf01144f9, 0xd2240eb1, 0x9675b3fd, 0xa3ac3755, 0xd47c27af, - 0x51c85f4d, 0x56907596, 0xa5bb15e6, - 0x580304f0, 0xca042cf1, 0x011a37ea, 0x8dbfaadb, 0x35ba3e4a, - 0x3526ffa0, 0xc37b4d09, 0xbc306ed9, - 0x98a52666, 0x5648f725, 0xff5e569d, 0x0ced63d0, 0x7c63b2cf, - 0x700b45e1, 0xd5ea50f1, 0x85a92872, - 0xaf1fbda7, 0xd4234870, 0xa7870bf3, 0x2d3b4d79, 0x42e04198, - 0x0cd0ede7, 0x26470db8, 0xf881814c, - 0x474d6ad7, 0x7c0c5e5c, 0xd1231959, 0x381b7298, 0xf5d2f4db, - 0xab838653, 0x6e2f1e23, 0x83719c9e, - 0xbd91e046, 0x9a56456e, 0xdc39200c, 0x20c8c571, 0x962bda1c, - 0xe1e696ff, 0xb141ab08, 0x7cca89b9, - 0x1a69e783, 0x02cc4843, 0xa2f7c579, 0x429ef47d, 0x427b169c, - 0x5ac9f049, 0xdd8f0f00, 0x5c8165bf -}; -static const u32 s2[256] = { - 0x1f201094, 0xef0ba75b, 0x69e3cf7e, 0x393f4380, 0xfe61cf7a, - 0xeec5207a, 0x55889c94, 0x72fc0651, - 0xada7ef79, 0x4e1d7235, 0xd55a63ce, 0xde0436ba, 0x99c430ef, - 0x5f0c0794, 0x18dcdb7d, 0xa1d6eff3, - 0xa0b52f7b, 0x59e83605, 0xee15b094, 0xe9ffd909, 0xdc440086, - 0xef944459, 0xba83ccb3, 0xe0c3cdfb, - 0xd1da4181, 0x3b092ab1, 0xf997f1c1, 0xa5e6cf7b, 0x01420ddb, - 0xe4e7ef5b, 0x25a1ff41, 0xe180f806, - 0x1fc41080, 0x179bee7a, 0xd37ac6a9, 0xfe5830a4, 0x98de8b7f, - 0x77e83f4e, 0x79929269, 0x24fa9f7b, - 0xe113c85b, 0xacc40083, 0xd7503525, 0xf7ea615f, 0x62143154, - 0x0d554b63, 0x5d681121, 0xc866c359, - 0x3d63cf73, 0xcee234c0, 0xd4d87e87, 0x5c672b21, 0x071f6181, - 0x39f7627f, 0x361e3084, 0xe4eb573b, - 0x602f64a4, 0xd63acd9c, 0x1bbc4635, 0x9e81032d, 0x2701f50c, - 0x99847ab4, 0xa0e3df79, 0xba6cf38c, - 0x10843094, 0x2537a95e, 0xf46f6ffe, 0xa1ff3b1f, 0x208cfb6a, - 0x8f458c74, 0xd9e0a227, 0x4ec73a34, - 0xfc884f69, 0x3e4de8df, 0xef0e0088, 0x3559648d, 0x8a45388c, - 0x1d804366, 0x721d9bfd, 0xa58684bb, - 0xe8256333, 0x844e8212, 0x128d8098, 0xfed33fb4, 0xce280ae1, - 0x27e19ba5, 0xd5a6c252, 0xe49754bd, - 0xc5d655dd, 0xeb667064, 0x77840b4d, 0xa1b6a801, 0x84db26a9, - 0xe0b56714, 0x21f043b7, 0xe5d05860, - 0x54f03084, 0x066ff472, 0xa31aa153, 0xdadc4755, 0xb5625dbf, - 0x68561be6, 0x83ca6b94, 0x2d6ed23b, - 0xeccf01db, 0xa6d3d0ba, 0xb6803d5c, 0xaf77a709, 0x33b4a34c, - 0x397bc8d6, 0x5ee22b95, 0x5f0e5304, - 0x81ed6f61, 0x20e74364, 0xb45e1378, 0xde18639b, 0x881ca122, - 0xb96726d1, 0x8049a7e8, 0x22b7da7b, - 0x5e552d25, 0x5272d237, 0x79d2951c, 0xc60d894c, 0x488cb402, - 
0x1ba4fe5b, 0xa4b09f6b, 0x1ca815cf, - 0xa20c3005, 0x8871df63, 0xb9de2fcb, 0x0cc6c9e9, 0x0beeff53, - 0xe3214517, 0xb4542835, 0x9f63293c, - 0xee41e729, 0x6e1d2d7c, 0x50045286, 0x1e6685f3, 0xf33401c6, - 0x30a22c95, 0x31a70850, 0x60930f13, - 0x73f98417, 0xa1269859, 0xec645c44, 0x52c877a9, 0xcdff33a6, - 0xa02b1741, 0x7cbad9a2, 0x2180036f, - 0x50d99c08, 0xcb3f4861, 0xc26bd765, 0x64a3f6ab, 0x80342676, - 0x25a75e7b, 0xe4e6d1fc, 0x20c710e6, - 0xcdf0b680, 0x17844d3b, 0x31eef84d, 0x7e0824e4, 0x2ccb49eb, - 0x846a3bae, 0x8ff77888, 0xee5d60f6, - 0x7af75673, 0x2fdd5cdb, 0xa11631c1, 0x30f66f43, 0xb3faec54, - 0x157fd7fa, 0xef8579cc, 0xd152de58, - 0xdb2ffd5e, 0x8f32ce19, 0x306af97a, 0x02f03ef8, 0x99319ad5, - 0xc242fa0f, 0xa7e3ebb0, 0xc68e4906, - 0xb8da230c, 0x80823028, 0xdcdef3c8, 0xd35fb171, 0x088a1bc8, - 0xbec0c560, 0x61a3c9e8, 0xbca8f54d, - 0xc72feffa, 0x22822e99, 0x82c570b4, 0xd8d94e89, 0x8b1c34bc, - 0x301e16e6, 0x273be979, 0xb0ffeaa6, - 0x61d9b8c6, 0x00b24869, 0xb7ffce3f, 0x08dc283b, 0x43daf65a, - 0xf7e19798, 0x7619b72f, 0x8f1c9ba4, - 0xdc8637a0, 0x16a7d3b1, 0x9fc393b7, 0xa7136eeb, 0xc6bcc63e, - 0x1a513742, 0xef6828bc, 0x520365d6, - 0x2d6a77ab, 0x3527ed4b, 0x821fd216, 0x095c6e2e, 0xdb92f2fb, - 0x5eea29cb, 0x145892f5, 0x91584f7f, - 0x5483697b, 0x2667a8cc, 0x85196048, 0x8c4bacea, 0x833860d4, - 0x0d23e0f9, 0x6c387e8a, 0x0ae6d249, - 0xb284600c, 0xd835731d, 0xdcb1c647, 0xac4c56ea, 0x3ebd81b3, - 0x230eabb0, 0x6438bc87, 0xf0b5b1fa, - 0x8f5ea2b3, 0xfc184642, 0x0a036b7a, 0x4fb089bd, 0x649da589, - 0xa345415e, 0x5c038323, 0x3e5d3bb9, - 0x43d79572, 0x7e6dd07c, 0x06dfdf1e, 0x6c6cc4ef, 0x7160a539, - 0x73bfbe70, 0x83877605, 0x4523ecf1 -}; -static const u32 s3[256] = { - 0x8defc240, 0x25fa5d9f, 0xeb903dbf, 0xe810c907, 0x47607fff, - 0x369fe44b, 0x8c1fc644, 0xaececa90, - 0xbeb1f9bf, 0xeefbcaea, 0xe8cf1950, 0x51df07ae, 0x920e8806, - 0xf0ad0548, 0xe13c8d83, 0x927010d5, - 0x11107d9f, 0x07647db9, 0xb2e3e4d4, 0x3d4f285e, 0xb9afa820, - 0xfade82e0, 0xa067268b, 0x8272792e, - 0x553fb2c0, 0x489ae22b, 0xd4ef9794, 0x125e3fbc, 0x21fffcee, - 0x825b1bfd, 0x9255c5ed, 0x1257a240, - 0x4e1a8302, 0xbae07fff, 0x528246e7, 0x8e57140e, 0x3373f7bf, - 0x8c9f8188, 0xa6fc4ee8, 0xc982b5a5, - 0xa8c01db7, 0x579fc264, 0x67094f31, 0xf2bd3f5f, 0x40fff7c1, - 0x1fb78dfc, 0x8e6bd2c1, 0x437be59b, - 0x99b03dbf, 0xb5dbc64b, 0x638dc0e6, 0x55819d99, 0xa197c81c, - 0x4a012d6e, 0xc5884a28, 0xccc36f71, - 0xb843c213, 0x6c0743f1, 0x8309893c, 0x0feddd5f, 0x2f7fe850, - 0xd7c07f7e, 0x02507fbf, 0x5afb9a04, - 0xa747d2d0, 0x1651192e, 0xaf70bf3e, 0x58c31380, 0x5f98302e, - 0x727cc3c4, 0x0a0fb402, 0x0f7fef82, - 0x8c96fdad, 0x5d2c2aae, 0x8ee99a49, 0x50da88b8, 0x8427f4a0, - 0x1eac5790, 0x796fb449, 0x8252dc15, - 0xefbd7d9b, 0xa672597d, 0xada840d8, 0x45f54504, 0xfa5d7403, - 0xe83ec305, 0x4f91751a, 0x925669c2, - 0x23efe941, 0xa903f12e, 0x60270df2, 0x0276e4b6, 0x94fd6574, - 0x927985b2, 0x8276dbcb, 0x02778176, - 0xf8af918d, 0x4e48f79e, 0x8f616ddf, 0xe29d840e, 0x842f7d83, - 0x340ce5c8, 0x96bbb682, 0x93b4b148, - 0xef303cab, 0x984faf28, 0x779faf9b, 0x92dc560d, 0x224d1e20, - 0x8437aa88, 0x7d29dc96, 0x2756d3dc, - 0x8b907cee, 0xb51fd240, 0xe7c07ce3, 0xe566b4a1, 0xc3e9615e, - 0x3cf8209d, 0x6094d1e3, 0xcd9ca341, - 0x5c76460e, 0x00ea983b, 0xd4d67881, 0xfd47572c, 0xf76cedd9, - 0xbda8229c, 0x127dadaa, 0x438a074e, - 0x1f97c090, 0x081bdb8a, 0x93a07ebe, 0xb938ca15, 0x97b03cff, - 0x3dc2c0f8, 0x8d1ab2ec, 0x64380e51, - 0x68cc7bfb, 0xd90f2788, 0x12490181, 0x5de5ffd4, 0xdd7ef86a, - 0x76a2e214, 0xb9a40368, 0x925d958f, - 0x4b39fffa, 0xba39aee9, 0xa4ffd30b, 0xfaf7933b, 0x6d498623, - 0x193cbcfa, 
0x27627545, 0x825cf47a, - 0x61bd8ba0, 0xd11e42d1, 0xcead04f4, 0x127ea392, 0x10428db7, - 0x8272a972, 0x9270c4a8, 0x127de50b, - 0x285ba1c8, 0x3c62f44f, 0x35c0eaa5, 0xe805d231, 0x428929fb, - 0xb4fcdf82, 0x4fb66a53, 0x0e7dc15b, - 0x1f081fab, 0x108618ae, 0xfcfd086d, 0xf9ff2889, 0x694bcc11, - 0x236a5cae, 0x12deca4d, 0x2c3f8cc5, - 0xd2d02dfe, 0xf8ef5896, 0xe4cf52da, 0x95155b67, 0x494a488c, - 0xb9b6a80c, 0x5c8f82bc, 0x89d36b45, - 0x3a609437, 0xec00c9a9, 0x44715253, 0x0a874b49, 0xd773bc40, - 0x7c34671c, 0x02717ef6, 0x4feb5536, - 0xa2d02fff, 0xd2bf60c4, 0xd43f03c0, 0x50b4ef6d, 0x07478cd1, - 0x006e1888, 0xa2e53f55, 0xb9e6d4bc, - 0xa2048016, 0x97573833, 0xd7207d67, 0xde0f8f3d, 0x72f87b33, - 0xabcc4f33, 0x7688c55d, 0x7b00a6b0, - 0x947b0001, 0x570075d2, 0xf9bb88f8, 0x8942019e, 0x4264a5ff, - 0x856302e0, 0x72dbd92b, 0xee971b69, - 0x6ea22fde, 0x5f08ae2b, 0xaf7a616d, 0xe5c98767, 0xcf1febd2, - 0x61efc8c2, 0xf1ac2571, 0xcc8239c2, - 0x67214cb8, 0xb1e583d1, 0xb7dc3e62, 0x7f10bdce, 0xf90a5c38, - 0x0ff0443d, 0x606e6dc6, 0x60543a49, - 0x5727c148, 0x2be98a1d, 0x8ab41738, 0x20e1be24, 0xaf96da0f, - 0x68458425, 0x99833be5, 0x600d457d, - 0x282f9350, 0x8334b362, 0xd91d1120, 0x2b6d8da0, 0x642b1e31, - 0x9c305a00, 0x52bce688, 0x1b03588a, - 0xf7baefd5, 0x4142ed9c, 0xa4315c11, 0x83323ec5, 0xdfef4636, - 0xa133c501, 0xe9d3531c, 0xee353783 -}; -static const u32 s4[256] = { - 0x9db30420, 0x1fb6e9de, 0xa7be7bef, 0xd273a298, 0x4a4f7bdb, - 0x64ad8c57, 0x85510443, 0xfa020ed1, - 0x7e287aff, 0xe60fb663, 0x095f35a1, 0x79ebf120, 0xfd059d43, - 0x6497b7b1, 0xf3641f63, 0x241e4adf, - 0x28147f5f, 0x4fa2b8cd, 0xc9430040, 0x0cc32220, 0xfdd30b30, - 0xc0a5374f, 0x1d2d00d9, 0x24147b15, - 0xee4d111a, 0x0fca5167, 0x71ff904c, 0x2d195ffe, 0x1a05645f, - 0x0c13fefe, 0x081b08ca, 0x05170121, - 0x80530100, 0xe83e5efe, 0xac9af4f8, 0x7fe72701, 0xd2b8ee5f, - 0x06df4261, 0xbb9e9b8a, 0x7293ea25, - 0xce84ffdf, 0xf5718801, 0x3dd64b04, 0xa26f263b, 0x7ed48400, - 0x547eebe6, 0x446d4ca0, 0x6cf3d6f5, - 0x2649abdf, 0xaea0c7f5, 0x36338cc1, 0x503f7e93, 0xd3772061, - 0x11b638e1, 0x72500e03, 0xf80eb2bb, - 0xabe0502e, 0xec8d77de, 0x57971e81, 0xe14f6746, 0xc9335400, - 0x6920318f, 0x081dbb99, 0xffc304a5, - 0x4d351805, 0x7f3d5ce3, 0xa6c866c6, 0x5d5bcca9, 0xdaec6fea, - 0x9f926f91, 0x9f46222f, 0x3991467d, - 0xa5bf6d8e, 0x1143c44f, 0x43958302, 0xd0214eeb, 0x022083b8, - 0x3fb6180c, 0x18f8931e, 0x281658e6, - 0x26486e3e, 0x8bd78a70, 0x7477e4c1, 0xb506e07c, 0xf32d0a25, - 0x79098b02, 0xe4eabb81, 0x28123b23, - 0x69dead38, 0x1574ca16, 0xdf871b62, 0x211c40b7, 0xa51a9ef9, - 0x0014377b, 0x041e8ac8, 0x09114003, - 0xbd59e4d2, 0xe3d156d5, 0x4fe876d5, 0x2f91a340, 0x557be8de, - 0x00eae4a7, 0x0ce5c2ec, 0x4db4bba6, - 0xe756bdff, 0xdd3369ac, 0xec17b035, 0x06572327, 0x99afc8b0, - 0x56c8c391, 0x6b65811c, 0x5e146119, - 0x6e85cb75, 0xbe07c002, 0xc2325577, 0x893ff4ec, 0x5bbfc92d, - 0xd0ec3b25, 0xb7801ab7, 0x8d6d3b24, - 0x20c763ef, 0xc366a5fc, 0x9c382880, 0x0ace3205, 0xaac9548a, - 0xeca1d7c7, 0x041afa32, 0x1d16625a, - 0x6701902c, 0x9b757a54, 0x31d477f7, 0x9126b031, 0x36cc6fdb, - 0xc70b8b46, 0xd9e66a48, 0x56e55a79, - 0x026a4ceb, 0x52437eff, 0x2f8f76b4, 0x0df980a5, 0x8674cde3, - 0xedda04eb, 0x17a9be04, 0x2c18f4df, - 0xb7747f9d, 0xab2af7b4, 0xefc34d20, 0x2e096b7c, 0x1741a254, - 0xe5b6a035, 0x213d42f6, 0x2c1c7c26, - 0x61c2f50f, 0x6552daf9, 0xd2c231f8, 0x25130f69, 0xd8167fa2, - 0x0418f2c8, 0x001a96a6, 0x0d1526ab, - 0x63315c21, 0x5e0a72ec, 0x49bafefd, 0x187908d9, 0x8d0dbd86, - 0x311170a7, 0x3e9b640c, 0xcc3e10d7, - 0xd5cad3b6, 0x0caec388, 0xf73001e1, 0x6c728aff, 0x71eae2a1, - 0x1f9af36e, 0xcfcbd12f, 
0xc1de8417, - 0xac07be6b, 0xcb44a1d8, 0x8b9b0f56, 0x013988c3, 0xb1c52fca, - 0xb4be31cd, 0xd8782806, 0x12a3a4e2, - 0x6f7de532, 0x58fd7eb6, 0xd01ee900, 0x24adffc2, 0xf4990fc5, - 0x9711aac5, 0x001d7b95, 0x82e5e7d2, - 0x109873f6, 0x00613096, 0xc32d9521, 0xada121ff, 0x29908415, - 0x7fbb977f, 0xaf9eb3db, 0x29c9ed2a, - 0x5ce2a465, 0xa730f32c, 0xd0aa3fe8, 0x8a5cc091, 0xd49e2ce7, - 0x0ce454a9, 0xd60acd86, 0x015f1919, - 0x77079103, 0xdea03af6, 0x78a8565e, 0xdee356df, 0x21f05cbe, - 0x8b75e387, 0xb3c50651, 0xb8a5c3ef, - 0xd8eeb6d2, 0xe523be77, 0xc2154529, 0x2f69efdf, 0xafe67afb, - 0xf470c4b2, 0xf3e0eb5b, 0xd6cc9876, - 0x39e4460c, 0x1fda8538, 0x1987832f, 0xca007367, 0xa99144f8, - 0x296b299e, 0x492fc295, 0x9266beab, - 0xb5676e69, 0x9bd3ddda, 0xdf7e052f, 0xdb25701c, 0x1b5e51ee, - 0xf65324e6, 0x6afce36c, 0x0316cc04, - 0x8644213e, 0xb7dc59d0, 0x7965291f, 0xccd6fd43, 0x41823979, - 0x932bcdf6, 0xb657c34d, 0x4edfd282, - 0x7ae5290c, 0x3cb9536b, 0x851e20fe, 0x9833557e, 0x13ecf0b0, - 0xd3ffb372, 0x3f85c5c1, 0x0aef7ed2 -}; -static const u32 s5[256] = { - 0x7ec90c04, 0x2c6e74b9, 0x9b0e66df, 0xa6337911, 0xb86a7fff, - 0x1dd358f5, 0x44dd9d44, 0x1731167f, - 0x08fbf1fa, 0xe7f511cc, 0xd2051b00, 0x735aba00, 0x2ab722d8, - 0x386381cb, 0xacf6243a, 0x69befd7a, - 0xe6a2e77f, 0xf0c720cd, 0xc4494816, 0xccf5c180, 0x38851640, - 0x15b0a848, 0xe68b18cb, 0x4caadeff, - 0x5f480a01, 0x0412b2aa, 0x259814fc, 0x41d0efe2, 0x4e40b48d, - 0x248eb6fb, 0x8dba1cfe, 0x41a99b02, - 0x1a550a04, 0xba8f65cb, 0x7251f4e7, 0x95a51725, 0xc106ecd7, - 0x97a5980a, 0xc539b9aa, 0x4d79fe6a, - 0xf2f3f763, 0x68af8040, 0xed0c9e56, 0x11b4958b, 0xe1eb5a88, - 0x8709e6b0, 0xd7e07156, 0x4e29fea7, - 0x6366e52d, 0x02d1c000, 0xc4ac8e05, 0x9377f571, 0x0c05372a, - 0x578535f2, 0x2261be02, 0xd642a0c9, - 0xdf13a280, 0x74b55bd2, 0x682199c0, 0xd421e5ec, 0x53fb3ce8, - 0xc8adedb3, 0x28a87fc9, 0x3d959981, - 0x5c1ff900, 0xfe38d399, 0x0c4eff0b, 0x062407ea, 0xaa2f4fb1, - 0x4fb96976, 0x90c79505, 0xb0a8a774, - 0xef55a1ff, 0xe59ca2c2, 0xa6b62d27, 0xe66a4263, 0xdf65001f, - 0x0ec50966, 0xdfdd55bc, 0x29de0655, - 0x911e739a, 0x17af8975, 0x32c7911c, 0x89f89468, 0x0d01e980, - 0x524755f4, 0x03b63cc9, 0x0cc844b2, - 0xbcf3f0aa, 0x87ac36e9, 0xe53a7426, 0x01b3d82b, 0x1a9e7449, - 0x64ee2d7e, 0xcddbb1da, 0x01c94910, - 0xb868bf80, 0x0d26f3fd, 0x9342ede7, 0x04a5c284, 0x636737b6, - 0x50f5b616, 0xf24766e3, 0x8eca36c1, - 0x136e05db, 0xfef18391, 0xfb887a37, 0xd6e7f7d4, 0xc7fb7dc9, - 0x3063fcdf, 0xb6f589de, 0xec2941da, - 0x26e46695, 0xb7566419, 0xf654efc5, 0xd08d58b7, 0x48925401, - 0xc1bacb7f, 0xe5ff550f, 0xb6083049, - 0x5bb5d0e8, 0x87d72e5a, 0xab6a6ee1, 0x223a66ce, 0xc62bf3cd, - 0x9e0885f9, 0x68cb3e47, 0x086c010f, - 0xa21de820, 0xd18b69de, 0xf3f65777, 0xfa02c3f6, 0x407edac3, - 0xcbb3d550, 0x1793084d, 0xb0d70eba, - 0x0ab378d5, 0xd951fb0c, 0xded7da56, 0x4124bbe4, 0x94ca0b56, - 0x0f5755d1, 0xe0e1e56e, 0x6184b5be, - 0x580a249f, 0x94f74bc0, 0xe327888e, 0x9f7b5561, 0xc3dc0280, - 0x05687715, 0x646c6bd7, 0x44904db3, - 0x66b4f0a3, 0xc0f1648a, 0x697ed5af, 0x49e92ff6, 0x309e374f, - 0x2cb6356a, 0x85808573, 0x4991f840, - 0x76f0ae02, 0x083be84d, 0x28421c9a, 0x44489406, 0x736e4cb8, - 0xc1092910, 0x8bc95fc6, 0x7d869cf4, - 0x134f616f, 0x2e77118d, 0xb31b2be1, 0xaa90b472, 0x3ca5d717, - 0x7d161bba, 0x9cad9010, 0xaf462ba2, - 0x9fe459d2, 0x45d34559, 0xd9f2da13, 0xdbc65487, 0xf3e4f94e, - 0x176d486f, 0x097c13ea, 0x631da5c7, - 0x445f7382, 0x175683f4, 0xcdc66a97, 0x70be0288, 0xb3cdcf72, - 0x6e5dd2f3, 0x20936079, 0x459b80a5, - 0xbe60e2db, 0xa9c23101, 0xeba5315c, 0x224e42f2, 0x1c5c1572, - 0xf6721b2c, 0x1ad2fff3, 0x8c25404e, - 
0x324ed72f, 0x4067b7fd, 0x0523138e, 0x5ca3bc78, 0xdc0fd66e, - 0x75922283, 0x784d6b17, 0x58ebb16e, - 0x44094f85, 0x3f481d87, 0xfcfeae7b, 0x77b5ff76, 0x8c2302bf, - 0xaaf47556, 0x5f46b02a, 0x2b092801, - 0x3d38f5f7, 0x0ca81f36, 0x52af4a8a, 0x66d5e7c0, 0xdf3b0874, - 0x95055110, 0x1b5ad7a8, 0xf61ed5ad, - 0x6cf6e479, 0x20758184, 0xd0cefa65, 0x88f7be58, 0x4a046826, - 0x0ff6f8f3, 0xa09c7f70, 0x5346aba0, - 0x5ce96c28, 0xe176eda3, 0x6bac307f, 0x376829d2, 0x85360fa9, - 0x17e3fe2a, 0x24b79767, 0xf5a96b20, - 0xd6cd2595, 0x68ff1ebf, 0x7555442c, 0xf19f06be, 0xf9e0659a, - 0xeeb9491d, 0x34010718, 0xbb30cab8, - 0xe822fe15, 0x88570983, 0x750e6249, 0xda627e55, 0x5e76ffa8, - 0xb1534546, 0x6d47de08, 0xefe9e7d4 -}; -static const u32 s6[256] = { - 0xf6fa8f9d, 0x2cac6ce1, 0x4ca34867, 0xe2337f7c, 0x95db08e7, - 0x016843b4, 0xeced5cbc, 0x325553ac, - 0xbf9f0960, 0xdfa1e2ed, 0x83f0579d, 0x63ed86b9, 0x1ab6a6b8, - 0xde5ebe39, 0xf38ff732, 0x8989b138, - 0x33f14961, 0xc01937bd, 0xf506c6da, 0xe4625e7e, 0xa308ea99, - 0x4e23e33c, 0x79cbd7cc, 0x48a14367, - 0xa3149619, 0xfec94bd5, 0xa114174a, 0xeaa01866, 0xa084db2d, - 0x09a8486f, 0xa888614a, 0x2900af98, - 0x01665991, 0xe1992863, 0xc8f30c60, 0x2e78ef3c, 0xd0d51932, - 0xcf0fec14, 0xf7ca07d2, 0xd0a82072, - 0xfd41197e, 0x9305a6b0, 0xe86be3da, 0x74bed3cd, 0x372da53c, - 0x4c7f4448, 0xdab5d440, 0x6dba0ec3, - 0x083919a7, 0x9fbaeed9, 0x49dbcfb0, 0x4e670c53, 0x5c3d9c01, - 0x64bdb941, 0x2c0e636a, 0xba7dd9cd, - 0xea6f7388, 0xe70bc762, 0x35f29adb, 0x5c4cdd8d, 0xf0d48d8c, - 0xb88153e2, 0x08a19866, 0x1ae2eac8, - 0x284caf89, 0xaa928223, 0x9334be53, 0x3b3a21bf, 0x16434be3, - 0x9aea3906, 0xefe8c36e, 0xf890cdd9, - 0x80226dae, 0xc340a4a3, 0xdf7e9c09, 0xa694a807, 0x5b7c5ecc, - 0x221db3a6, 0x9a69a02f, 0x68818a54, - 0xceb2296f, 0x53c0843a, 0xfe893655, 0x25bfe68a, 0xb4628abc, - 0xcf222ebf, 0x25ac6f48, 0xa9a99387, - 0x53bddb65, 0xe76ffbe7, 0xe967fd78, 0x0ba93563, 0x8e342bc1, - 0xe8a11be9, 0x4980740d, 0xc8087dfc, - 0x8de4bf99, 0xa11101a0, 0x7fd37975, 0xda5a26c0, 0xe81f994f, - 0x9528cd89, 0xfd339fed, 0xb87834bf, - 0x5f04456d, 0x22258698, 0xc9c4c83b, 0x2dc156be, 0x4f628daa, - 0x57f55ec5, 0xe2220abe, 0xd2916ebf, - 0x4ec75b95, 0x24f2c3c0, 0x42d15d99, 0xcd0d7fa0, 0x7b6e27ff, - 0xa8dc8af0, 0x7345c106, 0xf41e232f, - 0x35162386, 0xe6ea8926, 0x3333b094, 0x157ec6f2, 0x372b74af, - 0x692573e4, 0xe9a9d848, 0xf3160289, - 0x3a62ef1d, 0xa787e238, 0xf3a5f676, 0x74364853, 0x20951063, - 0x4576698d, 0xb6fad407, 0x592af950, - 0x36f73523, 0x4cfb6e87, 0x7da4cec0, 0x6c152daa, 0xcb0396a8, - 0xc50dfe5d, 0xfcd707ab, 0x0921c42f, - 0x89dff0bb, 0x5fe2be78, 0x448f4f33, 0x754613c9, 0x2b05d08d, - 0x48b9d585, 0xdc049441, 0xc8098f9b, - 0x7dede786, 0xc39a3373, 0x42410005, 0x6a091751, 0x0ef3c8a6, - 0x890072d6, 0x28207682, 0xa9a9f7be, - 0xbf32679d, 0xd45b5b75, 0xb353fd00, 0xcbb0e358, 0x830f220a, - 0x1f8fb214, 0xd372cf08, 0xcc3c4a13, - 0x8cf63166, 0x061c87be, 0x88c98f88, 0x6062e397, 0x47cf8e7a, - 0xb6c85283, 0x3cc2acfb, 0x3fc06976, - 0x4e8f0252, 0x64d8314d, 0xda3870e3, 0x1e665459, 0xc10908f0, - 0x513021a5, 0x6c5b68b7, 0x822f8aa0, - 0x3007cd3e, 0x74719eef, 0xdc872681, 0x073340d4, 0x7e432fd9, - 0x0c5ec241, 0x8809286c, 0xf592d891, - 0x08a930f6, 0x957ef305, 0xb7fbffbd, 0xc266e96f, 0x6fe4ac98, - 0xb173ecc0, 0xbc60b42a, 0x953498da, - 0xfba1ae12, 0x2d4bd736, 0x0f25faab, 0xa4f3fceb, 0xe2969123, - 0x257f0c3d, 0x9348af49, 0x361400bc, - 0xe8816f4a, 0x3814f200, 0xa3f94043, 0x9c7a54c2, 0xbc704f57, - 0xda41e7f9, 0xc25ad33a, 0x54f4a084, - 0xb17f5505, 0x59357cbe, 0xedbd15c8, 0x7f97c5ab, 0xba5ac7b5, - 0xb6f6deaf, 0x3a479c3a, 0x5302da25, - 0x653d7e6a, 
0x54268d49, 0x51a477ea, 0x5017d55b, 0xd7d25d88, - 0x44136c76, 0x0404a8c8, 0xb8e5a121, - 0xb81a928a, 0x60ed5869, 0x97c55b96, 0xeaec991b, 0x29935913, - 0x01fdb7f1, 0x088e8dfa, 0x9ab6f6f5, - 0x3b4cbf9f, 0x4a5de3ab, 0xe6051d35, 0xa0e1d855, 0xd36b4cf1, - 0xf544edeb, 0xb0e93524, 0xbebb8fbd, - 0xa2d762cf, 0x49c92f54, 0x38b5f331, 0x7128a454, 0x48392905, - 0xa65b1db8, 0x851c97bd, 0xd675cf2f -}; -static const u32 s7[256] = { - 0x85e04019, 0x332bf567, 0x662dbfff, 0xcfc65693, 0x2a8d7f6f, - 0xab9bc912, 0xde6008a1, 0x2028da1f, - 0x0227bce7, 0x4d642916, 0x18fac300, 0x50f18b82, 0x2cb2cb11, - 0xb232e75c, 0x4b3695f2, 0xb28707de, - 0xa05fbcf6, 0xcd4181e9, 0xe150210c, 0xe24ef1bd, 0xb168c381, - 0xfde4e789, 0x5c79b0d8, 0x1e8bfd43, - 0x4d495001, 0x38be4341, 0x913cee1d, 0x92a79c3f, 0x089766be, - 0xbaeeadf4, 0x1286becf, 0xb6eacb19, - 0x2660c200, 0x7565bde4, 0x64241f7a, 0x8248dca9, 0xc3b3ad66, - 0x28136086, 0x0bd8dfa8, 0x356d1cf2, - 0x107789be, 0xb3b2e9ce, 0x0502aa8f, 0x0bc0351e, 0x166bf52a, - 0xeb12ff82, 0xe3486911, 0xd34d7516, - 0x4e7b3aff, 0x5f43671b, 0x9cf6e037, 0x4981ac83, 0x334266ce, - 0x8c9341b7, 0xd0d854c0, 0xcb3a6c88, - 0x47bc2829, 0x4725ba37, 0xa66ad22b, 0x7ad61f1e, 0x0c5cbafa, - 0x4437f107, 0xb6e79962, 0x42d2d816, - 0x0a961288, 0xe1a5c06e, 0x13749e67, 0x72fc081a, 0xb1d139f7, - 0xf9583745, 0xcf19df58, 0xbec3f756, - 0xc06eba30, 0x07211b24, 0x45c28829, 0xc95e317f, 0xbc8ec511, - 0x38bc46e9, 0xc6e6fa14, 0xbae8584a, - 0xad4ebc46, 0x468f508b, 0x7829435f, 0xf124183b, 0x821dba9f, - 0xaff60ff4, 0xea2c4e6d, 0x16e39264, - 0x92544a8b, 0x009b4fc3, 0xaba68ced, 0x9ac96f78, 0x06a5b79a, - 0xb2856e6e, 0x1aec3ca9, 0xbe838688, - 0x0e0804e9, 0x55f1be56, 0xe7e5363b, 0xb3a1f25d, 0xf7debb85, - 0x61fe033c, 0x16746233, 0x3c034c28, - 0xda6d0c74, 0x79aac56c, 0x3ce4e1ad, 0x51f0c802, 0x98f8f35a, - 0x1626a49f, 0xeed82b29, 0x1d382fe3, - 0x0c4fb99a, 0xbb325778, 0x3ec6d97b, 0x6e77a6a9, 0xcb658b5c, - 0xd45230c7, 0x2bd1408b, 0x60c03eb7, - 0xb9068d78, 0xa33754f4, 0xf430c87d, 0xc8a71302, 0xb96d8c32, - 0xebd4e7be, 0xbe8b9d2d, 0x7979fb06, - 0xe7225308, 0x8b75cf77, 0x11ef8da4, 0xe083c858, 0x8d6b786f, - 0x5a6317a6, 0xfa5cf7a0, 0x5dda0033, - 0xf28ebfb0, 0xf5b9c310, 0xa0eac280, 0x08b9767a, 0xa3d9d2b0, - 0x79d34217, 0x021a718d, 0x9ac6336a, - 0x2711fd60, 0x438050e3, 0x069908a8, 0x3d7fedc4, 0x826d2bef, - 0x4eeb8476, 0x488dcf25, 0x36c9d566, - 0x28e74e41, 0xc2610aca, 0x3d49a9cf, 0xbae3b9df, 0xb65f8de6, - 0x92aeaf64, 0x3ac7d5e6, 0x9ea80509, - 0xf22b017d, 0xa4173f70, 0xdd1e16c3, 0x15e0d7f9, 0x50b1b887, - 0x2b9f4fd5, 0x625aba82, 0x6a017962, - 0x2ec01b9c, 0x15488aa9, 0xd716e740, 0x40055a2c, 0x93d29a22, - 0xe32dbf9a, 0x058745b9, 0x3453dc1e, - 0xd699296e, 0x496cff6f, 0x1c9f4986, 0xdfe2ed07, 0xb87242d1, - 0x19de7eae, 0x053e561a, 0x15ad6f8c, - 0x66626c1c, 0x7154c24c, 0xea082b2a, 0x93eb2939, 0x17dcb0f0, - 0x58d4f2ae, 0x9ea294fb, 0x52cf564c, - 0x9883fe66, 0x2ec40581, 0x763953c3, 0x01d6692e, 0xd3a0c108, - 0xa1e7160e, 0xe4f2dfa6, 0x693ed285, - 0x74904698, 0x4c2b0edd, 0x4f757656, 0x5d393378, 0xa132234f, - 0x3d321c5d, 0xc3f5e194, 0x4b269301, - 0xc79f022f, 0x3c997e7e, 0x5e4f9504, 0x3ffafbbd, 0x76f7ad0e, - 0x296693f4, 0x3d1fce6f, 0xc61e45be, - 0xd3b5ab34, 0xf72bf9b7, 0x1b0434c0, 0x4e72b567, 0x5592a33d, - 0xb5229301, 0xcfd2a87f, 0x60aeb767, - 0x1814386b, 0x30bcc33d, 0x38a0c07d, 0xfd1606f2, 0xc363519b, - 0x589dd390, 0x5479f8e6, 0x1cb8d647, - 0x97fd61a9, 0xea7759f4, 0x2d57539d, 0x569a58cf, 0xe84e63ad, - 0x462e1b78, 0x6580f87e, 0xf3817914, - 0x91da55f4, 0x40a230f3, 0xd1988f35, 0xb6e318d2, 0x3ffa50bc, - 0x3d40f021, 0xc3c0bdae, 0x4958c24c, - 0x518f36b2, 0x84b1d370, 
0x0fedce83, 0x878ddada, 0xf2a279c7, - 0x94e01be8, 0x90716f4b, 0x954b8aa3 -}; -static const u32 sb8[256] = { - 0xe216300d, 0xbbddfffc, 0xa7ebdabd, 0x35648095, 0x7789f8b7, - 0xe6c1121b, 0x0e241600, 0x052ce8b5, - 0x11a9cfb0, 0xe5952f11, 0xece7990a, 0x9386d174, 0x2a42931c, - 0x76e38111, 0xb12def3a, 0x37ddddfc, - 0xde9adeb1, 0x0a0cc32c, 0xbe197029, 0x84a00940, 0xbb243a0f, - 0xb4d137cf, 0xb44e79f0, 0x049eedfd, - 0x0b15a15d, 0x480d3168, 0x8bbbde5a, 0x669ded42, 0xc7ece831, - 0x3f8f95e7, 0x72df191b, 0x7580330d, - 0x94074251, 0x5c7dcdfa, 0xabbe6d63, 0xaa402164, 0xb301d40a, - 0x02e7d1ca, 0x53571dae, 0x7a3182a2, - 0x12a8ddec, 0xfdaa335d, 0x176f43e8, 0x71fb46d4, 0x38129022, - 0xce949ad4, 0xb84769ad, 0x965bd862, - 0x82f3d055, 0x66fb9767, 0x15b80b4e, 0x1d5b47a0, 0x4cfde06f, - 0xc28ec4b8, 0x57e8726e, 0x647a78fc, - 0x99865d44, 0x608bd593, 0x6c200e03, 0x39dc5ff6, 0x5d0b00a3, - 0xae63aff2, 0x7e8bd632, 0x70108c0c, - 0xbbd35049, 0x2998df04, 0x980cf42a, 0x9b6df491, 0x9e7edd53, - 0x06918548, 0x58cb7e07, 0x3b74ef2e, - 0x522fffb1, 0xd24708cc, 0x1c7e27cd, 0xa4eb215b, 0x3cf1d2e2, - 0x19b47a38, 0x424f7618, 0x35856039, - 0x9d17dee7, 0x27eb35e6, 0xc9aff67b, 0x36baf5b8, 0x09c467cd, - 0xc18910b1, 0xe11dbf7b, 0x06cd1af8, - 0x7170c608, 0x2d5e3354, 0xd4de495a, 0x64c6d006, 0xbcc0c62c, - 0x3dd00db3, 0x708f8f34, 0x77d51b42, - 0x264f620f, 0x24b8d2bf, 0x15c1b79e, 0x46a52564, 0xf8d7e54e, - 0x3e378160, 0x7895cda5, 0x859c15a5, - 0xe6459788, 0xc37bc75f, 0xdb07ba0c, 0x0676a3ab, 0x7f229b1e, - 0x31842e7b, 0x24259fd7, 0xf8bef472, - 0x835ffcb8, 0x6df4c1f2, 0x96f5b195, 0xfd0af0fc, 0xb0fe134c, - 0xe2506d3d, 0x4f9b12ea, 0xf215f225, - 0xa223736f, 0x9fb4c428, 0x25d04979, 0x34c713f8, 0xc4618187, - 0xea7a6e98, 0x7cd16efc, 0x1436876c, - 0xf1544107, 0xbedeee14, 0x56e9af27, 0xa04aa441, 0x3cf7c899, - 0x92ecbae6, 0xdd67016d, 0x151682eb, - 0xa842eedf, 0xfdba60b4, 0xf1907b75, 0x20e3030f, 0x24d8c29e, - 0xe139673b, 0xefa63fb8, 0x71873054, - 0xb6f2cf3b, 0x9f326442, 0xcb15a4cc, 0xb01a4504, 0xf1e47d8d, - 0x844a1be5, 0xbae7dfdc, 0x42cbda70, - 0xcd7dae0a, 0x57e85b7a, 0xd53f5af6, 0x20cf4d8c, 0xcea4d428, - 0x79d130a4, 0x3486ebfb, 0x33d3cddc, - 0x77853b53, 0x37effcb5, 0xc5068778, 0xe580b3e6, 0x4e68b8f4, - 0xc5c8b37e, 0x0d809ea2, 0x398feb7c, - 0x132a4f94, 0x43b7950e, 0x2fee7d1c, 0x223613bd, 0xdd06caa2, - 0x37df932b, 0xc4248289, 0xacf3ebc3, - 0x5715f6b7, 0xef3478dd, 0xf267616f, 0xc148cbe4, 0x9052815e, - 0x5e410fab, 0xb48a2465, 0x2eda7fa4, - 0xe87b40e4, 0xe98ea084, 0x5889e9e1, 0xefd390fc, 0xdd07d35b, - 0xdb485694, 0x38d7e5b2, 0x57720101, - 0x730edebc, 0x5b643113, 0x94917e4f, 0x503c2fba, 0x646f1282, - 0x7523d24a, 0xe0779695, 0xf9c17a8f, - 0x7a5b2121, 0xd187b896, 0x29263a4d, 0xba510cdf, 0x81f47c9f, - 0xad1163ed, 0xea7b5965, 0x1a00726e, - 0x11403092, 0x00da6d77, 0x4a0cdd61, 0xad1f4603, 0x605bdfb0, - 0x9eedc364, 0x22ebe6a8, 0xcee7d28a, - 0xa0e736a0, 0x5564a6b9, 0x10853209, 0xc7eb8f37, 0x2de705ca, - 0x8951570f, 0xdf09822b, 0xbd691a6c, - 0xaa12e4f2, 0x87451c0f, 0xe0f6a27a, 0x3ada4819, 0x4cf1764f, - 0x0d771c2b, 0x67cdb156, 0x350d8384, - 0x5938fa0f, 0x42399ef3, 0x36997b07, 0x0e84093d, 0x4aa93e61, - 0x8360d87b, 0x1fa98b0c, 0x1149382c, - 0xe97625a5, 0x0614d1b7, 0x0e25244b, 0x0c768347, 0x589e8d82, - 0x0d2059d1, 0xa466bb1e, 0xf8da0a82, - 0x04f19130, 0xba6e4ec0, 0x99265164, 0x1ee7230d, 0x50b2ad80, - 0xeaee6801, 0x8db2a283, 0xea8bf59e -}; - -#define F1(D, m, r) ((I = ((m) + (D))), (I = rol32(I, (r))), \ - (((s1[I >> 24] ^ s2[(I>>16)&0xff]) - s3[(I>>8)&0xff]) + s4[I&0xff])) -#define F2(D, m, r) ((I = ((m) ^ (D))), (I = rol32(I, (r))), \ - (((s1[I >> 24] - s2[(I>>16)&0xff]) + 
s3[(I>>8)&0xff]) ^ s4[I&0xff])) -#define F3(D, m, r) ((I = ((m) - (D))), (I = rol32(I, (r))), \ - (((s1[I >> 24] + s2[(I>>16)&0xff]) ^ s3[(I>>8)&0xff]) - s4[I&0xff])) - - -static void cast5_encrypt(struct crypto_tfm *tfm, u8 *outbuf, const u8 *inbuf) -{ - struct cast5_ctx *c = crypto_tfm_ctx(tfm); - const __be32 *src = (const __be32 *)inbuf; - __be32 *dst = (__be32 *)outbuf; - u32 l, r, t; - u32 I; /* used by the Fx macros */ - u32 *Km; - u8 *Kr; - - Km = c->Km; - Kr = c->Kr; - - /* (L0,R0) <-- (m1...m64). (Split the plaintext into left and - * right 32-bit halves L0 = m1...m32 and R0 = m33...m64.) - */ - l = be32_to_cpu(src[0]); - r = be32_to_cpu(src[1]); - - /* (16 rounds) for i from 1 to 16, compute Li and Ri as follows: - * Li = Ri-1; - * Ri = Li-1 ^ f(Ri-1,Kmi,Kri), where f is defined in Section 2.2 - * Rounds 1, 4, 7, 10, 13, and 16 use f function Type 1. - * Rounds 2, 5, 8, 11, and 14 use f function Type 2. - * Rounds 3, 6, 9, 12, and 15 use f function Type 3. - */ - - t = l; l = r; r = t ^ F1(r, Km[0], Kr[0]); - t = l; l = r; r = t ^ F2(r, Km[1], Kr[1]); - t = l; l = r; r = t ^ F3(r, Km[2], Kr[2]); - t = l; l = r; r = t ^ F1(r, Km[3], Kr[3]); - t = l; l = r; r = t ^ F2(r, Km[4], Kr[4]); - t = l; l = r; r = t ^ F3(r, Km[5], Kr[5]); - t = l; l = r; r = t ^ F1(r, Km[6], Kr[6]); - t = l; l = r; r = t ^ F2(r, Km[7], Kr[7]); - t = l; l = r; r = t ^ F3(r, Km[8], Kr[8]); - t = l; l = r; r = t ^ F1(r, Km[9], Kr[9]); - t = l; l = r; r = t ^ F2(r, Km[10], Kr[10]); - t = l; l = r; r = t ^ F3(r, Km[11], Kr[11]); - if (!(c->rr)) { - t = l; l = r; r = t ^ F1(r, Km[12], Kr[12]); - t = l; l = r; r = t ^ F2(r, Km[13], Kr[13]); - t = l; l = r; r = t ^ F3(r, Km[14], Kr[14]); - t = l; l = r; r = t ^ F1(r, Km[15], Kr[15]); - } - - /* c1...c64 <-- (R16,L16). (Exchange final blocks L16, R16 and - * concatenate to form the ciphertext.) 
*/ - dst[0] = cpu_to_be32(r); - dst[1] = cpu_to_be32(l); -} - -static void cast5_decrypt(struct crypto_tfm *tfm, u8 *outbuf, const u8 *inbuf) -{ - struct cast5_ctx *c = crypto_tfm_ctx(tfm); - const __be32 *src = (const __be32 *)inbuf; - __be32 *dst = (__be32 *)outbuf; - u32 l, r, t; - u32 I; - u32 *Km; - u8 *Kr; - - Km = c->Km; - Kr = c->Kr; - - l = be32_to_cpu(src[0]); - r = be32_to_cpu(src[1]); - - if (!(c->rr)) { - t = l; l = r; r = t ^ F1(r, Km[15], Kr[15]); - t = l; l = r; r = t ^ F3(r, Km[14], Kr[14]); - t = l; l = r; r = t ^ F2(r, Km[13], Kr[13]); - t = l; l = r; r = t ^ F1(r, Km[12], Kr[12]); - } - t = l; l = r; r = t ^ F3(r, Km[11], Kr[11]); - t = l; l = r; r = t ^ F2(r, Km[10], Kr[10]); - t = l; l = r; r = t ^ F1(r, Km[9], Kr[9]); - t = l; l = r; r = t ^ F3(r, Km[8], Kr[8]); - t = l; l = r; r = t ^ F2(r, Km[7], Kr[7]); - t = l; l = r; r = t ^ F1(r, Km[6], Kr[6]); - t = l; l = r; r = t ^ F3(r, Km[5], Kr[5]); - t = l; l = r; r = t ^ F2(r, Km[4], Kr[4]); - t = l; l = r; r = t ^ F1(r, Km[3], Kr[3]); - t = l; l = r; r = t ^ F3(r, Km[2], Kr[2]); - t = l; l = r; r = t ^ F2(r, Km[1], Kr[1]); - t = l; l = r; r = t ^ F1(r, Km[0], Kr[0]); - - dst[0] = cpu_to_be32(r); - dst[1] = cpu_to_be32(l); -} - -static void key_schedule(u32 *x, u32 *z, u32 *k) -{ - -#define xi(i) ((x[(i)/4] >> (8*(3-((i)%4)))) & 0xff) -#define zi(i) ((z[(i)/4] >> (8*(3-((i)%4)))) & 0xff) - - z[0] = x[0] ^ s5[xi(13)] ^ s6[xi(15)] ^ s7[xi(12)] ^ sb8[xi(14)] ^ - s7[xi(8)]; - z[1] = x[2] ^ s5[zi(0)] ^ s6[zi(2)] ^ s7[zi(1)] ^ sb8[zi(3)] ^ - sb8[xi(10)]; - z[2] = x[3] ^ s5[zi(7)] ^ s6[zi(6)] ^ s7[zi(5)] ^ sb8[zi(4)] ^ - s5[xi(9)]; - z[3] = x[1] ^ s5[zi(10)] ^ s6[zi(9)] ^ s7[zi(11)] ^ sb8[zi(8)] ^ - s6[xi(11)]; - k[0] = s5[zi(8)] ^ s6[zi(9)] ^ s7[zi(7)] ^ sb8[zi(6)] ^ s5[zi(2)]; - k[1] = s5[zi(10)] ^ s6[zi(11)] ^ s7[zi(5)] ^ sb8[zi(4)] ^ - s6[zi(6)]; - k[2] = s5[zi(12)] ^ s6[zi(13)] ^ s7[zi(3)] ^ sb8[zi(2)] ^ - s7[zi(9)]; - k[3] = s5[zi(14)] ^ s6[zi(15)] ^ s7[zi(1)] ^ sb8[zi(0)] ^ - sb8[zi(12)]; - - x[0] = z[2] ^ s5[zi(5)] ^ s6[zi(7)] ^ s7[zi(4)] ^ sb8[zi(6)] ^ - s7[zi(0)]; - x[1] = z[0] ^ s5[xi(0)] ^ s6[xi(2)] ^ s7[xi(1)] ^ sb8[xi(3)] ^ - sb8[zi(2)]; - x[2] = z[1] ^ s5[xi(7)] ^ s6[xi(6)] ^ s7[xi(5)] ^ sb8[xi(4)] ^ - s5[zi(1)]; - x[3] = z[3] ^ s5[xi(10)] ^ s6[xi(9)] ^ s7[xi(11)] ^ sb8[xi(8)] ^ - s6[zi(3)]; - k[4] = s5[xi(3)] ^ s6[xi(2)] ^ s7[xi(12)] ^ sb8[xi(13)] ^ - s5[xi(8)]; - k[5] = s5[xi(1)] ^ s6[xi(0)] ^ s7[xi(14)] ^ sb8[xi(15)] ^ - s6[xi(13)]; - k[6] = s5[xi(7)] ^ s6[xi(6)] ^ s7[xi(8)] ^ sb8[xi(9)] ^ s7[xi(3)]; - k[7] = s5[xi(5)] ^ s6[xi(4)] ^ s7[xi(10)] ^ sb8[xi(11)] ^ - sb8[xi(7)]; - - z[0] = x[0] ^ s5[xi(13)] ^ s6[xi(15)] ^ s7[xi(12)] ^ sb8[xi(14)] ^ - s7[xi(8)]; - z[1] = x[2] ^ s5[zi(0)] ^ s6[zi(2)] ^ s7[zi(1)] ^ sb8[zi(3)] ^ - sb8[xi(10)]; - z[2] = x[3] ^ s5[zi(7)] ^ s6[zi(6)] ^ s7[zi(5)] ^ sb8[zi(4)] ^ - s5[xi(9)]; - z[3] = x[1] ^ s5[zi(10)] ^ s6[zi(9)] ^ s7[zi(11)] ^ sb8[zi(8)] ^ - s6[xi(11)]; - k[8] = s5[zi(3)] ^ s6[zi(2)] ^ s7[zi(12)] ^ sb8[zi(13)] ^ - s5[zi(9)]; - k[9] = s5[zi(1)] ^ s6[zi(0)] ^ s7[zi(14)] ^ sb8[zi(15)] ^ - s6[zi(12)]; - k[10] = s5[zi(7)] ^ s6[zi(6)] ^ s7[zi(8)] ^ sb8[zi(9)] ^ s7[zi(2)]; - k[11] = s5[zi(5)] ^ s6[zi(4)] ^ s7[zi(10)] ^ sb8[zi(11)] ^ - sb8[zi(6)]; - - x[0] = z[2] ^ s5[zi(5)] ^ s6[zi(7)] ^ s7[zi(4)] ^ sb8[zi(6)] ^ - s7[zi(0)]; - x[1] = z[0] ^ s5[xi(0)] ^ s6[xi(2)] ^ s7[xi(1)] ^ sb8[xi(3)] ^ - sb8[zi(2)]; - x[2] = z[1] ^ s5[xi(7)] ^ s6[xi(6)] ^ s7[xi(5)] ^ sb8[xi(4)] ^ - s5[zi(1)]; - x[3] = z[3] ^ s5[xi(10)] ^ s6[xi(9)] ^ s7[xi(11)] ^ sb8[xi(8)] ^ - s6[zi(3)]; - k[12] = s5[xi(8)] ^ 
s6[xi(9)] ^ s7[xi(7)] ^ sb8[xi(6)] ^ s5[xi(3)]; - k[13] = s5[xi(10)] ^ s6[xi(11)] ^ s7[xi(5)] ^ sb8[xi(4)] ^ - s6[xi(7)]; - k[14] = s5[xi(12)] ^ s6[xi(13)] ^ s7[xi(3)] ^ sb8[xi(2)] ^ - s7[xi(8)]; - k[15] = s5[xi(14)] ^ s6[xi(15)] ^ s7[xi(1)] ^ sb8[xi(0)] ^ - sb8[xi(13)]; - -#undef xi -#undef zi -} - - -static int cast5_setkey(struct crypto_tfm *tfm, const u8 *key, unsigned key_len) -{ - struct cast5_ctx *c = crypto_tfm_ctx(tfm); - int i; - u32 x[4]; - u32 z[4]; - u32 k[16]; - __be32 p_key[4]; - - c->rr = key_len <= 10 ? 1 : 0; - - memset(p_key, 0, 16); - memcpy(p_key, key, key_len); - - - x[0] = be32_to_cpu(p_key[0]); - x[1] = be32_to_cpu(p_key[1]); - x[2] = be32_to_cpu(p_key[2]); - x[3] = be32_to_cpu(p_key[3]); - - key_schedule(x, z, k); - for (i = 0; i < 16; i++) - c->Km[i] = k[i]; - key_schedule(x, z, k); - for (i = 0; i < 16; i++) - c->Kr[i] = k[i] & 0x1f; - return 0; -} - -static struct crypto_alg alg = { - .cra_name = "cast5", - .cra_flags = CRYPTO_ALG_TYPE_CIPHER, - .cra_blocksize = CAST5_BLOCK_SIZE, - .cra_ctxsize = sizeof(struct cast5_ctx), - .cra_alignmask = 3, - .cra_module = THIS_MODULE, - .cra_u = { - .cipher = { - .cia_min_keysize = CAST5_MIN_KEY_SIZE, - .cia_max_keysize = CAST5_MAX_KEY_SIZE, - .cia_setkey = cast5_setkey, - .cia_encrypt = cast5_encrypt, - .cia_decrypt = cast5_decrypt - } - } -}; - -static int __init cast5_mod_init(void) -{ - return crypto_register_alg(&alg); -} - -static void __exit cast5_mod_fini(void) -{ - crypto_unregister_alg(&alg); -} - -module_init(cast5_mod_init); -module_exit(cast5_mod_fini); - -MODULE_LICENSE("GPL"); -MODULE_DESCRIPTION("Cast5 Cipher Algorithm"); - diff --git a/crypto/cast5_generic.c b/crypto/cast5_generic.c new file mode 100644 index 000000000000..bc525dbd8a4b --- /dev/null +++ b/crypto/cast5_generic.c @@ -0,0 +1,821 @@ +/* Kernel cryptographic api. +* cast5.c - Cast5 cipher algorithm (rfc2144). +* +* Derived from GnuPG implementation of cast5. +* +* Major Changes. +* Complete conformance to rfc2144. +* Supports key size from 40 to 128 bits. +* +* Copyright (C) 1998, 1999, 2000, 2001 Free Software Foundation, Inc. +* Copyright (C) 2003 Kartikey Mahendra Bhatt . +* +* This program is free software; you can redistribute it and/or modify it +* under the terms of GNU General Public License as published by the Free +* Software Foundation; either version 2 of the License, or (at your option) +* any later version. 
+* +* You should have received a copy of the GNU General Public License +* along with this program; if not, write to the Free Software +* Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA +*/ + + +#include +#include +#include +#include +#include +#include +#include +#include + + +const u32 cast5_s1[256] = { + 0x30fb40d4, 0x9fa0ff0b, 0x6beccd2f, 0x3f258c7a, 0x1e213f2f, + 0x9c004dd3, 0x6003e540, 0xcf9fc949, + 0xbfd4af27, 0x88bbbdb5, 0xe2034090, 0x98d09675, 0x6e63a0e0, + 0x15c361d2, 0xc2e7661d, 0x22d4ff8e, + 0x28683b6f, 0xc07fd059, 0xff2379c8, 0x775f50e2, 0x43c340d3, + 0xdf2f8656, 0x887ca41a, 0xa2d2bd2d, + 0xa1c9e0d6, 0x346c4819, 0x61b76d87, 0x22540f2f, 0x2abe32e1, + 0xaa54166b, 0x22568e3a, 0xa2d341d0, + 0x66db40c8, 0xa784392f, 0x004dff2f, 0x2db9d2de, 0x97943fac, + 0x4a97c1d8, 0x527644b7, 0xb5f437a7, + 0xb82cbaef, 0xd751d159, 0x6ff7f0ed, 0x5a097a1f, 0x827b68d0, + 0x90ecf52e, 0x22b0c054, 0xbc8e5935, + 0x4b6d2f7f, 0x50bb64a2, 0xd2664910, 0xbee5812d, 0xb7332290, + 0xe93b159f, 0xb48ee411, 0x4bff345d, + 0xfd45c240, 0xad31973f, 0xc4f6d02e, 0x55fc8165, 0xd5b1caad, + 0xa1ac2dae, 0xa2d4b76d, 0xc19b0c50, + 0x882240f2, 0x0c6e4f38, 0xa4e4bfd7, 0x4f5ba272, 0x564c1d2f, + 0xc59c5319, 0xb949e354, 0xb04669fe, + 0xb1b6ab8a, 0xc71358dd, 0x6385c545, 0x110f935d, 0x57538ad5, + 0x6a390493, 0xe63d37e0, 0x2a54f6b3, + 0x3a787d5f, 0x6276a0b5, 0x19a6fcdf, 0x7a42206a, 0x29f9d4d5, + 0xf61b1891, 0xbb72275e, 0xaa508167, + 0x38901091, 0xc6b505eb, 0x84c7cb8c, 0x2ad75a0f, 0x874a1427, + 0xa2d1936b, 0x2ad286af, 0xaa56d291, + 0xd7894360, 0x425c750d, 0x93b39e26, 0x187184c9, 0x6c00b32d, + 0x73e2bb14, 0xa0bebc3c, 0x54623779, + 0x64459eab, 0x3f328b82, 0x7718cf82, 0x59a2cea6, 0x04ee002e, + 0x89fe78e6, 0x3fab0950, 0x325ff6c2, + 0x81383f05, 0x6963c5c8, 0x76cb5ad6, 0xd49974c9, 0xca180dcf, + 0x380782d5, 0xc7fa5cf6, 0x8ac31511, + 0x35e79e13, 0x47da91d0, 0xf40f9086, 0xa7e2419e, 0x31366241, + 0x051ef495, 0xaa573b04, 0x4a805d8d, + 0x548300d0, 0x00322a3c, 0xbf64cddf, 0xba57a68e, 0x75c6372b, + 0x50afd341, 0xa7c13275, 0x915a0bf5, + 0x6b54bfab, 0x2b0b1426, 0xab4cc9d7, 0x449ccd82, 0xf7fbf265, + 0xab85c5f3, 0x1b55db94, 0xaad4e324, + 0xcfa4bd3f, 0x2deaa3e2, 0x9e204d02, 0xc8bd25ac, 0xeadf55b3, + 0xd5bd9e98, 0xe31231b2, 0x2ad5ad6c, + 0x954329de, 0xadbe4528, 0xd8710f69, 0xaa51c90f, 0xaa786bf6, + 0x22513f1e, 0xaa51a79b, 0x2ad344cc, + 0x7b5a41f0, 0xd37cfbad, 0x1b069505, 0x41ece491, 0xb4c332e6, + 0x032268d4, 0xc9600acc, 0xce387e6d, + 0xbf6bb16c, 0x6a70fb78, 0x0d03d9c9, 0xd4df39de, 0xe01063da, + 0x4736f464, 0x5ad328d8, 0xb347cc96, + 0x75bb0fc3, 0x98511bfb, 0x4ffbcc35, 0xb58bcf6a, 0xe11f0abc, + 0xbfc5fe4a, 0xa70aec10, 0xac39570a, + 0x3f04442f, 0x6188b153, 0xe0397a2e, 0x5727cb79, 0x9ceb418f, + 0x1cacd68d, 0x2ad37c96, 0x0175cb9d, + 0xc69dff09, 0xc75b65f0, 0xd9db40d8, 0xec0e7779, 0x4744ead4, + 0xb11c3274, 0xdd24cb9e, 0x7e1c54bd, + 0xf01144f9, 0xd2240eb1, 0x9675b3fd, 0xa3ac3755, 0xd47c27af, + 0x51c85f4d, 0x56907596, 0xa5bb15e6, + 0x580304f0, 0xca042cf1, 0x011a37ea, 0x8dbfaadb, 0x35ba3e4a, + 0x3526ffa0, 0xc37b4d09, 0xbc306ed9, + 0x98a52666, 0x5648f725, 0xff5e569d, 0x0ced63d0, 0x7c63b2cf, + 0x700b45e1, 0xd5ea50f1, 0x85a92872, + 0xaf1fbda7, 0xd4234870, 0xa7870bf3, 0x2d3b4d79, 0x42e04198, + 0x0cd0ede7, 0x26470db8, 0xf881814c, + 0x474d6ad7, 0x7c0c5e5c, 0xd1231959, 0x381b7298, 0xf5d2f4db, + 0xab838653, 0x6e2f1e23, 0x83719c9e, + 0xbd91e046, 0x9a56456e, 0xdc39200c, 0x20c8c571, 0x962bda1c, + 0xe1e696ff, 0xb141ab08, 0x7cca89b9, + 0x1a69e783, 0x02cc4843, 0xa2f7c579, 0x429ef47d, 0x427b169c, + 0x5ac9f049, 0xdd8f0f00, 0x5c8165bf +}; 
+EXPORT_SYMBOL_GPL(cast5_s1); +const u32 cast5_s2[256] = { + 0x1f201094, 0xef0ba75b, 0x69e3cf7e, 0x393f4380, 0xfe61cf7a, + 0xeec5207a, 0x55889c94, 0x72fc0651, + 0xada7ef79, 0x4e1d7235, 0xd55a63ce, 0xde0436ba, 0x99c430ef, + 0x5f0c0794, 0x18dcdb7d, 0xa1d6eff3, + 0xa0b52f7b, 0x59e83605, 0xee15b094, 0xe9ffd909, 0xdc440086, + 0xef944459, 0xba83ccb3, 0xe0c3cdfb, + 0xd1da4181, 0x3b092ab1, 0xf997f1c1, 0xa5e6cf7b, 0x01420ddb, + 0xe4e7ef5b, 0x25a1ff41, 0xe180f806, + 0x1fc41080, 0x179bee7a, 0xd37ac6a9, 0xfe5830a4, 0x98de8b7f, + 0x77e83f4e, 0x79929269, 0x24fa9f7b, + 0xe113c85b, 0xacc40083, 0xd7503525, 0xf7ea615f, 0x62143154, + 0x0d554b63, 0x5d681121, 0xc866c359, + 0x3d63cf73, 0xcee234c0, 0xd4d87e87, 0x5c672b21, 0x071f6181, + 0x39f7627f, 0x361e3084, 0xe4eb573b, + 0x602f64a4, 0xd63acd9c, 0x1bbc4635, 0x9e81032d, 0x2701f50c, + 0x99847ab4, 0xa0e3df79, 0xba6cf38c, + 0x10843094, 0x2537a95e, 0xf46f6ffe, 0xa1ff3b1f, 0x208cfb6a, + 0x8f458c74, 0xd9e0a227, 0x4ec73a34, + 0xfc884f69, 0x3e4de8df, 0xef0e0088, 0x3559648d, 0x8a45388c, + 0x1d804366, 0x721d9bfd, 0xa58684bb, + 0xe8256333, 0x844e8212, 0x128d8098, 0xfed33fb4, 0xce280ae1, + 0x27e19ba5, 0xd5a6c252, 0xe49754bd, + 0xc5d655dd, 0xeb667064, 0x77840b4d, 0xa1b6a801, 0x84db26a9, + 0xe0b56714, 0x21f043b7, 0xe5d05860, + 0x54f03084, 0x066ff472, 0xa31aa153, 0xdadc4755, 0xb5625dbf, + 0x68561be6, 0x83ca6b94, 0x2d6ed23b, + 0xeccf01db, 0xa6d3d0ba, 0xb6803d5c, 0xaf77a709, 0x33b4a34c, + 0x397bc8d6, 0x5ee22b95, 0x5f0e5304, + 0x81ed6f61, 0x20e74364, 0xb45e1378, 0xde18639b, 0x881ca122, + 0xb96726d1, 0x8049a7e8, 0x22b7da7b, + 0x5e552d25, 0x5272d237, 0x79d2951c, 0xc60d894c, 0x488cb402, + 0x1ba4fe5b, 0xa4b09f6b, 0x1ca815cf, + 0xa20c3005, 0x8871df63, 0xb9de2fcb, 0x0cc6c9e9, 0x0beeff53, + 0xe3214517, 0xb4542835, 0x9f63293c, + 0xee41e729, 0x6e1d2d7c, 0x50045286, 0x1e6685f3, 0xf33401c6, + 0x30a22c95, 0x31a70850, 0x60930f13, + 0x73f98417, 0xa1269859, 0xec645c44, 0x52c877a9, 0xcdff33a6, + 0xa02b1741, 0x7cbad9a2, 0x2180036f, + 0x50d99c08, 0xcb3f4861, 0xc26bd765, 0x64a3f6ab, 0x80342676, + 0x25a75e7b, 0xe4e6d1fc, 0x20c710e6, + 0xcdf0b680, 0x17844d3b, 0x31eef84d, 0x7e0824e4, 0x2ccb49eb, + 0x846a3bae, 0x8ff77888, 0xee5d60f6, + 0x7af75673, 0x2fdd5cdb, 0xa11631c1, 0x30f66f43, 0xb3faec54, + 0x157fd7fa, 0xef8579cc, 0xd152de58, + 0xdb2ffd5e, 0x8f32ce19, 0x306af97a, 0x02f03ef8, 0x99319ad5, + 0xc242fa0f, 0xa7e3ebb0, 0xc68e4906, + 0xb8da230c, 0x80823028, 0xdcdef3c8, 0xd35fb171, 0x088a1bc8, + 0xbec0c560, 0x61a3c9e8, 0xbca8f54d, + 0xc72feffa, 0x22822e99, 0x82c570b4, 0xd8d94e89, 0x8b1c34bc, + 0x301e16e6, 0x273be979, 0xb0ffeaa6, + 0x61d9b8c6, 0x00b24869, 0xb7ffce3f, 0x08dc283b, 0x43daf65a, + 0xf7e19798, 0x7619b72f, 0x8f1c9ba4, + 0xdc8637a0, 0x16a7d3b1, 0x9fc393b7, 0xa7136eeb, 0xc6bcc63e, + 0x1a513742, 0xef6828bc, 0x520365d6, + 0x2d6a77ab, 0x3527ed4b, 0x821fd216, 0x095c6e2e, 0xdb92f2fb, + 0x5eea29cb, 0x145892f5, 0x91584f7f, + 0x5483697b, 0x2667a8cc, 0x85196048, 0x8c4bacea, 0x833860d4, + 0x0d23e0f9, 0x6c387e8a, 0x0ae6d249, + 0xb284600c, 0xd835731d, 0xdcb1c647, 0xac4c56ea, 0x3ebd81b3, + 0x230eabb0, 0x6438bc87, 0xf0b5b1fa, + 0x8f5ea2b3, 0xfc184642, 0x0a036b7a, 0x4fb089bd, 0x649da589, + 0xa345415e, 0x5c038323, 0x3e5d3bb9, + 0x43d79572, 0x7e6dd07c, 0x06dfdf1e, 0x6c6cc4ef, 0x7160a539, + 0x73bfbe70, 0x83877605, 0x4523ecf1 +}; +EXPORT_SYMBOL_GPL(cast5_s2); +const u32 cast5_s3[256] = { + 0x8defc240, 0x25fa5d9f, 0xeb903dbf, 0xe810c907, 0x47607fff, + 0x369fe44b, 0x8c1fc644, 0xaececa90, + 0xbeb1f9bf, 0xeefbcaea, 0xe8cf1950, 0x51df07ae, 0x920e8806, + 0xf0ad0548, 0xe13c8d83, 0x927010d5, + 0x11107d9f, 0x07647db9, 
0xb2e3e4d4, 0x3d4f285e, 0xb9afa820, + 0xfade82e0, 0xa067268b, 0x8272792e, + 0x553fb2c0, 0x489ae22b, 0xd4ef9794, 0x125e3fbc, 0x21fffcee, + 0x825b1bfd, 0x9255c5ed, 0x1257a240, + 0x4e1a8302, 0xbae07fff, 0x528246e7, 0x8e57140e, 0x3373f7bf, + 0x8c9f8188, 0xa6fc4ee8, 0xc982b5a5, + 0xa8c01db7, 0x579fc264, 0x67094f31, 0xf2bd3f5f, 0x40fff7c1, + 0x1fb78dfc, 0x8e6bd2c1, 0x437be59b, + 0x99b03dbf, 0xb5dbc64b, 0x638dc0e6, 0x55819d99, 0xa197c81c, + 0x4a012d6e, 0xc5884a28, 0xccc36f71, + 0xb843c213, 0x6c0743f1, 0x8309893c, 0x0feddd5f, 0x2f7fe850, + 0xd7c07f7e, 0x02507fbf, 0x5afb9a04, + 0xa747d2d0, 0x1651192e, 0xaf70bf3e, 0x58c31380, 0x5f98302e, + 0x727cc3c4, 0x0a0fb402, 0x0f7fef82, + 0x8c96fdad, 0x5d2c2aae, 0x8ee99a49, 0x50da88b8, 0x8427f4a0, + 0x1eac5790, 0x796fb449, 0x8252dc15, + 0xefbd7d9b, 0xa672597d, 0xada840d8, 0x45f54504, 0xfa5d7403, + 0xe83ec305, 0x4f91751a, 0x925669c2, + 0x23efe941, 0xa903f12e, 0x60270df2, 0x0276e4b6, 0x94fd6574, + 0x927985b2, 0x8276dbcb, 0x02778176, + 0xf8af918d, 0x4e48f79e, 0x8f616ddf, 0xe29d840e, 0x842f7d83, + 0x340ce5c8, 0x96bbb682, 0x93b4b148, + 0xef303cab, 0x984faf28, 0x779faf9b, 0x92dc560d, 0x224d1e20, + 0x8437aa88, 0x7d29dc96, 0x2756d3dc, + 0x8b907cee, 0xb51fd240, 0xe7c07ce3, 0xe566b4a1, 0xc3e9615e, + 0x3cf8209d, 0x6094d1e3, 0xcd9ca341, + 0x5c76460e, 0x00ea983b, 0xd4d67881, 0xfd47572c, 0xf76cedd9, + 0xbda8229c, 0x127dadaa, 0x438a074e, + 0x1f97c090, 0x081bdb8a, 0x93a07ebe, 0xb938ca15, 0x97b03cff, + 0x3dc2c0f8, 0x8d1ab2ec, 0x64380e51, + 0x68cc7bfb, 0xd90f2788, 0x12490181, 0x5de5ffd4, 0xdd7ef86a, + 0x76a2e214, 0xb9a40368, 0x925d958f, + 0x4b39fffa, 0xba39aee9, 0xa4ffd30b, 0xfaf7933b, 0x6d498623, + 0x193cbcfa, 0x27627545, 0x825cf47a, + 0x61bd8ba0, 0xd11e42d1, 0xcead04f4, 0x127ea392, 0x10428db7, + 0x8272a972, 0x9270c4a8, 0x127de50b, + 0x285ba1c8, 0x3c62f44f, 0x35c0eaa5, 0xe805d231, 0x428929fb, + 0xb4fcdf82, 0x4fb66a53, 0x0e7dc15b, + 0x1f081fab, 0x108618ae, 0xfcfd086d, 0xf9ff2889, 0x694bcc11, + 0x236a5cae, 0x12deca4d, 0x2c3f8cc5, + 0xd2d02dfe, 0xf8ef5896, 0xe4cf52da, 0x95155b67, 0x494a488c, + 0xb9b6a80c, 0x5c8f82bc, 0x89d36b45, + 0x3a609437, 0xec00c9a9, 0x44715253, 0x0a874b49, 0xd773bc40, + 0x7c34671c, 0x02717ef6, 0x4feb5536, + 0xa2d02fff, 0xd2bf60c4, 0xd43f03c0, 0x50b4ef6d, 0x07478cd1, + 0x006e1888, 0xa2e53f55, 0xb9e6d4bc, + 0xa2048016, 0x97573833, 0xd7207d67, 0xde0f8f3d, 0x72f87b33, + 0xabcc4f33, 0x7688c55d, 0x7b00a6b0, + 0x947b0001, 0x570075d2, 0xf9bb88f8, 0x8942019e, 0x4264a5ff, + 0x856302e0, 0x72dbd92b, 0xee971b69, + 0x6ea22fde, 0x5f08ae2b, 0xaf7a616d, 0xe5c98767, 0xcf1febd2, + 0x61efc8c2, 0xf1ac2571, 0xcc8239c2, + 0x67214cb8, 0xb1e583d1, 0xb7dc3e62, 0x7f10bdce, 0xf90a5c38, + 0x0ff0443d, 0x606e6dc6, 0x60543a49, + 0x5727c148, 0x2be98a1d, 0x8ab41738, 0x20e1be24, 0xaf96da0f, + 0x68458425, 0x99833be5, 0x600d457d, + 0x282f9350, 0x8334b362, 0xd91d1120, 0x2b6d8da0, 0x642b1e31, + 0x9c305a00, 0x52bce688, 0x1b03588a, + 0xf7baefd5, 0x4142ed9c, 0xa4315c11, 0x83323ec5, 0xdfef4636, + 0xa133c501, 0xe9d3531c, 0xee353783 +}; +EXPORT_SYMBOL_GPL(cast5_s3); +const u32 cast5_s4[256] = { + 0x9db30420, 0x1fb6e9de, 0xa7be7bef, 0xd273a298, 0x4a4f7bdb, + 0x64ad8c57, 0x85510443, 0xfa020ed1, + 0x7e287aff, 0xe60fb663, 0x095f35a1, 0x79ebf120, 0xfd059d43, + 0x6497b7b1, 0xf3641f63, 0x241e4adf, + 0x28147f5f, 0x4fa2b8cd, 0xc9430040, 0x0cc32220, 0xfdd30b30, + 0xc0a5374f, 0x1d2d00d9, 0x24147b15, + 0xee4d111a, 0x0fca5167, 0x71ff904c, 0x2d195ffe, 0x1a05645f, + 0x0c13fefe, 0x081b08ca, 0x05170121, + 0x80530100, 0xe83e5efe, 0xac9af4f8, 0x7fe72701, 0xd2b8ee5f, + 0x06df4261, 0xbb9e9b8a, 0x7293ea25, + 0xce84ffdf, 
0xf5718801, 0x3dd64b04, 0xa26f263b, 0x7ed48400, + 0x547eebe6, 0x446d4ca0, 0x6cf3d6f5, + 0x2649abdf, 0xaea0c7f5, 0x36338cc1, 0x503f7e93, 0xd3772061, + 0x11b638e1, 0x72500e03, 0xf80eb2bb, + 0xabe0502e, 0xec8d77de, 0x57971e81, 0xe14f6746, 0xc9335400, + 0x6920318f, 0x081dbb99, 0xffc304a5, + 0x4d351805, 0x7f3d5ce3, 0xa6c866c6, 0x5d5bcca9, 0xdaec6fea, + 0x9f926f91, 0x9f46222f, 0x3991467d, + 0xa5bf6d8e, 0x1143c44f, 0x43958302, 0xd0214eeb, 0x022083b8, + 0x3fb6180c, 0x18f8931e, 0x281658e6, + 0x26486e3e, 0x8bd78a70, 0x7477e4c1, 0xb506e07c, 0xf32d0a25, + 0x79098b02, 0xe4eabb81, 0x28123b23, + 0x69dead38, 0x1574ca16, 0xdf871b62, 0x211c40b7, 0xa51a9ef9, + 0x0014377b, 0x041e8ac8, 0x09114003, + 0xbd59e4d2, 0xe3d156d5, 0x4fe876d5, 0x2f91a340, 0x557be8de, + 0x00eae4a7, 0x0ce5c2ec, 0x4db4bba6, + 0xe756bdff, 0xdd3369ac, 0xec17b035, 0x06572327, 0x99afc8b0, + 0x56c8c391, 0x6b65811c, 0x5e146119, + 0x6e85cb75, 0xbe07c002, 0xc2325577, 0x893ff4ec, 0x5bbfc92d, + 0xd0ec3b25, 0xb7801ab7, 0x8d6d3b24, + 0x20c763ef, 0xc366a5fc, 0x9c382880, 0x0ace3205, 0xaac9548a, + 0xeca1d7c7, 0x041afa32, 0x1d16625a, + 0x6701902c, 0x9b757a54, 0x31d477f7, 0x9126b031, 0x36cc6fdb, + 0xc70b8b46, 0xd9e66a48, 0x56e55a79, + 0x026a4ceb, 0x52437eff, 0x2f8f76b4, 0x0df980a5, 0x8674cde3, + 0xedda04eb, 0x17a9be04, 0x2c18f4df, + 0xb7747f9d, 0xab2af7b4, 0xefc34d20, 0x2e096b7c, 0x1741a254, + 0xe5b6a035, 0x213d42f6, 0x2c1c7c26, + 0x61c2f50f, 0x6552daf9, 0xd2c231f8, 0x25130f69, 0xd8167fa2, + 0x0418f2c8, 0x001a96a6, 0x0d1526ab, + 0x63315c21, 0x5e0a72ec, 0x49bafefd, 0x187908d9, 0x8d0dbd86, + 0x311170a7, 0x3e9b640c, 0xcc3e10d7, + 0xd5cad3b6, 0x0caec388, 0xf73001e1, 0x6c728aff, 0x71eae2a1, + 0x1f9af36e, 0xcfcbd12f, 0xc1de8417, + 0xac07be6b, 0xcb44a1d8, 0x8b9b0f56, 0x013988c3, 0xb1c52fca, + 0xb4be31cd, 0xd8782806, 0x12a3a4e2, + 0x6f7de532, 0x58fd7eb6, 0xd01ee900, 0x24adffc2, 0xf4990fc5, + 0x9711aac5, 0x001d7b95, 0x82e5e7d2, + 0x109873f6, 0x00613096, 0xc32d9521, 0xada121ff, 0x29908415, + 0x7fbb977f, 0xaf9eb3db, 0x29c9ed2a, + 0x5ce2a465, 0xa730f32c, 0xd0aa3fe8, 0x8a5cc091, 0xd49e2ce7, + 0x0ce454a9, 0xd60acd86, 0x015f1919, + 0x77079103, 0xdea03af6, 0x78a8565e, 0xdee356df, 0x21f05cbe, + 0x8b75e387, 0xb3c50651, 0xb8a5c3ef, + 0xd8eeb6d2, 0xe523be77, 0xc2154529, 0x2f69efdf, 0xafe67afb, + 0xf470c4b2, 0xf3e0eb5b, 0xd6cc9876, + 0x39e4460c, 0x1fda8538, 0x1987832f, 0xca007367, 0xa99144f8, + 0x296b299e, 0x492fc295, 0x9266beab, + 0xb5676e69, 0x9bd3ddda, 0xdf7e052f, 0xdb25701c, 0x1b5e51ee, + 0xf65324e6, 0x6afce36c, 0x0316cc04, + 0x8644213e, 0xb7dc59d0, 0x7965291f, 0xccd6fd43, 0x41823979, + 0x932bcdf6, 0xb657c34d, 0x4edfd282, + 0x7ae5290c, 0x3cb9536b, 0x851e20fe, 0x9833557e, 0x13ecf0b0, + 0xd3ffb372, 0x3f85c5c1, 0x0aef7ed2 +}; +EXPORT_SYMBOL_GPL(cast5_s4); +static const u32 s5[256] = { + 0x7ec90c04, 0x2c6e74b9, 0x9b0e66df, 0xa6337911, 0xb86a7fff, + 0x1dd358f5, 0x44dd9d44, 0x1731167f, + 0x08fbf1fa, 0xe7f511cc, 0xd2051b00, 0x735aba00, 0x2ab722d8, + 0x386381cb, 0xacf6243a, 0x69befd7a, + 0xe6a2e77f, 0xf0c720cd, 0xc4494816, 0xccf5c180, 0x38851640, + 0x15b0a848, 0xe68b18cb, 0x4caadeff, + 0x5f480a01, 0x0412b2aa, 0x259814fc, 0x41d0efe2, 0x4e40b48d, + 0x248eb6fb, 0x8dba1cfe, 0x41a99b02, + 0x1a550a04, 0xba8f65cb, 0x7251f4e7, 0x95a51725, 0xc106ecd7, + 0x97a5980a, 0xc539b9aa, 0x4d79fe6a, + 0xf2f3f763, 0x68af8040, 0xed0c9e56, 0x11b4958b, 0xe1eb5a88, + 0x8709e6b0, 0xd7e07156, 0x4e29fea7, + 0x6366e52d, 0x02d1c000, 0xc4ac8e05, 0x9377f571, 0x0c05372a, + 0x578535f2, 0x2261be02, 0xd642a0c9, + 0xdf13a280, 0x74b55bd2, 0x682199c0, 0xd421e5ec, 0x53fb3ce8, + 0xc8adedb3, 0x28a87fc9, 0x3d959981, + 
0x5c1ff900, 0xfe38d399, 0x0c4eff0b, 0x062407ea, 0xaa2f4fb1, + 0x4fb96976, 0x90c79505, 0xb0a8a774, + 0xef55a1ff, 0xe59ca2c2, 0xa6b62d27, 0xe66a4263, 0xdf65001f, + 0x0ec50966, 0xdfdd55bc, 0x29de0655, + 0x911e739a, 0x17af8975, 0x32c7911c, 0x89f89468, 0x0d01e980, + 0x524755f4, 0x03b63cc9, 0x0cc844b2, + 0xbcf3f0aa, 0x87ac36e9, 0xe53a7426, 0x01b3d82b, 0x1a9e7449, + 0x64ee2d7e, 0xcddbb1da, 0x01c94910, + 0xb868bf80, 0x0d26f3fd, 0x9342ede7, 0x04a5c284, 0x636737b6, + 0x50f5b616, 0xf24766e3, 0x8eca36c1, + 0x136e05db, 0xfef18391, 0xfb887a37, 0xd6e7f7d4, 0xc7fb7dc9, + 0x3063fcdf, 0xb6f589de, 0xec2941da, + 0x26e46695, 0xb7566419, 0xf654efc5, 0xd08d58b7, 0x48925401, + 0xc1bacb7f, 0xe5ff550f, 0xb6083049, + 0x5bb5d0e8, 0x87d72e5a, 0xab6a6ee1, 0x223a66ce, 0xc62bf3cd, + 0x9e0885f9, 0x68cb3e47, 0x086c010f, + 0xa21de820, 0xd18b69de, 0xf3f65777, 0xfa02c3f6, 0x407edac3, + 0xcbb3d550, 0x1793084d, 0xb0d70eba, + 0x0ab378d5, 0xd951fb0c, 0xded7da56, 0x4124bbe4, 0x94ca0b56, + 0x0f5755d1, 0xe0e1e56e, 0x6184b5be, + 0x580a249f, 0x94f74bc0, 0xe327888e, 0x9f7b5561, 0xc3dc0280, + 0x05687715, 0x646c6bd7, 0x44904db3, + 0x66b4f0a3, 0xc0f1648a, 0x697ed5af, 0x49e92ff6, 0x309e374f, + 0x2cb6356a, 0x85808573, 0x4991f840, + 0x76f0ae02, 0x083be84d, 0x28421c9a, 0x44489406, 0x736e4cb8, + 0xc1092910, 0x8bc95fc6, 0x7d869cf4, + 0x134f616f, 0x2e77118d, 0xb31b2be1, 0xaa90b472, 0x3ca5d717, + 0x7d161bba, 0x9cad9010, 0xaf462ba2, + 0x9fe459d2, 0x45d34559, 0xd9f2da13, 0xdbc65487, 0xf3e4f94e, + 0x176d486f, 0x097c13ea, 0x631da5c7, + 0x445f7382, 0x175683f4, 0xcdc66a97, 0x70be0288, 0xb3cdcf72, + 0x6e5dd2f3, 0x20936079, 0x459b80a5, + 0xbe60e2db, 0xa9c23101, 0xeba5315c, 0x224e42f2, 0x1c5c1572, + 0xf6721b2c, 0x1ad2fff3, 0x8c25404e, + 0x324ed72f, 0x4067b7fd, 0x0523138e, 0x5ca3bc78, 0xdc0fd66e, + 0x75922283, 0x784d6b17, 0x58ebb16e, + 0x44094f85, 0x3f481d87, 0xfcfeae7b, 0x77b5ff76, 0x8c2302bf, + 0xaaf47556, 0x5f46b02a, 0x2b092801, + 0x3d38f5f7, 0x0ca81f36, 0x52af4a8a, 0x66d5e7c0, 0xdf3b0874, + 0x95055110, 0x1b5ad7a8, 0xf61ed5ad, + 0x6cf6e479, 0x20758184, 0xd0cefa65, 0x88f7be58, 0x4a046826, + 0x0ff6f8f3, 0xa09c7f70, 0x5346aba0, + 0x5ce96c28, 0xe176eda3, 0x6bac307f, 0x376829d2, 0x85360fa9, + 0x17e3fe2a, 0x24b79767, 0xf5a96b20, + 0xd6cd2595, 0x68ff1ebf, 0x7555442c, 0xf19f06be, 0xf9e0659a, + 0xeeb9491d, 0x34010718, 0xbb30cab8, + 0xe822fe15, 0x88570983, 0x750e6249, 0xda627e55, 0x5e76ffa8, + 0xb1534546, 0x6d47de08, 0xefe9e7d4 +}; +static const u32 s6[256] = { + 0xf6fa8f9d, 0x2cac6ce1, 0x4ca34867, 0xe2337f7c, 0x95db08e7, + 0x016843b4, 0xeced5cbc, 0x325553ac, + 0xbf9f0960, 0xdfa1e2ed, 0x83f0579d, 0x63ed86b9, 0x1ab6a6b8, + 0xde5ebe39, 0xf38ff732, 0x8989b138, + 0x33f14961, 0xc01937bd, 0xf506c6da, 0xe4625e7e, 0xa308ea99, + 0x4e23e33c, 0x79cbd7cc, 0x48a14367, + 0xa3149619, 0xfec94bd5, 0xa114174a, 0xeaa01866, 0xa084db2d, + 0x09a8486f, 0xa888614a, 0x2900af98, + 0x01665991, 0xe1992863, 0xc8f30c60, 0x2e78ef3c, 0xd0d51932, + 0xcf0fec14, 0xf7ca07d2, 0xd0a82072, + 0xfd41197e, 0x9305a6b0, 0xe86be3da, 0x74bed3cd, 0x372da53c, + 0x4c7f4448, 0xdab5d440, 0x6dba0ec3, + 0x083919a7, 0x9fbaeed9, 0x49dbcfb0, 0x4e670c53, 0x5c3d9c01, + 0x64bdb941, 0x2c0e636a, 0xba7dd9cd, + 0xea6f7388, 0xe70bc762, 0x35f29adb, 0x5c4cdd8d, 0xf0d48d8c, + 0xb88153e2, 0x08a19866, 0x1ae2eac8, + 0x284caf89, 0xaa928223, 0x9334be53, 0x3b3a21bf, 0x16434be3, + 0x9aea3906, 0xefe8c36e, 0xf890cdd9, + 0x80226dae, 0xc340a4a3, 0xdf7e9c09, 0xa694a807, 0x5b7c5ecc, + 0x221db3a6, 0x9a69a02f, 0x68818a54, + 0xceb2296f, 0x53c0843a, 0xfe893655, 0x25bfe68a, 0xb4628abc, + 0xcf222ebf, 0x25ac6f48, 0xa9a99387, + 0x53bddb65, 
0xe76ffbe7, 0xe967fd78, 0x0ba93563, 0x8e342bc1, + 0xe8a11be9, 0x4980740d, 0xc8087dfc, + 0x8de4bf99, 0xa11101a0, 0x7fd37975, 0xda5a26c0, 0xe81f994f, + 0x9528cd89, 0xfd339fed, 0xb87834bf, + 0x5f04456d, 0x22258698, 0xc9c4c83b, 0x2dc156be, 0x4f628daa, + 0x57f55ec5, 0xe2220abe, 0xd2916ebf, + 0x4ec75b95, 0x24f2c3c0, 0x42d15d99, 0xcd0d7fa0, 0x7b6e27ff, + 0xa8dc8af0, 0x7345c106, 0xf41e232f, + 0x35162386, 0xe6ea8926, 0x3333b094, 0x157ec6f2, 0x372b74af, + 0x692573e4, 0xe9a9d848, 0xf3160289, + 0x3a62ef1d, 0xa787e238, 0xf3a5f676, 0x74364853, 0x20951063, + 0x4576698d, 0xb6fad407, 0x592af950, + 0x36f73523, 0x4cfb6e87, 0x7da4cec0, 0x6c152daa, 0xcb0396a8, + 0xc50dfe5d, 0xfcd707ab, 0x0921c42f, + 0x89dff0bb, 0x5fe2be78, 0x448f4f33, 0x754613c9, 0x2b05d08d, + 0x48b9d585, 0xdc049441, 0xc8098f9b, + 0x7dede786, 0xc39a3373, 0x42410005, 0x6a091751, 0x0ef3c8a6, + 0x890072d6, 0x28207682, 0xa9a9f7be, + 0xbf32679d, 0xd45b5b75, 0xb353fd00, 0xcbb0e358, 0x830f220a, + 0x1f8fb214, 0xd372cf08, 0xcc3c4a13, + 0x8cf63166, 0x061c87be, 0x88c98f88, 0x6062e397, 0x47cf8e7a, + 0xb6c85283, 0x3cc2acfb, 0x3fc06976, + 0x4e8f0252, 0x64d8314d, 0xda3870e3, 0x1e665459, 0xc10908f0, + 0x513021a5, 0x6c5b68b7, 0x822f8aa0, + 0x3007cd3e, 0x74719eef, 0xdc872681, 0x073340d4, 0x7e432fd9, + 0x0c5ec241, 0x8809286c, 0xf592d891, + 0x08a930f6, 0x957ef305, 0xb7fbffbd, 0xc266e96f, 0x6fe4ac98, + 0xb173ecc0, 0xbc60b42a, 0x953498da, + 0xfba1ae12, 0x2d4bd736, 0x0f25faab, 0xa4f3fceb, 0xe2969123, + 0x257f0c3d, 0x9348af49, 0x361400bc, + 0xe8816f4a, 0x3814f200, 0xa3f94043, 0x9c7a54c2, 0xbc704f57, + 0xda41e7f9, 0xc25ad33a, 0x54f4a084, + 0xb17f5505, 0x59357cbe, 0xedbd15c8, 0x7f97c5ab, 0xba5ac7b5, + 0xb6f6deaf, 0x3a479c3a, 0x5302da25, + 0x653d7e6a, 0x54268d49, 0x51a477ea, 0x5017d55b, 0xd7d25d88, + 0x44136c76, 0x0404a8c8, 0xb8e5a121, + 0xb81a928a, 0x60ed5869, 0x97c55b96, 0xeaec991b, 0x29935913, + 0x01fdb7f1, 0x088e8dfa, 0x9ab6f6f5, + 0x3b4cbf9f, 0x4a5de3ab, 0xe6051d35, 0xa0e1d855, 0xd36b4cf1, + 0xf544edeb, 0xb0e93524, 0xbebb8fbd, + 0xa2d762cf, 0x49c92f54, 0x38b5f331, 0x7128a454, 0x48392905, + 0xa65b1db8, 0x851c97bd, 0xd675cf2f +}; +static const u32 s7[256] = { + 0x85e04019, 0x332bf567, 0x662dbfff, 0xcfc65693, 0x2a8d7f6f, + 0xab9bc912, 0xde6008a1, 0x2028da1f, + 0x0227bce7, 0x4d642916, 0x18fac300, 0x50f18b82, 0x2cb2cb11, + 0xb232e75c, 0x4b3695f2, 0xb28707de, + 0xa05fbcf6, 0xcd4181e9, 0xe150210c, 0xe24ef1bd, 0xb168c381, + 0xfde4e789, 0x5c79b0d8, 0x1e8bfd43, + 0x4d495001, 0x38be4341, 0x913cee1d, 0x92a79c3f, 0x089766be, + 0xbaeeadf4, 0x1286becf, 0xb6eacb19, + 0x2660c200, 0x7565bde4, 0x64241f7a, 0x8248dca9, 0xc3b3ad66, + 0x28136086, 0x0bd8dfa8, 0x356d1cf2, + 0x107789be, 0xb3b2e9ce, 0x0502aa8f, 0x0bc0351e, 0x166bf52a, + 0xeb12ff82, 0xe3486911, 0xd34d7516, + 0x4e7b3aff, 0x5f43671b, 0x9cf6e037, 0x4981ac83, 0x334266ce, + 0x8c9341b7, 0xd0d854c0, 0xcb3a6c88, + 0x47bc2829, 0x4725ba37, 0xa66ad22b, 0x7ad61f1e, 0x0c5cbafa, + 0x4437f107, 0xb6e79962, 0x42d2d816, + 0x0a961288, 0xe1a5c06e, 0x13749e67, 0x72fc081a, 0xb1d139f7, + 0xf9583745, 0xcf19df58, 0xbec3f756, + 0xc06eba30, 0x07211b24, 0x45c28829, 0xc95e317f, 0xbc8ec511, + 0x38bc46e9, 0xc6e6fa14, 0xbae8584a, + 0xad4ebc46, 0x468f508b, 0x7829435f, 0xf124183b, 0x821dba9f, + 0xaff60ff4, 0xea2c4e6d, 0x16e39264, + 0x92544a8b, 0x009b4fc3, 0xaba68ced, 0x9ac96f78, 0x06a5b79a, + 0xb2856e6e, 0x1aec3ca9, 0xbe838688, + 0x0e0804e9, 0x55f1be56, 0xe7e5363b, 0xb3a1f25d, 0xf7debb85, + 0x61fe033c, 0x16746233, 0x3c034c28, + 0xda6d0c74, 0x79aac56c, 0x3ce4e1ad, 0x51f0c802, 0x98f8f35a, + 0x1626a49f, 0xeed82b29, 0x1d382fe3, + 0x0c4fb99a, 0xbb325778, 
0x3ec6d97b, 0x6e77a6a9, 0xcb658b5c, + 0xd45230c7, 0x2bd1408b, 0x60c03eb7, + 0xb9068d78, 0xa33754f4, 0xf430c87d, 0xc8a71302, 0xb96d8c32, + 0xebd4e7be, 0xbe8b9d2d, 0x7979fb06, + 0xe7225308, 0x8b75cf77, 0x11ef8da4, 0xe083c858, 0x8d6b786f, + 0x5a6317a6, 0xfa5cf7a0, 0x5dda0033, + 0xf28ebfb0, 0xf5b9c310, 0xa0eac280, 0x08b9767a, 0xa3d9d2b0, + 0x79d34217, 0x021a718d, 0x9ac6336a, + 0x2711fd60, 0x438050e3, 0x069908a8, 0x3d7fedc4, 0x826d2bef, + 0x4eeb8476, 0x488dcf25, 0x36c9d566, + 0x28e74e41, 0xc2610aca, 0x3d49a9cf, 0xbae3b9df, 0xb65f8de6, + 0x92aeaf64, 0x3ac7d5e6, 0x9ea80509, + 0xf22b017d, 0xa4173f70, 0xdd1e16c3, 0x15e0d7f9, 0x50b1b887, + 0x2b9f4fd5, 0x625aba82, 0x6a017962, + 0x2ec01b9c, 0x15488aa9, 0xd716e740, 0x40055a2c, 0x93d29a22, + 0xe32dbf9a, 0x058745b9, 0x3453dc1e, + 0xd699296e, 0x496cff6f, 0x1c9f4986, 0xdfe2ed07, 0xb87242d1, + 0x19de7eae, 0x053e561a, 0x15ad6f8c, + 0x66626c1c, 0x7154c24c, 0xea082b2a, 0x93eb2939, 0x17dcb0f0, + 0x58d4f2ae, 0x9ea294fb, 0x52cf564c, + 0x9883fe66, 0x2ec40581, 0x763953c3, 0x01d6692e, 0xd3a0c108, + 0xa1e7160e, 0xe4f2dfa6, 0x693ed285, + 0x74904698, 0x4c2b0edd, 0x4f757656, 0x5d393378, 0xa132234f, + 0x3d321c5d, 0xc3f5e194, 0x4b269301, + 0xc79f022f, 0x3c997e7e, 0x5e4f9504, 0x3ffafbbd, 0x76f7ad0e, + 0x296693f4, 0x3d1fce6f, 0xc61e45be, + 0xd3b5ab34, 0xf72bf9b7, 0x1b0434c0, 0x4e72b567, 0x5592a33d, + 0xb5229301, 0xcfd2a87f, 0x60aeb767, + 0x1814386b, 0x30bcc33d, 0x38a0c07d, 0xfd1606f2, 0xc363519b, + 0x589dd390, 0x5479f8e6, 0x1cb8d647, + 0x97fd61a9, 0xea7759f4, 0x2d57539d, 0x569a58cf, 0xe84e63ad, + 0x462e1b78, 0x6580f87e, 0xf3817914, + 0x91da55f4, 0x40a230f3, 0xd1988f35, 0xb6e318d2, 0x3ffa50bc, + 0x3d40f021, 0xc3c0bdae, 0x4958c24c, + 0x518f36b2, 0x84b1d370, 0x0fedce83, 0x878ddada, 0xf2a279c7, + 0x94e01be8, 0x90716f4b, 0x954b8aa3 +}; +static const u32 sb8[256] = { + 0xe216300d, 0xbbddfffc, 0xa7ebdabd, 0x35648095, 0x7789f8b7, + 0xe6c1121b, 0x0e241600, 0x052ce8b5, + 0x11a9cfb0, 0xe5952f11, 0xece7990a, 0x9386d174, 0x2a42931c, + 0x76e38111, 0xb12def3a, 0x37ddddfc, + 0xde9adeb1, 0x0a0cc32c, 0xbe197029, 0x84a00940, 0xbb243a0f, + 0xb4d137cf, 0xb44e79f0, 0x049eedfd, + 0x0b15a15d, 0x480d3168, 0x8bbbde5a, 0x669ded42, 0xc7ece831, + 0x3f8f95e7, 0x72df191b, 0x7580330d, + 0x94074251, 0x5c7dcdfa, 0xabbe6d63, 0xaa402164, 0xb301d40a, + 0x02e7d1ca, 0x53571dae, 0x7a3182a2, + 0x12a8ddec, 0xfdaa335d, 0x176f43e8, 0x71fb46d4, 0x38129022, + 0xce949ad4, 0xb84769ad, 0x965bd862, + 0x82f3d055, 0x66fb9767, 0x15b80b4e, 0x1d5b47a0, 0x4cfde06f, + 0xc28ec4b8, 0x57e8726e, 0x647a78fc, + 0x99865d44, 0x608bd593, 0x6c200e03, 0x39dc5ff6, 0x5d0b00a3, + 0xae63aff2, 0x7e8bd632, 0x70108c0c, + 0xbbd35049, 0x2998df04, 0x980cf42a, 0x9b6df491, 0x9e7edd53, + 0x06918548, 0x58cb7e07, 0x3b74ef2e, + 0x522fffb1, 0xd24708cc, 0x1c7e27cd, 0xa4eb215b, 0x3cf1d2e2, + 0x19b47a38, 0x424f7618, 0x35856039, + 0x9d17dee7, 0x27eb35e6, 0xc9aff67b, 0x36baf5b8, 0x09c467cd, + 0xc18910b1, 0xe11dbf7b, 0x06cd1af8, + 0x7170c608, 0x2d5e3354, 0xd4de495a, 0x64c6d006, 0xbcc0c62c, + 0x3dd00db3, 0x708f8f34, 0x77d51b42, + 0x264f620f, 0x24b8d2bf, 0x15c1b79e, 0x46a52564, 0xf8d7e54e, + 0x3e378160, 0x7895cda5, 0x859c15a5, + 0xe6459788, 0xc37bc75f, 0xdb07ba0c, 0x0676a3ab, 0x7f229b1e, + 0x31842e7b, 0x24259fd7, 0xf8bef472, + 0x835ffcb8, 0x6df4c1f2, 0x96f5b195, 0xfd0af0fc, 0xb0fe134c, + 0xe2506d3d, 0x4f9b12ea, 0xf215f225, + 0xa223736f, 0x9fb4c428, 0x25d04979, 0x34c713f8, 0xc4618187, + 0xea7a6e98, 0x7cd16efc, 0x1436876c, + 0xf1544107, 0xbedeee14, 0x56e9af27, 0xa04aa441, 0x3cf7c899, + 0x92ecbae6, 0xdd67016d, 0x151682eb, + 0xa842eedf, 0xfdba60b4, 0xf1907b75, 
0x20e3030f, 0x24d8c29e, + 0xe139673b, 0xefa63fb8, 0x71873054, + 0xb6f2cf3b, 0x9f326442, 0xcb15a4cc, 0xb01a4504, 0xf1e47d8d, + 0x844a1be5, 0xbae7dfdc, 0x42cbda70, + 0xcd7dae0a, 0x57e85b7a, 0xd53f5af6, 0x20cf4d8c, 0xcea4d428, + 0x79d130a4, 0x3486ebfb, 0x33d3cddc, + 0x77853b53, 0x37effcb5, 0xc5068778, 0xe580b3e6, 0x4e68b8f4, + 0xc5c8b37e, 0x0d809ea2, 0x398feb7c, + 0x132a4f94, 0x43b7950e, 0x2fee7d1c, 0x223613bd, 0xdd06caa2, + 0x37df932b, 0xc4248289, 0xacf3ebc3, + 0x5715f6b7, 0xef3478dd, 0xf267616f, 0xc148cbe4, 0x9052815e, + 0x5e410fab, 0xb48a2465, 0x2eda7fa4, + 0xe87b40e4, 0xe98ea084, 0x5889e9e1, 0xefd390fc, 0xdd07d35b, + 0xdb485694, 0x38d7e5b2, 0x57720101, + 0x730edebc, 0x5b643113, 0x94917e4f, 0x503c2fba, 0x646f1282, + 0x7523d24a, 0xe0779695, 0xf9c17a8f, + 0x7a5b2121, 0xd187b896, 0x29263a4d, 0xba510cdf, 0x81f47c9f, + 0xad1163ed, 0xea7b5965, 0x1a00726e, + 0x11403092, 0x00da6d77, 0x4a0cdd61, 0xad1f4603, 0x605bdfb0, + 0x9eedc364, 0x22ebe6a8, 0xcee7d28a, + 0xa0e736a0, 0x5564a6b9, 0x10853209, 0xc7eb8f37, 0x2de705ca, + 0x8951570f, 0xdf09822b, 0xbd691a6c, + 0xaa12e4f2, 0x87451c0f, 0xe0f6a27a, 0x3ada4819, 0x4cf1764f, + 0x0d771c2b, 0x67cdb156, 0x350d8384, + 0x5938fa0f, 0x42399ef3, 0x36997b07, 0x0e84093d, 0x4aa93e61, + 0x8360d87b, 0x1fa98b0c, 0x1149382c, + 0xe97625a5, 0x0614d1b7, 0x0e25244b, 0x0c768347, 0x589e8d82, + 0x0d2059d1, 0xa466bb1e, 0xf8da0a82, + 0x04f19130, 0xba6e4ec0, 0x99265164, 0x1ee7230d, 0x50b2ad80, + 0xeaee6801, 0x8db2a283, 0xea8bf59e +}; + +#define s1 cast5_s1 +#define s2 cast5_s2 +#define s3 cast5_s3 +#define s4 cast5_s4 + +#define F1(D, m, r) ((I = ((m) + (D))), (I = rol32(I, (r))), \ + (((s1[I >> 24] ^ s2[(I>>16)&0xff]) - s3[(I>>8)&0xff]) + s4[I&0xff])) +#define F2(D, m, r) ((I = ((m) ^ (D))), (I = rol32(I, (r))), \ + (((s1[I >> 24] - s2[(I>>16)&0xff]) + s3[(I>>8)&0xff]) ^ s4[I&0xff])) +#define F3(D, m, r) ((I = ((m) - (D))), (I = rol32(I, (r))), \ + (((s1[I >> 24] + s2[(I>>16)&0xff]) ^ s3[(I>>8)&0xff]) - s4[I&0xff])) + + +void __cast5_encrypt(struct cast5_ctx *c, u8 *outbuf, const u8 *inbuf) +{ + const __be32 *src = (const __be32 *)inbuf; + __be32 *dst = (__be32 *)outbuf; + u32 l, r, t; + u32 I; /* used by the Fx macros */ + u32 *Km; + u8 *Kr; + + Km = c->Km; + Kr = c->Kr; + + /* (L0,R0) <-- (m1...m64). (Split the plaintext into left and + * right 32-bit halves L0 = m1...m32 and R0 = m33...m64.) + */ + l = be32_to_cpu(src[0]); + r = be32_to_cpu(src[1]); + + /* (16 rounds) for i from 1 to 16, compute Li and Ri as follows: + * Li = Ri-1; + * Ri = Li-1 ^ f(Ri-1,Kmi,Kri), where f is defined in Section 2.2 + * Rounds 1, 4, 7, 10, 13, and 16 use f function Type 1. + * Rounds 2, 5, 8, 11, and 14 use f function Type 2. + * Rounds 3, 6, 9, 12, and 15 use f function Type 3. + */ + + t = l; l = r; r = t ^ F1(r, Km[0], Kr[0]); + t = l; l = r; r = t ^ F2(r, Km[1], Kr[1]); + t = l; l = r; r = t ^ F3(r, Km[2], Kr[2]); + t = l; l = r; r = t ^ F1(r, Km[3], Kr[3]); + t = l; l = r; r = t ^ F2(r, Km[4], Kr[4]); + t = l; l = r; r = t ^ F3(r, Km[5], Kr[5]); + t = l; l = r; r = t ^ F1(r, Km[6], Kr[6]); + t = l; l = r; r = t ^ F2(r, Km[7], Kr[7]); + t = l; l = r; r = t ^ F3(r, Km[8], Kr[8]); + t = l; l = r; r = t ^ F1(r, Km[9], Kr[9]); + t = l; l = r; r = t ^ F2(r, Km[10], Kr[10]); + t = l; l = r; r = t ^ F3(r, Km[11], Kr[11]); + if (!(c->rr)) { + t = l; l = r; r = t ^ F1(r, Km[12], Kr[12]); + t = l; l = r; r = t ^ F2(r, Km[13], Kr[13]); + t = l; l = r; r = t ^ F3(r, Km[14], Kr[14]); + t = l; l = r; r = t ^ F1(r, Km[15], Kr[15]); + } + + /* c1...c64 <-- (R16,L16). 
(Exchange final blocks L16, R16 and + * concatenate to form the ciphertext.) */ + dst[0] = cpu_to_be32(r); + dst[1] = cpu_to_be32(l); +} +EXPORT_SYMBOL_GPL(__cast5_encrypt); + +static void cast5_encrypt(struct crypto_tfm *tfm, u8 *outbuf, const u8 *inbuf) +{ + __cast5_encrypt(crypto_tfm_ctx(tfm), outbuf, inbuf); +} + +void __cast5_decrypt(struct cast5_ctx *c, u8 *outbuf, const u8 *inbuf) +{ + const __be32 *src = (const __be32 *)inbuf; + __be32 *dst = (__be32 *)outbuf; + u32 l, r, t; + u32 I; + u32 *Km; + u8 *Kr; + + Km = c->Km; + Kr = c->Kr; + + l = be32_to_cpu(src[0]); + r = be32_to_cpu(src[1]); + + if (!(c->rr)) { + t = l; l = r; r = t ^ F1(r, Km[15], Kr[15]); + t = l; l = r; r = t ^ F3(r, Km[14], Kr[14]); + t = l; l = r; r = t ^ F2(r, Km[13], Kr[13]); + t = l; l = r; r = t ^ F1(r, Km[12], Kr[12]); + } + t = l; l = r; r = t ^ F3(r, Km[11], Kr[11]); + t = l; l = r; r = t ^ F2(r, Km[10], Kr[10]); + t = l; l = r; r = t ^ F1(r, Km[9], Kr[9]); + t = l; l = r; r = t ^ F3(r, Km[8], Kr[8]); + t = l; l = r; r = t ^ F2(r, Km[7], Kr[7]); + t = l; l = r; r = t ^ F1(r, Km[6], Kr[6]); + t = l; l = r; r = t ^ F3(r, Km[5], Kr[5]); + t = l; l = r; r = t ^ F2(r, Km[4], Kr[4]); + t = l; l = r; r = t ^ F1(r, Km[3], Kr[3]); + t = l; l = r; r = t ^ F3(r, Km[2], Kr[2]); + t = l; l = r; r = t ^ F2(r, Km[1], Kr[1]); + t = l; l = r; r = t ^ F1(r, Km[0], Kr[0]); + + dst[0] = cpu_to_be32(r); + dst[1] = cpu_to_be32(l); +} +EXPORT_SYMBOL_GPL(__cast5_decrypt); + +static void cast5_decrypt(struct crypto_tfm *tfm, u8 *outbuf, const u8 *inbuf) +{ + __cast5_decrypt(crypto_tfm_ctx(tfm), outbuf, inbuf); +} + +static void key_schedule(u32 *x, u32 *z, u32 *k) +{ + +#define xi(i) ((x[(i)/4] >> (8*(3-((i)%4)))) & 0xff) +#define zi(i) ((z[(i)/4] >> (8*(3-((i)%4)))) & 0xff) + + z[0] = x[0] ^ s5[xi(13)] ^ s6[xi(15)] ^ s7[xi(12)] ^ sb8[xi(14)] ^ + s7[xi(8)]; + z[1] = x[2] ^ s5[zi(0)] ^ s6[zi(2)] ^ s7[zi(1)] ^ sb8[zi(3)] ^ + sb8[xi(10)]; + z[2] = x[3] ^ s5[zi(7)] ^ s6[zi(6)] ^ s7[zi(5)] ^ sb8[zi(4)] ^ + s5[xi(9)]; + z[3] = x[1] ^ s5[zi(10)] ^ s6[zi(9)] ^ s7[zi(11)] ^ sb8[zi(8)] ^ + s6[xi(11)]; + k[0] = s5[zi(8)] ^ s6[zi(9)] ^ s7[zi(7)] ^ sb8[zi(6)] ^ s5[zi(2)]; + k[1] = s5[zi(10)] ^ s6[zi(11)] ^ s7[zi(5)] ^ sb8[zi(4)] ^ + s6[zi(6)]; + k[2] = s5[zi(12)] ^ s6[zi(13)] ^ s7[zi(3)] ^ sb8[zi(2)] ^ + s7[zi(9)]; + k[3] = s5[zi(14)] ^ s6[zi(15)] ^ s7[zi(1)] ^ sb8[zi(0)] ^ + sb8[zi(12)]; + + x[0] = z[2] ^ s5[zi(5)] ^ s6[zi(7)] ^ s7[zi(4)] ^ sb8[zi(6)] ^ + s7[zi(0)]; + x[1] = z[0] ^ s5[xi(0)] ^ s6[xi(2)] ^ s7[xi(1)] ^ sb8[xi(3)] ^ + sb8[zi(2)]; + x[2] = z[1] ^ s5[xi(7)] ^ s6[xi(6)] ^ s7[xi(5)] ^ sb8[xi(4)] ^ + s5[zi(1)]; + x[3] = z[3] ^ s5[xi(10)] ^ s6[xi(9)] ^ s7[xi(11)] ^ sb8[xi(8)] ^ + s6[zi(3)]; + k[4] = s5[xi(3)] ^ s6[xi(2)] ^ s7[xi(12)] ^ sb8[xi(13)] ^ + s5[xi(8)]; + k[5] = s5[xi(1)] ^ s6[xi(0)] ^ s7[xi(14)] ^ sb8[xi(15)] ^ + s6[xi(13)]; + k[6] = s5[xi(7)] ^ s6[xi(6)] ^ s7[xi(8)] ^ sb8[xi(9)] ^ s7[xi(3)]; + k[7] = s5[xi(5)] ^ s6[xi(4)] ^ s7[xi(10)] ^ sb8[xi(11)] ^ + sb8[xi(7)]; + + z[0] = x[0] ^ s5[xi(13)] ^ s6[xi(15)] ^ s7[xi(12)] ^ sb8[xi(14)] ^ + s7[xi(8)]; + z[1] = x[2] ^ s5[zi(0)] ^ s6[zi(2)] ^ s7[zi(1)] ^ sb8[zi(3)] ^ + sb8[xi(10)]; + z[2] = x[3] ^ s5[zi(7)] ^ s6[zi(6)] ^ s7[zi(5)] ^ sb8[zi(4)] ^ + s5[xi(9)]; + z[3] = x[1] ^ s5[zi(10)] ^ s6[zi(9)] ^ s7[zi(11)] ^ sb8[zi(8)] ^ + s6[xi(11)]; + k[8] = s5[zi(3)] ^ s6[zi(2)] ^ s7[zi(12)] ^ sb8[zi(13)] ^ + s5[zi(9)]; + k[9] = s5[zi(1)] ^ s6[zi(0)] ^ s7[zi(14)] ^ sb8[zi(15)] ^ + s6[zi(12)]; + k[10] = s5[zi(7)] ^ s6[zi(6)] ^ s7[zi(8)] ^ sb8[zi(9)] ^ s7[zi(2)]; + k[11] = s5[zi(5)] ^ 
s6[zi(4)] ^ s7[zi(10)] ^ sb8[zi(11)] ^ + sb8[zi(6)]; + + x[0] = z[2] ^ s5[zi(5)] ^ s6[zi(7)] ^ s7[zi(4)] ^ sb8[zi(6)] ^ + s7[zi(0)]; + x[1] = z[0] ^ s5[xi(0)] ^ s6[xi(2)] ^ s7[xi(1)] ^ sb8[xi(3)] ^ + sb8[zi(2)]; + x[2] = z[1] ^ s5[xi(7)] ^ s6[xi(6)] ^ s7[xi(5)] ^ sb8[xi(4)] ^ + s5[zi(1)]; + x[3] = z[3] ^ s5[xi(10)] ^ s6[xi(9)] ^ s7[xi(11)] ^ sb8[xi(8)] ^ + s6[zi(3)]; + k[12] = s5[xi(8)] ^ s6[xi(9)] ^ s7[xi(7)] ^ sb8[xi(6)] ^ s5[xi(3)]; + k[13] = s5[xi(10)] ^ s6[xi(11)] ^ s7[xi(5)] ^ sb8[xi(4)] ^ + s6[xi(7)]; + k[14] = s5[xi(12)] ^ s6[xi(13)] ^ s7[xi(3)] ^ sb8[xi(2)] ^ + s7[xi(8)]; + k[15] = s5[xi(14)] ^ s6[xi(15)] ^ s7[xi(1)] ^ sb8[xi(0)] ^ + sb8[xi(13)]; + +#undef xi +#undef zi +} + + +int cast5_setkey(struct crypto_tfm *tfm, const u8 *key, unsigned int key_len) +{ + struct cast5_ctx *c = crypto_tfm_ctx(tfm); + int i; + u32 x[4]; + u32 z[4]; + u32 k[16]; + __be32 p_key[4]; + + c->rr = key_len <= 10 ? 1 : 0; + + memset(p_key, 0, 16); + memcpy(p_key, key, key_len); + + + x[0] = be32_to_cpu(p_key[0]); + x[1] = be32_to_cpu(p_key[1]); + x[2] = be32_to_cpu(p_key[2]); + x[3] = be32_to_cpu(p_key[3]); + + key_schedule(x, z, k); + for (i = 0; i < 16; i++) + c->Km[i] = k[i]; + key_schedule(x, z, k); + for (i = 0; i < 16; i++) + c->Kr[i] = k[i] & 0x1f; + return 0; +} +EXPORT_SYMBOL_GPL(cast5_setkey); + +static struct crypto_alg alg = { + .cra_name = "cast5", + .cra_driver_name = "cast5-generic", + .cra_priority = 100, + .cra_flags = CRYPTO_ALG_TYPE_CIPHER, + .cra_blocksize = CAST5_BLOCK_SIZE, + .cra_ctxsize = sizeof(struct cast5_ctx), + .cra_alignmask = 3, + .cra_module = THIS_MODULE, + .cra_u = { + .cipher = { + .cia_min_keysize = CAST5_MIN_KEY_SIZE, + .cia_max_keysize = CAST5_MAX_KEY_SIZE, + .cia_setkey = cast5_setkey, + .cia_encrypt = cast5_encrypt, + .cia_decrypt = cast5_decrypt + } + } +}; + +static int __init cast5_mod_init(void) +{ + return crypto_register_alg(&alg); +} + +static void __exit cast5_mod_fini(void) +{ + crypto_unregister_alg(&alg); +} + +module_init(cast5_mod_init); +module_exit(cast5_mod_fini); + +MODULE_LICENSE("GPL"); +MODULE_DESCRIPTION("Cast5 Cipher Algorithm"); +MODULE_ALIAS("cast5"); diff --git a/include/crypto/cast5.h b/include/crypto/cast5.h new file mode 100644 index 000000000000..dcb90c13dd39 --- /dev/null +++ b/include/crypto/cast5.h @@ -0,0 +1,22 @@ +#ifndef _CRYPTO_CAST5_H +#define _CRYPTO_CAST5_H + +#include +#include + +#define CAST5_BLOCK_SIZE 8 +#define CAST5_MIN_KEY_SIZE 5 +#define CAST5_MAX_KEY_SIZE 16 + +struct cast5_ctx { + u32 Km[16]; + u8 Kr[16]; + int rr; /* rr ? rounds = 12 : rounds = 16; (rfc 2144) */ +}; + +int cast5_setkey(struct crypto_tfm *tfm, const u8 *key, unsigned int keylen); + +void __cast5_encrypt(struct cast5_ctx *ctx, u8 *dst, const u8 *src); +void __cast5_decrypt(struct cast5_ctx *ctx, u8 *dst, const u8 *src); + +#endif -- cgit v1.2.3 From a2c5826095562983bf316e3a7eb137ef04a71a24 Mon Sep 17 00:00:00 2001 From: Johannes Goetzfried Date: Wed, 11 Jul 2012 19:37:21 +0200 Subject: crypto: testmgr - add larger cast5 testvectors New ECB, CBC and CTR testvectors for cast5. We need larger testvectors to check parallel code paths in the optimized implementation. Tests have also been added to the tcrypt module. 
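As a rough illustration (an assumption-laden sketch, not part of this patch): an in-kernel caller reaches the generic cast5 code registered above through the single-block cipher API, roughly along the following lines. Only the "cast5" algorithm name and the CAST5_BLOCK_SIZE/CAST5_MAX_KEY_SIZE constants come from the patches above; the helper name and the key bytes are invented for the example.

#include <linux/crypto.h>
#include <linux/err.h>
#include <crypto/cast5.h>

/* Illustrative only: encrypt one 8-byte block with the generic "cast5"
 * cipher.  A 16-byte key selects the full 16-round variant (c->rr == 0
 * in cast5_setkey()); keys of 10 bytes or less use 12 rounds instead.
 */
static int cast5_demo_encrypt_block(void)
{
	static const u8 key[CAST5_MAX_KEY_SIZE] = {
		0x01, 0x23, 0x45, 0x67, 0x12, 0x34, 0x56, 0x78,
		0x23, 0x45, 0x67, 0x89, 0x34, 0x56, 0x78, 0x9a,
	};
	u8 in[CAST5_BLOCK_SIZE] = { 0 };	/* arbitrary plaintext block */
	u8 out[CAST5_BLOCK_SIZE];
	struct crypto_cipher *tfm;
	int err;

	tfm = crypto_alloc_cipher("cast5", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	err = crypto_cipher_setkey(tfm, key, sizeof(key));
	if (!err)
		crypto_cipher_encrypt_one(tfm, out, in);

	crypto_cipher_free(tfm);
	return err;
}

The ecb/cbc/ctr templates covered by the new vectors wrap this same one-block primitive, while the optimized implementations process several blocks per call, which is why the 496-byte inputs added below are needed to exercise those parallel paths.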
Signed-off-by: Johannes Goetzfried Signed-off-by: Herbert Xu --- crypto/tcrypt.c | 32 +++ crypto/tcrypt.h | 1 + crypto/testmgr.c | 30 +++ crypto/testmgr.h | 810 ++++++++++++++++++++++++++++++++++++++++++++++++++++++- 4 files changed, 871 insertions(+), 2 deletions(-) (limited to 'crypto') diff --git a/crypto/tcrypt.c b/crypto/tcrypt.c index 5cf2ccb1540c..a94bbd77dc60 100644 --- a/crypto/tcrypt.c +++ b/crypto/tcrypt.c @@ -1037,6 +1037,8 @@ static int do_test(int m) case 14: ret += tcrypt_test("ecb(cast5)"); + ret += tcrypt_test("cbc(cast5)"); + ret += tcrypt_test("ctr(cast5)"); break; case 15: @@ -1359,6 +1361,21 @@ static int do_test(int m) speed_template_8); break; + case 209: + test_cipher_speed("ecb(cast5)", ENCRYPT, sec, NULL, 0, + speed_template_8_16); + test_cipher_speed("ecb(cast5)", DECRYPT, sec, NULL, 0, + speed_template_8_16); + test_cipher_speed("cbc(cast5)", ENCRYPT, sec, NULL, 0, + speed_template_8_16); + test_cipher_speed("cbc(cast5)", DECRYPT, sec, NULL, 0, + speed_template_8_16); + test_cipher_speed("ctr(cast5)", ENCRYPT, sec, NULL, 0, + speed_template_8_16); + test_cipher_speed("ctr(cast5)", DECRYPT, sec, NULL, 0, + speed_template_8_16); + break; + case 300: /* fall through */ @@ -1639,6 +1656,21 @@ static int do_test(int m) speed_template_8); break; + case 506: + test_acipher_speed("ecb(cast5)", ENCRYPT, sec, NULL, 0, + speed_template_8_16); + test_acipher_speed("ecb(cast5)", DECRYPT, sec, NULL, 0, + speed_template_8_16); + test_acipher_speed("cbc(cast5)", ENCRYPT, sec, NULL, 0, + speed_template_8_16); + test_acipher_speed("cbc(cast5)", DECRYPT, sec, NULL, 0, + speed_template_8_16); + test_acipher_speed("ctr(cast5)", ENCRYPT, sec, NULL, 0, + speed_template_8_16); + test_acipher_speed("ctr(cast5)", DECRYPT, sec, NULL, 0, + speed_template_8_16); + break; + case 1000: test_available(); break; diff --git a/crypto/tcrypt.h b/crypto/tcrypt.h index 5be1fc8c1ab3..cd2068524f3f 100644 --- a/crypto/tcrypt.h +++ b/crypto/tcrypt.h @@ -47,6 +47,7 @@ static struct cipher_speed_template des3_speed_template[] = { */ static u8 speed_template_8[] = {8, 0}; static u8 speed_template_24[] = {24, 0}; +static u8 speed_template_8_16[] = {8, 16, 0}; static u8 speed_template_8_32[] = {8, 32, 0}; static u8 speed_template_16_32[] = {16, 32, 0}; static u8 speed_template_16_24_32[] = {16, 24, 32, 0}; diff --git a/crypto/testmgr.c b/crypto/testmgr.c index a2ca7431760a..7a91e540563f 100644 --- a/crypto/testmgr.c +++ b/crypto/testmgr.c @@ -1817,6 +1817,21 @@ static const struct alg_test_desc alg_test_descs[] = { } } } + }, { + .alg = "cbc(cast5)", + .test = alg_test_skcipher, + .suite = { + .cipher = { + .enc = { + .vecs = cast5_cbc_enc_tv_template, + .count = CAST5_CBC_ENC_TEST_VECTORS + }, + .dec = { + .vecs = cast5_cbc_dec_tv_template, + .count = CAST5_CBC_DEC_TEST_VECTORS + } + } + } }, { .alg = "cbc(des)", .test = alg_test_skcipher, @@ -2053,6 +2068,21 @@ static const struct alg_test_desc alg_test_descs[] = { } } } + }, { + .alg = "ctr(cast5)", + .test = alg_test_skcipher, + .suite = { + .cipher = { + .enc = { + .vecs = cast5_ctr_enc_tv_template, + .count = CAST5_CTR_ENC_TEST_VECTORS + }, + .dec = { + .vecs = cast5_ctr_dec_tv_template, + .count = CAST5_CTR_DEC_TEST_VECTORS + } + } + } }, { .alg = "ctr(serpent)", .test = alg_test_skcipher, diff --git a/crypto/testmgr.h b/crypto/testmgr.h index f8179e0344ed..9309948a7028 100644 --- a/crypto/testmgr.h +++ b/crypto/testmgr.h @@ -12125,8 +12125,12 @@ static struct cprng_testvec ansi_cprng_aes_tv_template[] = { }; /* Cast5 test vectors from RFC 2144 */ 
-#define CAST5_ENC_TEST_VECTORS 3 -#define CAST5_DEC_TEST_VECTORS 3 +#define CAST5_ENC_TEST_VECTORS 4 +#define CAST5_DEC_TEST_VECTORS 4 +#define CAST5_CBC_ENC_TEST_VECTORS 1 +#define CAST5_CBC_DEC_TEST_VECTORS 1 +#define CAST5_CTR_ENC_TEST_VECTORS 1 +#define CAST5_CTR_DEC_TEST_VECTORS 1 static struct cipher_testvec cast5_enc_tv_template[] = { { @@ -12152,6 +12156,137 @@ static struct cipher_testvec cast5_enc_tv_template[] = { .ilen = 8, .result = "\x7a\xc8\x16\xd1\x6e\x9b\x30\x2e", .rlen = 8, + }, { /* Generated from TF test vectors */ + .key = "\x85\x62\x3F\x1C\xF9\xD6\x1C\xF9" + "\xD6\xB3\x90\x6D\x4A\x90\x6D\x4A", + .klen = 16, + .iv = "\xE2\x24\x89\xEE\x53\xB8\x1D\x5F", + .input = "\x56\xED\x84\x1B\x8F\x26\xBD\x31" + "\xC8\x5F\xF6\x6A\x01\x98\x0C\xA3" + "\x3A\xD1\x45\xDC\x73\x0A\x7E\x15" + "\xAC\x20\xB7\x4E\xE5\x59\xF0\x87" + "\x1E\x92\x29\xC0\x34\xCB\x62\xF9" + "\x6D\x04\x9B\x0F\xA6\x3D\xD4\x48" + "\xDF\x76\x0D\x81\x18\xAF\x23\xBA" + "\x51\xE8\x5C\xF3\x8A\x21\x95\x2C" + "\xC3\x37\xCE\x65\xFC\x70\x07\x9E" + "\x12\xA9\x40\xD7\x4B\xE2\x79\x10" + "\x84\x1B\xB2\x26\xBD\x54\xEB\x5F" + "\xF6\x8D\x01\x98\x2F\xC6\x3A\xD1" + "\x68\xFF\x73\x0A\xA1\x15\xAC\x43" + "\xDA\x4E\xE5\x7C\x13\x87\x1E\xB5" + "\x29\xC0\x57\xEE\x62\xF9\x90\x04" + "\x9B\x32\xC9\x3D\xD4\x6B\x02\x76" + "\x0D\xA4\x18\xAF\x46\xDD\x51\xE8" + "\x7F\x16\x8A\x21\xB8\x2C\xC3\x5A" + "\xF1\x65\xFC\x93\x07\x9E\x35\xCC" + "\x40\xD7\x6E\x05\x79\x10\xA7\x1B" + "\xB2\x49\xE0\x54\xEB\x82\x19\x8D" + "\x24\xBB\x2F\xC6\x5D\xF4\x68\xFF" + "\x96\x0A\xA1\x38\xCF\x43\xDA\x71" + "\x08\x7C\x13\xAA\x1E\xB5\x4C\xE3" + "\x57\xEE\x85\x1C\x90\x27\xBE\x32" + "\xC9\x60\xF7\x6B\x02\x99\x0D\xA4" + "\x3B\xD2\x46\xDD\x74\x0B\x7F\x16" + "\xAD\x21\xB8\x4F\xE6\x5A\xF1\x88" + "\x1F\x93\x2A\xC1\x35\xCC\x63\xFA" + "\x6E\x05\x9C\x10\xA7\x3E\xD5\x49" + "\xE0\x77\x0E\x82\x19\xB0\x24\xBB" + "\x52\xE9\x5D\xF4\x8B\x22\x96\x2D" + "\xC4\x38\xCF\x66\xFD\x71\x08\x9F" + "\x13\xAA\x41\xD8\x4C\xE3\x7A\x11" + "\x85\x1C\xB3\x27\xBE\x55\xEC\x60" + "\xF7\x8E\x02\x99\x30\xC7\x3B\xD2" + "\x69\x00\x74\x0B\xA2\x16\xAD\x44" + "\xDB\x4F\xE6\x7D\x14\x88\x1F\xB6" + "\x2A\xC1\x58\xEF\x63\xFA\x91\x05" + "\x9C\x33\xCA\x3E\xD5\x6C\x03\x77" + "\x0E\xA5\x19\xB0\x47\xDE\x52\xE9" + "\x80\x17\x8B\x22\xB9\x2D\xC4\x5B" + "\xF2\x66\xFD\x94\x08\x9F\x36\xCD" + "\x41\xD8\x6F\x06\x7A\x11\xA8\x1C" + "\xB3\x4A\xE1\x55\xEC\x83\x1A\x8E" + "\x25\xBC\x30\xC7\x5E\xF5\x69\x00" + "\x97\x0B\xA2\x39\xD0\x44\xDB\x72" + "\x09\x7D\x14\xAB\x1F\xB6\x4D\xE4" + "\x58\xEF\x86\x1D\x91\x28\xBF\x33" + "\xCA\x61\xF8\x6C\x03\x9A\x0E\xA5" + "\x3C\xD3\x47\xDE\x75\x0C\x80\x17" + "\xAE\x22\xB9\x50\xE7\x5B\xF2\x89" + "\x20\x94\x2B\xC2\x36\xCD\x64\xFB" + "\x6F\x06\x9D\x11\xA8\x3F\xD6\x4A" + "\xE1\x78\x0F\x83\x1A\xB1\x25\xBC" + "\x53\xEA\x5E\xF5\x8C\x00\x97\x2E" + "\xC5\x39\xD0\x67\xFE\x72\x09\xA0" + "\x14\xAB\x42\xD9\x4D\xE4\x7B\x12" + "\x86\x1D\xB4\x28\xBF\x56\xED\x61" + "\xF8\x8F\x03\x9A\x31\xC8\x3C\xD3" + "\x6A\x01\x75\x0C\xA3\x17\xAE\x45" + "\xDC\x50\xE7\x7E\x15\x89\x20\xB7", + .ilen = 496, + .result = "\x8D\xFC\x81\x9C\xCB\xAA\x5A\x1C" + "\x7E\x95\xCF\x40\xAB\x4D\x6F\xEA" + "\xD3\xD9\xB0\x9A\xB7\xC7\xE0\x2E" + "\xD1\x39\x34\x92\x8F\xFA\x14\xF1" + "\xD5\xD2\x7B\x59\x1F\x35\x28\xC2" + "\x20\xD9\x42\x06\xC9\x0B\x10\x04" + "\xF8\x79\xCD\x32\x86\x75\x4C\xB6" + "\x7B\x1C\x52\xB1\x91\x64\x22\x4B" + "\x13\xC7\xAE\x98\x0E\xB5\xCF\x6F" + "\x3F\xF4\x43\x96\x73\x0D\xA2\x05" + "\xDB\xFD\x28\x90\x2C\x56\xB9\x37" + "\x5B\x69\x0C\xAD\x84\x67\xFF\x15" + "\x4A\xD4\xA7\xD3\xDD\x99\x47\x3A" + "\xED\x34\x35\x78\x6B\x91\xC9\x32" + 
"\xE1\xBF\xBC\xB4\x04\x85\x6A\x39" + "\xC0\xBA\x51\xD0\x0F\x4E\xD1\xE2" + "\x1C\xFD\x0E\x05\x07\xF4\x10\xED" + "\xA2\x17\xFF\xF5\x64\xC6\x1A\x22" + "\xAD\x78\xE7\xD7\x11\xE9\x99\xB9" + "\xAA\xEC\x6F\xF8\x3B\xBF\xCE\x77" + "\x93\xE8\xAD\x1D\x50\x6C\xAE\xBC" + "\xBA\x5C\x80\xD1\x91\x65\x51\x1B" + "\xE8\x0A\xCD\x99\x96\x71\x3D\xB6" + "\x78\x75\x37\x55\xC1\xF5\x90\x40" + "\x34\xF4\x7E\xC8\xCC\x3A\x5F\x6E" + "\x36\xA1\xA1\xC2\x3A\x72\x42\x8E" + "\x0E\x37\x88\xE8\xCE\x83\xCB\xAD" + "\xE0\x69\x77\x50\xC7\x0C\x99\xCA" + "\x19\x5B\x30\x25\x9A\xEF\x9B\x0C" + "\xEF\x8F\x74\x4C\xCF\x49\x4E\xB9" + "\xC5\xAE\x9E\x2E\x78\x9A\xB9\x48" + "\xD5\x81\xE4\x37\x1D\xBF\x27\xD9" + "\xC5\xD6\x65\x43\x45\x8C\xBB\xB6" + "\x55\xF4\x06\xBB\x49\x53\x8B\x1B" + "\x07\xA9\x96\x69\x5B\xCB\x0F\xBC" + "\x93\x85\x90\x0F\x0A\x68\x40\x2A" + "\x95\xED\x2D\x88\xBF\x71\xD0\xBB" + "\xEC\xB0\x77\x6C\x79\xFC\x3C\x05" + "\x49\x3F\xB8\x24\xEF\x8E\x09\xA2" + "\x1D\xEF\x92\x02\x96\xD4\x7F\xC8" + "\x03\xB2\xCA\xDB\x17\x5C\x52\xCF" + "\xDD\x70\x37\x63\xAA\xA5\x83\x20" + "\x52\x02\xF6\xB9\xE7\x6E\x0A\xB6" + "\x79\x03\xA0\xDA\xA3\x79\x21\xBD" + "\xE3\x37\x3A\xC0\xF7\x2C\x32\xBE" + "\x8B\xE8\xA6\x00\xC7\x32\xD5\x06" + "\xBB\xE3\xAB\x06\x21\x82\xB8\x32" + "\x31\x34\x2A\xA7\x1F\x64\x99\xBF" + "\xFA\xDA\x3D\x75\xF7\x48\xD5\x48" + "\x4B\x52\x7E\xF6\x7C\xAB\x67\x59" + "\xC5\xDC\xA8\xC6\x63\x85\x4A\xDF" + "\xF0\x40\x5F\xCF\xE3\x58\x52\x67" + "\x7A\x24\x32\xC5\xEC\x9E\xA9\x6F" + "\x58\x56\xDD\x94\x1F\x71\x8D\xF4" + "\x6E\xFF\x2C\xA7\xA5\xD8\xBA\xAF" + "\x1D\x8B\xA2\x46\xB5\xC4\x9F\x57" + "\x8D\xD8\xB3\x3C\x02\x0D\xBB\x84" + "\xC7\xBD\xB4\x9A\x6E\xBB\xB1\x37" + "\x95\x79\xC4\xA7\xEA\x1D\xDC\x33" + "\x5D\x0B\x3F\x03\x8F\x30\xF9\xAE" + "\x4F\xFE\x24\x9C\x9A\x02\xE5\x57" + "\xF5\xBC\x25\xD6\x02\x56\x57\x1C", + .rlen = 496, }, }; @@ -12179,6 +12314,677 @@ static struct cipher_testvec cast5_dec_tv_template[] = { .ilen = 8, .result = "\x01\x23\x45\x67\x89\xab\xcd\xef", .rlen = 8, + }, { /* Generated from TF test vectors */ + .key = "\x85\x62\x3F\x1C\xF9\xD6\x1C\xF9" + "\xD6\xB3\x90\x6D\x4A\x90\x6D\x4A", + .klen = 16, + .iv = "\xE2\x24\x89\xEE\x53\xB8\x1D\x5F", + .input = "\x8D\xFC\x81\x9C\xCB\xAA\x5A\x1C" + "\x7E\x95\xCF\x40\xAB\x4D\x6F\xEA" + "\xD3\xD9\xB0\x9A\xB7\xC7\xE0\x2E" + "\xD1\x39\x34\x92\x8F\xFA\x14\xF1" + "\xD5\xD2\x7B\x59\x1F\x35\x28\xC2" + "\x20\xD9\x42\x06\xC9\x0B\x10\x04" + "\xF8\x79\xCD\x32\x86\x75\x4C\xB6" + "\x7B\x1C\x52\xB1\x91\x64\x22\x4B" + "\x13\xC7\xAE\x98\x0E\xB5\xCF\x6F" + "\x3F\xF4\x43\x96\x73\x0D\xA2\x05" + "\xDB\xFD\x28\x90\x2C\x56\xB9\x37" + "\x5B\x69\x0C\xAD\x84\x67\xFF\x15" + "\x4A\xD4\xA7\xD3\xDD\x99\x47\x3A" + "\xED\x34\x35\x78\x6B\x91\xC9\x32" + "\xE1\xBF\xBC\xB4\x04\x85\x6A\x39" + "\xC0\xBA\x51\xD0\x0F\x4E\xD1\xE2" + "\x1C\xFD\x0E\x05\x07\xF4\x10\xED" + "\xA2\x17\xFF\xF5\x64\xC6\x1A\x22" + "\xAD\x78\xE7\xD7\x11\xE9\x99\xB9" + "\xAA\xEC\x6F\xF8\x3B\xBF\xCE\x77" + "\x93\xE8\xAD\x1D\x50\x6C\xAE\xBC" + "\xBA\x5C\x80\xD1\x91\x65\x51\x1B" + "\xE8\x0A\xCD\x99\x96\x71\x3D\xB6" + "\x78\x75\x37\x55\xC1\xF5\x90\x40" + "\x34\xF4\x7E\xC8\xCC\x3A\x5F\x6E" + "\x36\xA1\xA1\xC2\x3A\x72\x42\x8E" + "\x0E\x37\x88\xE8\xCE\x83\xCB\xAD" + "\xE0\x69\x77\x50\xC7\x0C\x99\xCA" + "\x19\x5B\x30\x25\x9A\xEF\x9B\x0C" + "\xEF\x8F\x74\x4C\xCF\x49\x4E\xB9" + "\xC5\xAE\x9E\x2E\x78\x9A\xB9\x48" + "\xD5\x81\xE4\x37\x1D\xBF\x27\xD9" + "\xC5\xD6\x65\x43\x45\x8C\xBB\xB6" + "\x55\xF4\x06\xBB\x49\x53\x8B\x1B" + "\x07\xA9\x96\x69\x5B\xCB\x0F\xBC" + "\x93\x85\x90\x0F\x0A\x68\x40\x2A" + "\x95\xED\x2D\x88\xBF\x71\xD0\xBB" + "\xEC\xB0\x77\x6C\x79\xFC\x3C\x05" + 
"\x49\x3F\xB8\x24\xEF\x8E\x09\xA2" + "\x1D\xEF\x92\x02\x96\xD4\x7F\xC8" + "\x03\xB2\xCA\xDB\x17\x5C\x52\xCF" + "\xDD\x70\x37\x63\xAA\xA5\x83\x20" + "\x52\x02\xF6\xB9\xE7\x6E\x0A\xB6" + "\x79\x03\xA0\xDA\xA3\x79\x21\xBD" + "\xE3\x37\x3A\xC0\xF7\x2C\x32\xBE" + "\x8B\xE8\xA6\x00\xC7\x32\xD5\x06" + "\xBB\xE3\xAB\x06\x21\x82\xB8\x32" + "\x31\x34\x2A\xA7\x1F\x64\x99\xBF" + "\xFA\xDA\x3D\x75\xF7\x48\xD5\x48" + "\x4B\x52\x7E\xF6\x7C\xAB\x67\x59" + "\xC5\xDC\xA8\xC6\x63\x85\x4A\xDF" + "\xF0\x40\x5F\xCF\xE3\x58\x52\x67" + "\x7A\x24\x32\xC5\xEC\x9E\xA9\x6F" + "\x58\x56\xDD\x94\x1F\x71\x8D\xF4" + "\x6E\xFF\x2C\xA7\xA5\xD8\xBA\xAF" + "\x1D\x8B\xA2\x46\xB5\xC4\x9F\x57" + "\x8D\xD8\xB3\x3C\x02\x0D\xBB\x84" + "\xC7\xBD\xB4\x9A\x6E\xBB\xB1\x37" + "\x95\x79\xC4\xA7\xEA\x1D\xDC\x33" + "\x5D\x0B\x3F\x03\x8F\x30\xF9\xAE" + "\x4F\xFE\x24\x9C\x9A\x02\xE5\x57" + "\xF5\xBC\x25\xD6\x02\x56\x57\x1C", + .ilen = 496, + .result = "\x56\xED\x84\x1B\x8F\x26\xBD\x31" + "\xC8\x5F\xF6\x6A\x01\x98\x0C\xA3" + "\x3A\xD1\x45\xDC\x73\x0A\x7E\x15" + "\xAC\x20\xB7\x4E\xE5\x59\xF0\x87" + "\x1E\x92\x29\xC0\x34\xCB\x62\xF9" + "\x6D\x04\x9B\x0F\xA6\x3D\xD4\x48" + "\xDF\x76\x0D\x81\x18\xAF\x23\xBA" + "\x51\xE8\x5C\xF3\x8A\x21\x95\x2C" + "\xC3\x37\xCE\x65\xFC\x70\x07\x9E" + "\x12\xA9\x40\xD7\x4B\xE2\x79\x10" + "\x84\x1B\xB2\x26\xBD\x54\xEB\x5F" + "\xF6\x8D\x01\x98\x2F\xC6\x3A\xD1" + "\x68\xFF\x73\x0A\xA1\x15\xAC\x43" + "\xDA\x4E\xE5\x7C\x13\x87\x1E\xB5" + "\x29\xC0\x57\xEE\x62\xF9\x90\x04" + "\x9B\x32\xC9\x3D\xD4\x6B\x02\x76" + "\x0D\xA4\x18\xAF\x46\xDD\x51\xE8" + "\x7F\x16\x8A\x21\xB8\x2C\xC3\x5A" + "\xF1\x65\xFC\x93\x07\x9E\x35\xCC" + "\x40\xD7\x6E\x05\x79\x10\xA7\x1B" + "\xB2\x49\xE0\x54\xEB\x82\x19\x8D" + "\x24\xBB\x2F\xC6\x5D\xF4\x68\xFF" + "\x96\x0A\xA1\x38\xCF\x43\xDA\x71" + "\x08\x7C\x13\xAA\x1E\xB5\x4C\xE3" + "\x57\xEE\x85\x1C\x90\x27\xBE\x32" + "\xC9\x60\xF7\x6B\x02\x99\x0D\xA4" + "\x3B\xD2\x46\xDD\x74\x0B\x7F\x16" + "\xAD\x21\xB8\x4F\xE6\x5A\xF1\x88" + "\x1F\x93\x2A\xC1\x35\xCC\x63\xFA" + "\x6E\x05\x9C\x10\xA7\x3E\xD5\x49" + "\xE0\x77\x0E\x82\x19\xB0\x24\xBB" + "\x52\xE9\x5D\xF4\x8B\x22\x96\x2D" + "\xC4\x38\xCF\x66\xFD\x71\x08\x9F" + "\x13\xAA\x41\xD8\x4C\xE3\x7A\x11" + "\x85\x1C\xB3\x27\xBE\x55\xEC\x60" + "\xF7\x8E\x02\x99\x30\xC7\x3B\xD2" + "\x69\x00\x74\x0B\xA2\x16\xAD\x44" + "\xDB\x4F\xE6\x7D\x14\x88\x1F\xB6" + "\x2A\xC1\x58\xEF\x63\xFA\x91\x05" + "\x9C\x33\xCA\x3E\xD5\x6C\x03\x77" + "\x0E\xA5\x19\xB0\x47\xDE\x52\xE9" + "\x80\x17\x8B\x22\xB9\x2D\xC4\x5B" + "\xF2\x66\xFD\x94\x08\x9F\x36\xCD" + "\x41\xD8\x6F\x06\x7A\x11\xA8\x1C" + "\xB3\x4A\xE1\x55\xEC\x83\x1A\x8E" + "\x25\xBC\x30\xC7\x5E\xF5\x69\x00" + "\x97\x0B\xA2\x39\xD0\x44\xDB\x72" + "\x09\x7D\x14\xAB\x1F\xB6\x4D\xE4" + "\x58\xEF\x86\x1D\x91\x28\xBF\x33" + "\xCA\x61\xF8\x6C\x03\x9A\x0E\xA5" + "\x3C\xD3\x47\xDE\x75\x0C\x80\x17" + "\xAE\x22\xB9\x50\xE7\x5B\xF2\x89" + "\x20\x94\x2B\xC2\x36\xCD\x64\xFB" + "\x6F\x06\x9D\x11\xA8\x3F\xD6\x4A" + "\xE1\x78\x0F\x83\x1A\xB1\x25\xBC" + "\x53\xEA\x5E\xF5\x8C\x00\x97\x2E" + "\xC5\x39\xD0\x67\xFE\x72\x09\xA0" + "\x14\xAB\x42\xD9\x4D\xE4\x7B\x12" + "\x86\x1D\xB4\x28\xBF\x56\xED\x61" + "\xF8\x8F\x03\x9A\x31\xC8\x3C\xD3" + "\x6A\x01\x75\x0C\xA3\x17\xAE\x45" + "\xDC\x50\xE7\x7E\x15\x89\x20\xB7", + .rlen = 496, + }, +}; + +static struct cipher_testvec cast5_cbc_enc_tv_template[] = { + { /* Generated from TF test vectors */ + .key = "\x85\x62\x3F\x1C\xF9\xD6\x1C\xF9" + "\xD6\xB3\x90\x6D\x4A\x90\x6D\x4A", + .klen = 16, + .iv = "\xE2\x24\x89\xEE\x53\xB8\x1D\x5F", + .input = "\x56\xED\x84\x1B\x8F\x26\xBD\x31" + 
"\xC8\x5F\xF6\x6A\x01\x98\x0C\xA3" + "\x3A\xD1\x45\xDC\x73\x0A\x7E\x15" + "\xAC\x20\xB7\x4E\xE5\x59\xF0\x87" + "\x1E\x92\x29\xC0\x34\xCB\x62\xF9" + "\x6D\x04\x9B\x0F\xA6\x3D\xD4\x48" + "\xDF\x76\x0D\x81\x18\xAF\x23\xBA" + "\x51\xE8\x5C\xF3\x8A\x21\x95\x2C" + "\xC3\x37\xCE\x65\xFC\x70\x07\x9E" + "\x12\xA9\x40\xD7\x4B\xE2\x79\x10" + "\x84\x1B\xB2\x26\xBD\x54\xEB\x5F" + "\xF6\x8D\x01\x98\x2F\xC6\x3A\xD1" + "\x68\xFF\x73\x0A\xA1\x15\xAC\x43" + "\xDA\x4E\xE5\x7C\x13\x87\x1E\xB5" + "\x29\xC0\x57\xEE\x62\xF9\x90\x04" + "\x9B\x32\xC9\x3D\xD4\x6B\x02\x76" + "\x0D\xA4\x18\xAF\x46\xDD\x51\xE8" + "\x7F\x16\x8A\x21\xB8\x2C\xC3\x5A" + "\xF1\x65\xFC\x93\x07\x9E\x35\xCC" + "\x40\xD7\x6E\x05\x79\x10\xA7\x1B" + "\xB2\x49\xE0\x54\xEB\x82\x19\x8D" + "\x24\xBB\x2F\xC6\x5D\xF4\x68\xFF" + "\x96\x0A\xA1\x38\xCF\x43\xDA\x71" + "\x08\x7C\x13\xAA\x1E\xB5\x4C\xE3" + "\x57\xEE\x85\x1C\x90\x27\xBE\x32" + "\xC9\x60\xF7\x6B\x02\x99\x0D\xA4" + "\x3B\xD2\x46\xDD\x74\x0B\x7F\x16" + "\xAD\x21\xB8\x4F\xE6\x5A\xF1\x88" + "\x1F\x93\x2A\xC1\x35\xCC\x63\xFA" + "\x6E\x05\x9C\x10\xA7\x3E\xD5\x49" + "\xE0\x77\x0E\x82\x19\xB0\x24\xBB" + "\x52\xE9\x5D\xF4\x8B\x22\x96\x2D" + "\xC4\x38\xCF\x66\xFD\x71\x08\x9F" + "\x13\xAA\x41\xD8\x4C\xE3\x7A\x11" + "\x85\x1C\xB3\x27\xBE\x55\xEC\x60" + "\xF7\x8E\x02\x99\x30\xC7\x3B\xD2" + "\x69\x00\x74\x0B\xA2\x16\xAD\x44" + "\xDB\x4F\xE6\x7D\x14\x88\x1F\xB6" + "\x2A\xC1\x58\xEF\x63\xFA\x91\x05" + "\x9C\x33\xCA\x3E\xD5\x6C\x03\x77" + "\x0E\xA5\x19\xB0\x47\xDE\x52\xE9" + "\x80\x17\x8B\x22\xB9\x2D\xC4\x5B" + "\xF2\x66\xFD\x94\x08\x9F\x36\xCD" + "\x41\xD8\x6F\x06\x7A\x11\xA8\x1C" + "\xB3\x4A\xE1\x55\xEC\x83\x1A\x8E" + "\x25\xBC\x30\xC7\x5E\xF5\x69\x00" + "\x97\x0B\xA2\x39\xD0\x44\xDB\x72" + "\x09\x7D\x14\xAB\x1F\xB6\x4D\xE4" + "\x58\xEF\x86\x1D\x91\x28\xBF\x33" + "\xCA\x61\xF8\x6C\x03\x9A\x0E\xA5" + "\x3C\xD3\x47\xDE\x75\x0C\x80\x17" + "\xAE\x22\xB9\x50\xE7\x5B\xF2\x89" + "\x20\x94\x2B\xC2\x36\xCD\x64\xFB" + "\x6F\x06\x9D\x11\xA8\x3F\xD6\x4A" + "\xE1\x78\x0F\x83\x1A\xB1\x25\xBC" + "\x53\xEA\x5E\xF5\x8C\x00\x97\x2E" + "\xC5\x39\xD0\x67\xFE\x72\x09\xA0" + "\x14\xAB\x42\xD9\x4D\xE4\x7B\x12" + "\x86\x1D\xB4\x28\xBF\x56\xED\x61" + "\xF8\x8F\x03\x9A\x31\xC8\x3C\xD3" + "\x6A\x01\x75\x0C\xA3\x17\xAE\x45" + "\xDC\x50\xE7\x7E\x15\x89\x20\xB7", + .ilen = 496, + .result = "\x05\x28\xCE\x61\x90\x80\xE1\x78" + "\xB9\x2A\x97\x7C\xB0\x83\xD8\x1A" + "\xDE\x58\x7F\xD7\xFD\x72\xB8\xFB" + "\xDA\xF0\x6E\x77\x14\x47\x82\xBA" + "\x29\x0E\x25\x6E\xB4\x39\xD9\x7F" + "\x05\xA7\xA7\x3A\xC1\x5D\x9E\x39" + "\xA7\xFB\x0D\x05\x00\xF3\x58\x67" + "\x60\xEC\x73\x77\x46\x85\x9B\x6A" + "\x08\x3E\xBE\x59\xFB\xE4\x96\x34" + "\xB4\x05\x49\x1A\x97\x43\xAD\xA0" + "\xA9\x1E\x6E\x74\xF1\x94\xEC\xA8" + "\xB5\x8A\x20\xEA\x89\x6B\x19\xAA" + "\xA7\xF1\x33\x67\x90\x23\x0D\xEE" + "\x81\xD5\x78\x4F\xD3\x63\xEA\x46" + "\xB5\xB2\x6E\xBB\xCA\x76\x06\x10" + "\x96\x2A\x0A\xBA\xF9\x41\x5A\x1D" + "\x36\x7C\x56\x14\x54\x83\xFA\xA1" + "\x27\xDD\xBA\x8A\x90\x29\xD6\xA6" + "\xFA\x48\x3E\x1E\x23\x6E\x98\xA8" + "\xA7\xD9\x67\x92\x5C\x13\xB4\x71" + "\xA8\xAA\x89\x4A\xA4\xB3\x49\x7C" + "\x7D\x7F\xCE\x6F\x29\x2E\x7E\x37" + "\xC8\x52\x60\xD9\xE7\xCA\x60\x98" + "\xED\xCD\xE8\x60\x83\xAD\x34\x4D" + "\x96\x4A\x99\x2B\xB7\x14\x75\x66" + "\x6C\x2C\x1A\xBA\x4B\xBB\x49\x56" + "\xE1\x86\xA2\x0E\xD0\xF0\x07\xD3" + "\x18\x38\x09\x9C\x0E\x8B\x86\x07" + "\x90\x12\x37\x49\x27\x98\x69\x18" + "\xB0\xCC\xFB\xD3\xBD\x04\xA0\x85" + "\x4B\x22\x97\x07\xB6\x97\xE9\x95" + "\x0F\x88\x36\xA9\x44\x00\xC6\xE9" + "\x27\x53\x5C\x5B\x1F\xD3\xE2\xEE" + "\xD0\xCD\x63\x30\xA9\xC0\xDD\x49" + 
"\xFE\x16\xA4\x07\x0D\xE2\x5D\x97" + "\xDE\x89\xBA\x2E\xF3\xA9\x5E\xBE" + "\x03\x55\x0E\x02\x41\x4A\x45\x06" + "\xBE\xEA\x32\xF2\xDC\x91\x5C\x20" + "\x94\x02\x30\xD2\xFC\x29\xFA\x8E" + "\x34\xA0\x31\xB8\x34\xBA\xAE\x54" + "\xB5\x88\x1F\xDC\x43\xDC\x22\x9F" + "\xDC\xCE\xD3\xFA\xA4\xA8\xBC\x8A" + "\xC7\x5A\x43\x21\xA5\xB1\xDB\xC3" + "\x84\x3B\xB4\x9B\xB5\xA7\xF1\x0A" + "\xB6\x37\x21\x19\x55\xC2\xBD\x99" + "\x49\x24\xBB\x7C\xB3\x8E\xEF\xD2" + "\x3A\xCF\xA0\x31\x28\x0E\x25\xA2" + "\x11\xB4\x18\x17\x1A\x65\x92\x56" + "\xE8\xE0\x52\x9C\x61\x18\x2A\xB1" + "\x1A\x01\x22\x45\x17\x62\x52\x6C" + "\x91\x44\xCF\x98\xC7\xC0\x79\x26" + "\x32\x66\x6F\x23\x7F\x94\x36\x88" + "\x3C\xC9\xD0\xB7\x45\x30\x31\x86" + "\x3D\xC6\xA3\x98\x62\x84\x1A\x8B" + "\x16\x88\xC7\xA3\xE9\x4F\xE0\x86" + "\xA4\x93\xA8\x34\x5A\xCA\xDF\xCA" + "\x46\x38\xD2\xF4\xE0\x2D\x1E\xC9" + "\x7C\xEF\x53\xB7\x60\x72\x41\xBF" + "\x29\x00\x87\x02\xAF\x44\x4C\xB7" + "\x8C\xF5\x3F\x19\xF4\x80\x45\xA7" + "\x15\x5F\xDB\xE9\xB1\x83\xD2\xE6" + "\x1D\x18\x66\x44\x5B\x8F\x14\xEB", + .rlen = 496, + }, +}; + +static struct cipher_testvec cast5_cbc_dec_tv_template[] = { + { /* Generated from TF test vectors */ + .key = "\x85\x62\x3F\x1C\xF9\xD6\x1C\xF9" + "\xD6\xB3\x90\x6D\x4A\x90\x6D\x4A", + .klen = 16, + .iv = "\xE2\x24\x89\xEE\x53\xB8\x1D\x5F", + .input = "\x05\x28\xCE\x61\x90\x80\xE1\x78" + "\xB9\x2A\x97\x7C\xB0\x83\xD8\x1A" + "\xDE\x58\x7F\xD7\xFD\x72\xB8\xFB" + "\xDA\xF0\x6E\x77\x14\x47\x82\xBA" + "\x29\x0E\x25\x6E\xB4\x39\xD9\x7F" + "\x05\xA7\xA7\x3A\xC1\x5D\x9E\x39" + "\xA7\xFB\x0D\x05\x00\xF3\x58\x67" + "\x60\xEC\x73\x77\x46\x85\x9B\x6A" + "\x08\x3E\xBE\x59\xFB\xE4\x96\x34" + "\xB4\x05\x49\x1A\x97\x43\xAD\xA0" + "\xA9\x1E\x6E\x74\xF1\x94\xEC\xA8" + "\xB5\x8A\x20\xEA\x89\x6B\x19\xAA" + "\xA7\xF1\x33\x67\x90\x23\x0D\xEE" + "\x81\xD5\x78\x4F\xD3\x63\xEA\x46" + "\xB5\xB2\x6E\xBB\xCA\x76\x06\x10" + "\x96\x2A\x0A\xBA\xF9\x41\x5A\x1D" + "\x36\x7C\x56\x14\x54\x83\xFA\xA1" + "\x27\xDD\xBA\x8A\x90\x29\xD6\xA6" + "\xFA\x48\x3E\x1E\x23\x6E\x98\xA8" + "\xA7\xD9\x67\x92\x5C\x13\xB4\x71" + "\xA8\xAA\x89\x4A\xA4\xB3\x49\x7C" + "\x7D\x7F\xCE\x6F\x29\x2E\x7E\x37" + "\xC8\x52\x60\xD9\xE7\xCA\x60\x98" + "\xED\xCD\xE8\x60\x83\xAD\x34\x4D" + "\x96\x4A\x99\x2B\xB7\x14\x75\x66" + "\x6C\x2C\x1A\xBA\x4B\xBB\x49\x56" + "\xE1\x86\xA2\x0E\xD0\xF0\x07\xD3" + "\x18\x38\x09\x9C\x0E\x8B\x86\x07" + "\x90\x12\x37\x49\x27\x98\x69\x18" + "\xB0\xCC\xFB\xD3\xBD\x04\xA0\x85" + "\x4B\x22\x97\x07\xB6\x97\xE9\x95" + "\x0F\x88\x36\xA9\x44\x00\xC6\xE9" + "\x27\x53\x5C\x5B\x1F\xD3\xE2\xEE" + "\xD0\xCD\x63\x30\xA9\xC0\xDD\x49" + "\xFE\x16\xA4\x07\x0D\xE2\x5D\x97" + "\xDE\x89\xBA\x2E\xF3\xA9\x5E\xBE" + "\x03\x55\x0E\x02\x41\x4A\x45\x06" + "\xBE\xEA\x32\xF2\xDC\x91\x5C\x20" + "\x94\x02\x30\xD2\xFC\x29\xFA\x8E" + "\x34\xA0\x31\xB8\x34\xBA\xAE\x54" + "\xB5\x88\x1F\xDC\x43\xDC\x22\x9F" + "\xDC\xCE\xD3\xFA\xA4\xA8\xBC\x8A" + "\xC7\x5A\x43\x21\xA5\xB1\xDB\xC3" + "\x84\x3B\xB4\x9B\xB5\xA7\xF1\x0A" + "\xB6\x37\x21\x19\x55\xC2\xBD\x99" + "\x49\x24\xBB\x7C\xB3\x8E\xEF\xD2" + "\x3A\xCF\xA0\x31\x28\x0E\x25\xA2" + "\x11\xB4\x18\x17\x1A\x65\x92\x56" + "\xE8\xE0\x52\x9C\x61\x18\x2A\xB1" + "\x1A\x01\x22\x45\x17\x62\x52\x6C" + "\x91\x44\xCF\x98\xC7\xC0\x79\x26" + "\x32\x66\x6F\x23\x7F\x94\x36\x88" + "\x3C\xC9\xD0\xB7\x45\x30\x31\x86" + "\x3D\xC6\xA3\x98\x62\x84\x1A\x8B" + "\x16\x88\xC7\xA3\xE9\x4F\xE0\x86" + "\xA4\x93\xA8\x34\x5A\xCA\xDF\xCA" + "\x46\x38\xD2\xF4\xE0\x2D\x1E\xC9" + "\x7C\xEF\x53\xB7\x60\x72\x41\xBF" + "\x29\x00\x87\x02\xAF\x44\x4C\xB7" + "\x8C\xF5\x3F\x19\xF4\x80\x45\xA7" + 
"\x15\x5F\xDB\xE9\xB1\x83\xD2\xE6" + "\x1D\x18\x66\x44\x5B\x8F\x14\xEB", + .ilen = 496, + .result = "\x56\xED\x84\x1B\x8F\x26\xBD\x31" + "\xC8\x5F\xF6\x6A\x01\x98\x0C\xA3" + "\x3A\xD1\x45\xDC\x73\x0A\x7E\x15" + "\xAC\x20\xB7\x4E\xE5\x59\xF0\x87" + "\x1E\x92\x29\xC0\x34\xCB\x62\xF9" + "\x6D\x04\x9B\x0F\xA6\x3D\xD4\x48" + "\xDF\x76\x0D\x81\x18\xAF\x23\xBA" + "\x51\xE8\x5C\xF3\x8A\x21\x95\x2C" + "\xC3\x37\xCE\x65\xFC\x70\x07\x9E" + "\x12\xA9\x40\xD7\x4B\xE2\x79\x10" + "\x84\x1B\xB2\x26\xBD\x54\xEB\x5F" + "\xF6\x8D\x01\x98\x2F\xC6\x3A\xD1" + "\x68\xFF\x73\x0A\xA1\x15\xAC\x43" + "\xDA\x4E\xE5\x7C\x13\x87\x1E\xB5" + "\x29\xC0\x57\xEE\x62\xF9\x90\x04" + "\x9B\x32\xC9\x3D\xD4\x6B\x02\x76" + "\x0D\xA4\x18\xAF\x46\xDD\x51\xE8" + "\x7F\x16\x8A\x21\xB8\x2C\xC3\x5A" + "\xF1\x65\xFC\x93\x07\x9E\x35\xCC" + "\x40\xD7\x6E\x05\x79\x10\xA7\x1B" + "\xB2\x49\xE0\x54\xEB\x82\x19\x8D" + "\x24\xBB\x2F\xC6\x5D\xF4\x68\xFF" + "\x96\x0A\xA1\x38\xCF\x43\xDA\x71" + "\x08\x7C\x13\xAA\x1E\xB5\x4C\xE3" + "\x57\xEE\x85\x1C\x90\x27\xBE\x32" + "\xC9\x60\xF7\x6B\x02\x99\x0D\xA4" + "\x3B\xD2\x46\xDD\x74\x0B\x7F\x16" + "\xAD\x21\xB8\x4F\xE6\x5A\xF1\x88" + "\x1F\x93\x2A\xC1\x35\xCC\x63\xFA" + "\x6E\x05\x9C\x10\xA7\x3E\xD5\x49" + "\xE0\x77\x0E\x82\x19\xB0\x24\xBB" + "\x52\xE9\x5D\xF4\x8B\x22\x96\x2D" + "\xC4\x38\xCF\x66\xFD\x71\x08\x9F" + "\x13\xAA\x41\xD8\x4C\xE3\x7A\x11" + "\x85\x1C\xB3\x27\xBE\x55\xEC\x60" + "\xF7\x8E\x02\x99\x30\xC7\x3B\xD2" + "\x69\x00\x74\x0B\xA2\x16\xAD\x44" + "\xDB\x4F\xE6\x7D\x14\x88\x1F\xB6" + "\x2A\xC1\x58\xEF\x63\xFA\x91\x05" + "\x9C\x33\xCA\x3E\xD5\x6C\x03\x77" + "\x0E\xA5\x19\xB0\x47\xDE\x52\xE9" + "\x80\x17\x8B\x22\xB9\x2D\xC4\x5B" + "\xF2\x66\xFD\x94\x08\x9F\x36\xCD" + "\x41\xD8\x6F\x06\x7A\x11\xA8\x1C" + "\xB3\x4A\xE1\x55\xEC\x83\x1A\x8E" + "\x25\xBC\x30\xC7\x5E\xF5\x69\x00" + "\x97\x0B\xA2\x39\xD0\x44\xDB\x72" + "\x09\x7D\x14\xAB\x1F\xB6\x4D\xE4" + "\x58\xEF\x86\x1D\x91\x28\xBF\x33" + "\xCA\x61\xF8\x6C\x03\x9A\x0E\xA5" + "\x3C\xD3\x47\xDE\x75\x0C\x80\x17" + "\xAE\x22\xB9\x50\xE7\x5B\xF2\x89" + "\x20\x94\x2B\xC2\x36\xCD\x64\xFB" + "\x6F\x06\x9D\x11\xA8\x3F\xD6\x4A" + "\xE1\x78\x0F\x83\x1A\xB1\x25\xBC" + "\x53\xEA\x5E\xF5\x8C\x00\x97\x2E" + "\xC5\x39\xD0\x67\xFE\x72\x09\xA0" + "\x14\xAB\x42\xD9\x4D\xE4\x7B\x12" + "\x86\x1D\xB4\x28\xBF\x56\xED\x61" + "\xF8\x8F\x03\x9A\x31\xC8\x3C\xD3" + "\x6A\x01\x75\x0C\xA3\x17\xAE\x45" + "\xDC\x50\xE7\x7E\x15\x89\x20\xB7", + .rlen = 496, + }, +}; + +static struct cipher_testvec cast5_ctr_enc_tv_template[] = { + { /* Generated from TF test vectors */ + .key = "\x85\x62\x3F\x1C\xF9\xD6\x1C\xF9" + "\xD6\xB3\x90\x6D\x4A\x90\x6D\x4A", + .klen = 16, + .iv = "\xE2\x24\x89\xEE\x53\xB8\x1D\x5F", + .input = "\x56\xED\x84\x1B\x8F\x26\xBD\x31" + "\xC8\x5F\xF6\x6A\x01\x98\x0C\xA3" + "\x3A\xD1\x45\xDC\x73\x0A\x7E\x15" + "\xAC\x20\xB7\x4E\xE5\x59\xF0\x87" + "\x1E\x92\x29\xC0\x34\xCB\x62\xF9" + "\x6D\x04\x9B\x0F\xA6\x3D\xD4\x48" + "\xDF\x76\x0D\x81\x18\xAF\x23\xBA" + "\x51\xE8\x5C\xF3\x8A\x21\x95\x2C" + "\xC3\x37\xCE\x65\xFC\x70\x07\x9E" + "\x12\xA9\x40\xD7\x4B\xE2\x79\x10" + "\x84\x1B\xB2\x26\xBD\x54\xEB\x5F" + "\xF6\x8D\x01\x98\x2F\xC6\x3A\xD1" + "\x68\xFF\x73\x0A\xA1\x15\xAC\x43" + "\xDA\x4E\xE5\x7C\x13\x87\x1E\xB5" + "\x29\xC0\x57\xEE\x62\xF9\x90\x04" + "\x9B\x32\xC9\x3D\xD4\x6B\x02\x76" + "\x0D\xA4\x18\xAF\x46\xDD\x51\xE8" + "\x7F\x16\x8A\x21\xB8\x2C\xC3\x5A" + "\xF1\x65\xFC\x93\x07\x9E\x35\xCC" + "\x40\xD7\x6E\x05\x79\x10\xA7\x1B" + "\xB2\x49\xE0\x54\xEB\x82\x19\x8D" + "\x24\xBB\x2F\xC6\x5D\xF4\x68\xFF" + "\x96\x0A\xA1\x38\xCF\x43\xDA\x71" + 
"\x08\x7C\x13\xAA\x1E\xB5\x4C\xE3" + "\x57\xEE\x85\x1C\x90\x27\xBE\x32" + "\xC9\x60\xF7\x6B\x02\x99\x0D\xA4" + "\x3B\xD2\x46\xDD\x74\x0B\x7F\x16" + "\xAD\x21\xB8\x4F\xE6\x5A\xF1\x88" + "\x1F\x93\x2A\xC1\x35\xCC\x63\xFA" + "\x6E\x05\x9C\x10\xA7\x3E\xD5\x49" + "\xE0\x77\x0E\x82\x19\xB0\x24\xBB" + "\x52\xE9\x5D\xF4\x8B\x22\x96\x2D" + "\xC4\x38\xCF\x66\xFD\x71\x08\x9F" + "\x13\xAA\x41\xD8\x4C\xE3\x7A\x11" + "\x85\x1C\xB3\x27\xBE\x55\xEC\x60" + "\xF7\x8E\x02\x99\x30\xC7\x3B\xD2" + "\x69\x00\x74\x0B\xA2\x16\xAD\x44" + "\xDB\x4F\xE6\x7D\x14\x88\x1F\xB6" + "\x2A\xC1\x58\xEF\x63\xFA\x91\x05" + "\x9C\x33\xCA\x3E\xD5\x6C\x03\x77" + "\x0E\xA5\x19\xB0\x47\xDE\x52\xE9" + "\x80\x17\x8B\x22\xB9\x2D\xC4\x5B" + "\xF2\x66\xFD\x94\x08\x9F\x36\xCD" + "\x41\xD8\x6F\x06\x7A\x11\xA8\x1C" + "\xB3\x4A\xE1\x55\xEC\x83\x1A\x8E" + "\x25\xBC\x30\xC7\x5E\xF5\x69\x00" + "\x97\x0B\xA2\x39\xD0\x44\xDB\x72" + "\x09\x7D\x14\xAB\x1F\xB6\x4D\xE4" + "\x58\xEF\x86\x1D\x91\x28\xBF\x33" + "\xCA\x61\xF8\x6C\x03\x9A\x0E\xA5" + "\x3C\xD3\x47\xDE\x75\x0C\x80\x17" + "\xAE\x22\xB9\x50\xE7\x5B\xF2\x89" + "\x20\x94\x2B\xC2\x36\xCD\x64\xFB" + "\x6F\x06\x9D\x11\xA8\x3F\xD6\x4A" + "\xE1\x78\x0F\x83\x1A\xB1\x25\xBC" + "\x53\xEA\x5E\xF5\x8C\x00\x97\x2E" + "\xC5\x39\xD0\x67\xFE\x72\x09\xA0" + "\x14\xAB\x42\xD9\x4D\xE4\x7B\x12" + "\x86\x1D\xB4\x28\xBF\x56\xED\x61" + "\xF8\x8F\x03\x9A\x31\xC8\x3C\xD3" + "\x6A\x01\x75\x0C\xA3\x17\xAE\x45" + "\xDC\x50\xE7\x7E\x15\x89\x20\xB7", + .ilen = 496, + .result = "\xFF\xC4\x2E\x82\x3D\xF8\xA8\x39" + "\x7C\x52\xC4\xD3\xBB\x62\xC6\xA8" + "\x0C\x63\xA5\x55\xE3\xF8\x1C\x7F" + "\xDC\x59\xF9\xA0\x52\xAD\x83\xDF" + "\xD5\x3B\x53\x4A\xAA\x1F\x49\x44" + "\xE8\x20\xCC\xF8\x97\xE6\xE0\x3C" + "\x5A\xD2\x83\xEC\xEE\x25\x3F\xCF" + "\x0D\xC2\x79\x80\x99\x6E\xFF\x7B" + "\x64\xB0\x7B\x86\x29\x1D\x9F\x17" + "\x10\xA5\xA5\xEB\x16\x55\x9E\xE3" + "\x88\x18\x52\x56\x48\x58\xD1\x6B" + "\xE8\x74\x6E\x48\xB0\x2E\x69\x63" + "\x32\xAA\xAC\x26\x55\x45\x94\xDE" + "\x30\x26\x26\xE6\x08\x82\x2F\x5F" + "\xA7\x15\x94\x07\x75\x2D\xC6\x3A" + "\x1B\xA0\x39\xFB\xBA\xB9\x06\x56" + "\xF6\x9F\xF1\x2F\x9B\xF3\x89\x8B" + "\x08\xC8\x9D\x5E\x6B\x95\x09\xC7" + "\x98\xB7\x62\xA4\x1D\x25\xFA\xC5" + "\x62\xC8\x5D\x6B\xB4\x85\x88\x7F" + "\x3B\x29\xF9\xB4\x32\x62\x69\xBF" + "\x32\xB8\xEB\xFD\x0E\x26\xAA\xA3" + "\x44\x67\x90\x20\xAC\x41\xDF\x43" + "\xC6\xC7\x19\x9F\x2C\x28\x74\xEB" + "\x3E\x7F\x7A\x80\x5B\xE4\x08\x60" + "\xC7\xC9\x71\x34\x44\xCE\x05\xFD" + "\xA8\x91\xA8\x44\x5E\xD3\x89\x2C" + "\xAE\x59\x0F\x07\x88\x79\x53\x26" + "\xAF\xAC\xCB\x1D\x6F\x08\x25\x62" + "\xD0\x82\x65\x66\xE4\x2A\x29\x1C" + "\x9C\x64\x5F\x49\x9D\xF8\x62\xF9" + "\xED\xC4\x13\x52\x75\xDC\xE4\xF9" + "\x68\x0F\x8A\xCD\xA6\x8D\x75\xAA" + "\x49\xA1\x86\x86\x37\x5C\x6B\x3D" + "\x56\xE5\x6F\xBE\x27\xC0\x10\xF8" + "\x3C\x4D\x17\x35\x14\xDC\x1C\xA0" + "\x6E\xAE\xD1\x10\xDD\x83\x06\xC2" + "\x23\xD3\xC7\x27\x15\x04\x2C\x27" + "\xDD\x1F\x2E\x97\x09\x9C\x33\x7D" + "\xAC\x50\x1B\x2E\xC9\x52\x0C\x14" + "\x4B\x78\xC4\xDE\x07\x6A\x12\x02" + "\x6E\xD7\x4B\x91\xB9\x88\x4D\x02" + "\xC3\xB5\x04\xBC\xE0\x67\xCA\x18" + "\x22\xA1\xAE\x9A\x21\xEF\xB2\x06" + "\x35\xCD\xEC\x37\x70\x2D\xFC\x1E" + "\xA8\x31\xE7\xFC\xE5\x8E\x88\x66" + "\x16\xB5\xC8\x45\x21\x37\xBD\x24" + "\xA9\xD5\x36\x12\x9F\x6E\x67\x80" + "\x87\x54\xD5\xAF\x97\xE1\x15\xA7" + "\x11\xF0\x63\x7B\xE1\x44\x14\x1C" + "\x06\x32\x05\x8C\x6C\xDB\x9B\x36" + "\x6A\x6B\xAD\x3A\x27\x55\x20\x4C" + "\x76\x36\x43\xE8\x16\x60\xB5\xF3" + "\xDF\x5A\xC6\xA5\x69\x78\x59\x51" + "\x54\x68\x65\x06\x84\xDE\x3D\xAE" + "\x38\x91\xBD\xCC\xA2\x8A\xEC\xE6" + 
"\x9E\x83\xAE\x1E\x8E\x34\x5D\xDE" + "\x91\xCE\x8F\xED\x40\xF7\xC8\x8B" + "\x9A\x13\x4C\xAD\x89\x97\x9E\xD1" + "\x91\x01\xD7\x21\x23\x28\x1E\xCC" + "\x8C\x98\xDB\xDE\xFC\x72\x94\xAA" + "\xC0\x0D\x96\xAA\x23\xF8\xFE\x13", + .rlen = 496, + }, +}; + +static struct cipher_testvec cast5_ctr_dec_tv_template[] = { + { /* Generated from TF test vectors */ + .key = "\x85\x62\x3F\x1C\xF9\xD6\x1C\xF9" + "\xD6\xB3\x90\x6D\x4A\x90\x6D\x4A", + .klen = 16, + .iv = "\xE2\x24\x89\xEE\x53\xB8\x1D\x5F", + .input = "\xFF\xC4\x2E\x82\x3D\xF8\xA8\x39" + "\x7C\x52\xC4\xD3\xBB\x62\xC6\xA8" + "\x0C\x63\xA5\x55\xE3\xF8\x1C\x7F" + "\xDC\x59\xF9\xA0\x52\xAD\x83\xDF" + "\xD5\x3B\x53\x4A\xAA\x1F\x49\x44" + "\xE8\x20\xCC\xF8\x97\xE6\xE0\x3C" + "\x5A\xD2\x83\xEC\xEE\x25\x3F\xCF" + "\x0D\xC2\x79\x80\x99\x6E\xFF\x7B" + "\x64\xB0\x7B\x86\x29\x1D\x9F\x17" + "\x10\xA5\xA5\xEB\x16\x55\x9E\xE3" + "\x88\x18\x52\x56\x48\x58\xD1\x6B" + "\xE8\x74\x6E\x48\xB0\x2E\x69\x63" + "\x32\xAA\xAC\x26\x55\x45\x94\xDE" + "\x30\x26\x26\xE6\x08\x82\x2F\x5F" + "\xA7\x15\x94\x07\x75\x2D\xC6\x3A" + "\x1B\xA0\x39\xFB\xBA\xB9\x06\x56" + "\xF6\x9F\xF1\x2F\x9B\xF3\x89\x8B" + "\x08\xC8\x9D\x5E\x6B\x95\x09\xC7" + "\x98\xB7\x62\xA4\x1D\x25\xFA\xC5" + "\x62\xC8\x5D\x6B\xB4\x85\x88\x7F" + "\x3B\x29\xF9\xB4\x32\x62\x69\xBF" + "\x32\xB8\xEB\xFD\x0E\x26\xAA\xA3" + "\x44\x67\x90\x20\xAC\x41\xDF\x43" + "\xC6\xC7\x19\x9F\x2C\x28\x74\xEB" + "\x3E\x7F\x7A\x80\x5B\xE4\x08\x60" + "\xC7\xC9\x71\x34\x44\xCE\x05\xFD" + "\xA8\x91\xA8\x44\x5E\xD3\x89\x2C" + "\xAE\x59\x0F\x07\x88\x79\x53\x26" + "\xAF\xAC\xCB\x1D\x6F\x08\x25\x62" + "\xD0\x82\x65\x66\xE4\x2A\x29\x1C" + "\x9C\x64\x5F\x49\x9D\xF8\x62\xF9" + "\xED\xC4\x13\x52\x75\xDC\xE4\xF9" + "\x68\x0F\x8A\xCD\xA6\x8D\x75\xAA" + "\x49\xA1\x86\x86\x37\x5C\x6B\x3D" + "\x56\xE5\x6F\xBE\x27\xC0\x10\xF8" + "\x3C\x4D\x17\x35\x14\xDC\x1C\xA0" + "\x6E\xAE\xD1\x10\xDD\x83\x06\xC2" + "\x23\xD3\xC7\x27\x15\x04\x2C\x27" + "\xDD\x1F\x2E\x97\x09\x9C\x33\x7D" + "\xAC\x50\x1B\x2E\xC9\x52\x0C\x14" + "\x4B\x78\xC4\xDE\x07\x6A\x12\x02" + "\x6E\xD7\x4B\x91\xB9\x88\x4D\x02" + "\xC3\xB5\x04\xBC\xE0\x67\xCA\x18" + "\x22\xA1\xAE\x9A\x21\xEF\xB2\x06" + "\x35\xCD\xEC\x37\x70\x2D\xFC\x1E" + "\xA8\x31\xE7\xFC\xE5\x8E\x88\x66" + "\x16\xB5\xC8\x45\x21\x37\xBD\x24" + "\xA9\xD5\x36\x12\x9F\x6E\x67\x80" + "\x87\x54\xD5\xAF\x97\xE1\x15\xA7" + "\x11\xF0\x63\x7B\xE1\x44\x14\x1C" + "\x06\x32\x05\x8C\x6C\xDB\x9B\x36" + "\x6A\x6B\xAD\x3A\x27\x55\x20\x4C" + "\x76\x36\x43\xE8\x16\x60\xB5\xF3" + "\xDF\x5A\xC6\xA5\x69\x78\x59\x51" + "\x54\x68\x65\x06\x84\xDE\x3D\xAE" + "\x38\x91\xBD\xCC\xA2\x8A\xEC\xE6" + "\x9E\x83\xAE\x1E\x8E\x34\x5D\xDE" + "\x91\xCE\x8F\xED\x40\xF7\xC8\x8B" + "\x9A\x13\x4C\xAD\x89\x97\x9E\xD1" + "\x91\x01\xD7\x21\x23\x28\x1E\xCC" + "\x8C\x98\xDB\xDE\xFC\x72\x94\xAA" + "\xC0\x0D\x96\xAA\x23\xF8\xFE\x13", + .ilen = 496, + .result = "\x56\xED\x84\x1B\x8F\x26\xBD\x31" + "\xC8\x5F\xF6\x6A\x01\x98\x0C\xA3" + "\x3A\xD1\x45\xDC\x73\x0A\x7E\x15" + "\xAC\x20\xB7\x4E\xE5\x59\xF0\x87" + "\x1E\x92\x29\xC0\x34\xCB\x62\xF9" + "\x6D\x04\x9B\x0F\xA6\x3D\xD4\x48" + "\xDF\x76\x0D\x81\x18\xAF\x23\xBA" + "\x51\xE8\x5C\xF3\x8A\x21\x95\x2C" + "\xC3\x37\xCE\x65\xFC\x70\x07\x9E" + "\x12\xA9\x40\xD7\x4B\xE2\x79\x10" + "\x84\x1B\xB2\x26\xBD\x54\xEB\x5F" + "\xF6\x8D\x01\x98\x2F\xC6\x3A\xD1" + "\x68\xFF\x73\x0A\xA1\x15\xAC\x43" + "\xDA\x4E\xE5\x7C\x13\x87\x1E\xB5" + "\x29\xC0\x57\xEE\x62\xF9\x90\x04" + "\x9B\x32\xC9\x3D\xD4\x6B\x02\x76" + "\x0D\xA4\x18\xAF\x46\xDD\x51\xE8" + "\x7F\x16\x8A\x21\xB8\x2C\xC3\x5A" + "\xF1\x65\xFC\x93\x07\x9E\x35\xCC" + 
"\x40\xD7\x6E\x05\x79\x10\xA7\x1B" + "\xB2\x49\xE0\x54\xEB\x82\x19\x8D" + "\x24\xBB\x2F\xC6\x5D\xF4\x68\xFF" + "\x96\x0A\xA1\x38\xCF\x43\xDA\x71" + "\x08\x7C\x13\xAA\x1E\xB5\x4C\xE3" + "\x57\xEE\x85\x1C\x90\x27\xBE\x32" + "\xC9\x60\xF7\x6B\x02\x99\x0D\xA4" + "\x3B\xD2\x46\xDD\x74\x0B\x7F\x16" + "\xAD\x21\xB8\x4F\xE6\x5A\xF1\x88" + "\x1F\x93\x2A\xC1\x35\xCC\x63\xFA" + "\x6E\x05\x9C\x10\xA7\x3E\xD5\x49" + "\xE0\x77\x0E\x82\x19\xB0\x24\xBB" + "\x52\xE9\x5D\xF4\x8B\x22\x96\x2D" + "\xC4\x38\xCF\x66\xFD\x71\x08\x9F" + "\x13\xAA\x41\xD8\x4C\xE3\x7A\x11" + "\x85\x1C\xB3\x27\xBE\x55\xEC\x60" + "\xF7\x8E\x02\x99\x30\xC7\x3B\xD2" + "\x69\x00\x74\x0B\xA2\x16\xAD\x44" + "\xDB\x4F\xE6\x7D\x14\x88\x1F\xB6" + "\x2A\xC1\x58\xEF\x63\xFA\x91\x05" + "\x9C\x33\xCA\x3E\xD5\x6C\x03\x77" + "\x0E\xA5\x19\xB0\x47\xDE\x52\xE9" + "\x80\x17\x8B\x22\xB9\x2D\xC4\x5B" + "\xF2\x66\xFD\x94\x08\x9F\x36\xCD" + "\x41\xD8\x6F\x06\x7A\x11\xA8\x1C" + "\xB3\x4A\xE1\x55\xEC\x83\x1A\x8E" + "\x25\xBC\x30\xC7\x5E\xF5\x69\x00" + "\x97\x0B\xA2\x39\xD0\x44\xDB\x72" + "\x09\x7D\x14\xAB\x1F\xB6\x4D\xE4" + "\x58\xEF\x86\x1D\x91\x28\xBF\x33" + "\xCA\x61\xF8\x6C\x03\x9A\x0E\xA5" + "\x3C\xD3\x47\xDE\x75\x0C\x80\x17" + "\xAE\x22\xB9\x50\xE7\x5B\xF2\x89" + "\x20\x94\x2B\xC2\x36\xCD\x64\xFB" + "\x6F\x06\x9D\x11\xA8\x3F\xD6\x4A" + "\xE1\x78\x0F\x83\x1A\xB1\x25\xBC" + "\x53\xEA\x5E\xF5\x8C\x00\x97\x2E" + "\xC5\x39\xD0\x67\xFE\x72\x09\xA0" + "\x14\xAB\x42\xD9\x4D\xE4\x7B\x12" + "\x86\x1D\xB4\x28\xBF\x56\xED\x61" + "\xF8\x8F\x03\x9A\x31\xC8\x3C\xD3" + "\x6A\x01\x75\x0C\xA3\x17\xAE\x45" + "\xDC\x50\xE7\x7E\x15\x89\x20\xB7", + .rlen = 496, }, }; -- cgit v1.2.3 From 4d6d6a2c850f89bc9283d02519cb536baba72032 Mon Sep 17 00:00:00 2001 From: Johannes Goetzfried Date: Wed, 11 Jul 2012 19:37:37 +0200 Subject: crypto: cast5 - add x86_64/avx assembler implementation This patch adds a x86_64/avx assembler implementation of the Cast5 block cipher. The implementation processes sixteen blocks in parallel (four 4 block chunk AVX operations). The table-lookups are done in general-purpose registers. For small blocksizes the functions from the generic module are called. A good performance increase is provided for blocksizes greater or equal to 128B. Patch has been tested with tcrypt and automated filesystem tests. Tcrypt benchmark results: Intel Core i5-2500 CPU (fam:6, model:42, step:7) cast5-avx-x86_64 vs. 
cast5-generic

64bit key:
size    ecb-enc ecb-dec cbc-enc cbc-dec ctr-enc ctr-dec
16B     0.99x   0.99x   1.00x   1.00x   1.02x   1.01x
64B     1.00x   1.00x   0.98x   1.00x   1.01x   1.02x
256B    2.03x   2.01x   0.95x   2.11x   2.12x   2.13x
1024B   2.30x   2.24x   0.95x   2.29x   2.35x   2.35x
8192B   2.31x   2.27x   0.95x   2.31x   2.39x   2.39x

128bit key:
size    ecb-enc ecb-dec cbc-enc cbc-dec ctr-enc ctr-dec
16B     0.99x   0.99x   1.00x   1.00x   1.01x   1.01x
64B     1.00x   1.00x   0.98x   1.01x   1.02x   1.01x
256B    2.17x   2.13x   0.96x   2.19x   2.19x   2.19x
1024B   2.29x   2.32x   0.95x   2.34x   2.37x   2.38x
8192B   2.35x   2.32x   0.95x   2.35x   2.39x   2.39x

Signed-off-by: Johannes Goetzfried Signed-off-by: Herbert Xu --- arch/x86/crypto/Makefile | 2 + arch/x86/crypto/cast5-avx-x86_64-asm_64.S | 322 ++++++++++++++++++ arch/x86/crypto/cast5_avx_glue.c | 530 ++++++++++++++++++++++++++++++ crypto/Kconfig | 14 + crypto/testmgr.c | 60 ++++ 5 files changed, 928 insertions(+) create mode 100644 arch/x86/crypto/cast5-avx-x86_64-asm_64.S create mode 100644 arch/x86/crypto/cast5_avx_glue.c (limited to 'crypto') diff --git a/arch/x86/crypto/Makefile b/arch/x86/crypto/Makefile index e908e5de82d3..565e82b00142 100644 --- a/arch/x86/crypto/Makefile +++ b/arch/x86/crypto/Makefile @@ -12,6 +12,7 @@ obj-$(CONFIG_CRYPTO_SERPENT_SSE2_586) += serpent-sse2-i586.o obj-$(CONFIG_CRYPTO_AES_X86_64) += aes-x86_64.o obj-$(CONFIG_CRYPTO_CAMELLIA_X86_64) += camellia-x86_64.o +obj-$(CONFIG_CRYPTO_CAST5_AVX_X86_64) += cast5-avx-x86_64.o obj-$(CONFIG_CRYPTO_BLOWFISH_X86_64) += blowfish-x86_64.o obj-$(CONFIG_CRYPTO_TWOFISH_X86_64) += twofish-x86_64.o obj-$(CONFIG_CRYPTO_TWOFISH_X86_64_3WAY) += twofish-x86_64-3way.o @@ -32,6 +33,7 @@ serpent-sse2-i586-y := serpent-sse2-i586-asm_32.o serpent_sse2_glue.o aes-x86_64-y := aes-x86_64-asm_64.o aes_glue.o camellia-x86_64-y := camellia-x86_64-asm_64.o camellia_glue.o +cast5-avx-x86_64-y := cast5-avx-x86_64-asm_64.o cast5_avx_glue.o blowfish-x86_64-y := blowfish-x86_64-asm_64.o blowfish_glue.o twofish-x86_64-y := twofish-x86_64-asm_64.o twofish_glue.o twofish-x86_64-3way-y := twofish-x86_64-asm_64-3way.o twofish_glue_3way.o diff --git a/arch/x86/crypto/cast5-avx-x86_64-asm_64.S b/arch/x86/crypto/cast5-avx-x86_64-asm_64.S new file mode 100644 index 000000000000..94693c877e3b --- /dev/null +++ b/arch/x86/crypto/cast5-avx-x86_64-asm_64.S @@ -0,0 +1,322 @@ +/* + * Cast5 Cipher 16-way parallel algorithm (AVX/x86_64) + * + * Copyright (C) 2012 Johannes Goetzfried + * + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; either version 2 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details.
+ * + * You should have received a copy of the GNU General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 + * USA + * + */ + +.file "cast5-avx-x86_64-asm_64.S" +.text + +.extern cast5_s1 +.extern cast5_s2 +.extern cast5_s3 +.extern cast5_s4 + +/* structure of crypto context */ +#define km 0 +#define kr (16*4) +#define rr ((16*4)+16) + +/* s-boxes */ +#define s1 cast5_s1 +#define s2 cast5_s2 +#define s3 cast5_s3 +#define s4 cast5_s4 + +/********************************************************************** + 16-way AVX cast5 + **********************************************************************/ +#define CTX %rdi + +#define RL1 %xmm0 +#define RR1 %xmm1 +#define RL2 %xmm2 +#define RR2 %xmm3 +#define RL3 %xmm4 +#define RR3 %xmm5 +#define RL4 %xmm6 +#define RR4 %xmm7 + +#define RX %xmm8 + +#define RKM %xmm9 +#define RKRF %xmm10 +#define RKRR %xmm11 + +#define RTMP %xmm12 +#define RMASK %xmm13 +#define R32 %xmm14 + +#define RID1 %rax +#define RID1b %al +#define RID2 %rbx +#define RID2b %bl + +#define RGI1 %rdx +#define RGI1bl %dl +#define RGI1bh %dh +#define RGI2 %rcx +#define RGI2bl %cl +#define RGI2bh %ch + +#define RFS1 %r8 +#define RFS1d %r8d +#define RFS2 %r9 +#define RFS2d %r9d +#define RFS3 %r10 +#define RFS3d %r10d + + +#define lookup_32bit(src, dst, op1, op2, op3) \ + movb src ## bl, RID1b; \ + movb src ## bh, RID2b; \ + movl s1(, RID1, 4), dst ## d; \ + op1 s2(, RID2, 4), dst ## d; \ + shrq $16, src; \ + movb src ## bl, RID1b; \ + movb src ## bh, RID2b; \ + op2 s3(, RID1, 4), dst ## d; \ + op3 s4(, RID2, 4), dst ## d; + +#define F(a, x, op0, op1, op2, op3) \ + op0 a, RKM, x; \ + vpslld RKRF, x, RTMP; \ + vpsrld RKRR, x, x; \ + vpor RTMP, x, x; \ + \ + vpshufb RMASK, x, x; \ + vmovq x, RGI1; \ + vpsrldq $8, x, x; \ + vmovq x, RGI2; \ + \ + lookup_32bit(RGI1, RFS1, op1, op2, op3); \ + shrq $16, RGI1; \ + lookup_32bit(RGI1, RFS2, op1, op2, op3); \ + shlq $32, RFS2; \ + orq RFS1, RFS2; \ + \ + lookup_32bit(RGI2, RFS1, op1, op2, op3); \ + shrq $16, RGI2; \ + lookup_32bit(RGI2, RFS3, op1, op2, op3); \ + shlq $32, RFS3; \ + orq RFS1, RFS3; \ + \ + vmovq RFS2, x; \ + vpinsrq $1, RFS3, x, x; + +#define F1(b, x) F(b, x, vpaddd, xorl, subl, addl) +#define F2(b, x) F(b, x, vpxor, subl, addl, xorl) +#define F3(b, x) F(b, x, vpsubd, addl, xorl, subl) + +#define subround(a, b, x, n, f) \ + F ## f(b, x); \ + vpxor a, x, a; + +#define round(l, r, n, f) \ + vbroadcastss (km+(4*n))(CTX), RKM; \ + vpinsrb $0, (kr+n)(CTX), RKRF, RKRF; \ + vpsubq RKRF, R32, RKRR; \ + subround(l ## 1, r ## 1, RX, n, f); \ + subround(l ## 2, r ## 2, RX, n, f); \ + subround(l ## 3, r ## 3, RX, n, f); \ + subround(l ## 4, r ## 4, RX, n, f); + + +#define transpose_2x4(x0, x1, t0, t1) \ + vpunpckldq x1, x0, t0; \ + vpunpckhdq x1, x0, t1; \ + \ + vpunpcklqdq t1, t0, x0; \ + vpunpckhqdq t1, t0, x1; + +#define inpack_blocks(in, x0, x1, t0, t1) \ + vmovdqu (0*4*4)(in), x0; \ + vmovdqu (1*4*4)(in), x1; \ + vpshufb RMASK, x0, x0; \ + vpshufb RMASK, x1, x1; \ + \ + transpose_2x4(x0, x1, t0, t1) + +#define outunpack_blocks(out, x0, x1, t0, t1) \ + transpose_2x4(x0, x1, t0, t1) \ + \ + vpshufb RMASK, x0, x0; \ + vpshufb RMASK, x1, x1; \ + vmovdqu x0, (0*4*4)(out); \ + vmovdqu x1, (1*4*4)(out); + +#define outunpack_xor_blocks(out, x0, x1, t0, t1) \ + transpose_2x4(x0, x1, t0, t1) \ + \ + vpshufb RMASK, x0, x0; \ + vpshufb RMASK, x1, x1; \ + vpxor (0*4*4)(out), x0, x0; \ + vmovdqu x0, (0*4*4)(out); \ + vpxor (1*4*4)(out), x1, x1; \ + 
vmovdqu x1, (1*4*4)(out); + +.align 16 +.Lbswap_mask: + .byte 3, 2, 1, 0, 7, 6, 5, 4, 11, 10, 9, 8, 15, 14, 13, 12 +.L32_mask: + .byte 32, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ,0, 0, 0, 0, 0 + +.align 16 +.global __cast5_enc_blk_16way +.type __cast5_enc_blk_16way,@function; + +__cast5_enc_blk_16way: + /* input: + * %rdi: ctx, CTX + * %rsi: dst + * %rdx: src + * %rcx: bool, if true: xor output + */ + + pushq %rbx; + pushq %rcx; + + vmovdqu .Lbswap_mask, RMASK; + vmovdqu .L32_mask, R32; + vpxor RKRF, RKRF, RKRF; + + inpack_blocks(%rdx, RL1, RR1, RTMP, RX); + leaq (2*4*4)(%rdx), %rax; + inpack_blocks(%rax, RL2, RR2, RTMP, RX); + leaq (2*4*4)(%rax), %rax; + inpack_blocks(%rax, RL3, RR3, RTMP, RX); + leaq (2*4*4)(%rax), %rax; + inpack_blocks(%rax, RL4, RR4, RTMP, RX); + + xorq RID1, RID1; + xorq RID2, RID2; + + round(RL, RR, 0, 1); + round(RR, RL, 1, 2); + round(RL, RR, 2, 3); + round(RR, RL, 3, 1); + round(RL, RR, 4, 2); + round(RR, RL, 5, 3); + round(RL, RR, 6, 1); + round(RR, RL, 7, 2); + round(RL, RR, 8, 3); + round(RR, RL, 9, 1); + round(RL, RR, 10, 2); + round(RR, RL, 11, 3); + + movb rr(CTX), %al; + testb %al, %al; + jnz __skip_enc; + + round(RL, RR, 12, 1); + round(RR, RL, 13, 2); + round(RL, RR, 14, 3); + round(RR, RL, 15, 1); + +__skip_enc: + popq %rcx; + popq %rbx; + + testb %cl, %cl; + jnz __enc_xor16; + + outunpack_blocks(%rsi, RR1, RL1, RTMP, RX); + leaq (2*4*4)(%rsi), %rax; + outunpack_blocks(%rax, RR2, RL2, RTMP, RX); + leaq (2*4*4)(%rax), %rax; + outunpack_blocks(%rax, RR3, RL3, RTMP, RX); + leaq (2*4*4)(%rax), %rax; + outunpack_blocks(%rax, RR4, RL4, RTMP, RX); + + ret; + +__enc_xor16: + outunpack_xor_blocks(%rsi, RR1, RL1, RTMP, RX); + leaq (2*4*4)(%rsi), %rax; + outunpack_xor_blocks(%rax, RR2, RL2, RTMP, RX); + leaq (2*4*4)(%rax), %rax; + outunpack_xor_blocks(%rax, RR3, RL3, RTMP, RX); + leaq (2*4*4)(%rax), %rax; + outunpack_xor_blocks(%rax, RR4, RL4, RTMP, RX); + + ret; + +.align 16 +.global cast5_dec_blk_16way +.type cast5_dec_blk_16way,@function; + +cast5_dec_blk_16way: + /* input: + * %rdi: ctx, CTX + * %rsi: dst + * %rdx: src + */ + + pushq %rbx; + + vmovdqu .Lbswap_mask, RMASK; + vmovdqu .L32_mask, R32; + vpxor RKRF, RKRF, RKRF; + + inpack_blocks(%rdx, RL1, RR1, RTMP, RX); + leaq (2*4*4)(%rdx), %rax; + inpack_blocks(%rax, RL2, RR2, RTMP, RX); + leaq (2*4*4)(%rax), %rax; + inpack_blocks(%rax, RL3, RR3, RTMP, RX); + leaq (2*4*4)(%rax), %rax; + inpack_blocks(%rax, RL4, RR4, RTMP, RX); + + xorq RID1, RID1; + xorq RID2, RID2; + + movb rr(CTX), %al; + testb %al, %al; + jnz __skip_dec; + + round(RL, RR, 15, 1); + round(RR, RL, 14, 3); + round(RL, RR, 13, 2); + round(RR, RL, 12, 1); + +__skip_dec: + round(RL, RR, 11, 3); + round(RR, RL, 10, 2); + round(RL, RR, 9, 1); + round(RR, RL, 8, 3); + round(RL, RR, 7, 2); + round(RR, RL, 6, 1); + round(RL, RR, 5, 3); + round(RR, RL, 4, 2); + round(RL, RR, 3, 1); + round(RR, RL, 2, 3); + round(RL, RR, 1, 2); + round(RR, RL, 0, 1); + + popq %rbx; + + outunpack_blocks(%rsi, RR1, RL1, RTMP, RX); + leaq (2*4*4)(%rsi), %rax; + outunpack_blocks(%rax, RR2, RL2, RTMP, RX); + leaq (2*4*4)(%rax), %rax; + outunpack_blocks(%rax, RR3, RL3, RTMP, RX); + leaq (2*4*4)(%rax), %rax; + outunpack_blocks(%rax, RR4, RL4, RTMP, RX); + + ret; diff --git a/arch/x86/crypto/cast5_avx_glue.c b/arch/x86/crypto/cast5_avx_glue.c new file mode 100644 index 000000000000..445aab06387b --- /dev/null +++ b/arch/x86/crypto/cast5_avx_glue.c @@ -0,0 +1,530 @@ +/* + * Glue Code for the AVX assembler implemention of the Cast5 Cipher + * + * Copyright (C) 2012 Johannes Goetzfried + * + 
* + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; either version 2 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 + * USA + * + */ + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#define CAST5_PARALLEL_BLOCKS 16 + +asmlinkage void __cast5_enc_blk_16way(struct cast5_ctx *ctx, u8 *dst, + const u8 *src, bool xor); +asmlinkage void cast5_dec_blk_16way(struct cast5_ctx *ctx, u8 *dst, + const u8 *src); + +static inline void cast5_enc_blk_xway(struct cast5_ctx *ctx, u8 *dst, + const u8 *src) +{ + __cast5_enc_blk_16way(ctx, dst, src, false); +} + +static inline void cast5_enc_blk_xway_xor(struct cast5_ctx *ctx, u8 *dst, + const u8 *src) +{ + __cast5_enc_blk_16way(ctx, dst, src, true); +} + +static inline void cast5_dec_blk_xway(struct cast5_ctx *ctx, u8 *dst, + const u8 *src) +{ + cast5_dec_blk_16way(ctx, dst, src); +} + + +static inline bool cast5_fpu_begin(bool fpu_enabled, unsigned int nbytes) +{ + return glue_fpu_begin(CAST5_BLOCK_SIZE, CAST5_PARALLEL_BLOCKS, + NULL, fpu_enabled, nbytes); +} + +static inline void cast5_fpu_end(bool fpu_enabled) +{ + return glue_fpu_end(fpu_enabled); +} + +static int ecb_crypt(struct blkcipher_desc *desc, struct blkcipher_walk *walk, + bool enc) +{ + bool fpu_enabled = false; + struct cast5_ctx *ctx = crypto_blkcipher_ctx(desc->tfm); + const unsigned int bsize = CAST5_BLOCK_SIZE; + unsigned int nbytes; + int err; + + err = blkcipher_walk_virt(desc, walk); + desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP; + + while ((nbytes = walk->nbytes)) { + u8 *wsrc = walk->src.virt.addr; + u8 *wdst = walk->dst.virt.addr; + + fpu_enabled = cast5_fpu_begin(fpu_enabled, nbytes); + + /* Process multi-block batch */ + if (nbytes >= bsize * CAST5_PARALLEL_BLOCKS) { + do { + if (enc) + cast5_enc_blk_xway(ctx, wdst, wsrc); + else + cast5_dec_blk_xway(ctx, wdst, wsrc); + + wsrc += bsize * CAST5_PARALLEL_BLOCKS; + wdst += bsize * CAST5_PARALLEL_BLOCKS; + nbytes -= bsize * CAST5_PARALLEL_BLOCKS; + } while (nbytes >= bsize * CAST5_PARALLEL_BLOCKS); + + if (nbytes < bsize) + goto done; + } + + /* Handle leftovers */ + do { + if (enc) + __cast5_encrypt(ctx, wdst, wsrc); + else + __cast5_decrypt(ctx, wdst, wsrc); + + wsrc += bsize; + wdst += bsize; + nbytes -= bsize; + } while (nbytes >= bsize); + +done: + err = blkcipher_walk_done(desc, walk, nbytes); + } + + cast5_fpu_end(fpu_enabled); + return err; +} + +static int ecb_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst, + struct scatterlist *src, unsigned int nbytes) +{ + struct blkcipher_walk walk; + + blkcipher_walk_init(&walk, dst, src, nbytes); + return ecb_crypt(desc, &walk, true); +} + +static int ecb_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst, + struct scatterlist *src, unsigned int nbytes) +{ + struct blkcipher_walk walk; + + blkcipher_walk_init(&walk, dst, src, nbytes); + return ecb_crypt(desc, &walk, false); +} + +static 
unsigned int __cbc_encrypt(struct blkcipher_desc *desc, + struct blkcipher_walk *walk) +{ + struct cast5_ctx *ctx = crypto_blkcipher_ctx(desc->tfm); + const unsigned int bsize = CAST5_BLOCK_SIZE; + unsigned int nbytes = walk->nbytes; + u64 *src = (u64 *)walk->src.virt.addr; + u64 *dst = (u64 *)walk->dst.virt.addr; + u64 *iv = (u64 *)walk->iv; + + do { + *dst = *src ^ *iv; + __cast5_encrypt(ctx, (u8 *)dst, (u8 *)dst); + iv = dst; + + src += 1; + dst += 1; + nbytes -= bsize; + } while (nbytes >= bsize); + + *(u64 *)walk->iv ^= *iv; + return nbytes; +} + +static int cbc_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst, + struct scatterlist *src, unsigned int nbytes) +{ + struct blkcipher_walk walk; + int err; + + blkcipher_walk_init(&walk, dst, src, nbytes); + err = blkcipher_walk_virt(desc, &walk); + + while ((nbytes = walk.nbytes)) { + nbytes = __cbc_encrypt(desc, &walk); + err = blkcipher_walk_done(desc, &walk, nbytes); + } + + return err; +} + +static unsigned int __cbc_decrypt(struct blkcipher_desc *desc, + struct blkcipher_walk *walk) +{ + struct cast5_ctx *ctx = crypto_blkcipher_ctx(desc->tfm); + const unsigned int bsize = CAST5_BLOCK_SIZE; + unsigned int nbytes = walk->nbytes; + u64 *src = (u64 *)walk->src.virt.addr; + u64 *dst = (u64 *)walk->dst.virt.addr; + u64 ivs[CAST5_PARALLEL_BLOCKS - 1]; + u64 last_iv; + int i; + + /* Start of the last block. */ + src += nbytes / bsize - 1; + dst += nbytes / bsize - 1; + + last_iv = *src; + + /* Process multi-block batch */ + if (nbytes >= bsize * CAST5_PARALLEL_BLOCKS) { + do { + nbytes -= bsize * (CAST5_PARALLEL_BLOCKS - 1); + src -= CAST5_PARALLEL_BLOCKS - 1; + dst -= CAST5_PARALLEL_BLOCKS - 1; + + for (i = 0; i < CAST5_PARALLEL_BLOCKS - 1; i++) + ivs[i] = src[i]; + + cast5_dec_blk_xway(ctx, (u8 *)dst, (u8 *)src); + + for (i = 0; i < CAST5_PARALLEL_BLOCKS - 1; i++) + *(dst + (i + 1)) ^= *(ivs + i); + + nbytes -= bsize; + if (nbytes < bsize) + goto done; + + *dst ^= *(src - 1); + src -= 1; + dst -= 1; + } while (nbytes >= bsize * CAST5_PARALLEL_BLOCKS); + + if (nbytes < bsize) + goto done; + } + + /* Handle leftovers */ + for (;;) { + __cast5_decrypt(ctx, (u8 *)dst, (u8 *)src); + + nbytes -= bsize; + if (nbytes < bsize) + break; + + *dst ^= *(src - 1); + src -= 1; + dst -= 1; + } + +done: + *dst ^= *(u64 *)walk->iv; + *(u64 *)walk->iv = last_iv; + + return nbytes; +} + +static int cbc_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst, + struct scatterlist *src, unsigned int nbytes) +{ + bool fpu_enabled = false; + struct blkcipher_walk walk; + int err; + + blkcipher_walk_init(&walk, dst, src, nbytes); + err = blkcipher_walk_virt(desc, &walk); + desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP; + + while ((nbytes = walk.nbytes)) { + fpu_enabled = cast5_fpu_begin(fpu_enabled, nbytes); + nbytes = __cbc_decrypt(desc, &walk); + err = blkcipher_walk_done(desc, &walk, nbytes); + } + + cast5_fpu_end(fpu_enabled); + return err; +} + +static void ctr_crypt_final(struct blkcipher_desc *desc, + struct blkcipher_walk *walk) +{ + struct cast5_ctx *ctx = crypto_blkcipher_ctx(desc->tfm); + u8 *ctrblk = walk->iv; + u8 keystream[CAST5_BLOCK_SIZE]; + u8 *src = walk->src.virt.addr; + u8 *dst = walk->dst.virt.addr; + unsigned int nbytes = walk->nbytes; + + __cast5_encrypt(ctx, keystream, ctrblk); + crypto_xor(keystream, src, nbytes); + memcpy(dst, keystream, nbytes); + + crypto_inc(ctrblk, CAST5_BLOCK_SIZE); +} + +static unsigned int __ctr_crypt(struct blkcipher_desc *desc, + struct blkcipher_walk *walk) +{ + struct cast5_ctx *ctx = 
crypto_blkcipher_ctx(desc->tfm); + const unsigned int bsize = CAST5_BLOCK_SIZE; + unsigned int nbytes = walk->nbytes; + u64 *src = (u64 *)walk->src.virt.addr; + u64 *dst = (u64 *)walk->dst.virt.addr; + u64 ctrblk = be64_to_cpu(*(__be64 *)walk->iv); + __be64 ctrblocks[CAST5_PARALLEL_BLOCKS]; + int i; + + /* Process multi-block batch */ + if (nbytes >= bsize * CAST5_PARALLEL_BLOCKS) { + do { + /* create ctrblks for parallel encrypt */ + for (i = 0; i < CAST5_PARALLEL_BLOCKS; i++) { + if (dst != src) + dst[i] = src[i]; + + ctrblocks[i] = cpu_to_be64(ctrblk++); + } + + cast5_enc_blk_xway_xor(ctx, (u8 *)dst, + (u8 *)ctrblocks); + + src += CAST5_PARALLEL_BLOCKS; + dst += CAST5_PARALLEL_BLOCKS; + nbytes -= bsize * CAST5_PARALLEL_BLOCKS; + } while (nbytes >= bsize * CAST5_PARALLEL_BLOCKS); + + if (nbytes < bsize) + goto done; + } + + /* Handle leftovers */ + do { + if (dst != src) + *dst = *src; + + ctrblocks[0] = cpu_to_be64(ctrblk++); + + __cast5_encrypt(ctx, (u8 *)ctrblocks, (u8 *)ctrblocks); + *dst ^= ctrblocks[0]; + + src += 1; + dst += 1; + nbytes -= bsize; + } while (nbytes >= bsize); + +done: + *(__be64 *)walk->iv = cpu_to_be64(ctrblk); + return nbytes; +} + +static int ctr_crypt(struct blkcipher_desc *desc, struct scatterlist *dst, + struct scatterlist *src, unsigned int nbytes) +{ + bool fpu_enabled = false; + struct blkcipher_walk walk; + int err; + + blkcipher_walk_init(&walk, dst, src, nbytes); + err = blkcipher_walk_virt_block(desc, &walk, CAST5_BLOCK_SIZE); + desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP; + + while ((nbytes = walk.nbytes) >= CAST5_BLOCK_SIZE) { + fpu_enabled = cast5_fpu_begin(fpu_enabled, nbytes); + nbytes = __ctr_crypt(desc, &walk); + err = blkcipher_walk_done(desc, &walk, nbytes); + } + + cast5_fpu_end(fpu_enabled); + + if (walk.nbytes) { + ctr_crypt_final(desc, &walk); + err = blkcipher_walk_done(desc, &walk, 0); + } + + return err; +} + + +static struct crypto_alg cast5_algs[6] = { { + .cra_name = "__ecb-cast5-avx", + .cra_driver_name = "__driver-ecb-cast5-avx", + .cra_priority = 0, + .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER, + .cra_blocksize = CAST5_BLOCK_SIZE, + .cra_ctxsize = sizeof(struct cast5_ctx), + .cra_alignmask = 0, + .cra_type = &crypto_blkcipher_type, + .cra_module = THIS_MODULE, + .cra_u = { + .blkcipher = { + .min_keysize = CAST5_MIN_KEY_SIZE, + .max_keysize = CAST5_MAX_KEY_SIZE, + .setkey = cast5_setkey, + .encrypt = ecb_encrypt, + .decrypt = ecb_decrypt, + }, + }, +}, { + .cra_name = "__cbc-cast5-avx", + .cra_driver_name = "__driver-cbc-cast5-avx", + .cra_priority = 0, + .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER, + .cra_blocksize = CAST5_BLOCK_SIZE, + .cra_ctxsize = sizeof(struct cast5_ctx), + .cra_alignmask = 0, + .cra_type = &crypto_blkcipher_type, + .cra_module = THIS_MODULE, + .cra_u = { + .blkcipher = { + .min_keysize = CAST5_MIN_KEY_SIZE, + .max_keysize = CAST5_MAX_KEY_SIZE, + .setkey = cast5_setkey, + .encrypt = cbc_encrypt, + .decrypt = cbc_decrypt, + }, + }, +}, { + .cra_name = "__ctr-cast5-avx", + .cra_driver_name = "__driver-ctr-cast5-avx", + .cra_priority = 0, + .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER, + .cra_blocksize = 1, + .cra_ctxsize = sizeof(struct cast5_ctx), + .cra_alignmask = 0, + .cra_type = &crypto_blkcipher_type, + .cra_module = THIS_MODULE, + .cra_u = { + .blkcipher = { + .min_keysize = CAST5_MIN_KEY_SIZE, + .max_keysize = CAST5_MAX_KEY_SIZE, + .ivsize = CAST5_BLOCK_SIZE, + .setkey = cast5_setkey, + .encrypt = ctr_crypt, + .decrypt = ctr_crypt, + }, + }, +}, { + .cra_name = "ecb(cast5)", + .cra_driver_name = "ecb-cast5-avx", + 
.cra_priority = 200, + .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC, + .cra_blocksize = CAST5_BLOCK_SIZE, + .cra_ctxsize = sizeof(struct async_helper_ctx), + .cra_alignmask = 0, + .cra_type = &crypto_ablkcipher_type, + .cra_module = THIS_MODULE, + .cra_init = ablk_init, + .cra_exit = ablk_exit, + .cra_u = { + .ablkcipher = { + .min_keysize = CAST5_MIN_KEY_SIZE, + .max_keysize = CAST5_MAX_KEY_SIZE, + .setkey = ablk_set_key, + .encrypt = ablk_encrypt, + .decrypt = ablk_decrypt, + }, + }, +}, { + .cra_name = "cbc(cast5)", + .cra_driver_name = "cbc-cast5-avx", + .cra_priority = 200, + .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC, + .cra_blocksize = CAST5_BLOCK_SIZE, + .cra_ctxsize = sizeof(struct async_helper_ctx), + .cra_alignmask = 0, + .cra_type = &crypto_ablkcipher_type, + .cra_module = THIS_MODULE, + .cra_init = ablk_init, + .cra_exit = ablk_exit, + .cra_u = { + .ablkcipher = { + .min_keysize = CAST5_MIN_KEY_SIZE, + .max_keysize = CAST5_MAX_KEY_SIZE, + .ivsize = CAST5_BLOCK_SIZE, + .setkey = ablk_set_key, + .encrypt = __ablk_encrypt, + .decrypt = ablk_decrypt, + }, + }, +}, { + .cra_name = "ctr(cast5)", + .cra_driver_name = "ctr-cast5-avx", + .cra_priority = 200, + .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC, + .cra_blocksize = 1, + .cra_ctxsize = sizeof(struct async_helper_ctx), + .cra_alignmask = 0, + .cra_type = &crypto_ablkcipher_type, + .cra_module = THIS_MODULE, + .cra_init = ablk_init, + .cra_exit = ablk_exit, + .cra_u = { + .ablkcipher = { + .min_keysize = CAST5_MIN_KEY_SIZE, + .max_keysize = CAST5_MAX_KEY_SIZE, + .ivsize = CAST5_BLOCK_SIZE, + .setkey = ablk_set_key, + .encrypt = ablk_encrypt, + .decrypt = ablk_encrypt, + .geniv = "chainiv", + }, + }, +} }; + +static int __init cast5_init(void) +{ + u64 xcr0; + + if (!cpu_has_avx || !cpu_has_osxsave) { + pr_info("AVX instructions are not detected.\n"); + return -ENODEV; + } + + xcr0 = xgetbv(XCR_XFEATURE_ENABLED_MASK); + if ((xcr0 & (XSTATE_SSE | XSTATE_YMM)) != (XSTATE_SSE | XSTATE_YMM)) { + pr_info("AVX detected but unusable.\n"); + return -ENODEV; + } + + return crypto_register_algs(cast5_algs, ARRAY_SIZE(cast5_algs)); +} + +static void __exit cast5_exit(void) +{ + crypto_unregister_algs(cast5_algs, ARRAY_SIZE(cast5_algs)); +} + +module_init(cast5_init); +module_exit(cast5_exit); + +MODULE_DESCRIPTION("Cast5 Cipher Algorithm, AVX optimized"); +MODULE_LICENSE("GPL"); +MODULE_ALIAS("cast5"); diff --git a/crypto/Kconfig b/crypto/Kconfig index a3238051b03e..cda97fcaa822 100644 --- a/crypto/Kconfig +++ b/crypto/Kconfig @@ -692,6 +692,20 @@ config CRYPTO_CAST5 The CAST5 encryption algorithm (synonymous with CAST-128) is described in RFC2144. +config CRYPTO_CAST5_AVX_X86_64 + tristate "CAST5 (CAST-128) cipher algorithm (x86_64/AVX)" + depends on X86 && 64BIT + select CRYPTO_ALGAPI + select CRYPTO_CRYPTD + select CRYPTO_ABLK_HELPER_X86 + select CRYPTO_CAST5 + help + The CAST5 encryption algorithm (synonymous with CAST-128) is + described in RFC2144. + + This module provides the Cast5 cipher algorithm that processes + sixteen blocks parallel using the AVX instruction set. + config CRYPTO_CAST6 tristate "CAST6 (CAST-256) cipher algorithm" select CRYPTO_ALGAPI diff --git a/crypto/testmgr.c b/crypto/testmgr.c index 7a91e540563f..def0f430b667 100644 --- a/crypto/testmgr.c +++ b/crypto/testmgr.c @@ -1534,6 +1534,21 @@ static int alg_test_null(const struct alg_test_desc *desc, /* Please keep this list sorted by algorithm name. 
*/ static const struct alg_test_desc alg_test_descs[] = { { + .alg = "__cbc-cast5-avx", + .test = alg_test_null, + .suite = { + .cipher = { + .enc = { + .vecs = NULL, + .count = 0 + }, + .dec = { + .vecs = NULL, + .count = 0 + } + } + } + }, { .alg = "__cbc-serpent-avx", .test = alg_test_null, .suite = { @@ -1594,6 +1609,21 @@ static const struct alg_test_desc alg_test_descs[] = { } } } + }, { + .alg = "__driver-cbc-cast5-avx", + .test = alg_test_null, + .suite = { + .cipher = { + .enc = { + .vecs = NULL, + .count = 0 + }, + .dec = { + .vecs = NULL, + .count = 0 + } + } + } }, { .alg = "__driver-cbc-serpent-avx", .test = alg_test_null, @@ -1655,6 +1685,21 @@ static const struct alg_test_desc alg_test_descs[] = { } } } + }, { + .alg = "__driver-ecb-cast5-avx", + .test = alg_test_null, + .suite = { + .cipher = { + .enc = { + .vecs = NULL, + .count = 0 + }, + .dec = { + .vecs = NULL, + .count = 0 + } + } + } }, { .alg = "__driver-ecb-serpent-avx", .test = alg_test_null, @@ -1951,6 +1996,21 @@ static const struct alg_test_desc alg_test_descs[] = { } } } + }, { + .alg = "cryptd(__driver-ecb-cast5-avx)", + .test = alg_test_null, + .suite = { + .cipher = { + .enc = { + .vecs = NULL, + .count = 0 + }, + .dec = { + .vecs = NULL, + .count = 0 + } + } + } }, { .alg = "cryptd(__driver-ecb-serpent-avx)", .test = alg_test_null, -- cgit v1.2.3 From 2b49b906729644dd4696b9291b7e2f6cd1266dc0 Mon Sep 17 00:00:00 2001 From: Johannes Goetzfried Date: Wed, 11 Jul 2012 19:38:12 +0200 Subject: crypto: cast6 - prepare generic module for optimized implementations Rename cast6 module to cast6_generic to allow autoloading of optimized implementations. Generic functions and s-boxes are exported to be able to use them within optimized implementations. Signed-off-by: Johannes Goetzfried Signed-off-by: Herbert Xu --- crypto/Makefile | 2 +- crypto/cast6.c | 546 ----------------------------------------------- crypto/cast6_generic.c | 566 +++++++++++++++++++++++++++++++++++++++++++++++++ include/crypto/cast6.h | 23 ++ 4 files changed, 590 insertions(+), 547 deletions(-) delete mode 100644 crypto/cast6.c create mode 100644 crypto/cast6_generic.c create mode 100644 include/crypto/cast6.h (limited to 'crypto') diff --git a/crypto/Makefile b/crypto/Makefile index a56821e5d573..396966d2d849 100644 --- a/crypto/Makefile +++ b/crypto/Makefile @@ -69,7 +69,7 @@ obj-$(CONFIG_CRYPTO_SERPENT) += serpent_generic.o obj-$(CONFIG_CRYPTO_AES) += aes_generic.o obj-$(CONFIG_CRYPTO_CAMELLIA) += camellia_generic.o obj-$(CONFIG_CRYPTO_CAST5) += cast5_generic.o -obj-$(CONFIG_CRYPTO_CAST6) += cast6.o +obj-$(CONFIG_CRYPTO_CAST6) += cast6_generic.o obj-$(CONFIG_CRYPTO_ARC4) += arc4.o obj-$(CONFIG_CRYPTO_TEA) += tea.o obj-$(CONFIG_CRYPTO_KHAZAD) += khazad.o diff --git a/crypto/cast6.c b/crypto/cast6.c deleted file mode 100644 index 04264f574601..000000000000 --- a/crypto/cast6.c +++ /dev/null @@ -1,546 +0,0 @@ -/* Kernel cryptographic api. - * cast6.c - Cast6 cipher algorithm [rfc2612]. - * - * CAST-256 (*cast6*) is a DES like Substitution-Permutation Network (SPN) - * cryptosystem built upon the CAST-128 (*cast5*) [rfc2144] encryption - * algorithm. - * - * Copyright (C) 2003 Kartikey Mahendra Bhatt . - * - * This program is free software; you can redistribute it and/or modify it - * under the terms of GNU General Public License as published by the Free - * Software Foundation; either version 2 of the License, or (at your option) - * any later version. 
- * - * You should have received a copy of the GNU General Public License - * along with this program; if not, write to the Free Software - * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA - */ - - -#include -#include -#include -#include -#include -#include -#include - -#define CAST6_BLOCK_SIZE 16 -#define CAST6_MIN_KEY_SIZE 16 -#define CAST6_MAX_KEY_SIZE 32 - -struct cast6_ctx { - u32 Km[12][4]; - u8 Kr[12][4]; -}; - -#define F1(D, r, m) ((I = ((m) + (D))), (I = rol32(I, (r))), \ - (((s1[I >> 24] ^ s2[(I>>16)&0xff]) - s3[(I>>8)&0xff]) + s4[I&0xff])) -#define F2(D, r, m) ((I = ((m) ^ (D))), (I = rol32(I, (r))), \ - (((s1[I >> 24] - s2[(I>>16)&0xff]) + s3[(I>>8)&0xff]) ^ s4[I&0xff])) -#define F3(D, r, m) ((I = ((m) - (D))), (I = rol32(I, (r))), \ - (((s1[I >> 24] + s2[(I>>16)&0xff]) ^ s3[(I>>8)&0xff]) - s4[I&0xff])) - -static const u32 s1[256] = { - 0x30fb40d4, 0x9fa0ff0b, 0x6beccd2f, 0x3f258c7a, 0x1e213f2f, - 0x9c004dd3, 0x6003e540, 0xcf9fc949, - 0xbfd4af27, 0x88bbbdb5, 0xe2034090, 0x98d09675, 0x6e63a0e0, - 0x15c361d2, 0xc2e7661d, 0x22d4ff8e, - 0x28683b6f, 0xc07fd059, 0xff2379c8, 0x775f50e2, 0x43c340d3, - 0xdf2f8656, 0x887ca41a, 0xa2d2bd2d, - 0xa1c9e0d6, 0x346c4819, 0x61b76d87, 0x22540f2f, 0x2abe32e1, - 0xaa54166b, 0x22568e3a, 0xa2d341d0, - 0x66db40c8, 0xa784392f, 0x004dff2f, 0x2db9d2de, 0x97943fac, - 0x4a97c1d8, 0x527644b7, 0xb5f437a7, - 0xb82cbaef, 0xd751d159, 0x6ff7f0ed, 0x5a097a1f, 0x827b68d0, - 0x90ecf52e, 0x22b0c054, 0xbc8e5935, - 0x4b6d2f7f, 0x50bb64a2, 0xd2664910, 0xbee5812d, 0xb7332290, - 0xe93b159f, 0xb48ee411, 0x4bff345d, - 0xfd45c240, 0xad31973f, 0xc4f6d02e, 0x55fc8165, 0xd5b1caad, - 0xa1ac2dae, 0xa2d4b76d, 0xc19b0c50, - 0x882240f2, 0x0c6e4f38, 0xa4e4bfd7, 0x4f5ba272, 0x564c1d2f, - 0xc59c5319, 0xb949e354, 0xb04669fe, - 0xb1b6ab8a, 0xc71358dd, 0x6385c545, 0x110f935d, 0x57538ad5, - 0x6a390493, 0xe63d37e0, 0x2a54f6b3, - 0x3a787d5f, 0x6276a0b5, 0x19a6fcdf, 0x7a42206a, 0x29f9d4d5, - 0xf61b1891, 0xbb72275e, 0xaa508167, - 0x38901091, 0xc6b505eb, 0x84c7cb8c, 0x2ad75a0f, 0x874a1427, - 0xa2d1936b, 0x2ad286af, 0xaa56d291, - 0xd7894360, 0x425c750d, 0x93b39e26, 0x187184c9, 0x6c00b32d, - 0x73e2bb14, 0xa0bebc3c, 0x54623779, - 0x64459eab, 0x3f328b82, 0x7718cf82, 0x59a2cea6, 0x04ee002e, - 0x89fe78e6, 0x3fab0950, 0x325ff6c2, - 0x81383f05, 0x6963c5c8, 0x76cb5ad6, 0xd49974c9, 0xca180dcf, - 0x380782d5, 0xc7fa5cf6, 0x8ac31511, - 0x35e79e13, 0x47da91d0, 0xf40f9086, 0xa7e2419e, 0x31366241, - 0x051ef495, 0xaa573b04, 0x4a805d8d, - 0x548300d0, 0x00322a3c, 0xbf64cddf, 0xba57a68e, 0x75c6372b, - 0x50afd341, 0xa7c13275, 0x915a0bf5, - 0x6b54bfab, 0x2b0b1426, 0xab4cc9d7, 0x449ccd82, 0xf7fbf265, - 0xab85c5f3, 0x1b55db94, 0xaad4e324, - 0xcfa4bd3f, 0x2deaa3e2, 0x9e204d02, 0xc8bd25ac, 0xeadf55b3, - 0xd5bd9e98, 0xe31231b2, 0x2ad5ad6c, - 0x954329de, 0xadbe4528, 0xd8710f69, 0xaa51c90f, 0xaa786bf6, - 0x22513f1e, 0xaa51a79b, 0x2ad344cc, - 0x7b5a41f0, 0xd37cfbad, 0x1b069505, 0x41ece491, 0xb4c332e6, - 0x032268d4, 0xc9600acc, 0xce387e6d, - 0xbf6bb16c, 0x6a70fb78, 0x0d03d9c9, 0xd4df39de, 0xe01063da, - 0x4736f464, 0x5ad328d8, 0xb347cc96, - 0x75bb0fc3, 0x98511bfb, 0x4ffbcc35, 0xb58bcf6a, 0xe11f0abc, - 0xbfc5fe4a, 0xa70aec10, 0xac39570a, - 0x3f04442f, 0x6188b153, 0xe0397a2e, 0x5727cb79, 0x9ceb418f, - 0x1cacd68d, 0x2ad37c96, 0x0175cb9d, - 0xc69dff09, 0xc75b65f0, 0xd9db40d8, 0xec0e7779, 0x4744ead4, - 0xb11c3274, 0xdd24cb9e, 0x7e1c54bd, - 0xf01144f9, 0xd2240eb1, 0x9675b3fd, 0xa3ac3755, 0xd47c27af, - 0x51c85f4d, 0x56907596, 0xa5bb15e6, - 0x580304f0, 0xca042cf1, 0x011a37ea, 0x8dbfaadb, 0x35ba3e4a, - 
0x3526ffa0, 0xc37b4d09, 0xbc306ed9, - 0x98a52666, 0x5648f725, 0xff5e569d, 0x0ced63d0, 0x7c63b2cf, - 0x700b45e1, 0xd5ea50f1, 0x85a92872, - 0xaf1fbda7, 0xd4234870, 0xa7870bf3, 0x2d3b4d79, 0x42e04198, - 0x0cd0ede7, 0x26470db8, 0xf881814c, - 0x474d6ad7, 0x7c0c5e5c, 0xd1231959, 0x381b7298, 0xf5d2f4db, - 0xab838653, 0x6e2f1e23, 0x83719c9e, - 0xbd91e046, 0x9a56456e, 0xdc39200c, 0x20c8c571, 0x962bda1c, - 0xe1e696ff, 0xb141ab08, 0x7cca89b9, - 0x1a69e783, 0x02cc4843, 0xa2f7c579, 0x429ef47d, 0x427b169c, - 0x5ac9f049, 0xdd8f0f00, 0x5c8165bf -}; - -static const u32 s2[256] = { - 0x1f201094, 0xef0ba75b, 0x69e3cf7e, 0x393f4380, 0xfe61cf7a, - 0xeec5207a, 0x55889c94, 0x72fc0651, - 0xada7ef79, 0x4e1d7235, 0xd55a63ce, 0xde0436ba, 0x99c430ef, - 0x5f0c0794, 0x18dcdb7d, 0xa1d6eff3, - 0xa0b52f7b, 0x59e83605, 0xee15b094, 0xe9ffd909, 0xdc440086, - 0xef944459, 0xba83ccb3, 0xe0c3cdfb, - 0xd1da4181, 0x3b092ab1, 0xf997f1c1, 0xa5e6cf7b, 0x01420ddb, - 0xe4e7ef5b, 0x25a1ff41, 0xe180f806, - 0x1fc41080, 0x179bee7a, 0xd37ac6a9, 0xfe5830a4, 0x98de8b7f, - 0x77e83f4e, 0x79929269, 0x24fa9f7b, - 0xe113c85b, 0xacc40083, 0xd7503525, 0xf7ea615f, 0x62143154, - 0x0d554b63, 0x5d681121, 0xc866c359, - 0x3d63cf73, 0xcee234c0, 0xd4d87e87, 0x5c672b21, 0x071f6181, - 0x39f7627f, 0x361e3084, 0xe4eb573b, - 0x602f64a4, 0xd63acd9c, 0x1bbc4635, 0x9e81032d, 0x2701f50c, - 0x99847ab4, 0xa0e3df79, 0xba6cf38c, - 0x10843094, 0x2537a95e, 0xf46f6ffe, 0xa1ff3b1f, 0x208cfb6a, - 0x8f458c74, 0xd9e0a227, 0x4ec73a34, - 0xfc884f69, 0x3e4de8df, 0xef0e0088, 0x3559648d, 0x8a45388c, - 0x1d804366, 0x721d9bfd, 0xa58684bb, - 0xe8256333, 0x844e8212, 0x128d8098, 0xfed33fb4, 0xce280ae1, - 0x27e19ba5, 0xd5a6c252, 0xe49754bd, - 0xc5d655dd, 0xeb667064, 0x77840b4d, 0xa1b6a801, 0x84db26a9, - 0xe0b56714, 0x21f043b7, 0xe5d05860, - 0x54f03084, 0x066ff472, 0xa31aa153, 0xdadc4755, 0xb5625dbf, - 0x68561be6, 0x83ca6b94, 0x2d6ed23b, - 0xeccf01db, 0xa6d3d0ba, 0xb6803d5c, 0xaf77a709, 0x33b4a34c, - 0x397bc8d6, 0x5ee22b95, 0x5f0e5304, - 0x81ed6f61, 0x20e74364, 0xb45e1378, 0xde18639b, 0x881ca122, - 0xb96726d1, 0x8049a7e8, 0x22b7da7b, - 0x5e552d25, 0x5272d237, 0x79d2951c, 0xc60d894c, 0x488cb402, - 0x1ba4fe5b, 0xa4b09f6b, 0x1ca815cf, - 0xa20c3005, 0x8871df63, 0xb9de2fcb, 0x0cc6c9e9, 0x0beeff53, - 0xe3214517, 0xb4542835, 0x9f63293c, - 0xee41e729, 0x6e1d2d7c, 0x50045286, 0x1e6685f3, 0xf33401c6, - 0x30a22c95, 0x31a70850, 0x60930f13, - 0x73f98417, 0xa1269859, 0xec645c44, 0x52c877a9, 0xcdff33a6, - 0xa02b1741, 0x7cbad9a2, 0x2180036f, - 0x50d99c08, 0xcb3f4861, 0xc26bd765, 0x64a3f6ab, 0x80342676, - 0x25a75e7b, 0xe4e6d1fc, 0x20c710e6, - 0xcdf0b680, 0x17844d3b, 0x31eef84d, 0x7e0824e4, 0x2ccb49eb, - 0x846a3bae, 0x8ff77888, 0xee5d60f6, - 0x7af75673, 0x2fdd5cdb, 0xa11631c1, 0x30f66f43, 0xb3faec54, - 0x157fd7fa, 0xef8579cc, 0xd152de58, - 0xdb2ffd5e, 0x8f32ce19, 0x306af97a, 0x02f03ef8, 0x99319ad5, - 0xc242fa0f, 0xa7e3ebb0, 0xc68e4906, - 0xb8da230c, 0x80823028, 0xdcdef3c8, 0xd35fb171, 0x088a1bc8, - 0xbec0c560, 0x61a3c9e8, 0xbca8f54d, - 0xc72feffa, 0x22822e99, 0x82c570b4, 0xd8d94e89, 0x8b1c34bc, - 0x301e16e6, 0x273be979, 0xb0ffeaa6, - 0x61d9b8c6, 0x00b24869, 0xb7ffce3f, 0x08dc283b, 0x43daf65a, - 0xf7e19798, 0x7619b72f, 0x8f1c9ba4, - 0xdc8637a0, 0x16a7d3b1, 0x9fc393b7, 0xa7136eeb, 0xc6bcc63e, - 0x1a513742, 0xef6828bc, 0x520365d6, - 0x2d6a77ab, 0x3527ed4b, 0x821fd216, 0x095c6e2e, 0xdb92f2fb, - 0x5eea29cb, 0x145892f5, 0x91584f7f, - 0x5483697b, 0x2667a8cc, 0x85196048, 0x8c4bacea, 0x833860d4, - 0x0d23e0f9, 0x6c387e8a, 0x0ae6d249, - 0xb284600c, 0xd835731d, 0xdcb1c647, 0xac4c56ea, 0x3ebd81b3, - 0x230eabb0, 
0x6438bc87, 0xf0b5b1fa, - 0x8f5ea2b3, 0xfc184642, 0x0a036b7a, 0x4fb089bd, 0x649da589, - 0xa345415e, 0x5c038323, 0x3e5d3bb9, - 0x43d79572, 0x7e6dd07c, 0x06dfdf1e, 0x6c6cc4ef, 0x7160a539, - 0x73bfbe70, 0x83877605, 0x4523ecf1 -}; - -static const u32 s3[256] = { - 0x8defc240, 0x25fa5d9f, 0xeb903dbf, 0xe810c907, 0x47607fff, - 0x369fe44b, 0x8c1fc644, 0xaececa90, - 0xbeb1f9bf, 0xeefbcaea, 0xe8cf1950, 0x51df07ae, 0x920e8806, - 0xf0ad0548, 0xe13c8d83, 0x927010d5, - 0x11107d9f, 0x07647db9, 0xb2e3e4d4, 0x3d4f285e, 0xb9afa820, - 0xfade82e0, 0xa067268b, 0x8272792e, - 0x553fb2c0, 0x489ae22b, 0xd4ef9794, 0x125e3fbc, 0x21fffcee, - 0x825b1bfd, 0x9255c5ed, 0x1257a240, - 0x4e1a8302, 0xbae07fff, 0x528246e7, 0x8e57140e, 0x3373f7bf, - 0x8c9f8188, 0xa6fc4ee8, 0xc982b5a5, - 0xa8c01db7, 0x579fc264, 0x67094f31, 0xf2bd3f5f, 0x40fff7c1, - 0x1fb78dfc, 0x8e6bd2c1, 0x437be59b, - 0x99b03dbf, 0xb5dbc64b, 0x638dc0e6, 0x55819d99, 0xa197c81c, - 0x4a012d6e, 0xc5884a28, 0xccc36f71, - 0xb843c213, 0x6c0743f1, 0x8309893c, 0x0feddd5f, 0x2f7fe850, - 0xd7c07f7e, 0x02507fbf, 0x5afb9a04, - 0xa747d2d0, 0x1651192e, 0xaf70bf3e, 0x58c31380, 0x5f98302e, - 0x727cc3c4, 0x0a0fb402, 0x0f7fef82, - 0x8c96fdad, 0x5d2c2aae, 0x8ee99a49, 0x50da88b8, 0x8427f4a0, - 0x1eac5790, 0x796fb449, 0x8252dc15, - 0xefbd7d9b, 0xa672597d, 0xada840d8, 0x45f54504, 0xfa5d7403, - 0xe83ec305, 0x4f91751a, 0x925669c2, - 0x23efe941, 0xa903f12e, 0x60270df2, 0x0276e4b6, 0x94fd6574, - 0x927985b2, 0x8276dbcb, 0x02778176, - 0xf8af918d, 0x4e48f79e, 0x8f616ddf, 0xe29d840e, 0x842f7d83, - 0x340ce5c8, 0x96bbb682, 0x93b4b148, - 0xef303cab, 0x984faf28, 0x779faf9b, 0x92dc560d, 0x224d1e20, - 0x8437aa88, 0x7d29dc96, 0x2756d3dc, - 0x8b907cee, 0xb51fd240, 0xe7c07ce3, 0xe566b4a1, 0xc3e9615e, - 0x3cf8209d, 0x6094d1e3, 0xcd9ca341, - 0x5c76460e, 0x00ea983b, 0xd4d67881, 0xfd47572c, 0xf76cedd9, - 0xbda8229c, 0x127dadaa, 0x438a074e, - 0x1f97c090, 0x081bdb8a, 0x93a07ebe, 0xb938ca15, 0x97b03cff, - 0x3dc2c0f8, 0x8d1ab2ec, 0x64380e51, - 0x68cc7bfb, 0xd90f2788, 0x12490181, 0x5de5ffd4, 0xdd7ef86a, - 0x76a2e214, 0xb9a40368, 0x925d958f, - 0x4b39fffa, 0xba39aee9, 0xa4ffd30b, 0xfaf7933b, 0x6d498623, - 0x193cbcfa, 0x27627545, 0x825cf47a, - 0x61bd8ba0, 0xd11e42d1, 0xcead04f4, 0x127ea392, 0x10428db7, - 0x8272a972, 0x9270c4a8, 0x127de50b, - 0x285ba1c8, 0x3c62f44f, 0x35c0eaa5, 0xe805d231, 0x428929fb, - 0xb4fcdf82, 0x4fb66a53, 0x0e7dc15b, - 0x1f081fab, 0x108618ae, 0xfcfd086d, 0xf9ff2889, 0x694bcc11, - 0x236a5cae, 0x12deca4d, 0x2c3f8cc5, - 0xd2d02dfe, 0xf8ef5896, 0xe4cf52da, 0x95155b67, 0x494a488c, - 0xb9b6a80c, 0x5c8f82bc, 0x89d36b45, - 0x3a609437, 0xec00c9a9, 0x44715253, 0x0a874b49, 0xd773bc40, - 0x7c34671c, 0x02717ef6, 0x4feb5536, - 0xa2d02fff, 0xd2bf60c4, 0xd43f03c0, 0x50b4ef6d, 0x07478cd1, - 0x006e1888, 0xa2e53f55, 0xb9e6d4bc, - 0xa2048016, 0x97573833, 0xd7207d67, 0xde0f8f3d, 0x72f87b33, - 0xabcc4f33, 0x7688c55d, 0x7b00a6b0, - 0x947b0001, 0x570075d2, 0xf9bb88f8, 0x8942019e, 0x4264a5ff, - 0x856302e0, 0x72dbd92b, 0xee971b69, - 0x6ea22fde, 0x5f08ae2b, 0xaf7a616d, 0xe5c98767, 0xcf1febd2, - 0x61efc8c2, 0xf1ac2571, 0xcc8239c2, - 0x67214cb8, 0xb1e583d1, 0xb7dc3e62, 0x7f10bdce, 0xf90a5c38, - 0x0ff0443d, 0x606e6dc6, 0x60543a49, - 0x5727c148, 0x2be98a1d, 0x8ab41738, 0x20e1be24, 0xaf96da0f, - 0x68458425, 0x99833be5, 0x600d457d, - 0x282f9350, 0x8334b362, 0xd91d1120, 0x2b6d8da0, 0x642b1e31, - 0x9c305a00, 0x52bce688, 0x1b03588a, - 0xf7baefd5, 0x4142ed9c, 0xa4315c11, 0x83323ec5, 0xdfef4636, - 0xa133c501, 0xe9d3531c, 0xee353783 -}; - -static const u32 s4[256] = { - 0x9db30420, 0x1fb6e9de, 0xa7be7bef, 0xd273a298, 
0x4a4f7bdb, - 0x64ad8c57, 0x85510443, 0xfa020ed1, - 0x7e287aff, 0xe60fb663, 0x095f35a1, 0x79ebf120, 0xfd059d43, - 0x6497b7b1, 0xf3641f63, 0x241e4adf, - 0x28147f5f, 0x4fa2b8cd, 0xc9430040, 0x0cc32220, 0xfdd30b30, - 0xc0a5374f, 0x1d2d00d9, 0x24147b15, - 0xee4d111a, 0x0fca5167, 0x71ff904c, 0x2d195ffe, 0x1a05645f, - 0x0c13fefe, 0x081b08ca, 0x05170121, - 0x80530100, 0xe83e5efe, 0xac9af4f8, 0x7fe72701, 0xd2b8ee5f, - 0x06df4261, 0xbb9e9b8a, 0x7293ea25, - 0xce84ffdf, 0xf5718801, 0x3dd64b04, 0xa26f263b, 0x7ed48400, - 0x547eebe6, 0x446d4ca0, 0x6cf3d6f5, - 0x2649abdf, 0xaea0c7f5, 0x36338cc1, 0x503f7e93, 0xd3772061, - 0x11b638e1, 0x72500e03, 0xf80eb2bb, - 0xabe0502e, 0xec8d77de, 0x57971e81, 0xe14f6746, 0xc9335400, - 0x6920318f, 0x081dbb99, 0xffc304a5, - 0x4d351805, 0x7f3d5ce3, 0xa6c866c6, 0x5d5bcca9, 0xdaec6fea, - 0x9f926f91, 0x9f46222f, 0x3991467d, - 0xa5bf6d8e, 0x1143c44f, 0x43958302, 0xd0214eeb, 0x022083b8, - 0x3fb6180c, 0x18f8931e, 0x281658e6, - 0x26486e3e, 0x8bd78a70, 0x7477e4c1, 0xb506e07c, 0xf32d0a25, - 0x79098b02, 0xe4eabb81, 0x28123b23, - 0x69dead38, 0x1574ca16, 0xdf871b62, 0x211c40b7, 0xa51a9ef9, - 0x0014377b, 0x041e8ac8, 0x09114003, - 0xbd59e4d2, 0xe3d156d5, 0x4fe876d5, 0x2f91a340, 0x557be8de, - 0x00eae4a7, 0x0ce5c2ec, 0x4db4bba6, - 0xe756bdff, 0xdd3369ac, 0xec17b035, 0x06572327, 0x99afc8b0, - 0x56c8c391, 0x6b65811c, 0x5e146119, - 0x6e85cb75, 0xbe07c002, 0xc2325577, 0x893ff4ec, 0x5bbfc92d, - 0xd0ec3b25, 0xb7801ab7, 0x8d6d3b24, - 0x20c763ef, 0xc366a5fc, 0x9c382880, 0x0ace3205, 0xaac9548a, - 0xeca1d7c7, 0x041afa32, 0x1d16625a, - 0x6701902c, 0x9b757a54, 0x31d477f7, 0x9126b031, 0x36cc6fdb, - 0xc70b8b46, 0xd9e66a48, 0x56e55a79, - 0x026a4ceb, 0x52437eff, 0x2f8f76b4, 0x0df980a5, 0x8674cde3, - 0xedda04eb, 0x17a9be04, 0x2c18f4df, - 0xb7747f9d, 0xab2af7b4, 0xefc34d20, 0x2e096b7c, 0x1741a254, - 0xe5b6a035, 0x213d42f6, 0x2c1c7c26, - 0x61c2f50f, 0x6552daf9, 0xd2c231f8, 0x25130f69, 0xd8167fa2, - 0x0418f2c8, 0x001a96a6, 0x0d1526ab, - 0x63315c21, 0x5e0a72ec, 0x49bafefd, 0x187908d9, 0x8d0dbd86, - 0x311170a7, 0x3e9b640c, 0xcc3e10d7, - 0xd5cad3b6, 0x0caec388, 0xf73001e1, 0x6c728aff, 0x71eae2a1, - 0x1f9af36e, 0xcfcbd12f, 0xc1de8417, - 0xac07be6b, 0xcb44a1d8, 0x8b9b0f56, 0x013988c3, 0xb1c52fca, - 0xb4be31cd, 0xd8782806, 0x12a3a4e2, - 0x6f7de532, 0x58fd7eb6, 0xd01ee900, 0x24adffc2, 0xf4990fc5, - 0x9711aac5, 0x001d7b95, 0x82e5e7d2, - 0x109873f6, 0x00613096, 0xc32d9521, 0xada121ff, 0x29908415, - 0x7fbb977f, 0xaf9eb3db, 0x29c9ed2a, - 0x5ce2a465, 0xa730f32c, 0xd0aa3fe8, 0x8a5cc091, 0xd49e2ce7, - 0x0ce454a9, 0xd60acd86, 0x015f1919, - 0x77079103, 0xdea03af6, 0x78a8565e, 0xdee356df, 0x21f05cbe, - 0x8b75e387, 0xb3c50651, 0xb8a5c3ef, - 0xd8eeb6d2, 0xe523be77, 0xc2154529, 0x2f69efdf, 0xafe67afb, - 0xf470c4b2, 0xf3e0eb5b, 0xd6cc9876, - 0x39e4460c, 0x1fda8538, 0x1987832f, 0xca007367, 0xa99144f8, - 0x296b299e, 0x492fc295, 0x9266beab, - 0xb5676e69, 0x9bd3ddda, 0xdf7e052f, 0xdb25701c, 0x1b5e51ee, - 0xf65324e6, 0x6afce36c, 0x0316cc04, - 0x8644213e, 0xb7dc59d0, 0x7965291f, 0xccd6fd43, 0x41823979, - 0x932bcdf6, 0xb657c34d, 0x4edfd282, - 0x7ae5290c, 0x3cb9536b, 0x851e20fe, 0x9833557e, 0x13ecf0b0, - 0xd3ffb372, 0x3f85c5c1, 0x0aef7ed2 -}; - -static const u32 Tm[24][8] = { - { 0x5a827999, 0xc95c653a, 0x383650db, 0xa7103c7c, 0x15ea281d, - 0x84c413be, 0xf39dff5f, 0x6277eb00 } , - { 0xd151d6a1, 0x402bc242, 0xaf05ade3, 0x1ddf9984, 0x8cb98525, - 0xfb9370c6, 0x6a6d5c67, 0xd9474808 } , - { 0x482133a9, 0xb6fb1f4a, 0x25d50aeb, 0x94aef68c, 0x0388e22d, - 0x7262cdce, 0xe13cb96f, 0x5016a510 } , - { 0xbef090b1, 0x2dca7c52, 0x9ca467f3, 0x0b7e5394, 
0x7a583f35, - 0xe9322ad6, 0x580c1677, 0xc6e60218 } , - { 0x35bfedb9, 0xa499d95a, 0x1373c4fb, 0x824db09c, 0xf1279c3d, - 0x600187de, 0xcedb737f, 0x3db55f20 } , - { 0xac8f4ac1, 0x1b693662, 0x8a432203, 0xf91d0da4, 0x67f6f945, - 0xd6d0e4e6, 0x45aad087, 0xb484bc28 } , - { 0x235ea7c9, 0x9238936a, 0x01127f0b, 0x6fec6aac, 0xdec6564d, - 0x4da041ee, 0xbc7a2d8f, 0x2b541930 } , - { 0x9a2e04d1, 0x0907f072, 0x77e1dc13, 0xe6bbc7b4, 0x5595b355, - 0xc46f9ef6, 0x33498a97, 0xa2237638 } , - { 0x10fd61d9, 0x7fd74d7a, 0xeeb1391b, 0x5d8b24bc, 0xcc65105d, - 0x3b3efbfe, 0xaa18e79f, 0x18f2d340 } , - { 0x87ccbee1, 0xf6a6aa82, 0x65809623, 0xd45a81c4, 0x43346d65, - 0xb20e5906, 0x20e844a7, 0x8fc23048 } , - { 0xfe9c1be9, 0x6d76078a, 0xdc4ff32b, 0x4b29decc, 0xba03ca6d, - 0x28ddb60e, 0x97b7a1af, 0x06918d50 } , - { 0x756b78f1, 0xe4456492, 0x531f5033, 0xc1f93bd4, 0x30d32775, - 0x9fad1316, 0x0e86feb7, 0x7d60ea58 } , - { 0xec3ad5f9, 0x5b14c19a, 0xc9eead3b, 0x38c898dc, 0xa7a2847d, - 0x167c701e, 0x85565bbf, 0xf4304760 } , - { 0x630a3301, 0xd1e41ea2, 0x40be0a43, 0xaf97f5e4, 0x1e71e185, - 0x8d4bcd26, 0xfc25b8c7, 0x6affa468 } , - { 0xd9d99009, 0x48b37baa, 0xb78d674b, 0x266752ec, 0x95413e8d, - 0x041b2a2e, 0x72f515cf, 0xe1cf0170 } , - { 0x50a8ed11, 0xbf82d8b2, 0x2e5cc453, 0x9d36aff4, 0x0c109b95, - 0x7aea8736, 0xe9c472d7, 0x589e5e78 } , - { 0xc7784a19, 0x365235ba, 0xa52c215b, 0x14060cfc, 0x82dff89d, - 0xf1b9e43e, 0x6093cfdf, 0xcf6dbb80 } , - { 0x3e47a721, 0xad2192c2, 0x1bfb7e63, 0x8ad56a04, 0xf9af55a5, - 0x68894146, 0xd7632ce7, 0x463d1888 } , - { 0xb5170429, 0x23f0efca, 0x92cadb6b, 0x01a4c70c, 0x707eb2ad, - 0xdf589e4e, 0x4e3289ef, 0xbd0c7590 } , - { 0x2be66131, 0x9ac04cd2, 0x099a3873, 0x78742414, 0xe74e0fb5, - 0x5627fb56, 0xc501e6f7, 0x33dbd298 } , - { 0xa2b5be39, 0x118fa9da, 0x8069957b, 0xef43811c, 0x5e1d6cbd, - 0xccf7585e, 0x3bd143ff, 0xaaab2fa0 } , - { 0x19851b41, 0x885f06e2, 0xf738f283, 0x6612de24, 0xd4ecc9c5, - 0x43c6b566, 0xb2a0a107, 0x217a8ca8 } , - { 0x90547849, 0xff2e63ea, 0x6e084f8b, 0xdce23b2c, 0x4bbc26cd, - 0xba96126e, 0x296ffe0f, 0x9849e9b0 } , - { 0x0723d551, 0x75fdc0f2, 0xe4d7ac93, 0x53b19834, 0xc28b83d5, - 0x31656f76, 0xa03f5b17, 0x0f1946b8 } -}; - -static const u8 Tr[4][8] = { - { 0x13, 0x04, 0x15, 0x06, 0x17, 0x08, 0x19, 0x0a } , - { 0x1b, 0x0c, 0x1d, 0x0e, 0x1f, 0x10, 0x01, 0x12 } , - { 0x03, 0x14, 0x05, 0x16, 0x07, 0x18, 0x09, 0x1a } , - { 0x0b, 0x1c, 0x0d, 0x1e, 0x0f, 0x00, 0x11, 0x02 } -}; - -/* forward octave */ -static void W(u32 *key, unsigned int i) -{ - u32 I; - key[6] ^= F1(key[7], Tr[i % 4][0], Tm[i][0]); - key[5] ^= F2(key[6], Tr[i % 4][1], Tm[i][1]); - key[4] ^= F3(key[5], Tr[i % 4][2], Tm[i][2]); - key[3] ^= F1(key[4], Tr[i % 4][3], Tm[i][3]); - key[2] ^= F2(key[3], Tr[i % 4][4], Tm[i][4]); - key[1] ^= F3(key[2], Tr[i % 4][5], Tm[i][5]); - key[0] ^= F1(key[1], Tr[i % 4][6], Tm[i][6]); - key[7] ^= F2(key[0], Tr[i % 4][7], Tm[i][7]); -} - -static int cast6_setkey(struct crypto_tfm *tfm, const u8 *in_key, - unsigned key_len) -{ - int i; - u32 key[8]; - __be32 p_key[8]; /* padded key */ - struct cast6_ctx *c = crypto_tfm_ctx(tfm); - u32 *flags = &tfm->crt_flags; - - if (key_len % 4 != 0) { - *flags |= CRYPTO_TFM_RES_BAD_KEY_LEN; - return -EINVAL; - } - - memset(p_key, 0, 32); - memcpy(p_key, in_key, key_len); - - key[0] = be32_to_cpu(p_key[0]); /* A */ - key[1] = be32_to_cpu(p_key[1]); /* B */ - key[2] = be32_to_cpu(p_key[2]); /* C */ - key[3] = be32_to_cpu(p_key[3]); /* D */ - key[4] = be32_to_cpu(p_key[4]); /* E */ - key[5] = be32_to_cpu(p_key[5]); /* F */ - key[6] = be32_to_cpu(p_key[6]); /* G */ - 
key[7] = be32_to_cpu(p_key[7]); /* H */ - - for (i = 0; i < 12; i++) { - W(key, 2 * i); - W(key, 2 * i + 1); - - c->Kr[i][0] = key[0] & 0x1f; - c->Kr[i][1] = key[2] & 0x1f; - c->Kr[i][2] = key[4] & 0x1f; - c->Kr[i][3] = key[6] & 0x1f; - - c->Km[i][0] = key[7]; - c->Km[i][1] = key[5]; - c->Km[i][2] = key[3]; - c->Km[i][3] = key[1]; - } - - return 0; -} - -/*forward quad round*/ -static void Q(u32 *block, u8 *Kr, u32 *Km) -{ - u32 I; - block[2] ^= F1(block[3], Kr[0], Km[0]); - block[1] ^= F2(block[2], Kr[1], Km[1]); - block[0] ^= F3(block[1], Kr[2], Km[2]); - block[3] ^= F1(block[0], Kr[3], Km[3]); -} - -/*reverse quad round*/ -static void QBAR(u32 *block, u8 *Kr, u32 *Km) -{ - u32 I; - block[3] ^= F1(block[0], Kr[3], Km[3]); - block[0] ^= F3(block[1], Kr[2], Km[2]); - block[1] ^= F2(block[2], Kr[1], Km[1]); - block[2] ^= F1(block[3], Kr[0], Km[0]); -} - -static void cast6_encrypt(struct crypto_tfm *tfm, u8 *outbuf, const u8 *inbuf) -{ - struct cast6_ctx *c = crypto_tfm_ctx(tfm); - const __be32 *src = (const __be32 *)inbuf; - __be32 *dst = (__be32 *)outbuf; - u32 block[4]; - u32 *Km; - u8 *Kr; - - block[0] = be32_to_cpu(src[0]); - block[1] = be32_to_cpu(src[1]); - block[2] = be32_to_cpu(src[2]); - block[3] = be32_to_cpu(src[3]); - - Km = c->Km[0]; Kr = c->Kr[0]; Q(block, Kr, Km); - Km = c->Km[1]; Kr = c->Kr[1]; Q(block, Kr, Km); - Km = c->Km[2]; Kr = c->Kr[2]; Q(block, Kr, Km); - Km = c->Km[3]; Kr = c->Kr[3]; Q(block, Kr, Km); - Km = c->Km[4]; Kr = c->Kr[4]; Q(block, Kr, Km); - Km = c->Km[5]; Kr = c->Kr[5]; Q(block, Kr, Km); - Km = c->Km[6]; Kr = c->Kr[6]; QBAR(block, Kr, Km); - Km = c->Km[7]; Kr = c->Kr[7]; QBAR(block, Kr, Km); - Km = c->Km[8]; Kr = c->Kr[8]; QBAR(block, Kr, Km); - Km = c->Km[9]; Kr = c->Kr[9]; QBAR(block, Kr, Km); - Km = c->Km[10]; Kr = c->Kr[10]; QBAR(block, Kr, Km); - Km = c->Km[11]; Kr = c->Kr[11]; QBAR(block, Kr, Km); - - dst[0] = cpu_to_be32(block[0]); - dst[1] = cpu_to_be32(block[1]); - dst[2] = cpu_to_be32(block[2]); - dst[3] = cpu_to_be32(block[3]); -} - -static void cast6_decrypt(struct crypto_tfm *tfm, u8 *outbuf, const u8 *inbuf) -{ - struct cast6_ctx *c = crypto_tfm_ctx(tfm); - const __be32 *src = (const __be32 *)inbuf; - __be32 *dst = (__be32 *)outbuf; - u32 block[4]; - u32 *Km; - u8 *Kr; - - block[0] = be32_to_cpu(src[0]); - block[1] = be32_to_cpu(src[1]); - block[2] = be32_to_cpu(src[2]); - block[3] = be32_to_cpu(src[3]); - - Km = c->Km[11]; Kr = c->Kr[11]; Q(block, Kr, Km); - Km = c->Km[10]; Kr = c->Kr[10]; Q(block, Kr, Km); - Km = c->Km[9]; Kr = c->Kr[9]; Q(block, Kr, Km); - Km = c->Km[8]; Kr = c->Kr[8]; Q(block, Kr, Km); - Km = c->Km[7]; Kr = c->Kr[7]; Q(block, Kr, Km); - Km = c->Km[6]; Kr = c->Kr[6]; Q(block, Kr, Km); - Km = c->Km[5]; Kr = c->Kr[5]; QBAR(block, Kr, Km); - Km = c->Km[4]; Kr = c->Kr[4]; QBAR(block, Kr, Km); - Km = c->Km[3]; Kr = c->Kr[3]; QBAR(block, Kr, Km); - Km = c->Km[2]; Kr = c->Kr[2]; QBAR(block, Kr, Km); - Km = c->Km[1]; Kr = c->Kr[1]; QBAR(block, Kr, Km); - Km = c->Km[0]; Kr = c->Kr[0]; QBAR(block, Kr, Km); - - dst[0] = cpu_to_be32(block[0]); - dst[1] = cpu_to_be32(block[1]); - dst[2] = cpu_to_be32(block[2]); - dst[3] = cpu_to_be32(block[3]); -} - -static struct crypto_alg alg = { - .cra_name = "cast6", - .cra_flags = CRYPTO_ALG_TYPE_CIPHER, - .cra_blocksize = CAST6_BLOCK_SIZE, - .cra_ctxsize = sizeof(struct cast6_ctx), - .cra_alignmask = 3, - .cra_module = THIS_MODULE, - .cra_u = { - .cipher = { - .cia_min_keysize = CAST6_MIN_KEY_SIZE, - .cia_max_keysize = CAST6_MAX_KEY_SIZE, - .cia_setkey = cast6_setkey, - .cia_encrypt = 
cast6_encrypt, - .cia_decrypt = cast6_decrypt} - } -}; - -static int __init cast6_mod_init(void) -{ - return crypto_register_alg(&alg); -} - -static void __exit cast6_mod_fini(void) -{ - crypto_unregister_alg(&alg); -} - -module_init(cast6_mod_init); -module_exit(cast6_mod_fini); - -MODULE_LICENSE("GPL"); -MODULE_DESCRIPTION("Cast6 Cipher Algorithm"); diff --git a/crypto/cast6_generic.c b/crypto/cast6_generic.c new file mode 100644 index 000000000000..dc9309d70405 --- /dev/null +++ b/crypto/cast6_generic.c @@ -0,0 +1,566 @@ +/* Kernel cryptographic api. + * cast6.c - Cast6 cipher algorithm [rfc2612]. + * + * CAST-256 (*cast6*) is a DES like Substitution-Permutation Network (SPN) + * cryptosystem built upon the CAST-128 (*cast5*) [rfc2144] encryption + * algorithm. + * + * Copyright (C) 2003 Kartikey Mahendra Bhatt . + * + * This program is free software; you can redistribute it and/or modify it + * under the terms of GNU General Public License as published by the Free + * Software Foundation; either version 2 of the License, or (at your option) + * any later version. + * + * You should have received a copy of the GNU General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA + */ + + +#include +#include +#include +#include +#include +#include +#include +#include + +#define s1 cast6_s1 +#define s2 cast6_s2 +#define s3 cast6_s3 +#define s4 cast6_s4 + +#define F1(D, r, m) ((I = ((m) + (D))), (I = rol32(I, (r))), \ + (((s1[I >> 24] ^ s2[(I>>16)&0xff]) - s3[(I>>8)&0xff]) + s4[I&0xff])) +#define F2(D, r, m) ((I = ((m) ^ (D))), (I = rol32(I, (r))), \ + (((s1[I >> 24] - s2[(I>>16)&0xff]) + s3[(I>>8)&0xff]) ^ s4[I&0xff])) +#define F3(D, r, m) ((I = ((m) - (D))), (I = rol32(I, (r))), \ + (((s1[I >> 24] + s2[(I>>16)&0xff]) ^ s3[(I>>8)&0xff]) - s4[I&0xff])) + +const u32 cast6_s1[256] = { + 0x30fb40d4, 0x9fa0ff0b, 0x6beccd2f, 0x3f258c7a, 0x1e213f2f, + 0x9c004dd3, 0x6003e540, 0xcf9fc949, + 0xbfd4af27, 0x88bbbdb5, 0xe2034090, 0x98d09675, 0x6e63a0e0, + 0x15c361d2, 0xc2e7661d, 0x22d4ff8e, + 0x28683b6f, 0xc07fd059, 0xff2379c8, 0x775f50e2, 0x43c340d3, + 0xdf2f8656, 0x887ca41a, 0xa2d2bd2d, + 0xa1c9e0d6, 0x346c4819, 0x61b76d87, 0x22540f2f, 0x2abe32e1, + 0xaa54166b, 0x22568e3a, 0xa2d341d0, + 0x66db40c8, 0xa784392f, 0x004dff2f, 0x2db9d2de, 0x97943fac, + 0x4a97c1d8, 0x527644b7, 0xb5f437a7, + 0xb82cbaef, 0xd751d159, 0x6ff7f0ed, 0x5a097a1f, 0x827b68d0, + 0x90ecf52e, 0x22b0c054, 0xbc8e5935, + 0x4b6d2f7f, 0x50bb64a2, 0xd2664910, 0xbee5812d, 0xb7332290, + 0xe93b159f, 0xb48ee411, 0x4bff345d, + 0xfd45c240, 0xad31973f, 0xc4f6d02e, 0x55fc8165, 0xd5b1caad, + 0xa1ac2dae, 0xa2d4b76d, 0xc19b0c50, + 0x882240f2, 0x0c6e4f38, 0xa4e4bfd7, 0x4f5ba272, 0x564c1d2f, + 0xc59c5319, 0xb949e354, 0xb04669fe, + 0xb1b6ab8a, 0xc71358dd, 0x6385c545, 0x110f935d, 0x57538ad5, + 0x6a390493, 0xe63d37e0, 0x2a54f6b3, + 0x3a787d5f, 0x6276a0b5, 0x19a6fcdf, 0x7a42206a, 0x29f9d4d5, + 0xf61b1891, 0xbb72275e, 0xaa508167, + 0x38901091, 0xc6b505eb, 0x84c7cb8c, 0x2ad75a0f, 0x874a1427, + 0xa2d1936b, 0x2ad286af, 0xaa56d291, + 0xd7894360, 0x425c750d, 0x93b39e26, 0x187184c9, 0x6c00b32d, + 0x73e2bb14, 0xa0bebc3c, 0x54623779, + 0x64459eab, 0x3f328b82, 0x7718cf82, 0x59a2cea6, 0x04ee002e, + 0x89fe78e6, 0x3fab0950, 0x325ff6c2, + 0x81383f05, 0x6963c5c8, 0x76cb5ad6, 0xd49974c9, 0xca180dcf, + 0x380782d5, 0xc7fa5cf6, 0x8ac31511, + 0x35e79e13, 0x47da91d0, 0xf40f9086, 0xa7e2419e, 0x31366241, + 0x051ef495, 0xaa573b04, 0x4a805d8d, + 0x548300d0, 0x00322a3c, 0xbf64cddf, 
0xba57a68e, 0x75c6372b, + 0x50afd341, 0xa7c13275, 0x915a0bf5, + 0x6b54bfab, 0x2b0b1426, 0xab4cc9d7, 0x449ccd82, 0xf7fbf265, + 0xab85c5f3, 0x1b55db94, 0xaad4e324, + 0xcfa4bd3f, 0x2deaa3e2, 0x9e204d02, 0xc8bd25ac, 0xeadf55b3, + 0xd5bd9e98, 0xe31231b2, 0x2ad5ad6c, + 0x954329de, 0xadbe4528, 0xd8710f69, 0xaa51c90f, 0xaa786bf6, + 0x22513f1e, 0xaa51a79b, 0x2ad344cc, + 0x7b5a41f0, 0xd37cfbad, 0x1b069505, 0x41ece491, 0xb4c332e6, + 0x032268d4, 0xc9600acc, 0xce387e6d, + 0xbf6bb16c, 0x6a70fb78, 0x0d03d9c9, 0xd4df39de, 0xe01063da, + 0x4736f464, 0x5ad328d8, 0xb347cc96, + 0x75bb0fc3, 0x98511bfb, 0x4ffbcc35, 0xb58bcf6a, 0xe11f0abc, + 0xbfc5fe4a, 0xa70aec10, 0xac39570a, + 0x3f04442f, 0x6188b153, 0xe0397a2e, 0x5727cb79, 0x9ceb418f, + 0x1cacd68d, 0x2ad37c96, 0x0175cb9d, + 0xc69dff09, 0xc75b65f0, 0xd9db40d8, 0xec0e7779, 0x4744ead4, + 0xb11c3274, 0xdd24cb9e, 0x7e1c54bd, + 0xf01144f9, 0xd2240eb1, 0x9675b3fd, 0xa3ac3755, 0xd47c27af, + 0x51c85f4d, 0x56907596, 0xa5bb15e6, + 0x580304f0, 0xca042cf1, 0x011a37ea, 0x8dbfaadb, 0x35ba3e4a, + 0x3526ffa0, 0xc37b4d09, 0xbc306ed9, + 0x98a52666, 0x5648f725, 0xff5e569d, 0x0ced63d0, 0x7c63b2cf, + 0x700b45e1, 0xd5ea50f1, 0x85a92872, + 0xaf1fbda7, 0xd4234870, 0xa7870bf3, 0x2d3b4d79, 0x42e04198, + 0x0cd0ede7, 0x26470db8, 0xf881814c, + 0x474d6ad7, 0x7c0c5e5c, 0xd1231959, 0x381b7298, 0xf5d2f4db, + 0xab838653, 0x6e2f1e23, 0x83719c9e, + 0xbd91e046, 0x9a56456e, 0xdc39200c, 0x20c8c571, 0x962bda1c, + 0xe1e696ff, 0xb141ab08, 0x7cca89b9, + 0x1a69e783, 0x02cc4843, 0xa2f7c579, 0x429ef47d, 0x427b169c, + 0x5ac9f049, 0xdd8f0f00, 0x5c8165bf +}; +EXPORT_SYMBOL_GPL(cast6_s1); + +const u32 cast6_s2[256] = { + 0x1f201094, 0xef0ba75b, 0x69e3cf7e, 0x393f4380, 0xfe61cf7a, + 0xeec5207a, 0x55889c94, 0x72fc0651, + 0xada7ef79, 0x4e1d7235, 0xd55a63ce, 0xde0436ba, 0x99c430ef, + 0x5f0c0794, 0x18dcdb7d, 0xa1d6eff3, + 0xa0b52f7b, 0x59e83605, 0xee15b094, 0xe9ffd909, 0xdc440086, + 0xef944459, 0xba83ccb3, 0xe0c3cdfb, + 0xd1da4181, 0x3b092ab1, 0xf997f1c1, 0xa5e6cf7b, 0x01420ddb, + 0xe4e7ef5b, 0x25a1ff41, 0xe180f806, + 0x1fc41080, 0x179bee7a, 0xd37ac6a9, 0xfe5830a4, 0x98de8b7f, + 0x77e83f4e, 0x79929269, 0x24fa9f7b, + 0xe113c85b, 0xacc40083, 0xd7503525, 0xf7ea615f, 0x62143154, + 0x0d554b63, 0x5d681121, 0xc866c359, + 0x3d63cf73, 0xcee234c0, 0xd4d87e87, 0x5c672b21, 0x071f6181, + 0x39f7627f, 0x361e3084, 0xe4eb573b, + 0x602f64a4, 0xd63acd9c, 0x1bbc4635, 0x9e81032d, 0x2701f50c, + 0x99847ab4, 0xa0e3df79, 0xba6cf38c, + 0x10843094, 0x2537a95e, 0xf46f6ffe, 0xa1ff3b1f, 0x208cfb6a, + 0x8f458c74, 0xd9e0a227, 0x4ec73a34, + 0xfc884f69, 0x3e4de8df, 0xef0e0088, 0x3559648d, 0x8a45388c, + 0x1d804366, 0x721d9bfd, 0xa58684bb, + 0xe8256333, 0x844e8212, 0x128d8098, 0xfed33fb4, 0xce280ae1, + 0x27e19ba5, 0xd5a6c252, 0xe49754bd, + 0xc5d655dd, 0xeb667064, 0x77840b4d, 0xa1b6a801, 0x84db26a9, + 0xe0b56714, 0x21f043b7, 0xe5d05860, + 0x54f03084, 0x066ff472, 0xa31aa153, 0xdadc4755, 0xb5625dbf, + 0x68561be6, 0x83ca6b94, 0x2d6ed23b, + 0xeccf01db, 0xa6d3d0ba, 0xb6803d5c, 0xaf77a709, 0x33b4a34c, + 0x397bc8d6, 0x5ee22b95, 0x5f0e5304, + 0x81ed6f61, 0x20e74364, 0xb45e1378, 0xde18639b, 0x881ca122, + 0xb96726d1, 0x8049a7e8, 0x22b7da7b, + 0x5e552d25, 0x5272d237, 0x79d2951c, 0xc60d894c, 0x488cb402, + 0x1ba4fe5b, 0xa4b09f6b, 0x1ca815cf, + 0xa20c3005, 0x8871df63, 0xb9de2fcb, 0x0cc6c9e9, 0x0beeff53, + 0xe3214517, 0xb4542835, 0x9f63293c, + 0xee41e729, 0x6e1d2d7c, 0x50045286, 0x1e6685f3, 0xf33401c6, + 0x30a22c95, 0x31a70850, 0x60930f13, + 0x73f98417, 0xa1269859, 0xec645c44, 0x52c877a9, 0xcdff33a6, + 0xa02b1741, 0x7cbad9a2, 0x2180036f, + 0x50d99c08, 0xcb3f4861, 
0xc26bd765, 0x64a3f6ab, 0x80342676, + 0x25a75e7b, 0xe4e6d1fc, 0x20c710e6, + 0xcdf0b680, 0x17844d3b, 0x31eef84d, 0x7e0824e4, 0x2ccb49eb, + 0x846a3bae, 0x8ff77888, 0xee5d60f6, + 0x7af75673, 0x2fdd5cdb, 0xa11631c1, 0x30f66f43, 0xb3faec54, + 0x157fd7fa, 0xef8579cc, 0xd152de58, + 0xdb2ffd5e, 0x8f32ce19, 0x306af97a, 0x02f03ef8, 0x99319ad5, + 0xc242fa0f, 0xa7e3ebb0, 0xc68e4906, + 0xb8da230c, 0x80823028, 0xdcdef3c8, 0xd35fb171, 0x088a1bc8, + 0xbec0c560, 0x61a3c9e8, 0xbca8f54d, + 0xc72feffa, 0x22822e99, 0x82c570b4, 0xd8d94e89, 0x8b1c34bc, + 0x301e16e6, 0x273be979, 0xb0ffeaa6, + 0x61d9b8c6, 0x00b24869, 0xb7ffce3f, 0x08dc283b, 0x43daf65a, + 0xf7e19798, 0x7619b72f, 0x8f1c9ba4, + 0xdc8637a0, 0x16a7d3b1, 0x9fc393b7, 0xa7136eeb, 0xc6bcc63e, + 0x1a513742, 0xef6828bc, 0x520365d6, + 0x2d6a77ab, 0x3527ed4b, 0x821fd216, 0x095c6e2e, 0xdb92f2fb, + 0x5eea29cb, 0x145892f5, 0x91584f7f, + 0x5483697b, 0x2667a8cc, 0x85196048, 0x8c4bacea, 0x833860d4, + 0x0d23e0f9, 0x6c387e8a, 0x0ae6d249, + 0xb284600c, 0xd835731d, 0xdcb1c647, 0xac4c56ea, 0x3ebd81b3, + 0x230eabb0, 0x6438bc87, 0xf0b5b1fa, + 0x8f5ea2b3, 0xfc184642, 0x0a036b7a, 0x4fb089bd, 0x649da589, + 0xa345415e, 0x5c038323, 0x3e5d3bb9, + 0x43d79572, 0x7e6dd07c, 0x06dfdf1e, 0x6c6cc4ef, 0x7160a539, + 0x73bfbe70, 0x83877605, 0x4523ecf1 +}; +EXPORT_SYMBOL_GPL(cast6_s2); + +const u32 cast6_s3[256] = { + 0x8defc240, 0x25fa5d9f, 0xeb903dbf, 0xe810c907, 0x47607fff, + 0x369fe44b, 0x8c1fc644, 0xaececa90, + 0xbeb1f9bf, 0xeefbcaea, 0xe8cf1950, 0x51df07ae, 0x920e8806, + 0xf0ad0548, 0xe13c8d83, 0x927010d5, + 0x11107d9f, 0x07647db9, 0xb2e3e4d4, 0x3d4f285e, 0xb9afa820, + 0xfade82e0, 0xa067268b, 0x8272792e, + 0x553fb2c0, 0x489ae22b, 0xd4ef9794, 0x125e3fbc, 0x21fffcee, + 0x825b1bfd, 0x9255c5ed, 0x1257a240, + 0x4e1a8302, 0xbae07fff, 0x528246e7, 0x8e57140e, 0x3373f7bf, + 0x8c9f8188, 0xa6fc4ee8, 0xc982b5a5, + 0xa8c01db7, 0x579fc264, 0x67094f31, 0xf2bd3f5f, 0x40fff7c1, + 0x1fb78dfc, 0x8e6bd2c1, 0x437be59b, + 0x99b03dbf, 0xb5dbc64b, 0x638dc0e6, 0x55819d99, 0xa197c81c, + 0x4a012d6e, 0xc5884a28, 0xccc36f71, + 0xb843c213, 0x6c0743f1, 0x8309893c, 0x0feddd5f, 0x2f7fe850, + 0xd7c07f7e, 0x02507fbf, 0x5afb9a04, + 0xa747d2d0, 0x1651192e, 0xaf70bf3e, 0x58c31380, 0x5f98302e, + 0x727cc3c4, 0x0a0fb402, 0x0f7fef82, + 0x8c96fdad, 0x5d2c2aae, 0x8ee99a49, 0x50da88b8, 0x8427f4a0, + 0x1eac5790, 0x796fb449, 0x8252dc15, + 0xefbd7d9b, 0xa672597d, 0xada840d8, 0x45f54504, 0xfa5d7403, + 0xe83ec305, 0x4f91751a, 0x925669c2, + 0x23efe941, 0xa903f12e, 0x60270df2, 0x0276e4b6, 0x94fd6574, + 0x927985b2, 0x8276dbcb, 0x02778176, + 0xf8af918d, 0x4e48f79e, 0x8f616ddf, 0xe29d840e, 0x842f7d83, + 0x340ce5c8, 0x96bbb682, 0x93b4b148, + 0xef303cab, 0x984faf28, 0x779faf9b, 0x92dc560d, 0x224d1e20, + 0x8437aa88, 0x7d29dc96, 0x2756d3dc, + 0x8b907cee, 0xb51fd240, 0xe7c07ce3, 0xe566b4a1, 0xc3e9615e, + 0x3cf8209d, 0x6094d1e3, 0xcd9ca341, + 0x5c76460e, 0x00ea983b, 0xd4d67881, 0xfd47572c, 0xf76cedd9, + 0xbda8229c, 0x127dadaa, 0x438a074e, + 0x1f97c090, 0x081bdb8a, 0x93a07ebe, 0xb938ca15, 0x97b03cff, + 0x3dc2c0f8, 0x8d1ab2ec, 0x64380e51, + 0x68cc7bfb, 0xd90f2788, 0x12490181, 0x5de5ffd4, 0xdd7ef86a, + 0x76a2e214, 0xb9a40368, 0x925d958f, + 0x4b39fffa, 0xba39aee9, 0xa4ffd30b, 0xfaf7933b, 0x6d498623, + 0x193cbcfa, 0x27627545, 0x825cf47a, + 0x61bd8ba0, 0xd11e42d1, 0xcead04f4, 0x127ea392, 0x10428db7, + 0x8272a972, 0x9270c4a8, 0x127de50b, + 0x285ba1c8, 0x3c62f44f, 0x35c0eaa5, 0xe805d231, 0x428929fb, + 0xb4fcdf82, 0x4fb66a53, 0x0e7dc15b, + 0x1f081fab, 0x108618ae, 0xfcfd086d, 0xf9ff2889, 0x694bcc11, + 0x236a5cae, 0x12deca4d, 0x2c3f8cc5, + 0xd2d02dfe, 
0xf8ef5896, 0xe4cf52da, 0x95155b67, 0x494a488c, + 0xb9b6a80c, 0x5c8f82bc, 0x89d36b45, + 0x3a609437, 0xec00c9a9, 0x44715253, 0x0a874b49, 0xd773bc40, + 0x7c34671c, 0x02717ef6, 0x4feb5536, + 0xa2d02fff, 0xd2bf60c4, 0xd43f03c0, 0x50b4ef6d, 0x07478cd1, + 0x006e1888, 0xa2e53f55, 0xb9e6d4bc, + 0xa2048016, 0x97573833, 0xd7207d67, 0xde0f8f3d, 0x72f87b33, + 0xabcc4f33, 0x7688c55d, 0x7b00a6b0, + 0x947b0001, 0x570075d2, 0xf9bb88f8, 0x8942019e, 0x4264a5ff, + 0x856302e0, 0x72dbd92b, 0xee971b69, + 0x6ea22fde, 0x5f08ae2b, 0xaf7a616d, 0xe5c98767, 0xcf1febd2, + 0x61efc8c2, 0xf1ac2571, 0xcc8239c2, + 0x67214cb8, 0xb1e583d1, 0xb7dc3e62, 0x7f10bdce, 0xf90a5c38, + 0x0ff0443d, 0x606e6dc6, 0x60543a49, + 0x5727c148, 0x2be98a1d, 0x8ab41738, 0x20e1be24, 0xaf96da0f, + 0x68458425, 0x99833be5, 0x600d457d, + 0x282f9350, 0x8334b362, 0xd91d1120, 0x2b6d8da0, 0x642b1e31, + 0x9c305a00, 0x52bce688, 0x1b03588a, + 0xf7baefd5, 0x4142ed9c, 0xa4315c11, 0x83323ec5, 0xdfef4636, + 0xa133c501, 0xe9d3531c, 0xee353783 +}; +EXPORT_SYMBOL_GPL(cast6_s3); + +const u32 cast6_s4[256] = { + 0x9db30420, 0x1fb6e9de, 0xa7be7bef, 0xd273a298, 0x4a4f7bdb, + 0x64ad8c57, 0x85510443, 0xfa020ed1, + 0x7e287aff, 0xe60fb663, 0x095f35a1, 0x79ebf120, 0xfd059d43, + 0x6497b7b1, 0xf3641f63, 0x241e4adf, + 0x28147f5f, 0x4fa2b8cd, 0xc9430040, 0x0cc32220, 0xfdd30b30, + 0xc0a5374f, 0x1d2d00d9, 0x24147b15, + 0xee4d111a, 0x0fca5167, 0x71ff904c, 0x2d195ffe, 0x1a05645f, + 0x0c13fefe, 0x081b08ca, 0x05170121, + 0x80530100, 0xe83e5efe, 0xac9af4f8, 0x7fe72701, 0xd2b8ee5f, + 0x06df4261, 0xbb9e9b8a, 0x7293ea25, + 0xce84ffdf, 0xf5718801, 0x3dd64b04, 0xa26f263b, 0x7ed48400, + 0x547eebe6, 0x446d4ca0, 0x6cf3d6f5, + 0x2649abdf, 0xaea0c7f5, 0x36338cc1, 0x503f7e93, 0xd3772061, + 0x11b638e1, 0x72500e03, 0xf80eb2bb, + 0xabe0502e, 0xec8d77de, 0x57971e81, 0xe14f6746, 0xc9335400, + 0x6920318f, 0x081dbb99, 0xffc304a5, + 0x4d351805, 0x7f3d5ce3, 0xa6c866c6, 0x5d5bcca9, 0xdaec6fea, + 0x9f926f91, 0x9f46222f, 0x3991467d, + 0xa5bf6d8e, 0x1143c44f, 0x43958302, 0xd0214eeb, 0x022083b8, + 0x3fb6180c, 0x18f8931e, 0x281658e6, + 0x26486e3e, 0x8bd78a70, 0x7477e4c1, 0xb506e07c, 0xf32d0a25, + 0x79098b02, 0xe4eabb81, 0x28123b23, + 0x69dead38, 0x1574ca16, 0xdf871b62, 0x211c40b7, 0xa51a9ef9, + 0x0014377b, 0x041e8ac8, 0x09114003, + 0xbd59e4d2, 0xe3d156d5, 0x4fe876d5, 0x2f91a340, 0x557be8de, + 0x00eae4a7, 0x0ce5c2ec, 0x4db4bba6, + 0xe756bdff, 0xdd3369ac, 0xec17b035, 0x06572327, 0x99afc8b0, + 0x56c8c391, 0x6b65811c, 0x5e146119, + 0x6e85cb75, 0xbe07c002, 0xc2325577, 0x893ff4ec, 0x5bbfc92d, + 0xd0ec3b25, 0xb7801ab7, 0x8d6d3b24, + 0x20c763ef, 0xc366a5fc, 0x9c382880, 0x0ace3205, 0xaac9548a, + 0xeca1d7c7, 0x041afa32, 0x1d16625a, + 0x6701902c, 0x9b757a54, 0x31d477f7, 0x9126b031, 0x36cc6fdb, + 0xc70b8b46, 0xd9e66a48, 0x56e55a79, + 0x026a4ceb, 0x52437eff, 0x2f8f76b4, 0x0df980a5, 0x8674cde3, + 0xedda04eb, 0x17a9be04, 0x2c18f4df, + 0xb7747f9d, 0xab2af7b4, 0xefc34d20, 0x2e096b7c, 0x1741a254, + 0xe5b6a035, 0x213d42f6, 0x2c1c7c26, + 0x61c2f50f, 0x6552daf9, 0xd2c231f8, 0x25130f69, 0xd8167fa2, + 0x0418f2c8, 0x001a96a6, 0x0d1526ab, + 0x63315c21, 0x5e0a72ec, 0x49bafefd, 0x187908d9, 0x8d0dbd86, + 0x311170a7, 0x3e9b640c, 0xcc3e10d7, + 0xd5cad3b6, 0x0caec388, 0xf73001e1, 0x6c728aff, 0x71eae2a1, + 0x1f9af36e, 0xcfcbd12f, 0xc1de8417, + 0xac07be6b, 0xcb44a1d8, 0x8b9b0f56, 0x013988c3, 0xb1c52fca, + 0xb4be31cd, 0xd8782806, 0x12a3a4e2, + 0x6f7de532, 0x58fd7eb6, 0xd01ee900, 0x24adffc2, 0xf4990fc5, + 0x9711aac5, 0x001d7b95, 0x82e5e7d2, + 0x109873f6, 0x00613096, 0xc32d9521, 0xada121ff, 0x29908415, + 0x7fbb977f, 0xaf9eb3db, 0x29c9ed2a, + 
0x5ce2a465, 0xa730f32c, 0xd0aa3fe8, 0x8a5cc091, 0xd49e2ce7, + 0x0ce454a9, 0xd60acd86, 0x015f1919, + 0x77079103, 0xdea03af6, 0x78a8565e, 0xdee356df, 0x21f05cbe, + 0x8b75e387, 0xb3c50651, 0xb8a5c3ef, + 0xd8eeb6d2, 0xe523be77, 0xc2154529, 0x2f69efdf, 0xafe67afb, + 0xf470c4b2, 0xf3e0eb5b, 0xd6cc9876, + 0x39e4460c, 0x1fda8538, 0x1987832f, 0xca007367, 0xa99144f8, + 0x296b299e, 0x492fc295, 0x9266beab, + 0xb5676e69, 0x9bd3ddda, 0xdf7e052f, 0xdb25701c, 0x1b5e51ee, + 0xf65324e6, 0x6afce36c, 0x0316cc04, + 0x8644213e, 0xb7dc59d0, 0x7965291f, 0xccd6fd43, 0x41823979, + 0x932bcdf6, 0xb657c34d, 0x4edfd282, + 0x7ae5290c, 0x3cb9536b, 0x851e20fe, 0x9833557e, 0x13ecf0b0, + 0xd3ffb372, 0x3f85c5c1, 0x0aef7ed2 +}; +EXPORT_SYMBOL_GPL(cast6_s4); + +static const u32 Tm[24][8] = { + { 0x5a827999, 0xc95c653a, 0x383650db, 0xa7103c7c, 0x15ea281d, + 0x84c413be, 0xf39dff5f, 0x6277eb00 } , + { 0xd151d6a1, 0x402bc242, 0xaf05ade3, 0x1ddf9984, 0x8cb98525, + 0xfb9370c6, 0x6a6d5c67, 0xd9474808 } , + { 0x482133a9, 0xb6fb1f4a, 0x25d50aeb, 0x94aef68c, 0x0388e22d, + 0x7262cdce, 0xe13cb96f, 0x5016a510 } , + { 0xbef090b1, 0x2dca7c52, 0x9ca467f3, 0x0b7e5394, 0x7a583f35, + 0xe9322ad6, 0x580c1677, 0xc6e60218 } , + { 0x35bfedb9, 0xa499d95a, 0x1373c4fb, 0x824db09c, 0xf1279c3d, + 0x600187de, 0xcedb737f, 0x3db55f20 } , + { 0xac8f4ac1, 0x1b693662, 0x8a432203, 0xf91d0da4, 0x67f6f945, + 0xd6d0e4e6, 0x45aad087, 0xb484bc28 } , + { 0x235ea7c9, 0x9238936a, 0x01127f0b, 0x6fec6aac, 0xdec6564d, + 0x4da041ee, 0xbc7a2d8f, 0x2b541930 } , + { 0x9a2e04d1, 0x0907f072, 0x77e1dc13, 0xe6bbc7b4, 0x5595b355, + 0xc46f9ef6, 0x33498a97, 0xa2237638 } , + { 0x10fd61d9, 0x7fd74d7a, 0xeeb1391b, 0x5d8b24bc, 0xcc65105d, + 0x3b3efbfe, 0xaa18e79f, 0x18f2d340 } , + { 0x87ccbee1, 0xf6a6aa82, 0x65809623, 0xd45a81c4, 0x43346d65, + 0xb20e5906, 0x20e844a7, 0x8fc23048 } , + { 0xfe9c1be9, 0x6d76078a, 0xdc4ff32b, 0x4b29decc, 0xba03ca6d, + 0x28ddb60e, 0x97b7a1af, 0x06918d50 } , + { 0x756b78f1, 0xe4456492, 0x531f5033, 0xc1f93bd4, 0x30d32775, + 0x9fad1316, 0x0e86feb7, 0x7d60ea58 } , + { 0xec3ad5f9, 0x5b14c19a, 0xc9eead3b, 0x38c898dc, 0xa7a2847d, + 0x167c701e, 0x85565bbf, 0xf4304760 } , + { 0x630a3301, 0xd1e41ea2, 0x40be0a43, 0xaf97f5e4, 0x1e71e185, + 0x8d4bcd26, 0xfc25b8c7, 0x6affa468 } , + { 0xd9d99009, 0x48b37baa, 0xb78d674b, 0x266752ec, 0x95413e8d, + 0x041b2a2e, 0x72f515cf, 0xe1cf0170 } , + { 0x50a8ed11, 0xbf82d8b2, 0x2e5cc453, 0x9d36aff4, 0x0c109b95, + 0x7aea8736, 0xe9c472d7, 0x589e5e78 } , + { 0xc7784a19, 0x365235ba, 0xa52c215b, 0x14060cfc, 0x82dff89d, + 0xf1b9e43e, 0x6093cfdf, 0xcf6dbb80 } , + { 0x3e47a721, 0xad2192c2, 0x1bfb7e63, 0x8ad56a04, 0xf9af55a5, + 0x68894146, 0xd7632ce7, 0x463d1888 } , + { 0xb5170429, 0x23f0efca, 0x92cadb6b, 0x01a4c70c, 0x707eb2ad, + 0xdf589e4e, 0x4e3289ef, 0xbd0c7590 } , + { 0x2be66131, 0x9ac04cd2, 0x099a3873, 0x78742414, 0xe74e0fb5, + 0x5627fb56, 0xc501e6f7, 0x33dbd298 } , + { 0xa2b5be39, 0x118fa9da, 0x8069957b, 0xef43811c, 0x5e1d6cbd, + 0xccf7585e, 0x3bd143ff, 0xaaab2fa0 } , + { 0x19851b41, 0x885f06e2, 0xf738f283, 0x6612de24, 0xd4ecc9c5, + 0x43c6b566, 0xb2a0a107, 0x217a8ca8 } , + { 0x90547849, 0xff2e63ea, 0x6e084f8b, 0xdce23b2c, 0x4bbc26cd, + 0xba96126e, 0x296ffe0f, 0x9849e9b0 } , + { 0x0723d551, 0x75fdc0f2, 0xe4d7ac93, 0x53b19834, 0xc28b83d5, + 0x31656f76, 0xa03f5b17, 0x0f1946b8 } +}; + +static const u8 Tr[4][8] = { + { 0x13, 0x04, 0x15, 0x06, 0x17, 0x08, 0x19, 0x0a } , + { 0x1b, 0x0c, 0x1d, 0x0e, 0x1f, 0x10, 0x01, 0x12 } , + { 0x03, 0x14, 0x05, 0x16, 0x07, 0x18, 0x09, 0x1a } , + { 0x0b, 0x1c, 0x0d, 0x1e, 0x0f, 0x00, 0x11, 0x02 } +}; + +/* forward 
octave */ +static void W(u32 *key, unsigned int i) +{ + u32 I; + key[6] ^= F1(key[7], Tr[i % 4][0], Tm[i][0]); + key[5] ^= F2(key[6], Tr[i % 4][1], Tm[i][1]); + key[4] ^= F3(key[5], Tr[i % 4][2], Tm[i][2]); + key[3] ^= F1(key[4], Tr[i % 4][3], Tm[i][3]); + key[2] ^= F2(key[3], Tr[i % 4][4], Tm[i][4]); + key[1] ^= F3(key[2], Tr[i % 4][5], Tm[i][5]); + key[0] ^= F1(key[1], Tr[i % 4][6], Tm[i][6]); + key[7] ^= F2(key[0], Tr[i % 4][7], Tm[i][7]); +} + +int __cast6_setkey(struct cast6_ctx *c, const u8 *in_key, + unsigned key_len, u32 *flags) +{ + int i; + u32 key[8]; + __be32 p_key[8]; /* padded key */ + + if (key_len % 4 != 0) { + *flags |= CRYPTO_TFM_RES_BAD_KEY_LEN; + return -EINVAL; + } + + memset(p_key, 0, 32); + memcpy(p_key, in_key, key_len); + + key[0] = be32_to_cpu(p_key[0]); /* A */ + key[1] = be32_to_cpu(p_key[1]); /* B */ + key[2] = be32_to_cpu(p_key[2]); /* C */ + key[3] = be32_to_cpu(p_key[3]); /* D */ + key[4] = be32_to_cpu(p_key[4]); /* E */ + key[5] = be32_to_cpu(p_key[5]); /* F */ + key[6] = be32_to_cpu(p_key[6]); /* G */ + key[7] = be32_to_cpu(p_key[7]); /* H */ + + for (i = 0; i < 12; i++) { + W(key, 2 * i); + W(key, 2 * i + 1); + + c->Kr[i][0] = key[0] & 0x1f; + c->Kr[i][1] = key[2] & 0x1f; + c->Kr[i][2] = key[4] & 0x1f; + c->Kr[i][3] = key[6] & 0x1f; + + c->Km[i][0] = key[7]; + c->Km[i][1] = key[5]; + c->Km[i][2] = key[3]; + c->Km[i][3] = key[1]; + } + + return 0; +} +EXPORT_SYMBOL_GPL(__cast6_setkey); + +int cast6_setkey(struct crypto_tfm *tfm, const u8 *key, unsigned int keylen) +{ + return __cast6_setkey(crypto_tfm_ctx(tfm), key, keylen, + &tfm->crt_flags); +} +EXPORT_SYMBOL_GPL(cast6_setkey); + +/*forward quad round*/ +static void Q(u32 *block, u8 *Kr, u32 *Km) +{ + u32 I; + block[2] ^= F1(block[3], Kr[0], Km[0]); + block[1] ^= F2(block[2], Kr[1], Km[1]); + block[0] ^= F3(block[1], Kr[2], Km[2]); + block[3] ^= F1(block[0], Kr[3], Km[3]); +} + +/*reverse quad round*/ +static void QBAR(u32 *block, u8 *Kr, u32 *Km) +{ + u32 I; + block[3] ^= F1(block[0], Kr[3], Km[3]); + block[0] ^= F3(block[1], Kr[2], Km[2]); + block[1] ^= F2(block[2], Kr[1], Km[1]); + block[2] ^= F1(block[3], Kr[0], Km[0]); +} + +void __cast6_encrypt(struct cast6_ctx *c, u8 *outbuf, const u8 *inbuf) +{ + const __be32 *src = (const __be32 *)inbuf; + __be32 *dst = (__be32 *)outbuf; + u32 block[4]; + u32 *Km; + u8 *Kr; + + block[0] = be32_to_cpu(src[0]); + block[1] = be32_to_cpu(src[1]); + block[2] = be32_to_cpu(src[2]); + block[3] = be32_to_cpu(src[3]); + + Km = c->Km[0]; Kr = c->Kr[0]; Q(block, Kr, Km); + Km = c->Km[1]; Kr = c->Kr[1]; Q(block, Kr, Km); + Km = c->Km[2]; Kr = c->Kr[2]; Q(block, Kr, Km); + Km = c->Km[3]; Kr = c->Kr[3]; Q(block, Kr, Km); + Km = c->Km[4]; Kr = c->Kr[4]; Q(block, Kr, Km); + Km = c->Km[5]; Kr = c->Kr[5]; Q(block, Kr, Km); + Km = c->Km[6]; Kr = c->Kr[6]; QBAR(block, Kr, Km); + Km = c->Km[7]; Kr = c->Kr[7]; QBAR(block, Kr, Km); + Km = c->Km[8]; Kr = c->Kr[8]; QBAR(block, Kr, Km); + Km = c->Km[9]; Kr = c->Kr[9]; QBAR(block, Kr, Km); + Km = c->Km[10]; Kr = c->Kr[10]; QBAR(block, Kr, Km); + Km = c->Km[11]; Kr = c->Kr[11]; QBAR(block, Kr, Km); + + dst[0] = cpu_to_be32(block[0]); + dst[1] = cpu_to_be32(block[1]); + dst[2] = cpu_to_be32(block[2]); + dst[3] = cpu_to_be32(block[3]); +} +EXPORT_SYMBOL_GPL(__cast6_encrypt); + +static void cast6_encrypt(struct crypto_tfm *tfm, u8 *outbuf, const u8 *inbuf) +{ + __cast6_encrypt(crypto_tfm_ctx(tfm), outbuf, inbuf); +} + +void __cast6_decrypt(struct cast6_ctx *c, u8 *outbuf, const u8 *inbuf) +{ + const __be32 *src = (const __be32 *)inbuf; + 
__be32 *dst = (__be32 *)outbuf; + u32 block[4]; + u32 *Km; + u8 *Kr; + + block[0] = be32_to_cpu(src[0]); + block[1] = be32_to_cpu(src[1]); + block[2] = be32_to_cpu(src[2]); + block[3] = be32_to_cpu(src[3]); + + Km = c->Km[11]; Kr = c->Kr[11]; Q(block, Kr, Km); + Km = c->Km[10]; Kr = c->Kr[10]; Q(block, Kr, Km); + Km = c->Km[9]; Kr = c->Kr[9]; Q(block, Kr, Km); + Km = c->Km[8]; Kr = c->Kr[8]; Q(block, Kr, Km); + Km = c->Km[7]; Kr = c->Kr[7]; Q(block, Kr, Km); + Km = c->Km[6]; Kr = c->Kr[6]; Q(block, Kr, Km); + Km = c->Km[5]; Kr = c->Kr[5]; QBAR(block, Kr, Km); + Km = c->Km[4]; Kr = c->Kr[4]; QBAR(block, Kr, Km); + Km = c->Km[3]; Kr = c->Kr[3]; QBAR(block, Kr, Km); + Km = c->Km[2]; Kr = c->Kr[2]; QBAR(block, Kr, Km); + Km = c->Km[1]; Kr = c->Kr[1]; QBAR(block, Kr, Km); + Km = c->Km[0]; Kr = c->Kr[0]; QBAR(block, Kr, Km); + + dst[0] = cpu_to_be32(block[0]); + dst[1] = cpu_to_be32(block[1]); + dst[2] = cpu_to_be32(block[2]); + dst[3] = cpu_to_be32(block[3]); +} +EXPORT_SYMBOL_GPL(__cast6_decrypt); + +static void cast6_decrypt(struct crypto_tfm *tfm, u8 *outbuf, const u8 *inbuf) +{ + __cast6_decrypt(crypto_tfm_ctx(tfm), outbuf, inbuf); +} + +static struct crypto_alg alg = { + .cra_name = "cast6", + .cra_driver_name = "cast6-generic", + .cra_priority = 100, + .cra_flags = CRYPTO_ALG_TYPE_CIPHER, + .cra_blocksize = CAST6_BLOCK_SIZE, + .cra_ctxsize = sizeof(struct cast6_ctx), + .cra_alignmask = 3, + .cra_module = THIS_MODULE, + .cra_u = { + .cipher = { + .cia_min_keysize = CAST6_MIN_KEY_SIZE, + .cia_max_keysize = CAST6_MAX_KEY_SIZE, + .cia_setkey = cast6_setkey, + .cia_encrypt = cast6_encrypt, + .cia_decrypt = cast6_decrypt} + } +}; + +static int __init cast6_mod_init(void) +{ + return crypto_register_alg(&alg); +} + +static void __exit cast6_mod_fini(void) +{ + crypto_unregister_alg(&alg); +} + +module_init(cast6_mod_init); +module_exit(cast6_mod_fini); + +MODULE_LICENSE("GPL"); +MODULE_DESCRIPTION("Cast6 Cipher Algorithm"); +MODULE_ALIAS("cast6"); diff --git a/include/crypto/cast6.h b/include/crypto/cast6.h new file mode 100644 index 000000000000..02dde6b77e15 --- /dev/null +++ b/include/crypto/cast6.h @@ -0,0 +1,23 @@ +#ifndef _CRYPTO_CAST6_H +#define _CRYPTO_CAST6_H + +#include +#include + +#define CAST6_BLOCK_SIZE 16 +#define CAST6_MIN_KEY_SIZE 16 +#define CAST6_MAX_KEY_SIZE 32 + +struct cast6_ctx { + u32 Km[12][4]; + u8 Kr[12][4]; +}; + +int __cast6_setkey(struct cast6_ctx *ctx, const u8 *key, + unsigned int keylen, u32 *flags); +int cast6_setkey(struct crypto_tfm *tfm, const u8 *key, unsigned int keylen); + +void __cast6_encrypt(struct cast6_ctx *ctx, u8 *dst, const u8 *src); +void __cast6_decrypt(struct cast6_ctx *ctx, u8 *dst, const u8 *src); + +#endif -- cgit v1.2.3 From 9b8b04051d0df1e2c7c31206caff05673a2c685f Mon Sep 17 00:00:00 2001 From: Johannes Goetzfried Date: Wed, 11 Jul 2012 19:38:29 +0200 Subject: crypto: testmgr - add larger cast6 testvectors New ECB, CBC, CTR, LRW and XTS testvectors for cast6. We need larger testvectors to check parallel code paths in the optimized implementation. Tests have also been added to the tcrypt module. 
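The split into crypto/cast6_generic.c is what the next patch builds on: the s-boxes and the __cast6_setkey/__cast6_encrypt/__cast6_decrypt helpers are now exported through the new include/crypto/cast6.h, so an optimized (for example assembler-accelerated) cast6 module can reuse the generic key schedule and single-block routines instead of duplicating them, and the larger test vectors added below are intended to exercise the parallel code paths of such an implementation. What follows is a minimal sketch under assumptions, not an in-tree driver: the cast6_opt naming, the "cast6-opt" driver name and the priority value are illustrative only, while the crypto_alg wiring simply mirrors the generic module shown above.

/*
 * Hypothetical glue module reusing the exports from cast6_generic.c.
 * Everything named cast6_opt_* is an illustrative assumption.
 */
#include <linux/module.h>
#include <linux/crypto.h>
#include <crypto/cast6.h>

static int cast6_opt_setkey(struct crypto_tfm *tfm, const u8 *key,
			    unsigned int keylen)
{
	/* Reuse the generic key schedule instead of duplicating it. */
	return __cast6_setkey(crypto_tfm_ctx(tfm), key, keylen,
			      &tfm->crt_flags);
}

static void cast6_opt_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	/* A real optimized module would branch to its fast path here and
	 * fall back to the exported generic C routine when necessary. */
	__cast6_encrypt(crypto_tfm_ctx(tfm), dst, src);
}

static void cast6_opt_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	__cast6_decrypt(crypto_tfm_ctx(tfm), dst, src);
}

static struct crypto_alg cast6_opt_alg = {
	.cra_name		= "cast6",
	.cra_driver_name	= "cast6-opt",	/* hypothetical name */
	.cra_priority		= 200,		/* above cast6-generic (100) */
	.cra_flags		= CRYPTO_ALG_TYPE_CIPHER,
	.cra_blocksize		= CAST6_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct cast6_ctx),
	.cra_module		= THIS_MODULE,
	.cra_u = {
		.cipher = {
			.cia_min_keysize	= CAST6_MIN_KEY_SIZE,
			.cia_max_keysize	= CAST6_MAX_KEY_SIZE,
			.cia_setkey		= cast6_opt_setkey,
			.cia_encrypt		= cast6_opt_encrypt,
			.cia_decrypt		= cast6_opt_decrypt,
		}
	}
};

static int __init cast6_opt_init(void)
{
	return crypto_register_alg(&cast6_opt_alg);
}

static void __exit cast6_opt_exit(void)
{
	crypto_unregister_alg(&cast6_opt_alg);
}

module_init(cast6_opt_init);
module_exit(cast6_opt_exit);
MODULE_LICENSE("GPL");

With such a module loaded, the ecb/cbc/ctr/lrw/xts templates wrapped around "cast6" would pick the higher-priority driver, which is exactly the situation the 496-byte test vectors below are meant to cover.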
Signed-off-by: Johannes Goetzfried Signed-off-by: Herbert Xu --- crypto/tcrypt.c | 50 ++ crypto/testmgr.c | 60 +++ crypto/testmgr.h | 1412 +++++++++++++++++++++++++++++++++++++++++++++++++++++- 3 files changed, 1520 insertions(+), 2 deletions(-) (limited to 'crypto') diff --git a/crypto/tcrypt.c b/crypto/tcrypt.c index a94bbd77dc60..871076b1e0ab 100644 --- a/crypto/tcrypt.c +++ b/crypto/tcrypt.c @@ -1043,6 +1043,10 @@ static int do_test(int m) case 15: ret += tcrypt_test("ecb(cast6)"); + ret += tcrypt_test("cbc(cast6)"); + ret += tcrypt_test("ctr(cast6)"); + ret += tcrypt_test("lrw(cast6)"); + ret += tcrypt_test("xts(cast6)"); break; case 16: @@ -1376,6 +1380,29 @@ static int do_test(int m) speed_template_8_16); break; + case 210: + test_cipher_speed("ecb(cast6)", ENCRYPT, sec, NULL, 0, + speed_template_16_32); + test_cipher_speed("ecb(cast6)", DECRYPT, sec, NULL, 0, + speed_template_16_32); + test_cipher_speed("cbc(cast6)", ENCRYPT, sec, NULL, 0, + speed_template_16_32); + test_cipher_speed("cbc(cast6)", DECRYPT, sec, NULL, 0, + speed_template_16_32); + test_cipher_speed("ctr(cast6)", ENCRYPT, sec, NULL, 0, + speed_template_16_32); + test_cipher_speed("ctr(cast6)", DECRYPT, sec, NULL, 0, + speed_template_16_32); + test_cipher_speed("lrw(cast6)", ENCRYPT, sec, NULL, 0, + speed_template_32_48); + test_cipher_speed("lrw(cast6)", DECRYPT, sec, NULL, 0, + speed_template_32_48); + test_cipher_speed("xts(cast6)", ENCRYPT, sec, NULL, 0, + speed_template_32_64); + test_cipher_speed("xts(cast6)", DECRYPT, sec, NULL, 0, + speed_template_32_64); + break; + case 300: /* fall through */ @@ -1671,6 +1698,29 @@ static int do_test(int m) speed_template_8_16); break; + case 507: + test_acipher_speed("ecb(cast6)", ENCRYPT, sec, NULL, 0, + speed_template_16_32); + test_acipher_speed("ecb(cast6)", DECRYPT, sec, NULL, 0, + speed_template_16_32); + test_acipher_speed("cbc(cast6)", ENCRYPT, sec, NULL, 0, + speed_template_16_32); + test_acipher_speed("cbc(cast6)", DECRYPT, sec, NULL, 0, + speed_template_16_32); + test_acipher_speed("ctr(cast6)", ENCRYPT, sec, NULL, 0, + speed_template_16_32); + test_acipher_speed("ctr(cast6)", DECRYPT, sec, NULL, 0, + speed_template_16_32); + test_acipher_speed("lrw(cast6)", ENCRYPT, sec, NULL, 0, + speed_template_32_48); + test_acipher_speed("lrw(cast6)", DECRYPT, sec, NULL, 0, + speed_template_32_48); + test_acipher_speed("xts(cast6)", ENCRYPT, sec, NULL, 0, + speed_template_32_64); + test_acipher_speed("xts(cast6)", DECRYPT, sec, NULL, 0, + speed_template_32_64); + break; + case 1000: test_available(); break; diff --git a/crypto/testmgr.c b/crypto/testmgr.c index def0f430b667..cff3c1c3f83c 100644 --- a/crypto/testmgr.c +++ b/crypto/testmgr.c @@ -1877,6 +1877,21 @@ static const struct alg_test_desc alg_test_descs[] = { } } } + }, { + .alg = "cbc(cast6)", + .test = alg_test_skcipher, + .suite = { + .cipher = { + .enc = { + .vecs = cast6_cbc_enc_tv_template, + .count = CAST6_CBC_ENC_TEST_VECTORS + }, + .dec = { + .vecs = cast6_cbc_dec_tv_template, + .count = CAST6_CBC_DEC_TEST_VECTORS + } + } + } }, { .alg = "cbc(des)", .test = alg_test_skcipher, @@ -2143,6 +2158,21 @@ static const struct alg_test_desc alg_test_descs[] = { } } } + }, { + .alg = "ctr(cast6)", + .test = alg_test_skcipher, + .suite = { + .cipher = { + .enc = { + .vecs = cast6_ctr_enc_tv_template, + .count = CAST6_CTR_ENC_TEST_VECTORS + }, + .dec = { + .vecs = cast6_ctr_dec_tv_template, + .count = CAST6_CTR_DEC_TEST_VECTORS + } + } + } }, { .alg = "ctr(serpent)", .test = alg_test_skcipher, @@ -2619,6 +2649,21 @@ 
static const struct alg_test_desc alg_test_descs[] = { } } } + }, { + .alg = "lrw(cast6)", + .test = alg_test_skcipher, + .suite = { + .cipher = { + .enc = { + .vecs = cast6_lrw_enc_tv_template, + .count = CAST6_LRW_ENC_TEST_VECTORS + }, + .dec = { + .vecs = cast6_lrw_dec_tv_template, + .count = CAST6_LRW_DEC_TEST_VECTORS + } + } + } }, { .alg = "lrw(serpent)", .test = alg_test_skcipher, @@ -2971,6 +3016,21 @@ static const struct alg_test_desc alg_test_descs[] = { } } } + }, { + .alg = "xts(cast6)", + .test = alg_test_skcipher, + .suite = { + .cipher = { + .enc = { + .vecs = cast6_xts_enc_tv_template, + .count = CAST6_XTS_ENC_TEST_VECTORS + }, + .dec = { + .vecs = cast6_xts_dec_tv_template, + .count = CAST6_XTS_DEC_TEST_VECTORS + } + } + } }, { .alg = "xts(serpent)", .test = alg_test_skcipher, diff --git a/crypto/testmgr.h b/crypto/testmgr.h index 9309948a7028..6eb3ef5e3f88 100644 --- a/crypto/testmgr.h +++ b/crypto/testmgr.h @@ -6896,8 +6896,16 @@ static struct cipher_testvec serpent_xts_dec_tv_template[] = { }; /* Cast6 test vectors from RFC 2612 */ -#define CAST6_ENC_TEST_VECTORS 3 -#define CAST6_DEC_TEST_VECTORS 3 +#define CAST6_ENC_TEST_VECTORS 4 +#define CAST6_DEC_TEST_VECTORS 4 +#define CAST6_CBC_ENC_TEST_VECTORS 1 +#define CAST6_CBC_DEC_TEST_VECTORS 1 +#define CAST6_CTR_ENC_TEST_VECTORS 1 +#define CAST6_CTR_DEC_TEST_VECTORS 1 +#define CAST6_LRW_ENC_TEST_VECTORS 1 +#define CAST6_LRW_DEC_TEST_VECTORS 1 +#define CAST6_XTS_ENC_TEST_VECTORS 1 +#define CAST6_XTS_DEC_TEST_VECTORS 1 static struct cipher_testvec cast6_enc_tv_template[] = { { @@ -6930,6 +6938,140 @@ static struct cipher_testvec cast6_enc_tv_template[] = { .result = "\x4f\x6a\x20\x38\x28\x68\x97\xb9" "\xc9\x87\x01\x36\x55\x33\x17\xfa", .rlen = 16, + }, { /* Generated from TF test vectors */ + .key = "\x85\x62\x3F\x1C\xF9\xD6\x1C\xF9" + "\xD6\xB3\x90\x6D\x4A\x90\x6D\x4A" + "\x27\x04\xE1\x27\x04\xE1\xBE\x9B" + "\x78\xBE\x9B\x78\x55\x32\x0F\x55", + .klen = 32, + .iv = "\xE2\x24\x89\xEE\x53\xB8\x1D\x5F" + "\xC4\x29\x8E\xF3\x35\x9A\xFF\x64", + .input = "\x56\xED\x84\x1B\x8F\x26\xBD\x31" + "\xC8\x5F\xF6\x6A\x01\x98\x0C\xA3" + "\x3A\xD1\x45\xDC\x73\x0A\x7E\x15" + "\xAC\x20\xB7\x4E\xE5\x59\xF0\x87" + "\x1E\x92\x29\xC0\x34\xCB\x62\xF9" + "\x6D\x04\x9B\x0F\xA6\x3D\xD4\x48" + "\xDF\x76\x0D\x81\x18\xAF\x23\xBA" + "\x51\xE8\x5C\xF3\x8A\x21\x95\x2C" + "\xC3\x37\xCE\x65\xFC\x70\x07\x9E" + "\x12\xA9\x40\xD7\x4B\xE2\x79\x10" + "\x84\x1B\xB2\x26\xBD\x54\xEB\x5F" + "\xF6\x8D\x01\x98\x2F\xC6\x3A\xD1" + "\x68\xFF\x73\x0A\xA1\x15\xAC\x43" + "\xDA\x4E\xE5\x7C\x13\x87\x1E\xB5" + "\x29\xC0\x57\xEE\x62\xF9\x90\x04" + "\x9B\x32\xC9\x3D\xD4\x6B\x02\x76" + "\x0D\xA4\x18\xAF\x46\xDD\x51\xE8" + "\x7F\x16\x8A\x21\xB8\x2C\xC3\x5A" + "\xF1\x65\xFC\x93\x07\x9E\x35\xCC" + "\x40\xD7\x6E\x05\x79\x10\xA7\x1B" + "\xB2\x49\xE0\x54\xEB\x82\x19\x8D" + "\x24\xBB\x2F\xC6\x5D\xF4\x68\xFF" + "\x96\x0A\xA1\x38\xCF\x43\xDA\x71" + "\x08\x7C\x13\xAA\x1E\xB5\x4C\xE3" + "\x57\xEE\x85\x1C\x90\x27\xBE\x32" + "\xC9\x60\xF7\x6B\x02\x99\x0D\xA4" + "\x3B\xD2\x46\xDD\x74\x0B\x7F\x16" + "\xAD\x21\xB8\x4F\xE6\x5A\xF1\x88" + "\x1F\x93\x2A\xC1\x35\xCC\x63\xFA" + "\x6E\x05\x9C\x10\xA7\x3E\xD5\x49" + "\xE0\x77\x0E\x82\x19\xB0\x24\xBB" + "\x52\xE9\x5D\xF4\x8B\x22\x96\x2D" + "\xC4\x38\xCF\x66\xFD\x71\x08\x9F" + "\x13\xAA\x41\xD8\x4C\xE3\x7A\x11" + "\x85\x1C\xB3\x27\xBE\x55\xEC\x60" + "\xF7\x8E\x02\x99\x30\xC7\x3B\xD2" + "\x69\x00\x74\x0B\xA2\x16\xAD\x44" + "\xDB\x4F\xE6\x7D\x14\x88\x1F\xB6" + "\x2A\xC1\x58\xEF\x63\xFA\x91\x05" + "\x9C\x33\xCA\x3E\xD5\x6C\x03\x77" + 
"\x0E\xA5\x19\xB0\x47\xDE\x52\xE9" + "\x80\x17\x8B\x22\xB9\x2D\xC4\x5B" + "\xF2\x66\xFD\x94\x08\x9F\x36\xCD" + "\x41\xD8\x6F\x06\x7A\x11\xA8\x1C" + "\xB3\x4A\xE1\x55\xEC\x83\x1A\x8E" + "\x25\xBC\x30\xC7\x5E\xF5\x69\x00" + "\x97\x0B\xA2\x39\xD0\x44\xDB\x72" + "\x09\x7D\x14\xAB\x1F\xB6\x4D\xE4" + "\x58\xEF\x86\x1D\x91\x28\xBF\x33" + "\xCA\x61\xF8\x6C\x03\x9A\x0E\xA5" + "\x3C\xD3\x47\xDE\x75\x0C\x80\x17" + "\xAE\x22\xB9\x50\xE7\x5B\xF2\x89" + "\x20\x94\x2B\xC2\x36\xCD\x64\xFB" + "\x6F\x06\x9D\x11\xA8\x3F\xD6\x4A" + "\xE1\x78\x0F\x83\x1A\xB1\x25\xBC" + "\x53\xEA\x5E\xF5\x8C\x00\x97\x2E" + "\xC5\x39\xD0\x67\xFE\x72\x09\xA0" + "\x14\xAB\x42\xD9\x4D\xE4\x7B\x12" + "\x86\x1D\xB4\x28\xBF\x56\xED\x61" + "\xF8\x8F\x03\x9A\x31\xC8\x3C\xD3" + "\x6A\x01\x75\x0C\xA3\x17\xAE\x45" + "\xDC\x50\xE7\x7E\x15\x89\x20\xB7", + .ilen = 496, + .result = "\xC3\x70\x22\x32\xF5\x80\xCB\x54" + "\xFC\x30\xE0\xF6\xEB\x39\x57\xA6" + "\xB6\xB9\xC5\xA4\x91\x55\x14\x97" + "\xC1\x20\xFF\x6C\x5C\xF0\x67\xEA" + "\x2F\xED\xD8\xC9\xFB\x38\x3F\xFE" + "\x93\xBE\xDC\x00\xD3\x7F\xAD\x4C" + "\x5A\x08\x92\xD1\x47\x0C\xFA\x6C" + "\xD0\x6A\x99\x10\x72\xF8\x47\x62" + "\x81\x42\xF8\xD8\xF5\xBB\x94\x08" + "\xAA\x97\xA2\x8B\x69\xB3\xD2\x7E" + "\xBC\xB5\x00\x0C\xE5\x44\x4B\x58" + "\xE8\x63\xDC\xB3\xC4\xE5\x23\x12" + "\x5A\x72\x85\x47\x8B\xEC\x9F\x26" + "\x84\xB6\xED\x10\x33\x63\x9B\x5F" + "\x4D\x53\xEE\x94\x45\x8B\x60\x58" + "\x86\x20\xF9\x1E\x82\x08\x3E\x58" + "\x60\x1B\x34\x19\x02\xBE\x4E\x09" + "\xBB\x7C\x15\xCC\x60\x27\x55\x7A" + "\x12\xB8\xD8\x08\x89\x3C\xA6\xF3" + "\xF1\xDD\xA7\x07\xA3\x12\x85\x28" + "\xE9\x57\xAC\x80\x0C\x5C\x0F\x3A" + "\x5D\xC2\x91\xC7\x90\xE4\x8C\x43" + "\x92\xE4\x7C\x26\x69\x4D\x83\x68" + "\x14\x96\x42\x47\xBD\xA9\xE4\x8A" + "\x33\x19\xEB\x54\x8E\x0D\x4B\x6E" + "\x91\x51\xB5\x36\x08\xDE\x1C\x06" + "\x03\xBD\xDE\x81\x26\xF7\x99\xC2" + "\xBA\xF7\x6D\x87\x0D\xE4\xA6\xCF" + "\xC1\xF5\x27\x05\xB8\x02\x57\x72" + "\xE6\x42\x13\x0B\xC6\x47\x05\x74" + "\x24\x15\xF7\x0D\xC2\x23\x9D\xB9" + "\x3C\x77\x18\x93\xBA\xB4\xFC\x8C" + "\x98\x82\x67\x67\xB4\xD7\xD3\x43" + "\x23\x08\x02\xB7\x9B\x99\x05\xFB" + "\xD3\xB5\x00\x0A\xA9\x9D\x66\xD6" + "\x2E\x49\x58\xD0\xA8\x57\x29\x7F" + "\x0A\x0E\x7D\xFC\x92\x83\xCC\x67" + "\xA2\xB1\x70\x3A\x8F\x87\x4A\x8D" + "\x17\xE2\x58\x2B\x88\x0D\x68\x62" + "\xBF\x35\xD1\x6F\xC0\xF0\x18\x62" + "\xB2\xC7\x2D\x58\xC7\x16\xDE\x08" + "\xEB\x84\x1D\x25\xA7\x38\x94\x06" + "\x93\x9D\xF8\xFE\x88\x71\xE7\x84" + "\x2C\xA0\x38\xA3\x1D\x48\xCF\x29" + "\x0B\xBC\xD8\x50\x99\x1A\x26\xFB" + "\x8E\x75\x3D\x73\xEB\x6A\xED\x29" + "\xE0\x8E\xED\xFC\xFE\x6F\xF6\xBA" + "\x41\xE2\x10\x4C\x01\x8B\x69\x2B" + "\x25\x3F\x4D\x70\x7B\x92\xD6\x3B" + "\xAC\xF9\x77\x18\xD9\x6A\x30\xA6" + "\x2E\xFA\x30\xFF\xC8\xD5\x1D\x06" + "\x59\x28\x1D\x86\x43\x04\x5D\x3B" + "\x99\x4C\x04\x5A\x21\x17\x8B\x76" + "\x8F\x72\xCB\xA1\x9C\x29\x4C\xC3" + "\x65\xA2\x58\x2A\xC5\x66\x24\xBF" + "\xBA\xE6\x0C\xDD\x34\x24\x74\xC8" + "\x84\x0A\x66\x2C\xBE\x8F\x32\xA9" + "\xE7\xE4\xA1\xD7\xDA\xAB\x23\x1E" + "\xEB\xEE\x6C\x94\x6F\x9C\x2E\xD1" + "\x49\x2C\xF3\xD4\x90\xCC\x93\x4C" + "\x84\x52\x6D\x68\xDE\xC6\x64\xB2" + "\x11\x74\x93\x57\xB4\x7E\xC6\x00", + .rlen = 496, }, }; @@ -6964,6 +7106,1272 @@ static struct cipher_testvec cast6_dec_tv_template[] = { .ilen = 16, .result = zeroed_string, .rlen = 16, + }, { /* Generated from TF test vectors */ + .key = "\x85\x62\x3F\x1C\xF9\xD6\x1C\xF9" + "\xD6\xB3\x90\x6D\x4A\x90\x6D\x4A" + "\x27\x04\xE1\x27\x04\xE1\xBE\x9B" + "\x78\xBE\x9B\x78\x55\x32\x0F\x55", + .klen = 32, + .iv = "\xE2\x24\x89\xEE\x53\xB8\x1D\x5F" + 
"\xC4\x29\x8E\xF3\x35\x9A\xFF\x64", + .input = "\xC3\x70\x22\x32\xF5\x80\xCB\x54" + "\xFC\x30\xE0\xF6\xEB\x39\x57\xA6" + "\xB6\xB9\xC5\xA4\x91\x55\x14\x97" + "\xC1\x20\xFF\x6C\x5C\xF0\x67\xEA" + "\x2F\xED\xD8\xC9\xFB\x38\x3F\xFE" + "\x93\xBE\xDC\x00\xD3\x7F\xAD\x4C" + "\x5A\x08\x92\xD1\x47\x0C\xFA\x6C" + "\xD0\x6A\x99\x10\x72\xF8\x47\x62" + "\x81\x42\xF8\xD8\xF5\xBB\x94\x08" + "\xAA\x97\xA2\x8B\x69\xB3\xD2\x7E" + "\xBC\xB5\x00\x0C\xE5\x44\x4B\x58" + "\xE8\x63\xDC\xB3\xC4\xE5\x23\x12" + "\x5A\x72\x85\x47\x8B\xEC\x9F\x26" + "\x84\xB6\xED\x10\x33\x63\x9B\x5F" + "\x4D\x53\xEE\x94\x45\x8B\x60\x58" + "\x86\x20\xF9\x1E\x82\x08\x3E\x58" + "\x60\x1B\x34\x19\x02\xBE\x4E\x09" + "\xBB\x7C\x15\xCC\x60\x27\x55\x7A" + "\x12\xB8\xD8\x08\x89\x3C\xA6\xF3" + "\xF1\xDD\xA7\x07\xA3\x12\x85\x28" + "\xE9\x57\xAC\x80\x0C\x5C\x0F\x3A" + "\x5D\xC2\x91\xC7\x90\xE4\x8C\x43" + "\x92\xE4\x7C\x26\x69\x4D\x83\x68" + "\x14\x96\x42\x47\xBD\xA9\xE4\x8A" + "\x33\x19\xEB\x54\x8E\x0D\x4B\x6E" + "\x91\x51\xB5\x36\x08\xDE\x1C\x06" + "\x03\xBD\xDE\x81\x26\xF7\x99\xC2" + "\xBA\xF7\x6D\x87\x0D\xE4\xA6\xCF" + "\xC1\xF5\x27\x05\xB8\x02\x57\x72" + "\xE6\x42\x13\x0B\xC6\x47\x05\x74" + "\x24\x15\xF7\x0D\xC2\x23\x9D\xB9" + "\x3C\x77\x18\x93\xBA\xB4\xFC\x8C" + "\x98\x82\x67\x67\xB4\xD7\xD3\x43" + "\x23\x08\x02\xB7\x9B\x99\x05\xFB" + "\xD3\xB5\x00\x0A\xA9\x9D\x66\xD6" + "\x2E\x49\x58\xD0\xA8\x57\x29\x7F" + "\x0A\x0E\x7D\xFC\x92\x83\xCC\x67" + "\xA2\xB1\x70\x3A\x8F\x87\x4A\x8D" + "\x17\xE2\x58\x2B\x88\x0D\x68\x62" + "\xBF\x35\xD1\x6F\xC0\xF0\x18\x62" + "\xB2\xC7\x2D\x58\xC7\x16\xDE\x08" + "\xEB\x84\x1D\x25\xA7\x38\x94\x06" + "\x93\x9D\xF8\xFE\x88\x71\xE7\x84" + "\x2C\xA0\x38\xA3\x1D\x48\xCF\x29" + "\x0B\xBC\xD8\x50\x99\x1A\x26\xFB" + "\x8E\x75\x3D\x73\xEB\x6A\xED\x29" + "\xE0\x8E\xED\xFC\xFE\x6F\xF6\xBA" + "\x41\xE2\x10\x4C\x01\x8B\x69\x2B" + "\x25\x3F\x4D\x70\x7B\x92\xD6\x3B" + "\xAC\xF9\x77\x18\xD9\x6A\x30\xA6" + "\x2E\xFA\x30\xFF\xC8\xD5\x1D\x06" + "\x59\x28\x1D\x86\x43\x04\x5D\x3B" + "\x99\x4C\x04\x5A\x21\x17\x8B\x76" + "\x8F\x72\xCB\xA1\x9C\x29\x4C\xC3" + "\x65\xA2\x58\x2A\xC5\x66\x24\xBF" + "\xBA\xE6\x0C\xDD\x34\x24\x74\xC8" + "\x84\x0A\x66\x2C\xBE\x8F\x32\xA9" + "\xE7\xE4\xA1\xD7\xDA\xAB\x23\x1E" + "\xEB\xEE\x6C\x94\x6F\x9C\x2E\xD1" + "\x49\x2C\xF3\xD4\x90\xCC\x93\x4C" + "\x84\x52\x6D\x68\xDE\xC6\x64\xB2" + "\x11\x74\x93\x57\xB4\x7E\xC6\x00", + .ilen = 496, + .result = "\x56\xED\x84\x1B\x8F\x26\xBD\x31" + "\xC8\x5F\xF6\x6A\x01\x98\x0C\xA3" + "\x3A\xD1\x45\xDC\x73\x0A\x7E\x15" + "\xAC\x20\xB7\x4E\xE5\x59\xF0\x87" + "\x1E\x92\x29\xC0\x34\xCB\x62\xF9" + "\x6D\x04\x9B\x0F\xA6\x3D\xD4\x48" + "\xDF\x76\x0D\x81\x18\xAF\x23\xBA" + "\x51\xE8\x5C\xF3\x8A\x21\x95\x2C" + "\xC3\x37\xCE\x65\xFC\x70\x07\x9E" + "\x12\xA9\x40\xD7\x4B\xE2\x79\x10" + "\x84\x1B\xB2\x26\xBD\x54\xEB\x5F" + "\xF6\x8D\x01\x98\x2F\xC6\x3A\xD1" + "\x68\xFF\x73\x0A\xA1\x15\xAC\x43" + "\xDA\x4E\xE5\x7C\x13\x87\x1E\xB5" + "\x29\xC0\x57\xEE\x62\xF9\x90\x04" + "\x9B\x32\xC9\x3D\xD4\x6B\x02\x76" + "\x0D\xA4\x18\xAF\x46\xDD\x51\xE8" + "\x7F\x16\x8A\x21\xB8\x2C\xC3\x5A" + "\xF1\x65\xFC\x93\x07\x9E\x35\xCC" + "\x40\xD7\x6E\x05\x79\x10\xA7\x1B" + "\xB2\x49\xE0\x54\xEB\x82\x19\x8D" + "\x24\xBB\x2F\xC6\x5D\xF4\x68\xFF" + "\x96\x0A\xA1\x38\xCF\x43\xDA\x71" + "\x08\x7C\x13\xAA\x1E\xB5\x4C\xE3" + "\x57\xEE\x85\x1C\x90\x27\xBE\x32" + "\xC9\x60\xF7\x6B\x02\x99\x0D\xA4" + "\x3B\xD2\x46\xDD\x74\x0B\x7F\x16" + "\xAD\x21\xB8\x4F\xE6\x5A\xF1\x88" + "\x1F\x93\x2A\xC1\x35\xCC\x63\xFA" + "\x6E\x05\x9C\x10\xA7\x3E\xD5\x49" + "\xE0\x77\x0E\x82\x19\xB0\x24\xBB" + "\x52\xE9\x5D\xF4\x8B\x22\x96\x2D" + 
"\xC4\x38\xCF\x66\xFD\x71\x08\x9F" + "\x13\xAA\x41\xD8\x4C\xE3\x7A\x11" + "\x85\x1C\xB3\x27\xBE\x55\xEC\x60" + "\xF7\x8E\x02\x99\x30\xC7\x3B\xD2" + "\x69\x00\x74\x0B\xA2\x16\xAD\x44" + "\xDB\x4F\xE6\x7D\x14\x88\x1F\xB6" + "\x2A\xC1\x58\xEF\x63\xFA\x91\x05" + "\x9C\x33\xCA\x3E\xD5\x6C\x03\x77" + "\x0E\xA5\x19\xB0\x47\xDE\x52\xE9" + "\x80\x17\x8B\x22\xB9\x2D\xC4\x5B" + "\xF2\x66\xFD\x94\x08\x9F\x36\xCD" + "\x41\xD8\x6F\x06\x7A\x11\xA8\x1C" + "\xB3\x4A\xE1\x55\xEC\x83\x1A\x8E" + "\x25\xBC\x30\xC7\x5E\xF5\x69\x00" + "\x97\x0B\xA2\x39\xD0\x44\xDB\x72" + "\x09\x7D\x14\xAB\x1F\xB6\x4D\xE4" + "\x58\xEF\x86\x1D\x91\x28\xBF\x33" + "\xCA\x61\xF8\x6C\x03\x9A\x0E\xA5" + "\x3C\xD3\x47\xDE\x75\x0C\x80\x17" + "\xAE\x22\xB9\x50\xE7\x5B\xF2\x89" + "\x20\x94\x2B\xC2\x36\xCD\x64\xFB" + "\x6F\x06\x9D\x11\xA8\x3F\xD6\x4A" + "\xE1\x78\x0F\x83\x1A\xB1\x25\xBC" + "\x53\xEA\x5E\xF5\x8C\x00\x97\x2E" + "\xC5\x39\xD0\x67\xFE\x72\x09\xA0" + "\x14\xAB\x42\xD9\x4D\xE4\x7B\x12" + "\x86\x1D\xB4\x28\xBF\x56\xED\x61" + "\xF8\x8F\x03\x9A\x31\xC8\x3C\xD3" + "\x6A\x01\x75\x0C\xA3\x17\xAE\x45" + "\xDC\x50\xE7\x7E\x15\x89\x20\xB7", + .rlen = 496, + }, +}; + +static struct cipher_testvec cast6_cbc_enc_tv_template[] = { + { /* Generated from TF test vectors */ + .key = "\x85\x62\x3F\x1C\xF9\xD6\x1C\xF9" + "\xD6\xB3\x90\x6D\x4A\x90\x6D\x4A" + "\x27\x04\xE1\x27\x04\xE1\xBE\x9B" + "\x78\xBE\x9B\x78\x55\x32\x0F\x55", + .klen = 32, + .iv = "\xE2\x24\x89\xEE\x53\xB8\x1D\x5F" + "\xC4\x29\x8E\xF3\x35\x9A\xFF\x64", + .input = "\x56\xED\x84\x1B\x8F\x26\xBD\x31" + "\xC8\x5F\xF6\x6A\x01\x98\x0C\xA3" + "\x3A\xD1\x45\xDC\x73\x0A\x7E\x15" + "\xAC\x20\xB7\x4E\xE5\x59\xF0\x87" + "\x1E\x92\x29\xC0\x34\xCB\x62\xF9" + "\x6D\x04\x9B\x0F\xA6\x3D\xD4\x48" + "\xDF\x76\x0D\x81\x18\xAF\x23\xBA" + "\x51\xE8\x5C\xF3\x8A\x21\x95\x2C" + "\xC3\x37\xCE\x65\xFC\x70\x07\x9E" + "\x12\xA9\x40\xD7\x4B\xE2\x79\x10" + "\x84\x1B\xB2\x26\xBD\x54\xEB\x5F" + "\xF6\x8D\x01\x98\x2F\xC6\x3A\xD1" + "\x68\xFF\x73\x0A\xA1\x15\xAC\x43" + "\xDA\x4E\xE5\x7C\x13\x87\x1E\xB5" + "\x29\xC0\x57\xEE\x62\xF9\x90\x04" + "\x9B\x32\xC9\x3D\xD4\x6B\x02\x76" + "\x0D\xA4\x18\xAF\x46\xDD\x51\xE8" + "\x7F\x16\x8A\x21\xB8\x2C\xC3\x5A" + "\xF1\x65\xFC\x93\x07\x9E\x35\xCC" + "\x40\xD7\x6E\x05\x79\x10\xA7\x1B" + "\xB2\x49\xE0\x54\xEB\x82\x19\x8D" + "\x24\xBB\x2F\xC6\x5D\xF4\x68\xFF" + "\x96\x0A\xA1\x38\xCF\x43\xDA\x71" + "\x08\x7C\x13\xAA\x1E\xB5\x4C\xE3" + "\x57\xEE\x85\x1C\x90\x27\xBE\x32" + "\xC9\x60\xF7\x6B\x02\x99\x0D\xA4" + "\x3B\xD2\x46\xDD\x74\x0B\x7F\x16" + "\xAD\x21\xB8\x4F\xE6\x5A\xF1\x88" + "\x1F\x93\x2A\xC1\x35\xCC\x63\xFA" + "\x6E\x05\x9C\x10\xA7\x3E\xD5\x49" + "\xE0\x77\x0E\x82\x19\xB0\x24\xBB" + "\x52\xE9\x5D\xF4\x8B\x22\x96\x2D" + "\xC4\x38\xCF\x66\xFD\x71\x08\x9F" + "\x13\xAA\x41\xD8\x4C\xE3\x7A\x11" + "\x85\x1C\xB3\x27\xBE\x55\xEC\x60" + "\xF7\x8E\x02\x99\x30\xC7\x3B\xD2" + "\x69\x00\x74\x0B\xA2\x16\xAD\x44" + "\xDB\x4F\xE6\x7D\x14\x88\x1F\xB6" + "\x2A\xC1\x58\xEF\x63\xFA\x91\x05" + "\x9C\x33\xCA\x3E\xD5\x6C\x03\x77" + "\x0E\xA5\x19\xB0\x47\xDE\x52\xE9" + "\x80\x17\x8B\x22\xB9\x2D\xC4\x5B" + "\xF2\x66\xFD\x94\x08\x9F\x36\xCD" + "\x41\xD8\x6F\x06\x7A\x11\xA8\x1C" + "\xB3\x4A\xE1\x55\xEC\x83\x1A\x8E" + "\x25\xBC\x30\xC7\x5E\xF5\x69\x00" + "\x97\x0B\xA2\x39\xD0\x44\xDB\x72" + "\x09\x7D\x14\xAB\x1F\xB6\x4D\xE4" + "\x58\xEF\x86\x1D\x91\x28\xBF\x33" + "\xCA\x61\xF8\x6C\x03\x9A\x0E\xA5" + "\x3C\xD3\x47\xDE\x75\x0C\x80\x17" + "\xAE\x22\xB9\x50\xE7\x5B\xF2\x89" + "\x20\x94\x2B\xC2\x36\xCD\x64\xFB" + "\x6F\x06\x9D\x11\xA8\x3F\xD6\x4A" + "\xE1\x78\x0F\x83\x1A\xB1\x25\xBC" + 
"\x53\xEA\x5E\xF5\x8C\x00\x97\x2E" + "\xC5\x39\xD0\x67\xFE\x72\x09\xA0" + "\x14\xAB\x42\xD9\x4D\xE4\x7B\x12" + "\x86\x1D\xB4\x28\xBF\x56\xED\x61" + "\xF8\x8F\x03\x9A\x31\xC8\x3C\xD3" + "\x6A\x01\x75\x0C\xA3\x17\xAE\x45" + "\xDC\x50\xE7\x7E\x15\x89\x20\xB7", + .ilen = 496, + .result = "\xDF\x77\x68\x96\xC7\xBA\xF8\xE2" + "\x0E\x24\x99\x1A\xAA\xF3\xC6\x9F" + "\xA0\x73\xB3\x70\xC3\x68\x64\x70" + "\xAD\x33\x02\xFB\x88\x74\xAA\x78" + "\xC7\x47\x1A\x18\x61\x2D\xAC\x9F" + "\x7E\x6F\xDF\x05\x13\x76\xA6\x72" + "\xB7\x13\x09\x0F\x7D\x38\xDF\x25" + "\x4E\xFD\x50\x45\xFA\x35\x6A\xC0" + "\x57\x95\xE1\x21\x26\x10\x9A\x21" + "\xA1\x8A\x51\x05\xD1\xB1\x78\x35" + "\x98\xF5\xAE\xC0\xC1\x8B\x94\xFF" + "\xD0\x69\x3F\x42\xC2\x01\xA7\x9B" + "\x23\x16\x47\x72\x81\x13\x3A\x72" + "\xEC\xD9\x40\x88\x00\x9C\xB0\xA8" + "\x9C\xAC\xCE\x11\x73\x7B\x63\x3E" + "\xA3\x63\x98\x7D\x35\xE4\xD9\x83" + "\xE2\xD0\x52\x87\x0C\x1F\xB0\xB3" + "\x41\x1A\x93\x8D\x76\x31\x9F\xF2" + "\xFE\x09\xA3\x8F\x22\x6A\x3B\xB9" + "\x6C\x9E\xE4\xA1\xA0\xC4\xE7\xA1" + "\x21\x9C\x1A\xCA\x65\xDE\x44\x03" + "\x99\xF2\xD2\x39\xE3\x3F\x0F\x37" + "\x53\x50\x23\xA4\x81\x6E\xDA\xFB" + "\xF8\x7B\x01\xD7\xB2\x32\x9C\xB8" + "\xB1\x0E\x99\x17\xB5\x38\xF9\xD7" + "\x86\x2D\x6E\x94\x5C\x99\x9D\xB3" + "\xD3\x63\x4B\x2A\x7D\x44\x6A\xB2" + "\xC1\x03\xE6\x5A\x37\xD8\x64\x18" + "\xAA\x32\xCE\x29\xED\xC0\xA2\xCB" + "\x8D\xAF\xCD\xBE\x8F\xB6\xEC\xB4" + "\x89\x05\x81\x6E\x71\x4F\xC3\x28" + "\x10\xC1\x62\xC4\x41\xE9\xD2\x39" + "\xF3\x22\x39\x12\x2C\xC2\x95\x2D" + "\xBF\x93\x58\x4B\x04\xD1\x8D\x57" + "\xAE\xEB\x60\x03\x56\x35\xAD\x5A" + "\xE9\xC3\xFF\x4E\x31\xE1\x37\xF8" + "\x7D\xEE\x65\x8A\xB6\x88\x1A\x3E" + "\x07\x09\x82\xBA\xF0\x80\x8A\xD0" + "\xA0\x3F\x6A\xE9\x24\x87\x19\x65" + "\x73\x3F\x12\x91\x47\x54\xBA\x39" + "\x30\x5B\x1E\xE5\xC2\xF9\x3F\xEF" + "\xD6\x75\xF9\xB8\x7C\x8B\x05\x76" + "\xEE\xB7\x08\x25\x4B\xB6\x7B\x47" + "\x72\xC0\x4C\xD4\xDA\xE0\x75\xF1" + "\x7C\xE8\x94\x9E\x16\x6E\xB8\x12" + "\xA1\xC1\x6E\x3B\x1C\x59\x41\x2D" + "\x23\xFA\x7D\x77\xB8\x46\x75\xFE" + "\x4F\x10\xD3\x09\x60\xA1\x36\x96" + "\x5B\xC2\xDC\x6E\x84\x7D\x9B\x14" + "\x80\x21\x83\x58\x3C\x76\xFD\x28" + "\x1D\xF9\x93\x13\xD7\x0E\x62\x14" + "\x5A\xC5\x4E\x08\xA5\x56\xA4\x3C" + "\x68\x93\x44\x70\xDF\xCF\x4A\x51" + "\x0B\x81\x29\x41\xE5\x62\x4D\x36" + "\xB3\xEA\x94\xA6\xB9\xDD\x3F\x09" + "\x62\x34\xA0\x6A\x7E\x7D\xF5\xF6" + "\x01\x91\xB4\x27\xDA\x59\xD6\x17" + "\x56\x4D\x82\x62\x37\xA3\x48\x01" + "\x99\x91\x77\xB2\x08\x6B\x2C\x37" + "\xC5\x5C\xAD\xB6\x07\xB6\x84\xF3" + "\x4D\x59\x7D\xC5\x28\x69\xFA\x92" + "\x22\x46\x89\x2D\x0F\x2B\x08\x24", + .rlen = 496, + }, +}; + +static struct cipher_testvec cast6_cbc_dec_tv_template[] = { + { /* Generated from TF test vectors */ + .key = "\x85\x62\x3F\x1C\xF9\xD6\x1C\xF9" + "\xD6\xB3\x90\x6D\x4A\x90\x6D\x4A" + "\x27\x04\xE1\x27\x04\xE1\xBE\x9B" + "\x78\xBE\x9B\x78\x55\x32\x0F\x55", + .klen = 32, + .iv = "\xE2\x24\x89\xEE\x53\xB8\x1D\x5F" + "\xC4\x29\x8E\xF3\x35\x9A\xFF\x64", + .input = "\xDF\x77\x68\x96\xC7\xBA\xF8\xE2" + "\x0E\x24\x99\x1A\xAA\xF3\xC6\x9F" + "\xA0\x73\xB3\x70\xC3\x68\x64\x70" + "\xAD\x33\x02\xFB\x88\x74\xAA\x78" + "\xC7\x47\x1A\x18\x61\x2D\xAC\x9F" + "\x7E\x6F\xDF\x05\x13\x76\xA6\x72" + "\xB7\x13\x09\x0F\x7D\x38\xDF\x25" + "\x4E\xFD\x50\x45\xFA\x35\x6A\xC0" + "\x57\x95\xE1\x21\x26\x10\x9A\x21" + "\xA1\x8A\x51\x05\xD1\xB1\x78\x35" + "\x98\xF5\xAE\xC0\xC1\x8B\x94\xFF" + "\xD0\x69\x3F\x42\xC2\x01\xA7\x9B" + "\x23\x16\x47\x72\x81\x13\x3A\x72" + "\xEC\xD9\x40\x88\x00\x9C\xB0\xA8" + "\x9C\xAC\xCE\x11\x73\x7B\x63\x3E" + 
"\xA3\x63\x98\x7D\x35\xE4\xD9\x83" + "\xE2\xD0\x52\x87\x0C\x1F\xB0\xB3" + "\x41\x1A\x93\x8D\x76\x31\x9F\xF2" + "\xFE\x09\xA3\x8F\x22\x6A\x3B\xB9" + "\x6C\x9E\xE4\xA1\xA0\xC4\xE7\xA1" + "\x21\x9C\x1A\xCA\x65\xDE\x44\x03" + "\x99\xF2\xD2\x39\xE3\x3F\x0F\x37" + "\x53\x50\x23\xA4\x81\x6E\xDA\xFB" + "\xF8\x7B\x01\xD7\xB2\x32\x9C\xB8" + "\xB1\x0E\x99\x17\xB5\x38\xF9\xD7" + "\x86\x2D\x6E\x94\x5C\x99\x9D\xB3" + "\xD3\x63\x4B\x2A\x7D\x44\x6A\xB2" + "\xC1\x03\xE6\x5A\x37\xD8\x64\x18" + "\xAA\x32\xCE\x29\xED\xC0\xA2\xCB" + "\x8D\xAF\xCD\xBE\x8F\xB6\xEC\xB4" + "\x89\x05\x81\x6E\x71\x4F\xC3\x28" + "\x10\xC1\x62\xC4\x41\xE9\xD2\x39" + "\xF3\x22\x39\x12\x2C\xC2\x95\x2D" + "\xBF\x93\x58\x4B\x04\xD1\x8D\x57" + "\xAE\xEB\x60\x03\x56\x35\xAD\x5A" + "\xE9\xC3\xFF\x4E\x31\xE1\x37\xF8" + "\x7D\xEE\x65\x8A\xB6\x88\x1A\x3E" + "\x07\x09\x82\xBA\xF0\x80\x8A\xD0" + "\xA0\x3F\x6A\xE9\x24\x87\x19\x65" + "\x73\x3F\x12\x91\x47\x54\xBA\x39" + "\x30\x5B\x1E\xE5\xC2\xF9\x3F\xEF" + "\xD6\x75\xF9\xB8\x7C\x8B\x05\x76" + "\xEE\xB7\x08\x25\x4B\xB6\x7B\x47" + "\x72\xC0\x4C\xD4\xDA\xE0\x75\xF1" + "\x7C\xE8\x94\x9E\x16\x6E\xB8\x12" + "\xA1\xC1\x6E\x3B\x1C\x59\x41\x2D" + "\x23\xFA\x7D\x77\xB8\x46\x75\xFE" + "\x4F\x10\xD3\x09\x60\xA1\x36\x96" + "\x5B\xC2\xDC\x6E\x84\x7D\x9B\x14" + "\x80\x21\x83\x58\x3C\x76\xFD\x28" + "\x1D\xF9\x93\x13\xD7\x0E\x62\x14" + "\x5A\xC5\x4E\x08\xA5\x56\xA4\x3C" + "\x68\x93\x44\x70\xDF\xCF\x4A\x51" + "\x0B\x81\x29\x41\xE5\x62\x4D\x36" + "\xB3\xEA\x94\xA6\xB9\xDD\x3F\x09" + "\x62\x34\xA0\x6A\x7E\x7D\xF5\xF6" + "\x01\x91\xB4\x27\xDA\x59\xD6\x17" + "\x56\x4D\x82\x62\x37\xA3\x48\x01" + "\x99\x91\x77\xB2\x08\x6B\x2C\x37" + "\xC5\x5C\xAD\xB6\x07\xB6\x84\xF3" + "\x4D\x59\x7D\xC5\x28\x69\xFA\x92" + "\x22\x46\x89\x2D\x0F\x2B\x08\x24", + .ilen = 496, + .result = "\x56\xED\x84\x1B\x8F\x26\xBD\x31" + "\xC8\x5F\xF6\x6A\x01\x98\x0C\xA3" + "\x3A\xD1\x45\xDC\x73\x0A\x7E\x15" + "\xAC\x20\xB7\x4E\xE5\x59\xF0\x87" + "\x1E\x92\x29\xC0\x34\xCB\x62\xF9" + "\x6D\x04\x9B\x0F\xA6\x3D\xD4\x48" + "\xDF\x76\x0D\x81\x18\xAF\x23\xBA" + "\x51\xE8\x5C\xF3\x8A\x21\x95\x2C" + "\xC3\x37\xCE\x65\xFC\x70\x07\x9E" + "\x12\xA9\x40\xD7\x4B\xE2\x79\x10" + "\x84\x1B\xB2\x26\xBD\x54\xEB\x5F" + "\xF6\x8D\x01\x98\x2F\xC6\x3A\xD1" + "\x68\xFF\x73\x0A\xA1\x15\xAC\x43" + "\xDA\x4E\xE5\x7C\x13\x87\x1E\xB5" + "\x29\xC0\x57\xEE\x62\xF9\x90\x04" + "\x9B\x32\xC9\x3D\xD4\x6B\x02\x76" + "\x0D\xA4\x18\xAF\x46\xDD\x51\xE8" + "\x7F\x16\x8A\x21\xB8\x2C\xC3\x5A" + "\xF1\x65\xFC\x93\x07\x9E\x35\xCC" + "\x40\xD7\x6E\x05\x79\x10\xA7\x1B" + "\xB2\x49\xE0\x54\xEB\x82\x19\x8D" + "\x24\xBB\x2F\xC6\x5D\xF4\x68\xFF" + "\x96\x0A\xA1\x38\xCF\x43\xDA\x71" + "\x08\x7C\x13\xAA\x1E\xB5\x4C\xE3" + "\x57\xEE\x85\x1C\x90\x27\xBE\x32" + "\xC9\x60\xF7\x6B\x02\x99\x0D\xA4" + "\x3B\xD2\x46\xDD\x74\x0B\x7F\x16" + "\xAD\x21\xB8\x4F\xE6\x5A\xF1\x88" + "\x1F\x93\x2A\xC1\x35\xCC\x63\xFA" + "\x6E\x05\x9C\x10\xA7\x3E\xD5\x49" + "\xE0\x77\x0E\x82\x19\xB0\x24\xBB" + "\x52\xE9\x5D\xF4\x8B\x22\x96\x2D" + "\xC4\x38\xCF\x66\xFD\x71\x08\x9F" + "\x13\xAA\x41\xD8\x4C\xE3\x7A\x11" + "\x85\x1C\xB3\x27\xBE\x55\xEC\x60" + "\xF7\x8E\x02\x99\x30\xC7\x3B\xD2" + "\x69\x00\x74\x0B\xA2\x16\xAD\x44" + "\xDB\x4F\xE6\x7D\x14\x88\x1F\xB6" + "\x2A\xC1\x58\xEF\x63\xFA\x91\x05" + "\x9C\x33\xCA\x3E\xD5\x6C\x03\x77" + "\x0E\xA5\x19\xB0\x47\xDE\x52\xE9" + "\x80\x17\x8B\x22\xB9\x2D\xC4\x5B" + "\xF2\x66\xFD\x94\x08\x9F\x36\xCD" + "\x41\xD8\x6F\x06\x7A\x11\xA8\x1C" + "\xB3\x4A\xE1\x55\xEC\x83\x1A\x8E" + "\x25\xBC\x30\xC7\x5E\xF5\x69\x00" + "\x97\x0B\xA2\x39\xD0\x44\xDB\x72" + "\x09\x7D\x14\xAB\x1F\xB6\x4D\xE4" + 
"\x58\xEF\x86\x1D\x91\x28\xBF\x33" + "\xCA\x61\xF8\x6C\x03\x9A\x0E\xA5" + "\x3C\xD3\x47\xDE\x75\x0C\x80\x17" + "\xAE\x22\xB9\x50\xE7\x5B\xF2\x89" + "\x20\x94\x2B\xC2\x36\xCD\x64\xFB" + "\x6F\x06\x9D\x11\xA8\x3F\xD6\x4A" + "\xE1\x78\x0F\x83\x1A\xB1\x25\xBC" + "\x53\xEA\x5E\xF5\x8C\x00\x97\x2E" + "\xC5\x39\xD0\x67\xFE\x72\x09\xA0" + "\x14\xAB\x42\xD9\x4D\xE4\x7B\x12" + "\x86\x1D\xB4\x28\xBF\x56\xED\x61" + "\xF8\x8F\x03\x9A\x31\xC8\x3C\xD3" + "\x6A\x01\x75\x0C\xA3\x17\xAE\x45" + "\xDC\x50\xE7\x7E\x15\x89\x20\xB7", + .rlen = 496, + }, +}; + +static struct cipher_testvec cast6_ctr_enc_tv_template[] = { + { /* Generated from TF test vectors */ + .key = "\x85\x62\x3F\x1C\xF9\xD6\x1C\xF9" + "\xD6\xB3\x90\x6D\x4A\x90\x6D\x4A" + "\x27\x04\xE1\x27\x04\xE1\xBE\x9B" + "\x78\xBE\x9B\x78\x55\x32\x0F\x55", + .klen = 32, + .iv = "\xE2\x24\x89\xEE\x53\xB8\x1D\x5F" + "\xC4\x29\x8E\xF3\x35\x9A\xFF\x64", + .input = "\x56\xED\x84\x1B\x8F\x26\xBD\x31" + "\xC8\x5F\xF6\x6A\x01\x98\x0C\xA3" + "\x3A\xD1\x45\xDC\x73\x0A\x7E\x15" + "\xAC\x20\xB7\x4E\xE5\x59\xF0\x87" + "\x1E\x92\x29\xC0\x34\xCB\x62\xF9" + "\x6D\x04\x9B\x0F\xA6\x3D\xD4\x48" + "\xDF\x76\x0D\x81\x18\xAF\x23\xBA" + "\x51\xE8\x5C\xF3\x8A\x21\x95\x2C" + "\xC3\x37\xCE\x65\xFC\x70\x07\x9E" + "\x12\xA9\x40\xD7\x4B\xE2\x79\x10" + "\x84\x1B\xB2\x26\xBD\x54\xEB\x5F" + "\xF6\x8D\x01\x98\x2F\xC6\x3A\xD1" + "\x68\xFF\x73\x0A\xA1\x15\xAC\x43" + "\xDA\x4E\xE5\x7C\x13\x87\x1E\xB5" + "\x29\xC0\x57\xEE\x62\xF9\x90\x04" + "\x9B\x32\xC9\x3D\xD4\x6B\x02\x76" + "\x0D\xA4\x18\xAF\x46\xDD\x51\xE8" + "\x7F\x16\x8A\x21\xB8\x2C\xC3\x5A" + "\xF1\x65\xFC\x93\x07\x9E\x35\xCC" + "\x40\xD7\x6E\x05\x79\x10\xA7\x1B" + "\xB2\x49\xE0\x54\xEB\x82\x19\x8D" + "\x24\xBB\x2F\xC6\x5D\xF4\x68\xFF" + "\x96\x0A\xA1\x38\xCF\x43\xDA\x71" + "\x08\x7C\x13\xAA\x1E\xB5\x4C\xE3" + "\x57\xEE\x85\x1C\x90\x27\xBE\x32" + "\xC9\x60\xF7\x6B\x02\x99\x0D\xA4" + "\x3B\xD2\x46\xDD\x74\x0B\x7F\x16" + "\xAD\x21\xB8\x4F\xE6\x5A\xF1\x88" + "\x1F\x93\x2A\xC1\x35\xCC\x63\xFA" + "\x6E\x05\x9C\x10\xA7\x3E\xD5\x49" + "\xE0\x77\x0E\x82\x19\xB0\x24\xBB" + "\x52\xE9\x5D\xF4\x8B\x22\x96\x2D" + "\xC4\x38\xCF\x66\xFD\x71\x08\x9F" + "\x13\xAA\x41\xD8\x4C\xE3\x7A\x11" + "\x85\x1C\xB3\x27\xBE\x55\xEC\x60" + "\xF7\x8E\x02\x99\x30\xC7\x3B\xD2" + "\x69\x00\x74\x0B\xA2\x16\xAD\x44" + "\xDB\x4F\xE6\x7D\x14\x88\x1F\xB6" + "\x2A\xC1\x58\xEF\x63\xFA\x91\x05" + "\x9C\x33\xCA\x3E\xD5\x6C\x03\x77" + "\x0E\xA5\x19\xB0\x47\xDE\x52\xE9" + "\x80\x17\x8B\x22\xB9\x2D\xC4\x5B" + "\xF2\x66\xFD\x94\x08\x9F\x36\xCD" + "\x41\xD8\x6F\x06\x7A\x11\xA8\x1C" + "\xB3\x4A\xE1\x55\xEC\x83\x1A\x8E" + "\x25\xBC\x30\xC7\x5E\xF5\x69\x00" + "\x97\x0B\xA2\x39\xD0\x44\xDB\x72" + "\x09\x7D\x14\xAB\x1F\xB6\x4D\xE4" + "\x58\xEF\x86\x1D\x91\x28\xBF\x33" + "\xCA\x61\xF8\x6C\x03\x9A\x0E\xA5" + "\x3C\xD3\x47\xDE\x75\x0C\x80\x17" + "\xAE\x22\xB9\x50\xE7\x5B\xF2\x89" + "\x20\x94\x2B\xC2\x36\xCD\x64\xFB" + "\x6F\x06\x9D\x11\xA8\x3F\xD6\x4A" + "\xE1\x78\x0F\x83\x1A\xB1\x25\xBC" + "\x53\xEA\x5E\xF5\x8C\x00\x97\x2E" + "\xC5\x39\xD0\x67\xFE\x72\x09\xA0" + "\x14\xAB\x42\xD9\x4D\xE4\x7B\x12" + "\x86\x1D\xB4\x28\xBF\x56\xED\x61" + "\xF8\x8F\x03\x9A\x31\xC8\x3C\xD3" + "\x6A\x01\x75\x0C\xA3\x17\xAE\x45" + "\xDC\x50\xE7\x7E\x15\x89\x20\xB7", + .ilen = 496, + .result = "\x26\x0A\xF1\xE2\x3F\x8A\xEF\xA3" + "\x53\x9A\x5E\x1B\x2A\x1A\xC6\x0A" + "\x57\xA3\xEF\x47\x2A\xE8\x88\xA7" + "\x3C\xD0\xEC\xB9\x94\x50\x7D\x56" + "\xBC\xE1\xC1\xF5\xE1\xEE\x12\xF8" + "\x4F\x03\x82\x3A\x93\x6B\x4C\xD3" + "\xE3\xF3\xFA\xC2\x23\x55\x98\x20" + "\x49\x76\x9B\x6B\xC1\x23\xBF\xE5" + 
"\xD4\xC4\x2F\x61\xE1\x67\x2A\x30" + "\x6F\x29\xCA\x54\xF8\x1B\xA6\x7D" + "\x66\x45\xEE\xC8\x19\xBE\x50\xF0" + "\x5F\x65\xF8\x1E\x4D\x07\x87\xD9" + "\xD3\xD9\x1B\x09\x89\xFD\x42\xC5" + "\xDB\xEB\x86\xF1\x67\x04\x0F\x5C" + "\x81\xDF\x82\x12\xC7\x4C\x1B\x07" + "\xDE\xE6\xFA\x29\x86\xD1\xB0\xBA" + "\x3D\x6A\x69\x76\xEC\x0F\xB4\xE6" + "\xCD\xA7\xF8\xA8\xB8\xE0\x33\xF5" + "\x49\x61\x22\x52\x64\x8C\x46\x41" + "\x1F\x48\x5F\x4F\xA2\x89\x36\x17" + "\x20\xF8\x2F\x8F\x4B\xFA\xF2\xC0" + "\x1E\x18\xA2\xF8\xB7\x6D\x98\xE3" + "\x00\x14\x15\x59\xC1\x30\x64\xAF" + "\xA8\x01\x38\xAB\xD4\x8B\xEC\x7C" + "\x44\x9A\xC6\x2C\x2E\x2B\x2B\xF4" + "\x02\x37\xC4\x69\xEF\x36\xC1\xF3" + "\xA0\xFB\xFE\x29\xAD\x39\xCF\xD0" + "\x51\x73\xA3\x22\x42\x41\xAB\xD2" + "\x0F\x50\x14\xB9\x54\xD3\xD4\xFA" + "\xBF\xC9\xBB\xCE\xC4\x1D\x2D\xAF" + "\xC9\x3F\x07\x87\x42\x4B\x3A\x54" + "\x34\x8E\x37\xA3\x03\x6F\x65\x66" + "\xDB\x44\xC3\xE8\xD7\xDD\x7D\xDD" + "\x61\xB4\x2B\x80\xA3\x98\x13\xF5" + "\x5A\xD3\x34\x58\xC3\x6E\xF6\xB8" + "\x0A\xC6\x50\x01\x8E\xD5\x6C\x7D" + "\xFE\x16\xB6\xCF\xFC\x51\x40\xAE" + "\xB3\x15\xAC\x90\x6F\x0B\x28\x3A" + "\x60\x40\x38\x90\x20\x46\xC7\xB3" + "\x0B\x12\x6D\x3B\x15\x14\xF9\xF4" + "\x11\x41\x76\x6B\xB3\x60\x82\x3C" + "\x84\xFB\x08\x2E\x92\x25\xCB\x79" + "\x6F\x58\xC5\x94\x00\x00\x47\xB6" + "\x9E\xDC\x0F\x29\x70\x46\x20\x76" + "\x65\x75\x66\x5C\x00\x96\xB3\xE1" + "\x0B\xA7\x11\x8B\x2E\x61\x4E\x45" + "\x73\xFC\x91\xAB\x79\x41\x23\x14" + "\x13\xB6\x72\x6C\x46\xB3\x03\x11" + "\xE4\xF1\xEE\xC9\x7A\xCF\x96\x32" + "\xB6\xF0\x8B\x97\xB4\xCF\x82\xB7" + "\x15\x48\x44\x99\x09\xF6\xE0\xD7" + "\xBC\xF1\x5B\x91\x4F\x30\x22\xA2" + "\x45\xC4\x68\x55\xC2\xBE\xA7\xD2" + "\x12\x53\x35\x9C\xF9\xE7\x35\x5D" + "\x81\xE4\x86\x42\xC3\x58\xFB\xF0" + "\x38\x9B\x8E\x5A\xEF\x83\x33\x0F" + "\x00\x4E\x3F\x9F\xF5\x84\x62\xC4" + "\x19\x35\x88\x22\x45\x59\x0E\x8F" + "\xEC\x27\xDD\x4A\xA4\x1F\xBC\x41" + "\x9B\x66\x8D\x32\xBA\x81\x34\x87" + "\x0E\x74\x33\x30\x62\xB9\x89\xDF" + "\xF9\xC5\xDD\x27\xB3\x39\xCB\xCB", + .rlen = 496, + }, +}; + +static struct cipher_testvec cast6_ctr_dec_tv_template[] = { + { /* Generated from TF test vectors */ + .key = "\x85\x62\x3F\x1C\xF9\xD6\x1C\xF9" + "\xD6\xB3\x90\x6D\x4A\x90\x6D\x4A" + "\x27\x04\xE1\x27\x04\xE1\xBE\x9B" + "\x78\xBE\x9B\x78\x55\x32\x0F\x55", + .klen = 32, + .iv = "\xE2\x24\x89\xEE\x53\xB8\x1D\x5F" + "\xC4\x29\x8E\xF3\x35\x9A\xFF\x64", + .input = "\x26\x0A\xF1\xE2\x3F\x8A\xEF\xA3" + "\x53\x9A\x5E\x1B\x2A\x1A\xC6\x0A" + "\x57\xA3\xEF\x47\x2A\xE8\x88\xA7" + "\x3C\xD0\xEC\xB9\x94\x50\x7D\x56" + "\xBC\xE1\xC1\xF5\xE1\xEE\x12\xF8" + "\x4F\x03\x82\x3A\x93\x6B\x4C\xD3" + "\xE3\xF3\xFA\xC2\x23\x55\x98\x20" + "\x49\x76\x9B\x6B\xC1\x23\xBF\xE5" + "\xD4\xC4\x2F\x61\xE1\x67\x2A\x30" + "\x6F\x29\xCA\x54\xF8\x1B\xA6\x7D" + "\x66\x45\xEE\xC8\x19\xBE\x50\xF0" + "\x5F\x65\xF8\x1E\x4D\x07\x87\xD9" + "\xD3\xD9\x1B\x09\x89\xFD\x42\xC5" + "\xDB\xEB\x86\xF1\x67\x04\x0F\x5C" + "\x81\xDF\x82\x12\xC7\x4C\x1B\x07" + "\xDE\xE6\xFA\x29\x86\xD1\xB0\xBA" + "\x3D\x6A\x69\x76\xEC\x0F\xB4\xE6" + "\xCD\xA7\xF8\xA8\xB8\xE0\x33\xF5" + "\x49\x61\x22\x52\x64\x8C\x46\x41" + "\x1F\x48\x5F\x4F\xA2\x89\x36\x17" + "\x20\xF8\x2F\x8F\x4B\xFA\xF2\xC0" + "\x1E\x18\xA2\xF8\xB7\x6D\x98\xE3" + "\x00\x14\x15\x59\xC1\x30\x64\xAF" + "\xA8\x01\x38\xAB\xD4\x8B\xEC\x7C" + "\x44\x9A\xC6\x2C\x2E\x2B\x2B\xF4" + "\x02\x37\xC4\x69\xEF\x36\xC1\xF3" + "\xA0\xFB\xFE\x29\xAD\x39\xCF\xD0" + "\x51\x73\xA3\x22\x42\x41\xAB\xD2" + "\x0F\x50\x14\xB9\x54\xD3\xD4\xFA" + "\xBF\xC9\xBB\xCE\xC4\x1D\x2D\xAF" + "\xC9\x3F\x07\x87\x42\x4B\x3A\x54" + 
"\x34\x8E\x37\xA3\x03\x6F\x65\x66" + "\xDB\x44\xC3\xE8\xD7\xDD\x7D\xDD" + "\x61\xB4\x2B\x80\xA3\x98\x13\xF5" + "\x5A\xD3\x34\x58\xC3\x6E\xF6\xB8" + "\x0A\xC6\x50\x01\x8E\xD5\x6C\x7D" + "\xFE\x16\xB6\xCF\xFC\x51\x40\xAE" + "\xB3\x15\xAC\x90\x6F\x0B\x28\x3A" + "\x60\x40\x38\x90\x20\x46\xC7\xB3" + "\x0B\x12\x6D\x3B\x15\x14\xF9\xF4" + "\x11\x41\x76\x6B\xB3\x60\x82\x3C" + "\x84\xFB\x08\x2E\x92\x25\xCB\x79" + "\x6F\x58\xC5\x94\x00\x00\x47\xB6" + "\x9E\xDC\x0F\x29\x70\x46\x20\x76" + "\x65\x75\x66\x5C\x00\x96\xB3\xE1" + "\x0B\xA7\x11\x8B\x2E\x61\x4E\x45" + "\x73\xFC\x91\xAB\x79\x41\x23\x14" + "\x13\xB6\x72\x6C\x46\xB3\x03\x11" + "\xE4\xF1\xEE\xC9\x7A\xCF\x96\x32" + "\xB6\xF0\x8B\x97\xB4\xCF\x82\xB7" + "\x15\x48\x44\x99\x09\xF6\xE0\xD7" + "\xBC\xF1\x5B\x91\x4F\x30\x22\xA2" + "\x45\xC4\x68\x55\xC2\xBE\xA7\xD2" + "\x12\x53\x35\x9C\xF9\xE7\x35\x5D" + "\x81\xE4\x86\x42\xC3\x58\xFB\xF0" + "\x38\x9B\x8E\x5A\xEF\x83\x33\x0F" + "\x00\x4E\x3F\x9F\xF5\x84\x62\xC4" + "\x19\x35\x88\x22\x45\x59\x0E\x8F" + "\xEC\x27\xDD\x4A\xA4\x1F\xBC\x41" + "\x9B\x66\x8D\x32\xBA\x81\x34\x87" + "\x0E\x74\x33\x30\x62\xB9\x89\xDF" + "\xF9\xC5\xDD\x27\xB3\x39\xCB\xCB", + .ilen = 496, + .result = "\x56\xED\x84\x1B\x8F\x26\xBD\x31" + "\xC8\x5F\xF6\x6A\x01\x98\x0C\xA3" + "\x3A\xD1\x45\xDC\x73\x0A\x7E\x15" + "\xAC\x20\xB7\x4E\xE5\x59\xF0\x87" + "\x1E\x92\x29\xC0\x34\xCB\x62\xF9" + "\x6D\x04\x9B\x0F\xA6\x3D\xD4\x48" + "\xDF\x76\x0D\x81\x18\xAF\x23\xBA" + "\x51\xE8\x5C\xF3\x8A\x21\x95\x2C" + "\xC3\x37\xCE\x65\xFC\x70\x07\x9E" + "\x12\xA9\x40\xD7\x4B\xE2\x79\x10" + "\x84\x1B\xB2\x26\xBD\x54\xEB\x5F" + "\xF6\x8D\x01\x98\x2F\xC6\x3A\xD1" + "\x68\xFF\x73\x0A\xA1\x15\xAC\x43" + "\xDA\x4E\xE5\x7C\x13\x87\x1E\xB5" + "\x29\xC0\x57\xEE\x62\xF9\x90\x04" + "\x9B\x32\xC9\x3D\xD4\x6B\x02\x76" + "\x0D\xA4\x18\xAF\x46\xDD\x51\xE8" + "\x7F\x16\x8A\x21\xB8\x2C\xC3\x5A" + "\xF1\x65\xFC\x93\x07\x9E\x35\xCC" + "\x40\xD7\x6E\x05\x79\x10\xA7\x1B" + "\xB2\x49\xE0\x54\xEB\x82\x19\x8D" + "\x24\xBB\x2F\xC6\x5D\xF4\x68\xFF" + "\x96\x0A\xA1\x38\xCF\x43\xDA\x71" + "\x08\x7C\x13\xAA\x1E\xB5\x4C\xE3" + "\x57\xEE\x85\x1C\x90\x27\xBE\x32" + "\xC9\x60\xF7\x6B\x02\x99\x0D\xA4" + "\x3B\xD2\x46\xDD\x74\x0B\x7F\x16" + "\xAD\x21\xB8\x4F\xE6\x5A\xF1\x88" + "\x1F\x93\x2A\xC1\x35\xCC\x63\xFA" + "\x6E\x05\x9C\x10\xA7\x3E\xD5\x49" + "\xE0\x77\x0E\x82\x19\xB0\x24\xBB" + "\x52\xE9\x5D\xF4\x8B\x22\x96\x2D" + "\xC4\x38\xCF\x66\xFD\x71\x08\x9F" + "\x13\xAA\x41\xD8\x4C\xE3\x7A\x11" + "\x85\x1C\xB3\x27\xBE\x55\xEC\x60" + "\xF7\x8E\x02\x99\x30\xC7\x3B\xD2" + "\x69\x00\x74\x0B\xA2\x16\xAD\x44" + "\xDB\x4F\xE6\x7D\x14\x88\x1F\xB6" + "\x2A\xC1\x58\xEF\x63\xFA\x91\x05" + "\x9C\x33\xCA\x3E\xD5\x6C\x03\x77" + "\x0E\xA5\x19\xB0\x47\xDE\x52\xE9" + "\x80\x17\x8B\x22\xB9\x2D\xC4\x5B" + "\xF2\x66\xFD\x94\x08\x9F\x36\xCD" + "\x41\xD8\x6F\x06\x7A\x11\xA8\x1C" + "\xB3\x4A\xE1\x55\xEC\x83\x1A\x8E" + "\x25\xBC\x30\xC7\x5E\xF5\x69\x00" + "\x97\x0B\xA2\x39\xD0\x44\xDB\x72" + "\x09\x7D\x14\xAB\x1F\xB6\x4D\xE4" + "\x58\xEF\x86\x1D\x91\x28\xBF\x33" + "\xCA\x61\xF8\x6C\x03\x9A\x0E\xA5" + "\x3C\xD3\x47\xDE\x75\x0C\x80\x17" + "\xAE\x22\xB9\x50\xE7\x5B\xF2\x89" + "\x20\x94\x2B\xC2\x36\xCD\x64\xFB" + "\x6F\x06\x9D\x11\xA8\x3F\xD6\x4A" + "\xE1\x78\x0F\x83\x1A\xB1\x25\xBC" + "\x53\xEA\x5E\xF5\x8C\x00\x97\x2E" + "\xC5\x39\xD0\x67\xFE\x72\x09\xA0" + "\x14\xAB\x42\xD9\x4D\xE4\x7B\x12" + "\x86\x1D\xB4\x28\xBF\x56\xED\x61" + "\xF8\x8F\x03\x9A\x31\xC8\x3C\xD3" + "\x6A\x01\x75\x0C\xA3\x17\xAE\x45" + "\xDC\x50\xE7\x7E\x15\x89\x20\xB7", + .rlen = 496, + }, +}; + +static struct cipher_testvec cast6_lrw_enc_tv_template[] = { 
+ { /* Generated from TF test vectors */ + .key = "\xf8\xd4\x76\xff\xd6\x46\xee\x6c" + "\x23\x84\xcb\x1c\x77\xd6\x19\x5d" + "\xfe\xf1\xa9\xf3\x7b\xbc\x8d\x21" + "\xa7\x9c\x21\xf8\xcb\x90\x02\x89" + "\xa8\x45\x34\x8e\xc8\xc5\xb5\xf1" + "\x26\xf5\x0e\x76\xfe\xfd\x1b\x1e", + .klen = 48, + .iv = "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x01", + .input = "\x05\x11\xb7\x18\xab\xc6\x2d\xac" + "\x70\x5d\xf6\x22\x94\xcd\xe5\x6c" + "\x17\x6b\xf6\x1c\xf0\xf3\x6e\xf8" + "\x50\x38\x1f\x71\x49\xb6\x57\xd6" + "\x8f\xcb\x8d\x6b\xe3\xa6\x29\x90" + "\xfe\x2a\x62\x82\xae\x6d\x8b\xf6" + "\xad\x1e\x9e\x20\x5f\x38\xbe\x04" + "\xda\x10\x8e\xed\xa2\xa4\x87\xab" + "\xda\x6b\xb4\x0c\x75\xba\xd3\x7c" + "\xc9\xac\x42\x31\x95\x7c\xc9\x04" + "\xeb\xd5\x6e\x32\x69\x8a\xdb\xa6" + "\x15\xd7\x3f\x4f\x2f\x66\x69\x03" + "\x9c\x1f\x54\x0f\xde\x1f\xf3\x65" + "\x4c\x96\x12\xed\x7c\x92\x03\x01" + "\x6f\xbc\x35\x93\xac\xf1\x27\xf1" + "\xb4\x96\x82\x5a\x5f\xb0\xa0\x50" + "\x89\xa4\x8e\x66\x44\x85\xcc\xfd" + "\x33\x14\x70\xe3\x96\xb2\xc3\xd3" + "\xbb\x54\x5a\x1a\xf9\x74\xa2\xc5" + "\x2d\x64\x75\xdd\xb4\x54\xe6\x74" + "\x8c\xd3\x9d\x9e\x86\xab\x51\x53" + "\xb7\x93\x3e\x6f\xd0\x4e\x2c\x40" + "\xf6\xa8\x2e\x3e\x9d\xf4\x66\xa5" + "\x76\x12\x73\x44\x1a\x56\xd7\x72" + "\x88\xcd\x21\x8c\x4c\x0f\xfe\xda" + "\x95\xe0\x3a\xa6\xa5\x84\x46\xcd" + "\xd5\x3e\x9d\x3a\xe2\x67\xe6\x60" + "\x1a\xe2\x70\x85\x58\xc2\x1b\x09" + "\xe1\xd7\x2c\xca\xad\xa8\x8f\xf9" + "\xac\xb3\x0e\xdb\xca\x2e\xe2\xb8" + "\x51\x71\xd9\x3c\x6c\xf1\x56\xf8" + "\xea\x9c\xf1\xfb\x0c\xe6\xb7\x10" + "\x1c\xf8\xa9\x7c\xe8\x53\x35\xc1" + "\x90\x3e\x76\x4a\x74\xa4\x21\x2c" + "\xf6\x2c\x4e\x0f\x94\x3a\x88\x2e" + "\x41\x09\x6a\x33\x7d\xf6\xdd\x3f" + "\x8d\x23\x31\x74\x84\xeb\x88\x6e" + "\xcc\xb9\xbc\x22\x83\x19\x07\x22" + "\xa5\x2d\xdf\xa5\xf3\x80\x85\x78" + "\x84\x39\x6a\x6d\x6a\x99\x4f\xa5" + "\x15\xfe\x46\xb0\xe4\x6c\xa5\x41" + "\x3c\xce\x8f\x42\x60\x71\xa7\x75" + "\x08\x40\x65\x8a\x82\xbf\xf5\x43" + "\x71\x96\xa9\x4d\x44\x8a\x20\xbe" + "\xfa\x4d\xbb\xc0\x7d\x31\x96\x65" + "\xe7\x75\xe5\x3e\xfd\x92\x3b\xc9" + "\x55\xbb\x16\x7e\xf7\xc2\x8c\xa4" + "\x40\x1d\xe5\xef\x0e\xdf\xe4\x9a" + "\x62\x73\x65\xfd\x46\x63\x25\x3d" + "\x2b\xaf\xe5\x64\xfe\xa5\x5c\xcf" + "\x24\xf3\xb4\xac\x64\xba\xdf\x4b" + "\xc6\x96\x7d\x81\x2d\x8d\x97\xf7" + "\xc5\x68\x77\x84\x32\x2b\xcc\x85" + "\x74\x96\xf0\x12\x77\x61\xb9\xeb" + "\x71\xaa\x82\xcb\x1c\xdb\x89\xc8" + "\xc6\xb5\xe3\x5c\x7d\x39\x07\x24" + "\xda\x39\x87\x45\xc0\x2b\xbb\x01" + "\xac\xbc\x2a\x5c\x7f\xfc\xe8\xce" + "\x6d\x9c\x6f\xed\xd3\xc1\xa1\xd6" + "\xc5\x55\xa9\x66\x2f\xe1\xc8\x32" + "\xa6\x5d\xa4\x3a\x98\x73\xe8\x45" + "\xa4\xc7\xa8\xb4\xf6\x13\x03\xf6" + "\xe9\x2e\xc4\x29\x0f\x84\xdb\xc4" + "\x21\xc4\xc2\x75\x67\x89\x37\x0a", + .ilen = 512, + .result = "\x55\x25\x09\x8B\xB5\xD5\xF8\xBF" + "\x37\x4A\xFE\x3C\x47\xD8\xE6\xEB" + "\xCA\xA4\x9B\xB0\xAB\x6D\x64\xCA" + "\x58\xB6\x73\xF0\xD7\x52\x34\xEF" + "\xFB\x3E\x96\x81\xB7\x71\x34\xA4" + "\x55\x20\xBE\x39\x5A\x2B\xF9\xD1" + "\x65\x0B\xDA\xD3\x7E\xB3\xA6\xF7" + "\x2E\x0B\x5A\x52\xDB\x39\x8C\x9B" + "\x61\x17\x5F\xAF\xB6\x5A\xC8\x08" + "\xA7\xB7\x2A\x11\x7C\x97\x38\x9D" + "\x59\x0E\x66\x59\x5E\xD8\x8B\xCE" + "\x70\xE0\xC3\x42\xB0\x8C\x0F\xBA" + "\xB2\x0D\x81\xB6\xBE\x61\x1C\x2D" + "\x7E\xEA\x91\x25\xAC\xEC\xF8\x28" + "\x80\x1D\xF0\x30\xBA\x62\x77\x7D" + "\xDB\x15\x69\xDF\xFA\x2A\x81\x64" + "\x95\x5B\xA4\x7F\x3E\x4F\xE3\x30" + "\xB0\x5C\xC2\x05\xF8\xF0\x29\xE7" + "\x0A\xA0\x66\xB2\x5D\x0F\x39\x2B" + "\xB4\xB3\x00\xA9\xD0\xAB\x63\x61" + "\x5E\xDB\xFC\x11\x74\x25\x96\x65" + 
"\xE8\xE2\x34\x57\x77\x15\x5E\x70" + "\xFF\x10\x90\xC3\x64\xF0\x11\x0A" + "\x63\x3A\xD3\x55\x92\x15\x4B\x0C" + "\xC7\x08\x89\x17\x3B\x99\xAD\x63" + "\xE7\x06\xDF\x52\xBC\x15\x64\x45" + "\x9D\x7A\xFB\x69\xBC\x2D\x6E\xA9" + "\x35\xD9\xD8\xF5\x0C\xC4\xA2\x23" + "\x9C\x18\x8B\xA8\x8C\xFE\xF8\x0E" + "\xBD\xAB\x60\x1A\x51\x17\x54\x27" + "\xB6\xE8\xBE\x0F\xA9\xA5\x82\x19" + "\x2F\x6F\x20\xA7\x47\xED\x74\x6C" + "\x4E\xC1\xF8\x8C\x14\xF3\xBB\x1F" + "\xED\x4D\x8F\x7C\x37\xEF\x19\xA1" + "\x07\x16\xDE\x76\xCC\x5E\x94\x02" + "\xFB\xBF\xE4\x81\x50\xCE\xFC\x0F" + "\x9E\xCF\x3D\xF6\x67\x00\xBF\xA7" + "\x6E\x21\x58\x36\x06\xDE\xB3\xD4" + "\xA2\xFA\xD8\x4E\xE0\xB9\x7F\x23" + "\x51\x21\x2B\x32\x68\xAA\xF8\xA8" + "\x93\x08\xB5\x6D\xE6\x43\x2C\xB7" + "\x31\xB2\x0F\xD0\xA2\x51\xC0\x25" + "\x30\xC7\x10\x3F\x97\x27\x01\x8E" + "\xFA\xD8\x4F\x78\xD8\x2E\x1D\xEB" + "\xA1\x37\x52\x0F\x7B\x5E\x87\xA8" + "\x22\xE2\xE6\x92\xA7\x5F\x11\x32" + "\xCC\x93\x34\xFC\xD1\x7E\xAE\x54" + "\xBC\x6A\x1B\x91\xD1\x2E\x21\xEC" + "\x5D\xF1\xC4\xF1\x55\x20\xBF\xE5" + "\x96\x3D\x69\x91\x20\x4E\xF2\x61" + "\xDA\x77\xFE\xEE\xC3\x74\x57\x2A" + "\x78\x39\xB0\xE0\xCF\x12\x56\xD6" + "\x05\xDC\xF9\x19\x66\x44\x1D\xF9" + "\x82\x37\xD4\xC2\x60\xB6\x31\xDF" + "\x0C\xAF\xBC\x8B\x55\x9A\xC8\x2D" + "\xAB\xA7\x88\x7B\x41\xE8\x29\xC9" + "\x9B\x8D\xA7\x00\x86\x25\xB6\x14" + "\xF5\x13\x73\xD7\x4B\x6B\x83\xF3" + "\xAF\x96\x00\xE4\xB7\x3C\x65\xA6" + "\x15\xB7\x94\x7D\x4E\x70\x4C\x75" + "\xF3\xB4\x02\xA9\x17\x1C\x7A\x0A" + "\xC0\xD5\x33\x11\x56\xDE\xDC\xF5" + "\x8D\xD9\xCD\x3B\x22\x67\x18\xC7" + "\xC4\xF5\x99\x61\xBC\xBB\x5B\x46", + .rlen = 512, + }, +}; + +static struct cipher_testvec cast6_lrw_dec_tv_template[] = { + { /* Generated from TF test vectors */ + .key = "\xf8\xd4\x76\xff\xd6\x46\xee\x6c" + "\x23\x84\xcb\x1c\x77\xd6\x19\x5d" + "\xfe\xf1\xa9\xf3\x7b\xbc\x8d\x21" + "\xa7\x9c\x21\xf8\xcb\x90\x02\x89" + "\xa8\x45\x34\x8e\xc8\xc5\xb5\xf1" + "\x26\xf5\x0e\x76\xfe\xfd\x1b\x1e", + .klen = 48, + .iv = "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x01", + .input = "\x55\x25\x09\x8B\xB5\xD5\xF8\xBF" + "\x37\x4A\xFE\x3C\x47\xD8\xE6\xEB" + "\xCA\xA4\x9B\xB0\xAB\x6D\x64\xCA" + "\x58\xB6\x73\xF0\xD7\x52\x34\xEF" + "\xFB\x3E\x96\x81\xB7\x71\x34\xA4" + "\x55\x20\xBE\x39\x5A\x2B\xF9\xD1" + "\x65\x0B\xDA\xD3\x7E\xB3\xA6\xF7" + "\x2E\x0B\x5A\x52\xDB\x39\x8C\x9B" + "\x61\x17\x5F\xAF\xB6\x5A\xC8\x08" + "\xA7\xB7\x2A\x11\x7C\x97\x38\x9D" + "\x59\x0E\x66\x59\x5E\xD8\x8B\xCE" + "\x70\xE0\xC3\x42\xB0\x8C\x0F\xBA" + "\xB2\x0D\x81\xB6\xBE\x61\x1C\x2D" + "\x7E\xEA\x91\x25\xAC\xEC\xF8\x28" + "\x80\x1D\xF0\x30\xBA\x62\x77\x7D" + "\xDB\x15\x69\xDF\xFA\x2A\x81\x64" + "\x95\x5B\xA4\x7F\x3E\x4F\xE3\x30" + "\xB0\x5C\xC2\x05\xF8\xF0\x29\xE7" + "\x0A\xA0\x66\xB2\x5D\x0F\x39\x2B" + "\xB4\xB3\x00\xA9\xD0\xAB\x63\x61" + "\x5E\xDB\xFC\x11\x74\x25\x96\x65" + "\xE8\xE2\x34\x57\x77\x15\x5E\x70" + "\xFF\x10\x90\xC3\x64\xF0\x11\x0A" + "\x63\x3A\xD3\x55\x92\x15\x4B\x0C" + "\xC7\x08\x89\x17\x3B\x99\xAD\x63" + "\xE7\x06\xDF\x52\xBC\x15\x64\x45" + "\x9D\x7A\xFB\x69\xBC\x2D\x6E\xA9" + "\x35\xD9\xD8\xF5\x0C\xC4\xA2\x23" + "\x9C\x18\x8B\xA8\x8C\xFE\xF8\x0E" + "\xBD\xAB\x60\x1A\x51\x17\x54\x27" + "\xB6\xE8\xBE\x0F\xA9\xA5\x82\x19" + "\x2F\x6F\x20\xA7\x47\xED\x74\x6C" + "\x4E\xC1\xF8\x8C\x14\xF3\xBB\x1F" + "\xED\x4D\x8F\x7C\x37\xEF\x19\xA1" + "\x07\x16\xDE\x76\xCC\x5E\x94\x02" + "\xFB\xBF\xE4\x81\x50\xCE\xFC\x0F" + "\x9E\xCF\x3D\xF6\x67\x00\xBF\xA7" + "\x6E\x21\x58\x36\x06\xDE\xB3\xD4" + "\xA2\xFA\xD8\x4E\xE0\xB9\x7F\x23" + "\x51\x21\x2B\x32\x68\xAA\xF8\xA8" + 
"\x93\x08\xB5\x6D\xE6\x43\x2C\xB7" + "\x31\xB2\x0F\xD0\xA2\x51\xC0\x25" + "\x30\xC7\x10\x3F\x97\x27\x01\x8E" + "\xFA\xD8\x4F\x78\xD8\x2E\x1D\xEB" + "\xA1\x37\x52\x0F\x7B\x5E\x87\xA8" + "\x22\xE2\xE6\x92\xA7\x5F\x11\x32" + "\xCC\x93\x34\xFC\xD1\x7E\xAE\x54" + "\xBC\x6A\x1B\x91\xD1\x2E\x21\xEC" + "\x5D\xF1\xC4\xF1\x55\x20\xBF\xE5" + "\x96\x3D\x69\x91\x20\x4E\xF2\x61" + "\xDA\x77\xFE\xEE\xC3\x74\x57\x2A" + "\x78\x39\xB0\xE0\xCF\x12\x56\xD6" + "\x05\xDC\xF9\x19\x66\x44\x1D\xF9" + "\x82\x37\xD4\xC2\x60\xB6\x31\xDF" + "\x0C\xAF\xBC\x8B\x55\x9A\xC8\x2D" + "\xAB\xA7\x88\x7B\x41\xE8\x29\xC9" + "\x9B\x8D\xA7\x00\x86\x25\xB6\x14" + "\xF5\x13\x73\xD7\x4B\x6B\x83\xF3" + "\xAF\x96\x00\xE4\xB7\x3C\x65\xA6" + "\x15\xB7\x94\x7D\x4E\x70\x4C\x75" + "\xF3\xB4\x02\xA9\x17\x1C\x7A\x0A" + "\xC0\xD5\x33\x11\x56\xDE\xDC\xF5" + "\x8D\xD9\xCD\x3B\x22\x67\x18\xC7" + "\xC4\xF5\x99\x61\xBC\xBB\x5B\x46", + .ilen = 512, + .result = "\x05\x11\xb7\x18\xab\xc6\x2d\xac" + "\x70\x5d\xf6\x22\x94\xcd\xe5\x6c" + "\x17\x6b\xf6\x1c\xf0\xf3\x6e\xf8" + "\x50\x38\x1f\x71\x49\xb6\x57\xd6" + "\x8f\xcb\x8d\x6b\xe3\xa6\x29\x90" + "\xfe\x2a\x62\x82\xae\x6d\x8b\xf6" + "\xad\x1e\x9e\x20\x5f\x38\xbe\x04" + "\xda\x10\x8e\xed\xa2\xa4\x87\xab" + "\xda\x6b\xb4\x0c\x75\xba\xd3\x7c" + "\xc9\xac\x42\x31\x95\x7c\xc9\x04" + "\xeb\xd5\x6e\x32\x69\x8a\xdb\xa6" + "\x15\xd7\x3f\x4f\x2f\x66\x69\x03" + "\x9c\x1f\x54\x0f\xde\x1f\xf3\x65" + "\x4c\x96\x12\xed\x7c\x92\x03\x01" + "\x6f\xbc\x35\x93\xac\xf1\x27\xf1" + "\xb4\x96\x82\x5a\x5f\xb0\xa0\x50" + "\x89\xa4\x8e\x66\x44\x85\xcc\xfd" + "\x33\x14\x70\xe3\x96\xb2\xc3\xd3" + "\xbb\x54\x5a\x1a\xf9\x74\xa2\xc5" + "\x2d\x64\x75\xdd\xb4\x54\xe6\x74" + "\x8c\xd3\x9d\x9e\x86\xab\x51\x53" + "\xb7\x93\x3e\x6f\xd0\x4e\x2c\x40" + "\xf6\xa8\x2e\x3e\x9d\xf4\x66\xa5" + "\x76\x12\x73\x44\x1a\x56\xd7\x72" + "\x88\xcd\x21\x8c\x4c\x0f\xfe\xda" + "\x95\xe0\x3a\xa6\xa5\x84\x46\xcd" + "\xd5\x3e\x9d\x3a\xe2\x67\xe6\x60" + "\x1a\xe2\x70\x85\x58\xc2\x1b\x09" + "\xe1\xd7\x2c\xca\xad\xa8\x8f\xf9" + "\xac\xb3\x0e\xdb\xca\x2e\xe2\xb8" + "\x51\x71\xd9\x3c\x6c\xf1\x56\xf8" + "\xea\x9c\xf1\xfb\x0c\xe6\xb7\x10" + "\x1c\xf8\xa9\x7c\xe8\x53\x35\xc1" + "\x90\x3e\x76\x4a\x74\xa4\x21\x2c" + "\xf6\x2c\x4e\x0f\x94\x3a\x88\x2e" + "\x41\x09\x6a\x33\x7d\xf6\xdd\x3f" + "\x8d\x23\x31\x74\x84\xeb\x88\x6e" + "\xcc\xb9\xbc\x22\x83\x19\x07\x22" + "\xa5\x2d\xdf\xa5\xf3\x80\x85\x78" + "\x84\x39\x6a\x6d\x6a\x99\x4f\xa5" + "\x15\xfe\x46\xb0\xe4\x6c\xa5\x41" + "\x3c\xce\x8f\x42\x60\x71\xa7\x75" + "\x08\x40\x65\x8a\x82\xbf\xf5\x43" + "\x71\x96\xa9\x4d\x44\x8a\x20\xbe" + "\xfa\x4d\xbb\xc0\x7d\x31\x96\x65" + "\xe7\x75\xe5\x3e\xfd\x92\x3b\xc9" + "\x55\xbb\x16\x7e\xf7\xc2\x8c\xa4" + "\x40\x1d\xe5\xef\x0e\xdf\xe4\x9a" + "\x62\x73\x65\xfd\x46\x63\x25\x3d" + "\x2b\xaf\xe5\x64\xfe\xa5\x5c\xcf" + "\x24\xf3\xb4\xac\x64\xba\xdf\x4b" + "\xc6\x96\x7d\x81\x2d\x8d\x97\xf7" + "\xc5\x68\x77\x84\x32\x2b\xcc\x85" + "\x74\x96\xf0\x12\x77\x61\xb9\xeb" + "\x71\xaa\x82\xcb\x1c\xdb\x89\xc8" + "\xc6\xb5\xe3\x5c\x7d\x39\x07\x24" + "\xda\x39\x87\x45\xc0\x2b\xbb\x01" + "\xac\xbc\x2a\x5c\x7f\xfc\xe8\xce" + "\x6d\x9c\x6f\xed\xd3\xc1\xa1\xd6" + "\xc5\x55\xa9\x66\x2f\xe1\xc8\x32" + "\xa6\x5d\xa4\x3a\x98\x73\xe8\x45" + "\xa4\xc7\xa8\xb4\xf6\x13\x03\xf6" + "\xe9\x2e\xc4\x29\x0f\x84\xdb\xc4" + "\x21\xc4\xc2\x75\x67\x89\x37\x0a", + .rlen = 512, + }, +}; + +static struct cipher_testvec cast6_xts_enc_tv_template[] = { + { /* Generated from TF test vectors */ + .key = "\x27\x18\x28\x18\x28\x45\x90\x45" + "\x23\x53\x60\x28\x74\x71\x35\x26" + "\x62\x49\x77\x57\x24\x70\x93\x69" + 
"\x99\x59\x57\x49\x66\x96\x76\x27" + "\x31\x41\x59\x26\x53\x58\x97\x93" + "\x23\x84\x62\x64\x33\x83\x27\x95" + "\x02\x88\x41\x97\x16\x93\x99\x37" + "\x51\x05\x82\x09\x74\x94\x45\x92", + .klen = 64, + .iv = "\xff\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00", + .input = "\x00\x01\x02\x03\x04\x05\x06\x07" + "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f" + "\x10\x11\x12\x13\x14\x15\x16\x17" + "\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f" + "\x20\x21\x22\x23\x24\x25\x26\x27" + "\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f" + "\x30\x31\x32\x33\x34\x35\x36\x37" + "\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f" + "\x40\x41\x42\x43\x44\x45\x46\x47" + "\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f" + "\x50\x51\x52\x53\x54\x55\x56\x57" + "\x58\x59\x5a\x5b\x5c\x5d\x5e\x5f" + "\x60\x61\x62\x63\x64\x65\x66\x67" + "\x68\x69\x6a\x6b\x6c\x6d\x6e\x6f" + "\x70\x71\x72\x73\x74\x75\x76\x77" + "\x78\x79\x7a\x7b\x7c\x7d\x7e\x7f" + "\x80\x81\x82\x83\x84\x85\x86\x87" + "\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f" + "\x90\x91\x92\x93\x94\x95\x96\x97" + "\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f" + "\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7" + "\xa8\xa9\xaa\xab\xac\xad\xae\xaf" + "\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7" + "\xb8\xb9\xba\xbb\xbc\xbd\xbe\xbf" + "\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7" + "\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf" + "\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd7" + "\xd8\xd9\xda\xdb\xdc\xdd\xde\xdf" + "\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7" + "\xe8\xe9\xea\xeb\xec\xed\xee\xef" + "\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7" + "\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff" + "\x00\x01\x02\x03\x04\x05\x06\x07" + "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f" + "\x10\x11\x12\x13\x14\x15\x16\x17" + "\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f" + "\x20\x21\x22\x23\x24\x25\x26\x27" + "\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f" + "\x30\x31\x32\x33\x34\x35\x36\x37" + "\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f" + "\x40\x41\x42\x43\x44\x45\x46\x47" + "\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f" + "\x50\x51\x52\x53\x54\x55\x56\x57" + "\x58\x59\x5a\x5b\x5c\x5d\x5e\x5f" + "\x60\x61\x62\x63\x64\x65\x66\x67" + "\x68\x69\x6a\x6b\x6c\x6d\x6e\x6f" + "\x70\x71\x72\x73\x74\x75\x76\x77" + "\x78\x79\x7a\x7b\x7c\x7d\x7e\x7f" + "\x80\x81\x82\x83\x84\x85\x86\x87" + "\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f" + "\x90\x91\x92\x93\x94\x95\x96\x97" + "\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f" + "\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7" + "\xa8\xa9\xaa\xab\xac\xad\xae\xaf" + "\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7" + "\xb8\xb9\xba\xbb\xbc\xbd\xbe\xbf" + "\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7" + "\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf" + "\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd7" + "\xd8\xd9\xda\xdb\xdc\xdd\xde\xdf" + "\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7" + "\xe8\xe9\xea\xeb\xec\xed\xee\xef" + "\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7" + "\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff", + .ilen = 512, + .result = "\xDE\x6F\x22\xA5\xE8\x39\xE8\x78" + "\x88\x5A\x4F\x8D\x82\x76\x52\x6D" + "\xB2\x41\x16\xF4\x2B\xA6\xEB\xF6" + "\xE2\xC5\x62\x8D\x61\xA1\x01\xED" + "\xD9\x38\x01\xC1\x43\x63\x4E\x88" + "\xC9\x4B\x5A\x88\x80\xB7\x5C\x71" + "\x47\xEE\x11\xD8\xB7\x2D\x5D\x13" + "\x1A\xB1\x68\x5B\x61\xA7\xA9\x81" + "\x8B\x83\xA1\x6A\xAA\x36\xD6\xB6" + "\x60\x54\x09\x32\xFE\x6A\x76\x2E" + "\x28\xFF\xD5\xD6\xDD\x1D\x45\x7D" + "\xF0\x8B\xF3\x32\x4E\x6C\x12\xCB" + "\xB8\x25\x70\xF8\x40\xBC\x90\x1B" + "\x11\xC3\x59\xAF\xF0\x2F\x92\xDD" + "\xD3\x3B\xCF\x60\xA1\x78\x94\x57" + "\xAF\x76\xC1\x67\xA6\x3C\xCD\x98" + "\xB1\xF7\x27\xB9\xA3\xBD\x10\xEA" + "\xCD\x8B\xC2\xF2\x14\xF2\xB2\x67" + "\x05\xDD\x1D\x58\x6E\x2F\x95\x08" + "\x3A\xF8\x78\x76\x82\x56\xA7\xEC" + "\x51\x4B\x85\x77\xC2\x4C\x4A\x34" + "\x71\x38\x17\x91\x44\xE8\xFC\x65" + "\x99\x0D\x52\x91\xEE\xF8\xEF\x27" + 
"\x2A\x9E\x6E\x78\xC4\x26\x87\xF4" + "\x8A\xF0\x2D\x04\xE8\x14\x92\x5D" + "\x59\x22\x9B\x29\x5C\x18\xF0\xC3" + "\x47\xF3\x76\xD8\xE4\xF3\x1B\xD1" + "\x70\xA3\x0D\xB5\x70\x02\x1D\xA3" + "\x91\x3B\x49\x73\x18\xAB\xD4\xC9" + "\xC3\x1E\xEF\x1F\xFE\xD5\x59\x8A" + "\xD7\xF6\xC9\x71\x67\x79\xD7\x0E" + "\xBE\x1F\x8E\xEC\x55\x7E\x4F\x24" + "\xE6\x87\xEA\xFE\x96\x25\x67\x8E" + "\x93\x03\xFA\xFF\xCE\xAF\xB2\x3C" + "\x6F\xEB\x57\xFB\xD3\x28\x87\xA9" + "\xCE\xC2\xF5\x9C\xC6\x67\xB5\x97" + "\x49\xF7\x04\xCB\xEF\x84\x98\x33" + "\xAF\x38\xD3\x04\x1C\x24\x71\x38" + "\xC7\x71\xDD\x43\x0D\x12\x4A\x18" + "\xBA\xC4\xAF\xBA\xB2\x5B\xEB\x95" + "\x02\x43\x5D\xCE\x19\xCC\xCD\x66" + "\x91\x0B\x8C\x7F\x51\xC4\xBF\x3C" + "\x8B\xF1\xCC\xAA\x29\xD7\x87\xCB" + "\x3E\xC5\xF3\xC9\x75\xE8\xA3\x5B" + "\x30\x45\xA9\xB7\xAF\x80\x64\x6F" + "\x75\x4A\xA7\xC0\x6D\x19\x6B\xDE" + "\x17\xDE\x6D\xEA\x87\x9F\x95\xAE" + "\xF5\x3C\xEE\x54\xB8\x27\x84\xF8" + "\x97\xA3\xE1\x6F\x38\x24\x34\x88" + "\xCE\xBD\x32\x52\xE0\x00\x6C\x94" + "\xC9\xD7\x5D\x37\x81\x33\x2E\x7F" + "\x4F\x7E\x2E\x0D\x94\xBD\xEA\x59" + "\x34\x39\xA8\x35\x12\xB7\xBC\xAC" + "\xEA\x52\x9C\x78\x02\x6D\x92\x36" + "\xFB\x59\x2B\xA4\xEA\x7B\x1B\x83" + "\xE1\x4D\x5E\x2A\x7E\x92\xB1\x64" + "\xDE\xE0\x27\x4B\x0A\x6F\x4C\xE3" + "\xB0\xEB\x31\xE4\x69\x95\xAB\x35" + "\x8B\x2C\xF5\x6B\x7F\xF1\xA2\x82" + "\xF8\xD9\x47\x82\xA9\x82\x03\x91" + "\x69\x1F\xBE\x4C\xE7\xC7\x34\x2F" + "\x45\x72\x80\x17\x81\xBD\x9D\x62" + "\xA1\xAC\xE8\xCF\xC6\x74\xCF\xDC" + "\x22\x60\x4E\xE8\xA4\x5D\x85\xB9", + .rlen = 512, + }, +}; + +static struct cipher_testvec cast6_xts_dec_tv_template[] = { + { /* Generated from TF test vectors */ + .key = "\x27\x18\x28\x18\x28\x45\x90\x45" + "\x23\x53\x60\x28\x74\x71\x35\x26" + "\x62\x49\x77\x57\x24\x70\x93\x69" + "\x99\x59\x57\x49\x66\x96\x76\x27" + "\x31\x41\x59\x26\x53\x58\x97\x93" + "\x23\x84\x62\x64\x33\x83\x27\x95" + "\x02\x88\x41\x97\x16\x93\x99\x37" + "\x51\x05\x82\x09\x74\x94\x45\x92", + .klen = 64, + .iv = "\xff\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00", + .input = "\xDE\x6F\x22\xA5\xE8\x39\xE8\x78" + "\x88\x5A\x4F\x8D\x82\x76\x52\x6D" + "\xB2\x41\x16\xF4\x2B\xA6\xEB\xF6" + "\xE2\xC5\x62\x8D\x61\xA1\x01\xED" + "\xD9\x38\x01\xC1\x43\x63\x4E\x88" + "\xC9\x4B\x5A\x88\x80\xB7\x5C\x71" + "\x47\xEE\x11\xD8\xB7\x2D\x5D\x13" + "\x1A\xB1\x68\x5B\x61\xA7\xA9\x81" + "\x8B\x83\xA1\x6A\xAA\x36\xD6\xB6" + "\x60\x54\x09\x32\xFE\x6A\x76\x2E" + "\x28\xFF\xD5\xD6\xDD\x1D\x45\x7D" + "\xF0\x8B\xF3\x32\x4E\x6C\x12\xCB" + "\xB8\x25\x70\xF8\x40\xBC\x90\x1B" + "\x11\xC3\x59\xAF\xF0\x2F\x92\xDD" + "\xD3\x3B\xCF\x60\xA1\x78\x94\x57" + "\xAF\x76\xC1\x67\xA6\x3C\xCD\x98" + "\xB1\xF7\x27\xB9\xA3\xBD\x10\xEA" + "\xCD\x8B\xC2\xF2\x14\xF2\xB2\x67" + "\x05\xDD\x1D\x58\x6E\x2F\x95\x08" + "\x3A\xF8\x78\x76\x82\x56\xA7\xEC" + "\x51\x4B\x85\x77\xC2\x4C\x4A\x34" + "\x71\x38\x17\x91\x44\xE8\xFC\x65" + "\x99\x0D\x52\x91\xEE\xF8\xEF\x27" + "\x2A\x9E\x6E\x78\xC4\x26\x87\xF4" + "\x8A\xF0\x2D\x04\xE8\x14\x92\x5D" + "\x59\x22\x9B\x29\x5C\x18\xF0\xC3" + "\x47\xF3\x76\xD8\xE4\xF3\x1B\xD1" + "\x70\xA3\x0D\xB5\x70\x02\x1D\xA3" + "\x91\x3B\x49\x73\x18\xAB\xD4\xC9" + "\xC3\x1E\xEF\x1F\xFE\xD5\x59\x8A" + "\xD7\xF6\xC9\x71\x67\x79\xD7\x0E" + "\xBE\x1F\x8E\xEC\x55\x7E\x4F\x24" + "\xE6\x87\xEA\xFE\x96\x25\x67\x8E" + "\x93\x03\xFA\xFF\xCE\xAF\xB2\x3C" + "\x6F\xEB\x57\xFB\xD3\x28\x87\xA9" + "\xCE\xC2\xF5\x9C\xC6\x67\xB5\x97" + "\x49\xF7\x04\xCB\xEF\x84\x98\x33" + "\xAF\x38\xD3\x04\x1C\x24\x71\x38" + "\xC7\x71\xDD\x43\x0D\x12\x4A\x18" + "\xBA\xC4\xAF\xBA\xB2\x5B\xEB\x95" + 
"\x02\x43\x5D\xCE\x19\xCC\xCD\x66" + "\x91\x0B\x8C\x7F\x51\xC4\xBF\x3C" + "\x8B\xF1\xCC\xAA\x29\xD7\x87\xCB" + "\x3E\xC5\xF3\xC9\x75\xE8\xA3\x5B" + "\x30\x45\xA9\xB7\xAF\x80\x64\x6F" + "\x75\x4A\xA7\xC0\x6D\x19\x6B\xDE" + "\x17\xDE\x6D\xEA\x87\x9F\x95\xAE" + "\xF5\x3C\xEE\x54\xB8\x27\x84\xF8" + "\x97\xA3\xE1\x6F\x38\x24\x34\x88" + "\xCE\xBD\x32\x52\xE0\x00\x6C\x94" + "\xC9\xD7\x5D\x37\x81\x33\x2E\x7F" + "\x4F\x7E\x2E\x0D\x94\xBD\xEA\x59" + "\x34\x39\xA8\x35\x12\xB7\xBC\xAC" + "\xEA\x52\x9C\x78\x02\x6D\x92\x36" + "\xFB\x59\x2B\xA4\xEA\x7B\x1B\x83" + "\xE1\x4D\x5E\x2A\x7E\x92\xB1\x64" + "\xDE\xE0\x27\x4B\x0A\x6F\x4C\xE3" + "\xB0\xEB\x31\xE4\x69\x95\xAB\x35" + "\x8B\x2C\xF5\x6B\x7F\xF1\xA2\x82" + "\xF8\xD9\x47\x82\xA9\x82\x03\x91" + "\x69\x1F\xBE\x4C\xE7\xC7\x34\x2F" + "\x45\x72\x80\x17\x81\xBD\x9D\x62" + "\xA1\xAC\xE8\xCF\xC6\x74\xCF\xDC" + "\x22\x60\x4E\xE8\xA4\x5D\x85\xB9", + .ilen = 512, + .result = "\x00\x01\x02\x03\x04\x05\x06\x07" + "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f" + "\x10\x11\x12\x13\x14\x15\x16\x17" + "\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f" + "\x20\x21\x22\x23\x24\x25\x26\x27" + "\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f" + "\x30\x31\x32\x33\x34\x35\x36\x37" + "\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f" + "\x40\x41\x42\x43\x44\x45\x46\x47" + "\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f" + "\x50\x51\x52\x53\x54\x55\x56\x57" + "\x58\x59\x5a\x5b\x5c\x5d\x5e\x5f" + "\x60\x61\x62\x63\x64\x65\x66\x67" + "\x68\x69\x6a\x6b\x6c\x6d\x6e\x6f" + "\x70\x71\x72\x73\x74\x75\x76\x77" + "\x78\x79\x7a\x7b\x7c\x7d\x7e\x7f" + "\x80\x81\x82\x83\x84\x85\x86\x87" + "\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f" + "\x90\x91\x92\x93\x94\x95\x96\x97" + "\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f" + "\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7" + "\xa8\xa9\xaa\xab\xac\xad\xae\xaf" + "\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7" + "\xb8\xb9\xba\xbb\xbc\xbd\xbe\xbf" + "\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7" + "\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf" + "\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd7" + "\xd8\xd9\xda\xdb\xdc\xdd\xde\xdf" + "\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7" + "\xe8\xe9\xea\xeb\xec\xed\xee\xef" + "\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7" + "\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff" + "\x00\x01\x02\x03\x04\x05\x06\x07" + "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f" + "\x10\x11\x12\x13\x14\x15\x16\x17" + "\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f" + "\x20\x21\x22\x23\x24\x25\x26\x27" + "\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f" + "\x30\x31\x32\x33\x34\x35\x36\x37" + "\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f" + "\x40\x41\x42\x43\x44\x45\x46\x47" + "\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f" + "\x50\x51\x52\x53\x54\x55\x56\x57" + "\x58\x59\x5a\x5b\x5c\x5d\x5e\x5f" + "\x60\x61\x62\x63\x64\x65\x66\x67" + "\x68\x69\x6a\x6b\x6c\x6d\x6e\x6f" + "\x70\x71\x72\x73\x74\x75\x76\x77" + "\x78\x79\x7a\x7b\x7c\x7d\x7e\x7f" + "\x80\x81\x82\x83\x84\x85\x86\x87" + "\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f" + "\x90\x91\x92\x93\x94\x95\x96\x97" + "\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f" + "\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7" + "\xa8\xa9\xaa\xab\xac\xad\xae\xaf" + "\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7" + "\xb8\xb9\xba\xbb\xbc\xbd\xbe\xbf" + "\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7" + "\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf" + "\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd7" + "\xd8\xd9\xda\xdb\xdc\xdd\xde\xdf" + "\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7" + "\xe8\xe9\xea\xeb\xec\xed\xee\xef" + "\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7" + "\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff", + .rlen = 512, }, }; -- cgit v1.2.3 From 4ea1277d301eb776e321684cd4ea95116b4e8847 Mon Sep 17 00:00:00 2001 From: Johannes Goetzfried Date: Wed, 11 Jul 2012 19:38:57 +0200 Subject: crypto: cast6 - add x86_64/avx assembler implementation This patch adds a x86_64/avx 
assembler implementation of the Cast6 block cipher. The implementation processes eight blocks in parallel (two 4 block chunk AVX operations). The table-lookups are done in general-purpose registers. For small blocksizes the functions from the generic module are called. A good performance increase is provided for blocksizes greater or equal to 128B. Patch has been tested with tcrypt and automated filesystem tests.

Tcrypt benchmark results:
Intel Core i5-2500 CPU (fam:6, model:42, step:7)
cast6-avx-x86_64 vs. cast6-generic

128bit key:                                 (lrw:256bit)    (xts:256bit)
size    ecb-enc ecb-dec cbc-enc cbc-dec ctr-enc ctr-dec lrw-enc lrw-dec xts-enc xts-dec
16B     0.97x   1.00x   1.01x   1.01x   0.99x   0.97x   0.98x   1.01x   0.96x   0.98x
64B     0.98x   0.99x   1.02x   1.01x   0.99x   1.00x   1.01x   0.99x   1.00x   0.99x
256B    1.77x   1.84x   0.99x   1.85x   1.77x   1.77x   1.70x   1.74x   1.69x   1.72x
1024B   1.93x   1.95x   0.99x   1.96x   1.93x   1.93x   1.84x   1.85x   1.89x   1.87x
8192B   1.91x   1.95x   0.99x   1.97x   1.95x   1.91x   1.86x   1.87x   1.93x   1.90x

256bit key:                                 (lrw:384bit)    (xts:512bit)
size    ecb-enc ecb-dec cbc-enc cbc-dec ctr-enc ctr-dec lrw-enc lrw-dec xts-enc xts-dec
16B     0.97x   0.99x   1.02x   1.01x   0.98x   0.99x   1.00x   1.00x   0.98x   0.98x
64B     0.98x   0.99x   1.01x   1.00x   1.00x   1.00x   1.01x   1.01x   0.97x   1.00x
256B    1.77x   1.83x   1.00x   1.86x   1.79x   1.78x   1.70x   1.76x   1.71x   1.69x
1024B   1.92x   1.95x   0.99x   1.96x   1.93x   1.93x   1.83x   1.86x   1.89x   1.87x
8192B   1.94x   1.95x   0.99x   1.97x   1.95x   1.95x   1.87x   1.87x   1.93x   1.91x

Signed-off-by: Johannes Goetzfried Signed-off-by: Herbert Xu --- arch/x86/crypto/Makefile | 2 + arch/x86/crypto/cast6-avx-x86_64-asm_64.S | 335 +++++++++++++++ arch/x86/crypto/cast6_avx_glue.c | 648 ++++++++++++++++++++++++++++++ crypto/Kconfig | 17 + crypto/testmgr.c | 60 +++ 5 files changed, 1062 insertions(+) create mode 100644 arch/x86/crypto/cast6-avx-x86_64-asm_64.S create mode 100644 arch/x86/crypto/cast6_avx_glue.c (limited to 'crypto') diff --git a/arch/x86/crypto/Makefile b/arch/x86/crypto/Makefile index 565e82b00142..5bacb4a226ac 100644 --- a/arch/x86/crypto/Makefile +++ b/arch/x86/crypto/Makefile @@ -13,6 +13,7 @@ obj-$(CONFIG_CRYPTO_SERPENT_SSE2_586) += serpent-sse2-i586.o obj-$(CONFIG_CRYPTO_AES_X86_64) += aes-x86_64.o obj-$(CONFIG_CRYPTO_CAMELLIA_X86_64) += camellia-x86_64.o obj-$(CONFIG_CRYPTO_CAST5_AVX_X86_64) += cast5-avx-x86_64.o +obj-$(CONFIG_CRYPTO_CAST6_AVX_X86_64) += cast6-avx-x86_64.o obj-$(CONFIG_CRYPTO_BLOWFISH_X86_64) += blowfish-x86_64.o obj-$(CONFIG_CRYPTO_TWOFISH_X86_64) += twofish-x86_64.o obj-$(CONFIG_CRYPTO_TWOFISH_X86_64_3WAY) += twofish-x86_64-3way.o @@ -34,6 +35,7 @@ serpent-sse2-i586-y := serpent-sse2-i586-asm_32.o serpent_sse2_glue.o aes-x86_64-y := aes-x86_64-asm_64.o aes_glue.o camellia-x86_64-y := camellia-x86_64-asm_64.o camellia_glue.o cast5-avx-x86_64-y := cast5-avx-x86_64-asm_64.o cast5_avx_glue.o +cast6-avx-x86_64-y := cast6-avx-x86_64-asm_64.o cast6_avx_glue.o blowfish-x86_64-y := blowfish-x86_64-asm_64.o blowfish_glue.o twofish-x86_64-y := twofish-x86_64-asm_64.o twofish_glue.o twofish-x86_64-3way-y := twofish-x86_64-asm_64-3way.o twofish_glue_3way.o diff --git a/arch/x86/crypto/cast6-avx-x86_64-asm_64.S b/arch/x86/crypto/cast6-avx-x86_64-asm_64.S new file mode 100644 index 000000000000..d258ce0d2e06 --- /dev/null +++ b/arch/x86/crypto/cast6-avx-x86_64-asm_64.S @@ -0,0 +1,335 @@ +/* + * Cast6 Cipher 8-way parallel algorithm (AVX/x86_64) + * + * Copyright (C) 2012 Johannes Goetzfried + * + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free
Software Foundation; either version 2 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 + * USA + * + */ + +.file "cast6-avx-x86_64-asm_64.S" +.text + +.extern cast6_s1 +.extern cast6_s2 +.extern cast6_s3 +.extern cast6_s4 + +/* structure of crypto context */ +#define km 0 +#define kr (12*4*4) + +/* s-boxes */ +#define s1 cast6_s1 +#define s2 cast6_s2 +#define s3 cast6_s3 +#define s4 cast6_s4 + +/********************************************************************** + 8-way AVX cast6 + **********************************************************************/ +#define CTX %rdi + +#define RA1 %xmm0 +#define RB1 %xmm1 +#define RC1 %xmm2 +#define RD1 %xmm3 + +#define RA2 %xmm4 +#define RB2 %xmm5 +#define RC2 %xmm6 +#define RD2 %xmm7 + +#define RX %xmm8 + +#define RKM %xmm9 +#define RKRF %xmm10 +#define RKRR %xmm11 + +#define RTMP %xmm12 +#define RMASK %xmm13 +#define R32 %xmm14 + +#define RID1 %rax +#define RID1b %al +#define RID2 %rbx +#define RID2b %bl + +#define RGI1 %rdx +#define RGI1bl %dl +#define RGI1bh %dh +#define RGI2 %rcx +#define RGI2bl %cl +#define RGI2bh %ch + +#define RFS1 %r8 +#define RFS1d %r8d +#define RFS2 %r9 +#define RFS2d %r9d +#define RFS3 %r10 +#define RFS3d %r10d + + +#define lookup_32bit(src, dst, op1, op2, op3) \ + movb src ## bl, RID1b; \ + movb src ## bh, RID2b; \ + movl s1(, RID1, 4), dst ## d; \ + op1 s2(, RID2, 4), dst ## d; \ + shrq $16, src; \ + movb src ## bl, RID1b; \ + movb src ## bh, RID2b; \ + op2 s3(, RID1, 4), dst ## d; \ + op3 s4(, RID2, 4), dst ## d; + +#define F(a, x, op0, op1, op2, op3) \ + op0 a, RKM, x; \ + vpslld RKRF, x, RTMP; \ + vpsrld RKRR, x, x; \ + vpor RTMP, x, x; \ + \ + vpshufb RMASK, x, x; \ + vmovq x, RGI1; \ + vpsrldq $8, x, x; \ + vmovq x, RGI2; \ + \ + lookup_32bit(RGI1, RFS1, op1, op2, op3); \ + shrq $16, RGI1; \ + lookup_32bit(RGI1, RFS2, op1, op2, op3); \ + shlq $32, RFS2; \ + orq RFS1, RFS2; \ + \ + lookup_32bit(RGI2, RFS1, op1, op2, op3); \ + shrq $16, RGI2; \ + lookup_32bit(RGI2, RFS3, op1, op2, op3); \ + shlq $32, RFS3; \ + orq RFS1, RFS3; \ + \ + vmovq RFS2, x; \ + vpinsrq $1, RFS3, x, x; + +#define F1(b, x) F(b, x, vpaddd, xorl, subl, addl) +#define F2(b, x) F(b, x, vpxor, subl, addl, xorl) +#define F3(b, x) F(b, x, vpsubd, addl, xorl, subl) + +#define qop(in, out, x, f) \ + F ## f(in ## 1, x); \ + vpxor out ## 1, x, out ## 1; \ + F ## f(in ## 2, x); \ + vpxor out ## 2, x, out ## 2; \ + +#define Q(n) \ + vbroadcastss (km+(4*(4*n+0)))(CTX), RKM; \ + vpinsrb $0, (kr+(4*n+0))(CTX), RKRF, RKRF; \ + vpsubq RKRF, R32, RKRR; \ + qop(RD, RC, RX, 1); \ + \ + vbroadcastss (km+(4*(4*n+1)))(CTX), RKM; \ + vpinsrb $0, (kr+(4*n+1))(CTX), RKRF, RKRF; \ + vpsubq RKRF, R32, RKRR; \ + qop(RC, RB, RX, 2); \ + \ + vbroadcastss (km+(4*(4*n+2)))(CTX), RKM; \ + vpinsrb $0, (kr+(4*n+2))(CTX), RKRF, RKRF; \ + vpsubq RKRF, R32, RKRR; \ + qop(RB, RA, RX, 3); \ + \ + vbroadcastss (km+(4*(4*n+3)))(CTX), RKM; \ + vpinsrb $0, (kr+(4*n+3))(CTX), RKRF, RKRF; \ + vpsubq RKRF, R32, RKRR; \ + qop(RA, RD, RX, 1); + +#define QBAR(n) \ + vbroadcastss (km+(4*(4*n+3)))(CTX), RKM; \ + vpinsrb $0, 
(kr+(4*n+3))(CTX), RKRF, RKRF; \ + vpsubq RKRF, R32, RKRR; \ + qop(RA, RD, RX, 1); \ + \ + vbroadcastss (km+(4*(4*n+2)))(CTX), RKM; \ + vpinsrb $0, (kr+(4*n+2))(CTX), RKRF, RKRF; \ + vpsubq RKRF, R32, RKRR; \ + qop(RB, RA, RX, 3); \ + \ + vbroadcastss (km+(4*(4*n+1)))(CTX), RKM; \ + vpinsrb $0, (kr+(4*n+1))(CTX), RKRF, RKRF; \ + vpsubq RKRF, R32, RKRR; \ + qop(RC, RB, RX, 2); \ + \ + vbroadcastss (km+(4*(4*n+0)))(CTX), RKM; \ + vpinsrb $0, (kr+(4*n+0))(CTX), RKRF, RKRF; \ + vpsubq RKRF, R32, RKRR; \ + qop(RD, RC, RX, 1); + + +#define transpose_4x4(x0, x1, x2, x3, t0, t1, t2) \ + vpunpckldq x1, x0, t0; \ + vpunpckhdq x1, x0, t2; \ + vpunpckldq x3, x2, t1; \ + vpunpckhdq x3, x2, x3; \ + \ + vpunpcklqdq t1, t0, x0; \ + vpunpckhqdq t1, t0, x1; \ + vpunpcklqdq x3, t2, x2; \ + vpunpckhqdq x3, t2, x3; + +#define inpack_blocks(in, x0, x1, x2, x3, t0, t1, t2) \ + vmovdqu (0*4*4)(in), x0; \ + vmovdqu (1*4*4)(in), x1; \ + vmovdqu (2*4*4)(in), x2; \ + vmovdqu (3*4*4)(in), x3; \ + vpshufb RMASK, x0, x0; \ + vpshufb RMASK, x1, x1; \ + vpshufb RMASK, x2, x2; \ + vpshufb RMASK, x3, x3; \ + \ + transpose_4x4(x0, x1, x2, x3, t0, t1, t2) + +#define outunpack_blocks(out, x0, x1, x2, x3, t0, t1, t2) \ + transpose_4x4(x0, x1, x2, x3, t0, t1, t2) \ + \ + vpshufb RMASK, x0, x0; \ + vpshufb RMASK, x1, x1; \ + vpshufb RMASK, x2, x2; \ + vpshufb RMASK, x3, x3; \ + vmovdqu x0, (0*4*4)(out); \ + vmovdqu x1, (1*4*4)(out); \ + vmovdqu x2, (2*4*4)(out); \ + vmovdqu x3, (3*4*4)(out); + +#define outunpack_xor_blocks(out, x0, x1, x2, x3, t0, t1, t2) \ + transpose_4x4(x0, x1, x2, x3, t0, t1, t2) \ + \ + vpshufb RMASK, x0, x0; \ + vpshufb RMASK, x1, x1; \ + vpshufb RMASK, x2, x2; \ + vpshufb RMASK, x3, x3; \ + vpxor (0*4*4)(out), x0, x0; \ + vmovdqu x0, (0*4*4)(out); \ + vpxor (1*4*4)(out), x1, x1; \ + vmovdqu x1, (1*4*4)(out); \ + vpxor (2*4*4)(out), x2, x2; \ + vmovdqu x2, (2*4*4)(out); \ + vpxor (3*4*4)(out), x3, x3; \ + vmovdqu x3, (3*4*4)(out); + +.align 16 +.Lbswap_mask: + .byte 3, 2, 1, 0, 7, 6, 5, 4, 11, 10, 9, 8, 15, 14, 13, 12 +.L32_mask: + .byte 32, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ,0, 0, 0, 0, 0 + +.align 16 +.global __cast6_enc_blk_8way +.type __cast6_enc_blk_8way,@function; + +__cast6_enc_blk_8way: + /* input: + * %rdi: ctx, CTX + * %rsi: dst + * %rdx: src + * %rcx: bool, if true: xor output + */ + + pushq %rbx; + pushq %rcx; + + vmovdqu .Lbswap_mask, RMASK; + vmovdqu .L32_mask, R32; + vpxor RKRF, RKRF, RKRF; + + leaq (4*4*4)(%rdx), %rax; + inpack_blocks(%rdx, RA1, RB1, RC1, RD1, RTMP, RX, RKM); + inpack_blocks(%rax, RA2, RB2, RC2, RD2, RTMP, RX, RKM); + + xorq RID1, RID1; + xorq RID2, RID2; + + Q(0); + Q(1); + Q(2); + Q(3); + Q(4); + Q(5); + QBAR(6); + QBAR(7); + QBAR(8); + QBAR(9); + QBAR(10); + QBAR(11); + + popq %rcx; + popq %rbx; + + leaq (4*4*4)(%rsi), %rax; + + testb %cl, %cl; + jnz __enc_xor8; + + outunpack_blocks(%rsi, RA1, RB1, RC1, RD1, RTMP, RX, RKM); + outunpack_blocks(%rax, RA2, RB2, RC2, RD2, RTMP, RX, RKM); + + ret; + +__enc_xor8: + outunpack_xor_blocks(%rsi, RA1, RB1, RC1, RD1, RTMP, RX, RKM); + outunpack_xor_blocks(%rax, RA2, RB2, RC2, RD2, RTMP, RX, RKM); + + ret; + +.align 16 +.global cast6_dec_blk_8way +.type cast6_dec_blk_8way,@function; + +cast6_dec_blk_8way: + /* input: + * %rdi: ctx, CTX + * %rsi: dst + * %rdx: src + */ + + pushq %rbx; + + vmovdqu .Lbswap_mask, RMASK; + vmovdqu .L32_mask, R32; + vpxor RKRF, RKRF, RKRF; + + leaq (4*4*4)(%rdx), %rax; + inpack_blocks(%rdx, RA1, RB1, RC1, RD1, RTMP, RX, RKM); + inpack_blocks(%rax, RA2, RB2, RC2, RD2, RTMP, RX, RKM); + + xorq RID1, RID1; + xorq 
RID2, RID2; + + Q(11); + Q(10); + Q(9); + Q(8); + Q(7); + Q(6); + QBAR(5); + QBAR(4); + QBAR(3); + QBAR(2); + QBAR(1); + QBAR(0); + + popq %rbx; + + leaq (4*4*4)(%rsi), %rax; + outunpack_blocks(%rsi, RA1, RB1, RC1, RD1, RTMP, RX, RKM); + outunpack_blocks(%rax, RA2, RB2, RC2, RD2, RTMP, RX, RKM); + + ret; diff --git a/arch/x86/crypto/cast6_avx_glue.c b/arch/x86/crypto/cast6_avx_glue.c new file mode 100644 index 000000000000..15e5f85a5011 --- /dev/null +++ b/arch/x86/crypto/cast6_avx_glue.c @@ -0,0 +1,648 @@ +/* + * Glue Code for the AVX assembler implemention of the Cast6 Cipher + * + * Copyright (C) 2012 Johannes Goetzfried + * + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; either version 2 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 + * USA + * + */ + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#define CAST6_PARALLEL_BLOCKS 8 + +asmlinkage void __cast6_enc_blk_8way(struct cast6_ctx *ctx, u8 *dst, + const u8 *src, bool xor); +asmlinkage void cast6_dec_blk_8way(struct cast6_ctx *ctx, u8 *dst, + const u8 *src); + +static inline void cast6_enc_blk_xway(struct cast6_ctx *ctx, u8 *dst, + const u8 *src) +{ + __cast6_enc_blk_8way(ctx, dst, src, false); +} + +static inline void cast6_enc_blk_xway_xor(struct cast6_ctx *ctx, u8 *dst, + const u8 *src) +{ + __cast6_enc_blk_8way(ctx, dst, src, true); +} + +static inline void cast6_dec_blk_xway(struct cast6_ctx *ctx, u8 *dst, + const u8 *src) +{ + cast6_dec_blk_8way(ctx, dst, src); +} + + +static void cast6_decrypt_cbc_xway(void *ctx, u128 *dst, const u128 *src) +{ + u128 ivs[CAST6_PARALLEL_BLOCKS - 1]; + unsigned int j; + + for (j = 0; j < CAST6_PARALLEL_BLOCKS - 1; j++) + ivs[j] = src[j]; + + cast6_dec_blk_xway(ctx, (u8 *)dst, (u8 *)src); + + for (j = 0; j < CAST6_PARALLEL_BLOCKS - 1; j++) + u128_xor(dst + (j + 1), dst + (j + 1), ivs + j); +} + +static void cast6_crypt_ctr(void *ctx, u128 *dst, const u128 *src, u128 *iv) +{ + be128 ctrblk; + + u128_to_be128(&ctrblk, iv); + u128_inc(iv); + + __cast6_encrypt(ctx, (u8 *)&ctrblk, (u8 *)&ctrblk); + u128_xor(dst, src, (u128 *)&ctrblk); +} + +static void cast6_crypt_ctr_xway(void *ctx, u128 *dst, const u128 *src, + u128 *iv) +{ + be128 ctrblks[CAST6_PARALLEL_BLOCKS]; + unsigned int i; + + for (i = 0; i < CAST6_PARALLEL_BLOCKS; i++) { + if (dst != src) + dst[i] = src[i]; + + u128_to_be128(&ctrblks[i], iv); + u128_inc(iv); + } + + cast6_enc_blk_xway_xor(ctx, (u8 *)dst, (u8 *)ctrblks); +} + +static const struct common_glue_ctx cast6_enc = { + .num_funcs = 2, + .fpu_blocks_limit = CAST6_PARALLEL_BLOCKS, + + .funcs = { { + .num_blocks = CAST6_PARALLEL_BLOCKS, + .fn_u = { .ecb = GLUE_FUNC_CAST(cast6_enc_blk_xway) } + }, { + .num_blocks = 1, + .fn_u = { .ecb = GLUE_FUNC_CAST(__cast6_encrypt) } + } } +}; + +static const struct common_glue_ctx cast6_ctr = { + .num_funcs = 2, + .fpu_blocks_limit = 
CAST6_PARALLEL_BLOCKS, + + .funcs = { { + .num_blocks = CAST6_PARALLEL_BLOCKS, + .fn_u = { .ctr = GLUE_CTR_FUNC_CAST(cast6_crypt_ctr_xway) } + }, { + .num_blocks = 1, + .fn_u = { .ctr = GLUE_CTR_FUNC_CAST(cast6_crypt_ctr) } + } } +}; + +static const struct common_glue_ctx cast6_dec = { + .num_funcs = 2, + .fpu_blocks_limit = CAST6_PARALLEL_BLOCKS, + + .funcs = { { + .num_blocks = CAST6_PARALLEL_BLOCKS, + .fn_u = { .ecb = GLUE_FUNC_CAST(cast6_dec_blk_xway) } + }, { + .num_blocks = 1, + .fn_u = { .ecb = GLUE_FUNC_CAST(__cast6_decrypt) } + } } +}; + +static const struct common_glue_ctx cast6_dec_cbc = { + .num_funcs = 2, + .fpu_blocks_limit = CAST6_PARALLEL_BLOCKS, + + .funcs = { { + .num_blocks = CAST6_PARALLEL_BLOCKS, + .fn_u = { .cbc = GLUE_CBC_FUNC_CAST(cast6_decrypt_cbc_xway) } + }, { + .num_blocks = 1, + .fn_u = { .cbc = GLUE_CBC_FUNC_CAST(__cast6_decrypt) } + } } +}; + +static int ecb_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst, + struct scatterlist *src, unsigned int nbytes) +{ + return glue_ecb_crypt_128bit(&cast6_enc, desc, dst, src, nbytes); +} + +static int ecb_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst, + struct scatterlist *src, unsigned int nbytes) +{ + return glue_ecb_crypt_128bit(&cast6_dec, desc, dst, src, nbytes); +} + +static int cbc_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst, + struct scatterlist *src, unsigned int nbytes) +{ + return glue_cbc_encrypt_128bit(GLUE_FUNC_CAST(__cast6_encrypt), desc, + dst, src, nbytes); +} + +static int cbc_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst, + struct scatterlist *src, unsigned int nbytes) +{ + return glue_cbc_decrypt_128bit(&cast6_dec_cbc, desc, dst, src, + nbytes); +} + +static int ctr_crypt(struct blkcipher_desc *desc, struct scatterlist *dst, + struct scatterlist *src, unsigned int nbytes) +{ + return glue_ctr_crypt_128bit(&cast6_ctr, desc, dst, src, nbytes); +} + +static inline bool cast6_fpu_begin(bool fpu_enabled, unsigned int nbytes) +{ + return glue_fpu_begin(CAST6_BLOCK_SIZE, CAST6_PARALLEL_BLOCKS, + NULL, fpu_enabled, nbytes); +} + +static inline void cast6_fpu_end(bool fpu_enabled) +{ + glue_fpu_end(fpu_enabled); +} + +struct crypt_priv { + struct cast6_ctx *ctx; + bool fpu_enabled; +}; + +static void encrypt_callback(void *priv, u8 *srcdst, unsigned int nbytes) +{ + const unsigned int bsize = CAST6_BLOCK_SIZE; + struct crypt_priv *ctx = priv; + int i; + + ctx->fpu_enabled = cast6_fpu_begin(ctx->fpu_enabled, nbytes); + + if (nbytes == bsize * CAST6_PARALLEL_BLOCKS) { + cast6_enc_blk_xway(ctx->ctx, srcdst, srcdst); + return; + } + + for (i = 0; i < nbytes / bsize; i++, srcdst += bsize) + __cast6_encrypt(ctx->ctx, srcdst, srcdst); +} + +static void decrypt_callback(void *priv, u8 *srcdst, unsigned int nbytes) +{ + const unsigned int bsize = CAST6_BLOCK_SIZE; + struct crypt_priv *ctx = priv; + int i; + + ctx->fpu_enabled = cast6_fpu_begin(ctx->fpu_enabled, nbytes); + + if (nbytes == bsize * CAST6_PARALLEL_BLOCKS) { + cast6_dec_blk_xway(ctx->ctx, srcdst, srcdst); + return; + } + + for (i = 0; i < nbytes / bsize; i++, srcdst += bsize) + __cast6_decrypt(ctx->ctx, srcdst, srcdst); +} + +struct cast6_lrw_ctx { + struct lrw_table_ctx lrw_table; + struct cast6_ctx cast6_ctx; +}; + +static int lrw_cast6_setkey(struct crypto_tfm *tfm, const u8 *key, + unsigned int keylen) +{ + struct cast6_lrw_ctx *ctx = crypto_tfm_ctx(tfm); + int err; + + err = __cast6_setkey(&ctx->cast6_ctx, key, keylen - CAST6_BLOCK_SIZE, + &tfm->crt_flags); + if (err) + return err; + + return 
lrw_init_table(&ctx->lrw_table, key + keylen - CAST6_BLOCK_SIZE); +} + +static int lrw_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst, + struct scatterlist *src, unsigned int nbytes) +{ + struct cast6_lrw_ctx *ctx = crypto_blkcipher_ctx(desc->tfm); + be128 buf[CAST6_PARALLEL_BLOCKS]; + struct crypt_priv crypt_ctx = { + .ctx = &ctx->cast6_ctx, + .fpu_enabled = false, + }; + struct lrw_crypt_req req = { + .tbuf = buf, + .tbuflen = sizeof(buf), + + .table_ctx = &ctx->lrw_table, + .crypt_ctx = &crypt_ctx, + .crypt_fn = encrypt_callback, + }; + int ret; + + desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP; + ret = lrw_crypt(desc, dst, src, nbytes, &req); + cast6_fpu_end(crypt_ctx.fpu_enabled); + + return ret; +} + +static int lrw_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst, + struct scatterlist *src, unsigned int nbytes) +{ + struct cast6_lrw_ctx *ctx = crypto_blkcipher_ctx(desc->tfm); + be128 buf[CAST6_PARALLEL_BLOCKS]; + struct crypt_priv crypt_ctx = { + .ctx = &ctx->cast6_ctx, + .fpu_enabled = false, + }; + struct lrw_crypt_req req = { + .tbuf = buf, + .tbuflen = sizeof(buf), + + .table_ctx = &ctx->lrw_table, + .crypt_ctx = &crypt_ctx, + .crypt_fn = decrypt_callback, + }; + int ret; + + desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP; + ret = lrw_crypt(desc, dst, src, nbytes, &req); + cast6_fpu_end(crypt_ctx.fpu_enabled); + + return ret; +} + +static void lrw_exit_tfm(struct crypto_tfm *tfm) +{ + struct cast6_lrw_ctx *ctx = crypto_tfm_ctx(tfm); + + lrw_free_table(&ctx->lrw_table); +} + +struct cast6_xts_ctx { + struct cast6_ctx tweak_ctx; + struct cast6_ctx crypt_ctx; +}; + +static int xts_cast6_setkey(struct crypto_tfm *tfm, const u8 *key, + unsigned int keylen) +{ + struct cast6_xts_ctx *ctx = crypto_tfm_ctx(tfm); + u32 *flags = &tfm->crt_flags; + int err; + + /* key consists of keys of equal size concatenated, therefore + * the length must be even + */ + if (keylen % 2) { + *flags |= CRYPTO_TFM_RES_BAD_KEY_LEN; + return -EINVAL; + } + + /* first half of xts-key is for crypt */ + err = __cast6_setkey(&ctx->crypt_ctx, key, keylen / 2, flags); + if (err) + return err; + + /* second half of xts-key is for tweak */ + return __cast6_setkey(&ctx->tweak_ctx, key + keylen / 2, keylen / 2, + flags); +} + +static int xts_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst, + struct scatterlist *src, unsigned int nbytes) +{ + struct cast6_xts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm); + be128 buf[CAST6_PARALLEL_BLOCKS]; + struct crypt_priv crypt_ctx = { + .ctx = &ctx->crypt_ctx, + .fpu_enabled = false, + }; + struct xts_crypt_req req = { + .tbuf = buf, + .tbuflen = sizeof(buf), + + .tweak_ctx = &ctx->tweak_ctx, + .tweak_fn = XTS_TWEAK_CAST(__cast6_encrypt), + .crypt_ctx = &crypt_ctx, + .crypt_fn = encrypt_callback, + }; + int ret; + + desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP; + ret = xts_crypt(desc, dst, src, nbytes, &req); + cast6_fpu_end(crypt_ctx.fpu_enabled); + + return ret; +} + +static int xts_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst, + struct scatterlist *src, unsigned int nbytes) +{ + struct cast6_xts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm); + be128 buf[CAST6_PARALLEL_BLOCKS]; + struct crypt_priv crypt_ctx = { + .ctx = &ctx->crypt_ctx, + .fpu_enabled = false, + }; + struct xts_crypt_req req = { + .tbuf = buf, + .tbuflen = sizeof(buf), + + .tweak_ctx = &ctx->tweak_ctx, + .tweak_fn = XTS_TWEAK_CAST(__cast6_encrypt), + .crypt_ctx = &crypt_ctx, + .crypt_fn = decrypt_callback, + }; + int ret; + + desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP; + ret = 
xts_crypt(desc, dst, src, nbytes, &req); + cast6_fpu_end(crypt_ctx.fpu_enabled); + + return ret; +} + +static struct crypto_alg cast6_algs[10] = { { + .cra_name = "__ecb-cast6-avx", + .cra_driver_name = "__driver-ecb-cast6-avx", + .cra_priority = 0, + .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER, + .cra_blocksize = CAST6_BLOCK_SIZE, + .cra_ctxsize = sizeof(struct cast6_ctx), + .cra_alignmask = 0, + .cra_type = &crypto_blkcipher_type, + .cra_module = THIS_MODULE, + .cra_u = { + .blkcipher = { + .min_keysize = CAST6_MIN_KEY_SIZE, + .max_keysize = CAST6_MAX_KEY_SIZE, + .setkey = cast6_setkey, + .encrypt = ecb_encrypt, + .decrypt = ecb_decrypt, + }, + }, +}, { + .cra_name = "__cbc-cast6-avx", + .cra_driver_name = "__driver-cbc-cast6-avx", + .cra_priority = 0, + .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER, + .cra_blocksize = CAST6_BLOCK_SIZE, + .cra_ctxsize = sizeof(struct cast6_ctx), + .cra_alignmask = 0, + .cra_type = &crypto_blkcipher_type, + .cra_module = THIS_MODULE, + .cra_u = { + .blkcipher = { + .min_keysize = CAST6_MIN_KEY_SIZE, + .max_keysize = CAST6_MAX_KEY_SIZE, + .setkey = cast6_setkey, + .encrypt = cbc_encrypt, + .decrypt = cbc_decrypt, + }, + }, +}, { + .cra_name = "__ctr-cast6-avx", + .cra_driver_name = "__driver-ctr-cast6-avx", + .cra_priority = 0, + .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER, + .cra_blocksize = 1, + .cra_ctxsize = sizeof(struct cast6_ctx), + .cra_alignmask = 0, + .cra_type = &crypto_blkcipher_type, + .cra_module = THIS_MODULE, + .cra_u = { + .blkcipher = { + .min_keysize = CAST6_MIN_KEY_SIZE, + .max_keysize = CAST6_MAX_KEY_SIZE, + .ivsize = CAST6_BLOCK_SIZE, + .setkey = cast6_setkey, + .encrypt = ctr_crypt, + .decrypt = ctr_crypt, + }, + }, +}, { + .cra_name = "__lrw-cast6-avx", + .cra_driver_name = "__driver-lrw-cast6-avx", + .cra_priority = 0, + .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER, + .cra_blocksize = CAST6_BLOCK_SIZE, + .cra_ctxsize = sizeof(struct cast6_lrw_ctx), + .cra_alignmask = 0, + .cra_type = &crypto_blkcipher_type, + .cra_module = THIS_MODULE, + .cra_exit = lrw_exit_tfm, + .cra_u = { + .blkcipher = { + .min_keysize = CAST6_MIN_KEY_SIZE + + CAST6_BLOCK_SIZE, + .max_keysize = CAST6_MAX_KEY_SIZE + + CAST6_BLOCK_SIZE, + .ivsize = CAST6_BLOCK_SIZE, + .setkey = lrw_cast6_setkey, + .encrypt = lrw_encrypt, + .decrypt = lrw_decrypt, + }, + }, +}, { + .cra_name = "__xts-cast6-avx", + .cra_driver_name = "__driver-xts-cast6-avx", + .cra_priority = 0, + .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER, + .cra_blocksize = CAST6_BLOCK_SIZE, + .cra_ctxsize = sizeof(struct cast6_xts_ctx), + .cra_alignmask = 0, + .cra_type = &crypto_blkcipher_type, + .cra_module = THIS_MODULE, + .cra_u = { + .blkcipher = { + .min_keysize = CAST6_MIN_KEY_SIZE * 2, + .max_keysize = CAST6_MAX_KEY_SIZE * 2, + .ivsize = CAST6_BLOCK_SIZE, + .setkey = xts_cast6_setkey, + .encrypt = xts_encrypt, + .decrypt = xts_decrypt, + }, + }, +}, { + .cra_name = "ecb(cast6)", + .cra_driver_name = "ecb-cast6-avx", + .cra_priority = 200, + .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC, + .cra_blocksize = CAST6_BLOCK_SIZE, + .cra_ctxsize = sizeof(struct async_helper_ctx), + .cra_alignmask = 0, + .cra_type = &crypto_ablkcipher_type, + .cra_module = THIS_MODULE, + .cra_init = ablk_init, + .cra_exit = ablk_exit, + .cra_u = { + .ablkcipher = { + .min_keysize = CAST6_MIN_KEY_SIZE, + .max_keysize = CAST6_MAX_KEY_SIZE, + .setkey = ablk_set_key, + .encrypt = ablk_encrypt, + .decrypt = ablk_decrypt, + }, + }, +}, { + .cra_name = "cbc(cast6)", + .cra_driver_name = "cbc-cast6-avx", + .cra_priority = 200, + .cra_flags = 
CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC, + .cra_blocksize = CAST6_BLOCK_SIZE, + .cra_ctxsize = sizeof(struct async_helper_ctx), + .cra_alignmask = 0, + .cra_type = &crypto_ablkcipher_type, + .cra_module = THIS_MODULE, + .cra_init = ablk_init, + .cra_exit = ablk_exit, + .cra_u = { + .ablkcipher = { + .min_keysize = CAST6_MIN_KEY_SIZE, + .max_keysize = CAST6_MAX_KEY_SIZE, + .ivsize = CAST6_BLOCK_SIZE, + .setkey = ablk_set_key, + .encrypt = __ablk_encrypt, + .decrypt = ablk_decrypt, + }, + }, +}, { + .cra_name = "ctr(cast6)", + .cra_driver_name = "ctr-cast6-avx", + .cra_priority = 200, + .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC, + .cra_blocksize = 1, + .cra_ctxsize = sizeof(struct async_helper_ctx), + .cra_alignmask = 0, + .cra_type = &crypto_ablkcipher_type, + .cra_module = THIS_MODULE, + .cra_init = ablk_init, + .cra_exit = ablk_exit, + .cra_u = { + .ablkcipher = { + .min_keysize = CAST6_MIN_KEY_SIZE, + .max_keysize = CAST6_MAX_KEY_SIZE, + .ivsize = CAST6_BLOCK_SIZE, + .setkey = ablk_set_key, + .encrypt = ablk_encrypt, + .decrypt = ablk_encrypt, + .geniv = "chainiv", + }, + }, +}, { + .cra_name = "lrw(cast6)", + .cra_driver_name = "lrw-cast6-avx", + .cra_priority = 200, + .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC, + .cra_blocksize = CAST6_BLOCK_SIZE, + .cra_ctxsize = sizeof(struct async_helper_ctx), + .cra_alignmask = 0, + .cra_type = &crypto_ablkcipher_type, + .cra_module = THIS_MODULE, + .cra_init = ablk_init, + .cra_exit = ablk_exit, + .cra_u = { + .ablkcipher = { + .min_keysize = CAST6_MIN_KEY_SIZE + + CAST6_BLOCK_SIZE, + .max_keysize = CAST6_MAX_KEY_SIZE + + CAST6_BLOCK_SIZE, + .ivsize = CAST6_BLOCK_SIZE, + .setkey = ablk_set_key, + .encrypt = ablk_encrypt, + .decrypt = ablk_decrypt, + }, + }, +}, { + .cra_name = "xts(cast6)", + .cra_driver_name = "xts-cast6-avx", + .cra_priority = 200, + .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC, + .cra_blocksize = CAST6_BLOCK_SIZE, + .cra_ctxsize = sizeof(struct async_helper_ctx), + .cra_alignmask = 0, + .cra_type = &crypto_ablkcipher_type, + .cra_module = THIS_MODULE, + .cra_init = ablk_init, + .cra_exit = ablk_exit, + .cra_u = { + .ablkcipher = { + .min_keysize = CAST6_MIN_KEY_SIZE * 2, + .max_keysize = CAST6_MAX_KEY_SIZE * 2, + .ivsize = CAST6_BLOCK_SIZE, + .setkey = ablk_set_key, + .encrypt = ablk_encrypt, + .decrypt = ablk_decrypt, + }, + }, +} }; + +static int __init cast6_init(void) +{ + u64 xcr0; + + if (!cpu_has_avx || !cpu_has_osxsave) { + pr_info("AVX instructions are not detected.\n"); + return -ENODEV; + } + + xcr0 = xgetbv(XCR_XFEATURE_ENABLED_MASK); + if ((xcr0 & (XSTATE_SSE | XSTATE_YMM)) != (XSTATE_SSE | XSTATE_YMM)) { + pr_info("AVX detected but unusable.\n"); + return -ENODEV; + } + + return crypto_register_algs(cast6_algs, ARRAY_SIZE(cast6_algs)); +} + +static void __exit cast6_exit(void) +{ + crypto_unregister_algs(cast6_algs, ARRAY_SIZE(cast6_algs)); +} + +module_init(cast6_init); +module_exit(cast6_exit); + +MODULE_DESCRIPTION("Cast6 Cipher Algorithm, AVX optimized"); +MODULE_LICENSE("GPL"); +MODULE_ALIAS("cast6"); diff --git a/crypto/Kconfig b/crypto/Kconfig index cda97fcaa822..fe8ed62efe2f 100644 --- a/crypto/Kconfig +++ b/crypto/Kconfig @@ -713,6 +713,23 @@ config CRYPTO_CAST6 The CAST6 encryption algorithm (synonymous with CAST-256) is described in RFC2612. 
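The async "ecb(cast6)" ... "xts(cast6)" entries registered above carry no CAST6 state of their own; their ablk_init/ablk_encrypt/ablk_decrypt callbacks come from the shared x86 ablk_helper code selected by the Kconfig change that follows. That helper is not part of this patch; the sketch below (function names carry an _sketch suffix to mark it as a paraphrase from memory, not the verbatim helper source) shows roughly how it wraps the internal "__driver-*-cast6-avx" blkciphers in cryptd and falls back to the async path when the FPU cannot be used in the current context:

	static int ablk_init_common_sketch(struct crypto_tfm *tfm,
					   const char *drv_name)
	{
		struct async_helper_ctx *ctx = crypto_tfm_ctx(tfm);

		/* wrap the internal "__driver-..." blkcipher in a cryptd instance */
		ctx->cryptd_tfm = cryptd_alloc_ablkcipher(drv_name, 0, 0);
		if (IS_ERR(ctx->cryptd_tfm))
			return PTR_ERR(ctx->cryptd_tfm);

		tfm->crt_ablkcipher.reqsize = sizeof(struct ablkcipher_request) +
			crypto_ablkcipher_reqsize(&ctx->cryptd_tfm->base);
		return 0;
	}

	static int ablk_encrypt_sketch(struct ablkcipher_request *req)
	{
		struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
		struct async_helper_ctx *ctx = crypto_ablkcipher_ctx(tfm);
		struct ablkcipher_request *cryptd_req;

		if (irq_fpu_usable())
			/* FPU/AVX usable here: run the blkcipher synchronously */
			return __ablk_encrypt(req);

		/* otherwise bounce the request to the cryptd worker thread */
		cryptd_req = ablkcipher_request_ctx(req);
		memcpy(cryptd_req, req, sizeof(*req));
		ablkcipher_request_set_tfm(cryptd_req, &ctx->cryptd_tfm->base);
		return crypto_ablkcipher_encrypt(cryptd_req);
	}

This is why the "visible" algorithms are marked CRYPTO_ALG_ASYNC while the "__driver-*" variants stay synchronous and at priority 0: only cryptd and the helper above are ever supposed to reach them directly.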
+config CRYPTO_CAST6_AVX_X86_64 + tristate "CAST6 (CAST-256) cipher algorithm (x86_64/AVX)" + depends on X86 && 64BIT + select CRYPTO_ALGAPI + select CRYPTO_CRYPTD + select CRYPTO_ABLK_HELPER_X86 + select CRYPTO_GLUE_HELPER_X86 + select CRYPTO_CAST6 + select CRYPTO_LRW + select CRYPTO_XTS + help + The CAST6 encryption algorithm (synonymous with CAST-256) is + described in RFC2612. + + This module provides the Cast6 cipher algorithm that processes + eight blocks parallel using the AVX instruction set. + config CRYPTO_DES tristate "DES and Triple DES EDE cipher algorithms" select CRYPTO_ALGAPI diff --git a/crypto/testmgr.c b/crypto/testmgr.c index cff3c1c3f83c..575b57c3244b 100644 --- a/crypto/testmgr.c +++ b/crypto/testmgr.c @@ -1548,6 +1548,21 @@ static const struct alg_test_desc alg_test_descs[] = { } } } + }, { + .alg = "__cbc-cast6-avx", + .test = alg_test_null, + .suite = { + .cipher = { + .enc = { + .vecs = NULL, + .count = 0 + }, + .dec = { + .vecs = NULL, + .count = 0 + } + } + } }, { .alg = "__cbc-serpent-avx", .test = alg_test_null, @@ -1624,6 +1639,21 @@ static const struct alg_test_desc alg_test_descs[] = { } } } + }, { + .alg = "__driver-cbc-cast6-avx", + .test = alg_test_null, + .suite = { + .cipher = { + .enc = { + .vecs = NULL, + .count = 0 + }, + .dec = { + .vecs = NULL, + .count = 0 + } + } + } }, { .alg = "__driver-cbc-serpent-avx", .test = alg_test_null, @@ -1700,6 +1730,21 @@ static const struct alg_test_desc alg_test_descs[] = { } } } + }, { + .alg = "__driver-ecb-cast6-avx", + .test = alg_test_null, + .suite = { + .cipher = { + .enc = { + .vecs = NULL, + .count = 0 + }, + .dec = { + .vecs = NULL, + .count = 0 + } + } + } }, { .alg = "__driver-ecb-serpent-avx", .test = alg_test_null, @@ -2026,6 +2071,21 @@ static const struct alg_test_desc alg_test_descs[] = { } } } + }, { + .alg = "cryptd(__driver-ecb-cast6-avx)", + .test = alg_test_null, + .suite = { + .cipher = { + .enc = { + .vecs = NULL, + .count = 0 + }, + .dec = { + .vecs = NULL, + .count = 0 + } + } + } }, { .alg = "cryptd(__driver-ecb-serpent-avx)", .test = alg_test_null, -- cgit v1.2.3 From 35a1fc1873dd6deac6c005ead85424a8ee28183a Mon Sep 17 00:00:00 2001 From: Seth Jennings Date: Thu, 19 Jul 2012 09:42:41 -0500 Subject: powerpc/crypto: add 842 crypto driver This patch add the 842 cryptographic API driver that submits compression requests to the 842 hardware compression accelerator driver (nx-compress). If the hardware accelerator goes offline for any reason (dynamic disable, migration, etc...), this driver will use LZO as a software failover for all future compression requests. For decompression requests, the 842 hardware driver contains a software implementation of the 842 decompressor to support the decompression of data that was compressed before the accelerator went offline. Signed-off-by: Robert Jennings Signed-off-by: Seth Jennings Signed-off-by: Herbert Xu --- crypto/842.c | 183 ++++++++++++++++++++++++++++++++++++++++++++++++++++++++ crypto/Kconfig | 9 +++ crypto/Makefile | 1 + 3 files changed, 193 insertions(+) create mode 100644 crypto/842.c (limited to 'crypto') diff --git a/crypto/842.c b/crypto/842.c new file mode 100644 index 000000000000..144767db44e5 --- /dev/null +++ b/crypto/842.c @@ -0,0 +1,183 @@ +/* + * Cryptographic API for the 842 compression algorithm. 
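The driver source continues below. Before it does, a rough usage sketch (illustrative only, not part of this patch) of how a kernel user reaches the new "842" algorithm through the generic compression interface; whether the request is served by the NX accelerator or by the LZO failover described above is invisible to the caller:

	#include <linux/crypto.h>
	#include <linux/err.h>

	static int example_compress_842(const u8 *src, unsigned int slen,
					u8 *dst, unsigned int *dlen)
	{
		struct crypto_comp *tfm;
		int err;

		tfm = crypto_alloc_comp("842", 0, 0);
		if (IS_ERR(tfm))
			return PTR_ERR(tfm);

		/* on success *dlen holds the output length, header included */
		err = crypto_comp_compress(tfm, src, slen, dst, dlen);

		crypto_free_comp(tfm);
		return err;
	}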
+ * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; either version 2 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program; if not, write to the Free Software + * Foundation, 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. + * + * Copyright (C) IBM Corporation, 2011 + * + * Authors: Robert Jennings + * Seth Jennings + */ + +#include +#include +#include +#include +#include +#include +#include + +static int nx842_uselzo; + +struct nx842_ctx { + void *nx842_wmem; /* working memory for 842/lzo */ +}; + +enum nx842_crypto_type { + NX842_CRYPTO_TYPE_842, + NX842_CRYPTO_TYPE_LZO +}; + +#define NX842_SENTINEL 0xdeadbeef + +struct nx842_crypto_header { + unsigned int sentinel; /* debug */ + enum nx842_crypto_type type; +}; + +static int nx842_init(struct crypto_tfm *tfm) +{ + struct nx842_ctx *ctx = crypto_tfm_ctx(tfm); + int wmemsize; + + wmemsize = max_t(int, nx842_get_workmem_size(), LZO1X_MEM_COMPRESS); + ctx->nx842_wmem = kmalloc(wmemsize, GFP_NOFS); + if (!ctx->nx842_wmem) + return -ENOMEM; + + return 0; +} + +static void nx842_exit(struct crypto_tfm *tfm) +{ + struct nx842_ctx *ctx = crypto_tfm_ctx(tfm); + + kfree(ctx->nx842_wmem); +} + +static void nx842_reset_uselzo(unsigned long data) +{ + nx842_uselzo = 0; +} + +static DEFINE_TIMER(failover_timer, nx842_reset_uselzo, 0, 0); + +static int nx842_crypto_compress(struct crypto_tfm *tfm, const u8 *src, + unsigned int slen, u8 *dst, unsigned int *dlen) +{ + struct nx842_ctx *ctx = crypto_tfm_ctx(tfm); + struct nx842_crypto_header *hdr; + unsigned int tmp_len = *dlen; + size_t lzodlen; /* needed for lzo */ + int err; + + *dlen = 0; + hdr = (struct nx842_crypto_header *)dst; + hdr->sentinel = NX842_SENTINEL; /* debug */ + dst += sizeof(struct nx842_crypto_header); + tmp_len -= sizeof(struct nx842_crypto_header); + lzodlen = tmp_len; + + if (likely(!nx842_uselzo)) { + err = nx842_compress(src, slen, dst, &tmp_len, ctx->nx842_wmem); + + if (likely(!err)) { + hdr->type = NX842_CRYPTO_TYPE_842; + *dlen = tmp_len + sizeof(struct nx842_crypto_header); + return 0; + } + + /* hardware failed */ + nx842_uselzo = 1; + + /* set timer to check for hardware again in 1 second */ + mod_timer(&failover_timer, jiffies + msecs_to_jiffies(1000)); + } + + /* no hardware, use lzo */ + err = lzo1x_1_compress(src, slen, dst, &lzodlen, ctx->nx842_wmem); + if (err != LZO_E_OK) + return -EINVAL; + + hdr->type = NX842_CRYPTO_TYPE_LZO; + *dlen = lzodlen + sizeof(struct nx842_crypto_header); + return 0; +} + +static int nx842_crypto_decompress(struct crypto_tfm *tfm, const u8 *src, + unsigned int slen, u8 *dst, unsigned int *dlen) +{ + struct nx842_ctx *ctx = crypto_tfm_ctx(tfm); + struct nx842_crypto_header *hdr; + unsigned int tmp_len = *dlen; + size_t lzodlen; /* needed for lzo */ + int err; + + *dlen = 0; + hdr = (struct nx842_crypto_header *)src; + + if (unlikely(hdr->sentinel != NX842_SENTINEL)) + return -EINVAL; + + src += sizeof(struct nx842_crypto_header); + slen -= sizeof(struct nx842_crypto_header); + + if (likely(hdr->type == NX842_CRYPTO_TYPE_842)) { + err = 
nx842_decompress(src, slen, dst, &tmp_len, + ctx->nx842_wmem); + if (err) + return -EINVAL; + *dlen = tmp_len; + } else if (hdr->type == NX842_CRYPTO_TYPE_LZO) { + lzodlen = tmp_len; + err = lzo1x_decompress_safe(src, slen, dst, &lzodlen); + if (err != LZO_E_OK) + return -EINVAL; + *dlen = lzodlen; + } else + return -EINVAL; + + return 0; +} + +static struct crypto_alg alg = { + .cra_name = "842", + .cra_flags = CRYPTO_ALG_TYPE_COMPRESS, + .cra_ctxsize = sizeof(struct nx842_ctx), + .cra_module = THIS_MODULE, + .cra_list = LIST_HEAD_INIT(alg.cra_list), + .cra_init = nx842_init, + .cra_exit = nx842_exit, + .cra_u = { .compress = { + .coa_compress = nx842_crypto_compress, + .coa_decompress = nx842_crypto_decompress } } +}; + +static int __init nx842_mod_init(void) +{ + del_timer(&failover_timer); + return crypto_register_alg(&alg); +} + +static void __exit nx842_mod_exit(void) +{ + crypto_unregister_alg(&alg); +} + +module_init(nx842_mod_init); +module_exit(nx842_mod_exit); + +MODULE_LICENSE("GPL"); +MODULE_DESCRIPTION("842 Compression Algorithm"); diff --git a/crypto/Kconfig b/crypto/Kconfig index fe8ed62efe2f..cbcc0e2eeda0 100644 --- a/crypto/Kconfig +++ b/crypto/Kconfig @@ -1039,6 +1039,15 @@ config CRYPTO_LZO help This is the LZO algorithm. +config CRYPTO_842 + tristate "842 compression algorithm" + depends on CRYPTO_DEV_NX_COMPRESS + # 842 uses lzo if the hardware becomes unavailable + select LZO_COMPRESS + select LZO_DECOMPRESS + help + This is the 842 algorithm. + comment "Random Number Generation" config CRYPTO_ANSI_CPRNG diff --git a/crypto/Makefile b/crypto/Makefile index 396966d2d849..a301ad2b258c 100644 --- a/crypto/Makefile +++ b/crypto/Makefile @@ -82,6 +82,7 @@ obj-$(CONFIG_CRYPTO_MICHAEL_MIC) += michael_mic.o obj-$(CONFIG_CRYPTO_CRC32C) += crc32c.o obj-$(CONFIG_CRYPTO_AUTHENC) += authenc.o authencesn.o obj-$(CONFIG_CRYPTO_LZO) += lzo.o +obj-$(CONFIG_CRYPTO_842) += 842.o obj-$(CONFIG_CRYPTO_RNG2) += rng.o obj-$(CONFIG_CRYPTO_RNG2) += krng.o obj-$(CONFIG_CRYPTO_ANSI_CPRNG) += ansi_cprng.o -- cgit v1.2.3 From 023af608254add7ba037cd634cc5f2fb21ff6420 Mon Sep 17 00:00:00 2001 From: Jussi Kivilinna Date: Sun, 22 Jul 2012 18:18:37 +0300 Subject: crypto: aesni_intel - improve lrw and xts performance by utilizing parallel AES-NI hardware pipelines Use parallel LRW and XTS encryption facilities to better utilize AES-NI hardware pipelines and gain extra performance. 
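The "parallel facilities" are the generic lrw_crypt()/xts_crypt() helpers, which collect up to eight tweaked blocks and hand the whole batch to a single callback instead of encrypting block by block. A minimal, self-contained sketch of that idea follows (illustrative only; the real helpers walk scatterlists, precompute the tweaks into the tbuf supplied by the caller, and handle partial batches): XOR the tweaks in, run one multi-block ECB pass, XOR the tweaks back out.

	#include <linux/types.h>

	#define EX_BLOCK_SIZE	16
	#define EX_PARALLEL	8	/* matches the 8-entry be128 tbuf used below */

	/* ecb_fn encrypts nbytes (a multiple of EX_BLOCK_SIZE) in place */
	typedef void (*ex_ecb_fn)(void *ctx, u8 *blks, unsigned int nbytes);

	static void ex_xts_batch(void *ctx, ex_ecb_fn ecb_fn, u8 *data,
				 const u8 tweaks[EX_PARALLEL][EX_BLOCK_SIZE],
				 unsigned int blocks)
	{
		unsigned int i, j;

		for (i = 0; i < blocks; i++)		/* PP = P ^ T */
			for (j = 0; j < EX_BLOCK_SIZE; j++)
				data[i * EX_BLOCK_SIZE + j] ^= tweaks[i][j];

		/* one call covers the whole batch and keeps the pipelines full */
		ecb_fn(ctx, data, blocks * EX_BLOCK_SIZE);

		for (i = 0; i < blocks; i++)		/* C = CC ^ T */
			for (j = 0; j < EX_BLOCK_SIZE; j++)
				data[i * EX_BLOCK_SIZE + j] ^= tweaks[i][j];
	}

The new lrw_encrypt()/xts_encrypt() routines in the diff below feed exactly this kind of callback (aesni_ecb_enc/aesni_ecb_dec) to the generic helpers, with kernel_fpu_begin()/kernel_fpu_end() wrapped around the whole walk rather than around every block.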
Tcrypt benchmark results (async), old vs new ratios:

Intel Core i5-2450M CPU (fam: 6, model: 42, step: 7)

aes:128bit lrw:256bit xts:256bit
size    lrw-enc lrw-dec xts-enc xts-dec
16B     0.99x   1.00x   1.22x   1.19x
64B     1.38x   1.50x   1.58x   1.61x
256B    2.04x   2.02x   2.27x   2.29x
1024B   2.56x   2.54x   2.89x   2.92x
8192B   2.85x   2.99x   3.40x   3.23x

aes:192bit lrw:320bit xts:384bit
size    lrw-enc lrw-dec xts-enc xts-dec
16B     1.08x   1.08x   1.16x   1.17x
64B     1.48x   1.54x   1.59x   1.65x
256B    2.18x   2.17x   2.29x   2.28x
1024B   2.67x   2.67x   2.87x   3.05x
8192B   2.93x   2.84x   3.28x   3.33x

aes:256bit lrw:384bit xts:512bit
size    lrw-enc lrw-dec xts-enc xts-dec
16B     1.07x   1.07x   1.18x   1.19x
64B     1.56x   1.56x   1.70x   1.71x
256B    2.22x   2.24x   2.46x   2.46x
1024B   2.76x   2.77x   3.13x   3.05x
8192B   2.99x   3.05x   3.40x   3.30x

Cc: Huang Ying Signed-off-by: Jussi Kivilinna Reviewed-by: Kim Phillips Signed-off-by: Herbert Xu --- arch/x86/crypto/aesni-intel_glue.c | 253 ++++++++++++++++++++++++++++++++----- crypto/Kconfig | 2 + 2 files changed, 220 insertions(+), 35 deletions(-) (limited to 'crypto') diff --git a/arch/x86/crypto/aesni-intel_glue.c b/arch/x86/crypto/aesni-intel_glue.c index 648347a05773..7c04d0da709b 100644 --- a/arch/x86/crypto/aesni-intel_glue.c +++ b/arch/x86/crypto/aesni-intel_glue.c @@ -28,6 +28,9 @@ #include #include #include +#include +#include +#include #include #include #include @@ -41,18 +44,10 @@ #define HAS_CTR #endif -#if defined(CONFIG_CRYPTO_LRW) || defined(CONFIG_CRYPTO_LRW_MODULE) -#define HAS_LRW -#endif - #if defined(CONFIG_CRYPTO_PCBC) || defined(CONFIG_CRYPTO_PCBC_MODULE) #define HAS_PCBC #endif -#if defined(CONFIG_CRYPTO_XTS) || defined(CONFIG_CRYPTO_XTS_MODULE) -#define HAS_XTS -#endif - /* This data is stored at the end of the crypto_tfm struct. * It's a type of per "session" data storage location. * This needs to be 16 byte aligned.
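The raw_aes_ctx/raw_tweak_ctx/raw_crypt_ctx buffers added in the structures below are deliberately over-sized by AESNI_ALIGN - 1 bytes: the context area inside crypto_tfm is not guaranteed to be 16-byte aligned, so the glue code realigns by hand before handing the key schedule to the AES-NI routines. The aes_ctx() helper used for that is pre-existing code in this file; reproduced here from memory as a reference, not as part of this diff:

	static inline struct crypto_aes_ctx *aes_ctx(void *raw_ctx)
	{
		unsigned long addr = (unsigned long)raw_ctx;
		unsigned long align = AESNI_ALIGN;

		/* no realignment needed if the tfm context is already aligned enough */
		if (align <= crypto_tfm_ctx_alignment())
			align = 1;

		return (struct crypto_aes_ctx *)ALIGN(addr, align);
	}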
@@ -79,6 +74,16 @@ struct aesni_hash_subkey_req_data { #define AES_BLOCK_MASK (~(AES_BLOCK_SIZE-1)) #define RFC4106_HASH_SUBKEY_SIZE 16 +struct aesni_lrw_ctx { + struct lrw_table_ctx lrw_table; + u8 raw_aes_ctx[sizeof(struct crypto_aes_ctx) + AESNI_ALIGN - 1]; +}; + +struct aesni_xts_ctx { + u8 raw_tweak_ctx[sizeof(struct crypto_aes_ctx) + AESNI_ALIGN - 1]; + u8 raw_crypt_ctx[sizeof(struct crypto_aes_ctx) + AESNI_ALIGN - 1]; +}; + asmlinkage int aesni_set_key(struct crypto_aes_ctx *ctx, const u8 *in_key, unsigned int key_len); asmlinkage void aesni_enc(struct crypto_aes_ctx *ctx, u8 *out, @@ -398,13 +403,6 @@ static int ablk_rfc3686_ctr_init(struct crypto_tfm *tfm) #endif #endif -#ifdef HAS_LRW -static int ablk_lrw_init(struct crypto_tfm *tfm) -{ - return ablk_init_common(tfm, "fpu(lrw(__driver-aes-aesni))"); -} -#endif - #ifdef HAS_PCBC static int ablk_pcbc_init(struct crypto_tfm *tfm) { @@ -412,12 +410,160 @@ static int ablk_pcbc_init(struct crypto_tfm *tfm) } #endif -#ifdef HAS_XTS -static int ablk_xts_init(struct crypto_tfm *tfm) +static void lrw_xts_encrypt_callback(void *ctx, u8 *blks, unsigned int nbytes) { - return ablk_init_common(tfm, "fpu(xts(__driver-aes-aesni))"); + aesni_ecb_enc(ctx, blks, blks, nbytes); +} + +static void lrw_xts_decrypt_callback(void *ctx, u8 *blks, unsigned int nbytes) +{ + aesni_ecb_dec(ctx, blks, blks, nbytes); +} + +static int lrw_aesni_setkey(struct crypto_tfm *tfm, const u8 *key, + unsigned int keylen) +{ + struct aesni_lrw_ctx *ctx = crypto_tfm_ctx(tfm); + int err; + + err = aes_set_key_common(tfm, ctx->raw_aes_ctx, key, + keylen - AES_BLOCK_SIZE); + if (err) + return err; + + return lrw_init_table(&ctx->lrw_table, key + keylen - AES_BLOCK_SIZE); +} + +static void lrw_aesni_exit_tfm(struct crypto_tfm *tfm) +{ + struct aesni_lrw_ctx *ctx = crypto_tfm_ctx(tfm); + + lrw_free_table(&ctx->lrw_table); +} + +static int lrw_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst, + struct scatterlist *src, unsigned int nbytes) +{ + struct aesni_lrw_ctx *ctx = crypto_blkcipher_ctx(desc->tfm); + be128 buf[8]; + struct lrw_crypt_req req = { + .tbuf = buf, + .tbuflen = sizeof(buf), + + .table_ctx = &ctx->lrw_table, + .crypt_ctx = aes_ctx(ctx->raw_aes_ctx), + .crypt_fn = lrw_xts_encrypt_callback, + }; + int ret; + + desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP; + + kernel_fpu_begin(); + ret = lrw_crypt(desc, dst, src, nbytes, &req); + kernel_fpu_end(); + + return ret; +} + +static int lrw_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst, + struct scatterlist *src, unsigned int nbytes) +{ + struct aesni_lrw_ctx *ctx = crypto_blkcipher_ctx(desc->tfm); + be128 buf[8]; + struct lrw_crypt_req req = { + .tbuf = buf, + .tbuflen = sizeof(buf), + + .table_ctx = &ctx->lrw_table, + .crypt_ctx = aes_ctx(ctx->raw_aes_ctx), + .crypt_fn = lrw_xts_decrypt_callback, + }; + int ret; + + desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP; + + kernel_fpu_begin(); + ret = lrw_crypt(desc, dst, src, nbytes, &req); + kernel_fpu_end(); + + return ret; +} + +static int xts_aesni_setkey(struct crypto_tfm *tfm, const u8 *key, + unsigned int keylen) +{ + struct aesni_xts_ctx *ctx = crypto_tfm_ctx(tfm); + u32 *flags = &tfm->crt_flags; + int err; + + /* key consists of keys of equal size concatenated, therefore + * the length must be even + */ + if (keylen % 2) { + *flags |= CRYPTO_TFM_RES_BAD_KEY_LEN; + return -EINVAL; + } + + /* first half of xts-key is for crypt */ + err = aes_set_key_common(tfm, ctx->raw_crypt_ctx, key, keylen / 2); + if (err) + return err; + + /* second half of xts-key 
is for tweak */ + return aes_set_key_common(tfm, ctx->raw_tweak_ctx, key + keylen / 2, + keylen / 2); +} + + +static int xts_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst, + struct scatterlist *src, unsigned int nbytes) +{ + struct aesni_xts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm); + be128 buf[8]; + struct xts_crypt_req req = { + .tbuf = buf, + .tbuflen = sizeof(buf), + + .tweak_ctx = aes_ctx(ctx->raw_tweak_ctx), + .tweak_fn = XTS_TWEAK_CAST(aesni_enc), + .crypt_ctx = aes_ctx(ctx->raw_crypt_ctx), + .crypt_fn = lrw_xts_encrypt_callback, + }; + int ret; + + desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP; + + kernel_fpu_begin(); + ret = xts_crypt(desc, dst, src, nbytes, &req); + kernel_fpu_end(); + + return ret; +} + +static int xts_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst, + struct scatterlist *src, unsigned int nbytes) +{ + struct aesni_xts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm); + be128 buf[8]; + struct xts_crypt_req req = { + .tbuf = buf, + .tbuflen = sizeof(buf), + + .tweak_ctx = aes_ctx(ctx->raw_tweak_ctx), + .tweak_fn = XTS_TWEAK_CAST(aesni_enc), + .crypt_ctx = aes_ctx(ctx->raw_crypt_ctx), + .crypt_fn = lrw_xts_decrypt_callback, + }; + int ret; + + desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP; + + kernel_fpu_begin(); + ret = xts_crypt(desc, dst, src, nbytes, &req); + kernel_fpu_end(); + + return ret; } -#endif #ifdef CONFIG_X86_64 static int rfc4106_init(struct crypto_tfm *tfm) @@ -1035,10 +1181,10 @@ static struct crypto_alg aesni_algs[] = { { }, #endif #endif -#ifdef HAS_LRW +#ifdef HAS_PCBC }, { - .cra_name = "lrw(aes)", - .cra_driver_name = "lrw-aes-aesni", + .cra_name = "pcbc(aes)", + .cra_driver_name = "pcbc-aes-aesni", .cra_priority = 400, .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC, .cra_blocksize = AES_BLOCK_SIZE, @@ -1046,12 +1192,12 @@ static struct crypto_alg aesni_algs[] = { { .cra_alignmask = 0, .cra_type = &crypto_ablkcipher_type, .cra_module = THIS_MODULE, - .cra_init = ablk_lrw_init, + .cra_init = ablk_pcbc_init, .cra_exit = ablk_exit, .cra_u = { .ablkcipher = { - .min_keysize = AES_MIN_KEY_SIZE + AES_BLOCK_SIZE, - .max_keysize = AES_MAX_KEY_SIZE + AES_BLOCK_SIZE, + .min_keysize = AES_MIN_KEY_SIZE, + .max_keysize = AES_MAX_KEY_SIZE, .ivsize = AES_BLOCK_SIZE, .setkey = ablk_set_key, .encrypt = ablk_encrypt, @@ -1059,10 +1205,50 @@ static struct crypto_alg aesni_algs[] = { { }, }, #endif -#ifdef HAS_PCBC }, { - .cra_name = "pcbc(aes)", - .cra_driver_name = "pcbc-aes-aesni", + .cra_name = "__lrw-aes-aesni", + .cra_driver_name = "__driver-lrw-aes-aesni", + .cra_priority = 0, + .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER, + .cra_blocksize = AES_BLOCK_SIZE, + .cra_ctxsize = sizeof(struct aesni_lrw_ctx), + .cra_alignmask = 0, + .cra_type = &crypto_blkcipher_type, + .cra_module = THIS_MODULE, + .cra_exit = lrw_aesni_exit_tfm, + .cra_u = { + .blkcipher = { + .min_keysize = AES_MIN_KEY_SIZE + AES_BLOCK_SIZE, + .max_keysize = AES_MAX_KEY_SIZE + AES_BLOCK_SIZE, + .ivsize = AES_BLOCK_SIZE, + .setkey = lrw_aesni_setkey, + .encrypt = lrw_encrypt, + .decrypt = lrw_decrypt, + }, + }, +}, { + .cra_name = "__xts-aes-aesni", + .cra_driver_name = "__driver-xts-aes-aesni", + .cra_priority = 0, + .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER, + .cra_blocksize = AES_BLOCK_SIZE, + .cra_ctxsize = sizeof(struct aesni_xts_ctx), + .cra_alignmask = 0, + .cra_type = &crypto_blkcipher_type, + .cra_module = THIS_MODULE, + .cra_u = { + .blkcipher = { + .min_keysize = 2 * AES_MIN_KEY_SIZE, + .max_keysize = 2 * AES_MAX_KEY_SIZE, + .ivsize = AES_BLOCK_SIZE, + .setkey = 
xts_aesni_setkey, + .encrypt = xts_encrypt, + .decrypt = xts_decrypt, + }, + }, +}, { + .cra_name = "lrw(aes)", + .cra_driver_name = "lrw-aes-aesni", .cra_priority = 400, .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC, .cra_blocksize = AES_BLOCK_SIZE, @@ -1070,20 +1256,18 @@ static struct crypto_alg aesni_algs[] = { { .cra_alignmask = 0, .cra_type = &crypto_ablkcipher_type, .cra_module = THIS_MODULE, - .cra_init = ablk_pcbc_init, + .cra_init = ablk_init, .cra_exit = ablk_exit, .cra_u = { .ablkcipher = { - .min_keysize = AES_MIN_KEY_SIZE, - .max_keysize = AES_MAX_KEY_SIZE, + .min_keysize = AES_MIN_KEY_SIZE + AES_BLOCK_SIZE, + .max_keysize = AES_MAX_KEY_SIZE + AES_BLOCK_SIZE, .ivsize = AES_BLOCK_SIZE, .setkey = ablk_set_key, .encrypt = ablk_encrypt, .decrypt = ablk_decrypt, }, }, -#endif -#ifdef HAS_XTS }, { .cra_name = "xts(aes)", .cra_driver_name = "xts-aes-aesni", @@ -1094,7 +1278,7 @@ static struct crypto_alg aesni_algs[] = { { .cra_alignmask = 0, .cra_type = &crypto_ablkcipher_type, .cra_module = THIS_MODULE, - .cra_init = ablk_xts_init, + .cra_init = ablk_init, .cra_exit = ablk_exit, .cra_u = { .ablkcipher = { @@ -1106,7 +1290,6 @@ static struct crypto_alg aesni_algs[] = { { .decrypt = ablk_decrypt, }, }, -#endif } }; diff --git a/crypto/Kconfig b/crypto/Kconfig index cbcc0e2eeda0..213fb37be51f 100644 --- a/crypto/Kconfig +++ b/crypto/Kconfig @@ -564,6 +564,8 @@ config CRYPTO_AES_NI_INTEL select CRYPTO_CRYPTD select CRYPTO_ABLK_HELPER_X86 select CRYPTO_ALGAPI + select CRYPTO_LRW + select CRYPTO_XTS help Use Intel AES-NI instructions for AES algorithm. -- cgit v1.2.3 From f0be44f4fb1faee42635ca5ea06dc9c3e820a35d Mon Sep 17 00:00:00 2001 From: David McCullough Date: Fri, 7 Sep 2012 04:17:02 +0800 Subject: arm/crypto: Add optimized AES and SHA1 routines Add assembler versions of AES and SHA1 for ARM platforms. This has provided up to a 50% improvement in IPsec/TCP throughout for tunnels using AES128/SHA1. Platform CPU SPeed Endian Before (bps) After (bps) Improvement IXP425 533 MHz big 11217042 15566294 ~38% KS8695 166 MHz little 3828549 5795373 ~51% Signed-off-by: David McCullough Signed-off-by: Herbert Xu --- arch/arm/Makefile | 1 + arch/arm/crypto/Makefile | 9 + arch/arm/crypto/aes-armv4.S | 1112 ++++++++++++++++++++++++++++++++++++ arch/arm/crypto/aes_glue.c | 108 ++++ arch/arm/crypto/sha1-armv4-large.S | 503 ++++++++++++++++ arch/arm/crypto/sha1_glue.c | 179 ++++++ crypto/Kconfig | 33 ++ 7 files changed, 1945 insertions(+) create mode 100644 arch/arm/crypto/Makefile create mode 100644 arch/arm/crypto/aes-armv4.S create mode 100644 arch/arm/crypto/aes_glue.c create mode 100644 arch/arm/crypto/sha1-armv4-large.S create mode 100644 arch/arm/crypto/sha1_glue.c (limited to 'crypto') diff --git a/arch/arm/Makefile b/arch/arm/Makefile index 30eae87ead6d..46038d756bf6 100644 --- a/arch/arm/Makefile +++ b/arch/arm/Makefile @@ -255,6 +255,7 @@ core-$(CONFIG_VFP) += arch/arm/vfp/ # If we have a machine-specific directory, then include it in the build. core-y += arch/arm/kernel/ arch/arm/mm/ arch/arm/common/ core-y += arch/arm/net/ +core-y += arch/arm/crypto/ core-y += $(machdirs) $(platdirs) drivers-$(CONFIG_OPROFILE) += arch/arm/oprofile/ diff --git a/arch/arm/crypto/Makefile b/arch/arm/crypto/Makefile new file mode 100644 index 000000000000..a2c83851bc90 --- /dev/null +++ b/arch/arm/crypto/Makefile @@ -0,0 +1,9 @@ +# +# Arch-specific CryptoAPI modules. 
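The Makefile below wires the new modules into the build. Once the glue code further down registers the cipher as "aes" (driver name "aes-asm", priority 200), existing users pick it up through the ordinary single-block cipher interface without any changes, since the generic C implementation registers at a lower priority. A rough usage sketch, illustrative only and not part of the patch:

	#include <linux/crypto.h>
	#include <linux/err.h>

	static int example_one_block(const u8 *key, const u8 in[16], u8 out[16])
	{
		struct crypto_cipher *tfm;
		int err;

		tfm = crypto_alloc_cipher("aes", 0, 0);	/* highest-priority "aes" wins */
		if (IS_ERR(tfm))
			return PTR_ERR(tfm);

		err = crypto_cipher_setkey(tfm, key, 16);
		if (!err)
			crypto_cipher_encrypt_one(tfm, out, in);

		crypto_free_cipher(tfm);
		return err;
	}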
+# + +obj-$(CONFIG_CRYPTO_AES_ARM) += aes-arm.o +obj-$(CONFIG_CRYPTO_SHA1_ARM) += sha1-arm.o + +aes-arm-y := aes-armv4.o aes_glue.o +sha1-arm-y := sha1-armv4-large.o sha1_glue.o diff --git a/arch/arm/crypto/aes-armv4.S b/arch/arm/crypto/aes-armv4.S new file mode 100644 index 000000000000..e59b1d505d6c --- /dev/null +++ b/arch/arm/crypto/aes-armv4.S @@ -0,0 +1,1112 @@ +#define __ARM_ARCH__ __LINUX_ARM_ARCH__ +@ ==================================================================== +@ Written by Andy Polyakov for the OpenSSL +@ project. The module is, however, dual licensed under OpenSSL and +@ CRYPTOGAMS licenses depending on where you obtain it. For further +@ details see http://www.openssl.org/~appro/cryptogams/. +@ ==================================================================== + +@ AES for ARMv4 + +@ January 2007. +@ +@ Code uses single 1K S-box and is >2 times faster than code generated +@ by gcc-3.4.1. This is thanks to unique feature of ARMv4 ISA, which +@ allows to merge logical or arithmetic operation with shift or rotate +@ in one instruction and emit combined result every cycle. The module +@ is endian-neutral. The performance is ~42 cycles/byte for 128-bit +@ key [on single-issue Xscale PXA250 core]. + +@ May 2007. +@ +@ AES_set_[en|de]crypt_key is added. + +@ July 2010. +@ +@ Rescheduling for dual-issue pipeline resulted in 12% improvement on +@ Cortex A8 core and ~25 cycles per byte processed with 128-bit key. + +@ February 2011. +@ +@ Profiler-assisted and platform-specific optimization resulted in 16% +@ improvement on Cortex A8 core and ~21.5 cycles per byte. + +@ A little glue here to select the correct code below for the ARM CPU +@ that is being targetted. + +.text +.code 32 + +.type AES_Te,%object +.align 5 +AES_Te: +.word 0xc66363a5, 0xf87c7c84, 0xee777799, 0xf67b7b8d +.word 0xfff2f20d, 0xd66b6bbd, 0xde6f6fb1, 0x91c5c554 +.word 0x60303050, 0x02010103, 0xce6767a9, 0x562b2b7d +.word 0xe7fefe19, 0xb5d7d762, 0x4dababe6, 0xec76769a +.word 0x8fcaca45, 0x1f82829d, 0x89c9c940, 0xfa7d7d87 +.word 0xeffafa15, 0xb25959eb, 0x8e4747c9, 0xfbf0f00b +.word 0x41adadec, 0xb3d4d467, 0x5fa2a2fd, 0x45afafea +.word 0x239c9cbf, 0x53a4a4f7, 0xe4727296, 0x9bc0c05b +.word 0x75b7b7c2, 0xe1fdfd1c, 0x3d9393ae, 0x4c26266a +.word 0x6c36365a, 0x7e3f3f41, 0xf5f7f702, 0x83cccc4f +.word 0x6834345c, 0x51a5a5f4, 0xd1e5e534, 0xf9f1f108 +.word 0xe2717193, 0xabd8d873, 0x62313153, 0x2a15153f +.word 0x0804040c, 0x95c7c752, 0x46232365, 0x9dc3c35e +.word 0x30181828, 0x379696a1, 0x0a05050f, 0x2f9a9ab5 +.word 0x0e070709, 0x24121236, 0x1b80809b, 0xdfe2e23d +.word 0xcdebeb26, 0x4e272769, 0x7fb2b2cd, 0xea75759f +.word 0x1209091b, 0x1d83839e, 0x582c2c74, 0x341a1a2e +.word 0x361b1b2d, 0xdc6e6eb2, 0xb45a5aee, 0x5ba0a0fb +.word 0xa45252f6, 0x763b3b4d, 0xb7d6d661, 0x7db3b3ce +.word 0x5229297b, 0xdde3e33e, 0x5e2f2f71, 0x13848497 +.word 0xa65353f5, 0xb9d1d168, 0x00000000, 0xc1eded2c +.word 0x40202060, 0xe3fcfc1f, 0x79b1b1c8, 0xb65b5bed +.word 0xd46a6abe, 0x8dcbcb46, 0x67bebed9, 0x7239394b +.word 0x944a4ade, 0x984c4cd4, 0xb05858e8, 0x85cfcf4a +.word 0xbbd0d06b, 0xc5efef2a, 0x4faaaae5, 0xedfbfb16 +.word 0x864343c5, 0x9a4d4dd7, 0x66333355, 0x11858594 +.word 0x8a4545cf, 0xe9f9f910, 0x04020206, 0xfe7f7f81 +.word 0xa05050f0, 0x783c3c44, 0x259f9fba, 0x4ba8a8e3 +.word 0xa25151f3, 0x5da3a3fe, 0x804040c0, 0x058f8f8a +.word 0x3f9292ad, 0x219d9dbc, 0x70383848, 0xf1f5f504 +.word 0x63bcbcdf, 0x77b6b6c1, 0xafdada75, 0x42212163 +.word 0x20101030, 0xe5ffff1a, 0xfdf3f30e, 0xbfd2d26d +.word 0x81cdcd4c, 0x180c0c14, 0x26131335, 0xc3ecec2f 
+.word 0xbe5f5fe1, 0x359797a2, 0x884444cc, 0x2e171739 +.word 0x93c4c457, 0x55a7a7f2, 0xfc7e7e82, 0x7a3d3d47 +.word 0xc86464ac, 0xba5d5de7, 0x3219192b, 0xe6737395 +.word 0xc06060a0, 0x19818198, 0x9e4f4fd1, 0xa3dcdc7f +.word 0x44222266, 0x542a2a7e, 0x3b9090ab, 0x0b888883 +.word 0x8c4646ca, 0xc7eeee29, 0x6bb8b8d3, 0x2814143c +.word 0xa7dede79, 0xbc5e5ee2, 0x160b0b1d, 0xaddbdb76 +.word 0xdbe0e03b, 0x64323256, 0x743a3a4e, 0x140a0a1e +.word 0x924949db, 0x0c06060a, 0x4824246c, 0xb85c5ce4 +.word 0x9fc2c25d, 0xbdd3d36e, 0x43acacef, 0xc46262a6 +.word 0x399191a8, 0x319595a4, 0xd3e4e437, 0xf279798b +.word 0xd5e7e732, 0x8bc8c843, 0x6e373759, 0xda6d6db7 +.word 0x018d8d8c, 0xb1d5d564, 0x9c4e4ed2, 0x49a9a9e0 +.word 0xd86c6cb4, 0xac5656fa, 0xf3f4f407, 0xcfeaea25 +.word 0xca6565af, 0xf47a7a8e, 0x47aeaee9, 0x10080818 +.word 0x6fbabad5, 0xf0787888, 0x4a25256f, 0x5c2e2e72 +.word 0x381c1c24, 0x57a6a6f1, 0x73b4b4c7, 0x97c6c651 +.word 0xcbe8e823, 0xa1dddd7c, 0xe874749c, 0x3e1f1f21 +.word 0x964b4bdd, 0x61bdbddc, 0x0d8b8b86, 0x0f8a8a85 +.word 0xe0707090, 0x7c3e3e42, 0x71b5b5c4, 0xcc6666aa +.word 0x904848d8, 0x06030305, 0xf7f6f601, 0x1c0e0e12 +.word 0xc26161a3, 0x6a35355f, 0xae5757f9, 0x69b9b9d0 +.word 0x17868691, 0x99c1c158, 0x3a1d1d27, 0x279e9eb9 +.word 0xd9e1e138, 0xebf8f813, 0x2b9898b3, 0x22111133 +.word 0xd26969bb, 0xa9d9d970, 0x078e8e89, 0x339494a7 +.word 0x2d9b9bb6, 0x3c1e1e22, 0x15878792, 0xc9e9e920 +.word 0x87cece49, 0xaa5555ff, 0x50282878, 0xa5dfdf7a +.word 0x038c8c8f, 0x59a1a1f8, 0x09898980, 0x1a0d0d17 +.word 0x65bfbfda, 0xd7e6e631, 0x844242c6, 0xd06868b8 +.word 0x824141c3, 0x299999b0, 0x5a2d2d77, 0x1e0f0f11 +.word 0x7bb0b0cb, 0xa85454fc, 0x6dbbbbd6, 0x2c16163a +@ Te4[256] +.byte 0x63, 0x7c, 0x77, 0x7b, 0xf2, 0x6b, 0x6f, 0xc5 +.byte 0x30, 0x01, 0x67, 0x2b, 0xfe, 0xd7, 0xab, 0x76 +.byte 0xca, 0x82, 0xc9, 0x7d, 0xfa, 0x59, 0x47, 0xf0 +.byte 0xad, 0xd4, 0xa2, 0xaf, 0x9c, 0xa4, 0x72, 0xc0 +.byte 0xb7, 0xfd, 0x93, 0x26, 0x36, 0x3f, 0xf7, 0xcc +.byte 0x34, 0xa5, 0xe5, 0xf1, 0x71, 0xd8, 0x31, 0x15 +.byte 0x04, 0xc7, 0x23, 0xc3, 0x18, 0x96, 0x05, 0x9a +.byte 0x07, 0x12, 0x80, 0xe2, 0xeb, 0x27, 0xb2, 0x75 +.byte 0x09, 0x83, 0x2c, 0x1a, 0x1b, 0x6e, 0x5a, 0xa0 +.byte 0x52, 0x3b, 0xd6, 0xb3, 0x29, 0xe3, 0x2f, 0x84 +.byte 0x53, 0xd1, 0x00, 0xed, 0x20, 0xfc, 0xb1, 0x5b +.byte 0x6a, 0xcb, 0xbe, 0x39, 0x4a, 0x4c, 0x58, 0xcf +.byte 0xd0, 0xef, 0xaa, 0xfb, 0x43, 0x4d, 0x33, 0x85 +.byte 0x45, 0xf9, 0x02, 0x7f, 0x50, 0x3c, 0x9f, 0xa8 +.byte 0x51, 0xa3, 0x40, 0x8f, 0x92, 0x9d, 0x38, 0xf5 +.byte 0xbc, 0xb6, 0xda, 0x21, 0x10, 0xff, 0xf3, 0xd2 +.byte 0xcd, 0x0c, 0x13, 0xec, 0x5f, 0x97, 0x44, 0x17 +.byte 0xc4, 0xa7, 0x7e, 0x3d, 0x64, 0x5d, 0x19, 0x73 +.byte 0x60, 0x81, 0x4f, 0xdc, 0x22, 0x2a, 0x90, 0x88 +.byte 0x46, 0xee, 0xb8, 0x14, 0xde, 0x5e, 0x0b, 0xdb +.byte 0xe0, 0x32, 0x3a, 0x0a, 0x49, 0x06, 0x24, 0x5c +.byte 0xc2, 0xd3, 0xac, 0x62, 0x91, 0x95, 0xe4, 0x79 +.byte 0xe7, 0xc8, 0x37, 0x6d, 0x8d, 0xd5, 0x4e, 0xa9 +.byte 0x6c, 0x56, 0xf4, 0xea, 0x65, 0x7a, 0xae, 0x08 +.byte 0xba, 0x78, 0x25, 0x2e, 0x1c, 0xa6, 0xb4, 0xc6 +.byte 0xe8, 0xdd, 0x74, 0x1f, 0x4b, 0xbd, 0x8b, 0x8a +.byte 0x70, 0x3e, 0xb5, 0x66, 0x48, 0x03, 0xf6, 0x0e +.byte 0x61, 0x35, 0x57, 0xb9, 0x86, 0xc1, 0x1d, 0x9e +.byte 0xe1, 0xf8, 0x98, 0x11, 0x69, 0xd9, 0x8e, 0x94 +.byte 0x9b, 0x1e, 0x87, 0xe9, 0xce, 0x55, 0x28, 0xdf +.byte 0x8c, 0xa1, 0x89, 0x0d, 0xbf, 0xe6, 0x42, 0x68 +.byte 0x41, 0x99, 0x2d, 0x0f, 0xb0, 0x54, 0xbb, 0x16 +@ rcon[] +.word 0x01000000, 0x02000000, 0x04000000, 0x08000000 +.word 0x10000000, 0x20000000, 0x40000000, 0x80000000 +.word 0x1B000000, 
0x36000000, 0, 0, 0, 0, 0, 0 +.size AES_Te,.-AES_Te + +@ void AES_encrypt(const unsigned char *in, unsigned char *out, +@ const AES_KEY *key) { +.global AES_encrypt +.type AES_encrypt,%function +.align 5 +AES_encrypt: + sub r3,pc,#8 @ AES_encrypt + stmdb sp!,{r1,r4-r12,lr} + mov r12,r0 @ inp + mov r11,r2 + sub r10,r3,#AES_encrypt-AES_Te @ Te +#if __ARM_ARCH__<7 + ldrb r0,[r12,#3] @ load input data in endian-neutral + ldrb r4,[r12,#2] @ manner... + ldrb r5,[r12,#1] + ldrb r6,[r12,#0] + orr r0,r0,r4,lsl#8 + ldrb r1,[r12,#7] + orr r0,r0,r5,lsl#16 + ldrb r4,[r12,#6] + orr r0,r0,r6,lsl#24 + ldrb r5,[r12,#5] + ldrb r6,[r12,#4] + orr r1,r1,r4,lsl#8 + ldrb r2,[r12,#11] + orr r1,r1,r5,lsl#16 + ldrb r4,[r12,#10] + orr r1,r1,r6,lsl#24 + ldrb r5,[r12,#9] + ldrb r6,[r12,#8] + orr r2,r2,r4,lsl#8 + ldrb r3,[r12,#15] + orr r2,r2,r5,lsl#16 + ldrb r4,[r12,#14] + orr r2,r2,r6,lsl#24 + ldrb r5,[r12,#13] + ldrb r6,[r12,#12] + orr r3,r3,r4,lsl#8 + orr r3,r3,r5,lsl#16 + orr r3,r3,r6,lsl#24 +#else + ldr r0,[r12,#0] + ldr r1,[r12,#4] + ldr r2,[r12,#8] + ldr r3,[r12,#12] +#ifdef __ARMEL__ + rev r0,r0 + rev r1,r1 + rev r2,r2 + rev r3,r3 +#endif +#endif + bl _armv4_AES_encrypt + + ldr r12,[sp],#4 @ pop out +#if __ARM_ARCH__>=7 +#ifdef __ARMEL__ + rev r0,r0 + rev r1,r1 + rev r2,r2 + rev r3,r3 +#endif + str r0,[r12,#0] + str r1,[r12,#4] + str r2,[r12,#8] + str r3,[r12,#12] +#else + mov r4,r0,lsr#24 @ write output in endian-neutral + mov r5,r0,lsr#16 @ manner... + mov r6,r0,lsr#8 + strb r4,[r12,#0] + strb r5,[r12,#1] + mov r4,r1,lsr#24 + strb r6,[r12,#2] + mov r5,r1,lsr#16 + strb r0,[r12,#3] + mov r6,r1,lsr#8 + strb r4,[r12,#4] + strb r5,[r12,#5] + mov r4,r2,lsr#24 + strb r6,[r12,#6] + mov r5,r2,lsr#16 + strb r1,[r12,#7] + mov r6,r2,lsr#8 + strb r4,[r12,#8] + strb r5,[r12,#9] + mov r4,r3,lsr#24 + strb r6,[r12,#10] + mov r5,r3,lsr#16 + strb r2,[r12,#11] + mov r6,r3,lsr#8 + strb r4,[r12,#12] + strb r5,[r12,#13] + strb r6,[r12,#14] + strb r3,[r12,#15] +#endif +#if __ARM_ARCH__>=5 + ldmia sp!,{r4-r12,pc} +#else + ldmia sp!,{r4-r12,lr} + tst lr,#1 + moveq pc,lr @ be binary compatible with V4, yet + .word 0xe12fff1e @ interoperable with Thumb ISA:-) +#endif +.size AES_encrypt,.-AES_encrypt + +.type _armv4_AES_encrypt,%function +.align 2 +_armv4_AES_encrypt: + str lr,[sp,#-4]! 
@ push lr + ldmia r11!,{r4-r7} + eor r0,r0,r4 + ldr r12,[r11,#240-16] + eor r1,r1,r5 + eor r2,r2,r6 + eor r3,r3,r7 + sub r12,r12,#1 + mov lr,#255 + + and r7,lr,r0 + and r8,lr,r0,lsr#8 + and r9,lr,r0,lsr#16 + mov r0,r0,lsr#24 +.Lenc_loop: + ldr r4,[r10,r7,lsl#2] @ Te3[s0>>0] + and r7,lr,r1,lsr#16 @ i0 + ldr r5,[r10,r8,lsl#2] @ Te2[s0>>8] + and r8,lr,r1 + ldr r6,[r10,r9,lsl#2] @ Te1[s0>>16] + and r9,lr,r1,lsr#8 + ldr r0,[r10,r0,lsl#2] @ Te0[s0>>24] + mov r1,r1,lsr#24 + + ldr r7,[r10,r7,lsl#2] @ Te1[s1>>16] + ldr r8,[r10,r8,lsl#2] @ Te3[s1>>0] + ldr r9,[r10,r9,lsl#2] @ Te2[s1>>8] + eor r0,r0,r7,ror#8 + ldr r1,[r10,r1,lsl#2] @ Te0[s1>>24] + and r7,lr,r2,lsr#8 @ i0 + eor r5,r5,r8,ror#8 + and r8,lr,r2,lsr#16 @ i1 + eor r6,r6,r9,ror#8 + and r9,lr,r2 + ldr r7,[r10,r7,lsl#2] @ Te2[s2>>8] + eor r1,r1,r4,ror#24 + ldr r8,[r10,r8,lsl#2] @ Te1[s2>>16] + mov r2,r2,lsr#24 + + ldr r9,[r10,r9,lsl#2] @ Te3[s2>>0] + eor r0,r0,r7,ror#16 + ldr r2,[r10,r2,lsl#2] @ Te0[s2>>24] + and r7,lr,r3 @ i0 + eor r1,r1,r8,ror#8 + and r8,lr,r3,lsr#8 @ i1 + eor r6,r6,r9,ror#16 + and r9,lr,r3,lsr#16 @ i2 + ldr r7,[r10,r7,lsl#2] @ Te3[s3>>0] + eor r2,r2,r5,ror#16 + ldr r8,[r10,r8,lsl#2] @ Te2[s3>>8] + mov r3,r3,lsr#24 + + ldr r9,[r10,r9,lsl#2] @ Te1[s3>>16] + eor r0,r0,r7,ror#24 + ldr r7,[r11],#16 + eor r1,r1,r8,ror#16 + ldr r3,[r10,r3,lsl#2] @ Te0[s3>>24] + eor r2,r2,r9,ror#8 + ldr r4,[r11,#-12] + eor r3,r3,r6,ror#8 + + ldr r5,[r11,#-8] + eor r0,r0,r7 + ldr r6,[r11,#-4] + and r7,lr,r0 + eor r1,r1,r4 + and r8,lr,r0,lsr#8 + eor r2,r2,r5 + and r9,lr,r0,lsr#16 + eor r3,r3,r6 + mov r0,r0,lsr#24 + + subs r12,r12,#1 + bne .Lenc_loop + + add r10,r10,#2 + + ldrb r4,[r10,r7,lsl#2] @ Te4[s0>>0] + and r7,lr,r1,lsr#16 @ i0 + ldrb r5,[r10,r8,lsl#2] @ Te4[s0>>8] + and r8,lr,r1 + ldrb r6,[r10,r9,lsl#2] @ Te4[s0>>16] + and r9,lr,r1,lsr#8 + ldrb r0,[r10,r0,lsl#2] @ Te4[s0>>24] + mov r1,r1,lsr#24 + + ldrb r7,[r10,r7,lsl#2] @ Te4[s1>>16] + ldrb r8,[r10,r8,lsl#2] @ Te4[s1>>0] + ldrb r9,[r10,r9,lsl#2] @ Te4[s1>>8] + eor r0,r7,r0,lsl#8 + ldrb r1,[r10,r1,lsl#2] @ Te4[s1>>24] + and r7,lr,r2,lsr#8 @ i0 + eor r5,r8,r5,lsl#8 + and r8,lr,r2,lsr#16 @ i1 + eor r6,r9,r6,lsl#8 + and r9,lr,r2 + ldrb r7,[r10,r7,lsl#2] @ Te4[s2>>8] + eor r1,r4,r1,lsl#24 + ldrb r8,[r10,r8,lsl#2] @ Te4[s2>>16] + mov r2,r2,lsr#24 + + ldrb r9,[r10,r9,lsl#2] @ Te4[s2>>0] + eor r0,r7,r0,lsl#8 + ldrb r2,[r10,r2,lsl#2] @ Te4[s2>>24] + and r7,lr,r3 @ i0 + eor r1,r1,r8,lsl#16 + and r8,lr,r3,lsr#8 @ i1 + eor r6,r9,r6,lsl#8 + and r9,lr,r3,lsr#16 @ i2 + ldrb r7,[r10,r7,lsl#2] @ Te4[s3>>0] + eor r2,r5,r2,lsl#24 + ldrb r8,[r10,r8,lsl#2] @ Te4[s3>>8] + mov r3,r3,lsr#24 + + ldrb r9,[r10,r9,lsl#2] @ Te4[s3>>16] + eor r0,r7,r0,lsl#8 + ldr r7,[r11,#0] + ldrb r3,[r10,r3,lsl#2] @ Te4[s3>>24] + eor r1,r1,r8,lsl#8 + ldr r4,[r11,#4] + eor r2,r2,r9,lsl#16 + ldr r5,[r11,#8] + eor r3,r6,r3,lsl#24 + ldr r6,[r11,#12] + + eor r0,r0,r7 + eor r1,r1,r4 + eor r2,r2,r5 + eor r3,r3,r6 + + sub r10,r10,#2 + ldr pc,[sp],#4 @ pop and return +.size _armv4_AES_encrypt,.-_armv4_AES_encrypt + +.global private_AES_set_encrypt_key +.type private_AES_set_encrypt_key,%function +.align 5 +private_AES_set_encrypt_key: +_armv4_AES_set_encrypt_key: + sub r3,pc,#8 @ AES_set_encrypt_key + teq r0,#0 + moveq r0,#-1 + beq .Labrt + teq r2,#0 + moveq r0,#-1 + beq .Labrt + + teq r1,#128 + beq .Lok + teq r1,#192 + beq .Lok + teq r1,#256 + movne r0,#-1 + bne .Labrt + +.Lok: stmdb sp!,{r4-r12,lr} + sub r10,r3,#_armv4_AES_set_encrypt_key-AES_Te-1024 @ Te4 + + mov r12,r0 @ inp + mov lr,r1 @ bits + mov r11,r2 @ key + +#if __ARM_ARCH__<7 + ldrb 
r0,[r12,#3] @ load input data in endian-neutral + ldrb r4,[r12,#2] @ manner... + ldrb r5,[r12,#1] + ldrb r6,[r12,#0] + orr r0,r0,r4,lsl#8 + ldrb r1,[r12,#7] + orr r0,r0,r5,lsl#16 + ldrb r4,[r12,#6] + orr r0,r0,r6,lsl#24 + ldrb r5,[r12,#5] + ldrb r6,[r12,#4] + orr r1,r1,r4,lsl#8 + ldrb r2,[r12,#11] + orr r1,r1,r5,lsl#16 + ldrb r4,[r12,#10] + orr r1,r1,r6,lsl#24 + ldrb r5,[r12,#9] + ldrb r6,[r12,#8] + orr r2,r2,r4,lsl#8 + ldrb r3,[r12,#15] + orr r2,r2,r5,lsl#16 + ldrb r4,[r12,#14] + orr r2,r2,r6,lsl#24 + ldrb r5,[r12,#13] + ldrb r6,[r12,#12] + orr r3,r3,r4,lsl#8 + str r0,[r11],#16 + orr r3,r3,r5,lsl#16 + str r1,[r11,#-12] + orr r3,r3,r6,lsl#24 + str r2,[r11,#-8] + str r3,[r11,#-4] +#else + ldr r0,[r12,#0] + ldr r1,[r12,#4] + ldr r2,[r12,#8] + ldr r3,[r12,#12] +#ifdef __ARMEL__ + rev r0,r0 + rev r1,r1 + rev r2,r2 + rev r3,r3 +#endif + str r0,[r11],#16 + str r1,[r11,#-12] + str r2,[r11,#-8] + str r3,[r11,#-4] +#endif + + teq lr,#128 + bne .Lnot128 + mov r12,#10 + str r12,[r11,#240-16] + add r6,r10,#256 @ rcon + mov lr,#255 + +.L128_loop: + and r5,lr,r3,lsr#24 + and r7,lr,r3,lsr#16 + ldrb r5,[r10,r5] + and r8,lr,r3,lsr#8 + ldrb r7,[r10,r7] + and r9,lr,r3 + ldrb r8,[r10,r8] + orr r5,r5,r7,lsl#24 + ldrb r9,[r10,r9] + orr r5,r5,r8,lsl#16 + ldr r4,[r6],#4 @ rcon[i++] + orr r5,r5,r9,lsl#8 + eor r5,r5,r4 + eor r0,r0,r5 @ rk[4]=rk[0]^... + eor r1,r1,r0 @ rk[5]=rk[1]^rk[4] + str r0,[r11],#16 + eor r2,r2,r1 @ rk[6]=rk[2]^rk[5] + str r1,[r11,#-12] + eor r3,r3,r2 @ rk[7]=rk[3]^rk[6] + str r2,[r11,#-8] + subs r12,r12,#1 + str r3,[r11,#-4] + bne .L128_loop + sub r2,r11,#176 + b .Ldone + +.Lnot128: +#if __ARM_ARCH__<7 + ldrb r8,[r12,#19] + ldrb r4,[r12,#18] + ldrb r5,[r12,#17] + ldrb r6,[r12,#16] + orr r8,r8,r4,lsl#8 + ldrb r9,[r12,#23] + orr r8,r8,r5,lsl#16 + ldrb r4,[r12,#22] + orr r8,r8,r6,lsl#24 + ldrb r5,[r12,#21] + ldrb r6,[r12,#20] + orr r9,r9,r4,lsl#8 + orr r9,r9,r5,lsl#16 + str r8,[r11],#8 + orr r9,r9,r6,lsl#24 + str r9,[r11,#-4] +#else + ldr r8,[r12,#16] + ldr r9,[r12,#20] +#ifdef __ARMEL__ + rev r8,r8 + rev r9,r9 +#endif + str r8,[r11],#8 + str r9,[r11,#-4] +#endif + + teq lr,#192 + bne .Lnot192 + mov r12,#12 + str r12,[r11,#240-24] + add r6,r10,#256 @ rcon + mov lr,#255 + mov r12,#8 + +.L192_loop: + and r5,lr,r9,lsr#24 + and r7,lr,r9,lsr#16 + ldrb r5,[r10,r5] + and r8,lr,r9,lsr#8 + ldrb r7,[r10,r7] + and r9,lr,r9 + ldrb r8,[r10,r8] + orr r5,r5,r7,lsl#24 + ldrb r9,[r10,r9] + orr r5,r5,r8,lsl#16 + ldr r4,[r6],#4 @ rcon[i++] + orr r5,r5,r9,lsl#8 + eor r9,r5,r4 + eor r0,r0,r9 @ rk[6]=rk[0]^... 
+ eor r1,r1,r0 @ rk[7]=rk[1]^rk[6] + str r0,[r11],#24 + eor r2,r2,r1 @ rk[8]=rk[2]^rk[7] + str r1,[r11,#-20] + eor r3,r3,r2 @ rk[9]=rk[3]^rk[8] + str r2,[r11,#-16] + subs r12,r12,#1 + str r3,[r11,#-12] + subeq r2,r11,#216 + beq .Ldone + + ldr r7,[r11,#-32] + ldr r8,[r11,#-28] + eor r7,r7,r3 @ rk[10]=rk[4]^rk[9] + eor r9,r8,r7 @ rk[11]=rk[5]^rk[10] + str r7,[r11,#-8] + str r9,[r11,#-4] + b .L192_loop + +.Lnot192: +#if __ARM_ARCH__<7 + ldrb r8,[r12,#27] + ldrb r4,[r12,#26] + ldrb r5,[r12,#25] + ldrb r6,[r12,#24] + orr r8,r8,r4,lsl#8 + ldrb r9,[r12,#31] + orr r8,r8,r5,lsl#16 + ldrb r4,[r12,#30] + orr r8,r8,r6,lsl#24 + ldrb r5,[r12,#29] + ldrb r6,[r12,#28] + orr r9,r9,r4,lsl#8 + orr r9,r9,r5,lsl#16 + str r8,[r11],#8 + orr r9,r9,r6,lsl#24 + str r9,[r11,#-4] +#else + ldr r8,[r12,#24] + ldr r9,[r12,#28] +#ifdef __ARMEL__ + rev r8,r8 + rev r9,r9 +#endif + str r8,[r11],#8 + str r9,[r11,#-4] +#endif + + mov r12,#14 + str r12,[r11,#240-32] + add r6,r10,#256 @ rcon + mov lr,#255 + mov r12,#7 + +.L256_loop: + and r5,lr,r9,lsr#24 + and r7,lr,r9,lsr#16 + ldrb r5,[r10,r5] + and r8,lr,r9,lsr#8 + ldrb r7,[r10,r7] + and r9,lr,r9 + ldrb r8,[r10,r8] + orr r5,r5,r7,lsl#24 + ldrb r9,[r10,r9] + orr r5,r5,r8,lsl#16 + ldr r4,[r6],#4 @ rcon[i++] + orr r5,r5,r9,lsl#8 + eor r9,r5,r4 + eor r0,r0,r9 @ rk[8]=rk[0]^... + eor r1,r1,r0 @ rk[9]=rk[1]^rk[8] + str r0,[r11],#32 + eor r2,r2,r1 @ rk[10]=rk[2]^rk[9] + str r1,[r11,#-28] + eor r3,r3,r2 @ rk[11]=rk[3]^rk[10] + str r2,[r11,#-24] + subs r12,r12,#1 + str r3,[r11,#-20] + subeq r2,r11,#256 + beq .Ldone + + and r5,lr,r3 + and r7,lr,r3,lsr#8 + ldrb r5,[r10,r5] + and r8,lr,r3,lsr#16 + ldrb r7,[r10,r7] + and r9,lr,r3,lsr#24 + ldrb r8,[r10,r8] + orr r5,r5,r7,lsl#8 + ldrb r9,[r10,r9] + orr r5,r5,r8,lsl#16 + ldr r4,[r11,#-48] + orr r5,r5,r9,lsl#24 + + ldr r7,[r11,#-44] + ldr r8,[r11,#-40] + eor r4,r4,r5 @ rk[12]=rk[4]^... + ldr r9,[r11,#-36] + eor r7,r7,r4 @ rk[13]=rk[5]^rk[12] + str r4,[r11,#-16] + eor r8,r8,r7 @ rk[14]=rk[6]^rk[13] + str r7,[r11,#-12] + eor r9,r9,r8 @ rk[15]=rk[7]^rk[14] + str r8,[r11,#-8] + str r9,[r11,#-4] + b .L256_loop + +.Ldone: mov r0,#0 + ldmia sp!,{r4-r12,lr} +.Labrt: tst lr,#1 + moveq pc,lr @ be binary compatible with V4, yet + .word 0xe12fff1e @ interoperable with Thumb ISA:-) +.size private_AES_set_encrypt_key,.-private_AES_set_encrypt_key + +.global private_AES_set_decrypt_key +.type private_AES_set_decrypt_key,%function +.align 5 +private_AES_set_decrypt_key: + str lr,[sp,#-4]! @ push lr +#if 0 + @ kernel does both of these in setkey so optimise this bit out by + @ expecting the key to already have the enc_key work done (see aes_glue.c) + bl _armv4_AES_set_encrypt_key +#else + mov r0,#0 +#endif + teq r0,#0 + ldrne lr,[sp],#4 @ pop lr + bne .Labrt + + stmdb sp!,{r4-r12} + + ldr r12,[r2,#240] @ AES_set_encrypt_key preserves r2, + mov r11,r2 @ which is AES_KEY *key + mov r7,r2 + add r8,r2,r12,lsl#4 + +.Linv: ldr r0,[r7] + ldr r1,[r7,#4] + ldr r2,[r7,#8] + ldr r3,[r7,#12] + ldr r4,[r8] + ldr r5,[r8,#4] + ldr r6,[r8,#8] + ldr r9,[r8,#12] + str r0,[r8],#-16 + str r1,[r8,#16+4] + str r2,[r8,#16+8] + str r3,[r8,#16+12] + str r4,[r7],#16 + str r5,[r7,#-12] + str r6,[r7,#-8] + str r9,[r7,#-4] + teq r7,r8 + bne .Linv + ldr r0,[r11,#16]! 
@ prefetch tp1 + mov r7,#0x80 + mov r8,#0x1b + orr r7,r7,#0x8000 + orr r8,r8,#0x1b00 + orr r7,r7,r7,lsl#16 + orr r8,r8,r8,lsl#16 + sub r12,r12,#1 + mvn r9,r7 + mov r12,r12,lsl#2 @ (rounds-1)*4 + +.Lmix: and r4,r0,r7 + and r1,r0,r9 + sub r4,r4,r4,lsr#7 + and r4,r4,r8 + eor r1,r4,r1,lsl#1 @ tp2 + + and r4,r1,r7 + and r2,r1,r9 + sub r4,r4,r4,lsr#7 + and r4,r4,r8 + eor r2,r4,r2,lsl#1 @ tp4 + + and r4,r2,r7 + and r3,r2,r9 + sub r4,r4,r4,lsr#7 + and r4,r4,r8 + eor r3,r4,r3,lsl#1 @ tp8 + + eor r4,r1,r2 + eor r5,r0,r3 @ tp9 + eor r4,r4,r3 @ tpe + eor r4,r4,r1,ror#24 + eor r4,r4,r5,ror#24 @ ^= ROTATE(tpb=tp9^tp2,8) + eor r4,r4,r2,ror#16 + eor r4,r4,r5,ror#16 @ ^= ROTATE(tpd=tp9^tp4,16) + eor r4,r4,r5,ror#8 @ ^= ROTATE(tp9,24) + + ldr r0,[r11,#4] @ prefetch tp1 + str r4,[r11],#4 + subs r12,r12,#1 + bne .Lmix + + mov r0,#0 +#if __ARM_ARCH__>=5 + ldmia sp!,{r4-r12,pc} +#else + ldmia sp!,{r4-r12,lr} + tst lr,#1 + moveq pc,lr @ be binary compatible with V4, yet + .word 0xe12fff1e @ interoperable with Thumb ISA:-) +#endif +.size private_AES_set_decrypt_key,.-private_AES_set_decrypt_key + +.type AES_Td,%object +.align 5 +AES_Td: +.word 0x51f4a750, 0x7e416553, 0x1a17a4c3, 0x3a275e96 +.word 0x3bab6bcb, 0x1f9d45f1, 0xacfa58ab, 0x4be30393 +.word 0x2030fa55, 0xad766df6, 0x88cc7691, 0xf5024c25 +.word 0x4fe5d7fc, 0xc52acbd7, 0x26354480, 0xb562a38f +.word 0xdeb15a49, 0x25ba1b67, 0x45ea0e98, 0x5dfec0e1 +.word 0xc32f7502, 0x814cf012, 0x8d4697a3, 0x6bd3f9c6 +.word 0x038f5fe7, 0x15929c95, 0xbf6d7aeb, 0x955259da +.word 0xd4be832d, 0x587421d3, 0x49e06929, 0x8ec9c844 +.word 0x75c2896a, 0xf48e7978, 0x99583e6b, 0x27b971dd +.word 0xbee14fb6, 0xf088ad17, 0xc920ac66, 0x7dce3ab4 +.word 0x63df4a18, 0xe51a3182, 0x97513360, 0x62537f45 +.word 0xb16477e0, 0xbb6bae84, 0xfe81a01c, 0xf9082b94 +.word 0x70486858, 0x8f45fd19, 0x94de6c87, 0x527bf8b7 +.word 0xab73d323, 0x724b02e2, 0xe31f8f57, 0x6655ab2a +.word 0xb2eb2807, 0x2fb5c203, 0x86c57b9a, 0xd33708a5 +.word 0x302887f2, 0x23bfa5b2, 0x02036aba, 0xed16825c +.word 0x8acf1c2b, 0xa779b492, 0xf307f2f0, 0x4e69e2a1 +.word 0x65daf4cd, 0x0605bed5, 0xd134621f, 0xc4a6fe8a +.word 0x342e539d, 0xa2f355a0, 0x058ae132, 0xa4f6eb75 +.word 0x0b83ec39, 0x4060efaa, 0x5e719f06, 0xbd6e1051 +.word 0x3e218af9, 0x96dd063d, 0xdd3e05ae, 0x4de6bd46 +.word 0x91548db5, 0x71c45d05, 0x0406d46f, 0x605015ff +.word 0x1998fb24, 0xd6bde997, 0x894043cc, 0x67d99e77 +.word 0xb0e842bd, 0x07898b88, 0xe7195b38, 0x79c8eedb +.word 0xa17c0a47, 0x7c420fe9, 0xf8841ec9, 0x00000000 +.word 0x09808683, 0x322bed48, 0x1e1170ac, 0x6c5a724e +.word 0xfd0efffb, 0x0f853856, 0x3daed51e, 0x362d3927 +.word 0x0a0fd964, 0x685ca621, 0x9b5b54d1, 0x24362e3a +.word 0x0c0a67b1, 0x9357e70f, 0xb4ee96d2, 0x1b9b919e +.word 0x80c0c54f, 0x61dc20a2, 0x5a774b69, 0x1c121a16 +.word 0xe293ba0a, 0xc0a02ae5, 0x3c22e043, 0x121b171d +.word 0x0e090d0b, 0xf28bc7ad, 0x2db6a8b9, 0x141ea9c8 +.word 0x57f11985, 0xaf75074c, 0xee99ddbb, 0xa37f60fd +.word 0xf701269f, 0x5c72f5bc, 0x44663bc5, 0x5bfb7e34 +.word 0x8b432976, 0xcb23c6dc, 0xb6edfc68, 0xb8e4f163 +.word 0xd731dcca, 0x42638510, 0x13972240, 0x84c61120 +.word 0x854a247d, 0xd2bb3df8, 0xaef93211, 0xc729a16d +.word 0x1d9e2f4b, 0xdcb230f3, 0x0d8652ec, 0x77c1e3d0 +.word 0x2bb3166c, 0xa970b999, 0x119448fa, 0x47e96422 +.word 0xa8fc8cc4, 0xa0f03f1a, 0x567d2cd8, 0x223390ef +.word 0x87494ec7, 0xd938d1c1, 0x8ccaa2fe, 0x98d40b36 +.word 0xa6f581cf, 0xa57ade28, 0xdab78e26, 0x3fadbfa4 +.word 0x2c3a9de4, 0x5078920d, 0x6a5fcc9b, 0x547e4662 +.word 0xf68d13c2, 0x90d8b8e8, 0x2e39f75e, 0x82c3aff5 +.word 0x9f5d80be, 0x69d0937c, 0x6fd52da9, 
0xcf2512b3 +.word 0xc8ac993b, 0x10187da7, 0xe89c636e, 0xdb3bbb7b +.word 0xcd267809, 0x6e5918f4, 0xec9ab701, 0x834f9aa8 +.word 0xe6956e65, 0xaaffe67e, 0x21bccf08, 0xef15e8e6 +.word 0xbae79bd9, 0x4a6f36ce, 0xea9f09d4, 0x29b07cd6 +.word 0x31a4b2af, 0x2a3f2331, 0xc6a59430, 0x35a266c0 +.word 0x744ebc37, 0xfc82caa6, 0xe090d0b0, 0x33a7d815 +.word 0xf104984a, 0x41ecdaf7, 0x7fcd500e, 0x1791f62f +.word 0x764dd68d, 0x43efb04d, 0xccaa4d54, 0xe49604df +.word 0x9ed1b5e3, 0x4c6a881b, 0xc12c1fb8, 0x4665517f +.word 0x9d5eea04, 0x018c355d, 0xfa877473, 0xfb0b412e +.word 0xb3671d5a, 0x92dbd252, 0xe9105633, 0x6dd64713 +.word 0x9ad7618c, 0x37a10c7a, 0x59f8148e, 0xeb133c89 +.word 0xcea927ee, 0xb761c935, 0xe11ce5ed, 0x7a47b13c +.word 0x9cd2df59, 0x55f2733f, 0x1814ce79, 0x73c737bf +.word 0x53f7cdea, 0x5ffdaa5b, 0xdf3d6f14, 0x7844db86 +.word 0xcaaff381, 0xb968c43e, 0x3824342c, 0xc2a3405f +.word 0x161dc372, 0xbce2250c, 0x283c498b, 0xff0d9541 +.word 0x39a80171, 0x080cb3de, 0xd8b4e49c, 0x6456c190 +.word 0x7bcb8461, 0xd532b670, 0x486c5c74, 0xd0b85742 +@ Td4[256] +.byte 0x52, 0x09, 0x6a, 0xd5, 0x30, 0x36, 0xa5, 0x38 +.byte 0xbf, 0x40, 0xa3, 0x9e, 0x81, 0xf3, 0xd7, 0xfb +.byte 0x7c, 0xe3, 0x39, 0x82, 0x9b, 0x2f, 0xff, 0x87 +.byte 0x34, 0x8e, 0x43, 0x44, 0xc4, 0xde, 0xe9, 0xcb +.byte 0x54, 0x7b, 0x94, 0x32, 0xa6, 0xc2, 0x23, 0x3d +.byte 0xee, 0x4c, 0x95, 0x0b, 0x42, 0xfa, 0xc3, 0x4e +.byte 0x08, 0x2e, 0xa1, 0x66, 0x28, 0xd9, 0x24, 0xb2 +.byte 0x76, 0x5b, 0xa2, 0x49, 0x6d, 0x8b, 0xd1, 0x25 +.byte 0x72, 0xf8, 0xf6, 0x64, 0x86, 0x68, 0x98, 0x16 +.byte 0xd4, 0xa4, 0x5c, 0xcc, 0x5d, 0x65, 0xb6, 0x92 +.byte 0x6c, 0x70, 0x48, 0x50, 0xfd, 0xed, 0xb9, 0xda +.byte 0x5e, 0x15, 0x46, 0x57, 0xa7, 0x8d, 0x9d, 0x84 +.byte 0x90, 0xd8, 0xab, 0x00, 0x8c, 0xbc, 0xd3, 0x0a +.byte 0xf7, 0xe4, 0x58, 0x05, 0xb8, 0xb3, 0x45, 0x06 +.byte 0xd0, 0x2c, 0x1e, 0x8f, 0xca, 0x3f, 0x0f, 0x02 +.byte 0xc1, 0xaf, 0xbd, 0x03, 0x01, 0x13, 0x8a, 0x6b +.byte 0x3a, 0x91, 0x11, 0x41, 0x4f, 0x67, 0xdc, 0xea +.byte 0x97, 0xf2, 0xcf, 0xce, 0xf0, 0xb4, 0xe6, 0x73 +.byte 0x96, 0xac, 0x74, 0x22, 0xe7, 0xad, 0x35, 0x85 +.byte 0xe2, 0xf9, 0x37, 0xe8, 0x1c, 0x75, 0xdf, 0x6e +.byte 0x47, 0xf1, 0x1a, 0x71, 0x1d, 0x29, 0xc5, 0x89 +.byte 0x6f, 0xb7, 0x62, 0x0e, 0xaa, 0x18, 0xbe, 0x1b +.byte 0xfc, 0x56, 0x3e, 0x4b, 0xc6, 0xd2, 0x79, 0x20 +.byte 0x9a, 0xdb, 0xc0, 0xfe, 0x78, 0xcd, 0x5a, 0xf4 +.byte 0x1f, 0xdd, 0xa8, 0x33, 0x88, 0x07, 0xc7, 0x31 +.byte 0xb1, 0x12, 0x10, 0x59, 0x27, 0x80, 0xec, 0x5f +.byte 0x60, 0x51, 0x7f, 0xa9, 0x19, 0xb5, 0x4a, 0x0d +.byte 0x2d, 0xe5, 0x7a, 0x9f, 0x93, 0xc9, 0x9c, 0xef +.byte 0xa0, 0xe0, 0x3b, 0x4d, 0xae, 0x2a, 0xf5, 0xb0 +.byte 0xc8, 0xeb, 0xbb, 0x3c, 0x83, 0x53, 0x99, 0x61 +.byte 0x17, 0x2b, 0x04, 0x7e, 0xba, 0x77, 0xd6, 0x26 +.byte 0xe1, 0x69, 0x14, 0x63, 0x55, 0x21, 0x0c, 0x7d +.size AES_Td,.-AES_Td + +@ void AES_decrypt(const unsigned char *in, unsigned char *out, +@ const AES_KEY *key) { +.global AES_decrypt +.type AES_decrypt,%function +.align 5 +AES_decrypt: + sub r3,pc,#8 @ AES_decrypt + stmdb sp!,{r1,r4-r12,lr} + mov r12,r0 @ inp + mov r11,r2 + sub r10,r3,#AES_decrypt-AES_Td @ Td +#if __ARM_ARCH__<7 + ldrb r0,[r12,#3] @ load input data in endian-neutral + ldrb r4,[r12,#2] @ manner... 
+ ldrb r5,[r12,#1] + ldrb r6,[r12,#0] + orr r0,r0,r4,lsl#8 + ldrb r1,[r12,#7] + orr r0,r0,r5,lsl#16 + ldrb r4,[r12,#6] + orr r0,r0,r6,lsl#24 + ldrb r5,[r12,#5] + ldrb r6,[r12,#4] + orr r1,r1,r4,lsl#8 + ldrb r2,[r12,#11] + orr r1,r1,r5,lsl#16 + ldrb r4,[r12,#10] + orr r1,r1,r6,lsl#24 + ldrb r5,[r12,#9] + ldrb r6,[r12,#8] + orr r2,r2,r4,lsl#8 + ldrb r3,[r12,#15] + orr r2,r2,r5,lsl#16 + ldrb r4,[r12,#14] + orr r2,r2,r6,lsl#24 + ldrb r5,[r12,#13] + ldrb r6,[r12,#12] + orr r3,r3,r4,lsl#8 + orr r3,r3,r5,lsl#16 + orr r3,r3,r6,lsl#24 +#else + ldr r0,[r12,#0] + ldr r1,[r12,#4] + ldr r2,[r12,#8] + ldr r3,[r12,#12] +#ifdef __ARMEL__ + rev r0,r0 + rev r1,r1 + rev r2,r2 + rev r3,r3 +#endif +#endif + bl _armv4_AES_decrypt + + ldr r12,[sp],#4 @ pop out +#if __ARM_ARCH__>=7 +#ifdef __ARMEL__ + rev r0,r0 + rev r1,r1 + rev r2,r2 + rev r3,r3 +#endif + str r0,[r12,#0] + str r1,[r12,#4] + str r2,[r12,#8] + str r3,[r12,#12] +#else + mov r4,r0,lsr#24 @ write output in endian-neutral + mov r5,r0,lsr#16 @ manner... + mov r6,r0,lsr#8 + strb r4,[r12,#0] + strb r5,[r12,#1] + mov r4,r1,lsr#24 + strb r6,[r12,#2] + mov r5,r1,lsr#16 + strb r0,[r12,#3] + mov r6,r1,lsr#8 + strb r4,[r12,#4] + strb r5,[r12,#5] + mov r4,r2,lsr#24 + strb r6,[r12,#6] + mov r5,r2,lsr#16 + strb r1,[r12,#7] + mov r6,r2,lsr#8 + strb r4,[r12,#8] + strb r5,[r12,#9] + mov r4,r3,lsr#24 + strb r6,[r12,#10] + mov r5,r3,lsr#16 + strb r2,[r12,#11] + mov r6,r3,lsr#8 + strb r4,[r12,#12] + strb r5,[r12,#13] + strb r6,[r12,#14] + strb r3,[r12,#15] +#endif +#if __ARM_ARCH__>=5 + ldmia sp!,{r4-r12,pc} +#else + ldmia sp!,{r4-r12,lr} + tst lr,#1 + moveq pc,lr @ be binary compatible with V4, yet + .word 0xe12fff1e @ interoperable with Thumb ISA:-) +#endif +.size AES_decrypt,.-AES_decrypt + +.type _armv4_AES_decrypt,%function +.align 2 +_armv4_AES_decrypt: + str lr,[sp,#-4]! 
@ push lr + ldmia r11!,{r4-r7} + eor r0,r0,r4 + ldr r12,[r11,#240-16] + eor r1,r1,r5 + eor r2,r2,r6 + eor r3,r3,r7 + sub r12,r12,#1 + mov lr,#255 + + and r7,lr,r0,lsr#16 + and r8,lr,r0,lsr#8 + and r9,lr,r0 + mov r0,r0,lsr#24 +.Ldec_loop: + ldr r4,[r10,r7,lsl#2] @ Td1[s0>>16] + and r7,lr,r1 @ i0 + ldr r5,[r10,r8,lsl#2] @ Td2[s0>>8] + and r8,lr,r1,lsr#16 + ldr r6,[r10,r9,lsl#2] @ Td3[s0>>0] + and r9,lr,r1,lsr#8 + ldr r0,[r10,r0,lsl#2] @ Td0[s0>>24] + mov r1,r1,lsr#24 + + ldr r7,[r10,r7,lsl#2] @ Td3[s1>>0] + ldr r8,[r10,r8,lsl#2] @ Td1[s1>>16] + ldr r9,[r10,r9,lsl#2] @ Td2[s1>>8] + eor r0,r0,r7,ror#24 + ldr r1,[r10,r1,lsl#2] @ Td0[s1>>24] + and r7,lr,r2,lsr#8 @ i0 + eor r5,r8,r5,ror#8 + and r8,lr,r2 @ i1 + eor r6,r9,r6,ror#8 + and r9,lr,r2,lsr#16 + ldr r7,[r10,r7,lsl#2] @ Td2[s2>>8] + eor r1,r1,r4,ror#8 + ldr r8,[r10,r8,lsl#2] @ Td3[s2>>0] + mov r2,r2,lsr#24 + + ldr r9,[r10,r9,lsl#2] @ Td1[s2>>16] + eor r0,r0,r7,ror#16 + ldr r2,[r10,r2,lsl#2] @ Td0[s2>>24] + and r7,lr,r3,lsr#16 @ i0 + eor r1,r1,r8,ror#24 + and r8,lr,r3,lsr#8 @ i1 + eor r6,r9,r6,ror#8 + and r9,lr,r3 @ i2 + ldr r7,[r10,r7,lsl#2] @ Td1[s3>>16] + eor r2,r2,r5,ror#8 + ldr r8,[r10,r8,lsl#2] @ Td2[s3>>8] + mov r3,r3,lsr#24 + + ldr r9,[r10,r9,lsl#2] @ Td3[s3>>0] + eor r0,r0,r7,ror#8 + ldr r7,[r11],#16 + eor r1,r1,r8,ror#16 + ldr r3,[r10,r3,lsl#2] @ Td0[s3>>24] + eor r2,r2,r9,ror#24 + + ldr r4,[r11,#-12] + eor r0,r0,r7 + ldr r5,[r11,#-8] + eor r3,r3,r6,ror#8 + ldr r6,[r11,#-4] + and r7,lr,r0,lsr#16 + eor r1,r1,r4 + and r8,lr,r0,lsr#8 + eor r2,r2,r5 + and r9,lr,r0 + eor r3,r3,r6 + mov r0,r0,lsr#24 + + subs r12,r12,#1 + bne .Ldec_loop + + add r10,r10,#1024 + + ldr r5,[r10,#0] @ prefetch Td4 + ldr r6,[r10,#32] + ldr r4,[r10,#64] + ldr r5,[r10,#96] + ldr r6,[r10,#128] + ldr r4,[r10,#160] + ldr r5,[r10,#192] + ldr r6,[r10,#224] + + ldrb r0,[r10,r0] @ Td4[s0>>24] + ldrb r4,[r10,r7] @ Td4[s0>>16] + and r7,lr,r1 @ i0 + ldrb r5,[r10,r8] @ Td4[s0>>8] + and r8,lr,r1,lsr#16 + ldrb r6,[r10,r9] @ Td4[s0>>0] + and r9,lr,r1,lsr#8 + + ldrb r7,[r10,r7] @ Td4[s1>>0] + ldrb r1,[r10,r1,lsr#24] @ Td4[s1>>24] + ldrb r8,[r10,r8] @ Td4[s1>>16] + eor r0,r7,r0,lsl#24 + ldrb r9,[r10,r9] @ Td4[s1>>8] + eor r1,r4,r1,lsl#8 + and r7,lr,r2,lsr#8 @ i0 + eor r5,r5,r8,lsl#8 + and r8,lr,r2 @ i1 + ldrb r7,[r10,r7] @ Td4[s2>>8] + eor r6,r6,r9,lsl#8 + ldrb r8,[r10,r8] @ Td4[s2>>0] + and r9,lr,r2,lsr#16 + + ldrb r2,[r10,r2,lsr#24] @ Td4[s2>>24] + eor r0,r0,r7,lsl#8 + ldrb r9,[r10,r9] @ Td4[s2>>16] + eor r1,r8,r1,lsl#16 + and r7,lr,r3,lsr#16 @ i0 + eor r2,r5,r2,lsl#16 + and r8,lr,r3,lsr#8 @ i1 + ldrb r7,[r10,r7] @ Td4[s3>>16] + eor r6,r6,r9,lsl#16 + ldrb r8,[r10,r8] @ Td4[s3>>8] + and r9,lr,r3 @ i2 + + ldrb r9,[r10,r9] @ Td4[s3>>0] + ldrb r3,[r10,r3,lsr#24] @ Td4[s3>>24] + eor r0,r0,r7,lsl#16 + ldr r7,[r11,#0] + eor r1,r1,r8,lsl#8 + ldr r4,[r11,#4] + eor r2,r9,r2,lsl#8 + ldr r5,[r11,#8] + eor r3,r6,r3,lsl#24 + ldr r6,[r11,#12] + + eor r0,r0,r7 + eor r1,r1,r4 + eor r2,r2,r5 + eor r3,r3,r6 + + sub r10,r10,#1024 + ldr pc,[sp],#4 @ pop and return +.size _armv4_AES_decrypt,.-_armv4_AES_decrypt +.asciz "AES for ARMv4, CRYPTOGAMS by " +.align 2 diff --git a/arch/arm/crypto/aes_glue.c b/arch/arm/crypto/aes_glue.c new file mode 100644 index 000000000000..59f7877ead6a --- /dev/null +++ b/arch/arm/crypto/aes_glue.c @@ -0,0 +1,108 @@ +/* + * Glue Code for the asm optimized version of the AES Cipher Algorithm + */ + +#include +#include +#include + +#define AES_MAXNR 14 + +typedef struct { + unsigned int rd_key[4 *(AES_MAXNR + 1)]; + int rounds; +} AES_KEY; + +struct AES_CTX { + AES_KEY 
enc_key; + AES_KEY dec_key; +}; + +asmlinkage void AES_encrypt(const u8 *in, u8 *out, AES_KEY *ctx); +asmlinkage void AES_decrypt(const u8 *in, u8 *out, AES_KEY *ctx); +asmlinkage int private_AES_set_decrypt_key(const unsigned char *userKey, const int bits, AES_KEY *key); +asmlinkage int private_AES_set_encrypt_key(const unsigned char *userKey, const int bits, AES_KEY *key); + +static void aes_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src) +{ + struct AES_CTX *ctx = crypto_tfm_ctx(tfm); + AES_encrypt(src, dst, &ctx->enc_key); +} + +static void aes_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src) +{ + struct AES_CTX *ctx = crypto_tfm_ctx(tfm); + AES_decrypt(src, dst, &ctx->dec_key); +} + +static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key, + unsigned int key_len) +{ + struct AES_CTX *ctx = crypto_tfm_ctx(tfm); + + switch (key_len) { + case AES_KEYSIZE_128: + key_len = 128; + break; + case AES_KEYSIZE_192: + key_len = 192; + break; + case AES_KEYSIZE_256: + key_len = 256; + break; + default: + tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN; + return -EINVAL; + } + + if (private_AES_set_encrypt_key(in_key, key_len, &ctx->enc_key) == -1) { + tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN; + return -EINVAL; + } + /* private_AES_set_decrypt_key expects an encryption key as input */ + ctx->dec_key = ctx->enc_key; + if (private_AES_set_decrypt_key(in_key, key_len, &ctx->dec_key) == -1) { + tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN; + return -EINVAL; + } + return 0; +} + +static struct crypto_alg aes_alg = { + .cra_name = "aes", + .cra_driver_name = "aes-asm", + .cra_priority = 200, + .cra_flags = CRYPTO_ALG_TYPE_CIPHER, + .cra_blocksize = AES_BLOCK_SIZE, + .cra_ctxsize = sizeof(struct AES_CTX), + .cra_module = THIS_MODULE, + .cra_list = LIST_HEAD_INIT(aes_alg.cra_list), + .cra_u = { + .cipher = { + .cia_min_keysize = AES_MIN_KEY_SIZE, + .cia_max_keysize = AES_MAX_KEY_SIZE, + .cia_setkey = aes_set_key, + .cia_encrypt = aes_encrypt, + .cia_decrypt = aes_decrypt + } + } +}; + +static int __init aes_init(void) +{ + return crypto_register_alg(&aes_alg); +} + +static void __exit aes_fini(void) +{ + crypto_unregister_alg(&aes_alg); +} + +module_init(aes_init); +module_exit(aes_fini); + +MODULE_DESCRIPTION("Rijndael (AES) Cipher Algorithm (ASM)"); +MODULE_LICENSE("GPL"); +MODULE_ALIAS("aes"); +MODULE_ALIAS("aes-asm"); +MODULE_AUTHOR("David McCullough "); diff --git a/arch/arm/crypto/sha1-armv4-large.S b/arch/arm/crypto/sha1-armv4-large.S new file mode 100644 index 000000000000..7050ab133b9d --- /dev/null +++ b/arch/arm/crypto/sha1-armv4-large.S @@ -0,0 +1,503 @@ +#define __ARM_ARCH__ __LINUX_ARM_ARCH__ +@ ==================================================================== +@ Written by Andy Polyakov for the OpenSSL +@ project. The module is, however, dual licensed under OpenSSL and +@ CRYPTOGAMS licenses depending on where you obtain it. For further +@ details see http://www.openssl.org/~appro/cryptogams/. +@ ==================================================================== + +@ sha1_block procedure for ARMv4. +@ +@ January 2007. 
+ +@ Size/performance trade-off +@ ==================================================================== +@ impl size in bytes comp cycles[*] measured performance +@ ==================================================================== +@ thumb 304 3212 4420 +@ armv4-small 392/+29% 1958/+64% 2250/+96% +@ armv4-compact 740/+89% 1552/+26% 1840/+22% +@ armv4-large 1420/+92% 1307/+19% 1370/+34%[***] +@ full unroll ~5100/+260% ~1260/+4% ~1300/+5% +@ ==================================================================== +@ thumb = same as 'small' but in Thumb instructions[**] and +@ with recurring code in two private functions; +@ small = detached Xload/update, loops are folded; +@ compact = detached Xload/update, 5x unroll; +@ large = interleaved Xload/update, 5x unroll; +@ full unroll = interleaved Xload/update, full unroll, estimated[!]; +@ +@ [*] Manually counted instructions in "grand" loop body. Measured +@ performance is affected by prologue and epilogue overhead, +@ i-cache availability, branch penalties, etc. +@ [**] While each Thumb instruction is twice smaller, they are not as +@ diverse as ARM ones: e.g., there are only two arithmetic +@ instructions with 3 arguments, no [fixed] rotate, addressing +@ modes are limited. As result it takes more instructions to do +@ the same job in Thumb, therefore the code is never twice as +@ small and always slower. +@ [***] which is also ~35% better than compiler generated code. Dual- +@ issue Cortex A8 core was measured to process input block in +@ ~990 cycles. + +@ August 2010. +@ +@ Rescheduling for dual-issue pipeline resulted in 13% improvement on +@ Cortex A8 core and in absolute terms ~870 cycles per input block +@ [or 13.6 cycles per byte]. + +@ February 2011. +@ +@ Profiler-assisted and platform-specific optimization resulted in 10% +@ improvement on Cortex A8 core and 12.2 cycles per byte. + +.text + +.global sha1_block_data_order +.type sha1_block_data_order,%function + +.align 2 +sha1_block_data_order: + stmdb sp!,{r4-r12,lr} + add r2,r1,r2,lsl#6 @ r2 to point at the end of r1 + ldmia r0,{r3,r4,r5,r6,r7} +.Lloop: + ldr r8,.LK_00_19 + mov r14,sp + sub sp,sp,#15*4 + mov r5,r5,ror#30 + mov r6,r6,ror#30 + mov r7,r7,ror#30 @ [6] +.L_00_15: +#if __ARM_ARCH__<7 + ldrb r10,[r1,#2] + ldrb r9,[r1,#3] + ldrb r11,[r1,#1] + add r7,r8,r7,ror#2 @ E+=K_00_19 + ldrb r12,[r1],#4 + orr r9,r9,r10,lsl#8 + eor r10,r5,r6 @ F_xx_xx + orr r9,r9,r11,lsl#16 + add r7,r7,r3,ror#27 @ E+=ROR(A,27) + orr r9,r9,r12,lsl#24 +#else + ldr r9,[r1],#4 @ handles unaligned + add r7,r8,r7,ror#2 @ E+=K_00_19 + eor r10,r5,r6 @ F_xx_xx + add r7,r7,r3,ror#27 @ E+=ROR(A,27) +#ifdef __ARMEL__ + rev r9,r9 @ byte swap +#endif +#endif + and r10,r4,r10,ror#2 + add r7,r7,r9 @ E+=X[i] + eor r10,r10,r6,ror#2 @ F_00_19(B,C,D) + str r9,[r14,#-4]! + add r7,r7,r10 @ E+=F_00_19(B,C,D) +#if __ARM_ARCH__<7 + ldrb r10,[r1,#2] + ldrb r9,[r1,#3] + ldrb r11,[r1,#1] + add r6,r8,r6,ror#2 @ E+=K_00_19 + ldrb r12,[r1],#4 + orr r9,r9,r10,lsl#8 + eor r10,r4,r5 @ F_xx_xx + orr r9,r9,r11,lsl#16 + add r6,r6,r7,ror#27 @ E+=ROR(A,27) + orr r9,r9,r12,lsl#24 +#else + ldr r9,[r1],#4 @ handles unaligned + add r6,r8,r6,ror#2 @ E+=K_00_19 + eor r10,r4,r5 @ F_xx_xx + add r6,r6,r7,ror#27 @ E+=ROR(A,27) +#ifdef __ARMEL__ + rev r9,r9 @ byte swap +#endif +#endif + and r10,r3,r10,ror#2 + add r6,r6,r9 @ E+=X[i] + eor r10,r10,r5,ror#2 @ F_00_19(B,C,D) + str r9,[r14,#-4]! 
+ add r6,r6,r10 @ E+=F_00_19(B,C,D) +#if __ARM_ARCH__<7 + ldrb r10,[r1,#2] + ldrb r9,[r1,#3] + ldrb r11,[r1,#1] + add r5,r8,r5,ror#2 @ E+=K_00_19 + ldrb r12,[r1],#4 + orr r9,r9,r10,lsl#8 + eor r10,r3,r4 @ F_xx_xx + orr r9,r9,r11,lsl#16 + add r5,r5,r6,ror#27 @ E+=ROR(A,27) + orr r9,r9,r12,lsl#24 +#else + ldr r9,[r1],#4 @ handles unaligned + add r5,r8,r5,ror#2 @ E+=K_00_19 + eor r10,r3,r4 @ F_xx_xx + add r5,r5,r6,ror#27 @ E+=ROR(A,27) +#ifdef __ARMEL__ + rev r9,r9 @ byte swap +#endif +#endif + and r10,r7,r10,ror#2 + add r5,r5,r9 @ E+=X[i] + eor r10,r10,r4,ror#2 @ F_00_19(B,C,D) + str r9,[r14,#-4]! + add r5,r5,r10 @ E+=F_00_19(B,C,D) +#if __ARM_ARCH__<7 + ldrb r10,[r1,#2] + ldrb r9,[r1,#3] + ldrb r11,[r1,#1] + add r4,r8,r4,ror#2 @ E+=K_00_19 + ldrb r12,[r1],#4 + orr r9,r9,r10,lsl#8 + eor r10,r7,r3 @ F_xx_xx + orr r9,r9,r11,lsl#16 + add r4,r4,r5,ror#27 @ E+=ROR(A,27) + orr r9,r9,r12,lsl#24 +#else + ldr r9,[r1],#4 @ handles unaligned + add r4,r8,r4,ror#2 @ E+=K_00_19 + eor r10,r7,r3 @ F_xx_xx + add r4,r4,r5,ror#27 @ E+=ROR(A,27) +#ifdef __ARMEL__ + rev r9,r9 @ byte swap +#endif +#endif + and r10,r6,r10,ror#2 + add r4,r4,r9 @ E+=X[i] + eor r10,r10,r3,ror#2 @ F_00_19(B,C,D) + str r9,[r14,#-4]! + add r4,r4,r10 @ E+=F_00_19(B,C,D) +#if __ARM_ARCH__<7 + ldrb r10,[r1,#2] + ldrb r9,[r1,#3] + ldrb r11,[r1,#1] + add r3,r8,r3,ror#2 @ E+=K_00_19 + ldrb r12,[r1],#4 + orr r9,r9,r10,lsl#8 + eor r10,r6,r7 @ F_xx_xx + orr r9,r9,r11,lsl#16 + add r3,r3,r4,ror#27 @ E+=ROR(A,27) + orr r9,r9,r12,lsl#24 +#else + ldr r9,[r1],#4 @ handles unaligned + add r3,r8,r3,ror#2 @ E+=K_00_19 + eor r10,r6,r7 @ F_xx_xx + add r3,r3,r4,ror#27 @ E+=ROR(A,27) +#ifdef __ARMEL__ + rev r9,r9 @ byte swap +#endif +#endif + and r10,r5,r10,ror#2 + add r3,r3,r9 @ E+=X[i] + eor r10,r10,r7,ror#2 @ F_00_19(B,C,D) + str r9,[r14,#-4]! + add r3,r3,r10 @ E+=F_00_19(B,C,D) + teq r14,sp + bne .L_00_15 @ [((11+4)*5+2)*3] +#if __ARM_ARCH__<7 + ldrb r10,[r1,#2] + ldrb r9,[r1,#3] + ldrb r11,[r1,#1] + add r7,r8,r7,ror#2 @ E+=K_00_19 + ldrb r12,[r1],#4 + orr r9,r9,r10,lsl#8 + eor r10,r5,r6 @ F_xx_xx + orr r9,r9,r11,lsl#16 + add r7,r7,r3,ror#27 @ E+=ROR(A,27) + orr r9,r9,r12,lsl#24 +#else + ldr r9,[r1],#4 @ handles unaligned + add r7,r8,r7,ror#2 @ E+=K_00_19 + eor r10,r5,r6 @ F_xx_xx + add r7,r7,r3,ror#27 @ E+=ROR(A,27) +#ifdef __ARMEL__ + rev r9,r9 @ byte swap +#endif +#endif + and r10,r4,r10,ror#2 + add r7,r7,r9 @ E+=X[i] + eor r10,r10,r6,ror#2 @ F_00_19(B,C,D) + str r9,[r14,#-4]! + add r7,r7,r10 @ E+=F_00_19(B,C,D) + ldr r9,[r14,#15*4] + ldr r10,[r14,#13*4] + ldr r11,[r14,#7*4] + add r6,r8,r6,ror#2 @ E+=K_xx_xx + ldr r12,[r14,#2*4] + eor r9,r9,r10 + eor r11,r11,r12 @ 1 cycle stall + eor r10,r4,r5 @ F_xx_xx + mov r9,r9,ror#31 + add r6,r6,r7,ror#27 @ E+=ROR(A,27) + eor r9,r9,r11,ror#31 + str r9,[r14,#-4]! + and r10,r3,r10,ror#2 @ F_xx_xx + @ F_xx_xx + add r6,r6,r9 @ E+=X[i] + eor r10,r10,r5,ror#2 @ F_00_19(B,C,D) + add r6,r6,r10 @ E+=F_00_19(B,C,D) + ldr r9,[r14,#15*4] + ldr r10,[r14,#13*4] + ldr r11,[r14,#7*4] + add r5,r8,r5,ror#2 @ E+=K_xx_xx + ldr r12,[r14,#2*4] + eor r9,r9,r10 + eor r11,r11,r12 @ 1 cycle stall + eor r10,r3,r4 @ F_xx_xx + mov r9,r9,ror#31 + add r5,r5,r6,ror#27 @ E+=ROR(A,27) + eor r9,r9,r11,ror#31 + str r9,[r14,#-4]! 
+ and r10,r7,r10,ror#2 @ F_xx_xx + @ F_xx_xx + add r5,r5,r9 @ E+=X[i] + eor r10,r10,r4,ror#2 @ F_00_19(B,C,D) + add r5,r5,r10 @ E+=F_00_19(B,C,D) + ldr r9,[r14,#15*4] + ldr r10,[r14,#13*4] + ldr r11,[r14,#7*4] + add r4,r8,r4,ror#2 @ E+=K_xx_xx + ldr r12,[r14,#2*4] + eor r9,r9,r10 + eor r11,r11,r12 @ 1 cycle stall + eor r10,r7,r3 @ F_xx_xx + mov r9,r9,ror#31 + add r4,r4,r5,ror#27 @ E+=ROR(A,27) + eor r9,r9,r11,ror#31 + str r9,[r14,#-4]! + and r10,r6,r10,ror#2 @ F_xx_xx + @ F_xx_xx + add r4,r4,r9 @ E+=X[i] + eor r10,r10,r3,ror#2 @ F_00_19(B,C,D) + add r4,r4,r10 @ E+=F_00_19(B,C,D) + ldr r9,[r14,#15*4] + ldr r10,[r14,#13*4] + ldr r11,[r14,#7*4] + add r3,r8,r3,ror#2 @ E+=K_xx_xx + ldr r12,[r14,#2*4] + eor r9,r9,r10 + eor r11,r11,r12 @ 1 cycle stall + eor r10,r6,r7 @ F_xx_xx + mov r9,r9,ror#31 + add r3,r3,r4,ror#27 @ E+=ROR(A,27) + eor r9,r9,r11,ror#31 + str r9,[r14,#-4]! + and r10,r5,r10,ror#2 @ F_xx_xx + @ F_xx_xx + add r3,r3,r9 @ E+=X[i] + eor r10,r10,r7,ror#2 @ F_00_19(B,C,D) + add r3,r3,r10 @ E+=F_00_19(B,C,D) + + ldr r8,.LK_20_39 @ [+15+16*4] + sub sp,sp,#25*4 + cmn sp,#0 @ [+3], clear carry to denote 20_39 +.L_20_39_or_60_79: + ldr r9,[r14,#15*4] + ldr r10,[r14,#13*4] + ldr r11,[r14,#7*4] + add r7,r8,r7,ror#2 @ E+=K_xx_xx + ldr r12,[r14,#2*4] + eor r9,r9,r10 + eor r11,r11,r12 @ 1 cycle stall + eor r10,r5,r6 @ F_xx_xx + mov r9,r9,ror#31 + add r7,r7,r3,ror#27 @ E+=ROR(A,27) + eor r9,r9,r11,ror#31 + str r9,[r14,#-4]! + eor r10,r4,r10,ror#2 @ F_xx_xx + @ F_xx_xx + add r7,r7,r9 @ E+=X[i] + add r7,r7,r10 @ E+=F_20_39(B,C,D) + ldr r9,[r14,#15*4] + ldr r10,[r14,#13*4] + ldr r11,[r14,#7*4] + add r6,r8,r6,ror#2 @ E+=K_xx_xx + ldr r12,[r14,#2*4] + eor r9,r9,r10 + eor r11,r11,r12 @ 1 cycle stall + eor r10,r4,r5 @ F_xx_xx + mov r9,r9,ror#31 + add r6,r6,r7,ror#27 @ E+=ROR(A,27) + eor r9,r9,r11,ror#31 + str r9,[r14,#-4]! + eor r10,r3,r10,ror#2 @ F_xx_xx + @ F_xx_xx + add r6,r6,r9 @ E+=X[i] + add r6,r6,r10 @ E+=F_20_39(B,C,D) + ldr r9,[r14,#15*4] + ldr r10,[r14,#13*4] + ldr r11,[r14,#7*4] + add r5,r8,r5,ror#2 @ E+=K_xx_xx + ldr r12,[r14,#2*4] + eor r9,r9,r10 + eor r11,r11,r12 @ 1 cycle stall + eor r10,r3,r4 @ F_xx_xx + mov r9,r9,ror#31 + add r5,r5,r6,ror#27 @ E+=ROR(A,27) + eor r9,r9,r11,ror#31 + str r9,[r14,#-4]! + eor r10,r7,r10,ror#2 @ F_xx_xx + @ F_xx_xx + add r5,r5,r9 @ E+=X[i] + add r5,r5,r10 @ E+=F_20_39(B,C,D) + ldr r9,[r14,#15*4] + ldr r10,[r14,#13*4] + ldr r11,[r14,#7*4] + add r4,r8,r4,ror#2 @ E+=K_xx_xx + ldr r12,[r14,#2*4] + eor r9,r9,r10 + eor r11,r11,r12 @ 1 cycle stall + eor r10,r7,r3 @ F_xx_xx + mov r9,r9,ror#31 + add r4,r4,r5,ror#27 @ E+=ROR(A,27) + eor r9,r9,r11,ror#31 + str r9,[r14,#-4]! + eor r10,r6,r10,ror#2 @ F_xx_xx + @ F_xx_xx + add r4,r4,r9 @ E+=X[i] + add r4,r4,r10 @ E+=F_20_39(B,C,D) + ldr r9,[r14,#15*4] + ldr r10,[r14,#13*4] + ldr r11,[r14,#7*4] + add r3,r8,r3,ror#2 @ E+=K_xx_xx + ldr r12,[r14,#2*4] + eor r9,r9,r10 + eor r11,r11,r12 @ 1 cycle stall + eor r10,r6,r7 @ F_xx_xx + mov r9,r9,ror#31 + add r3,r3,r4,ror#27 @ E+=ROR(A,27) + eor r9,r9,r11,ror#31 + str r9,[r14,#-4]! 
+ eor r10,r5,r10,ror#2 @ F_xx_xx + @ F_xx_xx + add r3,r3,r9 @ E+=X[i] + add r3,r3,r10 @ E+=F_20_39(B,C,D) + teq r14,sp @ preserve carry + bne .L_20_39_or_60_79 @ [+((12+3)*5+2)*4] + bcs .L_done @ [+((12+3)*5+2)*4], spare 300 bytes + + ldr r8,.LK_40_59 + sub sp,sp,#20*4 @ [+2] +.L_40_59: + ldr r9,[r14,#15*4] + ldr r10,[r14,#13*4] + ldr r11,[r14,#7*4] + add r7,r8,r7,ror#2 @ E+=K_xx_xx + ldr r12,[r14,#2*4] + eor r9,r9,r10 + eor r11,r11,r12 @ 1 cycle stall + eor r10,r5,r6 @ F_xx_xx + mov r9,r9,ror#31 + add r7,r7,r3,ror#27 @ E+=ROR(A,27) + eor r9,r9,r11,ror#31 + str r9,[r14,#-4]! + and r10,r4,r10,ror#2 @ F_xx_xx + and r11,r5,r6 @ F_xx_xx + add r7,r7,r9 @ E+=X[i] + add r7,r7,r10 @ E+=F_40_59(B,C,D) + add r7,r7,r11,ror#2 + ldr r9,[r14,#15*4] + ldr r10,[r14,#13*4] + ldr r11,[r14,#7*4] + add r6,r8,r6,ror#2 @ E+=K_xx_xx + ldr r12,[r14,#2*4] + eor r9,r9,r10 + eor r11,r11,r12 @ 1 cycle stall + eor r10,r4,r5 @ F_xx_xx + mov r9,r9,ror#31 + add r6,r6,r7,ror#27 @ E+=ROR(A,27) + eor r9,r9,r11,ror#31 + str r9,[r14,#-4]! + and r10,r3,r10,ror#2 @ F_xx_xx + and r11,r4,r5 @ F_xx_xx + add r6,r6,r9 @ E+=X[i] + add r6,r6,r10 @ E+=F_40_59(B,C,D) + add r6,r6,r11,ror#2 + ldr r9,[r14,#15*4] + ldr r10,[r14,#13*4] + ldr r11,[r14,#7*4] + add r5,r8,r5,ror#2 @ E+=K_xx_xx + ldr r12,[r14,#2*4] + eor r9,r9,r10 + eor r11,r11,r12 @ 1 cycle stall + eor r10,r3,r4 @ F_xx_xx + mov r9,r9,ror#31 + add r5,r5,r6,ror#27 @ E+=ROR(A,27) + eor r9,r9,r11,ror#31 + str r9,[r14,#-4]! + and r10,r7,r10,ror#2 @ F_xx_xx + and r11,r3,r4 @ F_xx_xx + add r5,r5,r9 @ E+=X[i] + add r5,r5,r10 @ E+=F_40_59(B,C,D) + add r5,r5,r11,ror#2 + ldr r9,[r14,#15*4] + ldr r10,[r14,#13*4] + ldr r11,[r14,#7*4] + add r4,r8,r4,ror#2 @ E+=K_xx_xx + ldr r12,[r14,#2*4] + eor r9,r9,r10 + eor r11,r11,r12 @ 1 cycle stall + eor r10,r7,r3 @ F_xx_xx + mov r9,r9,ror#31 + add r4,r4,r5,ror#27 @ E+=ROR(A,27) + eor r9,r9,r11,ror#31 + str r9,[r14,#-4]! + and r10,r6,r10,ror#2 @ F_xx_xx + and r11,r7,r3 @ F_xx_xx + add r4,r4,r9 @ E+=X[i] + add r4,r4,r10 @ E+=F_40_59(B,C,D) + add r4,r4,r11,ror#2 + ldr r9,[r14,#15*4] + ldr r10,[r14,#13*4] + ldr r11,[r14,#7*4] + add r3,r8,r3,ror#2 @ E+=K_xx_xx + ldr r12,[r14,#2*4] + eor r9,r9,r10 + eor r11,r11,r12 @ 1 cycle stall + eor r10,r6,r7 @ F_xx_xx + mov r9,r9,ror#31 + add r3,r3,r4,ror#27 @ E+=ROR(A,27) + eor r9,r9,r11,ror#31 + str r9,[r14,#-4]! 
+ and r10,r5,r10,ror#2 @ F_xx_xx + and r11,r6,r7 @ F_xx_xx + add r3,r3,r9 @ E+=X[i] + add r3,r3,r10 @ E+=F_40_59(B,C,D) + add r3,r3,r11,ror#2 + teq r14,sp + bne .L_40_59 @ [+((12+5)*5+2)*4] + + ldr r8,.LK_60_79 + sub sp,sp,#20*4 + cmp sp,#0 @ set carry to denote 60_79 + b .L_20_39_or_60_79 @ [+4], spare 300 bytes +.L_done: + add sp,sp,#80*4 @ "deallocate" stack frame + ldmia r0,{r8,r9,r10,r11,r12} + add r3,r8,r3 + add r4,r9,r4 + add r5,r10,r5,ror#2 + add r6,r11,r6,ror#2 + add r7,r12,r7,ror#2 + stmia r0,{r3,r4,r5,r6,r7} + teq r1,r2 + bne .Lloop @ [+18], total 1307 + +#if __ARM_ARCH__>=5 + ldmia sp!,{r4-r12,pc} +#else + ldmia sp!,{r4-r12,lr} + tst lr,#1 + moveq pc,lr @ be binary compatible with V4, yet + .word 0xe12fff1e @ interoperable with Thumb ISA:-) +#endif +.align 2 +.LK_00_19: .word 0x5a827999 +.LK_20_39: .word 0x6ed9eba1 +.LK_40_59: .word 0x8f1bbcdc +.LK_60_79: .word 0xca62c1d6 +.size sha1_block_data_order,.-sha1_block_data_order +.asciz "SHA1 block transform for ARMv4, CRYPTOGAMS by " +.align 2 diff --git a/arch/arm/crypto/sha1_glue.c b/arch/arm/crypto/sha1_glue.c new file mode 100644 index 000000000000..76cd976230bc --- /dev/null +++ b/arch/arm/crypto/sha1_glue.c @@ -0,0 +1,179 @@ +/* + * Cryptographic API. + * Glue code for the SHA1 Secure Hash Algorithm assembler implementation + * + * This file is based on sha1_generic.c and sha1_ssse3_glue.c + * + * Copyright (c) Alan Smithee. + * Copyright (c) Andrew McDonald + * Copyright (c) Jean-Francois Dive + * Copyright (c) Mathias Krause + * + * This program is free software; you can redistribute it and/or modify it + * under the terms of the GNU General Public License as published by the Free + * Software Foundation; either version 2 of the License, or (at your option) + * any later version. + * + */ + +#include +#include +#include +#include +#include +#include +#include + +struct SHA1_CTX { + uint32_t h0,h1,h2,h3,h4; + u64 count; + u8 data[SHA1_BLOCK_SIZE]; +}; + +asmlinkage void sha1_block_data_order(struct SHA1_CTX *digest, + const unsigned char *data, unsigned int rounds); + + +static int sha1_init(struct shash_desc *desc) +{ + struct SHA1_CTX *sctx = shash_desc_ctx(desc); + memset(sctx, 0, sizeof(*sctx)); + sctx->h0 = SHA1_H0; + sctx->h1 = SHA1_H1; + sctx->h2 = SHA1_H2; + sctx->h3 = SHA1_H3; + sctx->h4 = SHA1_H4; + return 0; +} + + +static int __sha1_update(struct SHA1_CTX *sctx, const u8 *data, + unsigned int len, unsigned int partial) +{ + unsigned int done = 0; + + sctx->count += len; + + if (partial) { + done = SHA1_BLOCK_SIZE - partial; + memcpy(sctx->data + partial, data, done); + sha1_block_data_order(sctx, sctx->data, 1); + } + + if (len - done >= SHA1_BLOCK_SIZE) { + const unsigned int rounds = (len - done) / SHA1_BLOCK_SIZE; + sha1_block_data_order(sctx, data + done, rounds); + done += rounds * SHA1_BLOCK_SIZE; + } + + memcpy(sctx->data, data + done, len - done); + return 0; +} + + +static int sha1_update(struct shash_desc *desc, const u8 *data, + unsigned int len) +{ + struct SHA1_CTX *sctx = shash_desc_ctx(desc); + unsigned int partial = sctx->count % SHA1_BLOCK_SIZE; + int res; + + /* Handle the fast case right here */ + if (partial + len < SHA1_BLOCK_SIZE) { + sctx->count += len; + memcpy(sctx->data + partial, data, len); + return 0; + } + res = __sha1_update(sctx, data, len, partial); + return res; +} + + +/* Add padding and return the message digest. 
*/ +static int sha1_final(struct shash_desc *desc, u8 *out) +{ + struct SHA1_CTX *sctx = shash_desc_ctx(desc); + unsigned int i, index, padlen; + __be32 *dst = (__be32 *)out; + __be64 bits; + static const u8 padding[SHA1_BLOCK_SIZE] = { 0x80, }; + + bits = cpu_to_be64(sctx->count << 3); + + /* Pad out to 56 mod 64 and append length */ + index = sctx->count % SHA1_BLOCK_SIZE; + padlen = (index < 56) ? (56 - index) : ((SHA1_BLOCK_SIZE+56) - index); + /* We need to fill a whole block for __sha1_update() */ + if (padlen <= 56) { + sctx->count += padlen; + memcpy(sctx->data + index, padding, padlen); + } else { + __sha1_update(sctx, padding, padlen, index); + } + __sha1_update(sctx, (const u8 *)&bits, sizeof(bits), 56); + + /* Store state in digest */ + for (i = 0; i < 5; i++) + dst[i] = cpu_to_be32(((u32 *)sctx)[i]); + + /* Wipe context */ + memset(sctx, 0, sizeof(*sctx)); + return 0; +} + + +static int sha1_export(struct shash_desc *desc, void *out) +{ + struct SHA1_CTX *sctx = shash_desc_ctx(desc); + memcpy(out, sctx, sizeof(*sctx)); + return 0; +} + + +static int sha1_import(struct shash_desc *desc, const void *in) +{ + struct SHA1_CTX *sctx = shash_desc_ctx(desc); + memcpy(sctx, in, sizeof(*sctx)); + return 0; +} + + +static struct shash_alg alg = { + .digestsize = SHA1_DIGEST_SIZE, + .init = sha1_init, + .update = sha1_update, + .final = sha1_final, + .export = sha1_export, + .import = sha1_import, + .descsize = sizeof(struct SHA1_CTX), + .statesize = sizeof(struct SHA1_CTX), + .base = { + .cra_name = "sha1", + .cra_driver_name= "sha1-asm", + .cra_priority = 150, + .cra_flags = CRYPTO_ALG_TYPE_SHASH, + .cra_blocksize = SHA1_BLOCK_SIZE, + .cra_module = THIS_MODULE, + } +}; + + +static int __init sha1_mod_init(void) +{ + return crypto_register_shash(&alg); +} + + +static void __exit sha1_mod_fini(void) +{ + crypto_unregister_shash(&alg); +} + + +module_init(sha1_mod_init); +module_exit(sha1_mod_fini); + +MODULE_LICENSE("GPL"); +MODULE_DESCRIPTION("SHA1 Secure Hash Algorithm (ARM)"); +MODULE_ALIAS("sha1"); +MODULE_AUTHOR("David McCullough "); diff --git a/crypto/Kconfig b/crypto/Kconfig index 213fb37be51f..27307981f88e 100644 --- a/crypto/Kconfig +++ b/crypto/Kconfig @@ -433,6 +433,15 @@ config CRYPTO_SHA1_SSSE3 using Supplemental SSE3 (SSSE3) instructions or Advanced Vector Extensions (AVX), when available. +config CRYPTO_SHA1_ARM + tristate "SHA1 digest algorithm (ARM-asm)" + depends on ARM + select CRYPTO_SHA1 + select CRYPTO_HASH + help + SHA-1 secure hash standard (FIPS 180-1/DFIPS 180-2) implemented + using optimized ARM assembler. + config CRYPTO_SHA256 tristate "SHA224 and SHA256 digest algorithm" select CRYPTO_HASH @@ -590,6 +599,30 @@ config CRYPTO_AES_NI_INTEL ECB, CBC, LRW, PCBC, XTS. The 64 bit version has additional acceleration for CTR. +config CRYPTO_AES_ARM + tristate "AES cipher algorithms (ARM-asm)" + depends on ARM + select CRYPTO_ALGAPI + select CRYPTO_AES + help + Use optimized AES assembler routines for ARM platforms. + + AES cipher algorithms (FIPS-197). AES uses the Rijndael + algorithm. + + Rijndael appears to be consistently a very good performer in + both hardware and software across a wide range of computing + environments regardless of its use in feedback or non-feedback + modes. Its key setup time is excellent, and its key agility is + good. Rijndael's very low memory requirements make it very well + suited for restricted-space environments, in which it also + demonstrates excellent performance. 
Rijndael's operations are + among the easiest to defend against power and timing attacks. + + The AES specifies three key sizes: 128, 192 and 256 bits + + See for more information. + config CRYPTO_ANUBIS tristate "Anubis cipher algorithm" select CRYPTO_ALGAPI -- cgit v1.2.3 From 66ce0b0f290b2cf1ac74a30ddd13a9a3e67296ce Mon Sep 17 00:00:00 2001 From: Jussi Kivilinna Date: Tue, 28 Aug 2012 16:46:54 +0300 Subject: crypto: crypto_user - fix sparse warnings (symbol was not declared, should be static?) Fix "symbol 'x' was not declared. Should it be static?" sparse warnings. Signed-off-by: Jussi Kivilinna Signed-off-by: Herbert Xu --- crypto/crypto_user.c | 2 +- crypto/vmac.c | 10 +++++----- 2 files changed, 6 insertions(+), 6 deletions(-) (limited to 'crypto') diff --git a/crypto/crypto_user.c b/crypto/crypto_user.c index ba2c611154af..b590471abfa1 100644 --- a/crypto/crypto_user.c +++ b/crypto/crypto_user.c @@ -30,7 +30,7 @@ #include "internal.h" -DEFINE_MUTEX(crypto_cfg_mutex); +static DEFINE_MUTEX(crypto_cfg_mutex); /* The crypto netlink socket */ static struct sock *crypto_nlsk; diff --git a/crypto/vmac.c b/crypto/vmac.c index 4243905ba135..f2338ca98368 100644 --- a/crypto/vmac.c +++ b/crypto/vmac.c @@ -38,11 +38,11 @@ * Constants and masks */ #define UINT64_C(x) x##ULL -const u64 p64 = UINT64_C(0xfffffffffffffeff); /* 2^64 - 257 prime */ -const u64 m62 = UINT64_C(0x3fffffffffffffff); /* 62-bit mask */ -const u64 m63 = UINT64_C(0x7fffffffffffffff); /* 63-bit mask */ -const u64 m64 = UINT64_C(0xffffffffffffffff); /* 64-bit mask */ -const u64 mpoly = UINT64_C(0x1fffffff1fffffff); /* Poly key mask */ +static const u64 p64 = UINT64_C(0xfffffffffffffeff); /* 2^64 - 257 prime */ +static const u64 m62 = UINT64_C(0x3fffffffffffffff); /* 62-bit mask */ +static const u64 m63 = UINT64_C(0x7fffffffffffffff); /* 63-bit mask */ +static const u64 m64 = UINT64_C(0xffffffffffffffff); /* 64-bit mask */ +static const u64 mpoly = UINT64_C(0x1fffffff1fffffff); /* Poly key mask */ #define pe64_to_cpup le64_to_cpup /* Prefer little endian */ -- cgit v1.2.3 From 312639bb1bc65abca243a6cee6e5364663d2dd7a Mon Sep 17 00:00:00 2001 From: Jussi Kivilinna Date: Tue, 28 Aug 2012 16:49:28 +0300 Subject: Revert "[CRYPTO] cast6: inline bloat--" MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This reverts commit e6ccc727f30a02670f6a00df6d548942bc988f43. Above commit caused performance regression for CAST6. Reverting gives following increase in tcrypt speed tests (revert-vs-old ratios). 
AMD Phenom II X6 1055T, x86-64: size ecb cbc ctr lrw xts enc dec enc dec enc dec enc dec enc dec 16b 1.15x 1.17x 1.16x 1.17x 1.16x 1.16x 1.14x 1.19x 1.05x 1.07x 64b 1.19x 1.23x 1.20x 1.22x 1.19x 1.19x 1.16x 1.24x 1.12x 1.12x 256b 1.21x 1.24x 1.22x 1.24x 1.20x 1.20x 1.17x 1.21x 1.16x 1.14x 1kb 1.21x 1.25x 1.22x 1.24x 1.21x 1.21x 1.18x 1.22x 1.17x 1.15x 8kb 1.21x 1.25x 1.22x 1.24x 1.21x 1.21x 1.18x 1.22x 1.18x 1.15x Cc: Ilpo Järvinen Signed-off-by: Jussi Kivilinna Signed-off-by: Herbert Xu --- crypto/cast6_generic.c | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) (limited to 'crypto') diff --git a/crypto/cast6_generic.c b/crypto/cast6_generic.c index dc9309d70405..1acd2f1c48fc 100644 --- a/crypto/cast6_generic.c +++ b/crypto/cast6_generic.c @@ -370,7 +370,7 @@ static const u8 Tr[4][8] = { }; /* forward octave */ -static void W(u32 *key, unsigned int i) +static inline void W(u32 *key, unsigned int i) { u32 I; key[6] ^= F1(key[7], Tr[i % 4][0], Tm[i][0]); @@ -434,7 +434,7 @@ int cast6_setkey(struct crypto_tfm *tfm, const u8 *key, unsigned int keylen) EXPORT_SYMBOL_GPL(cast6_setkey); /*forward quad round*/ -static void Q(u32 *block, u8 *Kr, u32 *Km) +static inline void Q(u32 *block, u8 *Kr, u32 *Km) { u32 I; block[2] ^= F1(block[3], Kr[0], Km[0]); @@ -444,7 +444,7 @@ static void Q(u32 *block, u8 *Kr, u32 *Km) } /*reverse quad round*/ -static void QBAR(u32 *block, u8 *Kr, u32 *Km) +static inline void QBAR(u32 *block, u8 *Kr, u32 *Km) { u32 I; block[3] ^= F1(block[0], Kr[3], Km[3]); -- cgit v1.2.3 From c2b3711d11d08a34493dec45769d2e96141f8647 Mon Sep 17 00:00:00 2001 From: Jussi Kivilinna Date: Wed, 29 Aug 2012 23:37:25 +0300 Subject: crypto: 842 - remove .cra_list initialization .cra_list initialization is unneeded and have been removed from all other crypto modules except 842. Cc: Robert Jennings Signed-off-by: Jussi Kivilinna Acked-by: Seth Jennings Signed-off-by: Herbert Xu --- crypto/842.c | 1 - 1 file changed, 1 deletion(-) (limited to 'crypto') diff --git a/crypto/842.c b/crypto/842.c index 144767db44e5..65c7a89cfa09 100644 --- a/crypto/842.c +++ b/crypto/842.c @@ -157,7 +157,6 @@ static struct crypto_alg alg = { .cra_flags = CRYPTO_ALG_TYPE_COMPRESS, .cra_ctxsize = sizeof(struct nx842_ctx), .cra_module = THIS_MODULE, - .cra_list = LIST_HEAD_INIT(alg.cra_list), .cra_init = nx842_init, .cra_exit = nx842_exit, .cra_u = { .compress = { -- cgit v1.2.3 From b6f3fefe1fa1e8ea8f6b654e7d552253373cd1c0 Mon Sep 17 00:00:00 2001 From: Suresh Siddha Date: Mon, 17 Sep 2012 10:37:26 -0700 Subject: crypto, tcrypt: remove local_bh_disable/enable() around local_irq_disable/enable() Ran into this while looking at some new crypto code using FPU hitting a WARN_ON_ONCE(!irq_fpu_usable()) in the kernel_fpu_begin() on a x86 kernel that uses the new eagerfpu model. In short, current eagerfpu changes return 0 for interrupted_kernel_fpu_idle() and the in_interrupt() thinks it is in the interrupt context because of the local_bh_disable(). Thus resulting in the WARN_ON(). Remove the local_bh_disable/enable() calls around the existing local_irq_disable/enable() calls. local_irq_disable/enable() already disables the BH. [ If there are any other legitimate users calling kernel_fpu_begin() from the process context but with BH disabled, then we can look into fixing the irq_fpu_usable() in future. 
] Signed-off-by: Suresh Siddha Cc: Tim Chen Signed-off-by: Herbert Xu --- crypto/tcrypt.c | 6 ------ 1 file changed, 6 deletions(-) (limited to 'crypto') diff --git a/crypto/tcrypt.c b/crypto/tcrypt.c index 871076b1e0ab..1ac77b720b2d 100644 --- a/crypto/tcrypt.c +++ b/crypto/tcrypt.c @@ -97,7 +97,6 @@ static int test_cipher_cycles(struct blkcipher_desc *desc, int enc, int ret = 0; int i; - local_bh_disable(); local_irq_disable(); /* Warm-up run. */ @@ -130,7 +129,6 @@ static int test_cipher_cycles(struct blkcipher_desc *desc, int enc, out: local_irq_enable(); - local_bh_enable(); if (ret == 0) printk("1 operation in %lu cycles (%d bytes)\n", @@ -300,7 +298,6 @@ static int test_hash_cycles_digest(struct hash_desc *desc, int i; int ret; - local_bh_disable(); local_irq_disable(); /* Warm-up run. */ @@ -327,7 +324,6 @@ static int test_hash_cycles_digest(struct hash_desc *desc, out: local_irq_enable(); - local_bh_enable(); if (ret) return ret; @@ -348,7 +344,6 @@ static int test_hash_cycles(struct hash_desc *desc, struct scatterlist *sg, if (plen == blen) return test_hash_cycles_digest(desc, sg, blen, out); - local_bh_disable(); local_irq_disable(); /* Warm-up run. */ @@ -391,7 +386,6 @@ static int test_hash_cycles(struct hash_desc *desc, struct scatterlist *sg, out: local_irq_enable(); - local_bh_enable(); if (ret) return ret; -- cgit v1.2.3 From f44d83d19d8fde77bc499131bc9c0bc8583785c8 Mon Sep 17 00:00:00 2001 From: Jussi Kivilinna Date: Wed, 19 Sep 2012 09:42:49 +0300 Subject: crypto: testmgr - allow non-multi page and multi page skcipher tests from same test template Allow non-multi page and multi page skcipher tests to be run on same test template, to avoid duplicating data. Signed-off-by: Jussi Kivilinna Signed-off-by: Herbert Xu --- crypto/testmgr.c | 2 +- crypto/testmgr.h | 170 ++++++++++++++++++++++++++++++++++++++++++++++++++++++- 2 files changed, 170 insertions(+), 2 deletions(-) (limited to 'crypto') diff --git a/crypto/testmgr.c b/crypto/testmgr.c index 575b57c3244b..8183777a49b6 100644 --- a/crypto/testmgr.c +++ b/crypto/testmgr.c @@ -804,7 +804,7 @@ static int test_skcipher(struct crypto_ablkcipher *tfm, int enc, else memset(iv, 0, MAX_IVLEN); - if (!(template[i].np)) { + if (!(template[i].np) || (template[i].also_non_np)) { j++; ret = -EINVAL; diff --git a/crypto/testmgr.h b/crypto/testmgr.h index 6eb3ef5e3f88..70984f5341e2 100644 --- a/crypto/testmgr.h +++ b/crypto/testmgr.h @@ -53,6 +53,7 @@ struct cipher_testvec { char *result; unsigned short tap[MAX_TAP]; int np; + unsigned char also_non_np; unsigned char fail; unsigned char wk; /* weak key flag */ unsigned char klen; @@ -2468,6 +2469,9 @@ static struct cipher_testvec bf_enc_tv_template[] = { "\xC2\xF4\x6D\xFF\xF6\xCD\x6B\x40" "\xE1\xB3\xBF\xD4\x38\x2B\xC8\x3B", .rlen = 40, + .also_non_np = 1, + .np = 2, + .tap = { 40 - 8, 8 }, }, }; @@ -2541,6 +2545,9 @@ static struct cipher_testvec bf_dec_tv_template[] = { "\xAC\x20\xB7\x4E\xE5\x59\xF0\x87" "\x1E\x92\x29\xC0\x34\xCB\x62\xF9", .rlen = 40, + .also_non_np = 1, + .np = 2, + .tap = { 40 - 8, 8 }, }, }; @@ -2579,6 +2586,9 @@ static struct cipher_testvec bf_cbc_enc_tv_template[] = { "\x1B\xD9\x02\xB6\x48\xB0\x87\x25" "\x01\x9C\x93\x63\x51\x60\x82\xD2", .rlen = 40, + .also_non_np = 1, + .np = 2, + .tap = { 40 - 8, 8 }, }, }; @@ -2617,6 +2627,9 @@ static struct cipher_testvec bf_cbc_dec_tv_template[] = { "\xAC\x20\xB7\x4E\xE5\x59\xF0\x87" "\x1E\x92\x29\xC0\x34\xCB\x62\xF9", .rlen = 40, + .also_non_np = 1, + .np = 2, + .tap = { 40 - 8, 8 }, }, }; @@ -2661,6 +2674,9 @@ static struct 
cipher_testvec bf_ctr_enc_tv_template[] = { "\xE4\x1F\x5E\xA5\x89\xAC\x32\xBC" "\x3D\xA7\xE9", .rlen = 43, + .also_non_np = 1, + .np = 2, + .tap = { 43 - 8, 8 }, }, }; @@ -2705,6 +2721,9 @@ static struct cipher_testvec bf_ctr_dec_tv_template[] = { "\x1E\x92\x29\xC0\x34\xCB\x62\xF9" "\x6D\x04\x9B", .rlen = 43, + .also_non_np = 1, + .np = 2, + .tap = { 43 - 8, 8 }, }, }; @@ -2884,6 +2903,9 @@ static struct cipher_testvec tf_enc_tv_template[] = { "\x58\x33\x9B\x78\xC7\x58\x48\x6B" "\x2C\x75\x64\xC4\xCA\xC1\x7E\xD5", .rlen = 496, + .also_non_np = 1, + .np = 2, + .tap = { 496 - 16, 16 }, }, }; @@ -3049,6 +3071,9 @@ static struct cipher_testvec tf_dec_tv_template[] = { "\x6A\x01\x75\x0C\xA3\x17\xAE\x45" "\xDC\x50\xE7\x7E\x15\x89\x20\xB7", .rlen = 496, + .also_non_np = 1, + .np = 2, + .tap = { 496 - 16, 16 }, }, }; @@ -3229,6 +3254,9 @@ static struct cipher_testvec tf_cbc_enc_tv_template[] = { "\x30\x70\x56\xA4\x37\xDD\x7C\xC0" "\x0A\xA3\x30\x10\x26\x25\x41\x2C", .rlen = 496, + .also_non_np = 1, + .np = 2, + .tap = { 496 - 16, 16 }, }, }; @@ -3409,6 +3437,9 @@ static struct cipher_testvec tf_cbc_dec_tv_template[] = { "\x6A\x01\x75\x0C\xA3\x17\xAE\x45" "\xDC\x50\xE7\x7E\x15\x89\x20\xB7", .rlen = 496, + .also_non_np = 1, + .np = 2, + .tap = { 496 - 16, 16 }, }, }; @@ -3683,6 +3714,9 @@ static struct cipher_testvec tf_ctr_enc_tv_template[] = { "\xC5\xC9\x7F\x9E\xCF\x33\x7A\xDF" "\x6C\x82\x9D", .rlen = 499, + .also_non_np = 1, + .np = 2, + .tap = { 499 - 16, 16 }, }, }; @@ -3957,6 +3991,9 @@ static struct cipher_testvec tf_ctr_dec_tv_template[] = { "\xDC\x50\xE7\x7E\x15\x89\x20\xB7" "\x2B\xC2\x59", .rlen = 499, + .also_non_np = 1, + .np = 2, + .tap = { 499 - 16, 16 }, }, }; @@ -4206,6 +4243,9 @@ static struct cipher_testvec tf_lrw_enc_tv_template[] = { "\x80\x18\xc4\x6c\x03\xd3\xb7\xba" "\x11\xd7\xb8\x6e\xea\xe1\x80\x30", .rlen = 512, + .also_non_np = 1, + .np = 2, + .tap = { 512 - 16, 16 }, }, }; @@ -4456,6 +4496,9 @@ static struct cipher_testvec tf_lrw_dec_tv_template[] = { "\xe9\x2e\xc4\x29\x0f\x84\xdb\xc4" "\x21\xc4\xc2\x75\x67\x89\x37\x0a", .rlen = 512, + .also_non_np = 1, + .np = 2, + .tap = { 512 - 16, 16 }, }, }; @@ -4795,6 +4838,9 @@ static struct cipher_testvec tf_xts_enc_tv_template[] = { "\xa4\x05\x0b\xb2\xb3\xa8\x30\x97" "\x37\x30\xe1\x91\x8d\xb3\x2a\xff", .rlen = 512, + .also_non_np = 1, + .np = 2, + .tap = { 512 - 16, 16 }, }, }; @@ -5135,6 +5181,9 @@ static struct cipher_testvec tf_xts_dec_tv_template[] = { "\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7" "\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff", .rlen = 512, + .also_non_np = 1, + .np = 2, + .tap = { 512 - 16, 16 }, }, }; @@ -5242,6 +5291,9 @@ static struct cipher_testvec serpent_enc_tv_template[] = { "\x30\xD4\x2C\xF2\x8E\x06\x4B\x39" "\xB3\x12\x1D\xB3\x17\x46\xE6\xD6", .rlen = 144, + .also_non_np = 1, + .np = 2, + .tap = { 144 - 16, 16 }, }, }; @@ -5377,6 +5429,9 @@ static struct cipher_testvec serpent_dec_tv_template[] = { "\x0D\xA4\x18\xAF\x46\xDD\x51\xE8" "\x7F\x16\x8A\x21\xB8\x2C\xC3\x5A", .rlen = 144, + .also_non_np = 1, + .np = 2, + .tap = { 144 - 16, 16 }, }, }; @@ -5468,6 +5523,9 @@ static struct cipher_testvec serpent_cbc_enc_tv_template[] = { "\x27\xDF\x89\x1D\x86\x3E\xF7\x5A" "\xF6\xE3\x0F\xC7\x6B\x4C\x96\x7C", .rlen = 144, + .also_non_np = 1, + .np = 2, + .tap = { 144 - 16, 16 }, }, }; @@ -5518,6 +5576,9 @@ static struct cipher_testvec serpent_cbc_dec_tv_template[] = { "\x0D\xA4\x18\xAF\x46\xDD\x51\xE8" "\x7F\x16\x8A\x21\xB8\x2C\xC3\x5A", .rlen = 144, + .also_non_np = 1, + .np = 2, + .tap = { 144 - 16, 16 }, }, }; @@ -5616,6 +5677,9 @@ 
static struct cipher_testvec serpent_ctr_enc_tv_template[] = { "\x5D\xE1\x4F\xA1\xEA\xB3\xCA\xB9" "\xE6\xD0\x97", .rlen = 147, + .also_non_np = 1, + .np = 2, + .tap = { 147 - 16, 16 }, }, }; @@ -5714,6 +5778,9 @@ static struct cipher_testvec serpent_ctr_dec_tv_template[] = { "\x7F\x16\x8A\x21\xB8\x2C\xC3\x5A" "\xF1\x65\xFC", .rlen = 147, + .also_non_np = 1, + .np = 2, + .tap = { 147 - 16, 16 }, }, }; @@ -5963,6 +6030,9 @@ static struct cipher_testvec serpent_lrw_enc_tv_template[] = { "\x5c\xc6\x84\xfe\x7c\xcb\x26\xfd" "\xd9\x51\x0f\xd7\x94\x2f\xc5\xa7", .rlen = 512, + .also_non_np = 1, + .np = 2, + .tap = { 512 - 16, 16 }, }, }; @@ -6213,6 +6283,9 @@ static struct cipher_testvec serpent_lrw_dec_tv_template[] = { "\xe9\x2e\xc4\x29\x0f\x84\xdb\xc4" "\x21\xc4\xc2\x75\x67\x89\x37\x0a", .rlen = 512, + .also_non_np = 1, + .np = 2, + .tap = { 512 - 16, 16 }, }, }; @@ -6552,6 +6625,9 @@ static struct cipher_testvec serpent_xts_enc_tv_template[] = { "\xaf\x43\x0b\xc5\x20\x41\x92\x20" "\xd4\xa0\x91\x98\x11\x5f\x4d\xb1", .rlen = 512, + .also_non_np = 1, + .np = 2, + .tap = { 512 - 16, 16 }, }, }; @@ -6892,6 +6968,9 @@ static struct cipher_testvec serpent_xts_dec_tv_template[] = { "\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7" "\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff", .rlen = 512, + .also_non_np = 1, + .np = 2, + .tap = { 512 - 16, 16 }, }, }; @@ -7072,6 +7151,9 @@ static struct cipher_testvec cast6_enc_tv_template[] = { "\x84\x52\x6D\x68\xDE\xC6\x64\xB2" "\x11\x74\x93\x57\xB4\x7E\xC6\x00", .rlen = 496, + .also_non_np = 1, + .np = 2, + .tap = { 496 - 16, 16 }, }, }; @@ -7240,6 +7322,9 @@ static struct cipher_testvec cast6_dec_tv_template[] = { "\x6A\x01\x75\x0C\xA3\x17\xAE\x45" "\xDC\x50\xE7\x7E\x15\x89\x20\xB7", .rlen = 496, + .also_non_np = 1, + .np = 2, + .tap = { 496 - 16, 16 }, }, }; @@ -7378,6 +7463,9 @@ static struct cipher_testvec cast6_cbc_enc_tv_template[] = { "\x4D\x59\x7D\xC5\x28\x69\xFA\x92" "\x22\x46\x89\x2D\x0F\x2B\x08\x24", .rlen = 496, + .also_non_np = 1, + .np = 2, + .tap = { 496 - 16, 16 }, }, }; @@ -7516,6 +7604,9 @@ static struct cipher_testvec cast6_cbc_dec_tv_template[] = { "\x6A\x01\x75\x0C\xA3\x17\xAE\x45" "\xDC\x50\xE7\x7E\x15\x89\x20\xB7", .rlen = 496, + .also_non_np = 1, + .np = 2, + .tap = { 496 - 16, 16 }, }, }; @@ -7654,6 +7745,9 @@ static struct cipher_testvec cast6_ctr_enc_tv_template[] = { "\x0E\x74\x33\x30\x62\xB9\x89\xDF" "\xF9\xC5\xDD\x27\xB3\x39\xCB\xCB", .rlen = 496, + .also_non_np = 1, + .np = 2, + .tap = { 496 - 16, 16 }, }, }; @@ -7792,6 +7886,9 @@ static struct cipher_testvec cast6_ctr_dec_tv_template[] = { "\x6A\x01\x75\x0C\xA3\x17\xAE\x45" "\xDC\x50\xE7\x7E\x15\x89\x20\xB7", .rlen = 496, + .also_non_np = 1, + .np = 2, + .tap = { 496 - 16, 16 }, }, }; @@ -7936,6 +8033,9 @@ static struct cipher_testvec cast6_lrw_enc_tv_template[] = { "\x8D\xD9\xCD\x3B\x22\x67\x18\xC7" "\xC4\xF5\x99\x61\xBC\xBB\x5B\x46", .rlen = 512, + .also_non_np = 1, + .np = 2, + .tap = { 512 - 16, 16 }, }, }; @@ -8080,6 +8180,9 @@ static struct cipher_testvec cast6_lrw_dec_tv_template[] = { "\xe9\x2e\xc4\x29\x0f\x84\xdb\xc4" "\x21\xc4\xc2\x75\x67\x89\x37\x0a", .rlen = 512, + .also_non_np = 1, + .np = 2, + .tap = { 512 - 16, 16 }, }, }; @@ -8226,6 +8329,9 @@ static struct cipher_testvec cast6_xts_enc_tv_template[] = { "\xA1\xAC\xE8\xCF\xC6\x74\xCF\xDC" "\x22\x60\x4E\xE8\xA4\x5D\x85\xB9", .rlen = 512, + .also_non_np = 1, + .np = 2, + .tap = { 512 - 16, 16 }, }, }; @@ -8372,6 +8478,9 @@ static struct cipher_testvec cast6_xts_dec_tv_template[] = { "\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7" 
"\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff", .rlen = 512, + .also_non_np = 1, + .np = 2, + .tap = { 512 - 16, 16 }, }, }; @@ -9721,6 +9830,9 @@ static struct cipher_testvec aes_lrw_enc_tv_template[] = { "\xcd\x7e\x2b\x5d\x43\xea\x42\xe7" "\x74\x3f\x7d\x58\x88\x75\xde\x3e", .rlen = 512, + .also_non_np = 1, + .np = 2, + .tap = { 512 - 16, 16 }, } }; @@ -9972,6 +10084,9 @@ static struct cipher_testvec aes_lrw_dec_tv_template[] = { "\xe9\x2e\xc4\x29\x0f\x84\xdb\xc4" "\x21\xc4\xc2\x75\x67\x89\x37\x0a", .rlen = 512, + .also_non_np = 1, + .np = 2, + .tap = { 512 - 16, 16 }, } }; @@ -10313,6 +10428,9 @@ static struct cipher_testvec aes_xts_enc_tv_template[] = { "\xc4\xf3\x6f\xfd\xa9\xfc\xea\x70" "\xb9\xc6\xe6\x93\xe1\x48\xc1\x51", .rlen = 512, + .also_non_np = 1, + .np = 2, + .tap = { 512 - 16, 16 }, } }; @@ -10654,7 +10772,9 @@ static struct cipher_testvec aes_xts_dec_tv_template[] = { "\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7" "\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff", .rlen = 512, - + .also_non_np = 1, + .np = 2, + .tap = { 512 - 16, 16 }, } }; @@ -13695,6 +13815,9 @@ static struct cipher_testvec cast5_enc_tv_template[] = { "\x4F\xFE\x24\x9C\x9A\x02\xE5\x57" "\xF5\xBC\x25\xD6\x02\x56\x57\x1C", .rlen = 496, + .also_non_np = 1, + .np = 2, + .tap = { 496 - 16, 16 }, }, }; @@ -13853,6 +13976,9 @@ static struct cipher_testvec cast5_dec_tv_template[] = { "\x6A\x01\x75\x0C\xA3\x17\xAE\x45" "\xDC\x50\xE7\x7E\x15\x89\x20\xB7", .rlen = 496, + .also_non_np = 1, + .np = 2, + .tap = { 496 - 16, 16 }, }, }; @@ -13988,6 +14114,9 @@ static struct cipher_testvec cast5_cbc_enc_tv_template[] = { "\x15\x5F\xDB\xE9\xB1\x83\xD2\xE6" "\x1D\x18\x66\x44\x5B\x8F\x14\xEB", .rlen = 496, + .also_non_np = 1, + .np = 2, + .tap = { 496 - 16, 16 }, }, }; @@ -14123,6 +14252,9 @@ static struct cipher_testvec cast5_cbc_dec_tv_template[] = { "\x6A\x01\x75\x0C\xA3\x17\xAE\x45" "\xDC\x50\xE7\x7E\x15\x89\x20\xB7", .rlen = 496, + .also_non_np = 1, + .np = 2, + .tap = { 496 - 16, 16 }, }, }; @@ -14258,6 +14390,9 @@ static struct cipher_testvec cast5_ctr_enc_tv_template[] = { "\x8C\x98\xDB\xDE\xFC\x72\x94\xAA" "\xC0\x0D\x96\xAA\x23\xF8\xFE\x13", .rlen = 496, + .also_non_np = 1, + .np = 2, + .tap = { 496 - 16, 16 }, }, }; @@ -14393,6 +14528,9 @@ static struct cipher_testvec cast5_ctr_dec_tv_template[] = { "\x6A\x01\x75\x0C\xA3\x17\xAE\x45" "\xDC\x50\xE7\x7E\x15\x89\x20\xB7", .rlen = 496, + .also_non_np = 1, + .np = 2, + .tap = { 496 - 16, 16 }, }, }; @@ -15310,6 +15448,9 @@ static struct cipher_testvec camellia_enc_tv_template[] = { "\x0D\xD0\xFD\xC4\x65\xA5\x69\xB9" "\xF1\xF6\xB1\xA5\xB2\x75\x4F\x8A", .rlen = 48, + .also_non_np = 1, + .np = 2, + .tap = { 48 - 16, 16 }, }, }; @@ -15368,6 +15509,9 @@ static struct cipher_testvec camellia_dec_tv_template[] = { "\x1E\x92\x29\xC0\x34\xCB\x62\xF9" "\x6D\x04\x9B\x0F\xA6\x3D\xD4\x48", .rlen = 48, + .also_non_np = 1, + .np = 2, + .tap = { 48 - 16, 16 }, }, }; @@ -15422,6 +15566,9 @@ static struct cipher_testvec camellia_cbc_enc_tv_template[] = { "\xB9\xF9\xC2\x27\x6A\xB6\x31\x27" "\xA6\xAD\xEF\xE5\x5D\xE4\x02\x01", .rlen = 48, + .also_non_np = 1, + .np = 2, + .tap = { 48 - 16, 16 }, }, }; @@ -15476,6 +15623,9 @@ static struct cipher_testvec camellia_cbc_dec_tv_template[] = { "\x1E\x92\x29\xC0\x34\xCB\x62\xF9" "\x6D\x04\x9B\x0F\xA6\x3D\xD4\x48", .rlen = 48, + .also_non_np = 1, + .np = 2, + .tap = { 48 - 16, 16 }, }, }; @@ -15527,6 +15677,9 @@ static struct cipher_testvec camellia_ctr_enc_tv_template[] = { "\x60\xFC\xE8\x94\xE8\xB5\x09\x2C" "\x1E\x43\xEF", .rlen = 51, + .also_non_np = 1, + .np = 2, + .tap = { 51 - 16, 16 
}, }, }; @@ -15578,6 +15731,9 @@ static struct cipher_testvec camellia_ctr_dec_tv_template[] = { "\x6D\x04\x9B\x0F\xA6\x3D\xD4\x48" "\xDF\x76\x0D", .rlen = 51, + .also_non_np = 1, + .np = 2, + .tap = { 51 - 16, 16 }, }, }; @@ -15828,6 +15984,9 @@ static struct cipher_testvec camellia_lrw_enc_tv_template[] = { "\xb2\x1a\xd8\x4c\xbd\x1d\x10\xe9" "\x5a\xa8\x92\x7f\xba\xe6\x0c\x95", .rlen = 512, + .also_non_np = 1, + .np = 2, + .tap = { 512 - 16, 16 }, }, }; @@ -16078,6 +16237,9 @@ static struct cipher_testvec camellia_lrw_dec_tv_template[] = { "\xe9\x2e\xc4\x29\x0f\x84\xdb\xc4" "\x21\xc4\xc2\x75\x67\x89\x37\x0a", .rlen = 512, + .also_non_np = 1, + .np = 2, + .tap = { 512 - 16, 16 }, }, }; @@ -16417,6 +16579,9 @@ static struct cipher_testvec camellia_xts_enc_tv_template[] = { "\xb7\x16\xd8\x12\x5c\xcd\x7d\x4e" "\xd5\xc6\x99\xcc\x4e\x6c\x94\x95", .rlen = 512, + .also_non_np = 1, + .np = 2, + .tap = { 512 - 16, 16 }, }, }; @@ -16757,6 +16922,9 @@ static struct cipher_testvec camellia_xts_dec_tv_template[] = { "\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7" "\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff", .rlen = 512, + .also_non_np = 1, + .np = 2, + .tap = { 512 - 16, 16 }, }, }; -- cgit v1.2.3 From 9cac3a297d8d7a46a2d430913187547b8441a0b5 Mon Sep 17 00:00:00 2001 From: Jussi Kivilinna Date: Wed, 19 Sep 2012 09:42:54 +0300 Subject: crypto: testmgr - add test vectors for partial ctr(cast5) and ctr(cast6) Signed-off-by: Jussi Kivilinna Signed-off-by: Herbert Xu --- crypto/testmgr.h | 66 ++++++++++++++++++++++++++++++++++++++++++++++++++++---- 1 file changed, 62 insertions(+), 4 deletions(-) (limited to 'crypto') diff --git a/crypto/testmgr.h b/crypto/testmgr.h index 70984f5341e2..2257157da66b 100644 --- a/crypto/testmgr.h +++ b/crypto/testmgr.h @@ -6979,8 +6979,8 @@ static struct cipher_testvec serpent_xts_dec_tv_template[] = { #define CAST6_DEC_TEST_VECTORS 4 #define CAST6_CBC_ENC_TEST_VECTORS 1 #define CAST6_CBC_DEC_TEST_VECTORS 1 -#define CAST6_CTR_ENC_TEST_VECTORS 1 -#define CAST6_CTR_DEC_TEST_VECTORS 1 +#define CAST6_CTR_ENC_TEST_VECTORS 2 +#define CAST6_CTR_DEC_TEST_VECTORS 2 #define CAST6_LRW_ENC_TEST_VECTORS 1 #define CAST6_LRW_DEC_TEST_VECTORS 1 #define CAST6_XTS_ENC_TEST_VECTORS 1 @@ -7612,6 +7612,22 @@ static struct cipher_testvec cast6_cbc_dec_tv_template[] = { static struct cipher_testvec cast6_ctr_enc_tv_template[] = { { /* Generated from TF test vectors */ + .key = "\x85\x62\x3F\x1C\xF9\xD6\x1C\xF9" + "\xD6\xB3\x90\x6D\x4A\x90\x6D\x4A" + "\x27\x04\xE1\x27\x04\xE1\xBE\x9B" + "\x78\xBE\x9B\x78\x55\x32\x0F\x55", + .klen = 32, + .iv = "\xE2\x24\x89\xEE\x53\xB8\x1D\x5F" + "\xC4\x29\x8E\xF3\x35\x9A\xFF\x64", + .input = "\x56\xED\x84\x1B\x8F\x26\xBD\x31" + "\xC8\x5F\xF6\x6A\x01\x98\x0C\xA3" + "\x3A", + .ilen = 17, + .result = "\x26\x0A\xF1\xE2\x3F\x8A\xEF\xA3" + "\x53\x9A\x5E\x1B\x2A\x1A\xC6\x0A" + "\x57", + .rlen = 17, + }, { /* Generated from TF test vectors */ .key = "\x85\x62\x3F\x1C\xF9\xD6\x1C\xF9" "\xD6\xB3\x90\x6D\x4A\x90\x6D\x4A" "\x27\x04\xE1\x27\x04\xE1\xBE\x9B" @@ -7753,6 +7769,22 @@ static struct cipher_testvec cast6_ctr_enc_tv_template[] = { static struct cipher_testvec cast6_ctr_dec_tv_template[] = { { /* Generated from TF test vectors */ + .key = "\x85\x62\x3F\x1C\xF9\xD6\x1C\xF9" + "\xD6\xB3\x90\x6D\x4A\x90\x6D\x4A" + "\x27\x04\xE1\x27\x04\xE1\xBE\x9B" + "\x78\xBE\x9B\x78\x55\x32\x0F\x55", + .klen = 32, + .iv = "\xE2\x24\x89\xEE\x53\xB8\x1D\x5F" + "\xC4\x29\x8E\xF3\x35\x9A\xFF\x64", + .input = "\x26\x0A\xF1\xE2\x3F\x8A\xEF\xA3" + "\x53\x9A\x5E\x1B\x2A\x1A\xC6\x0A" + "\x57", + .ilen = 17, + .result 
= "\x56\xED\x84\x1B\x8F\x26\xBD\x31" + "\xC8\x5F\xF6\x6A\x01\x98\x0C\xA3" + "\x3A", + .rlen = 17, + }, { /* Generated from TF test vectors */ .key = "\x85\x62\x3F\x1C\xF9\xD6\x1C\xF9" "\xD6\xB3\x90\x6D\x4A\x90\x6D\x4A" "\x27\x04\xE1\x27\x04\xE1\xBE\x9B" @@ -13657,8 +13689,8 @@ static struct cprng_testvec ansi_cprng_aes_tv_template[] = { #define CAST5_DEC_TEST_VECTORS 4 #define CAST5_CBC_ENC_TEST_VECTORS 1 #define CAST5_CBC_DEC_TEST_VECTORS 1 -#define CAST5_CTR_ENC_TEST_VECTORS 1 -#define CAST5_CTR_DEC_TEST_VECTORS 1 +#define CAST5_CTR_ENC_TEST_VECTORS 2 +#define CAST5_CTR_DEC_TEST_VECTORS 2 static struct cipher_testvec cast5_enc_tv_template[] = { { @@ -14260,6 +14292,19 @@ static struct cipher_testvec cast5_cbc_dec_tv_template[] = { static struct cipher_testvec cast5_ctr_enc_tv_template[] = { { /* Generated from TF test vectors */ + .key = "\x85\x62\x3F\x1C\xF9\xD6\x1C\xF9" + "\xD6\xB3\x90\x6D\x4A\x90\x6D\x4A", + .klen = 16, + .iv = "\xE2\x24\x89\xEE\x53\xB8\x1D\x5F", + .input = "\x56\xED\x84\x1B\x8F\x26\xBD\x31" + "\xC8\x5F\xF6\x6A\x01\x98\x0C\xA3" + "\x3A", + .ilen = 17, + .result = "\xFF\xC4\x2E\x82\x3D\xF8\xA8\x39" + "\x7C\x52\xC4\xD3\xBB\x62\xC6\xA8" + "\x0C", + .rlen = 17, + }, { /* Generated from TF test vectors */ .key = "\x85\x62\x3F\x1C\xF9\xD6\x1C\xF9" "\xD6\xB3\x90\x6D\x4A\x90\x6D\x4A", .klen = 16, @@ -14398,6 +14443,19 @@ static struct cipher_testvec cast5_ctr_enc_tv_template[] = { static struct cipher_testvec cast5_ctr_dec_tv_template[] = { { /* Generated from TF test vectors */ + .key = "\x85\x62\x3F\x1C\xF9\xD6\x1C\xF9" + "\xD6\xB3\x90\x6D\x4A\x90\x6D\x4A", + .klen = 16, + .iv = "\xE2\x24\x89\xEE\x53\xB8\x1D\x5F", + .input = "\xFF\xC4\x2E\x82\x3D\xF8\xA8\x39" + "\x7C\x52\xC4\xD3\xBB\x62\xC6\xA8" + "\x0C", + .ilen = 17, + .result = "\x56\xED\x84\x1B\x8F\x26\xBD\x31" + "\xC8\x5F\xF6\x6A\x01\x98\x0C\xA3" + "\x3A", + .rlen = 17, + }, { /* Generated from TF test vectors */ .key = "\x85\x62\x3F\x1C\xF9\xD6\x1C\xF9" "\xD6\xB3\x90\x6D\x4A\x90\x6D\x4A", .klen = 16, -- cgit v1.2.3 From 549595a0c782c1271cbcda51b6d07a95de1f894d Mon Sep 17 00:00:00 2001 From: Jussi Kivilinna Date: Wed, 19 Sep 2012 09:42:59 +0300 Subject: crypto: testmgr - add test vectors for CTR mode IV increasement More precisely, test 'long word' and 'long long word' overflow and carry handling. 
Signed-off-by: Jussi Kivilinna Signed-off-by: Herbert Xu --- crypto/testmgr.h | 1075 +++++++++++++++++++++++++++++++++++++++++++++++++++++- 1 file changed, 1074 insertions(+), 1 deletion(-) (limited to 'crypto') diff --git a/crypto/testmgr.h b/crypto/testmgr.h index 2257157da66b..76d7f6cc82f5 100644 --- a/crypto/testmgr.h +++ b/crypto/testmgr.h @@ -2677,6 +2677,141 @@ static struct cipher_testvec bf_ctr_enc_tv_template[] = { .also_non_np = 1, .np = 2, .tap = { 43 - 8, 8 }, + }, { /* Generated with Crypto++ */ + .key = "\x85\x62\x3F\x1C\xF9\xD6\x1C\xF9" + "\xD6\xB3\x90\x6D\x4A\x90\x6D\x4A" + "\x27\x04\xE1\x27\x04\xE1\xBE\x9B" + "\x78\xBE\x9B\x78\x55\x32\x0F\x55", + .klen = 32, + .iv = "\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFD", + .input = "\x56\xED\x84\x1B\x8F\x26\xBD\x31" + "\xC8\x5F\xF6\x6A\x01\x98\x0C\xA3" + "\x3A\xD1\x45\xDC\x73\x0A\x7E\x15" + "\xAC\x20\xB7\x4E\xE5\x59\xF0\x87" + "\x1E\x92\x29\xC0\x34\xCB\x62\xF9" + "\x6D\x04\x9B\x0F\xA6\x3D\xD4\x48" + "\xDF\x76\x0D\x81\x18\xAF\x23\xBA" + "\x51\xE8\x5C\xF3\x8A\x21\x95\x2C" + "\xC3\x37\xCE\x65\xFC\x70\x07\x9E" + "\x12\xA9\x40\xD7\x4B\xE2\x79\x10" + "\x84\x1B\xB2\x26\xBD\x54\xEB\x5F" + "\xF6\x8D\x01\x98\x2F\xC6\x3A\xD1" + "\x68\xFF\x73\x0A\xA1\x15\xAC\x43" + "\xDA\x4E\xE5\x7C\x13\x87\x1E\xB5" + "\x29\xC0\x57\xEE\x62\xF9\x90\x04" + "\x9B\x32\xC9\x3D\xD4\x6B\x02\x76" + "\x0D\xA4\x18\xAF\x46\xDD\x51\xE8" + "\x7F\x16\x8A\x21\xB8\x2C\xC3\x5A" + "\xF1\x65\xFC\x93\x07\x9E\x35\xCC" + "\x40\xD7\x6E\x05\x79\x10\xA7\x1B" + "\xB2\x49\xE0\x54\xEB\x82\x19\x8D" + "\x24\xBB\x2F\xC6\x5D\xF4\x68\xFF" + "\x96\x0A\xA1\x38\xCF\x43\xDA\x71" + "\x08\x7C\x13\xAA\x1E\xB5\x4C\xE3" + "\x57\xEE\x85\x1C\x90\x27\xBE\x32" + "\xC9\x60\xF7\x6B\x02\x99\x0D\xA4" + "\x3B\xD2\x46\xDD\x74\x0B\x7F\x16" + "\xAD\x21\xB8\x4F\xE6\x5A\xF1\x88" + "\x1F\x93\x2A\xC1\x35\xCC\x63\xFA" + "\x6E\x05\x9C\x10\xA7\x3E\xD5\x49" + "\xE0\x77\x0E\x82\x19\xB0\x24\xBB" + "\x52\xE9\x5D\xF4\x8B\x22\x96\x2D" + "\xC4\x38\xCF\x66\xFD\x71\x08\x9F" + "\x13\xAA\x41\xD8\x4C\xE3\x7A\x11" + "\x85\x1C\xB3\x27\xBE\x55\xEC\x60" + "\xF7\x8E\x02\x99\x30\xC7\x3B\xD2" + "\x69\x00\x74\x0B\xA2\x16\xAD\x44" + "\xDB\x4F\xE6\x7D\x14\x88\x1F\xB6" + "\x2A\xC1\x58\xEF\x63\xFA\x91\x05" + "\x9C\x33\xCA\x3E\xD5\x6C\x03\x77" + "\x0E\xA5\x19\xB0\x47\xDE\x52\xE9" + "\x80\x17\x8B\x22\xB9\x2D\xC4\x5B" + "\xF2\x66\xFD\x94\x08\x9F\x36\xCD" + "\x41\xD8\x6F\x06\x7A\x11\xA8\x1C" + "\xB3\x4A\xE1\x55\xEC\x83\x1A\x8E" + "\x25\xBC\x30\xC7\x5E\xF5\x69\x00" + "\x97\x0B\xA2\x39\xD0\x44\xDB\x72" + "\x09\x7D\x14\xAB\x1F\xB6\x4D\xE4" + "\x58\xEF\x86\x1D\x91\x28\xBF\x33" + "\xCA\x61\xF8\x6C\x03\x9A\x0E\xA5" + "\x3C\xD3\x47\xDE\x75\x0C\x80\x17" + "\xAE\x22\xB9\x50\xE7\x5B\xF2\x89" + "\x20\x94\x2B\xC2\x36\xCD\x64\xFB" + "\x6F\x06\x9D\x11\xA8\x3F\xD6\x4A" + "\xE1\x78\x0F\x83\x1A\xB1\x25\xBC" + "\x53\xEA\x5E\xF5\x8C\x00\x97\x2E" + "\xC5\x39\xD0\x67\xFE\x72\x09\xA0" + "\x14\xAB\x42\xD9\x4D\xE4\x7B\x12" + "\x86\x1D\xB4\x28\xBF\x56\xED\x61" + "\xF8\x8F\x03\x9A\x31\xC8\x3C\xD3" + "\x6A\x01\x75\x0C\xA3\x17\xAE\x45" + "\xDC\x50\xE7\x7E\x15\x89\x20\xB7" + "\x2B\xC2\x59\xF0\x64\xFB\x92\x06", + .ilen = 504, + .result = "\x5F\x58\x6E\x60\x51\x6E\xDC\x3D" + "\xD1\xBB\xF7\xB7\xFD\x04\x44\x82" + "\xDC\x9F\x4B\x02\xF1\xD2\x5A\x6F" + "\x25\xF9\x27\x21\xF2\xD2\x9A\x01" + "\xBD\xAD\x3D\x93\x87\xCA\x0D\xFE" + "\xB7\x2C\x17\x1F\x42\x8C\x13\xB2" + "\x62\x44\x72\xB9\x5D\xC0\xF8\x37" + "\xDF\xEA\x78\x81\x8F\xA6\x34\xB2" + "\x07\x09\x7C\xB9\x3A\xA0\x2B\x18" + "\x34\x6A\x9D\x3D\xA5\xEB\xF4\x60" + "\xF8\x98\xA2\x39\x81\x23\x6C\xA9" + "\x70\xCA\xCC\x45\xD8\x1F\xDF\x44" + 
"\x2A\x67\x7A\x88\x28\xDC\x36\x83" + "\x18\xD7\x48\x43\x17\x2B\x1B\xE6" + "\x0B\x82\x59\x14\x26\x67\x08\x09" + "\x5B\x5D\x38\xD0\x81\xCE\x54\x2A" + "\xCD\x22\x94\x42\xF5\xBA\x74\x7E" + "\xD9\x00\x40\xA9\x0D\x0B\xBD\x8E" + "\xC4\x8E\x5E\x17\x8F\x48\xE2\xB8" + "\xF4\xCC\x19\x76\xAB\x48\x29\xAA" + "\x81\xD5\xCE\xD5\x8A\x3B\xC9\x21" + "\xEF\x50\x4F\x04\x02\xBF\xE1\x1F" + "\x59\x28\x1A\xE4\x18\x16\xA0\x29" + "\xBF\x34\xA9\x2D\x28\x83\xC0\x5E" + "\xEA\x44\xC4\x6E\xAB\x24\x79\x9D" + "\x2D\xA1\xE8\x55\xCA\x74\xFC\xBD" + "\xFE\xDD\xDA\xA5\xFB\x34\x90\x31" + "\x0E\x62\x28\x9B\xDC\xD7\xA1\xBB" + "\xF0\x1A\xB3\xE2\xD0\xFA\xBD\xE8" + "\x5C\x5A\x10\x67\xF6\x6A\x17\x3F" + "\xC5\xE9\x09\x08\xDD\x22\x77\x42" + "\x26\x6A\x6A\x7A\x3F\x87\x80\x0C" + "\xF0\xFF\x15\x8E\x84\x86\xC0\x10" + "\x0F\x8D\x33\x06\xB8\x72\xA4\x47" + "\x6B\xED\x2E\x05\x94\x6C\x5C\x5B" + "\x13\xF6\x77\xEE\x3B\x16\xDF\xC2" + "\x63\x66\x07\x6D\x3F\x6C\x51\x7C" + "\x1C\xAC\x80\xB6\x58\x48\xB7\x9D" + "\xB4\x19\xD8\x19\x45\x66\x27\x02" + "\xA1\xA9\x99\xF3\x1F\xE5\xA7\x1D" + "\x31\xE7\x1B\x0D\xFF\xBB\xB5\xA1" + "\xF5\x9C\x45\x1E\x18\x19\xA1\xE7" + "\xC2\xF1\xBF\x68\xC3\xEC\xCF\x53" + "\x67\xA6\x2B\x7D\x3C\x6D\x24\xC3" + "\xE8\xE6\x07\x5A\x09\xE0\x32\xA8" + "\x52\xF6\xE9\xED\x0E\xC6\x0A\x6A" + "\xFC\x60\x2A\xE0\x93\xCE\xB8\x2E" + "\xA2\xA8\x0E\x79\x9E\x34\x5D\x37" + "\x6F\x12\xFE\x48\x7B\xE7\xB9\x22" + "\x29\xE8\xD7\xBE\x5D\xD1\x8B\xD9" + "\x91\x51\x4E\x71\xF2\x98\x85\x16" + "\x25\x7A\x76\x8A\x51\x0E\x65\x14" + "\x81\xB5\x3A\x37\xFD\xEC\xB5\x8A" + "\xE1\xCF\x41\x72\x14\x29\x4C\xF0" + "\x20\xD9\x9A\xC5\x66\xA4\x03\x76" + "\x5B\xA4\x15\x4F\x0E\x64\x39\x40" + "\x25\xF9\x20\x22\xF5\x88\xF5\xBA" + "\xE4\xDF\x45\x61\xBF\x8D\x7A\x24" + "\x4B\x92\x71\xD9\x2F\x77\xA7\x95" + "\xA8\x7F\x61\xD5\xA4\x57\xB0\xFB" + "\xB5\x77\xBA\x1C\xEE\x71\xFA\xB0" + "\x16\x4C\x18\x6B\xF2\x69\xA0\x07" + "\xEF\xBE\xEC\x69\xAC\xA8\x63\x9E", + .rlen = 504, }, }; @@ -2724,6 +2859,141 @@ static struct cipher_testvec bf_ctr_dec_tv_template[] = { .also_non_np = 1, .np = 2, .tap = { 43 - 8, 8 }, + }, { /* Generated with Crypto++ */ + .key = "\x85\x62\x3F\x1C\xF9\xD6\x1C\xF9" + "\xD6\xB3\x90\x6D\x4A\x90\x6D\x4A" + "\x27\x04\xE1\x27\x04\xE1\xBE\x9B" + "\x78\xBE\x9B\x78\x55\x32\x0F\x55", + .klen = 32, + .iv = "\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFD", + .input = "\x5F\x58\x6E\x60\x51\x6E\xDC\x3D" + "\xD1\xBB\xF7\xB7\xFD\x04\x44\x82" + "\xDC\x9F\x4B\x02\xF1\xD2\x5A\x6F" + "\x25\xF9\x27\x21\xF2\xD2\x9A\x01" + "\xBD\xAD\x3D\x93\x87\xCA\x0D\xFE" + "\xB7\x2C\x17\x1F\x42\x8C\x13\xB2" + "\x62\x44\x72\xB9\x5D\xC0\xF8\x37" + "\xDF\xEA\x78\x81\x8F\xA6\x34\xB2" + "\x07\x09\x7C\xB9\x3A\xA0\x2B\x18" + "\x34\x6A\x9D\x3D\xA5\xEB\xF4\x60" + "\xF8\x98\xA2\x39\x81\x23\x6C\xA9" + "\x70\xCA\xCC\x45\xD8\x1F\xDF\x44" + "\x2A\x67\x7A\x88\x28\xDC\x36\x83" + "\x18\xD7\x48\x43\x17\x2B\x1B\xE6" + "\x0B\x82\x59\x14\x26\x67\x08\x09" + "\x5B\x5D\x38\xD0\x81\xCE\x54\x2A" + "\xCD\x22\x94\x42\xF5\xBA\x74\x7E" + "\xD9\x00\x40\xA9\x0D\x0B\xBD\x8E" + "\xC4\x8E\x5E\x17\x8F\x48\xE2\xB8" + "\xF4\xCC\x19\x76\xAB\x48\x29\xAA" + "\x81\xD5\xCE\xD5\x8A\x3B\xC9\x21" + "\xEF\x50\x4F\x04\x02\xBF\xE1\x1F" + "\x59\x28\x1A\xE4\x18\x16\xA0\x29" + "\xBF\x34\xA9\x2D\x28\x83\xC0\x5E" + "\xEA\x44\xC4\x6E\xAB\x24\x79\x9D" + "\x2D\xA1\xE8\x55\xCA\x74\xFC\xBD" + "\xFE\xDD\xDA\xA5\xFB\x34\x90\x31" + "\x0E\x62\x28\x9B\xDC\xD7\xA1\xBB" + "\xF0\x1A\xB3\xE2\xD0\xFA\xBD\xE8" + "\x5C\x5A\x10\x67\xF6\x6A\x17\x3F" + "\xC5\xE9\x09\x08\xDD\x22\x77\x42" + "\x26\x6A\x6A\x7A\x3F\x87\x80\x0C" + "\xF0\xFF\x15\x8E\x84\x86\xC0\x10" + 
"\x0F\x8D\x33\x06\xB8\x72\xA4\x47" + "\x6B\xED\x2E\x05\x94\x6C\x5C\x5B" + "\x13\xF6\x77\xEE\x3B\x16\xDF\xC2" + "\x63\x66\x07\x6D\x3F\x6C\x51\x7C" + "\x1C\xAC\x80\xB6\x58\x48\xB7\x9D" + "\xB4\x19\xD8\x19\x45\x66\x27\x02" + "\xA1\xA9\x99\xF3\x1F\xE5\xA7\x1D" + "\x31\xE7\x1B\x0D\xFF\xBB\xB5\xA1" + "\xF5\x9C\x45\x1E\x18\x19\xA1\xE7" + "\xC2\xF1\xBF\x68\xC3\xEC\xCF\x53" + "\x67\xA6\x2B\x7D\x3C\x6D\x24\xC3" + "\xE8\xE6\x07\x5A\x09\xE0\x32\xA8" + "\x52\xF6\xE9\xED\x0E\xC6\x0A\x6A" + "\xFC\x60\x2A\xE0\x93\xCE\xB8\x2E" + "\xA2\xA8\x0E\x79\x9E\x34\x5D\x37" + "\x6F\x12\xFE\x48\x7B\xE7\xB9\x22" + "\x29\xE8\xD7\xBE\x5D\xD1\x8B\xD9" + "\x91\x51\x4E\x71\xF2\x98\x85\x16" + "\x25\x7A\x76\x8A\x51\x0E\x65\x14" + "\x81\xB5\x3A\x37\xFD\xEC\xB5\x8A" + "\xE1\xCF\x41\x72\x14\x29\x4C\xF0" + "\x20\xD9\x9A\xC5\x66\xA4\x03\x76" + "\x5B\xA4\x15\x4F\x0E\x64\x39\x40" + "\x25\xF9\x20\x22\xF5\x88\xF5\xBA" + "\xE4\xDF\x45\x61\xBF\x8D\x7A\x24" + "\x4B\x92\x71\xD9\x2F\x77\xA7\x95" + "\xA8\x7F\x61\xD5\xA4\x57\xB0\xFB" + "\xB5\x77\xBA\x1C\xEE\x71\xFA\xB0" + "\x16\x4C\x18\x6B\xF2\x69\xA0\x07" + "\xEF\xBE\xEC\x69\xAC\xA8\x63\x9E", + .ilen = 504, + .result = "\x56\xED\x84\x1B\x8F\x26\xBD\x31" + "\xC8\x5F\xF6\x6A\x01\x98\x0C\xA3" + "\x3A\xD1\x45\xDC\x73\x0A\x7E\x15" + "\xAC\x20\xB7\x4E\xE5\x59\xF0\x87" + "\x1E\x92\x29\xC0\x34\xCB\x62\xF9" + "\x6D\x04\x9B\x0F\xA6\x3D\xD4\x48" + "\xDF\x76\x0D\x81\x18\xAF\x23\xBA" + "\x51\xE8\x5C\xF3\x8A\x21\x95\x2C" + "\xC3\x37\xCE\x65\xFC\x70\x07\x9E" + "\x12\xA9\x40\xD7\x4B\xE2\x79\x10" + "\x84\x1B\xB2\x26\xBD\x54\xEB\x5F" + "\xF6\x8D\x01\x98\x2F\xC6\x3A\xD1" + "\x68\xFF\x73\x0A\xA1\x15\xAC\x43" + "\xDA\x4E\xE5\x7C\x13\x87\x1E\xB5" + "\x29\xC0\x57\xEE\x62\xF9\x90\x04" + "\x9B\x32\xC9\x3D\xD4\x6B\x02\x76" + "\x0D\xA4\x18\xAF\x46\xDD\x51\xE8" + "\x7F\x16\x8A\x21\xB8\x2C\xC3\x5A" + "\xF1\x65\xFC\x93\x07\x9E\x35\xCC" + "\x40\xD7\x6E\x05\x79\x10\xA7\x1B" + "\xB2\x49\xE0\x54\xEB\x82\x19\x8D" + "\x24\xBB\x2F\xC6\x5D\xF4\x68\xFF" + "\x96\x0A\xA1\x38\xCF\x43\xDA\x71" + "\x08\x7C\x13\xAA\x1E\xB5\x4C\xE3" + "\x57\xEE\x85\x1C\x90\x27\xBE\x32" + "\xC9\x60\xF7\x6B\x02\x99\x0D\xA4" + "\x3B\xD2\x46\xDD\x74\x0B\x7F\x16" + "\xAD\x21\xB8\x4F\xE6\x5A\xF1\x88" + "\x1F\x93\x2A\xC1\x35\xCC\x63\xFA" + "\x6E\x05\x9C\x10\xA7\x3E\xD5\x49" + "\xE0\x77\x0E\x82\x19\xB0\x24\xBB" + "\x52\xE9\x5D\xF4\x8B\x22\x96\x2D" + "\xC4\x38\xCF\x66\xFD\x71\x08\x9F" + "\x13\xAA\x41\xD8\x4C\xE3\x7A\x11" + "\x85\x1C\xB3\x27\xBE\x55\xEC\x60" + "\xF7\x8E\x02\x99\x30\xC7\x3B\xD2" + "\x69\x00\x74\x0B\xA2\x16\xAD\x44" + "\xDB\x4F\xE6\x7D\x14\x88\x1F\xB6" + "\x2A\xC1\x58\xEF\x63\xFA\x91\x05" + "\x9C\x33\xCA\x3E\xD5\x6C\x03\x77" + "\x0E\xA5\x19\xB0\x47\xDE\x52\xE9" + "\x80\x17\x8B\x22\xB9\x2D\xC4\x5B" + "\xF2\x66\xFD\x94\x08\x9F\x36\xCD" + "\x41\xD8\x6F\x06\x7A\x11\xA8\x1C" + "\xB3\x4A\xE1\x55\xEC\x83\x1A\x8E" + "\x25\xBC\x30\xC7\x5E\xF5\x69\x00" + "\x97\x0B\xA2\x39\xD0\x44\xDB\x72" + "\x09\x7D\x14\xAB\x1F\xB6\x4D\xE4" + "\x58\xEF\x86\x1D\x91\x28\xBF\x33" + "\xCA\x61\xF8\x6C\x03\x9A\x0E\xA5" + "\x3C\xD3\x47\xDE\x75\x0C\x80\x17" + "\xAE\x22\xB9\x50\xE7\x5B\xF2\x89" + "\x20\x94\x2B\xC2\x36\xCD\x64\xFB" + "\x6F\x06\x9D\x11\xA8\x3F\xD6\x4A" + "\xE1\x78\x0F\x83\x1A\xB1\x25\xBC" + "\x53\xEA\x5E\xF5\x8C\x00\x97\x2E" + "\xC5\x39\xD0\x67\xFE\x72\x09\xA0" + "\x14\xAB\x42\xD9\x4D\xE4\x7B\x12" + "\x86\x1D\xB4\x28\xBF\x56\xED\x61" + "\xF8\x8F\x03\x9A\x31\xC8\x3C\xD3" + "\x6A\x01\x75\x0C\xA3\x17\xAE\x45" + "\xDC\x50\xE7\x7E\x15\x89\x20\xB7" + "\x2B\xC2\x59\xF0\x64\xFB\x92\x06", + .rlen = 504, }, }; @@ -3578,6 +3848,140 @@ static struct cipher_testvec 
tf_ctr_enc_tv_template[] = { "\xB9\x32\xE2\xC1\x82\xAC\xFE\xCC" "\xC5\xC9\x7F\x9E\xCF\x33\x7A\xDF", .rlen = 496, + }, { /* Generated with Crypto++ */ + .key = "\x85\x62\x3F\x1C\xF9\xD6\x1C\xF9" + "\xD6\xB3\x90\x6D\x4A\x90\x6D\x4A" + "\x27\x04\xE1\x27\x04\xE1\xBE\x9B" + "\x78\xBE\x9B\x78\x55\x32\x0F\x55", + .klen = 32, + .iv = "\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF" + "\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFD", + .input = "\x56\xED\x84\x1B\x8F\x26\xBD\x31" + "\xC8\x5F\xF6\x6A\x01\x98\x0C\xA3" + "\x3A\xD1\x45\xDC\x73\x0A\x7E\x15" + "\xAC\x20\xB7\x4E\xE5\x59\xF0\x87" + "\x1E\x92\x29\xC0\x34\xCB\x62\xF9" + "\x6D\x04\x9B\x0F\xA6\x3D\xD4\x48" + "\xDF\x76\x0D\x81\x18\xAF\x23\xBA" + "\x51\xE8\x5C\xF3\x8A\x21\x95\x2C" + "\xC3\x37\xCE\x65\xFC\x70\x07\x9E" + "\x12\xA9\x40\xD7\x4B\xE2\x79\x10" + "\x84\x1B\xB2\x26\xBD\x54\xEB\x5F" + "\xF6\x8D\x01\x98\x2F\xC6\x3A\xD1" + "\x68\xFF\x73\x0A\xA1\x15\xAC\x43" + "\xDA\x4E\xE5\x7C\x13\x87\x1E\xB5" + "\x29\xC0\x57\xEE\x62\xF9\x90\x04" + "\x9B\x32\xC9\x3D\xD4\x6B\x02\x76" + "\x0D\xA4\x18\xAF\x46\xDD\x51\xE8" + "\x7F\x16\x8A\x21\xB8\x2C\xC3\x5A" + "\xF1\x65\xFC\x93\x07\x9E\x35\xCC" + "\x40\xD7\x6E\x05\x79\x10\xA7\x1B" + "\xB2\x49\xE0\x54\xEB\x82\x19\x8D" + "\x24\xBB\x2F\xC6\x5D\xF4\x68\xFF" + "\x96\x0A\xA1\x38\xCF\x43\xDA\x71" + "\x08\x7C\x13\xAA\x1E\xB5\x4C\xE3" + "\x57\xEE\x85\x1C\x90\x27\xBE\x32" + "\xC9\x60\xF7\x6B\x02\x99\x0D\xA4" + "\x3B\xD2\x46\xDD\x74\x0B\x7F\x16" + "\xAD\x21\xB8\x4F\xE6\x5A\xF1\x88" + "\x1F\x93\x2A\xC1\x35\xCC\x63\xFA" + "\x6E\x05\x9C\x10\xA7\x3E\xD5\x49" + "\xE0\x77\x0E\x82\x19\xB0\x24\xBB" + "\x52\xE9\x5D\xF4\x8B\x22\x96\x2D" + "\xC4\x38\xCF\x66\xFD\x71\x08\x9F" + "\x13\xAA\x41\xD8\x4C\xE3\x7A\x11" + "\x85\x1C\xB3\x27\xBE\x55\xEC\x60" + "\xF7\x8E\x02\x99\x30\xC7\x3B\xD2" + "\x69\x00\x74\x0B\xA2\x16\xAD\x44" + "\xDB\x4F\xE6\x7D\x14\x88\x1F\xB6" + "\x2A\xC1\x58\xEF\x63\xFA\x91\x05" + "\x9C\x33\xCA\x3E\xD5\x6C\x03\x77" + "\x0E\xA5\x19\xB0\x47\xDE\x52\xE9" + "\x80\x17\x8B\x22\xB9\x2D\xC4\x5B" + "\xF2\x66\xFD\x94\x08\x9F\x36\xCD" + "\x41\xD8\x6F\x06\x7A\x11\xA8\x1C" + "\xB3\x4A\xE1\x55\xEC\x83\x1A\x8E" + "\x25\xBC\x30\xC7\x5E\xF5\x69\x00" + "\x97\x0B\xA2\x39\xD0\x44\xDB\x72" + "\x09\x7D\x14\xAB\x1F\xB6\x4D\xE4" + "\x58\xEF\x86\x1D\x91\x28\xBF\x33" + "\xCA\x61\xF8\x6C\x03\x9A\x0E\xA5" + "\x3C\xD3\x47\xDE\x75\x0C\x80\x17" + "\xAE\x22\xB9\x50\xE7\x5B\xF2\x89" + "\x20\x94\x2B\xC2\x36\xCD\x64\xFB" + "\x6F\x06\x9D\x11\xA8\x3F\xD6\x4A" + "\xE1\x78\x0F\x83\x1A\xB1\x25\xBC" + "\x53\xEA\x5E\xF5\x8C\x00\x97\x2E" + "\xC5\x39\xD0\x67\xFE\x72\x09\xA0" + "\x14\xAB\x42\xD9\x4D\xE4\x7B\x12" + "\x86\x1D\xB4\x28\xBF\x56\xED\x61" + "\xF8\x8F\x03\x9A\x31\xC8\x3C\xD3" + "\x6A\x01\x75\x0C\xA3\x17\xAE\x45" + "\xDC\x50\xE7\x7E\x15\x89\x20\xB7", + .ilen = 496, + .result = "\xEB\x44\xAF\x49\x27\xB8\xFB\x44" + "\x4C\xA6\xC3\x0C\x8B\xD0\x01\x0C" + "\x53\xC8\x16\x38\xDE\x40\x4F\x91" + "\x25\x6D\x4C\xA0\x9A\x87\x1E\xDA" + "\x88\x7E\x89\xE9\x67\x2B\x83\xA2" + "\x5F\x2E\x23\x3E\x45\xB9\x77\x7B" + "\xA6\x7E\x47\x36\x81\x9F\x9B\xF3" + "\xE0\xF0\xD7\x47\xA9\xC8\xEF\x33" + "\x0C\x43\xFE\x67\x50\x0A\x2C\x3E" + "\xA0\xE1\x25\x8E\x80\x07\x4A\xC0" + "\x64\x89\x9F\x6A\x27\x96\x07\xA6" + "\x9B\xC8\x1B\x21\x60\xAE\x5D\x01" + "\xE2\xCD\xC8\xAA\x6C\x9D\x1C\x34" + "\x39\x18\x09\xA4\x82\x59\x78\xE7" + "\xFC\x59\x65\xF2\x94\xFF\xFB\xE2" + "\x3C\xDA\xB1\x90\x95\xBF\x91\xE3" + "\xE6\x87\x31\x9E\x16\x85\xAD\xB1" + "\x4C\xAE\x43\x4D\x19\x58\xB5\x5E" + "\x2E\xF5\x09\xAA\x39\xF4\xC0\xB3" + "\xD4\x4D\xDB\x73\x7A\xD4\xF1\xBF" + "\x89\x16\x4D\x2D\xA2\x26\x33\x72" + "\x18\x33\x7E\xD6\xD2\x16\xA4\x54" + 
"\xF4\x8C\xB3\x52\xDF\x21\x9C\xEB" + "\xBF\x49\xD3\xF9\x05\x06\xCB\xD2" + "\xA9\xD2\x3B\x6E\x19\x8C\xBC\x19" + "\xAB\x89\xD6\xD8\xCD\x56\x89\x5E" + "\xAC\x00\xE3\x50\x63\x4A\x80\x9A" + "\x05\xBC\x50\x39\xD3\x32\xD9\x0D" + "\xE3\x20\x0D\x75\x54\xEC\xE6\x31" + "\x14\xB9\x3A\x59\x00\x43\x37\x8E" + "\x8C\x5A\x79\x62\x14\x76\x8A\xAE" + "\x8F\xCC\xA1\x6C\x38\x78\xDD\x2D" + "\x8B\x6D\xEA\xBD\x7B\x25\xFF\x60" + "\xC9\x87\xB1\x79\x1E\xA5\x86\x68" + "\x81\xB4\xE2\xC1\x05\x7D\x3A\x73" + "\xD0\xDA\x75\x77\x9E\x05\x27\xF1" + "\x08\xA9\x66\x64\x6C\xBC\x82\x17" + "\x2C\x23\x5F\x62\x4D\x02\x1A\x58" + "\xE7\xB7\x23\x6D\xE2\x20\xDA\xEF" + "\xB4\xB3\x3F\xB2\x2B\x69\x98\x83" + "\x95\x87\x13\x57\x60\xD7\xB5\xB1" + "\xEE\x0A\x2F\x95\x36\x4C\x76\x5D" + "\x5F\xD9\x19\xED\xB9\xA5\x48\xBF" + "\xC8\xAB\x0F\x71\xCC\x61\x8E\x0A" + "\xD0\x29\x44\xA8\xB9\xC1\xE8\xC8" + "\xC9\xA8\x28\x81\xFB\x50\xF2\xF0" + "\x26\xAE\x39\xB8\x91\xCD\xA8\xAC" + "\xDE\x55\x1B\x50\x14\x53\x44\x17" + "\x54\x46\xFC\xB1\xE4\x07\x6B\x9A" + "\x01\x14\xF0\x2E\x2E\xDB\x46\x1B" + "\x1A\x09\x97\xA9\xB6\x97\x79\x06" + "\xFB\xCB\x85\xCF\xDD\xA1\x41\xB1" + "\x00\xAA\xF7\xE0\x89\x73\xFB\xE5" + "\xBF\x84\xDB\xC9\xCD\xC4\xA2\x0D" + "\x3B\xAC\xF9\xDF\x96\xBF\x88\x23" + "\x41\x67\xA1\x24\x99\x7E\xCC\x9B" + "\x02\x8F\x6A\x49\xF6\x25\xBA\x7A" + "\xF4\x78\xFD\x79\x62\x63\x4F\x14" + "\xD6\x11\x11\x04\x05\x5F\x7E\xEA" + "\x4C\xB6\xF8\xF4\x5F\x48\x52\x54" + "\x94\x63\xA8\x4E\xCF\xD2\x1B\x1B" + "\x22\x18\x6A\xAF\x6E\x3E\xE1\x0D", + .rlen = 496, }, { /* Generated with Crypto++ */ .key = "\x85\x62\x3F\x1C\xF9\xD6\x1C\xF9" "\xD6\xB3\x90\x6D\x4A\x90\x6D\x4A" @@ -3855,6 +4259,140 @@ static struct cipher_testvec tf_ctr_dec_tv_template[] = { "\x6A\x01\x75\x0C\xA3\x17\xAE\x45" "\xDC\x50\xE7\x7E\x15\x89\x20\xB7", .rlen = 496, + }, { /* Generated with Crypto++ */ + .key = "\x85\x62\x3F\x1C\xF9\xD6\x1C\xF9" + "\xD6\xB3\x90\x6D\x4A\x90\x6D\x4A" + "\x27\x04\xE1\x27\x04\xE1\xBE\x9B" + "\x78\xBE\x9B\x78\x55\x32\x0F\x55", + .klen = 32, + .iv = "\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF" + "\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFD", + .input = "\xEB\x44\xAF\x49\x27\xB8\xFB\x44" + "\x4C\xA6\xC3\x0C\x8B\xD0\x01\x0C" + "\x53\xC8\x16\x38\xDE\x40\x4F\x91" + "\x25\x6D\x4C\xA0\x9A\x87\x1E\xDA" + "\x88\x7E\x89\xE9\x67\x2B\x83\xA2" + "\x5F\x2E\x23\x3E\x45\xB9\x77\x7B" + "\xA6\x7E\x47\x36\x81\x9F\x9B\xF3" + "\xE0\xF0\xD7\x47\xA9\xC8\xEF\x33" + "\x0C\x43\xFE\x67\x50\x0A\x2C\x3E" + "\xA0\xE1\x25\x8E\x80\x07\x4A\xC0" + "\x64\x89\x9F\x6A\x27\x96\x07\xA6" + "\x9B\xC8\x1B\x21\x60\xAE\x5D\x01" + "\xE2\xCD\xC8\xAA\x6C\x9D\x1C\x34" + "\x39\x18\x09\xA4\x82\x59\x78\xE7" + "\xFC\x59\x65\xF2\x94\xFF\xFB\xE2" + "\x3C\xDA\xB1\x90\x95\xBF\x91\xE3" + "\xE6\x87\x31\x9E\x16\x85\xAD\xB1" + "\x4C\xAE\x43\x4D\x19\x58\xB5\x5E" + "\x2E\xF5\x09\xAA\x39\xF4\xC0\xB3" + "\xD4\x4D\xDB\x73\x7A\xD4\xF1\xBF" + "\x89\x16\x4D\x2D\xA2\x26\x33\x72" + "\x18\x33\x7E\xD6\xD2\x16\xA4\x54" + "\xF4\x8C\xB3\x52\xDF\x21\x9C\xEB" + "\xBF\x49\xD3\xF9\x05\x06\xCB\xD2" + "\xA9\xD2\x3B\x6E\x19\x8C\xBC\x19" + "\xAB\x89\xD6\xD8\xCD\x56\x89\x5E" + "\xAC\x00\xE3\x50\x63\x4A\x80\x9A" + "\x05\xBC\x50\x39\xD3\x32\xD9\x0D" + "\xE3\x20\x0D\x75\x54\xEC\xE6\x31" + "\x14\xB9\x3A\x59\x00\x43\x37\x8E" + "\x8C\x5A\x79\x62\x14\x76\x8A\xAE" + "\x8F\xCC\xA1\x6C\x38\x78\xDD\x2D" + "\x8B\x6D\xEA\xBD\x7B\x25\xFF\x60" + "\xC9\x87\xB1\x79\x1E\xA5\x86\x68" + "\x81\xB4\xE2\xC1\x05\x7D\x3A\x73" + "\xD0\xDA\x75\x77\x9E\x05\x27\xF1" + "\x08\xA9\x66\x64\x6C\xBC\x82\x17" + "\x2C\x23\x5F\x62\x4D\x02\x1A\x58" + "\xE7\xB7\x23\x6D\xE2\x20\xDA\xEF" + "\xB4\xB3\x3F\xB2\x2B\x69\x98\x83" + 
"\x95\x87\x13\x57\x60\xD7\xB5\xB1" + "\xEE\x0A\x2F\x95\x36\x4C\x76\x5D" + "\x5F\xD9\x19\xED\xB9\xA5\x48\xBF" + "\xC8\xAB\x0F\x71\xCC\x61\x8E\x0A" + "\xD0\x29\x44\xA8\xB9\xC1\xE8\xC8" + "\xC9\xA8\x28\x81\xFB\x50\xF2\xF0" + "\x26\xAE\x39\xB8\x91\xCD\xA8\xAC" + "\xDE\x55\x1B\x50\x14\x53\x44\x17" + "\x54\x46\xFC\xB1\xE4\x07\x6B\x9A" + "\x01\x14\xF0\x2E\x2E\xDB\x46\x1B" + "\x1A\x09\x97\xA9\xB6\x97\x79\x06" + "\xFB\xCB\x85\xCF\xDD\xA1\x41\xB1" + "\x00\xAA\xF7\xE0\x89\x73\xFB\xE5" + "\xBF\x84\xDB\xC9\xCD\xC4\xA2\x0D" + "\x3B\xAC\xF9\xDF\x96\xBF\x88\x23" + "\x41\x67\xA1\x24\x99\x7E\xCC\x9B" + "\x02\x8F\x6A\x49\xF6\x25\xBA\x7A" + "\xF4\x78\xFD\x79\x62\x63\x4F\x14" + "\xD6\x11\x11\x04\x05\x5F\x7E\xEA" + "\x4C\xB6\xF8\xF4\x5F\x48\x52\x54" + "\x94\x63\xA8\x4E\xCF\xD2\x1B\x1B" + "\x22\x18\x6A\xAF\x6E\x3E\xE1\x0D", + .ilen = 496, + .result = "\x56\xED\x84\x1B\x8F\x26\xBD\x31" + "\xC8\x5F\xF6\x6A\x01\x98\x0C\xA3" + "\x3A\xD1\x45\xDC\x73\x0A\x7E\x15" + "\xAC\x20\xB7\x4E\xE5\x59\xF0\x87" + "\x1E\x92\x29\xC0\x34\xCB\x62\xF9" + "\x6D\x04\x9B\x0F\xA6\x3D\xD4\x48" + "\xDF\x76\x0D\x81\x18\xAF\x23\xBA" + "\x51\xE8\x5C\xF3\x8A\x21\x95\x2C" + "\xC3\x37\xCE\x65\xFC\x70\x07\x9E" + "\x12\xA9\x40\xD7\x4B\xE2\x79\x10" + "\x84\x1B\xB2\x26\xBD\x54\xEB\x5F" + "\xF6\x8D\x01\x98\x2F\xC6\x3A\xD1" + "\x68\xFF\x73\x0A\xA1\x15\xAC\x43" + "\xDA\x4E\xE5\x7C\x13\x87\x1E\xB5" + "\x29\xC0\x57\xEE\x62\xF9\x90\x04" + "\x9B\x32\xC9\x3D\xD4\x6B\x02\x76" + "\x0D\xA4\x18\xAF\x46\xDD\x51\xE8" + "\x7F\x16\x8A\x21\xB8\x2C\xC3\x5A" + "\xF1\x65\xFC\x93\x07\x9E\x35\xCC" + "\x40\xD7\x6E\x05\x79\x10\xA7\x1B" + "\xB2\x49\xE0\x54\xEB\x82\x19\x8D" + "\x24\xBB\x2F\xC6\x5D\xF4\x68\xFF" + "\x96\x0A\xA1\x38\xCF\x43\xDA\x71" + "\x08\x7C\x13\xAA\x1E\xB5\x4C\xE3" + "\x57\xEE\x85\x1C\x90\x27\xBE\x32" + "\xC9\x60\xF7\x6B\x02\x99\x0D\xA4" + "\x3B\xD2\x46\xDD\x74\x0B\x7F\x16" + "\xAD\x21\xB8\x4F\xE6\x5A\xF1\x88" + "\x1F\x93\x2A\xC1\x35\xCC\x63\xFA" + "\x6E\x05\x9C\x10\xA7\x3E\xD5\x49" + "\xE0\x77\x0E\x82\x19\xB0\x24\xBB" + "\x52\xE9\x5D\xF4\x8B\x22\x96\x2D" + "\xC4\x38\xCF\x66\xFD\x71\x08\x9F" + "\x13\xAA\x41\xD8\x4C\xE3\x7A\x11" + "\x85\x1C\xB3\x27\xBE\x55\xEC\x60" + "\xF7\x8E\x02\x99\x30\xC7\x3B\xD2" + "\x69\x00\x74\x0B\xA2\x16\xAD\x44" + "\xDB\x4F\xE6\x7D\x14\x88\x1F\xB6" + "\x2A\xC1\x58\xEF\x63\xFA\x91\x05" + "\x9C\x33\xCA\x3E\xD5\x6C\x03\x77" + "\x0E\xA5\x19\xB0\x47\xDE\x52\xE9" + "\x80\x17\x8B\x22\xB9\x2D\xC4\x5B" + "\xF2\x66\xFD\x94\x08\x9F\x36\xCD" + "\x41\xD8\x6F\x06\x7A\x11\xA8\x1C" + "\xB3\x4A\xE1\x55\xEC\x83\x1A\x8E" + "\x25\xBC\x30\xC7\x5E\xF5\x69\x00" + "\x97\x0B\xA2\x39\xD0\x44\xDB\x72" + "\x09\x7D\x14\xAB\x1F\xB6\x4D\xE4" + "\x58\xEF\x86\x1D\x91\x28\xBF\x33" + "\xCA\x61\xF8\x6C\x03\x9A\x0E\xA5" + "\x3C\xD3\x47\xDE\x75\x0C\x80\x17" + "\xAE\x22\xB9\x50\xE7\x5B\xF2\x89" + "\x20\x94\x2B\xC2\x36\xCD\x64\xFB" + "\x6F\x06\x9D\x11\xA8\x3F\xD6\x4A" + "\xE1\x78\x0F\x83\x1A\xB1\x25\xBC" + "\x53\xEA\x5E\xF5\x8C\x00\x97\x2E" + "\xC5\x39\xD0\x67\xFE\x72\x09\xA0" + "\x14\xAB\x42\xD9\x4D\xE4\x7B\x12" + "\x86\x1D\xB4\x28\xBF\x56\xED\x61" + "\xF8\x8F\x03\x9A\x31\xC8\x3C\xD3" + "\x6A\x01\x75\x0C\xA3\x17\xAE\x45" + "\xDC\x50\xE7\x7E\x15\x89\x20\xB7", + .rlen = 496, }, { /* Generated with Crypto++ */ .key = "\x85\x62\x3F\x1C\xF9\xD6\x1C\xF9" "\xD6\xB3\x90\x6D\x4A\x90\x6D\x4A" @@ -5680,6 +6218,140 @@ static struct cipher_testvec serpent_ctr_enc_tv_template[] = { .also_non_np = 1, .np = 2, .tap = { 147 - 16, 16 }, + }, { /* Generated with Crypto++ */ + .key = "\x85\x62\x3F\x1C\xF9\xD6\x1C\xF9" + "\xD6\xB3\x90\x6D\x4A\x90\x6D\x4A" + "\x27\x04\xE1\x27\x04\xE1\xBE\x9B" 
+ "\x78\xBE\x9B\x78\x55\x32\x0F\x55", + .klen = 32, + .iv = "\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF" + "\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFD", + .input = "\x56\xED\x84\x1B\x8F\x26\xBD\x31" + "\xC8\x5F\xF6\x6A\x01\x98\x0C\xA3" + "\x3A\xD1\x45\xDC\x73\x0A\x7E\x15" + "\xAC\x20\xB7\x4E\xE5\x59\xF0\x87" + "\x1E\x92\x29\xC0\x34\xCB\x62\xF9" + "\x6D\x04\x9B\x0F\xA6\x3D\xD4\x48" + "\xDF\x76\x0D\x81\x18\xAF\x23\xBA" + "\x51\xE8\x5C\xF3\x8A\x21\x95\x2C" + "\xC3\x37\xCE\x65\xFC\x70\x07\x9E" + "\x12\xA9\x40\xD7\x4B\xE2\x79\x10" + "\x84\x1B\xB2\x26\xBD\x54\xEB\x5F" + "\xF6\x8D\x01\x98\x2F\xC6\x3A\xD1" + "\x68\xFF\x73\x0A\xA1\x15\xAC\x43" + "\xDA\x4E\xE5\x7C\x13\x87\x1E\xB5" + "\x29\xC0\x57\xEE\x62\xF9\x90\x04" + "\x9B\x32\xC9\x3D\xD4\x6B\x02\x76" + "\x0D\xA4\x18\xAF\x46\xDD\x51\xE8" + "\x7F\x16\x8A\x21\xB8\x2C\xC3\x5A" + "\xF1\x65\xFC\x93\x07\x9E\x35\xCC" + "\x40\xD7\x6E\x05\x79\x10\xA7\x1B" + "\xB2\x49\xE0\x54\xEB\x82\x19\x8D" + "\x24\xBB\x2F\xC6\x5D\xF4\x68\xFF" + "\x96\x0A\xA1\x38\xCF\x43\xDA\x71" + "\x08\x7C\x13\xAA\x1E\xB5\x4C\xE3" + "\x57\xEE\x85\x1C\x90\x27\xBE\x32" + "\xC9\x60\xF7\x6B\x02\x99\x0D\xA4" + "\x3B\xD2\x46\xDD\x74\x0B\x7F\x16" + "\xAD\x21\xB8\x4F\xE6\x5A\xF1\x88" + "\x1F\x93\x2A\xC1\x35\xCC\x63\xFA" + "\x6E\x05\x9C\x10\xA7\x3E\xD5\x49" + "\xE0\x77\x0E\x82\x19\xB0\x24\xBB" + "\x52\xE9\x5D\xF4\x8B\x22\x96\x2D" + "\xC4\x38\xCF\x66\xFD\x71\x08\x9F" + "\x13\xAA\x41\xD8\x4C\xE3\x7A\x11" + "\x85\x1C\xB3\x27\xBE\x55\xEC\x60" + "\xF7\x8E\x02\x99\x30\xC7\x3B\xD2" + "\x69\x00\x74\x0B\xA2\x16\xAD\x44" + "\xDB\x4F\xE6\x7D\x14\x88\x1F\xB6" + "\x2A\xC1\x58\xEF\x63\xFA\x91\x05" + "\x9C\x33\xCA\x3E\xD5\x6C\x03\x77" + "\x0E\xA5\x19\xB0\x47\xDE\x52\xE9" + "\x80\x17\x8B\x22\xB9\x2D\xC4\x5B" + "\xF2\x66\xFD\x94\x08\x9F\x36\xCD" + "\x41\xD8\x6F\x06\x7A\x11\xA8\x1C" + "\xB3\x4A\xE1\x55\xEC\x83\x1A\x8E" + "\x25\xBC\x30\xC7\x5E\xF5\x69\x00" + "\x97\x0B\xA2\x39\xD0\x44\xDB\x72" + "\x09\x7D\x14\xAB\x1F\xB6\x4D\xE4" + "\x58\xEF\x86\x1D\x91\x28\xBF\x33" + "\xCA\x61\xF8\x6C\x03\x9A\x0E\xA5" + "\x3C\xD3\x47\xDE\x75\x0C\x80\x17" + "\xAE\x22\xB9\x50\xE7\x5B\xF2\x89" + "\x20\x94\x2B\xC2\x36\xCD\x64\xFB" + "\x6F\x06\x9D\x11\xA8\x3F\xD6\x4A" + "\xE1\x78\x0F\x83\x1A\xB1\x25\xBC" + "\x53\xEA\x5E\xF5\x8C\x00\x97\x2E" + "\xC5\x39\xD0\x67\xFE\x72\x09\xA0" + "\x14\xAB\x42\xD9\x4D\xE4\x7B\x12" + "\x86\x1D\xB4\x28\xBF\x56\xED\x61" + "\xF8\x8F\x03\x9A\x31\xC8\x3C\xD3" + "\x6A\x01\x75\x0C\xA3\x17\xAE\x45" + "\xDC\x50\xE7\x7E\x15\x89\x20\xB7", + .ilen = 496, + .result = "\x06\x9A\xF8\xB4\x53\x88\x62\xFC" + "\x68\xB8\x2E\xDF\xC1\x05\x0F\x3D" + "\xAF\x4D\x95\xAE\xC4\xE9\x1C\xDC" + "\xF6\x2B\x8F\x90\x89\xF6\x7E\x1A" + "\xA6\xB9\xE4\xF4\xFA\xCA\xE5\x7E" + "\x71\x28\x06\x4F\xE8\x08\x39\xDA" + "\xA5\x0E\xC8\xC0\xB8\x16\xE5\x69" + "\xE5\xCA\xEC\x4F\x63\x2C\xC0\x9B" + "\x9F\x3E\x39\x79\xF0\xCD\x64\x35" + "\x4A\xD3\xC8\xA9\x31\xCD\x48\x5B" + "\x92\x3D\x8F\x3F\x96\xBD\xB3\x18" + "\x74\x2A\x5D\x29\x3F\x57\x8F\xE2" + "\x67\x9A\xE0\xE5\xD4\x4A\xE2\x47" + "\xBC\xF6\xEB\x14\xF3\x8C\x20\xC2" + "\x7D\xE2\x43\x81\x86\x72\x2E\xB1" + "\x39\xF6\x95\xE1\x1F\xCB\x76\x33" + "\x5B\x7D\x23\x0F\x3A\x67\x2A\x2F" + "\xB9\x37\x9D\xDD\x1F\x16\xA1\x3C" + "\x70\xFE\x52\xAA\x93\x3C\xC4\x46" + "\xB1\xE5\xFF\xDA\xAF\xE2\x84\xFE" + "\x25\x92\xB2\x63\xBD\x49\x77\xB4" + "\x22\xA4\x6A\xD5\x04\xE0\x45\x58" + "\x1C\x34\x96\x7C\x03\x0C\x13\xA2" + "\x05\x22\xE2\xCB\x5A\x35\x03\x09" + "\x40\xD2\x82\x05\xCA\x58\x73\xF2" + "\x29\x5E\x01\x47\x13\x32\x78\xBE" + "\x06\xB0\x51\xDB\x6C\x31\xA0\x1C" + "\x74\xBC\x8D\x25\xDF\xF8\x65\xD1" + "\x38\x35\x11\x26\x4A\xB4\x06\x32" + 
"\xFA\xD2\x07\x77\xB3\x74\x98\x80" + "\x61\x59\xA8\x9F\xF3\x6F\x2A\xBF" + "\xE6\xA5\x9A\xC4\x6B\xA6\x49\x6F" + "\xBC\x47\xD9\xFB\xC6\xEF\x25\x65" + "\x96\xAC\x9F\xE4\x81\x4B\xD8\xBA" + "\xD6\x9B\xC9\x6D\x58\x40\x81\x02" + "\x73\x44\x4E\x43\x6E\x37\xBB\x11" + "\xE3\xF9\xB8\x2F\xEC\x76\x34\xEA" + "\x90\xCD\xB7\x2E\x0E\x32\x71\xE8" + "\xBB\x4E\x0B\x98\xA4\x17\x17\x5B" + "\x07\xB5\x82\x3A\xC4\xE8\x42\x51" + "\x5A\x4C\x4E\x7D\xBF\xC4\xC0\x4F" + "\x68\xB8\xC6\x4A\x32\x6F\x0B\xD7" + "\x85\xED\x6B\xFB\x72\xD2\xA5\x8F" + "\xBF\xF9\xAC\x59\x50\xA8\x08\x70" + "\xEC\xBD\x0A\xBF\xE5\x87\xA1\xC2" + "\x92\x14\x78\xAF\xE8\xEA\x2E\xDD" + "\xC1\x03\x9A\xAA\x89\x8B\x32\x46" + "\x5B\x18\x27\xBA\x46\xAA\x64\xDE" + "\xE3\xD5\xA3\xFC\x7B\x5B\x61\xDB" + "\x7E\xDA\xEC\x30\x17\x19\xF8\x80" + "\xB5\x5E\x27\xB5\x37\x3A\x1F\x28" + "\x07\x73\xC3\x63\xCE\xFF\x8C\xFE" + "\x81\x4E\xF8\x24\xF3\xB8\xC7\xE8" + "\x16\x9A\xCC\x58\x2F\x88\x1C\x4B" + "\xBB\x33\xA2\x73\xF0\x1C\x89\x0E" + "\xDC\x34\x27\x89\x98\xCE\x1C\xA2" + "\xD8\xB8\x90\xBE\xEC\x72\x28\x13" + "\xAC\x7B\xF1\xD0\x7F\x7A\x28\x50" + "\xB7\x99\x65\x8A\xC9\xC6\x21\x34" + "\x7F\x67\x9D\xB7\x2C\xCC\xF5\x17" + "\x2B\x89\xAC\xB0\xD7\x1E\x47\xB0" + "\x61\xAF\xD4\x63\x6D\xB8\x2D\x20", + .rlen = 496, }, }; @@ -5781,6 +6453,140 @@ static struct cipher_testvec serpent_ctr_dec_tv_template[] = { .also_non_np = 1, .np = 2, .tap = { 147 - 16, 16 }, + }, { /* Generated with Crypto++ */ + .key = "\x85\x62\x3F\x1C\xF9\xD6\x1C\xF9" + "\xD6\xB3\x90\x6D\x4A\x90\x6D\x4A" + "\x27\x04\xE1\x27\x04\xE1\xBE\x9B" + "\x78\xBE\x9B\x78\x55\x32\x0F\x55", + .klen = 32, + .iv = "\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF" + "\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFD", + .input = "\x06\x9A\xF8\xB4\x53\x88\x62\xFC" + "\x68\xB8\x2E\xDF\xC1\x05\x0F\x3D" + "\xAF\x4D\x95\xAE\xC4\xE9\x1C\xDC" + "\xF6\x2B\x8F\x90\x89\xF6\x7E\x1A" + "\xA6\xB9\xE4\xF4\xFA\xCA\xE5\x7E" + "\x71\x28\x06\x4F\xE8\x08\x39\xDA" + "\xA5\x0E\xC8\xC0\xB8\x16\xE5\x69" + "\xE5\xCA\xEC\x4F\x63\x2C\xC0\x9B" + "\x9F\x3E\x39\x79\xF0\xCD\x64\x35" + "\x4A\xD3\xC8\xA9\x31\xCD\x48\x5B" + "\x92\x3D\x8F\x3F\x96\xBD\xB3\x18" + "\x74\x2A\x5D\x29\x3F\x57\x8F\xE2" + "\x67\x9A\xE0\xE5\xD4\x4A\xE2\x47" + "\xBC\xF6\xEB\x14\xF3\x8C\x20\xC2" + "\x7D\xE2\x43\x81\x86\x72\x2E\xB1" + "\x39\xF6\x95\xE1\x1F\xCB\x76\x33" + "\x5B\x7D\x23\x0F\x3A\x67\x2A\x2F" + "\xB9\x37\x9D\xDD\x1F\x16\xA1\x3C" + "\x70\xFE\x52\xAA\x93\x3C\xC4\x46" + "\xB1\xE5\xFF\xDA\xAF\xE2\x84\xFE" + "\x25\x92\xB2\x63\xBD\x49\x77\xB4" + "\x22\xA4\x6A\xD5\x04\xE0\x45\x58" + "\x1C\x34\x96\x7C\x03\x0C\x13\xA2" + "\x05\x22\xE2\xCB\x5A\x35\x03\x09" + "\x40\xD2\x82\x05\xCA\x58\x73\xF2" + "\x29\x5E\x01\x47\x13\x32\x78\xBE" + "\x06\xB0\x51\xDB\x6C\x31\xA0\x1C" + "\x74\xBC\x8D\x25\xDF\xF8\x65\xD1" + "\x38\x35\x11\x26\x4A\xB4\x06\x32" + "\xFA\xD2\x07\x77\xB3\x74\x98\x80" + "\x61\x59\xA8\x9F\xF3\x6F\x2A\xBF" + "\xE6\xA5\x9A\xC4\x6B\xA6\x49\x6F" + "\xBC\x47\xD9\xFB\xC6\xEF\x25\x65" + "\x96\xAC\x9F\xE4\x81\x4B\xD8\xBA" + "\xD6\x9B\xC9\x6D\x58\x40\x81\x02" + "\x73\x44\x4E\x43\x6E\x37\xBB\x11" + "\xE3\xF9\xB8\x2F\xEC\x76\x34\xEA" + "\x90\xCD\xB7\x2E\x0E\x32\x71\xE8" + "\xBB\x4E\x0B\x98\xA4\x17\x17\x5B" + "\x07\xB5\x82\x3A\xC4\xE8\x42\x51" + "\x5A\x4C\x4E\x7D\xBF\xC4\xC0\x4F" + "\x68\xB8\xC6\x4A\x32\x6F\x0B\xD7" + "\x85\xED\x6B\xFB\x72\xD2\xA5\x8F" + "\xBF\xF9\xAC\x59\x50\xA8\x08\x70" + "\xEC\xBD\x0A\xBF\xE5\x87\xA1\xC2" + "\x92\x14\x78\xAF\xE8\xEA\x2E\xDD" + "\xC1\x03\x9A\xAA\x89\x8B\x32\x46" + "\x5B\x18\x27\xBA\x46\xAA\x64\xDE" + "\xE3\xD5\xA3\xFC\x7B\x5B\x61\xDB" + "\x7E\xDA\xEC\x30\x17\x19\xF8\x80" + 
"\xB5\x5E\x27\xB5\x37\x3A\x1F\x28" + "\x07\x73\xC3\x63\xCE\xFF\x8C\xFE" + "\x81\x4E\xF8\x24\xF3\xB8\xC7\xE8" + "\x16\x9A\xCC\x58\x2F\x88\x1C\x4B" + "\xBB\x33\xA2\x73\xF0\x1C\x89\x0E" + "\xDC\x34\x27\x89\x98\xCE\x1C\xA2" + "\xD8\xB8\x90\xBE\xEC\x72\x28\x13" + "\xAC\x7B\xF1\xD0\x7F\x7A\x28\x50" + "\xB7\x99\x65\x8A\xC9\xC6\x21\x34" + "\x7F\x67\x9D\xB7\x2C\xCC\xF5\x17" + "\x2B\x89\xAC\xB0\xD7\x1E\x47\xB0" + "\x61\xAF\xD4\x63\x6D\xB8\x2D\x20", + .ilen = 496, + .result = "\x56\xED\x84\x1B\x8F\x26\xBD\x31" + "\xC8\x5F\xF6\x6A\x01\x98\x0C\xA3" + "\x3A\xD1\x45\xDC\x73\x0A\x7E\x15" + "\xAC\x20\xB7\x4E\xE5\x59\xF0\x87" + "\x1E\x92\x29\xC0\x34\xCB\x62\xF9" + "\x6D\x04\x9B\x0F\xA6\x3D\xD4\x48" + "\xDF\x76\x0D\x81\x18\xAF\x23\xBA" + "\x51\xE8\x5C\xF3\x8A\x21\x95\x2C" + "\xC3\x37\xCE\x65\xFC\x70\x07\x9E" + "\x12\xA9\x40\xD7\x4B\xE2\x79\x10" + "\x84\x1B\xB2\x26\xBD\x54\xEB\x5F" + "\xF6\x8D\x01\x98\x2F\xC6\x3A\xD1" + "\x68\xFF\x73\x0A\xA1\x15\xAC\x43" + "\xDA\x4E\xE5\x7C\x13\x87\x1E\xB5" + "\x29\xC0\x57\xEE\x62\xF9\x90\x04" + "\x9B\x32\xC9\x3D\xD4\x6B\x02\x76" + "\x0D\xA4\x18\xAF\x46\xDD\x51\xE8" + "\x7F\x16\x8A\x21\xB8\x2C\xC3\x5A" + "\xF1\x65\xFC\x93\x07\x9E\x35\xCC" + "\x40\xD7\x6E\x05\x79\x10\xA7\x1B" + "\xB2\x49\xE0\x54\xEB\x82\x19\x8D" + "\x24\xBB\x2F\xC6\x5D\xF4\x68\xFF" + "\x96\x0A\xA1\x38\xCF\x43\xDA\x71" + "\x08\x7C\x13\xAA\x1E\xB5\x4C\xE3" + "\x57\xEE\x85\x1C\x90\x27\xBE\x32" + "\xC9\x60\xF7\x6B\x02\x99\x0D\xA4" + "\x3B\xD2\x46\xDD\x74\x0B\x7F\x16" + "\xAD\x21\xB8\x4F\xE6\x5A\xF1\x88" + "\x1F\x93\x2A\xC1\x35\xCC\x63\xFA" + "\x6E\x05\x9C\x10\xA7\x3E\xD5\x49" + "\xE0\x77\x0E\x82\x19\xB0\x24\xBB" + "\x52\xE9\x5D\xF4\x8B\x22\x96\x2D" + "\xC4\x38\xCF\x66\xFD\x71\x08\x9F" + "\x13\xAA\x41\xD8\x4C\xE3\x7A\x11" + "\x85\x1C\xB3\x27\xBE\x55\xEC\x60" + "\xF7\x8E\x02\x99\x30\xC7\x3B\xD2" + "\x69\x00\x74\x0B\xA2\x16\xAD\x44" + "\xDB\x4F\xE6\x7D\x14\x88\x1F\xB6" + "\x2A\xC1\x58\xEF\x63\xFA\x91\x05" + "\x9C\x33\xCA\x3E\xD5\x6C\x03\x77" + "\x0E\xA5\x19\xB0\x47\xDE\x52\xE9" + "\x80\x17\x8B\x22\xB9\x2D\xC4\x5B" + "\xF2\x66\xFD\x94\x08\x9F\x36\xCD" + "\x41\xD8\x6F\x06\x7A\x11\xA8\x1C" + "\xB3\x4A\xE1\x55\xEC\x83\x1A\x8E" + "\x25\xBC\x30\xC7\x5E\xF5\x69\x00" + "\x97\x0B\xA2\x39\xD0\x44\xDB\x72" + "\x09\x7D\x14\xAB\x1F\xB6\x4D\xE4" + "\x58\xEF\x86\x1D\x91\x28\xBF\x33" + "\xCA\x61\xF8\x6C\x03\x9A\x0E\xA5" + "\x3C\xD3\x47\xDE\x75\x0C\x80\x17" + "\xAE\x22\xB9\x50\xE7\x5B\xF2\x89" + "\x20\x94\x2B\xC2\x36\xCD\x64\xFB" + "\x6F\x06\x9D\x11\xA8\x3F\xD6\x4A" + "\xE1\x78\x0F\x83\x1A\xB1\x25\xBC" + "\x53\xEA\x5E\xF5\x8C\x00\x97\x2E" + "\xC5\x39\xD0\x67\xFE\x72\x09\xA0" + "\x14\xAB\x42\xD9\x4D\xE4\x7B\x12" + "\x86\x1D\xB4\x28\xBF\x56\xED\x61" + "\xF8\x8F\x03\x9A\x31\xC8\x3C\xD3" + "\x6A\x01\x75\x0C\xA3\x17\xAE\x45" + "\xDC\x50\xE7\x7E\x15\x89\x20\xB7", + .rlen = 496, }, }; @@ -15738,6 +16544,140 @@ static struct cipher_testvec camellia_ctr_enc_tv_template[] = { .also_non_np = 1, .np = 2, .tap = { 51 - 16, 16 }, + }, { /* Generated with Crypto++ */ + .key = "\x85\x62\x3F\x1C\xF9\xD6\x1C\xF9" + "\xD6\xB3\x90\x6D\x4A\x90\x6D\x4A" + "\x27\x04\xE1\x27\x04\xE1\xBE\x9B" + "\x78\xBE\x9B\x78\x55\x32\x0F\x55", + .klen = 32, + .iv = "\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF" + "\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFD", + .input = "\x56\xED\x84\x1B\x8F\x26\xBD\x31" + "\xC8\x5F\xF6\x6A\x01\x98\x0C\xA3" + "\x3A\xD1\x45\xDC\x73\x0A\x7E\x15" + "\xAC\x20\xB7\x4E\xE5\x59\xF0\x87" + "\x1E\x92\x29\xC0\x34\xCB\x62\xF9" + "\x6D\x04\x9B\x0F\xA6\x3D\xD4\x48" + "\xDF\x76\x0D\x81\x18\xAF\x23\xBA" + "\x51\xE8\x5C\xF3\x8A\x21\x95\x2C" + "\xC3\x37\xCE\x65\xFC\x70\x07\x9E" 
+ "\x12\xA9\x40\xD7\x4B\xE2\x79\x10" + "\x84\x1B\xB2\x26\xBD\x54\xEB\x5F" + "\xF6\x8D\x01\x98\x2F\xC6\x3A\xD1" + "\x68\xFF\x73\x0A\xA1\x15\xAC\x43" + "\xDA\x4E\xE5\x7C\x13\x87\x1E\xB5" + "\x29\xC0\x57\xEE\x62\xF9\x90\x04" + "\x9B\x32\xC9\x3D\xD4\x6B\x02\x76" + "\x0D\xA4\x18\xAF\x46\xDD\x51\xE8" + "\x7F\x16\x8A\x21\xB8\x2C\xC3\x5A" + "\xF1\x65\xFC\x93\x07\x9E\x35\xCC" + "\x40\xD7\x6E\x05\x79\x10\xA7\x1B" + "\xB2\x49\xE0\x54\xEB\x82\x19\x8D" + "\x24\xBB\x2F\xC6\x5D\xF4\x68\xFF" + "\x96\x0A\xA1\x38\xCF\x43\xDA\x71" + "\x08\x7C\x13\xAA\x1E\xB5\x4C\xE3" + "\x57\xEE\x85\x1C\x90\x27\xBE\x32" + "\xC9\x60\xF7\x6B\x02\x99\x0D\xA4" + "\x3B\xD2\x46\xDD\x74\x0B\x7F\x16" + "\xAD\x21\xB8\x4F\xE6\x5A\xF1\x88" + "\x1F\x93\x2A\xC1\x35\xCC\x63\xFA" + "\x6E\x05\x9C\x10\xA7\x3E\xD5\x49" + "\xE0\x77\x0E\x82\x19\xB0\x24\xBB" + "\x52\xE9\x5D\xF4\x8B\x22\x96\x2D" + "\xC4\x38\xCF\x66\xFD\x71\x08\x9F" + "\x13\xAA\x41\xD8\x4C\xE3\x7A\x11" + "\x85\x1C\xB3\x27\xBE\x55\xEC\x60" + "\xF7\x8E\x02\x99\x30\xC7\x3B\xD2" + "\x69\x00\x74\x0B\xA2\x16\xAD\x44" + "\xDB\x4F\xE6\x7D\x14\x88\x1F\xB6" + "\x2A\xC1\x58\xEF\x63\xFA\x91\x05" + "\x9C\x33\xCA\x3E\xD5\x6C\x03\x77" + "\x0E\xA5\x19\xB0\x47\xDE\x52\xE9" + "\x80\x17\x8B\x22\xB9\x2D\xC4\x5B" + "\xF2\x66\xFD\x94\x08\x9F\x36\xCD" + "\x41\xD8\x6F\x06\x7A\x11\xA8\x1C" + "\xB3\x4A\xE1\x55\xEC\x83\x1A\x8E" + "\x25\xBC\x30\xC7\x5E\xF5\x69\x00" + "\x97\x0B\xA2\x39\xD0\x44\xDB\x72" + "\x09\x7D\x14\xAB\x1F\xB6\x4D\xE4" + "\x58\xEF\x86\x1D\x91\x28\xBF\x33" + "\xCA\x61\xF8\x6C\x03\x9A\x0E\xA5" + "\x3C\xD3\x47\xDE\x75\x0C\x80\x17" + "\xAE\x22\xB9\x50\xE7\x5B\xF2\x89" + "\x20\x94\x2B\xC2\x36\xCD\x64\xFB" + "\x6F\x06\x9D\x11\xA8\x3F\xD6\x4A" + "\xE1\x78\x0F\x83\x1A\xB1\x25\xBC" + "\x53\xEA\x5E\xF5\x8C\x00\x97\x2E" + "\xC5\x39\xD0\x67\xFE\x72\x09\xA0" + "\x14\xAB\x42\xD9\x4D\xE4\x7B\x12" + "\x86\x1D\xB4\x28\xBF\x56\xED\x61" + "\xF8\x8F\x03\x9A\x31\xC8\x3C\xD3" + "\x6A\x01\x75\x0C\xA3\x17\xAE\x45" + "\xDC\x50\xE7\x7E\x15\x89\x20\xB7", + .ilen = 496, + .result = "\x85\x79\x6C\x8B\x2B\x6D\x14\xF9" + "\xA6\x83\xB6\x80\x5B\x3A\xF3\x7E" + "\x30\x29\xEB\x1F\xDC\x19\x5F\xEB" + "\xF7\xC4\x27\x04\x51\x87\xD7\x6F" + "\xB8\x4E\x07\xFB\xAC\x3B\x08\xB4" + "\x4D\xCB\xE8\xE1\x71\x7D\x4F\x48" + "\xCD\x81\x64\xA5\xC4\x07\x1A\x9A" + "\x4B\x62\x90\x0E\xC8\xB3\x2B\x6B" + "\x8F\x9C\x6E\x72\x4B\xBA\xEF\x07" + "\x2C\x56\x07\x5E\x37\x30\x60\xA9" + "\xE3\xEF\xD6\x69\xE1\xA1\x77\x64" + "\x93\x75\x7A\xB7\x7A\x3B\xE9\x43" + "\x23\x35\x95\x91\x80\x8A\xC7\xCF" + "\xC3\xD5\xBF\xE7\xFE\x4C\x06\x6B" + "\x05\x19\x48\xE2\x62\xBA\x4F\xF2" + "\xFB\xEE\xE4\xCB\x79\x9D\xA3\x10" + "\x1D\x29\x8C\x1D\x7A\x88\x5A\xDD" + "\x4E\xB6\x18\xAA\xCD\xE6\x33\x96" + "\xD9\x0F\x90\x5A\x78\x76\x4D\x77" + "\x3C\x20\x89\x3B\xA3\xF9\x07\xFD" + "\xE4\xE8\x20\x2D\x15\x0A\x63\x49" + "\xF5\x4F\x89\xD8\xDE\xA1\x28\x78" + "\x28\x07\x09\x1B\x03\x94\x1D\x4B" + "\x82\x28\x1E\x1D\x95\xBA\xAC\x85" + "\x71\x6E\x3C\x18\x4B\x77\x74\x79" + "\xBF\x67\x0A\x53\x3C\x94\xD9\x60" + "\xE9\x6D\x40\x34\xA0\x2A\x53\x5D" + "\x27\xD5\x47\xF9\xC3\x4B\x27\x29" + "\xE4\x76\x9C\x3F\xA7\x1C\x87\xFC" + "\x6E\x0F\xCF\x9B\x60\xF0\xF0\x8B" + "\x70\x1C\x84\x81\x72\x4D\xB4\x98" + "\x23\x62\xE7\x6A\x2B\xFC\xA5\xB2" + "\xFF\xF5\x71\x07\xCD\x90\x23\x13" + "\x19\xD7\x79\x36\x6C\x9D\x55\x8B" + "\x93\x78\x86\x05\x69\x46\xD0\xC5" + "\x39\x09\xEB\x79\xEF\xFA\x9F\xAE" + "\xF3\xD5\x44\xC3\xFD\x86\xD2\x7C" + "\x83\x4B\xD8\x75\x9C\x18\x04\x7B" + "\x73\xAD\x72\xA4\xF6\xAB\xCF\x4B" + "\xCC\x01\x45\x90\xA6\x43\x05\x0C" + "\x6C\x4F\x62\x77\x57\x97\x9F\xEE" + "\x75\xA7\x3C\x38\xD1\x0F\x3D\x0E" + 
"\x2C\x43\x98\xFB\x13\x65\x73\xE4" + "\x3C\x1E\xD6\x90\x08\xF7\xE0\x99" + "\x3B\xF1\x9D\x6C\x48\xA9\x0E\x32" + "\x17\xC2\xCC\x20\xA1\x19\x26\xAA" + "\xE0\x75\x2F\xFB\x54\x66\x0A\xDF" + "\xB5\xF2\x1F\xC1\x34\x3C\x30\x56" + "\xE8\xDC\xF7\x92\x6B\xBF\x17\x24" + "\xEC\x94\xB5\x3B\xD6\xCE\xA2\x54" + "\x10\x7F\x50\xDE\x69\x77\xD5\x37" + "\xFE\x9C\x10\x83\xC5\xEB\xC9\x53" + "\xB7\xF3\xC4\x20\xAF\x0A\x7E\x57" + "\x3A\xE6\x75\xFE\x89\x00\x6E\x48" + "\xFB\x99\x17\x2C\xF6\x64\x40\x95" + "\x5E\xDC\x7A\xA6\x70\xC7\xF4\xDD" + "\x52\x05\x24\x34\xF9\x0E\xC8\x64" + "\x6D\xE2\xD8\x80\x53\x31\x4C\xFE" + "\xB4\x3A\x5F\x19\xCF\x42\x1B\x22" + "\x0B\x2D\x7B\xF1\xC5\x43\xF7\x5E" + "\x12\xA8\x01\x64\x16\x0B\x26\x5A" + "\x0C\x95\x0F\x40\xC5\x5A\x06\x7C", + .rlen = 496, }, }; @@ -15792,8 +16732,141 @@ static struct cipher_testvec camellia_ctr_dec_tv_template[] = { .also_non_np = 1, .np = 2, .tap = { 51 - 16, 16 }, + }, { /* Generated with Crypto++ */ + .key = "\x85\x62\x3F\x1C\xF9\xD6\x1C\xF9" + "\xD6\xB3\x90\x6D\x4A\x90\x6D\x4A" + "\x27\x04\xE1\x27\x04\xE1\xBE\x9B" + "\x78\xBE\x9B\x78\x55\x32\x0F\x55", + .klen = 32, + .iv = "\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF" + "\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFD", + .input = "\x85\x79\x6C\x8B\x2B\x6D\x14\xF9" + "\xA6\x83\xB6\x80\x5B\x3A\xF3\x7E" + "\x30\x29\xEB\x1F\xDC\x19\x5F\xEB" + "\xF7\xC4\x27\x04\x51\x87\xD7\x6F" + "\xB8\x4E\x07\xFB\xAC\x3B\x08\xB4" + "\x4D\xCB\xE8\xE1\x71\x7D\x4F\x48" + "\xCD\x81\x64\xA5\xC4\x07\x1A\x9A" + "\x4B\x62\x90\x0E\xC8\xB3\x2B\x6B" + "\x8F\x9C\x6E\x72\x4B\xBA\xEF\x07" + "\x2C\x56\x07\x5E\x37\x30\x60\xA9" + "\xE3\xEF\xD6\x69\xE1\xA1\x77\x64" + "\x93\x75\x7A\xB7\x7A\x3B\xE9\x43" + "\x23\x35\x95\x91\x80\x8A\xC7\xCF" + "\xC3\xD5\xBF\xE7\xFE\x4C\x06\x6B" + "\x05\x19\x48\xE2\x62\xBA\x4F\xF2" + "\xFB\xEE\xE4\xCB\x79\x9D\xA3\x10" + "\x1D\x29\x8C\x1D\x7A\x88\x5A\xDD" + "\x4E\xB6\x18\xAA\xCD\xE6\x33\x96" + "\xD9\x0F\x90\x5A\x78\x76\x4D\x77" + "\x3C\x20\x89\x3B\xA3\xF9\x07\xFD" + "\xE4\xE8\x20\x2D\x15\x0A\x63\x49" + "\xF5\x4F\x89\xD8\xDE\xA1\x28\x78" + "\x28\x07\x09\x1B\x03\x94\x1D\x4B" + "\x82\x28\x1E\x1D\x95\xBA\xAC\x85" + "\x71\x6E\x3C\x18\x4B\x77\x74\x79" + "\xBF\x67\x0A\x53\x3C\x94\xD9\x60" + "\xE9\x6D\x40\x34\xA0\x2A\x53\x5D" + "\x27\xD5\x47\xF9\xC3\x4B\x27\x29" + "\xE4\x76\x9C\x3F\xA7\x1C\x87\xFC" + "\x6E\x0F\xCF\x9B\x60\xF0\xF0\x8B" + "\x70\x1C\x84\x81\x72\x4D\xB4\x98" + "\x23\x62\xE7\x6A\x2B\xFC\xA5\xB2" + "\xFF\xF5\x71\x07\xCD\x90\x23\x13" + "\x19\xD7\x79\x36\x6C\x9D\x55\x8B" + "\x93\x78\x86\x05\x69\x46\xD0\xC5" + "\x39\x09\xEB\x79\xEF\xFA\x9F\xAE" + "\xF3\xD5\x44\xC3\xFD\x86\xD2\x7C" + "\x83\x4B\xD8\x75\x9C\x18\x04\x7B" + "\x73\xAD\x72\xA4\xF6\xAB\xCF\x4B" + "\xCC\x01\x45\x90\xA6\x43\x05\x0C" + "\x6C\x4F\x62\x77\x57\x97\x9F\xEE" + "\x75\xA7\x3C\x38\xD1\x0F\x3D\x0E" + "\x2C\x43\x98\xFB\x13\x65\x73\xE4" + "\x3C\x1E\xD6\x90\x08\xF7\xE0\x99" + "\x3B\xF1\x9D\x6C\x48\xA9\x0E\x32" + "\x17\xC2\xCC\x20\xA1\x19\x26\xAA" + "\xE0\x75\x2F\xFB\x54\x66\x0A\xDF" + "\xB5\xF2\x1F\xC1\x34\x3C\x30\x56" + "\xE8\xDC\xF7\x92\x6B\xBF\x17\x24" + "\xEC\x94\xB5\x3B\xD6\xCE\xA2\x54" + "\x10\x7F\x50\xDE\x69\x77\xD5\x37" + "\xFE\x9C\x10\x83\xC5\xEB\xC9\x53" + "\xB7\xF3\xC4\x20\xAF\x0A\x7E\x57" + "\x3A\xE6\x75\xFE\x89\x00\x6E\x48" + "\xFB\x99\x17\x2C\xF6\x64\x40\x95" + "\x5E\xDC\x7A\xA6\x70\xC7\xF4\xDD" + "\x52\x05\x24\x34\xF9\x0E\xC8\x64" + "\x6D\xE2\xD8\x80\x53\x31\x4C\xFE" + "\xB4\x3A\x5F\x19\xCF\x42\x1B\x22" + "\x0B\x2D\x7B\xF1\xC5\x43\xF7\x5E" + "\x12\xA8\x01\x64\x16\x0B\x26\x5A" + "\x0C\x95\x0F\x40\xC5\x5A\x06\x7C", + .ilen = 496, + .result = "\x56\xED\x84\x1B\x8F\x26\xBD\x31" 
+ "\xC8\x5F\xF6\x6A\x01\x98\x0C\xA3" + "\x3A\xD1\x45\xDC\x73\x0A\x7E\x15" + "\xAC\x20\xB7\x4E\xE5\x59\xF0\x87" + "\x1E\x92\x29\xC0\x34\xCB\x62\xF9" + "\x6D\x04\x9B\x0F\xA6\x3D\xD4\x48" + "\xDF\x76\x0D\x81\x18\xAF\x23\xBA" + "\x51\xE8\x5C\xF3\x8A\x21\x95\x2C" + "\xC3\x37\xCE\x65\xFC\x70\x07\x9E" + "\x12\xA9\x40\xD7\x4B\xE2\x79\x10" + "\x84\x1B\xB2\x26\xBD\x54\xEB\x5F" + "\xF6\x8D\x01\x98\x2F\xC6\x3A\xD1" + "\x68\xFF\x73\x0A\xA1\x15\xAC\x43" + "\xDA\x4E\xE5\x7C\x13\x87\x1E\xB5" + "\x29\xC0\x57\xEE\x62\xF9\x90\x04" + "\x9B\x32\xC9\x3D\xD4\x6B\x02\x76" + "\x0D\xA4\x18\xAF\x46\xDD\x51\xE8" + "\x7F\x16\x8A\x21\xB8\x2C\xC3\x5A" + "\xF1\x65\xFC\x93\x07\x9E\x35\xCC" + "\x40\xD7\x6E\x05\x79\x10\xA7\x1B" + "\xB2\x49\xE0\x54\xEB\x82\x19\x8D" + "\x24\xBB\x2F\xC6\x5D\xF4\x68\xFF" + "\x96\x0A\xA1\x38\xCF\x43\xDA\x71" + "\x08\x7C\x13\xAA\x1E\xB5\x4C\xE3" + "\x57\xEE\x85\x1C\x90\x27\xBE\x32" + "\xC9\x60\xF7\x6B\x02\x99\x0D\xA4" + "\x3B\xD2\x46\xDD\x74\x0B\x7F\x16" + "\xAD\x21\xB8\x4F\xE6\x5A\xF1\x88" + "\x1F\x93\x2A\xC1\x35\xCC\x63\xFA" + "\x6E\x05\x9C\x10\xA7\x3E\xD5\x49" + "\xE0\x77\x0E\x82\x19\xB0\x24\xBB" + "\x52\xE9\x5D\xF4\x8B\x22\x96\x2D" + "\xC4\x38\xCF\x66\xFD\x71\x08\x9F" + "\x13\xAA\x41\xD8\x4C\xE3\x7A\x11" + "\x85\x1C\xB3\x27\xBE\x55\xEC\x60" + "\xF7\x8E\x02\x99\x30\xC7\x3B\xD2" + "\x69\x00\x74\x0B\xA2\x16\xAD\x44" + "\xDB\x4F\xE6\x7D\x14\x88\x1F\xB6" + "\x2A\xC1\x58\xEF\x63\xFA\x91\x05" + "\x9C\x33\xCA\x3E\xD5\x6C\x03\x77" + "\x0E\xA5\x19\xB0\x47\xDE\x52\xE9" + "\x80\x17\x8B\x22\xB9\x2D\xC4\x5B" + "\xF2\x66\xFD\x94\x08\x9F\x36\xCD" + "\x41\xD8\x6F\x06\x7A\x11\xA8\x1C" + "\xB3\x4A\xE1\x55\xEC\x83\x1A\x8E" + "\x25\xBC\x30\xC7\x5E\xF5\x69\x00" + "\x97\x0B\xA2\x39\xD0\x44\xDB\x72" + "\x09\x7D\x14\xAB\x1F\xB6\x4D\xE4" + "\x58\xEF\x86\x1D\x91\x28\xBF\x33" + "\xCA\x61\xF8\x6C\x03\x9A\x0E\xA5" + "\x3C\xD3\x47\xDE\x75\x0C\x80\x17" + "\xAE\x22\xB9\x50\xE7\x5B\xF2\x89" + "\x20\x94\x2B\xC2\x36\xCD\x64\xFB" + "\x6F\x06\x9D\x11\xA8\x3F\xD6\x4A" + "\xE1\x78\x0F\x83\x1A\xB1\x25\xBC" + "\x53\xEA\x5E\xF5\x8C\x00\x97\x2E" + "\xC5\x39\xD0\x67\xFE\x72\x09\xA0" + "\x14\xAB\x42\xD9\x4D\xE4\x7B\x12" + "\x86\x1D\xB4\x28\xBF\x56\xED\x61" + "\xF8\x8F\x03\x9A\x31\xC8\x3C\xD3" + "\x6A\x01\x75\x0C\xA3\x17\xAE\x45" + "\xDC\x50\xE7\x7E\x15\x89\x20\xB7", + .rlen = 496, }, - }; static struct cipher_testvec camellia_lrw_enc_tv_template[] = { -- cgit v1.2.3 From 08d6af8c160b6bd9b21a3177e2b1bebc72a21041 Mon Sep 17 00:00:00 2001 From: Jussi Kivilinna Date: Fri, 21 Sep 2012 10:26:47 +0300 Subject: crypto: testmgr - make test_skcipher also test 'dst != src' code paths Currrently test_skcipher uses same buffer for destination and source. However in any places, 'dst != src' take different path than 'dst == src' case. Therefore make test_skcipher also run tests with destination buffer being different than source buffer. Signed-off-by: Jussi Kivilinna Acked-by: David S. 
Miller Signed-off-by: Herbert Xu --- crypto/testmgr.c | 107 +++++++++++++++++++++++++++++++++++++++---------------- 1 file changed, 76 insertions(+), 31 deletions(-) (limited to 'crypto') diff --git a/crypto/testmgr.c b/crypto/testmgr.c index 8183777a49b6..00f54d54fc44 100644 --- a/crypto/testmgr.c +++ b/crypto/testmgr.c @@ -761,8 +761,9 @@ out_nobuf: return ret; } -static int test_skcipher(struct crypto_ablkcipher *tfm, int enc, - struct cipher_testvec *template, unsigned int tcount) +static int __test_skcipher(struct crypto_ablkcipher *tfm, int enc, + struct cipher_testvec *template, unsigned int tcount, + const bool diff_dst) { const char *algo = crypto_tfm_alg_driver_name(crypto_ablkcipher_tfm(tfm)); @@ -770,16 +771,26 @@ static int test_skcipher(struct crypto_ablkcipher *tfm, int enc, char *q; struct ablkcipher_request *req; struct scatterlist sg[8]; - const char *e; + struct scatterlist sgout[8]; + const char *e, *d; struct tcrypt_result result; void *data; char iv[MAX_IVLEN]; char *xbuf[XBUFSIZE]; + char *xoutbuf[XBUFSIZE]; int ret = -ENOMEM; if (testmgr_alloc_buf(xbuf)) goto out_nobuf; + if (diff_dst && testmgr_alloc_buf(xoutbuf)) + goto out_nooutbuf; + + if (diff_dst) + d = "-ddst"; + else + d = ""; + if (enc == ENCRYPT) e = "encryption"; else @@ -789,8 +800,8 @@ static int test_skcipher(struct crypto_ablkcipher *tfm, int enc, req = ablkcipher_request_alloc(tfm, GFP_KERNEL); if (!req) { - printk(KERN_ERR "alg: skcipher: Failed to allocate request " - "for %s\n", algo); + pr_err("alg: skcipher%s: Failed to allocate request for %s\n", + d, algo); goto out; } @@ -822,16 +833,21 @@ static int test_skcipher(struct crypto_ablkcipher *tfm, int enc, ret = crypto_ablkcipher_setkey(tfm, template[i].key, template[i].klen); if (!ret == template[i].fail) { - printk(KERN_ERR "alg: skcipher: setkey failed " - "on test %d for %s: flags=%x\n", j, - algo, crypto_ablkcipher_get_flags(tfm)); + pr_err("alg: skcipher%s: setkey failed on test %d for %s: flags=%x\n", + d, j, algo, + crypto_ablkcipher_get_flags(tfm)); goto out; } else if (ret) continue; sg_init_one(&sg[0], data, template[i].ilen); + if (diff_dst) { + data = xoutbuf[0]; + sg_init_one(&sgout[0], data, template[i].ilen); + } - ablkcipher_request_set_crypt(req, sg, sg, + ablkcipher_request_set_crypt(req, sg, + (diff_dst) ? sgout : sg, template[i].ilen, iv); ret = enc ? 
crypto_ablkcipher_encrypt(req) : @@ -850,16 +866,15 @@ static int test_skcipher(struct crypto_ablkcipher *tfm, int enc, } /* fall through */ default: - printk(KERN_ERR "alg: skcipher: %s failed on " - "test %d for %s: ret=%d\n", e, j, algo, - -ret); + pr_err("alg: skcipher%s: %s failed on test %d for %s: ret=%d\n", + d, e, j, algo, -ret); goto out; } q = data; if (memcmp(q, template[i].result, template[i].rlen)) { - printk(KERN_ERR "alg: skcipher: Test %d " - "failed on %s for %s\n", j, e, algo); + pr_err("alg: skcipher%s: Test %d failed on %s for %s\n", + d, j, e, algo); hexdump(q, template[i].rlen); ret = -EINVAL; goto out; @@ -886,9 +901,8 @@ static int test_skcipher(struct crypto_ablkcipher *tfm, int enc, ret = crypto_ablkcipher_setkey(tfm, template[i].key, template[i].klen); if (!ret == template[i].fail) { - printk(KERN_ERR "alg: skcipher: setkey failed " - "on chunk test %d for %s: flags=%x\n", - j, algo, + pr_err("alg: skcipher%s: setkey failed on chunk test %d for %s: flags=%x\n", + d, j, algo, crypto_ablkcipher_get_flags(tfm)); goto out; } else if (ret) @@ -897,6 +911,8 @@ static int test_skcipher(struct crypto_ablkcipher *tfm, int enc, temp = 0; ret = -EINVAL; sg_init_table(sg, template[i].np); + if (diff_dst) + sg_init_table(sgout, template[i].np); for (k = 0; k < template[i].np; k++) { if (WARN_ON(offset_in_page(IDX[k]) + template[i].tap[k] > PAGE_SIZE)) @@ -913,11 +929,24 @@ static int test_skcipher(struct crypto_ablkcipher *tfm, int enc, q[template[i].tap[k]] = 0; sg_set_buf(&sg[k], q, template[i].tap[k]); + if (diff_dst) { + q = xoutbuf[IDX[k] >> PAGE_SHIFT] + + offset_in_page(IDX[k]); + + sg_set_buf(&sgout[k], q, + template[i].tap[k]); + + memset(q, 0, template[i].tap[k]); + if (offset_in_page(q) + + template[i].tap[k] < PAGE_SIZE) + q[template[i].tap[k]] = 0; + } temp += template[i].tap[k]; } - ablkcipher_request_set_crypt(req, sg, sg, + ablkcipher_request_set_crypt(req, sg, + (diff_dst) ? sgout : sg, template[i].ilen, iv); ret = enc ? 
@@ -937,23 +966,25 @@ static int test_skcipher(struct crypto_ablkcipher *tfm, int enc, } /* fall through */ default: - printk(KERN_ERR "alg: skcipher: %s failed on " - "chunk test %d for %s: ret=%d\n", e, j, - algo, -ret); + pr_err("alg: skcipher%s: %s failed on chunk test %d for %s: ret=%d\n", + d, e, j, algo, -ret); goto out; } temp = 0; ret = -EINVAL; for (k = 0; k < template[i].np; k++) { - q = xbuf[IDX[k] >> PAGE_SHIFT] + - offset_in_page(IDX[k]); + if (diff_dst) + q = xoutbuf[IDX[k] >> PAGE_SHIFT] + + offset_in_page(IDX[k]); + else + q = xbuf[IDX[k] >> PAGE_SHIFT] + + offset_in_page(IDX[k]); if (memcmp(q, template[i].result + temp, template[i].tap[k])) { - printk(KERN_ERR "alg: skcipher: Chunk " - "test %d failed on %s at page " - "%u for %s\n", j, e, k, algo); + pr_err("alg: skcipher%s: Chunk test %d failed on %s at page %u for %s\n", + d, j, e, k, algo); hexdump(q, template[i].tap[k]); goto out; } @@ -962,11 +993,8 @@ static int test_skcipher(struct crypto_ablkcipher *tfm, int enc, for (n = 0; offset_in_page(q + n) && q[n]; n++) ; if (n) { - printk(KERN_ERR "alg: skcipher: " - "Result buffer corruption in " - "chunk test %d on %s at page " - "%u for %s: %u bytes:\n", j, e, - k, algo, n); + pr_err("alg: skcipher%s: Result buffer corruption in chunk test %d on %s at page %u for %s: %u bytes:\n", + d, j, e, k, algo, n); hexdump(q, n); goto out; } @@ -979,11 +1007,28 @@ static int test_skcipher(struct crypto_ablkcipher *tfm, int enc, out: ablkcipher_request_free(req); + if (diff_dst) + testmgr_free_buf(xoutbuf); +out_nooutbuf: testmgr_free_buf(xbuf); out_nobuf: return ret; } +static int test_skcipher(struct crypto_ablkcipher *tfm, int enc, + struct cipher_testvec *template, unsigned int tcount) +{ + int ret; + + /* test 'dst == src' case */ + ret = __test_skcipher(tfm, enc, template, tcount, false); + if (ret) + return ret; + + /* test 'dst != src' case */ + return __test_skcipher(tfm, enc, template, tcount, true); +} + static int test_comp(struct crypto_comp *tfm, struct comp_testvec *ctemplate, struct comp_testvec *dtemplate, int ctcount, int dtcount) { -- cgit v1.2.3 From d8a32ac25698cd60b02bed2100379803c7f964e3 Mon Sep 17 00:00:00 2001 From: Jussi Kivilinna Date: Fri, 21 Sep 2012 10:26:52 +0300 Subject: crypto: testmgr - make test_aead also test 'dst != src' code paths Currently test_aead uses the same buffer for destination and source. However, in many places the 'dst != src' case takes a different path than the 'dst == src' case. Therefore make test_aead also run tests with the destination buffer different from the source buffer. Signed-off-by: Jussi Kivilinna Acked-by: David S.
Miller Signed-off-by: Herbert Xu --- crypto/testmgr.c | 153 ++++++++++++++++++++++++++++++++++++++----------------- 1 file changed, 105 insertions(+), 48 deletions(-) (limited to 'crypto') diff --git a/crypto/testmgr.c b/crypto/testmgr.c index 00f54d54fc44..941d75cd1f7c 100644 --- a/crypto/testmgr.c +++ b/crypto/testmgr.c @@ -358,8 +358,9 @@ out_nobuf: return ret; } -static int test_aead(struct crypto_aead *tfm, int enc, - struct aead_testvec *template, unsigned int tcount) +static int __test_aead(struct crypto_aead *tfm, int enc, + struct aead_testvec *template, unsigned int tcount, + const bool diff_dst) { const char *algo = crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm)); unsigned int i, j, k, n, temp; @@ -367,15 +368,18 @@ static int test_aead(struct crypto_aead *tfm, int enc, char *q; char *key; struct aead_request *req; - struct scatterlist sg[8]; - struct scatterlist asg[8]; - const char *e; + struct scatterlist *sg; + struct scatterlist *asg; + struct scatterlist *sgout; + const char *e, *d; struct tcrypt_result result; unsigned int authsize; void *input; + void *output; void *assoc; char iv[MAX_IVLEN]; char *xbuf[XBUFSIZE]; + char *xoutbuf[XBUFSIZE]; char *axbuf[XBUFSIZE]; if (testmgr_alloc_buf(xbuf)) @@ -383,6 +387,21 @@ static int test_aead(struct crypto_aead *tfm, int enc, if (testmgr_alloc_buf(axbuf)) goto out_noaxbuf; + if (diff_dst && testmgr_alloc_buf(xoutbuf)) + goto out_nooutbuf; + + /* avoid "the frame size is larger than 1024 bytes" compiler warning */ + sg = kmalloc(sizeof(*sg) * 8 * (diff_dst ? 3 : 2), GFP_KERNEL); + if (!sg) + goto out_nosg; + asg = &sg[8]; + sgout = &asg[8]; + + if (diff_dst) + d = "-ddst"; + else + d = ""; + if (enc == ENCRYPT) e = "encryption"; else @@ -392,8 +411,8 @@ static int test_aead(struct crypto_aead *tfm, int enc, req = aead_request_alloc(tfm, GFP_KERNEL); if (!req) { - printk(KERN_ERR "alg: aead: Failed to allocate request for " - "%s\n", algo); + pr_err("alg: aead%s: Failed to allocate request for %s\n", + d, algo); goto out; } @@ -432,9 +451,8 @@ static int test_aead(struct crypto_aead *tfm, int enc, ret = crypto_aead_setkey(tfm, key, template[i].klen); if (!ret == template[i].fail) { - printk(KERN_ERR "alg: aead: setkey failed on " - "test %d for %s: flags=%x\n", j, algo, - crypto_aead_get_flags(tfm)); + pr_err("alg: aead%s: setkey failed on test %d for %s: flags=%x\n", + d, j, algo, crypto_aead_get_flags(tfm)); goto out; } else if (ret) continue; @@ -442,18 +460,26 @@ static int test_aead(struct crypto_aead *tfm, int enc, authsize = abs(template[i].rlen - template[i].ilen); ret = crypto_aead_setauthsize(tfm, authsize); if (ret) { - printk(KERN_ERR "alg: aead: Failed to set " - "authsize to %u on test %d for %s\n", - authsize, j, algo); + pr_err("alg: aead%s: Failed to set authsize to %u on test %d for %s\n", + d, authsize, j, algo); goto out; } sg_init_one(&sg[0], input, template[i].ilen + (enc ? authsize : 0)); + if (diff_dst) { + output = xoutbuf[0]; + sg_init_one(&sgout[0], output, + template[i].ilen + + (enc ? authsize : 0)); + } else { + output = input; + } + sg_init_one(&asg[0], assoc, template[i].alen); - aead_request_set_crypt(req, sg, sg, + aead_request_set_crypt(req, sg, (diff_dst) ? 
sgout : sg, template[i].ilen, iv); aead_request_set_assoc(req, asg, template[i].alen); @@ -466,10 +492,8 @@ static int test_aead(struct crypto_aead *tfm, int enc, case 0: if (template[i].novrfy) { /* verification was supposed to fail */ - printk(KERN_ERR "alg: aead: %s failed " - "on test %d for %s: ret was 0, " - "expected -EBADMSG\n", - e, j, algo); + pr_err("alg: aead%s: %s failed on test %d for %s: ret was 0, expected -EBADMSG\n", + d, e, j, algo); /* so really, we got a bad message */ ret = -EBADMSG; goto out; @@ -489,15 +513,15 @@ static int test_aead(struct crypto_aead *tfm, int enc, continue; /* fall through */ default: - printk(KERN_ERR "alg: aead: %s failed on test " - "%d for %s: ret=%d\n", e, j, algo, -ret); + pr_err("alg: aead%s: %s failed on test %d for %s: ret=%d\n", + d, e, j, algo, -ret); goto out; } - q = input; + q = output; if (memcmp(q, template[i].result, template[i].rlen)) { - printk(KERN_ERR "alg: aead: Test %d failed on " - "%s for %s\n", j, e, algo); + pr_err("alg: aead%s: Test %d failed on %s for %s\n", + d, j, e, algo); hexdump(q, template[i].rlen); ret = -EINVAL; goto out; @@ -522,9 +546,8 @@ static int test_aead(struct crypto_aead *tfm, int enc, ret = crypto_aead_setkey(tfm, key, template[i].klen); if (!ret == template[i].fail) { - printk(KERN_ERR "alg: aead: setkey failed on " - "chunk test %d for %s: flags=%x\n", j, - algo, crypto_aead_get_flags(tfm)); + pr_err("alg: aead%s: setkey failed on chunk test %d for %s: flags=%x\n", + d, j, algo, crypto_aead_get_flags(tfm)); goto out; } else if (ret) continue; @@ -533,6 +556,8 @@ static int test_aead(struct crypto_aead *tfm, int enc, ret = -EINVAL; sg_init_table(sg, template[i].np); + if (diff_dst) + sg_init_table(sgout, template[i].np); for (k = 0, temp = 0; k < template[i].np; k++) { if (WARN_ON(offset_in_page(IDX[k]) + template[i].tap[k] > PAGE_SIZE)) @@ -551,14 +576,26 @@ static int test_aead(struct crypto_aead *tfm, int enc, q[n] = 0; sg_set_buf(&sg[k], q, template[i].tap[k]); + + if (diff_dst) { + q = xoutbuf[IDX[k] >> PAGE_SHIFT] + + offset_in_page(IDX[k]); + + memset(q, 0, template[i].tap[k]); + if (offset_in_page(q) + n < PAGE_SIZE) + q[n] = 0; + + sg_set_buf(&sgout[k], q, + template[i].tap[k]); + } + temp += template[i].tap[k]; } ret = crypto_aead_setauthsize(tfm, authsize); if (ret) { - printk(KERN_ERR "alg: aead: Failed to set " - "authsize to %u on chunk test %d for " - "%s\n", authsize, j, algo); + pr_err("alg: aead%s: Failed to set authsize to %u on chunk test %d for %s\n", + d, authsize, j, algo); goto out; } @@ -571,6 +608,9 @@ static int test_aead(struct crypto_aead *tfm, int enc, } sg[k - 1].length += authsize; + + if (diff_dst) + sgout[k - 1].length += authsize; } sg_init_table(asg, template[i].anp); @@ -588,7 +628,7 @@ static int test_aead(struct crypto_aead *tfm, int enc, temp += template[i].atap[k]; } - aead_request_set_crypt(req, sg, sg, + aead_request_set_crypt(req, sg, (diff_dst) ? 
sgout : sg, template[i].ilen, iv); @@ -602,10 +642,8 @@ static int test_aead(struct crypto_aead *tfm, int enc, case 0: if (template[i].novrfy) { /* verification was supposed to fail */ - printk(KERN_ERR "alg: aead: %s failed " - "on chunk test %d for %s: ret " - "was 0, expected -EBADMSG\n", - e, j, algo); + pr_err("alg: aead%s: %s failed on chunk test %d for %s: ret was 0, expected -EBADMSG\n", + d, e, j, algo); /* so really, we got a bad message */ ret = -EBADMSG; goto out; @@ -625,32 +663,35 @@ static int test_aead(struct crypto_aead *tfm, int enc, continue; /* fall through */ default: - printk(KERN_ERR "alg: aead: %s failed on " - "chunk test %d for %s: ret=%d\n", e, j, - algo, -ret); + pr_err("alg: aead%s: %s failed on chunk test %d for %s: ret=%d\n", + d, e, j, algo, -ret); goto out; } ret = -EINVAL; for (k = 0, temp = 0; k < template[i].np; k++) { - q = xbuf[IDX[k] >> PAGE_SHIFT] + - offset_in_page(IDX[k]); + if (diff_dst) + q = xoutbuf[IDX[k] >> PAGE_SHIFT] + + offset_in_page(IDX[k]); + else + q = xbuf[IDX[k] >> PAGE_SHIFT] + + offset_in_page(IDX[k]); n = template[i].tap[k]; if (k == template[i].np - 1) n += enc ? authsize : -authsize; if (memcmp(q, template[i].result + temp, n)) { - printk(KERN_ERR "alg: aead: Chunk " - "test %d failed on %s at page " - "%u for %s\n", j, e, k, algo); + pr_err("alg: aead%s: Chunk test %d failed on %s at page %u for %s\n", + d, j, e, k, algo); hexdump(q, n); goto out; } q += n; if (k == template[i].np - 1 && !enc) { - if (memcmp(q, template[i].input + + if (!diff_dst && + memcmp(q, template[i].input + temp + n, authsize)) n = authsize; else @@ -661,11 +702,8 @@ static int test_aead(struct crypto_aead *tfm, int enc, ; } if (n) { - printk(KERN_ERR "alg: aead: Result " - "buffer corruption in chunk " - "test %d on %s at page %u for " - "%s: %u bytes:\n", j, e, k, - algo, n); + pr_err("alg: aead%s: Result buffer corruption in chunk test %d on %s at page %u for %s: %u bytes:\n", + d, j, e, k, algo, n); hexdump(q, n); goto out; } @@ -679,6 +717,11 @@ static int test_aead(struct crypto_aead *tfm, int enc, out: aead_request_free(req); + kfree(sg); +out_nosg: + if (diff_dst) + testmgr_free_buf(xoutbuf); +out_nooutbuf: testmgr_free_buf(axbuf); out_noaxbuf: testmgr_free_buf(xbuf); @@ -686,6 +729,20 @@ out_noxbuf: return ret; } +static int test_aead(struct crypto_aead *tfm, int enc, + struct aead_testvec *template, unsigned int tcount) +{ + int ret; + + /* test 'dst == src' case */ + ret = __test_aead(tfm, enc, template, tcount, false); + if (ret) + return ret; + + /* test 'dst != src' case */ + return __test_aead(tfm, enc, template, tcount, true); +} + static int test_cipher(struct crypto_cipher *tfm, int enc, struct cipher_testvec *template, unsigned int tcount) { -- cgit v1.2.3 From 54216bbd76aa394c93a9e92c0a2e3b314391b134 Mon Sep 17 00:00:00 2001 From: Jussi Kivilinna Date: Fri, 21 Sep 2012 10:27:10 +0300 Subject: crypto: tcrypt - add missing tests for camellia and ghash Add missing tests for ctr(camellia), lrw(camellia), xts(camellia) and ghash, as these have test vectors available. Signed-off-by: Jussi Kivilinna Acked-by: David S. 
Miller Signed-off-by: Herbert Xu --- crypto/tcrypt.c | 7 +++++++ 1 file changed, 7 insertions(+) (limited to 'crypto') diff --git a/crypto/tcrypt.c b/crypto/tcrypt.c index 1ac77b720b2d..e87fa60f5831 100644 --- a/crypto/tcrypt.c +++ b/crypto/tcrypt.c @@ -1112,6 +1112,9 @@ static int do_test(int m) case 32: ret += tcrypt_test("ecb(camellia)"); ret += tcrypt_test("cbc(camellia)"); + ret += tcrypt_test("ctr(camellia)"); + ret += tcrypt_test("lrw(camellia)"); + ret += tcrypt_test("xts(camellia)"); break; case 33: ret += tcrypt_test("sha224"); @@ -1165,6 +1168,10 @@ static int do_test(int m) ret += tcrypt_test("rfc4309(ccm(aes))"); break; + case 46: + ret += tcrypt_test("ghash"); + break; + case 100: ret += tcrypt_test("hmac(md5)"); break; -- cgit v1.2.3
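For context: the new tcrypt cases above are normally reached by loading the tcrypt module with the matching mode number (for example mode=32 for the camellia modes or mode=46 for ghash); tcrypt_test() in turn calls into testmgr, which after the patches in this series runs each cipher vector twice, once with dst == src and once with dst != src. The sketch below is a hypothetical throwaway module, not part of this patch series, that simply allocates the newly covered camellia transforms; on first instantiation this is enough to make testmgr run the corresponding self-tests, assuming the camellia, ctr, lrw and xts modules are available. The module name and log messages are made up for illustration.

/*
 * selftest_demo: hypothetical example module (not from this patch series).
 * Allocating each transform instantiates it, which lets testmgr run the
 * self-tests added above, including the new 'dst != src' passes.
 */
#include <linux/module.h>
#include <linux/err.h>
#include <linux/crypto.h>

static const char * const demo_algs[] = {
	"ctr(camellia)",
	"lrw(camellia)",
	"xts(camellia)",
};

static int __init selftest_demo_init(void)
{
	int i;

	for (i = 0; i < ARRAY_SIZE(demo_algs); i++) {
		struct crypto_ablkcipher *tfm;

		/* Allocation triggers instantiation and the testmgr run. */
		tfm = crypto_alloc_ablkcipher(demo_algs[i], 0, 0);
		if (IS_ERR(tfm)) {
			pr_err("selftest_demo: %s unavailable or failed: %ld\n",
			       demo_algs[i], PTR_ERR(tfm));
			continue;
		}
		pr_info("selftest_demo: %s allocated and self-tested\n",
			demo_algs[i]);
		crypto_free_ablkcipher(tfm);
	}
	return 0;
}

static void __exit selftest_demo_exit(void)
{
}

module_init(selftest_demo_init);
module_exit(selftest_demo_exit);
MODULE_LICENSE("GPL");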