author    Eric Biggers <ebiggers@google.com>    2018-02-14 21:42:20 +0300
committer Herbert Xu <herbert@gondor.apana.org.au>    2018-02-22 17:16:54 +0300
commit    c8c36413ca8ccbf7a0afe71247fc4617ee2dfcfe
tree      5d93fa8434763f697ddde4f90833f0ab563ea5f2 /crypto
parent    da7a0ab5b4babbe5d7a46f852582be06a00a28f0
crypto: speck - export common helpers
Export the Speck constants and transform context and the ->setkey(), ->encrypt(), and ->decrypt() functions so that they can be reused by the ARM NEON implementation of Speck-XTS. The generic key expansion code will be reused because it is not performance-critical and is not vectorizable, while the generic encryption and decryption functions are needed as fallbacks and for the XTS tweak encryption.

Signed-off-by: Eric Biggers <ebiggers@google.com>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
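For illustration, the reuse pattern described above would look roughly like the following in an architecture-specific glue driver. This is a minimal sketch, not the actual ARM NEON code: struct speck128_xts_ctx and speck128_xts_setkey() are hypothetical names invented here, while struct speck128_tfm_ctx and crypto_speck128_setkey() are the helpers this patch exports.

#include <crypto/internal/skcipher.h>
#include <crypto/speck.h>
#include <crypto/xts.h>

/* Hypothetical XTS context: one expanded key for the data, one for the tweak. */
struct speck128_xts_ctx {
	struct speck128_tfm_ctx main_key;
	struct speck128_tfm_ctx tweak_key;
};

static int speck128_xts_setkey(struct crypto_skcipher *tfm, const u8 *key,
			       unsigned int keylen)
{
	struct speck128_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err;

	err = xts_verify_key(tfm, key, keylen);
	if (err)
		return err;

	keylen /= 2;

	/*
	 * Key expansion is not performance-critical and not vectorizable,
	 * so the exported generic helper is reused as-is.
	 */
	err = crypto_speck128_setkey(&ctx->main_key, key, keylen);
	if (err)
		return err;

	return crypto_speck128_setkey(&ctx->tweak_key, key + keylen, keylen);
}

In the same vein, crypto_speck128_encrypt() on the tweak key can compute the initial XTS tweak, and the exported generic encrypt/decrypt functions can serve as the scalar fallback on paths where NEON may not be used.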
Diffstat (limited to 'crypto')
-rw-r--r--  crypto/speck.c | 90
1 file changed, 49 insertions(+), 41 deletions(-)
diff --git a/crypto/speck.c b/crypto/speck.c
index 4e80ad76bcd7..58aa9f7f91f7 100644
--- a/crypto/speck.c
+++ b/crypto/speck.c
@@ -24,6 +24,7 @@
*/
#include <asm/unaligned.h>
+#include <crypto/speck.h>
#include <linux/bitops.h>
#include <linux/crypto.h>
#include <linux/init.h>
@@ -31,22 +32,6 @@
/* Speck128 */
-#define SPECK128_BLOCK_SIZE 16
-
-#define SPECK128_128_KEY_SIZE 16
-#define SPECK128_128_NROUNDS 32
-
-#define SPECK128_192_KEY_SIZE 24
-#define SPECK128_192_NROUNDS 33
-
-#define SPECK128_256_KEY_SIZE 32
-#define SPECK128_256_NROUNDS 34
-
-struct speck128_tfm_ctx {
- u64 round_keys[SPECK128_256_NROUNDS];
- int nrounds;
-};
-
static __always_inline void speck128_round(u64 *x, u64 *y, u64 k)
{
*x = ror64(*x, 8);
@@ -65,9 +50,9 @@ static __always_inline void speck128_unround(u64 *x, u64 *y, u64 k)
*x = rol64(*x, 8);
}
-static void speck128_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
+void crypto_speck128_encrypt(const struct speck128_tfm_ctx *ctx,
+ u8 *out, const u8 *in)
{
- const struct speck128_tfm_ctx *ctx = crypto_tfm_ctx(tfm);
u64 y = get_unaligned_le64(in);
u64 x = get_unaligned_le64(in + 8);
int i;
@@ -78,10 +63,16 @@ static void speck128_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
put_unaligned_le64(y, out);
put_unaligned_le64(x, out + 8);
}
+EXPORT_SYMBOL_GPL(crypto_speck128_encrypt);
-static void speck128_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
+static void speck128_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
+{
+ crypto_speck128_encrypt(crypto_tfm_ctx(tfm), out, in);
+}
+
+void crypto_speck128_decrypt(const struct speck128_tfm_ctx *ctx,
+ u8 *out, const u8 *in)
{
- const struct speck128_tfm_ctx *ctx = crypto_tfm_ctx(tfm);
u64 y = get_unaligned_le64(in);
u64 x = get_unaligned_le64(in + 8);
int i;
@@ -92,11 +83,16 @@ static void speck128_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
put_unaligned_le64(y, out);
put_unaligned_le64(x, out + 8);
}
+EXPORT_SYMBOL_GPL(crypto_speck128_decrypt);
-static int speck128_setkey(struct crypto_tfm *tfm, const u8 *key,
+static void speck128_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
+{
+ crypto_speck128_decrypt(crypto_tfm_ctx(tfm), out, in);
+}
+
+int crypto_speck128_setkey(struct speck128_tfm_ctx *ctx, const u8 *key,
unsigned int keylen)
{
- struct speck128_tfm_ctx *ctx = crypto_tfm_ctx(tfm);
u64 l[3];
u64 k;
int i;
@@ -138,21 +134,15 @@ static int speck128_setkey(struct crypto_tfm *tfm, const u8 *key,
return 0;
}
+EXPORT_SYMBOL_GPL(crypto_speck128_setkey);
-/* Speck64 */
-
-#define SPECK64_BLOCK_SIZE 8
-
-#define SPECK64_96_KEY_SIZE 12
-#define SPECK64_96_NROUNDS 26
-
-#define SPECK64_128_KEY_SIZE 16
-#define SPECK64_128_NROUNDS 27
+static int speck128_setkey(struct crypto_tfm *tfm, const u8 *key,
+ unsigned int keylen)
+{
+ return crypto_speck128_setkey(crypto_tfm_ctx(tfm), key, keylen);
+}
-struct speck64_tfm_ctx {
- u32 round_keys[SPECK64_128_NROUNDS];
- int nrounds;
-};
+/* Speck64 */
static __always_inline void speck64_round(u32 *x, u32 *y, u32 k)
{
@@ -172,9 +162,9 @@ static __always_inline void speck64_unround(u32 *x, u32 *y, u32 k)
*x = rol32(*x, 8);
}
-static void speck64_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
+void crypto_speck64_encrypt(const struct speck64_tfm_ctx *ctx,
+ u8 *out, const u8 *in)
{
- const struct speck64_tfm_ctx *ctx = crypto_tfm_ctx(tfm);
u32 y = get_unaligned_le32(in);
u32 x = get_unaligned_le32(in + 4);
int i;
@@ -185,10 +175,16 @@ static void speck64_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
put_unaligned_le32(y, out);
put_unaligned_le32(x, out + 4);
}
+EXPORT_SYMBOL_GPL(crypto_speck64_encrypt);
-static void speck64_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
+static void speck64_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
+{
+ crypto_speck64_encrypt(crypto_tfm_ctx(tfm), out, in);
+}
+
+void crypto_speck64_decrypt(const struct speck64_tfm_ctx *ctx,
+ u8 *out, const u8 *in)
{
- const struct speck64_tfm_ctx *ctx = crypto_tfm_ctx(tfm);
u32 y = get_unaligned_le32(in);
u32 x = get_unaligned_le32(in + 4);
int i;
@@ -199,11 +195,16 @@ static void speck64_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
put_unaligned_le32(y, out);
put_unaligned_le32(x, out + 4);
}
+EXPORT_SYMBOL_GPL(crypto_speck64_decrypt);
-static int speck64_setkey(struct crypto_tfm *tfm, const u8 *key,
+static void speck64_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
+{
+ crypto_speck64_decrypt(crypto_tfm_ctx(tfm), out, in);
+}
+
+int crypto_speck64_setkey(struct speck64_tfm_ctx *ctx, const u8 *key,
unsigned int keylen)
{
- struct speck64_tfm_ctx *ctx = crypto_tfm_ctx(tfm);
u32 l[3];
u32 k;
int i;
@@ -236,6 +237,13 @@ static int speck64_setkey(struct crypto_tfm *tfm, const u8 *key,
return 0;
}
+EXPORT_SYMBOL_GPL(crypto_speck64_setkey);
+
+static int speck64_setkey(struct crypto_tfm *tfm, const u8 *key,
+ unsigned int keylen)
+{
+ return crypto_speck64_setkey(crypto_tfm_ctx(tfm), key, keylen);
+}
/* Algorithm definitions */
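The patch adds #include <crypto/speck.h>, but the header itself is outside the diff shown here (limited to 'crypto'). Inferred purely from the constants, context structs, and exported signatures visible in the diff above, its declarations would look roughly like this; the include-guard name and exact layout are assumptions:

/* Sketch of <crypto/speck.h>, reconstructed from the exports above. */
#ifndef _CRYPTO_SPECK_H
#define _CRYPTO_SPECK_H

#include <linux/types.h>

/* Speck128 */

#define SPECK128_BLOCK_SIZE	16

#define SPECK128_128_KEY_SIZE	16
#define SPECK128_128_NROUNDS	32

#define SPECK128_192_KEY_SIZE	24
#define SPECK128_192_NROUNDS	33

#define SPECK128_256_KEY_SIZE	32
#define SPECK128_256_NROUNDS	34

struct speck128_tfm_ctx {
	u64 round_keys[SPECK128_256_NROUNDS];
	int nrounds;
};

void crypto_speck128_encrypt(const struct speck128_tfm_ctx *ctx,
			     u8 *out, const u8 *in);

void crypto_speck128_decrypt(const struct speck128_tfm_ctx *ctx,
			     u8 *out, const u8 *in);

int crypto_speck128_setkey(struct speck128_tfm_ctx *ctx, const u8 *key,
			   unsigned int keylen);

/* Speck64 */

#define SPECK64_BLOCK_SIZE	8

#define SPECK64_96_KEY_SIZE	12
#define SPECK64_96_NROUNDS	26

#define SPECK64_128_KEY_SIZE	16
#define SPECK64_128_NROUNDS	27

struct speck64_tfm_ctx {
	u32 round_keys[SPECK64_128_NROUNDS];
	int nrounds;
};

void crypto_speck64_encrypt(const struct speck64_tfm_ctx *ctx,
			    u8 *out, const u8 *in);

void crypto_speck64_decrypt(const struct speck64_tfm_ctx *ctx,
			    u8 *out, const u8 *in);

int crypto_speck64_setkey(struct speck64_tfm_ctx *ctx, const u8 *key,
			  unsigned int keylen);

#endif /* _CRYPTO_SPECK_H */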