author		Herbert Xu <herbert@gondor.apana.org.au>	2022-12-02 12:20:49 +0300
committer	Herbert Xu <herbert@gondor.apana.org.au>	2022-12-09 13:45:00 +0300
commit		99c6b20edfc031610240afca97ba9be5ec6f5750 (patch)
tree		d915c78e08364358e435944bb5b994ffa354616c /drivers/crypto/ccp/ccp-crypto-aes-cmac.c
parent		1c64a7e1f931821acadf964c5ddb0dc41abf9e20 (diff)
download	linux-99c6b20edfc031610240afca97ba9be5ec6f5750.tar.xz
crypto: ccp - Set DMA alignment explicitly
This driver has been implicitly relying on kmalloc alignment to be
sufficient for DMA.  This may no longer be the case with upcoming
arm64 changes.

This patch changes it to explicitly request DMA alignment from the
Crypto API.

Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
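The conversion pattern applied throughout the diff below can be summarized
in one place. The following is a minimal sketch only, not code from this
patch: the driver, its my_* identifiers (my_ctx, my_req_ctx, my_ahash_init,
my_cra_init, my_alg, my_register, "my-aes-cmac") and the context layouts are
made up for illustration. It uses the DMA-aware helpers the patch switches
to (crypto_tfm_ctx_dma(), ahash_request_ctx_dma(),
crypto_ahash_set_reqsize_dma(), crypto_dma_padding()) in place of the plain
accessors, so the contexts are explicitly padded and aligned for DMA rather
than relying on kmalloc alignment:

/*
 * Minimal sketch (hypothetical driver, not taken from this patch):
 * contexts are reached through the *_dma() helpers and sized with
 * DMA padding instead of assuming kmalloc alignment is enough.
 * Other ahash callbacks are omitted for brevity.
 */
#include <linux/module.h>
#include <linux/string.h>
#include <crypto/aes.h>
#include <crypto/algapi.h>
#include <crypto/internal/hash.h>

struct my_ctx {
	unsigned int key_len;
};

struct my_req_ctx {
	u8 buf[AES_BLOCK_SIZE];
};

static int my_ahash_init(struct ahash_request *req)
{
	/* DMA-aligned request context instead of ahash_request_ctx() */
	struct my_req_ctx *rctx = ahash_request_ctx_dma(req);

	memset(rctx, 0, sizeof(*rctx));
	return 0;
}

static int my_cra_init(struct crypto_tfm *tfm)
{
	/* DMA-aligned tfm context instead of crypto_tfm_ctx() */
	struct my_ctx *ctx = crypto_tfm_ctx_dma(tfm);
	struct crypto_ahash *ahash = __crypto_ahash_cast(tfm);

	ctx->key_len = 0;
	/* Reserve request-context space with DMA padding included */
	crypto_ahash_set_reqsize_dma(ahash, sizeof(struct my_req_ctx));
	return 0;
}

static struct ahash_alg my_alg = {
	.init	= my_ahash_init,
	.halg	= {
		.digestsize	= AES_BLOCK_SIZE,	/* CMAC tag length */
		.base		= {
			.cra_name		= "cmac(aes)",
			.cra_driver_name	= "my-aes-cmac",
			.cra_flags		= CRYPTO_ALG_ASYNC,
			.cra_blocksize		= AES_BLOCK_SIZE,
			.cra_init		= my_cra_init,
			.cra_module		= THIS_MODULE,
		},
	},
};

static int my_register(void)
{
	/* crypto_dma_padding() is not a compile-time constant, so the
	 * tfm context size is filled in at registration time, as the
	 * ccp driver does below. */
	my_alg.halg.base.cra_ctxsize = sizeof(struct my_ctx) +
				       crypto_dma_padding();
	return crypto_register_ahash(&my_alg);
}

The reqsize/ctxsize adjustments matter because the API places both contexts
inline after its own bookkeeping; the *_dma() accessors then return a
pointer rounded up to the DMA alignment within that padded area.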
Diffstat (limited to 'drivers/crypto/ccp/ccp-crypto-aes-cmac.c')
-rw-r--r--	drivers/crypto/ccp/ccp-crypto-aes-cmac.c	21
1 file changed, 11 insertions(+), 10 deletions(-)
diff --git a/drivers/crypto/ccp/ccp-crypto-aes-cmac.c b/drivers/crypto/ccp/ccp-crypto-aes-cmac.c
index 11a305fa19e6..d8426bdf3190 100644
--- a/drivers/crypto/ccp/ccp-crypto-aes-cmac.c
+++ b/drivers/crypto/ccp/ccp-crypto-aes-cmac.c
@@ -25,7 +25,7 @@ static int ccp_aes_cmac_complete(struct crypto_async_request *async_req,
 {
 	struct ahash_request *req = ahash_request_cast(async_req);
 	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
-	struct ccp_aes_cmac_req_ctx *rctx = ahash_request_ctx(req);
+	struct ccp_aes_cmac_req_ctx *rctx = ahash_request_ctx_dma(req);
 	unsigned int digest_size = crypto_ahash_digestsize(tfm);
 
 	if (ret)
@@ -56,8 +56,8 @@ static int ccp_do_cmac_update(struct ahash_request *req, unsigned int nbytes,
 			      unsigned int final)
 {
 	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
-	struct ccp_ctx *ctx = crypto_ahash_ctx(tfm);
-	struct ccp_aes_cmac_req_ctx *rctx = ahash_request_ctx(req);
+	struct ccp_ctx *ctx = crypto_ahash_ctx_dma(tfm);
+	struct ccp_aes_cmac_req_ctx *rctx = ahash_request_ctx_dma(req);
 	struct scatterlist *sg, *cmac_key_sg = NULL;
 	unsigned int block_size =
 		crypto_tfm_alg_blocksize(crypto_ahash_tfm(tfm));
@@ -182,7 +182,7 @@ e_free:
 
 static int ccp_aes_cmac_init(struct ahash_request *req)
 {
-	struct ccp_aes_cmac_req_ctx *rctx = ahash_request_ctx(req);
+	struct ccp_aes_cmac_req_ctx *rctx = ahash_request_ctx_dma(req);
 
 	memset(rctx, 0, sizeof(*rctx));
 
@@ -219,7 +219,7 @@ static int ccp_aes_cmac_digest(struct ahash_request *req)
 
 static int ccp_aes_cmac_export(struct ahash_request *req, void *out)
 {
-	struct ccp_aes_cmac_req_ctx *rctx = ahash_request_ctx(req);
+	struct ccp_aes_cmac_req_ctx *rctx = ahash_request_ctx_dma(req);
 	struct ccp_aes_cmac_exp_ctx state;
 
 	/* Don't let anything leak to 'out' */
@@ -238,7 +238,7 @@ static int ccp_aes_cmac_export(struct ahash_request *req, void *out)
 
 static int ccp_aes_cmac_import(struct ahash_request *req, const void *in)
 {
-	struct ccp_aes_cmac_req_ctx *rctx = ahash_request_ctx(req);
+	struct ccp_aes_cmac_req_ctx *rctx = ahash_request_ctx_dma(req);
 	struct ccp_aes_cmac_exp_ctx state;
 
 	/* 'in' may not be aligned so memcpy to local variable */
@@ -256,7 +256,7 @@ static int ccp_aes_cmac_import(struct ahash_request *req, const void *in)
 static int ccp_aes_cmac_setkey(struct crypto_ahash *tfm, const u8 *key,
 			       unsigned int key_len)
 {
-	struct ccp_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
+	struct ccp_ctx *ctx = crypto_ahash_ctx_dma(tfm);
 	struct ccp_crypto_ahash_alg *alg =
 		ccp_crypto_ahash_alg(crypto_ahash_tfm(tfm));
 	u64 k0_hi, k0_lo, k1_hi, k1_lo, k2_hi, k2_lo;
@@ -334,13 +334,14 @@ static int ccp_aes_cmac_setkey(struct crypto_ahash *tfm, const u8 *key,
 
 static int ccp_aes_cmac_cra_init(struct crypto_tfm *tfm)
 {
-	struct ccp_ctx *ctx = crypto_tfm_ctx(tfm);
+	struct ccp_ctx *ctx = crypto_tfm_ctx_dma(tfm);
 	struct crypto_ahash *ahash = __crypto_ahash_cast(tfm);
 
 	ctx->complete = ccp_aes_cmac_complete;
 	ctx->u.aes.key_len = 0;
 
-	crypto_ahash_set_reqsize(ahash, sizeof(struct ccp_aes_cmac_req_ctx));
+	crypto_ahash_set_reqsize_dma(ahash,
+				     sizeof(struct ccp_aes_cmac_req_ctx));
 
 	return 0;
 }
@@ -382,7 +383,7 @@ int ccp_register_aes_cmac_algs(struct list_head *head)
 		     CRYPTO_ALG_KERN_DRIVER_ONLY |
 		     CRYPTO_ALG_NEED_FALLBACK;
 	base->cra_blocksize = AES_BLOCK_SIZE;
-	base->cra_ctxsize = sizeof(struct ccp_ctx);
+	base->cra_ctxsize = sizeof(struct ccp_ctx) + crypto_dma_padding();
 	base->cra_priority = CCP_CRA_PRIORITY;
 	base->cra_init = ccp_aes_cmac_cra_init;
 	base->cra_module = THIS_MODULE;