Diffstat (limited to 'drivers/crypto/allwinner/sun8i-ce/sun8i-ce-cipher.c')
 drivers/crypto/allwinner/sun8i-ce/sun8i-ce-cipher.c | 85 +++++++++++++++++++++++++++++++++++--------------------------------------------------
 1 file changed, 35 insertions(+), 50 deletions(-)
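
This change moves the per-request IV buffers (bounce_iv, backup_iv) out of the shared per-flow channel (struct sun8i_ce_flow) and into the request context (struct sun8i_cipher_req_ctx), and collapses the separate prepare/run/unprepare engine callbacks into a single sun8i_ce_cipher_do_one() handler. Below is a minimal sketch of the request-context layout this implies, covering only the fields the diff actually dereferences as rctx->; the real definition lives in sun8i-ce.h, which is not part of this diff, and the fixed-size AES_BLOCK_SIZE arrays are an assumption:

	/*
	 * Hypothetical sketch, not the verified upstream layout: only the
	 * members referenced through rctx-> in the diff below are shown.
	 */
	struct sun8i_cipher_req_ctx {
		u32 op_dir;			/* CE_ENCRYPTION or CE_DECRYPTION */
		int flow;			/* hardware flow (channel) index */
		dma_addr_t addr_iv;		/* DMA address of bounce_iv */
		dma_addr_t addr_key;		/* DMA address of the key */
		u8 backup_iv[AES_BLOCK_SIZE];	/* last ciphertext block, saved on decrypt */
		u8 bounce_iv[AES_BLOCK_SIZE];	/* DMA-able copy of areq->iv */
		int nr_sgs;			/* mapped source scatterlist entries */
		int nr_sgd;			/* mapped destination scatterlist entries */
	};

Keeping these buffers per request rather than per flow means preparing one request can no longer clobber the IV state of another request queued on the same flow.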
diff --git a/drivers/crypto/allwinner/sun8i-ce/sun8i-ce-cipher.c b/drivers/crypto/allwinner/sun8i-ce/sun8i-ce-cipher.c
index 5663df49dd81..021614b65e39 100644
--- a/drivers/crypto/allwinner/sun8i-ce/sun8i-ce-cipher.c
+++ b/drivers/crypto/allwinner/sun8i-ce/sun8i-ce-cipher.c
@@ -111,7 +111,7 @@ static int sun8i_ce_cipher_fallback(struct skcipher_request *areq)
if (IS_ENABLED(CONFIG_CRYPTO_DEV_SUN8I_CE_DEBUG)) {
struct skcipher_alg *alg = crypto_skcipher_alg(tfm);
- struct sun8i_ce_alg_template *algt __maybe_unused;
+ struct sun8i_ce_alg_template *algt;
algt = container_of(alg, struct sun8i_ce_alg_template,
alg.skcipher.base);
@@ -131,21 +131,19 @@ static int sun8i_ce_cipher_fallback(struct skcipher_request *areq)
return err;
}
-static int sun8i_ce_cipher_prepare(struct crypto_engine *engine, void *async_req)
+static int sun8i_ce_cipher_prepare(struct skcipher_request *areq,
+ struct ce_task *cet)
{
- struct skcipher_request *areq = container_of(async_req, struct skcipher_request, base);
struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
struct sun8i_cipher_tfm_ctx *op = crypto_skcipher_ctx(tfm);
struct sun8i_ce_dev *ce = op->ce;
struct sun8i_cipher_req_ctx *rctx = skcipher_request_ctx(areq);
struct skcipher_alg *alg = crypto_skcipher_alg(tfm);
struct sun8i_ce_alg_template *algt;
- struct sun8i_ce_flow *chan;
- struct ce_task *cet;
struct scatterlist *sg;
unsigned int todo, len, offset, ivsize;
u32 common, sym;
- int flow, i;
+ int i;
int nr_sgs = 0;
int nr_sgd = 0;
int err = 0;
@@ -163,14 +161,9 @@ static int sun8i_ce_cipher_prepare(struct crypto_engine *engine, void *async_req
if (IS_ENABLED(CONFIG_CRYPTO_DEV_SUN8I_CE_DEBUG))
algt->stat_req++;
- flow = rctx->flow;
-
- chan = &ce->chanlist[flow];
-
- cet = chan->tl;
memset(cet, 0, sizeof(struct ce_task));
- cet->t_id = cpu_to_le32(flow);
+ cet->t_id = cpu_to_le32(rctx->flow);
common = ce->variant->alg_cipher[algt->ce_algo_id];
common |= rctx->op_dir | CE_COMM_INT;
cet->t_common_ctl = cpu_to_le32(common);
@@ -209,11 +202,11 @@ static int sun8i_ce_cipher_prepare(struct crypto_engine *engine, void *async_req
if (areq->iv && ivsize > 0) {
if (rctx->op_dir & CE_DECRYPTION) {
offset = areq->cryptlen - ivsize;
- scatterwalk_map_and_copy(chan->backup_iv, areq->src,
+ scatterwalk_map_and_copy(rctx->backup_iv, areq->src,
offset, ivsize, 0);
}
- memcpy(chan->bounce_iv, areq->iv, ivsize);
- rctx->addr_iv = dma_map_single(ce->dev, chan->bounce_iv, ivsize,
+ memcpy(rctx->bounce_iv, areq->iv, ivsize);
+ rctx->addr_iv = dma_map_single(ce->dev, rctx->bounce_iv, ivsize,
DMA_TO_DEVICE);
if (dma_mapping_error(ce->dev, rctx->addr_iv)) {
dev_err(ce->dev, "Cannot DMA MAP IV\n");
@@ -276,7 +269,6 @@ static int sun8i_ce_cipher_prepare(struct crypto_engine *engine, void *async_req
goto theend_sgs;
}
- chan->timeout = areq->cryptlen;
rctx->nr_sgs = ns;
rctx->nr_sgd = nd;
return 0;
@@ -300,13 +292,13 @@ theend_iv:
offset = areq->cryptlen - ivsize;
if (rctx->op_dir & CE_DECRYPTION) {
- memcpy(areq->iv, chan->backup_iv, ivsize);
- memzero_explicit(chan->backup_iv, ivsize);
+ memcpy(areq->iv, rctx->backup_iv, ivsize);
+ memzero_explicit(rctx->backup_iv, ivsize);
} else {
scatterwalk_map_and_copy(areq->iv, areq->dst, offset,
ivsize, 0);
}
- memzero_explicit(chan->bounce_iv, ivsize);
+ memzero_explicit(rctx->bounce_iv, ivsize);
}
dma_unmap_single(ce->dev, rctx->addr_key, op->keylen, DMA_TO_DEVICE);
@@ -315,24 +307,17 @@ theend:
return err;
}
-static void sun8i_ce_cipher_unprepare(struct crypto_engine *engine,
- void *async_req)
+static void sun8i_ce_cipher_unprepare(struct skcipher_request *areq,
+ struct ce_task *cet)
{
- struct skcipher_request *areq = container_of(async_req, struct skcipher_request, base);
struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
struct sun8i_cipher_tfm_ctx *op = crypto_skcipher_ctx(tfm);
struct sun8i_ce_dev *ce = op->ce;
struct sun8i_cipher_req_ctx *rctx = skcipher_request_ctx(areq);
- struct sun8i_ce_flow *chan;
- struct ce_task *cet;
unsigned int ivsize, offset;
int nr_sgs = rctx->nr_sgs;
int nr_sgd = rctx->nr_sgd;
- int flow;
- flow = rctx->flow;
- chan = &ce->chanlist[flow];
- cet = chan->tl;
ivsize = crypto_skcipher_ivsize(tfm);
if (areq->src == areq->dst) {
@@ -349,43 +334,43 @@ static void sun8i_ce_cipher_unprepare(struct crypto_engine *engine,
DMA_TO_DEVICE);
offset = areq->cryptlen - ivsize;
if (rctx->op_dir & CE_DECRYPTION) {
- memcpy(areq->iv, chan->backup_iv, ivsize);
- memzero_explicit(chan->backup_iv, ivsize);
+ memcpy(areq->iv, rctx->backup_iv, ivsize);
+ memzero_explicit(rctx->backup_iv, ivsize);
} else {
scatterwalk_map_and_copy(areq->iv, areq->dst, offset,
ivsize, 0);
}
- memzero_explicit(chan->bounce_iv, ivsize);
+ memzero_explicit(rctx->bounce_iv, ivsize);
}
dma_unmap_single(ce->dev, rctx->addr_key, op->keylen, DMA_TO_DEVICE);
}
-static void sun8i_ce_cipher_run(struct crypto_engine *engine, void *areq)
-{
- struct skcipher_request *breq = container_of(areq, struct skcipher_request, base);
- struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(breq);
- struct sun8i_cipher_tfm_ctx *op = crypto_skcipher_ctx(tfm);
- struct sun8i_ce_dev *ce = op->ce;
- struct sun8i_cipher_req_ctx *rctx = skcipher_request_ctx(breq);
- int flow, err;
-
- flow = rctx->flow;
- err = sun8i_ce_run_task(ce, flow, crypto_tfm_alg_name(breq->base.tfm));
- sun8i_ce_cipher_unprepare(engine, areq);
- local_bh_disable();
- crypto_finalize_skcipher_request(engine, breq, err);
- local_bh_enable();
-}
-
int sun8i_ce_cipher_do_one(struct crypto_engine *engine, void *areq)
{
- int err = sun8i_ce_cipher_prepare(engine, areq);
+ struct skcipher_request *req = skcipher_request_cast(areq);
+ struct sun8i_cipher_req_ctx *rctx = skcipher_request_ctx(req);
+ struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
+ struct sun8i_cipher_tfm_ctx *ctx = crypto_skcipher_ctx(tfm);
+ struct sun8i_ce_dev *ce = ctx->ce;
+ struct sun8i_ce_flow *chan;
+ int err;
+
+ chan = &ce->chanlist[rctx->flow];
+ err = sun8i_ce_cipher_prepare(req, chan->tl);
if (err)
return err;
- sun8i_ce_cipher_run(engine, areq);
+ err = sun8i_ce_run_task(ce, rctx->flow,
+ crypto_tfm_alg_name(req->base.tfm));
+
+ sun8i_ce_cipher_unprepare(req, chan->tl);
+
+ local_bh_disable();
+ crypto_finalize_skcipher_request(engine, req, err);
+ local_bh_enable();
+
return 0;
}
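
With prepare and unprepare reduced to private helpers, sun8i_ce_cipher_do_one() becomes the single per-request entry point the crypto engine invokes. A hedged sketch of how such a handler is typically wired up through the engine-aware skcipher registration follows; the names come from include/crypto/engine.h, but this initializer is illustrative and not copied from sun8i-ce-core.c, which the diff does not include:

	/*
	 * Illustrative wiring only. alg.skcipher is a struct
	 * skcipher_engine_alg, as the container_of() on alg.skcipher.base
	 * in the first hunk above already implies.
	 */
	static struct sun8i_ce_alg_template ce_algs[] = {
	{
		.type = CRYPTO_ALG_TYPE_SKCIPHER,
		.ce_algo_id = CE_ID_CIPHER_AES,
		.alg.skcipher.base = {
			/* .base.cra_name, .setkey, .encrypt, .decrypt, ... */
		},
		.alg.skcipher.op = {
			.do_one_request = sun8i_ce_cipher_do_one,
		},
	},
	};

	/* Registration then goes through the engine-aware helper: */
	err = crypto_engine_register_skcipher(&ce_algs[0].alg.skcipher);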