unsigned int keylen;
unsigned int enckeylen;
unsigned int authkeylen;
- dma_addr_t dma_buf;
- dma_addr_t dma_hw_context;
};
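With dma_buf and dma_hw_context removed from the shared tfm context, every piece of hash state the hardware reads or writes now lives in the per-request context, so two requests running on the same tfm can no longer clobber each other's DMA mappings. For orientation, the request context in the upstream driver looks roughly like this (fields abridged):

struct talitos_ahash_req_ctx {
	u32 hw_context[TALITOS_MDEU_MAX_CONTEXT_SIZE / sizeof(u32)];
	unsigned int hw_context_size;
	u8 buf[2][HASH_MAX_BLOCK_SIZE];	/* double-buffered partial block */
	int buf_idx;
	unsigned int swinit;
	unsigned int first;
	unsigned int last;
	unsigned int nbuf;		/* bytes pending in buf[buf_idx] */
	struct scatterlist *psrc;
};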
#define HASH_MAX_BLOCK_SIZE SHA512_BLOCK_SIZE
static void common_nonsnoop_hash_unmap(struct device *dev,
				       struct talitos_edesc *edesc,
				       struct ahash_request *areq)
{
struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
+ struct talitos_private *priv = dev_get_drvdata(dev);
+ bool is_sec1 = has_ftr_sec1(priv);
+ struct talitos_desc *desc = &edesc->desc;
+ struct talitos_desc *desc2 = desc + 1;
+
+ unmap_single_talitos_ptr(dev, &edesc->desc.ptr[5], DMA_FROM_DEVICE);
+ if (desc->next_desc &&
+ desc->ptr[5].ptr != desc2->ptr[5].ptr)
+ unmap_single_talitos_ptr(dev, &desc2->ptr[5], DMA_FROM_DEVICE);
talitos_sg_unmap(dev, edesc, req_ctx->psrc, NULL, 0, 0);
+ /* When using hashctx-in, must unmap it. */
+ if (from_talitos_ptr_len(&edesc->desc.ptr[1], is_sec1))
+ unmap_single_talitos_ptr(dev, &edesc->desc.ptr[1],
+ DMA_TO_DEVICE);
+ else if (desc->next_desc)
+ unmap_single_talitos_ptr(dev, &desc2->ptr[1],
+ DMA_TO_DEVICE);
+
+ if (is_sec1 && req_ctx->nbuf)
+ unmap_single_talitos_ptr(dev, &desc->ptr[3],
+ DMA_TO_DEVICE);
+
if (edesc->dma_len)
dma_unmap_single(dev, edesc->dma_link_tbl, edesc->dma_len,
DMA_BIDIRECTIONAL);
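Each unmap above undoes a map_single_talitos_ptr() call made while the descriptor was being built. These helpers already exist in the driver; as a reminder, they are approximately:

static void map_single_talitos_ptr(struct device *dev,
				   struct talitos_ptr *ptr,
				   unsigned int len, void *data,
				   enum dma_data_direction dir)
{
	dma_addr_t dma_addr = dma_map_single(dev, data, len, dir);
	struct talitos_private *priv = dev_get_drvdata(dev);
	bool is_sec1 = has_ftr_sec1(priv);

	to_talitos_ptr(ptr, dma_addr, len, is_sec1);
}

static void unmap_single_talitos_ptr(struct device *dev,
				     struct talitos_ptr *ptr,
				     enum dma_data_direction dir)
{
	struct talitos_private *priv = dev_get_drvdata(dev);
	bool is_sec1 = has_ftr_sec1(priv);

	dma_unmap_single(dev, be32_to_cpu(ptr->ptr),
			 from_talitos_ptr_len(ptr, is_sec1), dir);
}

Note how from_talitos_ptr_len() doubles as the "was this pointer mapped?" test in the unmap path above: a zero length in ptr[1] means no hashctx-in mapping was made for the first descriptor.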
/* hash context in */
if (!req_ctx->first || req_ctx->swinit) {
- to_talitos_ptr(&desc->ptr[1], ctx->dma_hw_context,
- req_ctx->hw_context_size, is_sec1);
+ map_single_talitos_ptr(dev, &desc->ptr[1],
+ req_ctx->hw_context_size,
+ (char *)req_ctx->hw_context,
+ DMA_TO_DEVICE);
req_ctx->swinit = 0;
}
	/* Indicate next op is not the first. */
	req_ctx->first = 0;

	/*
	 * data in
	 */
if (is_sec1 && req_ctx->nbuf) {
- dma_addr_t dma_buf = ctx->dma_buf + req_ctx->buf_idx *
- HASH_MAX_BLOCK_SIZE;
-
- to_talitos_ptr(&desc->ptr[3], dma_buf, req_ctx->nbuf, is_sec1);
+ map_single_talitos_ptr(dev, &desc->ptr[3], req_ctx->nbuf,
+ req_ctx->buf[req_ctx->buf_idx],
+ DMA_TO_DEVICE);
} else {
sg_count = talitos_sg_map(dev, req_ctx->psrc, length, edesc,
&desc->ptr[3], sg_count, offset, 0);
		if (sg_count > 1)
			sync_needed = true;
	}

	/* hash/HMAC out -or- hash context out */
	if (req_ctx->last)
		map_single_talitos_ptr(dev, &desc->ptr[5],
				       crypto_ahash_digestsize(tfm),
				       areq->result, DMA_FROM_DEVICE);
else
- to_talitos_ptr(&desc->ptr[5], ctx->dma_hw_context,
- req_ctx->hw_context_size, is_sec1);
+ map_single_talitos_ptr(dev, &desc->ptr[5],
+ req_ctx->hw_context_size,
+ req_ctx->hw_context, DMA_FROM_DEVICE);
/* last DWORD empty */
desc->hdr |= DESC_HDR_MODE0_MDEU_CONT;
desc->hdr &= ~DESC_HDR_DONE_NOTIFY;
- to_talitos_ptr(&desc2->ptr[1], ctx->dma_hw_context,
- req_ctx->hw_context_size, is_sec1);
-
+ if (desc->ptr[1].ptr)
+ copy_talitos_ptr(&desc2->ptr[1], &desc->ptr[1],
+ is_sec1);
+ else
+ map_single_talitos_ptr(dev, &desc2->ptr[1],
+ req_ctx->hw_context_size,
+ req_ctx->hw_context,
+ DMA_TO_DEVICE);
copy_talitos_ptr(&desc2->ptr[2], &desc->ptr[2], is_sec1);
sg_count = talitos_sg_map(dev, req_ctx->psrc, length, edesc,
&desc2->ptr[3], sg_count, offset, 0);
		if (sg_count > 1)
			sync_needed = true;
copy_talitos_ptr(&desc2->ptr[5], &desc->ptr[5], is_sec1);
if (req_ctx->last)
- to_talitos_ptr(&desc->ptr[5], ctx->dma_hw_context,
- req_ctx->hw_context_size, is_sec1);
+ map_single_talitos_ptr(dev, &desc->ptr[5],
+ req_ctx->hw_context_size,
+ req_ctx->hw_context,
+ DMA_FROM_DEVICE);
next_desc = dma_map_single(dev, &desc2->hdr1, TALITOS_DESC_SIZE,
DMA_BIDIRECTIONAL);
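On SEC1 a request that carries both leftover bytes in req_ctx->buf and fresh scatterlist data is split across two chained descriptors: desc hashes the buffered bytes, desc2 (desc + 1) continues with req_ctx->psrc, and the two are linked through desc->next_desc, mapped just above. That chaining is also why the unmap path at the top of this patch checks desc->next_desc and skips desc2->ptr[5] when it aliases desc->ptr[5]: the context-out pointer must not be unmapped twice.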
static int ahash_init(struct ahash_request *areq)
{
struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
- struct talitos_ctx *ctx = crypto_ahash_ctx(tfm);
- struct device *dev = ctx->dev;
struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
unsigned int size;
- struct talitos_private *priv = dev_get_drvdata(dev);
- bool is_sec1 = has_ftr_sec1(priv);
/* Initialize the context */
req_ctx->buf_idx = 0;
	size = (crypto_ahash_digestsize(tfm) <= SHA256_DIGEST_SIZE)
		? TALITOS_MDEU_CONTEXT_SIZE_MD5_SHA1_SHA256
		: TALITOS_MDEU_CONTEXT_SIZE_SHA384_SHA512;
req_ctx->hw_context_size = size;
- if (ctx->dma_hw_context)
- dma_unmap_single(dev, ctx->dma_hw_context, size,
- DMA_BIDIRECTIONAL);
- ctx->dma_hw_context = dma_map_single(dev, req_ctx->hw_context, size,
- DMA_BIDIRECTIONAL);
- if (ctx->dma_buf)
- dma_unmap_single(dev, ctx->dma_buf, sizeof(req_ctx->buf),
- DMA_TO_DEVICE);
- if (is_sec1)
- ctx->dma_buf = dma_map_single(dev, req_ctx->buf,
- sizeof(req_ctx->buf),
- DMA_TO_DEVICE);
return 0;
}
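After the patch ahash_init does no DMA work at all; modulo the flag initialization elided from the hunk, it reduces to roughly:

static int ahash_init(struct ahash_request *areq)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
	struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
	unsigned int size;

	/* Initialize the context */
	req_ctx->buf_idx = 0;
	req_ctx->nbuf = 0;
	req_ctx->first = 1;	/* first indicates h/w must init its registers */
	req_ctx->swinit = 0;	/* assume h/w init of context */
	size = (crypto_ahash_digestsize(tfm) <= SHA256_DIGEST_SIZE)
		? TALITOS_MDEU_CONTEXT_SIZE_MD5_SHA1_SHA256
		: TALITOS_MDEU_CONTEXT_SIZE_SHA384_SHA512;
	req_ctx->hw_context_size = size;

	return 0;
}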
static int ahash_init_sha224_swinit(struct ahash_request *areq)
{
struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
- struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
- struct talitos_ctx *ctx = crypto_ahash_ctx(tfm);
- struct device *dev = ctx->dev;
ahash_init(areq);
req_ctx->swinit = 1; /* prevent h/w initting context with sha256 values */
req_ctx->hw_context[8] = 0;
req_ctx->hw_context[9] = 0;
- dma_sync_single_for_device(dev, ctx->dma_hw_context,
- req_ctx->hw_context_size, DMA_TO_DEVICE);
-
return 0;
}
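The hunk elides the stores between the swinit flag and the two writes above: upstream, hw_context[0] through hw_context[7] are seeded with the SHA-224 initial digest words (SHA224_H0 through SHA224_H7), and words 8 and 9, zeroed here, hold the MDEU's 64-bit message count. Before the patch this seeded context had to be dma_sync'd into the persistent per-tfm mapping; now it is picked up automatically when ptr[1] is mapped for the first descriptor.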
/* request SEC to INIT hash. */
if (req_ctx->first && !req_ctx->swinit)
edesc->desc.hdr |= DESC_HDR_MODE0_MDEU_INIT;
- if (is_sec1) {
- dma_addr_t dma_buf = ctx->dma_buf + req_ctx->buf_idx *
- HASH_MAX_BLOCK_SIZE;
-
- dma_sync_single_for_device(dev, dma_buf,
- req_ctx->nbuf, DMA_TO_DEVICE);
- }
	/* When the tfm context has a keylen, it's an HMAC.
	 * A first or last (ie. not middle) descriptor must request HMAC.
	 */
	if (ctx->keylen && (req_ctx->first || req_ctx->last))
		edesc->desc.hdr |= DESC_HDR_MODE0_MDEU_PAD;
static int ahash_export(struct ahash_request *areq, void *out)
{
struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
struct talitos_export_state *export = out;
- struct crypto_ahash *ahash = crypto_ahash_reqtfm(areq);
- struct talitos_ctx *ctx = crypto_ahash_ctx(ahash);
- struct device *dev = ctx->dev;
- dma_sync_single_for_cpu(dev, ctx->dma_hw_context,
- req_ctx->hw_context_size, DMA_FROM_DEVICE);
memcpy(export->hw_context, req_ctx->hw_context,
req_ctx->hw_context_size);
memcpy(export->buf, req_ctx->buf[req_ctx->buf_idx], req_ctx->nbuf);
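ahash_export simply snapshots the request context into the caller's blob (the remaining swinit/first/last/to_hash_later/nbuf fields are copied the same way in the elided tail), and ahash_import below rebuilds the request context from it; neither needs to touch a long-lived DMA mapping anymore. The export format in the upstream header is approximately:

struct talitos_export_state {
	u32 hw_context[TALITOS_MDEU_MAX_CONTEXT_SIZE / sizeof(u32)];
	u8 buf[HASH_MAX_BLOCK_SIZE];
	unsigned int swinit;
	unsigned int first;
	unsigned int last;
	unsigned int to_hash_later;
	unsigned int nbuf;
};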
static int ahash_import(struct ahash_request *areq, const void *in)
{
	struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
const struct talitos_export_state *export = in;
unsigned int size;
- struct talitos_ctx *ctx = crypto_ahash_ctx(tfm);
- struct device *dev = ctx->dev;
- struct talitos_private *priv = dev_get_drvdata(dev);
- bool is_sec1 = has_ftr_sec1(priv);
memset(req_ctx, 0, sizeof(*req_ctx));
size = (crypto_ahash_digestsize(tfm) <= SHA256_DIGEST_SIZE)
? TALITOS_MDEU_CONTEXT_SIZE_MD5_SHA1_SHA256
: TALITOS_MDEU_CONTEXT_SIZE_SHA384_SHA512;
req_ctx->hw_context_size = size;
- if (ctx->dma_hw_context)
- dma_unmap_single(dev, ctx->dma_hw_context, size,
- DMA_BIDIRECTIONAL);
-
memcpy(req_ctx->hw_context, export->hw_context, size);
- ctx->dma_hw_context = dma_map_single(dev, req_ctx->hw_context, size,
- DMA_BIDIRECTIONAL);
- if (ctx->dma_buf)
- dma_unmap_single(dev, ctx->dma_buf, sizeof(req_ctx->buf),
- DMA_TO_DEVICE);
memcpy(req_ctx->buf[0], export->buf, export->nbuf);
- if (is_sec1)
- ctx->dma_buf = dma_map_single(dev, req_ctx->buf,
- sizeof(req_ctx->buf),
- DMA_TO_DEVICE);
req_ctx->swinit = export->swinit;
req_ctx->first = export->first;
req_ctx->last = export->last;
dma_unmap_single(dev, ctx->dma_key, ctx->keylen, DMA_TO_DEVICE);
}
-static void talitos_cra_exit_ahash(struct crypto_tfm *tfm)
-{
- struct talitos_ctx *ctx = crypto_tfm_ctx(tfm);
- struct device *dev = ctx->dev;
- unsigned int size;
-
- talitos_cra_exit(tfm);
-
- size = (crypto_ahash_digestsize(__crypto_ahash_cast(tfm)) <=
- SHA256_DIGEST_SIZE)
- ? TALITOS_MDEU_CONTEXT_SIZE_MD5_SHA1_SHA256
- : TALITOS_MDEU_CONTEXT_SIZE_SHA384_SHA512;
-
- if (ctx->dma_hw_context)
- dma_unmap_single(dev, ctx->dma_hw_context, size,
- DMA_BIDIRECTIONAL);
- if (ctx->dma_buf)
- dma_unmap_single(dev, ctx->dma_buf, HASH_MAX_BLOCK_SIZE * 2,
- DMA_TO_DEVICE);
-}
-
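Deleting talitos_cra_exit_ahash is the payoff of the whole change: with no per-tfm DMA mappings left for ahash, the generic exit handler, whose tail is shown above and which only unmaps the HMAC key, is sufficient. In the upstream driver it is roughly:

static void talitos_cra_exit(struct crypto_tfm *tfm)
{
	struct talitos_ctx *ctx = crypto_tfm_ctx(tfm);
	struct device *dev = ctx->dev;

	if (ctx->keylen)
		dma_unmap_single(dev, ctx->dma_key, ctx->keylen,
				 DMA_TO_DEVICE);
}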
/*
* given the alg's descriptor header template, determine whether descriptor
* type and primary/secondary execution units required match the hw
case CRYPTO_ALG_TYPE_AHASH:
alg = &t_alg->algt.alg.hash.halg.base;
alg->cra_init = talitos_cra_init_ahash;
- alg->cra_exit = talitos_cra_exit_ahash;
+ alg->cra_exit = talitos_cra_exit;
alg->cra_type = &crypto_ahash_type;
t_alg->algt.alg.hash.init = ahash_init;
t_alg->algt.alg.hash.update = ahash_update;