author		Harsh Jain <harsh@chelsio.com>		2017-06-15 09:13:44 +0200
committer	Herbert Xu <herbert@gondor.apana.org.au>	2017-06-20 05:21:39 +0200
commit		ee0863ba118d37609a795a15c08e9acf6809c038
tree		01f906bee02fa86cbc6d88fdb64c8fbe5ad8c9ba /drivers/crypto/chelsio
parent		crypto: chcr - Add ctr mode and process large sg entries for cipher
chcr - Add debug counters
Count the types of operations carried out by the hardware.
Signed-off-by: Harsh Jain <harsh@chelsio.com>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
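
The atomic_inc() sites added in this patch assume a per-adapter block of atomic counters, adap->chcr_stats; its definition lives on the cxgb4 side and is not part of this diff (which is limited to drivers/crypto/chelsio). A minimal sketch of what such a counter block could look like, with field names inferred from the call sites below; this is illustrative only, not the actual cxgb4 header:

/* Illustrative sketch, not the real cxgb4 definition: a per-adapter
 * block of debug counters matching the atomic_inc() sites in this patch.
 * Assumes <linux/atomic.h> for atomic_t.
 */
struct chcr_stats_debug {
	atomic_t cipher_rqst;	/* cipher work requests sent to HW */
	atomic_t digest_rqst;	/* hash/HMAC work requests sent to HW */
	atomic_t aead_rqst;	/* AEAD (GCM/CCM) work requests sent to HW */
	atomic_t complete;	/* completions received from HW */
	atomic_t error;		/* completions flagged with MAC/pad errors */
	atomic_t fallback;	/* requests redirected to the SW fallback */
};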
Diffstat (limited to 'drivers/crypto/chelsio')
-rw-r--r--	drivers/crypto/chelsio/chcr_algo.c	16
-rw-r--r--	drivers/crypto/chelsio/chcr_core.c	2
2 files changed, 17 insertions(+), 1 deletion(-)
diff --git a/drivers/crypto/chelsio/chcr_algo.c b/drivers/crypto/chelsio/chcr_algo.c
index 03b817f185dd..2f388af232ee 100644
--- a/drivers/crypto/chelsio/chcr_algo.c
+++ b/drivers/crypto/chelsio/chcr_algo.c
@@ -154,6 +154,7 @@ int chcr_handle_resp(struct crypto_async_request *req, unsigned char *input,
 	struct uld_ctx *u_ctx = ULD_CTX(ctx);
 	struct chcr_req_ctx ctx_req;
 	unsigned int digestsize, updated_digestsize;
+	struct adapter *adap = padap(ctx->dev);
 
 	switch (tfm->__crt_alg->cra_flags & CRYPTO_ALG_TYPE_MASK) {
 	case CRYPTO_ALG_TYPE_AEAD:
@@ -207,6 +208,7 @@ int chcr_handle_resp(struct crypto_async_request *req, unsigned char *input,
 		ctx_req.req.ahash_req->base.complete(req, err);
 		break;
 	}
+	atomic_inc(&adap->chcr_stats.complete);
 	return err;
 }
 
@@ -639,6 +641,7 @@ static struct sk_buff *create_cipher_wr(struct cipher_wr_param *wrparam)
 	unsigned int ivsize = AES_BLOCK_SIZE, kctx_len;
 	gfp_t flags = wrparam->req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP ?
 			GFP_KERNEL : GFP_ATOMIC;
+	struct adapter *adap = padap(ctx->dev);
 
 	phys_dsgl = get_space_for_phys_dsgl(reqctx->dst_nents);
 
@@ -701,6 +704,7 @@ static struct sk_buff *create_cipher_wr(struct cipher_wr_param *wrparam)
 	skb_set_transport_header(skb, transhdr_len);
 	write_buffer_to_skb(skb, &frags, reqctx->iv, ivsize);
 	write_sg_to_skb(skb, &frags, wrparam->srcsg, wrparam->bytes);
+	atomic_inc(&adap->chcr_stats.cipher_rqst);
 	create_wreq(ctx, chcr_req, &(wrparam->req->base), skb, kctx_len, 0, 1,
 			sizeof(struct cpl_rx_phys_dsgl) + phys_dsgl,
 			ablkctx->ciph_mode == CHCR_SCMD_CIPHER_MODE_AES_CBC);
@@ -1337,6 +1341,7 @@ static struct sk_buff *create_hash_wr(struct ahash_request *req,
 	u8 hash_size_in_response = 0;
 	gfp_t flags = req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP ?
 		GFP_KERNEL : GFP_ATOMIC;
+	struct adapter *adap = padap(ctx->dev);
 
 	iopad_alignment = KEYCTX_ALIGN_PAD(digestsize);
 	kctx_len = param->alg_prm.result_size + iopad_alignment;
@@ -1393,7 +1398,7 @@ static struct sk_buff *create_hash_wr(struct ahash_request *req,
 			    param->bfr_len);
 	if (param->sg_len != 0)
 		write_sg_to_skb(skb, &frags, req->src, param->sg_len);
-
+	atomic_inc(&adap->chcr_stats.digest_rqst);
 	create_wreq(ctx, chcr_req, &req->base, skb, kctx_len,
 		    hash_size_in_response, 0, DUMMY_BYTES, 0);
 	req_ctx->skb = skb;
@@ -1873,6 +1878,7 @@ static struct sk_buff *create_authenc_wr(struct aead_request *req,
 	int null = 0;
 	gfp_t flags = req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP ?
 		GFP_KERNEL : GFP_ATOMIC;
+	struct adapter *adap = padap(ctx->dev);
 
 	if (aeadctx->enckey_len == 0 || (req->cryptlen == 0))
 		goto err;
@@ -1911,6 +1917,7 @@ static struct sk_buff *create_authenc_wr(struct aead_request *req,
 			    T6_MAX_AAD_SIZE,
 			    transhdr_len + (sgl_len(src_nent + MIN_AUTH_SG) * 8),
 			    op_type)) {
+		atomic_inc(&adap->chcr_stats.fallback);
 		return ERR_PTR(chcr_aead_fallback(req, op_type));
 	}
 	skb = alloc_skb((transhdr_len + sizeof(struct sge_opaque_hdr)), flags);
@@ -1983,6 +1990,7 @@ static struct sk_buff *create_authenc_wr(struct aead_request *req,
 	}
 	write_buffer_to_skb(skb, &frags, req->iv, ivsize);
 	write_sg_to_skb(skb, &frags, src, req->cryptlen);
+	atomic_inc(&adap->chcr_stats.cipher_rqst);
 	create_wreq(ctx, chcr_req, &req->base, skb, kctx_len, size, 1,
 		   sizeof(struct cpl_rx_phys_dsgl) + dst_size, 0);
 	reqctx->skb = skb;
@@ -2206,6 +2214,7 @@ static struct sk_buff *create_aead_ccm_wr(struct aead_request *req,
 	int error = -EINVAL, src_nent;
 	gfp_t flags = req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP ?
 		GFP_KERNEL : GFP_ATOMIC;
+	struct adapter *adap = padap(ctx->dev);
 
 	if (op_type && req->cryptlen < crypto_aead_authsize(tfm))
@@ -2245,6 +2254,7 @@ static struct sk_buff *create_aead_ccm_wr(struct aead_request *req,
 			    T6_MAX_AAD_SIZE - 18,
 			    transhdr_len + (sgl_len(src_nent + MIN_CCM_SG) * 8),
 			    op_type)) {
+		atomic_inc(&adap->chcr_stats.fallback);
 		return ERR_PTR(chcr_aead_fallback(req, op_type));
 	}
 
@@ -2282,6 +2292,7 @@ static struct sk_buff *create_aead_ccm_wr(struct aead_request *req,
 	skb_set_transport_header(skb, transhdr_len);
 	frags = fill_aead_req_fields(skb, req, src, ivsize, aeadctx);
+	atomic_inc(&adap->chcr_stats.aead_rqst);
 	create_wreq(ctx, chcr_req, &req->base, skb, kctx_len, 0, 1,
 		    sizeof(struct cpl_rx_phys_dsgl) + dst_size, 0);
 	reqctx->skb = skb;
@@ -2316,6 +2327,7 @@ static struct sk_buff *create_gcm_wr(struct aead_request *req,
 	int error = -EINVAL, src_nent;
 	gfp_t flags = req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP ?
 		GFP_KERNEL : GFP_ATOMIC;
+	struct adapter *adap = padap(ctx->dev);
 
 	/* validate key size */
 	if (aeadctx->enckey_len == 0)
@@ -2355,6 +2367,7 @@ static struct sk_buff *create_gcm_wr(struct aead_request *req,
 			    T6_MAX_AAD_SIZE,
 			    transhdr_len + (sgl_len(src_nent + MIN_GCM_SG) * 8),
 			    op_type)) {
+		atomic_inc(&adap->chcr_stats.fallback);
 		return ERR_PTR(chcr_aead_fallback(req, op_type));
 	}
 	skb = alloc_skb((transhdr_len + sizeof(struct sge_opaque_hdr)), flags);
@@ -2421,6 +2434,7 @@ static struct sk_buff *create_gcm_wr(struct aead_request *req,
 	write_sg_to_skb(skb, &frags, req->src, assoclen);
 	write_buffer_to_skb(skb, &frags, reqctx->iv, ivsize);
 	write_sg_to_skb(skb, &frags, src, req->cryptlen);
+	atomic_inc(&adap->chcr_stats.aead_rqst);
 	create_wreq(ctx, chcr_req, &req->base, skb, kctx_len, size, 1,
 		    sizeof(struct cpl_rx_phys_dsgl) + dst_size,
 		    reqctx->verify);
diff --git a/drivers/crypto/chelsio/chcr_core.c b/drivers/crypto/chelsio/chcr_core.c
index a6c938a913c7..5ae659af6a90 100644
--- a/drivers/crypto/chelsio/chcr_core.c
+++ b/drivers/crypto/chelsio/chcr_core.c
@@ -100,6 +100,7 @@ static int cpl_fw6_pld_handler(struct chcr_dev *dev,
 	struct cpl_fw6_pld *fw6_pld;
 	u32 ack_err_status = 0;
 	int error_status = 0;
+	struct adapter *adap = padap(dev);
 
 	fw6_pld = (struct cpl_fw6_pld *)input;
 	req = (struct crypto_async_request *)(uintptr_t)be64_to_cpu(
@@ -111,6 +112,7 @@ static int cpl_fw6_pld_handler(struct chcr_dev *dev,
 		if (CHK_MAC_ERR_BIT(ack_err_status) ||
 		    CHK_PAD_ERR_BIT(ack_err_status))
 			error_status = -EBADMSG;
+		atomic_inc(&adap->chcr_stats.error);
 	}
 	/* call completion callback with failure status */
 	if (req) {
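
Because the new fields are plain atomic_t counters bumped on the fast path, no locking is needed to read them; a debugfs dump can simply snapshot each one with atomic_read(). A minimal sketch of such a read-out under the assumptions above (chcr_stats_show and the output format are hypothetical, not the driver's actual debugfs code):

static int chcr_stats_show(struct seq_file *seq, void *v)
{
	/* Sketch only: assumes 'struct chcr_stats_debug chcr_stats' is
	 * embedded in 'struct adapter', as used by the patch above.
	 */
	struct adapter *adap = seq->private;

	seq_puts(seq, "Chelsio crypto accelerator stats\n");
	seq_printf(seq, "Cipher Ops:  %d\n",
		   atomic_read(&adap->chcr_stats.cipher_rqst));
	seq_printf(seq, "Digest Ops:  %d\n",
		   atomic_read(&adap->chcr_stats.digest_rqst));
	seq_printf(seq, "AEAD Ops:    %d\n",
		   atomic_read(&adap->chcr_stats.aead_rqst));
	seq_printf(seq, "Completions: %d\n",
		   atomic_read(&adap->chcr_stats.complete));
	seq_printf(seq, "Errors:      %d\n",
		   atomic_read(&adap->chcr_stats.error));
	seq_printf(seq, "Fallbacks:   %d\n",
		   atomic_read(&adap->chcr_stats.fallback));
	return 0;
}

Wiring this up would still need a debugfs file registration (for example DEFINE_SHOW_ATTRIBUTE() plus debugfs_create_file()), which is likewise outside the scope of this diff.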