author:    Leilei Zhao <leilei.zhao@atmel.com>  2015-04-07 11:45:10 +0200
committer: Herbert Xu <herbert@gondor.apana.org.au>  2015-04-08 16:20:04 +0200
commit:    289b2623df34ebec4c25b7d31804b70fc90b92c6 (patch)
tree:      6d45df75cece75529ccfd449a9987055e0abfaf6  /drivers/crypto/atmel-aes.c
parent:    crypto: atmel-aes - initialize spinlock in probe (diff)
download:  linux-289b2623df34ebec4c25b7d31804b70fc90b92c6.tar.xz
           linux-289b2623df34ebec4c25b7d31804b70fc90b92c6.zip
crypto: atmel-aes - sync the buf used in DMA or CPU
The input and output buffers in the Atmel AES driver are mapped for DMA transfer, but they are also accessed by the CPU when the requested crypt length is not larger than the threshold value of 16. When the CPU touches the buffers, their contents end up in cache lines, and when DMA uses the buffers again those cache lines may happen to be flushed to memory while the DMA transfer is already in progress. Use dma_sync_single_for_device() and dma_sync_single_for_cpu() around the DMA transfers to ensure DMA coherence and that the CPU always sees the correct values. This fixes the issue of the encrypted result periodically going wrong when running performance tests with OpenSSH.

Signed-off-by: Leilei Zhao <leilei.zhao@atmel.com>
Acked-by: Nicolas Ferre <nicolas.ferre@atmel.com>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
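For context, the pattern the patch relies on is the standard streaming-DMA ownership handoff: a buffer mapped once with dma_map_single() must be synced back to the CPU before the driver touches it, and synced to the device again before the next transfer starts. A minimal sketch of that handoff is shown below; the function name and the surrounding driver details are hypothetical and are not taken from atmel-aes.c.

```c
#include <linux/dma-mapping.h>
#include <linux/string.h>

/*
 * Hypothetical sketch of the streaming-DMA handoff used by the patch.
 * 'buf' was mapped once with dma_map_single(dev, buf, len, DMA_TO_DEVICE)
 * and 'dma_addr' is the handle returned by that mapping.
 */
static void example_cpu_fill_then_dma(struct device *dev, void *buf,
				      dma_addr_t dma_addr, size_t len,
				      const void *src)
{
	/* Return ownership to the CPU before it reads or writes the buffer. */
	dma_sync_single_for_cpu(dev, dma_addr, len, DMA_TO_DEVICE);

	memcpy(buf, src, len);	/* CPU fills the mapped bounce buffer */

	/* Hand the buffer back to the device before starting the transfer. */
	dma_sync_single_for_device(dev, dma_addr, len, DMA_TO_DEVICE);

	/* ...submit the DMA descriptor here (driver-specific)... */
}
```

The same rule applies in the other direction: before the CPU copies results out of a DMA_FROM_DEVICE buffer, it must call dma_sync_single_for_cpu() on it, which is exactly what the hunks below add to the CPU paths of the driver.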
Diffstat (limited to 'drivers/crypto/atmel-aes.c')
-rw-r--r--  drivers/crypto/atmel-aes.c | 16
1 file changed, 12 insertions(+), 4 deletions(-)
diff --git a/drivers/crypto/atmel-aes.c b/drivers/crypto/atmel-aes.c
index 15c7dbd0a13d..fb760664d28f 100644
--- a/drivers/crypto/atmel-aes.c
+++ b/drivers/crypto/atmel-aes.c
@@ -315,10 +315,10 @@ static int atmel_aes_crypt_dma(struct atmel_aes_dev *dd,
dd->dma_size = length;
- if (!(dd->flags & AES_FLAGS_FAST)) {
- dma_sync_single_for_device(dd->dev, dma_addr_in, length,
- DMA_TO_DEVICE);
- }
+ dma_sync_single_for_device(dd->dev, dma_addr_in, length,
+ DMA_TO_DEVICE);
+ dma_sync_single_for_device(dd->dev, dma_addr_out, length,
+ DMA_FROM_DEVICE);
if (dd->flags & AES_FLAGS_CFB8) {
dd->dma_lch_in.dma_conf.dst_addr_width =
@@ -391,6 +391,11 @@ static int atmel_aes_crypt_cpu_start(struct atmel_aes_dev *dd)
{
dd->flags &= ~AES_FLAGS_DMA;
+ dma_sync_single_for_cpu(dd->dev, dd->dma_addr_in,
+ dd->dma_size, DMA_TO_DEVICE);
+ dma_sync_single_for_cpu(dd->dev, dd->dma_addr_out,
+ dd->dma_size, DMA_FROM_DEVICE);
+
/* use cache buffers */
dd->nb_in_sg = atmel_aes_sg_length(dd->req, dd->in_sg);
if (!dd->nb_in_sg)
@@ -459,6 +464,9 @@ static int atmel_aes_crypt_dma_start(struct atmel_aes_dev *dd)
dd->flags |= AES_FLAGS_FAST;
} else {
+ dma_sync_single_for_cpu(dd->dev, dd->dma_addr_in,
+ dd->dma_size, DMA_TO_DEVICE);
+
/* use cache buffers */
count = atmel_aes_sg_copy(&dd->in_sg, &dd->in_offset,
dd->buf_in, dd->buflen, dd->total, 0);