Lines matching refs:sgd (sgd is the destination scatterlist, req->dst / areq->dst):
28 struct scatterlist *sgs, *sgd; in rk_cipher_need_fallback() local
37 sgd = req->dst; in rk_cipher_need_fallback()
38 while (sgs && sgd) { in rk_cipher_need_fallback()
43 if (!IS_ALIGNED(sgd->offset, sizeof(u32))) { in rk_cipher_need_fallback()
52 dtodo = min(len, sgd->length); in rk_cipher_need_fallback()
63 sgd = sg_next(sgd); in rk_cipher_need_fallback()
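From the matched lines above, rk_cipher_need_fallback() walks the source and destination scatterlists in lockstep and requests the software fallback when an entry is not 32-bit aligned or when the per-entry lengths do not pair up. A minimal sketch of that check, assuming the source-side (sgs) checks and the stodo/dtodo comparison, which are not visible in this sgd-only listing:

    /* Sketch reconstructed from the matched lines; the real function performs
     * additional checks (e.g. block-size multiples) that are omitted here. */
    static bool rk_cipher_need_fallback(struct skcipher_request *req)
    {
        struct scatterlist *sgs, *sgd;
        unsigned int stodo, dtodo, len;

        len = req->cryptlen;
        sgs = req->src;
        sgd = req->dst;
        while (sgs && sgd) {
            /* The engine needs 32-bit aligned buffers on both sides. */
            if (!IS_ALIGNED(sgs->offset, sizeof(u32)) ||
                !IS_ALIGNED(sgd->offset, sizeof(u32)))
                return true;
            stodo = min(len, sgs->length);
            dtodo = min(len, sgd->length);
            /* Source and destination entries must cover the same amount,
             * otherwise one hardware pass cannot handle the pair. */
            if (stodo != dtodo)
                return true;
            len -= stodo;
            sgs = sg_next(sgs);
            sgd = sg_next(sgd);
        }
        return false;
    }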
290 struct scatterlist *sgd, unsigned int todo) in crypto_dma_start() argument
294 CRYPTO_WRITE(dev, RK_CRYPTO_BTDMAS, sg_dma_address(sgd)); in crypto_dma_start()
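crypto_dma_start() programs the destination DMA address of the mapped entry into RK_CRYPTO_BTDMAS. A hedged sketch of the whole helper, assuming the source-side register names (RK_CRYPTO_BRDMAS / RK_CRYPTO_BRDMAL) and the control-register start write, which are not part of the matched lines:

    static void crypto_dma_start(struct rk_crypto_info *dev,
                                 struct scatterlist *sgs,
                                 struct scatterlist *sgd, unsigned int todo)
    {
        /* Source DMA address and length in 32-bit words (register names assumed). */
        CRYPTO_WRITE(dev, RK_CRYPTO_BRDMAS, sg_dma_address(sgs));
        CRYPTO_WRITE(dev, RK_CRYPTO_BRDMAL, todo);
        /* Destination DMA address, as seen in the matched line. */
        CRYPTO_WRITE(dev, RK_CRYPTO_BTDMAS, sg_dma_address(sgd));
        /* Kick off the block transfer (exact control bits assumed). */
        CRYPTO_WRITE(dev, RK_CRYPTO_CTRL,
                     RK_CRYPTO_BLOCK_START | (RK_CRYPTO_BLOCK_START << 16));
    }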
304 struct scatterlist *sgs, *sgd; in rk_cipher_run() local
334 sgd = areq->dst; in rk_cipher_run()
336 while (sgs && sgd && len) { in rk_cipher_run()
339 sgd = sg_next(sgd); in rk_cipher_run()
347 if (sgs == sgd) { in rk_cipher_run()
359 err = dma_map_sg(rkc->dev, sgd, 1, DMA_FROM_DEVICE); in rk_cipher_run()
378 crypto_dma_start(rkc, sgs, sgd, todo / 4); in rk_cipher_run()
386 if (sgs == sgd) { in rk_cipher_run()
390 dma_unmap_sg(rkc->dev, sgd, 1, DMA_FROM_DEVICE); in rk_cipher_run()
396 offset = sgd->length - ivsize; in rk_cipher_run()
397 scatterwalk_map_and_copy(iv, sgd, offset, ivsize, 0); in rk_cipher_run()
401 sgd = sg_next(sgd); in rk_cipher_run()
424 if (sgs == sgd) { in rk_cipher_run()
428 dma_unmap_sg(rkc->dev, sgd, 1, DMA_FROM_DEVICE); in rk_cipher_run()
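rk_cipher_run() consumes the destination list one entry at a time: when source and destination point at the same entry a single mapping is shared, otherwise sgd is mapped DMA_FROM_DEVICE; the transfer is started with a word count of todo / 4; the mapping is released after completion (and again on the unwind path at the end of the function); and for chaining modes the last ivsize bytes of the just-written ciphertext segment are copied back as the next IV. A simplified sketch of that per-entry loop, assuming the bidirectional mapping for the in-place case, the error labels, and the completion wait, none of which are fully visible in the listing:

    /* Simplified sketch of the per-entry loop; hardware setup, IV register
     * programming and the decryption backup path are omitted. */
    while (sgs && sgd && len) {
        if (!sgs->length) {
            sgs = sg_next(sgs);
            sgd = sg_next(sgd);
            continue;
        }

        if (sgs == sgd) {
            /* In-place request: one mapping serves both directions (assumed). */
            err = dma_map_sg(rkc->dev, sgs, 1, DMA_BIDIRECTIONAL);
            if (err <= 0) {
                err = -EINVAL;
                goto theend;        /* unwind labels assumed */
            }
        } else {
            err = dma_map_sg(rkc->dev, sgs, 1, DMA_TO_DEVICE);
            if (err <= 0) {
                err = -EINVAL;
                goto theend;
            }
            err = dma_map_sg(rkc->dev, sgd, 1, DMA_FROM_DEVICE);
            if (err <= 0) {
                err = -EINVAL;
                goto theend_sgs;    /* error path still unmaps what was mapped */
            }
        }

        todo = min(sg_dma_len(sgs), len);
        len -= todo;
        /* The engine counts in 32-bit words, hence todo / 4. */
        crypto_dma_start(rkc, sgs, sgd, todo / 4);

        /* ... wait for the completion signalled by the interrupt handler ... */

        if (sgs == sgd) {
            dma_unmap_sg(rkc->dev, sgs, 1, DMA_BIDIRECTIONAL);
        } else {
            dma_unmap_sg(rkc->dev, sgs, 1, DMA_TO_DEVICE);
            dma_unmap_sg(rkc->dev, sgd, 1, DMA_FROM_DEVICE);
        }

        if (ivsize) {
            /* Carry the last ciphertext block forward as the next IV. */
            offset = sgd->length - ivsize;
            scatterwalk_map_and_copy(iv, sgd, offset, ivsize, 0);
        }

        sgs = sg_next(sgs);
        sgd = sg_next(sgd);
    }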