/linux-6.12.1/drivers/crypto/intel/qat/qat_common/ |
D | qat_bl.c |
      111 if (unlikely(dma_mapping_error(dev, bufl->buffers[y].addr))) in __qat_bl_sgl_to_bufl()
      121 if (unlikely(dma_mapping_error(dev, blp))) in __qat_bl_sgl_to_bufl()
      166 if (unlikely(dma_mapping_error(dev, buffers[y].addr))) in __qat_bl_sgl_to_bufl()
      184 if (unlikely(dma_mapping_error(dev, bloutp))) in __qat_bl_sgl_to_bufl()
      197 if (!dma_mapping_error(dev, bloutp)) in __qat_bl_sgl_to_bufl()
      204 if (!dma_mapping_error(dev, buflout->buffers[i].addr)) in __qat_bl_sgl_to_bufl()
      214 if (!dma_mapping_error(dev, blp)) in __qat_bl_sgl_to_bufl()
      219 if (!dma_mapping_error(dev, bufl->buffers[i].addr)) in __qat_bl_sgl_to_bufl()
      263 if (!dma_mapping_error(dev, bl->buffers[i].addr)) in qat_bl_sgl_unmap()
      299 if (unlikely(dma_mapping_error(dev, bufl->buffers[y].addr))) in qat_bl_sgl_map()
      [all …]
|
D | qat_asym_algs.c |
      308 if (unlikely(dma_mapping_error(dev, qat_req->in.dh.in.b))) in qat_dh_compute_value()
      330 if (unlikely(dma_mapping_error(dev, qat_req->out.dh.r))) in qat_dh_compute_value()
      339 if (unlikely(dma_mapping_error(dev, qat_req->phy_in))) in qat_dh_compute_value()
      345 if (unlikely(dma_mapping_error(dev, qat_req->phy_out))) in qat_dh_compute_value()
      361 if (!dma_mapping_error(dev, qat_req->phy_out)) in qat_dh_compute_value()
      366 if (!dma_mapping_error(dev, qat_req->phy_in)) in qat_dh_compute_value()
      371 if (!dma_mapping_error(dev, qat_req->out.dh.r)) in qat_dh_compute_value()
      377 if (!dma_mapping_error(dev, qat_req->in.dh.in.b)) in qat_dh_compute_value()
      765 if (unlikely(dma_mapping_error(dev, qat_req->in.rsa.enc.m))) in qat_rsa_enc()
      780 if (unlikely(dma_mapping_error(dev, qat_req->out.rsa.enc.c))) in qat_rsa_enc()
      [all …]
|
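The qat_common hits above show both halves of the usual DMA-API discipline: every handle returned by dma_map_single() is checked with dma_mapping_error() before use (wrapped in unlikely(), since failures are rare), and the shared unwind path re-checks each handle with the negated form so only successfully mapped buffers are passed to dma_unmap_single(). A minimal sketch of that shape, assuming a hypothetical qat_map_example() helper and buffer names that are not in the real QAT code:

#include <linux/dma-mapping.h>
#include <linux/device.h>

/* Hypothetical helper illustrating the map / check / unwind pattern
 * seen in the QAT hits above; all names are made up for the sketch. */
static int qat_map_example(struct device *dev, void *in, void *out, size_t len,
                           dma_addr_t *in_dma, dma_addr_t *out_dma)
{
        *in_dma = dma_map_single(dev, in, len, DMA_TO_DEVICE);
        if (unlikely(dma_mapping_error(dev, *in_dma)))
                return -ENOMEM;

        *out_dma = dma_map_single(dev, out, len, DMA_FROM_DEVICE);
        if (unlikely(dma_mapping_error(dev, *out_dma)))
                goto err;

        return 0;

err:
        /* Common unwind: re-check each handle so only buffers that were
         * actually mapped get unmapped. */
        if (!dma_mapping_error(dev, *out_dma))
                dma_unmap_single(dev, *out_dma, len, DMA_FROM_DEVICE);
        if (!dma_mapping_error(dev, *in_dma))
                dma_unmap_single(dev, *in_dma, len, DMA_TO_DEVICE);
        return -ENOMEM;
}

The re-check in the error label is what makes a single shared unwind label safe even when only some of the mappings succeeded.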
/linux-6.12.1/drivers/crypto/caam/ |
D | caampkc.c |
      352 if (dma_mapping_error(dev, edesc->sec4_sg_dma)) { in rsa_edesc_alloc()
      416 if (dma_mapping_error(dev, pdb->n_dma)) { in set_rsa_pub_pdb()
      422 if (dma_mapping_error(dev, pdb->e_dma)) { in set_rsa_pub_pdb()
      461 if (dma_mapping_error(dev, pdb->n_dma)) { in set_rsa_priv_f1_pdb()
      467 if (dma_mapping_error(dev, pdb->d_dma)) { in set_rsa_priv_f1_pdb()
      510 if (dma_mapping_error(dev, pdb->d_dma)) { in set_rsa_priv_f2_pdb()
      516 if (dma_mapping_error(dev, pdb->p_dma)) { in set_rsa_priv_f2_pdb()
      522 if (dma_mapping_error(dev, pdb->q_dma)) { in set_rsa_priv_f2_pdb()
      528 if (dma_mapping_error(dev, pdb->tmp1_dma)) { in set_rsa_priv_f2_pdb()
      534 if (dma_mapping_error(dev, pdb->tmp2_dma)) { in set_rsa_priv_f2_pdb()
      [all …]
|
D | blob_gen.c |
      92 if (dma_mapping_error(jrdev, dma_in)) { in caam_process_blob()
      100 if (dma_mapping_error(jrdev, dma_out)) { in caam_process_blob()
|
D | caamhash.c |
      153 if (dma_mapping_error(jrdev, state->ctx_dma)) { in map_seq_out_ptr_ctx()
      176 if (dma_mapping_error(jrdev, state->buf_dma)) { in buf_map_to_sec4_sg()
      194 if (dma_mapping_error(jrdev, state->ctx_dma)) { in ctx_map_to_sec4_sg()
      380 if (dma_mapping_error(jrdev, key_dma)) { in hash_digest_key()
      741 if (dma_mapping_error(ctx->jrdev, src_dma)) { in ahash_edesc_add_src()
      911 if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) { in ahash_update_ctx()
      984 if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) { in ahash_final_ctx()
      1171 if (dma_mapping_error(jrdev, state->buf_dma)) { in ahash_final_no_ctx()
      1276 if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) { in ahash_update_no_ctx()
      1825 if (dma_mapping_error(ctx->jrdev, ctx->adata.key_dma)) { in caam_hash_cra_init()
      [all …]
|
D | caamalg_qi2.c |
      491 if (dma_mapping_error(dev, iv_dma)) { in aead_edesc_alloc()
      515 if (dma_mapping_error(dev, edesc->assoclen_dma)) { in aead_edesc_alloc()
      536 if (dma_mapping_error(dev, qm_sg_dma)) { in aead_edesc_alloc()
      1209 if (dma_mapping_error(dev, iv_dma)) { in skcipher_edesc_alloc()
      1233 if (dma_mapping_error(dev, edesc->qm_sg_dma)) { in skcipher_edesc_alloc()
      1592 if (dma_mapping_error(ctx->dev, dma_addr)) { in caam_cra_init()
      3109 if (dma_mapping_error(dev, state->buf_dma)) { in buf_map_to_qm_sg()
      3127 if (dma_mapping_error(dev, state->ctx_dma)) { in ctx_map_to_qm_sg()
      3238 if (dma_mapping_error(ctx->dev, key_dma)) { in hash_digest_key()
      3258 if (dma_mapping_error(ctx->dev, flc_dma)) { in hash_digest_key()
      [all …]
|
/linux-6.12.1/drivers/net/ethernet/stmicro/stmmac/ |
D | ring_mode.c |
      43 if (dma_mapping_error(priv->device, des2)) in jumbo_frm()
      64 if (dma_mapping_error(priv->device, des2)) in jumbo_frm()
      78 if (dma_mapping_error(priv->device, des2)) in jumbo_frm()
|
D | chain_mode.c |
      39 if (dma_mapping_error(priv->device, des2)) in jumbo_frm()
      57 if (dma_mapping_error(priv->device, des2)) in jumbo_frm()
      70 if (dma_mapping_error(priv->device, des2)) in jumbo_frm()
|
/linux-6.12.1/drivers/infiniband/hw/qib/ |
D | qib_user_pages.c |
      64 if (dma_mapping_error(&hwdev->dev, phys)) in qib_map_page()
      71 if (dma_mapping_error(&hwdev->dev, phys)) in qib_map_page()
|
/linux-6.12.1/drivers/spi/ |
D | spi-fsl-cpm.c |
      139 if (dma_mapping_error(dev, mspi->tx_dma)) { in fsl_spi_cpm_bufs()
      150 if (dma_mapping_error(dev, mspi->rx_dma)) { in fsl_spi_cpm_bufs()
      355 if (dma_mapping_error(dev, mspi->dma_dummy_tx)) { in fsl_spi_cpm_init()
      362 if (dma_mapping_error(dev, mspi->dma_dummy_rx)) { in fsl_spi_cpm_init()
|
/linux-6.12.1/drivers/crypto/allwinner/sun8i-ce/ |
D | sun8i-ce-prng.c |
      98 if (dma_mapping_error(ce->dev, dma_iv)) { in sun8i_ce_prng_generate()
      105 if (dma_mapping_error(ce->dev, dma_dst)) { in sun8i_ce_prng_generate()
|
/linux-6.12.1/drivers/crypto/allwinner/sun8i-ss/ |
D | sun8i-ss-prng.c |
      109 if (dma_mapping_error(ss->dev, dma_iv)) { in sun8i_ss_prng_generate()
      116 if (dma_mapping_error(ss->dev, dma_dst)) { in sun8i_ss_prng_generate()
|
D | sun8i-ss-hash.c |
      528 if (dma_mapping_error(ss->dev, addr_res)) { in sun8i_ss_hash_run()
      580 err = dma_mapping_error(ss->dev, addr_xpad); in sun8i_ss_hash_run()
      601 if (dma_mapping_error(ss->dev, addr_res)) { in sun8i_ss_hash_run()
      607 err = dma_mapping_error(ss->dev, addr_xpad); in sun8i_ss_hash_run()
      640 if (dma_mapping_error(ss->dev, addr_pad)) { in sun8i_ss_hash_run()
|
/linux-6.12.1/drivers/crypto/marvell/octeontx2/ |
D | otx2_cpt_reqmgr.h |
      231 if (unlikely(dma_mapping_error(&pdev->dev, list[i].dma_addr))) { in setup_sgio_components()
      300 if (unlikely(dma_mapping_error(&pdev->dev, list[i].dma_addr))) { in sgv2io_components_setup()
      395 if (unlikely(dma_mapping_error(&pdev->dev, info->dptr_baddr))) { in cn10k_sgv2_info_create()
      471 if (unlikely(dma_mapping_error(&pdev->dev, info->dptr_baddr))) { in otx2_sg_info_create()
|
/linux-6.12.1/drivers/net/ethernet/amd/xgbe/ |
D | xgbe-desc.c |
      319 if (dma_mapping_error(pdata->dev, pages_dma)) { in xgbe_alloc_pages()
      560 if (dma_mapping_error(pdata->dev, skb_dma)) { in xgbe_map_tx_skb()
      584 if (dma_mapping_error(pdata->dev, skb_dma)) { in xgbe_map_tx_skb()
      616 if (dma_mapping_error(pdata->dev, skb_dma)) { in xgbe_map_tx_skb()
|
/linux-6.12.1/drivers/net/ethernet/synopsys/ |
D | dwc-xlgmac-desc.c |
      352 if (dma_mapping_error(pdata->dev, pages_dma)) { in xlgmac_alloc_pages()
      536 if (dma_mapping_error(pdata->dev, skb_dma)) { in xlgmac_map_tx_skb()
      560 if (dma_mapping_error(pdata->dev, skb_dma)) { in xlgmac_map_tx_skb()
      592 if (dma_mapping_error(pdata->dev, skb_dma)) { in xlgmac_map_tx_skb()
|
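The xgbe-desc.c and dwc-xlgmac-desc.c TX paths above map an skb in pieces, one descriptor for the linear part and one per page fragment, validating each handle as they go. A rough sketch of that loop, assuming a hypothetical example_map_tx_skb() with caller-supplied dma[]/len[] arrays and a max_desc bound rather than the drivers' real ring structures:

#include <linux/dma-mapping.h>
#include <linux/skbuff.h>

/* Hypothetical sketch of a TX skb mapping loop: map the linear part,
 * then each page fragment, checking every handle; unwind on failure. */
static int example_map_tx_skb(struct device *dev, struct sk_buff *skb,
                              dma_addr_t *dma, unsigned int *len,
                              unsigned int max_desc)
{
        unsigned int i, n = 0;

        /* Linear (header) part of the skb. */
        len[n] = skb_headlen(skb);
        dma[n] = dma_map_single(dev, skb->data, len[n], DMA_TO_DEVICE);
        if (dma_mapping_error(dev, dma[n]))
                return -ENOMEM;
        n++;

        /* One descriptor per page fragment. */
        for (i = 0; i < skb_shinfo(skb)->nr_frags && n < max_desc; i++, n++) {
                const skb_frag_t *frag = &skb_shinfo(skb)->frags[i];

                len[n] = skb_frag_size(frag);
                dma[n] = skb_frag_dma_map(dev, frag, 0, len[n], DMA_TO_DEVICE);
                if (dma_mapping_error(dev, dma[n]))
                        goto unwind;
        }
        return n;

unwind:
        /* Undo the fragment mappings already made, then the linear one;
         * the failed handle at index n is deliberately skipped. */
        while (n-- > 1)
                dma_unmap_page(dev, dma[n], len[n], DMA_TO_DEVICE);
        dma_unmap_single(dev, dma[0], len[0], DMA_TO_DEVICE);
        return -ENOMEM;
}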
/linux-6.12.1/drivers/misc/bcm-vk/ |
D | bcm_vk_sg.c |
      112 if (unlikely(dma_mapping_error(dev, sg_addr))) { in bcm_vk_dma_alloc()
      125 if (unlikely(dma_mapping_error(dev, addr))) { in bcm_vk_dma_alloc()
|
/linux-6.12.1/drivers/crypto/aspeed/ |
D | aspeed-hace-hash.c |
      148 if (dma_mapping_error(hace_dev->dev, rctx->digest_dma_addr)) { in aspeed_ahash_dma_prepare()
      193 if (dma_mapping_error(hace_dev->dev, rctx->digest_dma_addr)) { in aspeed_ahash_dma_prepare_sg()
      207 if (dma_mapping_error(hace_dev->dev, rctx->buffer_dma_addr)) { in aspeed_ahash_dma_prepare_sg()
      377 if (dma_mapping_error(hace_dev->dev, rctx->digest_dma_addr)) { in aspeed_ahash_hmac_resume()
      386 if (dma_mapping_error(hace_dev->dev, rctx->buffer_dma_addr)) { in aspeed_ahash_hmac_resume()
      420 if (dma_mapping_error(hace_dev->dev, rctx->digest_dma_addr)) { in aspeed_ahash_req_final()
      430 if (dma_mapping_error(hace_dev->dev, rctx->buffer_dma_addr)) { in aspeed_ahash_req_final()
|
/linux-6.12.1/drivers/scsi/ |
D | a3000.c |
      66 if (dma_mapping_error(hdata->dev, addr)) { in dma_setup()
      107 if (dma_mapping_error(hdata->dev, addr)) { in dma_setup()
|
D | a2091.c |
      61 if (dma_mapping_error(hdata->dev, addr)) { in dma_setup()
      96 if (dma_mapping_error(hdata->dev, addr)) { in dma_setup()
|
/linux-6.12.1/tools/virtio/linux/ |
D | dma-mapping.h | 28 #define dma_mapping_error(...) (0) macro
|
/linux-6.12.1/drivers/crypto/ccree/ |
D | cc_buffer_mgr.c |
      393 if (dma_mapping_error(dev, req_ctx->gen_ctx.iv_dma_addr)) { in cc_map_cipher_request()
      568 if (dma_mapping_error(dev, areq_ctx->gen_ctx.iv_dma_addr)) { in cc_aead_chain_iv()
      979 if (dma_mapping_error(dev, dma_addr)) { in cc_map_aead_request()
      993 if (dma_mapping_error(dev, dma_addr)) { in cc_map_aead_request()
      1011 if (dma_mapping_error(dev, dma_addr)) { in cc_map_aead_request()
      1021 if (dma_mapping_error(dev, dma_addr)) { in cc_map_aead_request()
      1032 if (dma_mapping_error(dev, dma_addr)) { in cc_map_aead_request()
      1044 if (dma_mapping_error(dev, dma_addr)) { in cc_map_aead_request()
|
/linux-6.12.1/drivers/soc/qcom/ |
D | qcom-geni-se.c |
      708 if (dma_mapping_error(wrapper->dev, *iova)) in geni_se_tx_dma_prep()
      760 if (dma_mapping_error(wrapper->dev, *iova)) in geni_se_rx_dma_prep()
      780 if (!dma_mapping_error(wrapper->dev, iova)) in geni_se_tx_dma_unprep()
      797 if (!dma_mapping_error(wrapper->dev, iova)) in geni_se_rx_dma_unprep()
|
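The geni_se hits pair a prep helper (positive dma_mapping_error() check right after mapping) with an unprep helper that re-checks the cookie before unmapping, so the unprep side is safe to call even when prep never produced a valid mapping. A small sketch of that pairing, using hypothetical example_* names rather than the actual geni_se API:

#include <linux/dma-mapping.h>
#include <linux/device.h>

/* Hypothetical prep/unprep pair: prep fails cleanly on a bad mapping,
 * unprep guards the unmap so it tolerates a never-mapped cookie. */
static int example_tx_dma_prep(struct device *dev, void *buf, size_t len,
                               dma_addr_t *iova)
{
        *iova = dma_map_single(dev, buf, len, DMA_TO_DEVICE);
        if (dma_mapping_error(dev, *iova))
                return -ENOMEM;
        return 0;
}

static void example_tx_dma_unprep(struct device *dev, dma_addr_t iova,
                                  size_t len)
{
        if (!dma_mapping_error(dev, iova))
                dma_unmap_single(dev, iova, len, DMA_TO_DEVICE);
}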
/linux-6.12.1/drivers/crypto/cavium/cpt/ |
D | cptvf_reqmanager.c |
      63 if (unlikely(dma_mapping_error(&pdev->dev, in setup_sgio_components()
      191 if (dma_mapping_error(&pdev->dev, info->dptr_baddr)) { in setup_sgio_list()
      210 if (dma_mapping_error(&pdev->dev, info->rptr_baddr)) { in setup_sgio_list()
      455 if (dma_mapping_error(&pdev->dev, info->comp_baddr)) { in process_request()
|
/linux-6.12.1/drivers/crypto/ |
D | mxs-dcp.c |
      187 dma_err = dma_mapping_error(sdcp->dev, desc_phys); in mxs_dcp_start_dma()
      239 ret = dma_mapping_error(sdcp->dev, key_phys); in mxs_dcp_run_aes()
      246 ret = dma_mapping_error(sdcp->dev, src_phys); in mxs_dcp_run_aes()
      252 ret = dma_mapping_error(sdcp->dev, dst_phys); in mxs_dcp_run_aes()
      609 ret = dma_mapping_error(sdcp->dev, buf_phys); in mxs_dcp_run_sha()
      645 ret = dma_mapping_error(sdcp->dev, digest_phys); in mxs_dcp_run_sha()
|
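mxs-dcp.c (and the sun8i-ss-hash.c hits above) use a slightly different idiom: the result of dma_mapping_error() is stored in ret/err and returned directly, relying on it yielding 0 on success and a negative errno on failure. A minimal sketch, with a hypothetical example_map_desc():

#include <linux/dma-mapping.h>
#include <linux/device.h>

/* Hypothetical sketch of the "ret = dma_mapping_error(...)" idiom:
 * the helper's return value is used as the error code directly. */
static int example_map_desc(struct device *dev, void *desc, size_t len,
                            dma_addr_t *desc_phys)
{
        int ret;

        *desc_phys = dma_map_single(dev, desc, len, DMA_TO_DEVICE);
        ret = dma_mapping_error(dev, *desc_phys);
        if (ret)
                dev_err(dev, "failed to map descriptor\n");

        return ret;
}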