Searched refs:kern_cq (Results 1 – 4 of 4) sorted by relevance
/linux-6.12.1/drivers/infiniband/hw/erdma/

erdma_cq.c
      11  __be32 *cqe = get_queue_entry(cq->kern_cq.qbuf, cq->kern_cq.ci,    in get_next_valid_cqe()
      16  return owner ^ !!(cq->kern_cq.ci & cq->depth) ? cqe : NULL;    in get_next_valid_cqe()
      22  FIELD_PREP(ERDMA_CQDB_IDX_MASK, (cq->kern_cq.notify_cnt)) |    in notify_cq()
      26  FIELD_PREP(ERDMA_CQDB_CMDSN_MASK, cq->kern_cq.cmdsn) |    in notify_cq()
      27  FIELD_PREP(ERDMA_CQDB_CI_MASK, cq->kern_cq.ci);    in notify_cq()
      29  *cq->kern_cq.dbrec = db_data;    in notify_cq()
      30  writeq(db_data, cq->kern_cq.db);    in notify_cq()
      39  spin_lock_irqsave(&cq->kern_cq.lock, irq_flags);    in erdma_req_notify_cq()
      46  cq->kern_cq.notify_cnt++;    in erdma_req_notify_cq()
      48  spin_unlock_irqrestore(&cq->kern_cq.lock, irq_flags);    in erdma_req_notify_cq()
      [all …]
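Read together, the notify_cq() and erdma_req_notify_cq() hits show the pattern behind the kernel CQ doorbell: under kern_cq.lock, the notify count, command sequence number, and consumer index are packed into one 64-bit value with FIELD_PREP, mirrored into the doorbell record, and then written to the MMIO doorbell. A minimal sketch of that pattern follows; the function name is made up for illustration, and any CQDB fields that did not match the search are omitted, so this is not the driver's full doorbell layout.

    /*
     * Sketch only: reconstructs the doorbell-write pattern visible in the
     * notify_cq() hits above.  Mask names and kern_cq fields come from the
     * search results; doorbell bits that the query did not match are left
     * out, and the function name is hypothetical.
     */
    static void kern_cq_ring_doorbell(struct erdma_cq *cq)
    {
            u64 db_data =
                    FIELD_PREP(ERDMA_CQDB_IDX_MASK, cq->kern_cq.notify_cnt) |
                    FIELD_PREP(ERDMA_CQDB_CMDSN_MASK, cq->kern_cq.cmdsn) |
                    FIELD_PREP(ERDMA_CQDB_CI_MASK, cq->kern_cq.ci);

            *cq->kern_cq.dbrec = db_data;    /* doorbell record in host memory */
            writeq(db_data, cq->kern_cq.db); /* ring the MMIO doorbell */
    }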
erdma_verbs.c
     202  req.qbuf_addr_l = lower_32_bits(cq->kern_cq.qbuf_dma_addr);    in create_cq_cmd()
     203  req.qbuf_addr_h = upper_32_bits(cq->kern_cq.qbuf_dma_addr);    in create_cq_cmd()
     210  req.cq_dbrec_dma = cq->kern_cq.dbrec_dma;    in create_cq_cmd()
    1253  cq->kern_cq.qbuf, cq->kern_cq.qbuf_dma_addr);    in erdma_destroy_cq()
    1254  dma_pool_free(dev->db_pool, cq->kern_cq.dbrec,    in erdma_destroy_cq()
    1255  cq->kern_cq.dbrec_dma);    in erdma_destroy_cq()
    1629  cq->kern_cq.qbuf =    in erdma_init_kernel_cq()
    1631  &cq->kern_cq.qbuf_dma_addr, GFP_KERNEL);    in erdma_init_kernel_cq()
    1632  if (!cq->kern_cq.qbuf)    in erdma_init_kernel_cq()
    1635  cq->kern_cq.dbrec = dma_pool_zalloc(dev->db_pool, GFP_KERNEL,    in erdma_init_kernel_cq()
      [all …]
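The erdma_verbs.c hits cover the kernel CQ buffer lifecycle: erdma_init_kernel_cq() obtains the CQE ring with dma_alloc_coherent() and the doorbell record from dev->db_pool, create_cq_cmd() hands both DMA addresses to the device (split into qbuf_addr_l/qbuf_addr_h plus cq_dbrec_dma), and erdma_destroy_cq() releases them the same way. A rough sketch of the allocation half, assuming the DMA device is reached via dev->pdev->dev and inventing the size parameter and error path, since the search only returns fragments of those lines:

    /*
     * Sketch only: the allocation pairing suggested by the
     * erdma_init_kernel_cq() hits.  The qbuf_size parameter, the
     * dev->pdev->dev device pointer, and the cleanup on failure are
     * assumptions, not lines returned by the search.
     */
    static int kern_cq_alloc_sketch(struct erdma_dev *dev, struct erdma_cq *cq,
                                    size_t qbuf_size)
    {
            cq->kern_cq.qbuf = dma_alloc_coherent(&dev->pdev->dev, qbuf_size,
                                                  &cq->kern_cq.qbuf_dma_addr,
                                                  GFP_KERNEL);
            if (!cq->kern_cq.qbuf)
                    return -ENOMEM;

            cq->kern_cq.dbrec = dma_pool_zalloc(dev->db_pool, GFP_KERNEL,
                                                &cq->kern_cq.dbrec_dma);
            if (!cq->kern_cq.dbrec) {
                    dma_free_coherent(&dev->pdev->dev, qbuf_size,
                                      cq->kern_cq.qbuf,
                                      cq->kern_cq.qbuf_dma_addr);
                    return -ENOMEM;
            }

            return 0;
    }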
erdma_verbs.h
     270  struct erdma_kcq_info kern_cq;    member
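The erdma_verbs.h hit is the member declaration itself. Pulling the field names together from the other three files gives a rough picture of what struct erdma_kcq_info holds; the types below are inferred from how each field is used in the hits, not copied from the header, and the struct is renamed to make that clear.

    /*
     * Sketch only: field types are guesses based on usage in the hits
     * (dma_alloc_coherent()/dma_pool_zalloc() return values, writeq()
     * target, spin_lock_irqsave() argument), not the header's declaration.
     */
    struct erdma_kcq_info_sketch {
            void *qbuf;               /* CQE ring from dma_alloc_coherent() */
            dma_addr_t qbuf_dma_addr;
            u32 ci;                   /* consumer index */
            u32 cmdsn;                /* bumped in erdma_ceq_completion_handler() */
            u32 notify_cnt;           /* incremented in erdma_req_notify_cq() */
            spinlock_t lock;          /* taken in erdma_req_notify_cq() */
            void __iomem *db;         /* MMIO CQ doorbell, writeq() target */
            u64 *dbrec;               /* doorbell record; DMA address sent as cq_dbrec_dma */
            dma_addr_t dbrec_dma;
    };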
erdma_eq.c
     165  cq->kern_cq.cmdsn++;    in erdma_ceq_completion_handler()