Lines Matching full:rf

16 struct irdma_pci_f *rf = iwdev->rf; in irdma_query_device() local
17 struct pci_dev *pcidev = iwdev->rf->pcidev; in irdma_query_device()
18 struct irdma_hw_attrs *hw_attrs = &rf->sc_dev.hw_attrs; in irdma_query_device()
26 props->fw_ver = (u64)irdma_fw_major_ver(&rf->sc_dev) << 32 | in irdma_query_device()
27 irdma_fw_minor_ver(&rf->sc_dev); in irdma_query_device()
34 props->hw_ver = rf->pcidev->revision; in irdma_query_device()
37 props->max_qp = rf->max_qp - rf->used_qps; in irdma_query_device()
41 props->max_cq = rf->max_cq - rf->used_cqs; in irdma_query_device()
42 props->max_cqe = rf->max_cqe - 1; in irdma_query_device()
43 props->max_mr = rf->max_mr - rf->used_mrs; in irdma_query_device()
45 props->max_pd = rf->max_pd - rf->used_pds; in irdma_query_device()
54 props->max_ah = rf->max_ah; in irdma_query_device()
55 props->max_mcast_grp = rf->max_mcg; in irdma_query_device()
57 props->max_total_mcast_qp_attach = rf->max_qp * IRDMA_MAX_MGS_PER_CTX; in irdma_query_device()
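
The irdma_query_device() matches above report capacity as the hardware maximum minus what is already handed out, and pack the firmware version as major << 32 | minor. A stand-alone sketch of that arithmetic with made-up values (not the driver's types):

/*
 * Illustrative sketch only: how a 32.32 firmware version is packed into a
 * u64 and how "available" object counts are derived as hardware maximum
 * minus objects already in use. Values below are invented.
 */
#include <stdint.h>
#include <stdio.h>

int main(void)
{
	uint64_t fw_major = 1, fw_minor = 72;    /* example version */
	uint32_t max_qp = 4096, used_qps = 130;  /* example counts */

	uint64_t fw_ver = (uint64_t)fw_major << 32 | fw_minor;
	uint32_t avail_qp = max_qp - used_qps;

	printf("fw_ver=0x%llx avail_qp=%u\n",
	       (unsigned long long)fw_ver, avail_qp);
	return 0;
}
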
106 props->max_msg_sz = iwdev->rf->sc_dev.hw_attrs.max_hw_outbound_msg_size; in irdma_query_port()
128 pfn = ((uintptr_t)ucontext->iwdev->rf->sc_dev.hw_regs[IRDMA_DB_ADDR_OFFSET] + in irdma_mmap_legacy()
129 pci_resource_start(ucontext->iwdev->rf->pcidev, 0)) >> PAGE_SHIFT; in irdma_mmap_legacy()
199 pci_resource_start(ucontext->iwdev->rf->pcidev, 0)) >> PAGE_SHIFT; in irdma_mmap()
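
The irdma_mmap_legacy()/irdma_mmap() matches compute the doorbell page frame number by adding the value stored in hw_regs[IRDMA_DB_ADDR_OFFSET] (treated here as an offset into BAR 0, as the arithmetic suggests) to the BAR 0 physical base from pci_resource_start() and shifting by PAGE_SHIFT. A stand-alone sketch of that calculation, assuming 4 KiB pages and invented addresses:

/*
 * Illustrative sketch only: deriving a doorbell pfn from a BAR base plus
 * register-block offset. Addresses are made up; PAGE_SHIFT is assumed 12.
 */
#include <stdint.h>
#include <stdio.h>

#define PAGE_SHIFT 12

int main(void)
{
	uint64_t bar0_start = 0xa0000000ULL; /* like pci_resource_start(pcidev, 0) */
	uint64_t db_offset  = 0x2000ULL;     /* like hw_regs[IRDMA_DB_ADDR_OFFSET] */

	uint64_t pfn = (bar0_start + db_offset) >> PAGE_SHIFT;

	printf("doorbell pfn = 0x%llx\n", (unsigned long long)pfn);
	return 0;
}
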
237 cqp_request = irdma_alloc_and_get_cqp_request(&iwdev->rf->cqp, true); in irdma_alloc_push_page()
249 cqp_info->in.u.manage_push_page.cqp = &iwdev->rf->cqp.sc_cqp; in irdma_alloc_push_page()
252 status = irdma_handle_cqp_op(iwdev->rf, cqp_request); in irdma_alloc_push_page()
254 iwdev->rf->sc_dev.hw_attrs.max_hw_device_pages) { in irdma_alloc_push_page()
259 irdma_put_cqp_request(&iwdev->rf->cqp, cqp_request); in irdma_alloc_push_page()
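
irdma_alloc_push_page() shows the control-QP (CQP) request pattern that recurs throughout this listing: allocate and take a reference on a request with irdma_alloc_and_get_cqp_request(), fill in cqp_info, submit it with irdma_handle_cqp_op(), and finally drop the reference with irdma_put_cqp_request(). A simplified stand-alone model of that acquire/submit/release shape (toy types, not the driver's):

/*
 * Toy model of the acquire -> submit -> release request lifecycle seen in
 * the irdma CQP helpers above. Only the shape is illustrated.
 */
#include <stdio.h>
#include <stdlib.h>

struct toy_request {
	int refcnt;
	int opcode;
	int status;
};

static struct toy_request *toy_alloc_and_get_request(int opcode)
{
	struct toy_request *req = calloc(1, sizeof(*req));

	if (!req)
		return NULL;
	req->refcnt = 1;     /* caller's reference */
	req->opcode = opcode;
	return req;
}

static int toy_handle_op(struct toy_request *req)
{
	/* stand-in for posting to the control queue and awaiting completion */
	req->status = 0;
	return req->status;
}

static void toy_put_request(struct toy_request *req)
{
	if (--req->refcnt == 0)
		free(req);
}

int main(void)
{
	struct toy_request *req = toy_alloc_and_get_request(42);
	int ret;

	if (!req)
		return 1;
	ret = toy_handle_op(req);
	toy_put_request(req);    /* always drop the reference after submit */
	printf("op completed with status %d\n", ret);
	return 0;
}
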
280 struct irdma_uk_attrs *uk_attrs = &iwdev->rf->sc_dev.hw_attrs.uk_attrs; in irdma_alloc_ucontext()
304 uresp.max_qps = iwdev->rf->max_qp; in irdma_alloc_ucontext()
305 uresp.max_pds = iwdev->rf->sc_dev.hw_attrs.max_hw_pds; in irdma_alloc_ucontext()
306 uresp.wq_size = iwdev->rf->sc_dev.hw_attrs.max_qp_wr * 2; in irdma_alloc_ucontext()
312 u64 bar_off = (uintptr_t)iwdev->rf->sc_dev.hw_regs[IRDMA_DB_ADDR_OFFSET]; in irdma_alloc_ucontext()
377 struct irdma_sc_dev *dev = &iwdev->rf->sc_dev; in irdma_alloc_pd()
378 struct irdma_pci_f *rf = iwdev->rf; in irdma_alloc_pd() local
387 err = irdma_alloc_rsrc(rf, rf->allocated_pds, rf->max_pd, &pd_id, in irdma_alloc_pd()
388 &rf->next_pd); in irdma_alloc_pd()
410 irdma_free_rsrc(rf, rf->allocated_pds, pd_id); in irdma_alloc_pd()
425 irdma_free_rsrc(iwdev->rf, iwdev->rf->allocated_pds, iwpd->sc_pd.pd_id); in irdma_dealloc_pd()
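
The PD matches show the paired index helpers irdma_alloc_rsrc()/irdma_free_rsrc(): an ID is taken from a per-resource pool (allocated_pds) bounded by max_pd, with next_pd as a search hint, and returned to the pool on dealloc. A stand-alone sketch of that allocator shape using toy types (the real helper's signature and locking are not reproduced):

/*
 * Toy round-robin ID allocator: a bitmap of in-use IDs plus a "next" hint,
 * loosely mirroring the alloc_rsrc/free_rsrc pairing in the listing.
 */
#include <stdint.h>
#include <stdio.h>
#include <string.h>

#define MAX_IDS 64

struct toy_rsrc {
	uint64_t bitmap;    /* bit i set => id i in use */
	unsigned int next;  /* where the next search starts */
};

static int toy_alloc_id(struct toy_rsrc *r, unsigned int max, unsigned int *id)
{
	for (unsigned int n = 0; n < max; n++) {
		unsigned int i = (r->next + n) % max;

		if (!(r->bitmap & (1ULL << i))) {
			r->bitmap |= 1ULL << i;
			r->next = (i + 1) % max;
			*id = i;
			return 0;
		}
	}
	return -1; /* pool exhausted */
}

static void toy_free_id(struct toy_rsrc *r, unsigned int id)
{
	r->bitmap &= ~(1ULL << id);
}

int main(void)
{
	struct toy_rsrc pds;
	unsigned int pd_id;

	memset(&pds, 0, sizeof(pds));
	if (!toy_alloc_id(&pds, MAX_IDS, &pd_id)) {
		printf("allocated pd_id %u\n", pd_id);
		toy_free_id(&pds, pd_id);
	}
	return 0;
}
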
488 bar_off = (uintptr_t)iwdev->rf->sc_dev.hw_regs[IRDMA_DB_ADDR_OFFSET]; in irdma_setup_push_mmap_entries()
541 irdma_cqp_qp_destroy_cmd(&iwdev->rf->sc_dev, &iwqp->sc_qp); in irdma_destroy_qp()
703 mem->va = dma_alloc_coherent(iwdev->rf->hw.device, mem->size, in irdma_setup_kmode_qp()
733 struct irdma_pci_f *rf = iwqp->iwdev->rf; in irdma_cqp_create_qp_cmd() local
739 cqp_request = irdma_alloc_and_get_cqp_request(&rf->cqp, true); in irdma_cqp_create_qp_cmd()
754 status = irdma_handle_cqp_op(rf, cqp_request); in irdma_cqp_create_qp_cmd()
755 irdma_put_cqp_request(&rf->cqp, cqp_request); in irdma_cqp_create_qp_cmd()
764 struct irdma_sc_dev *dev = &iwdev->rf->sc_dev; in irdma_roce_fill_and_set_qpctx_info()
804 struct irdma_sc_dev *dev = &iwdev->rf->sc_dev; in irdma_iw_fill_and_set_qpctx_info()
833 struct irdma_sc_dev *dev = &iwdev->rf->sc_dev; in irdma_validate_qp_attrs()
882 struct irdma_pci_f *rf = iwdev->rf; in irdma_create_qp() local
888 struct irdma_sc_dev *dev = &rf->sc_dev; in irdma_create_qp()
931 err_code = irdma_alloc_rsrc(rf, rf->allocated_qps, rf->max_qp, in irdma_create_qp()
932 &qp_num, &rf->next_qp); in irdma_create_qp()
1010 rf->qp_table[qp_num] = iwqp; in irdma_create_qp()
1014 irdma_cqp_qp_destroy_cmd(&rf->sc_dev, &iwqp->sc_qp); in irdma_create_qp()
1191 struct irdma_sc_dev *dev = &iwdev->rf->sc_dev; in irdma_modify_qp_roce()
1329 irdma_add_arp(iwdev->rf, local_ip, udp_info->ipv4, in irdma_modify_qp_roce()
1547 struct irdma_sc_dev *dev = &iwdev->rf->sc_dev; in irdma_modify_qp()
1781 * @rf: RDMA PCI function
1784 static void irdma_cq_free_rsrc(struct irdma_pci_f *rf, struct irdma_cq *iwcq) in irdma_cq_free_rsrc() argument
1789 dma_free_coherent(rf->sc_dev.hw->device, iwcq->kmem.size, in irdma_cq_free_rsrc()
1792 dma_free_coherent(rf->sc_dev.hw->device, in irdma_cq_free_rsrc()
1798 irdma_free_rsrc(rf, rf->allocated_cqs, cq->cq_uk.cq_id); in irdma_cq_free_rsrc()
1867 irdma_cq_wq_destroy(iwdev->rf, cq); in irdma_destroy_cq()
1872 irdma_cq_free_rsrc(iwdev->rf, iwcq); in irdma_destroy_cq()
1897 struct irdma_pci_f *rf; in irdma_resize_cq() local
1903 rf = iwdev->rf; in irdma_resize_cq()
1905 if (!(rf->sc_dev.hw_attrs.uk_attrs.feature_flags & in irdma_resize_cq()
1912 if (entries > rf->max_cqe) in irdma_resize_cq()
1917 if (rf->sc_dev.hw_attrs.uk_attrs.hw_rev >= IRDMA_GEN_2) in irdma_resize_cq()
1977 cqp_request = irdma_alloc_and_get_cqp_request(&rf->cqp, true); in irdma_resize_cq()
1994 ret = irdma_handle_cqp_op(rf, cqp_request); in irdma_resize_cq()
1995 irdma_put_cqp_request(&rf->cqp, cqp_request); in irdma_resize_cq()
2049 struct irdma_pci_f *rf = iwdev->rf; in irdma_create_cq() local
2053 struct irdma_sc_dev *dev = &rf->sc_dev; in irdma_create_cq()
2070 err_code = irdma_alloc_rsrc(rf, rf->allocated_cqs, rf->max_cq, &cq_num, in irdma_create_cq()
2071 &rf->next_cq); in irdma_create_cq()
2085 if (attr->comp_vector < rf->ceqs_count) in irdma_create_cq()
2123 if (rf->sc_dev.hw_attrs.uk_attrs.feature_flags & in irdma_create_cq()
2153 if (entries < 1 || entries > rf->max_cqe) { in irdma_create_cq()
2198 cqp_request = irdma_alloc_and_get_cqp_request(&rf->cqp, true); in irdma_create_cq()
2210 err_code = irdma_handle_cqp_op(rf, cqp_request); in irdma_create_cq()
2211 irdma_put_cqp_request(&rf->cqp, cqp_request); in irdma_create_cq()
2228 rf->cq_table[cq_num] = iwcq; in irdma_create_cq()
2233 irdma_cq_wq_destroy(rf, cq); in irdma_create_cq()
2235 irdma_cq_free_rsrc(rf, iwcq); in irdma_create_cq()
2272 stag_idx = (stag & iwdev->rf->mr_stagmask) >> IRDMA_CQPSQ_STAG_IDX_S; in irdma_free_stag()
2273 irdma_free_rsrc(iwdev->rf, iwdev->rf->allocated_mrs, stag_idx); in irdma_free_stag()
2293 driver_key = random & ~iwdev->rf->mr_stagmask; in irdma_create_stag()
2294 next_stag_index = (random & iwdev->rf->mr_stagmask) >> 8; in irdma_create_stag()
2295 next_stag_index %= iwdev->rf->max_mr; in irdma_create_stag()
2297 ret = irdma_alloc_rsrc(iwdev->rf, iwdev->rf->allocated_mrs, in irdma_create_stag()
2298 iwdev->rf->max_mr, &stag_index, in irdma_create_stag()
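
irdma_create_stag() splits a random 32-bit value using mr_stagmask: the bits outside the mask become the driver key, and the bits inside it (shifted down by 8) seed the index search, wrapped to max_mr. A stand-alone sketch of that split with an example mask and invented values:

/*
 * Illustrative sketch only: splitting one random word into a driver key
 * and a starting index hint. Mask, pool size, and random value are made up.
 */
#include <stdint.h>
#include <stdio.h>

int main(void)
{
	uint32_t mr_stagmask = 0x00ffff00; /* example mask */
	uint32_t max_mr = 4096;            /* example MR pool size */
	uint32_t random = 0x1a2b3c4d;      /* stand-in for a random value */

	uint32_t driver_key = random & ~mr_stagmask;
	uint32_t next_stag_index = (random & mr_stagmask) >> 8;

	next_stag_index %= max_mr;
	printf("driver_key=0x%x first index tried=%u\n",
	       driver_key, next_stag_index);
	return 0;
}
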
2414 * @rf: RDMA PCI function
2418 static int irdma_setup_pbles(struct irdma_pci_f *rf, struct irdma_mr *iwmr, in irdma_setup_pbles() argument
2429 status = irdma_get_pble(rf->pble_rsrc, palloc, iwmr->page_cnt, in irdma_setup_pbles()
2473 err = irdma_setup_pbles(iwdev->rf, iwmr, lvl); in irdma_handle_q_mem()
2526 irdma_free_pble(iwdev->rf->pble_rsrc, palloc); in irdma_handle_q_mem()
2546 cqp_request = irdma_alloc_and_get_cqp_request(&iwdev->rf->cqp, true); in irdma_hw_alloc_mw()
2562 cqp_info->in.u.mw_alloc.dev = &iwdev->rf->sc_dev; in irdma_hw_alloc_mw()
2564 status = irdma_handle_cqp_op(iwdev->rf, cqp_request); in irdma_hw_alloc_mw()
2565 irdma_put_cqp_request(&iwdev->rf->cqp, cqp_request); in irdma_hw_alloc_mw()
2612 cqp_request = irdma_alloc_and_get_cqp_request(&iwdev->rf->cqp, true); in irdma_dealloc_mw()
2624 cqp_info->in.u.dealloc_stag.dev = &iwdev->rf->sc_dev; in irdma_dealloc_mw()
2626 irdma_handle_cqp_op(iwdev->rf, cqp_request); in irdma_dealloc_mw()
2627 irdma_put_cqp_request(&iwdev->rf->cqp, cqp_request); in irdma_dealloc_mw()
2648 cqp_request = irdma_alloc_and_get_cqp_request(&iwdev->rf->cqp, true); in irdma_hw_alloc_stag()
2663 cqp_info->in.u.alloc_stag.dev = &iwdev->rf->sc_dev; in irdma_hw_alloc_stag()
2665 status = irdma_handle_cqp_op(iwdev->rf, cqp_request); in irdma_hw_alloc_stag()
2666 irdma_put_cqp_request(&iwdev->rf->cqp, cqp_request); in irdma_hw_alloc_stag()
2712 err_code = irdma_get_pble(iwdev->rf->pble_rsrc, palloc, iwmr->page_cnt, in irdma_alloc_mr()
2725 irdma_free_pble(iwdev->rf->pble_rsrc, palloc); in irdma_alloc_mr()
2798 cqp_request = irdma_alloc_and_get_cqp_request(&iwdev->rf->cqp, true); in irdma_hwreg_mr()
2832 cqp_info->in.u.mr_reg_non_shared.dev = &iwdev->rf->sc_dev; in irdma_hwreg_mr()
2834 ret = irdma_handle_cqp_op(iwdev->rf, cqp_request); in irdma_hwreg_mr()
2835 irdma_put_cqp_request(&iwdev->rf->cqp, cqp_request); in irdma_hwreg_mr()
2854 err = irdma_setup_pbles(iwdev->rf, iwmr, lvl); in irdma_reg_user_mr_type_mem()
2862 irdma_free_pble(iwdev->rf->pble_rsrc, &iwpbl->pble_alloc); in irdma_reg_user_mr_type_mem()
2891 irdma_free_pble(iwdev->rf->pble_rsrc, &iwpbl->pble_alloc); in irdma_reg_user_mr_type_mem()
2918 iwdev->rf->sc_dev.hw_attrs.page_size_cap : SZ_4K; in irdma_alloc_iwmr()
2988 if (iwdev->rf->sc_dev.hw_attrs.uk_attrs.feature_flags & IRDMA_FEATURE_CQ_RESIZE) in irdma_reg_user_mr_type_cq()
3029 if (len > iwdev->rf->sc_dev.hw_attrs.max_mr_size) in irdma_reg_user_mr()
3095 if (len > iwdev->rf->sc_dev.hw_attrs.max_mr_size) in irdma_reg_user_mr_dmabuf()
3143 cqp_request = irdma_alloc_and_get_cqp_request(&iwdev->rf->cqp, true); in irdma_hwdereg_mr()
3158 cqp_info->in.u.dealloc_stag.dev = &iwdev->rf->sc_dev; in irdma_hwdereg_mr()
3160 status = irdma_handle_cqp_op(iwdev->rf, cqp_request); in irdma_hwdereg_mr()
3161 irdma_put_cqp_request(&iwdev->rf->cqp, cqp_request); in irdma_hwdereg_mr()
3196 iwdev->rf->sc_dev.hw_attrs.page_size_cap, in irdma_rereg_mr_trans()
3243 if (len > iwdev->rf->sc_dev.hw_attrs.max_mr_size) in irdma_rereg_user_mr()
3263 irdma_free_pble(iwdev->rf->pble_rsrc, in irdma_rereg_user_mr()
3412 irdma_free_pble(iwdev->rf->pble_rsrc, &iwpbl->pble_alloc); in irdma_dereg_mr()
3443 dev = &iwqp->iwdev->rf->sc_dev; in irdma_post_send()
4010 irdma_fw_major_ver(&iwdev->rf->sc_dev), in irdma_get_dev_fw_str()
4011 irdma_fw_minor_ver(&iwdev->rf->sc_dev)); in irdma_get_dev_fw_str()
4023 struct irdma_sc_dev *dev = &iwdev->rf->sc_dev; in irdma_alloc_hw_port_stats()
4046 if (iwdev->rf->rdma_ver >= IRDMA_GEN_2) in irdma_get_hw_stats()
4047 irdma_cqp_gather_stats_cmd(&iwdev->rf->sc_dev, iwdev->vsi.pestat, true); in irdma_get_hw_stats()
4049 irdma_cqp_gather_stats_gen1(&iwdev->rf->sc_dev, iwdev->vsi.pestat); in irdma_get_hw_stats()
4076 * @rf: RDMA PCI function
4079 static void mcast_list_add(struct irdma_pci_f *rf, in mcast_list_add() argument
4082 list_add(&new_elem->list, &rf->mc_qht_list.list); in mcast_list_add()
4097 * @rf: RDMA PCI function
4100 static struct mc_table_list *mcast_list_lookup_ip(struct irdma_pci_f *rf, in mcast_list_lookup_ip() argument
4106 list_for_each_safe (pos, q, &rf->mc_qht_list.list) { in mcast_list_lookup_ip()
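
mcast_list_add() and mcast_list_lookup_ip() keep per-group entries on rf->mc_qht_list.list, which the attach/detach paths below protect with rf->qh_list_lock. A stand-alone model of that add/lookup bookkeeping using a plain singly linked list (toy types; the real code uses the kernel list API under the spinlock):

/*
 * Toy model of multicast group bookkeeping: entries keyed by IP address,
 * added to a list and found again by address comparison.
 */
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

struct toy_mc_elem {
	unsigned int ip[4];        /* IPv4 uses ip[0]; IPv6 uses all four */
	struct toy_mc_elem *next;
};

static struct toy_mc_elem *mc_head;

static void toy_mcast_add(struct toy_mc_elem *e)
{
	e->next = mc_head;
	mc_head = e;
}

static struct toy_mc_elem *toy_mcast_lookup(const unsigned int *ip)
{
	for (struct toy_mc_elem *e = mc_head; e; e = e->next)
		if (!memcmp(e->ip, ip, sizeof(e->ip)))
			return e;
	return NULL;
}

int main(void)
{
	unsigned int grp[4] = { 0xe0000001, 0, 0, 0 }; /* 224.0.0.1 */
	struct toy_mc_elem *e = calloc(1, sizeof(*e));

	if (!e)
		return 1;
	memcpy(e->ip, grp, sizeof(grp));
	toy_mcast_add(e);
	printf("lookup %s\n", toy_mcast_lookup(grp) ? "hit" : "miss");
	free(e);
	return 0;
}
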
4131 cqp_request = irdma_alloc_and_get_cqp_request(&iwdev->rf->cqp, true); in irdma_mcast_cqp_op()
4140 cqp_info->in.u.mc_create.cqp = &iwdev->rf->cqp.sc_cqp; in irdma_mcast_cqp_op()
4141 status = irdma_handle_cqp_op(iwdev->rf, cqp_request); in irdma_mcast_cqp_op()
4142 irdma_put_cqp_request(&iwdev->rf->cqp, cqp_request); in irdma_mcast_cqp_op()
4190 struct irdma_pci_f *rf = iwdev->rf; in irdma_attach_mcast() local
4224 spin_lock_irqsave(&rf->qh_list_lock, flags); in irdma_attach_mcast()
4225 mc_qht_elem = mcast_list_lookup_ip(rf, ip_addr); in irdma_attach_mcast()
4229 spin_unlock_irqrestore(&rf->qh_list_lock, flags); in irdma_attach_mcast()
4237 ret = irdma_alloc_rsrc(rf, rf->allocated_mcgs, rf->max_mcg, in irdma_attach_mcast()
4238 &mgn, &rf->next_mcg); in irdma_attach_mcast()
4248 dma_mem_mc->va = dma_alloc_coherent(rf->hw.device, in irdma_attach_mcast()
4253 irdma_free_rsrc(rf, rf->allocated_mcgs, mgn); in irdma_attach_mcast()
4265 mc_qht_elem->mc_grp_ctx.hmc_fcn_id = iwdev->rf->sc_dev.hmc_fn_id; in irdma_attach_mcast()
4270 spin_lock_irqsave(&rf->qh_list_lock, flags); in irdma_attach_mcast()
4271 mcast_list_add(rf, mc_qht_elem); in irdma_attach_mcast()
4275 spin_unlock_irqrestore(&rf->qh_list_lock, flags); in irdma_attach_mcast()
4283 spin_unlock_irqrestore(&rf->qh_list_lock, flags); in irdma_attach_mcast()
4305 dma_free_coherent(rf->hw.device, in irdma_attach_mcast()
4310 irdma_free_rsrc(rf, rf->allocated_mcgs, in irdma_attach_mcast()
4330 struct irdma_pci_f *rf = iwdev->rf; in irdma_detach_mcast() local
4345 spin_lock_irqsave(&rf->qh_list_lock, flags); in irdma_detach_mcast()
4346 mc_qht_elem = mcast_list_lookup_ip(rf, ip_addr); in irdma_detach_mcast()
4348 spin_unlock_irqrestore(&rf->qh_list_lock, flags); in irdma_detach_mcast()
4358 spin_unlock_irqrestore(&rf->qh_list_lock, flags); in irdma_detach_mcast()
4364 spin_lock_irqsave(&rf->qh_list_lock, flags); in irdma_detach_mcast()
4365 mcast_list_add(rf, mc_qht_elem); in irdma_detach_mcast()
4366 spin_unlock_irqrestore(&rf->qh_list_lock, flags); in irdma_detach_mcast()
4370 dma_free_coherent(rf->hw.device, in irdma_detach_mcast()
4375 irdma_free_rsrc(rf, rf->allocated_mcgs, in irdma_detach_mcast()
4379 spin_unlock_irqrestore(&rf->qh_list_lock, flags); in irdma_detach_mcast()
4394 struct irdma_pci_f *rf = iwdev->rf; in irdma_create_hw_ah() local
4397 err = irdma_alloc_rsrc(rf, rf->allocated_ahs, rf->max_ah, &ah->sc_ah.ah_info.ah_idx, in irdma_create_hw_ah()
4398 &rf->next_ah); in irdma_create_hw_ah()
4402 err = irdma_ah_cqp_op(rf, &ah->sc_ah, IRDMA_OP_AH_CREATE, sleep, in irdma_create_hw_ah()
4414 irdma_cqp_ce_handler(rf, &rf->ccq.sc_cq); in irdma_create_hw_ah()
4427 irdma_free_rsrc(iwdev->rf, iwdev->rf->allocated_ahs, ah->sc_ah.ah_info.ah_idx); in irdma_create_hw_ah()
4439 struct irdma_pci_f *rf = iwdev->rf; in irdma_setup_ah() local
4449 irdma_sc_init_ah(&rf->sc_dev, sc_ah); in irdma_setup_ah()
4496 ah_info->dst_arpindex = irdma_add_arp(iwdev->rf, ah_info->dest_ip_addr, in irdma_setup_ah()
4569 irdma_ah_cqp_op(iwdev->rf, &ah->sc_ah, IRDMA_OP_AH_DESTROY, in irdma_destroy_ah()
4572 irdma_free_rsrc(iwdev->rf, iwdev->rf->allocated_ahs, in irdma_destroy_ah()
4791 struct pci_dev *pcidev = iwdev->rf->pcidev; in irdma_init_rdma_device()
4799 iwdev->ibdev.num_comp_vectors = iwdev->rf->ceqs_count; in irdma_init_rdma_device()
4844 dma_set_max_seg_size(iwdev->rf->hw.device, UINT_MAX); in irdma_ib_register_device()
4845 ret = ib_register_device(&iwdev->ibdev, "irdma%d", iwdev->rf->hw.device); in irdma_ib_register_device()
4873 irdma_ctrl_deinit_hw(iwdev->rf); in irdma_ib_dealloc_device()
4874 kfree(iwdev->rf); in irdma_ib_dealloc_device()