Lines matching refs: sc_dev

18 	struct irdma_hw_attrs *hw_attrs = &rf->sc_dev.hw_attrs;  in irdma_query_device()
26 props->fw_ver = (u64)irdma_fw_major_ver(&rf->sc_dev) << 32 | in irdma_query_device()
27 irdma_fw_minor_ver(&rf->sc_dev); in irdma_query_device()
106 props->max_msg_sz = iwdev->rf->sc_dev.hw_attrs.max_hw_outbound_msg_size; in irdma_query_port()
128 pfn = ((uintptr_t)ucontext->iwdev->rf->sc_dev.hw_regs[IRDMA_DB_ADDR_OFFSET] + in irdma_mmap_legacy()
254 iwdev->rf->sc_dev.hw_attrs.max_hw_device_pages) { in irdma_alloc_push_page()
280 struct irdma_uk_attrs *uk_attrs = &iwdev->rf->sc_dev.hw_attrs.uk_attrs; in irdma_alloc_ucontext()
305 uresp.max_pds = iwdev->rf->sc_dev.hw_attrs.max_hw_pds; in irdma_alloc_ucontext()
306 uresp.wq_size = iwdev->rf->sc_dev.hw_attrs.max_qp_wr * 2; in irdma_alloc_ucontext()
312 u64 bar_off = (uintptr_t)iwdev->rf->sc_dev.hw_regs[IRDMA_DB_ADDR_OFFSET]; in irdma_alloc_ucontext()
377 struct irdma_sc_dev *dev = &iwdev->rf->sc_dev; in irdma_alloc_pd()
488 bar_off = (uintptr_t)iwdev->rf->sc_dev.hw_regs[IRDMA_DB_ADDR_OFFSET]; in irdma_setup_push_mmap_entries()
541 irdma_cqp_qp_destroy_cmd(&iwdev->rf->sc_dev, &iwqp->sc_qp); in irdma_destroy_qp()
764 struct irdma_sc_dev *dev = &iwdev->rf->sc_dev; in irdma_roce_fill_and_set_qpctx_info()
804 struct irdma_sc_dev *dev = &iwdev->rf->sc_dev; in irdma_iw_fill_and_set_qpctx_info()
833 struct irdma_sc_dev *dev = &iwdev->rf->sc_dev; in irdma_validate_qp_attrs()
888 struct irdma_sc_dev *dev = &rf->sc_dev; in irdma_create_qp()
1014 irdma_cqp_qp_destroy_cmd(&rf->sc_dev, &iwqp->sc_qp); in irdma_create_qp()
1191 struct irdma_sc_dev *dev = &iwdev->rf->sc_dev; in irdma_modify_qp_roce()
1547 struct irdma_sc_dev *dev = &iwdev->rf->sc_dev; in irdma_modify_qp()
1789 dma_free_coherent(rf->sc_dev.hw->device, iwcq->kmem.size, in irdma_cq_free_rsrc()
1792 dma_free_coherent(rf->sc_dev.hw->device, in irdma_cq_free_rsrc()
1905 if (!(rf->sc_dev.hw_attrs.uk_attrs.feature_flags & in irdma_resize_cq()
1917 if (rf->sc_dev.hw_attrs.uk_attrs.hw_rev >= IRDMA_GEN_2) in irdma_resize_cq()
2053 struct irdma_sc_dev *dev = &rf->sc_dev; in irdma_create_cq()
2123 if (rf->sc_dev.hw_attrs.uk_attrs.feature_flags & in irdma_create_cq()
2562 cqp_info->in.u.mw_alloc.dev = &iwdev->rf->sc_dev; in irdma_hw_alloc_mw()
2624 cqp_info->in.u.dealloc_stag.dev = &iwdev->rf->sc_dev; in irdma_dealloc_mw()
2663 cqp_info->in.u.alloc_stag.dev = &iwdev->rf->sc_dev; in irdma_hw_alloc_stag()
2832 cqp_info->in.u.mr_reg_non_shared.dev = &iwdev->rf->sc_dev; in irdma_hwreg_mr()
2918 iwdev->rf->sc_dev.hw_attrs.page_size_cap : SZ_4K; in irdma_alloc_iwmr()
2988 if (iwdev->rf->sc_dev.hw_attrs.uk_attrs.feature_flags & IRDMA_FEATURE_CQ_RESIZE) in irdma_reg_user_mr_type_cq()
3029 if (len > iwdev->rf->sc_dev.hw_attrs.max_mr_size) in irdma_reg_user_mr()
3095 if (len > iwdev->rf->sc_dev.hw_attrs.max_mr_size) in irdma_reg_user_mr_dmabuf()
3158 cqp_info->in.u.dealloc_stag.dev = &iwdev->rf->sc_dev; in irdma_hwdereg_mr()
3196 iwdev->rf->sc_dev.hw_attrs.page_size_cap, in irdma_rereg_mr_trans()
3243 if (len > iwdev->rf->sc_dev.hw_attrs.max_mr_size) in irdma_rereg_user_mr()
3443 dev = &iwqp->iwdev->rf->sc_dev; in irdma_post_send()
4010 irdma_fw_major_ver(&iwdev->rf->sc_dev), in irdma_get_dev_fw_str()
4011 irdma_fw_minor_ver(&iwdev->rf->sc_dev)); in irdma_get_dev_fw_str()
4023 struct irdma_sc_dev *dev = &iwdev->rf->sc_dev; in irdma_alloc_hw_port_stats()
4047 irdma_cqp_gather_stats_cmd(&iwdev->rf->sc_dev, iwdev->vsi.pestat, true); in irdma_get_hw_stats()
4049 irdma_cqp_gather_stats_gen1(&iwdev->rf->sc_dev, iwdev->vsi.pestat); in irdma_get_hw_stats()
4265 mc_qht_elem->mc_grp_ctx.hmc_fcn_id = iwdev->rf->sc_dev.hmc_fn_id; in irdma_attach_mcast()
4449 irdma_sc_init_ah(&rf->sc_dev, sc_ah); in irdma_setup_ah()
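
Every hit above follows the same shape: the HW-independent device object (sc_dev) is embedded in the PCI-function structure (rf), and the verbs code reaches it either as &rf->sc_dev or as &iwdev->rf->sc_dev before reading hw_attrs or passing it to a CQP command. The sketch below is only an illustration of that access pattern, not the real kernel definitions; the struct layouts are trimmed down, and fw_major()/fw_minor() are hypothetical stand-ins for irdma_fw_major_ver()/irdma_fw_minor_ver().

	/* Simplified sketch of the sc_dev access pattern seen in the listing. */
	#include <stdint.h>

	struct irdma_hw_attrs {
		uint64_t max_mr_size;	/* bounds checks in irdma_reg_user_mr() and friends */
		uint32_t max_hw_pds;
		uint32_t max_qp_wr;
	};

	struct irdma_sc_dev {
		struct irdma_hw_attrs hw_attrs;	/* read by irdma_query_device(), irdma_alloc_ucontext(), ... */
	};

	struct irdma_pci_f {
		struct irdma_sc_dev sc_dev;	/* embedded, so &rf->sc_dev is always valid */
	};

	struct irdma_device {
		struct irdma_pci_f *rf;
	};

	/* Hypothetical helpers standing in for irdma_fw_major_ver()/irdma_fw_minor_ver(). */
	static uint64_t fw_major(struct irdma_sc_dev *dev) { return 1; }
	static uint64_t fw_minor(struct irdma_sc_dev *dev) { return 2; }

	/* Mirrors the fw_ver composition in irdma_query_device(): major in the high 32 bits, minor in the low. */
	static uint64_t example_fw_ver(struct irdma_device *iwdev)
	{
		struct irdma_sc_dev *dev = &iwdev->rf->sc_dev;

		return (uint64_t)fw_major(dev) << 32 | fw_minor(dev);
	}

Because sc_dev is embedded rather than allocated separately, taking its address never requires a NULL check on sc_dev itself; the listing shows the same &...->sc_dev expression handed to CQP helpers (irdma_cqp_qp_destroy_cmd(), irdma_sc_init_ah()) and to the dealloc_stag/mw_alloc CQP info unions alike.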