Lines matching refs: iwmr — uses of struct irdma_mr in the irdma provider's verbs code; each entry gives the source line number, the matching code, and the enclosing function, with "argument"/"local" marking how iwmr is bound at that site.

2333 static void irdma_copy_user_pgaddrs(struct irdma_mr *iwmr, u64 *pbl,  in irdma_copy_user_pgaddrs()  argument
2336 struct ib_umem *region = iwmr->region; in irdma_copy_user_pgaddrs()
2337 struct irdma_pbl *iwpbl = &iwmr->iwpbl; in irdma_copy_user_pgaddrs()
2346 if (iwmr->type == IRDMA_MEMREG_TYPE_QP) in irdma_copy_user_pgaddrs()
2349 rdma_umem_for_each_dma_block(region, &biter, iwmr->page_size) { in irdma_copy_user_pgaddrs()
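The lines above are irdma_copy_user_pgaddrs() walking the pinned user region (iwmr->region) in device-page-size chunks and writing one DMA address per block into the physical buffer list (PBL); line 2346 special-cases IRDMA_MEMREG_TYPE_QP registrations. A minimal sketch of that core iterator pattern, assuming the caller already holds a valid struct ib_umem and a pbl array sized via ib_umem_num_dma_blocks():

#include <rdma/ib_umem.h>

/* Sketch only: one DMA address per device-sized block into pbl[].
 * The real irdma_copy_user_pgaddrs() additionally handles level-2 PBL
 * leaf chaining and the QP special case noted above.
 */
static void copy_dma_blocks_sketch(struct ib_umem *umem, u64 *pbl,
				   unsigned long page_size)
{
	struct ib_block_iter biter;
	u64 *cur = pbl;

	rdma_umem_for_each_dma_block(umem, &biter, page_size)
		*cur++ = rdma_block_iter_dma_address(&biter);
}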
2418 static int irdma_setup_pbles(struct irdma_pci_f *rf, struct irdma_mr *iwmr, in irdma_setup_pbles() argument
2421 struct irdma_pbl *iwpbl = &iwmr->iwpbl; in irdma_setup_pbles()
2429 status = irdma_get_pble(rf->pble_rsrc, palloc, iwmr->page_cnt, in irdma_setup_pbles()
2440 pbl = iwmr->pgaddrmem; in irdma_setup_pbles()
2443 irdma_copy_user_pgaddrs(iwmr, pbl, level); in irdma_setup_pbles()
2446 iwmr->pgaddrmem[0] = *pbl; in irdma_setup_pbles()
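Condensing irdma_setup_pbles() from the lines above: when lvl requests a paged layout, the entries come from the shared PBLE pool via irdma_get_pble() and the first physical address is cached back into iwmr->pgaddrmem[0] (line 2446); a single-page MR keeps its address inline and never touches the pool. A sketch stitched from those lines; it only builds inside the irdma driver, and the final argument of the irdma_get_pble() call is truncated in the listing, so passing lvl through is an assumption here:

/* Condensed irdma_setup_pbles(); error unwinding is elided. */
static int setup_pbles_sketch(struct irdma_pci_f *rf, struct irdma_mr *iwmr,
			      u8 lvl)
{
	struct irdma_pbl *iwpbl = &iwmr->iwpbl;
	struct irdma_pble_alloc *palloc = &iwpbl->pble_alloc;
	enum irdma_pble_level level = PBLE_LEVEL_1;
	u64 *pbl;
	int status;

	if (lvl) {
		/* multi-page: carve page_cnt entries from the PBLE pool;
		 * final argument assumed to be the allowed-level mask.
		 */
		status = irdma_get_pble(rf->pble_rsrc, palloc, iwmr->page_cnt,
					lvl);
		if (status)
			return status;
		level = palloc->level;
		pbl = palloc->level1.addr; /* level-1 case; level-2 uses leaves */
	} else {
		pbl = iwmr->pgaddrmem;	/* single page kept inline */
	}

	irdma_copy_user_pgaddrs(iwmr, pbl, level);

	if (lvl)
		iwmr->pgaddrmem[0] = *pbl; /* cache first PA (line 2446) */

	return 0;
}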
2463 struct irdma_mr *iwmr = iwpbl->iwmr; in irdma_handle_q_mem() local
2467 u64 *arr = iwmr->pgaddrmem; in irdma_handle_q_mem()
2472 pg_size = iwmr->page_size; in irdma_handle_q_mem()
2473 err = irdma_setup_pbles(iwdev->rf, iwmr, lvl); in irdma_handle_q_mem()
2480 switch (iwmr->type) { in irdma_handle_q_mem()
2538 static int irdma_hw_alloc_mw(struct irdma_device *iwdev, struct irdma_mr *iwmr) in irdma_hw_alloc_mw() argument
2541 struct irdma_pd *iwpd = to_iwpd(iwmr->ibmr.pd); in irdma_hw_alloc_mw()
2553 if (iwmr->ibmw.type == IB_MW_TYPE_1) in irdma_hw_alloc_mw()
2557 info->mw_stag_index = iwmr->stag >> IRDMA_CQPSQ_STAG_IDX_S; in irdma_hw_alloc_mw()
2578 struct irdma_mr *iwmr = to_iwmw(ibmw); in irdma_alloc_mw() local
2586 iwmr->stag = stag; in irdma_alloc_mw()
2589 err_code = irdma_hw_alloc_mw(iwdev, iwmr); in irdma_alloc_mw()
2606 struct irdma_mr *iwmr = to_iwmr((struct ib_mr *)ibmw); in irdma_dealloc_mw() local
2628 irdma_free_stag(iwdev, iwmr->stag); in irdma_dealloc_mw()
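Lines 2538-2628 trace the memory-window path: irdma_alloc_mw() stores a freshly reserved stag in iwmr->stag and programs it through irdma_hw_alloc_mw() (type-1 windows get special handling at line 2553), while irdma_dealloc_mw() returns the stag with irdma_free_stag(). A sketch of the reserve/program/unwind shape this implies, with irdma_create_stag() assumed as the stag allocator:

/* Sketch of the alloc-side error unwinding implied above.
 * irdma_create_stag() is assumed; the real irdma_alloc_mw() also
 * copies the resulting key back to user space.
 */
static int alloc_mw_sketch(struct irdma_device *iwdev, struct irdma_mr *iwmr)
{
	u32 stag = irdma_create_stag(iwdev);	/* assumed helper */
	int err;

	if (!stag)
		return -ENOMEM;

	iwmr->stag = stag;
	err = irdma_hw_alloc_mw(iwdev, iwmr);
	if (err)
		irdma_free_stag(iwdev, stag);	/* undo the reservation */

	return err;
}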
2639 struct irdma_mr *iwmr) in irdma_hw_alloc_stag() argument
2642 struct ib_pd *pd = iwmr->ibmr.pd; in irdma_hw_alloc_stag()
2656 info->stag_idx = iwmr->stag >> IRDMA_CQPSQ_STAG_IDX_S; in irdma_hw_alloc_stag()
2658 info->total_len = iwmr->len; in irdma_hw_alloc_stag()
2670 iwmr->is_hwreg = 1; in irdma_hw_alloc_stag()
2686 struct irdma_mr *iwmr; in irdma_alloc_mr() local
2690 iwmr = kzalloc(sizeof(*iwmr), GFP_KERNEL); in irdma_alloc_mr()
2691 if (!iwmr) in irdma_alloc_mr()
2700 iwmr->stag = stag; in irdma_alloc_mr()
2701 iwmr->ibmr.rkey = stag; in irdma_alloc_mr()
2702 iwmr->ibmr.lkey = stag; in irdma_alloc_mr()
2703 iwmr->ibmr.pd = pd; in irdma_alloc_mr()
2704 iwmr->ibmr.device = pd->device; in irdma_alloc_mr()
2705 iwpbl = &iwmr->iwpbl; in irdma_alloc_mr()
2706 iwpbl->iwmr = iwmr; in irdma_alloc_mr()
2707 iwmr->type = IRDMA_MEMREG_TYPE_MEM; in irdma_alloc_mr()
2709 iwmr->page_cnt = max_num_sg; in irdma_alloc_mr()
2711 iwmr->len = max_num_sg * PAGE_SIZE; in irdma_alloc_mr()
2712 err_code = irdma_get_pble(iwdev->rf->pble_rsrc, palloc, iwmr->page_cnt, in irdma_alloc_mr()
2717 err_code = irdma_hw_alloc_stag(iwdev, iwmr); in irdma_alloc_mr()
2723 return &iwmr->ibmr; in irdma_alloc_mr()
2729 kfree(iwmr); in irdma_alloc_mr()
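irdma_alloc_mr() above implements the ib_alloc_mr() verb for fast registration: it sizes the MR for max_num_sg pages (lines 2709-2711), pre-allocates that many PBLEs (line 2712), and programs the stag before any pages are mapped (line 2717). From a kernel ULP the whole path is reached through the single core verb; a minimal usage example (the max_num_sg value of 32 is arbitrary):

#include <rdma/ib_verbs.h>

/* Allocate a fast-registration MR able to map up to 32 SG entries;
 * on irdma hardware this lands in irdma_alloc_mr() shown above.
 */
static struct ib_mr *alloc_fast_reg_mr(struct ib_pd *pd)
{
	return ib_alloc_mr(pd, IB_MR_TYPE_MEM_REG, 32);
}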
2741 struct irdma_mr *iwmr = to_iwmr(ibmr); in irdma_set_page() local
2742 struct irdma_pbl *iwpbl = &iwmr->iwpbl; in irdma_set_page()
2746 if (unlikely(iwmr->npages == iwmr->page_cnt)) in irdma_set_page()
2751 palloc->level2.leaf + (iwmr->npages >> PBLE_512_SHIFT); in irdma_set_page()
2753 palloc_info->addr[iwmr->npages & (PBLE_PER_PAGE - 1)] = addr; in irdma_set_page()
2756 pbl[iwmr->npages] = addr; in irdma_set_page()
2758 iwmr->npages++; in irdma_set_page()
2773 struct irdma_mr *iwmr = to_iwmr(ibmr); in irdma_map_mr_sg() local
2775 iwmr->npages = 0; in irdma_map_mr_sg()
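irdma_set_page() is the per-page callback behind the core ib_map_mr_sg() verb: it refuses to run past page_cnt (line 2746), steers each address into either a level-2 leaf (lines 2751-2753) or the flat level-1 array (line 2755), and bumps npages. irdma_map_mr_sg() just resets npages (line 2775) before the core walks the scatterlist, presumably via ib_sg_to_pages(). A hedged caller-side sketch; SZ_4K is an example page size:

#include <rdma/ib_verbs.h>
#include <linux/scatterlist.h>
#include <linux/sizes.h>

/* Map a scatterlist into a fast-reg MR; ib_map_mr_sg() invokes the
 * driver's set_page callback once per MR page. Mapping fewer entries
 * than requested is treated as failure here.
 */
static int map_sg_into_mr(struct ib_mr *mr, struct scatterlist *sgl,
			  int sg_nents)
{
	int n = ib_map_mr_sg(mr, sgl, sg_nents, NULL, SZ_4K);

	if (n < 0)
		return n;
	return n == sg_nents ? 0 : -EINVAL;
}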
2786 static int irdma_hwreg_mr(struct irdma_device *iwdev, struct irdma_mr *iwmr, in irdma_hwreg_mr() argument
2789 struct irdma_pbl *iwpbl = &iwmr->iwpbl; in irdma_hwreg_mr()
2791 struct ib_pd *pd = iwmr->ibmr.pd; in irdma_hwreg_mr()
2806 stag_info->stag_idx = iwmr->stag >> IRDMA_CQPSQ_STAG_IDX_S; in irdma_hwreg_mr()
2807 stag_info->stag_key = (u8)iwmr->stag; in irdma_hwreg_mr()
2808 stag_info->total_len = iwmr->len; in irdma_hwreg_mr()
2816 stag_info->page_size = iwmr->page_size; in irdma_hwreg_mr()
2827 stag_info->reg_addr_pa = iwmr->pgaddrmem[0]; in irdma_hwreg_mr()
2838 iwmr->is_hwreg = 1; in irdma_hwreg_mr()
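irdma_hwreg_mr() turns the iwmr into a CQP register-MR request; the listed lines show the field mapping: the stag index and 8-bit key are split out of iwmr->stag (lines 2806-2807), followed by length, page size, and, for a physically contiguous MR, the first page address. Stitched from those lines; the struct irdma_reg_ns_stag_info type name is an assumption here, and the CQP submission around it is elided:

static void fill_stag_info_sketch(struct irdma_reg_ns_stag_info *stag_info,
				  struct irdma_mr *iwmr)
{
	stag_info->stag_idx = iwmr->stag >> IRDMA_CQPSQ_STAG_IDX_S;
	stag_info->stag_key = (u8)iwmr->stag;	/* key = low 8 bits */
	stag_info->total_len = iwmr->len;
	stag_info->page_size = iwmr->page_size;
	/* level-0 (contiguous) case: the PA is carried directly */
	stag_info->reg_addr_pa = iwmr->pgaddrmem[0];
}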
2843 static int irdma_reg_user_mr_type_mem(struct irdma_mr *iwmr, int access, in irdma_reg_user_mr_type_mem() argument
2846 struct irdma_device *iwdev = to_iwdev(iwmr->ibmr.device); in irdma_reg_user_mr_type_mem()
2847 struct irdma_pbl *iwpbl = &iwmr->iwpbl; in irdma_reg_user_mr_type_mem()
2852 lvl = iwmr->page_cnt != 1 ? PBLE_LEVEL_1 | PBLE_LEVEL_2 : PBLE_LEVEL_0; in irdma_reg_user_mr_type_mem()
2854 err = irdma_setup_pbles(iwdev->rf, iwmr, lvl); in irdma_reg_user_mr_type_mem()
2860 iwmr->page_size); in irdma_reg_user_mr_type_mem()
2874 iwmr->stag = stag; in irdma_reg_user_mr_type_mem()
2875 iwmr->ibmr.rkey = stag; in irdma_reg_user_mr_type_mem()
2876 iwmr->ibmr.lkey = stag; in irdma_reg_user_mr_type_mem()
2879 err = irdma_hwreg_mr(iwdev, iwmr, access); in irdma_reg_user_mr_type_mem()
2902 struct irdma_mr *iwmr; in irdma_alloc_iwmr() local
2905 iwmr = kzalloc(sizeof(*iwmr), GFP_KERNEL); in irdma_alloc_iwmr()
2906 if (!iwmr) in irdma_alloc_iwmr()
2909 iwpbl = &iwmr->iwpbl; in irdma_alloc_iwmr()
2910 iwpbl->iwmr = iwmr; in irdma_alloc_iwmr()
2911 iwmr->region = region; in irdma_alloc_iwmr()
2912 iwmr->ibmr.pd = pd; in irdma_alloc_iwmr()
2913 iwmr->ibmr.device = pd->device; in irdma_alloc_iwmr()
2914 iwmr->ibmr.iova = virt; in irdma_alloc_iwmr()
2915 iwmr->type = reg_type; in irdma_alloc_iwmr()
2920 iwmr->page_size = ib_umem_find_best_pgsz(region, pgsz_bitmap, virt); in irdma_alloc_iwmr()
2921 if (unlikely(!iwmr->page_size)) { in irdma_alloc_iwmr()
2922 kfree(iwmr); in irdma_alloc_iwmr()
2926 iwmr->len = region->length; in irdma_alloc_iwmr()
2928 iwmr->page_cnt = ib_umem_num_dma_blocks(region, iwmr->page_size); in irdma_alloc_iwmr()
2930 return iwmr; in irdma_alloc_iwmr()
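irdma_alloc_iwmr() derives the MR geometry from the umem: ib_umem_find_best_pgsz() (line 2920) picks the largest device-supported page size compatible with the requested iova, and ib_umem_num_dma_blocks() (line 2928) then yields page_cnt at that size. A self-contained sketch of those two core calls; the SZ_4K | SZ_2M bitmap is illustrative, not irdma's actual mask (which the listing truncates):

#include <rdma/ib_umem.h>
#include <linux/sizes.h>

/* Pick an MR page size and page count for a pinned user region. */
static int size_mr_sketch(struct ib_umem *umem, u64 virt,
			  unsigned long *page_size, u32 *page_cnt)
{
	*page_size = ib_umem_find_best_pgsz(umem, SZ_4K | SZ_2M, virt);
	if (!*page_size)
		return -EOPNOTSUPP;	/* no usable page size */

	*page_cnt = ib_umem_num_dma_blocks(umem, *page_size);
	return 0;
}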
2933 static void irdma_free_iwmr(struct irdma_mr *iwmr) in irdma_free_iwmr() argument
2935 kfree(iwmr); in irdma_free_iwmr()
2940 struct irdma_mr *iwmr) in irdma_reg_user_mr_type_qp() argument
2942 struct irdma_device *iwdev = to_iwdev(iwmr->ibmr.device); in irdma_reg_user_mr_type_qp()
2943 struct irdma_pbl *iwpbl = &iwmr->iwpbl; in irdma_reg_user_mr_type_qp()
2952 ib_umem_offset(iwmr->region)) in irdma_reg_user_mr_type_qp()
2956 if (total > iwmr->page_cnt) in irdma_reg_user_mr_type_qp()
2977 struct irdma_mr *iwmr) in irdma_reg_user_mr_type_cq() argument
2979 struct irdma_device *iwdev = to_iwdev(iwmr->ibmr.device); in irdma_reg_user_mr_type_cq()
2980 struct irdma_pbl *iwpbl = &iwmr->iwpbl; in irdma_reg_user_mr_type_cq()
2991 if (total > iwmr->page_cnt) in irdma_reg_user_mr_type_cq()
3026 struct irdma_mr *iwmr = NULL; in irdma_reg_user_mr() local
3048 iwmr = irdma_alloc_iwmr(region, pd, virt, req.reg_type); in irdma_reg_user_mr()
3049 if (IS_ERR(iwmr)) { in irdma_reg_user_mr()
3051 return (struct ib_mr *)iwmr; in irdma_reg_user_mr()
3056 err = irdma_reg_user_mr_type_qp(req, udata, iwmr); in irdma_reg_user_mr()
3062 err = irdma_reg_user_mr_type_cq(req, udata, iwmr); in irdma_reg_user_mr()
3067 err = irdma_reg_user_mr_type_mem(iwmr, access, true); in irdma_reg_user_mr()
3077 return &iwmr->ibmr; in irdma_reg_user_mr()
3080 irdma_free_iwmr(iwmr); in irdma_reg_user_mr()
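irdma_reg_user_mr() is the top-level registration hook: after pinning the region and building the iwmr (line 3048), it dispatches on the reg_type the user library passed in; QP and CQ queue memory only get their page lists wired up, while ordinary memory takes the full stag path. A condensed sketch of that dispatch, stitched from lines 3056-3067 (umem unwinding on error is elided):

static int reg_type_dispatch_sketch(struct irdma_mem_reg_req req,
				    struct ib_udata *udata,
				    struct irdma_mr *iwmr, int access)
{
	switch (req.reg_type) {
	case IRDMA_MEMREG_TYPE_QP:
		return irdma_reg_user_mr_type_qp(req, udata, iwmr);
	case IRDMA_MEMREG_TYPE_CQ:
		return irdma_reg_user_mr_type_cq(req, udata, iwmr);
	case IRDMA_MEMREG_TYPE_MEM:
		return irdma_reg_user_mr_type_mem(iwmr, access, true);
	default:
		return -EINVAL;
	}
}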
3092 struct irdma_mr *iwmr; in irdma_reg_user_mr_dmabuf() local
3105 iwmr = irdma_alloc_iwmr(&umem_dmabuf->umem, pd, virt, IRDMA_MEMREG_TYPE_MEM); in irdma_reg_user_mr_dmabuf()
3106 if (IS_ERR(iwmr)) { in irdma_reg_user_mr_dmabuf()
3107 err = PTR_ERR(iwmr); in irdma_reg_user_mr_dmabuf()
3111 err = irdma_reg_user_mr_type_mem(iwmr, access, true); in irdma_reg_user_mr_dmabuf()
3115 return &iwmr->ibmr; in irdma_reg_user_mr_dmabuf()
3118 irdma_free_iwmr(iwmr); in irdma_reg_user_mr_dmabuf()
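The dma-buf variant skips ib_umem_get() and instead pins the exporter's buffer before reusing the ordinary TYPE_MEM path on &umem_dmabuf->umem (lines 3105-3111). A hedged sketch of the pinning step using the core API this path is built on; parameter types here are illustrative:

#include <rdma/ib_umem.h>

/* Pin a dma-buf for RDMA access; fd is the dma-buf file descriptor
 * handed in from user space.
 */
static struct ib_umem_dmabuf *pin_dmabuf_sketch(struct ib_device *dev,
						u64 start, u64 len, int fd,
						int access)
{
	return ib_umem_dmabuf_get_pinned(dev, start, len, fd, access);
}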
3129 struct irdma_mr *iwmr = to_iwmr(ib_mr); in irdma_hwdereg_mr() local
3132 struct irdma_pbl *iwpbl = &iwmr->iwpbl; in irdma_hwdereg_mr()
3140 if (!iwmr->is_hwreg) in irdma_hwdereg_mr()
3165 iwmr->is_hwreg = 0; in irdma_hwdereg_mr()
3179 static int irdma_rereg_mr_trans(struct irdma_mr *iwmr, u64 start, u64 len, in irdma_rereg_mr_trans() argument
3182 struct irdma_device *iwdev = to_iwdev(iwmr->ibmr.device); in irdma_rereg_mr_trans()
3183 struct irdma_pbl *iwpbl = &iwmr->iwpbl; in irdma_rereg_mr_trans()
3184 struct ib_pd *pd = iwmr->ibmr.pd; in irdma_rereg_mr_trans()
3188 region = ib_umem_get(pd->device, start, len, iwmr->access); in irdma_rereg_mr_trans()
3192 iwmr->region = region; in irdma_rereg_mr_trans()
3193 iwmr->ibmr.iova = virt; in irdma_rereg_mr_trans()
3194 iwmr->ibmr.pd = pd; in irdma_rereg_mr_trans()
3195 iwmr->page_size = ib_umem_find_best_pgsz(region, in irdma_rereg_mr_trans()
3198 if (unlikely(!iwmr->page_size)) { in irdma_rereg_mr_trans()
3203 iwmr->len = region->length; in irdma_rereg_mr_trans()
3205 iwmr->page_cnt = ib_umem_num_dma_blocks(region, iwmr->page_size); in irdma_rereg_mr_trans()
3207 err = irdma_reg_user_mr_type_mem(iwmr, iwmr->access, false); in irdma_rereg_mr_trans()
3239 struct irdma_mr *iwmr = to_iwmr(ib_mr); in irdma_rereg_user_mr() local
3240 struct irdma_pbl *iwpbl = &iwmr->iwpbl; in irdma_rereg_user_mr()
3254 iwmr->access = new_access; in irdma_rereg_user_mr()
3257 iwmr->ibmr.pd = new_pd; in irdma_rereg_user_mr()
3258 iwmr->ibmr.device = new_pd->device; in irdma_rereg_user_mr()
3267 if (iwmr->region) { in irdma_rereg_user_mr()
3268 ib_umem_release(iwmr->region); in irdma_rereg_user_mr()
3269 iwmr->region = NULL; in irdma_rereg_user_mr()
3272 ret = irdma_rereg_mr_trans(iwmr, start, len, virt); in irdma_rereg_user_mr()
3274 ret = irdma_hwreg_mr(iwdev, iwmr, iwmr->access); in irdma_rereg_user_mr()
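irdma_rereg_user_mr() first un-programs the old stag (irdma_hwdereg_mr() clears is_hwreg at line 3165), applies any access or PD change in place (lines 3254-3258), and then splits: a translation change releases the old umem and runs irdma_rereg_mr_trans() to pin and size a new one, while a flags-only change re-programs the existing page list with irdma_hwreg_mr(). Condensed from lines 3267-3274:

static int rereg_tail_sketch(struct irdma_device *iwdev,
			     struct irdma_mr *iwmr, int flags,
			     u64 start, u64 len, u64 virt)
{
	/* IB_MR_REREG_TRANS: the caller supplied a new start/len/virt */
	if (flags & IB_MR_REREG_TRANS) {
		if (iwmr->region) {
			ib_umem_release(iwmr->region);	/* drop old pinning */
			iwmr->region = NULL;
		}
		return irdma_rereg_mr_trans(iwmr, start, len, virt);
	}
	/* flags-only change: re-program the existing page list */
	return irdma_hwreg_mr(iwdev, iwmr, iwmr->access);
}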
3294 struct irdma_mr *iwmr; in irdma_reg_phys_mr() local
3298 iwmr = kzalloc(sizeof(*iwmr), GFP_KERNEL); in irdma_reg_phys_mr()
3299 if (!iwmr) in irdma_reg_phys_mr()
3302 iwmr->ibmr.pd = pd; in irdma_reg_phys_mr()
3303 iwmr->ibmr.device = pd->device; in irdma_reg_phys_mr()
3304 iwpbl = &iwmr->iwpbl; in irdma_reg_phys_mr()
3305 iwpbl->iwmr = iwmr; in irdma_reg_phys_mr()
3306 iwmr->type = IRDMA_MEMREG_TYPE_MEM; in irdma_reg_phys_mr()
3314 iwmr->stag = stag; in irdma_reg_phys_mr()
3315 iwmr->ibmr.iova = *iova_start; in irdma_reg_phys_mr()
3316 iwmr->ibmr.rkey = stag; in irdma_reg_phys_mr()
3317 iwmr->ibmr.lkey = stag; in irdma_reg_phys_mr()
3318 iwmr->page_cnt = 1; in irdma_reg_phys_mr()
3319 iwmr->pgaddrmem[0] = addr; in irdma_reg_phys_mr()
3320 iwmr->len = size; in irdma_reg_phys_mr()
3321 iwmr->page_size = SZ_4K; in irdma_reg_phys_mr()
3322 ret = irdma_hwreg_mr(iwdev, iwmr, access); in irdma_reg_phys_mr()
3328 return &iwmr->ibmr; in irdma_reg_phys_mr()
3331 kfree(iwmr); in irdma_reg_phys_mr()
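irdma_reg_phys_mr() is the degenerate single-page case: with page_cnt = 1 the sole physical address sits in pgaddrmem[0], no PBLEs are needed, and registration reduces to one hardware call. The shape, stitched from lines 3318-3322:

static int reg_phys_sketch(struct irdma_device *iwdev, struct irdma_mr *iwmr,
			   u64 addr, u64 size, int access)
{
	iwmr->page_cnt = 1;		/* contiguous: level-0, no PBLEs */
	iwmr->pgaddrmem[0] = addr;
	iwmr->page_size = SZ_4K;
	iwmr->len = size;
	return irdma_hwreg_mr(iwdev, iwmr, access);
}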
3353 static void irdma_del_memlist(struct irdma_mr *iwmr, in irdma_del_memlist() argument
3356 struct irdma_pbl *iwpbl = &iwmr->iwpbl; in irdma_del_memlist()
3359 switch (iwmr->type) { in irdma_del_memlist()
3388 struct irdma_mr *iwmr = to_iwmr(ib_mr); in irdma_dereg_mr() local
3390 struct irdma_pbl *iwpbl = &iwmr->iwpbl; in irdma_dereg_mr()
3393 if (iwmr->type != IRDMA_MEMREG_TYPE_MEM) { in irdma_dereg_mr()
3394 if (iwmr->region) { in irdma_dereg_mr()
3400 irdma_del_memlist(iwmr, ucontext); in irdma_dereg_mr()
3409 irdma_free_stag(iwdev, iwmr->stag); in irdma_dereg_mr()
3414 if (iwmr->region) in irdma_dereg_mr()
3415 ib_umem_release(iwmr->region); in irdma_dereg_mr()
3417 kfree(iwmr); in irdma_dereg_mr()
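irdma_dereg_mr() splits on type: QP/CQ page-list registrations are only unlinked from the ucontext via irdma_del_memlist() (line 3400), while real MRs give back their stag (line 3409) before the umem is released and the iwmr freed. From the consumer side the whole teardown is one core verb:

#include <rdma/ib_verbs.h>

/* Tear down an MR created earlier; on irdma this reaches
 * irdma_dereg_mr() shown above.
 */
static int destroy_mr(struct ib_mr *mr)
{
	return ib_dereg_mr(mr);
}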
3543 struct irdma_mr *iwmr = to_iwmr(reg_wr(ib_wr)->mr); in irdma_post_send() local
3544 struct irdma_pble_alloc *palloc = &iwmr->iwpbl.pble_alloc; in irdma_post_send()
3555 stag_info.va = (void *)(uintptr_t)iwmr->ibmr.iova; in irdma_post_send()
3556 stag_info.total_len = iwmr->ibmr.length; in irdma_post_send()
3560 if (iwmr->npages > IRDMA_MIN_PAGES_PER_FMR) in irdma_post_send()
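The final lines show irdma_post_send() translating an IB_WR_REG_MR work request: the iova and length captured at map time feed the local stag_info (lines 3555-3556), and page counts above IRDMA_MIN_PAGES_PER_FMR are handled through the PBLE-backed page list (line 3560). A hedged ULP-side example of posting such a request; qp and mr are assumed valid and already mapped with ib_map_mr_sg():

#include <rdma/ib_verbs.h>

/* Post a fast-registration WR. A real ULP would typically rotate the
 * key with ib_update_fast_reg_key() before each reuse.
 */
static int post_reg_mr(struct ib_qp *qp, struct ib_mr *mr)
{
	struct ib_reg_wr wr = {
		.wr = {
			.opcode = IB_WR_REG_MR,
			.send_flags = IB_SEND_SIGNALED,
		},
		.mr = mr,
		.key = mr->rkey,
		.access = IB_ACCESS_LOCAL_WRITE | IB_ACCESS_REMOTE_WRITE,
	};
	const struct ib_send_wr *bad_wr;

	return ib_post_send(qp, &wr.wr, &bad_wr);
}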