Lines matching full:sg — uses of the struct ib_sge argument/local `sg` in the mlx5 UMR helpers (drivers/infiniband/hw/mlx5/umr.c)
523 struct ib_sge *sg) in mlx5r_umr_unmap_free_xlt() argument
527 dma_unmap_single(ddev, sg->addr, sg->length, DMA_TO_DEVICE); in mlx5r_umr_unmap_free_xlt()
528 mlx5r_umr_free_xlt(xlt, sg->length); in mlx5r_umr_unmap_free_xlt()
534 static void *mlx5r_umr_create_xlt(struct mlx5_ib_dev *dev, struct ib_sge *sg, in mlx5r_umr_create_xlt() argument
545 sg->length = nents * ent_size; in mlx5r_umr_create_xlt()
546 dma = dma_map_single(ddev, xlt, sg->length, DMA_TO_DEVICE); in mlx5r_umr_create_xlt()
549 mlx5r_umr_free_xlt(xlt, sg->length); in mlx5r_umr_create_xlt()
552 sg->addr = dma; in mlx5r_umr_create_xlt()
553 sg->lkey = dev->umrc.pd->local_dma_lkey; in mlx5r_umr_create_xlt()
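The hits above show the XLT staging buffer's lifecycle: mlx5r_umr_create_xlt() sizes the buffer as nents * ent_size, maps it for device reads, and records the mapping in the caller's struct ib_sge, while mlx5r_umr_unmap_free_xlt() reverses both steps. A minimal sketch of the same pattern, with kzalloc()/kfree() standing in for the driver's own mlx5r_umr_free_xlt()-paired allocator (kzalloc keeps the buffer physically contiguous, which dma_map_single() requires):

#include <linux/dma-mapping.h>
#include <linux/slab.h>
#include <rdma/ib_verbs.h>

/*
 * Sketch of the XLT staging-buffer setup shown above; not the in-tree
 * implementation. kzalloc()/kfree() stand in for the driver's allocator.
 */
static void *create_xlt_like(struct device *ddev, struct ib_sge *sg,
			     size_t nents, size_t ent_size, u32 local_dma_lkey)
{
	void *xlt = kzalloc(nents * ent_size, GFP_KERNEL);

	if (!xlt)
		return NULL;

	sg->length = nents * ent_size;
	sg->addr = dma_map_single(ddev, xlt, sg->length, DMA_TO_DEVICE);
	if (dma_mapping_error(ddev, sg->addr)) {
		kfree(xlt);
		return NULL;
	}
	/* The UMR QP reads the buffer through the PD's local DMA lkey. */
	sg->lkey = local_dma_lkey;
	return xlt;
}

/* Reverse of the above, mirroring mlx5r_umr_unmap_free_xlt(). */
static void unmap_free_xlt_like(struct device *ddev, void *xlt,
				struct ib_sge *sg)
{
	dma_unmap_single(ddev, sg->addr, sg->length, DMA_TO_DEVICE);
	kfree(xlt);
}

Note that both halves take the length from sg->length, which is why the callers below restore it before teardown if a final chunk shrank it.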
560 unsigned int flags, struct ib_sge *sg) in mlx5r_umr_set_update_xlt_ctrl_seg() argument
569 cpu_to_be16(mlx5r_umr_get_xlt_octo(sg->length)); in mlx5r_umr_set_update_xlt_ctrl_seg()
588 struct ib_sge *sg) in mlx5r_umr_set_update_xlt_data_seg() argument
590 data_seg->byte_count = cpu_to_be32(sg->length); in mlx5r_umr_set_update_xlt_data_seg()
591 data_seg->lkey = cpu_to_be32(sg->lkey); in mlx5r_umr_set_update_xlt_data_seg()
592 data_seg->addr = cpu_to_be64(sg->addr); in mlx5r_umr_set_update_xlt_data_seg()
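Both WQE segments are populated from the same struct ib_sge: the control segment carries the translation size in octowords (see the next group of hits), and the data segment points the UMR QP at the mapped buffer. A sketch of the data-segment half, using the in-tree struct mlx5_wqe_data_seg layout; all fields are big-endian on the wire:

#include <linux/mlx5/qp.h>
#include <rdma/ib_verbs.h>

/* Mirror of mlx5r_umr_set_update_xlt_data_seg() as shown above. */
static void set_xlt_data_seg_like(struct mlx5_wqe_data_seg *data_seg,
				  const struct ib_sge *sg)
{
	data_seg->byte_count = cpu_to_be32(sg->length);
	data_seg->lkey = cpu_to_be32(sg->lkey);
	data_seg->addr = cpu_to_be64(sg->addr);
}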
607 struct mlx5_ib_mr *mr, struct ib_sge *sg, in mlx5r_umr_final_update_xlt() argument
634 cpu_to_be16(mlx5r_umr_get_xlt_octo(sg->length)); in mlx5r_umr_final_update_xlt()
635 wqe->data_seg.byte_count = cpu_to_be32(sg->length); in mlx5r_umr_final_update_xlt()
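Both the initial control segment (line 569) and the final WQE (line 634) express sg->length as an octoword count rather than bytes. Assuming the 16-byte octoword of MLX5_IB_UMR_OCTOWORD, the conversion is a plain division; a sketch:

#include <linux/types.h>

/*
 * Assumption: 16-byte octowords, per MLX5_IB_UMR_OCTOWORD. A 4096-byte
 * XLT chunk is then reported to the device as 4096 / 16 = 256 octowords.
 */
static u16 xlt_octo_like(u64 bytes)
{
	return bytes / 16;
}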
651 struct ib_sge sg; in _mlx5r_umr_update_mr_pas() local
656 entry = mlx5r_umr_create_xlt(dev, &sg, in _mlx5r_umr_update_mr_pas()
662 orig_sg_length = sg.length; in _mlx5r_umr_update_mr_pas()
663 mlx5r_umr_set_update_xlt_ctrl_seg(&wqe.ctrl_seg, flags, &sg); in _mlx5r_umr_update_mr_pas()
674 mlx5r_umr_set_update_xlt_data_seg(&wqe.data_seg, &sg); in _mlx5r_umr_update_mr_pas()
678 if (curr_entry == entry + sg.length) { in _mlx5r_umr_update_mr_pas()
679 dma_sync_single_for_device(ddev, sg.addr, sg.length, in _mlx5r_umr_update_mr_pas()
686 dma_sync_single_for_cpu(ddev, sg.addr, sg.length, in _mlx5r_umr_update_mr_pas()
688 offset += sg.length; in _mlx5r_umr_update_mr_pas()
713 sg.length = ALIGN(final_size, MLX5_UMR_FLEX_ALIGNMENT); in _mlx5r_umr_update_mr_pas()
714 memset(curr_entry, 0, sg.length - final_size); in _mlx5r_umr_update_mr_pas()
715 mlx5r_umr_final_update_xlt(dev, &wqe, mr, &sg, flags); in _mlx5r_umr_update_mr_pas()
717 dma_sync_single_for_device(ddev, sg.addr, sg.length, DMA_TO_DEVICE); in _mlx5r_umr_update_mr_pas()
721 sg.length = orig_sg_length; in _mlx5r_umr_update_mr_pas()
722 mlx5r_umr_unmap_free_xlt(dev, entry, &sg); in _mlx5r_umr_update_mr_pas()
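The hits from _mlx5r_umr_update_mr_pas() outline a streaming loop: the MR's page addresses pass through the single mapped buffer in sg.length-sized chunks, with DMA ownership handed to the CPU before each fill (line 686) and back to the device before each post (lines 679 and 717), and with the final chunk flex-aligned and zero-padded (lines 713-714). A condensed sketch of that ownership dance; fill_entries() and post_umr_wqe() are hypothetical stand-ins for the real population and posting steps:

#include <linux/align.h>
#include <linux/dma-mapping.h>
#include <linux/errno.h>
#include <linux/minmax.h>
#include <linux/string.h>
#include <rdma/ib_verbs.h>

/* Hypothetical stand-ins for the driver's entry population and WQE post. */
extern void fill_entries(void *xlt, size_t offset, size_t len);
extern int post_umr_wqe(struct ib_sge *sg);

/*
 * Condensed sketch of the chunk loop in _mlx5r_umr_update_mr_pas():
 * alternate buffer ownership between CPU and device around each chunk,
 * and flex-align plus zero-pad only the final one.
 */
static int stream_xlt_like(struct device *ddev, struct ib_sge *sg, void *xlt,
			   size_t total, size_t flex_align)
{
	size_t done = 0;

	while (done < total) {
		size_t n = min_t(size_t, total - done, (size_t)sg->length);

		/* Hand the buffer to the CPU before writing entries. */
		dma_sync_single_for_cpu(ddev, sg->addr, sg->length,
					DMA_TO_DEVICE);
		fill_entries(xlt, done, n);
		if (done + n == total) {
			/* Final chunk: align the WQE length, zero the tail. */
			sg->length = ALIGN(n, flex_align);
			memset(xlt + n, 0, sg->length - n);
		}
		/* Hand it back to the device and post the UMR WQE. */
		dma_sync_single_for_device(ddev, sg->addr, sg->length,
					   DMA_TO_DEVICE);
		if (post_umr_wqe(sg))
			return -EIO;
		done += n;
	}
	return 0;
}

As at lines 721-722 above, the caller must restore sg.length to the originally mapped size before dma_unmap_single(), since the final chunk may have shrunk it.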
769 struct ib_sge sg; in mlx5r_umr_update_xlt() local
789 xlt = mlx5r_umr_create_xlt(dev, &sg, npages, desc_size, flags); in mlx5r_umr_update_xlt()
793 pages_iter = sg.length / desc_size; in mlx5r_umr_update_xlt()
794 orig_sg_length = sg.length; in mlx5r_umr_update_xlt()
803 mlx5r_umr_set_update_xlt_ctrl_seg(&wqe.ctrl_seg, flags, &sg); in mlx5r_umr_update_xlt()
805 mlx5r_umr_set_update_xlt_data_seg(&wqe.data_seg, &sg); in mlx5r_umr_update_xlt()
812 dma_sync_single_for_cpu(ddev, sg.addr, sg.length, in mlx5r_umr_update_xlt()
815 dma_sync_single_for_device(ddev, sg.addr, sg.length, in mlx5r_umr_update_xlt()
817 sg.length = ALIGN(size_to_map, MLX5_UMR_FLEX_ALIGNMENT); in mlx5r_umr_update_xlt()
820 mlx5r_umr_final_update_xlt(dev, &wqe, mr, &sg, flags); in mlx5r_umr_update_xlt()
824 sg.length = orig_sg_length; in mlx5r_umr_update_xlt()
825 mlx5r_umr_unmap_free_xlt(dev, xlt, &sg); in mlx5r_umr_update_xlt()
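mlx5r_umr_update_xlt() reuses the same buffer for a descriptor table, so the page count per posted WQE falls straight out of the mapping size (line 793: pages_iter = sg.length / desc_size), and the tail chunk is again flex-aligned (line 817) before sg.length is restored for the unmap (lines 824-825). A small worked sketch of that arithmetic, under assumed sizes (8-byte descriptors and the 64-byte MLX5_UMR_FLEX_ALIGNMENT):

#include <linux/align.h>
#include <linux/types.h>

/*
 * Chunking arithmetic from mlx5r_umr_update_xlt(), illustrated with
 * assumed sizes: a 4096-byte mapping with 8-byte descriptors translates
 * 4096 / 8 = 512 pages per posted WQE, and a trailing 800-byte chunk is
 * padded to ALIGN(800, 64) = 832 bytes so the final WQE stays aligned.
 */
static size_t pages_per_iter(size_t sg_length, size_t desc_size)
{
	return sg_length / desc_size;
}

static size_t final_chunk_len(size_t size_to_map, size_t flex_align)
{
	return ALIGN(size_to_map, flex_align);
}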