Lines Matching refs:gtt
697 struct amdgpu_ttm_tt *gtt = ttm_to_amdgpu_ttm_tt(ttm); in amdgpu_ttm_tt_get_user_pages() local
698 unsigned long start = gtt->userptr; in amdgpu_ttm_tt_get_user_pages()
722 if (unlikely((gtt->userflags & AMDGPU_GEM_USERPTR_ANONONLY) && in amdgpu_ttm_tt_get_user_pages()
746 struct amdgpu_ttm_tt *gtt = (void *)ttm; in amdgpu_ttm_tt_discard_user_pages() local
748 if (gtt && gtt->userptr && range) in amdgpu_ttm_tt_discard_user_pages()
761 struct amdgpu_ttm_tt *gtt = ttm_to_amdgpu_ttm_tt(ttm); in amdgpu_ttm_tt_get_user_pages_done() local
763 if (!gtt || !gtt->userptr || !range) in amdgpu_ttm_tt_get_user_pages_done()
767 gtt->userptr, ttm->num_pages); in amdgpu_ttm_tt_get_user_pages_done()
799 struct amdgpu_ttm_tt *gtt = ttm_to_amdgpu_ttm_tt(ttm); in amdgpu_ttm_tt_pin_userptr() local
800 int write = !(gtt->userflags & AMDGPU_GEM_USERPTR_READONLY); in amdgpu_ttm_tt_pin_userptr()
818 drm_prime_sg_to_dma_addr_array(ttm->sg, gtt->ttm.dma_address, in amdgpu_ttm_tt_pin_userptr()
836 struct amdgpu_ttm_tt *gtt = ttm_to_amdgpu_ttm_tt(ttm); in amdgpu_ttm_tt_unpin_userptr() local
837 int write = !(gtt->userflags & AMDGPU_GEM_USERPTR_READONLY); in amdgpu_ttm_tt_unpin_userptr()
860 struct amdgpu_ttm_tt *gtt = (void *)ttm; in amdgpu_ttm_gart_bind_gfx9_mqd() local
873 gtt->offset + (page_idx << PAGE_SHIFT), in amdgpu_ttm_gart_bind_gfx9_mqd()
874 1, &gtt->ttm.dma_address[page_idx], flags); in amdgpu_ttm_gart_bind_gfx9_mqd()
880 gtt->offset + ((page_idx + 1) << PAGE_SHIFT), in amdgpu_ttm_gart_bind_gfx9_mqd()
882 &gtt->ttm.dma_address[page_idx + 1], in amdgpu_ttm_gart_bind_gfx9_mqd()
893 struct amdgpu_ttm_tt *gtt = ttm_to_amdgpu_ttm_tt(ttm); in amdgpu_ttm_gart_bind() local
901 amdgpu_gart_bind(adev, gtt->offset, ttm->num_pages, in amdgpu_ttm_gart_bind()
902 gtt->ttm.dma_address, flags); in amdgpu_ttm_gart_bind()
904 gtt->bound = true; in amdgpu_ttm_gart_bind()
918 struct amdgpu_ttm_tt *gtt = ttm_to_amdgpu_ttm_tt(ttm); in amdgpu_ttm_backend_bind() local
925 if (gtt->bound) in amdgpu_ttm_backend_bind()
928 if (gtt->userptr) { in amdgpu_ttm_backend_bind()
939 attach = gtt->gobj->import_attach; in amdgpu_ttm_backend_bind()
947 drm_prime_sg_to_dma_addr_array(ttm->sg, gtt->ttm.dma_address, in amdgpu_ttm_backend_bind()
958 gtt->offset = AMDGPU_BO_INVALID_OFFSET; in amdgpu_ttm_backend_bind()
966 gtt->offset = (u64)bo_mem->start << PAGE_SHIFT; in amdgpu_ttm_backend_bind()
967 amdgpu_gart_bind(adev, gtt->offset, ttm->num_pages, in amdgpu_ttm_backend_bind()
968 gtt->ttm.dma_address, flags); in amdgpu_ttm_backend_bind()
969 gtt->bound = true; in amdgpu_ttm_backend_bind()
985 struct amdgpu_ttm_tt *gtt = ttm_to_amdgpu_ttm_tt(bo->ttm); in amdgpu_ttm_alloc_gart() local
1015 gtt->offset = (u64)tmp->start << PAGE_SHIFT; in amdgpu_ttm_alloc_gart()
1052 struct amdgpu_ttm_tt *gtt = ttm_to_amdgpu_ttm_tt(ttm); in amdgpu_ttm_backend_unbind() local
1055 if (gtt->userptr) { in amdgpu_ttm_backend_unbind()
1057 } else if (ttm->sg && gtt->gobj->import_attach) { in amdgpu_ttm_backend_unbind()
1060 attach = gtt->gobj->import_attach; in amdgpu_ttm_backend_unbind()
1065 if (!gtt->bound) in amdgpu_ttm_backend_unbind()
1068 if (gtt->offset == AMDGPU_BO_INVALID_OFFSET) in amdgpu_ttm_backend_unbind()
1072 amdgpu_gart_unbind(adev, gtt->offset, ttm->num_pages); in amdgpu_ttm_backend_unbind()
1073 gtt->bound = false; in amdgpu_ttm_backend_unbind()
1079 struct amdgpu_ttm_tt *gtt = ttm_to_amdgpu_ttm_tt(ttm); in amdgpu_ttm_backend_destroy() local
1081 if (gtt->usertask) in amdgpu_ttm_backend_destroy()
1082 put_task_struct(gtt->usertask); in amdgpu_ttm_backend_destroy()
1084 ttm_tt_fini(&gtt->ttm); in amdgpu_ttm_backend_destroy()
1085 kfree(gtt); in amdgpu_ttm_backend_destroy()
1101 struct amdgpu_ttm_tt *gtt; in amdgpu_ttm_tt_create() local
1104 gtt = kzalloc(sizeof(struct amdgpu_ttm_tt), GFP_KERNEL); in amdgpu_ttm_tt_create()
1105 if (!gtt) in amdgpu_ttm_tt_create()
1108 gtt->gobj = &bo->base; in amdgpu_ttm_tt_create()
1110 gtt->pool_id = KFD_XCP_MEM_ID(adev, abo->xcp_id); in amdgpu_ttm_tt_create()
1112 gtt->pool_id = abo->xcp_id; in amdgpu_ttm_tt_create()
1120 if (ttm_sg_tt_init(&gtt->ttm, bo, page_flags, caching)) { in amdgpu_ttm_tt_create()
1121 kfree(gtt); in amdgpu_ttm_tt_create()
1124 return &gtt->ttm; in amdgpu_ttm_tt_create()
1138 struct amdgpu_ttm_tt *gtt = ttm_to_amdgpu_ttm_tt(ttm); in amdgpu_ttm_tt_populate() local
1144 if (gtt->userptr) { in amdgpu_ttm_tt_populate()
1154 if (adev->mman.ttm_pools && gtt->pool_id >= 0) in amdgpu_ttm_tt_populate()
1155 pool = &adev->mman.ttm_pools[gtt->pool_id]; in amdgpu_ttm_tt_populate()
1177 struct amdgpu_ttm_tt *gtt = ttm_to_amdgpu_ttm_tt(ttm); in amdgpu_ttm_tt_unpopulate() local
1184 if (gtt->userptr) { in amdgpu_ttm_tt_unpopulate()
1199 if (adev->mman.ttm_pools && gtt->pool_id >= 0) in amdgpu_ttm_tt_unpopulate()
1200 pool = &adev->mman.ttm_pools[gtt->pool_id]; in amdgpu_ttm_tt_unpopulate()
1217 struct amdgpu_ttm_tt *gtt; in amdgpu_ttm_tt_get_userptr() local
1222 gtt = (void *)tbo->ttm; in amdgpu_ttm_tt_get_userptr()
1223 *user_addr = gtt->userptr; in amdgpu_ttm_tt_get_userptr()
1242 struct amdgpu_ttm_tt *gtt; in amdgpu_ttm_tt_set_userptr() local
1254 gtt = ttm_to_amdgpu_ttm_tt(bo->ttm); in amdgpu_ttm_tt_set_userptr()
1255 gtt->userptr = addr; in amdgpu_ttm_tt_set_userptr()
1256 gtt->userflags = flags; in amdgpu_ttm_tt_set_userptr()
1258 if (gtt->usertask) in amdgpu_ttm_tt_set_userptr()
1259 put_task_struct(gtt->usertask); in amdgpu_ttm_tt_set_userptr()
1260 gtt->usertask = current->group_leader; in amdgpu_ttm_tt_set_userptr()
1261 get_task_struct(gtt->usertask); in amdgpu_ttm_tt_set_userptr()
1271 struct amdgpu_ttm_tt *gtt = ttm_to_amdgpu_ttm_tt(ttm); in amdgpu_ttm_tt_get_usermm() local
1273 if (gtt == NULL) in amdgpu_ttm_tt_get_usermm()
1276 if (gtt->usertask == NULL) in amdgpu_ttm_tt_get_usermm()
1279 return gtt->usertask->mm; in amdgpu_ttm_tt_get_usermm()
1290 struct amdgpu_ttm_tt *gtt = ttm_to_amdgpu_ttm_tt(ttm); in amdgpu_ttm_tt_affect_userptr() local
1293 if (gtt == NULL || !gtt->userptr) in amdgpu_ttm_tt_affect_userptr()
1299 size = (unsigned long)gtt->ttm.num_pages * PAGE_SIZE; in amdgpu_ttm_tt_affect_userptr()
1300 if (gtt->userptr > end || gtt->userptr + size <= start) in amdgpu_ttm_tt_affect_userptr()
1304 *userptr = gtt->userptr; in amdgpu_ttm_tt_affect_userptr()
1313 struct amdgpu_ttm_tt *gtt = ttm_to_amdgpu_ttm_tt(ttm); in amdgpu_ttm_tt_is_userptr() local
1315 if (gtt == NULL || !gtt->userptr) in amdgpu_ttm_tt_is_userptr()
1326 struct amdgpu_ttm_tt *gtt = ttm_to_amdgpu_ttm_tt(ttm); in amdgpu_ttm_tt_is_readonly() local
1328 if (gtt == NULL) in amdgpu_ttm_tt_is_readonly()
1331 return !!(gtt->userflags & AMDGPU_GEM_USERPTR_READONLY); in amdgpu_ttm_tt_is_readonly()