Lines matching refs: ttm. Each entry shows the source line number(s), the matching code, and the enclosing function; tags such as "member" and "local" come from the cross-reference index.

89-90 	return resource_is_vram(bo->ttm.resource) || resource_is_stolen_vram(xe_bo_device(bo), bo->ttm.resource); in xe_bo_is_vram()
95 return bo->ttm.resource->mem_type == XE_PL_STOLEN; in xe_bo_is_stolen()
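xe_bo_is_vram() and xe_bo_is_stolen() above reduce to a mem_type check on the buffer's current TTM resource. A minimal runnable userspace sketch of that placement-dispatch pattern follows; the enum values and struct layouts are hypothetical stand-ins for the driver's XE_PL_* placements and TTM types, not the real definitions.

#include <stdbool.h>
#include <stdio.h>

/* Hypothetical placement ids standing in for the driver's XE_PL_* values. */
enum placement { PL_SYSTEM, PL_TT, PL_VRAM, PL_STOLEN };

struct resource { enum placement mem_type; };
struct bo { struct resource *resource; };

/* Shape of xe_bo_is_stolen(): a single mem_type comparison. */
static bool bo_is_stolen(const struct bo *bo)
{
    return bo->resource->mem_type == PL_STOLEN;
}

/* Shape of xe_bo_is_vram(): VRAM proper, or stolen memory backed by VRAM. */
static bool bo_is_vram(const struct bo *bo)
{
    return bo->resource->mem_type == PL_VRAM || bo_is_stolen(bo);
}

int main(void)
{
    struct resource res = { .mem_type = PL_STOLEN };
    struct bo bo = { .resource = &res };

    printf("vram=%d stolen=%d\n", bo_is_vram(&bo), bo_is_stolen(&bo));
    return 0;
}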
148 mgr = ttm_manager_type(&xe->ttm, res->mem_type); in res_to_mem_region()
174 vram = to_xe_ttm_vram_mgr(ttm_manager_type(&xe->ttm, mem_type))->vram; in add_vram()
282 struct ttm_tt ttm; member
290 struct xe_ttm_tt *xe_tt = container_of(tt, struct xe_ttm_tt, ttm); in xe_tt_map_sg()
321 struct xe_ttm_tt *xe_tt = container_of(tt, struct xe_ttm_tt, ttm); in xe_tt_unmap_sg()
333 struct ttm_tt *tt = bo->ttm.ttm; in xe_bo_sg()
334 struct xe_ttm_tt *xe_tt = container_of(tt, struct xe_ttm_tt, ttm); in xe_bo_sg()
399 err = ttm_tt_init(&tt->ttm, &bo->ttm, page_flags, caching, extra_pages); in xe_ttm_tt_create()
405 return &tt->ttm; in xe_ttm_tt_create()
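The entries around xe_ttm_tt_create() show the embedding idiom: struct xe_ttm_tt holds a struct ttm_tt as its ttm member, hands &tt->ttm to common TTM code, and later recovers the wrapper with container_of(). A self-contained userspace sketch of the same idiom, with container_of() defined via offsetof() as the kernel does and all struct contents simplified for illustration:

#include <stddef.h>
#include <stdio.h>

/* Userspace equivalent of the kernel's container_of() macro. */
#define container_of(ptr, type, member) \
    ((type *)((char *)(ptr) - offsetof(type, member)))

struct ttm_tt { unsigned int num_pages; };

/* Driver-private wrapper embedding the base object, as xe_ttm_tt does. */
struct xe_ttm_tt {
    struct ttm_tt ttm;  /* base object, handed out to common code */
    int driver_private; /* extra per-driver state */
};

int main(void)
{
    struct xe_ttm_tt xe_tt = { .ttm = { .num_pages = 4 }, .driver_private = 42 };
    struct ttm_tt *tt = &xe_tt.ttm; /* what common TTM code sees */

    /* Recover the wrapper from the base pointer, as xe_tt_map_sg() does. */
    struct xe_ttm_tt *back = container_of(tt, struct xe_ttm_tt, ttm);

    printf("private=%d pages=%u\n", back->driver_private, back->ttm.num_pages);
    return 0;
}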
487 struct drm_gem_object *obj = &bo->ttm.base; in xe_bo_trigger_rebind()
492 dma_resv_assert_held(bo->ttm.base.resv); in xe_bo_trigger_rebind()
494 if (!list_empty(&bo->ttm.base.gpuva.list)) { in xe_bo_trigger_rebind()
495 dma_resv_iter_begin(&cursor, bo->ttm.base.resv, in xe_bo_trigger_rebind()
515 !dma_resv_test_signaled(bo->ttm.base.resv, in xe_bo_trigger_rebind()
519 timeout = dma_resv_wait_timeout(bo->ttm.base.resv, in xe_bo_trigger_rebind()
557-558 	struct xe_ttm_tt *xe_tt = container_of(ttm_bo->ttm, struct xe_ttm_tt, ttm); in xe_bo_move_dmabuf()
563 xe_assert(xe, ttm_bo->ttm); in xe_bo_move_dmabuf()
607 struct ttm_buffer_object *ttm_bo = &bo->ttm; in xe_bo_move_notify()
654 struct ttm_tt *ttm = ttm_bo->ttm; in xe_bo_move() local
661 ttm && ttm_tt_is_populated(ttm)) ? true : false; in xe_bo_move()
665 if ((!old_mem && ttm) && !handle_system_ccs) { in xe_bo_move()
667 ret = xe_tt_map_sg(ttm); in xe_bo_move()
680-681 	tt_has_data = ttm && (ttm_tt_is_populated(ttm) || (ttm->page_flags & TTM_TT_FLAG_SWAPPED)); in xe_bo_move()
686-687 	needs_clear = (ttm && ttm->page_flags & TTM_TT_FLAG_ZERO_ALLOC) || (!ttm && ttm_bo->type == ttm_bo_type_device); in xe_bo_move()
690 ret = xe_tt_map_sg(ttm); in xe_bo_move()
849-850 	ttm_bo->ttm) xe_tt_unmap_sg(ttm_bo->ttm); in xe_bo_move()
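Within xe_bo_move(), the tt_has_data and needs_clear computations (source lines 680-681 and 686-687 above) drive the move strategy: populated or swapped pages mean there is data to migrate, and zero-allocated pages, or a device object with no tt at all, mean the destination must be cleared. A runnable sketch of that flag logic; the FLAG_* bit values are arbitrary illustrations, not the real TTM_TT_FLAG_* definitions:

#include <stdbool.h>
#include <stdio.h>

/* Illustrative stand-ins for TTM_TT_FLAG_SWAPPED / TTM_TT_FLAG_ZERO_ALLOC. */
#define FLAG_SWAPPED    (1u << 0)
#define FLAG_ZERO_ALLOC (1u << 1)
#define FLAG_POPULATED  (1u << 2)

struct tt { unsigned int page_flags; };
enum bo_type { TYPE_KERNEL, TYPE_DEVICE };

static bool tt_is_populated(const struct tt *tt)
{
    return tt->page_flags & FLAG_POPULATED;
}

int main(void)
{
    struct tt t = { .page_flags = FLAG_ZERO_ALLOC };
    struct tt *tt = &t;
    enum bo_type type = TYPE_DEVICE;

    /* Same shape as the tt_has_data computation in xe_bo_move(). */
    bool tt_has_data = tt && (tt_is_populated(tt) ||
                              (tt->page_flags & FLAG_SWAPPED));

    /* Same shape as the needs_clear computation. */
    bool needs_clear = (tt && (tt->page_flags & FLAG_ZERO_ALLOC)) ||
                       (!tt && type == TYPE_DEVICE);

    printf("has_data=%d needs_clear=%d\n", tt_has_data, needs_clear);
    return 0;
}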
883 if (WARN_ON(!bo->ttm.resource)) in xe_bo_evict_pinned()
892 ret = ttm_bo_mem_space(&bo->ttm, &placement, &new_mem, &ctx); in xe_bo_evict_pinned()
896 if (!bo->ttm.ttm) { in xe_bo_evict_pinned()
897 bo->ttm.ttm = xe_ttm_tt_create(&bo->ttm, 0); in xe_bo_evict_pinned()
898 if (!bo->ttm.ttm) { in xe_bo_evict_pinned()
904 ret = ttm_tt_populate(bo->ttm.bdev, bo->ttm.ttm, &ctx); in xe_bo_evict_pinned()
908 ret = dma_resv_reserve_fences(bo->ttm.base.resv, 1); in xe_bo_evict_pinned()
912 ret = xe_bo_move(&bo->ttm, false, &ctx, new_mem, NULL); in xe_bo_evict_pinned()
919 ttm_resource_free(&bo->ttm, &new_mem); in xe_bo_evict_pinned()
945 if (WARN_ON(!bo->ttm.resource)) in xe_bo_restore_pinned()
954 if (WARN_ON(!bo->ttm.ttm && !xe_bo_is_stolen(bo))) in xe_bo_restore_pinned()
960 ret = ttm_bo_mem_space(&bo->ttm, &bo->placement, &new_mem, &ctx); in xe_bo_restore_pinned()
964 ret = ttm_tt_populate(bo->ttm.bdev, bo->ttm.ttm, &ctx); in xe_bo_restore_pinned()
968 ret = dma_resv_reserve_fences(bo->ttm.base.resv, 1); in xe_bo_restore_pinned()
972 ret = xe_bo_move(&bo->ttm, false, &ctx, new_mem, NULL); in xe_bo_restore_pinned()
979 ttm_resource_free(&bo->ttm, &new_mem); in xe_bo_restore_pinned()
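xe_bo_evict_pinned() and xe_bo_restore_pinned() share one staged shape: find a destination (ttm_bo_mem_space()), populate pages (ttm_tt_populate()), reserve a fence slot (dma_resv_reserve_fences()), move (xe_bo_move()), and free the new resource on failure (ttm_resource_free()). A minimal sketch of that goto-unwind error-handling idiom, with hypothetical stage functions standing in for the TTM calls:

#include <stdio.h>

/* Hypothetical stages standing in for ttm_bo_mem_space(),
 * ttm_tt_populate(), dma_resv_reserve_fences() and xe_bo_move(). */
static int find_space(void)     { return 0; }
static int populate(void)       { return 0; }
static int reserve_fences(void) { return 0; }
static int do_move(void)        { return -1; /* simulate failure */ }
static void free_space(void)    { puts("freeing new resource"); }

static int evict_pinned(void)
{
    int ret;

    ret = find_space();
    if (ret)
        return ret;

    ret = populate();
    if (ret)
        goto err_res;

    ret = reserve_fences();
    if (ret)
        goto err_res;

    ret = do_move();
    if (ret)
        goto err_res;

    return 0;

err_res:
    /* Mirrors the ttm_resource_free() call on the error path. */
    free_space();
    return ret;
}

int main(void)
{
    printf("evict_pinned() = %d\n", evict_pinned());
    return 0;
}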
1082-1083 	struct xe_ttm_tt *xe_tt = container_of(ttm_bo->ttm, struct xe_ttm_tt, ttm); in xe_ttm_bo_delete_mem_notify()
1111 if (bo->ttm.base.import_attach) in xe_ttm_bo_destroy()
1112 drm_prime_gem_destroy(&bo->ttm.base, NULL); in xe_ttm_bo_destroy()
1113 drm_gem_object_release(&bo->ttm.base); in xe_ttm_bo_destroy()
1164 ttm_bo_set_bulk_move(&bo->ttm, NULL); in xe_gem_object_close()
1319 bo->ttm.base.funcs = &xe_gem_object_funcs; in ___xe_bo_create_locked()
1320 bo->ttm.priority = XE_BO_PRIORITY_NORMAL; in ___xe_bo_create_locked()
1327 drm_gem_private_object_init(&xe->drm, &bo->ttm.base, size); in ___xe_bo_create_locked()
1337 xe_ttm_bo_destroy(&bo->ttm); in ___xe_bo_create_locked()
1346 err = ttm_bo_init_reserved(&xe->ttm, &bo->ttm, type, in ___xe_bo_create_locked()
1370 long timeout = dma_resv_wait_timeout(bo->ttm.base.resv, in ___xe_bo_create_locked()
1377 dma_resv_unlock(bo->ttm.base.resv); in ___xe_bo_create_locked()
1385 ttm_bo_set_bulk_move(&bo->ttm, bulk); in ___xe_bo_create_locked()
1387 ttm_bo_move_to_lru_tail_unlocked(&bo->ttm); in ___xe_bo_create_locked()
1716 ttm_bo_pin(&bo->ttm); in xe_bo_pin_external()
1722 ttm_bo_move_to_lru_tail_unlocked(&bo->ttm); in xe_bo_pin_external()
1744 xe_assert(xe, !bo->ttm.base.import_attach); in xe_bo_pin()
1764 vram_region_gpu_offset(bo->ttm.resource)) >> PAGE_SHIFT; in xe_bo_pin()
1775 ttm_bo_pin(&bo->ttm); in xe_bo_pin()
1781 ttm_bo_move_to_lru_tail_unlocked(&bo->ttm); in xe_bo_pin()
1805 if (bo->ttm.pin_count == 1 && !list_empty(&bo->pinned_link)) in xe_bo_unpin_external()
1809 ttm_bo_unpin(&bo->ttm); in xe_bo_unpin_external()
1815 ttm_bo_move_to_lru_tail_unlocked(&bo->ttm); in xe_bo_unpin_external()
1823 xe_assert(xe, !bo->ttm.base.import_attach); in xe_bo_unpin()
1832 ttm_bo_unpin(&bo->ttm); in xe_bo_unpin()
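Pinning is counted: ttm_bo_pin() and ttm_bo_unpin() raise and lower pin_count, and xe_bo_unpin_external() (source line 1805 above) takes its list-removal action only when the last pin is about to go away. A runnable sketch of a counted pin with a last-unpin side effect, loosely modeled on that pattern; the names and the side effect are illustrative:

#include <assert.h>
#include <stdio.h>

struct bo { int pin_count; };

static void bo_pin(struct bo *bo)
{
    bo->pin_count++;
}

static void bo_unpin(struct bo *bo)
{
    assert(bo->pin_count > 0);
    /* Like xe_bo_unpin_external(): act before the final pin drops. */
    if (bo->pin_count == 1)
        puts("last pin going away: remove from pinned list");
    bo->pin_count--;
}

int main(void)
{
    struct bo bo = { 0 };

    bo_pin(&bo);
    bo_pin(&bo);
    bo_unpin(&bo);
    bo_unpin(&bo);
    printf("pin_count=%d\n", bo.pin_count);
    return 0;
}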
1866 return ttm_bo_validate(&bo->ttm, &bo->placement, &ctx); in xe_bo_validate()
1894 xe_assert(xe, bo->ttm.ttm); in __xe_bo_addr()
1902 xe_res_first(bo->ttm.resource, page << PAGE_SHIFT, in __xe_bo_addr()
1904 return cur.start + offset + vram_region_gpu_offset(bo->ttm.resource); in __xe_bo_addr()
1910 if (!READ_ONCE(bo->ttm.pin_count)) in xe_bo_addr()
1936 ret = ttm_bo_kmap(&bo->ttm, 0, bo->size >> PAGE_SHIFT, &bo->kmap); in xe_bo_vmap()
2058 err = drm_gem_handle_create(file, &bo->ttm.base, &handle); in xe_gem_create_ioctl()
2120 return dma_resv_lock_interruptible(bo->ttm.base.resv, NULL); in xe_bo_lock()
2122 dma_resv_lock(bo->ttm.base.resv, NULL); in xe_bo_lock()
2135 dma_resv_unlock(bo->ttm.base.resv); in xe_bo_unlock()
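xe_bo_lock() chooses between dma_resv_lock_interruptible() and dma_resv_lock() depending on whether the caller can handle a lock attempt that fails. A rough userspace analogue using pthreads, where trylock stands in for the may-fail variant; the real dma_resv lock is ww_mutex based with deadlock backoff, which this sketch does not model:

#include <errno.h>
#include <pthread.h>
#include <stdbool.h>
#include <stdio.h>

static pthread_mutex_t resv = PTHREAD_MUTEX_INITIALIZER;

/* intr=true: the caller tolerates failure, like dma_resv_lock_interruptible();
 * intr=false: block unconditionally, like dma_resv_lock(). */
static int bo_lock(bool intr)
{
    if (intr)
        return pthread_mutex_trylock(&resv) ? -EINTR : 0;
    return pthread_mutex_lock(&resv);
}

static void bo_unlock(void)
{
    pthread_mutex_unlock(&resv);
}

int main(void)
{
    if (!bo_lock(true)) {
        puts("locked");
        bo_unlock();
    }
    return 0;
}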
2157 if (bo->ttm.type == ttm_bo_type_kernel) in xe_bo_can_migrate()
2160 if (bo->ttm.type == ttm_bo_type_sg) in xe_bo_can_migrate()
2195 struct xe_device *xe = ttm_to_xe_device(bo->ttm.bdev); in xe_bo_migrate()
2205 if (bo->ttm.resource->mem_type == mem_type) in xe_bo_migrate()
2230 return ttm_bo_validate(&bo->ttm, &placement, &ctx); in xe_bo_migrate()
2253 xe_evict_flags(&bo->ttm, &placement); in xe_bo_evict()
2254 ret = ttm_bo_validate(&bo->ttm, &placement, &ctx); in xe_bo_evict()
2258 dma_resv_wait_timeout(bo->ttm.base.resv, DMA_RESV_USAGE_KERNEL, in xe_bo_evict()
2278 if (!xe_device_has_flat_ccs(xe) || bo->ttm.type != ttm_bo_type_device) in xe_bo_needs_ccs_pages()
2323 drm_gem_object_free(&bo->ttm.base.refcount); in xe_bo_put_commit()
2336 drm_gem_object_put(&bo->ttm.base); in xe_bo_put()
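xe_bo_put() drops a GEM reference via drm_gem_object_put(); when the count reaches zero the object is released through its free callback, reaching xe_ttm_bo_destroy() (source line 1337 above shows it also called directly on an init error path). A compact sketch of the refcount-with-release pattern using C11 atomics; all names here are hypothetical:

#include <stdatomic.h>
#include <stdio.h>
#include <stdlib.h>

struct obj {
    atomic_int refcount;
    void (*release)(struct obj *obj); /* plays the role of funcs->free */
};

static void obj_get(struct obj *obj)
{
    atomic_fetch_add(&obj->refcount, 1);
}

static void obj_put(struct obj *obj)
{
    /* The last put runs the release callback, as drm_gem_object_put() does. */
    if (atomic_fetch_sub(&obj->refcount, 1) == 1)
        obj->release(obj);
}

static void obj_release(struct obj *obj)
{
    puts("releasing object");
    free(obj);
}

int main(void)
{
    struct obj *obj = malloc(sizeof(*obj));

    atomic_init(&obj->refcount, 1);
    obj->release = obj_release;

    obj_get(obj);
    obj_put(obj);
    obj_put(obj); /* refcount hits zero here */
    return 0;
}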
2374 err = drm_gem_handle_create(file_priv, &bo->ttm.base, &handle); in xe_bo_dumb_create()
2376 drm_gem_object_put(&bo->ttm.base); in xe_bo_dumb_create()
2384 struct ttm_buffer_object *tbo = &bo->ttm; in xe_bo_runtime_pm_release_mmap_offset()