/linux-6.12.1/drivers/gpu/drm/i915/selftests/

i915_gem_gtt.c
    478  if (!drm_mm_node_allocated(&vma->node) ||    in fill_hole()
    481  __func__, p->name, vma->node.start, vma->node.size, drm_mm_node_allocated(&vma->node),    in fill_hole()
    511  if (!drm_mm_node_allocated(&vma->node) ||    in fill_hole()
    557  if (!drm_mm_node_allocated(&vma->node) ||    in fill_hole()
    560  __func__, p->name, vma->node.start, vma->node.size, drm_mm_node_allocated(&vma->node),    in fill_hole()
    590  if (!drm_mm_node_allocated(&vma->node) ||    in fill_hole()
    593  __func__, p->name, vma->node.start, vma->node.size, drm_mm_node_allocated(&vma->node),    in fill_hole()
    680  if (!drm_mm_node_allocated(&vma->node) ||    in walk_hole()
    695  GEM_BUG_ON(drm_mm_node_allocated(&vma->node));    in walk_hole()
    763  if (!drm_mm_node_allocated(&vma->node) ||    in pot_hole()
    [all …]

i915_gem_evict.c
    529  if (drm_mm_node_allocated(&hole))    in igt_evict_contexts()
/linux-6.12.1/drivers/gpu/drm/i915/

i915_vma.h
    146  GEM_BUG_ON(!drm_mm_node_allocated(&vma->node));    in i915_vma_size()
    169  GEM_BUG_ON(!drm_mm_node_allocated(&vma->node));    in i915_vma_offset()
    176  GEM_BUG_ON(!drm_mm_node_allocated(&vma->node));    in i915_ggtt_offset()
    340  GEM_BUG_ON(!drm_mm_node_allocated(&vma->node));    in i915_vma_unpin()
    353  return drm_mm_node_allocated(node) && node->color != color;    in i915_node_color_differs()
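The i915_vma.h hits above all follow one pattern: assert that the VMA's drm_mm node is still allocated before trusting its start, size, or color fields. A minimal sketch of that guard, with an invented helper name and a plain WARN_ON() standing in for i915's internal GEM_BUG_ON():

#include <linux/types.h>
#include <linux/bug.h>
#include <drm/drm_mm.h>

/* Hypothetical accessor: node->start is only meaningful while the node is
 * bound into a drm_mm, so check drm_mm_node_allocated() first, as the
 * i915_vma_size()/i915_vma_offset() checks listed above do. */
static inline u64 example_node_offset(const struct drm_mm_node *node)
{
	WARN_ON(!drm_mm_node_allocated(node));
	return node->start;
}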
i915_gem_gtt.c
    109  GEM_BUG_ON(drm_mm_node_allocated(node));    in i915_gem_gtt_reserve()
    209  GEM_BUG_ON(drm_mm_node_allocated(node));    in i915_gem_gtt_insert()
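i915_gem_gtt_reserve() and i915_gem_gtt_insert() assert the opposite condition on entry: the caller must hand in a node that is not yet allocated, and on success the same predicate becomes the "is it bound?" test used later. A rough, hypothetical sketch of that contract; only drm_mm_insert_node() and drm_mm_node_allocated() are real API here:

#include <linux/errno.h>
#include <linux/bug.h>
#include <drm/drm_mm.h>

/* Hypothetical bind helper mirroring the contract above: refuse an already
 * allocated node, and rely on drm_mm_node_allocated() flipping to true once
 * drm_mm_insert_node() succeeds. */
static int example_bind(struct drm_mm *mm, struct drm_mm_node *node, u64 size)
{
	int err;

	if (WARN_ON(drm_mm_node_allocated(node)))
		return -EBUSY;

	err = drm_mm_insert_node(mm, node, size);
	if (err)
		return err;

	/* From here on drm_mm_node_allocated(node) is true until removal. */
	return 0;
}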
i915_vma.c
    483  GEM_BUG_ON(!drm_mm_node_allocated(&vma->node));    in i915_vma_bind()
    690  if (!drm_mm_node_allocated(&vma->node))    in i915_vma_misplaced()
    756  GEM_BUG_ON(!drm_mm_node_allocated(node));    in i915_gem_valid_gtt_space()
    796  GEM_BUG_ON(drm_mm_node_allocated(&vma->node));    in i915_vma_insert()
    917  GEM_BUG_ON(!drm_mm_node_allocated(&vma->node));    in i915_vma_insert()
    929  GEM_BUG_ON(!drm_mm_node_allocated(&vma->node));    in i915_vma_detach()
   1750  if (!drm_mm_node_allocated(&vma->node))    in force_unbind()
   1755  GEM_BUG_ON(drm_mm_node_allocated(&vma->node));    in force_unbind()
   2098  if (!drm_mm_node_allocated(&vma->node))    in __i915_vma_unbind()
   2128  if (!drm_mm_node_allocated(&vma->node))    in __i915_vma_unbind_async()
    [all …]

i915_gem.c
    337  GEM_BUG_ON(!drm_mm_node_allocated(node));    in i915_gem_gtt_prepare()
    343  if (drm_mm_node_allocated(node)) {    in i915_gem_gtt_prepare()
    370  if (drm_mm_node_allocated(node)) {    in i915_gem_gtt_cleanup()
    418  if (drm_mm_node_allocated(&node)) {    in i915_gem_gtt_pread()
    597  if (drm_mm_node_allocated(&node)) {    in i915_gem_gtt_pwrite_fast()

i915_vgpu.c
    152  if (!drm_mm_node_allocated(node))    in vgt_deballoon_space()

i915_gem_evict.c
    379  GEM_BUG_ON(!drm_mm_node_allocated(node));    in i915_gem_evict_for_node()

i915_debugfs.c
    203  if (!drm_mm_node_allocated(&vma->node))    in i915_debugfs_describe_obj()
/linux-6.12.1/drivers/gpu/drm/

drm_vma_manager.c
    208  if (!drm_mm_node_allocated(&node->vm_node))    in drm_vma_offset_add()
    234  if (drm_mm_node_allocated(&node->vm_node)) {    in drm_vma_offset_remove()

drm_mm.c
    174  if (drm_mm_node_allocated(hole_node)) {    in drm_mm_interval_tree_add_node()
    632  DRM_MM_BUG_ON(!drm_mm_node_allocated(node));    in drm_mm_remove_node()
    755  DRM_MM_BUG_ON(!drm_mm_node_allocated(node));    in drm_mm_scan_add_block()
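drm_mm_remove_node() itself contains a DRM_MM_BUG_ON(!drm_mm_node_allocated(node)) (line 632 above), which is why so many teardown paths in this list, amdgpu_gtt_mgr_del(), panfrost_gem_teardown_mapping(), virtio_gpu_cleanup_object() and others, test the predicate before freeing. A short hypothetical release helper showing that guard:

#include <drm/drm_mm.h>

/* Hypothetical teardown path: the node may never have been inserted, or may
 * already have been removed, so only call drm_mm_remove_node() when the
 * allocated state says there is something to remove. */
static void example_release_range(struct drm_mm_node *node)
{
	if (drm_mm_node_allocated(node))
		drm_mm_remove_node(node);
}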
/linux-6.12.1/drivers/gpu/drm/i915/display/

intel_plane_initial.c
    238  if (drm_mm_node_allocated(&orig_mm)) {    in initial_plane_vma()
    254  if (drm_mm_node_allocated(&orig_mm))    in initial_plane_vma()
    264  if (drm_mm_node_allocated(&orig_mm))    in initial_plane_vma()
/linux-6.12.1/drivers/gpu/drm/amd/amdgpu/

amdgpu_gtt_mgr.c
    100  return drm_mm_node_allocated(&node->mm_nodes[0]);    in amdgpu_gtt_mgr_has_gart_addr()
    175  if (drm_mm_node_allocated(&node->mm_nodes[0]))    in amdgpu_gtt_mgr_del()
/linux-6.12.1/drivers/gpu/drm/xe/

xe_ggtt.c
    445  xe_tile_assert(ggtt->tile, !drm_mm_node_allocated(&node->base));    in xe_ggtt_node_insert_balloon()
    475  if (!drm_mm_node_allocated(&node->base))    in xe_ggtt_node_remove_balloon()
    582  return drm_mm_node_allocated(&node->base);    in xe_ggtt_node_allocated()
    750  if (!drm_mm_node_allocated(node))    in xe_ggtt_assign_locked()
/linux-6.12.1/include/drm/

drm_vma_manager.h
    223  if (drm_mm_node_allocated(&node->vm_node))    in drm_vma_node_unmap()

drm_mm.h
    258  static inline bool drm_mm_node_allocated(const struct drm_mm_node *node)    in drm_mm_node_allocated() (function definition)
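For reference, the definition at drm_mm.h:258 is a one-line predicate. The sketch below is paraphrased from memory of the 6.x implementation rather than copied verbatim, but the signature matches the hit above: the node counts as allocated while its allocated flag bit is set, which drm_mm flips on insert and clears on remove.

#include <linux/bitops.h>
#include <drm/drm_mm.h>

/* Paraphrase of the helper (the real one lives in include/drm/drm_mm.h):
 * a node is "allocated" while it is inserted in a drm_mm range manager. */
static inline bool example_node_allocated(const struct drm_mm_node *node)
{
	return test_bit(DRM_MM_NODE_ALLOCATED_BIT, &node->flags);
}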
/linux-6.12.1/drivers/gpu/drm/i915/gt/

intel_ggtt.c
    136  GEM_BUG_ON(!drm_mm_node_allocated(&vma->node));    in i915_ggtt_suspend_vm()
    803  if (drm_mm_node_allocated(&ggtt->uc_fw))    in ggtt_release_guc_top()
    810  if (drm_mm_node_allocated(&ggtt->error_capture))    in cleanup_init_ggtt()
    878  if (drm_mm_node_allocated(&ggtt->error_capture)) {    in init_ggtt()
   1055  if (drm_mm_node_allocated(&ggtt->error_capture))    in ggtt_cleanup_hw()
   1603  if (drm_mm_node_allocated(&ggtt->error_capture))    in i915_ggtt_resume()

selftest_reset.c
     34  if (!drm_mm_node_allocated(&ggtt->error_capture))    in __igt_reset_stolen()
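The intel_ggtt.c hits show another common use: optional reserved ranges such as ggtt->error_capture and ggtt->uc_fw carry no separate "is this in use?" flag, the allocated state of their embedded drm_mm_node is the flag, and the suspend, resume, and cleanup paths simply test it. A hypothetical address space in the same spirit (all names here invented):

#include <drm/drm_mm.h>

/* Hypothetical GTT with an optional reserved range: whether the range was
 * ever set up is tracked entirely by the node's allocated state. */
struct example_gtt {
	struct drm_mm mm;
	struct drm_mm_node scratch;	/* optional, may never be reserved */
};

static void example_gtt_fini(struct example_gtt *gtt)
{
	if (drm_mm_node_allocated(&gtt->scratch))
		drm_mm_remove_node(&gtt->scratch);

	drm_mm_takedown(&gtt->mm);
}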
/linux-6.12.1/drivers/gpu/drm/virtio/

virtgpu_object.c
     75  if (drm_mm_node_allocated(&vram->vram_node))    in virtio_gpu_cleanup_object()

virtgpu_vram.c
     15  unmap = drm_mm_node_allocated(&vram->vram_node);    in virtio_gpu_vram_free()
/linux-6.12.1/drivers/gpu/drm/tests/

drm_mm_test.c
    108  if (!drm_mm_node_allocated(node) || node->mm != mm) {    in assert_node()
/linux-6.12.1/drivers/gpu/drm/panfrost/

panfrost_gem.c
     81  if (drm_mm_node_allocated(&mapping->mmnode))    in panfrost_gem_teardown_mapping()
/linux-6.12.1/drivers/gpu/drm/imagination/

pvr_fw.c
   1167  if (drm_mm_node_allocated(&fw_obj->fw_mm_node)) {    in pvr_fw_object_fw_map()
   1233  if (!drm_mm_node_allocated(&fw_obj->fw_mm_node)) {    in pvr_fw_object_fw_unmap()
   1419  if (drm_mm_node_allocated(&fw_obj->fw_mm_node)) {    in pvr_fw_object_destroy()
/linux-6.12.1/drivers/gpu/drm/i915/gem/

i915_gem_execbuffer.c
    663  if (drm_mm_node_allocated(&vma->node) &&    in eb_reserve_vma()
   1041  if (drm_mm_node_allocated(&vma->node)) {    in eb_validate_vmas()
   1053  GEM_BUG_ON(drm_mm_node_allocated(&vma->node) &&    in eb_validate_vmas()
   1184  if (!drm_mm_node_allocated(&cache->node))    in reloc_cache_remap()
   1214  if (drm_mm_node_allocated(&cache->node)) {    in reloc_cache_reset()
   1330  if (drm_mm_node_allocated(&cache->node)) {    in reloc_iomap()

i915_gem_stolen.c
    577  if (!drm_mm_node_allocated(&ggtt->error_capture))    in dbg_poison()
   1027  return drm_mm_node_allocated(node);    in i915_gem_stolen_node_allocated()