
Searched refs: drm_mm_node_allocated (Results 1 – 25 of 33) sorted by relevance


/linux-6.12.1/drivers/gpu/drm/i915/selftests/
i915_gem_gtt.c:478 if (!drm_mm_node_allocated(&vma->node) || in fill_hole()
481 __func__, p->name, vma->node.start, vma->node.size, drm_mm_node_allocated(&vma->node), in fill_hole()
511 if (!drm_mm_node_allocated(&vma->node) || in fill_hole()
557 if (!drm_mm_node_allocated(&vma->node) || in fill_hole()
560 __func__, p->name, vma->node.start, vma->node.size, drm_mm_node_allocated(&vma->node), in fill_hole()
590 if (!drm_mm_node_allocated(&vma->node) || in fill_hole()
593 __func__, p->name, vma->node.start, vma->node.size, drm_mm_node_allocated(&vma->node), in fill_hole()
680 if (!drm_mm_node_allocated(&vma->node) || in walk_hole()
695 GEM_BUG_ON(drm_mm_node_allocated(&vma->node)); in walk_hole()
763 if (!drm_mm_node_allocated(&vma->node) || in pot_hole()
[all …]
i915_gem_evict.c:529 if (drm_mm_node_allocated(&hole)) in igt_evict_contexts()
/linux-6.12.1/drivers/gpu/drm/i915/
i915_vma.h:146 GEM_BUG_ON(!drm_mm_node_allocated(&vma->node)); in i915_vma_size()
169 GEM_BUG_ON(!drm_mm_node_allocated(&vma->node)); in i915_vma_offset()
176 GEM_BUG_ON(!drm_mm_node_allocated(&vma->node)); in i915_ggtt_offset()
340 GEM_BUG_ON(!drm_mm_node_allocated(&vma->node)); in i915_vma_unpin()
353 return drm_mm_node_allocated(node) && node->color != color; in i915_node_color_differs()
i915_gem_gtt.c:109 GEM_BUG_ON(drm_mm_node_allocated(node)); in i915_gem_gtt_reserve()
209 GEM_BUG_ON(drm_mm_node_allocated(node)); in i915_gem_gtt_insert()
i915_vma.c:483 GEM_BUG_ON(!drm_mm_node_allocated(&vma->node)); in i915_vma_bind()
690 if (!drm_mm_node_allocated(&vma->node)) in i915_vma_misplaced()
756 GEM_BUG_ON(!drm_mm_node_allocated(node)); in i915_gem_valid_gtt_space()
796 GEM_BUG_ON(drm_mm_node_allocated(&vma->node)); in i915_vma_insert()
917 GEM_BUG_ON(!drm_mm_node_allocated(&vma->node)); in i915_vma_insert()
929 GEM_BUG_ON(!drm_mm_node_allocated(&vma->node)); in i915_vma_detach()
1750 if (!drm_mm_node_allocated(&vma->node)) in force_unbind()
1755 GEM_BUG_ON(drm_mm_node_allocated(&vma->node)); in force_unbind()
2098 if (!drm_mm_node_allocated(&vma->node)) in __i915_vma_unbind()
2128 if (!drm_mm_node_allocated(&vma->node)) in __i915_vma_unbind_async()
[all …]
i915_gem.c:337 GEM_BUG_ON(!drm_mm_node_allocated(node)); in i915_gem_gtt_prepare()
343 if (drm_mm_node_allocated(node)) { in i915_gem_gtt_prepare()
370 if (drm_mm_node_allocated(node)) { in i915_gem_gtt_cleanup()
418 if (drm_mm_node_allocated(&node)) { in i915_gem_gtt_pread()
597 if (drm_mm_node_allocated(&node)) { in i915_gem_gtt_pwrite_fast()
i915_vgpu.c:152 if (!drm_mm_node_allocated(node)) in vgt_deballoon_space()
i915_gem_evict.c:379 GEM_BUG_ON(!drm_mm_node_allocated(node)); in i915_gem_evict_for_node()
i915_debugfs.c:203 if (!drm_mm_node_allocated(&vma->node)) in i915_debugfs_describe_obj()
/linux-6.12.1/drivers/gpu/drm/
drm_vma_manager.c:208 if (!drm_mm_node_allocated(&node->vm_node)) in drm_vma_offset_add()
234 if (drm_mm_node_allocated(&node->vm_node)) { in drm_vma_offset_remove()
drm_mm.c:174 if (drm_mm_node_allocated(hole_node)) { in drm_mm_interval_tree_add_node()
632 DRM_MM_BUG_ON(!drm_mm_node_allocated(node)); in drm_mm_remove_node()
755 DRM_MM_BUG_ON(!drm_mm_node_allocated(node)); in drm_mm_scan_add_block()
/linux-6.12.1/drivers/gpu/drm/i915/display/
intel_plane_initial.c:238 if (drm_mm_node_allocated(&orig_mm)) { in initial_plane_vma()
254 if (drm_mm_node_allocated(&orig_mm)) in initial_plane_vma()
264 if (drm_mm_node_allocated(&orig_mm)) in initial_plane_vma()
/linux-6.12.1/drivers/gpu/drm/amd/amdgpu/
amdgpu_gtt_mgr.c:100 return drm_mm_node_allocated(&node->mm_nodes[0]); in amdgpu_gtt_mgr_has_gart_addr()
175 if (drm_mm_node_allocated(&node->mm_nodes[0])) in amdgpu_gtt_mgr_del()
/linux-6.12.1/drivers/gpu/drm/xe/
xe_ggtt.c:445 xe_tile_assert(ggtt->tile, !drm_mm_node_allocated(&node->base)); in xe_ggtt_node_insert_balloon()
475 if (!drm_mm_node_allocated(&node->base)) in xe_ggtt_node_remove_balloon()
582 return drm_mm_node_allocated(&node->base); in xe_ggtt_node_allocated()
750 if (!drm_mm_node_allocated(node)) in xe_ggtt_assign_locked()
/linux-6.12.1/include/drm/
drm_vma_manager.h:223 if (drm_mm_node_allocated(&node->vm_node)) in drm_vma_node_unmap()
drm_mm.h:258 static inline bool drm_mm_node_allocated(const struct drm_mm_node *node) in drm_mm_node_allocated() function
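
For reference, drm_mm_node_allocated() (the drm_mm.h definition listed above) simply reports whether a struct drm_mm_node is currently inserted into a drm_mm allocator. Below is a minimal sketch of the guard-before-teardown pattern that recurs throughout these hits; the helper name my_release_node() is hypothetical, while drm_mm_node_allocated() and drm_mm_remove_node() are the real DRM calls shown in the results.

#include <drm/drm_mm.h>

/* Sketch: remove a node from its allocator only if it was actually
 * inserted, mirroring the "if (drm_mm_node_allocated(...))" checks in
 * the drivers above.  my_release_node() is a hypothetical helper name. */
static void my_release_node(struct drm_mm_node *node)
{
	if (drm_mm_node_allocated(node))
		drm_mm_remove_node(node);
}
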
/linux-6.12.1/drivers/gpu/drm/i915/gt/
intel_ggtt.c:136 GEM_BUG_ON(!drm_mm_node_allocated(&vma->node)); in i915_ggtt_suspend_vm()
803 if (drm_mm_node_allocated(&ggtt->uc_fw)) in ggtt_release_guc_top()
810 if (drm_mm_node_allocated(&ggtt->error_capture)) in cleanup_init_ggtt()
878 if (drm_mm_node_allocated(&ggtt->error_capture)) { in init_ggtt()
1055 if (drm_mm_node_allocated(&ggtt->error_capture)) in ggtt_cleanup_hw()
1603 if (drm_mm_node_allocated(&ggtt->error_capture)) in i915_ggtt_resume()
selftest_reset.c:34 if (!drm_mm_node_allocated(&ggtt->error_capture)) in __igt_reset_stolen()
/linux-6.12.1/drivers/gpu/drm/virtio/
virtgpu_object.c:75 if (drm_mm_node_allocated(&vram->vram_node)) in virtio_gpu_cleanup_object()
virtgpu_vram.c:15 unmap = drm_mm_node_allocated(&vram->vram_node); in virtio_gpu_vram_free()
/linux-6.12.1/drivers/gpu/drm/tests/
drm_mm_test.c:108 if (!drm_mm_node_allocated(node) || node->mm != mm) { in assert_node()
/linux-6.12.1/drivers/gpu/drm/panfrost/
panfrost_gem.c:81 if (drm_mm_node_allocated(&mapping->mmnode)) in panfrost_gem_teardown_mapping()
/linux-6.12.1/drivers/gpu/drm/imagination/
pvr_fw.c:1167 if (drm_mm_node_allocated(&fw_obj->fw_mm_node)) { in pvr_fw_object_fw_map()
1233 if (!drm_mm_node_allocated(&fw_obj->fw_mm_node)) { in pvr_fw_object_fw_unmap()
1419 if (drm_mm_node_allocated(&fw_obj->fw_mm_node)) { in pvr_fw_object_destroy()
/linux-6.12.1/drivers/gpu/drm/i915/gem/
i915_gem_execbuffer.c:663 if (drm_mm_node_allocated(&vma->node) && in eb_reserve_vma()
1041 if (drm_mm_node_allocated(&vma->node)) { in eb_validate_vmas()
1053 GEM_BUG_ON(drm_mm_node_allocated(&vma->node) && in eb_validate_vmas()
1184 if (!drm_mm_node_allocated(&cache->node)) in reloc_cache_remap()
1214 if (drm_mm_node_allocated(&cache->node)) { in reloc_cache_reset()
1330 if (drm_mm_node_allocated(&cache->node)) { in reloc_iomap()
i915_gem_stolen.c:577 if (!drm_mm_node_allocated(&ggtt->error_capture)) in dbg_poison()
1027 return drm_mm_node_allocated(node); in i915_gem_stolen_node_allocated()
