/linux-6.12.1/drivers/gpu/drm/radeon/

D | radeon_vm.c
    297  struct radeon_bo_va *bo_va;  in radeon_vm_bo_find() local
    299  list_for_each_entry(bo_va, &bo->va, bo_list) {  in radeon_vm_bo_find()
    300  if (bo_va->vm == vm)  in radeon_vm_bo_find()
    301  return bo_va;  in radeon_vm_bo_find()
    324  struct radeon_bo_va *bo_va;  in radeon_vm_bo_add() local
    326  bo_va = kzalloc(sizeof(struct radeon_bo_va), GFP_KERNEL);  in radeon_vm_bo_add()
    327  if (bo_va == NULL)  in radeon_vm_bo_add()
    330  bo_va->vm = vm;  in radeon_vm_bo_add()
    331  bo_va->bo = bo;  in radeon_vm_bo_add()
    332  bo_va->it.start = 0;  in radeon_vm_bo_add()
    [all …]

D | radeon_gem.c
    202  struct radeon_bo_va *bo_va;  in radeon_gem_object_open() local
    215  bo_va = radeon_vm_bo_find(vm, rbo);  in radeon_gem_object_open()
    216  if (!bo_va) {  in radeon_gem_object_open()
    217  bo_va = radeon_vm_bo_add(rdev, vm, rbo);  in radeon_gem_object_open()
    219  ++bo_va->ref_count;  in radeon_gem_object_open()
    233  struct radeon_bo_va *bo_va;  in radeon_gem_object_close() local
    247  bo_va = radeon_vm_bo_find(vm, rbo);  in radeon_gem_object_close()
    248  if (bo_va) {  in radeon_gem_object_close()
    249  if (--bo_va->ref_count == 0) {  in radeon_gem_object_close()
    250  radeon_vm_bo_rmv(rdev, bo_va);  in radeon_gem_object_close()
    [all …]

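Taken together, the radeon_vm.c and radeon_gem.c fragments above describe a find-or-add pattern: each BO carries a list of its per-VM mappings in bo->va, and the GEM open/close hooks reference-count the radeon_bo_va. A minimal sketch of that flow, reconstructed from the fragments (locking and error paths omitted; surrounding variables as in the fragments):

    /*
     * Sketch only: find-or-add with reference counting, reconstructed
     * from the radeon_vm.c/radeon_gem.c fragments above.
     */
    struct radeon_bo_va *radeon_vm_bo_find(struct radeon_vm *vm,
                                           struct radeon_bo *bo)
    {
            struct radeon_bo_va *bo_va;

            /* Each BO keeps a list of its per-VM mappings in bo->va. */
            list_for_each_entry(bo_va, &bo->va, bo_list) {
                    if (bo_va->vm == vm)
                            return bo_va;   /* already attached to this VM */
            }
            return NULL;
    }

    /* GEM open: attach the BO to the file's VM, or take another reference. */
    bo_va = radeon_vm_bo_find(vm, rbo);
    if (!bo_va)
            bo_va = radeon_vm_bo_add(rdev, vm, rbo);   /* kzalloc + init */
    ++bo_va->ref_count;

    /* GEM close: drop the reference; the last close removes the mapping. */
    bo_va = radeon_vm_bo_find(vm, rbo);
    if (bo_va && --bo_va->ref_count == 0)
            radeon_vm_bo_rmv(rdev, bo_va);
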
D | radeon_trace.h
    66  TP_PROTO(struct radeon_bo_va *bo_va),
    67  TP_ARGS(bo_va),
    75  __entry->soffset = bo_va->it.start;
    76  __entry->eoffset = bo_va->it.last + 1;
    77  __entry->flags = bo_va->flags;

D | radeon_cs.c
    506  struct radeon_bo_va *bo_va;  in radeon_bo_vm_update_pte() local
    531  bo_va = radeon_vm_bo_find(vm, bo);  in radeon_bo_vm_update_pte()
    532  if (bo_va == NULL) {  in radeon_bo_vm_update_pte()
    537  r = radeon_vm_bo_update(rdev, bo_va, bo->tbo.resource);  in radeon_bo_vm_update_pte()
    541  radeon_sync_fence(&p->ib.sync, bo_va->last_pt_update);  in radeon_bo_vm_update_pte()

/linux-6.12.1/drivers/gpu/drm/lima/

D | lima_vm.c
    79   struct lima_bo_va *bo_va, *ret = NULL;  in lima_vm_bo_find() local
    81   list_for_each_entry(bo_va, &bo->va, list) {  in lima_vm_bo_find()
    82   if (bo_va->vm == vm) {  in lima_vm_bo_find()
    83   ret = bo_va;  in lima_vm_bo_find()
    93   struct lima_bo_va *bo_va;  in lima_vm_bo_add() local
    99   bo_va = lima_vm_bo_find(vm, bo);  in lima_vm_bo_add()
    100  if (bo_va) {  in lima_vm_bo_add()
    101  bo_va->ref_count++;  in lima_vm_bo_add()
    112  bo_va = kzalloc(sizeof(*bo_va), GFP_KERNEL);  in lima_vm_bo_add()
    113  if (!bo_va) {  in lima_vm_bo_add()
    [all …]

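lima follows the same idea but folds the reference counting into lima_vm_bo_add() itself: an existing lima_bo_va just gets ref_count++, otherwise a new one is kzalloc'ed. A sketch under that assumption; the mapping setup and the real error unwinding are abbreviated:

    /*
     * Sketch of the add path in lima_vm_bo_add(), reconstructed from
     * the fragments above.
     */
    struct lima_bo_va *bo_va = lima_vm_bo_find(vm, bo);

    if (bo_va) {
            bo_va->ref_count++;        /* BO already attached to this VM */
    } else {
            bo_va = kzalloc(sizeof(*bo_va), GFP_KERNEL);
            if (!bo_va)
                    return -ENOMEM;    /* simplified error path */
            /* ... initialize bo_va and link it into bo->va ... */
    }
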
/linux-6.12.1/drivers/gpu/drm/amd/amdgpu/

D | amdgpu_vm.c
    1085  static void amdgpu_vm_bo_get_memory(struct amdgpu_bo_va *bo_va,  in amdgpu_vm_bo_get_memory() argument
    1088  struct amdgpu_vm *vm = bo_va->base.vm;  in amdgpu_vm_bo_get_memory()
    1089  struct amdgpu_bo *bo = bo_va->base.bo;  in amdgpu_vm_bo_get_memory()
    1110  struct amdgpu_bo_va *bo_va, *tmp;  in amdgpu_vm_get_memory() local
    1113  list_for_each_entry_safe(bo_va, tmp, &vm->idle, base.vm_status)  in amdgpu_vm_get_memory()
    1114  amdgpu_vm_bo_get_memory(bo_va, stats);  in amdgpu_vm_get_memory()
    1116  list_for_each_entry_safe(bo_va, tmp, &vm->evicted, base.vm_status)  in amdgpu_vm_get_memory()
    1117  amdgpu_vm_bo_get_memory(bo_va, stats);  in amdgpu_vm_get_memory()
    1119  list_for_each_entry_safe(bo_va, tmp, &vm->relocated, base.vm_status)  in amdgpu_vm_get_memory()
    1120  amdgpu_vm_bo_get_memory(bo_va, stats);  in amdgpu_vm_get_memory()
    [all …]

D | amdgpu_csa.c
    66   struct amdgpu_bo *bo, struct amdgpu_bo_va **bo_va,  in amdgpu_map_static_csa() argument
    84   *bo_va = amdgpu_vm_bo_add(adev, vm, bo);  in amdgpu_map_static_csa()
    85   if (!*bo_va) {  in amdgpu_map_static_csa()
    90   r = amdgpu_vm_bo_map(adev, *bo_va, csa_addr, 0, size,  in amdgpu_map_static_csa()
    96   amdgpu_vm_bo_del(adev, *bo_va);  in amdgpu_map_static_csa()
    106  struct amdgpu_bo *bo, struct amdgpu_bo_va *bo_va,  in amdgpu_unmap_static_csa() argument
    124  r = amdgpu_vm_bo_unmap(adev, bo_va, csa_addr);  in amdgpu_unmap_static_csa()
    130  amdgpu_vm_bo_del(adev, bo_va);  in amdgpu_unmap_static_csa()

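amdgpu_map_static_csa() above shows the basic amdgpu VA lifecycle: amdgpu_vm_bo_add() creates the amdgpu_bo_va, amdgpu_vm_bo_map() attaches a mapping at the fixed csa_addr, and amdgpu_vm_bo_del() unwinds on failure. A compressed sketch; pte_flags is a placeholder for the PTE flags elided in the amdgpu_vm_bo_map() fragment:

    /*
     * Sketch of the map path in amdgpu_csa.c (fragments above); locking
     * and VA-range reservation are omitted, pte_flags is hypothetical.
     */
    *bo_va = amdgpu_vm_bo_add(adev, vm, bo);
    if (!*bo_va)
            return -ENOMEM;

    r = amdgpu_vm_bo_map(adev, *bo_va, csa_addr, 0, size, pte_flags);
    if (r) {
            amdgpu_vm_bo_del(adev, *bo_va);   /* unwind the bo_va */
            return r;
    }
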
D | amdgpu_seq64.c
    64   struct amdgpu_bo_va **bo_va)  in amdgpu_seq64_map() argument
    85   *bo_va = amdgpu_vm_bo_add(adev, vm, bo);  in amdgpu_seq64_map()
    86   if (!*bo_va) {  in amdgpu_seq64_map()
    92   r = amdgpu_vm_bo_map(adev, *bo_va, seq64_addr, 0, AMDGPU_VA_RESERVED_SEQ64_SIZE,  in amdgpu_seq64_map()
    96   amdgpu_vm_bo_del(adev, *bo_va);  in amdgpu_seq64_map()
    100  r = amdgpu_vm_bo_update(adev, *bo_va, false);  in amdgpu_seq64_map()
    103  amdgpu_vm_bo_del(adev, *bo_va);  in amdgpu_seq64_map()

D | amdgpu_gem.c
    166  struct amdgpu_bo_va *bo_va;  in amdgpu_gem_object_open() local
    182  bo_va = amdgpu_vm_bo_find(vm, abo);  in amdgpu_gem_object_open()
    183  if (!bo_va)  in amdgpu_gem_object_open()
    184  bo_va = amdgpu_vm_bo_add(adev, vm, abo);  in amdgpu_gem_object_open()
    186  ++bo_va->ref_count;  in amdgpu_gem_object_open()
    233  struct amdgpu_bo_va *bo_va;  in amdgpu_gem_object_close() local
    250  bo_va = amdgpu_vm_bo_find(vm, bo);  in amdgpu_gem_object_close()
    251  if (!bo_va || --bo_va->ref_count)  in amdgpu_gem_object_close()
    254  amdgpu_vm_bo_del(adev, bo_va);  in amdgpu_gem_object_close()
    643  struct amdgpu_bo_va *bo_va,  in amdgpu_gem_va_update_vm() argument
    [all …]

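amdgpu's GEM close path compresses the find/decrement/delete steps into a single test: with no bo_va, or a ref_count that is still non-zero after the decrement, there is nothing to tear down; only the last closer reaches amdgpu_vm_bo_del(). Sketch (the out_unlock label is hypothetical):

    /* Sketch of the close path in amdgpu_gem_object_close() (fragment above). */
    bo_va = amdgpu_vm_bo_find(vm, bo);
    if (!bo_va || --bo_va->ref_count)
            goto out_unlock;          /* never attached, or still referenced */

    amdgpu_vm_bo_del(adev, bo_va);    /* last reference: drop the mapping */
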
D | amdgpu_amdkfd_gpuvm.c
    83   if (entry->bo_va->base.vm == avm)  in kfd_mem_is_attached()
    558  struct amdgpu_bo *bo = attachment->bo_va->base.bo;  in kfd_mem_dmamap_userptr()
    605  struct amdgpu_bo *bo = attachment->bo_va->base.bo;  in kfd_mem_dmamap_dmabuf()
    648  struct amdgpu_bo *bo = attachment->bo_va->base.bo;  in kfd_mem_dmamap_sg_bo()
    725  struct amdgpu_bo *bo = attachment->bo_va->base.bo;  in kfd_mem_dmaunmap_userptr()
    771  struct amdgpu_bo *bo = attachment->bo_va->base.bo;  in kfd_mem_dmaunmap_sg_bo()
    876  struct amdgpu_bo_va *bo_va;  in kfd_mem_attach() local
    964  bo_va = amdgpu_vm_bo_find(vm, bo[i]);  in kfd_mem_attach()
    965  if (!bo_va)  in kfd_mem_attach()
    966  bo_va = amdgpu_vm_bo_add(adev, vm, bo[i]);  in kfd_mem_attach()
    [all …]

D | amdgpu_csa.h
    35  struct amdgpu_bo *bo, struct amdgpu_bo_va **bo_va,
    38  struct amdgpu_bo *bo, struct amdgpu_bo_va *bo_va,

D | amdgpu_umsch_mm.c
    76   struct amdgpu_bo_va *bo_va;  member
    83   struct amdgpu_bo *bo, struct amdgpu_bo_va **bo_va,  in map_ring_data() argument
    105  *bo_va = amdgpu_vm_bo_add(adev, vm, bo);  in map_ring_data()
    106  if (!*bo_va) {  in map_ring_data()
    111  r = amdgpu_vm_bo_map(adev, *bo_va, addr, 0, size,  in map_ring_data()
    119  r = amdgpu_vm_bo_update(adev, *bo_va, false);  in map_ring_data()
    123  amdgpu_sync_fence(&sync, (*bo_va)->last_pt_update);  in map_ring_data()
    139  amdgpu_vm_bo_del(adev, *bo_va);  in map_ring_data()
    149  struct amdgpu_bo *bo, struct amdgpu_bo_va *bo_va,  in unmap_ring_data() argument
    169  r = amdgpu_vm_bo_unmap(adev, bo_va, addr);  in unmap_ring_data()
    [all …]

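map_ring_data() strings the whole lifecycle together: add, map, write the page tables with amdgpu_vm_bo_update(), then collect last_pt_update so callers can wait for the mapping to be resident. A condensed sketch; flags again stands in for the elided PTE flags, and the sync-object setup and error labels are abbreviated:

    /* Sketch of map_ring_data() in amdgpu_umsch_mm.c (fragments above). */
    *bo_va = amdgpu_vm_bo_add(adev, vm, bo);
    if (!*bo_va)
            return -ENOMEM;

    r = amdgpu_vm_bo_map(adev, *bo_va, addr, 0, size, flags);
    if (r)
            goto error_del;           /* error_del: amdgpu_vm_bo_del(...) */

    r = amdgpu_vm_bo_update(adev, *bo_va, false);   /* write the PTEs */
    if (r)
            goto error_del;

    /* Fence of the page-table update; waited on before the ring is used. */
    amdgpu_sync_fence(&sync, (*bo_va)->last_pt_update);
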
D | amdgpu_trace.h
    246  TP_PROTO(struct amdgpu_bo_va *bo_va,
    248  TP_ARGS(bo_va, mapping),
    258  __entry->bo = bo_va ? bo_va->base.bo : NULL;
    270  TP_PROTO(struct amdgpu_bo_va *bo_va,
    272  TP_ARGS(bo_va, mapping),
    282  __entry->bo = bo_va ? bo_va->base.bo : NULL;

D | amdgpu_vm.h
    515  struct amdgpu_bo_va *bo_va,
    527  struct amdgpu_bo_va *bo_va,
    531  struct amdgpu_bo_va *bo_va,
    535  struct amdgpu_bo_va *bo_va,
    544  struct amdgpu_bo_va *bo_va);

D | amdgpu_cs.c
    917   e->bo_va = amdgpu_vm_bo_find(vm, e->bo);  in amdgpu_cs_parser_bos()
    1100  struct amdgpu_bo_va *bo_va;  in amdgpu_cs_vm_handling() local
    1132  bo_va = fpriv->csa_va;  in amdgpu_cs_vm_handling()
    1133  BUG_ON(!bo_va);  in amdgpu_cs_vm_handling()
    1134  r = amdgpu_vm_bo_update(adev, bo_va, false);  in amdgpu_cs_vm_handling()
    1138  r = amdgpu_sync_fence(&p->sync, bo_va->last_pt_update);  in amdgpu_cs_vm_handling()
    1149  bo_va = e->bo_va;  in amdgpu_cs_vm_handling()
    1150  if (bo_va == NULL)  in amdgpu_cs_vm_handling()
    1153  r = amdgpu_vm_bo_update(adev, bo_va, false);  in amdgpu_cs_vm_handling()
    1157  r = amdgpu_sync_fence(&p->sync, bo_va->last_pt_update);  in amdgpu_cs_vm_handling()
    [all …]

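amdgpu_cs_vm_handling() applies the same update-then-sync step to every BO in a submission: the bo_va cached on the bo_list entry by amdgpu_cs_parser_bos() has its page tables updated, and the resulting fence is added to the parser's sync object so the command submission cannot run before the PTEs land. Sketch of the per-entry body, with the surrounding bo_list iteration compressed:

    /* Sketch of the per-BO step in amdgpu_cs_vm_handling() (fragments above). */
    bo_va = e->bo_va;                 /* cached by amdgpu_cs_parser_bos() */
    if (bo_va == NULL)
            continue;                 /* entry has no per-VM mapping */

    r = amdgpu_vm_bo_update(adev, bo_va, false);
    if (r)
            return r;

    /* CS must wait for the page-table update to complete. */
    r = amdgpu_sync_fence(&p->sync, bo_va->last_pt_update);
    if (r)
            return r;
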
D | amdgpu_seq64.h
    44  struct amdgpu_bo_va **bo_va);

D | amdgpu_mes.c
    1300  struct amdgpu_bo_va *bo_va;  in amdgpu_mes_ctx_map_meta_data() local
    1321  bo_va = amdgpu_vm_bo_add(adev, vm, ctx_data->meta_data_obj);  in amdgpu_mes_ctx_map_meta_data()
    1322  if (!bo_va) {  in amdgpu_mes_ctx_map_meta_data()
    1328  r = amdgpu_vm_bo_map(adev, bo_va, ctx_data->meta_data_gpu_addr, 0,  in amdgpu_mes_ctx_map_meta_data()
    1338  r = amdgpu_vm_bo_update(adev, bo_va, false);  in amdgpu_mes_ctx_map_meta_data()
    1343  amdgpu_sync_fence(&sync, bo_va->last_pt_update);  in amdgpu_mes_ctx_map_meta_data()
    1356  ctx_data->meta_data_va = bo_va;  in amdgpu_mes_ctx_map_meta_data()
    1360  amdgpu_vm_bo_del(adev, bo_va);  in amdgpu_mes_ctx_map_meta_data()
    1371  struct amdgpu_bo_va *bo_va = ctx_data->meta_data_va;  in amdgpu_mes_ctx_unmap_meta_data() local
    1373  struct amdgpu_vm *vm = bo_va->base.vm;  in amdgpu_mes_ctx_unmap_meta_data()
    [all …]

D | amdgpu_bo_list.h
    39  struct amdgpu_bo_va *bo_va;  member

D | amdgpu_object.h
    65  struct amdgpu_bo_va *bo_va;  member

D | amdgpu_amdkfd.h
    63  struct amdgpu_bo_va *bo_va;  member

D | gmc_v9_0.c
    1133  struct amdgpu_vm *vm = mapping->bo_va->base.vm;  in gmc_v9_0_get_coherence_flags()
    1165  if (mapping->bo_va->is_xgmi)  in gmc_v9_0_get_coherence_flags()
    1245  struct amdgpu_bo *bo = mapping->bo_va->base.bo;  in gmc_v9_0_get_vm_pte()

D | vcn_v1_0.c
    2033  if (!mapping || !mapping->bo_va || !mapping->bo_va->base.bo)  in vcn_v1_0_validate_bo()
    2036  bo = mapping->bo_va->base.bo;  in vcn_v1_0_validate_bo()

D | gmc_v11_0.c
    482  struct amdgpu_bo *bo = mapping->bo_va->base.bo;  in gmc_v11_0_get_vm_pte()

D | gmc_v12_0.c
    500  struct amdgpu_bo *bo = mapping->bo_va->base.bo;  in gmc_v12_0_get_vm_pte()

/linux-6.12.1/drivers/gpu/drm/amd/amdkfd/

D | kfd_queue.c
    218  *pbo = amdgpu_bo_ref(mapping->bo_va->base.bo);  in kfd_queue_buffer_get()
    219  mapping->bo_va->queue_refcount++;  in kfd_queue_buffer_get()
    356  struct amdgpu_bo_va *bo_va;  in kfd_queue_unref_bo_va() local
    358  bo_va = amdgpu_vm_bo_find(vm, *bo);  in kfd_queue_unref_bo_va()
    359  if (bo_va && bo_va->queue_refcount)  in kfd_queue_unref_bo_va()
    360  bo_va->queue_refcount--;  in kfd_queue_unref_bo_va()

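kfd_queue.c layers a second counter on top of ref_count: a BO backing a user queue gets its bo_va's queue_refcount bumped in kfd_queue_buffer_get() and dropped again on queue teardown. Sketch of the unref side, reconstructed from the fragments above:

    /* Sketch of kfd_queue_unref_bo_va() (fragments above). */
    struct amdgpu_bo_va *bo_va;

    bo_va = amdgpu_vm_bo_find(vm, *bo);
    if (bo_va && bo_va->queue_refcount)
            bo_va->queue_refcount--;  /* queue no longer pins this mapping */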