Lines matching refs: bo_va
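
Each entry below is: source line number, the matching line from drivers/gpu/drm/radeon/radeon_vm.c, the containing function, and a local/argument tag for declaration sites. Gaps in the line numbers are source lines that do not mention bo_va. A short annotated sketch follows each function's group of matches; anything the listing does not show (signatures, locking, error paths) is a reconstruction and is flagged as such.
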

297 struct radeon_bo_va *bo_va; in radeon_vm_bo_find() local
299 list_for_each_entry(bo_va, &bo->va, bo_list) { in radeon_vm_bo_find()
300 if (bo_va->vm == vm) in radeon_vm_bo_find()
301 return bo_va; in radeon_vm_bo_find()
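
Lines 297-301 are the whole lookup: radeon_vm_bo_find() walks the per-BO mapping list and returns the entry owned by the given VM. A minimal sketch; the signature and the trailing return NULL are inferred, since the listing only shows lines mentioning bo_va:

struct radeon_bo_va *radeon_vm_bo_find(struct radeon_vm *vm,
				       struct radeon_bo *bo)
{
	struct radeon_bo_va *bo_va;

	/* every mapping of a BO is linked into bo->va via bo_list */
	list_for_each_entry(bo_va, &bo->va, bo_list) {
		if (bo_va->vm == vm)
			return bo_va;	/* at most one mapping per (bo, vm) */
	}
	return NULL;
}
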
324 struct radeon_bo_va *bo_va; in radeon_vm_bo_add() local
326 bo_va = kzalloc(sizeof(struct radeon_bo_va), GFP_KERNEL); in radeon_vm_bo_add()
327 if (bo_va == NULL) in radeon_vm_bo_add()
330 bo_va->vm = vm; in radeon_vm_bo_add()
331 bo_va->bo = bo; in radeon_vm_bo_add()
332 bo_va->it.start = 0; in radeon_vm_bo_add()
333 bo_va->it.last = 0; in radeon_vm_bo_add()
334 bo_va->flags = 0; in radeon_vm_bo_add()
335 bo_va->ref_count = 1; in radeon_vm_bo_add()
336 INIT_LIST_HEAD(&bo_va->bo_list); in radeon_vm_bo_add()
337 INIT_LIST_HEAD(&bo_va->vm_status); in radeon_vm_bo_add()
340 list_add_tail(&bo_va->bo_list, &bo->va); in radeon_vm_bo_add()
343 return bo_va; in radeon_vm_bo_add()
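
radeon_vm_bo_add() is the matching constructor: zero-allocate a mapping, leave the interval empty (start == last == 0 means "no VA assigned yet"), and link it onto the BO's list. Sketch with the error path filled in; the mutex around list_add_tail() is inferred from the gap at lines 338-342:

struct radeon_bo_va *radeon_vm_bo_add(struct radeon_device *rdev,
				      struct radeon_vm *vm,
				      struct radeon_bo *bo)
{
	struct radeon_bo_va *bo_va;

	bo_va = kzalloc(sizeof(struct radeon_bo_va), GFP_KERNEL);
	if (bo_va == NULL)
		return NULL;

	bo_va->vm = vm;
	bo_va->bo = bo;
	bo_va->it.start = 0;	/* empty interval: not yet mapped */
	bo_va->it.last = 0;
	bo_va->flags = 0;
	bo_va->ref_count = 1;
	INIT_LIST_HEAD(&bo_va->bo_list);
	INIT_LIST_HEAD(&bo_va->vm_status);

	mutex_lock(&vm->mutex);		/* inferred, not in the listing */
	list_add_tail(&bo_va->bo_list, &bo->va);
	mutex_unlock(&vm->mutex);

	return bo_va;
}
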
448 struct radeon_bo_va *bo_va, in radeon_vm_bo_set_addr() argument
452 uint64_t size = radeon_bo_size(bo_va->bo); in radeon_vm_bo_set_addr()
453 struct radeon_vm *vm = bo_va->vm; in radeon_vm_bo_set_addr()
484 if (it && it != &bo_va->it) { in radeon_vm_bo_set_addr()
489 "(bo %p 0x%010lx 0x%010lx)\n", bo_va->bo, in radeon_vm_bo_set_addr()
497 if (bo_va->it.start || bo_va->it.last) { in radeon_vm_bo_set_addr()
506 tmp->it.start = bo_va->it.start; in radeon_vm_bo_set_addr()
507 tmp->it.last = bo_va->it.last; in radeon_vm_bo_set_addr()
509 tmp->bo = radeon_bo_ref(bo_va->bo); in radeon_vm_bo_set_addr()
511 interval_tree_remove(&bo_va->it, &vm->va); in radeon_vm_bo_set_addr()
513 bo_va->it.start = 0; in radeon_vm_bo_set_addr()
514 bo_va->it.last = 0; in radeon_vm_bo_set_addr()
515 list_del_init(&bo_va->vm_status); in radeon_vm_bo_set_addr()
522 bo_va->it.start = soffset; in radeon_vm_bo_set_addr()
523 bo_va->it.last = eoffset; in radeon_vm_bo_set_addr()
524 list_add(&bo_va->vm_status, &vm->cleared); in radeon_vm_bo_set_addr()
526 interval_tree_insert(&bo_va->it, &vm->va); in radeon_vm_bo_set_addr()
529 bo_va->flags = flags; in radeon_vm_bo_set_addr()
539 radeon_bo_unreserve(bo_va->bo); in radeon_vm_bo_set_addr()
582 radeon_bo_unreserve(bo_va->bo); in radeon_vm_bo_set_addr()
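
The radeon_vm_bo_set_addr() fragments (448-582) show the remap dance: query the VM's interval tree for a conflicting mapping, and if this bo_va already covers a range, clone that range into a throw-away entry on vm->freed so its page-table entries can be cleared later, then install the new interval. A condensed sketch of just that core, as a fragment of the function body; the tmp allocation, its error handling, the locking, and the page-unit form of soffset/eoffset are reconstructions around the listed lines:

	struct interval_tree_node *it;

	/* reject a range already claimed by a different mapping */
	it = interval_tree_iter_first(&vm->va, soffset, eoffset);
	if (it && it != &bo_va->it) {
		struct radeon_bo_va *tmp;
		tmp = container_of(it, struct radeon_bo_va, it);
		dev_err(rdev->dev, "bo %p va 0x%010Lx conflict with "
			"(bo %p 0x%010lx 0x%010lx)\n", bo_va->bo,
			soffset, tmp->bo, tmp->it.start, tmp->it.last);
		return -EINVAL;
	}

	if (bo_va->it.start || bo_va->it.last) {
		/* hand the old range to a clone on vm->freed; clearing
		 * its PTEs is deferred to radeon_vm_clear_freed() */
		struct radeon_bo_va *tmp;
		tmp = kzalloc(sizeof(struct radeon_bo_va), GFP_KERNEL);
		if (!tmp)
			return -ENOMEM;
		tmp->it.start = bo_va->it.start;
		tmp->it.last = bo_va->it.last;
		tmp->vm = vm;
		tmp->bo = radeon_bo_ref(bo_va->bo);	/* BO must outlive the clear */

		interval_tree_remove(&bo_va->it, &vm->va);
		bo_va->it.start = 0;
		bo_va->it.last = 0;
		list_del_init(&bo_va->vm_status);
		list_add(&tmp->vm_status, &vm->freed);
	}

	if (soffset || eoffset) {
		bo_va->it.start = soffset;
		bo_va->it.last = eoffset;
		list_add(&bo_va->vm_status, &vm->cleared);
		interval_tree_insert(&bo_va->it, &vm->va);
	}

	bo_va->flags = flags;

The radeon_bo_unreserve() calls at 539 and 582 drop the reservation the caller took on bo_va->bo, on the two exit paths of the function.
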
912 struct radeon_bo_va *bo_va, in radeon_vm_bo_update() argument
915 struct radeon_vm *vm = bo_va->vm; in radeon_vm_bo_update()
922 if (!bo_va->it.start) { in radeon_vm_bo_update()
924 bo_va->bo, vm); in radeon_vm_bo_update()
930 if (list_empty(&bo_va->vm_status)) { in radeon_vm_bo_update()
934 list_del_init(&bo_va->vm_status); in radeon_vm_bo_update()
936 list_del(&bo_va->vm_status); in radeon_vm_bo_update()
937 list_add(&bo_va->vm_status, &vm->cleared); in radeon_vm_bo_update()
941 bo_va->flags &= ~RADEON_VM_PAGE_VALID; in radeon_vm_bo_update()
942 bo_va->flags &= ~RADEON_VM_PAGE_SYSTEM; in radeon_vm_bo_update()
943 bo_va->flags &= ~RADEON_VM_PAGE_SNOOPED; in radeon_vm_bo_update()
944 if (bo_va->bo && radeon_ttm_tt_is_readonly(rdev, bo_va->bo->tbo.ttm)) in radeon_vm_bo_update()
945 bo_va->flags &= ~RADEON_VM_PAGE_WRITEABLE; in radeon_vm_bo_update()
950 bo_va->flags |= RADEON_VM_PAGE_VALID; in radeon_vm_bo_update()
953 bo_va->flags |= RADEON_VM_PAGE_SYSTEM; in radeon_vm_bo_update()
954 if (!(bo_va->bo->flags & (RADEON_GEM_GTT_WC | RADEON_GEM_GTT_UC))) in radeon_vm_bo_update()
955 bo_va->flags |= RADEON_VM_PAGE_SNOOPED; in radeon_vm_bo_update()
964 trace_radeon_vm_bo_update(bo_va); in radeon_vm_bo_update()
966 nptes = bo_va->it.last - bo_va->it.start + 1; in radeon_vm_bo_update()
975 flags = radeon_vm_page_flags(bo_va->flags); in radeon_vm_bo_update()
1004 if (!(bo_va->flags & RADEON_VM_PAGE_VALID)) { in radeon_vm_bo_update()
1011 r = radeon_vm_update_ptes(rdev, vm, &ib, bo_va->it.start, in radeon_vm_bo_update()
1012 bo_va->it.last + 1, addr, in radeon_vm_bo_update()
1013 radeon_vm_page_flags(bo_va->flags)); in radeon_vm_bo_update()
1028 radeon_vm_fence_pts(vm, bo_va->it.start, bo_va->it.last + 1, ib.fence); in radeon_vm_bo_update()
1029 radeon_fence_unref(&bo_va->last_pt_update); in radeon_vm_bo_update()
1030 bo_va->last_pt_update = radeon_fence_ref(ib.fence); in radeon_vm_bo_update()
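
Within radeon_vm_bo_update() (912-1030), the mapping's flags are recomputed from scratch on each update: VALID/SYSTEM/SNOOPED are cleared first (941-943), WRITEABLE is dropped for read-only TTM pages (944-945), then bits are re-set according to where the BO currently sits. A sketch of that recomputation, again as a body fragment; the branching on mem's placement type is reconstructed context, not in the listing:

	bo_va->flags &= ~RADEON_VM_PAGE_VALID;
	bo_va->flags &= ~RADEON_VM_PAGE_SYSTEM;
	bo_va->flags &= ~RADEON_VM_PAGE_SNOOPED;
	if (bo_va->bo && radeon_ttm_tt_is_readonly(rdev, bo_va->bo->tbo.ttm))
		bo_va->flags &= ~RADEON_VM_PAGE_WRITEABLE;

	if (mem) {
		addr = (u64)mem->start << PAGE_SHIFT;
		if (mem->mem_type != TTM_PL_SYSTEM)
			bo_va->flags |= RADEON_VM_PAGE_VALID;
		if (mem->mem_type == TTM_PL_TT) {
			bo_va->flags |= RADEON_VM_PAGE_SYSTEM;
			if (!(bo_va->bo->flags & (RADEON_GEM_GTT_WC | RADEON_GEM_GTT_UC)))
				bo_va->flags |= RADEON_VM_PAGE_SNOOPED;
		} else {
			addr += rdev->vm_manager.vram_base_offset;
		}
	} else {
		addr = 0;	/* unmap: the PTEs get cleared */
	}

The rest of the function (964-1030) sizes the update from the interval (nptes at 966), builds an IB, rewrites the PTEs covering [it.start, it.last] via radeon_vm_update_ptes(), fences every touched page table, and parks the IB fence in bo_va->last_pt_update.
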
1050 struct radeon_bo_va *bo_va; in radeon_vm_clear_freed() local
1055 bo_va = list_first_entry(&vm->freed, in radeon_vm_clear_freed()
1059 r = radeon_vm_bo_update(rdev, bo_va, NULL); in radeon_vm_clear_freed()
1060 radeon_bo_unref(&bo_va->bo); in radeon_vm_clear_freed()
1061 radeon_fence_unref(&bo_va->last_pt_update); in radeon_vm_clear_freed()
1063 list_del(&bo_va->vm_status); in radeon_vm_clear_freed()
1064 kfree(bo_va); in radeon_vm_clear_freed()
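
radeon_vm_clear_freed() (1050-1064) drains vm->freed: each deferred clone gets a final radeon_vm_bo_update(..., NULL) to wipe its page-table range, then its BO reference and fence are dropped and the clone is freed. Sketch of the loop; the status_lock choreography is inferred from the locking pattern visible at 1157-1161:

int radeon_vm_clear_freed(struct radeon_device *rdev,
			  struct radeon_vm *vm)
{
	struct radeon_bo_va *bo_va;
	int r = 0;

	spin_lock(&vm->status_lock);
	while (!list_empty(&vm->freed)) {
		bo_va = list_first_entry(&vm->freed,
					 struct radeon_bo_va, vm_status);
		spin_unlock(&vm->status_lock);

		r = radeon_vm_bo_update(rdev, bo_va, NULL);	/* clear PTEs */
		radeon_bo_unref(&bo_va->bo);
		radeon_fence_unref(&bo_va->last_pt_update);

		spin_lock(&vm->status_lock);
		list_del(&bo_va->vm_status);
		kfree(bo_va);
		if (r)
			break;
	}
	spin_unlock(&vm->status_lock);
	return r;
}
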
1088 struct radeon_bo_va *bo_va; in radeon_vm_clear_invalids() local
1093 bo_va = list_first_entry(&vm->invalidated, in radeon_vm_clear_invalids()
1097 r = radeon_vm_bo_update(rdev, bo_va, NULL); in radeon_vm_clear_invalids()
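
radeon_vm_clear_invalids() (1088-1097) is the same drain over vm->invalidated, except the bo_va stays alive: radeon_vm_bo_update(..., NULL) itself moves the entry off the list (see 930-937), so the loop terminates once everything has been revalidated. Sketch under the same locking assumption:

	spin_lock(&vm->status_lock);
	while (!list_empty(&vm->invalidated)) {
		bo_va = list_first_entry(&vm->invalidated,
					 struct radeon_bo_va, vm_status);
		spin_unlock(&vm->status_lock);

		/* bo_update unlinks bo_va from invalidated (930-937) */
		r = radeon_vm_bo_update(rdev, bo_va, NULL);
		if (r)
			return r;

		spin_lock(&vm->status_lock);
	}
	spin_unlock(&vm->status_lock);
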
1119 struct radeon_bo_va *bo_va) in radeon_vm_bo_rmv() argument
1121 struct radeon_vm *vm = bo_va->vm; in radeon_vm_bo_rmv()
1123 list_del(&bo_va->bo_list); in radeon_vm_bo_rmv()
1126 if (bo_va->it.start || bo_va->it.last) in radeon_vm_bo_rmv()
1127 interval_tree_remove(&bo_va->it, &vm->va); in radeon_vm_bo_rmv()
1130 list_del(&bo_va->vm_status); in radeon_vm_bo_rmv()
1131 if (bo_va->it.start || bo_va->it.last) { in radeon_vm_bo_rmv()
1132 bo_va->bo = radeon_bo_ref(bo_va->bo); in radeon_vm_bo_rmv()
1133 list_add(&bo_va->vm_status, &vm->freed); in radeon_vm_bo_rmv()
1135 radeon_fence_unref(&bo_va->last_pt_update); in radeon_vm_bo_rmv()
1136 kfree(bo_va); in radeon_vm_bo_rmv()
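
radeon_vm_bo_rmv() (1119-1136) shows the deferred-clear decision from the removal side: a mapping that still covers an address range is repurposed as its own vm->freed entry, taking an extra BO reference so the buffer outlives the pending clear; only an unmapped bo_va is freed on the spot. Sketch with the mutex/spinlock filled in from the line-number gaps:

void radeon_vm_bo_rmv(struct radeon_device *rdev,
		      struct radeon_bo_va *bo_va)
{
	struct radeon_vm *vm = bo_va->vm;

	list_del(&bo_va->bo_list);

	mutex_lock(&vm->mutex);
	if (bo_va->it.start || bo_va->it.last)
		interval_tree_remove(&bo_va->it, &vm->va);

	spin_lock(&vm->status_lock);
	list_del(&bo_va->vm_status);
	if (bo_va->it.start || bo_va->it.last) {
		bo_va->bo = radeon_bo_ref(bo_va->bo);	/* keep BO until cleared */
		list_add(&bo_va->vm_status, &vm->freed);
	} else {
		radeon_fence_unref(&bo_va->last_pt_update);
		kfree(bo_va);
	}
	spin_unlock(&vm->status_lock);
	mutex_unlock(&vm->mutex);
}
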
1154 struct radeon_bo_va *bo_va; in radeon_vm_bo_invalidate() local
1156 list_for_each_entry(bo_va, &bo->va, bo_list) { in radeon_vm_bo_invalidate()
1157 spin_lock(&bo_va->vm->status_lock); in radeon_vm_bo_invalidate()
1158 if (list_empty(&bo_va->vm_status) && in radeon_vm_bo_invalidate()
1159 (bo_va->it.start || bo_va->it.last)) in radeon_vm_bo_invalidate()
1160 list_add(&bo_va->vm_status, &bo_va->vm->invalidated); in radeon_vm_bo_invalidate()
1161 spin_unlock(&bo_va->vm->status_lock); in radeon_vm_bo_invalidate()
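
radeon_vm_bo_invalidate() (1154-1161) runs when a BO is moved: every mapping that has a VA range and is not already on a status list is queued on its VM's invalidated list for radeon_vm_clear_invalids() to pick up. The fragment is essentially the full body; only the signature is missing:

void radeon_vm_bo_invalidate(struct radeon_device *rdev,
			     struct radeon_bo *bo)
{
	struct radeon_bo_va *bo_va;

	list_for_each_entry(bo_va, &bo->va, bo_list) {
		spin_lock(&bo_va->vm->status_lock);
		if (list_empty(&bo_va->vm_status) &&
		    (bo_va->it.start || bo_va->it.last))
			list_add(&bo_va->vm_status, &bo_va->vm->invalidated);
		spin_unlock(&bo_va->vm->status_lock);
	}
}
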
1235 struct radeon_bo_va *bo_va, *tmp; in radeon_vm_fini() local
1241 rbtree_postorder_for_each_entry_safe(bo_va, tmp, in radeon_vm_fini()
1243 interval_tree_remove(&bo_va->it, &vm->va); in radeon_vm_fini()
1244 r = radeon_bo_reserve(bo_va->bo, false); in radeon_vm_fini()
1246 list_del_init(&bo_va->bo_list); in radeon_vm_fini()
1247 radeon_bo_unreserve(bo_va->bo); in radeon_vm_fini()
1248 radeon_fence_unref(&bo_va->last_pt_update); in radeon_vm_fini()
1249 kfree(bo_va); in radeon_vm_fini()
1252 list_for_each_entry_safe(bo_va, tmp, &vm->freed, vm_status) { in radeon_vm_fini()
1253 radeon_bo_unref(&bo_va->bo); in radeon_vm_fini()
1254 radeon_fence_unref(&bo_va->last_pt_update); in radeon_vm_fini()
1255 kfree(bo_va); in radeon_vm_fini()
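
radeon_vm_fini() (1235-1255) tears the VM down in two passes: a post-order walk over the interval tree frees the still-mapped bo_vas, reserving each BO so it can be unlinked from bo->va safely, then vm->freed is flushed the way clear_freed would, minus the page-table update (the tables are going away anyway). Sketch; the rb_root member access on line 1242 is inferred, since that continuation line does not mention bo_va and so is absent from the listing:

	rbtree_postorder_for_each_entry_safe(bo_va, tmp,
					     &vm->va.rb_root, it.rb) {
		interval_tree_remove(&bo_va->it, &vm->va);
		r = radeon_bo_reserve(bo_va->bo, false);
		if (!r) {
			list_del_init(&bo_va->bo_list);
			radeon_bo_unreserve(bo_va->bo);
			radeon_fence_unref(&bo_va->last_pt_update);
			kfree(bo_va);
		}
	}

	list_for_each_entry_safe(bo_va, tmp, &vm->freed, vm_status) {
		radeon_bo_unref(&bo_va->bo);
		radeon_fence_unref(&bo_va->last_pt_update);
		kfree(bo_va);
	}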