Lines Matching refs:bo_va

1085 static void amdgpu_vm_bo_get_memory(struct amdgpu_bo_va *bo_va,  in amdgpu_vm_bo_get_memory()  argument
1088 struct amdgpu_vm *vm = bo_va->base.vm; in amdgpu_vm_bo_get_memory()
1089 struct amdgpu_bo *bo = bo_va->base.bo; in amdgpu_vm_bo_get_memory()
1110 struct amdgpu_bo_va *bo_va, *tmp; in amdgpu_vm_get_memory() local
1113 list_for_each_entry_safe(bo_va, tmp, &vm->idle, base.vm_status) in amdgpu_vm_get_memory()
1114 amdgpu_vm_bo_get_memory(bo_va, stats); in amdgpu_vm_get_memory()
1116 list_for_each_entry_safe(bo_va, tmp, &vm->evicted, base.vm_status) in amdgpu_vm_get_memory()
1117 amdgpu_vm_bo_get_memory(bo_va, stats); in amdgpu_vm_get_memory()
1119 list_for_each_entry_safe(bo_va, tmp, &vm->relocated, base.vm_status) in amdgpu_vm_get_memory()
1120 amdgpu_vm_bo_get_memory(bo_va, stats); in amdgpu_vm_get_memory()
1122 list_for_each_entry_safe(bo_va, tmp, &vm->moved, base.vm_status) in amdgpu_vm_get_memory()
1123 amdgpu_vm_bo_get_memory(bo_va, stats); in amdgpu_vm_get_memory()
1125 list_for_each_entry_safe(bo_va, tmp, &vm->invalidated, base.vm_status) in amdgpu_vm_get_memory()
1126 amdgpu_vm_bo_get_memory(bo_va, stats); in amdgpu_vm_get_memory()
1128 list_for_each_entry_safe(bo_va, tmp, &vm->done, base.vm_status) in amdgpu_vm_get_memory()
1129 amdgpu_vm_bo_get_memory(bo_va, stats); in amdgpu_vm_get_memory()
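The references at 1110-1129 all follow one pattern: amdgpu_vm_get_memory() walks every per-VM status list and folds each bo_va it finds into the statistics via amdgpu_vm_bo_get_memory(). A minimal sketch of that loop body, assembled from the fragments above (the stats structure and any locking around the walk are not visible in this listing and are left out):

	struct amdgpu_bo_va *bo_va, *tmp;

	/* every bo_va sits on exactly one per-VM status list; accumulate
	 * memory usage list by list */
	list_for_each_entry_safe(bo_va, tmp, &vm->idle, base.vm_status)
		amdgpu_vm_bo_get_memory(bo_va, stats);
	list_for_each_entry_safe(bo_va, tmp, &vm->evicted, base.vm_status)
		amdgpu_vm_bo_get_memory(bo_va, stats);
	/* ...and the same for vm->relocated, vm->moved, vm->invalidated
	 * and vm->done, exactly as listed above */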
1145 int amdgpu_vm_bo_update(struct amdgpu_device *adev, struct amdgpu_bo_va *bo_va, in amdgpu_vm_bo_update() argument
1148 struct amdgpu_bo *bo = bo_va->base.bo; in amdgpu_vm_bo_update()
1149 struct amdgpu_vm *vm = bo_va->base.vm; in amdgpu_vm_bo_update()
1181 if (obj->import_attach && bo_va->is_xgmi) { in amdgpu_vm_bo_update()
1222 last_update = &bo_va->last_pt_update; in amdgpu_vm_bo_update()
1224 if (!clear && bo_va->base.moved) { in amdgpu_vm_bo_update()
1226 list_splice_init(&bo_va->valids, &bo_va->invalids); in amdgpu_vm_bo_update()
1228 } else if (bo_va->cleared != clear) { in amdgpu_vm_bo_update()
1229 list_splice_init(&bo_va->valids, &bo_va->invalids); in amdgpu_vm_bo_update()
1232 list_for_each_entry(mapping, &bo_va->invalids, list) { in amdgpu_vm_bo_update()
1266 amdgpu_vm_bo_evicted(&bo_va->base); in amdgpu_vm_bo_update()
1268 amdgpu_vm_bo_idle(&bo_va->base); in amdgpu_vm_bo_update()
1270 amdgpu_vm_bo_done(&bo_va->base); in amdgpu_vm_bo_update()
1273 list_splice_init(&bo_va->invalids, &bo_va->valids); in amdgpu_vm_bo_update()
1274 bo_va->cleared = clear; in amdgpu_vm_bo_update()
1275 bo_va->base.moved = false; in amdgpu_vm_bo_update()
1278 list_for_each_entry(mapping, &bo_va->valids, list) in amdgpu_vm_bo_update()
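The bo_va references inside amdgpu_vm_bo_update() (1145-1278) implement the valids/invalids bookkeeping: mappings that still need a page-table write live on bo_va->invalids, and once written they are spliced back onto bo_va->valids. A condensed sketch of that state handling, based only on the lines shown above (the actual page-table update call and the conditions for re-filing the bo_va are not part of the listing):

	struct amdgpu_bo_va_mapping *mapping;

	if (!clear && bo_va->base.moved) {
		/* the BO moved, so every previously valid mapping must be rewritten */
		list_splice_init(&bo_va->valids, &bo_va->invalids);
	} else if (bo_va->cleared != clear) {
		/* switching between mapping and clearing also invalidates everything */
		list_splice_init(&bo_va->valids, &bo_va->invalids);
	}

	list_for_each_entry(mapping, &bo_va->invalids, list) {
		/* page-table update for this mapping (call elided in the listing) */
	}

	/* The bo_va is then re-filed with amdgpu_vm_bo_evicted()/_idle()/_done()
	 * depending on where its BO currently lives (conditions not visible
	 * above), and the freshly written mappings become the new valid set: */
	list_splice_init(&bo_va->invalids, &bo_va->valids);
	bo_va->cleared = clear;
	bo_va->base.moved = false;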
1493 struct amdgpu_bo_va *bo_va; in amdgpu_vm_handle_moved() local
1500 bo_va = list_first_entry(&vm->moved, struct amdgpu_bo_va, in amdgpu_vm_handle_moved()
1505 r = amdgpu_vm_bo_update(adev, bo_va, false); in amdgpu_vm_handle_moved()
1512 bo_va = list_first_entry(&vm->invalidated, struct amdgpu_bo_va, in amdgpu_vm_handle_moved()
1514 resv = bo_va->base.bo->tbo.base.resv; in amdgpu_vm_handle_moved()
1531 r = amdgpu_vm_bo_update(adev, bo_va, clear); in amdgpu_vm_handle_moved()
1542 bo_va->base.bo->tbo.base.import_attach && in amdgpu_vm_handle_moved()
1543 (!bo_va->base.bo->tbo.resource || in amdgpu_vm_handle_moved()
1544 bo_va->base.bo->tbo.resource->mem_type == TTM_PL_SYSTEM)) in amdgpu_vm_handle_moved()
1545 amdgpu_vm_bo_evicted_user(&bo_va->base); in amdgpu_vm_handle_moved()
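amdgpu_vm_handle_moved() (1493-1545) drains two of those status lists: per-VM BOs on vm->moved are updated without clearing, independently reserved BOs on vm->invalidated are updated under their own reservation object, and dma-buf imports whose backing store is gone are demoted with amdgpu_vm_bo_evicted_user() so user space re-validates them. A sketch of that flow with the status-list locking and most error handling omitted; treating reservation contention as "clear the PTEs" via dma_resv_trylock() is an assumption, since the listing only shows the resv lookup:

	struct amdgpu_bo_va *bo_va;
	struct dma_resv *resv;
	bool clear;
	int r;

	/* per-VM BOs share the root reservation, no extra locking needed */
	while (!list_empty(&vm->moved)) {
		bo_va = list_first_entry(&vm->moved, struct amdgpu_bo_va,
					 base.vm_status);
		r = amdgpu_vm_bo_update(adev, bo_va, false);
		if (r)
			return r;
	}

	while (!list_empty(&vm->invalidated)) {
		bo_va = list_first_entry(&vm->invalidated, struct amdgpu_bo_va,
					 base.vm_status);
		resv = bo_va->base.bo->tbo.base.resv;
		clear = !dma_resv_trylock(resv);	/* assumed fallback */

		r = amdgpu_vm_bo_update(adev, bo_va, clear);
		if (!clear)
			dma_resv_unlock(resv);
		if (r)
			return r;

		/* imports that lost their backing store need user-space validation */
		if (bo_va->base.bo->tbo.base.import_attach &&
		    (!bo_va->base.bo->tbo.resource ||
		     bo_va->base.bo->tbo.resource->mem_type == TTM_PL_SYSTEM))
			amdgpu_vm_bo_evicted_user(&bo_va->base);
	}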
1618 struct amdgpu_bo_va *bo_va; in amdgpu_vm_bo_add() local
1620 bo_va = kzalloc(sizeof(struct amdgpu_bo_va), GFP_KERNEL); in amdgpu_vm_bo_add()
1621 if (bo_va == NULL) { in amdgpu_vm_bo_add()
1624 amdgpu_vm_bo_base_init(&bo_va->base, vm, bo); in amdgpu_vm_bo_add()
1626 bo_va->ref_count = 1; in amdgpu_vm_bo_add()
1627 bo_va->last_pt_update = dma_fence_get_stub(); in amdgpu_vm_bo_add()
1628 INIT_LIST_HEAD(&bo_va->valids); in amdgpu_vm_bo_add()
1629 INIT_LIST_HEAD(&bo_va->invalids); in amdgpu_vm_bo_add()
1632 return bo_va; in amdgpu_vm_bo_add()
1636 bo_va->is_xgmi = true; in amdgpu_vm_bo_add()
1641 return bo_va; in amdgpu_vm_bo_add()
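amdgpu_vm_bo_add() (1618-1641) is where a bo_va comes into existence: allocate it, tie it to its VM and BO through the embedded base, give it a stub fence and empty valids/invalids lists, and flag XGMI-accessible BOs. Reconstructed from the fragments above; the check guarding line 1636 is not visible here, so the amdgpu_dmabuf_is_xgmi_accessible() condition below is an assumption:

struct amdgpu_bo_va *amdgpu_vm_bo_add(struct amdgpu_device *adev,
				       struct amdgpu_vm *vm,
				       struct amdgpu_bo *bo)
{
	struct amdgpu_bo_va *bo_va;

	bo_va = kzalloc(sizeof(struct amdgpu_bo_va), GFP_KERNEL);
	if (bo_va == NULL)
		return NULL;

	amdgpu_vm_bo_base_init(&bo_va->base, vm, bo);

	bo_va->ref_count = 1;
	bo_va->last_pt_update = dma_fence_get_stub();
	INIT_LIST_HEAD(&bo_va->valids);
	INIT_LIST_HEAD(&bo_va->invalids);

	if (!bo)
		return bo_va;

	if (amdgpu_dmabuf_is_xgmi_accessible(adev, bo))	/* assumed condition */
		bo_va->is_xgmi = true;

	return bo_va;
}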
1655 struct amdgpu_bo_va *bo_va, in amdgpu_vm_bo_insert_map() argument
1658 struct amdgpu_vm *vm = bo_va->base.vm; in amdgpu_vm_bo_insert_map()
1659 struct amdgpu_bo *bo = bo_va->base.bo; in amdgpu_vm_bo_insert_map()
1661 mapping->bo_va = bo_va; in amdgpu_vm_bo_insert_map()
1662 list_add(&mapping->list, &bo_va->invalids); in amdgpu_vm_bo_insert_map()
1668 if (amdgpu_vm_is_bo_always_valid(vm, bo) && !bo_va->base.moved) in amdgpu_vm_bo_insert_map()
1669 amdgpu_vm_bo_moved(&bo_va->base); in amdgpu_vm_bo_insert_map()
1671 trace_amdgpu_vm_bo_map(bo_va, mapping); in amdgpu_vm_bo_insert_map()
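amdgpu_vm_bo_insert_map() (1655-1671) is the common helper behind amdgpu_vm_bo_map() and amdgpu_vm_bo_replace_map() below: it links a new mapping to its bo_va and, for always-valid (per-VM) BOs, immediately moves the bo_va to the moved state so the next update writes the new PTEs. Sketch of the visible lines (the interval-tree insertion that sits between them is elided):

	mapping->bo_va = bo_va;
	list_add(&mapping->list, &bo_va->invalids);
	/* interval-tree insert of the address range not shown in the listing */

	if (amdgpu_vm_is_bo_always_valid(vm, bo) && !bo_va->base.moved)
		amdgpu_vm_bo_moved(&bo_va->base);

	trace_amdgpu_vm_bo_map(bo_va, mapping);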
1723 struct amdgpu_bo_va *bo_va, in amdgpu_vm_bo_map() argument
1728 struct amdgpu_bo *bo = bo_va->base.bo; in amdgpu_vm_bo_map()
1729 struct amdgpu_vm *vm = bo_va->base.vm; in amdgpu_vm_bo_map()
1758 amdgpu_vm_bo_insert_map(adev, bo_va, mapping); in amdgpu_vm_bo_map()
1782 struct amdgpu_bo_va *bo_va, in amdgpu_vm_bo_replace_map() argument
1787 struct amdgpu_bo *bo = bo_va->base.bo; in amdgpu_vm_bo_replace_map()
1800 r = amdgpu_vm_bo_clear_mappings(adev, bo_va->base.vm, saddr, size); in amdgpu_vm_bo_replace_map()
1814 amdgpu_vm_bo_insert_map(adev, bo_va, mapping); in amdgpu_vm_bo_replace_map()
1834 struct amdgpu_bo_va *bo_va, in amdgpu_vm_bo_unmap() argument
1838 struct amdgpu_vm *vm = bo_va->base.vm; in amdgpu_vm_bo_unmap()
1843 list_for_each_entry(mapping, &bo_va->valids, list) { in amdgpu_vm_bo_unmap()
1848 if (&mapping->list == &bo_va->valids) { in amdgpu_vm_bo_unmap()
1851 list_for_each_entry(mapping, &bo_va->invalids, list) { in amdgpu_vm_bo_unmap()
1856 if (&mapping->list == &bo_va->invalids) in amdgpu_vm_bo_unmap()
1862 mapping->bo_va = NULL; in amdgpu_vm_bo_unmap()
1863 trace_amdgpu_vm_bo_unmap(bo_va, mapping); in amdgpu_vm_bo_unmap()
1869 bo_va->last_pt_update); in amdgpu_vm_bo_unmap()
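The references at 1843-1869 show how amdgpu_vm_bo_unmap() finds the mapping to drop: search bo_va->valids first, then bo_va->invalids, using the list head itself as the "not found" sentinel. A condensed sketch; the match on mapping->start, the -ENOENT return and the freed-list hand-off are not fully visible above and are assumptions, as is the omitted interval-tree removal:

	struct amdgpu_bo_va_mapping *mapping;
	bool valid = true;

	list_for_each_entry(mapping, &bo_va->valids, list) {
		if (mapping->start == saddr)	/* assumed match condition */
			break;
	}

	if (&mapping->list == &bo_va->valids) {
		valid = false;

		list_for_each_entry(mapping, &bo_va->invalids, list) {
			if (mapping->start == saddr)
				break;
		}

		if (&mapping->list == &bo_va->invalids)
			return -ENOENT;
	}

	list_del(&mapping->list);
	mapping->bo_va = NULL;
	trace_amdgpu_vm_bo_unmap(bo_va, mapping);

	if (valid)
		list_add(&mapping->list, &vm->freed);	/* PTEs still need clearing */
	else
		amdgpu_vm_free_mapping(adev, vm, mapping,
				       bo_va->last_pt_update);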
1925 before->bo_va = tmp->bo_va; in amdgpu_vm_bo_clear_mappings()
1926 list_add(&before->list, &tmp->bo_va->invalids); in amdgpu_vm_bo_clear_mappings()
1936 after->bo_va = tmp->bo_va; in amdgpu_vm_bo_clear_mappings()
1937 list_add(&after->list, &tmp->bo_va->invalids); in amdgpu_vm_bo_clear_mappings()
1956 tmp->bo_va = NULL; in amdgpu_vm_bo_clear_mappings()
1963 struct amdgpu_bo *bo = before->bo_va->base.bo; in amdgpu_vm_bo_clear_mappings()
1970 !before->bo_va->base.moved) in amdgpu_vm_bo_clear_mappings()
1971 amdgpu_vm_bo_moved(&before->bo_va->base); in amdgpu_vm_bo_clear_mappings()
1978 struct amdgpu_bo *bo = after->bo_va->base.bo; in amdgpu_vm_bo_clear_mappings()
1985 !after->bo_va->base.moved) in amdgpu_vm_bo_clear_mappings()
1986 amdgpu_vm_bo_moved(&after->bo_va->base); in amdgpu_vm_bo_clear_mappings()
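In amdgpu_vm_bo_clear_mappings() (1925-1986) a range clear can cut an existing mapping in half: the surviving "before" and "after" pieces inherit the original mapping's bo_va and go onto its invalids list, mappings removed outright lose their bo_va link, and the owning bo_va of a kept piece is nudged to the moved state so the remap gets written. The sketch below condenses several separate loops into the "before" case only; the "after" handling at 1978-1986 is symmetric, and the always-valid test is inferred from line 1668 rather than shown here:

	/* keep the part of the old mapping that precedes the cleared range */
	before->bo_va = tmp->bo_va;
	list_add(&before->list, &tmp->bo_va->invalids);

	/* a mapping that is removed entirely no longer belongs to any bo_va */
	tmp->bo_va = NULL;

	/* make sure the owning bo_va gets a page-table update for the remap */
	if (amdgpu_vm_is_bo_always_valid(vm, before->bo_va->base.bo) &&
	    !before->bo_va->base.moved)
		amdgpu_vm_bo_moved(&before->bo_va->base);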
2029 if (mapping->bo_va && mapping->bo_va->base.bo) { in amdgpu_vm_bo_trace_cs()
2032 bo = mapping->bo_va->base.bo; in amdgpu_vm_bo_trace_cs()
2053 struct amdgpu_bo_va *bo_va) in amdgpu_vm_bo_del() argument
2056 struct amdgpu_bo *bo = bo_va->base.bo; in amdgpu_vm_bo_del()
2057 struct amdgpu_vm *vm = bo_va->base.vm; in amdgpu_vm_bo_del()
2067 for (base = &bo_va->base.bo->vm_bo; *base; in amdgpu_vm_bo_del()
2069 if (*base != &bo_va->base) in amdgpu_vm_bo_del()
2072 *base = bo_va->base.next; in amdgpu_vm_bo_del()
2078 list_del(&bo_va->base.vm_status); in amdgpu_vm_bo_del()
2081 list_for_each_entry_safe(mapping, next, &bo_va->valids, list) { in amdgpu_vm_bo_del()
2084 mapping->bo_va = NULL; in amdgpu_vm_bo_del()
2085 trace_amdgpu_vm_bo_unmap(bo_va, mapping); in amdgpu_vm_bo_del()
2088 list_for_each_entry_safe(mapping, next, &bo_va->invalids, list) { in amdgpu_vm_bo_del()
2092 bo_va->last_pt_update); in amdgpu_vm_bo_del()
2095 dma_fence_put(bo_va->last_pt_update); in amdgpu_vm_bo_del()
2097 if (bo && bo_va->is_xgmi) in amdgpu_vm_bo_del()
2100 kfree(bo_va); in amdgpu_vm_bo_del()
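Teardown in amdgpu_vm_bo_del() (2053-2100) mirrors amdgpu_vm_bo_add(): unlink the bo_va from the BO's vm_bo chain and from its status list, hand still-valid mappings to the VM so their page-table entries get cleared later, free never-written invalid mappings immediately against last_pt_update, then drop the fence and the structure itself. Sketch built from the lines above; locking, interval-tree removal and the PRT/XGMI details are elided, and the vm->freed hand-off is an assumption:

	struct amdgpu_bo_va_mapping *mapping, *next;
	struct amdgpu_vm_bo_base **base;

	if (bo) {
		/* unlink bo_va from the BO's singly linked vm_bo chain */
		for (base = &bo_va->base.bo->vm_bo; *base;
		     base = &(*base)->next) {
			if (*base != &bo_va->base)
				continue;
			*base = bo_va->base.next;
			break;
		}
	}

	list_del(&bo_va->base.vm_status);

	/* valid mappings still have PTEs; queue them so the VM clears them */
	list_for_each_entry_safe(mapping, next, &bo_va->valids, list) {
		list_del(&mapping->list);
		mapping->bo_va = NULL;
		trace_amdgpu_vm_bo_unmap(bo_va, mapping);
		list_add(&mapping->list, &vm->freed);	/* assumed */
	}

	/* invalid mappings were never written; free them right away */
	list_for_each_entry_safe(mapping, next, &bo_va->invalids, list) {
		list_del(&mapping->list);
		amdgpu_vm_free_mapping(adev, vm, mapping,
				       bo_va->last_pt_update);
	}

	dma_fence_put(bo_va->last_pt_update);
	kfree(bo_va);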
2886 struct amdgpu_bo_va *bo_va, *tmp; in amdgpu_debugfs_vm_bo_info() local
2903 list_for_each_entry_safe(bo_va, tmp, &vm->idle, base.vm_status) { in amdgpu_debugfs_vm_bo_info()
2904 if (!bo_va->base.bo) in amdgpu_debugfs_vm_bo_info()
2906 total_idle += amdgpu_bo_print_info(id++, bo_va->base.bo, m); in amdgpu_debugfs_vm_bo_info()
2912 list_for_each_entry_safe(bo_va, tmp, &vm->evicted, base.vm_status) { in amdgpu_debugfs_vm_bo_info()
2913 if (!bo_va->base.bo) in amdgpu_debugfs_vm_bo_info()
2915 total_evicted += amdgpu_bo_print_info(id++, bo_va->base.bo, m); in amdgpu_debugfs_vm_bo_info()
2921 list_for_each_entry_safe(bo_va, tmp, &vm->relocated, base.vm_status) { in amdgpu_debugfs_vm_bo_info()
2922 if (!bo_va->base.bo) in amdgpu_debugfs_vm_bo_info()
2924 total_relocated += amdgpu_bo_print_info(id++, bo_va->base.bo, m); in amdgpu_debugfs_vm_bo_info()
2930 list_for_each_entry_safe(bo_va, tmp, &vm->moved, base.vm_status) { in amdgpu_debugfs_vm_bo_info()
2931 if (!bo_va->base.bo) in amdgpu_debugfs_vm_bo_info()
2933 total_moved += amdgpu_bo_print_info(id++, bo_va->base.bo, m); in amdgpu_debugfs_vm_bo_info()
2939 list_for_each_entry_safe(bo_va, tmp, &vm->invalidated, base.vm_status) { in amdgpu_debugfs_vm_bo_info()
2940 if (!bo_va->base.bo) in amdgpu_debugfs_vm_bo_info()
2942 total_invalidated += amdgpu_bo_print_info(id++, bo_va->base.bo, m); in amdgpu_debugfs_vm_bo_info()
2948 list_for_each_entry_safe(bo_va, tmp, &vm->done, base.vm_status) { in amdgpu_debugfs_vm_bo_info()
2949 if (!bo_va->base.bo) in amdgpu_debugfs_vm_bo_info()
2951 total_done += amdgpu_bo_print_info(id++, bo_va->base.bo, m); in amdgpu_debugfs_vm_bo_info()