
Searched refs:rbo (Results 1 – 14 of 14) sorted by relevance

/linux-6.12.1/drivers/gpu/drm/amd/display/amdgpu_dm/
amdgpu_dm_wb.c
89 struct amdgpu_bo *rbo; in amdgpu_dm_wb_prepare_job() local
100 rbo = gem_to_amdgpu_bo(obj); in amdgpu_dm_wb_prepare_job()
101 adev = amdgpu_ttm_adev(rbo->tbo.bdev); in amdgpu_dm_wb_prepare_job()
103 r = amdgpu_bo_reserve(rbo, true); in amdgpu_dm_wb_prepare_job()
109 r = dma_resv_reserve_fences(rbo->tbo.base.resv, 1); in amdgpu_dm_wb_prepare_job()
115 domain = amdgpu_display_supported_domains(adev, rbo->flags); in amdgpu_dm_wb_prepare_job()
117 rbo->flags |= AMDGPU_GEM_CREATE_VRAM_CONTIGUOUS; in amdgpu_dm_wb_prepare_job()
118 r = amdgpu_bo_pin(rbo, domain); in amdgpu_dm_wb_prepare_job()
125 r = amdgpu_ttm_alloc_gart(&rbo->tbo); in amdgpu_dm_wb_prepare_job()
127 DRM_ERROR("%p bind failed\n", rbo); in amdgpu_dm_wb_prepare_job()
[all …]
amdgpu_dm_plane.c
928 struct amdgpu_bo *rbo; in amdgpu_dm_plane_helper_prepare_fb() local
945 rbo = gem_to_amdgpu_bo(obj); in amdgpu_dm_plane_helper_prepare_fb()
946 adev = amdgpu_ttm_adev(rbo->tbo.bdev); in amdgpu_dm_plane_helper_prepare_fb()
947 r = amdgpu_bo_reserve(rbo, true); in amdgpu_dm_plane_helper_prepare_fb()
953 r = dma_resv_reserve_fences(rbo->tbo.base.resv, 1); in amdgpu_dm_plane_helper_prepare_fb()
960 domain = amdgpu_display_supported_domains(adev, rbo->flags); in amdgpu_dm_plane_helper_prepare_fb()
964 rbo->flags |= AMDGPU_GEM_CREATE_VRAM_CONTIGUOUS; in amdgpu_dm_plane_helper_prepare_fb()
965 r = amdgpu_bo_pin(rbo, domain); in amdgpu_dm_plane_helper_prepare_fb()
972 r = amdgpu_ttm_alloc_gart(&rbo->tbo); in amdgpu_dm_plane_helper_prepare_fb()
974 DRM_ERROR("%p bind failed\n", rbo); in amdgpu_dm_plane_helper_prepare_fb()
[all …]
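
The amdgpu_dm_wb.c and amdgpu_dm_plane.c hits above share one prepare-fb/prepare-job pattern: translate the GEM object to an amdgpu_bo, reserve it, reserve a fence slot on its reservation object, pin it in a display-supported domain, and bind it into the GART. A condensed sketch of that sequence follows; error handling is trimmed, the conditional AMDGPU_GEM_CREATE_VRAM_CONTIGUOUS handling is omitted, the calls are taken from the matched lines, and the helper name is illustrative, not from the kernel:

/* Hypothetical helper name; the body condenses the sequence matched above. */
static int prepare_scanout_bo(struct drm_gem_object *obj)
{
        struct amdgpu_bo *rbo = gem_to_amdgpu_bo(obj);
        struct amdgpu_device *adev = amdgpu_ttm_adev(rbo->tbo.bdev);
        uint32_t domain;
        int r;

        r = amdgpu_bo_reserve(rbo, true);                   /* lock the BO */
        if (r)
                return r;

        r = dma_resv_reserve_fences(rbo->tbo.base.resv, 1); /* room for the flip fence */
        if (r)
                goto error_unlock;

        domain = amdgpu_display_supported_domains(adev, rbo->flags);
        r = amdgpu_bo_pin(rbo, domain);                     /* keep it resident for scanout */
        if (r)
                goto error_unlock;

        r = amdgpu_ttm_alloc_gart(&rbo->tbo);               /* bind into the GART */
        if (r) {
                DRM_ERROR("%p bind failed\n", rbo);
                goto error_unpin;
        }

        amdgpu_bo_unreserve(rbo);
        return 0;

error_unpin:
        amdgpu_bo_unpin(rbo);
error_unlock:
        amdgpu_bo_unreserve(rbo);
        return r;
}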
/linux-6.12.1/drivers/gpu/drm/radeon/
radeon_object.c
76 void radeon_ttm_placement_from_domain(struct radeon_bo *rbo, u32 domain) in radeon_ttm_placement_from_domain() argument
80 rbo->placement.placement = rbo->placements; in radeon_ttm_placement_from_domain()
85 if ((rbo->flags & RADEON_GEM_NO_CPU_ACCESS) && in radeon_ttm_placement_from_domain()
86 rbo->rdev->mc.visible_vram_size < rbo->rdev->mc.real_vram_size) { in radeon_ttm_placement_from_domain()
87 rbo->placements[c].fpfn = in radeon_ttm_placement_from_domain()
88 rbo->rdev->mc.visible_vram_size >> PAGE_SHIFT; in radeon_ttm_placement_from_domain()
89 rbo->placements[c].mem_type = TTM_PL_VRAM; in radeon_ttm_placement_from_domain()
90 rbo->placements[c++].flags = 0; in radeon_ttm_placement_from_domain()
93 rbo->placements[c].fpfn = 0; in radeon_ttm_placement_from_domain()
94 rbo->placements[c].mem_type = TTM_PL_VRAM; in radeon_ttm_placement_from_domain()
[all …]
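
radeon_ttm_placement_from_domain() converts a RADEON_GEM_DOMAIN_* mask into the TTM placements stored in rbo->placements. The VRAM branch matched above can be paraphrased as follows; the GTT and CPU branches, and anything beyond what the hits show, are omitted:

unsigned int c = 0;

rbo->placement.placement = rbo->placements;
if (domain & RADEON_GEM_DOMAIN_VRAM) {
        /* BOs the CPU never maps may live above the CPU-visible window. */
        if ((rbo->flags & RADEON_GEM_NO_CPU_ACCESS) &&
            rbo->rdev->mc.visible_vram_size < rbo->rdev->mc.real_vram_size) {
                rbo->placements[c].fpfn =
                        rbo->rdev->mc.visible_vram_size >> PAGE_SHIFT;
                rbo->placements[c].mem_type = TTM_PL_VRAM;
                rbo->placements[c++].flags = 0;
        }
        /* Fallback placement covering all of VRAM. */
        rbo->placements[c].fpfn = 0;
        rbo->placements[c].mem_type = TTM_PL_VRAM;
        rbo->placements[c++].flags = 0;
}
rbo->placement.num_placement = c;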
radeon_fbdev.c
43 struct radeon_bo *rbo = gem_to_radeon_bo(gobj); in radeon_fbdev_destroy_pinned_object() local
46 ret = radeon_bo_reserve(rbo, false); in radeon_fbdev_destroy_pinned_object()
48 radeon_bo_kunmap(rbo); in radeon_fbdev_destroy_pinned_object()
49 radeon_bo_unpin(rbo); in radeon_fbdev_destroy_pinned_object()
50 radeon_bo_unreserve(rbo); in radeon_fbdev_destroy_pinned_object()
62 struct radeon_bo *rbo = NULL; in radeon_fbdev_create_pinned_object() local
88 rbo = gem_to_radeon_bo(gobj); in radeon_fbdev_create_pinned_object()
107 ret = radeon_bo_set_tiling_flags(rbo, in radeon_fbdev_create_pinned_object()
114 ret = radeon_bo_reserve(rbo, false); in radeon_fbdev_create_pinned_object()
118 ret = radeon_bo_pin_restricted(rbo, RADEON_GEM_DOMAIN_VRAM, in radeon_fbdev_create_pinned_object()
[all …]
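
The fbdev hits cover the lifetime of the pinned framebuffer BO. The teardown order in radeon_fbdev_destroy_pinned_object() is fully visible in the match: take the reservation, drop the kernel mapping, unpin, unlock. A sketch; the trailing GEM reference drop is assumed from context rather than shown in the hits:

struct radeon_bo *rbo = gem_to_radeon_bo(gobj);
int ret;

ret = radeon_bo_reserve(rbo, false);
if (likely(ret == 0)) {
        radeon_bo_kunmap(rbo);          /* undo the kernel mapping used by fbdev */
        radeon_bo_unpin(rbo);           /* allow the BO to move or be evicted again */
        radeon_bo_unreserve(rbo);
}
drm_gem_object_put(gobj);               /* assumed: drop the final GEM reference */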
radeon_gem.c
198 struct radeon_bo *rbo = gem_to_radeon_bo(obj); in radeon_gem_object_open() local
199 struct radeon_device *rdev = rbo->rdev; in radeon_gem_object_open()
210 r = radeon_bo_reserve(rbo, false); in radeon_gem_object_open()
215 bo_va = radeon_vm_bo_find(vm, rbo); in radeon_gem_object_open()
217 bo_va = radeon_vm_bo_add(rdev, vm, rbo); in radeon_gem_object_open()
221 radeon_bo_unreserve(rbo); in radeon_gem_object_open()
229 struct radeon_bo *rbo = gem_to_radeon_bo(obj); in radeon_gem_object_close() local
230 struct radeon_device *rdev = rbo->rdev; in radeon_gem_object_close()
241 r = radeon_bo_reserve(rbo, true); in radeon_gem_object_close()
247 bo_va = radeon_vm_bo_find(vm, rbo); in radeon_gem_object_close()
[all …]
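
radeon_gem_object_open()/close() maintain the per-VM mapping (radeon_bo_va) for each GEM handle. The open path matched above reserves the BO, then reuses an existing bo_va or adds one. A sketch; the per-file radeon_vm lookup, the hardware-generation check, and error handling are abbreviated since they are not part of the matched lines:

struct radeon_bo *rbo = gem_to_radeon_bo(obj);
struct radeon_device *rdev = rbo->rdev;
struct radeon_bo_va *bo_va;
int r;

/* vm: the per-file radeon_vm, taken from the DRM file private data. */
r = radeon_bo_reserve(rbo, false);
if (r)
        return r;

bo_va = radeon_vm_bo_find(vm, rbo);
if (!bo_va)
        bo_va = radeon_vm_bo_add(rdev, vm, rbo);    /* first open: create the mapping */
/* (an existing mapping just gets its reference count bumped in the real function) */

radeon_bo_unreserve(rbo);
return 0;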
radeon_ttm.c
92 struct radeon_bo *rbo; in radeon_evict_flags() local
99 rbo = container_of(bo, struct radeon_bo, tbo); in radeon_evict_flags()
102 if (rbo->rdev->ring[radeon_copy_ring_index(rbo->rdev)].ready == false) in radeon_evict_flags()
103 radeon_ttm_placement_from_domain(rbo, RADEON_GEM_DOMAIN_CPU); in radeon_evict_flags()
104 else if (rbo->rdev->mc.visible_vram_size < rbo->rdev->mc.real_vram_size && in radeon_evict_flags()
105 bo->resource->start < (rbo->rdev->mc.visible_vram_size >> PAGE_SHIFT)) { in radeon_evict_flags()
106 unsigned fpfn = rbo->rdev->mc.visible_vram_size >> PAGE_SHIFT; in radeon_evict_flags()
114 radeon_ttm_placement_from_domain(rbo, RADEON_GEM_DOMAIN_VRAM | in radeon_evict_flags()
116 for (i = 0; i < rbo->placement.num_placement; i++) { in radeon_evict_flags()
117 if (rbo->placements[i].mem_type == TTM_PL_VRAM) { in radeon_evict_flags()
[all …]
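
radeon_evict_flags() picks where an evicted BO should go. The matched lines show the decision: with no usable copy ring the BO goes to a CPU placement, while a BO sitting in CPU-visible VRAM is first retried above the visible window (or in GTT) before being pushed out entirely. A paraphrase of that body; surrounding checks are trimmed:

struct radeon_bo *rbo = container_of(bo, struct radeon_bo, tbo);

if (!rbo->rdev->ring[radeon_copy_ring_index(rbo->rdev)].ready) {
        /* No copy ring available: fall back to a CPU placement. */
        radeon_ttm_placement_from_domain(rbo, RADEON_GEM_DOMAIN_CPU);
} else if (rbo->rdev->mc.visible_vram_size < rbo->rdev->mc.real_vram_size &&
           bo->resource->start < (rbo->rdev->mc.visible_vram_size >> PAGE_SHIFT)) {
        /* BO is in CPU-visible VRAM: prefer the invisible part of VRAM
         * (or GTT) so other scanout BOs are not pushed out of the window.
         */
        unsigned int fpfn = rbo->rdev->mc.visible_vram_size >> PAGE_SHIFT;
        unsigned int i;

        radeon_ttm_placement_from_domain(rbo, RADEON_GEM_DOMAIN_VRAM |
                                              RADEON_GEM_DOMAIN_GTT);
        for (i = 0; i < rbo->placement.num_placement; i++)
                if (rbo->placements[i].mem_type == TTM_PL_VRAM &&
                    rbo->placements[i].fpfn < fpfn)
                        rbo->placements[i].fpfn = fpfn;
} else {
        radeon_ttm_placement_from_domain(rbo, RADEON_GEM_DOMAIN_GTT);
}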
radeon_legacy_crtc.c
382 struct radeon_bo *rbo; in radeon_crtc_do_set_base() local
425 rbo = gem_to_radeon_bo(obj); in radeon_crtc_do_set_base()
427 r = radeon_bo_reserve(rbo, false); in radeon_crtc_do_set_base()
431 r = radeon_bo_pin_restricted(rbo, RADEON_GEM_DOMAIN_VRAM, 1 << 27, in radeon_crtc_do_set_base()
434 radeon_bo_unreserve(rbo); in radeon_crtc_do_set_base()
454 nsize = radeon_bo_size(rbo); in radeon_crtc_do_set_base()
464 radeon_bo_get_tiling_flags(rbo, &tiling_flags, NULL); in radeon_crtc_do_set_base()
465 radeon_bo_unreserve(rbo); in radeon_crtc_do_set_base()
559 rbo = gem_to_radeon_bo(fb->obj[0]); in radeon_crtc_do_set_base()
560 r = radeon_bo_reserve(rbo, false); in radeon_crtc_do_set_base()
[all …]
atombios_crtc.c
1143 struct radeon_bo *rbo; in dce4_crtc_do_set_base() local
1167 rbo = gem_to_radeon_bo(obj); in dce4_crtc_do_set_base()
1168 r = radeon_bo_reserve(rbo, false); in dce4_crtc_do_set_base()
1173 fb_location = radeon_bo_gpu_offset(rbo); in dce4_crtc_do_set_base()
1175 r = radeon_bo_pin(rbo, RADEON_GEM_DOMAIN_VRAM, &fb_location); in dce4_crtc_do_set_base()
1177 radeon_bo_unreserve(rbo); in dce4_crtc_do_set_base()
1182 radeon_bo_get_tiling_flags(rbo, &tiling_flags, NULL); in dce4_crtc_do_set_base()
1183 radeon_bo_unreserve(rbo); in dce4_crtc_do_set_base()
1441 rbo = gem_to_radeon_bo(fb->obj[0]); in dce4_crtc_do_set_base()
1442 r = radeon_bo_reserve(rbo, false); in dce4_crtc_do_set_base()
[all …]
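
The legacy and atombios set_base hits above have the same shape: pin the new framebuffer BO in VRAM, read back its GPU offset and tiling flags, program the CRTC, and later unpin the previously scanned-out BO. The legacy path restricts the pin to the low 128 MiB via radeon_bo_pin_restricted(..., 1 << 27, ...). A combined sketch; obj and old_fb are illustrative names, and register programming is elided:

struct radeon_bo *rbo = gem_to_radeon_bo(obj);
uint64_t fb_location;
uint32_t tiling_flags;
int r;

r = radeon_bo_reserve(rbo, false);
if (unlikely(r != 0))
        return r;

r = radeon_bo_pin(rbo, RADEON_GEM_DOMAIN_VRAM, &fb_location);
if (unlikely(r != 0)) {
        radeon_bo_unreserve(rbo);
        return -EINVAL;
}
radeon_bo_get_tiling_flags(rbo, &tiling_flags, NULL);
radeon_bo_unreserve(rbo);

/* ... program the CRTC with fb_location and tiling_flags ... */

/* When switching away from an old framebuffer, drop its pin. */
rbo = gem_to_radeon_bo(old_fb->obj[0]);
r = radeon_bo_reserve(rbo, false);
if (likely(r == 0)) {
        radeon_bo_unpin(rbo);
        radeon_bo_unreserve(rbo);
}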
radeon_uvd.c
304 void radeon_uvd_force_into_uvd_segment(struct radeon_bo *rbo, in radeon_uvd_force_into_uvd_segment() argument
309 for (i = 0; i < rbo->placement.num_placement; ++i) { in radeon_uvd_force_into_uvd_segment()
310 rbo->placements[i].fpfn = 0 >> PAGE_SHIFT; in radeon_uvd_force_into_uvd_segment()
311 rbo->placements[i].lpfn = (256 * 1024 * 1024) >> PAGE_SHIFT; in radeon_uvd_force_into_uvd_segment()
319 if (rbo->placement.num_placement > 1) in radeon_uvd_force_into_uvd_segment()
323 rbo->placements[1] = rbo->placements[0]; in radeon_uvd_force_into_uvd_segment()
324 rbo->placements[1].fpfn += (256 * 1024 * 1024) >> PAGE_SHIFT; in radeon_uvd_force_into_uvd_segment()
325 rbo->placements[1].lpfn += (256 * 1024 * 1024) >> PAGE_SHIFT; in radeon_uvd_force_into_uvd_segment()
326 rbo->placement.num_placement++; in radeon_uvd_force_into_uvd_segment()
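
radeon_uvd_force_into_uvd_segment() rewrites the BO's placements so UVD buffers stay inside a 256 MiB segment; if only one placement exists, a second window covering the next 256 MiB is appended as a fallback. Paraphrased from the matched lines; the early-return conditions between lines 311 and 323 are abbreviated:

int i;

/* Clamp every existing placement to the first 256 MiB. */
for (i = 0; i < rbo->placement.num_placement; ++i) {
        rbo->placements[i].fpfn = 0;
        rbo->placements[i].lpfn = (256 * 1024 * 1024) >> PAGE_SHIFT;
}

/* Nothing more to do if there is already more than one placement. */
if (rbo->placement.num_placement > 1)
        return;

/* Otherwise add a second window right above the first one. */
rbo->placements[1] = rbo->placements[0];
rbo->placements[1].fpfn += (256 * 1024 * 1024) >> PAGE_SHIFT;
rbo->placements[1].lpfn += (256 * 1024 * 1024) >> PAGE_SHIFT;
rbo->placement.num_placement++;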
radeon.h
1684 void radeon_uvd_force_into_uvd_segment(struct radeon_bo *rbo,
2805 extern void radeon_ttm_placement_from_domain(struct radeon_bo *rbo, u32 domain);
/linux-6.12.1/drivers/gpu/drm/amd/amdgpu/
amdgpu_vkms.c
305 struct amdgpu_bo *rbo; in amdgpu_vkms_prepare_fb() local
321 rbo = gem_to_amdgpu_bo(obj); in amdgpu_vkms_prepare_fb()
322 adev = amdgpu_ttm_adev(rbo->tbo.bdev); in amdgpu_vkms_prepare_fb()
324 r = amdgpu_bo_reserve(rbo, true); in amdgpu_vkms_prepare_fb()
330 r = dma_resv_reserve_fences(rbo->tbo.base.resv, 1); in amdgpu_vkms_prepare_fb()
337 domain = amdgpu_display_supported_domains(adev, rbo->flags); in amdgpu_vkms_prepare_fb()
341 rbo->flags |= AMDGPU_GEM_CREATE_VRAM_CONTIGUOUS; in amdgpu_vkms_prepare_fb()
342 r = amdgpu_bo_pin(rbo, domain); in amdgpu_vkms_prepare_fb()
349 r = amdgpu_ttm_alloc_gart(&rbo->tbo); in amdgpu_vkms_prepare_fb()
351 DRM_ERROR("%p bind failed\n", rbo); in amdgpu_vkms_prepare_fb()
[all …]
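
amdgpu_vkms_prepare_fb() repeats the reserve / fence-slot / pin / GART-bind sequence sketched after the amdgpu_dm results above, just for the virtual KMS output.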
amdgpu_display.c
685 struct amdgpu_bo *rbo; in extract_render_dcc_offset() local
690 rbo = gem_to_amdgpu_bo(obj); in extract_render_dcc_offset()
691 r = amdgpu_bo_reserve(rbo, false); in extract_render_dcc_offset()
700 r = amdgpu_bo_get_metadata(rbo, metadata, sizeof(metadata), &size, NULL); in extract_render_dcc_offset()
701 amdgpu_bo_unreserve(rbo); in extract_render_dcc_offset()
1166 struct amdgpu_bo *rbo; in amdgpu_display_get_fb_info() local
1176 rbo = gem_to_amdgpu_bo(amdgpu_fb->base.obj[0]); in amdgpu_display_get_fb_info()
1177 r = amdgpu_bo_reserve(rbo, false); in amdgpu_display_get_fb_info()
1186 amdgpu_bo_get_tiling_flags(rbo, tiling_flags); in amdgpu_display_get_fb_info()
1187 *tmz_surface = amdgpu_bo_encrypted(rbo); in amdgpu_display_get_fb_info()
[all …]
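
Both amdgpu_display.c hits are read-only queries on a framebuffer BO and follow a short reserve/query/unreserve pattern. A sketch of the tiling/TMZ query; the surrounding helper, its -ERESTARTSYS handling, and the null checks on the out parameters are illustrative simplifications:

struct amdgpu_bo *rbo = gem_to_amdgpu_bo(amdgpu_fb->base.obj[0]);
int r;

r = amdgpu_bo_reserve(rbo, false);
if (r)
        return r;

if (tiling_flags)
        amdgpu_bo_get_tiling_flags(rbo, tiling_flags);  /* hardware tiling metadata */
if (tmz_surface)
        *tmz_surface = amdgpu_bo_encrypted(rbo);        /* TMZ (encrypted) surface? */

amdgpu_bo_unreserve(rbo);
return 0;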
/linux-6.12.1/drivers/gpu/drm/qxl/
qxl_release.c
284 struct qxl_bo **rbo) in qxl_alloc_release_reserved() argument
310 if (rbo) in qxl_alloc_release_reserved()
311 *rbo = NULL; in qxl_alloc_release_reserved()
341 if (rbo) in qxl_alloc_release_reserved()
342 *rbo = bo; in qxl_alloc_release_reserved()
qxl_drv.h
377 struct qxl_bo **rbo);
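
In the qxl hits, rbo is not a buffer object itself but an optional out parameter of qxl_alloc_release_reserved(): it is cleared up front and only set once the release's backing BO exists. A caller-side sketch; the other arguments and their order are illustrative rather than taken from the matched lines, so consult the full prototype in qxl_drv.h:

struct qxl_release *release;
struct qxl_bo *bo = NULL;
int ret;

/* Passing &bo asks for the release's backing BO; passing NULL skips it. */
ret = qxl_alloc_release_reserved(qdev, size, QXL_RELEASE_DRAWABLE,
                                 &release, &bo);    /* argument list illustrative */
if (ret)
        return ret;
/* ... command setup then uses release and, if requested, bo ... */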