Searched refs:tile_to_xe (Results 1 – 25 of 26) sorted by relevance
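
For context before the per-file hits: tile_to_xe() is the xe driver's accessor for going from a tile to its owning device. The defining hit is in xe_device_types.h below, where the macro body is cut off by the listing. A minimal sketch of the pattern, assuming struct xe_tile keeps an xe back-pointer as the call sites suggest (the in-tree macro may additionally preserve const-ness of the passed pointer):

    /*
     * Sketch only: the real definition lives in
     * drivers/gpu/drm/xe/xe_device_types.h (line 67 hit below).
     * Field names here are assumptions inferred from the call sites.
     */
    struct xe_device;

    struct xe_tile {
    	struct xe_device *xe;	/* back-pointer to the owning device */
    	u8 id;			/* tile index within that device */
    	/* ... */
    };

    #define tile_to_xe(tile__) ((tile__)->xe)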


/linux-6.12.1/drivers/gpu/drm/xe/
xe_pcode.c
49 drm_err(&tile_to_xe(tile)->drm, "PCODE Mailbox failed: %d %s", err, in pcode_mailbox_status()
64 if (tile_to_xe(tile)->info.skip_pcode) in __pcode_mailbox_rw()
92 if (tile_to_xe(tile)->info.skip_pcode) in pcode_mailbox_rw()
194 drm_err(&tile_to_xe(tile)->drm, in xe_pcode_request()
236 if (!tile_to_xe(tile)->info.has_llc) in xe_pcode_init_min_freq_table()
310 drmm_mutex_init(&tile_to_xe(tile)->drm, &tile->pcode.lock); in xe_pcode_init()
xe_ggtt.c
142 u16 pat_index = tile_to_xe(ggtt->tile)->pat.idx[XE_CACHE_WB]; in xe_ggtt_clear()
214 struct xe_device *xe = tile_to_xe(ggtt->tile); in xe_ggtt_init_early()
285 struct xe_device *xe = tile_to_xe(ggtt->tile); in ggtt_node_remove()
314 struct xe_device *xe = tile_to_xe(node->ggtt->tile); in ggtt_node_remove_work_func()
335 xe = tile_to_xe(ggtt->tile); in xe_ggtt_node_remove()
355 struct xe_device *xe = tile_to_xe(ggtt->tile); in xe_ggtt_init()
400 struct xe_device *xe = tile_to_xe(ggtt->tile); in xe_ggtt_invalidate()
593 u16 pat_index = tile_to_xe(ggtt->tile)->pat.idx[cache_mode]; in xe_ggtt_map_bo()
627 xe_pm_runtime_get_noresume(tile_to_xe(ggtt->tile)); in __xe_ggtt_insert_bo_at()
651 xe_pm_runtime_put(tile_to_xe(ggtt->tile)); in __xe_ggtt_insert_bo_at()
[all …]
xe_sa.c
36 struct xe_device *xe = tile_to_xe(tile); in xe_sa_bo_manager_init()
41 struct xe_sa_manager *sa_manager = drmm_kzalloc(&tile_to_xe(tile)->drm, in xe_sa_bo_manager_init()
99 struct xe_device *xe = tile_to_xe(sa_manager->bo->tile); in xe_sa_bo_flush_write()
xe_irq.c
265 struct xe_device *xe = tile_to_xe(tile); in pick_engine_gt()
293 struct xe_device *xe = tile_to_xe(tile); in gt_irq_handler()
447 drm_dbg(&tile_to_xe(tile)->drm, in dg1_irq_handler()
511 HAS_HECI_GSCFI(tile_to_xe(tile))) { in gt_irq_reset()
525 xelp_intr_disable(tile_to_xe(tile)); in xelp_irq_reset()
529 if (IS_SRIOV_VF(tile_to_xe(tile))) in xelp_irq_reset()
538 dg1_intr_disable(tile_to_xe(tile)); in dg1_irq_reset()
542 if (IS_SRIOV_VF(tile_to_xe(tile))) in dg1_irq_reset()
xe_tile.c
87 struct drm_device *drm = &tile_to_xe(tile)->drm; in xe_tile_alloc()
135 struct xe_device *xe = tile_to_xe(tile); in tile_ttm_mgr_init()
xe_tile_sysfs.c
34 struct xe_device *xe = tile_to_xe(tile); in xe_tile_sysfs_init()
xe_pt.c
59 struct xe_device *xe = tile_to_xe(tile); in __xe_pt_empty_pte()
563 if (GRAPHICS_VERx100(tile_to_xe(xe_walk->tile)) >= 1250 && level == 1 && in xe_pt_stage_bind_entry()
606 struct xe_device *xe = tile_to_xe(tile); in xe_pt_stage_bind()
781 xe_map_memset(tile_to_xe(xe_walk->tile), &xe_child->bo->vmap, in xe_pt_zap_ptes_entry()
844 xe_map_wr(tile_to_xe(tile), map, (qword_ofs + i) * in xe_vm_populate_pgtable()
1543 xe_map_wr(tile_to_xe(tile), map, (qword_ofs + i) * in xe_migrate_clear_pgtable_callback()
1659 err = vma_reserve_fences(tile_to_xe(tile), vma); in bind_op_prepare()
1668 xe_vm_dbg_print_entries(tile_to_xe(tile), pt_op->entries, in bind_op_prepare()
1733 err = vma_reserve_fences(tile_to_xe(tile), vma); in unbind_op_prepare()
1739 xe_vm_dbg_print_entries(tile_to_xe(tile), pt_op->entries, in unbind_op_prepare()
[all …]
xe_vram_freq.c
106 struct xe_device *xe = tile_to_xe(tile); in xe_vram_freq_sysfs_init()
xe_assert.h
147 xe_assert_msg(tile_to_xe(__tile), condition, "tile: %u VRAM %s\n" msg, \
xe_migrate.c
187 struct xe_device *xe = tile_to_xe(tile); in xe_migrate_prepare_vm()
397 struct xe_device *xe = tile_to_xe(tile); in xe_migrate_init()
487 struct xe_device *xe = tile_to_xe(m->tile); in xe_migrate_res_sizes()
533 *L0_ofs = xe_migrate_vram_ofs(tile_to_xe(m->tile), in pte_update_size()
564 struct xe_device *xe = tile_to_xe(m->tile); in emit_pte()
1212 ppgtt_ofs = xe_migrate_vram_ofs(tile_to_xe(tile), in write_pgtable()
1314 struct xe_device *xe = tile_to_xe(tile); in __xe_migrate_update_pgtables()
xe_lmtt.c
48 return tile_to_xe(lmtt_to_tile(lmtt)); in lmtt_to_xe()
189 struct xe_device *xe = tile_to_xe(tile); in lmtt_setup_dir_ptr()
xe_device.h
65 if (drm_WARN_ON(&tile_to_xe(tile)->drm, gt_id >= XE_MAX_GT_PER_TILE)) in xe_tile_get_gt()
xe_device_types.h
67 #define tile_to_xe(tile__) \ macro
xe_memirq.c
35 return tile_to_xe(memirq_to_tile(memirq)); in memirq_to_xe()
xe_bo.h
26 #define XE_BO_FLAG_VRAM_IF_DGFX(tile) (IS_DGFX(tile_to_xe(tile)) ? \
xe_gt_sriov_pf_policy.c
41 struct xe_device *xe = tile_to_xe(tile); in pf_send_policy_klvs()
xe_vram.c
220 struct xe_device *xe = tile_to_xe(tile); in tile_vram_size()
xe_gt_sriov_vf.c
571 xe_tile_assert(tile, IS_SRIOV_VF(tile_to_xe(tile))); in deballoon_ggtt()
587 struct xe_device *xe = tile_to_xe(tile); in xe_gt_sriov_vf_prepare_ggtt()
xe_gsc_proxy.c
412 struct xe_device *xe = tile_to_xe(tile); in xe_gsc_proxy_init()
xe_ttm_vram_mgr.c
362 struct xe_device *xe = tile_to_xe(tile); in xe_ttm_vram_mgr_init()
xe_gt.c
75 gt = drmm_kzalloc(&tile_to_xe(tile)->drm, sizeof(*gt), GFP_KERNEL); in xe_gt_alloc()
xe_gt_sriov_pf_config.c
76 struct xe_device *xe = tile_to_xe(tile); in pf_send_vf_cfg_klvs()
/linux-6.12.1/drivers/gpu/drm/xe/display/
xe_fb_pin.c
197 xe_pm_runtime_get_noresume(tile_to_xe(ggtt->tile)); in __xe_pin_fb_vma_ggtt()
262 xe_pm_runtime_put(tile_to_xe(ggtt->tile)); in __xe_pin_fb_vma_ggtt()
/linux-6.12.1/drivers/gpu/drm/xe/tests/
xe_bo.c
95 offset = xe_device_ccs_bytes(tile_to_xe(tile), bo->size); in ccs_test_migrate()
xe_migrate.c
75 struct xe_device *xe = tile_to_xe(m->tile); in test_copy()
186 struct xe_device *xe = tile_to_xe(tile); in xe_migrate_sanity_test()
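
Taken together, the hits fall into a handful of recurring shapes: caching the device pointer at the top of a function, gating on device capabilities via tile_to_xe(tile)->info, and reaching the DRM core object via &tile_to_xe(tile)->drm for logging and warnings. A hedged composite of those shapes (example_tile_user() is hypothetical; the individual expressions are lifted from the xe_irq.c and xe_pcode.c hits above):

    /* Hypothetical caller, stitched together from the hit patterns
     * above; no single in-tree function looks exactly like this. */
    static void example_tile_user(struct xe_tile *tile)
    {
    	struct xe_device *xe = tile_to_xe(tile);	/* cache the device once */

    	if (xe->info.skip_pcode)			/* capability gate (xe_pcode.c:64) */
    		return;

    	drm_dbg(&xe->drm, "tile %u ready\n", tile->id);	/* DRM logging (cf. xe_irq.c:447) */
    }

The lmtt_to_xe() and memirq_to_xe() hits show the same idea one level up: a subsystem embedded in a tile first recovers its tile (lmtt_to_tile(), memirq_to_tile()) and then applies tile_to_xe().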

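A second cluster of hits exists purely to reach &tile_to_xe(tile)->drm because the DRM-managed lifetime helpers are keyed on struct drm_device, so per-tile setup must hop through the device. A minimal sketch of that shape (example_tile_init() is hypothetical; drmm_kzalloc() and drmm_mutex_init() are the real DRM helpers seen in the xe_gt.c:75 and xe_pcode.c:310 hits):

    #include <drm/drm_managed.h>

    /* Hypothetical per-tile init modeled on the xe_gt.c and xe_pcode.c
     * hits: the allocation and the mutex are both torn down automatically
     * with the drm_device obtained via tile_to_xe(). */
    static int example_tile_init(struct xe_tile *tile)
    {
    	struct drm_device *drm = &tile_to_xe(tile)->drm;
    	void *state;

    	state = drmm_kzalloc(drm, 64, GFP_KERNEL);	/* freed at drm teardown */
    	if (!state)
    		return -ENOMEM;

    	return drmm_mutex_init(drm, &tile->pcode.lock);	/* destroyed at teardown */
    }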