/linux-6.12.1/drivers/gpu/drm/xe/

xe_irq.c
    43   xe_mmio_write32(mmio, reg, 0xffffffff);  in assert_iir_is_zero()
    45   xe_mmio_write32(mmio, reg, 0xffffffff);  in assert_iir_is_zero()
    63   xe_mmio_write32(mmio, IER(irqregs), bits);  in unmask_and_enable()
    64   xe_mmio_write32(mmio, IMR(irqregs), ~bits);  in unmask_and_enable()
    75   xe_mmio_write32(mmio, IMR(irqregs), ~0);  in mask_and_disable()
    79   xe_mmio_write32(mmio, IER(irqregs), 0);  in mask_and_disable()
    82   xe_mmio_write32(mmio, IIR(irqregs), ~0);  in mask_and_disable()
    84   xe_mmio_write32(mmio, IIR(irqregs), ~0);  in mask_and_disable()
    92   xe_mmio_write32(mmio, GFX_MSTR_IRQ, 0);  in xelp_intr_disable()
    114  xe_mmio_write32(mmio, IIR(GU_MISC_IRQ_OFFSET), iir);  in gu_misc_irq_ack()
    [all …]
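The unmask_and_enable() and mask_and_disable() hits above follow the usual IMR/IER/IIR bring-up and tear-down sequence for one interrupt bank. The sketch below reconstructs that sequence from the listed writes only; the IMR/IER/IIR macros, the struct xe_gt handle named mmio, and the posting reads through xe_mmio_read32() are assumptions based on the surrounding driver rather than a copy of the real functions.

    /* Sketch only: enable programs IER with the wanted bits and unmasks them
     * in IMR; disable masks everything, clears IER, then clears IIR twice
     * because the IIR bank is double buffered. */
    static void sketch_unmask_and_enable(struct xe_gt *mmio, u32 irqregs, u32 bits)
    {
            xe_mmio_write32(mmio, IER(irqregs), bits);
            xe_mmio_write32(mmio, IMR(irqregs), ~bits);
            xe_mmio_read32(mmio, IMR(irqregs));     /* posting read */
    }

    static void sketch_mask_and_disable(struct xe_gt *mmio, u32 irqregs)
    {
            xe_mmio_write32(mmio, IMR(irqregs), ~0);
            xe_mmio_write32(mmio, IER(irqregs), 0);
            xe_mmio_write32(mmio, IIR(irqregs), ~0);
            xe_mmio_read32(mmio, IIR(irqregs));
            xe_mmio_write32(mmio, IIR(irqregs), ~0);
            xe_mmio_read32(mmio, IIR(irqregs));
    }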

xe_gt_idle.c
    127  xe_mmio_write32(gt, MEDIA_POWERGATE_IDLE_HYSTERESIS, 25);  in xe_gt_idle_enable_pg()
    128  xe_mmio_write32(gt, RENDER_POWERGATE_IDLE_HYSTERESIS, 25);  in xe_gt_idle_enable_pg()
    131  xe_mmio_write32(gt, POWERGATE_ENABLE, pg_enable);  in xe_gt_idle_enable_pg()
    143  xe_mmio_write32(gt, POWERGATE_ENABLE, 0);  in xe_gt_idle_disable_pg()
    263  xe_mmio_write32(gt, RC_IDLE_HYSTERSIS, 0x3B9ACA);  in xe_gt_idle_enable_c6()
    265  xe_mmio_write32(gt, RC_CONTROL,  in xe_gt_idle_enable_c6()
    277  xe_mmio_write32(gt, RC_CONTROL, 0);  in xe_gt_idle_disable_c6()
    278  xe_mmio_write32(gt, RC_STATE, 0);  in xe_gt_idle_disable_c6()

xe_gt_mcr.c
    497  xe_mmio_write32(gt, MCFG_MCR_SELECTOR, steer_val);  in xe_gt_mcr_set_implicit_defaults()
    498  xe_mmio_write32(gt, SF_MCR_SELECTOR, steer_val);  in xe_gt_mcr_set_implicit_defaults()
    592  xe_mmio_write32(gt, STEER_SEMAPHORE, 0x1);  in mcr_unlock()
    638  xe_mmio_write32(gt, steer_reg, steer_val);  in rw_with_mcr_steering()
    643  xe_mmio_write32(gt, reg, value);  in rw_with_mcr_steering()
    652  xe_mmio_write32(gt, steer_reg, MCR_MULTICAST);  in rw_with_mcr_steering()
    760  xe_mmio_write32(gt, reg, value);  in xe_gt_mcr_multicast_write()

xe_guc.c
    239  xe_mmio_write32(gt, SOFT_SCRATCH(0), 0);  in guc_write_params()
    242  xe_mmio_write32(gt, SOFT_SCRATCH(1 + i), guc->params[i]);  in guc_write_params()
    436  xe_mmio_write32(gt, GDRST, GRDOM_GUC);  in xe_guc_reset()
    476  xe_mmio_write32(gt, GUC_SHIM_CONTROL, shim_flags);  in guc_prepare_xfer()
    478  xe_mmio_write32(gt, GT_PM_CONFIG, GT_DOORBELL_ENABLE);  in guc_prepare_xfer()
    497  xe_mmio_write32(gt, UOS_RSA_SCRATCH(0), rsa_ggtt_addr);  in guc_xfer_rsa()
    506  xe_mmio_write32(gt, UOS_RSA_SCRATCH(i), rsa[i]);  in guc_xfer_rsa()
    830  xe_mmio_write32(gt, SOFT_SCRATCH(15), 0);  in guc_handle_mmio_msg()
    847  xe_mmio_write32(gt, GUC_SG_INTR_ENABLE,  in guc_enable_irq()
    910  xe_mmio_write32(gt, guc->notify_reg, default_notify_data);  in xe_guc_notify()
    [all …]
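guc_write_params() (hits at 239 and 242) stages the GuC initialisation parameters through the SOFT_SCRATCH bank before the firmware is loaded. A minimal sketch of that loop, assuming GUC_CTL_MAX_DWORDS from the GuC firmware-interface headers as the parameter count:

    /* Sketch: scratch 0 is zeroed, then each init parameter lands in
     * SOFT_SCRATCH(1 + i); GUC_CTL_MAX_DWORDS is an assumed bound. */
    static void sketch_guc_write_params(struct xe_gt *gt, const u32 *params)
    {
            int i;

            xe_mmio_write32(gt, SOFT_SCRATCH(0), 0);
            for (i = 0; i < GUC_CTL_MAX_DWORDS; i++)
                    xe_mmio_write32(gt, SOFT_SCRATCH(1 + i), params[i]);
    }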

xe_execlist.c
    61  xe_mmio_write32(hwe->gt, RCU_MODE,  in __start_lrc()
    79  xe_mmio_write32(gt, RING_HWS_PGA(hwe->mmio_base),  in __start_lrc()
    82  xe_mmio_write32(gt, RING_MODE(hwe->mmio_base),  in __start_lrc()
    85  xe_mmio_write32(gt, RING_EXECLIST_SQ_CONTENTS_LO(hwe->mmio_base),  in __start_lrc()
    87  xe_mmio_write32(gt, RING_EXECLIST_SQ_CONTENTS_HI(hwe->mmio_base),  in __start_lrc()
    89  xe_mmio_write32(gt, RING_EXECLIST_CONTROL(hwe->mmio_base),  in __start_lrc()

xe_pcode.c
    70  xe_mmio_write32(mmio, PCODE_DATA0, *data0);  in __pcode_mailbox_rw()
    71  xe_mmio_write32(mmio, PCODE_DATA1, data1 ? *data1 : 0);  in __pcode_mailbox_rw()
    72  xe_mmio_write32(mmio, PCODE_MAILBOX, PCODE_READY | mbox);  in __pcode_mailbox_rw()
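The three __pcode_mailbox_rw() hits are the send half of the PCODE mailbox handshake: both data words are staged, then the command is latched by writing PCODE_READY together with the mailbox id, after which the driver polls PCODE_MAILBOX until hardware clears PCODE_READY. A sketch of the send side only, with the handle typed as struct xe_gt * per the prototype in xe_mmio.h:

    static void sketch_pcode_mailbox_send(struct xe_gt *mmio, u32 mbox,
                                          u32 data0, u32 data1)
    {
            xe_mmio_write32(mmio, PCODE_DATA0, data0);
            xe_mmio_write32(mmio, PCODE_DATA1, data1);
            /* setting PCODE_READY hands the request to the PCODE firmware */
            xe_mmio_write32(mmio, PCODE_MAILBOX, PCODE_READY | mbox);
    }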

xe_reg_sr.c
    183  xe_mmio_write32(gt, reg, val);  in apply_one_mmio()
    244  xe_mmio_write32(gt, RING_FORCE_TO_NONPRIV(mmio_base, slot),  in xe_reg_sr_apply_whitelist()
    253  xe_mmio_write32(gt, RING_FORCE_TO_NONPRIV(mmio_base, slot), addr);  in xe_reg_sr_apply_whitelist()

xe_uc_fw.c
    817  xe_mmio_write32(gt, DMA_ADDR_0_LOW, lower_32_bits(src_offset));  in uc_fw_xfer()
    818  xe_mmio_write32(gt, DMA_ADDR_0_HIGH,  in uc_fw_xfer()
    822  xe_mmio_write32(gt, DMA_ADDR_1_LOW, offset);  in uc_fw_xfer()
    823  xe_mmio_write32(gt, DMA_ADDR_1_HIGH, DMA_ADDRESS_SPACE_WOPCM);  in uc_fw_xfer()
    829  xe_mmio_write32(gt, DMA_COPY_SIZE,  in uc_fw_xfer()
    833  xe_mmio_write32(gt, DMA_CTRL,  in uc_fw_xfer()
    844  xe_mmio_write32(gt, DMA_CTRL, _MASKED_BIT_DISABLE(dma_flags));  in uc_fw_xfer()
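DMA_CTRL (hits at 833 and 844) is written with the masked-bit convention used by many i915/xe registers: the upper 16 bits of the value select which low bits the write may change, so unrelated control bits survive. The macros below sketch that convention and are renamed to make clear they are illustrative, not the driver's own definitions:

    #define SKETCH_MASKED_BIT_ENABLE(a)     (((a) << 16) | (a))
    #define SKETCH_MASKED_BIT_DISABLE(a)    ((a) << 16)

    /* e.g. assert the firmware-DMA control flags to start a transfer, and
     * clear the same flags again once it has completed */
    static void sketch_dma_ctrl_set(struct xe_gt *gt, u32 dma_flags, bool enable)
    {
            xe_mmio_write32(gt, DMA_CTRL,
                            enable ? SKETCH_MASKED_BIT_ENABLE(dma_flags)
                                   : SKETCH_MASKED_BIT_DISABLE(dma_flags));
    }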

xe_gt_tlb_invalidation.c
    291  xe_mmio_write32(gt, PVC_GUC_TLB_INV_DESC1,  in xe_gt_tlb_invalidation_ggtt()
    293  xe_mmio_write32(gt, PVC_GUC_TLB_INV_DESC0,  in xe_gt_tlb_invalidation_ggtt()
    296  xe_mmio_write32(gt, GUC_TLB_INV_CR,  in xe_gt_tlb_invalidation_ggtt()

xe_mmio.c
    225  void xe_mmio_write32(struct xe_gt *gt, struct xe_reg reg, u32 val)  in xe_mmio_write32()  (definition)
    263  xe_mmio_write32(gt, reg, reg_val);  in xe_mmio_rmw32()
    273  xe_mmio_write32(gt, reg, val);  in xe_mmio_write32_and_verify()
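Both callers inside xe_mmio.c are thin wrappers around the plain write: xe_mmio_rmw32() folds a read-modify-write and xe_mmio_write32_and_verify() reads back after writing. A sketch of the read-modify-write composition, assuming the matching xe_mmio_read32(gt, reg) read helper:

    /* Sketch of the helper behind the hit at line 263: clear the bits in clr,
     * set the bits in set, and hand back the old raw value. */
    static u32 sketch_mmio_rmw32(struct xe_gt *gt, struct xe_reg reg, u32 clr, u32 set)
    {
            u32 old = xe_mmio_read32(gt, reg);

            xe_mmio_write32(gt, reg, (old & ~clr) | set);
            return old;
    }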

xe_gt_sriov_pf.c
    75  xe_mmio_write32(gt, VIRTUAL_CTRL_REG, GUEST_GTT_UPDATE_EN);  in pf_enable_ggtt_guest_update()

xe_device.c
    417  xe_mmio_write32(gt, GU_DEBUG, DRIVERFLR_STATUS);  in xe_driver_flr()
    438  xe_mmio_write32(gt, GU_DEBUG, DRIVERFLR_STATUS);  in xe_driver_flr()
    809  xe_mmio_write32(gt, VF_CAP_REG, 0);  in xe_device_wmb()
    850  xe_mmio_write32(gt, XE2_TDF_CTRL, TRANSIENT_FLUSH_REQUEST);  in xe_device_td_flush()
    881  xe_mmio_write32(gt, XE2_GLOBAL_INVAL, 0x1);  in xe_device_l2_flush()

xe_oa.c
    369  xe_mmio_write32(stream->gt, oaheadptr,  in xe_oa_append_reports()
    386  xe_mmio_write32(stream->gt, __oa_regs(stream)->oa_status, 0);  in xe_oa_init_oa_buffer()
    387  xe_mmio_write32(stream->gt, __oa_regs(stream)->oa_head_ptr,  in xe_oa_init_oa_buffer()
    394  xe_mmio_write32(stream->gt, __oa_regs(stream)->oa_buffer, oa_buf);  in xe_oa_init_oa_buffer()
    395  xe_mmio_write32(stream->gt, __oa_regs(stream)->oa_tail_ptr,  in xe_oa_init_oa_buffer()
    447  xe_mmio_write32(stream->gt, regs->oa_ctrl, val);  in xe_oa_enable()
    452  xe_mmio_write32(stream->gt, __oa_regs(stream)->oa_ctrl, 0);  in xe_oa_disable()
    460  xe_mmio_write32(stream->gt, OA_TLB_INV_CR, 1);  in xe_oa_disable()
    752  xe_mmio_write32(stream->gt, __oa_regs(stream)->oa_ctrl, __oa_ccs_select(stream));  in xe_oa_configure_oac_context()
    801  xe_mmio_write32(stream->gt, __oa_regs(stream)->oa_debug,  in xe_oa_disable_metric_set()
    [all …]

xe_mmio.h
    19  void xe_mmio_write32(struct xe_gt *gt, struct xe_reg reg, u32 val);

xe_pat.c
    163  xe_mmio_write32(gt, reg, table[i].value);  in program_pat()
    319  xe_mmio_write32(gt, XE_REG(_PAT_ATS), xe2_pat_ats.value);  in xe2lpm_program_pat()
    322  xe_mmio_write32(gt, XE_REG(_PAT_PTA), xe2_pat_pta.value);  in xe2lpm_program_pat()

xe_gt_ccs_mode.c
    77  xe_mmio_write32(gt, CCS_MODE, mode);  in __xe_gt_apply_ccs_mode()

xe_hw_engine.c
    298  xe_mmio_write32(hwe->gt, reg, val);  in xe_hw_engine_mmio_write32()
    327  xe_mmio_write32(hwe->gt, RCU_MODE,  in xe_hw_engine_enable_ring()
    745  xe_mmio_write32(gt, GUNIT_GSC_INTR_ENABLE, 0);  in check_gsc_availability()
    746  xe_mmio_write32(gt, GUNIT_GSC_INTR_MASK, ~0);  in check_gsc_availability()

xe_force_wake.c
    103  xe_mmio_write32(gt, domain->reg_ctl, domain->mask | (wake ? domain->val : 0));  in __domain_ctl()

xe_mocs.c
    693  xe_mmio_write32(gt, XELP_GLOBAL_MOCS(i), mocs);  in __init_mocs_table()
    733  xe_mmio_write32(gt, XELP_LNCFCMOCS(i), l3cc);  in init_l3cc_table()

xe_guc_pc.c
    265  xe_mmio_write32(gt, RP_CONTROL, state);  in pc_set_manual_rp_ctrl()
    277  xe_mmio_write32(gt, RPNSWREQ, rpnswreq);  in pc_set_cur_freq()

xe_lmtt.c
    196  xe_mmio_write32(tile->primary_gt,  in lmtt_setup_dir_ptr()

xe_ggtt.c
    121  xe_mmio_write32(gt, GMD_ID, 0x0);  in ggtt_update_access_counter()

xe_gt.c
    652  xe_mmio_write32(gt, GDRST, GRDOM_FULL);  in do_gt_reset()

/linux-6.12.1/drivers/gpu/drm/xe/compat-i915-headers/

intel_uncore.h
    83   xe_mmio_write32(__compat_uncore_to_gt(uncore), reg, val);  in intel_uncore_write()
    139  xe_mmio_write32(__compat_uncore_to_gt(uncore), reg, val);  in intel_uncore_write_fw()
    155  xe_mmio_write32(__compat_uncore_to_gt(uncore), reg, val);  in intel_uncore_write_notrace()
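All three compat hits are one-line forwarders that let unmodified i915 display code run on xe: the intel_uncore handle is resolved back to an xe_gt and the write goes through xe_mmio_write32(). The sketch below assumes the usual i915_mmio_reg_offset()/XE_REG() conversion between the i915 and xe register types; only __compat_uncore_to_gt() is taken directly from the hits.

    static inline void sketch_uncore_write(struct intel_uncore *uncore,
                                           i915_reg_t i915_reg, u32 val)
    {
            /* translate the i915 register handle into an xe_reg, then
             * forward to the native xe MMIO accessor */
            struct xe_reg reg = XE_REG(i915_mmio_reg_offset(i915_reg));

            xe_mmio_write32(__compat_uncore_to_gt(uncore), reg, val);
    }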