Lines matching full:mmio. Each result shows the source line number, the matched code, and the enclosing function; mmio is flagged as an argument or a local on the lines where it is declared.

33 static void assert_iir_is_zero(struct xe_gt *mmio, struct xe_reg reg)  in assert_iir_is_zero()  argument
35 u32 val = xe_mmio_read32(mmio, reg); in assert_iir_is_zero()
40 drm_WARN(&gt_to_xe(mmio)->drm, 1, in assert_iir_is_zero()
43 xe_mmio_write32(mmio, reg, 0xffffffff); in assert_iir_is_zero()
44 xe_mmio_read32(mmio, reg); in assert_iir_is_zero()
45 xe_mmio_write32(mmio, reg, 0xffffffff); in assert_iir_is_zero()
46 xe_mmio_read32(mmio, reg); in assert_iir_is_zero()
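
For context, a minimal sketch of assert_iir_is_zero() reconstructed around the matched lines; the early-return guard, the warning text and the reg.addr field access are assumptions, only the MMIO accesses listed above are confirmed by the matches:

static void assert_iir_is_zero(struct xe_gt *mmio, struct xe_reg reg)
{
        u32 val = xe_mmio_read32(mmio, reg);

        if (val == 0)
                return;

        drm_WARN(&gt_to_xe(mmio)->drm, 1,
                 "Interrupt register 0x%x is not zero: 0x%08x\n",
                 reg.addr, val);

        /* Write all 1s to clear the stale bits, twice, with posting reads. */
        xe_mmio_write32(mmio, reg, 0xffffffff);
        xe_mmio_read32(mmio, reg);
        xe_mmio_write32(mmio, reg, 0xffffffff);
        xe_mmio_read32(mmio, reg);
}
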
55 struct xe_gt *mmio = tile->primary_gt; in unmask_and_enable() local
61 assert_iir_is_zero(mmio, IIR(irqregs)); in unmask_and_enable()
63 xe_mmio_write32(mmio, IER(irqregs), bits); in unmask_and_enable()
64 xe_mmio_write32(mmio, IMR(irqregs), ~bits); in unmask_and_enable()
67 xe_mmio_read32(mmio, IMR(irqregs)); in unmask_and_enable()
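
A sketch of unmask_and_enable(), assuming the (tile, irqregs, bits) parameters implied by the calls above:

static void unmask_and_enable(struct xe_tile *tile, u32 irqregs, u32 bits)
{
        struct xe_gt *mmio = tile->primary_gt;

        /* An interrupt being enabled now should not already be pending. */
        assert_iir_is_zero(mmio, IIR(irqregs));

        xe_mmio_write32(mmio, IER(irqregs), bits);
        xe_mmio_write32(mmio, IMR(irqregs), ~bits);

        /* Posting read to flush the writes out to the hardware. */
        xe_mmio_read32(mmio, IMR(irqregs));
}
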
73 struct xe_gt *mmio = tile->primary_gt; in mask_and_disable() local
75 xe_mmio_write32(mmio, IMR(irqregs), ~0); in mask_and_disable()
77 xe_mmio_read32(mmio, IMR(irqregs)); in mask_and_disable()
79 xe_mmio_write32(mmio, IER(irqregs), 0); in mask_and_disable()
82 xe_mmio_write32(mmio, IIR(irqregs), ~0); in mask_and_disable()
83 xe_mmio_read32(mmio, IIR(irqregs)); in mask_and_disable()
84 xe_mmio_write32(mmio, IIR(irqregs), ~0); in mask_and_disable()
85 xe_mmio_read32(mmio, IIR(irqregs)); in mask_and_disable()
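
The disable path mirrors it; a sketch of mask_and_disable(), with the double IIR clear kept because the IIR can latch a second event:

static void mask_and_disable(struct xe_tile *tile, u32 irqregs)
{
        struct xe_gt *mmio = tile->primary_gt;

        xe_mmio_write32(mmio, IMR(irqregs), ~0);
        /* Posting read */
        xe_mmio_read32(mmio, IMR(irqregs));

        xe_mmio_write32(mmio, IER(irqregs), 0);

        /* The IIR can queue up to two events, so clear it twice. */
        xe_mmio_write32(mmio, IIR(irqregs), ~0);
        xe_mmio_read32(mmio, IIR(irqregs));
        xe_mmio_write32(mmio, IIR(irqregs), ~0);
        xe_mmio_read32(mmio, IIR(irqregs));
}
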
90 struct xe_gt *mmio = xe_root_mmio_gt(xe); in xelp_intr_disable() local
92 xe_mmio_write32(mmio, GFX_MSTR_IRQ, 0); in xelp_intr_disable()
100 return xe_mmio_read32(mmio, GFX_MSTR_IRQ); in xelp_intr_disable()
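
A sketch of xelp_intr_disable(); the u32 return type is assumed, inferred from the `return xe_mmio_read32(...)` match:

static u32 xelp_intr_disable(struct xe_device *xe)
{
        struct xe_gt *mmio = xe_root_mmio_gt(xe);

        xe_mmio_write32(mmio, GFX_MSTR_IRQ, 0);

        /*
         * With the master bit cleared, sample the pending level indications;
         * they are acked later as the individual sources are handled.
         */
        return xe_mmio_read32(mmio, GFX_MSTR_IRQ);
}
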
106 struct xe_gt *mmio = xe_root_mmio_gt(xe); in gu_misc_irq_ack() local
112 iir = xe_mmio_read32(mmio, IIR(GU_MISC_IRQ_OFFSET)); in gu_misc_irq_ack()
114 xe_mmio_write32(mmio, IIR(GU_MISC_IRQ_OFFSET), iir); in gu_misc_irq_ack()
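
A sketch of gu_misc_irq_ack(): read the GU_MISC IIR and write the value back to ack it. The master_ctl parameter and the GU_MISC_IRQ bit test are assumptions:

static u32 gu_misc_irq_ack(struct xe_device *xe, const u32 master_ctl)
{
        struct xe_gt *mmio = xe_root_mmio_gt(xe);
        u32 iir;

        if (!(master_ctl & GU_MISC_IRQ))        /* bit name assumed */
                return 0;

        iir = xe_mmio_read32(mmio, IIR(GU_MISC_IRQ_OFFSET));
        if (iir)
                xe_mmio_write32(mmio, IIR(GU_MISC_IRQ_OFFSET), iir);

        return iir;
}
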
121 struct xe_gt *mmio = xe_root_mmio_gt(xe); in xelp_intr_enable() local
123 xe_mmio_write32(mmio, GFX_MSTR_IRQ, MASTER_IRQ); in xelp_intr_enable()
125 xe_mmio_read32(mmio, GFX_MSTR_IRQ); in xelp_intr_enable()
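
And the matching enable, a sketch of xelp_intr_enable(); the stall parameter guarding the posting read is an assumption:

static void xelp_intr_enable(struct xe_device *xe, bool stall)
{
        struct xe_gt *mmio = xe_root_mmio_gt(xe);

        xe_mmio_write32(mmio, GFX_MSTR_IRQ, MASTER_IRQ);
        if (stall)
                xe_mmio_read32(mmio, GFX_MSTR_IRQ);    /* posting read */
}
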
210 struct xe_gt *mmio, in gt_engine_identity() argument
219 xe_mmio_write32(mmio, IIR_REG_SELECTOR(bank), BIT(bit)); in gt_engine_identity()
227 ident = xe_mmio_read32(mmio, INTR_IDENTITY_REG(bank)); in gt_engine_identity()
237 xe_mmio_write32(mmio, INTR_IDENTITY_REG(bank), ident); in gt_engine_identity()
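
A simplified sketch of gt_engine_identity(): select the bank/bit, poll the identity register until the hardware reports valid data, then write the value back to ack it. The INTR_DATA_VALID bit name is an assumption, and the real code bounds the poll with a timeout and holds a lock, both omitted here:

static u32 gt_engine_identity(struct xe_device *xe, struct xe_gt *mmio,
                              const unsigned int bank, const unsigned int bit)
{
        u32 ident;

        /* Tell the hardware which pending event we want identified. */
        xe_mmio_write32(mmio, IIR_REG_SELECTOR(bank), BIT(bit));

        /* Spin until the identity register reports valid data. */
        do {
                ident = xe_mmio_read32(mmio, INTR_IDENTITY_REG(bank));
        } while (!(ident & INTR_DATA_VALID));

        /* Ack the identity so the next event can be reported. */
        xe_mmio_write32(mmio, INTR_IDENTITY_REG(bank), ident);

        return ident;
}
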
294 struct xe_gt *mmio = tile->primary_gt; in gt_irq_handler() local
306 intr_dw[bank] = xe_mmio_read32(mmio, GT_INTR_DW(bank)); in gt_irq_handler()
308 identity[bit] = gt_engine_identity(xe, mmio, bank, bit); in gt_irq_handler()
309 xe_mmio_write32(mmio, GT_INTR_DW(bank), intr_dw[bank]); in gt_irq_handler()
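
The core of gt_irq_handler() built from those matches, as a sketch; the bank count, the array declarations, the tile_to_xe() helper and the locking around the identity reads are assumptions:

        struct xe_device *xe = tile_to_xe(tile);        /* helper name assumed */
        struct xe_gt *mmio = tile->primary_gt;
        unsigned long intr_dw[2] = {};
        u32 identity[32];
        unsigned int bank, bit;

        for (bank = 0; bank < 2; bank++) {
                intr_dw[bank] = xe_mmio_read32(mmio, GT_INTR_DW(bank));
                for_each_set_bit(bit, intr_dw + bank, 32)
                        identity[bit] = gt_engine_identity(xe, mmio, bank, bit);
                /* Writing back what was read acks the whole bank at once. */
                xe_mmio_write32(mmio, GT_INTR_DW(bank), intr_dw[bank]);
        }
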
379 struct xe_gt *mmio = xe_root_mmio_gt(xe); in dg1_intr_disable() local
383 xe_mmio_write32(mmio, DG1_MSTR_TILE_INTR, 0); in dg1_intr_disable()
386 val = xe_mmio_read32(mmio, DG1_MSTR_TILE_INTR); in dg1_intr_disable()
390 xe_mmio_write32(mmio, DG1_MSTR_TILE_INTR, val); in dg1_intr_disable()
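
The DG1 (multi-tile) variant; a sketch of dg1_intr_disable(), where the sampled tile indications are acked at the master unit and returned to the caller:

static u32 dg1_intr_disable(struct xe_device *xe)
{
        struct xe_gt *mmio = xe_root_mmio_gt(xe);
        u32 val;

        /* Disable the top-level tile interrupt first. */
        xe_mmio_write32(mmio, DG1_MSTR_TILE_INTR, 0);

        /* Sample which tiles have pending interrupts and ack the master unit. */
        val = xe_mmio_read32(mmio, DG1_MSTR_TILE_INTR);
        if (!val)
                return 0;

        xe_mmio_write32(mmio, DG1_MSTR_TILE_INTR, val);

        return val;
}
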
397 struct xe_gt *mmio = xe_root_mmio_gt(xe); in dg1_intr_enable() local
399 xe_mmio_write32(mmio, DG1_MSTR_TILE_INTR, DG1_MSTR_IRQ); in dg1_intr_enable()
401 xe_mmio_read32(mmio, DG1_MSTR_TILE_INTR); in dg1_intr_enable()
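
Its enable counterpart, sketched with the same assumed stall parameter as xelp_intr_enable():

static void dg1_intr_enable(struct xe_device *xe, bool stall)
{
        struct xe_gt *mmio = xe_root_mmio_gt(xe);

        xe_mmio_write32(mmio, DG1_MSTR_TILE_INTR, DG1_MSTR_IRQ);
        if (stall)
                xe_mmio_read32(mmio, DG1_MSTR_TILE_INTR);    /* posting read */
}
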
434 struct xe_gt *mmio = tile->primary_gt; in dg1_irq_handler() local
439 master_ctl = xe_mmio_read32(mmio, GFX_MSTR_IRQ); in dg1_irq_handler()
443 * and all MMIO reads will be returned with all 1's. Ignore this in dg1_irq_handler()
452 xe_mmio_write32(mmio, GFX_MSTR_IRQ, master_ctl); in dg1_irq_handler()
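
Inside dg1_irq_handler(), the per-tile body reads GFX_MSTR_IRQ, screens out the all-ones value seen after a PCI unplug, and acks by writing the sampled bits back. A sketch of that fragment; the surrounding per-tile loop, the unplug reporting and the GT/display dispatch are omitted:

        struct xe_gt *mmio = tile->primary_gt;
        u32 master_ctl;

        master_ctl = xe_mmio_read32(mmio, GFX_MSTR_IRQ);

        /*
         * Right after a PCI unplug every MMIO read returns all 1's, so an
         * all-ones master_ctl means "device gone", not pending interrupts.
         */
        if (master_ctl == ~0U) {
                /* Simplified: the real handler reports the GPU as gone here. */
                return;
        }

        /* Ack the sampled bits at the master register before dispatching. */
        xe_mmio_write32(mmio, GFX_MSTR_IRQ, master_ctl);
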
477 struct xe_gt *mmio = tile->primary_gt; in gt_irq_reset() local
485 xe_mmio_write32(mmio, RENDER_COPY_INTR_ENABLE, 0); in gt_irq_reset()
486 xe_mmio_write32(mmio, VCS_VECS_INTR_ENABLE, 0); in gt_irq_reset()
488 xe_mmio_write32(mmio, CCS_RSVD_INTR_ENABLE, 0); in gt_irq_reset()
491 xe_mmio_write32(mmio, RCS0_RSVD_INTR_MASK, ~0); in gt_irq_reset()
492 xe_mmio_write32(mmio, BCS_RSVD_INTR_MASK, ~0); in gt_irq_reset()
494 xe_mmio_write32(mmio, XEHPC_BCS1_BCS2_INTR_MASK, ~0); in gt_irq_reset()
496 xe_mmio_write32(mmio, XEHPC_BCS3_BCS4_INTR_MASK, ~0); in gt_irq_reset()
498 xe_mmio_write32(mmio, XEHPC_BCS5_BCS6_INTR_MASK, ~0); in gt_irq_reset()
500 xe_mmio_write32(mmio, XEHPC_BCS7_BCS8_INTR_MASK, ~0); in gt_irq_reset()
501 xe_mmio_write32(mmio, VCS0_VCS1_INTR_MASK, ~0); in gt_irq_reset()
502 xe_mmio_write32(mmio, VCS2_VCS3_INTR_MASK, ~0); in gt_irq_reset()
503 xe_mmio_write32(mmio, VECS0_VECS1_INTR_MASK, ~0); in gt_irq_reset()
505 xe_mmio_write32(mmio, CCS0_CCS1_INTR_MASK, ~0); in gt_irq_reset()
507 xe_mmio_write32(mmio, CCS2_CCS3_INTR_MASK, ~0); in gt_irq_reset()
512 xe_mmio_write32(mmio, GUNIT_GSC_INTR_ENABLE, 0); in gt_irq_reset()
513 xe_mmio_write32(mmio, GUNIT_GSC_INTR_MASK, ~0); in gt_irq_reset()
514 xe_mmio_write32(mmio, HECI2_RSVD_INTR_MASK, ~0); in gt_irq_reset()
517 xe_mmio_write32(mmio, GPM_WGBOXPERF_INTR_ENABLE, 0); in gt_irq_reset()
518 xe_mmio_write32(mmio, GPM_WGBOXPERF_INTR_MASK, ~0); in gt_irq_reset()
519 xe_mmio_write32(mmio, GUC_SG_INTR_ENABLE, 0); in gt_irq_reset()
520 xe_mmio_write32(mmio, GUC_SG_INTR_MASK, ~0); in gt_irq_reset()
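
gt_irq_reset() is the same idea applied engine class by engine class: zero every *_INTR_ENABLE register and write all 1s to every *_INTR_MASK register. An abbreviated sketch; the platform-conditional CCS, XEHPC BCS and GSC/HECI writes from the matches above are left out:

static void gt_irq_reset(struct xe_tile *tile)
{
        struct xe_gt *mmio = tile->primary_gt;

        /* Disable the per-class interrupt enables... */
        xe_mmio_write32(mmio, RENDER_COPY_INTR_ENABLE, 0);
        xe_mmio_write32(mmio, VCS_VECS_INTR_ENABLE, 0);

        /* ...and mask every engine-pair interrupt register. */
        xe_mmio_write32(mmio, RCS0_RSVD_INTR_MASK, ~0);
        xe_mmio_write32(mmio, BCS_RSVD_INTR_MASK, ~0);
        xe_mmio_write32(mmio, VCS0_VCS1_INTR_MASK, ~0);
        xe_mmio_write32(mmio, VCS2_VCS3_INTR_MASK, ~0);
        xe_mmio_write32(mmio, VECS0_VECS1_INTR_MASK, ~0);

        /* Quiesce the GuC/GPM interrupt sources as well. */
        xe_mmio_write32(mmio, GPM_WGBOXPERF_INTR_ENABLE, 0);
        xe_mmio_write32(mmio, GPM_WGBOXPERF_INTR_MASK, ~0);
        xe_mmio_write32(mmio, GUC_SG_INTR_ENABLE, 0);
        xe_mmio_write32(mmio, GUC_SG_INTR_MASK, ~0);
}
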
550 struct xe_gt *mmio = tile->primary_gt; in dg1_irq_reset_mstr() local
552 xe_mmio_write32(mmio, GFX_MSTR_IRQ, ~0); in dg1_irq_reset_mstr()
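
Finally, dg1_irq_reset_mstr() clears the per-tile master register itself; a minimal sketch:

static void dg1_irq_reset_mstr(struct xe_tile *tile)
{
        struct xe_gt *mmio = tile->primary_gt;

        /* Clear every bit of the tile's master IRQ register. */
        xe_mmio_write32(mmio, GFX_MSTR_IRQ, ~0);
}
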