Lines Matching full:vgpu in drivers/gpu/drm/i915/gvt/handlers.c

93 static void read_vreg(struct intel_vgpu *vgpu, unsigned int offset,  in read_vreg()  argument
96 memcpy(p_data, &vgpu_vreg(vgpu, offset), bytes); in read_vreg()
99 static void write_vreg(struct intel_vgpu *vgpu, unsigned int offset, in write_vreg() argument
102 memcpy(&vgpu_vreg(vgpu, offset), p_data, bytes); in write_vreg()
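
These two helpers are the foundation of nearly every handler below: the vGPU's MMIO space is backed by a flat array, and reads/writes of any legal width are plain memcpy()s at the given offset. A minimal standalone sketch of the same idea (the names vreg_space/vreg_read/vreg_write are invented here, not the kernel API):

    #include <stdint.h>
    #include <stdio.h>
    #include <string.h>

    static uint8_t vreg_space[0x1000];   /* toy virtual-register backing store */

    static void vreg_read(unsigned int offset, void *p_data, unsigned int bytes)
    {
        memcpy(p_data, &vreg_space[offset], bytes);
    }

    static void vreg_write(unsigned int offset, const void *p_data, unsigned int bytes)
    {
        memcpy(&vreg_space[offset], p_data, bytes);
    }

    int main(void)
    {
        uint32_t v = 0xdeadbeef, out = 0;

        vreg_write(0x100, &v, sizeof(v));
        vreg_read(0x100, &out, sizeof(out));
        printf("0x%08x\n", (unsigned)out);   /* prints 0xdeadbeef */
        return 0;
    }
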
179 void enter_failsafe_mode(struct intel_vgpu *vgpu, int reason) in enter_failsafe_mode() argument
194 pr_err("Now vgpu %d will enter failsafe mode.\n", vgpu->id); in enter_failsafe_mode()
195 vgpu->failsafe = true; in enter_failsafe_mode()
198 static int sanitize_fence_mmio_access(struct intel_vgpu *vgpu, in sanitize_fence_mmio_access() argument
201 unsigned int max_fence = vgpu_fence_sz(vgpu); in sanitize_fence_mmio_access()
209 * and we will let vgpu enter failsafe mode. in sanitize_fence_mmio_access()
211 if (!vgpu->pv_notified) in sanitize_fence_mmio_access()
212 enter_failsafe_mode(vgpu, in sanitize_fence_mmio_access()
221 static int gamw_echo_dev_rw_ia_write(struct intel_vgpu *vgpu, in gamw_echo_dev_rw_ia_write() argument
226 if (GRAPHICS_VER(vgpu->gvt->gt->i915) <= 10) { in gamw_echo_dev_rw_ia_write()
228 gvt_dbg_core("vgpu%d: ips enabled\n", vgpu->id); in gamw_echo_dev_rw_ia_write()
230 gvt_dbg_core("vgpu%d: ips disabled\n", vgpu->id); in gamw_echo_dev_rw_ia_write()
232 /* All engines must be enabled together for vGPU, in gamw_echo_dev_rw_ia_write()
242 write_vreg(vgpu, offset, p_data, bytes); in gamw_echo_dev_rw_ia_write()
246 static int fence_mmio_read(struct intel_vgpu *vgpu, unsigned int off, in fence_mmio_read() argument
251 ret = sanitize_fence_mmio_access(vgpu, offset_to_fence_num(off), in fence_mmio_read()
255 read_vreg(vgpu, off, p_data, bytes); in fence_mmio_read()
259 static int fence_mmio_write(struct intel_vgpu *vgpu, unsigned int off, in fence_mmio_write() argument
262 struct intel_gvt *gvt = vgpu->gvt; in fence_mmio_write()
266 ret = sanitize_fence_mmio_access(vgpu, fence_num, p_data, bytes); in fence_mmio_write()
269 write_vreg(vgpu, off, p_data, bytes); in fence_mmio_write()
272 intel_vgpu_write_fence(vgpu, fence_num, in fence_mmio_write()
273 vgpu_vreg64(vgpu, fence_num_to_offset(fence_num))); in fence_mmio_write()
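
Both fence handlers convert between an MMIO offset and a fence index before the access is validated against vgpu_fence_sz(). Each GEN6+ fence register is 64 bits wide, so the conversion is a shift by 3. A hedged sketch of that arithmetic (FENCE_BASE is a stand-in for the real register base):

    #include <assert.h>
    #include <stdint.h>

    #define FENCE_BASE 0x100000u   /* hypothetical base of the fence registers */

    static unsigned int offset_to_fence(uint32_t off)
    {
        return (off - FENCE_BASE) >> 3;   /* 8 bytes per fence register */
    }

    static uint32_t fence_to_offset(unsigned int num)
    {
        return FENCE_BASE + (num << 3);
    }

    int main(void)
    {
        assert(offset_to_fence(fence_to_offset(5)) == 5);
        return 0;
    }
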
283 static int mul_force_wake_write(struct intel_vgpu *vgpu, in mul_force_wake_write() argument
289 old = vgpu_vreg(vgpu, offset); in mul_force_wake_write()
292 if (GRAPHICS_VER(vgpu->gvt->gt->i915) >= 9) { in mul_force_wake_write()
312 vgpu_vreg(vgpu, offset) = new; in mul_force_wake_write()
313 vgpu_vreg(vgpu, ack_reg_offset) = (new & GENMASK(15, 0)); in mul_force_wake_write()
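
mul_force_wake_write() treats forcewake as a masked register: the high 16 bits of the written value select which of the low 16 bits take effect, and the resulting low half is copied straight into the ack register, so a guest polling for the wake sees it granted immediately. A standalone model of that update, assuming the usual masked-register convention:

    #include <stdint.h>
    #include <stdio.h>

    static uint32_t masked_update(uint32_t old, uint32_t wr)
    {
        uint32_t mask = wr >> 16;   /* high half selects the bits to change */

        return (wr & 0xffff0000u) |
               ((old & 0xffffu & ~mask) | (wr & 0xffffu & mask));
    }

    int main(void)
    {
        uint32_t reg = 0x0000000fu;

        reg = masked_update(reg, 0x00010000u);   /* clear bit 0, touch nothing else */
        printf("reg=0x%08x ack=0x%04x\n",
               (unsigned)reg, (unsigned)(reg & 0xffffu));
        return 0;
    }
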
317 static int gdrst_mmio_write(struct intel_vgpu *vgpu, unsigned int offset, in gdrst_mmio_write() argument
323 write_vreg(vgpu, offset, p_data, bytes); in gdrst_mmio_write()
324 data = vgpu_vreg(vgpu, offset); in gdrst_mmio_write()
327 gvt_dbg_mmio("vgpu%d: request full GPU reset\n", vgpu->id); in gdrst_mmio_write()
331 gvt_dbg_mmio("vgpu%d: request RCS reset\n", vgpu->id); in gdrst_mmio_write()
335 gvt_dbg_mmio("vgpu%d: request VCS reset\n", vgpu->id); in gdrst_mmio_write()
339 gvt_dbg_mmio("vgpu%d: request BCS Reset\n", vgpu->id); in gdrst_mmio_write()
343 gvt_dbg_mmio("vgpu%d: request VECS Reset\n", vgpu->id); in gdrst_mmio_write()
347 gvt_dbg_mmio("vgpu%d: request VCS2 Reset\n", vgpu->id); in gdrst_mmio_write()
351 gvt_dbg_mmio("vgpu%d: request GUC Reset\n", vgpu->id); in gdrst_mmio_write()
352 vgpu_vreg_t(vgpu, GUC_STATUS) |= GS_MIA_IN_RESET; in gdrst_mmio_write()
354 engine_mask &= vgpu->gvt->gt->info.engine_mask; in gdrst_mmio_write()
358 intel_gvt_reset_vgpu_locked(vgpu, false, engine_mask); in gdrst_mmio_write()
361 vgpu_vreg(vgpu, offset) = 0; in gdrst_mmio_write()
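
gdrst_mmio_write() decodes the guest's reset request into an engine mask, clamps it to the engines this vGPU actually owns, and only then performs the locked reset. A toy decode in the same spirit (the GDRST_* bit names and values here are invented):

    #include <stdint.h>
    #include <stdio.h>

    #define GDRST_FULL   (1u << 0)   /* invented bit layout */
    #define GDRST_RENDER (1u << 1)
    #define GDRST_MEDIA  (1u << 2)

    int main(void)
    {
        uint32_t data = GDRST_RENDER | GDRST_MEDIA;
        uint32_t engine_mask = 0;

        if (data & GDRST_FULL) {
            engine_mask = ~0u;   /* full GPU reset */
        } else {
            if (data & GDRST_RENDER)
                engine_mask |= 1u << 0;
            if (data & GDRST_MEDIA)
                engine_mask |= 1u << 1;
        }
        /* the real handler then ANDs with gt->info.engine_mask */
        printf("engine_mask=0x%x\n", (unsigned)engine_mask);
        return 0;
    }
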
366 static int gmbus_mmio_read(struct intel_vgpu *vgpu, unsigned int offset, in gmbus_mmio_read() argument
369 return intel_gvt_i2c_handle_gmbus_read(vgpu, offset, p_data, bytes); in gmbus_mmio_read()
372 static int gmbus_mmio_write(struct intel_vgpu *vgpu, unsigned int offset, in gmbus_mmio_write() argument
375 return intel_gvt_i2c_handle_gmbus_write(vgpu, offset, p_data, bytes); in gmbus_mmio_write()
378 static int pch_pp_control_mmio_write(struct intel_vgpu *vgpu, in pch_pp_control_mmio_write() argument
381 write_vreg(vgpu, offset, p_data, bytes); in pch_pp_control_mmio_write()
383 if (vgpu_vreg(vgpu, offset) & PANEL_POWER_ON) { in pch_pp_control_mmio_write()
384 vgpu_vreg_t(vgpu, PCH_PP_STATUS) |= PP_ON; in pch_pp_control_mmio_write()
385 vgpu_vreg_t(vgpu, PCH_PP_STATUS) |= PP_SEQUENCE_STATE_ON_IDLE; in pch_pp_control_mmio_write()
386 vgpu_vreg_t(vgpu, PCH_PP_STATUS) &= ~PP_SEQUENCE_POWER_DOWN; in pch_pp_control_mmio_write()
387 vgpu_vreg_t(vgpu, PCH_PP_STATUS) &= ~PP_CYCLE_DELAY_ACTIVE; in pch_pp_control_mmio_write()
390 vgpu_vreg_t(vgpu, PCH_PP_STATUS) &= in pch_pp_control_mmio_write()
396 static int transconf_mmio_write(struct intel_vgpu *vgpu, in transconf_mmio_write() argument
399 write_vreg(vgpu, offset, p_data, bytes); in transconf_mmio_write()
401 if (vgpu_vreg(vgpu, offset) & TRANS_ENABLE) in transconf_mmio_write()
402 vgpu_vreg(vgpu, offset) |= TRANS_STATE_ENABLE; in transconf_mmio_write()
404 vgpu_vreg(vgpu, offset) &= ~TRANS_STATE_ENABLE; in transconf_mmio_write()
408 static int lcpll_ctl_mmio_write(struct intel_vgpu *vgpu, unsigned int offset, in lcpll_ctl_mmio_write() argument
411 write_vreg(vgpu, offset, p_data, bytes); in lcpll_ctl_mmio_write()
413 if (vgpu_vreg(vgpu, offset) & LCPLL_PLL_DISABLE) in lcpll_ctl_mmio_write()
414 vgpu_vreg(vgpu, offset) &= ~LCPLL_PLL_LOCK; in lcpll_ctl_mmio_write()
416 vgpu_vreg(vgpu, offset) |= LCPLL_PLL_LOCK; in lcpll_ctl_mmio_write()
418 if (vgpu_vreg(vgpu, offset) & LCPLL_CD_SOURCE_FCLK) in lcpll_ctl_mmio_write()
419 vgpu_vreg(vgpu, offset) |= LCPLL_CD_SOURCE_FCLK_DONE; in lcpll_ctl_mmio_write()
421 vgpu_vreg(vgpu, offset) &= ~LCPLL_CD_SOURCE_FCLK_DONE; in lcpll_ctl_mmio_write()
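
lcpll_ctl_mmio_write() follows the same emulation pattern as the panel-power and DBUF handlers in this file: there is no real hardware to wait for, so whenever the guest writes a request bit, the matching status bit is updated in the very same write and the guest's poll loop exits at once. A sketch with invented bit positions:

    #include <stdint.h>
    #include <stdio.h>

    #define REQ_DISABLE (1u << 31)   /* hypothetical "disable PLL" request bit */
    #define ST_LOCKED   (1u << 30)   /* hypothetical "PLL locked" status bit   */

    static uint32_t pll_write(uint32_t v)
    {
        if (v & REQ_DISABLE)
            v &= ~ST_LOCKED;   /* disabling drops the lock ...          */
        else
            v |= ST_LOCKED;    /* ... enabling reports lock immediately */
        return v;
    }

    int main(void)
    {
        printf("0x%08x\n", (unsigned)pll_write(0));             /* locked   */
        printf("0x%08x\n", (unsigned)pll_write(REQ_DISABLE));   /* unlocked */
        return 0;
    }
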
426 static int dpy_reg_mmio_read(struct intel_vgpu *vgpu, unsigned int offset, in dpy_reg_mmio_read() argument
434 vgpu_vreg(vgpu, offset) = 1 << 17; in dpy_reg_mmio_read()
437 vgpu_vreg(vgpu, offset) = 0x3; in dpy_reg_mmio_read()
440 vgpu_vreg(vgpu, offset) = 0x2f << 16; in dpy_reg_mmio_read()
446 read_vreg(vgpu, offset, p_data, bytes); in dpy_reg_mmio_read()
451 * Only PIPE_A is enabled in current vGPU display and PIPE_A is tied to
466 static u32 bdw_vgpu_get_dp_bitrate(struct intel_vgpu *vgpu, enum port port) in bdw_vgpu_get_dp_bitrate() argument
469 u32 ddi_pll_sel = vgpu_vreg_t(vgpu, PORT_CLK_SEL(port)); in bdw_vgpu_get_dp_bitrate()
483 switch (vgpu_vreg_t(vgpu, SPLL_CTL) & SPLL_FREQ_MASK) { in bdw_vgpu_get_dp_bitrate()
494 gvt_dbg_dpy("vgpu-%d PORT_%c can't get freq from SPLL 0x%08x\n", in bdw_vgpu_get_dp_bitrate()
495 vgpu->id, port_name(port), vgpu_vreg_t(vgpu, SPLL_CTL)); in bdw_vgpu_get_dp_bitrate()
507 wrpll_ctl = vgpu_vreg_t(vgpu, WRPLL_CTL(DPLL_ID_WRPLL1)); in bdw_vgpu_get_dp_bitrate()
509 wrpll_ctl = vgpu_vreg_t(vgpu, WRPLL_CTL(DPLL_ID_WRPLL2)); in bdw_vgpu_get_dp_bitrate()
513 refclk = vgpu->gvt->gt->i915->display.dpll.ref_clks.ssc; in bdw_vgpu_get_dp_bitrate()
519 gvt_dbg_dpy("vgpu-%d PORT_%c WRPLL can't get refclk 0x%08x\n", in bdw_vgpu_get_dp_bitrate()
520 vgpu->id, port_name(port), wrpll_ctl); in bdw_vgpu_get_dp_bitrate()
532 gvt_dbg_dpy("vgpu-%d PORT_%c has invalid clock select 0x%08x\n", in bdw_vgpu_get_dp_bitrate()
533 vgpu->id, port_name(port), vgpu_vreg_t(vgpu, PORT_CLK_SEL(port))); in bdw_vgpu_get_dp_bitrate()
541 static u32 bxt_vgpu_get_dp_bitrate(struct intel_vgpu *vgpu, enum port port) in bxt_vgpu_get_dp_bitrate() argument
544 int refclk = vgpu->gvt->gt->i915->display.dpll.ref_clks.nssc; in bxt_vgpu_get_dp_bitrate()
565 gvt_dbg_dpy("vgpu-%d no PHY for PORT_%c\n", vgpu->id, port_name(port)); in bxt_vgpu_get_dp_bitrate()
569 temp = vgpu_vreg_t(vgpu, BXT_PORT_PLL_ENABLE(port)); in bxt_vgpu_get_dp_bitrate()
571 gvt_dbg_dpy("vgpu-%d PORT_%c PLL_ENABLE 0x%08x isn't enabled or locked\n", in bxt_vgpu_get_dp_bitrate()
572 vgpu->id, port_name(port), temp); in bxt_vgpu_get_dp_bitrate()
578 vgpu_vreg_t(vgpu, BXT_PORT_PLL(phy, ch, 0))) << 22; in bxt_vgpu_get_dp_bitrate()
579 if (vgpu_vreg_t(vgpu, BXT_PORT_PLL(phy, ch, 3)) & PORT_PLL_M2_FRAC_ENABLE) in bxt_vgpu_get_dp_bitrate()
581 vgpu_vreg_t(vgpu, BXT_PORT_PLL(phy, ch, 2))); in bxt_vgpu_get_dp_bitrate()
583 vgpu_vreg_t(vgpu, BXT_PORT_PLL(phy, ch, 1))); in bxt_vgpu_get_dp_bitrate()
585 vgpu_vreg_t(vgpu, BXT_PORT_PLL_EBB_0(phy, ch))); in bxt_vgpu_get_dp_bitrate()
587 vgpu_vreg_t(vgpu, BXT_PORT_PLL_EBB_0(phy, ch))); in bxt_vgpu_get_dp_bitrate()
592 gvt_dbg_dpy("vgpu-%d PORT_%c PLL has invalid divider\n", vgpu->id, port_name(port)); in bxt_vgpu_get_dp_bitrate()
605 static u32 skl_vgpu_get_dp_bitrate(struct intel_vgpu *vgpu, enum port port) in skl_vgpu_get_dp_bitrate() argument
611 if (!(vgpu_vreg_t(vgpu, DPLL_CTRL2) & DPLL_CTRL2_DDI_CLK_OFF(port)) && in skl_vgpu_get_dp_bitrate()
612 (vgpu_vreg_t(vgpu, DPLL_CTRL2) & DPLL_CTRL2_DDI_SEL_OVERRIDE(port))) { in skl_vgpu_get_dp_bitrate()
613 dpll_id += (vgpu_vreg_t(vgpu, DPLL_CTRL2) & in skl_vgpu_get_dp_bitrate()
617 gvt_dbg_dpy("vgpu-%d DPLL for PORT_%c isn't turned on\n", in skl_vgpu_get_dp_bitrate()
618 vgpu->id, port_name(port)); in skl_vgpu_get_dp_bitrate()
623 switch ((vgpu_vreg_t(vgpu, DPLL_CTRL1) & in skl_vgpu_get_dp_bitrate()
646 gvt_dbg_dpy("vgpu-%d PORT_%c failed to get DPLL-%d freq\n", in skl_vgpu_get_dp_bitrate()
647 vgpu->id, port_name(port), dpll_id); in skl_vgpu_get_dp_bitrate()
653 static void vgpu_update_refresh_rate(struct intel_vgpu *vgpu) in vgpu_update_refresh_rate() argument
655 struct drm_i915_private *dev_priv = vgpu->gvt->gt->i915; in vgpu_update_refresh_rate()
660 port = (vgpu_vreg_t(vgpu, TRANS_DDI_FUNC_CTL(dev_priv, TRANSCODER_A)) & in vgpu_update_refresh_rate()
663 gvt_dbg_dpy("vgpu-%d unsupported PORT_%c\n", vgpu->id, port_name(port)); in vgpu_update_refresh_rate()
669 dp_br = bdw_vgpu_get_dp_bitrate(vgpu, port); in vgpu_update_refresh_rate()
671 dp_br = bxt_vgpu_get_dp_bitrate(vgpu, port); in vgpu_update_refresh_rate()
673 dp_br = skl_vgpu_get_dp_bitrate(vgpu, port); in vgpu_update_refresh_rate()
676 link_m = vgpu_vreg_t(vgpu, PIPE_LINK_M1(dev_priv, TRANSCODER_A)); in vgpu_update_refresh_rate()
677 link_n = vgpu_vreg_t(vgpu, PIPE_LINK_N1(dev_priv, TRANSCODER_A)); in vgpu_update_refresh_rate()
680 htotal = (vgpu_vreg_t(vgpu, TRANS_HTOTAL(dev_priv, TRANSCODER_A)) >> TRANS_HTOTAL_SHIFT); in vgpu_update_refresh_rate()
681 vtotal = (vgpu_vreg_t(vgpu, TRANS_VTOTAL(dev_priv, TRANSCODER_A)) >> TRANS_VTOTAL_SHIFT); in vgpu_update_refresh_rate()
686 u32 *old_rate = &(intel_vgpu_port(vgpu, vgpu->display.port_num)->vrefresh_k); in vgpu_update_refresh_rate()
698 gvt_dbg_dpy("vgpu-%d PIPE_%c refresh rate updated to %d\n", in vgpu_update_refresh_rate()
699 vgpu->id, pipe_name(PIPE_A), new_rate); in vgpu_update_refresh_rate()
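
vgpu_update_refresh_rate() reconstructs the guest's refresh rate from virtual register state: the DP bitrate and the link M/N ratio give the pixel clock, and dividing by the total scanline area gives the frame rate, stored in millihertz in the port's vrefresh_k field (the handler reads htotal/vtotal as hardware N-1 values, hence its +1 adjustments). A worked example with invented 1920x1080@60 timings:

    #include <stdint.h>
    #include <stdio.h>

    int main(void)
    {
        /* invented mode values; the real handler first computes
         * pixel_clk = dp_br * link_m / link_n */
        uint64_t pixel_clk = 148500000ull;        /* Hz */
        uint64_t htotal = 2200, vtotal = 1125;    /* already +1 adjusted */
        uint64_t refresh_k = pixel_clk * 1000 / (htotal * vtotal);

        printf("%llu mHz\n", (unsigned long long)refresh_k);   /* 60000 */
        return 0;
    }
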
703 static int pipeconf_mmio_write(struct intel_vgpu *vgpu, unsigned int offset, in pipeconf_mmio_write() argument
708 write_vreg(vgpu, offset, p_data, bytes); in pipeconf_mmio_write()
709 data = vgpu_vreg(vgpu, offset); in pipeconf_mmio_write()
712 vgpu_vreg(vgpu, offset) |= TRANSCONF_STATE_ENABLE; in pipeconf_mmio_write()
713 vgpu_update_refresh_rate(vgpu); in pipeconf_mmio_write()
714 vgpu_update_vblank_emulation(vgpu, true); in pipeconf_mmio_write()
716 vgpu_vreg(vgpu, offset) &= ~TRANSCONF_STATE_ENABLE; in pipeconf_mmio_write()
717 vgpu_update_vblank_emulation(vgpu, false); in pipeconf_mmio_write()
776 static int force_nonpriv_write(struct intel_vgpu *vgpu, in force_nonpriv_write() argument
781 intel_gvt_render_mmio_to_engine(vgpu->gvt, offset); in force_nonpriv_write()
784 gvt_err("vgpu(%d) Invalid FORCE_NONPRIV offset %x(%dB)\n", in force_nonpriv_write()
785 vgpu->id, offset, bytes); in force_nonpriv_write()
791 gvt_err("vgpu(%d) Invalid FORCE_NONPRIV write %x at offset %x\n", in force_nonpriv_write()
792 vgpu->id, reg_nonpriv, offset); in force_nonpriv_write()
794 intel_vgpu_default_mmio_write(vgpu, offset, p_data, bytes); in force_nonpriv_write()
799 static int ddi_buf_ctl_mmio_write(struct intel_vgpu *vgpu, unsigned int offset, in ddi_buf_ctl_mmio_write() argument
802 write_vreg(vgpu, offset, p_data, bytes); in ddi_buf_ctl_mmio_write()
804 if (vgpu_vreg(vgpu, offset) & DDI_BUF_CTL_ENABLE) { in ddi_buf_ctl_mmio_write()
805 vgpu_vreg(vgpu, offset) &= ~DDI_BUF_IS_IDLE; in ddi_buf_ctl_mmio_write()
807 vgpu_vreg(vgpu, offset) |= DDI_BUF_IS_IDLE; in ddi_buf_ctl_mmio_write()
809 vgpu_vreg_t(vgpu, DP_TP_STATUS(PORT_E)) in ddi_buf_ctl_mmio_write()
815 static int fdi_rx_iir_mmio_write(struct intel_vgpu *vgpu, in fdi_rx_iir_mmio_write() argument
818 vgpu_vreg(vgpu, offset) &= ~*(u32 *)p_data; in fdi_rx_iir_mmio_write()
825 static int fdi_auto_training_started(struct intel_vgpu *vgpu) in fdi_auto_training_started() argument
827 u32 ddi_buf_ctl = vgpu_vreg_t(vgpu, DDI_BUF_CTL(PORT_E)); in fdi_auto_training_started()
828 u32 rx_ctl = vgpu_vreg(vgpu, _FDI_RXA_CTL); in fdi_auto_training_started()
829 u32 tx_ctl = vgpu_vreg_t(vgpu, DP_TP_CTL(PORT_E)); in fdi_auto_training_started()
841 static int check_fdi_rx_train_status(struct intel_vgpu *vgpu, in check_fdi_rx_train_status() argument
870 if (vgpu_vreg_t(vgpu, fdi_rx_imr) & fdi_iir_check_bits) in check_fdi_rx_train_status()
873 if (((vgpu_vreg_t(vgpu, fdi_tx_ctl) & fdi_tx_check_bits) in check_fdi_rx_train_status()
875 && ((vgpu_vreg_t(vgpu, fdi_rx_ctl) & fdi_rx_check_bits) in check_fdi_rx_train_status()
907 static int update_fdi_rx_iir_status(struct intel_vgpu *vgpu, in update_fdi_rx_iir_status() argument
925 write_vreg(vgpu, offset, p_data, bytes); in update_fdi_rx_iir_status()
929 ret = check_fdi_rx_train_status(vgpu, index, FDI_LINK_TRAIN_PATTERN1); in update_fdi_rx_iir_status()
933 vgpu_vreg_t(vgpu, fdi_rx_iir) |= FDI_RX_BIT_LOCK; in update_fdi_rx_iir_status()
935 ret = check_fdi_rx_train_status(vgpu, index, FDI_LINK_TRAIN_PATTERN2); in update_fdi_rx_iir_status()
939 vgpu_vreg_t(vgpu, fdi_rx_iir) |= FDI_RX_SYMBOL_LOCK; in update_fdi_rx_iir_status()
942 if (fdi_auto_training_started(vgpu)) in update_fdi_rx_iir_status()
943 vgpu_vreg_t(vgpu, DP_TP_STATUS(PORT_E)) |= in update_fdi_rx_iir_status()
951 static int dp_tp_ctl_mmio_write(struct intel_vgpu *vgpu, unsigned int offset, in dp_tp_ctl_mmio_write() argument
958 write_vreg(vgpu, offset, p_data, bytes); in dp_tp_ctl_mmio_write()
961 data = (vgpu_vreg(vgpu, offset) & GENMASK(10, 8)) >> 8; in dp_tp_ctl_mmio_write()
964 vgpu_vreg_t(vgpu, status_reg) |= (1 << 25); in dp_tp_ctl_mmio_write()
969 static int dp_tp_status_mmio_write(struct intel_vgpu *vgpu, in dp_tp_status_mmio_write() argument
978 vgpu_vreg(vgpu, offset) = (reg_val & ~sticky_mask) | in dp_tp_status_mmio_write()
979 (vgpu_vreg(vgpu, offset) & sticky_mask); in dp_tp_status_mmio_write()
980 vgpu_vreg(vgpu, offset) &= ~(reg_val & sticky_mask); in dp_tp_status_mmio_write()
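
dp_tp_status_mmio_write() splits the register into ordinary bits, which simply take the written value, and sticky write-1-to-clear bits, which keep their old value unless the guest writes 1 to them. A standalone model (the mask value here is illustrative):

    #include <assert.h>
    #include <stdint.h>

    #define STICKY ((3u << 26) | (1u << 24))   /* illustrative sticky mask */

    static uint32_t w1c_write(uint32_t reg, uint32_t wr)
    {
        reg = (wr & ~STICKY) | (reg & STICKY);   /* plain bits follow the write   */
        reg &= ~(wr & STICKY);                   /* writing 1 clears a sticky bit */
        return reg;
    }

    int main(void)
    {
        uint32_t reg = 1u << 24;

        assert(w1c_write(reg, 0) == 1u << 24);   /* untouched sticky bit stays set */
        assert(w1c_write(reg, 1u << 24) == 0);   /* writing 1 clears it            */
        return 0;
    }
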
984 static int pch_adpa_mmio_write(struct intel_vgpu *vgpu, in pch_adpa_mmio_write() argument
989 write_vreg(vgpu, offset, p_data, bytes); in pch_adpa_mmio_write()
990 data = vgpu_vreg(vgpu, offset); in pch_adpa_mmio_write()
993 vgpu_vreg(vgpu, offset) &= ~ADPA_CRT_HOTPLUG_FORCE_TRIGGER; in pch_adpa_mmio_write()
997 static int south_chicken2_mmio_write(struct intel_vgpu *vgpu, in south_chicken2_mmio_write() argument
1002 write_vreg(vgpu, offset, p_data, bytes); in south_chicken2_mmio_write()
1003 data = vgpu_vreg(vgpu, offset); in south_chicken2_mmio_write()
1006 vgpu_vreg(vgpu, offset) |= FDI_MPHY_IOSFSB_RESET_STATUS; in south_chicken2_mmio_write()
1008 vgpu_vreg(vgpu, offset) &= ~FDI_MPHY_IOSFSB_RESET_STATUS; in south_chicken2_mmio_write()
1015 static int pri_surf_mmio_write(struct intel_vgpu *vgpu, unsigned int offset, in pri_surf_mmio_write() argument
1018 struct drm_i915_private *dev_priv = vgpu->gvt->gt->i915; in pri_surf_mmio_write()
1022 write_vreg(vgpu, offset, p_data, bytes); in pri_surf_mmio_write()
1023 vgpu_vreg_t(vgpu, DSPSURFLIVE(dev_priv, pipe)) = vgpu_vreg(vgpu, offset); in pri_surf_mmio_write()
1025 vgpu_vreg_t(vgpu, PIPE_FLIPCOUNT_G4X(dev_priv, pipe))++; in pri_surf_mmio_write()
1027 if (vgpu_vreg_t(vgpu, DSPCNTR(dev_priv, pipe)) & PLANE_CTL_ASYNC_FLIP) in pri_surf_mmio_write()
1028 intel_vgpu_trigger_virtual_event(vgpu, event); in pri_surf_mmio_write()
1030 set_bit(event, vgpu->irq.flip_done_event[pipe]); in pri_surf_mmio_write()
1038 static int spr_surf_mmio_write(struct intel_vgpu *vgpu, unsigned int offset, in spr_surf_mmio_write() argument
1044 write_vreg(vgpu, offset, p_data, bytes); in spr_surf_mmio_write()
1045 vgpu_vreg_t(vgpu, SPRSURFLIVE(pipe)) = vgpu_vreg(vgpu, offset); in spr_surf_mmio_write()
1047 if (vgpu_vreg_t(vgpu, SPRCTL(pipe)) & PLANE_CTL_ASYNC_FLIP) in spr_surf_mmio_write()
1048 intel_vgpu_trigger_virtual_event(vgpu, event); in spr_surf_mmio_write()
1050 set_bit(event, vgpu->irq.flip_done_event[pipe]); in spr_surf_mmio_write()
1055 static int reg50080_mmio_write(struct intel_vgpu *vgpu, in reg50080_mmio_write() argument
1059 struct drm_i915_private *dev_priv = vgpu->gvt->gt->i915; in reg50080_mmio_write()
1064 write_vreg(vgpu, offset, p_data, bytes); in reg50080_mmio_write()
1066 vgpu_vreg_t(vgpu, DSPSURFLIVE(dev_priv, pipe)) = vgpu_vreg(vgpu, offset); in reg50080_mmio_write()
1067 vgpu_vreg_t(vgpu, PIPE_FLIPCOUNT_G4X(dev_priv, pipe))++; in reg50080_mmio_write()
1069 vgpu_vreg_t(vgpu, SPRSURFLIVE(pipe)) = vgpu_vreg(vgpu, offset); in reg50080_mmio_write()
1072 if ((vgpu_vreg(vgpu, offset) & REG50080_FLIP_TYPE_MASK) == REG50080_FLIP_TYPE_ASYNC) in reg50080_mmio_write()
1073 intel_vgpu_trigger_virtual_event(vgpu, event); in reg50080_mmio_write()
1075 set_bit(event, vgpu->irq.flip_done_event[pipe]); in reg50080_mmio_write()
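
The three flip handlers above share one completion policy: an asynchronous flip raises the flip-done event from the write handler itself, while a synchronous flip is only recorded in irq.flip_done_event[] so the vblank emulation can deliver it on the next virtual vblank. A toy model of that split:

    #include <stdbool.h>
    #include <stdio.h>

    static unsigned long pending_flip;   /* one bit per flip-done event id */

    static void queue_or_fire(int event, bool async)
    {
        if (async)
            printf("event %d fired immediately\n", event);
        else
            pending_flip |= 1ul << event;   /* delivered at the next vblank */
    }

    int main(void)
    {
        queue_or_fire(3, true);    /* async flip */
        queue_or_fire(4, false);   /* sync flip  */
        printf("pending=0x%lx\n", pending_flip);
        return 0;
    }
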
1080 static int trigger_aux_channel_interrupt(struct intel_vgpu *vgpu, in trigger_aux_channel_interrupt() argument
1083 struct drm_i915_private *dev_priv = vgpu->gvt->gt->i915; in trigger_aux_channel_interrupt()
1102 intel_vgpu_trigger_virtual_event(vgpu, event); in trigger_aux_channel_interrupt()
1106 static int dp_aux_ch_ctl_trans_done(struct intel_vgpu *vgpu, u32 value, in dp_aux_ch_ctl_trans_done() argument
1122 vgpu_vreg(vgpu, reg) = value; in dp_aux_ch_ctl_trans_done()
1125 return trigger_aux_channel_interrupt(vgpu, reg); in dp_aux_ch_ctl_trans_done()
1163 static int dp_aux_ch_ctl_mmio_write(struct intel_vgpu *vgpu, in dp_aux_ch_ctl_mmio_write() argument
1166 struct intel_vgpu_display *display = &vgpu->display; in dp_aux_ch_ctl_mmio_write()
1178 write_vreg(vgpu, offset, p_data, bytes); in dp_aux_ch_ctl_mmio_write()
1179 data = vgpu_vreg(vgpu, offset); in dp_aux_ch_ctl_mmio_write()
1181 if (GRAPHICS_VER(vgpu->gvt->gt->i915) >= 9 && in dp_aux_ch_ctl_mmio_write()
1185 } else if (IS_BROADWELL(vgpu->gvt->gt->i915) && in dp_aux_ch_ctl_mmio_write()
1195 vgpu_vreg(vgpu, offset) = 0; in dp_aux_ch_ctl_mmio_write()
1203 msg = vgpu_vreg(vgpu, offset + 4); in dp_aux_ch_ctl_mmio_write()
1223 vgpu_vreg(vgpu, offset + 4) = AUX_NATIVE_REPLY_NAK; in dp_aux_ch_ctl_mmio_write()
1224 dp_aux_ch_ctl_trans_done(vgpu, data, offset, 2, true); in dp_aux_ch_ctl_mmio_write()
1240 u32 r = vgpu_vreg(vgpu, offset + 8 + t * 4); in dp_aux_ch_ctl_mmio_write()
1262 vgpu_vreg(vgpu, offset + 4) = 0; in dp_aux_ch_ctl_mmio_write()
1263 dp_aux_ch_ctl_trans_done(vgpu, data, offset, 1, in dp_aux_ch_ctl_mmio_write()
1281 vgpu_vreg(vgpu, offset + 4) = 0; in dp_aux_ch_ctl_mmio_write()
1282 vgpu_vreg(vgpu, offset + 8) = 0; in dp_aux_ch_ctl_mmio_write()
1283 vgpu_vreg(vgpu, offset + 12) = 0; in dp_aux_ch_ctl_mmio_write()
1284 vgpu_vreg(vgpu, offset + 16) = 0; in dp_aux_ch_ctl_mmio_write()
1285 vgpu_vreg(vgpu, offset + 20) = 0; in dp_aux_ch_ctl_mmio_write()
1287 dp_aux_ch_ctl_trans_done(vgpu, data, offset, len + 2, in dp_aux_ch_ctl_mmio_write()
1294 vgpu_vreg(vgpu, offset + 4 * idx) = 0; in dp_aux_ch_ctl_mmio_write()
1316 vgpu_vreg(vgpu, offset + in dp_aux_ch_ctl_mmio_write()
1322 dp_aux_ch_ctl_trans_done(vgpu, data, offset, len + 2, in dp_aux_ch_ctl_mmio_write()
1328 intel_gvt_i2c_handle_aux_ch_write(vgpu, port_index, offset, p_data); in dp_aux_ch_ctl_mmio_write()
1331 trigger_aux_channel_interrupt(vgpu, offset); in dp_aux_ch_ctl_mmio_write()
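
dp_aux_ch_ctl_mmio_write() parses the first AUX data word (at offset + 4) as a DP AUX header: bits 31:24 carry the command byte, whose high nibble is the opcode, bits 23:8 carry the address, and bits 7:0 the length field. A sketch of that unpacking with an invented header word:

    #include <stdint.h>
    #include <stdio.h>

    int main(void)
    {
        uint32_t msg  = 0x80023705u;           /* invented AUX header word */
        uint8_t  ctrl = (msg >> 24) & 0xff;
        uint8_t  op   = ctrl >> 4;             /* AUX opcode nibble    */
        uint16_t addr = (msg >> 8) & 0xffff;   /* 16 address bits used */
        uint8_t  len  = msg & 0xff;            /* length field         */

        printf("op=%u addr=0x%04x len=%u\n", op, addr, len);
        return 0;
    }
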
1335 static int mbctl_write(struct intel_vgpu *vgpu, unsigned int offset, in mbctl_write() argument
1339 write_vreg(vgpu, offset, p_data, bytes); in mbctl_write()
1343 static int vga_control_mmio_write(struct intel_vgpu *vgpu, unsigned int offset, in vga_control_mmio_write() argument
1348 write_vreg(vgpu, offset, p_data, bytes); in vga_control_mmio_write()
1349 vga_disable = vgpu_vreg(vgpu, offset) & VGA_DISP_DISABLE; in vga_control_mmio_write()
1351 gvt_dbg_core("vgpu%d: %s VGA mode\n", vgpu->id, in vga_control_mmio_write()
1356 static u32 read_virtual_sbi_register(struct intel_vgpu *vgpu, in read_virtual_sbi_register() argument
1359 struct intel_vgpu_display *display = &vgpu->display; in read_virtual_sbi_register()
1373 static void write_virtual_sbi_register(struct intel_vgpu *vgpu, in write_virtual_sbi_register() argument
1376 struct intel_vgpu_display *display = &vgpu->display; in write_virtual_sbi_register()
1397 static int sbi_data_mmio_read(struct intel_vgpu *vgpu, unsigned int offset, in sbi_data_mmio_read() argument
1400 if (((vgpu_vreg_t(vgpu, SBI_CTL_STAT) & SBI_OPCODE_MASK) >> in sbi_data_mmio_read()
1402 unsigned int sbi_offset = (vgpu_vreg_t(vgpu, SBI_ADDR) & in sbi_data_mmio_read()
1404 vgpu_vreg(vgpu, offset) = read_virtual_sbi_register(vgpu, in sbi_data_mmio_read()
1407 read_vreg(vgpu, offset, p_data, bytes); in sbi_data_mmio_read()
1411 static int sbi_ctl_mmio_write(struct intel_vgpu *vgpu, unsigned int offset, in sbi_ctl_mmio_write() argument
1416 write_vreg(vgpu, offset, p_data, bytes); in sbi_ctl_mmio_write()
1417 data = vgpu_vreg(vgpu, offset); in sbi_ctl_mmio_write()
1425 vgpu_vreg(vgpu, offset) = data; in sbi_ctl_mmio_write()
1427 if (((vgpu_vreg_t(vgpu, SBI_CTL_STAT) & SBI_OPCODE_MASK) >> in sbi_ctl_mmio_write()
1429 unsigned int sbi_offset = (vgpu_vreg_t(vgpu, SBI_ADDR) & in sbi_ctl_mmio_write()
1432 write_virtual_sbi_register(vgpu, sbi_offset, in sbi_ctl_mmio_write()
1433 vgpu_vreg_t(vgpu, SBI_DATA)); in sbi_ctl_mmio_write()
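
read_virtual_sbi_register()/write_virtual_sbi_register() keep the sideband registers in a small per-vGPU table of (offset, value) pairs: a write updates an existing entry or appends a new one, and a read of an unknown offset returns 0. A toy version of that cache (table size invented):

    #include <stdint.h>
    #include <stdio.h>

    #define SBI_NUM 16   /* stand-in for the real table size */

    static struct { uint32_t offset, value; } sbi[SBI_NUM];
    static int sbi_used;

    static void sbi_write(uint32_t offset, uint32_t value)
    {
        int i;

        for (i = 0; i < sbi_used; i++)
            if (sbi[i].offset == offset)
                break;
        if (i == sbi_used && sbi_used < SBI_NUM)
            sbi_used++;                       /* append a new entry */
        if (i < SBI_NUM) {
            sbi[i].offset = offset;
            sbi[i].value = value;
        }
    }

    static uint32_t sbi_read(uint32_t offset)
    {
        for (int i = 0; i < sbi_used; i++)
            if (sbi[i].offset == offset)
                return sbi[i].value;
        return 0;   /* unknown offsets read back as zero */
    }

    int main(void)
    {
        sbi_write(0x0800, 0x1234);
        printf("0x%x 0x%x\n", sbi_read(0x0800), sbi_read(0x0900));
        return 0;
    }
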
1441 static int pvinfo_mmio_read(struct intel_vgpu *vgpu, unsigned int offset, in pvinfo_mmio_read() argument
1446 read_vreg(vgpu, offset, p_data, bytes); in pvinfo_mmio_read()
1469 vgpu->pv_notified = true; in pvinfo_mmio_read()
1473 static int handle_g2v_notification(struct intel_vgpu *vgpu, int notification) in handle_g2v_notification() argument
1479 pdps = (u64 *)&vgpu_vreg64_t(vgpu, vgtif_reg(pdp[0])); in handle_g2v_notification()
1486 mm = intel_vgpu_get_ppgtt_mm(vgpu, root_entry_type, pdps); in handle_g2v_notification()
1490 return intel_vgpu_put_ppgtt_mm(vgpu, pdps); in handle_g2v_notification()
1501 static int send_display_ready_uevent(struct intel_vgpu *vgpu, int ready) in send_display_ready_uevent() argument
1503 struct kobject *kobj = &vgpu->gvt->gt->i915->drm.primary->kdev->kobj; in send_display_ready_uevent()
1511 snprintf(vmid_str, 20, "VMID=%d", vgpu->id); in send_display_ready_uevent()
1517 static int pvinfo_mmio_write(struct intel_vgpu *vgpu, unsigned int offset, in pvinfo_mmio_write() argument
1525 send_display_ready_uevent(vgpu, data ? 1 : 0); in pvinfo_mmio_write()
1528 handle_g2v_notification(vgpu, data); in pvinfo_mmio_write()
1546 enter_failsafe_mode(vgpu, GVT_FAILSAFE_INSUFFICIENT_RESOURCE); in pvinfo_mmio_write()
1556 write_vreg(vgpu, offset, p_data, bytes); in pvinfo_mmio_write()
1561 static int pf_write(struct intel_vgpu *vgpu, in pf_write() argument
1564 struct drm_i915_private *i915 = vgpu->gvt->gt->i915; in pf_write()
1572 vgpu->id); in pf_write()
1576 return intel_vgpu_default_mmio_write(vgpu, offset, p_data, bytes); in pf_write()
1579 static int power_well_ctl_mmio_write(struct intel_vgpu *vgpu, in power_well_ctl_mmio_write() argument
1582 write_vreg(vgpu, offset, p_data, bytes); in power_well_ctl_mmio_write()
1584 if (vgpu_vreg(vgpu, offset) & in power_well_ctl_mmio_write()
1586 vgpu_vreg(vgpu, offset) |= in power_well_ctl_mmio_write()
1589 vgpu_vreg(vgpu, offset) &= in power_well_ctl_mmio_write()
1594 static int gen9_dbuf_ctl_mmio_write(struct intel_vgpu *vgpu, in gen9_dbuf_ctl_mmio_write() argument
1597 write_vreg(vgpu, offset, p_data, bytes); in gen9_dbuf_ctl_mmio_write()
1599 if (vgpu_vreg(vgpu, offset) & DBUF_POWER_REQUEST) in gen9_dbuf_ctl_mmio_write()
1600 vgpu_vreg(vgpu, offset) |= DBUF_POWER_STATE; in gen9_dbuf_ctl_mmio_write()
1602 vgpu_vreg(vgpu, offset) &= ~DBUF_POWER_STATE; in gen9_dbuf_ctl_mmio_write()
1607 static int fpga_dbg_mmio_write(struct intel_vgpu *vgpu, in fpga_dbg_mmio_write() argument
1610 write_vreg(vgpu, offset, p_data, bytes); in fpga_dbg_mmio_write()
1612 if (vgpu_vreg(vgpu, offset) & FPGA_DBG_RM_NOCLAIM) in fpga_dbg_mmio_write()
1613 vgpu_vreg(vgpu, offset) &= ~FPGA_DBG_RM_NOCLAIM; in fpga_dbg_mmio_write()
1617 static int dma_ctrl_write(struct intel_vgpu *vgpu, unsigned int offset, in dma_ctrl_write() argument
1620 struct drm_i915_private *i915 = vgpu->gvt->gt->i915; in dma_ctrl_write()
1623 write_vreg(vgpu, offset, p_data, bytes); in dma_ctrl_write()
1624 mode = vgpu_vreg(vgpu, offset); in dma_ctrl_write()
1629 vgpu->id); in dma_ctrl_write()
1636 static int gen9_trtte_write(struct intel_vgpu *vgpu, unsigned int offset, in gen9_trtte_write() argument
1639 struct drm_i915_private *i915 = vgpu->gvt->gt->i915; in gen9_trtte_write()
1645 vgpu->id); in gen9_trtte_write()
1648 write_vreg(vgpu, offset, p_data, bytes); in gen9_trtte_write()
1653 static int gen9_trtt_chicken_write(struct intel_vgpu *vgpu, unsigned int offset, in gen9_trtt_chicken_write() argument
1656 write_vreg(vgpu, offset, p_data, bytes); in gen9_trtt_chicken_write()
1660 static int dpll_status_read(struct intel_vgpu *vgpu, unsigned int offset, in dpll_status_read() argument
1665 if (vgpu_vreg(vgpu, 0x46010) & (1 << 31)) in dpll_status_read()
1668 if (vgpu_vreg(vgpu, 0x46014) & (1 << 31)) in dpll_status_read()
1671 if (vgpu_vreg(vgpu, 0x46040) & (1 << 31)) in dpll_status_read()
1674 if (vgpu_vreg(vgpu, 0x46060) & (1 << 31)) in dpll_status_read()
1677 vgpu_vreg(vgpu, offset) = v; in dpll_status_read()
1679 return intel_vgpu_default_mmio_read(vgpu, offset, p_data, bytes); in dpll_status_read()
1682 static int mailbox_write(struct intel_vgpu *vgpu, unsigned int offset, in mailbox_write() argument
1687 u32 *data0 = &vgpu_vreg_t(vgpu, GEN6_PCODE_DATA); in mailbox_write()
1691 if (IS_SKYLAKE(vgpu->gvt->gt->i915) || in mailbox_write()
1692 IS_KABYLAKE(vgpu->gvt->gt->i915) || in mailbox_write()
1693 IS_COFFEELAKE(vgpu->gvt->gt->i915) || in mailbox_write()
1694 IS_COMETLAKE(vgpu->gvt->gt->i915)) { in mailbox_write()
1704 } else if (IS_BROXTON(vgpu->gvt->gt->i915)) { in mailbox_write()
1717 if (IS_SKYLAKE(vgpu->gvt->gt->i915) || in mailbox_write()
1718 IS_KABYLAKE(vgpu->gvt->gt->i915) || in mailbox_write()
1719 IS_COFFEELAKE(vgpu->gvt->gt->i915) || in mailbox_write()
1720 IS_COMETLAKE(vgpu->gvt->gt->i915)) in mailbox_write()
1729 vgpu->id, value, *data0); in mailbox_write()
1737 return intel_vgpu_default_mmio_write(vgpu, offset, &value, bytes); in mailbox_write()
1740 static int hws_pga_write(struct intel_vgpu *vgpu, unsigned int offset, in hws_pga_write() argument
1745 intel_gvt_render_mmio_to_engine(vgpu->gvt, offset); in hws_pga_write()
1748 !intel_gvt_ggtt_validate_range(vgpu, value, I915_GTT_PAGE_SIZE)) { in hws_pga_write()
1764 vgpu->hws_pga[engine->id] = value; in hws_pga_write()
1766 vgpu->id, value, offset); in hws_pga_write()
1768 return intel_vgpu_default_mmio_write(vgpu, offset, &value, bytes); in hws_pga_write()
1771 static int skl_power_well_ctl_write(struct intel_vgpu *vgpu, in skl_power_well_ctl_write() argument
1776 if (IS_BROXTON(vgpu->gvt->gt->i915)) in skl_power_well_ctl_write()
1783 return intel_vgpu_default_mmio_write(vgpu, offset, &v, bytes); in skl_power_well_ctl_write()
1786 static int skl_lcpll_write(struct intel_vgpu *vgpu, unsigned int offset, in skl_lcpll_write() argument
1795 vgpu_vreg(vgpu, offset) = v; in skl_lcpll_write()
1800 static int bxt_de_pll_enable_write(struct intel_vgpu *vgpu, in bxt_de_pll_enable_write() argument
1808 vgpu_vreg(vgpu, offset) = v; in bxt_de_pll_enable_write()
1813 static int bxt_port_pll_enable_write(struct intel_vgpu *vgpu, in bxt_port_pll_enable_write() argument
1821 vgpu_vreg(vgpu, offset) = v; in bxt_port_pll_enable_write()
1826 static int bxt_phy_ctl_family_write(struct intel_vgpu *vgpu, in bxt_phy_ctl_family_write() argument
1834 vgpu_vreg(vgpu, _BXT_PHY_CTL_DDI_A) = data; in bxt_phy_ctl_family_write()
1837 vgpu_vreg(vgpu, _BXT_PHY_CTL_DDI_B) = data; in bxt_phy_ctl_family_write()
1838 vgpu_vreg(vgpu, _BXT_PHY_CTL_DDI_C) = data; in bxt_phy_ctl_family_write()
1842 vgpu_vreg(vgpu, offset) = v; in bxt_phy_ctl_family_write()
1847 static int bxt_port_tx_dw3_read(struct intel_vgpu *vgpu, in bxt_port_tx_dw3_read() argument
1850 u32 v = vgpu_vreg(vgpu, offset); in bxt_port_tx_dw3_read()
1854 vgpu_vreg(vgpu, offset) = v; in bxt_port_tx_dw3_read()
1856 return intel_vgpu_default_mmio_read(vgpu, offset, p_data, bytes); in bxt_port_tx_dw3_read()
1859 static int bxt_pcs_dw12_grp_write(struct intel_vgpu *vgpu, in bxt_pcs_dw12_grp_write() argument
1865 vgpu_vreg(vgpu, offset - 0x600) = v; in bxt_pcs_dw12_grp_write()
1866 vgpu_vreg(vgpu, offset - 0x800) = v; in bxt_pcs_dw12_grp_write()
1868 vgpu_vreg(vgpu, offset - 0x400) = v; in bxt_pcs_dw12_grp_write()
1869 vgpu_vreg(vgpu, offset - 0x600) = v; in bxt_pcs_dw12_grp_write()
1872 vgpu_vreg(vgpu, offset) = v; in bxt_pcs_dw12_grp_write()
1877 static int bxt_gt_disp_pwron_write(struct intel_vgpu *vgpu, in bxt_gt_disp_pwron_write() argument
1883 vgpu_vreg_t(vgpu, BXT_PORT_CL1CM_DW0(DPIO_PHY0)) &= in bxt_gt_disp_pwron_write()
1885 vgpu_vreg_t(vgpu, BXT_PORT_CL1CM_DW0(DPIO_PHY0)) |= in bxt_gt_disp_pwron_write()
1890 vgpu_vreg_t(vgpu, BXT_PORT_CL1CM_DW0(DPIO_PHY1)) &= in bxt_gt_disp_pwron_write()
1892 vgpu_vreg_t(vgpu, BXT_PORT_CL1CM_DW0(DPIO_PHY1)) |= in bxt_gt_disp_pwron_write()
1897 vgpu_vreg(vgpu, offset) = v; in bxt_gt_disp_pwron_write()
1902 static int edp_psr_imr_iir_write(struct intel_vgpu *vgpu, in edp_psr_imr_iir_write() argument
1905 vgpu_vreg(vgpu, offset) = 0; in edp_psr_imr_iir_write()
1919 static int bxt_ppat_low_write(struct intel_vgpu *vgpu, unsigned int offset, in bxt_ppat_low_write() argument
1932 vgpu_vreg(vgpu, offset) = lower_32_bits(pat); in bxt_ppat_low_write()
1937 static int guc_status_read(struct intel_vgpu *vgpu, in guc_status_read() argument
1942 read_vreg(vgpu, offset, p_data, bytes); in guc_status_read()
1943 vgpu_vreg(vgpu, offset) &= ~GS_MIA_IN_RESET; in guc_status_read()
1947 static int mmio_read_from_hw(struct intel_vgpu *vgpu, in mmio_read_from_hw() argument
1950 struct intel_gvt *gvt = vgpu->gvt; in mmio_read_from_hw()
1962 vgpu == gvt->scheduler.engine_owner[engine->id] || in mmio_read_from_hw()
1966 vgpu_vreg(vgpu, offset) = in mmio_read_from_hw()
1971 return intel_vgpu_default_mmio_read(vgpu, offset, p_data, bytes); in mmio_read_from_hw()
1974 static int elsp_mmio_write(struct intel_vgpu *vgpu, unsigned int offset, in elsp_mmio_write() argument
1977 struct drm_i915_private *i915 = vgpu->gvt->gt->i915; in elsp_mmio_write()
1978 const struct intel_engine_cs *engine = intel_gvt_render_mmio_to_engine(vgpu->gvt, offset); in elsp_mmio_write()
1988 * vGPU reset, it's set on D0->D3 on PCI config write, and cleared after in elsp_mmio_write()
1989 * vGPU reset when resuming. in elsp_mmio_write()
1991 * S3 resume, but no vGPU reset (triggered by QEMU device model). After in elsp_mmio_write()
1993 * remains set which will break next vGPU reset logic (miss the expected in elsp_mmio_write()
1998 if (vgpu->d3_entered) in elsp_mmio_write()
1999 vgpu->d3_entered = false; in elsp_mmio_write()
2001 execlist = &vgpu->submission.execlist[engine->id]; in elsp_mmio_write()
2005 ret = intel_vgpu_submit_execlist(vgpu, engine); in elsp_mmio_write()
2016 static int ring_mode_mmio_write(struct intel_vgpu *vgpu, unsigned int offset, in ring_mode_mmio_write() argument
2021 intel_gvt_render_mmio_to_engine(vgpu->gvt, offset); in ring_mode_mmio_write()
2026 if (IS_COFFEELAKE(vgpu->gvt->gt->i915) || in ring_mode_mmio_write()
2027 IS_COMETLAKE(vgpu->gvt->gt->i915)) in ring_mode_mmio_write()
2029 write_vreg(vgpu, offset, p_data, bytes); in ring_mode_mmio_write()
2032 enter_failsafe_mode(vgpu, GVT_FAILSAFE_UNSUPPORTED_GUEST); in ring_mode_mmio_write()
2036 if ((IS_COFFEELAKE(vgpu->gvt->gt->i915) || in ring_mode_mmio_write()
2037 IS_COMETLAKE(vgpu->gvt->gt->i915)) && in ring_mode_mmio_write()
2039 enter_failsafe_mode(vgpu, GVT_FAILSAFE_UNSUPPORTED_GUEST); in ring_mode_mmio_write()
2049 !vgpu->pv_notified) { in ring_mode_mmio_write()
2050 enter_failsafe_mode(vgpu, GVT_FAILSAFE_UNSUPPORTED_GUEST); in ring_mode_mmio_write()
2064 ret = intel_vgpu_select_submission_ops(vgpu, in ring_mode_mmio_write()
2070 intel_vgpu_start_schedule(vgpu); in ring_mode_mmio_write()
2075 static int gvt_reg_tlb_control_handler(struct intel_vgpu *vgpu, in gvt_reg_tlb_control_handler() argument
2080 write_vreg(vgpu, offset, p_data, bytes); in gvt_reg_tlb_control_handler()
2081 vgpu_vreg(vgpu, offset) = 0; in gvt_reg_tlb_control_handler()
2102 set_bit(id, (void *)vgpu->submission.tlb_handle_pending); in gvt_reg_tlb_control_handler()
2107 static int ring_reset_ctl_write(struct intel_vgpu *vgpu, in ring_reset_ctl_write() argument
2112 write_vreg(vgpu, offset, p_data, bytes); in ring_reset_ctl_write()
2113 data = vgpu_vreg(vgpu, offset); in ring_reset_ctl_write()
2120 vgpu_vreg(vgpu, offset) = data; in ring_reset_ctl_write()
2124 static int csfe_chicken1_mmio_write(struct intel_vgpu *vgpu, in csfe_chicken1_mmio_write() argument
2131 write_vreg(vgpu, offset, p_data, bytes); in csfe_chicken1_mmio_write()
2135 enter_failsafe_mode(vgpu, GVT_FAILSAFE_UNSUPPORTED_GUEST); in csfe_chicken1_mmio_write()
3051 * @vgpu: a vGPU
3059 int intel_vgpu_default_mmio_read(struct intel_vgpu *vgpu, unsigned int offset, in intel_vgpu_default_mmio_read() argument
3062 read_vreg(vgpu, offset, p_data, bytes); in intel_vgpu_default_mmio_read()
3068 * @vgpu: a vGPU
3076 int intel_vgpu_default_mmio_write(struct intel_vgpu *vgpu, unsigned int offset, in intel_vgpu_default_mmio_write() argument
3079 write_vreg(vgpu, offset, p_data, bytes); in intel_vgpu_default_mmio_write()
3085 * @vgpu: a vGPU
3093 int intel_vgpu_mask_mmio_write(struct intel_vgpu *vgpu, unsigned int offset, in intel_vgpu_mask_mmio_write() argument
3098 old_vreg = vgpu_vreg(vgpu, offset); in intel_vgpu_mask_mmio_write()
3099 write_vreg(vgpu, offset, p_data, bytes); in intel_vgpu_mask_mmio_write()
3100 mask = vgpu_vreg(vgpu, offset) >> 16; in intel_vgpu_mask_mmio_write()
3101 vgpu_vreg(vgpu, offset) = (old_vreg & ~mask) | in intel_vgpu_mask_mmio_write()
3102 (vgpu_vreg(vgpu, offset) & mask); in intel_vgpu_mask_mmio_write()
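
intel_vgpu_mask_mmio_write() applies the same high-16/low-16 masked convention seen in the forcewake handler: the mask is the written value shifted right by 16, and only masked low bits change. From the guest's side a masked write is composed like this (masked_field() is a hypothetical helper):

    #include <stdint.h>
    #include <stdio.h>

    static uint32_t masked_field(uint16_t mask, uint16_t value)
    {
        return ((uint32_t)mask << 16) | (value & mask);
    }

    int main(void)
    {
        /* set bit 3 and clear bit 5, leaving all other bits untouched */
        uint32_t wr = masked_field((1u << 3) | (1u << 5), 1u << 3);

        printf("0x%08x\n", (unsigned)wr);   /* 0x00280008 */
        return 0;
    }
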
3126 * @vgpu: a vGPU
3135 int intel_vgpu_mmio_reg_rw(struct intel_vgpu *vgpu, unsigned int offset, in intel_vgpu_mmio_reg_rw() argument
3138 struct drm_i915_private *i915 = vgpu->gvt->gt->i915; in intel_vgpu_mmio_reg_rw()
3139 struct intel_gvt *gvt = vgpu->gvt; in intel_vgpu_mmio_reg_rw()
3155 return func(vgpu, offset, pdata, bytes); in intel_vgpu_mmio_reg_rw()
3169 return mmio_info->read(vgpu, offset, pdata, bytes); in intel_vgpu_mmio_reg_rw()
3176 old_vreg = vgpu_vreg(vgpu, offset); in intel_vgpu_mmio_reg_rw()
3180 ret = mmio_info->write(vgpu, offset, pdata, bytes); in intel_vgpu_mmio_reg_rw()
3188 data |= vgpu_vreg(vgpu, offset) & ro_mask; in intel_vgpu_mmio_reg_rw()
3189 ret = mmio_info->write(vgpu, offset, &data, bytes); in intel_vgpu_mmio_reg_rw()
3194 u32 mask = vgpu_vreg(vgpu, offset) >> 16; in intel_vgpu_mmio_reg_rw()
3196 vgpu_vreg(vgpu, offset) = (old_vreg & ~mask) in intel_vgpu_mmio_reg_rw()
3197 | (vgpu_vreg(vgpu, offset) & mask); in intel_vgpu_mmio_reg_rw()
3205 intel_vgpu_default_mmio_read(vgpu, offset, pdata, bytes) : in intel_vgpu_mmio_reg_rw()
3206 intel_vgpu_default_mmio_write(vgpu, offset, pdata, bytes); in intel_vgpu_mmio_reg_rw()
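
In the write path just above, any bits covered by the register's ro_mask are forced back to their current vreg value before the handler runs, so a guest write can never alter read-only bits. The merge, as a standalone function:

    #include <assert.h>
    #include <stdint.h>

    static uint32_t apply_ro_mask(uint32_t cur, uint32_t wr, uint32_t ro_mask)
    {
        return (wr & ~ro_mask) | (cur & ro_mask);
    }

    int main(void)
    {
        /* bits 31:28 read-only: the guest's attempt to rewrite them is dropped */
        assert(apply_ro_mask(0x10000000u, 0xf0000001u, 0xf0000000u) == 0x10000001u);
        return 0;
    }
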
3211 struct intel_vgpu *vgpu; in intel_gvt_restore_fence() local
3214 idr_for_each_entry(&(gvt)->vgpu_idr, vgpu, id) { in intel_gvt_restore_fence()
3216 for (i = 0; i < vgpu_fence_sz(vgpu); i++) in intel_gvt_restore_fence()
3217 intel_vgpu_write_fence(vgpu, i, vgpu_vreg64(vgpu, fence_num_to_offset(i))); in intel_gvt_restore_fence()
3224 struct intel_vgpu *vgpu = data; in mmio_pm_restore_handler() local
3228 intel_uncore_write(&dev_priv->uncore, _MMIO(offset), vgpu_vreg(vgpu, offset)); in mmio_pm_restore_handler()
3235 struct intel_vgpu *vgpu; in intel_gvt_restore_mmio() local
3238 idr_for_each_entry(&(gvt)->vgpu_idr, vgpu, id) { in intel_gvt_restore_mmio()
3240 intel_gvt_for_each_tracked_mmio(gvt, mmio_pm_restore_handler, vgpu); in intel_gvt_restore_mmio()