Lines Matching +full:0 +full:x0f000000

45 #define VCE_STATUS_VCPU_REPORT_FW_LOADED_MASK	0x02
66 if (ring->me == 0) in vce_v4_0_ring_get_rptr()
67 return RREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_RB_RPTR)); in vce_v4_0_ring_get_rptr()
69 return RREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_RB_RPTR2)); in vce_v4_0_ring_get_rptr()
71 return RREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_RB_RPTR3)); in vce_v4_0_ring_get_rptr()
88 if (ring->me == 0) in vce_v4_0_ring_get_wptr()
89 return RREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_RB_WPTR)); in vce_v4_0_ring_get_wptr()
91 return RREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_RB_WPTR2)); in vce_v4_0_ring_get_wptr()
93 return RREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_RB_WPTR3)); in vce_v4_0_ring_get_wptr()
114 if (ring->me == 0) in vce_v4_0_ring_set_wptr()
115 WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_RB_WPTR), in vce_v4_0_ring_set_wptr()
118 WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_RB_WPTR2), in vce_v4_0_ring_set_wptr()
121 WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_RB_WPTR3), in vce_v4_0_ring_set_wptr()
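The three pointer accessors above share one dispatch: VCE 4.0 exposes up to three rings, selected by ring->me, each with its own RPTR/WPTR register pair. Since only the lines containing the matched tokens are listed, here is a minimal sketch of the write-pointer setter as a reading aid; the else-if conditions are an assumption reconstructed from the register names, and the doorbell path used under SR-IOV is omitted:

    static void vce_v4_0_ring_set_wptr(struct amdgpu_ring *ring)
    {
        struct amdgpu_device *adev = ring->adev;

        /* each of the three VCE rings has a dedicated WPTR register */
        if (ring->me == 0)
            WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_RB_WPTR),
                   lower_32_bits(ring->wptr));
        else if (ring->me == 1)
            WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_RB_WPTR2),
                   lower_32_bits(ring->wptr));
        else
            WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_RB_WPTR3),
                   lower_32_bits(ring->wptr));
    }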
129 for (i = 0; i < 10; ++i) { in vce_v4_0_firmware_loaded()
130 for (j = 0; j < 100; ++j) { in vce_v4_0_firmware_loaded()
132 RREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_STATUS)); in vce_v4_0_firmware_loaded()
135 return 0; in vce_v4_0_firmware_loaded()
140 WREG32_P(SOC15_REG_OFFSET(VCE, 0, mmVCE_SOFT_RESET), in vce_v4_0_firmware_loaded()
144 WREG32_P(SOC15_REG_OFFSET(VCE, 0, mmVCE_SOFT_RESET), 0, in vce_v4_0_firmware_loaded()
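Lines 129-144 outline a poll-with-retry loop: up to ten attempts, each sampling VCE_STATUS a hundred times for the FW_LOADED report bit, with an ECPU soft-reset pulse between attempts. A hedged reconstruction of the whole helper (the mdelay() lengths and the -ETIMEDOUT return are assumptions):

    static int vce_v4_0_firmware_loaded(struct amdgpu_device *adev)
    {
        int i, j;

        for (i = 0; i < 10; ++i) {
            for (j = 0; j < 100; ++j) {
                uint32_t status =
                    RREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_STATUS));

                if (status & VCE_STATUS_VCPU_REPORT_FW_LOADED_MASK)
                    return 0;   /* firmware is up */
                mdelay(10);
            }

            /* not loaded yet: pulse the ECPU soft reset and retry */
            WREG32_P(SOC15_REG_OFFSET(VCE, 0, mmVCE_SOFT_RESET),
                     VCE_SOFT_RESET__ECPU_SOFT_RESET_MASK,
                     ~VCE_SOFT_RESET__ECPU_SOFT_RESET_MASK);
            mdelay(10);
            WREG32_P(SOC15_REG_OFFSET(VCE, 0, mmVCE_SOFT_RESET), 0,
                     ~VCE_SOFT_RESET__ECPU_SOFT_RESET_MASK);
            mdelay(10);
        }

        return -ETIMEDOUT;
    }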
156 uint32_t data = 0, loop; in vce_v4_0_mmsch_start()
164 WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_MMSCH_VF_CTX_ADDR_LO), lower_32_bits(addr)); in vce_v4_0_mmsch_start()
165 WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_MMSCH_VF_CTX_ADDR_HI), upper_32_bits(addr)); in vce_v4_0_mmsch_start()
168 data = RREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_MMSCH_VF_VMID)); in vce_v4_0_mmsch_start()
170 data |= (0 << VCE_MMSCH_VF_VMID__VF_CTX_VMID__SHIFT); /* use domain0 for MM scheduler */ in vce_v4_0_mmsch_start()
171 WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_MMSCH_VF_VMID), data); in vce_v4_0_mmsch_start()
174 WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_MMSCH_VF_CTX_SIZE), size); in vce_v4_0_mmsch_start()
177 WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_MMSCH_VF_MAILBOX_RESP), 0); in vce_v4_0_mmsch_start()
179 WDOORBELL32(adev->vce.ring[0].doorbell_index, 0); in vce_v4_0_mmsch_start()
180 *adev->vce.ring[0].wptr_cpu_addr = 0; in vce_v4_0_mmsch_start()
181 adev->vce.ring[0].wptr = 0; in vce_v4_0_mmsch_start()
182 adev->vce.ring[0].wptr_old = 0; in vce_v4_0_mmsch_start()
185 WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_MMSCH_VF_MAILBOX_HOST), 0x10000001); in vce_v4_0_mmsch_start()
187 data = RREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_MMSCH_VF_MAILBOX_RESP)); in vce_v4_0_mmsch_start()
189 while ((data & 0x10000002) != 0x10000002) { in vce_v4_0_mmsch_start()
191 data = RREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_MMSCH_VF_MAILBOX_RESP)); in vce_v4_0_mmsch_start()
202 return 0; in vce_v4_0_mmsch_start()
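Lines 164-191 are the SR-IOV handshake with the MMSCH (multimedia scheduler) firmware: write the init-table GPU address, VMID and size, clear the response mailbox, zero ring 0's pointers, then request init through the host mailbox and poll the response register for the ack. A sketch of the polling tail with a bounded timeout (the retry count, delay, and error path are assumptions):

    /* 0x10000001 = init request; MMSCH acks with 0x10000002 in RESP */
    WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_MMSCH_VF_MAILBOX_HOST), 0x10000001);

    data = RREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_MMSCH_VF_MAILBOX_RESP));
    loop = 1000;
    while ((data & 0x10000002) != 0x10000002) {
        udelay(10);
        data = RREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_MMSCH_VF_MAILBOX_RESP));
        if (--loop == 0) {
            dev_err(adev->dev, "VCE MMSCH init handshake timed out\n");
            return -EBUSY;
        }
    }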
209 uint32_t table_size = 0; in vce_v4_0_sriov_start()
210 struct mmsch_v1_0_cmd_direct_write direct_wt = { { 0 } }; in vce_v4_0_sriov_start()
211 struct mmsch_v1_0_cmd_direct_read_modify_write direct_rd_mod_wt = { { 0 } }; in vce_v4_0_sriov_start()
212 struct mmsch_v1_0_cmd_direct_polling direct_poll = { { 0 } }; in vce_v4_0_sriov_start()
213 struct mmsch_v1_0_cmd_end end = { { 0 } }; in vce_v4_0_sriov_start()
222 if (header->vce_table_offset == 0 && header->vce_table_size == 0) { in vce_v4_0_sriov_start()
226 if (header->uvd_table_offset == 0 && header->uvd_table_size == 0) in vce_v4_0_sriov_start()
233 ring = &adev->vce.ring[0]; in vce_v4_0_sriov_start()
234 MMSCH_V1_0_INSERT_DIRECT_WT(SOC15_REG_OFFSET(VCE, 0, mmVCE_RB_BASE_LO), in vce_v4_0_sriov_start()
236 MMSCH_V1_0_INSERT_DIRECT_WT(SOC15_REG_OFFSET(VCE, 0, mmVCE_RB_BASE_HI), in vce_v4_0_sriov_start()
238 MMSCH_V1_0_INSERT_DIRECT_WT(SOC15_REG_OFFSET(VCE, 0, mmVCE_RB_SIZE), in vce_v4_0_sriov_start()
242 MMSCH_V1_0_INSERT_DIRECT_WT(SOC15_REG_OFFSET(VCE, 0, mmVCE_LMI_CTRL), 0x398000); in vce_v4_0_sriov_start()
243 MMSCH_V1_0_INSERT_DIRECT_RD_MOD_WT(SOC15_REG_OFFSET(VCE, 0, mmVCE_LMI_CACHE_CTRL), ~0x1, 0); in vce_v4_0_sriov_start()
244 MMSCH_V1_0_INSERT_DIRECT_WT(SOC15_REG_OFFSET(VCE, 0, mmVCE_LMI_SWAP_CNTL), 0); in vce_v4_0_sriov_start()
245 MMSCH_V1_0_INSERT_DIRECT_WT(SOC15_REG_OFFSET(VCE, 0, mmVCE_LMI_SWAP_CNTL1), 0); in vce_v4_0_sriov_start()
246 MMSCH_V1_0_INSERT_DIRECT_WT(SOC15_REG_OFFSET(VCE, 0, mmVCE_LMI_VM_CTRL), 0); in vce_v4_0_sriov_start()
254 MMSCH_V1_0_INSERT_DIRECT_WT(SOC15_REG_OFFSET(VCE, 0, in vce_v4_0_sriov_start()
256 MMSCH_V1_0_INSERT_DIRECT_WT(SOC15_REG_OFFSET(VCE, 0, in vce_v4_0_sriov_start()
258 (tmr_mc_addr >> 40) & 0xff); in vce_v4_0_sriov_start()
259 MMSCH_V1_0_INSERT_DIRECT_WT(SOC15_REG_OFFSET(VCE, 0, mmVCE_VCPU_CACHE_OFFSET0), 0); in vce_v4_0_sriov_start()
261 MMSCH_V1_0_INSERT_DIRECT_WT(SOC15_REG_OFFSET(VCE, 0, in vce_v4_0_sriov_start()
264 MMSCH_V1_0_INSERT_DIRECT_WT(SOC15_REG_OFFSET(VCE, 0, in vce_v4_0_sriov_start()
266 (adev->vce.gpu_addr >> 40) & 0xff); in vce_v4_0_sriov_start()
267 MMSCH_V1_0_INSERT_DIRECT_WT(SOC15_REG_OFFSET(VCE, 0, mmVCE_VCPU_CACHE_OFFSET0), in vce_v4_0_sriov_start()
268 offset & ~0x0f000000); in vce_v4_0_sriov_start()
271 MMSCH_V1_0_INSERT_DIRECT_WT(SOC15_REG_OFFSET(VCE, 0, in vce_v4_0_sriov_start()
274 MMSCH_V1_0_INSERT_DIRECT_WT(SOC15_REG_OFFSET(VCE, 0, in vce_v4_0_sriov_start()
276 (adev->vce.gpu_addr >> 40) & 0xff); in vce_v4_0_sriov_start()
277 MMSCH_V1_0_INSERT_DIRECT_WT(SOC15_REG_OFFSET(VCE, 0, in vce_v4_0_sriov_start()
280 MMSCH_V1_0_INSERT_DIRECT_WT(SOC15_REG_OFFSET(VCE, 0, in vce_v4_0_sriov_start()
282 (adev->vce.gpu_addr >> 40) & 0xff); in vce_v4_0_sriov_start()
285 MMSCH_V1_0_INSERT_DIRECT_WT(SOC15_REG_OFFSET(VCE, 0, mmVCE_VCPU_CACHE_SIZE0), size); in vce_v4_0_sriov_start()
287 offset = (adev->firmware.load_type != AMDGPU_FW_LOAD_PSP) ? offset + size : 0; in vce_v4_0_sriov_start()
289 MMSCH_V1_0_INSERT_DIRECT_WT(SOC15_REG_OFFSET(VCE, 0, mmVCE_VCPU_CACHE_OFFSET1), in vce_v4_0_sriov_start()
290 (offset & ~0x0f000000) | (1 << 24)); in vce_v4_0_sriov_start()
291 MMSCH_V1_0_INSERT_DIRECT_WT(SOC15_REG_OFFSET(VCE, 0, mmVCE_VCPU_CACHE_SIZE1), size); in vce_v4_0_sriov_start()
295 MMSCH_V1_0_INSERT_DIRECT_WT(SOC15_REG_OFFSET(VCE, 0, mmVCE_VCPU_CACHE_OFFSET2), in vce_v4_0_sriov_start()
296 (offset & ~0x0f000000) | (2 << 24)); in vce_v4_0_sriov_start()
297 MMSCH_V1_0_INSERT_DIRECT_WT(SOC15_REG_OFFSET(VCE, 0, mmVCE_VCPU_CACHE_SIZE2), size); in vce_v4_0_sriov_start()
299 MMSCH_V1_0_INSERT_DIRECT_RD_MOD_WT(SOC15_REG_OFFSET(VCE, 0, mmVCE_LMI_CTRL2), ~0x100, 0); in vce_v4_0_sriov_start()
300 MMSCH_V1_0_INSERT_DIRECT_RD_MOD_WT(SOC15_REG_OFFSET(VCE, 0, mmVCE_SYS_INT_EN), in vce_v4_0_sriov_start()
305 MMSCH_V1_0_INSERT_DIRECT_RD_MOD_WT(SOC15_REG_OFFSET(VCE, 0, mmVCE_STATUS), in vce_v4_0_sriov_start()
307 MMSCH_V1_0_INSERT_DIRECT_RD_MOD_WT(SOC15_REG_OFFSET(VCE, 0, mmVCE_VCPU_CNTL), in vce_v4_0_sriov_start()
308 ~0x200001, VCE_VCPU_CNTL__CLK_EN_MASK); in vce_v4_0_sriov_start()
309 MMSCH_V1_0_INSERT_DIRECT_RD_MOD_WT(SOC15_REG_OFFSET(VCE, 0, mmVCE_SOFT_RESET), in vce_v4_0_sriov_start()
310 ~VCE_SOFT_RESET__ECPU_SOFT_RESET_MASK, 0); in vce_v4_0_sriov_start()
312 MMSCH_V1_0_INSERT_DIRECT_POLL(SOC15_REG_OFFSET(VCE, 0, mmVCE_STATUS), in vce_v4_0_sriov_start()
317 MMSCH_V1_0_INSERT_DIRECT_RD_MOD_WT(SOC15_REG_OFFSET(VCE, 0, mmVCE_STATUS), in vce_v4_0_sriov_start()
318 ~VCE_STATUS__JOB_BUSY_MASK, 0); in vce_v4_0_sriov_start()
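None of the MMSCH_V1_0_INSERT_DIRECT_* invocations above touch hardware directly: each appends a typed command packet (direct write, read-modify-write, poll, end) to the in-memory init table that vce_v4_0_mmsch_start() later hands to the scheduler, which replays the writes on behalf of the VF. A simplified sketch of what a direct-write insertion does, assuming a u32 table cursor; the exact packet layout lives in mmsch_v1_0.h and may differ:

    /* append a "write value to register" packet to the init table */
    direct_wt.cmd_header.reg_offset =
        SOC15_REG_OFFSET(VCE, 0, mmVCE_LMI_SWAP_CNTL);
    direct_wt.reg_value = 0;
    memcpy(init_table, &direct_wt, sizeof(direct_wt));
    init_table += sizeof(direct_wt) / 4;   /* cursor counts dwords */
    table_size += sizeof(direct_wt) / 4;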
341 ring = &adev->vce.ring[0]; in vce_v4_0_start()
343 WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_RB_RPTR), lower_32_bits(ring->wptr)); in vce_v4_0_start()
344 WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_RB_WPTR), lower_32_bits(ring->wptr)); in vce_v4_0_start()
345 WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_RB_BASE_LO), ring->gpu_addr); in vce_v4_0_start()
346 WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_RB_BASE_HI), upper_32_bits(ring->gpu_addr)); in vce_v4_0_start()
347 WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_RB_SIZE), ring->ring_size / 4); in vce_v4_0_start()
351 WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_RB_RPTR2), lower_32_bits(ring->wptr)); in vce_v4_0_start()
352 WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_RB_WPTR2), lower_32_bits(ring->wptr)); in vce_v4_0_start()
353 WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_RB_BASE_LO2), ring->gpu_addr); in vce_v4_0_start()
354 WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_RB_BASE_HI2), upper_32_bits(ring->gpu_addr)); in vce_v4_0_start()
355 WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_RB_SIZE2), ring->ring_size / 4); in vce_v4_0_start()
359 WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_RB_RPTR3), lower_32_bits(ring->wptr)); in vce_v4_0_start()
360 WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_RB_WPTR3), lower_32_bits(ring->wptr)); in vce_v4_0_start()
361 WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_RB_BASE_LO3), ring->gpu_addr); in vce_v4_0_start()
362 WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_RB_BASE_HI3), upper_32_bits(ring->gpu_addr)); in vce_v4_0_start()
363 WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_RB_SIZE3), ring->ring_size / 4); in vce_v4_0_start()
366 WREG32_P(SOC15_REG_OFFSET(VCE, 0, mmVCE_STATUS), VCE_STATUS__JOB_BUSY_MASK, in vce_v4_0_start()
369 WREG32_P(SOC15_REG_OFFSET(VCE, 0, mmVCE_VCPU_CNTL), 1, ~0x200001); in vce_v4_0_start()
371 WREG32_P(SOC15_REG_OFFSET(VCE, 0, mmVCE_SOFT_RESET), 0, in vce_v4_0_start()
378 WREG32_P(SOC15_REG_OFFSET(VCE, 0, mmVCE_STATUS), 0, ~VCE_STATUS__JOB_BUSY_MASK); in vce_v4_0_start()
385 return 0; in vce_v4_0_start()
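Between the soft-reset release at line 371 and the status clear at line 378, the start path waits for the VCPU firmware to report in and only then drops the JOB_BUSY flag. A hedged reconstruction of that tail (the 100 ms settle delay and the error message are assumptions):

    WREG32_P(SOC15_REG_OFFSET(VCE, 0, mmVCE_SOFT_RESET), 0,
             ~VCE_SOFT_RESET__ECPU_SOFT_RESET_MASK);
    mdelay(100);

    r = vce_v4_0_firmware_loaded(adev);

    /* clear the BUSY flag regardless, then report failure if any */
    WREG32_P(SOC15_REG_OFFSET(VCE, 0, mmVCE_STATUS), 0,
             ~VCE_STATUS__JOB_BUSY_MASK);

    if (r) {
        DRM_ERROR("VCE not responding, giving up!!!\n");
        return r;
    }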
392 WREG32_P(SOC15_REG_OFFSET(VCE, 0, mmVCE_VCPU_CNTL), 0, ~0x200001); in vce_v4_0_stop()
395 WREG32_P(SOC15_REG_OFFSET(VCE, 0, mmVCE_SOFT_RESET), in vce_v4_0_stop()
400 WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_STATUS), 0); in vce_v4_0_stop()
407 return 0; in vce_v4_0_stop()
422 return 0; in vce_v4_0_early_init()
465 for (i = 0; i < adev->vce.num_rings; i++) { in vce_v4_0_sw_init()
469 ring->vm_hub = AMDGPU_MMHUB0(0); in vce_v4_0_sw_init()
478 if (i == 0) in vce_v4_0_sw_init()
483 r = amdgpu_ring_init(adev, ring, 512, &adev->vce.irq, 0, in vce_v4_0_sw_init()
528 for (i = 0; i < adev->vce.num_rings; i++) { in vce_v4_0_hw_init()
536 return 0; in vce_v4_0_hw_init()
553 return 0; in vce_v4_0_hw_fini()
562 return 0; in vce_v4_0_suspend()
590 amdgpu_asic_set_vce_clocks(adev, 0, 0); in vce_v4_0_suspend()
635 WREG32_P(SOC15_REG_OFFSET(VCE, 0, mmVCE_CLOCK_GATING_A), 0, ~(1 << 16)); in vce_v4_0_mc_resume()
636 WREG32_P(SOC15_REG_OFFSET(VCE, 0, mmVCE_UENC_CLOCK_GATING), 0x1FF000, ~0xFF9FF000); in vce_v4_0_mc_resume()
637 WREG32_P(SOC15_REG_OFFSET(VCE, 0, mmVCE_UENC_REG_CLOCK_GATING), 0x3F, ~0x3F); in vce_v4_0_mc_resume()
638 WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_CLOCK_GATING_B), 0x1FF); in vce_v4_0_mc_resume()
640 WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_LMI_CTRL), 0x00398000); in vce_v4_0_mc_resume()
641 WREG32_P(SOC15_REG_OFFSET(VCE, 0, mmVCE_LMI_CACHE_CTRL), 0x0, ~0x1); in vce_v4_0_mc_resume()
642 WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_LMI_SWAP_CNTL), 0); in vce_v4_0_mc_resume()
643 WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_LMI_SWAP_CNTL1), 0); in vce_v4_0_mc_resume()
644 WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_LMI_VM_CTRL), 0); in vce_v4_0_mc_resume()
651 WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_LMI_VCPU_CACHE_40BIT_BAR0), in vce_v4_0_mc_resume()
653 WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_LMI_VCPU_CACHE_64BIT_BAR0), in vce_v4_0_mc_resume()
654 (tmr_mc_addr >> 40) & 0xff); in vce_v4_0_mc_resume()
655 WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_VCPU_CACHE_OFFSET0), 0); in vce_v4_0_mc_resume()
657 WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_LMI_VCPU_CACHE_40BIT_BAR0), in vce_v4_0_mc_resume()
659 WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_LMI_VCPU_CACHE_64BIT_BAR0), in vce_v4_0_mc_resume()
660 (adev->vce.gpu_addr >> 40) & 0xff); in vce_v4_0_mc_resume()
661 WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_VCPU_CACHE_OFFSET0), offset & ~0x0f000000); in vce_v4_0_mc_resume()
665 WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_VCPU_CACHE_SIZE0), size); in vce_v4_0_mc_resume()
667 WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_LMI_VCPU_CACHE_40BIT_BAR1), (adev->vce.gpu_addr >> 8)); in vce_v4_0_mc_resume()
668 WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_LMI_VCPU_CACHE_64BIT_BAR1), (adev->vce.gpu_addr >> 40) & 0xff); in vce_v4_0_mc_resume()
669 offset = (adev->firmware.load_type != AMDGPU_FW_LOAD_PSP) ? offset + size : 0; in vce_v4_0_mc_resume()
671 WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_VCPU_CACHE_OFFSET1), (offset & ~0x0f000000) | (1 << 24)); in vce_v4_0_mc_resume()
672 WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_VCPU_CACHE_SIZE1), size); in vce_v4_0_mc_resume()
674 WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_LMI_VCPU_CACHE_40BIT_BAR2), (adev->vce.gpu_addr >> 8)); in vce_v4_0_mc_resume()
675 WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_LMI_VCPU_CACHE_64BIT_BAR2), (adev->vce.gpu_addr >> 40) & 0xff); in vce_v4_0_mc_resume()
678 WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_VCPU_CACHE_OFFSET2), (offset & ~0x0f000000) | (2 << 24)); in vce_v4_0_mc_resume()
679 WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_VCPU_CACHE_SIZE2), size); in vce_v4_0_mc_resume()
681 WREG32_P(SOC15_REG_OFFSET(VCE, 0, mmVCE_LMI_CTRL2), 0x0, ~0x100); in vce_v4_0_mc_resume()
682 WREG32_P(SOC15_REG_OFFSET(VCE, 0, mmVCE_SYS_INT_EN), in vce_v4_0_mc_resume()
691 return 0; in vce_v4_0_set_clockgating_state()
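A note on the masked writes that dominate vce_v4_0_mc_resume(): in WREG32_P(reg, val, mask) the third argument is, somewhat counter-intuitively, the set of bits to preserve, with val supplying the bits outside the mask. Per the usual amdgpu definition it expands roughly to:

    /* WREG32_P: keep the bits in `mask`, take the rest from `val` */
    uint32_t tmp = RREG32(reg);
    tmp &= mask;            /* preserved bits            */
    tmp |= val & ~mask;     /* new bits outside the mask */
    WREG32(reg, tmp);

So WREG32_P(..., 0x0, ~0x1) at line 641 clears only bit 0 of VCE_LMI_CACHE_CTRL and leaves every other bit untouched.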
694 #if 0
698 u32 mask = 0;
700 mask |= (adev->vce.harvest_config & AMDGPU_VCE_HARVEST_VCE0) ? 0 : SRBM_STATUS2__VCE0_BUSY_MASK;
701 mask |= (adev->vce.harvest_config & AMDGPU_VCE_HARVEST_VCE1) ? 0 : SRBM_STATUS2__VCE1_BUSY_MASK;
711 for (i = 0; i < adev->usec_timeout; i++)
713 return 0;
718 #define VCE_STATUS_VCPU_REPORT_AUTO_BUSY_MASK 0x00000008L /* AUTO_BUSY */
719 #define VCE_STATUS_VCPU_REPORT_RB0_BUSY_MASK 0x00000010L /* RB0_BUSY */
720 #define VCE_STATUS_VCPU_REPORT_RB1_BUSY_MASK 0x00000020L /* RB1_BUSY */
727 u32 srbm_soft_reset = 0;
733 * (0 for 1st instance, 0x10 for 2nd instance).
743 WREG32_FIELD(GRBM_GFX_INDEX, INSTANCE_INDEX, 0);
744 if (RREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_STATUS)) & AMDGPU_VCE_STATUS_BUSY_MASK) {
748 WREG32_FIELD(GRBM_GFX_INDEX, INSTANCE_INDEX, 0x10);
749 if (RREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_STATUS)) & AMDGPU_VCE_STATUS_BUSY_MASK) {
753 WREG32_FIELD(GRBM_GFX_INDEX, INSTANCE_INDEX, 0);
760 adev->vce.srbm_soft_reset = 0;
771 return 0;
779 dev_info(adev->dev, "SRBM_SOFT_RESET=0x%08X\n", tmp);
793 return 0;
801 return 0;
814 return 0;
825 tmp = data = RREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_RB_ARB_CTRL));
832 WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_RB_ARB_CTRL), data);
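Lines 825-832 belong to the clock-gating override helper: read VCE_RB_ARB_CTRL, set or clear the CGTT override bit, and write back only if the value changed. A hedged reconstruction (the function and mask names are assumptions based on the register involved):

    static void vce_v4_0_override_vce_clock_gating(struct amdgpu_device *adev,
                                                   bool override)
    {
        u32 tmp, data;

        tmp = data = RREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_RB_ARB_CTRL));
        if (override)
            data |= VCE_RB_ARB_CTRL__VCE_CGTT_OVERRIDE_MASK;
        else
            data &= ~VCE_RB_ARB_CTRL__VCE_CGTT_OVERRIDE_MASK;

        /* skip the register write when nothing changed */
        if (tmp != data)
            WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_RB_ARB_CTRL), data);
    }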
849 data = RREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_CLOCK_GATING_B));
850 data |= 0x1ff;
851 data &= ~0xef0000;
852 WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_CLOCK_GATING_B), data);
854 data = RREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_UENC_CLOCK_GATING));
855 data |= 0x3ff000;
856 data &= ~0xffc00000;
857 WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_UENC_CLOCK_GATING), data);
859 data = RREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_UENC_CLOCK_GATING_2));
860 data |= 0x2;
861 data &= ~0x00010000;
862 WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_UENC_CLOCK_GATING_2), data);
864 data = RREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_UENC_REG_CLOCK_GATING));
865 data |= 0x37f;
866 WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_UENC_REG_CLOCK_GATING), data);
868 data = RREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_UENC_DMA_DCLK_CTRL));
872 0x8;
873 WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_UENC_DMA_DCLK_CTRL), data);
875 data = RREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_CLOCK_GATING_B));
876 data &= ~0x80010;
877 data |= 0xe70008;
878 WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_CLOCK_GATING_B), data);
880 data = RREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_UENC_CLOCK_GATING));
881 data |= 0xffc00000;
882 WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_UENC_CLOCK_GATING), data);
884 data = RREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_UENC_CLOCK_GATING_2));
885 data |= 0x10000;
886 WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_UENC_CLOCK_GATING_2), data);
888 data = RREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_UENC_REG_CLOCK_GATING));
889 data &= ~0xffc00000;
890 WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_UENC_REG_CLOCK_GATING), data);
892 data = RREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_UENC_DMA_DCLK_CTRL));
896 0x8);
897 WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_UENC_DMA_DCLK_CTRL), data);
927 return 0;
930 for (i = 0; i < 2; i++) {
931 /* Program VCE Instance 0 or 1 if not harvested */
939 uint32_t data = RREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_CLOCK_GATING_A));
940 data &= ~(0xf | 0xff0);
941 data |= ((0x0 << 0) | (0x04 << 4));
942 WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_CLOCK_GATING_A), data);
945 data = RREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_UENC_CLOCK_GATING));
946 data &= ~(0xf | 0xff0);
947 data |= ((0x0 << 0) | (0x04 << 4));
948 WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_UENC_CLOCK_GATING), data);
954 WREG32_FIELD(GRBM_GFX_INDEX, VCE_INSTANCE, 0);
957 return 0;
1027 lower_32_bits(pd_addr), 0xffffffff); in vce_v4_0_emit_vm_flush()
1043 uint32_t val = 0; in vce_v4_0_set_interrupt_state()
1049 WREG32_P(SOC15_REG_OFFSET(VCE, 0, mmVCE_SYS_INT_EN), val, in vce_v4_0_set_interrupt_state()
1052 return 0; in vce_v4_0_set_interrupt_state()
1061 switch (entry->src_data[0]) { in vce_v4_0_process_interrupt()
1062 case 0: in vce_v4_0_process_interrupt()
1065 amdgpu_fence_process(&adev->vce.ring[entry->src_data[0]]); in vce_v4_0_process_interrupt()
1069 entry->src_id, entry->src_data[0]); in vce_v4_0_process_interrupt()
1073 return 0; in vce_v4_0_process_interrupt()
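The handler at lines 1061-1073 routes by entry->src_data[0], which carries the ring index; values 0-2 mean a fence completed on the corresponding ring. The listing omits the non-matching arms, so for context (the case 1/2 labels and the default arm are reconstructed by assumption):

    switch (entry->src_data[0]) {
    case 0:
    case 1:
    case 2:
        /* src_data[0] selects which of the three VCE rings signaled */
        amdgpu_fence_process(&adev->vce.ring[entry->src_data[0]]);
        break;
    default:
        DRM_ERROR("Unhandled interrupt: %d %d\n",
                  entry->src_id, entry->src_data[0]);
        break;
    }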
1098 .align_mask = 0x3f,
1132 for (i = 0; i < adev->vce.num_rings; i++) { in vce_v4_0_set_ring_funcs()
1154 .minor = 0,
1155 .rev = 0,