Lines matching +full:hpd +full:-reliable +full:-delay (search hits in drivers/gpu/drm/radeon/cik.c)
144 * cik_get_allowed_info_register - fetch the register for the info ioctl
150 * Returns 0 for success or -EINVAL for an invalid register
172 return -EINVAL; in cik_get_allowed_info_register()
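The function is a whitelist gate: only known-safe status registers can be read through the info ioctl, and anything else bounces with -EINVAL. A minimal sketch of the pattern, with the register list abridged (the full switch in the driver names several more status registers):

    int cik_get_allowed_info_register(struct radeon_device *rdev,
                                      u32 reg, u32 *val)
    {
        switch (reg) {
        case GRBM_STATUS:
        case SRBM_STATUS:
        case UVD_STATUS:
            /* safe, read-only status registers */
            *val = RREG32(reg);
            return 0;
        default:
            return -EINVAL;
        }
    }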
184 spin_lock_irqsave(&rdev->didt_idx_lock, flags); in cik_didt_rreg()
187 spin_unlock_irqrestore(&rdev->didt_idx_lock, flags); in cik_didt_rreg()
195 spin_lock_irqsave(&rdev->didt_idx_lock, flags); in cik_didt_wreg()
198 spin_unlock_irqrestore(&rdev->didt_idx_lock, flags); in cik_didt_wreg()
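The didt accessors (and the pciep ones below) are the usual index/data indirect-register idiom: a spinlock keeps the index write and the data access paired so concurrent callers cannot interleave them. The read side, reconstructed with the driver's register names:

    u32 cik_didt_rreg(struct radeon_device *rdev, u32 reg)
    {
        unsigned long flags;
        u32 r;

        spin_lock_irqsave(&rdev->didt_idx_lock, flags);
        WREG32(CIK_DIDT_IND_INDEX, reg);   /* select the indirect register */
        r = RREG32(CIK_DIDT_IND_DATA);     /* then read its value */
        spin_unlock_irqrestore(&rdev->didt_idx_lock, flags);
        return r;
    }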
227 actual_temp = (temp / 8) - 49; in kv_get_temp()
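The sensor reports temperature in eighths of a degree Celsius with a 49-degree offset, so a raw reading of 672 decodes to 672 / 8 - 49 = 35 °C.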
242 spin_lock_irqsave(&rdev->pciep_idx_lock, flags); in cik_pciep_rreg()
246 spin_unlock_irqrestore(&rdev->pciep_idx_lock, flags); in cik_pciep_rreg()
254 spin_lock_irqsave(&rdev->pciep_idx_lock, flags); in cik_pciep_wreg()
259 spin_unlock_irqrestore(&rdev->pciep_idx_lock, flags); in cik_pciep_wreg()
1621 switch (rdev->family) { in cik_init_golden_registers()
1698 * cik_get_xclk - get the xclk
1707 u32 reference_clock = rdev->clock.spll.reference_freq; in cik_get_xclk()
1709 if (rdev->flags & RADEON_IS_IGP) { in cik_get_xclk()
1720 * cik_mm_rdoorbell - read a doorbell dword
1730 if (index < rdev->doorbell.num_doorbells) { in cik_mm_rdoorbell()
1731 return readl(rdev->doorbell.ptr + index); in cik_mm_rdoorbell()
1739 * cik_mm_wdoorbell - write a doorbell dword
1750 if (index < rdev->doorbell.num_doorbells) { in cik_mm_wdoorbell()
1751 writel(v, rdev->doorbell.ptr + index); in cik_mm_wdoorbell()
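Both doorbell helpers bounds-check the dword index against the aperture before touching the mapping, so a stray index is logged and dropped instead of dereferenced. The read side in full, reconstructed:

    u32 cik_mm_rdoorbell(struct radeon_device *rdev, u32 index)
    {
        if (index < rdev->doorbell.num_doorbells)
            return readl(rdev->doorbell.ptr + index);

        DRM_ERROR("reading beyond doorbell aperture: 0x%08x!\n", index);
        return 0;
    }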
1829 * cik_srbm_select - select specific register instances
1853 * ci_mc_load_microcode - load MC ucode into the hw
1869 if (!rdev->mc_fw) in ci_mc_load_microcode()
1870 return -EINVAL; in ci_mc_load_microcode()
1872 if (rdev->new_fw) { in ci_mc_load_microcode()
1874 (const struct mc_firmware_header_v1_0 *)rdev->mc_fw->data; in ci_mc_load_microcode()
1876 radeon_ucode_print_mc_hdr(&hdr->header); in ci_mc_load_microcode()
1878 regs_size = le32_to_cpu(hdr->io_debug_size_bytes) / (4 * 2); in ci_mc_load_microcode()
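The divide by (4 * 2) at 1878 converts bytes into entries: each io_debug record is a dword pair, an MC_SEQ_IO_DEBUG index plus its value, i.e. 8 bytes per record.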
1880 (rdev->mc_fw->data + le32_to_cpu(hdr->io_debug_array_offset_bytes)); in ci_mc_load_microcode()
1881 ucode_size = le32_to_cpu(hdr->header.ucode_size_bytes) / 4; in ci_mc_load_microcode()
1883 (rdev->mc_fw->data + le32_to_cpu(hdr->header.ucode_array_offset_bytes)); in ci_mc_load_microcode()
1885 ucode_size = rdev->mc_fw->size / 4; in ci_mc_load_microcode()
1887 switch (rdev->family) { in ci_mc_load_microcode()
1897 return -EINVAL; in ci_mc_load_microcode()
1899 fw_data = (const __be32 *)rdev->mc_fw->data; in ci_mc_load_microcode()
1911 if (rdev->new_fw) { in ci_mc_load_microcode()
1921 if ((rdev->pdev->device == 0x6649) && ((tmp & 0xff00) == 0x5600)) { in ci_mc_load_microcode()
1930 if (rdev->new_fw) in ci_mc_load_microcode()
1942 for (i = 0; i < rdev->usec_timeout; i++) { in ci_mc_load_microcode()
1947 for (i = 0; i < rdev->usec_timeout; i++) { in ci_mc_load_microcode()
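The two loops at 1942 and 1947 are the driver's stock busy-wait: poll a status bit for at most rdev->usec_timeout iterations, with udelay(1) per pass. Here they wait for memory training on each channel, roughly:

    for (i = 0; i < rdev->usec_timeout; i++) {
        if (RREG32(MC_SEQ_TRAIN_WAKEUP_CNTL) & TRAIN_DONE_D0)
            break;          /* channel 0 trained; D1 is polled the same way */
        udelay(1);
    }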
1958 * cik_init_microcode - load ucode images from disk
1981 switch (rdev->family) { in cik_init_microcode()
1984 if ((rdev->pdev->revision == 0x80) || in cik_init_microcode()
1985 (rdev->pdev->revision == 0x81) || in cik_init_microcode()
1986 (rdev->pdev->device == 0x665f)) in cik_init_microcode()
2002 if (rdev->pdev->revision == 0x80) in cik_init_microcode()
2055 err = request_firmware(&rdev->pfp_fw, fw_name, rdev->dev); in cik_init_microcode()
2058 err = request_firmware(&rdev->pfp_fw, fw_name, rdev->dev); in cik_init_microcode()
2061 if (rdev->pfp_fw->size != pfp_req_size) { in cik_init_microcode()
2063 rdev->pfp_fw->size, fw_name); in cik_init_microcode()
2064 err = -EINVAL; in cik_init_microcode()
2068 err = radeon_ucode_validate(rdev->pfp_fw); in cik_init_microcode()
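Every firmware image here is fetched with the same two-step fallback: request the new-style name first and, only if that fails, the legacy one; a successful new-style load is then checked with radeon_ucode_validate(), while a legacy load only gets a size check. One iteration of the pattern, sketched for the PFP image using the function's own variables:

    snprintf(fw_name, sizeof(fw_name), "radeon/%s_pfp.bin", new_chip_name);
    err = request_firmware(&rdev->pfp_fw, fw_name, rdev->dev);
    if (err) {
        /* fall back to the legacy image name */
        snprintf(fw_name, sizeof(fw_name), "radeon/%s_pfp.bin", chip_name);
        err = request_firmware(&rdev->pfp_fw, fw_name, rdev->dev);
        if (err)
            goto out;
        if (rdev->pfp_fw->size != pfp_req_size)
            err = -EINVAL;   /* the size check at 2061 */
    } else {
        err = radeon_ucode_validate(rdev->pfp_fw);
    }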
2079 err = request_firmware(&rdev->me_fw, fw_name, rdev->dev); in cik_init_microcode()
2082 err = request_firmware(&rdev->me_fw, fw_name, rdev->dev); in cik_init_microcode()
2085 if (rdev->me_fw->size != me_req_size) { in cik_init_microcode()
2087 rdev->me_fw->size, fw_name); in cik_init_microcode()
2088 err = -EINVAL; in cik_init_microcode()
2091 err = radeon_ucode_validate(rdev->me_fw); in cik_init_microcode()
2102 err = request_firmware(&rdev->ce_fw, fw_name, rdev->dev); in cik_init_microcode()
2105 err = request_firmware(&rdev->ce_fw, fw_name, rdev->dev); in cik_init_microcode()
2108 if (rdev->ce_fw->size != ce_req_size) { in cik_init_microcode()
2110 rdev->ce_fw->size, fw_name); in cik_init_microcode()
2111 err = -EINVAL; in cik_init_microcode()
2114 err = radeon_ucode_validate(rdev->ce_fw); in cik_init_microcode()
2125 err = request_firmware(&rdev->mec_fw, fw_name, rdev->dev); in cik_init_microcode()
2128 err = request_firmware(&rdev->mec_fw, fw_name, rdev->dev); in cik_init_microcode()
2131 if (rdev->mec_fw->size != mec_req_size) { in cik_init_microcode()
2133 rdev->mec_fw->size, fw_name); in cik_init_microcode()
2134 err = -EINVAL; in cik_init_microcode()
2137 err = radeon_ucode_validate(rdev->mec_fw); in cik_init_microcode()
2147 if (rdev->family == CHIP_KAVERI) { in cik_init_microcode()
2149 err = request_firmware(&rdev->mec2_fw, fw_name, rdev->dev); in cik_init_microcode()
2153 err = radeon_ucode_validate(rdev->mec2_fw); in cik_init_microcode()
2163 err = request_firmware(&rdev->rlc_fw, fw_name, rdev->dev); in cik_init_microcode()
2166 err = request_firmware(&rdev->rlc_fw, fw_name, rdev->dev); in cik_init_microcode()
2169 if (rdev->rlc_fw->size != rlc_req_size) { in cik_init_microcode()
2171 rdev->rlc_fw->size, fw_name); in cik_init_microcode()
2172 err = -EINVAL; in cik_init_microcode()
2175 err = radeon_ucode_validate(rdev->rlc_fw); in cik_init_microcode()
2186 err = request_firmware(&rdev->sdma_fw, fw_name, rdev->dev); in cik_init_microcode()
2189 err = request_firmware(&rdev->sdma_fw, fw_name, rdev->dev); in cik_init_microcode()
2192 if (rdev->sdma_fw->size != sdma_req_size) { in cik_init_microcode()
2194 rdev->sdma_fw->size, fw_name); in cik_init_microcode()
2195 err = -EINVAL; in cik_init_microcode()
2198 err = radeon_ucode_validate(rdev->sdma_fw); in cik_init_microcode()
2209 if (!(rdev->flags & RADEON_IS_IGP)) { in cik_init_microcode()
2211 err = request_firmware(&rdev->mc_fw, fw_name, rdev->dev); in cik_init_microcode()
2214 err = request_firmware(&rdev->mc_fw, fw_name, rdev->dev); in cik_init_microcode()
2217 err = request_firmware(&rdev->mc_fw, fw_name, rdev->dev); in cik_init_microcode()
2221 if ((rdev->mc_fw->size != mc_req_size) && in cik_init_microcode()
2222 (rdev->mc_fw->size != mc2_req_size)){ in cik_init_microcode()
2224 rdev->mc_fw->size, fw_name); in cik_init_microcode()
2225 err = -EINVAL; in cik_init_microcode()
2227 DRM_INFO("%s: %zu bytes\n", fw_name, rdev->mc_fw->size); in cik_init_microcode()
2229 err = radeon_ucode_validate(rdev->mc_fw); in cik_init_microcode()
2243 err = request_firmware(&rdev->smc_fw, fw_name, rdev->dev); in cik_init_microcode()
2246 err = request_firmware(&rdev->smc_fw, fw_name, rdev->dev); in cik_init_microcode()
2250 release_firmware(rdev->smc_fw); in cik_init_microcode()
2251 rdev->smc_fw = NULL; in cik_init_microcode()
2253 } else if (rdev->smc_fw->size != smc_req_size) { in cik_init_microcode()
2255 rdev->smc_fw->size, fw_name); in cik_init_microcode()
2256 err = -EINVAL; in cik_init_microcode()
2259 err = radeon_ucode_validate(rdev->smc_fw); in cik_init_microcode()
2271 rdev->new_fw = false; in cik_init_microcode()
2274 err = -EINVAL; in cik_init_microcode()
2276 rdev->new_fw = true; in cik_init_microcode()
2281 if (err != -EINVAL) in cik_init_microcode()
2284 release_firmware(rdev->pfp_fw); in cik_init_microcode()
2285 rdev->pfp_fw = NULL; in cik_init_microcode()
2286 release_firmware(rdev->me_fw); in cik_init_microcode()
2287 rdev->me_fw = NULL; in cik_init_microcode()
2288 release_firmware(rdev->ce_fw); in cik_init_microcode()
2289 rdev->ce_fw = NULL; in cik_init_microcode()
2290 release_firmware(rdev->mec_fw); in cik_init_microcode()
2291 rdev->mec_fw = NULL; in cik_init_microcode()
2292 release_firmware(rdev->mec2_fw); in cik_init_microcode()
2293 rdev->mec2_fw = NULL; in cik_init_microcode()
2294 release_firmware(rdev->rlc_fw); in cik_init_microcode()
2295 rdev->rlc_fw = NULL; in cik_init_microcode()
2296 release_firmware(rdev->sdma_fw); in cik_init_microcode()
2297 rdev->sdma_fw = NULL; in cik_init_microcode()
2298 release_firmware(rdev->mc_fw); in cik_init_microcode()
2299 rdev->mc_fw = NULL; in cik_init_microcode()
2300 release_firmware(rdev->smc_fw); in cik_init_microcode()
2301 rdev->smc_fw = NULL; in cik_init_microcode()
2310 * cik_tiling_mode_table_init - init the hw tiling table
2322 u32 *tile = rdev->config.cik.tile_mode_array; in cik_tiling_mode_table_init()
2323 u32 *macrotile = rdev->config.cik.macrotile_mode_array; in cik_tiling_mode_table_init()
2325 ARRAY_SIZE(rdev->config.cik.tile_mode_array); in cik_tiling_mode_table_init()
2327 ARRAY_SIZE(rdev->config.cik.macrotile_mode_array); in cik_tiling_mode_table_init()
2330 u32 num_rbs = rdev->config.cik.max_backends_per_se * in cik_tiling_mode_table_init()
2331 rdev->config.cik.max_shader_engines; in cik_tiling_mode_table_init()
2333 switch (rdev->config.cik.mem_row_size_in_kb) { in cik_tiling_mode_table_init()
2346 num_pipe_configs = rdev->config.cik.max_tile_pipes; in cik_tiling_mode_table_init()
3016 * cik_select_se_sh - select which SE, SH to address
3043 * cik_create_bitmask - create a bitmask
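cik_create_bitmask() just builds an all-ones mask of the requested width, equivalent to (1 << bit_width) - 1; it turns "number of RBs/CUs" counts into enable masks. The whole helper:

    static u32 cik_create_bitmask(u32 bit_width)
    {
        u32 i, mask = 0;

        for (i = 0; i < bit_width; i++) {
            mask <<= 1;
            mask |= 1;
        }
        return mask;    /* e.g. bit_width == 4 gives 0xf */
    }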
3062 * cik_get_rb_disabled - computes the mask of disabled RBs
3092 * cik_setup_rb - setup the RBs on the asic
3099 * Configures per-SE/SH RB registers (CIK).
3114 if (rdev->family == CHIP_HAWAII) in cik_setup_rb()
3129 rdev->config.cik.backend_enable_mask = enabled_rbs; in cik_setup_rb()
3161 * cik_gpu_init - setup the 3D engine
3176 switch (rdev->family) { in cik_gpu_init()
3178 rdev->config.cik.max_shader_engines = 2; in cik_gpu_init()
3179 rdev->config.cik.max_tile_pipes = 4; in cik_gpu_init()
3180 rdev->config.cik.max_cu_per_sh = 7; in cik_gpu_init()
3181 rdev->config.cik.max_sh_per_se = 1; in cik_gpu_init()
3182 rdev->config.cik.max_backends_per_se = 2; in cik_gpu_init()
3183 rdev->config.cik.max_texture_channel_caches = 4; in cik_gpu_init()
3184 rdev->config.cik.max_gprs = 256; in cik_gpu_init()
3185 rdev->config.cik.max_gs_threads = 32; in cik_gpu_init()
3186 rdev->config.cik.max_hw_contexts = 8; in cik_gpu_init()
3188 rdev->config.cik.sc_prim_fifo_size_frontend = 0x20; in cik_gpu_init()
3189 rdev->config.cik.sc_prim_fifo_size_backend = 0x100; in cik_gpu_init()
3190 rdev->config.cik.sc_hiz_tile_fifo_size = 0x30; in cik_gpu_init()
3191 rdev->config.cik.sc_earlyz_tile_fifo_size = 0x130; in cik_gpu_init()
3195 rdev->config.cik.max_shader_engines = 4; in cik_gpu_init()
3196 rdev->config.cik.max_tile_pipes = 16; in cik_gpu_init()
3197 rdev->config.cik.max_cu_per_sh = 11; in cik_gpu_init()
3198 rdev->config.cik.max_sh_per_se = 1; in cik_gpu_init()
3199 rdev->config.cik.max_backends_per_se = 4; in cik_gpu_init()
3200 rdev->config.cik.max_texture_channel_caches = 16; in cik_gpu_init()
3201 rdev->config.cik.max_gprs = 256; in cik_gpu_init()
3202 rdev->config.cik.max_gs_threads = 32; in cik_gpu_init()
3203 rdev->config.cik.max_hw_contexts = 8; in cik_gpu_init()
3205 rdev->config.cik.sc_prim_fifo_size_frontend = 0x20; in cik_gpu_init()
3206 rdev->config.cik.sc_prim_fifo_size_backend = 0x100; in cik_gpu_init()
3207 rdev->config.cik.sc_hiz_tile_fifo_size = 0x30; in cik_gpu_init()
3208 rdev->config.cik.sc_earlyz_tile_fifo_size = 0x130; in cik_gpu_init()
3212 rdev->config.cik.max_shader_engines = 1; in cik_gpu_init()
3213 rdev->config.cik.max_tile_pipes = 4; in cik_gpu_init()
3214 rdev->config.cik.max_cu_per_sh = 8; in cik_gpu_init()
3215 rdev->config.cik.max_backends_per_se = 2; in cik_gpu_init()
3216 rdev->config.cik.max_sh_per_se = 1; in cik_gpu_init()
3217 rdev->config.cik.max_texture_channel_caches = 4; in cik_gpu_init()
3218 rdev->config.cik.max_gprs = 256; in cik_gpu_init()
3219 rdev->config.cik.max_gs_threads = 16; in cik_gpu_init()
3220 rdev->config.cik.max_hw_contexts = 8; in cik_gpu_init()
3222 rdev->config.cik.sc_prim_fifo_size_frontend = 0x20; in cik_gpu_init()
3223 rdev->config.cik.sc_prim_fifo_size_backend = 0x100; in cik_gpu_init()
3224 rdev->config.cik.sc_hiz_tile_fifo_size = 0x30; in cik_gpu_init()
3225 rdev->config.cik.sc_earlyz_tile_fifo_size = 0x130; in cik_gpu_init()
3231 rdev->config.cik.max_shader_engines = 1; in cik_gpu_init()
3232 rdev->config.cik.max_tile_pipes = 2; in cik_gpu_init()
3233 rdev->config.cik.max_cu_per_sh = 2; in cik_gpu_init()
3234 rdev->config.cik.max_sh_per_se = 1; in cik_gpu_init()
3235 rdev->config.cik.max_backends_per_se = 1; in cik_gpu_init()
3236 rdev->config.cik.max_texture_channel_caches = 2; in cik_gpu_init()
3237 rdev->config.cik.max_gprs = 256; in cik_gpu_init()
3238 rdev->config.cik.max_gs_threads = 16; in cik_gpu_init()
3239 rdev->config.cik.max_hw_contexts = 8; in cik_gpu_init()
3241 rdev->config.cik.sc_prim_fifo_size_frontend = 0x20; in cik_gpu_init()
3242 rdev->config.cik.sc_prim_fifo_size_backend = 0x100; in cik_gpu_init()
3243 rdev->config.cik.sc_hiz_tile_fifo_size = 0x30; in cik_gpu_init()
3244 rdev->config.cik.sc_earlyz_tile_fifo_size = 0x130; in cik_gpu_init()
3267 rdev->config.cik.num_tile_pipes = rdev->config.cik.max_tile_pipes; in cik_gpu_init()
3268 rdev->config.cik.mem_max_burst_length_bytes = 256; in cik_gpu_init()
3270 rdev->config.cik.mem_row_size_in_kb = (4 * (1 << (8 + tmp))) / 1024; in cik_gpu_init()
3271 if (rdev->config.cik.mem_row_size_in_kb > 4) in cik_gpu_init()
3272 rdev->config.cik.mem_row_size_in_kb = 4; in cik_gpu_init()
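Decoding 3270: tmp is the column-bits field, so the row size is 4 * 2^(8 + tmp) bytes; tmp = 0 yields 1 KB, tmp = 2 yields 4 KB, and the clamp above caps anything larger at 4 KB.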
3274 rdev->config.cik.shader_engine_tile_size = 32; in cik_gpu_init()
3275 rdev->config.cik.num_gpus = 1; in cik_gpu_init()
3276 rdev->config.cik.multi_gpu_tile_size = 64; in cik_gpu_init()
3280 switch (rdev->config.cik.mem_row_size_in_kb) { in cik_gpu_init()
3300 rdev->config.cik.tile_config = 0; in cik_gpu_init()
3301 switch (rdev->config.cik.num_tile_pipes) { in cik_gpu_init()
3303 rdev->config.cik.tile_config |= (0 << 0); in cik_gpu_init()
3306 rdev->config.cik.tile_config |= (1 << 0); in cik_gpu_init()
3309 rdev->config.cik.tile_config |= (2 << 0); in cik_gpu_init()
3314 rdev->config.cik.tile_config |= (3 << 0); in cik_gpu_init()
3317 rdev->config.cik.tile_config |= in cik_gpu_init()
3319 rdev->config.cik.tile_config |= in cik_gpu_init()
3321 rdev->config.cik.tile_config |= in cik_gpu_init()
3335 cik_setup_rb(rdev, rdev->config.cik.max_shader_engines, in cik_gpu_init()
3336 rdev->config.cik.max_sh_per_se, in cik_gpu_init()
3337 rdev->config.cik.max_backends_per_se); in cik_gpu_init()
3339 rdev->config.cik.active_cus = 0; in cik_gpu_init()
3340 for (i = 0; i < rdev->config.cik.max_shader_engines; i++) { in cik_gpu_init()
3341 for (j = 0; j < rdev->config.cik.max_sh_per_se; j++) { in cik_gpu_init()
3342 rdev->config.cik.active_cus += in cik_gpu_init()
3376 WREG32(PA_SC_FIFO_SIZE, (SC_FRONTEND_PRIM_FIFO_SIZE(rdev->config.cik.sc_prim_fifo_size_frontend) | in cik_gpu_init()
3377 SC_BACKEND_PRIM_FIFO_SIZE(rdev->config.cik.sc_prim_fifo_size_backend) | in cik_gpu_init()
3378 SC_HIZ_TILE_FIFO_SIZE(rdev->config.cik.sc_hiz_tile_fifo_size) | in cik_gpu_init()
3379 SC_EARLYZ_TILE_FIFO_SIZE(rdev->config.cik.sc_earlyz_tile_fifo_size))); in cik_gpu_init()
3413 * cik_scratch_init - setup driver info for CP scratch regs
3426 rdev->scratch.num_reg = 7; in cik_scratch_init()
3427 rdev->scratch.reg_base = SCRATCH_REG0; in cik_scratch_init()
3428 for (i = 0; i < rdev->scratch.num_reg; i++) { in cik_scratch_init()
3429 rdev->scratch.free[i] = true; in cik_scratch_init()
3430 rdev->scratch.reg[i] = rdev->scratch.reg_base + (i * 4); in cik_scratch_init()
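cik_scratch_init() only publishes the pool of seven CP scratch registers; consumers claim them through the generic scratch helpers. A hypothetical caller:

    u32 scratch;
    int r = radeon_scratch_get(rdev, &scratch);  /* claim a free slot */
    if (!r) {
        WREG32(scratch, 0xCAFEDEAD);    /* the ring tests poke this value */
        radeon_scratch_free(rdev, scratch);
    }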
3435 * cik_ring_test - basic gfx ring test
3460 DRM_ERROR("radeon: cp failed to lock ring %d (%d).\n", ring->idx, r); in cik_ring_test()
3465 radeon_ring_write(ring, ((scratch - PACKET3_SET_UCONFIG_REG_START) >> 2)); in cik_ring_test()
3469 for (i = 0; i < rdev->usec_timeout; i++) { in cik_ring_test()
3475 if (i < rdev->usec_timeout) { in cik_ring_test()
3476 DRM_INFO("ring test on %d succeeded in %d usecs\n", ring->idx, i); in cik_ring_test()
3479 ring->idx, scratch, tmp); in cik_ring_test()
3480 r = -EINVAL; in cik_ring_test()
3487 * cik_hdp_flush_cp_ring_emit - emit an hdp flush on the cp
3497 struct radeon_ring *ring = &rdev->ring[ridx]; in cik_hdp_flush_cp_ring_emit()
3500 switch (ring->idx) { in cik_hdp_flush_cp_ring_emit()
3504 switch (ring->me) { in cik_hdp_flush_cp_ring_emit()
3506 ref_and_mask = CP2 << ring->pipe; in cik_hdp_flush_cp_ring_emit()
3509 ref_and_mask = CP6 << ring->pipe; in cik_hdp_flush_cp_ring_emit()
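With ref_and_mask selecting the bit for the requesting CP client, the flush itself is one WAIT_REG_MEM packet that writes GPU_HDP_FLUSH_REQ and polls GPU_HDP_FLUSH_DONE until the bit reflects back; reconstructed from the driver, roughly:

    radeon_ring_write(ring, PACKET3(PACKET3_WAIT_REG_MEM, 5));
    radeon_ring_write(ring, (WAIT_REG_MEM_OPERATION(1) |  /* write, wait, write */
                             WAIT_REG_MEM_FUNCTION(3) |   /* == */
                             WAIT_REG_MEM_ENGINE(1)));    /* pfp */
    radeon_ring_write(ring, GPU_HDP_FLUSH_REQ >> 2);
    radeon_ring_write(ring, GPU_HDP_FLUSH_DONE >> 2);
    radeon_ring_write(ring, ref_and_mask);
    radeon_ring_write(ring, ref_and_mask);
    radeon_ring_write(ring, 0x20);                        /* poll interval */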
3532 * cik_fence_gfx_ring_emit - emit a fence on the gfx ring
3543 struct radeon_ring *ring = &rdev->ring[fence->ring]; in cik_fence_gfx_ring_emit()
3544 u64 addr = rdev->fence_drv[fence->ring].gpu_addr; in cik_fence_gfx_ring_emit()
3557 radeon_ring_write(ring, fence->seq - 1); in cik_fence_gfx_ring_emit()
3568 radeon_ring_write(ring, fence->seq); in cik_fence_gfx_ring_emit()
3573 * cik_fence_compute_ring_emit - emit a fence on the compute ring
3584 struct radeon_ring *ring = &rdev->ring[fence->ring]; in cik_fence_compute_ring_emit()
3585 u64 addr = rdev->fence_drv[fence->ring].gpu_addr; in cik_fence_compute_ring_emit()
3587 /* RELEASE_MEM - flush caches, send int */ in cik_fence_compute_ring_emit()
3596 radeon_ring_write(ring, fence->seq); in cik_fence_compute_ring_emit()
3601 * cik_semaphore_ring_emit - emit a semaphore on the CP ring
3616 uint64_t addr = semaphore->gpu_addr; in cik_semaphore_ring_emit()
3623 if (emit_wait && ring->idx == RADEON_RING_TYPE_GFX_INDEX) { in cik_semaphore_ring_emit()
3633 * cik_copy_cpdma - copy pages using the CP DMA engine
3652 int ring_index = rdev->asic->copy.blit_ring_index; in cik_copy_cpdma()
3653 struct radeon_ring *ring = &rdev->ring[ring_index]; in cik_copy_cpdma()
3670 radeon_sync_rings(rdev, &sync, ring->idx); in cik_copy_cpdma()
3676 size_in_bytes -= cur_size_in_bytes; in cik_copy_cpdma()
3691 r = radeon_fence_emit(rdev, &fence, ring->idx); in cik_copy_cpdma()
3708 * cik_ring_ib_execute - emit an IB (Indirect Buffer) on the gfx ring
3721 struct radeon_ring *ring = &rdev->ring[ib->ring]; in cik_ring_ib_execute()
3722 unsigned vm_id = ib->vm ? ib->vm->ids[ib->ring].id : 0; in cik_ring_ib_execute()
3725 if (ib->is_const_ib) { in cik_ring_ib_execute()
3733 if (ring->rptr_save_reg) { in cik_ring_ib_execute()
3734 next_rptr = ring->wptr + 3 + 4; in cik_ring_ib_execute()
3736 radeon_ring_write(ring, ((ring->rptr_save_reg - in cik_ring_ib_execute()
3739 } else if (rdev->wb.enabled) { in cik_ring_ib_execute()
3740 next_rptr = ring->wptr + 5 + 4; in cik_ring_ib_execute()
3743 radeon_ring_write(ring, ring->next_rptr_gpu_addr & 0xfffffffc); in cik_ring_ib_execute()
3744 radeon_ring_write(ring, upper_32_bits(ring->next_rptr_gpu_addr)); in cik_ring_ib_execute()
3751 control |= ib->length_dw | (vm_id << 24); in cik_ring_ib_execute()
3754 radeon_ring_write(ring, (ib->gpu_addr & 0xFFFFFFFC)); in cik_ring_ib_execute()
3755 radeon_ring_write(ring, upper_32_bits(ib->gpu_addr) & 0xFFFF); in cik_ring_ib_execute()
3760 * cik_ib_test - basic gfx ring IB test
3783 r = radeon_ib_get(rdev, ring->idx, &ib, NULL, 256); in cik_ib_test()
3790 ib.ptr[1] = ((scratch - PACKET3_SET_UCONFIG_REG_START) >> 2); in cik_ib_test()
3811 return -ETIMEDOUT; in cik_ib_test()
3814 for (i = 0; i < rdev->usec_timeout; i++) { in cik_ib_test()
3820 if (i < rdev->usec_timeout) { in cik_ib_test()
3821 DRM_INFO("ib test on ring %d succeeded in %u usecs\n", ib.fence->ring, i); in cik_ib_test()
3825 r = -EINVAL; in cik_ib_test()
3839 * PFP - Pre-Fetch Parser
3840 * ME - Micro Engine
3841 * CE - Constant Engine
3849 * MEC1 - Compute MicroEngine 1
3850 * MEC2 - Compute MicroEngine 2
3856 * cik_cp_gfx_enable - enable/disable the gfx CP MEs
3868 if (rdev->asic->copy.copy_ring_index == RADEON_RING_TYPE_GFX_INDEX) in cik_cp_gfx_enable()
3869 radeon_ttm_set_active_vram_size(rdev, rdev->mc.visible_vram_size); in cik_cp_gfx_enable()
3871 rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ready = false; in cik_cp_gfx_enable()
3877 * cik_cp_gfx_load_microcode - load the gfx CP ME ucode
3882 * Returns 0 for success, -EINVAL if the ucode is not available.
3888 if (!rdev->me_fw || !rdev->pfp_fw || !rdev->ce_fw) in cik_cp_gfx_load_microcode()
3889 return -EINVAL; in cik_cp_gfx_load_microcode()
3893 if (rdev->new_fw) { in cik_cp_gfx_load_microcode()
3895 (const struct gfx_firmware_header_v1_0 *)rdev->pfp_fw->data; in cik_cp_gfx_load_microcode()
3897 (const struct gfx_firmware_header_v1_0 *)rdev->ce_fw->data; in cik_cp_gfx_load_microcode()
3899 (const struct gfx_firmware_header_v1_0 *)rdev->me_fw->data; in cik_cp_gfx_load_microcode()
3903 radeon_ucode_print_gfx_hdr(&pfp_hdr->header); in cik_cp_gfx_load_microcode()
3904 radeon_ucode_print_gfx_hdr(&ce_hdr->header); in cik_cp_gfx_load_microcode()
3905 radeon_ucode_print_gfx_hdr(&me_hdr->header); in cik_cp_gfx_load_microcode()
3909 (rdev->pfp_fw->data + le32_to_cpu(pfp_hdr->header.ucode_array_offset_bytes)); in cik_cp_gfx_load_microcode()
3910 fw_size = le32_to_cpu(pfp_hdr->header.ucode_size_bytes) / 4; in cik_cp_gfx_load_microcode()
3914 WREG32(CP_PFP_UCODE_ADDR, le32_to_cpu(pfp_hdr->header.ucode_version)); in cik_cp_gfx_load_microcode()
3918 (rdev->ce_fw->data + le32_to_cpu(ce_hdr->header.ucode_array_offset_bytes)); in cik_cp_gfx_load_microcode()
3919 fw_size = le32_to_cpu(ce_hdr->header.ucode_size_bytes) / 4; in cik_cp_gfx_load_microcode()
3923 WREG32(CP_CE_UCODE_ADDR, le32_to_cpu(ce_hdr->header.ucode_version)); in cik_cp_gfx_load_microcode()
3927 (rdev->me_fw->data + le32_to_cpu(me_hdr->header.ucode_array_offset_bytes)); in cik_cp_gfx_load_microcode()
3928 fw_size = le32_to_cpu(me_hdr->header.ucode_size_bytes) / 4; in cik_cp_gfx_load_microcode()
3932 WREG32(CP_ME_RAM_WADDR, le32_to_cpu(me_hdr->header.ucode_version)); in cik_cp_gfx_load_microcode()
3933 WREG32(CP_ME_RAM_RADDR, le32_to_cpu(me_hdr->header.ucode_version)); in cik_cp_gfx_load_microcode()
3938 fw_data = (const __be32 *)rdev->pfp_fw->data; in cik_cp_gfx_load_microcode()
3945 fw_data = (const __be32 *)rdev->ce_fw->data; in cik_cp_gfx_load_microcode()
3952 fw_data = (const __be32 *)rdev->me_fw->data; in cik_cp_gfx_load_microcode()
3963 * cik_cp_gfx_start - start the gfx ring
3973 struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX]; in cik_cp_gfx_start()
3977 WREG32(CP_MAX_CONTEXT, rdev->config.cik.max_hw_contexts - 1); in cik_cp_gfx_start()
4024 * cik_cp_gfx_fini - stop the gfx ring
4034 radeon_ring_fini(rdev, &rdev->ring[RADEON_RING_TYPE_GFX_INDEX]); in cik_cp_gfx_fini()
4038 * cik_cp_gfx_resume - setup the gfx ring buffer registers
4055 if (rdev->family != CHIP_HAWAII) in cik_cp_gfx_resume()
4058 /* Set the write pointer delay */ in cik_cp_gfx_resume()
4064 WREG32(SCRATCH_ADDR, ((rdev->wb.gpu_addr + RADEON_WB_SCRATCH_OFFSET) >> 8) & 0xFFFFFFFF); in cik_cp_gfx_resume()
4066 /* ring 0 - compute and gfx */ in cik_cp_gfx_resume()
4068 ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX]; in cik_cp_gfx_resume()
4069 rb_bufsz = order_base_2(ring->ring_size / 8); in cik_cp_gfx_resume()
4078 ring->wptr = 0; in cik_cp_gfx_resume()
4079 WREG32(CP_RB0_WPTR, ring->wptr); in cik_cp_gfx_resume()
4082 WREG32(CP_RB0_RPTR_ADDR, (rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFFFFFFFC); in cik_cp_gfx_resume()
4083 WREG32(CP_RB0_RPTR_ADDR_HI, upper_32_bits(rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFF); in cik_cp_gfx_resume()
4088 if (!rdev->wb.enabled) in cik_cp_gfx_resume()
4094 rb_addr = ring->gpu_addr >> 8; in cik_cp_gfx_resume()
4100 rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ready = true; in cik_cp_gfx_resume()
4101 r = radeon_ring_test(rdev, RADEON_RING_TYPE_GFX_INDEX, &rdev->ring[RADEON_RING_TYPE_GFX_INDEX]); in cik_cp_gfx_resume()
4103 rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ready = false; in cik_cp_gfx_resume()
4107 if (rdev->asic->copy.copy_ring_index == RADEON_RING_TYPE_GFX_INDEX) in cik_cp_gfx_resume()
4108 radeon_ttm_set_active_vram_size(rdev, rdev->mc.real_vram_size); in cik_cp_gfx_resume()
4118 if (rdev->wb.enabled) in cik_gfx_get_rptr()
4119 rptr = rdev->wb.wb[ring->rptr_offs/4]; in cik_gfx_get_rptr()
4135 WREG32(CP_RB0_WPTR, ring->wptr); in cik_gfx_set_wptr()
4144 if (rdev->wb.enabled) { in cik_compute_get_rptr()
4145 rptr = rdev->wb.wb[ring->rptr_offs/4]; in cik_compute_get_rptr()
4147 mutex_lock(&rdev->srbm_mutex); in cik_compute_get_rptr()
4148 cik_srbm_select(rdev, ring->me, ring->pipe, ring->queue, 0); in cik_compute_get_rptr()
4151 mutex_unlock(&rdev->srbm_mutex); in cik_compute_get_rptr()
4162 if (rdev->wb.enabled) { in cik_compute_get_wptr()
4164 wptr = rdev->wb.wb[ring->wptr_offs/4]; in cik_compute_get_wptr()
4166 mutex_lock(&rdev->srbm_mutex); in cik_compute_get_wptr()
4167 cik_srbm_select(rdev, ring->me, ring->pipe, ring->queue, 0); in cik_compute_get_wptr()
4170 mutex_unlock(&rdev->srbm_mutex); in cik_compute_get_wptr()
4180 rdev->wb.wb[ring->wptr_offs/4] = ring->wptr; in cik_compute_set_wptr()
4181 WDOORBELL32(ring->doorbell_index, ring->wptr); in cik_compute_set_wptr()
4189 cik_srbm_select(rdev, ring->me, ring->pipe, ring->queue, 0); in cik_compute_stop()
4197 for (j = 0; j < rdev->usec_timeout; j++) { in cik_compute_stop()
4210 * cik_cp_compute_enable - enable/disable the compute CP MEs
4223 * To make hibernation reliable we need to clear compute ring in cik_cp_compute_enable()
4226 mutex_lock(&rdev->srbm_mutex); in cik_cp_compute_enable()
4227 cik_compute_stop(rdev, &rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX]); in cik_cp_compute_enable()
4228 cik_compute_stop(rdev, &rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX]); in cik_cp_compute_enable()
4229 mutex_unlock(&rdev->srbm_mutex); in cik_cp_compute_enable()
4232 rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX].ready = false; in cik_cp_compute_enable()
4233 rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX].ready = false; in cik_cp_compute_enable()
4239 * cik_cp_compute_load_microcode - load the compute CP ME ucode
4244 * Returns 0 for success, -EINVAL if the ucode is not available.
4250 if (!rdev->mec_fw) in cik_cp_compute_load_microcode()
4251 return -EINVAL; in cik_cp_compute_load_microcode()
4255 if (rdev->new_fw) { in cik_cp_compute_load_microcode()
4257 (const struct gfx_firmware_header_v1_0 *)rdev->mec_fw->data; in cik_cp_compute_load_microcode()
4261 radeon_ucode_print_gfx_hdr(&mec_hdr->header); in cik_cp_compute_load_microcode()
4265 (rdev->mec_fw->data + le32_to_cpu(mec_hdr->header.ucode_array_offset_bytes)); in cik_cp_compute_load_microcode()
4266 fw_size = le32_to_cpu(mec_hdr->header.ucode_size_bytes) / 4; in cik_cp_compute_load_microcode()
4270 WREG32(CP_MEC_ME1_UCODE_ADDR, le32_to_cpu(mec_hdr->header.ucode_version)); in cik_cp_compute_load_microcode()
4273 if (rdev->family == CHIP_KAVERI) { in cik_cp_compute_load_microcode()
4275 (const struct gfx_firmware_header_v1_0 *)rdev->mec2_fw->data; in cik_cp_compute_load_microcode()
4278 (rdev->mec2_fw->data + in cik_cp_compute_load_microcode()
4279 le32_to_cpu(mec2_hdr->header.ucode_array_offset_bytes)); in cik_cp_compute_load_microcode()
4280 fw_size = le32_to_cpu(mec2_hdr->header.ucode_size_bytes) / 4; in cik_cp_compute_load_microcode()
4284 WREG32(CP_MEC_ME2_UCODE_ADDR, le32_to_cpu(mec2_hdr->header.ucode_version)); in cik_cp_compute_load_microcode()
4290 fw_data = (const __be32 *)rdev->mec_fw->data; in cik_cp_compute_load_microcode()
4296 if (rdev->family == CHIP_KAVERI) { in cik_cp_compute_load_microcode()
4298 fw_data = (const __be32 *)rdev->mec_fw->data; in cik_cp_compute_load_microcode()
4310 * cik_cp_compute_start - start the compute queues
4325 * cik_cp_compute_fini - stop the compute queues
4344 if (rdev->ring[idx].mqd_obj) { in cik_cp_compute_fini()
4345 r = radeon_bo_reserve(rdev->ring[idx].mqd_obj, false); in cik_cp_compute_fini()
4347 dev_warn(rdev->dev, "(%d) reserve MQD bo failed\n", r); in cik_cp_compute_fini()
4349 radeon_bo_unpin(rdev->ring[idx].mqd_obj); in cik_cp_compute_fini()
4350 radeon_bo_unreserve(rdev->ring[idx].mqd_obj); in cik_cp_compute_fini()
4352 radeon_bo_unref(&rdev->ring[idx].mqd_obj); in cik_cp_compute_fini()
4353 rdev->ring[idx].mqd_obj = NULL; in cik_cp_compute_fini()
4362 if (rdev->mec.hpd_eop_obj) { in cik_mec_fini()
4363 r = radeon_bo_reserve(rdev->mec.hpd_eop_obj, false); in cik_mec_fini()
4365 dev_warn(rdev->dev, "(%d) reserve HPD EOP bo failed\n", r); in cik_mec_fini()
4366 radeon_bo_unpin(rdev->mec.hpd_eop_obj); in cik_mec_fini()
4367 radeon_bo_unreserve(rdev->mec.hpd_eop_obj); in cik_mec_fini()
4369 radeon_bo_unref(&rdev->mec.hpd_eop_obj); in cik_mec_fini()
4370 rdev->mec.hpd_eop_obj = NULL; in cik_mec_fini()
4379 u32 *hpd; in cik_mec_init() local
4382 * KV: 2 MEC, 4 Pipes/MEC, 8 Queues/Pipe - 64 Queues total in cik_mec_init()
4383 * CI/KB: 1 MEC, 4 Pipes/MEC, 8 Queues/Pipe - 32 Queues total in cik_mec_init()
4385 if (rdev->family == CHIP_KAVERI) in cik_mec_init()
4386 rdev->mec.num_mec = 2; in cik_mec_init()
4388 rdev->mec.num_mec = 1; in cik_mec_init()
4389 rdev->mec.num_pipe = 4; in cik_mec_init()
4390 rdev->mec.num_queue = rdev->mec.num_mec * rdev->mec.num_pipe * 8; in cik_mec_init()
4392 if (rdev->mec.hpd_eop_obj == NULL) { in cik_mec_init()
4394 rdev->mec.num_mec * rdev->mec.num_pipe * MEC_HPD_SIZE * 2, in cik_mec_init()
4397 &rdev->mec.hpd_eop_obj); in cik_mec_init()
4399 dev_warn(rdev->dev, "(%d) create HPD EOP bo failed\n", r); in cik_mec_init()
4404 r = radeon_bo_reserve(rdev->mec.hpd_eop_obj, false); in cik_mec_init()
4409 r = radeon_bo_pin(rdev->mec.hpd_eop_obj, RADEON_GEM_DOMAIN_GTT, in cik_mec_init()
4410 &rdev->mec.hpd_eop_gpu_addr); in cik_mec_init()
4412 dev_warn(rdev->dev, "(%d) pin HPD EOP bo failed\n", r); in cik_mec_init()
4416 r = radeon_bo_kmap(rdev->mec.hpd_eop_obj, (void **)&hpd); in cik_mec_init()
4418 dev_warn(rdev->dev, "(%d) map HPD EOP bo failed\n", r); in cik_mec_init()
4424 memset(hpd, 0, rdev->mec.num_mec * rdev->mec.num_pipe * MEC_HPD_SIZE * 2); in cik_mec_init()
4426 radeon_bo_kunmap(rdev->mec.hpd_eop_obj); in cik_mec_init()
4427 radeon_bo_unreserve(rdev->mec.hpd_eop_obj); in cik_mec_init()
4500 * cik_cp_compute_resume - setup the compute queue registers
4530 mutex_lock(&rdev->srbm_mutex); in cik_cp_compute_resume()
4532 for (i = 0; i < (rdev->mec.num_pipe * rdev->mec.num_mec); ++i) { in cik_cp_compute_resume()
4534 int pipe = (i < 4) ? i : (i - 4); in cik_cp_compute_resume()
4538 eop_gpu_addr = rdev->mec.hpd_eop_gpu_addr + (i * MEC_HPD_SIZE * 2); in cik_cp_compute_resume()
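The arithmetic at 4534 and 4538 flattens (ME, pipe) into one loop index: slots 0-3 are ME1 pipes 0-3, slots 4-7 are ME2 pipes 0-3, and each pipe owns a 2 * MEC_HPD_SIZE slice of the EOP buffer:

    int me   = (i < 4) ? 1 : 2;         /* first four slots on ME1, rest on ME2 */
    int pipe = (i < 4) ? i : (i - 4);

    eop_gpu_addr = rdev->mec.hpd_eop_gpu_addr + (i * MEC_HPD_SIZE * 2);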
4554 mutex_unlock(&rdev->srbm_mutex); in cik_cp_compute_resume()
4563 if (rdev->ring[idx].mqd_obj == NULL) { in cik_cp_compute_resume()
4568 NULL, &rdev->ring[idx].mqd_obj); in cik_cp_compute_resume()
4570 dev_warn(rdev->dev, "(%d) create MQD bo failed\n", r); in cik_cp_compute_resume()
4575 r = radeon_bo_reserve(rdev->ring[idx].mqd_obj, false); in cik_cp_compute_resume()
4580 r = radeon_bo_pin(rdev->ring[idx].mqd_obj, RADEON_GEM_DOMAIN_GTT, in cik_cp_compute_resume()
4583 dev_warn(rdev->dev, "(%d) pin MQD bo failed\n", r); in cik_cp_compute_resume()
4587 r = radeon_bo_kmap(rdev->ring[idx].mqd_obj, (void **)&buf); in cik_cp_compute_resume()
4589 dev_warn(rdev->dev, "(%d) map MQD bo failed\n", r); in cik_cp_compute_resume()
4598 mqd->header = 0xC0310800; in cik_cp_compute_resume()
4599 mqd->static_thread_mgmt01[0] = 0xffffffff; in cik_cp_compute_resume()
4600 mqd->static_thread_mgmt01[1] = 0xffffffff; in cik_cp_compute_resume()
4601 mqd->static_thread_mgmt23[0] = 0xffffffff; in cik_cp_compute_resume()
4602 mqd->static_thread_mgmt23[1] = 0xffffffff; in cik_cp_compute_resume()
4604 mutex_lock(&rdev->srbm_mutex); in cik_cp_compute_resume()
4605 cik_srbm_select(rdev, rdev->ring[idx].me, in cik_cp_compute_resume()
4606 rdev->ring[idx].pipe, in cik_cp_compute_resume()
4607 rdev->ring[idx].queue, 0); in cik_cp_compute_resume()
4615 mqd->queue_state.cp_hqd_pq_doorbell_control = in cik_cp_compute_resume()
4618 mqd->queue_state.cp_hqd_pq_doorbell_control |= DOORBELL_EN; in cik_cp_compute_resume()
4620 mqd->queue_state.cp_hqd_pq_doorbell_control &= ~DOORBELL_EN; in cik_cp_compute_resume()
4622 mqd->queue_state.cp_hqd_pq_doorbell_control); in cik_cp_compute_resume()
4625 mqd->queue_state.cp_hqd_dequeue_request = 0; in cik_cp_compute_resume()
4626 mqd->queue_state.cp_hqd_pq_rptr = 0; in cik_cp_compute_resume()
4627 mqd->queue_state.cp_hqd_pq_wptr = 0; in cik_cp_compute_resume()
4630 for (j = 0; j < rdev->usec_timeout; j++) { in cik_cp_compute_resume()
4635 WREG32(CP_HQD_DEQUEUE_REQUEST, mqd->queue_state.cp_hqd_dequeue_request); in cik_cp_compute_resume()
4636 WREG32(CP_HQD_PQ_RPTR, mqd->queue_state.cp_hqd_pq_rptr); in cik_cp_compute_resume()
4637 WREG32(CP_HQD_PQ_WPTR, mqd->queue_state.cp_hqd_pq_wptr); in cik_cp_compute_resume()
4641 mqd->queue_state.cp_mqd_base_addr = mqd_gpu_addr & 0xfffffffc; in cik_cp_compute_resume()
4642 mqd->queue_state.cp_mqd_base_addr_hi = upper_32_bits(mqd_gpu_addr); in cik_cp_compute_resume()
4643 WREG32(CP_MQD_BASE_ADDR, mqd->queue_state.cp_mqd_base_addr); in cik_cp_compute_resume()
4644 WREG32(CP_MQD_BASE_ADDR_HI, mqd->queue_state.cp_mqd_base_addr_hi); in cik_cp_compute_resume()
4646 mqd->queue_state.cp_mqd_control = RREG32(CP_MQD_CONTROL); in cik_cp_compute_resume()
4647 mqd->queue_state.cp_mqd_control &= ~MQD_VMID_MASK; in cik_cp_compute_resume()
4648 WREG32(CP_MQD_CONTROL, mqd->queue_state.cp_mqd_control); in cik_cp_compute_resume()
4651 hqd_gpu_addr = rdev->ring[idx].gpu_addr >> 8; in cik_cp_compute_resume()
4652 mqd->queue_state.cp_hqd_pq_base = hqd_gpu_addr; in cik_cp_compute_resume()
4653 mqd->queue_state.cp_hqd_pq_base_hi = upper_32_bits(hqd_gpu_addr); in cik_cp_compute_resume()
4654 WREG32(CP_HQD_PQ_BASE, mqd->queue_state.cp_hqd_pq_base); in cik_cp_compute_resume()
4655 WREG32(CP_HQD_PQ_BASE_HI, mqd->queue_state.cp_hqd_pq_base_hi); in cik_cp_compute_resume()
4658 mqd->queue_state.cp_hqd_pq_control = RREG32(CP_HQD_PQ_CONTROL); in cik_cp_compute_resume()
4659 mqd->queue_state.cp_hqd_pq_control &= in cik_cp_compute_resume()
4662 mqd->queue_state.cp_hqd_pq_control |= in cik_cp_compute_resume()
4663 order_base_2(rdev->ring[idx].ring_size / 8); in cik_cp_compute_resume()
4664 mqd->queue_state.cp_hqd_pq_control |= in cik_cp_compute_resume()
4667 mqd->queue_state.cp_hqd_pq_control |= BUF_SWAP_32BIT; in cik_cp_compute_resume()
4669 mqd->queue_state.cp_hqd_pq_control &= in cik_cp_compute_resume()
4671 mqd->queue_state.cp_hqd_pq_control |= in cik_cp_compute_resume()
4673 WREG32(CP_HQD_PQ_CONTROL, mqd->queue_state.cp_hqd_pq_control); in cik_cp_compute_resume()
4677 wb_gpu_addr = rdev->wb.gpu_addr + CIK_WB_CP1_WPTR_OFFSET; in cik_cp_compute_resume()
4679 wb_gpu_addr = rdev->wb.gpu_addr + CIK_WB_CP2_WPTR_OFFSET; in cik_cp_compute_resume()
4680 mqd->queue_state.cp_hqd_pq_wptr_poll_addr = wb_gpu_addr & 0xfffffffc; in cik_cp_compute_resume()
4681 mqd->queue_state.cp_hqd_pq_wptr_poll_addr_hi = upper_32_bits(wb_gpu_addr) & 0xffff; in cik_cp_compute_resume()
4682 WREG32(CP_HQD_PQ_WPTR_POLL_ADDR, mqd->queue_state.cp_hqd_pq_wptr_poll_addr); in cik_cp_compute_resume()
4684 mqd->queue_state.cp_hqd_pq_wptr_poll_addr_hi); in cik_cp_compute_resume()
4688 wb_gpu_addr = rdev->wb.gpu_addr + RADEON_WB_CP1_RPTR_OFFSET; in cik_cp_compute_resume()
4690 wb_gpu_addr = rdev->wb.gpu_addr + RADEON_WB_CP2_RPTR_OFFSET; in cik_cp_compute_resume()
4691 mqd->queue_state.cp_hqd_pq_rptr_report_addr = wb_gpu_addr & 0xfffffffc; in cik_cp_compute_resume()
4692 mqd->queue_state.cp_hqd_pq_rptr_report_addr_hi = in cik_cp_compute_resume()
4695 mqd->queue_state.cp_hqd_pq_rptr_report_addr); in cik_cp_compute_resume()
4697 mqd->queue_state.cp_hqd_pq_rptr_report_addr_hi); in cik_cp_compute_resume()
4701 mqd->queue_state.cp_hqd_pq_doorbell_control = in cik_cp_compute_resume()
4703 mqd->queue_state.cp_hqd_pq_doorbell_control &= ~DOORBELL_OFFSET_MASK; in cik_cp_compute_resume()
4704 mqd->queue_state.cp_hqd_pq_doorbell_control |= in cik_cp_compute_resume()
4705 DOORBELL_OFFSET(rdev->ring[idx].doorbell_index); in cik_cp_compute_resume()
4706 mqd->queue_state.cp_hqd_pq_doorbell_control |= DOORBELL_EN; in cik_cp_compute_resume()
4707 mqd->queue_state.cp_hqd_pq_doorbell_control &= in cik_cp_compute_resume()
4711 mqd->queue_state.cp_hqd_pq_doorbell_control = 0; in cik_cp_compute_resume()
4714 mqd->queue_state.cp_hqd_pq_doorbell_control); in cik_cp_compute_resume()
4717 rdev->ring[idx].wptr = 0; in cik_cp_compute_resume()
4718 mqd->queue_state.cp_hqd_pq_wptr = rdev->ring[idx].wptr; in cik_cp_compute_resume()
4719 WREG32(CP_HQD_PQ_WPTR, mqd->queue_state.cp_hqd_pq_wptr); in cik_cp_compute_resume()
4720 mqd->queue_state.cp_hqd_pq_rptr = RREG32(CP_HQD_PQ_RPTR); in cik_cp_compute_resume()
4723 mqd->queue_state.cp_hqd_vmid = 0; in cik_cp_compute_resume()
4724 WREG32(CP_HQD_VMID, mqd->queue_state.cp_hqd_vmid); in cik_cp_compute_resume()
4727 mqd->queue_state.cp_hqd_active = 1; in cik_cp_compute_resume()
4728 WREG32(CP_HQD_ACTIVE, mqd->queue_state.cp_hqd_active); in cik_cp_compute_resume()
4731 mutex_unlock(&rdev->srbm_mutex); in cik_cp_compute_resume()
4733 radeon_bo_kunmap(rdev->ring[idx].mqd_obj); in cik_cp_compute_resume()
4734 radeon_bo_unreserve(rdev->ring[idx].mqd_obj); in cik_cp_compute_resume()
4736 rdev->ring[idx].ready = true; in cik_cp_compute_resume()
4737 r = radeon_ring_test(rdev, idx, &rdev->ring[idx]); in cik_cp_compute_resume()
4739 rdev->ring[idx].ready = false; in cik_cp_compute_resume()
4795 dev_info(rdev->dev, " GRBM_STATUS=0x%08X\n", in cik_print_gpu_status_regs()
4797 dev_info(rdev->dev, " GRBM_STATUS2=0x%08X\n", in cik_print_gpu_status_regs()
4799 dev_info(rdev->dev, " GRBM_STATUS_SE0=0x%08X\n", in cik_print_gpu_status_regs()
4801 dev_info(rdev->dev, " GRBM_STATUS_SE1=0x%08X\n", in cik_print_gpu_status_regs()
4803 dev_info(rdev->dev, " GRBM_STATUS_SE2=0x%08X\n", in cik_print_gpu_status_regs()
4805 dev_info(rdev->dev, " GRBM_STATUS_SE3=0x%08X\n", in cik_print_gpu_status_regs()
4807 dev_info(rdev->dev, " SRBM_STATUS=0x%08X\n", in cik_print_gpu_status_regs()
4809 dev_info(rdev->dev, " SRBM_STATUS2=0x%08X\n", in cik_print_gpu_status_regs()
4811 dev_info(rdev->dev, " SDMA0_STATUS_REG = 0x%08X\n", in cik_print_gpu_status_regs()
4813 dev_info(rdev->dev, " SDMA1_STATUS_REG = 0x%08X\n", in cik_print_gpu_status_regs()
4815 dev_info(rdev->dev, " CP_STAT = 0x%08x\n", RREG32(CP_STAT)); in cik_print_gpu_status_regs()
4816 dev_info(rdev->dev, " CP_STALLED_STAT1 = 0x%08x\n", in cik_print_gpu_status_regs()
4818 dev_info(rdev->dev, " CP_STALLED_STAT2 = 0x%08x\n", in cik_print_gpu_status_regs()
4820 dev_info(rdev->dev, " CP_STALLED_STAT3 = 0x%08x\n", in cik_print_gpu_status_regs()
4822 dev_info(rdev->dev, " CP_CPF_BUSY_STAT = 0x%08x\n", in cik_print_gpu_status_regs()
4824 dev_info(rdev->dev, " CP_CPF_STALLED_STAT1 = 0x%08x\n", in cik_print_gpu_status_regs()
4826 dev_info(rdev->dev, " CP_CPF_STATUS = 0x%08x\n", RREG32(CP_CPF_STATUS)); in cik_print_gpu_status_regs()
4827 dev_info(rdev->dev, " CP_CPC_BUSY_STAT = 0x%08x\n", RREG32(CP_CPC_BUSY_STAT)); in cik_print_gpu_status_regs()
4828 dev_info(rdev->dev, " CP_CPC_STALLED_STAT1 = 0x%08x\n", in cik_print_gpu_status_regs()
4830 dev_info(rdev->dev, " CP_CPC_STATUS = 0x%08x\n", RREG32(CP_CPC_STATUS)); in cik_print_gpu_status_regs()
4834 * cik_gpu_check_soft_reset - check which blocks are busy
4915 * cik_gpu_soft_reset - soft reset GPU
4931 dev_info(rdev->dev, "GPU softreset: 0x%08X\n", reset_mask); in cik_gpu_soft_reset()
4934 dev_info(rdev->dev, " VM_CONTEXT1_PROTECTION_FAULT_ADDR 0x%08X\n", in cik_gpu_soft_reset()
4936 dev_info(rdev->dev, " VM_CONTEXT1_PROTECTION_FAULT_STATUS 0x%08X\n", in cik_gpu_soft_reset()
4967 dev_warn(rdev->dev, "Wait for MC idle timed out!\n"); in cik_gpu_soft_reset()
5003 if (!(rdev->flags & RADEON_IS_IGP)) { in cik_gpu_soft_reset()
5011 dev_info(rdev->dev, "GRBM_SOFT_RESET=0x%08X\n", tmp); in cik_gpu_soft_reset()
5025 dev_info(rdev->dev, "SRBM_SOFT_RESET=0x%08X\n", tmp); in cik_gpu_soft_reset()
5054 save->gmcon_reng_execute = RREG32(GMCON_RENG_EXECUTE); in kv_save_regs_for_reset()
5055 save->gmcon_misc = RREG32(GMCON_MISC); in kv_save_regs_for_reset()
5056 save->gmcon_misc3 = RREG32(GMCON_MISC3); in kv_save_regs_for_reset()
5058 WREG32(GMCON_RENG_EXECUTE, save->gmcon_reng_execute & ~RENG_EXECUTE_ON_PWR_UP); in kv_save_regs_for_reset()
5059 WREG32(GMCON_MISC, save->gmcon_misc & ~(RENG_EXECUTE_ON_REG_UPDATE | in kv_save_regs_for_reset()
5131 WREG32(GMCON_MISC3, save->gmcon_misc3); in kv_restore_regs_for_reset()
5132 WREG32(GMCON_MISC, save->gmcon_misc); in kv_restore_regs_for_reset()
5133 WREG32(GMCON_RENG_EXECUTE, save->gmcon_reng_execute); in kv_restore_regs_for_reset()
5142 dev_info(rdev->dev, "GPU pci config reset\n"); in cik_gpu_pci_config_reset()
5174 dev_warn(rdev->dev, "Wait for MC idle timed out!\n"); in cik_gpu_pci_config_reset()
5177 if (rdev->flags & RADEON_IS_IGP) in cik_gpu_pci_config_reset()
5181 pci_clear_master(rdev->pdev); in cik_gpu_pci_config_reset()
5188 for (i = 0; i < rdev->usec_timeout; i++) { in cik_gpu_pci_config_reset()
5195 if (rdev->flags & RADEON_IS_IGP) in cik_gpu_pci_config_reset()
5200 * cik_asic_reset - soft reset GPU
5241 * cik_gfx_is_lockup - check if the 3D engine is locked up
5264 * cik_mc_program - program the GPU memory controller
5289 dev_warn(rdev->dev, "Wait for MC idle timed out!\n"); in cik_mc_program()
5295 rdev->mc.vram_start >> 12); in cik_mc_program()
5297 rdev->mc.vram_end >> 12); in cik_mc_program()
5299 rdev->vram_scratch.gpu_addr >> 12); in cik_mc_program()
5300 tmp = ((rdev->mc.vram_end >> 24) & 0xFFFF) << 16; in cik_mc_program()
5301 tmp |= ((rdev->mc.vram_start >> 24) & 0xFFFF); in cik_mc_program()
5304 WREG32(HDP_NONSURFACE_BASE, (rdev->mc.vram_start >> 8)); in cik_mc_program()
5311 dev_warn(rdev->dev, "Wait for MC idle timed out!\n"); in cik_mc_program()
5320 * cik_mc_init - initialize the memory controller driver params
5334 rdev->mc.vram_is_ddr = true; in cik_mc_init()
5372 rdev->mc.vram_width = numchan * chansize; in cik_mc_init()
5374 rdev->mc.aper_base = pci_resource_start(rdev->pdev, 0); in cik_mc_init()
5375 rdev->mc.aper_size = pci_resource_len(rdev->pdev, 0); in cik_mc_init()
5377 rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE) * 1024ULL * 1024ULL; in cik_mc_init()
5378 rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE) * 1024ULL * 1024ULL; in cik_mc_init()
5379 rdev->mc.visible_vram_size = rdev->mc.aper_size; in cik_mc_init()
5380 si_vram_gtt_location(rdev, &rdev->mc); in cik_mc_init()
5389 * VMIDs 1-15 are used for userspace clients and are handled
5393 * cik_pcie_gart_tlb_flush - gart tlb flush callback
5404 /* bits 0-15 are the VM contexts0-15 */ in cik_pcie_gart_tlb_flush()
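Per that comment, VM_INVALIDATE_REQUEST takes a per-context bitmask; the GART is backed by VM context 0, so the flush callback only needs bit 0 after pushing the HDP cache out. A sketch of the whole callback:

    void cik_pcie_gart_tlb_flush(struct radeon_device *rdev)
    {
        /* flush the HDP cache so CPU page-table writes reach memory first */
        WREG32(HDP_MEM_COHERENCY_FLUSH_CNTL, 0);

        /* bits 0-15 are the VM contexts0-15; GART is context 0 */
        WREG32(VM_INVALIDATE_REQUEST, 0x1);
    }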
5409 * cik_pcie_gart_enable - gart enable
5414 * sets up the hw for VMIDs 1-15 which are allocated on
5423 if (rdev->gart.robj == NULL) { in cik_pcie_gart_enable()
5424 dev_err(rdev->dev, "No VRAM object for PCIE GART.\n"); in cik_pcie_gart_enable()
5425 return -EINVAL; in cik_pcie_gart_enable()
5450 WREG32(VM_CONTEXT0_PAGE_TABLE_START_ADDR, rdev->mc.gtt_start >> 12); in cik_pcie_gart_enable()
5451 WREG32(VM_CONTEXT0_PAGE_TABLE_END_ADDR, rdev->mc.gtt_end >> 12); in cik_pcie_gart_enable()
5452 WREG32(VM_CONTEXT0_PAGE_TABLE_BASE_ADDR, rdev->gart.table_addr >> 12); in cik_pcie_gart_enable()
5454 (u32)(rdev->dummy_page.addr >> 12)); in cik_pcie_gart_enable()
5463 /* restore context1-15 */ in cik_pcie_gart_enable()
5466 WREG32(VM_CONTEXT1_PAGE_TABLE_END_ADDR, rdev->vm_manager.max_pfn - 1); in cik_pcie_gart_enable()
5470 rdev->vm_manager.saved_table_addr[i]); in cik_pcie_gart_enable()
5472 WREG32(VM_CONTEXT8_PAGE_TABLE_BASE_ADDR + ((i - 8) << 2), in cik_pcie_gart_enable()
5473 rdev->vm_manager.saved_table_addr[i]); in cik_pcie_gart_enable()
5476 /* enable context1-15 */ in cik_pcie_gart_enable()
5478 (u32)(rdev->dummy_page.addr >> 12)); in cik_pcie_gart_enable()
5481 PAGE_TABLE_BLOCK_SIZE(radeon_vm_block_size - 9) | in cik_pcie_gart_enable()
5495 if (rdev->family == CHIP_KAVERI) { in cik_pcie_gart_enable()
5503 mutex_lock(&rdev->srbm_mutex); in cik_pcie_gart_enable()
5516 /* XXX SDMA RLC - todo */ in cik_pcie_gart_enable()
5519 mutex_unlock(&rdev->srbm_mutex); in cik_pcie_gart_enable()
5523 (unsigned)(rdev->mc.gtt_size >> 20), in cik_pcie_gart_enable()
5524 (unsigned long long)rdev->gart.table_addr); in cik_pcie_gart_enable()
5525 rdev->gart.ready = true; in cik_pcie_gart_enable()
5530 * cik_pcie_gart_disable - gart disable
5545 reg = VM_CONTEXT8_PAGE_TABLE_BASE_ADDR + ((i - 8) << 2); in cik_pcie_gart_disable()
5546 rdev->vm_manager.saved_table_addr[i] = RREG32(reg); in cik_pcie_gart_disable()
5569 * cik_pcie_gart_fini - vm fini callback
5584 * cik_ib_parse - vm ib_parse callback
5599 * VMIDs 1-15 are used for userspace clients and are handled
5603 * cik_vm_init - cik vm init callback
5608 * VMIDs 1-15) (CIK).
5616 * radeon graphics/compute will use VMIDs 1-15 in cik_vm_init()
5618 rdev->vm_manager.nvm = 16; in cik_vm_init()
5620 if (rdev->flags & RADEON_IS_IGP) { in cik_vm_init()
5623 rdev->vm_manager.vram_base_offset = tmp; in cik_vm_init()
5625 rdev->vm_manager.vram_base_offset = 0; in cik_vm_init()
5631 * cik_vm_fini - cik vm fini callback
5642 * cik_vm_decode_fault - print human readable fault info
5660 if (rdev->family == CHIP_HAWAII) in cik_vm_decode_fault()
5672 * cik_vm_flush - cik vm flush using the CP
5680 int usepfp = (ring->idx == RADEON_RING_TYPE_GFX_INDEX); in cik_vm_flush()
5690 (VM_CONTEXT8_PAGE_TABLE_BASE_ADDR + ((vm_id - 8) << 2)) >> 2); in cik_vm_flush()
5722 cik_hdp_flush_cp_ring_emit(rdev, ring->idx); in cik_vm_flush()
5724 /* bits 0-15 are the VM contexts0-15 */ in cik_vm_flush()
5753 * The RLC is a multi-purpose microengine that handles a
5786 for (i = 0; i < rdev->config.cik.max_shader_engines; i++) { in cik_wait_for_rlc_serdes()
5787 for (j = 0; j < rdev->config.cik.max_sh_per_se; j++) { in cik_wait_for_rlc_serdes()
5789 for (k = 0; k < rdev->usec_timeout; k++) { in cik_wait_for_rlc_serdes()
5799 for (k = 0; k < rdev->usec_timeout; k++) { in cik_wait_for_rlc_serdes()
5827 for (i = 0; i < rdev->usec_timeout; i++) { in cik_halt_rlc()
5847 for (i = 0; i < rdev->usec_timeout; i++) { in cik_enter_rlc_safe_mode()
5853 for (i = 0; i < rdev->usec_timeout; i++) { in cik_enter_rlc_safe_mode()
5869 * cik_rlc_stop - stop the RLC ME
5885 * cik_rlc_start - start the RLC ME
5901 * cik_rlc_resume - setup the RLC hw
5907 * Returns 0 for success, -EINVAL if the ucode is not available.
5913 if (!rdev->rlc_fw) in cik_rlc_resume()
5914 return -EINVAL; in cik_rlc_resume()
5939 if (rdev->new_fw) { in cik_rlc_resume()
5941 (const struct rlc_firmware_header_v1_0 *)rdev->rlc_fw->data; in cik_rlc_resume()
5943 (rdev->rlc_fw->data + le32_to_cpu(hdr->header.ucode_array_offset_bytes)); in cik_rlc_resume()
5945 radeon_ucode_print_rlc_hdr(&hdr->header); in cik_rlc_resume()
5947 size = le32_to_cpu(hdr->header.ucode_size_bytes) / 4; in cik_rlc_resume()
5951 WREG32(RLC_GPM_UCODE_ADDR, le32_to_cpu(hdr->header.ucode_version)); in cik_rlc_resume()
5955 switch (rdev->family) { in cik_rlc_resume()
5972 fw_data = (const __be32 *)rdev->rlc_fw->data; in cik_rlc_resume()
5979 /* XXX - find out what chips support lbpw */ in cik_rlc_resume()
5982 if (rdev->family == CHIP_BONAIRE) in cik_rlc_resume()
5996 if (enable && (rdev->cg_flags & RADEON_CG_SUPPORT_GFX_CGCG)) { in cik_enable_cgcg()
6030 if (enable && (rdev->cg_flags & RADEON_CG_SUPPORT_GFX_MGCG)) { in cik_enable_mgcg()
6031 if (rdev->cg_flags & RADEON_CG_SUPPORT_GFX_MGLS) { in cik_enable_mgcg()
6032 if (rdev->cg_flags & RADEON_CG_SUPPORT_GFX_CP_LS) { in cik_enable_mgcg()
6056 if (rdev->cg_flags & RADEON_CG_SUPPORT_GFX_CGTS) { in cik_enable_mgcg()
6062 if ((rdev->cg_flags & RADEON_CG_SUPPORT_GFX_MGLS) && in cik_enable_mgcg()
6063 (rdev->cg_flags & RADEON_CG_SUPPORT_GFX_CGTS_LS)) in cik_enable_mgcg()
6127 if (enable && (rdev->cg_flags & RADEON_CG_SUPPORT_MC_LS)) in cik_enable_mc_ls()
6144 if (enable && (rdev->cg_flags & RADEON_CG_SUPPORT_MC_MGCG)) in cik_enable_mc_mgcg()
6158 if (enable && (rdev->cg_flags & RADEON_CG_SUPPORT_SDMA_MGCG)) { in cik_enable_sdma_mgcg()
6179 if (enable && (rdev->cg_flags & RADEON_CG_SUPPORT_SDMA_LS)) { in cik_enable_sdma_mgls()
6207 if (enable && (rdev->cg_flags & RADEON_CG_SUPPORT_UVD_MGCG)) { in cik_enable_uvd_mgcg()
6235 if (enable && (rdev->cg_flags & RADEON_CG_SUPPORT_BIF_LS)) in cik_enable_bif_mgls()
6253 if (enable && (rdev->cg_flags & RADEON_CG_SUPPORT_HDP_MGCG)) in cik_enable_hdp_mgcg()
6269 if (enable && (rdev->cg_flags & RADEON_CG_SUPPORT_HDP_LS)) in cik_enable_hdp_ls()
6296 if (!(rdev->flags & RADEON_IS_IGP)) { in cik_update_cg()
6312 if (rdev->has_uvd) in cik_update_cg()
6331 if (rdev->has_uvd) in cik_init_cg()
6358 if (enable && (rdev->pg_flags & RADEON_PG_SUPPORT_RLC_SMU_HS)) in cik_enable_sck_slowdown_on_pu()
6372 if (enable && (rdev->pg_flags & RADEON_PG_SUPPORT_RLC_SMU_HS)) in cik_enable_sck_slowdown_on_pd()
6385 if (enable && (rdev->pg_flags & RADEON_PG_SUPPORT_CP)) in cik_enable_cp_pg()
6398 if (enable && (rdev->pg_flags & RADEON_PG_SUPPORT_GDS)) in cik_enable_gds_pg()
6417 if (rdev->family == CHIP_KAVERI) in cik_init_cp_pg_table()
6420 if (rdev->rlc.cp_table_ptr == NULL) in cik_init_cp_pg_table()
6424 dst_ptr = rdev->rlc.cp_table_ptr; in cik_init_cp_pg_table()
6426 if (rdev->new_fw) { in cik_init_cp_pg_table()
6431 hdr = (const struct gfx_firmware_header_v1_0 *)rdev->ce_fw->data; in cik_init_cp_pg_table()
6433 (rdev->ce_fw->data + le32_to_cpu(hdr->header.ucode_array_offset_bytes)); in cik_init_cp_pg_table()
6434 table_offset = le32_to_cpu(hdr->jt_offset); in cik_init_cp_pg_table()
6435 table_size = le32_to_cpu(hdr->jt_size); in cik_init_cp_pg_table()
6437 hdr = (const struct gfx_firmware_header_v1_0 *)rdev->pfp_fw->data; in cik_init_cp_pg_table()
6439 (rdev->pfp_fw->data + le32_to_cpu(hdr->header.ucode_array_offset_bytes)); in cik_init_cp_pg_table()
6440 table_offset = le32_to_cpu(hdr->jt_offset); in cik_init_cp_pg_table()
6441 table_size = le32_to_cpu(hdr->jt_size); in cik_init_cp_pg_table()
6443 hdr = (const struct gfx_firmware_header_v1_0 *)rdev->me_fw->data; in cik_init_cp_pg_table()
6445 (rdev->me_fw->data + le32_to_cpu(hdr->header.ucode_array_offset_bytes)); in cik_init_cp_pg_table()
6446 table_offset = le32_to_cpu(hdr->jt_offset); in cik_init_cp_pg_table()
6447 table_size = le32_to_cpu(hdr->jt_size); in cik_init_cp_pg_table()
6449 hdr = (const struct gfx_firmware_header_v1_0 *)rdev->mec_fw->data; in cik_init_cp_pg_table()
6451 (rdev->mec_fw->data + le32_to_cpu(hdr->header.ucode_array_offset_bytes)); in cik_init_cp_pg_table()
6452 table_offset = le32_to_cpu(hdr->jt_offset); in cik_init_cp_pg_table()
6453 table_size = le32_to_cpu(hdr->jt_size); in cik_init_cp_pg_table()
6455 hdr = (const struct gfx_firmware_header_v1_0 *)rdev->mec2_fw->data; in cik_init_cp_pg_table()
6457 (rdev->mec2_fw->data + le32_to_cpu(hdr->header.ucode_array_offset_bytes)); in cik_init_cp_pg_table()
6458 table_offset = le32_to_cpu(hdr->jt_offset); in cik_init_cp_pg_table()
6459 table_size = le32_to_cpu(hdr->jt_size); in cik_init_cp_pg_table()
6472 fw_data = (const __be32 *)rdev->ce_fw->data; in cik_init_cp_pg_table()
6475 fw_data = (const __be32 *)rdev->pfp_fw->data; in cik_init_cp_pg_table()
6478 fw_data = (const __be32 *)rdev->me_fw->data; in cik_init_cp_pg_table()
6481 fw_data = (const __be32 *)rdev->mec_fw->data; in cik_init_cp_pg_table()
6499 if (enable && (rdev->pg_flags & RADEON_PG_SUPPORT_GFX_PG)) { in cik_enable_gfx_cgpg()
6539 for (i = 0; i < rdev->config.cik.max_cu_per_sh; i ++) { in cik_get_cu_active_bitmap()
6553 for (i = 0; i < rdev->config.cik.max_shader_engines; i++) { in cik_init_ao_cu_mask()
6554 for (j = 0; j < rdev->config.cik.max_sh_per_se; j++) { in cik_init_ao_cu_mask()
6558 for (k = 0; k < rdev->config.cik.max_cu_per_sh; k ++) { in cik_init_ao_cu_mask()
6586 if (enable && (rdev->pg_flags & RADEON_PG_SUPPORT_GFX_SMG)) in cik_enable_gfx_static_mgpg()
6600 if (enable && (rdev->pg_flags & RADEON_PG_SUPPORT_GFX_DMG)) in cik_enable_gfx_dynamic_mgpg()
6616 if (rdev->rlc.cs_data) { in cik_init_gfx_cgpg()
6618 WREG32(RLC_GPM_SCRATCH_DATA, upper_32_bits(rdev->rlc.clear_state_gpu_addr)); in cik_init_gfx_cgpg()
6619 WREG32(RLC_GPM_SCRATCH_DATA, lower_32_bits(rdev->rlc.clear_state_gpu_addr)); in cik_init_gfx_cgpg()
6620 WREG32(RLC_GPM_SCRATCH_DATA, rdev->rlc.clear_state_size); in cik_init_gfx_cgpg()
6626 if (rdev->rlc.reg_list) { in cik_init_gfx_cgpg()
6628 for (i = 0; i < rdev->rlc.reg_list_size; i++) in cik_init_gfx_cgpg()
6629 WREG32(RLC_GPM_SCRATCH_DATA, rdev->rlc.reg_list[i]); in cik_init_gfx_cgpg()
6637 WREG32(RLC_SAVE_AND_RESTORE_BASE, rdev->rlc.save_restore_gpu_addr >> 8); in cik_init_gfx_cgpg()
6638 WREG32(RLC_CP_TABLE_RESTORE, rdev->rlc.cp_table_gpu_addr >> 8); in cik_init_gfx_cgpg()
6673 if (rdev->rlc.cs_data == NULL) in cik_get_csb_size()
6681 for (sect = rdev->rlc.cs_data; sect->section != NULL; ++sect) { in cik_get_csb_size()
6682 for (ext = sect->section; ext->extent != NULL; ++ext) { in cik_get_csb_size()
6683 if (sect->id == SECT_CONTEXT) in cik_get_csb_size()
6684 count += 2 + ext->reg_count; in cik_get_csb_size()
6705 if (rdev->rlc.cs_data == NULL) in cik_get_csb_buffer()
6717 for (sect = rdev->rlc.cs_data; sect->section != NULL; ++sect) { in cik_get_csb_buffer()
6718 for (ext = sect->section; ext->extent != NULL; ++ext) { in cik_get_csb_buffer()
6719 if (sect->id == SECT_CONTEXT) { in cik_get_csb_buffer()
6721 cpu_to_le32(PACKET3(PACKET3_SET_CONTEXT_REG, ext->reg_count)); in cik_get_csb_buffer()
6722 buffer[count++] = cpu_to_le32(ext->reg_index - 0xa000); in cik_get_csb_buffer()
6723 for (i = 0; i < ext->reg_count; i++) in cik_get_csb_buffer()
6724 buffer[count++] = cpu_to_le32(ext->extent[i]); in cik_get_csb_buffer()
6732 buffer[count++] = cpu_to_le32(PA_SC_RASTER_CONFIG - PACKET3_SET_CONTEXT_REG_START); in cik_get_csb_buffer()
6733 switch (rdev->family) { in cik_get_csb_buffer()
6766 if (rdev->pg_flags) { in cik_init_pg()
6769 if (rdev->pg_flags & RADEON_PG_SUPPORT_GFX_PG) { in cik_init_pg()
6781 if (rdev->pg_flags) { in cik_fini_pg()
6783 if (rdev->pg_flags & RADEON_PG_SUPPORT_GFX_PG) { in cik_fini_pg()
6806 * cik_enable_interrupts - Enable the interrupt ring buffer
6821 rdev->ih.enabled = true; in cik_enable_interrupts()
6825 * cik_disable_interrupts - Disable the interrupt ring buffer
6843 rdev->ih.enabled = false; in cik_disable_interrupts()
6844 rdev->ih.rptr = 0; in cik_disable_interrupts()
6848 * cik_disable_interrupt_state - Disable all interrupt sources
6883 if (rdev->num_crtc >= 4) { in cik_disable_interrupt_state()
6887 if (rdev->num_crtc >= 6) { in cik_disable_interrupt_state()
6892 if (rdev->num_crtc >= 2) { in cik_disable_interrupt_state()
6896 if (rdev->num_crtc >= 4) { in cik_disable_interrupt_state()
6900 if (rdev->num_crtc >= 6) { in cik_disable_interrupt_state()
6925 * cik_irq_init - init and enable the interrupt ring
6958 WREG32(INTERRUPT_CNTL2, rdev->dummy_page.addr >> 8); in cik_irq_init()
6960 /* IH_DUMMY_RD_OVERRIDE=0 - dummy read disabled with msi, enabled without msi in cik_irq_init()
6961 * IH_DUMMY_RD_OVERRIDE=1 - dummy read controlled by IH_DUMMY_RD_EN in cik_irq_init()
6964 /* IH_REQ_NONSNOOP_EN=1 if ring is in non-cacheable memory, e.g., vram */ in cik_irq_init()
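Those three comments document the two INTERRUPT_CNTL bits cleared right before the IH ring is programmed; the surrounding code, reconstructed:

    interrupt_cntl = RREG32(INTERRUPT_CNTL);
    interrupt_cntl &= ~IH_DUMMY_RD_OVERRIDE;  /* let MSI state pick the dummy-read mode */
    interrupt_cntl &= ~IH_REQ_NONSNOOP_EN;    /* IH ring is in snooped system memory */
    WREG32(INTERRUPT_CNTL, interrupt_cntl);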
6968 WREG32(IH_RB_BASE, rdev->ih.gpu_addr >> 8); in cik_irq_init()
6969 rb_bufsz = order_base_2(rdev->ih.ring_size / 4); in cik_irq_init()
6975 if (rdev->wb.enabled) in cik_irq_init()
6979 WREG32(IH_RB_WPTR_ADDR_LO, (rdev->wb.gpu_addr + R600_WB_IH_WPTR_OFFSET) & 0xFFFFFFFC); in cik_irq_init()
6980 WREG32(IH_RB_WPTR_ADDR_HI, upper_32_bits(rdev->wb.gpu_addr + R600_WB_IH_WPTR_OFFSET) & 0xFF); in cik_irq_init()
6991 if (rdev->msi_enabled) in cik_irq_init()
6998 pci_set_master(rdev->pdev); in cik_irq_init()
7007 * cik_irq_set - enable/disable interrupt sources
7011 * Enable interrupt sources on the GPU (vblanks, hpd,
7025 if (!rdev->irq.installed) { in cik_irq_set()
7027 return -EINVAL; in cik_irq_set()
7030 if (!rdev->ih.enabled) { in cik_irq_set()
7061 if (atomic_read(&rdev->irq.ring_int[RADEON_RING_TYPE_GFX_INDEX])) { in cik_irq_set()
7065 if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_CP1_INDEX])) { in cik_irq_set()
7066 struct radeon_ring *ring = &rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX]; in cik_irq_set()
7068 if (ring->me == 1) { in cik_irq_set()
7069 switch (ring->pipe) { in cik_irq_set()
7083 DRM_DEBUG("cik_irq_set: sw int cp1 invalid pipe %d\n", ring->pipe); in cik_irq_set()
7086 } else if (ring->me == 2) { in cik_irq_set()
7087 switch (ring->pipe) { in cik_irq_set()
7101 DRM_DEBUG("cik_irq_set: sw int cp1 invalid pipe %d\n", ring->pipe); in cik_irq_set()
7105 DRM_DEBUG("cik_irq_set: sw int cp1 invalid me %d\n", ring->me); in cik_irq_set()
7108 if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_CP2_INDEX])) { in cik_irq_set()
7109 struct radeon_ring *ring = &rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX]; in cik_irq_set()
7111 if (ring->me == 1) { in cik_irq_set()
7112 switch (ring->pipe) { in cik_irq_set()
7126 DRM_DEBUG("cik_irq_set: sw int cp2 invalid pipe %d\n", ring->pipe); in cik_irq_set()
7129 } else if (ring->me == 2) { in cik_irq_set()
7130 switch (ring->pipe) { in cik_irq_set()
7144 DRM_DEBUG("cik_irq_set: sw int cp2 invalid pipe %d\n", ring->pipe); in cik_irq_set()
7148 DRM_DEBUG("cik_irq_set: sw int cp2 invalid me %d\n", ring->me); in cik_irq_set()
7152 if (atomic_read(&rdev->irq.ring_int[R600_RING_TYPE_DMA_INDEX])) { in cik_irq_set()
7157 if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_DMA1_INDEX])) { in cik_irq_set()
7162 if (rdev->irq.crtc_vblank_int[0] || in cik_irq_set()
7163 atomic_read(&rdev->irq.pflip[0])) { in cik_irq_set()
7167 if (rdev->irq.crtc_vblank_int[1] || in cik_irq_set()
7168 atomic_read(&rdev->irq.pflip[1])) { in cik_irq_set()
7172 if (rdev->irq.crtc_vblank_int[2] || in cik_irq_set()
7173 atomic_read(&rdev->irq.pflip[2])) { in cik_irq_set()
7177 if (rdev->irq.crtc_vblank_int[3] || in cik_irq_set()
7178 atomic_read(&rdev->irq.pflip[3])) { in cik_irq_set()
7182 if (rdev->irq.crtc_vblank_int[4] || in cik_irq_set()
7183 atomic_read(&rdev->irq.pflip[4])) { in cik_irq_set()
7187 if (rdev->irq.crtc_vblank_int[5] || in cik_irq_set()
7188 atomic_read(&rdev->irq.pflip[5])) { in cik_irq_set()
7192 if (rdev->irq.hpd[0]) { in cik_irq_set()
7193 DRM_DEBUG("cik_irq_set: hpd 1\n"); in cik_irq_set()
7196 if (rdev->irq.hpd[1]) { in cik_irq_set()
7197 DRM_DEBUG("cik_irq_set: hpd 2\n"); in cik_irq_set()
7200 if (rdev->irq.hpd[2]) { in cik_irq_set()
7201 DRM_DEBUG("cik_irq_set: hpd 3\n"); in cik_irq_set()
7204 if (rdev->irq.hpd[3]) { in cik_irq_set()
7205 DRM_DEBUG("cik_irq_set: hpd 4\n"); in cik_irq_set()
7208 if (rdev->irq.hpd[4]) { in cik_irq_set()
7209 DRM_DEBUG("cik_irq_set: hpd 5\n"); in cik_irq_set()
7212 if (rdev->irq.hpd[5]) { in cik_irq_set()
7213 DRM_DEBUG("cik_irq_set: hpd 6\n"); in cik_irq_set()
7235 if (rdev->num_crtc >= 4) { in cik_irq_set()
7239 if (rdev->num_crtc >= 6) { in cik_irq_set()
7244 if (rdev->num_crtc >= 2) { in cik_irq_set()
7250 if (rdev->num_crtc >= 4) { in cik_irq_set()
7256 if (rdev->num_crtc >= 6) { in cik_irq_set()
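/*
 * Sketch only: the switch ladders above in cik_irq_set() reduce to
 * "select the per-pipe compute interrupt control register for
 * (me, pipe)".  The CP_ME<m>_PIPE<p>_INT_CNTL names and the 4-byte
 * register stride are assumptions about the CIK register map.
 */
static u32 cp_compute_int_cntl(u32 me, u32 pipe)
{
	if (me == 1)			/* first MEC, pipes 0-3 */
		return CP_ME1_PIPE0_INT_CNTL + pipe * 4;
	if (me == 2)			/* second MEC, pipes 0-3 */
		return CP_ME2_PIPE0_INT_CNTL + pipe * 4;
	return 0;			/* ME0 is gfx, no compute pipes */
}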
7277 * cik_irq_ack - ack interrupt sources
7281 * Ack interrupt sources on the GPU (vblanks, hpd,
7289 rdev->irq.stat_regs.cik.disp_int = RREG32(DISP_INTERRUPT_STATUS); in cik_irq_ack()
7290 rdev->irq.stat_regs.cik.disp_int_cont = RREG32(DISP_INTERRUPT_STATUS_CONTINUE); in cik_irq_ack()
7291 rdev->irq.stat_regs.cik.disp_int_cont2 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE2); in cik_irq_ack()
7292 rdev->irq.stat_regs.cik.disp_int_cont3 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE3); in cik_irq_ack()
7293 rdev->irq.stat_regs.cik.disp_int_cont4 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE4); in cik_irq_ack()
7294 rdev->irq.stat_regs.cik.disp_int_cont5 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE5); in cik_irq_ack()
7295 rdev->irq.stat_regs.cik.disp_int_cont6 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE6); in cik_irq_ack()
7297 rdev->irq.stat_regs.cik.d1grph_int = RREG32(GRPH_INT_STATUS + in cik_irq_ack()
7299 rdev->irq.stat_regs.cik.d2grph_int = RREG32(GRPH_INT_STATUS + in cik_irq_ack()
7301 if (rdev->num_crtc >= 4) { in cik_irq_ack()
7302 rdev->irq.stat_regs.cik.d3grph_int = RREG32(GRPH_INT_STATUS + in cik_irq_ack()
7304 rdev->irq.stat_regs.cik.d4grph_int = RREG32(GRPH_INT_STATUS + in cik_irq_ack()
7307 if (rdev->num_crtc >= 6) { in cik_irq_ack()
7308 rdev->irq.stat_regs.cik.d5grph_int = RREG32(GRPH_INT_STATUS + in cik_irq_ack()
7310 rdev->irq.stat_regs.cik.d6grph_int = RREG32(GRPH_INT_STATUS + in cik_irq_ack()
7314 if (rdev->irq.stat_regs.cik.d1grph_int & GRPH_PFLIP_INT_OCCURRED) in cik_irq_ack()
7317 if (rdev->irq.stat_regs.cik.d2grph_int & GRPH_PFLIP_INT_OCCURRED) in cik_irq_ack()
7320 if (rdev->irq.stat_regs.cik.disp_int & LB_D1_VBLANK_INTERRUPT) in cik_irq_ack()
7322 if (rdev->irq.stat_regs.cik.disp_int & LB_D1_VLINE_INTERRUPT) in cik_irq_ack()
7324 if (rdev->irq.stat_regs.cik.disp_int_cont & LB_D2_VBLANK_INTERRUPT) in cik_irq_ack()
7326 if (rdev->irq.stat_regs.cik.disp_int_cont & LB_D2_VLINE_INTERRUPT) in cik_irq_ack()
7329 if (rdev->num_crtc >= 4) { in cik_irq_ack()
7330 if (rdev->irq.stat_regs.cik.d3grph_int & GRPH_PFLIP_INT_OCCURRED) in cik_irq_ack()
7333 if (rdev->irq.stat_regs.cik.d4grph_int & GRPH_PFLIP_INT_OCCURRED) in cik_irq_ack()
7336 if (rdev->irq.stat_regs.cik.disp_int_cont2 & LB_D3_VBLANK_INTERRUPT) in cik_irq_ack()
7338 if (rdev->irq.stat_regs.cik.disp_int_cont2 & LB_D3_VLINE_INTERRUPT) in cik_irq_ack()
7340 if (rdev->irq.stat_regs.cik.disp_int_cont3 & LB_D4_VBLANK_INTERRUPT) in cik_irq_ack()
7342 if (rdev->irq.stat_regs.cik.disp_int_cont3 & LB_D4_VLINE_INTERRUPT) in cik_irq_ack()
7346 if (rdev->num_crtc >= 6) { in cik_irq_ack()
7347 if (rdev->irq.stat_regs.cik.d5grph_int & GRPH_PFLIP_INT_OCCURRED) in cik_irq_ack()
7350 if (rdev->irq.stat_regs.cik.d6grph_int & GRPH_PFLIP_INT_OCCURRED) in cik_irq_ack()
7353 if (rdev->irq.stat_regs.cik.disp_int_cont4 & LB_D5_VBLANK_INTERRUPT) in cik_irq_ack()
7355 if (rdev->irq.stat_regs.cik.disp_int_cont4 & LB_D5_VLINE_INTERRUPT) in cik_irq_ack()
7357 if (rdev->irq.stat_regs.cik.disp_int_cont5 & LB_D6_VBLANK_INTERRUPT) in cik_irq_ack()
7359 if (rdev->irq.stat_regs.cik.disp_int_cont5 & LB_D6_VLINE_INTERRUPT) in cik_irq_ack()
7363 if (rdev->irq.stat_regs.cik.disp_int & DC_HPD1_INTERRUPT) { in cik_irq_ack()
7368 if (rdev->irq.stat_regs.cik.disp_int_cont & DC_HPD2_INTERRUPT) { in cik_irq_ack()
7373 if (rdev->irq.stat_regs.cik.disp_int_cont2 & DC_HPD3_INTERRUPT) { in cik_irq_ack()
7378 if (rdev->irq.stat_regs.cik.disp_int_cont3 & DC_HPD4_INTERRUPT) { in cik_irq_ack()
7383 if (rdev->irq.stat_regs.cik.disp_int_cont4 & DC_HPD5_INTERRUPT) { in cik_irq_ack()
7388 if (rdev->irq.stat_regs.cik.disp_int_cont5 & DC_HPD6_INTERRUPT) { in cik_irq_ack()
7393 if (rdev->irq.stat_regs.cik.disp_int & DC_HPD1_RX_INTERRUPT) { in cik_irq_ack()
7398 if (rdev->irq.stat_regs.cik.disp_int_cont & DC_HPD2_RX_INTERRUPT) { in cik_irq_ack()
7403 if (rdev->irq.stat_regs.cik.disp_int_cont2 & DC_HPD3_RX_INTERRUPT) { in cik_irq_ack()
7408 if (rdev->irq.stat_regs.cik.disp_int_cont3 & DC_HPD4_RX_INTERRUPT) { in cik_irq_ack()
7413 if (rdev->irq.stat_regs.cik.disp_int_cont4 & DC_HPD5_RX_INTERRUPT) { in cik_irq_ack()
7418 if (rdev->irq.stat_regs.cik.disp_int_cont5 & DC_HPD6_RX_INTERRUPT) { in cik_irq_ack()
7426 * cik_irq_disable - disable interrupts
7442 * cik_irq_suspend - disable interrupts for suspend
7456 * cik_irq_fini - tear down interrupt support
7471 * cik_get_ih_wptr - get the IH ring buffer wptr
7485 if (rdev->wb.enabled) in cik_get_ih_wptr()
7486 wptr = le32_to_cpu(rdev->wb.wb[R600_WB_IH_WPTR_OFFSET/4]); in cik_get_ih_wptr()
7496 dev_warn(rdev->dev, "IH ring buffer overflow (0x%08X, 0x%08X, 0x%08X)\n", in cik_get_ih_wptr()
7497 wptr, rdev->ih.rptr, (wptr + 16) & rdev->ih.ptr_mask); in cik_get_ih_wptr()
7498 rdev->ih.rptr = (wptr + 16) & rdev->ih.ptr_mask; in cik_get_ih_wptr()
7503 return (wptr & rdev->ih.ptr_mask); in cik_get_ih_wptr()
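/* Worked example for the overflow path above (ring size assumed 64KB,
 * so ptr_mask = 0xFFFF): if wptr reports overflow at 0x0120, processing
 * resumes at (0x0120 + 16) & 0xFFFF = 0x0130, one 16-byte IH vector
 * past the newest write, discarding the oldest pending entries. */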
7508 * [7:0] - interrupt source id
7509 * [31:8] - reserved
7510 * [59:32] - interrupt source data
7511 * [63:60] - reserved
7512 * [71:64] - RINGID
7515 * QUEUE_ID - for compute, which of the 8 queues owned by the dispatcher
7516 * - for gfx, hw shader state (0=PS...5=LS, 6=CS)
7517 * ME_ID - 0 = gfx, 1 = first 4 CS pipes, 2 = second 4 CS pipes
7518 * PIPE_ID - ME0 0=3D
7519 * - ME1&2 compute dispatcher (4 pipes each)
7522 * INSTANCE_ID - 0 = sdma0, 1 = sdma1
7523 * QUEUE_ID - 0 = gfx, 1 = rlc0, 2 = rlc1
7524 * [79:72] - VMID
7525 * [95:80] - PASID
7526 * [127:96] - reserved
7529 * cik_irq_process - interrupt handler
7540 struct radeon_ring *cp1_ring = &rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX]; in cik_irq_process()
7541 struct radeon_ring *cp2_ring = &rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX]; in cik_irq_process()
7553 if (!rdev->ih.enabled || rdev->shutdown) in cik_irq_process()
7560 if (atomic_xchg(&rdev->ih.lock, 1)) in cik_irq_process()
7563 rptr = rdev->ih.rptr; in cik_irq_process()
7576 src_id = le32_to_cpu(rdev->ih.ring[ring_index]) & 0xff; in cik_irq_process()
7577 src_data = le32_to_cpu(rdev->ih.ring[ring_index + 1]) & 0xfffffff; in cik_irq_process()
7578 ring_id = le32_to_cpu(rdev->ih.ring[ring_index + 2]) & 0xff; in cik_irq_process()
7584 if (!(rdev->irq.stat_regs.cik.disp_int & LB_D1_VBLANK_INTERRUPT)) in cik_irq_process()
7587 if (rdev->irq.crtc_vblank_int[0]) { in cik_irq_process()
7589 rdev->pm.vblank_sync = true; in cik_irq_process()
7590 wake_up(&rdev->irq.vblank_queue); in cik_irq_process()
7592 if (atomic_read(&rdev->irq.pflip[0])) in cik_irq_process()
7594 rdev->irq.stat_regs.cik.disp_int &= ~LB_D1_VBLANK_INTERRUPT; in cik_irq_process()
7599 if (!(rdev->irq.stat_regs.cik.disp_int & LB_D1_VLINE_INTERRUPT)) in cik_irq_process()
7602 rdev->irq.stat_regs.cik.disp_int &= ~LB_D1_VLINE_INTERRUPT; in cik_irq_process()
7614 if (!(rdev->irq.stat_regs.cik.disp_int_cont & LB_D2_VBLANK_INTERRUPT)) in cik_irq_process()
7617 if (rdev->irq.crtc_vblank_int[1]) { in cik_irq_process()
7619 rdev->pm.vblank_sync = true; in cik_irq_process()
7620 wake_up(&rdev->irq.vblank_queue); in cik_irq_process()
7622 if (atomic_read(&rdev->irq.pflip[1])) in cik_irq_process()
7624 rdev->irq.stat_regs.cik.disp_int_cont &= ~LB_D2_VBLANK_INTERRUPT; in cik_irq_process()
7629 if (!(rdev->irq.stat_regs.cik.disp_int_cont & LB_D2_VLINE_INTERRUPT)) in cik_irq_process()
7632 rdev->irq.stat_regs.cik.disp_int_cont &= ~LB_D2_VLINE_INTERRUPT; in cik_irq_process()
7644 if (!(rdev->irq.stat_regs.cik.disp_int_cont2 & LB_D3_VBLANK_INTERRUPT)) in cik_irq_process()
7647 if (rdev->irq.crtc_vblank_int[2]) { in cik_irq_process()
7649 rdev->pm.vblank_sync = true; in cik_irq_process()
7650 wake_up(&rdev->irq.vblank_queue); in cik_irq_process()
7652 if (atomic_read(&rdev->irq.pflip[2])) in cik_irq_process()
7654 rdev->irq.stat_regs.cik.disp_int_cont2 &= ~LB_D3_VBLANK_INTERRUPT; in cik_irq_process()
7659 if (!(rdev->irq.stat_regs.cik.disp_int_cont2 & LB_D3_VLINE_INTERRUPT)) in cik_irq_process()
7662 rdev->irq.stat_regs.cik.disp_int_cont2 &= ~LB_D3_VLINE_INTERRUPT; in cik_irq_process()
7674 if (!(rdev->irq.stat_regs.cik.disp_int_cont3 & LB_D4_VBLANK_INTERRUPT)) in cik_irq_process()
7677 if (rdev->irq.crtc_vblank_int[3]) { in cik_irq_process()
7679 rdev->pm.vblank_sync = true; in cik_irq_process()
7680 wake_up(&rdev->irq.vblank_queue); in cik_irq_process()
7682 if (atomic_read(&rdev->irq.pflip[3])) in cik_irq_process()
7684 rdev->irq.stat_regs.cik.disp_int_cont3 &= ~LB_D4_VBLANK_INTERRUPT; in cik_irq_process()
7689 if (!(rdev->irq.stat_regs.cik.disp_int_cont3 & LB_D4_VLINE_INTERRUPT)) in cik_irq_process()
7692 rdev->irq.stat_regs.cik.disp_int_cont3 &= ~LB_D4_VLINE_INTERRUPT; in cik_irq_process()
7704 if (!(rdev->irq.stat_regs.cik.disp_int_cont4 & LB_D5_VBLANK_INTERRUPT)) in cik_irq_process()
7707 if (rdev->irq.crtc_vblank_int[4]) { in cik_irq_process()
7709 rdev->pm.vblank_sync = true; in cik_irq_process()
7710 wake_up(&rdev->irq.vblank_queue); in cik_irq_process()
7712 if (atomic_read(&rdev->irq.pflip[4])) in cik_irq_process()
7714 rdev->irq.stat_regs.cik.disp_int_cont4 &= ~LB_D5_VBLANK_INTERRUPT; in cik_irq_process()
7719 if (!(rdev->irq.stat_regs.cik.disp_int_cont4 & LB_D5_VLINE_INTERRUPT)) in cik_irq_process()
7722 rdev->irq.stat_regs.cik.disp_int_cont4 &= ~LB_D5_VLINE_INTERRUPT; in cik_irq_process()
7734 if (!(rdev->irq.stat_regs.cik.disp_int_cont5 & LB_D6_VBLANK_INTERRUPT)) in cik_irq_process()
7737 if (rdev->irq.crtc_vblank_int[5]) { in cik_irq_process()
7739 rdev->pm.vblank_sync = true; in cik_irq_process()
7740 wake_up(&rdev->irq.vblank_queue); in cik_irq_process()
7742 if (atomic_read(&rdev->irq.pflip[5])) in cik_irq_process()
7744 rdev->irq.stat_regs.cik.disp_int_cont5 &= ~LB_D6_VBLANK_INTERRUPT; in cik_irq_process()
7749 if (!(rdev->irq.stat_regs.cik.disp_int_cont5 & LB_D6_VLINE_INTERRUPT)) in cik_irq_process()
7752 rdev->irq.stat_regs.cik.disp_int_cont5 &= ~LB_D6_VLINE_INTERRUPT; in cik_irq_process()
7767 DRM_DEBUG("IH: D%d flip\n", ((src_id - 8) >> 1) + 1); in cik_irq_process()
7769 radeon_crtc_handle_flip(rdev, (src_id - 8) >> 1); in cik_irq_process()
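/* Mapping implied by the expression above: pageflip src_ids arrive in
 * pairs, so src_id 8/9 -> crtc 0 (D1), 10/11 -> crtc 1 (D2), ...,
 * 18/19 -> crtc 5 (D6). */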
7771 case 42: /* HPD hotplug */ in cik_irq_process()
7774 if (!(rdev->irq.stat_regs.cik.disp_int & DC_HPD1_INTERRUPT)) in cik_irq_process()
7777 rdev->irq.stat_regs.cik.disp_int &= ~DC_HPD1_INTERRUPT; in cik_irq_process()
7783 if (!(rdev->irq.stat_regs.cik.disp_int_cont & DC_HPD2_INTERRUPT)) in cik_irq_process()
7786 rdev->irq.stat_regs.cik.disp_int_cont &= ~DC_HPD2_INTERRUPT; in cik_irq_process()
7792 if (!(rdev->irq.stat_regs.cik.disp_int_cont2 & DC_HPD3_INTERRUPT)) in cik_irq_process()
7795 rdev->irq.stat_regs.cik.disp_int_cont2 &= ~DC_HPD3_INTERRUPT; in cik_irq_process()
7801 if (!(rdev->irq.stat_regs.cik.disp_int_cont3 & DC_HPD4_INTERRUPT)) in cik_irq_process()
7804 rdev->irq.stat_regs.cik.disp_int_cont3 &= ~DC_HPD4_INTERRUPT; in cik_irq_process()
7810 if (!(rdev->irq.stat_regs.cik.disp_int_cont4 & DC_HPD5_INTERRUPT)) in cik_irq_process()
7813 rdev->irq.stat_regs.cik.disp_int_cont4 &= ~DC_HPD5_INTERRUPT; in cik_irq_process()
7819 if (!(rdev->irq.stat_regs.cik.disp_int_cont5 & DC_HPD6_INTERRUPT)) in cik_irq_process()
7822 rdev->irq.stat_regs.cik.disp_int_cont5 &= ~DC_HPD6_INTERRUPT; in cik_irq_process()
7828 if (!(rdev->irq.stat_regs.cik.disp_int & DC_HPD1_RX_INTERRUPT)) in cik_irq_process()
7831 rdev->irq.stat_regs.cik.disp_int &= ~DC_HPD1_RX_INTERRUPT; in cik_irq_process()
7837 if (!(rdev->irq.stat_regs.cik.disp_int_cont & DC_HPD2_RX_INTERRUPT)) in cik_irq_process()
7840 rdev->irq.stat_regs.cik.disp_int_cont &= ~DC_HPD2_RX_INTERRUPT; in cik_irq_process()
7846 if (!(rdev->irq.stat_regs.cik.disp_int_cont2 & DC_HPD3_RX_INTERRUPT)) in cik_irq_process()
7849 rdev->irq.stat_regs.cik.disp_int_cont2 &= ~DC_HPD3_RX_INTERRUPT; in cik_irq_process()
7855 if (!(rdev->irq.stat_regs.cik.disp_int_cont3 & DC_HPD4_RX_INTERRUPT)) in cik_irq_process()
7858 rdev->irq.stat_regs.cik.disp_int_cont3 &= ~DC_HPD4_RX_INTERRUPT; in cik_irq_process()
7864 if (!(rdev->irq.stat_regs.cik.disp_int_cont4 & DC_HPD5_RX_INTERRUPT)) in cik_irq_process()
7867 rdev->irq.stat_regs.cik.disp_int_cont4 &= ~DC_HPD5_RX_INTERRUPT; in cik_irq_process()
7873 if (!(rdev->irq.stat_regs.cik.disp_int_cont5 & DC_HPD6_RX_INTERRUPT)) in cik_irq_process()
7876 rdev->irq.stat_regs.cik.disp_int_cont5 &= ~DC_HPD6_RX_INTERRUPT; in cik_irq_process()
7903 dev_err(rdev->dev, "GPU fault detected: %d 0x%08x\n", src_id, src_data); in cik_irq_process()
7904 dev_err(rdev->dev, " VM_CONTEXT1_PROTECTION_FAULT_ADDR 0x%08X\n", in cik_irq_process()
7906 dev_err(rdev->dev, " VM_CONTEXT1_PROTECTION_FAULT_STATUS 0x%08X\n", in cik_irq_process()
7940 if ((cp1_ring->me == me_id) && (cp1_ring->pipe == pipe_id)) in cik_irq_process()
7942 if ((cp2_ring->me == me_id) && (cp2_ring->pipe == pipe_id)) in cik_irq_process()
8025 rdev->pm.dpm.thermal.high_to_low = false; in cik_irq_process()
8030 rdev->pm.dpm.thermal.high_to_low = true; in cik_irq_process()
8082 rptr &= rdev->ih.ptr_mask; in cik_irq_process()
8086 schedule_work(&rdev->dp_work); in cik_irq_process()
8088 schedule_delayed_work(&rdev->hotplug_work, 0); in cik_irq_process()
8090 rdev->needs_reset = true; in cik_irq_process()
8091 wake_up_all(&rdev->fence_queue); in cik_irq_process()
8094 schedule_work(&rdev->pm.dpm.thermal.work); in cik_irq_process()
8095 rdev->ih.rptr = rptr; in cik_irq_process()
8096 atomic_set(&rdev->ih.lock, 0); in cik_irq_process()
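/*
 * Reduced sketch of the handler above (illustration, not driver code):
 * single-consumer locking via atomic_xchg, a drain loop over 16-byte
 * IH vectors, and deferred work kicked only after rptr is published.
 */
static irqreturn_t ih_process_sketch(struct radeon_device *rdev)
{
	u32 wptr, rptr;

	if (atomic_xchg(&rdev->ih.lock, 1))
		return IRQ_NONE;	/* another CPU is already draining */

	wptr = cik_get_ih_wptr(rdev);
	rptr = rdev->ih.rptr;
	while (rptr != wptr) {
		/* ... decode and dispatch one entry (see layout above) ... */
		rptr = (rptr + 16) & rdev->ih.ptr_mask;
	}
	rdev->ih.rptr = rptr;
	atomic_set(&rdev->ih.lock, 0);
	/* hotplug/thermal/reset work would be scheduled here, as above */
	return IRQ_HANDLED;
}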
8113 if (!rdev->has_uvd) in cik_uvd_init()
8118 dev_err(rdev->dev, "failed UVD (%d) init.\n", r); in cik_uvd_init()
8120 * At this point rdev->uvd.vcpu_bo is NULL which trickles down in cik_uvd_init()
8125 rdev->has_uvd = false; in cik_uvd_init()
8128 rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_obj = NULL; in cik_uvd_init()
8129 r600_ring_init(rdev, &rdev->ring[R600_RING_TYPE_UVD_INDEX], 4096); in cik_uvd_init()
8136 if (!rdev->has_uvd) in cik_uvd_start()
8141 dev_err(rdev->dev, "failed UVD resume (%d).\n", r); in cik_uvd_start()
8146 dev_err(rdev->dev, "failed UVD 4.2 resume (%d).\n", r); in cik_uvd_start()
8151 dev_err(rdev->dev, "failed initializing UVD fences (%d).\n", r); in cik_uvd_start()
8157 rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_size = 0; in cik_uvd_start()
8165 if (!rdev->has_uvd || !rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_size) in cik_uvd_resume()
8168 ring = &rdev->ring[R600_RING_TYPE_UVD_INDEX]; in cik_uvd_resume()
8169 r = radeon_ring_init(rdev, ring, ring->ring_size, 0, PACKET0(UVD_NO_OP, 0)); in cik_uvd_resume()
8171 dev_err(rdev->dev, "failed initializing UVD ring (%d).\n", r); in cik_uvd_resume()
8176 dev_err(rdev->dev, "failed initializing UVD (%d).\n", r); in cik_uvd_resume()
8185 if (!rdev->has_vce) in cik_vce_init()
8190 dev_err(rdev->dev, "failed VCE (%d) init.\n", r); in cik_vce_init()
8192 * At this point rdev->vce.vcpu_bo is NULL which trickles down in cik_vce_init()
8197 rdev->has_vce = false; in cik_vce_init()
8200 rdev->ring[TN_RING_TYPE_VCE1_INDEX].ring_obj = NULL; in cik_vce_init()
8201 r600_ring_init(rdev, &rdev->ring[TN_RING_TYPE_VCE1_INDEX], 4096); in cik_vce_init()
8202 rdev->ring[TN_RING_TYPE_VCE2_INDEX].ring_obj = NULL; in cik_vce_init()
8203 r600_ring_init(rdev, &rdev->ring[TN_RING_TYPE_VCE2_INDEX], 4096); in cik_vce_init()
8210 if (!rdev->has_vce) in cik_vce_start()
8215 dev_err(rdev->dev, "failed VCE resume (%d).\n", r); in cik_vce_start()
8220 dev_err(rdev->dev, "failed VCE resume (%d).\n", r); in cik_vce_start()
8225 dev_err(rdev->dev, "failed initializing VCE1 fences (%d).\n", r); in cik_vce_start()
8230 dev_err(rdev->dev, "failed initializing VCE2 fences (%d).\n", r); in cik_vce_start()
8236 rdev->ring[TN_RING_TYPE_VCE1_INDEX].ring_size = 0; in cik_vce_start()
8237 rdev->ring[TN_RING_TYPE_VCE2_INDEX].ring_size = 0; in cik_vce_start()
8245 if (!rdev->has_vce || !rdev->ring[TN_RING_TYPE_VCE1_INDEX].ring_size) in cik_vce_resume()
8248 ring = &rdev->ring[TN_RING_TYPE_VCE1_INDEX]; in cik_vce_resume()
8249 r = radeon_ring_init(rdev, ring, ring->ring_size, 0, VCE_CMD_NO_OP); in cik_vce_resume()
8251 dev_err(rdev->dev, "failed initializing VCE1 ring (%d).\n", r); in cik_vce_resume()
8254 ring = &rdev->ring[TN_RING_TYPE_VCE2_INDEX]; in cik_vce_resume()
8255 r = radeon_ring_init(rdev, ring, ring->ring_size, 0, VCE_CMD_NO_OP); in cik_vce_resume()
8257 dev_err(rdev->dev, "failed initializing VCE1 ring (%d).\n", r); in cik_vce_resume()
8262 dev_err(rdev->dev, "failed initializing VCE (%d).\n", r); in cik_vce_resume()
8268 * cik_startup - program the asic to a functional state
8294 if (!(rdev->flags & RADEON_IS_IGP) && !rdev->pm.dpm_enabled) { in cik_startup()
8308 if (rdev->flags & RADEON_IS_IGP) { in cik_startup()
8309 if (rdev->family == CHIP_KAVERI) { in cik_startup()
8310 rdev->rlc.reg_list = spectre_rlc_save_restore_register_list; in cik_startup()
8311 rdev->rlc.reg_list_size = in cik_startup()
8314 rdev->rlc.reg_list = kalindi_rlc_save_restore_register_list; in cik_startup()
8315 rdev->rlc.reg_list_size = in cik_startup()
8319 rdev->rlc.cs_data = ci_cs_data; in cik_startup()
8320 rdev->rlc.cp_table_size = ALIGN(CP_ME_TABLE_SIZE * 5 * 4, 2048); /* CP JT */ in cik_startup()
8321 rdev->rlc.cp_table_size += 64 * 1024; /* GDS */ in cik_startup()
8342 dev_err(rdev->dev, "failed initializing CP fences (%d).\n", r); in cik_startup()
8348 dev_err(rdev->dev, "failed initializing CP fences (%d).\n", r); in cik_startup()
8354 dev_err(rdev->dev, "failed initializing CP fences (%d).\n", r); in cik_startup()
8360 dev_err(rdev->dev, "failed initializing DMA fences (%d).\n", r); in cik_startup()
8366 dev_err(rdev->dev, "failed initializing DMA fences (%d).\n", r); in cik_startup()
8374 if (!rdev->irq.installed) { in cik_startup()
8388 if (rdev->family == CHIP_HAWAII) { in cik_startup()
8389 if (rdev->new_fw) in cik_startup()
8397 ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX]; in cik_startup()
8398 r = radeon_ring_init(rdev, ring, ring->ring_size, RADEON_WB_CP_RPTR_OFFSET, in cik_startup()
8404 /* type-2 packets are deprecated on MEC, use type-3 instead */ in cik_startup()
8405 ring = &rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX]; in cik_startup()
8406 r = radeon_ring_init(rdev, ring, ring->ring_size, RADEON_WB_CP1_RPTR_OFFSET, in cik_startup()
8410 ring->me = 1; /* first MEC */ in cik_startup()
8411 ring->pipe = 0; /* first pipe */ in cik_startup()
8412 ring->queue = 0; /* first queue */ in cik_startup()
8413 ring->wptr_offs = CIK_WB_CP1_WPTR_OFFSET; in cik_startup()
8415 /* type-2 packets are deprecated on MEC, use type-3 instead */ in cik_startup()
8416 ring = &rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX]; in cik_startup()
8417 r = radeon_ring_init(rdev, ring, ring->ring_size, RADEON_WB_CP2_RPTR_OFFSET, in cik_startup()
8422 ring->me = 1; /* first MEC */ in cik_startup()
8423 ring->pipe = 0; /* first pipe */ in cik_startup()
8424 ring->queue = 1; /* second queue */ in cik_startup()
8425 ring->wptr_offs = CIK_WB_CP2_WPTR_OFFSET; in cik_startup()
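/* Both compute rings above run on MEC1/pipe0 and differ only in the
 * hardware queue (0 vs. 1); each still gets its own doorbell and
 * writeback wptr slot. */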
8427 ring = &rdev->ring[R600_RING_TYPE_DMA_INDEX]; in cik_startup()
8428 r = radeon_ring_init(rdev, ring, ring->ring_size, R600_WB_DMA_RPTR_OFFSET, in cik_startup()
8433 ring = &rdev->ring[CAYMAN_RING_TYPE_DMA1_INDEX]; in cik_startup()
8434 r = radeon_ring_init(rdev, ring, ring->ring_size, CAYMAN_WB_DMA1_RPTR_OFFSET, in cik_startup()
8452 dev_err(rdev->dev, "IB initialization failed (%d).\n", r); in cik_startup()
8458 dev_err(rdev->dev, "vm manager initialization failed (%d).\n", r); in cik_startup()
8470 * cik_resume - resume the asic to a functional state
8483 atom_asic_init(rdev->mode_info.atom_context); in cik_resume()
8488 if (rdev->pm.pm_method == PM_METHOD_DPM) in cik_resume()
8491 rdev->accel_working = true; in cik_resume()
8495 rdev->accel_working = false; in cik_resume()
8504 * cik_suspend - suspend the asic
8519 if (rdev->has_uvd) { in cik_suspend()
8523 if (rdev->has_vce) in cik_suspend()
8540 * cik_init - asic specific driver and hw init
8557 return -EINVAL; in cik_init()
8560 if (!rdev->is_atom_bios) { in cik_init()
8561 dev_err(rdev->dev, "Expecting atombios for cayman GPU\n"); in cik_init()
8562 return -EINVAL; in cik_init()
8570 if (!rdev->bios) { in cik_init()
8571 dev_err(rdev->dev, "Card not posted and no BIOS - ignoring\n"); in cik_init()
8572 return -EINVAL; in cik_init()
8575 atom_asic_init(rdev->mode_info.atom_context); in cik_init()
8598 if (rdev->flags & RADEON_IS_IGP) { in cik_init()
8599 if (!rdev->me_fw || !rdev->pfp_fw || !rdev->ce_fw || in cik_init()
8600 !rdev->mec_fw || !rdev->sdma_fw || !rdev->rlc_fw) { in cik_init()
8608 if (!rdev->me_fw || !rdev->pfp_fw || !rdev->ce_fw || in cik_init()
8609 !rdev->mec_fw || !rdev->sdma_fw || !rdev->rlc_fw || in cik_init()
8610 !rdev->mc_fw) { in cik_init()
8622 ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX]; in cik_init()
8623 ring->ring_obj = NULL; in cik_init()
8626 ring = &rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX]; in cik_init()
8627 ring->ring_obj = NULL; in cik_init()
8629 r = radeon_doorbell_get(rdev, &ring->doorbell_index); in cik_init()
8633 ring = &rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX]; in cik_init()
8634 ring->ring_obj = NULL; in cik_init()
8636 r = radeon_doorbell_get(rdev, &ring->doorbell_index); in cik_init()
8640 ring = &rdev->ring[R600_RING_TYPE_DMA_INDEX]; in cik_init()
8641 ring->ring_obj = NULL; in cik_init()
8644 ring = &rdev->ring[CAYMAN_RING_TYPE_DMA1_INDEX]; in cik_init()
8645 ring->ring_obj = NULL; in cik_init()
8651 rdev->ih.ring_obj = NULL; in cik_init()
8658 rdev->accel_working = true; in cik_init()
8661 dev_err(rdev->dev, "disabling GPU acceleration\n"); in cik_init()
8672 rdev->accel_working = false; in cik_init()
8679 if (!rdev->mc_fw && !(rdev->flags & RADEON_IS_IGP)) { in cik_init()
8681 return -EINVAL; in cik_init()
8688 * cik_fini - asic specific driver and hw fini
8719 kfree(rdev->bios); in cik_fini()
8720 rdev->bios = NULL; in cik_fini()
8725 struct drm_device *dev = encoder->dev; in dce8_program_fmt()
8726 struct radeon_device *rdev = dev->dev_private; in dce8_program_fmt()
8728 struct radeon_crtc *radeon_crtc = to_radeon_crtc(encoder->crtc); in dce8_program_fmt()
8737 dither = radeon_connector->dither; in dce8_program_fmt()
8741 if (radeon_encoder->devices & ATOM_DEVICE_LCD_SUPPORT) in dce8_program_fmt()
8745 if ((radeon_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1) || in dce8_program_fmt()
8746 (radeon_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2)) in dce8_program_fmt()
8784 WREG32(FMT_BIT_DEPTH_CONTROL + radeon_crtc->crtc_offset, tmp); in dce8_program_fmt()
8789 * dce8_line_buffer_adjust - Set up the line buffer
8805 u32 pipe_offset = radeon_crtc->crtc_id * 0x20; in dce8_line_buffer_adjust()
8814 if (radeon_crtc->base.enabled && mode) { in dce8_line_buffer_adjust()
8815 if (mode->crtc_hdisplay < 1920) { in dce8_line_buffer_adjust()
8818 } else if (mode->crtc_hdisplay < 2560) { in dce8_line_buffer_adjust()
8821 } else if (mode->crtc_hdisplay < 4096) { in dce8_line_buffer_adjust()
8823 buffer_alloc = (rdev->flags & RADEON_IS_IGP) ? 2 : 4; in dce8_line_buffer_adjust()
8827 buffer_alloc = (rdev->flags & RADEON_IS_IGP) ? 2 : 4; in dce8_line_buffer_adjust()
8834 WREG32(LB_MEMORY_CTRL + radeon_crtc->crtc_offset, in dce8_line_buffer_adjust()
8839 for (i = 0; i < rdev->usec_timeout; i++) { in dce8_line_buffer_adjust()
8846 if (radeon_crtc->base.enabled && mode) { in dce8_line_buffer_adjust()
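/*
 * Summary of the ladder above: the line buffer is partitioned by mode
 * width and the DMIF buffer allocation follows.  The slice sizes for
 * the two narrow branches are elided from this listing and assumed
 * below:
 *
 *   crtc_hdisplay < 1920 -> small LB slice  (buffer_alloc assumed 2)
 *   crtc_hdisplay < 2560 -> medium LB slice (buffer_alloc assumed 2)
 *   wider modes          -> largest slice, buffer_alloc = IGP ? 2 : 4
 */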
8863 * cik_get_number_of_dram_channels - get the number of dram channels
8915 * dce8_dram_bandwidth - get the dram bandwidth
8931 yclk.full = dfixed_const(wm->yclk); in dce8_dram_bandwidth()
8933 dram_channels.full = dfixed_const(wm->dram_channels * 4); in dce8_dram_bandwidth()
8944 * dce8_dram_bandwidth_for_display - get the dram bandwidth for display
8960 yclk.full = dfixed_const(wm->yclk); in dce8_dram_bandwidth_for_display()
8962 dram_channels.full = dfixed_const(wm->dram_channels * 4); in dce8_dram_bandwidth_for_display()
8973 * dce8_data_return_bandwidth - get the data return bandwidth
8989 sclk.full = dfixed_const(wm->sclk); in dce8_data_return_bandwidth()
9002 * dce8_dmif_request_bandwidth - get the dmif bandwidth
9018 disp_clk.full = dfixed_const(wm->disp_clk); in dce8_dmif_request_bandwidth()
9033 * dce8_available_bandwidth - get the min available bandwidth
9052 * dce8_average_bandwidth - get the average available bandwidth
9073 line_time.full = dfixed_const(wm->active_time + wm->blank_time); in dce8_average_bandwidth()
9075 bpp.full = dfixed_const(wm->bytes_per_pixel); in dce8_average_bandwidth()
9076 src_width.full = dfixed_const(wm->src_width); in dce8_average_bandwidth()
9078 bandwidth.full = dfixed_mul(bandwidth, wm->vsc); in dce8_average_bandwidth()
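/*
 * In plain form, the fixed-point sequence above computes
 *
 *   average_bandwidth = (src_width * bytes_per_pixel * vsc)
 *                       / (active_time + blank_time)
 *
 * i.e. the bytes a head must fetch per scanline divided by the line
 * period.
 */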
9085 * dce8_latency_watermark - get the latency watermark
9100 u32 dc_latency = 40000000 / wm->disp_clk; /* dc pipe latency */ in dce8_latency_watermark()
9101 u32 other_heads_data_return_time = ((wm->num_heads + 1) * worst_chunk_return_time) + in dce8_latency_watermark()
9102 (wm->num_heads * cursor_line_pair_return_time); in dce8_latency_watermark()
9108 if (wm->num_heads == 0) in dce8_latency_watermark()
9113 if ((wm->vsc.full > a.full) || in dce8_latency_watermark()
9114 ((wm->vsc.full > b.full) && (wm->vtaps >= 3)) || in dce8_latency_watermark()
9115 (wm->vtaps >= 5) || in dce8_latency_watermark()
9116 ((wm->vsc.full >= a.full) && wm->interlaced)) in dce8_latency_watermark()
9122 b.full = dfixed_const(wm->num_heads); in dce8_latency_watermark()
9124 tmp = div_u64((u64) dmif_size * (u64) wm->disp_clk, mc_latency + 512); in dce8_latency_watermark()
9127 lb_fill_bw = min(tmp, wm->disp_clk * wm->bytes_per_pixel / 1000); in dce8_latency_watermark()
9129 a.full = dfixed_const(max_src_lines_per_dst_line * wm->src_width * wm->bytes_per_pixel); in dce8_latency_watermark()
9136 if (line_fill_time < wm->active_time) in dce8_latency_watermark()
9139 return latency + (line_fill_time - wm->active_time); in dce8_latency_watermark()
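/*
 * The tail above covers lines that take longer to fill than to scan
 * out; the surplus is folded into the returned watermark:
 *
 *   wm = (line_fill_time < active_time)
 *	    ? latency
 *	    : latency + (line_fill_time - active_time);
 */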
9144 * dce8_average_bandwidth_vs_dram_bandwidth_for_display - check
9157 (dce8_dram_bandwidth_for_display(wm) / wm->num_heads)) in dce8_average_bandwidth_vs_dram_bandwidth_for_display()
9164 * dce8_average_bandwidth_vs_available_bandwidth - check
9177 (dce8_available_bandwidth(wm) / wm->num_heads)) in dce8_average_bandwidth_vs_available_bandwidth()
9184 * dce8_check_latency_hiding - check latency hiding
9194 u32 lb_partitions = wm->lb_size / wm->src_width; in dce8_check_latency_hiding()
9195 u32 line_time = wm->active_time + wm->blank_time; in dce8_check_latency_hiding()
9201 if (wm->vsc.full > a.full) in dce8_check_latency_hiding()
9204 if (lb_partitions <= (wm->vtaps + 1)) in dce8_check_latency_hiding()
9210 latency_hiding = (latency_tolerant_lines * line_time + wm->blank_time); in dce8_check_latency_hiding()
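/*
 * Plainly: the line buffer can hide memory latency for as long as it
 * keeps scanout fed, so
 *
 *   latency_hiding = latency_tolerant_lines * line_time + blank_time;
 *
 * and the mode passes if dce8_latency_watermark(wm) fits within that
 * budget.  latency_tolerant_lines drops to 1 when downscaling
 * (vsc > 1) or when the LB holds too few partitions for the filter
 * taps.
 */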
9219 * dce8_program_watermarks - program display watermarks
9233 struct drm_display_mode *mode = &radeon_crtc->base.mode; in dce8_program_watermarks()
9240 if (radeon_crtc->base.enabled && num_heads && mode) { in dce8_program_watermarks()
9241 active_time = (u32) div_u64((u64)mode->crtc_hdisplay * 1000000, in dce8_program_watermarks()
9242 (u32)mode->clock); in dce8_program_watermarks()
9243 line_time = (u32) div_u64((u64)mode->crtc_htotal * 1000000, in dce8_program_watermarks()
9244 (u32)mode->clock); in dce8_program_watermarks()
9248 if ((rdev->pm.pm_method == PM_METHOD_DPM) && in dce8_program_watermarks()
9249 rdev->pm.dpm_enabled) { in dce8_program_watermarks()
9255 wm_high.yclk = rdev->pm.current_mclk * 10; in dce8_program_watermarks()
9256 wm_high.sclk = rdev->pm.current_sclk * 10; in dce8_program_watermarks()
9259 wm_high.disp_clk = mode->clock; in dce8_program_watermarks()
9260 wm_high.src_width = mode->crtc_hdisplay; in dce8_program_watermarks()
9262 wm_high.blank_time = line_time - wm_high.active_time; in dce8_program_watermarks()
9264 if (mode->flags & DRM_MODE_FLAG_INTERLACE) in dce8_program_watermarks()
9266 wm_high.vsc = radeon_crtc->vsc; in dce8_program_watermarks()
9268 if (radeon_crtc->rmx_type != RMX_OFF) in dce8_program_watermarks()
9283 (rdev->disp_priority == 2)) { in dce8_program_watermarks()
9288 if ((rdev->pm.pm_method == PM_METHOD_DPM) && in dce8_program_watermarks()
9289 rdev->pm.dpm_enabled) { in dce8_program_watermarks()
9295 wm_low.yclk = rdev->pm.current_mclk * 10; in dce8_program_watermarks()
9296 wm_low.sclk = rdev->pm.current_sclk * 10; in dce8_program_watermarks()
9299 wm_low.disp_clk = mode->clock; in dce8_program_watermarks()
9300 wm_low.src_width = mode->crtc_hdisplay; in dce8_program_watermarks()
9302 wm_low.blank_time = line_time - wm_low.active_time; in dce8_program_watermarks()
9304 if (mode->flags & DRM_MODE_FLAG_INTERLACE) in dce8_program_watermarks()
9306 wm_low.vsc = radeon_crtc->vsc; in dce8_program_watermarks()
9308 if (radeon_crtc->rmx_type != RMX_OFF) in dce8_program_watermarks()
9323 (rdev->disp_priority == 2)) { in dce8_program_watermarks()
9328 radeon_crtc->lb_vblank_lead_lines = DIV_ROUND_UP(lb_size, mode->crtc_hdisplay); in dce8_program_watermarks()
9332 wm_mask = RREG32(DPG_WATERMARK_MASK_CONTROL + radeon_crtc->crtc_offset); in dce8_program_watermarks()
9336 WREG32(DPG_WATERMARK_MASK_CONTROL + radeon_crtc->crtc_offset, tmp); in dce8_program_watermarks()
9337 WREG32(DPG_PIPE_LATENCY_CONTROL + radeon_crtc->crtc_offset, in dce8_program_watermarks()
9341 tmp = RREG32(DPG_WATERMARK_MASK_CONTROL + radeon_crtc->crtc_offset); in dce8_program_watermarks()
9344 WREG32(DPG_WATERMARK_MASK_CONTROL + radeon_crtc->crtc_offset, tmp); in dce8_program_watermarks()
9345 WREG32(DPG_PIPE_LATENCY_CONTROL + radeon_crtc->crtc_offset, in dce8_program_watermarks()
9349 WREG32(DPG_WATERMARK_MASK_CONTROL + radeon_crtc->crtc_offset, wm_mask); in dce8_program_watermarks()
9352 radeon_crtc->line_time = line_time; in dce8_program_watermarks()
9353 radeon_crtc->wm_high = latency_watermark_a; in dce8_program_watermarks()
9354 radeon_crtc->wm_low = latency_watermark_b; in dce8_program_watermarks()
9358 * dce8_bandwidth_update - program display watermarks
9371 if (!rdev->mode_info.mode_config_initialized) in dce8_bandwidth_update()
9376 for (i = 0; i < rdev->num_crtc; i++) { in dce8_bandwidth_update()
9377 if (rdev->mode_info.crtcs[i]->base.enabled) in dce8_bandwidth_update()
9380 for (i = 0; i < rdev->num_crtc; i++) { in dce8_bandwidth_update()
9381 mode = &rdev->mode_info.crtcs[i]->base.mode; in dce8_bandwidth_update()
9382 lb_size = dce8_line_buffer_adjust(rdev, rdev->mode_info.crtcs[i], mode); in dce8_bandwidth_update()
9383 dce8_program_watermarks(rdev, rdev->mode_info.crtcs[i], lb_size, num_heads); in dce8_bandwidth_update()
9388 * cik_get_gpu_clock_counter - return GPU clock counter snapshot
9399 mutex_lock(&rdev->gpu_clock_mutex); in cik_get_gpu_clock_counter()
9403 mutex_unlock(&rdev->gpu_clock_mutex); in cik_get_gpu_clock_counter()
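/*
 * Sketch of the elided body (register names are assumptions about the
 * CIK RLC): latch the 64-bit counter, then read both halves while
 * holding gpu_clock_mutex so the snapshot stays atomic.
 *
 *	WREG32(RLC_CAPTURE_GPU_CLOCK_COUNT, 1);
 *	clock = (u64)RREG32(RLC_GPU_CLOCK_COUNT_LSB) |
 *		((u64)RREG32(RLC_GPU_CLOCK_COUNT_MSB) << 32);
 */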
9430 return -ETIMEDOUT; in cik_set_uvd_clock()
9464 return -ETIMEDOUT; in cik_set_vce_clocks()
9477 return -ETIMEDOUT; in cik_set_vce_clocks()
9484 struct pci_dev *root = rdev->pdev->bus->self; in cik_pcie_gen3_enable()
9490 if (pci_is_root_bus(rdev->pdev->bus)) in cik_pcie_gen3_enable()
9496 if (rdev->flags & RADEON_IS_IGP) in cik_pcie_gen3_enable()
9499 if (!(rdev->flags & RADEON_IS_PCIE)) in cik_pcie_gen3_enable()
9527 if (!pci_is_pcie(root) || !pci_is_pcie(rdev->pdev)) in cik_pcie_gen3_enable()
9531 /* re-try equalization if gen3 is not already enabled */ in cik_pcie_gen3_enable()
9538 pcie_capability_set_word(rdev->pdev, PCI_EXP_LNKCTL, PCI_EXP_LNKCTL_HAWD); in cik_pcie_gen3_enable()
9556 pcie_capability_read_word(rdev->pdev, in cik_pcie_gen3_enable()
9564 pcie_capability_read_word(rdev->pdev, in cik_pcie_gen3_enable()
9570 pcie_capability_read_word(rdev->pdev, in cik_pcie_gen3_enable()
9589 pcie_capability_clear_and_set_word(rdev->pdev, PCI_EXP_LNKCTL, in cik_pcie_gen3_enable()
9601 pcie_capability_clear_and_set_word(rdev->pdev, PCI_EXP_LNKCTL2, in cik_pcie_gen3_enable()
9627 pcie_capability_clear_and_set_word(rdev->pdev, PCI_EXP_LNKCTL2, in cik_pcie_gen3_enable()
9634 for (i = 0; i < rdev->usec_timeout; i++) { in cik_pcie_gen3_enable()
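/*
 * Sketch (assumption, using the generic PCI helpers rather than this
 * driver's exact constants): the LNKCTL2 updates above program the
 * target link speed before the link is retrained, e.g.
 *
 *	pcie_capability_clear_and_set_word(rdev->pdev, PCI_EXP_LNKCTL2,
 *					   PCI_EXP_LNKCTL2_TLS,
 *					   PCI_EXP_LNKCTL2_TLS_8_0GT);
 */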
9652 if (rdev->flags & RADEON_IS_IGP) in cik_program_aspm()
9655 if (!(rdev->flags & RADEON_IS_PCIE)) in cik_program_aspm()
9720 !pci_is_root_bus(rdev->pdev->bus)) { in cik_program_aspm()
9721 struct pci_dev *root = rdev->pdev->bus->self; in cik_program_aspm()