
Searched refs:rb_cntl (Results 1 – 15 of 15) sorted by relevance

/linux-6.12.1/drivers/gpu/drm/amd/amdgpu/
vpe_v6_1.c
211 uint32_t rb_bufsz, rb_cntl; in vpe_v6_1_ring_start() local
218 rb_cntl = RREG32(vpe_get_reg_offset(vpe, i, regVPEC_QUEUE0_RB_CNTL)); in vpe_v6_1_ring_start()
219 rb_cntl = REG_SET_FIELD(rb_cntl, VPEC_QUEUE0_RB_CNTL, RB_SIZE, rb_bufsz); in vpe_v6_1_ring_start()
220 rb_cntl = REG_SET_FIELD(rb_cntl, VPEC_QUEUE0_RB_CNTL, RB_PRIV, 1); in vpe_v6_1_ring_start()
221 rb_cntl = REG_SET_FIELD(rb_cntl, VPEC_QUEUE0_RB_CNTL, RB_VMID, 0); in vpe_v6_1_ring_start()
222 WREG32(vpe_get_reg_offset(vpe, i, regVPEC_QUEUE0_RB_CNTL), rb_cntl); in vpe_v6_1_ring_start()
236 rb_cntl = REG_SET_FIELD(rb_cntl, VPEC_QUEUE0_RB_CNTL, RPTR_WRITEBACK_ENABLE, 1); in vpe_v6_1_ring_start()
260 rb_cntl = REG_SET_FIELD(rb_cntl, VPEC_QUEUE0_RB_CNTL, RPTR_WRITEBACK_ENABLE, 1); in vpe_v6_1_ring_start()
261 rb_cntl = REG_SET_FIELD(rb_cntl, VPEC_QUEUE0_RB_CNTL, RB_ENABLE, 1); in vpe_v6_1_ring_start()
262 WREG32(vpe_get_reg_offset(vpe, i, regVPEC_QUEUE0_RB_CNTL), rb_cntl); in vpe_v6_1_ring_start()
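
The VPE hits above show the read-modify-write idiom that the newer amdgpu engines use for their ring-buffer control registers: read *_RB_CNTL, update individual bit fields with REG_SET_FIELD, write it back, and only set RB_ENABLE once the ring base and pointers are programmed. A minimal sketch of that sequence, reconstructed from the vpe_v6_1_ring_start() fragments above (the rb_bufsz computation and the base/pointer setup in between are assumptions, not shown in the hits):

	uint32_t rb_cntl, rb_bufsz;

	/* RB_SIZE takes the log2 of the ring size; dividing by 4 for dwords is assumed here */
	rb_bufsz = order_base_2(ring->ring_size / 4);

	rb_cntl = RREG32(vpe_get_reg_offset(vpe, i, regVPEC_QUEUE0_RB_CNTL));
	rb_cntl = REG_SET_FIELD(rb_cntl, VPEC_QUEUE0_RB_CNTL, RB_SIZE, rb_bufsz);
	rb_cntl = REG_SET_FIELD(rb_cntl, VPEC_QUEUE0_RB_CNTL, RB_PRIV, 1);
	rb_cntl = REG_SET_FIELD(rb_cntl, VPEC_QUEUE0_RB_CNTL, RB_VMID, 0);
	WREG32(vpe_get_reg_offset(vpe, i, regVPEC_QUEUE0_RB_CNTL), rb_cntl);

	/* ... program ring base address and rptr/wptr writeback here ... */

	rb_cntl = REG_SET_FIELD(rb_cntl, VPEC_QUEUE0_RB_CNTL, RPTR_WRITEBACK_ENABLE, 1);
	rb_cntl = REG_SET_FIELD(rb_cntl, VPEC_QUEUE0_RB_CNTL, RB_ENABLE, 1);
	WREG32(vpe_get_reg_offset(vpe, i, regVPEC_QUEUE0_RB_CNTL), rb_cntl);

The SDMA v4 through v7 resume paths below follow the same shape, only with their own SDMA*_QUEUE0_RB_CNTL / SDMA*_GFX_RB_CNTL register and field names and the RREG32_SDMA / RREG32_SOC15_IP accessors.
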
sdma_v4_4_2.c
488 u32 rb_cntl, ib_cntl; in sdma_v4_4_2_inst_gfx_stop() local
494 rb_cntl = RREG32_SDMA(i, regSDMA_GFX_RB_CNTL); in sdma_v4_4_2_inst_gfx_stop()
495 rb_cntl = REG_SET_FIELD(rb_cntl, SDMA_GFX_RB_CNTL, RB_ENABLE, 0); in sdma_v4_4_2_inst_gfx_stop()
496 WREG32_SDMA(i, regSDMA_GFX_RB_CNTL, rb_cntl); in sdma_v4_4_2_inst_gfx_stop()
540 u32 rb_cntl, ib_cntl; in sdma_v4_4_2_inst_page_stop() local
544 rb_cntl = RREG32_SDMA(i, regSDMA_PAGE_RB_CNTL); in sdma_v4_4_2_inst_page_stop()
545 rb_cntl = REG_SET_FIELD(rb_cntl, SDMA_PAGE_RB_CNTL, in sdma_v4_4_2_inst_page_stop()
547 WREG32_SDMA(i, regSDMA_PAGE_RB_CNTL, rb_cntl); in sdma_v4_4_2_inst_page_stop()
650 static uint32_t sdma_v4_4_2_rb_cntl(struct amdgpu_ring *ring, uint32_t rb_cntl) in sdma_v4_4_2_rb_cntl() argument
656 rb_cntl = REG_SET_FIELD(rb_cntl, SDMA_GFX_RB_CNTL, RB_SIZE, rb_bufsz); in sdma_v4_4_2_rb_cntl()
[all …]
sdma_v2_4.c
338 u32 rb_cntl, ib_cntl; in sdma_v2_4_gfx_stop() local
342 rb_cntl = RREG32(mmSDMA0_GFX_RB_CNTL + sdma_offsets[i]); in sdma_v2_4_gfx_stop()
343 rb_cntl = REG_SET_FIELD(rb_cntl, SDMA0_GFX_RB_CNTL, RB_ENABLE, 0); in sdma_v2_4_gfx_stop()
344 WREG32(mmSDMA0_GFX_RB_CNTL + sdma_offsets[i], rb_cntl); in sdma_v2_4_gfx_stop()
402 u32 rb_cntl, ib_cntl; in sdma_v2_4_gfx_resume() local
426 rb_cntl = RREG32(mmSDMA0_GFX_RB_CNTL + sdma_offsets[i]); in sdma_v2_4_gfx_resume()
427 rb_cntl = REG_SET_FIELD(rb_cntl, SDMA0_GFX_RB_CNTL, RB_SIZE, rb_bufsz); in sdma_v2_4_gfx_resume()
429 rb_cntl = REG_SET_FIELD(rb_cntl, SDMA0_GFX_RB_CNTL, RB_SWAP_ENABLE, 1); in sdma_v2_4_gfx_resume()
430 rb_cntl = REG_SET_FIELD(rb_cntl, SDMA0_GFX_RB_CNTL, in sdma_v2_4_gfx_resume()
433 WREG32(mmSDMA0_GFX_RB_CNTL + sdma_offsets[i], rb_cntl); in sdma_v2_4_gfx_resume()
[all …]
sdma_v7_0.c
427 u32 rb_cntl, ib_cntl; in sdma_v7_0_gfx_stop() local
431 rb_cntl = RREG32_SOC15_IP(GC, sdma_v7_0_get_reg_offset(adev, i, regSDMA0_QUEUE0_RB_CNTL)); in sdma_v7_0_gfx_stop()
432 rb_cntl = REG_SET_FIELD(rb_cntl, SDMA0_QUEUE0_RB_CNTL, RB_ENABLE, 0); in sdma_v7_0_gfx_stop()
433 WREG32_SOC15_IP(GC, sdma_v7_0_get_reg_offset(adev, i, regSDMA0_QUEUE0_RB_CNTL), rb_cntl); in sdma_v7_0_gfx_stop()
503 u32 rb_cntl, ib_cntl; in sdma_v7_0_gfx_resume() local
519 rb_cntl = RREG32_SOC15_IP(GC, sdma_v7_0_get_reg_offset(adev, i, regSDMA0_QUEUE0_RB_CNTL)); in sdma_v7_0_gfx_resume()
520 rb_cntl = REG_SET_FIELD(rb_cntl, SDMA0_QUEUE0_RB_CNTL, RB_SIZE, rb_bufsz); in sdma_v7_0_gfx_resume()
522 rb_cntl = REG_SET_FIELD(rb_cntl, SDMA0_QUEUE0_RB_CNTL, RB_SWAP_ENABLE, 1); in sdma_v7_0_gfx_resume()
523 rb_cntl = REG_SET_FIELD(rb_cntl, SDMA0_QUEUE0_RB_CNTL, in sdma_v7_0_gfx_resume()
526 rb_cntl = REG_SET_FIELD(rb_cntl, SDMA0_QUEUE0_RB_CNTL, RB_PRIV, 1); in sdma_v7_0_gfx_resume()
[all …]
sdma_v4_0.c
924 u32 rb_cntl, ib_cntl; in sdma_v4_0_gfx_enable() local
928 rb_cntl = RREG32_SDMA(i, mmSDMA0_GFX_RB_CNTL); in sdma_v4_0_gfx_enable()
929 rb_cntl = REG_SET_FIELD(rb_cntl, SDMA0_GFX_RB_CNTL, RB_ENABLE, enable ? 1 : 0); in sdma_v4_0_gfx_enable()
930 WREG32_SDMA(i, mmSDMA0_GFX_RB_CNTL, rb_cntl); in sdma_v4_0_gfx_enable()
958 u32 rb_cntl, ib_cntl; in sdma_v4_0_page_stop() local
962 rb_cntl = RREG32_SDMA(i, mmSDMA0_PAGE_RB_CNTL); in sdma_v4_0_page_stop()
963 rb_cntl = REG_SET_FIELD(rb_cntl, SDMA0_PAGE_RB_CNTL, in sdma_v4_0_page_stop()
965 WREG32_SDMA(i, mmSDMA0_PAGE_RB_CNTL, rb_cntl); in sdma_v4_0_page_stop()
1066 static uint32_t sdma_v4_0_rb_cntl(struct amdgpu_ring *ring, uint32_t rb_cntl) in sdma_v4_0_rb_cntl() argument
1071 rb_cntl = REG_SET_FIELD(rb_cntl, SDMA0_GFX_RB_CNTL, RB_SIZE, rb_bufsz); in sdma_v4_0_rb_cntl()
[all …]
sdma_v6_0.c
395 u32 rb_cntl, ib_cntl; in sdma_v6_0_gfx_stop() local
399 rb_cntl = RREG32_SOC15_IP(GC, sdma_v6_0_get_reg_offset(adev, i, regSDMA0_QUEUE0_RB_CNTL)); in sdma_v6_0_gfx_stop()
400 rb_cntl = REG_SET_FIELD(rb_cntl, SDMA0_QUEUE0_RB_CNTL, RB_ENABLE, 0); in sdma_v6_0_gfx_stop()
401 WREG32_SOC15_IP(GC, sdma_v6_0_get_reg_offset(adev, i, regSDMA0_QUEUE0_RB_CNTL), rb_cntl); in sdma_v6_0_gfx_stop()
482 u32 rb_cntl, ib_cntl; in sdma_v6_0_gfx_resume() local
498 rb_cntl = RREG32_SOC15_IP(GC, sdma_v6_0_get_reg_offset(adev, i, regSDMA0_QUEUE0_RB_CNTL)); in sdma_v6_0_gfx_resume()
499 rb_cntl = REG_SET_FIELD(rb_cntl, SDMA0_QUEUE0_RB_CNTL, RB_SIZE, rb_bufsz); in sdma_v6_0_gfx_resume()
501 rb_cntl = REG_SET_FIELD(rb_cntl, SDMA0_QUEUE0_RB_CNTL, RB_SWAP_ENABLE, 1); in sdma_v6_0_gfx_resume()
502 rb_cntl = REG_SET_FIELD(rb_cntl, SDMA0_QUEUE0_RB_CNTL, in sdma_v6_0_gfx_resume()
505 rb_cntl = REG_SET_FIELD(rb_cntl, SDMA0_QUEUE0_RB_CNTL, RB_PRIV, 1); in sdma_v6_0_gfx_resume()
[all …]
sdma_v3_0.c
514 u32 rb_cntl, ib_cntl; in sdma_v3_0_gfx_stop() local
518 rb_cntl = RREG32(mmSDMA0_GFX_RB_CNTL + sdma_offsets[i]); in sdma_v3_0_gfx_stop()
519 rb_cntl = REG_SET_FIELD(rb_cntl, SDMA0_GFX_RB_CNTL, RB_ENABLE, 0); in sdma_v3_0_gfx_stop()
520 WREG32(mmSDMA0_GFX_RB_CNTL + sdma_offsets[i], rb_cntl); in sdma_v3_0_gfx_stop()
639 u32 rb_cntl, ib_cntl, wptr_poll_cntl; in sdma_v3_0_gfx_resume() local
666 rb_cntl = RREG32(mmSDMA0_GFX_RB_CNTL + sdma_offsets[i]); in sdma_v3_0_gfx_resume()
667 rb_cntl = REG_SET_FIELD(rb_cntl, SDMA0_GFX_RB_CNTL, RB_SIZE, rb_bufsz); in sdma_v3_0_gfx_resume()
669 rb_cntl = REG_SET_FIELD(rb_cntl, SDMA0_GFX_RB_CNTL, RB_SWAP_ENABLE, 1); in sdma_v3_0_gfx_resume()
670 rb_cntl = REG_SET_FIELD(rb_cntl, SDMA0_GFX_RB_CNTL, in sdma_v3_0_gfx_resume()
673 WREG32(mmSDMA0_GFX_RB_CNTL + sdma_offsets[i], rb_cntl); in sdma_v3_0_gfx_resume()
[all …]
si_dma.c
115 u32 rb_cntl; in si_dma_stop() local
120 rb_cntl = RREG32(DMA_RB_CNTL + sdma_offsets[i]); in si_dma_stop()
121 rb_cntl &= ~DMA_RB_ENABLE; in si_dma_stop()
122 WREG32(DMA_RB_CNTL + sdma_offsets[i], rb_cntl); in si_dma_stop()
129 u32 rb_cntl, dma_cntl, ib_cntl, rb_bufsz; in si_dma_start() local
141 rb_cntl = rb_bufsz << 1; in si_dma_start()
143 rb_cntl |= DMA_RB_SWAP_ENABLE | DMA_RPTR_WRITEBACK_SWAP_ENABLE; in si_dma_start()
145 WREG32(DMA_RB_CNTL + sdma_offsets[i], rb_cntl); in si_dma_start()
156 rb_cntl |= DMA_RPTR_WRITEBACK_ENABLE; in si_dma_start()
173 WREG32(DMA_RB_CNTL + sdma_offsets[i], rb_cntl | DMA_RB_ENABLE); in si_dma_start()
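
si_dma.c predates the REG_SET_FIELD helpers, so it builds rb_cntl from raw shifts and mask constants instead: the log2 buffer size is shifted into the size field, the swap and writeback bits are OR'ed in, and RB_ENABLE is OR'ed in only on the final write. A sketch of si_dma_start() per the hits above (the rb_bufsz line and the #ifdef guard around the swap bits are assumptions):

	u32 rb_cntl, rb_bufsz;

	rb_bufsz = order_base_2(ring->ring_size / 4);	/* assumed: log2 of ring size in dwords */
	rb_cntl = rb_bufsz << 1;			/* log2 size shifted into the RB size field */
#ifdef __BIG_ENDIAN
	rb_cntl |= DMA_RB_SWAP_ENABLE | DMA_RPTR_WRITEBACK_SWAP_ENABLE;
#endif
	WREG32(DMA_RB_CNTL + sdma_offsets[i], rb_cntl);

	/* ... program ring base and rptr writeback address ... */

	rb_cntl |= DMA_RPTR_WRITEBACK_ENABLE;
	WREG32(DMA_RB_CNTL + sdma_offsets[i], rb_cntl | DMA_RB_ENABLE);

The same style, with the same DMA_* constants, appears in the radeon r600_dma.c and ni_dma.c resume paths further down.
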
sdma_v5_2.c
415 u32 rb_cntl, ib_cntl; in sdma_v5_2_gfx_stop() local
419 rb_cntl = RREG32_SOC15_IP(GC, sdma_v5_2_get_reg_offset(adev, i, mmSDMA0_GFX_RB_CNTL)); in sdma_v5_2_gfx_stop()
420 rb_cntl = REG_SET_FIELD(rb_cntl, SDMA0_GFX_RB_CNTL, RB_ENABLE, 0); in sdma_v5_2_gfx_stop()
421 WREG32_SOC15_IP(GC, sdma_v5_2_get_reg_offset(adev, i, mmSDMA0_GFX_RB_CNTL), rb_cntl); in sdma_v5_2_gfx_stop()
535 u32 rb_cntl, ib_cntl; in sdma_v5_2_gfx_resume() local
552 rb_cntl = RREG32_SOC15_IP(GC, sdma_v5_2_get_reg_offset(adev, i, mmSDMA0_GFX_RB_CNTL)); in sdma_v5_2_gfx_resume()
553 rb_cntl = REG_SET_FIELD(rb_cntl, SDMA0_GFX_RB_CNTL, RB_SIZE, rb_bufsz); in sdma_v5_2_gfx_resume()
555 rb_cntl = REG_SET_FIELD(rb_cntl, SDMA0_GFX_RB_CNTL, RB_SWAP_ENABLE, 1); in sdma_v5_2_gfx_resume()
556 rb_cntl = REG_SET_FIELD(rb_cntl, SDMA0_GFX_RB_CNTL, in sdma_v5_2_gfx_resume()
559 WREG32_SOC15_IP(GC, sdma_v5_2_get_reg_offset(adev, i, mmSDMA0_GFX_RB_CNTL), rb_cntl); in sdma_v5_2_gfx_resume()
[all …]
sdma_v5_0.c
596 u32 rb_cntl, ib_cntl; in sdma_v5_0_gfx_stop() local
600 rb_cntl = RREG32_SOC15_IP(GC, sdma_v5_0_get_reg_offset(adev, i, mmSDMA0_GFX_RB_CNTL)); in sdma_v5_0_gfx_stop()
601 rb_cntl = REG_SET_FIELD(rb_cntl, SDMA0_GFX_RB_CNTL, RB_ENABLE, 0); in sdma_v5_0_gfx_stop()
602 WREG32_SOC15_IP(GC, sdma_v5_0_get_reg_offset(adev, i, mmSDMA0_GFX_RB_CNTL), rb_cntl); in sdma_v5_0_gfx_stop()
718 u32 rb_cntl, ib_cntl; in sdma_v5_0_gfx_resume() local
735 rb_cntl = RREG32_SOC15_IP(GC, sdma_v5_0_get_reg_offset(adev, i, mmSDMA0_GFX_RB_CNTL)); in sdma_v5_0_gfx_resume()
736 rb_cntl = REG_SET_FIELD(rb_cntl, SDMA0_GFX_RB_CNTL, RB_SIZE, rb_bufsz); in sdma_v5_0_gfx_resume()
738 rb_cntl = REG_SET_FIELD(rb_cntl, SDMA0_GFX_RB_CNTL, RB_SWAP_ENABLE, 1); in sdma_v5_0_gfx_resume()
739 rb_cntl = REG_SET_FIELD(rb_cntl, SDMA0_GFX_RB_CNTL, in sdma_v5_0_gfx_resume()
742 WREG32_SOC15_IP(GC, sdma_v5_0_get_reg_offset(adev, i, mmSDMA0_GFX_RB_CNTL), rb_cntl); in sdma_v5_0_gfx_resume()
[all …]
cik_sdma.c
309 u32 rb_cntl; in cik_sdma_gfx_stop() local
313 rb_cntl = RREG32(mmSDMA0_GFX_RB_CNTL + sdma_offsets[i]); in cik_sdma_gfx_stop()
314 rb_cntl &= ~SDMA0_GFX_RB_CNTL__RB_ENABLE_MASK; in cik_sdma_gfx_stop()
315 WREG32(mmSDMA0_GFX_RB_CNTL + sdma_offsets[i], rb_cntl); in cik_sdma_gfx_stop()
428 u32 rb_cntl, ib_cntl; in cik_sdma_gfx_resume() local
454 rb_cntl = rb_bufsz << 1; in cik_sdma_gfx_resume()
456 rb_cntl |= SDMA0_GFX_RB_CNTL__RB_SWAP_ENABLE_MASK | in cik_sdma_gfx_resume()
459 WREG32(mmSDMA0_GFX_RB_CNTL + sdma_offsets[i], rb_cntl); in cik_sdma_gfx_resume()
473 rb_cntl |= SDMA0_GFX_RB_CNTL__RPTR_WRITEBACK_ENABLE_MASK; in cik_sdma_gfx_resume()
483 rb_cntl | SDMA0_GFX_RB_CNTL__RB_ENABLE_MASK); in cik_sdma_gfx_resume()
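
The amdgpu cik_sdma.c sits between the two styles: it still uses explicit *_MASK constants rather than REG_SET_FIELD, but already indexes the per-instance sdma_offsets[] table. Its stop path is the simplest form of the pattern, a plain read-modify-write that clears RB_ENABLE for each SDMA instance; a sketch per the hits above (the adev->sdma.num_instances loop bound is an assumption):

	u32 rb_cntl;
	int i;

	for (i = 0; i < adev->sdma.num_instances; i++) {
		rb_cntl = RREG32(mmSDMA0_GFX_RB_CNTL + sdma_offsets[i]);
		rb_cntl &= ~SDMA0_GFX_RB_CNTL__RB_ENABLE_MASK;
		WREG32(mmSDMA0_GFX_RB_CNTL + sdma_offsets[i], rb_cntl);
	}
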
/linux-6.12.1/drivers/gpu/drm/radeon/
ni_dma.c
158 u32 rb_cntl; in cayman_dma_stop() local
165 rb_cntl = RREG32(DMA_RB_CNTL + DMA0_REGISTER_OFFSET); in cayman_dma_stop()
166 rb_cntl &= ~DMA_RB_ENABLE; in cayman_dma_stop()
167 WREG32(DMA_RB_CNTL + DMA0_REGISTER_OFFSET, rb_cntl); in cayman_dma_stop()
170 rb_cntl = RREG32(DMA_RB_CNTL + DMA1_REGISTER_OFFSET); in cayman_dma_stop()
171 rb_cntl &= ~DMA_RB_ENABLE; in cayman_dma_stop()
172 WREG32(DMA_RB_CNTL + DMA1_REGISTER_OFFSET, rb_cntl); in cayman_dma_stop()
189 u32 rb_cntl, dma_cntl, ib_cntl; in cayman_dma_resume() local
210 rb_cntl = rb_bufsz << 1; in cayman_dma_resume()
212 rb_cntl |= DMA_RB_SWAP_ENABLE | DMA_RPTR_WRITEBACK_SWAP_ENABLE; in cayman_dma_resume()
[all …]
r600_dma.c
100 u32 rb_cntl = RREG32(DMA_RB_CNTL); in r600_dma_stop() local
105 rb_cntl &= ~DMA_RB_ENABLE; in r600_dma_stop()
106 WREG32(DMA_RB_CNTL, rb_cntl); in r600_dma_stop()
122 u32 rb_cntl, dma_cntl, ib_cntl; in r600_dma_resume() local
131 rb_cntl = rb_bufsz << 1; in r600_dma_resume()
133 rb_cntl |= DMA_RB_SWAP_ENABLE | DMA_RPTR_WRITEBACK_SWAP_ENABLE; in r600_dma_resume()
135 WREG32(DMA_RB_CNTL, rb_cntl); in r600_dma_resume()
148 rb_cntl |= DMA_RPTR_WRITEBACK_ENABLE; in r600_dma_resume()
169 WREG32(DMA_RB_CNTL, rb_cntl | DMA_RB_ENABLE); in r600_dma_resume()
cik_sdma.c
251 u32 rb_cntl, reg_offset; in cik_sdma_gfx_stop() local
263 rb_cntl = RREG32(SDMA0_GFX_RB_CNTL + reg_offset); in cik_sdma_gfx_stop()
264 rb_cntl &= ~SDMA_RB_ENABLE; in cik_sdma_gfx_stop()
265 WREG32(SDMA0_GFX_RB_CNTL + reg_offset, rb_cntl); in cik_sdma_gfx_stop()
367 u32 rb_cntl, ib_cntl; in cik_sdma_gfx_resume() local
388 rb_cntl = rb_bufsz << 1; in cik_sdma_gfx_resume()
390 rb_cntl |= SDMA_RB_SWAP_ENABLE | SDMA_RPTR_WRITEBACK_SWAP_ENABLE; in cik_sdma_gfx_resume()
392 WREG32(SDMA0_GFX_RB_CNTL + reg_offset, rb_cntl); in cik_sdma_gfx_resume()
405 rb_cntl |= SDMA_RPTR_WRITEBACK_ENABLE; in cik_sdma_gfx_resume()
414 WREG32(SDMA0_GFX_RB_CNTL + reg_offset, rb_cntl | SDMA_RB_ENABLE); in cik_sdma_gfx_resume()
ni.c
1663 uint32_t rb_cntl; in cayman_cp_resume() local
1668 rb_cntl = order_base_2(ring->ring_size / 8); in cayman_cp_resume()
1669 rb_cntl |= order_base_2(RADEON_GPU_PAGE_SIZE/8) << 8; in cayman_cp_resume()
1671 rb_cntl |= BUF_SWAP_32BIT; in cayman_cp_resume()
1673 WREG32(cp_rb_cntl[i], rb_cntl); in cayman_cp_resume()
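
The last hit is the one non-DMA user: cayman_cp_resume() programs the graphics CP rings, where RB_CNTL is computed from scratch rather than read back, combining the log2 ring size with the rptr-writeback block size shifted left by 8 and a byte-swap bit on big-endian hosts. A sketch per the fragments above (the #ifdef guard is an assumption; cp_rb_cntl[] is the per-ring register table used by ni.c):

	uint32_t rb_cntl;

	rb_cntl = order_base_2(ring->ring_size / 8);			/* ring size field */
	rb_cntl |= order_base_2(RADEON_GPU_PAGE_SIZE / 8) << 8;		/* rptr writeback block size */
#ifdef __BIG_ENDIAN
	rb_cntl |= BUF_SWAP_32BIT;
#endif
	WREG32(cp_rb_cntl[i], rb_cntl);
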