Lines matching 0x00000100 in sdma_v3_0.c
82 mmSDMA0_CHICKEN_BITS, 0xfc910007, 0x00810007,
83 mmSDMA0_CLK_CTRL, 0xff000fff, 0x00000000,
84 mmSDMA0_GFX_IB_CNTL, 0x800f0111, 0x00000100,
85 mmSDMA0_RLC0_IB_CNTL, 0x800f0111, 0x00000100,
86 mmSDMA0_RLC1_IB_CNTL, 0x800f0111, 0x00000100,
87 mmSDMA1_CHICKEN_BITS, 0xfc910007, 0x00810007,
88 mmSDMA1_CLK_CTRL, 0xff000fff, 0x00000000,
89 mmSDMA1_GFX_IB_CNTL, 0x800f0111, 0x00000100,
90 mmSDMA1_RLC0_IB_CNTL, 0x800f0111, 0x00000100,
91 mmSDMA1_RLC1_IB_CNTL, 0x800f0111, 0x00000100,
96 mmSDMA0_CLK_CTRL, 0xff000ff0, 0x00000100,
97 mmSDMA1_CLK_CTRL, 0xff000ff0, 0x00000100
102 mmSDMA0_CHICKEN_BITS, 0xfc910007, 0x00810007,
103 mmSDMA0_GFX_IB_CNTL, 0x800f0111, 0x00000100,
104 mmSDMA0_RLC0_IB_CNTL, 0x800f0111, 0x00000100,
105 mmSDMA0_RLC1_IB_CNTL, 0x800f0111, 0x00000100,
106 mmSDMA1_CHICKEN_BITS, 0xfc910007, 0x00810007,
107 mmSDMA1_GFX_IB_CNTL, 0x800f0111, 0x00000100,
108 mmSDMA1_RLC0_IB_CNTL, 0x800f0111, 0x00000100,
109 mmSDMA1_RLC1_IB_CNTL, 0x800f0111, 0x00000100,
114 mmSDMA0_CLK_CTRL, 0xff000ff0, 0x00000100,
115 mmSDMA1_CLK_CTRL, 0xff000ff0, 0x00000100
120 mmSDMA0_CHICKEN_BITS, 0xfc910007, 0x00810007,
121 mmSDMA0_CLK_CTRL, 0xff000fff, 0x00000000,
122 mmSDMA0_GFX_IB_CNTL, 0x800f0111, 0x00000100,
123 mmSDMA0_RLC0_IB_CNTL, 0x800f0111, 0x00000100,
124 mmSDMA0_RLC1_IB_CNTL, 0x800f0111, 0x00000100,
125 mmSDMA1_CHICKEN_BITS, 0xfc910007, 0x00810007,
126 mmSDMA1_CLK_CTRL, 0xff000fff, 0x00000000,
127 mmSDMA1_GFX_IB_CNTL, 0x800f0111, 0x00000100,
128 mmSDMA1_RLC0_IB_CNTL, 0x800f0111, 0x00000100,
129 mmSDMA1_RLC1_IB_CNTL, 0x800f0111, 0x00000100,
134 mmSDMA0_CHICKEN_BITS, 0xfc910007, 0x00810007,
135 mmSDMA0_CLK_CTRL, 0xff000fff, 0x00000000,
136 mmSDMA0_GFX_IB_CNTL, 0x800f0111, 0x00000100,
137 mmSDMA0_RLC0_IB_CNTL, 0x800f0111, 0x00000100,
138 mmSDMA0_RLC1_IB_CNTL, 0x800f0111, 0x00000100,
139 mmSDMA1_CHICKEN_BITS, 0xfc910007, 0x00810007,
140 mmSDMA1_CLK_CTRL, 0xff000fff, 0x00000000,
141 mmSDMA1_GFX_IB_CNTL, 0x800f0111, 0x00000100,
142 mmSDMA1_RLC0_IB_CNTL, 0x800f0111, 0x00000100,
143 mmSDMA1_RLC1_IB_CNTL, 0x800f0111, 0x00000100,
148 mmSDMA0_CHICKEN_BITS, 0xfc910007, 0x00810007,
149 mmSDMA0_CLK_CTRL, 0xff000fff, 0x00000000,
150 mmSDMA0_GFX_IB_CNTL, 0x00000100, 0x00000100,
151 mmSDMA0_POWER_CNTL, 0x00000800, 0x0003c800,
152 mmSDMA0_RLC0_IB_CNTL, 0x00000100, 0x00000100,
153 mmSDMA0_RLC1_IB_CNTL, 0x00000100, 0x00000100,
154 mmSDMA1_CHICKEN_BITS, 0xfc910007, 0x00810007,
155 mmSDMA1_CLK_CTRL, 0xff000fff, 0x00000000,
156 mmSDMA1_GFX_IB_CNTL, 0x00000100, 0x00000100,
157 mmSDMA1_POWER_CNTL, 0x00000800, 0x0003c800,
158 mmSDMA1_RLC0_IB_CNTL, 0x00000100, 0x00000100,
159 mmSDMA1_RLC1_IB_CNTL, 0x00000100, 0x00000100,
164 mmSDMA0_CLK_CTRL, 0xff000ff0, 0x00000100,
165 mmSDMA1_CLK_CTRL, 0xff000ff0, 0x00000100
170 mmSDMA0_GFX_IB_CNTL, 0x00000100, 0x00000100,
171 mmSDMA0_POWER_CNTL, 0x00000800, 0x0003c800,
172 mmSDMA0_RLC0_IB_CNTL, 0x00000100, 0x00000100,
173 mmSDMA0_RLC1_IB_CNTL, 0x00000100, 0x00000100,
178 mmSDMA0_CLK_CTRL, 0xffffffff, 0x00000100,
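
The rows above are (register, and_mask, or_mask) triplets from the per-ASIC golden-settings and clock-gating init tables that sdma_v3_0_init_golden_registers() feeds to the driver's register-sequence programmer. A minimal sketch of that read-modify-write loop, modeled on amdgpu_device_program_register_sequence() (the local function name here is illustrative; RREG32()/WREG32() are the usual amdgpu 32-bit MMIO accessors):

    static void program_register_sequence(struct amdgpu_device *adev,
                                          const u32 *registers, u32 array_size)
    {
            u32 tmp, reg, and_mask, or_mask;
            u32 i;

            for (i = 0; i + 2 < array_size; i += 3) {
                    reg      = registers[i];
                    and_mask = registers[i + 1];
                    or_mask  = registers[i + 2];

                    if (and_mask == 0xffffffff) {
                            /* e.g. mmSDMA0_CLK_CTRL, 0xffffffff, 0x00000100:
                             * a full-register overwrite, no read needed */
                            tmp = or_mask;
                    } else {
                            /* read-modify-write: only the masked bits change */
                            tmp = RREG32(reg);
                            tmp &= ~and_mask;
                            tmp |= or_mask & and_mask;
                    }
                    WREG32(reg, tmp);
            }
    }

So a row such as mmSDMA0_CHICKEN_BITS, 0xfc910007, 0x00810007 clears every bit covered by 0xfc910007, sets 0x00810007 within that span, and leaves the rest of the register untouched.
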
254 for (i = 0; i < adev->sdma.num_instances; i++) in sdma_v3_0_free_microcode()
265 * Returns 0 on success, error on failure.
270 int err = 0, i; in sdma_v3_0_init_microcode()
305 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v3_0_init_microcode()
306 if (i == 0) in sdma_v3_0_init_microcode()
331 chip_name, i == 0 ? "" : "1"); in sdma_v3_0_init_microcode()
332 for (i = 0; i < adev->sdma.num_instances; i++) in sdma_v3_0_init_microcode()
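
The i == 0 ? "" : "1" selector above builds the per-instance firmware file name, presumably along these lines (the fw_name buffer and the exact format string are assumptions inferred from the visible arguments):

    /* instance 0 loads amdgpu/<chip>_sdma.bin, instance 1 amdgpu/<chip>_sdma1.bin */
    snprintf(fw_name, sizeof(fw_name), "amdgpu/%s_sdma%s.bin",
             chip_name, i == 0 ? "" : "1");
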
403 for (i = 0; i < count; i++) in sdma_v3_0_ring_insert_nop()
404 if (sdma && sdma->burst_nop && (i == 0)) in sdma_v3_0_ring_insert_nop()
432 SDMA_PKT_INDIRECT_HEADER_VMID(vmid & 0xf)); in sdma_v3_0_ring_emit_ib()
434 amdgpu_ring_write(ring, lower_32_bits(ib->gpu_addr) & 0xffffffe0); in sdma_v3_0_ring_emit_ib()
437 amdgpu_ring_write(ring, 0); in sdma_v3_0_ring_emit_ib()
438 amdgpu_ring_write(ring, 0); in sdma_v3_0_ring_emit_ib()
451 u32 ref_and_mask = 0; in sdma_v3_0_ring_emit_hdp_flush()
453 if (ring->me == 0) in sdma_v3_0_ring_emit_hdp_flush()
465 amdgpu_ring_write(ring, SDMA_PKT_POLL_REGMEM_DW5_RETRY_COUNT(0xfff) | in sdma_v3_0_ring_emit_hdp_flush()
502 amdgpu_ring_write(ring, SDMA_PKT_TRAP_INT_CONTEXT_INT_CONTEXT(0)); in sdma_v3_0_ring_emit_fence()
517 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v3_0_gfx_stop()
519 rb_cntl = REG_SET_FIELD(rb_cntl, SDMA0_GFX_RB_CNTL, RB_ENABLE, 0); in sdma_v3_0_gfx_stop()
522 ib_cntl = REG_SET_FIELD(ib_cntl, SDMA0_GFX_IB_CNTL, IB_ENABLE, 0); in sdma_v3_0_gfx_stop()
549 u32 f32_cntl, phase_quantum = 0; in sdma_v3_0_ctx_switch_enable()
554 unsigned unit = 0; in sdma_v3_0_ctx_switch_enable()
576 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v3_0_ctx_switch_enable()
591 AUTO_CTXSW_ENABLE, 0); in sdma_v3_0_ctx_switch_enable()
618 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v3_0_enable()
621 f32_cntl = REG_SET_FIELD(f32_cntl, SDMA0_F32_CNTL, HALT, 0); in sdma_v3_0_enable()
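
The RB_ENABLE, IB_ENABLE, AUTO_CTXSW_ENABLE and HALT updates above all go through REG_SET_FIELD(), which rewrites a single named bit field inside a previously read register value. Simplified from the amdgpu headers (the *_MASK and *__SHIFT constants come from the generated sh_mask register headers):

    #define REG_FIELD_SHIFT(reg, field)  reg##__##field##__SHIFT
    #define REG_FIELD_MASK(reg, field)   reg##__##field##_MASK

    #define REG_SET_FIELD(orig_val, reg, field, field_val)           \
            (((orig_val) & ~REG_FIELD_MASK(reg, field)) |            \
             (REG_FIELD_MASK(reg, field) &                           \
              ((field_val) << REG_FIELD_SHIFT(reg, field))))

sdma_v3_0_gfx_stop() therefore reads RB_CNTL once, clears only the RB_ENABLE bit, and writes the register back with every other field intact.
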
634 * Returns 0 for success, error for failure.
645 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v3_0_gfx_resume()
650 for (j = 0; j < 16; j++) { in sdma_v3_0_gfx_resume()
651 vi_srbm_select(adev, 0, 0, 0, j); in sdma_v3_0_gfx_resume()
653 WREG32(mmSDMA0_GFX_VIRTUAL_ADDR + sdma_offsets[i], 0); in sdma_v3_0_gfx_resume()
654 WREG32(mmSDMA0_GFX_APE1_CNTL + sdma_offsets[i], 0); in sdma_v3_0_gfx_resume()
656 vi_srbm_select(adev, 0, 0, 0, 0); in sdma_v3_0_gfx_resume()
660 adev->gfx.config.gb_addr_config & 0x70); in sdma_v3_0_gfx_resume()
662 WREG32(mmSDMA0_SEM_WAIT_FAIL_TIMER_CNTL + sdma_offsets[i], 0); in sdma_v3_0_gfx_resume()
676 ring->wptr = 0; in sdma_v3_0_gfx_resume()
677 WREG32(mmSDMA0_GFX_RB_RPTR + sdma_offsets[i], 0); in sdma_v3_0_gfx_resume()
679 WREG32(mmSDMA0_GFX_IB_RPTR + sdma_offsets[i], 0); in sdma_v3_0_gfx_resume()
680 WREG32(mmSDMA0_GFX_IB_OFFSET + sdma_offsets[i], 0); in sdma_v3_0_gfx_resume()
684 upper_32_bits(ring->rptr_gpu_addr) & 0xFFFFFFFF); in sdma_v3_0_gfx_resume()
686 lower_32_bits(ring->rptr_gpu_addr) & 0xFFFFFFFC); in sdma_v3_0_gfx_resume()
700 doorbell = REG_SET_FIELD(doorbell, SDMA0_GFX_DOORBELL, ENABLE, 0); in sdma_v3_0_gfx_resume()
714 WREG32(mmSDMA0_GFX_RB_WPTR + sdma_offsets[i], 0); in sdma_v3_0_gfx_resume()
721 ENABLE, 0); in sdma_v3_0_gfx_resume()
743 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v3_0_gfx_resume()
750 return 0; in sdma_v3_0_gfx_resume()
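
Taken together, the matches from sdma_v3_0_gfx_resume() trace the per-ring bring-up order: program the ring size, zero the read/write pointers, point the rptr writeback at ring->rptr_gpu_addr, set up the doorbell, then re-enable the ring and IB engines. A condensed sketch of the middle steps for one instance i (a hypothetical helper; doorbell and wptr-poll setup omitted):

    static void gfx_ring_program_sketch(struct amdgpu_device *adev,
                                        struct amdgpu_ring *ring, int i)
    {
            u32 rb_cntl, rb_bufsz;

            /* RB_SIZE is programmed as log2 of the ring size in dwords */
            rb_bufsz = order_base_2(ring->ring_size / 4);
            rb_cntl = RREG32(mmSDMA0_GFX_RB_CNTL + sdma_offsets[i]);
            rb_cntl = REG_SET_FIELD(rb_cntl, SDMA0_GFX_RB_CNTL, RB_SIZE, rb_bufsz);

            /* start empty: rptr == wptr == 0 */
            ring->wptr = 0;
            WREG32(mmSDMA0_GFX_RB_RPTR + sdma_offsets[i], 0);
            WREG32(mmSDMA0_GFX_RB_WPTR + sdma_offsets[i], 0);

            /* rptr writeback: the low dword must be 4-byte aligned,
             * hence the & 0xFFFFFFFC seen above */
            WREG32(mmSDMA0_GFX_RB_RPTR_ADDR_HI + sdma_offsets[i],
                   upper_32_bits(ring->rptr_gpu_addr));
            WREG32(mmSDMA0_GFX_RB_RPTR_ADDR_LO + sdma_offsets[i],
                   lower_32_bits(ring->rptr_gpu_addr) & 0xFFFFFFFC);

            /* only now turn the ring buffer back on */
            rb_cntl = REG_SET_FIELD(rb_cntl, SDMA0_GFX_RB_CNTL, RB_ENABLE, 1);
            WREG32(mmSDMA0_GFX_RB_CNTL + sdma_offsets[i], rb_cntl);
    }
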
759 * Returns 0 for success, error for failure.
764 return 0; in sdma_v3_0_rlc_resume()
773 * Returns 0 for success, error for failure.
791 return 0; in sdma_v3_0_start()
801 * Returns 0 for success, error for failure.
817 tmp = 0xCAFEDEAD; in sdma_v3_0_ring_test_ring()
829 amdgpu_ring_write(ring, 0xDEADBEEF); in sdma_v3_0_ring_test_ring()
832 for (i = 0; i < adev->usec_timeout; i++) { in sdma_v3_0_ring_test_ring()
834 if (tmp == 0xDEADBEEF) in sdma_v3_0_ring_test_ring()
854 * Returns 0 on success, error on failure.
862 u32 tmp = 0; in sdma_v3_0_ring_test_ib()
871 tmp = 0xCAFEDEAD; in sdma_v3_0_ring_test_ib()
873 memset(&ib, 0, sizeof(ib)); in sdma_v3_0_ring_test_ib()
879 ib.ptr[0] = SDMA_PKT_HEADER_OP(SDMA_OP_WRITE) | in sdma_v3_0_ring_test_ib()
884 ib.ptr[4] = 0xDEADBEEF; in sdma_v3_0_ring_test_ib()
895 if (r == 0) { in sdma_v3_0_ring_test_ib()
898 } else if (r < 0) { in sdma_v3_0_ring_test_ib()
902 if (tmp == 0xDEADBEEF) in sdma_v3_0_ring_test_ib()
903 r = 0; in sdma_v3_0_ring_test_ib()
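
Both tests above use the same handshake: seed a writeback slot with 0xCAFEDEAD, have the engine store 0xDEADBEEF there (written straight onto the ring in sdma_v3_0_ring_test_ring(), through an indirect buffer in sdma_v3_0_ring_test_ib()), then poll until the sentinel flips. Condensed from the direct-ring variant (error handling and slot allocation omitted):

    u32 tmp = 0xCAFEDEAD;

    adev->wb.wb[index] = cpu_to_le32(tmp);          /* seed the slot */
    gpu_addr = adev->wb.gpu_addr + (index * 4);

    /* one WRITE_LINEAR packet: store a single dword at gpu_addr */
    amdgpu_ring_write(ring, SDMA_PKT_HEADER_OP(SDMA_OP_WRITE) |
                      SDMA_PKT_HEADER_SUB_OP(SDMA_SUBOP_WRITE_LINEAR));
    amdgpu_ring_write(ring, lower_32_bits(gpu_addr));
    amdgpu_ring_write(ring, upper_32_bits(gpu_addr));
    amdgpu_ring_write(ring, SDMA_PKT_WRITE_UNTILED_DW_3_COUNT(1));
    amdgpu_ring_write(ring, 0xDEADBEEF);
    amdgpu_ring_commit(ring);

    /* poll the writeback slot until the engine lands the store */
    for (i = 0; i < adev->usec_timeout; i++) {
            tmp = le32_to_cpu(adev->wb.wb[index]);
            if (tmp == 0xDEADBEEF)
                    break;
            udelay(1);
    }
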
933 ib->ptr[ib->length_dw++] = 0; /* src/dst endian swap */ in sdma_v3_0_vm_copy_pte()
962 for (; ndw > 0; ndw -= 2) { in sdma_v3_0_vm_write_pte()
994 ib->ptr[ib->length_dw++] = 0; in sdma_v3_0_vm_set_pte_pde()
1012 for (i = 0; i < pad_count; i++) in sdma_v3_0_ring_pad_ib()
1013 if (sdma && sdma->burst_nop && (i == 0)) in sdma_v3_0_ring_pad_ib()
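
The burst_nop branch above lets a single NOP packet absorb the whole pad: when the engine supports it, the first NOP carries a count of the padding dwords that follow. The body of sdma_v3_0_ring_pad_ib(), lightly condensed (IBs are padded out to a multiple of 8 dwords):

    pad_count = (-ib->length_dw) & 7;   /* dwords to the next 8-dword boundary */
    for (i = 0; i < pad_count; i++)
            if (sdma && sdma->burst_nop && (i == 0))
                    /* one "burst" NOP covering the rest of the pad */
                    ib->ptr[ib->length_dw++] =
                            SDMA_PKT_HEADER_OP(SDMA_OP_NOP) |
                            SDMA_PKT_NOP_HEADER_COUNT(pad_count - 1);
            else
                    ib->ptr[ib->length_dw++] =
                            SDMA_PKT_HEADER_OP(SDMA_OP_NOP);
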
1036 SDMA_PKT_POLL_REGMEM_HEADER_HDP_FLUSH(0) | in sdma_v3_0_ring_emit_pipeline_sync()
1039 amdgpu_ring_write(ring, addr & 0xfffffffc); in sdma_v3_0_ring_emit_pipeline_sync()
1040 amdgpu_ring_write(ring, upper_32_bits(addr) & 0xffffffff); in sdma_v3_0_ring_emit_pipeline_sync()
1042 amdgpu_ring_write(ring, 0xffffffff); /* mask */ in sdma_v3_0_ring_emit_pipeline_sync()
1043 amdgpu_ring_write(ring, SDMA_PKT_POLL_REGMEM_DW5_RETRY_COUNT(0xfff) | in sdma_v3_0_ring_emit_pipeline_sync()
1064 SDMA_PKT_POLL_REGMEM_HEADER_HDP_FLUSH(0) | in sdma_v3_0_ring_emit_vm_flush()
1065 SDMA_PKT_POLL_REGMEM_HEADER_FUNC(0)); /* always */ in sdma_v3_0_ring_emit_vm_flush()
1067 amdgpu_ring_write(ring, 0); in sdma_v3_0_ring_emit_vm_flush()
1068 amdgpu_ring_write(ring, 0); /* reference */ in sdma_v3_0_ring_emit_vm_flush()
1069 amdgpu_ring_write(ring, 0); /* mask */ in sdma_v3_0_ring_emit_vm_flush()
1070 amdgpu_ring_write(ring, SDMA_PKT_POLL_REGMEM_DW5_RETRY_COUNT(0xfff) | in sdma_v3_0_ring_emit_vm_flush()
1078 SDMA_PKT_SRBM_WRITE_HEADER_BYTE_EN(0xf)); in sdma_v3_0_ring_emit_wreg()
1106 return 0; in sdma_v3_0_early_init()
1133 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v3_0_sw_init()
1145 (i == 0) ? AMDGPU_SDMA_IRQ_INSTANCE0 : in sdma_v3_0_sw_init()
1160 for (i = 0; i < adev->sdma.num_instances; i++) in sdma_v3_0_sw_fini()
1164 return 0; in sdma_v3_0_sw_fini()
1188 return 0; in sdma_v3_0_hw_fini()
1223 for (i = 0; i < adev->usec_timeout; i++) { in sdma_v3_0_wait_for_idle()
1228 return 0; in sdma_v3_0_wait_for_idle()
1237 u32 srbm_soft_reset = 0; in sdma_v3_0_check_soft_reset()
1250 adev->sdma.srbm_soft_reset = 0; in sdma_v3_0_check_soft_reset()
1258 u32 srbm_soft_reset = 0; in sdma_v3_0_pre_soft_reset()
1261 return 0; in sdma_v3_0_pre_soft_reset()
1271 return 0; in sdma_v3_0_pre_soft_reset()
1277 u32 srbm_soft_reset = 0; in sdma_v3_0_post_soft_reset()
1280 return 0; in sdma_v3_0_post_soft_reset()
1290 return 0; in sdma_v3_0_post_soft_reset()
1296 u32 srbm_soft_reset = 0; in sdma_v3_0_soft_reset()
1300 return 0; in sdma_v3_0_soft_reset()
1307 dev_info(adev->dev, "SRBM_SOFT_RESET=0x%08X\n", tmp); in sdma_v3_0_soft_reset()
1321 return 0; in sdma_v3_0_soft_reset()
1336 sdma_cntl = REG_SET_FIELD(sdma_cntl, SDMA0_CNTL, TRAP_ENABLE, 0); in sdma_v3_0_set_trap_irq_state()
1352 sdma_cntl = REG_SET_FIELD(sdma_cntl, SDMA0_CNTL, TRAP_ENABLE, 0); in sdma_v3_0_set_trap_irq_state()
1367 return 0; in sdma_v3_0_set_trap_irq_state()
1376 instance_id = (entry->ring_id & 0x3) >> 0; in sdma_v3_0_process_trap_irq()
1377 queue_id = (entry->ring_id & 0xc) >> 2; in sdma_v3_0_process_trap_irq()
1380 case 0: in sdma_v3_0_process_trap_irq()
1382 case 0: in sdma_v3_0_process_trap_irq()
1383 amdgpu_fence_process(&adev->sdma.instance[0].ring); in sdma_v3_0_process_trap_irq()
1395 case 0: in sdma_v3_0_process_trap_irq()
1407 return 0; in sdma_v3_0_process_trap_irq()
1417 instance_id = (entry->ring_id & 0x3) >> 0; in sdma_v3_0_process_illegal_inst_irq()
1418 queue_id = (entry->ring_id & 0xc) >> 2; in sdma_v3_0_process_illegal_inst_irq()
1420 if (instance_id <= 1 && queue_id == 0) in sdma_v3_0_process_illegal_inst_irq()
1422 return 0; in sdma_v3_0_process_illegal_inst_irq()
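
Both interrupt handlers decode entry->ring_id the same way; a worked example of the bit layout (queue naming taken from the switch cases in sdma_v3_0_process_trap_irq()):

    /*
     * ring_id bit layout as decoded above:
     *   bits [1:0]  instance_id  (SDMA0 or SDMA1)
     *   bits [3:2]  queue_id     (0 = GFX ring, 1 = RLC0, 2 = RLC1)
     *
     * Example: ring_id = 0x5
     *   instance_id = 0x5 & 0x3        = 1   -> SDMA1
     *   queue_id    = (0x5 & 0xc) >> 2 = 1   -> RLC0 queue
     */
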
1433 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v3_0_update_sdma_medium_grain_clock_gating()
1447 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v3_0_update_sdma_medium_grain_clock_gating()
1472 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v3_0_update_sdma_medium_grain_light_sleep()
1480 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v3_0_update_sdma_medium_grain_light_sleep()
1496 return 0; in sdma_v3_0_set_clockgating_state()
1510 return 0; in sdma_v3_0_set_clockgating_state()
1516 return 0; in sdma_v3_0_set_powergating_state()
1525 *flags = 0; in sdma_v3_0_get_clockgating_state()
1528 data = RREG32(mmSDMA0_CLK_CTRL + sdma_offsets[0]); in sdma_v3_0_get_clockgating_state()
1533 data = RREG32(mmSDMA0_POWER_CNTL + sdma_offsets[0]); in sdma_v3_0_get_clockgating_state()
1563 .align_mask = 0xf,
1593 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v3_0_set_ring_funcs()
1637 ib->ptr[ib->length_dw++] = 0; /* src/dst endian swap */ in sdma_v3_0_emit_copy_buffer()
1667 .copy_max_bytes = 0x3fffe0, /* not 0x3fffff due to HW limitation */
1671 .fill_max_bytes = 0x3fffe0, /* not 0x3fffff due to HW limitation */
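
On the 0x3fffe0 limits above: 0x3fffff is the natural ceiling of what is presumably a 22-bit byte-count field (2^22 - 1), and 0x3fffe0 is that ceiling rounded down to a 32-byte boundary (0x3fffff & ~0x1f); the in-source comment attributes the reduction to a hardware limitation.
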
1679 adev->mman.buffer_funcs_ring = &adev->sdma.instance[0].ring; in sdma_v3_0_set_buffer_funcs()
1695 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v3_0_set_vm_pte_funcs()
1706 .minor = 0,
1707 .rev = 0,
1716 .rev = 0,