Lines matching refs: kiq_ring
3678 static void gfx10_kiq_set_resources(struct amdgpu_ring *kiq_ring, uint64_t queue_mask) in gfx10_kiq_set_resources() argument
3680 amdgpu_ring_write(kiq_ring, PACKET3(PACKET3_SET_RESOURCES, 6)); in gfx10_kiq_set_resources()
3681 amdgpu_ring_write(kiq_ring, PACKET3_SET_RESOURCES_VMID_MASK(0) | in gfx10_kiq_set_resources()
3683 amdgpu_ring_write(kiq_ring, lower_32_bits(queue_mask)); /* queue mask lo */ in gfx10_kiq_set_resources()
3684 amdgpu_ring_write(kiq_ring, upper_32_bits(queue_mask)); /* queue mask hi */ in gfx10_kiq_set_resources()
3685 amdgpu_ring_write(kiq_ring, 0); /* gws mask lo */ in gfx10_kiq_set_resources()
3686 amdgpu_ring_write(kiq_ring, 0); /* gws mask hi */ in gfx10_kiq_set_resources()
3687 amdgpu_ring_write(kiq_ring, 0); /* oac mask */ in gfx10_kiq_set_resources()
3688 amdgpu_ring_write(kiq_ring, 0); /* gds heap base:0, gds heap size:0 */ in gfx10_kiq_set_resources()
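
The queue_mask argument above is a bitmask of the hardware compute queues being handed over to the KIQ scheduler, emitted as the lo/hi dwords that follow the packet header. Below is a hypothetical helper sketching only the shape of that mask; the helper, its parameters, and the one-bit-per-queue layout are illustrative, and the driver's real remapping from MEC pipe/queue positions to SET_RESOURCES bits is not reproduced here.

/*
 * Hypothetical helper: collapse a bitmap of KIQ-owned compute queues
 * into the 64-bit mask passed to gfx10_kiq_set_resources().  The bit
 * layout is illustrative only.
 */
static uint64_t example_kiq_queue_mask(const unsigned long *queue_bitmap,
                                       unsigned int nbits)
{
        uint64_t queue_mask = 0;
        unsigned int bit;

        for_each_set_bit(bit, queue_bitmap, min(nbits, 64U))
                queue_mask |= 1ULL << bit;

        return queue_mask;
}
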
3691 static void gfx10_kiq_map_queues(struct amdgpu_ring *kiq_ring, in gfx10_kiq_map_queues() argument
3712 amdgpu_ring_write(kiq_ring, PACKET3(PACKET3_MAP_QUEUES, 5)); in gfx10_kiq_map_queues()
3714 amdgpu_ring_write(kiq_ring, /* Q_sel: 0, vmid: 0, engine: 0, num_Q: 1 */ in gfx10_kiq_map_queues()
3724 amdgpu_ring_write(kiq_ring, PACKET3_MAP_QUEUES_DOORBELL_OFFSET(ring->doorbell_index)); in gfx10_kiq_map_queues()
3725 amdgpu_ring_write(kiq_ring, lower_32_bits(mqd_addr)); in gfx10_kiq_map_queues()
3726 amdgpu_ring_write(kiq_ring, upper_32_bits(mqd_addr)); in gfx10_kiq_map_queues()
3727 amdgpu_ring_write(kiq_ring, lower_32_bits(wptr_addr)); in gfx10_kiq_map_queues()
3728 amdgpu_ring_write(kiq_ring, upper_32_bits(wptr_addr)); in gfx10_kiq_map_queues()
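
In the driver these two emitters are reached through the kiq->pmf hook table rather than called directly (the later hits dereference kiq->pmf->kiq_map_queues and kiq->pmf->map_queues_size). A minimal sketch of the bring-up pattern follows, assuming it is compiled inside gfx_v10_0.c with the driver's usual headers; the set_resources hook and size field names mirror the map/unmap names visible in this listing and are assumptions here.

#include "amdgpu.h"

/*
 * Illustrative sketch only: enable a set of compute queues with one KIQ
 * submission (SET_RESOURCES followed by one MAP_QUEUES per ring), then
 * check that the KIQ ring still responds.
 */
static int example_kiq_enable_kcqs(struct amdgpu_kiq *kiq,
                                   struct amdgpu_ring **rings,
                                   unsigned int num_rings,
                                   uint64_t queue_mask)
{
        struct amdgpu_ring *kiq_ring = &kiq->ring;
        unsigned int i;
        int r;

        /* reserve room for SET_RESOURCES plus one MAP_QUEUES per ring */
        r = amdgpu_ring_alloc(kiq_ring,
                              kiq->pmf->set_resources_size +
                              kiq->pmf->map_queues_size * num_rings);
        if (r)
                return r;

        kiq->pmf->kiq_set_resources(kiq_ring, queue_mask);
        for (i = 0; i < num_rings; i++)
                kiq->pmf->kiq_map_queues(kiq_ring, rings[i]);

        amdgpu_ring_commit(kiq_ring);

        return amdgpu_ring_test_ring(kiq_ring);
}
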
3731 static void gfx10_kiq_unmap_queues(struct amdgpu_ring *kiq_ring, in gfx10_kiq_unmap_queues() argument
3738 amdgpu_ring_write(kiq_ring, PACKET3(PACKET3_UNMAP_QUEUES, 4)); in gfx10_kiq_unmap_queues()
3739 amdgpu_ring_write(kiq_ring, /* Q_sel: 0, vmid: 0, engine: 0, num_Q: 1 */ in gfx10_kiq_unmap_queues()
3744 amdgpu_ring_write(kiq_ring, in gfx10_kiq_unmap_queues()
3748 amdgpu_ring_write(kiq_ring, lower_32_bits(gpu_addr)); in gfx10_kiq_unmap_queues()
3749 amdgpu_ring_write(kiq_ring, upper_32_bits(gpu_addr)); in gfx10_kiq_unmap_queues()
3750 amdgpu_ring_write(kiq_ring, seq); in gfx10_kiq_unmap_queues()
3752 amdgpu_ring_write(kiq_ring, 0); in gfx10_kiq_unmap_queues()
3753 amdgpu_ring_write(kiq_ring, 0); in gfx10_kiq_unmap_queues()
3754 amdgpu_ring_write(kiq_ring, 0); in gfx10_kiq_unmap_queues()
3758 static void gfx10_kiq_query_status(struct amdgpu_ring *kiq_ring, in gfx10_kiq_query_status() argument
3765 amdgpu_ring_write(kiq_ring, PACKET3(PACKET3_QUERY_STATUS, 5)); in gfx10_kiq_query_status()
3766 amdgpu_ring_write(kiq_ring, in gfx10_kiq_query_status()
3770 amdgpu_ring_write(kiq_ring, /* Q_sel: 0, vmid: 0, engine: 0, num_Q: 1 */ in gfx10_kiq_query_status()
3773 amdgpu_ring_write(kiq_ring, lower_32_bits(addr)); in gfx10_kiq_query_status()
3774 amdgpu_ring_write(kiq_ring, upper_32_bits(addr)); in gfx10_kiq_query_status()
3775 amdgpu_ring_write(kiq_ring, lower_32_bits(seq)); in gfx10_kiq_query_status()
3776 amdgpu_ring_write(kiq_ring, upper_32_bits(seq)); in gfx10_kiq_query_status()
3779 static void gfx10_kiq_invalidate_tlbs(struct amdgpu_ring *kiq_ring, in gfx10_kiq_invalidate_tlbs() argument
3783 gfx_v10_0_ring_invalidate_tlbs(kiq_ring, pasid, flush_type, all_hub, 1); in gfx10_kiq_invalidate_tlbs()
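
The five emitters above are installed in the kiq->pmf hook table that the later hits dereference. The sketch below shows how such a table is wired up; struct kiq_pm4_funcs and the size fields that do not appear in this listing (set_resources_size, query_status_size, invalidate_tlbs_size) are assumptions patterned on the map/unmap sizes that do appear. The dword counts are read off the PACKET3 headers above (count field + 2: count + 1 payload dwords plus the header), except invalidate_tlbs_size, whose header is not part of the listing.

/*
 * Illustrative hook table for the emitters above; names beyond those
 * visible in the listing are assumptions.
 */
static const struct kiq_pm4_funcs example_gfx10_kiq_pm4_funcs = {
        .kiq_set_resources    = gfx10_kiq_set_resources,
        .kiq_map_queues       = gfx10_kiq_map_queues,
        .kiq_unmap_queues     = gfx10_kiq_unmap_queues,
        .kiq_query_status     = gfx10_kiq_query_status,
        .kiq_invalidate_tlbs  = gfx10_kiq_invalidate_tlbs,
        .set_resources_size   = 8, /* PACKET3(PACKET3_SET_RESOURCES, 6) */
        .map_queues_size      = 7, /* PACKET3(PACKET3_MAP_QUEUES, 5)    */
        .unmap_queues_size    = 6, /* PACKET3(PACKET3_UNMAP_QUEUES, 4)  */
        .query_status_size    = 7, /* PACKET3(PACKET3_QUERY_STATUS, 5)  */
        .invalidate_tlbs_size = 2, /* assumed; header not in listing    */
};

Recording the packet sizes next to the hooks is what lets callers such as the reset and preempt paths below reserve exactly the right number of dwords with amdgpu_ring_alloc() before invoking a hook.
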
8761 struct amdgpu_ring *kiq_ring = &kiq->ring; in gfx_v10_0_ring_preempt_ib() local
8769 if (amdgpu_ring_alloc(kiq_ring, kiq->pmf->unmap_queues_size)) { in gfx_v10_0_ring_preempt_ib()
8778 kiq->pmf->kiq_unmap_queues(kiq_ring, ring, PREEMPT_QUEUES_NO_UNMAP, in gfx_v10_0_ring_preempt_ib()
8781 amdgpu_ring_commit(kiq_ring); in gfx_v10_0_ring_preempt_ib()
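
These four hits are only the emit half of gfx queue preemption: reserve unmap_queues_size dwords, request PREEMPT_QUEUES_NO_UNMAP with a trailing fence, commit. A hedged sketch of the surrounding flow follows; the kiq->ring_lock serialization and the caller-provided fence slot are assumptions (neither appears in these matching lines), and the real path also handles the ring's preempt/cond-exec state, which is omitted here.

/*
 * Illustrative sketch only: preempt a gfx queue through the KIQ and
 * busy-wait for the trailing fence the CP writes back.  kiq->ring_lock
 * and the fence slot parameters are assumptions.
 */
static int example_kiq_preempt(struct amdgpu_kiq *kiq,
                               struct amdgpu_ring *ring,
                               u64 fence_gpu_addr,
                               volatile u32 *fence_cpu_addr,
                               u32 fence_seq)
{
        struct amdgpu_ring *kiq_ring = &kiq->ring;
        unsigned int timeout_us = 100000;
        unsigned long flags;

        spin_lock_irqsave(&kiq->ring_lock, flags);

        if (amdgpu_ring_alloc(kiq_ring, kiq->pmf->unmap_queues_size)) {
                spin_unlock_irqrestore(&kiq->ring_lock, flags);
                return -ENOMEM;
        }

        /* ask the CP to write fence_seq to fence_gpu_addr once preemption completes */
        kiq->pmf->kiq_unmap_queues(kiq_ring, ring, PREEMPT_QUEUES_NO_UNMAP,
                                   fence_gpu_addr, fence_seq);
        amdgpu_ring_commit(kiq_ring);

        spin_unlock_irqrestore(&kiq->ring_lock, flags);

        /* poll the CPU-visible side of the fence slot */
        while (*fence_cpu_addr != fence_seq && timeout_us--)
                udelay(1);

        return *fence_cpu_addr == fence_seq ? 0 : -ETIMEDOUT;
}
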
9425 struct amdgpu_ring *kiq_ring = &kiq->ring; in gfx_v10_0_reset_kgq() local
9439 if (amdgpu_ring_alloc(kiq_ring, 5 + 7 + 7 + kiq->pmf->map_queues_size)) { in gfx_v10_0_reset_kgq()
9452 gfx_v10_0_ring_emit_wreg(kiq_ring, in gfx_v10_0_reset_kgq()
9454 gfx_v10_0_wait_reg_mem(kiq_ring, 0, 1, 0, in gfx_v10_0_reset_kgq()
9457 gfx_v10_0_ring_emit_reg_wait(kiq_ring, in gfx_v10_0_reset_kgq()
9459 kiq->pmf->kiq_map_queues(kiq_ring, ring); in gfx_v10_0_reset_kgq()
9460 amdgpu_ring_commit(kiq_ring); in gfx_v10_0_reset_kgq()
9464 r = amdgpu_ring_test_ring(kiq_ring); in gfx_v10_0_reset_kgq()
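
The allocation on line 9439 sizes the whole reset submission up front. A short accounting sketch of that expression follows; mapping each constant to the emit helper that follows it is an inference from the call order, not something the matching lines state.

        /*
         * Dword budget for a reset_kgq-style KIQ submission, assuming
         * the terms line up with the emits in call order:
         */
        unsigned int ndw = 5 +                        /* gfx_v10_0_ring_emit_wreg     */
                           7 +                        /* gfx_v10_0_wait_reg_mem       */
                           7 +                        /* gfx_v10_0_ring_emit_reg_wait */
                           kiq->pmf->map_queues_size; /* kiq->pmf->kiq_map_queues     */

        if (amdgpu_ring_alloc(kiq_ring, ndw))
                return -ENOMEM;
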
9493 struct amdgpu_ring *kiq_ring = &kiq->ring; in gfx_v10_0_reset_kcq() local
9505 if (amdgpu_ring_alloc(kiq_ring, kiq->pmf->unmap_queues_size)) { in gfx_v10_0_reset_kcq()
9510 kiq->pmf->kiq_unmap_queues(kiq_ring, ring, RESET_QUEUES, in gfx_v10_0_reset_kcq()
9512 amdgpu_ring_commit(kiq_ring); in gfx_v10_0_reset_kcq()
9515 r = amdgpu_ring_test_ring(kiq_ring); in gfx_v10_0_reset_kcq()
9556 if (amdgpu_ring_alloc(kiq_ring, kiq->pmf->map_queues_size)) { in gfx_v10_0_reset_kcq()
9560 kiq->pmf->kiq_map_queues(kiq_ring, ring); in gfx_v10_0_reset_kcq()
9561 amdgpu_ring_commit(kiq_ring); in gfx_v10_0_reset_kcq()
9564 r = amdgpu_ring_test_ring(kiq_ring); in gfx_v10_0_reset_kcq()
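
Taken together, the compute-queue reset above is a two-phase KIQ transaction: unmap the queue with RESET_QUEUES and confirm the KIQ still responds, then map it back and confirm again. A hedged sketch of that shape follows; the KIQ locking and whatever queue re-initialisation the driver performs between the two allocations (the gap between lines 9515 and 9556) are left out.

/*
 * Illustrative sketch only: reset one compute queue through the KIQ.
 * Locking and any intermediate queue re-initialisation are omitted.
 */
static int example_kiq_reset_kcq(struct amdgpu_kiq *kiq,
                                 struct amdgpu_ring *ring)
{
        struct amdgpu_ring *kiq_ring = &kiq->ring;
        int r;

        /* phase 1: tear the queue down */
        r = amdgpu_ring_alloc(kiq_ring, kiq->pmf->unmap_queues_size);
        if (r)
                return r;
        kiq->pmf->kiq_unmap_queues(kiq_ring, ring, RESET_QUEUES, 0, 0);
        amdgpu_ring_commit(kiq_ring);

        r = amdgpu_ring_test_ring(kiq_ring);
        if (r)
                return r;

        /* phase 2: map the queue back and check the KIQ again */
        r = amdgpu_ring_alloc(kiq_ring, kiq->pmf->map_queues_size);
        if (r)
                return r;
        kiq->pmf->kiq_map_queues(kiq_ring, ring);
        amdgpu_ring_commit(kiq_ring);

        return amdgpu_ring_test_ring(kiq_ring);
}
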