Lines matching refs:kiq_ring in gfx_v12_0.c (drivers/gpu/drm/amd/amdgpu)
245 static void gfx_v12_0_kiq_set_resources(struct amdgpu_ring *kiq_ring, in gfx_v12_0_kiq_set_resources() argument
248 amdgpu_ring_write(kiq_ring, PACKET3(PACKET3_SET_RESOURCES, 6)); in gfx_v12_0_kiq_set_resources()
249 amdgpu_ring_write(kiq_ring, PACKET3_SET_RESOURCES_VMID_MASK(0) | in gfx_v12_0_kiq_set_resources()
251 amdgpu_ring_write(kiq_ring, lower_32_bits(queue_mask)); /* queue mask lo */ in gfx_v12_0_kiq_set_resources()
252 amdgpu_ring_write(kiq_ring, upper_32_bits(queue_mask)); /* queue mask hi */ in gfx_v12_0_kiq_set_resources()
253 amdgpu_ring_write(kiq_ring, 0); /* gws mask lo */ in gfx_v12_0_kiq_set_resources()
254 amdgpu_ring_write(kiq_ring, 0); /* gws mask hi */ in gfx_v12_0_kiq_set_resources()
255 amdgpu_ring_write(kiq_ring, 0); /* oac mask */ in gfx_v12_0_kiq_set_resources()
256 amdgpu_ring_write(kiq_ring, 0); in gfx_v12_0_kiq_set_resources()
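The matches at file lines 245-256 above form the body of gfx_v12_0_kiq_set_resources(): a PACKET3_SET_RESOURCES header (count 6, i.e. seven payload dwords) followed by the dwords that hand the compute-queue bitmap to the KIQ. Below is a condensed sketch of that sequence for orientation only; it is not the exact kernel code (the remaining fields of the second dword do not reference kiq_ring, so they are missing from the match list and are assumed here from the very similar gfx_v11 implementation), and it relies on the usual amdgpu ring/PM4 helpers.

/* Hedged sketch of the SET_RESOURCES sequence; fields not shown in the
 * match list above are assumptions modeled on gfx_v11, not gfx_v12_0.c. */
static void kiq_set_resources_sketch(struct amdgpu_ring *kiq_ring,
				     u64 queue_mask)
{
	/* header: count 6 means seven payload dwords follow */
	amdgpu_ring_write(kiq_ring, PACKET3(PACKET3_SET_RESOURCES, 6));
	/* dword 1: vmid mask 0, queue_type 0 (KIQ); extra fields assumed */
	amdgpu_ring_write(kiq_ring, PACKET3_SET_RESOURCES_VMID_MASK(0) |
			  PACKET3_SET_RESOURCES_QUEUE_TYPE(0));
	amdgpu_ring_write(kiq_ring, lower_32_bits(queue_mask));	/* queue mask lo */
	amdgpu_ring_write(kiq_ring, upper_32_bits(queue_mask));	/* queue mask hi */
	amdgpu_ring_write(kiq_ring, 0);	/* gws mask lo */
	amdgpu_ring_write(kiq_ring, 0);	/* gws mask hi */
	amdgpu_ring_write(kiq_ring, 0);	/* oac mask */
	amdgpu_ring_write(kiq_ring, 0);	/* trailing dword (contents assumed) */
}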
259 static void gfx_v12_0_kiq_map_queues(struct amdgpu_ring *kiq_ring, in gfx_v12_0_kiq_map_queues() argument
283 amdgpu_ring_write(kiq_ring, PACKET3(PACKET3_MAP_QUEUES, 5)); in gfx_v12_0_kiq_map_queues()
285 amdgpu_ring_write(kiq_ring, /* Q_sel: 0, vmid: 0, engine: 0, num_Q: 1 */ in gfx_v12_0_kiq_map_queues()
295 amdgpu_ring_write(kiq_ring, PACKET3_MAP_QUEUES_DOORBELL_OFFSET(ring->doorbell_index)); in gfx_v12_0_kiq_map_queues()
296 amdgpu_ring_write(kiq_ring, lower_32_bits(mqd_addr)); in gfx_v12_0_kiq_map_queues()
297 amdgpu_ring_write(kiq_ring, upper_32_bits(mqd_addr)); in gfx_v12_0_kiq_map_queues()
298 amdgpu_ring_write(kiq_ring, lower_32_bits(wptr_addr)); in gfx_v12_0_kiq_map_queues()
299 amdgpu_ring_write(kiq_ring, upper_32_bits(wptr_addr)); in gfx_v12_0_kiq_map_queues()
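gfx_v12_0_kiq_map_queues() (file lines 259-299) emits a PACKET3_MAP_QUEUES packet that asks the KIQ to map one ring: a control dword selecting the queue and pipe (not matched above because it does not mention kiq_ring), the doorbell offset, then the 64-bit MQD address and write-pointer address split into lo/hi halves. A hedged sketch of the dword order follows; the control-dword fields and the way mqd_addr/wptr_addr are obtained are assumptions modeled on the gfx_v11 counterpart, not taken from this listing.

/* Sketch of the MAP_QUEUES payload order; control-dword fields assumed. */
static void kiq_map_queues_sketch(struct amdgpu_ring *kiq_ring,
				  struct amdgpu_ring *ring)
{
	/* assumed sources of the two 64-bit addresses written below */
	u64 mqd_addr = amdgpu_bo_gpu_offset(ring->mqd_obj);
	u64 wptr_addr = ring->wptr_gpu_addr;

	amdgpu_ring_write(kiq_ring, PACKET3(PACKET3_MAP_QUEUES, 5));
	/* control dword: queue sel, vmid, queue/pipe, num_Q = 1; the real
	 * code also selects ME/engine based on the ring type (assumed) */
	amdgpu_ring_write(kiq_ring,
			  PACKET3_MAP_QUEUES_QUEUE_SEL(0) |
			  PACKET3_MAP_QUEUES_VMID(0) |
			  PACKET3_MAP_QUEUES_QUEUE(ring->queue) |
			  PACKET3_MAP_QUEUES_PIPE(ring->pipe) |
			  PACKET3_MAP_QUEUES_NUM_QUEUES(1));
	amdgpu_ring_write(kiq_ring,
			  PACKET3_MAP_QUEUES_DOORBELL_OFFSET(ring->doorbell_index));
	amdgpu_ring_write(kiq_ring, lower_32_bits(mqd_addr));
	amdgpu_ring_write(kiq_ring, upper_32_bits(mqd_addr));
	amdgpu_ring_write(kiq_ring, lower_32_bits(wptr_addr));
	amdgpu_ring_write(kiq_ring, upper_32_bits(wptr_addr));
}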
302 static void gfx_v12_0_kiq_unmap_queues(struct amdgpu_ring *kiq_ring, in gfx_v12_0_kiq_unmap_queues() argument
307 struct amdgpu_device *adev = kiq_ring->adev; in gfx_v12_0_kiq_unmap_queues()
315 amdgpu_ring_write(kiq_ring, PACKET3(PACKET3_UNMAP_QUEUES, 4)); in gfx_v12_0_kiq_unmap_queues()
316 amdgpu_ring_write(kiq_ring, /* Q_sel: 0, vmid: 0, engine: 0, num_Q: 1 */ in gfx_v12_0_kiq_unmap_queues()
321 amdgpu_ring_write(kiq_ring, in gfx_v12_0_kiq_unmap_queues()
325 amdgpu_ring_write(kiq_ring, lower_32_bits(gpu_addr)); in gfx_v12_0_kiq_unmap_queues()
326 amdgpu_ring_write(kiq_ring, upper_32_bits(gpu_addr)); in gfx_v12_0_kiq_unmap_queues()
327 amdgpu_ring_write(kiq_ring, seq); in gfx_v12_0_kiq_unmap_queues()
329 amdgpu_ring_write(kiq_ring, 0); in gfx_v12_0_kiq_unmap_queues()
330 amdgpu_ring_write(kiq_ring, 0); in gfx_v12_0_kiq_unmap_queues()
331 amdgpu_ring_write(kiq_ring, 0); in gfx_v12_0_kiq_unmap_queues()
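In gfx_v12_0_kiq_unmap_queues() (file lines 302-331) the two different triples of writes, at file lines 325-327 and 329-331, suggest an action-dependent tail: when the action is PREEMPT_QUEUES_NO_UNMAP the packet carries a fence address and sequence number for the CP to signal, otherwise three zero dwords go out. A hedged sketch of the whole packet follows; the control-dword fields and the if/else (which cannot appear in a kiq_ring match list) are assumed from the gfx_v11 code.

/* Sketch of UNMAP_QUEUES; the branch on 'action' is an assumption that
 * explains the two write triples in the match list above. */
static void kiq_unmap_queues_sketch(struct amdgpu_ring *kiq_ring,
				    struct amdgpu_ring *ring,
				    enum amdgpu_unmap_queues_action action,
				    u64 gpu_addr, u64 seq)
{
	amdgpu_ring_write(kiq_ring, PACKET3(PACKET3_UNMAP_QUEUES, 4));
	/* control dword: action + queue select, num_Q = 1 (fields assumed) */
	amdgpu_ring_write(kiq_ring,
			  PACKET3_UNMAP_QUEUES_ACTION(action) |
			  PACKET3_UNMAP_QUEUES_QUEUE_SEL(0) |
			  PACKET3_UNMAP_QUEUES_NUM_QUEUES(1));
	amdgpu_ring_write(kiq_ring,
			  PACKET3_UNMAP_QUEUES_DOORBELL_OFFSET0(ring->doorbell_index));

	if (action == PREEMPT_QUEUES_NO_UNMAP) {
		/* where the CP should write the preemption fence */
		amdgpu_ring_write(kiq_ring, lower_32_bits(gpu_addr));
		amdgpu_ring_write(kiq_ring, upper_32_bits(gpu_addr));
		amdgpu_ring_write(kiq_ring, seq);
	} else {
		amdgpu_ring_write(kiq_ring, 0);
		amdgpu_ring_write(kiq_ring, 0);
		amdgpu_ring_write(kiq_ring, 0);
	}
}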
335 static void gfx_v12_0_kiq_query_status(struct amdgpu_ring *kiq_ring, in gfx_v12_0_kiq_query_status() argument
341 amdgpu_ring_write(kiq_ring, PACKET3(PACKET3_QUERY_STATUS, 5)); in gfx_v12_0_kiq_query_status()
342 amdgpu_ring_write(kiq_ring, in gfx_v12_0_kiq_query_status()
346 amdgpu_ring_write(kiq_ring, /* Q_sel: 0, vmid: 0, engine: 0, num_Q: 1 */ in gfx_v12_0_kiq_query_status()
349 amdgpu_ring_write(kiq_ring, lower_32_bits(addr)); in gfx_v12_0_kiq_query_status()
350 amdgpu_ring_write(kiq_ring, upper_32_bits(addr)); in gfx_v12_0_kiq_query_status()
351 amdgpu_ring_write(kiq_ring, lower_32_bits(seq)); in gfx_v12_0_kiq_query_status()
352 amdgpu_ring_write(kiq_ring, upper_32_bits(seq)); in gfx_v12_0_kiq_query_status()
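gfx_v12_0_kiq_query_status() (file lines 335-352) builds a PACKET3_QUERY_STATUS packet: two control dwords, then a fence address and a sequence value, each split lo/hi, that the CP writes back once the query completes. A sketch with the control-dword fields assumed from the gfx_v11 code rather than read from this listing:

/* Sketch of QUERY_STATUS; control-dword fields are assumptions. */
static void kiq_query_status_sketch(struct amdgpu_ring *kiq_ring,
				    struct amdgpu_ring *ring,
				    u64 addr, u64 seq)
{
	amdgpu_ring_write(kiq_ring, PACKET3(PACKET3_QUERY_STATUS, 5));
	amdgpu_ring_write(kiq_ring,
			  PACKET3_QUERY_STATUS_CONTEXT_ID(0) |
			  PACKET3_QUERY_STATUS_INTERRUPT_SEL(0) |
			  PACKET3_QUERY_STATUS_COMMAND(2));
	/* which queue to query (doorbell/engine select assumed) */
	amdgpu_ring_write(kiq_ring,
			  PACKET3_QUERY_STATUS_DOORBELL_OFFSET(ring->doorbell_index) |
			  PACKET3_QUERY_STATUS_ENG_SEL(0));
	/* fence address and sequence value written back on completion */
	amdgpu_ring_write(kiq_ring, lower_32_bits(addr));
	amdgpu_ring_write(kiq_ring, upper_32_bits(addr));
	amdgpu_ring_write(kiq_ring, lower_32_bits(seq));
	amdgpu_ring_write(kiq_ring, upper_32_bits(seq));
}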
355 static void gfx_v12_0_kiq_invalidate_tlbs(struct amdgpu_ring *kiq_ring, in gfx_v12_0_kiq_invalidate_tlbs() argument
360 gfx_v12_0_ring_invalidate_tlbs(kiq_ring, pasid, flush_type, all_hub, 1); in gfx_v12_0_kiq_invalidate_tlbs()
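gfx_v12_0_kiq_invalidate_tlbs() is a thin wrapper that forwards to gfx_v12_0_ring_invalidate_tlbs() with a fixed final argument. The five helpers above are normally published to the rest of the driver through a kiq_pm4_funcs table, which is what the kiq->pmf pointer used at file lines 4520 and 4529 below resolves to. A sketch of such a table follows; the function pointers match the helpers in this listing, but the *_size dword counts are assumptions derived from the packet headers above (one header dword plus count+1 payload dwords), not read from gfx_v12_0.c.

/* Sketch of the KIQ PM4 dispatch table; size values are assumptions. */
static const struct kiq_pm4_funcs kiq_pm4_funcs_sketch = {
	.kiq_set_resources	= gfx_v12_0_kiq_set_resources,
	.kiq_map_queues		= gfx_v12_0_kiq_map_queues,
	.kiq_unmap_queues	= gfx_v12_0_kiq_unmap_queues,
	.kiq_query_status	= gfx_v12_0_kiq_query_status,
	.kiq_invalidate_tlbs	= gfx_v12_0_kiq_invalidate_tlbs,
	.set_resources_size	= 8,	/* header + 7 payload dwords (assumed) */
	.map_queues_size	= 7,	/* header + 6 payload dwords (assumed) */
	.unmap_queues_size	= 6,	/* header + 5 payload dwords (assumed) */
	.query_status_size	= 7,	/* header + 6 payload dwords (assumed) */
	.invalidate_tlbs_size	= 2,	/* assumed, not derivable from this listing */
};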
4509 struct amdgpu_ring *kiq_ring = &kiq->ring; in gfx_v12_0_ring_preempt_ib() local
4520 if (amdgpu_ring_alloc(kiq_ring, kiq->pmf->unmap_queues_size)) { in gfx_v12_0_ring_preempt_ib()
4529 kiq->pmf->kiq_unmap_queues(kiq_ring, ring, PREEMPT_QUEUES_NO_UNMAP, in gfx_v12_0_ring_preempt_ib()
4532 amdgpu_ring_commit(kiq_ring); in gfx_v12_0_ring_preempt_ib()
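The last group is gfx_v12_0_ring_preempt_ib() (around file line 4509), which drives queue preemption through the KIQ: it reserves pmf->unmap_queues_size dwords on the KIQ ring, emits UNMAP_QUEUES with PREEMPT_QUEUES_NO_UNMAP so the queue is preempted rather than torn down, and commits the ring. A hedged sketch of that control flow follows; the locking, error codes, and the choice of fence address/sequence passed to the hook are simplifications and assumptions, and the real function also waits for the preemption fence afterwards, which is omitted here.

/* Sketch of preempting a ring via the KIQ; not the exact kernel flow. */
static int ring_preempt_via_kiq_sketch(struct amdgpu_ring *ring,
				       u64 fence_gpu_addr, u64 fence_seq)
{
	struct amdgpu_device *adev = ring->adev;
	struct amdgpu_kiq *kiq = &adev->gfx.kiq[0];
	struct amdgpu_ring *kiq_ring = &kiq->ring;

	if (!kiq->pmf || !kiq->pmf->kiq_unmap_queues)
		return -EINVAL;

	spin_lock(&kiq->ring_lock);

	/* reserve exactly the number of dwords UNMAP_QUEUES will emit */
	if (amdgpu_ring_alloc(kiq_ring, kiq->pmf->unmap_queues_size)) {
		spin_unlock(&kiq->ring_lock);
		return -ENOMEM;
	}

	/* preempt the queue without unmapping it; the CP signals completion
	 * by writing fence_seq to fence_gpu_addr (see the unmap sketch above) */
	kiq->pmf->kiq_unmap_queues(kiq_ring, ring, PREEMPT_QUEUES_NO_UNMAP,
				   fence_gpu_addr, fence_seq);
	amdgpu_ring_commit(kiq_ring);

	spin_unlock(&kiq->ring_lock);
	return 0;
}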