Lines Matching +full:feedback +full:- +full:pin (all matches below are from drivers/gpu/drm/radeon/radeon_vce.c in the Linux kernel)

15  * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL
50 * radeon_vce_init - allocate memory, load vce firmware
65 INIT_DELAYED_WORK(&rdev->vce.idle_work, radeon_vce_idle_work_handler); in radeon_vce_init()
67 switch (rdev->family) { in radeon_vce_init()
84 return -EINVAL; in radeon_vce_init()
87 r = request_firmware(&rdev->vce_fw, fw_name, rdev->dev); in radeon_vce_init()
89 dev_err(rdev->dev, "radeon_vce: Can't load firmware \"%s\"\n", in radeon_vce_init()
96 size = rdev->vce_fw->size - strlen(fw_version) - 9; in radeon_vce_init()
97 c = rdev->vce_fw->data; in radeon_vce_init()
98 for (; size > 0; --size, ++c) in radeon_vce_init()
103 return -EINVAL; in radeon_vce_init()
107 return -EINVAL; in radeon_vce_init()
109 /* search for feedback version */ in radeon_vce_init()
111 size = rdev->vce_fw->size - strlen(fb_version) - 3; in radeon_vce_init()
112 c = rdev->vce_fw->data; in radeon_vce_init()
113 for (; size > 0; --size, ++c) in radeon_vce_init()
118 return -EINVAL; in radeon_vce_init()
121 if (sscanf(c, "%2u]", &rdev->vce.fb_version) != 1) in radeon_vce_init()
122 return -EINVAL; in radeon_vce_init()
124 DRM_INFO("Found VCE firmware/feedback version %d.%d.%d / %d!\n", in radeon_vce_init()
125 start, mid, end, rdev->vce.fb_version); in radeon_vce_init()
127 rdev->vce.fw_version = (start << 24) | (mid << 16) | (end << 8); in radeon_vce_init()
130 if ((rdev->vce.fw_version != ((40 << 24) | (2 << 16) | (2 << 8))) && in radeon_vce_init()
131 (rdev->vce.fw_version != ((50 << 24) | (0 << 16) | (1 << 8))) && in radeon_vce_init()
132 (rdev->vce.fw_version != ((50 << 24) | (1 << 16) | (2 << 8)))) in radeon_vce_init()
133 return -EINVAL; in radeon_vce_init()
137 if (rdev->family < CHIP_BONAIRE) in radeon_vce_init()
143 &rdev->vce.vcpu_bo); in radeon_vce_init()
145 dev_err(rdev->dev, "(%d) failed to allocate VCE bo\n", r); in radeon_vce_init()
149 r = radeon_bo_reserve(rdev->vce.vcpu_bo, false); in radeon_vce_init()
151 radeon_bo_unref(&rdev->vce.vcpu_bo); in radeon_vce_init()
152 dev_err(rdev->dev, "(%d) failed to reserve VCE bo\n", r); in radeon_vce_init()
156 r = radeon_bo_pin(rdev->vce.vcpu_bo, RADEON_GEM_DOMAIN_VRAM, in radeon_vce_init()
157 &rdev->vce.gpu_addr); in radeon_vce_init()
158 radeon_bo_unreserve(rdev->vce.vcpu_bo); in radeon_vce_init()
160 radeon_bo_unref(&rdev->vce.vcpu_bo); in radeon_vce_init()
161 dev_err(rdev->dev, "(%d) VCE bo pin failed\n", r); in radeon_vce_init()
166 atomic_set(&rdev->vce.handles[i], 0); in radeon_vce_init()
167 rdev->vce.filp[i] = NULL; in radeon_vce_init()
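The matched lines from radeon_vce_init() trace the setup the +pin search term points at: after the firmware version strings are parsed, a VRAM buffer object is created, reserved, pinned to obtain a GPU address for the VCE VCPU, then unreserved, and the session table is cleared. A condensed sketch reconstructed around the fragments above (the chip-specific BO size calculation is omitted, and the radeon_bo_create() argument list varies between kernel versions):

	/* allocate the firmware/stack/heap BO in VRAM */
	r = radeon_bo_create(rdev, size, PAGE_SIZE, true,
			     RADEON_GEM_DOMAIN_VRAM, 0, NULL, NULL,
			     &rdev->vce.vcpu_bo);
	if (r) {
		dev_err(rdev->dev, "(%d) failed to allocate VCE bo\n", r);
		return r;
	}

	/* the BO must be reserved before it can be pinned */
	r = radeon_bo_reserve(rdev->vce.vcpu_bo, false);
	if (r) {
		radeon_bo_unref(&rdev->vce.vcpu_bo);
		dev_err(rdev->dev, "(%d) failed to reserve VCE bo\n", r);
		return r;
	}

	/* pin it and record the GPU address used by the VCE VCPU */
	r = radeon_bo_pin(rdev->vce.vcpu_bo, RADEON_GEM_DOMAIN_VRAM,
			  &rdev->vce.gpu_addr);
	radeon_bo_unreserve(rdev->vce.vcpu_bo);
	if (r) {
		radeon_bo_unref(&rdev->vce.vcpu_bo);
		dev_err(rdev->dev, "(%d) VCE bo pin failed\n", r);
		return r;
	}

	/* no encode sessions are open yet */
	for (i = 0; i < RADEON_MAX_VCE_HANDLES; ++i) {
		atomic_set(&rdev->vce.handles[i], 0);
		rdev->vce.filp[i] = NULL;
	}

	return 0;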
174 * radeon_vce_fini - free memory
182 if (rdev->vce.vcpu_bo == NULL) in radeon_vce_fini()
185 radeon_bo_unref(&rdev->vce.vcpu_bo); in radeon_vce_fini()
187 release_firmware(rdev->vce_fw); in radeon_vce_fini()
191 * radeon_vce_suspend - unpin VCE fw memory
200 if (rdev->vce.vcpu_bo == NULL) in radeon_vce_suspend()
204 if (atomic_read(&rdev->vce.handles[i])) in radeon_vce_suspend()
211 return -EINVAL; in radeon_vce_suspend()
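The radeon_vce_suspend() fragments show that suspend mostly refuses to run while encode sessions are still open rather than actually unpinning anything. A sketch consistent with those lines (RADEON_MAX_VCE_HANDLES is the fixed size of the session table):

int radeon_vce_suspend(struct radeon_device *rdev)
{
	int i;

	if (rdev->vce.vcpu_bo == NULL)
		return 0;

	/* look for a still-open encode session */
	for (i = 0; i < RADEON_MAX_VCE_HANDLES; ++i)
		if (atomic_read(&rdev->vce.handles[i]))
			break;

	if (i == RADEON_MAX_VCE_HANDLES)
		return 0;

	/* suspending running encoding sessions isn't supported */
	return -EINVAL;
}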
215 * radeon_vce_resume - pin VCE fw memory
225 if (rdev->vce.vcpu_bo == NULL) in radeon_vce_resume()
226 return -EINVAL; in radeon_vce_resume()
228 r = radeon_bo_reserve(rdev->vce.vcpu_bo, false); in radeon_vce_resume()
230 dev_err(rdev->dev, "(%d) failed to reserve VCE bo\n", r); in radeon_vce_resume()
234 r = radeon_bo_kmap(rdev->vce.vcpu_bo, &cpu_addr); in radeon_vce_resume()
236 radeon_bo_unreserve(rdev->vce.vcpu_bo); in radeon_vce_resume()
237 dev_err(rdev->dev, "(%d) VCE map failed\n", r); in radeon_vce_resume()
241 memset(cpu_addr, 0, radeon_bo_size(rdev->vce.vcpu_bo)); in radeon_vce_resume()
242 if (rdev->family < CHIP_BONAIRE) in radeon_vce_resume()
245 memcpy(cpu_addr, rdev->vce_fw->data, rdev->vce_fw->size); in radeon_vce_resume()
247 radeon_bo_kunmap(rdev->vce.vcpu_bo); in radeon_vce_resume()
249 radeon_bo_unreserve(rdev->vce.vcpu_bo); in radeon_vce_resume()
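radeon_vce_resume() re-uploads the firmware into the already pinned BO: reserve, kmap, clear, copy the image, kunmap, unreserve. A sketch consistent with the fragments (pre-Bonaire parts go through a chip-specific loader, assumed here to be vce_v1_0_load_fw()):

int radeon_vce_resume(struct radeon_device *rdev)
{
	void *cpu_addr;
	int r;

	if (rdev->vce.vcpu_bo == NULL)
		return -EINVAL;

	r = radeon_bo_reserve(rdev->vce.vcpu_bo, false);
	if (r) {
		dev_err(rdev->dev, "(%d) failed to reserve VCE bo\n", r);
		return r;
	}

	r = radeon_bo_kmap(rdev->vce.vcpu_bo, &cpu_addr);
	if (r) {
		radeon_bo_unreserve(rdev->vce.vcpu_bo);
		dev_err(rdev->dev, "(%d) VCE map failed\n", r);
		return r;
	}

	/* clear the whole BO, then place the firmware image at its start */
	memset(cpu_addr, 0, radeon_bo_size(rdev->vce.vcpu_bo));
	if (rdev->family < CHIP_BONAIRE)
		r = vce_v1_0_load_fw(rdev, cpu_addr);	/* assumed v1 loader */
	else
		memcpy(cpu_addr, rdev->vce_fw->data, rdev->vce_fw->size);

	radeon_bo_kunmap(rdev->vce.vcpu_bo);
	radeon_bo_unreserve(rdev->vce.vcpu_bo);

	return r;
}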
255 * radeon_vce_idle_work_handler - power off VCE
268 if ((rdev->pm.pm_method == PM_METHOD_DPM) && rdev->pm.dpm_enabled) { in radeon_vce_idle_work_handler()
274 schedule_delayed_work(&rdev->vce.idle_work, in radeon_vce_idle_work_handler()
280 * radeon_vce_note_usage - power up VCE
289 bool set_clocks = !cancel_delayed_work_sync(&rdev->vce.idle_work); in radeon_vce_note_usage()
290 set_clocks &= schedule_delayed_work(&rdev->vce.idle_work, in radeon_vce_note_usage()
293 if ((rdev->pm.pm_method == PM_METHOD_DPM) && rdev->pm.dpm_enabled) { in radeon_vce_note_usage()
299 if ((rdev->pm.pm_method == PM_METHOD_DPM) && rdev->pm.dpm_enabled) { in radeon_vce_note_usage()
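radeon_vce_idle_work_handler() and radeon_vce_note_usage() implement the lazy power management these fragments hint at: every submission cancels and re-arms the delayed work item and powers VCE up, and the worker powers it back down once no VCE fences remain outstanding. A sketch of the two clock/DPM branches, assuming radeon_dpm_enable_vce() and radeon_set_vce_clocks() from the radeon power-management code and nominal clock values:

	/* idle worker: power down once both VCE rings are idle */
	if ((rdev->pm.pm_method == PM_METHOD_DPM) && rdev->pm.dpm_enabled)
		radeon_dpm_enable_vce(rdev, false);
	else
		radeon_set_vce_clocks(rdev, 0, 0);

	/* radeon_vce_note_usage(): power up before new work is submitted */
	if ((rdev->pm.pm_method == PM_METHOD_DPM) && rdev->pm.dpm_enabled)
		radeon_dpm_enable_vce(rdev, true);
	else
		radeon_set_vce_clocks(rdev, 53300, 40000);	/* assumed evclk/ecclk */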
308 * radeon_vce_free_handles - free still open VCE handles
319 uint32_t handle = atomic_read(&rdev->vce.handles[i]); in radeon_vce_free_handles()
320 if (!handle || rdev->vce.filp[i] != filp) in radeon_vce_free_handles()
330 rdev->vce.filp[i] = NULL; in radeon_vce_free_handles()
331 atomic_set(&rdev->vce.handles[i], 0); in radeon_vce_free_handles()
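radeon_vce_free_handles() cleans up when a file descriptor closes with sessions still open: for each handle owned by that filp it notes usage (so VCE is powered), submits a destroy message, and clears the slot. A sketch around the matched lines (TN_RING_TYPE_VCE1_INDEX is the first VCE ring index):

void radeon_vce_free_handles(struct radeon_device *rdev, struct drm_file *filp)
{
	int i, r;

	for (i = 0; i < RADEON_MAX_VCE_HANDLES; ++i) {
		uint32_t handle = atomic_read(&rdev->vce.handles[i]);
		if (!handle || rdev->vce.filp[i] != filp)
			continue;

		radeon_vce_note_usage(rdev);

		/* tear the leftover session down on the first VCE ring */
		r = radeon_vce_get_destroy_msg(rdev, TN_RING_TYPE_VCE1_INDEX,
					       handle, NULL);
		if (r)
			DRM_ERROR("Error destroying VCE handle (%d)!\n", r);

		rdev->vce.filp[i] = NULL;
		atomic_set(&rdev->vce.handles[i], 0);
	}
}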
336 * radeon_vce_get_create_msg - generate a VCE create msg
381 ib.ptr[ib.length_dw++] = cpu_to_le32(0x05000005); /* feedback buffer */ in radeon_vce_get_create_msg()
403 * radeon_vce_get_destroy_msg - generate a VCE destroy msg
435 ib.ptr[ib.length_dw++] = cpu_to_le32(0x05000005); /* feedback buffer */ in radeon_vce_get_destroy_msg()
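Both message builders hand the firmware a small feedback buffer (the 0x05000005 command matched above) so it has somewhere to report status; since nothing reads it back, a dummy address inside the IB itself is used. A rough sketch of the destroy-message body (the command opcodes here are recalled, not quoted, and should be treated as assumptions):

	/* dummy feedback area right after the first KB of the IB */
	dummy = ib.gpu_addr + 1024;

	ib.length_dw = 0;
	ib.ptr[ib.length_dw++] = cpu_to_le32(0x0000000c); /* len */
	ib.ptr[ib.length_dw++] = cpu_to_le32(0x00000001); /* session cmd */
	ib.ptr[ib.length_dw++] = cpu_to_le32(handle);

	ib.ptr[ib.length_dw++] = cpu_to_le32(0x00000014); /* len */
	ib.ptr[ib.length_dw++] = cpu_to_le32(0x05000005); /* feedback buffer */
	ib.ptr[ib.length_dw++] = cpu_to_le32(upper_32_bits(dummy));
	ib.ptr[ib.length_dw++] = cpu_to_le32(dummy);
	ib.ptr[ib.length_dw++] = cpu_to_le32(0x00000001);

	ib.ptr[ib.length_dw++] = cpu_to_le32(0x00000008); /* len */
	ib.ptr[ib.length_dw++] = cpu_to_le32(0x02000001); /* destroy cmd */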
460 * radeon_vce_cs_reloc - command submission relocation
477 relocs_chunk = p->chunk_relocs; in radeon_vce_cs_reloc()
481 if (idx >= relocs_chunk->length_dw) { in radeon_vce_cs_reloc()
483 idx, relocs_chunk->length_dw); in radeon_vce_cs_reloc()
484 return -EINVAL; in radeon_vce_cs_reloc()
487 reloc = &p->relocs[(idx / 4)]; in radeon_vce_cs_reloc()
488 start = reloc->gpu_offset; in radeon_vce_cs_reloc()
489 end = start + radeon_bo_size(reloc->robj); in radeon_vce_cs_reloc()
492 p->ib.ptr[lo] = start & 0xFFFFFFFF; in radeon_vce_cs_reloc()
493 p->ib.ptr[hi] = start >> 32; in radeon_vce_cs_reloc()
497 return -EINVAL; in radeon_vce_cs_reloc()
499 if ((end - start) < size) { in radeon_vce_cs_reloc()
501 (unsigned)(end - start), size); in radeon_vce_cs_reloc()
502 return -EINVAL; in radeon_vce_cs_reloc()
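radeon_vce_cs_reloc() patches a 64-bit buffer address in the user IB from a relocation entry and sanity-checks it: the relocation index must lie inside the relocation chunk, and the buffer must have at least 'size' bytes left past the requested offset. A sketch of the checks around the matched lines:

	offset = radeon_get_ib_value(p, lo);
	idx = radeon_get_ib_value(p, hi);

	if (idx >= relocs_chunk->length_dw) {
		DRM_ERROR("Relocs at %d after relocations chunk end %d !\n",
			  idx, relocs_chunk->length_dw);
		return -EINVAL;
	}

	reloc = &p->relocs[(idx / 4)];
	start = reloc->gpu_offset;
	end = start + radeon_bo_size(reloc->robj);
	start += offset;

	/* write the patched GPU address back into the IB */
	p->ib.ptr[lo] = start & 0xFFFFFFFF;
	p->ib.ptr[hi] = start >> 32;

	if (end <= start)
		return -EINVAL;		/* offset points past the end of the BO */

	if ((end - start) < size) {
		DRM_ERROR("buffer too small (%d / %d)!\n",
			  (unsigned)(end - start), size);
		return -EINVAL;
	}

	return 0;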
509 * radeon_vce_validate_handle - validate stream handle
515  * Validates the handle and returns the found session index or -EINVAL
527 if (atomic_read(&p->rdev->vce.handles[i]) == handle) { in radeon_vce_validate_handle()
528 if (p->rdev->vce.filp[i] != p->filp) { in radeon_vce_validate_handle()
530 return -EINVAL; in radeon_vce_validate_handle()
538 if (!atomic_cmpxchg(&p->rdev->vce.handles[i], 0, handle)) { in radeon_vce_validate_handle()
539 p->rdev->vce.filp[i] = p->filp; in radeon_vce_validate_handle()
540 p->rdev->vce.img_size[i] = 0; in radeon_vce_validate_handle()
547 return -EINVAL; in radeon_vce_validate_handle()
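radeon_vce_validate_handle() maps a stream handle to a session slot: an existing slot is only valid when it was opened by the same filp, and an unknown handle atomically claims the first free slot (newer kernels also report whether the slot was newly allocated). A sketch consistent with the fragments:

	/* validate the handle presented by userspace */
	for (i = 0; i < RADEON_MAX_VCE_HANDLES; ++i) {
		if (atomic_read(&p->rdev->vce.handles[i]) == handle) {
			if (p->rdev->vce.filp[i] != p->filp) {
				DRM_ERROR("VCE handle collision detected!\n");
				return -EINVAL;
			}
			return i;
		}
	}

	/* handle not found, try to allocate a new one */
	for (i = 0; i < RADEON_MAX_VCE_HANDLES; ++i) {
		if (!atomic_cmpxchg(&p->rdev->vce.handles[i], 0, handle)) {
			p->rdev->vce.filp[i] = p->filp;
			p->rdev->vce.img_size[i] = 0;
			return i;
		}
	}

	DRM_ERROR("No more free VCE handles!\n");
	return -EINVAL;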
551 * radeon_vce_cs_parse - parse and validate the command stream
558 int session_idx = -1; in radeon_vce_cs_parse()
564 while (p->idx < p->chunk_ib->length_dw) { in radeon_vce_cs_parse()
565 uint32_t len = radeon_get_ib_value(p, p->idx); in radeon_vce_cs_parse()
566 uint32_t cmd = radeon_get_ib_value(p, p->idx + 1); in radeon_vce_cs_parse()
570 r = -EINVAL; in radeon_vce_cs_parse()
576 r = -EINVAL; in radeon_vce_cs_parse()
582 handle = radeon_get_ib_value(p, p->idx + 2); in radeon_vce_cs_parse()
587 size = &p->rdev->vce.img_size[session_idx]; in radeon_vce_cs_parse()
597 r = -EINVAL; in radeon_vce_cs_parse()
601 *size = radeon_get_ib_value(p, p->idx + 8) * in radeon_vce_cs_parse()
602 radeon_get_ib_value(p, p->idx + 10) * in radeon_vce_cs_parse()
615 r = radeon_vce_cs_reloc(p, p->idx + 10, p->idx + 9, in radeon_vce_cs_parse()
620 r = radeon_vce_cs_reloc(p, p->idx + 12, p->idx + 11, in radeon_vce_cs_parse()
631 r = radeon_vce_cs_reloc(p, p->idx + 3, p->idx + 2, in radeon_vce_cs_parse()
638 tmp = radeon_get_ib_value(p, p->idx + 4); in radeon_vce_cs_parse()
639 r = radeon_vce_cs_reloc(p, p->idx + 3, p->idx + 2, in radeon_vce_cs_parse()
645 case 0x05000005: // feedback buffer in radeon_vce_cs_parse()
646 r = radeon_vce_cs_reloc(p, p->idx + 3, p->idx + 2, in radeon_vce_cs_parse()
654 r = -EINVAL; in radeon_vce_cs_parse()
658 if (session_idx == -1) { in radeon_vce_cs_parse()
660 r = -EINVAL; in radeon_vce_cs_parse()
664 p->idx += len / 4; in radeon_vce_cs_parse()
669 r = -ENOENT; in radeon_vce_cs_parse()
679 atomic_cmpxchg(&p->rdev->vce.handles[i], handle, 0); in radeon_vce_cs_parse()
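radeon_vce_cs_parse() walks the IB as a sequence of length-prefixed commands: each iteration reads a byte length and a command word, validates the command (patching buffer addresses through radeon_vce_cs_reloc()), and then advances p->idx by len / 4 because the length is in bytes while the parser index counts dwords. A sketch of the loop framing only (the per-command cases are elided, and newer kernels pass an extra 'allocated' flag to the handle validation):

	while (p->idx < p->chunk_ib->length_dw) {
		uint32_t len = radeon_get_ib_value(p, p->idx);
		uint32_t cmd = radeon_get_ib_value(p, p->idx + 1);

		if ((len < 8) || (len & 3)) {
			DRM_ERROR("invalid VCE command length (%d)!\n", len);
			r = -EINVAL;
			goto out;
		}

		switch (cmd) {
		case 0x00000001: /* session: look up or claim a handle slot */
			handle = radeon_get_ib_value(p, p->idx + 2);
			session_idx = radeon_vce_validate_handle(p, handle);
			if (session_idx < 0)
				return session_idx;
			size = &p->rdev->vce.img_size[session_idx];
			break;

		/* ... task info, encode, rate control and the buffer
		 * commands (bitstream, context, feedback) are checked
		 * here via radeon_vce_cs_reloc() ... */

		default:
			DRM_ERROR("invalid VCE command (0x%x)!\n", cmd);
			r = -EINVAL;
			goto out;
		}

		if (session_idx == -1) {
			DRM_ERROR("no session command at start of IB\n");
			r = -EINVAL;
			goto out;
		}

		/* len is in bytes, p->idx counts dwords */
		p->idx += len / 4;
	}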
686 * radeon_vce_semaphore_emit - emit a semaphore command
699 uint64_t addr = semaphore->gpu_addr; in radeon_vce_semaphore_emit()
712 * radeon_vce_ib_execute - execute indirect buffer
720 struct radeon_ring *ring = &rdev->ring[ib->ring]; in radeon_vce_ib_execute()
722 radeon_ring_write(ring, cpu_to_le32(ib->gpu_addr)); in radeon_vce_ib_execute()
723 radeon_ring_write(ring, cpu_to_le32(upper_32_bits(ib->gpu_addr))); in radeon_vce_ib_execute()
724 radeon_ring_write(ring, cpu_to_le32(ib->length_dw)); in radeon_vce_ib_execute()
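radeon_vce_ib_execute() is the entire IB submission on a VCE ring: a VCE_CMD_IB packet followed by the 64-bit IB address and its length in dwords. Roughly:

void radeon_vce_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib)
{
	struct radeon_ring *ring = &rdev->ring[ib->ring];

	radeon_ring_write(ring, cpu_to_le32(VCE_CMD_IB));
	radeon_ring_write(ring, cpu_to_le32(ib->gpu_addr));
	radeon_ring_write(ring, cpu_to_le32(upper_32_bits(ib->gpu_addr)));
	radeon_ring_write(ring, cpu_to_le32(ib->length_dw));
}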
728 * radeon_vce_fence_emit - add a fence command to the ring
737 struct radeon_ring *ring = &rdev->ring[fence->ring]; in radeon_vce_fence_emit()
738 uint64_t addr = rdev->fence_drv[fence->ring].gpu_addr; in radeon_vce_fence_emit()
743 radeon_ring_write(ring, cpu_to_le32(fence->seq)); in radeon_vce_fence_emit()
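radeon_vce_fence_emit() follows the same pattern: a fence packet writes the sequence number to the per-ring fence address and a trap raises the completion interrupt. A sketch assuming the VCE_CMD_FENCE/TRAP/END opcodes from the radeon register headers:

	radeon_ring_write(ring, cpu_to_le32(VCE_CMD_FENCE));
	radeon_ring_write(ring, cpu_to_le32(addr));
	radeon_ring_write(ring, cpu_to_le32(upper_32_bits(addr)));
	radeon_ring_write(ring, cpu_to_le32(fence->seq));
	radeon_ring_write(ring, cpu_to_le32(VCE_CMD_TRAP));
	radeon_ring_write(ring, cpu_to_le32(VCE_CMD_END));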
749 * radeon_vce_ring_test - test if VCE ring is working
764 ring->idx, r); in radeon_vce_ring_test()
770 for (i = 0; i < rdev->usec_timeout; i++) { in radeon_vce_ring_test()
776 if (i < rdev->usec_timeout) { in radeon_vce_ring_test()
778 ring->idx, i); in radeon_vce_ring_test()
781 ring->idx); in radeon_vce_ring_test()
782 r = -ETIMEDOUT; in radeon_vce_ring_test()
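The ring test writes a single VCE_CMD_END packet and then polls for the firmware to advance its read pointer, for up to rdev->usec_timeout microseconds. A sketch around the matched lines (the read-pointer accessor is chip specific; vce_v1_0_get_rptr() is assumed here):

	uint32_t rptr = vce_v1_0_get_rptr(rdev, ring);
	unsigned i;
	int r;

	r = radeon_ring_lock(rdev, ring, 16);
	if (r) {
		DRM_ERROR("radeon: vce failed to lock ring %d (%d).\n",
			  ring->idx, r);
		return r;
	}
	radeon_ring_write(ring, cpu_to_le32(VCE_CMD_END));
	radeon_ring_unlock_commit(rdev, ring, false);

	for (i = 0; i < rdev->usec_timeout; i++) {
		if (vce_v1_0_get_rptr(rdev, ring) != rptr)
			break;
		udelay(1);
	}

	if (i < rdev->usec_timeout) {
		DRM_INFO("ring test on %d succeeded in %d usecs\n",
			 ring->idx, i);
	} else {
		DRM_ERROR("radeon: ring %d test failed\n", ring->idx);
		r = -ETIMEDOUT;
	}

	return r;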
789 * radeon_vce_ib_test - test if VCE IBs are working
800 r = radeon_vce_get_create_msg(rdev, ring->idx, 1, NULL); in radeon_vce_ib_test()
806 r = radeon_vce_get_destroy_msg(rdev, ring->idx, 1, &fence); in radeon_vce_ib_test()
818 r = -ETIMEDOUT; in radeon_vce_ib_test()
820 DRM_INFO("ib test on ring %d succeeded\n", ring->idx); in radeon_vce_ib_test()
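The IB test exercises the full path: it submits a create message and then a destroy message for dummy handle 1, and waits on the fence of the second IB; the -ETIMEDOUT above is the timeout branch of that wait. A sketch of the tail, assuming the radeon_fence_wait_timeout() helper used by the other ring tests:

	r = radeon_fence_wait_timeout(fence, false,
			usecs_to_jiffies(RADEON_USEC_IB_TEST_TIMEOUT));
	if (r < 0) {
		DRM_ERROR("radeon: fence wait failed (%d).\n", r);
	} else if (r == 0) {
		DRM_ERROR("radeon: fence wait timed out.\n");
		r = -ETIMEDOUT;
	} else {
		DRM_INFO("ib test on ring %d succeeded\n", ring->idx);
		r = 0;
	}

	radeon_fence_unref(&fence);
	return r;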