Lines matching refs: vgdev (drivers/gpu/drm/virtio/virtgpu_vq.c)

59 struct virtio_gpu_device *vgdev = dev->dev_private; in virtio_gpu_ctrl_ack() local
61 schedule_work(&vgdev->ctrlq.dequeue_work); in virtio_gpu_ctrl_ack()
67 struct virtio_gpu_device *vgdev = dev->dev_private; in virtio_gpu_cursor_ack() local
69 schedule_work(&vgdev->cursorq.dequeue_work); in virtio_gpu_cursor_ack()
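The two callbacks above are the entire interrupt path: each one only schedules deferred work, and all real reclaim happens in process context. A minimal sketch of the pattern, assuming the simplified queue struct below stands in for the fuller bookkeeping in virtgpu_drv.h:

#include <linux/spinlock.h>
#include <linux/virtio.h>
#include <linux/wait.h>
#include <linux/workqueue.h>

/* simplified stand-in; the real struct carries more state */
struct virtio_gpu_queue {
	struct virtqueue *vq;
	spinlock_t qlock;
	wait_queue_head_t ack_queue;
	struct work_struct dequeue_work;
};

static void virtio_gpu_ctrl_ack(struct virtqueue *vq)
{
	struct drm_device *dev = vq->vdev->priv;
	struct virtio_gpu_device *vgdev = dev->dev_private;

	/* no reclaim in interrupt context; punt to the worker */
	schedule_work(&vgdev->ctrlq.dequeue_work);
}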
72 int virtio_gpu_alloc_vbufs(struct virtio_gpu_device *vgdev) in virtio_gpu_alloc_vbufs() argument
74 vgdev->vbufs = kmem_cache_create("virtio-gpu-vbufs", in virtio_gpu_alloc_vbufs()
78 if (!vgdev->vbufs) in virtio_gpu_alloc_vbufs()
83 void virtio_gpu_free_vbufs(struct virtio_gpu_device *vgdev) in virtio_gpu_free_vbufs() argument
85 kmem_cache_destroy(vgdev->vbufs); in virtio_gpu_free_vbufs()
86 vgdev->vbufs = NULL; in virtio_gpu_free_vbufs()
90 virtio_gpu_get_vbuf(struct virtio_gpu_device *vgdev, in virtio_gpu_get_vbuf() argument
96 vbuf = kmem_cache_zalloc(vgdev->vbufs, GFP_KERNEL | __GFP_NOFAIL); in virtio_gpu_get_vbuf()
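Command buffers come from a dedicated slab cache rather than plain kmalloc, and virtio_gpu_get_vbuf() allocates with __GFP_NOFAIL, so submission paths never see an allocation failure. A sketch of the cache setup; VBUFFER_SIZE is a placeholder for the real computed size (vbuf header plus maximum inline command and response):

#include <linux/slab.h>

int virtio_gpu_alloc_vbufs(struct virtio_gpu_device *vgdev)
{
	/* one cache per device, destroyed wholesale in free_vbufs() */
	vgdev->vbufs = kmem_cache_create("virtio-gpu-vbufs",
					 VBUFFER_SIZE,
					 __alignof__(struct virtio_gpu_vbuffer),
					 0, NULL);
	if (!vgdev->vbufs)
		return -ENOMEM;
	return 0;
}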
124 virtio_gpu_alloc_cursor(struct virtio_gpu_device *vgdev, in virtio_gpu_alloc_cursor() argument
130 (vgdev, sizeof(struct virtio_gpu_update_cursor), in virtio_gpu_alloc_cursor()
140 static void *virtio_gpu_alloc_cmd_resp(struct virtio_gpu_device *vgdev, in virtio_gpu_alloc_cmd_resp() argument
148 vbuf = virtio_gpu_get_vbuf(vgdev, cmd_size, in virtio_gpu_alloc_cmd_resp()
154 static void *virtio_gpu_alloc_cmd(struct virtio_gpu_device *vgdev, in virtio_gpu_alloc_cmd() argument
158 return virtio_gpu_alloc_cmd_resp(vgdev, NULL, vbuffer_p, size, in virtio_gpu_alloc_cmd()
163 static void *virtio_gpu_alloc_cmd_cb(struct virtio_gpu_device *vgdev, in virtio_gpu_alloc_cmd_cb() argument
168 return virtio_gpu_alloc_cmd_resp(vgdev, cb, vbuffer_p, size, in virtio_gpu_alloc_cmd_cb()
173 static void free_vbuf(struct virtio_gpu_device *vgdev, in free_vbuf() argument
179 kmem_cache_free(vgdev->vbufs, vbuf); in free_vbuf()
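Every allocation funnels through the single _resp variant; the thin wrappers differ only in whether they pass a response callback. A sketch of the layering, with the header-only default response size inferred from the virtio-gpu protocol (every command returns at least a ctrl header):

static void *virtio_gpu_alloc_cmd(struct virtio_gpu_device *vgdev,
				  struct virtio_gpu_vbuffer **vbuffer_p,
				  int size)
{
	/* no callback, header-only response */
	return virtio_gpu_alloc_cmd_resp(vgdev, NULL, vbuffer_p, size,
					 sizeof(struct virtio_gpu_ctrl_hdr),
					 NULL);
}

static void *virtio_gpu_alloc_cmd_cb(struct virtio_gpu_device *vgdev,
				     struct virtio_gpu_vbuffer **vbuffer_p,
				     int size,
				     virtio_gpu_resp_cb cb)
{
	return virtio_gpu_alloc_cmd_resp(vgdev, cb, vbuffer_p, size,
					 sizeof(struct virtio_gpu_ctrl_hdr),
					 NULL);
}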
198 struct virtio_gpu_device *vgdev = in virtio_gpu_dequeue_ctrl_func() local
207 spin_lock(&vgdev->ctrlq.qlock); in virtio_gpu_dequeue_ctrl_func()
209 virtqueue_disable_cb(vgdev->ctrlq.vq); in virtio_gpu_dequeue_ctrl_func()
210 reclaim_vbufs(vgdev->ctrlq.vq, &reclaim_list); in virtio_gpu_dequeue_ctrl_func()
212 } while (!virtqueue_enable_cb(vgdev->ctrlq.vq)); in virtio_gpu_dequeue_ctrl_func()
213 spin_unlock(&vgdev->ctrlq.qlock); in virtio_gpu_dequeue_ctrl_func()
218 trace_virtio_gpu_cmd_response(vgdev->ctrlq.vq, resp, entry->seqno); in virtio_gpu_dequeue_ctrl_func()
232 virtio_gpu_fence_event_process(vgdev, fence_id); in virtio_gpu_dequeue_ctrl_func()
235 entry->resp_cb(vgdev, entry); in virtio_gpu_dequeue_ctrl_func()
237 wake_up(&vgdev->ctrlq.ack_queue); in virtio_gpu_dequeue_ctrl_func()
241 virtio_gpu_array_put_free_delayed(vgdev, entry->objs); in virtio_gpu_dequeue_ctrl_func()
243 free_vbuf(vgdev, entry); in virtio_gpu_dequeue_ctrl_func()
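The do/while at 209-212 closes the classic virtio completion race: callbacks stay disabled while draining, and virtqueue_enable_cb() returning false means more buffers landed in the window, so the loop drains again. reclaim_vbufs() itself never touches vgdev and is therefore absent from this listing; a hedged reconstruction of what it must do:

static void reclaim_vbufs(struct virtqueue *vq, struct list_head *reclaim_list)
{
	struct virtio_gpu_vbuffer *vbuf;
	unsigned int len;

	/* move completed buffers onto a local list so their response
	 * callbacks can run after qlock is dropped */
	while ((vbuf = virtqueue_get_buf(vq, &len)))
		list_add_tail(&vbuf->list, reclaim_list);
}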
249 struct virtio_gpu_device *vgdev = in virtio_gpu_dequeue_cursor_func() local
256 spin_lock(&vgdev->cursorq.qlock); in virtio_gpu_dequeue_cursor_func()
258 virtqueue_disable_cb(vgdev->cursorq.vq); in virtio_gpu_dequeue_cursor_func()
259 reclaim_vbufs(vgdev->cursorq.vq, &reclaim_list); in virtio_gpu_dequeue_cursor_func()
260 } while (!virtqueue_enable_cb(vgdev->cursorq.vq)); in virtio_gpu_dequeue_cursor_func()
261 spin_unlock(&vgdev->cursorq.qlock); in virtio_gpu_dequeue_cursor_func()
267 trace_virtio_gpu_cmd_response(vgdev->cursorq.vq, resp, entry->seqno); in virtio_gpu_dequeue_cursor_func()
269 free_vbuf(vgdev, entry); in virtio_gpu_dequeue_cursor_func()
271 wake_up(&vgdev->cursorq.ack_queue); in virtio_gpu_dequeue_cursor_func()
314 static int virtio_gpu_queue_ctrl_sgs(struct virtio_gpu_device *vgdev, in virtio_gpu_queue_ctrl_sgs() argument
322 struct virtqueue *vq = vgdev->ctrlq.vq; in virtio_gpu_queue_ctrl_sgs()
325 if (!drm_dev_enter(vgdev->ddev, &idx)) { in virtio_gpu_queue_ctrl_sgs()
328 free_vbuf(vgdev, vbuf); in virtio_gpu_queue_ctrl_sgs()
332 if (vgdev->has_indirect) in virtio_gpu_queue_ctrl_sgs()
336 spin_lock(&vgdev->ctrlq.qlock); in virtio_gpu_queue_ctrl_sgs()
339 spin_unlock(&vgdev->ctrlq.qlock); in virtio_gpu_queue_ctrl_sgs()
340 virtio_gpu_notify(vgdev); in virtio_gpu_queue_ctrl_sgs()
341 wait_event(vgdev->ctrlq.ack_queue, vq->num_free >= elemcnt); in virtio_gpu_queue_ctrl_sgs()
349 virtio_gpu_fence_emit(vgdev, virtio_gpu_vbuf_ctrl_hdr(vbuf), in virtio_gpu_queue_ctrl_sgs()
360 vbuf->seqno = ++vgdev->ctrlq.seqno; in virtio_gpu_queue_ctrl_sgs()
363 atomic_inc(&vgdev->pending_commands); in virtio_gpu_queue_ctrl_sgs()
365 spin_unlock(&vgdev->ctrlq.qlock); in virtio_gpu_queue_ctrl_sgs()
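Lines 336-365 are submission with backpressure: when the ring lacks elemcnt free descriptors, the lock is dropped, the host is kicked so it can drain, and the thread sleeps until the dequeue worker wakes ack_queue. A condensed sketch of the retry loop, local declarations assumed:

again:
	spin_lock(&vgdev->ctrlq.qlock);
	if (vq->num_free < elemcnt) {
		spin_unlock(&vgdev->ctrlq.qlock);
		virtio_gpu_notify(vgdev);	/* flush batched kicks first */
		wait_event(vgdev->ctrlq.ack_queue, vq->num_free >= elemcnt);
		goto again;
	}

	ret = virtqueue_add_sgs(vq, sgs, outcnt, incnt, vbuf, GFP_ATOMIC);
	/* fence emission and seqno assignment happen here, still locked */
	atomic_inc(&vgdev->pending_commands);
	spin_unlock(&vgdev->ctrlq.qlock);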
371 static int virtio_gpu_queue_fenced_ctrl_buffer(struct virtio_gpu_device *vgdev, in virtio_gpu_queue_fenced_ctrl_buffer() argument
415 ret = virtio_gpu_queue_ctrl_sgs(vgdev, vbuf, fence, elemcnt, sgs, outcnt, in virtio_gpu_queue_fenced_ctrl_buffer()
425 void virtio_gpu_notify(struct virtio_gpu_device *vgdev) in virtio_gpu_notify() argument
429 if (!atomic_read(&vgdev->pending_commands)) in virtio_gpu_notify()
432 spin_lock(&vgdev->ctrlq.qlock); in virtio_gpu_notify()
433 atomic_set(&vgdev->pending_commands, 0); in virtio_gpu_notify()
434 notify = virtqueue_kick_prepare(vgdev->ctrlq.vq); in virtio_gpu_notify()
435 spin_unlock(&vgdev->ctrlq.qlock); in virtio_gpu_notify()
438 virtqueue_notify(vgdev->ctrlq.vq); in virtio_gpu_notify()
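Lines 429-438 are the kick-batching core: emitters only bump pending_commands, and a single notify flushes the whole batch. virtqueue_kick_prepare() must run under the queue lock, while the possibly slow virtqueue_notify() trap to the host runs outside it. The refs cover essentially the whole function:

void virtio_gpu_notify(struct virtio_gpu_device *vgdev)
{
	bool notify;

	if (!atomic_read(&vgdev->pending_commands))
		return;		/* nothing queued since the last kick */

	spin_lock(&vgdev->ctrlq.qlock);
	atomic_set(&vgdev->pending_commands, 0);
	notify = virtqueue_kick_prepare(vgdev->ctrlq.vq);
	spin_unlock(&vgdev->ctrlq.qlock);

	if (notify)
		virtqueue_notify(vgdev->ctrlq.vq);
}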
441 static int virtio_gpu_queue_ctrl_buffer(struct virtio_gpu_device *vgdev, in virtio_gpu_queue_ctrl_buffer() argument
444 return virtio_gpu_queue_fenced_ctrl_buffer(vgdev, vbuf, NULL); in virtio_gpu_queue_ctrl_buffer()
447 static void virtio_gpu_queue_cursor(struct virtio_gpu_device *vgdev, in virtio_gpu_queue_cursor() argument
450 struct virtqueue *vq = vgdev->cursorq.vq; in virtio_gpu_queue_cursor()
455 if (!drm_dev_enter(vgdev->ddev, &idx)) { in virtio_gpu_queue_cursor()
456 free_vbuf(vgdev, vbuf); in virtio_gpu_queue_cursor()
464 spin_lock(&vgdev->cursorq.qlock); in virtio_gpu_queue_cursor()
468 spin_unlock(&vgdev->cursorq.qlock); in virtio_gpu_queue_cursor()
469 wait_event(vgdev->cursorq.ack_queue, vq->num_free >= outcnt); in virtio_gpu_queue_cursor()
470 spin_lock(&vgdev->cursorq.qlock); in virtio_gpu_queue_cursor()
473 vbuf->seqno = ++vgdev->cursorq.seqno; in virtio_gpu_queue_cursor()
481 spin_unlock(&vgdev->cursorq.qlock); in virtio_gpu_queue_cursor()
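The cursor queue skips the fencing machinery entirely: one out-direction sg, no response payload, and a full ring handled by sleep-and-retry (464-481). A sketch, with declarations assumed:

	struct scatterlist *sgs[1], ccmd;
	int outcnt = 1, ret;

	sg_init_one(&ccmd, vbuf->buf, vbuf->size);
	sgs[0] = &ccmd;

	spin_lock(&vgdev->cursorq.qlock);
retry:
	ret = virtqueue_add_sgs(vq, sgs, outcnt, 0, vbuf, GFP_ATOMIC);
	if (ret == -ENOSPC) {
		spin_unlock(&vgdev->cursorq.qlock);
		wait_event(vgdev->cursorq.ack_queue, vq->num_free >= outcnt);
		spin_lock(&vgdev->cursorq.qlock);
		goto retry;
	}
	vbuf->seqno = ++vgdev->cursorq.seqno;
	virtqueue_kick(vq);
	spin_unlock(&vgdev->cursorq.qlock);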
494 void virtio_gpu_cmd_create_resource(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_create_resource() argument
503 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_create_resource()
513 virtio_gpu_queue_fenced_ctrl_buffer(vgdev, vbuf, fence); in virtio_gpu_cmd_create_resource()
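Lines 503-513 show the shape every emitter below repeats: allocate a slot, zero it, park the GEM objects on the vbuf so they outlive the command, fill the little-endian wire struct, then queue (fenced only when the caller passed a fence). A sketch for the 2D create, per the uapi struct virtio_gpu_resource_create_2d:

	cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p));
	memset(cmd_p, 0, sizeof(*cmd_p));
	vbuf->objs = objs;	/* keeps the BO alive until the host acks */

	cmd_p->hdr.type = cpu_to_le32(VIRTIO_GPU_CMD_RESOURCE_CREATE_2D);
	cmd_p->resource_id = cpu_to_le32(bo->hw_res_handle);
	cmd_p->format = cpu_to_le32(params->format);
	cmd_p->width = cpu_to_le32(params->width);
	cmd_p->height = cpu_to_le32(params->height);

	virtio_gpu_queue_fenced_ctrl_buffer(vgdev, vbuf, fence);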
517 static void virtio_gpu_cmd_unref_cb(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_unref_cb() argument
528 void virtio_gpu_cmd_unref_resource(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_unref_resource() argument
535 cmd_p = virtio_gpu_alloc_cmd_cb(vgdev, &vbuf, sizeof(*cmd_p), in virtio_gpu_cmd_unref_resource()
543 ret = virtio_gpu_queue_ctrl_buffer(vgdev, vbuf); in virtio_gpu_cmd_unref_resource()
548 void virtio_gpu_cmd_set_scanout(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_set_scanout() argument
556 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_set_scanout()
567 virtio_gpu_queue_ctrl_buffer(vgdev, vbuf); in virtio_gpu_cmd_set_scanout()
570 void virtio_gpu_cmd_resource_flush(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_resource_flush() argument
580 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_resource_flush()
591 virtio_gpu_queue_fenced_ctrl_buffer(vgdev, vbuf, fence); in virtio_gpu_cmd_resource_flush()
594 void virtio_gpu_cmd_transfer_to_host_2d(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_transfer_to_host_2d() argument
604 bool use_dma_api = !virtio_has_dma_quirk(vgdev->vdev); in virtio_gpu_cmd_transfer_to_host_2d()
607 dma_sync_sgtable_for_device(vgdev->vdev->dev.parent, in virtio_gpu_cmd_transfer_to_host_2d()
610 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_transfer_to_host_2d()
622 virtio_gpu_queue_fenced_ctrl_buffer(vgdev, vbuf, fence); in virtio_gpu_cmd_transfer_to_host_2d()
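Lines 604-607 guard host visibility of guest writes: when the transport uses the real DMA API (no platform quirk), CPU writes to the shadow pages must be flushed before the host reads them. Sketch; the sg-table variable name is an assumption, not the driver's:

	bool use_dma_api = !virtio_has_dma_quirk(vgdev->vdev);

	if (use_dma_api)
		/* shmem_sgt is a hypothetical name for the BO's sg table */
		dma_sync_sgtable_for_device(vgdev->vdev->dev.parent,
					    shmem_sgt, DMA_TO_DEVICE);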
626 virtio_gpu_cmd_resource_attach_backing(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_resource_attach_backing() argument
635 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_resource_attach_backing()
645 virtio_gpu_queue_fenced_ctrl_buffer(vgdev, vbuf, fence); in virtio_gpu_cmd_resource_attach_backing()
648 static void virtio_gpu_cmd_get_display_info_cb(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_get_display_info_cb() argument
655 spin_lock(&vgdev->display_info_lock); in virtio_gpu_cmd_get_display_info_cb()
656 for (i = 0; i < vgdev->num_scanouts; i++) { in virtio_gpu_cmd_get_display_info_cb()
657 vgdev->outputs[i].info = resp->pmodes[i]; in virtio_gpu_cmd_get_display_info_cb()
669 vgdev->display_info_pending = false; in virtio_gpu_cmd_get_display_info_cb()
670 spin_unlock(&vgdev->display_info_lock); in virtio_gpu_cmd_get_display_info_cb()
671 wake_up(&vgdev->resp_wq); in virtio_gpu_cmd_get_display_info_cb()
673 if (!drm_helper_hpd_irq_event(vgdev->ddev)) in virtio_gpu_cmd_get_display_info_cb()
674 drm_kms_helper_hotplug_event(vgdev->ddev); in virtio_gpu_cmd_get_display_info_cb()
677 static void virtio_gpu_cmd_get_capset_info_cb(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_get_capset_info_cb() argument
686 spin_lock(&vgdev->display_info_lock); in virtio_gpu_cmd_get_capset_info_cb()
687 if (vgdev->capsets) { in virtio_gpu_cmd_get_capset_info_cb()
688 vgdev->capsets[i].id = le32_to_cpu(resp->capset_id); in virtio_gpu_cmd_get_capset_info_cb()
689 vgdev->capsets[i].max_version = le32_to_cpu(resp->capset_max_version); in virtio_gpu_cmd_get_capset_info_cb()
690 vgdev->capsets[i].max_size = le32_to_cpu(resp->capset_max_size); in virtio_gpu_cmd_get_capset_info_cb()
694 spin_unlock(&vgdev->display_info_lock); in virtio_gpu_cmd_get_capset_info_cb()
695 wake_up(&vgdev->resp_wq); in virtio_gpu_cmd_get_capset_info_cb()
698 static void virtio_gpu_cmd_capset_cb(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_capset_cb() argument
707 spin_lock(&vgdev->display_info_lock); in virtio_gpu_cmd_capset_cb()
708 list_for_each_entry(cache_ent, &vgdev->cap_cache, head) { in virtio_gpu_cmd_capset_cb()
719 spin_unlock(&vgdev->display_info_lock); in virtio_gpu_cmd_capset_cb()
720 wake_up_all(&vgdev->resp_wq); in virtio_gpu_cmd_capset_cb()
735 static void virtio_gpu_cmd_get_edid_cb(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_get_edid_cb() argument
746 if (scanout >= vgdev->num_scanouts) in virtio_gpu_cmd_get_edid_cb()
748 output = vgdev->outputs + scanout; in virtio_gpu_cmd_get_edid_cb()
753 spin_lock(&vgdev->display_info_lock); in virtio_gpu_cmd_get_edid_cb()
756 spin_unlock(&vgdev->display_info_lock); in virtio_gpu_cmd_get_edid_cb()
759 wake_up(&vgdev->resp_wq); in virtio_gpu_cmd_get_edid_cb()
762 int virtio_gpu_cmd_get_display_info(struct virtio_gpu_device *vgdev) in virtio_gpu_cmd_get_display_info() argument
774 (vgdev, &virtio_gpu_cmd_get_display_info_cb, &vbuf, in virtio_gpu_cmd_get_display_info()
779 vgdev->display_info_pending = true; in virtio_gpu_cmd_get_display_info()
781 virtio_gpu_queue_ctrl_buffer(vgdev, vbuf); in virtio_gpu_cmd_get_display_info()
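display_info_pending, set at 779, is cleared by the callback at 669; the synchronous wait pairing the two lives in the driver's init path, not in this file. Roughly, hedged as a paraphrase of that caller:

	virtio_gpu_cmd_get_display_info(vgdev);
	virtio_gpu_notify(vgdev);
	wait_event_timeout(vgdev->resp_wq,
			   !vgdev->display_info_pending, 5 * HZ);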
785 int virtio_gpu_cmd_get_capset_info(struct virtio_gpu_device *vgdev, int idx) in virtio_gpu_cmd_get_capset_info() argument
797 (vgdev, &virtio_gpu_cmd_get_capset_info_cb, &vbuf, in virtio_gpu_cmd_get_capset_info()
804 virtio_gpu_queue_ctrl_buffer(vgdev, vbuf); in virtio_gpu_cmd_get_capset_info()
808 int virtio_gpu_cmd_get_capset(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_get_capset() argument
821 if (idx >= vgdev->num_capsets) in virtio_gpu_cmd_get_capset()
824 if (version > vgdev->capsets[idx].max_version) in virtio_gpu_cmd_get_capset()
831 max_size = vgdev->capsets[idx].max_size; in virtio_gpu_cmd_get_capset()
847 cache_ent->id = vgdev->capsets[idx].id; in virtio_gpu_cmd_get_capset()
850 spin_lock(&vgdev->display_info_lock); in virtio_gpu_cmd_get_capset()
852 list_for_each_entry(search_ent, &vgdev->cap_cache, head) { in virtio_gpu_cmd_get_capset()
853 if (search_ent->id == vgdev->capsets[idx].id && in virtio_gpu_cmd_get_capset()
860 list_add_tail(&cache_ent->head, &vgdev->cap_cache); in virtio_gpu_cmd_get_capset()
861 spin_unlock(&vgdev->display_info_lock); in virtio_gpu_cmd_get_capset()
872 (vgdev, &virtio_gpu_cmd_capset_cb, &vbuf, sizeof(*cmd_p), in virtio_gpu_cmd_get_capset()
876 cmd_p->capset_id = cpu_to_le32(vgdev->capsets[idx].id); in virtio_gpu_cmd_get_capset()
879 virtio_gpu_queue_ctrl_buffer(vgdev, vbuf); in virtio_gpu_cmd_get_capset()
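Lines 850-861 are an optimistic-allocation dedup: the new cache entry is built before taking the lock, then discarded if a concurrent caller already cached the same (id, version) pair. A sketch of the locked section and the loser's cleanup:

	spin_lock(&vgdev->display_info_lock);
	list_for_each_entry(search_ent, &vgdev->cap_cache, head) {
		if (search_ent->id == vgdev->capsets[idx].id &&
		    search_ent->version == version) {
			*cache_p = search_ent;	/* someone beat us to it */
			break;
		}
	}
	if (!*cache_p)
		list_add_tail(&cache_ent->head, &vgdev->cap_cache);
	spin_unlock(&vgdev->display_info_lock);

	if (*cache_p) {
		/* lost the race: free the entry built above */
		kfree(cache_ent->caps_cache);
		kfree(cache_ent);
		return 0;
	}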
884 int virtio_gpu_cmd_get_edids(struct virtio_gpu_device *vgdev) in virtio_gpu_cmd_get_edids() argument
891 if (WARN_ON(!vgdev->has_edid)) in virtio_gpu_cmd_get_edids()
894 for (scanout = 0; scanout < vgdev->num_scanouts; scanout++) { in virtio_gpu_cmd_get_edids()
901 (vgdev, &virtio_gpu_cmd_get_edid_cb, &vbuf, in virtio_gpu_cmd_get_edids()
906 virtio_gpu_queue_ctrl_buffer(vgdev, vbuf); in virtio_gpu_cmd_get_edids()
912 void virtio_gpu_cmd_context_create(struct virtio_gpu_device *vgdev, uint32_t id, in virtio_gpu_cmd_context_create() argument
919 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_context_create()
927 virtio_gpu_queue_ctrl_buffer(vgdev, vbuf); in virtio_gpu_cmd_context_create()
930 void virtio_gpu_cmd_context_destroy(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_context_destroy() argument
936 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_context_destroy()
941 virtio_gpu_queue_ctrl_buffer(vgdev, vbuf); in virtio_gpu_cmd_context_destroy()
944 void virtio_gpu_cmd_context_attach_resource(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_context_attach_resource() argument
952 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_context_attach_resource()
959 virtio_gpu_queue_ctrl_buffer(vgdev, vbuf); in virtio_gpu_cmd_context_attach_resource()
962 void virtio_gpu_cmd_context_detach_resource(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_context_detach_resource() argument
970 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_context_detach_resource()
977 virtio_gpu_queue_ctrl_buffer(vgdev, vbuf); in virtio_gpu_cmd_context_detach_resource()
981 virtio_gpu_cmd_resource_create_3d(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_resource_create_3d() argument
990 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_resource_create_3d()
1008 virtio_gpu_queue_fenced_ctrl_buffer(vgdev, vbuf, fence); in virtio_gpu_cmd_resource_create_3d()
1013 void virtio_gpu_cmd_transfer_to_host_3d(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_transfer_to_host_3d() argument
1025 bool use_dma_api = !virtio_has_dma_quirk(vgdev->vdev); in virtio_gpu_cmd_transfer_to_host_3d()
1028 dma_sync_sgtable_for_device(vgdev->vdev->dev.parent, in virtio_gpu_cmd_transfer_to_host_3d()
1031 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_transfer_to_host_3d()
1045 virtio_gpu_queue_fenced_ctrl_buffer(vgdev, vbuf, fence); in virtio_gpu_cmd_transfer_to_host_3d()
1048 void virtio_gpu_cmd_transfer_from_host_3d(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_transfer_from_host_3d() argument
1061 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_transfer_from_host_3d()
1075 virtio_gpu_queue_fenced_ctrl_buffer(vgdev, vbuf, fence); in virtio_gpu_cmd_transfer_from_host_3d()
1078 void virtio_gpu_cmd_submit(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_submit() argument
1087 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_submit()
1098 virtio_gpu_queue_fenced_ctrl_buffer(vgdev, vbuf, fence); in virtio_gpu_cmd_submit()
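At 1087-1098 the execbuffer stream does not fit the inline slot, so it rides in vbuf->data_buf and is added as its own out-sg at queue time. Sketch:

	cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p));
	memset(cmd_p, 0, sizeof(*cmd_p));

	vbuf->data_buf = data;		/* queued as a separate out-sg */
	vbuf->data_size = data_size;
	vbuf->objs = objs;

	cmd_p->hdr.type = cpu_to_le32(VIRTIO_GPU_CMD_SUBMIT_3D);
	cmd_p->hdr.ctx_id = cpu_to_le32(ctx_id);
	cmd_p->size = cpu_to_le32(data_size);

	virtio_gpu_queue_fenced_ctrl_buffer(vgdev, vbuf, fence);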
1101 void virtio_gpu_object_attach(struct virtio_gpu_device *vgdev, in virtio_gpu_object_attach() argument
1106 virtio_gpu_cmd_resource_attach_backing(vgdev, obj->hw_res_handle, in virtio_gpu_object_attach()
1110 void virtio_gpu_cursor_ping(struct virtio_gpu_device *vgdev, in virtio_gpu_cursor_ping() argument
1117 cur_p = virtio_gpu_alloc_cursor(vgdev, &vbuf); in virtio_gpu_cursor_ping()
1119 virtio_gpu_queue_cursor(vgdev, vbuf); in virtio_gpu_cursor_ping()
1122 static void virtio_gpu_cmd_resource_uuid_cb(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_resource_uuid_cb() argument
1131 spin_lock(&vgdev->resource_export_lock); in virtio_gpu_cmd_resource_uuid_cb()
1141 spin_unlock(&vgdev->resource_export_lock); in virtio_gpu_cmd_resource_uuid_cb()
1143 wake_up_all(&vgdev->resp_wq); in virtio_gpu_cmd_resource_uuid_cb()
1147 virtio_gpu_cmd_resource_assign_uuid(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_resource_assign_uuid() argument
1157 spin_lock(&vgdev->resource_export_lock); in virtio_gpu_cmd_resource_assign_uuid()
1159 spin_unlock(&vgdev->resource_export_lock); in virtio_gpu_cmd_resource_assign_uuid()
1165 (vgdev, virtio_gpu_cmd_resource_uuid_cb, &vbuf, sizeof(*cmd_p), in virtio_gpu_cmd_resource_assign_uuid()
1173 virtio_gpu_queue_ctrl_buffer(vgdev, vbuf); in virtio_gpu_cmd_resource_assign_uuid()
1177 static void virtio_gpu_cmd_resource_map_cb(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_resource_map_cb() argument
1187 spin_lock(&vgdev->host_visible_lock); in virtio_gpu_cmd_resource_map_cb()
1196 spin_unlock(&vgdev->host_visible_lock); in virtio_gpu_cmd_resource_map_cb()
1197 wake_up_all(&vgdev->resp_wq); in virtio_gpu_cmd_resource_map_cb()
1200 int virtio_gpu_cmd_map(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_map() argument
1213 (vgdev, virtio_gpu_cmd_resource_map_cb, &vbuf, sizeof(*cmd_p), in virtio_gpu_cmd_map()
1222 virtio_gpu_queue_ctrl_buffer(vgdev, vbuf); in virtio_gpu_cmd_map()
1226 void virtio_gpu_cmd_unmap(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_unmap() argument
1232 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_unmap()
1238 virtio_gpu_queue_ctrl_buffer(vgdev, vbuf); in virtio_gpu_cmd_unmap()
1242 virtio_gpu_cmd_resource_create_blob(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_resource_create_blob() argument
1251 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_resource_create_blob()
1266 virtio_gpu_queue_ctrl_buffer(vgdev, vbuf); in virtio_gpu_cmd_resource_create_blob()
1270 void virtio_gpu_cmd_set_scanout_blob(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_set_scanout_blob() argument
1282 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_set_scanout_blob()
1303 virtio_gpu_queue_ctrl_buffer(vgdev, vbuf); in virtio_gpu_cmd_set_scanout_blob()
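The blob path (1251-1266) follows the same emitter shape; the wire struct additionally carries blob_mem/blob_flags and 64-bit blob_id/size fields. A sketch of the fill, field names per the virtio_gpu uapi header:

	cmd_p->hdr.type = cpu_to_le32(VIRTIO_GPU_CMD_RESOURCE_CREATE_BLOB);
	cmd_p->hdr.ctx_id = cpu_to_le32(params->ctx_id);
	cmd_p->resource_id = cpu_to_le32(bo->hw_res_handle);
	cmd_p->blob_mem = cpu_to_le32(params->blob_mem);
	cmd_p->blob_flags = cpu_to_le32(params->blob_flags);
	cmd_p->blob_id = cpu_to_le64(params->blob_id);
	cmd_p->size = cpu_to_le64(params->size);
	cmd_p->nr_entries = cpu_to_le32(nents);

	virtio_gpu_queue_ctrl_buffer(vgdev, vbuf);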