Lines matching the full identifier vgpu, apparently from drivers/gpu/drm/i915/gvt/kvmgt.c (i915 GVT-g / KVMGT); the first number on each line is the source line, followed by the enclosing function.
71 size_t (*rw)(struct intel_vgpu *vgpu, char *buf,
73 void (*release)(struct intel_vgpu *vgpu,
97 struct intel_vgpu *vgpu; member
128 static void gvt_unpin_guest_page(struct intel_vgpu *vgpu, unsigned long gfn, in gvt_unpin_guest_page() argument
131 vfio_unpin_pages(&vgpu->vfio_device, gfn << PAGE_SHIFT, in gvt_unpin_guest_page()
136 static int gvt_pin_guest_page(struct intel_vgpu *vgpu, unsigned long gfn, in gvt_pin_guest_page() argument
152 ret = vfio_pin_pages(&vgpu->vfio_device, cur_iova, 1, in gvt_pin_guest_page()
173 gvt_unpin_guest_page(vgpu, gfn, npage * PAGE_SIZE); in gvt_pin_guest_page()
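The cluster above (gvt_unpin_guest_page/gvt_pin_guest_page) pins guest memory one 4K frame at a time through vfio_pin_pages() and unwinds on failure; the real code additionally verifies that consecutive frames are physically contiguous before treating them as one huge page. A minimal sketch of the pin/unwind pattern, assuming the VFIO API that returns struct page pointers; the demo_* names are hypothetical:

```c
#include <linux/vfio.h>
#include <linux/iommu.h>
#include <linux/mm.h>

/* Hypothetical: pin a size-byte guest range starting at gfn, one 4K
 * frame per vfio_pin_pages() call, unwinding on any failure. */
static int demo_pin_guest_range(struct vfio_device *vdev, unsigned long gfn,
				unsigned long size, struct page **pages)
{
	int npage = size >> PAGE_SHIFT;
	int i, ret = 0;

	for (i = 0; i < npage; i++) {
		dma_addr_t iova = (dma_addr_t)(gfn + i) << PAGE_SHIFT;

		ret = vfio_pin_pages(vdev, iova, 1,
				     IOMMU_READ | IOMMU_WRITE, &pages[i]);
		if (ret != 1)
			goto unwind;
	}
	return 0;

unwind:
	while (--i >= 0)	/* unpin everything pinned so far */
		vfio_unpin_pages(vdev, (dma_addr_t)(gfn + i) << PAGE_SHIFT, 1);
	return ret >= 0 ? -EFAULT : ret;
}
```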
177 static int gvt_dma_map_page(struct intel_vgpu *vgpu, unsigned long gfn, in gvt_dma_map_page() argument
180 struct device *dev = vgpu->gvt->gt->i915->drm.dev; in gvt_dma_map_page()
184 ret = gvt_pin_guest_page(vgpu, gfn, size, &page); in gvt_dma_map_page()
193 gvt_unpin_guest_page(vgpu, gfn, size); in gvt_dma_map_page()
200 static void gvt_dma_unmap_page(struct intel_vgpu *vgpu, unsigned long gfn, in gvt_dma_unmap_page() argument
203 struct device *dev = vgpu->gvt->gt->i915->drm.dev; in gvt_dma_unmap_page()
206 gvt_unpin_guest_page(vgpu, gfn, size); in gvt_dma_unmap_page()
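gvt_dma_map_page() then turns the pinned page into a bus address against the i915 DRM device (vgpu->gvt->gt->i915->drm.dev), unpinning again if the mapping fails; gvt_dma_unmap_page() is the mirror image. A sketch of the mapping step (hypothetical helper name):

```c
#include <linux/dma-mapping.h>

/* Hypothetical: map one pinned guest page for device DMA. */
static int demo_map_pinned_page(struct device *dev, struct page *page,
				unsigned long size, dma_addr_t *dma_addr)
{
	*dma_addr = dma_map_page(dev, page, 0, size, DMA_BIDIRECTIONAL);
	if (dma_mapping_error(dev, *dma_addr))
		return -ENOMEM;
	return 0;
}
```

On the unmap side the same (dev, dma_addr, size, direction) tuple goes to dma_unmap_page() before the frames are unpinned.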
209 static struct gvt_dma *__gvt_cache_find_dma_addr(struct intel_vgpu *vgpu, in __gvt_cache_find_dma_addr() argument
212 struct rb_node *node = vgpu->dma_addr_cache.rb_node; in __gvt_cache_find_dma_addr()
228 static struct gvt_dma *__gvt_cache_find_gfn(struct intel_vgpu *vgpu, gfn_t gfn) in __gvt_cache_find_gfn() argument
230 struct rb_node *node = vgpu->gfn_cache.rb_node; in __gvt_cache_find_gfn()
246 static int __gvt_cache_add(struct intel_vgpu *vgpu, gfn_t gfn, in __gvt_cache_add() argument
256 new->vgpu = vgpu; in __gvt_cache_add()
263 link = &vgpu->gfn_cache.rb_node; in __gvt_cache_add()
274 rb_insert_color(&new->gfn_node, &vgpu->gfn_cache); in __gvt_cache_add()
278 link = &vgpu->dma_addr_cache.rb_node; in __gvt_cache_add()
289 rb_insert_color(&new->dma_addr_node, &vgpu->dma_addr_cache); in __gvt_cache_add()
291 vgpu->nr_cache_entries++; in __gvt_cache_add()
295 static void __gvt_cache_remove_entry(struct intel_vgpu *vgpu, in __gvt_cache_remove_entry() argument
298 rb_erase(&entry->gfn_node, &vgpu->gfn_cache); in __gvt_cache_remove_entry()
299 rb_erase(&entry->dma_addr_node, &vgpu->dma_addr_cache); in __gvt_cache_remove_entry()
301 vgpu->nr_cache_entries--; in __gvt_cache_remove_entry()
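__gvt_cache_find_gfn(), __gvt_cache_find_dma_addr() and __gvt_cache_add() keep every gvt_dma entry indexed twice, in vgpu->gfn_cache keyed by guest frame number and in vgpu->dma_addr_cache keyed by bus address, so either side of a translation can be looked up in O(log n). A sketch of one of the two insertions; field names beyond those visible above are hypothetical:

```c
#include <linux/rbtree.h>
#include <linux/types.h>

struct demo_dma {
	struct rb_node gfn_node;	/* in the gfn-keyed tree      */
	struct rb_node dma_addr_node;	/* in the dma_addr-keyed tree */
	unsigned long gfn;
	dma_addr_t dma_addr;
};

/* Insert into the gfn tree; the real __gvt_cache_add() repeats this
 * walk for the dma_addr tree with the other key and node. */
static void demo_insert_by_gfn(struct rb_root *root, struct demo_dma *new)
{
	struct rb_node **link = &root->rb_node, *parent = NULL;

	while (*link) {
		struct demo_dma *e = rb_entry(*link, struct demo_dma, gfn_node);

		parent = *link;
		link = new->gfn < e->gfn ? &(*link)->rb_left
					 : &(*link)->rb_right;
	}
	rb_link_node(&new->gfn_node, parent, link);
	rb_insert_color(&new->gfn_node, root);
}
```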
304 static void gvt_cache_destroy(struct intel_vgpu *vgpu) in gvt_cache_destroy() argument
310 mutex_lock(&vgpu->cache_lock); in gvt_cache_destroy()
311 node = rb_first(&vgpu->gfn_cache); in gvt_cache_destroy()
313 mutex_unlock(&vgpu->cache_lock); in gvt_cache_destroy()
317 gvt_dma_unmap_page(vgpu, dma->gfn, dma->dma_addr, dma->size); in gvt_cache_destroy()
318 __gvt_cache_remove_entry(vgpu, dma); in gvt_cache_destroy()
319 mutex_unlock(&vgpu->cache_lock); in gvt_cache_destroy()
323 static void gvt_cache_init(struct intel_vgpu *vgpu) in gvt_cache_init() argument
325 vgpu->gfn_cache = RB_ROOT; in gvt_cache_init()
326 vgpu->dma_addr_cache = RB_ROOT; in gvt_cache_init()
327 vgpu->nr_cache_entries = 0; in gvt_cache_init()
328 mutex_init(&vgpu->cache_lock); in gvt_cache_init()
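gvt_cache_destroy() drains the trees by repeatedly taking rb_first() under cache_lock and releasing the lock between entries (and once more on the empty-tree exit at line 313); each pass unmaps, unpins and erases one entry. The drain loop in isolation, with the unmap step elided:

```c
#include <linux/mutex.h>
#include <linux/rbtree.h>

/* Drain a tree one entry per lock hold, so the unmap work never keeps
 * the lock pinned across the whole cache (hypothetical helper). */
static void demo_cache_drain(struct mutex *lock, struct rb_root *root)
{
	struct rb_node *node;

	for (;;) {
		mutex_lock(lock);
		node = rb_first(root);
		if (!node) {
			mutex_unlock(lock);
			break;
		}
		/* ...dma-unmap and unpin the entry here... */
		rb_erase(node, root);
		mutex_unlock(lock);
	}
}
```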
399 static size_t intel_vgpu_reg_rw_opregion(struct intel_vgpu *vgpu, char *buf, in intel_vgpu_reg_rw_opregion() argument
404 void *base = vgpu->region[i].data; in intel_vgpu_reg_rw_opregion()
408 if (pos >= vgpu->region[i].size || iswrite) { in intel_vgpu_reg_rw_opregion()
409 gvt_vgpu_err("invalid op or offset for Intel vgpu OpRegion\n"); in intel_vgpu_reg_rw_opregion()
412 count = min(count, (size_t)(vgpu->region[i].size - pos)); in intel_vgpu_reg_rw_opregion()
418 static void intel_vgpu_reg_release_opregion(struct intel_vgpu *vgpu, in intel_vgpu_reg_release_opregion() argument
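intel_vgpu_reg_rw_opregion() is the ->rw hook of a device-specific VFIO region: it rejects writes and out-of-range offsets, clamps count to what is left of the region, and copies from the region's backing buffer. A sketch of such a read-only handler (simplified signature; the hook in the listing returns size_t):

```c
#include <linux/types.h>
#include <linux/string.h>
#include <linux/minmax.h>
#include <linux/errno.h>

/* Hypothetical read-only region handler in the style of the OpRegion hook. */
static ssize_t demo_region_rw(void *base, size_t region_size, char *buf,
			      size_t count, loff_t pos, bool iswrite)
{
	if (iswrite || pos >= region_size)
		return -EINVAL;

	count = min(count, (size_t)(region_size - pos));
	memcpy(buf, base + pos, count);
	return count;
}
```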
440 static int handle_edid_regs(struct intel_vgpu *vgpu, in handle_edid_regs() argument
462 intel_vgpu_emulate_hotplug(vgpu, true); in handle_edid_regs()
464 intel_vgpu_emulate_hotplug(vgpu, false); in handle_edid_regs()
507 static size_t intel_vgpu_reg_rw_edid(struct intel_vgpu *vgpu, char *buf, in intel_vgpu_reg_rw_edid() argument
513 struct vfio_edid_region *region = vgpu->region[i].data; in intel_vgpu_reg_rw_edid()
517 ret = handle_edid_regs(vgpu, region, buf, count, pos, iswrite); in intel_vgpu_reg_rw_edid()
529 static void intel_vgpu_reg_release_edid(struct intel_vgpu *vgpu, in intel_vgpu_reg_release_edid() argument
540 static int intel_vgpu_register_reg(struct intel_vgpu *vgpu, in intel_vgpu_register_reg() argument
547 region = krealloc(vgpu->region, in intel_vgpu_register_reg()
548 (vgpu->num_regions + 1) * sizeof(*region), in intel_vgpu_register_reg()
553 vgpu->region = region; in intel_vgpu_register_reg()
554 vgpu->region[vgpu->num_regions].type = type; in intel_vgpu_register_reg()
555 vgpu->region[vgpu->num_regions].subtype = subtype; in intel_vgpu_register_reg()
556 vgpu->region[vgpu->num_regions].ops = ops; in intel_vgpu_register_reg()
557 vgpu->region[vgpu->num_regions].size = size; in intel_vgpu_register_reg()
558 vgpu->region[vgpu->num_regions].flags = flags; in intel_vgpu_register_reg()
559 vgpu->region[vgpu->num_regions].data = data; in intel_vgpu_register_reg()
560 vgpu->num_regions++; in intel_vgpu_register_reg()
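intel_vgpu_register_reg() grows a flat array of region descriptors with krealloc() and appends the new type/subtype/ops/size/flags/data tuple; krealloc() leaves the old array intact on failure, so the error path needs no cleanup. The grow-and-append idiom, with a hypothetical descriptor type:

```c
#include <linux/slab.h>
#include <linux/types.h>

struct demo_region {		/* hypothetical descriptor */
	u32 type, subtype, flags;
	size_t size;
	void *data;
};

/* Append one descriptor to a krealloc-grown array. */
static int demo_add_region(struct demo_region **regions, int *num,
			   const struct demo_region *tmpl)
{
	struct demo_region *grown;

	grown = krealloc(*regions, (*num + 1) * sizeof(*grown), GFP_KERNEL);
	if (!grown)
		return -ENOMEM;	/* old array is still valid on failure */

	grown[*num] = *tmpl;
	*regions = grown;
	(*num)++;
	return 0;
}
```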
564 int intel_gvt_set_opregion(struct intel_vgpu *vgpu) in intel_gvt_set_opregion() argument
569 /* Each vgpu has its own opregion, although VFIO would create another in intel_gvt_set_opregion()
573 base = vgpu_opregion(vgpu)->va; in intel_gvt_set_opregion()
582 ret = intel_vgpu_register_reg(vgpu, in intel_gvt_set_opregion()
591 int intel_gvt_set_edid(struct intel_vgpu *vgpu, int port_num) in intel_gvt_set_edid() argument
593 struct intel_vgpu_port *port = intel_vgpu_port(vgpu, port_num); in intel_gvt_set_edid()
609 ret = intel_vgpu_register_reg(vgpu, in intel_gvt_set_edid()
623 struct intel_vgpu *vgpu = vfio_dev_to_vgpu(vfio_dev); in intel_vgpu_dma_unmap() local
628 mutex_lock(&vgpu->cache_lock); in intel_vgpu_dma_unmap()
630 entry = __gvt_cache_find_gfn(vgpu, iov_pfn); in intel_vgpu_dma_unmap()
634 gvt_dma_unmap_page(vgpu, entry->gfn, entry->dma_addr, in intel_vgpu_dma_unmap()
636 __gvt_cache_remove_entry(vgpu, entry); in intel_vgpu_dma_unmap()
638 mutex_unlock(&vgpu->cache_lock); in intel_vgpu_dma_unmap()
641 static bool __kvmgt_vgpu_exist(struct intel_vgpu *vgpu) in __kvmgt_vgpu_exist() argument
647 mutex_lock(&vgpu->gvt->lock); in __kvmgt_vgpu_exist()
648 for_each_active_vgpu(vgpu->gvt, itr, id) { in __kvmgt_vgpu_exist()
652 if (vgpu->vfio_device.kvm == itr->vfio_device.kvm) { in __kvmgt_vgpu_exist()
658 mutex_unlock(&vgpu->gvt->lock); in __kvmgt_vgpu_exist()
664 struct intel_vgpu *vgpu = vfio_dev_to_vgpu(vfio_dev); in intel_vgpu_open_device() local
667 if (__kvmgt_vgpu_exist(vgpu)) in intel_vgpu_open_device()
670 vgpu->track_node.track_write = kvmgt_page_track_write; in intel_vgpu_open_device()
671 vgpu->track_node.track_remove_region = kvmgt_page_track_remove_region; in intel_vgpu_open_device()
672 ret = kvm_page_track_register_notifier(vgpu->vfio_device.kvm, in intel_vgpu_open_device()
673 &vgpu->track_node); in intel_vgpu_open_device()
675 gvt_vgpu_err("KVM is required to use Intel vGPU\n"); in intel_vgpu_open_device()
679 set_bit(INTEL_VGPU_STATUS_ATTACHED, vgpu->status); in intel_vgpu_open_device()
681 debugfs_create_ulong(KVMGT_DEBUGFS_FILENAME, 0444, vgpu->debugfs, in intel_vgpu_open_device()
682 &vgpu->nr_cache_entries); in intel_vgpu_open_device()
684 intel_gvt_activate_vgpu(vgpu); in intel_vgpu_open_device()
689 static void intel_vgpu_release_msi_eventfd_ctx(struct intel_vgpu *vgpu) in intel_vgpu_release_msi_eventfd_ctx() argument
693 trigger = vgpu->msi_trigger; in intel_vgpu_release_msi_eventfd_ctx()
696 vgpu->msi_trigger = NULL; in intel_vgpu_release_msi_eventfd_ctx()
702 struct intel_vgpu *vgpu = vfio_dev_to_vgpu(vfio_dev); in intel_vgpu_close_device() local
704 intel_gvt_release_vgpu(vgpu); in intel_vgpu_close_device()
706 clear_bit(INTEL_VGPU_STATUS_ATTACHED, vgpu->status); in intel_vgpu_close_device()
708 debugfs_lookup_and_remove(KVMGT_DEBUGFS_FILENAME, vgpu->debugfs); in intel_vgpu_close_device()
710 kvm_page_track_unregister_notifier(vgpu->vfio_device.kvm, in intel_vgpu_close_device()
711 &vgpu->track_node); in intel_vgpu_close_device()
713 kvmgt_protect_table_destroy(vgpu); in intel_vgpu_close_device()
714 gvt_cache_destroy(vgpu); in intel_vgpu_close_device()
716 WARN_ON(vgpu->nr_cache_entries); in intel_vgpu_close_device()
718 vgpu->gfn_cache = RB_ROOT; in intel_vgpu_close_device()
719 vgpu->dma_addr_cache = RB_ROOT; in intel_vgpu_close_device()
721 intel_vgpu_release_msi_eventfd_ctx(vgpu); in intel_vgpu_close_device()
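intel_vgpu_open_device() and intel_vgpu_close_device() bracket a session: open rejects a second vGPU bound to the same KVM instance, registers a KVM page-track notifier (so writes to write-protected guest pages reach kvmgt_page_track_write()), sets INTEL_VGPU_STATUS_ATTACHED, publishes nr_cache_entries in debugfs and activates the vGPU; close undoes all of that and expects the DMA cache to be empty afterwards (the WARN_ON at line 716). A sketch of the notifier half; the callback signatures follow recent x86 kernels and should be treated as assumptions:

```c
#include <linux/kvm_host.h>
#include <asm/kvm_page_track.h>

/* Hypothetical callbacks; the real ones are kvmgt_page_track_write()
 * and kvmgt_page_track_remove_region(). */
static void demo_track_write(gpa_t gpa, const u8 *val, int len,
			     struct kvm_page_track_notifier_node *node)
{
}

static void demo_track_remove(gfn_t gfn, unsigned long nr_pages,
			      struct kvm_page_track_notifier_node *node)
{
}

static int demo_open_device(struct kvm *kvm,
			    struct kvm_page_track_notifier_node *node)
{
	node->track_write = demo_track_write;
	node->track_remove_region = demo_track_remove;
	return kvm_page_track_register_notifier(kvm, node);
}
```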
724 static u64 intel_vgpu_get_bar_addr(struct intel_vgpu *vgpu, int bar) in intel_vgpu_get_bar_addr() argument
729 start_lo = (*(u32 *)(vgpu->cfg_space.virtual_cfg_space + bar)) & in intel_vgpu_get_bar_addr()
731 mem_type = (*(u32 *)(vgpu->cfg_space.virtual_cfg_space + bar)) & in intel_vgpu_get_bar_addr()
736 start_hi = (*(u32 *)(vgpu->cfg_space.virtual_cfg_space in intel_vgpu_get_bar_addr()
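intel_vgpu_get_bar_addr() decodes a BAR from the virtual config space: mask the low dword with PCI_BASE_ADDRESS_MEM_MASK, and if the memory-type bits say 64-bit, fold in the following dword as the high half. The same decode on a raw config-space snapshot:

```c
#include <linux/pci_regs.h>
#include <linux/types.h>

/* Decode a (possibly 64-bit) memory BAR from a raw config-space
 * snapshot (hypothetical helper). */
static u64 demo_bar_addr(const u8 *cfg_space, int bar)
{
	u32 lo = *(const u32 *)(cfg_space + bar);
	u64 addr = lo & PCI_BASE_ADDRESS_MEM_MASK;

	if ((lo & PCI_BASE_ADDRESS_MEM_TYPE_MASK) ==
	    PCI_BASE_ADDRESS_MEM_TYPE_64)
		addr |= (u64)*(const u32 *)(cfg_space + bar + 4) << 32;

	return addr;
}
```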
751 static int intel_vgpu_bar_rw(struct intel_vgpu *vgpu, int bar, u64 off, in intel_vgpu_bar_rw() argument
754 u64 bar_start = intel_vgpu_get_bar_addr(vgpu, bar); in intel_vgpu_bar_rw()
758 ret = intel_vgpu_emulate_mmio_write(vgpu, in intel_vgpu_bar_rw()
761 ret = intel_vgpu_emulate_mmio_read(vgpu, in intel_vgpu_bar_rw()
766 static inline bool intel_vgpu_in_aperture(struct intel_vgpu *vgpu, u64 off) in intel_vgpu_in_aperture() argument
768 return off >= vgpu_aperture_offset(vgpu) && in intel_vgpu_in_aperture()
769 off < vgpu_aperture_offset(vgpu) + vgpu_aperture_sz(vgpu); in intel_vgpu_in_aperture()
772 static int intel_vgpu_aperture_rw(struct intel_vgpu *vgpu, u64 off, in intel_vgpu_aperture_rw() argument
777 if (!intel_vgpu_in_aperture(vgpu, off) || in intel_vgpu_aperture_rw()
778 !intel_vgpu_in_aperture(vgpu, off + count)) { in intel_vgpu_aperture_rw()
783 aperture_va = io_mapping_map_wc(&vgpu->gvt->gt->ggtt->iomap, in intel_vgpu_aperture_rw()
799 static ssize_t intel_vgpu_rw(struct intel_vgpu *vgpu, char *buf, in intel_vgpu_rw() argument
807 if (index >= VFIO_PCI_NUM_REGIONS + vgpu->num_regions) { in intel_vgpu_rw()
815 ret = intel_vgpu_emulate_cfg_write(vgpu, pos, in intel_vgpu_rw()
818 ret = intel_vgpu_emulate_cfg_read(vgpu, pos, in intel_vgpu_rw()
822 ret = intel_vgpu_bar_rw(vgpu, PCI_BASE_ADDRESS_0, pos, in intel_vgpu_rw()
826 ret = intel_vgpu_aperture_rw(vgpu, pos, buf, count, is_write); in intel_vgpu_rw()
836 if (index >= VFIO_PCI_NUM_REGIONS + vgpu->num_regions) in intel_vgpu_rw()
840 return vgpu->region[index].ops->rw(vgpu, buf, count, in intel_vgpu_rw()
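intel_vgpu_rw() is the one dispatcher behind all reads and writes: it splits the file offset into a region index plus an in-region offset, then routes config space to the cfg emulator, BAR0 to the MMIO emulator, BAR2 to the aperture, and indices past the fixed VFIO PCI regions to the matching registered region's ->rw hook. A sketch of the offset split; the 40-bit shift is the conventional VFIO encoding and an assumption here:

```c
#include <linux/vfio.h>
#include <linux/errno.h>

#define DEMO_VFIO_OFFSET_SHIFT	40	/* conventional encoding; assumed */

/* Split a vfio file offset into (region index, in-region offset) and
 * bounds-check against the fixed regions plus any device-specific ones. */
static int demo_decode_offset(u64 ppos, unsigned int num_extra,
			      unsigned int *index, u64 *offset)
{
	*index = ppos >> DEMO_VFIO_OFFSET_SHIFT;
	*offset = ppos & (((u64)1 << DEMO_VFIO_OFFSET_SHIFT) - 1);

	if (*index >= VFIO_PCI_NUM_REGIONS + num_extra)
		return -EINVAL;
	return 0;
}
```

A switch on the decoded index then picks the backend, exactly as the cases at lines 815-840 do.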
847 static bool gtt_entry(struct intel_vgpu *vgpu, loff_t *ppos) in gtt_entry() argument
850 struct intel_gvt *gvt = vgpu->gvt; in gtt_entry()
858 intel_vgpu_get_bar_gpa(vgpu, PCI_BASE_ADDRESS_0); in gtt_entry()
868 struct intel_vgpu *vgpu = vfio_dev_to_vgpu(vfio_dev); in intel_vgpu_read() local
877 gtt_entry(vgpu, ppos)) { in intel_vgpu_read()
880 ret = intel_vgpu_rw(vgpu, (char *)&val, sizeof(val), in intel_vgpu_read()
892 ret = intel_vgpu_rw(vgpu, (char *)&val, sizeof(val), in intel_vgpu_read()
904 ret = intel_vgpu_rw(vgpu, (char *)&val, sizeof(val), in intel_vgpu_read()
916 ret = intel_vgpu_rw(vgpu, &val, sizeof(val), ppos, in intel_vgpu_read()
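intel_vgpu_read() (and intel_vgpu_write(), below) chops the user buffer into naturally aligned 8/4/2/1-byte accesses; the gtt_entry() special case forces offsets inside BAR0's GTT range to full 8-byte accesses so a GTT entry is never torn. A sketch of the chunking loop, with the per-access emulation abstracted behind a hypothetical callback:

```c
#include <linux/uaccess.h>
#include <linux/types.h>

/* Issue naturally sized accesses, largest first. */
static ssize_t demo_chunked_read(char __user *ubuf, size_t count, u64 *ppos,
				 ssize_t (*one)(void *val, size_t sz, u64 pos))
{
	ssize_t done = 0;

	while (count) {
		u64 val = 0;
		size_t sz = (count >= 8 && !(*ppos % 8)) ? 8 :
			    (count >= 4 && !(*ppos % 4)) ? 4 :
			    (count >= 2 && !(*ppos % 2)) ? 2 : 1;

		if (one(&val, sz, *ppos) != sz)
			return -EFAULT;
		if (copy_to_user(ubuf + done, &val, sz))
			return -EFAULT;

		*ppos += sz;
		done += sz;
		count -= sz;
	}
	return done;
}
```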
943 struct intel_vgpu *vgpu = vfio_dev_to_vgpu(vfio_dev); in intel_vgpu_write() local
952 gtt_entry(vgpu, ppos)) { in intel_vgpu_write()
958 ret = intel_vgpu_rw(vgpu, (char *)&val, sizeof(val), in intel_vgpu_write()
970 ret = intel_vgpu_rw(vgpu, (char *)&val, sizeof(val), in intel_vgpu_write()
982 ret = intel_vgpu_rw(vgpu, (char *)&val, in intel_vgpu_write()
994 ret = intel_vgpu_rw(vgpu, &val, sizeof(val), in intel_vgpu_write()
1016 struct intel_vgpu *vgpu = vfio_dev_to_vgpu(vfio_dev); in intel_vgpu_mmap() local
1040 if (!intel_vgpu_in_aperture(vgpu, req_start)) in intel_vgpu_mmap()
1043 vgpu_aperture_offset(vgpu) + vgpu_aperture_sz(vgpu)) in intel_vgpu_mmap()
1046 pgoff = (gvt_aperture_pa_base(vgpu->gvt) >> PAGE_SHIFT) + pgoff; in intel_vgpu_mmap()
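intel_vgpu_mmap() only permits mapping the aperture: it checks the request against the vGPU's aperture slice and rebases the page offset onto the host aperture's physical base; a typical implementation then finishes with remap_pfn_range(), which is assumed in this sketch:

```c
#include <linux/mm.h>
#include <linux/pgtable.h>

/* Remap an aperture slice into userspace (hypothetical helper;
 * remap_pfn_range() as the final step is an assumption). */
static int demo_mmap_aperture(struct vm_area_struct *vma,
			      unsigned long aperture_pa_base)
{
	unsigned long size = vma->vm_end - vma->vm_start;
	unsigned long pfn = (aperture_pa_base >> PAGE_SHIFT) + vma->vm_pgoff;

	vma->vm_page_prot = pgprot_noncached(vma->vm_page_prot);
	return remap_pfn_range(vma, vma->vm_start, pfn, size,
			       vma->vm_page_prot);
}
```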
1051 static int intel_vgpu_get_irq_count(struct intel_vgpu *vgpu, int type) in intel_vgpu_get_irq_count() argument
1059 static int intel_vgpu_set_intx_mask(struct intel_vgpu *vgpu, in intel_vgpu_set_intx_mask() argument
1067 static int intel_vgpu_set_intx_unmask(struct intel_vgpu *vgpu, in intel_vgpu_set_intx_unmask() argument
1074 static int intel_vgpu_set_intx_trigger(struct intel_vgpu *vgpu, in intel_vgpu_set_intx_trigger() argument
1081 static int intel_vgpu_set_msi_trigger(struct intel_vgpu *vgpu, in intel_vgpu_set_msi_trigger() argument
1095 vgpu->msi_trigger = trigger; in intel_vgpu_set_msi_trigger()
1097 intel_vgpu_release_msi_eventfd_ctx(vgpu); in intel_vgpu_set_msi_trigger()
1102 static int intel_vgpu_set_irqs(struct intel_vgpu *vgpu, u32 flags, in intel_vgpu_set_irqs() argument
1106 int (*func)(struct intel_vgpu *vgpu, unsigned int index, in intel_vgpu_set_irqs()
1140 return func(vgpu, index, start, count, flags, data); in intel_vgpu_set_irqs()
1146 struct intel_vgpu *vgpu = vfio_dev_to_vgpu(vfio_dev); in intel_vgpu_ioctl() local
1149 gvt_dbg_core("vgpu%d ioctl, cmd: %d\n", vgpu->id, cmd); in intel_vgpu_ioctl()
1165 vgpu->num_regions; in intel_vgpu_ioctl()
1191 info.size = vgpu->gvt->device_info.cfg_space_size; in intel_vgpu_ioctl()
1197 info.size = vgpu->cfg_space.bar[info.index].size; in intel_vgpu_ioctl()
1217 info.size = gvt_aperture_sz(vgpu->gvt); in intel_vgpu_ioctl()
1229 PAGE_ALIGN(vgpu_aperture_offset(vgpu)); in intel_vgpu_ioctl()
1230 sparse->areas[0].size = vgpu_aperture_sz(vgpu); in intel_vgpu_ioctl()
1256 vgpu->num_regions) in intel_vgpu_ioctl()
1261 vgpu->num_regions); in intel_vgpu_ioctl()
1267 info.size = vgpu->region[i].size; in intel_vgpu_ioctl()
1268 info.flags = vgpu->region[i].flags; in intel_vgpu_ioctl()
1270 cap_type.type = vgpu->region[i].type; in intel_vgpu_ioctl()
1271 cap_type.subtype = vgpu->region[i].subtype; in intel_vgpu_ioctl()
1343 info.count = intel_vgpu_get_irq_count(vgpu, info.index); in intel_vgpu_ioctl()
1365 int max = intel_vgpu_get_irq_count(vgpu, hdr.index); in intel_vgpu_ioctl()
1381 ret = intel_vgpu_set_irqs(vgpu, hdr.flags, hdr.index, in intel_vgpu_ioctl()
1387 intel_gvt_reset_vgpu(vgpu); in intel_vgpu_ioctl()
1400 ret = intel_vgpu_query_plane(vgpu, &dmabuf); in intel_vgpu_ioctl()
1411 return intel_vgpu_get_dmabuf(vgpu, dmabuf_id); in intel_vgpu_ioctl()
1421 struct intel_vgpu *vgpu = dev_get_drvdata(dev); in vgpu_id_show() local
1423 return sprintf(buf, "%d\n", vgpu->id); in vgpu_id_show()
1446 struct intel_vgpu *vgpu = vfio_dev_to_vgpu(vfio_dev); in intel_vgpu_init_dev() local
1451 vgpu->gvt = kdev_to_i915(mdev->type->parent->dev)->gvt; in intel_vgpu_init_dev()
1452 ret = intel_gvt_create_vgpu(vgpu, type->conf); in intel_vgpu_init_dev()
1456 kvmgt_protect_table_init(vgpu); in intel_vgpu_init_dev()
1457 gvt_cache_init(vgpu); in intel_vgpu_init_dev()
1464 struct intel_vgpu *vgpu = vfio_dev_to_vgpu(vfio_dev); in intel_vgpu_release_dev() local
1466 intel_gvt_destroy_vgpu(vgpu); in intel_vgpu_release_dev()
1487 struct intel_vgpu *vgpu; in intel_vgpu_probe() local
1490 vgpu = vfio_alloc_device(intel_vgpu, vfio_device, &mdev->dev, in intel_vgpu_probe()
1492 if (IS_ERR(vgpu)) { in intel_vgpu_probe()
1493 gvt_err("failed to create intel vgpu: %ld\n", PTR_ERR(vgpu)); in intel_vgpu_probe()
1494 return PTR_ERR(vgpu); in intel_vgpu_probe()
1497 dev_set_drvdata(&mdev->dev, vgpu); in intel_vgpu_probe()
1498 ret = vfio_register_emulated_iommu_dev(&vgpu->vfio_device); in intel_vgpu_probe()
1507 vfio_put_device(&vgpu->vfio_device); in intel_vgpu_probe()
1513 struct intel_vgpu *vgpu = dev_get_drvdata(&mdev->dev); in intel_vgpu_remove() local
1515 vfio_unregister_group_dev(&vgpu->vfio_device); in intel_vgpu_remove()
1516 vfio_put_device(&vgpu->vfio_device); in intel_vgpu_remove()
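intel_vgpu_probe()/intel_vgpu_remove() show the modern VFIO lifecycle for an mdev: vfio_alloc_device() allocates the container with the driver's ops, vfio_register_emulated_iommu_dev() publishes it ("emulated" because KVMGT pins and maps guest pages itself rather than relying on a physical IOMMU), and vfio_put_device() drops the allocation reference on both the error path and removal. A stripped-down probe in the same shape, with hypothetical demo_* names:

```c
#include <linux/vfio.h>
#include <linux/mdev.h>
#include <linux/err.h>

struct demo_vgpu {
	struct vfio_device vfio_device;	/* must be embedded, not pointed to */
	/* driver state ... */
};

static const struct vfio_device_ops demo_ops;	/* init/release/rw/mmap/... */

static int demo_probe(struct mdev_device *mdev)
{
	struct demo_vgpu *vgpu;
	int ret;

	vgpu = vfio_alloc_device(demo_vgpu, vfio_device, &mdev->dev,
				 &demo_ops);
	if (IS_ERR(vgpu))
		return PTR_ERR(vgpu);

	dev_set_drvdata(&mdev->dev, vgpu);
	ret = vfio_register_emulated_iommu_dev(&vgpu->vfio_device);
	if (ret)
		vfio_put_device(&vgpu->vfio_device);
	return ret;
}
```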
1621 void intel_vgpu_detach_regions(struct intel_vgpu *vgpu) in intel_vgpu_detach_regions() argument
1625 if (!vgpu->region) in intel_vgpu_detach_regions()
1628 for (i = 0; i < vgpu->num_regions; i++) in intel_vgpu_detach_regions()
1629 if (vgpu->region[i].ops->release) in intel_vgpu_detach_regions()
1630 vgpu->region[i].ops->release(vgpu, in intel_vgpu_detach_regions()
1631 &vgpu->region[i]); in intel_vgpu_detach_regions()
1632 vgpu->num_regions = 0; in intel_vgpu_detach_regions()
1633 kfree(vgpu->region); in intel_vgpu_detach_regions()
1634 vgpu->region = NULL; in intel_vgpu_detach_regions()
1637 int intel_gvt_dma_map_guest_page(struct intel_vgpu *vgpu, unsigned long gfn, in intel_gvt_dma_map_guest_page() argument
1643 if (!test_bit(INTEL_VGPU_STATUS_ATTACHED, vgpu->status)) in intel_gvt_dma_map_guest_page()
1646 mutex_lock(&vgpu->cache_lock); in intel_gvt_dma_map_guest_page()
1648 entry = __gvt_cache_find_gfn(vgpu, gfn); in intel_gvt_dma_map_guest_page()
1650 ret = gvt_dma_map_page(vgpu, gfn, dma_addr, size); in intel_gvt_dma_map_guest_page()
1654 ret = __gvt_cache_add(vgpu, gfn, *dma_addr, size); in intel_gvt_dma_map_guest_page()
1659 gvt_dma_unmap_page(vgpu, gfn, entry->dma_addr, entry->size); in intel_gvt_dma_map_guest_page()
1660 __gvt_cache_remove_entry(vgpu, entry); in intel_gvt_dma_map_guest_page()
1662 ret = gvt_dma_map_page(vgpu, gfn, dma_addr, size); in intel_gvt_dma_map_guest_page()
1666 ret = __gvt_cache_add(vgpu, gfn, *dma_addr, size); in intel_gvt_dma_map_guest_page()
1674 mutex_unlock(&vgpu->cache_lock); in intel_gvt_dma_map_guest_page()
1678 gvt_dma_unmap_page(vgpu, gfn, *dma_addr, size); in intel_gvt_dma_map_guest_page()
1680 mutex_unlock(&vgpu->cache_lock); in intel_gvt_dma_map_guest_page()
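intel_gvt_dma_map_guest_page() is the cache's front door: under cache_lock a gfn miss maps and inserts, a hit whose size differs is unmapped and remapped, and everything else reuses the cached bus address; __gvt_dma_release() at line 1707 is the matching release that unmaps an entry and erases it from both trees once its last user drops it. A sketch of the reuse arm under that (assumed) kref scheme:

```c
#include <linux/kref.h>
#include <linux/types.h>

struct demo_entry {		/* hypothetical cache entry */
	struct kref ref;
	unsigned long gfn, size;
	dma_addr_t dma_addr;
};

/* Reuse a cached mapping only when gfn and size both match, taking one
 * more reference; the caller later pairs this with a kref_put() whose
 * release unmaps the page and erases the entry from both trees. */
static bool demo_try_reuse(struct demo_entry *entry, unsigned long gfn,
			   unsigned long size, dma_addr_t *dma_addr)
{
	if (!entry || entry->gfn != gfn || entry->size != size)
		return false;

	kref_get(&entry->ref);
	*dma_addr = entry->dma_addr;
	return true;
}
```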
1684 int intel_gvt_dma_pin_guest_page(struct intel_vgpu *vgpu, dma_addr_t dma_addr) in intel_gvt_dma_pin_guest_page() argument
1689 if (!test_bit(INTEL_VGPU_STATUS_ATTACHED, vgpu->status)) in intel_gvt_dma_pin_guest_page()
1692 mutex_lock(&vgpu->cache_lock); in intel_gvt_dma_pin_guest_page()
1693 entry = __gvt_cache_find_dma_addr(vgpu, dma_addr); in intel_gvt_dma_pin_guest_page()
1698 mutex_unlock(&vgpu->cache_lock); in intel_gvt_dma_pin_guest_page()
1707 gvt_dma_unmap_page(entry->vgpu, entry->gfn, entry->dma_addr, in __gvt_dma_release()
1709 __gvt_cache_remove_entry(entry->vgpu, entry); in __gvt_dma_release()
1712 void intel_gvt_dma_unmap_guest_page(struct intel_vgpu *vgpu, in intel_gvt_dma_unmap_guest_page() argument
1717 if (!test_bit(INTEL_VGPU_STATUS_ATTACHED, vgpu->status)) in intel_gvt_dma_unmap_guest_page()
1720 mutex_lock(&vgpu->cache_lock); in intel_gvt_dma_unmap_guest_page()
1721 entry = __gvt_cache_find_dma_addr(vgpu, dma_addr); in intel_gvt_dma_unmap_guest_page()
1724 mutex_unlock(&vgpu->cache_lock); in intel_gvt_dma_unmap_guest_page()
1746 struct intel_vgpu *vgpu; in intel_gvt_test_and_emulate_vblank() local
1750 idr_for_each_entry((&(gvt)->vgpu_idr), (vgpu), (id)) { in intel_gvt_test_and_emulate_vblank()
1753 if (test_bit(INTEL_VGPU_STATUS_ACTIVE, vgpu->status)) in intel_gvt_test_and_emulate_vblank()
1754 intel_vgpu_emulate_vblank(vgpu); in intel_gvt_test_and_emulate_vblank()
1854 struct intel_vgpu *vgpu; in intel_gvt_init_device() local
1913 vgpu = intel_gvt_create_idle_vgpu(gvt); in intel_gvt_init_device()
1914 if (IS_ERR(vgpu)) { in intel_gvt_init_device()
1915 ret = PTR_ERR(vgpu); in intel_gvt_init_device()
1916 gvt_err("failed to create idle vgpu\n"); in intel_gvt_init_device()
1919 gvt->idle_vgpu = vgpu; in intel_gvt_init_device()