Lines Matching full:vgpu

57 bool intel_gvt_ggtt_validate_range(struct intel_vgpu *vgpu, u64 addr, u32 size)  in intel_gvt_ggtt_validate_range()  argument
60 return vgpu_gmadr_is_valid(vgpu, addr); in intel_gvt_ggtt_validate_range()
62 if (vgpu_gmadr_is_aperture(vgpu, addr) && in intel_gvt_ggtt_validate_range()
63 vgpu_gmadr_is_aperture(vgpu, addr + size - 1)) in intel_gvt_ggtt_validate_range()
65 else if (vgpu_gmadr_is_hidden(vgpu, addr) && in intel_gvt_ggtt_validate_range()
66 vgpu_gmadr_is_hidden(vgpu, addr + size - 1)) in intel_gvt_ggtt_validate_range()
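Read together, these fragments give the whole range check: a zero-length range degenerates to a single-address test, and a non-empty range is valid only if both endpoints sit in the same region. A minimal sketch, with the glue around the listed lines (the size == 0 shortcut and the final return) filled in as assumptions:

bool intel_gvt_ggtt_validate_range(struct intel_vgpu *vgpu, u64 addr, u32 size)
{
	if (size == 0)
		return vgpu_gmadr_is_valid(vgpu, addr);

	/* both ends must land in the aperture, or both in the hidden range */
	if (vgpu_gmadr_is_aperture(vgpu, addr) &&
	    vgpu_gmadr_is_aperture(vgpu, addr + size - 1))
		return true;
	else if (vgpu_gmadr_is_hidden(vgpu, addr) &&
		 vgpu_gmadr_is_hidden(vgpu, addr + size - 1))
		return true;

	return false;
}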
75 int intel_gvt_ggtt_gmadr_g2h(struct intel_vgpu *vgpu, u64 g_addr, u64 *h_addr) in intel_gvt_ggtt_gmadr_g2h() argument
77 struct drm_i915_private *i915 = vgpu->gvt->gt->i915; in intel_gvt_ggtt_gmadr_g2h()
79 if (drm_WARN(&i915->drm, !vgpu_gmadr_is_valid(vgpu, g_addr), in intel_gvt_ggtt_gmadr_g2h()
83 if (vgpu_gmadr_is_aperture(vgpu, g_addr)) in intel_gvt_ggtt_gmadr_g2h()
84 *h_addr = vgpu_aperture_gmadr_base(vgpu) in intel_gvt_ggtt_gmadr_g2h()
85 + (g_addr - vgpu_aperture_offset(vgpu)); in intel_gvt_ggtt_gmadr_g2h()
87 *h_addr = vgpu_hidden_gmadr_base(vgpu) in intel_gvt_ggtt_gmadr_g2h()
88 + (g_addr - vgpu_hidden_offset(vgpu)); in intel_gvt_ggtt_gmadr_g2h()
93 int intel_gvt_ggtt_gmadr_h2g(struct intel_vgpu *vgpu, u64 h_addr, u64 *g_addr) in intel_gvt_ggtt_gmadr_h2g() argument
95 struct drm_i915_private *i915 = vgpu->gvt->gt->i915; in intel_gvt_ggtt_gmadr_h2g()
97 if (drm_WARN(&i915->drm, !gvt_gmadr_is_valid(vgpu->gvt, h_addr), in intel_gvt_ggtt_gmadr_h2g()
101 if (gvt_gmadr_is_aperture(vgpu->gvt, h_addr)) in intel_gvt_ggtt_gmadr_h2g()
102 *g_addr = vgpu_aperture_gmadr_base(vgpu) in intel_gvt_ggtt_gmadr_h2g()
103 + (h_addr - gvt_aperture_gmadr_base(vgpu->gvt)); in intel_gvt_ggtt_gmadr_h2g()
105 *g_addr = vgpu_hidden_gmadr_base(vgpu) in intel_gvt_ggtt_gmadr_h2g()
106 + (h_addr - gvt_hidden_gmadr_base(vgpu->gvt)); in intel_gvt_ggtt_gmadr_h2g()
110 int intel_gvt_ggtt_index_g2h(struct intel_vgpu *vgpu, unsigned long g_index, in intel_gvt_ggtt_index_g2h() argument
116 ret = intel_gvt_ggtt_gmadr_g2h(vgpu, g_index << I915_GTT_PAGE_SHIFT, in intel_gvt_ggtt_index_g2h()
125 int intel_gvt_ggtt_h2g_index(struct intel_vgpu *vgpu, unsigned long h_index, in intel_gvt_ggtt_h2g_index() argument
131 ret = intel_gvt_ggtt_gmadr_h2g(vgpu, h_index << I915_GTT_PAGE_SHIFT, in intel_gvt_ggtt_h2g_index()
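Both index helpers scale a GTT page index up to a byte address (the << I915_GTT_PAGE_SHIFT visible above), run it through the gmadr translation, and scale the result back down. The tail of intel_gvt_ggtt_index_g2h() is not in the listing; a hedged sketch of the assumed remainder:

	/* after the intel_gvt_ggtt_gmadr_g2h() call shown above (assumed tail) */
	if (ret)
		return ret;

	*h_index = h_addr >> I915_GTT_PAGE_SHIFT;	/* back to a GTT page index */
	return 0;

intel_gvt_ggtt_h2g_index() presumably mirrors this around intel_gvt_ggtt_gmadr_h2g().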
304 struct intel_vgpu *vgpu) in gtt_get_entry64() argument
306 const struct intel_gvt_device_info *info = &vgpu->gvt->device_info; in gtt_get_entry64()
313 ret = intel_gvt_read_gpa(vgpu, gpa + in gtt_get_entry64()
319 e->val64 = read_pte64(vgpu->gvt->gt->ggtt, index); in gtt_get_entry64()
329 struct intel_vgpu *vgpu) in gtt_set_entry64() argument
331 const struct intel_gvt_device_info *info = &vgpu->gvt->device_info; in gtt_set_entry64()
338 ret = intel_gvt_write_gpa(vgpu, gpa + in gtt_set_entry64()
344 write_pte64(vgpu->gvt->gt->ggtt, index, e->val64); in gtt_set_entry64()
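gtt_get_entry64() serves three cases that callers select with the (pt, hypervisor_access, gpa) arguments seen in the later pte_ops calls: a guest page-table entry read from guest physical memory, a live host GGTT entry read through read_pte64(), and a shadow page table kept in host memory. A hedged sketch of the read side; the branch structure is inferred, not shown above:

static int gtt_get_entry64(void *pt, struct intel_gvt_gtt_entry *e,
			   unsigned long index, bool hypervisor_access,
			   unsigned long gpa, struct intel_vgpu *vgpu)
{
	const struct intel_gvt_device_info *info = &vgpu->gvt->device_info;
	int ret;

	if (hypervisor_access) {
		/* guest PTE: fetch it from guest physical memory */
		ret = intel_gvt_read_gpa(vgpu, gpa +
				(index << info->gtt_entry_size_shift),
				&e->val64, 8);
		if (ret)
			return ret;
	} else if (!pt) {
		/* no page supplied: read the live host GGTT entry */
		e->val64 = read_pte64(vgpu->gvt->gt->ggtt, index);
	} else {
		/* shadow page table page held in host memory */
		e->val64 = *((u64 *)pt + index);
	}
	return 0;
}

gtt_set_entry64() presumably mirrors this with intel_gvt_write_gpa() and write_pte64().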
554 const struct intel_gvt_gtt_pte_ops *pte_ops = mm->vgpu->gvt->gtt.pte_ops; in _ppgtt_get_root_entry()
561 entry, index, false, 0, mm->vgpu); in _ppgtt_get_root_entry()
581 const struct intel_gvt_gtt_pte_ops *pte_ops = mm->vgpu->gvt->gtt.pte_ops; in _ppgtt_set_root_entry()
585 entry, index, false, 0, mm->vgpu); in _ppgtt_set_root_entry()
597 const struct intel_gvt_gtt_pte_ops *pte_ops = mm->vgpu->gvt->gtt.pte_ops; in ggtt_get_guest_entry()
603 false, 0, mm->vgpu); in ggtt_get_guest_entry()
609 const struct intel_gvt_gtt_pte_ops *pte_ops = mm->vgpu->gvt->gtt.pte_ops; in ggtt_set_guest_entry()
614 false, 0, mm->vgpu); in ggtt_set_guest_entry()
620 const struct intel_gvt_gtt_pte_ops *pte_ops = mm->vgpu->gvt->gtt.pte_ops; in ggtt_get_host_entry()
624 pte_ops->get_entry(NULL, entry, index, false, 0, mm->vgpu); in ggtt_get_host_entry()
630 const struct intel_gvt_gtt_pte_ops *pte_ops = mm->vgpu->gvt->gtt.pte_ops; in ggtt_set_host_entry()
635 if (vgpu_gmadr_is_aperture(mm->vgpu, index << I915_GTT_PAGE_SHIFT)) { in ggtt_set_host_entry()
636 offset -= (vgpu_aperture_gmadr_base(mm->vgpu) >> PAGE_SHIFT); in ggtt_set_host_entry()
638 } else if (vgpu_gmadr_is_hidden(mm->vgpu, index << I915_GTT_PAGE_SHIFT)) { in ggtt_set_host_entry()
639 offset -= (vgpu_hidden_gmadr_base(mm->vgpu) >> PAGE_SHIFT); in ggtt_set_host_entry()
643 pte_ops->set_entry(NULL, entry, index, false, 0, mm->vgpu); in ggtt_set_host_entry()
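ggtt_set_host_entry() does two things: it records a software copy of the PTE in the per-region arrays allocated at lines 1924/1931 below (host_ggtt_aperture/host_ggtt_hidden), and then writes the entry into the real GGTT by calling set_entry with a NULL page-table pointer. A sketch with the bookkeeping stores, which the listing elides, filled in as assumptions:

static void ggtt_set_host_entry(struct intel_vgpu_mm *mm,
				struct intel_gvt_gtt_entry *entry,
				unsigned long index)
{
	const struct intel_gvt_gtt_pte_ops *pte_ops = mm->vgpu->gvt->gtt.pte_ops;
	unsigned long offset = index;

	if (vgpu_gmadr_is_aperture(mm->vgpu, index << I915_GTT_PAGE_SHIFT)) {
		offset -= (vgpu_aperture_gmadr_base(mm->vgpu) >> PAGE_SHIFT);
		mm->ggtt_mm.host_ggtt_aperture[offset] = entry->val64;	/* assumed */
	} else if (vgpu_gmadr_is_hidden(mm->vgpu, index << I915_GTT_PAGE_SHIFT)) {
		offset -= (vgpu_hidden_gmadr_base(mm->vgpu) >> PAGE_SHIFT);
		mm->ggtt_mm.host_ggtt_hidden[offset] = entry->val64;	/* assumed */
	}

	/* NULL pt selects the "write the live host GGTT" path */
	pte_ops->set_entry(NULL, entry, index, false, 0, mm->vgpu);
}

Those saved copies are what intel_gvt_restore_ggtt() (lines 2831 onward) replays into the hardware GGTT.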
655 struct intel_gvt *gvt = spt->vgpu->gvt; in ppgtt_spt_get_entry()
666 spt->vgpu); in ppgtt_spt_get_entry()
684 struct intel_gvt *gvt = spt->vgpu->gvt; in ppgtt_spt_set_entry()
695 spt->vgpu); in ppgtt_spt_set_entry()
736 static int detach_oos_page(struct intel_vgpu *vgpu,
741 struct device *kdev = spt->vgpu->gvt->gt->i915->drm.dev; in ppgtt_free_spt()
743 trace_spt_free(spt->vgpu->id, spt, spt->guest_page.type); in ppgtt_free_spt()
748 radix_tree_delete(&spt->vgpu->gtt.spt_tree, spt->shadow_page.mfn); in ppgtt_free_spt()
752 detach_oos_page(spt->vgpu, spt->guest_page.oos_page); in ppgtt_free_spt()
754 intel_vgpu_unregister_page_track(spt->vgpu, spt->guest_page.gfn); in ppgtt_free_spt()
761 static void ppgtt_free_all_spt(struct intel_vgpu *vgpu) in ppgtt_free_all_spt() argument
769 radix_tree_for_each_slot(slot, &vgpu->gtt.spt_tree, &iter, 0) { in ppgtt_free_all_spt()
802 struct intel_vgpu *vgpu, unsigned long gfn) in intel_vgpu_find_spt_by_gfn() argument
806 track = intel_vgpu_find_page_track(vgpu, gfn); in intel_vgpu_find_spt_by_gfn()
815 struct intel_vgpu *vgpu, unsigned long mfn) in intel_vgpu_find_spt_by_mfn() argument
817 return radix_tree_lookup(&vgpu->gtt.spt_tree, mfn); in intel_vgpu_find_spt_by_mfn()
824 struct intel_vgpu *vgpu, enum intel_gvt_gtt_type type) in ppgtt_alloc_spt() argument
826 struct device *kdev = vgpu->gvt->gt->i915->drm.dev; in ppgtt_alloc_spt()
834 if (reclaim_one_ppgtt_mm(vgpu->gvt)) in ppgtt_alloc_spt()
841 spt->vgpu = vgpu; in ppgtt_alloc_spt()
859 ret = radix_tree_insert(&vgpu->gtt.spt_tree, spt->shadow_page.mfn, spt); in ppgtt_alloc_spt()
874 struct intel_vgpu *vgpu, enum intel_gvt_gtt_type type, in ppgtt_alloc_spt_gfn() argument
880 spt = ppgtt_alloc_spt(vgpu, type); in ppgtt_alloc_spt_gfn()
887 ret = intel_vgpu_register_page_track(vgpu, gfn, in ppgtt_alloc_spt_gfn()
898 trace_spt_alloc(vgpu->id, spt, type, spt->shadow_page.mfn, gfn); in ppgtt_alloc_spt_gfn()
904 ((spt)->vgpu->gvt->device_info.gtt_entry_size_shift)
913 spt->vgpu->gvt->gtt.pte_ops->test_present(e))
919 spt->vgpu->gvt->gtt.pte_ops->test_present(e))
930 trace_spt_refcount(spt->vgpu->id, "inc", spt, v, (v + 1)); in ppgtt_get_spt()
938 trace_spt_refcount(spt->vgpu->id, "dec", spt, v, (v - 1)); in ppgtt_put_spt()
944 static int ppgtt_invalidate_spt_by_shadow_entry(struct intel_vgpu *vgpu, in ppgtt_invalidate_spt_by_shadow_entry() argument
947 struct drm_i915_private *i915 = vgpu->gvt->gt->i915; in ppgtt_invalidate_spt_by_shadow_entry()
948 const struct intel_gvt_gtt_pte_ops *ops = vgpu->gvt->gtt.pte_ops; in ppgtt_invalidate_spt_by_shadow_entry()
969 vgpu->gtt.scratch_pt[cur_pt_type].page_mfn) in ppgtt_invalidate_spt_by_shadow_entry()
972 s = intel_vgpu_find_spt_by_mfn(vgpu, ops->get_pfn(e)); in ppgtt_invalidate_spt_by_shadow_entry()
984 struct intel_vgpu *vgpu = spt->vgpu; in ppgtt_invalidate_pte() local
985 const struct intel_gvt_gtt_pte_ops *ops = vgpu->gvt->gtt.pte_ops; in ppgtt_invalidate_pte()
993 if (!pfn || pfn == vgpu->gtt.scratch_pt[type].page_mfn) in ppgtt_invalidate_pte()
996 intel_gvt_dma_unmap_guest_page(vgpu, pfn << PAGE_SHIFT); in ppgtt_invalidate_pte()
1001 struct intel_vgpu *vgpu = spt->vgpu; in ppgtt_invalidate_spt() local
1006 trace_spt_change(spt->vgpu->id, "die", spt, in ppgtt_invalidate_spt()
1033 spt->vgpu, &e); in ppgtt_invalidate_spt()
1042 trace_spt_change(spt->vgpu->id, "release", spt, in ppgtt_invalidate_spt()
1052 static bool vgpu_ips_enabled(struct intel_vgpu *vgpu) in vgpu_ips_enabled() argument
1054 struct drm_i915_private *dev_priv = vgpu->gvt->gt->i915; in vgpu_ips_enabled()
1057 u32 ips = vgpu_vreg_t(vgpu, GEN8_GAMW_ECO_DEV_RW_IA) & in vgpu_ips_enabled()
1071 struct intel_vgpu *vgpu, struct intel_gvt_gtt_entry *we) in ppgtt_populate_spt_by_guest_entry() argument
1073 const struct intel_gvt_gtt_pte_ops *ops = vgpu->gvt->gtt.pte_ops; in ppgtt_populate_spt_by_guest_entry()
1081 ips = vgpu_ips_enabled(vgpu) && ops->test_ips(we); in ppgtt_populate_spt_by_guest_entry()
1083 spt = intel_vgpu_find_spt_by_gfn(vgpu, ops->get_pfn(we)); in ppgtt_populate_spt_by_guest_entry()
1106 spt = ppgtt_alloc_spt_gfn(vgpu, type, ops->get_pfn(we), ips); in ppgtt_populate_spt_by_guest_entry()
1112 ret = intel_vgpu_enable_page_track(vgpu, spt->guest_page.gfn); in ppgtt_populate_spt_by_guest_entry()
1120 trace_spt_change(vgpu->id, "new", spt, spt->guest_page.gfn, in ppgtt_populate_spt_by_guest_entry()
1137 const struct intel_gvt_gtt_pte_ops *ops = s->vgpu->gvt->gtt.pte_ops; in ppgtt_generate_shadow_entry()
1149 static int split_2MB_gtt_entry(struct intel_vgpu *vgpu, in split_2MB_gtt_entry() argument
1153 const struct intel_gvt_gtt_pte_ops *ops = vgpu->gvt->gtt.pte_ops; in split_2MB_gtt_entry()
1165 sub_spt = ppgtt_alloc_spt(vgpu, GTT_TYPE_PPGTT_PTE_PT); in split_2MB_gtt_entry()
1170 ret = intel_gvt_dma_map_guest_page(vgpu, start_gfn + sub_index, in split_2MB_gtt_entry()
1199 trace_spt_change(sub_spt->vgpu->id, "release", sub_spt, in split_2MB_gtt_entry()
1205 static int split_64KB_gtt_entry(struct intel_vgpu *vgpu, in split_64KB_gtt_entry() argument
1209 const struct intel_gvt_gtt_pte_ops *ops = vgpu->gvt->gtt.pte_ops; in split_64KB_gtt_entry()
1225 ret = intel_gvt_dma_map_guest_page(vgpu, start_gfn + i, in split_64KB_gtt_entry()
1236 static int ppgtt_populate_shadow_entry(struct intel_vgpu *vgpu, in ppgtt_populate_shadow_entry() argument
1240 const struct intel_gvt_gtt_pte_ops *pte_ops = vgpu->gvt->gtt.pte_ops; in ppgtt_populate_shadow_entry()
1254 ret = intel_gvt_dma_map_guest_page(vgpu, gfn, PAGE_SIZE, &dma_addr); in ppgtt_populate_shadow_entry()
1265 return split_64KB_gtt_entry(vgpu, spt, index, &se); in ppgtt_populate_shadow_entry()
1268 if (!HAS_PAGE_SIZES(vgpu->gvt->gt->i915, I915_GTT_PAGE_SIZE_2M) || in ppgtt_populate_shadow_entry()
1269 intel_gvt_dma_map_guest_page(vgpu, gfn, in ppgtt_populate_shadow_entry()
1271 return split_2MB_gtt_entry(vgpu, spt, index, &se); in ppgtt_populate_shadow_entry()
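These fragments from ppgtt_populate_shadow_entry() encode the page-size policy: a 4K guest entry is DMA-mapped directly, a 64K entry is always split into 4K shadow entries, and a 2M entry is split whenever the host lacks 2M GTT pages or the 2M mapping fails. A condensed, hedged sketch of that decision (the switch framing, the name ge for the guest-entry argument, and the error paths are assumptions; se, gfn and dma_addr are the locals implied above):

	switch (ge->type) {
	case GTT_TYPE_PPGTT_PTE_4K_ENTRY:
		ret = intel_gvt_dma_map_guest_page(vgpu, gfn, PAGE_SIZE, &dma_addr);
		if (ret)
			return -ENXIO;
		break;
	case GTT_TYPE_PPGTT_PTE_64K_ENTRY:
		/* 64K guest pages are always shadowed as 4K host pages */
		return split_64KB_gtt_entry(vgpu, spt, index, &se);
	case GTT_TYPE_PPGTT_PTE_2M_ENTRY:
		if (!HAS_PAGE_SIZES(vgpu->gvt->gt->i915, I915_GTT_PAGE_SIZE_2M) ||
		    intel_gvt_dma_map_guest_page(vgpu, gfn,
						 I915_GTT_PAGE_SIZE_2M, &dma_addr))
			return split_2MB_gtt_entry(vgpu, spt, index, &se);
		break;
	default:
		return -EINVAL;		/* assumed handling of other entry types */
	}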
1289 struct intel_vgpu *vgpu = spt->vgpu; in ppgtt_populate_spt() local
1295 trace_spt_change(spt->vgpu->id, "born", spt, in ppgtt_populate_spt()
1300 s = ppgtt_populate_spt_by_guest_entry(vgpu, &ge); in ppgtt_populate_spt()
1309 ret = ppgtt_populate_shadow_entry(vgpu, spt, i, &ge); in ppgtt_populate_spt()
1324 struct intel_vgpu *vgpu = spt->vgpu; in ppgtt_handle_guest_entry_removal() local
1325 const struct intel_gvt_gtt_pte_ops *ops = vgpu->gvt->gtt.pte_ops; in ppgtt_handle_guest_entry_removal()
1328 trace_spt_guest_change(spt->vgpu->id, "remove", spt, in ppgtt_handle_guest_entry_removal()
1338 vgpu->gtt.scratch_pt[spt->shadow_page.type].page_mfn) in ppgtt_handle_guest_entry_removal()
1343 intel_vgpu_find_spt_by_mfn(vgpu, ops->get_pfn(se)); in ppgtt_handle_guest_entry_removal()
1369 struct intel_vgpu *vgpu = spt->vgpu; in ppgtt_handle_guest_entry_add() local
1374 trace_spt_guest_change(spt->vgpu->id, "add", spt, spt->shadow_page.type, in ppgtt_handle_guest_entry_add()
1381 s = ppgtt_populate_spt_by_guest_entry(vgpu, we); in ppgtt_handle_guest_entry_add()
1390 ret = ppgtt_populate_shadow_entry(vgpu, spt, index, we); in ppgtt_handle_guest_entry_add()
1401 static int sync_oos_page(struct intel_vgpu *vgpu, in sync_oos_page() argument
1404 const struct intel_gvt_device_info *info = &vgpu->gvt->device_info; in sync_oos_page()
1405 struct intel_gvt *gvt = vgpu->gvt; in sync_oos_page()
1412 trace_oos_change(vgpu->id, "sync", oos_page->id, in sync_oos_page()
1420 ops->get_entry(oos_page->mem, &old, index, false, 0, vgpu); in sync_oos_page()
1422 spt->guest_page.gfn << PAGE_SHIFT, vgpu); in sync_oos_page()
1428 trace_oos_sync(vgpu->id, oos_page->id, in sync_oos_page()
1432 ret = ppgtt_populate_shadow_entry(vgpu, spt, index, &new); in sync_oos_page()
1436 ops->set_entry(oos_page->mem, &new, index, false, 0, vgpu); in sync_oos_page()
1444 static int detach_oos_page(struct intel_vgpu *vgpu, in detach_oos_page() argument
1447 struct intel_gvt *gvt = vgpu->gvt; in detach_oos_page()
1450 trace_oos_change(vgpu->id, "detach", oos_page->id, in detach_oos_page()
1466 struct intel_gvt *gvt = spt->vgpu->gvt; in attach_oos_page()
1469 ret = intel_gvt_read_gpa(spt->vgpu, in attach_oos_page()
1480 trace_oos_change(spt->vgpu->id, "attach", oos_page->id, in attach_oos_page()
1490 ret = intel_vgpu_enable_page_track(spt->vgpu, spt->guest_page.gfn); in ppgtt_set_guest_page_sync()
1494 trace_oos_change(spt->vgpu->id, "set page sync", oos_page->id, in ppgtt_set_guest_page_sync()
1498 return sync_oos_page(spt->vgpu, oos_page); in ppgtt_set_guest_page_sync()
1503 struct intel_gvt *gvt = spt->vgpu->gvt; in ppgtt_allocate_oos_page()
1516 ret = detach_oos_page(spt->vgpu, oos_page); in ppgtt_allocate_oos_page()
1532 trace_oos_change(spt->vgpu->id, "set page out of sync", oos_page->id, in ppgtt_set_guest_page_oos()
1535 list_add_tail(&oos_page->vm_list, &spt->vgpu->gtt.oos_page_list_head); in ppgtt_set_guest_page_oos()
1536 return intel_vgpu_disable_page_track(spt->vgpu, spt->guest_page.gfn); in ppgtt_set_guest_page_oos()
1540 * intel_vgpu_sync_oos_pages - sync all the out-of-sync shadow pages for a vGPU
1541 * @vgpu: a vGPU
1544 * to sync all the out-of-sync shadow pages for a vGPU
1549 int intel_vgpu_sync_oos_pages(struct intel_vgpu *vgpu) in intel_vgpu_sync_oos_pages() argument
1558 list_for_each_safe(pos, n, &vgpu->gtt.oos_page_list_head) { in intel_vgpu_sync_oos_pages()
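Each pass of that loop presumably recovers the tracked shadow page table from its list node and forces it back in sync through ppgtt_set_guest_page_sync() (fragments near line 1490 above), which re-enables write protection and replays the cached entries. A minimal sketch; the container_of step and the early return are assumptions:

	list_for_each_safe(pos, n, &vgpu->gtt.oos_page_list_head) {
		oos_page = container_of(pos, struct intel_vgpu_oos_page, vm_list);

		/* bring this shadow page table back in sync with the guest */
		ret = ppgtt_set_guest_page_sync(oos_page->spt);
		if (ret)
			return ret;
	}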
1575 struct intel_vgpu *vgpu = spt->vgpu; in ppgtt_handle_guest_write_page_table() local
1577 const struct intel_gvt_gtt_pte_ops *ops = vgpu->gvt->gtt.pte_ops; in ppgtt_handle_guest_write_page_table()
1609 vgpu->gtt.scratch_pt[type].page_mfn); in ppgtt_handle_guest_write_page_table()
1616 vgpu->gtt.scratch_pt[type].page_mfn); in ppgtt_handle_guest_write_page_table()
1620 vgpu->gtt.scratch_pt[type].page_mfn); in ppgtt_handle_guest_write_page_table()
1649 &spt->vgpu->gtt.post_shadow_list_head); in ppgtt_set_post_shadow()
1654 * @vgpu: a vGPU
1657 * to flush all the post shadows for a vGPU.
1662 int intel_vgpu_flush_post_shadow(struct intel_vgpu *vgpu) in intel_vgpu_flush_post_shadow() argument
1670 list_for_each_safe(pos, n, &vgpu->gtt.post_shadow_list_head) { in intel_vgpu_flush_post_shadow()
1693 struct intel_vgpu *vgpu = spt->vgpu; in ppgtt_handle_guest_write_page_table_bytes() local
1694 const struct intel_gvt_gtt_pte_ops *ops = vgpu->gvt->gtt.pte_ops; in ppgtt_handle_guest_write_page_table_bytes()
1695 const struct intel_gvt_device_info *info = &vgpu->gvt->device_info; in ppgtt_handle_guest_write_page_table_bytes()
1728 ops->set_pfn(&se, vgpu->gtt.scratch_pt[type].page_mfn); in ppgtt_handle_guest_write_page_table_bytes()
1741 false, 0, vgpu); in ppgtt_handle_guest_write_page_table_bytes()
1756 struct intel_vgpu *vgpu = mm->vgpu; in invalidate_ppgtt_mm() local
1757 struct intel_gvt *gvt = vgpu->gvt; in invalidate_ppgtt_mm()
1772 ppgtt_invalidate_spt_by_shadow_entry(vgpu, &se); in invalidate_ppgtt_mm()
1776 trace_spt_guest_change(vgpu->id, "destroy root pointer", in invalidate_ppgtt_mm()
1786 struct intel_vgpu *vgpu = mm->vgpu; in shadow_ppgtt_mm() local
1787 struct intel_gvt *gvt = vgpu->gvt; in shadow_ppgtt_mm()
1797 if (!test_bit(INTEL_VGPU_STATUS_ATTACHED, vgpu->status)) in shadow_ppgtt_mm()
1808 trace_spt_guest_change(vgpu->id, __func__, NULL, in shadow_ppgtt_mm()
1811 spt = ppgtt_populate_spt_by_guest_entry(vgpu, &ge); in shadow_ppgtt_mm()
1820 trace_spt_guest_change(vgpu->id, "populate root pointer", in shadow_ppgtt_mm()
1830 static struct intel_vgpu_mm *vgpu_alloc_mm(struct intel_vgpu *vgpu) in vgpu_alloc_mm() argument
1838 mm->vgpu = vgpu; in vgpu_alloc_mm()
1851 * intel_vgpu_create_ppgtt_mm - create a ppgtt mm object for a vGPU
1852 * @vgpu: a vGPU
1856 * This function is used to create a ppgtt mm object for a vGPU.
1861 struct intel_vgpu_mm *intel_vgpu_create_ppgtt_mm(struct intel_vgpu *vgpu, in intel_vgpu_create_ppgtt_mm() argument
1864 struct intel_gvt *gvt = vgpu->gvt; in intel_vgpu_create_ppgtt_mm()
1868 mm = vgpu_alloc_mm(vgpu); in intel_vgpu_create_ppgtt_mm()
1895 list_add_tail(&mm->ppgtt_mm.list, &vgpu->gtt.ppgtt_mm_list_head); in intel_vgpu_create_ppgtt_mm()
1904 static struct intel_vgpu_mm *intel_vgpu_create_ggtt_mm(struct intel_vgpu *vgpu) in intel_vgpu_create_ggtt_mm() argument
1909 mm = vgpu_alloc_mm(vgpu); in intel_vgpu_create_ggtt_mm()
1915 nr_entries = gvt_ggtt_gm_sz(vgpu->gvt) >> I915_GTT_PAGE_SHIFT; in intel_vgpu_create_ggtt_mm()
1918 vgpu->gvt->device_info.gtt_entry_size)); in intel_vgpu_create_ggtt_mm()
1924 mm->ggtt_mm.host_ggtt_aperture = vzalloc((vgpu_aperture_sz(vgpu) >> PAGE_SHIFT) * sizeof(u64)); in intel_vgpu_create_ggtt_mm()
1931 mm->ggtt_mm.host_ggtt_hidden = vzalloc((vgpu_hidden_sz(vgpu) >> PAGE_SHIFT) * sizeof(u64)); in intel_vgpu_create_ggtt_mm()
1946 * This function is used to destroy a mm object for a vGPU
1954 gvt_err("vgpu mm pin count bug detected\n"); in _intel_vgpu_mm_release()
1959 mutex_lock(&mm->vgpu->gvt->gtt.ppgtt_mm_lock); in _intel_vgpu_mm_release()
1961 mutex_unlock(&mm->vgpu->gvt->gtt.ppgtt_mm_lock); in _intel_vgpu_mm_release()
1974 * intel_vgpu_unpin_mm - decrease the pin count of a vGPU mm object
1975 * @mm: a vGPU mm object
1977 * This function is called when the user doesn't want to use a vGPU mm object
1985 * intel_vgpu_pin_mm - increase the pin count of a vGPU mm object
1986 * @mm: target vgpu mm
1988 * This function is called when the user wants to use a vGPU mm object. If this
2006 mutex_lock(&mm->vgpu->gvt->gtt.ppgtt_mm_lock); in intel_vgpu_pin_mm()
2008 &mm->vgpu->gvt->gtt.ppgtt_mm_lru_list_head); in intel_vgpu_pin_mm()
2009 mutex_unlock(&mm->vgpu->gvt->gtt.ppgtt_mm_lock); in intel_vgpu_pin_mm()
2043 struct intel_vgpu *vgpu = mm->vgpu; in ppgtt_get_next_level_entry() local
2044 const struct intel_gvt_gtt_pte_ops *ops = vgpu->gvt->gtt.pte_ops; in ppgtt_get_next_level_entry()
2047 s = intel_vgpu_find_spt_by_mfn(vgpu, ops->get_pfn(e)); in ppgtt_get_next_level_entry()
2071 struct intel_vgpu *vgpu = mm->vgpu; in intel_vgpu_gma_to_gpa() local
2072 struct intel_gvt *gvt = vgpu->gvt; in intel_vgpu_gma_to_gpa()
2085 if (!vgpu_gmadr_is_valid(vgpu, gma)) in intel_vgpu_gma_to_gpa()
2094 trace_gma_translate(vgpu->id, "ggtt", 0, 0, gma, gpa); in intel_vgpu_gma_to_gpa()
2133 trace_gma_translate(vgpu->id, "ppgtt", 0, in intel_vgpu_gma_to_gpa()
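For a GGTT address the translation is a single lookup: fetch the guest PTE for the page that gma falls in, then splice the in-page offset back onto the page frame. A hedged sketch of that branch of intel_vgpu_gma_to_gpa(); the index calculation and the error value are assumptions:

	if (mm->type == INTEL_GVT_MM_GGTT) {
		if (!vgpu_gmadr_is_valid(vgpu, gma))
			return INTEL_GVT_INVALID_ADDR;	/* assumed error value */

		ggtt_get_guest_entry(mm, &e, gma >> I915_GTT_PAGE_SHIFT);
		gpa = (pte_ops->get_pfn(&e) << I915_GTT_PAGE_SHIFT) +
		      (gma & (I915_GTT_PAGE_SIZE - 1));	/* keep in-page offset */

		trace_gma_translate(vgpu->id, "ggtt", 0, 0, gma, gpa);
	}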
2143 static int emulate_ggtt_mmio_read(struct intel_vgpu *vgpu, in emulate_ggtt_mmio_read() argument
2146 struct intel_vgpu_mm *ggtt_mm = vgpu->gtt.ggtt_mm; in emulate_ggtt_mmio_read()
2147 const struct intel_gvt_device_info *info = &vgpu->gvt->device_info; in emulate_ggtt_mmio_read()
2156 if (!intel_gvt_ggtt_validate_range(vgpu, in emulate_ggtt_mmio_read()
2171 * @vgpu: a vGPU
2181 int intel_vgpu_emulate_ggtt_mmio_read(struct intel_vgpu *vgpu, unsigned int off, in intel_vgpu_emulate_ggtt_mmio_read() argument
2184 const struct intel_gvt_device_info *info = &vgpu->gvt->device_info; in intel_vgpu_emulate_ggtt_mmio_read()
2191 ret = emulate_ggtt_mmio_read(vgpu, off, p_data, bytes); in intel_vgpu_emulate_ggtt_mmio_read()
2195 static void ggtt_invalidate_pte(struct intel_vgpu *vgpu, in ggtt_invalidate_pte() argument
2198 const struct intel_gvt_gtt_pte_ops *pte_ops = vgpu->gvt->gtt.pte_ops; in ggtt_invalidate_pte()
2202 if (pfn != vgpu->gvt->gtt.scratch_mfn) in ggtt_invalidate_pte()
2203 intel_gvt_dma_unmap_guest_page(vgpu, pfn << PAGE_SHIFT); in ggtt_invalidate_pte()
2206 static int emulate_ggtt_mmio_write(struct intel_vgpu *vgpu, unsigned int off, in emulate_ggtt_mmio_write() argument
2209 struct intel_gvt *gvt = vgpu->gvt; in emulate_ggtt_mmio_write()
2211 struct intel_vgpu_mm *ggtt_mm = vgpu->gtt.ggtt_mm; in emulate_ggtt_mmio_write()
2228 if (!vgpu_gmadr_is_valid(vgpu, gma)) in emulate_ggtt_mmio_write()
2286 ret = intel_gvt_dma_map_guest_page(vgpu, gfn, PAGE_SIZE, in emulate_ggtt_mmio_write()
2305 ggtt_invalidate_pte(vgpu, &e); in emulate_ggtt_mmio_write()
2314 * @vgpu: a vGPU
2324 int intel_vgpu_emulate_ggtt_mmio_write(struct intel_vgpu *vgpu, in intel_vgpu_emulate_ggtt_mmio_write() argument
2327 const struct intel_gvt_device_info *info = &vgpu->gvt->device_info; in intel_vgpu_emulate_ggtt_mmio_write()
2329 struct intel_vgpu_submission *s = &vgpu->submission; in intel_vgpu_emulate_ggtt_mmio_write()
2337 ret = emulate_ggtt_mmio_write(vgpu, off, p_data, bytes); in intel_vgpu_emulate_ggtt_mmio_write()
2343 for_each_engine(engine, vgpu->gvt->gt, i) { in intel_vgpu_emulate_ggtt_mmio_write()
2353 static int alloc_scratch_pages(struct intel_vgpu *vgpu, in alloc_scratch_pages() argument
2356 struct drm_i915_private *i915 = vgpu->gvt->gt->i915; in alloc_scratch_pages()
2357 struct intel_vgpu_gtt *gtt = &vgpu->gtt; in alloc_scratch_pages()
2358 const struct intel_gvt_gtt_pte_ops *ops = vgpu->gvt->gtt.pte_ops; in alloc_scratch_pages()
2360 vgpu->gvt->device_info.gtt_entry_size_shift; in alloc_scratch_pages()
2363 struct device *dev = vgpu->gvt->gt->i915->drm.dev; in alloc_scratch_pages()
2385 gvt_dbg_mm("vgpu%d create scratch_pt: type %d mfn=0x%lx\n", in alloc_scratch_pages()
2386 vgpu->id, type, gtt->scratch_pt[type].page_mfn); in alloc_scratch_pages()
2411 ops->set_entry(scratch_pt, &se, i, false, 0, vgpu); in alloc_scratch_pages()
2417 static int release_scratch_page_tree(struct intel_vgpu *vgpu) in release_scratch_page_tree() argument
2420 struct device *dev = vgpu->gvt->gt->i915->drm.dev; in release_scratch_page_tree()
2424 if (vgpu->gtt.scratch_pt[i].page != NULL) { in release_scratch_page_tree()
2425 daddr = (dma_addr_t)(vgpu->gtt.scratch_pt[i].page_mfn << in release_scratch_page_tree()
2428 __free_page(vgpu->gtt.scratch_pt[i].page); in release_scratch_page_tree()
2429 vgpu->gtt.scratch_pt[i].page = NULL; in release_scratch_page_tree()
2430 vgpu->gtt.scratch_pt[i].page_mfn = 0; in release_scratch_page_tree()
2437 static int create_scratch_page_tree(struct intel_vgpu *vgpu) in create_scratch_page_tree() argument
2442 ret = alloc_scratch_pages(vgpu, i); in create_scratch_page_tree()
2450 release_scratch_page_tree(vgpu); in create_scratch_page_tree()
2455 * intel_vgpu_init_gtt - initialize per-vGPU graphics memory virtualization
2456 * @vgpu: a vGPU
2458 * This function is used to initialize per-vGPU graphics memory virtualization
2464 int intel_vgpu_init_gtt(struct intel_vgpu *vgpu) in intel_vgpu_init_gtt() argument
2466 struct intel_vgpu_gtt *gtt = &vgpu->gtt; in intel_vgpu_init_gtt()
2474 gtt->ggtt_mm = intel_vgpu_create_ggtt_mm(vgpu); in intel_vgpu_init_gtt()
2480 intel_vgpu_reset_ggtt(vgpu, false); in intel_vgpu_init_gtt()
2484 return create_scratch_page_tree(vgpu); in intel_vgpu_init_gtt()
2487 void intel_vgpu_destroy_all_ppgtt_mm(struct intel_vgpu *vgpu) in intel_vgpu_destroy_all_ppgtt_mm() argument
2492 list_for_each_safe(pos, n, &vgpu->gtt.ppgtt_mm_list_head) { in intel_vgpu_destroy_all_ppgtt_mm()
2497 if (GEM_WARN_ON(!list_empty(&vgpu->gtt.ppgtt_mm_list_head))) in intel_vgpu_destroy_all_ppgtt_mm()
2498 gvt_err("vgpu ppgtt mm is not fully destroyed\n"); in intel_vgpu_destroy_all_ppgtt_mm()
2500 if (GEM_WARN_ON(!radix_tree_empty(&vgpu->gtt.spt_tree))) { in intel_vgpu_destroy_all_ppgtt_mm()
2502 ppgtt_free_all_spt(vgpu); in intel_vgpu_destroy_all_ppgtt_mm()
2506 static void intel_vgpu_destroy_ggtt_mm(struct intel_vgpu *vgpu) in intel_vgpu_destroy_ggtt_mm() argument
2511 &vgpu->gtt.ggtt_mm->ggtt_mm.partial_pte_list, in intel_vgpu_destroy_ggtt_mm()
2517 intel_vgpu_destroy_mm(vgpu->gtt.ggtt_mm); in intel_vgpu_destroy_ggtt_mm()
2518 vgpu->gtt.ggtt_mm = NULL; in intel_vgpu_destroy_ggtt_mm()
2522 * intel_vgpu_clean_gtt - clean up per-vGPU graphics memory virtualization
2523 * @vgpu: a vGPU
2525 * This function is used to clean up per-vGPU graphics memory virtualization
2531 void intel_vgpu_clean_gtt(struct intel_vgpu *vgpu) in intel_vgpu_clean_gtt() argument
2533 intel_vgpu_destroy_all_ppgtt_mm(vgpu); in intel_vgpu_clean_gtt()
2534 intel_vgpu_destroy_ggtt_mm(vgpu); in intel_vgpu_clean_gtt()
2535 release_scratch_page_tree(vgpu); in intel_vgpu_clean_gtt()
2594 * @vgpu: a vGPU
2602 struct intel_vgpu_mm *intel_vgpu_find_ppgtt_mm(struct intel_vgpu *vgpu, in intel_vgpu_find_ppgtt_mm() argument
2608 list_for_each(pos, &vgpu->gtt.ppgtt_mm_list_head) { in intel_vgpu_find_ppgtt_mm()
2630 * @vgpu: a vGPU
2639 struct intel_vgpu_mm *intel_vgpu_get_ppgtt_mm(struct intel_vgpu *vgpu, in intel_vgpu_get_ppgtt_mm() argument
2644 mm = intel_vgpu_find_ppgtt_mm(vgpu, pdps); in intel_vgpu_get_ppgtt_mm()
2648 mm = intel_vgpu_create_ppgtt_mm(vgpu, root_entry_type, pdps); in intel_vgpu_get_ppgtt_mm()
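intel_vgpu_get_ppgtt_mm() is a lookup-or-create wrapper over the two routines documented above: reuse a shadow mm whose guest pdps already match, otherwise shadow a new one. A minimal sketch of the assumed flow; the reference-count call on the cache hit is an assumption:

	mm = intel_vgpu_find_ppgtt_mm(vgpu, pdps);
	if (mm)
		intel_vgpu_mm_get(mm);	/* assumed: take a reference on the hit */
	else
		mm = intel_vgpu_create_ppgtt_mm(vgpu, root_entry_type, pdps);

	return mm;	/* may be an ERR_PTR if creation failed */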
2657 * @vgpu: a vGPU
2665 int intel_vgpu_put_ppgtt_mm(struct intel_vgpu *vgpu, u64 pdps[]) in intel_vgpu_put_ppgtt_mm() argument
2669 mm = intel_vgpu_find_ppgtt_mm(vgpu, pdps); in intel_vgpu_put_ppgtt_mm()
2755 * @vgpu: a vGPU
2757 * This function is called to invalidate all PPGTT instances of a vGPU.
2760 void intel_vgpu_invalidate_ppgtt(struct intel_vgpu *vgpu) in intel_vgpu_invalidate_ppgtt() argument
2765 list_for_each_safe(pos, n, &vgpu->gtt.ppgtt_mm_list_head) { in intel_vgpu_invalidate_ppgtt()
2768 mutex_lock(&vgpu->gvt->gtt.ppgtt_mm_lock); in intel_vgpu_invalidate_ppgtt()
2770 mutex_unlock(&vgpu->gvt->gtt.ppgtt_mm_lock); in intel_vgpu_invalidate_ppgtt()
2779 * @vgpu: a vGPU
2782 * This function is called at the vGPU create stage
2786 void intel_vgpu_reset_ggtt(struct intel_vgpu *vgpu, bool invalidate_old) in intel_vgpu_reset_ggtt() argument
2788 struct intel_gvt *gvt = vgpu->gvt; in intel_vgpu_reset_ggtt()
2789 const struct intel_gvt_gtt_pte_ops *pte_ops = vgpu->gvt->gtt.pte_ops; in intel_vgpu_reset_ggtt()
2798 index = vgpu_aperture_gmadr_base(vgpu) >> PAGE_SHIFT; in intel_vgpu_reset_ggtt()
2799 num_entries = vgpu_aperture_sz(vgpu) >> PAGE_SHIFT; in intel_vgpu_reset_ggtt()
2802 ggtt_get_host_entry(vgpu->gtt.ggtt_mm, &old_entry, index); in intel_vgpu_reset_ggtt()
2803 ggtt_invalidate_pte(vgpu, &old_entry); in intel_vgpu_reset_ggtt()
2805 ggtt_set_host_entry(vgpu->gtt.ggtt_mm, &entry, index++); in intel_vgpu_reset_ggtt()
2808 index = vgpu_hidden_gmadr_base(vgpu) >> PAGE_SHIFT; in intel_vgpu_reset_ggtt()
2809 num_entries = vgpu_hidden_sz(vgpu) >> PAGE_SHIFT; in intel_vgpu_reset_ggtt()
2812 ggtt_get_host_entry(vgpu->gtt.ggtt_mm, &old_entry, index); in intel_vgpu_reset_ggtt()
2813 ggtt_invalidate_pte(vgpu, &old_entry); in intel_vgpu_reset_ggtt()
2815 ggtt_set_host_entry(vgpu->gtt.ggtt_mm, &entry, index++); in intel_vgpu_reset_ggtt()
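Both loops write the same entry into every aperture and hidden slot of the vGPU's GGTT; the entry itself is set up before the loops, presumably as a present PTE that points at the GVT-wide scratch page. A sketch of that assumed setup:

	struct intel_gvt_gtt_entry entry = { .type = GTT_TYPE_GGTT_PTE };

	/* assumed: point the template entry at the shared scratch page */
	pte_ops->set_pfn(&entry, gvt->gtt.scratch_mfn);
	pte_ops->set_present(&entry);

When invalidate_old is set, each iteration first fetches the old host entry and releases its guest-page mapping through ggtt_invalidate_pte(), as the loop bodies above show.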
2822 * intel_gvt_restore_ggtt - restore all vGPU's ggtt entries
2826 * GGTT entries of every vGPU.
2831 struct intel_vgpu *vgpu; in intel_gvt_restore_ggtt() local
2838 idr_for_each_entry(&(gvt)->vgpu_idr, vgpu, id) { in intel_gvt_restore_ggtt()
2839 mm = vgpu->gtt.ggtt_mm; in intel_gvt_restore_ggtt()
2841 num_low = vgpu_aperture_sz(vgpu) >> PAGE_SHIFT; in intel_gvt_restore_ggtt()
2842 offset = vgpu_aperture_gmadr_base(vgpu) >> PAGE_SHIFT; in intel_gvt_restore_ggtt()
2846 write_pte64(vgpu->gvt->gt->ggtt, offset + idx, pte); in intel_gvt_restore_ggtt()
2849 num_hi = vgpu_hidden_sz(vgpu) >> PAGE_SHIFT; in intel_gvt_restore_ggtt()
2850 offset = vgpu_hidden_gmadr_base(vgpu) >> PAGE_SHIFT; in intel_gvt_restore_ggtt()
2854 write_pte64(vgpu->gvt->gt->ggtt, offset + idx, pte); in intel_gvt_restore_ggtt()
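What the restore loops elide is where pte comes from and the present check in front of the write: each vGPU's saved software copy of its GGTT (the host_ggtt_aperture/host_ggtt_hidden arrays) is replayed entry by entry. A hedged sketch of one aperture iteration; the array read and the GEN8_PAGE_PRESENT test are assumptions:

	for (idx = 0; idx < num_low; idx++) {
		pte = mm->ggtt_mm.host_ggtt_aperture[idx];	/* saved copy */
		if (pte & GEN8_PAGE_PRESENT)			/* skip empty slots */
			write_pte64(vgpu->gvt->gt->ggtt, offset + idx, pte);
	}

The hidden-range loop presumably does the same against host_ggtt_hidden.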