Lines Matching full:gt
29 static long tlb_timeout_jiffies(struct xe_gt *gt) in tlb_timeout_jiffies() argument
35 long delay = xe_guc_ct_queue_proc_time_jiffies(&gt->uc.guc.ct); in tlb_timeout_jiffies()
42 if (WARN_ON_ONCE(!fence->gt)) in xe_gt_tlb_invalidation_fence_fini()
45 xe_pm_runtime_put(gt_to_xe(fence->gt)); in xe_gt_tlb_invalidation_fence_fini()
46 fence->gt = NULL; /* fini() should be called once */ in xe_gt_tlb_invalidation_fence_fini()
70 struct xe_gt *gt = container_of(work, struct xe_gt, in xe_gt_tlb_fence_timeout() local
72 struct xe_device *xe = gt_to_xe(gt); in xe_gt_tlb_fence_timeout()
75 LNL_FLUSH_WORK(&gt->uc.guc.ct.g2h_worker); in xe_gt_tlb_fence_timeout()
77 spin_lock_irq(&gt->tlb_invalidation.pending_lock); in xe_gt_tlb_fence_timeout()
79 &gt->tlb_invalidation.pending_fences, link) { in xe_gt_tlb_fence_timeout()
83 if (msecs_to_jiffies(since_inval_ms) < tlb_timeout_jiffies(gt)) in xe_gt_tlb_fence_timeout()
87 xe_gt_err(gt, "TLB invalidation fence timeout, seqno=%d recv=%d", in xe_gt_tlb_fence_timeout()
88 fence->seqno, gt->tlb_invalidation.seqno_recv); in xe_gt_tlb_fence_timeout()
93 if (!list_empty(&gt->tlb_invalidation.pending_fences)) in xe_gt_tlb_fence_timeout()
95 &gt->tlb_invalidation.fence_tdr, in xe_gt_tlb_fence_timeout()
96 tlb_timeout_jiffies(gt)); in xe_gt_tlb_fence_timeout()
97 spin_unlock_irq(&gt->tlb_invalidation.pending_lock); in xe_gt_tlb_fence_timeout()
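
Only the matching lines of the timeout worker are listed above. Its overall shape is: flush the G2H worker, walk the ordered pending list, stop at the first fence still inside the timeout window, fail the older ones, and re-arm the watchdog while anything remains pending. A hedged sketch of that shape with the elided glue filled in; the -ETIME error, the invalidation_fence_signal() call and the fence->invalidation_time field are assumptions inferred from the surrounding matches, not verbatim source:

/* Hedged sketch only: field and helper names follow the listing, the glue is assumed. */
static void tlb_fence_timeout_sketch(struct xe_gt *gt)
{
	struct xe_gt_tlb_invalidation_fence *fence, *next;

	LNL_FLUSH_WORK(&gt->uc.guc.ct.g2h_worker);	/* pick up any late G2H completions first */

	spin_lock_irq(&gt->tlb_invalidation.pending_lock);
	list_for_each_entry_safe(fence, next,
				 &gt->tlb_invalidation.pending_fences, link) {
		s64 since_inval_ms = ktime_ms_delta(ktime_get(),
						    fence->invalidation_time);	/* assumed field */

		/* fences are queued in seqno order, so the first in-time fence ends the scan */
		if (msecs_to_jiffies(since_inval_ms) < tlb_timeout_jiffies(gt))
			break;

		/* assumed: mark the fence failed and signal it so waiters are released */
		fence->base.error = -ETIME;
		invalidation_fence_signal(gt_to_xe(gt), fence);
	}

	/* keep the watchdog armed while older invalidations are still outstanding */
	if (!list_empty(&gt->tlb_invalidation.pending_fences))
		queue_delayed_work(system_wq,
				   &gt->tlb_invalidation.fence_tdr,
				   tlb_timeout_jiffies(gt));
	spin_unlock_irq(&gt->tlb_invalidation.pending_lock);
}
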
101 * xe_gt_tlb_invalidation_init - Initialize GT TLB invalidation state
102 * @gt: graphics tile
104 * Initialize GT TLB invalidation state, purely software initialization, should
109 int xe_gt_tlb_invalidation_init(struct xe_gt *gt) in xe_gt_tlb_invalidation_init() argument
111 gt->tlb_invalidation.seqno = 1; in xe_gt_tlb_invalidation_init()
112 INIT_LIST_HEAD(&gt->tlb_invalidation.pending_fences); in xe_gt_tlb_invalidation_init()
113 spin_lock_init(&gt->tlb_invalidation.pending_lock); in xe_gt_tlb_invalidation_init()
114 spin_lock_init(&gt->tlb_invalidation.lock); in xe_gt_tlb_invalidation_init()
115 INIT_DELAYED_WORK(&gt->tlb_invalidation.fence_tdr, in xe_gt_tlb_invalidation_init()
122 * xe_gt_tlb_invalidation_reset - Reset GT TLB invalidation state in xe_gt_tlb_invalidation_reset()
123 * @gt: graphics tile
125 * Signal any pending invalidation fences, should be called during a GT reset
127 void xe_gt_tlb_invalidation_reset(struct xe_gt *gt) in xe_gt_tlb_invalidation_reset() argument
137 mutex_lock(&gt->uc.guc.ct.lock); in xe_gt_tlb_invalidation_reset()
138 spin_lock_irq(&gt->tlb_invalidation.pending_lock); in xe_gt_tlb_invalidation_reset()
139 cancel_delayed_work(&gt->tlb_invalidation.fence_tdr); in xe_gt_tlb_invalidation_reset()
145 * completed a full GT reset. in xe_gt_tlb_invalidation_reset()
147 if (gt->tlb_invalidation.seqno == 1) in xe_gt_tlb_invalidation_reset()
150 pending_seqno = gt->tlb_invalidation.seqno - 1; in xe_gt_tlb_invalidation_reset()
151 WRITE_ONCE(gt->tlb_invalidation.seqno_recv, pending_seqno); in xe_gt_tlb_invalidation_reset()
154 &gt->tlb_invalidation.pending_fences, link) in xe_gt_tlb_invalidation_reset()
155 invalidation_fence_signal(gt_to_xe(gt), fence); in xe_gt_tlb_invalidation_reset()
156 spin_unlock_irq(&gt->tlb_invalidation.pending_lock); in xe_gt_tlb_invalidation_reset()
157 mutex_unlock(&gt->uc.guc.ct.lock); in xe_gt_tlb_invalidation_reset()
160 static bool tlb_invalidation_seqno_past(struct xe_gt *gt, int seqno) in tlb_invalidation_seqno_past() argument
162 int seqno_recv = READ_ONCE(gt->tlb_invalidation.seqno_recv); in tlb_invalidation_seqno_past()
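
Only the READ_ONCE() of seqno_recv is matched above; the comparison itself has to survive the sequence number wrapping back to 1. A minimal standalone sketch of a wrap-aware "has this seqno already been received?" check; SEQNO_MAX and the half-range heuristic are illustrative stand-ins, not the driver's exact constants:

#include <stdbool.h>

#define SEQNO_MAX (1 << 16)	/* illustrative wrap point, not the driver's constant */

static bool seqno_past_sketch(int seqno_recv, int seqno)
{
	if (seqno - seqno_recv < -(SEQNO_MAX / 2))
		return false;		/* seqno wrapped: it is newer than seqno_recv */

	if (seqno - seqno_recv > (SEQNO_MAX / 2))
		return true;		/* seqno_recv wrapped: seqno is already past */

	return seqno_recv >= seqno;	/* no wrap in play: plain comparison */
}
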
177 struct xe_gt *gt = guc_to_gt(guc); in send_tlb_invalidation() local
178 struct xe_device *xe = gt_to_xe(gt); in send_tlb_invalidation()
182 xe_gt_assert(gt, fence); in send_tlb_invalidation()
191 seqno = gt->tlb_invalidation.seqno; in send_tlb_invalidation()
198 spin_lock_irq(&gt->tlb_invalidation.pending_lock); in send_tlb_invalidation()
205 if (tlb_invalidation_seqno_past(gt, seqno)) { in send_tlb_invalidation()
210 &gt->tlb_invalidation.pending_fences); in send_tlb_invalidation()
212 if (list_is_singular(&gt->tlb_invalidation.pending_fences)) in send_tlb_invalidation()
214 &gt->tlb_invalidation.fence_tdr, in send_tlb_invalidation()
215 tlb_timeout_jiffies(gt)); in send_tlb_invalidation()
217 spin_unlock_irq(&gt->tlb_invalidation.pending_lock); in send_tlb_invalidation()
222 gt->tlb_invalidation.seqno = (gt->tlb_invalidation.seqno + 1) % in send_tlb_invalidation()
224 if (!gt->tlb_invalidation.seqno) in send_tlb_invalidation()
225 gt->tlb_invalidation.seqno = 1; in send_tlb_invalidation()
228 xe_gt_stats_incr(gt, XE_GT_STATS_ID_TLB_INVAL, 1); in send_tlb_invalidation()
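
As the matched lines show, the next sequence number is advanced modulo the maximum and 0 is skipped, matching the init code above that starts the counter at 1, so seqno 0 never appears on the wire. A tiny standalone sketch of that allocation; SEQNO_MAX again stands in for the driver's own limit:

#define SEQNO_MAX (1 << 16)	/* illustrative stand-in for the driver's limit */

/* Advance a 1-based sequence counter, wrapping without ever producing 0. */
static int next_seqno_sketch(int seqno)
{
	seqno = (seqno + 1) % SEQNO_MAX;
	if (!seqno)
		seqno = 1;	/* 0 is reserved; the counter is initialised to 1 */
	return seqno;
}
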
238 * xe_gt_tlb_invalidation_guc - Issue a TLB invalidation on this GT for the GuC
239 * @gt: graphics tile
248 static int xe_gt_tlb_invalidation_guc(struct xe_gt *gt, in xe_gt_tlb_invalidation_guc() argument
257 return send_tlb_invalidation(&gt->uc.guc, fence, action, in xe_gt_tlb_invalidation_guc()
262 * xe_gt_tlb_invalidation_ggtt - Issue a TLB invalidation on this GT for the GGTT
263 * @gt: graphics tile
270 int xe_gt_tlb_invalidation_ggtt(struct xe_gt *gt) in xe_gt_tlb_invalidation_ggtt() argument
272 struct xe_device *xe = gt_to_xe(gt); in xe_gt_tlb_invalidation_ggtt()
274 if (xe_guc_ct_enabled(&gt->uc.guc.ct) && in xe_gt_tlb_invalidation_ggtt()
275 gt->uc.guc.submission_state.enabled) { in xe_gt_tlb_invalidation_ggtt()
279 xe_gt_tlb_invalidation_fence_init(gt, &fence, true); in xe_gt_tlb_invalidation_ggtt()
280 ret = xe_gt_tlb_invalidation_guc(gt, &fence); in xe_gt_tlb_invalidation_ggtt()
289 xe_gt_WARN_ON(gt, xe_force_wake_get(gt_to_fw(gt), XE_FW_GT)); in xe_gt_tlb_invalidation_ggtt()
291 xe_mmio_write32(gt, PVC_GUC_TLB_INV_DESC1, in xe_gt_tlb_invalidation_ggtt()
293 xe_mmio_write32(gt, PVC_GUC_TLB_INV_DESC0, in xe_gt_tlb_invalidation_ggtt()
296 xe_mmio_write32(gt, GUC_TLB_INV_CR, in xe_gt_tlb_invalidation_ggtt()
299 xe_force_wake_put(gt_to_fw(gt), XE_FW_GT); in xe_gt_tlb_invalidation_ggtt()
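
The GGTT path above takes the GuC-backed branch when CT and GuC submission are up, and otherwise falls back to poking the invalidation registers directly under forcewake. A hedged sketch of the GuC-backed branch with the elided wait filled in; xe_gt_tlb_invalidation_fence_wait() is assumed to be the header's thin wrapper around dma_fence_wait():

/* Hedged sketch of the GuC-backed branch visible above; error handling is simplified. */
static int ggtt_guc_branch_sketch(struct xe_gt *gt)
{
	struct xe_gt_tlb_invalidation_fence fence;
	int ret;

	/* stack = true: the fence lives in this stack frame and is waited on synchronously */
	xe_gt_tlb_invalidation_fence_init(gt, &fence, true);

	ret = xe_gt_tlb_invalidation_guc(gt, &fence);
	if (ret < 0)
		return ret;

	/* assumed helper: blocks on fence.base until the G2H completion signals it */
	xe_gt_tlb_invalidation_fence_wait(&fence);
	return 0;
}
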
306 * xe_gt_tlb_invalidation_range - Issue a TLB invalidation on this GT for an
309 * @gt: graphics tile
322 int xe_gt_tlb_invalidation_range(struct xe_gt *gt, in xe_gt_tlb_invalidation_range() argument
326 struct xe_device *xe = gt_to_xe(gt); in xe_gt_tlb_invalidation_range()
331 xe_gt_assert(gt, fence); in xe_gt_tlb_invalidation_range()
334 if (gt_to_xe(gt)->info.force_execlist) { in xe_gt_tlb_invalidation_range()
375 xe_gt_assert(gt, length >= SZ_4K); in xe_gt_tlb_invalidation_range()
376 xe_gt_assert(gt, is_power_of_2(length)); in xe_gt_tlb_invalidation_range()
377 xe_gt_assert(gt, !(length & GENMASK(ilog2(SZ_16M) - 1, in xe_gt_tlb_invalidation_range()
379 xe_gt_assert(gt, IS_ALIGNED(start, length)); in xe_gt_tlb_invalidation_range()
388 xe_gt_assert(gt, len <= MAX_TLB_INVALIDATION_LEN); in xe_gt_tlb_invalidation_range()
390 return send_tlb_invalidation(&gt->uc.guc, fence, action, len); in xe_gt_tlb_invalidation_range()
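
The assertions above pin down the shape of a range-based invalidation: the window must be at least 4K, a power of two, not 4M or 8M (a 2M-page invalidation is widened to at least 16M), and its start must be aligned to its length. A hedged, standalone sketch of one way to widen an arbitrary [start, end) range into such a window; the helper names are illustrative and this is not claimed to be the driver's exact computation:

#include <stdint.h>

#define SZ_4K	0x1000ULL
#define SZ_2M	0x200000ULL
#define SZ_16M	0x1000000ULL

static uint64_t roundup_pow_of_two_u64(uint64_t x)
{
	uint64_t r = 1;

	while (r < x)
		r <<= 1;
	return r;
}

/* Widen [start, end) to an aligned power-of-two window satisfying the assertions above. */
static void widen_range_sketch(uint64_t start, uint64_t end,
			       uint64_t *out_start, uint64_t *out_length)
{
	uint64_t orig_start = start;
	uint64_t length = end - start;
	uint64_t align;

	if (length < SZ_4K)
		length = SZ_4K;

	/* start from the smallest power-of-two window that could hold the range */
	align = roundup_pow_of_two_u64(length);
	start &= ~(align - 1);
	end = (end + align - 1) & ~(align - 1);
	length = align;

	/* grow until the aligned window actually covers [orig_start, end) */
	while (start + length < end) {
		length <<= 1;
		start = orig_start & ~(length - 1);
	}

	/* windows between 2M and 16M are bumped to 16M, hence the 4M/8M exclusion above */
	if (length >= SZ_2M && length < SZ_16M) {
		start = orig_start & ~(SZ_16M - 1);
		length = SZ_16M;
	}

	*out_start = start;
	*out_length = length;
}
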
394 * xe_gt_tlb_invalidation_vma - Issue a TLB invalidation on this GT for a VMA
395 * @gt: graphics tile
406 int xe_gt_tlb_invalidation_vma(struct xe_gt *gt, in xe_gt_tlb_invalidation_vma() argument
410 xe_gt_assert(gt, vma); in xe_gt_tlb_invalidation_vma()
412 return xe_gt_tlb_invalidation_range(gt, fence, xe_vma_start(vma), in xe_gt_tlb_invalidation_vma()
431 struct xe_gt *gt = guc_to_gt(guc); in xe_guc_tlb_invalidation_done_handler() local
432 struct xe_device *xe = gt_to_xe(gt); in xe_guc_tlb_invalidation_done_handler()
454 spin_lock_irqsave(&gt->tlb_invalidation.pending_lock, flags); in xe_guc_tlb_invalidation_done_handler()
455 if (tlb_invalidation_seqno_past(gt, msg[0])) { in xe_guc_tlb_invalidation_done_handler()
456 spin_unlock_irqrestore(&gt->tlb_invalidation.pending_lock, flags); in xe_guc_tlb_invalidation_done_handler()
460 WRITE_ONCE(gt->tlb_invalidation.seqno_recv, msg[0]); in xe_guc_tlb_invalidation_done_handler()
463 &gt->tlb_invalidation.pending_fences, link) { in xe_guc_tlb_invalidation_done_handler()
466 if (!tlb_invalidation_seqno_past(gt, fence->seqno)) in xe_guc_tlb_invalidation_done_handler()
472 if (!list_empty(&gt->tlb_invalidation.pending_fences)) in xe_guc_tlb_invalidation_done_handler()
474 &gt->tlb_invalidation.fence_tdr, in xe_guc_tlb_invalidation_done_handler()
475 tlb_timeout_jiffies(gt)); in xe_guc_tlb_invalidation_done_handler()
477 cancel_delayed_work(&gt->tlb_invalidation.fence_tdr); in xe_guc_tlb_invalidation_done_handler()
479 spin_unlock_irqrestore(&gt->tlb_invalidation.pending_lock, flags); in xe_guc_tlb_invalidation_done_handler()
503 * @gt: GT
511 void xe_gt_tlb_invalidation_fence_init(struct xe_gt *gt, in xe_gt_tlb_invalidation_fence_init() argument
515 xe_pm_runtime_get_noresume(gt_to_xe(gt)); in xe_gt_tlb_invalidation_fence_init()
517 spin_lock_irq(&gt->tlb_invalidation.lock); in xe_gt_tlb_invalidation_fence_init()
519 &gt->tlb_invalidation.lock, in xe_gt_tlb_invalidation_fence_init()
521 spin_unlock_irq(&gt->tlb_invalidation.lock); in xe_gt_tlb_invalidation_fence_init()
527 fence->gt = gt; in xe_gt_tlb_invalidation_fence_init()
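
Taken together with the _fence_fini() matches near the top of the listing, the init path pins the device with a no-resume runtime PM reference and records the GT in fence->gt; fini drops that reference exactly once and clears fence->gt so a second fini trips the WARN. A hedged sketch of that pairing in one place; illustrative only, not the two functions' verbatim bodies:

/* Hedged sketch: the PM reference taken at init time is released exactly once at fini time. */
static void fence_pm_pairing_sketch(struct xe_gt *gt,
				    struct xe_gt_tlb_invalidation_fence *fence)
{
	/* init side: pin the device without waking it, then mark the fence as live */
	xe_pm_runtime_get_noresume(gt_to_xe(gt));
	fence->gt = gt;

	/* ... the invalidation is issued and the fence signalled elsewhere ... */

	/* fini side: a NULL gt means fini already ran, so warn instead of double-putting */
	if (WARN_ON_ONCE(!fence->gt))
		return;
	xe_pm_runtime_put(gt_to_xe(fence->gt));
	fence->gt = NULL;
}
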