Lines matching the identifier "tl" in the i915 GT timeline code (drivers/gpu/drm/i915/gt/intel_timeline.c), grouped by function. Lines elided from the excerpts are marked "...".
In __timeline_retire():

	struct intel_timeline *tl =
		container_of(active, typeof(*tl), active);

	i915_vma_unpin(tl->hwsp_ggtt);
	intel_timeline_put(tl);
In __timeline_active():

	struct intel_timeline *tl =
		container_of(active, typeof(*tl), active);

	__i915_vma_pin(tl->hwsp_ggtt);
	intel_timeline_get(tl);
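
These two functions are the timeline's activity-tracker callbacks: __timeline_active() runs when the first request joins the timeline and takes an extra pin on the HWSP vma plus a reference on the timeline; __timeline_retire() is the mirror image, dropping both once every request has retired. A minimal sketch of how such a pair gets registered at creation time (the four-argument i915_active_init() form, with a trailing flags word, is an assumption; the macro's arity has changed across kernel versions):

	/* Sketch: wire the busy/retire callbacks into the activity
	 * tracker when the timeline is created. */
	i915_active_init(&timeline->active,
			 __timeline_active,	/* first user: pin + get */
			 __timeline_retire,	/* last user gone: unpin + put */
			 0);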
In intel_timeline_create_from_engine():

	struct intel_timeline *tl;
	...
	tl = __intel_timeline_create(engine->gt, hwsp, offset);
	if (IS_ERR(tl))
		return tl;
	...
	list_add_tail(&tl->engine_link, &engine->status_page.timelines);
	...
	return tl;
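
The engine_link member threads every timeline carved out of an engine's status page onto engine->status_page.timelines. A hedged sketch of what that list enables (the walker itself is hypothetical, shown only to illustrate the list's shape):

	/* Sketch: visit every timeline sharing this engine's HWSP page. */
	struct intel_timeline *tl;

	list_for_each_entry(tl, &engine->status_page.timelines, engine_link)
		pr_info("timeline %llx\n", tl->fence_context);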
void __intel_timeline_pin(struct intel_timeline *tl)
{
	GEM_BUG_ON(!atomic_read(&tl->pin_count));
	atomic_inc(&tl->pin_count);
}
int intel_timeline_pin(struct intel_timeline *tl, struct i915_gem_ww_ctx *ww)
{
	int err;

	if (atomic_add_unless(&tl->pin_count, 1, 0))
		return 0;

	if (!tl->hwsp_map) {
		err = intel_timeline_pin_map(tl);
		if (err)
			return err;
	}

	err = i915_ggtt_pin(tl->hwsp_ggtt, ww, 0, PIN_HIGH);
	if (err)
		return err;

	tl->hwsp_offset =
		i915_ggtt_offset(tl->hwsp_ggtt) +
		offset_in_page(tl->hwsp_offset);
	GT_TRACE(tl->gt, "timeline:%llx using HWSP offset:%x\n",
		 tl->fence_context, tl->hwsp_offset);

	i915_active_acquire(&tl->active);
	if (atomic_fetch_inc(&tl->pin_count)) {
		i915_active_release(&tl->active);
		__i915_vma_unpin(tl->hwsp_ggtt);
	}

	return 0;
}
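
intel_timeline_pin() is a textbook lock-free pinning idiom: atomic_add_unless(&tl->pin_count, 1, 0) succeeds only when the timeline is already pinned, so only the first pinner pays for the map and GGTT pin; the atomic_fetch_inc() afterwards detects a racing first pinner, in which case this thread backs out its duplicate acquire and vma pin. The idiom reduced to self-contained C11 atomics (illustrative only; these names are invented, not kernel API):

#include <stdatomic.h>
#include <stdbool.h>

static atomic_int pin_count;

/* Emulates the kernel's atomic_add_unless(v, 1, 0): increment v
 * only if it is currently non-zero. */
static bool inc_unless_zero(atomic_int *v)
{
	int old = atomic_load(v);

	while (old != 0)
		if (atomic_compare_exchange_weak(v, &old, old + 1))
			return true;
	return false;
}

static void pin(void)
{
	if (inc_unless_zero(&pin_count))
		return;		/* fast path: someone already pinned */

	/* Slow path: expensive first-pin setup would happen here. */

	if (atomic_fetch_add(&pin_count, 1) != 0) {
		/* Lost the race with a concurrent first pin: the kernel
		 * code releases its freshly taken resources here. */
	}
}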
void intel_timeline_reset_seqno(const struct intel_timeline *tl)
{
	u32 *hwsp_seqno = (u32 *)tl->hwsp_seqno;	/* cast away const: writable once pinned */

	GEM_BUG_ON(!atomic_read(&tl->pin_count));
	...
	WRITE_ONCE(*hwsp_seqno, tl->seqno);
	...
}
void intel_timeline_enter(struct intel_timeline *tl)
{
	struct intel_gt_timelines *timelines = &tl->gt->timelines;

	/*
	 * ... The rule is generally tl->mutex, otherwise engine->wakeref.mutex.
	 * ... retirement cannot partake in the engine-pm barrier, and there
	 * we use the tl->active_count as a means to pin the timeline on the
	 * active_list while the locks are dropped; being outside that
	 * barrier, we must use atomic to manipulate tl->active_count.
	 */
	lockdep_assert_held(&tl->mutex);

	if (atomic_add_unless(&tl->active_count, 1, 0))
		return;

	spin_lock(&timelines->lock);
	if (!atomic_fetch_inc(&tl->active_count)) {
		intel_timeline_reset_seqno(tl);
		list_add_tail(&tl->link, &timelines->active_list);
	}
	spin_unlock(&timelines->lock);
}
void intel_timeline_exit(struct intel_timeline *tl)
{
	struct intel_gt_timelines *timelines = &tl->gt->timelines;

	/* See intel_timeline_enter() */
	lockdep_assert_held(&tl->mutex);

	GEM_BUG_ON(!atomic_read(&tl->active_count));
	if (atomic_add_unless(&tl->active_count, -1, 1))
		return;

	spin_lock(&timelines->lock);
	if (atomic_dec_and_test(&tl->active_count))
		list_del(&tl->link);
	spin_unlock(&timelines->lock);
	...
	i915_syncmap_free(&tl->sync);
}
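
intel_timeline_enter() and intel_timeline_exit() form a refcounted bracket around the period in which a timeline has requests in flight: the first enter adds it to gt->timelines.active_list (resetting the HWSP copy of the seqno, which may have been lost while idle, e.g. across suspend), and the last exit unlinks it and discards the now-idle syncmap. A conceptual usage sketch (hedged: in the driver these calls actually come from the context enter/exit paths, not one straight-line function):

	mutex_lock(&tl->mutex);
	intel_timeline_enter(tl);	/* first enter joins active_list */

	/* ... build and submit requests against tl ... */

	intel_timeline_exit(tl);	/* last exit leaves active_list */
	mutex_unlock(&tl->mutex);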
static u32 timeline_advance(struct intel_timeline *tl)
{
	GEM_BUG_ON(!atomic_read(&tl->pin_count));
	GEM_BUG_ON(tl->seqno & tl->has_initial_breadcrumb);

	return tl->seqno += 1 + tl->has_initial_breadcrumb;
}
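
timeline_advance() hands out one seqno per request, or two when the timeline carries an initial breadcrumb, so the seqno reserved for the final breadcrumb keeps a stable parity; the second GEM_BUG_ON asserts exactly that (the low bit must stay clear whenever has_initial_breadcrumb is set). A worked example (that the initial breadcrumb signals seqno - 1 is an assumption drawn from the breadcrumb emission code, not from this excerpt):

	/* tl->seqno == 4, tl->has_initial_breadcrumb == 1:
	 *   timeline_advance() -> tl->seqno = 4 + 1 + 1 = 6
	 * The initial breadcrumb can signal 5, the final breadcrumb 6,
	 * and the next request again starts from an even seqno. */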
static int
__intel_timeline_get_seqno(struct intel_timeline *tl, u32 *seqno)
{
	u32 next_ofs = offset_in_page(tl->hwsp_offset + TIMELINE_SEQNO_BYTES);
	...
	tl->hwsp_offset = i915_ggtt_offset(tl->hwsp_ggtt) + next_ofs;
	tl->hwsp_seqno = tl->hwsp_map + next_ofs;
	intel_timeline_reset_seqno(tl);

	*seqno = timeline_advance(tl);
	GEM_BUG_ON(i915_seqno_passed(*tl->hwsp_seqno, *seqno));
	return 0;
}
int intel_timeline_get_seqno(struct intel_timeline *tl,
			     struct i915_request *rq,
			     u32 *seqno)
{
	*seqno = timeline_advance(tl);

	/* Replace the HWSP on wraparound for HW semaphores */
	if (unlikely(!*seqno && tl->has_initial_breadcrumb))
		return __intel_timeline_get_seqno(tl, seqno);

	return 0;
}
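
The wraparound branch is why the slow path above exists: when timeline_advance() wraps to 0 on a timeline with an initial breadcrumb, a seqno of 0 would read as "nothing has ever completed", so __intel_timeline_get_seqno() slides the timeline onto the next TIMELINE_SEQNO_BYTES slot in the HWSP page and reseeds it before advancing again. offset_in_page() makes the slot index itself wrap at the page boundary:

	/* Illustration with a slot size of 8 bytes (the real size is
	 * whatever TIMELINE_SEQNO_BYTES is in this kernel):
	 *   offset_in_page(0x0ff8 + 8) == 0x000
	 * i.e. the last slot of the 4KiB HWSP page rolls over to the
	 * first, and hwsp_offset/hwsp_seqno are rebased onto it. */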
In intel_timeline_read_hwsp():

	struct intel_timeline *tl;
	...
	rcu_read_lock();
	tl = rcu_dereference(from->timeline);
	if (i915_request_signaled(from) ||
	    !i915_active_acquire_if_busy(&tl->active))
		tl = NULL;

	if (tl) {
		*hwsp = i915_ggtt_offset(tl->hwsp_ggtt) +
			offset_in_page(from->hwsp_seqno);
	}

	if (tl && __i915_request_is_complete(from)) {
		i915_active_release(&tl->active);
		tl = NULL;
	}
	rcu_read_unlock();

	if (!tl)
		return 1;
	...
	if (!tl->has_initial_breadcrumb) {
		...
	}

	err = i915_active_add_request(&tl->active, to);
	...
	i915_active_release(&tl->active);
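
The shape of intel_timeline_read_hwsp() is a classic RCU lookup-then-pin: the timeline pointer is only valid inside the RCU read section, i915_active_acquire_if_busy() upgrades it to a real reference only while work is still outstanding, and the completion re-check after the acquire closes the race where "from" retires between the dereference and the acquire. The same shape stripped to its bones (obj, slot, acquire_if_busy and friends are hypothetical names, not i915 API):

	rcu_read_lock();
	obj = rcu_dereference(slot);		/* stable only until unlock */
	if (!obj || !acquire_if_busy(obj))	/* refuse if already idle */
		obj = NULL;
	rcu_read_unlock();

	if (!obj)
		return 0;			/* nothing left to wait on */

	use(obj);				/* full reference now held */
	release(obj);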
void intel_timeline_unpin(struct intel_timeline *tl)
{
	GEM_BUG_ON(!atomic_read(&tl->pin_count));
	if (!atomic_dec_and_test(&tl->pin_count))
		return;

	i915_active_release(&tl->active);
	__i915_vma_unpin(tl->hwsp_ggtt);
}
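
Paired with intel_timeline_pin() above, a caller brackets breadcrumb emission with pin/unpin; a hedged sketch (the ww acquire context is assumed to be set up by the caller, and error handling is abbreviated):

	err = intel_timeline_pin(tl, &ww);
	if (err)
		return err;

	/* tl->hwsp_offset and tl->hwsp_seqno are valid and stable here */

	intel_timeline_unpin(tl);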
In intel_gt_show_timelines():

	struct intel_timeline *tl, *tn;
	...
	spin_lock(&timelines->lock);
	list_for_each_entry_safe(tl, tn, &timelines->active_list, link) {
		...
		if (!mutex_trylock(&tl->mutex)) {
			drm_printf(m, "Timeline %llx: busy; skipping\n",
				   tl->fence_context);
			continue;
		}

		intel_timeline_get(tl);
		GEM_BUG_ON(!atomic_read(&tl->active_count));
		atomic_inc(&tl->active_count); /* pin the list element */
		spin_unlock(&timelines->lock);

		list_for_each_entry_safe(rq, rn, &tl->requests, link) {
			... /* count queued/ready/inflight requests */
		}

		drm_printf(m, "Timeline %llx: { ", tl->fence_context);
		...
		drm_printf(m, ", seqno: { current: %d, last: %d }",
			   *tl->hwsp_seqno, tl->seqno);
		fence = i915_active_fence_get(&tl->last_request);
		...
		list_for_each_entry_safe(rq, rn, &tl->requests, link)
			...

		mutex_unlock(&tl->mutex);
		spin_lock(&timelines->lock);

		list_safe_reset_next(tl, tn, link);
		if (atomic_dec_and_test(&tl->active_count))
			list_del(&tl->link);

		if (refcount_dec_and_test(&tl->kref.refcount)) {
			GEM_BUG_ON(atomic_read(&tl->active_count));
			list_add(&tl->link, &free);
		}
	}
	spin_unlock(&timelines->lock);

	list_for_each_entry_safe(tl, tn, &free, link)
		__intel_timeline_free(&tl->kref);
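
The walk above leans on "pinning the list element": bumping tl->active_count guarantees the node stays linked while the spinlock is dropped for the sleepable per-timeline work, and list_safe_reset_next() revalidates the lookahead pointer once the lock is retaken. The skeleton of that pattern (a sketch assuming, as the fragments suggest, that timelines->lock is the spinlock guarding active_list):

	spin_lock(&timelines->lock);
	list_for_each_entry_safe(tl, tn, &timelines->active_list, link) {
		atomic_inc(&tl->active_count);	/* node cannot be unlinked */
		spin_unlock(&timelines->lock);

		/* ... sleepable work on tl without the list lock ... */

		spin_lock(&timelines->lock);
		list_safe_reset_next(tl, tn, link);	/* tn may be stale */
		if (atomic_dec_and_test(&tl->active_count))
			list_del(&tl->link);	/* we were the last holder */
	}
	spin_unlock(&timelines->lock);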