Lines matching refs:tl — each entry below is prefixed with its line number in the source file.

27 static struct page *hwsp_page(struct intel_timeline *tl)
29 struct drm_i915_gem_object *obj = tl->hwsp_ggtt->obj;
35 static unsigned long hwsp_cacheline(struct intel_timeline *tl)
37 unsigned long address = (unsigned long)page_address(hwsp_page(tl));
39 return (address + offset_in_page(tl->hwsp_offset)) / TIMELINE_SEQNO_BYTES;
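hwsp_cacheline() (lines 35-39 above) turns a timeline's status-page slot into a globally unique index: the kernel-virtual address of the backing page, plus the offset within the page, divided by the per-seqno slot size. A minimal user-space sketch of the same arithmetic, assuming a 4 KiB page and the 8-byte TIMELINE_SEQNO_BYTES slot size:

#include <stdio.h>

#define PAGE_SIZE            4096UL
#define offset_in_page(p)    ((unsigned long)(p) & (PAGE_SIZE - 1))
#define TIMELINE_SEQNO_BYTES 8	/* assumed slot size */

/* Same computation as hwsp_cacheline(): page base + in-page offset,
 * scaled down to a slot index that is unique across all pages. */
static unsigned long slot_index(unsigned long page_address,
				unsigned long hwsp_offset)
{
	return (page_address + offset_in_page(hwsp_offset)) /
		TIMELINE_SEQNO_BYTES;
}

int main(void)
{
	unsigned long page = 0x7f0000001000UL; /* pretend page_address() result */

	/* Two timelines sharing one page land in distinct slots. */
	printf("%lu\n", slot_index(page, 0));	/* slot 0 of this page */
	printf("%lu\n", slot_index(page, 24));	/* three slots further on */
	return 0;
}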
42 static int selftest_tl_pin(struct intel_timeline *tl)
49 err = i915_gem_object_lock(tl->hwsp_ggtt->obj, &ww);
51 err = intel_timeline_pin(tl, &ww);
79 struct intel_timeline *tl)
81 tl = xchg(&state->history[idx], tl);
82 if (tl) {
83 radix_tree_delete(&state->cachelines, hwsp_cacheline(tl));
84 intel_timeline_unpin(tl);
85 intel_timeline_put(tl);
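__mock_hwsp_record() (lines 79-85) swaps the new timeline into a history slot with xchg() and releases whatever was displaced, keeping the radix tree and the pin/ref counts balanced. A hypothetical user-space sketch of the swap-and-release shape (struct and function names invented for illustration):

#include <stdlib.h>

struct timeline {
	int refs;
};

/* Swap @newtl into history[idx]; drop the displaced entry, if any.
 * Mirrors the xchg()+unpin+put pattern in __mock_hwsp_record(). */
static void record(struct timeline **history, unsigned int idx,
		   struct timeline *newtl)
{
	struct timeline *old;

	old = __atomic_exchange_n(&history[idx], newtl, __ATOMIC_SEQ_CST);
	if (old) {
		/* the kernel also deletes the radix-tree entry here */
		if (--old->refs == 0)
			free(old);
	}
}

int main(void)
{
	struct timeline *history[4] = { NULL };
	struct timeline *a = calloc(1, sizeof(*a));

	a->refs = 1;
	record(history, 0, a);	  /* slot empty: nothing to release */
	record(history, 0, NULL); /* displaces and frees 'a' */
	return 0;
}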
93 struct intel_timeline *tl;
100 tl = intel_timeline_create(state->gt);
101 if (IS_ERR(tl))
102 return PTR_ERR(tl);
104 err = selftest_tl_pin(tl);
106 intel_timeline_put(tl);
110 cacheline = hwsp_cacheline(tl);
111 err = radix_tree_insert(&state->cachelines, cacheline, tl);
117 intel_timeline_unpin(tl);
118 intel_timeline_put(tl);
123 __mock_hwsp_record(state, idx, tl);
204 static int __igt_sync(struct intel_timeline *tl,
211 if (__intel_timeline_sync_is_later(tl, ctx, p->seqno) != p->expected) {
218 ret = __intel_timeline_sync_set(tl, ctx, p->seqno);
246 struct intel_timeline tl;
250 mock_timeline_init(&tl, 0);
256 ret = __igt_sync(&tl, ctx, p, "1");
262 mock_timeline_fini(&tl);
264 mock_timeline_init(&tl, 0);
270 ret = __igt_sync(&tl, ctx, p, "2");
278 mock_timeline_fini(&tl);
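The igt_sync tests (lines 204-278) probe __intel_timeline_sync_is_later(), whose ordering must survive u32 seqno wraparound. The comparison rests on the signed-difference idiom also used by i915_seqno_passed(), shown here standalone:

#include <stdint.h>
#include <stdio.h>

/* "a is later than or equal to b" under u32 wraparound: the signed
 * difference is non-negative as long as the two seqnos are within
 * 2^31 of each other. */
static int seqno_passed(uint32_t a, uint32_t b)
{
	return (int32_t)(a - b) >= 0;
}

int main(void)
{
	printf("%d\n", seqno_passed(1, 0));          /* 1: later */
	printf("%d\n", seqno_passed(0, 0xfffffffd)); /* 1: later, across the wrap */
	printf("%d\n", seqno_passed(0xfffffffd, 0)); /* 0: earlier */
	return 0;
}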
290 struct intel_timeline tl;
296 mock_timeline_init(&tl, 0);
328 __intel_timeline_sync_set(&tl, id, 0);
343 if (!__intel_timeline_sync_is_later(&tl, id, 0)) {
344 mock_timeline_fini(&tl);
354 mock_timeline_fini(&tl);
357 mock_timeline_init(&tl, 0);
364 __intel_timeline_sync_set(&tl, count++, 0);
374 if (!__intel_timeline_sync_is_later(&tl, end_time, 0)) {
376 mock_timeline_fini(&tl);
384 mock_timeline_fini(&tl);
387 mock_timeline_init(&tl, 0);
398 if (!__intel_timeline_sync_is_later(&tl, id, seqno))
399 __intel_timeline_sync_set(&tl, id, seqno);
407 mock_timeline_fini(&tl);
415 mock_timeline_init(&tl, 0);
427 __intel_timeline_sync_is_later(&tl, id, 0);
428 __intel_timeline_sync_set(&tl, id, 0);
436 mock_timeline_fini(&tl);
485 checked_tl_write(struct intel_timeline *tl, struct intel_engine_cs *engine, u32 value)
490 err = selftest_tl_pin(tl);
496 if (READ_ONCE(*tl->hwsp_seqno) != tl->seqno) {
498 *tl->hwsp_seqno, tl->seqno);
499 intel_timeline_unpin(tl);
509 err = emit_ggtt_store_dw(rq, tl->hwsp_offset, value);
517 intel_timeline_unpin(tl);
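checked_tl_write() (lines 485-517) first verifies, with READ_ONCE(), that the CPU-visible HWSP slot still holds the timeline's expected seqno before emitting a GGTT store of the new value. READ_ONCE() boils down to a volatile load, which a plain-C sketch can mimic:

#include <stdint.h>
#include <stdio.h>

#define READ_ONCE(x) (*(const volatile __typeof__(x) *)&(x))

int main(void)
{
	uint32_t hwsp_seqno = 42; /* stands in for the mapped HWSP slot */
	uint32_t expected = 42;

	/* Forces a fresh load each time; the compiler may not cache the
	 * value, which matters when hardware writes the location behind
	 * the CPU's back. */
	if (READ_ONCE(hwsp_seqno) != expected)
		printf("seqno mismatch\n");
	else
		printf("seqno consistent\n");
	return 0;
}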
553 struct intel_timeline *tl;
556 tl = intel_timeline_create(gt);
557 if (IS_ERR(tl)) {
558 err = PTR_ERR(tl);
562 rq = checked_tl_write(tl, engine, count);
564 intel_timeline_put(tl);
569 timelines[count++] = tl;
582 struct intel_timeline *tl = timelines[n];
584 if (!err && READ_ONCE(*tl->hwsp_seqno) != n) {
586 n, tl->fence_context, tl->hwsp_offset, *tl->hwsp_seqno);
590 intel_timeline_put(tl);
623 struct intel_timeline *tl;
629 tl = intel_timeline_create(gt);
630 if (IS_ERR(tl)) {
631 err = PTR_ERR(tl);
636 rq = checked_tl_write(tl, engine, count);
639 intel_timeline_put(tl);
644 timelines[count++] = tl;
654 struct intel_timeline *tl = timelines[n];
656 if (!err && READ_ONCE(*tl->hwsp_seqno) != n) {
658 n, tl->fence_context, tl->hwsp_offset, *tl->hwsp_seqno);
662 intel_timeline_put(tl);
674 struct intel_timeline *tl;
683 tl = intel_timeline_create(gt);
684 if (IS_ERR(tl))
685 return PTR_ERR(tl);
687 if (!tl->has_initial_breadcrumb)
690 err = selftest_tl_pin(tl);
708 tl->seqno = -4u;
710 mutex_lock_nested(&tl->mutex, SINGLE_DEPTH_NESTING);
711 err = intel_timeline_get_seqno(tl, rq, &seqno[0]);
712 mutex_unlock(&tl->mutex);
718 seqno[0], tl->hwsp_offset);
720 err = emit_ggtt_store_dw(rq, tl->hwsp_offset, seqno[0]);
725 hwsp_seqno[0] = tl->hwsp_seqno;
727 mutex_lock_nested(&tl->mutex, SINGLE_DEPTH_NESTING);
728 err = intel_timeline_get_seqno(tl, rq, &seqno[1]);
729 mutex_unlock(&tl->mutex);
735 seqno[1], tl->hwsp_offset);
737 err = emit_ggtt_store_dw(rq, tl->hwsp_offset, seqno[1]);
742 hwsp_seqno[1] = tl->hwsp_seqno;
772 intel_timeline_unpin(tl);
774 intel_timeline_put(tl);
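live_hwsp_wrap (lines 674-774) seeds tl->seqno at -4u so that two back-to-back seqno allocations straddle the u32 wrap, exercising the path where the timeline must move to a fresh status-page slot. The trigger condition is easy to see in isolation:

#include <stdint.h>
#include <stdio.h>

int main(void)
{
	uint32_t seqno = -4u;	/* 0xfffffffc, as the selftest seeds it */
	int i;

	/* A handful of allocations walks the counter across zero; the
	 * driver detects this and switches to a new HWSP slot. */
	for (i = 0; i < 6; i++) {
		uint32_t next = seqno + 1;

		if (next < seqno)
			printf("wrap at %#x -> %#x\n", seqno, next);
		seqno = next;
	}
	printf("final seqno %#x\n", seqno);
	return 0;
}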
829 struct intel_timeline *tl)
839 i915_gem_object_set_pat_index(obj, tl->hwsp_ggtt->obj->pat_index);
841 page_unmask_bits(tl->hwsp_ggtt->obj->mm.mapping));
943 static bool retire_requests(struct intel_timeline *tl)
947 mutex_lock(&tl->mutex);
948 list_for_each_entry_safe(rq, rn, &tl->requests, link)
951 mutex_unlock(&tl->mutex);
953 return !i915_active_fence_isset(&tl->last_request);
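retire_requests() (lines 943-953) walks tl->requests under tl->mutex with the _safe iterator, since retiring unlinks each request from the list, then reports whether the timeline still has an outstanding last_request. The safe-iteration shape, sketched with a plain singly linked list instead of the kernel's list_for_each_entry_safe():

#include <stdio.h>
#include <stdlib.h>

struct request {
	int completed;
	struct request *next;
};

/* Retire every completed request; the link is re-sampled before each
 * node is freed, so removal cannot derail the walk. */
static void retire_all(struct request **head)
{
	struct request **pp = head;

	while (*pp) {
		struct request *rq = *pp;

		if (rq->completed) {
			*pp = rq->next;	/* unlink before freeing */
			free(rq);
		} else {
			pp = &rq->next;
		}
	}
}

int main(void)
{
	struct request *head = NULL;
	int i;

	for (i = 0; i < 4; i++) {
		struct request *rq = calloc(1, sizeof(*rq));

		rq->completed = i & 1;
		rq->next = head;
		head = rq;
	}
	retire_all(&head);
	printf("%s\n", head ? "requests outstanding" : "timeline idle");
	return 0;
}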
959 struct intel_timeline *tl = ce->timeline;
962 while (tl->seqno >= seqno) { /* Cause a wrap */
988 struct intel_timeline *tl;
1004 tl = intel_timeline_create(gt);
1005 if (IS_ERR(tl))
1006 return PTR_ERR(tl);
1008 if (!tl->has_initial_breadcrumb)
1011 selftest_tl_pin(tl);
1014 err = setup_watcher(&watcher[i], gt, tl);
1050 ce->timeline = intel_timeline_get(tl);
1063 tl->seqno = -12u + 2 * (count & 3);
1064 __intel_timeline_get_seqno(tl, &dummy);
1148 retire_requests(tl);
1169 intel_timeline_unpin(tl);
1175 intel_timeline_put(tl);
1193 struct intel_timeline *tl = ce->timeline;
1203 GEM_BUG_ON(i915_active_fence_isset(&tl->last_request));
1204 tl->seqno = -2u;
1205 WRITE_ONCE(*(u32 *)tl->hwsp_seqno, tl->seqno);
1220 GEM_BUG_ON(rcu_access_pointer(this->timeline) != tl);
1271 struct intel_timeline *tl;
1283 tl = ce->timeline;
1284 if (!tl->has_initial_breadcrumb)
1291 tl->seqno = -4u;
1292 WRITE_ONCE(*(u32 *)tl->hwsp_seqno, tl->seqno);
1307 GEM_BUG_ON(rcu_access_pointer(this->timeline) != tl);
1369 struct intel_timeline *tl;
1372 tl = intel_timeline_create(gt);
1373 if (IS_ERR(tl)) {
1374 err = PTR_ERR(tl);
1378 rq = checked_tl_write(tl, engine, count);
1380 intel_timeline_put(tl);
1388 intel_timeline_put(tl);
1393 if (READ_ONCE(*tl->hwsp_seqno) != count) {
1395 count, tl->fence_context,
1396 tl->hwsp_offset, *tl->hwsp_seqno);
1402 intel_timeline_put(tl);