Lines Matching +full:0 +full:xe in drivers/gpu/drm/xe/tests/xe_migrate.c

15 static bool sanity_fence_failed(struct xe_device *xe, struct dma_fence *fence,  in sanity_fence_failed()  argument
29 if (ret <= 0) { in sanity_fence_failed()
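
Only two lines of sanity_fence_failed() match the query, so the following is a hedged reconstruction of its likely shape, assuming the includes the test file already has: wait on the fence with a timeout and flag a KUnit failure when the wait times out or errors out. Only the first two parameters and the "ret <= 0" check come from the matches above; the trailing parameters, the timeout value and the message text are assumptions.

static bool sanity_fence_failed(struct xe_device *xe, struct dma_fence *fence,
				const char *str, struct kunit *test)
{
	long ret;

	if (!fence)
		return true;

	/* Assumed timeout: give the migrate job a few seconds to signal. */
	ret = dma_fence_wait_timeout(fence, false, 5 * HZ);
	if (ret <= 0) {
		KUNIT_FAIL(test, "Fence timed out or failed: %s\n", str);
		return true;
	}

	return false;
}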
37 static int run_sanity_job(struct xe_migrate *m, struct xe_device *xe, in run_sanity_job() argument
41 u64 batch_base = xe_migrate_batch_base(m, xe->info.has_usm); in run_sanity_job()
57 if (sanity_fence_failed(xe, fence, str, test)) in run_sanity_job()
62 return 0; in run_sanity_job()
70 } } while (0)
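
The lone "} } while (0)" fragment at line 70 is the tail of a do/while(0) macro, presumably the check() helper that the later matches (lines 535, 538, 574, 576) call. A plausible reconstruction is sketched below; the comparison-plus-KUNIT_FAIL structure follows from how check() is used, while the exact failure message is an assumption.

#define check(_retval, _expected, str, _test)				\
	do { if ((_retval) != (_expected)) {				\
			KUNIT_FAIL(_test, "Sanity check failed: " str	\
				   " expected %llx, got %llx\n",	\
				   (u64)(_expected), (u64)(_retval));	\
		} } while (0)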
75 struct xe_device *xe = tile_to_xe(m->tile); in test_copy() local
76 u64 retval, expected = 0; in test_copy()
82 struct xe_bo *remote = xe_bo_create_locked(xe, m->tile, NULL, in test_copy()
107 xe_map_memset(xe, &remote->vmap, 0, 0xd0, remote->size); in test_copy()
110 if (!sanity_fence_failed(xe, fence, big ? "Clearing remote big bo" : in test_copy()
112 retval = xe_map_rd(xe, &remote->vmap, 0, u64); in test_copy()
115 retval = xe_map_rd(xe, &remote->vmap, remote->size - 8, u64); in test_copy()
121 /* Try to copy 0xc0 from remote to vram with 2MB or 64KiB/4KiB pages */ in test_copy()
122 xe_map_memset(xe, &remote->vmap, 0, 0xc0, remote->size); in test_copy()
123 xe_map_memset(xe, &bo->vmap, 0, 0xd0, bo->size); in test_copy()
125 expected = 0xc0c0c0c0c0c0c0c0; in test_copy()
128 if (!sanity_fence_failed(xe, fence, big ? "Copying big bo remote -> vram" : in test_copy()
130 retval = xe_map_rd(xe, &bo->vmap, 0, u64); in test_copy()
133 retval = xe_map_rd(xe, &bo->vmap, bo->size - 8, u64); in test_copy()
140 xe_map_memset(xe, &remote->vmap, 0, 0xd0, remote->size); in test_copy()
141 xe_map_memset(xe, &bo->vmap, 0, 0xc0, bo->size); in test_copy()
145 if (!sanity_fence_failed(xe, fence, big ? "Copying big bo vram -> remote" : in test_copy()
147 retval = xe_map_rd(xe, &remote->vmap, 0, u64); in test_copy()
150 retval = xe_map_rd(xe, &remote->vmap, bo->size - 8, u64); in test_copy()
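
test_copy() repeats the same readback pattern after every clear and copy: fill a buffer with a byte pattern, wait for the blit, then compare the first and the last quadword against the expected 64-bit value. As an illustration only, that pattern can be factored into a helper like the one below; the helper name is invented, and it uses the stock KUNIT_EXPECT_EQ_MSG() instead of the file's own check() macro.

/* Illustrative sketch, not driver code: check both ends of a mapped bo. */
static void check_both_ends(struct kunit *test, struct xe_device *xe,
			    struct xe_bo *bo, u64 expected, const char *what)
{
	u64 retval;

	/* First quadword of the buffer... */
	retval = xe_map_rd(xe, &bo->vmap, 0, u64);
	KUNIT_EXPECT_EQ_MSG(test, retval, expected, "%s: first qword", what);

	/* ...and the last quadword, to catch truncated blits. */
	retval = xe_map_rd(xe, &bo->vmap, bo->size - 8, u64);
	KUNIT_EXPECT_EQ_MSG(test, retval, expected, "%s: last qword", what);
}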
186 struct xe_device *xe = tile_to_xe(tile); in xe_migrate_sanity_test() local
202 big = xe_bo_create_pin_map(xe, tile, m->q->vm, SZ_4M, in xe_migrate_sanity_test()
211 pt = xe_bo_create_pin_map(xe, tile, m->q->vm, XE_PAGE_SIZE, in xe_migrate_sanity_test()
221 tiny = xe_bo_create_pin_map(xe, tile, m->q->vm, in xe_migrate_sanity_test()
232 bb = xe_bb_new(tile->primary_gt, 32, xe->info.has_usm); in xe_migrate_sanity_test()
244 xe_map_wr(xe, &bo->vmap, XE_PAGE_SIZE * (NUM_KERNEL_PDE - 1), u64, in xe_migrate_sanity_test()
245 0xdeaddeadbeefbeef); in xe_migrate_sanity_test()
246 expected = m->q->vm->pt_ops->pte_encode_bo(pt, 0, xe->pat.idx[XE_CACHE_WB], 0); in xe_migrate_sanity_test()
250 xe_res_first(pt->ttm.resource, 0, pt->size, &src_it); in xe_migrate_sanity_test()
252 xe_res_first_sg(xe_bo_sg(pt), 0, pt->size, &src_it); in xe_migrate_sanity_test()
257 run_sanity_job(m, xe, bb, bb->len, "Writing PTE for our fake PT", test); in xe_migrate_sanity_test()
259 retval = xe_map_rd(xe, &bo->vmap, XE_PAGE_SIZE * (NUM_KERNEL_PDE - 1), in xe_migrate_sanity_test()
264 bb->len = 0; in xe_migrate_sanity_test()
266 xe_map_wr(xe, &pt->vmap, 0, u32, 0xdeaddead); in xe_migrate_sanity_test()
267 expected = 0; in xe_migrate_sanity_test()
269 emit_clear(tile->primary_gt, bb, xe_migrate_vm_addr(NUM_KERNEL_PDE - 1, 0), 4, 4, in xe_migrate_sanity_test()
270 IS_DGFX(xe)); in xe_migrate_sanity_test()
271 run_sanity_job(m, xe, bb, 1, "Writing to our newly mapped pagetable", in xe_migrate_sanity_test()
274 retval = xe_map_rd(xe, &pt->vmap, 0, u32); in xe_migrate_sanity_test()
281 xe_map_memset(xe, &tiny->vmap, 0, 0x22, tiny->size); in xe_migrate_sanity_test()
282 expected = 0; in xe_migrate_sanity_test()
285 if (sanity_fence_failed(xe, fence, "Clearing small bo", test)) in xe_migrate_sanity_test()
289 retval = xe_map_rd(xe, &tiny->vmap, 0, u32); in xe_migrate_sanity_test()
291 retval = xe_map_rd(xe, &tiny->vmap, tiny->size - 4, u32); in xe_migrate_sanity_test()
296 if (xe->info.tile_count > 1) { in xe_migrate_sanity_test()
303 xe_map_memset(xe, &big->vmap, 0, 0x11, big->size); in xe_migrate_sanity_test()
304 expected = 0; in xe_migrate_sanity_test()
307 if (sanity_fence_failed(xe, fence, "Clearing big bo", test)) in xe_migrate_sanity_test()
311 retval = xe_map_rd(xe, &big->vmap, 0, u32); in xe_migrate_sanity_test()
313 retval = xe_map_rd(xe, &big->vmap, big->size - 4, u32); in xe_migrate_sanity_test()
318 if (xe->info.tile_count > 1) { in xe_migrate_sanity_test()
338 static int migrate_test_run_device(struct xe_device *xe) in migrate_test_run_device() argument
344 xe_pm_runtime_get(xe); in migrate_test_run_device()
346 for_each_tile(tile, xe, id) { in migrate_test_run_device()
355 xe_pm_runtime_put(xe); in migrate_test_run_device()
357 return 0; in migrate_test_run_device()
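
The matches from migrate_test_run_device() show the usual xe pattern for device-wide KUnit runs: take a runtime PM reference, iterate over every tile, then drop the reference. A hedged sketch of that shape follows; the per-tile call is a hypothetical stand-in, since the loop body is not among the matched lines.

/* Hypothetical stand-in for the real per-tile invocation. */
static void run_tile_migrate_test(struct xe_tile *tile)
{
}

static int migrate_test_run_device(struct xe_device *xe)
{
	struct xe_tile *tile;
	u8 id;

	/* Keep the device awake for the whole run. */
	xe_pm_runtime_get(xe);

	for_each_tile(tile, xe, id)
		run_tile_migrate_test(tile);

	xe_pm_runtime_put(xe);

	return 0;
}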
362 struct xe_device *xe = test->priv; in xe_migrate_sanity_kunit() local
364 migrate_test_run_device(xe); in xe_migrate_sanity_kunit()
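
xe_migrate_sanity_kunit() pulls the device out of test->priv, which must be populated before the case runs; with KUnit that is normally done by a suite init hook (or a parameterised-test helper). The init function below is a generic sketch with an assumed name, not the xe driver's actual helper.

/* Generic sketch: hand the device under test to each case via test->priv. */
static int xe_migrate_suite_init(struct kunit *test)
{
	/*
	 * For parameterised runs KUnit passes the current parameter in
	 * test->param_value; stash it where the cases look for it.
	 */
	test->priv = (void *)test->param_value;

	return 0;
}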
373 struct xe_device *xe = gt_to_xe(gt); in blt_copy() local
386 xe_res_first_sg(xe_bo_sg(src_bo), 0, size, &src_it); in blt_copy()
388 xe_res_first(src, 0, size, &src_it); in blt_copy()
391 xe_res_first_sg(xe_bo_sg(dst_bo), 0, size, &dst_it); in blt_copy()
393 xe_res_first(dst, 0, size, &dst_it); in blt_copy()
399 u32 flush_flags = 0; in blt_copy()
401 u32 avail_pts = max_mem_transfer_per_pass(xe) / LEVEL0_PAGE_TABLE_ENCODE_SIZE; in blt_copy()
410 PTE_UPDATE_FLAG_IS_COMP_PTE) : 0; in blt_copy()
412 &src_L0_ofs, &src_L0_pt, 0, 0, in blt_copy()
416 PTE_UPDATE_FLAG_IS_COMP_PTE) : 0; in blt_copy()
418 &dst_L0_ofs, &dst_L0_pt, 0, in blt_copy()
422 batch_size += ((copy_only_ccs) ? 0 : EMIT_COPY_DW) + in blt_copy()
423 ((xe_device_has_flat_ccs(xe) && copy_only_ccs) ? EMIT_COPY_CCS_DW : 0); in blt_copy()
425 bb = xe_bb_new(gt, batch_size, xe->info.has_usm); in blt_copy()
455 xe_migrate_batch_base(m, xe->info.has_usm), in blt_copy()
493 static void test_migrate(struct xe_device *xe, struct xe_tile *tile, in test_migrate() argument
502 expected = 0xd0d0d0d0d0d0d0d0; in test_migrate()
503 xe_map_memset(xe, &sys_bo->vmap, 0, 0xd0, sys_bo->size); in test_migrate()
506 if (!sanity_fence_failed(xe, fence, "Blit copy from sysmem to vram", test)) { in test_migrate()
507 retval = xe_map_rd(xe, &vram_bo->vmap, 0, u64); in test_migrate()
526 retval = xe_map_rd(xe, &vram_bo->vmap, 0, u64); in test_migrate()
528 retval = xe_map_rd(xe, &vram_bo->vmap, vram_bo->size - 8, u64); in test_migrate()
533 if (!sanity_fence_failed(xe, fence, "Clear ccs buffer data", test)) { in test_migrate()
534 retval = xe_map_rd(xe, &ccs_bo->vmap, 0, u64); in test_migrate()
535 check(retval, 0, "Clear ccs data first value", test); in test_migrate()
537 retval = xe_map_rd(xe, &ccs_bo->vmap, ccs_bo->size - 8, u64); in test_migrate()
538 check(retval, 0, "Clear ccs data last value", test); in test_migrate()
554 if (timeout <= 0) { in test_migrate()
565 retval = xe_map_rd(xe, &vram_bo->vmap, 0, u64); in test_migrate()
567 retval = xe_map_rd(xe, &vram_bo->vmap, vram_bo->size - 8, u64); in test_migrate()
572 if (!sanity_fence_failed(xe, fence, "Clear ccs buffer data", test)) { in test_migrate()
573 retval = xe_map_rd(xe, &ccs_bo->vmap, 0, u64); in test_migrate()
574 check(retval, 0, "Clear ccs data first value", test); in test_migrate()
575 retval = xe_map_rd(xe, &ccs_bo->vmap, ccs_bo->size - 8, u64); in test_migrate()
576 check(retval, 0, "Clear ccs data last value", test); in test_migrate()
581 static void test_clear(struct xe_device *xe, struct xe_tile *tile, in test_clear() argument
587 expected = 0xd0d0d0d0d0d0d0d0; in test_clear()
588 xe_map_memset(xe, &sys_bo->vmap, 0, 0xd0, sys_bo->size); in test_clear()
591 if (!sanity_fence_failed(xe, fence, "Blit copy from sysmem to vram", test)) { in test_clear()
592 retval = xe_map_rd(xe, &vram_bo->vmap, 0, u64); in test_clear()
599 if (!sanity_fence_failed(xe, fence, "Blit copy from vram to sysmem", test)) { in test_clear()
600 retval = xe_map_rd(xe, &sys_bo->vmap, 0, u64); in test_clear()
602 retval = xe_map_rd(xe, &sys_bo->vmap, sys_bo->size - 8, u64); in test_clear()
608 expected = 0x0000000000000000; in test_clear()
611 if (sanity_fence_failed(xe, fence, "Clear vram_bo", test)) in test_clear()
617 if (!sanity_fence_failed(xe, fence, "Clear main buffer data", test)) { in test_clear()
618 retval = xe_map_rd(xe, &sys_bo->vmap, 0, u64); in test_clear()
620 retval = xe_map_rd(xe, &sys_bo->vmap, sys_bo->size - 8, u64); in test_clear()
627 if (!sanity_fence_failed(xe, fence, "Clear ccs buffer data", test)) { in test_clear()
628 retval = xe_map_rd(xe, &sys_bo->vmap, 0, u64); in test_clear()
630 retval = xe_map_rd(xe, &sys_bo->vmap, sys_bo->size - 8, u64); in test_clear()
636 static void validate_ccs_test_run_tile(struct xe_device *xe, struct xe_tile *tile, in validate_ccs_test_run_tile() argument
643 sys_bo = xe_bo_create_user(xe, NULL, NULL, SZ_4M, in validate_ccs_test_run_tile()
667 ccs_bo = xe_bo_create_user(xe, NULL, NULL, SZ_4M, in validate_ccs_test_run_tile()
691 vram_bo = xe_bo_create_user(xe, NULL, NULL, SZ_4M, in validate_ccs_test_run_tile()
713 test_clear(xe, tile, sys_bo, vram_bo, test); in validate_ccs_test_run_tile()
714 test_migrate(xe, tile, sys_bo, vram_bo, ccs_bo, test); in validate_ccs_test_run_tile()
736 static int validate_ccs_test_run_device(struct xe_device *xe) in validate_ccs_test_run_device() argument
742 if (!xe_device_has_flat_ccs(xe)) { in validate_ccs_test_run_device()
744 return 0; in validate_ccs_test_run_device()
747 if (!(GRAPHICS_VER(xe) >= 20 && IS_DGFX(xe))) { in validate_ccs_test_run_device()
749 return 0; in validate_ccs_test_run_device()
752 xe_pm_runtime_get(xe); in validate_ccs_test_run_device()
754 for_each_tile(tile, xe, id) in validate_ccs_test_run_device()
755 validate_ccs_test_run_tile(xe, tile, test); in validate_ccs_test_run_device()
757 xe_pm_runtime_put(xe); in validate_ccs_test_run_device()
759 return 0; in validate_ccs_test_run_device()
764 struct xe_device *xe = test->priv; in xe_validate_ccs_kunit() local
766 validate_ccs_test_run_device(xe); in xe_validate_ccs_kunit()
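
For completeness: KUnit entry points such as xe_migrate_sanity_kunit() and xe_validate_ccs_kunit() have to be registered in a kunit_suite, and that wiring lives outside the lines matched here. The sketch below shows only the generic shape with assumed names; the real suite may also use parameterised cases to cover multiple devices.

#include <kunit/test.h>

static struct kunit_case xe_migrate_test_cases[] = {
	KUNIT_CASE(xe_migrate_sanity_kunit),
	KUNIT_CASE(xe_validate_ccs_kunit),
	{}
};

static struct kunit_suite xe_migrate_test_suite = {
	.name = "xe_migrate",
	.init = xe_migrate_suite_init,	/* the illustrative init sketched above */
	.test_cases = xe_migrate_test_cases,
};

kunit_test_suite(xe_migrate_test_suite);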