
Searched refs: I915_GTT_PAGE_SIZE (Results 1 – 25 of 27), sorted by relevance

/linux-6.12.1/drivers/gpu/drm/i915/selftests/
i915_gem_evict.c
58 I915_GTT_PAGE_SIZE); in populate_ggtt()
123 I915_GTT_PAGE_SIZE, 0, 0, in igt_evict_something()
138 I915_GTT_PAGE_SIZE, 0, 0, in igt_evict_something()
170 obj = i915_gem_object_create_internal(gt->i915, I915_GTT_PAGE_SIZE); in igt_overcommit()
247 .start = I915_GTT_PAGE_SIZE * 2, in igt_evict_for_cache_color()
248 .size = I915_GTT_PAGE_SIZE, in igt_evict_for_cache_color()
265 obj = i915_gem_object_create_internal(gt->i915, I915_GTT_PAGE_SIZE); in igt_evict_for_cache_color()
274 I915_GTT_PAGE_SIZE | flags); in igt_evict_for_cache_color()
281 obj = i915_gem_object_create_internal(gt->i915, I915_GTT_PAGE_SIZE); in igt_evict_for_cache_color()
291 (I915_GTT_PAGE_SIZE * 2) | flags); in igt_evict_for_cache_color()
i915_gem_gtt.c
122 GEM_BUG_ON(!IS_ALIGNED(size, I915_GTT_PAGE_SIZE)); in fake_dma_object()
733 obj = i915_gem_object_create_internal(vm->i915, 2 * I915_GTT_PAGE_SIZE); in pot_hole()
1547 total + 2 * I915_GTT_PAGE_SIZE <= ggtt->vm.total; in igt_gtt_reserve()
1548 total += 2 * I915_GTT_PAGE_SIZE) { in igt_gtt_reserve()
1581 vma->node.size != 2*I915_GTT_PAGE_SIZE) { in igt_gtt_reserve()
1584 total, 2*I915_GTT_PAGE_SIZE); in igt_gtt_reserve()
1591 for (total = I915_GTT_PAGE_SIZE; in igt_gtt_reserve()
1592 total + 2 * I915_GTT_PAGE_SIZE <= ggtt->vm.total; in igt_gtt_reserve()
1593 total += 2 * I915_GTT_PAGE_SIZE) { in igt_gtt_reserve()
1627 vma->node.size != 2*I915_GTT_PAGE_SIZE) { in igt_gtt_reserve()
[all …]
/linux-6.12.1/drivers/gpu/drm/i915/gt/
intel_ggtt.c
42 *start += I915_GTT_PAGE_SIZE; in i915_ggtt_color_adjust()
52 *end -= I915_GTT_PAGE_SIZE; in i915_ggtt_color_adjust()
443 (gen8_pte_t __iomem *)ggtt->gsm + offset / I915_GTT_PAGE_SIZE; in gen8_ggtt_insert_page()
483 gte += (vma_res->start - vma_res->guard) / I915_GTT_PAGE_SIZE; in gen8_ggtt_insert_entries()
484 end = gte + vma_res->guard / I915_GTT_PAGE_SIZE; in gen8_ggtt_insert_entries()
487 end += (vma_res->node_size + vma_res->guard) / I915_GTT_PAGE_SIZE; in gen8_ggtt_insert_entries()
514 start = (vma_res->start - vma_res->guard) / I915_GTT_PAGE_SIZE; in __gen8_ggtt_insert_entries_bind()
515 end = start + vma_res->guard / I915_GTT_PAGE_SIZE; in __gen8_ggtt_insert_entries_bind()
520 end += (vma_res->node_size + vma_res->guard) / I915_GTT_PAGE_SIZE; in __gen8_ggtt_insert_entries_bind()
522 vma_res->node_size / I915_GTT_PAGE_SIZE, pte_encode)) in __gen8_ggtt_insert_entries_bind()
[all …]
selftest_context.c
112 vaddr += engine->context_size - I915_GTT_PAGE_SIZE; in __live_context_size()
113 memset(vaddr, POISON_INUSE, I915_GTT_PAGE_SIZE); in __live_context_size()
136 if (memchr_inv(vaddr, POISON_INUSE, I915_GTT_PAGE_SIZE)) { in __live_context_size()
178 engine->context_size += I915_GTT_PAGE_SIZE; in live_context_size()
182 engine->context_size -= I915_GTT_PAGE_SIZE; in live_context_size()
gen6_ppgtt.c
78 const unsigned int first_entry = start / I915_GTT_PAGE_SIZE; in gen6_ppgtt_clear_range()
82 unsigned int num_entries = length / I915_GTT_PAGE_SIZE; in gen6_ppgtt_clear_range()
117 unsigned int first_entry = vma_res->start / I915_GTT_PAGE_SIZE; in gen6_ppgtt_insert_entries()
128 GEM_BUG_ON(sg_dma_len(iter.sg) < I915_GTT_PAGE_SIZE); in gen6_ppgtt_insert_entries()
131 iter.dma += I915_GTT_PAGE_SIZE; in gen6_ppgtt_insert_entries()
147 vma_res->page_sizes_gtt = I915_GTT_PAGE_SIZE; in gen6_ppgtt_insert_entries()
288 u32 ggtt_offset = vma_res->start / I915_GTT_PAGE_SIZE; in pd_vma_bind()
gen8_ppgtt.c
469 GEM_BUG_ON(sg_dma_len(iter->sg) < I915_GTT_PAGE_SIZE); in gen8_ppgtt_insert_pte()
472 iter->dma += I915_GTT_PAGE_SIZE; in gen8_ppgtt_insert_pte()
542 page_size = I915_GTT_PAGE_SIZE; in xehp_ppgtt_insert_huge()
583 I915_GTT_PAGE_SIZE); in xehp_ppgtt_insert_huge()
648 page_size = I915_GTT_PAGE_SIZE; in gen8_ppgtt_insert_huge()
654 rem >= (I915_PDES - index) * I915_GTT_PAGE_SIZE)) in gen8_ppgtt_insert_huge()
682 rem >= (I915_PDES - index) * I915_GTT_PAGE_SIZE))) in gen8_ppgtt_insert_huge()
743 if (vma_res->bi.page_sizes.sg > I915_GTT_PAGE_SIZE) { in gen8_ppgtt_insert()
759 vma_res->page_sizes_gtt = I915_GTT_PAGE_SIZE; in gen8_ppgtt_insert()
intel_gtt.h (definitions; see the sketch after this listing)
49 #define I915_GTT_PAGE_SIZE I915_GTT_PAGE_SIZE_4K macro
52 #define I915_GTT_PAGE_MASK -I915_GTT_PAGE_SIZE
54 #define I915_GTT_MIN_ALIGNMENT I915_GTT_PAGE_SIZE
172 __for_each_sgt_daddr(__dp, __iter, __sgt, I915_GTT_PAGE_SIZE)
175 __for_each_daddr_next(__dp, __iter, I915_GTT_PAGE_SIZE)
intel_lrc.c
970 memset(vaddr, CONTEXT_REDZONE, I915_GTT_PAGE_SIZE); in set_redzone()
981 if (memchr_inv(vaddr, CONTEXT_REDZONE, I915_GTT_PAGE_SIZE)) in check_redzone()
1076 context_size = round_up(engine->context_size, I915_GTT_PAGE_SIZE); in __lrc_alloc_state()
1079 context_size += I915_GTT_PAGE_SIZE; /* for redzone */ in __lrc_alloc_state()
1457 GEM_BUG_ON(cs - start > I915_GTT_PAGE_SIZE / sizeof(*cs)); in setup_per_ctx_bb()
1471 GEM_BUG_ON(cs - start > I915_GTT_PAGE_SIZE / sizeof(*cs)); in setup_indirect_ctx_bb()
intel_migrate.c
380 page_size = I915_GTT_PAGE_SIZE; in emit_pte()
intel_engine_cs.c
278 BUILD_BUG_ON(I915_GTT_PAGE_SIZE != PAGE_SIZE); in intel_engine_context_size()
/linux-6.12.1/drivers/gpu/drm/i915/gvt/
aperture_gm.c
54 start = ALIGN(gvt_hidden_gmadr_base(gvt), I915_GTT_PAGE_SIZE); in alloc_gm()
55 end = ALIGN(gvt_hidden_gmadr_end(gvt), I915_GTT_PAGE_SIZE); in alloc_gm()
60 start = ALIGN(gvt_aperture_gmadr_base(gvt), I915_GTT_PAGE_SIZE); in alloc_gm()
61 end = ALIGN(gvt_aperture_gmadr_end(gvt), I915_GTT_PAGE_SIZE); in alloc_gm()
68 size, I915_GTT_PAGE_SIZE, in alloc_gm()
263 vgpu_aperture_sz(vgpu) = ALIGN(request, I915_GTT_PAGE_SIZE); in alloc_resource()
274 vgpu_hidden_sz(vgpu) = ALIGN(request, I915_GTT_PAGE_SIZE); in alloc_resource()
reg.h
112 I915_GTT_PAGE_SIZE)
scheduler.c
186 I915_GTT_PAGE_SIZE - RING_CTX_SIZE); in populate_shadow_context()
240 gpa_size += I915_GTT_PAGE_SIZE; in populate_shadow_context()
250 gpa_size = I915_GTT_PAGE_SIZE; in populate_shadow_context()
1002 gpa_size += I915_GTT_PAGE_SIZE; in update_guest_context()
1012 gpa_size = I915_GTT_PAGE_SIZE; in update_guest_context()
1044 I915_GTT_PAGE_SIZE - sizeof(*shadow_ring_context)); in update_guest_context()
cmd_parser.c
1572 if (guest_gma >= I915_GTT_PAGE_SIZE) { in cmd_address_audit()
1777 offset = gma & (I915_GTT_PAGE_SIZE - 1); in copy_gma_to_hva()
1779 copy_len = (end_gma - gma) >= (I915_GTT_PAGE_SIZE - offset) ? in copy_gma_to_hva()
1780 I915_GTT_PAGE_SIZE - offset : end_gma - gma; in copy_gma_to_hva()
2843 if (WARN_ON(!IS_ALIGNED(workload->rb_start, I915_GTT_PAGE_SIZE))) in scan_workload()
2887 I915_GTT_PAGE_SIZE))) in scan_wa_ctx()
gtt.c
907 (I915_GTT_PAGE_SIZE >> pt_entry_size_shift(spt))
1418 for (index = 0; index < (I915_GTT_PAGE_SIZE >> in sync_oos_page()
1471 oos_page->mem, I915_GTT_PAGE_SIZE); in attach_oos_page()
2359 int page_entry_num = I915_GTT_PAGE_SIZE >> in alloc_scratch_pages()
/linux-6.12.1/drivers/gpu/drm/i915/
i915_gem_gtt.c
105 GEM_BUG_ON(!IS_ALIGNED(size, I915_GTT_PAGE_SIZE)); in i915_gem_gtt_reserve()
202 GEM_BUG_ON(!IS_ALIGNED(size, I915_GTT_PAGE_SIZE)); in i915_gem_gtt_insert()
206 GEM_BUG_ON(start > 0 && !IS_ALIGNED(start, I915_GTT_PAGE_SIZE)); in i915_gem_gtt_insert()
207 GEM_BUG_ON(end < U64_MAX && !IS_ALIGNED(end, I915_GTT_PAGE_SIZE)); in i915_gem_gtt_insert()
229 BUILD_BUG_ON(I915_GTT_MIN_ALIGNMENT > I915_GTT_PAGE_SIZE); in i915_gem_gtt_insert()
i915_gem_evict.c
341 GEM_BUG_ON(!IS_ALIGNED(start, I915_GTT_PAGE_SIZE)); in i915_gem_evict_for_node()
342 GEM_BUG_ON(!IS_ALIGNED(end, I915_GTT_PAGE_SIZE)); in i915_gem_evict_for_node()
365 start -= I915_GTT_PAGE_SIZE; in i915_gem_evict_for_node()
368 end += I915_GTT_PAGE_SIZE; in i915_gem_evict_for_node()
i915_vma.c
202 GEM_BUG_ON(!IS_ALIGNED(vma->size, I915_GTT_PAGE_SIZE)); in vma_create()
807 GEM_BUG_ON(!IS_ALIGNED(size, I915_GTT_PAGE_SIZE)); in i915_vma_insert()
824 GEM_BUG_ON(!IS_ALIGNED(start, I915_GTT_PAGE_SIZE)); in i915_vma_insert()
830 end = min_t(u64, end, (1ULL << 32) - I915_GTT_PAGE_SIZE); in i915_vma_insert()
831 GEM_BUG_ON(!IS_ALIGNED(end, I915_GTT_PAGE_SIZE)); in i915_vma_insert()
883 vma->page_sizes.sg > I915_GTT_PAGE_SIZE && in i915_vma_insert()
987 sg_set_page(sg, NULL, I915_GTT_PAGE_SIZE, 0); in rotate_pages()
990 sg_dma_len(sg) = I915_GTT_PAGE_SIZE; in rotate_pages()
995 left = (dst_stride - height) * I915_GTT_PAGE_SIZE; in rotate_pages()
1070 sg_set_page(sg, NULL, count * I915_GTT_PAGE_SIZE, 0); in add_padding_pages()
[all …]
i915_vma_resource.c
278 *start -= I915_GTT_PAGE_SIZE; in i915_vma_resource_color_adjust_range()
279 *end += I915_GTT_PAGE_SIZE; in i915_vma_resource_color_adjust_range()
/linux-6.12.1/drivers/gpu/drm/i915/display/
intel_dpt.c
50 gen8_set_pte(base + offset / I915_GTT_PAGE_SIZE, in dpt_insert_page()
71 i = vma_res->start / I915_GTT_PAGE_SIZE; in dpt_insert_entries()
101 vma_res->page_sizes_gtt = I915_GTT_PAGE_SIZE; in dpt_bind_vma()
256 size = DIV_ROUND_UP_ULL(obj->size, I915_GTT_PAGE_SIZE); in intel_dpt_create()
258 size = round_up(size * sizeof(gen8_pte_t), I915_GTT_PAGE_SIZE); in intel_dpt_create()
291 vm->total = (size / sizeof(gen8_pte_t)) * I915_GTT_PAGE_SIZE; in intel_dpt_create()
intel_plane_initial.c
62 gte += base / I915_GTT_PAGE_SIZE; in initial_plane_phys_lmem()
/linux-6.12.1/drivers/gpu/drm/i915/gem/selftests/
huge_gem_object.c
111 GEM_BUG_ON(!IS_ALIGNED(dma_size, I915_GTT_PAGE_SIZE)); in huge_gem_object()
i915_gem_context.c
1520 GEM_BUG_ON(offset < I915_GTT_PAGE_SIZE); in write_to_scratch()
1610 GEM_BUG_ON(offset < I915_GTT_PAGE_SIZE); in read_from_scratch()
1859 I915_GTT_PAGE_SIZE, vm_total, in igt_vm_isolation()
huge_pages.c
334 GEM_BUG_ON(!IS_ALIGNED(size, I915_GTT_PAGE_SIZE)); in fake_huge_pages_object()
/linux-6.12.1/drivers/gpu/drm/i915/gem/
i915_gem_domain.c
21 #define VTD_GUARD (168u * I915_GTT_PAGE_SIZE) /* 168 or tile-row PTE padding */
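The intel_gtt.h hits above show the definition chain for this constant: I915_GTT_PAGE_SIZE is an alias for the 4K GTT page size, and both I915_GTT_PAGE_MASK and I915_GTT_MIN_ALIGNMENT are derived from it. Below is a minimal, self-contained sketch of how that chain relates to the IS_ALIGNED()/masking checks seen throughout the listing. The 4K base value and the simplified IS_ALIGNED() stand-in are assumptions for illustration only, not copies of the kernel headers.

/*
 * Sketch of the definitions seen in the intel_gtt.h hits above.
 * Assumptions: 4 KiB base page, simplified IS_ALIGNED(); in the kernel
 * these come from intel_gtt.h and <linux/align.h>.
 */
#include <stdint.h>
#include <stdio.h>

#define I915_GTT_PAGE_SIZE_4K   (1ULL << 12)            /* assumed 4 KiB base page */
#define I915_GTT_PAGE_SIZE      I915_GTT_PAGE_SIZE_4K
#define I915_GTT_PAGE_MASK      (-I915_GTT_PAGE_SIZE)   /* two's complement clears the low 12 bits */
#define I915_GTT_MIN_ALIGNMENT  I915_GTT_PAGE_SIZE

#define IS_ALIGNED(x, a) (((x) & ((a) - 1)) == 0)       /* simplified stand-in */

int main(void)
{
	uint64_t offset = 0x12345;

	/* The GEM_BUG_ON(!IS_ALIGNED(...)) checks in the listing reduce to this test. */
	printf("aligned to GTT page: %s\n",
	       IS_ALIGNED(offset, I915_GTT_PAGE_SIZE) ? "yes" : "no");

	/* Masking rounds an offset down to the start of its GTT page. */
	printf("page start: 0x%llx\n",
	       (unsigned long long)(offset & I915_GTT_PAGE_MASK));
	return 0;
}

Masking with -I915_GTT_PAGE_SIZE rounds an offset down to its containing 4K GTT page, which is the alignment that the GEM_BUG_ON(!IS_ALIGNED(...)) assertions in the listing enforce for node starts, ends, and sizes.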
