Searched refs: curr_pte (Results 1 – 6 of 6) sorted by relevance
/linux-6.12.1/drivers/accel/habanalabs/common/mmu/
mmu_v2.c
     60  u64 hop_addr[MMU_ARCH_6_HOPS] = { 0 }, hop_pte_addr[MMU_ARCH_6_HOPS] = { 0 }, curr_pte,    [in hl_mmu_v2_unmap(), local]
     84  curr_pte = *(u64 *) (uintptr_t) hop_pte_addr[0];    [in hl_mmu_v2_unmap()]
     87  hop_addr[i] = hl_mmu_get_next_hop_addr(ctx, curr_pte);    [in hl_mmu_v2_unmap()]
     96  curr_pte = *(u64 *) (uintptr_t) hop_pte_addr[i];    [in hl_mmu_v2_unmap()]
     98  if ((i <= hop_last) && (curr_pte & mmu_prop->last_mask)) {    [in hl_mmu_v2_unmap()]
    110  if (!(curr_pte & PAGE_PRESENT_MASK))    [in hl_mmu_v2_unmap()]
    134  curr_pte = 0, scrambled_virt_addr, scrambled_phys_addr;    [in hl_mmu_v2_map(), local]
    156  curr_pte = *(u64 *) (uintptr_t) hop_pte_addr[0];    [in hl_mmu_v2_map()]
    160  hop_addr[i] = hl_mmu_dr_get_alloc_next_hop_addr(ctx, curr_pte, &hop_new[i]);    [in hl_mmu_v2_map()]
    178  curr_pte = *(u64 *) (uintptr_t) hop_pte_addr[i];    [in hl_mmu_v2_map()]
    [all …]
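
The mmu_v2.c hits above are the device-resident (DR) hop walk: hl_mmu_v2_unmap() reads the PTE of the current hop (lines 84, 96), stops when mmu_prop->last_mask marks the final mapping (line 98), and bails out if PAGE_PRESENT_MASK is clear (line 110), while hl_mmu_v2_map() does the same descent but allocates missing hops via hl_mmu_dr_get_alloc_next_hop_addr() (line 160). Below is a minimal standalone sketch of that walk pattern; the bit masks, the 4-bit-per-hop index split, and the walk_hops() helper are illustrative assumptions, not the driver's actual layout or API.

/*
 * Toy hop walk in the spirit of the hl_mmu_v2_unmap() hits above:
 * read the PTE for the current hop, stop when the "last" bit marks
 * a final mapping, fail when the PTE is not present.
 * All names and mask values are placeholders for illustration only.
 */
#include <stdint.h>
#include <stdio.h>

#define MAX_HOPS          4
#define PAGE_PRESENT_BIT  (1ULL << 0)   /* placeholder valid bit      */
#define LAST_BIT          (1ULL << 1)   /* placeholder "last hop" bit */
#define ADDR_MASK         (~0xFFFULL)   /* placeholder address field  */

/* One toy page table per hop, indexed by a slice of the virtual address. */
static uint64_t hop_table[MAX_HOPS][16];

/* Walk the hops for @virt_addr; return the PTE of the final mapping,
 * or 0 if some hop is not present. */
static uint64_t walk_hops(uint64_t virt_addr)
{
	uint64_t curr_pte = 0;
	int i;

	for (i = 0; i < MAX_HOPS; i++) {
		/* Pick a 4-bit index out of the virtual address per hop. */
		unsigned int idx =
			(virt_addr >> (12 + 4 * (MAX_HOPS - 1 - i))) & 0xF;

		curr_pte = hop_table[i][idx];

		if (!(curr_pte & PAGE_PRESENT_BIT))
			return 0;               /* nothing mapped here */

		if (curr_pte & LAST_BIT)
			return curr_pte;        /* final (possibly huge) mapping */

		/* Otherwise the PTE would point at the next hop table; in
		 * this toy model the tables are fixed hop_table[] rows. */
	}

	return curr_pte;
}

int main(void)
{
	uint64_t va = 0x12345000ULL;

	/* Populate a fake two-hop mapping: hop 0 descends, hop 1 is last. */
	hop_table[0][(va >> (12 + 4 * 3)) & 0xF] = 0x1000ULL | PAGE_PRESENT_BIT;
	hop_table[1][(va >> (12 + 4 * 2)) & 0xF] =
		(0xABC000ULL & ADDR_MASK) | PAGE_PRESENT_BIT | LAST_BIT;

	printf("final pte: 0x%llx\n", (unsigned long long)walk_hops(va));
	return 0;
}

In the real driver the address of the next hop table comes out of the PTE itself via hl_mmu_get_next_hop_addr() (see the mmu.c results below); the fixed hop_table[] rows here only stand in for that indirection.
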
mmu_v1.c
    222  u64 hop_addr[MMU_V1_MAX_HOPS] = {0}, hop_pte_addr[MMU_V1_MAX_HOPS] = {0}, curr_pte = 0;    [in hl_mmu_v1_unmap(), local]
    236  hop_addr[hop_idx] = hl_mmu_get_next_hop_addr(ctx, curr_pte);    [in hl_mmu_v1_unmap()]
    244  curr_pte = *(u64 *) (uintptr_t) hop_pte_addr[hop_idx];    [in hl_mmu_v1_unmap()]
    247  is_huge = curr_pte & mmu_prop->last_mask;    [in hl_mmu_v1_unmap()]
    256  hop_addr[hop_idx] = hl_mmu_get_next_hop_addr(ctx, curr_pte);    [in hl_mmu_v1_unmap()]
    262  curr_pte = *(u64 *) (uintptr_t) hop_pte_addr[hop_idx];    [in hl_mmu_v1_unmap()]
    270  if (curr_pte == default_pte) {    [in hl_mmu_v1_unmap()]
    277  if (!(curr_pte & PAGE_PRESENT_MASK)) {    [in hl_mmu_v1_unmap()]
    288  if (!(curr_pte & PAGE_PRESENT_MASK))    [in hl_mmu_v1_unmap()]
    326  u64 hop_addr[MMU_V1_MAX_HOPS] = {0}, hop_pte_addr[MMU_V1_MAX_HOPS] = {0}, curr_pte = 0;    [in hl_mmu_v1_map(), local]
    [all …]
mmu_v2_hr.c
    118  u64 curr_pte, scrambled_virt_addr, hop_pte_phys_addr[MMU_ARCH_6_HOPS] = { 0 };    [in _hl_mmu_v2_hr_unmap(), local]
    133  curr_pte = 0;    [in _hl_mmu_v2_hr_unmap()]
    141  &ctx->hdev->mmu_func[MMU_HR_PGT].hr_funcs, curr_pte);    [in _hl_mmu_v2_hr_unmap()]
    151  curr_pte = *(u64 *) (uintptr_t) hl_mmu_hr_pte_phys_to_virt(ctx, hops_pgt_info[i],    [in _hl_mmu_v2_hr_unmap()]
    155  if ((i < hop_last) && (curr_pte & mmu_prop->last_mask)) {    [in _hl_mmu_v2_hr_unmap()]
    167  if (!(curr_pte & PAGE_PRESENT_MASK))    [in _hl_mmu_v2_hr_unmap()]
    210  curr_pte = 0, scrambled_virt_addr, scrambled_phys_addr;    [in _hl_mmu_v2_hr_map(), local]
    249  mmu_prop, curr_pte, &hop_new[i]);    [in _hl_mmu_v2_hr_map()]
    256  curr_pte = *(u64 *) (uintptr_t) hl_mmu_hr_pte_phys_to_virt(ctx, hops_pgt_info[i],    [in _hl_mmu_v2_hr_map()]
    261  if (curr_pte & PAGE_PRESENT_MASK) {    [in _hl_mmu_v2_hr_map()]
    [all …]
mmu.c
    721  u64 hl_mmu_get_next_hop_addr(struct hl_ctx *ctx, u64 curr_pte)    [in hl_mmu_get_next_hop_addr(), argument]
    723  return (curr_pte & PAGE_PRESENT_MASK) ? (curr_pte & HOP_PHYS_ADDR_MASK) : ULLONG_MAX;    [in hl_mmu_get_next_hop_addr()]
   1045  u64 curr_pte)    [in hl_mmu_hr_get_next_hop_pgt_info(), argument]
   1047  u64 next_hop_phys_addr = hl_mmu_get_next_hop_addr(ctx, curr_pte);    [in hl_mmu_hr_get_next_hop_pgt_info()]
   1135  u64 curr_pte, bool *is_new_hop)    [in hl_mmu_hr_get_alloc_next_hop(), argument]
   1137  u64 hop_addr = hl_mmu_get_next_hop_addr(ctx, curr_pte);    [in hl_mmu_hr_get_alloc_next_hop()]
   1361  u64 hl_mmu_dr_get_alloc_next_hop_addr(struct hl_ctx *ctx, u64 curr_pte, bool *is_new_hop)    [in hl_mmu_dr_get_alloc_next_hop_addr(), argument]
   1363  u64 hop_addr = hl_mmu_get_next_hop_addr(ctx, curr_pte);    [in hl_mmu_dr_get_alloc_next_hop_addr()]
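
mmu.c lines 721 and 723 show the whole of hl_mmu_get_next_hop_addr(): a present PTE yields the physical address of the next hop's page table, anything else yields ULLONG_MAX as a "no next hop" sentinel; the *get_alloc_next_hop* variants at 1137 and 1363 start from the same call and, as their names suggest, fall back to allocating a fresh hop when none is present yet. A hedged standalone rendering of the core helper follows, with placeholder mask values and the unused ctx argument dropped.

/*
 * Standalone sketch of the next-hop extraction shown at mmu.c:723.
 * The mask values are illustrative placeholders, not the real
 * Habana PTE layout.
 */
#include <stdint.h>
#include <stdio.h>

#define PAGE_PRESENT_MASK   0x1ULL               /* placeholder valid bit     */
#define HOP_PHYS_ADDR_MASK  0xFFFFFFFFF000ULL    /* placeholder address field */

/* A present PTE points at the next hop table; otherwise the walk stops
 * and the caller sees an all-ones sentinel (ULLONG_MAX in the driver). */
static uint64_t next_hop_addr(uint64_t curr_pte)
{
	return (curr_pte & PAGE_PRESENT_MASK) ?
	       (curr_pte & HOP_PHYS_ADDR_MASK) : UINT64_MAX;
}

int main(void)
{
	printf("present: 0x%llx\n",
	       (unsigned long long)next_hop_addr(0xABC000ULL | PAGE_PRESENT_MASK));
	printf("absent:  0x%llx\n",
	       (unsigned long long)next_hop_addr(0));
	return 0;
}

The all-ones value works as a sentinel because no valid hop table can live at that physical address, so callers only need a single comparison to decide whether to allocate a new hop.
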
/linux-6.12.1/arch/x86/kvm/mmu/
paging_tmpl.h
    564  pt_element_t curr_pte;    [in FNAME(), local]
    576  curr_pte = gw->prefetch_ptes[index];    [in FNAME()]
    579  &curr_pte, sizeof(curr_pte));    [in FNAME()]
    581  return r || curr_pte != gw->ptes[level - 1];    [in FNAME()]
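
These paging_tmpl.h hits appear to come from FNAME(gpte_changed): before reusing a cached guest walk, KVM re-reads the guest PTE, either from gw->prefetch_ptes[] or from guest memory, and compares it against the value recorded in the walker (gw->ptes[level - 1]); a read error or a mismatch means the translation is stale. Below is a minimal sketch of that re-read-and-compare check, with toy stand-ins for guest memory and the walker snapshot; nothing here is KVM's actual API.

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>
#include <string.h>

typedef uint64_t pt_element_t;

/* Toy stand-in for the snapshot a page walk leaves behind. */
struct walker_snapshot {
	pt_element_t cached_pte;   /* value observed during the walk */
};

static pt_element_t guest_memory_pte;  /* "current" PTE in guest memory */

/* Re-read the PTE and report whether it changed since the walk.
 * Returning true means the cached translation can no longer be trusted. */
static bool gpte_changed(const struct walker_snapshot *snap)
{
	pt_element_t curr_pte;
	int r = 0;  /* stands in for the guest-read error code */

	memcpy(&curr_pte, &guest_memory_pte, sizeof(curr_pte));

	return r || curr_pte != snap->cached_pte;
}

int main(void)
{
	struct walker_snapshot snap = { .cached_pte = 0x1000ULL | 0x1 };

	guest_memory_pte = snap.cached_pte;
	printf("unchanged -> %d\n", gpte_changed(&snap));   /* prints 0 */

	guest_memory_pte = 0;                               /* guest zapped it */
	printf("changed   -> %d\n", gpte_changed(&snap));   /* prints 1 */
	return 0;
}

In the kernel the non-prefetch path fetches the value through a guest-memory read helper (the call whose trailing arguments show at line 579); a plain memcpy stands in for it here.
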
/linux-6.12.1/drivers/accel/habanalabs/common/
habanalabs.h
   3891  u64 hl_mmu_get_next_hop_addr(struct hl_ctx *ctx, u64 curr_pte);
   3911  u64 curr_pte);
   3919  u64 curr_pte, bool *is_new_hop);
   3943  u64 hl_mmu_dr_get_alloc_next_hop_addr(struct hl_ctx *ctx, u64 curr_pte, bool *is_new_hop);