Lines matching refs:virt_addr (cross-reference of virt_addr usages in the habanalabs driver's common MMU code; each entry shows the source line number, the matching line, and the enclosing function)
29 bool hl_is_dram_va(struct hl_device *hdev, u64 virt_addr) in hl_is_dram_va() argument
33 return hl_mem_area_inside_range(virt_addr, prop->dmmu.page_size, in hl_is_dram_va()
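The two lines above show hl_is_dram_va() reducing to a range check: the VA, taken as one DRAM-MMU page, must sit entirely inside the device's DRAM address range. The body of hl_mem_area_inside_range() is not part of this listing, so the standalone sketch below, including the wrap-around guard, is an assumption:

    #include <stdbool.h>
    #include <stdint.h>

    /* Does [addr, addr + size) fall entirely inside [start, end)? */
    bool mem_area_inside_range(uint64_t addr, uint64_t size,
                               uint64_t start, uint64_t end)
    {
            uint64_t area_end = addr + size;

            /* area_end > addr also rejects overflow of addr + size */
            return addr >= start && area_end <= end && area_end > addr;
    }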
227 int hl_mmu_unmap_page(struct hl_ctx *ctx, u64 virt_addr, u32 page_size, bool flush_pte) in hl_mmu_unmap_page() argument
240 is_dram_addr = hl_is_dram_va(hdev, virt_addr); in hl_mmu_unmap_page()
252 real_virt_addr = virt_addr; in hl_mmu_unmap_page()
266 trace_habanalabs_mmu_unmap(&hdev->pdev->dev, virt_addr, 0, page_size, flush_pte); in hl_mmu_unmap_page()
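hl_mmu_unmap_page() first classifies the VA (line 240), walks the mapping through real_virt_addr (line 252), and traces the result (line 266). A hedged caller sketch, kernel context assumed, with ctx, hdev, virt_addr and page_size coming from the surrounding code:

    int rc;

    /* Unmap a single page and flush the PTE update to the device. */
    rc = hl_mmu_unmap_page(ctx, virt_addr, page_size, true /* flush_pte */);
    if (rc)
            dev_err(hdev->dev, "unmap failed for va 0x%llx\n", virt_addr);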
293 int hl_mmu_map_page(struct hl_ctx *ctx, u64 virt_addr, u64 phys_addr, u32 page_size, in hl_mmu_map_page() argument
308 is_dram_addr = hl_is_dram_va(hdev, virt_addr); in hl_mmu_map_page()
327 (hdev->asic_funcs->scramble_addr(hdev, virt_addr) & in hl_mmu_map_page()
330 (virt_addr & (real_page_size - 1))))) in hl_mmu_map_page()
333 phys_addr, virt_addr, real_page_size); in hl_mmu_map_page()
336 real_virt_addr = virt_addr; in hl_mmu_map_page()
353 trace_habanalabs_mmu_map(&hdev->pdev->dev, virt_addr, phys_addr, page_size, flush_pte); in hl_mmu_map_page()
358 real_virt_addr = virt_addr; in hl_mmu_map_page()
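Lines 327-330 are the alignment guard in hl_mmu_map_page(): for a DRAM VA the scrambled addresses must be page-aligned, and for a host VA the plain addresses must be aligned to real_page_size; a violation is reported with the triple printed on line 333. Since page sizes are powers of two, the test is mask arithmetic, sketched standalone below (function name hypothetical):

    #include <stdbool.h>
    #include <stdint.h>

    /* real_page_size is a power of two, so its low bits form the offset. */
    bool page_misaligned(uint64_t phys_addr, uint64_t virt_addr,
                         uint32_t real_page_size)
    {
            uint64_t off_mask = (uint64_t)real_page_size - 1;

            return (phys_addr & off_mask) || (virt_addr & off_mask);
    }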
382 int hl_mmu_map_contiguous(struct hl_ctx *ctx, u64 virt_addr, in hl_mmu_map_contiguous() argument
392 if (hl_mem_area_inside_range(virt_addr, size, in hl_mmu_map_contiguous()
395 else if (hl_mem_area_inside_range(virt_addr, size, in hl_mmu_map_contiguous()
398 else if (hl_mem_area_inside_range(virt_addr, size, in hl_mmu_map_contiguous()
405 curr_va = virt_addr + off; in hl_mmu_map_contiguous()
424 curr_va = virt_addr + off; in hl_mmu_map_contiguous()
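hl_mmu_map_contiguous() first classifies the whole [virt_addr, virt_addr + size) area against three candidate VA ranges (lines 392-398), then walks it in page-size steps; lines 405 and 424 show the per-iteration curr_va in the map loop and in the rollback path. A hedged sketch of such a chunked loop, where the page_size selection and the rollback label are assumptions:

    for (off = 0 ; off < size ; off += page_size) {
            u64 curr_va = virt_addr + off;
            u64 curr_pa = phys_addr + off;

            /* Flush only on the last page to batch the PTE updates. */
            rc = hl_mmu_map_page(ctx, curr_va, curr_pa, page_size,
                                 (off + page_size) >= size);
            if (rc)
                    goto unmap;
    }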
443 int hl_mmu_unmap_contiguous(struct hl_ctx *ctx, u64 virt_addr, u32 size) in hl_mmu_unmap_contiguous() argument
452 if (hl_mem_area_inside_range(virt_addr, size, in hl_mmu_unmap_contiguous()
455 else if (hl_mem_area_inside_range(virt_addr, size, in hl_mmu_unmap_contiguous()
458 else if (hl_mem_area_inside_range(virt_addr, size, in hl_mmu_unmap_contiguous()
465 curr_va = virt_addr + off; in hl_mmu_unmap_contiguous()
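hl_mmu_unmap_contiguous() mirrors the same pattern: classify the range once (lines 452-458), then unmap page by page (line 465). A matching hedged sketch:

    for (off = 0 ; off < size ; off += page_size) {
            u64 curr_va = virt_addr + off;

            rc = hl_mmu_unmap_page(ctx, curr_va, page_size,
                                   (off + page_size) >= size);
            if (rc)
                    dev_warn_ratelimited(hdev->dev,
                                         "unmap failed for va 0x%llx\n", curr_va);
    }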
476 static void hl_mmu_pa_page_with_offset(struct hl_ctx *ctx, u64 virt_addr, in hl_mmu_pa_page_with_offset() argument
512 abs_virt_addr = virt_addr - dram_base; in hl_mmu_pa_page_with_offset()
528 (virt_addr & offset_mask); in hl_mmu_pa_page_with_offset()
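hl_mmu_pa_page_with_offset() rebases a DRAM VA against the DRAM base (line 512) and finally splices the in-page offset back onto the translated page address (line 528). The composition step amounts to the following, where pte_phys_page is a hypothetical name for the page address taken from the PTE (only offset_mask appears in the listing):

    /* offset_mask = page_size - 1 for a power-of-two page size: keep the
     * page-frame bits from the PTE and the offset bits from the VA. */
    *phys_addr = (pte_phys_page & ~offset_mask) | (virt_addr & offset_mask);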
532 int hl_mmu_va_to_pa(struct hl_ctx *ctx, u64 virt_addr, u64 *phys_addr) in hl_mmu_va_to_pa() argument
539 rc = hl_mmu_get_tlb_info(ctx, virt_addr, &hops); in hl_mmu_va_to_pa()
543 hl_mmu_pa_page_with_offset(ctx, virt_addr, &hops, phys_addr); in hl_mmu_va_to_pa()
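hl_mmu_va_to_pa() is a thin wrapper: fetch the hop walk (line 539), then derive the PA with the page-offset helper (line 543). Hedged usage sketch:

    u64 pa;

    rc = hl_mmu_va_to_pa(ctx, virt_addr, &pa);
    if (rc)
            dev_err(hdev->dev, "no translation for va 0x%llx\n", virt_addr);
    else
            dev_dbg(hdev->dev, "va 0x%llx -> pa 0x%llx\n", virt_addr, pa);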
548 int hl_mmu_get_tlb_info(struct hl_ctx *ctx, u64 virt_addr, in hl_mmu_get_tlb_info() argument
562 hops->scrambled_vaddr = virt_addr; /* assume no scrambling */ in hl_mmu_get_tlb_info()
564 is_dram_addr = hl_mem_area_inside_range(virt_addr, prop->dmmu.page_size, in hl_mmu_get_tlb_info()
574 rc = mmu_funcs->get_tlb_info(ctx, virt_addr, hops); in hl_mmu_get_tlb_info()
582 hl_mmu_pa_page_with_offset(ctx, virt_addr, hops, &hops->unscrambled_paddr); in hl_mmu_get_tlb_info()
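hl_mmu_get_tlb_info() defaults scrambled_vaddr to the raw VA (line 562, covering ASICs with no scrambling), picks the DMMU or HMMU by range (line 564), delegates the walk to the per-ASIC mmu_funcs->get_tlb_info (line 574), and back-computes unscrambled_paddr (line 582). A sketch of a caller inspecting the result; fields beyond the two shown in the listing are not assumed:

    struct hl_mmu_hop_info hops = {};

    rc = hl_mmu_get_tlb_info(ctx, virt_addr, &hops);
    if (!rc)
            dev_dbg(hdev->dev, "va 0x%llx: scrambled va 0x%llx, pa 0x%llx\n",
                    virt_addr, hops.scrambled_vaddr, hops.unscrambled_paddr);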
737 u8 hop_idx, u64 hop_addr, u64 virt_addr) in hl_mmu_get_hop_pte_phys_addr() argument
749 return hop_addr + ctx->hdev->asic_prop.mmu_pte_size * ((virt_addr & mask) >> shift); in hl_mmu_get_hop_pte_phys_addr()
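Line 749 is the hop-indexing formula: the hop's VA bit-field, isolated by mask and shift, selects an entry of mmu_pte_size bytes starting at hop_addr (PTEs are u64 here, per the write on line 979 below, so 8 bytes). A standalone, runnable illustration with hypothetical numbers:

    #include <stdint.h>
    #include <stdio.h>

    int main(void)
    {
            uint64_t hop_addr  = 0x1000000;        /* hop table base (hypothetical) */
            uint64_t virt_addr = 0xdeadbeef000ULL; /* example VA */
            uint64_t mask      = 0x7fc0000000ULL;  /* this hop's VA field (assumed) */
            unsigned shift     = 30;               /* low bit of that field (assumed) */
            unsigned pte_size  = 8;                /* PTEs are u64 */

            uint64_t idx = (virt_addr & mask) >> shift;

            printf("entry %llu -> pte phys addr 0x%llx\n",
                   (unsigned long long)idx,
                   (unsigned long long)(hop_addr + (uint64_t)pte_size * idx));
            return 0;
    }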
794 if (ZERO_OR_NULL_PTR(hop0_pgt->virt_addr)) in hl_mmu_hr_pool_destroy()
797 gen_pool_free(*pool, (uintptr_t) hop0_pgt->virt_addr, hop_table_size); in hl_mmu_hr_pool_destroy()
831 u64 virt_addr; in hl_mmu_hr_init() local
854 virt_addr = (uintptr_t) hl_asic_dma_alloc_coherent(hdev, pool_chunk_size, in hl_mmu_hr_init()
857 if (ZERO_OR_NULL_PTR(virt_addr)) { in hl_mmu_hr_init()
864 rc = gen_pool_add_virt(hr_priv->mmu_pgt_pool, virt_addr, (phys_addr_t) dma_addr, in hl_mmu_hr_init()
874 hop0_pgt->virt_addr = (uintptr_t) in hl_mmu_hr_init()
879 if (!hop0_pgt->virt_addr) { in hl_mmu_hr_init()
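The hr_init lines sketch the host-resident page-table pool lifecycle: DMA-coherent chunks are allocated (line 854), guarded with ZERO_OR_NULL_PTR (line 857, the same check hl_mmu_hr_pool_destroy() repeats on line 794 before freeing), and registered with the genpool allocator together with their bus address (line 864); per-context hop0 tables are then carved out of the pool (lines 874-879). A hedged sketch of one chunk's setup, error unwinding trimmed:

    void *virt;
    dma_addr_t dma;

    virt = hl_asic_dma_alloc_coherent(hdev, pool_chunk_size, &dma, GFP_KERNEL);
    if (ZERO_OR_NULL_PTR(virt))
            return -ENOMEM;

    /* Record both the kernel VA and the DMA address so sub-allocations
     * are addressable by CPU and device alike. */
    rc = gen_pool_add_virt(hr_priv->mmu_pgt_pool, (unsigned long)virt,
                           (phys_addr_t)dma, pool_chunk_size, -1);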
936 gen_pool_free(hr_priv->mmu_pgt_pool, pgt_info->virt_addr, hop_table_size); in hl_mmu_hr_free_hop_remove_pgt()
959 return pgt->virt_addr + pte_offset; in hl_mmu_hr_pte_phys_to_virt()
977 u64 virt_addr = hl_mmu_hr_pte_phys_to_virt(ctx, pgt_info, phys_pte_addr, hop_table_size); in hl_mmu_hr_write_pte() local
979 *((u64 *) (uintptr_t) virt_addr) = val; in hl_mmu_hr_write_pte()
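hl_mmu_hr_pte_phys_to_virt() (line 959) and hl_mmu_hr_write_pte() (lines 977-979) show why the pool tracks both addresses: a PTE's physical address is converted to the hop table's kernel VA plus the same in-table offset, and the PTE is then written directly by the CPU (the free path on line 936 later returns the whole table to the pool). A hedged sketch of the combined step; the offset derivation is an assumption:

    /* hop_table_size is a power of two, so the low bits of the PTE's
     * physical address are its offset inside the hop table. */
    u64 pte_offset = phys_pte_addr & (hop_table_size - 1);
    u64 pte_virt = pgt_info->virt_addr + pte_offset;

    *(u64 *)(uintptr_t)pte_virt = val; /* plain CPU store; memory is DMA-coherent */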
1071 void *virt_addr; in hl_mmu_hr_alloc_hop() local
1079 virt_addr = gen_pool_dma_zalloc_align(hr_priv->mmu_pgt_pool, in hl_mmu_hr_alloc_hop()
1083 if (virt_addr) in hl_mmu_hr_alloc_hop()
1087 virt_addr = hl_asic_dma_alloc_coherent(hdev, SZ_2M, &phys_addr, in hl_mmu_hr_alloc_hop()
1089 if (ZERO_OR_NULL_PTR(virt_addr)) in hl_mmu_hr_alloc_hop()
1092 if (gen_pool_add_virt(hr_priv->mmu_pgt_pool, (unsigned long)virt_addr, in hl_mmu_hr_alloc_hop()
1094 hl_asic_dma_free_coherent(hdev, SZ_2M, virt_addr, phys_addr); in hl_mmu_hr_alloc_hop()
1095 virt_addr = NULL; in hl_mmu_hr_alloc_hop()
1100 if (ZERO_OR_NULL_PTR(virt_addr)) { in hl_mmu_hr_alloc_hop()
1107 pgt_info->virt_addr = (unsigned long)virt_addr; in hl_mmu_hr_alloc_hop()
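hl_mmu_hr_alloc_hop() grows the pool on demand: try gen_pool_dma_zalloc_align() first (lines 1079-1083); if the pool is exhausted, allocate a fresh SZ_2M DMA-coherent chunk (line 1087), feed it to the pool (line 1092, freeing it again if registration fails, lines 1094-1095), and retry. A hedged reconstruction of that loop; the iteration count and surrounding declarations are assumptions:

    for (i = 0 ; i < 2 ; i++) {
            virt_addr = gen_pool_dma_zalloc_align(hr_priv->mmu_pgt_pool,
                                                  hop_table_size, &phys_addr,
                                                  hop_table_size);
            if (virt_addr)
                    break;

            /* Pool exhausted: add another 2 MB chunk and try once more. */
            virt_addr = hl_asic_dma_alloc_coherent(hdev, SZ_2M, &phys_addr,
                                                   GFP_KERNEL);
            if (ZERO_OR_NULL_PTR(virt_addr))
                    break;

            if (gen_pool_add_virt(hr_priv->mmu_pgt_pool,
                                  (unsigned long)virt_addr, phys_addr,
                                  SZ_2M, -1)) {
                    hl_asic_dma_free_coherent(hdev, SZ_2M, virt_addr, phys_addr);
                    virt_addr = NULL;
                    break;
            }
            virt_addr = NULL; /* chunk added; retry the pool allocation */
    }

On success the function records the CPU address in the new hop's pgt_info (line 1107), which is what the phys-to-virt fixup above relies on.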
1155 int hl_mmu_hr_get_tlb_info(struct hl_ctx *ctx, u64 virt_addr, struct hl_mmu_hop_info *hops, in hl_mmu_hr_get_tlb_info() argument
1165 rc = hr_func->get_tlb_mapping_params(hdev, &mmu_prop, hops, virt_addr, &is_huge); in hl_mmu_hr_get_tlb_info()
1175 hops->scrambled_vaddr = hdev->asic_funcs->scramble_addr(hdev, virt_addr); in hl_mmu_hr_get_tlb_info()
1208 if (hops->scrambled_vaddr != virt_addr) in hl_mmu_hr_get_tlb_info()
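hl_mmu_hr_get_tlb_info() fetches the walk parameters from the per-ASIC hr_func (line 1165), records the scrambled VA (line 1175), and per the guard on line 1208 does extra work only when scrambling actually changed the address. Hedged usage, with hr_funcs assumed to come from the caller:

    struct hl_mmu_hop_info hops = {};

    rc = hl_mmu_hr_get_tlb_info(ctx, virt_addr, &hops, &hr_funcs);
    if (!rc && hops.scrambled_vaddr != virt_addr)
            dev_dbg(hdev->dev, "va 0x%llx scrambles to 0x%llx\n",
                    virt_addr, hops.scrambled_vaddr);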