/linux-6.12.1/mm/
userfaultfd.c
   102  static struct vm_area_struct *uffd_mfill_lock(struct mm_struct *dst_mm,   in uffd_mfill_lock() argument
   108  dst_vma = uffd_lock_vma(dst_mm, dst_start);   in uffd_mfill_lock()
   123  static struct vm_area_struct *uffd_mfill_lock(struct mm_struct *dst_mm,   in uffd_mfill_lock() argument
   129  mmap_read_lock(dst_mm);   in uffd_mfill_lock()
   130  dst_vma = find_vma_and_prepare_anon(dst_mm, dst_start);   in uffd_mfill_lock()
   139  mmap_read_unlock(dst_mm);   in uffd_mfill_lock()
   177  struct mm_struct *dst_mm = dst_vma->vm_mm;   in mfill_atomic_install_pte() local
   195  dst_pte = pte_offset_map_lock(dst_mm, dst_pmd, dst_addr, &ptl);   in mfill_atomic_install_pte()
   227  inc_mm_counter(dst_mm, mm_counter(folio));   in mfill_atomic_install_pte()
   229  set_pte_at(dst_mm, dst_addr, dst_pte, _dst_pte);   in mfill_atomic_install_pte()
  [all …]
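Taken together, the userfaultfd.c hits show one contract: both definitions of uffd_mfill_lock() (the per-VMA-lock variant at line 102 and the mmap_read_lock() fallback at line 123) return with the destination mm locked for read on success and fully unlocked on failure, so mfill_atomic_install_pte() can map and set PTEs safely. Below is a minimal userspace sketch of that lock-then-lookup contract, with pthreads standing in for the kernel locks; every name here (addr_space, vma_stub, mfill_lock, space_find_vma) is hypothetical, not the kernel API.

/* Userspace analog of the uffd_mfill_lock() pattern above: acquire a
 * read lock, look up an entry, and keep the lock held only on success.
 * All types and helpers here are invented stand-ins. */
#include <pthread.h>
#include <stddef.h>

struct vma_stub { unsigned long start, end; };

struct addr_space {
	pthread_rwlock_t lock;          /* stands in for mmap_read_lock() */
	struct vma_stub *vmas;
	size_t nr_vmas;
};

static struct vma_stub *space_find_vma(struct addr_space *as, unsigned long addr)
{
	for (size_t i = 0; i < as->nr_vmas; i++)
		if (addr >= as->vmas[i].start && addr < as->vmas[i].end)
			return &as->vmas[i];
	return NULL;
}

/* On success the read lock is still held, as in the kernel code; the
 * caller releases it once it has finished installing entries. */
static struct vma_stub *mfill_lock(struct addr_space *as, unsigned long addr)
{
	struct vma_stub *vma;

	pthread_rwlock_rdlock(&as->lock);
	vma = space_find_vma(as, addr);
	if (!vma)
		pthread_rwlock_unlock(&as->lock);  /* mirrors mmap_read_unlock() on failure */
	return vma;
}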
huge_memory.c
  1566  int copy_huge_pmd(struct mm_struct *dst_mm, struct mm_struct *src_mm,   in copy_huge_pmd() argument
  1579  dst_ptl = pmd_lock(dst_mm, dst_pmd);   in copy_huge_pmd()
  1599  pgtable = pte_alloc_one(dst_mm);   in copy_huge_pmd()
  1603  dst_ptl = pmd_lock(dst_mm, dst_pmd);   in copy_huge_pmd()
  1625  add_mm_counter(dst_mm, MM_ANONPAGES, HPAGE_PMD_NR);   in copy_huge_pmd()
  1626  mm_inc_nr_ptes(dst_mm);   in copy_huge_pmd()
  1627  pgtable_trans_huge_deposit(dst_mm, dst_pmd, pgtable);   in copy_huge_pmd()
  1630  set_pmd_at(dst_mm, addr, dst_pmd, pmd);   in copy_huge_pmd()
  1637  pte_free(dst_mm, pgtable);   in copy_huge_pmd()
  1651  mm_get_huge_zero_folio(dst_mm);   in copy_huge_pmd()
  [all …]
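The copy_huge_pmd() hits trace a prealloc-then-deposit shape: pte_alloc_one() at line 1599 runs before the pmd lock is taken at 1603 (allocation may sleep, the locked section may not), the table is deposited and the counters bumped under the lock (1625-1627), and pte_free() at 1637 reclaims it on a failure path. Here is a standalone C sketch of that shape, assuming invented stand-ins (dst_space, copy_huge_entry) for the kernel objects.

/* Analog of the allocate-then-deposit flow in copy_huge_pmd(): allocate
 * before locking, deposit under the lock, free on the error path. */
#include <pthread.h>
#include <stdlib.h>
#include <errno.h>

struct dst_space {
	pthread_mutex_t lock;       /* stands in for pmd_lock()            */
	void *deposited;            /* stands in for the deposited pgtable */
	long anon_pages;            /* stands in for the MM_ANONPAGES counter */
};

static int copy_huge_entry(struct dst_space *dst, int src_entry_valid,
			   long pages_per_entry)
{
	void *pgtable = malloc(4096);        /* pte_alloc_one() analog */
	if (!pgtable)
		return -ENOMEM;

	pthread_mutex_lock(&dst->lock);
	if (!src_entry_valid) {              /* source changed under us */
		pthread_mutex_unlock(&dst->lock);
		free(pgtable);               /* pte_free() on the error path */
		return -EAGAIN;
	}
	dst->deposited = pgtable;            /* pgtable_trans_huge_deposit() analog */
	dst->anon_pages += pages_per_entry;  /* add_mm_counter(..., HPAGE_PMD_NR) analog */
	pthread_mutex_unlock(&dst->lock);
	return 0;
}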
memory.c
   792  copy_nonpresent_pte(struct mm_struct *dst_mm, struct mm_struct *src_mm,   in copy_nonpresent_pte() argument
   808  if (unlikely(list_empty(&dst_mm->mmlist))) {   in copy_nonpresent_pte()
   810  if (list_empty(&dst_mm->mmlist))   in copy_nonpresent_pte()
   811  list_add(&dst_mm->mmlist,   in copy_nonpresent_pte()
   891  set_pte_at(dst_mm, addr, dst_pte,   in copy_nonpresent_pte()
   897  set_pte_at(dst_mm, addr, dst_pte, pte);   in copy_nonpresent_pte()
  1084  struct mm_struct *dst_mm = dst_vma->vm_mm;   in copy_pte_range() local
  1108  dst_pte = pte_alloc_map_lock(dst_mm, dst_pmd, addr, &dst_ptl);   in copy_pte_range()
  1143  ret = copy_nonpresent_pte(dst_mm, src_mm,   in copy_pte_range()
  1193  add_mm_rss_vec(dst_mm, rss);   in copy_pte_range()
  [all …]
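Lines 808-811 are a check/lock/re-check idiom: list_empty(&dst_mm->mmlist) is tested locklessly first, and only if the mm looks unlinked is the global lock taken and the test repeated before list_add(), so the common already-linked case pays no lock cost. A self-contained sketch of the same idiom with a pthread mutex follows; the list helpers are hypothetical simplifications of the kernel's list.h, not its API.

/* Double-checked list insertion, modeled on the mmlist lines above.
 * An initialized-empty node (next == itself) means "not yet linked". */
#include <pthread.h>

struct list_node { struct list_node *prev, *next; };

static pthread_mutex_t global_list_lock = PTHREAD_MUTEX_INITIALIZER;

static void node_init(struct list_node *n) { n->prev = n->next = n; }

static int node_on_list(struct list_node *n) { return n->next != n; }

static void list_add_head(struct list_node *n, struct list_node *head)
{
	n->next = head->next;
	n->prev = head;
	head->next->prev = n;
	head->next = n;
}

static void maybe_add_to_global_list(struct list_node *n, struct list_node *head)
{
	if (!node_on_list(n)) {               /* cheap lockless first look    */
		pthread_mutex_lock(&global_list_lock);
		if (!node_on_list(n))         /* re-check now that we hold it */
			list_add_head(n, head);
		pthread_mutex_unlock(&global_list_lock);
	}
}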
hugetlb.c
  6536  struct mm_struct *dst_mm = dst_vma->vm_mm;   in hugetlb_mfill_atomic_pte() local
  6552  ptl = huge_pte_lock(h, dst_mm, dst_pte);   in hugetlb_mfill_atomic_pte()
  6555  if (!huge_pte_none(huge_ptep_get(dst_mm, dst_addr, dst_pte))) {   in hugetlb_mfill_atomic_pte()
  6561  set_huge_pte_at(dst_mm, dst_addr, dst_pte, _dst_pte, size);   in hugetlb_mfill_atomic_pte()
  6679  ptl = huge_pte_lock(h, dst_mm, dst_pte);   in hugetlb_mfill_atomic_pte()
  6691  if (!huge_pte_none_mostly(huge_ptep_get(dst_mm, dst_addr, dst_pte)))   in hugetlb_mfill_atomic_pte()
  6721  set_huge_pte_at(dst_mm, dst_addr, dst_pte, _dst_pte, size);   in hugetlb_mfill_atomic_pte()
  6723  hugetlb_count_add(pages_per_huge_page(h), dst_mm);   in hugetlb_mfill_atomic_pte()
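Both hugetlb paths shown (6552-6561 and 6679-6723) follow the same install discipline: re-read the destination PTE under huge_pte_lock(), refuse to overwrite anything already present, and account the pages only after a successful set_huge_pte_at(). A compact userspace analog of that compare-and-install step; huge_slot, mfill_install, and pages_installed are invented names.

/* Install-if-empty under a per-slot lock, mirroring the hugetlb hits. */
#include <pthread.h>
#include <errno.h>

struct huge_slot {
	pthread_mutex_t lock;   /* stands in for huge_pte_lock()        */
	unsigned long entry;    /* 0 plays the role of huge_pte_none()  */
};

static long pages_installed;    /* stands in for the hugetlb mm counter */

static int mfill_install(struct huge_slot *slot, unsigned long new_entry,
			 long pages_per_entry)
{
	int ret = -EEXIST;

	pthread_mutex_lock(&slot->lock);
	if (slot->entry == 0) {                      /* huge_pte_none() check      */
		slot->entry = new_entry;             /* set_huge_pte_at() analog   */
		pages_installed += pages_per_entry;  /* hugetlb_count_add() analog */
		ret = 0;
	}
	pthread_mutex_unlock(&slot->lock);
	return ret;
}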
/linux-6.12.1/include/linux/
huge_mm.h
    12  int copy_huge_pmd(struct mm_struct *dst_mm, struct mm_struct *src_mm,
    16  int copy_huge_pud(struct mm_struct *dst_mm, struct mm_struct *src_mm,
/linux-6.12.1/drivers/gpu/drm/amd/amdgpu/
amdgpu_ttm.c
   295  struct amdgpu_res_cursor src_mm, dst_mm;   in amdgpu_ttm_copy_mem_to_mem() local
   307  amdgpu_res_first(dst->mem, dst->offset, size, &dst_mm);   in amdgpu_ttm_copy_mem_to_mem()
   316  cur_size = min3(src_mm.size, dst_mm.size, 256ULL << 20);   in amdgpu_ttm_copy_mem_to_mem()
   324  r = amdgpu_ttm_map_buffer(dst->bo, dst->mem, &dst_mm,   in amdgpu_ttm_copy_mem_to_mem()
   357  amdgpu_res_next(&dst_mm, cur_size);   in amdgpu_ttm_copy_mem_to_mem()
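Here dst_mm is not an mm_struct at all but an amdgpu_res_cursor: amdgpu_ttm_copy_mem_to_mem() walks two resource cursors, copying min3(src remaining, dst remaining, 256 MiB) per pass (line 316) and advancing both with amdgpu_res_next() (line 357) so neither extent is overrun. The standalone loop below models just that chunking logic in plain C; res_cursor, res_next, and the memcpy are simplified stand-ins for the driver's cursors and DMA copy, not the amdgpu API.

/* Chunked two-cursor copy, modeled on amdgpu_ttm_copy_mem_to_mem(). */
#include <string.h>
#include <stdint.h>

struct res_cursor { uint8_t *base; uint64_t size; };  /* size = bytes remaining */

#define MIN3(a, b, c) ((a) < (b) ? ((a) < (c) ? (a) : (c)) \
			         : ((b) < (c) ? (b) : (c)))

static void res_next(struct res_cursor *cur, uint64_t n)
{
	cur->base += n;
	cur->size -= n;
}

static void copy_mem_to_mem(struct res_cursor *src, struct res_cursor *dst,
			    uint64_t window)   /* 256ULL << 20 in the driver */
{
	while (src->size && dst->size) {
		uint64_t cur_size = MIN3(src->size, dst->size, window);

		memcpy(dst->base, src->base, cur_size);  /* DMA-copy stand-in */
		res_next(src, cur_size);
		res_next(dst, cur_size);
	}
}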