
Searched refs:vm_flags (Results 1 – 25 of 290) sorted by relevance


/linux-6.12.1/mm/
mmap.c
82 unsigned long vm_flags = vma->vm_flags; in vma_set_page_prot() local
85 vm_page_prot = vm_pgprot_modify(vma->vm_page_prot, vm_flags); in vma_set_page_prot()
87 vm_flags &= ~VM_SHARED; in vma_set_page_prot()
88 vm_page_prot = vm_pgprot_modify(vm_page_prot, vm_flags); in vma_set_page_prot()
285 unsigned long flags, vm_flags_t vm_flags, in do_mmap() argument
347 vm_flags |= calc_vm_prot_bits(prot, pkey) | calc_vm_flag_bits(file, flags) | in do_mmap()
353 addr = __get_unmapped_area(file, addr, len, pgoff, flags, vm_flags); in do_mmap()
366 if (!mlock_future_ok(mm, vm_flags, len)) in do_mmap()
408 vm_flags |= VM_SHARED | VM_MAYSHARE; in do_mmap()
410 vm_flags &= ~(VM_MAYWRITE | VM_SHARED); in do_mmap()
[all …]
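
The do_mmap() hits above show the usual construction of vm_flags for a new mapping: the caller's PROT_* bits are translated into VM_* permission bits, the VM_MAY* defaults are OR-ed in, and MAP_SHARED additionally sets VM_SHARED | VM_MAYSHARE. Below is a minimal userspace sketch of that translation, simplified and not the kernel code itself; the real calc_vm_prot_bits()/calc_vm_flag_bits() also handle protection keys, arch-specific bits and the file-backed cases.

#include <stdio.h>
#include <sys/mman.h>

/* Bit values mirror the generic definitions in include/linux/mm.h. */
#define VM_READ		0x00000001UL
#define VM_WRITE	0x00000002UL
#define VM_EXEC		0x00000004UL
#define VM_SHARED	0x00000008UL
#define VM_MAYREAD	0x00000010UL
#define VM_MAYWRITE	0x00000020UL
#define VM_MAYEXEC	0x00000040UL
#define VM_MAYSHARE	0x00000080UL

/* Illustrative stand-in for calc_vm_prot_bits(): PROT_* -> VM_*. */
static unsigned long sketch_prot_to_vm(unsigned long prot)
{
	return ((prot & PROT_READ)  ? VM_READ  : 0) |
	       ((prot & PROT_WRITE) ? VM_WRITE : 0) |
	       ((prot & PROT_EXEC)  ? VM_EXEC  : 0);
}

/* Rough shape of the vm_flags assembly in do_mmap() for an anonymous
 * mapping (mm->def_flags and the file-backed adjustments omitted). */
static unsigned long sketch_mmap_vm_flags(unsigned long prot, unsigned long flags)
{
	unsigned long vm_flags = sketch_prot_to_vm(prot) |
				 VM_MAYREAD | VM_MAYWRITE | VM_MAYEXEC;

	if (flags & MAP_SHARED)
		vm_flags |= VM_SHARED | VM_MAYSHARE;
	return vm_flags;
}

int main(void)
{
	printf("PROT_READ|PROT_WRITE, MAP_SHARED -> vm_flags %#lx\n",
	       sketch_mmap_vm_flags(PROT_READ | PROT_WRITE, MAP_SHARED));
	return 0;
}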
nommu.c
136 pgprot_t prot, unsigned long vm_flags, int node, in __vmalloc_node_range_noprof() argument
519 if (region->vm_flags & VM_MAPPED_COPY) in __put_nommu_region()
843 unsigned long vm_flags; in determine_vm_flags() local
845 vm_flags = calc_vm_prot_bits(prot, 0) | calc_vm_flag_bits(file, flags); in determine_vm_flags()
852 vm_flags |= VM_MAYREAD | VM_MAYWRITE | VM_MAYEXEC; in determine_vm_flags()
856 vm_flags |= (capabilities & NOMMU_VMFLAGS); in determine_vm_flags()
858 vm_flags |= VM_MAYREAD | VM_MAYWRITE | VM_MAYEXEC; in determine_vm_flags()
869 vm_flags |= VM_MAYOVERLAY; in determine_vm_flags()
872 vm_flags |= VM_SHARED | VM_MAYSHARE | in determine_vm_flags()
876 return vm_flags; in determine_vm_flags()
[all …]
mremap.c
666 unsigned long vm_flags = vma->vm_flags; in move_vma() local
703 MADV_UNMERGEABLE, &vm_flags); in move_vma()
707 if (vm_flags & VM_ACCOUNT) { in move_vma()
717 if (vm_flags & VM_ACCOUNT) in move_vma()
751 if (vm_flags & VM_ACCOUNT && !(flags & MREMAP_DONTUNMAP)) { in move_vma()
769 vm_stat_account(mm, vma->vm_flags, new_len >> PAGE_SHIFT); in move_vma()
772 if (unlikely(vma->vm_flags & VM_PFNMAP)) in move_vma()
794 if (vm_flags & VM_ACCOUNT && !(flags & MREMAP_DONTUNMAP)) in move_vma()
799 if (vm_flags & VM_LOCKED) { in move_vma()
839 if (!old_len && !(vma->vm_flags & (VM_SHARED | VM_MAYSHARE))) { in vma_to_resize()
[all …]
userfaultfd.c
50 else if (!(vma->vm_flags & VM_SHARED) && in find_vma_and_prepare_anon()
79 if (!(vma->vm_flags & VM_SHARED) && unlikely(!vma->anon_vma)) in uffd_lock_vma()
179 bool writable = dst_vma->vm_flags & VM_WRITE; in mfill_atomic_install_pte()
180 bool vm_shared = dst_vma->vm_flags & VM_SHARED; in mfill_atomic_install_pte()
683 if (!(dst_vma->vm_flags & VM_SHARED)) { in mfill_atomic_pte()
755 dst_vma->vm_flags & VM_SHARED)) in mfill_atomic()
762 if ((flags & MFILL_ATOMIC_WP) && !(dst_vma->vm_flags & VM_UFFD_WP)) in mfill_atomic()
1357 return !(vma->vm_flags & (VM_PFNMAP | VM_IO | VM_HUGETLB | in vma_move_compatible()
1366 if ((src_vma->vm_flags & VM_ACCESS_FLAGS) != (dst_vma->vm_flags & VM_ACCESS_FLAGS) || in validate_move_areas()
1371 if ((src_vma->vm_flags & VM_LOCKED) != (dst_vma->vm_flags & VM_LOCKED)) in validate_move_areas()
[all …]
mseal.c
42 if (vma->vm_file || vma->vm_flags & VM_SHARED) in is_ro_anon()
49 if (!(vma->vm_flags & VM_WRITE) || in is_ro_anon()
76 vm_flags_t oldflags = vma->vm_flags; in mseal_fixup()
147 newflags = vma->vm_flags | VM_SEALED; in apply_mm_seal()
execmem.c
20 unsigned long vm_flags = VM_FLUSH_RESET_PERMS; in __execmem_alloc() local
29 vm_flags |= VM_DEFER_KMEMLEAK; in __execmem_alloc()
32 pgprot, vm_flags, NUMA_NO_NODE, in __execmem_alloc()
38 pgprot, vm_flags, NUMA_NO_NODE, in __execmem_alloc()
mprotect.c
48 if (WARN_ON_ONCE(!(vma->vm_flags & VM_WRITE))) in can_change_pte_writable()
63 if (!(vma->vm_flags & VM_SHARED)) { in can_change_pte_writable()
104 if (prot_numa && !(vma->vm_flags & VM_SHARED) && in change_pte_range()
134 if (is_cow_mapping(vma->vm_flags) && in change_pte_range()
608 unsigned long oldflags = vma->vm_flags; in mprotect_fixup()
628 (vma->vm_flags & (VM_PFNMAP|VM_MIXEDMAP)) && in mprotect_fixup()
762 if (!(vma->vm_flags & VM_GROWSDOWN)) in do_mprotect_pkey()
770 if (!(vma->vm_flags & VM_GROWSUP)) in do_mprotect_pkey()
793 if (rier && (vma->vm_flags & VM_MAYEXEC)) in do_mprotect_pkey()
805 newflags |= (vma->vm_flags & ~mask_off_old_flags); in do_mprotect_pkey()
[all …]
mlock.c
334 if (!(vma->vm_flags & VM_LOCKED)) in allow_mlock_munlock()
371 if (vma->vm_flags & VM_LOCKED) in mlock_pte_range()
396 if (vma->vm_flags & VM_LOCKED) in mlock_pte_range()
473 vm_flags_t oldflags = vma->vm_flags; in mlock_fixup()
545 newflags = vma->vm_flags & ~VM_LOCKED_MASK; in apply_vma_lock_flags()
586 if (vma->vm_flags & VM_LOCKED) { in count_mm_mlocked_page_nr()
666 vm_flags_t vm_flags = VM_LOCKED; in SYSCALL_DEFINE3() local
672 vm_flags |= VM_LOCKONFAULT; in SYSCALL_DEFINE3()
674 return do_mlock(start, len, vm_flags); in SYSCALL_DEFINE3()
731 newflags = vma->vm_flags & ~VM_LOCKED_MASK; in apply_mlockall_flags()
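
The mlock.c hits trace how the mlock2() flags argument is folded into vm_flags: VM_LOCKED is always requested, VM_LOCKONFAULT only when MLOCK_ONFAULT is passed, and the unlock paths clear both via VM_LOCKED_MASK. Here is a compressed sketch of just that flag arithmetic; the bit values are copied from the generic mm.h and uapi definitions and the helpers are illustrative, not the kernel functions themselves.

#define VM_LOCKED	0x00002000UL
#define VM_LOCKONFAULT	0x00080000UL
#define VM_LOCKED_MASK	(VM_LOCKED | VM_LOCKONFAULT)
#define MLOCK_ONFAULT	0x01	/* uapi flag accepted by mlock2() */

/* mlock2(): pick the vm_flags bits the caller asked for. */
static unsigned long sketch_mlock2_vm_flags(unsigned int flags)
{
	unsigned long vm_flags = VM_LOCKED;

	if (flags & MLOCK_ONFAULT)
		vm_flags |= VM_LOCKONFAULT;
	return vm_flags;
}

/* munlock()/munlockall(): strip every mlock-related bit from a VMA. */
static unsigned long sketch_munlock_vm_flags(unsigned long oldflags)
{
	return oldflags & ~VM_LOCKED_MASK;
}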
/linux-6.12.1/include/trace/events/
fs_dax.h
18 __field(unsigned long, vm_flags)
31 __entry->vm_flags = vmf->vma->vm_flags;
43 __entry->vm_flags & VM_SHARED ? "shared" : "private",
70 __field(unsigned long, vm_flags)
79 __entry->vm_flags = vmf->vma->vm_flags;
89 __entry->vm_flags & VM_SHARED ? "shared" : "private",
111 __field(unsigned long, vm_flags)
122 __entry->vm_flags = vmf->vma->vm_flags;
134 __entry->vm_flags & VM_SHARED ? "shared" : "private",
158 __field(unsigned long, vm_flags)
[all …]
/linux-6.12.1/arch/powerpc/include/asm/book3s/64/
hash-pkey.h
8 static inline u64 hash__vmflag_to_pte_pkey_bits(u64 vm_flags) in hash__vmflag_to_pte_pkey_bits() argument
10 return (((vm_flags & VM_PKEY_BIT0) ? H_PTE_PKEY_BIT0 : 0x0UL) | in hash__vmflag_to_pte_pkey_bits()
11 ((vm_flags & VM_PKEY_BIT1) ? H_PTE_PKEY_BIT1 : 0x0UL) | in hash__vmflag_to_pte_pkey_bits()
12 ((vm_flags & VM_PKEY_BIT2) ? H_PTE_PKEY_BIT2 : 0x0UL) | in hash__vmflag_to_pte_pkey_bits()
13 ((vm_flags & VM_PKEY_BIT3) ? H_PTE_PKEY_BIT3 : 0x0UL) | in hash__vmflag_to_pte_pkey_bits()
14 ((vm_flags & VM_PKEY_BIT4) ? H_PTE_PKEY_BIT4 : 0x0UL)); in hash__vmflag_to_pte_pkey_bits()
pkeys.h
8 static inline u64 vmflag_to_pte_pkey_bits(u64 vm_flags) in vmflag_to_pte_pkey_bits() argument
15 return hash__vmflag_to_pte_pkey_bits(vm_flags); in vmflag_to_pte_pkey_bits()
/linux-6.12.1/arch/arm64/mm/
mmap.c
84 pgprot_t vm_get_page_prot(unsigned long vm_flags) in vm_get_page_prot() argument
86 pteval_t prot = pgprot_val(protection_map[vm_flags & in vm_get_page_prot()
89 if (vm_flags & VM_ARM64_BTI) in vm_get_page_prot()
102 if (vm_flags & VM_MTE) in vm_get_page_prot()
107 if (vm_flags & VM_PKEY_BIT0) in vm_get_page_prot()
109 if (vm_flags & VM_PKEY_BIT1) in vm_get_page_prot()
111 if (vm_flags & VM_PKEY_BIT2) in vm_get_page_prot()
fault.c
527 unsigned long vm_flags; in do_page_fault() local
555 vm_flags = VM_EXEC; in do_page_fault()
559 vm_flags = VM_WRITE; in do_page_fault()
563 vm_flags = VM_READ; in do_page_fault()
565 vm_flags |= VM_WRITE; in do_page_fault()
568 vm_flags |= VM_EXEC; in do_page_fault()
590 if (!(vma->vm_flags & vm_flags)) { in do_page_fault()
635 if (!(vma->vm_flags & vm_flags)) { in do_page_fault()
983 if (vma->vm_flags & VM_MTE) in vma_alloc_zeroed_movable_folio()
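
Both do_page_fault() groups (arm64 here, arm/mm/fault.c later in the listing) follow the same pattern: the fault type selects the VM_* access bits a mapping must carry, and the faulting VMA is rejected if vma->vm_flags lacks them. A simplified skeleton of that check follows; it is not the arch code, and fault classification and retry handling are omitted.

#define VM_READ		0x00000001UL
#define VM_WRITE	0x00000002UL
#define VM_EXEC		0x00000004UL

enum fault_kind { FAULT_READ, FAULT_WRITE, FAULT_EXEC };

/* Returns non-zero when the VMA permits the faulting access;
 * zero means the fault handler takes the SIGSEGV path. */
static int sketch_access_allowed(unsigned long vma_flags, enum fault_kind kind)
{
	unsigned long required;

	switch (kind) {
	case FAULT_EXEC:
		required = VM_EXEC;
		break;
	case FAULT_WRITE:
		required = VM_WRITE;
		break;
	default:
		required = VM_READ;
		break;
	}
	return (vma_flags & required) != 0;
}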
/linux-6.12.1/arch/sparc/include/asm/
mman.h
57 #define arch_validate_flags(vm_flags) arch_validate_flags(vm_flags) argument
61 static inline bool arch_validate_flags(unsigned long vm_flags) in arch_validate_flags() argument
67 if (vm_flags & VM_SPARC_ADI) { in arch_validate_flags()
72 if (vm_flags & (VM_PFNMAP | VM_MIXEDMAP)) in arch_validate_flags()
83 if (vm_flags & VM_MERGEABLE) in arch_validate_flags()
/linux-6.12.1/include/linux/
userfaultfd_k.h
168 return vma->vm_flags & (VM_UFFD_WP | VM_UFFD_MINOR); in uffd_disable_huge_pmd_share()
180 return vma->vm_flags & (VM_UFFD_WP | VM_UFFD_MINOR); in uffd_disable_fault_around()
185 return vma->vm_flags & VM_UFFD_MISSING; in userfaultfd_missing()
190 return vma->vm_flags & VM_UFFD_WP; in userfaultfd_wp()
195 return vma->vm_flags & VM_UFFD_MINOR; in userfaultfd_minor()
212 return vma->vm_flags & __VM_UFFD_FLAGS; in userfaultfd_armed()
216 unsigned long vm_flags, in vma_can_userfault() argument
219 vm_flags &= __VM_UFFD_FLAGS; in vma_can_userfault()
221 if (vm_flags & VM_DROPPABLE) in vma_can_userfault()
224 if ((vm_flags & VM_UFFD_MINOR) && in vma_can_userfault()
[all …]
huge_mm.h
96 #define thp_vma_allowable_order(vma, vm_flags, tva_flags, order) \ argument
97 (!!thp_vma_allowable_orders(vma, vm_flags, tva_flags, BIT(order)))
270 unsigned long vm_flags,
291 unsigned long vm_flags, in thp_vma_allowable_orders() argument
299 if (vm_flags & VM_HUGEPAGE) in thp_vma_allowable_orders()
302 ((vm_flags & VM_HUGEPAGE) && hugepage_global_enabled())) in thp_vma_allowable_orders()
310 return __thp_vma_allowable_orders(vma, vm_flags, tva_flags, orders); in thp_vma_allowable_orders()
326 unsigned long vm_flags) in vma_thp_disabled() argument
333 return (vm_flags & VM_NOHUGEPAGE) || in vma_thp_disabled()
347 vm_flags_t vm_flags);
[all …]
/linux-6.12.1/arch/x86/mm/
pgprot.c
35 pgprot_t vm_get_page_prot(unsigned long vm_flags) in vm_get_page_prot() argument
37 unsigned long val = pgprot_val(protection_map[vm_flags & in vm_get_page_prot()
48 if (vm_flags & VM_PKEY_BIT0) in vm_get_page_prot()
50 if (vm_flags & VM_PKEY_BIT1) in vm_get_page_prot()
52 if (vm_flags & VM_PKEY_BIT2) in vm_get_page_prot()
54 if (vm_flags & VM_PKEY_BIT3) in vm_get_page_prot()
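
The x86 vm_get_page_prot() above and the arm64 variant earlier share the same shape: index the 16-entry protection_map[] with the low vm_flags access bits (VM_READ | VM_WRITE | VM_EXEC | VM_SHARED), then OR in arch-specific protection bits derived from further flags such as VM_PKEY_BIT0..3, VM_ARM64_BTI or VM_MTE. A minimal sketch of the lookup follows, with placeholder table entries standing in for the real arch pgprot encodings.

#define VM_READ		0x1UL
#define VM_WRITE	0x2UL
#define VM_EXEC		0x4UL
#define VM_SHARED	0x8UL

typedef unsigned long sketch_pgprot_t;	/* stand-in for pgprot_t */

/* Indexed by VM_SHARED|VM_EXEC|VM_WRITE|VM_READ; placeholder values only. */
static const sketch_pgprot_t sketch_protection_map[16] = {
	0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15,
};

static sketch_pgprot_t sketch_vm_get_page_prot(unsigned long vm_flags)
{
	sketch_pgprot_t prot = sketch_protection_map[vm_flags &
			(VM_READ | VM_WRITE | VM_EXEC | VM_SHARED)];

	/* The real arch hooks continue here, e.g. folding pkey bits or
	 * BTI/MTE attributes into the pte encoding before returning. */
	return prot;
}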
/linux-6.12.1/arch/arm64/include/asm/
mman.h
67 static inline bool arch_validate_flags(unsigned long vm_flags) in arch_validate_flags() argument
73 return !(vm_flags & VM_MTE) || (vm_flags & VM_MTE_ALLOWED); in arch_validate_flags()
75 #define arch_validate_flags(vm_flags) arch_validate_flags(vm_flags) argument
/linux-6.12.1/arch/x86/kernel/
sys_x86_64.c
115 static inline unsigned long stack_guard_placement(vm_flags_t vm_flags) in stack_guard_placement() argument
117 if (vm_flags & VM_SHADOW_STACK) in stack_guard_placement()
125 unsigned long pgoff, unsigned long flags, vm_flags_t vm_flags) in arch_get_unmapped_area() argument
152 info.start_gap = stack_guard_placement(vm_flags); in arch_get_unmapped_area()
163 unsigned long flags, vm_flags_t vm_flags) in arch_get_unmapped_area_topdown() argument
202 info.start_gap = stack_guard_placement(vm_flags); in arch_get_unmapped_area_topdown()
/linux-6.12.1/tools/testing/selftests/bpf/progs/
bpf_iter_task_vmas.c
43 perm_str[0] = (vma->vm_flags & VM_READ) ? 'r' : '-'; in proc_maps()
44 perm_str[1] = (vma->vm_flags & VM_WRITE) ? 'w' : '-'; in proc_maps()
45 perm_str[2] = (vma->vm_flags & VM_EXEC) ? 'x' : '-'; in proc_maps()
46 perm_str[3] = (vma->vm_flags & VM_MAYSHARE) ? 's' : 'p'; in proc_maps()
/linux-6.12.1/arch/nios2/mm/
cacheflush.c
90 if (!(vma->vm_flags & VM_MAYSHARE)) in flush_aliases()
138 if (vma == NULL || (vma->vm_flags & VM_EXEC)) in flush_cache_range()
159 if (vma->vm_flags & VM_EXEC) in flush_cache_page()
236 if (vma->vm_flags & VM_EXEC) in update_mmu_cache_range()
268 if (vma->vm_flags & VM_EXEC) in copy_from_user_page()
279 if (vma->vm_flags & VM_EXEC) in copy_to_user_page()
/linux-6.12.1/arch/hexagon/mm/
vm_fault.c
70 if (!(vma->vm_flags & VM_EXEC)) in do_page_fault()
74 if (!(vma->vm_flags & VM_READ)) in do_page_fault()
78 if (!(vma->vm_flags & VM_WRITE)) in do_page_fault()
/linux-6.12.1/tools/testing/vma/
vma_internal.h
212 const vm_flags_t vm_flags; member
376 static inline pgprot_t vm_get_page_prot(unsigned long vm_flags) in vm_get_page_prot() argument
378 return __pgprot(vm_flags); in vm_get_page_prot()
381 static inline bool is_shared_maywrite(vm_flags_t vm_flags) in is_shared_maywrite() argument
383 return (vm_flags & (VM_SHARED | VM_MAYWRITE)) == in is_shared_maywrite()
389 return is_shared_maywrite(vma->vm_flags); in vma_is_shared_maywrite()
875 unsigned long vm_flags) in khugepaged_enter_vma() argument
878 (void)vm_flags; in khugepaged_enter_vma()
/linux-6.12.1/drivers/sbus/char/
flash.c
44 if ((vma->vm_flags & VM_READ) && in flash_mmap()
45 (vma->vm_flags & VM_WRITE)) { in flash_mmap()
49 if (vma->vm_flags & VM_READ) { in flash_mmap()
52 } else if (vma->vm_flags & VM_WRITE) { in flash_mmap()
/linux-6.12.1/arch/arm/mm/
fault.c
271 unsigned long vm_flags = VM_ACCESS_FLAGS; in do_page_fault() local
293 vm_flags = VM_WRITE; in do_page_fault()
297 vm_flags = VM_EXEC; in do_page_fault()
321 if (!(vma->vm_flags & vm_flags)) { in do_page_fault()
360 if (!(vma->vm_flags & vm_flags)) { in do_page_fault()