/linux-6.12.1/mm/
D | percpu-km.c |
    65   pages = alloc_pages(gfp, order_base_2(nr_pages));  in pcpu_create_chunk()
    109  size_t nr_pages, alloc_pages;  in pcpu_verify_alloc_info() local
    118  alloc_pages = roundup_pow_of_two(nr_pages);  in pcpu_verify_alloc_info()
    120  if (alloc_pages > nr_pages)  in pcpu_verify_alloc_info()
    122  alloc_pages - nr_pages);  in pcpu_verify_alloc_info()
|
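The percpu-km.c hits show the usual pairing of alloc_pages() with power-of-two rounding: the allocator hands out 2^order contiguous pages, so a request for nr_pages is rounded up and the remainder is wasted. A minimal sketch of that idiom, with a made-up helper name (alloc_chunk_pages is not a kernel function):

    #include <linux/gfp.h>
    #include <linux/log2.h>
    #include <linux/printk.h>

    /* Illustrative only: round the page count up to a power of two,
     * allocate that order, and note how many pages go unused. */
    static struct page *alloc_chunk_pages(unsigned int nr_pages, gfp_t gfp)
    {
            unsigned int order = order_base_2(nr_pages);    /* ceil(log2(nr_pages)) */
            struct page *pages = alloc_pages(gfp, order);

            if (pages && (1U << order) > nr_pages)
                    pr_debug("chunk wastes %u page(s)\n", (1U << order) - nr_pages);
            return pages;
    }
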
/linux-6.12.1/drivers/xen/ |
D | unpopulated-alloc.c |
    39   unsigned int i, alloc_pages = round_up(nr_pages, PAGES_PER_SECTION);  in fill_list() local
    53   alloc_pages * PAGE_SIZE, mhp_range.start, mhp_range.end,  in fill_list()
    111  for (i = 0; i < alloc_pages; i++) {  in fill_list()
    128  for (i = 0; i < alloc_pages; i++) {  in fill_list()
|
/linux-6.12.1/lib/ |
D | fortify_kunit.c |
    263  #define TEST_vmalloc(checker, expected_pages, alloc_pages) do { \  argument
    266  vmalloc((alloc_pages) * PAGE_SIZE), vfree(p)); \
    268  vzalloc((alloc_pages) * PAGE_SIZE), vfree(p)); \
    270  __vmalloc((alloc_pages) * PAGE_SIZE, gfp), vfree(p)); \
    275  #define TEST_kvmalloc(checker, expected_pages, alloc_pages) do { \  argument
    281  kvmalloc((alloc_pages) * PAGE_SIZE, gfp), \
    284  kvmalloc_node((alloc_pages) * PAGE_SIZE, gfp, NUMA_NO_NODE), \
    287  kvzalloc((alloc_pages) * PAGE_SIZE, gfp), \
    290  kvzalloc_node((alloc_pages) * PAGE_SIZE, gfp, NUMA_NO_NODE), \
    293  kvcalloc(1, (alloc_pages) * PAGE_SIZE, gfp), \
    [all …]
|
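The fortify_kunit.c macros size their vmalloc() and kvmalloc() family calls in whole pages. As background for those hits, a hedged sketch of the kvmalloc() idiom (the helper names here are illustrative, not from the test): kvmalloc() tries the slab allocator first and falls back to vmalloc() for large or fragmented requests, and kvfree() releases either kind of result.

    #include <linux/mm.h>
    #include <linux/slab.h>

    /* Illustrative sketch: a buffer sized in pages, obtained via kvmalloc();
     * the memory may be physically contiguous (kmalloc) or not (vmalloc). */
    static void *alloc_page_sized_buffer(size_t nr_pages)
    {
            return kvmalloc(nr_pages * PAGE_SIZE, GFP_KERNEL);
    }

    static void free_page_sized_buffer(void *buf)
    {
            kvfree(buf);    /* works for both kmalloc- and vmalloc-backed memory */
    }
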
D | test_meminit.c |
    69   page = alloc_pages(GFP_KERNEL, order);  in do_alloc_pages_order()
    76   page = alloc_pages(GFP_KERNEL, order);  in do_alloc_pages_order()
|
/linux-6.12.1/arch/riscv/kernel/ |
D | unaligned_access_speed.c |
    194  buf = alloc_pages(GFP_KERNEL, MISALIGNED_BUFFER_ORDER);  in riscv_online_cpu()
    233  bufs[cpu] = alloc_pages(GFP_KERNEL, MISALIGNED_BUFFER_ORDER);  in check_unaligned_access_speed_all_cpus()
|
/linux-6.12.1/rust/helpers/ |
D | page.c | 8 return alloc_pages(gfp_mask, order); in rust_helper_alloc_pages()
|
/linux-6.12.1/drivers/infiniband/hw/vmw_pvrdma/ |
D | pvrdma_misc.c |
    53   u64 npages, bool alloc_pages)  in pvrdma_page_dir_init() argument
    83   if (alloc_pages) {  in pvrdma_page_dir_init()
|
/linux-6.12.1/drivers/iommu/ |
D | iommu-pages.h | 60 page = alloc_pages(gfp | __GFP_ZERO, order); in __iommu_alloc_pages()
|
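The iommu-pages.h hit ORs in __GFP_ZERO so the pages come back already cleared, avoiding a separate memset() for table-like allocations. A minimal sketch of that idiom (alloc_zeroed_table() is a hypothetical name, not the iommu helper):

    #include <linux/gfp.h>
    #include <linux/mm.h>

    /* Illustrative sketch: allocate 2^order zeroed pages and return their
     * kernel virtual address; free later with free_pages() at the same order. */
    static void *alloc_zeroed_table(gfp_t gfp, unsigned int order)
    {
            struct page *page = alloc_pages(gfp | __GFP_ZERO, order);

            return page ? page_address(page) : NULL;
    }
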
/linux-6.12.1/Documentation/translations/zh_CN/core-api/ |
D | memory-allocation.rst | 21 You can request pages directly from the page allocator with alloc_pages. More specialized allocators can also be used,
|
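The memory-allocation.rst line (from the zh_CN translation of the core-api guide) points at alloc_pages as the direct interface to the page allocator, with more specialized allocators layered on top. For a single page the shorthand pair is alloc_page()/__free_page(); a minimal, illustrative sketch:

    #include <linux/errno.h>
    #include <linux/gfp.h>
    #include <linux/mm.h>
    #include <linux/string.h>

    /* Illustrative sketch: take one page from the page allocator,
     * use its kernel mapping, and hand it back. */
    static int touch_one_page(void)
    {
            struct page *page = alloc_page(GFP_KERNEL);

            if (!page)
                    return -ENOMEM;

            memset(page_address(page), 0, PAGE_SIZE);
            __free_page(page);
            return 0;
    }
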
/linux-6.12.1/mm/kmsan/ |
D | hooks.c |
    164  shadow = alloc_pages(gfp_mask, 1);  in kmsan_ioremap_page_range()
    165  origin = alloc_pages(gfp_mask, 1);  in kmsan_ioremap_page_range()
|
/linux-6.12.1/drivers/net/ethernet/qlogic/qed/ |
D | qed_chain.c |
    273  goto alloc_pages;  in qed_chain_alloc_pbl()
    288  alloc_pages:  in qed_chain_alloc_pbl()
|
/linux-6.12.1/rust/kernel/ |
D | page.rs | 67 let page = unsafe { bindings::alloc_pages(flags.as_raw(), 0) }; in alloc_page()
|
/linux-6.12.1/include/linux/ |
D | gfp.h |
    333  #define alloc_pages(...) alloc_hooks(alloc_pages_noprof(__VA_ARGS__))  macro
    339  #define alloc_page(gfp_mask) alloc_pages(gfp_mask, 0)
|
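The gfp.h lines show that alloc_pages() is a macro routing through alloc_hooks()/alloc_pages_noprof() (the allocation-profiling wrappers) and that alloc_page(gfp) is simply order-0 alloc_pages(). Callers get 2^order physically contiguous pages and must free at the same order; a minimal sketch (the demo function is illustrative):

    #include <linux/errno.h>
    #include <linux/gfp.h>
    #include <linux/mm.h>
    #include <linux/string.h>

    /* Illustrative sketch: allocate four contiguous pages (order 2),
     * scribble over them via the linear mapping, then release them. */
    static int demo_order2_alloc(void)
    {
            const unsigned int order = 2;
            struct page *page = alloc_pages(GFP_KERNEL, order);

            if (!page)
                    return -ENOMEM;

            memset(page_address(page), 0, PAGE_SIZE << order);
            __free_pages(page, order);
            return 0;
    }
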
/linux-6.12.1/arch/powerpc/include/asm/book3s/64/ |
D | pgalloc.h | 31 page = alloc_pages(pgtable_gfp_flags(mm, PGALLOC_GFP | __GFP_RETRY_MAYFAIL), in radix__pgd_alloc()
|
/linux-6.12.1/drivers/gpu/drm/i915/gem/ |
D | i915_gem_internal.c | 75 page = alloc_pages(gfp | (order ? QUIET : MAYFAIL), in i915_gem_object_get_pages_internal()
|
/linux-6.12.1/fs/ramfs/ |
D | file-nommu.c | 84 pages = alloc_pages(gfp, order); in ramfs_nommu_expand_for_mapping()
|
/linux-6.12.1/drivers/media/pci/intel/ipu6/ |
D | ipu6-dma.c |
    76   pages[i] = alloc_pages(gfp, order);  in __dma_alloc_buffer()
    78   pages[i] = alloc_pages(gfp, --order);  in __dma_alloc_buffer()
|
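The ipu6-dma.c hits (like the i915 one further up) use the common fallback loop: try a large order first and drop to smaller orders on failure, usually marking the higher-order attempts __GFP_NORETRY | __GFP_NOWARN so they fail fast and quietly. A hedged sketch of the pattern, not the driver's code:

    #include <linux/gfp.h>

    /* Illustrative sketch: best-effort high-order allocation that falls back
     * one order at a time; only the final order-0 attempt tries hard. */
    static struct page *alloc_best_effort(gfp_t gfp, unsigned int max_order)
    {
            unsigned int order = max_order;

            while (1) {
                    gfp_t flags = gfp;
                    struct page *page;

                    if (order)
                            flags |= __GFP_NORETRY | __GFP_NOWARN;

                    page = alloc_pages(flags, order);
                    if (page)
                            return page;
                    if (!order)
                            return NULL;    /* even a single page failed */
                    order--;                /* retry with half the size */
            }
    }
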
/linux-6.12.1/arch/x86/kernel/ |
D | kvmclock.c | 215 p = alloc_pages(GFP_KERNEL, order); in kvmclock_init_mem()
|
/linux-6.12.1/arch/x86/platform/efi/ |
D | memmap.c | 25 struct page *p = alloc_pages(GFP_KERNEL, order); in __efi_memmap_alloc_late()
|
/linux-6.12.1/drivers/staging/media/ipu3/ |
D | ipu3-dmamap.c | 58 page = alloc_pages((order_mask - order_size) ? in imgu_dmamap_alloc_buffer()
|
/linux-6.12.1/mm/kasan/ |
D | kasan_test_c.c |
    290   pages = alloc_pages(GFP_KERNEL, order);  in page_alloc_oob_right()
    304   pages = alloc_pages(GFP_KERNEL, order);  in page_alloc_uaf()
    1753  p_page = alloc_pages(GFP_KERNEL, 1);  in vmap_tags()
    1794  page = alloc_pages(GFP_KERNEL, 1);  in vm_map_ram_tags()
    1837  pages = alloc_pages(GFP_KERNEL, order);  in match_all_not_assigned()
|
/linux-6.12.1/drivers/net/wireless/intel/iwlwifi/fw/ |
D | paging.c | 72 block = alloc_pages(GFP_KERNEL, order); in iwl_alloc_fw_paging_mem()
|
/linux-6.12.1/kernel/dma/ |
D | pool.c | 96 page = alloc_pages(gfp, order); in atomic_pool_expand()
|
/linux-6.12.1/io_uring/ |
D | memmap.c | 28 page = alloc_pages(gfp, order); in io_mem_alloc_compound()
|
/linux-6.12.1/drivers/gpu/drm/amd/amdgpu/ |
D | amdgpu_gart.c | 131 p = alloc_pages(gfp_flags, order); in amdgpu_gart_table_ram_alloc()
|