
Searched refs:min_order (Results 1 – 9 of 9) sorted by relevance

/linux-6.12.1/mm/
fail_page_alloc.c
12 u32 min_order; member
17 .min_order = 1,
30 if (order < fail_page_alloc.min_order) in should_fail_alloc_page()
62 debugfs_create_u32("min-order", mode, dir, &fail_page_alloc.min_order); in fail_page_alloc_debugfs()
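
The fail_page_alloc hits describe a fault-injection knob: allocations whose order is below fail_page_alloc.min_order are never failed, and the threshold is exposed as a debugfs u32 named min-order. A minimal userspace sketch of that gating logic; the struct layout and the fail_every counter are illustrative, not the kernel's fault_attr machinery:

#include <stdbool.h>
#include <stdio.h>

/* Illustrative stand-in for the kernel's fail_page_alloc state. */
static struct {
	unsigned int min_order;   /* orders below this are never failed */
	unsigned int fail_every;  /* inject a failure every Nth eligible call */
	unsigned int count;
} fail_page_alloc = { .min_order = 1, .fail_every = 4 };

/* Mirrors the early-out shown at fail_page_alloc.c line 30. */
static bool should_fail_alloc_page(unsigned int order)
{
	if (order < fail_page_alloc.min_order)
		return false;
	return ++fail_page_alloc.count % fail_page_alloc.fail_every == 0;
}

int main(void)
{
	for (unsigned int order = 0; order < 6; order++)
		printf("order %u: %s\n", order,
		       should_fail_alloc_page(order) ? "inject failure" : "allow");
	return 0;
}
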
readahead.c
458 unsigned int min_order = mapping_min_folio_order(mapping); in page_cache_ra_order() local
480 new_order = max(new_order, min_order); in page_cache_ra_order()
500 while (order > min_order && index + (1UL << order) - 1 > limit) in page_cache_ra_order()
739 unsigned int min_order = mapping_min_folio_order(mapping); in readahead_expand() local
756 folio = filemap_alloc_folio(gfp_mask, min_order); in readahead_expand()
785 folio = filemap_alloc_folio(gfp_mask, min_order); in readahead_expand()
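
The readahead.c hits show the folio order being raised to the mapping's minimum and then walked back down, never below min_order, while a folio of that order would run past the readahead limit. A simplified model of that clamping; MAX_ORDER_CAP and the sample numbers are made up, and mapping_min_folio_order() is replaced by a plain parameter:

#include <stdio.h>

#define MAX_ORDER_CAP 8  /* illustrative upper bound, not the kernel's constant */

/* Pick a readahead folio order for 'index' that stays within 'limit'
 * (last allowed page index) and never drops below min_order. */
static unsigned int ra_pick_order(unsigned int requested, unsigned int min_order,
				  unsigned long index, unsigned long limit)
{
	unsigned int order = requested > min_order ? requested : min_order;

	if (order > MAX_ORDER_CAP)
		order = MAX_ORDER_CAP;

	/* Same shape as the loop at readahead.c line 500. */
	while (order > min_order && index + (1UL << order) - 1 > limit)
		order--;

	return order;
}

int main(void)
{
	/* A mapping that requires at least order-2 folios, reading near the limit. */
	printf("picked order %u\n", ra_pick_order(4, 2, 60, 63));
	return 0;
}
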
filemap.c
1924 unsigned int min_order = mapping_min_folio_order(mapping); in __filemap_get_folio() local
1925 unsigned int order = max(min_order, FGF_GET_ORDER(fgp_flags)); in __filemap_get_folio()
1950 if (order > min_order) in __filemap_get_folio()
1965 } while (order-- > min_order); in __filemap_get_folio()
2469 unsigned int min_order = mapping_min_folio_order(mapping); in filemap_create_folio() local
2472 folio = filemap_alloc_folio(mapping_gfp_mask(mapping), min_order); in filemap_create_folio()
2490 index = (pos >> (PAGE_SHIFT + min_order)) << min_order; in filemap_create_folio()
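
filemap.c uses the same floor in the other common pattern: __filemap_get_folio() starts at max(min_order, requested order) and retries allocation at smaller and smaller orders, stopping at the mapping minimum, while filemap_create_folio() aligns the index down to a min_order boundary. A rough userspace model of the fallback loop; the fake allocator and the chosen numbers are assumptions:

#include <stdbool.h>
#include <stdio.h>

/* Fake allocator: pretend only order <= 2 folios can be allocated right now. */
static bool alloc_folio(unsigned int order)
{
	return order <= 2;
}

/* Try the requested order first, then fall back, but never below the
 * mapping's minimum folio order (same shape as filemap.c line 1965). */
static int get_folio_order(unsigned int requested, unsigned int min_order)
{
	unsigned int order = requested > min_order ? requested : min_order;

	do {
		if (alloc_folio(order))
			return (int)order;
	} while (order-- > min_order);

	return -1; /* even min_order could not be allocated */
}

int main(void)
{
	printf("got order %d\n", get_folio_order(5, 1)); /* falls back to 2 */
	printf("got order %d\n", get_folio_order(5, 3)); /* fails: floor is 3 */
	return 0;
}
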
huge_memory.c
3427 unsigned int min_order; in split_huge_page_to_list_to_order() local
3438 min_order = mapping_min_folio_order(folio->mapping); in split_huge_page_to_list_to_order()
3439 if (new_order < min_order) { in split_huge_page_to_list_to_order()
3441 min_order); in split_huge_page_to_list_to_order()
4003 unsigned int min_order; in split_huge_pages_in_file() local
4018 min_order = mapping_min_folio_order(mapping); in split_huge_pages_in_file()
4019 target_order = max(new_order, min_order); in split_huge_pages_in_file()
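
huge_memory.c enforces the floor when splitting rather than allocating: a file-backed large folio may not be split to an order below its mapping's minimum, and the debugfs split path simply clamps the target order up to it. A compact sketch of both checks; struct mapping here is a stand-in, not the kernel's address_space:

#include <stdio.h>

/* Stand-in for the address_space state mapping_min_folio_order() reads. */
struct mapping {
	unsigned int min_folio_order;
};

/* Split validation: refuse to split below the mapping's minimum order,
 * in the spirit of the check at huge_memory.c line 3439. */
static int check_split_order(const struct mapping *m, unsigned int new_order)
{
	if (new_order < m->min_folio_order) {
		fprintf(stderr, "cannot split below min order %u\n", m->min_folio_order);
		return -1;
	}
	return 0;
}

/* Debugfs-style variant: clamp the target instead of failing (line 4019). */
static unsigned int clamp_split_order(const struct mapping *m, unsigned int new_order)
{
	return new_order > m->min_folio_order ? new_order : m->min_folio_order;
}

int main(void)
{
	struct mapping m = { .min_folio_order = 2 };

	printf("check: %d, clamped target: %u\n",
	       check_split_order(&m, 0), clamp_split_order(&m, 0));
	return 0;
}
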
slub.c
5034 unsigned int min_order, unsigned int max_order, in calc_slab_order() argument
5039 for (order = min_order; order <= max_order; order++) { in calc_slab_order()
5058 unsigned int min_order; in calculate_order() local
5080 min_order = max_t(unsigned int, slub_min_order, in calculate_order()
5082 if (order_objects(min_order, size) > MAX_OBJS_PER_PAGE) in calculate_order()
5101 order = calc_slab_order(size, min_order, slub_max_order, in calculate_order()
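
The SLUB hits belong to the slab-order search: calculate_order() derives a lower bound from slub_min_order and the object size, then calc_slab_order() walks orders from that minimum up to slub_max_order and takes the first one whose leftover space is small enough. A simplified userspace version of the search; the page size, order limit and waste fraction below are illustrative, not SLUB's tuning:

#include <stdio.h>

#define PAGE_SZ    4096UL
#define MAX_ORDER  3   /* illustrative slub_max_order */

/* How many objects of 'size' fit in a slab of 2^order pages? */
static unsigned long order_objects(unsigned int order, unsigned long size)
{
	return (PAGE_SZ << order) / size;
}

/* Take the first order in [min_order, max_order] whose wasted space is at
 * most slab_size / fract_leftover, echoing calc_slab_order(). */
static int calc_slab_order(unsigned long size, unsigned int min_order,
			   unsigned int max_order, unsigned int fract_leftover)
{
	for (unsigned int order = min_order; order <= max_order; order++) {
		unsigned long slab_size = PAGE_SZ << order;

		if (!order_objects(order, size))
			continue; /* object does not even fit */
		if (slab_size % size <= slab_size / fract_leftover)
			return (int)order;
	}
	return -1;
}

int main(void)
{
	/* 700-byte objects: order 0 wastes 596 of 4096 bytes, order 1 only 492. */
	printf("order for 700-byte objects: %d\n", calc_slab_order(700, 0, MAX_ORDER, 16));
	return 0;
}
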
page_alloc.c
2187 int min_order = order; in __rmqueue_fallback() local
2198 min_order = pageblock_order; in __rmqueue_fallback()
2205 for (current_order = MAX_PAGE_ORDER; current_order >= min_order; in __rmqueue_fallback()
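
In page_alloc.c the meaning flips: min_order is the smallest order __rmqueue_fallback() is willing to steal from another migratetype, and the listing suggests it is raised to pageblock_order when the allocator prefers to take whole pageblocks rather than fragment foreign ones; the search then runs from the largest order downwards. A toy model of that top-down scan; the freelist contents, the anti-fragmentation flag and the order constants are assumptions:

#include <stdbool.h>
#include <stdio.h>

#define MAX_PAGE_ORDER   10   /* illustrative values */
#define PAGEBLOCK_ORDER  9

/* Toy free-area state: which orders currently hold a stealable block. */
static bool free_area_has_block[MAX_PAGE_ORDER + 1] = { [3] = true };

/* Scan from the largest order down to min_order, like the loop at
 * page_alloc.c line 2205; with whole_blocks_only set, refuse to steal
 * anything smaller than a pageblock by raising the floor. */
static int find_fallback_order(unsigned int order, bool whole_blocks_only)
{
	int min_order = (int)order;

	if (whole_blocks_only && order < PAGEBLOCK_ORDER)
		min_order = PAGEBLOCK_ORDER;

	for (int current_order = MAX_PAGE_ORDER; current_order >= min_order; current_order--)
		if (free_area_has_block[current_order])
			return current_order;

	return -1; /* nothing suitable to steal */
}

int main(void)
{
	printf("relaxed: %d\n", find_fallback_order(2, false)); /* takes the order-3 block */
	printf("strict:  %d\n", find_fallback_order(2, true));  /* whole pageblocks only: none */
	return 0;
}
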
/linux-6.12.1/drivers/gpu/drm/
drm_buddy.c
167 unsigned int min_order) in __force_merge() argument
172 if (!min_order) in __force_merge()
175 if (min_order > mm->max_order) in __force_merge()
178 for (i = min_order - 1; i >= 0; i--) { in __force_merge()
214 if (order >= min_order) in __force_merge()
985 unsigned int min_order, order; in drm_buddy_alloc_blocks() local
1030 min_order = ilog2(min_block_size) - ilog2(mm->chunk_size); in drm_buddy_alloc_blocks()
1035 BUG_ON(order < min_order); in drm_buddy_alloc_blocks()
1045 if (order-- == min_order) { in drm_buddy_alloc_blocks()
1048 !__force_merge(mm, start, end, min_order)) { in drm_buddy_alloc_blocks()
[all …]
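
For the DRM buddy allocator, min_order is derived from the caller's min_block_size relative to the allocator's chunk_size, and drm_buddy_alloc_blocks() will not descend below it; when even that order cannot be satisfied, __force_merge() tries to reassemble a large enough block from smaller free ones. A small sketch of the order computation and the descent loop; the availability test and the sample sizes are invented, and the force-merge step is only noted in a comment:

#include <stdbool.h>
#include <stdio.h>

/* Integer log2 for power-of-two sizes, standing in for the kernel's ilog2(). */
static unsigned int ilog2_ull(unsigned long long v)
{
	unsigned int r = 0;

	while (v >>= 1)
		r++;
	return r;
}

/* Fake "is a free block of this order available" test. */
static bool have_free_block(unsigned int order)
{
	return order <= 4;
}

/* Try orders from the request size down to min_order, in the spirit of
 * drm_buddy_alloc_blocks(); min_order = ilog2(min_block_size) - ilog2(chunk_size). */
static int buddy_try_alloc(unsigned long long size, unsigned long long min_block_size,
			   unsigned long long chunk_size)
{
	unsigned int order = ilog2_ull(size) - ilog2_ull(chunk_size);
	unsigned int min_order = ilog2_ull(min_block_size) - ilog2_ull(chunk_size);

	for (;;) {
		if (have_free_block(order))
			return (int)order;
		if (order-- == min_order)
			return -1; /* the kernel would attempt __force_merge() here */
	}
}

int main(void)
{
	/* 1 MiB request, 64 KiB minimum block size, 4 KiB chunks. */
	printf("allocated order %d\n", buddy_try_alloc(1ULL << 20, 64ULL << 10, 4ULL << 10));
	return 0;
}
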
/linux-6.12.1/fs/ext4/
mballoc.c
998 int i, order, min_order; in ext4_mb_choose_next_group_best_avail() local
1015 min_order = order - sbi->s_mb_best_avail_max_trim_order; in ext4_mb_choose_next_group_best_avail()
1016 if (min_order < 0) in ext4_mb_choose_next_group_best_avail()
1017 min_order = 0; in ext4_mb_choose_next_group_best_avail()
1025 if (1 << min_order < num_stripe_clusters) in ext4_mb_choose_next_group_best_avail()
1030 min_order = fls(num_stripe_clusters) - 1; in ext4_mb_choose_next_group_best_avail()
1033 if (1 << min_order < ac->ac_o_ex.fe_len) in ext4_mb_choose_next_group_best_avail()
1034 min_order = fls(ac->ac_o_ex.fe_len); in ext4_mb_choose_next_group_best_avail()
1036 for (i = order; i >= min_order; i--) { in ext4_mb_choose_next_group_best_avail()
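
ext4's mballoc hit computes how far a request may be trimmed when hunting for the group with the best available extent: start from the goal order minus a tunable trim limit, never go below zero, and never trim below one stripe or below the length originally asked for. A condensed model of that clamping; the parameter names are simplified stand-ins for the superblock and allocation-context fields:

#include <stdio.h>

/* fls(): 1-based position of the most significant set bit, like the kernel's. */
static int fls_u32(unsigned int x)
{
	int r = 0;

	while (x) {
		r++;
		x >>= 1;
	}
	return r;
}

/* Lowest order a request may be trimmed to, following the clamping in
 * ext4_mb_choose_next_group_best_avail() (mballoc.c lines 1015-1034). */
static int min_trim_order(int goal_order, int max_trim_order,
			  unsigned int stripe_clusters, unsigned int orig_len)
{
	int min_order = goal_order - max_trim_order;

	if (min_order < 0)
		min_order = 0;

	/* Do not trim below one stripe when striping is configured. */
	if (stripe_clusters && (1U << min_order) < stripe_clusters)
		min_order = fls_u32(stripe_clusters) - 1;

	/* Never trim below the length the caller actually asked for. */
	if ((1U << min_order) < orig_len)
		min_order = fls_u32(orig_len);

	return min_order;
}

int main(void)
{
	/* Goal order 9, trim limit 3, no striping, 100-cluster original request. */
	printf("min trim order: %d\n", min_trim_order(9, 3, 0, 100));
	return 0;
}
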
/linux-6.12.1/drivers/md/bcache/
btree.c
608 static int mca_reap(struct btree *b, unsigned int min_order, bool flush) in mca_reap() argument
620 if (b->keys.page_order < min_order) in mca_reap()
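
Finally, bcache's mca_reap() uses min_order as a filter when shrinking the btree node cache: a node whose buffer order is below the requested minimum is not worth reclaiming for that request and is skipped. A minimal sketch of that filter over a list of cached nodes; the struct layout and the first-fit walk are invented for illustration:

#include <stdio.h>

/* Invented stand-in for the bits of struct btree that matter here. */
struct btree_node {
	unsigned int page_order; /* size of the node's buffer, in page orders */
	int id;
};

/* Return the first cached node big enough for 'min_order', skipping smaller
 * ones, in the spirit of the check at btree.c line 620. */
static const struct btree_node *reap_first_fit(const struct btree_node *nodes,
					       int count, unsigned int min_order)
{
	for (int i = 0; i < count; i++)
		if (nodes[i].page_order >= min_order)
			return &nodes[i];
	return NULL;
}

int main(void)
{
	const struct btree_node cache[] = {
		{ .page_order = 0, .id = 1 },
		{ .page_order = 2, .id = 2 },
		{ .page_order = 3, .id = 3 },
	};
	const struct btree_node *victim = reap_first_fit(cache, 3, 2);

	printf("reaping node %d\n", victim ? victim->id : -1);
	return 0;
}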