Lines matching references to zone
26 static inline void show_node(struct zone *zone) in show_node() argument
29 printk("Node %d ", zone_to_nid(zone)); in show_node()
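show_node() is the small helper the zone reports use to prefix a line with the zone's NUMA node via zone_to_nid(). A minimal sketch of the same pattern, assuming a kernel build context (print_zone_nodes is an illustrative name, not a function in this file):

#include <linux/mmzone.h>
#include <linux/printk.h>

/* Sketch: print the NUMA node of every populated zone. */
static void print_zone_nodes(void)
{
	struct zone *zone;

	for_each_populated_zone(zone)
		printk("Node %d zone %s\n", zone_to_nid(zone), zone->name);
}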
38 struct zone *zone; in si_mem_available() local
40 for_each_zone(zone) in si_mem_available()
41 wmark_low += low_wmark_pages(zone); in si_mem_available()
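si_mem_available() begins by summing the low watermark of every zone; that total is later subtracted from the free and reclaimable page counts to estimate how much memory can be allocated without pushing the system into reclaim. A sketch of just that watermark pass, assuming a kernel build context (sum_low_watermarks is an illustrative name):

#include <linux/mmzone.h>

/* Sketch: accumulate each zone's low watermark, as si_mem_available() does. */
static unsigned long sum_low_watermarks(void)
{
	struct zone *zone;
	unsigned long wmark_low = 0;

	for_each_zone(zone)
		wmark_low += low_wmark_pages(zone);

	return wmark_low;
}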
104 struct zone *zone = &pgdat->node_zones[zone_type]; in si_meminfo_node() local
106 if (is_highmem(zone)) { in si_meminfo_node()
107 managed_highpages += zone_managed_pages(zone); in si_meminfo_node()
108 free_highpages += zone_page_state(zone, NR_FREE_PAGES); in si_meminfo_node()
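si_meminfo_node() walks the zones of a single node through pgdat->node_zones[] and, for zones marked highmem, adds their managed and free pages to the node's highmem totals. A sketch of that per-node walk, assuming a kernel build context (count_node_highmem is an illustrative name):

#include <linux/mmzone.h>
#include <linux/vmstat.h>

/* Sketch: tally managed and free highmem pages on one node. */
static void count_node_highmem(pg_data_t *pgdat,
			       unsigned long *managed_highpages,
			       unsigned long *free_highpages)
{
	int zone_type;

	for (zone_type = 0; zone_type < MAX_NR_ZONES; zone_type++) {
		struct zone *zone = &pgdat->node_zones[zone_type];

		if (is_highmem(zone)) {
			*managed_highpages += zone_managed_pages(zone);
			*free_highpages += zone_page_state(zone, NR_FREE_PAGES);
		}
	}
}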
190 struct zone *zone; in show_free_areas() local
193 for_each_populated_zone(zone) { in show_free_areas()
194 if (zone_idx(zone) > max_zone_idx) in show_free_areas()
196 if (show_mem_node_skip(filter, zone_to_nid(zone), nodemask)) in show_free_areas()
200 free_pcp += per_cpu_ptr(zone->per_cpu_pageset, cpu)->count; in show_free_areas()
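Before printing anything, show_free_areas() totals the pages sitting on every CPU's per-CPU page lists so they can be reported separately from the zone free counters. A sketch of that accumulation, assuming a kernel where the lists hang off zone->per_cpu_pageset (count_pcp_pages is an illustrative name):

#include <linux/cpumask.h>
#include <linux/mmzone.h>
#include <linux/percpu.h>

/* Sketch: count pages cached on per-CPU page lists across all zones. */
static unsigned long count_pcp_pages(void)
{
	struct zone *zone;
	unsigned long free_pcp = 0;
	int cpu;

	for_each_populated_zone(zone) {
		for_each_online_cpu(cpu)
			free_pcp += per_cpu_ptr(zone->per_cpu_pageset, cpu)->count;
	}

	return free_pcp;
}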
292 for_each_populated_zone(zone) { in show_free_areas()
295 if (zone_idx(zone) > max_zone_idx) in show_free_areas()
297 if (show_mem_node_skip(filter, zone_to_nid(zone), nodemask)) in show_free_areas()
302 free_pcp += per_cpu_ptr(zone->per_cpu_pageset, cpu)->count; in show_free_areas()
304 show_node(zone); in show_free_areas()
327 zone->name, in show_free_areas()
328 K(zone_page_state(zone, NR_FREE_PAGES)), in show_free_areas()
329 K(zone->watermark_boost), in show_free_areas()
330 K(min_wmark_pages(zone)), in show_free_areas()
331 K(low_wmark_pages(zone)), in show_free_areas()
332 K(high_wmark_pages(zone)), in show_free_areas()
333 K(zone->nr_reserved_highatomic), in show_free_areas()
334 K(zone_page_state(zone, NR_ZONE_ACTIVE_ANON)), in show_free_areas()
335 K(zone_page_state(zone, NR_ZONE_INACTIVE_ANON)), in show_free_areas()
336 K(zone_page_state(zone, NR_ZONE_ACTIVE_FILE)), in show_free_areas()
337 K(zone_page_state(zone, NR_ZONE_INACTIVE_FILE)), in show_free_areas()
338 K(zone_page_state(zone, NR_ZONE_UNEVICTABLE)), in show_free_areas()
339 K(zone_page_state(zone, NR_ZONE_WRITE_PENDING)), in show_free_areas()
340 K(zone->present_pages), in show_free_areas()
341 K(zone_managed_pages(zone)), in show_free_areas()
342 K(zone_page_state(zone, NR_MLOCK)), in show_free_areas()
343 K(zone_page_state(zone, NR_BOUNCE)), in show_free_areas()
345 K(this_cpu_read(zone->per_cpu_pageset->count)), in show_free_areas()
346 K(zone_page_state(zone, NR_FREE_CMA_PAGES))); in show_free_areas()
349 printk(KERN_CONT " %ld", zone->lowmem_reserve[i]); in show_free_areas()
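The block above prints one summary line per populated zone: the free page count, the boost/min/low/high watermarks, the highatomic reserve, the zone-level LRU counters, present vs. managed sizes, per-CPU and CMA free pages, and finally the zone's lowmem_reserve[] protection array. A compact sketch of reading a few of the same counters for one zone, assuming a kernel build context (dump_zone_summary and its output layout are illustrative, not the exact show_free_areas() format):

#include <linux/mm.h>
#include <linux/mmzone.h>
#include <linux/printk.h>
#include <linux/vmstat.h>

#define K_PAGES(x) ((x) << (PAGE_SHIFT - 10))	/* pages -> KiB, like the file's K() macro */

/* Sketch: dump a few headline counters for one zone. */
static void dump_zone_summary(struct zone *zone)
{
	int i;

	printk("%s free:%lukB min:%lukB low:%lukB high:%lukB managed:%lukB\n",
	       zone->name,
	       K_PAGES(zone_page_state(zone, NR_FREE_PAGES)),
	       K_PAGES(min_wmark_pages(zone)),
	       K_PAGES(low_wmark_pages(zone)),
	       K_PAGES(high_wmark_pages(zone)),
	       K_PAGES(zone_managed_pages(zone)));

	printk("lowmem_reserve[]:");
	for (i = 0; i < MAX_NR_ZONES; i++)
		printk(KERN_CONT " %ld", zone->lowmem_reserve[i]);
	printk(KERN_CONT "\n");
}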
353 for_each_populated_zone(zone) { in show_free_areas()
358 if (zone_idx(zone) > max_zone_idx) in show_free_areas()
360 if (show_mem_node_skip(filter, zone_to_nid(zone), nodemask)) in show_free_areas()
362 show_node(zone); in show_free_areas()
363 printk(KERN_CONT "%s: ", zone->name); in show_free_areas()
365 spin_lock_irqsave(&zone->lock, flags); in show_free_areas()
367 struct free_area *area = &zone->free_area[order]; in show_free_areas()
379 spin_unlock_irqrestore(&zone->lock, flags); in show_free_areas()
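The last per-zone loop takes zone->lock and walks zone->free_area[order] for every order so the buddy free lists cannot change while their counts are read and printed. A sketch of that locked walk, assuming a kernel build context (dump_zone_orders is an illustrative name, and the loop bound is spelled MAX_ORDER here; newer trees use NR_PAGE_ORDERS, so adjust to the kernel you build against):

#include <linux/mmzone.h>
#include <linux/printk.h>
#include <linux/spinlock.h>

/* Sketch: report free block counts per order while holding zone->lock. */
static void dump_zone_orders(struct zone *zone)
{
	unsigned long flags;
	unsigned int order;

	spin_lock_irqsave(&zone->lock, flags);
	for (order = 0; order < MAX_ORDER; order++) {
		struct free_area *area = &zone->free_area[order];

		printk("order %u: %lu free blocks\n", order, area->nr_free);
	}
	spin_unlock_irqrestore(&zone->lock, flags);
}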
403 struct zone *zone; in __show_mem() local
408 for_each_populated_zone(zone) { in __show_mem()
410 total += zone->present_pages; in __show_mem()
411 reserved += zone->present_pages - zone_managed_pages(zone); in __show_mem()
413 if (is_highmem(zone)) in __show_mem()
414 highmem += zone->present_pages; in __show_mem()
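__show_mem() ends with a whole-system tally: for every populated zone it adds present_pages to the running total, counts present minus managed pages as reserved, and adds highmem zones' present pages to a separate highmem figure before printing the summary lines. A sketch of that tally, assuming a kernel build context (summarize_memory is an illustrative name):

#include <linux/mmzone.h>
#include <linux/printk.h>

/* Sketch: tally present, reserved and highmem pages across all zones. */
static void summarize_memory(void)
{
	struct zone *zone;
	unsigned long total = 0, reserved = 0, highmem = 0;

	for_each_populated_zone(zone) {
		total += zone->present_pages;
		reserved += zone->present_pages - zone_managed_pages(zone);
		if (is_highmem(zone))
			highmem += zone->present_pages;
	}

	printk("%lu pages RAM\n", total);
	printk("%lu pages reserved\n", reserved);
	printk("%lu pages highmem\n", highmem);
}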