
Searched refs:lru_list (Results 1 – 25 of 27) sorted by relevance


/linux-6.12.1/fs/btrfs/
lru_cache.c
17 INIT_LIST_HEAD(&cache->lru_list); in btrfs_lru_cache_init()
57 list_move_tail(&entry->lru_list, &cache->lru_list); in btrfs_lru_cache_lookup()
79 list_del(&entry->lru_list); in btrfs_lru_cache_remove()
137 lru_entry = list_first_entry(&cache->lru_list, in btrfs_lru_cache_store()
139 lru_list); in btrfs_lru_cache_store()
143 list_add_tail(&new_entry->lru_list, &cache->lru_list); in btrfs_lru_cache_store()
161 list_for_each_entry_safe(entry, tmp, &cache->lru_list, lru_list) in btrfs_lru_cache_clear()
lru_cache.h
19 struct list_head lru_list; member
43 struct list_head lru_list; member
52 list_for_each_entry_safe_reverse((entry), (tmp), &(cache)->lru_list, lru_list)
57 return list_first_entry_or_null(&cache->lru_list, in btrfs_lru_cache_lru_entry()
58 struct btrfs_lru_cache_entry, lru_list); in btrfs_lru_cache_lru_entry()
zstd.c
52 struct list_head lru_list; member
80 struct list_head lru_list; member
116 if (list_empty(&wsm.lru_list)) { in zstd_reclaim_timer_fn()
121 list_for_each_prev_safe(pos, next, &wsm.lru_list) { in zstd_reclaim_timer_fn()
123 lru_list); in zstd_reclaim_timer_fn()
134 list_del(&victim->lru_list); in zstd_reclaim_timer_fn()
143 if (!list_empty(&wsm.lru_list)) in zstd_reclaim_timer_fn()
188 INIT_LIST_HEAD(&wsm.lru_list); in zstd_init_workspace_manager()
213 list_del(&workspace->lru_list); in zstd_cleanup_workspace_manager()
249 list_del(&workspace->lru_list); in zstd_find_workspace()
[all …]
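
The btrfs hits above show the stock list_head LRU idiom: entries sit on cache->lru_list ordered from least to most recently used, a lookup hit promotes the entry with list_move_tail(), and when the cache is full the victim is simply the first entry at the head. Below is a minimal userspace sketch of that idiom; the list helpers are simplified stand-ins for the kernel's <linux/list.h>, and the lru_cache_* names are illustrative, not the btrfs_lru_cache API.

/*
 * Minimal userspace sketch of the LRU idiom in the btrfs hits above:
 * entries live on a list ordered least- to most-recently used, a hit is
 * promoted with list_move_tail(), and eviction takes the list head.
 * The list helpers are simplified stand-ins for <linux/list.h>; the
 * lru_cache_* functions are illustrative, not the btrfs_lru_cache API.
 */
#include <stddef.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

struct list_head { struct list_head *prev, *next; };

static void INIT_LIST_HEAD(struct list_head *h) { h->prev = h->next = h; }

static void list_del(struct list_head *e)
{
	e->prev->next = e->next;
	e->next->prev = e->prev;
}

static void list_add_tail(struct list_head *e, struct list_head *h)
{
	e->prev = h->prev;
	e->next = h;
	h->prev->next = e;
	h->prev = e;
}

static void list_move_tail(struct list_head *e, struct list_head *h)
{
	list_del(e);
	list_add_tail(e, h);
}

#define container_of(ptr, type, member) \
	((type *)((char *)(ptr) - offsetof(type, member)))

struct lru_entry {
	char key[32];
	struct list_head lru_list;	/* links the entry into the cache */
};

struct lru_cache {
	struct list_head lru_list;	/* list head; first entry is the LRU victim */
	unsigned int size, limit;
};

static struct lru_entry *lru_cache_lookup(struct lru_cache *c, const char *key)
{
	struct list_head *pos;

	for (pos = c->lru_list.next; pos != &c->lru_list; pos = pos->next) {
		struct lru_entry *e = container_of(pos, struct lru_entry, lru_list);

		if (strcmp(e->key, key) == 0) {
			list_move_tail(&e->lru_list, &c->lru_list);	/* promote on hit */
			return e;
		}
	}
	return NULL;
}

static void lru_cache_store(struct lru_cache *c, const char *key)
{
	struct lru_entry *e;

	if (c->size == c->limit) {
		/* Evict the least recently used entry, i.e. the list head. */
		struct lru_entry *victim =
			container_of(c->lru_list.next, struct lru_entry, lru_list);

		list_del(&victim->lru_list);
		free(victim);
		c->size--;
	}
	e = calloc(1, sizeof(*e));
	snprintf(e->key, sizeof(e->key), "%s", key);
	list_add_tail(&e->lru_list, &c->lru_list);
	c->size++;
}

int main(void)
{
	struct lru_cache c = { .limit = 2 };

	INIT_LIST_HEAD(&c.lru_list);
	lru_cache_store(&c, "a");
	lru_cache_store(&c, "b");
	lru_cache_lookup(&c, "a");	/* "a" becomes most recently used */
	lru_cache_store(&c, "c");	/* evicts "b", the current list head */
	printf("b still cached? %s\n", lru_cache_lookup(&c, "b") ? "yes" : "no");
	return 0;
}

The zstd.c and later driver hits (i915 gvt, hfi1, vmwgfx, dm-vdo, panthor, gfs2) follow the same shape, differing mainly in what triggers eviction (a timer, memory pressure, or an address-space shortage).
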
/linux-6.12.1/drivers/gpu/drm/i915/gvt/
sched_policy.c
54 struct list_head lru_list; member
108 vgpu_data = container_of(pos, struct vgpu_sched_data, lru_list); in gvt_balance_timeslice()
113 vgpu_data = container_of(pos, struct vgpu_sched_data, lru_list); in gvt_balance_timeslice()
122 vgpu_data = container_of(pos, struct vgpu_sched_data, lru_list); in gvt_balance_timeslice()
186 vgpu_data = container_of(pos, struct vgpu_sched_data, lru_list); in find_busy_vgpu()
228 list_del_init(&vgpu_data->lru_list); in tbs_sched_func()
229 list_add_tail(&vgpu_data->lru_list, in tbs_sched_func()
321 INIT_LIST_HEAD(&data->lru_list); in tbs_sched_init_vgpu()
347 if (!list_empty(&vgpu_data->lru_list)) in tbs_sched_start_schedule()
355 list_add(&vgpu_data->lru_list, &sched_data->lru_runq_head); in tbs_sched_start_schedule()
[all …]
gtt.h
169 struct list_head lru_list; member
gtt.c
1879 INIT_LIST_HEAD(&mm->ppgtt_mm.lru_list); in intel_vgpu_create_ppgtt_mm()
1898 list_add_tail(&mm->ppgtt_mm.lru_list, &gvt->gtt.ppgtt_mm_lru_list_head); in intel_vgpu_create_ppgtt_mm()
1960 list_del(&mm->ppgtt_mm.lru_list); in _intel_vgpu_mm_release()
2007 list_move_tail(&mm->ppgtt_mm.lru_list, in intel_vgpu_pin_mm()
2023 mm = container_of(pos, struct intel_vgpu_mm, ppgtt_mm.lru_list); in reclaim_one_ppgtt_mm()
2028 list_del_init(&mm->ppgtt_mm.lru_list); in reclaim_one_ppgtt_mm()
2769 list_del_init(&mm->ppgtt_mm.lru_list); in intel_vgpu_invalidate_ppgtt()
/linux-6.12.1/include/linux/
mm_inline.h
39 enum lru_list lru, enum zone_type zid, in __update_lru_size()
53 enum lru_list lru, enum zone_type zid, in update_lru_size()
87 static __always_inline enum lru_list folio_lru_list(struct folio *folio) in folio_lru_list()
89 enum lru_list lru; in folio_lru_list()
181 enum lru_list lru = type * LRU_INACTIVE_FILE; in lru_gen_update_size()
321 enum lru_list lru = folio_lru_list(folio); in lruvec_add_folio()
335 enum lru_list lru = folio_lru_list(folio); in lruvec_add_folio_tail()
349 enum lru_list lru = folio_lru_list(folio); in lruvec_del_folio()
mmzone.h
276 enum lru_list { enum
297 static inline bool is_file_lru(enum lru_list lru) in is_file_lru()
302 static inline bool is_active_lru(enum lru_list lru) in is_active_lru()
memcontrol.h
934 void mem_cgroup_update_lru_size(struct lruvec *lruvec, enum lru_list lru,
939 enum lru_list lru, int zone_idx) in mem_cgroup_get_zone_lru_size()
1387 enum lru_list lru, int zone_idx) in mem_cgroup_get_zone_lru_size()
vmstat.h
516 static inline const char *lru_list_name(enum lru_list lru) in lru_list_name()
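
In the mm headers above, lru_list is something else entirely: not a struct list_head member but an enum indexing the per-lruvec page LRU lists (inactive/active anon, inactive/active file, unevictable). A condensed sketch of the definitions behind the mmzone.h and mm_inline.h hits, paraphrased from include/linux/mmzone.h (consult the file itself for the authoritative version; <stdbool.h> stands in for the kernel's bool type):

/* Condensed and paraphrased from include/linux/mmzone.h (see the hit above). */
#include <stdbool.h>

#define LRU_BASE	0
#define LRU_ACTIVE	1
#define LRU_FILE	2

enum lru_list {
	LRU_INACTIVE_ANON = LRU_BASE,
	LRU_ACTIVE_ANON = LRU_BASE + LRU_ACTIVE,
	LRU_INACTIVE_FILE = LRU_BASE + LRU_FILE,
	LRU_ACTIVE_FILE = LRU_BASE + LRU_FILE + LRU_ACTIVE,
	LRU_UNEVICTABLE,
	NR_LRU_LISTS
};

/* "File" lists hold page-cache folios; the others hold anonymous folios. */
static inline bool is_file_lru(enum lru_list lru)
{
	return lru == LRU_INACTIVE_FILE || lru == LRU_ACTIVE_FILE;
}

static inline bool is_active_lru(enum lru_list lru)
{
	return lru == LRU_ACTIVE_ANON || lru == LRU_ACTIVE_FILE;
}
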
/linux-6.12.1/fs/xfs/
xfs_mru_cache.c
132 struct list_head *lru_list; in _xfs_mru_cache_migrate() local
145 lru_list = mru->lists + mru->lru_grp; in _xfs_mru_cache_migrate()
146 if (!list_empty(lru_list)) in _xfs_mru_cache_migrate()
147 list_splice_init(lru_list, mru->reap_list.prev); in _xfs_mru_cache_migrate()
171 lru_list = mru->lists + ((mru->lru_grp + grp) % mru->grp_count); in _xfs_mru_cache_migrate()
172 if (!list_empty(lru_list)) in _xfs_mru_cache_migrate()
/linux-6.12.1/net/netfilter/
xt_recent.c
68 struct list_head lru_list; member
84 struct list_head lru_list; member
147 list_del(&e->lru_list); in recent_entry_remove()
163 e = list_entry(t->lru_list.next, struct recent_entry, lru_list); in recent_entry_reap()
186 e = list_entry(t->lru_list.next, struct recent_entry, lru_list); in recent_entry_init()
204 list_add_tail(&e->lru_list, &t->lru_list); in recent_entry_init()
215 list_move_tail(&e->lru_list, &t->lru_list); in recent_entry_update()
404 INIT_LIST_HEAD(&t->lru_list); in recent_mt_check()
/linux-6.12.1/drivers/infiniband/hw/hfi1/
mmu_rb.c
64 INIT_LIST_HEAD(&h->lru_list); in hfi1_mmu_rb_register()
139 list_add_tail(&mnode->list, &handler->lru_list); in hfi1_mmu_rb_insert()
155 list_move_tail(&node->list, &handler->lru_list); in hfi1_mmu_rb_get_first()
239 list_for_each_entry_safe(rbnode, ptr, &handler->lru_list, list) { in hfi1_mmu_rb_evict()
mmu_rb.h
46 struct list_head lru_list; member
/linux-6.12.1/drivers/gpu/drm/vmwgfx/
vmwgfx_resource.c
687 struct list_head *lru_list = &dev_priv->res_lru[res->func->res_type]; in vmw_resource_validate() local
704 if (list_empty(lru_list) || !res->func->may_evict) { in vmw_resource_validate()
713 (list_first_entry(lru_list, struct vmw_resource, in vmw_resource_validate()
723 list_add_tail(&evict_res->lru_head, lru_list); in vmw_resource_validate()
897 struct list_head *lru_list = &dev_priv->res_lru[type]; in vmw_resource_evict_type() local
906 if (list_empty(lru_list)) in vmw_resource_evict_type()
910 list_first_entry(lru_list, struct vmw_resource, in vmw_resource_evict_type()
919 list_add_tail(&evict_res->lru_head, lru_list); in vmw_resource_evict_type()
/linux-6.12.1/kernel/bpf/
bpf_lru_list.c
326 struct bpf_lru_list *l = &lru->common_lru.lru_list; in bpf_lru_list_pop_free_to_local()
538 bpf_lru_list_push_free(&lru->common_lru.lru_list, node); in bpf_common_lru_push_free()
568 struct bpf_lru_list *l = &lru->common_lru.lru_list; in bpf_common_lru_populate()
682 bpf_lru_list_init(&clru->lru_list); in bpf_lru_init()
bpf_lru_list.h
47 struct bpf_lru_list lru_list; member
/linux-6.12.1/include/trace/events/
pagemap.h
37 __field(enum lru_list, lru )
/linux-6.12.1/mm/
mmzone.c
77 enum lru_list lru; in lruvec_init()
vmscan.c
387 static unsigned long lruvec_lru_size(struct lruvec *lruvec, enum lru_list lru, in lruvec_lru_size()
1615 enum lru_list lru, unsigned long *nr_zone_taken) in update_lru_sizes()
1652 enum lru_list lru) in isolate_lru_folios()
1916 enum lru_list lru) in shrink_inactive_list()
2037 enum lru_list lru) in shrink_active_list()
2186 static unsigned long shrink_list(enum lru_list lru, unsigned long nr_to_scan, in shrink_list()
2228 static bool inactive_is_low(struct lruvec *lruvec, enum lru_list inactive_lru) in inactive_is_low()
2230 enum lru_list active_lru = inactive_lru + LRU_ACTIVE; in inactive_is_low()
2380 enum lru_list lru; in get_scan_count()
3213 enum lru_list lru = type * LRU_INACTIVE_FILE; in reset_batch_size()
[all …]
memcontrol-v1.c
2638 enum lru_list lru; in mem_cgroup_node_nr_lru_pages()
2658 enum lru_list lru; in mem_cgroup_nr_lru_pages()
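
The inactive_is_low() and reset_batch_size() hits in vmscan.c, like lru_gen_update_size() in mm_inline.h, lean on the layout of that enum: each active list directly follows its inactive counterpart, so inactive_lru + LRU_ACTIVE names the matching active list, and an anon(0)/file(1) type index maps to its base inactive list via type * LRU_INACTIVE_FILE. A tiny standalone check of that arithmetic; the enum is repeated from the sketch above, and the loop itself is illustrative, not kernel code:

#include <assert.h>

#define LRU_ACTIVE	1

enum lru_list {
	LRU_INACTIVE_ANON, LRU_ACTIVE_ANON,
	LRU_INACTIVE_FILE, LRU_ACTIVE_FILE,
	LRU_UNEVICTABLE, NR_LRU_LISTS
};

int main(void)
{
	int type;	/* 0 = anon, 1 = file, as in the anon/file scan loops */

	for (type = 0; type <= 1; type++) {
		enum lru_list inactive = type * LRU_INACTIVE_FILE;	/* 0 or 2 */
		enum lru_list active = inactive + LRU_ACTIVE;		/* matching active list */

		assert(inactive == (type ? LRU_INACTIVE_FILE : LRU_INACTIVE_ANON));
		assert(active == (type ? LRU_ACTIVE_FILE : LRU_ACTIVE_ANON));
	}
	return 0;
}
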
/linux-6.12.1/drivers/md/dm-vdo/
block-map.h
68 struct list_head lru_list; member
block-map.c
340 if (info->cache->lru_list.prev != &info->lru_entry) in update_lru()
341 list_move_tail(&info->lru_entry, &info->cache->lru_list); in update_lru()
466 list_for_each_entry(info, &cache->lru_list, lru_entry) in select_lru_page()
2769 INIT_LIST_HEAD(&zone->page_cache.lru_list); in initialize_block_map_zone()
/linux-6.12.1/drivers/gpu/drm/panthor/
panthor_mmu.c
85 struct list_head lru_list; member
747 lru_vm = list_first_entry_or_null(&ptdev->mmu->as.lru_list, in panthor_vm_active()
823 list_add_tail(&vm->as.lru_node, &ptdev->mmu->as.lru_list); in panthor_vm_idle()
2703 INIT_LIST_HEAD(&mmu->as.lru_list); in panthor_mmu_init()
/linux-6.12.1/fs/gfs2/
glock.c
68 static LIST_HEAD(lru_list);
222 list_move_tail(&gl->gl_lru, &lru_list); in gfs2_glock_add_to_lru()
2021 list_move(&gl->gl_lru, &lru_list); in gfs2_dispose_glock_lru()
2058 list_for_each_entry_safe(gl, next, &lru_list, gl_lru) { in gfs2_scan_glock_lru()
