References to the identifier need_flush in linux-6.12.1:

/linux-6.12.1/arch/loongarch/include/asm/
  mmu_context.h
        52  get_new_mmu_context(struct mm_struct *mm, unsigned long cpu, bool *need_flush)  in get_new_mmu_context() argument
        57  *need_flush = true; /* start new asid cycle */  in get_new_mmu_context()
        91  bool need_flush = false;  in switch_mm_irqs_off() local
        96  get_new_mmu_context(next, cpu, &need_flush);  in switch_mm_irqs_off()
       103  if (need_flush)  in switch_mm_irqs_off()
       151  bool need_flush = false;  in drop_mmu_context() local
       154  get_new_mmu_context(mm, cpu, &need_flush);  in drop_mmu_context()
       157  if (need_flush)  in drop_mmu_context()
/linux-6.12.1/arch/x86/mm/
  tlb.c
       217  u16 *new_asid, bool *need_flush)  in choose_new_asid() argument
       223  *need_flush = true;  in choose_new_asid()
       236  *need_flush = (this_cpu_read(cpu_tlbstate.ctxs[asid].tlb_gen) <  in choose_new_asid()
       250  *need_flush = true;  in choose_new_asid()
       280  bool need_flush)  in load_new_mm_cr3() argument
       284  if (need_flush) {  in load_new_mm_cr3()
       507  bool need_flush;  in switch_mm_irqs_off() local
       600  need_flush = true;  in switch_mm_irqs_off()
       624  choose_new_asid(next, next_tlb_gen, &new_asid, &need_flush);  in switch_mm_irqs_off()
       632  if (need_flush) {  in switch_mm_irqs_off()
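The LoongArch and x86 entries above share one shape: the context/ASID chooser reports its decision through a bool *need_flush out-parameter, and the caller (switch_mm_irqs_off() in both files) performs at most one TLB flush, only when the chooser asked for it. Below is a minimal standalone sketch of that out-parameter pattern; choose_context(), the stubbed flush_tlb_all() and the tiny MAX_ASID value are illustrative stand-ins, not the kernel's real code:

    #include <stdbool.h>
    #include <stdio.h>

    #define MAX_ASID 4                      /* tiny ID space to force wrap-around */

    static unsigned int last_asid;

    /* Stand-in for the real flush primitive. */
    static void flush_tlb_all(void)
    {
        puts("TLB flushed");
    }

    /*
     * Pick the next context ID; report through the out-parameter whether
     * the caller must flush (here: whenever the ID space wraps).
     */
    static unsigned int choose_context(bool *need_flush)
    {
        *need_flush = false;
        if (++last_asid >= MAX_ASID) {
            last_asid = 1;
            *need_flush = true;             /* start a new ASID cycle */
        }
        return last_asid;
    }

    static void switch_context(void)
    {
        bool need_flush = false;
        unsigned int asid = choose_context(&need_flush);

        printf("now running with ASID %u\n", asid);
        if (need_flush)                     /* flush once, only when required */
            flush_tlb_all();
    }

    int main(void)
    {
        for (int i = 0; i < 6; i++)
            switch_context();
        return 0;
    }

Keeping the decision in an out-parameter lets one chooser serve several callers (the context switch and drop_mmu_context() above) while each caller stays in charge of the actual flush.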
/linux-6.12.1/arch/arm/mm/
  pmsa-v7.c
       368  unsigned int subregions, bool need_flush)  in mpu_setup_region() argument
       387  if (need_flush)  in mpu_setup_region()
       441  bool need_flush = region == PMSAv7_RAM_REGION;  in pmsav7_setup() local
       448  xip[i].subreg, need_flush);  in pmsav7_setup()
/linux-6.12.1/arch/x86/kernel/
  amd_gart_64.c
        87  static bool need_flush; /* global flush state. set for each gart wrap */  variable
       104  need_flush = true;  in alloc_iommu()
       113  need_flush = true;  in alloc_iommu()
       117  need_flush = true;  in alloc_iommu()
       142  if (need_flush) {  in flush_gart()
       144  need_flush = false;  in flush_gart()
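amd_gart_64.c above is the file-scope variant: a single static need_flush that the allocation paths set whenever the GART aperture wraps, and that flush_gart() tests and clears, so the expensive hardware flush runs once per batch rather than once per mapping. The sketch below mirrors that set-on-wrap, flush-and-clear-later structure; APERTURE_SLOTS, hw_flush() and the slot allocator are invented for the example:

    #include <stdbool.h>
    #include <stdio.h>

    #define APERTURE_SLOTS 8                /* made-up allocator size */

    static bool need_flush;                 /* global flush state, set on each wrap */
    static unsigned int next_slot;

    /* Hypothetical stand-in for the hardware GART/IOTLB flush. */
    static void hw_flush(void)
    {
        puts("GART flushed");
    }

    static unsigned int alloc_slot(void)
    {
        if (next_slot == APERTURE_SLOTS) {
            next_slot = 0;
            need_flush = true;              /* wrapped: stale entries may be cached */
        }
        return next_slot++;
    }

    static void flush_if_needed(void)
    {
        if (need_flush) {
            hw_flush();
            need_flush = false;
        }
    }

    int main(void)
    {
        for (int i = 0; i < 20; i++)
            alloc_slot();
        flush_if_needed();                  /* one flush covers the whole batch */
        return 0;
    }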
/linux-6.12.1/drivers/gpu/drm/etnaviv/
  etnaviv_buffer.c
       351  bool need_flush = switch_mmu_context || gpu->flush_seq != new_flush_seq;  in etnaviv_buffer_queue() local
       369  if (need_flush || switch_context) {  in etnaviv_buffer_queue()
       376  if (need_flush) {  in etnaviv_buffer_queue()
       405  if (need_flush) {  in etnaviv_buffer_queue()
/linux-6.12.1/arch/sparc/kernel/
  iommu-common.c
        19  static inline bool need_flush(struct iommu_map_table *iommu)  in need_flush() function
       206  (n < pool->hint || need_flush(iommu))) {  in iommu_tbl_range_alloc()
/linux-6.12.1/mm/
  highmem.c
       199  int need_flush = 0;  in flush_all_zero_pkmaps() local
       232  need_flush = 1;  in flush_all_zero_pkmaps()
       234  if (need_flush)  in flush_all_zero_pkmaps()
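flush_all_zero_pkmaps() in mm/highmem.c is the accumulate-then-flush form: walk every pkmap slot, tear down the unused ones while recording need_flush, and issue a single TLB flush after the loop instead of one per entry. A compilable sketch of that loop structure, with an invented refcount[] table and a stubbed range flush:

    #include <stdbool.h>
    #include <stdio.h>

    #define NR_ENTRIES 16

    static int refcount[NR_ENTRIES];        /* invented stand-in for pkmap_count */

    /* Hypothetical stand-in for flushing the whole mapping range. */
    static void flush_kernel_range(void)
    {
        puts("flushed the mapping range once");
    }

    static void reclaim_unused(void)
    {
        bool need_flush = false;

        for (int i = 0; i < NR_ENTRIES; i++) {
            if (refcount[i] != 0)
                continue;                   /* still in use, keep the mapping */
            /* ...tear down entry i here... */
            need_flush = true;
        }

        if (need_flush)                     /* one flush covers every teardown */
            flush_kernel_range();
    }

    int main(void)
    {
        reclaim_unused();
        return 0;
    }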
/linux-6.12.1/drivers/infiniband/hw/mlx4/
  cm.c
       529  int need_flush = 0;  in mlx4_ib_cm_paravirt_clean() local
       537  need_flush |= !cancel_delayed_work(&map->timeout);  in mlx4_ib_cm_paravirt_clean()
       543  if (need_flush)  in mlx4_ib_cm_paravirt_clean()
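The mlx4 cleanup path applies the same accumulation to deferred work: cancel_delayed_work() returns false when the item was no longer pending (for instance because it is already executing), so the loop ORs those misses into need_flush and waits on the workqueue once, only if at least one cancellation came too late. A simplified userspace sketch of that shape, with try_cancel() and wait_for_stragglers() as hypothetical stand-ins for the workqueue API:

    #include <stdbool.h>
    #include <stdio.h>

    #define NR_ITEMS 4

    /*
     * Hypothetical stand-in: returns true if the pending item was cancelled
     * in time, false if it had already started and must be waited for.
     */
    static bool try_cancel(int i)
    {
        return (i % 2) == 0;                /* pretend every other cancel is too late */
    }

    /* Stand-in for flushing the workqueue. */
    static void wait_for_stragglers(void)
    {
        puts("waiting for in-flight items to finish");
    }

    int main(void)
    {
        int need_flush = 0;

        for (int i = 0; i < NR_ITEMS; i++)
            need_flush |= !try_cancel(i);   /* remember every missed cancel */

        if (need_flush)
            wait_for_stragglers();          /* wait once, after the loop */
        return 0;
    }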
/linux-6.12.1/arch/arm64/kvm/hyp/
  pgtable.c
      1115  bool need_flush = false;  in stage2_unmap_walker() local
      1131  need_flush = !stage2_has_fwb(pgt);  in stage2_unmap_walker()
      1141  if (need_flush && mm_ops->dcache_clean_inval_poc)  in stage2_unmap_walker()
/linux-6.12.1/fs/ceph/
  snap.c
       583  capsnap->need_flush = true;  in ceph_queue_cap_snap()
       595  ceph_cap_string(dirty), capsnap->need_flush ? "" : "no_flush");  in ceph_queue_cap_snap()
  super.h
       248  bool need_flush;  member
  caps.c
      1489  if (capsnap->need_flush) {  in __prep_cap()
      1654  BUG_ON(!capsnap->need_flush);  in __ceph_flush_snaps()
      3217  if (!capsnap->need_flush &&  in ceph_try_drop_cap_snap()
/linux-6.12.1/drivers/md/
  dm-writecache.c
      1000  bool need_flush = false;  in writecache_resume() local
      1071  need_flush = true;  in writecache_resume()
      1092  need_flush = true;  in writecache_resume()
      1099  if (need_flush) {  in writecache_resume()
  raid10.c
      4651  int need_flush = 0;  in reshape_request() local
      4692  need_flush = 1;  in reshape_request()
      4714  need_flush = 1;  in reshape_request()
      4724  if (need_flush ||  in reshape_request()
/linux-6.12.1/drivers/gpu/drm/i915/gem/selftests/
  i915_gem_context.c
       480  unsigned int need_flush;  in cpu_fill() local
       485  err = i915_gem_object_prepare_write(obj, &need_flush);  in cpu_fill()
/linux-6.12.1/drivers/net/vmxnet3/
  vmxnet3_drv.c
      1581  bool need_flush = false;  in vmxnet3_rq_rx_complete() local
      1642  need_flush |= act == XDP_REDIRECT;  in vmxnet3_rq_rx_complete()
      1699  need_flush |= act == XDP_REDIRECT;  in vmxnet3_rq_rx_complete()
      1972  if (need_flush)  in vmxnet3_rq_rx_complete()
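In vmxnet3_rq_rx_complete() the flag batches XDP work: each packet whose program verdict is XDP_REDIRECT sets need_flush, and the redirect queues are flushed once after the receive loop finishes (in the driver that final step is xdp_do_flush()). The sketch below keeps only that batching shape; the VERDICT_* names and flush_redirects() are placeholders rather than the real XDP constants and helpers:

    #include <stdbool.h>
    #include <stdio.h>

    /* Simplified verdicts, not the real XDP action codes. */
    enum verdict { VERDICT_PASS, VERDICT_DROP, VERDICT_REDIRECT };

    /* Stand-in for the end-of-poll redirect flush. */
    static void flush_redirects(void)
    {
        puts("redirect queues flushed once for the whole batch");
    }

    int main(void)
    {
        enum verdict batch[] = {
            VERDICT_PASS, VERDICT_REDIRECT, VERDICT_DROP, VERDICT_REDIRECT,
        };
        bool need_flush = false;

        for (unsigned int i = 0; i < sizeof(batch) / sizeof(batch[0]); i++)
            need_flush |= batch[i] == VERDICT_REDIRECT;     /* remember any redirect */

        if (need_flush)                     /* defer the flush until the poll ends */
            flush_redirects();
        return 0;
    }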