/linux-6.12.1/lib/ |
D | maple_tree.c |
  200  static void mas_set_height(struct ma_state *mas)  in mas_set_height() argument
  202  unsigned int new_flags = mas->tree->ma_flags;  in mas_set_height()
  205  MAS_BUG_ON(mas, mas->depth > MAPLE_HEIGHT_MAX);  in mas_set_height()
  206  new_flags |= mas->depth << MT_FLAGS_HEIGHT_OFFSET;  in mas_set_height()
  207  mas->tree->ma_flags = new_flags;  in mas_set_height()
  210  static unsigned int mas_mt_height(struct ma_state *mas)  in mas_mt_height() argument
  212  return mt_height(mas->tree);  in mas_mt_height()
  252  static __always_inline void mas_set_err(struct ma_state *mas, long err)  in mas_set_err() argument
  254  mas->node = MA_ERROR(err);  in mas_set_err()
  255  mas->status = ma_error;  in mas_set_err()
  [all …]
|
D | test_maple_tree.c |
  22   #define mas_dump(mas) do {} while (0)  argument
  23   #define mas_wr_dump(mas) do {} while (0)  argument
  352  MA_STATE(mas, mt, 0, 0);  in check_rev_find()
  359  mas_set(&mas, 1000);  in check_rev_find()
  360  val = mas_find_rev(&mas, 1000);  in check_rev_find()
  362  val = mas_find_rev(&mas, 1000);  in check_rev_find()
  365  mas_set(&mas, 999);  in check_rev_find()
  366  val = mas_find_rev(&mas, 997);  in check_rev_find()
  369  mas_set(&mas, 1000);  in check_rev_find()
  370  val = mas_find_rev(&mas, 900);  in check_rev_find()
  [all …]
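The check_rev_find() lines above exercise reverse lookup: position the state with mas_set(), then walk backwards with mas_find_rev(). A minimal sketch of that pattern, with an invented helper name and caller-chosen bounds (not taken from the test file):

#include <linux/maple_tree.h>

/* Return the last entry stored at or below @index, stopping at @min. */
static void *lookup_at_or_below(struct maple_tree *mt, unsigned long index,
                                unsigned long min)
{
        MA_STATE(mas, mt, index, index);
        void *entry;

        rcu_read_lock();
        entry = mas_find_rev(&mas, min);  /* searches downward from mas.index */
        rcu_read_unlock();

        return entry;
}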
|
/linux-6.12.1/drivers/spi/ |
D | spi-geni-qcom.c |
  106  static void spi_slv_setup(struct spi_geni_master *mas)  in spi_slv_setup() argument
  108  struct geni_se *se = &mas->se;  in spi_slv_setup()
  113  dev_dbg(mas->dev, "spi slave setup done\n");  in spi_slv_setup()
  117  struct spi_geni_master *mas,  in get_spi_clk_cfg() argument
  125  ret = geni_se_clk_freq_match(&mas->se,  in get_spi_clk_cfg()
  126  speed_hz * mas->oversampling,  in get_spi_clk_cfg()
  129  dev_err(mas->dev, "Failed(%d) to find src clk for %dHz\n",  in get_spi_clk_cfg()
  134  *clk_div = DIV_ROUND_UP(sclk_freq, mas->oversampling * speed_hz);  in get_spi_clk_cfg()
  135  actual_hz = sclk_freq / (mas->oversampling * *clk_div);  in get_spi_clk_cfg()
  137  dev_dbg(mas->dev, "req %u=>%u sclk %lu, idx %d, div %d\n", speed_hz,  in get_spi_clk_cfg()
  [all …]
|
/linux-6.12.1/drivers/base/regmap/ |
D | regcache-maple.c |
  20  MA_STATE(mas, mt, reg, reg);  in regcache_maple_read()
  25  entry = mas_walk(&mas);  in regcache_maple_read()
  31  *value = entry[reg - mas.index];  in regcache_maple_read()
  42  MA_STATE(mas, mt, reg, reg);  in regcache_maple_write()
  50  entry = mas_walk(&mas);  in regcache_maple_write()
  52  entry[reg - mas.index] = val;  in regcache_maple_write()
  58  mas_set_range(&mas, reg - 1, reg + 1);  in regcache_maple_write()
  62  lower = mas_find(&mas, reg - 1);  in regcache_maple_write()
  64  index = mas.index;  in regcache_maple_write()
  65  lower_sz = (mas.last - mas.index + 1) * sizeof(unsigned long);  in regcache_maple_write()
  [all …]
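The regcache hits above rely on a convention where each maple tree entry is a block of register values spanning [mas.index, mas.last]. A minimal sketch of that read path, assuming the same convention; the function name and error handling are illustrative, not the regmap implementation:

#include <linux/errno.h>
#include <linux/maple_tree.h>

/* Look up one register inside a block entry covering a range of registers. */
static int cache_read_one(struct maple_tree *mt, unsigned int reg,
                          unsigned int *value)
{
        MA_STATE(mas, mt, reg, reg);
        unsigned long *block;
        int ret = 0;

        rcu_read_lock();
        block = mas_walk(&mas);  /* sets mas.index/mas.last to the block's range */
        if (block)
                *value = block[reg - mas.index];
        else
                ret = -ENOENT;
        rcu_read_unlock();

        return ret;
}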
|
/linux-6.12.1/include/linux/ |
D | maple_tree.h |
  272  #define mtree_lock_nested(mas, subclass) \  argument
  455  struct ma_state *mas;  member
  468  #define mas_lock(mas) spin_lock(&((mas)->tree->ma_lock))  argument
  469  #define mas_lock_nested(mas, subclass) \  argument
  470  spin_lock_nested(&((mas)->tree->ma_lock), subclass)
  471  #define mas_unlock(mas) spin_unlock(&((mas)->tree->ma_lock))  argument
  498  .mas = ma_state, \
  510  void *mas_walk(struct ma_state *mas);
  511  void *mas_store(struct ma_state *mas, void *entry);
  512  void *mas_erase(struct ma_state *mas);
  [all …]
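The prototypes at lines 510-512 are the core of the advanced API, and the mas_lock()/mas_unlock() macros at 468-471 take the tree's internal spinlock around them. A hedged sketch of how they combine on an internally locked tree; the tree and function names are invented for illustration:

#include <linux/gfp.h>
#include <linux/maple_tree.h>

static DEFINE_MTREE(demo_mt);

/* Store @ptr at @index, read it back with mas_walk(), then erase it. */
static void *demo_store_walk_erase(unsigned long index, void *ptr)
{
        MA_STATE(mas, &demo_mt, index, index);
        void *entry;

        mas_lock(&mas);                 /* spin_lock(&demo_mt.ma_lock), per the macro above */
        if (mas_store_gfp(&mas, ptr, GFP_KERNEL)) {
                mas_unlock(&mas);
                return NULL;            /* allocation failed */
        }
        mas_set(&mas, index);           /* reposition before walking again */
        entry = mas_walk(&mas);
        mas_erase(&mas);
        mas_unlock(&mas);

        return entry;
}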
|
D | mm.h |
  998   return mas_find(&vmi->mas, max - 1);  in vma_find()
  1007  return mas_find(&vmi->mas, ULONG_MAX);  in vma_next()
  1013  return mas_next_range(&vmi->mas, ULONG_MAX);  in vma_iter_next_range()
  1019  return mas_prev(&vmi->mas, 0);  in vma_prev()
  1025  __mas_set_range(&vmi->mas, start, end - 1);  in vma_iter_clear_gfp()
  1026  mas_store_gfp(&vmi->mas, NULL, gfp);  in vma_iter_clear_gfp()
  1027  if (unlikely(mas_is_err(&vmi->mas)))  in vma_iter_clear_gfp()
  1036  mas_destroy(&vmi->mas);  in vma_iter_free()
  1042  vmi->mas.index = vma->vm_start;  in vma_iter_bulk_store()
  1043  vmi->mas.last = vma->vm_end - 1;  in vma_iter_bulk_store()
  [all …]
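These mm.h wrappers are how most mm code drives the maple state rather than touching vmi->mas directly. A hedged sketch of the usual consumer loop built on them; VMA_ITERATOR() and for_each_vma() are the standard helpers, while the counting function itself is hypothetical:

#include <linux/mm.h>

/* Count the VMAs of @mm by walking the tree through the iterator wrappers. */
static unsigned long count_vmas(struct mm_struct *mm)
{
        VMA_ITERATOR(vmi, mm, 0);       /* wraps an ma_state over mm->mm_mt */
        struct vm_area_struct *vma;
        unsigned long nr = 0;

        mmap_read_lock(mm);
        for_each_vma(vmi, vma)          /* repeatedly calls vma_next(&vmi) */
                nr++;
        mmap_read_unlock(mm);

        return nr;
}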
|
D | mm_types.h |
  1122  struct ma_state mas;  member
  1127  .mas = { \
  1138  mas_init(&vmi->mas, &mm->mm_mt, addr);  in vma_iter_init()
|
/linux-6.12.1/include/trace/events/ |
D | maple_tree.h |
  15  TP_PROTO(const char *fn, struct ma_state *mas),
  17  TP_ARGS(fn, mas),
  30  __entry->min = mas->min;
  31  __entry->max = mas->max;
  32  __entry->index = mas->index;
  33  __entry->last = mas->last;
  34  __entry->node = mas->node;
  48  TP_PROTO(const char *fn, struct ma_state *mas),
  50  TP_ARGS(fn, mas),
  63  __entry->min = mas->min;
  [all …]
|
/linux-6.12.1/mm/ |
D | vma.h |
  156  if (vmi->mas.status != ma_start &&  in vma_iter_store_gfp()
  157  ((vmi->mas.index > vma->vm_start) || (vmi->mas.last < vma->vm_start)))  in vma_iter_store_gfp()
  160  __mas_set_range(&vmi->mas, vma->vm_start, vma->vm_end - 1);  in vma_iter_store_gfp()
  161  mas_store_gfp(&vmi->mas, vma, gfp);  in vma_iter_store_gfp()
  162  if (unlikely(mas_is_err(&vmi->mas)))  in vma_iter_store_gfp()
  243  struct ma_state *mas = &vms->vmi->mas;  in vms_abort_munmap_vmas() local
  255  mas_set_range(mas, vms->start, vms->end - 1);  in vms_abort_munmap_vmas()
  256  mas_store_gfp(mas, NULL, GFP_KERNEL|__GFP_NOFAIL);  in vms_abort_munmap_vmas()
  272  void unmap_region(struct ma_state *mas, struct vm_area_struct *vma,
  362  return mas_prev(&vmi->mas, min);  in vma_prev_limit()
  [all …]
|
D | debug.c |
  297  mas_dump(&vmi->mas);  in vma_iter_dump_tree()
  298  mt_dump(vmi->mas.tree, mt_dump_hex);  in vma_iter_dump_tree()
|
D | vma.c |
  344   void unmap_region(struct ma_state *mas, struct vm_area_struct *vma,  in unmap_region() argument
  353   unmap_vmas(&tlb, mas, vma, vma->vm_start, vma->vm_end, vma->vm_end,  in unmap_region()
  355   mas_set(mas, vma->vm_end);  in unmap_region()
  356   free_pgtables(&tlb, mas, vma, prev ? prev->vm_end : FIRST_USER_ADDRESS,  in unmap_region()
  1338  mt_init_flags(&mt_detach, vmi->mas.tree->ma_flags & MT_FLAGS_LOCK_MASK);  in do_vmi_align_munmap()
|
D | mmap.c |
  1382  mt_init_flags(&mt_detach, vmi.mas.tree->ma_flags & MT_FLAGS_LOCK_MASK);  in __mmap_region()
  1503  mas_preallocate(&vmi.mas, vma,  in __mmap_region()
  1575  unmap_region(&vmi.mas, vma, vmg.prev, vmg.next);  in __mmap_region()
  1934  unmap_vmas(&tlb, &vmi.mas, vma, 0, ULONG_MAX, ULONG_MAX, false);  in exit_mmap()
  1945  free_pgtables(&tlb, &vmi.mas, vma, FIRST_USER_ADDRESS,  in exit_mmap()
|
D | memory.c |
  364   void free_pgtables(struct mmu_gather *tlb, struct ma_state *mas,  in free_pgtables() argument
  378   next = mas_find(mas, ceiling - 1);  in free_pgtables()
  404   next = mas_find(mas, ceiling - 1);  in free_pgtables()
  1881  void unmap_vmas(struct mmu_gather *tlb, struct ma_state *mas,  in unmap_vmas() argument
  1903  vma = mas_find(mas, tree_end - 1);  in unmap_vmas()
  6234  MA_STATE(mas, &mm->mm_mt, address, address);  in lock_vma_under_rcu()
  6239  vma = mas_walk(&mas);  in lock_vma_under_rcu()
|
D | internal.h | 384 void free_pgtables(struct mmu_gather *tlb, struct ma_state *mas,
|
/linux-6.12.1/tools/testing/vma/ |
D | vma_internal.h |
  145  struct ma_state mas;  member
  150  .mas = { \
  368  mas_pause(&vmi->mas);  in vma_iter_invalidate()
  398  return mas_find(&vmi->mas, ULONG_MAX);  in vma_next()
  523  return mas_find(&vmi->mas, max - 1);  in vma_find()
  529  __mas_set_range(&vmi->mas, start, end - 1);  in vma_iter_clear_gfp()
  530  mas_store_gfp(&vmi->mas, NULL, gfp);  in vma_iter_clear_gfp()
  531  if (unlikely(mas_is_err(&vmi->mas)))  in vma_iter_clear_gfp()
  556  return mas_prev(&vmi->mas, 0);  in vma_prev()
  561  mas_set(&vmi->mas, addr);  in vma_iter_set()
  [all …]
|
/linux-6.12.1/tools/testing/radix-tree/ |
D | maple.c |
  66   static int get_alloc_node_count(struct ma_state *mas)  in get_alloc_node_count() argument
  69   struct maple_alloc *node = mas->alloc;  in get_alloc_node_count()
  80   static void check_mas_alloc_node_count(struct ma_state *mas)  in check_mas_alloc_node_count() argument
  82   mas_node_count_gfp(mas, MAPLE_ALLOC_SLOTS + 1, GFP_KERNEL);  in check_mas_alloc_node_count()
  83   mas_node_count_gfp(mas, MAPLE_ALLOC_SLOTS + 3, GFP_KERNEL);  in check_mas_alloc_node_count()
  84   MT_BUG_ON(mas->tree, get_alloc_node_count(mas) != mas->alloc->total);  in check_mas_alloc_node_count()
  85   mas_destroy(mas);  in check_mas_alloc_node_count()
  100  MA_STATE(mas, mt, 0, 0);  in check_new_node()
  102  check_mas_alloc_node_count(&mas);  in check_new_node()
  108  mas_node_count(&mas, 3);  in check_new_node()
  [all …]
|
/linux-6.12.1/mm/damon/tests/ |
D | vaddr-kunit.h |
  21  MA_STATE(mas, mt, 0, 0);  in __link_vmas()
  26  mas_lock(&mas);  in __link_vmas()
  28  mas_set_range(&mas, vmas[i].vm_start, vmas[i].vm_end - 1);  in __link_vmas()
  29  if (mas_store_gfp(&mas, &vmas[i], GFP_KERNEL))  in __link_vmas()
  35  mas_unlock(&mas);  in __link_vmas()
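__link_vmas() above is the bulk-populate pattern: take the lock once, then loop mas_set_range() + mas_store_gfp() per element. A standalone sketch of that loop with invented array parameters (not the kunit helper itself):

#include <linux/gfp.h>
#include <linux/maple_tree.h>

/* Store items[i] over the inclusive range [start[i], end[i] - 1]. */
static int bulk_store(struct maple_tree *mt, void **items,
                      unsigned long *start, unsigned long *end, int nr)
{
        MA_STATE(mas, mt, 0, 0);
        int i, ret = 0;

        mas_lock(&mas);
        for (i = 0; i < nr; i++) {
                mas_set_range(&mas, start[i], end[i] - 1);  /* last index is inclusive */
                ret = mas_store_gfp(&mas, items[i], GFP_KERNEL);
                if (ret)
                        break;
        }
        mas_unlock(&mas);

        return ret;
}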
|
/linux-6.12.1/kernel/irq/ |
D | irqdesc.c |
  153  MA_STATE(mas, &sparse_irqs, 0, 0);  in irq_find_free_area()
  155  if (mas_empty_area(&mas, from, MAX_SPARSE_IRQS, cnt))  in irq_find_free_area()
  157  return mas.index;  in irq_find_free_area()
  173  MA_STATE(mas, &sparse_irqs, irq, irq);  in irq_insert_desc()
  174  WARN_ON(mas_store_gfp(&mas, desc, GFP_KERNEL) != 0);  in irq_insert_desc()
  179  MA_STATE(mas, &sparse_irqs, irq, irq);  in delete_irq_desc()
  180  mas_erase(&mas);  in delete_irq_desc()
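irq_find_free_area() above shows the allocation side: mas_empty_area() positions the state over a gap of at least cnt slots and leaves the first free index in mas.index. A hedged sketch of that find-then-store idea, assuming an internally locked tree created with MT_FLAGS_ALLOC_RANGE (the real sparse_irqs code handles locking differently); tree and function names are invented:

#include <linux/errno.h>
#include <linux/gfp.h>
#include <linux/maple_tree.h>

/* Gap search with mas_empty_area() wants an allocation-range tree. */
static struct maple_tree demo_ids = MTREE_INIT(demo_ids, MT_FLAGS_ALLOC_RANGE);

/* Find @cnt consecutive free indices between @from and @max, store @ptr at
 * the first one and return that index (or a negative errno). */
static int demo_alloc_index(unsigned long from, unsigned long max,
                            unsigned long cnt, void *ptr)
{
        MA_STATE(mas, &demo_ids, 0, 0);
        int ret;

        mas_lock(&mas);
        if (mas_empty_area(&mas, from, max, cnt)) {
                mas_unlock(&mas);
                return -ENOSPC;         /* no gap of @cnt consecutive indices */
        }

        ret = mas.index;                /* start of the free area */
        mas_set_range(&mas, ret, ret);
        if (mas_store_gfp(&mas, ptr, GFP_KERNEL))
                ret = -ENOMEM;
        mas_unlock(&mas);

        return ret;
}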
|
/linux-6.12.1/drivers/gpu/drm/nouveau/ |
D | nouveau_uvmm.c |
  282  MA_STATE(mas, &uvmm->region_mt, addr, addr);  in __nouveau_uvma_region_insert()
  284  if (unlikely(mas_walk(&mas)))  in __nouveau_uvma_region_insert()
  287  if (unlikely(mas.last < last))  in __nouveau_uvma_region_insert()
  290  mas.index = addr;  in __nouveau_uvma_region_insert()
  291  mas.last = last;  in __nouveau_uvma_region_insert()
  293  mas_store_gfp(&mas, reg, GFP_KERNEL);  in __nouveau_uvma_region_insert()
  322  MA_STATE(mas, &uvmm->region_mt, reg->va.addr, 0);  in nouveau_uvma_region_remove()
  324  mas_erase(&mas);  in nouveau_uvma_region_remove()
  362  MA_STATE(mas, &uvmm->region_mt, addr, 0);  in nouveau_uvma_region_find_first()
  364  return mas_find(&mas, addr + range - 1);  in nouveau_uvma_region_find_first()
  [all …]
|
D | nouveau_debugfs.c |
  209  MA_STATE(mas, &uvmm->region_mt, 0, 0);  in nouveau_debugfs_gpuva_regions()
  214  mas_for_each(&mas, reg, ULONG_MAX)  in nouveau_debugfs_gpuva_regions()
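nouveau_debugfs_gpuva_regions() above is the iteration side of the API: mas_for_each() visits every populated range in ascending index order. A minimal sketch with an invented dump helper:

#include <linux/limits.h>
#include <linux/maple_tree.h>
#include <linux/printk.h>

/* Print every entry in @mt together with the range it occupies. */
static void dump_ranges(struct maple_tree *mt)
{
        MA_STATE(mas, mt, 0, 0);
        void *entry;

        rcu_read_lock();
        mas_for_each(&mas, entry, ULONG_MAX)    /* entry spans [mas.index, mas.last] */
                pr_info("entry %p over [%lu, %lu]\n", entry, mas.index, mas.last);
        rcu_read_unlock();
}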
|
/linux-6.12.1/Documentation/core-api/ |
D | maple_tree.rst |
  145  The advanced API is based around the ma_state, this is where the 'mas'
  152  The maple state keeps track of the range start and end in mas->index and
  153  mas->last, respectively.
  155  mas_walk() will walk the tree to the location of mas->index and set the
  156  mas->index and mas->last according to the range for the entry.
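A short sketch of what the documentation lines above describe: after mas_walk(), mas.index and mas.last are rewritten to the bounds of the range that contains the requested index. The tree, range and value below are invented for illustration:

#include <linux/gfp.h>
#include <linux/maple_tree.h>
#include <linux/printk.h>

static DEFINE_MTREE(doc_mt);

static void doc_walk_example(void)
{
        MA_STATE(mas, &doc_mt, 12, 12);
        void *entry;

        /* One entry spanning indices 10..20. */
        mtree_store_range(&doc_mt, 10, 20, (void *)0x1234, GFP_KERNEL);

        mas_lock(&mas);
        entry = mas_walk(&mas);         /* walks to mas.index == 12 */
        /* Now entry == (void *)0x1234, mas.index == 10, mas.last == 20. */
        pr_info("found %p over [%lu, %lu]\n", entry, mas.index, mas.last);
        mas_unlock(&mas);
}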
|
/linux-6.12.1/drivers/media/i2c/ |
D | max9286.c |
  749  struct max9286_asd *mas;  in max9286_v4l2_notifier_register() local
  751  mas = v4l2_async_nf_add_fwnode(&priv->notifier, source->fwnode,  in max9286_v4l2_notifier_register()
  753  if (IS_ERR(mas)) {  in max9286_v4l2_notifier_register()
  755  i, PTR_ERR(mas));  in max9286_v4l2_notifier_register()
  757  return PTR_ERR(mas);  in max9286_v4l2_notifier_register()
  760  mas->source = source;  in max9286_v4l2_notifier_register()
|
/linux-6.12.1/drivers/media/pci/saa7164/ |
D | saa7164-api.c |
  615  u8 mas;  in saa7164_api_set_dif() local
  621  mas = 0xd0;  in saa7164_api_set_dif()
  623  mas = 0xe0;  in saa7164_api_set_dif()
  639  buf[0x0a] = mas;  in saa7164_api_set_dif()
|
/linux-6.12.1/kernel/ |
D | fork.c |
  662  mt_clear_in_rcu(vmi.mas.tree);  in dup_mmap()
  758  mt_set_in_rcu(vmi.mas.tree);  in dup_mmap()
  769  mas_set_range(&vmi.mas, mpnt->vm_start, mpnt->vm_end - 1);  in dup_mmap()
  770  mas_store(&vmi.mas, XA_ZERO_ENTRY);  in dup_mmap()
|
/linux-6.12.1/fs/ |
D | libfs.c |
  497  MA_STATE(mas, &octx->mt, offset, offset);  in offset_find_next()
  501  child = mas_find(&mas, LONG_MAX);  in offset_find_next()
|