
Searched refs:dmap (Results 1 – 25 of 31) sorted by relevance


/linux-6.12.1/drivers/android/
dbitmap.h
31 static inline int dbitmap_enabled(struct dbitmap *dmap) in dbitmap_enabled() argument
33 return !!dmap->nbits; in dbitmap_enabled()
36 static inline void dbitmap_free(struct dbitmap *dmap) in dbitmap_free() argument
38 dmap->nbits = 0; in dbitmap_free()
39 kfree(dmap->map); in dbitmap_free()
43 static inline unsigned int dbitmap_shrink_nbits(struct dbitmap *dmap) in dbitmap_shrink_nbits() argument
47 if (dmap->nbits <= NBITS_MIN) in dbitmap_shrink_nbits()
56 bit = find_last_bit(dmap->map, dmap->nbits); in dbitmap_shrink_nbits()
57 if (bit < (dmap->nbits >> 2)) in dbitmap_shrink_nbits()
58 return dmap->nbits >> 1; in dbitmap_shrink_nbits()
[all …]
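The fragments above show binder's dbitmap deciding whether it can shrink: if the highest set bit lies below a quarter of the current size, the map is halved (but never below a minimum). A minimal user-space sketch of that heuristic follows; find_last_set() is a hypothetical stand-in for the kernel's find_last_bit(), and NBITS_MIN here is just an assumed lower bound.

#include <stdio.h>

#define NBITS_MIN 32u  /* assumed lower bound, standing in for the kernel constant */

/* Return the index of the highest set bit below nbits, or nbits if none is set. */
static unsigned int find_last_set(const unsigned char *map, unsigned int nbits)
{
	for (unsigned int bit = nbits; bit-- > 0; )
		if (map[bit / 8] & (1u << (bit % 8)))
			return bit;
	return nbits;
}

/* Suggest a new size: halve the bitmap when the top used bit sits in the lowest quarter. */
static unsigned int shrink_nbits(const unsigned char *map, unsigned int nbits)
{
	unsigned int bit;

	if (nbits <= NBITS_MIN)
		return 0;               /* already at the minimum, nothing to shrink */

	bit = find_last_set(map, nbits);
	if (bit < (nbits >> 2))
		return nbits >> 1;      /* everything in use fits comfortably in half the map */

	return 0;
}

int main(void)
{
	unsigned char map[16] = { 0x01 };   /* 128-bit map with only bit 0 set */

	printf("suggested nbits: %u\n", shrink_nbits(map, 128));  /* prints 64 */
	return 0;
}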
binder_internal.h
439 struct dbitmap dmap; member
/linux-6.12.1/fs/fuse/
dax.c
131 struct fuse_dax_mapping *dmap; in alloc_dax_mapping() local
134 dmap = list_first_entry_or_null(&fcd->free_ranges, in alloc_dax_mapping()
136 if (dmap) { in alloc_dax_mapping()
137 list_del_init(&dmap->list); in alloc_dax_mapping()
144 return dmap; in alloc_dax_mapping()
149 struct fuse_dax_mapping *dmap) in __dmap_remove_busy_list() argument
151 list_del_init(&dmap->busy_list); in __dmap_remove_busy_list()
157 struct fuse_dax_mapping *dmap) in dmap_remove_busy_list() argument
160 __dmap_remove_busy_list(fcd, dmap); in dmap_remove_busy_list()
166 struct fuse_dax_mapping *dmap) in __dmap_add_to_free_pool() argument
[all …]
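alloc_dax_mapping() in the fragments above pops the first entry off a free list of DAX ranges and unlinks it. A hedged sketch of that pop-the-head pattern, with a plain singly linked list instead of the kernel's list_head and with the locking around the free list omitted:

#include <stdio.h>

/* Stand-in for struct fuse_dax_mapping: one reusable DAX range. */
struct dax_mapping {
	long start;
	struct dax_mapping *next;
};

struct dax_pool {
	struct dax_mapping *free_ranges;  /* head of the free list */
	unsigned long nr_free;
};

/* Take the first free range, or return NULL when the pool is exhausted. */
static struct dax_mapping *alloc_mapping(struct dax_pool *pool)
{
	struct dax_mapping *dmap = pool->free_ranges;

	if (dmap) {
		pool->free_ranges = dmap->next;
		dmap->next = NULL;
		pool->nr_free--;
	}
	return dmap;
}

int main(void)
{
	struct dax_mapping a = { .start = 0, .next = NULL };
	struct dax_pool pool = { .free_ranges = &a, .nr_free = 1 };

	printf("got range at %ld, %lu left\n",
	       alloc_mapping(&pool)->start, pool.nr_free);
	return 0;
}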
/linux-6.12.1/drivers/pmdomain/ti/
omap_prm.c
59 const struct omap_prm_domain_map *dmap; member
158 .pwrstctrl = 0x0, .pwrstst = 0x4, .dmap = &omap_prm_reton,
162 .pwrstctrl = 0x0, .pwrstst = 0x4, .dmap = &omap_prm_noinact,
167 .pwrstctrl = 0, .pwrstst = 0x4, .dmap = &omap_prm_all,
171 .pwrstctrl = 0x0, .pwrstst = 0x4, .dmap = &omap_prm_alwon,
175 .pwrstctrl = 0x0, .pwrstst = 0x4, .dmap = &omap_prm_reton,
182 .pwrstctrl = 0x0, .pwrstst = 0x4, .dmap = &omap_prm_noinact,
187 .pwrstctrl = 0x0, .pwrstst = 0x4, .dmap = &omap_prm_onoff_noauto,
191 .pwrstctrl = 0x0, .pwrstst = 0x4, .dmap = &omap_prm_noinact
195 .pwrstctrl = 0x0, .pwrstst = 0x4, .dmap = &omap_prm_onoff_noauto
[all …]
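Each PRM entry above carries its register offsets plus a pointer to a shared, const power-domain map (omap_prm_reton, omap_prm_noinact, and so on). A small sketch of that entries-sharing-const-descriptors layout; the type names, field meanings and mode values here are hypothetical.

#include <stdio.h>

/* Hypothetical stand-ins for struct omap_prm_domain_map and the PRM table entries. */
struct domain_map {
	unsigned int usable_modes;  /* bitmask of power states the domain supports */
};

static const struct domain_map map_reton   = { .usable_modes = 0x9 };
static const struct domain_map map_noinact = { .usable_modes = 0xb };

struct prm_entry {
	const char *name;
	unsigned int pwrstctrl;
	unsigned int pwrstst;
	const struct domain_map *dmap;  /* shared, read-only descriptor */
};

static const struct prm_entry entries[] = {
	{ "mpu",  0x0, 0x4, &map_reton },
	{ "core", 0x0, 0x4, &map_noinact },
	{ "dsp",  0x0, 0x4, &map_reton },   /* several entries reuse one map */
};

int main(void)
{
	for (unsigned int i = 0; i < sizeof(entries) / sizeof(entries[0]); i++)
		printf("%s: modes 0x%x\n", entries[i].name,
		       entries[i].dmap->usable_modes);
	return 0;
}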
/linux-6.12.1/drivers/md/
dm-clone-metadata.c
127 struct dirty_map dmap[2]; member
468 static int __dirty_map_init(struct dirty_map *dmap, unsigned long nr_words, in __dirty_map_init() argument
471 dmap->changed = 0; in __dirty_map_init()
473 dmap->dirty_words = kvzalloc(bitmap_size(nr_words), GFP_KERNEL); in __dirty_map_init()
474 if (!dmap->dirty_words) in __dirty_map_init()
477 dmap->dirty_regions = kvzalloc(bitmap_size(nr_regions), GFP_KERNEL); in __dirty_map_init()
478 if (!dmap->dirty_regions) { in __dirty_map_init()
479 kvfree(dmap->dirty_words); in __dirty_map_init()
486 static void __dirty_map_exit(struct dirty_map *dmap) in __dirty_map_exit() argument
488 kvfree(dmap->dirty_words); in __dirty_map_exit()
[all …]
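__dirty_map_init() above allocates two bitmaps (per-word and per-region) and frees the first if the second allocation fails; dm-clone keeps two such maps for double buffering. A minimal sketch of that allocate-both-or-roll-back pattern, with calloc standing in for kvzalloc:

#include <stdlib.h>

/* Mirrors the shape of dm-clone's struct dirty_map: two bitmaps plus a flag. */
struct dirty_map {
	unsigned long *dirty_words;
	unsigned long *dirty_regions;
	unsigned int changed;
};

static size_t bitmap_bytes(unsigned long nr_bits)
{
	return ((nr_bits + 8 * sizeof(unsigned long) - 1) /
		(8 * sizeof(unsigned long))) * sizeof(unsigned long);
}

/* Allocate both bitmaps, undoing the first allocation if the second fails. */
static int dirty_map_init(struct dirty_map *dmap, unsigned long nr_words,
			  unsigned long nr_regions)
{
	dmap->changed = 0;

	dmap->dirty_words = calloc(1, bitmap_bytes(nr_words));
	if (!dmap->dirty_words)
		return -1;

	dmap->dirty_regions = calloc(1, bitmap_bytes(nr_regions));
	if (!dmap->dirty_regions) {
		free(dmap->dirty_words);
		dmap->dirty_words = NULL;
		return -1;
	}
	return 0;
}

static void dirty_map_exit(struct dirty_map *dmap)
{
	free(dmap->dirty_words);
	free(dmap->dirty_regions);
}

int main(void)
{
	struct dirty_map dmap;

	if (dirty_map_init(&dmap, 1024, 65536) == 0)
		dirty_map_exit(&dmap);
	return 0;
}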
dm-zoned-metadata.c
1688 struct dmz_map *dmap; in dmz_load_mapping() local
1707 dmap = dmap_mblk->data; in dmz_load_mapping()
1713 dzone_id = le32_to_cpu(dmap[e].dzone_id); in dmz_load_mapping()
1741 bzone_id = le32_to_cpu(dmap[e].bzone_id); in dmz_load_mapping()
1838 struct dmz_map *dmap = dmap_mblk->data; in dmz_set_chunk_mapping() local
1841 dmap[map_idx].dzone_id = cpu_to_le32(dzone_id); in dmz_set_chunk_mapping()
1842 dmap[map_idx].bzone_id = cpu_to_le32(bzone_id); in dmz_set_chunk_mapping()
2051 struct dmz_map *dmap = dmap_mblk->data; in dmz_get_chunk_mapping() local
2061 dzone_id = le32_to_cpu(dmap[dmap_idx].dzone_id); in dmz_get_chunk_mapping()
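The dm-zoned mapping block stores zone ids little-endian on disk, so every load goes through le32_to_cpu() and every store through cpu_to_le32(). A portable, self-contained sketch of those conversions for a hypothetical on-disk mapping entry:

#include <stdint.h>
#include <stdio.h>

/* Hypothetical on-disk chunk mapping entry: both ids stored little-endian. */
struct zone_map_entry {
	uint8_t dzone_id[4];
	uint8_t bzone_id[4];
};

/* Byte-wise conversions: endian-safe on any host. */
static uint32_t le32_to_host(const uint8_t b[4])
{
	return (uint32_t)b[0] | (uint32_t)b[1] << 8 |
	       (uint32_t)b[2] << 16 | (uint32_t)b[3] << 24;
}

static void host_to_le32(uint32_t v, uint8_t b[4])
{
	b[0] = v; b[1] = v >> 8; b[2] = v >> 16; b[3] = v >> 24;
}

int main(void)
{
	struct zone_map_entry e;

	host_to_le32(42, e.dzone_id);          /* the set_chunk_mapping direction */
	host_to_le32(7,  e.bzone_id);
	printf("dzone %u, bzone %u\n",         /* the load/get direction */
	       le32_to_host(e.dzone_id), le32_to_host(e.bzone_id));
	return 0;
}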
/linux-6.12.1/drivers/media/pci/intel/ipu6/
ipu6-mmu.c
395 iova = alloc_iova(&mmu->dmap->iovad, n_pages, in allocate_trash_buffer()
396 PHYS_PFN(mmu->dmap->mmu_info->aperture_end), 0); in allocate_trash_buffer()
402 dma = dma_map_page(mmu->dmap->mmu_info->dev, mmu->trash_page, 0, in allocate_trash_buffer()
404 if (dma_mapping_error(mmu->dmap->mmu_info->dev, dma)) { in allocate_trash_buffer()
405 dev_err(mmu->dmap->mmu_info->dev, "Failed to map trash page\n"); in allocate_trash_buffer()
418 ret = ipu6_mmu_map(mmu->dmap->mmu_info, PFN_PHYS(iova_addr), in allocate_trash_buffer()
435 ipu6_mmu_unmap(mmu->dmap->mmu_info, PFN_PHYS(iova->pfn_lo), in allocate_trash_buffer()
437 dma_unmap_page(mmu->dmap->mmu_info->dev, mmu->pci_trash_page, in allocate_trash_buffer()
440 __free_iova(&mmu->dmap->iovad, iova); in allocate_trash_buffer()
450 mmu_info = mmu->dmap->mmu_info; in ipu6_mmu_hw_init()
[all …]
ipu6-dma.c
171 iova = alloc_iova(&mmu->dmap->iovad, count, in ipu6_dma_alloc()
193 ret = ipu6_mmu_map(mmu->dmap->mmu_info, in ipu6_dma_alloc()
222 pci_dma_addr = ipu6_mmu_iova_to_phys(mmu->dmap->mmu_info, in ipu6_dma_alloc()
227 ipu6_mmu_unmap(mmu->dmap->mmu_info, ipu6_iova, PAGE_SIZE); in ipu6_dma_alloc()
233 __free_iova(&mmu->dmap->iovad, iova); in ipu6_dma_alloc()
246 struct iova *iova = find_iova(&mmu->dmap->iovad, PHYS_PFN(dma_handle)); in ipu6_dma_free()
275 pci_dma_addr = ipu6_mmu_iova_to_phys(mmu->dmap->mmu_info, in ipu6_dma_free()
281 ipu6_mmu_unmap(mmu->dmap->mmu_info, PFN_PHYS(iova->pfn_lo), in ipu6_dma_free()
288 __free_iova(&mmu->dmap->iovad, iova); in ipu6_dma_free()
333 struct iova *iova = find_iova(&mmu->dmap->iovad, in ipu6_dma_unmap_sg()
[all …]
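Both ipu6-mmu.c and ipu6-dma.c follow the same shape: allocate an IOVA range, map the pages for DMA, install them in the IPU MMU, and unwind in reverse order on failure. A generic sketch of that goto-based unwind ordering; alloc_range(), dma_map(), mmu_map() and friends are hypothetical stubs standing in for alloc_iova(), dma_map_page() and ipu6_mmu_map().

#include <stdio.h>

/* Hypothetical stubs for the allocator, the DMA API and the IPU MMU. */
static int alloc_range(unsigned long pages, unsigned long *iova)
{
	*iova = 0x10000;        /* pretend the IOVA allocator handed out this range */
	return 0;
}

static int dma_map(unsigned long *handle)
{
	*handle = 0x20000;      /* pretend the page was mapped for device DMA */
	return 0;
}

static int mmu_map(unsigned long iova, unsigned long handle, unsigned long pages)
{
	return 0;
}

static void dma_unmap(unsigned long handle) { }
static void free_range(unsigned long iova) { }

/* Acquire IOVA, DMA mapping and MMU mapping; release in reverse order on error. */
static int setup_trash_buffer(unsigned long pages)
{
	unsigned long iova, handle;
	int ret;

	ret = alloc_range(pages, &iova);
	if (ret)
		return ret;

	ret = dma_map(&handle);
	if (ret)
		goto out_free_iova;

	ret = mmu_map(iova, handle, pages);
	if (ret)
		goto out_unmap_dma;

	return 0;

out_unmap_dma:
	dma_unmap(handle);
out_free_iova:
	free_range(iova);
	return ret;
}

int main(void)
{
	printf("setup: %d\n", setup_trash_buffer(16));
	return 0;
}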
ipu6-mmu.h
35 struct ipu6_dma_mapping *dmap; member
48 struct ipu6_dma_mapping *dmap; member
/linux-6.12.1/sound/soc/tegra/
tegra_pcm.c
80 struct snd_dmaengine_dai_dma_data *dmap; in tegra_pcm_open() local
88 dmap = snd_soc_dai_get_dma_data(cpu_dai, substream); in tegra_pcm_open()
101 chan = dma_request_chan(cpu_dai->dev, dmap->chan_name); in tegra_pcm_open()
105 dmap->chan_name); in tegra_pcm_open()
113 dmap->chan_name); in tegra_pcm_open()
146 struct snd_dmaengine_dai_dma_data *dmap; in tegra_pcm_hw_params() local
154 dmap = snd_soc_dai_get_dma_data(snd_soc_rtd_to_cpu(rtd, 0), substream); in tegra_pcm_hw_params()
155 if (!dmap) in tegra_pcm_hw_params()
169 slave_config.dst_addr = dmap->addr; in tegra_pcm_hw_params()
173 slave_config.src_addr = dmap->addr; in tegra_pcm_hw_params()
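In the Tegra PCM fragments, the per-DAI DMA descriptor supplies both the channel name to request and the FIFO address, and hw_params points the slave config's destination (playback) or source (capture) at that address. A hedged sketch of that direction-dependent fill, using simplified stand-in structs rather than the real snd_dmaengine_dai_dma_data and dma_slave_config types; the channel name and address in main are illustrative values.

#include <stdio.h>

/* Simplified stand-ins for the DAI DMA data and the slave config. */
struct dai_dma_data {
	const char *chan_name;   /* DMA channel to request, e.g. "tx" or "rx" */
	unsigned long addr;      /* FIFO address the DMA engine reads or writes */
};

struct slave_config {
	unsigned long src_addr;
	unsigned long dst_addr;
};

enum stream_dir { PLAYBACK, CAPTURE };

/* Point the DMA at the DAI FIFO: destination for playback, source for capture. */
static void fill_slave_config(struct slave_config *cfg,
			      const struct dai_dma_data *dmap,
			      enum stream_dir dir)
{
	if (dir == PLAYBACK)
		cfg->dst_addr = dmap->addr;
	else
		cfg->src_addr = dmap->addr;
}

int main(void)
{
	struct dai_dma_data dmap = { .chan_name = "tx", .addr = 0x1000 };
	struct slave_config cfg = { 0 };

	fill_slave_config(&cfg, &dmap, PLAYBACK);
	printf("chan %s, dst 0x%lx\n", dmap.chan_name, cfg.dst_addr);
	return 0;
}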
/linux-6.12.1/drivers/gpu/drm/ttm/
ttm_resource.c
660 struct iosys_map *dmap, in ttm_kmap_iter_iomap_map_local()
687 iosys_map_set_vaddr_iomem(dmap, addr); in ttm_kmap_iter_iomap_map_local()
742 struct iosys_map *dmap, in ttm_kmap_iter_linear_io_map_local() argument
748 *dmap = iter_io->dmap; in ttm_kmap_iter_linear_io_map_local()
749 iosys_map_incr(dmap, i * PAGE_SIZE); in ttm_kmap_iter_linear_io_map_local()
785 iosys_map_set_vaddr(&iter_io->dmap, mem->bus.addr); in ttm_kmap_iter_linear_io_init()
789 memset(&iter_io->dmap, 0, sizeof(iter_io->dmap)); in ttm_kmap_iter_linear_io_init()
791 iosys_map_set_vaddr_iomem(&iter_io->dmap, in ttm_kmap_iter_linear_io_init()
795 iosys_map_set_vaddr(&iter_io->dmap, in ttm_kmap_iter_linear_io_init()
802 if (iosys_map_is_null(&iter_io->dmap)) in ttm_kmap_iter_linear_io_init()
[all …]
ttm_tt.c
428 struct iosys_map *dmap, in ttm_kmap_iter_tt_map_local()
434 iosys_map_set_vaddr(dmap, kmap_local_page_prot(iter_tt->tt->pages[i], in ttm_kmap_iter_tt_map_local()
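The TTM map_local callbacks fill the caller's iosys_map with page i of the resource: the linear-io variant copies the base map set up at init time and advances it by i * PAGE_SIZE. A minimal sketch of that copy-and-offset step with a simplified map struct; sys_map and linear_io_iter are hypothetical stand-ins.

#include <stdio.h>
#include <stdint.h>

#define PAGE_SIZE 4096u

/* Simplified iosys_map: a CPU pointer plus an I/O-memory flag. */
struct sys_map {
	void *vaddr;
	int is_iomem;
};

struct linear_io_iter {
	struct sys_map dmap;   /* mapping of the whole linear region, set at init */
};

/* map_local for a linear region: copy the base map and advance it to page i. */
static void linear_io_map_local(const struct linear_io_iter *iter,
				struct sys_map *dmap, unsigned long i)
{
	*dmap = iter->dmap;
	dmap->vaddr = (uint8_t *)dmap->vaddr + (size_t)i * PAGE_SIZE;
}

int main(void)
{
	static uint8_t region[4 * PAGE_SIZE];
	struct linear_io_iter iter = { .dmap = { .vaddr = region, .is_iomem = 0 } };
	struct sys_map page;

	linear_io_map_local(&iter, &page, 2);
	printf("page 2 at offset %ld\n",
	       (long)((uint8_t *)page.vaddr - region));   /* prints 8192 */
	return 0;
}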
/linux-6.12.1/fs/nilfs2/
page.c
244 int nilfs_copy_dirty_pages(struct address_space *dmap, in nilfs_copy_dirty_pages()
265 dfolio = filemap_grab_folio(dmap, folio->index); in nilfs_copy_dirty_pages()
299 void nilfs_copy_back_pages(struct address_space *dmap, in nilfs_copy_back_pages() argument
317 dfolio = filemap_lock_folio(dmap, index); in nilfs_copy_back_pages()
335 xa_lock_irq(&dmap->i_pages); in nilfs_copy_back_pages()
336 f = __xa_store(&dmap->i_pages, index, folio, GFP_NOFS); in nilfs_copy_back_pages()
342 folio->mapping = dmap; in nilfs_copy_back_pages()
343 dmap->nrpages++; in nilfs_copy_back_pages()
345 __xa_set_mark(&dmap->i_pages, index, in nilfs_copy_back_pages()
348 xa_unlock_irq(&dmap->i_pages); in nilfs_copy_back_pages()
/linux-6.12.1/fs/xfs/
xfs_reflink.c
1127 struct xfs_bmbt_irec *dmap, in xfs_reflink_remap_extent()
1138 bool dmap_written = xfs_bmap_is_written_extent(dmap); in xfs_reflink_remap_extent()
1166 resblks + dmap->br_blockcount, 0, false, &tp); in xfs_reflink_remap_extent()
1181 error = xfs_bmapi_read(ip, dmap->br_startoff, dmap->br_blockcount, in xfs_reflink_remap_extent()
1185 ASSERT(nimaps == 1 && smap.br_startoff == dmap->br_startoff); in xfs_reflink_remap_extent()
1192 dmap->br_blockcount = min(dmap->br_blockcount, smap.br_blockcount); in xfs_reflink_remap_extent()
1193 ASSERT(dmap->br_blockcount == smap.br_blockcount); in xfs_reflink_remap_extent()
1202 if (dmap->br_startblock == smap.br_startblock) { in xfs_reflink_remap_extent()
1203 if (dmap->br_state != smap.br_state) { in xfs_reflink_remap_extent()
1211 if (dmap->br_state == XFS_EXT_UNWRITTEN && in xfs_reflink_remap_extent()
[all …]
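The xfs_reflink fragments read the destination's current mapping (smap), trim the requested extent (dmap) so it never covers more than that mapping, and check whether the two already point at the same blocks with only the written/unwritten state differing. A hedged sketch of that trim-and-compare step over a simplified extent record:

#include <stdio.h>

/* Simplified xfs_bmbt_irec: one file-offset extent mapping. */
struct irec {
	unsigned long long startoff;    /* file offset, in blocks */
	unsigned long long startblock;  /* on-disk block */
	unsigned long long blockcount;
	int written;                    /* 1 = written, 0 = unwritten */
};

/*
 * Trim dmap so it never exceeds the destination's current mapping, then
 * report whether the blocks already match and only the state differs.
 */
static int remap_needs_state_change_only(struct irec *dmap,
					 const struct irec *smap)
{
	if (smap->blockcount < dmap->blockcount)
		dmap->blockcount = smap->blockcount;

	return dmap->startblock == smap->startblock &&
	       dmap->written != smap->written;
}

int main(void)
{
	struct irec dmap = { .startoff = 0, .startblock = 100,
			     .blockcount = 8, .written = 1 };
	struct irec smap = { .startoff = 0, .startblock = 100,
			     .blockcount = 4, .written = 0 };

	printf("state-only: %d, count now %llu\n",
	       remap_needs_state_change_only(&dmap, &smap), dmap.blockcount);
	return 0;
}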
/linux-6.12.1/drivers/gpu/drm/
drm_panic.c
260 static void drm_panic_blit16(struct iosys_map *dmap, unsigned int dpitch, in drm_panic_blit16()
270 iosys_map_wr(dmap, y * dpitch + x * sizeof(u16), u16, fg16); in drm_panic_blit16()
273 static void drm_panic_blit24(struct iosys_map *dmap, unsigned int dpitch, in drm_panic_blit24() argument
286 iosys_map_wr(dmap, off, u8, (fg32 & 0x000000FF) >> 0); in drm_panic_blit24()
287 iosys_map_wr(dmap, off + 1, u8, (fg32 & 0x0000FF00) >> 8); in drm_panic_blit24()
288 iosys_map_wr(dmap, off + 2, u8, (fg32 & 0x00FF0000) >> 16); in drm_panic_blit24()
294 static void drm_panic_blit32(struct iosys_map *dmap, unsigned int dpitch, in drm_panic_blit32() argument
304 iosys_map_wr(dmap, y * dpitch + x * sizeof(u32), u32, fg32); in drm_panic_blit32()
363 static void drm_panic_fill16(struct iosys_map *dmap, unsigned int dpitch, in drm_panic_fill16() argument
371 iosys_map_wr(dmap, y * dpitch + x * sizeof(u16), u16, color); in drm_panic_fill16()
[all …]
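The drm_panic blit and fill helpers all compute a byte offset of y * pitch + x * bytes-per-pixel and write the color there, with the 24-bpp path splitting the value into three single-byte writes because there is no native 24-bit type. A self-contained sketch of that pixel addressing into a plain byte buffer (put_pixel16/put_pixel24 are hypothetical names):

#include <stdint.h>
#include <stdio.h>
#include <string.h>

/* Write one 16-bpp pixel into a linear framebuffer at (x, y); pitch is in bytes. */
void put_pixel16(uint8_t *fb, unsigned int pitch,
		 unsigned int x, unsigned int y, uint16_t color)
{
	memcpy(fb + y * pitch + x * sizeof(uint16_t), &color, sizeof(color));
}

/* 24 bpp has no native type, so the color is split into three byte writes. */
void put_pixel24(uint8_t *fb, unsigned int pitch,
		 unsigned int x, unsigned int y, uint32_t color)
{
	size_t off = y * pitch + x * 3;

	fb[off]     = (color & 0x000000FF);
	fb[off + 1] = (color & 0x0000FF00) >> 8;
	fb[off + 2] = (color & 0x00FF0000) >> 16;
}

int main(void)
{
	static uint8_t fb[64 * 64 * 3];   /* 64x64 framebuffer, 24 bpp, pitch = 192 bytes */

	put_pixel24(fb, 64 * 3, 1, 1, 0x00FF8040);
	printf("pixel bytes: %02x %02x %02x\n",
	       fb[195], fb[196], fb[197]);   /* offset = 1*192 + 1*3 = 195 */
	return 0;
}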
/linux-6.12.1/fs/jfs/
jfs_dmap.c
64 static void dbAllocBits(struct bmap * bmp, struct dmap * dp, s64 blkno,
73 static int dbAllocNext(struct bmap * bmp, struct dmap * dp, s64 blkno,
75 static int dbAllocNear(struct bmap * bmp, struct dmap * dp, s64 blkno,
78 static int dbAllocDmap(struct bmap * bmp, struct dmap * dp, s64 blkno,
80 static int dbAllocDmapLev(struct bmap * bmp, struct dmap * dp, int nblocks,
91 static int dbFreeBits(struct bmap * bmp, struct dmap * dp, s64 blkno,
93 static int dbFreeDmap(struct bmap * bmp, struct dmap * dp, s64 blkno,
101 static int dbAllocDmapBU(struct bmap * bmp, struct dmap * dp, s64 blkno,
103 static int dbInitDmap(struct dmap * dp, s64 blkno, int nblocks);
104 static int dbInitDmapTree(struct dmap * dp);
[all …]
/linux-6.12.1/include/drm/ttm/
ttm_kmap_iter.h
32 struct iosys_map *dmap, pgoff_t i);
42 struct iosys_map *dmap);
ttm_resource.h
365 struct iosys_map dmap; member
/linux-6.12.1/drivers/misc/sgi-gru/
grufault.c
143 struct gru_tlb_fault_map *dmap) in get_clear_fault_map()
158 dmap->fault_bits[i] = k; in get_clear_fault_map()
522 struct gru_tlb_fault_map imap, dmap; in gru_intr() local
536 get_clear_fault_map(gru, &imap, &dmap); in gru_intr()
541 dmap.fault_bits[0], dmap.fault_bits[1]); in gru_intr()
543 for_each_cbr_in_tfm(cbrnum, dmap.fault_bits) { in gru_intr()
/linux-6.12.1/drivers/staging/media/ipu3/
ipu3.c
77 &imgu_pipe->queues[i].dmap); in imgu_dummybufs_cleanup()
98 &imgu_pipe->queues[i].dmap, size)) { in imgu_dummybufs_preallocate()
138 &imgu_pipe->queues[i].dmap, in imgu_dummybufs_init()
146 imgu_pipe->queues[i].dmap.daddr); in imgu_dummybufs_init()
163 if (WARN_ON(!imgu_pipe->queues[queue].dmap.vaddr)) in imgu_dummybufs_get()
176 imgu_pipe->queues[queue].dmap.daddr); in imgu_dummybufs_get()
ipu3.h
105 struct imgu_css_map dmap; member
/linux-6.12.1/drivers/edac/
pnd2_edac.c
418 static struct d_cr_dmap dmap[DNV_NUM_CHANNELS]; variable
474 RD_REGP(&dmap[i], d_cr_dmap, dnv_dports[i]) || in dnv_get_registers()
979 daddr->rank = dnv_get_bit(pmiaddr, dmap[pmiidx].rs0 + 13, 0); in dnv_pmi2mem()
981 daddr->rank |= dnv_get_bit(pmiaddr, dmap[pmiidx].rs1 + 13, 1); in dnv_pmi2mem()
989 daddr->bank = dnv_get_bit(pmiaddr, dmap[pmiidx].ba0 + 6, 0); in dnv_pmi2mem()
990 daddr->bank |= dnv_get_bit(pmiaddr, dmap[pmiidx].ba1 + 6, 1); in dnv_pmi2mem()
991 daddr->bank |= dnv_get_bit(pmiaddr, dmap[pmiidx].bg0 + 6, 2); in dnv_pmi2mem()
993 daddr->bank |= dnv_get_bit(pmiaddr, dmap[pmiidx].bg1 + 6, 3); in dnv_pmi2mem()
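dnv_pmi2mem() above assembles rank and bank numbers by picking single bits out of the PMI address at positions taken from the per-channel dmap registers (rs0 + 13, ba0 + 6, and so on) and placing them at fixed positions in the result. A minimal sketch of that bit-picking helper; the decode_map fields and the example bit positions are hypothetical.

#include <stdint.h>
#include <stdio.h>

/* Pick bit 'bit' of addr and place it at position 'pos' in the result. */
static uint32_t get_bit(uint64_t addr, unsigned int bit, unsigned int pos)
{
	return (uint32_t)((addr >> bit) & 1) << pos;
}

/* Hypothetical per-channel decode registers, mirroring the role of d_cr_dmap. */
struct decode_map {
	unsigned int rs0, rs1;   /* rank-select bit positions (relative) */
	unsigned int ba0, ba1;   /* bank-address bit positions (relative) */
};

int main(void)
{
	struct decode_map dmap = { .rs0 = 2, .rs1 = 3, .ba0 = 0, .ba1 = 1 };
	uint64_t pmiaddr = 0x0000A000;   /* example PMI address */
	uint32_t rank, bank;

	/* Same shape as the fragments: source bit comes from the map, plus a fixed offset. */
	rank  = get_bit(pmiaddr, dmap.rs0 + 13, 0);
	rank |= get_bit(pmiaddr, dmap.rs1 + 13, 1);
	bank  = get_bit(pmiaddr, dmap.ba0 + 6, 0);
	bank |= get_bit(pmiaddr, dmap.ba1 + 6, 1);

	printf("rank %u, bank %u\n", rank, bank);
	return 0;
}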
/linux-6.12.1/drivers/iommu/
tegra-smmu.c
575 dma_addr_t *dmap) in tegra_smmu_pte_lookup()
587 *dmap = smmu_pde_to_dma(smmu, pd[pd_index]); in tegra_smmu_pte_lookup()
593 dma_addr_t *dmap, struct page *page) in as_get_pte() argument
620 *dmap = dma; in as_get_pte()
624 *dmap = smmu_pde_to_dma(smmu, pd[pde]); in as_get_pte()
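tegra_smmu_pte_lookup() and as_get_pte() walk a two-level table: index the page directory, find (or install) the leaf page table, and report the table's bus-visible address through the extra *dmap out parameter while returning a pointer to the PTE itself. A sketch of that lookup with hypothetical types; the shift amounts and table sizes are illustrative, not the SMMU's real geometry.

#include <stdio.h>
#include <stdint.h>

#define ENTRIES_PER_TABLE 1024u

/* Hypothetical two-level table: the directory holds pointers to leaf tables. */
struct leaf_table {
	uint32_t pte[ENTRIES_PER_TABLE];
	uint64_t bus_addr;               /* where the hardware sees this table */
};

struct address_space {
	struct leaf_table *pd[ENTRIES_PER_TABLE];
};

/* Find the PTE covering iova; the leaf table's bus address goes out via *dmap. */
static uint32_t *pte_lookup(struct address_space *as, uint64_t iova,
			    uint64_t *dmap)
{
	unsigned int pd_index = (iova >> 22) & (ENTRIES_PER_TABLE - 1);
	unsigned int pt_index = (iova >> 12) & (ENTRIES_PER_TABLE - 1);
	struct leaf_table *pt = as->pd[pd_index];

	if (!pt)
		return NULL;

	*dmap = pt->bus_addr;
	return &pt->pte[pt_index];
}

int main(void)
{
	static struct address_space as;
	static struct leaf_table pt = { .bus_addr = 0x80000000ull };
	uint64_t bus = 0;

	as.pd[1] = &pt;                          /* covers IOVAs starting at 0x400000 */
	uint32_t *pte = pte_lookup(&as, 0x00401000, &bus);
	printf("pte %s, table at 0x%llx\n",
	       pte ? "found" : "missing", (unsigned long long)bus);
	return 0;
}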
/linux-6.12.1/drivers/media/platform/xilinx/
xilinx-vipp.c
476 struct xvip_dma *dmap; in xvip_graph_cleanup() local
482 list_for_each_entry_safe(dma, dmap, &xdev->dmas, list) { in xvip_graph_cleanup()
/linux-6.12.1/drivers/net/ethernet/apple/
bmac.c
178 dbdma_continue(volatile struct dbdma_regs __iomem *dmap) in dbdma_continue()
180 dbdma_st32(&dmap->control, in dbdma_continue()
186 dbdma_reset(volatile struct dbdma_regs __iomem *dmap) in dbdma_reset() argument
188 dbdma_st32(&dmap->control, in dbdma_reset()
191 while (dbdma_ld32(&dmap->status) & RUN) in dbdma_reset()
