
Searched refs:nelems (Results 1 – 25 of 50) sorted by relevance

/linux-6.12.1/kernel/dma/
debug.h 25 int nelems, int dir);
52 int nelems, int direction);
56 int nelems, int direction);
78 int nelems, int dir) in debug_dma_unmap_sg() argument
120 int nelems, int direction) in debug_dma_sync_sg_for_cpu() argument
126 int nelems, int direction) in debug_dma_sync_sg_for_device() argument
dummy.c 31 int nelems, enum dma_data_direction dir, in dma_dummy_map_sg() argument
38 int nelems, enum dma_data_direction dir, in dma_dummy_unmap_sg() argument
mapping.c 397 int nelems, enum dma_data_direction dir) in __dma_sync_sg_for_cpu() argument
403 dma_direct_sync_sg_for_cpu(dev, sg, nelems, dir); in __dma_sync_sg_for_cpu()
405 iommu_dma_sync_sg_for_cpu(dev, sg, nelems, dir); in __dma_sync_sg_for_cpu()
407 ops->sync_sg_for_cpu(dev, sg, nelems, dir); in __dma_sync_sg_for_cpu()
408 trace_dma_sync_sg_for_cpu(dev, sg, nelems, dir); in __dma_sync_sg_for_cpu()
409 debug_dma_sync_sg_for_cpu(dev, sg, nelems, dir); in __dma_sync_sg_for_cpu()
414 int nelems, enum dma_data_direction dir) in __dma_sync_sg_for_device() argument
420 dma_direct_sync_sg_for_device(dev, sg, nelems, dir); in __dma_sync_sg_for_device()
422 iommu_dma_sync_sg_for_device(dev, sg, nelems, dir); in __dma_sync_sg_for_device()
424 ops->sync_sg_for_device(dev, sg, nelems, dir); in __dma_sync_sg_for_device()
[all …]
debug.c 1360 int nelems, int dir) in debug_dma_unmap_sg() argument
1368 for_each_sg(sglist, s, nelems, i) { in debug_dma_unmap_sg()
1378 .sg_call_ents = nelems, in debug_dma_unmap_sg()
1533 int nelems, int direction) in debug_dma_sync_sg_for_cpu() argument
1541 for_each_sg(sg, s, nelems, i) { in debug_dma_sync_sg_for_cpu()
1551 .sg_call_ents = nelems, in debug_dma_sync_sg_for_cpu()
1565 int nelems, int direction) in debug_dma_sync_sg_for_device() argument
1573 for_each_sg(sg, s, nelems, i) { in debug_dma_sync_sg_for_device()
1583 .sg_call_ents = nelems, in debug_dma_sync_sg_for_device()
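
The kernel/dma/ hits above show the scatter-gather sync path: __dma_sync_sg_for_cpu() and __dma_sync_sg_for_device() in mapping.c dispatch on nelems to the direct-DMA, IOMMU, or ops->sync_sg_* backends, while debug.c records the same count as sg_call_ents so later unmap/sync calls can be cross-checked. A minimal, hypothetical caller-side sketch of how a driver passes nelems through this API (the device, buffers, and helper name are illustrative, not from the listing):

/* Hypothetical driver fragment; my_map_and_sync() is not a real kernel API. */
#include <linux/dma-mapping.h>
#include <linux/scatterlist.h>
#include <linux/errno.h>

static int my_map_and_sync(struct device *dev, struct scatterlist *sgl,
			   void **bufs, int nbufs, size_t len)
{
	int i, mapped;

	sg_init_table(sgl, nbufs);
	for (i = 0; i < nbufs; i++)
		sg_set_buf(&sgl[i], bufs[i], len);

	/* dma_map_sg() may coalesce and return fewer entries than it was given. */
	mapped = dma_map_sg(dev, sgl, nbufs, DMA_FROM_DEVICE);
	if (mapped == 0)
		return -ENOMEM;

	/*
	 * Sync and unmap take the ORIGINAL entry count (nbufs), not the
	 * value dma_map_sg() returned; dma-debug's sg_call_ents field in
	 * debug.c above exists to cross-check exactly this.
	 */
	dma_sync_sg_for_cpu(dev, sgl, nbufs, DMA_FROM_DEVICE);
	/* ... CPU reads the freshly DMA'd data here ... */
	dma_sync_sg_for_device(dev, sgl, nbufs, DMA_FROM_DEVICE);

	dma_unmap_sg(dev, sgl, nbufs, DMA_FROM_DEVICE);
	return 0;
}
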
/linux-6.12.1/drivers/xen/
swiotlb-xen.c 326 xen_swiotlb_unmap_sg(struct device *hwdev, struct scatterlist *sgl, int nelems, in xen_swiotlb_unmap_sg() argument
334 for_each_sg(sgl, sg, nelems, i) in xen_swiotlb_unmap_sg()
341 xen_swiotlb_map_sg(struct device *dev, struct scatterlist *sgl, int nelems, in xen_swiotlb_map_sg() argument
349 for_each_sg(sgl, sg, nelems, i) { in xen_swiotlb_map_sg()
357 return nelems; in xen_swiotlb_map_sg()
366 int nelems, enum dma_data_direction dir) in xen_swiotlb_sync_sg_for_cpu() argument
371 for_each_sg(sgl, sg, nelems, i) { in xen_swiotlb_sync_sg_for_cpu()
379 int nelems, enum dma_data_direction dir) in xen_swiotlb_sync_sg_for_device() argument
384 for_each_sg(sgl, sg, nelems, i) { in xen_swiotlb_sync_sg_for_device()
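
On the provider side, the Xen swiotlb hits show the usual shape of an sg-level DMA op: walk the list with for_each_sg() over nelems entries, apply the single-entry operation to each, and (for map_sg) return nelems on success. A generic sketch of that loop, with my_sync_single() standing in for whatever per-entry helper a backend supplies (it is hypothetical, not a real API):

#include <linux/dma-direction.h>
#include <linux/scatterlist.h>

/* Hypothetical per-entry helper a backend would provide. */
static void my_sync_single(struct device *dev, dma_addr_t addr,
			   unsigned int len, enum dma_data_direction dir);

static void my_sync_sg_for_cpu(struct device *dev, struct scatterlist *sgl,
			       int nelems, enum dma_data_direction dir)
{
	struct scatterlist *sg;
	int i;

	/* nelems bounds the walk; each entry is synced individually. */
	for_each_sg(sgl, sg, nelems, i)
		my_sync_single(dev, sg_dma_address(sg), sg_dma_len(sg), dir);
}
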
/linux-6.12.1/arch/powerpc/kernel/
dma-iommu.c 119 int nelems, enum dma_data_direction direction, in dma_iommu_map_sg() argument
122 return ppc_iommu_map_sg(dev, get_iommu_table_base(dev), sglist, nelems, in dma_iommu_map_sg()
127 int nelems, enum dma_data_direction direction, in dma_iommu_unmap_sg() argument
130 ppc_iommu_unmap_sg(get_iommu_table_base(dev), sglist, nelems, in dma_iommu_unmap_sg()
iommu.c 472 struct scatterlist *sglist, int nelems, in ppc_iommu_map_sg() argument
485 if ((nelems == 0) || !tbl) in ppc_iommu_map_sg()
490 incount = nelems; in ppc_iommu_map_sg()
496 DBG("sg mapping %d elements:\n", nelems); in ppc_iommu_map_sg()
499 for_each_sg(sglist, s, nelems, i) { in ppc_iommu_map_sg()
597 for_each_sg(sglist, s, nelems, i) { in ppc_iommu_map_sg()
615 int nelems, enum dma_data_direction direction, in ppc_iommu_unmap_sg() argument
626 while (nelems--) { in ppc_iommu_unmap_sg()
/linux-6.12.1/arch/sparc/kernel/
iommu.c 433 int nelems, enum dma_data_direction direction, in dma_4u_map_sg() argument
450 if (nelems == 0 || !iommu) in dma_4u_map_sg()
468 incount = nelems; in dma_4u_map_sg()
477 for_each_sg(sglist, s, nelems, i) { in dma_4u_map_sg()
555 for_each_sg(sglist, s, nelems, i) { in dma_4u_map_sg()
606 int nelems, enum dma_data_direction direction, in dma_4u_unmap_sg() argument
624 while (nelems--) { in dma_4u_unmap_sg()
696 struct scatterlist *sglist, int nelems, in dma_4u_sync_sg_for_cpu() argument
728 for_each_sg(sglist, sg, nelems, i) { in dma_4u_sync_sg_for_cpu()
pci_sun4v.c 470 int nelems, enum dma_data_direction direction, in dma_4v_map_sg() argument
489 if (nelems == 0 || !iommu) in dma_4v_map_sg()
502 incount = nelems; in dma_4v_map_sg()
523 for_each_sg(sglist, s, nelems, i) { in dma_4v_map_sg()
604 for_each_sg(sglist, s, nelems, i) { in dma_4v_map_sg()
625 int nelems, enum dma_data_direction direction, in dma_4v_unmap_sg() argument
646 while (nelems--) { in dma_4v_unmap_sg()
/linux-6.12.1/include/linux/
dma-mapping.h 290 int nelems, enum dma_data_direction dir);
292 int nelems, enum dma_data_direction dir);
316 struct scatterlist *sg, int nelems, enum dma_data_direction dir) in dma_sync_sg_for_cpu() argument
319 __dma_sync_sg_for_cpu(dev, sg, nelems, dir); in dma_sync_sg_for_cpu()
323 struct scatterlist *sg, int nelems, enum dma_data_direction dir) in dma_sync_sg_for_device() argument
326 __dma_sync_sg_for_device(dev, sg, nelems, dir); in dma_sync_sg_for_device()
347 struct scatterlist *sg, int nelems, enum dma_data_direction dir) in dma_sync_sg_for_cpu() argument
351 struct scatterlist *sg, int nelems, enum dma_data_direction dir) in dma_sync_sg_for_device() argument
iommu-dma.h 65 int nelems, enum dma_data_direction dir);
67 int nelems, enum dma_data_direction dir);
rhashtable.h 186 return atomic_read(&ht->nelems) > (tbl->size / 4 * 3) && in rht_grow_above_75()
199 return atomic_read(&ht->nelems) < (tbl->size * 3 / 10) && in rht_shrink_below_30()
211 return atomic_read(&ht->nelems) > tbl->size && in rht_grow_above_100()
223 return atomic_read(&ht->nelems) >= ht->max_elems; in rht_grow_above_max()
796 atomic_inc(&ht->nelems); in __rhashtable_insert_fast()
1066 atomic_dec(&ht->nelems); in __rhashtable_remove_fast_one()
rhashtable-types.h 91 atomic_t nelems; member
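
The rhashtable hits use nelems as a live element count: an atomic_t in struct rhashtable (rhashtable-types.h), incremented and decremented on insert/remove, and compared against the bucket-table size to decide when to grow (above 75% or 100% load, or past max_elems) and when to shrink (below 30%). A plain-C restatement of the two threshold checks, where tbl_size and nelems stand in for tbl->size and atomic_read(&ht->nelems); the kernel versions also test the configured min/max table size (the trailing && in the hits), elided here:

#include <stdbool.h>
#include <stdint.h>

static bool grow_above_75(uint32_t nelems, uint32_t tbl_size)
{
	return nelems > tbl_size / 4 * 3;	/* more than 75% full */
}

static bool shrink_below_30(uint32_t nelems, uint32_t tbl_size)
{
	return nelems < tbl_size * 3 / 10;	/* less than 30% full */
}
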
/linux-6.12.1/arch/powerpc/kvm/
guest-state-buffer.c 45 gsb->hdr->nelems = cpu_to_be32(0); in kvmppc_gsb_new()
78 u32 nelems = kvmppc_gsb_nelems(gsb); in kvmppc_gsb_put() local
84 kvmppc_gsb_header(gsb)->nelems = cpu_to_be32(nelems + 1); in kvmppc_gsb_put()
/linux-6.12.1/arch/x86/kernel/
amd_gart_64.c 323 int nelems, struct scatterlist *sout, in __dma_map_cont() argument
334 for_each_sg(start, s, nelems, i) { in __dma_map_cont()
361 dma_map_cont(struct device *dev, struct scatterlist *start, int nelems, in dma_map_cont() argument
365 BUG_ON(nelems != 1); in dma_map_cont()
370 return __dma_map_cont(dev, start, nelems, sout, pages); in dma_map_cont()
/linux-6.12.1/arch/powerpc/include/asm/
iommu.h 262 struct scatterlist *sglist, int nelems,
268 int nelems,
guest-state-buffer.h 208 __be32 nelems; member
385 return be32_to_cpu(kvmppc_gsb_header(gsb)->nelems); in kvmppc_gsb_nelems()
396 kvmppc_gsb_header(gsb)->nelems = cpu_to_be32(0); in kvmppc_gsb_reset()
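
In the KVM guest-state-buffer code, nelems is a wire-format field: guest-state-buffer.h declares it as a big-endian __be32 in the buffer header, so guest-state-buffer.c resets and bumps it with cpu_to_be32() and reads it back through be32_to_cpu(). A minimal sketch of that endian-stable counter pattern; the struct and helper names below are illustrative, not the kvmppc_gsb_* API:

#include <linux/types.h>
#include <asm/byteorder.h>

/* Illustrative header with an element count kept in big-endian form. */
struct my_gs_header {
	__be32 nelems;
};

static u32 my_gs_nelems(const struct my_gs_header *hdr)
{
	return be32_to_cpu(hdr->nelems);
}

/* Bump the count after appending one element, preserving byte order. */
static void my_gs_count_one(struct my_gs_header *hdr)
{
	hdr->nelems = cpu_to_be32(my_gs_nelems(hdr) + 1);
}
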
/linux-6.12.1/lib/
rhashtable.c 397 unsigned int nelems = atomic_read(&ht->nelems); in rhashtable_shrink() local
400 if (nelems) in rhashtable_shrink()
401 size = roundup_pow_of_two(nelems * 3 / 2); in rhashtable_shrink()
587 atomic_inc(&ht->nelems); in rhashtable_insert_one()
1077 atomic_set(&ht->nelems, 0); in rhashtable_init_noprof()
/linux-6.12.1/tools/include/uapi/linux/
btf.h 114 __u32 nelems; member
/linux-6.12.1/include/uapi/linux/
btf.h 114 __u32 nelems; member
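
Both copies of btf.h (the kernel UAPI header and its tools/ mirror) hit on the nelems member of struct btf_array, the element count of a BTF array type, stored alongside the element and index type IDs that the kernel/bpf hits at the bottom of this listing use together. For reference, the surrounding struct in include/uapi/linux/btf.h is:

struct btf_array {
	__u32	type;		/* element type ID */
	__u32	index_type;	/* index type ID */
	__u32	nelems;		/* number of elements */
};
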
/linux-6.12.1/tools/bpf/bpftool/
btf_dumper.c 217 if (!arr->nelems) in is_str_array()
234 end_s = s + arr->nelems; in is_str_array()
266 for (i = 0; i < arr->nelems; i++) { in btf_dumper_array()
656 BTF_PRINT_ARG("[%d]", array->nelems); in __btf_dumper_type_only()
/linux-6.12.1/arch/powerpc/platforms/pseries/
vio.c 552 int nelems, enum dma_data_direction direction, in vio_dma_iommu_map_sg() argument
561 for_each_sg(sglist, sgl, nelems, count) in vio_dma_iommu_map_sg()
567 ret = ppc_iommu_map_sg(dev, tbl, sglist, nelems, dma_get_mask(dev), in vio_dma_iommu_map_sg()
586 struct scatterlist *sglist, int nelems, in vio_dma_iommu_unmap_sg() argument
596 for_each_sg(sglist, sgl, nelems, count) in vio_dma_iommu_unmap_sg()
599 ppc_iommu_unmap_sg(tbl, sglist, nelems, direction, attrs); in vio_dma_iommu_unmap_sg()
/linux-6.12.1/tools/lib/bpf/
btf.c 381 a->nelems = bswap_32(a->nelems); in btf_bswap_type_rest()
770 __u32 nelems = 1; in btf__resolve_size() local
800 if (nelems && array->nelems > UINT32_MAX / nelems) in btf__resolve_size()
802 nelems *= array->nelems; in btf__resolve_size()
815 if (nelems && size > UINT32_MAX / nelems) in btf__resolve_size()
818 return nelems * size; in btf__resolve_size()
2165 a->nelems = nr_elems; in btf__add_array()
3812 h = hash_combine(h, info->nelems); in btf_hash_array()
3834 info1->nelems == info2->nelems; in btf_equal_array()
3847 return btf_array(t1)->nelems == btf_array(t2)->nelems; in btf_compat_array()
btf_relocate.c 104 int nelems) in search_btf_name_size() argument
107 int high = nelems - 1; in search_btf_name_size()
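
The libbpf hits show two distinct uses: btf.c multiplies array dimensions into a running nelems while resolving a type's size, guarding every multiplication against 32-bit overflow, and btf_relocate.c simply takes nelems as the length of a sorted array to binary-search (high = nelems - 1). A standalone restatement of the overflow-guarded size computation, not the libbpf API itself:

#include <stdint.h>
#include <errno.h>

/* Mirrors the check in the btf__resolve_size()/__btf_resolve_size() hits;
 * returns a negative errno-style value instead of libbpf's error handling. */
static int64_t array_total_size(uint32_t nelems, uint32_t elem_size)
{
	if (nelems && elem_size > UINT32_MAX / nelems)
		return -E2BIG;			/* product would not fit in 32 bits */
	return (int64_t)nelems * elem_size;
}
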
/linux-6.12.1/kernel/bpf/
btf.c 1944 u32 i, size, nelems = 1, id = 0; in __btf_resolve_size() local
1976 if (nelems && array->nelems > U32_MAX / nelems) in __btf_resolve_size()
1978 nelems *= array->nelems; in __btf_resolve_size()
1991 if (nelems && size > U32_MAX / nelems) in __btf_resolve_size()
1994 *type_size = nelems * size; in __btf_resolve_size()
1996 *total_nelems = nelems; in __btf_resolve_size()
3006 if (array->nelems && elem_size > U32_MAX / array->nelems) { in btf_array_resolve()
3012 env_stack_pop_resolved(env, elem_type_id, elem_size * array->nelems); in btf_array_resolve()
3023 array->type, array->index_type, array->nelems); in btf_array_log()
3062 for (i = 0; i < array->nelems; i++) { in __btf_array_show()
[all …]
