/linux-6.12.1/drivers/staging/media/atomisp/pci/runtime/isys/src/ibuf_ctrl_rmgr.c
   52: u32 aligned_size;  in ia_css_isys_ibuf_rmgr_acquire() local
   59: aligned_size = (size + (IBUF_ALIGN - 1)) & ~(IBUF_ALIGN - 1);  in ia_css_isys_ibuf_rmgr_acquire()
   68: if (handle->size >= aligned_size) {  in ia_css_isys_ibuf_rmgr_acquire()
   83: (ibuf_rsrc.free_size >= aligned_size)) {  in ia_css_isys_ibuf_rmgr_acquire()
   86: handle->size = aligned_size;  in ia_css_isys_ibuf_rmgr_acquire()
   89: ibuf_rsrc.free_start_addr += aligned_size;  in ia_css_isys_ibuf_rmgr_acquire()
   90: ibuf_rsrc.free_size -= aligned_size;  in ia_css_isys_ibuf_rmgr_acquire()

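The hit at ibuf_ctrl_rmgr.c:59 is the classic open-coded round-up, which is only correct when the alignment is a power of two (the mask ~(align - 1) then clears exactly the low bits). A minimal user-space sketch of that idiom, assuming IBUF_ALIGN is 32 (its real value is not visible in these hits):

#include <stdint.h>
#include <stdio.h>

#define IBUF_ALIGN 32u  /* assumed power-of-two alignment, for illustration only */

static uint32_t round_up_pow2(uint32_t size, uint32_t align)
{
    /* spill any partial chunk into the next one, then mask down to a boundary */
    return (size + (align - 1)) & ~(align - 1);
}

int main(void)
{
    printf("%u\n", round_up_pow2(100, IBUF_ALIGN));  /* prints 128 */
    printf("%u\n", round_up_pow2(128, IBUF_ALIGN));  /* already aligned: 128 */
    return 0;
}

For a non-power-of-two alignment the mask trick silently produces a wrong result, so division-based rounding would be needed instead.
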
/linux-6.12.1/drivers/gpu/drm/i915/selftests/i915_gem_gtt.c
   263: u64 hole_size, aligned_size;  in lowlevel_hole() local
   265: aligned_size = max_t(u32, ilog2(min_alignment), size);  in lowlevel_hole()
   266: hole_size = (hole_end - hole_start) >> aligned_size;  in lowlevel_hole()
   287: GEM_BUG_ON(count * BIT_ULL(aligned_size) > vm->total);  in lowlevel_hole()
   288: GEM_BUG_ON(hole_start + count * BIT_ULL(aligned_size) > hole_end);  in lowlevel_hole()
   311: u64 addr = hole_start + order[n] * BIT_ULL(aligned_size);  in lowlevel_hole()
   314: GEM_BUG_ON(addr + BIT_ULL(aligned_size) > vm->total);  in lowlevel_hole()
   357: mock_vma_res->node_size = BIT_ULL(aligned_size);  in lowlevel_hole()
   370: u64 addr = hole_start + order[n] * BIT_ULL(aligned_size);  in lowlevel_hole()
   458: u64 aligned_size = round_up(obj->base.size,  in fill_hole() local
   [all …]

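Unlike every other file in this listing, lowlevel_hole() does not keep a byte count in aligned_size: line 265 stores a power-of-two order (a shift amount), so each test node spans BIT_ULL(aligned_size) bytes and addresses advance in that stride, as the BIT_ULL() hits show. A small sketch of that stepping, with made-up hole bounds and order:

#include <stdio.h>
#include <stdint.h>

#define BIT_ULL(n) (1ull << (n))

int main(void)
{
    uint64_t hole_start = 0x100000, hole_end = 0x500000;  /* example hole */
    unsigned int aligned_size = 16;                       /* order: 64 KiB chunks */
    uint64_t count = (hole_end - hole_start) >> aligned_size;

    for (uint64_t n = 0; n < count && n < 4; n++) {
        uint64_t addr = hole_start + n * BIT_ULL(aligned_size);
        printf("slot %llu at %#llx\n",
               (unsigned long long)n, (unsigned long long)addr);
    }
    return 0;
}
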
/linux-6.12.1/drivers/gpu/drm/radeon/radeon_fbdev.c
   66: int aligned_size, size;  in radeon_fbdev_create_pinned_object() local
   80: aligned_size = ALIGN(size, PAGE_SIZE);  in radeon_fbdev_create_pinned_object()
   81: ret = radeon_gem_object_create(rdev, aligned_size, 0,  in radeon_fbdev_create_pinned_object()
   85: pr_err("failed to allocate framebuffer (%d)\n", aligned_size);  in radeon_fbdev_create_pinned_object()

/linux-6.12.1/arch/x86/kernel/shstk.c
   508: unsigned long aligned_size;  in SYSCALL_DEFINE3() local
   528: aligned_size = PAGE_ALIGN(size);  in SYSCALL_DEFINE3()
   529: if (aligned_size < size)  in SYSCALL_DEFINE3()
   532: return alloc_shstk(addr, aligned_size, size, set_tok);  in SYSCALL_DEFINE3()

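shstk.c:528-529 show the standard guard against round-up overflow: PAGE_ALIGN() of a size near ULONG_MAX wraps to a smaller value, and the aligned_size < size comparison rejects the request before alloc_shstk() runs. A user-space sketch of the same check, assuming a 4 KiB page size:

#include <stdio.h>

#define PAGE_SIZE     4096ul
#define PAGE_ALIGN(x) (((x) + PAGE_SIZE - 1) & ~(PAGE_SIZE - 1))

static int check_size(unsigned long size)
{
    unsigned long aligned_size = PAGE_ALIGN(size);

    if (aligned_size < size)    /* wrapped past ULONG_MAX */
        return -1;              /* the syscall bails out here (-EINVAL in the kernel) */
    return 0;
}

int main(void)
{
    printf("%d\n", check_size(8192));              /* 0: fine */
    printf("%d\n", check_size((unsigned long)-1)); /* -1: would overflow */
    return 0;
}
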
/linux-6.12.1/drivers/net/wwan/iosm/iosm_ipc_mux_codec.c
   1067: int aligned_size = 0;  in ipc_mux_ul_adgh_encode() local
   1108: aligned_size = ALIGN((pad_len + src_skb->len), 4);  in ipc_mux_ul_adgh_encode()
   1110: ipc_mux->size_needed = sizeof(struct mux_adgh) + aligned_size;  in ipc_mux_ul_adgh_encode()
   1203: int aligned_size,  in mux_ul_dg_update_tbl_index() argument
   1219: ipc_mux->size_needed += sizeof(*dg) + aligned_size;  in mux_ul_dg_update_tbl_index()
   1231: int aligned_size;  in mux_ul_dg_encode() local
   1245: aligned_size = ALIGN((head_pad_len + src_skb->len), 4);  in mux_ul_dg_encode()
   1246: ipc_mux->size_needed += sizeof(*dg) + aligned_size;  in mux_ul_dg_encode()
   1254: aligned_size,  in mux_ul_dg_encode()
   1275: offset += aligned_size;  in mux_ul_dg_encode()

/linux-6.12.1/mm/kasan/generic.c
   217: size_t aligned_size = round_up(global->size, KASAN_GRANULE_SIZE);  in register_global() local
   221: kasan_poison(global->beg + aligned_size,  in register_global()
   222: global->size_with_redzone - aligned_size,  in register_global()

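register_global() rounds the global's true size up to the KASAN granule and poisons everything from that boundary to the end of the compiler-added redzone. A sketch of just that offset arithmetic, with example sizes (kasan_poison() itself is not reproduced):

#include <stdio.h>
#include <stddef.h>

#define KASAN_GRANULE_SIZE 8u
#define ROUND_UP(x, a) (((x) + (a) - 1) / (a) * (a))

int main(void)
{
    size_t size = 13;               /* payload bytes of the global */
    size_t size_with_redzone = 48;  /* payload plus compiler-added redzone */
    size_t aligned_size = ROUND_UP(size, KASAN_GRANULE_SIZE);

    /* poison starts at beg + aligned_size and covers the rest of the redzone */
    printf("poison offset=%zu len=%zu\n",
           aligned_size, size_with_redzone - aligned_size);
    return 0;
}
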
/linux-6.12.1/sound/core/seq/seq_clientmgr.c
   422: size_t aligned_size;  in snd_seq_read() local
   455: aligned_size = sizeof(struct snd_seq_ump_event);  in snd_seq_read()
   457: aligned_size = sizeof(struct snd_seq_event);  in snd_seq_read()
   460: while (count >= aligned_size) {  in snd_seq_read()
   475: memcpy(&tmpev, &cell->event, aligned_size);  in snd_seq_read()
   477: if (copy_to_user(buf, &tmpev, aligned_size)) {  in snd_seq_read()
   481: count -= aligned_size;  in snd_seq_read()
   482: buf += aligned_size;  in snd_seq_read()
   485: aligned_size);  in snd_seq_read()
   492: if (copy_to_user(buf, &cell->event, aligned_size)) {  in snd_seq_read()
   [all …]

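snd_seq_read() picks the record size once, depending on whether the client speaks UMP or legacy events, and then copies whole records while the user buffer still has room for another one. A simplified user-space sketch of that loop, with a stand-in record type and memcpy() in place of copy_to_user():

#include <stdio.h>
#include <string.h>

struct mock_event { int type; int data[7]; };  /* stand-in, not the ALSA struct */

static long read_events(char *buf, size_t count,
                        const struct mock_event *queue, size_t queued)
{
    size_t aligned_size = sizeof(struct mock_event);  /* chosen once per read */
    long written = 0;
    size_t i = 0;

    while (count >= aligned_size && i < queued) {
        memcpy(buf, &queue[i++], aligned_size);  /* copy one whole record */
        count -= aligned_size;
        buf += aligned_size;
        written += aligned_size;
    }
    return written;  /* bytes delivered; a record is never split */
}

int main(void)
{
    struct mock_event q[3] = { {1, {0}}, {2, {0}}, {3, {0}} };
    char buf[100];

    printf("%ld\n", read_events(buf, sizeof(buf), q, 3));  /* 96 on typical ABIs */
    return 0;
}
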
/linux-6.12.1/mm/slab_common.c
   899: unsigned int aligned_size = kmalloc_info[idx].size;  in new_kmalloc_cache() local
   927: aligned_size = ALIGN(aligned_size, minalign);  in new_kmalloc_cache()
   928: aligned_idx = __kmalloc_index(aligned_size, false);  in new_kmalloc_cache()
   934: aligned_size, flags);  in new_kmalloc_cache()

/linux-6.12.1/drivers/crypto/caam/caampkc.c
   993: unsigned aligned_size;  in caam_rsa_set_priv_key_form() local
   1005: aligned_size = ALIGN(raw_key->p_sz, dma_get_cache_alignment());  in caam_rsa_set_priv_key_form()
   1006: rsa_key->tmp1 = kzalloc(aligned_size, GFP_KERNEL);  in caam_rsa_set_priv_key_form()
   1010: aligned_size = ALIGN(raw_key->q_sz, dma_get_cache_alignment());  in caam_rsa_set_priv_key_form()
   1011: rsa_key->tmp2 = kzalloc(aligned_size, GFP_KERNEL);  in caam_rsa_set_priv_key_form()

/linux-6.12.1/drivers/crypto/caam/caamalg.c
   1627: unsigned int aligned_size;  in skcipher_edesc_alloc() local
   1701: aligned_size = sizeof(*edesc) + desc_bytes + sec4_sg_bytes;  in skcipher_edesc_alloc()
   1702: aligned_size = ALIGN(aligned_size, dma_get_cache_alignment());  in skcipher_edesc_alloc()
   1703: aligned_size += ~(ARCH_KMALLOC_MINALIGN - 1) &  in skcipher_edesc_alloc()
   1705: aligned_size += ALIGN(ivsize, dma_get_cache_alignment());  in skcipher_edesc_alloc()
   1706: edesc = kzalloc(aligned_size, flags);  in skcipher_edesc_alloc()

/linux-6.12.1/tools/perf/util/synthetic-events.c
   458: size_t size, aligned_size;  in perf_event__synthesize_mmap_events() local
   518: aligned_size = PERF_ALIGN(size, sizeof(u64));  in perf_event__synthesize_mmap_events()
   521: (sizeof(event->mmap2.filename) - aligned_size));  in perf_event__synthesize_mmap_events()
   523: (aligned_size - size));  in perf_event__synthesize_mmap_events()
   2185: ssize_t size = 0, aligned_size = 0, padding;  in perf_event__synthesize_tracing_data() local
   2207: aligned_size = PERF_ALIGN(size, sizeof(u64));  in perf_event__synthesize_tracing_data()
   2208: padding = aligned_size - size;  in perf_event__synthesize_tracing_data()
   2210: ev.tracing_data.size = aligned_size;  in perf_event__synthesize_tracing_data()
   2224: return aligned_size;  in perf_event__synthesize_tracing_data()

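Both perf synthesizers pad their records to a multiple of sizeof(u64): the payload size is rounded up with PERF_ALIGN and the difference is emitted as zero bytes. A sketch of that arithmetic, with a local ALIGN_UP macro standing in for PERF_ALIGN and an example filename payload:

#include <stdio.h>
#include <stdint.h>
#include <string.h>

#define ALIGN_UP(x, a) (((x) + (a) - 1) & ~((size_t)(a) - 1))

int main(void)
{
    const char *filename = "/lib/libc.so.6";
    size_t size = strlen(filename) + 1;                   /* payload incl. NUL */
    size_t aligned_size = ALIGN_UP(size, sizeof(uint64_t));
    size_t padding = aligned_size - size;                 /* zero bytes appended */

    printf("size=%zu aligned=%zu padding=%zu\n", size, aligned_size, padding);
    return 0;
}
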
/linux-6.12.1/drivers/net/phy/mdio_bus.c
   150: size_t aligned_size = ALIGN(sizeof(*bus), NETDEV_ALIGN);  in mdiobus_alloc_size() local
   156: alloc_size = aligned_size + size;  in mdiobus_alloc_size()
   166: bus->priv = (void *)bus + aligned_size;  in mdiobus_alloc_size()

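mdiobus_alloc_size() is the single-allocation pattern: the bus structure and the caller's private data share one allocation, with the private area starting at the NETDEV_ALIGN-rounded end of the struct. A user-space sketch of that layout with stand-in types (not the kernel definitions):

#include <stdio.h>
#include <stdlib.h>

#define NETDEV_ALIGN 32u
#define ALIGN_UP(x, a) (((x) + (a) - 1) & ~((size_t)(a) - 1))

struct mock_bus {
    int id;
    void *priv;  /* points into the same allocation */
};

static struct mock_bus *bus_alloc_size(size_t priv_size)
{
    size_t aligned_size = ALIGN_UP(sizeof(struct mock_bus), NETDEV_ALIGN);
    size_t alloc_size = priv_size ? aligned_size + priv_size
                                  : sizeof(struct mock_bus);
    struct mock_bus *bus = calloc(1, alloc_size);

    if (!bus)
        return NULL;
    if (priv_size)
        bus->priv = (char *)bus + aligned_size;  /* private data follows struct */
    return bus;
}

int main(void)
{
    struct mock_bus *bus = bus_alloc_size(64);

    if (!bus)
        return 1;
    printf("priv offset = %td\n", (char *)bus->priv - (char *)bus);
    free(bus);
    return 0;
}

One allocation keeps the private data cache-adjacent to the owning struct and lets a single free() release both.
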
/linux-6.12.1/net/netfilter/nft_compat.c
   320: unsigned int info_size, aligned_size = XT_ALIGN(size);  in nft_extension_dump_info() local
   323: nla = nla_reserve(skb, attr, aligned_size);  in nft_extension_dump_info()
   329: memset(nla_data(nla) + info_size, 0, aligned_size - info_size);  in nft_extension_dump_info()

/linux-6.12.1/net/netfilter/x_tables.c
   313: int usersize, int size, int aligned_size)  in xt_data_to_user() argument
   318: if (usersize != aligned_size &&  in xt_data_to_user()
   319: clear_user(dst + usersize, aligned_size - usersize))  in xt_data_to_user()

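xt_data_to_user() (declared in x_tables.h below) exists because the in-kernel match/target data is XT_ALIGN()-padded while only usersize bytes are meaningful; if the tail up to aligned_size is not cleared, stale kernel memory would reach userspace. A user-space sketch of the scrubbing step, with memset() standing in for clear_user():

#include <stdio.h>
#include <string.h>

static int data_to_user(unsigned char *dst, const unsigned char *src,
                        int usersize, int aligned_size)
{
    memcpy(dst, src, usersize);            /* copy only the meaningful bytes */
    if (usersize != aligned_size)          /* then scrub the alignment padding */
        memset(dst + usersize, 0, aligned_size - usersize);
    return 0;
}

int main(void)
{
    unsigned char kernel_obj[16] = "match-info";  /* 11 meaningful bytes */
    unsigned char user_buf[16];

    memset(user_buf, 0xAA, sizeof(user_buf));     /* pretend it held junk */
    data_to_user(user_buf, kernel_obj, 11, 16);
    printf("last byte = %#x\n", user_buf[15]);    /* 0: padding cleared */
    return 0;
}
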
/linux-6.12.1/include/linux/netfilter/x_tables.h
   302: int usersize, int size, int aligned_size);

/linux-6.12.1/drivers/gpu/drm/xe/xe_lrc.c
   1121: size_t aligned_size;  in xe_lrc_write_ring() local
   1124: aligned_size = ALIGN(size, 8);  in xe_lrc_write_ring()
   1137: if (aligned_size > size) {  in xe_lrc_write_ring()

/linux-6.12.1/drivers/gpu/drm/xe/xe_bo.c
   1277: size_t aligned_size;  in ___xe_bo_create_locked() local
   1294: aligned_size = ALIGN(size, align);  in ___xe_bo_create_locked()
   1300: aligned_size = ALIGN(size, SZ_4K);  in ___xe_bo_create_locked()
   1305: if (type == ttm_bo_type_device && aligned_size != size)  in ___xe_bo_create_locked()

/linux-6.12.1/drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c
   1728: uint64_t aligned_size;  in amdgpu_amdkfd_gpuvm_alloc_memory_of_gpu() local
   1801: aligned_size = PAGE_ALIGN(size);  in amdgpu_amdkfd_gpuvm_alloc_memory_of_gpu()
   1807: ret = amdgpu_amdkfd_reserve_mem_limit(adev, aligned_size, flags,  in amdgpu_amdkfd_gpuvm_alloc_memory_of_gpu()
   1818: ret = amdgpu_gem_object_create(adev, aligned_size, 1, alloc_domain, alloc_flags,  in amdgpu_amdkfd_gpuvm_alloc_memory_of_gpu()
   1891: amdgpu_amdkfd_unreserve_mem_limit(adev, aligned_size, flags, xcp_id);  in amdgpu_amdkfd_gpuvm_alloc_memory_of_gpu()

/linux-6.12.1/drivers/infiniband/hw/qedr/verbs.c
   734: u64 size, aligned_size;  in qedr_align_cq_entries() local
   738: aligned_size = ALIGN(size, PAGE_SIZE);  in qedr_align_cq_entries()
   740: return aligned_size / QEDR_CQE_SIZE;  in qedr_align_cq_entries()
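
qedr_align_cq_entries() converts a requested CQE count into whatever actually fits once the ring is rounded up to whole pages. A sketch of that calculation; the 32-byte CQE size and 4 KiB page are assumptions for the example:

#include <stdio.h>
#include <stdint.h>

#define PAGE_SIZE     4096ull
#define QEDR_CQE_SIZE 32ull   /* assumed entry size, for illustration */
#define ALIGN_UP(x, a) (((x) + (a) - 1) & ~((a) - 1))

static uint64_t align_cq_entries(uint64_t entries)
{
    uint64_t size = entries * QEDR_CQE_SIZE;           /* requested bytes */
    uint64_t aligned_size = ALIGN_UP(size, PAGE_SIZE); /* whole pages */

    return aligned_size / QEDR_CQE_SIZE;               /* entries that fit */
}

int main(void)
{
    printf("%llu\n", (unsigned long long)align_cq_entries(100));  /* 128 */
    return 0;
}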