/linux-6.12.1/drivers/md/ |
D | dm-exception-store.c |
    146  unsigned int chunk_size;  in set_chunk_size() local
    148  if (kstrtouint(chunk_size_arg, 10, &chunk_size)) {  in set_chunk_size()
    153  if (!chunk_size) {  in set_chunk_size()
    154  store->chunk_size = store->chunk_mask = store->chunk_shift = 0;  in set_chunk_size()
    158  return dm_exception_store_set_chunk_size(store, chunk_size, error);  in set_chunk_size()
    162  unsigned int chunk_size,  in dm_exception_store_set_chunk_size() argument
    166  if (!is_power_of_2(chunk_size)) {  in dm_exception_store_set_chunk_size()
    172  if (chunk_size %  in dm_exception_store_set_chunk_size()
    174  chunk_size %  in dm_exception_store_set_chunk_size()
    180  if (chunk_size > INT_MAX >> SECTOR_SHIFT) {  in dm_exception_store_set_chunk_size()
    [all …]
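The dm-exception-store fragments above gate a user-supplied snapshot chunk size: zero falls back to a default elsewhere, and an explicit value must be a power of two and small enough that it still fits in an int once converted from sectors to bytes. A minimal userspace sketch of that validation, assuming the usual 512-byte sector (SECTOR_SHIFT of 9); this is an illustration, not the driver code:

    #include <limits.h>
    #include <stdbool.h>
    #include <stdio.h>

    #define SECTOR_SHIFT 9  /* 512-byte sectors, as in the block layer */

    static bool chunk_size_is_valid(unsigned int chunk_size)
    {
        if (!chunk_size)
            return false;                        /* 0 means "use the default" elsewhere */
        if (chunk_size & (chunk_size - 1))
            return false;                        /* must be a power of two */
        if (chunk_size > INT_MAX >> SECTOR_SHIFT)
            return false;                        /* sectors -> bytes must still fit in an int */
        return true;
    }

    int main(void)
    {
        printf("%d\n", chunk_size_is_valid(16));        /* 1: power of two, in range */
        printf("%d\n", chunk_size_is_valid(24));        /* 0: not a power of two */
        printf("%d\n", chunk_size_is_valid(1u << 23));  /* 0: too large once shifted to bytes */
        return 0;
    }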
|
D | dm-stripe.c |
    38   uint32_t chunk_size;  member
    93   uint32_t chunk_size;  in stripe_ctr() local
    107  if (kstrtouint(argv[1], 10, &chunk_size) || !chunk_size) {  in stripe_ctr()
    119  if (sector_div(tmp_len, chunk_size)) {  in stripe_ctr()
    150  r = dm_set_target_max_io_len(ti, chunk_size);  in stripe_ctr()
    162  sc->chunk_size = chunk_size;  in stripe_ctr()
    163  if (chunk_size & (chunk_size - 1))  in stripe_ctr()
    166  sc->chunk_size_shift = __ffs(chunk_size);  in stripe_ctr()
    209  chunk_offset = sector_div(chunk, sc->chunk_size);  in stripe_map_sector()
    211  chunk_offset = chunk & (sc->chunk_size - 1);  in stripe_map_sector()
    [all …]
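stripe_map_sector() (lines 209-211 above) splits an I/O sector into a chunk index and an offset inside the chunk, taking a mask-and-shift fast path when the chunk size is a power of two and falling back to division otherwise. A rough standalone sketch of the two paths; the struct fields mirror the kernel names, while the -1 shift sentinel and the sample values are assumptions of this example:

    #include <stdint.h>
    #include <stdio.h>

    typedef uint64_t sector_t;

    struct stripe_c {
        uint32_t chunk_size;       /* in sectors */
        int chunk_size_shift;      /* log2(chunk_size), or -1 if not a power of two */
    };

    static void map_sector(const struct stripe_c *sc, sector_t sector,
                           sector_t *chunk, sector_t *chunk_offset)
    {
        if (sc->chunk_size_shift < 0) {
            /* generic path: divide and keep the remainder */
            *chunk_offset = sector % sc->chunk_size;
            *chunk = sector / sc->chunk_size;
        } else {
            /* power-of-two fast path: mask and shift */
            *chunk_offset = sector & (sc->chunk_size - 1);
            *chunk = sector >> sc->chunk_size_shift;
        }
    }

    int main(void)
    {
        struct stripe_c sc = { .chunk_size = 256, .chunk_size_shift = 8 };
        sector_t chunk, off;

        map_sector(&sc, 1000, &chunk, &off);
        printf("sector 1000 -> chunk %llu, offset %llu\n",
               (unsigned long long)chunk, (unsigned long long)off);
        return 0;
    }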
|
D | dm-unstripe.c |
    22   uint32_t chunk_size;  member
    62   if (kstrtouint(argv[1], 10, &uc->chunk_size) || !uc->chunk_size) {  in unstripe_ctr()
    88   uc->unstripe_offset = (sector_t)uc->unstripe * uc->chunk_size;  in unstripe_ctr()
    89   uc->unstripe_width = (sector_t)(uc->stripes - 1) * uc->chunk_size;  in unstripe_ctr()
    90   uc->chunk_shift = is_power_of_2(uc->chunk_size) ? fls(uc->chunk_size) - 1 : 0;  in unstripe_ctr()
    93   if (sector_div(tmp_len, uc->chunk_size)) {  in unstripe_ctr()
    98   if (dm_set_target_max_io_len(ti, uc->chunk_size)) {  in unstripe_ctr()
    127  sector_div(tmp_sector, uc->chunk_size);  in map_to_core()
    157  uc->stripes, (unsigned long long)uc->chunk_size, uc->unstripe,  in unstripe_status()
    180  limits->chunk_sectors = uc->chunk_size;  in unstripe_io_hints()
|
D | dm-snap-persistent.c |
    84   __le32 chunk_size;  member
    175  len = ps->store->chunk_size << SECTOR_SHIFT;  in alloc_area()
    237  .sector = ps->store->chunk_size * chunk,  in chunk_io()
    238  .count = ps->store->chunk_size,  in chunk_io()
    297  memset(ps->area, 0, ps->store->chunk_size << SECTOR_SHIFT);  in zero_memory_area()
    310  unsigned int chunk_size;  in read_header() local
    318  if (!ps->store->chunk_size) {  in read_header()
    319  ps->store->chunk_size = max(DM_CHUNK_SIZE_DEFAULT_SECTORS,  in read_header()
    322  ps->store->chunk_mask = ps->store->chunk_size - 1;  in read_header()
    323  ps->store->chunk_shift = __ffs(ps->store->chunk_size);  in read_header()
    [all …]
|
D | dm-snap-transient.c |
    48   if (size < (tc->next_free + store->chunk_size))  in transient_prepare_exception()
    52   tc->next_free += store->chunk_size;  in transient_prepare_exception()
    100  DMEMIT(" N %llu", (unsigned long long)store->chunk_size);  in transient_status()
|
/linux-6.12.1/drivers/md/dm-vdo/indexer/ |
D | io-factory.c |
    220  size_t chunk_size;  in uds_read_from_buffered_reader() local
    227  chunk_size = min(length, bytes_remaining_in_read_buffer(reader));  in uds_read_from_buffered_reader()
    228  memcpy(data, reader->end, chunk_size);  in uds_read_from_buffered_reader()
    229  length -= chunk_size;  in uds_read_from_buffered_reader()
    230  data += chunk_size;  in uds_read_from_buffered_reader()
    231  reader->end += chunk_size;  in uds_read_from_buffered_reader()
    245  size_t chunk_size;  in uds_verify_buffered_data() local
    256  chunk_size = min(length, bytes_remaining_in_read_buffer(reader));  in uds_verify_buffered_data()
    257  if (memcmp(value, reader->end, chunk_size) != 0) {  in uds_verify_buffered_data()
    262  length -= chunk_size;  in uds_verify_buffered_data()
    [all …]
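uds_read_from_buffered_reader() drains a request in chunks bounded by whatever is left in the read buffer, advancing both the destination pointer and the buffer cursor until the requested length is satisfied. A simplified userspace version of that loop, with the buffer-refill machinery replaced by a sliding window over a flat source buffer (all names here are invented for the sketch):

    #include <stdio.h>
    #include <string.h>

    /* Toy "reader": a flat source consumed through a small window. */
    struct reader {
        const char *src;
        size_t src_len;
        size_t pos;      /* how much of src has been consumed */
        size_t window;   /* at most this many bytes visible per pass */
    };

    static size_t bytes_remaining(const struct reader *r)
    {
        size_t left = r->src_len - r->pos;
        return left < r->window ? left : r->window;
    }

    static int read_from_reader(struct reader *r, char *data, size_t length)
    {
        while (length > 0) {
            size_t chunk_size = bytes_remaining(r);

            if (chunk_size == 0)
                return -1;               /* ran out of data */
            if (chunk_size > length)
                chunk_size = length;     /* min(length, remaining) */

            memcpy(data, r->src + r->pos, chunk_size);
            length -= chunk_size;
            data += chunk_size;
            r->pos += chunk_size;
        }
        return 0;
    }

    int main(void)
    {
        struct reader r = { .src = "hello, chunked world", .src_len = 20, .window = 7 };
        char out[21] = { 0 };

        if (read_from_reader(&r, out, 20) == 0)
            printf("read: %s\n", out);
        return 0;
    }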
|
/linux-6.12.1/net/xdp/ |
D | xdp_umem.c |
    160  u32 chunk_size = mr->chunk_size, headroom = mr->headroom;  in xdp_umem_reg() local
    166  if (chunk_size < XDP_UMEM_MIN_CHUNK_SIZE || chunk_size > PAGE_SIZE) {  in xdp_umem_reg()
    179  if (!unaligned_chunks && !is_power_of_2(chunk_size))  in xdp_umem_reg()
    198  chunks = div_u64_rem(size, chunk_size, &chunks_rem);  in xdp_umem_reg()
    205  if (headroom >= chunk_size - XDP_PACKET_HEADROOM)  in xdp_umem_reg()
    216  umem->chunk_size = chunk_size;  in xdp_umem_reg()
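xdp_umem_reg() checks the requested chunk geometry before registering the UMEM: the chunk must fit between a minimum size and one page, must be a power of two unless unaligned chunks were requested, and the user headroom must still leave room for the packet headroom. A hedged standalone version of those checks; the numeric limits below are illustrative stand-ins, not values copied from the uapi headers:

    #include <stdbool.h>
    #include <stdint.h>
    #include <stdio.h>

    /* Illustrative values; the real limits live in the kernel/uapi headers. */
    #define MIN_CHUNK_SIZE    2048u
    #define PAGE_SIZE_BYTES   4096u
    #define PACKET_HEADROOM    256u

    static bool umem_chunk_ok(uint32_t chunk_size, uint32_t headroom, bool unaligned_chunks)
    {
        if (chunk_size < MIN_CHUNK_SIZE || chunk_size > PAGE_SIZE_BYTES)
            return false;                            /* chunk must fit in one page */
        if (!unaligned_chunks && (chunk_size & (chunk_size - 1)))
            return false;                            /* aligned mode needs a power of two */
        if (headroom >= chunk_size - PACKET_HEADROOM)
            return false;                            /* leave room for the packet headroom */
        return true;
    }

    int main(void)
    {
        printf("%d\n", umem_chunk_ok(2048, 0, false));    /* 1: typical setup */
        printf("%d\n", umem_chunk_ok(3000, 0, false));    /* 0: not a power of two */
        printf("%d\n", umem_chunk_ok(3000, 0, true));     /* 1: allowed with unaligned chunks */
        printf("%d\n", umem_chunk_ok(2048, 2000, false)); /* 0: headroom eats the chunk */
        return 0;
    }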
|
/linux-6.12.1/tools/testing/selftests/net/ |
D | tcp_mmap.c |
    91   static size_t chunk_size = 512*1024;  variable
    191  buffer = mmap_large_buffer(chunk_size, &buffer_sz);  in child_thread()
    197  raddr = mmap(NULL, chunk_size + map_align, PROT_READ, flags, fd, 0);  in child_thread()
    224  zc.length = min(chunk_size, FILE_SZ - total);  in child_thread()
    232  assert(zc.length <= chunk_size);  in child_thread()
    245  assert(zc.recv_skip_hint <= chunk_size);  in child_thread()
    261  while (sub < chunk_size) {  in child_thread()
    262  lu = read(fd, buffer + sub, min(chunk_size - sub,  in child_thread()
    319  munmap(raddr, chunk_size + map_align);  in child_thread()
    373  rcvlowat = chunk_size;  in do_accept()
    [all …]
|
/linux-6.12.1/drivers/platform/x86/intel/ifs/ |
D | load.c |
    122  int i, num_chunks, chunk_size;  in copy_hashes_authenticate_chunks() local
    135  chunk_size = hashes_status.chunk_size * 1024;  in copy_hashes_authenticate_chunks()
    149  linear_addr = base + i * chunk_size;  in copy_hashes_authenticate_chunks()
    186  int i, num_chunks, chunk_size;  in copy_hashes_authenticate_chunks_gen2() local
    202  chunk_size = hashes_status.chunk_size * SZ_1K;  in copy_hashes_authenticate_chunks_gen2()
    212  ifsd->chunk_size = chunk_size;  in copy_hashes_authenticate_chunks_gen2()
    215  chunk_size = ifsd->chunk_size;  in copy_hashes_authenticate_chunks_gen2()
    235  linear_addr = base + i * chunk_size;  in copy_hashes_authenticate_chunks_gen2()
|
/linux-6.12.1/drivers/gpu/drm/ |
D | drm_buddy.c |
    234  int drm_buddy_init(struct drm_buddy *mm, u64 size, u64 chunk_size)  in drm_buddy_init() argument
    239  if (size < chunk_size)  in drm_buddy_init()
    242  if (chunk_size < SZ_4K)  in drm_buddy_init()
    245  if (!is_power_of_2(chunk_size))  in drm_buddy_init()
    248  size = round_down(size, chunk_size);  in drm_buddy_init()
    253  mm->chunk_size = chunk_size;  in drm_buddy_init()
    254  mm->max_order = ilog2(size) - ilog2(chunk_size);  in drm_buddy_init()
    287  order = ilog2(size) - ilog2(chunk_size);  in drm_buddy_init()
    288  root_size = chunk_size << order;  in drm_buddy_init()
    297  BUG_ON(drm_buddy_block_size(mm, root) < chunk_size);  in drm_buddy_init()
    [all …]
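drm_buddy_init() derives the allocator geometry from the chunk size: the managed size is rounded down to a chunk multiple, max_order is ilog2(size) - ilog2(chunk_size), and a block of a given order spans chunk_size << order bytes. A quick arithmetic sketch of those relations in plain C (not the DRM API; the sample sizes are arbitrary):

    #include <stdint.h>
    #include <stdio.h>

    static unsigned int ilog2_u64(uint64_t v)
    {
        unsigned int r = 0;

        while (v >>= 1)
            r++;
        return r;
    }

    int main(void)
    {
        uint64_t chunk_size = 4096;             /* minimum block size (at least SZ_4K) */
        uint64_t size = (1ull << 30) + 12345;   /* arbitrary region size */

        /* round_down(size, chunk_size); valid because chunk_size is a power of two */
        size &= ~(chunk_size - 1);

        unsigned int max_order = ilog2_u64(size) - ilog2_u64(chunk_size);
        uint64_t top_block = chunk_size << max_order;

        printf("max_order = %u\n", max_order);
        printf("largest block = %llu bytes\n", (unsigned long long)top_block);
        return 0;
    }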
|
/linux-6.12.1/drivers/gpu/drm/panthor/ |
D | panthor_heap.c |
    60   u32 chunk_size;  member
    147  chunk->bo = panthor_kernel_bo_create(ptdev, vm, heap->chunk_size,  in panthor_alloc_heap_chunk()
    173  (heap->chunk_size >> 12);  in panthor_alloc_heap_chunk()
    269  u32 chunk_size,  in panthor_heap_create() argument
    287  if (!IS_ALIGNED(chunk_size, PAGE_SIZE) ||  in panthor_heap_create()
    288  chunk_size < SZ_128K || chunk_size > SZ_8M)  in panthor_heap_create()
    307  heap->chunk_size = chunk_size;  in panthor_heap_create()
    477  (heap->chunk_size >> 12);  in panthor_heap_grow()
|
/linux-6.12.1/drivers/gpu/drm/tests/ |
D | drm_buddy_test.c |
    19   static inline u64 get_size(int order, u64 chunk_size)  in get_size() argument
    21   return (1 << order) * chunk_size;  in get_size()
    523  size = get_size(order, mm.chunk_size);  in drm_test_buddy_alloc_pathological()
    537  size = get_size(0, mm.chunk_size);  in drm_test_buddy_alloc_pathological()
    547  size = get_size(top, mm.chunk_size);  in drm_test_buddy_alloc_pathological()
    558  size = get_size(order, mm.chunk_size);  in drm_test_buddy_alloc_pathological()
    594  size = get_size(order, mm.chunk_size);  in drm_test_buddy_alloc_pessimistic()
    607  size = get_size(0, mm.chunk_size);  in drm_test_buddy_alloc_pessimistic()
    619  size = get_size(order, mm.chunk_size);  in drm_test_buddy_alloc_pessimistic()
    635  size = get_size(order, mm.chunk_size);  in drm_test_buddy_alloc_pessimistic()
    [all …]
|
/linux-6.12.1/arch/x86/platform/olpc/ |
D | olpc_dt.c |
    131  const size_t chunk_size = max(PAGE_SIZE, size);  in prom_early_alloc() local
    139  res = memblock_alloc(chunk_size, SMP_CACHE_BYTES);  in prom_early_alloc()
    142  chunk_size);  in prom_early_alloc()
    144  prom_early_allocated += chunk_size;  in prom_early_alloc()
    145  memset(res, 0, chunk_size);  in prom_early_alloc()
    146  free_mem = chunk_size;  in prom_early_alloc()
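prom_early_alloc() is a small bump allocator: when the current chunk is exhausted it grabs max(PAGE_SIZE, size) fresh bytes from memblock, zeroes them, and then carves subsequent requests out of that chunk. A userspace imitation with malloc() standing in for memblock_alloc(); the variable names are chosen for this sketch:

    #include <stdio.h>
    #include <stdlib.h>
    #include <string.h>

    #define PAGE_SIZE 4096u

    static char *chunk;          /* current chunk being carved up */
    static size_t free_mem;      /* bytes still unused in the chunk */
    static size_t total_alloc;   /* statistics, like prom_early_allocated */

    static void *early_alloc(size_t size)
    {
        if (free_mem < size) {
            /* Refill: at least a page, or the whole request if it is larger. */
            size_t chunk_size = size > PAGE_SIZE ? size : PAGE_SIZE;

            chunk = malloc(chunk_size);   /* memblock_alloc() in the kernel */
            if (!chunk)
                return NULL;
            memset(chunk, 0, chunk_size);
            free_mem = chunk_size;
            total_alloc += chunk_size;
        }

        void *res = chunk;
        chunk += size;
        free_mem -= size;
        return res;
    }

    int main(void)
    {
        early_alloc(100);
        early_alloc(200);
        printf("backing memory reserved: %zu bytes\n", total_alloc);  /* one page so far */
        return 0;
    }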
|
/linux-6.12.1/drivers/net/ethernet/mellanox/mlx5/core/steering/ |
D | dr_icm_pool.c |
    323  enum mlx5dr_icm_chunk_size chunk_size,  in dr_icm_chunk_init() argument
    330  chunk->size = chunk_size;  in dr_icm_chunk_init()
    387  enum mlx5dr_icm_chunk_size chunk_size,  in dr_icm_handle_buddies_get_mem() argument
    399  chunk_size, seg);  in dr_icm_handle_buddies_get_mem()
    407  chunk_size);  in dr_icm_handle_buddies_get_mem()
    417  chunk_size);  in dr_icm_handle_buddies_get_mem()
    436  enum mlx5dr_icm_chunk_size chunk_size)  in mlx5dr_icm_alloc_chunk() argument
    443  if (chunk_size > pool->max_log_chunk_sz)  in mlx5dr_icm_alloc_chunk()
    448  ret = dr_icm_handle_buddies_get_mem(pool, chunk_size, &buddy, &seg);  in mlx5dr_icm_alloc_chunk()
    456  dr_icm_chunk_init(chunk, pool, chunk_size, buddy, seg);  in mlx5dr_icm_alloc_chunk()
    [all …]
|
D | dr_ptrn.c |
    82   u32 chunk_size;  in dr_ptrn_alloc_pattern() local
    85   chunk_size = ilog2(roundup_pow_of_two(num_of_actions));  in dr_ptrn_alloc_pattern()
    87   chunk_size = max_t(u32, chunk_size, DR_CHUNK_SIZE_8);  in dr_ptrn_alloc_pattern()
    89   chunk = mlx5dr_icm_alloc_chunk(mgr->ptrn_icm_pool, chunk_size);  in dr_ptrn_alloc_pattern()
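dr_ptrn_alloc_pattern() turns a number of modify-header actions into an ICM chunk order: round the count up to a power of two, take its log2, and clamp it to a minimum chunk order. A standalone sketch of that computation; the minimum order of 3 is an assumed stand-in for DR_CHUNK_SIZE_8:

    #include <stdint.h>
    #include <stdio.h>

    #define MIN_CHUNK_ORDER 3   /* assumed stand-in for DR_CHUNK_SIZE_8 */

    static uint32_t roundup_pow_of_two_u32(uint32_t n)
    {
        uint32_t p = 1;

        while (p < n)
            p <<= 1;
        return p;
    }

    static uint32_t ilog2_u32(uint32_t v)
    {
        uint32_t r = 0;

        while (v >>= 1)
            r++;
        return r;
    }

    static uint32_t chunk_order_for(uint32_t num_of_actions)
    {
        uint32_t order = ilog2_u32(roundup_pow_of_two_u32(num_of_actions));

        return order < MIN_CHUNK_ORDER ? MIN_CHUNK_ORDER : order;
    }

    int main(void)
    {
        printf("5 actions  -> order %u\n", chunk_order_for(5));    /* rounds up to 8  -> 3 */
        printf("33 actions -> order %u\n", chunk_order_for(33));   /* rounds up to 64 -> 6 */
        return 0;
    }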
|
/linux-6.12.1/drivers/net/ethernet/mellanox/mlxsw/ |
D | i2c.c |
    337  int off = mlxsw_i2c->cmd.mb_off_in, chunk_size, i, j;  in mlxsw_i2c_write() local
    351  chunk_size = (in_mbox_size > mlxsw_i2c->block_size) ?  in mlxsw_i2c_write()
    353  write_tran.len = MLXSW_I2C_ADDR_WIDTH + chunk_size;  in mlxsw_i2c_write()
    356  mlxsw_i2c->block_size * i, chunk_size);  in mlxsw_i2c_write()
    376  off += chunk_size;  in mlxsw_i2c_write()
    377  in_mbox_size -= chunk_size;  in mlxsw_i2c_write()
    416  int num, chunk_size, reg_size, i, j;  in mlxsw_i2c_cmd() local
    462  chunk_size = (reg_size > mlxsw_i2c->block_size) ?  in mlxsw_i2c_cmd()
    464  read_tran[1].len = chunk_size;  in mlxsw_i2c_cmd()
    486  off += chunk_size;  in mlxsw_i2c_cmd()
    [all …]
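mlxsw_i2c_write() pushes the input mailbox to the device in chunks no larger than the adapter's block size, bumping the device offset and shrinking the remaining size after each transfer. A simplified version of that loop with the I2C transaction replaced by a memcpy into a fake device buffer (block size and buffer layout invented for the example):

    #include <stdio.h>
    #include <string.h>

    #define BLOCK_SIZE 32u   /* stand-in for the adapter's block_size */

    /* Pretend "device": a flat buffer indexed by the mailbox offset. */
    static char device_mbox[256];

    static void write_mbox(const char *in_mbox, size_t in_mbox_size, size_t off)
    {
        while (in_mbox_size > 0) {
            size_t chunk_size = in_mbox_size > BLOCK_SIZE ? BLOCK_SIZE : in_mbox_size;

            /* The driver builds an i2c_msg of MLXSW_I2C_ADDR_WIDTH + chunk_size
             * bytes here; this sketch just copies into the fake device instead. */
            memcpy(device_mbox + off, in_mbox, chunk_size);

            in_mbox += chunk_size;
            off += chunk_size;
            in_mbox_size -= chunk_size;
        }
    }

    int main(void)
    {
        char payload[100];

        memset(payload, 0xab, sizeof(payload));
        write_mbox(payload, sizeof(payload), 0);
        printf("last byte written: 0x%02x\n", (unsigned char)device_mbox[99]);
        return 0;
    }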
|
/linux-6.12.1/drivers/rtc/ |
D | rtc-isl12026.c |
    327  size_t chunk_size, num_written;  in isl12026_nvm_write() local
    347  chunk_size = round_down(offset, ISL12026_PAGESIZE) +  in isl12026_nvm_write()
    349  chunk_size = min(bytes, chunk_size);  in isl12026_nvm_write()
    354  memcpy(payload + 2, v + num_written, chunk_size);  in isl12026_nvm_write()
    357  msgs[0].len = chunk_size + 2;  in isl12026_nvm_write()
    367  bytes -= chunk_size;  in isl12026_nvm_write()
    368  offset += chunk_size;  in isl12026_nvm_write()
    369  num_written += chunk_size;  in isl12026_nvm_write()
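isl12026_nvm_write() never lets a single I2C write cross an EEPROM page boundary: each chunk runs from the current offset to the end of the page containing it, capped by the bytes still to be written. A sketch of just that arithmetic, assuming a 16-byte page size for illustration:

    #include <stdio.h>

    #define PAGESIZE 16u   /* illustrative EEPROM page size */

    int main(void)
    {
        unsigned int offset = 10;     /* starting byte address in the EEPROM */
        unsigned int bytes = 40;      /* total bytes to write */
        unsigned int num_written = 0;

        while (bytes > 0) {
            /* Distance from offset to the end of the page it sits in... */
            unsigned int chunk_size = (offset / PAGESIZE) * PAGESIZE + PAGESIZE - offset;

            /* ...but never more than what is left to write. */
            if (chunk_size > bytes)
                chunk_size = bytes;

            printf("write %2u bytes at offset %2u\n", chunk_size, offset);

            bytes -= chunk_size;
            offset += chunk_size;
            num_written += chunk_size;
        }
        printf("total written: %u\n", num_written);
        return 0;
    }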
|
/linux-6.12.1/fs/nilfs2/ |
D | dir.c |
    113  unsigned int chunk_size = nilfs_chunk_size(dir);  in nilfs_check_folio() local
    121  if (limit & (chunk_size - 1))  in nilfs_check_folio()
    136  if (((offs + rec_len - 1) ^ offs) & ~(chunk_size-1))  in nilfs_check_folio()
    428  unsigned int chunk_size = nilfs_chunk_size(dir);  in nilfs_add_link() local
    457  rec_len = chunk_size;  in nilfs_add_link()
    458  de->rec_len = nilfs_rec_len_to_disk(chunk_size);  in nilfs_add_link()
    563  unsigned int chunk_size = nilfs_chunk_size(inode);  in nilfs_make_empty() local
    571  err = nilfs_prepare_chunk(folio, 0, chunk_size);  in nilfs_make_empty()
    577  memset(kaddr, 0, chunk_size);  in nilfs_make_empty()
    587  de->rec_len = nilfs_rec_len_to_disk(chunk_size - NILFS_DIR_REC_LEN(1));  in nilfs_make_empty()
    [all …]
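nilfs_check_folio() (line 136 above) rejects a directory entry whose record would straddle a chunk boundary: ((offs + rec_len - 1) ^ offs) & ~(chunk_size - 1) is non-zero exactly when the first and last byte of the record land in different chunks, given a power-of-two chunk size. The same test appears in ext2 below. A tiny demonstration:

    #include <stdbool.h>
    #include <stdio.h>

    static bool crosses_chunk(unsigned int offs, unsigned int rec_len, unsigned int chunk_size)
    {
        /* XOR the first and last byte offsets; any differing bits above the
         * chunk mask mean the two bytes live in different chunks. */
        return (((offs + rec_len - 1) ^ offs) & ~(chunk_size - 1)) != 0;
    }

    int main(void)
    {
        unsigned int chunk_size = 1024;   /* directory chunk size, a power of two */

        printf("%d\n", crosses_chunk(1000, 24, chunk_size));   /* 0: ends at 1023, same chunk */
        printf("%d\n", crosses_chunk(1000, 25, chunk_size));   /* 1: ends at 1024, next chunk */
        return 0;
    }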
|
/linux-6.12.1/arch/x86/kernel/cpu/mtrr/ |
D | cleanup.c |
    461  u64 chunk_size, u64 gran_size)  in x86_setup_var_mtrrs() argument
    470  var_state.chunk_sizek = chunk_size >> 10;  in x86_setup_var_mtrrs()
    577  mtrr_calc_range_state(u64 chunk_size, u64 gran_size,  in mtrr_calc_range_state() argument
    594  num_reg = x86_setup_var_mtrrs(range, nr_range, chunk_size, gran_size);  in mtrr_calc_range_state()
    602  result[i].chunk_sizek = chunk_size >> 10;  in mtrr_calc_range_state()
    674  u64 chunk_size, gran_size;  in mtrr_cleanup() local
    748  for (chunk_size = gran_size; chunk_size < (1ULL<<32);  in mtrr_cleanup()
    749  chunk_size <<= 1) {  in mtrr_cleanup()
    754  mtrr_calc_range_state(chunk_size, gran_size,  in mtrr_cleanup()
    774  chunk_size = result[i].chunk_sizek;  in mtrr_cleanup()
    [all …]
|
/linux-6.12.1/fs/ext2/ |
D | dir.c |
    103  unsigned chunk_size = ext2_chunk_size(dir);  in ext2_check_folio() local
    112  if (limit & (chunk_size - 1))  in ext2_check_folio()
    127  if (unlikely(((offs + rec_len - 1) ^ offs) & ~(chunk_size-1)))  in ext2_check_folio()
    481  unsigned chunk_size = ext2_chunk_size(dir);  in ext2_add_link() local
    510  rec_len = chunk_size;  in ext2_add_link()
    511  de->rec_len = ext2_rec_len_to_disk(chunk_size);  in ext2_add_link()
    620  unsigned chunk_size = ext2_chunk_size(inode);  in ext2_make_empty() local
    628  err = ext2_prepare_chunk(folio, 0, chunk_size);  in ext2_make_empty()
    634  memset(kaddr, 0, chunk_size);  in ext2_make_empty()
    644  de->rec_len = ext2_rec_len_to_disk(chunk_size - EXT2_DIR_REC_LEN(1));  in ext2_make_empty()
    [all …]
|
/linux-6.12.1/sound/soc/sof/ |
D | ipc4.c |
    423  size_t chunk_size;  in sof_ipc4_set_get_data() local
    459  chunk_size = payload_limit;  in sof_ipc4_set_get_data()
    461  chunk_size = remaining;  in sof_ipc4_set_get_data()
    473  tx.data_size = chunk_size;  in sof_ipc4_set_get_data()
    476  tx_size = chunk_size;  in sof_ipc4_set_get_data()
    481  rx.data_size = chunk_size;  in sof_ipc4_set_get_data()
    485  rx_size = chunk_size;  in sof_ipc4_set_get_data()
    509  if (rx_size < chunk_size) {  in sof_ipc4_set_get_data()
    510  chunk_size = rx_size;  in sof_ipc4_set_get_data()
    517  offset += chunk_size;  in sof_ipc4_set_get_data()
    [all …]
|
/linux-6.12.1/drivers/rpmsg/ |
D | qcom_glink_native.c |
    904  __le32 chunk_size;  in qcom_glink_rx_data() member
    907  unsigned int chunk_size;  in qcom_glink_rx_data() local
    920  chunk_size = le32_to_cpu(hdr.chunk_size);  in qcom_glink_rx_data()
    923  if (avail < sizeof(hdr) + chunk_size) {  in qcom_glink_rx_data()
    936  liid, chunk_size, left_size,  in qcom_glink_rx_data()
    952  intent->data = kmalloc(chunk_size + left_size,  in qcom_glink_rx_data()
    960  intent->size = chunk_size + left_size;  in qcom_glink_rx_data()
    981  if (intent->size - intent->offset < chunk_size) {  in qcom_glink_rx_data()
    989  sizeof(hdr), chunk_size);  in qcom_glink_rx_data()
    990  intent->offset += chunk_size;  in qcom_glink_rx_data()
    [all …]
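qcom_glink_rx_data() reassembles a message that arrives in pieces: each fragment header carries its chunk size plus how many bytes are still to come, so the first fragment can size the receive buffer as chunk_size + left_size, and later fragments are appended at a running offset, with a bounds check against a lying sender. A userspace sketch of that reassembly (the fragment format and the return convention are invented for the example):

    #include <stdio.h>
    #include <stdlib.h>
    #include <string.h>

    struct fragment {
        unsigned int chunk_size;   /* bytes carried by this fragment */
        unsigned int left_size;    /* bytes still to come after it */
        const char *data;
    };

    struct intent {
        char *data;
        size_t size;
        size_t offset;
    };

    static int rx_fragment(struct intent *intent, const struct fragment *frag)
    {
        if (!intent->data) {
            /* First fragment: size the buffer for this chunk plus the rest. */
            intent->size = frag->chunk_size + frag->left_size;
            intent->data = malloc(intent->size);
            if (!intent->data)
                return -1;
        }

        if (intent->size - intent->offset < frag->chunk_size)
            return -1;                           /* sender lied about the sizes */

        memcpy(intent->data + intent->offset, frag->data, frag->chunk_size);
        intent->offset += frag->chunk_size;
        return frag->left_size ? 1 : 0;          /* 0 once the message is complete */
    }

    int main(void)
    {
        struct intent intent = { 0 };
        struct fragment frags[] = {
            { 6, 7, "hello " },
            { 7, 0, "glink!\n" },
        };

        for (size_t i = 0; i < 2; i++)
            if (rx_fragment(&intent, &frags[i]) < 0)
                return 1;

        fwrite(intent.data, 1, intent.offset, stdout);
        free(intent.data);
        return 0;
    }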
|
/linux-6.12.1/include/drm/ |
D | drm_buddy.h |
    90   u64 chunk_size;  member
    142  return mm->chunk_size << drm_buddy_block_order(block);  in drm_buddy_block_size()
    145  int drm_buddy_init(struct drm_buddy *mm, u64 size, u64 chunk_size);
|
/linux-6.12.1/drivers/net/wireless/marvell/libertas/ |
D | if_sdio.c |
    446  u32 chunk_size;  in if_sdio_prog_helper() local
    475  chunk_size = min_t(size_t, size, 60);  in if_sdio_prog_helper()
    477  *((__le32*)chunk_buffer) = cpu_to_le32(chunk_size);  in if_sdio_prog_helper()
    478  memcpy(chunk_buffer + 4, firmware, chunk_size);  in if_sdio_prog_helper()
    487  firmware += chunk_size;  in if_sdio_prog_helper()
    488  size -= chunk_size;  in if_sdio_prog_helper()
    542  u32 chunk_size;  in if_sdio_prog_real() local
    613  chunk_size = min_t(size_t, req_size, 512);  in if_sdio_prog_real()
    615  memcpy(chunk_buffer, firmware, chunk_size);  in if_sdio_prog_real()
    621  chunk_buffer, roundup(chunk_size, 32));  in if_sdio_prog_real()
    [all …]
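if_sdio_prog_helper() streams the helper firmware to the card in chunks of at most 60 bytes, each frame carrying a little-endian length word ahead of the payload, until the whole image has been sent. A sketch of the chunking and framing with the SDIO write replaced by a printf (the 60-byte limit comes from the listing above; everything else is illustrative):

    #include <stdint.h>
    #include <stdio.h>
    #include <string.h>

    #define MAX_CHUNK 60u   /* at most 60 payload bytes per helper frame */

    static void prog_helper(const uint8_t *firmware, size_t size)
    {
        uint8_t frame[4 + MAX_CHUNK];

        while (size > 0) {
            uint32_t chunk_size = size < MAX_CHUNK ? (uint32_t)size : MAX_CHUNK;

            /* 4-byte little-endian chunk length, then the payload. */
            frame[0] = chunk_size & 0xff;
            frame[1] = (chunk_size >> 8) & 0xff;
            frame[2] = (chunk_size >> 16) & 0xff;
            frame[3] = (chunk_size >> 24) & 0xff;
            memcpy(frame + 4, firmware, chunk_size);

            /* sdio_writesb() would push 'frame' to the card's I/O port here. */
            printf("frame: %u payload bytes\n", chunk_size);

            firmware += chunk_size;
            size -= chunk_size;
        }
    }

    int main(void)
    {
        uint8_t image[150] = { 0 };

        prog_helper(image, sizeof(image));   /* sent as 60 + 60 + 30 bytes */
        return 0;
    }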
|
/linux-6.12.1/drivers/gpu/drm/i915/ |
D | i915_ttm_buddy_manager.c |
    75   GEM_BUG_ON(min_page_size < mm->chunk_size);  in i915_ttm_buddy_man_alloc()
    83   n_pages = size >> ilog2(mm->chunk_size);  in i915_ttm_buddy_man_alloc()
    286  u64 chunk_size)  in i915_ttm_buddy_man_init() argument
    296  err = drm_buddy_init(&bman->mm, size, chunk_size);  in i915_ttm_buddy_man_init()
    302  GEM_BUG_ON(default_page_size < chunk_size);  in i915_ttm_buddy_man_init()
    384  size, mm->chunk_size,  in i915_ttm_buddy_man_reserve()
|