/linux-6.12.1/arch/arm/mm/
  cache-l2x0.c
    1609  unsigned long new_start, new_end;  in bcm_inv_range() local
    1616  new_start = bcm_l2_phys_addr(start);  in bcm_inv_range()
    1621  l2c210_inv_range(new_start, new_end);  in bcm_inv_range()
    1628  l2c210_inv_range(new_start,  in bcm_inv_range()
    1636  unsigned long new_start, new_end;  in bcm_clean_range() local
    1643  new_start = bcm_l2_phys_addr(start);  in bcm_clean_range()
    1648  l2c210_clean_range(new_start, new_end);  in bcm_clean_range()
    1655  l2c210_clean_range(new_start,  in bcm_clean_range()
    1663  unsigned long new_start, new_end;  in bcm_flush_range() local
    1675  new_start = bcm_l2_phys_addr(start);  in bcm_flush_range()
    [all …]
/linux-6.12.1/drivers/nvdimm/
  badrange.c
    150  u64 new_start = clr_end + 1;  in badrange_forget() local
    151  u64 new_len = bre_end - new_start + 1;  in badrange_forget()
    154  alloc_and_append_badrange_entry(badrange, new_start,  in badrange_forget()
/linux-6.12.1/drivers/gpu/drm/
  drm_buddy.c
    878  u64 new_start;  in drm_buddy_block_trim() local
    903  new_start = block_start;  in drm_buddy_block_trim()
    905  new_start = *start;  in drm_buddy_block_trim()
    907  if (new_start < block_start)  in drm_buddy_block_trim()
    910  if (!IS_ALIGNED(new_start, mm->chunk_size))  in drm_buddy_block_trim()
    913  if (range_overflows(new_start, new_size, block_end))  in drm_buddy_block_trim()
    928  err = __alloc_range(mm, &dfs, new_start, new_size, blocks, NULL);  in drm_buddy_block_trim()
/linux-6.12.1/drivers/media/pci/bt8xx/
  bttv-vbi.c
    329  __s32 new_start;  in bttv_g_fmt_vbi_cap() local
    331  new_start = frt->fmt.vbi.start[i] + tvnorm->vbistart[i]  in bttv_g_fmt_vbi_cap()
    334  frt->fmt.vbi.start[i] = min(new_start, max_end - 1);  in bttv_g_fmt_vbi_cap()
/linux-6.12.1/drivers/iommu/iommufd/
  io_pagetable.c
    1181  unsigned long new_start = iova + 1;  in iopt_area_split() local
    1196  if (new_start & (alignment - 1) ||  in iopt_area_split()
    1197  iopt_area_start_byte(area, new_start) & (alignment - 1))  in iopt_area_split()
    1232  (new_start - 1) - start_iova + 1,  in iopt_area_split()
    1237  rc = iopt_insert_area(iopt, rhs, area->pages, new_start,  in iopt_area_split()
    1238  iopt_area_start_byte(area, new_start),  in iopt_area_split()
    1239  last_iova - new_start + 1, area->iommu_prot);  in iopt_area_split()
/linux-6.12.1/drivers/gpu/drm/radeon/
  radeon_ttm.c
    139  uint64_t old_start, new_start;  in radeon_move_blit() local
    147  new_start = (u64)new_mem->start << PAGE_SHIFT;  in radeon_move_blit()
    162  new_start += rdev->mc.vram_start;  in radeon_move_blit()
    165  new_start += rdev->mc.gtt_start;  in radeon_move_blit()
    179  fence = radeon_copy(rdev, old_start, new_start, num_pages, bo->base.resv);  in radeon_move_blit()
/linux-6.12.1/mm/
  mmap.c
    1008  unsigned long new_start;  in acct_stack_growth() local
    1023  new_start = (vma->vm_flags & VM_GROWSUP) ? vma->vm_start :  in acct_stack_growth()
    1025  if (is_hugepage_only_range(vma->vm_mm, new_start, size))  in acct_stack_growth()
    2340  unsigned long new_start = old_start - shift;  in relocate_vma_down() local
    2342  VMA_ITERATOR(vmi, mm, new_start);  in relocate_vma_down()
    2343  VMG_STATE(vmg, mm, &vmi, new_start, old_end, 0, vma->vm_pgoff);  in relocate_vma_down()
    2347  BUG_ON(new_start > new_end);  in relocate_vma_down()
    2369  vma, new_start, length, false, true))  in relocate_vma_down()
    2395  return vma_shrink(&vmi, vma, new_start, new_end, vma->vm_pgoff);  in relocate_vma_down()
  readahead.c
    732  loff_t new_start, size_t new_len)  in readahead_expand() argument
    741  new_index = new_start / PAGE_SIZE;  in readahead_expand()
    774  new_len += new_start - readahead_pos(ractl);  in readahead_expand()
  z3fold.c
    725  unsigned short new_start = TOTAL_CHUNKS - zhdr->last_chunks -  in z3fold_compact_page() local
    727  mchunk_memmove(zhdr, new_start);  in z3fold_compact_page()
    728  zhdr->start_middle = new_start;  in z3fold_compact_page()
/linux-6.12.1/kernel/bpf/
  log.c
    93  u64 new_end, new_start;  in bpf_verifier_vlog() local
    98  new_start = new_end - log->len_total;  in bpf_verifier_vlog()
    100  new_start = log->start_pos;  in bpf_verifier_vlog()
    102  log->start_pos = new_start;  in bpf_verifier_vlog()
/linux-6.12.1/fs/bcachefs/
  btree_update.c
    194  struct bpos new_start = bkey_start_pos(new.k);  in bch2_trans_update_extent_overwrite() local
    195  unsigned front_split = bkey_lt(bkey_start_pos(old.k), new_start);  in bch2_trans_update_extent_overwrite()
    216  bch2_cut_back(new_start, update);  in bch2_trans_update_extent_overwrite()
    232  bch2_cut_front(new_start, update);  in bch2_trans_update_extent_overwrite()
/linux-6.12.1/drivers/md/dm-vdo/indexer/
  delta-index.c
    158  u64 new_start;  in rebalance_delta_zone() local
    163  new_start = delta_zone->new_offsets[first];  in rebalance_delta_zone()
    164  if (delta_list->start != new_start) {  in rebalance_delta_zone()
    169  delta_list->start = new_start;  in rebalance_delta_zone()
    184  new_start = delta_zone->new_offsets[middle];  in rebalance_delta_zone()
    190  if (new_start > delta_list->start) {  in rebalance_delta_zone()
/linux-6.12.1/fs/ext4/
  mballoc.c
    4233  ext4_mb_pa_rb_next_iter(ext4_lblk_t new_start, ext4_lblk_t cur_start, struct rb_node *node)  in ext4_mb_pa_rb_next_iter() argument
    4235  if (new_start < cur_start)  in ext4_mb_pa_rb_next_iter()
    4286  ext4_lblk_t new_start, tmp_pa_start, right_pa_start = -1;  in ext4_mb_pa_adjust_overlap() local
    4289  new_start = *start;  in ext4_mb_pa_adjust_overlap()
    4397  if (left_pa_end > new_start)  in ext4_mb_pa_adjust_overlap()
    4398  new_start = left_pa_end;  in ext4_mb_pa_adjust_overlap()
    4408  ext4_mb_pa_assert_overlap(ac, new_start, new_end);  in ext4_mb_pa_adjust_overlap()
    4410  *start = new_start;  in ext4_mb_pa_adjust_overlap()
    5133  ext4_lblk_t iter_start, new_start;  in ext4_mb_pa_rb_insert() local
    5141  new_start = new_pa->pa_lstart;  in ext4_mb_pa_rb_insert()
    [all …]
/linux-6.12.1/fs/orangefs/
  inode.c
    246  loff_t new_start = readahead_pos(rac);  in orangefs_readahead() local
    259  readahead_expand(rac, new_start, new_len);  in orangefs_readahead()
/linux-6.12.1/drivers/gpu/drm/amd/amdkfd/
  kfd_svm.c
    954  uint64_t new_start, uint64_t new_n, uint64_t *new_vram_pages)  in svm_range_split_array() argument
    965  d = (new_start - old_start) * size;  in svm_range_split_array()
    970  d = (new_start == old_start) ? new_n * size : 0;  in svm_range_split_array()
    1158  svm_range_split_head(struct svm_range *prange, uint64_t new_start,  in svm_range_split_head() argument
    1162  int r = svm_range_split(prange, new_start, prange->last, &head);  in svm_range_split_head()
    1166  if (!IS_ALIGNED(new_start, 1UL << prange->granularity))  in svm_range_split_head()
/linux-6.12.1/drivers/gpu/drm/i915/gt/uc/
  intel_guc_submission.c
    1182  __extend_last_switch(struct intel_guc *guc, u64 *prev_start, u32 new_start)  in __extend_last_switch() argument
    1187  if (new_start == lower_32_bits(*prev_start))  in __extend_last_switch()
    1203  if (new_start < gt_stamp_last &&  in __extend_last_switch()
    1204  (new_start - gt_stamp_last) <= POLL_TIME_CLKS)  in __extend_last_switch()
    1207  if (new_start > gt_stamp_last &&  in __extend_last_switch()
    1208  (gt_stamp_last - new_start) <= POLL_TIME_CLKS && gt_stamp_hi)  in __extend_last_switch()
    1211  *prev_start = ((u64)gt_stamp_hi << 32) | new_start;  in __extend_last_switch()
/linux-6.12.1/include/linux/
  pagemap.h
    1373  loff_t new_start, size_t new_len);
/linux-6.12.1/drivers/net/ethernet/netronome/nfp/bpf/
  jit.c
    4330  s16 new_start = range_start;  in nfp_bpf_opt_pkt_cache() local
    4336  new_start = off;  in nfp_bpf_opt_pkt_cache()
    4349  if (new_end - new_start <= 64) {  in nfp_bpf_opt_pkt_cache()
    4351  range_start = new_start;  in nfp_bpf_opt_pkt_cache()
/linux-6.12.1/fs/ocfs2/
  alloc.c
    5827  unsigned int new_start)  in ocfs2_truncate_log_can_coalesce() argument
    5840  return current_tail == new_start;  in ocfs2_truncate_log_can_coalesce()