Lines Matching refs:cur_offset
2019 u64 cur_offset = start; in run_delalloc_nocow() local
2041 while (cur_offset <= end) { in run_delalloc_nocow()
2054 cur_offset, 0); in run_delalloc_nocow()
2108 if (found_key.offset > cur_offset) { in run_delalloc_nocow()
2133 if (extent_end <= cur_offset) { in run_delalloc_nocow()
2138 nocow_args.start = cur_offset; in run_delalloc_nocow()
2159 cow_start = cur_offset; in run_delalloc_nocow()
2160 cur_offset = extent_end; in run_delalloc_nocow()
2161 if (cur_offset > end) in run_delalloc_nocow()
2184 nocow_end = cur_offset + nocow_args.file_extent.num_bytes - 1; in run_delalloc_nocow()
2185 lock_extent(&inode->io_tree, cur_offset, nocow_end, &cached_state); in run_delalloc_nocow()
2191 em = btrfs_create_io_em(inode, cur_offset, in run_delalloc_nocow()
2195 unlock_extent(&inode->io_tree, cur_offset, in run_delalloc_nocow()
2204 ordered = btrfs_alloc_ordered_extent(inode, cur_offset, in run_delalloc_nocow()
2212 btrfs_drop_extent_map_range(inode, cur_offset, in run_delalloc_nocow()
2215 unlock_extent(&inode->io_tree, cur_offset, in run_delalloc_nocow()
2230 extent_clear_unlock_delalloc(inode, cur_offset, nocow_end, in run_delalloc_nocow()
2236 cur_offset = extent_end; in run_delalloc_nocow()
2248 if (cur_offset <= end && cow_start == (u64)-1) in run_delalloc_nocow()
2249 cow_start = cur_offset; in run_delalloc_nocow()
2252 cur_offset = end; in run_delalloc_nocow()
2269 cur_offset = cow_start; in run_delalloc_nocow()
2275 if (cur_offset < end) { in run_delalloc_nocow()
2278 lock_extent(&inode->io_tree, cur_offset, end, &cached); in run_delalloc_nocow()
2279 extent_clear_unlock_delalloc(inode, cur_offset, end, in run_delalloc_nocow()
2286 btrfs_qgroup_free_data(inode, NULL, cur_offset, end - cur_offset + 1, NULL); in run_delalloc_nocow()
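Taken together, the run_delalloc_nocow() references above describe a single scan: cur_offset walks the delalloc range [start, end] one file extent at a time, cow_start remembers where a run of must-COW bytes began, and that run is resolved either when a NOCOW-capable extent interrupts it or when the scan reaches the end of the range. The following is a minimal userspace sketch of just that cur_offset/cow_start bookkeeping, under invented data: struct toy_extent, its nocow flag, flush_cow() and run_nocow_range() are illustrative stand-ins, not btrfs APIs, and the real function's tree search, extent locking, ordered-extent setup and error paths are all omitted.

/*
 * Toy model of the cur_offset / cow_start bookkeeping in
 * run_delalloc_nocow().  All names and data here are made up.
 */
#include <stdio.h>
#include <stdint.h>

typedef uint64_t u64;

struct toy_extent {
        u64 start;
        u64 end;        /* inclusive */
        int nocow;      /* 1: may be written in place, 0: must be COWed */
};

static void flush_cow(u64 cow_start, u64 cow_end)
{
        printf("  COW   [%llu, %llu]\n",
               (unsigned long long)cow_start, (unsigned long long)cow_end);
}

static void run_nocow_range(u64 start, u64 end)
{
        printf("  NOCOW [%llu, %llu]\n",
               (unsigned long long)start, (unsigned long long)end);
}

int main(void)
{
        /* Pretend these came from a tree search at cur_offset. */
        const struct toy_extent extents[] = {
                { 0,     4095,  1 },
                { 4096,  12287, 0 },
                { 12288, 16383, 1 },
        };
        const u64 start = 0, end = 16383;
        u64 cur_offset = start;
        u64 cow_start = (u64)-1;
        size_t i = 0;

        while (cur_offset <= end && i < sizeof(extents) / sizeof(extents[0])) {
                const struct toy_extent *ext = &extents[i++];

                if (!ext->nocow) {
                        /* Defer: remember where the COW run began. */
                        if (cow_start == (u64)-1)
                                cow_start = cur_offset;
                        cur_offset = ext->end + 1;
                        continue;
                }

                /* A NOCOW extent ends any pending COW run before it. */
                if (cow_start != (u64)-1) {
                        flush_cow(cow_start, cur_offset - 1);
                        cow_start = (u64)-1;
                }
                run_nocow_range(cur_offset, ext->end);
                cur_offset = ext->end + 1;
        }

        /* Anything left past the last NOCOW extent falls back to COW. */
        if (cur_offset <= end && cow_start == (u64)-1)
                cow_start = cur_offset;
        if (cow_start != (u64)-1)
                flush_cow(cow_start, end);
        return 0;
}

Deferring COW ranges through cow_start rather than handling them immediately lets adjacent must-COW extents collapse into one fallback call, which is why the real loop only resolves cow_start at NOCOW boundaries and once more at the very end.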
4929 u64 cur_offset; in btrfs_cont_expand() local
4947 cur_offset = hole_start; in btrfs_cont_expand()
4949 em = btrfs_get_extent(inode, NULL, cur_offset, block_end - cur_offset); in btrfs_cont_expand()
4957 hole_size = last_byte - cur_offset; in btrfs_cont_expand()
4962 ret = maybe_insert_hole(inode, cur_offset, hole_size); in btrfs_cont_expand()
4967 cur_offset, hole_size); in btrfs_cont_expand()
4973 btrfs_drop_extent_map_range(inode, cur_offset, in btrfs_cont_expand()
4974 cur_offset + hole_size - 1, in btrfs_cont_expand()
4979 hole_em->start = cur_offset; in btrfs_cont_expand()
4991 cur_offset, hole_size); in btrfs_cont_expand()
4998 cur_offset = last_byte; in btrfs_cont_expand()
4999 if (cur_offset >= block_end) in btrfs_cont_expand()
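In btrfs_cont_expand(), cur_offset plays a simpler role: it sweeps from the old EOF (hole_start) toward the new block-aligned size (block_end), asks btrfs_get_extent() what backs each position, inserts a hole wherever nothing does, and then jumps to last_byte. Below is a compact model of that walk; the block size, the toy_lookup() helper standing in for btrfs_get_extent(), and the hole/extent layout are all invented for the example, and prealloc handling, extent-map dropping and error handling are left out.

/*
 * Toy model of the hole-filling walk in btrfs_cont_expand().
 * toy_lookup() is a stand-in for btrfs_get_extent().
 */
#include <stdio.h>
#include <stdint.h>

typedef uint64_t u64;

#define BLOCK_SIZE 4096ULL

struct toy_em {
        u64 start;
        u64 len;
        int is_hole;
};

/* Pretend lookup: data up to 8K, then a hole to the end of the range. */
static struct toy_em toy_lookup(u64 offset, u64 max_len)
{
        struct toy_em em = { .start = offset };

        if (offset < 8192) {
                em.len = 8192 - offset;
                em.is_hole = 0;
        } else {
                em.len = max_len;
                em.is_hole = 1;
        }
        return em;
}

int main(void)
{
        const u64 hole_start = 4096;    /* old i_size, block aligned */
        const u64 block_end = 32768;    /* new size, block aligned */
        u64 cur_offset = hole_start;

        while (cur_offset < block_end) {
                struct toy_em em = toy_lookup(cur_offset,
                                              block_end - cur_offset);
                u64 last_byte = em.start + em.len;
                u64 hole_size;

                /* Round the end of this mapping up to a block boundary. */
                last_byte = (last_byte + BLOCK_SIZE - 1) & ~(BLOCK_SIZE - 1);
                if (last_byte > block_end)
                        last_byte = block_end;
                hole_size = last_byte - cur_offset;

                if (em.is_hole)
                        printf("insert hole at %llu, size %llu\n",
                               (unsigned long long)cur_offset,
                               (unsigned long long)hole_size);
                /* else: region already backed by an extent, nothing to do */

                cur_offset = last_byte;
        }
        return 0;
}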
8770 u64 cur_offset = start; in __btrfs_prealloc_file_range() local
8807 &ins, cur_offset); in __btrfs_prealloc_file_range()
8824 btrfs_drop_extent_map_range(BTRFS_I(inode), cur_offset, in __btrfs_prealloc_file_range()
8825 cur_offset + ins.offset - 1, false); in __btrfs_prealloc_file_range()
8830 em->start = cur_offset; in __btrfs_prealloc_file_range()
8843 cur_offset += ins.offset; in __btrfs_prealloc_file_range()
8851 (cur_offset > inode->i_size)) { in __btrfs_prealloc_file_range()
8852 if (cur_offset > actual_len) in __btrfs_prealloc_file_range()
8855 i_size = cur_offset; in __btrfs_prealloc_file_range()
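The __btrfs_prealloc_file_range() references show the third pattern: cur_offset starts at the requested offset, advances by ins.offset after each successful extent reservation, and once it passes the inode's current size, i_size is pulled forward but clamped to actual_len. The sketch below models only that loop; toy_reserve() and MAX_CHUNK stand in for btrfs_reserve_extent() and its allocator policy, and the FALLOC_FL_KEEP_SIZE check, transaction handling and on-disk item insertion done by the real code are omitted.

/*
 * Toy model of the allocation loop in __btrfs_prealloc_file_range().
 * toy_reserve() is a stand-in for btrfs_reserve_extent(); ins.offset
 * carries the length of each allocation, as in the real struct btrfs_key
 * usage.
 */
#include <stdio.h>
#include <stdint.h>

typedef uint64_t u64;

struct toy_key {
        u64 objectid;   /* allocated byte number (unused here) */
        u64 offset;     /* length of this allocation */
};

#define MAX_CHUNK (8ULL * 1024 * 1024)

static struct toy_key toy_reserve(u64 wanted)
{
        struct toy_key ins = { .objectid = 0 };

        ins.offset = wanted > MAX_CHUNK ? MAX_CHUNK : wanted;
        return ins;
}

int main(void)
{
        const u64 start = 0;
        const u64 num_bytes = 20ULL * 1024 * 1024;        /* aligned request */
        const u64 actual_len = 20ULL * 1024 * 1024 - 100; /* caller's length */
        u64 i_size = 4096;                                /* current size */
        u64 cur_offset = start;
        u64 remaining = num_bytes;

        while (remaining > 0) {
                struct toy_key ins = toy_reserve(remaining);

                printf("prealloc extent at file offset %llu, len %llu\n",
                       (unsigned long long)cur_offset,
                       (unsigned long long)ins.offset);

                remaining -= ins.offset;
                cur_offset += ins.offset;

                /* Grow i_size as the preallocated range extends the file. */
                if (cur_offset > i_size) {
                        u64 new_size = cur_offset;

                        if (new_size > actual_len)
                                new_size = actual_len;
                        i_size = new_size;
                }
        }

        printf("final i_size: %llu\n", (unsigned long long)i_size);
        return 0;
}

The clamp to actual_len exists because the reserved range is rounded up to block granularity while i_size must still reflect the caller's byte-exact length.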