Lines Matching full:bytes

41 	u64 bytes;  member
51 u64 *bytes, bool for_alloc);
56 u64 bytes, bool update_stats);
579 static int io_ctl_add_entry(struct btrfs_io_ctl *io_ctl, u64 offset, u64 bytes, in io_ctl_add_entry() argument
589 put_unaligned_le64(bytes, &entry->bytes); in io_ctl_add_entry()
663 entry->bytes = get_unaligned_le64(&e->bytes); in io_ctl_read_entry()
726 * bytes we can have, or whatever is less than that. in recalculate_thresholds()
827 if (!e->bytes) { in __load_free_space_cache()
911 const u64 bytes = info->bytes; in copy_free_space_cache() local
916 ret = btrfs_add_free_space(block_group, offset, bytes); in copy_free_space_cache()
920 u64 bytes = ctl->unit; in copy_free_space_cache() local
922 ret = search_bitmap(ctl, info, &offset, &bytes, false); in copy_free_space_cache()
924 bitmap_clear_bits(ctl, info, offset, bytes, true); in copy_free_space_cache()
927 bytes); in copy_free_space_cache()
1104 ret = io_ctl_add_entry(io_ctl, e->offset, e->bytes, in write_cache_extent_entries()
1134 trim_entry->bytes, NULL); in write_cache_extent_entries()
1567 static inline unsigned long bytes_to_bits(u64 bytes, u32 unit) in bytes_to_bits() argument
1569 return (unsigned long)(div_u64(bytes, unit)); in bytes_to_bits()
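
The helper at 1567-1569 is a plain division of a byte count by the bitmap granularity (ctl->unit). A minimal standalone sketch, with uint64_t standing in for u64 and ordinary division replacing div_u64():

#include <stdint.h>
#include <stdio.h>

static unsigned long bytes_to_bits(uint64_t bytes, uint32_t unit)
{
        /* One bit per `unit` bytes of space tracked by the bitmap. */
        return (unsigned long)(bytes / unit);
}

int main(void)
{
        /* With a 4 KiB granularity, a 1 MiB free range needs 256 bits. */
        printf("%lu\n", bytes_to_bits(1024 * 1024, 4096));
        return 0;
}
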
1657 * we've found already if it's larger, or we want to use ->bytes.
1659 * This matters because find_free_space() will skip entries whose ->bytes is in find_free_space()
1660 * less than the required bytes. So if we didn't search down this bitmap, we
1663 * ->max_extent_size set to 4K and ->bytes set to 1M. A second entry hasn't set
1664 * ->max_extent_size yet, has ->bytes set to 8K and it's contiguous. We will
1680 return entry->bytes; in get_max_extent_size()
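
The comment block at 1657-1665 explains why a bitmap entry caches the size of its largest contiguous run: find_free_space() filters on ->bytes, so a bitmap holding 1M of total free space but only 4K contiguous would otherwise keep looking usable for larger requests. The fallback at 1680 suggests a helper of roughly this shape; a hedged sketch, where every field name other than bytes and max_extent_size (both visible in this listing) is an assumption:

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

struct free_space_entry {
        uint64_t bytes;            /* total free bytes in the entry */
        uint64_t max_extent_size;  /* largest contiguous run, 0 if unknown */
        bool bitmap;               /* entry is a bitmap, not a plain extent */
};

static inline uint64_t get_max_extent_size(const struct free_space_entry *entry)
{
        /* Prefer the cached contiguous-run size once a search has set it;
         * otherwise ->bytes is the best (optimistic) answer available. */
        if (entry->bitmap && entry->max_extent_size)
                return entry->max_extent_size;
        return entry->bytes;
}

int main(void)
{
        struct free_space_entry e = {
                .bytes = 1048576, .max_extent_size = 4096, .bitmap = true,
        };

        /* Reports 4K, not 1M: only 4K of the bitmap is contiguous. */
        printf("%llu\n", (unsigned long long)get_max_extent_size(&e));
        return 0;
}
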
1700 * want a section that has at least bytes size and comes at or after the given
1757 prev->offset + prev->bytes > offset) in tree_search_offset()
1789 prev->offset + prev->bytes > offset) in tree_search_offset()
1794 } else if (entry->offset + entry->bytes > offset) in tree_search_offset()
1810 if (entry->offset + entry->bytes > offset) in tree_search_offset()
1829 ctl->discardable_bytes[BTRFS_STAT_CURR] -= info->bytes; in unlink_free_space()
1833 ctl->free_space -= info->bytes; in unlink_free_space()
1843 ASSERT(info->bytes || info->bitmap); in link_free_space()
1852 ctl->discardable_bytes[BTRFS_STAT_CURR] += info->bytes; in link_free_space()
1855 ctl->free_space += info->bytes; in link_free_space()
1867 * want to re-link it into our ctl bytes index. in relink_bitmap_entry()
1880 u64 offset, u64 bytes, bool update_stat) in bitmap_clear_bits() argument
1886 count = bytes_to_bits(bytes, ctl->unit); in bitmap_clear_bits()
1892 info->bytes -= bytes; in bitmap_clear_bits()
1907 ctl->discardable_bytes[BTRFS_STAT_CURR] -= bytes; in bitmap_clear_bits()
1911 ctl->free_space -= bytes; in bitmap_clear_bits()
1916 u64 bytes) in btrfs_bitmap_set_bits() argument
1922 count = bytes_to_bits(bytes, ctl->unit); in btrfs_bitmap_set_bits()
1929 * We set some bytes, we have no idea what the max extent size is in btrfs_bitmap_set_bits()
1933 info->bytes += bytes; in btrfs_bitmap_set_bits()
1934 ctl->free_space += bytes; in btrfs_bitmap_set_bits()
1947 ctl->discardable_bytes[BTRFS_STAT_CURR] += bytes; in btrfs_bitmap_set_bits()
1952 * If we cannot find a suitable extent, we will use bytes to record in search_bitmap()
1957 u64 *bytes, bool for_alloc) in search_bitmap() argument
1971 bitmap_info->max_extent_size < *bytes) { in search_bitmap()
1972 *bytes = bitmap_info->max_extent_size; in search_bitmap()
1978 bits = bytes_to_bits(*bytes, ctl->unit); in search_bitmap()
1999 *bytes = (u64)(found_bits) * ctl->unit; in search_bitmap()
2003 *bytes = (u64)(max_bits) * ctl->unit; in search_bitmap()
2004 bitmap_info->max_extent_size = *bytes; in search_bitmap()
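
Lines 1952-2004 show the contract of search_bitmap(): *bytes carries the requested size in, and when no run that large exists the function reports the longest run it did find (in *bytes and in the entry's max_extent_size) so later searches can skip the bitmap cheaply. A standalone sketch of that search strategy, with a plain bool array in place of the kernel bitmap helpers:

#include <stdbool.h>
#include <stddef.h>
#include <stdio.h>

/* Returns true and reports the first run of set bits at least want_bits
 * long; otherwise returns false and reports the longest run found, so
 * the caller can cache it as the bitmap's max extent size. */
static bool search_bitmap_sketch(const bool *bits, size_t nbits,
                                 size_t want_bits,
                                 size_t *start_out, size_t *bits_out)
{
        size_t run_start = 0, run_len = 0;
        size_t best_start = 0, best_len = 0;

        for (size_t i = 0; i < nbits; i++) {
                if (!bits[i]) {
                        run_len = 0;
                        continue;
                }
                if (run_len == 0)
                        run_start = i;
                run_len++;
                if (run_len > best_len) {
                        best_len = run_len;
                        best_start = run_start;
                }
                if (run_len >= want_bits) {
                        *start_out = run_start;
                        *bits_out = run_len;
                        return true;
                }
        }
        *start_out = best_start;
        *bits_out = best_len;
        return false;
}

int main(void)
{
        bool bits[16] = { 0, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 0, 0, 0, 1, 0 };
        size_t start, len;

        /* Asking for 6 bits fails; the longest run (5 bits at bit 6) is
         * reported back instead. */
        if (!search_bitmap_sketch(bits, 16, 6, &start, &len))
                printf("no fit; longest run is %zu bits at bit %zu\n",
                       len, start);
        return 0;
}
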
2009 /* Cache the size of the max extent in bytes */
2011 find_free_space(struct btrfs_free_space_ctl *ctl, u64 *offset, u64 *bytes, in find_free_space() argument
2042 * If we are using the bytes index then all subsequent entries in find_free_space()
2043 * in this tree are going to be < bytes, so simply set the max in find_free_space()
2049 if (entry->bytes < *bytes) { in find_free_space()
2060 if (*bytes >= align) { in find_free_space()
2071 * We don't break here if we're using the bytes index because we in find_free_space()
2077 if (entry->bytes < *bytes + align_off) { in find_free_space()
2085 u64 size = *bytes; in find_free_space()
2090 *bytes = size; in find_free_space()
2110 *bytes = entry->bytes - align_off; in find_free_space()
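
The find_free_space() fragments at 2049-2110 show its alignment handling: when the request is large enough to care about alignment (2060), the candidate entry's start is rounded up to the alignment boundary and the skipped bytes (align_off) are charged against the entry, so only entries with bytes + align_off available qualify (2077), and the usable size becomes entry->bytes - align_off (2110). A standalone sketch of the round-up arithmetic; ctl_start and the parameter names are assumptions for illustration:

#include <stdint.h>
#include <stdio.h>

/* Round `offset` up to the next multiple of `align`, measured from
 * `ctl_start`, and report how many leading bytes that skips. */
static uint64_t align_offset(uint64_t offset, uint64_t ctl_start,
                             uint64_t align, uint64_t *align_off)
{
        uint64_t aligned = ctl_start +
                ((offset - ctl_start + align - 1) / align) * align;

        *align_off = aligned - offset;
        return aligned;
}

int main(void)
{
        uint64_t off;
        uint64_t aligned = align_offset(1234, 0, 4096, &off);

        /* An entry starting at 1234, aligned to 4 KiB, actually starts
         * at 4096; the 2862 skipped bytes count against its size. */
        printf("aligned=%llu align_off=%llu\n",
               (unsigned long long)aligned, (unsigned long long)off);
        return 0;
}
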
2121 info->bytes = 0; in add_new_bitmap()
2138 if (bitmap_info->bytes && !btrfs_free_space_trimmed(bitmap_info)) { in free_bitmap()
2141 ctl->discardable_bytes[BTRFS_STAT_CURR] -= bitmap_info->bytes; in free_bitmap()
2153 u64 *offset, u64 *bytes) in remove_from_bitmap() argument
2177 search_bytes = min(search_bytes, *bytes); in remove_from_bitmap()
2184 *bytes -= search_bytes; in remove_from_bitmap()
2186 if (*bytes) { in remove_from_bitmap()
2188 if (!bitmap_info->bytes) in remove_from_bitmap()
2192 * no entry after this bitmap, but we still have bytes to in remove_from_bitmap()
2222 } else if (!bitmap_info->bytes) in remove_from_bitmap()
2230 u64 bytes, enum btrfs_trim_state trim_state) in add_bytes_to_bitmap() argument
2243 ctl->discardable_bytes[BTRFS_STAT_CURR] += info->bytes; in add_bytes_to_bitmap()
2250 bytes_to_set = min(end - offset, bytes); in add_bytes_to_bitmap()
2271 if (!forced && info->bytes >= FORCE_EXTENT_THRESHOLD) in use_bitmap()
2286 if (info->bytes <= fs_info->sectorsize * 8) { in use_bitmap()
2318 u64 bytes, offset, bytes_added; in insert_into_bitmap() local
2322 bytes = info->bytes; in insert_into_bitmap()
2360 bytes, trim_state); in insert_into_bitmap()
2361 bytes -= bytes_added; in insert_into_bitmap()
2365 if (!bytes) { in insert_into_bitmap()
2379 bytes_added = add_bytes_to_bitmap(ctl, bitmap_info, offset, bytes, in insert_into_bitmap()
2381 bytes -= bytes_added; in insert_into_bitmap()
2385 if (!bytes) { in insert_into_bitmap()
2457 u64 bytes = info->bytes; in try_merge_free_space() local
2466 right_info = tree_search_offset(ctl, offset + bytes, 0, 0); in try_merge_free_space()
2479 info->bytes += right_info->bytes; in try_merge_free_space()
2486 left_info->offset + left_info->bytes == offset && in try_merge_free_space()
2490 info->bytes += left_info->bytes; in try_merge_free_space()
2505 const u64 end = info->offset + info->bytes; in steal_from_bitmap_to_end()
2507 u64 bytes; in steal_from_bitmap_to_end() local
2517 bytes = (j - i) * ctl->unit; in steal_from_bitmap_to_end()
2518 info->bytes += bytes; in steal_from_bitmap_to_end()
2524 bitmap_clear_bits(ctl, bitmap, end, bytes, update_stat); in steal_from_bitmap_to_end()
2526 if (!bitmap->bytes) in steal_from_bitmap_to_end()
2541 u64 bytes; in steal_from_bitmap_to_front() local
2567 bytes = (i + 1) * ctl->unit; in steal_from_bitmap_to_front()
2569 bytes = (i - prev_j) * ctl->unit; in steal_from_bitmap_to_front()
2571 info->offset -= bytes; in steal_from_bitmap_to_front()
2572 info->bytes += bytes; in steal_from_bitmap_to_front()
2578 bitmap_clear_bits(ctl, bitmap, info->offset, bytes, update_stat); in steal_from_bitmap_to_front()
2580 if (!bitmap->bytes) in steal_from_bitmap_to_front()
2623 u64 offset, u64 bytes, in __btrfs_add_free_space() argument
2630 u64 filter_bytes = bytes; in __btrfs_add_free_space()
2639 info->bytes = bytes; in __btrfs_add_free_space()
2670 filter_bytes = max(filter_bytes, info->bytes); in __btrfs_add_free_space()
2800 u64 offset, u64 bytes) in btrfs_remove_free_space() argument
2820 offset + bytes) { in btrfs_remove_free_space()
2822 offset + bytes - block_group->start; in btrfs_remove_free_space()
2831 if (!bytes) in btrfs_remove_free_space()
2857 u64 to_free = min(bytes, info->bytes); in btrfs_remove_free_space()
2859 info->bytes -= to_free; in btrfs_remove_free_space()
2861 if (info->bytes) { in btrfs_remove_free_space()
2869 bytes -= to_free; in btrfs_remove_free_space()
2872 u64 old_end = info->bytes + info->offset; in btrfs_remove_free_space()
2874 info->bytes = offset - info->offset; in btrfs_remove_free_space()
2880 /* Not enough bytes in this entry to satisfy us */ in btrfs_remove_free_space()
2881 if (old_end < offset + bytes) { in btrfs_remove_free_space()
2882 bytes -= old_end - offset; in btrfs_remove_free_space()
2885 } else if (old_end == offset + bytes) { in btrfs_remove_free_space()
2892 offset + bytes, in btrfs_remove_free_space()
2893 old_end - (offset + bytes), in btrfs_remove_free_space()
2900 ret = remove_from_bitmap(ctl, info, &offset, &bytes); in btrfs_remove_free_space()
2913 u64 bytes) in btrfs_dump_free_space() argument
2936 if (info->bytes >= bytes && !block_group->ro) in btrfs_dump_free_space()
2938 btrfs_crit(fs_info, "entry offset %llu, bytes %llu, bitmap %s", in btrfs_dump_free_space()
2939 info->offset, info->bytes, in btrfs_dump_free_space()
2946 "%d free space entries at or bigger than %llu bytes", in btrfs_dump_free_space()
2947 count, bytes); in btrfs_dump_free_space()
3011 entry->bytes; in __btrfs_return_cluster_to_free_space()
3021 entry->bytes; in __btrfs_return_cluster_to_free_space()
3085 u64 offset, u64 bytes, u64 empty_size, in btrfs_find_space_for_alloc() argument
3092 u64 bytes_search = bytes + empty_size; in btrfs_find_space_for_alloc()
3110 bitmap_clear_bits(ctl, entry, offset, bytes, true); in btrfs_find_space_for_alloc()
3113 atomic64_add(bytes, &discard_ctl->discard_bytes_saved); in btrfs_find_space_for_alloc()
3115 if (!entry->bytes) in btrfs_find_space_for_alloc()
3124 atomic64_add(bytes, &discard_ctl->discard_bytes_saved); in btrfs_find_space_for_alloc()
3126 entry->offset = offset + bytes; in btrfs_find_space_for_alloc()
3127 WARN_ON(entry->bytes < bytes + align_gap_len); in btrfs_find_space_for_alloc()
3129 entry->bytes -= bytes + align_gap_len; in btrfs_find_space_for_alloc()
3130 if (!entry->bytes) in btrfs_find_space_for_alloc()
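
Lines 3085-3130 show what happens when an allocation is carved out of the front of a plain extent entry: the entry keeps everything past the returned range (offset + bytes), both the allocation and the alignment gap in front of it come off ->bytes, and an empty entry is removed. A hedged standalone sketch of that front-carving step; the struct and names are simplified, and handing the gap back as free space is not part of the matched lines, only implied by align_gap_len being subtracted:

#include <assert.h>
#include <stdint.h>
#include <stdio.h>

struct extent_entry {
        uint64_t offset;
        uint64_t bytes;
};

/* Carve [ret, ret + bytes) out of an entry that starts at
 * ret - align_gap_len; the gap size is returned so the caller can
 * account for it separately. */
static uint64_t carve_front(struct extent_entry *e, uint64_t ret,
                            uint64_t bytes, uint64_t align_gap_len)
{
        assert(e->offset == ret - align_gap_len);
        assert(e->bytes >= bytes + align_gap_len);

        e->offset = ret + bytes;
        e->bytes -= bytes + align_gap_len;
        return align_gap_len;
}

int main(void)
{
        struct extent_entry e = { .offset = 1234, .bytes = 100000 };
        uint64_t gap = carve_front(&e, 4096, 8192, 4096 - 1234);

        printf("entry now [%llu, +%llu), gap of %llu bytes\n",
               (unsigned long long)e.offset, (unsigned long long)e.bytes,
               (unsigned long long)gap);
        return 0;
}
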
3191 u64 bytes, u64 min_start, in btrfs_alloc_from_bitmap() argument
3197 u64 search_bytes = bytes; in btrfs_alloc_from_bitmap()
3201 search_bytes = bytes; in btrfs_alloc_from_bitmap()
3211 bitmap_clear_bits(ctl, entry, ret, bytes, false); in btrfs_alloc_from_bitmap()
3217 * given a cluster, try to allocate 'bytes' from it, returns 0
3222 struct btrfs_free_cluster *cluster, u64 bytes, in btrfs_alloc_from_cluster() argument
3235 if (bytes > cluster->max_size) in btrfs_alloc_from_cluster()
3247 if (entry->bytes < bytes) in btrfs_alloc_from_cluster()
3251 if (entry->bytes < bytes || in btrfs_alloc_from_cluster()
3263 cluster, entry, bytes, in btrfs_alloc_from_cluster()
3274 cluster->window_start += bytes; in btrfs_alloc_from_cluster()
3278 entry->offset += bytes; in btrfs_alloc_from_cluster()
3279 entry->bytes -= bytes; in btrfs_alloc_from_cluster()
3293 atomic64_add(bytes, &discard_ctl->discard_bytes_saved); in btrfs_alloc_from_cluster()
3295 ctl->free_space -= bytes; in btrfs_alloc_from_cluster()
3297 ctl->discardable_bytes[BTRFS_STAT_CURR] -= bytes; in btrfs_alloc_from_cluster()
3300 if (entry->bytes == 0) { in btrfs_alloc_from_cluster()
3323 u64 offset, u64 bytes, in btrfs_bitmap_cluster() argument
3341 want_bits = bytes_to_bits(bytes, ctl->unit); in btrfs_bitmap_cluster()
3410 * Try to find a cluster with at least bytes total bytes, at least one
3416 struct list_head *bitmaps, u64 offset, u64 bytes, in setup_cluster_no_bitmap() argument
3438 while (entry->bitmap || entry->bytes < min_bytes) { in setup_cluster_no_bitmap()
3447 window_free = entry->bytes; in setup_cluster_no_bitmap()
3448 max_extent = entry->bytes; in setup_cluster_no_bitmap()
3462 if (entry->bytes < min_bytes) in setup_cluster_no_bitmap()
3466 window_free += entry->bytes; in setup_cluster_no_bitmap()
3467 if (entry->bytes > max_extent) in setup_cluster_no_bitmap()
3468 max_extent = entry->bytes; in setup_cluster_no_bitmap()
3471 if (window_free < bytes || max_extent < cont1_bytes) in setup_cluster_no_bitmap()
3487 if (entry->bitmap || entry->bytes < min_bytes) in setup_cluster_no_bitmap()
3493 total_size += entry->bytes; in setup_cluster_no_bitmap()
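
The setup_cluster_no_bitmap() fragments at 3438-3493 build a window of extent entries: entries smaller than min_bytes are skipped, window_free sums the candidates, max_extent tracks the largest one, and the window is rejected unless it covers the full request with at least one extent of cont1_bytes (3471). A standalone sketch of that acceptance test over a plain array of extent sizes:

#include <stdbool.h>
#include <stddef.h>
#include <stdint.h>

/* Accept a candidate window only if the usable extents add up to at
 * least `bytes` and at least one of them is `cont1_bytes` contiguous. */
static bool cluster_window_ok(const uint64_t *sizes, size_t n,
                              uint64_t min_bytes, uint64_t bytes,
                              uint64_t cont1_bytes)
{
        uint64_t window_free = 0, max_extent = 0;

        for (size_t i = 0; i < n; i++) {
                if (sizes[i] < min_bytes)
                        continue;       /* too small to contribute */
                window_free += sizes[i];
                if (sizes[i] > max_extent)
                        max_extent = sizes[i];
        }
        return window_free >= bytes && max_extent >= cont1_bytes;
}

int main(void)
{
        /* 256K + 128K + 512K + 64K = 960K total, largest extent 512K:
         * enough for a 768K request needing 256K contiguous. */
        uint64_t sizes[] = { 262144, 131072, 524288, 65536 };

        return cluster_window_ok(sizes, 4, 4096, 786432, 262144) ? 0 : 1;
}
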
3509 struct list_head *bitmaps, u64 offset, u64 bytes, in setup_cluster_bitmap() argument
3534 if (entry->bytes < bytes) in setup_cluster_bitmap()
3537 bytes, cont1_bytes, min_bytes); in setup_cluster_bitmap()
3551 * is to find at least bytes+empty_size.
3559 u64 offset, u64 bytes, u64 empty_size) in btrfs_find_space_cluster() argument
3576 cont1_bytes = bytes + empty_size; in btrfs_find_space_cluster()
3579 cont1_bytes = bytes; in btrfs_find_space_cluster()
3582 cont1_bytes = max(bytes, (bytes + empty_size) >> 2); in btrfs_find_space_cluster()
3592 if (ctl->free_space < bytes) { in btrfs_find_space_cluster()
3605 trace_btrfs_find_cluster(block_group, offset, bytes, empty_size, in btrfs_find_space_cluster()
3609 bytes + empty_size, in btrfs_find_space_cluster()
3613 offset, bytes + empty_size, in btrfs_find_space_cluster()
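
Lines 3576-3586 show the three ways btrfs_find_space_cluster() sizes cont1_bytes, the minimum contiguous extent it will insist on: the whole padded request, just the requested bytes, or at least a quarter of the padded request. Which branch corresponds to which mount option or block-group type is not visible in this listing, so the selector below is purely illustrative:

#include <stdint.h>

enum cluster_strictness {
        NO_FRAGMENTATION,       /* 3576: everything must be contiguous */
        SINGLE_EXTENT_OF_BYTES, /* 3579: one extent of the raw request */
        DENSE_ENOUGH,           /* 3582: at most ~4 fragments */
};

static uint64_t pick_cont1_bytes(enum cluster_strictness s,
                                 uint64_t bytes, uint64_t empty_size)
{
        uint64_t quarter = (bytes + empty_size) >> 2;

        switch (s) {
        case NO_FRAGMENTATION:
                return bytes + empty_size;
        case SINGLE_EXTENT_OF_BYTES:
                return bytes;
        case DENSE_ENOUGH:
        default:
                return bytes > quarter ? bytes : quarter;
        }
}

int main(void)
{
        /* A 64K request padded by 64K: the relaxed policy still insists
         * on at least 64K contiguous (max of bytes and 32K). */
        return pick_cont1_bytes(DENSE_ENOUGH, 65536, 65536) == 65536 ? 0 : 1;
}
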
3650 u64 *total_trimmed, u64 start, u64 bytes, in do_trimming() argument
3660 const u64 end = start + bytes; in do_trimming()
3675 ret = btrfs_discard_extent(fs_info, start, bytes, &trimmed); in do_trimming()
3689 __btrfs_add_free_space(block_group, start, bytes, trim_state); in do_trimming()
3723 u64 bytes; in trim_no_bitmap() local
3753 extent_bytes = entry->bytes; in trim_no_bitmap()
3757 bytes = entry->bytes; in trim_no_bitmap()
3758 if (bytes < minlen) { in trim_no_bitmap()
3765 * Let bytes = BTRFS_MAX_DISCARD_SIZE + X. in trim_no_bitmap()
3770 bytes >= (max_discard_size + in trim_no_bitmap()
3772 bytes = max_discard_size; in trim_no_bitmap()
3775 entry->bytes -= max_discard_size; in trim_no_bitmap()
3782 bytes = min(extent_start + extent_bytes, end) - start; in trim_no_bitmap()
3783 if (bytes < minlen) { in trim_no_bitmap()
3795 trim_entry.bytes = extent_bytes; in trim_no_bitmap()
3799 ret = do_trimming(block_group, total_trimmed, start, bytes, in trim_no_bitmap()
3803 block_group->discard_cursor = start + bytes; in trim_no_bitmap()
3807 start += bytes; in trim_no_bitmap()
3854 ctl->discardable_bytes[BTRFS_STAT_CURR] += entry->bytes; in reset_trimming_bitmap()
3869 ctl->discardable_bytes[BTRFS_STAT_CURR] -= entry->bytes; in end_trimming_bitmap()
3886 u64 bytes; in trim_bitmaps() local
3931 bytes = minlen; in trim_bitmaps()
3932 ret2 = search_bitmap(ctl, entry, &start, &bytes, false); in trim_bitmaps()
3958 bytes = min(bytes, end - start); in trim_bitmaps()
3959 if (bytes < minlen || (async && maxlen && bytes > maxlen)) { in trim_bitmaps()
3966 * Let bytes = BTRFS_MAX_DISCARD_SIZE + X. in trim_bitmaps()
3973 bytes > (max_discard_size + minlen)) in trim_bitmaps()
3974 bytes = max_discard_size; in trim_bitmaps()
3976 bitmap_clear_bits(ctl, entry, start, bytes, true); in trim_bitmaps()
3977 if (entry->bytes == 0) in trim_bitmaps()
3982 trim_entry.bytes = bytes; in trim_bitmaps()
3986 ret = do_trimming(block_group, total_trimmed, start, bytes, in trim_bitmaps()
3987 start, bytes, 0, &trim_entry); in trim_bitmaps()
3999 start += bytes; in trim_bitmaps()
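
Both trim paths (3765-3775 and 3966-3974) cap a single discard at max_discard_size, but only when the leftover piece is still worth trimming: the "Let bytes = BTRFS_MAX_DISCARD_SIZE + X" comments argue that splitting only makes sense when X clears the minimum length, otherwise an un-trimmable tail would be left behind. The bitmap path at 3973 compares against max_discard_size + minlen; the extent path's comparison is truncated in this listing, so using minlen for both in the standalone sketch below is an assumption:

#include <stdint.h>
#include <stdio.h>

/* Clamp one round of discarding to max_discard_size, but only split the
 * range when the remainder would still be at least minlen. */
static uint64_t clamp_discard(uint64_t bytes, uint64_t max_discard_size,
                              uint64_t minlen)
{
        /* Let bytes = max_discard_size + X: split only if X >= minlen. */
        if (max_discard_size && bytes >= max_discard_size + minlen)
                return max_discard_size;
        return bytes;
}

int main(void)
{
        uint64_t max = 64ULL << 20, minlen = 4096;

        /* 68 MiB: trim 64 MiB now, the 4 MiB tail stays trimmable.
         * 64 MiB + 1 KiB: don't split, the 1 KiB tail would be lost. */
        printf("%llu %llu\n",
               (unsigned long long)clamp_discard(68ULL << 20, max, minlen),
               (unsigned long long)clamp_discard((64ULL << 20) + 1024, max, minlen));
        return 0;
}
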
4190 u64 offset, u64 bytes, bool bitmap) in test_add_free_space_entry() argument
4209 info->bytes = bytes; in test_add_free_space_entry()
4237 bytes_added = add_bytes_to_bitmap(ctl, bitmap_info, offset, bytes, in test_add_free_space_entry()
4240 bytes -= bytes_added; in test_add_free_space_entry()
4244 if (bytes) in test_add_free_space_entry()
4260 u64 offset, u64 bytes) in test_check_exists() argument
4289 offset + bytes > bit_off) { in test_check_exists()
4299 if (tmp->offset + tmp->bytes < offset) in test_check_exists()
4301 if (offset + bytes < tmp->offset) { in test_check_exists()
4313 if (offset + bytes < tmp->offset) in test_check_exists()
4315 if (tmp->offset + tmp->bytes < offset) { in test_check_exists()
4332 if (offset > info->offset && offset < info->offset + info->bytes) in test_check_exists()