Lines matching full:start in fs/btrfs/extent-io-tree.c

46 		pr_err("BTRFS: state leak: start %llu end %llu state %u in tree %d refs %d\n",  in btrfs_extent_state_leak_debug_check()
47 state->start, state->end, state->state, in btrfs_extent_state_leak_debug_check()
56 #define btrfs_debug_check_extent_io_range(tree, start, end) \ argument
57 __btrfs_debug_check_extent_io_range(__func__, (tree), (start), (end))
60 u64 start, u64 end) in __btrfs_debug_check_extent_io_range() argument
73 caller, btrfs_ino(inode), isize, start, end); in __btrfs_debug_check_extent_io_range()
215 changeset->bytes_changed += state->end - state->start + 1; in add_extent_changeset()
216 ret = ulist_add(&changeset->range_changed, state->start, state->end, in add_extent_changeset()
243 * entry->start <= offset && entry->end >= offset.
272 if (offset < entry->start) in tree_search_for_insert()
320 if (offset < entry->start) in tree_search_prev_next()
334 while (entry && offset < entry->start) in tree_search_prev_next()
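
The comment at line 243 states the lookup invariant: the entry returned for a given offset satisfies entry->start <= offset && entry->end >= offset, i.e. the search lands on the state containing the offset, or the nearest one after it. A minimal userspace sketch of that search, assuming a plain sorted array of inclusive ranges in place of the rbtree (struct range and range_search are hypothetical stand-ins):

#include <stdint.h>
#include <stdio.h>

/* Hypothetical stand-in for struct extent_state: an inclusive [start, end] range. */
struct range {
	uint64_t start;
	uint64_t end;
};

/*
 * Return the first range whose end is >= offset, mimicking what the
 * rbtree walk in tree_search() achieves: the result either contains
 * offset or is the nearest range after it.
 */
static const struct range *range_search(const struct range *r, int n,
					uint64_t offset)
{
	for (int i = 0; i < n; i++)
		if (r[i].end >= offset)
			return &r[i];
	return NULL;	/* past the last range */
}

int main(void)
{
	const struct range ranges[] = { { 0, 4095 }, { 8192, 12287 } };
	const struct range *hit = range_search(ranges, 2, 5000);

	/* 5000 falls in the gap, so the next range [8192, 12287] comes back. */
	if (hit)
		printf("[%llu, %llu]\n", (unsigned long long)hit->start,
		       (unsigned long long)hit->end);
	return 0;
}
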
355 "extent io tree error on %s state start %llu end %llu", in extent_io_tree_panic()
356 opname, state->start, state->end); in extent_io_tree_panic()
364 if (prev && prev->end == state->start - 1 && prev->state == state->state) { in merge_prev_state()
368 state->start = prev->start; in merge_prev_state()
380 if (next && next->start == state->end + 1 && next->state == state->state) { in merge_next_state()
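
merge_prev_state() and merge_next_state() (lines 364 and 380) fold a state into its neighbour when the two are byte-adjacent and carry identical bits; insert_state() applies the same test when linking a new state in (lines 460 and 473). A sketch of that adjacency check, with a hypothetical userspace struct state:

#include <stdbool.h>
#include <stdint.h>

struct state { uint64_t start, end; uint32_t bits; };

/* Mirror of the merge test at lines 364/380: byte adjacency plus equal bits. */
static bool can_merge(const struct state *prev, const struct state *next)
{
	return prev->end + 1 == next->start && prev->bits == next->bits;
}

/* Absorb 'next' into 'prev'; the real code then frees the absorbed state. */
static void merge(struct state *prev, struct state *next)
{
	if (can_merge(prev, next))
		prev->end = next->end;
}

int main(void)
{
	struct state prev = { 0, 4095, 0x1 }, next = { 4096, 8191, 0x1 };

	merge(&prev, &next);	/* prev now covers [0, 8191] */
	return prev.end == 8191 ? 0 : 1;
}
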
446 const u64 start = state->start - 1; in insert_state() local
459 if (state->end < entry->start) { in insert_state()
460 if (try_merge && end == entry->start && in insert_state()
466 entry->start = state->start; in insert_state()
473 if (try_merge && entry->end == start && in insert_state()
516 * the tree has 'orig' at [orig->start, orig->end]. After calling, there
518 * prealloc: [orig->start, split - 1]
534 prealloc->start = orig->start; in split_state()
537 orig->start = split; in split_state()
547 if (prealloc->end < entry->start) { in split_state()
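
Lines 516-518 document the contract of split_state(): starting from 'orig' at [orig->start, orig->end], after the call 'prealloc' holds [orig->start, split - 1] and 'orig' keeps [split, orig->end]. A sketch of just that bookkeeping (the kernel function additionally links prealloc back into the rbtree):

#include <assert.h>
#include <stdint.h>

struct state { uint64_t start, end; };

/*
 * The documented contract: afterwards
 *   prealloc: [orig->start, split - 1]
 *   orig:     [split, orig->end]
 */
static void split(struct state *orig, struct state *prealloc, uint64_t split_at)
{
	prealloc->start = orig->start;	/* as at line 534 */
	prealloc->end = split_at - 1;
	orig->start = split_at;		/* as at line 537 */
}

int main(void)
{
	struct state orig = { 0, 8191 }, pre;

	split(&orig, &pre, 4096);
	assert(pre.start == 0 && pre.end == 4095);
	assert(orig.start == 4096 && orig.end == 8191);
	return 0;
}
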
619 * The range [start, end] is inclusive.
623 int __clear_extent_bit(struct extent_io_tree *tree, u64 start, u64 end, in __clear_extent_bit() argument
638 btrfs_debug_check_extent_io_range(tree, start, end); in __clear_extent_bit()
639 trace_btrfs_clear_extent_bit(tree, start, end - start + 1, bits); in __clear_extent_bit()
672 cached->start <= start && cached->end > start) { in __clear_extent_bit()
683 state = tree_search(tree, start); in __clear_extent_bit()
687 if (state->start > end) in __clear_extent_bit()
689 WARN_ON(state->end < start); in __clear_extent_bit()
713 if (state->start < start) { in __clear_extent_bit()
717 err = split_state(tree, state, prealloc, start); in __clear_extent_bit()
735 if (state->start <= end && state->end > end) { in __clear_extent_bit()
756 start = last_end + 1; in __clear_extent_bit()
757 if (start <= end && state && !need_resched()) in __clear_extent_bit()
761 if (start > end) in __clear_extent_bit()
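
__clear_extent_bit() walks the states overlapping the inclusive [start, end], clearing bits and advancing with start = last_end + 1 (line 756) until start > end (line 761); states straddling a boundary are split first (line 717). A simplified walk over a sorted array, assuming boundary-aligned states and hypothetical names:

#include <stdint.h>

struct state { uint64_t start, end; uint32_t bits; };

/*
 * Clear 'bits' from every state overlapping the inclusive [start, end].
 * The real walk also splits states straddling a boundary (line 717) and
 * may free fully-cleared states; this sketch assumes aligned states.
 */
static void clear_bits(struct state *s, int n, uint64_t start, uint64_t end,
		       uint32_t bits)
{
	for (int i = 0; i < n && start <= end; i++) {
		if (s[i].end < start)
			continue;
		if (s[i].start > end)		/* same early exit as line 687 */
			break;
		s[i].bits &= ~bits;
		start = s[i].end + 1;		/* advance, as at line 756 */
	}
}

int main(void)
{
	struct state s[] = { { 0, 4095, 0x3 }, { 4096, 8191, 0x3 } };

	clear_bits(s, 2, 0, 4095, 0x1);	/* only the first state loses 0x1 */
	return s[0].bits == 0x2 && s[1].bits == 0x3 ? 0 : 1;
}
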
779 * The range [start, end] is inclusive.
782 static void wait_extent_bit(struct extent_io_tree *tree, u64 start, u64 end, in wait_extent_bit() argument
787 btrfs_debug_check_extent_io_range(tree, start, end); in wait_extent_bit()
798 state->start <= start && start < state->end) in wait_extent_bit()
806 state = tree_search(tree, start); in wait_extent_bit()
810 if (state->start > end) in wait_extent_bit()
816 start = state->start; in wait_extent_bit()
826 start = state->end + 1; in wait_extent_bit()
828 if (start > end) in wait_extent_bit()
865 * Find the first state struct with 'bits' set after 'start', and return it.
867 * 'start'.
870 u64 start, u32 bits) in find_first_extent_bit_state() argument
878 state = tree_search(tree, start); in find_first_extent_bit_state()
880 if (state->end >= start && (state->state & bits)) in find_first_extent_bit_state()
895 bool find_first_extent_bit(struct extent_io_tree *tree, u64 start, in find_first_extent_bit() argument
905 if (state->end == start - 1 && extent_state_in_tree(state)) { in find_first_extent_bit()
927 state = find_first_extent_bit_state(tree, start, bits); in find_first_extent_bit()
931 *start_ret = state->start; in find_first_extent_bit()
944 * @start: offset to start the search from
956 int find_contiguous_extent_bit(struct extent_io_tree *tree, u64 start, in find_contiguous_extent_bit() argument
965 state = find_first_extent_bit_state(tree, start, bits); in find_contiguous_extent_bit()
967 *start_ret = state->start; in find_contiguous_extent_bit()
970 if (state->start > (*end_ret + 1)) in find_contiguous_extent_bit()
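
find_first_extent_bit_state() returns the first state ending at or after 'start' with the requested bits set (line 880), and find_contiguous_extent_bit() extends that hit while successive states remain byte-contiguous, stopping at the first gap (line 970). A sketch under the same array stand-in:

#include <stdbool.h>
#include <stdint.h>

struct state { uint64_t start, end; uint32_t bits; };

static bool find_contiguous(const struct state *s, int n, uint64_t start,
			    uint32_t bits, uint64_t *start_ret, uint64_t *end_ret)
{
	int i;

	/* First state ending at/after 'start' with 'bits' set (line 880). */
	for (i = 0; i < n; i++)
		if (s[i].end >= start && (s[i].bits & bits))
			break;
	if (i == n)
		return false;

	*start_ret = s[i].start;
	*end_ret = s[i].end;
	/* Extend while following states stay byte-contiguous (line 970). */
	for (i++; i < n && s[i].start <= *end_ret + 1; i++)
		*end_ret = s[i].end;
	return true;
}

int main(void)
{
	const struct state s[] = { { 0, 4095, 0x1 }, { 4096, 8191, 0x2 } };
	uint64_t first, last;

	/* The adjacent second state extends the run to [0, 8191]. */
	return find_contiguous(s, 2, 100, 0x1, &first, &last) &&
	       first == 0 && last == 8191 ? 0 : 1;
}
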
982 * than 'max_bytes'. start and end are used to return the range,
986 bool btrfs_find_delalloc_range(struct extent_io_tree *tree, u64 *start, in btrfs_find_delalloc_range() argument
991 u64 cur_start = *start; in btrfs_find_delalloc_range()
1008 if (found && (state->start != cur_start || in btrfs_find_delalloc_range()
1018 *start = state->start; in btrfs_find_delalloc_range()
1025 total_bytes += state->end - state->start + 1; in btrfs_find_delalloc_range()
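
btrfs_find_delalloc_range() accumulates contiguous matching states from '*start', reporting the run's bounds and stopping once a gap appears (line 1008) or enough bytes have been counted (line 1025). A hedged sketch, with a plain 'delalloc' flag standing in for the EXTENT_DELALLOC bit and an array for the tree:

#include <stdbool.h>
#include <stdint.h>

struct state { uint64_t start, end; bool delalloc; };

static bool find_delalloc(const struct state *s, int n, uint64_t *start,
			  uint64_t *end, uint64_t max_bytes)
{
	uint64_t cur = *start, total = 0;
	bool found = false;

	for (int i = 0; i < n; i++) {
		if (s[i].end < cur)
			continue;
		/* Stop at a gap or a non-delalloc state (line 1008). */
		if (!s[i].delalloc || (found && s[i].start != cur))
			break;
		if (!found) {
			*start = s[i].start;	/* line 1018 */
			found = true;
		}
		*end = s[i].end;
		total += s[i].end - s[i].start + 1;	/* line 1025 */
		if (total >= max_bytes)
			break;
		cur = s[i].end + 1;
	}
	return found;
}

int main(void)
{
	const struct state s[] = { { 0, 4095, true }, { 4096, 8191, true } };
	uint64_t start = 0, end = 0;

	return find_delalloc(s, 2, &start, &end, 1 << 20) &&
	       start == 0 && end == 8191 ? 0 : 1;
}
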
1042 * existing range is returned in failed_state in this case, and the start of the
1047 [start, end] is inclusive. This takes the tree lock. in __set_extent_bit()
1049 static int __set_extent_bit(struct extent_io_tree *tree, u64 start, u64 end, in __set_extent_bit() argument
1066 btrfs_debug_check_extent_io_range(tree, start, end); in __set_extent_bit()
1067 trace_btrfs_set_extent_bit(tree, start, end - start + 1, bits); in __set_extent_bit()
1091 if (state->start <= start && state->end > start && in __set_extent_bit()
1099 state = tree_search_for_insert(tree, start, &p, &parent); in __set_extent_bit()
1104 prealloc->start = start; in __set_extent_bit()
1112 last_start = state->start; in __set_extent_bit()
1121 if (state->start == start && state->end <= end) { in __set_extent_bit()
1123 *failed_start = state->start; in __set_extent_bit()
1134 start = last_end + 1; in __set_extent_bit()
1136 if (start < end && state && state->start == start && in __set_extent_bit()
1157 if (state->start < start) { in __set_extent_bit()
1159 *failed_start = start; in __set_extent_bit()
1170 start = state->end + 1; in __set_extent_bit()
1178 ret = split_state(tree, state, prealloc, start); in __set_extent_bit()
1191 start = last_end + 1; in __set_extent_bit()
1193 if (start < end && state && state->start == start && in __set_extent_bit()
1206 if (state->start > start) { in __set_extent_bit()
1223 prealloc->start = start; in __set_extent_bit()
1234 start = this_end + 1; in __set_extent_bit()
1243 if (state->start <= end && state->end > end) { in __set_extent_bit()
1245 *failed_start = start; in __set_extent_bit()
1266 if (start > end) in __set_extent_bit()
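
For __set_extent_bit(), the interesting wrinkle is the exclusive case: a lock-style caller must fail with the offset of the collision reported via *failed_start (lines 1123 and 1159) rather than set the bit on an already-claimed byte. A single-state sketch of that decision; names are hypothetical and the splitting/merging the real function does is left out:

#include <stdbool.h>
#include <stdint.h>

struct state { uint64_t start, end; uint32_t bits; };

/*
 * If the exclusive bit is already present anywhere in the overlap, fail
 * and report the first colliding byte via *failed_start; otherwise take
 * the fast path for an exactly-covered state (line 1121).
 */
static bool set_bit_exclusive(struct state *st, uint64_t start, uint64_t end,
			      uint32_t bit, uint64_t *failed_start)
{
	if (st->end >= start && st->start <= end && (st->bits & bit)) {
		*failed_start = st->start > start ? st->start : start;
		return false;	/* the real code returns -EEXIST */
	}
	if (st->start == start && st->end <= end)
		st->bits |= bit;
	return true;
}

int main(void)
{
	struct state st = { 0, 4095, 0 };
	uint64_t failed;

	if (!set_bit_exclusive(&st, 0, 4095, 0x8, &failed))
		return 1;
	/* Second attempt collides with the bit just set. */
	return set_bit_exclusive(&st, 0, 4095, 0x8, &failed) ? 1 : 0;
}
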
1282 int set_extent_bit(struct extent_io_tree *tree, u64 start, u64 end, in set_extent_bit() argument
1285 return __set_extent_bit(tree, start, end, bits, NULL, NULL, in set_extent_bit()
1293 * @start: the start offset in bytes
1307 int convert_extent_bit(struct extent_io_tree *tree, u64 start, u64 end, in convert_extent_bit() argument
1320 btrfs_debug_check_extent_io_range(tree, start, end); in convert_extent_bit()
1321 trace_btrfs_convert_extent_bit(tree, start, end - start + 1, bits, in convert_extent_bit()
1341 if (state->start <= start && state->end > start && in convert_extent_bit()
1350 state = tree_search_for_insert(tree, start, &p, &parent); in convert_extent_bit()
1357 prealloc->start = start; in convert_extent_bit()
1365 last_start = state->start; in convert_extent_bit()
1374 if (state->start == start && state->end <= end) { in convert_extent_bit()
1380 start = last_end + 1; in convert_extent_bit()
1381 if (start < end && state && state->start == start && in convert_extent_bit()
1402 if (state->start < start) { in convert_extent_bit()
1408 ret = split_state(tree, state, prealloc, start); in convert_extent_bit()
1420 start = last_end + 1; in convert_extent_bit()
1421 if (start < end && state && state->start == start && in convert_extent_bit()
1434 if (state->start > start) { in convert_extent_bit()
1453 prealloc->start = start; in convert_extent_bit()
1463 start = this_end + 1; in convert_extent_bit()
1472 if (state->start <= end && state->end > end) { in convert_extent_bit()
1491 if (start > end) in convert_extent_bit()
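
convert_extent_bit() walks the range the same way as the set path; per state, the transformation is to gain 'bits' and drop 'clear_bits' in one pass. The core operation, in a hypothetical one-struct model:

#include <stdint.h>

struct state { uint32_t bits; };

/* Per-state transformation: gain 'bits', lose 'clear_bits', one pass. */
static void convert(struct state *st, uint32_t bits, uint32_t clear_bits)
{
	st->bits |= bits;
	st->bits &= ~clear_bits;
}

int main(void)
{
	struct state st = { 0x1 };

	convert(&st, 0x2, 0x1);
	return st.bits == 0x2 ? 0 : 1;
}
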
1507 * Find the first range that has @bits not set. This range could start before
1508 * @start.
1511 * @start: offset at/after which the found extent should start
1521 void find_first_clear_extent_bit(struct extent_io_tree *tree, u64 start, in find_first_clear_extent_bit() argument
1531 state = tree_search_prev_next(tree, start, &prev, &next); in find_first_clear_extent_bit()
1542 * We are past the last allocated chunk, set start at in find_first_clear_extent_bit()
1553 * At this point 'state' either contains 'start' or start is in find_first_clear_extent_bit()
1556 if (in_range(start, state->start, state->end - state->start + 1)) { in find_first_clear_extent_bit()
1563 start = state->end + 1; in find_first_clear_extent_bit()
1566 * 'start' falls within a range that doesn't in find_first_clear_extent_bit()
1567 * have the bits set, so take its start as the in find_first_clear_extent_bit()
1574 *start_ret = state->start; in find_first_clear_extent_bit()
1598 * Find the longest stretch from start until an entry which has the in find_first_clear_extent_bit()
1602 if (state->end >= start && !(state->state & bits)) { in find_first_clear_extent_bit()
1605 *end_ret = state->start - 1; in find_first_clear_extent_bit()
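
find_first_clear_extent_bit() skips past states that carry the bits (line 1563), and when 'start' already sits in a clear state it returns that state's start, which may precede the requested offset (line 1574); the next set state caps the result (line 1605). A simplified sketch over a sorted array; unlike the kernel helper, it does not extend the returned start backwards when 'start' falls in a gap between states:

#include <stdint.h>

struct state { uint64_t start, end; uint32_t bits; };

static void find_first_clear(const struct state *s, int n, uint64_t start,
			     uint32_t bits, uint64_t *start_ret, uint64_t *end_ret)
{
	uint64_t first = start;

	*end_ret = UINT64_MAX;	/* no later set range found (cf. line 1542) */
	for (int i = 0; i < n; i++) {
		if (s[i].end < first)
			continue;
		if (s[i].bits & bits) {
			if (first >= s[i].start) {
				/* 'start' sits in a set range: skip it (line 1563). */
				first = s[i].end + 1;
				continue;
			}
			/* The next set range caps the clear one (line 1605). */
			*end_ret = s[i].start - 1;
			break;
		}
		if (first >= s[i].start)
			first = s[i].start;	/* may precede 'start' (line 1574) */
	}
	*start_ret = first;
}

int main(void)
{
	const struct state s[] = { { 0, 4095, 0x1 }, { 8192, 12287, 0x1 } };
	uint64_t first, last;

	find_first_clear(s, 2, 100, 0x1, &first, &last);
	return first == 4096 && last == 8191 ? 0 : 1;
}
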
1619 * @start: The start offset of the range. This value is updated to the
1632 * called only once or if each call does not start where the
1637 * then @start is updated with the offset of the first byte with the bits set.
1640 u64 *start, u64 search_end, u64 max_bytes, in count_range_bits() argument
1646 u64 cur_start = *start; in count_range_bits()
1664 if (cached->start <= cur_start && cur_start <= cached->end) { in count_range_bits()
1666 } else if (cached->start > cur_start) { in count_range_bits()
1670 * The cached state starts after our search range's start. Check in count_range_bits()
1674 * no previous state record, we can start from our cached state. in count_range_bits()
1679 else if (prev->start <= cur_start && cur_start <= prev->end) in count_range_bits()
1692 if (state->start > search_end) in count_range_bits()
1694 if (contig && found && state->start > last + 1) in count_range_bits()
1698 max(cur_start, state->start); in count_range_bits()
1702 *start = max(cur_start, state->start); in count_range_bits()
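
count_range_bits() totals the bytes with the bits set inside [*start, search_end], clamping each state to the search window (line 1698) and updating *start to the first matching byte (line 1702). A sketch omitting the 'contig' mode that stops at gaps (line 1694) and the cached-state fast path:

#include <stdint.h>

struct state { uint64_t start, end; uint32_t bits; };

static uint64_t count_bits(const struct state *s, int n, uint64_t *start,
			   uint64_t search_end, uint64_t max_bytes, uint32_t bits)
{
	uint64_t cur = *start, total = 0;
	int found = 0;

	for (int i = 0; i < n && total < max_bytes; i++) {
		if (s[i].end < cur)
			continue;
		if (s[i].start > search_end)	/* line 1692 */
			break;
		if (s[i].bits & bits) {
			uint64_t first = s[i].start > cur ? s[i].start : cur;
			uint64_t last = s[i].end < search_end ? s[i].end : search_end;

			total += last - first + 1;	/* clamped, as at line 1698 */
			if (!found) {
				*start = first;		/* line 1702 */
				found = 1;
			}
		}
	}
	return total;
}

int main(void)
{
	const struct state s[] = { { 0, 4095, 0x1 }, { 8192, 12287, 0x1 } };
	uint64_t start = 100;

	/* 3996 bytes of [100, 4095] plus all 4096 of the second state. */
	return count_bits(s, 2, &start, 12287, UINT64_MAX, 0x1) == 8092 ? 0 : 1;
}
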
1727 bool test_range_bit_exists(struct extent_io_tree *tree, u64 start, u64 end, u32 bit) in test_range_bit_exists() argument
1735 state = tree_search(tree, start); in test_range_bit_exists()
1736 while (state && start <= end) { in test_range_bit_exists()
1737 if (state->start > end) in test_range_bit_exists()
1745 /* If state->end is (u64)-1, start will overflow to 0 */ in test_range_bit_exists()
1746 start = state->end + 1; in test_range_bit_exists()
1747 if (start > end || start == 0) in test_range_bit_exists()
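
The comment at line 1745 is worth spelling out: when a state ends at (u64)-1, the advance start = state->end + 1 wraps to 0, so the loop must treat start == 0 as termination (line 1747). A two-line demonstration of the wrap:

#include <stdint.h>
#include <stdio.h>

int main(void)
{
	uint64_t end = UINT64_MAX;	/* a state reaching up to (u64)-1 */
	uint64_t start = end + 1;	/* wraps to 0, as line 1745 warns */

	/* The walk must treat the wrap as termination, not restart at 0. */
	if (start == 0)
		printf("overflow detected, stop iterating\n");
	return 0;
}
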
1756 * Check if the whole range [@start, @end] contains the single @bit set.
1758 bool test_range_bit(struct extent_io_tree *tree, u64 start, u64 end, u32 bit, in test_range_bit() argument
1767 if (cached && extent_state_in_tree(cached) && cached->start <= start && in test_range_bit()
1768 cached->end > start) in test_range_bit()
1771 state = tree_search(tree, start); in test_range_bit()
1772 while (state && start <= end) { in test_range_bit()
1773 if (state->start > start) { in test_range_bit()
1778 if (state->start > end) in test_range_bit()
1793 start = state->end + 1; in test_range_bit()
1794 if (start > end || start == 0) in test_range_bit()
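
test_range_bit() is the strict variant: every byte of the inclusive range must be covered by states carrying the bit, so a gap before the next state (line 1773) fails the check just like a state missing the bit. A sketch with the same array stand-in and the same wrap guard:

#include <stdbool.h>
#include <stdint.h>

struct state { uint64_t start, end; uint32_t bits; };

static bool range_has_bit(const struct state *s, int n, uint64_t start,
			  uint64_t end, uint32_t bit)
{
	for (int i = 0; i < n && start <= end; i++) {
		if (s[i].end < start)
			continue;
		if (s[i].start > start)		/* gap: fail, as at line 1773 */
			return false;
		if (!(s[i].bits & bit))
			return false;
		if (s[i].end == UINT64_MAX)	/* avoid the wrap (line 1794) */
			return true;
		start = s[i].end + 1;
	}
	return start > end;
}

int main(void)
{
	const struct state s[] = { { 0, 4095, 0x1 }, { 4096, 8191, 0x2 } };

	/* The second state lacks bit 0x1, so the whole-range check fails. */
	return range_has_bit(s, 2, 0, 8191, 0x1) ? 1 : 0;
}
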
1807 int set_record_extent_bits(struct extent_io_tree *tree, u64 start, u64 end, in set_record_extent_bits() argument
1817 return __set_extent_bit(tree, start, end, bits, NULL, NULL, NULL, changeset); in set_record_extent_bits()
1820 int clear_record_extent_bits(struct extent_io_tree *tree, u64 start, u64 end, in clear_record_extent_bits() argument
1829 return __clear_extent_bit(tree, start, end, bits, NULL, changeset); in clear_record_extent_bits()
1832 bool __try_lock_extent(struct extent_io_tree *tree, u64 start, u64 end, u32 bits, in __try_lock_extent() argument
1838 err = __set_extent_bit(tree, start, end, bits, &failed_start, in __try_lock_extent()
1841 if (failed_start > start) in __try_lock_extent()
1842 clear_extent_bit(tree, start, failed_start - 1, bits, cached); in __try_lock_extent()
1849 * Either insert or lock state struct between start and end; use mask to tell in __lock_extent()
1852 int __lock_extent(struct extent_io_tree *tree, u64 start, u64 end, u32 bits, in __lock_extent() argument
1859 err = __set_extent_bit(tree, start, end, bits, &failed_start, in __lock_extent()
1862 if (failed_start != start) in __lock_extent()
1863 clear_extent_bit(tree, start, failed_start - 1, in __lock_extent()
1867 err = __set_extent_bit(tree, start, end, bits, in __lock_extent()
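
__try_lock_extent() (line 1838 onwards) shows the rollback idiom: if setting the lock bit fails partway through, the prefix [start, failed_start - 1] that did get set must be cleared again before reporting failure (lines 1841-1842); __lock_extent() instead waits and retries from failed_start (line 1867). A toy model of the try-lock path, with a per-unit flag array standing in for the EXTENT_LOCKED tracking:

#include <stdbool.h>
#include <stdint.h>

/* Toy lock table: one flag per unit, a hypothetical stand-in for the
 * EXTENT_LOCKED bit that __try_lock_extent() sets via __set_extent_bit(). */
static bool locked[16];

static bool set_locked(uint64_t start, uint64_t end, uint64_t *failed_start)
{
	for (uint64_t i = start; i <= end; i++) {
		if (locked[i]) {
			*failed_start = i;	/* like failed_start at line 1838 */
			return false;
		}
		locked[i] = true;
	}
	return true;
}

static void clear_locked(uint64_t start, uint64_t end)
{
	for (uint64_t i = start; i <= end; i++)
		locked[i] = false;
}

/*
 * Shape of __try_lock_extent(): if setting the lock bit fails partway,
 * roll back the prefix [start, failed_start - 1] that did get set
 * (line 1842), then report failure.
 */
static bool try_lock_range(uint64_t start, uint64_t end)
{
	uint64_t failed_start;

	if (set_locked(start, end, &failed_start))
		return true;
	if (failed_start > start)
		clear_locked(start, failed_start - 1);
	return false;
}

int main(void)
{
	locked[3] = true;			/* pre-existing lock */
	return try_lock_range(0, 7) ? 1 : 0;	/* fails, units 0-2 rolled back */
}
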