Lines Matching refs:start

45 		pr_err("BTRFS: state leak: start %llu end %llu state %u in tree %d refs %d\n",
46 state->start, state->end, state->state,
55 #define btrfs_debug_check_extent_io_range(tree, start, end) \
56 __btrfs_debug_check_extent_io_range(__func__, (tree), (start), (end))
59 u64 start, u64 end)
72 caller, btrfs_ino(inode), isize, start, end);
214 changeset->bytes_changed += state->end - state->start + 1;
215 ret = ulist_add(&changeset->range_changed, state->start, state->end,
242 * entry->start <= offset && entry->end >= offset.
271 if (offset < entry->start)
319 if (offset < entry->start)
333 while (entry && offset < entry->start)
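
The containment rule quoted above, entry->start <= offset && entry->end >= offset, is the whole contract of tree_search(): return the state that holds offset, or the next state after it. A minimal sketch of that contract over a sorted array of disjoint inclusive ranges (struct range and find_entry are illustrative stand-ins, not btrfs code):

    #include <stddef.h>
    #include <stdint.h>

    typedef uint64_t u64;

    struct range { u64 start, end; };   /* inclusive, sorted, disjoint */

    /* Return the range containing 'offset', or the first range after it,
     * or NULL when 'offset' lies past the last range: the tree_search()
     * contract, minus the red-black tree. */
    static const struct range *find_entry(const struct range *r, size_t n,
                                          u64 offset)
    {
        for (size_t i = 0; i < n; i++)
            if (offset <= r[i].end)
                return &r[i];           /* r[i].start may still be > offset */
        return NULL;
    }

The repeated offset < entry->start tests in the matches above are the binary-search descent that implements this in the rbtree.
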
354 "extent io tree error on %s state start %llu end %llu",
355 opname, state->start, state->end);
363 if (prev && prev->end == state->start - 1 && prev->state == state->state) {
367 state->start = prev->start;
379 if (next && next->start == state->end + 1 && next->state == state->state) {
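
Both merge tests above demand exact adjacency (prev->end == state->start - 1, next->start == state->end + 1) and identical bits before neighbours are fused. A hedged sketch of the left-hand rule, with extent_state reduced to a plain struct (struct es and merge_with_prev are stand-in names):

    #include <stdbool.h>
    #include <stdint.h>

    typedef uint64_t u64;
    typedef uint32_t u32;

    struct es { u64 start, end; u32 state; };

    /* Absorb 'prev' into 'cur' when the ranges touch and carry the same
     * bits, mirroring the adjacency test in merge_state(). */
    static bool merge_with_prev(struct es *cur, const struct es *prev)
    {
        if (prev->end != cur->start - 1 || prev->state != cur->state)
            return false;
        cur->start = prev->start;       /* cur now spans both ranges */
        return true;
    }

The right-hand side has the same shape with next->start == state->end + 1.
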
445 const u64 start = state->start - 1;
458 if (state->end < entry->start) {
459 if (try_merge && end == entry->start &&
465 entry->start = state->start;
472 if (try_merge && entry->end == start &&
515 * the tree has 'orig' at [orig->start, orig->end]. After calling, there
517 * prealloc: [orig->start, split - 1]
533 prealloc->start = orig->start;
536 orig->start = split;
546 if (prealloc->end < entry->start) {
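
The comment above pins down split_state()'s invariant: after the call, prealloc owns [orig->start, split - 1] and orig keeps [split, orig->end]. A standalone sketch of just that arithmetic (split_at is a stand-in; the real function also links prealloc back into the tree, which the insertion test above is doing):

    #include <assert.h>
    #include <stdint.h>

    typedef uint64_t u64;

    struct es { u64 start, end; };

    /* Split 'orig' at 'split': 'prealloc' becomes the left half and 'orig'
     * the right half. 'split' must lie strictly inside 'orig'. */
    static void split_at(struct es *orig, struct es *prealloc, u64 split)
    {
        assert(split > orig->start && split <= orig->end);
        prealloc->start = orig->start;
        prealloc->end = split - 1;
        orig->start = split;
    }
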
621 * The range [start, end] is inclusive.
625 int __clear_extent_bit(struct extent_io_tree *tree, u64 start, u64 end,
640 btrfs_debug_check_extent_io_range(tree, start, end);
641 trace_btrfs_clear_extent_bit(tree, start, end - start + 1, bits);
674 cached->start <= start && cached->end > start) {
685 state = tree_search(tree, start);
689 if (state->start > end)
691 WARN_ON(state->end < start);
715 if (state->start < start) {
719 err = split_state(tree, state, prealloc, start);
737 if (state->start <= end && state->end > end) {
758 start = last_end + 1;
759 if (start <= end && state && !need_resched())
763 if (start > end)
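
The stepping visible above — remember last_end, advance start to last_end + 1, stop once start > end — is the walk that every loop in this file performs over an inclusive range. A sketch with the rbtree successor replaced by a plain next pointer (struct es and walk_range are stand-ins):

    #include <stdint.h>

    typedef uint64_t u64;

    struct es { u64 start, end; struct es *next; };

    /* Visit every state overlapping the inclusive [start, end], using the
     * same advance-past-last_end stepping as __clear_extent_bit(). */
    static void walk_range(struct es *state, u64 start, u64 end)
    {
        while (state && start <= end) {
            if (state->start > end)
                break;                  /* walked past the range */
            u64 last_end = state->end;
            /* ... operate on the overlap here ... */
            start = last_end + 1;       /* first byte not yet handled */
            state = state->next;        /* kernel: next_state(state) */
        }
    }

The real loop also drops out to reschedule, which is what the need_resched() test above guards.
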
781 * The range [start, end] is inclusive.
784 static void wait_extent_bit(struct extent_io_tree *tree, u64 start, u64 end,
789 btrfs_debug_check_extent_io_range(tree, start, end);
800 state->start <= start && start < state->end)
808 state = tree_search(tree, start);
812 if (state->start > end)
818 start = state->start;
828 start = state->end + 1;
830 if (start > end)
868 * Find the first state struct with 'bits' set after 'start', and return it.
870 * 'start'.
873 u64 start, u32 bits)
881 state = tree_search(tree, start);
883 if (state->end >= start && (state->state & bits))
898 bool find_first_extent_bit(struct extent_io_tree *tree, u64 start,
908 if (state->end == start - 1 && extent_state_in_tree(state)) {
930 state = find_first_extent_bit_state(tree, start, bits);
934 *start_ret = state->start;
947 * @start: offset to start the search from
959 int find_contiguous_extent_bit(struct extent_io_tree *tree, u64 start,
968 state = find_first_extent_bit_state(tree, start, bits);
970 *start_ret = state->start;
973 if (state->start > (*end_ret + 1))
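
The break condition above, state->start > (*end_ret + 1), is what bounds the result to a single contiguous run: the extension stops as soon as the next state leaves a gap. A sketch of that extension over a linked list of states (contiguous_bits is a stand-in, and unlike the kernel loop it re-checks the bits on every state):

    #include <stdint.h>

    typedef uint64_t u64;
    typedef uint32_t u32;

    struct es { u64 start, end; u32 state; struct es *next; };

    /* Find the first state with 'bits' set, then grow [*start_ret, *end_ret]
     * while following states stay exactly adjacent. Returns 0 on success,
     * -1 when no state carries the bits. */
    static int contiguous_bits(struct es *s, u32 bits,
                               u64 *start_ret, u64 *end_ret)
    {
        while (s && !(s->state & bits))
            s = s->next;
        if (!s)
            return -1;
        *start_ret = s->start;
        *end_ret = s->end;
        for (s = s->next; s; s = s->next) {
            if (s->start > *end_ret + 1 || !(s->state & bits))
                break;                  /* gap or different bits: run ends */
            *end_ret = s->end;
        }
        return 0;
    }
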
985 * than 'max_bytes'. start and end are used to return the range,
989 bool btrfs_find_delalloc_range(struct extent_io_tree *tree, u64 *start,
994 u64 cur_start = *start;
1011 if (found && (state->start != cur_start ||
1021 *start = state->start;
1028 total_bytes += state->end - state->start + 1;
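
The accumulation above adds whole states (state->end - state->start + 1 bytes each) until a gap breaks the run or max_bytes is covered. A hedged sketch of that shape, with the delalloc bit checks omitted (struct es and find_run are stand-ins):

    #include <stdbool.h>
    #include <stdint.h>

    typedef uint64_t u64;

    struct es { u64 start, end; struct es *next; };

    /* Accumulate exactly-adjacent states into [*start, *end] until the run
     * breaks or at least max_bytes are covered, loosely following
     * btrfs_find_delalloc_range(). Returns true if any state was found. */
    static bool find_run(struct es *s, u64 *start, u64 *end, u64 max_bytes)
    {
        u64 cur_start = *start;
        u64 total = 0;
        bool found = false;

        for (; s; s = s->next) {
            if (found && s->start != cur_start)
                break;                  /* gap between states: run over */
            if (!found)
                *start = s->start;      /* first state defines the start */
            *end = s->end;
            found = true;
            total += s->end - s->start + 1;
            if (total >= max_bytes)
                break;
            cur_start = s->end + 1;     /* next state must begin here */
        }
        return found;
    }
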
1045 * existing range is returned in failed_state in this case, and the start of the
1050 [start, end] is inclusive. This takes the tree lock.
1052 static int __set_extent_bit(struct extent_io_tree *tree, u64 start, u64 end,
1069 btrfs_debug_check_extent_io_range(tree, start, end);
1070 trace_btrfs_set_extent_bit(tree, start, end - start + 1, bits);
1091 if (state->start <= start && state->end > start &&
1099 state = tree_search_for_insert(tree, start, &p, &parent);
1104 prealloc->start = start;
1112 last_start = state->start;
1121 if (state->start == start && state->end <= end) {
1123 *failed_start = state->start;
1134 start = last_end + 1;
1136 if (start < end && state && state->start == start &&
1157 if (state->start < start) {
1159 *failed_start = start;
1170 start = state->end + 1;
1178 ret = split_state(tree, state, prealloc, start);
1191 start = last_end + 1;
1193 if (start < end && state && state->start == start &&
1206 if (state->start > start) {
1223 prealloc->start = start;
1234 start = this_end + 1;
1243 if (state->start <= end && state->end > end) {
1245 *failed_start = start;
1266 if (start > end)
1282 int set_extent_bit(struct extent_io_tree *tree, u64 start, u64 end,
1285 return __set_extent_bit(tree, start, end, bits, NULL, NULL,
1293 * @start: the start offset in bytes
1307 int convert_extent_bit(struct extent_io_tree *tree, u64 start, u64 end,
1320 btrfs_debug_check_extent_io_range(tree, start, end);
1321 trace_btrfs_convert_extent_bit(tree, start, end - start + 1, bits,
1341 if (state->start <= start && state->end > start &&
1350 state = tree_search_for_insert(tree, start, &p, &parent);
1357 prealloc->start = start;
1365 last_start = state->start;
1374 if (state->start == start && state->end <= end) {
1380 start = last_end + 1;
1381 if (start < end && state && state->start == start &&
1402 if (state->start < start) {
1408 ret = split_state(tree, state, prealloc, start);
1420 start = last_end + 1;
1421 if (start < end && state && state->start == start &&
1434 if (state->start > start) {
1453 prealloc->start = start;
1463 start = this_end + 1;
1472 if (state->start <= end && state->end > end) {
1491 if (start > end)
1507 * Find the first range that has @bits not set. This range could start before
1508 * @start.
1511 * @start: offset at/after which the found extent should start
1521 void find_first_clear_extent_bit(struct extent_io_tree *tree, u64 start,
1531 state = tree_search_prev_next(tree, start, &prev, &next);
1542 * We are past the last allocated chunk, set start at
1553 * At this point 'state' either contains 'start' or start is
1556 if (in_range(start, state->start, state->end - state->start + 1)) {
1561 * start
1563 start = state->end + 1;
1566 * 'start' falls within a range that doesn't
1567 * have the bits set, so take its start as the
1572 * start
1574 *start_ret = state->start;
1581 * start
1587 * start
1598 * Find the longest stretch from start until an entry which has the
1602 if (state->end >= start && !(state->state & bits)) {
1605 *end_ret = state->start - 1;
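
Since the doc comment above says the result may begin before @start, the walk has to track where the current gap opened, not just where the search began. A simplified, hedged sketch of those semantics over a sorted list (first_clear is a stand-in; the real function works with prev/next tree nodes):

    #include <stdint.h>

    typedef uint64_t u64;
    typedef uint32_t u32;

    struct es { u64 start, end; u32 state; struct es *next; };

    /* Find the first maximal run of bytes with 'bits' clear that contains
     * 'start' or begins after it; the run may open before 'start'. */
    static void first_clear(const struct es *s, u64 start, u32 bits,
                            u64 *start_ret, u64 *end_ret)
    {
        u64 clear_start = 0;            /* where the current gap opened */

        *end_ret = (u64)-1;             /* open-ended unless a state closes it */
        for (; s; s = s->next) {
            if (!(s->state & bits))
                continue;               /* clear states don't end the gap */
            if (s->start > start) {
                *start_ret = clear_start;
                *end_ret = s->start - 1;
                return;                 /* gap covering 'start' found */
            }
            clear_start = s->end + 1;   /* gap reopens after this state */
            if (clear_start > start)
                start = clear_start;    /* 'start' was inside a set state */
        }
        *start_ret = clear_start;       /* trailing, open-ended gap */
    }
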
1619 * @start: The start offset of the range. This value is updated to the
1632 * called only once or if each call does not start where the
1637 * then @start is updated with the offset of the first byte with the bits set.
1640 u64 *start, u64 search_end, u64 max_bytes,
1646 u64 cur_start = *start;
1664 if (cached->start <= cur_start && cur_start <= cached->end) {
1666 } else if (cached->start > cur_start) {
1670 * The cached state starts after our search range's start. Check
1674 * no previous state record, we can start from our cached state.
1679 else if (prev->start <= cur_start && cur_start <= prev->end)
1692 if (state->start > search_end)
1694 if (contig && found && state->start > last + 1)
1698 max(cur_start, state->start);
1702 *start = max(cur_start, state->start);
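
The min/max pair glimpsed above is the standard clamp for counting only the bytes where a state and the search window overlap. The per-state contribution in isolation (overlap_bytes is a stand-in name):

    #include <stdint.h>

    typedef uint64_t u64;

    #define min(a, b) ((a) < (b) ? (a) : (b))
    #define max(a, b) ((a) > (b) ? (a) : (b))

    /* Bytes that the state [s_start, s_end] contributes to the window
     * [cur_start, search_end], all bounds inclusive, as in
     * count_range_bits(). The caller must guarantee the ranges overlap,
     * or the unsigned subtraction wraps. */
    static u64 overlap_bytes(u64 s_start, u64 s_end,
                             u64 cur_start, u64 search_end)
    {
        return min(search_end, s_end) + 1 - max(cur_start, s_start);
    }
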
1727 bool test_range_bit_exists(struct extent_io_tree *tree, u64 start, u64 end, u32 bit)
1735 state = tree_search(tree, start);
1736 while (state && start <= end) {
1737 if (state->start > end)
1745 /* If state->end is (u64)-1, start will overflow to 0 */
1746 start = state->end + 1;
1747 if (start > end || start == 0)
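
The comment above flags the one subtle exit: a state ending at (u64)-1 makes state->end + 1 wrap to 0, so every walker in this file pairs the range test with a wrap test. The guard in isolation (advance is a stand-in name):

    #include <stdint.h>

    typedef uint64_t u64;

    /* Step past a state ending at 'state_end'. Returns 0 to stop, both when
     * the inclusive range [.., end] is exhausted and when the increment
     * wrapped to 0 at the top of the keyspace. */
    static int advance(u64 *start, u64 state_end, u64 end)
    {
        *start = state_end + 1;         /* wraps to 0 when state_end == ~0ULL */
        if (*start > end || *start == 0)
            return 0;
        return 1;
    }
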
1756 Check if the whole range [@start, @end] has the single @bit set.
1758 bool test_range_bit(struct extent_io_tree *tree, u64 start, u64 end, u32 bit,
1767 if (cached && extent_state_in_tree(cached) && cached->start <= start &&
1768 cached->end > start)
1771 state = tree_search(tree, start);
1772 while (state && start <= end) {
1773 if (state->start > start) {
1778 if (state->start > end)
1793 start = state->end + 1;
1794 if (start > end || start == 0)
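
Unlike test_range_bit_exists() above it, test_range_bit() requires the bit on every byte, so a hole between states (the state->start > start test) fails the whole query. A sketch of that all-or-nothing walk (range_fully_set is a stand-in name):

    #include <stdbool.h>
    #include <stdint.h>

    typedef uint64_t u64;
    typedef uint32_t u32;

    struct es { u64 start, end; u32 state; struct es *next; };

    /* True only when every byte of the inclusive [start, end] is covered
     * by states carrying 'bit', echoing the gap test in test_range_bit(). */
    static bool range_fully_set(const struct es *s, u64 start, u64 end, u32 bit)
    {
        while (s && start <= end) {
            if (s->start > start)
                return false;           /* hole before this state */
            if (!(s->state & bit))
                return false;           /* covered, but wrong bits */
            start = s->end + 1;
            if (start == 0)
                return true;            /* wrapped: top of keyspace reached */
            s = s->next;
        }
        return start > end;
    }
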
1807 int set_record_extent_bits(struct extent_io_tree *tree, u64 start, u64 end,
1818 return __set_extent_bit(tree, start, end, bits, NULL, NULL, NULL, changeset);
1821 int clear_record_extent_bits(struct extent_io_tree *tree, u64 start, u64 end,
1830 return __clear_extent_bit(tree, start, end, bits, NULL, changeset);
1833 int try_lock_extent(struct extent_io_tree *tree, u64 start, u64 end,
1839 err = __set_extent_bit(tree, start, end, EXTENT_LOCKED, &failed_start,
1842 if (failed_start > start)
1843 clear_extent_bit(tree, start, failed_start - 1,
1851 Either insert or lock a state struct between start and end; use mask to tell
1854 int lock_extent(struct extent_io_tree *tree, u64 start, u64 end,
1861 err = __set_extent_bit(tree, start, end, EXTENT_LOCKED, &failed_start,
1864 if (failed_start != start)
1865 clear_extent_bit(tree, start, failed_start - 1,
1870 err = __set_extent_bit(tree, start, end, EXTENT_LOCKED,
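
try_lock_extent() above shows the cleanup half of the locking pattern: when __set_extent_bit() stops at an already-locked byte, the bits it did manage to set on [start, failed_start - 1] are rolled back before returning. A hedged sketch of that try-then-undo shape (try_set and clear are prototype-only stand-ins for __set_extent_bit() and clear_extent_bit(), not the btrfs calls):

    #include <stdbool.h>
    #include <stdint.h>

    typedef uint64_t u64;

    /* Stand-ins: try_set() returns false and reports the first byte it
     * could not claim in *failed_start; clear() releases a range. */
    bool try_set(u64 start, u64 end, u64 *failed_start);
    void clear(u64 start, u64 end);

    /* Try-lock: on partial failure, undo the prefix that was claimed,
     * mirroring the failed_start handling in try_lock_extent(). */
    static bool try_lock_range(u64 start, u64 end)
    {
        u64 failed_start;

        if (try_set(start, end, &failed_start))
            return true;
        if (failed_start > start)
            clear(start, failed_start - 1);
        return false;
    }

lock_extent(), by contrast, loops: it drops the prefix the same way, waits for the conflicting range to unlock, and retries the full range, which is what the final __set_extent_bit() call above begins.
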