Lines Matching refs:start

62 state->cache_node.start = state->start;
63 state->cache_node.size = state->end + 1 - state->start;
85 if (other->end == state->start - 1 &&
87 state->start = other->start;
97 if (other->start == state->end + 1 &&
99 other->start = state->start;
113 struct extent_state *state, u64 start, u64 end,
118 BUG_ON(end < start);
120 state->start = start;
138 prealloc->start = orig->start;
142 orig->start = split;
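
The matches at file lines 62-142 all come from the extent-state bookkeeping helpers: mirroring [start, end] into the cache node, merging a state with an exactly adjacent neighbour, inserting a new state, and splitting an existing one. As a minimal standalone sketch (illustrative struct and helper names, not the btrfs-progs code), the merge condition at lines 85-99 amounts to: two ranges coalesce only when they touch exactly and carry the same bits.

#include <stdio.h>
#include <stdint.h>

/* Simplified stand-in for struct extent_state: an inclusive byte range
 * plus the bits recorded for it. */
struct range_state {
	uint64_t start;
	uint64_t end;	/* inclusive, matching size = end + 1 - start above */
	unsigned int bits;
};

/* Merge 'other' into 'state' if it ends exactly one byte before 'state'
 * begins and carries identical bits, mirroring the check at lines 85-87. */
static int merge_left(struct range_state *state, const struct range_state *other)
{
	if (other->end == state->start - 1 && other->bits == state->bits) {
		state->start = other->start;
		return 1;
	}
	return 0;
}

int main(void)
{
	struct range_state a = { .start = 4096, .end = 8191, .bits = 0x1 };
	struct range_state b = { .start = 0,    .end = 4095, .bits = 0x1 };

	if (merge_left(&a, &b))
		printf("merged: [%llu, %llu]\n",
		       (unsigned long long)a.start, (unsigned long long)a.end);
	return 0;
}
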
170 * @start: offset of the bitmap item in the extent buffer
174 void extent_buffer_bitmap_set(struct extent_buffer *eb, unsigned long start,
177 u8 *p = (u8 *)eb->data + start + BIT_BYTE(pos);
198 * @start: offset of the bitmap item in the extent buffer
202 void extent_buffer_bitmap_clear(struct extent_buffer *eb, unsigned long start,
205 u8 *p = (u8 *)eb->data + start + BIT_BYTE(pos);
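
extent_buffer_bitmap_set() and extent_buffer_bitmap_clear() (file lines 170-205) address the bitmap item as bytes inside eb->data: 'start' is the byte offset of the bitmap item and BIT_BYTE(pos) selects the byte that holds bit 'pos'. Below is a minimal standalone sketch of the same little-endian byte/bit arithmetic for a single bit; the macro and function names are illustrative, not the btrfs-progs ones, and the real helpers also loop over a run of 'len' bits.

#include <stdio.h>
#include <string.h>
#include <stdint.h>

/* Byte that holds bit 'nr' in a little-endian bitmap (same role as BIT_BYTE). */
#define BIT_BYTE_OF(nr)   ((nr) / 8)
/* Mask for bit 'nr' within its byte. */
#define BIT_MASK_OF(nr)   (1U << ((nr) % 8))

static void bitmap_set_bit(uint8_t *data, unsigned long start, unsigned long nr)
{
	uint8_t *p = data + start + BIT_BYTE_OF(nr);

	*p |= BIT_MASK_OF(nr);
}

static void bitmap_clear_bit(uint8_t *data, unsigned long start, unsigned long nr)
{
	uint8_t *p = data + start + BIT_BYTE_OF(nr);

	*p &= ~BIT_MASK_OF(nr);
}

static int bitmap_test_bit(const uint8_t *data, unsigned long start, unsigned long nr)
{
	return (data[start + BIT_BYTE_OF(nr)] & BIT_MASK_OF(nr)) != 0;
}

int main(void)
{
	uint8_t buf[32];

	memset(buf, 0, sizeof(buf));
	bitmap_set_bit(buf, 8, 13);	/* bitmap item starts at byte offset 8 */
	printf("bit 13 set: %d\n", bitmap_test_bit(buf, 8, 13));
	bitmap_clear_bit(buf, 8, 13);
	printf("bit 13 set: %d\n", bitmap_test_bit(buf, 8, 13));
	return 0;
}
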
226 int clear_extent_bits(struct extent_io_tree *tree, u64 start, u64 end, int bits)
246 node = search_cache_extent(&tree->state, start);
250 if (state->start > end)
269 if (state->start < start) {
270 err = split_state(tree, state, prealloc, start);
279 start = last_end + 1;
281 start = state->start;
291 if (state->start <= end && state->end > end) {
300 start = state->end + 1;
304 start = last_end + 1;
312 if (start > end)
320 int set_extent_bits(struct extent_io_tree *tree, u64 start, u64 end, int bits)
339 node = search_cache_extent(&tree->state, start);
341 err = insert_state(tree, prealloc, start, end, bits);
348 last_start = state->start;
357 if (state->start == start && state->end <= end) {
362 start = last_end + 1;
381 if (state->start < start) {
382 err = split_state(tree, state, prealloc, start);
389 start = state->end + 1;
393 start = last_end + 1;
395 start = state->start;
406 if (state->start > start) {
412 err = insert_state(tree, prealloc, start, this_end,
418 start = this_end + 1;
438 if (start > end)
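
Both clear_extent_bits() (line 270) and set_extent_bits() (line 382) call split_state() when an existing state straddles the requested boundary, and lines 138-142 show the split itself: the preallocated state takes the low half ending at split - 1 and the original is trimmed to begin at 'split'. A short standalone sketch of that split rule, using illustrative types rather than the real extent_state:

#include <stdio.h>
#include <stdint.h>

struct range_state {
	uint64_t start;
	uint64_t end;	/* inclusive */
};

/* Split 'orig' at 'split': 'prealloc' keeps the low half [orig->start, split - 1]
 * and 'orig' is trimmed to the high half [split, orig->end]. */
static void split_range(struct range_state *orig, struct range_state *prealloc,
			uint64_t split)
{
	prealloc->start = orig->start;
	prealloc->end = split - 1;
	orig->start = split;
}

int main(void)
{
	struct range_state orig = { .start = 0, .end = 16383 };
	struct range_state low;

	split_range(&orig, &low, 4096);
	printf("low  [%llu, %llu]\n",
	       (unsigned long long)low.start, (unsigned long long)low.end);
	printf("high [%llu, %llu]\n",
	       (unsigned long long)orig.start, (unsigned long long)orig.end);
	return 0;
}
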
443 int set_extent_dirty(struct extent_io_tree *tree, u64 start, u64 end)
445 return set_extent_bits(tree, start, end, EXTENT_DIRTY);
448 int clear_extent_dirty(struct extent_io_tree *tree, u64 start, u64 end)
450 return clear_extent_bits(tree, start, end, EXTENT_DIRTY);
453 int find_first_extent_bit(struct extent_io_tree *tree, u64 start,
464 node = search_cache_extent(&tree->state, start);
470 if (state->end >= start && (state->state & bits)) {
471 *start_ret = state->start;
484 int test_range_bit(struct extent_io_tree *tree, u64 start, u64 end,
491 node = search_cache_extent(&tree->state, start);
492 while (node && start <= end) {
495 if (filled && state->start > start) {
499 if (state->start > end)
509 start = state->end + 1;
510 if (start > end)
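
test_range_bit() (file lines 484-510) walks the states overlapping [start, end]: with 'filled' semantics it fails as soon as a state begins past the current 'start' (a gap), and otherwise advances 'start' to state->end + 1 until the range is consumed. A minimal standalone sketch of that coverage walk over a sorted array of inclusive ranges (an assumed flat layout, not the extent-state tree itself):

#include <stdio.h>
#include <stdint.h>

struct range {
	uint64_t start;
	uint64_t end;	/* inclusive */
};

/* Return 1 if the sorted, non-overlapping ranges cover [start, end] with no
 * gaps, mirroring the 'filled' walk in test_range_bit(). */
static int range_filled(const struct range *ranges, int nr,
			uint64_t start, uint64_t end)
{
	int i;

	for (i = 0; i < nr && start <= end; i++) {
		if (ranges[i].end < start)
			continue;		/* entirely before the query */
		if (ranges[i].start > start)
			return 0;		/* gap before this range */
		start = ranges[i].end + 1;	/* covered up to here */
	}
	return start > end;
}

int main(void)
{
	const struct range r[] = { { 0, 4095 }, { 4096, 8191 }, { 12288, 16383 } };

	printf("%d\n", range_filled(r, 3, 1024, 8000));	/* 1: fully covered */
	printf("%d\n", range_filled(r, 3, 1024, 13000));	/* 0: gap at 8192 */
	return 0;
}
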
522 int set_state_private(struct extent_io_tree *tree, u64 start, u64 private)
528 node = search_cache_extent(&tree->state, start);
534 if (state->start != start) {
543 int get_state_private(struct extent_io_tree *tree, u64 start, u64 *private)
549 node = search_cache_extent(&tree->state, start);
555 if (state->start != start) {
578 eb->start = bytenr;
582 eb->cache_node.start = bytenr;
594 new = __alloc_extent_buffer(src->fs_info, src->start, src->len);
628 "dirty eb leak (aborted trans): start %llu len %u",
629 eb->start, eb->len);
648 if (cache && cache->start == bytenr &&
657 u64 start)
662 cache = search_cache_extent(&tree->cache, start);
678 if (cache && cache->start == bytenr &&
710 * This is especially important to avoid injecting eb->start == SZ_64K, as
747 unsigned long start, unsigned long len)
749 return memcmp(eb->data + start, ptrv, len);
753 unsigned long start, unsigned long len)
755 memcpy(dst, eb->data + start, len);
759 unsigned long start, unsigned long len)
761 memcpy(eb->data + start, src, len);
778 unsigned long start, unsigned long len)
780 memset(eb->data + start, c, len);
783 int extent_buffer_test_bit(struct extent_buffer *eb, unsigned long start,
786 return le_test_bit(nr, (u8 *)eb->data + start);
794 set_extent_dirty(tree, eb->start, eb->start + eb->len - 1);
805 clear_extent_dirty(tree, eb->start, eb->start + eb->len - 1);