1/* SPDX-License-Identifier: GPL-2.0 */
2/* Copyright (c) 2022, NVIDIA CORPORATION & AFFILIATES.
3 */
4#ifndef __IOMMUFD_DOUBLE_SPAN_H
5#define __IOMMUFD_DOUBLE_SPAN_H
6
7#include <linux/interval_tree.h>
8
9/*
10 * This is a variation of the general interval_tree_span_iter that computes the
11 * spans over the union of two different interval trees. Used ranges are broken
12 * up and reported based on the tree that provides the interval. The first span
 * always takes priority. Like interval_tree_span_iter it is greedy, and the
 * same value of is_used will never repeat on two consecutive iterations.
15 */
struct interval_tree_double_span_iter {
	/* The two trees whose union is iterated; tree 0 takes priority */
	struct rb_root_cached *itrees[2];
	/* One single-tree span iterator per tree, driving this iterator */
	struct interval_tree_span_iter spans[2];
	/* First index of the current span; which alias applies follows is_used */
	union {
		unsigned long start_hole;
		unsigned long start_used;
	};
	/* Last index of the current span; which alias applies follows is_used */
	union {
		unsigned long last_hole;
		unsigned long last_used;
	};
	/* 0 = hole, 1 = used span[0], 2 = used span[1], -1 done iteration */
	int is_used;
};
30
/*
 * Recompute the current span from the two sub-iterators; presumably an
 * internal step of _first/_next — confirm against the .c implementation.
 */
void interval_tree_double_span_iter_update(
	struct interval_tree_double_span_iter *iter);
/*
 * Start iterating the union of itree1 and itree2 over
 * [first_index, last_index], positioning iter on the first span.
 */
void interval_tree_double_span_iter_first(
	struct interval_tree_double_span_iter *iter,
	struct rb_root_cached *itree1, struct rb_root_cached *itree2,
	unsigned long first_index, unsigned long last_index);
/* Advance iter to the span following the current one */
void interval_tree_double_span_iter_next(
	struct interval_tree_double_span_iter *iter);
39
40static inline bool
41interval_tree_double_span_iter_done(struct interval_tree_double_span_iter *state)
42{
43	return state->is_used == -1;
44}
45
/*
 * Iterate over every hole/used span in the union of the two trees within
 * [first_index, last_index]. span is a struct interval_tree_double_span_iter *
 * describing the current span on each iteration; check span->is_used to see
 * which kind of span it is.
 */
#define interval_tree_for_each_double_span(span, itree1, itree2, first_index, \
					   last_index)                        \
	for (interval_tree_double_span_iter_first(span, itree1, itree2,       \
						  first_index, last_index);   \
	     !interval_tree_double_span_iter_done(span);                      \
	     interval_tree_double_span_iter_next(span))
52
53#endif
54