// SPDX-License-Identifier: GPL-2.0
#include <stdlib.h>
#include <assert.h>
#include <stdio.h>
#include <string.h>

#include <linux/slab.h>
#include <linux/radix-tree.h>

#include "test.h"

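/*
 * Exercise the basic tag operations against a single index: insert, tag
 * set/get, range propagation via the tag_tagged_items() helper (which sets
 * tag !tag on every item in the range carrying @tag), and delete.  A tag
 * must not survive a delete/re-insert cycle, and deleting an absent item
 * must return 0.
 */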
static void
__simple_checks(struct radix_tree_root *tree, unsigned long index, int tag)
{
	unsigned long first = 0;
	int ret;

	item_check_absent(tree, index);
	assert(item_tag_get(tree, index, tag) == 0);

	item_insert(tree, index);
	assert(item_tag_get(tree, index, tag) == 0);
	item_tag_set(tree, index, tag);
	ret = item_tag_get(tree, index, tag);
	assert(ret != 0);
	ret = tag_tagged_items(tree, first, ~0UL, 10, tag, !tag);
	assert(ret == 1);
	ret = item_tag_get(tree, index, !tag);
	assert(ret != 0);
	ret = item_delete(tree, index);
	assert(ret != 0);
	item_insert(tree, index);
	ret = item_tag_get(tree, index, tag);
	assert(ret == 0);
	ret = item_delete(tree, index);
	assert(ret != 0);
	ret = item_delete(tree, index);
	assert(ret == 0);
}

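/*
 * Run __simple_checks() for both tags over the first 10000 indices, verify
 * tag consistency for each tag, then report nr_allocated before and after
 * item_kill_tree() so leaks show up in the verbose output.
 */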
void simple_checks(void)
{
	unsigned long index;
	RADIX_TREE(tree, GFP_KERNEL);

	for (index = 0; index < 10000; index++) {
		__simple_checks(&tree, index, 0);
		__simple_checks(&tree, index, 1);
	}
	verify_tag_consistency(&tree, 0);
	verify_tag_consistency(&tree, 1);
	printv(2, "before item_kill_tree: %d allocated\n", nr_allocated);
	item_kill_tree(&tree);
	rcu_barrier();
	printv(2, "after item_kill_tree: %d allocated\n", nr_allocated);
}

/*
 * Check that tags propagate correctly when extending a tree.
 */
static void extend_checks(void)
{
	RADIX_TREE(tree, GFP_KERNEL);

	item_insert(&tree, 43);
	assert(item_tag_get(&tree, 43, 0) == 0);
	item_tag_set(&tree, 43, 0);
	assert(item_tag_get(&tree, 43, 0) == 1);
	item_insert(&tree, 1000000);
	assert(item_tag_get(&tree, 43, 0) == 1);

	item_insert(&tree, 0);
	item_tag_set(&tree, 0, 0);
	item_delete(&tree, 1000000);
	assert(item_tag_get(&tree, 43, 0) != 0);
	item_delete(&tree, 43);
	assert(item_tag_get(&tree, 43, 0) == 0);	/* crash */
	assert(item_tag_get(&tree, 0, 0) == 1);

	verify_tag_consistency(&tree, 0);

	item_kill_tree(&tree);
}

/*
 * Check that tags propagate correctly when contracting a tree.
 */
static void contract_checks(void)
{
	struct item *item;
	int tmp;
	RADIX_TREE(tree, GFP_KERNEL);

	tmp = 1<<RADIX_TREE_MAP_SHIFT;
	item_insert(&tree, tmp);
	item_insert(&tree, tmp+1);
	item_tag_set(&tree, tmp, 0);
	item_tag_set(&tree, tmp, 1);
	item_tag_set(&tree, tmp+1, 0);
	item_delete(&tree, tmp+1);
	item_tag_clear(&tree, tmp, 1);

	assert(radix_tree_gang_lookup_tag(&tree, (void **)&item, 0, 1, 0) == 1);
	assert(radix_tree_gang_lookup_tag(&tree, (void **)&item, 0, 1, 1) == 0);

	assert(item_tag_get(&tree, tmp, 0) == 1);
	assert(item_tag_get(&tree, tmp, 1) == 0);

	verify_tag_consistency(&tree, 0);
	item_kill_tree(&tree);
}

/*
 * Stupid tag thrasher
 *
 * Create a large linear array shadowing the tree: each element of the array
 * tracks the expected state (absent, present or tagged) of the item at the
 * corresponding index in the tree.
 */

enum {
	NODE_ABSENT = 0,
	NODE_PRESENT = 1,
	NODE_TAGGED = 2,
};

#define THRASH_SIZE		(1000 * 1000)
#define N 127
#define BATCH	33

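/*
 * Walk the tree with radix_tree_gang_lookup_tag() in batches of BATCH and
 * check that every returned index is marked NODE_TAGGED in the shadow array
 * and that no tagged index is skipped along the way.
 */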
static void gang_check(struct radix_tree_root *tree,
			char *thrash_state, int tag)
{
	struct item *items[BATCH];
	int nr_found;
	unsigned long index = 0;
	unsigned long last_index = 0;

	while ((nr_found = radix_tree_gang_lookup_tag(tree, (void **)items,
					index, BATCH, tag))) {
		int i;

		for (i = 0; i < nr_found; i++) {
			struct item *item = items[i];

			while (last_index < item->index) {
				assert(thrash_state[last_index] != NODE_TAGGED);
				last_index++;
			}
			assert(thrash_state[last_index] == NODE_TAGGED);
			last_index++;
		}
		index = items[nr_found - 1]->index + 1;
	}
}

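/*
 * Thrash the tree against the shadow array: for every combination of chunk
 * sizes, randomly insert, delete, tag and untag items, keeping the shadow
 * state in sync, then verify every index and the tagged gang lookups.  Tag
 * set and clear are each applied twice so that retagging an already-tagged
 * item and reclearing an already-clear tag are exercised as well.
 */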
static void do_thrash(struct radix_tree_root *tree, char *thrash_state, int tag)
{
	int insert_chunk;
	int delete_chunk;
	int tag_chunk;
	int untag_chunk;
	int total_tagged = 0;
	int total_present = 0;

	for (insert_chunk = 1; insert_chunk < THRASH_SIZE; insert_chunk *= N)
	for (delete_chunk = 1; delete_chunk < THRASH_SIZE; delete_chunk *= N)
	for (tag_chunk = 1; tag_chunk < THRASH_SIZE; tag_chunk *= N)
	for (untag_chunk = 1; untag_chunk < THRASH_SIZE; untag_chunk *= N) {
		int i;
		unsigned long index;
		int nr_inserted = 0;
		int nr_deleted = 0;
		int nr_tagged = 0;
		int nr_untagged = 0;
		int actual_total_tagged;
		int actual_total_present;

		for (i = 0; i < insert_chunk; i++) {
			index = rand() % THRASH_SIZE;
			if (thrash_state[index] != NODE_ABSENT)
				continue;
			item_check_absent(tree, index);
			item_insert(tree, index);
			assert(thrash_state[index] != NODE_PRESENT);
			thrash_state[index] = NODE_PRESENT;
			nr_inserted++;
			total_present++;
		}

		for (i = 0; i < delete_chunk; i++) {
			index = rand() % THRASH_SIZE;
			if (thrash_state[index] == NODE_ABSENT)
				continue;
			item_check_present(tree, index);
			if (item_tag_get(tree, index, tag)) {
				assert(thrash_state[index] == NODE_TAGGED);
				total_tagged--;
			} else {
				assert(thrash_state[index] == NODE_PRESENT);
			}
			item_delete(tree, index);
			assert(thrash_state[index] != NODE_ABSENT);
			thrash_state[index] = NODE_ABSENT;
			nr_deleted++;
			total_present--;
		}

		for (i = 0; i < tag_chunk; i++) {
			index = rand() % THRASH_SIZE;
			if (thrash_state[index] != NODE_PRESENT) {
				if (item_lookup(tree, index))
					assert(item_tag_get(tree, index, tag));
				continue;
			}
			item_tag_set(tree, index, tag);
			item_tag_set(tree, index, tag);
			assert(thrash_state[index] != NODE_TAGGED);
			thrash_state[index] = NODE_TAGGED;
			nr_tagged++;
			total_tagged++;
		}

		for (i = 0; i < untag_chunk; i++) {
			index = rand() % THRASH_SIZE;
			if (thrash_state[index] != NODE_TAGGED)
				continue;
			item_check_present(tree, index);
			assert(item_tag_get(tree, index, tag));
			item_tag_clear(tree, index, tag);
			item_tag_clear(tree, index, tag);
			assert(thrash_state[index] != NODE_PRESENT);
			thrash_state[index] = NODE_PRESENT;
			nr_untagged++;
			total_tagged--;
		}

		actual_total_tagged = 0;
		actual_total_present = 0;
		for (index = 0; index < THRASH_SIZE; index++) {
			switch (thrash_state[index]) {
			case NODE_ABSENT:
				item_check_absent(tree, index);
				break;
			case NODE_PRESENT:
				item_check_present(tree, index);
				assert(!item_tag_get(tree, index, tag));
				actual_total_present++;
				break;
			case NODE_TAGGED:
				item_check_present(tree, index);
				assert(item_tag_get(tree, index, tag));
				actual_total_present++;
				actual_total_tagged++;
				break;
			}
		}

		gang_check(tree, thrash_state, tag);

		printv(2, "%d(%d) %d(%d) %d(%d) %d(%d) / "
				"%d(%d) present, %d(%d) tagged\n",
			insert_chunk, nr_inserted,
			delete_chunk, nr_deleted,
			tag_chunk, nr_tagged,
			untag_chunk, nr_untagged,
			total_present, actual_total_present,
			total_tagged, actual_total_tagged);
	}
}

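/*
 * Allocate and zero the shadow array (every index starts NODE_ABSENT), run
 * the thrasher for tag 0, then verify consistency and tear everything down.
 */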
static void thrash_tags(void)
{
	RADIX_TREE(tree, GFP_KERNEL);
	char *thrash_state;

	thrash_state = malloc(THRASH_SIZE);
	assert(thrash_state != NULL);
	memset(thrash_state, 0, THRASH_SIZE);

	do_thrash(&tree, thrash_state, 0);

	verify_tag_consistency(&tree, 0);
	item_kill_tree(&tree);
	free(thrash_state);
}

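/*
 * Insert and delete a single high index, forcing the tree to extend and
 * contract; tag_check() reports nr_allocated afterwards to catch leaks.
 */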
static void leak_check(void)
{
	RADIX_TREE(tree, GFP_KERNEL);

	item_insert(&tree, 1000000);
	item_delete(&tree, 1000000);
	item_kill_tree(&tree);
}

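/*
 * Same as leak_check(), but prints nr_allocated after each step for
 * debugging.
 */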
static void __leak_check(void)
{
	RADIX_TREE(tree, GFP_KERNEL);

	printv(2, "%d: nr_allocated=%d\n", __LINE__, nr_allocated);
	item_insert(&tree, 1000000);
	printv(2, "%d: nr_allocated=%d\n", __LINE__, nr_allocated);
	item_delete(&tree, 1000000);
	printv(2, "%d: nr_allocated=%d\n", __LINE__, nr_allocated);
	item_kill_tree(&tree);
	printv(2, "%d: nr_allocated=%d\n", __LINE__, nr_allocated);
}

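/*
 * Tag checks on a tree holding only index 0: tagged gang lookups, tag
 * consistency, copying XA_MARK_0 to XA_MARK_1 over a range with
 * tag_tagged_items(), and clearing the tag again.
 */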
static void single_check(void)
{
	struct item *items[BATCH];
	RADIX_TREE(tree, GFP_KERNEL);
	int ret;
	unsigned long first = 0;

	item_insert(&tree, 0);
	item_tag_set(&tree, 0, 0);
	ret = radix_tree_gang_lookup_tag(&tree, (void **)items, 0, BATCH, 0);
	assert(ret == 1);
	ret = radix_tree_gang_lookup_tag(&tree, (void **)items, 1, BATCH, 0);
	assert(ret == 0);
	verify_tag_consistency(&tree, 0);
	verify_tag_consistency(&tree, 1);
	ret = tag_tagged_items(&tree, first, 10, 10, XA_MARK_0, XA_MARK_1);
	assert(ret == 1);
	ret = radix_tree_gang_lookup_tag(&tree, (void **)items, 0, BATCH, 1);
	assert(ret == 1);
	item_tag_clear(&tree, 0, 0);
	ret = radix_tree_gang_lookup_tag(&tree, (void **)items, 0, BATCH, 0);
	assert(ret == 0);
	item_kill_tree(&tree);
}

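/*
 * Entry point for the tag tests, called from the test harness.  nr_allocated
 * is reported after each stage, following an rcu_barrier() so that deferred
 * frees have completed.
 */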
void tag_check(void)
{
	single_check();
	extend_checks();
	contract_checks();
	rcu_barrier();
	printv(2, "after extend_checks: %d allocated\n", nr_allocated);
	__leak_check();
	leak_check();
	rcu_barrier();
	printv(2, "after leak_check: %d allocated\n", nr_allocated);
	simple_checks();
	rcu_barrier();
	printv(2, "after simple_checks: %d allocated\n", nr_allocated);
	thrash_tags();
	rcu_barrier();
	printv(2, "after thrash_tags: %d allocated\n", nr_allocated);
}