Lines Matching defs:mt

29 #define RCU_MT_BUG_ON(test, y) {if (y) { test->stop = true; } MT_BUG_ON(test->mt, y); }
32 struct maple_tree *mt;
49 struct maple_tree *mt;
91 static noinline void __init check_new_node(struct maple_tree *mt)
99 MA_STATE(mas, mt, 0, 0);
104 mtree_lock(mt);
109 MT_BUG_ON(mt, mas_alloc_req(&mas) != 3);
111 MT_BUG_ON(mt, mas.node != MA_ERROR(-ENOMEM));
112 MT_BUG_ON(mt, !mas_nomem(&mas, GFP_KERNEL));
114 MT_BUG_ON(mt, mas_allocated(&mas) != 3);
116 MT_BUG_ON(mt, not_empty(mn));
117 MT_BUG_ON(mt, mn == NULL);
118 MT_BUG_ON(mt, mas.alloc == NULL);
119 MT_BUG_ON(mt, mas.alloc->slot[0] == NULL);
123 mtree_unlock(mt);
127 mtree_lock(mt);
131 MT_BUG_ON(mt, mas_alloc_req(&mas) != 1);
134 MT_BUG_ON(mt, !mas_nomem(&mas, GFP_KERNEL));
137 MT_BUG_ON(mt, not_empty(mn));
138 MT_BUG_ON(mt, mn == NULL);
139 MT_BUG_ON(mt, mn->slot[0] != NULL);
140 MT_BUG_ON(mt, mn->slot[1] != NULL);
141 MT_BUG_ON(mt, mas_allocated(&mas) != 0);
152 MT_BUG_ON(mt, mas_allocated(&mas) != 3);
154 MT_BUG_ON(mt, mas_alloc_req(&mas) != 0);
156 MT_BUG_ON(mt, mas.alloc == NULL);
157 MT_BUG_ON(mt, mas.alloc->slot[0] == NULL);
158 MT_BUG_ON(mt, mas.alloc->slot[1] == NULL);
160 MT_BUG_ON(mt, mas_allocated(&mas) != 3);
167 MT_BUG_ON(mt, mas_alloc_req(&mas) != 1);
170 MT_BUG_ON(mt, !mas_nomem(&mas, GFP_KERNEL));
171 MT_BUG_ON(mt, mas_allocated(&mas) != 1);
174 MT_BUG_ON(mt, not_empty(mn));
175 MT_BUG_ON(mt, mas_allocated(&mas) != 0);
176 MT_BUG_ON(mt, mn == NULL);
177 MT_BUG_ON(mt, mn->slot[0] != NULL);
178 MT_BUG_ON(mt, mn->slot[1] != NULL);
179 MT_BUG_ON(mt, mas_allocated(&mas) != 0);
181 MT_BUG_ON(mt, mas_allocated(&mas) != 1);
182 MT_BUG_ON(mt, mas.alloc->node_count);
185 MT_BUG_ON(mt, mas_alloc_req(&mas) != 2);
187 MT_BUG_ON(mt, !mas_nomem(&mas, GFP_KERNEL));
188 MT_BUG_ON(mt, mas_allocated(&mas) != 3);
189 MT_BUG_ON(mt, mas.alloc == NULL);
190 MT_BUG_ON(mt, mas.alloc->slot[0] == NULL);
191 MT_BUG_ON(mt, mas.alloc->slot[1] == NULL);
194 MT_BUG_ON(mt, mas_allocated(&mas) != i);
195 MT_BUG_ON(mt, !mn);
196 MT_BUG_ON(mt, not_empty(mn));
203 MT_BUG_ON(mt, mas_alloc_req(&mas) != total);
205 MT_BUG_ON(mt, !mas_nomem(&mas, GFP_KERNEL));
225 MT_BUG_ON(mt, mas.alloc->node_count != e);
227 MT_BUG_ON(mt, not_empty(mn));
228 MT_BUG_ON(mt, mas_allocated(&mas) != i - 1);
229 MT_BUG_ON(mt, !mn);
238 MT_BUG_ON(mt, !mas_nomem(&mas, GFP_KERNEL));
241 MT_BUG_ON(mt, mas_allocated(&mas) != j - 1);
242 MT_BUG_ON(mt, !mn);
243 MT_BUG_ON(mt, not_empty(mn));
245 MT_BUG_ON(mt, mas_allocated(&mas) != j);
247 MT_BUG_ON(mt, not_empty(mn));
248 MT_BUG_ON(mt, mas_allocated(&mas) != j - 1);
252 MT_BUG_ON(mt, mas_allocated(&mas) != 0);
256 MT_BUG_ON(mt, !mas_nomem(&mas, GFP_KERNEL));
258 MT_BUG_ON(mt, mas_allocated(&mas) != i - j);
260 MT_BUG_ON(mt, mas_allocated(&mas) != i - j - 1);
266 MT_BUG_ON(mt, mas_allocated(&mas) != i - j);
268 MT_BUG_ON(mt, mas_allocated(&mas) != i);
270 MT_BUG_ON(mt, mas_allocated(&mas) != i - j);
272 MT_BUG_ON(mt, not_empty(mn));
275 MT_BUG_ON(mt, mas_allocated(&mas) != i - j - 1);
278 MT_BUG_ON(mt, mas_nomem(&mas, GFP_KERNEL));
287 MT_BUG_ON(mt, !mas.alloc);
293 MT_BUG_ON(mt, !smn->slot[j]);
299 MT_BUG_ON(mt, mas_allocated(&mas) != total);
303 MT_BUG_ON(mt, mas_allocated(&mas) != 0);
307 MT_BUG_ON(mt, mas_allocated(&mas) != i); /* check request filled */
310 MT_BUG_ON(mt, mn == NULL);
311 MT_BUG_ON(mt, not_empty(mn));
315 MT_BUG_ON(mt, mas_allocated(&mas) != 0);
319 MA_STATE(mas2, mt, 0, 0);
322 MT_BUG_ON(mt, mas_allocated(&mas) != i); /* check request filled */
325 MT_BUG_ON(mt, mn == NULL);
326 MT_BUG_ON(mt, not_empty(mn));
328 MT_BUG_ON(mt, mas_allocated(&mas2) != j);
330 MT_BUG_ON(mt, mas_allocated(&mas) != 0);
331 MT_BUG_ON(mt, mas_allocated(&mas2) != i);
334 MT_BUG_ON(mt, mas_allocated(&mas2) != j);
336 MT_BUG_ON(mt, mn == NULL);
337 MT_BUG_ON(mt, not_empty(mn));
341 MT_BUG_ON(mt, mas_allocated(&mas2) != 0);
345 MT_BUG_ON(mt, mas_allocated(&mas) != 0);
347 MT_BUG_ON(mt, mas.node != MA_ERROR(-ENOMEM));
348 MT_BUG_ON(mt, !mas_nomem(&mas, GFP_KERNEL));
349 MT_BUG_ON(mt, mas_allocated(&mas) != MAPLE_ALLOC_SLOTS + 1);
350 MT_BUG_ON(mt, mas.alloc->node_count != MAPLE_ALLOC_SLOTS);
353 MT_BUG_ON(mt, mn == NULL);
354 MT_BUG_ON(mt, not_empty(mn));
355 MT_BUG_ON(mt, mas_allocated(&mas) != MAPLE_ALLOC_SLOTS);
356 MT_BUG_ON(mt, mas.alloc->node_count != MAPLE_ALLOC_SLOTS - 1);
359 MT_BUG_ON(mt, mas_allocated(&mas) != MAPLE_ALLOC_SLOTS + 1);
360 MT_BUG_ON(mt, mas.alloc->node_count != MAPLE_ALLOC_SLOTS);
364 MT_BUG_ON(mt, mas_alloc_req(&mas) != 1);
365 MT_BUG_ON(mt, mas.node != MA_ERROR(-ENOMEM));
366 MT_BUG_ON(mt, !mas_nomem(&mas, GFP_KERNEL));
367 MT_BUG_ON(mt, mas_alloc_req(&mas));
368 MT_BUG_ON(mt, mas.alloc->node_count != 1);
369 MT_BUG_ON(mt, mas_allocated(&mas) != MAPLE_ALLOC_SLOTS + 2);
371 MT_BUG_ON(mt, not_empty(mn));
372 MT_BUG_ON(mt, mas_allocated(&mas) != MAPLE_ALLOC_SLOTS + 1);
373 MT_BUG_ON(mt, mas.alloc->node_count != MAPLE_ALLOC_SLOTS);
375 MT_BUG_ON(mt, mas.alloc->node_count != 1);
376 MT_BUG_ON(mt, mas_allocated(&mas) != MAPLE_ALLOC_SLOTS + 2);
378 MT_BUG_ON(mt, not_empty(mn));
383 MT_BUG_ON(mt, not_empty(mn));
387 MT_BUG_ON(mt, mas_allocated(&mas) != 0);
444 MT_BUG_ON(mt, mas_allocated(&mas) != 5);
449 MT_BUG_ON(mt, mas_allocated(&mas) != 10);
455 MT_BUG_ON(mt, mas_allocated(&mas) != MAPLE_ALLOC_SLOTS - 1);
460 MT_BUG_ON(mt, mas_allocated(&mas) != 10 + MAPLE_ALLOC_SLOTS - 1);
463 mtree_unlock(mt);
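
The check_new_node() assertions above exercise the maple state's internal allocation bookkeeping: mas_alloc_req(), mas_allocated() and the mas_nomem() refill path. Outside the test harness that machinery is normally reached through the retry idiom sketched below; this is a hedged illustration only, and mas_store() and mas_is_err() are taken from the public <linux/maple_tree.h> API rather than from the lines listed here.

/* Sketch of the ENOMEM-retry idiom the checks above simulate.  A store
 * that runs out of nodes parks the state in an error state; mas_nomem()
 * then allocates with the caller's gfp flags (it may drop and retake the
 * tree lock to do so) and returns true to ask for a retry.
 */
static int store_with_retry(struct maple_tree *mt, unsigned long first,
			    unsigned long last, void *entry)
{
	MA_STATE(mas, mt, first, last);
	int ret = 0;

	mtree_lock(mt);
	do {
		mas_store(&mas, entry);
	} while (mas_nomem(&mas, GFP_KERNEL));
	if (mas_is_err(&mas))
		ret = -ENOMEM;
	mtree_unlock(mt);

	return ret;
}
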
469 static noinline void __init check_erase(struct maple_tree *mt, unsigned long index,
472 MT_BUG_ON(mt, mtree_test_erase(mt, index) != ptr);
475 #define erase_check_load(mt, i) check_load(mt, set[i], entry[i%2])
476 #define erase_check_insert(mt, i) check_insert(mt, set[i], entry[i%2])
477 #define erase_check_erase(mt, i) check_erase(mt, set[i], entry[i%2])
479 static noinline void __init check_erase_testset(struct maple_tree *mt)
497 void *entry[2] = { ptr, mt };
502 mt_set_in_rcu(mt);
504 erase_check_insert(mt, i);
506 erase_check_load(mt, i);
509 erase_check_erase(mt, 1);
510 erase_check_load(mt, 0);
511 check_load(mt, set[1], NULL);
513 erase_check_load(mt, i);
516 erase_check_erase(mt, 2);
517 erase_check_load(mt, 0);
518 check_load(mt, set[1], NULL);
519 check_load(mt, set[2], NULL);
521 erase_check_insert(mt, 1);
522 erase_check_insert(mt, 2);
525 erase_check_load(mt, i);
528 erase_check_load(mt, 3);
529 erase_check_erase(mt, 1);
530 erase_check_load(mt, 0);
531 check_load(mt, set[1], NULL);
533 erase_check_load(mt, i);
539 root_node = mt->ma_root;
540 erase_check_insert(mt, 1);
542 erase_check_load(mt, 0);
543 check_load(mt, 5016, NULL);
544 erase_check_load(mt, 1);
545 check_load(mt, 5013, NULL);
546 erase_check_load(mt, 2);
547 check_load(mt, 5018, NULL);
548 erase_check_load(mt, 3);
550 erase_check_erase(mt, 2); /* erase 5017 to check append */
551 erase_check_load(mt, 0);
552 check_load(mt, 5016, NULL);
553 erase_check_load(mt, 1);
554 check_load(mt, 5013, NULL);
555 check_load(mt, set[2], NULL);
556 check_load(mt, 5018, NULL);
558 erase_check_load(mt, 3);
560 root_node = mt->ma_root;
561 erase_check_insert(mt, 2);
563 erase_check_load(mt, 0);
564 check_load(mt, 5016, NULL);
565 erase_check_load(mt, 1);
566 check_load(mt, 5013, NULL);
567 erase_check_load(mt, 2);
568 check_load(mt, 5018, NULL);
569 erase_check_load(mt, 3);
572 erase_check_erase(mt, 2); /* erase 5017 to check append */
573 erase_check_load(mt, 0);
574 check_load(mt, 5016, NULL);
575 check_load(mt, set[2], NULL);
576 erase_check_erase(mt, 0); /* erase 5015 to check append */
577 check_load(mt, set[0], NULL);
578 check_load(mt, 5016, NULL);
579 erase_check_insert(mt, 4); /* 1000 < Should not split. */
580 check_load(mt, set[0], NULL);
581 check_load(mt, 5016, NULL);
582 erase_check_load(mt, 1);
583 check_load(mt, 5013, NULL);
584 check_load(mt, set[2], NULL);
585 check_load(mt, 5018, NULL);
586 erase_check_load(mt, 4);
587 check_load(mt, 999, NULL);
588 check_load(mt, 1001, NULL);
589 erase_check_load(mt, 4);
590 if (mt_in_rcu(mt))
591 MT_BUG_ON(mt, root_node == mt->ma_root);
593 MT_BUG_ON(mt, root_node != mt->ma_root);
596 MT_BUG_ON(mt, !mte_is_leaf(mt->ma_root));
600 erase_check_insert(mt, 0);
601 erase_check_insert(mt, 2);
604 erase_check_insert(mt, i);
606 erase_check_load(mt, j);
609 erase_check_erase(mt, 14); /*6015 */
612 check_load(mt, set[i], NULL);
614 erase_check_load(mt, i);
616 erase_check_erase(mt, 16); /*7002 */
619 check_load(mt, set[i], NULL);
621 erase_check_load(mt, i);
626 erase_check_erase(mt, 13); /*6012 */
629 check_load(mt, set[i], NULL);
631 erase_check_load(mt, i);
634 erase_check_erase(mt, 15); /*7003 */
637 check_load(mt, set[i], NULL);
639 erase_check_load(mt, i);
643 erase_check_erase(mt, 17); /*7008 *should* cause coalesce. */
646 check_load(mt, set[i], NULL);
648 erase_check_load(mt, i);
651 erase_check_erase(mt, 18); /*7012 */
654 check_load(mt, set[i], NULL);
656 erase_check_load(mt, i);
660 erase_check_erase(mt, 19); /*7015 */
663 check_load(mt, set[i], NULL);
665 erase_check_load(mt, i);
668 erase_check_erase(mt, 20); /*8003 */
671 check_load(mt, set[i], NULL);
673 erase_check_load(mt, i);
676 erase_check_erase(mt, 21); /*8002 */
679 check_load(mt, set[i], NULL);
681 erase_check_load(mt, i);
685 erase_check_erase(mt, 22); /*8008 */
688 check_load(mt, set[i], NULL);
690 erase_check_load(mt, i);
693 erase_check_erase(mt, i);
697 check_load(mt, set[i], NULL);
699 erase_check_load(mt, i);
705 erase_check_insert(mt, i);
709 erase_check_erase(mt, i);
712 erase_check_load(mt, j);
714 check_load(mt, set[j], NULL);
719 erase_check_erase(mt, i);
722 erase_check_load(mt, j);
724 check_load(mt, set[j], NULL);
727 erase_check_insert(mt, 8);
728 erase_check_insert(mt, 9);
729 erase_check_erase(mt, 8);
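
The erase_check_* wrappers above boil down to mtree_insert()/mtree_load()/mtree_erase() round trips on the indices in set[]. A self-contained sketch of that round trip follows; DEFINE_MTREE() and the index 5015 are illustrative assumptions, not values taken from the test's data.

#include <linux/maple_tree.h>

static DEFINE_MTREE(demo_tree);		/* empty tree, flags = 0 */

static void demo_insert_erase(void)
{
	void *ptr = &demo_tree;		/* any pointer works as an entry */

	/* mtree_insert() fails with -EEXIST if the index is occupied. */
	MT_BUG_ON(&demo_tree, mtree_insert(&demo_tree, 5015, ptr, GFP_KERNEL));
	MT_BUG_ON(&demo_tree, mtree_load(&demo_tree, 5015) != ptr);

	/* mtree_erase() returns the old entry; the slot reads back NULL. */
	MT_BUG_ON(&demo_tree, mtree_erase(&demo_tree, 5015) != ptr);
	MT_BUG_ON(&demo_tree, mtree_load(&demo_tree, 5015) != NULL);

	mtree_destroy(&demo_tree);
}
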
736 #define erase_check_store_range(mt, a, i, ptr) mtree_test_store_range(mt, \
986 static noinline void __init check_erase2_testset(struct maple_tree *mt,
995 MA_STATE(mas, mt, 0, 0);
1002 MA_STATE(mas_start, mt, set[i+1], set[i+1]);
1003 MA_STATE(mas_end, mt, set[i+2], set[i+2]);
1030 erase_check_store_range(mt, set, i + 1, value);
1058 erase_check_store_range(mt, set, i + 1, value);
1063 check_erase(mt, set[i+1], xa_mk_value(set[i+1]));
1067 mt_validate(mt);
1069 MT_BUG_ON(mt, !mt_height(mt));
1071 mt_dump(mt, mt_dump_hex);
1079 mt_for_each(mt, foo, addr, ULONG_MAX) {
1082 pr_err("mt: %lu -> %p (%d)\n", addr+1, foo, check);
1092 MT_BUG_ON(mt, check != entry_count);
1105 MT_BUG_ON(mt, 1);
1120 mt_validate(mt);
1123 MT_BUG_ON(mt, check != entry_count);
1125 MT_BUG_ON(mt, mtree_load(mas.tree, 0) != NULL);
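
check_erase2_testset() re-counts the surviving entries with the mt_for_each() iterator and compares the total against entry_count. The counting walk, reduced to the public iterator (the helper name count_entries is illustrative):

/* Walk every populated range and count it.  mt_for_each() advances the
 * index cursor past each entry it returns, so the loop terminates at
 * ULONG_MAX.  Readers hold rcu_read_lock() when writers may run
 * concurrently.
 */
static unsigned long count_entries(struct maple_tree *mt)
{
	unsigned long index = 0;
	unsigned long count = 0;
	void *entry;

	rcu_read_lock();
	mt_for_each(mt, entry, index, ULONG_MAX)
		count++;
	rcu_read_unlock();

	return count;
}
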
1131 static noinline void __init check_erase2_sets(struct maple_tree *mt)
33871 * Gap was found: mt 4041162752 gap_end 4041183232
33903 MA_STATE(mas, mt, 0, 0);
33906 check_erase2_testset(mt, set, ARRAY_SIZE(set));
33908 mtree_destroy(mt);
33910 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
33911 check_erase2_testset(mt, set2, ARRAY_SIZE(set2));
33913 MT_BUG_ON(mt, !!mt_find(mt, &start, 140735933906943UL));
33914 mtree_destroy(mt);
33917 mt_init_flags(mt, 0);
33918 check_erase2_testset(mt, set3, ARRAY_SIZE(set3));
33920 mtree_destroy(mt);
33922 mt_init_flags(mt, 0);
33923 check_erase2_testset(mt, set4, ARRAY_SIZE(set4));
33931 mtree_destroy(mt);
33933 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
33935 check_erase2_testset(mt, set5, ARRAY_SIZE(set5));
33938 mtree_destroy(mt);
33940 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
33941 check_erase2_testset(mt, set6, ARRAY_SIZE(set6));
33943 mtree_destroy(mt);
33945 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
33946 check_erase2_testset(mt, set7, ARRAY_SIZE(set7));
33948 mtree_destroy(mt);
33950 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
33951 check_erase2_testset(mt, set8, ARRAY_SIZE(set8));
33953 mtree_destroy(mt);
33955 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
33956 check_erase2_testset(mt, set9, ARRAY_SIZE(set9));
33958 mtree_destroy(mt);
33960 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
33961 check_erase2_testset(mt, set10, ARRAY_SIZE(set10));
33963 mtree_destroy(mt);
33966 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
33967 check_erase2_testset(mt, set11, ARRAY_SIZE(set11));
33970 MT_BUG_ON(mt, mas.last != 140014592573439);
33971 mtree_destroy(mt);
33974 mas.tree = mt;
33977 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
33978 check_erase2_testset(mt, set12, ARRAY_SIZE(set12));
33986 mtree_destroy(mt);
33989 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
33990 check_erase2_testset(mt, set13, ARRAY_SIZE(set13));
33991 mtree_erase(mt, 140373516443648);
33995 mtree_destroy(mt);
33996 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
33997 check_erase2_testset(mt, set14, ARRAY_SIZE(set14));
33999 mtree_destroy(mt);
34001 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
34002 check_erase2_testset(mt, set15, ARRAY_SIZE(set15));
34004 mtree_destroy(mt);
34009 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
34010 check_erase2_testset(mt, set16, ARRAY_SIZE(set16));
34013 MT_BUG_ON(mt, mas.last != 139921865547775);
34015 mtree_destroy(mt);
34024 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
34025 check_erase2_testset(mt, set17, ARRAY_SIZE(set17));
34028 MT_BUG_ON(mt, mas.last != 139953197322239);
34029 /* MT_BUG_ON(mt, mas.index != 139953197318144); */
34031 mtree_destroy(mt);
34040 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
34041 check_erase2_testset(mt, set18, ARRAY_SIZE(set18));
34044 MT_BUG_ON(mt, mas.last != 140222968475647);
34045 /*MT_BUG_ON(mt, mas.index != 140222966259712); */
34047 mtree_destroy(mt);
34058 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
34059 check_erase2_testset(mt, set19, ARRAY_SIZE(set19));
34063 MT_BUG_ON(mt, entry != xa_mk_value(140656779083776));
34065 MT_BUG_ON(mt, entry != xa_mk_value(140656766251008));
34067 mtree_destroy(mt);
34074 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
34075 check_erase2_testset(mt, set20, ARRAY_SIZE(set20));
34077 check_load(mt, 94849009414144, NULL);
34079 mtree_destroy(mt);
34082 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
34083 check_erase2_testset(mt, set21, ARRAY_SIZE(set21));
34085 mt_validate(mt);
34087 mtree_destroy(mt);
34090 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
34091 check_erase2_testset(mt, set22, ARRAY_SIZE(set22));
34093 mt_validate(mt);
34094 ptr = mtree_load(mt, 140551363362816);
34095 MT_BUG_ON(mt, ptr == mtree_load(mt, 140551363420159));
34097 mtree_destroy(mt);
34100 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
34101 check_erase2_testset(mt, set23, ARRAY_SIZE(set23));
34104 mt_validate(mt);
34105 mtree_destroy(mt);
34109 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
34110 check_erase2_testset(mt, set24, ARRAY_SIZE(set24));
34113 mt_validate(mt);
34114 mtree_destroy(mt);
34117 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
34118 check_erase2_testset(mt, set25, ARRAY_SIZE(set25));
34121 mt_validate(mt);
34122 mtree_destroy(mt);
34127 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
34128 check_erase2_testset(mt, set26, ARRAY_SIZE(set26));
34131 MT_BUG_ON(mt, mas.last != 140109040959487);
34133 mt_validate(mt);
34134 mtree_destroy(mt);
34139 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
34140 check_erase2_testset(mt, set27, ARRAY_SIZE(set27));
34142 MT_BUG_ON(mt, NULL != mtree_load(mt, 140415537422336));
34144 mt_validate(mt);
34145 mtree_destroy(mt);
34149 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
34150 check_erase2_testset(mt, set28, ARRAY_SIZE(set28));
34155 MT_BUG_ON(mt, mas.index != 139918401601536);
34157 mt_validate(mt);
34158 mtree_destroy(mt);
34165 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
34166 check_erase2_testset(mt, set29, ARRAY_SIZE(set29));
34169 mt_validate(mt);
34170 mtree_destroy(mt);
34178 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
34179 check_erase2_testset(mt, set30, ARRAY_SIZE(set30));
34182 mt_validate(mt);
34183 mtree_destroy(mt);
34191 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
34192 check_erase2_testset(mt, set31, ARRAY_SIZE(set31));
34195 mt_validate(mt);
34196 mtree_destroy(mt);
34200 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
34201 check_erase2_testset(mt, set32, ARRAY_SIZE(set32));
34204 mt_validate(mt);
34205 mtree_destroy(mt);
34209 * mt 140582827569152 gap_end 140582869532672
34220 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
34221 check_erase2_testset(mt, set33, ARRAY_SIZE(set33));
34224 MT_BUG_ON(mt, mas.last != 140583003750399);
34226 mt_validate(mt);
34227 mtree_destroy(mt);
34235 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
34236 check_erase2_testset(mt, set34, ARRAY_SIZE(set34));
34239 mt_validate(mt);
34240 mtree_destroy(mt);
34245 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
34246 check_erase2_testset(mt, set35, ARRAY_SIZE(set35));
34249 mt_validate(mt);
34250 mtree_destroy(mt);
34255 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
34256 check_erase2_testset(mt, set36, ARRAY_SIZE(set36));
34259 mt_validate(mt);
34260 mtree_destroy(mt);
34263 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
34264 check_erase2_testset(mt, set37, ARRAY_SIZE(set37));
34266 MT_BUG_ON(mt, NULL != mtree_load(mt, 94637033459712));
34267 mt_validate(mt);
34268 mtree_destroy(mt);
34271 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
34272 check_erase2_testset(mt, set38, ARRAY_SIZE(set38));
34274 MT_BUG_ON(mt, NULL != mtree_load(mt, 94637033459712));
34275 mt_validate(mt);
34276 mtree_destroy(mt);
34279 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
34280 check_erase2_testset(mt, set39, ARRAY_SIZE(set39));
34282 mt_validate(mt);
34283 mtree_destroy(mt);
34286 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
34287 check_erase2_testset(mt, set40, ARRAY_SIZE(set40));
34289 mt_validate(mt);
34290 mtree_destroy(mt);
34293 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
34294 check_erase2_testset(mt, set41, ARRAY_SIZE(set41));
34296 mt_validate(mt);
34297 mtree_destroy(mt);
34302 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
34303 check_erase2_testset(mt, set42, ARRAY_SIZE(set42));
34306 MT_BUG_ON(mt, mas.last != 4041211903);
34308 mt_validate(mt);
34309 mtree_destroy(mt);
34314 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
34315 check_erase2_testset(mt, set43, ARRAY_SIZE(set43));
34318 mt_validate(mt);
34319 mtree_destroy(mt);
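
Every setN block in check_erase2_sets() follows the same lifecycle: re-initialise the tree, replay a recorded store/erase sequence, spot-check a load or a gap, then mt_validate() and mtree_destroy(). A condensed, hedged sketch of that lifecycle; the ranges and value below are placeholders rather than entries from any setN[] array.

static void replay_one_set(struct maple_tree *mt)
{
	/* Fresh allocation-range tree for this data set. */
	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);

	/* Replay: in the tests this is check_erase2_testset(mt, setN, ...). */
	mtree_store_range(mt, 0x1000, 0x1fff, xa_mk_value(1), GFP_KERNEL);
	mtree_store_range(mt, 0x1800, 0x1fff, NULL, GFP_KERNEL);

	/* Spot check, verify tree invariants, then tear down. */
	MT_BUG_ON(mt, mtree_load(mt, 0x1900) != NULL);
	mt_validate(mt);
	mtree_destroy(mt);
}
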
34358 MA_STATE(mas, test->mt, 0, 0);
34468 MA_STATE(mas, test->mt, 0, 0);
34530 mt_dump(test->mt, mt_dump_dec);
34593 static void rcu_stress_rev(struct maple_tree *mt, struct rcu_test_struct2 *test,
34613 mtree_store_range(mt, start, end,
34623 mtree_store_range(mt, start, end,
34634 mtree_store_range(mt, start, end, NULL, GFP_KERNEL);
34641 mtree_store_range(mt, start, end,
34652 static void rcu_stress_fwd(struct maple_tree *mt, struct rcu_test_struct2 *test,
34670 mtree_store_range(mt, start, end,
34680 mtree_store_range(mt, start, end,
34691 mtree_store_range(mt, start, end, NULL, GFP_KERNEL);
34698 mtree_store_range(mt, start, end,
34720 static void rcu_stress(struct maple_tree *mt, bool forward)
34730 test.mt = mt;
34742 mtree_store_range(mt, seed, r,
34774 mtree_store_range(mt, test.index[add], test.last[add],
34778 mt_set_in_rcu(mt);
34784 rcu_stress_fwd(mt, &test, count, test_reader);
34786 rcu_stress_rev(mt, &test, count, test_reader);
34792 mt_validate(mt);
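
rcu_stress_fwd() and rcu_stress_rev() drive the writer side of the stress test: once the tree is in RCU mode, ranges are overwritten and punched out with mtree_store_range() while the reader threads keep traversing. One hedged writer step, with placeholder bounds:

static void rcu_writer_step(struct maple_tree *mt, unsigned long start,
			    unsigned long end)
{
	/* Switch to RCU mode once; readers then walk under
	 * rcu_read_lock() while the stores below run concurrently. */
	if (!mt_in_rcu(mt))
		mt_set_in_rcu(mt);

	mtree_store_range(mt, start, end, xa_mk_value(start), GFP_KERNEL);
	mtree_store_range(mt, start, end, NULL, GFP_KERNEL);	/* punch it out */
}
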
34797 struct maple_tree *mt; /* the maple tree */
34876 entry = mtree_load(test->mt, test->index);
34877 MT_BUG_ON(test->mt, eval_rcu_entry(test, entry, &update_2,
34897 MA_STATE(mas, test->mt, test->range_start, test->range_start);
34918 MT_BUG_ON(test->mt, entry != expected);
34926 MT_BUG_ON(test->mt, eval_rcu_entry(test, entry,
34939 void run_check_rcu(struct maple_tree *mt, struct rcu_test_struct *vals)
34946 mt_set_in_rcu(mt);
34947 MT_BUG_ON(mt, !mt_in_rcu(mt));
34962 mtree_store_range(mt, vals->index, vals->last, vals->entry2,
34968 MT_BUG_ON(mt, !vals->seen_entry2);
34974 MA_STATE(mas, test->mt, test->index, test->index);
34991 static noinline void run_check_rcu_slot_store(struct maple_tree *mt)
34996 struct rcu_test_struct3 test = {.stop = false, .mt = mt};
35004 mtree_store_range(mt, i * len, i * len + len - 1,
35008 mt_set_in_rcu(mt);
35009 MT_BUG_ON(mt, !mt_in_rcu(mt));
35023 mtree_store_range(mt, --start, ++end, xa_mk_value(100),
35032 mt_validate(mt);
35036 void run_check_rcu_slowread(struct maple_tree *mt, struct rcu_test_struct *vals)
35044 mt_set_in_rcu(mt);
35045 MT_BUG_ON(mt, !mt_in_rcu(mt));
35062 mtree_store(mt, index,
35073 MT_BUG_ON(mt, !vals->seen_entry2);
35074 MT_BUG_ON(mt, !vals->seen_entry3);
35075 MT_BUG_ON(mt, !vals->seen_both);
35077 static noinline void __init check_rcu_simulated(struct maple_tree *mt)
35083 MA_STATE(mas_writer, mt, 0, 0);
35084 MA_STATE(mas_reader, mt, target, target);
35088 mt_set_in_rcu(mt);
35100 MT_BUG_ON(mt, mas_walk(&mas_reader) != xa_mk_value(target/10));
35104 MT_BUG_ON(mt, mas_walk(&mas_reader) != xa_mk_value(val));
35117 MT_BUG_ON(mt, mas_walk(&mas_reader) != xa_mk_value(target/10));
35121 MT_BUG_ON(mt, mas_walk(&mas_reader) != xa_mk_value(val));
35134 MT_BUG_ON(mt, mas_walk(&mas_reader) != xa_mk_value(target/10));
35138 MT_BUG_ON(mt, mas_walk(&mas_reader) != xa_mk_value(target/10));
35151 MT_BUG_ON(mt, mas_walk(&mas_reader) != xa_mk_value(target/10));
35155 MT_BUG_ON(mt, mas_walk(&mas_reader) != xa_mk_value(val));
35167 MT_BUG_ON(mt, mas_walk(&mas_reader) != xa_mk_value(target/10));
35171 MT_BUG_ON(mt, mas_walk(&mas_reader) != xa_mk_value(val));
35183 MT_BUG_ON(mt, mas_walk(&mas_reader) != xa_mk_value(target/10));
35187 MT_BUG_ON(mt, mas_walk(&mas_reader) != xa_mk_value(val));
35208 MT_BUG_ON(mt, mas_walk(&mas_reader) != xa_mk_value(target/10));
35213 MT_BUG_ON(mt, mas_next(&mas_reader, ULONG_MAX) != xa_mk_value(val));
35227 MT_BUG_ON(mt, mas_walk(&mas_reader) != xa_mk_value(target/10));
35232 MT_BUG_ON(mt, mas_prev(&mas_reader, 0) != xa_mk_value(val));
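
check_rcu_simulated() interleaves writer updates with a reader that re-walks the same index and must observe either the old or the new entry, never a torn one. The reader side, reduced to a single hedged helper (rcu_reader_once is an illustrative name):

static void *rcu_reader_once(struct maple_tree *mt, unsigned long index)
{
	MA_STATE(mas, mt, index, index);
	void *entry;

	/* In RCU mode a lockless walk is safe under rcu_read_lock();
	 * the result is whatever entry covered @index at that instant. */
	rcu_read_lock();
	entry = mas_walk(&mas);
	rcu_read_unlock();

	return entry;
}
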
35238 static noinline void __init check_rcu_threaded(struct maple_tree *mt)
35248 mtree_store_range(mt, i*10, i*10 + 5,
35252 vals.mt = mt;
35263 run_check_rcu(mt, &vals);
35264 mtree_destroy(mt);
35266 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
35268 mtree_store_range(mt, i*10, i*10 + 5,
35275 vals.mt = mt;
35284 run_check_rcu(mt, &vals);
35285 mtree_destroy(mt);
35288 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
35289 run_check_rcu_slot_store(mt);
35290 mtree_destroy(mt);
35293 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
35294 rcu_stress(mt, true);
35295 mtree_destroy(mt);
35298 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
35299 rcu_stress(mt, false);
35300 mtree_destroy(mt);
35303 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
35305 mtree_store_range(mt, i*10, i*10 + 5,
35312 vals.mt = mt;
35333 run_check_rcu_slowread(mt, &vals);
35387 static void check_dfs_preorder(struct maple_tree *mt)
35391 MA_STATE(mas, mt, 0, 0);
35398 check_seq(mt, max, false);
35403 MT_BUG_ON(mt, count != e);
35404 mtree_destroy(mt);
35406 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
35412 check_seq(mt, max, false);
35418 MT_BUG_ON(mt, count != e);
35419 mtree_destroy(mt);
35421 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
35424 check_rev_seq(mt, max, false);
35430 MT_BUG_ON(mt, count != e);
35431 mtree_destroy(mt);
35433 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
35441 MT_BUG_ON(mt, mas_is_err(&mas));
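
check_dfs_preorder() counts internal nodes for a given population; the entry-level counterpart is an in-order walk with mas_find(), wrapped by the mas_for_each() macro. Neither appears in the listing above, so the sketch below is an assumption about the public API rather than part of the test:

static unsigned long count_with_mas(struct maple_tree *mt)
{
	MA_STATE(mas, mt, 0, 0);
	unsigned long count = 0;
	void *entry;

	/* mas_for_each() resumes from mas.index and stops once the next
	 * entry would begin past the given maximum. */
	rcu_read_lock();
	mas_for_each(&mas, entry, ULONG_MAX)
		count++;
	rcu_read_unlock();

	return count;
}
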
35455 static noinline void __init check_prealloc(struct maple_tree *mt)
35462 MA_STATE(mas, mt, 10, 20);
35466 mtree_test_store_range(mt, i * 10, i * 10 + 5, &i);
35470 MT_BUG_ON(mt, mas_preallocate(&mas, ptr, GFP_KERNEL) != 0);
35473 MT_BUG_ON(mt, allocated == 0);
35474 MT_BUG_ON(mt, allocated != 1 + height * 3);
35477 MT_BUG_ON(mt, allocated != 0);
35479 MT_BUG_ON(mt, mas_preallocate(&mas, ptr, GFP_KERNEL) != 0);
35482 MT_BUG_ON(mt, allocated == 0);
35483 MT_BUG_ON(mt, allocated != 1 + height * 3);
35484 MT_BUG_ON(mt, mas_preallocate(&mas, ptr, GFP_KERNEL) != 0);
35487 MT_BUG_ON(mt, allocated != 0);
35490 MT_BUG_ON(mt, mas_preallocate(&mas, ptr, GFP_KERNEL) != 0);
35493 MT_BUG_ON(mt, allocated != 1 + height * 3);
35495 MT_BUG_ON(mt, mas_allocated(&mas) != allocated - 1);
35498 MT_BUG_ON(mt, mas_preallocate(&mas, ptr, GFP_KERNEL) != 0);
35501 MT_BUG_ON(mt, allocated != 0);
35503 MT_BUG_ON(mt, mas_preallocate(&mas, ptr, GFP_KERNEL) != 0);
35506 MT_BUG_ON(mt, allocated != 1 + height * 3);
35508 MT_BUG_ON(mt, mas_allocated(&mas) != allocated - 1);
35509 MT_BUG_ON(mt, mas_preallocate(&mas, ptr, GFP_KERNEL) != 0);
35512 MT_BUG_ON(mt, allocated != 0);
35516 MT_BUG_ON(mt, mas_preallocate(&mas, ptr, GFP_KERNEL) != 0);
35519 MT_BUG_ON(mt, allocated != 1 + height * 3);
35521 MT_BUG_ON(mt, mas_allocated(&mas) != allocated - 1);
35523 MT_BUG_ON(mt, mas_allocated(&mas) != allocated);
35524 MT_BUG_ON(mt, mas_preallocate(&mas, ptr, GFP_KERNEL) != 0);
35527 MT_BUG_ON(mt, allocated != 0);
35529 MT_BUG_ON(mt, mas_preallocate(&mas, ptr, GFP_KERNEL) != 0);
35532 MT_BUG_ON(mt, allocated != 1 + height * 3);
35534 MT_BUG_ON(mt, mas_allocated(&mas) != 0);
35538 MT_BUG_ON(mt, mas_preallocate(&mas, ptr, GFP_KERNEL) != 0);
35540 MT_BUG_ON(mt, allocated != 0);
35542 MT_BUG_ON(mt, mas_allocated(&mas) != 0);
35545 MT_BUG_ON(mt, mas_preallocate(&mas, ptr, GFP_KERNEL) != 0);
35548 MT_BUG_ON(mt, allocated != 0);
35550 MT_BUG_ON(mt, mas_allocated(&mas) != 0);
35554 MT_BUG_ON(mt, mas_preallocate(&mas, ptr, GFP_KERNEL) != 0);
35557 MT_BUG_ON(mt, allocated != 1 + height * 2);
35559 MT_BUG_ON(mt, mas_allocated(&mas) != 0);
35563 MT_BUG_ON(mt, mas_preallocate(&mas, ptr, GFP_KERNEL & GFP_NOWAIT) == 0);
35566 MT_BUG_ON(mt, allocated != 0);
35571 MT_BUG_ON(mt, mas_preallocate(&mas, ptr, GFP_KERNEL) != 0);
35574 MT_BUG_ON(mt, allocated == 0);
35575 MT_BUG_ON(mt, allocated != 1 + height * 3);
35577 MT_BUG_ON(mt, mas_allocated(&mas) != 0);
35580 MT_BUG_ON(mt, mas_preallocate(&mas, ptr, GFP_KERNEL & GFP_NOWAIT) == 0);
35583 MT_BUG_ON(mt, allocated != 0);
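
check_prealloc() pins down the contract that mas_preallocate() reserves 1 + height * 3 nodes for a worst-case store and that mas_destroy() hands an unused reservation back. The intended usage, sketched with mas_store_prealloc() (assumed from the public API; locking is elided because the right lock and gfp combination depends on the tree's configuration):

/* Reserve nodes up front so the store itself cannot fail. */
static int store_reserved(struct ma_state *mas, void *entry, bool abort)
{
	int ret;

	ret = mas_preallocate(mas, entry, GFP_KERNEL);
	if (ret)
		return ret;		/* nothing reserved */

	if (abort) {
		mas_destroy(mas);	/* return the reserved nodes */
		return 0;
	}

	mas_store_prealloc(mas, entry);	/* consumes the reservation */
	return 0;
}
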
35588 static noinline void __init check_spanning_write(struct maple_tree *mt)
35591 MA_STATE(mas, mt, 1200, 2380);
35594 mtree_test_store_range(mt, i * 10, i * 10 + 5, &i);
35596 mtree_lock(mt);
35599 MT_BUG_ON(mt, mas_walk(&mas) != NULL);
35600 mtree_unlock(mt);
35601 mtree_destroy(mt);
35604 mtree_test_store_range(mt, i * 10, i * 10 + 5, &i);
35606 mtree_lock(mt);
35610 MT_BUG_ON(mt, mas_walk(&mas) != NULL);
35611 mtree_unlock(mt);
35612 mt_validate(mt);
35613 mtree_destroy(mt);
35616 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
35618 mtree_test_store_range(mt, i * 10, i * 10 + 5, &i);
35621 mtree_lock(mt);
35624 MT_BUG_ON(mt, mas_walk(&mas) != NULL);
35625 mtree_unlock(mt);
35626 mtree_destroy(mt);
35629 mt_init_flags(mt, 0);
35631 mtree_test_store_range(mt, i * 10, i * 10 + 5, &i);
35634 mtree_lock(mt);
35637 MT_BUG_ON(mt, mas_walk(&mas) != NULL);
35638 mtree_unlock(mt);
35639 mtree_destroy(mt);
35642 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
35644 mtree_test_store_range(mt, i * 10, i * 10 + 5, &i);
35647 mtree_lock(mt);
35650 MT_BUG_ON(mt, mas_walk(&mas) != NULL);
35651 mtree_unlock(mt);
35652 mtree_destroy(mt);
35655 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
35657 mtree_test_store_range(mt, i * 10, i * 10 + 5, &i);
35660 mtree_lock(mt);
35663 MT_BUG_ON(mt, mas_walk(&mas) != NULL);
35664 mtree_unlock(mt);
35665 mtree_destroy(mt);
35668 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
35670 mtree_test_store_range(mt, i * 10, i * 10 + 5, &i);
35673 mtree_lock(mt);
35676 MT_BUG_ON(mt, mas_walk(&mas) != NULL);
35677 mtree_unlock(mt);
35678 mtree_destroy(mt);
35684 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
35686 mtree_test_store_range(mt, i * 10, i * 10 + 5, &i);
35689 mtree_lock(mt);
35692 MT_BUG_ON(mt, mas_walk(&mas) != NULL);
35693 mtree_unlock(mt);
35694 mtree_destroy(mt);
35700 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
35702 mtree_test_store_range(mt, i * 10, i * 10 + 5, &i);
35704 mtree_lock(mt);
35707 MT_BUG_ON(mt, mas_walk(&mas) != NULL);
35708 mtree_unlock(mt);
35709 mtree_destroy(mt);
35712 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
35714 mtree_test_store_range(mt, i * 10, i * 10 + 5, &i);
35715 mtree_lock(mt);
35726 MT_BUG_ON(mt, (mas_data_end(&mas)) != mt_slot_count(mas.node) - 1);
35729 mtree_unlock(mt);
35730 mtree_destroy(mt);
35733 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
35735 mtree_test_store_range(mt, i * 10, i * 10 + 5, &i);
35737 mtree_lock(mt);
35740 MT_BUG_ON(mt, mas_walk(&mas) != NULL);
35741 mtree_unlock(mt);
35742 mtree_destroy(mt);
35744 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
35746 mtree_test_store_range(mt, i * 10, i * 10 + 5, &i);
35748 mtree_lock(mt);
35751 mtree_unlock(mt);
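
check_spanning_write() stores NULL across ranges wide enough to cross node (and sometimes level) boundaries, then re-walks an index inside the range and expects NULL. One spanning write, sketched with mas_store_gfp() and mas_set() assumed from the public API; as in the test, the GFP_KERNEL-under-mtree_lock() pairing is fine in the userspace test harness, while real callers must match gfp flags to the lock they hold.

static void wipe_range(struct maple_tree *mt, unsigned long first,
		       unsigned long last)
{
	MA_STATE(mas, mt, first, last);

	mtree_lock(mt);
	/* A single store over [first, last] replaces every overlapping
	 * entry, splitting or rebalancing nodes as needed. */
	mas_store_gfp(&mas, NULL, GFP_KERNEL);

	mas_set(&mas, first + 1);
	MT_BUG_ON(mt, mas_walk(&mas) != NULL);
	mtree_unlock(mt);
}
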
35756 static noinline void __init check_null_expand(struct maple_tree *mt)
35760 MA_STATE(mas, mt, 959, 959);
35763 mtree_test_store_range(mt, i * 10, i * 10 + 5, &i);
35770 MT_BUG_ON(mt, mtree_load(mt, 963) != NULL);
35771 MT_BUG_ON(mt, data_end != mas_data_end(&mas));
35779 MT_BUG_ON(mt, mtree_load(mt, 884) != NULL);
35780 MT_BUG_ON(mt, mtree_load(mt, 889) != NULL);
35782 MT_BUG_ON(mt, data_end != mas_data_end(&mas));
35791 MT_BUG_ON(mt, mtree_load(mt, 899) != NULL);
35792 MT_BUG_ON(mt, mtree_load(mt, 900) != NULL);
35793 MT_BUG_ON(mt, mtree_load(mt, 905) != NULL);
35794 MT_BUG_ON(mt, mtree_load(mt, 906) != NULL);
35796 MT_BUG_ON(mt, data_end - 2 != mas_data_end(&mas));
35805 MT_BUG_ON(mt, mtree_load(mt, 809) != NULL);
35806 MT_BUG_ON(mt, mtree_load(mt, 810) != NULL);
35807 MT_BUG_ON(mt, mtree_load(mt, 825) != NULL);
35808 MT_BUG_ON(mt, mtree_load(mt, 826) != NULL);
35810 MT_BUG_ON(mt, data_end - 4 != mas_data_end(&mas));
35817 static noinline void __init check_nomem(struct maple_tree *mt)
35819 MA_STATE(ms, mt, 1, 1);
35821 MT_BUG_ON(mt, !mtree_empty(mt));
35826 MT_BUG_ON(mt, mtree_insert(mt, 1, &ms, GFP_ATOMIC) != -ENOMEM);
35828 MT_BUG_ON(mt, mtree_insert(mt, 0, &ms, GFP_ATOMIC) != 0);
35838 mtree_lock(mt);
35840 MT_BUG_ON(mt, ms.node != MA_ERROR(-ENOMEM));
35842 MT_BUG_ON(mt, ms.status != ma_start);
35843 mtree_unlock(mt);
35844 MT_BUG_ON(mt, mtree_insert(mt, 2, mt, GFP_KERNEL) != 0);
35845 mtree_lock(mt);
35848 mtree_unlock(mt);
35849 mtree_destroy(mt);
35852 static noinline void __init check_locky(struct maple_tree *mt)
35854 MA_STATE(ms, mt, 2, 2);
35855 MA_STATE(reader, mt, 2, 2);
35858 mt_set_in_rcu(mt);
35864 mt_clear_in_rcu(mt);
36013 * @mt: The tree to build.
36021 static __init int build_full_tree(struct maple_tree *mt, unsigned int flags,
36024 MA_STATE(mas, mt, 0, 0);
36029 mt_init_flags(mt, flags);
36030 mtree_insert_range(mt, 0, ULONG_MAX, xa_mk_value(5), GFP_KERNEL);
36032 mtree_lock(mt);
36036 if (mt_height(mt) < height) {
36067 mtree_unlock(mt);
36069 MT_BUG_ON(mt, mt_height(mt) != height);
36070 /* pr_info("height:%u number of elements:%d\n", mt_height(mt), cnt); */
36074 static noinline void __init check_mtree_dup(struct maple_tree *mt)
36081 mt_init_flags(mt, 0);
36082 mtree_store_range(mt, 0, 0, xa_mk_value(0), GFP_KERNEL);
36083 ret = mtree_dup(mt, &new, GFP_KERNEL);
36086 if (compare_tree(mt, &new))
36089 mtree_destroy(mt);
36093 mt_init_flags(mt, 0);
36095 ret = mtree_dup(mt, &new, GFP_KERNEL);
36097 mtree_destroy(mt);
36101 mt_init_flags(mt, 0);
36104 ret = mtree_dup(mt, &new, GFP_KERNEL);
36106 mtree_destroy(mt);
36111 ret = build_full_tree(mt, 0, i);
36112 MT_BUG_ON(mt, ret);
36115 ret = mtree_dup(mt, &new, GFP_KERNEL);
36118 if (compare_tree(mt, &new))
36121 mtree_destroy(mt);
36126 ret = build_full_tree(mt, MT_FLAGS_ALLOC_RANGE, i);
36127 MT_BUG_ON(mt, ret);
36130 ret = mtree_dup(mt, &new, GFP_KERNEL);
36133 if (compare_tree(mt, &new))
36136 mtree_destroy(mt);
36143 mt_init_flags(mt, 0);
36146 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
36151 mtree_store_range(mt, j * 10, j * 10 + 5,
36155 ret = mtree_dup(mt, &new, GFP_KERNEL);
36158 if (compare_tree(mt, &new))
36161 mtree_destroy(mt);
36166 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
36168 mtree_store_range(mt, j * 10, j * 10 + 5,
36175 ret = mtree_dup(mt, &new, GFP_NOWAIT);
36178 mtree_destroy(mt);
36184 mt_init_flags(mt, 0);
36187 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
36192 mtree_store_range(mt, j * 10, j * 10 + 5,
36203 ret = mtree_dup(mt, &new, GFP_NOWAIT);
36208 mtree_destroy(mt);
36213 if (compare_tree(mt, &new))
36216 mtree_destroy(mt);
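
check_mtree_dup() duplicates trees of many shapes and compares source and copy with compare_tree(). A minimal, hedged sketch of the mtree_dup() call; it assumes the destination must be an empty tree initialised with the same flags as the source and that mtree_dup() takes care of its own locking, and it uses mt_height() only as a lightweight stand-in for the test's full comparison.

static int dup_demo(void)
{
	struct maple_tree tree, copy;
	unsigned long i;
	int ret;

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	mt_init_flags(&copy, MT_FLAGS_ALLOC_RANGE);

	for (i = 0; i < 30; i++)
		mtree_store_range(&tree, i * 10, i * 10 + 5,
				  xa_mk_value(i), GFP_KERNEL);

	ret = mtree_dup(&tree, &copy, GFP_KERNEL);
	if (!ret) {
		MT_BUG_ON(&tree, mt_height(&tree) != mt_height(&copy));
		mtree_destroy(&copy);
	}

	mtree_destroy(&tree);
	return ret;
}
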