#include "test/jemalloc_test.h"

#ifdef JEMALLOC_FILL
#  ifndef JEMALLOC_TEST_JUNK_OPT
#    define JEMALLOC_TEST_JUNK_OPT "junk:true"
#  endif
const char *malloc_conf =
    "abort:false,zero:false," JEMALLOC_TEST_JUNK_OPT;
#endif

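/*
 * Saved copies of the junk-filling hooks that the test replaces, plus the
 * pointer currently being watched and a flag recording whether a junk-fill
 * hook fired for it.
 */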
static arena_dalloc_junk_small_t *arena_dalloc_junk_small_orig;
static large_dalloc_junk_t *large_dalloc_junk_orig;
static large_dalloc_maybe_junk_t *large_dalloc_maybe_junk_orig;
static void *watch_for_junking;
static bool saw_junking;

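/* Start watching p; the intercept hooks set saw_junking when p is freed. */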
static void
watch_junking(void *p)
{
	watch_for_junking = p;
	saw_junking = false;
}

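/*
 * Call through to the real small-region junk filler, then verify that every
 * byte of the freed region was overwritten with JEMALLOC_FREE_JUNK.
 */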
static void
arena_dalloc_junk_small_intercept(void *ptr, const arena_bin_info_t *bin_info)
{
	size_t i;

	arena_dalloc_junk_small_orig(ptr, bin_info);
	for (i = 0; i < bin_info->reg_size; i++) {
		assert_u_eq(((uint8_t *)ptr)[i], JEMALLOC_FREE_JUNK,
		    "Missing junk fill for byte %zu/%zu of deallocated region",
		    i, bin_info->reg_size);
	}
	if (ptr == watch_for_junking)
		saw_junking = true;
}

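/*
 * Call through to the real large-region junk filler, then verify that every
 * byte of the freed region was overwritten with JEMALLOC_FREE_JUNK.
 */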
static void
large_dalloc_junk_intercept(void *ptr, size_t usize)
{
	size_t i;

	large_dalloc_junk_orig(ptr, usize);
	for (i = 0; i < usize; i++) {
		assert_u_eq(((uint8_t *)ptr)[i], JEMALLOC_FREE_JUNK,
		    "Missing junk fill for byte %zu/%zu of deallocated region",
		    i, usize);
	}
	if (ptr == watch_for_junking)
		saw_junking = true;
}

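/*
 * The "maybe" variant may skip the actual fill, so only record that the hook
 * fired for the watched pointer rather than verifying the bytes.
 */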
static void
large_dalloc_maybe_junk_intercept(void *ptr, size_t usize)
{
	large_dalloc_maybe_junk_orig(ptr, usize);
	if (ptr == watch_for_junking)
		saw_junking = true;
}

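/*
 * Walk the size classes from sz_min up to sz_max, verifying that newly
 * allocated bytes carry JEMALLOC_ALLOC_JUNK and that regions freed by
 * rallocx()/dallocx() are filled with JEMALLOC_FREE_JUNK.
 */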
static void
test_junk(size_t sz_min, size_t sz_max)
{
	uint8_t *s;
	size_t sz_prev, sz, i;

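	/* Install intercepts so junk filling on dalloc can be observed. */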
	if (opt_junk_free) {
		arena_dalloc_junk_small_orig = arena_dalloc_junk_small;
		arena_dalloc_junk_small = arena_dalloc_junk_small_intercept;
		large_dalloc_junk_orig = large_dalloc_junk;
		large_dalloc_junk = large_dalloc_junk_intercept;
		large_dalloc_maybe_junk_orig = large_dalloc_maybe_junk;
		large_dalloc_maybe_junk = large_dalloc_maybe_junk_intercept;
	}

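	/* Allocate the smallest size, then grow one size class at a time. */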
	sz_prev = 0;
	s = (uint8_t *)mallocx(sz_min, 0);
	assert_ptr_not_null((void *)s, "Unexpected mallocx() failure");

	for (sz = sallocx(s, 0); sz <= sz_max;
	    sz_prev = sz, sz = sallocx(s, 0)) {
		if (sz_prev > 0) {
			assert_u_eq(s[0], 'a',
			    "Previously allocated byte %zu/%zu is corrupted",
			    ZU(0), sz_prev);
			assert_u_eq(s[sz_prev-1], 'a',
			    "Previously allocated byte %zu/%zu is corrupted",
			    sz_prev-1, sz_prev);
		}

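		/*
		 * Bytes past the previous size are newly exposed; they must
		 * carry the allocation junk pattern before being overwritten.
		 */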
		for (i = sz_prev; i < sz; i++) {
			if (opt_junk_alloc) {
				assert_u_eq(s[i], JEMALLOC_ALLOC_JUNK,
				    "Newly allocated byte %zu/%zu isn't "
				    "junk-filled", i, sz);
			}
			s[i] = 'a';
		}

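		/*
		 * If in-place expansion to sz+1 fails, rallocx() must move
		 * the object, and the old (watched) region must be
		 * junk-filled when it is freed.
		 */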
		if (xallocx(s, sz+1, 0, 0) == sz) {
			uint8_t *t;
			watch_junking(s);
			t = (uint8_t *)rallocx(s, sz+1, 0);
			assert_ptr_not_null((void *)t,
			    "Unexpected rallocx() failure");
			assert_ptr_ne(s, t, "Unexpected in-place rallocx()");
			assert_zu_ge(sallocx(t, 0), sz+1,
			    "Unexpectedly small rallocx() result");
			assert_true(!opt_junk_free || saw_junking,
			    "Expected region of size %zu to be junk-filled",
			    sz);
			s = t;
		}
	}

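	/* The final deallocation must junk-fill the region as well. */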
	watch_junking(s);
	dallocx(s, 0);
	assert_true(!opt_junk_free || saw_junking,
	    "Expected region of size %zu to be junk-filled", sz);

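	/* Restore the original junk-filling hooks. */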
	if (opt_junk_free) {
		arena_dalloc_junk_small = arena_dalloc_junk_small_orig;
		large_dalloc_junk = large_dalloc_junk_orig;
		large_dalloc_maybe_junk = large_dalloc_maybe_junk_orig;
	}
}

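/* Exercise junk filling for small size classes. */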
TEST_BEGIN(test_junk_small)
{
	test_skip_if(!config_fill);
	test_junk(1, SMALL_MAXCLASS-1);
}
TEST_END

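/* Exercise junk filling for large size classes. */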
TEST_BEGIN(test_junk_large)
{
	test_skip_if(!config_fill);
	test_junk(SMALL_MAXCLASS+1, (1U << (LG_LARGE_MINCLASS+1)));
}
TEST_END

int
main(void)
{
	return (test(
	    test_junk_small,
	    test_junk_large));
}