/*
 * This radix tree implementation is tailored to the singular purpose of
 * tracking which chunks are currently owned by jemalloc.  This functionality
 * is mandatory for OS X, where jemalloc must be able to respond to object
 * ownership queries.
 *
 *******************************************************************************
 */
#ifdef JEMALLOC_H_TYPES

typedef struct rtree_s rtree_t;

/*
 * Size of each radix tree node (must be a power of 2).  This impacts tree
 * depth.
 */
#if (LG_SIZEOF_PTR == 2)
#  define RTREE_NODESIZE (1U << 14)
#else
#  define RTREE_NODESIZE CACHELINE
#endif
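
/*
 * Illustrative arithmetic (not part of the interface): each node holds
 * RTREE_NODESIZE / sizeof(void *) slots, so each level resolves
 * lg(RTREE_NODESIZE) - LG_SIZEOF_PTR key bits.  Assuming a 64-byte cache
 * line (typical, but configuration-dependent):
 *
 *	32-bit: (1U << 14) / 4 == 4096 slots --> 12 key bits per level
 *	64-bit:       64 / 8  ==    8 slots -->  3 key bits per level
 */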

#endif /* JEMALLOC_H_TYPES */
/******************************************************************************/
#ifdef JEMALLOC_H_STRUCTS

struct rtree_s {
	malloc_mutex_t	mutex;		/* Protects writes and locked reads. */
	void		**root;		/* Root node of the tree. */
	unsigned	height;		/* Number of tree levels. */
	unsigned	level2bits[1];	/* Key bits per level.  Dynamically
					 * sized. */
};
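
/*
 * Sketch of a possible configuration (the exact split of key bits across
 * levels is chosen by rtree_new()): with 3 key bits resolved per level and
 * bits == 20, a plausible result is:
 *
 *	height == 7
 *	level2bits == {2, 3, 3, 3, 3, 3, 3}	(2 + 6*3 == 20)
 */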

#endif /* JEMALLOC_H_STRUCTS */
/******************************************************************************/
#ifdef JEMALLOC_H_EXTERNS

rtree_t	*rtree_new(unsigned bits);
void	rtree_prefork(rtree_t *rtree);
void	rtree_postfork_parent(rtree_t *rtree);
void	rtree_postfork_child(rtree_t *rtree);
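
/*
 * Sketch of typical usage (illustrative only; "chunk" and opt_lg_chunk stand
 * in for the caller's chunk pointer and chunk size configuration):
 *
 *	rtree_t *chunks_rtree;
 *
 *	chunks_rtree = rtree_new((ZU(1) << (LG_SIZEOF_PTR+3)) -
 *	    opt_lg_chunk);
 *	if (chunks_rtree == NULL)
 *		... handle OOM ...
 *	if (rtree_set(chunks_rtree, (uintptr_t)chunk, chunk))
 *		... handle OOM (rtree_set() returns true on failure) ...
 *	assert(rtree_get(chunks_rtree, (uintptr_t)chunk) == chunk);
 */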

#endif /* JEMALLOC_H_EXTERNS */
/******************************************************************************/
#ifdef JEMALLOC_H_INLINES

#ifndef JEMALLOC_ENABLE_INLINE
#ifdef JEMALLOC_DEBUG
void	*rtree_get_locked(rtree_t *rtree, uintptr_t key);
#endif
void	*rtree_get(rtree_t *rtree, uintptr_t key);
bool	rtree_set(rtree_t *rtree, uintptr_t key, void *val);
#endif

#if (defined(JEMALLOC_ENABLE_INLINE) || defined(JEMALLOC_RTREE_C_))
#define	RTREE_GET_GENERATE(f)						\
/* The least significant bits of the key are ignored. */		\
JEMALLOC_INLINE void *							\
f(rtree_t *rtree, uintptr_t key)					\
{									\
	void *ret;							\
	uintptr_t subkey;						\
	unsigned i, lshift, height, bits;				\
	void **node, **child;						\
									\
	RTREE_LOCK(&rtree->mutex);					\
	for (i = lshift = 0, height = rtree->height, node = rtree->root;\
	    i < height - 1;						\
	    i++, lshift += bits, node = child) {			\
		/* Extract the next level2bits[i] key bits as an index. */ \
		bits = rtree->level2bits[i];				\
		subkey = (key << lshift) >> ((ZU(1) << (LG_SIZEOF_PTR + \
		    3)) - bits);					\
		child = (void**)node[subkey];				\
		if (child == NULL) {					\
			RTREE_UNLOCK(&rtree->mutex);			\
			return (NULL);					\
		}							\
	}								\
									\
	/*								\
	 * node is a leaf, so it contains values rather than node	\
	 * pointers.							\
	 */								\
	bits = rtree->level2bits[i];					\
	subkey = (key << lshift) >> ((ZU(1) << (LG_SIZEOF_PTR+3)) -	\
	    bits);							\
	ret = node[subkey];						\
	RTREE_UNLOCK(&rtree->mutex);					\
									\
	RTREE_GET_VALIDATE						\
	return (ret);							\
}
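
/*
 * Worked example of the subkey computation above, assuming a 64-bit system
 * (so ZU(1) << (LG_SIZEOF_PTR+3) == 64): at the root, lshift == 0, and with
 * bits == 3, subkey == key >> 61, i.e. the topmost 3 key bits.  Each
 * iteration adds the consumed bits to lshift, so successive levels index on
 * successively less significant key bits.
 */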

#ifdef JEMALLOC_DEBUG
#  define RTREE_LOCK(l)		malloc_mutex_lock(l)
#  define RTREE_UNLOCK(l)	malloc_mutex_unlock(l)
#  define RTREE_GET_VALIDATE
RTREE_GET_GENERATE(rtree_get_locked)
#  undef RTREE_LOCK
#  undef RTREE_UNLOCK
#  undef RTREE_GET_VALIDATE
#endif

#define	RTREE_LOCK(l)
#define	RTREE_UNLOCK(l)
#ifdef JEMALLOC_DEBUG
   /*
    * Suppose that it were possible for a jemalloc-allocated chunk to be
    * munmap()ped, followed by a different allocator in another thread re-using
    * overlapping virtual memory, all without invalidating the cached rtree
    * value.  The result would be a false positive (the rtree would claim that
    * jemalloc owns memory that it had actually discarded).  This scenario
    * seems impossible, but the following assertion is a prudent sanity check.
    */
#  define RTREE_GET_VALIDATE						\
	assert(rtree_get_locked(rtree, key) == ret);
#else
#  define RTREE_GET_VALIDATE
#endif
RTREE_GET_GENERATE(rtree_get)
#undef RTREE_LOCK
#undef RTREE_UNLOCK
#undef RTREE_GET_VALIDATE

/* Returns true on failure (i.e. OOM while allocating a new tree node). */
JEMALLOC_INLINE bool
rtree_set(rtree_t *rtree, uintptr_t key, void *val)
{
	uintptr_t subkey;
	unsigned i, lshift, height, bits;
	void **node, **child;

	malloc_mutex_lock(&rtree->mutex);
	for (i = lshift = 0, height = rtree->height, node = rtree->root;
	    i < height - 1;
	    i++, lshift += bits, node = child) {
		bits = rtree->level2bits[i];
		subkey = (key << lshift) >> ((ZU(1) << (LG_SIZEOF_PTR+3)) -
		    bits);
		child = (void**)node[subkey];
		if (child == NULL) {
			/* Lazily create the next-level node. */
			child = (void**)base_alloc(sizeof(void *) <<
			    rtree->level2bits[i+1]);
			if (child == NULL) {
				malloc_mutex_unlock(&rtree->mutex);
				return (true);
			}
			memset(child, 0, sizeof(void *) <<
			    rtree->level2bits[i+1]);
			node[subkey] = child;
		}
	}

	/* node is a leaf, so it contains values rather than node pointers. */
	bits = rtree->level2bits[i];
	subkey = (key << lshift) >> ((ZU(1) << (LG_SIZEOF_PTR+3)) - bits);
	node[subkey] = val;
	malloc_mutex_unlock(&rtree->mutex);

	return (false);
}
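
/*
 * Usage note (a sketch): a mapping is removed by storing NULL for its key,
 * e.g.
 *
 *	rtree_set(rtree, (uintptr_t)chunk, NULL);
 *
 * rtree_set() fails (returns true) only if growing the tree runs out of
 * memory; interior nodes, once allocated via base_alloc(), are never freed.
 */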
#endif

#endif /* JEMALLOC_H_INLINES */
/******************************************************************************/
165