/*
Copyright (c) 2001 Wolfram Gloger
Copyright (c) 2006 Cavium networks

Permission to use, copy, modify, distribute, and sell this software
and its documentation for any purpose is hereby granted without fee,
provided that (i) the above copyright notices and this permission
notice appear in all copies of the software and related documentation,
and (ii) the name of Wolfram Gloger may not be used in any advertising
or publicity relating to the software.

THE SOFTWARE IS PROVIDED "AS-IS" AND WITHOUT WARRANTY OF ANY KIND,
EXPRESS, IMPLIED OR OTHERWISE, INCLUDING WITHOUT LIMITATION, ANY
WARRANTY OF MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE.

IN NO EVENT SHALL WOLFRAM GLOGER BE LIABLE FOR ANY SPECIAL,
INCIDENTAL, INDIRECT OR CONSEQUENTIAL DAMAGES OF ANY KIND, OR ANY
DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS,
WHETHER OR NOT ADVISED OF THE POSSIBILITY OF DAMAGE, AND ON ANY THEORY
OF LIABILITY, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
PERFORMANCE OF THIS SOFTWARE.
*/

/* $Id: arena.c 30481 2007-12-05 21:46:59Z rfranz $ */
25232809Sjmallett
/* Compile-time constants.  */

#define HEAP_MIN_SIZE (4096)   /* Must leave room for struct malloc_state, arena ptrs, etc., totals about 2400 bytes */

#ifndef THREAD_STATS
#define THREAD_STATS 0
#endif

/* If THREAD_STATS is non-zero, some statistics on mutex locking are
   computed.  */

/***************************************************************************/

/* Forward declaration; made static to avoid symbol conflicts with newlib's
   own malloc implementation. */
static mstate         _int_new_arena __MALLOC_P ((size_t __ini_size));

/***************************************************************************/

/* Accessor for an arena's topmost ("wilderness") chunk. */
#define top(ar_ptr) ((ar_ptr)->top)
45232809Sjmallett
/* A heap is a single contiguous memory region holding (coalesceable)
   malloc_chunks.    Not used unless compiling with
   USE_ARENAS. */

/* Header placed at the very start of each heap region; the arena's
   malloc_state immediately follows it (see cvmx_new_arena). */
typedef struct _heap_info {
  mstate ar_ptr; /* Arena for this heap. */
  struct _heap_info *prev; /* Previous heap. */
  size_t size;   /* Current size in bytes. */
  size_t pad;    /* Make sure the following data is properly aligned. */
} heap_info;
56232809Sjmallett
/* Thread specific data */

static tsd_key_t arena_key;            /* one per PP (thread): that thread's current arena */
static CVMX_SHARED mutex_t list_lock;  /* shared across cores; guards the arena list */

#if THREAD_STATS
static int stat_n_heaps;               /* number of heaps created (stats build only) */
#define THREAD_STAT(x) x
#else
#define THREAD_STAT(x) do ; while(0)   /* compiles away when stats are disabled */
#endif

/* Mapped memory in non-main arenas (reliable only for NO_THREADS). */
static unsigned long arena_mem;

/* Already initialized?  -1 = not yet; set during malloc init. */
int CVMX_SHARED cvmx__malloc_initialized = -1;
74232809Sjmallett
/**************************************************************************/

#if USE_ARENAS

/* find the heap and corresponding arena for a given ptr */

/* Each chunk carries a back-pointer to its owning arena, so the lookup
   is a direct field access rather than an address-range search. */
#define arena_for_chunk(ptr) ((ptr)->arena_ptr)
#define set_arena_for_chunk(ptr, arena) (ptr)->arena_ptr = (arena)


#endif /* USE_ARENAS */

/**************************************************************************/

#ifndef NO_THREADS

/* atfork support.  */

/* Saved copies of the user-visible hooks while the atfork mechanism
   temporarily installs malloc_atfork/free_atfork in their place. */
static __malloc_ptr_t (*save_malloc_hook) __MALLOC_P ((size_t __size,
						       __const __malloc_ptr_t));
static void           (*save_free_hook) __MALLOC_P ((__malloc_ptr_t __ptr,
						     __const __malloc_ptr_t));
static Void_t*        save_arena;  /* saved thread-specific arena pointer */

/* Magic value for the thread-specific arena pointer when
   malloc_atfork() is in use.  */

#define ATFORK_ARENA_PTR ((Void_t*)-1)
103232809Sjmallett
104232809Sjmallett/* The following hooks are used while the `atfork' handling mechanism
105232809Sjmallett   is active. */
106232809Sjmallett
107232809Sjmallettstatic Void_t*
108232809Sjmallettmalloc_atfork(size_t sz, const Void_t *caller)
109232809Sjmallett{
110232809Sjmallettreturn(NULL);
111232809Sjmallett}
112232809Sjmallett
/* Hook installed while atfork handling is active: free MEM, locking
   the owning arena's mutex unless this thread's arena pointer is the
   ATFORK_ARENA_PTR sentinel.  NOTE(review): presumably the sentinel
   marks the thread driving fork(), which already holds the arena
   locks — confirm against the atfork handlers (not visible here). */
static void
free_atfork(Void_t* mem, const Void_t *caller)
{
  Void_t *vptr = NULL;
  mstate ar_ptr;
  mchunkptr p;                          /* chunk corresponding to mem */

  if (mem == 0)                              /* free(0) has no effect */
    return;

  p = mem2chunk(mem);         /* do not bother to replicate free_check here */

#if HAVE_MMAP
  if (chunk_is_mmapped(p))                       /* release mmapped memory. */
  {
    munmap_chunk(p);
    return;
  }
#endif

  ar_ptr = arena_for_chunk(p);
  tsd_getspecific(arena_key, vptr);
  /* Lock only when this thread does not already own the arena locks. */
  if(vptr != ATFORK_ARENA_PTR)
    (void)mutex_lock(&ar_ptr->mutex);
  _int_free(ar_ptr, mem);
  if(vptr != ATFORK_ARENA_PTR)
    (void)mutex_unlock(&ar_ptr->mutex);
}
141232809Sjmallett


/* Build-environment sanity check: this port must not be compiled for Linux. */
#ifdef __linux__
#error   __linux__defined!
#endif

#endif /* !defined NO_THREADS */



/* Initialization routine. */
/* Build-environment sanity check: glibc-internal paths must be disabled. */
#ifdef _LIBC
#error  _LIBC is defined, and should not be
#endif /* _LIBC */

/* Serializes one-time malloc initialization across cores. */
static CVMX_SHARED cvmx_spinlock_t malloc_init_spin_lock;




/* Managing heaps and arenas (for concurrent threads) */

#if USE_ARENAS
165232809Sjmallett
166232809Sjmallett#if MALLOC_DEBUG > 1
167232809Sjmallett
/* Print the complete contents of a single heap to stderr.
   Walks the chunk list from the first chunk after the heap header to
   either the arena's top chunk or the zero-size fencepost. */

static void
#if __STD_C
dump_heap(heap_info *heap)
#else
dump_heap(heap) heap_info *heap;
#endif
{
  char *ptr;
  mchunkptr p;

  fprintf(stderr, "Heap %p, size %10lx:\n", heap, (long)heap->size);
  /* First chunk begins after the heap_info, and also after the
     malloc_state when this heap holds its own arena state. */
  ptr = (heap->ar_ptr != (mstate)(heap+1)) ?
    (char*)(heap + 1) : (char*)(heap + 1) + sizeof(struct malloc_state);
  /* Round up to chunk alignment. */
  p = (mchunkptr)(((unsigned long)ptr + MALLOC_ALIGN_MASK) &
                  ~MALLOC_ALIGN_MASK);
  for(;;) {
    fprintf(stderr, "chunk %p size %10lx", p, (long)p->size);
    if(p == top(heap->ar_ptr)) {
      fprintf(stderr, " (top)\n");
      break;
    } else if(p->size == (0|PREV_INUSE)) {
      /* Zero-sized chunk with PREV_INUSE marks the end fencepost. */
      fprintf(stderr, " (fence)\n");
      break;
    }
    fprintf(stderr, "\n");
    p = next_chunk(p);
  }
}
198232809Sjmallett
199232809Sjmallett#endif /* MALLOC_DEBUG > 1 */
/* Create a new arena in user-supplied memory. */

202232809Sjmallett
203232809Sjmallettstatic mstate cvmx_new_arena(void *addr, size_t size)
204232809Sjmallett{
205232809Sjmallett  mstate a;
206232809Sjmallett  heap_info *h;
207232809Sjmallett  char *ptr;
208232809Sjmallett  unsigned long misalign;
209232809Sjmallett  int page_mask = malloc_getpagesize - 1;
210232809Sjmallett
211232809Sjmallett  debug_printf("cvmx_new_arena called, addr: %p, size %ld\n", addr, size);
212232809Sjmallett  debug_printf("heapinfo size: %ld, mstate size: %d\n", sizeof(heap_info), sizeof(struct malloc_state));
213232809Sjmallett
214232809Sjmallett  if (!addr || (size < HEAP_MIN_SIZE))
215232809Sjmallett  {
216232809Sjmallett      return(NULL);
217232809Sjmallett  }
218232809Sjmallett  /* We must zero out the arena as the malloc code assumes this. */
219232809Sjmallett  memset(addr, 0, size);
220232809Sjmallett
221232809Sjmallett  h = (heap_info *)addr;
222232809Sjmallett  h->size = size;
223232809Sjmallett
224232809Sjmallett  a = h->ar_ptr = (mstate)(h+1);
225232809Sjmallett  malloc_init_state(a);
226232809Sjmallett  /*a->next = NULL;*/
227232809Sjmallett  a->system_mem = a->max_system_mem = h->size;
228232809Sjmallett  arena_mem += h->size;
229232809Sjmallett  a->next = a;
230232809Sjmallett
231232809Sjmallett  /* Set up the top chunk, with proper alignment. */
232232809Sjmallett  ptr = (char *)(a + 1);
233232809Sjmallett  misalign = (unsigned long)chunk2mem(ptr) & MALLOC_ALIGN_MASK;
234232809Sjmallett  if (misalign > 0)
235232809Sjmallett    ptr += MALLOC_ALIGNMENT - misalign;
236232809Sjmallett  top(a) = (mchunkptr)ptr;
237232809Sjmallett  set_head(top(a), (((char*)h + h->size) - ptr) | PREV_INUSE);
238232809Sjmallett
239232809Sjmallett  return a;
240232809Sjmallett}
241232809Sjmallett
242232809Sjmallett
243232809Sjmallettint cvmx_add_arena(cvmx_arena_list_t *arena_list, void *ptr, size_t size)
244232809Sjmallett{
245232809Sjmallett  mstate a;
246232809Sjmallett
247232809Sjmallett  /* Enforce required alignement, and adjust size */
248232809Sjmallett  int misaligned = ((size_t)ptr) & (MALLOC_ALIGNMENT - 1);
249232809Sjmallett  if (misaligned)
250232809Sjmallett  {
251232809Sjmallett      ptr = (char*)ptr + MALLOC_ALIGNMENT - misaligned;
252232809Sjmallett      size -= MALLOC_ALIGNMENT - misaligned;
253232809Sjmallett  }
254232809Sjmallett
255232809Sjmallett  debug_printf("Adding arena at addr: %p, size %d\n", ptr, size);
256232809Sjmallett
257232809Sjmallett  a = cvmx_new_arena(ptr, size);  /* checks ptr and size */
258232809Sjmallett  if (!a)
259232809Sjmallett  {
260232809Sjmallett      return(-1);
261232809Sjmallett  }
262232809Sjmallett
263232809Sjmallett  debug_printf("cmvx_add_arena - arena_list: %p, *arena_list: %p\n", arena_list, *arena_list);
264232809Sjmallett  debug_printf("cmvx_add_arena - list: %p, new: %p\n", *arena_list, a);
265232809Sjmallett  mutex_init(&a->mutex);
266232809Sjmallett  mutex_lock(&a->mutex);
267232809Sjmallett
268232809Sjmallett
269232809Sjmallett  if (*arena_list)
270232809Sjmallett  {
271232809Sjmallett      mstate ar_ptr = *arena_list;
272232809Sjmallett      (void)mutex_lock(&ar_ptr->mutex);
273232809Sjmallett      a->next = ar_ptr->next;  // lock held on a and ar_ptr
274232809Sjmallett      ar_ptr->next = a;
275232809Sjmallett      (void)mutex_unlock(&ar_ptr->mutex);
276232809Sjmallett  }
277232809Sjmallett  else
278232809Sjmallett  {
279232809Sjmallett      *arena_list = a;
280232809Sjmallett//      a->next = a;
281232809Sjmallett  }
282232809Sjmallett
283232809Sjmallett  debug_printf("cvmx_add_arena - list: %p, list->next: %p\n", *arena_list, ((mstate)*arena_list)->next);
284232809Sjmallett
285232809Sjmallett  // unlock, since it is not going to be used immediately
286232809Sjmallett  (void)mutex_unlock(&a->mutex);
287232809Sjmallett
288232809Sjmallett  return(0);
289232809Sjmallett}
290232809Sjmallett
291232809Sjmallett
292232809Sjmallett
293232809Sjmallett#endif /* USE_ARENAS */
294