--- huge.c (288090)
+++ huge.c (289900)
 #define JEMALLOC_HUGE_C_
 #include "jemalloc/internal/jemalloc_internal.h"
 
 /******************************************************************************/
 
 static extent_node_t *
 huge_node_get(const void *ptr)
 {

--- 119 unchanged lines hidden ---

 static void
 huge_ralloc_no_move_similar(void *ptr, size_t oldsize, size_t usize_min,
     size_t usize_max, bool zero)
 {
 	size_t usize, usize_next;
 	extent_node_t *node;
 	arena_t *arena;
 	chunk_hooks_t chunk_hooks = CHUNK_HOOKS_INITIALIZER;
-	bool zeroed;
+	bool pre_zeroed, post_zeroed;
 
 	/* Increase usize to incorporate extra. */
 	for (usize = usize_min; usize < usize_max && (usize_next = s2u(usize+1))
 	    <= oldsize; usize = usize_next)
 		; /* Do nothing. */
 
 	if (oldsize == usize)
 		return;
 
 	node = huge_node_get(ptr);
 	arena = extent_node_arena_get(node);
+	pre_zeroed = extent_node_zeroed_get(node);
 
 	/* Fill if necessary (shrinking). */
 	if (oldsize > usize) {
 		size_t sdiff = oldsize - usize;
 		if (config_fill && unlikely(opt_junk_free)) {
 			memset((void *)((uintptr_t)ptr + usize), 0x5a, sdiff);
-			zeroed = false;
+			post_zeroed = false;
 		} else {
-			zeroed = !chunk_purge_wrapper(arena, &chunk_hooks, ptr,
-			    CHUNK_CEILING(oldsize), usize, sdiff);
+			post_zeroed = !chunk_purge_wrapper(arena, &chunk_hooks,
+			    ptr, CHUNK_CEILING(oldsize), usize, sdiff);
 		}
 	} else
-		zeroed = true;
+		post_zeroed = pre_zeroed;
 
 	malloc_mutex_lock(&arena->huge_mtx);
 	/* Update the size of the huge allocation. */
 	assert(extent_node_size_get(node) != usize);
 	extent_node_size_set(node, usize);
-	/* Clear node's zeroed field if zeroing failed above. */
-	extent_node_zeroed_set(node, extent_node_zeroed_get(node) && zeroed);
+	/* Update zeroed. */
+	extent_node_zeroed_set(node, post_zeroed);
 	malloc_mutex_unlock(&arena->huge_mtx);
 
 	arena_chunk_ralloc_huge_similar(arena, ptr, oldsize, usize);
 
 	/* Fill if necessary (growing). */
 	if (oldsize < usize) {
 		if (zero || (config_fill && unlikely(opt_zero))) {
-			if (!zeroed) {
+			if (!pre_zeroed) {
 				memset((void *)((uintptr_t)ptr + oldsize), 0,
 				    usize - oldsize);
 			}
 		} else if (config_fill && unlikely(opt_junk_alloc)) {
 			memset((void *)((uintptr_t)ptr + oldsize), 0xa5, usize -
 			    oldsize);
 		}
 	}
 }
 
 static bool
 huge_ralloc_no_move_shrink(void *ptr, size_t oldsize, size_t usize)
 {
 	extent_node_t *node;
 	arena_t *arena;
 	chunk_hooks_t chunk_hooks;
 	size_t cdiff;
-	bool zeroed;
+	bool pre_zeroed, post_zeroed;
 
 	node = huge_node_get(ptr);
 	arena = extent_node_arena_get(node);
+	pre_zeroed = extent_node_zeroed_get(node);
 	chunk_hooks = chunk_hooks_get(arena);
 
 	assert(oldsize > usize);
 
 	/* Split excess chunks. */
 	cdiff = CHUNK_CEILING(oldsize) - CHUNK_CEILING(usize);
 	if (cdiff != 0 && chunk_hooks.split(ptr, CHUNK_CEILING(oldsize),
 	    CHUNK_CEILING(usize), cdiff, true, arena->ind))
 		return (true);
 
 	if (oldsize > usize) {
 		size_t sdiff = oldsize - usize;
 		if (config_fill && unlikely(opt_junk_free)) {
 			huge_dalloc_junk((void *)((uintptr_t)ptr + usize),
 			    sdiff);
-			zeroed = false;
+			post_zeroed = false;
 		} else {
-			zeroed = !chunk_purge_wrapper(arena, &chunk_hooks,
+			post_zeroed = !chunk_purge_wrapper(arena, &chunk_hooks,
 			    CHUNK_ADDR2BASE((uintptr_t)ptr + usize),
 			    CHUNK_CEILING(oldsize),
 			    CHUNK_ADDR2OFFSET((uintptr_t)ptr + usize), sdiff);
 		}
 	} else
-		zeroed = true;
+		post_zeroed = pre_zeroed;
 
 	malloc_mutex_lock(&arena->huge_mtx);
 	/* Update the size of the huge allocation. */
 	extent_node_size_set(node, usize);
-	/* Clear node's zeroed field if zeroing failed above. */
-	extent_node_zeroed_set(node, extent_node_zeroed_get(node) && zeroed);
+	/* Update zeroed. */
+	extent_node_zeroed_set(node, post_zeroed);
 	malloc_mutex_unlock(&arena->huge_mtx);
 
 	/* Zap the excess chunks. */
 	arena_chunk_ralloc_huge_shrink(arena, ptr, oldsize, usize);
 
 	return (false);
 }
 
--- 199 unchanged lines hidden ---