jemalloc.c (256823, old) → jemalloc.c (261071, new)
1#define JEMALLOC_C_
2#include "jemalloc/internal/jemalloc_internal.h"
3
4/******************************************************************************/
5/* Data. */
6
7malloc_tsd_data(, arenas, arena_t *, NULL)
8malloc_tsd_data(, thread_allocated, thread_allocated_t,

--- 90 unchanged lines hidden ---

99 errno = utrace_serrno; \
100 } \
101} while (0)
102#else
103# define UTRACE(a, b, c)
104#endif
105
106/******************************************************************************/
107/* Function prototypes for non-inline static functions. */
107/*
108 * Function prototypes for static functions that are referenced prior to
109 * definition.
110 */
108
111
109static void stats_print_atexit(void);
110static unsigned malloc_ncpus(void);
111static bool malloc_conf_next(char const **opts_p, char const **k_p,
112 size_t *klen_p, char const **v_p, size_t *vlen_p);
113static void malloc_conf_error(const char *msg, const char *k, size_t klen,
114 const char *v, size_t vlen);
115static void malloc_conf_init(void);
116static bool malloc_init_hard(void);
112static bool malloc_init_hard(void);
117static int imemalign(void **memptr, size_t alignment, size_t size,
118 size_t min_alignment);
119
120/******************************************************************************/
121/*
122 * Begin miscellaneous support functions.
123 */
124
125/* Create a new arena and insert it into the arenas array at index ind. */
126arena_t *

--- 124 unchanged lines hidden ---

251/******************************************************************************/
252/*
253 * Begin initialization functions.
254 */
255
256static unsigned
257malloc_ncpus(void)
258{
113
114/******************************************************************************/
115/*
116 * Begin miscellaneous support functions.
117 */
118
119/* Create a new arena and insert it into the arenas array at index ind. */
120arena_t *

--- 124 unchanged lines hidden ---

245/******************************************************************************/
246/*
247 * Begin initialization functions.
248 */
249
250static unsigned
251malloc_ncpus(void)
252{
259 unsigned ret;
260 long result;
261
262#ifdef _WIN32
263 SYSTEM_INFO si;
264 GetSystemInfo(&si);
265 result = si.dwNumberOfProcessors;
266#else
267 result = sysconf(_SC_NPROCESSORS_ONLN);
268#endif
253 long result;
254
255#ifdef _WIN32
256 SYSTEM_INFO si;
257 GetSystemInfo(&si);
258 result = si.dwNumberOfProcessors;
259#else
260 result = sysconf(_SC_NPROCESSORS_ONLN);
261#endif
269 if (result == -1) {
270 /* Error. */
271 ret = 1;
272 } else {
273 ret = (unsigned)result;
274 }
275
276 return (ret);
262 return ((result == -1) ? 1 : (unsigned)result);
277}
278
279void
280arenas_cleanup(void *arg)
281{
282 arena_t *arena = *(arena_t **)arg;
283
284 malloc_mutex_lock(&arenas_lock);

--- 199 unchanged lines hidden ---

484 */
485 } else {
486 /* No configuration specified. */
487 buf[0] = '\0';
488 opts = buf;
489 }
490 break;
491 } default:
263}
264
265void
266arenas_cleanup(void *arg)
267{
268 arena_t *arena = *(arena_t **)arg;
269
270 malloc_mutex_lock(&arenas_lock);

--- 199 unchanged lines hidden ---

470 */
471 } else {
472 /* No configuration specified. */
473 buf[0] = '\0';
474 opts = buf;
475 }
476 break;
477 } default:
492 /* NOTREACHED */
493 assert(false);
478 not_reached();
494 buf[0] = '\0';
495 opts = buf;
496 }
497
498 while (*opts != '\0' && malloc_conf_next(&opts, &k, &klen, &v,
499 &vlen) == false) {
500#define CONF_HANDLE_BOOL(o, n) \
501 if (sizeof(n)-1 == klen && strncmp(n, k, \

--- 20 unchanged lines hidden ---

522 set_errno(0); \
523 um = malloc_strtoumax(v, &end, 0); \
524 if (get_errno() != 0 || (uintptr_t)end -\
525 (uintptr_t)v != vlen) { \
526 malloc_conf_error( \
527 "Invalid conf value", \
528 k, klen, v, vlen); \
529 } else if (clip) { \
479 buf[0] = '\0';
480 opts = buf;
481 }
482
483 while (*opts != '\0' && malloc_conf_next(&opts, &k, &klen, &v,
484 &vlen) == false) {
485#define CONF_HANDLE_BOOL(o, n) \
486 if (sizeof(n)-1 == klen && strncmp(n, k, \

--- 20 unchanged lines hidden ---

507 set_errno(0); \
508 um = malloc_strtoumax(v, &end, 0); \
509 if (get_errno() != 0 || (uintptr_t)end -\
510 (uintptr_t)v != vlen) { \
511 malloc_conf_error( \
512 "Invalid conf value", \
513 k, klen, v, vlen); \
514 } else if (clip) { \
530 if (um < min) \
515 if (min != 0 && um < min) \
531 o = min; \
532 else if (um > max) \
533 o = max; \
534 else \
535 o = um; \
536 } else { \
516 o = min; \
517 else if (um > max) \
518 o = max; \
519 else \
520 o = um; \
521 } else { \
537 if (um < min || um > max) { \
522 if ((min != 0 && um < min) || \
523 um > max) { \
538 malloc_conf_error( \
539 "Out-of-range " \
540 "conf value", \
541 k, klen, v, vlen); \
542 } else \
543 o = um; \
544 } \
545 continue; \

--- 149 unchanged lines hidden ---

695 malloc_initializer = INITIALIZER;
696
697 malloc_tsd_boot();
698 if (config_prof)
699 prof_boot0();
700
701 malloc_conf_init();
702
524 malloc_conf_error( \
525 "Out-of-range " \
526 "conf value", \
527 k, klen, v, vlen); \
528 } else \
529 o = um; \
530 } \
531 continue; \

--- 149 unchanged lines hidden ---

681 malloc_initializer = INITIALIZER;
682
683 malloc_tsd_boot();
684 if (config_prof)
685 prof_boot0();
686
687 malloc_conf_init();
688
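
In the CONF_HANDLE_* clip logic rewritten above, um < min is now guarded by min != 0: when the macro is instantiated with a literal minimum of 0 for an unsigned type, the bare comparison is always false, presumably drawing "comparison always false" warnings from some compilers. A standalone sketch of the clamp (hypothetical helper, not from either revision):

#include <stddef.h>

/*
 * Clamp an unsigned value to [min, max]; the min != 0 guard mirrors the
 * rewritten macro and avoids an always-false "um < 0" comparison.
 */
static size_t
clamp_zu(size_t um, size_t min, size_t max)
{

	if (min != 0 && um < min)
		return (min);
	if (um > max)
		return (max);
	return (um);
}
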
703#if (!defined(JEMALLOC_MUTEX_INIT_CB) && !defined(JEMALLOC_ZONE) \
704 && !defined(_WIN32))
705 /* Register fork handlers. */
706 if (pthread_atfork(jemalloc_prefork, jemalloc_postfork_parent,
707 jemalloc_postfork_child) != 0) {
708 malloc_write("<jemalloc>: Error in pthread_atfork()\n");
709 if (opt_abort)
710 abort();
711 }
712#endif
713
714 if (opt_stats_print) {
715 /* Print statistics at exit. */
716 if (atexit(stats_print_atexit) != 0) {
717 malloc_write("<jemalloc>: Error in atexit()\n");
718 if (opt_abort)
719 abort();
720 }
721 }

--- 23 unchanged lines hidden ---

745 return (true);
746 }
747
748 if (huge_boot()) {
749 malloc_mutex_unlock(&init_lock);
750 return (true);
751 }
752
689 if (opt_stats_print) {
690 /* Print statistics at exit. */
691 if (atexit(stats_print_atexit) != 0) {
692 malloc_write("<jemalloc>: Error in atexit()\n");
693 if (opt_abort)
694 abort();
695 }
696 }

--- 23 unchanged lines hidden ---

720 return (true);
721 }
722
723 if (huge_boot()) {
724 malloc_mutex_unlock(&init_lock);
725 return (true);
726 }
727
753 if (malloc_mutex_init(&arenas_lock))
728 if (malloc_mutex_init(&arenas_lock)) {
729 malloc_mutex_unlock(&init_lock);
754 return (true);
730 return (true);
731 }
755
756 /*
757 * Create enough scaffolding to allow recursive allocation in
758 * malloc_ncpus().
759 */
760 narenas_total = narenas_auto = 1;
761 arenas = init_arenas;
762 memset(arenas, 0, sizeof(arena_t *) * narenas_auto);

--- 29 unchanged lines hidden ---

792 return (true);
793 }
794
795 if (config_prof && prof_boot2()) {
796 malloc_mutex_unlock(&init_lock);
797 return (true);
798 }
799
732
733 /*
734 * Create enough scaffolding to allow recursive allocation in
735 * malloc_ncpus().
736 */
737 narenas_total = narenas_auto = 1;
738 arenas = init_arenas;
739 memset(arenas, 0, sizeof(arena_t *) * narenas_auto);

--- 29 unchanged lines hidden ---

769 return (true);
770 }
771
772 if (config_prof && prof_boot2()) {
773 malloc_mutex_unlock(&init_lock);
774 return (true);
775 }
776
800 /* Get number of CPUs. */
801 malloc_mutex_unlock(&init_lock);
777 malloc_mutex_unlock(&init_lock);
778 /**********************************************************************/
779 /* Recursive allocation may follow. */
780
802 ncpus = malloc_ncpus();
781 ncpus = malloc_ncpus();
782
783#if (!defined(JEMALLOC_MUTEX_INIT_CB) && !defined(JEMALLOC_ZONE) \
784 && !defined(_WIN32))
785 /* LinuxThreads's pthread_atfork() allocates. */
786 if (pthread_atfork(jemalloc_prefork, jemalloc_postfork_parent,
787 jemalloc_postfork_child) != 0) {
788 malloc_write("<jemalloc>: Error in pthread_atfork()\n");
789 if (opt_abort)
790 abort();
791 }
792#endif
793
794 /* Done recursively allocating. */
795 /**********************************************************************/
803 malloc_mutex_lock(&init_lock);
804
805 if (mutex_boot()) {
806 malloc_mutex_unlock(&init_lock);
807 return (true);
808 }
809
810 if (opt_narenas == 0) {

--- 30 unchanged lines hidden ---

841 * since it was just mmap()ed, but let's be sure.
842 */
843 memset(arenas, 0, sizeof(arena_t *) * narenas_total);
844 /* Copy the pointer to the one arena that was already initialized. */
845 arenas[0] = init_arenas[0];
846
847 malloc_initialized = true;
848 malloc_mutex_unlock(&init_lock);
796 malloc_mutex_lock(&init_lock);
797
798 if (mutex_boot()) {
799 malloc_mutex_unlock(&init_lock);
800 return (true);
801 }
802
803 if (opt_narenas == 0) {

--- 30 unchanged lines hidden ---

834 * since it was just mmap()ed, but let's be sure.
835 */
836 memset(arenas, 0, sizeof(arena_t *) * narenas_total);
837 /* Copy the pointer to the one arena that was already initialized. */
838 arenas[0] = init_arenas[0];
839
840 malloc_initialized = true;
841 malloc_mutex_unlock(&init_lock);
842
849 return (false);
850}
851
852/*
853 * End initialization functions.
854 */
855/******************************************************************************/
856/*
857 * Begin malloc(3)-compatible functions.
858 */
859
843 return (false);
844}
845
846/*
847 * End initialization functions.
848 */
849/******************************************************************************/
850/*
851 * Begin malloc(3)-compatible functions.
852 */
853
854static void *
855imalloc_prof_sample(size_t usize, prof_thr_cnt_t *cnt)
856{
857 void *p;
858
859 if (cnt == NULL)
860 return (NULL);
861 if (prof_promote && usize <= SMALL_MAXCLASS) {
862 p = imalloc(SMALL_MAXCLASS+1);
863 if (p == NULL)
864 return (NULL);
865 arena_prof_promoted(p, usize);
866 } else
867 p = imalloc(usize);
868
869 return (p);
870}
871
872JEMALLOC_ALWAYS_INLINE_C void *
873imalloc_prof(size_t usize, prof_thr_cnt_t *cnt)
874{
875 void *p;
876
877 if ((uintptr_t)cnt != (uintptr_t)1U)
878 p = imalloc_prof_sample(usize, cnt);
879 else
880 p = imalloc(usize);
881 if (p == NULL)
882 return (NULL);
883 prof_malloc(p, usize, cnt);
884
885 return (p);
886}
887
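
The imalloc_prof_sample()/imalloc_prof() pair added here factors out a pattern that recurs below for calloc, realloc, and memalign: PROF_ALLOC_PREP() yields NULL on failure, the sentinel (uintptr_t)1U when this particular allocation should not be sampled, or a real counter when it should; sampled small requests are apparently sized up to SMALL_MAXCLASS+1 so the object lands in a class that can carry per-allocation profiling context, with arena_prof_promoted() recording the true usize. A minimal sketch of the dispatch, using hypothetical stand-ins (prof_cnt_t, alloc_fast, alloc_sampled, record_sample) for the jemalloc internals:

#include <stdint.h>
#include <stddef.h>
#include <stdlib.h>

typedef struct { long dummy; } prof_cnt_t;	/* stand-in for prof_thr_cnt_t */

static void *alloc_fast(size_t usize) { return (malloc(usize)); }
static void *alloc_sampled(size_t usize) { return (malloc(usize)); }
static void record_sample(void *p, size_t usize, prof_cnt_t *cnt)
	{ (void)p; (void)usize; (void)cnt; }

static void *
alloc_prof_dispatch(size_t usize, prof_cnt_t *cnt)
{
	void *p;

	if (cnt == NULL)			/* Preparation failed: OOM. */
		return (NULL);
	if ((uintptr_t)cnt != (uintptr_t)1U)	/* Real counter: sample. */
		p = alloc_sampled(usize);
	else					/* Sentinel 1U: fast path. */
		p = alloc_fast(usize);
	if (p == NULL)
		return (NULL);
	record_sample(p, usize, cnt);		/* prof_malloc() analogue. */
	return (p);
}
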
888/*
889 * MALLOC_BODY() is a macro rather than a function because its contents are in
890 * the fast path, but inlining would cause reliability issues when determining
891 * how many frames to discard from heap profiling backtraces.
892 */
893#define MALLOC_BODY(ret, size, usize) do { \
894 if (malloc_init()) \
895 ret = NULL; \
896 else { \
897 if (config_prof && opt_prof) { \
898 prof_thr_cnt_t *cnt; \
899 \
900 usize = s2u(size); \
901 /* \
902 * Call PROF_ALLOC_PREP() here rather than in \
903 * imalloc_prof() so that imalloc_prof() can be \
904 * inlined without introducing uncertainty \
905 * about the number of backtrace frames to \
906 * ignore. imalloc_prof() is in the fast path \
907 * when heap profiling is enabled, so inlining \
908 * is critical to performance. (For \
909 * consistency all callers of PROF_ALLOC_PREP() \
910 * are structured similarly, even though e.g. \
911 * realloc() isn't called enough for inlining \
912 * to be critical.) \
913 */ \
914 PROF_ALLOC_PREP(1, usize, cnt); \
915 ret = imalloc_prof(usize, cnt); \
916 } else { \
917 if (config_stats || (config_valgrind && \
918 opt_valgrind)) \
919 usize = s2u(size); \
920 ret = imalloc(size); \
921 } \
922 } \
923} while (0)
924
860void *
861je_malloc(size_t size)
862{
863 void *ret;
864 size_t usize JEMALLOC_CC_SILENCE_INIT(0);
925void *
926je_malloc(size_t size)
927{
928 void *ret;
929 size_t usize JEMALLOC_CC_SILENCE_INIT(0);
865 prof_thr_cnt_t *cnt JEMALLOC_CC_SILENCE_INIT(NULL);
866
930
867 if (malloc_init()) {
868 ret = NULL;
869 goto label_oom;
870 }
871
872 if (size == 0)
873 size = 1;
874
931 if (size == 0)
932 size = 1;
933
875 if (config_prof && opt_prof) {
876 usize = s2u(size);
877 PROF_ALLOC_PREP(1, usize, cnt);
878 if (cnt == NULL) {
879 ret = NULL;
880 goto label_oom;
881 }
882 if (prof_promote && (uintptr_t)cnt != (uintptr_t)1U && usize <=
883 SMALL_MAXCLASS) {
884 ret = imalloc(SMALL_MAXCLASS+1);
885 if (ret != NULL)
886 arena_prof_promoted(ret, usize);
887 } else
888 ret = imalloc(size);
889 } else {
890 if (config_stats || (config_valgrind && opt_valgrind))
891 usize = s2u(size);
892 ret = imalloc(size);
893 }
934 MALLOC_BODY(ret, size, usize);
894
935
895label_oom:
896 if (ret == NULL) {
897 if (config_xmalloc && opt_xmalloc) {
898 malloc_write("<jemalloc>: Error in malloc(): "
899 "out of memory\n");
900 abort();
901 }
902 set_errno(ENOMEM);
903 }
936 if (ret == NULL) {
937 if (config_xmalloc && opt_xmalloc) {
938 malloc_write("<jemalloc>: Error in malloc(): "
939 "out of memory\n");
940 abort();
941 }
942 set_errno(ENOMEM);
943 }
904 if (config_prof && opt_prof && ret != NULL)
905 prof_malloc(ret, usize, cnt);
906 if (config_stats && ret != NULL) {
907 assert(usize == isalloc(ret, config_prof));
908 thread_allocated_tsd_get()->allocated += usize;
909 }
910 UTRACE(0, size, ret);
911 JEMALLOC_VALGRIND_MALLOC(ret != NULL, ret, usize, false);
912 return (ret);
913}
914
944 if (config_stats && ret != NULL) {
945 assert(usize == isalloc(ret, config_prof));
946 thread_allocated_tsd_get()->allocated += usize;
947 }
948 UTRACE(0, size, ret);
949 JEMALLOC_VALGRIND_MALLOC(ret != NULL, ret, usize, false);
950 return (ret);
951}
952
953static void *
954imemalign_prof_sample(size_t alignment, size_t usize, prof_thr_cnt_t *cnt)
955{
956 void *p;
957
958 if (cnt == NULL)
959 return (NULL);
960 if (prof_promote && usize <= SMALL_MAXCLASS) {
961 assert(sa2u(SMALL_MAXCLASS+1, alignment) != 0);
962 p = ipalloc(sa2u(SMALL_MAXCLASS+1, alignment), alignment,
963 false);
964 if (p == NULL)
965 return (NULL);
966 arena_prof_promoted(p, usize);
967 } else
968 p = ipalloc(usize, alignment, false);
969
970 return (p);
971}
972
973JEMALLOC_ALWAYS_INLINE_C void *
974imemalign_prof(size_t alignment, size_t usize, prof_thr_cnt_t *cnt)
975{
976 void *p;
977
978 if ((uintptr_t)cnt != (uintptr_t)1U)
979 p = imemalign_prof_sample(alignment, usize, cnt);
980 else
981 p = ipalloc(usize, alignment, false);
982 if (p == NULL)
983 return (NULL);
984 prof_malloc(p, usize, cnt);
985
986 return (p);
987}
988
915JEMALLOC_ATTR(nonnull(1))
916#ifdef JEMALLOC_PROF
917/*
918 * Avoid any uncertainty as to how many backtrace frames to ignore in
919 * PROF_ALLOC_PREP().
920 */
921JEMALLOC_NOINLINE
922#endif
923static int
989JEMALLOC_ATTR(nonnull(1))
990#ifdef JEMALLOC_PROF
991/*
992 * Avoid any uncertainty as to how many backtrace frames to ignore in
993 * PROF_ALLOC_PREP().
994 */
995JEMALLOC_NOINLINE
996#endif
997static int
924imemalign(void **memptr, size_t alignment, size_t size,
925 size_t min_alignment)
998imemalign(void **memptr, size_t alignment, size_t size, size_t min_alignment)
926{
927 int ret;
928 size_t usize;
929 void *result;
999{
1000 int ret;
1001 size_t usize;
1002 void *result;
930 prof_thr_cnt_t *cnt JEMALLOC_CC_SILENCE_INIT(NULL);
931
932 assert(min_alignment != 0);
933
1003
1004 assert(min_alignment != 0);
1005
934 if (malloc_init())
1006 if (malloc_init()) {
935 result = NULL;
1007 result = NULL;
936 else {
1008 goto label_oom;
1009 } else {
937 if (size == 0)
938 size = 1;
939
940 /* Make sure that alignment is a large enough power of 2. */
941 if (((alignment - 1) & alignment) != 0
942 || (alignment < min_alignment)) {
943 if (config_xmalloc && opt_xmalloc) {
944 malloc_write("<jemalloc>: Error allocating "
945 "aligned memory: invalid alignment\n");
946 abort();
947 }
948 result = NULL;
949 ret = EINVAL;
950 goto label_return;
951 }
952
953 usize = sa2u(size, alignment);
954 if (usize == 0) {
955 result = NULL;
1010 if (size == 0)
1011 size = 1;
1012
1013 /* Make sure that alignment is a large enough power of 2. */
1014 if (((alignment - 1) & alignment) != 0
1015 || (alignment < min_alignment)) {
1016 if (config_xmalloc && opt_xmalloc) {
1017 malloc_write("<jemalloc>: Error allocating "
1018 "aligned memory: invalid alignment\n");
1019 abort();
1020 }
1021 result = NULL;
1022 ret = EINVAL;
1023 goto label_return;
1024 }
1025
1026 usize = sa2u(size, alignment);
1027 if (usize == 0) {
1028 result = NULL;
956 ret = ENOMEM;
957 goto label_return;
1029 goto label_oom;
958 }
959
960 if (config_prof && opt_prof) {
1030 }
1031
1032 if (config_prof && opt_prof) {
1033 prof_thr_cnt_t *cnt;
1034
961 PROF_ALLOC_PREP(2, usize, cnt);
1035 PROF_ALLOC_PREP(2, usize, cnt);
962 if (cnt == NULL) {
963 result = NULL;
964 ret = EINVAL;
965 } else {
966 if (prof_promote && (uintptr_t)cnt !=
967 (uintptr_t)1U && usize <= SMALL_MAXCLASS) {
968 assert(sa2u(SMALL_MAXCLASS+1,
969 alignment) != 0);
970 result = ipalloc(sa2u(SMALL_MAXCLASS+1,
971 alignment), alignment, false);
972 if (result != NULL) {
973 arena_prof_promoted(result,
974 usize);
975 }
976 } else {
977 result = ipalloc(usize, alignment,
978 false);
979 }
980 }
1036 result = imemalign_prof(alignment, usize, cnt);
981 } else
982 result = ipalloc(usize, alignment, false);
1037 } else
1038 result = ipalloc(usize, alignment, false);
1039 if (result == NULL)
1040 goto label_oom;
983 }
984
1041 }
1042
985 if (result == NULL) {
986 if (config_xmalloc && opt_xmalloc) {
987 malloc_write("<jemalloc>: Error allocating aligned "
988 "memory: out of memory\n");
989 abort();
990 }
991 ret = ENOMEM;
992 goto label_return;
993 }
994
995 *memptr = result;
996 ret = 0;
1043 *memptr = result;
1044 ret = 0;
997
998label_return:
999 if (config_stats && result != NULL) {
1000 assert(usize == isalloc(result, config_prof));
1001 thread_allocated_tsd_get()->allocated += usize;
1002 }
1045label_return:
1046 if (config_stats && result != NULL) {
1047 assert(usize == isalloc(result, config_prof));
1048 thread_allocated_tsd_get()->allocated += usize;
1049 }
1003 if (config_prof && opt_prof && result != NULL)
1004 prof_malloc(result, usize, cnt);
1005 UTRACE(0, size, result);
1006 return (ret);
1050 UTRACE(0, size, result);
1051 return (ret);
1052label_oom:
1053 assert(result == NULL);
1054 if (config_xmalloc && opt_xmalloc) {
1055 malloc_write("<jemalloc>: Error allocating aligned memory: "
1056 "out of memory\n");
1057 abort();
1058 }
1059 ret = ENOMEM;
1060 goto label_return;
1007}
1008
1009int
1010je_posix_memalign(void **memptr, size_t alignment, size_t size)
1011{
1012 int ret = imemalign(memptr, alignment, size, sizeof(void *));
1013 JEMALLOC_VALGRIND_MALLOC(ret == 0, *memptr, isalloc(*memptr,
1014 config_prof), false);

--- 10 unchanged lines hidden ---

1025 ret = NULL;
1026 set_errno(err);
1027 }
1028 JEMALLOC_VALGRIND_MALLOC(err == 0, ret, isalloc(ret, config_prof),
1029 false);
1030 return (ret);
1031}
1032
1061}
1062
1063int
1064je_posix_memalign(void **memptr, size_t alignment, size_t size)
1065{
1066 int ret = imemalign(memptr, alignment, size, sizeof(void *));
1067 JEMALLOC_VALGRIND_MALLOC(ret == 0, *memptr, isalloc(*memptr,
1068 config_prof), false);

--- 10 unchanged lines hidden ---

1079 ret = NULL;
1080 set_errno(err);
1081 }
1082 JEMALLOC_VALGRIND_MALLOC(err == 0, ret, isalloc(ret, config_prof),
1083 false);
1084 return (ret);
1085}
1086
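
imemalign() above rejects any alignment for which ((alignment - 1) & alignment) != 0: a power of two has exactly one set bit, so ANDing it with its predecessor must yield zero. A self-contained illustration (hypothetical helper, not part of either revision):

#include <assert.h>
#include <stddef.h>

/* True iff a is a nonzero power of two, i.e. a usable alignment. */
static int
is_pow2(size_t a)
{

	return (a != 0 && ((a - 1) & a) == 0);
}

int
main(void)
{

	assert(is_pow2(8));	/* 1000 & 0111 == 0 */
	assert(!is_pow2(12));	/* 1100 & 1011 == 1000 */
	return (0);
}
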
1087static void *
1088icalloc_prof_sample(size_t usize, prof_thr_cnt_t *cnt)
1089{
1090 void *p;
1091
1092 if (cnt == NULL)
1093 return (NULL);
1094 if (prof_promote && usize <= SMALL_MAXCLASS) {
1095 p = icalloc(SMALL_MAXCLASS+1);
1096 if (p == NULL)
1097 return (NULL);
1098 arena_prof_promoted(p, usize);
1099 } else
1100 p = icalloc(usize);
1101
1102 return (p);
1103}
1104
1105JEMALLOC_ALWAYS_INLINE_C void *
1106icalloc_prof(size_t usize, prof_thr_cnt_t *cnt)
1107{
1108 void *p;
1109
1110 if ((uintptr_t)cnt != (uintptr_t)1U)
1111 p = icalloc_prof_sample(usize, cnt);
1112 else
1113 p = icalloc(usize);
1114 if (p == NULL)
1115 return (NULL);
1116 prof_malloc(p, usize, cnt);
1117
1118 return (p);
1119}
1120
1033void *
1034je_calloc(size_t num, size_t size)
1035{
1036 void *ret;
1037 size_t num_size;
1038 size_t usize JEMALLOC_CC_SILENCE_INIT(0);
1121void *
1122je_calloc(size_t num, size_t size)
1123{
1124 void *ret;
1125 size_t num_size;
1126 size_t usize JEMALLOC_CC_SILENCE_INIT(0);
1039 prof_thr_cnt_t *cnt JEMALLOC_CC_SILENCE_INIT(NULL);
1040
1041 if (malloc_init()) {
1042 num_size = 0;
1043 ret = NULL;
1044 goto label_return;
1045 }
1046
1047 num_size = num * size;

--- 12 unchanged lines hidden (view full) ---

1060 } else if (((num | size) & (SIZE_T_MAX << (sizeof(size_t) << 2)))
1061 && (num_size / size != num)) {
1062 /* size_t overflow. */
1063 ret = NULL;
1064 goto label_return;
1065 }
1066
1067 if (config_prof && opt_prof) {
1127
1128 if (malloc_init()) {
1129 num_size = 0;
1130 ret = NULL;
1131 goto label_return;
1132 }
1133
1134 num_size = num * size;

--- 12 unchanged lines hidden (view full) ---

1147 } else if (((num | size) & (SIZE_T_MAX << (sizeof(size_t) << 2)))
1148 && (num_size / size != num)) {
1149 /* size_t overflow. */
1150 ret = NULL;
1151 goto label_return;
1152 }
1153
1154 if (config_prof && opt_prof) {
1155 prof_thr_cnt_t *cnt;
1156
1068 usize = s2u(num_size);
1069 PROF_ALLOC_PREP(1, usize, cnt);
1157 usize = s2u(num_size);
1158 PROF_ALLOC_PREP(1, usize, cnt);
1070 if (cnt == NULL) {
1071 ret = NULL;
1072 goto label_return;
1073 }
1074 if (prof_promote && (uintptr_t)cnt != (uintptr_t)1U && usize
1075 <= SMALL_MAXCLASS) {
1076 ret = icalloc(SMALL_MAXCLASS+1);
1077 if (ret != NULL)
1078 arena_prof_promoted(ret, usize);
1079 } else
1080 ret = icalloc(num_size);
1159 ret = icalloc_prof(usize, cnt);
1081 } else {
1082 if (config_stats || (config_valgrind && opt_valgrind))
1083 usize = s2u(num_size);
1084 ret = icalloc(num_size);
1085 }
1086
1087label_return:
1088 if (ret == NULL) {
1089 if (config_xmalloc && opt_xmalloc) {
1090 malloc_write("<jemalloc>: Error in calloc(): out of "
1091 "memory\n");
1092 abort();
1093 }
1094 set_errno(ENOMEM);
1095 }
1160 } else {
1161 if (config_stats || (config_valgrind && opt_valgrind))
1162 usize = s2u(num_size);
1163 ret = icalloc(num_size);
1164 }
1165
1166label_return:
1167 if (ret == NULL) {
1168 if (config_xmalloc && opt_xmalloc) {
1169 malloc_write("<jemalloc>: Error in calloc(): out of "
1170 "memory\n");
1171 abort();
1172 }
1173 set_errno(ENOMEM);
1174 }
1096
1097 if (config_prof && opt_prof && ret != NULL)
1098 prof_malloc(ret, usize, cnt);
1099 if (config_stats && ret != NULL) {
1100 assert(usize == isalloc(ret, config_prof));
1101 thread_allocated_tsd_get()->allocated += usize;
1102 }
1103 UTRACE(0, num_size, ret);
1104 JEMALLOC_VALGRIND_MALLOC(ret != NULL, ret, usize, true);
1105 return (ret);
1106}
1107
1175 if (config_stats && ret != NULL) {
1176 assert(usize == isalloc(ret, config_prof));
1177 thread_allocated_tsd_get()->allocated += usize;
1178 }
1179 UTRACE(0, num_size, ret);
1180 JEMALLOC_VALGRIND_MALLOC(ret != NULL, ret, usize, true);
1181 return (ret);
1182}
1183
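
The overflow test in je_calloc() is worth unpacking: sizeof(size_t) << 2 is half the type's bit width (bytes times four), so SIZE_T_MAX shifted left by it masks the upper half of the bits. If neither num nor size has any upper-half bit set, num * size cannot overflow, and the division check is skipped. A hedged standalone version (SIZE_MAX standing in for jemalloc's SIZE_T_MAX):

#include <stdint.h>
#include <stddef.h>

/* Nonzero iff num * size overflows size_t; assumes size != 0. */
static int
mul_overflows(size_t num, size_t size)
{
	/* Upper half of size_t's bits, e.g. 0xffffffff00000000 on LP64. */
	const size_t high = SIZE_MAX << (sizeof(size_t) << 2);

	if (((num | size) & high) == 0)
		return (0);	/* Both fit in the low half: no overflow. */
	return ((num * size) / size != num);	/* Exact but slower check. */
}
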
1184static void *
1185irealloc_prof_sample(void *oldptr, size_t usize, prof_thr_cnt_t *cnt)
1186{
1187 void *p;
1188
1189 if (cnt == NULL)
1190 return (NULL);
1191 if (prof_promote && usize <= SMALL_MAXCLASS) {
1192 p = iralloc(oldptr, SMALL_MAXCLASS+1, 0, 0, false);
1193 if (p == NULL)
1194 return (NULL);
1195 arena_prof_promoted(p, usize);
1196 } else
1197 p = iralloc(oldptr, usize, 0, 0, false);
1198
1199 return (p);
1200}
1201
1202JEMALLOC_ALWAYS_INLINE_C void *
1203irealloc_prof(void *oldptr, size_t old_usize, size_t usize, prof_thr_cnt_t *cnt)
1204{
1205 void *p;
1206 prof_ctx_t *old_ctx;
1207
1208 old_ctx = prof_ctx_get(oldptr);
1209 if ((uintptr_t)cnt != (uintptr_t)1U)
1210 p = irealloc_prof_sample(oldptr, usize, cnt);
1211 else
1212 p = iralloc(oldptr, usize, 0, 0, false);
1213 if (p == NULL)
1214 return (NULL);
1215 prof_realloc(p, usize, cnt, old_usize, old_ctx);
1216
1217 return (p);
1218}
1219
1220JEMALLOC_INLINE_C void
1221ifree(void *ptr)
1222{
1223 size_t usize;
1224 UNUSED size_t rzsize JEMALLOC_CC_SILENCE_INIT(0);
1225
1226 assert(ptr != NULL);
1227 assert(malloc_initialized || IS_INITIALIZER);
1228
1229 if (config_prof && opt_prof) {
1230 usize = isalloc(ptr, config_prof);
1231 prof_free(ptr, usize);
1232 } else if (config_stats || config_valgrind)
1233 usize = isalloc(ptr, config_prof);
1234 if (config_stats)
1235 thread_allocated_tsd_get()->deallocated += usize;
1236 if (config_valgrind && opt_valgrind)
1237 rzsize = p2rz(ptr);
1238 iqalloc(ptr);
1239 JEMALLOC_VALGRIND_FREE(ptr, rzsize);
1240}
1241
1108void *
1109je_realloc(void *ptr, size_t size)
1110{
1111 void *ret;
1112 size_t usize JEMALLOC_CC_SILENCE_INIT(0);
1242void *
1243je_realloc(void *ptr, size_t size)
1244{
1245 void *ret;
1246 size_t usize JEMALLOC_CC_SILENCE_INIT(0);
1113 size_t old_size = 0;
1114 size_t old_rzsize JEMALLOC_CC_SILENCE_INIT(0);
1115 prof_thr_cnt_t *cnt JEMALLOC_CC_SILENCE_INIT(NULL);
1116 prof_ctx_t *old_ctx JEMALLOC_CC_SILENCE_INIT(NULL);
1247 size_t old_usize = 0;
1248 UNUSED size_t old_rzsize JEMALLOC_CC_SILENCE_INIT(0);
1117
1118 if (size == 0) {
1119 if (ptr != NULL) {
1249
1250 if (size == 0) {
1251 if (ptr != NULL) {
1120 /* realloc(ptr, 0) is equivalent to free(p). */
1121 assert(malloc_initialized || IS_INITIALIZER);
1122 if (config_prof) {
1123 old_size = isalloc(ptr, true);
1124 if (config_valgrind && opt_valgrind)
1125 old_rzsize = p2rz(ptr);
1126 } else if (config_stats) {
1127 old_size = isalloc(ptr, false);
1128 if (config_valgrind && opt_valgrind)
1129 old_rzsize = u2rz(old_size);
1130 } else if (config_valgrind && opt_valgrind) {
1131 old_size = isalloc(ptr, false);
1132 old_rzsize = u2rz(old_size);
1133 }
1134 if (config_prof && opt_prof) {
1135 old_ctx = prof_ctx_get(ptr);
1136 cnt = NULL;
1137 }
1138 iqalloc(ptr);
1139 ret = NULL;
1140 goto label_return;
1141 } else
1142 size = 1;
1252 /* realloc(ptr, 0) is equivalent to free(ptr). */
1253 UTRACE(ptr, 0, 0);
1254 ifree(ptr);
1255 return (NULL);
1256 }
1257 size = 1;
1143 }
1144
1145 if (ptr != NULL) {
1146 assert(malloc_initialized || IS_INITIALIZER);
1147 malloc_thread_init();
1148
1258 }
1259
1260 if (ptr != NULL) {
1261 assert(malloc_initialized || IS_INITIALIZER);
1262 malloc_thread_init();
1263
1149 if (config_prof) {
1150 old_size = isalloc(ptr, true);
1151 if (config_valgrind && opt_valgrind)
1152 old_rzsize = p2rz(ptr);
1153 } else if (config_stats) {
1154 old_size = isalloc(ptr, false);
1155 if (config_valgrind && opt_valgrind)
1156 old_rzsize = u2rz(old_size);
1157 } else if (config_valgrind && opt_valgrind) {
1158 old_size = isalloc(ptr, false);
1159 old_rzsize = u2rz(old_size);
1160 }
1264 if ((config_prof && opt_prof) || config_stats ||
1265 (config_valgrind && opt_valgrind))
1266 old_usize = isalloc(ptr, config_prof);
1267 if (config_valgrind && opt_valgrind)
1268 old_rzsize = config_prof ? p2rz(ptr) : u2rz(old_usize);
1269
1161 if (config_prof && opt_prof) {
1270 if (config_prof && opt_prof) {
1271 prof_thr_cnt_t *cnt;
1272
1162 usize = s2u(size);
1273 usize = s2u(size);
1163 old_ctx = prof_ctx_get(ptr);
1164 PROF_ALLOC_PREP(1, usize, cnt);
1274 PROF_ALLOC_PREP(1, usize, cnt);
1165 if (cnt == NULL) {
1166 old_ctx = NULL;
1167 ret = NULL;
1168 goto label_oom;
1169 }
1170 if (prof_promote && (uintptr_t)cnt != (uintptr_t)1U &&
1171 usize <= SMALL_MAXCLASS) {
1172 ret = iralloc(ptr, SMALL_MAXCLASS+1, 0, 0,
1173 false, false);
1174 if (ret != NULL)
1175 arena_prof_promoted(ret, usize);
1176 else
1177 old_ctx = NULL;
1178 } else {
1179 ret = iralloc(ptr, size, 0, 0, false, false);
1180 if (ret == NULL)
1181 old_ctx = NULL;
1182 }
1275 ret = irealloc_prof(ptr, old_usize, usize, cnt);
1183 } else {
1184 if (config_stats || (config_valgrind && opt_valgrind))
1185 usize = s2u(size);
1276 } else {
1277 if (config_stats || (config_valgrind && opt_valgrind))
1278 usize = s2u(size);
1186 ret = iralloc(ptr, size, 0, 0, false, false);
1279 ret = iralloc(ptr, size, 0, 0, false);
1187 }
1280 }
1188
1189label_oom:
1190 if (ret == NULL) {
1191 if (config_xmalloc && opt_xmalloc) {
1192 malloc_write("<jemalloc>: Error in realloc(): "
1193 "out of memory\n");
1194 abort();
1195 }
1196 set_errno(ENOMEM);
1197 }
1198 } else {
1199 /* realloc(NULL, size) is equivalent to malloc(size). */
1281 } else {
1282 /* realloc(NULL, size) is equivalent to malloc(size). */
1200 if (config_prof && opt_prof)
1201 old_ctx = NULL;
1202 if (malloc_init()) {
1203 if (config_prof && opt_prof)
1204 cnt = NULL;
1205 ret = NULL;
1206 } else {
1207 if (config_prof && opt_prof) {
1208 usize = s2u(size);
1209 PROF_ALLOC_PREP(1, usize, cnt);
1210 if (cnt == NULL)
1211 ret = NULL;
1212 else {
1213 if (prof_promote && (uintptr_t)cnt !=
1214 (uintptr_t)1U && usize <=
1215 SMALL_MAXCLASS) {
1216 ret = imalloc(SMALL_MAXCLASS+1);
1217 if (ret != NULL) {
1218 arena_prof_promoted(ret,
1219 usize);
1220 }
1221 } else
1222 ret = imalloc(size);
1223 }
1224 } else {
1225 if (config_stats || (config_valgrind &&
1226 opt_valgrind))
1227 usize = s2u(size);
1228 ret = imalloc(size);
1229 }
1230 }
1283 MALLOC_BODY(ret, size, usize);
1284 }
1231
1285
1232 if (ret == NULL) {
1233 if (config_xmalloc && opt_xmalloc) {
1234 malloc_write("<jemalloc>: Error in realloc(): "
1235 "out of memory\n");
1236 abort();
1237 }
1238 set_errno(ENOMEM);
1286 if (ret == NULL) {
1287 if (config_xmalloc && opt_xmalloc) {
1288 malloc_write("<jemalloc>: Error in realloc(): "
1289 "out of memory\n");
1290 abort();
1239 }
1291 }
1292 set_errno(ENOMEM);
1240 }
1293 }
1241
1242label_return:
1243 if (config_prof && opt_prof)
1244 prof_realloc(ret, usize, cnt, old_size, old_ctx);
1245 if (config_stats && ret != NULL) {
1246 thread_allocated_t *ta;
1247 assert(usize == isalloc(ret, config_prof));
1248 ta = thread_allocated_tsd_get();
1249 ta->allocated += usize;
1294 if (config_stats && ret != NULL) {
1295 thread_allocated_t *ta;
1296 assert(usize == isalloc(ret, config_prof));
1297 ta = thread_allocated_tsd_get();
1298 ta->allocated += usize;
1250 ta->deallocated += old_size;
1299 ta->deallocated += old_usize;
1251 }
1252 UTRACE(ptr, size, ret);
1300 }
1301 UTRACE(ptr, size, ret);
1253 JEMALLOC_VALGRIND_REALLOC(ret, usize, ptr, old_size, old_rzsize, false);
1302 JEMALLOC_VALGRIND_REALLOC(ret, usize, ptr, old_usize, old_rzsize,
1303 false);
1254 return (ret);
1255}
1256
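
As the branches above make explicit, je_realloc(NULL, size) behaves like malloc(size), and je_realloc(ptr, 0) now simply frees ptr (via the new ifree() helper) and returns NULL. A minimal usage sketch of those semantics against the standard interface:

#include <stdlib.h>
#include <string.h>

int
main(void)
{
	char *p = realloc(NULL, 16);	/* Same as malloc(16). */

	if (p == NULL)
		return (1);
	strcpy(p, "hello");
	p = realloc(p, 32);		/* Grow; the block may move. */
	if (p == NULL)
		return (1);		/* (Sketch: old block leaks here.) */
	p = realloc(p, 0);		/* Same as free(); NULL under jemalloc. */
	(void)p;
	return (0);
}
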
1257void
1258je_free(void *ptr)
1259{
1260
1261 UTRACE(ptr, 0, 0);
1304 return (ret);
1305}
1306
1307void
1308je_free(void *ptr)
1309{
1310
1311 UTRACE(ptr, 0, 0);
1262 if (ptr != NULL) {
1263 size_t usize;
1264 size_t rzsize JEMALLOC_CC_SILENCE_INIT(0);
1265
1266 assert(malloc_initialized || IS_INITIALIZER);
1267
1268 if (config_prof && opt_prof) {
1269 usize = isalloc(ptr, config_prof);
1270 prof_free(ptr, usize);
1271 } else if (config_stats || config_valgrind)
1272 usize = isalloc(ptr, config_prof);
1273 if (config_stats)
1274 thread_allocated_tsd_get()->deallocated += usize;
1275 if (config_valgrind && opt_valgrind)
1276 rzsize = p2rz(ptr);
1277 iqalloc(ptr);
1278 JEMALLOC_VALGRIND_FREE(ptr, rzsize);
1279 }
1312 if (ptr != NULL)
1313 ifree(ptr);
1280}
1281
1282/*
1283 * End malloc(3)-compatible functions.
1284 */
1285/******************************************************************************/
1286/*
1287 * Begin non-standard override functions.

--- 49 unchanged lines hidden ---

1337/*
1338 * End non-standard override functions.
1339 */
1340/******************************************************************************/
1341/*
1342 * Begin non-standard functions.
1343 */
1344
1314}
1315
1316/*
1317 * End malloc(3)-compatible functions.
1318 */
1319/******************************************************************************/
1320/*
1321 * Begin non-standard override functions.

--- 49 unchanged lines hidden ---

1371/*
1372 * End non-standard override functions.
1373 */
1374/******************************************************************************/
1375/*
1376 * Begin non-standard functions.
1377 */
1378
1345size_t
1346je_malloc_usable_size(JEMALLOC_USABLE_SIZE_CONST void *ptr)
1379JEMALLOC_ALWAYS_INLINE_C void *
1380imallocx(size_t usize, size_t alignment, bool zero, bool try_tcache,
1381 arena_t *arena)
1347{
1382{
1348 size_t ret;
1349
1383
1350 assert(malloc_initialized || IS_INITIALIZER);
1351 malloc_thread_init();
1384 assert(usize == ((alignment == 0) ? s2u(usize) : sa2u(usize,
1385 alignment)));
1352
1386
1353 if (config_ivsalloc)
1354 ret = ivsalloc(ptr, config_prof);
1387 if (alignment != 0)
1388 return (ipalloct(usize, alignment, zero, try_tcache, arena));
1389 else if (zero)
1390 return (icalloct(usize, try_tcache, arena));
1355 else
1391 else
1356 ret = (ptr != NULL) ? isalloc(ptr, config_prof) : 0;
1357
1358 return (ret);
1392 return (imalloct(usize, try_tcache, arena));
1359}
1360
1393}
1394
1361void
1362je_malloc_stats_print(void (*write_cb)(void *, const char *), void *cbopaque,
1363 const char *opts)
1395static void *
1396imallocx_prof_sample(size_t usize, size_t alignment, bool zero, bool try_tcache,
1397 arena_t *arena, prof_thr_cnt_t *cnt)
1364{
1398{
1399 void *p;
1365
1400
1366 stats_print(write_cb, cbopaque, opts);
1401 if (cnt == NULL)
1402 return (NULL);
1403 if (prof_promote && usize <= SMALL_MAXCLASS) {
1404 size_t usize_promoted = (alignment == 0) ?
1405 s2u(SMALL_MAXCLASS+1) : sa2u(SMALL_MAXCLASS+1, alignment);
1406 assert(usize_promoted != 0);
1407 p = imallocx(usize_promoted, alignment, zero, try_tcache,
1408 arena);
1409 if (p == NULL)
1410 return (NULL);
1411 arena_prof_promoted(p, usize);
1412 } else
1413 p = imallocx(usize, alignment, zero, try_tcache, arena);
1414
1415 return (p);
1367}
1368
1416}
1417
1369int
1370je_mallctl(const char *name, void *oldp, size_t *oldlenp, void *newp,
1371 size_t newlen)
1418JEMALLOC_ALWAYS_INLINE_C void *
1419imallocx_prof(size_t usize, size_t alignment, bool zero, bool try_tcache,
1420 arena_t *arena, prof_thr_cnt_t *cnt)
1372{
1421{
1422 void *p;
1373
1423
1374 if (malloc_init())
1375 return (EAGAIN);
1424 if ((uintptr_t)cnt != (uintptr_t)1U) {
1425 p = imallocx_prof_sample(usize, alignment, zero, try_tcache,
1426 arena, cnt);
1427 } else
1428 p = imallocx(usize, alignment, zero, try_tcache, arena);
1429 if (p == NULL)
1430 return (NULL);
1431 prof_malloc(p, usize, cnt);
1376
1432
1377 return (ctl_byname(name, oldp, oldlenp, newp, newlen));
1433 return (p);
1378}
1379
1434}
1435
1380int
1381je_mallctlnametomib(const char *name, size_t *mibp, size_t *miblenp)
1436void *
1437je_mallocx(size_t size, int flags)
1382{
1438{
1439 void *p;
1440 size_t usize;
1441 size_t alignment = (ZU(1) << (flags & MALLOCX_LG_ALIGN_MASK)
1442 & (SIZE_T_MAX-1));
1443 bool zero = flags & MALLOCX_ZERO;
1444 unsigned arena_ind = ((unsigned)(flags >> 8)) - 1;
1445 arena_t *arena;
1446 bool try_tcache;
1383
1447
1448 assert(size != 0);
1449
1384 if (malloc_init())
1450 if (malloc_init())
1385 return (EAGAIN);
1451 goto label_oom;
1386
1452
1387 return (ctl_nametomib(name, mibp, miblenp));
1453 if (arena_ind != UINT_MAX) {
1454 arena = arenas[arena_ind];
1455 try_tcache = false;
1456 } else {
1457 arena = NULL;
1458 try_tcache = true;
1459 }
1460
1461 usize = (alignment == 0) ? s2u(size) : sa2u(size, alignment);
1462 assert(usize != 0);
1463
1464 if (config_prof && opt_prof) {
1465 prof_thr_cnt_t *cnt;
1466
1467 PROF_ALLOC_PREP(1, usize, cnt);
1468 p = imallocx_prof(usize, alignment, zero, try_tcache, arena,
1469 cnt);
1470 } else
1471 p = imallocx(usize, alignment, zero, try_tcache, arena);
1472 if (p == NULL)
1473 goto label_oom;
1474
1475 if (config_stats) {
1476 assert(usize == isalloc(p, config_prof));
1477 thread_allocated_tsd_get()->allocated += usize;
1478 }
1479 UTRACE(0, size, p);
1480 JEMALLOC_VALGRIND_MALLOC(true, p, usize, zero);
1481 return (p);
1482label_oom:
1483 if (config_xmalloc && opt_xmalloc) {
1484 malloc_write("<jemalloc>: Error in mallocx(): out of memory\n");
1485 abort();
1486 }
1487 UTRACE(0, size, 0);
1488 return (NULL);
1388}
1389
1489}
1490
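
The new je_mallocx() unpacks all of its options from the int flags word: the low six bits hold lg(alignment), a dedicated bit requests zeroed memory, and bits 8 and up carry the arena index plus one (zero meaning no explicit arena). Masking (ZU(1) << lg) with SIZE_T_MAX-1 turns lg == 0 into alignment 0, i.e. no alignment constraint. A sketch of the decoding with assumed encoder macros (the real MALLOCX_* definitions live in the public header, which this diff does not show):

#include <stdint.h>
#include <stddef.h>

/* Assumed encoding, mirroring the decode in je_mallocx() above. */
#define	X_LG_ALIGN(la)	((int)(la))		/* low 6 bits */
#define	X_ZERO		((int)0x40)
#define	X_ARENA(a)	((int)(((a) + 1) << 8))

int
main(void)
{
	int flags = X_LG_ALIGN(6) | X_ZERO | X_ARENA(2);
	/* 1 << 0 == 1 is masked away, so lg == 0 decodes as "unaligned". */
	size_t alignment = ((size_t)1 << (flags & 0x3f)) & (SIZE_MAX - 1);
	unsigned arena_ind = ((unsigned)(flags >> 8)) - 1;

	return (!(alignment == 64 && (flags & X_ZERO) != 0 && arena_ind == 2));
}
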
1390int
1391je_mallctlbymib(const size_t *mib, size_t miblen, void *oldp, size_t *oldlenp,
1392 void *newp, size_t newlen)
1491static void *
1492irallocx_prof_sample(void *oldptr, size_t size, size_t alignment, size_t usize,
1493 bool zero, bool try_tcache_alloc, bool try_tcache_dalloc, arena_t *arena,
1494 prof_thr_cnt_t *cnt)
1393{
1495{
1496 void *p;
1394
1497
1395 if (malloc_init())
1396 return (EAGAIN);
1498 if (cnt == NULL)
1499 return (NULL);
1500 if (prof_promote && usize <= SMALL_MAXCLASS) {
1501 p = iralloct(oldptr, SMALL_MAXCLASS+1, (SMALL_MAXCLASS+1 >=
1502 size) ? 0 : size - (SMALL_MAXCLASS+1), alignment, zero,
1503 try_tcache_alloc, try_tcache_dalloc, arena);
1504 if (p == NULL)
1505 return (NULL);
1506 arena_prof_promoted(p, usize);
1507 } else {
1508 p = iralloct(oldptr, size, 0, alignment, zero,
1509 try_tcache_alloc, try_tcache_dalloc, arena);
1510 }
1397
1511
1398 return (ctl_bymib(mib, miblen, oldp, oldlenp, newp, newlen));
1512 return (p);
1399}
1400
1513}
1514
1401/*
1402 * End non-standard functions.
1403 */
1404/******************************************************************************/
1405/*
1406 * Begin experimental functions.
1407 */
1408#ifdef JEMALLOC_EXPERIMENTAL
1409
1410JEMALLOC_ALWAYS_INLINE_C void *
1515JEMALLOC_ALWAYS_INLINE_C void *
1411iallocm(size_t usize, size_t alignment, bool zero, bool try_tcache,
1412 arena_t *arena)
1516irallocx_prof(void *oldptr, size_t old_usize, size_t size, size_t alignment,
1517 size_t *usize, bool zero, bool try_tcache_alloc, bool try_tcache_dalloc,
1518 arena_t *arena, prof_thr_cnt_t *cnt)
1413{
1519{
1520 void *p;
1521 prof_ctx_t *old_ctx;
1414
1522
1415 assert(usize == ((alignment == 0) ? s2u(usize) : sa2u(usize,
1416 alignment)));
1523 old_ctx = prof_ctx_get(oldptr);
1524 if ((uintptr_t)cnt != (uintptr_t)1U)
1525 p = irallocx_prof_sample(oldptr, size, alignment, *usize, zero,
1526 try_tcache_alloc, try_tcache_dalloc, arena, cnt);
1527 else {
1528 p = iralloct(oldptr, size, 0, alignment, zero,
1529 try_tcache_alloc, try_tcache_dalloc, arena);
1530 }
1531 if (p == NULL)
1532 return (NULL);
1417
1533
1418 if (alignment != 0)
1419 return (ipallocx(usize, alignment, zero, try_tcache, arena));
1420 else if (zero)
1421 return (icallocx(usize, try_tcache, arena));
1422 else
1423 return (imallocx(usize, try_tcache, arena));
1534 if (p == oldptr && alignment != 0) {
1535 /*
1536 * The allocation did not move, so it is possible that the size
1537 * class is smaller than would guarantee the requested
1538 * alignment, and that the alignment constraint was
1539 * serendipitously satisfied. Additionally, old_usize may not
1540 * be the same as the current usize because of in-place large
1541 * reallocation. Therefore, query the actual value of usize.
1542 */
1543 *usize = isalloc(p, config_prof);
1544 }
1545 prof_realloc(p, *usize, cnt, old_usize, old_ctx);
1546
1547 return (p);
1424}
1425
1548}
1549
1426int
1427je_allocm(void **ptr, size_t *rsize, size_t size, int flags)
1550void *
1551je_rallocx(void *ptr, size_t size, int flags)
1428{
1429 void *p;
1552{
1553 void *p;
1430 size_t usize;
1431 size_t alignment = (ZU(1) << (flags & ALLOCM_LG_ALIGN_MASK)
1554 size_t usize, old_usize;
1555 UNUSED size_t old_rzsize JEMALLOC_CC_SILENCE_INIT(0);
1556 size_t alignment = (ZU(1) << (flags & MALLOCX_LG_ALIGN_MASK)
1432 & (SIZE_T_MAX-1));
1557 & (SIZE_T_MAX-1));
1433 bool zero = flags & ALLOCM_ZERO;
1558 bool zero = flags & MALLOCX_ZERO;
1434 unsigned arena_ind = ((unsigned)(flags >> 8)) - 1;
1559 unsigned arena_ind = ((unsigned)(flags >> 8)) - 1;
1560 bool try_tcache_alloc, try_tcache_dalloc;
1435 arena_t *arena;
1561 arena_t *arena;
1436 bool try_tcache;
1437
1438 assert(ptr != NULL);
1439 assert(size != 0);
1562
1563 assert(ptr != NULL);
1564 assert(size != 0);
1565 assert(malloc_initialized || IS_INITIALIZER);
1566 malloc_thread_init();
1440
1567
1441 if (malloc_init())
1442 goto label_oom;
1443
1444 if (arena_ind != UINT_MAX) {
1568 if (arena_ind != UINT_MAX) {
1569 arena_chunk_t *chunk;
1570 try_tcache_alloc = false;
1571 chunk = (arena_chunk_t *)CHUNK_ADDR2BASE(ptr);
1572 try_tcache_dalloc = (chunk == ptr || chunk->arena !=
1573 arenas[arena_ind]);
1445 arena = arenas[arena_ind];
1574 arena = arenas[arena_ind];
1446 try_tcache = false;
1447 } else {
1575 } else {
1576 try_tcache_alloc = true;
1577 try_tcache_dalloc = true;
1448 arena = NULL;
1578 arena = NULL;
1449 try_tcache = true;
1450 }
1451
1579 }
1580
1452 usize = (alignment == 0) ? s2u(size) : sa2u(size, alignment);
1453 if (usize == 0)
1454 goto label_oom;
1581 if ((config_prof && opt_prof) || config_stats ||
1582 (config_valgrind && opt_valgrind))
1583 old_usize = isalloc(ptr, config_prof);
1584 if (config_valgrind && opt_valgrind)
1585 old_rzsize = u2rz(old_usize);
1455
1456 if (config_prof && opt_prof) {
1457 prof_thr_cnt_t *cnt;
1458
1586
1587 if (config_prof && opt_prof) {
1588 prof_thr_cnt_t *cnt;
1589
1590 usize = (alignment == 0) ? s2u(size) : sa2u(size, alignment);
1591 assert(usize != 0);
1459 PROF_ALLOC_PREP(1, usize, cnt);
1592 PROF_ALLOC_PREP(1, usize, cnt);
1460 if (cnt == NULL)
1593 p = irallocx_prof(ptr, old_usize, size, alignment, &usize, zero,
1594 try_tcache_alloc, try_tcache_dalloc, arena, cnt);
1595 if (p == NULL)
1461 goto label_oom;
1596 goto label_oom;
1462 if (prof_promote && (uintptr_t)cnt != (uintptr_t)1U && usize <=
1463 SMALL_MAXCLASS) {
1464 size_t usize_promoted = (alignment == 0) ?
1465 s2u(SMALL_MAXCLASS+1) : sa2u(SMALL_MAXCLASS+1,
1466 alignment);
1467 assert(usize_promoted != 0);
1468 p = iallocm(usize_promoted, alignment, zero,
1469 try_tcache, arena);
1470 if (p == NULL)
1471 goto label_oom;
1472 arena_prof_promoted(p, usize);
1473 } else {
1474 p = iallocm(usize, alignment, zero, try_tcache, arena);
1475 if (p == NULL)
1476 goto label_oom;
1477 }
1478 prof_malloc(p, usize, cnt);
1479 } else {
1597 } else {
1480 p = iallocm(usize, alignment, zero, try_tcache, arena);
1598 p = iralloct(ptr, size, 0, alignment, zero, try_tcache_alloc,
1599 try_tcache_dalloc, arena);
1481 if (p == NULL)
1482 goto label_oom;
1600 if (p == NULL)
1601 goto label_oom;
1602 if (config_stats || (config_valgrind && opt_valgrind))
1603 usize = isalloc(p, config_prof);
1483 }
1604 }
1484 if (rsize != NULL)
1485 *rsize = usize;
1486
1605
1487 *ptr = p;
1488 if (config_stats) {
1606 if (config_stats) {
1489 assert(usize == isalloc(p, config_prof));
1490 thread_allocated_tsd_get()->allocated += usize;
1607 thread_allocated_t *ta;
1608 ta = thread_allocated_tsd_get();
1609 ta->allocated += usize;
1610 ta->deallocated += old_usize;
1491 }
1611 }
1492 UTRACE(0, size, p);
1493 JEMALLOC_VALGRIND_MALLOC(true, p, usize, zero);
1494 return (ALLOCM_SUCCESS);
1612 UTRACE(ptr, size, p);
1613 JEMALLOC_VALGRIND_REALLOC(p, usize, ptr, old_usize, old_rzsize, zero);
1614 return (p);
1495label_oom:
1496 if (config_xmalloc && opt_xmalloc) {
1615label_oom:
1616 if (config_xmalloc && opt_xmalloc) {
1497 malloc_write("<jemalloc>: Error in allocm(): "
1498 "out of memory\n");
1617 malloc_write("<jemalloc>: Error in rallocx(): out of memory\n");
1499 abort();
1500 }
1618 abort();
1619 }
1501 *ptr = NULL;
1502 UTRACE(0, size, 0);
1503 return (ALLOCM_ERR_OOM);
1620 UTRACE(ptr, size, 0);
1621 return (NULL);
1504}
1505
1622}
1623
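
je_rallocx() is the moving counterpart of je_xallocx() below: it accepts the same MALLOCX_* flags as mallocx() and returns NULL on failure, in which case the original allocation is left intact. A hedged usage sketch against the new public API (prototypes assumed):

#include <stddef.h>

void *mallocx(size_t size, int flags);
void *rallocx(void *ptr, size_t size, int flags);
void dallocx(void *ptr, int flags);

int
main(void)
{
	void *p = mallocx(64, 0);
	void *q;

	if (p == NULL)
		return (1);
	q = rallocx(p, 128, 0);	/* May move; on failure p is untouched. */
	if (q == NULL) {
		dallocx(p, 0);
		return (1);
	}
	dallocx(q, 0);
	return (0);
}
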
1506int
1507je_rallocm(void **ptr, size_t *rsize, size_t size, size_t extra, int flags)
1624JEMALLOC_ALWAYS_INLINE_C size_t
1625ixallocx_helper(void *ptr, size_t old_usize, size_t size, size_t extra,
1626 size_t alignment, bool zero, arena_t *arena)
1508{
1627{
1509 void *p, *q;
1510 size_t usize;
1628 size_t usize;
1511 size_t old_size;
1512 size_t old_rzsize JEMALLOC_CC_SILENCE_INIT(0);
1513 size_t alignment = (ZU(1) << (flags & ALLOCM_LG_ALIGN_MASK)
1629
1630 if (ixalloc(ptr, size, extra, alignment, zero))
1631 return (old_usize);
1632 usize = isalloc(ptr, config_prof);
1633
1634 return (usize);
1635}
1636
1637static size_t
1638ixallocx_prof_sample(void *ptr, size_t old_usize, size_t size, size_t extra,
1639 size_t alignment, size_t max_usize, bool zero, arena_t *arena,
1640 prof_thr_cnt_t *cnt)
1641{
1642 size_t usize;
1643
1644 if (cnt == NULL)
1645 return (old_usize);
1646 /* Use minimum usize to determine whether promotion may happen. */
1647 if (prof_promote && ((alignment == 0) ? s2u(size) : sa2u(size,
1648 alignment)) <= SMALL_MAXCLASS) {
1649 if (ixalloc(ptr, SMALL_MAXCLASS+1, (SMALL_MAXCLASS+1 >=
1650 size+extra) ? 0 : size+extra - (SMALL_MAXCLASS+1),
1651 alignment, zero))
1652 return (old_usize);
1653 usize = isalloc(ptr, config_prof);
1654 if (max_usize < PAGE)
1655 arena_prof_promoted(ptr, usize);
1656 } else {
1657 usize = ixallocx_helper(ptr, old_usize, size, extra, alignment,
1658 zero, arena);
1659 }
1660
1661 return (usize);
1662}
1663
1664JEMALLOC_ALWAYS_INLINE_C size_t
1665ixallocx_prof(void *ptr, size_t old_usize, size_t size, size_t extra,
1666 size_t alignment, size_t max_usize, bool zero, arena_t *arena,
1667 prof_thr_cnt_t *cnt)
1668{
1669 size_t usize;
1670 prof_ctx_t *old_ctx;
1671
1672 old_ctx = prof_ctx_get(ptr);
1673 if ((uintptr_t)cnt != (uintptr_t)1U) {
1674 usize = ixallocx_prof_sample(ptr, old_usize, size, extra,
1675 alignment, max_usize, zero, arena, cnt);
1676 } else {
1677 usize = ixallocx_helper(ptr, old_usize, size, extra, alignment,
1678 zero, arena);
1679 }
1680 if (usize == old_usize)
1681 return (usize);
1682 prof_realloc(ptr, usize, cnt, old_usize, old_ctx);
1683
1684 return (usize);
1685}
1686
1687size_t
1688je_xallocx(void *ptr, size_t size, size_t extra, int flags)
1689{
1690 size_t usize, old_usize;
1691 UNUSED size_t old_rzsize JEMALLOC_CC_SILENCE_INIT(0);
1692 size_t alignment = (ZU(1) << (flags & MALLOCX_LG_ALIGN_MASK)
1514 & (SIZE_T_MAX-1));
1693 & (SIZE_T_MAX-1));
1515 bool zero = flags & ALLOCM_ZERO;
1516 bool no_move = flags & ALLOCM_NO_MOVE;
1694 bool zero = flags & MALLOCX_ZERO;
1517 unsigned arena_ind = ((unsigned)(flags >> 8)) - 1;
1695 unsigned arena_ind = ((unsigned)(flags >> 8)) - 1;
1518 bool try_tcache_alloc, try_tcache_dalloc;
1519 arena_t *arena;
1520
1521 assert(ptr != NULL);
1696 arena_t *arena;
1697
1698 assert(ptr != NULL);
1522 assert(*ptr != NULL);
1523 assert(size != 0);
1524 assert(SIZE_T_MAX - size >= extra);
1525 assert(malloc_initialized || IS_INITIALIZER);
1526 malloc_thread_init();
1527
1699 assert(size != 0);
1700 assert(SIZE_T_MAX - size >= extra);
1701 assert(malloc_initialized || IS_INITIALIZER);
1702 malloc_thread_init();
1703
1528 if (arena_ind != UINT_MAX) {
1529 arena_chunk_t *chunk;
1530 try_tcache_alloc = true;
1531 chunk = (arena_chunk_t *)CHUNK_ADDR2BASE(*ptr);
1532 try_tcache_dalloc = (chunk == *ptr || chunk->arena !=
1533 arenas[arena_ind]);
1704 if (arena_ind != UINT_MAX)
1534 arena = arenas[arena_ind];
1705 arena = arenas[arena_ind];
1535 } else {
1536 try_tcache_alloc = true;
1537 try_tcache_dalloc = true;
1706 else
1538 arena = NULL;
1707 arena = NULL;
1539 }
1540
1708
1541 p = *ptr;
1709 old_usize = isalloc(ptr, config_prof);
1710 if (config_valgrind && opt_valgrind)
1711 old_rzsize = u2rz(old_usize);
1712
1542 if (config_prof && opt_prof) {
1543 prof_thr_cnt_t *cnt;
1713 if (config_prof && opt_prof) {
1714 prof_thr_cnt_t *cnt;
1544
1545 /*
1715 /*
1546 * usize isn't knowable before iralloc() returns when extra is
1716 * usize isn't knowable before ixalloc() returns when extra is
1547 * non-zero. Therefore, compute its maximum possible value and
1548 * use that in PROF_ALLOC_PREP() to decide whether to capture a
1549 * backtrace. prof_realloc() will use the actual usize to
1550 * decide whether to sample.
1551 */
1552 size_t max_usize = (alignment == 0) ? s2u(size+extra) :
1553 sa2u(size+extra, alignment);
1717 * non-zero. Therefore, compute its maximum possible value and
1718 * use that in PROF_ALLOC_PREP() to decide whether to capture a
1719 * backtrace. prof_realloc() will use the actual usize to
1720 * decide whether to sample.
1721 */
1722 size_t max_usize = (alignment == 0) ? s2u(size+extra) :
1723 sa2u(size+extra, alignment);
1554 prof_ctx_t *old_ctx = prof_ctx_get(p);
1555 old_size = isalloc(p, true);
1556 if (config_valgrind && opt_valgrind)
1557 old_rzsize = p2rz(p);
1558 PROF_ALLOC_PREP(1, max_usize, cnt);
1724 PROF_ALLOC_PREP(1, max_usize, cnt);
1559 if (cnt == NULL)
1560 goto label_oom;
1561 /*
1562 * Use minimum usize to determine whether promotion may happen.
1563 */
1564 if (prof_promote && (uintptr_t)cnt != (uintptr_t)1U
1565 && ((alignment == 0) ? s2u(size) : sa2u(size, alignment))
1566 <= SMALL_MAXCLASS) {
1567 q = irallocx(p, SMALL_MAXCLASS+1, (SMALL_MAXCLASS+1 >=
1568 size+extra) ? 0 : size+extra - (SMALL_MAXCLASS+1),
1569 alignment, zero, no_move, try_tcache_alloc,
1570 try_tcache_dalloc, arena);
1571 if (q == NULL)
1572 goto label_err;
1573 if (max_usize < PAGE) {
1574 usize = max_usize;
1575 arena_prof_promoted(q, usize);
1576 } else
1577 usize = isalloc(q, config_prof);
1578 } else {
1579 q = irallocx(p, size, extra, alignment, zero, no_move,
1580 try_tcache_alloc, try_tcache_dalloc, arena);
1581 if (q == NULL)
1582 goto label_err;
1583 usize = isalloc(q, config_prof);
1584 }
1585 prof_realloc(q, usize, cnt, old_size, old_ctx);
1586 if (rsize != NULL)
1587 *rsize = usize;
1725 usize = ixallocx_prof(ptr, old_usize, size, extra, alignment,
1726 max_usize, zero, arena, cnt);
1588 } else {
1727 } else {
1589 if (config_stats) {
1590 old_size = isalloc(p, false);
1591 if (config_valgrind && opt_valgrind)
1592 old_rzsize = u2rz(old_size);
1593 } else if (config_valgrind && opt_valgrind) {
1594 old_size = isalloc(p, false);
1595 old_rzsize = u2rz(old_size);
1596 }
1597 q = irallocx(p, size, extra, alignment, zero, no_move,
1598 try_tcache_alloc, try_tcache_dalloc, arena);
1599 if (q == NULL)
1600 goto label_err;
1601 if (config_stats)
1602 usize = isalloc(q, config_prof);
1603 if (rsize != NULL) {
1604 if (config_stats == false)
1605 usize = isalloc(q, config_prof);
1606 *rsize = usize;
1607 }
1728 usize = ixallocx_helper(ptr, old_usize, size, extra, alignment,
1729 zero, arena);
1608 }
1730 }
1731 if (usize == old_usize)
1732 goto label_not_resized;
1609
1733
1610 *ptr = q;
1611 if (config_stats) {
1612 thread_allocated_t *ta;
1613 ta = thread_allocated_tsd_get();
1614 ta->allocated += usize;
1734 if (config_stats) {
1735 thread_allocated_t *ta;
1736 ta = thread_allocated_tsd_get();
1737 ta->allocated += usize;
1615 ta->deallocated += old_size;
1738 ta->deallocated += old_usize;
1616 }
1739 }
1617 UTRACE(p, size, q);
1618 JEMALLOC_VALGRIND_REALLOC(q, usize, p, old_size, old_rzsize, zero);
1619 return (ALLOCM_SUCCESS);
1620label_err:
1621 if (no_move) {
1622 UTRACE(p, size, q);
1623 return (ALLOCM_ERR_NOT_MOVED);
1624 }
1625label_oom:
1626 if (config_xmalloc && opt_xmalloc) {
1627 malloc_write("<jemalloc>: Error in rallocm(): "
1628 "out of memory\n");
1629 abort();
1630 }
1631 UTRACE(p, size, 0);
1632 return (ALLOCM_ERR_OOM);
1740 JEMALLOC_VALGRIND_REALLOC(ptr, usize, ptr, old_usize, old_rzsize, zero);
1741label_not_resized:
1742 UTRACE(ptr, size, ptr);
1743 return (usize);
1633}
1634
1744}
1745
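
je_xallocx() resizes ptr strictly in place, never moving it, and returns the resulting usable size; callers compare that against the requested size to learn whether the resize succeeded (the ALLOCM_NO_MOVE shim below builds on exactly this). A hedged usage sketch (prototypes assumed):

#include <stddef.h>

void *mallocx(size_t size, int flags);
size_t xallocx(void *ptr, size_t size, size_t extra, int flags);
void dallocx(void *ptr, int flags);

int
main(void)
{
	void *p = mallocx(4096, 0);

	if (p == NULL)
		return (1);
	/* Try to grow to 8192 bytes without moving p. */
	if (xallocx(p, 8192, 0, 0) < 8192) {
		/* Not resizable in place; fall back to rallocx() etc. */
	}
	dallocx(p, 0);
	return (0);
}
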
1635int
1636je_sallocm(const void *ptr, size_t *rsize, int flags)
1746size_t
1747je_sallocx(const void *ptr, int flags)
1637{
1748{
1638 size_t sz;
1749 size_t usize;
1639
1640 assert(malloc_initialized || IS_INITIALIZER);
1641 malloc_thread_init();
1642
1643 if (config_ivsalloc)
1750
1751 assert(malloc_initialized || IS_INITIALIZER);
1752 malloc_thread_init();
1753
1754 if (config_ivsalloc)
1644 sz = ivsalloc(ptr, config_prof);
1755 usize = ivsalloc(ptr, config_prof);
1645 else {
1646 assert(ptr != NULL);
1756 else {
1757 assert(ptr != NULL);
1647 sz = isalloc(ptr, config_prof);
1758 usize = isalloc(ptr, config_prof);
1648 }
1759 }
1649 assert(rsize != NULL);
1650 *rsize = sz;
1651
1760
1652 return (ALLOCM_SUCCESS);
1761 return (usize);
1653}
1654
1762}
1763
1655int
1656je_dallocm(void *ptr, int flags)
1764void
1765je_dallocx(void *ptr, int flags)
1657{
1658 size_t usize;
1766{
1767 size_t usize;
1659 size_t rzsize JEMALLOC_CC_SILENCE_INIT(0);
1768 UNUSED size_t rzsize JEMALLOC_CC_SILENCE_INIT(0);
1660 unsigned arena_ind = ((unsigned)(flags >> 8)) - 1;
1661 bool try_tcache;
1662
1663 assert(ptr != NULL);
1664 assert(malloc_initialized || IS_INITIALIZER);
1665
1666 if (arena_ind != UINT_MAX) {
1667 arena_chunk_t *chunk = (arena_chunk_t *)CHUNK_ADDR2BASE(ptr);

--- 9 unchanged lines hidden ---

1677 if (config_stats == false && config_valgrind == false)
1678 usize = isalloc(ptr, config_prof);
1679 prof_free(ptr, usize);
1680 }
1681 if (config_stats)
1682 thread_allocated_tsd_get()->deallocated += usize;
1683 if (config_valgrind && opt_valgrind)
1684 rzsize = p2rz(ptr);
1769 unsigned arena_ind = ((unsigned)(flags >> 8)) - 1;
1770 bool try_tcache;
1771
1772 assert(ptr != NULL);
1773 assert(malloc_initialized || IS_INITIALIZER);
1774
1775 if (arena_ind != UINT_MAX) {
1776 arena_chunk_t *chunk = (arena_chunk_t *)CHUNK_ADDR2BASE(ptr);

--- 9 unchanged lines hidden ---

1786 if (config_stats == false && config_valgrind == false)
1787 usize = isalloc(ptr, config_prof);
1788 prof_free(ptr, usize);
1789 }
1790 if (config_stats)
1791 thread_allocated_tsd_get()->deallocated += usize;
1792 if (config_valgrind && opt_valgrind)
1793 rzsize = p2rz(ptr);
1685 iqallocx(ptr, try_tcache);
1794 iqalloct(ptr, try_tcache);
1686 JEMALLOC_VALGRIND_FREE(ptr, rzsize);
1795 JEMALLOC_VALGRIND_FREE(ptr, rzsize);
1687
1688 return (ALLOCM_SUCCESS);
1689}
1690
1796}
1797
1691int
1692je_nallocm(size_t *rsize, size_t size, int flags)
1798size_t
1799je_nallocx(size_t size, int flags)
1693{
1694 size_t usize;
1800{
1801 size_t usize;
1695 size_t alignment = (ZU(1) << (flags & ALLOCM_LG_ALIGN_MASK)
1802 size_t alignment = (ZU(1) << (flags & MALLOCX_LG_ALIGN_MASK)
1696 & (SIZE_T_MAX-1));
1697
1698 assert(size != 0);
1699
1700 if (malloc_init())
1803 & (SIZE_T_MAX-1));
1804
1805 assert(size != 0);
1806
1807 if (malloc_init())
1701 return (ALLOCM_ERR_OOM);
1808 return (0);
1702
1703 usize = (alignment == 0) ? s2u(size) : sa2u(size, alignment);
1809
1810 usize = (alignment == 0) ? s2u(size) : sa2u(size, alignment);
1704 if (usize == 0)
1811 assert(usize != 0);
1812 return (usize);
1813}
1814
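
je_nallocx() computes the usable size that a mallocx() call with the same size and flags would yield, without allocating anything (note it returns 0, not an errno, if initialization fails). A hedged sketch of the intended use (prototypes assumed):

#include <assert.h>
#include <stddef.h>

void *mallocx(size_t size, int flags);
size_t nallocx(size_t size, int flags);
size_t sallocx(const void *ptr, int flags);
void dallocx(void *ptr, int flags);

int
main(void)
{
	size_t usize = nallocx(100, 0);	/* Predicted usable size. */
	void *p = mallocx(100, 0);

	if (p != NULL) {
		assert(sallocx(p, 0) == usize);	/* Prediction matches. */
		dallocx(p, 0);
	}
	return (0);
}
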
1815int
1816je_mallctl(const char *name, void *oldp, size_t *oldlenp, void *newp,
1817 size_t newlen)
1818{
1819
1820 if (malloc_init())
1821 return (EAGAIN);
1822
1823 return (ctl_byname(name, oldp, oldlenp, newp, newlen));
1824}
1825
1826int
1827je_mallctlnametomib(const char *name, size_t *mibp, size_t *miblenp)
1828{
1829
1830 if (malloc_init())
1831 return (EAGAIN);
1832
1833 return (ctl_nametomib(name, mibp, miblenp));
1834}
1835
1836int
1837je_mallctlbymib(const size_t *mib, size_t miblen, void *oldp, size_t *oldlenp,
1838 void *newp, size_t newlen)
1839{
1840
1841 if (malloc_init())
1842 return (EAGAIN);
1843
1844 return (ctl_bymib(mib, miblen, oldp, oldlenp, newp, newlen));
1845}
1846
1847void
1848je_malloc_stats_print(void (*write_cb)(void *, const char *), void *cbopaque,
1849 const char *opts)
1850{
1851
1852 stats_print(write_cb, cbopaque, opts);
1853}
1854
1855size_t
1856je_malloc_usable_size(JEMALLOC_USABLE_SIZE_CONST void *ptr)
1857{
1858 size_t ret;
1859
1860 assert(malloc_initialized || IS_INITIALIZER);
1861 malloc_thread_init();
1862
1863 if (config_ivsalloc)
1864 ret = ivsalloc(ptr, config_prof);
1865 else
1866 ret = (ptr != NULL) ? isalloc(ptr, config_prof) : 0;
1867
1868 return (ret);
1869}
1870
1871/*
1872 * End non-standard functions.
1873 */
1874/******************************************************************************/
1875/*
1876 * Begin experimental functions.
1877 */
1878#ifdef JEMALLOC_EXPERIMENTAL
1879
1880int
1881je_allocm(void **ptr, size_t *rsize, size_t size, int flags)
1882{
1883 void *p;
1884
1885 assert(ptr != NULL);
1886
1887 p = je_mallocx(size, flags);
1888 if (p == NULL)
1705 return (ALLOCM_ERR_OOM);
1889 return (ALLOCM_ERR_OOM);
1890 if (rsize != NULL)
1891 *rsize = isalloc(p, config_prof);
1892 *ptr = p;
1893 return (ALLOCM_SUCCESS);
1894}
1706
1895
1896int
1897je_rallocm(void **ptr, size_t *rsize, size_t size, size_t extra, int flags)
1898{
1899 int ret;
1900 bool no_move = flags & ALLOCM_NO_MOVE;
1901
1902 assert(ptr != NULL);
1903 assert(*ptr != NULL);
1904 assert(size != 0);
1905 assert(SIZE_T_MAX - size >= extra);
1906
1907 if (no_move) {
1908 size_t usize = je_xallocx(*ptr, size, extra, flags);
1909 ret = (usize >= size) ? ALLOCM_SUCCESS : ALLOCM_ERR_NOT_MOVED;
1910 if (rsize != NULL)
1911 *rsize = usize;
1912 } else {
1913 void *p = je_rallocx(*ptr, size+extra, flags);
1914 if (p != NULL) {
1915 *ptr = p;
1916 ret = ALLOCM_SUCCESS;
1917 } else
1918 ret = ALLOCM_ERR_OOM;
1919 if (rsize != NULL)
1920 *rsize = isalloc(*ptr, config_prof);
1921 }
1922 return (ret);
1923}
1924
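
The experimental entry points are now thin shims: je_allocm() above forwards its flags word to je_mallocx() unchanged, which works because the surviving ALLOCM_* bit assignments line up with their MALLOCX_* successors, and je_rallocm() routes ALLOCM_NO_MOVE to xallocx(), which mallocx() has no flag for. A hedged equivalence sketch (prototypes assumed; ALLOCM_SUCCESS is 0):

#include <stddef.h>

int allocm(void **ptr, size_t *rsize, size_t size, int flags);
void *mallocx(size_t size, int flags);
size_t sallocx(const void *ptr, int flags);
void dallocx(void *ptr, int flags);

int
main(void)
{
	void *p;
	size_t rsize;

	/* Experimental call... */
	if (allocm(&p, &rsize, 100, 0) != 0)
		return (1);
	dallocx(p, 0);
	/* ...is now equivalent to: */
	p = mallocx(100, 0);
	rsize = (p != NULL) ? sallocx(p, 0) : 0;
	(void)rsize;	/* silence unused-variable warnings in this sketch */
	if (p != NULL)
		dallocx(p, 0);
	return (0);
}
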
1925int
1926je_sallocm(const void *ptr, size_t *rsize, int flags)
1927{
1928
1929 assert(rsize != NULL);
1930 *rsize = je_sallocx(ptr, flags);
1931 return (ALLOCM_SUCCESS);
1932}
1933
1934int
1935je_dallocm(void *ptr, int flags)
1936{
1937
1938 je_dallocx(ptr, flags);
1939 return (ALLOCM_SUCCESS);
1940}
1941
1942int
1943je_nallocm(size_t *rsize, size_t size, int flags)
1944{
1945 size_t usize;
1946
1947 usize = je_nallocx(size, flags);
1948 if (usize == 0)
1949 return (ALLOCM_ERR_OOM);
1707 if (rsize != NULL)
1708 *rsize = usize;
1709 return (ALLOCM_SUCCESS);
1710}
1711
1712#endif
1713/*
1714 * End experimental functions.

--- 159 unchanged lines hidden ---
1950 if (rsize != NULL)
1951 *rsize = usize;
1952 return (ALLOCM_SUCCESS);
1953}
1954
1955#endif
1956/*
1957 * End experimental functions.

--- 159 unchanged lines hidden ---