// allocation.inline.hpp revision 13249:a2753984d2c1
/*
 * Copyright (c) 1997, 2016, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */
24
25#ifndef SHARE_VM_MEMORY_ALLOCATION_INLINE_HPP
26#define SHARE_VM_MEMORY_ALLOCATION_INLINE_HPP
27
28#include "runtime/atomic.hpp"
29#include "runtime/os.hpp"
30#include "services/memTracker.hpp"
31#include "utilities/align.hpp"
32#include "utilities/globalDefinitions.hpp"
33
34// Explicit C-heap memory management
35
36void trace_heap_malloc(size_t size, const char* name, void *p);
37void trace_heap_free(void *p);
38
#ifndef PRODUCT
// Increments unsigned long value for statistics (not atomic on MP).
// Debug-build-only statistics counter bump. Only the 8-byte load and the
// 8-byte store are individually atomic on SPARC/X86; the load-add-store
// sequence as a whole is still a non-atomic read-modify-write, so
// concurrent increments can be lost on any platform. That is acceptable
// here because the counter is for statistics only.
inline void inc_stat_counter(volatile julong* dest, julong add_value) {
#if defined(SPARC) || defined(X86)
  // Sparc and X86 have atomic jlong (8 bytes) instructions, which at least
  // avoids word-tearing (reading/writing each half separately).
  julong value = Atomic::load((volatile jlong*)dest);
  value += add_value;
  Atomic::store((jlong)value, (volatile jlong*)dest);
#else
  // possible word-tearing during load/store
  *dest += add_value;
#endif
}
#endif
53
54// allocate using malloc; will fail if no memory available
55inline char* AllocateHeap(size_t size, MEMFLAGS flags,
56    const NativeCallStack& stack,
57    AllocFailType alloc_failmode = AllocFailStrategy::EXIT_OOM) {
58  char* p = (char*) os::malloc(size, flags, stack);
59  #ifdef ASSERT
60  if (PrintMallocFree) trace_heap_malloc(size, "AllocateHeap", p);
61  #endif
62  if (p == NULL && alloc_failmode == AllocFailStrategy::EXIT_OOM) {
63    vm_exit_out_of_memory(size, OOM_MALLOC_ERROR, "AllocateHeap");
64  }
65  return p;
66}
67
68ALWAYSINLINE char* AllocateHeap(size_t size, MEMFLAGS flags,
69    AllocFailType alloc_failmode = AllocFailStrategy::EXIT_OOM) {
70  return AllocateHeap(size, flags, CURRENT_PC, alloc_failmode);
71}
72
73ALWAYSINLINE char* ReallocateHeap(char *old, size_t size, MEMFLAGS flag,
74    AllocFailType alloc_failmode = AllocFailStrategy::EXIT_OOM) {
75  char* p = (char*) os::realloc(old, size, flag, CURRENT_PC);
76  #ifdef ASSERT
77  if (PrintMallocFree) trace_heap_malloc(size, "ReallocateHeap", p);
78  #endif
79  if (p == NULL && alloc_failmode == AllocFailStrategy::EXIT_OOM) {
80    vm_exit_out_of_memory(size, OOM_MALLOC_ERROR, "ReallocateHeap");
81  }
82  return p;
83}
84
85inline void FreeHeap(void* p) {
86  #ifdef ASSERT
87  if (PrintMallocFree) trace_heap_free(p);
88  #endif
89  os::free(p);
90}
91
92
93template <MEMFLAGS F> void* CHeapObj<F>::operator new(size_t size,
94      const NativeCallStack& stack) throw() {
95  void* p = (void*)AllocateHeap(size, F, stack);
96#ifdef ASSERT
97  if (PrintMallocFree) trace_heap_malloc(size, "CHeapObj-new", p);
98#endif
99  return p;
100}
101
102template <MEMFLAGS F> void* CHeapObj<F>::operator new(size_t size) throw() {
103  return CHeapObj<F>::operator new(size, CALLER_PC);
104}
105
106template <MEMFLAGS F> void* CHeapObj<F>::operator new (size_t size,
107  const std::nothrow_t&  nothrow_constant, const NativeCallStack& stack) throw() {
108  void* p = (void*)AllocateHeap(size, F, stack,
109      AllocFailStrategy::RETURN_NULL);
110#ifdef ASSERT
111    if (PrintMallocFree) trace_heap_malloc(size, "CHeapObj-new", p);
112#endif
113    return p;
114  }
115
116template <MEMFLAGS F> void* CHeapObj<F>::operator new (size_t size,
117  const std::nothrow_t& nothrow_constant) throw() {
118  return CHeapObj<F>::operator new(size, nothrow_constant, CALLER_PC);
119}
120
121template <MEMFLAGS F> void* CHeapObj<F>::operator new [](size_t size,
122      const NativeCallStack& stack) throw() {
123  return CHeapObj<F>::operator new(size, stack);
124}
125
126template <MEMFLAGS F> void* CHeapObj<F>::operator new [](size_t size)
127  throw() {
128  return CHeapObj<F>::operator new(size, CALLER_PC);
129}
130
131template <MEMFLAGS F> void* CHeapObj<F>::operator new [](size_t size,
132  const std::nothrow_t&  nothrow_constant, const NativeCallStack& stack) throw() {
133  return CHeapObj<F>::operator new(size, nothrow_constant, stack);
134}
135
136template <MEMFLAGS F> void* CHeapObj<F>::operator new [](size_t size,
137  const std::nothrow_t& nothrow_constant) throw() {
138  return CHeapObj<F>::operator new(size, nothrow_constant, CALLER_PC);
139}
140
141template <MEMFLAGS F> void CHeapObj<F>::operator delete(void* p){
142    FreeHeap(p);
143}
144
145template <MEMFLAGS F> void CHeapObj<F>::operator delete [](void* p){
146    FreeHeap(p);
147}
148
149template <class E, MEMFLAGS F>
150size_t MmapArrayAllocator<E, F>::size_for(size_t length) {
151  size_t size = length * sizeof(E);
152  int alignment = os::vm_allocation_granularity();
153  return align_up(size, alignment);
154}
155
156template <class E, MEMFLAGS F>
157E* MmapArrayAllocator<E, F>::allocate_or_null(size_t length) {
158  size_t size = size_for(length);
159  int alignment = os::vm_allocation_granularity();
160
161  char* addr = os::reserve_memory(size, NULL, alignment, F);
162  if (addr == NULL) {
163    return NULL;
164  }
165
166  if (os::commit_memory(addr, size, !ExecMem, "Allocator (commit)")) {
167    return (E*)addr;
168  } else {
169    os::release_memory(addr, size);
170    return NULL;
171  }
172}
173
174template <class E, MEMFLAGS F>
175E* MmapArrayAllocator<E, F>::allocate(size_t length) {
176  size_t size = size_for(length);
177  int alignment = os::vm_allocation_granularity();
178
179  char* addr = os::reserve_memory(size, NULL, alignment, F);
180  if (addr == NULL) {
181    vm_exit_out_of_memory(size, OOM_MMAP_ERROR, "Allocator (reserve)");
182  }
183
184  os::commit_memory_or_exit(addr, size, !ExecMem, "Allocator (commit)");
185
186  return (E*)addr;
187}
188
189template <class E, MEMFLAGS F>
190void MmapArrayAllocator<E, F>::free(E* addr, size_t length) {
191  bool result = os::release_memory((char*)addr, size_for(length));
192  assert(result, "Failed to release memory");
193}
194
195template <class E, MEMFLAGS F>
196size_t MallocArrayAllocator<E, F>::size_for(size_t length) {
197  return length * sizeof(E);
198}
199
200template <class E, MEMFLAGS F>
201E* MallocArrayAllocator<E, F>::allocate(size_t length) {
202  return (E*)AllocateHeap(size_for(length), F);
203}
204
205template<class E, MEMFLAGS F>
206void MallocArrayAllocator<E, F>::free(E* addr, size_t /*length*/) {
207  FreeHeap(addr);
208}
209
210template <class E, MEMFLAGS F>
211bool ArrayAllocator<E, F>::should_use_malloc(size_t length) {
212  return MallocArrayAllocator<E, F>::size_for(length) < ArrayAllocatorMallocLimit;
213}
214
215template <class E, MEMFLAGS F>
216E* ArrayAllocator<E, F>::allocate_malloc(size_t length) {
217  return MallocArrayAllocator<E, F>::allocate(length);
218}
219
220template <class E, MEMFLAGS F>
221E* ArrayAllocator<E, F>::allocate_mmap(size_t length) {
222  return MmapArrayAllocator<E, F>::allocate(length);
223}
224
225template <class E, MEMFLAGS F>
226E* ArrayAllocator<E, F>::allocate(size_t length) {
227  if (should_use_malloc(length)) {
228    return allocate_malloc(length);
229  }
230
231  return allocate_mmap(length);
232}
233
234template <class E, MEMFLAGS F>
235E* ArrayAllocator<E, F>::reallocate(E* old_addr, size_t old_length, size_t new_length) {
236  E* new_addr = (new_length > 0)
237      ? allocate(new_length)
238      : NULL;
239
240  if (new_addr != NULL && old_addr != NULL) {
241    memcpy(new_addr, old_addr, MIN2(old_length, new_length) * sizeof(E));
242  }
243
244  if (old_addr != NULL) {
245    free(old_addr, old_length);
246  }
247
248  return new_addr;
249}
250
251template<class E, MEMFLAGS F>
252void ArrayAllocator<E, F>::free_malloc(E* addr, size_t length) {
253  MallocArrayAllocator<E, F>::free(addr, length);
254}
255
256template<class E, MEMFLAGS F>
257void ArrayAllocator<E, F>::free_mmap(E* addr, size_t length) {
258  MmapArrayAllocator<E, F>::free(addr, length);
259}
260
261template<class E, MEMFLAGS F>
262void ArrayAllocator<E, F>::free(E* addr, size_t length) {
263  if (addr != NULL) {
264    if (should_use_malloc(length)) {
265      free_malloc(addr, length);
266    } else {
267      free_mmap(addr, length);
268    }
269  }
270}
271
272#endif // SHARE_VM_MEMORY_ALLOCATION_INLINE_HPP
273