allocation.inline.hpp revision 7462:a0dd995271c4
/*
 * Copyright (c) 1997, 2014, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */
24
25#ifndef SHARE_VM_MEMORY_ALLOCATION_INLINE_HPP
26#define SHARE_VM_MEMORY_ALLOCATION_INLINE_HPP
27
28#include "runtime/atomic.inline.hpp"
29#include "runtime/os.hpp"
30#include "services/memTracker.hpp"
31
// Explicit C-heap memory management
33
34void trace_heap_malloc(size_t size, const char* name, void *p);
35void trace_heap_free(void *p);
36
#ifndef PRODUCT
// Increments unsigned long value for statistics (not atomic on MP).
// Debug/statistics use only (compiled out of PRODUCT builds); concurrent
// callers may lose updates since there is no atomic read-modify-write.
inline void inc_stat_counter(volatile julong* dest, julong add_value) {
#if defined(SPARC) || defined(X86)
  // Sparc and X86 have atomic jlong (8 bytes) instructions
  // Atomic::load/store are only provided for signed jlong, so cast the
  // julong counter through jlong; the bit pattern is unchanged.
  julong value = Atomic::load((volatile jlong*)dest);
  value += add_value;
  Atomic::store((jlong)value, (volatile jlong*)dest);
#else
  // possible word-tearing during load/store
  // (the 64-bit access may be split into two 32-bit accesses here)
  *dest += add_value;
#endif
}
#endif // !PRODUCT
51
52// allocate using malloc; will fail if no memory available
53inline char* AllocateHeap(size_t size, MEMFLAGS flags,
54    const NativeCallStack& stack,
55    AllocFailType alloc_failmode = AllocFailStrategy::EXIT_OOM) {
56  char* p = (char*) os::malloc(size, flags, stack);
57  #ifdef ASSERT
58  if (PrintMallocFree) trace_heap_malloc(size, "AllocateHeap", p);
59  #endif
60  if (p == NULL && alloc_failmode == AllocFailStrategy::EXIT_OOM) {
61    vm_exit_out_of_memory(size, OOM_MALLOC_ERROR, "AllocateHeap");
62  }
63  return p;
64}
65inline char* AllocateHeap(size_t size, MEMFLAGS flags,
66    AllocFailType alloc_failmode = AllocFailStrategy::EXIT_OOM) {
67  return AllocateHeap(size, flags, CURRENT_PC, alloc_failmode);
68}
69
70inline char* ReallocateHeap(char *old, size_t size, MEMFLAGS flag,
71    AllocFailType alloc_failmode = AllocFailStrategy::EXIT_OOM) {
72  char* p = (char*) os::realloc(old, size, flag, CURRENT_PC);
73  #ifdef ASSERT
74  if (PrintMallocFree) trace_heap_malloc(size, "ReallocateHeap", p);
75  #endif
76  if (p == NULL && alloc_failmode == AllocFailStrategy::EXIT_OOM) {
77    vm_exit_out_of_memory(size, OOM_MALLOC_ERROR, "ReallocateHeap");
78  }
79  return p;
80}
81
82inline void FreeHeap(void* p) {
83  #ifdef ASSERT
84  if (PrintMallocFree) trace_heap_free(p);
85  #endif
86  os::free(p);
87}
88
89
90template <MEMFLAGS F> void* CHeapObj<F>::operator new(size_t size,
91      const NativeCallStack& stack) throw() {
92  void* p = (void*)AllocateHeap(size, F, stack);
93#ifdef ASSERT
94  if (PrintMallocFree) trace_heap_malloc(size, "CHeapObj-new", p);
95#endif
96  return p;
97}
98
99template <MEMFLAGS F> void* CHeapObj<F>::operator new(size_t size) throw() {
100  return CHeapObj<F>::operator new(size, CALLER_PC);
101}
102
103template <MEMFLAGS F> void* CHeapObj<F>::operator new (size_t size,
104  const std::nothrow_t&  nothrow_constant, const NativeCallStack& stack) throw() {
105  void* p = (void*)AllocateHeap(size, F, stack,
106      AllocFailStrategy::RETURN_NULL);
107#ifdef ASSERT
108    if (PrintMallocFree) trace_heap_malloc(size, "CHeapObj-new", p);
109#endif
110    return p;
111  }
112
113template <MEMFLAGS F> void* CHeapObj<F>::operator new (size_t size,
114  const std::nothrow_t& nothrow_constant) throw() {
115  return CHeapObj<F>::operator new(size, nothrow_constant, CALLER_PC);
116}
117
118template <MEMFLAGS F> void* CHeapObj<F>::operator new [](size_t size,
119      const NativeCallStack& stack) throw() {
120  return CHeapObj<F>::operator new(size, stack);
121}
122
123template <MEMFLAGS F> void* CHeapObj<F>::operator new [](size_t size)
124  throw() {
125  return CHeapObj<F>::operator new(size, CALLER_PC);
126}
127
128template <MEMFLAGS F> void* CHeapObj<F>::operator new [](size_t size,
129  const std::nothrow_t&  nothrow_constant, const NativeCallStack& stack) throw() {
130  return CHeapObj<F>::operator new(size, nothrow_constant, stack);
131}
132
133template <MEMFLAGS F> void* CHeapObj<F>::operator new [](size_t size,
134  const std::nothrow_t& nothrow_constant) throw() {
135  return CHeapObj<F>::operator new(size, nothrow_constant, CALLER_PC);
136}
137
138template <MEMFLAGS F> void CHeapObj<F>::operator delete(void* p){
139    FreeHeap(p);
140}
141
142template <MEMFLAGS F> void CHeapObj<F>::operator delete [](void* p){
143    FreeHeap(p);
144}
145
146template <class E, MEMFLAGS F>
147char* ArrayAllocator<E, F>::allocate_inner(size_t &size, bool &use_malloc) {
148  char* addr = NULL;
149
150  if (use_malloc) {
151    addr = AllocateHeap(size, F);
152    if (addr == NULL && size >= (size_t)os::vm_allocation_granularity()) {
153      // malloc failed let's try with mmap instead
154      use_malloc = false;
155    } else {
156      return addr;
157    }
158  }
159
160  int alignment = os::vm_allocation_granularity();
161  size = align_size_up(size, alignment);
162
163  addr = os::reserve_memory(size, NULL, alignment, F);
164  if (addr == NULL) {
165    vm_exit_out_of_memory(size, OOM_MMAP_ERROR, "Allocator (reserve)");
166  }
167
168  os::commit_memory_or_exit(addr, size, !ExecMem, "Allocator (commit)");
169  return addr;
170}
171
172template <class E, MEMFLAGS F>
173E* ArrayAllocator<E, F>::allocate(size_t length) {
174  assert(_addr == NULL, "Already in use");
175
176  _size = sizeof(E) * length;
177  _use_malloc = should_use_malloc(_size);
178  _addr = allocate_inner(_size, _use_malloc);
179
180  return (E*)_addr;
181}
182
183template <class E, MEMFLAGS F>
184E* ArrayAllocator<E, F>::reallocate(size_t new_length) {
185  size_t new_size = sizeof(E) * new_length;
186  bool use_malloc = should_use_malloc(new_size);
187  char* new_addr = allocate_inner(new_size, use_malloc);
188
189  memcpy(new_addr, _addr, MIN2(new_size, _size));
190
191  free();
192  _size = new_size;
193  _use_malloc = use_malloc;
194  _addr = new_addr;
195  return (E*)new_addr;
196}
197
198template<class E, MEMFLAGS F>
199void ArrayAllocator<E, F>::free() {
200  if (_addr != NULL) {
201    if (_use_malloc) {
202      FreeHeap(_addr);
203    } else {
204      os::release_memory(_addr, _size);
205    }
206    _addr = NULL;
207  }
208}
209
210#endif // SHARE_VM_MEMORY_ALLOCATION_INLINE_HPP
211