/*
 * Copyright (c) 1997, 2017, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#ifndef SHARE_VM_MEMORY_ALLOCATION_HPP
#define SHARE_VM_MEMORY_ALLOCATION_HPP

#include "runtime/globals.hpp"
#include "utilities/globalDefinitions.hpp"
#include "utilities/macros.hpp"
#ifdef COMPILER1
#include "c1/c1_globals.hpp"
#endif
#ifdef COMPILER2
#include "opto/c2_globals.hpp"
#endif

#include <new>

// The byte alignment to be used by Arena::Amalloc.  See bugid 4169348.
// Note: this value must be a power of 2

#define ARENA_AMALLOC_ALIGNMENT (2*BytesPerWord)

#define ARENA_ALIGN_M1 (((size_t)(ARENA_AMALLOC_ALIGNMENT)) - 1)
#define ARENA_ALIGN_MASK (~((size_t)ARENA_ALIGN_M1))
#define ARENA_ALIGN(x) ((((size_t)(x)) + ARENA_ALIGN_M1) & ARENA_ALIGN_MASK)
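// For example, on a 64-bit platform (BytesPerWord == 8) the alignment is
// 16 bytes, so ARENA_ALIGN(13) == 16 and ARENA_ALIGN(16) == 16; a size
// that is already aligned is unchanged.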

class AllocFailStrategy {
public:
  enum AllocFailEnum { EXIT_OOM, RETURN_NULL };
};
typedef AllocFailStrategy::AllocFailEnum AllocFailType;
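
// Allocation routines that take an AllocFailType either abort the VM on
// out-of-memory (EXIT_OOM, the default) or return NULL (RETURN_NULL) and
// leave recovery to the caller. A minimal sketch of the checked pattern
// (the variable names are illustrative):
//
//   char* p = (char*)arena->Amalloc(len, AllocFailStrategy::RETURN_NULL);
//   if (p == NULL) {
//     ... // recover, e.g. by falling back to a smaller request
//   }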

// Each class in the virtual machine must be a subclass of
// one of the following allocation classes:
//
// For objects allocated in the resource area (see resourceArea.hpp).
// - ResourceObj
//
// For objects allocated in the C-heap (managed by: free & malloc).
// - CHeapObj
//
// For objects allocated on the stack.
// - StackObj
//
// For embedded objects.
// - ValueObj
//
// For classes used as name spaces.
// - AllStatic
//
// For classes in Metaspace (class data)
// - MetaspaceObj
//
// The printable subclasses are used for debugging and define virtual
// member functions for printing. Classes that avoid allocating the
// vtbl entries in the objects should therefore not be the printable
// subclasses.
//
// The following macros and functions should be used to allocate memory
// directly in the resource area or in the C-heap. The _OBJ variants
// of the NEW/FREE_C_HEAP macros allocate and deallocate simple objects
// that do not inherit from CHeapObj; note that their constructors and
// destructors are not called. The preferred way to allocate objects
// is with the new operator.
//
// WARNING: The array variants must only be used for a homogeneous array
// where all objects are of the exact type specified. If subtypes are
// stored in the array, you must take care to call destructors as needed.
//
//   NEW_RESOURCE_ARRAY(type, size)
//   NEW_RESOURCE_OBJ(type)
//   NEW_C_HEAP_ARRAY(type, size, memflags)
//   NEW_C_HEAP_OBJ(type, memflags)
//   FREE_C_HEAP_ARRAY(type, old)
//   FREE_C_HEAP_OBJ(objname)
//   char* AllocateHeap(size_t size, MEMFLAGS flags);
//   void  FreeHeap(void* p);
//
// C-heap allocation can be traced using +PrintHeapAllocation.
// malloc and free should therefore never be called directly.
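
// A minimal usage sketch of the C-heap macros (the element type, length
// and memory type are illustrative):
//
//   jint* counts = NEW_C_HEAP_ARRAY(jint, 16, mtInternal);
//   ...
//   FREE_C_HEAP_ARRAY(jint, counts);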

// Base class for objects allocated in the C-heap.

// In non-product mode we introduce a superclass for all allocation classes
// that supports printing.
// We avoid the superclass in product mode since some C++ compilers add
// a word of overhead for empty superclasses.

#ifdef PRODUCT
#define ALLOCATION_SUPER_CLASS_SPEC
#else
#define ALLOCATION_SUPER_CLASS_SPEC : public AllocatedObj
class AllocatedObj {
 public:
  // Printing support
  void print() const;
  void print_value() const;

  virtual void print_on(outputStream* st) const;
  virtual void print_value_on(outputStream* st) const;
};
#endif


/*
 * Memory types
 */
enum MemoryType {
  // Memory type by subsystem. It occupies the low byte.
  mtJavaHeap          = 0x00,  // Java heap
  mtClass             = 0x01,  // memory class for Java classes
  mtThread            = 0x02,  // memory for thread objects
  mtThreadStack       = 0x03,
  mtCode              = 0x04,  // memory for generated code
  mtGC                = 0x05,  // memory for GC
  mtCompiler          = 0x06,  // memory for compiler
  mtInternal          = 0x07,  // memory used by VM, but does not belong to
                                 // any of above categories, and not used for
                                 // native memory tracking
  mtOther             = 0x08,  // memory not used by VM
  mtSymbol            = 0x09,  // symbol
  mtNMT               = 0x0A,  // memory used by native memory tracking
  mtClassShared       = 0x0B,  // class data sharing
  mtChunk             = 0x0C,  // chunk that holds content of arenas
  mtTest              = 0x0D,  // Test type for verifying NMT
  mtTracing           = 0x0E,  // memory used for Tracing
  mtLogging           = 0x0F,  // memory for logging
  mtArguments         = 0x10,  // memory for argument processing
  mtModule            = 0x11,  // memory for module processing
  mtNone              = 0x12,  // undefined
  mt_number_of_types  = 0x13   // number of memory types (mtDontTrack
                                 // is not included as a valid type)
};

typedef MemoryType MEMFLAGS;


#if INCLUDE_NMT

extern bool NMT_track_callsite;

#else

const bool NMT_track_callsite = false;

#endif // INCLUDE_NMT

class NativeCallStack;


template <MEMFLAGS F> class CHeapObj ALLOCATION_SUPER_CLASS_SPEC {
 public:
  NOINLINE void* operator new(size_t size, const NativeCallStack& stack) throw();
  NOINLINE void* operator new(size_t size) throw();
  NOINLINE void* operator new (size_t size, const std::nothrow_t&  nothrow_constant,
                               const NativeCallStack& stack) throw();
  NOINLINE void* operator new (size_t size, const std::nothrow_t&  nothrow_constant)
                               throw();
  NOINLINE void* operator new [](size_t size, const NativeCallStack& stack) throw();
  NOINLINE void* operator new [](size_t size) throw();
  NOINLINE void* operator new [](size_t size, const std::nothrow_t&  nothrow_constant,
                               const NativeCallStack& stack) throw();
  NOINLINE void* operator new [](size_t size, const std::nothrow_t&  nothrow_constant)
                               throw();
  void  operator delete(void* p);
  void  operator delete [] (void* p);
};
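
// A minimal sketch of a C-heap allocated class (the class name is
// illustrative; the template argument selects the NMT memory type):
//
//   class MyTable : public CHeapObj<mtInternal> {
//     ...
//   };
//
//   MyTable* t = new MyTable();  // heap allocation, tracked under mtInternal
//   delete t;                    // returns the memory to the C heap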

// Base class for objects allocated on the stack only.
// Calling new or delete will result in a fatal error.

class StackObj ALLOCATION_SUPER_CLASS_SPEC {
 private:
  void* operator new(size_t size) throw();
  void* operator new [](size_t size) throw();
#ifdef __IBMCPP__
 public:
#endif
  void  operator delete(void* p);
  void  operator delete [](void* p);
};
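
// For example (a sketch; the class name is illustrative), RAII helpers
// are declared as stack objects so that attempts to heap-allocate them
// fail to compile:
//
//   class MyMark : public StackObj {
//     ...
//   };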

// Base class for objects used as value objects.
// Calling new or delete will result in a fatal error.
//
// Portability note: Certain compilers (e.g. gcc) will
// always make classes bigger if they have a superclass, even
// if the superclass does not have any virtual methods or
// instance fields. The HotSpot implementation relies on this
// not happening. So never make a ValueObj class a direct subclass
// of this object; use the VALUE_OBJ_CLASS_SPEC macro instead, e.g.,
// like this:
//
//   class A VALUE_OBJ_CLASS_SPEC {
//     ...
//   }
//
// With gcc and possibly other compilers the VALUE_OBJ_CLASS_SPEC can
// be defined as an empty string "".
//
class _ValueObj {
 private:
  void* operator new(size_t size) throw();
  void  operator delete(void* p);
  void* operator new [](size_t size) throw();
  void  operator delete [](void* p);
};


// Base class for objects stored in Metaspace.
// Calling delete will result in a fatal error.
//
// Do not inherit from something with a vptr because this class does
// not introduce one.  This class is used to allocate both shared read-only
// and shared read-write classes.
//

class ClassLoaderData;
class MetaspaceClosure;

class MetaspaceObj {
 public:
  bool is_metaspace_object() const;
  bool is_shared() const;
  void print_address_on(outputStream* st) const;  // nonvirtual address printing

#define METASPACE_OBJ_TYPES_DO(f) \
  f(Unknown) \
  f(Class) \
  f(Symbol) \
  f(TypeArrayU1) \
  f(TypeArrayU2) \
  f(TypeArrayU4) \
  f(TypeArrayU8) \
  f(TypeArrayOther) \
  f(Method) \
  f(ConstMethod) \
  f(MethodData) \
  f(ConstantPool) \
  f(ConstantPoolCache) \
  f(Annotations) \
  f(MethodCounters)

#define METASPACE_OBJ_TYPE_DECLARE(name) name ## Type,
#define METASPACE_OBJ_TYPE_NAME_CASE(name) case name ## Type: return #name;

  enum Type {
    // Types are MetaspaceObj::ClassType, MetaspaceObj::SymbolType, etc
    METASPACE_OBJ_TYPES_DO(METASPACE_OBJ_TYPE_DECLARE)
    _number_of_types
  };

  static const char * type_name(Type type) {
    switch(type) {
    METASPACE_OBJ_TYPES_DO(METASPACE_OBJ_TYPE_NAME_CASE)
    default:
      ShouldNotReachHere();
      return NULL;
    }
  }

  static MetaspaceObj::Type array_type(size_t elem_size) {
    switch (elem_size) {
    case 1: return TypeArrayU1Type;
    case 2: return TypeArrayU2Type;
    case 4: return TypeArrayU4Type;
    case 8: return TypeArrayU8Type;
    default:
      return TypeArrayOtherType;
    }
  }

  void* operator new(size_t size, ClassLoaderData* loader_data,
                     size_t word_size,
                     Type type, Thread* thread) throw();
                     // can't use TRAPS from this header file.
  void operator delete(void* p) { ShouldNotCallThis(); }

  // Declare a *static* method with the same signature in any subclass of MetaspaceObj
  // that should be read-only by default. See symbol.hpp for an example. This function
  // is used by the templates in metaspaceClosure.hpp
  static bool is_read_only_by_default() { return false; }
};
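
// For example (a sketch based on the comment above; see symbol.hpp for the
// real case), a subclass that should be read-only by default shadows the
// static method:
//
//   class Symbol : public MetaspaceObj {
//    public:
//     static bool is_read_only_by_default() { return true; }
//     ...
//   };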

// Base class for classes that constitute name spaces.

class AllStatic {
 public:
  AllStatic()  { ShouldNotCallThis(); }
  ~AllStatic() { ShouldNotCallThis(); }
};


//------------------------------Chunk------------------------------------------
// Linked list of raw memory chunks
class Chunk: CHeapObj<mtChunk> {
  friend class VMStructs;

 protected:
  Chunk*       _next;     // Next Chunk in list
  const size_t _len;      // Size of this Chunk
 public:
  void* operator new(size_t size, AllocFailType alloc_failmode, size_t length) throw();
  void  operator delete(void* p);
  Chunk(size_t length);

  enum {
    // default sizes; make them slightly smaller than 2**k to guard against
    // buddy-system style malloc implementations
#ifdef _LP64
    slack      = 40,            // [RGV] Not sure if this is right, but make it
                                //       a multiple of 8.
#else
    slack      = 20,            // suspected sizeof(Chunk) + internal malloc headers
#endif

    tiny_size  =  256  - slack, // Size of first chunk (tiny)
    init_size  =  1*K  - slack, // Size of first chunk (normal aka small)
    medium_size= 10*K  - slack, // Size of medium-sized chunk
    size       = 32*K  - slack, // Default size of an Arena chunk (following the first)
    non_pool_size = init_size + 32 // An initial size which is not one of above
  };

  void chop();                  // Chop this chunk
  void next_chop();             // Chop next chunk
  static size_t aligned_overhead_size(void) { return ARENA_ALIGN(sizeof(Chunk)); }
  static size_t aligned_overhead_size(size_t byte_size) { return ARENA_ALIGN(byte_size); }

  size_t length() const         { return _len;  }
  Chunk* next() const           { return _next;  }
  void set_next(Chunk* n)       { _next = n;  }
  // Boundaries of data area (possibly unused)
  char* bottom() const          { return ((char*) this) + aligned_overhead_size();  }
  char* top()    const          { return bottom() + _len; }
  bool contains(char* p) const  { return bottom() <= p && p <= top(); }

  // Start the chunk_pool cleaner task
  static void start_chunk_pool_cleaner_task();

  static void clean_chunk_pool();
};

//------------------------------Arena------------------------------------------
// Fast allocation of memory
class Arena : public CHeapObj<mtNone> {
protected:
  friend class ResourceMark;
  friend class HandleMark;
  friend class NoHandleMark;
  friend class VMStructs;

  MEMFLAGS    _flags;           // Memory tracking flags

  Chunk *_first;                // First chunk
  Chunk *_chunk;                // current chunk
  char *_hwm, *_max;            // High water mark and max in current chunk
  // Get a new Chunk of at least size x
  void* grow(size_t x, AllocFailType alloc_failmode = AllocFailStrategy::EXIT_OOM);
  size_t _size_in_bytes;        // Size of arena (used for native memory tracking)

  NOT_PRODUCT(static julong _bytes_allocated;) // total #bytes allocated since start
  friend class AllocStats;
  debug_only(void* malloc(size_t size);)
  debug_only(void* internal_malloc_4(size_t x);)
  NOT_PRODUCT(void inc_bytes_allocated(size_t x);)

  void signal_out_of_memory(size_t request, const char* whence) const;

  bool check_for_overflow(size_t request, const char* whence,
      AllocFailType alloc_failmode = AllocFailStrategy::EXIT_OOM) const {
    if (UINTPTR_MAX - request < (uintptr_t)_hwm) {
      if (alloc_failmode == AllocFailStrategy::RETURN_NULL) {
        return false;
      }
      signal_out_of_memory(request, whence);
    }
    return true;
 }

 public:
  Arena(MEMFLAGS memflag);
  Arena(MEMFLAGS memflag, size_t init_size);
  ~Arena();
  void  destruct_contents();
  char* hwm() const             { return _hwm; }

  // new operators
  void* operator new (size_t size) throw();
  void* operator new (size_t size, const std::nothrow_t& nothrow_constant) throw();

  // dynamic memory type tagging
  void* operator new(size_t size, MEMFLAGS flags) throw();
  void* operator new(size_t size, const std::nothrow_t& nothrow_constant, MEMFLAGS flags) throw();
  void  operator delete(void* p);

  // Fast allocate in the arena.  Common case is: pointer test + increment.
  void* Amalloc(size_t x, AllocFailType alloc_failmode = AllocFailStrategy::EXIT_OOM) {
    assert(is_power_of_2(ARENA_AMALLOC_ALIGNMENT), "should be a power of 2");
    x = ARENA_ALIGN(x);
    debug_only(if (UseMallocOnly) return malloc(x);)
    if (!check_for_overflow(x, "Arena::Amalloc", alloc_failmode))
      return NULL;
    NOT_PRODUCT(inc_bytes_allocated(x);)
    if (_hwm + x > _max) {
      return grow(x, alloc_failmode);
    } else {
      char *old = _hwm;
      _hwm += x;
      return old;
    }
  }
  // Further assume size is padded out to words
  void *Amalloc_4(size_t x, AllocFailType alloc_failmode = AllocFailStrategy::EXIT_OOM) {
    assert( (x&(sizeof(char*)-1)) == 0, "misaligned size" );
    debug_only(if (UseMallocOnly) return malloc(x);)
    if (!check_for_overflow(x, "Arena::Amalloc_4", alloc_failmode))
      return NULL;
    NOT_PRODUCT(inc_bytes_allocated(x);)
    if (_hwm + x > _max) {
      return grow(x, alloc_failmode);
    } else {
      char *old = _hwm;
      _hwm += x;
      return old;
    }
  }

  // Allocate with 'double' alignment. It is 8 bytes on sparc.
  // In other cases Amalloc_D() should be the same as Amalloc_4().
  void* Amalloc_D(size_t x, AllocFailType alloc_failmode = AllocFailStrategy::EXIT_OOM) {
    assert( (x&(sizeof(char*)-1)) == 0, "misaligned size" );
    debug_only(if (UseMallocOnly) return malloc(x);)
#if defined(SPARC) && !defined(_LP64)
#define DALIGN_M1 7
    size_t delta = (((size_t)_hwm + DALIGN_M1) & ~DALIGN_M1) - (size_t)_hwm;
    x += delta;
#endif
    if (!check_for_overflow(x, "Arena::Amalloc_D", alloc_failmode))
      return NULL;
    NOT_PRODUCT(inc_bytes_allocated(x);)
    if (_hwm + x > _max) {
      return grow(x, alloc_failmode); // grow() returns a result aligned >= 8 bytes.
    } else {
      char *old = _hwm;
      _hwm += x;
#if defined(SPARC) && !defined(_LP64)
      old += delta; // align to 8-bytes
#endif
      return old;
    }
  }

  // Fast delete in area.  Common case is: NOP (except for storage reclaimed)
  void Afree(void *ptr, size_t size) {
#ifdef ASSERT
    if (ZapResourceArea) memset(ptr, badResourceValue, size); // zap freed memory
    if (UseMallocOnly) return;
#endif
    if (((char*)ptr) + size == _hwm) _hwm = (char*)ptr;
  }

  void *Arealloc( void *old_ptr, size_t old_size, size_t new_size,
      AllocFailType alloc_failmode = AllocFailStrategy::EXIT_OOM);

  // Move contents of this arena into an empty arena
  Arena *move_contents(Arena *empty_arena);

  // Determine if pointer belongs to this Arena or not.
  bool contains( const void *ptr ) const;

  // Total of all chunks in use (not thread-safe)
  size_t used() const;

  // Total # of bytes used
  size_t size_in_bytes() const         {  return _size_in_bytes; };
  void set_size_in_bytes(size_t size);

  static void free_malloced_objects(Chunk* chunk, char* hwm, char* max, char* hwm2)  PRODUCT_RETURN;
  static void free_all(char** start, char** end)                                     PRODUCT_RETURN;

private:
  // Reset this Arena to empty, access will trigger grow if necessary
  void   reset(void) {
    _first = _chunk = NULL;
    _hwm = _max = NULL;
    set_size_in_bytes(0);
  }
};

// One of the following macros must be used when allocating
// an array or object from an arena
#define NEW_ARENA_ARRAY(arena, type, size) \
  (type*) (arena)->Amalloc((size) * sizeof(type))

#define REALLOC_ARENA_ARRAY(arena, type, old, old_size, new_size)    \
  (type*) (arena)->Arealloc((char*)(old), (old_size) * sizeof(type), \
                            (new_size) * sizeof(type) )

#define FREE_ARENA_ARRAY(arena, type, old, size) \
  (arena)->Afree((char*)(old), (size) * sizeof(type))

#define NEW_ARENA_OBJ(arena, type) \
  NEW_ARENA_ARRAY(arena, type, 1)
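
// A minimal usage sketch (the element type and sizes are illustrative):
//
//   Arena arena(mtInternal);
//   jint* tmp = NEW_ARENA_ARRAY(&arena, jint, 16);
//   tmp = REALLOC_ARENA_ARRAY(&arena, jint, tmp, 16, 32);
//   FREE_ARENA_ARRAY(&arena, jint, tmp, 32);  // reclaims storage only if
//                                             // this was the last allocation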


//%note allocation_1
extern char* resource_allocate_bytes(size_t size,
    AllocFailType alloc_failmode = AllocFailStrategy::EXIT_OOM);
extern char* resource_allocate_bytes(Thread* thread, size_t size,
    AllocFailType alloc_failmode = AllocFailStrategy::EXIT_OOM);
extern char* resource_reallocate_bytes( char *old, size_t old_size, size_t new_size,
    AllocFailType alloc_failmode = AllocFailStrategy::EXIT_OOM);
extern void resource_free_bytes( char *old, size_t size );

//----------------------------------------------------------------------
// Base class for objects allocated in the resource area by default.
// Optionally, objects may be allocated on the C heap with
// new (ResourceObj::C_HEAP, mtInternal) Foo(...) or in an Arena with
// new (&arena) Foo(...).
// ResourceObjs can be allocated within other objects, but don't use
// new or delete (allocation_type is unknown).  If new is used to allocate,
// use delete to deallocate.
class ResourceObj ALLOCATION_SUPER_CLASS_SPEC {
 public:
  enum allocation_type { STACK_OR_EMBEDDED = 0, RESOURCE_AREA, C_HEAP, ARENA, allocation_mask = 0x3 };
  static void set_allocation_type(address res, allocation_type type) NOT_DEBUG_RETURN;
#ifdef ASSERT
 private:
  // When this object is allocated on stack the new() operator is not
  // called but garbage on stack may look like a valid allocation_type.
  // Store negated 'this' pointer when new() is called to distinguish cases.
  // Use second array's element for verification value to distinguish garbage.
  uintptr_t _allocation_t[2];
  bool is_type_set() const;
 public:
  allocation_type get_allocation_type() const;
  bool allocated_on_stack()    const { return get_allocation_type() == STACK_OR_EMBEDDED; }
  bool allocated_on_res_area() const { return get_allocation_type() == RESOURCE_AREA; }
  bool allocated_on_C_heap()   const { return get_allocation_type() == C_HEAP; }
  bool allocated_on_arena()    const { return get_allocation_type() == ARENA; }
  ResourceObj(); // default constructor
  ResourceObj(const ResourceObj& r); // default copy constructor
  ResourceObj& operator=(const ResourceObj& r); // default copy assignment
  ~ResourceObj();
#endif // ASSERT

 public:
  void* operator new(size_t size, allocation_type type, MEMFLAGS flags) throw();
  void* operator new [](size_t size, allocation_type type, MEMFLAGS flags) throw();
  void* operator new(size_t size, const std::nothrow_t&  nothrow_constant,
      allocation_type type, MEMFLAGS flags) throw();
  void* operator new [](size_t size, const std::nothrow_t&  nothrow_constant,
      allocation_type type, MEMFLAGS flags) throw();

  void* operator new(size_t size, Arena *arena) throw() {
      address res = (address)arena->Amalloc(size);
      DEBUG_ONLY(set_allocation_type(res, ARENA);)
      return res;
  }

  void* operator new [](size_t size, Arena *arena) throw() {
      address res = (address)arena->Amalloc(size);
      DEBUG_ONLY(set_allocation_type(res, ARENA);)
      return res;
  }

  void* operator new(size_t size) throw() {
      address res = (address)resource_allocate_bytes(size);
      DEBUG_ONLY(set_allocation_type(res, RESOURCE_AREA);)
      return res;
  }

  void* operator new(size_t size, const std::nothrow_t& nothrow_constant) throw() {
      address res = (address)resource_allocate_bytes(size, AllocFailStrategy::RETURN_NULL);
      DEBUG_ONLY(if (res != NULL) set_allocation_type(res, RESOURCE_AREA);)
      return res;
  }

  void* operator new [](size_t size) throw() {
      address res = (address)resource_allocate_bytes(size);
      DEBUG_ONLY(set_allocation_type(res, RESOURCE_AREA);)
      return res;
  }

  void* operator new [](size_t size, const std::nothrow_t& nothrow_constant) throw() {
      address res = (address)resource_allocate_bytes(size, AllocFailStrategy::RETURN_NULL);
      DEBUG_ONLY(if (res != NULL) set_allocation_type(res, RESOURCE_AREA);)
      return res;
  }

  void  operator delete(void* p);
  void  operator delete [](void* p);
};
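
// A minimal sketch of the allocation choices for a ResourceObj subclass
// (the class name is illustrative):
//
//   class Foo : public ResourceObj {
//     ...
//   };
//
//   Foo* a = new Foo();                                   // resource area
//   Foo* b = new (ResourceObj::C_HEAP, mtInternal) Foo(); // C heap
//   Foo* c = new (&arena) Foo();           // an Arena already in scope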

// One of the following macros must be used when allocating an array
// or object to determine whether it should reside in the C heap or in
// the resource area.

#define NEW_RESOURCE_ARRAY(type, size)\
  (type*) resource_allocate_bytes((size) * sizeof(type))

#define NEW_RESOURCE_ARRAY_RETURN_NULL(type, size)\
  (type*) resource_allocate_bytes((size) * sizeof(type), AllocFailStrategy::RETURN_NULL)

#define NEW_RESOURCE_ARRAY_IN_THREAD(thread, type, size)\
  (type*) resource_allocate_bytes(thread, (size) * sizeof(type))

#define NEW_RESOURCE_ARRAY_IN_THREAD_RETURN_NULL(thread, type, size)\
  (type*) resource_allocate_bytes(thread, (size) * sizeof(type), AllocFailStrategy::RETURN_NULL)

#define REALLOC_RESOURCE_ARRAY(type, old, old_size, new_size)\
  (type*) resource_reallocate_bytes((char*)(old), (old_size) * sizeof(type), (new_size) * sizeof(type))

#define REALLOC_RESOURCE_ARRAY_RETURN_NULL(type, old, old_size, new_size)\
  (type*) resource_reallocate_bytes((char*)(old), (old_size) * sizeof(type),\
                                    (new_size) * sizeof(type), AllocFailStrategy::RETURN_NULL)

#define FREE_RESOURCE_ARRAY(type, old, size)\
  resource_free_bytes((char*)(old), (size) * sizeof(type))

#define FREE_FAST(old)\
    /* nop */

#define NEW_RESOURCE_OBJ(type)\
  NEW_RESOURCE_ARRAY(type, 1)

#define NEW_RESOURCE_OBJ_RETURN_NULL(type)\
  NEW_RESOURCE_ARRAY_RETURN_NULL(type, 1)
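
// For example (a sketch; 'n' is illustrative):
//
//   char* buf = NEW_RESOURCE_ARRAY(char, n);              // aborts VM on OOM
//   char* opt = NEW_RESOURCE_ARRAY_RETURN_NULL(char, n);  // NULL on OOM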

#define NEW_C_HEAP_ARRAY3(type, size, memflags, pc, allocfail)\
  (type*) AllocateHeap((size) * sizeof(type), memflags, pc, allocfail)

#define NEW_C_HEAP_ARRAY2(type, size, memflags, pc)\
  (type*) (AllocateHeap((size) * sizeof(type), memflags, pc))

#define NEW_C_HEAP_ARRAY(type, size, memflags)\
  (type*) (AllocateHeap((size) * sizeof(type), memflags))

#define NEW_C_HEAP_ARRAY2_RETURN_NULL(type, size, memflags, pc)\
  NEW_C_HEAP_ARRAY3(type, (size), memflags, pc, AllocFailStrategy::RETURN_NULL)

#define NEW_C_HEAP_ARRAY_RETURN_NULL(type, size, memflags)\
  NEW_C_HEAP_ARRAY3(type, (size), memflags, CURRENT_PC, AllocFailStrategy::RETURN_NULL)

#define REALLOC_C_HEAP_ARRAY(type, old, size, memflags)\
  (type*) (ReallocateHeap((char*)(old), (size) * sizeof(type), memflags))

#define REALLOC_C_HEAP_ARRAY_RETURN_NULL(type, old, size, memflags)\
  (type*) (ReallocateHeap((char*)(old), (size) * sizeof(type), memflags, AllocFailStrategy::RETURN_NULL))

#define FREE_C_HEAP_ARRAY(type, old) \
  FreeHeap((char*)(old))

// allocate type in heap without calling ctor
#define NEW_C_HEAP_OBJ(type, memflags)\
  NEW_C_HEAP_ARRAY(type, 1, memflags)

#define NEW_C_HEAP_OBJ_RETURN_NULL(type, memflags)\
  NEW_C_HEAP_ARRAY_RETURN_NULL(type, 1, memflags)

// deallocate obj of type in heap without calling dtor
#define FREE_C_HEAP_OBJ(objname)\
  FreeHeap((char*)objname);

// for statistics
#ifndef PRODUCT
class AllocStats : StackObj {
  julong start_mallocs, start_frees;
  julong start_malloc_bytes, start_mfree_bytes, start_res_bytes;
 public:
  AllocStats();

  julong num_mallocs();    // since creation of receiver
  julong alloc_bytes();
  julong num_frees();
  julong free_bytes();
  julong resource_bytes();
  void   print();
};
#endif


//------------------------------ReallocMark---------------------------------
// Code which uses REALLOC_RESOURCE_ARRAY should check an associated
// ReallocMark, which is declared in the same scope as the reallocated
// pointer.  Any operation that could __potentially__ cause a reallocation
// should check the ReallocMark.
class ReallocMark: public StackObj {
protected:
  NOT_PRODUCT(int _nesting;)

public:
  ReallocMark()   PRODUCT_RETURN;
  void check()    PRODUCT_RETURN;
};
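
// A minimal usage sketch (the names are illustrative):
//
//   ReallocMark nesting_check;
//   Foo* arr = NEW_RESOURCE_ARRAY(Foo, n);
//   ...
//   nesting_check.check();  // assert nothing invalidated 'arr' meanwhile
//   arr = REALLOC_RESOURCE_ARRAY(Foo, arr, n, 2*n);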

// Helper class to allocate arrays that may become large.
// Uses the OS malloc for allocations smaller than ArrayAllocatorMallocLimit
// and uses mapped memory for larger allocations.
// Most OS mallocs do something similar but Solaris malloc does not revert
// to mapped memory for large allocations. By default ArrayAllocatorMallocLimit
// is set so that we always use malloc except for Solaris, where we set the
// limit to get mapped memory.
template <class E>
class ArrayAllocator : public AllStatic {
 private:
  static bool should_use_malloc(size_t length);

  static E* allocate_malloc(size_t length, MEMFLAGS flags);
  static E* allocate_mmap(size_t length, MEMFLAGS flags);

  static void free_malloc(E* addr, size_t length);
  static void free_mmap(E* addr, size_t length);

 public:
  static E* allocate(size_t length, MEMFLAGS flags);
  static E* reallocate(E* old_addr, size_t old_length, size_t new_length, MEMFLAGS flags);
  static void free(E* addr, size_t length);
};
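
// A minimal usage sketch (the element type, length and memory type are
// illustrative):
//
//   size_t len = 1024;
//   jbyte* a = ArrayAllocator<jbyte>::allocate(len, mtGC);
//   ...
//   ArrayAllocator<jbyte>::free(a, len);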

// Uses mmapped memory for all allocations. All allocations are initially
// zero-filled. No pre-touching.
template <class E>
class MmapArrayAllocator : public AllStatic {
 private:
  static size_t size_for(size_t length);

 public:
  static E* allocate_or_null(size_t length, MEMFLAGS flags);
  static E* allocate(size_t length, MEMFLAGS flags);
  static void free(E* addr, size_t length);
};

// Uses malloc'ed memory for all allocations.
template <class E>
class MallocArrayAllocator : public AllStatic {
 public:
  static size_t size_for(size_t length);

  static E* allocate(size_t length, MEMFLAGS flags);
  static void free(E* addr, size_t length);
};

#endif // SHARE_VM_MEMORY_ALLOCATION_HPP