//===-- asan_allocator.h ----------------------------------------*- C++ -*-===//
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file is a part of AddressSanitizer, an address sanity checker.
//
// ASan-private header for asan_allocator2.cc.
//===----------------------------------------------------------------------===//

#ifndef ASAN_ALLOCATOR_H
#define ASAN_ALLOCATOR_H

#include "asan_internal.h"
#include "asan_interceptors.h"
#include "sanitizer_common/sanitizer_allocator.h"
#include "sanitizer_common/sanitizer_list.h"

namespace __asan {

enum AllocType {
  FROM_MALLOC = 1,  // Memory block came from malloc, calloc, realloc, etc.
  FROM_NEW = 2,     // Memory block came from operator new.
  FROM_NEW_BR = 3   // Memory block came from operator new [].
};
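
// The AllocType recorded at allocation time is checked again on the
// deallocation path, so alloc/dealloc mismatches can be reported. A minimal
// illustration (the actual check lives in the allocator implementation, not
// in this header):
//
//   int *p = new int[10];
//   free(p);  // May be reported as an alloc-dealloc-mismatch: the block is
//             // FROM_NEW_BR but is being freed as FROM_MALLOC.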

static const uptr kNumberOfSizeClasses = 255;
struct AsanChunk;

void InitializeAllocator();
void ReInitializeAllocator();

class AsanChunkView {
 public:
  explicit AsanChunkView(AsanChunk *chunk) : chunk_(chunk) {}
  bool IsValid();   // Checks if AsanChunkView points to a valid allocated
                    // or quarantined chunk.
  uptr Beg();       // First byte of user memory.
  uptr End();       // Last byte of user memory.
  uptr UsedSize();  // Size requested by the user.
  uptr AllocTid();  // Id of the thread that allocated the chunk.
  uptr FreeTid();   // Id of the thread that freed the chunk.
  bool Eq(const AsanChunkView &c) const { return chunk_ == c.chunk_; }
  StackTrace GetAllocStack();
  StackTrace GetFreeStack();
  bool AddrIsInside(uptr addr, uptr access_size, sptr *offset) {
    if (addr >= Beg() && (addr + access_size) <= End()) {
      *offset = addr - Beg();
      return true;
    }
    return false;
  }
  bool AddrIsAtLeft(uptr addr, uptr access_size, sptr *offset) {
    (void)access_size;
    if (addr < Beg()) {
      *offset = Beg() - addr;
      return true;
    }
    return false;
  }
  bool AddrIsAtRight(uptr addr, uptr access_size, sptr *offset) {
    if (addr + access_size > End()) {
      *offset = addr - End();
      return true;
    }
    return false;
  }

 private:
  AsanChunk *const chunk_;
};

AsanChunkView FindHeapChunkByAddress(uptr address);
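
// Typical use when describing a heap address in an error report; a minimal
// sketch based only on the declarations above (the real reporting code lives
// elsewhere, and 'addr'/'access_size' are just placeholders):
//
//   AsanChunkView chunk = FindHeapChunkByAddress(addr);
//   sptr offset = 0;
//   if (chunk.IsValid() && chunk.AddrIsInside(addr, access_size, &offset)) {
//     // 'addr' is 'offset' bytes into a chunk of chunk.UsedSize() bytes
//     // allocated by thread chunk.AllocTid().
//   }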

// FIFO list of AsanChunks that also tracks the total size of its chunks.
class AsanChunkFifoList: public IntrusiveList<AsanChunk> {
 public:
  explicit AsanChunkFifoList(LinkerInitialized) { }
  AsanChunkFifoList() { clear(); }
  void Push(AsanChunk *n);
  void PushList(AsanChunkFifoList *q);
  AsanChunk *Pop();
  uptr size() { return size_; }
  void clear() {
    IntrusiveList<AsanChunk>::clear();
    size_ = 0;
  }
 private:
  uptr size_;
};
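
// Illustrative sketch of how such a FIFO can back a quarantine: freed chunks
// are pushed at one end and popped from the other once the accumulated size
// exceeds a budget. (The real quarantine logic lives in the allocator
// implementation; 'kMaxQuarantineBytes' and 'ReclaimChunk' are hypothetical.)
//
//   AsanChunkFifoList quarantine;
//   quarantine.Push(chunk);  // On free(), instead of releasing right away.
//   while (quarantine.size() > kMaxQuarantineBytes)
//     ReclaimChunk(quarantine.Pop());  // Oldest chunks are recycled first.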

struct AsanMapUnmapCallback {
  void OnMap(uptr p, uptr size) const;
  void OnUnmap(uptr p, uptr size) const;
};

#if SANITIZER_CAN_USE_ALLOCATOR64
# if defined(__powerpc64__)
const uptr kAllocatorSpace =  0xa0000000000ULL;
const uptr kAllocatorSize  =  0x20000000000ULL;  // 2T.
# else
const uptr kAllocatorSpace = 0x600000000000ULL;
const uptr kAllocatorSize  =  0x40000000000ULL;  // 4T.
# endif
typedef DefaultSizeClassMap SizeClassMap;
typedef SizeClassAllocator64<kAllocatorSpace, kAllocatorSize, 0 /*metadata*/,
    SizeClassMap, AsanMapUnmapCallback> PrimaryAllocator;
#else  // Fallback to SizeClassAllocator32.
static const uptr kRegionSizeLog = 20;
static const uptr kNumRegions = SANITIZER_MMAP_RANGE_SIZE >> kRegionSizeLog;
# if SANITIZER_WORDSIZE == 32
typedef FlatByteMap<kNumRegions> ByteMap;
# elif SANITIZER_WORDSIZE == 64
typedef TwoLevelByteMap<(kNumRegions >> 12), 1 << 12> ByteMap;
# endif
typedef CompactSizeClassMap SizeClassMap;
typedef SizeClassAllocator32<0, SANITIZER_MMAP_RANGE_SIZE, 16,
  SizeClassMap, kRegionSizeLog,
  ByteMap,
  AsanMapUnmapCallback> PrimaryAllocator;
#endif  // SANITIZER_CAN_USE_ALLOCATOR64

typedef SizeClassAllocatorLocalCache<PrimaryAllocator> AllocatorCache;
typedef LargeMmapAllocator<AsanMapUnmapCallback> SecondaryAllocator;
typedef CombinedAllocator<PrimaryAllocator, AllocatorCache,
    SecondaryAllocator> Allocator;
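
// Rough dispatch performed by the combined Allocator (illustrative only; the
// real logic and thresholds live in sanitizer_common's CombinedAllocator):
//
//   if (size <= SizeClassMap::kMaxSize)  // and the alignment is modest
//     // serve from PrimaryAllocator via the per-thread AllocatorCache;
//   else
//     // fall back to SecondaryAllocator, which mmap()s each block.
//
// AsanMapUnmapCallback lets ASan hook both allocators' map/unmap events,
// e.g. to keep shadow memory and allocator stats up to date.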

struct AsanThreadLocalMallocStorage {
  uptr quarantine_cache[16];  // Opaque storage for the quarantine cache.
  AllocatorCache allocator2_cache;
  void CommitBack();  // Returns cached resources to the global state.
 private:
  // These objects are allocated via mmap() and are zero-initialized.
  AsanThreadLocalMallocStorage() {}
};
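
// A minimal usage sketch: when a thread shuts down, its cached state is
// committed back to the global allocator so chunks are not leaked. (Assumes
// an accessor such as AsanThread::malloc_storage(), which is declared in
// asan_thread.h, not here.)
//
//   thread->malloc_storage().CommitBack();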

void *asan_memalign(uptr alignment, uptr size, BufferedStackTrace *stack,
                    AllocType alloc_type);
void asan_free(void *ptr, BufferedStackTrace *stack, AllocType alloc_type);
void asan_sized_free(void *ptr, uptr size, BufferedStackTrace *stack,
                     AllocType alloc_type);

void *asan_malloc(uptr size, BufferedStackTrace *stack);
void *asan_calloc(uptr nmemb, uptr size, BufferedStackTrace *stack);
void *asan_realloc(void *p, uptr size, BufferedStackTrace *stack);
void *asan_valloc(uptr size, BufferedStackTrace *stack);
void *asan_pvalloc(uptr size, BufferedStackTrace *stack);

int asan_posix_memalign(void **memptr, uptr alignment, uptr size,
                        BufferedStackTrace *stack);
uptr asan_malloc_usable_size(void *ptr, uptr pc, uptr bp);
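
// These entry points are what the malloc family of interceptors forward to,
// passing a stack trace captured at the call site. A minimal sketch of such
// an interceptor (the real ones live in asan_malloc_*.cc and rely on the
// GET_STACK_TRACE_MALLOC machinery from asan_stack.h):
//
//   INTERCEPTOR(void*, malloc, uptr size) {
//     GET_STACK_TRACE_MALLOC;  // Defines a local BufferedStackTrace 'stack'.
//     return asan_malloc(size, &stack);
//   }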

uptr asan_mz_size(const void *ptr);
void asan_mz_force_lock();
void asan_mz_force_unlock();

void PrintInternalAllocatorStats();

}  // namespace __asan
#endif  // ASAN_ALLOCATOR_H