//===-- sanitizer_stackdepot.cpp ------------------------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file is shared between AddressSanitizer and ThreadSanitizer
// run-time libraries.
//===----------------------------------------------------------------------===//

#include "sanitizer_stackdepot.h"

#include "sanitizer_common.h"
#include "sanitizer_hash.h"
#include "sanitizer_persistent_allocator.h"
#include "sanitizer_stackdepotbase.h"

namespace __sanitizer {

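// Memory for the stack traces themselves. Each stored trace is a header word
// packing its size and tag, followed by the raw PCs (see
// StackDepotNode::store/load below).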
static PersistentAllocator<uptr> traceAllocator;

struct StackDepotNode {
  using hash_type = u64;
  hash_type stack_hash;
  u32 link;

  static const u32 kTabSizeLog = SANITIZER_ANDROID ? 16 : 20;
  static const u32 kStackSizeBits = 16;

  typedef StackTrace args_type;
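  // Two traces are considered equal iff their 64-bit hashes match; the args
  // themselves are not re-compared.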
  bool eq(hash_type hash, const args_type &args) const {
    return hash == stack_hash;
  }
  static uptr allocated();
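  // 64-bit MurMur2 hash over the PCs and the tag, seeded with the trace size
  // in bytes.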
  static hash_type hash(const args_type &args) {
    MurMur2Hash64Builder H(args.size * sizeof(uptr));
    for (uptr i = 0; i < args.size; i++) H.add(args.trace[i]);
    H.add(args.tag);
    return H.get();
  }
  static bool is_valid(const args_type &args) {
    return args.size > 0 && args.trace;
  }
  void store(u32 id, const args_type &args, hash_type hash);
  args_type load(u32 id) const;
  static StackDepotHandle get_handle(u32 id);

  typedef StackDepotHandle handle_type;
};

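// The depot itself: StackDepotNode entries with one reserved id bit and a
// hash table of 2^kTabSizeLog buckets.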
// FIXME(dvyukov): this single reserved bit is used in TSan.
typedef StackDepotBase<StackDepotNode, 1, StackDepotNode::kTabSizeLog>
    StackDepot;
static StackDepot theDepot;
// Keep rarely accessed stack traces out of frequently accessed nodes to
// improve caching efficiency.
static TwoLevelMap<uptr *, StackDepot::kNodesSize1, StackDepot::kNodesSize2>
    tracePtrs;
// Keep mutable data out of frequently accessed nodes to improve caching
// efficiency.
static TwoLevelMap<atomic_uint32_t, StackDepot::kNodesSize1,
                   StackDepot::kNodesSize2>
    useCounts;

int StackDepotHandle::use_count() const {
  return atomic_load_relaxed(&useCounts[id_]);
}

void StackDepotHandle::inc_use_count_unsafe() {
  atomic_fetch_add(&useCounts[id_], 1, memory_order_relaxed);
}

uptr StackDepotNode::allocated() {
  return traceAllocator.allocated() + tracePtrs.MemoryUsage() +
         useCounts.MemoryUsage();
}

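// Copies the trace into depot-owned memory. The first word packs the size
// (low kStackSizeBits bits) and the tag (remaining bits); the raw PCs follow.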
void StackDepotNode::store(u32 id, const args_type &args, hash_type hash) {
  stack_hash = hash;
  uptr *stack_trace = traceAllocator.alloc(args.size + 1);
  CHECK_LT(args.size, 1 << kStackSizeBits);
  *stack_trace = args.size + (args.tag << kStackSizeBits);
  internal_memcpy(stack_trace + 1, args.trace, args.size * sizeof(uptr));
  tracePtrs[id] = stack_trace;
}

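// Reconstructs the StackTrace from the packed representation written by
// store(); returns an empty trace for ids that were never stored.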
StackDepotNode::args_type StackDepotNode::load(u32 id) const {
  const uptr *stack_trace = tracePtrs[id];
  if (!stack_trace)
    return {};
  uptr size = *stack_trace & ((1 << kStackSizeBits) - 1);
  uptr tag = *stack_trace >> kStackSizeBits;
  return args_type(stack_trace + 1, size, tag);
}

StackDepotStats StackDepotGetStats() { return theDepot.GetStats(); }

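// A sketch of typical use by the sanitizer runtimes (illustration only):
//   u32 id = StackDepotPut(StackTrace(pcs, size));
//   ...
//   StackTrace trace = StackDepotGet(id);  // same PCs, owned by the depot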
u32 StackDepotPut(StackTrace stack) { return theDepot.Put(stack); }

StackDepotHandle StackDepotPut_WithHandle(StackTrace stack) {
  return StackDepotNode::get_handle(theDepot.Put(stack));
}

StackTrace StackDepotGet(u32 id) {
  return theDepot.Get(id);
}

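// Lock/unlock every shard of the depot; callers typically use this to keep
// the depot consistent across operations such as fork().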
void StackDepotLockAll() {
  theDepot.LockAll();
}

void StackDepotUnlockAll() {
  theDepot.UnlockAll();
}

void StackDepotPrintAll() {
#if !SANITIZER_GO
  theDepot.PrintAll();
#endif
}

StackDepotHandle StackDepotNode::get_handle(u32 id) {
  return StackDepotHandle(&theDepot.nodes[id], id);
}

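// Releases all depot memory; intended for tests only. Previously returned ids
// must not be used afterwards.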
void StackDepotTestOnlyUnmap() {
  theDepot.TestOnlyUnmap();
  tracePtrs.TestOnlyUnmap();
  traceAllocator.TestOnlyUnmap();
}

} // namespace __sanitizer