//===- llvm/ADT/SmallPtrSet.cpp - 'Normally small' pointer set ------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file implements the SmallPtrSet class.  See SmallPtrSet.h for an
// overview of the algorithm.
//
//===----------------------------------------------------------------------===//

#include "llvm/ADT/SmallPtrSet.h"
#include "llvm/ADT/DenseMapInfo.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/MathExtras.h"
#include "llvm/Support/MemAlloc.h"
#include <algorithm>
#include <cassert>
#include <cstdlib>

using namespace llvm;
void SmallPtrSetImplBase::shrink_and_clear() {
  assert(!isSmall() && "Can't shrink a small set!");
  free(CurArray);

  // Reduce the number of buckets.
  unsigned Size = size();
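  // The new table is at most half full: at least 32 buckets, or the next
  // power of two that can hold twice the current number of elements.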
  CurArraySize = Size > 16 ? 1 << (Log2_32_Ceil(Size) + 1) : 32;
  NumNonEmpty = NumTombstones = 0;

  // Install the new array.  Clear all the buckets to empty.
  CurArray = (const void**)safe_malloc(sizeof(void*) * CurArraySize);

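  // memset with -1 fills every bucket with the empty marker, (void*)-1.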
  memset(CurArray, -1, CurArraySize*sizeof(void*));
}

std::pair<const void *const *, bool>
SmallPtrSetImplBase::insert_imp_big(const void *Ptr) {
  if (LLVM_UNLIKELY(size() * 4 >= CurArraySize * 3)) {
    // If more than 3/4 of the array is full, grow.
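    // Tables under 64 buckets jump straight to 128; larger tables double.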
    Grow(CurArraySize < 64 ? 128 : CurArraySize * 2);
  } else if (LLVM_UNLIKELY(CurArraySize - NumNonEmpty < CurArraySize / 8)) {
    // If fewer than 1/8 of the buckets are empty (meaning that many are
    // filled with tombstones), rehash the table at its current size.
    Grow(CurArraySize);
  }

  // Okay, we know we have space.  Find a hash bucket.
  const void **Bucket = const_cast<const void**>(FindBucketFor(Ptr));
  if (*Bucket == Ptr)
    return std::make_pair(Bucket, false); // Already inserted, good.

  // Otherwise, insert it!
  if (*Bucket == getTombstoneMarker())
    --NumTombstones;
  else
    ++NumNonEmpty; // Track density.
  *Bucket = Ptr;
  incrementEpoch();
  return std::make_pair(Bucket, true);
}

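/// FindBucketFor - Return the bucket containing Ptr if it is already in the
/// set. Otherwise return the bucket where Ptr should be inserted: the first
/// tombstone encountered while probing, or the empty bucket that ended the
/// probe.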
const void * const *SmallPtrSetImplBase::FindBucketFor(const void *Ptr) const {
  unsigned Bucket = DenseMapInfo<void *>::getHashValue(Ptr) & (CurArraySize-1);
  unsigned ArraySize = CurArraySize;
  unsigned ProbeAmt = 1;
  const void *const *Array = CurArray;
  const void *const *Tombstone = nullptr;
  while (true) {
    // If we found an empty bucket, the pointer doesn't exist in the set.
    // Return a tombstone if we've seen one so far, or the empty bucket if
    // not.
    if (LLVM_LIKELY(Array[Bucket] == getEmptyMarker()))
      return Tombstone ? Tombstone : Array+Bucket;

    // Found Ptr's bucket?
    if (LLVM_LIKELY(Array[Bucket] == Ptr))
      return Array+Bucket;

    // If this is a tombstone, remember it.  If Ptr ends up not being in the
    // set, we prefer to return a tombstone over a bucket that would require
    // more probing to reach.
    if (Array[Bucket] == getTombstoneMarker() && !Tombstone)
      Tombstone = Array+Bucket;  // Remember the first tombstone found.

    // It's a hash collision or a tombstone. Reprobe.
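    // ProbeAmt grows by one on each miss, giving triangular-number probing,
    // which visits every bucket exactly once when the table size is a power
    // of two.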
    Bucket = (Bucket + ProbeAmt++) & (ArraySize-1);
  }
}

/// Grow - Allocate a larger backing store for the buckets and move the
/// existing elements over.
///
void SmallPtrSetImplBase::Grow(unsigned NewSize) {
  const void **OldBuckets = CurArray;
  const void **OldEnd = EndPointer();
  bool WasSmall = isSmall();

  // Install the new array.  Clear all the buckets to empty.
  const void **NewBuckets = (const void**) safe_malloc(sizeof(void*) * NewSize);

  // Update the members only after the allocation has succeeded.
  CurArray = NewBuckets;
  CurArraySize = NewSize;
  memset(CurArray, -1, NewSize*sizeof(void*));

  // Copy over all valid entries.
  for (const void **BucketPtr = OldBuckets; BucketPtr != OldEnd; ++BucketPtr) {
    // Copy over the element if it is valid.
    const void *Elt = *BucketPtr;
    if (Elt != getTombstoneMarker() && Elt != getEmptyMarker())
      *const_cast<void**>(FindBucketFor(Elt)) = const_cast<void*>(Elt);
  }

  if (!WasSmall)
    free(OldBuckets);
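  // Tombstones were dropped during the re-insertion above, so they no longer
  // count toward the number of occupied buckets.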
  NumNonEmpty -= NumTombstones;
  NumTombstones = 0;
}

SmallPtrSetImplBase::SmallPtrSetImplBase(const void **SmallStorage,
                                         const SmallPtrSetImplBase &that) {
  SmallArray = SmallStorage;

  // If the source is small, the copy lives in our stack space.
  if (that.isSmall()) {
    CurArray = SmallArray;
  // Otherwise, allocate heap space large enough for the copy.
  } else {
    CurArray = (const void**)safe_malloc(sizeof(void*) * that.CurArraySize);
  }

  // Copy over the contents of 'that'.
  CopyHelper(that);
}

SmallPtrSetImplBase::SmallPtrSetImplBase(const void **SmallStorage,
                                         unsigned SmallSize,
                                         SmallPtrSetImplBase &&that) {
  SmallArray = SmallStorage;
  MoveHelper(SmallSize, std::move(that));
}

void SmallPtrSetImplBase::CopyFrom(const SmallPtrSetImplBase &RHS) {
  assert(&RHS != this && "Self-copy should be handled by the caller.");

  if (isSmall() && RHS.isSmall())
    assert(CurArraySize == RHS.CurArraySize &&
           "Cannot assign sets with different small sizes");

  // If we're becoming small, prepare to insert into our stack space
  if (RHS.isSmall()) {
    if (!isSmall())
      free(CurArray);
    CurArray = SmallArray;
  // Otherwise, allocate new heap space (unless we were the same size)
  } else if (CurArraySize != RHS.CurArraySize) {
    if (isSmall())
      CurArray = (const void**)safe_malloc(sizeof(void*) * RHS.CurArraySize);
    else {
      const void **T = (const void**)safe_realloc(CurArray,
                                             sizeof(void*) * RHS.CurArraySize);
      CurArray = T;
    }
  }

  CopyHelper(RHS);
}

void SmallPtrSetImplBase::CopyHelper(const SmallPtrSetImplBase &RHS) {
  // Copy over the new array size
  CurArraySize = RHS.CurArraySize;

  // Copy over the contents from the other set
  std::copy(RHS.CurArray, RHS.EndPointer(), CurArray);

  NumNonEmpty = RHS.NumNonEmpty;
  NumTombstones = RHS.NumTombstones;
}

void SmallPtrSetImplBase::MoveFrom(unsigned SmallSize,
                                   SmallPtrSetImplBase &&RHS) {
  if (!isSmall())
    free(CurArray);
  MoveHelper(SmallSize, std::move(RHS));
}

void SmallPtrSetImplBase::MoveHelper(unsigned SmallSize,
                                     SmallPtrSetImplBase &&RHS) {
  assert(&RHS != this && "Self-move should be handled by the caller.");

  if (RHS.isSmall()) {
    // Copy a small RHS rather than moving.
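    // RHS's elements live in its inline buffer, which cannot be adopted, so
    // copy them into our own inline buffer instead.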
    CurArray = SmallArray;
    std::copy(RHS.CurArray, RHS.CurArray + RHS.NumNonEmpty, CurArray);
  } else {
    CurArray = RHS.CurArray;
    RHS.CurArray = RHS.SmallArray;
  }

  // Copy the rest of the trivial members.
  CurArraySize = RHS.CurArraySize;
  NumNonEmpty = RHS.NumNonEmpty;
  NumTombstones = RHS.NumTombstones;

  // Make the RHS small and empty.
  RHS.CurArraySize = SmallSize;
  assert(RHS.CurArray == RHS.SmallArray);
  RHS.NumNonEmpty = 0;
  RHS.NumTombstones = 0;
}

void SmallPtrSetImplBase::swap(SmallPtrSetImplBase &RHS) {
  if (this == &RHS) return;

  // We can only avoid copying elements if neither set is small.
  if (!this->isSmall() && !RHS.isSmall()) {
    std::swap(this->CurArray, RHS.CurArray);
    std::swap(this->CurArraySize, RHS.CurArraySize);
    std::swap(this->NumNonEmpty, RHS.NumNonEmpty);
    std::swap(this->NumTombstones, RHS.NumTombstones);
    return;
  }

  // FIXME: From here on we assume that both sets have the same small size.

  // If only RHS is small, copy the small elements into LHS and move the pointer
  // from LHS to RHS.
  if (!this->isSmall() && RHS.isSmall()) {
    assert(RHS.CurArray == RHS.SmallArray);
    std::copy(RHS.CurArray, RHS.CurArray + RHS.NumNonEmpty, this->SmallArray);
    std::swap(RHS.CurArraySize, this->CurArraySize);
    std::swap(this->NumNonEmpty, RHS.NumNonEmpty);
    std::swap(this->NumTombstones, RHS.NumTombstones);
    RHS.CurArray = this->CurArray;
    this->CurArray = this->SmallArray;
    return;
  }

  // If only LHS is small, copy the small elements into RHS and move the pointer
  // from RHS to LHS.
  if (this->isSmall() && !RHS.isSmall()) {
    assert(this->CurArray == this->SmallArray);
    std::copy(this->CurArray, this->CurArray + this->NumNonEmpty,
              RHS.SmallArray);
    std::swap(RHS.CurArraySize, this->CurArraySize);
    std::swap(RHS.NumNonEmpty, this->NumNonEmpty);
    std::swap(RHS.NumTombstones, this->NumTombstones);
    this->CurArray = RHS.CurArray;
    RHS.CurArray = RHS.SmallArray;
    return;
  }

  // Both are small, just swap the small elements.
  assert(this->isSmall() && RHS.isSmall());
  unsigned MinNonEmpty = std::min(this->NumNonEmpty, RHS.NumNonEmpty);
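  // Swap the prefix the two sets share, then copy the longer set's tail into
  // the shorter set's small buffer.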
  std::swap_ranges(this->SmallArray, this->SmallArray + MinNonEmpty,
                   RHS.SmallArray);
  if (this->NumNonEmpty > MinNonEmpty) {
    std::copy(this->SmallArray + MinNonEmpty,
              this->SmallArray + this->NumNonEmpty,
              RHS.SmallArray + MinNonEmpty);
  } else {
    std::copy(RHS.SmallArray + MinNonEmpty, RHS.SmallArray + RHS.NumNonEmpty,
              this->SmallArray + MinNonEmpty);
  }
  assert(this->CurArraySize == RHS.CurArraySize);
  std::swap(this->NumNonEmpty, RHS.NumNonEmpty);
  std::swap(this->NumTombstones, RHS.NumTombstones);
}