//===- llvm/ADT/SmallPtrSet.cpp - 'Normally small' pointer set ------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file implements the SmallPtrSet class.  See SmallPtrSet.h for an
// overview of the algorithm.
//
//===----------------------------------------------------------------------===//
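
// Illustrative usage sketch (for orientation only; the public SmallPtrSet
// template and its exact interface are declared in SmallPtrSet.h; Foo, P and
// visit() are placeholders):
//
//   SmallPtrSet<Foo*, 8> Visited;   // up to 8 pointers stored in-place
//   if (Visited.insert(P))          // true when P was not already present
//     visit(P);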

#include "llvm/ADT/SmallPtrSet.h"
#include "llvm/ADT/DenseMapInfo.h"
#include "llvm/Support/MathExtras.h"
#include <algorithm>
#include <cstdlib>
#include <cstring>

using namespace llvm;

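/// shrink_and_clear - Free the heap-allocated bucket array and install a
/// smaller one sized from the old element count, with every bucket reset to
/// the empty marker.  The set is left empty.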
void SmallPtrSetImpl::shrink_and_clear() {
  assert(!isSmall() && "Can't shrink a small set!");
  free(CurArray);

  // Reduce the number of buckets.
  CurArraySize = NumElements > 16 ? 1 << (Log2_32_Ceil(NumElements) + 1) : 32;
  NumElements = NumTombstones = 0;

  // Install the new array.  Clear all the buckets to empty.
  CurArray = (const void**)malloc(sizeof(void*) * (CurArraySize+1));
  assert(CurArray && "Failed to allocate memory?");
  memset(CurArray, -1, CurArraySize*sizeof(void*));

  // The end pointer, always valid, is set to a valid element to help the
  // iterator.
  CurArray[CurArraySize] = 0;
}

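/// insert_imp - Add Ptr to the set.  Returns true if the pointer was not
/// already present (i.e. it was actually inserted), false otherwise.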
bool SmallPtrSetImpl::insert_imp(const void * Ptr) {
  if (isSmall()) {
    // Check to see if it is already in the set.
    for (const void **APtr = SmallArray, **E = SmallArray+NumElements;
         APtr != E; ++APtr)
      if (*APtr == Ptr)
        return false;

    // Nope, there isn't.  If we stay small, just 'pushback' now.
    if (NumElements < CurArraySize-1) {
      SmallArray[NumElements++] = Ptr;
      return true;
    }
    // Otherwise, hit the big set case, which will call grow.
  }

  if (NumElements*4 >= CurArraySize*3) {
    // If more than 3/4 of the array is full, grow.
    Grow(CurArraySize < 64 ? 128 : CurArraySize*2);
  } else if (CurArraySize-(NumElements+NumTombstones) < CurArraySize/8) {
    // If fewer than 1/8 of the buckets are empty (meaning the rest of the
    // unused buckets are tombstones), rehash at the same size to clear the
    // tombstones out.
    Grow(CurArraySize);
  }

  // Okay, we know we have space.  Find a hash bucket.
  const void **Bucket = const_cast<const void**>(FindBucketFor(Ptr));
  if (*Bucket == Ptr) return false; // Already inserted, good.

  // Otherwise, insert it!
  if (*Bucket == getTombstoneMarker())
    --NumTombstones;
  *Bucket = Ptr;
  ++NumElements;  // Track density.
  return true;
}

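/// erase_imp - If Ptr is in the set, remove it and return true; otherwise
/// return false.  In the big representation the vacated bucket becomes a
/// tombstone so existing probe chains are not broken.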
bool SmallPtrSetImpl::erase_imp(const void * Ptr) {
  if (isSmall()) {
    // Check to see if it is in the set.
    for (const void **APtr = SmallArray, **E = SmallArray+NumElements;
         APtr != E; ++APtr)
      if (*APtr == Ptr) {
        // Found it: overwrite this element with the last one and shrink.
        *APtr = E[-1];
        E[-1] = getEmptyMarker();
        --NumElements;
        return true;
      }

    return false;
  }

  // Big representation: find the bucket that would hold Ptr.
  void **Bucket = const_cast<void**>(FindBucketFor(Ptr));
  if (*Bucket != Ptr) return false;  // Not in the set?

  // Set this as a tombstone.
  *Bucket = getTombstoneMarker();
  --NumElements;
  ++NumTombstones;
  return true;
}

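/// FindBucketFor - Probe the bucket array for Ptr.  Returns the bucket holding
/// Ptr if it is present; otherwise returns the bucket an insertion should use:
/// the first tombstone seen on the probe chain, or failing that the empty
/// bucket that terminated the probe.  Probe offsets grow by 1, 2, 3, ..., so
/// successive probes visit Bucket+1, +3, +6, ... (mod the array size).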
const void * const *SmallPtrSetImpl::FindBucketFor(const void *Ptr) const {
  unsigned Bucket = DenseMapInfo<void *>::getHashValue(Ptr) & (CurArraySize-1);
  unsigned ArraySize = CurArraySize;
  unsigned ProbeAmt = 1;
  const void *const *Array = CurArray;
  const void *const *Tombstone = 0;
  while (1) {
    // Found Ptr's bucket?
    if (Array[Bucket] == Ptr)
      return Array+Bucket;

    // If we found an empty bucket, the pointer doesn't exist in the set.
    // Return a tombstone if we've seen one so far, or the empty bucket if
    // not.
    if (Array[Bucket] == getEmptyMarker())
      return Tombstone ? Tombstone : Array+Bucket;

    // If this is a tombstone, remember it.  If Ptr ends up not in the set, we
    // prefer to return the tombstone rather than a later empty bucket, since
    // it takes less probing to reach.
    if (Array[Bucket] == getTombstoneMarker() && !Tombstone)
      Tombstone = Array+Bucket;  // Remember the first tombstone found.

    // It's a hash collision or a tombstone. Reprobe.
    Bucket = (Bucket + ProbeAmt++) & (ArraySize-1);
  }
}

/// Grow - Allocate a larger backing store for the buckets and move it over.
///
void SmallPtrSetImpl::Grow(unsigned NewSize) {
  // Remember the old bucket array so its elements can be reinserted below.
  unsigned OldSize = CurArraySize;

  const void **OldBuckets = CurArray;
  bool WasSmall = isSmall();

  // Install the new array.  Clear all the buckets to empty.
  CurArray = (const void**)malloc(sizeof(void*) * (NewSize+1));
  assert(CurArray && "Failed to allocate memory?");
  CurArraySize = NewSize;
  memset(CurArray, -1, NewSize*sizeof(void*));

  // The end pointer, always valid, is set to a valid element to help the
  // iterator.
  CurArray[NewSize] = 0;

  // Copy over all the elements.
  if (WasSmall) {
    // Small sets store their elements in order.
    for (const void **BucketPtr = OldBuckets, **E = OldBuckets+NumElements;
         BucketPtr != E; ++BucketPtr) {
      const void *Elt = *BucketPtr;
      *const_cast<void**>(FindBucketFor(Elt)) = const_cast<void*>(Elt);
    }
  } else {
    // Copy over all valid entries.
    for (const void **BucketPtr = OldBuckets, **E = OldBuckets+OldSize;
         BucketPtr != E; ++BucketPtr) {
      // Copy over the element if it is valid.
      const void *Elt = *BucketPtr;
      if (Elt != getTombstoneMarker() && Elt != getEmptyMarker())
        *const_cast<void**>(FindBucketFor(Elt)) = const_cast<void*>(Elt);
    }

    free(OldBuckets);
    NumTombstones = 0;
  }
}

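/// SmallPtrSetImpl - Construct this set as a copy of 'that', using
/// 'SmallStorage' as the in-place buffer for the small representation.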
SmallPtrSetImpl::SmallPtrSetImpl(const void **SmallStorage,
                                 const SmallPtrSetImpl& that) {
  SmallArray = SmallStorage;

  // If 'that' is small, the copy fits in our in-place (stack) space.
  if (that.isSmall()) {
    CurArray = SmallArray;
  // Otherwise, allocate heap space big enough to hold the copy.
  } else {
    CurArray = (const void**)malloc(sizeof(void*) * (that.CurArraySize+1));
    assert(CurArray && "Failed to allocate memory?");
  }

  // Copy over the new array size
  CurArraySize = that.CurArraySize;

  // Copy over the contents from the other set
  memcpy(CurArray, that.CurArray, sizeof(void*)*(CurArraySize+1));

  NumElements = that.NumElements;
  NumTombstones = that.NumTombstones;
}

/// CopyFrom - implement operator= from a SmallPtrSet that has the same pointer
/// type, but may have a different small size.
void SmallPtrSetImpl::CopyFrom(const SmallPtrSetImpl &RHS) {
  if (isSmall() && RHS.isSmall())
    assert(CurArraySize == RHS.CurArraySize &&
           "Cannot assign sets with different small sizes");

  // If we're becoming small, prepare to insert into our stack space
  if (RHS.isSmall()) {
    if (!isSmall())
      free(CurArray);
    CurArray = SmallArray;
  // Otherwise, allocate new heap space (unless we were the same size)
  } else if (CurArraySize != RHS.CurArraySize) {
    if (isSmall())
      CurArray = (const void**)malloc(sizeof(void*) * (RHS.CurArraySize+1));
    else
      CurArray = (const void**)realloc(CurArray, sizeof(void*)*(RHS.CurArraySize+1));
    assert(CurArray && "Failed to allocate memory?");
  }

  // Copy over the new array size
  CurArraySize = RHS.CurArraySize;

  // Copy over the contents from the other set
  memcpy(CurArray, RHS.CurArray, sizeof(void*)*(CurArraySize+1));

  NumElements = RHS.NumElements;
  NumTombstones = RHS.NumTombstones;
}

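/// swap - Exchange the contents of this set with RHS.  When neither set is
/// small this is just pointer and counter swaps; if either side is small, the
/// in-place elements must be copied across.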
void SmallPtrSetImpl::swap(SmallPtrSetImpl &RHS) {
  if (this == &RHS) return;

  // We can only avoid copying elements if neither set is small.
  if (!this->isSmall() && !RHS.isSmall()) {
    std::swap(this->CurArray, RHS.CurArray);
    std::swap(this->CurArraySize, RHS.CurArraySize);
    std::swap(this->NumElements, RHS.NumElements);
    std::swap(this->NumTombstones, RHS.NumTombstones);
    return;
  }

  // FIXME: From here on we assume that both sets have the same small size.

  // If only RHS is small, copy the small elements into LHS and move the pointer
  // from LHS to RHS.
  if (!this->isSmall() && RHS.isSmall()) {
    std::copy(RHS.SmallArray, RHS.SmallArray+RHS.CurArraySize,
              this->SmallArray);
    std::swap(this->NumElements, RHS.NumElements);
    std::swap(this->CurArraySize, RHS.CurArraySize);
    RHS.CurArray = this->CurArray;
    RHS.NumTombstones = this->NumTombstones;
    this->CurArray = this->SmallArray;
    this->NumTombstones = 0;
    return;
  }

  // If only LHS is small, copy the small elements into RHS and move the pointer
  // from RHS to LHS.
  if (this->isSmall() && !RHS.isSmall()) {
    std::copy(this->SmallArray, this->SmallArray+this->CurArraySize,
              RHS.SmallArray);
    std::swap(RHS.NumElements, this->NumElements);
    std::swap(RHS.CurArraySize, this->CurArraySize);
    this->CurArray = RHS.CurArray;
    this->NumTombstones = RHS.NumTombstones;
    RHS.CurArray = RHS.SmallArray;
    RHS.NumTombstones = 0;
    return;
  }

  // Both are small, just swap the small elements.
  assert(this->isSmall() && RHS.isSmall());
  assert(this->CurArraySize == RHS.CurArraySize);
  std::swap_ranges(this->SmallArray, this->SmallArray+this->CurArraySize,
                   RHS.SmallArray);
  std::swap(this->NumElements, RHS.NumElements);
}

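// Free the heap-allocated bucket array, if any; the small in-place buffer is
// not owned by this class and needs no cleanup here.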
SmallPtrSetImpl::~SmallPtrSetImpl() {
  if (!isSmall())
    free(CurArray);
}