//===-- RangeMap.h ----------------------------------------------*- C++ -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//

#ifndef LLDB_UTILITY_RANGEMAP_H
#define LLDB_UTILITY_RANGEMAP_H

#include <algorithm>
#include <cstdint>
#include <vector>

#include "llvm/ADT/SmallVector.h"

#include "lldb/lldb-private.h"

// Uncomment to make sure all Range objects are sorted when needed
//#define ASSERT_RANGEMAP_ARE_SORTED
21351290Sdim
namespace lldb_private {

// Templatized classes for dealing with generic ranges and also collections of
// ranges, or collections of ranges that have associated data.

// A simple range class where you get to define the type of the range
// base "B", and the type used for the range byte size "S".
template <typename B, typename S> struct Range {
  typedef B BaseType;
  typedef S SizeType;

  BaseType base; // Start address of the range.
  SizeType size; // Byte size of the range; a range is valid iff size > 0.

  Range() : base(0), size(0) {}

  Range(BaseType b, SizeType s) : base(b), size(s) {}

  // Reset the range to start at "b" with a size of zero (invalid).
  void Clear(BaseType b = 0) {
    base = b;
    size = 0;
  }

  BaseType GetRangeBase() const { return base; }

  // Set the start value for the range, and keep the same size.
  void SetRangeBase(BaseType b) { base = b; }

  // Shift the whole range by "slide" without changing its size.
  void Slide(BaseType slide) { base += slide; }

  // Merge "rhs" into this range if the two ranges adjoin or intersect.
  // Returns true if a union was performed, false if the ranges are disjoint
  // (in which case this range is left unchanged).
  bool Union(const Range &rhs) {
    if (DoesAdjoinOrIntersect(rhs)) {
      auto new_end = std::max<BaseType>(GetRangeEnd(), rhs.GetRangeEnd());
      base = std::min<BaseType>(base, rhs.base);
      size = new_end - base;
      return true;
    }
    return false;
  }

  // The end is one past the last byte of the range (half-open interval).
  BaseType GetRangeEnd() const { return base + size; }

  // Set the end of the range; an end at or before the base clamps the size
  // to zero instead of producing a huge unsigned size.
  void SetRangeEnd(BaseType end) {
    if (end > base)
      size = end - base;
    else
      size = 0;
  }

  SizeType GetByteSize() const { return size; }

  void SetByteSize(SizeType s) { size = s; }

  bool IsValid() const { return size > 0; }

  // True if "r" lies within [base, base + size).
  bool Contains(BaseType r) const {
    return (GetRangeBase() <= r) && (r < GetRangeEnd());
  }

  // Like Contains(), but also accepts the one-past-the-end address.
  bool ContainsEndInclusive(BaseType r) const {
    return (GetRangeBase() <= r) && (r <= GetRangeEnd());
  }

  // True if "range" lies entirely within this range.
  bool Contains(const Range &range) const {
    return Contains(range.GetRangeBase()) &&
           ContainsEndInclusive(range.GetRangeEnd());
  }

  // Returns true if the two ranges adjoin or intersect.
  bool DoesAdjoinOrIntersect(const Range &rhs) const {
    const BaseType lhs_base = this->GetRangeBase();
    const BaseType rhs_base = rhs.GetRangeBase();
    const BaseType lhs_end = this->GetRangeEnd();
    const BaseType rhs_end = rhs.GetRangeEnd();
    bool result = (lhs_base <= rhs_end) && (lhs_end >= rhs_base);
    return result;
  }

  // Returns true if the two ranges intersect (overlap by at least one byte;
  // merely touching end-to-start does not count).
  bool DoesIntersect(const Range &rhs) const {
    const BaseType lhs_base = this->GetRangeBase();
    const BaseType rhs_base = rhs.GetRangeBase();
    const BaseType lhs_end = this->GetRangeEnd();
    const BaseType rhs_end = rhs.GetRangeEnd();
    bool result = (lhs_base < rhs_end) && (lhs_end > rhs_base);
    return result;
  }

  // Order by base address first, then by size.
  bool operator<(const Range &rhs) const {
    if (base == rhs.base)
      return size < rhs.size;
    return base < rhs.base;
  }

  bool operator==(const Range &rhs) const {
    return base == rhs.base && size == rhs.size;
  }

  bool operator!=(const Range &rhs) const {
    return base != rhs.base || size != rhs.size;
  }
};
124351290Sdim
125351290Sdim// A range array class where you get to define the type of the ranges
126351290Sdim// that the collection contains.
127351290Sdim
128351290Sdimtemplate <typename B, typename S, unsigned N> class RangeArray {
129351290Sdimpublic:
130351290Sdim  typedef B BaseType;
131351290Sdim  typedef S SizeType;
132351290Sdim  typedef Range<B, S> Entry;
133351290Sdim  typedef llvm::SmallVector<Entry, N> Collection;
134351290Sdim
135351290Sdim  RangeArray() = default;
136351290Sdim
137351290Sdim  ~RangeArray() = default;
138351290Sdim
139351290Sdim  void Append(const Entry &entry) { m_entries.push_back(entry); }
140351290Sdim
141351290Sdim  void Append(B base, S size) { m_entries.emplace_back(base, size); }
142351290Sdim
143351290Sdim  bool RemoveEntrtAtIndex(uint32_t idx) {
144351290Sdim    if (idx < m_entries.size()) {
145351290Sdim      m_entries.erase(m_entries.begin() + idx);
146351290Sdim      return true;
147351290Sdim    }
148351290Sdim    return false;
149351290Sdim  }
150351290Sdim
151351290Sdim  void Sort() {
152351290Sdim    if (m_entries.size() > 1)
153351290Sdim      std::stable_sort(m_entries.begin(), m_entries.end());
154351290Sdim  }
155351290Sdim
156351290Sdim#ifdef ASSERT_RANGEMAP_ARE_SORTED
157351290Sdim  bool IsSorted() const {
158351290Sdim    typename Collection::const_iterator pos, end, prev;
159351290Sdim    for (pos = m_entries.begin(), end = m_entries.end(), prev = end; pos != end;
160351290Sdim         prev = pos++) {
161351290Sdim      if (prev != end && *pos < *prev)
162351290Sdim        return false;
163351290Sdim    }
164351290Sdim    return true;
165351290Sdim  }
166351290Sdim#endif
167351290Sdim
168351290Sdim  void CombineConsecutiveRanges() {
169351290Sdim#ifdef ASSERT_RANGEMAP_ARE_SORTED
170351290Sdim    assert(IsSorted());
171351290Sdim#endif
172351290Sdim    // Can't combine if ranges if we have zero or one range
173351290Sdim    if (m_entries.size() > 1) {
174351290Sdim      // The list should be sorted prior to calling this function
175351290Sdim      typename Collection::iterator pos;
176351290Sdim      typename Collection::iterator end;
177351290Sdim      typename Collection::iterator prev;
178351290Sdim      bool can_combine = false;
179351290Sdim      // First we determine if we can combine any of the Entry objects so we
180351290Sdim      // don't end up allocating and making a new collection for no reason
181351290Sdim      for (pos = m_entries.begin(), end = m_entries.end(), prev = end;
182351290Sdim           pos != end; prev = pos++) {
183351290Sdim        if (prev != end && prev->DoesAdjoinOrIntersect(*pos)) {
184351290Sdim          can_combine = true;
185351290Sdim          break;
186351290Sdim        }
187351290Sdim      }
188351290Sdim
189351290Sdim      // We we can combine at least one entry, then we make a new collection
190351290Sdim      // and populate it accordingly, and then swap it into place.
191351290Sdim      if (can_combine) {
192351290Sdim        Collection minimal_ranges;
193351290Sdim        for (pos = m_entries.begin(), end = m_entries.end(), prev = end;
194351290Sdim             pos != end; prev = pos++) {
195351290Sdim          if (prev != end && prev->DoesAdjoinOrIntersect(*pos))
196351290Sdim            minimal_ranges.back().SetRangeEnd(
197351290Sdim                std::max<BaseType>(prev->GetRangeEnd(), pos->GetRangeEnd()));
198351290Sdim          else
199351290Sdim            minimal_ranges.push_back(*pos);
200351290Sdim        }
201351290Sdim        // Use the swap technique in case our new vector is much smaller. We
202351290Sdim        // must swap when using the STL because std::vector objects never
203351290Sdim        // release or reduce the memory once it has been allocated/reserved.
204351290Sdim        m_entries.swap(minimal_ranges);
205351290Sdim      }
206351290Sdim    }
207351290Sdim  }
208351290Sdim
209351290Sdim  BaseType GetMinRangeBase(BaseType fail_value) const {
210351290Sdim#ifdef ASSERT_RANGEMAP_ARE_SORTED
211351290Sdim    assert(IsSorted());
212351290Sdim#endif
213351290Sdim    if (m_entries.empty())
214351290Sdim      return fail_value;
215351290Sdim    // m_entries must be sorted, so if we aren't empty, we grab the first
216351290Sdim    // range's base
217351290Sdim    return m_entries.front().GetRangeBase();
218351290Sdim  }
219351290Sdim
220351290Sdim  BaseType GetMaxRangeEnd(BaseType fail_value) const {
221351290Sdim#ifdef ASSERT_RANGEMAP_ARE_SORTED
222351290Sdim    assert(IsSorted());
223351290Sdim#endif
224351290Sdim    if (m_entries.empty())
225351290Sdim      return fail_value;
226351290Sdim    // m_entries must be sorted, so if we aren't empty, we grab the last
227351290Sdim    // range's end
228351290Sdim    return m_entries.back().GetRangeEnd();
229351290Sdim  }
230351290Sdim
231351290Sdim  void Slide(BaseType slide) {
232351290Sdim    typename Collection::iterator pos, end;
233351290Sdim    for (pos = m_entries.begin(), end = m_entries.end(); pos != end; ++pos)
234351290Sdim      pos->Slide(slide);
235351290Sdim  }
236351290Sdim
237351290Sdim  void Clear() { m_entries.clear(); }
238351290Sdim
239351290Sdim  bool IsEmpty() const { return m_entries.empty(); }
240351290Sdim
241351290Sdim  size_t GetSize() const { return m_entries.size(); }
242351290Sdim
243351290Sdim  const Entry *GetEntryAtIndex(size_t i) const {
244351290Sdim    return ((i < m_entries.size()) ? &m_entries[i] : nullptr);
245351290Sdim  }
246351290Sdim
247351290Sdim  // Clients must ensure that "i" is a valid index prior to calling this
248351290Sdim  // function
249351290Sdim  const Entry &GetEntryRef(size_t i) const { return m_entries[i]; }
250351290Sdim
251351290Sdim  Entry *Back() { return (m_entries.empty() ? nullptr : &m_entries.back()); }
252351290Sdim
253351290Sdim  const Entry *Back() const {
254351290Sdim    return (m_entries.empty() ? nullptr : &m_entries.back());
255351290Sdim  }
256351290Sdim
257351290Sdim  static bool BaseLessThan(const Entry &lhs, const Entry &rhs) {
258351290Sdim    return lhs.GetRangeBase() < rhs.GetRangeBase();
259351290Sdim  }
260351290Sdim
261351290Sdim  uint32_t FindEntryIndexThatContains(B addr) const {
262351290Sdim#ifdef ASSERT_RANGEMAP_ARE_SORTED
263351290Sdim    assert(IsSorted());
264351290Sdim#endif
265351290Sdim    if (!m_entries.empty()) {
266351290Sdim      Entry entry(addr, 1);
267351290Sdim      typename Collection::const_iterator begin = m_entries.begin();
268351290Sdim      typename Collection::const_iterator end = m_entries.end();
269351290Sdim      typename Collection::const_iterator pos =
270351290Sdim          std::lower_bound(begin, end, entry, BaseLessThan);
271351290Sdim
272351290Sdim      if (pos != end && pos->Contains(addr)) {
273351290Sdim        return std::distance(begin, pos);
274351290Sdim      } else if (pos != begin) {
275351290Sdim        --pos;
276351290Sdim        if (pos->Contains(addr))
277351290Sdim          return std::distance(begin, pos);
278351290Sdim      }
279351290Sdim    }
280351290Sdim    return UINT32_MAX;
281351290Sdim  }
282351290Sdim
283351290Sdim  const Entry *FindEntryThatContains(B addr) const {
284351290Sdim#ifdef ASSERT_RANGEMAP_ARE_SORTED
285351290Sdim    assert(IsSorted());
286351290Sdim#endif
287351290Sdim    if (!m_entries.empty()) {
288351290Sdim      Entry entry(addr, 1);
289351290Sdim      typename Collection::const_iterator begin = m_entries.begin();
290351290Sdim      typename Collection::const_iterator end = m_entries.end();
291351290Sdim      typename Collection::const_iterator pos =
292351290Sdim          std::lower_bound(begin, end, entry, BaseLessThan);
293351290Sdim
294351290Sdim      if (pos != end && pos->Contains(addr)) {
295351290Sdim        return &(*pos);
296351290Sdim      } else if (pos != begin) {
297351290Sdim        --pos;
298351290Sdim        if (pos->Contains(addr)) {
299351290Sdim          return &(*pos);
300351290Sdim        }
301351290Sdim      }
302351290Sdim    }
303351290Sdim    return nullptr;
304351290Sdim  }
305351290Sdim
306351290Sdim  const Entry *FindEntryThatContains(const Entry &range) const {
307351290Sdim#ifdef ASSERT_RANGEMAP_ARE_SORTED
308351290Sdim    assert(IsSorted());
309351290Sdim#endif
310351290Sdim    if (!m_entries.empty()) {
311351290Sdim      typename Collection::const_iterator begin = m_entries.begin();
312351290Sdim      typename Collection::const_iterator end = m_entries.end();
313351290Sdim      typename Collection::const_iterator pos =
314351290Sdim          std::lower_bound(begin, end, range, BaseLessThan);
315351290Sdim
316351290Sdim      if (pos != end && pos->Contains(range)) {
317351290Sdim        return &(*pos);
318351290Sdim      } else if (pos != begin) {
319351290Sdim        --pos;
320351290Sdim        if (pos->Contains(range)) {
321351290Sdim          return &(*pos);
322351290Sdim        }
323351290Sdim      }
324351290Sdim    }
325351290Sdim    return nullptr;
326351290Sdim  }
327351290Sdim
328351290Sdimprotected:
329351290Sdim  Collection m_entries;
330351290Sdim};
331351290Sdim
332351290Sdimtemplate <typename B, typename S> class RangeVector {
333351290Sdimpublic:
334351290Sdim  typedef B BaseType;
335351290Sdim  typedef S SizeType;
336351290Sdim  typedef Range<B, S> Entry;
337351290Sdim  typedef std::vector<Entry> Collection;
338351290Sdim
339351290Sdim  RangeVector() = default;
340351290Sdim
341351290Sdim  ~RangeVector() = default;
342351290Sdim
343351290Sdim  void Append(const Entry &entry) { m_entries.push_back(entry); }
344351290Sdim
345351290Sdim  void Append(B base, S size) { m_entries.emplace_back(base, size); }
346351290Sdim
347351290Sdim  // Insert an item into a sorted list and optionally combine it with any
348351290Sdim  // adjacent blocks if requested.
349351290Sdim  void Insert(const Entry &entry, bool combine) {
350351290Sdim    if (m_entries.empty()) {
351351290Sdim      m_entries.push_back(entry);
352351290Sdim      return;
353351290Sdim    }
354351290Sdim    auto begin = m_entries.begin();
355351290Sdim    auto end = m_entries.end();
356351290Sdim    auto pos = std::lower_bound(begin, end, entry);
357351290Sdim    if (combine) {
358351290Sdim      if (pos != end && pos->Union(entry)) {
359351290Sdim        CombinePrevAndNext(pos);
360351290Sdim        return;
361351290Sdim      }
362351290Sdim      if (pos != begin) {
363351290Sdim        auto prev = pos - 1;
364351290Sdim        if (prev->Union(entry)) {
365351290Sdim          CombinePrevAndNext(prev);
366351290Sdim          return;
367351290Sdim        }
368351290Sdim      }
369351290Sdim    }
370351290Sdim    m_entries.insert(pos, entry);
371351290Sdim  }
372351290Sdim
373351290Sdim  bool RemoveEntryAtIndex(uint32_t idx) {
374351290Sdim    if (idx < m_entries.size()) {
375351290Sdim      m_entries.erase(m_entries.begin() + idx);
376351290Sdim      return true;
377351290Sdim    }
378351290Sdim    return false;
379351290Sdim  }
380351290Sdim
381351290Sdim  void Sort() {
382351290Sdim    if (m_entries.size() > 1)
383351290Sdim      std::stable_sort(m_entries.begin(), m_entries.end());
384351290Sdim  }
385351290Sdim
386351290Sdim#ifdef ASSERT_RANGEMAP_ARE_SORTED
387351290Sdim  bool IsSorted() const {
388351290Sdim    typename Collection::const_iterator pos, end, prev;
389351290Sdim    // First we determine if we can combine any of the Entry objects so we
390351290Sdim    // don't end up allocating and making a new collection for no reason
391351290Sdim    for (pos = m_entries.begin(), end = m_entries.end(), prev = end; pos != end;
392351290Sdim         prev = pos++) {
393351290Sdim      if (prev != end && *pos < *prev)
394351290Sdim        return false;
395351290Sdim    }
396351290Sdim    return true;
397351290Sdim  }
398351290Sdim#endif
399351290Sdim
400351290Sdim  void CombineConsecutiveRanges() {
401351290Sdim#ifdef ASSERT_RANGEMAP_ARE_SORTED
402351290Sdim    assert(IsSorted());
403351290Sdim#endif
404351290Sdim    // Can't combine if ranges if we have zero or one range
405351290Sdim    if (m_entries.size() > 1) {
406351290Sdim      // The list should be sorted prior to calling this function
407351290Sdim      typename Collection::iterator pos;
408351290Sdim      typename Collection::iterator end;
409351290Sdim      typename Collection::iterator prev;
410351290Sdim      bool can_combine = false;
411351290Sdim      // First we determine if we can combine any of the Entry objects so we
412351290Sdim      // don't end up allocating and making a new collection for no reason
413351290Sdim      for (pos = m_entries.begin(), end = m_entries.end(), prev = end;
414351290Sdim           pos != end; prev = pos++) {
415351290Sdim        if (prev != end && prev->DoesAdjoinOrIntersect(*pos)) {
416351290Sdim          can_combine = true;
417351290Sdim          break;
418351290Sdim        }
419351290Sdim      }
420351290Sdim
421351290Sdim      // We we can combine at least one entry, then we make a new collection
422351290Sdim      // and populate it accordingly, and then swap it into place.
423351290Sdim      if (can_combine) {
424351290Sdim        Collection minimal_ranges;
425351290Sdim        for (pos = m_entries.begin(), end = m_entries.end(), prev = end;
426351290Sdim             pos != end; prev = pos++) {
427351290Sdim          if (prev != end && prev->DoesAdjoinOrIntersect(*pos))
428351290Sdim            minimal_ranges.back().SetRangeEnd(
429351290Sdim                std::max<BaseType>(prev->GetRangeEnd(), pos->GetRangeEnd()));
430351290Sdim          else
431351290Sdim            minimal_ranges.push_back(*pos);
432351290Sdim        }
433351290Sdim        // Use the swap technique in case our new vector is much smaller. We
434351290Sdim        // must swap when using the STL because std::vector objects never
435351290Sdim        // release or reduce the memory once it has been allocated/reserved.
436351290Sdim        m_entries.swap(minimal_ranges);
437351290Sdim      }
438351290Sdim    }
439351290Sdim  }
440351290Sdim
441351290Sdim  BaseType GetMinRangeBase(BaseType fail_value) const {
442351290Sdim#ifdef ASSERT_RANGEMAP_ARE_SORTED
443351290Sdim    assert(IsSorted());
444351290Sdim#endif
445351290Sdim    if (m_entries.empty())
446351290Sdim      return fail_value;
447351290Sdim    // m_entries must be sorted, so if we aren't empty, we grab the first
448351290Sdim    // range's base
449351290Sdim    return m_entries.front().GetRangeBase();
450351290Sdim  }
451351290Sdim
452351290Sdim  BaseType GetMaxRangeEnd(BaseType fail_value) const {
453351290Sdim#ifdef ASSERT_RANGEMAP_ARE_SORTED
454351290Sdim    assert(IsSorted());
455351290Sdim#endif
456351290Sdim    if (m_entries.empty())
457351290Sdim      return fail_value;
458351290Sdim    // m_entries must be sorted, so if we aren't empty, we grab the last
459351290Sdim    // range's end
460351290Sdim    return m_entries.back().GetRangeEnd();
461351290Sdim  }
462351290Sdim
463351290Sdim  void Slide(BaseType slide) {
464351290Sdim    typename Collection::iterator pos, end;
465351290Sdim    for (pos = m_entries.begin(), end = m_entries.end(); pos != end; ++pos)
466351290Sdim      pos->Slide(slide);
467351290Sdim  }
468351290Sdim
469351290Sdim  void Clear() { m_entries.clear(); }
470351290Sdim
471351290Sdim  void Reserve(typename Collection::size_type size) { m_entries.reserve(size); }
472351290Sdim
473351290Sdim  bool IsEmpty() const { return m_entries.empty(); }
474351290Sdim
475351290Sdim  size_t GetSize() const { return m_entries.size(); }
476351290Sdim
477351290Sdim  const Entry *GetEntryAtIndex(size_t i) const {
478351290Sdim    return ((i < m_entries.size()) ? &m_entries[i] : nullptr);
479351290Sdim  }
480351290Sdim
481351290Sdim  // Clients must ensure that "i" is a valid index prior to calling this
482351290Sdim  // function
483351290Sdim  Entry &GetEntryRef(size_t i) { return m_entries[i]; }
484351290Sdim  const Entry &GetEntryRef(size_t i) const { return m_entries[i]; }
485351290Sdim
486351290Sdim  Entry *Back() { return (m_entries.empty() ? nullptr : &m_entries.back()); }
487351290Sdim
488351290Sdim  const Entry *Back() const {
489351290Sdim    return (m_entries.empty() ? nullptr : &m_entries.back());
490351290Sdim  }
491351290Sdim
492351290Sdim  static bool BaseLessThan(const Entry &lhs, const Entry &rhs) {
493351290Sdim    return lhs.GetRangeBase() < rhs.GetRangeBase();
494351290Sdim  }
495351290Sdim
496351290Sdim  uint32_t FindEntryIndexThatContains(B addr) const {
497351290Sdim#ifdef ASSERT_RANGEMAP_ARE_SORTED
498351290Sdim    assert(IsSorted());
499351290Sdim#endif
500351290Sdim    if (!m_entries.empty()) {
501351290Sdim      Entry entry(addr, 1);
502351290Sdim      typename Collection::const_iterator begin = m_entries.begin();
503351290Sdim      typename Collection::const_iterator end = m_entries.end();
504351290Sdim      typename Collection::const_iterator pos =
505351290Sdim          std::lower_bound(begin, end, entry, BaseLessThan);
506351290Sdim
507351290Sdim      if (pos != end && pos->Contains(addr)) {
508351290Sdim        return std::distance(begin, pos);
509351290Sdim      } else if (pos != begin) {
510351290Sdim        --pos;
511351290Sdim        if (pos->Contains(addr))
512351290Sdim          return std::distance(begin, pos);
513351290Sdim      }
514351290Sdim    }
515351290Sdim    return UINT32_MAX;
516351290Sdim  }
517351290Sdim
518351290Sdim  const Entry *FindEntryThatContains(B addr) const {
519351290Sdim#ifdef ASSERT_RANGEMAP_ARE_SORTED
520351290Sdim    assert(IsSorted());
521351290Sdim#endif
522351290Sdim    if (!m_entries.empty()) {
523351290Sdim      Entry entry(addr, 1);
524351290Sdim      typename Collection::const_iterator begin = m_entries.begin();
525351290Sdim      typename Collection::const_iterator end = m_entries.end();
526351290Sdim      typename Collection::const_iterator pos =
527351290Sdim          std::lower_bound(begin, end, entry, BaseLessThan);
528351290Sdim
529351290Sdim      if (pos != end && pos->Contains(addr)) {
530351290Sdim        return &(*pos);
531351290Sdim      } else if (pos != begin) {
532351290Sdim        --pos;
533351290Sdim        if (pos->Contains(addr)) {
534351290Sdim          return &(*pos);
535351290Sdim        }
536351290Sdim      }
537351290Sdim    }
538351290Sdim    return nullptr;
539351290Sdim  }
540351290Sdim
541351290Sdim  const Entry *FindEntryThatContains(const Entry &range) const {
542351290Sdim#ifdef ASSERT_RANGEMAP_ARE_SORTED
543351290Sdim    assert(IsSorted());
544351290Sdim#endif
545351290Sdim    if (!m_entries.empty()) {
546351290Sdim      typename Collection::const_iterator begin = m_entries.begin();
547351290Sdim      typename Collection::const_iterator end = m_entries.end();
548351290Sdim      typename Collection::const_iterator pos =
549351290Sdim          std::lower_bound(begin, end, range, BaseLessThan);
550351290Sdim
551351290Sdim      if (pos != end && pos->Contains(range)) {
552351290Sdim        return &(*pos);
553351290Sdim      } else if (pos != begin) {
554351290Sdim        --pos;
555351290Sdim        if (pos->Contains(range)) {
556351290Sdim          return &(*pos);
557351290Sdim        }
558351290Sdim      }
559351290Sdim    }
560351290Sdim    return nullptr;
561351290Sdim  }
562351290Sdim
563351290Sdimprotected:
564351290Sdim  void CombinePrevAndNext(typename Collection::iterator pos) {
565351290Sdim    // Check if the prev or next entries in case they need to be unioned with
566351290Sdim    // the entry pointed to by "pos".
567351290Sdim    if (pos != m_entries.begin()) {
568351290Sdim      auto prev = pos - 1;
569351290Sdim      if (prev->Union(*pos))
570351290Sdim        m_entries.erase(pos);
571351290Sdim      pos = prev;
572351290Sdim    }
573351290Sdim
574351290Sdim    auto end = m_entries.end();
575351290Sdim    if (pos != end) {
576351290Sdim      auto next = pos + 1;
577351290Sdim      if (next != end) {
578351290Sdim        if (pos->Union(*next))
579351290Sdim          m_entries.erase(next);
580351290Sdim      }
581351290Sdim    }
582351290Sdim    return;
583351290Sdim  }
584351290Sdim
585351290Sdim  Collection m_entries;
586351290Sdim};
587351290Sdim
588351290Sdim// A simple range  with data class where you get to define the type of
589351290Sdim// the range base "B", the type used for the range byte size "S", and the type
590351290Sdim// for the associated data "T".
591351290Sdimtemplate <typename B, typename S, typename T>
592351290Sdimstruct RangeData : public Range<B, S> {
593351290Sdim  typedef T DataType;
594351290Sdim
595351290Sdim  DataType data;
596351290Sdim
597351290Sdim  RangeData() : Range<B, S>(), data() {}
598351290Sdim
599351290Sdim  RangeData(B base, S size) : Range<B, S>(base, size), data() {}
600351290Sdim
601351290Sdim  RangeData(B base, S size, DataType d) : Range<B, S>(base, size), data(d) {}
602351290Sdim};
603351290Sdim
604360784Sdimtemplate <typename B, typename S, typename T, unsigned N = 0,
605360784Sdim          class Compare = std::less<T>>
606351290Sdimclass RangeDataVector {
607351290Sdimpublic:
608351290Sdim  typedef lldb_private::Range<B, S> Range;
609351290Sdim  typedef RangeData<B, S, T> Entry;
610351290Sdim  typedef llvm::SmallVector<Entry, N> Collection;
611351290Sdim
612360784Sdim  RangeDataVector(Compare compare = Compare()) : m_compare(compare) {}
613351290Sdim
614351290Sdim  ~RangeDataVector() = default;
615351290Sdim
616351290Sdim  void Append(const Entry &entry) { m_entries.push_back(entry); }
617351290Sdim
618351290Sdim  void Sort() {
619351290Sdim    if (m_entries.size() > 1)
620360784Sdim      std::stable_sort(m_entries.begin(), m_entries.end(),
621360784Sdim                       [&compare = m_compare](const Entry &a, const Entry &b) {
622360784Sdim                         if (a.base != b.base)
623360784Sdim                           return a.base < b.base;
624360784Sdim                         if (a.size != b.size)
625360784Sdim                           return a.size < b.size;
626360784Sdim                         return compare(a.data, b.data);
627360784Sdim                       });
628351290Sdim  }
629351290Sdim
#ifdef ASSERT_RANGEMAP_ARE_SORTED
  // Returns true if the entries are sorted by operator< on the range part.
  bool IsSorted() const {
    typename Collection::const_iterator pos, end, prev;
    for (pos = m_entries.begin(), end = m_entries.end(), prev = end; pos != end;
         prev = pos++) {
      if (prev != end && *pos < *prev)
        return false;
    }
    return true;
  }
#endif
643351290Sdim
644351290Sdim  void CombineConsecutiveEntriesWithEqualData() {
645351290Sdim#ifdef ASSERT_RANGEMAP_ARE_SORTED
646351290Sdim    assert(IsSorted());
647351290Sdim#endif
648351290Sdim    typename Collection::iterator pos;
649351290Sdim    typename Collection::iterator end;
650351290Sdim    typename Collection::iterator prev;
651351290Sdim    bool can_combine = false;
652351290Sdim    // First we determine if we can combine any of the Entry objects so we
653351290Sdim    // don't end up allocating and making a new collection for no reason
654351290Sdim    for (pos = m_entries.begin(), end = m_entries.end(), prev = end; pos != end;
655351290Sdim         prev = pos++) {
656351290Sdim      if (prev != end && prev->data == pos->data) {
657351290Sdim        can_combine = true;
658351290Sdim        break;
659351290Sdim      }
660351290Sdim    }
661351290Sdim
662351290Sdim    // We we can combine at least one entry, then we make a new collection and
663351290Sdim    // populate it accordingly, and then swap it into place.
664351290Sdim    if (can_combine) {
665351290Sdim      Collection minimal_ranges;
666351290Sdim      for (pos = m_entries.begin(), end = m_entries.end(), prev = end;
667351290Sdim           pos != end; prev = pos++) {
668351290Sdim        if (prev != end && prev->data == pos->data)
669351290Sdim          minimal_ranges.back().SetRangeEnd(pos->GetRangeEnd());
670351290Sdim        else
671351290Sdim          minimal_ranges.push_back(*pos);
672351290Sdim      }
673351290Sdim      // Use the swap technique in case our new vector is much smaller. We must
674351290Sdim      // swap when using the STL because std::vector objects never release or
675351290Sdim      // reduce the memory once it has been allocated/reserved.
676351290Sdim      m_entries.swap(minimal_ranges);
677351290Sdim    }
678351290Sdim  }
679351290Sdim
680351290Sdim  void Clear() { m_entries.clear(); }
681351290Sdim
682351290Sdim  bool IsEmpty() const { return m_entries.empty(); }
683351290Sdim
684351290Sdim  size_t GetSize() const { return m_entries.size(); }
685351290Sdim
686351290Sdim  const Entry *GetEntryAtIndex(size_t i) const {
687351290Sdim    return ((i < m_entries.size()) ? &m_entries[i] : nullptr);
688351290Sdim  }
689351290Sdim
690351290Sdim  Entry *GetMutableEntryAtIndex(size_t i) {
691351290Sdim    return ((i < m_entries.size()) ? &m_entries[i] : nullptr);
692351290Sdim  }
693351290Sdim
694351290Sdim  // Clients must ensure that "i" is a valid index prior to calling this
695351290Sdim  // function
696351290Sdim  Entry &GetEntryRef(size_t i) { return m_entries[i]; }
697351290Sdim  const Entry &GetEntryRef(size_t i) const { return m_entries[i]; }
698351290Sdim
699351290Sdim  static bool BaseLessThan(const Entry &lhs, const Entry &rhs) {
700351290Sdim    return lhs.GetRangeBase() < rhs.GetRangeBase();
701351290Sdim  }
702351290Sdim
703351290Sdim  uint32_t FindEntryIndexThatContains(B addr) const {
704351290Sdim    const Entry *entry = FindEntryThatContains(addr);
705351290Sdim    if (entry)
706351290Sdim      return std::distance(m_entries.begin(), entry);
707351290Sdim    return UINT32_MAX;
708351290Sdim  }
709351290Sdim
710351290Sdim  uint32_t FindEntryIndexesThatContain(B addr,
711351290Sdim                                       std::vector<uint32_t> &indexes) const {
712351290Sdim#ifdef ASSERT_RANGEMAP_ARE_SORTED
713351290Sdim    assert(IsSorted());
714351290Sdim#endif
715360784Sdim    // Search the entries until the first entry that has a larger base address
716360784Sdim    // than `addr`. As m_entries is sorted by their base address, all following
717360784Sdim    // entries can't contain `addr` as their base address is already larger.
718360784Sdim    for (const auto &entry : m_entries) {
719360784Sdim      if (entry.Contains(addr))
720360784Sdim        indexes.push_back(entry.data);
721360784Sdim      else if (entry.GetRangeBase() > addr)
722360784Sdim        break;
723351290Sdim    }
724351290Sdim    return indexes.size();
725351290Sdim  }
726351290Sdim
727351290Sdim  Entry *FindEntryThatContains(B addr) {
728351290Sdim    return const_cast<Entry *>(
729351290Sdim        static_cast<const RangeDataVector *>(this)->FindEntryThatContains(
730351290Sdim            addr));
731351290Sdim  }
732351290Sdim
733351290Sdim  const Entry *FindEntryThatContains(B addr) const {
734351290Sdim    return FindEntryThatContains(Entry(addr, 1));
735351290Sdim  }
736351290Sdim
  // Return an entry whose range contains the entire given "range", or nullptr
  // if no entry does.  m_entries must be sorted by base address.
  const Entry *FindEntryThatContains(const Entry &range) const {
#ifdef ASSERT_RANGEMAP_ARE_SORTED
    assert(IsSorted());
#endif
    if (!m_entries.empty()) {
      typename Collection::const_iterator begin = m_entries.begin();
      typename Collection::const_iterator end = m_entries.end();
      // Locate the first entry whose base address is >= range's base.
      typename Collection::const_iterator pos =
          std::lower_bound(begin, end, range, BaseLessThan);

      // Entries that start before range's base may still be large enough to
      // contain it (ranges may overlap), so walk backwards while the
      // preceding entry also contains "range".
      while (pos != begin && pos[-1].Contains(range))
        --pos;

      if (pos != end && pos->Contains(range))
        return &(*pos);
    }
    return nullptr;
  }
755351290Sdim
756351290Sdim  const Entry *FindEntryStartsAt(B addr) const {
757351290Sdim#ifdef ASSERT_RANGEMAP_ARE_SORTED
758351290Sdim    assert(IsSorted());
759351290Sdim#endif
760351290Sdim    if (!m_entries.empty()) {
761351290Sdim      auto begin = m_entries.begin(), end = m_entries.end();
762351290Sdim      auto pos = std::lower_bound(begin, end, Entry(addr, 1), BaseLessThan);
763351290Sdim      if (pos != end && pos->base == addr)
764351290Sdim        return &(*pos);
765351290Sdim    }
766351290Sdim    return nullptr;
767351290Sdim  }
768351290Sdim
  // This method will return the entry that contains the given address, or the
  // entry following that address.  If you give it an address of 0 and the
  // first entry starts at address 0x100, you will get the entry at 0x100.
  //
  // For most uses, FindEntryThatContains is the correct one to use, this is a
  // less commonly needed behavior.  It was added for core file memory regions,
  // where we want to present a gap in the memory regions as a distinct region,
  // so we need to know the start address of the next memory section that
  // exists.
  const Entry *FindEntryThatContainsOrFollows(B addr) const {
#ifdef ASSERT_RANGEMAP_ARE_SORTED
    assert(IsSorted());
#endif
    if (!m_entries.empty()) {
      typename Collection::const_iterator begin = m_entries.begin();
      typename Collection::const_iterator end = m_entries.end();
      // Find the first entry whose end address is greater than "addr", i.e.
      // the first entry that either contains "addr" or starts after it.
      typename Collection::const_iterator pos =
          std::lower_bound(m_entries.begin(), end, addr,
                           [](const Entry &lhs, B rhs_base) -> bool {
                             return lhs.GetRangeEnd() <= rhs_base;
                           });

      // Overlapping ranges that start earlier may also contain "addr"; back
      // up to the earliest such entry.
      while (pos != begin && pos[-1].Contains(addr))
        --pos;

      if (pos != end)
        return &(*pos);
    }
    return nullptr;
  }
799351290Sdim
800351290Sdim  Entry *Back() { return (m_entries.empty() ? nullptr : &m_entries.back()); }
801351290Sdim
802351290Sdim  const Entry *Back() const {
803351290Sdim    return (m_entries.empty() ? nullptr : &m_entries.back());
804351290Sdim  }
805351290Sdim
806351290Sdimprotected:
807351290Sdim  Collection m_entries;
808360784Sdim  Compare m_compare;
809351290Sdim};
810351290Sdim
// A simple address-with-data class where you get to define the type of the
// address "B" and the type for the associated data "T". Unlike the ranged
// classes above, each entry represents a single address and has no size.
template <typename B, typename T> struct AddressData {
  typedef B BaseType;
  typedef T DataType;

  BaseType addr;
  DataType data;

  AddressData() : addr(), data() {}

  AddressData(B a, DataType d) : addr(a), data(d) {}

  // Order by address first, then by data so sorting is a total order and
  // stable across equal addresses.
  bool operator<(const AddressData &rhs) const {
    if (this->addr == rhs.addr)
      return this->data < rhs.data;
    return this->addr < rhs.addr;
  }

  bool operator==(const AddressData &rhs) const {
    return this->addr == rhs.addr && this->data == rhs.data;
  }

  // Logical negation of operator==.  The previous implementation compared
  // data with "==" instead of "!=", so two entries with equal addresses but
  // different data incorrectly compared as not-unequal.
  bool operator!=(const AddressData &rhs) const {
    return this->addr != rhs.addr || this->data != rhs.data;
  }
};
839351290Sdim
840351290Sdimtemplate <typename B, typename T, unsigned N> class AddressDataArray {
841351290Sdimpublic:
842351290Sdim  typedef AddressData<B, T> Entry;
843351290Sdim  typedef llvm::SmallVector<Entry, N> Collection;
844351290Sdim
845351290Sdim  AddressDataArray() = default;
846351290Sdim
847351290Sdim  ~AddressDataArray() = default;
848351290Sdim
849351290Sdim  void Append(const Entry &entry) { m_entries.push_back(entry); }
850351290Sdim
851351290Sdim  void Sort() {
852351290Sdim    if (m_entries.size() > 1)
853351290Sdim      std::stable_sort(m_entries.begin(), m_entries.end());
854351290Sdim  }
855351290Sdim
856351290Sdim#ifdef ASSERT_RANGEMAP_ARE_SORTED
857351290Sdim  bool IsSorted() const {
858351290Sdim    typename Collection::const_iterator pos, end, prev;
859351290Sdim    // First we determine if we can combine any of the Entry objects so we
860351290Sdim    // don't end up allocating and making a new collection for no reason
861351290Sdim    for (pos = m_entries.begin(), end = m_entries.end(), prev = end; pos != end;
862351290Sdim         prev = pos++) {
863351290Sdim      if (prev != end && *pos < *prev)
864351290Sdim        return false;
865351290Sdim    }
866351290Sdim    return true;
867351290Sdim  }
868351290Sdim#endif
869351290Sdim
870351290Sdim  void Clear() { m_entries.clear(); }
871351290Sdim
872351290Sdim  bool IsEmpty() const { return m_entries.empty(); }
873351290Sdim
874351290Sdim  size_t GetSize() const { return m_entries.size(); }
875351290Sdim
876351290Sdim  const Entry *GetEntryAtIndex(size_t i) const {
877351290Sdim    return ((i < m_entries.size()) ? &m_entries[i] : nullptr);
878351290Sdim  }
879351290Sdim
880351290Sdim  // Clients must ensure that "i" is a valid index prior to calling this
881351290Sdim  // function
882351290Sdim  const Entry &GetEntryRef(size_t i) const { return m_entries[i]; }
883351290Sdim
884351290Sdim  static bool BaseLessThan(const Entry &lhs, const Entry &rhs) {
885351290Sdim    return lhs.addr < rhs.addr;
886351290Sdim  }
887351290Sdim
888351290Sdim  Entry *FindEntry(B addr, bool exact_match_only) {
889351290Sdim#ifdef ASSERT_RANGEMAP_ARE_SORTED
890351290Sdim    assert(IsSorted());
891351290Sdim#endif
892351290Sdim    if (!m_entries.empty()) {
893351290Sdim      Entry entry;
894351290Sdim      entry.addr = addr;
895351290Sdim      typename Collection::iterator begin = m_entries.begin();
896351290Sdim      typename Collection::iterator end = m_entries.end();
897351290Sdim      typename Collection::iterator pos =
898351290Sdim          std::lower_bound(begin, end, entry, BaseLessThan);
899351290Sdim
900351290Sdim      while (pos != begin && pos[-1].addr == addr)
901351290Sdim        --pos;
902351290Sdim
903351290Sdim      if (pos != end) {
904351290Sdim        if (pos->addr == addr || !exact_match_only)
905351290Sdim          return &(*pos);
906351290Sdim      }
907351290Sdim    }
908351290Sdim    return nullptr;
909351290Sdim  }
910351290Sdim
911351290Sdim  const Entry *FindNextEntry(const Entry *entry) {
912351290Sdim    if (entry >= &*m_entries.begin() && entry + 1 < &*m_entries.end())
913351290Sdim      return entry + 1;
914351290Sdim    return nullptr;
915351290Sdim  }
916351290Sdim
917351290Sdim  Entry *Back() { return (m_entries.empty() ? nullptr : &m_entries.back()); }
918351290Sdim
919351290Sdim  const Entry *Back() const {
920351290Sdim    return (m_entries.empty() ? nullptr : &m_entries.back());
921351290Sdim  }
922351290Sdim
923351290Sdimprotected:
924351290Sdim  Collection m_entries;
925351290Sdim};
926351290Sdim
927351290Sdim} // namespace lldb_private
928351290Sdim
929351290Sdim#endif // LLDB_UTILITY_RANGEMAP_H
930