//===-- RangeMap.h ----------------------------------------------*- C++ -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//

#ifndef LLDB_UTILITY_RANGEMAP_H
#define LLDB_UTILITY_RANGEMAP_H

#include <algorithm>
#include <vector>

#include "llvm/ADT/SmallVector.h"

#include "lldb/lldb-private.h"

// Uncomment to make sure all Range objects are sorted when needed
//#define ASSERT_RANGEMAP_ARE_SORTED

namespace lldb_private {

// Templatized classes for dealing with generic ranges and also collections of
// ranges, or collections of ranges that have associated data.

// A simple range class where you get to define the type of the range
// base "B", and the type used for the range byte size "S".
template <typename B, typename S> struct Range {
  typedef B BaseType;
  typedef S SizeType;

  BaseType base;
  SizeType size;

  Range() : base(0), size(0) {}

  Range(BaseType b, SizeType s) : base(b), size(s) {}

  void Clear(BaseType b = 0) {
    base = b;
    size = 0;
  }

  BaseType GetRangeBase() const { return base; }

  // Set the start value for the range, and keep the same size.
  void SetRangeBase(BaseType b) { base = b; }

  void Slide(BaseType slide) { base += slide; }

  bool Union(const Range &rhs) {
    if (DoesAdjoinOrIntersect(rhs)) {
      auto new_end = std::max<BaseType>(GetRangeEnd(), rhs.GetRangeEnd());
      base = std::min<BaseType>(base, rhs.base);
      size = new_end - base;
      return true;
    }
    return false;
  }

  BaseType GetRangeEnd() const { return base + size; }

  void SetRangeEnd(BaseType end) {
    if (end > base)
      size = end - base;
    else
      size = 0;
  }

  SizeType GetByteSize() const { return size; }

  void SetByteSize(SizeType s) { size = s; }

  bool IsValid() const { return size > 0; }

  bool Contains(BaseType r) const {
    return (GetRangeBase() <= r) && (r < GetRangeEnd());
  }

  bool ContainsEndInclusive(BaseType r) const {
    return (GetRangeBase() <= r) && (r <= GetRangeEnd());
  }

  bool Contains(const Range &range) const {
    return Contains(range.GetRangeBase()) &&
           ContainsEndInclusive(range.GetRangeEnd());
  }

  // Returns true if the two ranges adjoin or intersect
  bool DoesAdjoinOrIntersect(const Range &rhs) const {
    const BaseType lhs_base = this->GetRangeBase();
    const BaseType rhs_base = rhs.GetRangeBase();
    const BaseType lhs_end = this->GetRangeEnd();
    const BaseType rhs_end = rhs.GetRangeEnd();
    bool result = (lhs_base <= rhs_end) && (lhs_end >= rhs_base);
    return result;
  }

  // Returns true if the two ranges intersect
  bool DoesIntersect(const Range &rhs) const {
    const BaseType lhs_base = this->GetRangeBase();
    const BaseType rhs_base = rhs.GetRangeBase();
    const BaseType lhs_end = this->GetRangeEnd();
    const BaseType rhs_end = rhs.GetRangeEnd();
    bool result = (lhs_base < rhs_end) && (lhs_end > rhs_base);
    return result;
  }

  bool operator<(const Range &rhs) const {
    if (base == rhs.base)
      return size < rhs.size;
    return base < rhs.base;
  }

  bool operator==(const Range &rhs) const {
    return base == rhs.base && size == rhs.size;
  }

  bool operator!=(const Range &rhs) const {
    return base != rhs.base || size != rhs.size;
  }
};
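
// A minimal usage sketch (illustrative only, not part of this header). Note
// that ranges are half-open, so two ranges that merely touch adjoin but do
// not intersect:
//
//   Range<uint32_t, uint32_t> r1(0x1000, 0x100); // [0x1000, 0x1100)
//   Range<uint32_t, uint32_t> r2(0x1100, 0x100); // [0x1100, 0x1200)
//   r1.Contains(0x10ff);          // true: last byte of r1
//   r1.DoesIntersect(r2);         // false: the ranges only touch
//   r1.DoesAdjoinOrIntersect(r2); // true: they are adjacent
//   r1.Union(r2);                 // true: r1 becomes [0x1000, 0x1200)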

template <typename B, typename S, unsigned N = 0> class RangeVector {
public:
  typedef B BaseType;
  typedef S SizeType;
  typedef Range<B, S> Entry;
  typedef llvm::SmallVector<Entry, N> Collection;

  RangeVector() = default;

  ~RangeVector() = default;

  void Append(const Entry &entry) { m_entries.push_back(entry); }

  void Append(B base, S size) { m_entries.emplace_back(base, size); }

  // Insert an item into a sorted list and optionally combine it with any
  // adjacent blocks if requested.
  void Insert(const Entry &entry, bool combine) {
    if (m_entries.empty()) {
      m_entries.push_back(entry);
      return;
    }
    auto begin = m_entries.begin();
    auto end = m_entries.end();
    auto pos = std::lower_bound(begin, end, entry);
    if (combine) {
      if (pos != end && pos->Union(entry)) {
        CombinePrevAndNext(pos);
        return;
      }
      if (pos != begin) {
        auto prev = pos - 1;
        if (prev->Union(entry)) {
          CombinePrevAndNext(prev);
          return;
        }
      }
    }
    m_entries.insert(pos, entry);
  }
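
  // A minimal sketch of Insert() with combining (illustrative values):
  //
  //   RangeVector<uint32_t, uint32_t, 8> rv;
  //   rv.Insert({0x100, 0x10}, /*combine=*/true); // {[0x100, 0x110)}
  //   rv.Insert({0x120, 0x10}, /*combine=*/true); // {[0x100, 0x110),
  //                                               //  [0x120, 0x130)}
  //   rv.Insert({0x110, 0x10}, /*combine=*/true); // adjoins both neighbors,
  //                                               // so all three merge into
  //                                               // {[0x100, 0x130)}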

  bool RemoveEntryAtIndex(uint32_t idx) {
    if (idx < m_entries.size()) {
      m_entries.erase(m_entries.begin() + idx);
      return true;
    }
    return false;
  }

  void Sort() {
    if (m_entries.size() > 1)
      std::stable_sort(m_entries.begin(), m_entries.end());
  }

#ifdef ASSERT_RANGEMAP_ARE_SORTED
  bool IsSorted() const {
    typename Collection::const_iterator pos, end, prev;
    // Make sure each entry is not less than the entry that precedes it.
    for (pos = m_entries.begin(), end = m_entries.end(), prev = end; pos != end;
         prev = pos++) {
      if (prev != end && *pos < *prev)
        return false;
    }
    return true;
  }
#endif

  void CombineConsecutiveRanges() {
#ifdef ASSERT_RANGEMAP_ARE_SORTED
    assert(IsSorted());
#endif
    // Can't combine ranges if we have zero or one range.
    if (m_entries.size() > 1) {
      // The list should be sorted prior to calling this function.
      typename Collection::iterator pos;
      typename Collection::iterator end;
      typename Collection::iterator prev;
      bool can_combine = false;
      // First we determine if we can combine any of the Entry objects so we
      // don't end up allocating and making a new collection for no reason
      for (pos = m_entries.begin(), end = m_entries.end(), prev = end;
           pos != end; prev = pos++) {
        if (prev != end && prev->DoesAdjoinOrIntersect(*pos)) {
          can_combine = true;
          break;
        }
      }

      // If we can combine at least one entry, we make a new collection,
      // populate it accordingly, and swap it into place.
      if (can_combine) {
        Collection minimal_ranges;
        for (pos = m_entries.begin(), end = m_entries.end(), prev = end;
             pos != end; prev = pos++) {
          if (prev != end && prev->DoesAdjoinOrIntersect(*pos))
            minimal_ranges.back().SetRangeEnd(
                std::max<BaseType>(prev->GetRangeEnd(), pos->GetRangeEnd()));
          else
            minimal_ranges.push_back(*pos);
        }
        // Use the swap technique in case our new vector is much smaller. We
        // must swap when using the STL because std::vector objects never
        // release or reduce the memory once it has been allocated/reserved.
        m_entries.swap(minimal_ranges);
      }
    }
  }
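
  // A minimal sketch (illustrative values): after Sort(), overlapping and
  // abutting ranges collapse into the smallest equivalent set:
  //
  //   RangeVector<uint32_t, uint32_t, 8> rv;
  //   rv.Append(0x200, 0x20); // [0x200, 0x220)
  //   rv.Append(0x100, 0x80); // [0x100, 0x180)
  //   rv.Append(0x180, 0x20); // [0x180, 0x1a0)
  //   rv.Sort();
  //   rv.CombineConsecutiveRanges(); // {[0x100, 0x1a0), [0x200, 0x220)}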

  BaseType GetMinRangeBase(BaseType fail_value) const {
#ifdef ASSERT_RANGEMAP_ARE_SORTED
    assert(IsSorted());
#endif
    if (m_entries.empty())
      return fail_value;
    // m_entries must be sorted, so if we aren't empty, we grab the first
    // range's base
    return m_entries.front().GetRangeBase();
  }

  BaseType GetMaxRangeEnd(BaseType fail_value) const {
#ifdef ASSERT_RANGEMAP_ARE_SORTED
    assert(IsSorted());
#endif
    if (m_entries.empty())
      return fail_value;
    // m_entries must be sorted, so if we aren't empty, we grab the last
    // range's end
    return m_entries.back().GetRangeEnd();
  }

  void Slide(BaseType slide) {
    typename Collection::iterator pos, end;
    for (pos = m_entries.begin(), end = m_entries.end(); pos != end; ++pos)
      pos->Slide(slide);
  }

  void Clear() { m_entries.clear(); }

  void Reserve(typename Collection::size_type size) { m_entries.reserve(size); }

  bool IsEmpty() const { return m_entries.empty(); }

  size_t GetSize() const { return m_entries.size(); }

  const Entry *GetEntryAtIndex(size_t i) const {
    return ((i < m_entries.size()) ? &m_entries[i] : nullptr);
  }

  // Clients must ensure that "i" is a valid index prior to calling this
  // function
  Entry &GetEntryRef(size_t i) { return m_entries[i]; }
  const Entry &GetEntryRef(size_t i) const { return m_entries[i]; }

  Entry *Back() { return (m_entries.empty() ? nullptr : &m_entries.back()); }

  const Entry *Back() const {
    return (m_entries.empty() ? nullptr : &m_entries.back());
  }

  static bool BaseLessThan(const Entry &lhs, const Entry &rhs) {
    return lhs.GetRangeBase() < rhs.GetRangeBase();
  }

  uint32_t FindEntryIndexThatContains(B addr) const {
#ifdef ASSERT_RANGEMAP_ARE_SORTED
    assert(IsSorted());
#endif
    if (!m_entries.empty()) {
      Entry entry(addr, 1);
      typename Collection::const_iterator begin = m_entries.begin();
      typename Collection::const_iterator end = m_entries.end();
      typename Collection::const_iterator pos =
          std::lower_bound(begin, end, entry, BaseLessThan);

      if (pos != end && pos->Contains(addr)) {
        return std::distance(begin, pos);
      } else if (pos != begin) {
        --pos;
        if (pos->Contains(addr))
          return std::distance(begin, pos);
      }
    }
    return UINT32_MAX;
  }

  const Entry *FindEntryThatContains(B addr) const {
#ifdef ASSERT_RANGEMAP_ARE_SORTED
    assert(IsSorted());
#endif
    if (!m_entries.empty()) {
      Entry entry(addr, 1);
      typename Collection::const_iterator begin = m_entries.begin();
      typename Collection::const_iterator end = m_entries.end();
      typename Collection::const_iterator pos =
          std::lower_bound(begin, end, entry, BaseLessThan);

      if (pos != end && pos->Contains(addr)) {
        return &(*pos);
      } else if (pos != begin) {
        --pos;
        if (pos->Contains(addr)) {
          return &(*pos);
        }
      }
    }
    return nullptr;
  }
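
  // A minimal lookup sketch (illustrative values); the end of each range is
  // exclusive:
  //
  //   RangeVector<uint32_t, uint32_t, 8> rv;
  //   rv.Append(0x100, 0x100); // [0x100, 0x200)
  //   rv.Append(0x300, 0x100); // [0x300, 0x400)
  //   rv.Sort();
  //   rv.FindEntryThatContains(0x1ff);      // -> the [0x100, 0x200) entry
  //   rv.FindEntryThatContains(0x200);      // -> nullptr (end is exclusive)
  //   rv.FindEntryIndexThatContains(0x300); // -> 1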

  const Entry *FindEntryThatContains(const Entry &range) const {
#ifdef ASSERT_RANGEMAP_ARE_SORTED
    assert(IsSorted());
#endif
    if (!m_entries.empty()) {
      typename Collection::const_iterator begin = m_entries.begin();
      typename Collection::const_iterator end = m_entries.end();
      typename Collection::const_iterator pos =
          std::lower_bound(begin, end, range, BaseLessThan);

      if (pos != end && pos->Contains(range)) {
        return &(*pos);
      } else if (pos != begin) {
        --pos;
        if (pos->Contains(range)) {
          return &(*pos);
        }
      }
    }
    return nullptr;
  }

protected:
  void CombinePrevAndNext(typename Collection::iterator pos) {
    // Check the previous and next entries in case they need to be unioned
    // with the entry pointed to by "pos".
    if (pos != m_entries.begin()) {
      auto prev = pos - 1;
      if (prev->Union(*pos))
        m_entries.erase(pos);
      pos = prev;
    }

    auto end = m_entries.end();
    if (pos != end) {
      auto next = pos + 1;
      if (next != end) {
        if (pos->Union(*next))
          m_entries.erase(next);
      }
    }
  }

  Collection m_entries;
};

// A simple range with data class where you get to define the type of
// the range base "B", the type used for the range byte size "S", and the type
// for the associated data "T".
template <typename B, typename S, typename T>
struct RangeData : public Range<B, S> {
  typedef T DataType;

  DataType data;

  RangeData() : Range<B, S>(), data() {}

  RangeData(B base, S size) : Range<B, S>(base, size), data() {}

  RangeData(B base, S size, DataType d) : Range<B, S>(base, size), data(d) {}
};

// We can treat the vector as a flattened binary search tree; augmenting it
// with an upper bound (the max of the range endpoints) at every index lets
// us query for range containment more quickly.
template <typename B, typename S, typename T>
struct AugmentedRangeData : public RangeData<B, S, T> {
  B upper_bound;

  AugmentedRangeData(const RangeData<B, S, T> &rd)
      : RangeData<B, S, T>(rd), upper_bound() {}
};
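
// A minimal worked example (illustrative values): for the sorted ranges
// [0, 4), [1, 2), [5, 2) stored at indexes 0..2, the implicit tree root is
// the middle index 1. ComputeUpperBounds() stores 4 at index 0, 7 at index 2,
// and max(3, 4, 7) = 7 at the root, so a search can prune any subtree whose
// upper_bound is below the query address.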

template <typename B, typename S, typename T, unsigned N = 0,
          class Compare = std::less<T>>
class RangeDataVector {
public:
  typedef lldb_private::Range<B, S> Range;
  typedef RangeData<B, S, T> Entry;
  typedef AugmentedRangeData<B, S, T> AugmentedEntry;
  typedef llvm::SmallVector<AugmentedEntry, N> Collection;

  RangeDataVector(Compare compare = Compare()) : m_compare(compare) {}

  ~RangeDataVector() = default;

  void Append(const Entry &entry) { m_entries.emplace_back(entry); }

  void Sort() {
    if (m_entries.size() > 1)
      std::stable_sort(m_entries.begin(), m_entries.end(),
                       [&compare = m_compare](const Entry &a, const Entry &b) {
                         if (a.base != b.base)
                           return a.base < b.base;
                         if (a.size != b.size)
                           return a.size < b.size;
                         return compare(a.data, b.data);
                       });
    if (!m_entries.empty())
      ComputeUpperBounds(0, m_entries.size());
  }

#ifdef ASSERT_RANGEMAP_ARE_SORTED
  bool IsSorted() const {
    typename Collection::const_iterator pos, end, prev;
    for (pos = m_entries.begin(), end = m_entries.end(), prev = end; pos != end;
         prev = pos++) {
      if (prev != end && *pos < *prev)
        return false;
    }
    return true;
  }
#endif

  void CombineConsecutiveEntriesWithEqualData() {
#ifdef ASSERT_RANGEMAP_ARE_SORTED
    assert(IsSorted());
#endif
    typename Collection::iterator pos;
    typename Collection::iterator end;
    typename Collection::iterator prev;
    bool can_combine = false;
    // First we determine if we can combine any of the Entry objects so we
    // don't end up allocating and making a new collection for no reason
    for (pos = m_entries.begin(), end = m_entries.end(), prev = end; pos != end;
         prev = pos++) {
      if (prev != end && prev->data == pos->data) {
        can_combine = true;
        break;
      }
    }

    // If we can combine at least one entry, we make a new collection,
    // populate it accordingly, and swap it into place.
    if (can_combine) {
      Collection minimal_ranges;
      for (pos = m_entries.begin(), end = m_entries.end(), prev = end;
           pos != end; prev = pos++) {
        if (prev != end && prev->data == pos->data)
          minimal_ranges.back().SetRangeEnd(pos->GetRangeEnd());
        else
          minimal_ranges.push_back(*pos);
      }
      // Use the swap technique in case our new vector is much smaller. We must
      // swap when using the STL because std::vector objects never release or
      // reduce the memory once it has been allocated/reserved.
      m_entries.swap(minimal_ranges);
    }
  }
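
  // A minimal sketch (illustrative values): consecutive entries carrying
  // equal data collapse into one entry:
  //
  //   RangeDataVector<uint32_t, uint32_t, int> rdv;
  //   rdv.Append({0x100, 0x10, 7}); // [0x100, 0x110) -> 7
  //   rdv.Append({0x110, 0x10, 7}); // [0x110, 0x120) -> 7
  //   rdv.Append({0x120, 0x10, 9}); // [0x120, 0x130) -> 9
  //   rdv.Sort();
  //   rdv.CombineConsecutiveEntriesWithEqualData();
  //   // -> {[0x100, 0x120) -> 7, [0x120, 0x130) -> 9}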

  void Clear() { m_entries.clear(); }

  bool IsEmpty() const { return m_entries.empty(); }

  size_t GetSize() const { return m_entries.size(); }

  const Entry *GetEntryAtIndex(size_t i) const {
    return ((i < m_entries.size()) ? &m_entries[i] : nullptr);
  }

  Entry *GetMutableEntryAtIndex(size_t i) {
    return ((i < m_entries.size()) ? &m_entries[i] : nullptr);
  }

  // Clients must ensure that "i" is a valid index prior to calling this
  // function
  Entry &GetEntryRef(size_t i) { return m_entries[i]; }
  const Entry &GetEntryRef(size_t i) const { return m_entries[i]; }

  static bool BaseLessThan(const Entry &lhs, const Entry &rhs) {
    return lhs.GetRangeBase() < rhs.GetRangeBase();
  }

  uint32_t FindEntryIndexThatContains(B addr) const {
    const AugmentedEntry *entry =
        static_cast<const AugmentedEntry *>(FindEntryThatContains(addr));
    if (entry)
      return std::distance(m_entries.begin(), entry);
    return UINT32_MAX;
  }

  uint32_t FindEntryIndexesThatContain(B addr, std::vector<uint32_t> &indexes) {
#ifdef ASSERT_RANGEMAP_ARE_SORTED
    assert(IsSorted());
#endif
    if (!m_entries.empty())
      FindEntryIndexesThatContain(addr, 0, m_entries.size(), indexes);

    return indexes.size();
  }

  Entry *FindEntryThatContains(B addr) {
    return const_cast<Entry *>(
        static_cast<const RangeDataVector *>(this)->FindEntryThatContains(
            addr));
  }

  const Entry *FindEntryThatContains(B addr) const {
    return FindEntryThatContains(Entry(addr, 1));
  }

  const Entry *FindEntryThatContains(const Entry &range) const {
#ifdef ASSERT_RANGEMAP_ARE_SORTED
    assert(IsSorted());
#endif
    if (!m_entries.empty()) {
      typename Collection::const_iterator begin = m_entries.begin();
      typename Collection::const_iterator end = m_entries.end();
      typename Collection::const_iterator pos =
          std::lower_bound(begin, end, range, BaseLessThan);

      while (pos != begin && pos[-1].Contains(range))
        --pos;

      if (pos != end && pos->Contains(range))
        return &(*pos);
    }
    return nullptr;
  }

  const Entry *FindEntryStartsAt(B addr) const {
#ifdef ASSERT_RANGEMAP_ARE_SORTED
    assert(IsSorted());
#endif
    if (!m_entries.empty()) {
      auto begin = m_entries.begin(), end = m_entries.end();
      auto pos = std::lower_bound(begin, end, Entry(addr, 1), BaseLessThan);
      if (pos != end && pos->base == addr)
        return &(*pos);
    }
    return nullptr;
  }

  // This method will return the entry that contains the given address, or the
  // entry following that address.  If you give it an address of 0 and the
  // first entry starts at address 0x100, you will get the entry at 0x100.
  //
  // For most uses, FindEntryThatContains is the correct one to use; this is a
  // less commonly needed behavior.  It was added for core file memory regions,
  // where we want to present a gap in the memory regions as a distinct region,
  // so we need to know the start address of the next memory section that
  // exists.
  const Entry *FindEntryThatContainsOrFollows(B addr) const {
#ifdef ASSERT_RANGEMAP_ARE_SORTED
    assert(IsSorted());
#endif
    if (!m_entries.empty()) {
      typename Collection::const_iterator begin = m_entries.begin();
      typename Collection::const_iterator end = m_entries.end();
      typename Collection::const_iterator pos =
          std::lower_bound(m_entries.begin(), end, addr,
                           [](const Entry &lhs, B rhs_base) -> bool {
                             return lhs.GetRangeEnd() <= rhs_base;
                           });

      while (pos != begin && pos[-1].Contains(addr))
        --pos;

      if (pos != end)
        return &(*pos);
    }
    return nullptr;
  }
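
  // A minimal sketch (illustrative values):
  //
  //   RangeDataVector<uint32_t, uint32_t, int> rdv;
  //   rdv.Append({0x100, 0x100, 1}); // [0x100, 0x200)
  //   rdv.Append({0x400, 0x100, 2}); // [0x400, 0x500)
  //   rdv.Sort();
  //   rdv.FindEntryThatContainsOrFollows(0x150); // -> [0x100, 0x200) entry
  //   rdv.FindEntryThatContainsOrFollows(0x250); // -> [0x400, 0x500) entry
  //   rdv.FindEntryThatContainsOrFollows(0x600); // -> nullptr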

  Entry *Back() { return (m_entries.empty() ? nullptr : &m_entries.back()); }

  const Entry *Back() const {
    return (m_entries.empty() ? nullptr : &m_entries.back());
  }

protected:
  Collection m_entries;
  Compare m_compare;

private:
  // Compute extra information needed for search
  B ComputeUpperBounds(size_t lo, size_t hi) {
    size_t mid = (lo + hi) / 2;
    AugmentedEntry &entry = m_entries[mid];

    entry.upper_bound = entry.base + entry.size;

    if (lo < mid)
      entry.upper_bound =
          std::max(entry.upper_bound, ComputeUpperBounds(lo, mid));

    if (mid + 1 < hi)
      entry.upper_bound =
          std::max(entry.upper_bound, ComputeUpperBounds(mid + 1, hi));

    return entry.upper_bound;
  }

  // This is based on the augmented tree implementation found at
  // https://en.wikipedia.org/wiki/Interval_tree#Augmented_tree
  void FindEntryIndexesThatContain(B addr, size_t lo, size_t hi,
                                   std::vector<uint32_t> &indexes) {
    size_t mid = (lo + hi) / 2;
    const AugmentedEntry &entry = m_entries[mid];

    // addr is greater than the rightmost point of any interval below mid,
    // so there cannot be any matches.
    if (addr > entry.upper_bound)
      return;

    // Recursively search left subtree
    if (lo < mid)
      FindEntryIndexesThatContain(addr, lo, mid, indexes);

    // If addr is smaller than the start of the current interval it
    // cannot contain it nor can any of its right subtree.
    if (addr < entry.base)
      return;

    if (entry.Contains(addr))
      indexes.push_back(entry.data);

    // Recursively search right subtree
    if (mid + 1 < hi)
      FindEntryIndexesThatContain(addr, mid + 1, hi, indexes);
  }
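
  // A minimal search walk (illustrative values): with the three ranges from
  // the sketch above ([0, 4), [1, 2), [5, 2)), a query for addr = 5 visits
  // the root (index 1, upper_bound 7), recurses left to index 0 and prunes
  // there because 5 > its upper_bound of 4, then continues right since
  // 5 >= the root's base, and records only the entry at index 2.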
};

// A simple address/data pair class where you get to define the type of the
// address "B" and the type of the associated data "T".
template <typename B, typename T> struct AddressData {
  typedef B BaseType;
  typedef T DataType;

  BaseType addr;
  DataType data;

  AddressData() : addr(), data() {}

  AddressData(B a, DataType d) : addr(a), data(d) {}

  bool operator<(const AddressData &rhs) const {
    if (this->addr == rhs.addr)
      return this->data < rhs.data;
    return this->addr < rhs.addr;
  }

  bool operator==(const AddressData &rhs) const {
    return this->addr == rhs.addr && this->data == rhs.data;
  }

  bool operator!=(const AddressData &rhs) const {
    return this->addr != rhs.addr || this->data != rhs.data;
  }
};

template <typename B, typename T, unsigned N> class AddressDataArray {
public:
  typedef AddressData<B, T> Entry;
  typedef llvm::SmallVector<Entry, N> Collection;

  AddressDataArray() = default;

  ~AddressDataArray() = default;

  void Append(const Entry &entry) { m_entries.push_back(entry); }

  void Sort() {
    if (m_entries.size() > 1)
      std::stable_sort(m_entries.begin(), m_entries.end());
  }

#ifdef ASSERT_RANGEMAP_ARE_SORTED
  bool IsSorted() const {
    typename Collection::const_iterator pos, end, prev;
    // Make sure each entry is not less than the entry that precedes it.
    for (pos = m_entries.begin(), end = m_entries.end(), prev = end; pos != end;
         prev = pos++) {
      if (prev != end && *pos < *prev)
        return false;
    }
    return true;
  }
#endif

  void Clear() { m_entries.clear(); }

  bool IsEmpty() const { return m_entries.empty(); }

  size_t GetSize() const { return m_entries.size(); }

  const Entry *GetEntryAtIndex(size_t i) const {
    return ((i < m_entries.size()) ? &m_entries[i] : nullptr);
  }

  // Clients must ensure that "i" is a valid index prior to calling this
  // function
  const Entry &GetEntryRef(size_t i) const { return m_entries[i]; }

  static bool BaseLessThan(const Entry &lhs, const Entry &rhs) {
    return lhs.addr < rhs.addr;
  }

  Entry *FindEntry(B addr, bool exact_match_only) {
#ifdef ASSERT_RANGEMAP_ARE_SORTED
    assert(IsSorted());
#endif
    if (!m_entries.empty()) {
      Entry entry;
      entry.addr = addr;
      typename Collection::iterator begin = m_entries.begin();
      typename Collection::iterator end = m_entries.end();
      typename Collection::iterator pos =
          std::lower_bound(begin, end, entry, BaseLessThan);

      while (pos != begin && pos[-1].addr == addr)
        --pos;

      if (pos != end) {
        if (pos->addr == addr || !exact_match_only)
          return &(*pos);
      }
    }
    return nullptr;
  }
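
  // A minimal sketch (illustrative values): with exact_match_only = false,
  // FindEntry falls back to the first entry at or after the given address:
  //
  //   AddressDataArray<uint32_t, const char *, 8> ada;
  //   ada.Append({0x1000, "first"});
  //   ada.Append({0x2000, "second"});
  //   ada.Sort();
  //   ada.FindEntry(0x1000, true);  // -> the 0x1000 entry (exact match)
  //   ada.FindEntry(0x1800, true);  // -> nullptr
  //   ada.FindEntry(0x1800, false); // -> the 0x2000 entry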

  const Entry *FindNextEntry(const Entry *entry) {
    if (entry >= &*m_entries.begin() && entry + 1 < &*m_entries.end())
      return entry + 1;
    return nullptr;
  }

  Entry *Back() { return (m_entries.empty() ? nullptr : &m_entries.back()); }

  const Entry *Back() const {
    return (m_entries.empty() ? nullptr : &m_entries.back());
  }

protected:
  Collection m_entries;
};

} // namespace lldb_private

#endif // LLDB_UTILITY_RANGEMAP_H