//===-- RangeMap.h ----------------------------------------------*- C++ -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//

#ifndef LLDB_UTILITY_RANGEMAP_H
#define LLDB_UTILITY_RANGEMAP_H

#include <algorithm>
#include <vector>

#include "llvm/ADT/SmallVector.h"

#include "lldb/lldb-private.h"

// Uncomment to make sure all Range objects are sorted when needed
//#define ASSERT_RANGEMAP_ARE_SORTED

namespace lldb_private {

// Templatized classes for dealing with generic ranges and also collections of
// ranges, or collections of ranges that have associated data.

// A simple range class where you get to define the type of the range
// base "B", and the type used for the range byte size "S".
template <typename B, typename S> struct Range {
  typedef B BaseType;
  typedef S SizeType;

  BaseType base;
  SizeType size;

  Range() : base(0), size(0) {}

  Range(BaseType b, SizeType s) : base(b), size(s) {}

  void Clear(BaseType b = 0) {
    base = b;
    size = 0;
  }
  BaseType GetRangeBase() const { return base; }

  // Set the start value for the range, and keep the same size
  void SetRangeBase(BaseType b) { base = b; }

  void Slide(BaseType slide) { base += slide; }

  bool Union(const Range &rhs) {
    if (DoesAdjoinOrIntersect(rhs)) {
      auto new_end = std::max<BaseType>(GetRangeEnd(), rhs.GetRangeEnd());
      base = std::min<BaseType>(base, rhs.base);
      size = new_end - base;
      return true;
    }
    return false;
  }

  BaseType GetRangeEnd() const { return base + size; }

  void SetRangeEnd(BaseType end) {
    if (end > base)
      size = end - base;
    else
      size = 0;
  }

  SizeType GetByteSize() const { return size; }

  void SetByteSize(SizeType s) { size = s; }

  bool IsValid() const { return size > 0; }

  bool Contains(BaseType r) const {
    return (GetRangeBase() <= r) && (r < GetRangeEnd());
  }

  bool ContainsEndInclusive(BaseType r) const {
    return (GetRangeBase() <= r) && (r <= GetRangeEnd());
  }

  bool Contains(const Range &range) const {
    return Contains(range.GetRangeBase()) &&
           ContainsEndInclusive(range.GetRangeEnd());
  }
  // Returns true if the two ranges adjoin or intersect
  bool DoesAdjoinOrIntersect(const Range &rhs) const {
    const BaseType lhs_base = this->GetRangeBase();
    const BaseType rhs_base = rhs.GetRangeBase();
    const BaseType lhs_end = this->GetRangeEnd();
    const BaseType rhs_end = rhs.GetRangeEnd();
    bool result = (lhs_base <= rhs_end) && (lhs_end >= rhs_base);
    return result;
  }

  // Returns true if the two ranges intersect
  bool DoesIntersect(const Range &rhs) const {
    const BaseType lhs_base = this->GetRangeBase();
    const BaseType rhs_base = rhs.GetRangeBase();
    const BaseType lhs_end = this->GetRangeEnd();
    const BaseType rhs_end = rhs.GetRangeEnd();
    bool result = (lhs_base < rhs_end) && (lhs_end > rhs_base);
    return result;
  }

  bool operator<(const Range &rhs) const {
    if (base == rhs.base)
      return size < rhs.size;
    return base < rhs.base;
  }

  bool operator==(const Range &rhs) const {
    return base == rhs.base && size == rhs.size;
  }

  bool operator!=(const Range &rhs) const {
    return base != rhs.base || size != rhs.size;
  }
};
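
// For illustration, a minimal sketch (not part of this header's API; the
// types and values are arbitrary) of how the Range operations compose. Two
// ranges that merely touch can be unioned, while DoesIntersect() requires
// actual overlap:
//
//   Range<lldb::addr_t, lldb::addr_t> a(0x1000, 0x100); // [0x1000, 0x1100)
//   Range<lldb::addr_t, lldb::addr_t> b(0x1100, 0x100); // [0x1100, 0x1200)
//   a.DoesIntersect(b);         // false: the ranges only adjoin
//   a.DoesAdjoinOrIntersect(b); // true
//   a.Union(b);                 // a is now [0x1000, 0x1200)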

// A range array class where you get to define the type of the ranges
// that the collection contains.

template <typename B, typename S, unsigned N> class RangeArray {
public:
  typedef B BaseType;
  typedef S SizeType;
  typedef Range<B, S> Entry;
  typedef llvm::SmallVector<Entry, N> Collection;

  RangeArray() = default;

  ~RangeArray() = default;

  void Append(const Entry &entry) { m_entries.push_back(entry); }

  void Append(B base, S size) { m_entries.emplace_back(base, size); }

  bool RemoveEntryAtIndex(uint32_t idx) {
    if (idx < m_entries.size()) {
      m_entries.erase(m_entries.begin() + idx);
      return true;
    }
    return false;
  }

  void Sort() {
    if (m_entries.size() > 1)
      std::stable_sort(m_entries.begin(), m_entries.end());
  }

#ifdef ASSERT_RANGEMAP_ARE_SORTED
  bool IsSorted() const {
    typename Collection::const_iterator pos, end, prev;
    for (pos = m_entries.begin(), end = m_entries.end(), prev = end; pos != end;
         prev = pos++) {
      if (prev != end && *pos < *prev)
        return false;
    }
    return true;
  }
#endif

  void CombineConsecutiveRanges() {
#ifdef ASSERT_RANGEMAP_ARE_SORTED
    assert(IsSorted());
#endif
    // Can't combine ranges if we have zero or one range
    if (m_entries.size() > 1) {
      // The list should be sorted prior to calling this function
      typename Collection::iterator pos;
      typename Collection::iterator end;
      typename Collection::iterator prev;
      bool can_combine = false;
      // First we determine if we can combine any of the Entry objects so we
      // don't end up allocating and making a new collection for no reason
      for (pos = m_entries.begin(), end = m_entries.end(), prev = end;
           pos != end; prev = pos++) {
        if (prev != end && prev->DoesAdjoinOrIntersect(*pos)) {
          can_combine = true;
          break;
        }
      }

      // If we can combine at least one entry, then we make a new collection
      // and populate it accordingly, and then swap it into place.
      if (can_combine) {
        Collection minimal_ranges;
        for (pos = m_entries.begin(), end = m_entries.end(), prev = end;
             pos != end; prev = pos++) {
          if (prev != end && prev->DoesAdjoinOrIntersect(*pos))
            minimal_ranges.back().SetRangeEnd(
                std::max<BaseType>(prev->GetRangeEnd(), pos->GetRangeEnd()));
          else
            minimal_ranges.push_back(*pos);
        }
        // Use the swap technique in case our new vector is much smaller. We
        // must swap when using the STL because std::vector objects never
        // release or reduce the memory once it has been allocated/reserved.
        m_entries.swap(minimal_ranges);
      }
    }
  }

  BaseType GetMinRangeBase(BaseType fail_value) const {
#ifdef ASSERT_RANGEMAP_ARE_SORTED
    assert(IsSorted());
#endif
    if (m_entries.empty())
      return fail_value;
    // m_entries must be sorted, so if we aren't empty, we grab the first
    // range's base
    return m_entries.front().GetRangeBase();
  }

  BaseType GetMaxRangeEnd(BaseType fail_value) const {
#ifdef ASSERT_RANGEMAP_ARE_SORTED
    assert(IsSorted());
#endif
    if (m_entries.empty())
      return fail_value;
    // m_entries must be sorted, so if we aren't empty, we grab the last
    // range's end
    return m_entries.back().GetRangeEnd();
  }

  void Slide(BaseType slide) {
    typename Collection::iterator pos, end;
    for (pos = m_entries.begin(), end = m_entries.end(); pos != end; ++pos)
      pos->Slide(slide);
  }

  void Clear() { m_entries.clear(); }

  bool IsEmpty() const { return m_entries.empty(); }

  size_t GetSize() const { return m_entries.size(); }

  const Entry *GetEntryAtIndex(size_t i) const {
    return ((i < m_entries.size()) ? &m_entries[i] : nullptr);
  }

  // Clients must ensure that "i" is a valid index prior to calling this
  // function
  const Entry &GetEntryRef(size_t i) const { return m_entries[i]; }

  Entry *Back() { return (m_entries.empty() ? nullptr : &m_entries.back()); }

  const Entry *Back() const {
    return (m_entries.empty() ? nullptr : &m_entries.back());
  }

  static bool BaseLessThan(const Entry &lhs, const Entry &rhs) {
    return lhs.GetRangeBase() < rhs.GetRangeBase();
  }

  uint32_t FindEntryIndexThatContains(B addr) const {
#ifdef ASSERT_RANGEMAP_ARE_SORTED
    assert(IsSorted());
#endif
    if (!m_entries.empty()) {
      Entry entry(addr, 1);
      typename Collection::const_iterator begin = m_entries.begin();
      typename Collection::const_iterator end = m_entries.end();
      typename Collection::const_iterator pos =
          std::lower_bound(begin, end, entry, BaseLessThan);

      if (pos != end && pos->Contains(addr)) {
        return std::distance(begin, pos);
      } else if (pos != begin) {
        --pos;
        if (pos->Contains(addr))
          return std::distance(begin, pos);
      }
    }
    return UINT32_MAX;
  }

  const Entry *FindEntryThatContains(B addr) const {
#ifdef ASSERT_RANGEMAP_ARE_SORTED
    assert(IsSorted());
#endif
    if (!m_entries.empty()) {
      Entry entry(addr, 1);
      typename Collection::const_iterator begin = m_entries.begin();
      typename Collection::const_iterator end = m_entries.end();
      typename Collection::const_iterator pos =
          std::lower_bound(begin, end, entry, BaseLessThan);

      if (pos != end && pos->Contains(addr)) {
        return &(*pos);
      } else if (pos != begin) {
        --pos;
        if (pos->Contains(addr)) {
          return &(*pos);
        }
      }
    }
    return nullptr;
  }

  const Entry *FindEntryThatContains(const Entry &range) const {
#ifdef ASSERT_RANGEMAP_ARE_SORTED
    assert(IsSorted());
#endif
    if (!m_entries.empty()) {
      typename Collection::const_iterator begin = m_entries.begin();
      typename Collection::const_iterator end = m_entries.end();
      typename Collection::const_iterator pos =
          std::lower_bound(begin, end, range, BaseLessThan);

      if (pos != end && pos->Contains(range)) {
        return &(*pos);
      } else if (pos != begin) {
        --pos;
        if (pos->Contains(range)) {
          return &(*pos);
        }
      }
    }
    return nullptr;
  }

protected:
  Collection m_entries;
};
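
// Example usage of RangeArray (an illustrative sketch; the element types and
// values are arbitrary). Entries must be sorted before any of the Find*
// methods are used:
//
//   RangeArray<uint32_t, uint32_t, 4> regions;
//   regions.Append(0x2000, 0x100);
//   regions.Append(0x1000, 0x100);
//   regions.Sort();
//   const auto *entry = regions.FindEntryThatContains(0x1010);
//   // entry->GetRangeBase() == 0x1000, entry->GetRangeEnd() == 0x1100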

template <typename B, typename S> class RangeVector {
public:
  typedef B BaseType;
  typedef S SizeType;
  typedef Range<B, S> Entry;
  typedef std::vector<Entry> Collection;

  RangeVector() = default;

  ~RangeVector() = default;

  void Append(const Entry &entry) { m_entries.push_back(entry); }

  void Append(B base, S size) { m_entries.emplace_back(base, size); }

  // Insert an item into a sorted list, combining it with any adjacent blocks
  // if "combine" is true.
  void Insert(const Entry &entry, bool combine) {
    if (m_entries.empty()) {
      m_entries.push_back(entry);
      return;
    }
    auto begin = m_entries.begin();
    auto end = m_entries.end();
    auto pos = std::lower_bound(begin, end, entry);
    if (combine) {
      if (pos != end && pos->Union(entry)) {
        CombinePrevAndNext(pos);
        return;
      }
      if (pos != begin) {
        auto prev = pos - 1;
        if (prev->Union(entry)) {
          CombinePrevAndNext(prev);
          return;
        }
      }
    }
    m_entries.insert(pos, entry);
  }

  bool RemoveEntryAtIndex(uint32_t idx) {
    if (idx < m_entries.size()) {
      m_entries.erase(m_entries.begin() + idx);
      return true;
    }
    return false;
  }

  void Sort() {
    if (m_entries.size() > 1)
      std::stable_sort(m_entries.begin(), m_entries.end());
  }

#ifdef ASSERT_RANGEMAP_ARE_SORTED
  bool IsSorted() const {
    typename Collection::const_iterator pos, end, prev;
    for (pos = m_entries.begin(), end = m_entries.end(), prev = end; pos != end;
         prev = pos++) {
      if (prev != end && *pos < *prev)
        return false;
    }
    return true;
  }
#endif

  void CombineConsecutiveRanges() {
#ifdef ASSERT_RANGEMAP_ARE_SORTED
    assert(IsSorted());
#endif
    // Can't combine ranges if we have zero or one range
    if (m_entries.size() > 1) {
      // The list should be sorted prior to calling this function
      typename Collection::iterator pos;
      typename Collection::iterator end;
      typename Collection::iterator prev;
      bool can_combine = false;
      // First we determine if we can combine any of the Entry objects so we
      // don't end up allocating and making a new collection for no reason
      for (pos = m_entries.begin(), end = m_entries.end(), prev = end;
           pos != end; prev = pos++) {
        if (prev != end && prev->DoesAdjoinOrIntersect(*pos)) {
          can_combine = true;
          break;
        }
      }

      // If we can combine at least one entry, then we make a new collection
      // and populate it accordingly, and then swap it into place.
      if (can_combine) {
        Collection minimal_ranges;
        for (pos = m_entries.begin(), end = m_entries.end(), prev = end;
             pos != end; prev = pos++) {
          if (prev != end && prev->DoesAdjoinOrIntersect(*pos))
            minimal_ranges.back().SetRangeEnd(
                std::max<BaseType>(prev->GetRangeEnd(), pos->GetRangeEnd()));
          else
            minimal_ranges.push_back(*pos);
        }
        // Use the swap technique in case our new vector is much smaller. We
        // must swap when using the STL because std::vector objects never
        // release or reduce the memory once it has been allocated/reserved.
        m_entries.swap(minimal_ranges);
      }
    }
  }

  BaseType GetMinRangeBase(BaseType fail_value) const {
#ifdef ASSERT_RANGEMAP_ARE_SORTED
    assert(IsSorted());
#endif
    if (m_entries.empty())
      return fail_value;
    // m_entries must be sorted, so if we aren't empty, we grab the first
    // range's base
    return m_entries.front().GetRangeBase();
  }

  BaseType GetMaxRangeEnd(BaseType fail_value) const {
#ifdef ASSERT_RANGEMAP_ARE_SORTED
    assert(IsSorted());
#endif
    if (m_entries.empty())
      return fail_value;
    // m_entries must be sorted, so if we aren't empty, we grab the last
    // range's end
    return m_entries.back().GetRangeEnd();
  }

  void Slide(BaseType slide) {
    typename Collection::iterator pos, end;
    for (pos = m_entries.begin(), end = m_entries.end(); pos != end; ++pos)
      pos->Slide(slide);
  }

  void Clear() { m_entries.clear(); }

  void Reserve(typename Collection::size_type size) { m_entries.reserve(size); }

  bool IsEmpty() const { return m_entries.empty(); }

  size_t GetSize() const { return m_entries.size(); }

  const Entry *GetEntryAtIndex(size_t i) const {
    return ((i < m_entries.size()) ? &m_entries[i] : nullptr);
  }

  // Clients must ensure that "i" is a valid index prior to calling this
  // function
  Entry &GetEntryRef(size_t i) { return m_entries[i]; }
  const Entry &GetEntryRef(size_t i) const { return m_entries[i]; }

  Entry *Back() { return (m_entries.empty() ? nullptr : &m_entries.back()); }

  const Entry *Back() const {
    return (m_entries.empty() ? nullptr : &m_entries.back());
  }

  static bool BaseLessThan(const Entry &lhs, const Entry &rhs) {
    return lhs.GetRangeBase() < rhs.GetRangeBase();
  }

  uint32_t FindEntryIndexThatContains(B addr) const {
#ifdef ASSERT_RANGEMAP_ARE_SORTED
    assert(IsSorted());
#endif
    if (!m_entries.empty()) {
      Entry entry(addr, 1);
      typename Collection::const_iterator begin = m_entries.begin();
      typename Collection::const_iterator end = m_entries.end();
      typename Collection::const_iterator pos =
          std::lower_bound(begin, end, entry, BaseLessThan);

      if (pos != end && pos->Contains(addr)) {
        return std::distance(begin, pos);
      } else if (pos != begin) {
        --pos;
        if (pos->Contains(addr))
          return std::distance(begin, pos);
      }
    }
    return UINT32_MAX;
  }

  const Entry *FindEntryThatContains(B addr) const {
#ifdef ASSERT_RANGEMAP_ARE_SORTED
    assert(IsSorted());
#endif
    if (!m_entries.empty()) {
      Entry entry(addr, 1);
      typename Collection::const_iterator begin = m_entries.begin();
      typename Collection::const_iterator end = m_entries.end();
      typename Collection::const_iterator pos =
          std::lower_bound(begin, end, entry, BaseLessThan);

      if (pos != end && pos->Contains(addr)) {
        return &(*pos);
      } else if (pos != begin) {
        --pos;
        if (pos->Contains(addr)) {
          return &(*pos);
        }
      }
    }
    return nullptr;
  }

  const Entry *FindEntryThatContains(const Entry &range) const {
#ifdef ASSERT_RANGEMAP_ARE_SORTED
    assert(IsSorted());
#endif
    if (!m_entries.empty()) {
      typename Collection::const_iterator begin = m_entries.begin();
      typename Collection::const_iterator end = m_entries.end();
      typename Collection::const_iterator pos =
          std::lower_bound(begin, end, range, BaseLessThan);

      if (pos != end && pos->Contains(range)) {
        return &(*pos);
      } else if (pos != begin) {
        --pos;
        if (pos->Contains(range)) {
          return &(*pos);
        }
      }
    }
    return nullptr;
  }

protected:
  void CombinePrevAndNext(typename Collection::iterator pos) {
    // Check the previous and next entries in case they need to be unioned
    // with the entry pointed to by "pos".
    if (pos != m_entries.begin()) {
      auto prev = pos - 1;
      if (prev->Union(*pos))
        m_entries.erase(pos);
      pos = prev;
    }

    auto end = m_entries.end();
    if (pos != end) {
      auto next = pos + 1;
      if (next != end) {
        if (pos->Union(*next))
          m_entries.erase(next);
      }
    }
  }

  Collection m_entries;
};
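
// Example usage of RangeVector (an illustrative sketch; the values are
// arbitrary). Unlike RangeArray, Insert() keeps the entries sorted and can
// coalesce an incoming range with its neighbors:
//
//   RangeVector<lldb::addr_t, lldb::addr_t> ranges;
//   ranges.Insert({0x1000, 0x100}, /*combine=*/true);
//   ranges.Insert({0x1100, 0x100}, /*combine=*/true);
//   // The second range adjoins the first, so with combine == true the
//   // vector now holds the single range [0x1000, 0x1200).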

// A simple range-with-data class where you get to define the type of
// the range base "B", the type used for the range byte size "S", and the type
// for the associated data "T".
template <typename B, typename S, typename T>
struct RangeData : public Range<B, S> {
  typedef T DataType;

  DataType data;

  RangeData() : Range<B, S>(), data() {}

  RangeData(B base, S size) : Range<B, S>(base, size), data() {}

  RangeData(B base, S size, DataType d) : Range<B, S>(base, size), data(d) {}
};
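
// For illustration (a sketch; the types and values are arbitrary), a
// RangeData pairs a range with a payload, e.g. mapping a file address range
// to a section ID:
//
//   RangeData<lldb::addr_t, lldb::addr_t, uint32_t> r(0x1000, 0x200, 7);
//   r.Contains(0x11ff); // true: 0x11ff is within [0x1000, 0x1200)
//   r.data;             // 7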

template <typename B, typename S, typename T, unsigned N = 0,
          class Compare = std::less<T>>
class RangeDataVector {
public:
  typedef lldb_private::Range<B, S> Range;
  typedef RangeData<B, S, T> Entry;
  typedef llvm::SmallVector<Entry, N> Collection;

  RangeDataVector(Compare compare = Compare()) : m_compare(compare) {}

  ~RangeDataVector() = default;

  void Append(const Entry &entry) { m_entries.push_back(entry); }

  void Sort() {
    if (m_entries.size() > 1)
      std::stable_sort(m_entries.begin(), m_entries.end(),
                       [&compare = m_compare](const Entry &a, const Entry &b) {
                         if (a.base != b.base)
                           return a.base < b.base;
                         if (a.size != b.size)
                           return a.size < b.size;
                         return compare(a.data, b.data);
                       });
  }

#ifdef ASSERT_RANGEMAP_ARE_SORTED
  bool IsSorted() const {
    typename Collection::const_iterator pos, end, prev;
    for (pos = m_entries.begin(), end = m_entries.end(), prev = end; pos != end;
         prev = pos++) {
      if (prev != end && *pos < *prev)
        return false;
    }
    return true;
  }
#endif

  void CombineConsecutiveEntriesWithEqualData() {
#ifdef ASSERT_RANGEMAP_ARE_SORTED
    assert(IsSorted());
#endif
    typename Collection::iterator pos;
    typename Collection::iterator end;
    typename Collection::iterator prev;
    bool can_combine = false;
    // First we determine if we can combine any of the Entry objects so we
    // don't end up allocating and making a new collection for no reason
    for (pos = m_entries.begin(), end = m_entries.end(), prev = end; pos != end;
         prev = pos++) {
      if (prev != end && prev->data == pos->data) {
        can_combine = true;
        break;
      }
    }

    // If we can combine at least one entry, then we make a new collection and
    // populate it accordingly, and then swap it into place.
    if (can_combine) {
      Collection minimal_ranges;
      for (pos = m_entries.begin(), end = m_entries.end(), prev = end;
           pos != end; prev = pos++) {
        if (prev != end && prev->data == pos->data)
          minimal_ranges.back().SetRangeEnd(pos->GetRangeEnd());
        else
          minimal_ranges.push_back(*pos);
      }
      // Use the swap technique in case our new vector is much smaller. We must
      // swap when using the STL because std::vector objects never release or
      // reduce the memory once it has been allocated/reserved.
      m_entries.swap(minimal_ranges);
    }
  }

  void Clear() { m_entries.clear(); }

  bool IsEmpty() const { return m_entries.empty(); }

  size_t GetSize() const { return m_entries.size(); }

  const Entry *GetEntryAtIndex(size_t i) const {
    return ((i < m_entries.size()) ? &m_entries[i] : nullptr);
  }

  Entry *GetMutableEntryAtIndex(size_t i) {
    return ((i < m_entries.size()) ? &m_entries[i] : nullptr);
  }

  // Clients must ensure that "i" is a valid index prior to calling this
  // function
  Entry &GetEntryRef(size_t i) { return m_entries[i]; }
  const Entry &GetEntryRef(size_t i) const { return m_entries[i]; }

  static bool BaseLessThan(const Entry &lhs, const Entry &rhs) {
    return lhs.GetRangeBase() < rhs.GetRangeBase();
  }

  uint32_t FindEntryIndexThatContains(B addr) const {
    const Entry *entry = FindEntryThatContains(addr);
    if (entry)
      return std::distance(m_entries.begin(), entry);
    return UINT32_MAX;
  }

  uint32_t FindEntryIndexesThatContain(B addr,
                                       std::vector<uint32_t> &indexes) const {
#ifdef ASSERT_RANGEMAP_ARE_SORTED
    assert(IsSorted());
#endif
    // Search the entries until the first entry that has a larger base address
    // than `addr`. As m_entries is sorted by their base address, all following
    // entries can't contain `addr` as their base address is already larger.
    // Note that what gets appended to `indexes` is each matching entry's data
    // value, so callers are expected to store an entry's index in its data.
    for (const auto &entry : m_entries) {
      if (entry.Contains(addr))
        indexes.push_back(entry.data);
      else if (entry.GetRangeBase() > addr)
        break;
    }
    return indexes.size();
  }

  Entry *FindEntryThatContains(B addr) {
    return const_cast<Entry *>(
        static_cast<const RangeDataVector *>(this)->FindEntryThatContains(
            addr));
  }

  const Entry *FindEntryThatContains(B addr) const {
    return FindEntryThatContains(Entry(addr, 1));
  }

  const Entry *FindEntryThatContains(const Entry &range) const {
#ifdef ASSERT_RANGEMAP_ARE_SORTED
    assert(IsSorted());
#endif
    if (!m_entries.empty()) {
      typename Collection::const_iterator begin = m_entries.begin();
      typename Collection::const_iterator end = m_entries.end();
      typename Collection::const_iterator pos =
          std::lower_bound(begin, end, range, BaseLessThan);

      while (pos != begin && pos[-1].Contains(range))
        --pos;

      if (pos != end && pos->Contains(range))
        return &(*pos);
    }
    return nullptr;
  }

  const Entry *FindEntryStartsAt(B addr) const {
#ifdef ASSERT_RANGEMAP_ARE_SORTED
    assert(IsSorted());
#endif
    if (!m_entries.empty()) {
      auto begin = m_entries.begin(), end = m_entries.end();
      auto pos = std::lower_bound(begin, end, Entry(addr, 1), BaseLessThan);
      if (pos != end && pos->base == addr)
        return &(*pos);
    }
    return nullptr;
  }

  // This method will return the entry that contains the given address, or the
  // entry following that address.  If you give it an address of 0 and the
  // first entry starts at address 0x100, you will get the entry at 0x100.
  //
  // For most uses, FindEntryThatContains is the correct one to use; this is a
  // less commonly needed behavior.  It was added for core file memory regions,
  // where we want to present a gap in the memory regions as a distinct region,
  // so we need to know the start address of the next memory section that
  // exists.
  const Entry *FindEntryThatContainsOrFollows(B addr) const {
#ifdef ASSERT_RANGEMAP_ARE_SORTED
    assert(IsSorted());
#endif
    if (!m_entries.empty()) {
      typename Collection::const_iterator begin = m_entries.begin();
      typename Collection::const_iterator end = m_entries.end();
      typename Collection::const_iterator pos =
          std::lower_bound(begin, end, addr,
                           [](const Entry &lhs, B rhs_base) -> bool {
                             return lhs.GetRangeEnd() <= rhs_base;
                           });

      while (pos != begin && pos[-1].Contains(addr))
        --pos;

      if (pos != end)
        return &(*pos);
    }
    return nullptr;
  }

  Entry *Back() { return (m_entries.empty() ? nullptr : &m_entries.back()); }

  const Entry *Back() const {
    return (m_entries.empty() ? nullptr : &m_entries.back());
  }

protected:
  Collection m_entries;
  Compare m_compare;
};
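
// Example usage of RangeDataVector (an illustrative sketch; the types and
// values are arbitrary). The vector must be sorted before lookup, after which
// FindEntryThatContains() returns the entry whose range covers the address,
// along with its payload:
//
//   RangeDataVector<lldb::addr_t, lldb::addr_t, uint32_t> map;
//   map.Append({0x2000, 0x100, 2});
//   map.Append({0x1000, 0x100, 1});
//   map.Sort();
//   if (const auto *entry = map.FindEntryThatContains(0x1050))
//     /* entry->data == 1 */;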

// A simple address-with-data class where you get to define the type of
// the address "B" and the type for the associated data "T".
template <typename B, typename T> struct AddressData {
  typedef B BaseType;
  typedef T DataType;

  BaseType addr;
  DataType data;

  AddressData() : addr(), data() {}

  AddressData(B a, DataType d) : addr(a), data(d) {}

  bool operator<(const AddressData &rhs) const {
    if (this->addr == rhs.addr)
      return this->data < rhs.data;
    return this->addr < rhs.addr;
  }

  bool operator==(const AddressData &rhs) const {
    return this->addr == rhs.addr && this->data == rhs.data;
  }

  bool operator!=(const AddressData &rhs) const {
    return this->addr != rhs.addr || this->data != rhs.data;
  }
};

template <typename B, typename T, unsigned N> class AddressDataArray {
public:
  typedef AddressData<B, T> Entry;
  typedef llvm::SmallVector<Entry, N> Collection;

  AddressDataArray() = default;

  ~AddressDataArray() = default;

  void Append(const Entry &entry) { m_entries.push_back(entry); }

  void Sort() {
    if (m_entries.size() > 1)
      std::stable_sort(m_entries.begin(), m_entries.end());
  }

#ifdef ASSERT_RANGEMAP_ARE_SORTED
  bool IsSorted() const {
    typename Collection::const_iterator pos, end, prev;
    for (pos = m_entries.begin(), end = m_entries.end(), prev = end; pos != end;
         prev = pos++) {
      if (prev != end && *pos < *prev)
        return false;
    }
    return true;
  }
#endif

  void Clear() { m_entries.clear(); }

  bool IsEmpty() const { return m_entries.empty(); }

  size_t GetSize() const { return m_entries.size(); }

  const Entry *GetEntryAtIndex(size_t i) const {
    return ((i < m_entries.size()) ? &m_entries[i] : nullptr);
  }

  // Clients must ensure that "i" is a valid index prior to calling this
  // function
  const Entry &GetEntryRef(size_t i) const { return m_entries[i]; }

  static bool BaseLessThan(const Entry &lhs, const Entry &rhs) {
    return lhs.addr < rhs.addr;
  }

  Entry *FindEntry(B addr, bool exact_match_only) {
#ifdef ASSERT_RANGEMAP_ARE_SORTED
    assert(IsSorted());
#endif
    if (!m_entries.empty()) {
      Entry entry;
      entry.addr = addr;
      typename Collection::iterator begin = m_entries.begin();
      typename Collection::iterator end = m_entries.end();
      typename Collection::iterator pos =
          std::lower_bound(begin, end, entry, BaseLessThan);

      while (pos != begin && pos[-1].addr == addr)
        --pos;

      if (pos != end) {
        if (pos->addr == addr || !exact_match_only)
          return &(*pos);
      }
    }
    return nullptr;
  }

  const Entry *FindNextEntry(const Entry *entry) {
    if (entry >= m_entries.data() &&
        entry + 1 < m_entries.data() + m_entries.size())
      return entry + 1;
    return nullptr;
  }

  Entry *Back() { return (m_entries.empty() ? nullptr : &m_entries.back()); }

  const Entry *Back() const {
    return (m_entries.empty() ? nullptr : &m_entries.back());
  }

protected:
  Collection m_entries;
};
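
// Example usage of AddressDataArray (an illustrative sketch; the types and
// values are arbitrary). It maps single addresses, not ranges, to data, and
// FindEntry() can return either an exact match or the first entry at or after
// the given address:
//
//   AddressDataArray<lldb::addr_t, uint32_t, 4> addrs;
//   addrs.Append({0x1000, 1});
//   addrs.Append({0x2000, 2});
//   addrs.Sort();
//   auto *exact = addrs.FindEntry(0x1000, true);   // exact->data == 1
//   auto *next = addrs.FindEntry(0x1800, false);   // entry at 0x2000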

} // namespace lldb_private

#endif // LLDB_UTILITY_RANGEMAP_H
930