Back to home page

EIC code displayed by LXR

 
 

    


File indexing completed on 2026-05-10 08:42:58

0001 //===-- RangeMap.h ----------------------------------------------*- C++ -*-===//
0002 //
0003 // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
0004 // See https://llvm.org/LICENSE.txt for license information.
0005 // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
0006 //
0007 //===----------------------------------------------------------------------===//
0008 
0009 #ifndef LLDB_UTILITY_RANGEMAP_H
0010 #define LLDB_UTILITY_RANGEMAP_H
0011 
0012 #include <algorithm>
0013 #include <vector>
0014 
0015 #include "llvm/ADT/SmallVector.h"
0016 
0017 #include "lldb/lldb-private.h"
0018 
0019 // Uncomment to make sure all Range objects are sorted when needed
0020 //#define ASSERT_RANGEMAP_ARE_SORTED
0021 
0022 namespace lldb_private {
0023 
0024 // Templatized classes for dealing with generic ranges and also collections of
0025 // ranges, or collections of ranges that have associated data.
0026 
// A simple range class where you get to define the type of the range
// base "B", and the type used for the range byte size "S".
template <typename B, typename S> struct Range {
  typedef B BaseType;
  typedef S SizeType;

  BaseType base;
  SizeType size;

  Range() : base(0), size(0) {}

  Range(BaseType b, SizeType s) : base(b), size(s) {}

  /// Reset to an empty range starting at \a b.
  void Clear(BaseType b = 0) {
    base = b;
    size = 0;
  }

  BaseType GetRangeBase() const { return base; }

  /// Set the start value for the range, and keep the same size
  void SetRangeBase(BaseType b) { base = b; }

  /// Shift the whole range by \a slide without changing its size.
  void Slide(BaseType slide) { base += slide; }

  /// Advance the front of the range by \a s units; the size shrinks by the
  /// same amount, clamped at zero.
  void ShrinkFront(S s) {
    size = (s < size) ? size - s : SizeType(0);
    base += s;
  }

  /// Merge \a rhs into this range when the two adjoin or intersect.
  /// \return true if a merge happened, false when the ranges are disjoint.
  bool Union(const Range &rhs) {
    if (!DoesAdjoinOrIntersect(rhs))
      return false;
    const BaseType merged_end =
        std::max<BaseType>(GetRangeEnd(), rhs.GetRangeEnd());
    base = std::min<BaseType>(base, rhs.base);
    size = merged_end - base;
    return true;
  }

  /// Return the overlap of this range and \a rhs. The result is invalid
  /// (size == 0) when the two ranges do not intersect.
  Range Intersect(const Range &rhs) const {
    Range overlap;
    overlap.SetRangeBase(std::max(GetRangeBase(), rhs.GetRangeBase()));
    overlap.SetRangeEnd(std::min(GetRangeEnd(), rhs.GetRangeEnd()));
    return overlap;
  }

  BaseType GetRangeEnd() const { return base + size; }

  /// Resize so the range ends at \a end; an end at or before the base
  /// yields an empty range.
  void SetRangeEnd(BaseType end) {
    size = (end > base) ? end - base : SizeType(0);
  }

  SizeType GetByteSize() const { return size; }

  void SetByteSize(SizeType s) { size = s; }

  /// A range is valid when it spans at least one unit.
  bool IsValid() const { return size > 0; }

  /// True when \a r lies within [base, base + size).
  bool Contains(BaseType r) const {
    return r >= GetRangeBase() && r < GetRangeEnd();
  }

  /// Like Contains(), but also accepts the one-past-the-end value.
  bool ContainsEndInclusive(BaseType r) const {
    return r >= GetRangeBase() && r <= GetRangeEnd();
  }

  /// True when \a range lies entirely within this range.
  bool Contains(const Range &range) const {
    if (!Contains(range.GetRangeBase()))
      return false;
    return ContainsEndInclusive(range.GetRangeEnd());
  }

  // Returns true if the two ranges adjoin or intersect
  bool DoesAdjoinOrIntersect(const Range &rhs) const {
    return GetRangeBase() <= rhs.GetRangeEnd() &&
           rhs.GetRangeBase() <= GetRangeEnd();
  }

  // Returns true if the two ranges intersect
  bool DoesIntersect(const Range &rhs) const {
    return Intersect(rhs).IsValid();
  }

  /// Order by base address, breaking ties with the size.
  bool operator<(const Range &rhs) const {
    if (base != rhs.base)
      return base < rhs.base;
    return size < rhs.size;
  }

  bool operator==(const Range &rhs) const {
    return base == rhs.base && size == rhs.size;
  }

  bool operator!=(const Range &rhs) const { return !(*this == rhs); }
};
0135 
// A sorted vector of Range<B, S> entries. Storage is an llvm::SmallVector
// with N inline elements. Most lookup methods assume the entries have been
// sorted first (see Sort() and the ASSERT_RANGEMAP_ARE_SORTED checks).
template <typename B, typename S, unsigned N = 0> class RangeVector {
public:
  typedef B BaseType;
  typedef S SizeType;
  typedef Range<B, S> Entry;
  typedef llvm::SmallVector<Entry, N> Collection;

  RangeVector() = default;

  ~RangeVector() = default;

  /// Return the ranges in which \a vec1 and \a vec2 overlap. Both inputs
  /// must already be sorted; the result is produced with a single linear
  /// merge pass over the two vectors.
  static RangeVector GetOverlaps(const RangeVector &vec1,
                                 const RangeVector &vec2) {
#ifdef ASSERT_RANGEMAP_ARE_SORTED
    assert(vec1.IsSorted() && vec2.IsSorted());
#endif
    RangeVector result;
    auto pos1 = vec1.begin();
    auto end1 = vec1.end();
    auto pos2 = vec2.begin();
    auto end2 = vec2.end();
    while (pos1 != end1 && pos2 != end2) {
      Entry entry = pos1->Intersect(*pos2);
      if (entry.IsValid())
        result.Append(entry);
      // Advance whichever side ends first; the other entry may still
      // intersect the next entry on the advanced side.
      if (pos1->GetRangeEnd() < pos2->GetRangeEnd())
        ++pos1;
      else
        ++pos2;
    }
    return result;
  }

  /// Element-wise equality: same entry count and every entry equal.
  bool operator==(const RangeVector &rhs) const {
    if (GetSize() != rhs.GetSize())
      return false;
    for (size_t i = 0; i < GetSize(); ++i) {
      if (GetEntryRef(i) != rhs.GetEntryRef(i))
        return false;
    }
    return true;
  }

  /// Append without maintaining sort order; call Sort() before queries.
  void Append(const Entry &entry) { m_entries.push_back(entry); }

  void Append(B base, S size) { m_entries.emplace_back(base, size); }

  // Insert an item into a sorted list and optionally combine it with any
  // adjacent blocks if requested.
  void Insert(const Entry &entry, bool combine) {
    if (m_entries.empty()) {
      m_entries.push_back(entry);
      return;
    }
    auto begin = m_entries.begin();
    auto end = m_entries.end();
    auto pos = std::lower_bound(begin, end, entry);
    if (combine) {
      // Try merging into the entry at the insertion point first, then into
      // the entry just before it; a successful merge may make further
      // neighbors mergeable, which CombinePrevAndNext handles.
      if (pos != end && pos->Union(entry)) {
        CombinePrevAndNext(pos);
        return;
      }
      if (pos != begin) {
        auto prev = pos - 1;
        if (prev->Union(entry)) {
          CombinePrevAndNext(prev);
          return;
        }
      }
    }
    m_entries.insert(pos, entry);
  }

  /// Erase the entry at \a idx. Returns false when \a idx is out of range.
  bool RemoveEntryAtIndex(uint32_t idx) {
    if (idx < m_entries.size()) {
      m_entries.erase(m_entries.begin() + idx);
      return true;
    }
    return false;
  }

  void Sort() {
    if (m_entries.size() > 1)
      std::stable_sort(m_entries.begin(), m_entries.end());
  }

#ifdef ASSERT_RANGEMAP_ARE_SORTED
  bool IsSorted() const {
    typename Collection::const_iterator pos, end, prev;
    // Sorted means no entry compares less than its predecessor.
    for (pos = m_entries.begin(), end = m_entries.end(), prev = end; pos != end;
         prev = pos++) {
      if (prev != end && *pos < *prev)
        return false;
    }
    return true;
  }
#endif

  /// Merge every run of adjoining or intersecting neighbors into a single
  /// entry. Requires the vector to be sorted.
  void CombineConsecutiveRanges() {
#ifdef ASSERT_RANGEMAP_ARE_SORTED
    assert(IsSorted());
#endif
    auto first_intersect = std::adjacent_find(
        m_entries.begin(), m_entries.end(), [](const Entry &a, const Entry &b) {
          return a.DoesAdjoinOrIntersect(b);
        });
    if (first_intersect == m_entries.end())
      return;

    // We can combine at least one entry, then we make a new collection and
    // populate it accordingly, and then swap it into place.
    auto pos = std::next(first_intersect);
    Collection minimal_ranges(m_entries.begin(), pos);
    for (; pos != m_entries.end(); ++pos) {
      Entry &back = minimal_ranges.back();
      if (back.DoesAdjoinOrIntersect(*pos))
        back.SetRangeEnd(std::max(back.GetRangeEnd(), pos->GetRangeEnd()));
      else
        minimal_ranges.push_back(*pos);
    }
    m_entries.swap(minimal_ranges);
  }

  /// Smallest base address across all entries, or \a fail_value when empty.
  BaseType GetMinRangeBase(BaseType fail_value) const {
#ifdef ASSERT_RANGEMAP_ARE_SORTED
    assert(IsSorted());
#endif
    if (m_entries.empty())
      return fail_value;
    // m_entries must be sorted, so if we aren't empty, we grab the first
    // range's base
    return m_entries.front().GetRangeBase();
  }

  /// Largest range end across all entries, or \a fail_value when empty.
  BaseType GetMaxRangeEnd(BaseType fail_value) const {
#ifdef ASSERT_RANGEMAP_ARE_SORTED
    assert(IsSorted());
#endif
    if (m_entries.empty())
      return fail_value;
    // m_entries must be sorted, so if we aren't empty, we grab the last
    // range's end
    return m_entries.back().GetRangeEnd();
  }

  /// Shift every entry's base by \a slide; sizes are unchanged.
  void Slide(BaseType slide) {
    typename Collection::iterator pos, end;
    for (pos = m_entries.begin(), end = m_entries.end(); pos != end; ++pos)
      pos->Slide(slide);
  }

  void Clear() { m_entries.clear(); }

  void Reserve(typename Collection::size_type size) { m_entries.reserve(size); }

  bool IsEmpty() const { return m_entries.empty(); }

  size_t GetSize() const { return m_entries.size(); }

  /// Return the entry at \a i, or nullptr when \a i is out of range.
  const Entry *GetEntryAtIndex(size_t i) const {
    return ((i < m_entries.size()) ? &m_entries[i] : nullptr);
  }

  // Clients must ensure that "i" is a valid index prior to calling this
  // function
  Entry &GetEntryRef(size_t i) { return m_entries[i]; }
  const Entry &GetEntryRef(size_t i) const { return m_entries[i]; }

  Entry *Back() { return (m_entries.empty() ? nullptr : &m_entries.back()); }

  const Entry *Back() const {
    return (m_entries.empty() ? nullptr : &m_entries.back());
  }

  static bool BaseLessThan(const Entry &lhs, const Entry &rhs) {
    return lhs.GetRangeBase() < rhs.GetRangeBase();
  }

  /// Binary search for the entry containing \a addr. Returns its index, or
  /// UINT32_MAX when no entry contains it. Requires sorted entries.
  uint32_t FindEntryIndexThatContains(B addr) const {
#ifdef ASSERT_RANGEMAP_ARE_SORTED
    assert(IsSorted());
#endif
    if (!m_entries.empty()) {
      Entry entry(addr, 1);
      typename Collection::const_iterator begin = m_entries.begin();
      typename Collection::const_iterator end = m_entries.end();
      typename Collection::const_iterator pos =
          std::lower_bound(begin, end, entry, BaseLessThan);

      // lower_bound lands on the first entry whose base is >= addr; the
      // containing entry is either that one or the one just before it.
      if (pos != end && pos->Contains(addr)) {
        return std::distance(begin, pos);
      } else if (pos != begin) {
        --pos;
        if (pos->Contains(addr))
          return std::distance(begin, pos);
      }
    }
    return UINT32_MAX;
  }

  /// Like FindEntryIndexThatContains(), but returns a pointer to the entry
  /// (or nullptr when no entry contains \a addr).
  const Entry *FindEntryThatContains(B addr) const {
#ifdef ASSERT_RANGEMAP_ARE_SORTED
    assert(IsSorted());
#endif
    if (!m_entries.empty()) {
      Entry entry(addr, 1);
      typename Collection::const_iterator begin = m_entries.begin();
      typename Collection::const_iterator end = m_entries.end();
      typename Collection::const_iterator pos =
          std::lower_bound(begin, end, entry, BaseLessThan);

      if (pos != end && pos->Contains(addr)) {
        return &(*pos);
      } else if (pos != begin) {
        --pos;
        if (pos->Contains(addr)) {
          return &(*pos);
        }
      }
    }
    return nullptr;
  }

  /// Find an entry that wholly contains \a range, or nullptr when none
  /// does. Requires sorted entries.
  const Entry *FindEntryThatContains(const Entry &range) const {
#ifdef ASSERT_RANGEMAP_ARE_SORTED
    assert(IsSorted());
#endif
    if (!m_entries.empty()) {
      typename Collection::const_iterator begin = m_entries.begin();
      typename Collection::const_iterator end = m_entries.end();
      typename Collection::const_iterator pos =
          std::lower_bound(begin, end, range, BaseLessThan);

      if (pos != end && pos->Contains(range)) {
        return &(*pos);
      } else if (pos != begin) {
        --pos;
        if (pos->Contains(range)) {
          return &(*pos);
        }
      }
    }
    return nullptr;
  }

  using const_iterator = typename Collection::const_iterator;
  const_iterator begin() const { return m_entries.begin(); }
  const_iterator end() const { return m_entries.end(); }

protected:
  /// After the entry at \a pos has absorbed a new range, merge it with its
  /// previous and next neighbors if they now adjoin or intersect.
  void CombinePrevAndNext(typename Collection::iterator pos) {
    // Check if the prev or next entries in case they need to be unioned with
    // the entry pointed to by "pos".
    if (pos != m_entries.begin()) {
      // "prev" stays valid across the erase because it precedes "pos".
      auto prev = pos - 1;
      if (prev->Union(*pos))
        m_entries.erase(pos);
      pos = prev;
    }

    auto end = m_entries.end();
    if (pos != end) {
      auto next = pos + 1;
      if (next != end) {
        if (pos->Union(*next))
          m_entries.erase(next);
      }
    }
  }

  Collection m_entries;
};
0410 
// A simple range-with-data class where you get to define the type of
// the range base "B", the type used for the range byte size "S", and the type
// for the associated data "T".
template <typename B, typename S, typename T>
struct RangeData : public Range<B, S> {
  typedef T DataType;

  // Payload associated with the [base, base + size) range.
  DataType data;

  RangeData() : Range<B, S>(), data() {}

  RangeData(B base, S size) : Range<B, S>(base, size), data() {}

  RangeData(B base, S size, DataType d) : Range<B, S>(base, size), data(d) {}
};
0426 
// We can treat the vector as a flattened Binary Search Tree; augmenting it
// with upper bounds (max of range endpoints) for every index allows us to
// query for range containment quicker.
template <typename B, typename S, typename T>
struct AugmentedRangeData : public RangeData<B, S, T> {
  // Maximum GetRangeEnd() over this entry and all entries in the implicit
  // subtree rooted at its index (filled in by
  // RangeDataVector::ComputeUpperBounds after sorting).
  B upper_bound;

  AugmentedRangeData(const RangeData<B, S, T> &rd)
      : RangeData<B, S, T>(rd), upper_bound() {}
};
0437 
// A vector of RangeData<B, S, T> entries that attaches a payload "T" to each
// range. Entries carry an interval-tree style upper bound (see
// AugmentedRangeData) that Sort() recomputes so containment queries can prune
// whole subranges.
template <typename B, typename S, typename T, unsigned N = 0,
          class Compare = std::less<T>>
class RangeDataVector {
public:
  typedef lldb_private::Range<B, S> Range;
  typedef RangeData<B, S, T> Entry;
  typedef AugmentedRangeData<B, S, T> AugmentedEntry;
  typedef llvm::SmallVector<AugmentedEntry, N> Collection;

  RangeDataVector(Compare compare = Compare()) : m_compare(compare) {}

  ~RangeDataVector() = default;

  /// Append without maintaining sort order; call Sort() before queries.
  void Append(const Entry &entry) { m_entries.emplace_back(entry); }

  /// Append a range with data to the vector
  /// \param B The base of the memory range
  /// \param S The size of the memory range
  /// \param T The data associated with the memory range
  void Append(B &&b, S &&s, T &&t) { m_entries.emplace_back(Entry(b, s, t)); }

  /// Erase the half-open index range [start, end). Returns false when the
  /// bounds do not describe a valid slice of the vector.
  bool Erase(uint32_t start, uint32_t end) {
    if (start >= end || end > m_entries.size())
      return false;
    m_entries.erase(begin() + start, begin() + end);
    return true;
  }

  /// Stable-sort entries by base, then size, then data (via the
  /// user-supplied Compare), then recompute the subtree upper bounds used
  /// by the containment queries.
  void Sort() {
    if (m_entries.size() > 1)
      std::stable_sort(m_entries.begin(), m_entries.end(),
                       [&compare = m_compare](const Entry &a, const Entry &b) {
                         if (a.base != b.base)
                           return a.base < b.base;
                         if (a.size != b.size)
                           return a.size < b.size;
                         return compare(a.data, b.data);
                       });
    if (!m_entries.empty())
      ComputeUpperBounds(0, m_entries.size());
  }

#ifdef ASSERT_RANGEMAP_ARE_SORTED
  bool IsSorted() const {
    typename Collection::const_iterator pos, end, prev;
    // Sorted means no entry compares less than its predecessor.
    for (pos = m_entries.begin(), end = m_entries.end(), prev = end; pos != end;
         prev = pos++) {
      if (prev != end && *pos < *prev)
        return false;
    }
    return true;
  }
#endif

  /// Merge each run of consecutive entries whose data compare equal into a
  /// single entry spanning the whole run. Requires sorted entries.
  void CombineConsecutiveEntriesWithEqualData() {
#ifdef ASSERT_RANGEMAP_ARE_SORTED
    assert(IsSorted());
#endif
    typename Collection::iterator pos;
    typename Collection::iterator end;
    typename Collection::iterator prev;
    bool can_combine = false;
    // First we determine if we can combine any of the Entry objects so we
    // don't end up allocating and making a new collection for no reason
    for (pos = m_entries.begin(), end = m_entries.end(), prev = end; pos != end;
         prev = pos++) {
      if (prev != end && prev->data == pos->data) {
        can_combine = true;
        break;
      }
    }

    // We can combine at least one entry, then we make a new collection and
    // populate it accordingly, and then swap it into place.
    if (can_combine) {
      Collection minimal_ranges;
      for (pos = m_entries.begin(), end = m_entries.end(), prev = end;
           pos != end; prev = pos++) {
        if (prev != end && prev->data == pos->data)
          minimal_ranges.back().SetRangeEnd(pos->GetRangeEnd());
        else
          minimal_ranges.push_back(*pos);
      }
      // Use the swap technique in case our new vector is much smaller. We must
      // swap when using the STL because std::vector objects never release or
      // reduce the memory once it has been allocated/reserved.
      m_entries.swap(minimal_ranges);
    }
  }

  void Clear() { m_entries.clear(); }

  bool IsEmpty() const { return m_entries.empty(); }

  size_t GetSize() const { return m_entries.size(); }

  /// Return the entry at \a i, or nullptr when \a i is out of range.
  const Entry *GetEntryAtIndex(size_t i) const {
    return ((i < m_entries.size()) ? &m_entries[i] : nullptr);
  }

  Entry *GetMutableEntryAtIndex(size_t i) {
    return ((i < m_entries.size()) ? &m_entries[i] : nullptr);
  }

  // Clients must ensure that "i" is a valid index prior to calling this
  // function
  Entry &GetEntryRef(size_t i) { return m_entries[i]; }
  const Entry &GetEntryRef(size_t i) const { return m_entries[i]; }

  static bool BaseLessThan(const Entry &lhs, const Entry &rhs) {
    return lhs.GetRangeBase() < rhs.GetRangeBase();
  }

  /// Return the index of the entry that contains \a addr, or UINT32_MAX.
  uint32_t FindEntryIndexThatContains(B addr) const {
    const AugmentedEntry *entry =
        static_cast<const AugmentedEntry *>(FindEntryThatContains(addr));
    if (entry)
      return std::distance(m_entries.begin(), entry);
    return UINT32_MAX;
  }

  /// Run the augmented-tree search (see the private overload below) for
  /// every entry containing \a addr and return the resulting size of
  /// \a indexes. Requires sorted entries.
  uint32_t FindEntryIndexesThatContain(B addr, std::vector<uint32_t> &indexes) {
#ifdef ASSERT_RANGEMAP_ARE_SORTED
    assert(IsSorted());
#endif
    if (!m_entries.empty())
      FindEntryIndexesThatContain(addr, 0, m_entries.size(), indexes);

    return indexes.size();
  }

  Entry *FindEntryThatContains(B addr) {
    // Delegate to the const overload and cast the constness back off.
    return const_cast<Entry *>(
        static_cast<const RangeDataVector *>(this)->FindEntryThatContains(
            addr));
  }

  const Entry *FindEntryThatContains(B addr) const {
    return FindEntryThatContains(Entry(addr, 1));
  }

  /// Find the first entry that wholly contains \a range, or nullptr when
  /// none does. Requires sorted entries.
  const Entry *FindEntryThatContains(const Entry &range) const {
#ifdef ASSERT_RANGEMAP_ARE_SORTED
    assert(IsSorted());
#endif
    if (!m_entries.empty()) {
      typename Collection::const_iterator begin = m_entries.begin();
      typename Collection::const_iterator end = m_entries.end();
      typename Collection::const_iterator pos =
          std::lower_bound(begin, end, range, BaseLessThan);

      // Back up over preceding entries that also contain the range so the
      // first such entry is the one returned.
      while (pos != begin && pos[-1].Contains(range))
        --pos;

      if (pos != end && pos->Contains(range))
        return &(*pos);
    }
    return nullptr;
  }

  /// Return the entry whose base is exactly \a addr, or nullptr.
  const Entry *FindEntryStartsAt(B addr) const {
#ifdef ASSERT_RANGEMAP_ARE_SORTED
    assert(IsSorted());
#endif
    if (!m_entries.empty()) {
      auto begin = m_entries.begin(), end = m_entries.end();
      auto pos = std::lower_bound(begin, end, Entry(addr, 1), BaseLessThan);
      if (pos != end && pos->base == addr)
        return &(*pos);
    }
    return nullptr;
  }

  // This method will return the entry that contains the given address, or the
  // entry following that address.  If you give it an address of 0 and the
  // first entry starts at address 0x100, you will get the entry at 0x100.
  //
  // For most uses, FindEntryThatContains is the correct one to use, this is a
  // less commonly needed behavior.  It was added for core file memory regions,
  // where we want to present a gap in the memory regions as a distinct region,
  // so we need to know the start address of the next memory section that
  // exists.
  const Entry *FindEntryThatContainsOrFollows(B addr) const {
#ifdef ASSERT_RANGEMAP_ARE_SORTED
    assert(IsSorted());
#endif
    if (!m_entries.empty()) {
      typename Collection::const_iterator begin = m_entries.begin();
      typename Collection::const_iterator end = m_entries.end();
      // First entry whose end is past addr: it either contains addr or is
      // the first entry that follows it.
      typename Collection::const_iterator pos = llvm::lower_bound(
          m_entries, addr, [](const Entry &lhs, B rhs_base) -> bool {
            return lhs.GetRangeEnd() <= rhs_base;
          });

      while (pos != begin && pos[-1].Contains(addr))
        --pos;

      if (pos != end)
        return &(*pos);
    }
    return nullptr;
  }

  /// Index-returning variant of FindEntryThatContainsOrFollows(); returns
  /// UINT32_MAX when no entry contains or follows \a addr.
  uint32_t FindEntryIndexThatContainsOrFollows(B addr) const {
#ifdef ASSERT_RANGEMAP_ARE_SORTED
    assert(IsSorted());
#endif
    const AugmentedEntry *entry = static_cast<const AugmentedEntry *>(
        FindEntryThatContainsOrFollows(addr));
    if (entry)
      return std::distance(m_entries.begin(), entry);
    return UINT32_MAX;
  }

  Entry *Back() { return (m_entries.empty() ? nullptr : &m_entries.back()); }

  const Entry *Back() const {
    return (m_entries.empty() ? nullptr : &m_entries.back());
  }

  using const_iterator = typename Collection::const_iterator;
  const_iterator begin() const { return m_entries.begin(); }
  const_iterator end() const { return m_entries.end(); }

protected:
  Collection m_entries;
  Compare m_compare;

private:
  // Compute extra information needed for search
  /// Recursively fill in AugmentedEntry::upper_bound for the implicit
  /// binary-tree node at the midpoint of [lo, hi) and return the maximum
  /// range end within that subtree.
  B ComputeUpperBounds(size_t lo, size_t hi) {
    size_t mid = (lo + hi) / 2;
    AugmentedEntry &entry = m_entries[mid];

    entry.upper_bound = entry.base + entry.size;

    if (lo < mid)
      entry.upper_bound =
          std::max(entry.upper_bound, ComputeUpperBounds(lo, mid));

    if (mid + 1 < hi)
      entry.upper_bound =
          std::max(entry.upper_bound, ComputeUpperBounds(mid + 1, hi));

    return entry.upper_bound;
  }

  // This is based on the augmented tree implementation found at
  // https://en.wikipedia.org/wiki/Interval_tree#Augmented_tree
  void FindEntryIndexesThatContain(B addr, size_t lo, size_t hi,
                                   std::vector<uint32_t> &indexes) {
    size_t mid = (lo + hi) / 2;
    const AugmentedEntry &entry = m_entries[mid];

    // addr is greater than the rightmost point of any interval below mid
    // so there cannot be any matches.
    if (addr > entry.upper_bound)
      return;

    // Recursively search left subtree
    if (lo < mid)
      FindEntryIndexesThatContain(addr, lo, mid, indexes);

    // If addr is smaller than the start of the current interval it
    // cannot contain it nor can any of its right subtree.
    if (addr < entry.base)
      return;

    if (entry.Contains(addr))
      // NOTE(review): this pushes the entry's *data*, not its position in
      // m_entries — callers appear to rely on T being an index-like type;
      // confirm before changing.
      indexes.push_back(entry.data);

    // Recursively search right subtree
    if (mid + 1 < hi)
      FindEntryIndexesThatContain(addr, mid + 1, hi, indexes);
  }
};
0714 
// A simple address/data pair class where you get to define the type of the
// address "B" and the type for the associated data "T". (Unlike RangeData,
// there is no size: each entry describes a single address.)
template <typename B, typename T> struct AddressData {
  typedef B BaseType;
  typedef T DataType;

  BaseType addr;
  DataType data;

  AddressData() : addr(), data() {}

  AddressData(B a, DataType d) : addr(a), data(d) {}

  /// Order by address first, then by data, so sorting is deterministic when
  /// multiple entries share the same address.
  bool operator<(const AddressData &rhs) const {
    if (this->addr == rhs.addr)
      return this->data < rhs.data;
    return this->addr < rhs.addr;
  }

  bool operator==(const AddressData &rhs) const {
    return this->addr == rhs.addr && this->data == rhs.data;
  }

  bool operator!=(const AddressData &rhs) const {
    // Fixed: the data comparison previously used "==", which made operator!=
    // disagree with operator== whenever the addresses matched (equal objects
    // compared unequal).
    return this->addr != rhs.addr || this->data != rhs.data;
  }
};
0743 
0744 template <typename B, typename T, unsigned N> class AddressDataArray {
0745 public:
0746   typedef AddressData<B, T> Entry;
0747   typedef llvm::SmallVector<Entry, N> Collection;
0748 
0749   AddressDataArray() = default;
0750 
0751   ~AddressDataArray() = default;
0752 
0753   void Append(const Entry &entry) { m_entries.push_back(entry); }
0754 
0755   void Sort() {
0756     if (m_entries.size() > 1)
0757       std::stable_sort(m_entries.begin(), m_entries.end());
0758   }
0759 
0760 #ifdef ASSERT_RANGEMAP_ARE_SORTED
0761   bool IsSorted() const {
0762     typename Collection::const_iterator pos, end, prev;
0763     // First we determine if we can combine any of the Entry objects so we
0764     // don't end up allocating and making a new collection for no reason
0765     for (pos = m_entries.begin(), end = m_entries.end(), prev = end; pos != end;
0766          prev = pos++) {
0767       if (prev != end && *pos < *prev)
0768         return false;
0769     }
0770     return true;
0771   }
0772 #endif
0773 
0774   void Clear() { m_entries.clear(); }
0775 
0776   bool IsEmpty() const { return m_entries.empty(); }
0777 
0778   size_t GetSize() const { return m_entries.size(); }
0779 
0780   const Entry *GetEntryAtIndex(size_t i) const {
0781     return ((i < m_entries.size()) ? &m_entries[i] : nullptr);
0782   }
0783 
0784   // Clients must ensure that "i" is a valid index prior to calling this
0785   // function
0786   const Entry &GetEntryRef(size_t i) const { return m_entries[i]; }
0787 
0788   static bool BaseLessThan(const Entry &lhs, const Entry &rhs) {
0789     return lhs.addr < rhs.addr;
0790   }
0791 
0792   Entry *FindEntry(B addr, bool exact_match_only) {
0793 #ifdef ASSERT_RANGEMAP_ARE_SORTED
0794     assert(IsSorted());
0795 #endif
0796     if (!m_entries.empty()) {
0797       Entry entry;
0798       entry.addr = addr;
0799       typename Collection::iterator begin = m_entries.begin();
0800       typename Collection::iterator end = m_entries.end();
0801       typename Collection::iterator pos =
0802           llvm::lower_bound(m_entries, entry, BaseLessThan);
0803 
0804       while (pos != begin && pos[-1].addr == addr)
0805         --pos;
0806 
0807       if (pos != end) {
0808         if (pos->addr == addr || !exact_match_only)
0809           return &(*pos);
0810       }
0811     }
0812     return nullptr;
0813   }
0814 
0815   const Entry *FindNextEntry(const Entry *entry) {
0816     if (entry >= &*m_entries.begin() && entry + 1 < &*m_entries.end())
0817       return entry + 1;
0818     return nullptr;
0819   }
0820 
0821   Entry *Back() { return (m_entries.empty() ? nullptr : &m_entries.back()); }
0822 
0823   const Entry *Back() const {
0824     return (m_entries.empty() ? nullptr : &m_entries.back());
0825   }
0826 
0827 protected:
0828   Collection m_entries;
0829 };
0830 
0831 } // namespace lldb_private
0832 
0833 #endif // LLDB_UTILITY_RANGEMAP_H