LLDB mainline
RangeMap.h
Go to the documentation of this file.
1//===-- RangeMap.h ----------------------------------------------*- C++ -*-===//
2//
3// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4// See https://llvm.org/LICENSE.txt for license information.
5// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6//
7//===----------------------------------------------------------------------===//
8
9#ifndef LLDB_UTILITY_RANGEMAP_H
10#define LLDB_UTILITY_RANGEMAP_H
11
12#include <algorithm>
13#include <vector>
14
15#include "llvm/ADT/SmallVector.h"
16
17#include "lldb/lldb-private.h"
18
19// Uncomment to make sure all Range objects are sorted when needed
20//#define ASSERT_RANGEMAP_ARE_SORTED
21
22namespace lldb_private {
23
24// Templatized classes for dealing with generic ranges and also collections of
25// ranges, or collections of ranges that have associated data.
26
// A simple range class where you get to define the type of the range
// base "B", and the type used for the range byte size "S".
template <typename B, typename S> struct Range {
  typedef B BaseType;
  typedef S SizeType;

  BaseType base;
  SizeType size;

  Range() : base(0), size(0) {}

  Range(BaseType b, SizeType s) : base(b), size(s) {}

  /// Reset to an empty range that starts at \a b.
  void Clear(BaseType b = 0) {
    base = b;
    size = 0;
  }

  BaseType GetRangeBase() const { return base; }

  /// Set the start value for the range, and keep the same size
  void SetRangeBase(BaseType b) { base = b; }

  void Slide(BaseType slide) { base += slide; }

  /// Trim \a s units off the front of the range; the size is clamped so it
  /// never underflows, but the base always advances by \a s.
  void ShrinkFront(S s) {
    base += s;
    size -= std::min(s, size);
  }

  /// Grow this range to also cover \a rhs if the two ranges adjoin or
  /// intersect. Returns true if the union was performed.
  bool Union(const Range &rhs) {
    if (DoesAdjoinOrIntersect(rhs)) {
      auto new_end = std::max<BaseType>(GetRangeEnd(), rhs.GetRangeEnd());
      base = std::min<BaseType>(base, rhs.base);
      size = new_end - base;
      return true;
    }
    return false;
  }

  /// Return the overlap of this range and \a rhs. The result is invalid
  /// (size == 0) when the two ranges do not intersect.
  Range Intersect(const Range &rhs) const {
    const BaseType lhs_base = this->GetRangeBase();
    const BaseType rhs_base = rhs.GetRangeBase();
    const BaseType lhs_end = this->GetRangeEnd();
    const BaseType rhs_end = rhs.GetRangeEnd();
    Range range;
    range.SetRangeBase(std::max(lhs_base, rhs_base));
    range.SetRangeEnd(std::min(lhs_end, rhs_end));
    return range;
  }

  BaseType GetRangeEnd() const { return base + size; }

  /// Set the one-past-the-end value, keeping the base fixed. An end at or
  /// before the base produces an empty (invalid) range.
  void SetRangeEnd(BaseType end) {
    if (end > base)
      size = end - base;
    else
      size = 0;
  }

  SizeType GetByteSize() const { return size; }

  void SetByteSize(SizeType s) { size = s; }

  bool IsValid() const { return size > 0; }

  /// True when \a r lies inside the half-open interval [base, end).
  bool Contains(BaseType r) const {
    return (GetRangeBase() <= r) && (r < GetRangeEnd());
  }

  /// Like Contains(), but also accepts the one-past-the-end address.
  bool ContainsEndInclusive(BaseType r) const {
    return (GetRangeBase() <= r) && (r <= GetRangeEnd());
  }

  bool Contains(const Range &range) const {
    return Contains(range.GetRangeBase()) &&
           ContainsEndInclusive(range.GetRangeEnd());
  }

  // Returns true if the two ranges adjoin or intersect
  bool DoesAdjoinOrIntersect(const Range &rhs) const {
    const BaseType lhs_base = this->GetRangeBase();
    const BaseType rhs_base = rhs.GetRangeBase();
    const BaseType lhs_end = this->GetRangeEnd();
    const BaseType rhs_end = rhs.GetRangeEnd();
    bool result = (lhs_base <= rhs_end) && (lhs_end >= rhs_base);
    return result;
  }

  // Returns true if the two ranges intersect
  bool DoesIntersect(const Range &rhs) const {
    return Intersect(rhs).IsValid();
  }

  bool operator<(const Range &rhs) const {
    if (base == rhs.base)
      return size < rhs.size;
    return base < rhs.base;
  }

  bool operator==(const Range &rhs) const {
    return base == rhs.base && size == rhs.size;
  }

  bool operator!=(const Range &rhs) const {
    return base != rhs.base || size != rhs.size;
  }
};
135
136template <typename B, typename S, unsigned N = 0> class RangeVector {
137public:
138 typedef B BaseType;
139 typedef S SizeType;
141 typedef llvm::SmallVector<Entry, N> Collection;
142
143 RangeVector() = default;
144
145 ~RangeVector() = default;
146
148 const RangeVector &vec2) {
149#ifdef ASSERT_RANGEMAP_ARE_SORTED
150 assert(vec1.IsSorted() && vec2.IsSorted());
151#endif
152 RangeVector result;
153 auto pos1 = vec1.begin();
154 auto end1 = vec1.end();
155 auto pos2 = vec2.begin();
156 auto end2 = vec2.end();
157 while (pos1 != end1 && pos2 != end2) {
158 Entry entry = pos1->Intersect(*pos2);
159 if (entry.IsValid())
160 result.Append(entry);
161 if (pos1->GetRangeEnd() < pos2->GetRangeEnd())
162 ++pos1;
163 else
164 ++pos2;
165 }
166 return result;
167 }
168
169 bool operator==(const RangeVector &rhs) const {
170 if (GetSize() != rhs.GetSize())
171 return false;
172 for (size_t i = 0; i < GetSize(); ++i) {
173 if (GetEntryRef(i) != rhs.GetEntryRef(i))
174 return false;
175 }
176 return true;
177 }
178
179 void Append(const Entry &entry) { m_entries.push_back(entry); }
180
181 void Append(B base, S size) { m_entries.emplace_back(base, size); }
182
183 // Insert an item into a sorted list and optionally combine it with any
184 // adjacent blocks if requested.
185 void Insert(const Entry &entry, bool combine) {
186 if (m_entries.empty()) {
187 m_entries.push_back(entry);
188 return;
189 }
190 auto begin = m_entries.begin();
191 auto end = m_entries.end();
192 auto pos = std::lower_bound(begin, end, entry);
193 if (combine) {
194 if (pos != end && pos->Union(entry)) {
196 return;
197 }
198 if (pos != begin) {
199 auto prev = pos - 1;
200 if (prev->Union(entry)) {
201 CombinePrevAndNext(prev);
202 return;
203 }
204 }
205 }
206 m_entries.insert(pos, entry);
207 }
208
209 bool RemoveEntryAtIndex(uint32_t idx) {
210 if (idx < m_entries.size()) {
211 m_entries.erase(m_entries.begin() + idx);
212 return true;
213 }
214 return false;
215 }
216
217 void Sort() {
218 if (m_entries.size() > 1)
219 llvm::stable_sort(m_entries);
220 }
221
222#ifdef ASSERT_RANGEMAP_ARE_SORTED
223 bool IsSorted() const {
224 typename Collection::const_iterator pos, end, prev;
225 // First we determine if we can combine any of the Entry objects so we
226 // don't end up allocating and making a new collection for no reason
227 for (pos = m_entries.begin(), end = m_entries.end(), prev = end; pos != end;
228 prev = pos++) {
229 if (prev != end && *pos < *prev)
230 return false;
231 }
232 return true;
233 }
234#endif
235
237#ifdef ASSERT_RANGEMAP_ARE_SORTED
238 assert(IsSorted());
239#endif
240 auto first_intersect = std::adjacent_find(
241 m_entries.begin(), m_entries.end(), [](const Entry &a, const Entry &b) {
242 return a.DoesAdjoinOrIntersect(b);
243 });
244 if (first_intersect == m_entries.end())
245 return;
246
247 // We can combine at least one entry, then we make a new collection and
248 // populate it accordingly, and then swap it into place.
249 auto pos = std::next(first_intersect);
250 Collection minimal_ranges(m_entries.begin(), pos);
251 for (; pos != m_entries.end(); ++pos) {
252 Entry &back = minimal_ranges.back();
253 if (back.DoesAdjoinOrIntersect(*pos))
254 back.SetRangeEnd(std::max(back.GetRangeEnd(), pos->GetRangeEnd()));
255 else
256 minimal_ranges.push_back(*pos);
257 }
258 m_entries.swap(minimal_ranges);
259 }
260
262#ifdef ASSERT_RANGEMAP_ARE_SORTED
263 assert(IsSorted());
264#endif
265 if (m_entries.empty())
266 return fail_value;
267 // m_entries must be sorted, so if we aren't empty, we grab the first
268 // range's base
269 return m_entries.front().GetRangeBase();
270 }
271
272 BaseType GetMaxRangeEnd(BaseType fail_value) const {
273#ifdef ASSERT_RANGEMAP_ARE_SORTED
274 assert(IsSorted());
275#endif
276 if (m_entries.empty())
277 return fail_value;
278 // m_entries must be sorted, so if we aren't empty, we grab the last
279 // range's end
280 return m_entries.back().GetRangeEnd();
281 }
282
283 void Slide(BaseType slide) {
284 typename Collection::iterator pos, end;
285 for (pos = m_entries.begin(), end = m_entries.end(); pos != end; ++pos)
286 pos->Slide(slide);
287 }
288
289 void Clear() { m_entries.clear(); }
290
291 void Reserve(typename Collection::size_type size) { m_entries.reserve(size); }
292
293 bool IsEmpty() const { return m_entries.empty(); }
294
295 size_t GetSize() const { return m_entries.size(); }
296
297 const Entry *GetEntryAtIndex(size_t i) const {
298 return ((i < m_entries.size()) ? &m_entries[i] : nullptr);
299 }
300
301 // Clients must ensure that "i" is a valid index prior to calling this
302 // function
303 Entry &GetEntryRef(size_t i) { return m_entries[i]; }
304 const Entry &GetEntryRef(size_t i) const { return m_entries[i]; }
305
306 Entry *Back() { return (m_entries.empty() ? nullptr : &m_entries.back()); }
307
308 const Entry *Back() const {
309 return (m_entries.empty() ? nullptr : &m_entries.back());
310 }
311
312 static bool BaseLessThan(const Entry &lhs, const Entry &rhs) {
313 return lhs.GetRangeBase() < rhs.GetRangeBase();
314 }
315
316 uint32_t FindEntryIndexThatContains(B addr) const {
317#ifdef ASSERT_RANGEMAP_ARE_SORTED
318 assert(IsSorted());
319#endif
320 if (!m_entries.empty()) {
321 Entry entry(addr, 1);
322 typename Collection::const_iterator begin = m_entries.begin();
323 typename Collection::const_iterator end = m_entries.end();
324 typename Collection::const_iterator pos =
325 std::lower_bound(begin, end, entry, BaseLessThan);
326
327 if (pos != end && pos->Contains(addr)) {
328 return std::distance(begin, pos);
329 } else if (pos != begin) {
330 --pos;
331 if (pos->Contains(addr))
332 return std::distance(begin, pos);
333 }
334 }
335 return UINT32_MAX;
336 }
337
338 const Entry *FindEntryThatContains(B addr) const {
339#ifdef ASSERT_RANGEMAP_ARE_SORTED
340 assert(IsSorted());
341#endif
342 if (!m_entries.empty()) {
343 Entry entry(addr, 1);
344 typename Collection::const_iterator begin = m_entries.begin();
345 typename Collection::const_iterator end = m_entries.end();
346 typename Collection::const_iterator pos =
347 std::lower_bound(begin, end, entry, BaseLessThan);
348
349 if (pos != end && pos->Contains(addr)) {
350 return &(*pos);
351 } else if (pos != begin) {
352 --pos;
353 if (pos->Contains(addr)) {
354 return &(*pos);
355 }
356 }
357 }
358 return nullptr;
359 }
360
361 const Entry *FindEntryThatContains(const Entry &range) const {
362#ifdef ASSERT_RANGEMAP_ARE_SORTED
363 assert(IsSorted());
364#endif
365 if (!m_entries.empty()) {
366 typename Collection::const_iterator begin = m_entries.begin();
367 typename Collection::const_iterator end = m_entries.end();
368 typename Collection::const_iterator pos =
369 std::lower_bound(begin, end, range, BaseLessThan);
370
371 if (pos != end && pos->Contains(range)) {
372 return &(*pos);
373 } else if (pos != begin) {
374 --pos;
375 if (pos->Contains(range)) {
376 return &(*pos);
377 }
378 }
379 }
380 return nullptr;
381 }
382
383 const Entry *FindEntryThatIntersects(const Entry &range) const {
384#ifdef ASSERT_RANGEMAP_ARE_SORTED
385 assert(IsSorted());
386#endif
387 if (!m_entries.empty()) {
388 typename Collection::const_iterator begin = m_entries.begin();
389 typename Collection::const_iterator end = m_entries.end();
390 typename Collection::const_iterator pos =
391 std::lower_bound(begin, end, range, BaseLessThan);
392
393 while (pos != begin && pos[-1].DoesIntersect(range))
394 --pos;
395
396 if (pos != end && pos->DoesIntersect(range))
397 return &(*pos);
398 }
399 return nullptr;
400 }
401
402 using const_iterator = typename Collection::const_iterator;
403 const_iterator begin() const { return m_entries.begin(); }
404 const_iterator end() const { return m_entries.end(); }
405
406protected:
407 void CombinePrevAndNext(typename Collection::iterator pos) {
408 // Check if the prev or next entries in case they need to be unioned with
409 // the entry pointed to by "pos".
410 if (pos != m_entries.begin()) {
411 auto prev = pos - 1;
412 if (prev->Union(*pos))
413 m_entries.erase(pos);
414 pos = prev;
415 }
416
417 auto end = m_entries.end();
418 if (pos != end) {
419 auto next = pos + 1;
420 if (next != end) {
421 if (pos->Union(*next))
422 m_entries.erase(next);
423 }
424 }
425 }
426
428};
429
430// A simple range with data class where you get to define the type of
431// the range base "B", the type used for the range byte size "S", and the type
432// for the associated data "T".
433template <typename B, typename S, typename T>
434struct RangeData : public Range<B, S> {
435 typedef T DataType;
436
438
439 RangeData() : Range<B, S>(), data() {}
440
441 RangeData(B base, S size) : Range<B, S>(base, size), data() {}
442
443 RangeData(B base, S size, DataType d) : Range<B, S>(base, size), data(d) {}
444};
445
446// We can treat the vector as a flattened Binary Search Tree, augmenting it
447// with upper bounds (max of range endpoints) for every index allows us to
448// query for range containment quicker.
449template <typename B, typename S, typename T>
450struct AugmentedRangeData : public RangeData<B, S, T> {
452
455};
456
457template <typename B, typename S, typename T, unsigned N = 0,
458 class Compare = std::less<T>>
460public:
464 typedef llvm::SmallVector<AugmentedEntry, N> Collection;
465
467
468 RangeDataVector(std::initializer_list<AugmentedEntry> entries,
470 : m_entries(entries), m_compare(compare) {}
471
472 ~RangeDataVector() = default;
473
474 void Append(const Entry &entry) { m_entries.emplace_back(entry); }
475
476 /// Append a range with data to the vector
477 /// \param B The base of the memory range
478 /// \param S The size of the memory range
479 /// \param T The data associated with the memory range
480 void Append(B &&b, S &&s, T &&t) { m_entries.emplace_back(Entry(b, s, t)); }
481
482 bool Erase(uint32_t start, uint32_t end) {
483 if (start >= end || end > m_entries.size())
484 return false;
485 m_entries.erase(begin() + start, begin() + end);
486 return true;
487 }
488
489 void Sort() {
490 if (m_entries.size() > 1)
491 llvm::stable_sort(m_entries,
492 [&compare = m_compare](const Entry &a, const Entry &b) {
493 if (a.base != b.base)
494 return a.base < b.base;
495 if (a.size != b.size)
496 return a.size < b.size;
497 return compare(a.data, b.data);
498 });
499 if (!m_entries.empty())
500 ComputeUpperBounds(0, m_entries.size());
501 }
502
503#ifdef ASSERT_RANGEMAP_ARE_SORTED
504 bool IsSorted() const {
505 typename Collection::const_iterator pos, end, prev;
506 for (pos = m_entries.begin(), end = m_entries.end(), prev = end; pos != end;
507 prev = pos++) {
508 if (prev != end && *pos < *prev)
509 return false;
510 }
511 return true;
512 }
513#endif
514
516#ifdef ASSERT_RANGEMAP_ARE_SORTED
517 assert(IsSorted());
518#endif
519 auto first_intersect = std::adjacent_find(
520 m_entries.begin(), m_entries.end(), [](const Entry &a, const Entry &b) {
521 return a.DoesAdjoinOrIntersect(b) && a.data == b.data;
522 });
523
524 if (first_intersect == m_entries.end())
525 return;
526
527 // We can combine at least one entry. Make a new collection and populate it
528 // accordingly, and then swap it into place.
529 auto pos = std::next(first_intersect);
530 Collection minimal_ranges(m_entries.begin(), pos);
531 for (; pos != m_entries.end(); ++pos) {
532 Entry &back = minimal_ranges.back();
533 if (back.DoesAdjoinOrIntersect(*pos) && back.data == pos->data)
534 back.SetRangeEnd(std::max(back.GetRangeEnd(), pos->GetRangeEnd()));
535 else
536 minimal_ranges.push_back(*pos);
537 }
538 m_entries.swap(minimal_ranges);
539 ComputeUpperBounds(0, m_entries.size());
540 }
541
542 void Clear() { m_entries.clear(); }
543
544 bool IsEmpty() const { return m_entries.empty(); }
545
546 size_t GetSize() const { return m_entries.size(); }
547
548 const Entry *GetEntryAtIndex(size_t i) const {
549 return ((i < m_entries.size()) ? &m_entries[i] : nullptr);
550 }
551
553 return ((i < m_entries.size()) ? &m_entries[i] : nullptr);
554 }
555
556 // Clients must ensure that "i" is a valid index prior to calling this
557 // function
558 Entry &GetEntryRef(size_t i) { return m_entries[i]; }
559 const Entry &GetEntryRef(size_t i) const { return m_entries[i]; }
560
561 static bool BaseLessThan(const Entry &lhs, const Entry &rhs) {
562 return lhs.GetRangeBase() < rhs.GetRangeBase();
563 }
564
565 uint32_t FindEntryIndexThatContains(B addr) const {
566 const AugmentedEntry *entry =
567 static_cast<const AugmentedEntry *>(FindEntryThatContains(addr));
568 if (entry)
569 return std::distance(m_entries.begin(), entry);
570 return UINT32_MAX;
571 }
572
573 uint32_t FindEntryIndexesThatContain(B addr, std::vector<uint32_t> &indexes) {
574#ifdef ASSERT_RANGEMAP_ARE_SORTED
575 assert(IsSorted());
576#endif
577 if (!m_entries.empty())
578 FindEntryIndexesThatContain(addr, 0, m_entries.size(), indexes);
579
580 return indexes.size();
581 }
582
584 return const_cast<Entry *>(
585 static_cast<const RangeDataVector *>(this)->FindEntryThatContains(
586 addr));
587 }
588
589 const Entry *FindEntryThatContains(B addr) const {
590 return FindEntryThatContains(Entry(addr, 1));
591 }
592
593 const Entry *FindEntryThatContains(const Entry &range) const {
594#ifdef ASSERT_RANGEMAP_ARE_SORTED
595 assert(IsSorted());
596#endif
597 if (!m_entries.empty()) {
598 typename Collection::const_iterator begin = m_entries.begin();
599 typename Collection::const_iterator end = m_entries.end();
600 typename Collection::const_iterator pos =
601 std::lower_bound(begin, end, range, BaseLessThan);
602
603 while (pos != begin && pos[-1].Contains(range))
604 --pos;
605
606 if (pos != end && pos->Contains(range))
607 return &(*pos);
608 }
609 return nullptr;
610 }
611
612 const Entry *FindEntryStartsAt(B addr) const {
613#ifdef ASSERT_RANGEMAP_ARE_SORTED
614 assert(IsSorted());
615#endif
616 if (!m_entries.empty()) {
617 auto begin = m_entries.begin(), end = m_entries.end();
618 auto pos = std::lower_bound(begin, end, Entry(addr, 1), BaseLessThan);
619 if (pos != end && pos->base == addr)
620 return &(*pos);
621 }
622 return nullptr;
623 }
624
625 // This method will return the entry that contains the given address, or the
626 // entry following that address. If you give it an address of 0 and the
627 // first entry starts at address 0x100, you will get the entry at 0x100.
628 //
629 // For most uses, FindEntryThatContains is the correct one to use, this is a
630 // less commonly needed behavior. It was added for core file memory regions,
631 // where we want to present a gap in the memory regions as a distinct region,
632 // so we need to know the start address of the next memory section that
633 // exists.
635#ifdef ASSERT_RANGEMAP_ARE_SORTED
636 assert(IsSorted());
637#endif
638 if (!m_entries.empty()) {
639 typename Collection::const_iterator begin = m_entries.begin();
640 typename Collection::const_iterator end = m_entries.end();
641 typename Collection::const_iterator pos = llvm::lower_bound(
642 m_entries, addr, [](const Entry &lhs, B rhs_base) -> bool {
643 return lhs.GetRangeEnd() <= rhs_base;
644 });
645
646 while (pos != begin && pos[-1].Contains(addr))
647 --pos;
648
649 if (pos != end)
650 return &(*pos);
651 }
652 return nullptr;
653 }
654
656#ifdef ASSERT_RANGEMAP_ARE_SORTED
657 assert(IsSorted());
658#endif
659 const AugmentedEntry *entry = static_cast<const AugmentedEntry *>(
661 if (entry)
662 return std::distance(m_entries.begin(), entry);
663 return UINT32_MAX;
664 }
665
666 Entry *Back() { return (m_entries.empty() ? nullptr : &m_entries.back()); }
667
668 const Entry *Back() const {
669 return (m_entries.empty() ? nullptr : &m_entries.back());
670 }
671
672 using const_iterator = typename Collection::const_iterator;
673 const_iterator begin() const { return m_entries.begin(); }
674 const_iterator end() const { return m_entries.end(); }
675
676protected:
679
680private:
681 // Compute extra information needed for search
682 B ComputeUpperBounds(size_t lo, size_t hi) {
683 size_t mid = (lo + hi) / 2;
684 AugmentedEntry &entry = m_entries[mid];
685
686 entry.upper_bound = entry.base + entry.size;
687
688 if (lo < mid)
689 entry.upper_bound =
690 std::max(entry.upper_bound, ComputeUpperBounds(lo, mid));
691
692 if (mid + 1 < hi)
693 entry.upper_bound =
694 std::max(entry.upper_bound, ComputeUpperBounds(mid + 1, hi));
695
696 return entry.upper_bound;
697 }
698
699 // This is based on the augmented tree implementation found at
700 // https://en.wikipedia.org/wiki/Interval_tree#Augmented_tree
701 void FindEntryIndexesThatContain(B addr, size_t lo, size_t hi,
702 std::vector<uint32_t> &indexes) {
703 size_t mid = (lo + hi) / 2;
704 const AugmentedEntry &entry = m_entries[mid];
705
706 // addr is greater than the rightmost point of any interval below mid
707 // so there are cannot be any matches.
708 if (addr > entry.upper_bound)
709 return;
710
711 // Recursively search left subtree
712 if (lo < mid)
713 FindEntryIndexesThatContain(addr, lo, mid, indexes);
714
715 // If addr is smaller than the start of the current interval it
716 // cannot contain it nor can any of its right subtree.
717 if (addr < entry.base)
718 return;
719
720 if (entry.Contains(addr))
721 indexes.push_back(entry.data);
722
723 // Recursively search right subtree
724 if (mid + 1 < hi)
725 FindEntryIndexesThatContain(addr, mid + 1, hi, indexes);
726 }
727};
728
// A simple pair of an address of type "B" and a piece of associated data
// of type "T", ordered by address then data.
template <typename B, typename T> struct AddressData {
  typedef B BaseType;
  typedef T DataType;

  BaseType addr;
  DataType data;

  AddressData() : addr(), data() {}

  AddressData(B a, DataType d) : addr(a), data(d) {}

  bool operator<(const AddressData &rhs) const {
    if (this->addr == rhs.addr)
      return this->data < rhs.data;
    return this->addr < rhs.addr;
  }

  bool operator==(const AddressData &rhs) const {
    return this->addr == rhs.addr && this->data == rhs.data;
  }

  bool operator!=(const AddressData &rhs) const {
    // Fixed: the data comparison used "==", which made operator!= disagree
    // with operator== (equal pairs compared as unequal).
    return this->addr != rhs.addr || this->data != rhs.data;
  }
};
757
758template <typename B, typename T, unsigned N> class AddressDataArray {
759public:
761 typedef llvm::SmallVector<Entry, N> Collection;
762
763 AddressDataArray() = default;
764
765 ~AddressDataArray() = default;
766
767 void Append(const Entry &entry) { m_entries.push_back(entry); }
768
769 void Sort() {
770 if (m_entries.size() > 1)
771 std::stable_sort(m_entries.begin(), m_entries.end());
772 }
773
774#ifdef ASSERT_RANGEMAP_ARE_SORTED
775 bool IsSorted() const {
776 typename Collection::const_iterator pos, end, prev;
777 // First we determine if we can combine any of the Entry objects so we
778 // don't end up allocating and making a new collection for no reason
779 for (pos = m_entries.begin(), end = m_entries.end(), prev = end; pos != end;
780 prev = pos++) {
781 if (prev != end && *pos < *prev)
782 return false;
783 }
784 return true;
785 }
786#endif
787
788 void Clear() { m_entries.clear(); }
789
790 bool IsEmpty() const { return m_entries.empty(); }
791
792 size_t GetSize() const { return m_entries.size(); }
793
794 const Entry *GetEntryAtIndex(size_t i) const {
795 return ((i < m_entries.size()) ? &m_entries[i] : nullptr);
796 }
797
798 // Clients must ensure that "i" is a valid index prior to calling this
799 // function
800 const Entry &GetEntryRef(size_t i) const { return m_entries[i]; }
801
802 static bool BaseLessThan(const Entry &lhs, const Entry &rhs) {
803 return lhs.addr < rhs.addr;
804 }
805
806 Entry *FindEntry(B addr, bool exact_match_only) {
807#ifdef ASSERT_RANGEMAP_ARE_SORTED
808 assert(IsSorted());
809#endif
810 if (!m_entries.empty()) {
811 Entry entry;
812 entry.addr = addr;
813 typename Collection::iterator begin = m_entries.begin();
814 typename Collection::iterator end = m_entries.end();
815 typename Collection::iterator pos =
816 llvm::lower_bound(m_entries, entry, BaseLessThan);
817
818 while (pos != begin && pos[-1].addr == addr)
819 --pos;
820
821 if (pos != end) {
822 if (pos->addr == addr || !exact_match_only)
823 return &(*pos);
824 }
825 }
826 return nullptr;
827 }
828
829 const Entry *FindNextEntry(const Entry *entry) {
830 if (entry >= &*m_entries.begin() && entry + 1 < &*m_entries.end())
831 return entry + 1;
832 return nullptr;
833 }
834
835 Entry *Back() { return (m_entries.empty() ? nullptr : &m_entries.back()); }
836
837 const Entry *Back() const {
838 return (m_entries.empty() ? nullptr : &m_entries.back());
839 }
840
841protected:
843};
844
845} // namespace lldb_private
846
847#endif // LLDB_UTILITY_RANGEMAP_H
static bool Compare(BreakpointLocationSP lhs, lldb::break_id_t val)
Entry * FindEntry(B addr, bool exact_match_only)
Definition RangeMap.h:806
llvm::SmallVector< Entry, N > Collection
Definition RangeMap.h:761
AddressData< B, T > Entry
Definition RangeMap.h:760
const Entry * GetEntryAtIndex(size_t i) const
Definition RangeMap.h:794
const Entry * Back() const
Definition RangeMap.h:837
const Entry & GetEntryRef(size_t i) const
Definition RangeMap.h:800
void Append(const Entry &entry)
Definition RangeMap.h:767
static bool BaseLessThan(const Entry &lhs, const Entry &rhs)
Definition RangeMap.h:802
const Entry * FindNextEntry(const Entry *entry)
Definition RangeMap.h:829
const Entry * FindEntryThatContains(B addr) const
Definition RangeMap.h:589
AugmentedRangeData< lldb::addr_t, lldb::addr_t, DWARFExpression > AugmentedEntry
Definition RangeMap.h:463
void CombineConsecutiveEntriesWithEqualData()
Definition RangeMap.h:515
uint32_t FindEntryIndexThatContainsOrFollows(B addr) const
Definition RangeMap.h:655
uint32_t FindEntryIndexesThatContain(B addr, std::vector< uint32_t > &indexes)
Definition RangeMap.h:573
Entry & GetEntryRef(size_t i)
Definition RangeMap.h:558
uint32_t FindEntryIndexThatContains(B addr) const
Definition RangeMap.h:565
B ComputeUpperBounds(size_t lo, size_t hi)
Definition RangeMap.h:682
const Entry * FindEntryThatContains(const Entry &range) const
Definition RangeMap.h:593
static bool BaseLessThan(const Entry &lhs, const Entry &rhs)
Definition RangeMap.h:561
const Entry * GetEntryAtIndex(size_t i) const
Definition RangeMap.h:548
Entry * GetMutableEntryAtIndex(size_t i)
Definition RangeMap.h:552
const Entry & GetEntryRef(size_t i) const
Definition RangeMap.h:559
const Entry * Back() const
Definition RangeMap.h:668
const_iterator begin() const
Definition RangeMap.h:673
RangeDataVector(std::initializer_list< AugmentedEntry > entries, Compare compare=Compare())
Definition RangeMap.h:468
const Entry * FindEntryStartsAt(B addr) const
Definition RangeMap.h:612
const Entry * FindEntryThatContainsOrFollows(B addr) const
Definition RangeMap.h:634
void Append(const Entry &entry)
Definition RangeMap.h:474
void Append(B &&b, S &&s, T &&t)
Append a range with data to the vector.
Definition RangeMap.h:480
RangeDataVector(Compare compare=Compare())
Definition RangeMap.h:466
bool Erase(uint32_t start, uint32_t end)
Definition RangeMap.h:482
void FindEntryIndexesThatContain(B addr, size_t lo, size_t hi, std::vector< uint32_t > &indexes)
Definition RangeMap.h:701
Entry * FindEntryThatContains(B addr)
Definition RangeMap.h:583
const Entry * FindEntryThatContains(B addr) const
Definition RangeMap.h:338
typename Collection::const_iterator const_iterator
Definition RangeMap.h:402
uint32_t FindEntryIndexThatContains(B addr) const
Definition RangeMap.h:316
const Entry & GetEntryRef(size_t i) const
Definition RangeMap.h:304
void Slide(BaseType slide)
Definition RangeMap.h:283
const Entry * GetEntryAtIndex(size_t i) const
Definition RangeMap.h:297
void CombinePrevAndNext(typename Collection::iterator pos)
Definition RangeMap.h:407
const_iterator begin() const
Definition RangeMap.h:403
BaseType GetMaxRangeEnd(BaseType fail_value) const
Definition RangeMap.h:272
void Append(B base, S size)
Definition RangeMap.h:181
Entry & GetEntryRef(size_t i)
Definition RangeMap.h:303
bool RemoveEntryAtIndex(uint32_t idx)
Definition RangeMap.h:209
static bool BaseLessThan(const Entry &lhs, const Entry &rhs)
Definition RangeMap.h:312
void Append(const Entry &entry)
Definition RangeMap.h:179
llvm::SmallVector< Entry, N > Collection
Definition RangeMap.h:141
BaseType GetMinRangeBase(BaseType fail_value) const
Definition RangeMap.h:261
static RangeVector GetOverlaps(const RangeVector &vec1, const RangeVector &vec2)
Definition RangeMap.h:147
size_t GetSize() const
Definition RangeMap.h:295
void Reserve(typename Collection::size_type size)
Definition RangeMap.h:291
const_iterator end() const
Definition RangeMap.h:404
void Insert(const Entry &entry, bool combine)
Definition RangeMap.h:185
const Entry * FindEntryThatIntersects(const Entry &range) const
Definition RangeMap.h:383
const Entry * Back() const
Definition RangeMap.h:308
bool operator==(const RangeVector &rhs) const
Definition RangeMap.h:169
const Entry * FindEntryThatContains(const Entry &range) const
Definition RangeMap.h:361
#define UINT32_MAX
A class that represents a running process on the host machine.
llvm::APFloat::cmpResult compare(Scalar lhs, Scalar rhs)
Definition Scalar.cpp:872
bool operator==(const AddressData &rhs) const
Definition RangeMap.h:749
bool operator<(const AddressData &rhs) const
Definition RangeMap.h:743
bool operator!=(const AddressData &rhs) const
Definition RangeMap.h:753
AddressData(B a, DataType d)
Definition RangeMap.h:741
AugmentedRangeData(const RangeData< B, S, T > &rd)
Definition RangeMap.h:453
RangeData(B base, S size, DataType d)
Definition RangeMap.h:443
RangeData(B base, S size)
Definition RangeMap.h:441
bool ContainsEndInclusive(BaseType r) const
Definition RangeMap.h:97
bool DoesAdjoinOrIntersect(const Range &rhs) const
Definition RangeMap.h:107
void Clear(BaseType b=0)
Definition RangeMap.h:40
bool Contains(const Range &range) const
Definition RangeMap.h:101
bool Contains(BaseType r) const
Definition RangeMap.h:93
BaseType GetRangeBase() const
Definition RangeMap.h:45
Range(BaseType b, SizeType s)
Definition RangeMap.h:38
bool operator==(const Range &rhs) const
Definition RangeMap.h:127
bool IsValid() const
Definition RangeMap.h:91
bool Union(const Range &rhs)
Definition RangeMap.h:57
void ShrinkFront(S s)
Definition RangeMap.h:52
void SetRangeEnd(BaseType end)
Definition RangeMap.h:80
SizeType GetByteSize() const
Definition RangeMap.h:87
void SetRangeBase(BaseType b)
Set the start value for the range, and keep the same size.
Definition RangeMap.h:48
BaseType GetRangeEnd() const
Definition RangeMap.h:78
bool DoesIntersect(const Range &rhs) const
Definition RangeMap.h:117
bool operator<(const Range &rhs) const
Definition RangeMap.h:121
void Slide(BaseType slide)
Definition RangeMap.h:50
bool operator!=(const Range &rhs) const
Definition RangeMap.h:131
Range Intersect(const Range &rhs) const
Definition RangeMap.h:67
void SetByteSize(SizeType s)
Definition RangeMap.h:89