LLDB mainline
RangeMap.h
1//===-- RangeMap.h ----------------------------------------------*- C++ -*-===//
2//
3// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4// See https://llvm.org/LICENSE.txt for license information.
5// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6//
7//===----------------------------------------------------------------------===//
8
9#ifndef LLDB_UTILITY_RANGEMAP_H
10#define LLDB_UTILITY_RANGEMAP_H
11
12#include <algorithm>
13#include <vector>
14
15#include "llvm/ADT/SmallVector.h"
16
17#include "lldb/lldb-private.h"
18
19// Uncomment to make sure all Range objects are sorted when needed
20//#define ASSERT_RANGEMAP_ARE_SORTED
21
22namespace lldb_private {
23
24 // Templatized classes for dealing with generic ranges, collections of
25 // ranges, and collections of ranges that have associated data.
26
27// A simple range class where you get to define the type of the range
28// base "B", and the type used for the range byte size "S".
29template <typename B, typename S> struct Range {
30 typedef B BaseType;
31 typedef S SizeType;
32
33 BaseType base;
34 SizeType size;
35
36 Range() : base(0), size(0) {}
37
38 Range(BaseType b, SizeType s) : base(b), size(s) {}
39
40 void Clear(BaseType b = 0) {
41 base = b;
42 size = 0;
43 }
44
45 BaseType GetRangeBase() const { return base; }
46
47 /// Set the start value for the range, and keep the same size
48 void SetRangeBase(BaseType b) { base = b; }
49
50 void Slide(BaseType slide) { base += slide; }
51
52 void ShrinkFront(S s) {
53 base += s;
54 size -= std::min(s, size);
55 }
56
57 bool Union(const Range &rhs) {
58 if (DoesAdjoinOrIntersect(rhs)) {
59 auto new_end = std::max<BaseType>(GetRangeEnd(), rhs.GetRangeEnd());
60 base = std::min<BaseType>(base, rhs.base);
61 size = new_end - base;
62 return true;
63 }
64 return false;
65 }
66
67 Range Intersect(const Range &rhs) const {
68 const BaseType lhs_base = this->GetRangeBase();
69 const BaseType rhs_base = rhs.GetRangeBase();
70 const BaseType lhs_end = this->GetRangeEnd();
71 const BaseType rhs_end = rhs.GetRangeEnd();
72 Range range;
73 range.SetRangeBase(std::max(lhs_base, rhs_base));
74 range.SetRangeEnd(std::min(lhs_end, rhs_end));
75 return range;
76 }
77
78 BaseType GetRangeEnd() const { return base + size; }
79
80 void SetRangeEnd(BaseType end) {
81 if (end > base)
82 size = end - base;
83 else
84 size = 0;
85 }
86
87 SizeType GetByteSize() const { return size; }
88
89 void SetByteSize(SizeType s) { size = s; }
90
91 bool IsValid() const { return size > 0; }
92
93 bool Contains(BaseType r) const {
94 return (GetRangeBase() <= r) && (r < GetRangeEnd());
95 }
96
97 bool ContainsEndInclusive(BaseType r) const {
98 return (GetRangeBase() <= r) && (r <= GetRangeEnd());
99 }
100
101 bool Contains(const Range &range) const {
102 return Contains(range.GetRangeBase()) &&
103 ContainsEndInclusive(range.GetRangeEnd());
104 }
105
106 // Returns true if the two ranges adjoin or intersect
107 bool DoesAdjoinOrIntersect(const Range &rhs) const {
108 const BaseType lhs_base = this->GetRangeBase();
109 const BaseType rhs_base = rhs.GetRangeBase();
110 const BaseType lhs_end = this->GetRangeEnd();
111 const BaseType rhs_end = rhs.GetRangeEnd();
112 bool result = (lhs_base <= rhs_end) && (lhs_end >= rhs_base);
113 return result;
114 }
115
116 // Returns true if the two ranges intersect
117 bool DoesIntersect(const Range &rhs) const {
118 return Intersect(rhs).IsValid();
119 }
120
121 bool operator<(const Range &rhs) const {
122 if (base == rhs.base)
123 return size < rhs.size;
124 return base < rhs.base;
125 }
126
127 bool operator==(const Range &rhs) const {
128 return base == rhs.base && size == rhs.size;
129 }
130
131 bool operator!=(const Range &rhs) const {
132 return base != rhs.base || size != rhs.size;
133 }
134};
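As an editorial aside (not part of the header), here is a minimal sketch of how Range<B, S> behaves, assuming the in-tree include path; the helper function name and all values are illustrative assumptions.

#include "lldb/Utility/RangeMap.h"
#include <cassert>
#include <cstdint>

static void RangeExample() {
  // Two adjacent half-open ranges: [0x1000, 0x1100) and [0x1100, 0x1200).
  lldb_private::Range<uint64_t, uint64_t> a(0x1000, 0x100);
  lldb_private::Range<uint64_t, uint64_t> b(0x1100, 0x100);

  assert(a.Contains(0x10ff));             // the end is exclusive...
  assert(!a.Contains(0x1100));
  assert(a.ContainsEndInclusive(0x1100)); // ...unless the inclusive variant is used

  assert(a.DoesAdjoinOrIntersect(b));     // adjacent ranges adjoin
  assert(!a.DoesIntersect(b));            // but their intersection is empty

  assert(a.Union(b));                     // merged in place into [0x1000, 0x1200)
  assert(a.GetByteSize() == 0x200);
}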
135
136template <typename B, typename S, unsigned N = 0> class RangeVector {
137public:
138 typedef B BaseType;
139 typedef S SizeType;
140 typedef Range<B, S> Entry;
141 typedef llvm::SmallVector<Entry, N> Collection;
142
143 RangeVector() = default;
144
145 ~RangeVector() = default;
146
147 static RangeVector GetOverlaps(const RangeVector &vec1,
148 const RangeVector &vec2) {
149#ifdef ASSERT_RANGEMAP_ARE_SORTED
150 assert(vec1.IsSorted() && vec2.IsSorted());
151#endif
152 RangeVector result;
153 auto pos1 = vec1.begin();
154 auto end1 = vec1.end();
155 auto pos2 = vec2.begin();
156 auto end2 = vec2.end();
157 while (pos1 != end1 && pos2 != end2) {
158 Entry entry = pos1->Intersect(*pos2);
159 if (entry.IsValid())
160 result.Append(entry);
161 if (pos1->GetRangeEnd() < pos2->GetRangeEnd())
162 ++pos1;
163 else
164 ++pos2;
165 }
166 return result;
167 }
168
169 bool operator==(const RangeVector &rhs) const {
170 if (GetSize() != rhs.GetSize())
171 return false;
172 for (size_t i = 0; i < GetSize(); ++i) {
173 if (GetEntryRef(i) != rhs.GetEntryRef(i))
174 return false;
175 }
176 return true;
177 }
178
179 void Append(const Entry &entry) { m_entries.push_back(entry); }
180
181 void Append(B base, S size) { m_entries.emplace_back(base, size); }
182
183 // Insert an item into a sorted list and optionally combine it with any
184 // adjacent blocks if requested.
185 void Insert(const Entry &entry, bool combine) {
186 if (m_entries.empty()) {
187 m_entries.push_back(entry);
188 return;
189 }
190 auto begin = m_entries.begin();
191 auto end = m_entries.end();
192 auto pos = std::lower_bound(begin, end, entry);
193 if (combine) {
194 if (pos != end && pos->Union(entry)) {
195 CombinePrevAndNext(pos);
196 return;
197 }
198 if (pos != begin) {
199 auto prev = pos - 1;
200 if (prev->Union(entry)) {
201 CombinePrevAndNext(prev);
202 return;
203 }
204 }
205 }
206 m_entries.insert(pos, entry);
207 }
208
209 bool RemoveEntryAtIndex(uint32_t idx) {
210 if (idx < m_entries.size()) {
211 m_entries.erase(m_entries.begin() + idx);
212 return true;
213 }
214 return false;
215 }
216
217 void Sort() {
218 if (m_entries.size() > 1)
219 llvm::stable_sort(m_entries);
220 }
221
222#ifdef ASSERT_RANGEMAP_ARE_SORTED
223 bool IsSorted() const {
224 typename Collection::const_iterator pos, end, prev;
225 // Walk adjacent entries and verify that no entry is ordered before its
226 // predecessor.
227 for (pos = m_entries.begin(), end = m_entries.end(), prev = end; pos != end;
228 prev = pos++) {
229 if (prev != end && *pos < *prev)
230 return false;
231 }
232 return true;
233 }
234#endif
235
236 void CombineConsecutiveRanges() {
237#ifdef ASSERT_RANGEMAP_ARE_SORTED
238 assert(IsSorted());
239#endif
240 auto first_intersect = std::adjacent_find(
241 m_entries.begin(), m_entries.end(), [](const Entry &a, const Entry &b) {
242 return a.DoesAdjoinOrIntersect(b);
243 });
244 if (first_intersect == m_entries.end())
245 return;
246
247 // We can combine at least one entry. Make a new collection, populate it
248 // accordingly, and then swap it into place.
249 auto pos = std::next(first_intersect);
250 Collection minimal_ranges(m_entries.begin(), pos);
251 for (; pos != m_entries.end(); ++pos) {
252 Entry &back = minimal_ranges.back();
253 if (back.DoesAdjoinOrIntersect(*pos))
254 back.SetRangeEnd(std::max(back.GetRangeEnd(), pos->GetRangeEnd()));
255 else
256 minimal_ranges.push_back(*pos);
257 }
258 m_entries.swap(minimal_ranges);
259 }
260
261 BaseType GetMinRangeBase(BaseType fail_value) const {
262#ifdef ASSERT_RANGEMAP_ARE_SORTED
263 assert(IsSorted());
264#endif
265 if (m_entries.empty())
266 return fail_value;
267 // m_entries must be sorted, so if we aren't empty, we grab the first
268 // range's base
269 return m_entries.front().GetRangeBase();
270 }
271
272 BaseType GetMaxRangeEnd(BaseType fail_value) const {
273#ifdef ASSERT_RANGEMAP_ARE_SORTED
274 assert(IsSorted());
275#endif
276 if (m_entries.empty())
277 return fail_value;
278 // m_entries must be sorted, so if we aren't empty, we grab the last
279 // range's end
280 return m_entries.back().GetRangeEnd();
281 }
282
283 void Slide(BaseType slide) {
284 typename Collection::iterator pos, end;
285 for (pos = m_entries.begin(), end = m_entries.end(); pos != end; ++pos)
286 pos->Slide(slide);
287 }
288
289 void Clear() { m_entries.clear(); }
290
291 void Reserve(typename Collection::size_type size) { m_entries.reserve(size); }
292
293 bool IsEmpty() const { return m_entries.empty(); }
294
295 size_t GetSize() const { return m_entries.size(); }
296
297 const Entry *GetEntryAtIndex(size_t i) const {
298 return ((i < m_entries.size()) ? &m_entries[i] : nullptr);
299 }
300
301 // Clients must ensure that "i" is a valid index prior to calling this
302 // function
303 Entry &GetEntryRef(size_t i) { return m_entries[i]; }
304 const Entry &GetEntryRef(size_t i) const { return m_entries[i]; }
305
306 Entry *Back() { return (m_entries.empty() ? nullptr : &m_entries.back()); }
307
308 const Entry *Back() const {
309 return (m_entries.empty() ? nullptr : &m_entries.back());
310 }
311
312 static bool BaseLessThan(const Entry &lhs, const Entry &rhs) {
313 return lhs.GetRangeBase() < rhs.GetRangeBase();
314 }
315
316 uint32_t FindEntryIndexThatContains(B addr) const {
317#ifdef ASSERT_RANGEMAP_ARE_SORTED
318 assert(IsSorted());
319#endif
320 if (!m_entries.empty()) {
321 Entry entry(addr, 1);
322 typename Collection::const_iterator begin = m_entries.begin();
323 typename Collection::const_iterator end = m_entries.end();
324 typename Collection::const_iterator pos =
325 std::lower_bound(begin, end, entry, BaseLessThan);
326
327 if (pos != end && pos->Contains(addr)) {
328 return std::distance(begin, pos);
329 } else if (pos != begin) {
330 --pos;
331 if (pos->Contains(addr))
332 return std::distance(begin, pos);
333 }
334 }
335 return UINT32_MAX;
336 }
337
338 const Entry *FindEntryThatContains(B addr) const {
339#ifdef ASSERT_RANGEMAP_ARE_SORTED
340 assert(IsSorted());
341#endif
342 if (!m_entries.empty()) {
343 Entry entry(addr, 1);
344 typename Collection::const_iterator begin = m_entries.begin();
345 typename Collection::const_iterator end = m_entries.end();
346 typename Collection::const_iterator pos =
347 std::lower_bound(begin, end, entry, BaseLessThan);
348
349 if (pos != end && pos->Contains(addr)) {
350 return &(*pos);
351 } else if (pos != begin) {
352 --pos;
353 if (pos->Contains(addr)) {
354 return &(*pos);
355 }
356 }
357 }
358 return nullptr;
359 }
360
361 const Entry *FindEntryThatContains(const Entry &range) const {
362#ifdef ASSERT_RANGEMAP_ARE_SORTED
363 assert(IsSorted());
364#endif
365 if (!m_entries.empty()) {
366 typename Collection::const_iterator begin = m_entries.begin();
367 typename Collection::const_iterator end = m_entries.end();
368 typename Collection::const_iterator pos =
369 std::lower_bound(begin, end, range, BaseLessThan);
370
371 if (pos != end && pos->Contains(range)) {
372 return &(*pos);
373 } else if (pos != begin) {
374 --pos;
375 if (pos->Contains(range)) {
376 return &(*pos);
377 }
378 }
379 }
380 return nullptr;
381 }
382
383 const Entry *FindEntryThatIntersects(const Entry &range) const {
384#ifdef ASSERT_RANGEMAP_ARE_SORTED
385 assert(IsSorted());
386#endif
387 if (!m_entries.empty()) {
388 typename Collection::const_iterator begin = m_entries.begin();
389 typename Collection::const_iterator end = m_entries.end();
390 typename Collection::const_iterator pos =
391 std::lower_bound(begin, end, range, BaseLessThan);
392
393 while (pos != begin && pos[-1].DoesIntersect(range))
394 --pos;
395
396 if (pos != end && pos->DoesIntersect(range))
397 return &(*pos);
398 }
399 return nullptr;
400 }
401
402 using const_iterator = typename Collection::const_iterator;
403 const_iterator begin() const { return m_entries.begin(); }
404 const_iterator end() const { return m_entries.end(); }
405
406protected:
407 void CombinePrevAndNext(typename Collection::iterator pos) {
408 // Check the previous and next entries in case they need to be unioned with
409 // the entry pointed to by "pos".
410 if (pos != m_entries.begin()) {
411 auto prev = pos - 1;
412 if (prev->Union(*pos))
413 m_entries.erase(pos);
414 pos = prev;
415 }
416
417 auto end = m_entries.end();
418 if (pos != end) {
419 auto next = pos + 1;
420 if (next != end) {
421 if (pos->Union(*next))
422 m_entries.erase(next);
423 }
424 }
425 }
426
427 Collection m_entries;
428};
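An editorial usage sketch of RangeVector (not part of the header); the element types, the example function name, and the ranges appended below are assumptions chosen for illustration.

#include "lldb/Utility/RangeMap.h"
#include <cassert>
#include <cstdint>

static void RangeVectorExample() {
  lldb_private::RangeVector<uint64_t, uint64_t, 8> ranges;
  ranges.Append(0x2000, 0x100);
  ranges.Append(0x1000, 0x100);
  ranges.Append(0x1100, 0x100);      // adjoins the range starting at 0x1000
  ranges.Sort();                     // the query methods expect sorted entries
  ranges.CombineConsecutiveRanges(); // coalesces the two adjoining entries into [0x1000, 0x1200)
  assert(ranges.GetSize() == 2);

  const auto *entry = ranges.FindEntryThatContains(0x11ff);
  assert(entry && entry->GetRangeBase() == 0x1000);
  assert(ranges.FindEntryIndexThatContains(0x3000) == UINT32_MAX);
}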
429
430// A simple range with data class where you get to define the type of
431// the range base "B", the type used for the range byte size "S", and the type
432// for the associated data "T".
433template <typename B, typename S, typename T>
434struct RangeData : public Range<B, S> {
435 typedef T DataType;
436
437 DataType data;
438
439 RangeData() : Range<B, S>(), data() {}
440
441 RangeData(B base, S size) : Range<B, S>(base, size), data() {}
442
443 RangeData(B base, S size, DataType d) : Range<B, S>(base, size), data(d) {}
444};
445
446 // We can treat the vector as a flattened binary search tree; augmenting it
447 // with an upper bound (the max of the range endpoints) at every index lets
448 // us answer range-containment queries more quickly.
449template <typename B, typename S, typename T>
450struct AugmentedRangeData : public RangeData<B, S, T> {
451 B upper_bound;
452
453 AugmentedRangeData(const RangeData<B, S, T> &rd)
454 : RangeData<B, S, T>(rd), upper_bound() {}
455};
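A worked illustration of the upper_bound augmentation may help (editorial; the three ranges below are assumed for the example):

// Sorted entries: [0]=[0x1000,0x1300)  [1]=[0x2000,0x2080)  [2]=[0x3000,0x3100)
// Treating the vector as a BST rooted at the middle index, ComputeUpperBounds()
// stores the maximum GetRangeEnd() found in each node's subtree:
//   upper_bound[0] = 0x1300   (leaf)
//   upper_bound[1] = 0x3100   (root; covers the whole vector)
//   upper_bound[2] = 0x3100   (leaf)
// A containment query can then skip any subtree whose root has
// upper_bound < addr, since no range in that subtree reaches addr.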
456
457template <typename B, typename S, typename T, unsigned N = 0,
458 class Compare = std::less<T>>
459class RangeDataVector {
460public:
461 typedef lldb_private::Range<B, S> Range;
462 typedef RangeData<B, S, T> Entry;
463 typedef AugmentedRangeData<B, S, T> AugmentedEntry;
464 typedef llvm::SmallVector<AugmentedEntry, N> Collection;
465
466 RangeDataVector(Compare compare = Compare()) : m_compare(compare) {}
467
468 ~RangeDataVector() = default;
469
470 void Append(const Entry &entry) { m_entries.emplace_back(entry); }
471
472 /// Append a range with data to the vector
473 /// \param B The base of the memory range
474 /// \param S The size of the memory range
475 /// \param T The data associated with the memory range
476 void Append(B &&b, S &&s, T &&t) { m_entries.emplace_back(Entry(b, s, t)); }
477
478 bool Erase(uint32_t start, uint32_t end) {
479 if (start >= end || end > m_entries.size())
480 return false;
481 m_entries.erase(begin() + start, begin() + end);
482 return true;
483 }
484
485 void Sort() {
486 if (m_entries.size() > 1)
487 llvm::stable_sort(m_entries,
488 [&compare = m_compare](const Entry &a, const Entry &b) {
489 if (a.base != b.base)
490 return a.base < b.base;
491 if (a.size != b.size)
492 return a.size < b.size;
493 return compare(a.data, b.data);
494 });
495 if (!m_entries.empty())
496 ComputeUpperBounds(0, m_entries.size());
497 }
498
499#ifdef ASSERT_RANGEMAP_ARE_SORTED
500 bool IsSorted() const {
501 typename Collection::const_iterator pos, end, prev;
502 for (pos = m_entries.begin(), end = m_entries.end(), prev = end; pos != end;
503 prev = pos++) {
504 if (prev != end && *pos < *prev)
505 return false;
506 }
507 return true;
508 }
509#endif
510
511 void CombineConsecutiveEntriesWithEqualData() {
512#ifdef ASSERT_RANGEMAP_ARE_SORTED
513 assert(IsSorted());
514#endif
515 auto first_intersect = std::adjacent_find(
516 m_entries.begin(), m_entries.end(), [](const Entry &a, const Entry &b) {
517 return a.DoesAdjoinOrIntersect(b) && a.data == b.data;
518 });
519
520 if (first_intersect == m_entries.end())
521 return;
522
523 // We can combine at least one entry. Make a new collection and populate it
524 // accordingly, and then swap it into place.
525 auto pos = std::next(first_intersect);
526 Collection minimal_ranges(m_entries.begin(), pos);
527 for (; pos != m_entries.end(); ++pos) {
528 Entry &back = minimal_ranges.back();
529 if (back.DoesAdjoinOrIntersect(*pos) && back.data == pos->data)
530 back.SetRangeEnd(std::max(back.GetRangeEnd(), pos->GetRangeEnd()));
531 else
532 minimal_ranges.push_back(*pos);
533 }
534 m_entries.swap(minimal_ranges);
535 ComputeUpperBounds(0, m_entries.size());
536 }
537
538 void Clear() { m_entries.clear(); }
539
540 bool IsEmpty() const { return m_entries.empty(); }
541
542 size_t GetSize() const { return m_entries.size(); }
543
544 const Entry *GetEntryAtIndex(size_t i) const {
545 return ((i < m_entries.size()) ? &m_entries[i] : nullptr);
546 }
547
548 Entry *GetMutableEntryAtIndex(size_t i) {
549 return ((i < m_entries.size()) ? &m_entries[i] : nullptr);
550 }
551
552 // Clients must ensure that "i" is a valid index prior to calling this
553 // function
554 Entry &GetEntryRef(size_t i) { return m_entries[i]; }
555 const Entry &GetEntryRef(size_t i) const { return m_entries[i]; }
556
557 static bool BaseLessThan(const Entry &lhs, const Entry &rhs) {
558 return lhs.GetRangeBase() < rhs.GetRangeBase();
559 }
560
561 uint32_t FindEntryIndexThatContains(B addr) const {
562 const AugmentedEntry *entry =
563 static_cast<const AugmentedEntry *>(FindEntryThatContains(addr));
564 if (entry)
565 return std::distance(m_entries.begin(), entry);
566 return UINT32_MAX;
567 }
568
569 uint32_t FindEntryIndexesThatContain(B addr, std::vector<uint32_t> &indexes) {
570#ifdef ASSERT_RANGEMAP_ARE_SORTED
571 assert(IsSorted());
572#endif
573 if (!m_entries.empty())
574 FindEntryIndexesThatContain(addr, 0, m_entries.size(), indexes);
575
576 return indexes.size();
577 }
578
579 Entry *FindEntryThatContains(B addr) {
580 return const_cast<Entry *>(
581 static_cast<const RangeDataVector *>(this)->FindEntryThatContains(
582 addr));
583 }
584
585 const Entry *FindEntryThatContains(B addr) const {
586 return FindEntryThatContains(Entry(addr, 1));
587 }
588
589 const Entry *FindEntryThatContains(const Entry &range) const {
590#ifdef ASSERT_RANGEMAP_ARE_SORTED
591 assert(IsSorted());
592#endif
593 if (!m_entries.empty()) {
594 typename Collection::const_iterator begin = m_entries.begin();
595 typename Collection::const_iterator end = m_entries.end();
596 typename Collection::const_iterator pos =
597 std::lower_bound(begin, end, range, BaseLessThan);
598
599 while (pos != begin && pos[-1].Contains(range))
600 --pos;
601
602 if (pos != end && pos->Contains(range))
603 return &(*pos);
604 }
605 return nullptr;
606 }
607
608 const Entry *FindEntryStartsAt(B addr) const {
609#ifdef ASSERT_RANGEMAP_ARE_SORTED
610 assert(IsSorted());
611#endif
612 if (!m_entries.empty()) {
613 auto begin = m_entries.begin(), end = m_entries.end();
614 auto pos = std::lower_bound(begin, end, Entry(addr, 1), BaseLessThan);
615 if (pos != end && pos->base == addr)
616 return &(*pos);
617 }
618 return nullptr;
619 }
620
621 // This method will return the entry that contains the given address, or the
622 // entry following that address. If you give it an address of 0 and the
623 // first entry starts at address 0x100, you will get the entry at 0x100.
624 //
625 // For most uses, FindEntryThatContains is the correct one to use; this is a
626 // less commonly needed behavior. It was added for core file memory regions,
627 // where we want to present a gap in the memory regions as a distinct region,
628 // so we need to know the start address of the next memory section that
629 // exists.
630 const Entry *FindEntryThatContainsOrFollows(B addr) const {
631#ifdef ASSERT_RANGEMAP_ARE_SORTED
632 assert(IsSorted());
633#endif
634 if (!m_entries.empty()) {
635 typename Collection::const_iterator begin = m_entries.begin();
636 typename Collection::const_iterator end = m_entries.end();
637 typename Collection::const_iterator pos = llvm::lower_bound(
638 m_entries, addr, [](const Entry &lhs, B rhs_base) -> bool {
639 return lhs.GetRangeEnd() <= rhs_base;
640 });
641
642 while (pos != begin && pos[-1].Contains(addr))
643 --pos;
644
645 if (pos != end)
646 return &(*pos);
647 }
648 return nullptr;
649 }
650
651 uint32_t FindEntryIndexThatContainsOrFollows(B addr) const {
652#ifdef ASSERT_RANGEMAP_ARE_SORTED
653 assert(IsSorted());
654#endif
655 const AugmentedEntry *entry = static_cast<const AugmentedEntry *>(
656 FindEntryThatContainsOrFollows(addr));
657 if (entry)
658 return std::distance(m_entries.begin(), entry);
659 return UINT32_MAX;
660 }
661
662 Entry *Back() { return (m_entries.empty() ? nullptr : &m_entries.back()); }
663
664 const Entry *Back() const {
665 return (m_entries.empty() ? nullptr : &m_entries.back());
666 }
667
668 using const_iterator = typename Collection::const_iterator;
669 const_iterator begin() const { return m_entries.begin(); }
670 const_iterator end() const { return m_entries.end(); }
671
672protected:
673 Collection m_entries;
674 Compare m_compare;
675
676private:
677 // Compute extra information needed for search
678 B ComputeUpperBounds(size_t lo, size_t hi) {
679 size_t mid = (lo + hi) / 2;
680 AugmentedEntry &entry = m_entries[mid];
681
682 entry.upper_bound = entry.base + entry.size;
683
684 if (lo < mid)
685 entry.upper_bound =
686 std::max(entry.upper_bound, ComputeUpperBounds(lo, mid));
687
688 if (mid + 1 < hi)
689 entry.upper_bound =
690 std::max(entry.upper_bound, ComputeUpperBounds(mid + 1, hi));
691
692 return entry.upper_bound;
693 }
694
695 // This is based on the augmented tree implementation found at
696 // https://en.wikipedia.org/wiki/Interval_tree#Augmented_tree
697 void FindEntryIndexesThatContain(B addr, size_t lo, size_t hi,
698 std::vector<uint32_t> &indexes) {
699 size_t mid = (lo + hi) / 2;
700 const AugmentedEntry &entry = m_entries[mid];
701
702 // addr is greater than the rightmost endpoint of any interval in this
703 // subtree, so there cannot be any matches.
704 if (addr > entry.upper_bound)
705 return;
706
707 // Recursively search left subtree
708 if (lo < mid)
709 FindEntryIndexesThatContain(addr, lo, mid, indexes);
710
711 // If addr is smaller than the start of the current interval, neither this
712 // interval nor any entry in its right subtree can contain it.
713 if (addr < entry.base)
714 return;
715
716 if (entry.Contains(addr))
717 indexes.push_back(entry.data);
718
719 // Recursively search right subtree
720 if (mid + 1 < hi)
721 FindEntryIndexesThatContain(addr, mid + 1, hi, indexes);
722 }
723};
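An editorial usage sketch of RangeDataVector (not part of the header); the uint32_t payload type, the example function name, and all addresses are assumptions.

#include "lldb/Utility/RangeMap.h"
#include <cassert>
#include <cstdint>
#include <vector>

static void RangeDataVectorExample() {
  lldb_private::RangeDataVector<uint64_t, uint64_t, uint32_t> map;
  map.Append(0x1000, 0x100, 1);
  map.Append(0x1100, 0x100, 1); // adjoins the previous entry and carries equal data
  map.Append(0x4000, 0x100, 2);
  map.Sort();                                   // also computes the upper bounds
  map.CombineConsecutiveEntriesWithEqualData(); // merges the first two entries
  assert(map.GetSize() == 2);

  const auto *entry = map.FindEntryThatContains(0x1180);
  assert(entry && entry->data == 1);

  std::vector<uint32_t> hits;
  map.FindEntryIndexesThatContain(0x4010, hits); // note: fills in entry.data values
  assert(hits.size() == 1 && hits[0] == 2);
}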
724
725 // A simple address-with-data class where you get to define the type of the
726 // address "B" and the type for the associated data "T". Each entry pairs a
727 // single address with a payload rather than describing a range.
728template <typename B, typename T> struct AddressData {
729 typedef B BaseType;
730 typedef T DataType;
731
732 BaseType addr;
733 DataType data;
734
735 AddressData() : addr(), data() {}
736
737 AddressData(B a, DataType d) : addr(a), data(d) {}
738
739 bool operator<(const AddressData &rhs) const {
740 if (this->addr == rhs.addr)
741 return this->data < rhs.data;
742 return this->addr < rhs.addr;
743 }
744
745 bool operator==(const AddressData &rhs) const {
746 return this->addr == rhs.addr && this->data == rhs.data;
747 }
748
749 bool operator!=(const AddressData &rhs) const {
750 return this->addr != rhs.addr || this->data != rhs.data;
751 }
752};
753
754template <typename B, typename T, unsigned N> class AddressDataArray {
755public:
756 typedef AddressData<B, T> Entry;
757 typedef llvm::SmallVector<Entry, N> Collection;
758
759 AddressDataArray() = default;
760
761 ~AddressDataArray() = default;
762
763 void Append(const Entry &entry) { m_entries.push_back(entry); }
764
765 void Sort() {
766 if (m_entries.size() > 1)
767 std::stable_sort(m_entries.begin(), m_entries.end());
768 }
769
770#ifdef ASSERT_RANGEMAP_ARE_SORTED
771 bool IsSorted() const {
772 typename Collection::const_iterator pos, end, prev;
773 // Walk adjacent entries and verify that no entry is ordered before its
774 // predecessor.
775 for (pos = m_entries.begin(), end = m_entries.end(), prev = end; pos != end;
776 prev = pos++) {
777 if (prev != end && *pos < *prev)
778 return false;
779 }
780 return true;
781 }
782#endif
783
784 void Clear() { m_entries.clear(); }
785
786 bool IsEmpty() const { return m_entries.empty(); }
787
788 size_t GetSize() const { return m_entries.size(); }
789
790 const Entry *GetEntryAtIndex(size_t i) const {
791 return ((i < m_entries.size()) ? &m_entries[i] : nullptr);
792 }
793
794 // Clients must ensure that "i" is a valid index prior to calling this
795 // function
796 const Entry &GetEntryRef(size_t i) const { return m_entries[i]; }
797
798 static bool BaseLessThan(const Entry &lhs, const Entry &rhs) {
799 return lhs.addr < rhs.addr;
800 }
801
802 Entry *FindEntry(B addr, bool exact_match_only) {
803#ifdef ASSERT_RANGEMAP_ARE_SORTED
804 assert(IsSorted());
805#endif
806 if (!m_entries.empty()) {
807 Entry entry;
808 entry.addr = addr;
809 typename Collection::iterator begin = m_entries.begin();
810 typename Collection::iterator end = m_entries.end();
811 typename Collection::iterator pos =
812 llvm::lower_bound(m_entries, entry, BaseLessThan);
813
814 while (pos != begin && pos[-1].addr == addr)
815 --pos;
816
817 if (pos != end) {
818 if (pos->addr == addr || !exact_match_only)
819 return &(*pos);
820 }
821 }
822 return nullptr;
823 }
824
825 const Entry *FindNextEntry(const Entry *entry) {
826 if (entry >= &*m_entries.begin() && entry + 1 < &*m_entries.end())
827 return entry + 1;
828 return nullptr;
829 }
830
831 Entry *Back() { return (m_entries.empty() ? nullptr : &m_entries.back()); }
832
833 const Entry *Back() const {
834 return (m_entries.empty() ? nullptr : &m_entries.back());
835 }
836
837protected:
838 Collection m_entries;
839};
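An editorial usage sketch of AddressDataArray (not part of the header); the addresses, payloads, and the example function name are assumptions.

#include "lldb/Utility/RangeMap.h"
#include <cassert>
#include <cstdint>

static void AddressDataArrayExample() {
  using Array = lldb_private::AddressDataArray<uint64_t, uint32_t, 4>;
  Array array;
  array.Append(Array::Entry(0x2000, 2));
  array.Append(Array::Entry(0x1000, 1));
  array.Sort(); // FindEntry() expects sorted entries

  // Exact lookup.
  const auto *exact = array.FindEntry(0x1000, /*exact_match_only=*/true);
  assert(exact && exact->data == 1);

  // With exact_match_only == false, the entry at or after the address is
  // returned when there is no exact match.
  const auto *at_or_after = array.FindEntry(0x1800, /*exact_match_only=*/false);
  assert(at_or_after && at_or_after->addr == 0x2000);

  // FindNextEntry() walks to the following element, or returns nullptr at the end.
  assert(array.FindNextEntry(exact) == at_or_after);
}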
840
841} // namespace lldb_private
842
843#endif // LLDB_UTILITY_RANGEMAP_H