//===-- DWARFDebugAranges.cpp -----------------------------------*- C++ -*-===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//

#include "DWARFDebugAranges.h"
#include "DWARFCompileUnit.h"
#include "DWARFContext.h"
#include "llvm/Support/Format.h"
#include "llvm/Support/raw_ostream.h"
#include <algorithm>
#include <cassert>
using namespace llvm;

// Comparison function for DWARFDebugAranges::Range structures.
static bool RangeLessThan(const DWARFDebugAranges::Range &range1,
                          const DWARFDebugAranges::Range &range2) {
  return range1.LoPC < range2.LoPC;
}

namespace {
class CountArangeDescriptors {
public:
  CountArangeDescriptors(uint32_t &count_ref) : Count(count_ref) {}
  void operator()(const DWARFDebugArangeSet &set) {
    Count += set.getNumDescriptors();
  }
  uint32_t &Count;
};

class AddArangeDescriptors {
public:
  AddArangeDescriptors(DWARFDebugAranges::RangeColl &ranges)
      : RangeCollection(ranges) {}
  void operator()(const DWARFDebugArangeSet &set) {
    const DWARFDebugArangeSet::Descriptor *arange_desc_ptr;
    DWARFDebugAranges::Range range;
    range.Offset = set.getCompileUnitDIEOffset();

    for (uint32_t i = 0; (arange_desc_ptr = set.getDescriptor(i)) != NULL;
         ++i) {
      range.LoPC = arange_desc_ptr->Address;
      range.Length = arange_desc_ptr->Length;

      // Insert each item in increasing address order so binary searching
      // can later be done!
      DWARFDebugAranges::RangeColl::iterator insert_pos =
        std::lower_bound(RangeCollection.begin(), RangeCollection.end(),
                         range, RangeLessThan);
      RangeCollection.insert(insert_pos, range);
    }
  }
  DWARFDebugAranges::RangeColl &RangeCollection;
};
} // end anonymous namespace

bool DWARFDebugAranges::extract(DataExtractor debug_aranges_data) {
  if (debug_aranges_data.isValidOffset(0)) {
    uint32_t offset = 0;

    typedef std::vector<DWARFDebugArangeSet> SetCollection;
    SetCollection sets;

    DWARFDebugArangeSet set;
    while (set.extract(debug_aranges_data, &offset))
      sets.push_back(set);

    uint32_t count = 0;
    std::for_each(sets.begin(), sets.end(), CountArangeDescriptors(count));

    if (count > 0) {
      Aranges.reserve(count);
      AddArangeDescriptors range_adder(Aranges);
      std::for_each(sets.begin(), sets.end(), range_adder);
    }
  }
  // Report success if any ranges were extracted.
  return !isEmpty();
}

bool DWARFDebugAranges::generate(DWARFContext *ctx) {
  clear();
  if (ctx) {
    const uint32_t num_compile_units = ctx->getNumCompileUnits();
    for (uint32_t cu_idx = 0; cu_idx < num_compile_units; ++cu_idx) {
      DWARFCompileUnit *cu = ctx->getCompileUnitAtIndex(cu_idx);
      if (cu)
        cu->buildAddressRangeTable(this, true);
    }
  }
  return !isEmpty();
}

void DWARFDebugAranges::dump(raw_ostream &OS) const {
  const uint32_t num_ranges = getNumRanges();
  for (uint32_t i = 0; i < num_ranges; ++i) {
    const Range &range = Aranges[i];
    OS << format("0x%8.8x: [0x%8.8llx - 0x%8.8llx)\n", range.Offset,
                 (uint64_t)range.LoPC, (uint64_t)range.HiPC());
  }
}

void DWARFDebugAranges::Range::dump(raw_ostream &OS) const {
  OS << format("{0x%8.8x}: [0x%8.8llx - 0x%8.8llx)\n", Offset, LoPC, HiPC());
}

void DWARFDebugAranges::appendRange(uint32_t offset, uint64_t low_pc,
                                    uint64_t high_pc) {
  // If the new range starts exactly where the last range for the same DIE
  // offset ended, extend the last range instead of appending a new one.
  if (!Aranges.empty()) {
    if (Aranges.back().Offset == offset && Aranges.back().HiPC() == low_pc) {
      Aranges.back().setHiPC(high_pc);
      return;
    }
  }
  Aranges.push_back(Range(low_pc, high_pc, offset));
}

void DWARFDebugAranges::sort(bool minimize, uint32_t n) {
  const size_t orig_arange_size = Aranges.size();
  // Size of zero or one? If so, no sorting is needed.
  if (orig_arange_size <= 1)
    return;
  // Sort our address range entries.
  std::stable_sort(Aranges.begin(), Aranges.end(), RangeLessThan);

  if (!minimize)
    return;

  // Most address ranges are contiguous from function to function, so our new
  // ranges will likely be smaller. We calculate the size of the new ranges
  // because although std::vector objects can be resized, they will never
  // reduce their allocated block size or free any excess memory, so we might
  // as well start a brand new collection that is as small as possible.

  // First calculate the size of the new minimal arange vector so we don't
  // have to do a bunch of re-allocations as we copy the new minimal entries
  // over to the new collection.
  size_t minimal_size = 1;
  for (size_t i = 1; i < orig_arange_size; ++i) {
    if (!Range::SortedOverlapCheck(Aranges[i-1], Aranges[i], n))
      ++minimal_size;
  }

  // If the sizes are the same, then no consecutive aranges can be combined
  // and we are done.
  if (minimal_size == orig_arange_size)
    return;

  // Else, make a new RangeColl that _only_ contains what we need.
  RangeColl minimal_aranges;
  minimal_aranges.resize(minimal_size);
  uint32_t j = 0;
  minimal_aranges[j] = Aranges[0];
  for (size_t i = 1; i < orig_arange_size; ++i) {
    if (Range::SortedOverlapCheck(minimal_aranges[j], Aranges[i], n)) {
      minimal_aranges[j].setHiPC(Aranges[i].HiPC());
    } else {
      // Only increment j if we aren't merging.
      minimal_aranges[++j] = Aranges[i];
    }
  }
  assert(j + 1 == minimal_size);

  // Now swap our new minimal aranges into place. The local minimal_aranges
  // will then contain the old big collection, which will get freed.
  minimal_aranges.swap(Aranges);
}

uint32_t DWARFDebugAranges::findAddress(uint64_t address) const {
  if (!Aranges.empty()) {
    Range range(address);
    RangeCollIterator begin = Aranges.begin();
    RangeCollIterator end = Aranges.end();
    RangeCollIterator pos = std::lower_bound(begin, end, range, RangeLessThan);

    if (pos != end && pos->LoPC <= address && address < pos->HiPC()) {
      return pos->Offset;
    } else if (pos != begin) {
      --pos;
      if (pos->LoPC <= address && address < pos->HiPC())
        return pos->Offset;
    }
  }
  return -1U;
}

bool DWARFDebugAranges::allRangesAreContiguous(uint64_t &LoPC,
                                               uint64_t &HiPC) const {
  if (Aranges.empty())
    return false;

  uint64_t next_addr = 0;
  RangeCollIterator begin = Aranges.begin();
  for (RangeCollIterator pos = begin, end = Aranges.end(); pos != end;
       ++pos) {
    if (pos != begin && pos->LoPC != next_addr)
      return false;
    next_addr = pos->HiPC();
  }
  // We checked for empty at the start of the function, so front() is valid.
  LoPC = Aranges.front().LoPC;
  // We checked for empty at the start of the function, so back() is valid.
  HiPC = Aranges.back().HiPC();
  return true;
}

bool DWARFDebugAranges::getMaxRange(uint64_t &LoPC, uint64_t &HiPC) const {
  if (Aranges.empty())
    return false;
  // We checked for empty at the start of the function, so front() is valid.
  LoPC = Aranges.front().LoPC;
  // We checked for empty at the start of the function, so back() is valid.
  HiPC = Aranges.back().HiPC();
  return true;
}
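
// Usage sketch (illustrative only, kept as a comment so it does not affect
// the build): one way a client might drive this class, assuming a
// DWARFContext named "Ctx", a DataExtractor named "Data" over the
// .debug_aranges section, and an address "PC" to look up. Those names are
// assumptions for the example; the member functions are the ones defined
// above.
//
//   DWARFDebugAranges Aranges;
//   Aranges.extract(Data);      // Parse the pre-built .debug_aranges table.
//   if (Aranges.isEmpty())
//     Aranges.generate(&Ctx);   // Fall back to scanning the compile units.
//   Aranges.sort(true, 0);      // Sort (and coalesce) so the binary search
//                               // in findAddress() works.
//   uint32_t CUOffset = Aranges.findAddress(PC);
//   if (CUOffset != -1U) {
//     // PC lies inside the compile unit whose DIE starts at CUOffset in
//     // .debug_info.
//   }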