/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_GC_SPACE_MEMORY_TOOL_MALLOC_SPACE_INL_H_
#define ART_RUNTIME_GC_SPACE_MEMORY_TOOL_MALLOC_SPACE_INL_H_

#include "base/memory_tool.h"
#include "memory_tool_malloc_space.h"
#include "memory_tool_settings.h"

namespace art {
namespace gc {
namespace space {

// Implementation of MemoryToolMallocSpace<S, ...>: a decorator over an underlying
// malloc space S that surrounds every allocation with two redzones of
// kMemoryToolRedZoneBytes each and keeps the memory tool's shadow state
// (NOACCESS / DEFINED / UNDEFINED) in sync with allocation and deallocation.
// NOTE(review): the exact shadow-state semantics come from the MEMORY_TOOL_MAKE_*
// macros in base/memory_tool.h — confirm there which tool (Valgrind/ASan) is active.

namespace memory_tool_details {

// Given a raw allocation of (num_bytes + 2 * kMemoryToolRedZoneBytes) starting at
// obj_with_rdz, this:
//   - reports bytes_allocated / bytes_tl_bulk_allocated through the out-params,
//   - reports a usable size that excludes the redzones (either the exact object
//     size num_bytes, or usable_size minus both redzones, per kUseObjSizeForUsable),
//   - poisons the leading redzone, marks the payload DEFINED, poisons the trailing
//     redzone,
// and returns the payload address (obj_with_rdz + kMemoryToolRedZoneBytes), i.e.
// the pointer handed back to the caller as the mirror::Object.
template <size_t kMemoryToolRedZoneBytes, bool kUseObjSizeForUsable>
inline mirror::Object* AdjustForValgrind(void* obj_with_rdz, size_t num_bytes,
                                         size_t bytes_allocated, size_t usable_size,
                                         size_t bytes_tl_bulk_allocated,
                                         size_t* bytes_allocated_out, size_t* usable_size_out,
                                         size_t* bytes_tl_bulk_allocated_out) {
  if (bytes_allocated_out != nullptr) {
    *bytes_allocated_out = bytes_allocated;
  }
  if (bytes_tl_bulk_allocated_out != nullptr) {
    *bytes_tl_bulk_allocated_out = bytes_tl_bulk_allocated;
  }

  // This cuts over-provision and is a trade-off between testing the over-provisioning code paths
  // vs checking overflows in the regular paths.
  if (usable_size_out != nullptr) {
    if (kUseObjSizeForUsable) {
      *usable_size_out = num_bytes;
    } else {
      *usable_size_out = usable_size - 2 * kMemoryToolRedZoneBytes;
    }
  }

  // Left redzone.
  MEMORY_TOOL_MAKE_NOACCESS(obj_with_rdz, kMemoryToolRedZoneBytes);

  // Make requested memory readable.
  // (If the allocator assumes memory is zeroed out, we might get UNDEFINED warnings, so make
  // everything DEFINED initially.)
  mirror::Object* result = reinterpret_cast<mirror::Object*>(
      reinterpret_cast<uint8_t*>(obj_with_rdz) + kMemoryToolRedZoneBytes);
  MEMORY_TOOL_MAKE_DEFINED(result, num_bytes);

  // Right redzone. Assumes that if bytes_allocated > usable_size, then the difference is
  // management data at the upper end, and for simplicity we will not protect that.
  // At the moment, this fits RosAlloc (no management data in a slot, usable_size == alloc_size)
  // and DlMalloc (allocation_size = (usable_size == num_bytes) + 4, 4 is management)
  MEMORY_TOOL_MAKE_NOACCESS(reinterpret_cast<uint8_t*>(result) + num_bytes,
                            usable_size - (num_bytes + kMemoryToolRedZoneBytes));

  return result;
}

// Returns obj->SizeOf() without holding the locks the verifying SizeOf variants
// require; NO_THREAD_SAFETY_ANALYSIS suppresses clang's lock checking here.
inline size_t GetObjSizeNoThreadSafety(mirror::Object* obj) NO_THREAD_SAFETY_ANALYSIS {
  return obj->SizeOf<kVerifyNone>();
}

}  // namespace memory_tool_details

// AllocWithGrowth: over-allocates by two redzones via S::AllocWithGrowth, then
// delegates all pointer adjustment and shadow-state bookkeeping to
// AdjustForValgrind. Returns nullptr if the underlying allocation fails.
template <typename S,
          size_t kMemoryToolRedZoneBytes,
          bool kAdjustForRedzoneInAllocSize,
          bool kUseObjSizeForUsable>
mirror::Object*
MemoryToolMallocSpace<S,
                      kMemoryToolRedZoneBytes,
                      kAdjustForRedzoneInAllocSize,
                      kUseObjSizeForUsable>::AllocWithGrowth(
    Thread* self, size_t num_bytes, size_t* bytes_allocated_out, size_t* usable_size_out,
    size_t* bytes_tl_bulk_allocated_out) {
  size_t bytes_allocated;
  size_t usable_size;
  size_t bytes_tl_bulk_allocated;
  void* obj_with_rdz = S::AllocWithGrowth(self, num_bytes + 2 * kMemoryToolRedZoneBytes,
                                          &bytes_allocated, &usable_size,
                                          &bytes_tl_bulk_allocated);
  if (obj_with_rdz == nullptr) {
    return nullptr;
  }

  return memory_tool_details::AdjustForValgrind<kMemoryToolRedZoneBytes, kUseObjSizeForUsable>(
      obj_with_rdz, num_bytes,
      bytes_allocated, usable_size,
      bytes_tl_bulk_allocated,
      bytes_allocated_out,
      usable_size_out,
      bytes_tl_bulk_allocated_out);
}

// Alloc: same pattern as AllocWithGrowth, but via S::Alloc.
template <typename S,
          size_t kMemoryToolRedZoneBytes,
          bool kAdjustForRedzoneInAllocSize,
          bool kUseObjSizeForUsable>
mirror::Object* MemoryToolMallocSpace<S,
                                      kMemoryToolRedZoneBytes,
                                      kAdjustForRedzoneInAllocSize,
                                      kUseObjSizeForUsable>::Alloc(
    Thread* self, size_t num_bytes, size_t* bytes_allocated_out, size_t* usable_size_out,
    size_t* bytes_tl_bulk_allocated_out) {
  size_t bytes_allocated;
  size_t usable_size;
  size_t bytes_tl_bulk_allocated;
  void* obj_with_rdz = S::Alloc(self, num_bytes + 2 * kMemoryToolRedZoneBytes,
                                &bytes_allocated, &usable_size, &bytes_tl_bulk_allocated);
  if (obj_with_rdz == nullptr) {
    return nullptr;
  }

  return memory_tool_details::AdjustForValgrind<kMemoryToolRedZoneBytes,
                                                kUseObjSizeForUsable>(obj_with_rdz, num_bytes,
                                                                     bytes_allocated, usable_size,
                                                                     bytes_tl_bulk_allocated,
                                                                     bytes_allocated_out,
                                                                     usable_size_out,
                                                                     bytes_tl_bulk_allocated_out);
}

// AllocThreadUnsafe: same pattern, via S::AllocThreadUnsafe (the caller is
// responsible for whatever exclusion S::AllocThreadUnsafe requires).
template <typename S,
          size_t kMemoryToolRedZoneBytes,
          bool kAdjustForRedzoneInAllocSize,
          bool kUseObjSizeForUsable>
mirror::Object* MemoryToolMallocSpace<S,
                                      kMemoryToolRedZoneBytes,
                                      kAdjustForRedzoneInAllocSize,
                                      kUseObjSizeForUsable>::AllocThreadUnsafe(
    Thread* self, size_t num_bytes, size_t* bytes_allocated_out, size_t* usable_size_out,
    size_t* bytes_tl_bulk_allocated_out) {
  size_t bytes_allocated;
  size_t usable_size;
  size_t bytes_tl_bulk_allocated;
  void* obj_with_rdz = S::AllocThreadUnsafe(self, num_bytes + 2 * kMemoryToolRedZoneBytes,
                                            &bytes_allocated, &usable_size,
                                            &bytes_tl_bulk_allocated);
  if (obj_with_rdz == nullptr) {
    return nullptr;
  }

  return memory_tool_details::AdjustForValgrind<kMemoryToolRedZoneBytes, kUseObjSizeForUsable>(
      obj_with_rdz, num_bytes,
      bytes_allocated, usable_size,
      bytes_tl_bulk_allocated,
      bytes_allocated_out,
      usable_size_out,
      bytes_tl_bulk_allocated_out);
}

// AllocationSize: asks S for the allocation size. `obj` is the payload pointer
// the mutator holds, so when kAdjustForRedzoneInAllocSize the query is rebased
// to the real allocation start (payload minus the left redzone). The usable
// size reported to the caller again excludes the redzones (object size, or
// S's usable size minus both redzones, per kUseObjSizeForUsable).
template <typename S,
          size_t kMemoryToolRedZoneBytes,
          bool kAdjustForRedzoneInAllocSize,
          bool kUseObjSizeForUsable>
size_t MemoryToolMallocSpace<S,
                             kMemoryToolRedZoneBytes,
                             kAdjustForRedzoneInAllocSize,
                             kUseObjSizeForUsable>::AllocationSize(
    mirror::Object* obj, size_t* usable_size) {
  size_t result = S::AllocationSize(
      reinterpret_cast<mirror::Object*>(
          reinterpret_cast<uint8_t*>(obj) -
              (kAdjustForRedzoneInAllocSize ? kMemoryToolRedZoneBytes : 0)),
      usable_size);
  if (usable_size != nullptr) {
    if (kUseObjSizeForUsable) {
      *usable_size = memory_tool_details::GetObjSizeNoThreadSafety(obj);
    } else {
      *usable_size = *usable_size - 2 * kMemoryToolRedZoneBytes;
    }
  }
  return result;
}

// Free: converts the payload pointer back to the real allocation start
// (payload minus the left redzone), marks the whole allocation UNDEFINED so the
// allocator may legally touch it again, and delegates to S::Free. Returns the
// number of bytes freed, as reported by S::Free.
template <typename S,
          size_t kMemoryToolRedZoneBytes,
          bool kAdjustForRedzoneInAllocSize,
          bool kUseObjSizeForUsable>
size_t MemoryToolMallocSpace<S,
                             kMemoryToolRedZoneBytes,
                             kAdjustForRedzoneInAllocSize,
                             kUseObjSizeForUsable>::Free(
    Thread* self, mirror::Object* ptr) {
  void* obj_after_rdz = reinterpret_cast<void*>(ptr);
  uint8_t* obj_with_rdz = reinterpret_cast<uint8_t*>(obj_after_rdz) - kMemoryToolRedZoneBytes;

  // Make redzones undefined.
  size_t usable_size;
  size_t allocation_size = AllocationSize(ptr, &usable_size);

  // Unprotect the allocation.
  // Use the obj-size-for-usable flag to determine whether usable_size is the more important one,
  // e.g., whether there's data in the allocation_size (and usable_size can't be trusted).
  if (kUseObjSizeForUsable) {
    MEMORY_TOOL_MAKE_UNDEFINED(obj_with_rdz, allocation_size);
  } else {
    MEMORY_TOOL_MAKE_UNDEFINED(obj_with_rdz, usable_size + 2 * kMemoryToolRedZoneBytes);
  }

  return S::Free(self, reinterpret_cast<mirror::Object*>(obj_with_rdz));
}

// FreeList: frees num_ptrs objects one by one through Free (so each gets the
// redzone/shadow handling above), nulls out the caller's slots, and returns
// the total bytes freed.
template <typename S,
          size_t kMemoryToolRedZoneBytes,
          bool kAdjustForRedzoneInAllocSize,
          bool kUseObjSizeForUsable>
size_t MemoryToolMallocSpace<S,
                             kMemoryToolRedZoneBytes,
                             kAdjustForRedzoneInAllocSize,
                             kUseObjSizeForUsable>::FreeList(
    Thread* self, size_t num_ptrs, mirror::Object** ptrs) {
  size_t freed = 0;
  for (size_t i = 0; i < num_ptrs; i++) {
    freed += Free(self, ptrs[i]);
    ptrs[i] = nullptr;
  }
  return freed;
}

// Constructor: forwards everything to the underlying space S unchanged.
template <typename S,
          size_t kMemoryToolRedZoneBytes,
          bool kAdjustForRedzoneInAllocSize,
          bool kUseObjSizeForUsable>
template <typename... Params>
MemoryToolMallocSpace<S,
                      kMemoryToolRedZoneBytes,
                      kAdjustForRedzoneInAllocSize,
                      kUseObjSizeForUsable>::MemoryToolMallocSpace(
    MemMap* mem_map, size_t initial_size, Params... params)
    : S(mem_map, initial_size, params...) {
  // Don't want to change the valgrind states of the mem map here as the allocator is already
  // initialized at this point and that may interfere with what the allocator does internally. Note
  // that the tail beyond the initial size is mprotected.
}

// MaxBytesBulkAllocatedFor: a request for num_bytes really costs
// num_bytes + 2 * kMemoryToolRedZoneBytes in S, so query S with that.
template <typename S,
          size_t kMemoryToolRedZoneBytes,
          bool kAdjustForRedzoneInAllocSize,
          bool kUseObjSizeForUsable>
size_t MemoryToolMallocSpace<S,
                             kMemoryToolRedZoneBytes,
                             kAdjustForRedzoneInAllocSize,
                             kUseObjSizeForUsable>::MaxBytesBulkAllocatedFor(size_t num_bytes) {
  return S::MaxBytesBulkAllocatedFor(num_bytes + 2 * kMemoryToolRedZoneBytes);
}

}  // namespace space
}  // namespace gc
}  // namespace art

#endif  // ART_RUNTIME_GC_SPACE_MEMORY_TOOL_MALLOC_SPACE_INL_H_