/art/runtime/gc/space/
dlmalloc_space-inl.h
  30  size_t* usable_size) {
  34  obj = AllocWithoutGrowthLocked(self, num_bytes, bytes_allocated, usable_size);
  43  inline size_t DlMallocSpace::AllocationSizeNonvirtual(mirror::Object* obj, size_t* usable_size) {
  46  if (usable_size != nullptr) {
  47  *usable_size = size;
  54  size_t* usable_size) {
  61  size_t allocation_size = AllocationSizeNonvirtual(result, usable_size);
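Every Alloc() variant above follows the same out-parameter contract: return the object (or nullptr), always write *bytes_allocated with what the heap is charged, and fill *usable_size only when the caller passed a non-null pointer. A minimal sketch of that contract, assuming a plain malloc-backed space (AllocSketch and Object are illustrative stand-ins, not ART names):

    #include <cstddef>
    #include <cstdlib>

    struct Object {};  // stand-in for mirror::Object

    // Sketch: report accounting unconditionally, usable size only on
    // request, as in lines 46-47 above.
    Object* AllocSketch(size_t num_bytes, size_t* bytes_allocated,
                        size_t* usable_size) {
      void* mem = std::malloc(num_bytes);
      if (mem == nullptr) {
        return nullptr;
      }
      *bytes_allocated = num_bytes;  // always reported to the heap
      if (usable_size != nullptr) {
        *usable_size = num_bytes;    // a real allocator may round this up
      }
      return static_cast<Object*>(mem);
    }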
bump_pointer_space-inl.h
  27  size_t* usable_size) {
  32  if (usable_size != nullptr) {
  33  *usable_size = num_bytes;
  41  size_t* usable_size) {
  54  if (UNLIKELY(usable_size != nullptr)) {
  55  *usable_size = num_bytes;
  84  inline size_t BumpPointerSpace::AllocationSizeNonvirtual(mirror::Object* obj, size_t* usable_size)
  87  if (usable_size != nullptr) {
  88  *usable_size = RoundUp(num_bytes, kAlignment);
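Lines 84-88 are the telling detail: a bump-pointer space keeps no per-object metadata, so the usable size is just the request rounded up to the space's alignment. A sketch of that allocation scheme, assuming 8-byte alignment and a single atomic end pointer (the names are illustrative, not BumpPointerSpace's actual members):

    #include <atomic>
    #include <cstddef>
    #include <cstdint>

    constexpr size_t kAlignment = 8;  // assumption: 8-byte object alignment

    inline size_t RoundUp(size_t x, size_t n) {
      return (x + n - 1) & ~(n - 1);  // n must be a power of two
    }

    struct BumpPointerSketch {
      std::atomic<uint8_t*> end_;
      uint8_t* limit_;

      void* Alloc(size_t num_bytes, size_t* bytes_allocated, size_t* usable_size) {
        const size_t aligned = RoundUp(num_bytes, kAlignment);
        uint8_t* old_end = end_.fetch_add(aligned);
        if (old_end + aligned > limit_) {
          return nullptr;  // exhausted (sketch only: no rollback of end_)
        }
        *bytes_allocated = aligned;
        if (usable_size != nullptr) {
          *usable_size = aligned;  // mirrors RoundUp(num_bytes, kAlignment) at line 88
        }
        return old_end;
      }
    };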
rosalloc_space-inl.h
  28  inline size_t RosAllocSpace::AllocationSizeNonvirtual(mirror::Object* obj, size_t* usable_size) {
  43  if (usable_size != nullptr) {
  44  *usable_size = size_by_size;
  51  size_t* bytes_allocated, size_t* usable_size) {
  66  if (usable_size != nullptr) {
  67  *usable_size = rosalloc_size;
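RosAlloc serves small requests from fixed-size bracket runs, which is why the reported sizes above (size_by_size, rosalloc_size) can exceed the request: the object owns its whole bracket. A sketch of that rounding, with made-up bracket sizes rather than RosAlloc's real run configuration:

    #include <cstddef>

    // Illustrative bracket table; RosAlloc's actual brackets differ.
    constexpr size_t kBrackets[] = {8, 16, 32, 64, 128, 256, 512, 1024, 2048};

    size_t BracketSize(size_t num_bytes) {
      for (size_t bracket : kBrackets) {
        if (num_bytes <= bracket) {
          return bracket;  // the whole bracket is usable by the object
        }
      }
      return num_bytes;  // beyond the largest bracket: sized individually (sketch)
    }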
valgrind_malloc_space-inl.h
  35  size_t* usable_size) {
  37  bytes_allocated, usable_size);
  52  size_t* usable_size) {
  54  usable_size);
  67  size_t ValgrindMallocSpace<S, A>::AllocationSize(mirror::Object* obj, size_t* usable_size) {
  69  reinterpret_cast<byte*>(obj) - kValgrindRedZoneBytes), usable_size);
  78  size_t usable_size = 0; local
  79  AllocationSize(ptr, &usable_size);
  80  VALGRIND_MAKE_MEM_UNDEFINED(obj_with_rdz, usable_size);
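The Valgrind wrapper pads each allocation with a redzone on both sides and hands the caller a pointer just past the leading redzone; AllocationSize must therefore shift the pointer back before asking the wrapped space, which is what the subtraction of kValgrindRedZoneBytes at line 69 does. The pointer arithmetic, sketched with an assumed redzone width:

    #include <cstddef>
    #include <cstdint>

    constexpr size_t kRedZoneBytes = 8;  // stand-in for kValgrindRedZoneBytes

    // Layout: | redzone | object | redzone |
    uint8_t* AllocWithRedZone(size_t num_bytes, size_t* bytes_allocated) {
      const size_t total = num_bytes + 2 * kRedZoneBytes;
      uint8_t* mem = new uint8_t[total];
      *bytes_allocated = total;
      return mem + kRedZoneBytes;  // caller only ever sees the object
    }

    // Recover the true allocation start, as line 69 does before delegating.
    uint8_t* ObjectWithRedZone(uint8_t* obj) {
      return obj - kRedZoneBytes;
    }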
rosalloc_space.h
  50  size_t* usable_size) OVERRIDE LOCKS_EXCLUDED(lock_);
  52  size_t* usable_size) OVERRIDE {
  53  return AllocNonvirtual(self, num_bytes, bytes_allocated, usable_size);
  56  size_t* usable_size)
  58  return AllocNonvirtualThreadUnsafe(self, num_bytes, bytes_allocated, usable_size);
  60  size_t AllocationSize(mirror::Object* obj, size_t* usable_size) OVERRIDE {
  61  return AllocationSizeNonvirtual(obj, usable_size);
  69  size_t* usable_size) {
  71  return AllocCommon(self, num_bytes, bytes_allocated, usable_size);
  74  size_t* bytes_allocated, size_t* usable_size) {
  [all...]
dlmalloc_space.h
  51  size_t* usable_size) OVERRIDE LOCKS_EXCLUDED(lock_);
  54  size_t* usable_size) OVERRIDE LOCKS_EXCLUDED(lock_) {
  55  return AllocNonvirtual(self, num_bytes, bytes_allocated, usable_size);
  58  virtual size_t AllocationSize(mirror::Object* obj, size_t* usable_size) OVERRIDE {
  59  return AllocationSizeNonvirtual(obj, usable_size);
  78  size_t* usable_size) LOCKS_EXCLUDED(lock_);
  81  size_t AllocationSizeNonvirtual(mirror::Object* obj, size_t* usable_size);
  137  size_t* usable_size)
valgrind_malloc_space.h
  34  size_t* usable_size) OVERRIDE;
  36  size_t* usable_size) OVERRIDE;
  38  size_t AllocationSize(mirror::Object* obj, size_t* usable_size) OVERRIDE;
zygote_space.h
  49  size_t* usable_size) OVERRIDE;
  51  size_t AllocationSize(mirror::Object* obj, size_t* usable_size) OVERRIDE;
bump_pointer_space.h
  50  size_t* usable_size) OVERRIDE;
  53  size_t* usable_size)
  60  size_t AllocationSize(mirror::Object* obj, size_t* usable_size) OVERRIDE
  62  return AllocationSizeNonvirtual(obj, usable_size);
  74  size_t AllocationSizeNonvirtual(mirror::Object* obj, size_t* usable_size)
large_object_space.cc
  41  size_t* usable_size) OVERRIDE {
  44  usable_size);
  50  if (usable_size != nullptr) {
  51  *usable_size = num_bytes;  // Since we have redzones, shrink the usable size.
  56  virtual size_t AllocationSize(mirror::Object* obj, size_t* usable_size) OVERRIDE {
  59  return LargeObjectMapSpace::AllocationSize(object_with_rdz, usable_size);
  111  size_t* bytes_allocated, size_t* usable_size) {
  131  if (usable_size != nullptr) {
  132  *usable_size = allocation_size;
  157  size_t LargeObjectMapSpace::AllocationSize(mirror::Object* obj, size_t* usable_size) {
  [all...]
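Unlike the malloc spaces, LargeObjectMapSpace gives every object its own allocation and answers AllocationSize() from bookkeeping rather than block headers; the redzone-adding subclass above additionally shrinks the reported usable size (line 51). A sketch of the map-based bookkeeping, with illustrative names and a plain std::map where the real space tracks memory mappings:

    #include <cstddef>
    #include <map>

    class LargeObjectSketch {
     public:
      void* Alloc(size_t num_bytes, size_t* bytes_allocated, size_t* usable_size) {
        void* mem = ::operator new(num_bytes);  // stand-in for one mapping per object
        sizes_[mem] = num_bytes;
        *bytes_allocated = num_bytes;
        if (usable_size != nullptr) {
          *usable_size = num_bytes;
        }
        return mem;
      }

      size_t AllocationSize(void* obj, size_t* usable_size) {
        const size_t size = sizes_.at(obj);  // throws if obj was never allocated here
        if (usable_size != nullptr) {
          *usable_size = size;
        }
        return size;
      }

     private:
      std::map<void*, size_t> sizes_;
    };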
malloc_space.h
  58  size_t* bytes_allocated, size_t* usable_size) = 0;
  61  size_t* usable_size) = 0;
  62  // Return the storage space required by obj. If usable_size isn't nullptr then it is set to the
  64  virtual size_t AllocationSize(mirror::Object* obj, size_t* usable_size) = 0;
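Lines 58-64 are the interface the concrete spaces above implement, including the (truncated) doc comment on AllocationSize. Restated as a minimal abstract class, with stand-in types:

    #include <cstddef>

    struct Object;  // placeholder for mirror::Object

    class AllocSpaceSketch {
     public:
      virtual ~AllocSpaceSketch() {}
      // Returns the new object, reporting the accounted bytes and, when
      // usable_size is non-null, the bytes the object may actually use.
      virtual Object* Alloc(size_t num_bytes, size_t* bytes_allocated,
                            size_t* usable_size) = 0;
      // Storage required by obj; sets *usable_size when non-null.
      virtual size_t AllocationSize(Object* obj, size_t* usable_size) = 0;
    };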
large_object_space.h
  119  size_t AllocationSize(mirror::Object* obj, size_t* usable_size);
  121  size_t* usable_size);
  147  size_t AllocationSize(mirror::Object* obj, size_t* usable_size) OVERRIDE
  150  size_t* usable_size) OVERRIDE;
space.h
  200  size_t* usable_size) = 0;
  204  size_t* usable_size)
  206  return Alloc(self, num_bytes, bytes_allocated, usable_size);
  210  virtual size_t AllocationSize(mirror::Object* obj, size_t* usable_size) = 0;
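Lines 204-206 show the default for the thread-unsafe entry point: unless a space overrides it with a cheaper unlocked path (as RosAllocSpace does above), AllocThreadUnsafe simply forwards to the ordinary Alloc. The forwarding pattern, sketched:

    #include <cstddef>

    struct Object;  // placeholder for mirror::Object

    class SpaceSketch {
     public:
      virtual ~SpaceSketch() {}
      virtual Object* Alloc(size_t num_bytes, size_t* bytes_allocated,
                            size_t* usable_size) = 0;
      // Safe default: callers that already hold the heavy locks get the
      // normal path unless a subclass provides a lock-free fast path.
      virtual Object* AllocThreadUnsafe(size_t num_bytes, size_t* bytes_allocated,
                                        size_t* usable_size) {
        return Alloc(num_bytes, bytes_allocated, usable_size);
      }
    };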
zygote_space.cc
  79  size_t* usable_size) {
  84  size_t ZygoteSpace::AllocationSize(mirror::Object* obj, size_t* usable_size) {
space_test.h
  64  size_t* bytes_allocated, size_t* usable_size)
  68  mirror::Object* obj = alloc_space->Alloc(self, bytes, bytes_allocated, usable_size);
  76  size_t* bytes_allocated, size_t* usable_size)
  80  mirror::Object* obj = alloc_space->AllocWithGrowth(self, bytes, bytes_allocated, usable_size);
  348  size_t allocation_size, usable_size; local
  351  &usable_size);
  355  EXPECT_EQ(usable_size, computed_usable_size);
  363  size_t allocation_size, usable_size; local
  364  lots_of_objects[i] = AllocWithGrowth(space, self, 1024, &allocation_size, &usable_size);
  368  EXPECT_EQ(usable_size, computed_usable_size)
  [all...]
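The test at lines 348-368 pins down the invariant the spaces share: the usable_size reported at allocation time must equal the one recomputed later by AllocationSize(). A condensed sketch of that round-trip check, using assert() in place of gtest's EXPECT_EQ and any allocator with the Alloc/AllocationSize pair sketched earlier in this listing:

    #include <cassert>
    #include <cstddef>

    template <typename Space>
    void CheckUsableSizeRoundTrip(Space* space, size_t bytes) {
      size_t allocation_size = 0;
      size_t usable_size = 0;
      void* obj = space->Alloc(bytes, &allocation_size, &usable_size);
      assert(obj != nullptr);

      size_t computed_usable_size = 0;
      space->AllocationSize(obj, &computed_usable_size);
      assert(usable_size == computed_usable_size);  // both paths must agree
    }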
dlmalloc_space.cc
  129  size_t* bytes_allocated, size_t* usable_size) {
  137  result = AllocWithoutGrowthLocked(self, num_bytes, bytes_allocated, usable_size);
rosalloc_space.cc
  147  size_t* bytes_allocated, size_t* usable_size) {
  155  result = AllocCommon(self, num_bytes, bytes_allocated, usable_size);

/art/runtime/gc/
heap-inl.h
  58  size_t usable_size; local
  75  usable_size = bytes_allocated;
  76  pre_fence_visitor(obj, usable_size);
  80  &usable_size);
  83  obj = AllocateInternalWithGc(self, allocator, byte_count, &bytes_allocated, &usable_size,
  97  DCHECK_GT(usable_size, 0u);
  117  pre_fence_visitor(obj, usable_size);
  123  CHECK_LE(obj->SizeOf(), usable_size);
  185  size_t* usable_size) {
  198  *usable_size = alloc_size
  [all...]
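This is where usable_size pays off: the heap passes it to a pre-fence visitor before publishing the object (lines 76 and 117), so a caller such as the array allocator can grow into the slack, and debug builds verify the object still fits (line 123). A heavily simplified sketch of that flow, with stand-ins for TryToAllocate and the GC retry path:

    #include <cassert>
    #include <cstddef>
    #include <cstdlib>
    #include <new>

    struct Object {
      size_t size_of;  // stand-in for obj->SizeOf()
    };

    template <typename PreFenceVisitor>
    Object* AllocObjectSketch(size_t byte_count,
                              const PreFenceVisitor& pre_fence_visitor) {
      void* mem = std::malloc(byte_count);  // stand-in for TryToAllocate()
      if (mem == nullptr) {
        return nullptr;  // the real path retries via AllocateInternalWithGc (line 83)
      }
      const size_t usable_size = byte_count;  // the allocator may report more (line 198)
      Object* obj = new (mem) Object{byte_count};
      pre_fence_visitor(obj, usable_size);    // line 117
      assert(obj->size_of <= usable_size);    // mirrors CHECK_LE at line 123
      return obj;
    }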
heap.cc
  [all...]
heap.h
  653  size_t* bytes_allocated, size_t* usable_size,
  672  size_t* usable_size)
  [all...]

/external/chromium_org/gpu/command_buffer/service/
vertex_attrib_manager.cc
  84  uint32 usable_size = buffer_size - offset_; local
  85  GLuint num_elements = usable_size / real_stride_ +
  86  ((usable_size % real_stride_) >=
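Same idea outside ART: the vertex attribute manager divides the bytes past the attribute's offset by the stride, counting one extra element if the trailing partial stride still holds a whole attribute. Line 86 is cut off mid-expression, so the right-hand operand below (attrib_size) is an assumption about what it compares against:

    #include <cstdint>

    // Completion of the arithmetic at lines 84-86; attrib_size is hypothetical.
    uint32_t NumElements(uint32_t buffer_size, uint32_t offset,
                         uint32_t real_stride, uint32_t attrib_size) {
      const uint32_t usable_size = buffer_size - offset;
      return usable_size / real_stride +
             ((usable_size % real_stride) >= attrib_size ? 1 : 0);
    }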

/art/runtime/mirror/
array-inl.h
  87  void operator()(Object* obj, size_t usable_size) const
  89  UNUSED(usable_size);
  110  void operator()(Object* obj, size_t usable_size) const
  115  int32_t length = (usable_size - header_size_) / component_size_;
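Line 115 is the consumer that motivates the whole out-parameter: a freshly allocated array claims every component that fits in its usable bytes, not just the requested length. The computation in isolation:

    #include <cstddef>
    #include <cstdint>

    int32_t LengthFromUsableSize(size_t usable_size, size_t header_size,
                                 size_t component_size) {
      return static_cast<int32_t>((usable_size - header_size) / component_size);
    }

    // Example: a 12-byte header and 4-byte components in 64 usable bytes
    // give a length of (64 - 12) / 4 = 13.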
object.cc
  107  void operator()(Object* obj, size_t usable_size) const
  109  UNUSED(usable_size);
class-inl.h
  716  mirror::Object* obj, size_t usable_size) const {
  717  DCHECK_LE(class_size_, usable_size);

/external/compiler-rt/lib/asan/
asan_allocator2.cc
  650  uptr usable_size = AllocationSize(reinterpret_cast<uptr>(ptr));
  651  if (flags()->check_malloc_usable_size && (usable_size == 0)) {
  655  return usable_size;
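ASan's malloc_usable_size path uses 0 as the "not our pointer" sentinel: AllocationSize() yields 0 for memory the allocator never handed out, and the check_malloc_usable_size flag escalates that to a reported error. A sketch of the caller-visible logic (the flags struct and the stubbed AllocationSize are illustrative, not ASan's internals):

    #include <cstddef>
    #include <cstdint>

    struct Flags { bool check_malloc_usable_size; };
    Flags g_flags = {true};

    // Stub: the real version consults allocator metadata and returns 0
    // for pointers it does not own.
    size_t AllocationSize(uintptr_t /*ptr*/) { return 0; }

    size_t MallocUsableSizeSketch(const void* ptr) {
      const size_t usable_size = AllocationSize(reinterpret_cast<uintptr_t>(ptr));
      if (g_flags.check_malloc_usable_size && usable_size == 0) {
        // The real code reports "not owned" here before returning.
        return 0;
      }
      return usable_size;
    }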