| /art/runtime/generated/ |
| asm_support_gen.h | 132 DEFINE_CHECK_EQ(static_cast<int32_t>(ROSALLOC_MAX_THREAD_LOCAL_BRACKET_SIZE), (static_cast<int32_t>((art::gc::allocator::RosAlloc::kMaxThreadLocalBracketSize)))) 134 DEFINE_CHECK_EQ(static_cast<int32_t>(ROSALLOC_BRACKET_QUANTUM_SIZE_SHIFT), (static_cast<int32_t>((art::gc::allocator::RosAlloc::kThreadLocalBracketQuantumSizeShift)))) 136 DEFINE_CHECK_EQ(static_cast<int32_t>(ROSALLOC_BRACKET_QUANTUM_SIZE_MASK), (static_cast<int32_t>((static_cast<int32_t>(art::gc::allocator::RosAlloc::kThreadLocalBracketQuantumSize - 1))))) 138 DEFINE_CHECK_EQ(static_cast<uint32_t>(ROSALLOC_BRACKET_QUANTUM_SIZE_MASK_TOGGLED32), (static_cast<uint32_t>((~static_cast<uint32_t>(art::gc::allocator::RosAlloc::kThreadLocalBracketQuantumSize - 1))))) 140 DEFINE_CHECK_EQ(static_cast<uint64_t>(ROSALLOC_BRACKET_QUANTUM_SIZE_MASK_TOGGLED64), (static_cast<uint64_t>((~static_cast<uint64_t>(art::gc::allocator::RosAlloc::kThreadLocalBracketQuantumSize - 1))))) 142 DEFINE_CHECK_EQ(static_cast<int32_t>(ROSALLOC_RUN_FREE_LIST_OFFSET), (static_cast<int32_t>((art::gc::allocator::RosAlloc::RunFreeListOffset())))) 144 DEFINE_CHECK_EQ(static_cast<int32_t>(ROSALLOC_RUN_FREE_LIST_HEAD_OFFSET), (static_cast<int32_t>((art::gc::allocator::RosAlloc::RunFreeListHeadOffset())))) 146 DEFINE_CHECK_EQ(static_cast<int32_t>(ROSALLOC_RUN_FREE_LIST_SIZE_OFFSET), (static_cast<int32_t>((art::gc::allocator::RosAlloc::RunFreeListSizeOffset())))) 148 DEFINE_CHECK_EQ(static_cast<int32_t>(ROSALLOC_SLOT_NEXT_OFFSET), (static_cast<int32_t>((art::gc::allocator::RosAlloc::RunSlotNextOffset()))))
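The hits above come from ART's generated asm_support header, which cross-checks constants used by hand-written assembly against their C++ sources of truth. As an illustration only, here is a minimal sketch of how such a DEFINE_CHECK_EQ-style guard can be built on static_assert; the macro, constant, and namespace names are hypothetical stand-ins, not ART's actual definitions, and ART's generated checks may be evaluated differently.

```cpp
#include <cstdint>

// Hypothetical stand-ins for a generated assembly constant and its C++ source of truth.
constexpr int32_t ROSALLOC_BRACKET_QUANTUM_SIZE_SHIFT_SKETCH = 3;
namespace art_sketch { constexpr int32_t kThreadLocalBracketQuantumSizeShift = 3; }

// One way to express such a check: a compile-time equality assertion that
// fails the build if the assembly constant drifts from the C++ constant.
#define DEFINE_CHECK_EQ_SKETCH(asm_value, cxx_value) \
  static_assert((asm_value) == (cxx_value), "assembly constant out of sync with C++")

DEFINE_CHECK_EQ_SKETCH(ROSALLOC_BRACKET_QUANTUM_SIZE_SHIFT_SKETCH,
                       art_sketch::kThreadLocalBracketQuantumSizeShift);
```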
|
| /frameworks/native/vulkan/libvulkan/ |
| swapchain.cpp | 487 const VkAllocationCallbacks* allocator, 489 if (!allocator) 490 allocator = &GetData(instance).allocator; 491 void* mem = allocator->pfnAllocation(allocator->pUserData, sizeof(Surface), 506 allocator->pfnFree(allocator->pUserData, surface); 519 allocator->pfnFree(allocator->pUserData, surface) 1505 const VkAllocationCallbacks* allocator = &GetData(device).allocator; local [all...] |
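The swapchain hits show the standard Vulkan callback-allocator pattern: fall back to the instance's VkAllocationCallbacks when the caller passes none, allocate through pfnAllocation, and release through pfnFree. Below is a hedged sketch of that pattern; the Surface struct, the function names, and the explicit fallback parameter are illustrative assumptions, while the callback signatures and enum values come from the Vulkan headers.

```cpp
#include <new>
#include <vulkan/vulkan.h>

struct Surface { /* driver-side bookkeeping, details omitted */ };

// Sketch: allocate a Surface through the caller's allocator, or through a
// fallback set of callbacks when the caller passes nullptr (mirroring
// "if (!allocator) allocator = &GetData(instance).allocator" above).
VkResult CreateSurfaceSketch(const VkAllocationCallbacks* allocator,
                             const VkAllocationCallbacks* fallback,
                             Surface** out_surface) {
  if (!allocator)
    allocator = fallback;
  void* mem = allocator->pfnAllocation(allocator->pUserData, sizeof(Surface),
                                       alignof(Surface),
                                       VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
  if (!mem)
    return VK_ERROR_OUT_OF_HOST_MEMORY;
  *out_surface = new (mem) Surface();
  return VK_SUCCESS;
}

// Sketch: destruction runs the destructor manually, then returns the memory
// through the same callbacks that produced it.
void DestroySurfaceSketch(const VkAllocationCallbacks* allocator, Surface* surface) {
  if (!surface)
    return;
  surface->~Surface();
  allocator->pfnFree(allocator->pUserData, surface);
}
```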
| /art/runtime/gc/space/ |
| rosalloc_space.h | 20 #include "gc/allocator/rosalloc.h" 33 // An alloc space implemented using a runs-of-slots memory allocator. Not final as may be 102 allocator::RosAlloc* GetRosAlloc() const { 114 MallocSpace* CreateInstance(MemMap* mem_map, const std::string& name, void* allocator, 151 allocator::RosAlloc* rosalloc, uint8_t* begin, uint8_t* end, uint8_t* limit, 165 static allocator::RosAlloc* CreateRosAlloc(void* base, size_t morecore_start, size_t initial_size, 178 allocator::RosAlloc* rosalloc_;
|
| /external/libxcam/wrapper/gstreamer/ |
| gstxcambufferpool.cpp | 111 if (pool->allocator) 112 gst_object_unref (pool->allocator); 147 pool->allocator = gst_dmabuf_allocator_new (); 148 if (pool->allocator == NULL) { 149 GST_WARNING ("xcam buffer pool get allocator failed"); 190 pool->allocator, dup (video_buf->get_fd ()), video_buf->get_size ());
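These hits show the pool creating a dmabuf allocator with gst_dmabuf_allocator_new(), unreffing it on teardown, and wrapping exported buffer fds via gst_dmabuf_allocator_alloc() with a dup()ed descriptor. A small sketch of that wrapping step, assuming gst_init() has already run; the helper name is hypothetical and error handling is simplified.

```cpp
#include <gst/gst.h>
#include <gst/allocators/gstdmabuf.h>
#include <unistd.h>

// Sketch: wrap an exported dmabuf fd in a GstMemory owned by a dmabuf
// allocator. The fd is dup()ed so the GstMemory holds its own reference,
// matching the "dup (video_buf->get_fd ())" call above.
static GstMemory* wrap_dmabuf_sketch(GstAllocator* allocator, int fd, gsize size) {
  if (allocator == NULL)
    return NULL;
  return gst_dmabuf_allocator_alloc(allocator, dup(fd), size);
}

// Usage sketch (after gst_init()):
//   GstAllocator* allocator = gst_dmabuf_allocator_new();
//   GstMemory* mem = wrap_dmabuf_sketch(allocator, video_fd, video_size);
//   ...
//   gst_object_unref(allocator);
```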
|
| /external/llvm/unittests/ADT/ |
| IntervalMapTest.cpp | 21 UUMap::Allocator allocator; local 22 UUMap map(allocator); 55 UUMap::Allocator allocator; local 56 UUMap map(allocator); 142 UUMap::Allocator allocator; local 143 UUMap map(allocator); 212 UUMap::Allocator allocator local 326 UUMap::Allocator allocator; local 469 UUMap::Allocator allocator; local 545 UUMap::Allocator allocator; local 569 UUMap::Allocator allocator; local 613 UUMap::Allocator allocator; local [all...] |
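Every test above opens with the same two locals: an IntervalMap allocator and a map constructed from it, because the map only holds a reference and the allocator must outlive it. A minimal sketch of that setup using llvm::IntervalMap; the unsigned key/value choice mirrors the UUMap typedef, and the inserted interval is arbitrary.

```cpp
#include "llvm/ADT/IntervalMap.h"

void IntervalMapSketch() {
  typedef llvm::IntervalMap<unsigned, unsigned> UUMap;

  // The allocator is declared first and must outlive the map that uses it.
  UUMap::Allocator allocator;
  UUMap map(allocator);

  map.insert(10, 20, 1);        // map the closed interval [10, 20] to value 1
  unsigned v = map.lookup(15);  // v == 1; keys outside any interval yield 0
  (void)v;
}
```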
| /external/swiftshader/third_party/LLVM/unittests/ADT/ |
| IntervalMapTest.cpp | 21 UUMap::Allocator allocator; local 22 UUMap map(allocator); 55 UUMap::Allocator allocator; local 56 UUMap map(allocator); 142 UUMap::Allocator allocator; local 143 UUMap map(allocator); 212 UUMap::Allocator allocator local 326 UUMap::Allocator allocator; local 469 UUMap::Allocator allocator; local 545 UUMap::Allocator allocator; local 569 UUMap::Allocator allocator; local 613 UUMap::Allocator allocator; local [all...] |
| /external/tensorflow/tensorflow/core/kernels/ |
| scatter_nd_op_test.cc | 20 #include "tensorflow/core/framework/allocator.h" 59 // Tensor expected(allocator(), DT_STRING, TensorShape({1})); 72 // Tensor expected(allocator(), DT_BOOL, TensorShape({1})); 90 Tensor expected(allocator(), DT_FLOAT, TensorShape({5, 3})); 109 Tensor expected(allocator(), DT_FLOAT, TensorShape({5, 3})); 139 Tensor expected(allocator(), DT_FLOAT, TensorShape({5})); 155 Tensor expected(allocator(), DT_FLOAT, TensorShape({5})); 171 Tensor expected(allocator(), DT_FLOAT, TensorShape({8}));
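In these tests the expected tensor is built with the fixture's allocator() so its backing buffer comes from an explicit Allocator rather than the default path. A hedged sketch of constructing a tensor against the process CPU allocator; the function name is hypothetical and the fill step is only illustrative.

```cpp
#include "tensorflow/core/framework/allocator.h"
#include "tensorflow/core/framework/tensor.h"
#include "tensorflow/core/framework/tensor_shape.h"

// Sketch: allocate a tensor's buffer through an explicit Allocator, as the
// tests do with Tensor expected(allocator(), DT_FLOAT, TensorShape({5, 3})).
void MakeExpectedTensorSketch() {
  tensorflow::Allocator* a = tensorflow::cpu_allocator();
  tensorflow::Tensor expected(a, tensorflow::DT_FLOAT,
                              tensorflow::TensorShape({5, 3}));
  expected.flat<float>().setZero();  // fill with known values before comparing
}
```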
|
| /external/webrtc/talk/app/webrtc/ |
| peerconnectionfactory.cc | 252 rtc::scoped_ptr<cricket::PortAllocator> allocator, 265 if (!allocator) { 266 allocator.reset(new cricket::BasicPortAllocator( 269 allocator->SetNetworkIgnoreMask(options_.network_ignore_mask); 273 if (!pc->Initialize(configuration, constraints, std::move(allocator),
|
| /frameworks/native/libs/vr/libpdx/private/pdx/rpc/ |
| buffer_wrapper.h | 99 template <typename T, typename Allocator> 100 class BufferWrapper<std::vector<T, Allocator>> { 102 using BufferType = typename std::vector<T, Allocator>; 114 BufferWrapper(const BufferType& buffer, const Allocator& allocator) 115 : buffer_(buffer, allocator) {} 117 BufferWrapper(BufferType&& buffer, const Allocator& allocator) 118 : buffer_(std::move(buffer), allocator) {} 166 template <typename T, typename Allocator = std::allocator<T> [all...] |
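The libpdx wrapper above is a partial specialization for std::vector<T, Allocator> that forwards a caller-supplied allocator into the wrapped vector's constructors. The sketch below mirrors that shape in plain C++; the class and member names are illustrative, not the libpdx definitions.

```cpp
#include <cstddef>
#include <utility>
#include <vector>

template <typename T>
class BufferWrapperSketch;  // primary template; only the vector form is shown

// Specialization for std::vector<T, Allocator>: the allocator argument is
// forwarded to the vector's (buffer, allocator) constructors, as in the
// BufferWrapper(BufferType&&, const Allocator&) overloads above.
template <typename T, typename Allocator>
class BufferWrapperSketch<std::vector<T, Allocator>> {
 public:
  using BufferType = std::vector<T, Allocator>;

  BufferWrapperSketch() = default;
  BufferWrapperSketch(const BufferType& buffer, const Allocator& allocator)
      : buffer_(buffer, allocator) {}
  BufferWrapperSketch(BufferType&& buffer, const Allocator& allocator)
      : buffer_(std::move(buffer), allocator) {}

  T* data() { return buffer_.data(); }
  std::size_t size() const { return buffer_.size(); }

 private:
  BufferType buffer_;
};

// Usage with the default allocator, echoing "typename Allocator = std::allocator<T>":
//   BufferWrapperSketch<std::vector<int>> wrapped({1, 2, 3}, std::allocator<int>());
```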
| /system/core/libmemunreachable/tests/ |
| HeapWalker_test.cpp | 24 #include "Allocator.h" 90 allocator::vector<Range> leaked(heap_); 118 allocator::vector<Range> leaked(heap_); 146 allocator::vector<Range> leaked(heap_); 171 allocator::vector<Range> leaked(heap_); 195 allocator::vector<Range> leaked(heap_);
|
| /art/libartbase/base/ |
| bit_vector.h | 28 class Allocator; 118 Allocator* allocator); 121 Allocator* allocator, 127 Allocator* allocator); 262 Allocator* GetAllocator() const; 289 Allocator* const allocator_; // Allocator if expandable [all...] |
| /external/dng_sdk/source/ |
| dng_resample.h | 91 dng_memory_allocator &allocator); 127 dng_memory_allocator &allocator); 211 dng_memory_allocator &allocator);
|
| dng_simple_image.cpp | 28 dng_memory_allocator &allocator) 35 , fAllocator (allocator) 42 fMemory.Reset (allocator.Allocate (bytes));
|
| dng_1d_table.cpp | 108 void dng_1d_table::Initialize (dng_memory_allocator &allocator, 113 fBuffer.Reset (allocator.Allocate ((kTableSize + 2) * sizeof (real32)));
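Across the DNG SDK hits an injected dng_memory_allocator does the raw allocation and the caller keeps ownership of the returned block, e.g. fBuffer.Reset (allocator.Allocate (...)) in dng_1d_table::Initialize. Below is a generic, hedged sketch of that injection pattern with hypothetical stand-in types; it is not the DNG SDK's API.

```cpp
#include <cstddef>
#include <memory>
#include <vector>

// Hypothetical stand-ins for dng_memory_block / dng_memory_allocator.
class MemoryBlockSketch {
 public:
  explicit MemoryBlockSketch(std::size_t bytes) : storage_(bytes) {}
  void* Buffer() { return storage_.data(); }
 private:
  std::vector<unsigned char> storage_;
};

class MemoryAllocatorSketch {
 public:
  virtual ~MemoryAllocatorSketch() = default;
  // Subclasses could allocate from pools, mapped files, etc.
  virtual MemoryBlockSketch* Allocate(std::size_t bytes) {
    return new MemoryBlockSketch(bytes);
  }
};

// The injection pattern: the caller passes the allocator in, and the object
// owns the block it got back (compare fBuffer.Reset(allocator.Allocate(...))).
class TableSketch {
 public:
  void Initialize(MemoryAllocatorSketch& allocator, std::size_t entries) {
    buffer_.reset(allocator.Allocate(entries * sizeof(float)));
  }
 private:
  std::unique_ptr<MemoryBlockSketch> buffer_;
};
```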
|
| dng_xmp_sdk.h | 189 dng_memory_block * Serialize (dng_memory_allocator &allocator, 196 void PackageForJPEG (dng_memory_allocator &allocator,
|
| /external/swiftshader/third_party/LLVM/include/llvm/Support/ |
| Allocator.h | 1 //===--- Allocator.h - Simple memory allocation abstraction -----*- C++ -*-===// 52 /// the bump allocator. 60 /// allocation strategy for the bump allocator. In particular, this is used 62 /// interface uses MemSlab's instead of void *'s so that the allocator 71 /// MallocSlabAllocator - The default slab allocator for the bump allocator 75 /// Allocator - The underlying allocator that we forward to. 77 MallocAllocator Allocator; 80 MallocSlabAllocator() : Allocator() { } [all...] |
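The comments above describe LLVM's bump-pointer allocator, which carves allocations out of slabs obtained from a slab allocator (malloc-backed by default) and releases them all at once. The sketch below shows the bump-pointer idea in isolation; it is a simplified illustration, not BumpPtrAllocator, and it assumes power-of-two alignments and requests smaller than one slab.

```cpp
#include <cstddef>
#include <cstdint>
#include <cstdlib>
#include <vector>

// Minimal bump-pointer sketch: grab a slab, hand out aligned chunks by
// advancing a cursor, and free every slab at once in the destructor.
class BumpAllocatorSketch {
 public:
  explicit BumpAllocatorSketch(std::size_t slab_size = 4096)
      : slab_size_(slab_size) {}

  ~BumpAllocatorSketch() {
    for (void* slab : slabs_) std::free(slab);  // bulk release, no per-object free
  }

  // alignment must be a power of two; size must fit in a single slab.
  void* Allocate(std::size_t size, std::size_t alignment) {
    std::uintptr_t p = Align(cur_, alignment);
    if (slabs_.empty() || p + size > end_) {
      void* slab = std::malloc(slab_size_);  // stands in for the slab allocator
      slabs_.push_back(slab);
      cur_ = reinterpret_cast<std::uintptr_t>(slab);
      end_ = cur_ + slab_size_;
      p = Align(cur_, alignment);
    }
    cur_ = p + size;
    return reinterpret_cast<void*>(p);
  }

 private:
  static std::uintptr_t Align(std::uintptr_t v, std::size_t a) {
    return (v + a - 1) & ~static_cast<std::uintptr_t>(a - 1);
  }

  std::size_t slab_size_;
  std::vector<void*> slabs_;
  std::uintptr_t cur_ = 0;
  std::uintptr_t end_ = 0;
};
```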
| /external/vixl/src/ |
| code-buffer-vixl.cc | 62 #error Unknown code buffer allocator. 65 // Aarch64 instructions must be word aligned, we assert the default allocator 91 #error Unknown code buffer allocator. 183 #error Unknown code buffer allocator.
|
| /system/core/libmemunreachable/ |
| HeapWalker.cpp | 25 #include "Allocator.h" 74 allocator::vector<Range> to_do(1, root, allocator_); 92 void HeapWalker::Root(const allocator::vector<uintptr_t>& vals) { 119 bool HeapWalker::Leaked(allocator::vector<Range>& leaked, size_t limit, size_t* num_leaks_out,
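HeapWalker keeps its work list and results in allocator::vector, i.e. std::vector bound to libmemunreachable's own STL-compatible allocator so bookkeeping memory comes from a private heap rather than the heap being inspected. A sketch of that aliasing pattern, with a trivial malloc-backed allocator standing in for the real one; all names here are illustrative.

```cpp
#include <cstddef>
#include <cstdint>
#include <cstdlib>
#include <vector>

// Stand-in for the library's Allocator<T>: an STL-compatible allocator that
// could draw from a private heap; here it simply wraps malloc/free.
template <typename T>
struct PrivateHeapAllocatorSketch {
  using value_type = T;
  PrivateHeapAllocatorSketch() = default;
  template <typename U>
  PrivateHeapAllocatorSketch(const PrivateHeapAllocatorSketch<U>&) {}
  T* allocate(std::size_t n) { return static_cast<T*>(std::malloc(n * sizeof(T))); }
  void deallocate(T* p, std::size_t) { std::free(p); }
};

template <typename T, typename U>
bool operator==(const PrivateHeapAllocatorSketch<T>&, const PrivateHeapAllocatorSketch<U>&) {
  return true;
}
template <typename T, typename U>
bool operator!=(const PrivateHeapAllocatorSketch<T>&, const PrivateHeapAllocatorSketch<U>&) {
  return false;
}

// Mirrors the allocator::vector<T> alias used in the hits above.
namespace allocator_sketch {
template <typename T>
using vector = std::vector<T, PrivateHeapAllocatorSketch<T>>;
}

struct RangeSketch { std::uintptr_t begin; std::uintptr_t end; };

// Echoes "allocator::vector<Range> to_do(1, root, allocator_)":
//   allocator_sketch::vector<RangeSketch> to_do(
//       1, RangeSketch{0, 0}, PrivateHeapAllocatorSketch<RangeSketch>());
```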
|
| /external/webrtc/webrtc/p2p/client/ |
| portallocator_unittest.cc | 277 allocator().set_allow_tcp_listen(false); 323 cricket::BasicPortAllocator& allocator() { function in class:PortAllocatorTest 370 allocator().set_step_delay(cricket::kMinimumStepDelay); 391 // Tests that we can init the port allocator and create a session. 393 EXPECT_EQ(&network_manager_, allocator().network_manager()); 394 EXPECT_EQ(kStunAddr, *allocator().stun_servers().begin()); 395 ASSERT_EQ(1u, allocator().turn_servers().size()); 396 EXPECT_EQ(cricket::RELAY_GTURN, allocator().turn_servers()[0].type); 398 EXPECT_TRUE(allocator().turn_servers()[0].credentials.username.empty()); 399 EXPECT_TRUE(allocator().turn_servers()[0].credentials.password.empty()) [all...] |
| /art/compiler/optimizing/ |
| code_sinking.cc | 256 // Local allocator to discard data structures created below at the end of this optimization. 257 ScopedArenaAllocator allocator(graph_->GetArenaStack()); 260 ScopedArenaVector<HInstruction*> worklist(allocator.Adapter(kArenaAllocMisc)); 261 ArenaBitVector processed_instructions(&allocator, number_of_instructions, /* expandable */ false); 263 ArenaBitVector post_dominated(&allocator, graph_->GetBlocks().size(), /* expandable */ false); 266 &allocator, number_of_instructions, /* expandable */ false); 268 ScopedArenaVector<HInstruction*> move_in_order(allocator.Adapter(kArenaAllocMisc));
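The pass builds its temporary worklists and bit vectors in a ScopedArenaAllocator so everything is discarded together when the optimization finishes. The standard-library analogue of that pattern is a monotonic memory resource scoped to the function; the sketch below uses that analogue (C++17 <memory_resource>), not ART's arena classes, and the container choices are only stand-ins.

```cpp
#include <memory_resource>
#include <vector>

void code_sinking_like_pass_sketch() {
  // All temporaries below allocate from this arena and are released together
  // when the resource goes out of scope, like ScopedArenaAllocator.
  std::pmr::monotonic_buffer_resource arena;

  std::pmr::vector<int> worklist(&arena);                  // stands in for the instruction worklist
  std::pmr::vector<bool> processed(1024, false, &arena);   // stands in for ArenaBitVector

  worklist.push_back(42);
  processed[42] = true;
}  // the arena releases all of its memory here in one shot
```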
|
| /bionic/linker/ |
| linker_allocator.cpp | 45 // LinkerMemeoryAllocator is general purpose allocator 50 // If size is >= 1k allocator proxies malloc call directly to mmap 51 // If size < 1k allocator uses SmallObjectAllocator for the size 56 // For a pointer allocated using proxy-to-mmap allocator unmaps 309 LinkerSmallObjectAllocator* allocator = get_small_object_allocator(info->type); local 310 if (allocator != info->allocator_addr) { 314 old_size = allocator->get_block_size(); 337 LinkerSmallObjectAllocator* allocator = get_small_object_allocator(info->type); local 338 if (allocator != info->allocator_addr) { 339 async_safe_fatal("invalid pointer %p (invalid allocator address for the page)", ptr) [all...] |
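The comments above describe the linker allocator's split: requests of 1 KiB and up are proxied straight to mmap, smaller ones go through a small-object allocator, and freeing an mmap-backed pointer unmaps the region. The sketch below illustrates that dispatch; the real code identifies the owning allocator from a per-page header, whereas this sketch takes a flag and stubs the small-object side with malloc/free, so all names here are hypothetical.

```cpp
#include <cstddef>
#include <cstdlib>
#include <sys/mman.h>
#include <unistd.h>

constexpr std::size_t kSmallObjectMaxSize = 1024;  // the "1k" threshold described above

// Stub standing in for LinkerSmallObjectAllocator: the real one carves
// fixed-size blocks out of pages and records the owning allocator per page.
static void* small_alloc_sketch(std::size_t size) { return std::malloc(size); }
static void small_free_sketch(void* ptr) { std::free(ptr); }

struct MmapHeader {            // prepended so free() knows the mapping length
  std::size_t mapping_size;
};

void* linker_alloc_sketch(std::size_t size) {
  if (size >= kSmallObjectMaxSize) {
    // Proxy directly to mmap, rounding up to whole pages.
    std::size_t page = static_cast<std::size_t>(sysconf(_SC_PAGESIZE));
    std::size_t total = ((size + sizeof(MmapHeader)) + page - 1) / page * page;
    void* map = mmap(nullptr, total, PROT_READ | PROT_WRITE,
                     MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
    if (map == MAP_FAILED) return nullptr;
    static_cast<MmapHeader*>(map)->mapping_size = total;
    return static_cast<MmapHeader*>(map) + 1;
  }
  return small_alloc_sketch(size);
}

void linker_free_sketch(void* ptr, bool was_mmapped) {
  if (ptr == nullptr) return;
  if (was_mmapped) {
    MmapHeader* header = static_cast<MmapHeader*>(ptr) - 1;
    munmap(header, header->mapping_size);  // unmap the whole proxied region
  } else {
    small_free_sketch(ptr);
  }
}
```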
| /external/tensorflow/tensorflow/core/grappler/clusters/ |
| single_machine.cc | 198 // No need to enable GPU allocator stats since its stats are always collected. 218 auto* allocator = device->GetAllocator(AllocatorAttributes()); local 219 if (!allocator->TracksAllocationSizes()) { 223 allocator->GetStats(&stats); 406 auto* allocator = device->GetAllocator(AllocatorAttributes()); local 407 if (!allocator->TracksAllocationSizes()) { 411 allocator->ClearStats();
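The cluster code only reads allocator statistics when TracksAllocationSizes() is true, then snapshots and clears them. A hedged sketch of that check-then-query pattern, assuming the TensorFlow Allocator API of this vintage in which GetStats() fills a caller-provided AllocatorStats; the helper name is hypothetical.

```cpp
#include "tensorflow/core/framework/allocator.h"

// Sketch: skip allocators that don't track sizes, otherwise snapshot and reset
// their stats, mirroring the pattern quoted from single_machine.cc.
bool SnapshotAllocatorStatsSketch(tensorflow::Allocator* allocator,
                                  tensorflow::AllocatorStats* out_stats) {
  if (!allocator->TracksAllocationSizes()) {
    return false;  // stats would be meaningless, as the code above checks
  }
  allocator->GetStats(out_stats);
  allocator->ClearStats();
  return true;
}
```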
|
| /art/compiler/utils/arm64/ |
| assembler_arm64.h | 66 explicit Arm64Assembler(ArenaAllocator* allocator) : Assembler(allocator) {}
|
| /external/libchrome/base/ |
| feature_list.h | 98 // Initializes feature overrides through the field trial allocator, which 101 void InitializeFromSharedMemory(PersistentMemoryAllocator* allocator); 135 // Loops through feature overrides and serializes them all into |allocator|. 136 void AddFeaturesToAllocator(PersistentMemoryAllocator* allocator);
|
| /external/libchrome/base/metrics/ |
| sparse_histogram.h | 34 // Create a histogram using data in persistent storage. The allocator must 37 PersistentHistogramAllocator* allocator, 68 SparseHistogram(PersistentHistogramAllocator* allocator,
|