| /external/tensorflow/tensorflow/core/kernels/ |
| fake_quant_ops_test.cc | 72 Tensor expected(allocator(), DT_FLOAT, shape); 101 Tensor expected(allocator(), DT_FLOAT, TensorShape({2, 3})); 131 Tensor expected(allocator(), DT_FLOAT, shape); 380 Tensor expected(allocator(), DT_FLOAT, TensorShape({2, 3})); 410 Tensor expected(allocator(), DT_FLOAT, TensorShape({2, 3})); 441 Tensor expected(allocator(), DT_FLOAT, TensorShape({2, 3})); 472 Tensor expected(allocator(), DT_FLOAT, TensorShape({2, 3})); 668 Tensor expected_bprop_wrt_input(allocator(), DT_FLOAT, TensorShape({2, 3})); 676 Tensor expected_bprop_wrt_min(allocator(), DT_FLOAT, TensorShape({})); 681 Tensor expected_bprop_wrt_max(allocator(), DT_FLOAT, TensorShape({})) [all...] |
| scatter_op_test.cc | 20 #include "tensorflow/core/framework/allocator.h" 58 Tensor expected(allocator(), DT_STRING, TensorShape({1})); 71 Tensor expected(allocator(), DT_BOOL, TensorShape({1})); 89 Tensor expected(allocator(), DT_FLOAT, TensorShape({5, 3})); 108 Tensor expected(allocator(), DT_FLOAT, TensorShape({5, 3})); 125 Tensor expected(allocator(), DT_FLOAT, TensorShape({5})); 141 Tensor expected(allocator(), DT_FLOAT, TensorShape({5})); 157 Tensor expected(allocator(), DT_FLOAT, TensorShape({8}));
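The recurring `Tensor expected(allocator(), ...)` lines in these kernel tests are the comparison half of TensorFlow's OpKernel test idiom. A minimal sketch of that idiom, assuming the `OpsTestBase` fixture and the stock `Identity` op; the fixture name, test name, and values here are illustrative, not taken from these files:

```cpp
#include "tensorflow/core/framework/fake_input.h"
#include "tensorflow/core/framework/node_def_builder.h"
#include "tensorflow/core/framework/tensor_testutil.h"
#include "tensorflow/core/kernels/ops_testutil.h"
#include "tensorflow/core/lib/core/status_test_util.h"
#include "tensorflow/core/platform/test.h"

namespace tensorflow {

class IdentityOpTest : public OpsTestBase {};  // hypothetical fixture

TEST_F(IdentityOpTest, ExpectedTensorPattern) {
  TF_ASSERT_OK(NodeDefBuilder("identity_op", "Identity")
                   .Input(FakeInput(DT_FLOAT))
                   .Finalize(node_def()));
  TF_ASSERT_OK(InitOp());
  AddInputFromArray<float>(TensorShape({2, 3}),
                           {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f});
  TF_ASSERT_OK(RunOpKernel());

  // The idiom in the hits above: build "expected" with the fixture's
  // allocator(), fill it, then compare element-wise with the kernel's output.
  Tensor expected(allocator(), DT_FLOAT, TensorShape({2, 3}));
  test::FillValues<float>(&expected, {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f});
  test::ExpectTensorEqual<float>(expected, *GetOutput(0));
}

}  // namespace tensorflow
```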
|
| /art/compiler/optimizing/ |
| optimizing_unit_test.h | 59 ScopedArenaAllocator* allocator, 63 LiveInterval::MakeInterval(allocator, DataType::Type::kInt32, defined_by); 122 ArenaAllocator* const allocator = pool_and_allocator_->GetAllocator(); local 126 const uint8_t* dex_data = reinterpret_cast<uint8_t*>(allocator->Alloc(kDexDataSize)); 138 return new (allocator) HGraph( 139 allocator,
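`new (allocator) HGraph(allocator, ...)` works because ART's graph objects overload `operator new` to carve memory out of an arena. A self-contained sketch of that placement-new-on-arena idiom under simplified, hypothetical types (`Arena`, `ArenaObject`, `Node` are illustrative, not the ART classes):

```cpp
#include <cstddef>
#include <cstdint>
#include <memory>
#include <vector>

// Toy arena: one heap block per request for simplicity; a real arena
// bump-allocates inside large blocks and releases everything at once.
class Arena {
 public:
  void* Alloc(std::size_t bytes) {
    blocks_.emplace_back(new uint8_t[bytes]);
    return blocks_.back().get();
  }
 private:
  std::vector<std::unique_ptr<uint8_t[]>> blocks_;
};

// Base class providing the placement form `new (arena) T(...)`.
// Destructors are never run; the arena's memory is reclaimed wholesale.
class ArenaObject {
 public:
  void* operator new(std::size_t size, Arena* arena) { return arena->Alloc(size); }
  void operator delete(void*, Arena*) {}  // matching form, used only if a ctor throws
};

struct Node : ArenaObject {
  explicit Node(int v) : value(v) {}
  int value;
};

int main() {
  Arena arena;
  Node* n = new (&arena) Node(42);  // same shape as `new (allocator) HGraph(...)`
  return n->value == 42 ? 0 : 1;
}
```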
|
| ssa_liveness_analysis.h | 37 BlockInfo(ScopedArenaAllocator* allocator, const HBasicBlock& block, size_t number_of_ssa_values) 39 live_in_(allocator, number_of_ssa_values, false, kArenaAllocSsaLiveness), 40 live_out_(allocator, number_of_ssa_values, false, kArenaAllocSsaLiveness), 41 kill_(allocator, number_of_ssa_values, false, kArenaAllocSsaLiveness) { 87 LiveRange* Dup(ScopedArenaAllocator* allocator) const { 88 return new (allocator) LiveRange( 89 start_, end_, next_ == nullptr ? nullptr : next_->Dup(allocator)); 140 UsePosition* Clone(ScopedArenaAllocator* allocator) const { 141 return new (allocator) UsePosition(user_, input_index_, position_); 185 EnvUsePosition* Clone(ScopedArenaAllocator* allocator) const [all...] |
| gvn.cc | 40 explicit ValueSet(ScopedArenaAllocator* allocator) 41 : allocator_(allocator), 43 buckets_(allocator->AllocArray<Node*>(num_buckets_, kArenaAllocGvn)), 44 buckets_owned_(allocator, num_buckets_, false, kArenaAllocGvn), 54 ValueSet(ScopedArenaAllocator* allocator, const ValueSet& other) 55 : allocator_(allocator), 57 buckets_(allocator->AllocArray<Node*>(num_buckets_, kArenaAllocGvn)), 58 buckets_owned_(allocator, num_buckets_, false, kArenaAllocGvn), 199 Node* Dup(ScopedArenaAllocator* allocator, Node* new_next = nullptr) { 200 return new (allocator) Node(instruction_, hash_code_, new_next) [all...] |
| codegen_test_utils.h | 265 static void Run(const InternalCodeAllocator& allocator, 272 CommonCompilerTest::MakeExecutable(allocator.GetMemory(), allocator.GetSize()); 273 fptr f = reinterpret_cast<fptr>(allocator.GetMemory()); 308 InternalCodeAllocator allocator; local 309 codegen->Compile(&allocator); 310 Run(allocator, *codegen, has_result, expected);
|
| /external/clang/test/CodeGenCXX/ |
| debug-info-use-after-free.cpp | 99 template < typename _Tp > struct allocator:new_allocator < _Tp > { struct in inherits:new_allocator 113 allocator < _Tp > >struct vector:_Vector_base < _Tp, _Alloc > { }; 139 template < typename _Tp > class allocator:__gnu_cxx::new_allocator < _Tp > { class in namespace:std 143 template < typename _Tp, typename _Alloc = std::allocator < _Tp > >class vector:_Vector_base < _Tp, 200 std::allocator; 205 allocator < _Tp > >class hash_map {
|
| /external/compiler-rt/lib/asan/ |
| asan_allocator.cc | 12 // Implementation of ASan's memory allocator, 2-nd version. 13 // This variant uses the allocator from sanitizer_common, i.e. the one shared 52 // The memory chunk allocated from the underlying allocator looks like this: 99 // Every chunk of memory allocated by this allocator can be in one of 3 states: 148 // Clear the magic value, as allocator internals may overwrite the 224 struct Allocator { 230 AsanAllocator allocator; member in struct:__asan::Allocator 242 explicit Allocator(LinkerInitialized) 265 allocator.Init(options.may_return_null); 270 allocator.SetMayReturnNull(options.may_return_null) [all...] |
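The comments in these hits describe a per-chunk header and a three-state lifecycle. A hedged sketch of that scheme as described; the state names, layout, and helper below are illustrative, not ASan's actual definitions:

```cpp
#include <atomic>
#include <cstdint>

// Conceptually, a small header precedes every user allocation.
enum ChunkState : uint8_t {
  kAvailable,   // back in the underlying allocator, ready to be handed out again
  kAllocated,   // live, owned by the application
  kQuarantined  // freed, but parked so stale pointers can still be diagnosed
};

struct ChunkHeader {
  std::atomic<ChunkState> state;
  uint32_t requested_size;  // user-requested size, redzones excluded
};

// Deallocation flips the state atomically; only Allocated -> Quarantined is
// legal, so a second free of the same chunk shows up as a double-free.
inline bool QuarantineChunk(ChunkHeader* h) {
  ChunkState expected = kAllocated;
  return h->state.compare_exchange_strong(expected, kQuarantined);
}
```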
| /external/dng_sdk/source/ |
| dng_opcodes.cpp | 308 dng_memory_allocator *allocator, 314 allocator, 323 *allocator); 448 dng_memory_allocator *allocator, 458 fBuffer [threadIndex] . Reset (allocator->Allocate (bufferSize)); 468 *allocator);
|
| /external/libcxx/test/std/utilities/allocator.adaptor/allocator.adaptor.types/ |
| allocator_pointers.pass.cpp | 115 test_pointer<std::scoped_allocator_adaptor<std::allocator<char>>> (); 116 test_pointer<std::scoped_allocator_adaptor<std::allocator<int>>> (); 117 test_pointer<std::scoped_allocator_adaptor<std::allocator<Foo>>> (); 119 test_void_pointer<std::scoped_allocator_adaptor<std::allocator<char>>> (); 120 test_void_pointer<std::scoped_allocator_adaptor<std::allocator<int>>> (); 121 test_void_pointer<std::scoped_allocator_adaptor<std::allocator<Foo>>> ();
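These hits are the libc++ test instantiations; the property being exercised is that the adaptor's pointer typedefs collapse to raw pointers when the outer allocator is `std::allocator`. A compile-time sketch of that check (`Foo` and the helper are stand-ins, not the test's actual code):

```cpp
#include <memory>
#include <scoped_allocator>
#include <type_traits>

struct Foo { int x; };

template <class A, class T>
void test_pointer_types() {
  using Traits = std::allocator_traits<A>;
  static_assert(std::is_same<typename Traits::pointer, T*>::value, "");
  static_assert(std::is_same<typename Traits::const_pointer, const T*>::value, "");
  static_assert(std::is_same<typename Traits::void_pointer, void*>::value, "");
  static_assert(std::is_same<typename Traits::const_void_pointer, const void*>::value, "");
}

int main() {
  test_pointer_types<std::scoped_allocator_adaptor<std::allocator<char>>, char>();
  test_pointer_types<std::scoped_allocator_adaptor<std::allocator<int>>, int>();
  test_pointer_types<std::scoped_allocator_adaptor<std::allocator<Foo>>, Foo>();
  return 0;
}
```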
|
| /external/libcxx/test/std/utilities/memory/default.allocator/ |
| allocator_pointers.pass.cpp | 114 test_pointer<std::allocator<char>> (); 115 test_pointer<std::allocator<int>> (); 116 test_pointer<std::allocator<Foo>> (); 118 test_void_pointer<std::allocator<char>> (); 119 test_void_pointer<std::allocator<int>> (); 120 test_void_pointer<std::allocator<Foo>> ();
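Same family of checks as above, for plain `std::allocator`. As context for what those pointer typedefs feed into, a minimal allocate/construct/destroy/deallocate round trip through `std::allocator_traits` (illustrative, not part of the test itself):

```cpp
#include <memory>

int main() {
  std::allocator<int> alloc;
  using Traits = std::allocator_traits<std::allocator<int>>;

  Traits::pointer p = Traits::allocate(alloc, 3);   // plain int* for std::allocator
  for (int i = 0; i < 3; ++i) Traits::construct(alloc, p + i, i * 10);

  int sum = p[0] + p[1] + p[2];                     // 0 + 10 + 20

  for (int i = 0; i < 3; ++i) Traits::destroy(alloc, p + i);
  Traits::deallocate(alloc, p, 3);
  return sum == 30 ? 0 : 1;
}
```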
|
| /external/skia/src/core/ |
| SkSpriteBlitter_RGB565.cpp | 67 SkArenaAlloc* allocator) { 68 SkASSERT(allocator != nullptr); 86 return allocator->make<Sprite_D16_S32>(source, paint.getBlendMode()); 155 SkArenaAlloc* allocator) { 156 SkASSERT(allocator != nullptr); 174 return allocator->make<Sprite_D8_S32>(source, paint.getBlendMode());
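Both Skia copies of this file show a blitter factory constructing the chosen implementation directly in a caller-supplied arena via a variadic `make<T>(...)`. A plain-C++ sketch of that factory-into-arena pattern; `MiniArena`, `Blitter`, and the sprite classes are illustrative, and unlike a production arena this toy never runs the destructors of the objects it makes:

```cpp
#include <cstddef>
#include <memory>
#include <new>
#include <utility>
#include <vector>

class MiniArena {
 public:
  template <typename T, typename... Args>
  T* make(Args&&... args) {
    void* mem = Alloc(sizeof(T), alignof(T));
    return new (mem) T(std::forward<Args>(args)...);
  }

 private:
  void* Alloc(std::size_t size, std::size_t align) {
    // One heap block per object for brevity; a real arena suballocates.
    blocks_.push_back(std::unique_ptr<char[]>(new char[size + align]));
    void* p = blocks_.back().get();
    std::size_t space = size + align;
    return std::align(align, size, p, space);
  }
  std::vector<std::unique_ptr<char[]>> blocks_;
};

struct Blitter { virtual ~Blitter() = default; virtual int blit() = 0; };
struct Sprite565  : Blitter { int blit() override { return 565; } };
struct Sprite8888 : Blitter { int blit() override { return 8888; } };

// Mirrors the shape of the factory above: pick a concrete type, construct it
// in the caller's arena, hand back a pointer whose lifetime the arena owns.
Blitter* Choose(bool wide, MiniArena* arena) {
  return wide ? static_cast<Blitter*>(arena->make<Sprite8888>())
              : static_cast<Blitter*>(arena->make<Sprite565>());
}

int main() {
  MiniArena arena;
  return Choose(false, &arena)->blit() == 565 ? 0 : 1;
}
```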
|
| /external/skqp/src/core/ |
| SkSpriteBlitter_RGB565.cpp | 67 SkArenaAlloc* allocator) { 68 SkASSERT(allocator != nullptr); 86 return allocator->make<Sprite_D16_S32>(source, paint.getBlendMode()); 155 SkArenaAlloc* allocator) { 156 SkASSERT(allocator != nullptr); 174 return allocator->make<Sprite_D8_S32>(source, paint.getBlendMode());
|
| /external/tensorflow/tensorflow/compiler/xla/service/ |
| transfer_manager.cc | 184 const Shape& on_host_shape, DeviceMemoryAllocator* allocator, 196 on_host_shape, on_device_shape, allocator->platform(), device_ordinal)); 205 allocator->Allocate(shaped_buffer->device_ordinal(), 214 DeviceMemoryAllocator* allocator, 218 AllocateShapedBuffer(on_host_shape, allocator, device_ordinal)); 219 return ScopedShapedBuffer::MakeScoped(unscoped_buffer.get(), allocator);
|
| /prebuilts/ndk/r16/sources/cxx-stl/llvm-libc++/test/std/utilities/allocator.adaptor/allocator.adaptor.types/ |
| allocator_pointers.pass.cpp | 115 test_pointer<std::scoped_allocator_adaptor<std::allocator<char>>> (); 116 test_pointer<std::scoped_allocator_adaptor<std::allocator<int>>> (); 117 test_pointer<std::scoped_allocator_adaptor<std::allocator<Foo>>> (); 119 test_void_pointer<std::scoped_allocator_adaptor<std::allocator<char>>> (); 120 test_void_pointer<std::scoped_allocator_adaptor<std::allocator<int>>> (); 121 test_void_pointer<std::scoped_allocator_adaptor<std::allocator<Foo>>> ();
|
| /prebuilts/ndk/r16/sources/cxx-stl/llvm-libc++/test/std/utilities/memory/default.allocator/ |
| allocator_pointers.pass.cpp | 114 test_pointer<std::allocator<char>> (); 115 test_pointer<std::allocator<int>> (); 116 test_pointer<std::allocator<Foo>> (); 118 test_void_pointer<std::allocator<char>> (); 119 test_void_pointer<std::allocator<int>> (); 120 test_void_pointer<std::allocator<Foo>> ();
|
| /external/libchrome/base/debug/ |
| activity_tracker_unittest.cc | 286 EXPECT_EQ(std::string("foo"), global->allocator()->Name()); 295 EXPECT_EQ(std::string("bar"), global->allocator()->Name()); 421 // Get the memory-allocator references to that data. 423 global->allocator()->GetAsReference( 428 global->allocator()->GetAsReference( 433 global->allocator()->GetAsReference( 439 const size_t tracker_size = global->allocator()->GetAllocSize(tracker_ref); 471 global->allocator()->GetType(proc_data_ref)); 473 global->allocator()->GetType(tracker_ref)); 475 global->allocator()->GetType(user_data_ref)) [all...] |
| /external/webrtc/webrtc/p2p/client/ |
| basicportallocator.cc | 143 BasicPortAllocator *allocator, 149 ice_ufrag, ice_pwd, allocator->flags()), 150 allocator_(allocator), network_thread_(NULL), 151 socket_factory_(allocator->socket_factory()), 469 LOG_J(LS_INFO, port) << "Added port to allocator"; 661 LOG_J(LS_INFO, port) << "Removed port from allocator (" 730 rtc::SocketAddress(ip_, 0), session_->allocator()->min_port(), 731 session_->allocator()->max_port())); 845 session_->allocator()->step_delay(), 886 session_->allocator()->origin(), emit_local_candidate_for_anyaddress) [all...] |
| /frameworks/native/vulkan/libvulkan/ |
| driver.cpp | 95 const VkAllocationCallbacks& allocator); 98 const VkAllocationCallbacks& allocator); 298 const VkAllocationCallbacks& allocator) 300 allocator_(allocator), 310 const VkAllocationCallbacks& allocator) 312 allocator_(allocator), 658 InstanceData* AllocateInstanceData(const VkAllocationCallbacks& allocator) { 659 void* data_mem = allocator.pfnAllocation( 660 allocator.pUserData, sizeof(InstanceData), alignof(InstanceData), 665 return new (data_mem) InstanceData(allocator); [all...] |
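The driver.cpp hits allocate per-instance bookkeeping through the application-supplied VkAllocationCallbacks and then construct it with placement new. A hedged sketch of that pattern; the `InstanceData` struct and both helpers below are stand-ins, not libvulkan's actual types:

```cpp
#include <new>
#include <vulkan/vulkan.h>

struct InstanceData {
  explicit InstanceData(const VkAllocationCallbacks& alloc) : allocator(alloc) {}
  VkAllocationCallbacks allocator;  // kept so the same callbacks free the data
};

InstanceData* AllocateInstanceData(const VkAllocationCallbacks& allocator) {
  void* mem = allocator.pfnAllocation(allocator.pUserData, sizeof(InstanceData),
                                      alignof(InstanceData),
                                      VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE);
  if (mem == nullptr) return nullptr;
  return new (mem) InstanceData(allocator);
}

void FreeInstanceData(InstanceData* data) {
  if (data == nullptr) return;
  // Copy the callbacks out before running the destructor, since they live
  // inside the object being destroyed.
  VkAllocationCallbacks alloc = data->allocator;
  data->~InstanceData();
  alloc.pfnFree(alloc.pUserData, data);
}
```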
| /external/deqp/external/vulkancts/modules/vulkan/draw/ |
| vktDrawImageObjectUtil.cpp | 165 vk::Allocator& allocator, 178 read(queue, allocator, layout, offset, width, height, 1, mipLevel, arrayElement, aspect, vk::VK_IMAGE_TYPE_2D, 183 readUsingBuffer(queue, allocator, layout, offset, width, height, 1, mipLevel, arrayElement, aspect, m_pixelAccessData.data()); 189 vk::Allocator& allocator, 203 read(queue, allocator, layout, offset, width, height, depth, mipLevel, arrayElement, aspect, vk::VK_IMAGE_TYPE_3D, 208 readUsingBuffer(queue, allocator, layout, offset, width, height, depth, mipLevel, arrayElement, aspect, m_pixelAccessData.data()); 214 vk::Allocator& allocator, [all...] |
| /external/deqp/modules/glshared/ |
| glsMemoryStressCase.cpp | 861 MemObjectAllocator allocator(log, m_renderCtx, m_objectTypes, m_config, deStringHash(getName())); 863 if (!allocator.allocUntilFailure()) 866 allocator.clearObjects(); 868 log << TestLog::Message << "Timeout. Couldn't exhaust memory in timelimit. Allocated " << allocator.getObjectCount() << " objects." << TestLog::EndMessage; 878 allocator.clearObjects(); 880 m_allocated.push_back(allocator.getObjectCount()); 882 if (m_iteration != 0 && allocator.getObjectCount() == 0) 885 log << TestLog::Message << "Got error when allocation object count: " << allocator.getObjectCount() << " bytes: " << allocator.getBytes() << TestLog::EndMessage; 887 if ((allocator.getGLError() == 0) && (allocator.getResult() == MemObjectAllocator::RESULT_GOT_BAD_ALLOC) [all...] |
| /external/tensorflow/tensorflow/contrib/verbs/ |
| rdma_mgr.cc | 270 // cpu_allocator() returns visitable allocator 283 Allocator* allocators[] = { 293 std::set<Allocator*> instrumented_; 296 for (Allocator* allocator : allocators) { 299 &RdmaMemoryMgr::Singleton(), _1, _2, allocator->Name()); 303 auto* visitable_allocator = dynamic_cast<VisitableAllocator*>(allocator); 305 << "is not visitable for instrumentation" << allocator->Name(); 306 // Make sure we don't instrument the same allocator twice 307 if (instrumented_.find(allocator) == std::end(instrumented_)) [all...] |
| /external/libchrome/base/metrics/ |
| field_trial.cc | 51 // Constants for the field trial allocator. 211 HANDLE CreateReadOnlyHandle(FieldTrialList::FieldTrialAllocator* allocator) { 212 HANDLE src = allocator->shared_memory()->handle().GetHandle(); 223 int CreateReadOnlyHandle(FieldTrialList::FieldTrialAllocator* allocator) { 225 allocator->shared_memory()->ShareReadOnlyToProcess(GetCurrentProcessHandle(), 726 FieldTrialAllocator* allocator = global_->field_trial_allocator_.get(); local 727 FieldTrialAllocator::Iterator mem_iter(allocator); 794 // occur in some browser tests which don't initialize the allocator. 900 // anyways). But some browser tests don't create the allocator, so we need 1314 FieldTrialAllocator* allocator = global_->field_trial_allocator_.get(); local [all...] |
| /art/openjdkjvmti/ |
| jvmti_weak_table-inl.h | 248 template <typename Storage, class Allocator> 250 using allocator_type = Allocator; 253 : allocator(alloc), 254 data(reserve > 0 ? allocator.allocate(reserve) : nullptr), 261 allocator.deallocate(data, capacity); 280 Storage* tmp = allocator.allocate(new_capacity); 287 allocator.deallocate(old, capacity); 299 Allocator allocator; member in struct:openjdkjvmti::JvmtiWeakTable::ReleasableContainer 344 JvmtiAllocator<void> allocator(jvmti_env) [all...] |
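The ReleasableContainer hits show the classic grow step: allocate a bigger block from the allocator, move the elements across, release the old block. A standard-C++ sketch of that step using `std::allocator_traits` (`MiniBuffer` is illustrative, not the JVMTI container):

```cpp
#include <cstddef>
#include <memory>
#include <utility>

template <typename T, typename Allocator = std::allocator<T>>
class MiniBuffer {
  using Traits = std::allocator_traits<Allocator>;

 public:
  explicit MiniBuffer(const Allocator& alloc = Allocator()) : alloc_(alloc) {}

  ~MiniBuffer() {
    for (std::size_t i = 0; i < size_; ++i) Traits::destroy(alloc_, data_ + i);
    if (data_ != nullptr) Traits::deallocate(alloc_, data_, capacity_);
  }

  void Push(T value) {
    if (size_ == capacity_) Grow(capacity_ == 0 ? 4 : capacity_ * 2);
    Traits::construct(alloc_, data_ + size_, std::move(value));
    ++size_;
  }

  std::size_t size() const { return size_; }

 private:
  // Mirrors the container's resize: allocate new storage, move, free the old.
  void Grow(std::size_t new_capacity) {
    T* tmp = Traits::allocate(alloc_, new_capacity);
    for (std::size_t i = 0; i < size_; ++i) {
      Traits::construct(alloc_, tmp + i, std::move(data_[i]));
      Traits::destroy(alloc_, data_ + i);
    }
    if (data_ != nullptr) Traits::deallocate(alloc_, data_, capacity_);
    data_ = tmp;
    capacity_ = new_capacity;
  }

  Allocator alloc_;
  T* data_ = nullptr;
  std::size_t size_ = 0;
  std::size_t capacity_ = 0;
};

int main() {
  MiniBuffer<int> buf;
  for (int i = 0; i < 10; ++i) buf.Push(i);
  return buf.size() == 10 ? 0 : 1;
}
```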
| /art/runtime/gc/ |
| heap-inl.h | 48 AllocatorType allocator, 62 // Need to check that we aren't the large object allocator since the large object allocation code 82 if (IsTLABAllocator(allocator)) { 86 if (IsTLABAllocator(allocator) && byte_count <= self->TlabSize()) { 98 !kInstrumented && allocator == kAllocatorTypeRosAlloc && 112 obj = TryToAllocate<kInstrumented, false>(self, allocator, byte_count, &bytes_allocated, 116 // or changes the allocator in a suspend point here, we need to retry the allocation. 118 allocator, 126 // allocator or instrumentation changed. 128 // AllocObject will pick up the new allocator type, and instrumented as true is the saf [all...] |
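heap-inl.h's fast path asks whether the current allocator is a TLAB allocator and whether the request fits in the thread's remaining buffer. A hedged sketch of that thread-local bump allocation (`Tlab` and its fields are illustrative, not ART's Thread/Heap members):

```cpp
#include <cstddef>
#include <cstdint>

// Each thread owns a [pos, end) slice of the heap; small allocations just
// bump `pos`, with no locks or atomics, because the slice is thread-private.
struct Tlab {
  uint8_t* pos = nullptr;
  uint8_t* end = nullptr;

  std::size_t Remaining() const { return static_cast<std::size_t>(end - pos); }

  // Fast path; returns nullptr so the caller can fall back to the shared heap.
  void* TryAlloc(std::size_t byte_count) {
    byte_count = (byte_count + 7) & ~std::size_t{7};  // keep 8-byte alignment
    if (byte_count > Remaining()) return nullptr;
    void* result = pos;
    pos += byte_count;
    return result;
  }
};

int main() {
  alignas(8) static uint8_t buffer[4096];
  Tlab tlab;
  tlab.pos = buffer;
  tlab.end = buffer + sizeof(buffer);
  void* obj = tlab.TryAlloc(24);  // the slow path is needed only once exhausted
  return obj != nullptr ? 0 : 1;
}
```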