    Searched refs:allocator (Results 301 - 325 of 1428)


  /prebuilts/gcc/linux-x86/host/x86_64-w64-mingw32-4.8/x86_64-w64-mingw32/include/c++/4.8.3/ext/pb_ds/
list_update_policy.hpp 56 template<typename _Alloc = std::allocator<char> >
91 template<std::size_t Max_Count = 5, typename _Alloc = std::allocator<char> >
  /prebuilts/misc/common/swig/include/2.0.11/go/
std_list.i 12 template<class T, class Alloc = allocator<T> >
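
The pb_ds and SWIG hits above both show the same idiom: a container template whose trailing parameter defaults to a standard allocator, so callers can either take the default or substitute their own. A minimal, self-contained sketch of that idiom (SmallBuffer is illustrative and not part of either library):

#include <cstddef>
#include <memory>
#include <vector>

template <typename T, typename Alloc = std::allocator<T>>
class SmallBuffer {
public:
    explicit SmallBuffer(std::size_t n, const Alloc& alloc = Alloc())
        : storage_(n, T(), alloc) {}                 // forwards the allocator to the backing vector
    std::size_t size() const { return storage_.size(); }
private:
    std::vector<T, Alloc> storage_;
};

int main() {
    SmallBuffer<int> with_default(4);                     // uses std::allocator<int> implicitly
    SmallBuffer<int, std::allocator<int>> spelled_out(4); // same type, allocator written out
    return with_default.size() == spelled_out.size() ? 0 : 1;
}
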
  /prebuilts/ndk/r16/sources/cxx-stl/llvm-libc++/test/std/containers/container.adaptors/queue/queue.defn/
types.pass.cpp 55 static_assert(( std::uses_allocator<std::queue<int>, std::allocator<int> >::value), "");
56 static_assert((!std::uses_allocator<std::queue<int, C>, std::allocator<int> >::value), "");
  /prebuilts/ndk/r16/sources/cxx-stl/llvm-libc++/test/std/containers/container.adaptors/stack/stack.defn/
types.pass.cpp 56 static_assert(( std::uses_allocator<std::stack<int>, std::allocator<int> >::value), "");
57 static_assert((!std::uses_allocator<std::stack<int, C>, std::allocator<int> >::value), "");
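
These libc++ tests assert how std::uses_allocator propagates through container adaptors: the trait is true when the adaptor's underlying container advertises a compatible allocator_type, and false otherwise. A minimal sketch of what the trait reports, using only standard headers:

#include <memory>
#include <queue>
#include <vector>

static_assert(std::uses_allocator<std::vector<int>, std::allocator<int>>::value,
              "vector<int> is allocator-aware for std::allocator<int>");
static_assert(std::uses_allocator<std::queue<int>, std::allocator<int>>::value,
              "queue<int> forwards the check to its default deque<int> container");

struct NotAllocatorAware {};  // no nested allocator_type
static_assert(!std::uses_allocator<NotAllocatorAware, std::allocator<int>>::value,
              "types without allocator_type are not allocator-aware");

int main() { return 0; }
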
  /system/core/libmemunreachable/
ScopedSignalHandler.h 35 explicit ScopedSignalHandler(Allocator<Fn> allocator) : allocator_(allocator), signal_(-1) {}
71 Allocator<Fn> allocator_;
75 // to be a static map of signals to handlers, but allocated with Allocator.
  /external/tensorflow/tensorflow/core/kernels/
non_max_suppression_op_test.cc 16 #include "tensorflow/core/framework/allocator.h"
54 Tensor expected(allocator(), DT_INT32, TensorShape({3}));
68 Tensor expected(allocator(), DT_INT32, TensorShape({3}));
83 Tensor expected(allocator(), DT_INT32, TensorShape({2}));
98 Tensor expected(allocator(), DT_INT32, TensorShape({3}));
110 Tensor expected(allocator(), DT_INT32, TensorShape({1}));
133 Tensor expected(allocator(), DT_INT32, TensorShape({1}));
174 Tensor expected(allocator(), DT_INT32, TensorShape({0}));
207 Tensor expected(allocator(), DT_INT32, TensorShape({3}));
223 Tensor expected(allocator(), DT_INT32, TensorShape({3}))
    [all...]
gather_op_test.cc 21 #include "tensorflow/core/framework/allocator.h"
62 Tensor expected(allocator(), DT_FLOAT, TensorShape({}));
80 Tensor expected(allocator(), DT_COMPLEX64, TensorShape({}));
97 Tensor expected(allocator(), DT_FLOAT, TensorShape({4, 3}));
113 Tensor expected(allocator(), DT_FLOAT, TensorShape({5, 4}));
129 Tensor expected(allocator(), DT_FLOAT, TensorShape({4, 0}));
144 Tensor expected(allocator(), DT_FLOAT, TensorShape({4, 3}));
159 Tensor expected(allocator(), DT_FLOAT, TensorShape({2, 3}));
sparse_to_dense_op_test.cc 21 #include "tensorflow/core/framework/allocator.h"
66 Tensor expected(allocator(), DT_FLOAT, {5});
85 Tensor expected(allocator(), DT_DOUBLE, {5});
104 Tensor expected(allocator(), DT_FLOAT, {5});
123 Tensor expected(allocator(), DT_FLOAT, {3, 4});
145 Tensor expected(allocator(), DT_FLOAT, {3, 4});
167 Tensor expected(allocator(), DT_FLOAT, {3, 4, 2});
189 Tensor expected(allocator(), DT_FLOAT, {3, 4, 2});
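
The Tensor expected(allocator(), ...) hits above all come from kernel unit tests built on TensorFlow's OpsTestBase fixture, where allocator() supplies the host allocator used to materialize the expected output tensor before comparing it to the kernel's result. A hedged sketch of that pattern; the op name, shapes, and values below are made up for illustration and are not taken from these test files:

#include "tensorflow/core/framework/allocator.h"
#include "tensorflow/core/framework/fake_input.h"
#include "tensorflow/core/framework/node_def_builder.h"
#include "tensorflow/core/framework/tensor.h"
#include "tensorflow/core/framework/tensor_testutil.h"
#include "tensorflow/core/kernels/ops_testutil.h"
#include "tensorflow/core/lib/core/status_test_util.h"
#include "tensorflow/core/platform/test.h"

namespace tensorflow {

class GatherOpSketchTest : public OpsTestBase {
 protected:
  void MakeOp() {
    TF_ASSERT_OK(NodeDefBuilder("op", "Gather")
                     .Input(FakeInput(DT_FLOAT))
                     .Input(FakeInput(DT_INT32))
                     .Finalize(node_def()));
    TF_ASSERT_OK(InitOp());
  }
};

TEST_F(GatherOpSketchTest, PicksRows) {
  MakeOp();
  AddInputFromArray<float>(TensorShape({3, 2}), {1, 2, 3, 4, 5, 6});
  AddInputFromArray<int32>(TensorShape({2}), {2, 0});
  TF_ASSERT_OK(RunOpKernel());

  // Build the expected tensor with the test fixture's host allocator().
  Tensor expected(allocator(), DT_FLOAT, TensorShape({2, 2}));
  test::FillValues<float>(&expected, {5, 6, 1, 2});
  test::ExpectTensorEqual<float>(expected, *GetOutput(0));
}

}  // namespace tensorflow
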
  /frameworks/base/libs/hwui/
LayerBuilder.cpp 237 void LayerBuilder::onDeferOp(LinearAllocator& allocator, const BakedOpState* bakedState) {
241 flushLayerClears(allocator);
253 void LayerBuilder::flushLayerClears(LinearAllocator& allocator) {
256 // put the verts in the frame allocator, since
259 Vertex* const verts = (Vertex*)allocator.create_trivial_array<Vertex>(vertCount);
274 SkPaint* paint = allocator.create<SkPaint>();
276 SimpleRectsOp* op = allocator.create_trivial<SimpleRectsOp>(
279 BakedOpState::directConstruct(allocator, &repaintClip, bounds, *op);
280 deferUnmergeableOp(allocator, bakedState, OpBatchType::Vertices);
284 void LayerBuilder::deferUnmergeableOp(LinearAllocator& allocator, BakedOpState* op
    [all...]
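
The LayerBuilder hits use hwui's LinearAllocator: per-frame objects such as verts, paints, and ops are bump-allocated out of a shared buffer and released all at once when the frame ends, instead of being freed individually. A simplified, self-contained stand-in for that idiom (not the actual hwui class):

#include <cstddef>
#include <memory>
#include <new>
#include <utility>

// Bump allocator: carves objects out of one fixed buffer; nothing is freed
// until the allocator itself goes away, which is why only trivially
// destructible types belong here (mirroring create_trivial / create_trivial_array).
class BumpAllocator {
public:
    explicit BumpAllocator(std::size_t capacity)
        : buffer_(new unsigned char[capacity]), capacity_(capacity), offset_(0) {}

    template <typename T, typename... Args>
    T* createTrivial(Args&&... args) {
        void* p = bump(sizeof(T), alignof(T));
        return p ? new (p) T(std::forward<Args>(args)...) : nullptr;
    }

    template <typename T>
    T* createTrivialArray(std::size_t count) {
        T* first = static_cast<T*>(bump(sizeof(T) * count, alignof(T)));
        if (!first) return nullptr;
        for (std::size_t i = 0; i < count; ++i) new (first + i) T();
        return first;
    }

private:
    void* bump(std::size_t size, std::size_t align) {
        std::size_t aligned = (offset_ + align - 1) & ~(align - 1);
        if (aligned + size > capacity_) return nullptr;   // sketch: no block chaining
        offset_ = aligned + size;
        return buffer_.get() + aligned;
    }

    std::unique_ptr<unsigned char[]> buffer_;
    std::size_t capacity_;
    std::size_t offset_;
};

struct Vertex { float x, y; };

int main() {
    BumpAllocator frameAllocator(4096);
    Vertex* verts = frameAllocator.createTrivialArray<Vertex>(4);  // like the flushLayerClears verts
    return verts != nullptr ? 0 : 1;
}
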
  /external/clang/test/CodeGenCXX/
destructors.cpp 35 struct allocator {  // struct in namespace PR7526
36 ~allocator() throw();
39 struct allocator_derived : allocator { };
44 // CHECK1-LABEL: define void @_ZN6PR75269allocatorD2Ev(%"struct.PR7526::allocator"* %this) unnamed_addr
46 allocator::~allocator() throw() { foo(); }
eh.cpp 93 template <class T> struct allocator {  // struct in namespace test6
94 ~allocator() throw() { }
98 allocator<int> a;
  /art/compiler/optimizing/
register_allocator.cc 32 RegisterAllocator::RegisterAllocator(ScopedArenaAllocator* allocator,
35 : allocator_(allocator),
39 std::unique_ptr<RegisterAllocator> RegisterAllocator::Create(ScopedArenaAllocator* allocator,
46 new (allocator) RegisterAllocatorLinearScan(allocator, codegen, analysis));
49 new (allocator) RegisterAllocatorGraphColor(allocator, codegen, analysis));
118 ScopedArenaAllocator allocator(codegen.GetGraph()->GetArenaStack());
120 allocator.Adapter(kArenaAllocRegisterAllocatorValidate));
134 ArenaBitVector::Create(&allocator, max_end, false, kArenaAllocRegisterAllocatorValidate))
    [all...]
ssa_phi_elimination.cc 32 // Use local allocator for allocating memory used by this optimization.
33 ScopedArenaAllocator allocator(graph_->GetArenaStack());
36 ScopedArenaVector<HPhi*> worklist(allocator.Adapter(kArenaAllocSsaPhiElimination));
42 ScopedArenaSet<HPhi*> initially_live(allocator.Adapter(kArenaAllocSsaPhiElimination));
126 // Use local allocator for allocating memory used by this optimization.
127 ScopedArenaAllocator allocator(graph_->GetArenaStack());
130 ScopedArenaVector<HPhi*> worklist(allocator.Adapter(kArenaAllocSsaPhiElimination));
141 ArenaBitVector visited_phis_in_cycle(&allocator,
146 ScopedArenaVector<HPhi*> cycle_worklist(allocator.Adapter(kArenaAllocSsaPhiElimination));
instruction_simplifier_shared.cc 78 ArenaAllocator* allocator = mul->GetBlock()->GetGraph()->GetAllocator();  // local variable
79 HMultiplyAccumulate* mulacc = new (allocator) HMultiplyAccumulate(
108 ArenaAllocator* allocator = mul->GetBlock()->GetGraph()->GetAllocator();  // local variable
140 new (allocator) HMultiplyAccumulate(type,
153 new (allocator) HMultiplyAccumulate(type,
258 ArenaAllocator* allocator = graph->GetAllocator();  // local variable
261 HIntermediateAddress* address = new (allocator) HIntermediateAddress(array, offset, kNoDexPc);
292 ArenaAllocator* allocator = graph->GetAllocator();  // local variable
331 new (allocator) HIntermediateAddressIndex(index, offset, shift, kNoDexPc);
linear_order.cc 97 ScopedArenaAllocator allocator(graph->GetArenaStack());
99 allocator.Adapter(kArenaAllocLinearOrder));
111 ScopedArenaVector<HBasicBlock*> worklist(allocator.Adapter(kArenaAllocLinearOrder));
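
The new (allocator) RegisterAllocator... calls in the ART hits rely on a placement operator new that draws storage from an arena, so optimization passes allocate many small objects and never delete them individually. A minimal sketch of that mechanism under illustrative names (Arena and Node are not ART classes):

#include <cstddef>
#include <new>
#include <vector>

// Illustrative arena: every request gets bytes that live until the arena dies.
class Arena {
public:
    void* Alloc(std::size_t bytes) {
        blocks_.emplace_back(bytes);            // one block per request keeps the sketch short
        return blocks_.back().data();
    }
private:
    std::vector<std::vector<unsigned char>> blocks_;
};

struct Node {
    explicit Node(int v) : value(v) {}

    // Lets callers write `new (arena) Node(...)`, mirroring `new (allocator) RegisterAllocator...`.
    static void* operator new(std::size_t size, Arena* arena) { return arena->Alloc(size); }
    // Matching placement delete, only invoked if the constructor throws.
    static void operator delete(void*, Arena*) {}

    int value;
};

int main() {
    Arena arena;
    Node* n = new (&arena) Node(42);            // arena-owned; never deleted individually
    return n->value == 42 ? 0 : 1;
}
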
  /external/deqp/external/vulkancts/framework/vulkan/
vkRef.hpp 77 Deleter (const DeviceInterface& deviceIface, VkDevice device, const VkAllocationCallbacks* allocator)
80 , m_allocator (allocator)
100 Deleter (const PlatformInterface& platformIface, VkInstance instance, const VkAllocationCallbacks* allocator)
102 , m_allocator (allocator)
120 Deleter (const InstanceInterface& instanceIface, VkDevice device, const VkAllocationCallbacks* allocator)
122 , m_allocator (allocator)
140 Deleter (const InstanceInterface& instanceIface, VkInstance instance, const VkAllocationCallbacks* allocator)
143 , m_allocator (allocator)
163 Deleter (const InstanceInterface& instanceIface, VkInstance instance, const VkAllocationCallbacks* allocator)
166 , m_allocator (allocator)
    [all...]
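
The vkRef.hpp Deleter constructors capture the VkAllocationCallbacks pointer supplied at object creation so that the matching vkDestroy* call later receives the same callbacks. A hedged sketch of that pattern outside the dEQP framework (ScopedDevice is illustrative, not the dEQP API):

#include <vulkan/vulkan.h>

// Owns a VkDevice and remembers the allocation callbacks it was created with,
// so destruction uses the same allocator (or the default when it is nullptr).
// Usage: ScopedDevice dev(deviceFromVkCreateDevice, pAllocator);
class ScopedDevice {
public:
    ScopedDevice(VkDevice device, const VkAllocationCallbacks* allocator)
        : m_device(device), m_allocator(allocator) {}

    ~ScopedDevice() {
        if (m_device != VK_NULL_HANDLE)
            vkDestroyDevice(m_device, m_allocator);
    }

    ScopedDevice(const ScopedDevice&) = delete;
    ScopedDevice& operator=(const ScopedDevice&) = delete;

    VkDevice get() const { return m_device; }

private:
    VkDevice m_device;
    const VkAllocationCallbacks* m_allocator;   // may be nullptr: use the implementation default
};
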
  /external/tensorflow/tensorflow/compiler/xla/client/
local_client.cc 125 if (!run_options.allocator()) {
126 return InvalidArgument("an allocator must be provided to ExecuteLocally");
129 if (run_options.allocator()->platform() != backend.platform()) {
131 "allocator platform (%s) does not match service platform (%s)",
132 run_options.allocator()->platform()->Name().c_str(),
155 if (run_options.allocator() == nullptr) {
177 run_options.allocator());
192 return ScopedShapedBuffer::MakeScoped(result.get(), run_options->allocator());
270 DeviceMemoryAllocator* allocator) {
271 if (allocator == nullptr)
    [all...]
  /packages/apps/Test/connectivity/sl4n/rapidjson/include/rapidjson/
pointer.h 41 //! Represents a JSON Pointer. Use Pointer for UTF8 encoding and default allocator.
65 \tparam Allocator The allocator type for allocating memory for internal representation.
68 However, Allocator of GenericPointer is independent of Allocator of Value.
70 template <typename ValueType, typename Allocator = CrtAllocator>
104 \param allocator User supplied allocator for this pointer. If no allocator is provided, it creates a self-owned one.
106 explicit GenericPointer(const Ch* source, Allocator* allocator = 0) : allocator_(allocator), ownAllocator_(), nameBuffer_(), tokens_(), tokenCount_(), (…)
    [all...]
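
The pointer.h documentation above describes GenericPointer's optional user-supplied allocator: omit it and the Pointer creates a self-owned one, pass it and the Pointer uses yours without taking ownership. A short sketch of both forms, assuming only the public rapidjson headers:

#include "rapidjson/document.h"
#include "rapidjson/pointer.h"

int main() {
    rapidjson::Document doc;
    doc.Parse("{\"items\":[10,20]}");

    // No allocator passed: the Pointer creates and owns one internally.
    rapidjson::Pointer first("/items/0");
    const rapidjson::Value* v = first.Get(doc);

    // Caller-supplied allocator: the Pointer uses it but does not own it.
    rapidjson::CrtAllocator alloc;
    rapidjson::Pointer second("/items/1", &alloc);
    const rapidjson::Value* w = second.Get(doc);

    return (v && v->GetInt() == 10 && w && w->GetInt() == 20) ? 0 : 1;
}
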
  /external/skia/src/gpu/
GrAllocator.h 21 * Create an allocator
131 * Iterates through the allocator. This is faster than using operator[] when walking linearly
132 * through the allocator.
139 Iter(const GrAllocator* allocator)
140 : fAllocator(allocator)
142 , fIndexInBlock(allocator->fItemsPerBlock - 1)
235 * Create an allocator
314 * Iterates through the allocator. This is faster than using operator[] when walking linearly
315 * through the allocator.
322 Iter(const GrTAllocator* allocator) : fImpl(&allocator->fAllocator) {
    [all...]
  /external/skqp/src/gpu/
GrAllocator.h 21 * Create an allocator
131 * Iterates through the allocator. This is faster than using operator[] when walking linearly
132 * through the allocator.
139 Iter(const GrAllocator* allocator)
140 : fAllocator(allocator)
142 , fIndexInBlock(allocator->fItemsPerBlock - 1)
235 * Create an allocator
314 * Iterates through the allocator. This is faster than using operator[] when walking linearly
315 * through the allocator.
322 Iter(const GrTAllocator* allocator) : fImpl(&allocator->fAllocator) {
    [all...]
  /system/bt/osi/src/
list.cc 3 #include "osi/include/allocator.h"
17 const allocator_t* allocator;  // member in struct list_t
24 // Behaves the same as |list_new|, except you get to specify the allocator.
31 list->allocator = zeroed_allocator;
43 list->allocator->free(list);
94 list_node_t* node = (list_node_t*)list->allocator->alloc(sizeof(list_node_t));
109 list_node_t* node = (list_node_t*)list->allocator->alloc(sizeof(list_node_t));
123 list_node_t* node = (list_node_t*)list->allocator->alloc(sizeof(list_node_t));
211 list->allocator->free(node);
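
The list.cc hits show the osi function-pointer allocator idiom: the list stores an allocator_t and routes every node allocation and free through it. A hedged sketch of that idiom; the struct below is illustrative and may differ in detail from the real definition in osi/include/allocator.h:

#include <cstdlib>

typedef void* (*alloc_fn)(std::size_t size);
typedef void (*free_fn)(void* ptr);

struct allocator_t {
    alloc_fn alloc;
    free_fn free;
};

// A zero-initializing allocator, in the spirit of the |zeroed_allocator| used by list_new.
static void* zeroed_alloc(std::size_t size) { return std::calloc(1, size); }
static const allocator_t zeroed_allocator = {zeroed_alloc, std::free};

struct node_t {
    void* data;
    node_t* next;
};

int main() {
    const allocator_t* allocator = &zeroed_allocator;
    node_t* node = static_cast<node_t*>(allocator->alloc(sizeof(node_t)));  // as in list_prepend
    allocator->free(node);                                                  // as in list_free_node
    return 0;
}
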
  /art/compiler/utils/
jni_macro_assembler_test.h 83 virtual Ass* CreateAssembler(ArenaAllocator* allocator) {
84 return new (allocator) Ass(allocator);
  /external/clang/test/Index/
index-templates.cpp 6 template<typename T> class allocator;
8 template<typename T, typename Alloc = allocator<T> >
93 typename Allocator = allocator<Pair<Key, Value> > >
126 // CHECK-LOAD: index-templates.cpp:6:28: ClassTemplate=allocator:6:28 Extent=[6:1 - 6:37]
131 // CHECK-LOAD: index-templates.cpp:8:39: TemplateRef=allocator:6:28 Extent=[8:39 - 8:48]
206 // CHECK-USRS: index-templates.cpp c:@ST>1#T@allocator Extent=[6:1 - 6:37]
215 // CHECK-USRS: index-templates.cpp c:@S@vector>#$@S@Z1#$@S@allocator>#S0_ Extent=[18:1 - 18:26]
217 // CHECK-USRS: index-templates.cpp c:@S@vector>#$@S@Z2#$@S@allocator>#S0_ Extent=[22:1 - 25:2]
218 // CHECK-USRS: index-templates.cpp c:@S@vector>#$@S@Z2#$@S@allocator>#S0_@F@clear# Extent=[24:3 - 24:15
    [all...]
  /external/clang/test/SemaTemplate/
issue150.cpp 75 template<typename > struct allocator ;
89 template<class> class = allocator> class IntervalSet>
98 template<class> class = allocator> class IntervalSet>
  /external/libcxx/test/std/utilities/tuple/tuple.tuple/tuple.cnstr/
alloc_const_Types.pass.cpp 36 return {std::allocator_arg, std::allocator<void>{}, i};
41 return {std::allocator_arg, std::allocator<void>{}, i};
48 std::tuple<int*> t = {std::allocator_arg, std::allocator<void>{}, 0};
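
These libcxx tests exercise allocator-extended tuple construction: the std::allocator_arg tag tells std::tuple to forward the given allocator to any element that accepts one. A minimal sketch (a plain int ignores the allocator, so the value still comes through):

#include <memory>
#include <tuple>

int main() {
    std::tuple<int> t(std::allocator_arg, std::allocator<void>{}, 42);
    return std::get<0>(t) == 42 ? 0 : 1;
}
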

