    Searched refs:kAlignment (Results 1 - 25 of 40)


  /external/libmojo/mojo/public/cpp/bindings/lib/
bindings_internal.cc 12 const size_t kAlignment = 8;
16 return t + (kAlignment - (t % kAlignment)) % kAlignment;
30 return !(reinterpret_cast<uintptr_t>(ptr) % kAlignment);
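
The mojo helper above rounds a size up to the 8-byte boundary with t + (kAlignment - (t % kAlignment)) % kAlignment and tests pointer alignment with a modulo on the address. A minimal, self-contained sketch of that idiom; the surrounding scaffolding and test values are illustrative, not the mojo API:

    #include <cassert>
    #include <cstddef>
    #include <cstdint>

    constexpr size_t kAlignment = 8;

    // Round t up to the next multiple of kAlignment; values that are already
    // aligned are returned unchanged because of the outer "% kAlignment".
    constexpr size_t Align(size_t t) {
      return t + (kAlignment - (t % kAlignment)) % kAlignment;
    }

    // A pointer is aligned when its address is a multiple of kAlignment.
    bool IsAligned(const void* ptr) {
      return (reinterpret_cast<uintptr_t>(ptr) % kAlignment) == 0;
    }

    int main() {
      static_assert(Align(0) == 0, "already aligned");
      static_assert(Align(1) == 8, "rounds up");
      static_assert(Align(8) == 8, "multiple stays put");
      static_assert(Align(9) == 16, "rounds up past a boundary");

      alignas(kAlignment) char buffer[16];
      assert(IsAligned(buffer));
      return 0;
    }
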
  /art/runtime/gc/accounting/
space_bitmap.cc 34 template<size_t kAlignment>
35 size_t SpaceBitmap<kAlignment>::ComputeBitmapSize(uint64_t capacity) {
36 const uint64_t kBytesCoveredPerWord = kAlignment * kBitsPerIntPtrT;
40 template<size_t kAlignment>
41 size_t SpaceBitmap<kAlignment>::ComputeHeapSize(uint64_t bitmap_bytes) {
42 return bitmap_bytes * kBitsPerByte * kAlignment;
45 template<size_t kAlignment>
46 SpaceBitmap<kAlignment>* SpaceBitmap<kAlignment>::CreateFromMemMap(
54 template<size_t kAlignment>
    [all...]
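
The two SpaceBitmap helpers size the bitmap from the heap capacity: each bitmap word of kBitsPerIntPtrT bits covers kAlignment * kBitsPerIntPtrT heap bytes, and ComputeHeapSize multiplies the byte count back out. A hedged sketch of that arithmetic; the constant names mirror the snippet, and the RoundUp helper is an assumption:

    #include <cstdint>
    #include <iostream>

    constexpr uint64_t kAlignment = 8;  // object alignment in bytes
    constexpr uint64_t kBitsPerByte = 8;
    constexpr uint64_t kBitsPerIntPtrT = sizeof(intptr_t) * kBitsPerByte;

    // Round x up to a multiple of n (assumed helper, n > 0).
    constexpr uint64_t RoundUp(uint64_t x, uint64_t n) {
      return ((x + n - 1) / n) * n;
    }

    // Bytes of bitmap needed so that one bit covers kAlignment bytes of heap.
    constexpr uint64_t ComputeBitmapSize(uint64_t capacity) {
      const uint64_t kBytesCoveredPerWord = kAlignment * kBitsPerIntPtrT;
      return (RoundUp(capacity, kBytesCoveredPerWord) / kBytesCoveredPerWord) *
             sizeof(intptr_t);
    }

    // Inverse: how much heap a bitmap of bitmap_bytes can describe.
    constexpr uint64_t ComputeHeapSize(uint64_t bitmap_bytes) {
      return bitmap_bytes * kBitsPerByte * kAlignment;
    }

    int main() {
      const uint64_t capacity = 64 * 1024 * 1024;  // 64 MiB heap
      const uint64_t bitmap_bytes = ComputeBitmapSize(capacity);
      std::cout << bitmap_bytes << " bitmap bytes cover "
                << ComputeHeapSize(bitmap_bytes) << " heap bytes\n";
      return 0;
    }
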
bitmap.cc 80 template<size_t kAlignment>
81 MemoryRangeBitmap<kAlignment>* MemoryRangeBitmap<kAlignment>::Create(
83 CHECK_ALIGNED(cover_begin, kAlignment);
84 CHECK_ALIGNED(cover_end, kAlignment);
85 const size_t num_bits = (cover_end - cover_begin) / kAlignment;
90 template<size_t kAlignment>
91 MemoryRangeBitmap<kAlignment>* MemoryRangeBitmap<kAlignment>::CreateFromMemMap(
space_bitmap-inl.h 32 template<size_t kAlignment>
33 inline bool SpaceBitmap<kAlignment>::AtomicTestAndSet(const mirror::Object* obj) {
54 template<size_t kAlignment>
55 inline bool SpaceBitmap<kAlignment>::Test(const mirror::Object* obj) const {
64 template<size_t kAlignment> template<typename Visitor>
65 inline void SpaceBitmap<kAlignment>::VisitMarkedRange(uintptr_t visit_begin,
70 for (uintptr_t i = visit_begin; i < visit_end; i += kAlignment) {
86 const size_t bit_start = (offset_start / kAlignment) % kBitsPerIntPtrT;
87 const size_t bit_end = (offset_end / kAlignment) % kBitsPerIntPtrT;
112 mirror::Object* obj = reinterpret_cast<mirror::Object*>(ptr_base + shift * kAlignment);
    [all...]
space_bitmap.h 40 template<size_t kAlignment>
47 // heap_begin of heap_capacity bytes, where objects are guaranteed to be kAlignment-aligned.
62 return offset / kAlignment / kBitsPerIntPtrT;
67 return static_cast<T>(index * kAlignment * kBitsPerIntPtrT);
71 return (offset / kAlignment) % kBitsPerIntPtrT;
123 for (; visit_begin < visit_end; visit_begin += kAlignment) {
233 template<size_t kAlignment>
234 std::ostream& operator << (std::ostream& stream, const SpaceBitmap<kAlignment>& bitmap);
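
space_bitmap.h maps a byte offset from heap_begin to a word index plus a bit within that word: divide by kAlignment to get the object slot, then split the slot number by kBitsPerIntPtrT. A small sketch of that mapping and its inverse, under the same constants; the main() round-trip is illustrative only:

    #include <cassert>
    #include <cstddef>
    #include <cstdint>

    constexpr size_t kAlignment = 8;
    constexpr size_t kBitsPerIntPtrT = sizeof(uintptr_t) * 8;

    // Which bitmap word holds the bit for an object at byte offset from heap_begin.
    constexpr size_t OffsetToIndex(uintptr_t offset) {
      return offset / kAlignment / kBitsPerIntPtrT;
    }

    // First heap offset covered by bitmap word index (inverse of OffsetToIndex).
    constexpr uintptr_t IndexToOffset(size_t index) {
      return index * kAlignment * kBitsPerIntPtrT;
    }

    // Bit position of the object inside its bitmap word.
    constexpr uintptr_t OffsetBitIndex(uintptr_t offset) {
      return (offset / kAlignment) % kBitsPerIntPtrT;
    }

    int main() {
      // An object 1000 bytes into the heap (already kAlignment-aligned).
      const uintptr_t offset = 1000;
      const size_t index = OffsetToIndex(offset);
      const uintptr_t bit = OffsetBitIndex(offset);
      const uintptr_t mask = static_cast<uintptr_t>(1) << bit;  // bit to test or set

      // Round-tripping recovers the offset: word base + bit * kAlignment.
      assert(IndexToOffset(index) + bit * kAlignment == offset);
      (void)mask;
      return 0;
    }
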
bitmap.h 126 // One bit per kAlignment in range (start, end]
127 template<size_t kAlignment>
147 const uintptr_t addr = CoverBegin() + bit_index * kAlignment;
155 return (addr - CoverBegin()) / kAlignment;
181 : Bitmap(mem_map, num_bits), cover_begin_(begin), cover_end_(begin + kAlignment * num_bits) {
space_bitmap_test.cc 155 template <size_t kAlignment>
169 size_t offset = RoundDown(r.next() % heap_capacity, kAlignment);
183 size_t offset = RoundDown(r.next() % heap_capacity, kAlignment);
185 size_t end = offset + RoundDown(r.next() % (remain + 1), kAlignment);
191 for (uintptr_t k = offset; k < end; k += kAlignment) {
  /art/runtime/base/
arena_allocator_test.cc 137 for (size_t size = 1; size <= ArenaAllocator::kAlignment + 1; ++size) {
139 EXPECT_TRUE(IsAligned<ArenaAllocator::kAlignment>(allocation))
157 const size_t original_size = ArenaAllocator::kAlignment * 2;
160 const size_t new_size = ArenaAllocator::kAlignment * 3;
170 const size_t original_size = ArenaAllocator::kAlignment * 2;
173 const size_t new_size = ArenaAllocator::kAlignment * 2 + (ArenaAllocator::kAlignment / 2);
183 const size_t original_size = ArenaAllocator::kAlignment * 2 + (ArenaAllocator::kAlignment / 2);
186 const size_t new_size = ArenaAllocator::kAlignment * 4
    [all...]
scoped_arena_allocator.h 69 static constexpr size_t kAlignment = 8u;
93 // Add kAlignment for the free or used tag. Required to preserve alignment.
94 size_t rounded_bytes = RoundUp(bytes + (kIsDebugBuild ? kAlignment : 0u), kAlignment);
102 ptr += kAlignment;
arena_allocator.h 295 bytes = RoundUp(bytes, kAlignment);
301 DCHECK_ALIGNED(ret, kAlignment);
335 const size_t aligned_ptr_size = RoundUp(ptr_size, kAlignment);
341 const size_t aligned_new_size = RoundUp(new_size, kAlignment);
348 DCHECK_ALIGNED(ptr_, kAlignment);
383 static constexpr size_t kAlignment = 8u;
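
Both arena allocators round every request up to kAlignment (8 bytes) before bumping the current pointer, which keeps each returned pointer aligned without per-allocation bookkeeping. A minimal bump-allocator sketch of that pattern; this is not the ART ArenaAllocator API, and the class name and capacity are illustrative:

    #include <cassert>
    #include <cstddef>
    #include <cstdint>

    class BumpArena {
     public:
      static constexpr size_t kAlignment = 8u;

      explicit BumpArena(size_t capacity)
          : storage_(new uint8_t[capacity]),
            ptr_(storage_),
            end_(storage_ + capacity) {}
      ~BumpArena() { delete[] storage_; }

      // Rounding the size (not the pointer) keeps ptr_ aligned as long as the
      // arena storage itself starts aligned, exactly as in the snippets above.
      void* Alloc(size_t bytes) {
        bytes = RoundUp(bytes, kAlignment);
        if (static_cast<size_t>(end_ - ptr_) < bytes) return nullptr;  // arena exhausted
        uint8_t* ret = ptr_;
        ptr_ += bytes;
        assert(reinterpret_cast<uintptr_t>(ret) % kAlignment == 0);
        return ret;
      }

     private:
      // n must be a power of two for the mask trick to be valid.
      static size_t RoundUp(size_t x, size_t n) { return (x + n - 1) & ~(n - 1); }

      uint8_t* storage_;
      uint8_t* ptr_;
      uint8_t* end_;
    };

    int main() {
      BumpArena arena(1024);
      void* a = arena.Alloc(1);   // rounded up to 8
      void* b = arena.Alloc(13);  // rounded up to 16
      assert(static_cast<uint8_t*>(b) - static_cast<uint8_t*>(a) == 8);
      return 0;
    }
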
  /external/skia/src/gpu/
GrMemoryPool.h 118 kAlignment = 8,
119 kHeaderSize = GR_CT_ALIGN_UP(sizeof(BlockHeader), kAlignment),
120 kPerAllocPad = GR_CT_ALIGN_UP(sizeof(AllocHeader), kAlignment),
171 kPerAllocPad + GR_CT_ALIGN_UP(sizeof(T), kAlignment);
GrMemoryPool.cpp 26 minAllocSize = SkTMax<size_t>(GrSizeAlignUp(minAllocSize, kAlignment), kSmallestMinAllocSize);
27 preallocSize = SkTMax<size_t>(GrSizeAlignUp(preallocSize, kAlignment), minAllocSize);
63 size = GrSizeAlignUp(size, kAlignment);
145 SkASSERT(!(reinterpret_cast<intptr_t>(block) % kAlignment));
180 SkASSERT(!(b % kAlignment));
181 SkASSERT(!(totalSize % kAlignment));
182 SkASSERT(!(block->fCurrPtr % kAlignment));
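
GrMemoryPool pads its block and per-allocation headers to kAlignment at compile time so that the payload following each header stays aligned. A hedged constexpr sketch of that compile-time align-up; GR_CT_ALIGN_UP itself is a Skia macro, the header layouts below are made up, and the stand-in assumes a power-of-two alignment:

    #include <cstddef>

    constexpr size_t kAlignment = 8;

    // Compile-time round-up to a power-of-two alignment (stand-in for GR_CT_ALIGN_UP).
    constexpr size_t CtAlignUp(size_t x, size_t alignment) {
      return (x + (alignment - 1)) & ~(alignment - 1);
    }

    struct BlockHeader {  // illustrative header layout, not Skia's
      void* fNext;
      size_t fLiveCount;
      char* fCurrPtr;
    };
    struct AllocHeader {  // illustrative
      BlockHeader* fHeader;
    };

    // Padding the headers keeps the payload that follows them kAlignment-aligned.
    constexpr size_t kHeaderSize = CtAlignUp(sizeof(BlockHeader), kAlignment);
    constexpr size_t kPerAllocPad = CtAlignUp(sizeof(AllocHeader), kAlignment);

    static_assert(kHeaderSize % kAlignment == 0, "block header padded to alignment");
    static_assert(kPerAllocPad % kAlignment == 0, "per-alloc header padded to alignment");

    int main() { return 0; }
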
  /external/skia/tests/
VkHeapTests.cpp 124 const VkDeviceSize kAlignment = 16;
131 REPORTER_ASSERT(reporter, heap.alloc(19 * 1024 - 3, kAlignment, kMemType, kHeapIndex, &alloc0));
132 REPORTER_ASSERT(reporter, heap.alloc(5 * 1024 - 9, kAlignment, kMemType, kHeapIndex, &alloc1));
133 REPORTER_ASSERT(reporter, heap.alloc(15 * 1024 - 15, kAlignment, kMemType, kHeapIndex, &alloc2));
134 REPORTER_ASSERT(reporter, heap.alloc(3 * 1024 - 6, kAlignment, kMemType, kHeapIndex, &alloc3));
141 REPORTER_ASSERT(reporter, heap.alloc(40 * 1024, kAlignment, kMemType, kHeapIndex, &alloc0));
146 REPORTER_ASSERT(reporter, heap.alloc(40 * 1024, kAlignment, kMemType, kHeapIndex, &alloc3));
149 REPORTER_ASSERT(reporter, heap.alloc(22 * 1024, kAlignment, kMemType, kHeapIndex, &alloc2));
160 REPORTER_ASSERT(reporter, heap.alloc(128 * 1024, kAlignment, kMemType, kHeapIndex, &alloc0));
164 REPORTER_ASSERT(reporter, heap.alloc(24 * 1024, kAlignment, kMemType, kHeapIndex, &alloc0))
    [all...]
  /external/webrtc/talk/session/media/
planarfunctions_unittest.cc 64 static const int kAlignment = 16;
165 uint8_t* image_pointer = new uint8_t[y_size + u_size + v_size + kAlignment];
166 y_pointer = ALIGNP(image_pointer, kAlignment);
167 u_pointer = ALIGNP(&image_pointer[y_size], kAlignment);
168 v_pointer = ALIGNP(&image_pointer[y_size + u_size], kAlignment);
207 uint8_t* image_pointer = new uint8_t[2 * height * awidth + kAlignment];
208 yuv_pointer = ALIGNP(image_pointer, kAlignment);
287 ((height + 1) / 2) * ((width + 1) / 2) * 2 + kAlignment];
288 y_pointer = ALIGNP(image_pointer, kAlignment);
326 ((height + 1) / 2) * ((width + 1) / 2) * 2 + kAlignment];
    [all...]
yuvscaler_unittest.cc 53 static const int kAlignment = 16;
105 new uint8_t[isize + kAlignment + memoffset]());
107 new uint8_t[osize + kAlignment + memoffset]());
109 new uint8_t[osize + kAlignment + memoffset]());
111 uint8_t* ibuf = ALIGNP(ibuffer.get(), kAlignment) + memoffset;
112 uint8_t* obuf = ALIGNP(obuffer.get(), kAlignment) + memoffset;
113 uint8_t* xbuf = ALIGNP(xbuffer.get(), kAlignment) + memoffset;
212 scoped_ptr<uint8_t[]> ibuffer(new uint8_t[I420_SIZE(iw, ih) + kAlignment]);
213 scoped_ptr<uint8_t[]> obuffer(new uint8_t[I420_SIZE(ow, oh) + kAlignment]);
215 uint8_t* ibuf = ALIGNP(ibuffer.get(), kAlignment);
    [all...]
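
Both WebRTC tests use the same buffer trick: allocate kAlignment extra bytes, then snap the working pointer up to the next 16-byte boundary with ALIGNP. A small sketch of that idiom with a hand-rolled AlignUp standing in for the ALIGNP macro; the payload size is illustrative:

    #include <cassert>
    #include <cstdint>
    #include <memory>

    static const int kAlignment = 16;

    // Round a pointer up to the next alignment boundary (alignment must be a power of two).
    static uint8_t* AlignUp(uint8_t* p, uintptr_t alignment) {
      const uintptr_t addr = reinterpret_cast<uintptr_t>(p);
      return reinterpret_cast<uint8_t*>((addr + alignment - 1) & ~(alignment - 1));
    }

    int main() {
      const size_t payload = 1280 * 720;  // e.g. one Y plane
      // Over-allocate by kAlignment so the aligned pointer still has payload
      // usable bytes in front of it.
      std::unique_ptr<uint8_t[]> storage(new uint8_t[payload + kAlignment]);
      uint8_t* y = AlignUp(storage.get(), kAlignment);

      assert(reinterpret_cast<uintptr_t>(y) % kAlignment == 0);
      assert(y + payload <= storage.get() + payload + kAlignment);  // stays in bounds
      return 0;
    }
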
  /art/runtime/gc/space/
bump_pointer_space-inl.h 31 num_bytes = RoundUp(num_bytes, kAlignment);
48 num_bytes = RoundUp(num_bytes, kAlignment);
67 DCHECK_ALIGNED(num_bytes, kAlignment);
region_space-inl.h 30 num_bytes = RoundUp(num_bytes, kAlignment);
47 DCHECK_ALIGNED(num_bytes, kAlignment);
95 DCHECK_ALIGNED(num_bytes, kAlignment);
236 return reinterpret_cast<mirror::Object*>(RoundUp(position, kAlignment));
243 DCHECK_ALIGNED(num_bytes, kAlignment);
large_object_space.cc 259 return AlignSize() * FreeListSpace::kAlignment;
264 DCHECK_ALIGNED(size, FreeListSpace::kAlignment);
265 alloc_size_ = (size / FreeListSpace::kAlignment) | (free ? kFlagFree : 0u);
298 // Return how many kAlignment units there are before the free block.
304 return GetPrevFree() * FreeListSpace::kAlignment;
308 DCHECK_ALIGNED(bytes, FreeListSpace::kAlignment);
309 prev_free_ = bytes / FreeListSpace::kAlignment;
316 // Contains the size of the previous free block with kAlignment as the unit. If 0 then the
320 // Allocation size of this object in kAlignment as the unit.
348 CHECK_EQ(size % kAlignment, 0U)
    [all...]
bump_pointer_space.h 166 static constexpr size_t kAlignment = 8;
194 size_t unused_; // Ensures alignment of kAlignment.
197 static_assert(sizeof(BlockHeader) % kAlignment == 0,
198 "continuous block must be kAlignment aligned");
bump_pointer_space.cc 93 return reinterpret_cast<mirror::Object*>(RoundUp(position, kAlignment));
141 bytes = RoundUp(bytes, kAlignment);
225 *usable_size = RoundUp(num_bytes, kAlignment);
large_object_space.h 168 static constexpr size_t kAlignment = kPageSize;
187 return (address - reinterpret_cast<uintptr_t>(Begin())) / kAlignment;
193 return reinterpret_cast<uintptr_t>(Begin()) + slot * kAlignment;
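
FreeListSpace uses the page size as its alignment unit, so converting between an address and a slot index is a divide or multiply relative to the space's base. A hedged sketch of that address-to-slot mapping; the kPageSize value and base address are made up for illustration:

    #include <cassert>
    #include <cstddef>
    #include <cstdint>

    constexpr size_t kPageSize = 4096;        // assumed page size
    constexpr size_t kAlignment = kPageSize;  // large objects are page-aligned

    // Slot index of a page-aligned address within a space starting at begin.
    constexpr size_t GetSlotIndexForAddress(uintptr_t address, uintptr_t begin) {
      return (address - begin) / kAlignment;
    }

    // Inverse mapping: the address of slot slot.
    constexpr uintptr_t GetAddressForSlot(size_t slot, uintptr_t begin) {
      return begin + slot * kAlignment;
    }

    int main() {
      const uintptr_t begin = 0x10000000;  // illustrative base address
      const uintptr_t addr = begin + 7 * kPageSize;
      const size_t slot = GetSlotIndexForAddress(addr, begin);
      assert(slot == 7);
      assert(GetAddressForSlot(slot, begin) == addr);
      return 0;
    }
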
  /external/gemmlowp/test/
test_allocator.cc 32 !(reinterpret_cast<std::uintptr_t>(int32_array) % Allocator::kAlignment));
34 !(reinterpret_cast<std::uintptr_t>(int8_array) % Allocator::kAlignment));
  /external/gemmlowp/internal/
allocator.h 106 static const std::size_t kAlignment = kDefaultCacheLineSize;
119 storage_ = memalign(kAlignment, storage_size_);
121 if (posix_memalign(&storage_, kAlignment, storage_size_)) {
161 const std::size_t bytes = RoundUp<kAlignment>(n * sizeof(T));
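
The gemmlowp allocator aligns its backing storage to the cache line (memalign on Android, posix_memalign elsewhere) and rounds each reserved block up to that alignment. A minimal sketch of the posix_memalign path on a POSIX system; the cache-line size of 64 is an assumption, since gemmlowp's actual value comes from its own headers:

    #include <cassert>
    #include <cstdint>
    #include <stdlib.h>  // posix_memalign, free (POSIX)

    static const size_t kDefaultCacheLineSize = 64;  // assumed value; platform-dependent
    static const size_t kAlignment = kDefaultCacheLineSize;

    // Round n up to a multiple of kAlignment so consecutive reservations stay aligned.
    static size_t RoundUp(size_t n) {
      return (n + kAlignment - 1) / kAlignment * kAlignment;
    }

    int main() {
      const size_t count = 1000;
      const size_t bytes = RoundUp(count * sizeof(std::int32_t));

      void* storage = nullptr;
      if (posix_memalign(&storage, kAlignment, bytes) != 0) {
        return 1;  // allocation failed
      }
      assert(reinterpret_cast<std::uintptr_t>(storage) % kAlignment == 0);
      free(storage);
      return 0;
    }
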
  /art/runtime/gc/
heap-inl.h 82 byte_count = RoundUp(byte_count, space::BumpPointerSpace::kAlignment);
262 alloc_size = RoundUp(alloc_size, space::BumpPointerSpace::kAlignment);
342 alloc_size = RoundUp(alloc_size, space::RegionSpace::kAlignment);
353 static_assert(space::RegionSpace::kAlignment == space::BumpPointerSpace::kAlignment,
355 static_assert(kObjectAlignment == space::BumpPointerSpace::kAlignment,
  /external/boringssl/src/tool/
speed.cc 206 static const unsigned kAlignment = 16;
217 std::unique_ptr<uint8_t[]> in_storage(new uint8_t[chunk_len + kAlignment]);
218 std::unique_ptr<uint8_t[]> out_storage(new uint8_t[chunk_len + overhead_len + kAlignment]);
219 std::unique_ptr<uint8_t[]> in2_storage(new uint8_t[chunk_len + kAlignment]);
223 uint8_t *const in = align(in_storage.get(), kAlignment);
225 uint8_t *const out = align(out_storage.get(), kAlignment);
227 uint8_t *const in2 = align(in2_storage.get(), kAlignment);

