Lines Matching full:uptr
87 template <uptr kMaxSizeLog, uptr kMaxNumCachedT, uptr kMaxBytesCachedLog>
89 static const uptr kMinSizeLog = 4;
90 static const uptr kMidSizeLog = kMinSizeLog + 4;
91 static const uptr kMinSize = 1 << kMinSizeLog;
92 static const uptr kMidSize = 1 << kMidSizeLog;
93 static const uptr kMidClass = kMidSize / kMinSize;
94 static const uptr S = 2;
95 static const uptr M = (1 << S) - 1;
98 static const uptr kMaxNumCached = kMaxNumCachedT;
104 uptr count;
108 static const uptr kMaxSize = 1UL << kMaxSizeLog;
109 static const uptr kNumClasses =
112 static const uptr kNumClassesRounded =
117 static uptr Size(uptr class_id) {
121 uptr t = kMidSize << (class_id >> S);
125 static uptr ClassID(uptr size) {
129 uptr l = MostSignificantSetBitIndex(size);
130 uptr hbits = (size >> (l - S)) & M;
131 uptr lbits = size & ((1 << (l - S)) - 1);
132 uptr l1 = l - kMidSizeLog;
136 static uptr MaxCached(uptr class_id) {
138 uptr n = (1UL << kMaxBytesCachedLog) / Size(class_id);
139 return Max<uptr>(1, Min(kMaxNumCached, n));
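File lines 87-139 above are the core of a geometric size-class map: classes 1..kMidClass step linearly by kMinSize, and past kMidSize every power-of-two interval is split into 2^S = 4 subclasses. The return statements contain no `uptr` and so do not appear in this listing; the sketch below fills them in by assumption (kMaxSizeLog = 17, kMaxNumCached = 256 and kMaxBytesCachedLog = 16 are illustrative parameters, not necessarily the library's defaults):

    #include <assert.h>
    typedef unsigned long uptr;
    template <class T> static T Min(T a, T b) { return a < b ? a : b; }
    template <class T> static T Max(T a, T b) { return a > b ? a : b; }

    static const uptr kMaxSizeLog = 17, kMaxBytesCachedLog = 16;  // assumed
    static const uptr kMaxNumCached = 256;                        // assumed
    static const uptr kMinSizeLog = 4;
    static const uptr kMidSizeLog = kMinSizeLog + 4;
    static const uptr kMinSize = 1 << kMinSizeLog;      // 16
    static const uptr kMidSize = 1 << kMidSizeLog;      // 256
    static const uptr kMidClass = kMidSize / kMinSize;  // 16
    static const uptr S = 2;
    static const uptr M = (1 << S) - 1;                 // mask of S bits: 3
    static const uptr kMaxSize = 1UL << kMaxSizeLog;

    static uptr MostSignificantSetBitIndex(uptr x) {
      return 63 - __builtin_clzl(x);  // assumes 64-bit uptr, GCC/Clang builtin
    }

    static uptr Size(uptr class_id) {
      if (class_id <= kMidClass)
        return kMinSize * class_id;           // linear part: 16, 32, ..., 256
      class_id -= kMidClass;
      uptr t = kMidSize << (class_id >> S);   // power-of-two base
      return t + (t >> S) * (class_id & M);   // plus 0..M quarters of it
    }

    static uptr ClassID(uptr size) {
      if (size <= kMidSize)
        return (size + kMinSize - 1) >> kMinSizeLog;  // round up to kMinSize
      if (size > kMaxSize)
        return 0;
      uptr l = MostSignificantSetBitIndex(size);
      uptr hbits = (size >> (l - S)) & M;        // S bits right below the MSB
      uptr lbits = size & ((1 << (l - S)) - 1);  // remainder forces a round-up
      uptr l1 = l - kMidSizeLog;
      return kMidClass + (l1 << S) + hbits + (lbits > 0);
    }

    static uptr MaxCached(uptr class_id) {  // cap cached bytes per class
      uptr n = (1UL << kMaxBytesCachedLog) / Size(class_id);
      return Max<uptr>(1, Min(kMaxNumCached, n));
    }

    int main() {
      assert(ClassID(17) == 2 && Size(2) == 32);      // 17 rounds up to 32
      assert(ClassID(300) == 17 && Size(17) == 320);  // 256 + 1 * 64
      for (uptr s = 1; s <= kMaxSize; s++)  // the round-trip the file's own
        assert(Size(ClassID(s)) >= s);      // validation loops check
    }

The main() above mirrors the exhaustive self-checks visible at file lines 168-182 (ClassID(Size(c)) round-trips and Size(ClassID(s)) >= s for every s up to kMaxSize).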
143 uptr prev_s = 0;
144 uptr total_cached = 0;
145 for (uptr i = 0; i < kNumClasses; i++) {
146 uptr s = Size(i);
149 uptr d = s - prev_s;
150 uptr p = prev_s ? (d * 100 / prev_s) : 0;
151 uptr l = s ? MostSignificantSetBitIndex(s) : 0;
152 uptr cached = MaxCached(i) * s;
162 static bool SizeClassRequiresSeparateTransferBatch(uptr class_id) {
164 sizeof(uptr) * (kMaxNumCached - MaxCached(class_id));
168 for (uptr c = 1; c < kNumClasses; c++) {
170 uptr s = Size(c);
181 for (uptr s = 1; s <= kMaxSize; s++) {
182 uptr c = ClassID(s);
278 void OnMap(uptr p, uptr size) const { }
279 void OnUnmap(uptr p, uptr size) const { }
283 typedef void (*ForEachChunkCallback)(uptr chunk, void *arg);
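File lines 278-283 define the two callback hooks: a MapUnmapCallback whose default does nothing, and the C-style chunk-enumeration callback. A hypothetical user-supplied pair (names here are illustrative, not from the source) might look like:

    typedef unsigned long uptr;

    // Drop-in replacement for the no-op callback above; tallies live
    // mappings. Purely illustrative.
    struct CountingMapUnmapCallback {
      static uptr mapped_bytes;
      void OnMap(uptr p, uptr size) const { mapped_bytes += size; }
      void OnUnmap(uptr p, uptr size) const { mapped_bytes -= size; }
    };
    uptr CountingMapUnmapCallback::mapped_bytes = 0;

    // A ForEachChunkCallback that counts chunks through its opaque arg.
    typedef void (*ForEachChunkCallback)(uptr chunk, void *arg);
    static void CountChunk(uptr chunk, void *arg) {
      (void)chunk;
      ++*reinterpret_cast<uptr *>(arg);
    }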
301 template <const uptr kSpaceBeg, const uptr kSpaceSize,
302 const uptr kMetadataSize, class SizeClassMap,
313 reinterpret_cast<uptr>(Mprotect(kSpaceBeg, kSpaceSize)));
317 void MapWithCallback(uptr beg, uptr size) {
318 CHECK_EQ(beg, reinterpret_cast<uptr>(MmapFixedOrDie(beg, size)));
322 void UnmapWithCallback(uptr beg, uptr size) {
327 static bool CanAllocate(uptr size, uptr alignment) {
333 uptr class_id) {
343 NOINLINE void DeallocateBatch(AllocatorStats *stat, uptr class_id, Batch *b) {
351 return reinterpret_cast<uptr>(p) / kSpaceSize == kSpaceBeg / kSpaceSize;
354 static uptr GetSizeClass(const void *p) {
355 return (reinterpret_cast<uptr>(p) / kRegionSize) % kNumClassesRounded;
359 uptr class_id = GetSizeClass(p);
360 uptr size = SizeClassMap::Size(class_id);
362 uptr chunk_idx = GetChunkIdx((uptr)p, size);
363 uptr reg_beg = (uptr)p & ~(kRegionSize - 1);
364 uptr beg = chunk_idx * size;
365 uptr next_beg = beg + size;
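File lines 301-365 are the 64-bit primary allocator, whose trick is purely positional: one huge reservation is split into kNumClassesRounded equal regions, and a pointer's region index is its size class. A sketch of that arithmetic (kSpaceBeg, kSpaceSize and kNumClassesRounded are illustrative values; the real ones are template parameters):

    typedef unsigned long uptr;

    static const uptr kSpaceBeg = 0x600000000000UL;  // assumed
    static const uptr kSpaceSize = 0x40000000000UL;  // assumed 4 TiB
    static const uptr kNumClassesRounded = 64;       // assumed
    static const uptr kRegionSize = kSpaceSize / kNumClassesRounded;

    static bool PointerIsMine(const void *p) {
      // Works because kSpaceBeg is kSpaceSize-aligned (file line 351).
      return reinterpret_cast<uptr>(p) / kSpaceSize == kSpaceBeg / kSpaceSize;
    }

    // Region index doubles as the size class (file line 355).
    static uptr GetSizeClass(const void *p) {
      return (reinterpret_cast<uptr>(p) / kRegionSize) % kNumClassesRounded;
    }

    // Chunk index inside the region (file lines 495-496).
    static uptr GetChunkIdx(uptr chunk, uptr size) {
      uptr offset = chunk % kRegionSize;
      return offset / size;
    }

    // Round an interior pointer down to its chunk (the shape of file
    // lines 359-365, simplified: the real code also checks next_beg).
    static void *GetBlockBegin(const void *p, uptr size) {
      uptr chunk_idx = GetChunkIdx(reinterpret_cast<uptr>(p), size);
      uptr reg_beg = reinterpret_cast<uptr>(p) & ~(kRegionSize - 1);
      return reinterpret_cast<void *>(reg_beg + chunk_idx * size);
    }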
373 static uptr GetActuallyAllocatedSize(void *p) {
378 uptr ClassID(uptr size) { return SizeClassMap::ClassID(size); }
381 uptr class_id = GetSizeClass(p);
382 uptr size = SizeClassMap::Size(class_id);
383 uptr chunk_idx = GetChunkIdx(reinterpret_cast<uptr>(p), size);
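GetMetaData (file lines 381-383) pairs with that layout: metadata records are stacked downward from the top of each class's region, so chunk number chunk_idx finds its record with one multiplication. A sketch under the same assumed constants:

    typedef unsigned long uptr;

    static const uptr kSpaceBeg = 0x600000000000UL;  // assumed, as above
    static const uptr kRegionSize = 1UL << 36;       // assumed, as above
    static const uptr kMetadataSize = 16;            // assumed; template param

    // Metadata for chunk_idx grows downward from the region's end, the
    // same formula the 32-bit variant spells out at file line 650.
    static void *GetMetaData(uptr class_id, uptr chunk_idx) {
      uptr region_end = kSpaceBeg + kRegionSize * (class_id + 1);
      return reinterpret_cast<void *>(region_end -
                                      (1 + chunk_idx) * kMetadataSize);
    }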
388 uptr TotalMemoryUsed() {
389 uptr res = 0;
390 for (uptr i = 0; i < kNumClasses; i++)
401 uptr total_mapped = 0;
402 uptr n_allocated = 0;
403 uptr n_freed = 0;
404 for (uptr class_id = 1; class_id < kNumClasses; class_id++) {
413 for (uptr class_id = 1; class_id < kNumClasses; class_id++) {
428 for (uptr i = 0; i < kNumClasses; i++) {
442 for (uptr class_id = 1; class_id < kNumClasses; class_id++) {
444 uptr chunk_size = SizeClassMap::Size(class_id);
445 uptr region_beg = kSpaceBeg + class_id * kRegionSize;
446 for (uptr chunk = region_beg;
456 static const uptr kNumClasses = SizeClassMap::kNumClasses;
457 static const uptr kNumClassesRounded = SizeClassMap::kNumClassesRounded;
460 static const uptr kRegionSize = kSpaceSize / kNumClassesRounded;
461 static const uptr kSpaceEnd = kSpaceBeg + kSpaceSize;
467 static const uptr kPopulateSize = 1 << 14;
469 static const uptr kUserMapSize = 1 << 16;
471 static const uptr kMetaMapSize = 1 << 16;
476 uptr allocated_user; // Bytes allocated for user memory.
477 uptr allocated_meta; // Bytes allocated for metadata.
478 uptr mapped_user; // Bytes mapped for user memory.
479 uptr
479 uptr mapped_meta; // Bytes mapped for metadata.
480 uptr n_allocated, n_freed; // Just stats.
484 static uptr AdditionalSize() {
489 RegionInfo *GetRegionInfo(uptr class_id) {
495 static uptr GetChunkIdx(uptr chunk, uptr size) {
496 uptr offset = chunk % kRegionSize;
505 uptr class_id, RegionInfo *region) {
510 uptr size = SizeClassMap::Size(class_id);
511 uptr count = size < kPopulateSize ? SizeClassMap::MaxCached(class_id) : 1;
512 uptr beg_idx = region->allocated_user;
513 uptr end_idx = beg_idx + count * size;
514 uptr region_beg = kSpaceBeg + kRegionSize * class_id;
517 uptr map_size = kUserMapSize;
525 uptr total_count = (region->mapped_user - beg_idx - size)
529 uptr map_size = kMetaMapSize;
551 for (uptr i = 0; i < count; i++)
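PopulateFreeList (file lines 505-551) grows a region lazily: user pages are mapped in kUserMapSize steps, metadata in kMetaMapSize steps, and small classes are populated MaxCached-many chunks at a time. A rough sketch of the bookkeeping, reconstructed by assumption (Map() stands in for the real MapWithCallback):

    typedef unsigned long uptr;

    static const uptr kPopulateSize = 1 << 14;  // file line 467
    static const uptr kUserMapSize = 1 << 16;   // file line 469

    struct RegionInfo {  // the counters from file lines 476-480
      uptr allocated_user, allocated_meta, mapped_user, mapped_meta;
    };

    static void Map(uptr beg, uptr size) {
      (void)beg; (void)size;  // MmapFixedOrDie(beg, size) in the source
    }

    // Returns how many fresh chunks of `size` bytes became available.
    // Metadata is extended the same way from the region's top in
    // kMetaMapSize steps (file line 529); elided here for brevity.
    static uptr PopulateRegion(RegionInfo *region, uptr region_beg,
                               uptr size, uptr max_cached) {
      uptr count = size < kPopulateSize ? max_cached : 1;  // file line 511
      uptr beg_idx = region->allocated_user;               // file line 512
      uptr end_idx = beg_idx + count * size;               // file line 513
      while (region->mapped_user < end_idx) {  // map just enough user pages
        Map(region_beg + region->mapped_user, kUserMapSize);
        region->mapped_user += kUserMapSize;
      }
      region->allocated_user = end_idx;
      return count;
    }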
573 void set(uptr idx, u8 val) {
578 u8 operator[] (uptr idx) {
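File lines 573-578 belong to the byte map the 32-bit allocator uses to tag each possible region with its size class. A minimal flat version with the same interface (a sketch; the real one may be two-level and bounds-checked):

    typedef unsigned long uptr;
    typedef unsigned char u8;

    template <uptr kSize>
    class FlatByteMap {
     public:
      void set(uptr idx, u8 val) { map_[idx] = val; }  // one byte per region
      u8 operator[](uptr idx) { return map_[idx]; }
     private:
      u8 map_[kSize];
    };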
609 template <const uptr kSpaceBeg, const u64 kSpaceSize,
610 const uptr kMetadataSize, class SizeClassMap,
611 const uptr kRegionSizeLog,
626 void *MapWithCallback(uptr size) {
629 MapUnmapCallback().OnMap((uptr)res, size);
633 void UnmapWithCallback(uptr beg, uptr size) {
638 static bool CanAllocate(uptr size, uptr alignment) {
645 uptr mem = reinterpret_cast<uptr>(p);
646 uptr beg = ComputeRegionBeg(mem);
647 uptr size = SizeClassMap::Size(GetSizeClass(p));
649 uptr n = offset / (u32)size; // 32-bit division
650 uptr meta = (beg + kRegionSize) - (n + 1) * kMetadataSize;
655 uptr class_id) {
667 NOINLINE void DeallocateBatch(AllocatorStats *stat, uptr class_id, Batch *b) {
679 uptr GetSizeClass(const void *p) {
680 return possible_regions[ComputeRegionId(reinterpret_cast<uptr>(p))];
685 uptr mem = reinterpret_cast<uptr>(p);
686 uptr beg = ComputeRegionBeg(mem);
687 uptr size = SizeClassMap::Size(GetSizeClass(p));
690 uptr res = beg + (n * (u32)size);
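File lines 609-690 are the 32-bit primary allocator: regions are kRegionSize-aligned blocks obtained on demand, chunks sit at the bottom of a region and metadata at the top, and the index division runs on 32-bit operands because both values fit (file line 649). The arithmetic, restated as a standalone sketch (kRegionSizeLog and kMetadataSize are illustrative):

    typedef unsigned long uptr;
    typedef unsigned int u32;

    static const uptr kRegionSizeLog = 20;  // assumed; template param
    static const uptr kRegionSize = 1 << kRegionSizeLog;
    static const uptr kMetadataSize = 16;   // assumed

    static uptr ComputeRegionBeg(uptr mem) {
      return mem & ~(kRegionSize - 1);  // regions are kRegionSize-aligned
    }

    // File lines 645-650: chunk index, then metadata slot from the top.
    static void *GetMetaData(uptr mem, uptr size) {
      uptr beg = ComputeRegionBeg(mem);
      uptr n = (mem - beg) / (u32)size;  // both operands fit in 32 bits
      return reinterpret_cast<void *>((beg + kRegionSize) -
                                      (n + 1) * kMetadataSize);
    }

    // File lines 685-690: same index, rounded back down to the chunk.
    static void *GetBlockBegin(uptr mem, uptr size) {
      uptr beg = ComputeRegionBeg(mem);
      uptr n = (mem - beg) / (u32)size;
      return reinterpret_cast<void *>(beg + n * (u32)size);
    }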
694 uptr GetActuallyAllocatedSize(void *p) {
699 uptr ClassID(uptr size) { return SizeClassMap::ClassID(size); }
701 uptr TotalMemoryUsed() {
703 uptr res = 0;
704 for (uptr i = 0; i < kNumPossibleRegions; i++)
711 for (uptr i = 0; i < kNumPossibleRegions; i++)
719 for (uptr i = 0; i < kNumClasses; i++) {
733 for (uptr region = 0; region < kNumPossibleRegions; region++)
735 uptr chunk_size = SizeClassMap::Size(possible_regions[region]);
736 uptr max_chunks_in_region = kRegionSize / (chunk_size + kMetadataSize);
737 uptr region_beg = region * kRegionSize;
738 for (uptr chunk = region_beg;
751 static const uptr kNumClasses = SizeClassMap::kNumClasses;
754 static const uptr kRegionSize = 1 << kRegionSizeLog;
755 static const uptr kNumPossibleRegions = kSpaceSize / kRegionSize;
760 char padding[kCacheLineSize - sizeof(uptr) - sizeof(IntrusiveList<Batch>)];
764 uptr ComputeRegionId(uptr mem) {
765 uptr res = mem >> kRegionSizeLog;
770 uptr ComputeRegionBeg(uptr mem) {
774 uptr AllocateRegion(AllocatorStats *stat, uptr class_id) {
776 uptr res = reinterpret_cast<uptr>(MmapAlignedOrDie(kRegionSize, kRegionSize,
785 SizeClassInfo *GetSizeClassInfo(uptr class_id) {
791 SizeClassInfo *sci, uptr class_id) {
792 uptr size = SizeClassMap::Size(class_id);
793 uptr reg = AllocateRegion(stat, class_id);
794 uptr n_chunks = kRegionSize / (size + kMetadataSize);
795 uptr max_count = SizeClassMap::MaxCached(class_id);
797 for (uptr i = reg; i < reg + n_chunks * size; i += size) {
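File lines 764-797 show the other half of the 32-bit design: a region id is just the high bits of the address (so GetSizeClass is a byte-map lookup), a new region comes from an aligned mmap (MmapAlignedOrDie, file line 776), and PopulateFreeList slices it into kRegionSize / (size + kMetadataSize) chunks. A sketch, with push() standing in for the batch/free-list plumbing:

    typedef unsigned long uptr;

    static const uptr kRegionSizeLog = 20;  // assumed, as above
    static const uptr kRegionSize = 1 << kRegionSizeLog;
    static const uptr kMetadataSize = 16;   // assumed

    static uptr ComputeRegionId(uptr mem) {
      return mem >> kRegionSizeLog;  // file line 765
    }

    // Carve a freshly mapped, kRegionSize-aligned region into chunks.
    // Chunks and their metadata share the region, hence the divisor.
    template <class PushFn>
    static void PopulateFreeList(uptr reg, uptr size, PushFn push) {
      uptr n_chunks = kRegionSize / (size + kMetadataSize);    // line 794
      for (uptr i = reg; i < reg + n_chunks * size; i += size) // line 797
        push(i);
    }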
828 static const uptr kNumClasses = SizeClassAllocator::kNumClasses;
842 void *Allocate(SizeClassAllocator *allocator, uptr class_id) {
854 void Deallocate(SizeClassAllocator *allocator, uptr class_id, void *p) {
869 for (uptr class_id = 0; class_id < kNumClasses; class_id++) {
880 uptr count;
881 uptr max_count;
890 for (uptr i = 0; i < kNumClasses; i++) {
896 NOINLINE void Refill(SizeClassAllocator *allocator, uptr class_id) {
901 for (uptr i = 0; i < b->count; i++)
908 NOINLINE void Drain(SizeClassAllocator *allocator, uptr class_id) {
916 uptr cnt = Min(c->max_count / 2, c->count);
917 for (uptr i = 0; i < cnt; i++) {
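File lines 828-917 are the per-thread cache: each size class holds a small array with a count and a max_count; Refill pulls one whole Batch from the shared allocator, and Drain pushes half of the cache back (file line 916). A sketch with an assumed Batch/PerClass layout (field names and capacities are illustrative):

    typedef unsigned long uptr;
    template <class T> static T Min(T a, T b) { return a < b ? a : b; }

    struct Batch {      // transfer unit; `count` echoes file line 104
      uptr count;
      void *batch[256];
    };

    struct PerClass {   // `count`/`max_count` echo file lines 880-881
      uptr count, max_count;
      void *chunks[512];
    };

    static void Refill(PerClass *c, Batch *b) {
      for (uptr i = 0; i < b->count; i++)     // file line 901
        c->chunks[c->count++] = b->batch[i];  // assumes capacity suffices
    }

    static void Drain(PerClass *c, Batch *b) {
      uptr cnt = Min(c->max_count / 2, c->count);  // file line 916
      for (uptr i = 0; i < cnt; i++)  // hand half of the cache back
        b->batch[i] = c->chunks[--c->count];
      b->count = cnt;
    }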
939 void *Allocate(AllocatorStats *stat, uptr size, uptr alignment) {
941 uptr map_size = RoundUpMapSize(size);
945 uptr map_beg = reinterpret_cast<uptr>(
948 uptr map_end = map_beg + map_size;
949 uptr res = map_beg + page_size_;
958 uptr size_log = MostSignificantSetBitIndex(map_size);
962 uptr idx = n_chunks_++;
981 uptr idx = h->chunk_idx;
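File lines 939-981 are the secondary (large-object) allocator's Allocate/Deallocate: every allocation is its own mapping, one page larger than the rounded request so a Header fits in front of the user pointer. The address math as a sketch (kPageSize is an assumed constant; the source reads page_size_ at runtime):

    typedef unsigned long uptr;
    template <class T> static T RoundUpTo(T x, T b) {
      return (x + b - 1) / b * b;
    }

    static const uptr kPageSize = 4096;  // assumed; page_size_ in the source

    static uptr RoundUpMapSize(uptr size) {
      // user bytes rounded to pages, plus one page for the Header
      return RoundUpTo(size, kPageSize) + kPageSize;
    }

    // With map_beg fresh from the mmap call:
    //   uptr map_size = RoundUpMapSize(size);  // file line 941
    //   uptr map_end  = map_beg + map_size;    // file line 948
    //   uptr res      = map_beg + kPageSize;   // user pointer, file line 949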
997 uptr TotalMemoryUsed() {
999 uptr res = 0;
1000 for (uptr i = 0; i < n_chunks_; i++) {
1012 uptr GetActuallyAllocatedSize(void *p) {
1019 CHECK(IsAligned(reinterpret_cast<uptr>(p), page_size_));
1024 uptr p = reinterpret_cast<uptr>(ptr);
1026 uptr nearest_chunk = 0;
1028 for (uptr i = 0; i < n_chunks_; i++) {
1029 uptr ch = reinterpret_cast<uptr>(chunks_[i]);
1048 uptr p = reinterpret_cast<uptr>(ptr);
1049 uptr n = n_chunks_;
1053 SortArray(reinterpret_cast<uptr*>(chunks_), n);
1054 for (uptr i = 0; i < n; i++)
1057 min_mmap_ = reinterpret_cast<uptr>(chunks_[0]);
1058 max_mmap_ = reinterpret_cast<uptr>(chunks_[n - 1]) +
1063 uptr beg = 0, end = n - 1;
1067 uptr mid = (beg + end) / 2; // Invariant: mid >= beg + 1
1068 if (p < reinterpret_cast<uptr>(chunks_[mid]))
1077 if (p >= reinterpret_cast<uptr>(chunks_[end]))
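File lines 1024-1077 locate an arbitrary interior pointer among the large chunks: chunks_ is sorted once (file line 1053), min_mmap_/max_mmap_ (file lines 1057-1058) give a cheap range rejection, and a binary search finds the nearest chunk at or below p. The search, extracted as a standalone sketch over a sorted uptr array:

    typedef unsigned long uptr;

    // Returns the largest element <= p, or 0 if p precedes them all.
    static uptr NearestChunkAtOrBelow(const uptr *chunks, uptr n, uptr p) {
      if (n == 0 || p < chunks[0])
        return 0;
      uptr beg = 0, end = n - 1;
      while (end - beg >= 2) {
        uptr mid = (beg + end) / 2;  // invariant: mid >= beg + 1 (line 1067)
        if (p < chunks[mid])
          end = mid - 1;  // p sits in [chunks[beg], chunks[mid])
        else
          beg = mid;      // p sits in [chunks[mid], chunks[end]]
      }
      if (p >= chunks[end])  // file line 1077
        return chunks[end];
      return chunks[beg];
    }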
1092 for (uptr i = 0; i < ARRAY_SIZE(stats.by_size_log); i++) {
1093 uptr c = stats.by_size_log[i];
1113 for (uptr i = 0; i < n_chunks_; i++)
1114 callback(reinterpret_cast<uptr>(GetUser(chunks_[i])), arg);
1120 uptr map_beg;
1121 uptr map_size;
1122 uptr size;
1123 uptr chunk_idx;
1126 Header *GetHeader(uptr p) {
1131 return GetHeader(reinterpret_cast<uptr>(p));
1135 CHECK(IsAligned((uptr)h, page_size_));
1136 return reinterpret_cast<void*>(reinterpret_cast<uptr>(h) + page_size_);
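The Header sits exactly one page below the user pointer, so both conversions (file lines 1126-1136) are a single subtraction or addition:

    typedef unsigned long uptr;

    static const uptr kPageSize = 4096;  // assumed; page_size_ in the source

    struct Header {  // file lines 1120-1123
      uptr map_beg, map_size, size, chunk_idx;
    };

    static Header *GetHeader(uptr p) {
      return reinterpret_cast<Header *>(p - kPageSize);
    }
    static void *GetUser(Header *h) {
      return reinterpret_cast<void *>(reinterpret_cast<uptr>(h) + kPageSize);
    }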
1139 uptr RoundUpMapSize(uptr size) {
1143 uptr page_size_;
1145 uptr n_chunks_;
1146 uptr min_mmap_, max_mmap_;
1149 uptr n_allocs, n_frees, currently_allocated, max_allocated, by_size_log[64];
1170 void *Allocate(AllocatorCache *cache, uptr size, uptr alignment,
1185 CHECK_EQ(reinterpret_cast<uptr>(res) & (alignment - 1), 0);
1199 void *Reallocate(AllocatorCache *cache, void *p, uptr new_size,
1200 uptr alignment) {
1208 uptr old_size = GetActuallyAllocatedSize(p);
1209 uptr memcpy_size = Min(new_size, old_size);
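The combined allocator's Reallocate (file lines 1199-1209) is the classic allocate-copy-free: it copies Min(new_size, old actually-allocated size), then releases the old block. A sketch with malloc/free standing in for the primary/secondary plumbing:

    #include <stdlib.h>
    #include <string.h>
    typedef unsigned long uptr;
    template <class T> static T Min(T a, T b) { return a < b ? a : b; }

    static void *Reallocate(void *p, uptr new_size, uptr old_size) {
      void *new_p = malloc(new_size);  // Allocate(cache, new_size, alignment)
      if (new_p) {
        uptr memcpy_size = Min(new_size, old_size);  // file line 1209
        memcpy(new_p, p, memcpy_size);
        free(p);  // Deallocate(cache, p)
      }
      return new_p;
    }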
1247 uptr GetActuallyAllocatedSize(void *p) {
1253 uptr TotalMemoryUsed() {
1306 bool CallocShouldReturnNullDueToOverflow(uptr size, uptr n);
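CallocShouldReturnNullDueToOverflow (file line 1306) is only declared here; one standard way to implement the check it names (an assumption on my part, not the source's definition) is to test the multiplication against the largest representable value:

    typedef unsigned long uptr;

    static bool CallocShouldReturnNullDueToOverflow(uptr size, uptr n) {
      if (size == 0)
        return false;        // 0 * n never overflows
      uptr max = (uptr)-1;   // largest uptr
      return n > max / size; // true iff n * size would wrap around
    }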