Lines Matching refs:hs (each entry shows the line number within the source file, followed by the matching line)

37 static size_t getMaximumSize(const HeapSource *hs);
184 static bool isSoftLimited(const HeapSource *hs)
192 return hs->softLimit <= hs->idealSize;
199 static size_t getAllocLimit(const HeapSource *hs)
201 if (isSoftLimited(hs)) {
202 return hs->softLimit;
204 return mspace_max_allowed_footprint(hs2heap(hs)->msp);
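The matches at lines 184-204 above show the soft-limit test (a heap is soft limited when softLimit <= idealSize) and the effective allocation ceiling (the soft limit if one is active, otherwise the mspace's maximum allowed footprint). A minimal standalone sketch of that logic; the field names follow the fragments, but the MiniHeapSource type and its maxFootprint member, which replaces the real mspace_max_allowed_footprint() call, are assumptions of the sketch:

    #include <stddef.h>
    #include <stdio.h>

    /* Simplified stand-in for HeapSource: only what the soft-limit
     * check needs.  maxFootprint replaces the real
     * mspace_max_allowed_footprint() query. */
    typedef struct {
        size_t softLimit;     /* SIZE_MAX would mean "no soft limit" */
        size_t idealSize;     /* current ideal footprint */
        size_t maxFootprint;  /* allocator's own growth limit */
    } MiniHeapSource;

    /* Mirrors "return hs->softLimit <= hs->idealSize;" at line 192. */
    static int isSoftLimited(const MiniHeapSource *hs) {
        return hs->softLimit <= hs->idealSize;
    }

    /* The effective allocation ceiling: the soft limit when one is
     * active, otherwise the allocator's footprint limit (lines 199-204). */
    static size_t getAllocLimit(const MiniHeapSource *hs) {
        return isSoftLimited(hs) ? hs->softLimit : hs->maxFootprint;
    }

    int main(void) {
        MiniHeapSource hs = { 2u << 20, 4u << 20, 16u << 20 };
        printf("alloc limit = %zu bytes\n", getAllocLimit(&hs));  /* 2 MiB */
        return 0;
    }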
212 static size_t oldHeapOverhead(const HeapSource *hs, bool includeActive)
222 for (/* i = i */; i < hs->numHeaps; i++) {
224 footprint += mspace_footprint(hs->heaps[i].msp);
233 static Heap *ptr2heap(const HeapSource *hs, const void *ptr)
235 const size_t numHeaps = hs->numHeaps;
239 const Heap *const heap = &hs->heaps[i];
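Lines 233-239 reference ptr2heap(), which resolves a pointer to the heap whose address range contains it. A compact sketch of that range lookup, assuming (as the base/limit fields elsewhere in the listing suggest) that each heap owns the half-open range [base, limit); the MiniHeap/MiniHeapSource types are simplifications of this sketch, not the VM's structs:

    #include <stddef.h>

    #define MAX_HEAPS 4  /* stand-in for HEAP_SOURCE_MAX_HEAP_COUNT */

    /* Each heap is assumed to own the half-open address range
     * [base, limit). */
    typedef struct {
        char *base;
        char *limit;
    } MiniHeap;

    typedef struct {
        MiniHeap heaps[MAX_HEAPS];  /* heaps[0] is the active heap */
        size_t numHeaps;
    } MiniHeapSource;

    /* Return the heap whose range contains ptr, or NULL if none does;
     * the same shape as the loop over hs->heaps at lines 235-239. */
    static MiniHeap *ptr2heapSketch(MiniHeapSource *hs, const void *ptr) {
        for (size_t i = 0; i < hs->numHeaps; i++) {
            MiniHeap *heap = &hs->heaps[i];
            if ((const char *)ptr >= heap->base &&
                (const char *)ptr < heap->limit) {
                return heap;
            }
        }
        return NULL;
    }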
264 HeapSource* hs = gDvm.gcHeap->heapSource;
265 dvmHeapBitmapSetObjectBit(&hs->liveBits, ptr);
279 HeapSource* hs = gDvm.gcHeap->heapSource;
280 dvmHeapBitmapClearObjectBit(&hs->liveBits, ptr);
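Lines 264-280 set and clear per-object bits in hs->liveBits via dvmHeapBitmapSetObjectBit() and dvmHeapBitmapClearObjectBit(). A rough sketch of how such an object bitmap can be indexed; the 8-byte object alignment and the layout below are assumptions of the sketch, not the real HeapBitmap implementation:

    #include <stddef.h>
    #include <stdint.h>

    /* One bit per possible object start; 8-byte alignment is an
     * assumption of this sketch. */
    #define OBJ_ALIGN 8
    #define BITS_PER_WORD (8 * sizeof(unsigned long))

    typedef struct {
        unsigned long *bits;  /* bit array covering [base, base + coverage) */
        uintptr_t base;
    } MiniBitmap;

    static size_t bitIndex(const MiniBitmap *bm, const void *ptr) {
        return ((uintptr_t)ptr - bm->base) / OBJ_ALIGN;
    }

    /* Analogue of dvmHeapBitmapSetObjectBit(): mark the object live. */
    static void setObjectBit(MiniBitmap *bm, const void *ptr) {
        size_t i = bitIndex(bm, ptr);
        bm->bits[i / BITS_PER_WORD] |= 1UL << (i % BITS_PER_WORD);
    }

    /* Analogue of dvmHeapBitmapClearObjectBit(): unmark it on free. */
    static void clearObjectBit(MiniBitmap *bm, const void *ptr) {
        size_t i = bitIndex(bm, ptr);
        bm->bits[i / BITS_PER_WORD] &= ~(1UL << (i % BITS_PER_WORD));
    }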
324 static bool addInitialHeap(HeapSource *hs, mspace msp, size_t maximumSize)
326 assert(hs != NULL);
328 if (hs->numHeaps != 0) {
331 hs->heaps[0].msp = msp;
332 hs->heaps[0].maximumSize = maximumSize;
333 hs->heaps[0].concurrentStartBytes = SIZE_MAX;
334 hs->heaps[0].base = hs->heapBase;
335 hs->heaps[0].limit = hs->heapBase + hs->heaps[0].maximumSize;
336 hs->numHeaps = 1;
344 static bool addNewHeap(HeapSource *hs)
348 assert(hs != NULL);
349 if (hs->numHeaps >= HEAP_SOURCE_MAX_HEAP_COUNT) {
351 hs->numHeaps, HEAP_SOURCE_MAX_HEAP_COUNT);
362 void *sbrk0 = contiguous_mspace_sbrk0(hs->heaps[0].msp);
364 size_t overhead = base - hs->heaps[0].base;
365 assert(((size_t)hs->heaps[0].base & (SYSTEM_PAGE_SIZE - 1)) == 0);
367 if (overhead + HEAP_MIN_FREE >= hs->maximumSize) {
370 overhead, hs->maximumSize);
374 heap.maximumSize = hs->growthLimit - overhead;
378 heap.msp = createMspace(base, HEAP_MIN_FREE, hs->maximumSize - overhead);
385 hs->heaps[0].maximumSize = overhead;
386 hs->heaps[0].limit = base;
387 mspace msp = hs->heaps[0].msp;
393 memmove(&hs->heaps[1], &hs->heaps[0], hs->numHeaps * sizeof(hs->heaps[0]));
394 hs->heaps[0] = heap;
395 hs->numHeaps++;
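Lines 344-395 show addNewHeap() capping the old active heap (lines 385-387) and then shifting the heaps array up so the new heap becomes heaps[0] (lines 393-395). A sketch of just that bookkeeping step, assuming the caller has already created the new heap's mspace; the types and the prependHeap name belong to this sketch:

    #include <stddef.h>
    #include <string.h>

    #define MAX_HEAPS 4  /* stand-in for HEAP_SOURCE_MAX_HEAP_COUNT */

    typedef struct {
        void *msp;
        char *base;
        char *limit;
        size_t maximumSize;
    } MiniHeap;

    typedef struct {
        MiniHeap heaps[MAX_HEAPS];  /* heaps[0] is the active heap */
        size_t numHeaps;
    } MiniHeapSource;

    /* Shift the existing heaps to higher indices and install the freshly
     * prepared heap as the new active heaps[0], as the memmove() and
     * assignment at lines 393-395 do.  The caller is assumed to have
     * already capped the old active heap (lines 385-387). */
    static int prependHeap(MiniHeapSource *hs, MiniHeap newHeap) {
        if (hs->numHeaps >= MAX_HEAPS) {
            return 0;  /* no room, mirroring the check at line 349 */
        }
        memmove(&hs->heaps[1], &hs->heaps[0],
                hs->numHeaps * sizeof(hs->heaps[0]));
        hs->heaps[0] = newHeap;
        hs->numHeaps++;
        return 1;
    }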
507 HeapSource *hs;
545 hs = (HeapSource *)malloc(sizeof(*hs));
546 if (hs == NULL) {
551 memset(hs, 0, sizeof(*hs));
553 hs->targetUtilization = DEFAULT_HEAP_UTILIZATION;
554 hs->startSize = startSize;
555 hs->maximumSize = maximumSize;
556 hs->growthLimit = growthLimit;
557 hs->idealSize = startSize;
558 hs->softLimit = SIZE_MAX; // no soft limit at first
559 hs->numHeaps = 0;
560 hs->sawZygote = gDvm.zygote;
561 hs->hasGcThread = false;
562 hs->heapBase = (char *)base;
563 hs->heapLength = length;
564 if (!addInitialHeap(hs, msp, growthLimit)) {
568 if (!dvmHeapBitmapInit(&hs->liveBits, base, length, "dalvik-bitmap-1")) {
572 if (!dvmHeapBitmapInit(&hs->markBits, base, length, "dalvik-bitmap-2")) {
574 dvmHeapBitmapDelete(&hs->liveBits);
577 if (!allocMarkStack(&gcHeap->markContext.stack, hs->maximumSize)) {
579 dvmHeapBitmapDelete(&hs->markBits);
580 dvmHeapBitmapDelete(&hs->liveBits);
583 gcHeap->markContext.bitmap = &hs->markBits;
584 gcHeap->heapSource = hs;
586 gHs = hs;
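Lines 564-584 show the startup path acquiring the live bitmap, the mark bitmap, and the mark stack in sequence, and releasing whatever already succeeded when a later step fails. A generic sketch of that acquire-then-unwind pattern; the Resource type and helpers are placeholders of this sketch, not VM calls:

    #include <stdbool.h>
    #include <stdlib.h>

    /* Placeholder resource standing in for the live bitmap, the mark
     * bitmap, and the mark stack; only the acquire/release shape of
     * the startup code is illustrated. */
    typedef struct { void *mem; } Resource;

    static bool acquire(Resource *r, size_t n) {
        r->mem = malloc(n);
        return r->mem != NULL;
    }

    static void release(Resource *r) {
        free(r->mem);
        r->mem = NULL;
    }

    /* Acquire in order; on failure, release what already succeeded in
     * reverse order, as lines 568-580 do with the bitmaps and the
     * mark stack. */
    static bool startupSketch(Resource *live, Resource *mark,
                              Resource *stack, size_t n) {
        if (!acquire(live, n)) {
            return false;
        }
        if (!acquire(mark, n)) {
            release(live);
            return false;
        }
        if (!acquire(stack, n)) {
            release(mark);
            release(live);
            return false;
        }
        return true;
    }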
608 HeapSource *hs = gHs; // use a local to avoid the implicit "volatile"
620 return addNewHeap(hs);
641 HeapSource *hs = (*gcHeap)->heapSource;
642 dvmHeapBitmapDelete(&hs->liveBits);
643 dvmHeapBitmapDelete(&hs->markBits);
645 munmap(hs->heapBase, hs->heapLength);
646 free(hs);
670 HeapSource *hs = gHs;
676 assert(arrayLen >= hs->numHeaps || perHeapStats == NULL);
677 for (size_t i = 0; i < hs->numHeaps; i++) {
678 Heap *const heap = &hs->heaps[i];
707 HeapSource *hs = gHs;
711 assert(numHeaps <= hs->numHeaps);
713 base[i] = (uintptr_t)hs->heaps[i].base;
714 max[i] = MIN((uintptr_t)hs->heaps[i].limit - 1, hs->markBits.max);
796 HeapSource *hs = gHs;
797 Heap* heap = hs2heap(hs);
798 if (heap->bytesAllocated + n > hs->softLimit) {
804 FRACTIONAL_MB(hs->softLimit), n);
815 if (gDvm.gcHeap->gcRunning || !hs->hasGcThread) {
835 static void* heapAllocAndGrow(HeapSource *hs, Heap *heap, size_t n)
861 HeapSource *hs = gHs;
862 Heap* heap = hs2heap(hs);
868 size_t oldIdealSize = hs->idealSize;
869 if (isSoftLimited(hs)) {
873 hs->softLimit = SIZE_MAX;
888 ptr = heapAllocAndGrow(hs, heap, n);
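Lines 796-888 suggest the allocation order: a request that would push bytesAllocated past the soft limit triggers a collection first, and only the heapAllocAndGrow() fallback lifts the soft limit (line 873) and grows the ideal size before retrying. One plausible reading of that ordering as a standalone sketch; the GC and raw-allocator stubs below belong to the sketch, not to the VM:

    #include <stddef.h>
    #include <stdint.h>
    #include <stdlib.h>

    typedef struct {
        size_t bytesAllocated;
        size_t softLimit;   /* SIZE_MAX means no soft limit */
        size_t idealSize;
    } MiniHeap;

    /* Stubs belonging to this sketch: a collection that may reduce
     * bytesAllocated, and the raw allocator underneath. */
    static void runGcStub(MiniHeap *h) { (void)h; }
    static void *rawAllocStub(MiniHeap *h, size_t n) {
        h->bytesAllocated += n;
        return malloc(n);
    }

    /* Respect the soft limit, collect when the request would exceed it,
     * and only as a last resort drop the soft limit and grow the ideal
     * size (the heapAllocAndGrow() path). */
    static void *allocSketch(MiniHeap *h, size_t n) {
        if (h->bytesAllocated + n > h->softLimit) {
            runGcStub(h);  /* try to get back under the limit first */
        }
        if (h->bytesAllocated + n <= h->softLimit) {
            return rawAllocStub(h, n);
        }
        h->softLimit = SIZE_MAX;  /* remove the soft limit, as at line 873 */
        if (h->idealSize < h->bytesAllocated + n) {
            h->idealSize = h->bytesAllocated + n;  /* grow before retrying */
        }
        return rawAllocStub(h, n);
    }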
1000 HeapSource *hs = gHs;
1004 if (dvmHeapSourceContains(obj) && hs->sawZygote) {
1005 Heap *heap = ptr2heap(hs, obj);
1010 return heap != hs->heaps;
1048 static size_t getMaximumSize(const HeapSource *hs)
1050 return hs->growthLimit;
1090 HeapSource *hs = gHs;
1091 size_t ret = oldHeapOverhead(hs, false);
1093 ret += hs->heaps[0].bytesAllocated;
1105 HeapSource *hs = gHs;
1109 return hs->idealSize;
1116 static void setSoftLimit(HeapSource *hs, size_t softLimit)
1122 mspace msp = hs->heaps[0].msp;
1128 hs->softLimit = softLimit;
1134 hs->softLimit = SIZE_MAX;
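Lines 1116-1134 show setSoftLimit() handling two cases: if the requested limit is already below the current mspace footprint, the footprint is frozen where it is and the soft limit recorded; otherwise the allocator may grow to the requested size and any soft limit is cleared (line 1134). A sketch of those branches with the mspace calls replaced by plain fields, which is an assumption of the sketch:

    #include <stddef.h>
    #include <stdint.h>

    /* footprint and maxFootprint replace the mspace_footprint() and
     * mspace_set_max_allowed_footprint() calls in the real code. */
    typedef struct {
        size_t footprint;     /* bytes currently claimed by the allocator */
        size_t maxFootprint;  /* ceiling the allocator may grow to */
        size_t softLimit;     /* SIZE_MAX means no soft limit */
    } MiniHeapSource;

    /* Two cases, following the shape of setSoftLimit() above:
     *  - requested limit below the current footprint: freeze the
     *    footprint where it is and record the soft limit so the
     *    allocation path enforces it;
     *  - requested limit at or above the footprint: let the allocator
     *    grow to it and clear any previous soft limit. */
    static void setSoftLimitSketch(MiniHeapSource *hs, size_t softLimit) {
        if (softLimit < hs->footprint) {
            hs->maxFootprint = hs->footprint;
            hs->softLimit = softLimit;
        } else {
            hs->maxFootprint = softLimit;
            hs->softLimit = SIZE_MAX;
        }
    }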
1147 HeapSource *hs = gHs;
1148 size_t maximumSize = getMaximumSize(hs);
1167 setSoftLimit(hs, activeMax);
1168 hs->idealSize = max;
1187 HeapSource *hs = gHs;
1191 return (float)hs->targetUtilization / (float)HEAP_UTILIZATION_MAX;
1200 HeapSource *hs = gHs;
1213 hs->targetUtilization =
1216 hs->targetUtilization, HEAP_UTILIZATION_MAX, newTarget);
1252 HeapSource *hs = gHs;
1253 Heap* heap = hs2heap(hs);
1266 getUtilizationTarget(currentHeapUsed, hs->targetUtilization);
1276 size_t freeBytes = getAllocLimit(hs);
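Lines 1252-1276 recompute the ideal footprint after a collection from the live size and hs->targetUtilization, which line 1191 shows is a fixed-point fraction of HEAP_UTILIZATION_MAX. A sketch of that arithmetic; the scale value and the two free-space bounds below are assumptions chosen for illustration, not the VM's constants:

    #include <stddef.h>

    /* 1024 is an assumed value for HEAP_UTILIZATION_MAX; the free-space
     * bounds are invented for this sketch. */
    #define UTILIZATION_SCALE 1024
    #define MIN_FREE   (512u * 1024u)
    #define IDEAL_FREE (2u * 1024u * 1024u)

    /* Pick a target footprint so that live/target is roughly
     * utilization/UTILIZATION_SCALE, then clamp the free space to a
     * sane range.  This plays the role of getUtilizationTarget() at
     * line 1266. */
    static size_t utilizationTargetSketch(size_t liveSize, size_t utilization) {
        size_t target = (liveSize / utilization) * UTILIZATION_SCALE;
        if (target > liveSize + IDEAL_FREE) {
            target = liveSize + IDEAL_FREE;
        } else if (target < liveSize + MIN_FREE) {
            target = liveSize + MIN_FREE;
        }
        return target;
    }
    /* Example: 8 MiB live at utilization 512 gives a 16 MiB target
     * before clamping, i.e. roughly half of the heap in use. */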
1310 HeapSource *hs = gHs;
1312 for (size_t i = 0; i < hs->numHeaps; i++) {
1313 Heap *heap = &hs->heaps[i];
1348 HeapSource *hs = gHs;
1349 for (size_t i = hs->numHeaps; i > 0; --i) {
1350 mspace_walk_heap(hs->heaps[i-1].msp, callback, arg);
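Lines 1348-1350 walk every heap, handing each mspace to mspace_walk_heap() with a caller-supplied callback; because heaps[0] is the active heap, the descending loop visits the oldest heap first. A sketch of that iteration order with a hypothetical visitor type; the real callback signature comes from the dlmalloc headers and may differ:

    #include <stddef.h>

    #define MAX_HEAPS 4  /* stand-in for HEAP_SOURCE_MAX_HEAP_COUNT */

    /* Hypothetical visitor type for this sketch. */
    typedef void (*ChunkVisitor)(void *chunkStart, size_t chunkLen, void *arg);

    typedef struct { void *msp; } MiniHeap;
    typedef struct {
        MiniHeap heaps[MAX_HEAPS];  /* heaps[0] is the active heap */
        size_t numHeaps;
    } MiniHeapSource;

    /* Stand-in for mspace_walk_heap(); the real walk is omitted. */
    static void walkOneHeap(void *msp, ChunkVisitor visit, void *arg) {
        (void)msp; (void)visit; (void)arg;
    }

    /* Visit the oldest heap (highest index) first and the active heap
     * (index 0) last, matching "for (i = hs->numHeaps; i > 0; --i)"
     * at line 1349. */
    static void walkAllHeaps(MiniHeapSource *hs, ChunkVisitor visit, void *arg) {
        for (size_t i = hs->numHeaps; i > 0; --i) {
            walkOneHeap(hs->heaps[i - 1].msp, visit, arg);
        }
    }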