
Lines Matching full:usize

799 arena_huge_malloc_stats_update(arena_t *arena, size_t usize)
801 szind_t index = size2index(usize) - nlclasses - NBINS;
806 arena->stats.allocated_huge += usize;
812 arena_huge_malloc_stats_update_undo(arena_t *arena, size_t usize)
814 szind_t index = size2index(usize) - nlclasses - NBINS;
819 arena->stats.allocated_huge -= usize;
825 arena_huge_dalloc_stats_update(arena_t *arena, size_t usize)
827 szind_t index = size2index(usize) - nlclasses - NBINS;
832 arena->stats.allocated_huge -= usize;
838 arena_huge_dalloc_stats_update_undo(arena_t *arena, size_t usize)
840 szind_t index = size2index(usize) - nlclasses - NBINS;
845 arena->stats.allocated_huge += usize;
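
Each of the four stats helpers above maps usize to a per-huge-class stats slot with the same expression, size2index(usize) - nlclasses - NBINS: the global size-class index minus the small-bin and large-class counts, leaving a zero-based huge-class index. A minimal sketch of that subtraction, with made-up class counts standing in for jemalloc's NBINS and nlclasses:

/* Sketch (not jemalloc code): turning a global size-class index into the
 * zero-based huge-class index used by the stats helpers above. The class
 * counts are hypothetical; jemalloc derives them from its size-class tables. */
#include <stdio.h>

#define NBINS     39   /* hypothetical count of small bins    */
#define NLCLASSES 28   /* hypothetical count of large classes */

/* stand-in for size2index(): small, large and huge classes share one
 * global index space, in that order */
static unsigned toy_size2index(unsigned huge_class) {
    return NBINS + NLCLASSES + huge_class;
}

int main(void) {
    unsigned gidx = toy_size2index(3);          /* 4th huge class            */
    unsigned index = gidx - NLCLASSES - NBINS;  /* same subtraction as above */
    printf("huge stats index = %u\n", index);   /* prints 3                  */
    return 0;
}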
851 arena_huge_ralloc_stats_update(arena_t *arena, size_t oldsize, size_t usize)
855 arena_huge_malloc_stats_update(arena, usize);
860 size_t usize)
864 arena_huge_malloc_stats_update_undo(arena, usize);
895 size_t usize, size_t alignment, bool *zero, size_t csize)
906 arena_huge_malloc_stats_update_undo(arena, usize);
907 arena->stats.mapped -= usize;
909 arena_nactive_sub(arena, usize >> LG_PAGE);
917 arena_chunk_alloc_huge(arena_t *arena, size_t usize, size_t alignment,
922 size_t csize = CHUNK_CEILING(usize);
928 arena_huge_malloc_stats_update(arena, usize);
929 arena->stats.mapped += usize;
931 arena_nactive_add(arena, usize >> LG_PAGE);
937 ret = arena_chunk_alloc_huge_hard(arena, &chunk_hooks, usize,
945 arena_chunk_dalloc_huge(arena_t *arena, void *chunk, size_t usize)
950 csize = CHUNK_CEILING(usize);
953 arena_huge_dalloc_stats_update(arena, usize);
954 arena->stats.mapped -= usize;
956 arena_nactive_sub(arena, usize >> LG_PAGE);
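
arena_chunk_alloc_huge() and arena_chunk_dalloc_huge() above account the same request in three units: the chunk-rounded csize handed to the chunk hooks, the exact usize added to or removed from the arena stats, and usize >> LG_PAGE pages tracked in nactive. A small sketch of that rounding, assuming a 4 KiB page and a 2 MiB chunk (both values are assumptions, not jemalloc's configured constants):

/* Sketch (not jemalloc code): the three sizes visible in the huge-chunk
 * paths above, with assumed page/chunk sizes. */
#include <stdio.h>
#include <stddef.h>

#define LG_PAGE          12
#define CHUNKSIZE        ((size_t)2 << 20)
#define CHUNK_CEILING(s) (((s) + CHUNKSIZE - 1) & ~(CHUNKSIZE - 1))

int main(void) {
    size_t usize = (size_t)5 << 20;         /* 5 MiB usable size                 */
    size_t csize = CHUNK_CEILING(usize);    /* 6 MiB actually mapped in chunks   */
    size_t pages = usize >> LG_PAGE;        /* 1280 pages counted in nactive     */
    printf("usize=%zu csize=%zu pages=%zu\n", usize, csize, pages);
    return 0;
}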
964 size_t usize)
967 assert(CHUNK_CEILING(oldsize) == CHUNK_CEILING(usize));
968 assert(oldsize != usize);
972 arena_huge_ralloc_stats_update(arena, oldsize, usize);
973 if (oldsize < usize)
974 arena_nactive_add(arena, (usize - oldsize) >> LG_PAGE);
976 arena_nactive_sub(arena, (oldsize - usize) >> LG_PAGE);
982 size_t usize)
984 size_t udiff = oldsize - usize;
985 size_t cdiff = CHUNK_CEILING(oldsize) - CHUNK_CEILING(usize);
989 arena_huge_ralloc_stats_update(arena, oldsize, usize);
998 CHUNK_CEILING(usize));
1007 void *chunk, size_t oldsize, size_t usize, bool *zero, void *nchunk,
1020 usize);
1036 size_t usize, bool *zero)
1041 size_t udiff = usize - oldsize;
1042 size_t cdiff = CHUNK_CEILING(usize) - CHUNK_CEILING(oldsize);
1048 arena_huge_ralloc_stats_update(arena, oldsize, usize);
1058 chunk, oldsize, usize, zero, nchunk, udiff,
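
The shrink and expand paths above each work with two deltas: udiff, the change in usable size, and cdiff, the change in the chunk-rounded footprint, which is zero when both sizes round up to the same number of chunks. A sketch with the same assumed 2 MiB chunk as in the previous example:

/* Sketch (not jemalloc code): udiff vs. cdiff for a huge ralloc, with an
 * assumed 2 MiB chunk size. */
#include <stdio.h>
#include <stddef.h>

#define CHUNKSIZE        ((size_t)2 << 20)
#define CHUNK_CEILING(s) (((s) + CHUNKSIZE - 1) & ~(CHUNKSIZE - 1))

int main(void) {
    size_t oldsize = (size_t)5 << 20;                         /* 5.0 MiB, rounds to 6 MiB */
    size_t usize   = ((size_t)5 << 20) + ((size_t)512 << 10); /* 5.5 MiB, rounds to 6 MiB */
    size_t udiff = usize - oldsize;                               /* 512 KiB of new usable space */
    size_t cdiff = CHUNK_CEILING(usize) - CHUNK_CEILING(oldsize); /* 0: chunk footprint unchanged */
    printf("udiff=%zu cdiff=%zu\n", udiff, cdiff);
    return 0;
}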
2278 arena_redzone_corruption(void *ptr, size_t usize, bool after,
2284 after ? "after" : "before", ptr, usize, byte);
2350 arena_quarantine_junk_small(void *ptr, size_t usize)
2357 assert(usize <= SMALL_MAXCLASS);
2359 binind = size2index(usize);
2369 size_t usize;
2374 usize = index2size(binind);
2393 if (config_prof && !isthreaded && arena_prof_accum(arena, usize))
2402 memset(ret, 0, usize);
2404 JEMALLOC_VALGRIND_MAKE_MEM_UNDEFINED(ret, usize);
2410 JEMALLOC_VALGRIND_MAKE_MEM_UNDEFINED(ret, usize);
2411 memset(ret, 0, usize);
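
The small path above (lines 2369-2411) recovers usize from the bin index with index2size(binind) and then junk-fills or zeroes exactly usize bytes. The size2index()/index2size() pairing rounds a request up to its size class; a toy version of that round trip, using a made-up class table:

/* Sketch (not jemalloc code): the request-to-class round trip behind
 * size2index()/index2size(), with a tiny hypothetical class table. */
#include <stdio.h>
#include <stddef.h>

static const size_t class_size[] = { 8, 16, 32, 48, 64, 80, 96, 112, 128 };
#define NCLASSES (sizeof(class_size) / sizeof(class_size[0]))

static size_t toy_size2index(size_t size) {
    for (size_t i = 0; i < NCLASSES; i++) {
        if (size <= class_size[i])
            return i;
    }
    return NCLASSES;    /* request too large for this toy table */
}

int main(void) {
    size_t request = 40;
    size_t binind = toy_size2index(request);
    size_t usize = class_size[binind];   /* index2size(): 48 usable bytes */
    printf("request=%zu -> bin %zu, usize=%zu\n", request, binind, usize);
    return 0;
}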
2422 size_t usize;
2429 usize = index2size(binind);
2443 run = arena_run_alloc_large(arena, usize + large_pad, zero);
2456 arena->stats.allocated_large += usize;
2462 idump = arena_prof_accum_locked(arena, usize);
2470 memset(ret, 0xa5, usize);
2472 memset(ret, 0, usize);
2498 arena_palloc_large(tsd_t *tsd, arena_t *arena, size_t usize, size_t alignment,
2508 assert(usize == PAGE_CEILING(usize));
2515 alloc_size = usize + large_pad + alignment - PAGE;
2529 assert(alloc_size >= leadsize + usize);
2530 trailsize = alloc_size - leadsize - usize - large_pad;
2544 arena_run_trim_tail(arena, chunk, run, usize + large_pad +
2545 trailsize, usize + large_pad, false);
2547 if (arena_run_init_large(arena, run, usize + large_pad, zero)) {
2562 szind_t index = size2index(usize) - NBINS;
2566 arena->stats.allocated_large += usize;
2575 memset(ret, 0xa5, usize);
2577 memset(ret, 0, usize);
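
arena_palloc_large() above over-allocates (alloc_size = usize + large_pad + alignment - PAGE, line 2515) and then trims a lead run and a trail run so the returned pointer is aligned. A sketch of that arithmetic, assuming a 4 KiB page, treating large_pad as one page, and using a made-up run address:

/* Sketch (not jemalloc code): the over-allocate-and-trim arithmetic of the
 * aligned large path above. PAGE, LARGE_PAD and the run address are assumed. */
#include <stdio.h>
#include <stddef.h>
#include <stdint.h>

#define PAGE      ((size_t)4096)
#define LARGE_PAD PAGE    /* assumption: one page of padding */

int main(void) {
    size_t usize = 5 * PAGE, alignment = 4 * PAGE;
    size_t alloc_size = usize + LARGE_PAD + alignment - PAGE;  /* worst case: 9 pages */
    uintptr_t run = 0x10003000;                                /* hypothetical run start */
    size_t leadsize = ((run + alignment - 1) & ~(uintptr_t)(alignment - 1)) - run;
    size_t trailsize = alloc_size - leadsize - usize - LARGE_PAD; /* same as line 2530 */
    printf("alloc=%zu lead=%zu trail=%zu\n", alloc_size, leadsize, trailsize);
    return 0;
}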
2584 arena_palloc(tsd_t *tsd, arena_t *arena, size_t usize, size_t alignment,
2589 if (usize <= SMALL_MAXCLASS && (alignment < PAGE || (alignment == PAGE
2590 && (usize & PAGE_MASK) == 0))) {
2592 ret = arena_malloc(tsd, arena, usize, size2index(usize), zero,
2594 } else if (usize <= large_maxclass && alignment <= PAGE) {
2601 ret = arena_malloc(tsd, arena, usize, size2index(usize), zero,
2606 if (likely(usize <= large_maxclass)) {
2607 ret = arena_palloc_large(tsd, arena, usize, alignment,
2610 ret = huge_malloc(tsd, arena, usize, zero, tcache);
2612 ret = huge_palloc(tsd, arena, usize, alignment, zero,
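
arena_palloc() (lines 2584-2612) picks a path purely from usize and alignment: small requests whose alignment is already satisfied go through the ordinary arena_malloc() fast path, page-or-less alignments up to large_maxclass do too, larger alignments fall back to arena_palloc_large(), and anything beyond large_maxclass goes to the huge allocator. A sketch of that dispatch; the threshold values and the cutoff between huge_malloc and huge_palloc are assumptions, not jemalloc's real constants:

/* Sketch (not jemalloc code): the dispatch order readable in arena_palloc()
 * above, with assumed thresholds. */
#include <stdio.h>
#include <stddef.h>

#define PAGE            ((size_t)4096)
#define PAGE_MASK       (PAGE - 1)
#define SMALL_MAXCLASS  ((size_t)14 * 1024)                 /* assumed */
#define LARGE_MAXCLASS  ((size_t)2 * 1024 * 1024 - PAGE)    /* assumed */
#define CHUNKSIZE       ((size_t)2 * 1024 * 1024)           /* assumed */

static const char *palloc_path(size_t usize, size_t alignment) {
    if (usize <= SMALL_MAXCLASS && (alignment < PAGE ||
        (alignment == PAGE && (usize & PAGE_MASK) == 0)))
        return "arena_malloc (small, natural alignment suffices)";
    if (usize <= LARGE_MAXCLASS && alignment <= PAGE)
        return "arena_malloc (large, page alignment is implicit)";
    if (usize <= LARGE_MAXCLASS)
        return "arena_palloc_large (explicitly aligned large run)";
    return alignment <= CHUNKSIZE ? "huge_malloc" : "huge_palloc";
}

int main(void) {
    printf("%s\n", palloc_path(100, 64));
    printf("%s\n", palloc_path((size_t)1 << 20, (size_t)1 << 16));
    printf("%s\n", palloc_path((size_t)8 << 20, (size_t)1 << 16));
    return 0;
}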
2786 arena_dalloc_junk_large(void *ptr, size_t usize)
2790 memset(ptr, 0x5a, usize);
2808 size_t usize = arena_mapbits_large_size_get(chunk, pageind) -
2812 arena_dalloc_junk_large(ptr, usize);
2814 szind_t index = size2index(usize) - NBINS;
2817 arena->stats.allocated_large -= usize;
2904 size_t usize, splitsize, size, flag_dirty, flag_unzeroed_mask;
2906 usize = usize_max;
2907 while (oldsize + followsize < usize)
2908 usize = index2size(size2index(usize)-1);
2909 assert(usize >= usize_min);
2910 assert(usize >= oldsize);
2911 splitsize = usize - oldsize;
2985 arena_ralloc_junk_large(void *ptr, size_t old_usize, size_t usize)
2989 memset((void *)((uintptr_t)ptr + usize), 0x5a,
2990 old_usize - usize);
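
The grow-in-place path (lines 2904-2911) starts from the largest candidate usize_max and steps down one size class at a time (usize = index2size(size2index(usize)-1)) until the free run following the allocation can absorb the growth; the winner minus oldsize becomes splitsize. A toy version of that loop over a made-up table of large classes:

/* Sketch (not jemalloc code): the step-down loop of lines 2904-2911, with a
 * hypothetical table of large size classes. */
#include <stdio.h>
#include <stddef.h>

static const size_t class_size[] =
    { 4096, 8192, 12288, 16384, 20480, 24576, 28672, 32768 };

int main(void) {
    size_t oldsize = 8192;       /* current usable size of the run        */
    size_t followsize = 10000;   /* free space immediately after the run  */
    size_t idx = 7;              /* start from usize_max = 32768          */
    size_t usize = class_size[idx];
    while (oldsize + followsize < usize)     /* same shape as line 2907 */
        usize = class_size[--idx];
    size_t splitsize = usize - oldsize;      /* carved off the following free run */
    printf("usize=%zu splitsize=%zu\n", usize, splitsize);
    return 0;
}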
3087 arena_ralloc_move_helper(tsd_t *tsd, arena_t *arena, size_t usize,
3092 return (arena_malloc(tsd, arena, usize, size2index(usize), zero,
3094 usize = sa2u(usize, alignment);
3095 if (unlikely(usize == 0 || usize > HUGE_MAXCLASS))
3097 return (ipalloct(tsd, usize, alignment, zero, tcache, arena));
3105 size_t usize;
3107 usize = s2u(size);
3108 if (unlikely(usize == 0 || size > HUGE_MAXCLASS))
3111 if (likely(usize <= large_maxclass)) {
3115 if (!arena_ralloc_no_move(tsd, ptr, oldsize, usize, 0, zero))
3123 ret = arena_ralloc_move_helper(tsd, arena, usize, alignment,
3133 copysize = (usize < oldsize) ? usize : oldsize;
3138 ret = huge_ralloc(tsd, arena, ptr, oldsize, usize, alignment,
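
arena_ralloc() (lines 3105-3138) first asks arena_ralloc_no_move() to resize in place; only if that fails does it allocate a new region via arena_ralloc_move_helper(), copy copysize = min(usize, oldsize) bytes, and release the old allocation, with sizes past large_maxclass handed to huge_ralloc(). A generic sketch of that try-in-place-then-move shape; try_resize_in_place(), toy_ralloc() and the plain malloc/free calls are hypothetical stand-ins, not jemalloc's helpers:

/* Sketch (not jemalloc code): the move-fallback shape of arena_ralloc(). */
#include <stdlib.h>
#include <string.h>

static int try_resize_in_place(void *ptr, size_t oldsize, size_t usize) {
    (void)ptr; (void)oldsize; (void)usize;
    return -1;    /* pretend in-place resizing is never possible */
}

static void *toy_ralloc(void *ptr, size_t oldsize, size_t usize) {
    if (try_resize_in_place(ptr, oldsize, usize) == 0)
        return ptr;                                /* no-move path succeeded   */
    void *ret = malloc(usize);
    if (ret == NULL)
        return NULL;                               /* old allocation untouched */
    size_t copysize = (usize < oldsize) ? usize : oldsize;  /* same clamp as line 3133 */
    memcpy(ret, ptr, copysize);
    free(ptr);
    return ret;
}

int main(void) {
    char *p = malloc(64);
    if (p != NULL) {
        memset(p, 'x', 64);
        p = toy_ralloc(p, 64, 128);
    }
    free(p);
    return 0;
}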