/external/jemalloc/include/jemalloc/internal/ |
quarantine.h |
    16    size_t usize;   [member in struct:quarantine_obj_s]
jemalloc_internal.h |
    628   size_t usize = grp_size + mod_size;   [local]
    629   return (usize);
    669   size_t usize = (size + delta_mask) & ~delta_mask;
    670   return (usize);
    704   size_t usize;
    724   usize = s2u(ALIGNMENT_CEILING(size, alignment));
    725   if (usize < LARGE_MINCLASS)
    726   return (usize);
    738   usize = (size <= LARGE_MINCLASS) ? LARGE_MINCLASS : s2u(size);
    744   if (usize + large_pad + alignment - PAGE <= arena_maxrun
    [all...]
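The jemalloc_internal.h hits show jemalloc's size-to-usable-size rounding: a request is rounded up to the next size class by masking with a class-specific delta. A minimal standalone sketch of that rounding step follows; the fixed power-of-two delta is an assumption here, since jemalloc derives it from the size-class group.

    #include <assert.h>
    #include <stdio.h>

    /*
     * Round a request up to the next multiple of delta, mirroring the
     * "(size + delta_mask) & ~delta_mask" hit above. delta must be a
     * power of two for the mask trick to work.
     */
    static size_t
    round_to_class(size_t size, size_t delta)
    {
        size_t delta_mask = delta - 1;
        assert(delta != 0 && (delta & delta_mask) == 0); /* power of two */
        return (size + delta_mask) & ~delta_mask;
    }

    int
    main(void)
    {
        printf("%zu\n", round_to_class(17, 16)); /* prints 32 */
        printf("%zu\n", round_to_class(48, 16)); /* prints 48: already aligned */
        return 0;
    }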
/external/jemalloc/src/ |
quarantine.c |
    102   assert(obj->usize == isalloc(obj->ptr, config_prof));
    104   quarantine->curbytes -= obj->usize;
    122   size_t usize = isalloc(ptr, config_prof);   [local]
    135   if (quarantine->curbytes + usize > opt_quarantine) {
    136   size_t upper_bound = (opt_quarantine >= usize) ? opt_quarantine
    137   - usize : 0;
    146   if (quarantine->curbytes + usize <= opt_quarantine) {
    151   obj->usize = usize;
    152   quarantine->curbytes += usize;
    [all...]
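quarantine.c queues freed objects in a ring, tracks the sum of their usable sizes in curbytes, and evicts the oldest entries until a newly freed object fits under the opt_quarantine byte limit. A simplified sketch of that accounting, with a fixed-capacity ring; the qobj_t/quarantine_t names are illustrative, not jemalloc's.

    #include <stddef.h>

    #define QUARANTINE_SLOTS 64

    typedef struct {
        void   *ptr;
        size_t  usize;   /* usable size recorded at dalloc time */
    } qobj_t;

    typedef struct {
        qobj_t objs[QUARANTINE_SLOTS];
        size_t first, count;
        size_t curbytes;   /* sum of usize over all queued objects */
        size_t maxbytes;   /* cf. opt_quarantine */
    } quarantine_t;

    /* Pop the oldest object; the caller actually frees obj->ptr. */
    static qobj_t *
    quarantine_drain_one(quarantine_t *q)
    {
        qobj_t *obj = &q->objs[q->first];
        q->curbytes -= obj->usize;
        q->first = (q->first + 1) % QUARANTINE_SLOTS;
        q->count--;
        return obj;
    }

    /* Queue a freed pointer, evicting until it fits, as the hits above do. */
    static void
    quarantine_push(quarantine_t *q, void *ptr, size_t usize)
    {
        while (q->count > 0 && q->curbytes + usize > q->maxbytes)
            quarantine_drain_one(q);   /* real code frees the drained ptr */
        if (q->curbytes + usize <= q->maxbytes && q->count < QUARANTINE_SLOTS) {
            size_t slot = (q->first + q->count) % QUARANTINE_SLOTS;
            q->objs[slot].ptr = ptr;
            q->objs[slot].usize = usize;
            q->curbytes += usize;
            q->count++;
        }
        /* else: object larger than the whole quarantine; free it immediately */
    }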
base.c |
    82    size_t csize, usize;   [local]
    92    usize = s2u(csize);
    93    extent_node_init(&key, NULL, NULL, usize, false, false);
huge.c |
    34    huge_malloc(tsd_t *tsd, arena_t *arena, size_t usize, bool zero,
    38    assert(usize == s2u(usize));
    40    return (huge_palloc(tsd, arena, usize, chunksize, zero, tcache));
    44    huge_palloc(tsd_t *tsd, arena_t *arena, size_t usize, size_t alignment,
    54    ausize = sa2u(usize, alignment);
    83    usize, alignment, &is_zeroed)) == NULL) {
    88    extent_node_init(node, arena, ret, usize, is_zeroed, true);
    91    arena_chunk_dalloc_huge(arena, ret, usize);
    104   memset(ret, 0, usize);
    139   size_t usize, usize_next;   [local]
    [all...]
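The huge.c hits show the wrapper pattern: huge_malloc asserts the size is already class-rounded and forwards to the alignment-taking path at the natural chunk alignment. A hedged sketch of that shape; CHUNK_ALIGN and s2u_sketch stand in for jemalloc's chunksize and s2u(), and the posix_memalign body is a placeholder for jemalloc's chunk mapping.

    #include <assert.h>
    #include <stdlib.h>
    #include <string.h>

    #define CHUNK_ALIGN ((size_t)1 << 21)   /* assumption: 2 MiB chunks */
    static size_t s2u_sketch(size_t size) { return size; /* placeholder rounding */ }

    /* The alignment-taking path; jemalloc's huge_palloc() maps chunks instead. */
    static void *
    huge_palloc_sketch(size_t usize, size_t alignment, int zero)
    {
        void *ret;
        if (posix_memalign(&ret, alignment, usize) != 0)
            return NULL;
        if (zero)
            memset(ret, 0, usize);
        return ret;
    }

    /*
     * Unaligned huge allocation is just the aligned path at chunk alignment,
     * after asserting the caller already rounded usize to a size class.
     */
    static void *
    huge_malloc_sketch(size_t usize, int zero)
    {
        assert(usize == s2u_sketch(usize));
        return huge_palloc_sketch(usize, CHUNK_ALIGN, zero);
    }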
ckh.c |
    265   size_t usize;   [local]
    268   usize = sa2u(sizeof(ckhc_t) << lg_curcells, CACHELINE);
    269   if (unlikely(usize == 0 || usize > HUGE_MAXCLASS)) {
    273   tab = (ckhc_t *)ipallocztm(tsd, usize, CACHELINE, true, NULL,
    305   size_t usize;   [local]
    314   usize = sa2u(sizeof(ckhc_t) << lg_curcells, CACHELINE);
    315   if (unlikely(usize == 0 || usize > HUGE_MAXCLASS))
    317   tab = (ckhc_t *)ipallocztm(tsd, usize, CACHELINE, true, NULL, true
    354   size_t mincells, usize;   [local]
    [all...]
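ckh.c sizes its cuckoo-hash table as sa2u(sizeof(ckhc_t) << lg_curcells, CACHELINE) and treats usize == 0 or usize > HUGE_MAXCLASS as overflow, refusing to grow. A small sketch of that overflow-guarded sizing; sa2u_sketch, cell_t, and the HUGE_MAXCLASS value are assumptions standing in for jemalloc's versions.

    #include <stdint.h>
    #include <stdlib.h>

    #define CACHELINE      64
    #define HUGE_MAXCLASS  (SIZE_MAX / 2)   /* assumption; jemalloc derives this */

    typedef struct { uint64_t key, val; } cell_t;   /* stand-in for ckhc_t */

    /* Round size up for alignment; return 0 on overflow (sa2u-style contract). */
    static size_t
    sa2u_sketch(size_t size, size_t alignment)
    {
        size_t usize = (size + alignment - 1) & ~(alignment - 1);
        return (usize < size) ? 0 : usize;   /* wrapped past SIZE_MAX */
    }

    static void *
    table_alloc(unsigned lg_curcells)
    {
        if (lg_curcells >= sizeof(size_t) * 8)
            return NULL;   /* shift count would be undefined */
        size_t bytes = sizeof(cell_t) << lg_curcells;
        if ((bytes >> lg_curcells) != sizeof(cell_t))
            return NULL;   /* shift overflowed size_t */
        size_t usize = sa2u_sketch(bytes, CACHELINE);
        if (usize == 0 || usize > HUGE_MAXCLASS)
            return NULL;   /* the same guard the hits above apply */
        return aligned_alloc(CACHELINE, usize);
    }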
jemalloc.c |
    1574  size_t usize;   [local]
    1816  size_t usize;   [local]
    2165  size_t usize;   [local]
    2267  size_t usize;   [local]
    2340  size_t usize;   [local]
    2353  size_t usize;   [local]
    2367  size_t usize_max, usize;   [local]
    2417  size_t usize, old_usize;   [local]
    2475  size_t usize;   [local]
    2513  size_t usize;   [local]
    2527  size_t usize;   [local]
    2551  size_t usize;   [local]
    [all...]
arena.c |
    799   arena_huge_malloc_stats_update(arena_t *arena, size_t usize)
    801   szind_t index = size2index(usize) - nlclasses - NBINS;
    806   arena->stats.allocated_huge += usize;
    812   arena_huge_malloc_stats_update_undo(arena_t *arena, size_t usize)
    814   szind_t index = size2index(usize) - nlclasses - NBINS;
    819   arena->stats.allocated_huge -= usize;
    825   arena_huge_dalloc_stats_update(arena_t *arena, size_t usize)
    827   szind_t index = size2index(usize) - nlclasses - NBINS;
    832   arena->stats.allocated_huge -= usize;
    838   arena_huge_dalloc_stats_update_undo(arena_t *arena, size_t usize)
    2369  size_t usize;   [local]
    2422  size_t usize;   [local]
    2808  size_t usize = arena_mapbits_large_size_get(chunk, pageind) -   [local]
    2904  size_t usize, splitsize, size, flag_dirty, flag_unzeroed_mask;   [local]
    3105  size_t usize;   [local]
    [all...]
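arena.c keeps per-size-class huge counters indexed by size2index(usize) - nlclasses - NBINS, with a symmetric _undo helper for each update so a failed allocation can roll back an optimistic stats bump. A sketch of that paired update/undo pattern; the NBINS/NLCLASSES/NHCLASSES values and the toy size2index_sketch mapping are placeholders, since jemalloc's size2index is table-driven.

    #include <stddef.h>

    #define NBINS      39   /* placeholder: number of small bins */
    #define NLCLASSES  31   /* placeholder: number of large classes */
    #define NHCLASSES  16   /* placeholder: number of huge classes */

    typedef struct {
        size_t nmalloc[NHCLASSES];
        size_t allocated_huge;
    } huge_stats_t;

    /* Toy mapping: one huge class per power of two starting at 2 MiB. */
    static unsigned
    size2index_sketch(size_t usize)
    {
        unsigned index = NBINS + NLCLASSES;
        size_t class_size = (size_t)1 << 21;
        while (class_size < usize && index < NBINS + NLCLASSES + NHCLASSES - 1) {
            class_size <<= 1;
            index++;
        }
        return index;
    }

    static void
    huge_malloc_stats_update(huge_stats_t *stats, size_t usize)
    {
        unsigned index = size2index_sketch(usize) - NLCLASSES - NBINS;
        stats->nmalloc[index]++;
        stats->allocated_huge += usize;
    }

    /* Exact inverse, applied when the allocation fails after stats were bumped. */
    static void
    huge_malloc_stats_update_undo(huge_stats_t *stats, size_t usize)
    {
        unsigned index = size2index_sketch(usize) - NLCLASSES - NBINS;
        stats->nmalloc[index]--;
        stats->allocated_huge -= usize;
    }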
/external/jemalloc/test/integration/ |
allocated.c |
    18    size_t sz, usize;   [local]
    69    usize = malloc_usable_size(p);
    70    assert_u64_le(a0 + usize, a1,
    86    assert_u64_le(d0 + usize, d1,
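This integration test reads a thread's allocated/deallocated byte counters before and after an operation (a0/a1 and d0/d1 in the hits) and checks each advanced by at least malloc_usable_size(p). The counters come from jemalloc's "thread.allocated" mallctl; a hedged sketch of the same check, assuming an unprefixed jemalloc build linked in.

    #include <assert.h>
    #include <stdint.h>
    #include <stdlib.h>
    #include <jemalloc/jemalloc.h>

    static uint64_t
    thread_allocated(void)
    {
        uint64_t v;
        size_t sz = sizeof(v);
        assert(mallctl("thread.allocated", &v, &sz, NULL, 0) == 0);
        return v;
    }

    int
    main(void)
    {
        uint64_t a0 = thread_allocated();
        void *p = malloc(100);
        assert(p != NULL);
        size_t usize = malloc_usable_size(p);
        uint64_t a1 = thread_allocated();
        /* The counter must advance by at least the usable size of p. */
        assert(a0 + usize <= a1);
        free(p);
        return 0;
    }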
/external/tpm2/ |
TpmFail.c |
    116   UINT32 usize;   [local]
    119   || UINT32_Unmarshal(&usize, buffer, size) != TPM_RC_SUCCESS
    123   header->size = usize;
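TpmFail.c unmarshals a 32-bit size field from the command buffer and checks the return code before trusting it in the header. A minimal sketch of a bounds-checked big-endian UINT32 unmarshal of the same shape; the TPM's real UINT32_Unmarshal lives in its marshalling layer, so the names here are illustrative.

    #include <stdint.h>

    #define RC_SUCCESS       0
    #define RC_INSUFFICIENT  1

    /* Consume 4 big-endian bytes from *buffer if *size allows, advancing both. */
    static int
    uint32_unmarshal_sketch(uint32_t *target, uint8_t **buffer, int32_t *size)
    {
        if (*size < 4)
            return RC_INSUFFICIENT;   /* never read past the caller's buffer */
        *target = ((uint32_t)(*buffer)[0] << 24) | ((uint32_t)(*buffer)[1] << 16)
                | ((uint32_t)(*buffer)[2] << 8)  |  (uint32_t)(*buffer)[3];
        *buffer += 4;
        *size -= 4;
        return RC_SUCCESS;
    }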
/external/v8/src/runtime/ |
runtime-atomics.cc |
    580   uint32_t usize = NumberToUint32(*size);   [local]
    581   return isolate->heap()->ToBoolean(AtomicIsLockFree(usize));
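Here V8 answers Atomics.isLockFree(size) by converting the JS number to uint32 and asking whether atomics of that byte width are lock-free. The analogous query can be sketched in C11 with atomic_is_lock_free on an object of each width; this mirrors the idea, not V8's AtomicIsLockFree implementation.

    #include <stdatomic.h>
    #include <stdbool.h>
    #include <stdint.h>

    /* Is an atomic object of `usize` bytes lock-free on this target? */
    static bool
    atomic_is_lock_free_for_size(uint32_t usize)
    {
        switch (usize) {
        case 1: { atomic_uchar  v = 0; return atomic_is_lock_free(&v); }
        case 2: { atomic_ushort v = 0; return atomic_is_lock_free(&v); }
        case 4: { atomic_uint   v = 0; return atomic_is_lock_free(&v); }
        case 8: { atomic_ullong v = 0; return atomic_is_lock_free(&v); }
        default: return false;   /* Atomics.isLockFree: unsupported sizes are false */
        }
    }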
/external/opencv3/modules/core/src/ |
arithm.cpp |
    5566  Size ssize = _src.size(), lsize = _lowerb.size(), usize = _upperb.size();   [local]
    [all...]
/external/elfutils/src/ |
readelf.c |
    4354  uint8_t usize = *(uint8_t *) data++;   [local]
    [all...]