    Searched defs:usize (Results 1 - 17 of 17)

  /external/jemalloc/include/jemalloc/internal/
quarantine.h 16 size_t usize; member in struct:quarantine_obj_s
prof.h 284 void prof_malloc_sample_object(tsdn_t *tsdn, const void *ptr, size_t usize,
286 void prof_free_sampled_object(tsd_t *tsd, size_t usize, prof_tctx_t *tctx);
334 void prof_tctx_set(tsdn_t *tsdn, const void *ptr, size_t usize,
336 void prof_tctx_reset(tsdn_t *tsdn, const void *ptr, size_t usize,
338 bool prof_sample_accum_update(tsd_t *tsd, size_t usize, bool commit,
340 prof_tctx_t *prof_alloc_prep(tsd_t *tsd, size_t usize, bool prof_active,
342 void prof_malloc(tsdn_t *tsdn, const void *ptr, size_t usize,
344 void prof_realloc(tsd_t *tsd, const void *ptr, size_t usize,
347 void prof_free(tsd_t *tsd, const void *ptr, size_t usize);
411 prof_tctx_set(tsdn_t *tsdn, const void *ptr, size_t usize, prof_tctx_t *tctx
525 prof_malloc_sample_object(tsd_tsdn(tsd), ptr, usize, tctx); local
527 prof_tctx_reset(tsd_tsdn(tsd), ptr, usize, old_ptr, old_tctx); local
    [all...]
jemalloc_internal.h 659 size_t usize = (psz + delta_mask) & ~delta_mask; local
660 return (usize);
739 size_t usize = grp_size + mod_size; local
740 return (usize);
780 size_t usize = (size + delta_mask) & ~delta_mask; local
781 return (usize);
815 size_t usize; local
835 usize = s2u(ALIGNMENT_CEILING(size, alignment));
836 if (usize < LARGE_MINCLASS)
837 return (usize);
1146 size_t usize = isalloc(tsdn, ptr, false); local
1207 size_t usize, copysize; local
    [all...]
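
The s2u()/psz2u() lines above round a request up to a size-class boundary with a mask. A minimal standalone sketch of that round-up, assuming a power-of-two class spacing of 4096 (the real spacing comes from jemalloc's size-class tables, not a fixed constant):

    #include <stddef.h>
    #include <stdio.h>

    /* Round size up to the next multiple of delta (delta must be a power of
     * two), mirroring the "(size + delta_mask) & ~delta_mask" pattern in
     * s2u()/psz2u(). */
    static size_t
    round_up_pow2(size_t size, size_t delta)
    {
        size_t delta_mask = delta - 1;

        return ((size + delta_mask) & ~delta_mask);
    }

    int
    main(void)
    {
        printf("%zu\n", round_up_pow2(5000, 4096));  /* prints 8192 */
        return (0);
    }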
  /external/jemalloc/test/integration/
allocated.c 18 size_t sz, usize; local
70 usize = malloc_usable_size(p);
71 assert_u64_le(a0 + usize, a1,
87 assert_u64_le(d0 + usize, d1,
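
The allocated.c test compares the thread's allocated/deallocated byte counters against malloc_usable_size(p); the point of usize here is that it is the size of the backing size class, not the requested size. A small example of the same call (the usable size printed depends on the allocator and its configuration):

    #include <stdio.h>
    #include <stdlib.h>
    #include <malloc.h>  /* malloc_usable_size() on glibc; jemalloc exports it too */

    int
    main(void)
    {
        void *p = malloc(5000);

        if (p == NULL)
            return (1);
        /* Usable size is at least the 5000 bytes requested; the exact value
         * (e.g. 8192) depends on the allocator's size classes. */
        printf("requested 5000, usable %zu\n", malloc_usable_size(p));
        free(p);
        return (0);
    }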
  /external/jemalloc/src/
base.c 93 size_t csize, usize; local
103 usize = s2u(csize);
104 extent_node_init(&key, NULL, NULL, usize, 0, false, false);
quarantine.c 100 assert(obj->usize == isalloc(tsdn, obj->ptr, config_prof));
102 quarantine->curbytes -= obj->usize;
120 size_t usize = isalloc(tsd_tsdn(tsd), ptr, config_prof); local
133 if (quarantine->curbytes + usize > opt_quarantine) {
134 size_t upper_bound = (opt_quarantine >= usize) ? opt_quarantine
135 - usize : 0;
144 if (quarantine->curbytes + usize <= opt_quarantine) {
149 obj->usize = usize;
150 quarantine->curbytes += usize;
    [all...]
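
The quarantine.c hits show a byte budget: curbytes tracks quarantined bytes, and before stashing a freed object of usable size usize, older entries are drained until the new object fits under opt_quarantine. A toy model of just that accounting (the drain is stubbed out; the names mirror the snippets but none of this is jemalloc's actual code):

    #include <stddef.h>

    static size_t curbytes;                  /* bytes currently quarantined */
    static size_t opt_quarantine = 1 << 20;  /* assumed 1 MiB budget */

    static void
    quarantine_drain(size_t upper_bound)
    {
        /* Stub: evict oldest objects until curbytes <= upper_bound. */
        if (curbytes > upper_bound)
            curbytes = upper_bound;
    }

    static void
    quarantine_insert(size_t usize)
    {
        if (curbytes + usize > opt_quarantine) {
            /* Leave exactly enough room for the new object, or drain
             * everything if the object exceeds the whole budget. */
            size_t upper_bound = (opt_quarantine >= usize) ?
                opt_quarantine - usize : 0;
            quarantine_drain(upper_bound);
        }
        if (curbytes + usize <= opt_quarantine)
            curbytes += usize;  /* object fits; account for it */
        /* else: object is larger than the budget and is not quarantined. */
    }

    int
    main(void)
    {
        quarantine_insert(5000);
        quarantine_insert(100000);
        return (0);
    }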
huge.c 43 huge_malloc(tsdn_t *tsdn, arena_t *arena, size_t usize, bool zero)
46 assert(usize == s2u(usize));
48 return (huge_palloc(tsdn, arena, usize, chunksize, zero));
52 huge_palloc(tsdn_t *tsdn, arena_t *arena, size_t usize, size_t alignment,
66 ausize = sa2u(usize, alignment);
99 arena, usize, alignment, &sn, &is_zeroed)) == NULL) {
104 extent_node_init(node, arena, ret, usize, sn, is_zeroed, true);
107 arena_chunk_dalloc_huge(tsdn, arena, ret, usize, sn);
120 memset(ret, 0, usize);
155 size_t usize, usize_next; local
    [all...]
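
huge_malloc() asserts usize == s2u(usize): callers must pass a size already rounded to a size class, which is well-defined because the rounding is idempotent. A toy check of that property, using an assumed 4096-byte spacing in place of the real s2u():

    #include <assert.h>
    #include <stddef.h>

    /* Stand-in for s2u() with an assumed 4096-byte class spacing. */
    static size_t
    toy_s2u(size_t size)
    {
        size_t delta_mask = 4096 - 1;

        return ((size + delta_mask) & ~delta_mask);
    }

    int
    main(void)
    {
        size_t usize = toy_s2u(5000);       /* 8192 */

        assert(usize == toy_s2u(usize));    /* rounding again is a no-op */
        return (0);
    }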
ckh.c 266 size_t usize; local
269 usize = sa2u(sizeof(ckhc_t) << lg_curcells, CACHELINE);
270 if (unlikely(usize == 0 || usize > HUGE_MAXCLASS)) {
274 tab = (ckhc_t *)ipallocztm(tsd_tsdn(tsd), usize, CACHELINE,
306 size_t usize; local
315 usize = sa2u(sizeof(ckhc_t) << lg_curcells, CACHELINE);
316 if (unlikely(usize == 0 || usize > HUGE_MAXCLASS))
318 tab = (ckhc_t *)ipallocztm(tsd_tsdn(tsd), usize, CACHELINE, true, NULL
355 size_t mincells, usize; local
    [all...]
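
Both ckh.c hits apply the same guard: sa2u() reports overflow by returning 0, and anything past HUGE_MAXCLASS is also treated as failure before the table is allocated. A sketch of that guard with placeholder helpers (toy_sa2u() and TOY_MAXCLASS are assumptions for illustration, not jemalloc's API):

    #include <stdbool.h>
    #include <stddef.h>
    #include <stdint.h>

    #define TOY_MAXCLASS ((size_t)1 << 30)  /* assumed upper size-class bound */

    /* Stand-in for sa2u(): round up to alignment, returning 0 on overflow. */
    static size_t
    toy_sa2u(size_t size, size_t alignment)
    {
        size_t mask = alignment - 1;

        if (size > SIZE_MAX - mask)
            return (0);  /* overflow */
        return ((size + mask) & ~mask);
    }

    /* Mirrors the "usize == 0 || usize > HUGE_MAXCLASS" check made before
     * the table is allocated. */
    static bool
    table_size_ok(size_t nbytes)
    {
        size_t usize = toy_sa2u(nbytes, 64);  /* 64 = assumed cache line */

        return (usize != 0 && usize <= TOY_MAXCLASS);
    }

    int
    main(void)
    {
        return (table_size_ok(1 << 20) ? 0 : 1);
    }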
jemalloc.c 1571 prof_malloc(tsd_tsdn(tsd), p, usize, tctx); local
1697 prof_malloc(tsd_tsdn(tsd), p, usize, tctx); local
1708 size_t usize; local
1885 size_t usize; local
2215 prof_malloc(tsd_tsdn(tsd), p, *usize, tctx); local
2287 size_t usize; local
2376 size_t usize; local
2452 size_t usize; local
2465 size_t usize; local
2479 size_t usize_max, usize; local
2529 size_t usize, old_usize; local
2589 size_t usize; local
2637 size_t usize; local
2654 size_t usize; local
2683 size_t usize; local
    [all...]
arena.c 811 arena_huge_malloc_stats_update(arena_t *arena, size_t usize)
813 szind_t index = size2index(usize) - nlclasses - NBINS;
818 arena->stats.allocated_huge += usize;
824 arena_huge_malloc_stats_update_undo(arena_t *arena, size_t usize)
826 szind_t index = size2index(usize) - nlclasses - NBINS;
831 arena->stats.allocated_huge -= usize;
837 arena_huge_dalloc_stats_update(arena_t *arena, size_t usize)
839 szind_t index = size2index(usize) - nlclasses - NBINS;
844 arena->stats.allocated_huge -= usize;
850 arena_huge_reset_stats_cancel(arena_t *arena, size_t usize)
1879 size_t usize = isalloc(tsd_tsdn(tsd), ptr, local
1941 size_t usize; local
2569 size_t usize; local
2622 size_t usize; local
3017 size_t usize = arena_mapbits_large_size_get(chunk, pageind) - local
3115 size_t usize, splitsize, size, flag_dirty, flag_unzeroed_mask; local
3317 size_t usize; local
    [all...]
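
The arena.c stats helpers all derive a huge-class index as size2index(usize) - nlclasses - NBINS: size2index() numbers every class consecutively (small bins, then large, then huge), so subtracting the two preceding group counts gives the slot in the huge-stats array. A toy illustration with assumed class counts (the real values are configuration-dependent):

    #include <stdio.h>

    #define TOY_NBINS     36  /* assumed number of small bin classes */
    #define TOY_NLCLASSES 28  /* assumed number of large classes */

    static unsigned nmalloc_huge[16];  /* one counter per huge size class */

    /* Map a global size-class index onto the huge-only stats array, in the
     * spirit of arena_huge_malloc_stats_update(). */
    static void
    toy_huge_malloc_stats_update(unsigned global_index)
    {
        unsigned index = global_index - TOY_NLCLASSES - TOY_NBINS;

        nmalloc_huge[index]++;
    }

    int
    main(void)
    {
        toy_huge_malloc_stats_update(TOY_NBINS + TOY_NLCLASSES + 3);
        printf("huge class 3: %u allocation(s)\n", nmalloc_huge[3]);
        return (0);
    }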
  /external/tpm2/
TpmFail.c 116 UINT32 usize; local
119 || UINT32_Unmarshal(&usize, buffer, size) != TPM_RC_SUCCESS
123 header->size = usize;
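
TpmFail.c reads the 32-bit size field of the command header into a local usize via UINT32_Unmarshal() before copying it into header->size. A rough standalone sketch of that kind of length-checked, big-endian unmarshal step (the function and parameter names here are placeholders, not the TPM2 marshaling API):

    #include <stdint.h>

    /* Consume a big-endian 32-bit value from *buf, checking the remaining
     * length first; returns 0 on success, -1 if the buffer is too short. */
    static int
    unmarshal_u32(uint32_t *out, const uint8_t **buf, int32_t *remaining)
    {
        const uint8_t *p = *buf;

        if (*remaining < 4)
            return (-1);
        *out = ((uint32_t)p[0] << 24) | ((uint32_t)p[1] << 16) |
            ((uint32_t)p[2] << 8) | (uint32_t)p[3];
        *buf += 4;
        *remaining -= 4;
        return (0);
    }

    int
    main(void)
    {
        const uint8_t data[] = { 0x00, 0x00, 0x01, 0x00 };
        const uint8_t *cursor = data;
        int32_t remaining = sizeof(data);
        uint32_t usize = 0;

        return (unmarshal_u32(&usize, &cursor, &remaining) == 0 &&
            usize == 256 ? 0 : 1);
    }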
  /external/libcups/cups/
pwg-media.c 272 char usize[12 + 1 + 12 + 3], /* Unit size: NNNNNNNNNNNNxNNNNNNNNNNNNuu */ local
314 name = usize;
358 uptr = usize;
359 (*format)(uptr, sizeof(usize) - (size_t)(uptr - usize), width);
362 (*format)(uptr, sizeof(usize) - (size_t)(uptr - usize), length);
366 * Safe because usize can hold up to 12 + 1 + 12 + 4 bytes.
375 snprintf(keyword, keysize, "%s_%s_%s", prefix, name, usize);
    [all...]
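
pwg-media.c formats the dimensions into usize ("WIDTHxLENGTHuu") and then snprintf()s it into the self-describing PWG keyword prefix_name_size. A rough standalone sketch of that final formatting step (the values and the "iso"/"a4" names are illustrative, not taken from the surrounding code):

    #include <stdio.h>

    int
    main(void)
    {
        char usize[12 + 1 + 12 + 3];  /* "WIDTHxLENGTHuu" */
        char keyword[128];

        snprintf(usize, sizeof(usize), "%dx%d%s", 210, 297, "mm");
        snprintf(keyword, sizeof(keyword), "%s_%s_%s", "iso", "a4", usize);
        puts(keyword);  /* iso_a4_210x297mm */
        return (0);
    }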
  /external/v8/src/runtime/
runtime-atomics.cc 580 uint32_t usize = NumberToUint32(*size); local
581 return isolate->heap()->ToBoolean(AtomicIsLockFree(usize));
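
runtime-atomics.cc coerces the JS argument to a uint32_t and returns whether an atomic access of that byte size is lock-free. C11 exposes the per-object form of the same question; a small analogue for two common sizes (the answers depend on the target architecture):

    #include <stdatomic.h>
    #include <stdio.h>

    int
    main(void)
    {
        atomic_uint_least8_t a8 = 0;
        atomic_uint_least32_t a32 = 0;

        printf("1-byte atomic lock-free: %d\n", (int)atomic_is_lock_free(&a8));
        printf("4-byte atomic lock-free: %d\n", (int)atomic_is_lock_free(&a32));
        return (0);
    }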
  /device/linaro/bootloader/edk2/AppPkg/Applications/Python/Python-2.7.10/Objects/
unicodeobject.c 3928 int usize = 0; local
    [all...]
  /device/linaro/bootloader/edk2/AppPkg/Applications/Python/Python-2.7.2/Objects/
unicodeobject.c 953 Py_ssize_t usize; local
958 usize = PyUnicode_GET_SIZE(*callresult);
959 for (upos = 0; upos<usize;)
3871 int usize = 0; local
    [all...]
  /external/pcre/dist2/src/
pcre2test.c 4791 size_t bsize, usize; local
    [all...]
  /external/elfutils/src/
readelf.c 4354 uint8_t usize = *(uint8_t *) data++; local
    [all...]
