    Searched refs: usize (Results 1 - 25 of 33)


  /external/jemalloc/src/
valgrind.c 9 valgrind_make_mem_noaccess(void *ptr, size_t usize)
12 VALGRIND_MAKE_MEM_NOACCESS(ptr, usize);
16 valgrind_make_mem_undefined(void *ptr, size_t usize)
19 VALGRIND_MAKE_MEM_UNDEFINED(ptr, usize);
23 valgrind_make_mem_defined(void *ptr, size_t usize)
26 VALGRIND_MAKE_MEM_DEFINED(ptr, usize);
30 valgrind_freelike_block(void *ptr, size_t usize)
33 VALGRIND_FREELIKE_BLOCK(ptr, usize);
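
The valgrind.c hits above are thin wrappers that forward a pointer and its usable size (usize) to Valgrind's memcheck client requests. A minimal sketch of the same pattern is below; it assumes the Valgrind headers are installed, and the function names are illustrative, not jemalloc's.

    /* Sketch: annotate [ptr, ptr + usize) for memcheck. Requires the
     * valgrind-devel headers; the requests are no-ops outside Valgrind. */
    #include <stddef.h>
    #include <valgrind/memcheck.h>

    static void
    my_make_mem_noaccess(void *ptr, size_t usize)
    {
        /* Mark the range as unaddressable. */
        VALGRIND_MAKE_MEM_NOACCESS(ptr, usize);
    }

    static void
    my_make_mem_undefined(void *ptr, size_t usize)
    {
        /* Addressable, but contents are undefined. */
        VALGRIND_MAKE_MEM_UNDEFINED(ptr, usize);
    }

    static void
    my_make_mem_defined(void *ptr, size_t usize)
    {
        /* Addressable and fully initialized. */
        VALGRIND_MAKE_MEM_DEFINED(ptr, usize);
    }
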
huge.c 43 huge_malloc(tsdn_t *tsdn, arena_t *arena, size_t usize, bool zero)
46 assert(usize == s2u(usize));
48 return (huge_palloc(tsdn, arena, usize, chunksize, zero));
52 huge_palloc(tsdn_t *tsdn, arena_t *arena, size_t usize, size_t alignment,
66 ausize = sa2u(usize, alignment);
99 arena, usize, alignment, &sn, &is_zeroed)) == NULL) {
104 extent_node_init(node, arena, ret, usize, sn, is_zeroed, true);
107 arena_chunk_dalloc_huge(tsdn, arena, ret, usize, sn);
120 memset(ret, 0, usize);
155 size_t usize, usize_next; local
    [all...]
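
huge_malloc above asserts that the caller has already rounded the request to a size class (usize == s2u(usize)) before delegating to huge_palloc with chunk alignment. The sketch below illustrates that contract; round_to_class() is a hypothetical stand-in for jemalloc's s2u(), using a coarse 4 KiB step rather than real size classes.

    #include <assert.h>
    #include <stddef.h>
    #include <stdlib.h>

    /* Hypothetical stand-in for s2u(): round up to a 4 KiB step. */
    static size_t
    round_to_class(size_t size)
    {
        const size_t step = 4096;
        return (size + step - 1) & ~(step - 1);
    }

    static void *
    huge_alloc_sketch(size_t usize, size_t alignment)
    {
        void *ret;

        /* Callers must pass a size that is already a size class. */
        assert(usize == round_to_class(usize));
        /* Aligned allocation path, loosely analogous to huge_palloc(). */
        if (posix_memalign(&ret, alignment, usize) != 0)
            return NULL;
        return ret;
    }
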
jemalloc.c 1571 prof_malloc(tsd_tsdn(tsd), p, usize, tctx); local
1697 prof_malloc(tsd_tsdn(tsd), p, usize, tctx); local
1708 size_t usize; local
1885 size_t usize; local
2215 prof_malloc(tsd_tsdn(tsd), p, *usize, tctx); local
2287 size_t usize; local
2376 size_t usize; local
2452 size_t usize; local
2465 size_t usize; local
2479 size_t usize_max, usize; local
2529 size_t usize, old_usize; local
2589 size_t usize; local
2637 size_t usize; local
2654 size_t usize; local
2683 size_t usize; local
    [all...]
quarantine.c 100 assert(obj->usize == isalloc(tsdn, obj->ptr, config_prof));
102 quarantine->curbytes -= obj->usize;
120 size_t usize = isalloc(tsd_tsdn(tsd), ptr, config_prof); local
133 if (quarantine->curbytes + usize > opt_quarantine) {
134 size_t upper_bound = (opt_quarantine >= usize) ? opt_quarantine
135 - usize : 0;
144 if (quarantine->curbytes + usize <= opt_quarantine) {
149 obj->usize = usize;
150 quarantine->curbytes += usize;
    [all...]
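
The quarantine.c hits show delayed-free accounting: each queued object records its usize, curbytes tracks the running total, and older entries are drained until the new object fits under the opt_quarantine byte budget. A simplified sketch of that budget check follows; the structure and drain callback are hypothetical, and no real queue is shown.

    #include <stdbool.h>
    #include <stddef.h>

    /* Hypothetical, simplified quarantine bookkeeping. */
    struct quarantine_sketch {
        size_t curbytes;   /* bytes currently held back from reuse */
        size_t limit;      /* analogous to opt_quarantine */
    };

    /* Returns true if an object of usize bytes may be queued after draining
     * older entries; drain_one() must return the drained object's usize. */
    static bool
    quarantine_reserve(struct quarantine_sketch *q, size_t usize,
        size_t (*drain_one)(struct quarantine_sketch *))
    {
        /* If usize alone exceeds the budget, drain everything (bound 0). */
        size_t upper_bound = (q->limit >= usize) ? q->limit - usize : 0;

        while (q->curbytes > upper_bound) {
            size_t freed = drain_one(q);
            if (freed == 0)
                break;              /* nothing left to drain */
            q->curbytes -= freed;
        }
        if (q->curbytes + usize <= q->limit) {
            q->curbytes += usize;
            return true;
        }
        return false;
    }
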
arena.c 811 arena_huge_malloc_stats_update(arena_t *arena, size_t usize)
813 szind_t index = size2index(usize) - nlclasses - NBINS;
818 arena->stats.allocated_huge += usize;
824 arena_huge_malloc_stats_update_undo(arena_t *arena, size_t usize)
826 szind_t index = size2index(usize) - nlclasses - NBINS;
831 arena->stats.allocated_huge -= usize;
837 arena_huge_dalloc_stats_update(arena_t *arena, size_t usize)
839 szind_t index = size2index(usize) - nlclasses - NBINS;
844 arena->stats.allocated_huge -= usize;
850 arena_huge_reset_stats_cancel(arena_t *arena, size_t usize)
1879 size_t usize = isalloc(tsd_tsdn(tsd), ptr, local
1941 size_t usize; local
2569 size_t usize; local
2622 size_t usize; local
3017 size_t usize = arena_mapbits_large_size_get(chunk, pageind) - local
3115 size_t usize, splitsize, size, flag_dirty, flag_unzeroed_mask; local
3317 size_t usize; local
    [all...]
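
arena.c's huge stats helpers map a usize to a huge-class index (size2index(usize) - nlclasses - NBINS) and then adjust the per-class counters and the allocated_huge total. The sketch below mirrors that bookkeeping shape only; the class count and the bucketing math are hypothetical, not jemalloc's actual size2index().

    #include <stddef.h>
    #include <stdint.h>

    #define HUGE_NCLASSES 8   /* hypothetical number of huge size classes */

    struct huge_stats_sketch {
        size_t   allocated_huge;            /* total live huge bytes */
        uint64_t nmalloc[HUGE_NCLASSES];    /* per-class allocation counts */
        size_t   curhchunks[HUGE_NCLASSES]; /* per-class live object counts */
    };

    /* Hypothetical bucketing: power-of-two classes above a 2 MiB floor. */
    static unsigned
    huge_index(size_t usize)
    {
        unsigned idx = 0;
        size_t bound = 2 * 1024 * 1024;

        while (bound < usize && idx + 1 < HUGE_NCLASSES) {
            bound <<= 1;
            idx++;
        }
        return idx;
    }

    static void
    huge_malloc_stats_update(struct huge_stats_sketch *s, size_t usize)
    {
        unsigned idx = huge_index(usize);

        s->nmalloc[idx]++;
        s->curhchunks[idx]++;
        s->allocated_huge += usize;
    }
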
ckh.c 266 size_t usize; local
269 usize = sa2u(sizeof(ckhc_t) << lg_curcells, CACHELINE);
270 if (unlikely(usize == 0 || usize > HUGE_MAXCLASS)) {
274 tab = (ckhc_t *)ipallocztm(tsd_tsdn(tsd), usize, CACHELINE,
306 size_t usize; local
315 usize = sa2u(sizeof(ckhc_t) << lg_curcells, CACHELINE);
316 if (unlikely(usize == 0 || usize > HUGE_MAXCLASS))
318 tab = (ckhc_t *)ipallocztm(tsd_tsdn(tsd), usize, CACHELINE, true, NULL
355 size_t mincells, usize; local
    [all...]
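
The ckh.c (cuckoo hash) hits compute the table footprint as sa2u(sizeof(ckhc_t) << lg_curcells, CACHELINE) and refuse to grow when the result is 0 (overflow) or above HUGE_MAXCLASS, only then allocating a zeroed, cacheline-aligned table. A sketch of that overflow-checked sizing follows; the cell type, the SIZE_CAP limit, and the rounding helper are illustrative stand-ins.

    #include <stdint.h>
    #include <stdlib.h>
    #include <string.h>

    #define CACHELINE 64
    #define SIZE_CAP  ((size_t)1 << 30)   /* stand-in for HUGE_MAXCLASS */

    struct cell { void *key; void *data; };   /* stands in for ckhc_t */

    /* Bytes needed for 2^lg_cells cells, rounded up to the cache line.
     * Returns 0 on overflow, mirroring sa2u()'s error convention. */
    static size_t
    table_bytes(unsigned lg_cells)
    {
        if (lg_cells >= sizeof(size_t) * 8)
            return 0;
        size_t cells = (size_t)1 << lg_cells;
        if (cells > SIZE_MAX / sizeof(struct cell))
            return 0;
        size_t raw = cells * sizeof(struct cell);
        if (raw > SIZE_MAX - (CACHELINE - 1))
            return 0;
        return (raw + CACHELINE - 1) & ~((size_t)CACHELINE - 1);
    }

    static struct cell *
    table_alloc(unsigned lg_cells)
    {
        size_t usize = table_bytes(lg_cells);
        if (usize == 0 || usize > SIZE_CAP)
            return NULL;                     /* refuse to grow */
        struct cell *tab = aligned_alloc(CACHELINE, usize);
        if (tab != NULL)
            memset(tab, 0, usize);           /* zeroed, like ipallocztm(..., zero=true) */
        return tab;
    }
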
base.c 93 size_t csize, usize; local
103 usize = s2u(csize);
104 extent_node_init(&key, NULL, NULL, usize, 0, false, false);
prof.c 226 prof_malloc_sample_object(tsdn_t *tsdn, const void *ptr, size_t usize,
230 prof_tctx_set(tsdn, ptr, usize, tctx);
234 tctx->cnts.curbytes += usize;
237 tctx->cnts.accumbytes += usize;
244 prof_free_sampled_object(tsd_t *tsd, size_t usize, prof_tctx_t *tctx)
249 assert(tctx->cnts.curbytes >= usize);
251 tctx->cnts.curbytes -= usize;
    [all...]
  /external/jemalloc/include/jemalloc/internal/
valgrind.h 10 * jemalloc, so it is critical that all callers of these macros provide usize
16 #define JEMALLOC_VALGRIND_MAKE_MEM_NOACCESS(ptr, usize) do { \
18 valgrind_make_mem_noaccess(ptr, usize); \
20 #define JEMALLOC_VALGRIND_MAKE_MEM_UNDEFINED(ptr, usize) do { \
22 valgrind_make_mem_undefined(ptr, usize); \
24 #define JEMALLOC_VALGRIND_MAKE_MEM_DEFINED(ptr, usize) do { \
26 valgrind_make_mem_defined(ptr, usize); \
33 #define JEMALLOC_VALGRIND_MALLOC(cond, tsdn, ptr, usize, zero) do { \
35 VALGRIND_MALLOCLIKE_BLOCK(ptr, usize, p2rz(tsdn, ptr), \
51 #define JEMALLOC_VALGRIND_REALLOC(moved, tsdn, ptr, usize, ptr_null,
    [all...]
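
The valgrind.h macros wrap the helpers above in do { ... } while (0) blocks so that builds without Valgrind support compile every annotation away. The sketch below shows that gating pattern in general form; MY_HAVE_VALGRIND and the macro name are hypothetical, not jemalloc's config_valgrind machinery.

    /* Compile the annotation out entirely when Valgrind support is absent. */
    #include <stddef.h>

    #ifdef MY_HAVE_VALGRIND
    #  include <valgrind/memcheck.h>
    #  define MY_MAKE_MEM_UNDEFINED(ptr, usize) do {          \
            VALGRIND_MAKE_MEM_UNDEFINED((ptr), (usize));      \
       } while (0)
    #else
    #  define MY_MAKE_MEM_UNDEFINED(ptr, usize) do {          \
            (void)(ptr);                                      \
            (void)(usize);                                    \
       } while (0)
    #endif
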
huge.h 12 void *huge_malloc(tsdn_t *tsdn, arena_t *arena, size_t usize, bool zero);
13 void *huge_palloc(tsdn_t *tsdn, arena_t *arena, size_t usize,
18 size_t usize, size_t alignment, bool zero, tcache_t *tcache);
prof.h 284 void prof_malloc_sample_object(tsdn_t *tsdn, const void *ptr, size_t usize,
286 void prof_free_sampled_object(tsd_t *tsd, size_t usize, prof_tctx_t *tctx);
334 void prof_tctx_set(tsdn_t *tsdn, const void *ptr, size_t usize,
336 void prof_tctx_reset(tsdn_t *tsdn, const void *ptr, size_t usize,
338 bool prof_sample_accum_update(tsd_t *tsd, size_t usize, bool commit,
340 prof_tctx_t *prof_alloc_prep(tsd_t *tsd, size_t usize, bool prof_active,
342 void prof_malloc(tsdn_t *tsdn, const void *ptr, size_t usize,
344 void prof_realloc(tsd_t *tsd, const void *ptr, size_t usize,
347 void prof_free(tsd_t *tsd, const void *ptr, size_t usize);
411 prof_tctx_set(tsdn_t *tsdn, const void *ptr, size_t usize, prof_tctx_t *tctx
525 prof_malloc_sample_object(tsd_tsdn(tsd), ptr, usize, tctx); local
527 prof_tctx_reset(tsd_tsdn(tsd), ptr, usize, old_ptr, old_tctx); local
    [all...]
jemalloc_internal.h 659 size_t usize = (psz + delta_mask) & ~delta_mask; local
660 return (usize);
739 size_t usize = grp_size + mod_size; local
740 return (usize);
780 size_t usize = (size + delta_mask) & ~delta_mask; local
781 return (usize);
815 size_t usize; local
835 usize = s2u(ALIGNMENT_CEILING(size, alignment));
836 if (usize < LARGE_MINCLASS)
837 return (usize);
1146 size_t usize = isalloc(tsdn, ptr, false); local
1207 size_t usize, copysize; local
    [all...]
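
The jemalloc_internal.h hits are the size-class arithmetic itself: usize = (size + delta_mask) & ~delta_mask rounds a request up to the next multiple of the class spacing, which works because the spacing is a power of two. A small self-contained illustration of that rounding:

    #include <assert.h>
    #include <stddef.h>

    /* Round size up to the next multiple of a power-of-two delta.
     * With delta = 16: 1..16 -> 16, 17 -> 32, 33 -> 48, and so on. */
    static size_t
    round_up(size_t size, size_t delta)
    {
        size_t delta_mask = delta - 1;             /* e.g. 16 -> 0x0f */
        size_t usize = (size + delta_mask) & ~delta_mask;
        return usize;
    }

    int
    main(void)
    {
        assert(round_up(1, 16) == 16);
        assert(round_up(17, 16) == 32);
        assert(round_up(32, 16) == 32);
        return 0;
    }
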
quarantine.h 16 size_t usize; member in struct:quarantine_obj_s
tcache.h 301 size_t usize JEMALLOC_CC_SILENCE_INIT(0);
321 * Only compute usize if required. The checks in the following if
325 usize = index2size(binind);
326 assert(tcache_salloc(tsd_tsdn(tsd), ret) == usize);
335 memset(ret, 0, usize);
342 memset(ret, 0, usize);
348 tcache->prof_accumbytes += usize;
378 size_t usize JEMALLOC_CC_SILENCE_INIT(0);
380 /* Only compute usize on demand */
383 usize = index2size(binind)
    [all...]
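
The tcache.h fast path above only computes usize when something actually needs it: zero-initialization or profiling byte accounting, with the variable pre-set to 0 purely to silence compiler warnings. A rough sketch of that lazy computation; the bin-to-size mapping and all names here are hypothetical.

    #include <stdbool.h>
    #include <stddef.h>
    #include <string.h>

    /* Hypothetical bin-index-to-size mapping (real tables are denser). */
    static size_t
    bin_to_size(unsigned binind)
    {
        return (size_t)16 << binind;
    }

    /* Compute usize only on demand, as the tcache fast path does. */
    static void *
    cache_alloc_finish(void *ret, unsigned binind, bool zero, bool profiling,
        size_t *prof_accumbytes)
    {
        size_t usize = 0;   /* placates "may be used uninitialised" warnings */

        if (zero || profiling)
            usize = bin_to_size(binind);
        if (zero)
            memset(ret, 0, usize);
        if (profiling)
            *prof_accumbytes += usize;
        return ret;
    }
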
arena.h 543 void *arena_chunk_alloc_huge(tsdn_t *tsdn, arena_t *arena, size_t usize,
546 size_t usize, size_t sn);
548 void *chunk, size_t oldsize, size_t usize);
550 void *chunk, size_t oldsize, size_t usize, size_t sn);
552 void *chunk, size_t oldsize, size_t usize, bool *zero);
574 void arena_quarantine_junk_small(void *ptr, size_t usize);
579 void *arena_palloc(tsdn_t *tsdn, arena_t *arena, size_t usize,
592 void arena_dalloc_junk_large(void *ptr, size_t usize);
694 void arena_prof_tctx_set(tsdn_t *tsdn, const void *ptr, size_t usize,
696 void arena_prof_tctx_reset(tsdn_t *tsdn, const void *ptr, size_t usize,
    [all...]
  /external/jemalloc/test/unit/
junk.c 41 arena_dalloc_junk_large_intercept(void *ptr, size_t usize)
45 arena_dalloc_junk_large_orig(ptr, usize);
46 for (i = 0; i < usize; i++) {
49 i, usize);
56 huge_dalloc_junk_intercept(void *ptr, size_t usize)
59 huge_dalloc_junk_orig(ptr, usize);
171 arena_ralloc_junk_large_intercept(void *ptr, size_t old_usize, size_t usize)
174 arena_ralloc_junk_large_orig(ptr, old_usize, usize);
176 assert_zu_eq(usize, shrink_size(large_maxclass), "Unexpected usize");
    [all...]
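
The junk.c unit test intercepts the dalloc-junk hooks and then verifies that all usize bytes were overwritten with the deallocation fill pattern. A sketch of that verification loop, assuming the 0x5a free-junk byte described for jemalloc's opt.junk option:

    #include <stdbool.h>
    #include <stddef.h>
    #include <stdio.h>

    #define FREE_JUNK_BYTE 0x5a   /* free-junk fill byte per jemalloc's opt.junk docs */

    /* Returns true if every one of the usize bytes carries the junk pattern;
     * otherwise reports the first offending offset, like the test does. */
    static bool
    check_junk_filled(const void *ptr, size_t usize)
    {
        const unsigned char *p = ptr;

        for (size_t i = 0; i < usize; i++) {
            if (p[i] != FREE_JUNK_BYTE) {
                fprintf(stderr, "missing junk fill at byte %zu of %zu\n",
                    i, usize);
                return false;
            }
        }
        return true;
    }
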
quarantine.c 62 arena_redzone_corruption_replacement(void *ptr, size_t usize, bool after,
  /external/strace/
sched.c 100 unsigned int usize)
105 if (usize) {
107 size = usize <= sizeof(attr) ? usize : (unsigned) sizeof(attr);
116 usize = attr.size;
117 if (!usize)
118 usize = SCHED_ATTR_MIN_SIZE;
119 size = usize <= sizeof(attr) ? usize : (unsigned) sizeof(attr);
143 if (usize > size
    [all...]
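
The strace sched.c hits show the defensive pattern for variable-size kernel structures (sched_attr): copy at most min(usize, sizeof(attr)) bytes, fall back to the minimum ABI size when the structure reports a size of 0, and note any trailing bytes the decoder did not cover. A sketch of just the clamping step, with hypothetical names:

    #include <stddef.h>

    /* Copy at most the smaller of what the tracee supplied (usize) and what
     * the decoder's local structure can hold. */
    static unsigned int
    clamp_copy_size(unsigned int usize, size_t local_size)
    {
        return usize <= local_size ? usize : (unsigned int)local_size;
    }

In the real code this clamp runs twice: once against the caller-supplied usize and once against the size field read back from the structure, with SCHED_ATTR_MIN_SIZE as the fallback when that field is 0.
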
  /external/jemalloc/test/integration/
allocated.c 18 size_t sz, usize; local
70 usize = malloc_usable_size(p);
71 assert_u64_le(a0 + usize, a1,
87 assert_u64_le(d0 + usize, d1,
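
The allocated.c integration test reads the per-thread "thread.allocated" counter before and after a malloc() and checks that it advanced by at least malloc_usable_size(p), since the counter tracks usable (rounded-up) bytes rather than requested bytes. A small standalone illustration of that relationship; malloc_usable_size() is a glibc/jemalloc extension declared in <malloc.h>.

    #include <assert.h>
    #include <malloc.h>   /* malloc_usable_size(): glibc/jemalloc extension */
    #include <stdlib.h>

    int
    main(void)
    {
        void *p = malloc(100);
        if (p == NULL)
            return 1;
        /* The allocator may round the request up to a size class, so the
         * usable size is at least the requested size. */
        size_t usize = malloc_usable_size(p);
        assert(usize >= 100);
        free(p);
        return 0;
    }
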
  /external/libcups/cups/
pwg-media.c 273 char usize[12 + 1 + 12 + 3], /* Unit size: NNNNNNNNNNNNxNNNNNNNNNNNNuu */ local
315 name = usize;
359 uptr = usize;
360 (*format)(uptr, sizeof(usize) - (size_t)(uptr - usize), width);
363 (*format)(uptr, sizeof(usize) - (size_t)(uptr - usize), length);
367 * Safe because usize can hold up to 12 + 1 + 12 + 4 bytes.
376 snprintf(keyword, keysize, "%s_%s_%s", prefix, name, usize);
    [all...]
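
pwg-media.c builds the PWG self-describing size portion into the usize buffer as "<width>x<length><units>" through a format callback, then assembles "prefix_name_usize" with snprintf; the buffer is sized so the two numbers, the 'x', and the unit suffix always fit. A simplified sketch using fixed formatting instead of CUPS' callback; the function name and the whole-millimetre formatting are illustrative only.

    #include <stdio.h>

    /* Compose a PWG-style keyword such as "custom_foo_210x297mm".
     * Dimensions are taken in hundredths of millimetres, as PWG media
     * sizes are, and printed here as whole millimetres for brevity. */
    static void
    make_pwg_keyword(char *keyword, size_t keysize,
        const char *prefix, const char *name, int width_hmm, int length_hmm)
    {
        char usize[12 + 1 + 12 + 3];   /* NNNNNNNNNNNNxNNNNNNNNNNNNuu */

        snprintf(usize, sizeof(usize), "%dx%dmm",
            width_hmm / 100, length_hmm / 100);
        snprintf(keyword, keysize, "%s_%s_%s", prefix, name, usize);
    }
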
  /external/tpm2/
TpmFail.c 116 UINT32 usize; local
119 || UINT32_Unmarshal(&usize, buffer, size) != TPM_RC_SUCCESS
123 header->size = usize;
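
TpmFail.c recovers a command header by unmarshalling a UINT32 size (usize) from the request buffer and storing it in header->size, bailing out if the unmarshal fails. TPM 2.0 wire data is big-endian, so a sketch of a 32-bit unmarshal of that shape looks like the following; the function name and error convention are illustrative.

    #include <stdbool.h>
    #include <stdint.h>

    /* Consume 4 big-endian bytes from *buffer, advancing the cursor and
     * decrementing the remaining byte count. Returns false when the buffer
     * is too short, loosely mirroring a TPM_RC failure. */
    static bool
    unmarshal_u32(uint32_t *out, const uint8_t **buffer, int32_t *size)
    {
        if (*size < 4)
            return false;
        *out = ((uint32_t)(*buffer)[0] << 24) |
               ((uint32_t)(*buffer)[1] << 16) |
               ((uint32_t)(*buffer)[2] << 8)  |
                (uint32_t)(*buffer)[3];
        *buffer += 4;
        *size   -= 4;
        return true;
    }
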
  /external/v8/src/runtime/
runtime-atomics.cc 580 uint32_t usize = NumberToUint32(*size); local
581 return isolate->heap()->ToBoolean(AtomicIsLockFree(usize));
  /external/python/cpython3/Python/
fileutils.c 1085 int usize; local
1101 usize = MultiByteToWideChar(CP_ACP, 0, mode, -1, wmode, sizeof(wmode));
1102 if (usize == 0) {
    [all...]
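
fileutils.c converts the narrow mode string to a wide string with MultiByteToWideChar(CP_ACP, ...) and treats a zero return (usize == 0) as failure. A minimal Windows-only sketch of that call shape, guarded so it only compiles where the Win32 API exists; the wrapper name is illustrative.

    #ifdef _WIN32
    #include <windows.h>
    #include <wchar.h>

    /* Convert an ANSI mode string such as "rb" to a wide string.
     * Returns the number of wide characters written (including the
     * terminating NUL), or 0 on failure, as MultiByteToWideChar reports. */
    static int
    mode_to_wide(const char *mode, wchar_t *wmode, int wmode_chars)
    {
        return MultiByteToWideChar(CP_ACP, 0, mode, -1, wmode, wmode_chars);
    }
    #endif
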
  /device/linaro/bootloader/edk2/AppPkg/Applications/Python/Python-2.7.2/Lib/test/
test_sys.py 746 usize = len(u'\0'.encode('unicode-internal'))
751 check(s, size(h + 'PPlP') + usize * (len(s) + 1))
  /external/python/cpython2/Lib/test/
test_sys.py 755 usize = len(u'\0'.encode('unicode-internal'))
760 check(s, size('PPlP') + usize * (len(s) + 1))

