
Lines Matching refs:nb

Throughout this dlmalloc-derived allocator, nb holds the "normalized bytes" of a request: the caller's byte count padded by pad_request()/request2size() to an internal chunk size, which every allocation path below computes with.

3165 static void* mmap_alloc(mstate m, size_t nb) {
3166 size_t mmsize = granularity_align(nb + SIX_SIZE_T_SIZES + CHUNK_ALIGN_MASK);
3167 if (mmsize > nb) { /* Check for wrap around 0 */
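mmap_alloc (from 3165) satisfies a large request directly from the system. A minimal sketch of its size arithmetic; the constants and the alignment helper are simplified stand-ins, not the dlmalloc originals.

/* Sketch of mmap_alloc's request sizing under assumed constants. */
#include <stddef.h>

#define GRANULARITY      ((size_t)4096)      /* assumed page/mapping granularity */
#define SIX_SIZE_T_SIZES (6 * sizeof(size_t))
#define CHUNK_ALIGN_MASK ((size_t)7)         /* assumed 8-byte chunk alignment */

static size_t granularity_align_sketch(size_t s) {
  return (s + GRANULARITY - 1) & ~(GRANULARITY - 1);
}

/* Pad nb for bookkeeping and alignment, round up to the granularity, and
   refuse the request if the padding wrapped around zero (line 3167). */
static size_t mmap_request_size(size_t nb) {
  size_t mmsize = granularity_align_sketch(nb + SIX_SIZE_T_SIZES + CHUNK_ALIGN_MASK);
  return (mmsize > nb) ? mmsize : 0;
}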
3192 static mchunkptr mmap_resize(mstate m, mchunkptr oldp, size_t nb) {
3194 if (is_small(nb)) /* Can't shrink mmap regions below small size */
3197 if (oldsize >= nb + SIZE_T_SIZE &&
3198 (oldsize - nb) <= (mparams.granularity << 1))
3203 size_t newmmsize = granularity_align(nb + SIX_SIZE_T_SIZES +
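mmap_resize (from 3192) decides whether an mmapped chunk can be reused in place for the new normalized size. A hedged sketch of just that decision, with is_small() and the granularity replaced by assumed constants.

/* Sketch of the mmap_resize size checks under assumed constants. */
#include <stddef.h>

#define SIZE_T_SIZE    sizeof(size_t)
#define GRANULARITY    ((size_t)4096)   /* assumed */
#define MIN_LARGE_SIZE ((size_t)256)    /* assumed smallest "large" chunk size */

/* Returns 1 if the existing mmapped chunk of oldsize bytes can be reused
   for the new normalized request nb, 0 if it must be remapped, and -1 if
   the request is now too small to live in an mmapped chunk at all. */
static int mmap_resize_decision(size_t oldsize, size_t nb) {
  if (nb < MIN_LARGE_SIZE)                      /* is_small(nb), line 3194 */
    return -1;                                  /* can't shrink mmap regions below small size */
  if (oldsize >= nb + SIZE_T_SIZE &&
      (oldsize - nb) <= (GRANULARITY << 1))     /* slack within two granules, lines 3197-3198 */
    return 1;
  return 0;                                     /* remap to a new granularity-aligned size */
}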
3274 size_t nb) {
3278 mchunkptr q = chunk_plus_offset(p, nb);
3279 size_t qsize = psize - nb;
3280 set_size_and_pinuse_of_inuse_chunk(m, p, nb);
3310 check_malloced_chunk(m, chunk2mem(p), nb);
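prepend_alloc (from 3274) carves an in-use chunk of nb bytes off the front of newly obtained space and files the rest as a remainder. An offsets-only sketch of the split on lines 3278-3280; the real code also sets in-use/pinuse bits and may merge the remainder into dv or top.

/* Offsets-only model of the front split; flag and bin handling omitted. */
#include <stddef.h>
#include <assert.h>

typedef struct { size_t offset; size_t size; } chunk_view;

static void split_front(size_t base, size_t psize, size_t nb,
                        chunk_view* p, chunk_view* q) {
  assert(psize > nb);
  p->offset = base;           /* returned chunk, size nb */
  p->size   = nb;
  q->offset = base + nb;      /* q = chunk_plus_offset(p, nb) */
  q->size   = psize - nb;     /* qsize = psize - nb */
}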
3371 static void* sys_alloc(mstate m, size_t nb) {
3379 if (use_mmap(m) && nb >= mparams.mmap_threshold) {
3380 void* mem = mmap_alloc(m, nb);
3411 asize = granularity_align(nb + TOP_FOOT_SIZE + MALLOC_ALIGNMENT + SIZE_T_ONE);
3425 asize = granularity_align(nb - m->topsize + TOP_FOOT_SIZE + MALLOC_ALIGNMENT + SIZE_T_ONE);
3437 asize < nb + TOP_FOOT_SIZE + SIZE_T_ONE) {
3438 size_t esize = granularity_align(nb + TOP_FOOT_SIZE + MALLOC_ALIGNMENT + SIZE_T_ONE - asize);
3462 size_t req = nb + TOP_FOOT_SIZE + MALLOC_ALIGNMENT + SIZE_T_ONE;
3464 if (rsize > nb) { /* Fail if wraps around zero */
3475 size_t asize = granularity_align(nb + TOP_FOOT_SIZE + MALLOC_ALIGNMENT + SIZE_T_ONE);
3485 if (ssize > nb + TOP_FOOT_SIZE) {
3537 return prepend_alloc(m, tbase, oldbase, nb);
3544 if (nb < m->topsize) { /* Allocate from new or extended top space */
3545 size_t rsize = m->topsize -= nb;
3547 mchunkptr r = m->top = chunk_plus_offset(p, nb);
3549 set_size_and_pinuse_of_inuse_chunk(m, p, nb);
3551 check_malloced_chunk(m, chunk2mem(p), nb);
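sys_alloc (from 3371) either defers to mmap_alloc when nb is at or above the mmap threshold (line 3379) or grows the heap, padding nb with top-footer and alignment overhead before asking the system. A sketch of that padding under assumed constant values; the real TOP_FOOT_SIZE depends on segment bookkeeping and MALLOC_ALIGNMENT on the platform.

/* Sketch of sys_alloc's padded system request under assumed constants. */
#include <stddef.h>

#define GRANULARITY      ((size_t)65536)       /* assumed default granularity */
#define TOP_FOOT_SIZE    (5 * sizeof(size_t))  /* assumed top/segment overhead */
#define MALLOC_ALIGNMENT (2 * sizeof(void*))
#define SIZE_T_ONE       ((size_t)1)

static size_t granularity_align_sketch(size_t s) {
  return (s + GRANULARITY - 1) & ~(GRANULARITY - 1);
}

/* Amount to request from the system for a normalized request nb, or 0
   when the padding wraps around zero (the failure case on line 3464). */
static size_t sys_alloc_request(size_t nb) {
  size_t asize = granularity_align_sketch(nb + TOP_FOOT_SIZE +
                                          MALLOC_ALIGNMENT + SIZE_T_ONE);
  return (asize > nb) ? asize : 0;
}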
3669 static void* tmalloc_large(mstate m, size_t nb) {
3671 size_t rsize = -nb; /* Unsigned negation */
3674 compute_tree_index(nb, idx);
3677 /* Traverse tree for this bin looking for node with size == nb */
3678 size_t sizebits = nb << leftshift_for_tree_index(idx);
3682 size_t trem = chunksize(t) - nb;
3693 t = rst; /* set t to least subtree holding sizes > nb */
3711 size_t trem = chunksize(t) - nb;
3720 if (v != 0 && rsize < (size_t)(m->dvsize - nb)) {
3722 mchunkptr r = chunk_plus_offset(v, nb);
3723 assert(chunksize(v) == rsize + nb);
3727 set_inuse_and_pinuse(m, v, (rsize + nb));
3729 set_size_and_pinuse_of_inuse_chunk(m, v, nb);
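tmalloc_large (from 3669) walks the size-indexed tree bins for the best fit. The sketch below models only the remainder bookkeeping: rsize starts as the unsigned negation of nb and shrinks as better-fitting chunks are found. A flat array of chunk sizes stands in for the bitwise trie walk, and the comparison against dvsize - nb on line 3720 is omitted.

/* Best-fit remainder tracking as on lines 3671, 3682 and 3711. */
#include <stddef.h>

static size_t best_fit_remainder(const size_t* sizes, size_t n, size_t nb) {
  size_t rsize = -nb;                 /* unsigned negation, as on line 3671 */
  for (size_t i = 0; i < n; i++) {
    if (sizes[i] >= nb) {
      size_t trem = sizes[i] - nb;    /* trem = chunksize(t) - nb */
      if (trem < rsize)
        rsize = trem;                 /* new best fit so far */
    }
  }
  return rsize;                       /* still -nb if nothing fits */
}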
3742 static void* tmalloc_small(mstate m, size_t nb) {
3750 rsize = chunksize(t) - nb;
3753 size_t trem = chunksize(t) - nb;
3761 mchunkptr r = chunk_plus_offset(v, nb);
3762 assert(chunksize(v) == rsize + nb);
3766 set_inuse_and_pinuse(m, v, (rsize + nb));
3768 set_size_and_pinuse_of_inuse_chunk(m, v, nb);
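tmalloc_small (from 3742) takes the smallest available tree chunk and, like tmalloc_large, either splits it or hands out the whole chunk when the leftover would be below the minimum chunk size. A sketch of that shared decision; MIN_CHUNK_SIZE here is an assumed value, not the dlmalloc definition.

/* Split-or-exhaust decision once a chunk of size rsize + nb is chosen. */
#include <stddef.h>

#define MIN_CHUNK_SIZE ((size_t)32)   /* assumed */

/* Returns the remainder size to re-file as a free chunk, or 0 when the
   leftover is too small to stand alone and the whole chunk is handed out
   (the set_inuse_and_pinuse branch on lines 3727/3766). */
static size_t split_or_exhaust(size_t chunksize, size_t nb) {
  size_t rsize = chunksize - nb;
  if (rsize < MIN_CHUNK_SIZE)
    return 0;
  return rsize;    /* remainder chunk r placed at v + nb (lines 3729/3768) */
}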
3798 size_t nb = request2size(bytes);
3800 newp = mmap_resize(m, oldp, nb);
3801 else if (oldsize >= nb) { /* already big enough */
3802 size_t rsize = oldsize - nb;
3805 mchunkptr remainder = chunk_plus_offset(newp, nb);
3806 set_inuse(m, newp, nb);
3811 else if (next == m->top && oldsize + m->topsize > nb) {
3814 size_t newtopsize = newsize - nb;
3815 mchunkptr newtop = chunk_plus_offset(oldp, nb);
3816 set_inuse(m, oldp, nb);
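The realloc path (from 3798) recomputes nb with request2size and tries three in-place strategies before falling back to allocate-copy-free. A sketch of the dispatch order only; chunk flags, locking, and the actual splitting are elided.

/* In-place realloc cases, with nb = request2size(bytes) as on line 3798. */
#include <stddef.h>

enum realloc_plan {
  RESIZE_MMAP,        /* chunk is mmapped: mmap_resize(m, oldp, nb)      */
  SHRINK_IN_PLACE,    /* oldsize >= nb: split off the tail if big enough */
  GROW_INTO_TOP,      /* next chunk is top and top has room: extend oldp */
  MALLOC_AND_COPY     /* none of the above                               */
};

static enum realloc_plan choose_realloc_plan(int is_mmapped, size_t oldsize,
                                             int next_is_top, size_t topsize,
                                             size_t nb) {
  if (is_mmapped)
    return RESIZE_MMAP;
  if (oldsize >= nb)
    return SHRINK_IN_PLACE;
  if (next_is_top && oldsize + topsize > nb)   /* line 3811 */
    return GROW_INTO_TOP;
  return MALLOC_AND_COPY;
}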
3870 size_t nb = request2size(bytes);
3871 size_t req = nb + alignment + MIN_CHUNK_SIZE - CHUNK_OVERHEAD;
3913 if (size > nb + MIN_CHUNK_SIZE) {
3914 size_t remainder_size = size - nb;
3915 mchunkptr remainder = chunk_plus_offset(p, nb);
3916 set_inuse(m, p, nb);
3922 assert (chunksize(p) >= nb);
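The memalign path (from 3870) over-allocates by the alignment plus a minimum chunk so that an aligned chunk of nb bytes can always be carved out, then trims a trailing remainder if it is large enough to stand alone (lines 3913-3916). A sketch of both size computations under assumed constants.

/* Size arithmetic of the memalign path; constants are assumed values. */
#include <stddef.h>

#define MIN_CHUNK_SIZE ((size_t)32)        /* assumed */
#define CHUNK_OVERHEAD (sizeof(size_t))    /* assumed: one size/status word */

/* Over-allocation request, as on line 3871. */
static size_t memalign_request(size_t nb, size_t alignment) {
  return nb + alignment + MIN_CHUNK_SIZE - CHUNK_OVERHEAD;
}

/* Trailing remainder split off after alignment, as on lines 3913-3915;
   returns 0 when the leftover is too small to split. */
static size_t trailing_remainder(size_t size, size_t nb) {
  return (size > nb + MIN_CHUNK_SIZE) ? (size - nb) : 0;
}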
4097 size_t nb;
4101 nb = (bytes < MIN_REQUEST)? MIN_CHUNK_SIZE : pad_request(bytes);
4102 idx = small_index(nb);
4114 check_malloced_chunk(gm, mem, nb);
4118 else if (nb > gm->dvsize) {
4130 rsize = small_index2size(i) - nb;
4135 set_size_and_pinuse_of_inuse_chunk(gm, p, nb);
4136 r = chunk_plus_offset(p, nb);
4141 check_malloced_chunk(gm, mem, nb);
4145 else if (gm->treemap != 0 && (mem = tmalloc_small(gm, nb)) != 0) {
4146 check_malloced_chunk(gm, mem, nb);
4152 nb = MAX_SIZE_T; /* Too big to allocate. Force failure (in sys alloc) */
4154 nb = pad_request(bytes);
4155 if (gm->treemap != 0 && (mem = tmalloc_large(gm, nb)) != 0) {
4156 check_malloced_chunk(gm, mem, nb);
4161 if (nb <= gm->dvsize) {
4162 size_t rsize = gm->dvsize - nb;
4165 mchunkptr r = gm->dv = chunk_plus_offset(p, nb);
4168 set_size_and_pinuse_of_inuse_chunk(gm, p, nb);
4177 check_malloced_chunk(gm, mem, nb);
4181 else if (nb < gm->topsize) { /* Split top */
4182 size_t rsize = gm->topsize -= nb;
4184 mchunkptr r = gm->top = chunk_plus_offset(p, nb);
4186 set_size_and_pinuse_of_inuse_chunk(gm, p, nb);
4189 check_malloced_chunk(gm, mem, nb);
4193 mem = sys_alloc(gm, nb);
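dlmalloc (from 4097) is where nb is first derived from the user's byte count and then fed through the whole cascade: exact small bin, designated victim, tree bins, top chunk, and finally sys_alloc. A sketch of just the normalization step, with assumed values for the thresholds and alignment mask; the real pad_request/request2size fold in per-chunk overhead that varies with the configuration.

/* How the user's byte count becomes nb, under assumed constants. */
#include <stddef.h>

#define MIN_REQUEST      ((size_t)23)      /* assumed */
#define MIN_CHUNK_SIZE   ((size_t)32)      /* assumed */
#define MAX_REQUEST      ((size_t)-64)     /* assumed overflow guard */
#define CHUNK_OVERHEAD   (sizeof(size_t))  /* assumed */
#define CHUNK_ALIGN_MASK ((size_t)15)      /* assumed 16-byte alignment */

static size_t pad_request_sketch(size_t bytes) {
  return (bytes + CHUNK_OVERHEAD + CHUNK_ALIGN_MASK) & ~CHUNK_ALIGN_MASK;
}

/* Oversized requests are forced to MAX_SIZE_T so sys_alloc fails cleanly
   (line 4152); tiny ones are rounded up to MIN_CHUNK_SIZE (line 4101). */
static size_t normalize_request(size_t bytes) {
  if (bytes >= MAX_REQUEST)
    return (size_t)-1;
  if (bytes < MIN_REQUEST)
    return MIN_CHUNK_SIZE;
  return pad_request_sketch(bytes);
}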
4504 size_t nb;
4508 nb = (bytes < MIN_REQUEST)? MIN_CHUNK_SIZE : pad_request(bytes);
4509 idx = small_index(nb);
4521 check_malloced_chunk(ms, mem, nb);
4525 else if (nb > ms->dvsize) {
4537 rsize = small_index2size(i) - nb;
4542 set_size_and_pinuse_of_inuse_chunk(ms, p, nb);
4543 r = chunk_plus_offset(p, nb);
4548 check_malloced_chunk(ms, mem, nb);
4552 else if (ms->treemap != 0 && (mem = tmalloc_small(ms, nb)) != 0) {
4553 check_malloced_chunk(ms, mem, nb);
4559 nb = MAX_SIZE_T; /* Too big to allocate. Force failure (in sys alloc) */
4561 nb = pad_request(bytes);
4562 if (ms->treemap != 0 && (mem = tmalloc_large(ms, nb)) != 0) {
4563 check_malloced_chunk(ms, mem, nb);
4568 if (nb <= ms->dvsize) {
4569 size_t rsize = ms->dvsize - nb;
4572 mchunkptr r = ms->dv = chunk_plus_offset(p, nb);
4575 set_size_and_pinuse_of_inuse_chunk(ms, p, nb);
4584 check_malloced_chunk(ms, mem, nb);
4588 else if (nb < ms->topsize) { /* Split top */
4589 size_t rsize = ms->topsize -= nb;
4591 mchunkptr r = ms->top = chunk_plus_offset(p, nb);
4593 set_size_and_pinuse_of_inuse_chunk(ms, p, nb);
4596 check_malloced_chunk(ms, mem, nb);
4600 mem = sys_alloc(ms, nb);
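mspace_malloc (from 4504) repeats the dlmalloc cascade line for line, only against a caller-supplied mstate ms instead of the global state gm. A usage sketch, assuming a build with MSPACES enabled; the header name is an assumption.

/* Minimal mspace usage sketch; header name is assumed. */
#include <stddef.h>
#include "malloc.h"   /* assumed name of the header declaring the mspace API */

void mspace_example(void) {
  mspace msp = create_mspace(0, 0);        /* 0 capacity: grow on demand; no locking */
  if (msp != 0) {
    void* mem = mspace_malloc(msp, 128);   /* same nb-based cascade as dlmalloc */
    if (mem != 0)
      mspace_free(msp, mem);
    destroy_mspace(msp);
  }
}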