Lines matching refs: nb (every reference to nb, the padded request size, in the allocator source)
3840 static void* mmap_alloc(mstate m, size_t nb) {
3841 size_t mmsize = mmap_align(nb + SIX_SIZE_T_SIZES + CHUNK_ALIGN_MASK);
3847 if (mmsize > nb) { /* Check for wrap around 0 */
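Lines 3840-3847 are mmap_alloc, the direct-mmap path: nb is already a padded chunk size, and it is rounded up again to a mapping-sized unit (mmap_align is page or granularity alignment) before the wrap-around check at 3847. A minimal sketch of that overflow-checked round-up; page_sz and the helper name are illustrative stand-ins for dlmalloc's macros:

    #include <stddef.h>

    /* Round nb plus per-mapping overhead up to a multiple of page_sz and
       detect size_t wraparound, mirroring the "mmsize > nb" check above.
       Returns 0 on overflow. page_sz must be a power of two. */
    static size_t padded_mmap_size(size_t nb, size_t page_sz) {
      size_t mmsize = (nb + 6 * sizeof(size_t) + (page_sz - 1)) & ~(page_sz - 1);
      return (mmsize > nb) ? mmsize : 0;
    }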
3872 static mchunkptr mmap_resize(mstate m, mchunkptr oldp, size_t nb, int flags) {
3875 if (is_small(nb)) /* Can't shrink mmap regions below small size */
3878 if (oldsize >= nb + SIZE_T_SIZE &&
3879 (oldsize - nb) <= (mparams.granularity << 1))
3884 size_t newmmsize = mmap_align(nb + SIX_SIZE_T_SIZES + CHUNK_ALIGN_MASK);
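mmap_resize (3872-3884) first refuses to manage small sizes via mmap, then keeps the existing mapping whenever it already covers nb and wastes at most two allocation-granularity units; only otherwise does it recompute a mapping size and remap. A sketch of that keep-in-place test (granularity is passed in here; dlmalloc reads it from mparams):

    #include <stddef.h>

    /* Mirror of the condition at lines 3878-3879: reuse the old mapping
       when it fits nb plus one size_t of overhead and the slack is bounded. */
    static int can_keep_mapping(size_t oldsize, size_t nb, size_t granularity) {
      return oldsize >= nb + sizeof(size_t) &&
             (oldsize - nb) <= (granularity << 1);
    }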
3955 size_t nb) {
3959 mchunkptr q = chunk_plus_offset(p, nb);
3960 size_t qsize = psize - nb;
3961 set_size_and_pinuse_of_inuse_chunk(m, p, nb);
3991 check_malloced_chunk(m, chunk2mem(p), nb);
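The 3955-3991 block is prepend_alloc (called from sys_alloc at 4244): it carves an nb-byte in-use chunk off the front of newly obtained memory and turns the qsize = psize - nb remainder into a free, dv, or top chunk at p + nb. The set_size_and_pinuse_of_inuse_chunk call at 3961 packs nb together with status flags into the header word; since chunk sizes are alignment multiples, the low bits are free to hold them. A sketch of that encoding (bit values match dlmalloc's PINUSE_BIT and CINUSE_BIT; dlmalloc also reserves a third flag bit, masked out below):

    #include <stddef.h>

    #define PINUSE_BIT ((size_t)1)   /* previous chunk is in use */
    #define CINUSE_BIT ((size_t)2)   /* this chunk is in use     */

    static size_t pack_inuse_pinuse(size_t nb) { return nb | PINUSE_BIT | CINUSE_BIT; }
    static size_t unpack_size(size_t head)     { return head & ~(size_t)7; }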
4051 static void* sys_alloc(mstate m, size_t nb) {
4060 if (use_mmap(m) && nb >= mparams.mmap_threshold && m->topsize != 0) {
4061 void* mem = mmap_alloc(m, nb);
4066 asize = granularity_align(nb + SYS_ALLOC_PADDING);
4067 if (asize <= nb) {
4100 we can malloc nb bytes upon success, so pad with enough space for
4119 if (ssize > nb && ssize < HALF_MAX_SIZE_T &&
4130 ssize = granularity_align(nb - m->topsize + SYS_ALLOC_PADDING);
4142 ssize < nb + SYS_ALLOC_PADDING) {
4143 size_t esize = granularity_align(nb + SYS_ALLOC_PADDING - ssize);
4185 if (ssize > nb + TOP_FOOT_SIZE) {
4244 return prepend_alloc(m, tbase, oldbase, nb);
4251 if (nb < m->topsize) { /* Allocate from new or extended top space */
4252 size_t rsize = m->topsize -= nb;
4254 mchunkptr r = m->top = chunk_plus_offset(p, nb);
4256 set_size_and_pinuse_of_inuse_chunk(m, p, nb);
4258 check_malloced_chunk(m, chunk2mem(p), nb);
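sys_alloc (4051-4258) is the fallback when nothing in the existing heap can satisfy nb: large requests go straight to mmap_alloc (4060-4061), otherwise the heap is grown by at least nb plus padding (granularity-aligned, with its own overflow check at 4067), and the new or extended top is finally split at 4251-4258. A sketch of that final top split; the header layout and payload offset are illustrative, though the arithmetic follows the listed lines:

    #include <stddef.h>

    struct chunk_hdr { size_t prev_foot, head; };  /* illustrative header */

    /* Shrink top by nb and return the payload of the chunk that used to
       start it, as in lines 4251-4258. Returns 0 if top is too small. */
    static void* take_from_top(struct chunk_hdr** top, size_t* topsize, size_t nb) {
      if (nb >= *topsize)
        return 0;                                  /* caller must grow the heap */
      struct chunk_hdr* p = *top;
      *topsize -= nb;                              /* rsize = topsize - nb */
      *top = (struct chunk_hdr*)((char*)p + nb);   /* new top begins after p */
      p->head = nb;                                /* p now in use (flag bits omitted) */
      (*top)->head = *topsize;                     /* remaining top size */
      return (char*)p + 2 * sizeof(size_t);        /* chunk2mem: payload after header */
    }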
4456 static void* tmalloc_large(mstate m, size_t nb) {
4458 size_t rsize = -nb; /* Unsigned negation */
4461 compute_tree_index(nb, idx);
4463 /* Traverse tree for this bin looking for node with size == nb */
4464 size_t sizebits = nb << leftshift_for_tree_index(idx);
4468 size_t trem = chunksize(t) - nb;
4479 t = rst; /* set t to least subtree holding sizes > nb */
4496 size_t trem = chunksize(t) - nb;
4505 if (v != 0 && rsize < (size_t)(m->dvsize - nb)) {
4507 mchunkptr r = chunk_plus_offset(v, nb);
4508 assert(chunksize(v) == rsize + nb);
4512 set_inuse_and_pinuse(m, v, (rsize + nb));
4514 set_size_and_pinuse_of_inuse_chunk(m, v, nb);
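tmalloc_large (4456-4514) does a best-fit search of the tree bins, and its result is used only if it beats the current dv (4505). The initial remainder at 4458 is the unsigned negation of nb, an effectively infinite value, so the first chunk that fits always becomes the candidate and each tighter fit shrinks rsize further. A sketch of that bookkeeping over a plain linked list standing in for dlmalloc's bitwise trie:

    #include <stddef.h>

    typedef struct tnode { size_t size; struct tnode* next; } tnode;

    /* Keep the chunk with the smallest remainder (size - nb); rsize starts
       at (size_t)0 - nb, a huge value, exactly like line 4458. */
    static tnode* best_fit(tnode* t, size_t nb, size_t* out_rsize) {
      size_t rsize = (size_t)0 - nb;
      tnode* v = 0;
      for (; t != 0; t = t->next) {
        if (t->size >= nb) {
          size_t trem = t->size - nb;
          if (trem < rsize) { rsize = trem; v = t; }
        }
      }
      *out_rsize = rsize;
      return v;                       /* 0 if nothing fits nb */
    }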
4527 static void* tmalloc_small(mstate m, size_t nb) {
4534 rsize = chunksize(t) - nb;
4537 size_t trem = chunksize(t) - nb;
4545 mchunkptr r = chunk_plus_offset(v, nb);
4546 assert(chunksize(v) == rsize + nb);
4550 set_inuse_and_pinuse(m, v, (rsize + nb));
4552 set_size_and_pinuse_of_inuse_chunk(m, v, nb);
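tmalloc_small (4527-4552) and the tail of tmalloc_large share the same final step: the chosen chunk v holds rsize + nb bytes (the asserts at 4508 and 4546 check exactly that); if rsize is too small to stand alone as a chunk, v is handed out whole, otherwise the caller's nb bytes are carved off the front and the remainder r at v + nb goes back to the allocator. A sketch of that decision; the header layout, payload offset, and MIN_CHUNK constant are illustrative:

    #include <stddef.h>

    struct vchunk { size_t prev_foot, head; };
    #define MIN_CHUNK ((size_t)(4 * sizeof(size_t)))

    static void* take_or_split(struct vchunk* v, size_t nb, size_t rsize) {
      if (rsize < MIN_CHUNK) {
        v->head = rsize + nb;                   /* remainder too small: use whole chunk */
      } else {
        struct vchunk* r = (struct vchunk*)((char*)v + nb);
        v->head = nb;                           /* in-use flags omitted */
        r->head = rsize;                        /* real code re-binds r as dv or free */
      }
      return (char*)v + 2 * sizeof(size_t);     /* payload follows the header */
    }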
4596 size_t nb;
4600 nb = (bytes < MIN_REQUEST)? MIN_CHUNK_SIZE : pad_request(bytes);
4601 idx = small_index(nb);
4613 check_malloced_chunk(gm, mem, nb);
4617 else if (nb > gm->dvsize) {
4629 rsize = small_index2size(i) - nb;
4634 set_size_and_pinuse_of_inuse_chunk(gm, p, nb);
4635 r = chunk_plus_offset(p, nb);
4640 check_malloced_chunk(gm, mem, nb);
4644 else if (gm->treemap != 0 && (mem = tmalloc_small(gm, nb)) != 0) {
4645 check_malloced_chunk(gm, mem, nb);
4651 nb = MAX_SIZE_T; /* Too big to allocate. Force failure (in sys alloc) */
4653 nb = pad_request(bytes);
4654 if (gm->treemap != 0 && (mem = tmalloc_large(gm, nb)) != 0) {
4655 check_malloced_chunk(gm, mem, nb);
4660 if (nb <= gm->dvsize) {
4661 size_t rsize = gm->dvsize - nb;
4664 mchunkptr r = gm->dv = chunk_plus_offset(p, nb);
4667 set_size_and_pinuse_of_inuse_chunk(gm, p, nb);
4676 check_malloced_chunk(gm, mem, nb);
4680 else if (nb < gm->topsize) { /* Split top */
4681 size_t rsize = gm->topsize -= nb;
4683 mchunkptr r = gm->top = chunk_plus_offset(p, nb);
4685 set_size_and_pinuse_of_inuse_chunk(gm, p, nb);
4688 check_malloced_chunk(gm, mem, nb);
4692 mem = sys_alloc(gm, nb);
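The 4596-4692 block is the global malloc itself: nb is the user request padded to chunk granularity (4600/4653), clamped to MAX_SIZE_T for impossible requests (4651) so that sys_alloc fails cleanly, and then tried against the small bins, dv, tree bins, and top before falling back to sys_alloc at 4692. A sketch of that normalization; the constants mirror a typical 64-bit build (16-byte alignment, one size_t of overhead, 32-byte minimum chunk) rather than dlmalloc's configured macros:

    #include <stddef.h>

    #define ALIGN_MASK   ((size_t)15)        /* CHUNK_ALIGN_MASK stand-in */
    #define OVERHEAD     (sizeof(size_t))    /* CHUNK_OVERHEAD stand-in   */
    #define MIN_CHUNK_SZ ((size_t)32)        /* MIN_CHUNK_SIZE stand-in   */
    #define TOO_BIG      ((size_t)-256)      /* MAX_REQUEST stand-in      */

    /* Turn a user byte count into the padded internal size nb, following
       lines 4600, 4651 and 4653. */
    static size_t request_to_nb(size_t bytes) {
      if (bytes >= TOO_BIG)
        return (size_t)-1;                   /* force sys_alloc to fail */
      size_t padded = (bytes + OVERHEAD + ALIGN_MASK) & ~ALIGN_MASK;
      return padded < MIN_CHUNK_SZ ? MIN_CHUNK_SZ : padded;
    }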
4833 static mchunkptr try_realloc_chunk(mstate m, mchunkptr p, size_t nb,
4841 newp = mmap_resize(m, p, nb, can_move);
4843 else if (oldsize >= nb) { /* already big enough */
4844 size_t rsize = oldsize - nb;
4846 mchunkptr r = chunk_plus_offset(p, nb);
4847 set_inuse(m, p, nb);
4854 if (oldsize + m->topsize > nb) {
4856 size_t newtopsize = newsize - nb;
4857 mchunkptr newtop = chunk_plus_offset(p, nb);
4858 set_inuse(m, p, nb);
4867 if (oldsize + dvs >= nb) {
4868 size_t dsize = oldsize + dvs - nb;
4870 mchunkptr r = chunk_plus_offset(p, nb);
4872 set_inuse(m, p, nb);
4889 if (oldsize + nextsize >= nb) {
4890 size_t rsize = oldsize + nextsize - nb;
4897 mchunkptr r = chunk_plus_offset(p, nb);
4898 set_inuse(m, p, nb);
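try_realloc_chunk (4833-4898) tries every way of resizing p to nb without moving it, in a fixed order: mmapped chunks go to mmap_resize (4841); a chunk that is already big enough is shrunk in place, splitting off the tail (4843-4847); otherwise the chunk is extended into whatever physically follows it, preferring top (4854), then dv (4867), then an ordinary free chunk (4889). A sketch of that decision ladder using the comparisons from those lines; the flags describing the neighbouring chunk are illustrative parameters:

    #include <stddef.h>

    typedef enum {
      RS_SHRINK_IN_PLACE, RS_EXTEND_INTO_TOP, RS_EXTEND_INTO_DV,
      RS_ABSORB_NEXT_FREE, RS_MUST_MOVE
    } realloc_strategy;

    static realloc_strategy choose_strategy(size_t oldsize, size_t nb,
                                            int next_is_top, size_t topsize,
                                            int next_is_dv,  size_t dvsize,
                                            int next_is_free, size_t nextsize) {
      if (oldsize >= nb)                             return RS_SHRINK_IN_PLACE;
      if (next_is_top  && oldsize + topsize  >  nb)  return RS_EXTEND_INTO_TOP;
      if (next_is_dv   && oldsize + dvsize   >= nb)  return RS_EXTEND_INTO_DV;
      if (next_is_free && oldsize + nextsize >= nb)  return RS_ABSORB_NEXT_FREE;
      return RS_MUST_MOVE;                           /* caller must malloc+copy+free */
    }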
4927 size_t nb = request2size(bytes);
4928 size_t req = nb + alignment + MIN_CHUNK_SIZE - CHUNK_OVERHEAD;
4967 if (size > nb + MIN_CHUNK_SIZE) {
4968 size_t remainder_size = size - nb;
4969 mchunkptr remainder = chunk_plus_offset(p, nb);
4970 set_inuse(m, p, nb);
4977 assert (chunksize(p) >= nb);
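The 4927-4977 lines are the aligned-allocation path: the request is over-allocated at 4928 (nb plus the alignment plus a minimum chunk, minus the overhead already counted in nb) so that an aligned chunk of at least nb bytes can always be found inside it, the leading and trailing slack is split off, and 4977 asserts the surviving chunk still covers nb. A sketch of the core pointer math only; the real code also returns the slack to the allocator:

    #include <stdint.h>
    #include <stddef.h>

    /* Round a payload pointer up to the requested power-of-two alignment. */
    static void* align_up(void* mem, size_t alignment) {
      uintptr_t a = (uintptr_t)mem;
      return (void*)((a + (alignment - 1)) & ~(uintptr_t)(alignment - 1));
    }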
5221 size_t nb = request2size(bytes);
5233 mchunkptr newp = try_realloc_chunk(m, oldp, nb, 1);
5259 size_t nb = request2size(bytes);
5271 mchunkptr newp = try_realloc_chunk(m, oldp, nb, 0);
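The two call sites at 5233 and 5271 are the movable and in-place realloc paths: both convert bytes to nb with request2size, but the first passes can_move = 1 and may fall back to allocate-copy-free, while the second passes can_move = 0 and simply fails if the chunk cannot be resized where it is. A sketch of that caller-side pattern; try_resize_in_place is a hypothetical stand-in for try_realloc_chunk:

    #include <stdlib.h>
    #include <string.h>

    static int try_resize_in_place(void* p, size_t nb) {  /* stub: pretend it never works */
      (void)p; (void)nb;
      return 0;
    }

    static void* realloc_sketch(void* p, size_t old_bytes, size_t new_bytes) {
      if (try_resize_in_place(p, new_bytes))
        return p;                            /* resized in place */
      void* q = malloc(new_bytes);           /* can_move = 1 path: relocate */
      if (q != NULL) {
        memcpy(q, p, old_bytes < new_bytes ? old_bytes : new_bytes);
        free(p);
      }
      return q;                              /* with can_move = 0 this would just fail */
    }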
5532 size_t nb;
5536 nb = (bytes < MIN_REQUEST)? MIN_CHUNK_SIZE : pad_request(bytes);
5537 idx = small_index(nb);
5549 nb);
5553 else if (nb > ms->dvsize) {
5565 rsize = small_index2size(i) - nb;
5570 set_size_and_pinuse_of_inuse_chunk(ms, p, nb);
5571 r = chunk_plus_offset(p, nb);
5576 check_malloced_chunk(ms, mem, nb);
5580 else if (ms->treemap != 0 && (mem = tmalloc_small(ms, nb)) != 0) {
5581 check_malloced_chunk(ms, mem, nb);
5587 nb = MAX_SIZE_T; /* Too big to allocate. Force failure (in sys alloc) */
5589 nb = pad_request(bytes);
5590 if (ms->treemap != 0 && (mem = tmalloc_large(ms, nb)) != 0) {
5591 check_malloced_chunk(ms, mem, nb);
5596 if (nb <= ms->dvsize) {
5597 size_t rsize = ms->dvsize - nb;
5600 mchunkptr r = ms->dv = chunk_plus_offset(p, nb);
5603 set_size_and_pinuse_of_inuse_chunk(ms, p, nb);
5612 check_malloced_chunk(ms, mem, nb);
5616 else if (nb < ms->topsize) { /* Split top */
5617 size_t rsize = ms->topsize -= nb;
5619 mchunkptr r = ms->top = chunk_plus_offset(p, nb);
5621 set_size_and_pinuse_of_inuse_chunk(ms, p, nb);
5624 check_malloced_chunk(ms, mem, nb);
5628 mem = sys_alloc(ms, nb);
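The 5532-5628 block repeats the malloc logic above verbatim, but against a caller-supplied mspace (ms) instead of the global state gm; it is the body of mspace_malloc, available when malloc.c is built with MSPACES enabled. A usage sketch of that public API, assuming an MSPACES build; the prototypes below are dlmalloc's documented mspace entry points, declared inline here in place of whatever header your build exposes them through:

    #include <stddef.h>

    typedef void* mspace;
    extern mspace create_mspace(size_t capacity, int locked);
    extern void*  mspace_malloc(mspace msp, size_t bytes);
    extern void   mspace_free(mspace msp, void* mem);
    extern size_t destroy_mspace(mspace msp);

    void demo(void) {
      mspace ms = create_mspace(0, 0);   /* 0: default capacity, no locking */
      void* p = mspace_malloc(ms, 128);  /* runs the 5532-5628 path above   */
      mspace_free(ms, p);
      destroy_mspace(ms);                /* releases everything ms owns     */
    }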
5773 size_t nb = request2size(bytes);
5785 mchunkptr newp = try_realloc_chunk(m, oldp, nb, 1);
5811 size_t nb = request2size(bytes);
5824 mchunkptr newp = try_realloc_chunk(m, oldp, nb, 0);
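Finally, 5773-5785 and 5811-5824 mirror the two realloc call sites for mspaces: the first may move the block, while the second appears to be the in-place variant (can_move = 0) and reports failure instead of relocating. A usage sketch under the same MSPACES assumption as above:

    #include <stddef.h>

    typedef void* mspace;
    extern void* mspace_realloc(mspace msp, void* mem, size_t newsize);

    /* Grow *pp to new_bytes inside ms; on failure the old block is kept. */
    static int grow_in_mspace(mspace ms, void** pp, size_t new_bytes) {
      void* q = mspace_realloc(ms, *pp, new_bytes);  /* may return a moved block */
      if (q == NULL)
        return 0;
      *pp = q;
      return 1;
    }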