Lines Matching refs:nb
(nb is this allocator's padded request size: the caller's byte count after adding chunk overhead and rounding up to the chunk alignment.)
3667 static void* mmap_alloc(mstate m, size_t nb) {
3668 size_t mmsize = mmap_align(nb + SIX_SIZE_T_SIZES + CHUNK_ALIGN_MASK);
3669 if (mmsize > nb) { /* Check for wrap around 0 */
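
The mmap_alloc lines above pad nb with extra header room (SIX_SIZE_T_SIZES plus the alignment mask), round up via mmap_align, and then reject the request if the arithmetic wrapped past zero. These matches appear to come from a dlmalloc-style allocator, so here is a minimal sketch of that overflow guard, assuming 4 KiB pages and hypothetical stand-in constants for mmap_align and SIX_SIZE_T_SIZES:

#include <stddef.h>

/* Hypothetical stand-ins for the configured constants. */
#define PAGE_ALIGN_MASK ((size_t)4095)        /* assume 4 KiB pages        */
#define MMAP_EXTRA      (6 * sizeof(size_t))  /* like SIX_SIZE_T_SIZES     */

/* Round a padded request up to a page boundary, or return 0 on overflow. */
static size_t padded_mmap_size(size_t nb) {
  size_t mmsize = (nb + MMAP_EXTRA + PAGE_ALIGN_MASK) & ~PAGE_ALIGN_MASK;
  /* If nb was near the top of size_t the addition wrapped, leaving
     mmsize <= nb: report failure instead of mapping a tiny region. */
  return (mmsize > nb) ? mmsize : 0;
}
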
3694 static mchunkptr mmap_resize(mstate m, mchunkptr oldp, size_t nb) {
3696 if (is_small(nb)) /* Can't shrink mmap regions below small size */
3699 if (oldsize >= nb + SIZE_T_SIZE &&
3700 (oldsize - nb) <= (mparams.granularity << 1))
3705 size_t newmmsize = mmap_align(nb + SIX_SIZE_T_SIZES + CHUNK_ALIGN_MASK);
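
mmap_resize (3694-3705) refuses to shrink a mapped region below the small-chunk range, keeps the existing mapping when it is already large enough and the slack is at most twice the granularity, and otherwise recomputes a padded size for remapping. A sketch of just that decision, with granularity and small_limit passed in as stand-ins for mparams.granularity and the is_small cutoff:

#include <stddef.h>

enum { KEEP, REMAP, FAIL };

/* Decide what to do with an mmapped chunk of oldsize bytes when the new
   padded request is nb. */
static int mmap_resize_decision(size_t oldsize, size_t nb,
                                size_t granularity, size_t small_limit) {
  if (nb < small_limit)                      /* can't shrink below small size */
    return FAIL;                             /* caller falls back to copying  */
  if (oldsize >= nb + sizeof(size_t) &&      /* already big enough, and       */
      (oldsize - nb) <= (granularity << 1))  /* slack is tolerably small      */
    return KEEP;
  return REMAP;                              /* otherwise remap/mremap        */
}
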
3775 size_t nb) {
3779 mchunkptr q = chunk_plus_offset(p, nb);
3780 size_t qsize = psize - nb;
3781 set_size_and_pinuse_of_inuse_chunk(m, p, nb);
3811 check_malloced_chunk(m, chunk2mem(p), nb);
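
The lines at 3775-3811 sit in what appears to be prepend_alloc (sys_alloc calls it at 4048): a chunk of nb bytes is carved off the front of a psize-byte block, with q = chunk_plus_offset(p, nb) being plain byte arithmetic on the chunk pointer and the remainder keeping qsize = psize - nb. A simplified sketch of that arithmetic, using a bare two-word header instead of dlmalloc's real malloc_chunk (which also packs PINUSE/CINUSE flag bits into the size word):

#include <stddef.h>

/* Simplified chunk header; flag bits are omitted here. */
struct chunk {
  size_t prev_foot;   /* size of the previous chunk, when that chunk is free */
  size_t head;        /* size of this chunk                                   */
};

/* chunk_plus_offset: a chunk pointer advanced by a byte distance. */
static struct chunk* chunk_plus_offset(struct chunk* p, size_t dist) {
  return (struct chunk*)((char*)p + dist);
}

/* Split a psize-byte block at p into an nb-byte chunk for the caller and a
   (psize - nb)-byte remainder, as the prepend_alloc lines above do. */
static struct chunk* split_front(struct chunk* p, size_t psize, size_t nb) {
  struct chunk* q = chunk_plus_offset(p, nb);
  p->head = nb;          /* real code: set_size_and_pinuse_of_inuse_chunk */
  q->head = psize - nb;  /* remainder; later freed, or merged into top/dv */
  return q;
}
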
3871 static void* sys_alloc(mstate m, size_t nb) {
3879 if (use_mmap(m) && nb >= mparams.mmap_threshold && m->topsize != 0) {
3880 void* mem = mmap_alloc(m, nb);
3902 we can malloc nb bytes upon success, so pad with enough space for
3916 asize = granularity_align(nb + SYS_ALLOC_PADDING);
3930 asize = granularity_align(nb - m->topsize + SYS_ALLOC_PADDING);
3942 asize < nb + SYS_ALLOC_PADDING) {
3943 size_t esize = granularity_align(nb + SYS_ALLOC_PADDING - asize);
3967 size_t rsize = granularity_align(nb + SYS_ALLOC_PADDING);
3968 if (rsize > nb) { /* Fail if wraps around zero */
3979 size_t asize = granularity_align(nb + SYS_ALLOC_PADDING);
3989 if (ssize > nb + TOP_FOOT_SIZE) {
4048 return prepend_alloc(m, tbase, oldbase, nb);
4055 if (nb < m->topsize) { /* Allocate from new or extended top space */
4056 size_t rsize = m->topsize -= nb;
4058 mchunkptr r = m->top = chunk_plus_offset(p, nb);
4060 set_size_and_pinuse_of_inuse_chunk(m, p, nb);
4062 check_malloced_chunk(m, chunk2mem(p), nb);
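
The tail of sys_alloc (4055-4062) allocates from the new or extended top chunk: topsize shrinks by nb, top moves nb bytes forward, the remainder becomes the new top with its prev-in-use bit set, and the handed-out chunk is marked in use. A sketch of that split, with explicit flag bits standing in for dlmalloc's set_size_and_pinuse macros:

#include <stddef.h>

#define PINUSE_BIT ((size_t)1)   /* previous chunk in use: low bit of head   */
#define CINUSE_BIT ((size_t)2)   /* this chunk in use: second bit of head    */

struct chunk { size_t prev_foot, head; };
struct state { struct chunk* top; size_t topsize; };

/* Allocate nb bytes from the top chunk; only legal when nb < topsize, so
   the remainder is still a valid top chunk afterwards. */
static struct chunk* alloc_from_top(struct state* m, size_t nb) {
  size_t rsize = m->topsize -= nb;                /* remaining top space     */
  struct chunk* p = m->top;
  struct chunk* r = m->top = (struct chunk*)((char*)p + nb);
  r->head = rsize | PINUSE_BIT;                   /* new top, prev is in use */
  p->head = nb | PINUSE_BIT | CINUSE_BIT;         /* chunk handed to caller  */
  return p;                                       /* caller then chunk2mem's */
}
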
4189 static void* tmalloc_large(mstate m, size_t nb) {
4191 size_t rsize = -nb; /* Unsigned negation */
4194 compute_tree_index(nb, idx);
4196 /* Traverse tree for this bin looking for node with size == nb */
4197 size_t sizebits = nb << leftshift_for_tree_index(idx);
4201 size_t trem = chunksize(t) - nb;
4212 t = rst; /* set t to least subtree holding sizes > nb */
4229 size_t trem = chunksize(t) - nb;
4238 if (v != 0 && rsize < (size_t)(m->dvsize - nb)) {
4240 mchunkptr r = chunk_plus_offset(v, nb);
4241 assert(chunksize(v) == rsize + nb);
4245 set_inuse_and_pinuse(m, v, (rsize + nb));
4247 set_size_and_pinuse_of_inuse_chunk(m, v, nb);
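
tmalloc_large (4189-4247) is a best-fit search of one treebin: the bin is a bitwise trie keyed on chunk size, the walk is steered by successive bits of nb (sizebits), the node with the smallest trem = chunksize(t) - nb is remembered, and the least right subtree skipped along the way (rst) is kept as the set of sizes known to exceed nb. A compressed sketch over a hypothetical node type; the real tree chunk also carries parent links and a ring of same-sized chunks, and the caller still compares the result against dv (line 4238):

#include <stddef.h>

/* Hypothetical trie node; dlmalloc's tree chunk has more fields. */
struct tnode {
  size_t size;
  struct tnode* child[2];
};

#define SIZE_T_BITS (8 * sizeof(size_t))

/* Best-fit search of one treebin for a free chunk of at least nb bytes.
   sizebits is nb pre-shifted so the bin's distinguishing bit is topmost. */
static struct tnode* best_fit(struct tnode* t, size_t nb, size_t sizebits) {
  struct tnode* v = NULL;             /* best candidate so far              */
  size_t rsize = (size_t)0 - nb;      /* unsigned negation: huge sentinel   */
  struct tnode* rst = NULL;           /* least subtree holding sizes > nb   */

  while (t != NULL) {                 /* descend, steered by bits of nb     */
    size_t trem = t->size - nb;
    if (t->size >= nb && trem < rsize) {
      v = t;
      if ((rsize = trem) == 0) break; /* exact fit                          */
    }
    struct tnode* rt = t->child[1];
    struct tnode* next = t->child[(sizebits >> (SIZE_T_BITS - 1)) & 1];
    if (rt != NULL && rt != next)
      rst = rt;                       /* right subtree we stepped past      */
    if (next == NULL) { t = rst; break; } /* path ended: fall back to rst   */
    t = next;
    sizebits <<= 1;
  }

  while (t != NULL) {                 /* find the smallest usable chunk     */
    size_t trem = t->size - nb;
    if (t->size >= nb && trem < rsize) { rsize = trem; v = t; }
    t = (t->child[0] != NULL) ? t->child[0] : t->child[1];
  }
  return v;                           /* NULL: nothing >= nb in this bin    */
}
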
4260 static void* tmalloc_small(mstate m, size_t nb) {
4267 rsize = chunksize(t) - nb;
4270 size_t trem = chunksize(t) - nb;
4278 mchunkptr r = chunk_plus_offset(v, nb);
4279 assert(chunksize(v) == rsize + nb);
4283 set_inuse_and_pinuse(m, v, (rsize + nb));
4285 set_size_and_pinuse_of_inuse_chunk(m, v, nb);
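
Both tmalloc paths finish the same way (4240-4247 and 4278-4285): if the leftover rsize would be smaller than a minimum chunk, the whole chunk is handed out via set_inuse_and_pinuse(m, v, rsize + nb); otherwise the chunk is split at nb and the remainder kept, in tmalloc_small as the new designated victim (dv). A sketch of just that size test, with MIN_CHUNK as an illustrative stand-in for MIN_CHUNK_SIZE:

#include <stddef.h>
#include <stdbool.h>

#define MIN_CHUNK ((size_t)32)   /* stand-in for MIN_CHUNK_SIZE */

/* For a chosen free chunk of csize bytes (csize >= nb), decide whether to
   split; if so, *remainder gets the size of the piece to keep. */
static bool should_split(size_t csize, size_t nb, size_t* remainder) {
  size_t rsize = csize - nb;
  if (rsize < MIN_CHUNK) {     /* leftover too small to stand alone:        */
    *remainder = 0;            /* give the caller the whole chunk           */
    return false;
  }
  *remainder = rsize;          /* split: keep rsize bytes (the new dv here) */
  return true;
}
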
4315 size_t nb = request2size(bytes);
4317 newp = mmap_resize(m, oldp, nb);
4318 else if (oldsize >= nb) { /* already big enough */
4319 size_t rsize = oldsize - nb;
4322 mchunkptr remainder = chunk_plus_offset(newp, nb);
4323 set_inuse(m, newp, nb);
4328 else if (next == m->top && oldsize + m->topsize > nb) {
4331 size_t newtopsize = newsize - nb;
4332 mchunkptr newtop = chunk_plus_offset(oldp, nb);
4333 set_inuse(m, oldp, nb);
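
The realloc matches (4315-4333) show the two in-place paths once nb = request2size(bytes) is known: a chunk that already holds nb may be trimmed, splitting off the tail as a remainder, and a chunk whose successor is top may grow into top when oldsize + topsize exceeds nb. A sketch of the decision only; the surrounding code also routes mmapped chunks through mmap_resize and otherwise falls back to allocate-copy-free:

#include <stddef.h>

enum realloc_plan {
  SHRINK_IN_PLACE,   /* oldsize >= nb: optionally split off the tail        */
  EXTEND_INTO_TOP,   /* successor is top and top has enough spare room      */
  MOVE               /* fall back to malloc + memcpy + free                 */
};

static enum realloc_plan plan_realloc(size_t oldsize, size_t nb,
                                      int next_is_top, size_t topsize) {
  if (oldsize >= nb)
    /* A remainder of at least MIN_CHUNK_SIZE is split off and freed;
       anything smaller is simply left attached to the chunk. */
    return SHRINK_IN_PLACE;
  if (next_is_top && oldsize + topsize > nb)
    /* Absorb part of top: the new top starts at oldp + nb and keeps
       oldsize + topsize - nb bytes, as in lines 4328-4333. */
    return EXTEND_INTO_TOP;
  return MOVE;
}
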
4391 size_t nb = request2size(bytes);
4392 size_t req = nb + alignment + MIN_CHUNK_SIZE - CHUNK_OVERHEAD;
4434 if (size > nb + MIN_CHUNK_SIZE) {
4435 size_t remainder_size = size - nb;
4436 nb);
4437 set_inuse(m, p, nb);
4443 assert (chunksize(p) >= nb);
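
The memalign matches (4391-4443) show the over-allocation trick: requesting req = nb + alignment + MIN_CHUNK_SIZE - CHUNK_OVERHEAD guarantees that an aligned chunk with nb usable bytes fits somewhere inside whatever malloc returns; leading slack becomes its own chunk, and trailing slack is split off only when the chunk still exceeds nb + MIN_CHUNK_SIZE. A sketch of the arithmetic, assuming a power-of-two alignment and illustrative values for the two constants:

#include <stddef.h>
#include <stdint.h>

#define CHUNK_OVERHEAD ((size_t)8)    /* stand-in: one size_t of header      */
#define MIN_CHUNK      ((size_t)32)   /* stand-in for MIN_CHUNK_SIZE         */

/* How much to request so an alignment-aligned chunk of nb bytes is certain
   to fit inside the returned block (alignment must be a power of two). */
static size_t memalign_request(size_t nb, size_t alignment) {
  return nb + alignment + MIN_CHUNK - CHUNK_OVERHEAD;
}

/* First address at or after addr that satisfies the alignment. */
static uintptr_t align_up(uintptr_t addr, size_t alignment) {
  return (addr + alignment - 1) & ~((uintptr_t)alignment - 1);
}

/* Whether the space past the aligned nb-byte chunk is worth splitting off:
   lines 4434-4437 split only if size > nb + MIN_CHUNK_SIZE, so that the
   remainder is itself a legal chunk. */
static int split_trailing(size_t size, size_t nb) {
  return size > nb + MIN_CHUNK;
}
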
4623 size_t nb;
4627 nb = (bytes < MIN_REQUEST)? MIN_CHUNK_SIZE : pad_request(bytes);
4628 idx = small_index(nb);
4640 check_malloced_chunk(gm, mem, nb);
4644 else if (nb > gm->dvsize) {
4656 rsize = small_index2size(i) - nb;
4661 set_size_and_pinuse_of_inuse_chunk(gm, p, nb);
4662 r = chunk_plus_offset(p, nb);
4667 check_malloced_chunk(gm, mem, nb);
4671 else if (gm->treemap != 0 && (mem = tmalloc_small(gm, nb)) != 0) {
4672 check_malloced_chunk(gm, mem, nb);
4678 nb = MAX_SIZE_T; /* Too big to allocate. Force failure (in sys alloc) */
4680 nb = pad_request(bytes);
4681 if (gm->treemap != 0 && (mem = tmalloc_large(gm, nb)) != 0) {
4682 check_malloced_chunk(gm, mem, nb);
4687 if (nb <= gm->dvsize) {
4688 size_t rsize = gm->dvsize - nb;
4691 mchunkptr r = gm->dv = chunk_plus_offset(p, nb);
4694 set_size_and_pinuse_of_inuse_chunk(gm, p, nb);
4703 check_malloced_chunk(gm, mem, nb);
4707 else if (nb < gm->topsize) { /* Split top */
4708 size_t rsize = gm->topsize -= nb;
4710 mchunkptr r = gm->top = chunk_plus_offset(p, nb);
4712 set_size_and_pinuse_of_inuse_chunk(gm, p, nb);
4715 check_malloced_chunk(gm, mem, nb);
4719 mem = sys_alloc(gm, nb);
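
Lines 4623-4719 are the gm (global malloc_state) path, i.e. dlmalloc itself, and they show how the user's byte count becomes nb: small requests get pad_request with a MIN_CHUNK_SIZE floor, oversize requests are forced to MAX_SIZE_T so sys_alloc fails cleanly, and small_index(nb) selects the smallbin. A sketch of that normalization under common assumptions (16-byte chunk alignment, 8-byte overhead, 32-byte minimum chunk); the real constants are derived from SIZE_T_SIZE and MALLOC_ALIGNMENT at build time:

#include <stddef.h>

/* Illustrative stand-ins for the build-time constants. */
#define CHUNK_OVERHEAD_   ((size_t)8)
#define CHUNK_ALIGN_MASK_ ((size_t)15)          /* 16-byte chunk alignment   */
#define MIN_CHUNK_SIZE_   ((size_t)32)
#define MIN_REQUEST_      (MIN_CHUNK_SIZE_ - CHUNK_OVERHEAD_ - 1)
#define MAX_REQUEST_      ((size_t)-128)        /* illustrative cap          */
#define SMALLBIN_SHIFT_   3

/* pad_request: user bytes -> chunk size, rounded to chunk alignment. */
static size_t pad_request(size_t req) {
  return (req + CHUNK_OVERHEAD_ + CHUNK_ALIGN_MASK_) & ~CHUNK_ALIGN_MASK_;
}

/* The nb computation performed at the top of dlmalloc / mspace_malloc. */
static size_t request2nb(size_t bytes) {
  if (bytes >= MAX_REQUEST_)
    return (size_t)-1;            /* MAX_SIZE_T: guaranteed to fail later */
  if (bytes < MIN_REQUEST_)
    return MIN_CHUNK_SIZE_;       /* never hand out less than a min chunk */
  return pad_request(bytes);
}

/* small_index: which smallbin serves an nb of this size. */
static unsigned small_index(size_t nb) {
  return (unsigned)(nb >> SMALLBIN_SHIFT_);
}
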
5058 size_t nb;
5062 nb = (bytes < MIN_REQUEST)? MIN_CHUNK_SIZE : pad_request(bytes);
5063 idx = small_index(nb);
5075 check_malloced_chunk(ms, mem, nb);
5079 else if (nb > ms->dvsize) {
5091 rsize = small_index2size(i) - nb;
5096 set_size_and_pinuse_of_inuse_chunk(ms, p, nb);
5097 r = chunk_plus_offset(p, nb);
5102 check_malloced_chunk(ms, mem, nb);
5106 else if (ms->treemap != 0 && (mem = tmalloc_small(ms, nb)) != 0) {
5107 check_malloced_chunk(ms, mem, nb);
5113 nb = MAX_SIZE_T; /* Too big to allocate. Force failure (in sys alloc) */
5115 nb = pad_request(bytes);
5116 if (ms->treemap != 0 && (mem = tmalloc_large(ms, nb)) != 0) {
5117 check_malloced_chunk(ms, mem, nb);
5122 if (nb <= ms->dvsize) {
5123 size_t rsize = ms->dvsize - nb;
5126 mchunkptr r = ms->dv = chunk_plus_offset(p, nb);
5129 set_size_and_pinuse_of_inuse_chunk(ms, p, nb);
5138 check_malloced_chunk(ms, mem, nb);
5142 else if (nb < ms->topsize) { /* Split top */
5143 size_t rsize = ms->topsize -= nb;
5145 mchunkptr r = ms->top = chunk_plus_offset(p, nb);
5147 set_size_and_pinuse_of_inuse_chunk(ms, p, nb);
5150 check_malloced_chunk(ms, mem, nb);
5154 mem = sys_alloc(ms, nb);
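
The mspace_malloc matches (5058-5154) mirror the dlmalloc block above line for line; the only difference is that the mstate ms belongs to a caller-created mspace instead of the global gm. Assuming the allocator is built with MSPACES enabled, a minimal usage sketch of the public mspace API (create_mspace, mspace_malloc, mspace_free, destroy_mspace); the declarations are repeated here only to keep the sketch self-contained, normally they come from the allocator's own header:

#include <stdio.h>
#include <string.h>

typedef void* mspace;
extern mspace create_mspace(size_t capacity, int locked);
extern size_t destroy_mspace(mspace msp);
extern void*  mspace_malloc(mspace msp, size_t bytes);
extern void   mspace_free(mspace msp, void* mem);

int main(void) {
  mspace arena = create_mspace(0, 0);   /* 0: default initial capacity, no locks */
  char* s = mspace_malloc(arena, 64);   /* goes through the same nb pipeline     */
  if (s != NULL) {
    strcpy(s, "allocated from a private mspace");
    puts(s);
    mspace_free(arena, s);
  }
  destroy_mspace(arena);                /* releases the whole arena              */
  return 0;
}
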