Lines matching refs: nb (the padded internal request size, in bytes), grouped by function

3838 static void* mmap_alloc(mstate m, size_t nb) {  in mmap_alloc()  argument
3839 size_t mmsize = mmap_align(nb + SIX_SIZE_T_SIZES + CHUNK_ALIGN_MASK); in mmap_alloc()
3845 if (mmsize > nb) { /* Check for wrap around 0 */ in mmap_alloc()
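
mmap_alloc (3838-3845) pads the normalized request nb with six size_t words of bookkeeping plus alignment slack, rounds up to mmap granularity, and then relies on the single comparison mmsize > nb to detect size_t wraparound: if the padded size overflowed, it is necessarily smaller than nb. A minimal sketch of that check, assuming 4 KiB pages and LP64 defaults (the real mmap_align is configuration-dependent):

    /* Sketch of mmap_alloc's sizing and overflow check (lines 3839, 3845).
     * PAGE_MASK and the macro bodies are assumed defaults, not dlmalloc's
     * exact configuration-dependent definitions. */
    #include <stddef.h>
    #include <stdio.h>

    #define SIZE_T_SIZE       (sizeof(size_t))
    #define SIX_SIZE_T_SIZES  (6 * SIZE_T_SIZE)
    #define CHUNK_ALIGN_MASK  (2 * sizeof(void*) - 1)
    #define PAGE_MASK         ((size_t)4096 - 1)          /* assumed page size */
    #define mmap_align(S)     (((S) + PAGE_MASK) & ~PAGE_MASK)

    int main(void) {
      size_t requests[] = { 1024 * 1024, (size_t)-64 };   /* sane vs. near-max */
      for (int i = 0; i < 2; i++) {
        size_t nb = requests[i];
        size_t mmsize = mmap_align(nb + SIX_SIZE_T_SIZES + CHUNK_ALIGN_MASK);
        if (mmsize > nb)                  /* wraparound check from line 3845 */
          printf("nb=%zu -> mmap %zu bytes\n", nb, mmsize);
        else
          printf("nb=%zu overflowed; allocation fails\n", nb);
      }
      return 0;
    }
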
3870 static mchunkptr mmap_resize(mstate m, mchunkptr oldp, size_t nb, int flags) { in mmap_resize() argument
3873 if (is_small(nb)) /* Can't shrink mmap regions below small size */ in mmap_resize()
3876 if (oldsize >= nb + SIZE_T_SIZE && in mmap_resize()
3877 (oldsize - nb) <= (mparams.granularity << 1)) in mmap_resize()
3882 size_t newmmsize = mmap_align(nb + SIX_SIZE_T_SIZES + CHUNK_ALIGN_MASK); in mmap_resize()
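
mmap_resize (3870-3882) makes a three-way choice: fail outright when nb has shrunk into small-chunk territory (mapped regions are never kept for small sizes), keep the existing mapping when it already fits and wastes at most two granularity units, and otherwise recompute newmmsize and remap. A sketch of that decision, with an assumed 256-byte small cutoff:

    /* Sketch of mmap_resize's fail/keep/remap decision (lines 3873-3877).
     * The small-bin cutoff is an assumed default. */
    #include <stddef.h>

    #define SIZE_T_SIZE  (sizeof(size_t))
    #define is_small(s)  ((s) < 256)     /* assumed NSMALLBINS << SMALLBIN_SHIFT */

    typedef enum { RESIZE_FAIL, RESIZE_KEEP, RESIZE_REMAP } resize_action;

    static resize_action decide(size_t oldsize, size_t nb, size_t granularity) {
      if (is_small(nb))                    /* can't shrink mmap below small size */
        return RESIZE_FAIL;                /* caller falls back to alloc+copy */
      if (oldsize >= nb + SIZE_T_SIZE &&   /* fits with trailing overhead */
          (oldsize - nb) <= (granularity << 1))
        return RESIZE_KEEP;                /* slack tolerable: reuse mapping */
      return RESIZE_REMAP;                 /* recompute newmmsize and remap */
    }
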
3953 size_t nb) { in prepend_alloc() argument
3957 mchunkptr q = chunk_plus_offset(p, nb); in prepend_alloc()
3958 size_t qsize = psize - nb; in prepend_alloc()
3959 set_size_and_pinuse_of_inuse_chunk(m, p, nb); in prepend_alloc()
3989 check_malloced_chunk(m, chunk2mem(p), nb); in prepend_alloc()
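
prepend_alloc (3953-3989) carves an in-use chunk of nb bytes off the front of newly acquired space that sits directly before an existing segment: q marks where the remainder starts and qsize is what is left to be merged or binned. A sketch of the split arithmetic, with a simplified chunk header (the real code also packs PINUSE/CINUSE flag bits into head):

    /* Sketch of the front split in prepend_alloc (lines 3957-3959).
     * The chunk layout here is simplified; flag handling is omitted. */
    #include <stddef.h>
    #include <assert.h>

    typedef struct malloc_chunk { size_t head; } *mchunkptr;
    #define chunk_plus_offset(p, s) ((mchunkptr)((char*)(p) + (s)))

    static mchunkptr split_front(mchunkptr p, size_t psize, size_t nb) {
      mchunkptr q = chunk_plus_offset(p, nb);  /* remainder begins nb bytes in */
      size_t qsize = psize - nb;               /* size left over after the carve */
      p->head = nb;                            /* p becomes the allocated chunk */
      q->head = qsize;                         /* q is free, to be binned/merged */
      assert((char*)q + qsize == (char*)p + psize);
      return q;
    }
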
4049 static void* sys_alloc(mstate m, size_t nb) { in sys_alloc() argument
4058 if (use_mmap(m) && nb >= mparams.mmap_threshold && m->topsize != 0) { in sys_alloc()
4059 void* mem = mmap_alloc(m, nb); in sys_alloc()
4064 asize = granularity_align(nb + SYS_ALLOC_PADDING); in sys_alloc()
4065 if (asize <= nb) in sys_alloc()
4109 if (ssize > nb && ssize < HALF_MAX_SIZE_T && in sys_alloc()
4120 ssize = granularity_align(nb - m->topsize + SYS_ALLOC_PADDING); in sys_alloc()
4132 ssize < nb + SYS_ALLOC_PADDING) { in sys_alloc()
4133 size_t esize = granularity_align(nb + SYS_ALLOC_PADDING - ssize); in sys_alloc()
4175 if (ssize > nb + TOP_FOOT_SIZE) { in sys_alloc()
4234 return prepend_alloc(m, tbase, oldbase, nb); in sys_alloc()
4241 if (nb < m->topsize) { /* Allocate from new or extended top space */ in sys_alloc()
4242 size_t rsize = m->topsize -= nb; in sys_alloc()
4244 mchunkptr r = m->top = chunk_plus_offset(p, nb); in sys_alloc()
4246 set_size_and_pinuse_of_inuse_chunk(m, p, nb); in sys_alloc()
4248 check_malloced_chunk(m, chunk2mem(p), nb); in sys_alloc()
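
sys_alloc (4049-4248) first tries a direct mmap for requests at or above mmap_threshold, then grows a segment by a granularity-aligned amount (guarded repeatedly against overflow, e.g. asize <= nb at 4065), and finally serves the request by splitting the top chunk: top shrinks by nb and the allocated chunk is carved off its old base. A sketch of that final split, with a simplified state struct standing in for mstate:

    /* Sketch of "allocate from new or extended top space" (lines 4241-4248).
     * The heap struct and chunk2mem offset are simplified assumptions. */
    #include <stddef.h>

    typedef struct malloc_chunk { size_t head; } *mchunkptr;
    typedef struct { mchunkptr top; size_t topsize; } heap;
    #define chunk_plus_offset(p, s) ((mchunkptr)((char*)(p) + (s)))
    #define chunk2mem(p)            ((void*)((char*)(p) + 2 * sizeof(size_t)))

    static void* alloc_from_top(heap* m, size_t nb) {
      if (nb >= m->topsize)                 /* top must strictly exceed nb */
        return NULL;                        /* real code acquires more memory */
      mchunkptr p = m->top;
      size_t rsize = m->topsize -= nb;      /* top shrinks by the carve */
      mchunkptr r = m->top = chunk_plus_offset(p, nb);
      r->head = rsize;                      /* remainder is the new top */
      p->head = nb;                         /* carved chunk in use (flags omitted) */
      return chunk2mem(p);
    }
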
4446 static void* tmalloc_large(mstate m, size_t nb) { in tmalloc_large() argument
4448 size_t rsize = -nb; /* Unsigned negation */ in tmalloc_large()
4451 compute_tree_index(nb, idx); in tmalloc_large()
4454 size_t sizebits = nb << leftshift_for_tree_index(idx); in tmalloc_large()
4458 size_t trem = chunksize(t) - nb; in tmalloc_large()
4486 size_t trem = chunksize(t) - nb; in tmalloc_large()
4495 if (v != 0 && rsize < (size_t)(m->dvsize - nb)) { in tmalloc_large()
4497 mchunkptr r = chunk_plus_offset(v, nb); in tmalloc_large()
4498 assert(chunksize(v) == rsize + nb); in tmalloc_large()
4502 set_inuse_and_pinuse(m, v, (rsize + nb)); in tmalloc_large()
4504 set_size_and_pinuse_of_inuse_chunk(m, v, nb); in tmalloc_large()
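
tmalloc_large (4446-4504) does a best-fit walk of the size-keyed trie: idx picks the tree bin, sizebits replays the bits of nb down the tree, and the running best remainder rsize starts at -nb. That unsigned negation yields MAX_SIZE_T - nb + 1, a sentinel larger than any achievable trem = chunksize(t) - nb, so the first fitting chunk always wins. A sketch of the sentinel trick over a flat array instead of the trie:

    /* Sketch of tmalloc_large's best-fit bookkeeping (lines 4448, 4458, 4486).
     * A flat array stands in for the bitwise trie walk. */
    #include <stddef.h>

    static size_t best_fit(const size_t* chunks, size_t n, size_t nb,
                           size_t* out_rsize) {
      size_t rsize = (size_t)0 - nb;     /* unsigned negation: huge sentinel */
      size_t best = n;                   /* index of best chunk; n means none */
      for (size_t i = 0; i < n; i++) {
        if (chunks[i] >= nb) {
          size_t trem = chunks[i] - nb;  /* leftover if this chunk is taken */
          if (trem < rsize) {            /* any real fit beats the sentinel */
            rsize = trem;
            best = i;
          }
        }
      }
      *out_rsize = rsize;
      return best;
    }
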
4517 static void* tmalloc_small(mstate m, size_t nb) { in tmalloc_small() argument
4524 rsize = chunksize(t) - nb; in tmalloc_small()
4527 size_t trem = chunksize(t) - nb; in tmalloc_small()
4535 mchunkptr r = chunk_plus_offset(v, nb); in tmalloc_small()
4536 assert(chunksize(v) == rsize + nb); in tmalloc_small()
4540 set_inuse_and_pinuse(m, v, (rsize + nb)); in tmalloc_small()
4542 set_size_and_pinuse_of_inuse_chunk(m, v, nb); in tmalloc_small()
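
tmalloc_small (4517-4542) pulls the smallest chunk from the least nonempty tree bin, then shares a tail with tmalloc_large: if the remainder rsize falls below MIN_CHUNK_SIZE, the whole chunk is handed out via set_inuse_and_pinuse(m, v, rsize + nb); otherwise the chunk is cut at nb and the remainder is relinked as free. A sketch of that split-or-exhaust choice, with an assumed LP64 MIN_CHUNK_SIZE:

    /* Sketch of the split-or-exhaust tail shared by tmalloc_small and
     * tmalloc_large (lines 4535-4542). MIN_CHUNK_SIZE 32 is an assumed
     * LP64 default. */
    #include <stddef.h>

    #define MIN_CHUNK_SIZE ((size_t)32)

    /* Returns 1 when the remainder is worth keeping as its own free chunk. */
    static int should_split(size_t chunksize, size_t nb) {
      size_t rsize = chunksize - nb;   /* caller guarantees chunksize >= nb */
      return rsize >= MIN_CHUNK_SIZE;  /* too-small remainders stay attached */
    }
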
4586 size_t nb; in dlmalloc() local
4590 nb = (bytes < MIN_REQUEST)? MIN_CHUNK_SIZE : pad_request(bytes); in dlmalloc()
4591 idx = small_index(nb); in dlmalloc()
4603 check_malloced_chunk(gm, mem, nb); in dlmalloc()
4607 else if (nb > gm->dvsize) { in dlmalloc()
4619 rsize = small_index2size(i) - nb; in dlmalloc()
4624 set_size_and_pinuse_of_inuse_chunk(gm, p, nb); in dlmalloc()
4625 r = chunk_plus_offset(p, nb); in dlmalloc()
4630 check_malloced_chunk(gm, mem, nb); in dlmalloc()
4634 else if (gm->treemap != 0 && (mem = tmalloc_small(gm, nb)) != 0) { in dlmalloc()
4635 check_malloced_chunk(gm, mem, nb); in dlmalloc()
4641 nb = MAX_SIZE_T; /* Too big to allocate. Force failure (in sys alloc) */ in dlmalloc()
4643 nb = pad_request(bytes); in dlmalloc()
4644 if (gm->treemap != 0 && (mem = tmalloc_large(gm, nb)) != 0) { in dlmalloc()
4645 check_malloced_chunk(gm, mem, nb); in dlmalloc()
4650 if (nb <= gm->dvsize) { in dlmalloc()
4651 size_t rsize = gm->dvsize - nb; in dlmalloc()
4654 mchunkptr r = gm->dv = chunk_plus_offset(p, nb); in dlmalloc()
4657 set_size_and_pinuse_of_inuse_chunk(gm, p, nb); in dlmalloc()
4666 check_malloced_chunk(gm, mem, nb); in dlmalloc()
4670 else if (nb < gm->topsize) { /* Split top */ in dlmalloc()
4671 size_t rsize = gm->topsize -= nb; in dlmalloc()
4673 mchunkptr r = gm->top = chunk_plus_offset(p, nb); in dlmalloc()
4675 set_size_and_pinuse_of_inuse_chunk(gm, p, nb); in dlmalloc()
4678 check_malloced_chunk(gm, mem, nb); in dlmalloc()
4682 mem = sys_alloc(gm, nb); in dlmalloc()
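
dlmalloc (4586-4682) normalizes the user request once at the top: anything below MIN_REQUEST becomes MIN_CHUNK_SIZE, everything else is padded and aligned by pad_request, and oversized requests are forced to MAX_SIZE_T so sys_alloc fails them. The rest of the function is a fallback chain on nb: exact small bin, larger small bin, tmalloc_small, the dv chunk, the tree bins, top, and finally sys_alloc. A sketch of the normalization under assumed LP64 defaults without footers:

    /* Sketch of dlmalloc's request normalization (lines 4590-4591, 4641-4643).
     * All constants are assumed LP64 defaults. */
    #include <stddef.h>
    #include <stdio.h>

    #define CHUNK_OVERHEAD    sizeof(size_t)
    #define CHUNK_ALIGN_MASK  (2 * sizeof(void*) - 1)
    #define MIN_CHUNK_SIZE    (4 * sizeof(size_t))
    #define MIN_REQUEST       (MIN_CHUNK_SIZE - CHUNK_OVERHEAD - 1)
    #define pad_request(req) \
      (((req) + CHUNK_OVERHEAD + CHUNK_ALIGN_MASK) & ~CHUNK_ALIGN_MASK)
    #define SMALLBIN_SHIFT 3
    #define small_index(s) ((s) >> SMALLBIN_SHIFT)

    int main(void) {
      size_t samples[] = { 1, 23, 24, 120, 200 };
      for (int i = 0; i < 5; i++) {
        size_t bytes = samples[i];
        size_t nb = (bytes < MIN_REQUEST) ? MIN_CHUNK_SIZE : pad_request(bytes);
        printf("request %3zu -> nb %3zu (small bin %zu)\n",
               bytes, nb, small_index(nb));
      }
      return 0;
    }
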
4831 static mchunkptr try_realloc_chunk(mstate m, mchunkptr p, size_t nb, in try_realloc_chunk() argument
4839 newp = mmap_resize(m, p, nb, can_move); in try_realloc_chunk()
4841 else if (oldsize >= nb) { /* already big enough */ in try_realloc_chunk()
4842 size_t rsize = oldsize - nb; in try_realloc_chunk()
4844 mchunkptr r = chunk_plus_offset(p, nb); in try_realloc_chunk()
4845 set_inuse(m, p, nb); in try_realloc_chunk()
4852 if (oldsize + m->topsize > nb) { in try_realloc_chunk()
4854 size_t newtopsize = newsize - nb; in try_realloc_chunk()
4855 mchunkptr newtop = chunk_plus_offset(p, nb); in try_realloc_chunk()
4856 set_inuse(m, p, nb); in try_realloc_chunk()
4865 if (oldsize + dvs >= nb) { in try_realloc_chunk()
4866 size_t dsize = oldsize + dvs - nb; in try_realloc_chunk()
4868 mchunkptr r = chunk_plus_offset(p, nb); in try_realloc_chunk()
4870 set_inuse(m, p, nb); in try_realloc_chunk()
4887 if (oldsize + nextsize >= nb) { in try_realloc_chunk()
4888 size_t rsize = oldsize + nextsize - nb; in try_realloc_chunk()
4895 mchunkptr r = chunk_plus_offset(p, nb); in try_realloc_chunk()
4896 set_inuse(m, p, nb); in try_realloc_chunk()
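
try_realloc_chunk (4831-4896) attempts to satisfy the new size nb without copying, in a fixed order: mmapped chunks go to mmap_resize; a chunk already at least nb bytes is shrunk in place (splitting off a remainder only when it is large enough); otherwise the chunk tries to absorb the adjacent top, then the dv chunk, then any free next chunk. Note the asymmetry: extending into top needs oldsize + topsize > nb strictly, since a non-empty top must survive, while dv and a free neighbor may be consumed whole (>=). A sketch of the decision order, with simplified stand-in arguments:

    /* Sketch of try_realloc_chunk's in-place strategy order
     * (lines 4839-4896). The flags are simplified stand-ins for the
     * chunk/neighbor state the real code reads from the heap. */
    #include <stddef.h>

    typedef enum { PLAN_FAIL, PLAN_MMAP_RESIZE, PLAN_SHRINK,
                   PLAN_ABSORB_TOP, PLAN_ABSORB_DV, PLAN_ABSORB_NEXT } plan;

    static plan realloc_plan(int is_mmapped, size_t oldsize, size_t nb,
                             int next_is_top, size_t topsize,
                             int next_is_dv, size_t dvsize,
                             int next_is_free, size_t nextsize) {
      if (is_mmapped)                               return PLAN_MMAP_RESIZE;
      if (oldsize >= nb)                            return PLAN_SHRINK;
      if (next_is_top && oldsize + topsize > nb)    return PLAN_ABSORB_TOP;
      if (next_is_dv && oldsize + dvsize >= nb)     return PLAN_ABSORB_DV;
      if (next_is_free && oldsize + nextsize >= nb) return PLAN_ABSORB_NEXT;
      return PLAN_FAIL;             /* caller allocs + copies if can_move */
    }
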
4925 size_t nb = request2size(bytes); in internal_memalign() local
4926 size_t req = nb + alignment + MIN_CHUNK_SIZE - CHUNK_OVERHEAD; in internal_memalign()
4969 if (size > nb + MIN_CHUNK_SIZE) { in internal_memalign()
4970 size_t remainder_size = size - nb; in internal_memalign()
4971 mchunkptr remainder = chunk_plus_offset(p, nb); in internal_memalign()
4972 set_inuse(m, p, nb); in internal_memalign()
4979 assert (chunksize(p) >= nb); in internal_memalign()
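
internal_memalign (4925-4979) over-allocates by alignment + MIN_CHUNK_SIZE - CHUNK_OVERHEAD so that an aligned chunk of nb bytes is guaranteed to fit somewhere inside the raw allocation, whatever address comes back; any misaligned leader is released as its own chunk, and a trailing remainder is split off only when size exceeds nb + MIN_CHUNK_SIZE. A sketch of the sizing and the aligned cut point, under assumed LP64 constants and a simplified helper:

    /* Sketch of internal_memalign's over-allocation and cut point
     * (lines 4926, 4969-4971). Constants and the helper are assumed,
     * simplified versions of the real chunk arithmetic. */
    #include <stddef.h>
    #include <stdint.h>

    #define MIN_CHUNK_SIZE ((size_t)32)
    #define CHUNK_OVERHEAD ((size_t)8)

    static size_t memalign_request(size_t nb, size_t alignment) {
      return nb + alignment + MIN_CHUNK_SIZE - CHUNK_OVERHEAD;  /* line 4926 */
    }

    /* First usable aligned address inside the raw allocation: if raw is
     * misaligned, the cut must leave a leader of at least MIN_CHUNK_SIZE. */
    static uintptr_t aligned_cut(uintptr_t raw, size_t alignment) {
      uintptr_t a = (raw + alignment - 1) & ~(uintptr_t)(alignment - 1);
      if (a != raw && a - raw < MIN_CHUNK_SIZE)
        a += alignment;        /* skip forward: leader must be a valid chunk */
      return a;
    }
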
5226 size_t nb = request2size(bytes); in dlrealloc() local
5242 mchunkptr newp = try_realloc_chunk(m, oldp, nb, 1); in dlrealloc()
5272 size_t nb = request2size(bytes); in dlrealloc_in_place() local
5284 mchunkptr newp = try_realloc_chunk(m, oldp, nb, 0); in dlrealloc_in_place()
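
dlrealloc and dlrealloc_in_place (5226-5284) differ only in the can_move flag handed to try_realloc_chunk: 1 lets dlrealloc fall back to allocate-copy-free when in-place resizing fails, while 0 makes dlrealloc_in_place return NULL instead of moving the block. A usage sketch combining the two:

    /* Usage sketch of the can_move split (lines 5242 and 5284): try to
     * grow in place first, move only if that fails. */
    #include <stddef.h>

    extern void* dlrealloc(void* mem, size_t bytes);
    extern void* dlrealloc_in_place(void* mem, size_t bytes);

    static void* grow_preferring_in_place(void* p, size_t bytes) {
      void* q = dlrealloc_in_place(p, bytes);  /* can_move = 0: NULL if it must move */
      return q ? q : dlrealloc(p, bytes);      /* can_move = 1: may alloc+copy+free */
    }
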
5543 size_t nb; in mspace_malloc() local
5547 nb = (bytes < MIN_REQUEST)? MIN_CHUNK_SIZE : pad_request(bytes); in mspace_malloc()
5548 idx = small_index(nb); in mspace_malloc()
5560 check_malloced_chunk(ms, mem, nb); in mspace_malloc()
5564 else if (nb > ms->dvsize) { in mspace_malloc()
5576 rsize = small_index2size(i) - nb; in mspace_malloc()
5581 set_size_and_pinuse_of_inuse_chunk(ms, p, nb); in mspace_malloc()
5582 r = chunk_plus_offset(p, nb); in mspace_malloc()
5587 check_malloced_chunk(ms, mem, nb); in mspace_malloc()
5591 else if (ms->treemap != 0 && (mem = tmalloc_small(ms, nb)) != 0) { in mspace_malloc()
5592 check_malloced_chunk(ms, mem, nb); in mspace_malloc()
5598 nb = MAX_SIZE_T; /* Too big to allocate. Force failure (in sys alloc) */ in mspace_malloc()
5600 nb = pad_request(bytes); in mspace_malloc()
5601 if (ms->treemap != 0 && (mem = tmalloc_large(ms, nb)) != 0) { in mspace_malloc()
5602 check_malloced_chunk(ms, mem, nb); in mspace_malloc()
5607 if (nb <= ms->dvsize) { in mspace_malloc()
5608 size_t rsize = ms->dvsize - nb; in mspace_malloc()
5611 mchunkptr r = ms->dv = chunk_plus_offset(p, nb); in mspace_malloc()
5614 set_size_and_pinuse_of_inuse_chunk(ms, p, nb); in mspace_malloc()
5623 check_malloced_chunk(ms, mem, nb); in mspace_malloc()
5627 else if (nb < ms->topsize) { /* Split top */ in mspace_malloc()
5628 size_t rsize = ms->topsize -= nb; in mspace_malloc()
5630 mchunkptr r = ms->top = chunk_plus_offset(p, nb); in mspace_malloc()
5632 set_size_and_pinuse_of_inuse_chunk(ms, p, nb); in mspace_malloc()
5635 check_malloced_chunk(ms, mem, nb); in mspace_malloc()
5639 mem = sys_alloc(ms, nb); in mspace_malloc()
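
mspace_malloc (5543-5639) is the same algorithm as dlmalloc, step for step; the only difference is that every operation runs against a caller-created mspace (ms) instead of the global state gm. A hedged usage sketch, assuming a build with MSPACES enabled:

    /* Usage sketch of the mspace API around mspace_malloc. Assumes the
     * library was compiled with MSPACES (and typically ONLY_MSPACES). */
    #include <stddef.h>

    typedef void* mspace;
    extern mspace create_mspace(size_t capacity, int locked);
    extern void*  mspace_malloc(mspace msp, size_t bytes);
    extern size_t destroy_mspace(mspace msp);

    static int demo(void) {
      mspace arena = create_mspace(0, 0);   /* 0 capacity: default initial size */
      void* p = mspace_malloc(arena, 128);  /* same nb normalization as dlmalloc */
      if (p == NULL) return -1;
      return (int)destroy_mspace(arena);    /* releases every chunk in the arena */
    }
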
5784 size_t nb = request2size(bytes); in mspace_realloc() local
5796 mchunkptr newp = try_realloc_chunk(m, oldp, nb, 1); in mspace_realloc()
5822 size_t nb = request2size(bytes); in mspace_realloc_in_place() local
5835 mchunkptr newp = try_realloc_chunk(m, oldp, nb, 0); in mspace_realloc_in_place()
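
mspace_realloc and mspace_realloc_in_place (5784-5835) mirror the dl-prefixed pair above exactly, passing can_move = 1 and 0 respectively to try_realloc_chunk against the chunk's owning mspace.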