Lines Matching refs:stack
199 // If we don't use stack depot, we store the alloc/free stack traces
227 static void GetStackTraceFromId(u32 id, StackTrace *stack) {
232 internal_memcpy(stack->trace, trace, sizeof(uptr) * size);
233 stack->size = size;
236 void AsanChunkView::GetAllocStack(StackTrace *stack) {
238 GetStackTraceFromId(chunk_->alloc_context_id, stack);
240 StackTrace::UncompressStack(stack, chunk_->AllocStackBeg(),
244 void AsanChunkView::GetFreeStack(StackTrace *stack) {
246 GetStackTraceFromId(chunk_->free_context_id, stack);
248 StackTrace::UncompressStack(stack, chunk_->FreeStackBeg(),
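Two storage strategies are visible above: when the stack depot is compiled in, the alloc/free stacks live behind 32-bit context ids (filled in by StackDepotPut at 396 and 469, read back through GetStackTraceFromId at 227); otherwise they are compressed into the chunk itself via CompressStack/UncompressStack. A minimal sketch of the depot idea follows; the class and method names are illustrative only, not the sanitizer_common API.

#include <cstddef>
#include <cstdint>
#include <map>
#include <vector>

// Deduplicates stack traces and hands out a small integer id that fits
// in a chunk header; the same trace always yields the same id.
class MiniStackDepot {
 public:
  uint32_t Put(const uintptr_t *trace, size_t size) {
    std::vector<uintptr_t> key(trace, trace + size);
    auto it = ids_.find(key);
    if (it != ids_.end()) return it->second;         // seen before: reuse id
    uint32_t id = static_cast<uint32_t>(traces_.size());
    traces_.push_back(key);
    ids_.emplace(std::move(key), id);
    return id;
  }
  // Mirrors GetStackTraceFromId: hand the stored frames back out.
  const uintptr_t *Get(uint32_t id, size_t *size) const {
    const std::vector<uintptr_t> &t = traces_[id];
    *size = t.size();
    return t.data();
  }
 private:
  std::map<std::vector<uintptr_t>, uint32_t> ids_;   // trace -> id
  std::vector<std::vector<uintptr_t>> traces_;       // id -> trace
};

The payoff is that a chunk header needs only one u32 per stack instead of a variable-length trace, and identical traces from hot allocation sites are stored once.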
313 static void *Allocate(uptr size, uptr alignment, StackTrace *stack,
318 CHECK(stack);
396 m->alloc_context_id = StackDepotPut(stack->trace, stack->size);
399 StackTrace::CompressStack(stack, m->AllocStackBeg(), m->AllocStackSize());
436 static void ReportInvalidFree(void *ptr, u8 chunk_state, StackTrace *stack) {
438 ReportDoubleFree((uptr)ptr, stack);
440 ReportFreeNotMalloced((uptr)ptr, stack);
444 void *ptr, StackTrace *stack) {
449 ReportInvalidFree(ptr, old_chunk_state, stack);
456 StackTrace *stack, AllocType alloc_type) {
460 ReportAllocTypeMismatch((uptr)ptr, stack,
469 m->free_context_id = StackDepotPut(stack->trace, stack->size);
472 StackTrace::CompressStack(stack, m->FreeStackBeg(), m->FreeStackSize());
497 static void Deallocate(void *ptr, StackTrace *stack, AllocType alloc_type) {
505 AtomicallySetQuarantineFlag(m, ptr, stack);
506 QuarantineChunk(m, ptr, stack, alloc_type);
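Lines 436-506 outline the free path: Deallocate first calls AtomicallySetQuarantineFlag, and only the caller that wins the state transition goes on to QuarantineChunk; a loser reports through ReportInvalidFree, distinguishing a double free (438) from freeing memory that was never malloc'd (440). A compilable sketch of that race-free flip; the enum values and diagnostics below are stand-ins, not the real ASan symbols.

#include <atomic>
#include <cstdint>
#include <cstdio>

enum ChunkState : uint8_t { ALLOCATED = 2, QUARANTINE = 3 };

struct Chunk {
  std::atomic<uint8_t> state{ALLOCATED};
};

// Returns true if this caller won the race to quarantine the chunk.
bool AtomicallySetQuarantineFlag(Chunk *m) {
  uint8_t expected = ALLOCATED;
  if (m->state.compare_exchange_strong(expected, QUARANTINE,
                                       std::memory_order_acquire))
    return true;
  // 'expected' now holds the observed state: QUARANTINE means a second
  // free of the same chunk; anything else was never a live allocation.
  if (expected == QUARANTINE)
    std::fprintf(stderr, "double free detected\n");
  else
    std::fprintf(stderr, "free of a pointer that was not malloc'd\n");
  return false;
}

Because the transition is a single compare-and-swap, two threads freeing the same pointer concurrently cannot both quarantine it: exactly one proceeds and the other reports.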
509 static void *Reallocate(void *old_ptr, uptr new_size, StackTrace *stack) {
519 void *new_ptr = Allocate(new_size, 8, stack, FROM_MALLOC, true);
523 ReportInvalidFree(old_ptr, chunk_state, stack);
529 Deallocate(old_ptr, stack, FROM_MALLOC);
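Reallocate (509-529) is allocate-copy-free: grab a new 8-byte-aligned block, copy min(old, new) bytes, then push the old block through Deallocate so it is quarantined with its free stack recorded; an old pointer in the wrong state is reported at 523 instead. A minimal model against the standard allocator, with malloc/free standing in for Allocate/Deallocate and the old size passed in explicitly (the real code reads it from chunk metadata):

#include <algorithm>
#include <cstdlib>
#include <cstring>

void *ReallocateSketch(void *old_ptr, size_t old_size, size_t new_size) {
  void *new_ptr = std::malloc(new_size);   // stands in for Allocate(new_size, 8, ...)
  if (new_ptr) {
    std::memcpy(new_ptr, old_ptr, std::min(old_size, new_size));
    std::free(old_ptr);                    // stands in for Deallocate(old_ptr, ...)
  }
  return new_ptr;
}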
623 void *asan_memalign(uptr alignment, uptr size, StackTrace *stack,
625 return Allocate(size, alignment, stack, alloc_type, true);
629 void asan_free(void *ptr, StackTrace *stack, AllocType alloc_type) {
630 Deallocate(ptr, stack, alloc_type);
634 void *asan_malloc(uptr size, StackTrace *stack) {
635 return Allocate(size, 8, stack, FROM_MALLOC, true);
638 void *asan_calloc(uptr nmemb, uptr size, StackTrace *stack) {
640 void *ptr = Allocate(nmemb * size, 8, stack, FROM_MALLOC, false);
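asan_calloc (638-640) passes false as the last Allocate argument and, in lines this search did not match, zeroes the block itself; presumably the flag suppresses the allocator's malloc fill pattern, which would be redundant before a memset. Any calloc front end also has to guard the nmemb * size product against overflow before computing it; a common guard, sketched with standard types:

#include <cstdlib>
#include <cstring>
#include <limits>

void *CheckedCalloc(size_t nmemb, size_t size) {
  // Reject products that would wrap around size_t.
  if (size != 0 && nmemb > std::numeric_limits<size_t>::max() / size)
    return nullptr;
  void *p = std::malloc(nmemb * size);
  if (p) std::memset(p, 0, nmemb * size);
  return p;
}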
648 void *asan_realloc(void *p, uptr size, StackTrace *stack) {
650 return Allocate(size, 8, stack, FROM_MALLOC, true);
652 Deallocate(p, stack, FROM_MALLOC);
655 return Reallocate(p, size, stack);
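The dispatch in asan_realloc (648-655) handles the degenerate cases before any chunk machinery runs: realloc(nullptr, n) is plain allocation (650), realloc(p, 0) frees p and returns null (652), and only the remaining case reaches Reallocate (655). The same shape against the standard allocator:

#include <cstdlib>

void *ReallocDispatch(void *p, size_t size) {
  if (p == nullptr) return std::malloc(size);
  if (size == 0) {
    std::free(p);
    return nullptr;
  }
  return std::realloc(p, size);  // the interesting case; see ReallocateSketch
}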
658 void *asan_valloc(uptr size, StackTrace *stack) {
659 return Allocate(size, GetPageSizeCached(), stack, FROM_MALLOC, true);
662 void *asan_pvalloc(uptr size, StackTrace *stack) {
669 return Allocate(size, PageSize, stack, FROM_MALLOC, true);
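asan_pvalloc (662-669) differs from asan_valloc only in rounding the request up to a whole number of pages before the page-aligned Allocate call; the rounding itself happens in lines this search did not match. Assuming the page size is a power of two, the usual round-up looks like:

#include <cstddef>

size_t RoundUpToPage(size_t size, size_t page_size) {
  if (size == 0) return page_size;                   // pvalloc(0) -> one page
  return (size + page_size - 1) & ~(page_size - 1);  // power-of-two round-up
}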
673 StackTrace *stack) {
674 void *ptr = Allocate(size, alignment, stack, FROM_MALLOC, true);
681 uptr asan_malloc_usable_size(void *ptr, StackTrace *stack) {
682 CHECK(stack);
686 ReportMallocUsableSizeNotOwned((uptr)ptr, stack);
807 ReportAsanGetAllocatedSizeNotOwned(ptr, &stack);
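Lines 681-686 and 807 show the introspection paths validating ownership: a non-null pointer the allocator does not recognize triggers ReportMallocUsableSizeNotOwned or ReportAsanGetAllocatedSizeNotOwned instead of returning a bogus size. A self-contained sketch of that shape, with a hypothetical pointer-to-size map standing in for the real chunk lookup:

#include <cstddef>
#include <cstdio>
#include <unordered_map>

// Hypothetical index of live allocations; the real allocator derives the
// size from chunk metadata instead.
static std::unordered_map<void *, size_t> g_live;

static size_t AllocationSize(void *ptr) {
  auto it = g_live.find(ptr);
  return it == g_live.end() ? 0 : it->second;
}

size_t MallocUsableSizeSketch(void *ptr) {
  size_t usable = AllocationSize(ptr);
  if (ptr != nullptr && usable == 0)
    std::fprintf(stderr, "usable_size on unowned pointer %p\n", ptr);
  return usable;  // 0 for null and for unowned pointers
}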