
Lines Matching refs:chunks

66 //   "chunks" are used interchangeably below.
185 // - Find all the blocks (a.k.a. chunks) to check. Mempool chunks require
245 /*--- Getting the initial chunks, and searching them. ---*/
262 MC_Chunk** chunks,
271 a_lo = chunks[i]->data;
272 a_hi = ((Addr)chunks[i]->data) + chunks[i]->szB;
281 // chunks[i]. Return -1 if none found. This assumes that chunks[]
285 MC_Chunk** chunks,
299 a_mid_lo = chunks[mid]->data;
300 a_mid_hi = chunks[mid]->data + chunks[mid]->szB;
307 if (chunks[mid]->szB == 0)
324 tl_assert(retVal == find_chunk_for_OLD ( ptr, chunks, n_chunks ));
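The fragments at lines 281-324 outline a binary search that maps a (possibly interior) pointer to the chunk containing it, over an array sorted by start address; the assertion at line 324 suggests the result is cross-checked against a simpler reference version (find_chunk_for_OLD). Below is a minimal sketch of that lookup, assuming only the MC_Chunk fields shown above (data, szB); the zero-size handling follows the hint at line 307, and the exact return convention is an assumption.

    /* Sketch only: return the index of the chunk containing 'ptr', or -1
       if none.  Assumes chunks[] is sorted by start address and the
       chunks are non-overlapping, as the comment at line 281 implies. */
    static Int find_chunk_for_sketch ( Addr ptr, MC_Chunk** chunks, Int n_chunks )
    {
       Int lo = 0, hi = n_chunks - 1;
       while (lo <= hi) {
          Int  mid      = (lo + hi) / 2;
          Addr a_mid_lo = chunks[mid]->data;
          Addr a_mid_hi = chunks[mid]->data + chunks[mid]->szB;
          /* Assumed: a zero-sized chunk is treated as if it occupied one
             byte, so a pointer to its start address can still match. */
          if (chunks[mid]->szB == 0)
             a_mid_hi++;
          if (ptr <  a_mid_lo) { hi = mid - 1; continue; }
          if (ptr >= a_mid_hi) { lo = mid + 1; continue; }
          return mid;          /* a_mid_lo <= ptr < a_mid_hi */
       }
       return -1;
    }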
334 // Our goal is to construct a set of chunks that includes every
338 MC_Chunk **mallocs, **chunks, *mc;
342 // First we collect all the malloc chunks into an array and sort it.
343 // We do this because we want to query the chunks by interior pointers
364 VG_(HT_ResetIter)(mp->chunks);
365 while ( (mc = VG_(HT_Next)(mp->chunks)) ) {
392 chunks = VG_(malloc)("mc.fas.2", sizeof(VgHashNode*) * (n_chunks));
395 // Copy the mempool chunks and the non-marked malloc chunks into a
396 // combined array of chunks.
399 VG_(HT_ResetIter)(mp->chunks);
400 while ( (mc = VG_(HT_Next)(mp->chunks)) ) {
402 chunks[s++] = mc;
408 chunks[s++] = mallocs[m];
419 return chunks;
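Lines 338-419 come from find_active_chunks: malloc chunks are gathered and sorted so that interior pointers can be resolved with a binary search (lines 342-343), every mempool's chunk table is walked (lines 364-365 and 399-400), and mempool chunks plus the malloc chunks not already covered by a mempool are copied into one combined array (lines 395-408). A condensed sketch of that copy phase, assuming a hypothetical covered[] array marking malloc chunks that lie inside a registered mempool:

    /* Sketch only: gather mempool chunks and uncovered malloc chunks into
       a single array of n_chunks entries.  'covered' is a hypothetical
       stand-in for however the real code flags malloc chunks that are
       already represented by a mempool chunk. */
    static MC_Chunk** build_combined_array_sketch ( MC_Chunk** mallocs,
                                                    UInt n_mallocs,
                                                    const Bool* covered,
                                                    UInt n_chunks )
    {
       MC_Chunk **chunks, *mc;
       MC_Mempool *mp;
       UInt m, s = 0;

       chunks = VG_(malloc)("sketch.cc.1", sizeof(MC_Chunk*) * n_chunks);

       /* Every chunk registered with a mempool is included as-is. */
       VG_(HT_ResetIter)(MC_(mempool_list));
       while ( (mp = VG_(HT_Next)(MC_(mempool_list))) ) {
          VG_(HT_ResetIter)(mp->chunks);
          while ( (mc = VG_(HT_Next)(mp->chunks)) )
             chunks[s++] = mc;
       }

       /* A malloc chunk is added only if no mempool chunk covers it, so
          the same block is never scanned twice. */
       for (m = 0; m < n_mallocs; m++) {
          if (!covered[m])
             chunks[s++] = mallocs[m];
       }

       tl_assert(s == n_chunks);
       return chunks;
    }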
449 // (as a free operation potentially destroys one or more chunks).
454 // How many chunks we're dealing with.
462 // chunks will be converted and merged into loss records, maintained in lr_table
482 // Records chunks that are currently being processed. Each element in the
484 // 'lc_n_chunks' because in the worst case that's how many chunks could be
1065 skip such fully unaddressable SM chunks efficiently.
1071 SM chunks will be skipped efficiently: the first one is skipped
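Lines 1065-1071 refer to skipping whole secondary-map (SM) sized regions whose contents are entirely unaddressable, rather than testing each word during the scan. A simplified sketch of that idea follows; both the 64 KiB SM granularity and the is_fully_noaccess_sm() predicate are hypothetical stand-ins for memcheck's real secondary-map machinery.

    /* Sketch only: advance 'ptr' past runs of fully unaddressable SMs in
       one jump per SM instead of one test per word. */
    #define SM_SIZE_SKETCH 65536u                    /* assumed SM span  */

    extern Bool is_fully_noaccess_sm ( Addr a );     /* hypothetical     */

    static Addr skip_noaccess_sms_sketch ( Addr ptr, Addr end )
    {
       while (ptr < end && is_fully_noaccess_sm(ptr)) {
          /* Jump to the start of the next SM-aligned region. */
          ptr = (ptr & ~(Addr)(SM_SIZE_SKETCH - 1)) + SM_SIZE_SKETCH;
       }
       return ptr;
    }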
1111 // Skip invalid chunks.
1366 // Convert the chunks into loss records, merging them where appropriate.
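Line 1366 names the step where chunks are folded into loss records: blocks that share a reachability state and an allocation stack are reported as one record. The sketch below shows that merge in outline, assuming lr_table (line 462) is an ordered set keyed by a (state, allocation stack) pair; the field names state, allocated_at, szB, num_blocks and where are assumptions about the real structures.

    /* Sketch only: merge one chunk into the loss record for its
       (reachability state, allocation stack) pair, creating the record
       on first use. */
    static void merge_chunk_into_loss_record_sketch ( MC_Chunk* ch,
                                                      Reachedness state )
    {
       LossRecordKey key;
       LossRecord*   lr;

       key.state        = state;
       key.allocated_at = ch->where;       /* field name assumed */

       lr = VG_(OSetGen_Lookup)(lr_table, &key);
       if (lr == NULL) {
          /* First chunk with this (state, stack) pair: start a record. */
          lr = VG_(OSetGen_AllocNode)(lr_table, sizeof(LossRecord));
          lr->key        = key;
          lr->szB        = 0;
          lr->num_blocks = 0;
          VG_(OSetGen_Insert)(lr_table, lr);
       }
       /* Merging just accumulates size and block count. */
       lr->szB        += ch->szB;
       lr->num_blocks += 1;
    }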
1652 // Match the chunks with loss records.
1783 // Get the chunks, stop if there were none.
1822 // for mempool chunks, but if custom-allocated blocks are put in a separate
1984 MC_Chunk** chunks;
1994 chunks = find_active_chunks(&n_chunks);
1999 // Scan active malloc-ed chunks
2001 lc_scan_memory(chunks[i]->data, chunks[i]->szB,
2006 VG_(free) ( chunks );
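Lines 1984-2006 show the driver for this path: the active chunks are fetched, each one's payload is scanned, and the temporary array is freed. Pulled together as one loop, with lc_scan_memory's remaining arguments (clique and definiteness handling) replaced by a simplified stand-in and the type of n_chunks assumed:

    extern void lc_scan_memory_sketch ( Addr start, SizeT lenB );  /* hypothetical */

    /* Sketch only: scan every active malloc-ed / mempool chunk, then
       release the combined array built by find_active_chunks. */
    static void scan_active_chunks_sketch ( void )
    {
       MC_Chunk** chunks;
       Int i, n_chunks = 0;

       chunks = find_active_chunks(&n_chunks);

       /* "Get the chunks, stop if there were none" (line 1783). */
       if (n_chunks > 0) {
          for (i = 0; i < n_chunks; i++)
             lc_scan_memory_sketch(chunks[i]->data, chunks[i]->szB);
       }

       VG_(free)(chunks);
    }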