/* -----------------------------------------------------------------------------
 *
 * (c) The GHC Team, 1998-2006
 *
 * Sanity checking code for the heap and stack.
 *
 * Used when debugging: check that everything is reasonable.
 *
 *    - All things that are supposed to be pointers look like pointers.
 *
 *    - Objects in text space are marked as static closures, those
 *      in the heap are dynamic.
 *
 * ---------------------------------------------------------------------------*/

#include "PosixSource.h"
#include "Rts.h"

#if defined(DEBUG)                                                   /* whole file */

#include "RtsUtils.h"
#include "sm/Storage.h"
#include "sm/BlockAlloc.h"
#include "GCThread.h"
#include "Sanity.h"
#include "Schedule.h"
#include "Apply.h"
#include "Printer.h"
#include "Arena.h"
#include "RetainerProfile.h"
#include "CNF.h"
#include "sm/NonMoving.h"
#include "sm/NonMovingMark.h"
#include "Profiling.h" // prof_arena

/* -----------------------------------------------------------------------------
   Forward decls.
   -------------------------------------------------------------------------- */

static void  checkSmallBitmap    ( StgPtr payload, StgWord bitmap, uint32_t );
static void  checkLargeBitmap    ( StgPtr payload, StgLargeBitmap*, uint32_t );
static void  checkClosureShallow ( const StgClosure * );
static void  checkSTACK          (StgStack *stack);

static W_    countNonMovingSegments ( struct NonmovingSegment *segs );
static W_    countNonMovingHeap     ( struct NonmovingHeap *heap );

/* -----------------------------------------------------------------------------
   Check stack sanity
   -------------------------------------------------------------------------- */

static void
checkSmallBitmap( StgPtr payload, StgWord bitmap, uint32_t size )
{
    uint32_t i;

    for(i = 0; i < size; i++, bitmap >>= 1 ) {
        if ((bitmap & 1) == 0) {
            checkClosureShallow((StgClosure *)payload[i]);
        }
    }
}
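
/* Worked example (illustrative, not part of the checker): in these
 * bitmaps a clear bit means "this word is a pointer".  For a frame
 * with bitmap 0b101 and size 3, payload[0] and payload[2] are
 * non-pointers, while payload[1] is a pointer and is handed to
 * checkClosureShallow().  The equivalent direct call would be:
 *
 *     checkSmallBitmap(payload, 0x5, 3);
 */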

static void
checkLargeBitmap( StgPtr payload, StgLargeBitmap* large_bitmap, uint32_t size )
{
    StgWord bmp;
    uint32_t i, j;

    i = 0;
    for (bmp=0; i < size; bmp++) {
        StgWord bitmap = large_bitmap->bitmap[bmp];
        j = 0;
        for(; i < size && j < BITS_IN(W_); j++, i++, bitmap >>= 1 ) {
            if ((bitmap & 1) == 0) {
                checkClosureShallow((StgClosure *)payload[i]);
            }
        }
    }
}

/*
 * Check that a closure looks like a valid closure, without checking its
 * payload.  This is used to avoid recursion between checking PAPs and
 * checking stack chunks.
 */
static void
checkClosureShallow( const StgClosure* p )
{
    ASSERT(LOOKS_LIKE_CLOSURE_PTR(UNTAG_CONST_CLOSURE(p)));
}

// check an individual stack object
StgOffset
checkStackFrame( StgPtr c )
{
    uint32_t size;
    const StgRetInfoTable* info;

    info = get_ret_itbl((StgClosure *)c);

    /* All activation records have 'bitmap' style layout info. */
    switch (info->i.type) {

    case UPDATE_FRAME:
      ASSERT(LOOKS_LIKE_CLOSURE_PTR(((StgUpdateFrame*)c)->updatee));
      FALLTHROUGH;
    case ATOMICALLY_FRAME:
    case CATCH_RETRY_FRAME:
    case CATCH_STM_FRAME:
    case CATCH_FRAME:
      // small bitmap cases (<= 32 entries)
    case UNDERFLOW_FRAME:
    case STOP_FRAME:
    case RET_SMALL:
        size = BITMAP_SIZE(info->i.layout.bitmap);
        checkSmallBitmap((StgPtr)c + 1,
                         BITMAP_BITS(info->i.layout.bitmap), size);
        return 1 + size;

    case RET_BCO: {
        StgBCO *bco;
        uint32_t size;
        bco = (StgBCO *)*(c+1);
        size = BCO_BITMAP_SIZE(bco);
        checkLargeBitmap((StgPtr)c + 2, BCO_BITMAP(bco), size);
        return 2 + size;
    }

    case RET_BIG: // large bitmap (> 32 entries)
        size = GET_LARGE_BITMAP(&info->i)->size;
        checkLargeBitmap((StgPtr)c + 1, GET_LARGE_BITMAP(&info->i), size);
        return 1 + size;

    case RET_FUN:
    {
        const StgFunInfoTable *fun_info;
        StgRetFun *ret_fun;

        ret_fun = (StgRetFun *)c;
        fun_info = get_fun_itbl(UNTAG_CONST_CLOSURE(ret_fun->fun));
        size = ret_fun->size;
        switch (fun_info->f.fun_type) {
        case ARG_GEN:
            checkSmallBitmap((StgPtr)ret_fun->payload,
                             BITMAP_BITS(fun_info->f.b.bitmap), size);
            break;
        case ARG_GEN_BIG:
            checkLargeBitmap((StgPtr)ret_fun->payload,
                             GET_FUN_LARGE_BITMAP(fun_info), size);
            break;
        default:
            checkSmallBitmap((StgPtr)ret_fun->payload,
                             BITMAP_BITS(stg_arg_bitmaps[fun_info->f.fun_type]),
                             size);
            break;
        }
        return sizeofW(StgRetFun) + size;
    }

    default:
        barf("checkStackFrame: weird activation record found on stack (%p %d).",c,info->i.type);
    }
}
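
/* Frame layout recap (informal): for the small-bitmap cases above a
 * frame occupies c[0] (the info pointer) followed by `size` payload
 * words described by the bitmap, hence the `return 1 + size`; RET_BCO
 * frames carry an extra word for the BCO itself, hence `2 + size`.
 */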

// check sections of stack between update frames
void
checkStackChunk( StgPtr sp, StgPtr stack_end )
{
    StgPtr p;

    p = sp;
    while (p < stack_end) {
        p += checkStackFrame( p );
    }
    ASSERT( p == stack_end );
}

static void
checkPAP (StgClosure *tagged_fun, StgClosure** payload, StgWord n_args)
{
    const StgClosure *fun;
    const StgFunInfoTable *fun_info;

    fun = UNTAG_CONST_CLOSURE(tagged_fun);
    ASSERT(LOOKS_LIKE_CLOSURE_PTR(fun));
    fun_info = get_fun_itbl(fun);

    switch (fun_info->f.fun_type) {
    case ARG_GEN:
        checkSmallBitmap( (StgPtr)payload,
                          BITMAP_BITS(fun_info->f.b.bitmap), n_args );
        break;
    case ARG_GEN_BIG:
        checkLargeBitmap( (StgPtr)payload,
                          GET_FUN_LARGE_BITMAP(fun_info),
                          n_args );
        break;
    case ARG_BCO:
        checkLargeBitmap( (StgPtr)payload,
                          BCO_BITMAP(fun),
                          n_args );
        break;
    default:
        checkSmallBitmap( (StgPtr)payload,
                          BITMAP_BITS(stg_arg_bitmaps[fun_info->f.fun_type]),
                          n_args );
        break;
    }

    ASSERT(fun_info->f.arity > TAG_MASK ? GET_CLOSURE_TAG(tagged_fun) == 0
           : GET_CLOSURE_TAG(tagged_fun) == fun_info->f.arity);
}
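
/* Pointer-tagging note (informal; TAG_MASK is 7 on 64-bit targets and
 * 3 on 32-bit ones): a function whose arity fits in the tag bits must
 * be tagged with exactly that arity, e.g. a fun of arity 2 implies
 * GET_CLOSURE_TAG(tagged_fun) == 2.  An arity too large to encode must
 * leave the tag 0, which is what the ASSERT above verifies.
 */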

#if defined(PROFILING)
static void
checkClosureProfSanity(const StgClosure *p)
{
    StgProfHeader prof_hdr = p->header.prof;
    CostCentreStack *ccs = prof_hdr.ccs;
    if (HEAP_ALLOCED_GC((void*)ccs)) {
        checkPtrInArena((StgPtr)ccs, prof_arena);
    }
}
#endif

// Returns closure size in words
StgOffset
checkClosure( const StgClosure* p )
{
    const StgInfoTable *info;

    ASSERT(LOOKS_LIKE_CLOSURE_PTR(p));

    p = UNTAG_CONST_CLOSURE(p);

    info = p->header.info;
    load_load_barrier();

    if (IS_FORWARDING_PTR(info)) {
        barf("checkClosure: found EVACUATED closure %d", info->type);
    }

#if defined(PROFILING)
    checkClosureProfSanity(p);
#endif

    info = INFO_PTR_TO_STRUCT(info);
    load_load_barrier();

    switch (info->type) {

    case MVAR_CLEAN:
    case MVAR_DIRTY:
      {
        StgMVar *mvar = (StgMVar *)p;
        ASSERT(LOOKS_LIKE_CLOSURE_PTR(mvar->head));
        ASSERT(LOOKS_LIKE_CLOSURE_PTR(mvar->tail));
        ASSERT(LOOKS_LIKE_CLOSURE_PTR(mvar->value));
        return sizeofW(StgMVar);
      }

    case THUNK:
    case THUNK_1_0:
    case THUNK_0_1:
    case THUNK_1_1:
    case THUNK_0_2:
    case THUNK_2_0:
      {
        uint32_t i;
        for (i = 0; i < info->layout.payload.ptrs; i++) {
          ASSERT(LOOKS_LIKE_CLOSURE_PTR(((StgThunk *)p)->payload[i]));
        }
        return thunk_sizeW_fromITBL(info);
      }

    case FUN:
    case FUN_1_0:
    case FUN_0_1:
    case FUN_1_1:
    case FUN_0_2:
    case FUN_2_0:
    case CONSTR:
    case CONSTR_NOCAF:
    case CONSTR_1_0:
    case CONSTR_0_1:
    case CONSTR_1_1:
    case CONSTR_0_2:
    case CONSTR_2_0:
    case BLACKHOLE:
    case PRIM:
    case MUT_PRIM:
    case MUT_VAR_CLEAN:
    case MUT_VAR_DIRTY:
    case TVAR:
    case THUNK_STATIC:
    case FUN_STATIC:
    case COMPACT_NFDATA:
        {
            uint32_t i;
            for (i = 0; i < info->layout.payload.ptrs; i++) {
                ASSERT(LOOKS_LIKE_CLOSURE_PTR(p->payload[i]));
            }
            return sizeW_fromITBL(info);
        }

    case BLOCKING_QUEUE:
    {
        StgBlockingQueue *bq = (StgBlockingQueue *)p;

        // NO: the BH might have been updated now
        // ASSERT(get_itbl(bq->bh)->type == BLACKHOLE);
        ASSERT(LOOKS_LIKE_CLOSURE_PTR(bq->bh));

        ASSERT(get_itbl((StgClosure *)(bq->owner))->type == TSO);
        ASSERT(// A bq with no other blocked TSOs:
               bq->queue == (MessageBlackHole*)END_TSO_QUEUE ||
               // A bq with blocked TSOs in its queue:
               bq->queue->header.info == &stg_MSG_BLACKHOLE_info ||
               // A bq with a deleted (in throwToMsg()) MSG_BLACKHOLE:
               bq->queue->header.info == &stg_IND_info);
        ASSERT(bq->link == (StgBlockingQueue*)END_TSO_QUEUE ||
               get_itbl((StgClosure *)(bq->link))->type == IND ||
               get_itbl((StgClosure *)(bq->link))->type == BLOCKING_QUEUE);

        return sizeofW(StgBlockingQueue);
    }

    case BCO: {
        StgBCO *bco = (StgBCO *)p;
        ASSERT(LOOKS_LIKE_CLOSURE_PTR(bco->instrs));
        ASSERT(LOOKS_LIKE_CLOSURE_PTR(bco->literals));
        ASSERT(LOOKS_LIKE_CLOSURE_PTR(bco->ptrs));
        return bco_sizeW(bco);
    }

    case IND_STATIC: /* (1, 0) closure */
      ASSERT(LOOKS_LIKE_CLOSURE_PTR(((StgIndStatic*)p)->indirectee));
      return sizeW_fromITBL(info);

    case WEAK:
      /* deal with these specially - the info table isn't
       * representative of the actual layout.
       */
      { StgWeak *w = (StgWeak *)p;
        ASSERT(LOOKS_LIKE_CLOSURE_PTR(w->key));
        ASSERT(LOOKS_LIKE_CLOSURE_PTR(w->value));
        ASSERT(LOOKS_LIKE_CLOSURE_PTR(w->finalizer));
        if (w->link) {
          ASSERT(LOOKS_LIKE_CLOSURE_PTR(w->link));
        }
        return sizeW_fromITBL(info);
      }

    case THUNK_SELECTOR:
            ASSERT(LOOKS_LIKE_CLOSURE_PTR(((StgSelector *)p)->selectee));
            return THUNK_SELECTOR_sizeW();

    case IND:
        {
            /* we don't expect to see any of these after GC
             * but they might appear during execution
             */
            StgInd *ind = (StgInd *)p;
            ASSERT(LOOKS_LIKE_CLOSURE_PTR(ind->indirectee));
            return sizeofW(StgInd);
        }

    case RET_BCO:
    case RET_SMALL:
    case RET_BIG:
    case UPDATE_FRAME:
    case UNDERFLOW_FRAME:
    case STOP_FRAME:
    case CATCH_FRAME:
    case ATOMICALLY_FRAME:
    case CATCH_RETRY_FRAME:
    case CATCH_STM_FRAME:
            barf("checkClosure: stack frame");

    case AP:
    {
        StgAP* ap = (StgAP *)p;
        checkPAP (ap->fun, ap->payload, ap->n_args);
        return ap_sizeW(ap);
    }

    case PAP:
    {
        StgPAP* pap = (StgPAP *)p;
        checkPAP (pap->fun, pap->payload, pap->n_args);
        return pap_sizeW(pap);
    }

    case AP_STACK:
    {
        StgAP_STACK *ap = (StgAP_STACK *)p;
        ASSERT(LOOKS_LIKE_CLOSURE_PTR(ap->fun));
        checkStackChunk((StgPtr)ap->payload, (StgPtr)ap->payload + ap->size);
        return ap_stack_sizeW(ap);
    }

    case ARR_WORDS:
            return arr_words_sizeW((StgArrBytes *)p);

    case MUT_ARR_PTRS_CLEAN:
    case MUT_ARR_PTRS_DIRTY:
    case MUT_ARR_PTRS_FROZEN_CLEAN:
    case MUT_ARR_PTRS_FROZEN_DIRTY:
        {
            StgMutArrPtrs* a = (StgMutArrPtrs *)p;
            uint32_t i;
            for (i = 0; i < a->ptrs; i++) {
                ASSERT(LOOKS_LIKE_CLOSURE_PTR(a->payload[i]));
            }
            return mut_arr_ptrs_sizeW(a);
        }

    case SMALL_MUT_ARR_PTRS_CLEAN:
    case SMALL_MUT_ARR_PTRS_DIRTY:
    case SMALL_MUT_ARR_PTRS_FROZEN_CLEAN:
    case SMALL_MUT_ARR_PTRS_FROZEN_DIRTY:
        {
            StgSmallMutArrPtrs *a = (StgSmallMutArrPtrs *)p;
            for (uint32_t i = 0; i < a->ptrs; i++) {
                ASSERT(LOOKS_LIKE_CLOSURE_PTR(a->payload[i]));
            }
            return small_mut_arr_ptrs_sizeW(a);
        }

    case TSO:
        checkTSO((StgTSO *)p);
        return sizeofW(StgTSO);

    case STACK:
        checkSTACK((StgStack*)p);
        return stack_sizeW((StgStack*)p);

    case TREC_CHUNK:
      {
        uint32_t i;
        StgTRecChunk *tc = (StgTRecChunk *)p;
        ASSERT(LOOKS_LIKE_CLOSURE_PTR(tc->prev_chunk));
        for (i = 0; i < tc->next_entry_idx; i++) {
          ASSERT(LOOKS_LIKE_CLOSURE_PTR(tc->entries[i].tvar));
          ASSERT(LOOKS_LIKE_CLOSURE_PTR(tc->entries[i].expected_value));
          ASSERT(LOOKS_LIKE_CLOSURE_PTR(tc->entries[i].new_value));
        }
        return sizeofW(StgTRecChunk);
      }

    default:
        barf("checkClosure (closure type %d)", info->type);
    }
}
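
/* The size returned by checkClosure() is what lets callers walk a
 * block closure-by-closure, as in this sketch of the pattern used by
 * the heap checkers below:
 *
 *     StgPtr p = bd->start;
 *     while (p < bd->free) {
 *         p += checkClosure((StgClosure *)p);
 *     }
 */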


/* -----------------------------------------------------------------------------
   Check Heap Sanity

   After garbage collection, the live heap is in a state where we can
   run through and check that all the pointers point to the right
   place.  This function starts at a given position and sanity-checks
   all the objects in the remainder of the chain.
   -------------------------------------------------------------------------- */

Simon Marlow's avatar
Simon Marlow committed
467
void checkHeapChain (bdescr *bd)
468
{
469
    for (; bd != NULL; bd = bd->link) {
470
        if(!(bd->flags & BF_SWEPT)) {
471
            StgPtr p = bd->start;
472
            while (p < bd->free) {
473
                uint32_t size = checkClosure((StgClosure *)p);
474 475 476
                /* This is the smallest size of closure that can live in the heap */
                ASSERT( size >= MIN_PAYLOAD_SIZE + sizeofW(StgHeader) );
                p += size;
477

478
                /* skip over slop, see Note [slop on the heap] */
479 480 481
                while (p < bd->free &&
                       (*p < 0x1000 || !LOOKS_LIKE_INFO_PTR(*p))) { p++; }
            }
482
        }
483
    }
484 485
}
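
/* Usage sketch (informal, not part of the RTS proper): when chasing a
 * suspected heap corruption one can run this from gdb against a single
 * generation, e.g.
 *
 *     (gdb) call checkHeapChain(generations[0].blocks)
 *
 * which fails an ASSERT (or barf()s) at the first malformed closure.
 */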

/* -----------------------------------------------------------------------------
 * Check nonmoving heap sanity
 *
 * After a concurrent sweep the nonmoving heap can be checked for validity.
 * -------------------------------------------------------------------------- */

static void checkNonmovingSegments (struct NonmovingSegment *seg)
{
    while (seg != NULL) {
        const nonmoving_block_idx count = nonmovingSegmentBlockCount(seg);
        for (nonmoving_block_idx i=0; i < count; i++) {
            if (seg->bitmap[i] == nonmovingMarkEpoch) {
                StgPtr p = nonmovingSegmentGetBlock(seg, i);
                checkClosure((StgClosure *) p);
            } else if (i < nonmovingSegmentInfo(seg)->next_free_snap) {
                seg->bitmap[i] = 0;
            }
        }
        seg = seg->link;
    }
}

void checkNonmovingHeap (const struct NonmovingHeap *heap)
{
    for (unsigned int i=0; i < NONMOVING_ALLOCA_CNT; i++) {
        const struct NonmovingAllocator *alloc = heap->allocators[i];
        checkNonmovingSegments(alloc->filled);
        checkNonmovingSegments(alloc->active);
        for (unsigned int cap=0; cap < n_capabilities; cap++) {
            checkNonmovingSegments(alloc->current[cap]);
        }
    }
}


void
checkHeapChunk(StgPtr start, StgPtr end)
{
  StgPtr p;
  uint32_t size;

  for (p=start; p<end; p+=size) {
    ASSERT(LOOKS_LIKE_INFO_PTR(*p));
    size = checkClosure((StgClosure *)p);
    /* This is the smallest size of closure that can live in the heap. */
    ASSERT( size >= MIN_PAYLOAD_SIZE + sizeofW(StgHeader) );
  }
}

void
checkLargeObjects(bdescr *bd)
{
  while (bd != NULL) {
    if (!(bd->flags & BF_PINNED)) {
      checkClosure((StgClosure *)bd->start);
    }
    bd = bd->link;
  }
}

static void
checkCompactObjects(bdescr *bd)
{
    // Compact objects are similar to large objects, but they have a
    // StgCompactNFDataBlock at the beginning, before the actual closure.

    for ( ; bd != NULL; bd = bd->link) {
        ASSERT(bd->flags & BF_COMPACT);

        StgCompactNFDataBlock *block = (StgCompactNFDataBlock*)bd->start;
        StgCompactNFData *str = block->owner;
        ASSERT((W_)str == (W_)block + sizeof(StgCompactNFDataBlock));

        StgWord totalW = 0;
        StgCompactNFDataBlock *last;
        for ( ; block ; block = block->next) {
            last = block;
            ASSERT(block->owner == str);

            totalW += Bdescr((P_)block)->blocks * BLOCK_SIZE_W;

            StgPtr start = Bdescr((P_)block)->start + sizeofW(StgCompactNFDataBlock);
            StgPtr free;
            if (Bdescr((P_)block)->start == (P_)str->nursery) {
                free = str->hp;
            } else {
                free = Bdescr((P_)block)->free;
            }
            StgPtr p = start;
            while (p < free)  {
                // We can't use checkClosure() here because in
                // compactAdd#/compactAddWithSharing# when we see a non-
                // compactable object (a function, mutable object, or pinned
                // object) we leave the location for the object in the payload
                // empty.
                StgClosure *c = (StgClosure*)p;
                checkClosureShallow(c);
                p += closure_sizeW(c);
            }
        }

        ASSERT(str->totalW == totalW);
        ASSERT(str->last == last);
    }
}

static void
checkSTACK (StgStack *stack)
{
    StgPtr sp = stack->sp;
    StgOffset stack_size = stack->stack_size;
    StgPtr stack_end = stack->stack + stack_size;

    ASSERT(stack->stack <= sp && sp <= stack_end);

    checkStackChunk(sp, stack_end);
}

void
checkTSO(StgTSO *tso)
{
    StgTSO *next = tso->_link;
    const StgInfoTable *info = (const StgInfoTable*) tso->_link->header.info;
    load_load_barrier();

    ASSERT(next == END_TSO_QUEUE ||
           info == &stg_MVAR_TSO_QUEUE_info ||
           info == &stg_TSO_info ||
           info == &stg_WHITEHOLE_info); // used to happen due to STM doing
                                         // lockTSO(), might not happen now

    if (   tso->why_blocked == BlockedOnMVar
        || tso->why_blocked == BlockedOnMVarRead
        || tso->why_blocked == BlockedOnBlackHole
        || tso->why_blocked == BlockedOnMsgThrowTo
        || tso->why_blocked == NotBlocked
        ) {
        ASSERT(LOOKS_LIKE_CLOSURE_PTR(tso->block_info.closure));
    }

    ASSERT(LOOKS_LIKE_CLOSURE_PTR(tso->bq));
    ASSERT(LOOKS_LIKE_CLOSURE_PTR(tso->blocked_exceptions));
    ASSERT(LOOKS_LIKE_CLOSURE_PTR(tso->stackobj));
    ASSERT(LOOKS_LIKE_CLOSURE_PTR(tso->global_link) &&
            (tso->global_link == END_TSO_QUEUE ||
             get_itbl((StgClosure*)tso->global_link)->type == TSO));
}

/*
   Check that all TSOs have been evacuated.
   Optionally also check the sanity of the TSOs.
*/
void
checkGlobalTSOList (bool checkTSOs)
{
  for (uint32_t g = 0; g < RtsFlags.GcFlags.generations; g++) {
      for (StgTSO *tso = generations[g].threads; tso != END_TSO_QUEUE;
           tso = tso->global_link) {
          ASSERT(LOOKS_LIKE_CLOSURE_PTR(tso));
          ASSERT(get_itbl((StgClosure *)tso)->type == TSO);
          if (checkTSOs) {
              checkTSO(tso);
          }

          // If this TSO is dirty and in an old generation, it better
          // be on the mutable list.
          if (tso->dirty) {
              ASSERT(Bdescr((P_)tso)->gen_no == 0 || (tso->flags & TSO_MARKED));
              tso->flags &= ~TSO_MARKED;
          }

          StgStack *stack = tso->stackobj;
          while (1) {
              if (stack->dirty & STACK_DIRTY) {
                  ASSERT(Bdescr((P_)stack)->gen_no == 0 || (stack->dirty & STACK_SANE));
                  stack->dirty &= ~STACK_SANE;
              }
              StgUnderflowFrame *frame =
                  (StgUnderflowFrame*) (stack->stack + stack->stack_size
                          - sizeofW(StgUnderflowFrame));
              if (frame->info != &stg_stack_underflow_frame_info
                      || frame->next_chunk == (StgStack*)END_TSO_QUEUE) {
                  break;
              }
              stack = frame->next_chunk;
          }
      }
  }
}

/* -----------------------------------------------------------------------------
   Check mutable list sanity.
   -------------------------------------------------------------------------- */

static void
checkMutableList( bdescr *mut_bd, uint32_t gen )
{
    bdescr *bd;
    StgPtr q;
    StgClosure *p;

    for (bd = mut_bd; bd != NULL; bd = bd->link) {
        for (q = bd->start; q < bd->free; q++) {
            p = (StgClosure *)*q;
            ASSERT(!HEAP_ALLOCED(p) || Bdescr((P_)p)->gen_no == gen);
            checkClosure(p);

            switch (get_itbl(p)->type) {
            case TSO:
                ((StgTSO *)p)->flags |= TSO_MARKED;
                break;
            case STACK:
                ((StgStack *)p)->dirty |= STACK_SANE;
                break;
            }
        }
    }
}
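
/* How the dirty-object cross-check works (informal summary): the
 * switch above tags every TSO (TSO_MARKED) and STACK (STACK_SANE)
 * found on a mutable list; checkGlobalTSOList() then asserts that any
 * dirty TSO or stack living in an old generation carries the tag, and
 * clears it again.  checkSanity() relies on this ordering by calling
 * checkMutableLists() before checkGlobalTSOList(true).
 */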

static void
checkLocalMutableLists (uint32_t cap_no)
{
    uint32_t g;
    for (g = 1; g < RtsFlags.GcFlags.generations; g++) {
        checkMutableList(capabilities[cap_no]->mut_lists[g], g);
    }
}

static void
checkMutableLists (void)
{
    uint32_t i;
    for (i = 0; i < n_capabilities; i++) {
        checkLocalMutableLists(i);
    }
}

/*
  Check the static objects list.
*/
void
checkStaticObjects ( StgClosure* static_objects )
{
  StgClosure *p = static_objects;
  const StgInfoTable *info;

  while (p != END_OF_STATIC_OBJECT_LIST) {
    p = UNTAG_STATIC_LIST_PTR(p);
    checkClosure(p);
    info = get_itbl(p);
    switch (info->type) {
    case IND_STATIC:
      {
        const StgClosure *indirectee;

        indirectee = UNTAG_CONST_CLOSURE(((StgIndStatic *)p)->indirectee);
        ASSERT(LOOKS_LIKE_CLOSURE_PTR(indirectee));
        ASSERT(LOOKS_LIKE_INFO_PTR((StgWord)indirectee->header.info));
        p = *IND_STATIC_LINK((StgClosure *)p);
        break;
      }

    case THUNK_STATIC:
      p = *THUNK_STATIC_LINK((StgClosure *)p);
      break;

    case FUN_STATIC:
      p = *STATIC_LINK(info,(StgClosure *)p);
      break;

    case CONSTR:
    case CONSTR_NOCAF:
    case CONSTR_1_0:
    case CONSTR_2_0:
    case CONSTR_1_1:
      p = *STATIC_LINK(info,(StgClosure *)p);
      break;

    default:
      barf("checkStaticObjects: strange closure %p (%s)",
           p, info_type(p));
    }
  }
}

/* Nursery sanity check */
void
checkNurserySanity (nursery *nursery)
{
    bdescr *bd, *prev;
    uint32_t blocks = 0;

    prev = NULL;
    for (bd = nursery->blocks; bd != NULL; bd = bd->link) {
        ASSERT(bd->gen == g0);
        ASSERT(bd->u.back == prev);
        prev = bd;
        blocks += bd->blocks;
    }

    ASSERT(blocks == nursery->n_blocks);
}

static void checkGeneration (generation *gen,
                             bool after_major_gc USED_IF_THREADS)
{
    uint32_t n;
    gen_workspace *ws;

    //ASSERT(countBlocks(gen->blocks) == gen->n_blocks);
    ASSERT(countBlocks(gen->large_objects) == gen->n_large_blocks);

#if defined(THREADED_RTS)
    // Note [heap sanity checking with SMP]
    // ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    //
    // heap sanity checking doesn't work with SMP for two reasons:
    //
    //   * We can't zero the slop. However, we can sanity-check the heap after a
    //     major gc, because there is no slop. See also Updates.h and Note
    //     [zeroing slop when overwriting closures].
    //
    //   * The nonmoving collector may be mutating its large object lists,
    //     unless we were in fact called by the nonmoving collector.
    if (!after_major_gc) return;
#endif

    if (RtsFlags.GcFlags.useNonmoving && gen == oldest_gen) {
        ASSERT(countNonMovingSegments(nonmovingHeap.free) == (W_) nonmovingHeap.n_free * NONMOVING_SEGMENT_BLOCKS);
        ASSERT(countBlocks(nonmoving_large_objects) == n_nonmoving_large_blocks);
        ASSERT(countBlocks(nonmoving_marked_large_objects) == n_nonmoving_marked_large_blocks);

        // Compact regions
        // Accounting here is tricky due to the fact that the CNF allocation
        // code modifies generation->n_compact_blocks directly. However, most
        // objects being swept by the nonmoving GC are tracked in
        // nonmoving_*_compact_objects. Consequently we can only maintain a very loose
        // sanity invariant here.
        uint32_t counted_cnf_blocks = 0;
        counted_cnf_blocks += countCompactBlocks(nonmoving_marked_compact_objects);
        counted_cnf_blocks += countCompactBlocks(nonmoving_compact_objects);
        counted_cnf_blocks += countCompactBlocks(oldest_gen->compact_objects);

        uint32_t total_cnf_blocks = 0;
        total_cnf_blocks += n_nonmoving_compact_blocks + oldest_gen->n_compact_blocks;
        total_cnf_blocks += n_nonmoving_marked_compact_blocks;

        ASSERT(counted_cnf_blocks == total_cnf_blocks);
    }

    checkHeapChain(gen->blocks);

    for (n = 0; n < n_capabilities; n++) {
        ws = &gc_threads[n]->gens[gen->no];
        checkHeapChain(ws->todo_bd);
        checkHeapChain(ws->part_list);
        checkHeapChain(ws->scavd_list);
    }

    checkLargeObjects(gen->large_objects);
    checkCompactObjects(gen->compact_objects);
}

/* Full heap sanity check. */
static void checkFullHeap (bool after_major_gc)
{
    uint32_t g, n;

    for (g = 0; g < RtsFlags.GcFlags.generations; g++) {
        checkGeneration(&generations[g], after_major_gc);
    }
    for (n = 0; n < n_capabilities; n++) {
        checkNurserySanity(&nurseries[n]);
    }
}

void checkSanity (bool after_gc, bool major_gc)
{
    checkFullHeap(after_gc && major_gc);

    checkFreeListSanity();

    // always check the stacks in threaded mode, because checkHeap()
    // does nothing in this case.
    if (after_gc) {
        checkMutableLists();
        checkGlobalTSOList(true);
    }
}
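
/* Usage note (informal): in a DEBUG-built RTS these checks are driven
 * by the sanity debug flag (+RTS -DS), which makes the scheduler and
 * GC call checkSanity() around collections; expect a substantial
 * slowdown while it is enabled.
 */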

static void
markCompactBlocks(bdescr *bd)
{
    for (; bd != NULL; bd = bd->link) {
        compactMarkKnown(((StgCompactNFDataBlock*)bd->start)->owner);
    }
}

static void
markNonMovingSegments(struct NonmovingSegment *seg)
{
    while (seg) {
        markBlocks(Bdescr((P_)seg));
        seg = seg->link;
    }
}

// If memInventory() calculates that we have a memory leak, this
// function will try to find the block(s) that are leaking by marking
// all the ones that we know about, and search through memory to find
// blocks that are not marked.  In the debugger this can help to give
// us a clue about what kind of block leaked.  In the future we might
// annotate blocks with their allocation site to give more helpful
// info.
static void
findMemoryLeak (void)
{
    uint32_t g, i, j;
    for (g = 0; g < RtsFlags.GcFlags.generations; g++) {
        for (i = 0; i < n_capabilities; i++) {
            markBlocks(capabilities[i]->mut_lists[g]);
            markBlocks(gc_threads[i]->gens[g].part_list);
            markBlocks(gc_threads[i]->gens[g].scavd_list);
            markBlocks(gc_threads[i]->gens[g].todo_bd);
        }
        markBlocks(generations[g].blocks);
        markBlocks(generations[g].large_objects);
        markCompactBlocks(generations[g].compact_objects);
    }

    for (i = 0; i < n_nurseries; i++) {
        markBlocks(nurseries[i].blocks);
    }

    for (i = 0; i < n_capabilities; i++) {
        markBlocks(gc_threads[i]->free_blocks);
        markBlocks(capabilities[i]->pinned_object_block);
        markBlocks(capabilities[i]->upd_rem_set.queue.blocks);
    }

    if (RtsFlags.GcFlags.useNonmoving) {
        markBlocks(upd_rem_set_block_list);
        markBlocks(nonmoving_large_objects);
        markBlocks(nonmoving_marked_large_objects);
        markBlocks(nonmoving_compact_objects);
        markBlocks(nonmoving_marked_compact_objects);
        for (i = 0; i < NONMOVING_ALLOCA_CNT; i++) {
            struct NonmovingAllocator *alloc = nonmovingHeap.allocators[i];
            markNonMovingSegments(alloc->filled);
            markNonMovingSegments(alloc->active);
            for (j = 0; j < n_capabilities; j++) {
                markNonMovingSegments(alloc->current[j]);
            }
        }
        markNonMovingSegments(nonmovingHeap.sweep_list);
        markNonMovingSegments(nonmovingHeap.free);
        if (current_mark_queue)
            markBlocks(current_mark_queue->blocks);
    }

#if defined(PROFILING)
  // TODO:
  // if (RtsFlags.ProfFlags.doHeapProfile == HEAP_BY_RETAINER) {
  //    markRetainerBlocks();
  // }
#endif

  // count the blocks allocated by the arena allocator
  // TODO:
  // markArenaBlocks();

  // count the blocks containing executable memory
  markBlocks(exec_block);

  reportUnmarkedBlocks();
}
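
/* Relationship to memInventory() (informal): memInventory(), below,
 * does the block accounting, and when it detects a discrepancy it is
 * expected to call findMemoryLeak(), which marks every block we can
 * account for and then reports whatever is left unmarked via
 * reportUnmarkedBlocks().
 */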

void
checkRunQueue(Capability *cap)
{
    StgTSO *prev, *tso;
    prev = END_TSO_QUEUE;
    uint32_t n;
    for (n = 0, tso = cap->run_queue_hd; tso != END_TSO_QUEUE;
         prev = tso, tso = tso->_link, n++) {
        ASSERT(prev == END_TSO_QUEUE || prev->_link == tso);
        ASSERT(tso->block_info.prev == prev);
    }
    ASSERT(cap->run_queue_tl == prev);
    ASSERT(cap->n_run_queue == n);
}
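
/* Invariant illustrated (informal): the run queue is doubly linked,
 * forwards via _link and backwards via block_info.prev:
 *
 *     run_queue_hd -> tso1 <-> tso2 <-> tso3 <- run_queue_tl
 *
 * and cap->n_run_queue caches its length, which the loop recounts.
 */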

/* -----------------------------------------------------------------------------
   Memory leak detection

   memInventory() checks for memory leaks by counting up all the
   blocks we know about and comparing that to the number of blocks
   allegedly floating around in the system.
   -------------------------------------------------------------------------- */

// Useful for finding partially full blocks in gdb
void findSlop(bdescr *bd);
void findSlop(bdescr *bd)
{
    W_ slop;

    for (; bd != NULL; bd = bd->link) {
        slop = (bd->blocks * BLOCK_SIZE_W) - (bd->free - bd->start);
        if (slop > (1024/sizeof(W_))) {
            debugBelch("block at %p (bdescr %p) has %" FMT_Word "KB slop\n",
                       bd->start, bd, slop / (1024/(W_)sizeof(W_)));
        }
    }
}
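
/* Usage sketch (from gdb; the generation index is arbitrary):
 *
 *     (gdb) call findSlop(generations[0].blocks)
 *
 * prints each block in the chain that wastes more than 1KB.
 */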

static W_
genBlocks (generation *gen)
{
    W_ ret = 0;
    if (RtsFlags.GcFlags.useNonmoving && gen == oldest_gen) {
        // See Note [Live data accounting in nonmoving collector].
        ASSERT(countNonMovingHeap(&nonmovingHeap) == gen->n_blocks);
        ret += countAllocdBlocks(nonmoving_large_objects);
        ret += countAllocdBlocks(nonmoving_marked_large_objects);
        ret += countAllocdCompactBlocks(nonmoving_compact_objects);
        ret += countAllocdCompactBlocks(nonmoving_marked_compact_objects);
        ret += countNonMovingHeap(&nonmovingHeap);
        if (current_mark_queue)
            ret += countBlocks(current_mark_queue->blocks);
    } else {
        ASSERT(countBlocks(gen->blocks) == gen->n_blocks);
        ASSERT(countCompactBlocks(gen->compact_objects) == gen->n_compact_blocks);
        ASSERT(countCompactBlocks(gen->compact_blocks_in_import) == gen->n_compact_blocks_in_import);
        ret += gen->n_blocks;
    }

    ASSERT(countBlocks(gen->large_objects) == gen->n_large_blocks);

    ret += gen->n_old_blocks +
        countAllocdBlocks(gen->large_objects) +
        countAllocdCompactBlocks(gen->compact_objects) +
        countAllocdCompactBlocks(gen->compact_blocks_in_import);
    return ret;
}

static W_
countNonMovingSegments(struct NonmovingSegment *segs)
{
    W_ ret = 0;
    while (segs) {
        ret += countBlocks(Bdescr((P_)segs));
        segs = segs->link;
    }
    return ret;
}

static W_
countNonMovingAllocator(struct NonmovingAllocator *alloc)
{
    W_ ret = countNonMovingSegments(alloc->filled)
           + countNonMovingSegments(alloc->active);
    for (uint32_t i = 0; i < n_capabilities; ++i) {
        ret += countNonMovingSegments(alloc->current[i]);
    }
    return ret;
}

static W_
countNonMovingHeap(struct NonmovingHeap *heap)
{
    W_ ret = 0;
    for (int alloc_idx = 0; alloc_idx < NONMOVING_ALLOCA_CNT; alloc_idx++) {
        ret += countNonMovingAllocator(heap->allocators[alloc_idx]);
    }
    ret += countNonMovingSegments(heap->sweep_list);
    ret += countNonMovingSegments(heap->free);
    return ret;
}

void
memInventory (bool show)
{
  uint32_t g, i;
  W_ gen_blocks[RtsFlags.GcFlags.generations];
  W_ nursery_blocks = 0, retainer_blocks = 0,
      arena_blocks = 0, exec_blocks = 0, gc_free_blocks = 0,
      upd_rem_set_blocks = 0;
  W_ live_blocks = 0, free_blocks = 0;
  bool leak;

#if defined(THREADED_RTS)
  // Can't easily do a memory inventory: We might race with the nonmoving
  // collector. In principle we could try to take nonmoving_collection_mutex
  // and do an inventory if we have it but we don't currently implement this.
  if (RtsFlags.GcFlags.useNonmoving)
    return;
#endif

  // count the blocks we currently have

  for (g = 0; g < RtsFlags.GcFlags.generations; g++) {
      gen_blocks[g] = 0;
      for (i = 0; i < n_capabilities; i++) {
          gen_blocks[g] += countBlocks(capabilities[i]->mut_lists[g]);
          gen_blocks[g] += countBlocks(gc_threads[i]->gens[g].part_list);
          gen_blocks[g] += countBlocks(gc_threads[i]->gens[g].scavd_list);
          gen_blocks[g] += countBlocks(gc_threads[i]->gens[g].todo_bd);
      }
      gen_blocks[g] += genBlocks(&generations[g]);
  }

  for (i = 0; i < n_nurseries; i++) {