/* -----------------------------------------------------------------------------
 *
 * (c) The GHC Team 1998-2008
 *
 * Generational garbage collector: scavenging functions
 *
 * Documentation on the architecture of the Garbage Collector can be
 * found in the online commentary:
 *
 *   http://ghc.haskell.org/trac/ghc/wiki/Commentary/Rts/Storage/GC
 *
 * ---------------------------------------------------------------------------*/

#include "PosixSource.h"
#include "Rts.h"

#include "Storage.h"
#include "GC.h"
#include "GCThread.h"
#include "GCUtils.h"
#include "Compact.h"
#include "MarkStack.h"
#include "Evac.h"
#include "Scav.h"
#include "Apply.h"
#include "Trace.h"
#include "Sanity.h"
#include "Capability.h"
#include "LdvProfile.h"

static void scavenge_stack (StgPtr p, StgPtr stack_end);

static void scavenge_large_bitmap (StgPtr p,
                                   StgLargeBitmap *large_bitmap,
                                   StgWord size );

#if defined(THREADED_RTS) && !defined(PARALLEL_GC)
# define evacuate(a) evacuate1(a)
# define scavenge_loop(a) scavenge_loop1(a)
# define scavenge_block(a) scavenge_block1(a)
# define scavenge_mutable_list(bd,g) scavenge_mutable_list1(bd,g)
# define scavenge_capability_mut_lists(cap) scavenge_capability_mut_Lists1(cap)
#endif

/* -----------------------------------------------------------------------------
   Scavenge a TSO.
   -------------------------------------------------------------------------- */
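/* Note: the thread's stack lives in a separate STACK closure
   (tso->stackobj, evacuated below), so this function only follows the
   TSO's own fields; the stack frames themselves are scavenged when the
   STACK object itself is reached by the scavenger. */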

static void
scavengeTSO (StgTSO *tso)
{
    rtsBool saved_eager;

    debugTrace(DEBUG_gc,"scavenging thread %d",(int)tso->id);

    // update the pointer from the InCall.
    if (tso->bound != NULL) {
        // NB. We can't just set tso->bound->tso = tso, because this
        // might be an invalid copy of the TSO resulting from multiple
        // threads evacuating the TSO simultaneously (see
        // Evac.c:copy_tag()).  Calling evacuate() on this pointer
        // will ensure that we update it to point to the correct copy.
        evacuate((StgClosure **)&tso->bound->tso);
    }

    saved_eager = gct->eager_promotion;
    gct->eager_promotion = rtsFalse;

    evacuate((StgClosure **)&tso->blocked_exceptions);
    evacuate((StgClosure **)&tso->bq);

    // scavenge the current transaction record
    evacuate((StgClosure **)&tso->trec);

    evacuate((StgClosure **)&tso->stackobj);

    evacuate((StgClosure **)&tso->_link);
    if (   tso->why_blocked == BlockedOnMVar
        || tso->why_blocked == BlockedOnMVarRead
        || tso->why_blocked == BlockedOnBlackHole
        || tso->why_blocked == BlockedOnMsgThrowTo
        || tso->why_blocked == NotBlocked
        ) {
        evacuate(&tso->block_info.closure);
    }
#ifdef THREADED_RTS
    // in the THREADED_RTS, block_info.closure must always point to a
    // valid closure, because we assume this in throwTo().  In the
    // non-threaded RTS it might be a FD (for
    // BlockedOnRead/BlockedOnWrite) or a time value (BlockedOnDelay)
    else {
        tso->block_info.closure = (StgClosure *)END_TSO_QUEUE;
    }
#endif

    tso->dirty = gct->failed_to_evac;

    gct->eager_promotion = saved_eager;
}

/* -----------------------------------------------------------------------------
   Mutable arrays of pointers
   -------------------------------------------------------------------------- */
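/* Each MUT_ARR_PTRS closure carries a card table covering its payload,
   one card per 2^MUT_ARR_PTRS_CARD_BITS elements.  After scavenging a
   card we clear it if every element was evacuated to the desired
   generation and set it otherwise, so that scavenge_mut_arr_ptrs_marked()
   below can revisit only the marked cards. */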

static StgPtr scavenge_mut_arr_ptrs (StgMutArrPtrs *a)
{
    W_ m;
    rtsBool any_failed;
    StgPtr p, q;

    any_failed = rtsFalse;
    p = (StgPtr)&a->payload[0];
    for (m = 0; (int)m < (int)mutArrPtrsCards(a->ptrs) - 1; m++)
    {
        q = p + (1 << MUT_ARR_PTRS_CARD_BITS);
        for (; p < q; p++) {
            evacuate((StgClosure**)p);
        }
        if (gct->failed_to_evac) {
            any_failed = rtsTrue;
            *mutArrPtrsCard(a,m) = 1;
            gct->failed_to_evac = rtsFalse;
        } else {
            *mutArrPtrsCard(a,m) = 0;
        }
    }

    q = (StgPtr)&a->payload[a->ptrs];
    if (p < q) {
        for (; p < q; p++) {
            evacuate((StgClosure**)p);
        }
        if (gct->failed_to_evac) {
            any_failed = rtsTrue;
            *mutArrPtrsCard(a,m) = 1;
            gct->failed_to_evac = rtsFalse;
        } else {
            *mutArrPtrsCard(a,m) = 0;
        }
    }

    gct->failed_to_evac = any_failed;
    return (StgPtr)a + mut_arr_ptrs_sizeW(a);
}

// scavenge only the marked areas of a MUT_ARR_PTRS
static StgPtr scavenge_mut_arr_ptrs_marked (StgMutArrPtrs *a)
{
    W_ m;
    StgPtr p, q;
    rtsBool any_failed;

    any_failed = rtsFalse;
    for (m = 0; m < mutArrPtrsCards(a->ptrs); m++)
    {
        if (*mutArrPtrsCard(a,m) != 0) {
            p = (StgPtr)&a->payload[m << MUT_ARR_PTRS_CARD_BITS];
            q = stg_min(p + (1 << MUT_ARR_PTRS_CARD_BITS),
                        (StgPtr)&a->payload[a->ptrs]);
            for (; p < q; p++) {
                evacuate((StgClosure**)p);
            }
            if (gct->failed_to_evac) {
                any_failed = rtsTrue;
                gct->failed_to_evac = rtsFalse;
            } else {
                *mutArrPtrsCard(a,m) = 0;
            }
        }
    }

    gct->failed_to_evac = any_failed;
    return (StgPtr)a + mut_arr_ptrs_sizeW(a);
}

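/* Scavenge 'size' words starting at 'p', using 'bitmap' to decide which
   words are pointers: a clear bit marks a pointer, which is evacuated,
   while a set bit marks a non-pointer word, which is skipped.  Returns
   the address just past the last word examined. */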
STATIC_INLINE StgPtr
scavenge_small_bitmap (StgPtr p, StgWord size, StgWord bitmap)
{
    while (size > 0) {
        if ((bitmap & 1) == 0) {
            evacuate((StgClosure **)p);
        }
        p++;
        bitmap = bitmap >> 1;
        size--;
    }
    return p;
}

/* -----------------------------------------------------------------------------
   Blocks of function args occur on the stack (at the top) and
   in PAPs.
   -------------------------------------------------------------------------- */
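/* ARG_GEN functions carry a small bitmap inline in the info table,
   ARG_GEN_BIG functions point to a separate StgLargeBitmap, and every
   other argument kind uses a precomputed entry in stg_arg_bitmaps[];
   the switches below dispatch on fun_type accordingly. */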

STATIC_INLINE StgPtr
scavenge_arg_block (StgFunInfoTable *fun_info, StgClosure **args)
{
    StgPtr p;
    StgWord bitmap;
    StgWord size;

    p = (StgPtr)args;
    switch (fun_info->f.fun_type) {
    case ARG_GEN:
        bitmap = BITMAP_BITS(fun_info->f.b.bitmap);
        size = BITMAP_SIZE(fun_info->f.b.bitmap);
        goto small_bitmap;
    case ARG_GEN_BIG:
        size = GET_FUN_LARGE_BITMAP(fun_info)->size;
        scavenge_large_bitmap(p, GET_FUN_LARGE_BITMAP(fun_info), size);
        p += size;
        break;
    default:
        bitmap = BITMAP_BITS(stg_arg_bitmaps[fun_info->f.fun_type]);
        size = BITMAP_SIZE(stg_arg_bitmaps[fun_info->f.fun_type]);
    small_bitmap:
        p = scavenge_small_bitmap(p, size, bitmap);
        break;
    }
    return p;
}

STATIC_INLINE GNUC_ATTR_HOT StgPtr
scavenge_PAP_payload (StgClosure *fun, StgClosure **payload, StgWord size)
{
    StgPtr p;
    StgWord bitmap;
    StgFunInfoTable *fun_info;

    fun_info = get_fun_itbl(UNTAG_CLOSURE(fun));
    ASSERT(fun_info->i.type != PAP);
    p = (StgPtr)payload;

    switch (fun_info->f.fun_type) {
    case ARG_GEN:
        bitmap = BITMAP_BITS(fun_info->f.b.bitmap);
        goto small_bitmap;
    case ARG_GEN_BIG:
        scavenge_large_bitmap(p, GET_FUN_LARGE_BITMAP(fun_info), size);
        p += size;
        break;
    case ARG_BCO:
        scavenge_large_bitmap((StgPtr)payload, BCO_BITMAP(fun), size);
        p += size;
        break;
    default:
        bitmap = BITMAP_BITS(stg_arg_bitmaps[fun_info->f.fun_type]);
    small_bitmap:
        p = scavenge_small_bitmap(p, size, bitmap);
        break;
    }
    return p;
}

STATIC_INLINE GNUC_ATTR_HOT StgPtr
scavenge_PAP (StgPAP *pap)
{
    evacuate(&pap->fun);
    return scavenge_PAP_payload (pap->fun, pap->payload, pap->n_args);
}

STATIC_INLINE StgPtr
scavenge_AP (StgAP *ap)
{
    evacuate(&ap->fun);
    return scavenge_PAP_payload (ap->fun, ap->payload, ap->n_args);
}

/* -----------------------------------------------------------------------------
   Scavenge SRTs
   -------------------------------------------------------------------------- */

/* Similar to scavenge_large_bitmap(), but we don't write back the
 * pointers we get back from evacuate().
 */
static void
scavenge_large_srt_bitmap( StgLargeSRT *large_srt )
{
    nat i, j, size;
    StgWord bitmap;
    StgClosure **p;

    size   = (nat)large_srt->l.size;
    p      = (StgClosure **)large_srt->srt;

    for (i = 0; i < size / BITS_IN(W_); i++) {
        bitmap = large_srt->l.bitmap[i];
        // skip zero words: bitmaps can be very sparse, and this helps
        // performance a lot in some cases.
        if (bitmap != 0) {
            for (j = 0; j < BITS_IN(W_); j++) {
                if ((bitmap & 1) != 0) {
                    evacuate(p);
                }
                p++;
                bitmap = bitmap >> 1;
            }
        } else {
            p += BITS_IN(W_);
        }
    }
    if (size % BITS_IN(W_) != 0) {
        bitmap = large_srt->l.bitmap[i];
        for (j = 0; j < size % BITS_IN(W_); j++) {
            if ((bitmap & 1) != 0) {
                evacuate(p);
            }
            p++;
            bitmap = bitmap >> 1;
        }
    }
}

/* evacuate the SRT.  If srt_bitmap is zero, then there isn't an
 * srt field in the info table.  That's ok, because we'll
 * never dereference it.
 */
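/* A srt_bitmap of (StgHalfWord)(-1) marks a large SRT: in that case the
   srt field points to an StgLargeSRT, which is handed off to
   scavenge_large_srt_bitmap() above. */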
STATIC_INLINE GNUC_ATTR_HOT void
scavenge_srt (StgClosure **srt, nat srt_bitmap)
{
  nat bitmap;
  StgClosure **p;

  bitmap = srt_bitmap;
  p = srt;

  if (bitmap == (StgHalfWord)(-1)) {
      scavenge_large_srt_bitmap( (StgLargeSRT *)srt );
      return;
  }

  while (bitmap != 0) {
      if ((bitmap & 1) != 0) {
#if defined(COMPILING_WINDOWS_DLL)
          // Special-case to handle references to closures hiding out in DLLs, since
          // double indirections are required to get at those. The code generator knows
          // which is which when generating the SRT, so it stores the (indirect)
          // reference to the DLL closure in the table by first adding one to it.
          // We check for this here, and undo the addition before evacuating it.
          //
          // If the SRT entry hasn't got bit 0 set, the SRT entry points to a
          // closure that's fixed at link-time, and no extra magic is required.
          if ( (W_)(*srt) & 0x1 ) {
              evacuate( (StgClosure**) ((W_) (*srt) & ~0x1));
          } else {
              evacuate(p);
          }
#else
          evacuate(p);
#endif
      }
      p++;
      bitmap = bitmap >> 1;
  }
}


STATIC_INLINE GNUC_ATTR_HOT void
scavenge_thunk_srt(const StgInfoTable *info)
{
    StgThunkInfoTable *thunk_info;
    nat bitmap;

    if (!major_gc) return;

    thunk_info = itbl_to_thunk_itbl(info);
    bitmap = thunk_info->i.srt_bitmap;
    if (bitmap) {
        // don't read srt_offset if bitmap==0, because it doesn't exist
        // and so the memory might not be readable.
        scavenge_srt((StgClosure **)GET_SRT(thunk_info), bitmap);
    }
}

STATIC_INLINE GNUC_ATTR_HOT void
scavenge_fun_srt(const StgInfoTable *info)
{
    StgFunInfoTable *fun_info;
    nat bitmap;

    if (!major_gc) return;

    fun_info = itbl_to_fun_itbl(info);
    bitmap = fun_info->i.srt_bitmap;
    if (bitmap) {
        // don't read srt_offset if bitmap==0, because it doesn't exist
        // and so the memory might not be readable.
        scavenge_srt((StgClosure **)GET_FUN_SRT(fun_info), bitmap);
    }
}

/* -----------------------------------------------------------------------------
   Scavenge a block from the given scan pointer up to bd->free.

   evac_gen_no is set by the caller to be either zero (for a step in a
   generation < N) or G where G is the generation of the step being
   scavenged.

   We sometimes temporarily change evac_gen_no back to zero if we're
   scavenging a mutable object where eager promotion isn't such a good
   idea.
   -------------------------------------------------------------------------- */
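/* Most of the mutable cases below follow the same protocol: temporarily
   turn off eager promotion, evacuate the object's fields, and then
   rewrite the info pointer to the _DIRTY variant if anything failed to
   be evacuated to the desired generation (so the object stays on the
   mutable list), or to the _CLEAN variant otherwise. */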

static GNUC_ATTR_HOT void
scavenge_block (bdescr *bd)
{
  StgPtr p, q;
  StgInfoTable *info;
  rtsBool saved_eager_promotion;
  gen_workspace *ws;

  debugTrace(DEBUG_gc, "scavenging block %p (gen %d) @ %p",
             bd->start, bd->gen_no, bd->u.scan);

  gct->scan_bd = bd;
  gct->evac_gen_no = bd->gen_no;
  saved_eager_promotion = gct->eager_promotion;
  gct->failed_to_evac = rtsFalse;

  ws = &gct->gens[bd->gen->no];

  p = bd->u.scan;

  // we might be evacuating into the very object that we're
  // scavenging, so we have to check the real bd->free pointer each
  // time around the loop.
  while (p < bd->free || (bd == ws->todo_bd && p < ws->todo_free)) {

      ASSERT(bd->link == NULL);
    ASSERT(LOOKS_LIKE_CLOSURE_PTR(p));
    info = get_itbl((StgClosure *)p);

    ASSERT(gct->thunk_selector_depth == 0);

    q = p;
    switch (info->type) {

    case MVAR_CLEAN:
    case MVAR_DIRTY:
    {
        StgMVar *mvar = ((StgMVar *)p);
        gct->eager_promotion = rtsFalse;
        evacuate((StgClosure **)&mvar->head);
        evacuate((StgClosure **)&mvar->tail);
        evacuate((StgClosure **)&mvar->value);
        gct->eager_promotion = saved_eager_promotion;

        if (gct->failed_to_evac) {
            mvar->header.info = &stg_MVAR_DIRTY_info;
        } else {
            mvar->header.info = &stg_MVAR_CLEAN_info;
        }
        p += sizeofW(StgMVar);
        break;
    }

    case TVAR:
    {
        StgTVar *tvar = ((StgTVar *)p);
        gct->eager_promotion = rtsFalse;
        evacuate((StgClosure **)&tvar->current_value);
        evacuate((StgClosure **)&tvar->first_watch_queue_entry);
        gct->eager_promotion = saved_eager_promotion;

        if (gct->failed_to_evac) {
            tvar->header.info = &stg_TVAR_DIRTY_info;
        } else {
            tvar->header.info = &stg_TVAR_CLEAN_info;
        }
        p += sizeofW(StgTVar);
        break;
    }

    case FUN_2_0:
        scavenge_fun_srt(info);
        evacuate(&((StgClosure *)p)->payload[1]);
        evacuate(&((StgClosure *)p)->payload[0]);
        p += sizeofW(StgHeader) + 2;
        break;

    case THUNK_2_0:
        scavenge_thunk_srt(info);
        evacuate(&((StgThunk *)p)->payload[1]);
        evacuate(&((StgThunk *)p)->payload[0]);
        p += sizeofW(StgThunk) + 2;
        break;

    case CONSTR_2_0:
        evacuate(&((StgClosure *)p)->payload[1]);
        evacuate(&((StgClosure *)p)->payload[0]);
        p += sizeofW(StgHeader) + 2;
        break;

    case THUNK_1_0:
        scavenge_thunk_srt(info);
        evacuate(&((StgThunk *)p)->payload[0]);
        p += sizeofW(StgThunk) + 1;
        break;

    case FUN_1_0:
        scavenge_fun_srt(info);
    case CONSTR_1_0:
        evacuate(&((StgClosure *)p)->payload[0]);
        p += sizeofW(StgHeader) + 1;
        break;

    case THUNK_0_1:
        scavenge_thunk_srt(info);
        p += sizeofW(StgThunk) + 1;
        break;

    case FUN_0_1:
        scavenge_fun_srt(info);
    case CONSTR_0_1:
        p += sizeofW(StgHeader) + 1;
        break;

    case THUNK_0_2:
        scavenge_thunk_srt(info);
        p += sizeofW(StgThunk) + 2;
        break;

    case FUN_0_2:
        scavenge_fun_srt(info);
    case CONSTR_0_2:
        p += sizeofW(StgHeader) + 2;
        break;

    case THUNK_1_1:
        scavenge_thunk_srt(info);
        evacuate(&((StgThunk *)p)->payload[0]);
        p += sizeofW(StgThunk) + 2;
        break;

    case FUN_1_1:
        scavenge_fun_srt(info);
    case CONSTR_1_1:
        evacuate(&((StgClosure *)p)->payload[0]);
        p += sizeofW(StgHeader) + 2;
        break;

    case FUN:
        scavenge_fun_srt(info);
        goto gen_obj;

    case THUNK:
    {
        StgPtr end;

        scavenge_thunk_srt(info);
        end = (P_)((StgThunk *)p)->payload + info->layout.payload.ptrs;
        for (p = (P_)((StgThunk *)p)->payload; p < end; p++) {
            evacuate((StgClosure **)p);
        }
        p += info->layout.payload.nptrs;
        break;
    }

    gen_obj:
    case CONSTR:
    case WEAK:
    case PRIM:
    {
        StgPtr end;

        end = (P_)((StgClosure *)p)->payload + info->layout.payload.ptrs;
        for (p = (P_)((StgClosure *)p)->payload; p < end; p++) {
            evacuate((StgClosure **)p);
        }
        p += info->layout.payload.nptrs;
        break;
    }

    case BCO: {
        StgBCO *bco = (StgBCO *)p;
        evacuate((StgClosure **)&bco->instrs);
        evacuate((StgClosure **)&bco->literals);
        evacuate((StgClosure **)&bco->ptrs);
        p += bco_sizeW(bco);
        break;
    }

    case IND_PERM:
    case BLACKHOLE:
        evacuate(&((StgInd *)p)->indirectee);
        p += sizeofW(StgInd);
        break;

    case MUT_VAR_CLEAN:
    case MUT_VAR_DIRTY:
        gct->eager_promotion = rtsFalse;
        evacuate(&((StgMutVar *)p)->var);
        gct->eager_promotion = saved_eager_promotion;

        if (gct->failed_to_evac) {
            ((StgClosure *)q)->header.info = &stg_MUT_VAR_DIRTY_info;
        } else {
            ((StgClosure *)q)->header.info = &stg_MUT_VAR_CLEAN_info;
        }
        p += sizeofW(StgMutVar);
        break;

    case BLOCKING_QUEUE:
    {
        StgBlockingQueue *bq = (StgBlockingQueue *)p;

        gct->eager_promotion = rtsFalse;
        evacuate(&bq->bh);
        evacuate((StgClosure**)&bq->owner);
        evacuate((StgClosure**)&bq->queue);
        evacuate((StgClosure**)&bq->link);
        gct->eager_promotion = saved_eager_promotion;

        if (gct->failed_to_evac) {
            bq->header.info = &stg_BLOCKING_QUEUE_DIRTY_info;
        } else {
            bq->header.info = &stg_BLOCKING_QUEUE_CLEAN_info;
        }
        p += sizeofW(StgBlockingQueue);
        break;
    }

    case THUNK_SELECTOR:
    {
        StgSelector *s = (StgSelector *)p;
        evacuate(&s->selectee);
        p += THUNK_SELECTOR_sizeW();
        break;
    }

    // A chunk of stack saved in a heap object
    case AP_STACK:
    {
        StgAP_STACK *ap = (StgAP_STACK *)p;

        evacuate(&ap->fun);
        scavenge_stack((StgPtr)ap->payload, (StgPtr)ap->payload + ap->size);
        p = (StgPtr)ap->payload + ap->size;
        break;
    }

    case PAP:
        p = scavenge_PAP((StgPAP *)p);
        break;

    case AP:
        p = scavenge_AP((StgAP *)p);
        break;

    case ARR_WORDS:
        // nothing to follow
        p += arr_words_sizeW((StgArrBytes *)p);
        break;

    case MUT_ARR_PTRS_CLEAN:
    case MUT_ARR_PTRS_DIRTY:
    {
        // We don't eagerly promote objects pointed to by a mutable
        // array, but if we find the array only points to objects in
        // the same or an older generation, we mark it "clean" and
        // avoid traversing it during minor GCs.
        gct->eager_promotion = rtsFalse;

        p = scavenge_mut_arr_ptrs((StgMutArrPtrs*)p);

        if (gct->failed_to_evac) {
            ((StgClosure *)q)->header.info = &stg_MUT_ARR_PTRS_DIRTY_info;
        } else {
            ((StgClosure *)q)->header.info = &stg_MUT_ARR_PTRS_CLEAN_info;
        }

        gct->eager_promotion = saved_eager_promotion;
        gct->failed_to_evac = rtsTrue; // always put it on the mutable list.
        break;
    }

    case MUT_ARR_PTRS_FROZEN:
    case MUT_ARR_PTRS_FROZEN0:
        // follow everything
    {
        p = scavenge_mut_arr_ptrs((StgMutArrPtrs*)p);

        // If we're going to put this object on the mutable list, then
        // set its info ptr to MUT_ARR_PTRS_FROZEN0 to indicate that.
        if (gct->failed_to_evac) {
            ((StgClosure *)q)->header.info = &stg_MUT_ARR_PTRS_FROZEN0_info;
        } else {
            ((StgClosure *)q)->header.info = &stg_MUT_ARR_PTRS_FROZEN_info;
        }
        break;
    }

    case SMALL_MUT_ARR_PTRS_CLEAN:
    case SMALL_MUT_ARR_PTRS_DIRTY:
        // follow everything
    {
        StgPtr next;

        // We don't eagerly promote objects pointed to by a mutable
        // array, but if we find the array only points to objects in
        // the same or an older generation, we mark it "clean" and
        // avoid traversing it during minor GCs.
        gct->eager_promotion = rtsFalse;
        next = p + small_mut_arr_ptrs_sizeW((StgSmallMutArrPtrs*)p);
        for (p = (P_)((StgSmallMutArrPtrs *)p)->payload; p < next; p++) {
            evacuate((StgClosure **)p);
        }
        gct->eager_promotion = saved_eager_promotion;

        if (gct->failed_to_evac) {
            ((StgClosure *)q)->header.info = &stg_SMALL_MUT_ARR_PTRS_DIRTY_info;
        } else {
            ((StgClosure *)q)->header.info = &stg_SMALL_MUT_ARR_PTRS_CLEAN_info;
        }

        gct->failed_to_evac = rtsTrue; // always put it on the mutable list.
        break;
    }

    case SMALL_MUT_ARR_PTRS_FROZEN:
    case SMALL_MUT_ARR_PTRS_FROZEN0:
        // follow everything
    {
        StgPtr next;

        next = p + small_mut_arr_ptrs_sizeW((StgSmallMutArrPtrs*)p);
        for (p = (P_)((StgSmallMutArrPtrs *)p)->payload; p < next; p++) {
            evacuate((StgClosure **)p);
        }

        // If we're going to put this object on the mutable list, then
        // set its info ptr to SMALL_MUT_ARR_PTRS_FROZEN0 to indicate that.
        if (gct->failed_to_evac) {
            ((StgClosure *)q)->header.info = &stg_SMALL_MUT_ARR_PTRS_FROZEN0_info;
        } else {
            ((StgClosure *)q)->header.info = &stg_SMALL_MUT_ARR_PTRS_FROZEN_info;
        }
        break;
    }

    case TSO:
    {
        scavengeTSO((StgTSO *)p);
        p += sizeofW(StgTSO);
        break;
    }

    case STACK:
    {
        StgStack *stack = (StgStack*)p;

        gct->eager_promotion = rtsFalse;

        scavenge_stack(stack->sp, stack->stack + stack->stack_size);
        stack->dirty = gct->failed_to_evac;
        p += stack_sizeW(stack);

        gct->eager_promotion = saved_eager_promotion;
        break;
    }

    case MUT_PRIM:
      {
        StgPtr end;

        gct->eager_promotion = rtsFalse;

        end = (P_)((StgClosure *)p)->payload + info->layout.payload.ptrs;
        for (p = (P_)((StgClosure *)p)->payload; p < end; p++) {
            evacuate((StgClosure **)p);
        }
        p += info->layout.payload.nptrs;

        gct->eager_promotion = saved_eager_promotion;
        gct->failed_to_evac = rtsTrue; // mutable
        break;
      }

    case TREC_CHUNK:
      {
        StgWord i;
        StgTRecChunk *tc = ((StgTRecChunk *) p);
        TRecEntry *e = &(tc -> entries[0]);
        gct->eager_promotion = rtsFalse;
        evacuate((StgClosure **)&tc->prev_chunk);
        for (i = 0; i < tc -> next_entry_idx; i ++, e++ ) {
          evacuate((StgClosure **)&e->tvar);
          evacuate((StgClosure **)&e->expected_value);
          evacuate((StgClosure **)&e->new_value);
        }
        gct->eager_promotion = saved_eager_promotion;
        gct->failed_to_evac = rtsTrue; // mutable
        p += sizeofW(StgTRecChunk);
        break;
      }

    default:
        barf("scavenge: unimplemented/strange closure type %d @ %p",
             info->type, p);
    }

    /*
     * We need to record the current object on the mutable list if
     *  (a) It is actually mutable, or
     *  (b) It contains pointers to a younger generation.
     * Case (b) arises if we didn't manage to promote everything that
     * the current object points to into the current generation.
     */
    if (gct->failed_to_evac) {
        gct->failed_to_evac = rtsFalse;
        if (bd->gen_no > 0) {
            recordMutableGen_GC((StgClosure *)q, bd->gen_no);
        }
    }
  }

  if (p > bd->free)  {
      gct->copied += ws->todo_free - bd->free;
      bd->free = p;
  }

  debugTrace(DEBUG_gc, "   scavenged %ld bytes",
             (unsigned long)((bd->free - bd->u.scan) * sizeof(W_)));

  // update stats: this is a block that has been scavenged
  gct->scanned += bd->free - bd->u.scan;
  bd->u.scan = bd->free;

  if (bd != ws->todo_bd) {
      // we're not going to evac any more objects into
      // this block, so push it now.
      push_scanned_block(bd, ws);
  }

  gct->scan_bd = NULL;
}
/* -----------------------------------------------------------------------------
   Scavenge everything on the mark stack.

   This is slightly different from scavenge():
      - we don't walk linearly through the objects, so the scavenger
        doesn't need to advance the pointer on to the next object.
   -------------------------------------------------------------------------- */

static void
scavenge_mark_stack(void)
{
    StgPtr p, q;
    StgInfoTable *info;
    rtsBool saved_eager_promotion;

    gct->evac_gen_no = oldest_gen->no;
    saved_eager_promotion = gct->eager_promotion;

    while ((p = pop_mark_stack())) {

        ASSERT(LOOKS_LIKE_CLOSURE_PTR(p));
        info = get_itbl((StgClosure *)p);

        q = p;
        switch (info->type) {

        case MVAR_CLEAN:
        case MVAR_DIRTY:
        {
            StgMVar *mvar = ((StgMVar *)p);
            gct->eager_promotion = rtsFalse;
            evacuate((StgClosure **)&mvar->head);
            evacuate((StgClosure **)&mvar->tail);
            evacuate((StgClosure **)&mvar->value);
            gct->eager_promotion = saved_eager_promotion;

            if (gct->failed_to_evac) {
                mvar->header.info = &stg_MVAR_DIRTY_info;
            } else {
                mvar->header.info = &stg_MVAR_CLEAN_info;
            }
            break;
        }

        case TVAR:
        {
            StgTVar *tvar = ((StgTVar *)p);
            gct->eager_promotion = rtsFalse;
            evacuate((StgClosure **)&tvar->current_value);
            evacuate((StgClosure **)&tvar->first_watch_queue_entry);
            gct->eager_promotion = saved_eager_promotion;

            if (gct->failed_to_evac) {
                tvar->header.info = &stg_TVAR_DIRTY_info;
            } else {
                tvar->header.info = &stg_TVAR_CLEAN_info;
            }
            break;
        }

        case FUN_2_0:
            scavenge_fun_srt(info);
            evacuate(&((StgClosure *)p)->payload[1]);
            evacuate(&((StgClosure *)p)->payload[0]);
            break;

        case THUNK_2_0:
            scavenge_thunk_srt(info);
            evacuate(&((StgThunk *)p)->payload[1]);
            evacuate(&((StgThunk *)p)->payload[0]);
            break;

        case CONSTR_2_0:
            evacuate(&((StgClosure *)p)->payload[1]);
            evacuate(&((StgClosure *)p)->payload[0]);
            break;

        case FUN_1_0:
        case FUN_1_1:
            scavenge_fun_srt(info);
            evacuate(&((StgClosure *)p)->payload[0]);
            break;

        case THUNK_1_0:
        case THUNK_1_1:
            scavenge_thunk_srt(info);
            evacuate(&((StgThunk *)p)->payload[0]);
            break;

        case CONSTR_1_0:
        case CONSTR_1_1:
            evacuate(&((StgClosure *)p)->payload[0]);
            break;

        case FUN_0_1:
        case FUN_0_2:
            scavenge_fun_srt(info);
            break;

        case THUNK_0_1:
        case THUNK_0_2:
            scavenge_thunk_srt(info);
            break;

        case CONSTR_0_1:
        case CONSTR_0_2:
            break;

        case FUN:
            scavenge_fun_srt(info);
            goto gen_obj;

        case THUNK:
        {
            StgPtr end;

            scavenge_thunk_srt(info);
            end = (P_)((StgThunk *)p)->payload + info->layout.payload.ptrs;
            for (p = (P_)((StgThunk *)p)->payload; p < end; p++) {
                evacuate((StgClosure **)p);
            }
            break;
        }

        gen_obj:
        case CONSTR:
        case WEAK:
        case PRIM:
        {
            StgPtr end;

            end = (P_)((StgClosure *)p)->payload + info->layout.payload.ptrs;
            for (p = (P_)((StgClosure *)p)->payload; p < end; p++) {
                evacuate((StgClosure **)p);
            }
            break;
        }

        case BCO: {
            StgBCO *bco = (StgBCO *)p;
            evacuate((StgClosure **)&bco->instrs);
            evacuate((StgClosure **)&bco->literals);
            evacuate((StgClosure **)&bco->ptrs);
            break;
        }

        case IND_PERM:
            // don't need to do anything here: the only possible case
            // is that we're in a 1-space compacting collector, with
            // no "old" generation.
            break;

        case IND:
        case BLACKHOLE:
            evacuate(&((StgInd *)p)->indirectee);
            break;

        case MUT_VAR_CLEAN:
        case MUT_VAR_DIRTY: {
            gct->eager_promotion = rtsFalse;
            evacuate(&((StgMutVar *)p)->var);
            gct->eager_promotion = saved_eager_promotion;

            if (gct->failed_to_evac) {
                ((StgClosure *)q)->header.info = &stg_MUT_VAR_DIRTY_info;
            } else {
                ((StgClosure *)q)->header.info = &stg_MUT_VAR_CLEAN_info;
            }
            break;
        }

        case BLOCKING_QUEUE:
        {
            StgBlockingQueue *bq = (StgBlockingQueue *)p;

            gct->eager_promotion = rtsFalse;
            evacuate(&bq->bh);
            evacuate((StgClosure**)&bq->owner);
            evacuate((StgClosure**)&bq->queue);
            evacuate((StgClosure**)&bq->link);
            gct->eager_promotion = saved_eager_promotion;

            if (gct->failed_to_evac) {
                bq->header.info = &stg_BLOCKING_QUEUE_DIRTY_info;
            } else {
                bq->header.info = &stg_BLOCKING_QUEUE_CLEAN_info;
            }
            break;
        }

        case ARR_WORDS:
            break;

        case THUNK_SELECTOR:
        {
            StgSelector *s = (StgSelector *)p;
            evacuate(&s->selectee);
            break;
        }

        // A chunk of stack saved in a heap object
        case AP_STACK:
        {
            StgAP_STACK *ap = (StgAP_STACK *)p;

            evacuate(&ap->fun);
            scavenge_stack((StgPtr)ap->payload, (StgPtr)ap->payload + ap->size);
            break;
        }

        case PAP:
            scavenge_PAP((StgPAP *)p);
            break;

        case AP:
            scavenge_AP((StgAP *)p);
            break;

        case MUT_ARR_PTRS_CLEAN:
        case MUT_ARR_PTRS_DIRTY:
            // follow everything
        {
            // We don't eagerly promote objects pointed to by a mutable
            // array, but if we find the array only points to objects in
            // the same or an older generation, we mark it "clean" and
            // avoid traversing it during minor GCs.
            gct->eager_promotion = rtsFalse;

            scavenge_mut_arr_ptrs((StgMutArrPtrs *)p);

            if (gct->failed_to_evac) {
                ((StgClosure *)q)->header.info = &stg_MUT_ARR_PTRS_DIRTY_info;
            } else {
                ((StgClosure *)q)->header.info = &stg_MUT_ARR_PTRS_CLEAN_info;
            }

            gct->eager_promotion = saved_eager_promotion;
            gct->failed_to_evac = rtsTrue; // mutable anyhow.
            break;
        }

        case MUT_ARR_PTRS_FROZEN:
        case MUT_ARR_PTRS_FROZEN0:
            // follow everything
        {
            StgPtr q = p;

            scavenge_mut_arr_ptrs((StgMutArrPtrs *)p);

            // If we're going to put this object on the mutable list, then
            // set its info ptr to MUT_ARR_PTRS_FROZEN0 to indicate that.
            if (gct->failed_to_evac) {
                ((StgClosure *)q)->header.info = &stg_MUT_ARR_PTRS_FROZEN0_info;
            } else {
                ((StgClosure *)q)->header.info = &stg_MUT_ARR_PTRS_FROZEN_info;
            }
            break;
        }

        case SMALL_MUT_ARR_PTRS_CLEAN:
        case SMALL_MUT_ARR_PTRS_DIRTY:
            // follow everything
        {
            StgPtr next;
            rtsBool saved_eager;

            // We don't eagerly promote objects pointed to by a mutable
            // array, but if we find the array only points to objects in
            // the same or an older generation, we mark it "clean" and
            // avoid traversing it during minor GCs.
            saved_eager = gct->eager_promotion;
            gct->eager_promotion = rtsFalse;
            next = p + small_mut_arr_ptrs_sizeW((StgSmallMutArrPtrs*)p);
            for (p = (P_)((StgSmallMutArrPtrs *)p)->payload; p < next; p++) {
                evacuate((StgClosure **)p);
            }
            gct->eager_promotion = saved_eager;

            if (gct->failed_to_evac) {
                ((StgClosure *)q)->header.info = &stg_SMALL_MUT_ARR_PTRS_DIRTY_info;
            } else {
                ((StgClosure *)q)->header.info = &stg_SMALL_MUT_ARR_PTRS_CLEAN_info;
            }

            gct->failed_to_evac = rtsTrue; // mutable anyhow.
            break;
        }

        case SMALL_MUT_ARR_PTRS_FROZEN:
        case SMALL_MUT_ARR_PTRS_FROZEN0:
            // follow everything
        {
            StgPtr next, q = p;

            next = p + small_mut_arr_ptrs_sizeW((StgSmallMutArrPtrs*)p);
            for (p = (P_)((StgSmallMutArrPtrs *)p)->payload; p < next; p++) {
                evacuate((StgClosure **)p);
            }

            // If we're going to put this object on the mutable list, then
            // set its info ptr to SMALL_MUT_ARR_PTRS_FROZEN0 to indicate that.
            if (gct->failed_to_evac) {
                ((StgClosure *)q)->header.info = &stg_SMALL_MUT_ARR_PTRS_FROZEN0_info;
            } else {
                ((StgClosure *)q)->header.info = &stg_SMALL_MUT_ARR_PTRS_FROZEN_info;
            }
            break;
        }

        case TSO:
        {
            scavengeTSO((StgTSO*)p);
            break;
        }

        case STACK:
        {
            StgStack *stack = (StgStack*)p;

            gct->eager_promotion = rtsFalse;

            scavenge_stack(stack->sp, stack->stack + stack->stack_size);
            stack->dirty = gct->failed_to_evac;

            gct->eager_promotion = saved_eager_promotion;
            break;
        }

        case MUT_PRIM:
        {
            StgPtr end;

            gct->eager_promotion = rtsFalse;

            end = (P_)((StgClosure *)p)->payload + info->layout.payload.ptrs;
            for (p = (P_)((StgClosure *)p)->payload; p < end; p++) {
                evacuate((StgClosure **)p);
            }

            gct->eager_promotion = saved_eager_promotion;
            gct->failed_to_evac = rtsTrue; // mutable
            break;
        }

        case TREC_CHUNK:
          {
            StgWord i;
            StgTRecChunk *tc = ((StgTRecChunk *) p);
            TRecEntry *e = &(tc -> entries[0]);
            gct->eager_promotion = rtsFalse;
            evacuate((StgClosure **)&tc->prev_chunk);
            for (i = 0; i < tc -> next_entry_idx; i ++, e++ ) {
              evacuate((StgClosure **)&e->tvar);
              evacuate((StgClosure **)&e->expected_value);
              evacuate((StgClosure **)&e->new_value);
            }
            gct->eager_promotion = saved_eager_promotion;
            gct->failed_to_evac = rtsTrue; // mutable
            break;
          }

        default:
            barf("scavenge_mark_stack: unimplemented/strange closure type %d @ %p",
                 info->type, p);
        }

        if (gct->failed_to_evac) {
            gct->failed_to_evac = rtsFalse;
            if (gct->evac_gen_no) {
                recordMutableGen_GC((StgClosure *)q, gct->evac_gen_no);
            }
        }
    } // while (p = pop_mark_stack())
}

/* -----------------------------------------------------------------------------
   Scavenge one object.

   This is used for objects that are temporarily marked as mutable
   because they contain old-to-new generation pointers.  Only certain
   objects can have this property.
   -------------------------------------------------------------------------- */
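/* The cases below mirror those in scavenge_block(), except that there is
   no scan pointer to advance: we scavenge exactly the one closure at 'p',
   using the same CLEAN/DIRTY marking for the mutable cases. */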

static rtsBool
scavenge_one(StgPtr p)
{
    const StgInfoTable *info;
    rtsBool no_luck;
    rtsBool saved_eager_promotion;

    saved_eager_promotion = gct->eager_promotion;

    ASSERT(LOOKS_LIKE_CLOSURE_PTR(p));
    info = get_itbl((StgClosure *)p);

    switch (info->type) {

    case MVAR_CLEAN:
    case MVAR_DIRTY:
    {
        StgMVar *mvar = ((StgMVar *)p);
        gct->eager_promotion = rtsFalse;
        evacuate((StgClosure **)&mvar->head);
        evacuate((StgClosure **)&mvar->tail);
        evacuate((StgClosure **)&mvar->value);
        gct->eager_promotion = saved_eager_promotion;

        if (gct->failed_to_evac) {
            mvar->header.info = &stg_MVAR_DIRTY_info;
        } else {
            mvar->header.info = &stg_MVAR_CLEAN_info;
        }
        break;
    }

    case TVAR:
    {
        StgTVar *tvar = ((StgTVar *)p);
        gct->eager_promotion = rtsFalse;
        evacuate((StgClosure **)&tvar->current_value);
        evacuate((StgClosure **)&tvar->first_watch_queue_entry);
        gct->eager_promotion = saved_eager_promotion;

        if (gct->failed_to_evac) {
            tvar->header.info = &stg_TVAR_DIRTY_info;
        } else {
            tvar->header.info = &stg_TVAR_CLEAN_info;
        }
        break;
    }

    case THUNK:
    case THUNK_1_0:
    case THUNK_0_1:
    case THUNK_1_1:
    case THUNK_0_2:
    case THUNK_2_0:
    {
        StgPtr q, end;

        end = (StgPtr)((StgThunk *)p)->payload + info->layout.payload.ptrs;
        for (q = (StgPtr)((StgThunk *)p)->payload; q < end; q++) {
            evacuate((StgClosure **)q);
        }
        break;
    }

    case FUN:
    case FUN_1_0:                       // hardly worth specialising these guys
    case FUN_0_1:
    case FUN_1_1:
    case FUN_0_2:
    case FUN_2_0:
    case CONSTR:
    case CONSTR_1_0:
    case CONSTR_0_1:
    case CONSTR_1_1:
    case CONSTR_0_2:
    case CONSTR_2_0:
    case WEAK:
    case PRIM:
    case IND_PERM:
    {
        StgPtr q, end;

        end = (StgPtr)((StgClosure *)p)->payload + info->layout.payload.ptrs;
        for (q = (StgPtr)((StgClosure *)p)->payload; q < end; q++) {
            evacuate((StgClosure **)q);
        }
        break;
    }

    case MUT_VAR_CLEAN:
    case MUT_VAR_DIRTY: {
        StgPtr q = p;

        gct->eager_promotion = rtsFalse;
        evacuate(&((StgMutVar *)p)->var);
        gct->eager_promotion = saved_eager_promotion;

        if (gct->failed_to_evac) {
            ((StgClosure *)q)->header.info = &stg_MUT_VAR_DIRTY_info;
        } else {
            ((StgClosure *)q)->header.info = &stg_MUT_VAR_CLEAN_info;
        }
        break;
    }

    case BLOCKING_QUEUE:
    {
        StgBlockingQueue *bq = (StgBlockingQueue *)p;

        gct->eager_promotion = rtsFalse;
        evacuate(&bq->bh);
        evacuate((StgClosure**)&bq->owner);
        evacuate((StgClosure**)&bq->queue);
        evacuate((StgClosure**)&bq->link);
        gct->eager_promotion = saved_eager_promotion;

        if (gct->failed_to_evac) {
            bq->header.info = &stg_BLOCKING_QUEUE_DIRTY_info;
        } else {
            bq->header.info = &stg_BLOCKING_QUEUE_CLEAN_info;
        }
        break;
    }

    case THUNK_SELECTOR:
    {
        StgSelector *s = (StgSelector *)p;
        evacuate(&s->selectee);
        break;