Glasgow Haskell Compiler / GHC / Commits

Commit 4e79709d
Authored Apr 17, 2008 by simonmarhaskell@gmail.com

remove EVACUATED: store the forwarding pointer in the info pointer

parent a4e09e8f
Changes 13
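The idea behind the change: info pointers always point to word-aligned info tables, so their low bit is always zero. Rather than overwriting an evacuated closure with a dedicated EVACUATED closure (an info table plus an evacuee field), the GC can store the to-space address directly in the closure's info word with the low bit set, and readers test that bit before treating the word as an info pointer. Below is a minimal standalone sketch of the scheme with made-up names (it is not GHC code); the real macros introduced by this commit are IS_FORWARDING_PTR, MK_FORWARDING_PTR and UN_FORWARDING_PTR in includes/Storage.h below.

/* Minimal sketch of the low-bit forwarding-pointer scheme (not GHC code;
 * Closure, forward and follow are illustrative names only). */

#include <stdint.h>

typedef uintptr_t Word;

#define IS_FWD(w)  (((w) & 1) != 0)   /* low bit set => forwarding pointer */
#define MK_FWD(p)  (((Word)(p)) | 1)  /* tag a to-space address            */
#define UN_FWD(w)  ((w) - 1)          /* recover the untagged address      */

typedef struct Closure {
    Word info;        /* either a (word-aligned) info pointer or MK_FWD(to) */
    Word payload[1];
} Closure;

/* Evacuation step: copy src into to and leave a forwarding pointer behind. */
static Closure *forward(Closure *src, Closure *to)
{
    to->info = src->info;       /* the copy keeps the real info pointer     */
    to->payload[0] = src->payload[0];
    src->info = MK_FWD(to);     /* from-space object now just points at to  */
    return to;
}

/* Reader side: follow a possibly-forwarded closure. */
static Closure *follow(Closure *c)
{
    return IS_FWD(c->info) ? (Closure *)UN_FWD(c->info) : c;
}

This is also why LOOKS_LIKE_INFO_PTR in includes/Storage.h now accepts forwarding pointers, and why every place that used to test for the EVACUATED closure type now tests IS_FORWARDING_PTR on the raw header word instead.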
includes/ClosureTypes.h

@@ -82,7 +82,6 @@
 #define FETCH_ME 58
 #define FETCH_ME_BQ 59
 #define RBH 60
-#define EVACUATED 61
 #define REMOTE_REF 62
 #define TVAR_WATCH_QUEUE 63
 #define INVARIANT_CHECK_QUEUE 64
includes/Closures.h

@@ -173,11 +173,6 @@ typedef struct {
     StgHeader   header;
 } StgStopFrame;
 
-typedef struct {
-    StgHeader   header;
-    StgClosure *evacuee;
-} StgEvacuated;
-
 typedef struct {
     StgHeader   header;
     StgWord     data;
includes/Storage.h

@@ -357,7 +357,7 @@ void dirty_MUT_VAR(StgRegTable *reg, StgClosure *p);
    -------------------------------------------------------------------------- */
 
 #define LOOKS_LIKE_INFO_PTR(p) \
-   (p && LOOKS_LIKE_INFO_PTR_NOT_NULL(p))
+   (p && (IS_FORWARDING_PTR(p) || LOOKS_LIKE_INFO_PTR_NOT_NULL(p)))
 
 #define LOOKS_LIKE_INFO_PTR_NOT_NULL(p) \
   (((StgInfoTable *)(INFO_PTR_TO_STRUCT(p)))->type != INVALID_OBJECT && \

@@ -592,4 +592,8 @@ extern StgClosure * RTS_VAR(caf_list);
 extern StgClosure * RTS_VAR(revertible_caf_list);
 extern StgTSO * RTS_VAR(resurrected_threads);
 
+#define IS_FORWARDING_PTR(p) ((((StgWord)p) & 1) != 0)
+#define MK_FORWARDING_PTR(p) (((StgWord)p) | 1)
+#define UN_FORWARDING_PTR(p) (((StgWord)p) - 1)
+
 #endif /* STORAGE_H */
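In the threaded RTS the forwarding pointer also has to be published race-safely: later in this diff (copy_tag in rts/sm/Evac.c-inc, under PARALLEL_GC) the object is copied first and the forwarding pointer is then installed with cas(); if another GC thread won the race, the code falls back to evacuate() and uses the winner's copy. A rough standalone illustration of that claim-and-publish step, using C11 atomics rather than GHC's cas() (all names here are made up):

/* Rough illustration of the parallel publish step (not GHC code). */

#include <stdatomic.h>
#include <stdint.h>

#define MK_FWD(p)  (((uintptr_t)(p)) | 1)   /* as MK_FORWARDING_PTR above */

typedef struct Obj {
    _Atomic uintptr_t info;     /* info pointer or forwarding pointer */
    uintptr_t payload[1];
} Obj;

/* Try to install a forwarding pointer to 'to', given the info word read
 * earlier.  Returns 1 if we won the race; 0 if another GC thread forwarded
 * the object first, in which case the fresh copy at 'to' is simply ignored
 * and the caller re-evacuates via the winner's forwarding pointer. */
static int publish_forwarding(Obj *src, Obj *to, uintptr_t seen_info)
{
    uintptr_t expected = seen_info;
    return atomic_compare_exchange_strong(&src->info, &expected, MK_FWD(to));
}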
rts/LdvProfile.c

@@ -68,26 +68,27 @@ STATIC_INLINE nat
 processHeapClosureForDead( StgClosure *c )
 {
     nat size;
-    StgInfoTable *info;
+    const StgInfoTable *info;
 
-    info = get_itbl(c);
-
-    if (info->type != EVACUATED) {
-        ASSERT(((LDVW(c) & LDV_CREATE_MASK) >> LDV_SHIFT) <= era &&
-               ((LDVW(c) & LDV_CREATE_MASK) >> LDV_SHIFT) > 0);
-        ASSERT(((LDVW(c) & LDV_STATE_MASK) == LDV_STATE_CREATE) ||
-               (
-                   (LDVW(c) & LDV_LAST_MASK) <= era &&
-                   (LDVW(c) & LDV_LAST_MASK) > 0
-               ));
-    }
-
-    if (info->type == EVACUATED) {
+    info = c->header.info;
+    if (IS_FORWARDING_PTR(info)) {
        // The size of the evacuated closure is currently stored in
        // the LDV field.  See SET_EVACUAEE_FOR_LDV() in
        // includes/StgLdvProf.h.
        return LDVW(c);
     }
 
+    info = INFO_PTR_TO_STRUCT(info);
+
+    ASSERT(((LDVW(c) & LDV_CREATE_MASK) >> LDV_SHIFT) <= era &&
+           ((LDVW(c) & LDV_CREATE_MASK) >> LDV_SHIFT) > 0);
+    ASSERT(((LDVW(c) & LDV_STATE_MASK) == LDV_STATE_CREATE) ||
+           (
+               (LDVW(c) & LDV_LAST_MASK) <= era &&
+               (LDVW(c) & LDV_LAST_MASK) > 0
+           ));
+
     size = closure_sizeW(c);
rts/RetainerProfile.c

@@ -626,7 +626,6 @@ push( StgClosure *c, retainer c_child_r, StgClosure **first_child )
     case FETCH_ME_BQ:
     case RBH:
     case REMOTE_REF:
-    case EVACUATED:
     case INVALID_OBJECT:
     default:
        barf("Invalid object *c in push()");

@@ -992,7 +991,6 @@ pop( StgClosure **c, StgClosure **cp, retainer *r )
     case FETCH_ME_BQ:
     case RBH:
     case REMOTE_REF:
-    case EVACUATED:
     case INVALID_OBJECT:
     default:
        barf("Invalid object *c in pop()");

@@ -1157,7 +1155,6 @@ isRetainer( StgClosure *c )
     case FETCH_ME_BQ:
     case RBH:
     case REMOTE_REF:
-    case EVACUATED:
     case INVALID_OBJECT:
     default:
        barf("Invalid object in isRetainer(): %d", get_itbl(c)->type);
rts/Sanity.c

@@ -257,7 +257,13 @@ checkClosure( StgClosure* p )
      ASSERT(!closure_STATIC(p));
     }
 
-    info = get_itbl(p);
+    info = p->header.info;
+
+    if (IS_FORWARDING_PTR(info)) {
+        barf("checkClosure: found EVACUATED closure %d", info->type);
+    }
+    info = INFO_PTR_TO_STRUCT(info);
+
     switch (info->type) {
 
     case MVAR_CLEAN:

@@ -506,10 +512,6 @@ checkClosure( StgClosure* p )
        return sizeofW(StgTRecHeader);
      }
 
-    case EVACUATED:
-        barf("checkClosure: found EVACUATED closure %d",
-             info->type);
-
     default:
        barf("checkClosure (closure type %d)", info->type);
     }
rts/StgMiscClosures.cmm

@@ -408,14 +408,6 @@ INFO_TABLE(stg_WHITEHOLE, 0,0, WHITEHOLE, "WHITEHOLE", "WHITEHOLE")
 INFO_TABLE(stg_TSO, 0,0,TSO, "TSO", "TSO")
 { foreign "C" barf("TSO object entered!") never returns; }
 
-/* ----------------------------------------------------------------------------
-   Evacuees are left behind by the garbage collector.  Any attempt to enter
-   one is a real bug.
-   ------------------------------------------------------------------------- */
-
-INFO_TABLE(stg_EVACUATED,1,0,EVACUATED,"EVACUATED","EVACUATED")
-{ foreign "C" barf("EVACUATED object entered!") never returns; }
-
 /* ----------------------------------------------------------------------------
    Weak pointers
rts/sm/Evac.c

@@ -76,11 +76,13 @@ alloc_for_copy (nat size, step *stp)
    The evacuate() code
    -------------------------------------------------------------------------- */
 
-#define PARALLEL_GC
+#undef PARALLEL_GC
 #include "Evac.c-inc"
 
-#undef PARALLEL_GC
+#ifdef THREADED_RTS
+#define PARALLEL_GC
 #include "Evac.c-inc"
+#endif
 
 /* -----------------------------------------------------------------------------
    Evacuate a large object
@@ -261,9 +263,10 @@ selector_chain:
     }
     while (info_ptr == (W_)&stg_WHITEHOLE_info);
     // make sure someone else didn't get here first...
-    if (INFO_PTR_TO_STRUCT(info_ptr)->type != THUNK_SELECTOR) {
+    if (IS_FORWARDING_PTR(p) ||
+        INFO_PTR_TO_STRUCT(info_ptr)->type != THUNK_SELECTOR) {
         // v. tricky now.  The THUNK_SELECTOR has been evacuated
-        // by another thread, and is now either EVACUATED or IND.
+        // by another thread, and is now either a forwarding ptr or IND.
         // We need to extract ourselves from the current situation
         // as cleanly as possible.
         //   - unlock the closure
@@ -298,7 +301,16 @@ selector_loop:
     // from-space during marking, for example.  We rely on the property
     // that evacuate() doesn't mind if it gets passed a to-space pointer.
 
-    info = get_itbl(selectee);
+    info = (StgInfoTable*)selectee->header.info;
+    if (IS_FORWARDING_PTR(info)) {
+        // We don't follow pointers into to-space; the constructor
+        // has already been evacuated, so we won't save any space
+        // leaks by evaluating this selector thunk anyhow.
+        goto bale_out;
+    }
+
+    info = INFO_PTR_TO_STRUCT(info);
+
     switch (info->type) {
       case WHITEHOLE:
         goto bale_out; // about to be evacuated by another thread (or a loop).
@@ -333,33 +345,38 @@ selector_loop:
               // evaluating until we find the real value, and then
               // update the whole chain to point to the value.
           val_loop:
-              info = get_itbl(UNTAG_CLOSURE(val));
-              switch (info->type) {
-              case IND:
-              case IND_PERM:
-              case IND_OLDGEN:
-              case IND_OLDGEN_PERM:
-              case IND_STATIC:
-                  val = ((StgInd *)val)->indirectee;
-                  goto val_loop;
-              case THUNK_SELECTOR:
-                  ((StgClosure*)p)->payload[0] = (StgClosure *)prev_thunk_selector;
-                  prev_thunk_selector = p;
-                  p = (StgSelector*)val;
-                  goto selector_chain;
-              default:
-                  ((StgClosure*)p)->payload[0] = (StgClosure *)prev_thunk_selector;
-                  prev_thunk_selector = p;
-
-                  *q = val;
-                  if (evac) evacuate(q);
-                  val = *q;
-                  // evacuate() cannot recurse through
-                  // eval_thunk_selector(), because we know val is not
-                  // a THUNK_SELECTOR.
-                  unchain_thunk_selectors(prev_thunk_selector, val);
-                  return;
+              info_ptr = (StgWord)UNTAG_CLOSURE(val)->header.info;
+              if (!IS_FORWARDING_PTR(info_ptr))
+              {
+                  info = INFO_PTR_TO_STRUCT(info_ptr);
+                  switch (info->type) {
+                  case IND:
+                  case IND_PERM:
+                  case IND_OLDGEN:
+                  case IND_OLDGEN_PERM:
+                  case IND_STATIC:
+                      val = ((StgInd *)val)->indirectee;
+                      goto val_loop;
+                  case THUNK_SELECTOR:
+                      ((StgClosure*)p)->payload[0] = (StgClosure *)prev_thunk_selector;
+                      prev_thunk_selector = p;
+                      p = (StgSelector*)val;
+                      goto selector_chain;
+                  default:
+                      break;
+                  }
+              }
+              ((StgClosure*)p)->payload[0] = (StgClosure *)prev_thunk_selector;
+              prev_thunk_selector = p;
+
+              *q = val;
+              if (evac) evacuate(q);
+              val = *q;
+              // evacuate() cannot recurse through
+              // eval_thunk_selector(), because we know val is not
+              // a THUNK_SELECTOR.
+              unchain_thunk_selectors(prev_thunk_selector, val);
+              return;
           }
 
       case IND:
@@ -371,12 +388,6 @@ selector_loop:
           selectee = UNTAG_CLOSURE( ((StgInd *)selectee)->indirectee );
           goto selector_loop;
 
-      case EVACUATED:
-          // We don't follow pointers into to-space; the constructor
-          // has already been evacuated, so we won't save any space
-          // leaks by evaluating this selector thunk anyhow.
-          goto bale_out;
-
       case THUNK_SELECTOR:
       {
           StgClosure *val;
@@ -432,7 +443,7 @@ bale_out:
     // check whether it was updated in the meantime.
     *q = (StgClosure *)p;
     if (evac) {
-        copy(q,(StgClosure *)p,THUNK_SELECTOR_sizeW(),bd->step->to);
+        copy(q,(const StgInfoTable *)info_ptr,(StgClosure *)p,THUNK_SELECTOR_sizeW(),bd->step->to);
     }
     unchain_thunk_selectors(prev_thunk_selector, *q);
     return;
rts/sm/Evac.c-inc

@@ -10,55 +10,77 @@
 // non-minor, parallel, GC.  This file contains the code for both,
 // controllled by the CPP symbol MINOR_GC.
 
-#ifndef PARALLEL_GC
-#define copy(a,b,c,d) copy1(a,b,c,d)
-#define copy_tag(a,b,c,d,e) copy_tag1(a,b,c,d,e)
-#define copyPart(a,b,c,d,e) copyPart1(a,b,c,d,e)
-#define evacuate(a) evacuate1(a)
+#if defined(THREADED_RTS)
+# if !defined(PARALLEL_GC)
+#  define copy(a,b,c,d,e) copy1(a,b,c,d,e)
+#  define copy_tag(a,b,c,d,e,f) copy_tag1(a,b,c,d,e,f)
+#  define copy_tag_nolock(a,b,c,d,e,f) copy_tag1(a,b,c,d,e,f)
+#  define copyPart(a,b,c,d,e) copyPart1(a,b,c,d,e)
+#  define evacuate(a) evacuate1(a)
+# endif
 #else
-#undef copy
-#undef copy_tag
-#undef copyPart
-#undef evacuate
+# define copy_tag_nolock(a,b,c,d,e,f) copy_tag(a,b,c,d,e,f)
 #endif
 
 STATIC_INLINE void
-copy_tag(StgClosure **p, StgClosure *src, nat size, step *stp, StgWord tag)
+copy_tag(StgClosure **p, const StgInfoTable *info,
+         StgClosure *src, nat size, step *stp, StgWord tag)
 {
-    StgPtr to, tagged_to, from;
+    StgPtr to, from;
     nat i;
-    StgWord info;
 
-#if defined(PARALLEL_GC) && defined(THREADED_RTS)
-spin:
-    info = xchg((StgPtr)&src->header.info, (W_)&stg_WHITEHOLE_info);
-    // so..  what is it?
-    if (info == (W_)&stg_WHITEHOLE_info) {
-#ifdef PROF_SPIN
-        whitehole_spin++;
-#endif
-        goto spin;
-    }
+    to = alloc_for_copy(size,stp);
+
+    TICK_GC_WORDS_COPIED(size);
+
+    from = (StgPtr)src;
+    to[0] = (W_)info;
+    for (i = 1; i < size; i++) { // unroll for small i
+        to[i] = from[i];
+    }
-    if (info == (W_)&stg_EVACUATED_info || info == (W_)&stg_IND_info) {
-        // NB. a closure might be updated with an IND by
-        // unchain_selector_thunks(), hence the test above.
-        src->header.info = (const StgInfoTable *)info;
-        return evacuate(p); // does the failed_to_evac stuff
-    }
 
 //  if (to+size+2 < bd->start + BLOCK_SIZE_W) {
 //      __builtin_prefetch(to + size + 2, 1);
 //  }
 
+#if defined(PARALLEL_GC)
+    {
+        const StgInfoTable *new_info;
+        new_info = (const StgInfoTable *)cas((StgPtr)&src->header.info,
+                                             (W_)info, MK_FORWARDING_PTR(to));
+        if (new_info != info) {
+            return evacuate(p); // does the failed_to_evac stuff
+        } else {
+            *p = TAG_CLOSURE(tag,(StgClosure*)to);
+        }
+    }
 #else
     ASSERT(n_gc_threads == 1);
-    info = (W_)src->header.info;
-    src->header.info = &stg_EVACUATED_info;
+    src->header.info = (const StgInfoTable *)MK_FORWARDING_PTR(to);
+    *p = TAG_CLOSURE(tag,(StgClosure*)to);
 #endif
 
 #ifdef PROFILING
     // We store the size of the just evacuated object in the LDV word so that
    // the profiler can guess the position of the next object later.
     SET_EVACUAEE_FOR_LDV(from, size);
 #endif
 }
 
+#if defined(PARALLEL_GC)
+STATIC_INLINE void
+copy_tag_nolock(StgClosure **p, const StgInfoTable *info,
+                StgClosure *src, nat size, step *stp, StgWord tag)
+{
+    StgPtr to, from;
+    nat i;
+
+    to = alloc_for_copy(size,stp);
-    tagged_to = (StgPtr)TAG_CLOSURE(tag,(StgClosure*)to);
-    *p = (StgClosure *)tagged_to;
+    *p = TAG_CLOSURE(tag,(StgClosure*)to);
+    src->header.info = (const StgInfoTable *)MK_FORWARDING_PTR(to);
 
     TICK_GC_WORDS_COPIED(size);
 
     from = (StgPtr)src;
-    to[0] = info;
+    to[0] = (W_)info;
     for (i = 1; i < size; i++) { // unroll for small i
         to[i] = from[i];
     }
@@ -67,19 +89,13 @@ spin:
 //      __builtin_prefetch(to + size + 2, 1);
 //  }
 
-    ((StgEvacuated*)from)->evacuee = (StgClosure *)tagged_to;
-#if defined(PARALLEL_GC) && defined(THREADED_RTS)
-    write_barrier();
-    ((StgEvacuated *)from)->header.info = &stg_EVACUATED_info;
-#endif
-
 #ifdef PROFILING
     // We store the size of the just evacuated object in the LDV word so that
     // the profiler can guess the position of the next object later.
     SET_EVACUAEE_FOR_LDV(from, size);
 #endif
 }
+#endif
 
 /* Special version of copy() for when we only want to copy the info
  * pointer of an object, but reserve some padding after it.  This is
@@ -92,7 +108,7 @@ copyPart(StgClosure **p, StgClosure *src, nat size_to_reserve, nat size_to_copy,
     nat i;
     StgWord info;
 
-#if defined(PARALLEL_GC) && defined(THREADED_RTS)
+#if defined(PARALLEL_GC)
 spin:
     info = xchg((StgPtr)&src->header.info, (W_)&stg_WHITEHOLE_info);
     if (info == (W_)&stg_WHITEHOLE_info) {
@@ -101,14 +117,13 @@ spin:
 #endif
         goto spin;
     }
-    if (info == (W_)&stg_EVACUATED_info) {
+    if (IS_FORWARDING_PTR(info)) {
         src->header.info = (const StgInfoTable *)info;
         evacuate(p); // does the failed_to_evac stuff
         return ;
     }
 #else
     info = (W_)src->header.info;
-    src->header.info = &stg_EVACUATED_info;
 #endif
 
     to = alloc_for_copy(size_to_reserve, stp);
@@ -122,11 +137,10 @@ spin:
         to[i] = from[i];
     }
 
-    ((StgEvacuated *)from)->evacuee = (StgClosure *)to;
-#if defined(PARALLEL_GC) && defined(THREADED_RTS)
+#if defined(PARALLEL_GC)
     write_barrier();
-    ((StgEvacuated *)from)->header.info = &stg_EVACUATED_info;
 #endif
+    src->header.info = (const StgInfoTable *)MK_FORWARDING_PTR(to);
 
 #ifdef PROFILING
     // We store the size of the just evacuated object in the LDV word so that
@@ -141,9 +155,10 @@ spin:
 /* Copy wrappers that don't tag the closure after copying */
 STATIC_INLINE void
-copy(StgClosure **p, StgClosure *src, nat size, step *stp)
+copy(StgClosure **p, const StgInfoTable *info,
+     StgClosure *src, nat size, step *stp)
 {
-    copy_tag(p,src,size,stp,0);
+    copy_tag(p,info,src,size,stp,0);
 }
 
 /* ----------------------------------------------------------------------------
@@ -356,9 +371,37 @@ loop:
     stp = bd->step->to;
 
-    info = get_itbl(q);
-    switch (info->type) {
+    info = q->header.info;
+    if (IS_FORWARDING_PTR(info))
+    {
+        /* Already evacuated, just return the forwarding address.
+         * HOWEVER: if the requested destination generation (gct->evac_step) is
+         * older than the actual generation (because the object was
+         * already evacuated to a younger generation) then we have to
+         * set the gct->failed_to_evac flag to indicate that we couldn't
+         * manage to promote the object to the desired generation.
+         */
+        /*
+         * Optimisation: the check is fairly expensive, but we can often
+         * shortcut it if either the required generation is 0, or the
+         * current object (the EVACUATED) is in a high enough generation.
+         * We know that an EVACUATED always points to an object in the
+         * same or an older generation.  stp is the lowest step that the
+         * current object would be evacuated to, so we only do the full
+         * check if stp is too low.
+         */
+        StgClosure *e = (StgClosure*)UN_FORWARDING_PTR(info);
+        *p = TAG_CLOSURE(tag,e);
+        if (stp < gct->evac_step) {  // optimisation
+            if (Bdescr((P_)e)->step < gct->evac_step) {
+                gct->failed_to_evac = rtsTrue;
+                TICK_GC_FAILED_PROMOTION();
+            }
+        }
+        return;
+    }
+
+    switch (INFO_PTR_TO_STRUCT(info)->type) {
 
     case WHITEHOLE:
         goto loop;
@@ -367,27 +410,27 @@ loop:
     case MUT_VAR_DIRTY:
     case MVAR_CLEAN:
     case MVAR_DIRTY:
-        copy(p,q,sizeW_fromITBL(info),stp);
+        copy(p,info,q,sizeW_fromITBL(INFO_PTR_TO_STRUCT(info)),stp);
         return;
 
     case CONSTR_0_1:
     {
         StgWord w = (StgWord)q->payload[0];
-        if (q->header.info == Czh_con_info &&
+        if (info == Czh_con_info &&
             // unsigned, so always true:  (StgChar)w >= MIN_CHARLIKE &&
             (StgChar)w <= MAX_CHARLIKE) {
             *p = TAG_CLOSURE(tag,
                              (StgClosure *)CHARLIKE_CLOSURE((StgChar)w)
                             );
         }
-        else if (q->header.info == Izh_con_info &&
+        else if (info == Izh_con_info &&
                  (StgInt)w >= MIN_INTLIKE && (StgInt)w <= MAX_INTLIKE) {
             *p = TAG_CLOSURE(tag,
                              (StgClosure *)INTLIKE_CLOSURE((StgInt)w)
                             );
         }
         else {
-            copy_tag(p,q,sizeofW(StgHeader)+1,stp,tag);
+            copy_tag_nolock(p,info,q,sizeofW(StgHeader)+1,stp,tag);
         }
         return;
     }
@@ -395,12 +438,12 @@ loop:
     case FUN_0_1:
     case FUN_1_0:
     case CONSTR_1_0:
-        copy_tag(p,q,sizeofW(StgHeader)+1,stp,tag);
+        copy_tag_nolock(p,info,q,sizeofW(StgHeader)+1,stp,tag);
         return;
 
     case THUNK_1_0:
     case THUNK_0_1:
-        copy(p,q,sizeofW(StgThunk)+1,stp);
+        copy(p,info,q,sizeofW(StgThunk)+1,stp);
         return;
 
     case THUNK_1_1:
@@ -413,7 +456,7 @@ loop:
         stp = bd->step;
     }
 #endif
-        copy(p,q,sizeofW(StgThunk)+2,stp);
+        copy(p,info,q,sizeofW(StgThunk)+2,stp);
         return;
 
     case FUN_1_1:
@@ -421,28 +464,31 @@ loop:
     case FUN_0_2:
     case CONSTR_1_1:
    case CONSTR_2_0:
-        copy_tag(p,q,sizeofW(StgHeader)+2,stp,tag);
+        copy_tag_nolock(p,info,q,sizeofW(StgHeader)+2,stp,tag);
         return;
 
     case CONSTR_0_2:
-        copy_tag(p,q,sizeofW(StgHeader)+2,stp,tag);
+        copy_tag_nolock(p,info,q,sizeofW(StgHeader)+2,stp,tag);
         return;
 
     case THUNK:
-        copy(p,q,thunk_sizeW_fromITBL(info),stp);
+        copy(p,info,q,thunk_sizeW_fromITBL(INFO_PTR_TO_STRUCT(info)),stp);
         return;
 
     case FUN:
     case IND_PERM:
     case IND_OLDGEN_PERM:
+    case CONSTR:
+        copy_tag_nolock(p,info,q,sizeW_fromITBL(INFO_PTR_TO_STRUCT(info)),stp,tag);
+        return;
+
     case WEAK:
     case STABLE_NAME:
-    case CONSTR:
-        copy_tag(p,q,sizeW_fromITBL(info),stp,tag);
+        copy_tag(p,info,q,sizeW_fromITBL(INFO_PTR_TO_STRUCT(info)),stp,tag);
         return;
 
     case BCO:
-        copy(p,q,bco_sizeW((StgBCO *)q),stp);
+        copy(p,info,q,bco_sizeW((StgBCO *)q),stp);
         return;
 
     case CAF_BLACKHOLE:
@@ -477,49 +523,20 @@ loop:
         barf("evacuate: stack frame at %p\n", q);
 
     case PAP:
-        copy(p,q,pap_sizeW((StgPAP*)q),stp);
+        copy(p,info,q,pap_sizeW((StgPAP*)q),stp);
         return;
 
     case AP:
-        copy(p,q,ap_sizeW((StgAP*)q),stp);
+        copy(p,info,q,ap_sizeW((StgAP*)q),stp);
         return;
 
     case AP_STACK:
         copy(p,q,ap_stack_sizeW((StgAP_STACK*)q),stp);
         return;
 
-    case EVACUATED:
-        /* Already evacuated, just return the forwarding address.
-         * HOWEVER: if the requested destination generation (gct->evac_step) is
-         * older than the actual generation (because the object was
-         * already evacuated to a younger generation) then we have to
-         * set the gct->failed_to_evac flag to indicate that we couldn't
-         * manage to promote the object to the desired generation.
-         */
-        /*
-         * Optimisation: the check is fairly expensive, but we can often
-         * shortcut it if either the required generation is 0, or the
-         * current object (the EVACUATED) is in a high enough generation.
-         * We know that an EVACUATED always points to an object in the
-         * same or an older generation.  stp is the lowest step that the
-         * current object would be evacuated to, so we only do the full
-         * check if stp is too low.
-         */
-    {
-        StgClosure *e = ((StgEvacuated*)q)->evacuee;