Commit 657e1018 authored by Ian Lynagh

Whitespace only in CgHeapery

parent cdf946e4
@@ -5,26 +5,19 @@
\section[CgHeapery]{Heap management functions}
\begin{code}
{-# OPTIONS -fno-warn-tabs #-}
-- The above warning suppression flag is a temporary kludge.
-- While working on this module you are encouraged to remove it and
-- detab the module (please do the detabbing in a separate patch). See
-- http://hackage.haskell.org/trac/ghc/wiki/Commentary/CodingStyle#TabsvsSpaces
-- for details
module CgHeapery (
        initHeapUsage, getVirtHp, setVirtHp, setRealHp,
        getHpRelOffset, hpRel,
        funEntryChecks, thunkEntryChecks,
        altHeapCheck, unbxTupleHeapCheck,
        hpChkGen, hpChkNodePointsAssignSp0,
        stkChkGen, stkChkNodePoints,
        layOutDynConstr, layOutStaticConstr,
        mkVirtHeapOffsets, mkStaticClosureFields, mkStaticClosure,
        allocDynClosure, emitSetDynHdr
) where
#include "HsVersions.h"
@@ -59,17 +52,17 @@ import Data.Maybe (fromMaybe)
%************************************************************************
%* *
\subsection[CgUsages-heapery]{Monad things for fiddling with heap usage}
%* *
%************************************************************************
The heap always grows upwards, so hpRel is easy
\begin{code}
hpRel :: VirtualHpOffset        -- virtual offset of Hp
      -> VirtualHpOffset        -- virtual offset of The Thing
      -> WordOff                -- integer word offset
hpRel hp off = off - hp
\end{code}
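A tiny worked example (illustrative only, not part of the original module):
\begin{code}
-- hpRel is just subtraction: if the real Hp sits at virtual offset 5
-- and The Thing sits at virtual offset 8, The Thing is 3 words above Hp:
--
--      hpRel 5 8  ==  8 - 5  ==  3
\end{code}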
@@ -85,47 +78,47 @@ rje: Note the slightly subtle fixed point behaviour needed here
\begin{code}
initHeapUsage :: (VirtualHpOffset -> Code) -> Code
initHeapUsage fcode
  = do  { orig_hp_usage <- getHpUsage
        ; setHpUsage initHpUsage
        ; fixC_ (\heap_usage2 -> do
                { fcode (heapHWM heap_usage2)
                ; getHpUsage })
        ; setHpUsage orig_hp_usage }
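-- Illustrative note (not in the original source): the fixed point is needed
-- because the heap check emitted at the start of the generated code must
-- mention the total allocation, which is only known after 'fcode' has run.
-- fixC_ ties this knot lazily: 'heap_usage2' is the heap usage *after*
-- running 'fcode', and its high-water mark is fed back in as the amount
-- that 'fcode' should check for.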
setVirtHp :: VirtualHpOffset -> Code
setVirtHp new_virtHp
  = do  { hp_usage <- getHpUsage
        ; setHpUsage (hp_usage {virtHp = new_virtHp}) }
getVirtHp :: FCode VirtualHpOffset
getVirtHp
  = do  { hp_usage <- getHpUsage
        ; return (virtHp hp_usage) }
setRealHp :: VirtualHpOffset -> Code
setRealHp new_realHp
  = do  { hp_usage <- getHpUsage
        ; setHpUsage (hp_usage {realHp = new_realHp}) }
getHpRelOffset :: VirtualHpOffset -> FCode CmmExpr
getHpRelOffset virtual_offset
  = do  { hp_usg <- getHpUsage
        ; return (cmmRegOffW hpReg (hpRel (realHp hp_usg) virtual_offset)) }
\end{code}
%************************************************************************
%* *
Layout of heap objects
%* *
%************************************************************************
\begin{code}
layOutDynConstr, layOutStaticConstr
        :: DataCon
        -> [(CgRep,a)]
        -> (ClosureInfo,
            [(a,VirtualHpOffset)])
layOutDynConstr = layOutConstr False
layOutStaticConstr = layOutConstr True
@@ -136,8 +129,8 @@ layOutConstr is_static data_con args
= (mkConInfo is_static data_con tot_wds ptr_wds,
things_w_offsets)
where
    (tot_wds,            -- #ptr_wds + #nonptr_wds
     ptr_wds,            -- #ptr_wds
     things_w_offsets) = mkVirtHeapOffsets False{-not a thunk-} args
\end{code}
@@ -147,26 +140,26 @@ list
\begin{code}
mkVirtHeapOffsets
        :: Bool                -- True <=> is a thunk
        -> [(CgRep,a)]         -- Things to make offsets for
        -> (WordOff,           -- _Total_ number of words allocated
            WordOff,           -- Number of words allocated for *pointers*
            [(a, VirtualHpOffset)])
                               -- Things with their offsets from start of
                               -- object in order of increasing offset
-- First in list gets lowest offset, which is initial offset + 1.
mkVirtHeapOffsets is_thunk things
  = let non_void_things               = filterOut (isVoidArg . fst) things
        (ptrs, non_ptrs)              = separateByPtrFollowness non_void_things
        (wds_of_ptrs, ptrs_w_offsets) = mapAccumL computeOffset 0 ptrs
        (tot_wds, non_ptrs_w_offsets) = mapAccumL computeOffset wds_of_ptrs non_ptrs
in
(tot_wds, wds_of_ptrs, ptrs_w_offsets ++ non_ptrs_w_offsets)
where
    hdr_size | is_thunk  = thunkHdrSize
             | otherwise = fixedHdrSize
computeOffset wds_so_far (rep, thing)
= (wds_so_far + cgRepSizeW rep, (thing, hdr_size + wds_so_far))
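    -- A small illustrative example (not in the original source): for a
    -- non-thunk with arguments [(PtrArg, p), (NonPtrArg, n), (VoidArg, v)],
    -- the void argument is dropped and the pointer goes first; assuming
    -- fixedHdrSize = 1 this yields
    --      tot_wds = 2, ptr_wds = 1, offsets = [(p, 1), (n, 2)]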
@@ -174,24 +167,24 @@ mkVirtHeapOffsets is_thunk things
%************************************************************************
%* *
Lay out a static closure
%* *
%************************************************************************
Make a static closure, adding on any extra padding needed for CAFs,
and adding a static link field if necessary.
\begin{code}
mkStaticClosureFields
        :: ClosureInfo
        -> CostCentreStack
        -> Bool                 -- Has CAF refs
        -> [CmmLit]             -- Payload
        -> [CmmLit]             -- The full closure
mkStaticClosureFields cl_info ccs caf_refs payload
  = mkStaticClosure info_lbl ccs payload padding_wds
        static_link_field saved_info_field
where
info_lbl = infoTableLabelFromCI cl_info
@@ -210,23 +203,23 @@ mkStaticClosureFields cl_info ccs caf_refs payload
is_caf = closureNeedsUpdSpace cl_info
padding_wds
        | not is_caf = []
        | otherwise  = ASSERT(null payload) [mkIntCLit 0]
static_link_field
        | is_caf || staticClosureNeedsLink cl_info = [static_link_value]
        | otherwise                                = []
saved_info_field
        | is_caf    = [mkIntCLit 0]
        | otherwise = []
        -- for a static constructor which has NoCafRefs, we set the
        -- static link field to a non-zero value so the garbage
        -- collector will ignore it.
static_link_value
        | caf_refs  = mkIntCLit 0
        | otherwise = mkIntCLit 1
mkStaticClosure :: CLabel -> CostCentreStack -> [CmmLit]
-> [CmmLit] -> [CmmLit] -> [CmmLit] -> [CmmLit]
@@ -239,10 +232,10 @@ mkStaticClosure info_lbl ccs payload padding_wds static_link_field saved_info_fi
++ saved_info_field
where
variable_header_words
        =  staticGranHdr
        ++ staticParHdr
        ++ staticProfHdr ccs
        ++ staticTickyHdr
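-- Rough picture (illustrative, reconstructed from the code above and the
-- arguments passed by mkStaticClosureFields): the emitted static closure is
--      info-table label
--   ++ variable_header_words        (GranSim/par/profiling/ticky headers)
--   ++ payload, each literal padded out to a full word
--   ++ padding_wds                  (the extra CAF word, if needed)
--   ++ static_link_field
--   ++ saved_info_field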
padLitToWord :: CmmLit -> [CmmLit]
padLitToWord lit = lit : padding pad_length
@@ -257,9 +250,9 @@ padLitToWord lit = lit : padding pad_length
\end{code}
%************************************************************************
%* *
\subsection[CgHeapery-heap-overflow]{Heap overflow checking}
%* *
%************************************************************************
The new code for heapChecks. For GrAnSim the code for doing a heap check
@@ -275,39 +268,39 @@ A heap/stack check at a function or thunk entry point.
\begin{code}
funEntryChecks :: ClosureInfo -> CmmStmts -> Maybe [GlobalReg] -> Code -> Code
funEntryChecks cl_info reg_save_code live code
= hpStkCheck cl_info True reg_save_code live code
thunkEntryChecks :: ClosureInfo -> Code -> Code
thunkEntryChecks cl_info code
= hpStkCheck cl_info False noStmts (Just [node]) code
hpStkCheck :: ClosureInfo       -- Function closure
           -> Bool              -- Is a function? (not a thunk)
           -> CmmStmts          -- Register saves
           -> Maybe [GlobalReg] -- Live registers
           -> Code
           -> Code
hpStkCheck cl_info is_fun reg_save_code live code
  = getFinalStackHW $ \ spHw -> do
        { sp <- getRealSp
        ; let stk_words = spHw - sp
        ; initHeapUsage $ \ hpHw -> do
            {   -- Emit heap checks, but be sure to do it lazily so
                -- that the conditionals on hpHw don't cause a black hole
              codeOnly $ do
                { do_checks stk_words hpHw full_save_code rts_label full_live
                ; tickyAllocHeap hpHw }
            ; setRealHp hpHw
            ; code }
        }
where
(node_asst, full_live)
        | nodeMustPointToIt (closureLFInfo cl_info)
        = (noStmts, live)
        | otherwise
        = (oneStmt (CmmAssign nodeReg (CmmLit (CmmLabel closure_lbl)))
,Just $ node : fromMaybe [] live)
-- Strictly speaking, we should tag node here. But if
-- node doesn't point to the closure, the code for the closure
@@ -317,11 +310,11 @@ hpStkCheck cl_info is_fun reg_save_code live code
full_save_code = node_asst `plusStmts` reg_save_code
rts_label | is_fun = CmmReg (CmmGlobal GCFun)
                        -- Function entry point
              | otherwise = CmmReg (CmmGlobal GCEnter1)
                        -- Thunk or case return
                        -- In the thunk/case-return case, R1 points to a closure
                        -- which should be (re)-entered after GC
\end{code}
Heap checks in a case alternative are nice and easy, provided this is
@@ -342,20 +335,20 @@ For primitive returns, we have an unlifted value in some register
heap-check code for these cases.
\begin{code}
altHeapCheck
    :: AltType  -- PolyAlt, PrimAlt, AlgAlt, but *not* UbxTupAlt
                -- (Unboxed tuples are dealt with by ubxTupleHeapCheck)
    -> Code     -- Continuation
-> Code
altHeapCheck alt_type code
= initHeapUsage $ \ hpHw -> do
        { codeOnly $ do
            { do_checks 0 {- no stack chk -} hpHw
                        noStmts {- nothing to save -}
                        rts_label live
            ; tickyAllocHeap hpHw }
        ; setRealHp hpHw
        ; code }
where
(rts_label, live) = gc_info alt_type
@@ -363,26 +356,26 @@
gc_info PolyAlt = (mkL "stg_gc_unpt_r1" , Just [node])
        -- Do *not* enter R1 after a heap check in
        -- a polymorphic case.  It might be a function
        -- and the entry code for a function (currently)
        -- applies it
        --
        -- However R1 is guaranteed to be a pointer
gc_info (AlgAlt _) = (stg_gc_enter1, Just [node])
        -- Enter R1 after the heap check; it's a pointer
gc_info (PrimAlt tc)
= case primRepToCgRep (tyConPrimRep tc) of
          VoidArg   -> (mkL "stg_gc_noregs", Just [])
          FloatArg  -> (mkL "stg_gc_f1", Just [FloatReg 1])
          DoubleArg -> (mkL "stg_gc_d1", Just [DoubleReg 1])
          LongArg   -> (mkL "stg_gc_l1", Just [LongReg 1])
                          -- R1 is boxed but unlifted:
          PtrArg    -> (mkL "stg_gc_unpt_r1", Just [node])
                          -- R1 is unboxed:
          NonPtrArg -> (mkL "stg_gc_unbx_r1", Just [node])
gc_info (UbxTupAlt _) = panic "altHeapCheck"
\end{code}
@@ -396,40 +389,40 @@ always organise the stack-resident fields into pointers &
non-pointers, and pass the number of each to the heap check code.
\begin{code}
unbxTupleHeapCheck
    :: [(Id, GlobalReg)]  -- Live registers
    -> WordOff            -- no. of stack slots containing ptrs
    -> WordOff            -- no. of stack slots containing nonptrs
    -> CmmStmts           -- code to insert in the failure path
    -> Code
    -> Code
unbxTupleHeapCheck regs ptrs nptrs fail_code code
        -- We can't manage more than 255 pointers/non-pointers
        -- in a generic heap check.
| ptrs > 255 || nptrs > 255 = panic "altHeapCheck"
  | otherwise
= initHeapUsage $ \ hpHw -> do
        { codeOnly $ do { do_checks 0 {- no stack check -} hpHw
                                    full_fail_code rts_label live
                        ; tickyAllocHeap hpHw }
        ; setRealHp hpHw
        ; code }
where
full_fail_code = fail_code `plusStmts` oneStmt assign_liveness
    assign_liveness = CmmAssign (CmmGlobal (VanillaReg 9 VNonGcPtr)) -- Ho ho ho!
                                (CmmLit (mkWordCLit liveness))
    liveness        = mkRegLiveness regs ptrs nptrs
live = Just $ map snd regs
    rts_label = CmmLit (CmmLabel (mkCmmCodeLabel rtsPackageId (fsLit "stg_gc_ut")))
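    -- Illustrative note (not in the original source): stg_gc_ut is the
    -- generic RTS entry point for unboxed-tuple returns.  Since it cannot
    -- know the return convention statically, R9 carries a liveness word
    -- (built by mkRegLiveness from the live registers and the ptrs/nptrs
    -- counts) telling the RTS which registers and stack slots hold
    -- pointers and must be preserved across the GC.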
\end{code}
%************************************************************************
%* *
Heap/Stack Checks.
%* *
%************************************************************************
When failing a check, we save a return address on the stack and
@@ -442,83 +435,83 @@ again on re-entry because someone else might have stolen the resource
in the meantime.
\begin{code}
do_checks :: WordOff            -- Stack headroom
          -> WordOff            -- Heap headroom
          -> CmmStmts           -- Assignments to perform on failure
          -> CmmExpr            -- Rts address to jump to on failure
          -> Maybe [GlobalReg]  -- Live registers
          -> Code
do_checks 0 0 _ _ _ = nopC
do_checks _ hp _ _ _
| hp > bLOCKS_PER_MBLOCK * bLOCK_SIZE_W
= sorry (unlines [
"Trying to allocate more than " ++ show (bLOCKS_PER_MBLOCK * bLOCK_SIZE) ++ " bytes.",
"Trying to allocate more than " ++ show (bLOCKS_PER_MBLOCK * bLOCK_SIZE) ++ " bytes.",
"",
"See: http://hackage.haskell.org/trac/ghc/ticket/4505",
"Suggestion: read data from a file instead of having large static data",
"structures in the code."])
do_checks stk hp reg_save_code rts_lbl live
  = do_checks' (CmmLit (mkIntCLit (stk*wORD_SIZE)))
               (CmmLit (mkIntCLit (hp*wORD_SIZE)))
               (stk /= 0) (hp /= 0) reg_save_code rts_lbl live
-- The offsets are now in *bytes*
do_checks' :: CmmExpr -> CmmExpr -> Bool -> Bool -> CmmStmts -> CmmExpr
-> Maybe [GlobalReg] -> Code
do_checks' stk_expr hp_expr stk_nonzero hp_nonzero reg_save_code rts_lbl live
  = do { doGranAllocate hp_expr
-- The failure block: this saves the registers and jumps to
-- the appropriate RTS stub.
; exit_blk_id <- forkLabelledCode $ do {
            ; emitStmts reg_save_code
            ; stmtC (CmmJump rts_lbl live) }

        -- In the case of a heap-check failure, we must also set
        -- HpAlloc.  NB. HpAlloc is *only* set if Hp has been
        -- incremented by the heap check, it must not be set in the
        -- event that a stack check failed, because the RTS stub will
        -- retreat Hp by HpAlloc.
       ; hp_blk_id <- if hp_nonzero
then forkLabelledCode $ do
                    stmtC (CmmAssign (CmmGlobal HpAlloc) hp_expr)
                    stmtC (CmmBranch exit_blk_id)
else return exit_blk_id
        -- Check for stack overflow *FIRST*; otherwise
        -- we might bump Hp and then fail the stack overflow check
       ; whenC stk_nonzero
            (stmtC (CmmCondBranch stk_oflo exit_blk_id))
       ; whenC hp_nonzero
            (stmtsC [CmmAssign hpReg
                        (cmmOffsetExprB (CmmReg hpReg) hp_expr),
                     CmmCondBranch hp_oflo hp_blk_id])
                -- Bump heap pointer, and test for heap exhaustion
                -- Note that we don't move the heap pointer unless the
                -- stack check succeeds.  Otherwise we might end up
                -- with slop at the end of the current block, which can
                -- confuse the LDV profiler.
}
where
    -- Stk overflow if (Sp - stk_bytes < SpLim)
    stk_oflo = CmmMachOp mo_wordULt
                  [CmmMachOp mo_wordSub [CmmReg spReg, stk_expr],
                   CmmReg (CmmGlobal SpLim)]

    -- Hp overflow if (Hp > HpLim)
    -- (Hp has been incremented by now)
    -- HpLim points to the LAST WORD of valid allocation space.
    hp_oflo = CmmMachOp mo_wordUGt
                  [CmmReg hpReg, CmmReg (CmmGlobal HpLim)]
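    -- Illustrative sketch (not in the original source) of the code shape
    -- produced above when both a stack and a heap check are wanted:
    --
    --            if (Sp - stk_bytes < SpLim) goto exit_blk;
    --            Hp = Hp + hp_bytes;
    --            if (Hp > HpLim) goto hp_blk;
    --            <fall through to the fast path>
    --
    --   hp_blk:   HpAlloc = hp_bytes; goto exit_blk;
    --   exit_blk: <reg_save_code>; jump rts_lbl;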
\end{code}
%************************************************************************
%* *
Generic Heap/Stack Checks - used in the RTS
%* *
%************************************************************************
\begin{code}
@@ -528,7 +521,7 @@ hpChkGen bytes liveness reentry
stg_gc_gen (Just activeStgRegs)
where
assigns = mkStmts [ mk_vanilla_assignment 9 liveness,
                        mk_vanilla_assignment 10 reentry ]
-- a heap check where R1 points to the closure to enter on return, and
-- we want to assign to Sp[0] on failure (used in AutoApply.cmm:BUILD_PAP).
@@ -544,7 +537,7 @@ stkChkGen bytes liveness reentry
stg_gc_gen (Just activeStgRegs)
where
assigns = mkStmts [ mk_vanilla_assignment 9 liveness,
                        mk_vanilla_assignment 10 reentry ]
mk_vanilla_assignment :: Int -> CmmExpr -> CmmStmt
mk_vanilla_assignment n e
@@ -562,9 +555,9 @@ stg_gc_enter1 = CmmReg (CmmGlobal GCEnter1)
\end{code}
%************************************************************************
%* *