Commit 9684dbb1 authored by Ömer Sinan Ağacan's avatar Ömer Sinan Ağacan

Remove StgRubbishArg and CmmArg

The idea behind adding special "rubbish" arguments was that in unboxed sum types,
depending on the tag, some arguments are not used, and we don't want to move
special values (like 0 for literals and some special pointer for boxed slots)
for those arguments (to stack locations or registers). "StgRubbishArg" was an
indicator to the code generator that the value won't be used. During Stg-to-Cmm
we were then not generating any move or store instructions at all.

This caused problems in the register allocator because some variables were only
initialized in some code paths. As an example, suppose we have this STG: (after
unarise)

    Lib.$WT =
        \r [dt_sit]
            case
                case dt_sit of {
                  Lib.F dt_siv [Occ=Once] ->
                      (#,,#) [1# dt_siv StgRubbishArg::GHC.Prim.Int#];
                  Lib.I dt_siw [Occ=Once] ->
                      (#,,#) [2# StgRubbishArg::GHC.Types.Any dt_siw];
                }
            of
            dt_six
            { (#,,#) us_giC us_giD us_giE -> Lib.T [us_giC us_giD us_giE];
            };

This basically unpacks a sum type to an unboxed sum with 3 fields, and then
moves the unboxed sum to a constructor (`Lib.T`).

This is the Cmm for the inner case expression (case expression in the scrutinee
position of the outer case):

    ciN:
        ...
        -- look at dt_sit's tag
        if (_ciT::P64 != 1) goto ciS; else goto ciR;
    ciS: -- Tag is 2, i.e. Lib.I
        _siw::I64 = I64[_siu::P64 + 6];
        _giE::I64 = _siw::I64;
        _giD::P64 = stg_RUBBISH_ENTRY_info;
        _giC::I64 = 2;
        goto ciU;
    ciR: -- Tag is 1, i.e. Lib.F
        _siv::P64 = P64[_siu::P64 + 7];
        _giD::P64 = _siv::P64;
        _giC::I64 = 1;
        goto ciU;

Here one of the blocks `ciS` and `ciR` is executed and then the execution
continues to `ciU`, but only `ciS` initializes `_giE`; in the other branch
`_giE` is not initialized, because it's "rubbish" in the STG and so we don't
generate an assignment during code generation. GHC then panics during register
allocation:

    ghc-stage1: panic! (the 'impossible' happened)
      (GHC version 8.1.20160722 for x86_64-unknown-linux):
            LocalReg's live-in to graph ciY {_giE::I64}

(`_giD` is also "rubbish" in `ciS`, but it's still initialized because it's a
pointer slot; we have to initialize it, otherwise the garbage collector would
follow the pointer to some random place. So we only omitted the assignment when
the "rubbish" arg had unboxed type.)

This patch removes `StgRubbishArg` and `CmmArg`. We now always initialize
rubbish slots. If the slot is for boxed types we use the existing `absentError`,
otherwise we initialize the slot with literal 0.

Reviewers: simonpj, erikd, austin, simonmar, bgamari

Reviewed By: erikd

Subscribers: thomie

Differential Revision: https://phabricator.haskell.org/D2446
parent e79bb2c1
......@@ -66,7 +66,6 @@ module CLabel (
mkSMAP_DIRTY_infoLabel,
mkEMPTY_MVAR_infoLabel,
mkArrWords_infoLabel,
mkRUBBISH_ENTRY_infoLabel,
mkTopTickyCtrLabel,
mkCAFBlackHoleInfoTableLabel,
......@@ -507,7 +506,7 @@ mkDirty_MUT_VAR_Label, mkSplitMarkerLabel, mkUpdInfoLabel,
mkEMPTY_MVAR_infoLabel, mkTopTickyCtrLabel,
mkCAFBlackHoleInfoTableLabel, mkCAFBlackHoleEntryLabel,
mkArrWords_infoLabel, mkSMAP_FROZEN_infoLabel, mkSMAP_FROZEN0_infoLabel,
mkSMAP_DIRTY_infoLabel, mkRUBBISH_ENTRY_infoLabel :: CLabel
mkSMAP_DIRTY_infoLabel :: CLabel
mkDirty_MUT_VAR_Label = mkForeignLabel (fsLit "dirty_MUT_VAR") Nothing ForeignLabelInExternalPackage IsFunction
mkSplitMarkerLabel = CmmLabel rtsUnitId (fsLit "__stg_split_marker") CmmCode
mkUpdInfoLabel = CmmLabel rtsUnitId (fsLit "stg_upd_frame") CmmInfo
......@@ -525,7 +524,6 @@ mkArrWords_infoLabel = CmmLabel rtsUnitId (fsLit "stg_ARR_WORDS")
mkSMAP_FROZEN_infoLabel = CmmLabel rtsUnitId (fsLit "stg_SMALL_MUT_ARR_PTRS_FROZEN") CmmInfo
mkSMAP_FROZEN0_infoLabel = CmmLabel rtsUnitId (fsLit "stg_SMALL_MUT_ARR_PTRS_FROZEN0") CmmInfo
mkSMAP_DIRTY_infoLabel = CmmLabel rtsUnitId (fsLit "stg_SMALL_MUT_ARR_PTRS_DIRTY") CmmInfo
mkRUBBISH_ENTRY_infoLabel = CmmLabel rtsUnitId (fsLit "stg_RUBBISH_ENTRY") CmmInfo
-----
mkCmmInfoLabel, mkCmmEntryLabel, mkCmmRetInfoLabel, mkCmmRetLabel,
......
......@@ -6,7 +6,6 @@
module CmmExpr
( CmmExpr(..), cmmExprType, cmmExprWidth, maybeInvertCmmExpr
, CmmArg(..)
, CmmReg(..), cmmRegType
, CmmLit(..), cmmLitType
, LocalReg(..), localRegType
......@@ -36,7 +35,6 @@ import CmmMachOp
import CmmType
import DynFlags
import Outputable (panic)
import Type
import Unique
import Data.Set (Set)
......@@ -75,10 +73,6 @@ data CmmReg
| CmmGlobal GlobalReg
deriving( Eq, Ord )
data CmmArg
= CmmExprArg CmmExpr
| CmmRubbishArg Type -- See StgRubbishArg in StgSyn.hs
-- | A stack area is either the stack slot where a variable is spilled
-- or the stack space where function arguments and results are passed.
data Area
......
......@@ -1032,7 +1032,7 @@ lowerSafeForeignCall dflags block
(_, regs, copyout) =
copyOutOflow dflags NativeReturn Jump (Young succ)
(map (CmmExprArg . CmmReg . CmmLocal) res)
(map (CmmReg . CmmLocal) res)
ret_off []
-- NB. after resumeThread returns, the top-of-stack probably contains
......
......@@ -1105,7 +1105,7 @@ pushStackFrame fields body = do
exprs <- sequence fields
updfr_off <- getUpdFrameOff
let (new_updfr_off, _, g) = copyOutOflow dflags NativeReturn Ret Old
[] updfr_off (map CmmExprArg exprs)
[] updfr_off exprs
emit g
withUpdFrameOff new_updfr_off body
......@@ -1176,7 +1176,7 @@ doReturn exprs_code = do
mkReturnSimple :: DynFlags -> [CmmActual] -> UpdFrameOffset -> CmmAGraph
mkReturnSimple dflags actuals updfr_off =
mkReturn dflags e (map CmmExprArg actuals) updfr_off
mkReturn dflags e actuals updfr_off
where e = entryCode dflags (CmmLoad (CmmStackSlot Old updfr_off)
(gcWord dflags))
......@@ -1195,7 +1195,7 @@ doJumpWithStack expr_code stk_code args_code = do
stk_args <- sequence stk_code
args <- sequence args_code
updfr_off <- getUpdFrameOff
emit (mkJumpExtra dflags NativeNodeCall expr (map CmmExprArg args) updfr_off (map CmmExprArg stk_args))
emit (mkJumpExtra dflags NativeNodeCall expr args updfr_off stk_args)
doCall :: CmmParse CmmExpr -> [CmmParse LocalReg] -> [CmmParse CmmExpr]
-> CmmParse ()
......@@ -1205,7 +1205,7 @@ doCall expr_code res_code args_code = do
args <- sequence args_code
ress <- sequence res_code
updfr_off <- getUpdFrameOff
c <- code $ mkCall expr (NativeNodeCall,NativeReturn) ress (map CmmExprArg args) updfr_off []
c <- code $ mkCall expr (NativeNodeCall,NativeReturn) ress args updfr_off []
emit c
adjCallTarget :: DynFlags -> CCallConv -> CmmExpr -> [(CmmExpr, ForeignHint) ]
......
......@@ -10,7 +10,7 @@
module CmmUtils(
-- CmmType
primRepCmmType, slotCmmType, slotForeignHint, cmmArgType,
primRepCmmType, slotCmmType, slotForeignHint,
typeCmmType, typeForeignHint,
-- CmmLit
......@@ -127,10 +127,6 @@ primElemRepCmmType DoubleElemRep = f64
typeCmmType :: DynFlags -> UnaryType -> CmmType
typeCmmType dflags ty = primRepCmmType dflags (typePrimRep ty)
cmmArgType :: DynFlags -> CmmArg -> CmmType
cmmArgType dflags (CmmExprArg e) = cmmExprType dflags e
cmmArgType dflags (CmmRubbishArg ty) = typeCmmType dflags ty
primRepForeignHint :: PrimRep -> ForeignHint
primRepForeignHint VoidRep = panic "primRepForeignHint:VoidRep"
primRepForeignHint PtrRep = AddrHint
......
......@@ -7,7 +7,7 @@ module MkGraph
, lgraphOfAGraph, labelAGraph
, stackStubExpr
, mkNop, mkAssign, mkAssign', mkStore, mkStore'
, mkNop, mkAssign, mkStore
, mkUnsafeCall, mkFinalCall, mkCallReturnsTo
, mkJumpReturnsTo
, mkJump, mkJumpExtra
......@@ -17,18 +17,13 @@ module MkGraph
, copyInOflow, copyOutOflow
, noExtraStack
, toCall, Transfer(..)
, rubbishExpr
)
where
import BlockId
import CLabel (mkRUBBISH_ENTRY_infoLabel)
import Cmm
import CmmCallConv
import CmmSwitch (SwitchTargets)
import CmmUtils (cmmArgType)
import TyCon (isGcPtrRep)
import RepType (typePrimRep)
import Compiler.Hoopl hiding (Unique, (<*>), mkFirst, mkMiddle, mkLast, mkLabel, mkBranch, Shape(..))
import DynFlags
......@@ -41,7 +36,7 @@ import UniqSupply
import Control.Monad
import Data.List
import Data.Maybe
import Prelude (($),Int,Bool,Eq(..),otherwise) -- avoid importing (<*>)
import Prelude (($),Int,Bool,Eq(..)) -- avoid importing (<*>)
#include "HsVersions.h"
......@@ -199,30 +194,12 @@ mkAssign :: CmmReg -> CmmExpr -> CmmAGraph
mkAssign l (CmmReg r) | l == r = mkNop
mkAssign l r = mkMiddle $ CmmAssign l r
mkAssign' :: CmmReg -> CmmArg -> CmmAGraph
mkAssign' l (CmmRubbishArg ty)
| isGcPtrRep (typePrimRep ty)
= mkAssign l rubbishExpr
| otherwise
= mkNop
mkAssign' l (CmmExprArg r)
= mkAssign l r
mkStore :: CmmExpr -> CmmExpr -> CmmAGraph
mkStore l r = mkMiddle $ CmmStore l r
mkStore' :: CmmExpr -> CmmArg -> CmmAGraph
mkStore' l (CmmRubbishArg ty)
| isGcPtrRep (typePrimRep ty)
= mkStore l rubbishExpr
| otherwise
= mkNop
mkStore' l (CmmExprArg r)
= mkStore l r
---------- Control transfer
mkJump :: DynFlags -> Convention -> CmmExpr
-> [CmmArg]
-> [CmmExpr]
-> UpdFrameOffset
-> CmmAGraph
mkJump dflags conv e actuals updfr_off =
......@@ -238,8 +215,8 @@ mkRawJump dflags e updfr_off vols =
\arg_space _ -> toCall e Nothing updfr_off 0 arg_space vols
mkJumpExtra :: DynFlags -> Convention -> CmmExpr -> [CmmArg]
-> UpdFrameOffset -> [CmmArg]
mkJumpExtra :: DynFlags -> Convention -> CmmExpr -> [CmmExpr]
-> UpdFrameOffset -> [CmmExpr]
-> CmmAGraph
mkJumpExtra dflags conv e actuals updfr_off extra_stack =
lastWithArgsAndExtraStack dflags Jump Old conv actuals updfr_off extra_stack $
......@@ -252,7 +229,7 @@ mkCbranch pred ifso ifnot likely =
mkSwitch :: CmmExpr -> SwitchTargets -> CmmAGraph
mkSwitch e tbl = mkLast $ CmmSwitch e tbl
mkReturn :: DynFlags -> CmmExpr -> [CmmArg] -> UpdFrameOffset
mkReturn :: DynFlags -> CmmExpr -> [CmmExpr] -> UpdFrameOffset
-> CmmAGraph
mkReturn dflags e actuals updfr_off =
lastWithArgs dflags Ret Old NativeReturn actuals updfr_off $
......@@ -262,17 +239,17 @@ mkBranch :: BlockId -> CmmAGraph
mkBranch bid = mkLast (CmmBranch bid)
mkFinalCall :: DynFlags
-> CmmExpr -> CCallConv -> [CmmArg] -> UpdFrameOffset
-> CmmExpr -> CCallConv -> [CmmExpr] -> UpdFrameOffset
-> CmmAGraph
mkFinalCall dflags f _ actuals updfr_off =
lastWithArgs dflags Call Old NativeDirectCall actuals updfr_off $
toCall f Nothing updfr_off 0
mkCallReturnsTo :: DynFlags -> CmmExpr -> Convention -> [CmmArg]
mkCallReturnsTo :: DynFlags -> CmmExpr -> Convention -> [CmmExpr]
-> BlockId
-> ByteOff
-> UpdFrameOffset
-> [CmmArg]
-> [CmmExpr]
-> CmmAGraph
mkCallReturnsTo dflags f callConv actuals ret_lbl ret_off updfr_off extra_stack = do
lastWithArgsAndExtraStack dflags Call (Young ret_lbl) callConv actuals
......@@ -281,7 +258,7 @@ mkCallReturnsTo dflags f callConv actuals ret_lbl ret_off updfr_off extra_stack
-- Like mkCallReturnsTo, but does not push the return address (it is assumed to be
-- already on the stack).
mkJumpReturnsTo :: DynFlags -> CmmExpr -> Convention -> [CmmArg]
mkJumpReturnsTo :: DynFlags -> CmmExpr -> Convention -> [CmmExpr]
-> BlockId
-> ByteOff
-> UpdFrameOffset
......@@ -349,9 +326,9 @@ copyIn dflags conv area formals extra_stk
data Transfer = Call | JumpRet | Jump | Ret deriving Eq
copyOutOflow :: DynFlags -> Convention -> Transfer -> Area -> [CmmArg]
copyOutOflow :: DynFlags -> Convention -> Transfer -> Area -> [CmmExpr]
-> UpdFrameOffset
-> [CmmArg] -- extra stack args
-> [CmmExpr] -- extra stack args
-> (Int, [GlobalReg], CmmAGraph)
-- Generate code to move the actual parameters into the locations
......@@ -369,9 +346,9 @@ copyOutOflow dflags conv transfer area actuals updfr_off extra_stack_stuff
(regs, graph) = foldr co ([], mkNop) (setRA ++ args ++ stack_params)
co (v, RegisterParam r) (rs, ms)
= (r:rs, mkAssign' (CmmGlobal r) v <*> ms)
= (r:rs, mkAssign (CmmGlobal r) v <*> ms)
co (v, StackParam off) (rs, ms)
= (rs, mkStore' (CmmStackSlot area off) v <*> ms)
= (rs, mkStore (CmmStackSlot area off) v <*> ms)
(setRA, init_offset) =
case area of
......@@ -379,7 +356,7 @@ copyOutOflow dflags conv transfer area actuals updfr_off extra_stack_stuff
-- the return address if making a call
case transfer of
Call ->
([(CmmExprArg (CmmLit (CmmBlock id)), StackParam init_offset)],
([(CmmLit (CmmBlock id), StackParam init_offset)],
widthInBytes (wordWidth dflags))
JumpRet ->
([],
......@@ -389,11 +366,11 @@ copyOutOflow dflags conv transfer area actuals updfr_off extra_stack_stuff
Old -> ([], updfr_off)
(extra_stack_off, stack_params) =
assignStack dflags init_offset (cmmArgType dflags) extra_stack_stuff
assignStack dflags init_offset (cmmExprType dflags) extra_stack_stuff
args :: [(CmmArg, ParamLocation)] -- The argument and where to put it
args :: [(CmmExpr, ParamLocation)] -- The argument and where to put it
(stk_size, args) = assignArgumentsPos dflags extra_stack_off conv
(cmmArgType dflags) actuals
(cmmExprType dflags) actuals
......@@ -402,7 +379,7 @@ mkCallEntry :: DynFlags -> Convention -> [CmmFormal] -> [CmmFormal]
mkCallEntry dflags conv formals extra_stk
= copyInOflow dflags conv Old formals extra_stk
lastWithArgs :: DynFlags -> Transfer -> Area -> Convention -> [CmmArg]
lastWithArgs :: DynFlags -> Transfer -> Area -> Convention -> [CmmExpr]
-> UpdFrameOffset
-> (ByteOff -> [GlobalReg] -> CmmAGraph)
-> CmmAGraph
......@@ -411,8 +388,8 @@ lastWithArgs dflags transfer area conv actuals updfr_off last =
updfr_off noExtraStack last
lastWithArgsAndExtraStack :: DynFlags
-> Transfer -> Area -> Convention -> [CmmArg]
-> UpdFrameOffset -> [CmmArg]
-> Transfer -> Area -> Convention -> [CmmExpr]
-> UpdFrameOffset -> [CmmExpr]
-> (ByteOff -> [GlobalReg] -> CmmAGraph)
-> CmmAGraph
lastWithArgsAndExtraStack dflags transfer area conv actuals updfr_off
......@@ -423,7 +400,7 @@ lastWithArgsAndExtraStack dflags transfer area conv actuals updfr_off
updfr_off extra_stack
noExtraStack :: [CmmArg]
noExtraStack :: [CmmExpr]
noExtraStack = []
toCall :: CmmExpr -> Maybe BlockId -> UpdFrameOffset -> ByteOff
......@@ -431,7 +408,3 @@ toCall :: CmmExpr -> Maybe BlockId -> UpdFrameOffset -> ByteOff
-> CmmAGraph
toCall e cont updfr_off res_space arg_space regs =
mkLast $ CmmCall e cont regs arg_space res_space updfr_off
--------------
rubbishExpr :: CmmExpr
rubbishExpr = CmmLit (CmmLabel mkRUBBISH_ENTRY_infoLabel)
......@@ -53,9 +53,6 @@ instance Outputable CmmExpr where
instance Outputable CmmReg where
ppr e = pprReg e
instance Outputable CmmArg where
ppr a = pprArg a
instance Outputable CmmLit where
ppr l = pprLit l
......@@ -278,11 +275,5 @@ pprGlobalReg gr
-----------------------------------------------------------------------------
pprArg :: CmmArg -> SDoc
pprArg (CmmExprArg e) = ppr e
pprArg (CmmRubbishArg ty) = text "Rubbish" <+> dcolon <+> ppr ty
-----------------------------------------------------------------------------
commafy :: [SDoc] -> SDoc
commafy xs = fsep $ punctuate comma xs
......@@ -241,7 +241,7 @@ cgDataCon data_con
do { _ <- ticky_code
; ldvEnter (CmmReg nodeReg)
; tickyReturnOldCon (length arg_things)
; void $ emitReturn [CmmExprArg (cmmOffsetB dflags (CmmReg nodeReg) (tagForCon dflags data_con))]
; void $ emitReturn [cmmOffsetB dflags (CmmReg nodeReg) (tagForCon dflags data_con)]
}
-- The case continuation code expects a tagged pointer
......
......@@ -551,7 +551,7 @@ mkSlowEntryCode bndr cl_info arg_regs -- function closure is already in `Node'
-- mkDirectJump does not clobber `Node' containing function closure
jump = mkJump dflags NativeNodeCall
(mkLblExpr fast_lbl)
(map (CmmExprArg . CmmReg . CmmLocal) (node : arg_regs))
(map (CmmReg . CmmLocal) (node : arg_regs))
(initUpdFrameOff dflags)
tscope <- getTickScope
emitProcWithConvention Slow Nothing slow_lbl
......
......@@ -88,7 +88,7 @@ cgTopRhsCon dflags id con args =
-- needs to poke around inside it.
info_tbl = mkDataConInfoTable dflags con True ptr_wds nonptr_wds
get_lit (arg, _offset) = do { CmmExprArg (CmmLit lit) <- getArgAmode arg
get_lit (arg, _offset) = do { CmmLit lit <- getArgAmode arg
; return lit }
; payload <- mapM get_lit nv_args_w_offsets
......
......@@ -19,8 +19,7 @@ module StgCmmEnv (
bindArgsToRegs, bindToReg, rebindToReg,
bindArgToReg, idToReg,
getArgAmode, getArgAmode_no_rubbish,
getNonVoidArgAmodes, getNonVoidArgAmodes_no_rubbish,
getArgAmode, getNonVoidArgAmodes,
getCgIdInfo,
maybeLetNoEscape,
) where
......@@ -37,7 +36,6 @@ import CLabel
import BlockId
import CmmExpr
import CmmUtils
import Control.Monad
import DynFlags
import Id
import MkGraph
......@@ -166,19 +164,11 @@ cgLookupPanic id
--------------------
getArgAmode :: NonVoid StgArg -> FCode CmmArg
getArgAmode (NonVoid (StgVarArg var)) =
do { info <- getCgIdInfo var; return (CmmExprArg (idInfoToAmode info)) }
getArgAmode (NonVoid (StgLitArg lit)) = liftM (CmmExprArg . CmmLit) $ cgLit lit
getArgAmode (NonVoid (StgRubbishArg ty)) = return (CmmRubbishArg ty)
getArgAmode_no_rubbish :: NonVoid StgArg -> FCode CmmExpr
getArgAmode_no_rubbish (NonVoid (StgVarArg var)) =
do { info <- getCgIdInfo var; return (idInfoToAmode info) }
getArgAmode_no_rubbish (NonVoid (StgLitArg lit)) = liftM CmmLit $ cgLit lit
getArgAmode_no_rubbish arg@(NonVoid (StgRubbishArg _)) = pprPanic "getArgAmode_no_rubbish" (ppr arg)
getNonVoidArgAmodes :: [StgArg] -> FCode [CmmArg]
getArgAmode :: NonVoid StgArg -> FCode CmmExpr
getArgAmode (NonVoid (StgVarArg var)) = idInfoToAmode <$> getCgIdInfo var
getArgAmode (NonVoid (StgLitArg lit)) = CmmLit <$> cgLit lit
getNonVoidArgAmodes :: [StgArg] -> FCode [CmmExpr]
-- NB: Filters out void args,
-- so the result list may be shorter than the argument list
getNonVoidArgAmodes [] = return []
......@@ -188,12 +178,6 @@ getNonVoidArgAmodes (arg:args)
; amodes <- getNonVoidArgAmodes args
; return ( amode : amodes ) }
-- This version assumes arguments are not rubbish. I think this assumption holds
-- as long as we don't pass unboxed sums to primops and foreign fns.
getNonVoidArgAmodes_no_rubbish :: [StgArg] -> FCode [CmmExpr]
getNonVoidArgAmodes_no_rubbish
= mapM (getArgAmode_no_rubbish . NonVoid) . filter (not . isVoidRep . argPrimRep)
------------------------------------------------------------------------
-- Interface functions for binding and re-binding names
......
......@@ -68,7 +68,7 @@ cgExpr (StgOpApp op args ty) = cgOpApp op args ty
cgExpr (StgConApp con args _)= cgConApp con args
cgExpr (StgTick t e) = cgTick t >> cgExpr e
cgExpr (StgLit lit) = do cmm_lit <- cgLit lit
emitReturn [CmmExprArg (CmmLit cmm_lit)]
emitReturn [CmmLit cmm_lit]
cgExpr (StgLet binds expr) = do { cgBind binds; cgExpr expr }
cgExpr (StgLetNoEscape binds expr) =
......@@ -309,7 +309,7 @@ cgCase (StgOpApp (StgPrimOp op) args _) bndr (AlgAlt tycon) alts
where
do_enum_primop :: PrimOp -> [StgArg] -> FCode CmmExpr
do_enum_primop TagToEnumOp [arg] -- No code!
= getArgAmode_no_rubbish (NonVoid arg)
= getArgAmode (NonVoid arg)
do_enum_primop primop args
= do dflags <- getDynFlags
tmp <- newTemp (bWord dflags)
......@@ -517,7 +517,7 @@ isSimpleOp :: StgOp -> [StgArg] -> FCode Bool
-- True iff the op cannot block or allocate
isSimpleOp (StgFCallOp (CCall (CCallSpec _ _ safe)) _) _ = return $! not (playSafe safe)
isSimpleOp (StgPrimOp op) stg_args = do
arg_exprs <- getNonVoidArgAmodes_no_rubbish stg_args
arg_exprs <- getNonVoidArgAmodes stg_args
dflags <- getDynFlags
-- See Note [Inlining out-of-line primops and heap checks]
return $! isJust $ shouldInlinePrimOp dflags op arg_exprs
......@@ -684,7 +684,7 @@ cgConApp con stg_args
; emit =<< fcode_init
; tickyReturnNewCon (length stg_args)
; emitReturn [CmmExprArg (idInfoToAmode idinfo)] }
; emitReturn [idInfoToAmode idinfo] }
cgIdApp :: Id -> [StgArg] -> FCode ReturnKind
cgIdApp fun_id [] | isVoidTy (idType fun_id) = emitReturn []
......@@ -707,7 +707,7 @@ cgIdApp fun_id args = do
case getCallMethod dflags fun_name cg_fun_id lf_info n_args v_args (cg_loc fun_info) self_loop_info of
-- A value in WHNF, so we can just return it.
ReturnIt -> emitReturn [CmmExprArg fun] -- ToDo: does ReturnIt guarantee tagged?
ReturnIt -> emitReturn [fun] -- ToDo: does ReturnIt guarantee tagged?
EnterIt -> ASSERT( null args ) -- Discarding arguments
emitEnter fun
......@@ -857,7 +857,7 @@ emitEnter fun = do
Return -> do
{ let entry = entryCode dflags $ closureInfoPtr dflags $ CmmReg nodeReg
; emit $ mkJump dflags NativeNodeCall entry
[CmmExprArg (cmmUntag dflags fun)] updfr_off
[cmmUntag dflags fun] updfr_off
; return AssignedDirectly
}
......@@ -893,7 +893,7 @@ emitEnter fun = do
; updfr_off <- getUpdFrameOff
; let area = Young lret
; let (outArgs, regs, copyout) = copyOutOflow dflags NativeNodeCall Call area
[CmmExprArg fun] updfr_off []
[fun] updfr_off []
-- refer to fun via nodeReg after the copyout, to avoid having
-- both live simultaneously; this sometimes enables fun to be
-- inlined in the RHS of the R1 assignment.
......
......@@ -111,7 +111,7 @@ cgForeignCall (CCall (CCallSpec target cconv safety)) stg_args res_ty
_something_else ->
do { _ <- emitForeignCall safety res_regs call_target call_args
; emitReturn (map (CmmExprArg . CmmReg . CmmLocal) res_regs)
; emitReturn (map (CmmReg . CmmLocal) res_regs)
}
}
......@@ -524,12 +524,10 @@ getFCallArgs args
= do { mb_cmms <- mapM get args
; return (catMaybes mb_cmms) }
where
get arg@(StgRubbishArg{})
= pprPanic "getFCallArgs" (text "Rubbish arg in foreign call:" <+> ppr arg)
get arg | isVoidRep arg_rep
= return Nothing
| otherwise
= do { cmm <- getArgAmode_no_rubbish (NonVoid arg)
= do { cmm <- getArgAmode (NonVoid arg)
; dflags <- getDynFlags
; return (Just (add_shim dflags arg_ty cmm, hint)) }
where
......
......@@ -72,7 +72,7 @@ allocDynClosure
allocDynClosureCmm
:: Maybe Id -> CmmInfoTable -> LambdaFormInfo -> CmmExpr -> CmmExpr
-> [(CmmArg, ByteOff)]
-> [(CmmExpr, ByteOff)]
-> FCode CmmExpr -- returns Hp+n
-- allocDynClosure allocates the thing in the heap,
......@@ -113,7 +113,7 @@ allocHeapClosure
:: SMRep -- ^ representation of the object
-> CmmExpr -- ^ info pointer
-> CmmExpr -- ^ cost centre
-> [(CmmArg,ByteOff)] -- ^ payload
-> [(CmmExpr,ByteOff)] -- ^ payload
-> FCode CmmExpr -- ^ returns the address of the object
allocHeapClosure rep info_ptr use_cc payload = do
profDynAlloc rep use_cc
......@@ -144,7 +144,7 @@ allocHeapClosure rep info_ptr use_cc payload = do
emitSetDynHdr :: CmmExpr -> CmmExpr -> CmmExpr -> FCode ()
emitSetDynHdr base info_ptr ccs
= do dflags <- getDynFlags
hpStore base (zip (map CmmExprArg (header dflags)) [0, wORD_SIZE dflags ..])
hpStore base (zip (header dflags) [0, wORD_SIZE dflags ..])
where
header :: DynFlags -> [CmmExpr]
header dflags = [info_ptr] ++ dynProfHdr dflags ccs
......@@ -152,11 +152,11 @@ emitSetDynHdr base info_ptr ccs
-- No ticky header
-- Store the item (expr,off) in base[off]
hpStore :: CmmExpr -> [(CmmArg, ByteOff)] -> FCode ()
hpStore :: CmmExpr -> [(CmmExpr, ByteOff)] -> FCode ()
hpStore base vals = do
dflags <- getDynFlags
sequence_ $
[ emitStore (cmmOffsetB dflags base off) val | (CmmExprArg val,off) <- vals ]
[ emitStore (cmmOffsetB dflags base off) val | (val,off) <- vals ]
-----------------------------------------------------------
-- Layout of static closures
......@@ -364,7 +364,7 @@ entryHeapCheck' is_fastf node arity args code
= do dflags <- getDynFlags
let is_thunk = arity == 0
args' = map (CmmExprArg . CmmReg . CmmLocal) args
args' = map (CmmReg . CmmLocal) args
stg_gc_fun = CmmReg (CmmGlobal GCFun)
stg_gc_enter1 = CmmReg (CmmGlobal GCEnter1)
......@@ -376,13 +376,13 @@ entryHeapCheck' is_fastf node arity args code
-}
gc_call upd
| is_thunk
= mkJump dflags NativeNodeCall stg_gc_enter1 [CmmExprArg node] upd
= mkJump dflags NativeNodeCall stg_gc_enter1 [node] upd
| is_fastf
= mkJump dflags NativeNodeCall stg_gc_fun (CmmExprArg node : args') upd
= mkJump dflags NativeNodeCall stg_gc_fun (node : args') upd
| otherwise
= mkJump dflags Slow stg_gc_fun (CmmExprArg node : args') upd
= mkJump dflags Slow stg_gc_fun (node : args') upd
updfr_sz <- getUpdFrameOff
......@@ -446,7 +446,7 @@ cannedGCReturnsTo checkYield cont_on_stack gc regs lret off code
updfr_sz <- getUpdFrameOff
heapCheck False checkYield (gc_call dflags gc updfr_sz) code
where
reg_exprs = map (CmmExprArg . CmmReg . CmmLocal) regs
reg_exprs = map (CmmReg . CmmLocal) regs
-- Note [stg_gc arguments]
-- NB. we use the NativeReturn convention for passing arguments
......
......@@ -68,7 +68,7 @@ import Control.Monad
--
-- > p=x; q=y;
--
emitReturn :: [CmmArg] -> FCode ReturnKind
emitReturn :: [CmmExpr] -> FCode ReturnKind
emitReturn results
= do { dflags <- getDynFlags
; sequel <- getSequel
......@@ -90,7 +90,7 @@ emitReturn results
-- using the call/return convention @conv@, passing @args@, and
-- returning the results to the current sequel.
--
emitCall :: (Convention, Convention) -> CmmExpr -> [CmmArg] -> FCode ReturnKind
emitCall :: (Convention, Convention) -> CmmExpr -> [CmmExpr] -> FCode ReturnKind
emitCall convs fun args
= emitCallWithExtraStack convs fun args noExtraStack
......@@ -101,8 +101,8 @@ emitCall convs fun args
-- @stack@, and returning the results to the current sequel.
--
emitCallWithExtraStack
:: (Convention, Convention) -> CmmExpr -> [CmmArg]
-> [CmmArg] -> FCode ReturnKind
:: (Convention, Convention) -> CmmExpr -> [CmmExpr]
-> [CmmExpr] -> FCode ReturnKind
emitCallWithExtraStack (callConv, retConv) fun args extra_stack
= do { dflags <- getDynFlags
; adjustHpBackwards
......@@ -187,7 +187,7 @@ slowCall fun stg_args
(r, slow_code) <- getCodeR $ do
r <- direct_call "slow_call" NativeNodeCall
(mkRtsApFastLabel rts_fun) arity ((P,Just (CmmExprArg fun)):argsreps)
(mkRtsApFastLabel rts_fun) arity ((P,Just fun):argsreps)
emitComment $ mkFastString ("slow_call for " ++
showSDoc dflags (ppr fun) ++
" with pat " ++ unpackFS rts_fun)
......@@ -213,7 +213,7 @@ slowCall fun stg_args
fast_code <- getCode $
emitCall (NativeNodeCall, NativeReturn)
(entryCode dflags fun_iptr)
(nonVArgs ((P,Just (CmmExprArg funv)):argsreps))
(nonVArgs ((P,Just funv):argsreps))
slow_lbl <- newLabelC
fast_lbl <- newLabelC
......@@ -271,7 +271,7 @@ slowCall fun stg_args
direct_call :: String
-> Convention -- e.g. NativeNodeCall or NativeDirectCall
-> CLabel -> RepArity
-> [(ArgRep,Maybe CmmArg)] -> FCode ReturnKind
-> [(ArgRep,Maybe CmmExpr)] -> FCode ReturnKind
direct_call caller call_conv lbl arity args
| debugIsOn && real_arity > length args -- Too few args
= do -- Caller should ensure that there enough args!
......@@ -299,11 +299,11 @@ direct_call caller call_conv lbl arity args
-- When constructing calls, it is easier to keep the ArgReps and the
-- CmmArgs zipped together. However, a void argument has no