{-# LANGUAGE GADTs #-}

module MkGraph
  ( CmmAGraph, CgStmt(..)
  , (<*>), catAGraphs
  , mkLabel, mkMiddle, mkLast, outOfLine
  , lgraphOfAGraph, labelAGraph

  , stackStubExpr
  , mkNop, mkAssign, mkStore, mkUnsafeCall, mkFinalCall, mkCallReturnsTo
  , mkJump, mkDirectJump, mkForeignJump, mkForeignJumpExtra, mkJumpGC
  , mkCbranch, mkSwitch
  , mkReturn, mkReturnSimple, mkComment, mkCallEntry, mkBranch
  , copyInOflow, copyOutOflow
  , noExtraStack
  , toCall, Transfer(..)
  )
where

import BlockId
import Cmm
import CmmCallConv (assignArgumentsPos, ParamLocation(..))

import Compiler.Hoopl hiding (Unique, (<*>), mkFirst, mkMiddle, mkLast, mkLabel, mkBranch, Shape(..))
import FastString
import ForeignCall
import Outputable
import Prelude hiding (succ)
import SMRep (ByteOff)
import UniqSupply
import OrdList

#include "HsVersions.h"


-----------------------------------------------------------------------------
-- Building Graphs


-- | CmmAGraph is a chunk of code consisting of:
--
--   * ordinary statements (assignments, stores etc.)
--   * jumps
--   * labels
--   * out-of-line labelled blocks
--
-- The semantics is that control falls through labels and out-of-line
-- blocks.  Everything after a jump up to the next label is by
-- definition unreachable code, and will be discarded.
--
-- Two CmmAGraphs can be stuck together with <*>, with the meaning that
-- control flows from the first to the second.
--
-- A 'CmmAGraph' can be turned into a 'CmmGraph' (closed at both ends)
-- by providing a label for the entry point; see 'labelAGraph'.
--
type CmmAGraph = OrdList CgStmt

data CgStmt
  = CgLabel BlockId
  | CgStmt  (CmmNode O O)
  | CgLast  (CmmNode O C)
  | CgFork  BlockId CmmAGraph
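
-- A minimal sketch of how an AGraph is assembled from the smart
-- constructors below ('r', 'e' and 'k' are hypothetical names, not
-- defined in this module):
--
--   example :: CmmReg -> CmmExpr -> BlockId -> CmmAGraph
--   example r e k = mkAssign r e <*> mkBranch k <*> mkLabel k <*> mkNop
--
-- Control falls through the label 'k'; anything placed between a jump
-- and the next label is dropped as unreachable when the graph is
-- flattened.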

flattenCmmAGraph :: BlockId -> CmmAGraph -> CmmGraph
flattenCmmAGraph id stmts =
    CmmGraph { g_entry = id,
               g_graph = GMany NothingO body NothingO }
  where
  (block, blocks) = flatten (fromOL stmts)
  entry = blockJoinHead (CmmEntry id) block
  body = foldr addBlock emptyBody (entry:blocks)

  flatten :: [CgStmt] -> (Block CmmNode O C, [Block CmmNode C C])
  flatten [] = panic "flatten []"

  -- A label at the end of a function or fork: this label must not be reachable,
  -- but it might be referred to from another BB that also isn't reachable.
  -- Eliminating these has to be done with a dead-code analysis.  For now,
  -- we just make it into a well-formed block by adding a recursive jump.
  flatten [CgLabel id]
    = (goto_id, [blockJoinHead (CmmEntry id) goto_id] )
    where goto_id = blockJoinTail emptyBlock (CmmBranch id)

  -- A jump/branch: throw away all the code up to the next label, because
  -- it is unreachable.  Be careful to keep forks that we find on the way.
  flatten (CgLast stmt : stmts)
    = case dropWhile isOrdinaryStmt stmts of
        [] ->
            ( sing, [] )
        [CgLabel id] ->
            ( sing, [blockJoin (CmmEntry id) emptyBlock (CmmBranch id)] )
        (CgLabel id : stmts) ->
            ( sing, blockJoinHead (CmmEntry id) block : blocks )
            where (block,blocks) = flatten stmts
        (CgFork fork_id stmts : ss) -> 
            flatten (CgFork fork_id stmts : CgLast stmt : ss)
        _ -> panic "MkGraph.flatten"
    where
      sing = blockJoinTail emptyBlock stmt

  flatten (s:ss) = 
        case s of
          CgStmt stmt -> (blockCons stmt block, blocks)
          CgLabel id  -> (blockJoinTail emptyBlock (CmmBranch id),
                          blockJoinHead (CmmEntry id) block : blocks)
          CgFork fork_id stmts -> 
                (block, blockJoinHead (CmmEntry fork_id) fork_block : fork_blocks ++ blocks)
                where (fork_block, fork_blocks) = flatten (fromOL stmts)
          _ -> panic "MkGraph.flatten"
    where (block,blocks) = flatten ss

isOrdinaryStmt :: CgStmt -> Bool
isOrdinaryStmt (CgStmt _) = True
isOrdinaryStmt (CgLast _) = True
isOrdinaryStmt _          = False
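
-- For example (a sketch only), flattening the statement list
--
--   [CgStmt s1, CgLast (CmmBranch k), CgStmt dead, CgLabel k, CgStmt s2, CgLast l]
--
-- produces an entry block that contains s1 and ends in the branch to k,
-- drops the unreachable statement 'dead', and emits a separate block
-- labelled k that contains s2 and ends in l.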



---------- AGraph manipulation

(<*>)          :: CmmAGraph -> CmmAGraph -> CmmAGraph
(<*>)           = appOL

catAGraphs     :: [CmmAGraph] -> CmmAGraph
catAGraphs      = concatOL

-- | creates a sequence "goto id; id:" as an AGraph
mkLabel        :: BlockId -> CmmAGraph
mkLabel bid     = unitOL (CgLabel bid)

-- | creates an open AGraph from a given node
mkMiddle        :: CmmNode O O -> CmmAGraph
mkMiddle middle = unitOL (CgStmt middle)

-- | creates a closed AGraph from a given node
mkLast         :: CmmNode O C -> CmmAGraph
mkLast last     = unitOL (CgLast last)

-- | A labelled code block; should end in a last node
outOfLine      :: BlockId -> CmmAGraph -> CmmAGraph
outOfLine l g   = unitOL (CgFork l g)
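
-- A sketch of typical use (all names hypothetical): branch to an
-- out-of-line failure path that rejoins the main code at 'join_lbl':
--
--   mkCbranch cond join_lbl fail_lbl
--     <*> outOfLine fail_lbl (fail_code <*> mkBranch join_lbl)
--     <*> mkLabel join_lbl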

-- | allocate a fresh label for the entry point
lgraphOfAGraph :: CmmAGraph -> UniqSM CmmGraph
lgraphOfAGraph g = do u <- getUniqueM
                      return (flattenCmmAGraph (mkBlockId u) g)

-- | use the given BlockId as the label of the entry point
labelAGraph    :: BlockId -> CmmAGraph -> UniqSM CmmGraph
labelAGraph lbl ag = return (flattenCmmAGraph lbl ag)
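
-- For instance (a sketch, assuming some 'ag :: CmmAGraph' is in scope),
-- a complete CmmGraph can be built in the UniqSM monad either with a
-- fresh entry label or with one chosen by the caller:
--
--   do { g1 <- lgraphOfAGraph ag           -- fresh entry BlockId
--      ; g2 <- labelAGraph entry_lbl ag    -- 'entry_lbl' supplied by the caller
--      ; ... }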

---------- No-ops
mkNop        :: CmmAGraph
mkNop         = nilOL

mkComment    :: FastString -> CmmAGraph
#ifdef DEBUG
-- SDM: generating all those comments takes time, this saved about 4% for me
mkComment fs  = mkMiddle $ CmmComment fs
#else
mkComment _   = nilOL
#endif

---------- Assignment and store
mkAssign     :: CmmReg  -> CmmExpr -> CmmAGraph
mkAssign l r  = mkMiddle $ CmmAssign l r

mkStore      :: CmmExpr -> CmmExpr -> CmmAGraph
mkStore  l r  = mkMiddle $ CmmStore  l r

---------- Control transfer
mkJump          :: CmmExpr -> [CmmActual] -> UpdFrameOffset -> CmmAGraph
mkJump e actuals updfr_off =
  lastWithArgs Jump Old NativeNodeCall actuals updfr_off $
    toCall e Nothing updfr_off 0

mkDirectJump    :: CmmExpr -> [CmmActual] -> UpdFrameOffset -> CmmAGraph
mkDirectJump e actuals updfr_off =
  lastWithArgs Jump Old NativeDirectCall actuals updfr_off $
    toCall e Nothing updfr_off 0

mkJumpGC        :: CmmExpr -> [CmmActual] -> UpdFrameOffset -> CmmAGraph
mkJumpGC e actuals updfr_off =
  lastWithArgs Jump Old GC actuals updfr_off $
    toCall e Nothing updfr_off 0

mkForeignJump   :: Convention -> CmmExpr -> [CmmActual] -> UpdFrameOffset
                -> CmmAGraph
mkForeignJump conv e actuals updfr_off =
  mkForeignJumpExtra conv e actuals updfr_off noExtraStack

mkForeignJumpExtra :: Convention -> CmmExpr -> [CmmActual]
                -> UpdFrameOffset -> (ByteOff, [(CmmExpr, ByteOff)])
                -> CmmAGraph
mkForeignJumpExtra conv e actuals updfr_off extra_stack =
  lastWithArgsAndExtraStack Jump Old conv actuals updfr_off extra_stack $
    toCall e Nothing updfr_off 0

mkCbranch       :: CmmExpr -> BlockId -> BlockId -> CmmAGraph
mkCbranch pred ifso ifnot = mkLast (CmmCondBranch pred ifso ifnot)

mkSwitch        :: CmmExpr -> [Maybe BlockId] -> CmmAGraph
mkSwitch e tbl   = mkLast $ CmmSwitch e tbl

mkReturn        :: CmmExpr -> [CmmActual] -> UpdFrameOffset -> CmmAGraph
mkReturn e actuals updfr_off =
  lastWithArgs Ret  Old NativeReturn actuals updfr_off $
    toCall e Nothing updfr_off 0

mkReturnSimple  :: [CmmActual] -> UpdFrameOffset -> CmmAGraph
mkReturnSimple actuals updfr_off =
  mkReturn e actuals updfr_off
  where e = CmmLoad (CmmStackSlot Old updfr_off) gcWord

mkBranch        :: BlockId -> CmmAGraph
mkBranch bid     = mkLast (CmmBranch bid)

mkFinalCall   :: CmmExpr -> CCallConv -> [CmmActual] -> UpdFrameOffset
              -> CmmAGraph
mkFinalCall f _ actuals updfr_off =
  lastWithArgs Call Old NativeDirectCall actuals updfr_off $
    toCall f Nothing updfr_off 0

mkCallReturnsTo :: CmmExpr -> Convention -> [CmmActual]
                -> BlockId
                -> ByteOff
                -> UpdFrameOffset
                -> (ByteOff, [(CmmExpr,ByteOff)])
                -> CmmAGraph
mkCallReturnsTo f callConv actuals ret_lbl ret_off updfr_off extra_stack =
  lastWithArgsAndExtraStack Call (Young ret_lbl) callConv actuals
     updfr_off extra_stack $
       toCall f (Just ret_lbl) updfr_off ret_off

mkUnsafeCall  :: ForeignTarget -> [CmmFormal] -> [CmmActual] -> CmmAGraph
mkUnsafeCall t fs as = mkMiddle $ CmmUnsafeForeignCall t fs as


--------------------------------------------------------------------------




-- Why are we inserting extra blocks that simply branch to the successors?
-- Because in addition to the branch instruction, @mkBranch@ will insert
-- a necessary adjustment to the stack pointer.


-- For debugging purposes, we can stub out dead stack slots:
stackStubExpr :: Width -> CmmExpr
stackStubExpr w = CmmLit (CmmInt 0 w)

-- When we copy in parameters, we usually want to put overflow
-- parameters on the stack, but sometimes we want to pass
-- the variables in their spill slots.
-- Therefore, for copying arguments and results, we provide different
-- functions to pass the arguments in an overflow area and to pass them in spill slots.
copyInOflow  :: Convention -> Area -> [CmmFormal] -> (Int, CmmAGraph)

copyInOflow conv area formals = (offset, catAGraphs $ map mkMiddle nodes)
  where (offset, nodes) = copyIn oneCopyOflowI conv area formals

type SlotCopier = Area -> (LocalReg, ByteOff) -> (ByteOff, [CmmNode O O]) ->
                          (ByteOff, [CmmNode O O])
type CopyIn  = SlotCopier -> Convention -> Area -> [CmmFormal] -> (ByteOff, [CmmNode O O])

-- Return the number of bytes used for copying arguments, as well as the
-- instructions to copy the arguments.
copyIn :: CopyIn
copyIn oflow conv area formals =
  foldr ci (init_offset, []) args'
  where ci (reg, RegisterParam r) (n, ms) =
          (n, CmmAssign (CmmLocal reg) (CmmReg $ CmmGlobal r) : ms)
        ci (r, StackParam off) (n, ms) = oflow area (r, off) (n, ms)
        init_offset = widthInBytes wordWidth -- infotable
        args  = assignArgumentsPos conv localRegType formals
        args' = foldl adjust [] args
          where adjust rst (v, StackParam off) = (v, StackParam (off + init_offset)) : rst
                adjust rst x@(_, RegisterParam _) = x : rst
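
-- To illustrate the adjustment above (sketch only): if assignArgumentsPos
-- places an overflow formal at 'StackParam off', 'adjust' rebinds it to
-- 'StackParam (off + init_offset)', leaving the first word of the frame
-- free for the info pointer.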

-- Copy-in one arg, using overflow space if needed.
oneCopyOflowI :: SlotCopier
oneCopyOflowI area (reg, off) (n, ms) =
  (max n off, CmmAssign (CmmLocal reg) (CmmLoad (CmmStackSlot area off) ty) : ms)
  where ty = localRegType reg

-- Factoring out the common parts of the copyout functions yielded something
-- more complicated:

data Transfer = Call | Jump | Ret deriving Eq

copyOutOflow :: Convention -> Transfer -> Area -> [CmmActual]
             -> UpdFrameOffset
             -> (ByteOff, [(CmmExpr,ByteOff)]) -- extra stack stuff
             -> (Int, CmmAGraph)

-- Generate code to move the actual parameters into the locations
-- required by the calling convention.  This includes a store for the
-- return address.
--
-- The argument layout function ignores the pointer to the info table,
-- so we slot that in here. When copying-out to a young area, we set
-- the info table for return and adjust the offsets of the other
-- parameters.  If this is a call instruction, we adjust the offsets
-- of the other parameters.
copyOutOflow conv transfer area actuals updfr_off
  (extra_stack_off, extra_stack_stuff)
  = foldr co (init_offset, mkNop) (args' ++ stack_params)
  where 
    co (v, RegisterParam r) (n, ms) = (n, mkAssign (CmmGlobal r) v <*> ms)
    co (v, StackParam off)  (n, ms) = (max n off, mkStore (CmmStackSlot area off) v <*> ms)

    stack_params = [ (e, StackParam (off + init_offset))
                   | (e,off) <- extra_stack_stuff ]

    (setRA, init_offset) =
      case area of
            Young id -> id `seq` -- Generate a store instruction for
                                 -- the return address if making a call
                  if transfer == Call then
                    ([(CmmLit (CmmBlock id), StackParam init_offset)],
                     widthInBytes wordWidth)
                  else ([], 0)
            Old -> ([], updfr_off)

    arg_offset = init_offset + extra_stack_off

    args :: [(CmmExpr, ParamLocation)]   -- The argument and where to put it
    args = assignArgumentsPos conv cmmExprType actuals

    args' = foldl adjust setRA args
      where adjust rst   (v, StackParam off)  = (v, StackParam (off + arg_offset)) : rst
            adjust rst x@(_, RegisterParam _) = x : rst
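
-- To make the offsets concrete (sketch only): for a Call to a 'Young k'
-- area, 'setRA' assigns the return address 'CmmLit (CmmBlock k)' to
-- 'StackParam init_offset', the extra stack datum at offset 'off' goes to
-- 'StackParam (off + init_offset)', and every overflow argument from
-- assignArgumentsPos is shifted by 'arg_offset = init_offset +
-- extra_stack_off' so that nothing overlaps.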



mkCallEntry :: Convention -> [CmmFormal] -> (Int, CmmAGraph)
mkCallEntry conv formals = copyInOflow conv Old formals

lastWithArgs :: Transfer -> Area -> Convention -> [CmmActual]
             -> UpdFrameOffset
             -> (ByteOff -> CmmAGraph)
             -> CmmAGraph
lastWithArgs transfer area conv actuals updfr_off last =
  lastWithArgsAndExtraStack transfer area conv actuals
                            updfr_off noExtraStack last

lastWithArgsAndExtraStack :: Transfer -> Area -> Convention -> [CmmActual]
             -> UpdFrameOffset -> (ByteOff, [(CmmExpr,ByteOff)])
             -> (ByteOff -> CmmAGraph)
             -> CmmAGraph
lastWithArgsAndExtraStack transfer area conv actuals updfr_off
                          extra_stack last =
  let (outArgs, copies) = copyOutOflow conv transfer area actuals
                             updfr_off extra_stack in
  copies <*> last outArgs
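
-- Note: the '(ByteOff -> CmmAGraph)' continuation receives the stack
-- space figure computed by copyOutOflow; the mk* functions above pass
-- 'toCall ...' partially applied, so that this figure becomes the
-- arg_space of the resulting CmmCall.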

noExtraStack :: (ByteOff, [(CmmExpr,ByteOff)])
noExtraStack = (0,[])

toCall :: CmmExpr -> Maybe BlockId -> UpdFrameOffset -> ByteOff -> ByteOff
       -> CmmAGraph
toCall e cont updfr_off res_space arg_space =
  mkLast $ CmmCall e cont arg_space res_space updfr_off