{-# LANGUAGE GADTs #-}

module MkGraph
  ( CmmAGraph, CgStmt(..)
  , (<*>), catAGraphs
  , mkLabel, mkMiddle, mkLast, outOfLine
  , lgraphOfAGraph, labelAGraph

  , stackStubExpr
  , mkNop, mkAssign, mkStore, mkUnsafeCall, mkFinalCall, mkCallReturnsTo
  , mkJump, mkDirectJump, mkForeignJump, mkForeignJumpExtra, mkJumpGC
  , mkCbranch, mkSwitch
  , mkReturn, mkReturnSimple, mkComment, mkCallEntry, mkBranch
  , copyInOflow, copyOutOflow
  , toCall, Transfer(..)
  )
where

import BlockId
import Cmm
import CmmCallConv (assignArgumentsPos, ParamLocation(..))

import Compiler.Hoopl hiding (Unique, (<*>), mkFirst, mkMiddle, mkLast, mkLabel, mkBranch, Shape(..))
import FastString
import ForeignCall
import Outputable
import Prelude hiding (succ)
import SMRep (ByteOff)
import UniqSupply
import OrdList

#include "HsVersions.h"


-----------------------------------------------------------------------------
-- Building Graphs


-- | CmmAGraph is a chunk of code consisting of:
--
--   * ordinary statements (assignments, stores etc.)
--   * jumps
--   * labels
--   * out-of-line labelled blocks
--
-- The semantics is that control falls through labels and out-of-line
-- blocks.  Everything after a jump up to the next label is by
-- definition unreachable code, and will be discarded.
--
-- Two CmmAGraphs can be stuck together with <*>, with the meaning that
-- control flows from the first to the second.
--
-- A 'CmmAGraph' can be turned into a 'CmmGraph' (closed at both ends)
-- by providing a label for the entry point; see 'labelAGraph'.
--
type CmmAGraph = OrdList CgStmt

data CgStmt
  = CgLabel BlockId
  | CgStmt  (CmmNode O O)
  | CgLast  (CmmNode O C)
  | CgFork  BlockId CmmAGraph
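
-- A minimal sketch (not part of the original module) of how the
-- combinators defined below compose into an AGraph; the BlockIds are
-- assumed to be supplied by the caller.
exampleAGraph :: BlockId -> BlockId -> CmmAGraph
exampleAGraph join_id exit_id =
      mkBranch join_id     -- a jump: anything up to the next label is dropped
  <*> mkLabel join_id      -- control falls through labels
  <*> mkBranch exit_id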

flattenCmmAGraph :: BlockId -> CmmAGraph -> CmmGraph
flattenCmmAGraph id stmts =
    CmmGraph { g_entry = id,
               g_graph = GMany NothingO body NothingO }
  where
  (block, blocks) = flatten (fromOL stmts)
  entry = blockJoinHead (CmmEntry id) block
  body = foldr addBlock emptyBody (entry:blocks)

  flatten :: [CgStmt] -> (Block CmmNode O C, [Block CmmNode C C])
  flatten [] = panic "flatten []"

  -- A label at the end of a function or fork: this label must not be reachable,
  -- but it might be referred to from another BB that also isn't reachable.
  -- Eliminating these has to be done with a dead-code analysis.  For now,
  -- we just make it into a well-formed block by adding a recursive jump.
  flatten [CgLabel id]
    = (goto_id, [blockJoinHead (CmmEntry id) goto_id] )
    where goto_id = blockJoinTail emptyBlock (CmmBranch id)

  -- A jump/branch: throw away all the code up to the next label, because
  -- it is unreachable.  Be careful to keep forks that we find on the way.
  flatten (CgLast stmt : stmts)
    = case dropWhile isOrdinaryStmt stmts of
        [] ->
            ( sing, [] )
        [CgLabel id] ->
            ( sing, [blockJoin (CmmEntry id) emptyBlock (CmmBranch id)] )
        (CgLabel id : stmts) ->
            ( sing, blockJoinHead (CmmEntry id) block : blocks )
            where (block,blocks) = flatten stmts
        (CgFork fork_id stmts : ss) -> 
            flatten (CgFork fork_id stmts : CgLast stmt : ss)
        _ -> panic "MkGraph.flatten"
    where
      sing = blockJoinTail emptyBlock stmt

  flatten (s:ss) = 
        case s of
          CgStmt stmt -> (blockCons stmt block, blocks)
          CgLabel id  -> (blockJoinTail emptyBlock (CmmBranch id),
                          blockJoinHead (CmmEntry id) block : blocks)
          CgFork fork_id stmts -> 
                (block, blockJoinHead (CmmEntry fork_id) fork_block : fork_blocks ++ blocks)
                where (fork_block, fork_blocks) = flatten (fromOL stmts)
          _ -> panic "MkGraph.flatten"
    where (block,blocks) = flatten ss

isOrdinaryStmt :: CgStmt -> Bool
isOrdinaryStmt (CgStmt _) = True
isOrdinaryStmt (CgLast _) = True
isOrdinaryStmt _          = False



---------- AGraph manipulation

(<*>)          :: CmmAGraph -> CmmAGraph -> CmmAGraph
(<*>)           = appOL

catAGraphs     :: [CmmAGraph] -> CmmAGraph
catAGraphs      = concatOL

-- | creates a sequence "goto id; id:" as an AGraph
mkLabel        :: BlockId -> CmmAGraph
mkLabel bid     = unitOL (CgLabel bid)

-- | creates an open AGraph from a given node
mkMiddle        :: CmmNode O O -> CmmAGraph
mkMiddle middle = unitOL (CgStmt middle)

-- | creates a closed AGraph from a given node
mkLast         :: CmmNode O C -> CmmAGraph
mkLast last     = unitOL (CgLast last)

-- | A labelled code block; should end in a last node
outOfLine      :: BlockId -> CmmAGraph -> CmmAGraph
outOfLine l g   = unitOL (CgFork l g)
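
-- A hedged sketch (names are illustrative, not from the original source)
-- of a typical use of outOfLine: the slow path lives in its own labelled
-- block after the main code, so the fast path falls straight through;
-- both the branch to it and the branch back are explicit.
exampleSlowPath :: CmmExpr -> BlockId -> BlockId -> CmmAGraph -> CmmAGraph
exampleSlowPath cond slow_id join_id slow_code =
      mkCbranch cond slow_id join_id
  <*> outOfLine slow_id (slow_code <*> mkBranch join_id)
  <*> mkLabel join_id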

-- | allocate a fresh label for the entry point
lgraphOfAGraph :: CmmAGraph -> UniqSM CmmGraph
lgraphOfAGraph g = do u <- getUniqueM
                      return (flattenCmmAGraph (mkBlockId u) g)

-- | use the given BlockId as the label of the entry point
labelAGraph    :: BlockId -> CmmAGraph -> UniqSM CmmGraph
labelAGraph lbl ag = return (flattenCmmAGraph lbl ag)
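
-- A small sketch (illustrative only): closing an open AGraph into a
-- CmmGraph, either with a fresh entry label or with one the caller
-- already has.
exampleClose :: CmmAGraph -> UniqSM CmmGraph
exampleClose body = lgraphOfAGraph (mkComment (fsLit "entry") <*> body)

exampleCloseAt :: BlockId -> CmmAGraph -> UniqSM CmmGraph
exampleCloseAt entry body = labelAGraph entry body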

---------- No-ops
mkNop        :: CmmAGraph
mkNop         = nilOL

mkComment    :: FastString -> CmmAGraph
#ifdef DEBUG
-- SDM: generating all those comments takes time, this saved about 4% for me
mkComment fs  = mkMiddle $ CmmComment fs
#else
mkComment _   = nilOL
#endif

---------- Assignment and store
mkAssign     :: CmmReg  -> CmmExpr -> CmmAGraph
mkAssign l r  = mkMiddle $ CmmAssign l r

mkStore      :: CmmExpr -> CmmExpr -> CmmAGraph
mkStore  l r  = mkMiddle $ CmmStore  l r
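
-- A minimal sketch (illustrative only): straight-line code is built by
-- gluing middle nodes with <*>; the register and target address are
-- assumed to come from the caller.
exampleStore :: LocalReg -> CmmExpr -> CmmAGraph
exampleStore reg addr =
      mkAssign (CmmLocal reg) (CmmLit (CmmInt 0 wordWidth))  -- zero the register
  <*> mkStore addr (CmmReg (CmmLocal reg))                   -- then store it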

---------- Control transfer
mkJump          :: CmmExpr -> [CmmActual] -> UpdFrameOffset -> CmmAGraph
mkJump e actuals updfr_off =
  lastWithArgs Jump Old NativeNodeCall actuals updfr_off $
    toCall e Nothing updfr_off 0

mkDirectJump    :: CmmExpr -> [CmmActual] -> UpdFrameOffset -> CmmAGraph
mkDirectJump e actuals updfr_off =
  lastWithArgs Jump Old NativeDirectCall actuals updfr_off $
    toCall e Nothing updfr_off 0

mkJumpGC        :: CmmExpr -> [CmmActual] -> UpdFrameOffset -> CmmAGraph
mkJumpGC e actuals updfr_off =
  lastWithArgs Jump Old GC actuals updfr_off $
    toCall e Nothing updfr_off 0

mkForeignJump   :: Convention -> CmmExpr -> [CmmActual] -> UpdFrameOffset
                -> CmmAGraph
mkForeignJump conv e actuals updfr_off =
  lastWithArgs Jump Old conv actuals updfr_off $
    toCall e Nothing updfr_off 0

mkForeignJumpExtra :: Convention -> CmmExpr -> [CmmActual]
                -> UpdFrameOffset -> (ByteOff, [(CmmExpr, ByteOff)])
                -> CmmAGraph
mkForeignJumpExtra conv e actuals updfr_off extra_stack =
  lastWithArgsAndExtraStack Jump Old conv actuals updfr_off extra_stack $
    toCall e Nothing updfr_off 0

mkCbranch       :: CmmExpr -> BlockId -> BlockId -> CmmAGraph
mkCbranch pred ifso ifnot = mkLast (CmmCondBranch pred ifso ifnot)

mkSwitch        :: CmmExpr -> [Maybe BlockId] -> CmmAGraph
mkSwitch e tbl   = mkLast $ CmmSwitch e tbl
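
-- A hedged sketch (illustrative only, assuming Nothing entries mark
-- cases that are never taken): a dense switch on a zero-based scrutinee.
exampleSwitch :: CmmExpr -> BlockId -> BlockId -> CmmAGraph
exampleSwitch scrut zero_id one_id =
  mkSwitch scrut [Just zero_id, Just one_id, Nothing]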

mkReturn        :: CmmExpr -> [CmmActual] -> UpdFrameOffset -> CmmAGraph
mkReturn e actuals updfr_off =
  lastWithArgs Ret  Old NativeReturn actuals updfr_off $
    toCall e Nothing updfr_off 0
    -- where e = CmmLoad (CmmStackSlot Old updfr_off) gcWord

mkReturnSimple  :: [CmmActual] -> UpdFrameOffset -> CmmAGraph
mkReturnSimple actuals updfr_off =
  lastWithArgs Ret  Old NativeReturn actuals updfr_off $
    toCall e Nothing updfr_off 0
    where e = CmmLoad (CmmStackSlot Old updfr_off) gcWord

mkBranch        :: BlockId -> CmmAGraph
mkBranch bid     = mkLast (CmmBranch bid)

mkFinalCall   :: CmmExpr -> CCallConv -> [CmmActual] -> UpdFrameOffset
              -> CmmAGraph
mkFinalCall f _ actuals updfr_off =
  lastWithArgs Call Old NativeDirectCall actuals updfr_off $
    toCall f Nothing updfr_off 0

mkCallReturnsTo :: CmmExpr -> Convention -> [CmmActual]
                -> BlockId
                -> ByteOff
                -> UpdFrameOffset
                -> (ByteOff, [(CmmExpr,ByteOff)])
                -> CmmAGraph
mkCallReturnsTo f callConv actuals ret_lbl ret_off updfr_off extra_stack =
  lastWithArgsAndExtraStack Call (Young ret_lbl) callConv actuals
     updfr_off extra_stack $
       toCall f (Just ret_lbl) updfr_off ret_off

mkUnsafeCall  :: ForeignTarget -> [CmmFormal] -> [CmmActual] -> CmmAGraph
mkUnsafeCall t fs as = mkMiddle $ CmmUnsafeForeignCall t fs as


--------------------------------------------------------------------------




-- Why are we inserting extra blocks that simply branch to the successors?
-- Because in addition to the branch instruction, @mkBranch@ will insert
-- a necessary adjustment to the stack pointer.


-- For debugging purposes, we can stub out dead stack slots:
stackStubExpr :: Width -> CmmExpr
stackStubExpr w = CmmLit (CmmInt 0 w)
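
-- A small sketch (illustrative only): stubbing out a dead word-sized
-- slot in the Old area at a given byte offset.
exampleStubDeadSlot :: ByteOff -> CmmAGraph
exampleStubDeadSlot off = mkStore (CmmStackSlot Old off) (stackStubExpr wordWidth)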

-- When we copy in parameters, we usually want to put overflow
-- parameters on the stack, but sometimes we want to pass
-- the variables in their spill slots.
-- Therefore, for copying arguments and results, we provide different
-- functions: one that passes the arguments in an overflow area and one
-- that passes them in their spill slots.
copyInOflow  :: Convention -> Area -> [CmmFormal] -> (Int, CmmAGraph)

copyInOflow conv area formals = (offset, catAGraphs $ map mkMiddle nodes)
  where (offset, nodes) = copyIn oneCopyOflowI conv area formals
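
-- A hedged sketch (illustrative only): generating the copy-in code for a
-- procedure's formal parameters and prepending it to the body, keeping
-- the byte count that copyInOflow reports alongside.
exampleEntryCode :: Convention -> [CmmFormal] -> CmmAGraph -> (Int, CmmAGraph)
exampleEntryCode conv formals body =
  let (arg_space, copy_in) = copyInOflow conv Old formals
  in  (arg_space, copy_in <*> body)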

type SlotCopier = Area -> (LocalReg, ByteOff) -> (ByteOff, [CmmNode O O]) ->
                          (ByteOff, [CmmNode O O])
type CopyIn  = SlotCopier -> Convention -> Area -> [CmmFormal] -> (ByteOff, [CmmNode O O])

-- Return the number of bytes used for copying arguments, as well as the
-- instructions to copy the arguments.
copyIn :: CopyIn
copyIn oflow conv area formals =
  foldr ci (init_offset, []) args'
  where ci (reg, RegisterParam r) (n, ms) =
          (n, CmmAssign (CmmLocal reg) (CmmReg $ CmmGlobal r) : ms)
        ci (r, StackParam off) (n, ms) = oflow area (r, off) (n, ms)
        init_offset = widthInBytes wordWidth -- infotable
        args  = assignArgumentsPos conv localRegType formals
        args' = foldl adjust [] args
          where adjust rst (v, StackParam off) = (v, StackParam (off + init_offset)) : rst
                adjust rst x@(_, RegisterParam _) = x : rst

-- Copy-in one arg, using overflow space if needed.
oneCopyOflowI :: SlotCopier
oneCopyOflowI area (reg, off) (n, ms) =
  (max n off, CmmAssign (CmmLocal reg) (CmmLoad (CmmStackSlot area off) ty) : ms)
  where ty = localRegType reg

-- Factoring out the common parts of the copyout functions yielded something
-- more complicated:

data Transfer = Call | Jump | Ret deriving Eq

copyOutOflow :: Convention -> Transfer -> Area -> [CmmActual]
             -> UpdFrameOffset
             -> (ByteOff, [(CmmExpr,ByteOff)]) -- extra stack stuff
             -> (Int, CmmAGraph)

-- Generate code to move the actual parameters into the locations
-- required by the calling convention.  This includes a store for the
-- return address.
--
-- The argument layout function ignores the pointer to the info table,
-- so we slot that in here. When copying-out to a young area, we set
-- the info table for return and adjust the offsets of the other
-- parameters.  If this is a call instruction, we adjust the offsets
-- of the other parameters.
copyOutOflow conv transfer area actuals updfr_off
  (extra_stack_off, extra_stack_stuff)
  = foldr co (init_offset, mkNop) (args' ++ stack_params)
  where 
    co (v, RegisterParam r) (n, ms) = (n, mkAssign (CmmGlobal r) v <*> ms)
    co (v, StackParam off)  (n, ms) = (max n off, mkStore (CmmStackSlot area off) v <*> ms)

    stack_params = [ (e, StackParam (off + init_offset))
                   | (e,off) <- extra_stack_stuff ]

    (setRA, init_offset) =
      case area of
            Young id -> id `seq` -- Generate a store instruction for
                                 -- the return address if making a call
                  if transfer == Call then
                    ([(CmmLit (CmmBlock id), StackParam init_offset)],
                     widthInBytes wordWidth)
                  else ([], 0)
            Old -> ([], updfr_off)

    arg_offset = init_offset + extra_stack_off

    args :: [(CmmExpr, ParamLocation)]   -- The argument and where to put it
    args = assignArgumentsPos conv cmmExprType actuals

    args' = foldl adjust setRA args
      where adjust rst   (v, StackParam off)  = (v, StackParam (off + arg_offset)) : rst
            adjust rst x@(_, RegisterParam _) = x : rst

mkCallEntry :: Convention -> [CmmFormal] -> (Int, CmmAGraph)
mkCallEntry conv formals = copyInOflow conv Old formals

lastWithArgs :: Transfer -> Area -> Convention -> [CmmActual]
             -> UpdFrameOffset
             -> (ByteOff -> CmmAGraph)
             -> CmmAGraph
lastWithArgs transfer area conv actuals updfr_off last =
  let (outArgs, copies) = copyOutOflow conv transfer area actuals
                             updfr_off noExtraStack in
  copies <*> last outArgs

lastWithArgsAndExtraStack :: Transfer -> Area -> Convention -> [CmmActual]
             -> UpdFrameOffset -> (ByteOff, [(CmmExpr,ByteOff)])
             -> (ByteOff -> CmmAGraph)
             -> CmmAGraph
lastWithArgsAndExtraStack transfer area conv actuals updfr_off
                          extra_stack last =
  let (outArgs, copies) = copyOutOflow conv transfer area actuals
                             updfr_off extra_stack in
  copies <*> last outArgs

noExtraStack :: (ByteOff, [(CmmExpr,ByteOff)])
noExtraStack = (0,[])
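
-- A hedged sketch (illustrative only) of the pattern used by the mk*
-- constructors above: copy the arguments out for the chosen convention,
-- then end the block with a call whose argument-area size is whatever
-- copyOutOflow computed.
exampleJumpLike :: CmmExpr -> [CmmActual] -> UpdFrameOffset -> CmmAGraph
exampleJumpLike fun actuals updfr_off =
  lastWithArgs Jump Old NativeNodeCall actuals updfr_off $ \arg_space ->
    toCall fun Nothing updfr_off 0 arg_space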

toCall :: CmmExpr -> Maybe BlockId -> UpdFrameOffset -> ByteOff -> ByteOff
       -> CmmAGraph
toCall e cont updfr_off res_space arg_space =
  mkLast $ CmmCall e cont arg_space res_space updfr_off