{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE ExplicitForAll #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE StandaloneDeriving #-}
{-# LANGUAGE UndecidableInstances #-}

-- CmmNode type for representation using Hoopl graphs.

module CmmNode (
     CmmNode(..), CmmFormal, CmmActual, CmmTickish,
     UpdFrameOffset, Convention(..),
     ForeignConvention(..), ForeignTarget(..), foreignTargetHints,
     CmmReturnInfo(..),
     mapExp, mapExpDeep, wrapRecExp, foldExp, foldExpDeep, wrapRecExpf,
     mapExpM, mapExpDeepM, wrapRecExpM, mapSuccessors,

     -- * Tick scopes
     CmmTickScope(..), isTickSubScope, combineTickScopes,
  ) where

import CodeGen.Platform
import CmmExpr
import CmmSwitch
import DynFlags
import FastString
import ForeignCall
import Outputable
import SMRep
import CoreSyn (Tickish)
import qualified Unique as U

import Hoopl.Block
import Hoopl.Graph
import Hoopl.Label
import Data.Maybe
import Data.List (tails,sortBy)
import Prelude hiding (succ)
import Unique (nonDetCmpUnique)
import Util


------------------------
-- CmmNode

#define ULabel {-# UNPACK #-} !Label

data CmmNode e x where
  CmmEntry :: ULabel -> CmmTickScope -> CmmNode C O

  CmmComment :: FastString -> CmmNode O O

    -- Tick annotation, covering Cmm code in our tick scope. We only
    -- expect non-code @Tickish@ at this point (e.g. @SourceNote@).
    -- See Note [CmmTick scoping details]
  CmmTick :: !CmmTickish -> CmmNode O O

    -- Unwind pseudo-instruction, encoding stack unwinding
    -- instructions for a debugger. This describes how to reconstruct
    -- the "old" value of a register if we want to navigate the stack
    -- up one frame. Having unwind information for @Sp@ will allow the
    -- debugger to "walk" the stack.
    --
    -- See Note [What is this unwinding business?] in Debug
  CmmUnwind :: [(GlobalReg, Maybe CmmExpr)] -> CmmNode O O

  CmmAssign :: !CmmReg -> !CmmExpr -> CmmNode O O
    -- Assign to register

  CmmStore :: !CmmExpr -> !CmmExpr -> CmmNode O O
    -- Assign to memory location.  Size is
    -- given by cmmExprType of the rhs.

  CmmUnsafeForeignCall ::       -- An unsafe foreign call;
                                -- see Note [Foreign calls]
                                -- Like a "fat machine instruction"; can occur
                                -- in the middle of a block
      ForeignTarget ->          -- call target
      [CmmFormal] ->            -- zero or more results
      [CmmActual] ->            -- zero or more arguments
      CmmNode O O
      -- Semantics: clobbers any GlobalRegs for which callerSaves r == True
      -- See Note [Unsafe foreign calls clobber caller-save registers]
      --
      -- Invariant: the arguments and the ForeignTarget must not
      -- mention any registers for which CodeGen.Platform.callerSaves
      -- is True.  See Note [Register parameter passing].

  CmmBranch :: ULabel -> CmmNode O C
                                   -- Goto another block in the same procedure

  CmmCondBranch :: {                 -- conditional branch
      cml_pred :: CmmExpr,
      cml_true, cml_false :: ULabel,
      cml_likely :: Maybe Bool       -- likely result of the conditional,
                                     -- if known
  } -> CmmNode O C

  CmmSwitch
    :: CmmExpr       -- Scrutinee, of some integral type
    -> SwitchTargets -- Cases. See Note [SwitchTargets]
    -> CmmNode O C

  CmmCall :: {                -- A native call or tail call
      cml_target :: CmmExpr,  -- never a CmmPrim to a CallishMachOp!

      cml_cont :: Maybe Label,
          -- Label of continuation (Nothing for return or tail call)
          --
          -- Note [Continuation BlockId]: these BlockIds are called
          -- Continuation BlockIds, and are the only BlockIds that can
          -- occur in CmmExprs, namely as (CmmLit (CmmBlock b)) or
          -- (CmmStackSlot (Young b) _).

      cml_args_regs :: [GlobalReg],
          -- The argument GlobalRegs (Rx, Fx, Dx, Lx) that are passed
          -- to the call.  This is essential information for the
          -- native code generator's register allocator; without
          -- knowing which GlobalRegs are live it has to assume that
          -- they are all live.  This list should only include
          -- GlobalRegs that are mapped to real machine registers on
          -- the target platform.

      cml_args :: ByteOff,
          -- Byte offset, from the *old* end of the Area associated with
          -- the Label (if cml_cont = Nothing, then Old area), of
          -- youngest outgoing arg.  Set the stack pointer to this before
          -- transferring control.
          -- (NB: an update frame might also have been stored in the Old
          --      area, but it'll be in an older part than the args.)

      cml_ret_args :: ByteOff,
          -- For calls *only*, the byte offset for youngest returned value
          -- This is really needed at the *return* point rather than here
          -- at the call, but in practice it's convenient to record it here.

      cml_ret_off :: ByteOff
        -- For calls *only*, the byte offset of the base of the frame that
        -- must be described by the info table for the return point.
        -- The older words are an update frame, which has its own
        -- info-table and layout information

        -- From a liveness point of view, the stack words older than
        -- cml_ret_off are treated as live, even if the sequel of
        -- the call goes into a loop.
  } -> CmmNode O C

  CmmForeignCall :: {           -- A safe foreign call; see Note [Foreign calls]
                                -- Always the last node of a block
      tgt   :: ForeignTarget,   -- call target and convention
      res   :: [CmmFormal],     -- zero or more results
      args  :: [CmmActual],     -- zero or more arguments; see Note [Register parameter passing]
      succ  :: ULabel,          -- Label of continuation
      ret_args :: ByteOff,      -- same as cml_ret_args
      ret_off :: ByteOff,       -- same as cml_ret_off
      intrbl :: Bool            -- whether or not the call is interruptible
  } -> CmmNode O C

{- Note [Foreign calls]
~~~~~~~~~~~~~~~~~~~~~~~
A CmmUnsafeForeignCall is used for *unsafe* foreign calls;
a CmmForeignCall call is used for *safe* foreign calls.

Unsafe ones are mostly easy: think of them as a "fat machine
instruction".  In particular, they do *not* kill all live registers,
just the registers they return to (there was a bit of code in GHC that
conservatively assumed otherwise.)  However, see Note [Register parameter passing].

Safe ones are trickier.  A safe foreign call
     r = f(x)
ultimately expands to
     push "return address"      -- Never used to return to;
                                -- just points to an info table
     save registers into TSO
     call suspendThread
     r = f(x)                   -- Make the call
     call resumeThread
     restore registers
     pop "return address"
We cannot "lower" a safe foreign call to this sequence of Cmms, because
after we've saved Sp all the Cmm optimiser's assumptions are broken.

Note that a safe foreign call needs an info table.

So Safe Foreign Calls must remain as last nodes until the stack is
made manifest in CmmLayoutStack, where they are lowered into the above
sequence.
-}

{- Note [Unsafe foreign calls clobber caller-save registers]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

A foreign call is defined to clobber any GlobalRegs that are mapped to
caller-saves machine registers (according to the prevailing C ABI).
StgCmmUtils.callerSaves tells you which GlobalRegs are caller-saves.

This is a design choice that makes it easier to generate code later.
We could instead choose to say that foreign calls do *not* clobber
caller-saves regs, but then we would have to figure out which regs
were live across the call later and insert some saves/restores.

Furthermore when we generate code we never have any GlobalRegs live
across a call, because they are always copied-in to LocalRegs and
copied-out again before making a call/jump.  So all we have to do is
avoid any code motion that would make a caller-saves GlobalReg live
across a foreign call during subsequent optimisations.
-}

{- Note [Register parameter passing]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
On certain architectures, some registers are utilized for parameter
passing in the C calling convention.  For example, in x86-64 Linux
convention, rdi, rsi, rdx and rcx (as well as r8 and r9) may be used for
argument passing.  These are registers R3-R6, which our generated
code may also be using; as a result, it's necessary to save these
values before doing a foreign call.  This is done during initial
code generation in callerSaveVolatileRegs in StgCmmUtils.hs.  However,
one result of doing this is that the contents of these registers
may mysteriously change if referenced inside the arguments.  This
is dangerous, so you'll need to disable inlining much in the same
way as is done in cmm/CmmOpt.hs currently.  We should fix this!
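
An illustrative sketch only (the fragment below is hypothetical, not code
from this module): if an argument of an unsafe foreign call mentions one of
these registers directly, say

     foreign "ccall" f(R3 + 1);

then the save/restore of R3 that callerSaveVolatileRegs inserts around the
call can change the value that the argument expression actually reads.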
-}

---------------------------------------------
-- Eq instance of CmmNode

deriving instance Eq (CmmNode e x)

----------------------------------------------
-- Hoopl instances of CmmNode

instance NonLocal CmmNode where
  entryLabel (CmmEntry l _) = l

  successors (CmmBranch l) = [l]
  successors (CmmCondBranch {cml_true=t, cml_false=f}) = [f, t] -- meets layout constraint
  successors (CmmSwitch _ ids) = switchTargetsToList ids
  successors (CmmCall {cml_cont=l}) = maybeToList l
  successors (CmmForeignCall {succ=l}) = [l]


--------------------------------------------------
-- Various helper types

type CmmActual = CmmExpr
type CmmFormal = LocalReg

type UpdFrameOffset = ByteOff

-- | A convention maps a list of values (function arguments or return
-- values) to registers or stack locations.
data Convention
  = NativeDirectCall
       -- ^ top-level Haskell functions use @NativeDirectCall@, which
       -- maps arguments to registers starting with R2, according to
       -- how many registers are available on the platform.  This
       -- convention ignores R1, because for a top-level function call
       -- the function closure is implicit, and doesn't need to be passed.
  | NativeNodeCall
       -- ^ non-top-level Haskell functions, which pass the address of
       -- the function closure in R1 (regardless of whether R1 is a
       -- real register or not), and the rest of the arguments in
       -- registers or on the stack.
  | NativeReturn
       -- ^ a native return.  The convention for returns depends on
       -- how many values are returned: for just one value returned,
       -- the appropriate register is used (R1, F1, etc.), regardless
       -- of whether it is a real register or not.  For multiple
       -- values returned, they are mapped to registers or the stack.
  | Slow
       -- ^ Slow entry points: all args pushed on the stack
  | GC
       -- ^ Entry to the garbage collector: uses the node reg!
       -- (TODO: I don't think we need this --SDM)
  deriving( Eq )

data ForeignConvention
  = ForeignConvention
        CCallConv               -- Which foreign-call convention
        [ForeignHint]           -- Extra info about the args
        [ForeignHint]           -- Extra info about the result
        CmmReturnInfo
  deriving Eq

data CmmReturnInfo
  = CmmMayReturn
  | CmmNeverReturns
  deriving ( Eq )

data ForeignTarget        -- The target of a foreign call
  = ForeignTarget                -- A foreign procedure
        CmmExpr                  -- Its address
        ForeignConvention        -- Its calling convention
  | PrimTarget            -- A possibly-side-effecting machine operation
        CallishMachOp            -- Which one
  deriving Eq

foreignTargetHints :: ForeignTarget -> ([ForeignHint], [ForeignHint])
foreignTargetHints target
  = ( res_hints ++ repeat NoHint
    , arg_hints ++ repeat NoHint )
  where
    (res_hints, arg_hints) =
       case target of
          PrimTarget op -> callishMachOpHints op
          ForeignTarget _ (ForeignConvention _ arg_hints res_hints _) ->
             (res_hints, arg_hints)
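
-- A minimal usage sketch (an assumption about typical call sites, not code
-- taken from this module): because both hint lists are padded with
-- 'repeat NoHint', callers can zip them directly against the actual results
-- and arguments without worrying about their lengths:
--
--   let (res_hints, arg_hints) = foreignTargetHints tgt
--       hinted_args = zip args arg_hints
--       hinted_res  = zip res  res_hints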

--------------------------------------------------
-- Instances of register and slot users / definers

instance UserOfRegs LocalReg (CmmNode e x) where
  foldRegsUsed dflags f !z n = case n of
    CmmAssign _ expr -> fold f z expr
    CmmStore addr rval -> fold f (fold f z addr) rval
    CmmUnsafeForeignCall t _ args -> fold f (fold f z t) args
    CmmCondBranch expr _ _ _ -> fold f z expr
    CmmSwitch expr _ -> fold f z expr
    CmmCall {cml_target=tgt} -> fold f z tgt
    CmmForeignCall {tgt=tgt, args=args} -> fold f (fold f z tgt) args
    _ -> z
    where fold :: forall a b. UserOfRegs LocalReg a
               => (b -> LocalReg -> b) -> b -> a -> b
          fold f z n = foldRegsUsed dflags f z n

instance UserOfRegs GlobalReg (CmmNode e x) where
  foldRegsUsed dflags f !z n = case n of
    CmmAssign _ expr -> fold f z expr
    CmmStore addr rval -> fold f (fold f z addr) rval
    CmmUnsafeForeignCall t _ args -> fold f (fold f z t) args
    CmmCondBranch expr _ _ _ -> fold f z expr
    CmmSwitch expr _ -> fold f z expr
    CmmCall {cml_target=tgt, cml_args_regs=args} -> fold f (fold f z args) tgt
    CmmForeignCall {tgt=tgt, args=args} -> fold f (fold f z tgt) args
    _ -> z
    where fold :: forall a b.  UserOfRegs GlobalReg a
               => (b -> GlobalReg -> b) -> b -> a -> b
          fold f z n = foldRegsUsed dflags f z n

instance (Ord r, UserOfRegs r CmmReg) => UserOfRegs r ForeignTarget where
  -- The (Ord r) in the context is necessary here
  -- See Note [Recursive superclasses] in TcInstDcls
  foldRegsUsed _      _ !z (PrimTarget _)      = z
  foldRegsUsed dflags f !z (ForeignTarget e _) = foldRegsUsed dflags f z e

instance DefinerOfRegs LocalReg (CmmNode e x) where
  foldRegsDefd dflags f !z n = case n of
    CmmAssign lhs _ -> fold f z lhs
    CmmUnsafeForeignCall _ fs _ -> fold f z fs
    CmmForeignCall {res=res} -> fold f z res
    _ -> z
    where fold :: forall a b. DefinerOfRegs LocalReg a
               => (b -> LocalReg -> b) -> b -> a -> b
          fold f z n = foldRegsDefd dflags f z n

instance DefinerOfRegs GlobalReg (CmmNode e x) where
  foldRegsDefd dflags f !z n = case n of
    CmmAssign lhs _ -> fold f z lhs
    CmmUnsafeForeignCall tgt _ _  -> fold f z (foreignTargetRegs tgt)
    CmmCall        {} -> fold f z activeRegs
    CmmForeignCall {} -> fold f z activeRegs
                      -- See Note [Safe foreign calls clobber STG registers]
    _ -> z
    where fold :: forall a b. DefinerOfRegs GlobalReg a
               => (b -> GlobalReg -> b) -> b -> a -> b
          fold f z n = foldRegsDefd dflags f z n

          platform = targetPlatform dflags
          activeRegs = activeStgRegs platform
          activeCallerSavesRegs = filter (callerSaves platform) activeRegs

          foreignTargetRegs (ForeignTarget _ (ForeignConvention _ _ _ CmmNeverReturns)) = []
          foreignTargetRegs _ = activeCallerSavesRegs

-- Note [Safe foreign calls clobber STG registers]
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
--
-- During the stack layout phase every safe foreign call is expanded into a
-- block that contains an unsafe foreign call (instead of the safe foreign
-- call) and ends with a normal call (see Note [Foreign calls]).  This means
-- that we must treat a safe foreign call as if it were a normal call (because
-- eventually it will be).  This is important if we try to run the sinking
-- pass before the stack layout phase.  Consider this example of what might go
-- wrong (this is Cmm code from the stablename001 test).  Here is the code
-- after common block elimination (before stack layout):
--
--  c1q6:
--      _s1pf::P64 = R1;
--      _c1q8::I64 = performMajorGC;
--      I64[(young<c1q9> + 8)] = c1q9;
--      foreign call "ccall" arg hints:  []  result hints:  [] (_c1q8::I64)(...)
--                   returns to c1q9 args: ([]) ress: ([])ret_args: 8ret_off: 8;
--  c1q9:
--      I64[(young<c1qb> + 8)] = c1qb;
--      R1 = _s1pc::P64;
--      call stg_makeStableName#(R1) returns to c1qb, args: 8, res: 8, upd: 8;
--
-- If we run sinking pass now (still before stack layout) we will get this:
--
--  c1q6:
--      I64[(young<c1q9> + 8)] = c1q9;
--      foreign call "ccall" arg hints:  []  result hints:  [] performMajorGC(...)
--                   returns to c1q9 args: ([]) ress: ([])ret_args: 8ret_off: 8;
--  c1q9:
--      I64[(young<c1qb> + 8)] = c1qb;
--      _s1pf::P64 = R1;         <------ _s1pf sunk past safe foreign call
--      R1 = _s1pc::P64;
--      call stg_makeStableName#(R1) returns to c1qb, args: 8, res: 8, upd: 8;
--
-- Notice that _s1pf was sunk past a foreign call.  When we run stack layout,
-- the safe call to performMajorGC will be turned into:
--
--  c1q6:
--      _s1pc::P64 = P64[Sp + 8];
--      I64[Sp - 8] = c1q9;
--      Sp = Sp - 8;
--      I64[I64[CurrentTSO + 24] + 16] = Sp;
--      P64[CurrentNursery + 8] = Hp + 8;
--      (_u1qI::I64) = call "ccall" arg hints:  [PtrHint,]
--                           result hints:  [PtrHint] suspendThread(BaseReg, 0);
--      call "ccall" arg hints:  []  result hints:  [] performMajorGC();
--      (_u1qJ::I64) = call "ccall" arg hints:  [PtrHint]
--                           result hints:  [PtrHint] resumeThread(_u1qI::I64);
--      BaseReg = _u1qJ::I64;
--      _u1qK::P64 = CurrentTSO;
--      _u1qL::P64 = I64[_u1qK::P64 + 24];
--      Sp = I64[_u1qL::P64 + 16];
--      SpLim = _u1qL::P64 + 192;
--      HpAlloc = 0;
--      Hp = I64[CurrentNursery + 8] - 8;
--      HpLim = I64[CurrentNursery] + (%MO_SS_Conv_W32_W64(I32[CurrentNursery + 48]) * 4096 - 1);
--      call (I64[Sp])() returns to c1q9, args: 8, res: 8, upd: 8;
--  c1q9:
--      I64[(young<c1qb> + 8)] = c1qb;
--      _s1pf::P64 = R1;         <------ INCORRECT!
--      R1 = _s1pc::P64;
--      call stg_makeStableName#(R1) returns to c1qb, args: 8, res: 8, upd: 8;
--
-- Notice that c1q6 now ends with a call. Sinking _s1pf::P64 = R1 past that
-- call is clearly incorrect. This is what would happen if we assumed that
-- a safe foreign call has the same semantics as an unsafe foreign call.  To
-- prevent this we need to treat a safe foreign call as if it were a normal call.

-----------------------------------
-- mapping Expr in CmmNode

mapForeignTarget :: (CmmExpr -> CmmExpr) -> ForeignTarget -> ForeignTarget
mapForeignTarget exp   (ForeignTarget e c) = ForeignTarget (exp e) c
mapForeignTarget _   m@(PrimTarget _)      = m

wrapRecExp :: (CmmExpr -> CmmExpr) -> CmmExpr -> CmmExpr
-- Take a transformer on expressions and apply it recursively.
-- (wrapRecExp f e) first recursively applies itself to sub-expressions of e
--                  then  uses f to rewrite the resulting expression
wrapRecExp f (CmmMachOp op es)    = f (CmmMachOp op $ map (wrapRecExp f) es)
wrapRecExp f (CmmLoad addr ty)    = f (CmmLoad (wrapRecExp f addr) ty)
wrapRecExp f e                    = f e

mapExp :: (CmmExpr -> CmmExpr) -> CmmNode e x -> CmmNode e x
mapExp _ f@(CmmEntry{})                          = f
mapExp _ m@(CmmComment _)                        = m
mapExp _ m@(CmmTick _)                           = m
mapExp f   (CmmUnwind regs)                      = CmmUnwind (map (fmap (fmap f)) regs)
mapExp f   (CmmAssign r e)                       = CmmAssign r (f e)
mapExp f   (CmmStore addr e)                     = CmmStore (f addr) (f e)
mapExp f   (CmmUnsafeForeignCall tgt fs as)      = CmmUnsafeForeignCall (mapForeignTarget f tgt) fs (map f as)
mapExp _ l@(CmmBranch _)                         = l
mapExp f   (CmmCondBranch e ti fi l)             = CmmCondBranch (f e) ti fi l
mapExp f   (CmmSwitch e ids)                     = CmmSwitch (f e) ids
mapExp f   n@CmmCall {cml_target=tgt}            = n{cml_target = f tgt}
mapExp f   (CmmForeignCall tgt fs as succ ret_args updfr intrbl) = CmmForeignCall (mapForeignTarget f tgt) fs (map f as) succ ret_args updfr intrbl

mapExpDeep :: (CmmExpr -> CmmExpr) -> CmmNode e x -> CmmNode e x
mapExpDeep f = mapExp $ wrapRecExp f
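
-- A small illustration (the rewrite below is hypothetical, not code from
-- this module): because wrapRecExp rewrites sub-expressions before handing
-- the result to the supplied function, a rewrite only has to consider one
-- level of the expression at a time:
--
--   shiftLoads :: CmmExpr -> CmmExpr
--   shiftLoads (CmmLoad addr ty) = CmmLoad (offsetAddr addr) ty
--   shiftLoads e                 = e
--
--   node' = mapExpDeep shiftLoads node   -- nested loads are rewritten too
--
-- where 'offsetAddr' stands for whatever address adjustment the client needs.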

------------------------------------------------------------------------
-- mapping Expr in CmmNode, but not performing allocation if no changes

mapForeignTargetM :: (CmmExpr -> Maybe CmmExpr) -> ForeignTarget -> Maybe ForeignTarget
mapForeignTargetM f (ForeignTarget e c) = (\x -> ForeignTarget x c) `fmap` f e
mapForeignTargetM _ (PrimTarget _)      = Nothing

wrapRecExpM :: (CmmExpr -> Maybe CmmExpr) -> (CmmExpr -> Maybe CmmExpr)
-- (wrapRecExpM f e) first recursively applies itself to sub-expressions of e
--                   then  gives f a chance to rewrite the resulting expression
wrapRecExpM f n@(CmmMachOp op es)  = maybe (f n) (f . CmmMachOp op)    (mapListM (wrapRecExpM f) es)
wrapRecExpM f n@(CmmLoad addr ty)  = maybe (f n) (f . flip CmmLoad ty) (wrapRecExpM f addr)
wrapRecExpM f e                    = f e

mapExpM :: (CmmExpr -> Maybe CmmExpr) -> CmmNode e x -> Maybe (CmmNode e x)
mapExpM _ (CmmEntry{})              = Nothing
mapExpM _ (CmmComment _)            = Nothing
mapExpM _ (CmmTick _)               = Nothing
mapExpM f (CmmUnwind regs)          = CmmUnwind `fmap` mapM (\(r,e) -> mapM f e >>= \e' -> pure (r,e')) regs
mapExpM f (CmmAssign r e)           = CmmAssign r `fmap` f e
mapExpM f (CmmStore addr e)         = (\[addr', e'] -> CmmStore addr' e') `fmap` mapListM f [addr, e]
mapExpM _ (CmmBranch _)             = Nothing
mapExpM f (CmmCondBranch e ti fi l) = (\x -> CmmCondBranch x ti fi l) `fmap` f e
mapExpM f (CmmSwitch e tbl)         = (\x -> CmmSwitch x tbl)       `fmap` f e
mapExpM f (CmmCall tgt mb_id r o i s) = (\x -> CmmCall x mb_id r o i s) `fmap` f tgt
mapExpM f (CmmUnsafeForeignCall tgt fs as)
    = case mapForeignTargetM f tgt of
        Just tgt' -> Just (CmmUnsafeForeignCall tgt' fs (mapListJ f as))
        Nothing   -> (\xs -> CmmUnsafeForeignCall tgt fs xs) `fmap` mapListM f as
mapExpM f (CmmForeignCall tgt fs as succ ret_args updfr intrbl)
    = case mapForeignTargetM f tgt of
        Just tgt' -> Just (CmmForeignCall tgt' fs (mapListJ f as) succ ret_args updfr intrbl)
        Nothing   -> (\xs -> CmmForeignCall tgt fs xs succ ret_args updfr intrbl) `fmap` mapListM f as

-- share as much as possible
mapListM :: (a -> Maybe a) -> [a] -> Maybe [a]
mapListM f xs = let (b, r) = mapListT f xs
                in if b then Just r else Nothing

mapListJ :: (a -> Maybe a) -> [a] -> [a]
mapListJ f xs = snd (mapListT f xs)
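
-- mapListT is the shared worker for mapListM and mapListJ: the Bool records
-- whether any element was rewritten, and the result list re-uses the
-- original spine wherever possible.  The 'tails xs' component lets it hand
-- back the untouched suffix of the input verbatim once no element to the
-- right of the current position has changed.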

mapListT :: (a -> Maybe a) -> [a] -> (Bool, [a])
mapListT f xs = foldr g (False, []) (zip3 (tails xs) xs (map f xs))
    where g (_,   y, Nothing) (True, ys)  = (True,  y:ys)
          g (_,   _, Just y)  (True, ys)  = (True,  y:ys)
          g (ys', _, Nothing) (False, _)  = (False, ys')
          g (_,   _, Just y)  (False, ys) = (True,  y:ys)

mapExpDeepM :: (CmmExpr -> Maybe CmmExpr) -> CmmNode e x -> Maybe (CmmNode e x)
mapExpDeepM f = mapExpM $ wrapRecExpM f

-----------------------------------
-- folding Expr in CmmNode

foldExpForeignTarget :: (CmmExpr -> z -> z) -> ForeignTarget -> z -> z
foldExpForeignTarget exp (ForeignTarget e _) z = exp e z
foldExpForeignTarget _   (PrimTarget _)      z = z

-- Take a folder on expressions and apply it recursively.
-- Specifically (wrapRecExpf f e z) deals with CmmMachOp and CmmLoad
-- itself, delegating all the other CmmExpr forms to 'f'.
wrapRecExpf :: (CmmExpr -> z -> z) -> CmmExpr -> z -> z
wrapRecExpf f e@(CmmMachOp _ es) z = foldr (wrapRecExpf f) (f e z) es
wrapRecExpf f e@(CmmLoad addr _) z = wrapRecExpf f addr (f e z)
wrapRecExpf f e                  z = f e z

foldExp :: (CmmExpr -> z -> z) -> CmmNode e x -> z -> z
foldExp _ (CmmEntry {}) z                         = z
foldExp _ (CmmComment {}) z                       = z
foldExp _ (CmmTick {}) z                          = z
foldExp f (CmmUnwind xs) z                        = foldr (maybe id f) z (map snd xs)
foldExp f (CmmAssign _ e) z                       = f e z
foldExp f (CmmStore addr e) z                     = f addr $ f e z
foldExp f (CmmUnsafeForeignCall t _ as) z         = foldr f (foldExpForeignTarget f t z) as
foldExp _ (CmmBranch _) z                         = z
foldExp f (CmmCondBranch e _ _ _) z               = f e z
foldExp f (CmmSwitch e _) z                       = f e z
foldExp f (CmmCall {cml_target=tgt}) z            = f tgt z
foldExp f (CmmForeignCall {tgt=tgt, args=args}) z = foldr f (foldExpForeignTarget f tgt z) args

foldExpDeep :: (CmmExpr -> z -> z) -> CmmNode e x -> z -> z
foldExpDeep f = foldExp (wrapRecExpf f)
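
-- A minimal usage sketch (hypothetical helper, not part of this module):
-- collect every expression mentioned anywhere in a node, including the
-- sub-expressions of CmmMachOp and CmmLoad:
--
--   allExprs :: CmmNode e x -> [CmmExpr]
--   allExprs node = foldExpDeep (:) node []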

-- -----------------------------------------------------------------------------

mapSuccessors :: (Label -> Label) -> CmmNode O C -> CmmNode O C
mapSuccessors f (CmmBranch bid)         = CmmBranch (f bid)
mapSuccessors f (CmmCondBranch p y n l) = CmmCondBranch p (f y) (f n) l
mapSuccessors f (CmmSwitch e ids)       = CmmSwitch e (mapSwitchTargets f ids)
mapSuccessors _ n = n
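
-- A minimal usage sketch (hypothetical helper, not part of this module):
-- redirect every successor edge that points at 'old' towards 'new':
--
--   redirect :: Label -> Label -> CmmNode O C -> CmmNode O C
--   redirect old new = mapSuccessors (\l -> if l == old then new else l)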

-- -----------------------------------------------------------------------------

-- | Tickish in Cmm context (annotations only)
type CmmTickish = Tickish ()

-- | Tick scope identifier, allowing us to reason about what
-- annotations in a Cmm block should scope over. We especially take
-- care to allow optimisations to reorganise blocks without losing
-- tick association in the process.
data CmmTickScope
  = GlobalScope
    -- ^ The global scope is the "root" of the scope graph. Every
    -- scope is a sub-scope of the global scope. It doesn't make sense
    -- to add ticks to this scope. On the other hand, this means that
    -- setting this scope on a block means no ticks apply to it.

  | SubScope !U.Unique CmmTickScope
    -- ^ Constructs a new sub-scope to an existing scope. This allows
    -- us to translate Core-style scoping rules (see @tickishScoped@)
    -- into the Cmm world.  Consider the following code:
    --
    --   tick<1> case ... of
    --             A -> tick<2> ...
    --             B -> tick<3> ...
    --
    -- We want the top-level tick annotation to apply to blocks
    -- generated for the A and B alternatives. We can achieve that by
    -- generating tick<1> into a block with scope a, while the code
    -- for alternatives A and B gets generated into sub-scopes a/b and
    -- a/c respectively.

  | CombinedScope CmmTickScope CmmTickScope
    -- ^ A combined scope scopes over everything that the two given
    -- scopes cover. It is therefore a sub-scope of either scope. This
    -- is required for optimisations. Consider common block elimination:
    --
    --   A -> tick<2> case ... of
    --     C -> [common]
    --   B -> tick<3> case ... of
    --     D -> [common]
    --
    -- We will generate code for the C and D alternatives, and figure
    -- out afterwards that it's actually common code. Scoping rules
    -- dictate that the resulting common block needs to be covered by
    -- both tick<2> and tick<3>, therefore we need to construct a
    -- scope that is a child of *both* scopes.  Now we can do that: if
    -- we assign the scopes a/c and b/d to the common-ed up blocks,
    -- the new block could have a combined tick scope a/c+b/d, which
    -- both tick<2> and tick<3> apply to.

-- Note [CmmTick scoping details]:
--
-- The scope of a @CmmTick@ is given by the @CmmEntry@ node of the
-- same block. Note that as a result of this, optimisations making
-- tick scopes more specific can *reduce* the amount of code a tick
-- scopes over. Fixing this would require a separate @CmmTickScope@
-- field for @CmmTick@. Right now we do not do this simply because I
-- couldn't find an example where it actually mattered -- multiple
-- blocks within the same scope generally jump to each other, which
-- prevents common block elimination from happening in the first
-- place. But this is no strong reason, so if Cmm optimisations become
-- more involved in future this might have to be revisited.

-- | Output all scope paths.
scopeToPaths :: CmmTickScope -> [[U.Unique]]
scopeToPaths GlobalScope           = [[]]
scopeToPaths (SubScope u s)        = map (u:) (scopeToPaths s)
scopeToPaths (CombinedScope s1 s2) = scopeToPaths s1 ++ scopeToPaths s2

-- | Returns the head uniques of the scopes. This is based on the
-- assumption that the @Unique@ of @SubScope@ identifies the
-- underlying super-scope. Used for efficient equality and comparison,
-- see below.
scopeUniques :: CmmTickScope -> [U.Unique]
scopeUniques GlobalScope           = []
scopeUniques (SubScope u _)        = [u]
scopeUniques (CombinedScope s1 s2) = scopeUniques s1 ++ scopeUniques s2

-- Equality and order are based on the head uniques defined above.  We
-- take care to short-cut the (extremely) common cases.
instance Eq CmmTickScope where
  GlobalScope    == GlobalScope     = True
  GlobalScope    == _               = False
  _              == GlobalScope     = False
  (SubScope u _) == (SubScope u' _) = u == u'
  (SubScope _ _) == _               = False
  _              == (SubScope _ _)  = False
  scope          == scope'          =
    sortBy nonDetCmpUnique (scopeUniques scope) ==
    sortBy nonDetCmpUnique (scopeUniques scope')
    -- This is still deterministic because
    -- the order is the same for equal lists

-- This is non-deterministic but we do not currently support deterministic
-- code-generation. See Note [Unique Determinism and code generation]
-- See Note [No Ord for Unique]
instance Ord CmmTickScope where
  compare GlobalScope    GlobalScope     = EQ
  compare GlobalScope    _               = LT
  compare _              GlobalScope     = GT
  compare (SubScope u _) (SubScope u' _) = nonDetCmpUnique u u'
  compare scope scope'                   = cmpList nonDetCmpUnique
     (sortBy nonDetCmpUnique $ scopeUniques scope)
     (sortBy nonDetCmpUnique $ scopeUniques scope')

instance Outputable CmmTickScope where
  ppr GlobalScope     = text "global"
  ppr (SubScope us GlobalScope)
                      = ppr us
  ppr (SubScope us s) = ppr s <> char '/' <> ppr us
  ppr combined        = parens $ hcat $ punctuate (char '+') $
                        map (hcat . punctuate (char '/') . map ppr . reverse) $
                        scopeToPaths combined

-- | Checks whether two tick scopes are sub-scopes of each other. True
-- if the two scopes are equal.
isTickSubScope :: CmmTickScope -> CmmTickScope -> Bool
isTickSubScope = cmp
  where cmp _              GlobalScope             = True
        cmp GlobalScope    _                       = False
        cmp (CombinedScope s1 s2) s'               = cmp s1 s' && cmp s2 s'
        cmp s              (CombinedScope s1' s2') = cmp s s1' || cmp s s2'
        cmp (SubScope u s) s'@(SubScope u' _)      = u == u' || cmp s s'

-- | Combine two tick scopes. The new scope should be a sub-scope of
-- both parameters. We simplify automatically if one tick scope is a
-- sub-scope of the other already.
combineTickScopes :: CmmTickScope -> CmmTickScope -> CmmTickScope
combineTickScopes s1 s2
  | s1 `isTickSubScope` s2 = s1
  | s2 `isTickSubScope` s1 = s2
  | otherwise              = CombinedScope s1 s2
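
-- For example (illustrative only): combining the scopes a/c and b/d from the
-- CombinedScope documentation above yields the combined scope a/c+b/d, while
-- combining a scope with one of its own sub-scopes (say a and a/b) simply
-- returns the more specific scope a/b.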