Commit 957bf375 authored by simonpj

[project @ 2003-02-04 15:09:38 by simonpj]

-------------------------------------
	Remove all vestiges of usage analysis
	-------------------------------------

This commit removes a large blob of usage-analysis-related code, almost
all of which was commented out.

Sadly, it doesn't look as if Keith is going to have enough time to polish it
up, and in any case the actual performance benefits (so far as we can measure
them) turned out to be pretty modest (a few percent).

So, with regret, I'm chopping it all out.  It's still there in the repository
if anyone wants to go hack on it.  And Tobias Gedell at Chalmers is implementing
a different analysis, via External Core.
parent e8f681e4
......@@ -92,7 +92,7 @@ import Var ( Id, DictId,
)
import qualified Var ( mkLocalId, mkGlobalId, mkSpecPragmaId )
import Type ( Type, typePrimRep, addFreeTyVars,
usOnce, eqUsage, seqType, splitTyConApp_maybe )
seqType, splitTyConApp_maybe )
import IdInfo
......@@ -463,13 +463,12 @@ idLBVarInfo :: Id -> LBVarInfo
idLBVarInfo id = lbvarInfo (idInfo id)
isOneShotLambda :: Id -> Bool
isOneShotLambda id = analysis
where analysis = case idLBVarInfo id of
LBVarInfo u | u `eqUsage` usOnce -> True
other -> False
isOneShotLambda id = case idLBVarInfo id of
IsOneShotLambda -> True
NoLBVarInfo -> False
setOneShotLambda :: Id -> Id
setOneShotLambda id = modifyIdInfo (`setLBVarInfo` LBVarInfo usOnce) id
setOneShotLambda id = modifyIdInfo (`setLBVarInfo` IsOneShotLambda) id
clearOneShotLambda :: Id -> Id
clearOneShotLambda id
......
......@@ -80,7 +80,7 @@ module IdInfo (
import CoreSyn
import Type ( Type, usOnce, eqUsage )
import Type ( Type )
import PrimOp ( PrimOp )
import NameEnv ( NameEnv, lookupNameEnv )
import Name ( Name )
......@@ -94,7 +94,6 @@ import BasicTypes ( OccInfo(..), isFragileOcc, isDeadOcc, seqOccInfo, isLoopBrea
import DataCon ( DataCon )
import ForeignCall ( ForeignCall )
import FieldLabel ( FieldLabel )
import Type ( usOnce )
import Demand hiding( Demand, seqDemand )
import qualified Demand
import NewDemand
......@@ -642,42 +641,28 @@ instance Show CprInfo where
%************************************************************************
If the @Id@ is a lambda-bound variable then it may have lambda-bound
var info. The usage analysis (UsageSP) detects whether the lambda
binding this var is a ``one-shot'' lambda; that is, whether it is
applied at most once.
var info. Sometimes we know whether the lambda binding this var is a
``one-shot'' lambda; that is, whether it is applied at most once.
This information may be useful in optimisation, as computations may
safely be floated inside such a lambda without risk of duplicating
work.
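The payoff is easiest to see at the source level. Here is a minimal, self-contained sketch (illustration only, not compiler code; applyOnce and expensive are made-up names) of the float-in that one-shot information licenses:

\begin{code}
module OneShotSketch where

-- 'applyOnce' stands in for any context that applies its functional
-- argument at most once; 'expensive' for a costly computation.
applyOnce :: (Int -> Int) -> Int
applyOnce f = f 10

expensive :: Int -> Int
expensive n = sum [1 .. n]

-- Floating the binding for 'x' inside the lambda duplicates no work,
-- because the lambda is one-shot: it is entered at most once, so
-- 'expensive n' is still evaluated at most once.
floatedIn :: Int -> Int
floatedIn n = applyOnce (\y -> let x = expensive n in x + y)

-- If the lambda could be applied many times, keeping 'x' outside
-- would be the safe choice instead.
keptOutside :: Int -> Int
keptOutside n = let x = expensive n in applyOnce (\y -> x + y)
\end{code}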
\begin{code}
data LBVarInfo
= NoLBVarInfo
| LBVarInfo Type -- The lambda that binds this Id has this usage
-- annotation (i.e., if ==usOnce, then the
-- lambda is applied at most once).
-- The annotation's kind must be `$'
-- HACK ALERT! placing this info here is a short-term hack,
-- but it minimises changes to the rest of the compiler.
-- Hack agreed by SLPJ/KSW 1999-04.
data LBVarInfo = NoLBVarInfo
| IsOneShotLambda -- The lambda is applied at most once
seqLBVar l = l `seq` ()
\end{code}
\begin{code}
hasNoLBVarInfo NoLBVarInfo = True
hasNoLBVarInfo other = False
hasNoLBVarInfo NoLBVarInfo = True
hasNoLBVarInfo IsOneShotLambda = False
noLBVarInfo = NoLBVarInfo
-- not safe to print or parse LBVarInfo because it is not really a
-- property of the definition, but a property of the context.
pprLBVarInfo NoLBVarInfo = empty
pprLBVarInfo (LBVarInfo u) | u `eqUsage` usOnce
= ptext SLIT("OneShot")
| otherwise
= empty
pprLBVarInfo IsOneShotLambda = ptext SLIT("OneShot")
instance Outputable LBVarInfo where
ppr = pprLBVarInfo
......
......@@ -751,7 +751,6 @@ substIdInfo subst is_fragile_occ info
| otherwise = Just (info `setOccInfo` (if zap_occ then NoOccInfo else old_occ)
`setSpecInfo` substRules subst old_rules
`setWorkerInfo` substWorker subst old_wrkr
`setLBVarInfo` substLBVar subst old_lbv
`setUnfoldingInfo` noUnfolding)
-- setSpecInfo does a seq
-- setWorkerInfo does a seq
......@@ -759,14 +758,12 @@ substIdInfo subst is_fragile_occ info
nothing_to_do = not zap_occ &&
isEmptyCoreRules old_rules &&
not (workerExists old_wrkr) &&
hasNoLBVarInfo old_lbv &&
not (hasUnfolding (unfoldingInfo info))
zap_occ = is_fragile_occ old_occ
old_occ = occInfo info
old_rules = specInfo info
old_wrkr = workerInfo info
old_lbv = lbvarInfo info
------------------
substIdType :: Subst -> Id -> Id
......@@ -831,10 +828,4 @@ substVarSet subst fvs
DoneEx expr -> exprFreeVars expr
DoneTy ty -> tyVarsOfType ty
ContEx se' expr -> substVarSet (setSubstEnv subst se') (exprFreeVars expr)
------------------
substLBVar subst NoLBVarInfo = NoLBVarInfo
substLBVar subst (LBVarInfo ty) = ty1 `seq` LBVarInfo ty1
where
ty1 = substTy subst ty
\end{code}
......@@ -8,7 +8,6 @@ module HsTypes (
HsType(..), HsTyVarBndr(..), HsTyOp(..),
, HsContext, HsPred(..)
, HsTupCon(..), hsTupParens, mkHsTupCon,
, hsUsOnce, hsUsMany
, mkHsForAllTy, mkHsDictTy, mkHsIParamTy
, hsTyVarName, hsTyVarNames, replaceTyVarName
......@@ -47,8 +46,7 @@ import Subst ( substTyWith )
import PprType ( {- instance Outputable Kind -}, pprParendKind, pprKind )
import BasicTypes ( Boxity(..), Arity, IPName, tupleParens )
import PrelNames ( listTyConKey, parrTyConKey,
usOnceTyConKey, usManyTyConKey, hasKey, unboundKey,
usOnceTyConName, usManyTyConName )
hasKey, unboundKey )
import SrcLoc ( noSrcLoc )
import Util ( eqListBy, lengthIs )
import FiniteMap
......@@ -143,15 +141,6 @@ data HsTyOp name = HsArrow | HsTyOp name
-- But when we generate or parse interface files, we use HsFunTy.
-- This keeps interfaces a bit smaller, because there are a lot of arrows
-----------------------
hsUsOnce, hsUsMany :: HsType RdrName
hsUsOnce = HsTyVar (mkUnqual tvName FSLIT(".")) -- deep magic
hsUsMany = HsTyVar (mkUnqual tvName FSLIT("!")) -- deep magic
hsUsOnce_Name, hsUsMany_Name :: HsType Name
hsUsOnce_Name = HsTyVar usOnceTyConName
hsUsMany_Name = HsTyVar usManyTyConName
-----------------------
data HsTupCon = HsTupCon Boxity Arity
......@@ -428,8 +417,6 @@ toHsType ty@(TyConApp tc tys) -- Must be saturated because toHsType's arg is of
| isTupleTyCon tc = HsTupleTy (HsTupCon (tupleTyConBoxity tc) (tyConArity tc)) tys'
| tc `hasKey` listTyConKey = HsListTy (head tys')
| tc `hasKey` parrTyConKey = HsPArrTy (head tys')
| tc `hasKey` usOnceTyConKey = hsUsOnce_Name -- must print !, . unqualified
| tc `hasKey` usManyTyConKey = hsUsMany_Name -- must print !, . unqualified
| otherwise = generic_case
where
generic_case = foldl HsAppTy (HsTyVar (getName tc)) tys'
......
......@@ -78,7 +78,6 @@ module CmdLineOpts (
opt_StgDoLetNoEscapes,
opt_UnfoldCasms,
opt_CprOff,
opt_UsageSPOn,
opt_UnboxStrictFields,
opt_SimplNoPreInlining,
opt_SimplDoEtaReduction,
......@@ -186,7 +185,6 @@ data CoreToDo -- These are diff core-to-core passes,
| CoreDoWorkerWrapper
| CoreDoSpecialising
| CoreDoSpecConstr
| CoreDoUSPInf
| CoreDoOldStrictness
| CoreDoGlomBinds
| CoreCSE
......@@ -250,7 +248,6 @@ data DynFlag
| Opt_D_dump_tc
| Opt_D_dump_types
| Opt_D_dump_rules
| Opt_D_dump_usagesp
| Opt_D_dump_cse
| Opt_D_dump_worker_wrapper
| Opt_D_dump_rn_trace
......@@ -269,7 +266,6 @@ data DynFlag
| Opt_D_dump_minimal_imports
| Opt_DoCoreLinting
| Opt_DoStgLinting
| Opt_DoUSPLinting
| Opt_WarnIsError -- -Werror; makes warnings fatal
| Opt_WarnDuplicateExports
......@@ -594,7 +590,6 @@ opt_CprOff = lookUp FSLIT("-fcpr-off")
opt_LiberateCaseThreshold = lookup_def_int "-fliberate-case-threshold" (10::Int)
opt_StgDoLetNoEscapes = lookUp FSLIT("-flet-no-escape")
opt_UnfoldCasms = lookUp FSLIT("-funfold-casms-in-hi-file")
opt_UsageSPOn = lookUp FSLIT("-fusagesp-on")
opt_UnboxStrictFields = lookUp FSLIT("-funbox-strict-fields")
opt_MaxWorkerArgs = lookup_def_int "-fmax-worker-args" (10::Int)
......@@ -671,7 +666,6 @@ isStaticHscFlag f =
"ffoldr-build-on",
"flet-no-escape",
"funfold-casms-in-hi-file",
"fusagesp-on",
"funbox-strict-fields",
"femit-extern-decls",
"fglobalise-toplev-names",
......
-----------------------------------------------------------------------------
-- $Id: DriverFlags.hs,v 1.110 2003/01/09 11:39:20 simonmar Exp $
-- $Id: DriverFlags.hs,v 1.111 2003/02/04 15:09:40 simonpj Exp $
--
-- Driver flags
--
......@@ -319,9 +319,6 @@ static_flags =
, ( "frule-check",
SepArg (\s -> writeIORef v_RuleCheck (Just s)) )
, ( "fusagesp" , NoArg (do writeIORef v_UsageSPInf True
add v_Opt_C "-fusagesp-on") )
, ( "fexcess-precision" , NoArg (do writeIORef v_Excess_precision True
add v_Opt_C "-fexcess-precision"))
......@@ -397,7 +394,6 @@ dynamic_flags = [
, ( "ddump-tc", NoArg (setDynFlag Opt_D_dump_tc) )
, ( "ddump-types", NoArg (setDynFlag Opt_D_dump_types) )
, ( "ddump-rules", NoArg (setDynFlag Opt_D_dump_rules) )
, ( "ddump-usagesp", NoArg (setDynFlag Opt_D_dump_usagesp) )
, ( "ddump-cse", NoArg (setDynFlag Opt_D_dump_cse) )
, ( "ddump-worker-wrapper", NoArg (setDynFlag Opt_D_dump_worker_wrapper) )
, ( "dshow-passes", NoArg (setVerbosity "2") )
......@@ -417,7 +413,6 @@ dynamic_flags = [
, ( "ddump-vect", NoArg (setDynFlag Opt_D_dump_vect) )
, ( "dcore-lint", NoArg (setDynFlag Opt_DoCoreLinting) )
, ( "dstg-lint", NoArg (setDynFlag Opt_DoStgLinting) )
, ( "dusagesp-lint", NoArg (setDynFlag Opt_DoUSPLinting) )
------ Machine dependant (-m<blah>) stuff ---------------------------
......
-----------------------------------------------------------------------------
-- $Id: DriverState.hs,v 1.89 2002/12/19 18:43:53 wolfgang Exp $
-- $Id: DriverState.hs,v 1.90 2003/02/04 15:09:40 simonpj Exp $
--
-- Settings for the driver
--
......@@ -191,7 +191,6 @@ setOptLevel n = do
GLOBAL_VAR(v_minus_o2_for_C, False, Bool)
GLOBAL_VAR(v_MaxSimplifierIterations, 4, Int)
GLOBAL_VAR(v_StgStats, False, Bool)
GLOBAL_VAR(v_UsageSPInf, False, Bool) -- Off by default
GLOBAL_VAR(v_Strictness, True, Bool)
GLOBAL_VAR(v_CSE, True, Bool)
GLOBAL_VAR(v_RuleCheck, Nothing, Maybe String)
......@@ -230,7 +229,6 @@ buildCoreToDo :: IO [CoreToDo]
buildCoreToDo = do
opt_level <- readIORef v_OptLevel
max_iter <- readIORef v_MaxSimplifierIterations
usageSP <- readIORef v_UsageSPInf
strictness <- readIORef v_Strictness
cse <- readIORef v_CSE
rule_check <- readIORef v_RuleCheck
......@@ -278,10 +276,6 @@ buildCoreToDo = do
],
case rule_check of { Just pat -> CoreDoRuleCheck 2 pat; Nothing -> CoreDoNothing },
-- infer usage information here in case we need it later.
-- (add more of these where you need them --KSW 1999-04)
if usageSP then CoreDoUSPInf else CoreDoNothing,
CoreDoSimplify (SimplPhase 1) [
-- Need inline-phase2 here so that build/augment get
-- inlined. I found that spectral/hartel/genfft lost some useful
......
......@@ -3,44 +3,50 @@ module LexCore where
import ParserCoreUtils
import Ratio
import Char
import Numeric( readFloat )
isNameChar c = isAlpha c || isDigit c || (c == '_') || (c == '\'')
isKeywordChar c = isAlpha c || (c == '_')
lexer :: (Token -> P a) -> P a
lexer cont [] = cont TKEOF []
lexer cont ('\n':cs) = \line -> lexer cont cs (line+1)
lexer cont [] = cont TKEOF []
lexer cont ('\n':cs) = \line -> lexer cont cs (line+1)
lexer cont ('-':'>':cs) = cont TKrarrow cs
lexer cont (c:cs)
| isSpace c = lexer cont cs
| isSpace c = lexer cont cs
| isLower c || (c == '_') = lexName cont TKname (c:cs)
| isUpper c = lexName cont TKcname (c:cs)
| isUpper c = lexName cont TKcname (c:cs)
| isDigit c || (c == '-') = lexNum cont (c:cs)
lexer cont ('%':cs) = lexKeyword cont cs
lexer cont ('\'':cs) = lexChar cont cs
lexer cont ('\"':cs) = lexString [] cont cs
lexer cont ('#':cs) = cont TKhash cs
lexer cont ('(':cs) = cont TKoparen cs
lexer cont (')':cs) = cont TKcparen cs
lexer cont ('{':cs) = cont TKobrace cs
lexer cont ('}':cs) = cont TKcbrace cs
lexer cont ('=':cs) = cont TKeq cs
lexer cont ('%':cs) = lexKeyword cont cs
lexer cont ('\'':cs) = lexChar cont cs
lexer cont ('\"':cs) = lexString [] cont cs
lexer cont ('#':cs) = cont TKhash cs
lexer cont ('(':cs) = cont TKoparen cs
lexer cont (')':cs) = cont TKcparen cs
lexer cont ('{':cs) = cont TKobrace cs
lexer cont ('}':cs) = cont TKcbrace cs
lexer cont ('=':cs) = cont TKeq cs
lexer cont (':':':':cs) = cont TKcoloncolon cs
lexer cont ('*':cs) = cont TKstar cs
lexer cont ('.':cs) = cont TKdot cs
lexer cont ('\\':cs) = cont TKlambda cs
lexer cont ('@':cs) = cont TKat cs
lexer cont ('?':cs) = cont TKquestion cs
lexer cont (';':cs) = cont TKsemicolon cs
lexer cont (c:cs) = failP "invalid character" [c]
lexer cont ('*':cs) = cont TKstar cs
lexer cont ('.':cs) = cont TKdot cs
lexer cont ('\\':cs) = cont TKlambda cs
lexer cont ('@':cs) = cont TKat cs
lexer cont ('?':cs) = cont TKquestion cs
lexer cont (';':cs) = cont TKsemicolon cs
lexer cont (c:cs) = failP "invalid character" [c]
lexChar cont ('\\':'x':h1:h0:'\'':cs)
| isHexEscape [h1,h0] = cont (TKchar (hexToChar h1 h0)) cs
lexChar cont ('\\':cs) = failP "invalid char character" ('\\':(take 10 cs))
lexChar cont ('\'':cs) = failP "invalid char character" ['\'']
lexChar cont ('\"':cs) = failP "invalid char character" ['\"']
lexChar cont ('\\':cs) = failP "invalid char character" ('\\':(take 10 cs))
lexChar cont ('\'':cs) = failP "invalid char character" ['\'']
lexChar cont ('\"':cs) = failP "invalid char character" ['\"']
lexChar cont (c:'\'':cs) = cont (TKchar c) cs
lexString s cont ('\\':'x':h1:h0:cs)
| isHexEscape [h1,h0] = lexString (s++[hexToChar h1 h0]) cont cs
lexString s cont ('\\':cs) = failP "invalid string character" ['\\']
......@@ -50,23 +56,20 @@ lexString s cont (c:cs) = lexString (s++[c]) cont cs
isHexEscape = all (\c -> isHexDigit c && (isDigit c || isLower c))
hexToChar h1 h0 =
chr(
(digitToInt h1) * 16 +
(digitToInt h0))
hexToChar h1 h0 = chr (digitToInt h1 * 16 + digitToInt h0)
lexNum cont cs =
case cs of
('-':cs) -> f (-1) cs
_ -> f 1 cs
('-':cs) -> f (-1) cs
_ -> f 1 cs
where f sgn cs =
case span isDigit cs of
(digits,'.':c:rest) | isDigit c ->
cont (TKrational (numer % denom)) rest'
where (fpart,rest') = span isDigit (c:rest)
denom = 10^(length fpart)
numer = sgn * ((read digits) * denom + (read fpart))
(digits,'.':c:rest)
| isDigit c -> cont (TKrational r) rest'
where ((r,rest'):_) = readFloat (digits ++ ('.':c:rest))
-- When reading a floating-point number, which is
-- a bit complicated, use the Haskell 98 library function
(digits,rest) -> cont (TKinteger (sgn * (read digits))) rest
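The rewritten lexNum leans on Numeric.readFloat for the fractional case. A standalone sketch of its behaviour (assumed input values, not part of the lexer): readFloat consumes the literal and hands back the parsed value together with the unconsumed remainder, which is exactly the (r, rest') pair the lexer needs.

import Numeric ( readFloat )

demoReadFloat :: (Rational, String)
demoReadFloat = case readFloat "3.14rest" of
                  ((r, rest):_) -> (r, rest)      -- (157 % 50, "rest")
                  []            -> error "no parse"

main :: IO ()
main = print demoReadFloat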
lexName cont cstr cs = cont (cstr name) rest
......
......@@ -440,14 +440,11 @@ dollarMainName = varQual mAIN_Name FSLIT("$main") dollarMainKey
runIOName = varQual pREL_TOP_HANDLER_Name FSLIT("runIO") runMainKey
-- Stuff from GHC.Prim
usOnceTyConName = kindQual FSLIT(".") usOnceTyConKey
usManyTyConName = kindQual FSLIT("!") usManyTyConKey
superKindName = kindQual FSLIT("KX") kindConKey
superBoxityName = kindQual FSLIT("BX") boxityConKey
liftedConName = kindQual FSLIT("*") liftedConKey
unliftedConName = kindQual FSLIT("#") unliftedConKey
openKindConName = kindQual FSLIT("?") anyBoxConKey
usageKindConName = kindQual FSLIT("$") usageConKey
typeConName = kindQual FSLIT("Type") typeConKey
funTyConName = tcQual gHC_PRIM_Name FSLIT("(->)") funTyConKey
......@@ -807,11 +804,6 @@ bcoPrimTyConKey = mkPreludeTyConUnique 73
ptrTyConKey = mkPreludeTyConUnique 74
funPtrTyConKey = mkPreludeTyConUnique 75
-- Usage type constructors
usageConKey = mkPreludeTyConUnique 76
usOnceTyConKey = mkPreludeTyConUnique 77
usManyTyConKey = mkPreludeTyConUnique 78
-- Generic Type Constructors
crossTyConKey = mkPreludeTyConUnique 79
plusTyConKey = mkPreludeTyConUnique 80
......
......@@ -43,7 +43,6 @@ import LiberateCase ( liberateCase )
import SAT ( doStaticArgs )
import Specialise ( specProgram)
import SpecConstr ( specConstrProgram)
import UsageSPInf ( doUsageSPInf )
import DmdAnal ( dmdAnalPgm )
import WorkWrap ( wwTopBinds )
#ifdef OLD_STRICTNESS
......@@ -173,8 +172,6 @@ doCorePass dfs rb us binds CoreDoOldStrictness
#endif
doCorePass dfs rb us binds CoreDoPrintCore
= _scc_ "PrintCore" noStats dfs (printCore binds)
doCorePass dfs rb us binds CoreDoUSPInf
= _scc_ "CoreUsageSPInf" noStats dfs (doUsageSPInf dfs us binds)
doCorePass dfs rb us binds CoreDoGlomBinds
= noStats dfs (glomBinds dfs binds)
doCorePass dfs rb us binds (CoreDoRuleCheck phase pat)
......
......@@ -96,7 +96,7 @@ module TcType (
tidyTopType, tidyType, tidyPred, tidyTypes, tidyFreeTyVars, tidyOpenType, tidyOpenTypes,
tidyTyVarBndr, tidyOpenTyVar, tidyOpenTyVars,
typeKind, eqKind, eqUsage,
typeKind, eqKind,
tyVarsOfType, tyVarsOfTypes, tyVarsOfPred, tyVarsOfTheta
) where
......@@ -127,7 +127,7 @@ import Type ( -- Re-exports
tidyTopType, tidyType, tidyPred, tidyTypes,
tidyFreeTyVars, tidyOpenType, tidyOpenTypes,
tidyTyVarBndr, tidyOpenTyVar,
tidyOpenTyVars, eqKind, eqUsage,
tidyOpenTyVars, eqKind,
hasMoreBoxityInfo, liftedBoxity,
superBoxity, typeKind, superKind, repType
)
......@@ -449,8 +449,7 @@ The type of a method for class C is always of the form:
where sig_ty is the type given by the method's signature, and thus in general
is a ForallTy. At the point that splitMethodTy is called, it is expected
that the outer Forall has already been stripped off. splitMethodTy then
returns (C a1..an, sig_ty') where sig_ty' is sig_ty with any Notes or
Usages stripped off.
returns (C a1..an, sig_ty') where sig_ty' is sig_ty with any Notes stripped off.
\begin{code}
tcSplitMethodTy :: Type -> (PredType, Type)
......
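As a concrete instance (illustration only, with the compiler's own types elided): for class Eq, the method (==) has type forall a. Eq a => a -> a -> Bool; once the outer forall is stripped, the split described above yields the pair (Eq a, a -> a -> Bool). A toy sketch of that shape over stand-in types:

\begin{code}
-- Toy stand-ins for GHC's PredType and Type (assumptions, not GHC code)
data ToyType = ToyPred String ToyType   -- "C a1..an => rest"
             | ToySig  String           -- the method's signature type
             deriving Show

-- Peel off the class predicate and return it with the remaining signature.
toySplitMethodTy :: ToyType -> (String, ToyType)
toySplitMethodTy (ToyPred cls rest) = (cls, rest)
toySplitMethodTy other              = error ("no class context: " ++ show other)

-- toySplitMethodTy (ToyPred "Eq a" (ToySig "a -> a -> Bool"))
--   == ("Eq a", ToySig "a -> a -> Bool")
\end{code}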
......@@ -140,12 +140,6 @@ ppr_ty ctxt_prec ty@(TyConApp tycon tys)
other -> maybeParen ctxt_prec tYCON_PREC
(ppr tycon <+> ppr_ty tYCON_PREC ty)
-- USAGE CASE
| (tycon `hasKey` usOnceTyConKey || tycon `hasKey` usManyTyConKey),
null tys
= -- For usages (! and .), always print bare OccName, without pkg/mod/uniq
ppr (getOccName (tyConName tycon))
-- TUPLE CASE (boxed and unboxed)
| isTupleTyCon tycon,
tys `lengthIs` tyConArity tycon -- No magic if partially applied
......
......@@ -20,11 +20,6 @@ module Type (
isTypeKind, isAnyTypeKind,
funTyCon,
usageKindCon, -- :: KX
usageTypeKind, -- :: KX
usOnceTyCon, usManyTyCon, -- :: $
usOnce, usMany, -- :: $
-- exports from this module:
hasMoreBoxityInfo, defaultKind,
......@@ -67,7 +62,7 @@ module Type (
tidyTopType, tidyPred,
-- Comparison
eqType, eqKind, eqUsage,
eqType, eqKind,
-- Seq
seqType, seqTypes
......@@ -875,7 +870,6 @@ I don't think this is harmful, but it's something to watch out for.
\begin{code}
eqType t1 t2 = eq_ty emptyVarEnv t1 t2
eqKind = eqType -- No worries about looking
eqUsage = eqType -- through source types for these two
-- Look through Notes
eq_ty env (NoteTy _ t1) t2 = eq_ty env t1 t2
......
......@@ -18,11 +18,6 @@ module TypeRep (
liftedTypeKind, unliftedTypeKind, openTypeKind, -- :: KX
mkArrowKind, mkArrowKinds, -- :: KX -> KX -> KX
usageKindCon, -- :: KX
usageTypeKind, -- :: KX
usOnceTyCon, usManyTyCon, -- :: $
usOnce, usMany, -- :: $
funTyCon
) where
......@@ -41,7 +36,6 @@ import Binary
-- others
import PrelNames ( superKindName, superBoxityName, liftedConName,
unliftedConName, typeConName, openKindConName,
usageKindConName, usOnceTyConName, usManyTyConName,
funTyConName
)
\end{code}
......@@ -242,8 +236,6 @@ kind :: KX = kind -> kind
| Type liftedness -- (Type *) is printed as just *
-- (Type #) is printed as just #
| UsageKind -- Printed '$'; used for usage annotations
| OpenKind -- Can be lifted or unlifted
-- Printed '?'
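With UsageKind removed, the grammar above can be read off as an ordinary algebraic data type. A toy rendering for illustration only (GHC itself represents kinds as Types built from kind constructors, not via a dedicated datatype like this):

\begin{code}
data ToyBoxity = Lifted          -- printed *
               | Unlifted        -- printed #
               deriving Show

data ToyKind = ToyArrow ToyKind ToyKind   -- kind -> kind
             | ToyType ToyBoxity          -- (Type *) printed *, (Type #) printed #
             | ToyOpen                    -- can be lifted or unlifted; printed ?
             deriving Show
\end{code}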
......@@ -302,7 +294,7 @@ unliftedBoxityCon = mkKindCon unliftedConName superBoxity
\end{code}
------------------------------------------
Define kinds: Type, Type *, Type #, OpenKind, and UsageKind
Define kinds: Type, Type *, Type #, OpenKind
\begin{code}
typeCon :: KindCon -- :: BX -> KX
......@@ -315,9 +307,6 @@ unliftedTypeKind = TyConApp typeCon [unliftedBoxity]
openKindCon = mkKindCon openKindConName superKind
openTypeKind = TyConApp openKindCon []
usageKindCon = mkKindCon usageKindConName superKind
usageTypeKind = TyConApp usageKindCon []
\end{code}
------------------------------------------
......@@ -338,7 +327,6 @@ Binary kinds for interface files
instance Binary Kind where
put_ bh k@(TyConApp tc [])
| tc == openKindCon = putByte bh 0
| tc == usageKindCon = putByte bh 1
put_ bh k@(TyConApp tc [TyConApp bc _])
| tc == typeCon && bc == liftedBoxityCon = putByte bh 2
| tc == typeCon && bc == unliftedBoxityCon = putByte bh 3
......@@ -349,7 +337,6 @@ instance Binary Kind where
b <- getByte bh
case b of
0 -> return openTypeKind
1 -> return usageTypeKind
2 -> return liftedTypeKind
3 -> return unliftedTypeKind
_ -> do f <- get bh; a <- get bh; return (FunTy f a)
......@@ -374,17 +361,4 @@ funTyCon = mkFunTyCon funTyConName (mkArrowKinds [liftedTypeKind, liftedTypeKind
-- a prefix way, thus: (->) Int# Int#. And this is unusual.
\end{code}
------------------------------------------
Usage tycons @.@ and @!@
The usage tycons are of kind usageTypeKind (`$'). The types contain
no values, and are used purely for usage annotation.
\begin{code}
usOnceTyCon = mkKindCon usOnceTyConName usageTypeKind
usOnce = TyConApp usOnceTyCon []
usManyTyCon = mkKindCon usManyTyConName usageTypeKind
usMany = TyConApp usManyTyCon []
\end{code}
%
% (c) The GRASP/AQUA Project, Glasgow University, 1998
%
\section[UConSet]{UsageSP constraint solver}
This code is (based on) PhD work of Keith Wansbrough <kw217@cl.cam.ac.uk>,
February 1998 .. April 1999.
Keith Wansbrough 1998-02-16..1999-04-29
\begin{code}
module UConSet ( {- SEE BELOW: -- KSW 2000-10-13
UConSet,
emptyUConSet,
eqManyUConSet,
eqUConSet,
leqUConSet,
unionUCS,
unionUCSs,
solveUCS, -}
) where
#include "HsVersions.h"
import VarEnv
import Bag ( Bag, unitBag, emptyBag, unionBags, foldlBag, bagToList )
import Outputable
import PprType
{- ENTIRE FILE COMMENTED OUT FOR NOW -- KSW 2000-10-13
This monomorphic version of the analysis is outdated. I'm
currently ripping out the old one and inserting the new one. For
now, I'm simply commenting out this entire file.
\end{code}
======================================================================
The data type:
~~~~~~~~~~~~~~
First, individual constraints on particular variables. This is
private to the implementation.
\begin{code}
data UCon = UCEq UVar UVar -- j = k (equivalence)