diff --git a/ghc/compiler/Makefile b/ghc/compiler/Makefile
index 85b577e2bd28231d572520ef727811b8df7aa50f..33bbd9eb420a174fb288276e8bff8e3bc662363d 100644
--- a/ghc/compiler/Makefile
+++ b/ghc/compiler/Makefile
@@ -1,5 +1,5 @@
 # -----------------------------------------------------------------------------
-# $Id: Makefile,v 1.208 2002/02/08 15:02:30 simonmar Exp $
+# $Id: Makefile,v 1.209 2002/02/11 08:20:38 chak Exp $
 
 TOP = ..
 
@@ -96,7 +96,7 @@ CLEAN_FILES += $(CONFIG_HS)
 ALL_DIRS = \
   utils basicTypes types hsSyn prelude rename typecheck deSugar coreSyn \
   specialise simplCore stranal stgSyn simplStg codeGen absCSyn main \
-  profiling parser usageSP cprAnalysis compMan
+  profiling parser usageSP cprAnalysis compMan ndpFlatten
 
 # Make sure we include Config.hs even if it doesn't exist yet...
 ALL_SRCS += $(CONFIG_HS)
diff --git a/ghc/compiler/basicTypes/Unique.lhs b/ghc/compiler/basicTypes/Unique.lhs
index 510e7286471b49302c1308a7cc21b81ebe998cb7..44c8c074522fcec416f565cae7914f90fea08cae 100644
--- a/ghc/compiler/basicTypes/Unique.lhs
+++ b/ghc/compiler/basicTypes/Unique.lhs
@@ -40,6 +40,7 @@ module Unique (
 	mkTupleTyConUnique, mkTupleDataConUnique,
 	mkPreludeMiscIdUnique, mkPreludeDataConUnique,
 	mkPreludeTyConUnique, mkPreludeClassUnique,
+	mkPArrDataConUnique,
 
 	mkBuiltinUnique,
 	mkPseudoUnique1, mkPseudoUnique2, mkPseudoUnique3
@@ -322,6 +323,10 @@ isTupleKey u = case unpkUnique u of
 mkPrimOpIdUnique op		= mkUnique '9' op
 mkPreludeMiscIdUnique i		= mkUnique '0' i
 
+-- There are no number tags left, so we pick a different character for the
+-- tag.
+mkPArrDataConUnique a	        = mkUnique ':' (2*a)
+
 -- The "tyvar uniques" print specially nicely: a, b, c, etc.
 -- See pprUnique for details
 
diff --git a/ghc/compiler/deSugar/Check.lhs b/ghc/compiler/deSugar/Check.lhs
index 17e0e52dff6743230e632499a2c153d27085a9a9..d44583445164f4e423bd5bf36477736f048f2502 100644
--- a/ghc/compiler/deSugar/Check.lhs
+++ b/ghc/compiler/deSugar/Check.lhs
@@ -142,6 +142,8 @@ untidy b (ConPatIn name pats)  =
 untidy b (ConOpPatIn pat1 name fixity pat2) = 
        pars b (ConOpPatIn (untidy_pars pat1) name fixity (untidy_pars pat2)) 
 untidy _ (ListPatIn pats)  = ListPatIn (map untidy_no_pars pats) 
+untidy _ (PArrPatIn pats)  = 
+       panic "Check.untidy: Shouldn't get a parallel array here!"
 untidy _ (TuplePatIn pats boxed) = TuplePatIn (map untidy_no_pars pats) boxed
 
 untidy _ pat = pprPanic "Check.untidy: SigPatIn" (ppr pat)
@@ -523,12 +525,26 @@ make_con (ConPat id _ _ _ _) (p:q:ps, constraints)
     where name   = getName id
           fixity = panic "Check.make_con: Guessing fixity"
 
-make_con (ConPat id _ _ _ pats) (ps,constraints) 
-      | isTupleTyCon tc = (TuplePatIn pats_con (tupleTyConBoxity tc) : rest_pats, constraints) 
-      | otherwise       = (ConPatIn name pats_con		     : rest_pats, constraints)
-    where name      = getName id
-	  (pats_con, rest_pats) = splitAtList pats ps
-	  tc	    = dataConTyCon id
+
+-- also reconstruct parallel array patterns here
+--
+-- * checking the type alone is not enough; we have to make sure that we are
+--   really dealing with one of the fake constructors and not with the real
+--   representation of parallel arrays
+--
+make_con (ConPat id _ _ _ pats) (ps, constraints) 
+      | isTupleTyCon tc  = (TuplePatIn pats_con (tupleTyConBoxity tc) : rest_pats, constraints) 
+      | isPArrFakeCon id = (PArrPatIn pats_con                        : rest_pats, constraints) 
+      | otherwise        = (ConPatIn name pats_con                    : rest_pats, constraints)
+    where name                  = getName id
+          (pats_con, rest_pats) = splitAtList pats ps
+          tc                    = dataConTyCon id
 	  
 
 make_whole_con :: DataCon -> WarningPat
@@ -575,6 +591,13 @@ simplify_pat (ListPat ty ps) = foldr (\ x -> \y -> ConPat consDataCon list_ty []
 	                             (map simplify_pat ps)
                              where list_ty = mkListTy ty
 
+-- introduce fake parallel array constructors to be able to handle parallel
+-- arrays with the existing machinery for constructor patterns
+--
+simplify_pat (PArrPat ty ps)
+  = ConPat (parrFakeCon arity) (mkPArrTy ty) [] [] (map simplify_pat ps)
+  where
+    arity = length ps
 
 simplify_pat (TuplePat ps boxity)
   = ConPat (tupleCon boxity arity)
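
The fake-constructor encoding used by `simplify_pat' above can be pictured in plain Haskell; the sketch below is illustrative only, with `FakePArr' as a made-up stand-in for the family of constructors that `parrFakeCon' produces on demand (the real ones live in TysWiredIn and are not shown in this patch):

    -- Sketch only, not GHC source: one pseudo-constructor per arity, so a
    -- pattern [:p1, ..., pn:] can be handed to the existing exhaustiveness
    -- and overlap checker as the constructor pattern for arity n.
    data FakePArr a
      = FakePArr0
      | FakePArr1 a
      | FakePArr2 a a
      | FakePArr3 a a a
      -- ...and so on, one constructor per arity, generated on demand

    -- e.g. the pattern [:x, y:] behaves here like the pattern  FakePArr2 x y
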
diff --git a/ghc/compiler/deSugar/DsExpr.lhs b/ghc/compiler/deSugar/DsExpr.lhs
index 162ae247c756a0354bd91b4514f18d28a6646c71..5d7ff191dfa44dd71fc58af6af0bf7a46e76b395 100644
--- a/ghc/compiler/deSugar/DsExpr.lhs
+++ b/ghc/compiler/deSugar/DsExpr.lhs
@@ -32,7 +32,7 @@ import DsMonad
 import DsBinds		( dsMonoBinds, AutoScc(..) )
 import DsGRHSs		( dsGuarded )
 import DsCCall		( dsCCall, resultWrapper )
-import DsListComp	( dsListComp )
+import DsListComp	( dsListComp, dsPArrComp )
 import DsUtils		( mkErrorAppDs, mkStringLit, mkStringLitFS, 
 			  mkConsExpr, mkNilExpr, mkIntegerLit
 			)
@@ -49,7 +49,7 @@ import TyCon		( tyConDataCons )
 import TysWiredIn	( tupleCon, listTyCon, charDataCon, intDataCon )
 import BasicTypes	( RecFlag(..), Boxity(..), ipNameName )
 import Maybes		( maybeToBool )
-import PrelNames	( hasKey, ratioTyConKey )
+import PrelNames	( hasKey, ratioTyConKey, toPName )
 import Util		( zipEqual, zipWithEqual )
 import Outputable
 
@@ -262,27 +262,26 @@ dsExpr (HsWith expr binds)
         = dsExpr e	`thenDs` \ e' ->
 	  returnDs (Let (NonRec (ipNameName n) e') body)
 
-dsExpr (HsDoOut do_or_lc stmts return_id then_id fail_id result_ty src_loc)
-  | maybeToBool maybe_list_comp
+-- We need the `ListComp' form to use `deListComp' (rather than the "do" form)
+-- because the interpretation of `stmts' depends on what sort of thing it is.
+--
+dsExpr (HsDoOut ListComp stmts return_id then_id fail_id result_ty src_loc)
   =	-- Special case for list comprehensions
     putSrcLocDs src_loc $
     dsListComp stmts elt_ty
+  where
+    (_, [elt_ty]) = tcSplitTyConApp result_ty
 
-  | otherwise
+dsExpr (HsDoOut DoExpr   stmts return_id then_id fail_id result_ty src_loc)
   = putSrcLocDs src_loc $
-    dsDo do_or_lc stmts return_id then_id fail_id result_ty
+    dsDo DoExpr stmts return_id then_id fail_id result_ty
+
+dsExpr (HsDoOut PArrComp stmts return_id then_id fail_id result_ty src_loc)
+  =	-- Special case for array comprehensions
+    putSrcLocDs src_loc $
+    dsPArrComp stmts elt_ty
   where
-    maybe_list_comp 
-	= case (do_or_lc, tcSplitTyConApp_maybe result_ty) of
-	    (ListComp, Just (tycon, [elt_ty]))
-		  | tycon == listTyCon
-		 -> Just elt_ty
-	    other -> Nothing
-	-- We need the ListComp form to use deListComp (rather than the "do" form)
-	-- because the interpretation of ExprStmt depends on what sort of thing
-	-- it is.
-
-    Just elt_ty = maybe_list_comp
+    (_, [elt_ty]) = tcSplitTyConApp result_ty
 
 dsExpr (HsIf guard_expr then_expr else_expr src_loc)
   = putSrcLocDs src_loc $
@@ -319,6 +318,21 @@ dsExpr (ExplicitList ty xs)
 		go xs					`thenDs` \ core_xs ->
 		returnDs (mkConsExpr ty core_x core_xs)
 
+-- we create a list from the array elements and convert it into a parallel
+-- array using `PrelPArr.toP'
+--
+-- * the main disadvantage of this scheme is that `toP' traverses the list
+--   twice: once to determine the length and a second time to put the elements
+--   into the array; this inefficiency could be avoided by exposing some of
+--   the innards of `PrelPArr' to the compiler (ie, have a `PrelPArrBase') so
+--   that we can exploit the fact that we already know the length of the array
+--   here at compile time
+--
+dsExpr (ExplicitPArr ty xs)
+  = dsLookupGlobalValue toPName				`thenDs` \toP      ->
+    dsExpr (ExplicitList ty xs)				`thenDs` \coreList ->
+    returnDs (mkApps (Var toP) [Type ty, coreList])
+
 dsExpr (ExplicitTuple expr_list boxity)
   = mapDs dsExpr expr_list	  `thenDs` \ core_exprs  ->
     returnDs (mkConApp (tupleCon boxity (length expr_list))
@@ -347,6 +361,24 @@ dsExpr (ArithSeqOut expr (FromThenTo from thn two))
     dsExpr thn		  `thenDs` \ thn2 ->
     dsExpr two		  `thenDs` \ two2 ->
     returnDs (mkApps expr2 [from2, thn2, two2])
+
+dsExpr (PArrSeqOut expr (FromTo from two))
+  = dsExpr expr		  `thenDs` \ expr2 ->
+    dsExpr from		  `thenDs` \ from2 ->
+    dsExpr two		  `thenDs` \ two2 ->
+    returnDs (mkApps expr2 [from2, two2])
+
+dsExpr (PArrSeqOut expr (FromThenTo from thn two))
+  = dsExpr expr		  `thenDs` \ expr2 ->
+    dsExpr from		  `thenDs` \ from2 ->
+    dsExpr thn		  `thenDs` \ thn2 ->
+    dsExpr two		  `thenDs` \ two2 ->
+    returnDs (mkApps expr2 [from2, thn2, two2])
+
+dsExpr (PArrSeqOut expr _)
+  = panic "DsExpr.dsExpr: Infinite parallel array!"
+    -- the parser shouldn't have generated it and the renamer and typechecker
+    -- shouldn't have let it through
 \end{code}
 
 \noindent
@@ -512,6 +544,7 @@ dsExpr (DictApp expr dicts)	-- becomes a curried application
 dsExpr (HsDo _ _ _)	    = panic "dsExpr:HsDo"
 dsExpr (ExprWithTySig _ _)  = panic "dsExpr:ExprWithTySig"
 dsExpr (ArithSeqIn _)	    = panic "dsExpr:ArithSeqIn"
+dsExpr (PArrSeqIn _)	    = panic "dsExpr:PArrSeqIn"
 #endif
 
 \end{code}
@@ -534,7 +567,7 @@ dsDo do_or_lc stmts return_id then_id fail_id result_ty
 	(_, b_ty) = tcSplitAppTy result_ty	-- result_ty must be of the form (m b)
 	is_do	  = case do_or_lc of
 			DoExpr   -> True
-			ListComp -> False
+			_	 -> False
 	
 	-- For ExprStmt, see the comments near HsExpr.Stmt about 
 	-- exactly what ExprStmts mean!
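
Assuming `PrelPArr.toP' converts a list into a parallel array (which is what the lookup of `toPName' above relies on), the `ExplicitPArr' case can be modelled with ordinary lists; this is a sketch, not GHC source:

    -- Minimal model of the ExplicitPArr desugaring above, with a list-backed
    -- stand-in for [: :]; the real toP lives in PrelPArr.
    newtype PArr a = PArr [a]

    toP :: [a] -> PArr a
    toP = PArr                        -- the real toP also fixes the array length

    -- [:e1, e2, e3:]  desugars to  toP [e1, e2, e3]
    explicitPArrExample :: PArr Int
    explicitPArrExample = toP [1, 2, 3]

The `PArrSeqOut' cases simply apply the enumeration function already chosen by the typechecker to the two or three desugared bounds, mirroring the `ArithSeqOut' cases directly above them.
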
diff --git a/ghc/compiler/deSugar/DsListComp.lhs b/ghc/compiler/deSugar/DsListComp.lhs
index ebe08c61892e7efcd699c4913668240ec2e09bf0..99b8980f26aa8a3047c4ec6a31b99eb88b0e7bf1 100644
--- a/ghc/compiler/deSugar/DsListComp.lhs
+++ b/ghc/compiler/deSugar/DsListComp.lhs
@@ -1,18 +1,23 @@
 %
 % (c) The GRASP/AQUA Project, Glasgow University, 1992-1998
 %
-\section[DsListComp]{Desugaring list comprehensions}
+\section[DsListComp]{Desugaring list comprehensions and array comprehensions}
 
 \begin{code}
-module DsListComp ( dsListComp ) where
+module DsListComp ( dsListComp, dsPArrComp ) where
 
 #include "HsVersions.h"
 
 import {-# SOURCE #-} DsExpr ( dsExpr, dsLet )
 
 import BasicTypes	( Boxity(..) )
-import HsSyn		( OutPat(..), HsExpr(..), Stmt(..), HsMatchContext(..), HsDoContext(..) )
-import TcHsSyn		( TypecheckedStmt, TypecheckedPat, TypecheckedHsExpr, outPatType )
+import DataCon		( dataConId )
+import TyCon		( tyConName )
+import HsSyn		( OutPat(..), HsExpr(..), Stmt(..),
+			  HsMatchContext(..), HsDoContext(..),
+			  collectHsOutBinders )
+import TcHsSyn		( TypecheckedStmt, TypecheckedPat, TypecheckedHsExpr,
+			  outPatType )
 import CoreSyn
 
 import DsMonad		-- the monadery used in the desugarer
@@ -22,12 +27,18 @@ import CmdLineOpts	( opt_FoldrBuildOn )
 import CoreUtils	( exprType, mkIfThenElse )
 import Id		( idType )
 import Var              ( Id )
-import Type		( mkTyVarTy, mkFunTys, mkFunTy, Type )
+import Type		( mkTyVarTy, mkFunTys, mkFunTy, Type,
+			  splitTyConApp_maybe )
 import TysPrim		( alphaTyVar )
-import TysWiredIn	( nilDataCon, consDataCon, unitDataConId, mkListTy, mkTupleTy )
+import TysWiredIn	( nilDataCon, consDataCon, unitDataConId, unitTy,
+			  mkListTy, mkTupleTy, intDataCon )
 import Match		( matchSimply )
-import PrelNames	( foldrName, buildName )
+import PrelNames	( trueDataConName, falseDataConName, foldrName,
+			  buildName, replicatePName, mapPName, filterPName,
+			  zipPName, crossPName, parrTyConName ) 
+import PrelInfo		( pAT_ERROR_ID )
 import SrcLoc		( noSrcLoc )
+import Panic		( panic )
 \end{code}
 
 List comprehensions may be desugared in one of two ways: ``ordinary''
@@ -319,4 +330,146 @@ dfListComp c_id n_id (BindStmt pat list1 locn : quals)
     )
 \end{code}
 
+%************************************************************************
+%*									*
+\subsection[DsPArrComp]{Desugaring of array comprehensions}
+%*									*
+%************************************************************************
+
+\begin{code}
+
+-- entry point for desugaring a parallel array comprehension
+--
+--   [:e | qss:] = <<[:e | qss:]>> () [:():]
+--
+dsPArrComp      :: [TypecheckedStmt] 
+	        -> Type		    -- Don't use; called with `undefined' below
+	        -> DsM CoreExpr
+dsPArrComp qs _  =
+  dsLookupGlobalValue replicatePName			  `thenDs` \repP ->
+  let unitArray = mkApps (Var repP) [Type unitTy, 
+				     mkConApp intDataCon [mkIntLit 1], 
+				     mkTupleExpr []]
+  in
+  dePArrComp qs (TuplePat [] Boxed) unitArray
 
+-- the work horse
+--
+dePArrComp :: [TypecheckedStmt] 
+	   -> TypecheckedPat		-- the current generator pattern
+	   -> CoreExpr			-- the current generator expression
+	   -> DsM CoreExpr
+--
+--  <<[:e' | :]>> pa ea = mapP (\pa -> e') ea
+--
+dePArrComp [ResultStmt e' _] pa cea =
+  dsLookupGlobalValue mapPName				  `thenDs` \mapP    ->
+  let ty = parrElemType cea
+  in
+  deLambda ty pa e'					  `thenDs` \(clam, 
+								     ty'e') ->
+  returnDs $ mkApps (Var mapP) [Type ty, Type ty'e', clam, cea]
+--
+--  <<[:e' | b, qs:]>> pa ea = <<[:e' | qs:]>> pa (filterP (\pa -> b) ea)
+--
+dePArrComp (ExprStmt b _ _ : qs) pa cea =
+  dsLookupGlobalValue filterPName			  `thenDs` \filterP  ->
+  let ty = parrElemType cea
+  in
+  deLambda ty pa b					  `thenDs` \(clam,_) ->
+  dePArrComp qs pa (mkApps (Var filterP) [Type ty, clam, cea])
+--
+--  <<[:e' | p <- e, qs:]>> pa ea = 
+--    let ef = filterP (\x -> case x of {p -> True; _ -> False}) e
+--    in
+--    <<[:e' | qs:]>> (pa, p) (crossP ea ef)
+--
+dePArrComp (BindStmt p e _ : qs) pa cea =
+  dsLookupGlobalValue falseDataConName			  `thenDs` \falseId ->
+  dsLookupGlobalValue trueDataConName			  `thenDs` \trueId ->
+  dsLookupGlobalValue filterPName			  `thenDs` \filterP ->
+  dsLookupGlobalValue crossPName			  `thenDs` \crossP  ->
+  dsExpr e						  `thenDs` \ce      ->
+  let ty'cea = parrElemType cea
+      ty'ce  = parrElemType ce
+      false  = Var falseId
+      true   = Var trueId
+  in
+  newSysLocalDs ty'ce					  `thenDs` \v       ->
+  matchSimply (Var v) (DoCtxt PArrComp) p true false      `thenDs` \pred    ->
+  let cef    = mkApps (Var filterP) [Type ty'ce, mkLams [v] pred, ce]
+      ty'cef = ty'ce				-- filterP preserves the type
+      pa'    = TuplePat [pa, p] Boxed
+  in
+  dePArrComp qs pa' (mkApps (Var crossP) [Type ty'cea, Type ty'cef, cea, cef])
+--
+--  <<[:e' | let ds, qs:]>> pa ea = 
+--    <<[:e' | qs:]>> (pa, (x_1, ..., x_n)) 
+--		      (mapP (\v@pa -> (v, let ds in (x_1, ..., x_n))) ea)
+--  where
+--    {x_1, ..., x_n} = DV (ds)		-- Defined Variables
+--
+dePArrComp (LetStmt ds : qs) pa cea =
+  dsLookupGlobalValue mapPName				  `thenDs` \mapP    ->
+  let xs     = collectHsOutBinders ds
+      ty'cea = parrElemType cea
+  in
+  newSysLocalDs ty'cea					  `thenDs` \v       ->
+  dsLet ds (mkTupleExpr xs)				  `thenDs` \clet    ->
+  newSysLocalDs (exprType clet)				  `thenDs` \let'v   ->
+  let projBody = mkDsLet (NonRec let'v clet) $ mkTupleExpr [v, let'v]
+      errTy    = exprType projBody
+      errMsg   = "DsListComp.dePArrComp: internal error!"
+  in
+  mkErrorAppDs pAT_ERROR_ID errTy errMsg                  `thenDs` \cerr    ->
+  matchSimply (Var v) (DoCtxt PArrComp) pa projBody cerr  `thenDs` \ccase   ->
+  let pa'    = TuplePat [pa, TuplePat (map VarPat xs) Boxed] Boxed
+      proj   = mkLams [v] ccase
+  in
+  dePArrComp qs pa' (mkApps (Var mapP) [Type ty'cea, proj, cea])
+--
+--  <<[:e' | qs | qss:]>> pa ea = 
+--    <<[:e' | qss:]>> (pa, (x_1, ..., x_n)) 
+--		       (zipP ea <<[:(x_1, ..., x_n) | qs:]>>)
+--    where
+--      {x_1, ..., x_n} = DV (qs)
+--
+dePArrComp (ParStmtOut []             : qss2) pa cea = dePArrComp qss2 pa cea
+dePArrComp (ParStmtOut ((xs, qs):qss) : qss2) pa cea =
+  dsLookupGlobalValue zipPName				  `thenDs` \zipP    ->
+  let pa'     = TuplePat [pa, TuplePat (map VarPat xs) Boxed] Boxed
+      ty'cea  = parrElemType cea
+      resStmt = ResultStmt (ExplicitTuple (map HsVar xs) Boxed) noSrcLoc
+  in
+  dsPArrComp (qs ++ [resStmt]) undefined		  `thenDs` \cqs     ->
+  let ty'cqs = parrElemType cqs
+      cea'   = mkApps (Var zipP) [Type ty'cea, Type ty'cqs, cea, cqs]
+  in
+  dePArrComp (ParStmtOut qss : qss2) pa' cea'
+
+-- generate Core corresponding to `\p -> e'
+--
+deLambda        :: Type			-- type of the argument
+		-> TypecheckedPat	-- argument pattern
+		-> TypecheckedHsExpr	-- body
+		-> DsM (CoreExpr, Type)
+deLambda ty p e  =
+  newSysLocalDs ty					  `thenDs` \v       ->
+  dsExpr e						  `thenDs` \ce      ->
+  let errTy    = exprType ce
+      errMsg   = "DsListComp.deLambda: internal error!"
+  in
+  mkErrorAppDs pAT_ERROR_ID errTy errMsg                  `thenDs` \cerr    ->
+  matchSimply (Var v) (DoCtxt PArrComp) p ce cerr	  `thenDs` \res	    ->
+  returnDs (mkLams [v] res, errTy)
+
+-- obtain the element type of the parallel array produced by the given Core
+-- expression
+--
+parrElemType   :: CoreExpr -> Type
+parrElemType e  = 
+  case splitTyConApp_maybe (exprType e) of
+    Just (tycon, [ty]) | tyConName tycon == parrTyConName -> ty
+    _							  -> panic
+      "DsListComp.parrElemType: not a parallel array type"
+\end{code}
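
The translation rules spelled out in the comments of `dePArrComp' compose as in the following sketch, where ordinary lists and list functions stand in for [: :] and for the PrelPArr combinators that are looked up by name (the stand-ins `mapP', `filterP', `crossP' and `replicateP' below are illustrative, not the real PrelPArr definitions):

    -- Runnable model of the comprehension translation above.
    mapP :: (a -> b) -> [a] -> [b]
    mapP = map

    filterP :: (a -> Bool) -> [a] -> [a]
    filterP = filter

    crossP :: [a] -> [b] -> [(a, b)]
    crossP xs ys = [ (x, y) | x <- xs, y <- ys ]

    replicateP :: Int -> a -> [a]
    replicateP = replicate

    -- Following the rules above, [:x + y | x <- xs, y <- ys, x < y:] becomes
    -- (the per-generator filterP is dropped here, as variable patterns never fail):
    comprehension :: [Int] -> [Int] -> [Int]
    comprehension xs ys =
      let ea0 = replicateP 1 ()                          -- entry point: <<...>> () [:():]
          ea1 = crossP ea0 xs                            -- generator x <- xs, pattern ((), x)
          ea2 = crossP ea1 ys                            -- generator y <- ys, pattern (((), x), y)
          ea3 = filterP (\(((), x), y) -> x < y) ea2     -- guard x < y
      in  mapP (\(((), x), y) -> x + y) ea3              -- result expression
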
diff --git a/ghc/compiler/deSugar/DsUtils.lhs b/ghc/compiler/deSugar/DsUtils.lhs
index 6b45c58108ee6e528a0ce79af831661bce4c5fe0..9bb99a65c5403dc167f6ddf751e86a6a0b3d4542 100644
--- a/ghc/compiler/deSugar/DsUtils.lhs
+++ b/ghc/compiler/deSugar/DsUtils.lhs
@@ -44,23 +44,24 @@ import MkId		( rebuildConArgs )
 import Id		( idType, Id, mkWildId )
 import Literal		( Literal(..), inIntRange, tARGET_MAX_INT )
 import TyCon		( isNewTyCon, tyConDataCons, isRecursiveTyCon )
-import DataCon		( DataCon, dataConStrictMarks, dataConId )
-import Type		( mkFunTy, isUnLiftedType, Type )
+import DataCon		( DataCon, dataConStrictMarks, dataConId,
+			  dataConSourceArity )
+import Type		( mkFunTy, isUnLiftedType, Type, splitTyConApp )
 import TcType		( tcTyConAppTyCon, isIntTy, isFloatTy, isDoubleTy )
 import TysPrim		( intPrimTy, charPrimTy, floatPrimTy, doublePrimTy )
 import TysWiredIn	( nilDataCon, consDataCon, 
                           tupleCon,
 			  unitDataConId, unitTy,
                           charTy, charDataCon, 
-                          intDataCon, smallIntegerDataCon, 
+                          intTy, intDataCon, smallIntegerDataCon, 
 			  floatDataCon, 
                           doubleDataCon,
-			  stringTy
-			)
+			  stringTy, isPArrFakeCon )
 import BasicTypes	( Boxity(..) )
 import UniqSet		( mkUniqSet, minusUniqSet, isEmptyUniqSet, UniqSet )
 import PrelNames	( unpackCStringName, unpackCStringUtf8Name, 
-			  plusIntegerName, timesIntegerName )
+			  plusIntegerName, timesIntegerName, 
+			  lengthPName, indexPName )
 import Outputable
 import UnicodeUtil      ( stringToUtf8 )
 import Util             ( isSingleton )
@@ -265,6 +266,9 @@ mkCoAlgCaseMatchResult var match_alts
   = ASSERT( null (tail match_alts) && null (tail arg_ids) )
     mkCoLetsMatchResult [NonRec arg_id newtype_rhs] match_result
 
+  | isPArrFakeAlts match_alts	-- Sugared parallel array; use a literal case 
+  = MatchResult CanFail mk_parrCase
+
   | otherwise			-- Datatype case; use a case
   = MatchResult fail_flag mk_case
   where
@@ -309,6 +313,72 @@ mkCoAlgCaseMatchResult var match_alts
     un_mentioned_constructors
         = mkUniqSet data_cons `minusUniqSet` mkUniqSet [ con | (con, _, _) <- match_alts]
     exhaustive_case = isEmptyUniqSet un_mentioned_constructors
+
+	-- Stuff for parallel arrays
+	-- 
+	-- * the following is to desugar cases over fake constructors for
+	--   parallel arrays, which are introduced by `tidy1' in the `PArrPat'
+	--   case
+	--
+	-- Concerning `isPArrFakeAlts':
+	--
+	-- * it is *not* sufficient to just check the type, as we have to be
+	--   careful not to confuse the real representation of parallel arrays
+	--   with the fake constructors;
+	--   moreover, a list of alternatives must not mix fake and real
+	--   constructors (this is checked earlier on)
+	--
+	-- FIXME: We actually go through the whole list and make sure that
+	--	  either all or none of the constructors are fake parallel
+	--	  array constructors.  This is to spot equations that mix fake
+	--	  constructors with the real representation defined in
+	--	  `PrelPArr'.  It would be nicer to spot this situation
+	--	  earlier and raise a proper error message, but it can really
+	--	  only happen in `PrelPArr' anyway.
+	--
+    isPArrFakeAlts [(dcon, _, _)]      = isPArrFakeCon dcon
+    isPArrFakeAlts ((dcon, _, _):alts) = 
+      case (isPArrFakeCon dcon, isPArrFakeAlts alts) of
+        (True , True ) -> True
+        (False, False) -> False
+	_	       -> 
+	  panic "DsUtils: You may not mix `[:...:]' with `PArr' patterns"
+    --
+    mk_parrCase fail = 		   
+      dsLookupGlobalValue lengthPName			`thenDs` \lengthP  ->
+      unboxAlt						`thenDs` \alt      ->
+      returnDs (Case (len lengthP) (mkWildId intTy) [alt])
+      where
+	elemTy      = case splitTyConApp (idType var) of
+		        (_, [elemTy]) -> elemTy
+		        _	        -> panic panicMsg
+        panicMsg    = "DsUtils.mkCoAlgCaseMatchResult: not a parallel array?"
+	len lengthP = mkApps (Var lengthP) [Type elemTy, Var var]
+	--
+	unboxAlt = 
+	  newSysLocalDs intPrimTy			`thenDs` \l	   ->
+	  dsLookupGlobalValue indexPName		`thenDs` \indexP   ->
+	  mapDs (mkAlt indexP) match_alts               `thenDs` \alts     ->
+	  returnDs (DataAlt intDataCon, [l], (Case (Var l) wild (dft : alts)))
+          where
+	    wild = mkWildId intPrimTy
+	    dft  = (DEFAULT, [], fail)
+	--
+	-- each alternative matches one array length (corresponding to one
+	-- fake array constructor), so the match is on a literal; each
+	-- alternative's body is extended by a local binding for each
+	-- constructor argument, which are bound to array elements starting
+	-- with the first
+	--
+	mkAlt indexP (con, args, MatchResult _ bodyFun) = 
+	  bodyFun fail					`thenDs` \body     ->
+	  returnDs (LitAlt lit, [], mkDsLets binds body)
+	  where
+	    lit   = MachInt $ toInteger (dataConSourceArity con)
+	    binds = [NonRec arg (indexExpr i) | (i, arg) <- zip [1..] args]
+	    --
+	    indexExpr i = mkApps (Var indexP) [Type elemTy, Var var, toInt i]
+	    toInt     i = mkConApp intDataCon [Lit $ MachInt i]
 \end{code}
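
For a concrete picture of what `mk_parrCase' generates, the sketch below models the match on a fake constructor of arity 2 in source terms, with lists standing in for [: :] and with hypothetical `lengthP'/`indexP' stand-ins for the PrelPArr operations looked up above (the 1-based indexing mirrors the `zip [1..]' in `mkAlt'):

    -- Sketch only: source-level model of the generated Core for the
    -- alternative  [:x, y:] -> x + y  with a default (failure) branch.
    lengthP :: [a] -> Int
    lengthP = length

    indexP :: [a] -> Int -> a
    indexP xs i = xs !! (i - 1)       -- 1-based, as in the zip [1..] above

    matchPArr2 :: [Int] -> Int -> Int
    matchPArr2 arr def =
      case lengthP arr of             -- outer case: scrutinise the array length
        2 -> let x = indexP arr 1     -- one binding per constructor argument
                 y = indexP arr 2
             in  x + y                -- the alternative's body
        _ -> def                      -- DEFAULT: fall through to the failure expression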
 
 
diff --git a/ghc/compiler/deSugar/Match.lhs b/ghc/compiler/deSugar/Match.lhs
index 74be345ca1853189e91399b8f875c34ca6e794fc..1f9fcdadf26c6477e97a8d4f5c8546293b41a688 100644
--- a/ghc/compiler/deSugar/Match.lhs
+++ b/ghc/compiler/deSugar/Match.lhs
@@ -24,7 +24,8 @@ import MatchCon		( matchConFamily )
 import MatchLit		( matchLiterals )
 import PrelInfo		( pAT_ERROR_ID )
 import TcType		( mkTyVarTys, Type, tcTyConAppArgs, tcEqType )
-import TysWiredIn	( nilDataCon, consDataCon, mkTupleTy, mkListTy, tupleCon )
+import TysWiredIn	( nilDataCon, consDataCon, mkTupleTy, mkListTy,
+			  tupleCon, parrFakeCon, mkPArrTy )
 import BasicTypes	( Boxity(..) )
 import UniqSet
 import ErrUtils		( addWarnLocHdrLine, dontAddErrLoc )
@@ -314,7 +315,8 @@ Replace the `as' pattern @x@@p@ with the pattern p and a binding @x = v@.
 \item
 Removing lazy (irrefutable) patterns (you don't want to know...).
 \item
-Converting explicit tuple- and list-pats into ordinary @ConPats@.
+Converting explicit tuple-, list-, and parallel-array-pats into ordinary
+@ConPats@. 
 \item
 Convert the literal pat "" to [].
 \end{itemize}
@@ -441,6 +443,15 @@ tidy1 v (ListPat ty pats) match_result
 	      (ConPat nilDataCon  list_ty [] [] [])
 	      pats
 
+-- introduce fake parallel array constructors to be able to handle parallel
+-- arrays with the existing machinery for constructor patterns
+--
+tidy1 v (PArrPat ty pats) match_result
+  = returnDs (parrConPat, match_result)
+  where
+    arity      = length pats
+    parrConPat = ConPat (parrFakeCon arity) (mkPArrTy ty) [] [] pats
+
 tidy1 v (TuplePat pats boxity) match_result
   = returnDs (tuple_ConPat, match_result)
   where
diff --git a/ghc/compiler/hsSyn/HsExpr.lhs b/ghc/compiler/hsSyn/HsExpr.lhs
index 91ddad3b9afe09c1babdf0bc68b3e8180e2b6b0d..419cb3161a58b65c5135b9aeec00d83703dd3861 100644
--- a/ghc/compiler/hsSyn/HsExpr.lhs
+++ b/ghc/compiler/hsSyn/HsExpr.lhs
@@ -101,6 +101,10 @@ data HsExpr id pat
 		PostTcType	-- Gives type of components of list
 		[HsExpr id pat]
 
+  | ExplicitPArr		-- syntactic parallel array: [:e1, ..., en:]
+		PostTcType	-- type of elements of the parallel array
+		[HsExpr id pat]
+
   | ExplicitTuple		-- tuple
 		[HsExpr id pat]
 				-- NB: Unit is ExplicitTuple []
@@ -137,6 +141,11 @@ data HsExpr id pat
   | ArithSeqOut
 		(HsExpr id pat)		-- (typechecked, of course)
 		(ArithSeqInfo id pat)
+  | PArrSeqIn           		-- arith. sequence for parallel array
+		(ArithSeqInfo id pat)	-- [:e1..e2:] or [:e1, e2..e3:]
+  | PArrSeqOut
+		(HsExpr id pat)		-- (typechecked, of course)
+		(ArithSeqInfo id pat)
 
   | HsCCall	CLabelString	-- call into the C world; string is
 		[HsExpr id pat]	-- the C function; exprs are the
@@ -305,6 +314,9 @@ ppr_expr (HsDoOut do_or_list_comp stmts _ _ _ _ _) = pprDo do_or_list_comp stmts
 ppr_expr (ExplicitList _ exprs)
   = brackets (fsep (punctuate comma (map ppr_expr exprs)))
 
+ppr_expr (ExplicitPArr _ exprs)
+  = pabrackets (fsep (punctuate comma (map ppr_expr exprs)))
+
 ppr_expr (ExplicitTuple exprs boxity)
   = tupleParens boxity (sep (punctuate comma (map ppr_expr exprs)))
 
@@ -327,6 +339,11 @@ ppr_expr (ArithSeqIn info)
 ppr_expr (ArithSeqOut expr info)
   = brackets (ppr info)
 
+ppr_expr (PArrSeqIn info)
+  = pabrackets (ppr info)
+ppr_expr (PArrSeqOut expr info)
+  = pabrackets (ppr info)
+
 ppr_expr EWildPat = char '_'
 ppr_expr (ELazyPat e) = char '~' <> pprParendExpr e
 ppr_expr (EAsPat v e) = ppr v <> char '@' <> pprParendExpr e
@@ -363,7 +380,11 @@ ppr_expr (DictApp expr dnames)
 	 4 (brackets (interpp'SP dnames))
 
 ppr_expr (HsType id) = ppr id
-    
+
+-- add parallel array brackets around a document
+--
+pabrackets   :: SDoc -> SDoc
+pabrackets p  = ptext SLIT("[:") <> p <> ptext SLIT(":]")    
 \end{code}
 
 Parenthesize unless very simple:
@@ -382,6 +403,7 @@ pprParendExpr expr
       HsVar _		    -> pp_as_was
       HsIPVar _		    -> pp_as_was
       ExplicitList _ _      -> pp_as_was
+      ExplicitPArr _ _      -> pp_as_was
       ExplicitTuple _ _	    -> pp_as_was
       HsPar _		    -> pp_as_was
 
@@ -589,6 +611,7 @@ depends on the context.  Consider the following contexts:
 		E :: rhs_ty
 	  Translation: E
 
+Array comprehensions are handled like list comprehensions -=chak
 
 \begin{code}
 consLetStmt :: HsBinds id pat -> [Stmt id pat] -> [Stmt id pat]
@@ -610,14 +633,20 @@ pprStmt (ParStmt stmtss)
 pprStmt (ParStmtOut stmtss)
  = hsep (map (\stmts -> ptext SLIT("| ") <> ppr stmts) stmtss)
 
-pprDo :: (Outputable id, Outputable pat) => HsDoContext -> [Stmt id pat] -> SDoc
+pprDo :: (Outputable id, Outputable pat) 
+      => HsDoContext -> [Stmt id pat] -> SDoc
 pprDo DoExpr stmts   = hang (ptext SLIT("do")) 2 (vcat (map ppr stmts))
-pprDo ListComp stmts = brackets $
-		       hang (pprExpr expr <+> char '|')
-			  4 (interpp'SP quals)
-		     where
-		       ResultStmt expr _ = last stmts	-- Last stmt should
-		       quals	         = init stmts	-- be an ResultStmt
+pprDo ListComp stmts = pprComp brackets   stmts
+pprDo PArrComp stmts = pprComp pabrackets stmts
+
+pprComp :: (Outputable id, Outputable pat) 
+	=> (SDoc -> SDoc) -> [Stmt id pat] -> SDoc
+pprComp brack stmts = brack $
+		      hang (pprExpr expr <+> char '|')
+			 4 (interpp'SP quals)
+		    where
+		      ResultStmt expr _ = last stmts  -- Last stmt should
+		      quals	        = init stmts  -- be a ResultStmt
 \end{code}
 
 %************************************************************************
@@ -667,7 +696,9 @@ data HsMatchContext id	-- Context of a Match or Stmt
   | RecUpd		-- Record update
   deriving ()
 
-data HsDoContext = ListComp | DoExpr
+data HsDoContext = ListComp 
+		 | DoExpr 
+		 | PArrComp	-- parallel array comprehension
 \end{code}
 
 \begin{code}
@@ -691,7 +722,10 @@ pprMatchContext RecUpd	     	  = ptext SLIT("In a record-update construct")
 pprMatchContext PatBindRhs   	  = ptext SLIT("In a pattern binding")
 pprMatchContext LambdaExpr   	  = ptext SLIT("In a lambda abstraction")
 pprMatchContext (DoCtxt DoExpr)   = ptext SLIT("In a 'do' expression pattern binding")
-pprMatchContext (DoCtxt ListComp) = ptext SLIT("In a 'list comprehension' pattern binding")
+pprMatchContext (DoCtxt ListComp) = 
+  ptext SLIT("In a 'list comprehension' pattern binding")
+pprMatchContext (DoCtxt PArrComp) = 
+  ptext SLIT("In an 'array comprehension' pattern binding")
 
 -- Used to generate the string for a *runtime* error message
 matchContextErrString (FunRhs fun)    	= "function " ++ showSDoc (ppr fun)
@@ -701,4 +735,5 @@ matchContextErrString RecUpd	      	= "record update"
 matchContextErrString LambdaExpr      	=  "lambda"
 matchContextErrString (DoCtxt DoExpr)   = "'do' expression"
 matchContextErrString (DoCtxt ListComp) = "list comprehension"
+matchContextErrString (DoCtxt PArrComp) = "array comprehension"
 \end{code}
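
The `pprComp' refactoring above renders the comprehension body once and lets the caller choose the brackets; a tiny stand-alone model of that design choice (all names here are made up):

    import Data.List (intercalate)

    -- Model of the bracket-parameterised comprehension printer introduced above.
    pprCompModel :: (String -> String) -> String -> [String] -> String
    pprCompModel brack expr quals = brack (expr ++ " | " ++ intercalate ", " quals)

    listBrack, parrBrack :: String -> String
    listBrack s = "["  ++ s ++ "]"
    parrBrack s = "[:" ++ s ++ ":]"

    -- pprCompModel parrBrack "x + 1" ["x <- xs"]  ==  "[:x + 1 | x <- xs:]"
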
diff --git a/ghc/compiler/hsSyn/HsPat.lhs b/ghc/compiler/hsSyn/HsPat.lhs
index 00df7797b42b788be659fb2e4bdc670f78f4676f..c801a86888a8ddc91a456d0bd8d72918c529e2ee 100644
--- a/ghc/compiler/hsSyn/HsPat.lhs
+++ b/ghc/compiler/hsSyn/HsPat.lhs
@@ -12,7 +12,7 @@ module HsPat (
 	failureFreePat, isWildPat, 
 	patsAreAllCons, isConPat, 
 	patsAreAllLits,	isLitPat,
-	collectPatBinders, collectPatsBinders,
+	collectPatBinders, collectOutPatBinders, collectPatsBinders,
 	collectSigTysFromPat, collectSigTysFromPats
     ) where
 
@@ -66,6 +66,8 @@ data InPat name
 
   | ListPatIn	    [InPat name]	-- syntactic list
 					-- must have >= 1 elements
+  | PArrPatIn	    [InPat name]	-- syntactic parallel array
+					-- must have >= 1 elements
   | TuplePatIn	    [InPat name] Boxity	-- tuple (boxed?)
 
   | RecPatIn	    name 		-- record
@@ -96,6 +98,9 @@ data OutPat id
   | ListPat		 	-- Syntactic list
 		    Type	-- The type of the elements
    	    	    [OutPat id]
+  | PArrPat		 	-- Syntactic parallel array
+		    Type	-- The type of the elements
+   	    	    [OutPat id]
 
   | TuplePat	    [OutPat id]	-- Tuple
 		    Boxity
@@ -158,6 +163,7 @@ pprInPat (LazyPatIn pat)      = char '~' <> ppr pat
 pprInPat (AsPatIn name pat)   = parens (hcat [ppr name, char '@', ppr pat])
 pprInPat (ParPatIn pat)	      = parens (pprInPat pat)
 pprInPat (ListPatIn pats)     = brackets (interpp'SP pats)
+pprInPat (PArrPatIn pats)     = pabrackets (interpp'SP pats)
 pprInPat (TuplePatIn pats bx) = tupleParens bx (interpp'SP pats)
 pprInPat (NPlusKPatIn n k _)  = parens (hcat [ppr n, char '+', ppr k])
 pprInPat (NPatIn l)	      = ppr l
@@ -179,6 +185,11 @@ pprInPat (RecPatIn con rpats)
     pp_rpat (v, p, _)    = hsep [ppr v, char '=', ppr p]
 
 pprInPat (TypePatIn ty) = ptext SLIT("{|") <> ppr ty <> ptext SLIT("|}")
+
+-- add parallel array brackets around a document
+--
+pabrackets   :: SDoc -> SDoc
+pabrackets p  = ptext SLIT("[:") <> p <> ptext SLIT(":]")
 \end{code}
 
 \begin{code}
@@ -210,6 +221,7 @@ pprOutPat (ConPat name ty tyvars dicts pats)
       _ -> hsep [ppr name, interppSP tyvars, interppSP dicts, interppSP pats]
 
 pprOutPat (ListPat ty pats)      = brackets (interpp'SP pats)
+pprOutPat (PArrPat ty pats)      = pabrackets (interpp'SP pats)
 pprOutPat (TuplePat pats boxity) = tupleParens boxity (interpp'SP pats)
 
 pprOutPat (RecPat con ty tvs dicts rpats)
@@ -278,6 +290,7 @@ failureFreePat (AsPat _ pat)		  = failureFreePat pat
 failureFreePat (ConPat con tys _ _ pats)  = only_con con && all failureFreePat pats
 failureFreePat (RecPat con _ _ _ fields)  = only_con con && and [ failureFreePat pat | (_,pat,_) <- fields ]
 failureFreePat (ListPat _ _)		  = False
+failureFreePat (PArrPat _ _)		  = False
 failureFreePat (TuplePat pats _)	  = all failureFreePat pats
 failureFreePat (DictPat _ _)		  = True
 failureFreePat other_pat		  = False   -- Literals, NPat
@@ -295,6 +308,7 @@ patsAreAllCons pat_list = all isConPat pat_list
 isConPat (AsPat _ pat)		= isConPat pat
 isConPat (ConPat _ _ _ _ _)	= True
 isConPat (ListPat _ _)		= True
+isConPat (PArrPat _ _)		= True
 isConPat (TuplePat _ _)		= True
 isConPat (RecPat _ _ _ _ _)	= True
 isConPat (DictPat ds ms)	= (length ds + length ms) > 1
@@ -318,6 +332,9 @@ collected is important; see @HsBinds.lhs@.
 collectPatBinders :: InPat a -> [a]
 collectPatBinders pat = collect pat []
 
+collectOutPatBinders :: OutPat a -> [a]
+collectOutPatBinders pat = collectOut pat []
+
 collectPatsBinders :: [InPat a] -> [a]
 collectPatsBinders pats = foldr collect [] pats
 
@@ -333,11 +350,31 @@ collect (ConPatIn c pats)   	 bndrs = foldr collect bndrs pats
 collect (ConOpPatIn p1 c f p2)   bndrs = collect p1 (collect p2 bndrs)
 collect (ParPatIn  pat)     	 bndrs = collect pat bndrs
 collect (ListPatIn pats)    	 bndrs = foldr collect bndrs pats
+collect (PArrPatIn pats)    	 bndrs = foldr collect bndrs pats
 collect (TuplePatIn pats _)  	 bndrs = foldr collect bndrs pats
 collect (RecPatIn c fields) 	 bndrs = foldr (\ (f,pat,_) bndrs -> collect pat bndrs) bndrs fields
 -- Generics
 collect (TypePatIn ty)           bndrs = bndrs
 -- assume the type variables do not need to be bound
+
+-- collect the bound *value* variables in renamed patterns; type variables
+-- are *not* collected
+--
+collectOut (WildPat _)	      	    bndrs = bndrs
+collectOut (VarPat var)      	    bndrs = var : bndrs
+collectOut (LazyPat pat)     	    bndrs = collectOut pat bndrs
+collectOut (AsPat a pat)     	    bndrs = a : collectOut pat bndrs
+collectOut (ListPat _ pats)  	    bndrs = foldr collectOut bndrs pats
+collectOut (PArrPat _ pats)  	    bndrs = foldr collectOut bndrs pats
+collectOut (TuplePat pats _) 	    bndrs = foldr collectOut bndrs pats
+collectOut (ConPat _ _ _ ds pats)   bndrs = ds ++ foldr collectOut bndrs pats
+collectOut (RecPat _ _ _ ds fields) bndrs = ds ++ foldr comb bndrs fields
+  where
+    comb (_, pat, _) bndrs = collectOut pat bndrs
+collectOut (LitPat _ _)	      	    bndrs = bndrs
+collectOut (NPat _ _ _)		    bndrs = bndrs
+collectOut (NPlusKPat n _ _ _ _)    bndrs = n : bndrs
+collectOut (DictPat ids1 ids2)      bndrs = ids1 ++ ids2 ++ bndrs
 \end{code}
 
 \begin{code}
@@ -359,6 +396,7 @@ collect_pat (ConPatIn c pats)      acc = foldr collect_pat acc pats
 collect_pat (ConOpPatIn p1 c f p2) acc = collect_pat p1 (collect_pat p2 acc)
 collect_pat (ParPatIn  pat)        acc = collect_pat pat acc
 collect_pat (ListPatIn pats)       acc = foldr collect_pat acc pats
+collect_pat (PArrPatIn pats)       acc = foldr collect_pat acc pats
 collect_pat (TuplePatIn pats _)    acc = foldr collect_pat acc pats
 collect_pat (RecPatIn c fields)    acc = foldr (\ (f,pat,_) acc -> collect_pat pat acc) acc fields
 -- Generics
diff --git a/ghc/compiler/hsSyn/HsSyn.lhs b/ghc/compiler/hsSyn/HsSyn.lhs
index cb42ba5625a37e422a9722be88bfc5405e5aa011..6a393cf2fa5f951d9eaccbe8031b1a97504b4bc0 100644
--- a/ghc/compiler/hsSyn/HsSyn.lhs
+++ b/ghc/compiler/hsSyn/HsSyn.lhs
@@ -23,7 +23,7 @@ module HsSyn (
 	module HsTypes,
 	Fixity, NewOrData, 
 
-	collectHsBinders, collectLocatedHsBinders, 
+	collectHsBinders, collectHsOutBinders, collectLocatedHsBinders, 
 	collectMonoBinders, collectLocatedMonoBinders,
 	collectSigTysFromMonoBinds,
 	hsModuleName, hsModuleImports
@@ -132,6 +132,15 @@ collectHsBinders (MonoBind b _ _)
 collectHsBinders (ThenBinds b1 b2)
  = collectHsBinders b1 ++ collectHsBinders b2
 
+-- corresponds to `collectHsBinders', but operates on renamed patterns
+--
+collectHsOutBinders :: HsBinds name (OutPat name) -> [name]
+collectHsOutBinders EmptyBinds = []
+collectHsOutBinders (MonoBind b _ _) 
+ = collectMonoOutBinders b
+collectHsOutBinders (ThenBinds b1 b2)
+ = collectHsOutBinders b1 ++ collectHsOutBinders b2
+
 collectLocatedMonoBinders :: MonoBinds name (InPat name) -> [(name,SrcLoc)]
 collectLocatedMonoBinders binds
   = go binds []
@@ -149,6 +158,17 @@ collectMonoBinders binds
     go (PatMonoBind pat _ loc) acc = collectPatBinders pat ++ acc
     go (FunMonoBind f _ _ loc) acc = f : acc
     go (AndMonoBinds bs1 bs2)  acc = go bs1 (go bs2 acc)
+
+-- corresponds to `collectMonoBinders', but operates on renamed patterns
+--
+collectMonoOutBinders :: MonoBinds name (OutPat name) -> [name]
+collectMonoOutBinders binds
+  = go binds []
+  where
+    go EmptyMonoBinds	       acc = acc
+    go (PatMonoBind pat _ loc) acc = collectOutPatBinders pat ++ acc
+    go (FunMonoBind f _ _ loc) acc = f : acc
+    go (AndMonoBinds bs1 bs2)  acc = go bs1 (go bs2 acc)
 \end{code}
 
 %************************************************************************
diff --git a/ghc/compiler/hsSyn/HsTypes.lhs b/ghc/compiler/hsSyn/HsTypes.lhs
index 5e9b874cce4ec1b2c0a52c368b5bfbd476762d99..acdf8fd408bab60e434cfa5a531c804dd06913f4 100644
--- a/ghc/compiler/hsSyn/HsTypes.lhs
+++ b/ghc/compiler/hsSyn/HsTypes.lhs
@@ -43,9 +43,9 @@ import Var		( TyVar, tyVarKind )
 import Subst		( substTyWith )
 import PprType		( {- instance Outputable Kind -}, pprParendKind )
 import BasicTypes	( Boxity(..), Arity, IPName, tupleParens )
-import PrelNames	( mkTupConRdrName, listTyConKey, usOnceTyConKey, usManyTyConKey, hasKey,
-			  usOnceTyConName, usManyTyConName
-			)
+import PrelNames	( mkTupConRdrName, listTyConKey, parrTyConKey,
+			  usOnceTyConKey, usManyTyConKey, hasKey,
+			  usOnceTyConName, usManyTyConName )
 import FiniteMap
 import Util		( eqListBy, lengthIs )
 import Outputable
@@ -98,6 +98,8 @@ data HsType name
 
   | HsListTy		(HsType name)	-- Element type
 
+  | HsPArrTy		(HsType name)	-- Elem. type of parallel array: [:t:]
+
   | HsTupleTy		(HsTupCon name)
 			[HsType name]	-- Element types (length gives arity)
   -- Generics
@@ -275,6 +277,9 @@ ppr_mono_ty ctxt_prec (HsFunTy ty1 ty2)
 
 ppr_mono_ty ctxt_prec (HsTupleTy con tys) = hsTupParens con (interpp'SP tys)
 ppr_mono_ty ctxt_prec (HsListTy ty)	  = brackets (ppr_mono_ty pREC_TOP ty)
+ppr_mono_ty ctxt_prec (HsPArrTy ty)	  = pabrackets (ppr_mono_ty pREC_TOP ty)
+  where
+    pabrackets p = ptext SLIT("[:") <> p <> ptext SLIT(":]")
 
 ppr_mono_ty ctxt_prec (HsAppTy fun_ty arg_ty)
   = maybeParen (ctxt_prec >= pREC_CON)
@@ -344,6 +349,7 @@ toHsType ty@(TyConApp tc tys)	-- Must be saturated because toHsType's arg is of
   | not saturated	       = generic_case
   | isTupleTyCon tc	       = HsTupleTy (HsTupCon (getName tc) (tupleTyConBoxity tc) (tyConArity tc)) tys'
   | tc `hasKey` listTyConKey   = HsListTy (head tys')
+  | tc `hasKey` parrTyConKey   = HsPArrTy (head tys')
   | tc `hasKey` usOnceTyConKey = hsUsOnce_Name		 -- must print !, . unqualified
   | tc `hasKey` usManyTyConKey = hsUsMany_Name		 -- must print !, . unqualified
   | otherwise		       = generic_case
@@ -449,6 +455,9 @@ eq_hsType env (HsTupleTy c1 tys1) (HsTupleTy c2 tys2)
 eq_hsType env (HsListTy ty1) (HsListTy ty2)
   = eq_hsType env ty1 ty2
 
+eq_hsType env (HsPArrTy ty1) (HsPArrTy ty2)
+  = eq_hsType env ty1 ty2
+
 eq_hsType env (HsAppTy fun_ty1 arg_ty1) (HsAppTy fun_ty2 arg_ty2)
   = eq_hsType env fun_ty1 fun_ty2 && eq_hsType env arg_ty1 arg_ty2
 
diff --git a/ghc/compiler/main/CmdLineOpts.lhs b/ghc/compiler/main/CmdLineOpts.lhs
index e19c24ac277e2c8cd1da609c2246ea52d197daff..ea6ea711ec0a84d34c36b40079805600df7d06b6 100644
--- a/ghc/compiler/main/CmdLineOpts.lhs
+++ b/ghc/compiler/main/CmdLineOpts.lhs
@@ -67,6 +67,7 @@ module CmdLineOpts (
 	opt_Parallel,
 	opt_SMP,
 	opt_RuntimeTypes,
+	opt_Flatten,
 
 	-- optimisation opts
 	opt_NoMethodSharing,
@@ -255,6 +256,7 @@ data DynFlag
    | Opt_D_dump_simpl_stats
    | Opt_D_dump_tc_trace
    | Opt_D_dump_BCOs
+   | Opt_D_dump_vect
    | Opt_D_source_stats
    | Opt_D_verbose_core2core
    | Opt_D_verbose_stg2stg
@@ -287,6 +289,7 @@ data DynFlag
    | Opt_AllowIncoherentInstances
    | Opt_NoMonomorphismRestriction
    | Opt_GlasgowExts
+   | Opt_PArr			       -- syntactic support for parallel arrays
    | Opt_Generics
    | Opt_NoImplicitPrelude 
 
@@ -565,6 +568,7 @@ opt_MaxContextReductionDepth	= lookup_def_int "-fcontext-stack" mAX_CONTEXT_REDU
 opt_NumbersStrict		= lookUp  SLIT("-fnumbers-strict")
 opt_Parallel			= lookUp  SLIT("-fparallel")
 opt_SMP				= lookUp  SLIT("-fsmp")
+opt_Flatten			= lookUp  SLIT("-fflatten")
 
 -- optimisation opts
 opt_NoMethodSharing		= lookUp  SLIT("-fno-method-sharing")
@@ -645,6 +649,7 @@ isStaticHscFlag f =
 	"fnumbers-strict",
 	"fparallel",
 	"fsmp",
+	"fflatten",
 	"fsemi-tagging",
 	"ffoldr-build-on",
 	"flet-no-escape",
diff --git a/ghc/compiler/main/DriverFlags.hs b/ghc/compiler/main/DriverFlags.hs
index a507e8fb6d91f686523b67e9b880b696874efa5f..bfb3c00880ffec6720b4a02ebc5a56767fd2274f 100644
--- a/ghc/compiler/main/DriverFlags.hs
+++ b/ghc/compiler/main/DriverFlags.hs
@@ -1,7 +1,7 @@
 {-# OPTIONS -#include "hschooks.h" #-}
 
 -----------------------------------------------------------------------------
--- $Id: DriverFlags.hs,v 1.85 2002/01/25 10:28:14 simonmar Exp $
+-- $Id: DriverFlags.hs,v 1.86 2002/02/11 08:20:41 chak Exp $
 --
 -- Driver flags
 --
@@ -198,6 +198,7 @@ static_flags =
   ,  ( "gransim"	, NoArg (addNoDups v_Ways	WayGran) )
   ,  ( "smp"		, NoArg (addNoDups v_Ways	WaySMP) )
   ,  ( "debug"		, NoArg (addNoDups v_Ways	WayDebug) )
+  ,  ( "ndp"		, NoArg (addNoDups v_Ways	WayNDP) )
  	-- ToDo: user ways
 
 	------ Debugging ----------------------------------------------------
@@ -393,6 +394,7 @@ dynamic_flags = [
   ,  ( "ddump-hi-diffs",         NoArg (setDynFlag Opt_D_dump_hi_diffs) )
   ,  ( "ddump-hi",               NoArg (setDynFlag Opt_D_dump_hi) )
   ,  ( "ddump-minimal-imports",  NoArg (setDynFlag Opt_D_dump_minimal_imports) )
+  ,  ( "ddump-vect",         	 NoArg (setDynFlag Opt_D_dump_vect) )
   ,  ( "dcore-lint",       	 NoArg (setDynFlag Opt_DoCoreLinting) )
   ,  ( "dstg-lint",        	 NoArg (setDynFlag Opt_DoStgLinting) )
   ,  ( "dusagesp-lint",        	 NoArg (setDynFlag Opt_DoUSPLinting) )
@@ -444,6 +446,7 @@ fFlags = [
   ( "warn-unused-matches",       	Opt_WarnUnusedMatches ),
   ( "warn-deprecations",         	Opt_WarnDeprecations ),
   ( "glasgow-exts", 		 	Opt_GlasgowExts ),
+  ( "parr",				Opt_PArr ),
   ( "allow-overlapping-instances", 	Opt_AllowOverlappingInstances ),
   ( "allow-undecidable-instances", 	Opt_AllowUndecidableInstances ),
   ( "allow-incoherent-instances", 	Opt_AllowIncoherentInstances ),
diff --git a/ghc/compiler/main/DriverState.hs b/ghc/compiler/main/DriverState.hs
index 2daa8177f463b3d5a25734d98f16757867958819..39934b99a552211020ea102aa63833041b3e44f2 100644
--- a/ghc/compiler/main/DriverState.hs
+++ b/ghc/compiler/main/DriverState.hs
@@ -1,5 +1,5 @@
 -----------------------------------------------------------------------------
--- $Id: DriverState.hs,v 1.66 2002/01/04 16:02:04 simonmar Exp $
+-- $Id: DriverState.hs,v 1.67 2002/02/11 08:20:41 chak Exp $
 --
 -- Settings for the driver
 --
@@ -573,6 +573,7 @@ data WayName
   | WayPar
   | WayGran
   | WaySMP
+  | WayNDP
   | WayDebug
   | WayUser_a
   | WayUser_b
@@ -598,7 +599,9 @@ GLOBAL_VAR(v_Ways, [] ,[WayName])
 allowed_combination way = way `elem` combs
   where  -- the sub-lists must be ordered according to WayName, 
          -- because findBuildTag sorts them
-    combs                = [ [WayProf,WayUnreg], [WayProf,WaySMP] ]
+    combs                = [ [WayProf, WayUnreg], 
+			     [WayProf, WaySMP]  ,
+			     [WayProf, WayNDP]  ]
 
 findBuildTag :: IO [String]  -- new options
 findBuildTag = do
@@ -703,6 +706,10 @@ way_details =
 	, "-optc-DSMP"
 	, "-fvia-C" ]),
 
+    (WayNDP, Way  "ndp" "Nested data parallelism"
+	[ "-fparr"
+	, "-fflatten"]),
+
     (WayUser_a,  Way  "a"  "User way 'a'"  ["$WAY_a_REAL_OPTS"]),	
     (WayUser_b,  Way  "b"  "User way 'b'"  ["$WAY_b_REAL_OPTS"]),	
     (WayUser_c,  Way  "c"  "User way 'c'"  ["$WAY_c_REAL_OPTS"]),	
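
With `WayNDP' registered above, selecting the "ndp" way on the command line implies the two flags listed in its `Way' entry; for example (hypothetical module name):

    ghc -ndp Example.hs

which, per the `way_details' entry, behaves as if `-fparr -fflatten' had been passed, in addition to the usual way build-tag handling.
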
diff --git a/ghc/compiler/main/HscMain.lhs b/ghc/compiler/main/HscMain.lhs
index 9a8e23fae250012bd4e6363412df875f8e7878c8..5267fbae58305e14d4136337724366e6da106f20 100644
--- a/ghc/compiler/main/HscMain.lhs
+++ b/ghc/compiler/main/HscMain.lhs
@@ -45,7 +45,7 @@ import Id		( idName )
 import IdInfo		( CafInfo(..), CgInfoEnv, CgInfo(..) )
 import StringBuffer	( hGetStringBuffer, freeStringBuffer )
 import Parser
-import Lex		( PState(..), ParseResult(..) )
+import Lex		( PState(..), ParseResult(..), ExtFlags(..), mkPState )
 import SrcLoc		( mkSrcLoc )
 import Finder		( findModule )
 import Rename		( checkOldIface, renameModule, closeIfaceDecls )
@@ -57,6 +57,7 @@ import MkIface		( mkFinalIface )
 import TcModule
 import InstEnv		( emptyInstEnv )
 import Desugar
+import Flattening       ( flatten, flattenExpr )
 import SimplCore
 import CoreUtils	( coreBindsSize )
 import CoreTidy		( tidyCorePgm )
@@ -245,6 +246,13 @@ hscRecomp ghci_mode dflags have_object
              <- _scc_ "DeSugar" 
 		deSugar dflags pcs_tc hst this_mod print_unqual tc_result
 
+ 	    -------------------
+ 	    -- FLATTENING
+ 	    -------------------
+	; flat_details
+	     <- _scc_ "Flattening"
+		flatten dflags pcs_tc hst ds_details
+
 	; pcs_middle
 	    <- _scc_ "pcs_middle"
 	        if ghci_mode == OneShot 
@@ -271,7 +279,7 @@ hscRecomp ghci_mode dflags have_object
  	    -------------------
 	; simpl_details
 	     <- _scc_     "Core2Core"
-		core2core dflags pcs_middle hst dont_discard ds_details
+		core2core dflags pcs_middle hst dont_discard flat_details
 
  	    -------------------
  	    -- TIDY
@@ -411,12 +419,11 @@ myParseModule dflags src_filename
 
       buf <- hGetStringBuffer True{-expand tabs-} src_filename
 
-      let glaexts | dopt Opt_GlasgowExts dflags = 1#
-	          | otherwise 		        = 0#
+      let exts = ExtFlags {glasgowExtsEF = dopt Opt_GlasgowExts dflags,
+			   parrEF	 = dopt Opt_PArr	dflags}
+	  loc  = mkSrcLoc (_PK_ src_filename) 1
 
-      case parseModule buf PState{ bol = 0#, atbol = 1#,
-	 		           context = [], glasgow_exts = glaexts,
-  			           loc = mkSrcLoc (_PK_ src_filename) 1 } of {
+      case parseModule buf (mkPState loc exts) of {
 
 	PFailed err -> do { hPutStrLn stderr (showSDoc err);
                             freeStringBuffer buf;
@@ -549,8 +556,11 @@ hscStmt dflags hst hit pcs0 icontext stmt just_expr
 		-- Desugar it
 	  ds_expr <- deSugarExpr dflags pcs2 hst iNTERACTIVE print_unqual tc_expr
 	
+		-- Flatten it
+	; flat_expr <- flattenExpr dflags pcs2 hst ds_expr
+
 		-- Simplify it
-	; simpl_expr <- simplifyExpr dflags pcs2 hst ds_expr
+	; simpl_expr <- simplifyExpr dflags pcs2 hst flat_expr
 
 		-- Tidy it (temporary, until coreSat does cloning)
 	; tidy_expr <- tidyCoreExpr simpl_expr
@@ -582,12 +592,11 @@ hscParseStmt dflags str
 
       buf <- stringToStringBuffer str
 
-      let glaexts | dopt Opt_GlasgowExts dflags = 1#
-       	          | otherwise  	                = 0#
+      let exts = ExtFlags {glasgowExtsEF = dopt Opt_GlasgowExts dflags,
+			   parrEF	 = dopt Opt_PArr	dflags}
+	  loc  = mkSrcLoc SLIT("<interactive>") 1
 
-      case parseStmt buf PState{ bol = 0#, atbol = 1#,
-	 		         context = [], glasgow_exts = glaexts,
-			         loc = mkSrcLoc SLIT("<interactive>") 1 } of {
+      case parseStmt buf (mkPState loc exts) of {
 
 	PFailed err -> do { hPutStrLn stderr (showSDoc err);
 --	Not yet implemented in <4.11    freeStringBuffer buf;
@@ -667,13 +676,11 @@ hscThing dflags hst hit pcs0 ic str
 myParseIdentifier dflags str
   = do buf <- stringToStringBuffer str
  
-       let glaexts | dopt Opt_GlasgowExts dflags = 1#
-		   | otherwise			 = 0#
+       let exts = ExtFlags {glasgowExtsEF = dopt Opt_GlasgowExts dflags,
+			    parrEF	  = dopt Opt_PArr	 dflags}
+	   loc  = mkSrcLoc SLIT("<interactive>") 1
 
-       case parseIdentifier buf 
-		PState{ bol = 0#, atbol = 1#,
- 		        context = [], glasgow_exts = glaexts,
-		        loc = mkSrcLoc SLIT("<interactive>") 1 } of
+       case parseIdentifier buf (mkPState loc exts) of
 
 	  PFailed err -> do { hPutStrLn stderr (showSDoc err);
            		      freeStringBuffer buf;
diff --git a/ghc/compiler/main/ParsePkgConf.y b/ghc/compiler/main/ParsePkgConf.y
index cae45bc84d4b6d9333193886a3e13e7fb057499c..c6e65801f3491aa19245c91f30136bcfb25b5f51 100644
--- a/ghc/compiler/main/ParsePkgConf.y
+++ b/ghc/compiler/main/ParsePkgConf.y
@@ -81,9 +81,10 @@ happyError buf PState{ loc = loc } = PFailed (srcParseErr buf loc)
 loadPackageConfig :: FilePath -> IO [PackageConfig]
 loadPackageConfig conf_filename = do
    buf <- hGetStringBuffer False conf_filename
-   case parse buf PState{ bol = 0#, atbol = 1#,
-	 	          context = [], glasgow_exts = 0#,
-  			  loc = mkSrcLoc (_PK_ conf_filename) 1 } of
+   let loc  = mkSrcLoc (_PK_ conf_filename) 1
+       exts = ExtFlags {glasgowExtsEF = False,
+			parrEF	      = False}
+   case parse buf (mkPState loc exts) of
 	PFailed err -> do
 	    freeStringBuffer buf
             throwDyn (InstallationError (showSDoc err))
diff --git a/ghc/compiler/ndpFlatten/FlattenInfo.hs b/ghc/compiler/ndpFlatten/FlattenInfo.hs
new file mode 100644
index 0000000000000000000000000000000000000000..4a08c69ba3d8fabbd0f533bfdc74ac73ebfde05f
--- /dev/null
+++ b/ghc/compiler/ndpFlatten/FlattenInfo.hs
@@ -0,0 +1,43 @@
+--  $Id$
+--
+--  Copyright (c) 2002 Manuel M T Chakravarty & Gabriele Keller
+--
+--  Information for modules outside of the flattening module collection.
+--
+--- DESCRIPTION ---------------------------------------------------------------
+--
+--  This module contains information that is needed, and thus imported, by
+--  modules that are otherwise independent of flattening and may in fact be
+--  directly or indirectly imported by some of the flattening-related
+--  modules.  This is to avoid cyclic module dependencies.
+-- 
+--- DOCU ----------------------------------------------------------------------
+--
+--  Language: Haskell 98
+--
+--- TODO ----------------------------------------------------------------------
+--
+
+module FlattenInfo (
+  namesNeededForFlattening
+) where
+
+import CmdLineOpts (opt_Flatten)
+import NameSet     (FreeVars, emptyFVs, mkFVs)
+import PrelNames   (fstName, andName, orName, lengthPName, replicatePName,
+		    mapPName, bpermutePName, bpermuteDftPName, indexOfPName)
+
+
+-- this is a list of names that need to be available if flattening is
+-- performed (EXPORTED)
+--
+-- * needs to be kept in sync with the names used in Core generation in
+--   `FlattenMonad' and `NDPCoreUtils'
+--
+namesNeededForFlattening :: FreeVars
+namesNeededForFlattening
+  | not opt_Flatten = emptyFVs		-- none without -fflatten
+  | otherwise       = mkFVs
+    [fstName, andName, orName, lengthPName, replicatePName, mapPName,
+    bpermutePName, bpermuteDftPName, indexOfPName]
+    -- stuff from PrelGHC doesn't have to go here
diff --git a/ghc/compiler/ndpFlatten/FlattenMonad.hs b/ghc/compiler/ndpFlatten/FlattenMonad.hs
new file mode 100644
index 0000000000000000000000000000000000000000..1a6955e26a6711c62488af96551820a276e6fdb3
--- /dev/null
+++ b/ghc/compiler/ndpFlatten/FlattenMonad.hs
@@ -0,0 +1,454 @@
+--  $Id$
+--
+--  Copyright (c) [2001..2002] Manuel M T Chakravarty & Gabriele Keller
+--
+--  Monad maintaining parallel contexts and substitutions for flattening.
+--
+--- DESCRIPTION ---------------------------------------------------------------
+--
+--  The flattening transformation needs to perform a fair amount of plumbing.
+--  It needs to maintain a set of variables, called the parallel context for
+--  lifting, variable substitutions in case alternatives, and so on.
+--  Moreover, we need to manage uniques to create new variables.  The monad
+--  defined in this module takes care of maintaining this state.
+-- 
+--- DOCU ----------------------------------------------------------------------
+--
+--  Language: Haskell 98
+--
+--  * a parallel context is a set of variables that get vectorised during a
+--    lifting transformations (ie, their type changes from `t' to `[:t:]')
+--
+--  * all vectorised variables in a parallel context have the same size; we
+--    call this also the size of the parallel context
+--
+--  * we represent contexts by maps that give the lifted version of a variable
+--    (remember that in GHC, variables contain type information that changes
+--    during lifting)
+--
+--- TODO ----------------------------------------------------------------------
+--
+--  * Assumptions currently made that should (if they turn out to be true) be
+--    documented in The Commentary:
+--
+--    - Local bindings can be copied without any need to alpha-rename bound
+--      variables (or their uniques).  Such renaming is only necessary when
+--      bindings in a recursive group are replicated (implying that this is
+--      required in the case of top-level bindings).  (Note: The CoreTidy path
+--      generates global uniques before code generation.)
+--
+--  * One FIXME left to resolve.
+--
+
+module FlattenMonad (
+
+  -- monad definition
+  --
+  Flatten, runFlatten,
+
+  -- variable generation
+  --
+  newVar, mkBind,
+  
+  -- context management & query operations
+  --
+  extendContext, packContext, liftVar, liftConst, intersectWithContext,
+
+  -- construction of prelude functions
+  --
+  mk'fst, mk'eq, mk'neq, mk'and, mk'or, mk'lengthP, mk'replicateP, mk'mapP,
+  mk'bpermuteP, mk'bpermuteDftP, mk'indexOfP
+) where
+
+-- standard
+import Monad	    (mplus)
+
+-- GHC
+import CmdLineOpts  (opt_Flatten)
+import Panic        (panic)
+import Outputable   (Outputable(ppr), pprPanic)
+import UniqSupply   (UniqSupply, splitUniqSupply, uniqFromSupply)
+import OccName	    (UserFS)
+import Var          (Var(..))
+import Id	    (Id, mkSysLocal)
+import Name	    (Name)
+import VarSet       (VarSet, emptyVarSet, unitVarSet, extendVarSet,
+		     varSetElems, unionVarSet)
+import VarEnv       (VarEnv, emptyVarEnv, unitVarEnv, zipVarEnv, plusVarEnv,
+		     elemVarEnv, lookupVarEnv, lookupVarEnv_NF, delVarEnvList)
+import TyCon        (tyConName)
+import Type	    (Type, tyConAppTyCon)
+import HscTypes	    (HomeSymbolTable, PersistentCompilerState(..),
+		     TyThing(..), lookupType)
+import PrelNames    (charPrimTyConName, intPrimTyConName, floatPrimTyConName,
+		     doublePrimTyConName, fstName, andName, orName,
+		     eqCharName, eqIntName, eqFloatName, eqDoubleName,
+		     neqCharName, neqIntName, neqFloatName, neqDoubleName,
+		     lengthPName, replicatePName, mapPName, bpermutePName,
+		     bpermuteDftPName, indexOfPName)
+import CoreSyn      (Expr(..), Bind(..), CoreBndr, CoreExpr, CoreBind, mkApps,
+		     bindersOfBinds)
+import CoreUtils    (exprType)
+
+-- friends
+import NDPCoreUtils (parrElemTy)
+
+
+-- definition of the monad
+-- -----------------------
+
+-- state maintained by the flattening monad
+--
+data FlattenState = FlattenState {
+
+		      -- our source for uniques
+		      --
+		      us       :: UniqSupply,
+
+		      -- environment containing all known names (including all
+		      -- Prelude functions)
+		      --
+		      env      :: Name -> Id,
+
+		      -- this variable determines the parallel context; if
+		      -- `Nothing', we are in pure vectorisation mode, no
+		      -- lifting going on
+		      --
+		      ctxtVar  :: Maybe Var,
+
+		      -- environment that maps each variable that is
+		      -- vectorised in the current parallel context to the
+		      -- vectorised version of that variable
+		      --
+		      ctxtEnv :: VarEnv Var,
+
+		      -- those variables from the *domain* of `ctxtEnv' that
+		      -- have been used since the last context restriction (cf.
+		      -- `restrictContext') 
+		      --
+		      usedVars :: VarSet
+		    }
+
+-- initial value of the flattening state
+--
+initialFlattenState :: PersistentCompilerState 
+		    -> HomeSymbolTable 
+		    -> UniqSupply 
+		    -> FlattenState
+initialFlattenState pcs hst us = 
+  FlattenState {
+    us	     = us,
+    env      = lookup,
+    ctxtVar  = Nothing,
+    ctxtEnv  = emptyVarEnv,
+    usedVars = emptyVarSet
+  }
+  where
+    lookup n = 
+      case lookupType hst (pcs_PTE pcs) n of
+        Just (AnId v) -> v 
+	_             -> pprPanic "FlattenMonad: unknown name:" (ppr n)
+
+-- the monad representation (EXPORTED ABSTRACTLY)
+--
+newtype Flatten a = Flatten {
+		      unFlatten :: (FlattenState -> (a, FlattenState))
+		    }
+
+instance Monad Flatten where
+  return x = Flatten $ \s -> (x, s)
+  m >>= n  = Flatten $ \s -> let 
+			       (r, s') = unFlatten m s
+			     in
+			     unFlatten (n r) s'
+
+-- execute the given flattening computation (EXPORTED)
+--
+runFlatten :: PersistentCompilerState 
+	   -> HomeSymbolTable 
+	   -> UniqSupply 
+	   -> Flatten a 
+	   -> a    
+runFlatten pcs hst us m = fst $ unFlatten m (initialFlattenState pcs hst us)
+
+
+-- variable generation
+-- -------------------
+
+-- generate a new local variable whose name is based on the given lexeme and
+-- whose type is as specified in the second argument (EXPORTED)
+--
+newVar           :: UserFS -> Type -> Flatten Var
+newVar lexeme ty  = Flatten $ \state ->
+  let
+    (us1, us2) = splitUniqSupply (us state)
+    state'     = state {us = us2}
+  in
+  (mkSysLocal lexeme (uniqFromSupply us1) ty, state')
+
+-- generate a non-recursive binding using a new binder whose name is derived
+-- from the given lexeme (EXPORTED)
+--
+mkBind          :: UserFS -> CoreExpr -> Flatten (CoreBndr, CoreBind)
+mkBind lexeme e  =
+  do
+    v <- newVar lexeme (exprType e)
+    return (v, NonRec v e)
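+
+-- A minimal usage sketch (for illustration only; `exampleBind', `lexeme', and
+-- `e' are hypothetical names, not part of this module):
+--
+--   exampleBind :: PersistentCompilerState -> HomeSymbolTable -> UniqSupply
+--               -> UserFS -> CoreExpr -> (CoreBndr, CoreBind)
+--   exampleBind pcs hst us lexeme e = runFlatten pcs hst us (mkBind lexeme e)
+--
+-- ie, computations composed from `newVar' and `mkBind' are run by handing the
+-- persistent compiler state, the home symbol table, and a unique supply to
+-- `runFlatten'.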
+
+
+-- context management
+-- ------------------
+
+-- extend the parallel context by the given set of variables (EXPORTED)
+--
+-- * if there is no parallel context at the moment, the first element of the
+--   variable list will be used to determine the new parallel context
+--
+-- * the second argument is executed in the current context extended with the
+--   given variables
+--
+-- * the variables must already have been lifted by transforming their type,
+--   but they *must* have retained their original name (or, at least, their
+--   unique); this is needed so that they match the original variable in
+--   variable environments
+--
+-- * any trace of the given set of variables has to be removed from the state
+--   at the end of this operation
+--
+extendContext      :: [Var] -> Flatten a -> Flatten a
+extendContext [] m  = m
+extendContext vs m  = Flatten $ \state -> 
+  let 
+    extState       = state {
+		       ctxtVar = ctxtVar state `mplus` Just (head vs),
+		       ctxtEnv = ctxtEnv state `plusVarEnv` zipVarEnv vs vs
+		     }
+    (r, extState') = unFlatten m extState
+    resState       = extState' { -- remove `vs' from the result state
+		       ctxtVar  = ctxtVar state,
+		       ctxtEnv  = ctxtEnv state,
+		       usedVars = usedVars extState' `delVarEnvList` vs
+		     }
+  in
+  (r, resState)
+
+-- execute the second argument in a restricted context (EXPORTED)
+--
+-- * all variables in the current parallel context are packed according to
+--   the permutation vector associated with the variable passed as the first
+--   argument (ie, all elements of vectorised context variables that are
+--   invalid in the restricted context are dropped)
+--
+-- * the returned list of core binders contains the operations that perform
+--   the restriction on all variables in the parallel context that *do* occur
+--   during the execution of the second argument (ie, `liftVar' is executed at
+--   least once on any such variable)
+--
+packContext        :: Var -> Flatten a -> Flatten (a, [CoreBind])
+packContext perm m  = Flatten $ \state ->
+  let
+    -- FIXME: Setting the packed environment to the unpacked one is a hack
+    --   of which I am not yet sure (a) whether it works and (b) whether it
+    --   is really worth it.  The one advantage is that we can then use a var
+    --   set, after all, instead of a var environment.
+    --
+    --	 The idea is the following: If we have to pack a variable `x', we
+    --	 generate `let{-NonRec-} x = bpermuteP perm x in ...'.  As this is a
+    --	 non-recursive binding, the lhs `x' overshadows the rhs `x' in the
+    --	 body of the let.
+    --
+    --   NB: If we leave it like this, `mkCoreBind' can be simplified.
+    packedCtxtEnv     = ctxtEnv state
+    packedState       = state {
+	                  ctxtVar  = fmap
+				       (lookupVarEnv_NF packedCtxtEnv)
+				       (ctxtVar state),
+		          ctxtEnv  = packedCtxtEnv, 
+		          usedVars = emptyVarSet
+		        }
+    (r, packedState') = unFlatten m packedState
+    resState	      = state {    -- revert to the unpacked context
+			  ctxtVar  = ctxtVar state,
+			  ctxtEnv  = ctxtEnv state
+		        }
+    bndrs	      = map mkCoreBind . varSetElems . usedVars $ packedState'
+
+    -- generate a binding for the packed variant of a context variable
+    --
+    mkCoreBind var    = let
+			  rhs = fst $ unFlatten (mk'bpermuteP (varType var) 
+							      (Var perm) 
+							      (Var var)
+						) state
+			in
+			NonRec (lookupVarEnv_NF packedCtxtEnv var) $ rhs
+		          
+  in
+  ((r, bndrs), resState)
+
+-- lift a single variable in the current context (EXPORTED)
+--
+-- * if the variable does not occur in the context, its value is vectorised to
+--   match the size of the current context
+--
+-- * otherwise, the variable is replaced by whatever the context environment
+--   maps it to (this may either be simply the lifted version of the original
+--   variable or a packed variant of that variable)
+--
+-- * the monad keeps track of all lifted variables that occur in the parallel
+--   context, so that `packContext' can determine the correct set of core
+--   bindings
+--
+liftVar     :: Var -> Flatten CoreExpr
+liftVar var  = Flatten $ \s ->
+  let 
+    v          = ctxtVarErr s
+    v'elemType = parrElemTy . varType $ v
+    len        = fst $ unFlatten (mk'lengthP v'elemType (Var v)) s
+    replicated = fst $ unFlatten (mk'replicateP (varType var) len (Var var)) s
+  in case lookupVarEnv (ctxtEnv s) var of
+    Just liftedVar -> (Var liftedVar, 
+		       s {usedVars = usedVars s `extendVarSet` var})
+    Nothing        -> (replicated, s)
+
+-- lift a constant expression in the current context (EXPORTED)
+--
+-- * the value of the constant expression is vectorised to match the current
+--   parallel context
+--
+liftConst   :: CoreExpr -> Flatten CoreExpr
+liftConst e  = Flatten $ \s ->
+  let
+     v          = ctxtVarErr s
+     v'elemType = parrElemTy . varType $ v
+     len        = fst $ unFlatten (mk'lengthP v'elemType (Var v)) s
+  in 
+  (fst $ unFlatten (mk'replicateP (exprType e) len e ) s, s)
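+
+-- Illustration only (not part of the interface): if the current context
+-- variable is `c', then, ignoring type arguments, `liftConst e' builds the
+-- Core expression
+--
+--   replicateP (lengthP c) e
+--
+-- and `liftVar x' builds the analogous `replicateP (lengthP c) x' whenever
+-- `x' is not recorded in the context environment; otherwise, `liftVar'
+-- simply returns the variable that the context environment maps `x' to.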
+
+-- pick those variables of the given set that occur (possibly in lifted form)
+-- in the current parallel context (EXPORTED)
+--
+-- * the variables returned are from the given set and *not* the corresponding
+--   context variables
+--
+intersectWithContext    :: VarSet -> Flatten [Var]
+intersectWithContext vs  = Flatten $ \s ->
+  let
+    vs' = filter (`elemVarEnv` ctxtEnv s) (varSetElems vs)
+  in
+  (vs', s)
+
+
+-- construct applications of prelude functions
+-- -------------------------------------------
+
+-- NB: keep all the used names listed in `FlattenInfo.namesNeededForFlattening'
+
+-- generate an application of `fst' (EXPORTED)
+--
+mk'fst           :: Type -> Type -> CoreExpr -> Flatten CoreExpr
+mk'fst ty1 ty2 a  = mkFunApp fstName [Type ty1, Type ty2, a]
+
+-- generate an application of `&&' (EXPORTED)
+--
+mk'and       :: CoreExpr -> CoreExpr -> Flatten CoreExpr
+mk'and a1 a2  = mkFunApp andName [a1, a2]
+
+-- generate an application of `||' (EXPORTED)
+--
+mk'or       :: CoreExpr -> CoreExpr -> Flatten CoreExpr
+mk'or a1 a2  = mkFunApp orName [a1, a2]
+
+-- generate an application of `==' where the arguments may only be literals
+-- that may occur in a Core case expression (i.e., `Char', `Int', `Float', and
+-- `Double') (EXPORTED)
+--
+mk'eq          :: Type -> CoreExpr -> CoreExpr -> Flatten CoreExpr
+mk'eq ty a1 a2  = mkFunApp eqName [a1, a2]
+		  where
+		    name = tyConName . tyConAppTyCon $ ty
+		    --
+		    eqName | name == charPrimTyConName   = eqCharName
+			   | name == intPrimTyConName    = eqIntName
+			   | name == floatPrimTyConName  = eqFloatName
+			   | name == doublePrimTyConName = eqDoubleName
+			   | otherwise 		         =
+			     pprPanic "FlattenMonad.mk'eq: " (ppr ty)
+
+-- generate an application of `/=' where the arguments may only be literals
+-- that may occur in a Core case expression (i.e., `Char', `Int', `Float', and
+-- `Double') (EXPORTED)
+--
+mk'neq          :: Type -> CoreExpr -> CoreExpr -> Flatten CoreExpr
+mk'neq ty a1 a2  = mkFunApp neqName [a1, a2]
+		   where
+		     name = tyConName . tyConAppTyCon $ ty
+		     --
+		     neqName | name == charPrimTyConName   = neqCharName
+			     | name == intPrimTyConName    = neqIntName
+			     | name == floatPrimTyConName  = neqFloatName
+			     | name == doublePrimTyConName = neqDoubleName
+			     | otherwise		   =
+			       pprPanic "FlattenMonad.mk'neq: " (ppr ty)
+
+-- generate an application of `lengthP' (EXPORTED)
+--
+mk'lengthP      :: Type -> CoreExpr -> Flatten CoreExpr
+mk'lengthP ty a  = mkFunApp lengthPName [Type ty, a]
+
+-- generate an application of `replicateP' (EXPORTED)
+--
+mk'replicateP          :: Type -> CoreExpr -> CoreExpr -> Flatten CoreExpr
+mk'replicateP ty a1 a2  = mkFunApp replicatePName [Type ty, a1, a2]
+
+-- generate an application of `mapP' (EXPORTED)
+--
+mk'mapP :: Type -> Type -> CoreExpr -> CoreExpr -> Flatten CoreExpr
+mk'mapP ty1 ty2 a1 a2  = mkFunApp mapPName [Type ty1, Type ty2, a1, a2]
+
+-- generate an application of `bpermuteP' (EXPORTED)
+--
+mk'bpermuteP          :: Type -> CoreExpr -> CoreExpr -> Flatten CoreExpr
+mk'bpermuteP ty a1 a2  = mkFunApp bpermutePName [Type ty, a1, a2]
+
+-- generate an application of `bpermuteDftP' (EXPORTED)
+--
+mk'bpermuteDftP :: Type -> CoreExpr -> CoreExpr -> CoreExpr -> Flatten CoreExpr
+mk'bpermuteDftP ty a1 a2 a3 = mkFunApp bpermuteDftPName [Type ty, a1, a2, a3]
+
+-- generate an application of `indexOfP' (EXPORTED)
+--
+mk'indexOfP          :: Type -> CoreExpr -> CoreExpr -> Flatten CoreExpr
+mk'indexOfP ty a1 a2  = mkFunApp indexOfPName [Type ty, a1, a2]
+
+
+-- auxiliary functions
+-- --------------------
+
+-- obtain the context variable, aborting if it is not available (as this
+-- signals an internal error in the usage of the `Flatten' monad)
+--
+ctxtVarErr   :: FlattenState -> Var
+ctxtVarErr s  = case ctxtVar s of
+		  Nothing -> panic "FlattenMonad.ctxtVarErr: No context \
+				   \variable available!"
+		  Just v  -> v
+
+-- given the name of a known function and a set of arguments (needs to include
+-- all needed type arguments), build a Core expression that applies the named
+-- function to those arguments
+--
+mkFunApp           :: Name -> [CoreExpr] -> Flatten CoreExpr
+mkFunApp name args  =
+  do
+    fun <- lookupName name
+    return $ mkApps (Var fun) args
+
+-- get the `Id' of a known `Name'
+--
+-- * this can be the `Name' of any function that's visible on the toplevel of
+--   the current compilation unit
+--
+lookupName      :: Name -> Flatten Id
+lookupName name  = Flatten $ \s ->
+  (env s name, s)
diff --git a/ghc/compiler/ndpFlatten/Flattening.hs b/ghc/compiler/ndpFlatten/Flattening.hs
new file mode 100644
index 0000000000000000000000000000000000000000..4733bc43cab3920afaf96288f249a1cbcc7b51f4
--- /dev/null
+++ b/ghc/compiler/ndpFlatten/Flattening.hs
@@ -0,0 +1,812 @@
+--  $Id$
+--
+--  Copyright (c) [2001..2002] Manuel M T Chakravarty & Gabriele Keller
+--  
+--  Vectorisation and lifting
+--
+--- DESCRIPTION ---------------------------------------------------------------
+--
+--  This module implements the vectorisation and function lifting
+--  transformations of the flattening transformation.
+-- 
+--- DOCU ----------------------------------------------------------------------
+--
+--  Language: Haskell 98 with C preprocessor
+--
+--  Types: 
+--    the transformation on types has five purposes:
+--
+--        1) for each type definition, derive the lifted version of this type
+--             liftTypeef
+--        2) change the type annotations of functions & variables acc. to rep.
+--             flattenType
+--        3) derive the type of a lifted function
+--             liftType
+--        4) sumtypes:
+--             this is the most fuzzy and complicated part. For each lifted
+--             sumtype we need to generate function to access and combine the
+--             component arrays
+--
+--   NOTE: the type information of variables and data constructors is *not*
+--          changed to reflect its representation. This has to be solved 
+--          somehow (???, FIXME)  using type indexed types
+--
+--   Vectorisation:
+--    is very naive at the moment.  One of the most striking inefficiencies is
+--    the treatment of applications: vect (app e1 e2) -> app (fst (vect e1))
+--    (vect e2), even if e1 is a lambda abstraction.  The vectorisation
+--    produces a pair consisting of the original and the lifted function, but
+--    the lifted version is discarded.  I'm also not sure how much of this
+--    would be thrown out by the simplifier eventually.
+--
+--        *) vectorise
+--
+--  Conventions:
+--
+--- TODO ----------------------------------------------------------------------
+--
+--   * look closer into the definition of type definition (TypeThing or so)
+--
+
+module Flattening (
+  flatten, flattenExpr, 
+) where 
+
+-- standard
+import Monad        (liftM, foldM)
+
+-- GHC
+import CmdLineOpts  (opt_Flatten)
+import Panic        (panic)
+import ErrUtils     (dumpIfSet_dyn)
+import UniqSupply   (UniqSupply, mkSplitUniqSupply)
+import CmdLineOpts  (DynFlag(..), DynFlags)
+import Literal      (Literal, literalType)
+import Var	    (Var(..),TyVar)
+import DataCon	    (DataCon, dataConTag)
+import TypeRep      (Type(..))
+import Type         (isTypeKind)
+import HscTypes	    (HomeSymbolTable, PersistentCompilerState, ModDetails(..))
+import CoreFVs	    (exprFreeVars)
+import CoreSyn	    (Expr(..), Bind(..), Alt(..), AltCon(..), Note(..),
+		     CoreBndr, CoreExpr, CoreBind, CoreAlt, mkLams, mkLets,
+		     mkApps, mkIntLitInt)  
+import PprCore      (pprCoreExpr)
+import CoreLint	    (showPass, endPass)
+
+import CoreUtils    (exprType, applyTypeToArg, mkPiType)
+import VarEnv       (IdEnv, mkVarEnv, zipVarEnv, extendVarEnv)
+import TysWiredIn   (mkTupleTy)
+import BasicTypes   (Boxity(..))
+import Outputable   (showSDoc, Outputable(..))
+
+
+-- friends
+import NDPCoreUtils (tupleTyArgs, funTyArgs, parrElemTy, isDefault,
+		     isLit, mkPArrTy, mkTuple, isSimpleExpr, boolTy, substIdEnv)
+import FlattenMonad (Flatten, runFlatten, mkBind, extendContext, packContext,
+		     liftVar, liftConst, intersectWithContext, mk'fst,
+		     mk'lengthP, mk'replicateP, mk'mapP, mk'bpermuteDftP,
+		     mk'indexOfP,mk'eq,mk'neq) 
+
+-- FIXME: for debugging - remove this
+import IOExts    (trace)
+
+
+#include "HsVersions.h"
+{-# INLINE slit #-}
+slit x = FastString.mkFastCharString# x
+-- FIXME: SLIT() doesn't work for some strange reason
+
+
+-- toplevel transformation
+-- -----------------------
+
+-- entry point to the flattening transformation for the compiler driver when
+-- compiling a complete module (EXPORTED) 
+--
+flatten :: DynFlags 
+	-> PersistentCompilerState 
+	-> HomeSymbolTable
+	-> ModDetails			-- the module to be flattened
+	-> IO ModDetails
+flatten dflags pcs hst modDetails@(ModDetails {md_binds = binds}) 
+  | not opt_Flatten = return modDetails -- skip without -fflatten
+  | otherwise       =
+  do
+    us <- mkSplitUniqSupply 'l'		-- 'l' as in fLattening
+    --
+    -- announce vectorisation
+    --
+    showPass dflags "Flattening [first phase: vectorisation]"
+    --
+    -- vectorise all toplevel bindings
+    --
+    let binds' = runFlatten pcs hst us $ vectoriseTopLevelBinds binds
+    --
+    -- and dump the result if requested
+    --
+    endPass dflags "Flattening [first phase: vectorisation]" 
+	    Opt_D_dump_vect binds'
+    return $ modDetails {md_binds = binds'}
+
+-- entry point to the flattening transformation for the compiler driver when
+-- compiling a single expression in interactive mode (EXPORTED) 
+--
+flattenExpr :: DynFlags 
+	    -> PersistentCompilerState 
+	    -> HomeSymbolTable 
+	    -> CoreExpr			-- the expression to be flattened
+	    -> IO CoreExpr
+flattenExpr dflags pcs hst expr
+  | not opt_Flatten = return expr       -- skip without -fflatten
+  | otherwise       =
+  do
+    us <- mkSplitUniqSupply 'l'		-- 'l' as in fLattening
+    --
+    -- announce vectorisation
+    --
+    showPass dflags "Flattening [first phase: vectorisation]"
+    --
+    -- vectorise the expression
+    --
+    let expr' = fst . runFlatten pcs hst us $ vectorise expr
+    --
+    -- and dump the result if requested
+    --
+    dumpIfSet_dyn dflags Opt_D_dump_vect "Vectorised expression"
+		  (pprCoreExpr expr')
+    return expr'
+
+
+-- vectorisation of bindings and expressions
+-- -----------------------------------------
+
+
+vectoriseTopLevelBinds:: [CoreBind] -> Flatten [CoreBind]
+vectoriseTopLevelBinds binds =
+  do
+    vbinds <- mapM vectoriseBind binds
+    return (adjustTypeBinds vbinds)
+
+adjustTypeBinds:: [CoreBind] -> [CoreBind]
+adjustTypeBinds vbinds =
+    let 
+       ids = concat (map extIds vbinds)
+       idEnv =  zipVarEnv ids ids
+     in map (substIdEnvBind idEnv) vbinds
+  where 
+    -- FIXME replace by 'bindersOf'
+    extIds (NonRec b expr) = [b]
+    extIds (Rec      bnds) = map fst bnds
+    substIdEnvBind idEnv (NonRec b expr) = NonRec b (substIdEnv idEnv expr)
+    substIdEnvBind idEnv (Rec bnds)      
+       = Rec (map (\ (b,e) -> (b, (substIdEnv idEnv e))) bnds) 
+
+-- vectorise a single core binder
+--
+vectoriseBind	              :: CoreBind -> Flatten CoreBind
+vectoriseBind (NonRec b expr)  = 
+  liftM (NonRec b) $ liftM fst $ vectorise expr
+vectoriseBind (Rec bindings)   = 
+  liftM Rec        $ mapM vectoriseOne bindings
+  where
+    vectoriseOne (b, expr) = 
+      do
+	(vexpr, ty) <- vectorise expr
+	return (b{varType = ty}, vexpr)
+
+
+-- Searches for function definitions and creates a lifted version for 
+-- each function.
+-- We have only two interesting cases:
+-- 1) function application  (ex1) (ex2)
+--      vectorise both subexpressions. The function will end up becoming a
+--      pair (orig. fun, lifted fun); choose the first component (in many
+--      cases, this is pretty inefficient, since the lifted version is
+--      generated although it is clear that it won't be used)
+-- 
+-- 2) lambda abstraction
+--      any function has to exist in two forms: its original form and its
+--      lifted form. Therefore, every lambda abstraction is transformed into
+--      a pair of functions: the original function and its lifted variant
+-- 
+--
+--  FIXME: currently, I use 'exprType' all over the place - this is terribly
+--  inefficient. It should be sufficient to change 'vectorise' and 'lift' to
+--  return the type of the result expression as well.
+--
+vectorise:: CoreExpr -> Flatten (CoreExpr, Type)
+vectorise (Var id)  =  
+  do 
+    let varTy  = varType id
+    let vecTy  = vectoriseTy varTy
+    return ((Var id{varType = vecTy}), vecTy)
+
+vectorise (Lit lit) =  
+  return ((Lit lit), literalType lit) 
+
+
+vectorise e@(App expr t@(Type _)) = 
+  do 
+    (vexpr, vexprTy) <- vectorise expr
+    return ((App vexpr t), applyTypeToArg vexprTy t) 
+
+vectorise  (App (Lam b expr) arg) =
+  do
+    (varg, argTy)    <- vectorise arg
+    (vexpr, vexprTy) <- vectorise expr
+    let vb            = b{varType = argTy} 
+    return ((App (Lam vb  vexpr) varg), 
+            applyTypeToArg (mkPiType vb vexprTy) varg)
+
+-- if vexpr expects a type as first argument
+-- application stays just as it is
+--
+vectorise (App expr arg) =          
+  do 
+    (vexpr, vexprTy) <-  vectorise expr
+    (varg,  vargTy)  <-  vectorise arg
+
+    if (isPolyType vexprTy)
+      then do
+        let resTy =  applyTypeToArg vexprTy varg
+        return (App vexpr varg, resTy)
+      else do 
+        let [t1, t2] = tupleTyArgs  vexprTy
+        vexpr'      <-  mk'fst t1 t2 vexpr
+        let resTy    = applyTypeToArg t1 varg   
+        return  ((App vexpr' varg), resTy)  -- apply the first component of
+                                            -- the vectorized function
+  where
+    isPolyType t =  
+        (case t  of
+           (ForAllTy _ _)  -> True
+           (NoteTy _ nt)   -> isPolyType nt
+           _               -> False)
+    
+
+vectorise  e@(Lam b expr)
+  | isTypeKind (varType b) = 
+      do
+        (vexpr, vexprTy) <- vectorise expr          -- don't vectorise 'b'!
+        return ((Lam b vexpr), mkPiType b vexprTy)
+  | otherwise =
+     do          
+       (vexpr, vexprTy)  <- vectorise expr
+       let vb             = b{varType = vectoriseTy (varType b)}
+       let ve             =  Lam  vb  vexpr 
+       (lexpr, lexprTy)  <- lift e
+       let veTy = mkPiType vb vexprTy  
+       return $ (mkTuple [veTy, lexprTy] [ve, lexpr], 
+                 mkTupleTy Boxed 2 [veTy, lexprTy])
+
+vectorise (Let bind body) = 
+  do    
+    vbind            <- vectoriseBind bind
+    (vbody, vbodyTy) <- vectorise body
+    return ((Let vbind vbody), vbodyTy)
+
+vectorise (Case expr b alts) =
+  do 
+    (vexpr, vexprTy) <- vectorise expr
+    valts <- mapM vectorise' alts
+    return (Case vexpr b{varType = vexprTy} (map fst valts), snd (head valts))
+  where vectorise' (con, bs, expr) = 
+          do 
+            (vexpr, vexprTy) <- vectorise expr
+            return ((con, bs, vexpr), vexprTy)  -- FIXME: change type of con
+                                                --   and bs
+
+
+
+vectorise (Note note expr) = 
+ do 
+   (vexpr, vexprTy) <- vectorise expr        -- FIXME: is this ok or does it
+   return ((Note note vexpr), vexprTy)       --   change the validity of note?
+
+vectorise e@(Type t) = 
+  return (e, t)                              -- FIXME: panic instead of 't'???
+
+
+{-
+myShowTy (TyVarTy _) = "TyVar "
+myShowTy (AppTy t1 t2) = 
+  "AppTy (" ++ (myShowTy t1) ++ ", " ++ (myShowTy t2) ++ ")"
+myShowTy (TyConApp _ t) =
+  "TyConApp TC (" ++ (myShowTy t) ++ ")"
+-}
+
+vectoriseTy :: Type -> Type 
+vectoriseTy t@(TyVarTy v)      =  t
+vectoriseTy t@(AppTy t1 t2)    = 
+  AppTy (vectoriseTy t1) (vectoriseTy t2)
+vectoriseTy t@(TyConApp tc ts) = 
+  TyConApp tc (map vectoriseTy ts)
+vectoriseTy t@(FunTy t1 t2)    = 
+  mkTupleTy Boxed 2 [(FunTy (vectoriseTy t1) (vectoriseTy t2)), 
+                     (liftTy t)]
+vectoriseTy  t@(ForAllTy v ty)  = 
+  ForAllTy v (vectoriseTy  ty)
+vectoriseTy t@(NoteTy note ty) =  -- FIXME: is the note still valid after
+  NoteTy note  (vectoriseTy ty)   --   this or should we just throw it away
+vectoriseTy  t =  t
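+
+-- For illustration (a sketch, ignoring notes and type synonyms): on a simple
+-- monomorphic function type the above yields, eg,
+--
+--   vectoriseTy (Int -> Bool) = (Int -> Bool, [:Int:] -> [:Bool:])
+--
+-- ie, a pair of the (vectorised) original function type and its lifted
+-- variant.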
+
+
+-- liftTy: wrap the type in an array but be careful with function types
+--    on the *top level* (is this sufficient???)
+
+liftTy:: Type -> Type
+liftTy (FunTy t1 t2)   = FunTy (liftTy t1) (liftTy t2)
+liftTy (ForAllTy tv t) = ForAllTy tv (liftTy t)
+liftTy (NoteTy n t)    = NoteTy n $ liftTy t
+liftTy  t              = mkPArrTy t
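+
+-- For illustration (a sketch):
+--
+--   liftTy Int           = [:Int:]
+--   liftTy (Int -> Bool) = [:Int:] -> [:Bool:]
+--
+-- ie, function types are lifted argument- and result-wise rather than being
+-- wrapped in an array as a whole.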
+
+
+--  lifting:
+-- ----------
+--  * liftType
+--  * lift
+
+
+-- liftBinderType: converts the type 'a' stored in the binder to the
+-- representation of '[:a:]'; it therefore calls liftTy
+--  
+--  lift the type, but don't change the name (incl unique) nor the IdInfo.
+--  IdInfo looks ok, but I'm not entirely sure about some fields (e.g.,
+--  strictness info)
+liftBinderType:: CoreBndr ->  Flatten CoreBndr
+liftBinderType bndr = return $  bndr {varType = liftTy (varType bndr)}
+
+-- lift: lifts an expression (a -> [:a:])
+-- If the expression is a simple expression, it is treated like a constant
+-- expression. 
+-- If the body of a lambda expression is a simple expression, it is
+-- transformed into a mapP
+lift:: CoreExpr -> Flatten (CoreExpr, Type)
+lift cExpr@(Var id)    = 
+  do
+    lVar@(Var lId) <- liftVar id
+    return (lVar, varType lId)
+
+lift cExpr@(Lit lit)   = 
+  do
+    lLit  <- liftConst cExpr
+    return (lLit, exprType lLit)   
+                                   
+
+lift (Lam b expr)
+  | isSimpleExpr expr      =  liftSimpleFun b expr
+  | isTypeKind (varType b) = 
+    do
+      (lexpr, lexprTy) <- lift expr  -- don't lift b!
+      return (Lam b lexpr, mkPiType b lexprTy)
+  | otherwise =
+    do
+      lb               <- liftBinderType b
+      (lexpr, lexprTy) <- extendContext [lb] (lift expr)
+      return ((Lam lb lexpr) , mkPiType lb lexprTy)
+
+lift (App expr1 expr2) = 
+  do
+    (lexpr1, lexpr1Ty) <- lift expr1
+    (lexpr2, _)        <- lift expr2
+    return ((App lexpr1 lexpr2), applyTypeToArg lexpr1Ty lexpr2)
+
+
+lift (Let (NonRec b expr1) expr2) 
+  |isSimpleExpr expr2 =
+    do  			
+      (lexpr1, _)        <- lift expr1
+      (lexpr2, lexpr2Ty) <- liftSimpleFun b expr2
+      let (t1, t2) = funTyArgs lexpr2Ty
+      liftM (\x -> (x, liftTy t2)) $  mk'mapP t1 t2 lexpr2 lexpr1 
+
+  | otherwise =
+    do 
+      (lexpr1, _)        <- lift expr1
+      lb                 <- liftBinderType b
+      (lexpr2, lexpr2Ty) <- extendContext [lb] (lift expr2)
+      return ((Let (NonRec lb lexpr1) lexpr2), lexpr2Ty)
+
+lift (Let (Rec binds) expr2) =
+  do
+    let (bndVars, exprs)  = unzip binds
+    lBndVars           <- mapM liftBinderType bndVars 
+    lexprs             <- extendContext bndVars (mapM lift exprs)
+    (lexpr2, lexpr2Ty) <- extendContext bndVars (lift expr2)
+    return ((Let (Rec (zip  lBndVars (map fst lexprs))) lexpr2), lexpr2Ty)
+
+-- FIXME: 
+-- Assumption: alternatives can either be literals or data constructors.
+--             Due to type restrictions, I don't think it is possible 
+--             that they are mixed.
+--             The handling of literals and data constructors is completely
+--             different
+--
+--
+-- let b = expr in alts
+--
+-- I think I read somewhere that the default case (if present) is stored
+-- in the head of the list. Assume for now this is true, have to check
+--
+-- (1) literals
+-- (2) data constructors
+--
+-- FIXME: optimisation: first, filter out all simple expressions and 
+--   loop (mapP & filter) over all the corresponding values in a single
+--   traversal:
+							     
+--    (1) splitAlts:: [Alt CoreBndr] -> ([Alt CoreBndr],[Alt CoreBndr])
+--                                       simple alts     reg alts
+--    (2) if simpleAlts = [] then (just as before)
+--        if regAlts    = [] then (the whole thing is just a loop)
+--        otherwise (a) compute index vector for simpleAlts (for def permute
+--                      later on)
+--                  (b) 
+lift cExpr@(Case expr b alts)  =
+  do  
+    (lExpr, _) <- lift expr
+    lb    <- liftBinderType  b     -- lift alt-expression
+    lalts <- if isLit alts 
+                then extendContext [lb] (liftCaseLit b alts)
+                else extendContext [lb] (liftCaseDataCon b alts)
+    letWrapper lExpr b lalts
+
+lift (Note (Coerce t1 t2) expr) =
+  do  
+    (lexpr, t) <- lift expr
+    let lt1 = liftTy t1
+    return ((Note (Coerce lt1 (liftTy t2)) lexpr), lt1)
+
+lift (Note note expr) =
+  do 
+    (lexpr, t) <- lift expr
+    return ((Note note lexpr), t)
+
+lift e@(Type t) = return (e, t)
+
+
+-- auxiliary functions for lifting of case statements 
+--
+
+liftCaseDataCon:: CoreBndr -> [Alt CoreBndr] -> 
+       Flatten (([CoreBind], [CoreBind], [CoreBind]))
+liftCaseDataCon b [] =
+  return ([], [], [])
+liftCaseDataCon b alls@(alt:alts)
+  | isDefault alt  =
+    do
+      (i,  e,  defAltBndrs) <- liftCaseDataConDefault b alt alts 
+      (is, es, altBndrs)    <- liftCaseDataCon' b alts 
+      return (i:is, e:es, defAltBndrs ++ altBndrs)
+  | otherwise =
+    liftCaseDataCon' b alls
+
+liftCaseDataCon':: CoreBndr -> [Alt CoreBndr] ->  
+    Flatten ([CoreBind], [CoreBind], [CoreBind])
+liftCaseDataCon' _ [] =
+  do
+    return ([], [], []) 
+
+
+liftCaseDataCon' b ((DataAlt dcon, bnds, expr): alts) =
+  do
+    (permBnd, exprBnd, packBnd)    <-  liftSingleDataCon b dcon bnds expr   
+    (permBnds, exprBnds, packBnds) <-  liftCaseDataCon' b alts 
+    return (permBnd:permBnds, exprBnd:exprBnds, packBnd ++ packBnds)
+
+
+-- FIXME: is it really necessary to return the binding to the permutation
+-- array in the data constructor case, as the representation already 
+-- contains the extended flag vector?
+liftSingleDataCon:: CoreBndr -> DataCon -> [CoreBndr] -> CoreExpr ->
+  Flatten (CoreBind, CoreBind, [CoreBind])
+liftSingleDataCon b dcon bnds expr =
+  do 
+    let dconId           = dataConTag dcon
+    indexExpr           <- mkIndexOfExprDCon (varType b)  b dconId
+    (b', bbind)         <- mkBind (slit "is"#) indexExpr
+    lbnds               <- mapM liftBinderType bnds
+    ((lExpr, _), bnds') <- packContext  b' (extendContext lbnds (lift expr))
+    (_, vbind)          <- mkBind (slit "r"#) lExpr
+    return (bbind, vbind, bnds')
+
+-- FIXME: clean this up. the datacon and the literal case are so
+--   similar that it would be easy to use the same function here
+--   instead of duplicating all the code.
+--
+liftCaseDataConDefault:: CoreBndr -> (Alt CoreBndr) ->  [Alt CoreBndr] 
+  ->  Flatten (CoreBind, CoreBind, [CoreBind])
+liftCaseDataConDefault b (_, _, def) alts =
+  do
+    let dconIds        = map (\(DataAlt d, _, _) -> dataConTag d) alts
+    indexExpr         <- mkIndexOfExprDConDft (varType b) b dconIds
+    (b', bbind)       <- mkBind (slit "is"#) indexExpr
+    ((lDef, _), bnds) <- packContext  b' (lift def)     
+    (_, vbind)        <- mkBind (slit "r"#) lDef
+    return (bbind, vbind, bnds)
+
+-- liftCaseLit: checks if we have a default case and handles it 
+-- if necessary
+liftCaseLit:: CoreBndr -> [Alt CoreBndr] -> 
+       Flatten ([CoreBind], [CoreBind], [CoreBind])
+liftCaseLit b [] =
+    return ([], [], [])    --FIXME: a case with no cases at all???
+liftCaseLit b alls@(alt:alts)
+  | isDefault alt  =
+    do
+        (i,  e,  defAltBndrs) <- liftCaseLitDefault b alt alts 
+        (is, es, altBndrs)    <- liftCaseLit' b alts 
+        return (i:is, e:es, defAltBndrs ++ altBndrs)
+  | otherwise = 
+    do 
+      liftCaseLit' b alls 
+
+-- liftCaseLitDefault: looks at all the other alternatives which 
+--    contain a literal and filters all those elements from the 
+--    array which do not match any of the literals in the other
+--    alternatives.
+liftCaseLitDefault:: CoreBndr -> (Alt CoreBndr) ->  [Alt CoreBndr] 
+  ->  Flatten (CoreBind, CoreBind, [CoreBind])
+liftCaseLitDefault b (_, _, def) alts =
+  do
+    let lits           = map (\(LitAlt l, _, _) -> l) alts
+    indexExpr         <- mkIndexOfExprDft (varType b) b lits
+    (b', bbind)       <- mkBind (slit "is"#) indexExpr
+    ((lDef, _), bnds) <- packContext  b' (lift def)     
+    (_, vbind)        <- mkBind (slit "r"#) lDef
+    return (bbind, vbind, bnds)
+
+-- FIXME: 
+--  Assumption: in case of Lit, the list of binders of the alt is empty.
+--
+-- returns 
+--   a list of all vars bound to the expr in the body of the alternative
+--   a list of (var, expr) pairs, where var has to be bound to expr
+--   by letWrapper
+liftCaseLit':: CoreBndr -> [Alt CoreBndr] ->  
+    Flatten ([CoreBind], [CoreBind], [CoreBind])						       
+liftCaseLit' _ [] =
+  do
+    return ([], [], [])
+liftCaseLit' b ((LitAlt lit, [], expr):alts) =
+  do
+    (permBnd, exprBnd, packBnd)    <-  liftSingleCaseLit b lit expr 
+    (permBnds, exprBnds, packBnds) <-  liftCaseLit' b alts 
+    return (permBnd:permBnds, exprBnd:exprBnds, packBnd ++ packBnds)
+
+-- lift a single alternative of the form: case  b of lit -> expr. 
+--    
+--   It returns the bindings:
+--   (a) let b' = indexOfP (mapP (\x -> x == lit) b)
+--
+--   (b) lift expr in the packed context. Returns lexpr and the
+--       list of binds (bnds) that describe the packed arrays
+--
+--   (c) create new var v' to bind lexpr to
+--
+--   (d) return (b' = indexOf...., v' = lexpr, bnds)
+liftSingleCaseLit:: CoreBndr -> Literal -> CoreExpr  -> 
+  Flatten (CoreBind, CoreBind, [CoreBind])
+liftSingleCaseLit b lit expr =
+ do 
+   indexExpr          <- mkIndexOfExpr (varType b) b lit -- (a)
+   (b', bbind)        <- mkBind (slit "is"#) indexExpr
+   ((lExpr, t), bnds) <- packContext  b' (lift expr)     -- (b)         
+   (_, vbind)         <- mkBind (slit "r"#) lExpr
+   return (bbind, vbind, bnds)
+
+-- letWrapper lExpr b ([indexbnd_i], [exprbnd_i], [pckbnd_ij])
+-- 
+-- let b = lExpr in
+--  let index_bnd_1 in
+--    let packbnd_11 in
+--      ... packbnd_1m in 
+--         let exprbnd_1 in        ....
+--      ...
+--          let nvar = replicate dummy (length <current context>)
+--               nvar1 = bpermuteDftP index_bnd_1 ...
+--
+--   in bpermuteDftP index_bnd_n nvar_(n-1)
+--
+letWrapper:: CoreExpr -> CoreBndr ->([CoreBind], [CoreBind], [CoreBind]) ->
+  Flatten (CoreExpr, Type)
+letWrapper lExpr b (indBnds, exprBnds, pckBnds)  =
+  do 
+    (defBpBnds, ty) <- dftbpBinders indBnds exprBnds
+    let resExpr      = getExprOfBind (head defBpBnds)
+    return ((mkLets (indBnds ++ pckBnds ++ exprBnds ++ defBpBnds) resExpr), ty)
+
+-- dftbpBinders: return the list of binders necessary to construct the overall
+--   result from the subresults computed in the different branches of the case
+--   statement. The binding which contains the final result is in the *head*
+--   of the result list.
+-- 
+-- dftbpBinders [ind_i = ...] [expr_i = ...] = [dn = ..., d_n-1 = .., d1 = ...]
+--
+-- let def = replicate (length of context) undefined
+--     d1  = bpermuteDftP dft e1 i1
+--     .....
+--
+dftbpBinders:: [CoreBind] -> [CoreBind] -> Flatten ([CoreBind], Type)
+dftbpBinders indexBnds exprBnds =
+  do
+    let expr = getExprOfBind (head exprBnds)
+    defVecExpr     <- createDftArrayBind expr
+    ((b, bnds), t) <- dftbpBinders' indexBnds exprBnds defVecExpr
+    return ((b:bnds),t)
+  where
+    dftbpBinders' :: [CoreBind] 
+		  -> [CoreBind] 
+		  -> CoreBind 
+		  -> Flatten ((CoreBind, [CoreBind]), Type)
+    dftbpBinders' [] [] cBnd =
+      return ((cBnd, []), panic "dftbpBinders: undefined type")
+    dftbpBinders' (i:is) (e:es) cBind =
+      do
+	let iVar = getVarOfBind i
+	let eVar = getVarOfBind e
+	let cVar = getVarOfBind cBind
+        let ty   = varType eVar
+	newBnd  <- mkDftBackpermute ty iVar eVar cVar
+	((fBnd, restBnds), _) <- dftbpBinders' is es newBnd
+	return ((fBnd, (newBnd:restBnds)), liftTy ty)
+
+    dftbpBinders'  _ _ _ = 
+      panic "Flattening.dftbpBinders: index and expression binder lists \ 
+	    \have different length!"
+
+getExprOfBind:: CoreBind -> CoreExpr
+getExprOfBind (NonRec _ expr) = expr
+
+getVarOfBind:: CoreBind -> Var
+getVarOfBind (NonRec b _) = b
+
+
+
+-- Optimised Transformation
+-- =========================
+--
+
+-- liftSimpleFun
+--   if variables x_1 to x_i occur in the context *and* free in expr
+--   then 
+--   (liftSimpleExpression expr) => mapP (\ (x1,..xn) -> expr) (x1,..xn)
+--
+liftSimpleFun:: CoreBndr -> CoreExpr -> Flatten (CoreExpr, Type)
+liftSimpleFun b expr =
+  do
+    bndVars <- collectBoundVars expr
+    let bndVars'     = b:bndVars
+        bndVarsTuple = mkTuple (map varType bndVars') (map Var bndVars')
+	lamExpr      = mkLams (b:bndVars) expr     -- FIXME: should be tuple
+                                                   -- here 
+    let (t1, t2)     = funTyArgs . exprType $ lamExpr
+    mapExpr         <-  mk'mapP t1 t2 lamExpr bndVarsTuple
+    let lexpr        = mkApps mapExpr [bndVarsTuple]
+    return (lexpr, undefined)                      -- FIXME!!!!!
+
+
+collectBoundVars:: CoreExpr -> Flatten [CoreBndr]
+collectBoundVars  expr = 
+  intersectWithContext (exprFreeVars expr)
+
+
+-- auxiliary routines
+-- -------------------
+
+-- mkIndexOfExpr b lit ->
+--   indexOfP (\x -> x == lit) b
+--
+mkIndexOfExpr:: Type -> CoreBndr -> Literal -> Flatten CoreExpr
+mkIndexOfExpr  varType b lit =
+  do 
+    eqExpr        <- mk'eq  varType (Var b) (Lit lit)
+    let lambdaExpr = (Lam b eqExpr)
+    mk'indexOfP varType  lambdaExpr (Var b)
+
+-- there is FlattenMonad.mk'indexOfP as well as
+-- CoreSyn.mkApps and CoreSyn.mkLam, all of which should help here
+
+-- for case-distinction over data constructors:
+-- let b = expr in 
+--   case b of
+--      dcon args -> ....
+-- dconId = dataConTag dcon 
+-- the call "mkIndexOfExprDCon b dconId" computes the core expression for
+-- indexOfP (\x -> x == dconId) b)
+--
+mkIndexOfExprDCon::Type -> CoreBndr -> Int -> Flatten CoreExpr
+mkIndexOfExprDCon  varType b dId = 
+  do 
+    let intExpr    = mkIntLitInt dId
+    eqExpr        <- mk'eq  varType (Var b) intExpr
+    let lambdaExpr = (Lam b eqExpr)
+    mk'indexOfP varType lambdaExpr (Var b) 
+
+  
+
+-- there is FlattenMonad.mk'indexOfP as well as
+-- CoreSyn.mkApps and CoreSyn.mkLam, all of which should help here
+
+-- mkIndexOfExprDConDft b dconIds: generates the index expression for the
+-- default case. "dconIds" is a list of all the data constructor idents which 
+-- are covered by the other cases.
+-- indexOfP (\x -> x /= dconId_1 && ...) b
+--
+mkIndexOfExprDConDft:: Type -> CoreBndr -> [Int] -> Flatten CoreExpr
+mkIndexOfExprDConDft varType b dId  = 
+  do 
+    let intExprs   = map mkIntLitInt dId
+    bExpr         <- foldM (mk'neq varType) (head intExprs) (tail intExprs)
+    let lambdaExpr = (Lam b bExpr)
+    mk'indexOfP varType lambdaExpr (Var b)
+  
+
+-- mkIndexOfExprDft b [lit1, lit2, ...] ->
+--   indexOfP (\x -> not (x == lit1 || x == lit2 ...)) b
+mkIndexOfExprDft:: Type -> CoreBndr -> [Literal] -> Flatten CoreExpr
+mkIndexOfExprDft varType b lits = 
+  do 
+    let litExprs   = map (\l-> Lit l)  lits
+    bExpr         <- foldM (mk'neq varType) (head litExprs) (tail litExprs)
+    let lambdaExpr = (Lam b bExpr)
+    mk'indexOfP varType lambdaExpr (Var b) 
+
+
+-- create a back-permute binder
+--
+-- * `mkDftBackpermute ty indexArrayVar srcArrayVar dftArrayVar' creates a
+--   Core binding of the form
+--
+--     x = bpermuteDftP indexArrayVar srcArrayVar dftArrayVar
+--
+--   where `x' is a new local variable
+--
+mkDftBackpermute :: Type -> Var -> Var -> Var -> Flatten CoreBind
+mkDftBackpermute ty idx src dft = 
+  do
+    rhs <- mk'bpermuteDftP ty (Var idx) (Var src) (Var dft)
+    liftM snd $ mkBind (slit "dbp"#) rhs
+
+-- create a dummy array with elements of the given type, which can be used as
+-- default array for the combination of the subresults of the lifted case
+-- expression
+--
+createDftArrayBind    :: CoreExpr -> Flatten CoreBind
+createDftArrayBind e  =
+  panic "Flattening.createDftArrayBind: not implemented yet"
+{-
+  do
+    let ty = parrElemTy . exprType $ expr
+    len <- mk'lengthP e
+    rhs <- mk'replicateP ty len err??
+    lift snd $ mkBind (slit "dft"#) rhs
+FIXME: not that easy; we cannot take an "error" value, since it would be
+  evaluated immediately by bpermuteDftP, but it is also hard to come up with a
+  generic value for an arbitrary type.
+-}
+
+
+
+
+-- show functions (the pretty-print functions sometimes don't 
+-- show it the way I want...)
+
+-- shows just the structure
+showCoreExpr (Var _ )    = "Var "
+showCoreExpr (Lit _) = "Lit "
+showCoreExpr (App e1 e2) = 
+  "(App \n  " ++ (showCoreExpr e1) ++ "\n  " ++ (showCoreExpr e2) ++ ") "
+showCoreExpr (Lam b e)   =
+  "Lam b " ++ (showCoreExpr e)
+showCoreExpr (Let bnds expr) =
+  "Let \n" ++ (showBinds bnds) ++ "in " ++ (showCoreExpr expr)
+  where showBinds (NonRec b e) = showBind (b,e)
+        showBinds (Rec bnds)   = concat (map showBind bnds)
+        showBind (b,e) = "  b = " ++ (showCoreExpr e)++ "\n"
+showCoreExpr (Case ex b alts) =
+  "Case b = " ++ (showCoreExpr ex) ++ " of \n" ++ (showAlts alts)
+  where showAlts _ = ""  
+showCoreExpr (Note _ ex) = "Note n " ++ (showCoreExpr ex)
+showCoreExpr (Type t) = "Type"
\ No newline at end of file
diff --git a/ghc/compiler/ndpFlatten/NDPCoreUtils.hs b/ghc/compiler/ndpFlatten/NDPCoreUtils.hs
new file mode 100644
index 0000000000000000000000000000000000000000..1d221baae1232de0743ca7b532787fd4400e7626
--- /dev/null
+++ b/ghc/compiler/ndpFlatten/NDPCoreUtils.hs
@@ -0,0 +1,175 @@
+--  $Id$
+--
+--  Copyright (c) [2001..2002] Manuel M T Chakravarty & Gabriele Keller
+--
+--  Auxiliary routines for NDP-related Core transformations.
+--
+--- DESCRIPTION ---------------------------------------------------------------
+--
+--  This module exports functions to access and alter the `Type' data 
+--  structure from module `Type' and the `CoreExpr' structure from `CoreSyn'.
+--  As it is part of the NDP flattening component, the functions provide
+--  access to all the fields that are important for the flattening and lifting
+--  transformation.
+-- 
+--- DOCU ----------------------------------------------------------------------
+--
+--  Language: Haskell 98
+--
+--- TODO ----------------------------------------------------------------------
+--
+
+module NDPCoreUtils (
+
+  -- type inspection functions
+  --
+  tupleTyArgs,		-- :: Type -> [Type]
+  funTyArgs,		-- :: Type -> (Type, Type)
+  parrElemTy,		-- :: Type -> Type
+
+  -- Core generation functions
+  --
+  mkTuple,		-- :: [Type] -> [CoreExpr] -> CoreExpr
+  mkInt,		-- :: CoreExpr -> CoreExpr
+
+  -- query functions
+  --
+  isDefault,            -- :: CoreAlt -> Bool
+  isLit,		-- :: [CoreAlt] -> Bool
+  isSimpleExpr,		-- :: CoreExpr -> Bool
+
+  -- re-exported functions
+  --
+  mkPArrTy,		-- :: Type -> Type
+  boolTy,		-- :: Type
+  
+  -- substitution
+  -- 
+  substIdEnv
+) where
+
+-- GHC
+import Panic      (panic)
+import Outputable (Outputable(ppr), pprPanic)
+import BasicTypes (Boxity(..))
+import Var        (Var)
+import Type       (Type, splitTyConApp_maybe, splitFunTy)
+import TyCon      (TyCon(..), isTupleTyCon)
+import PrelNames  (parrTyConName)
+import TysWiredIn (parrTyCon, unitDataConId, tupleCon, intDataCon, mkPArrTy,
+		   boolTy) 
+import CoreSyn    (CoreBndr, CoreExpr, CoreBind, CoreAlt, Expr(..), AltCon(..),
+		   Bind(..), mkConApp)
+import Var        (Id)
+import VarEnv     (IdEnv, delVarEnv, delVarEnvList, lookupVarEnv)
+
+-- friends: don't import any to avoid cyclic imports
+-- 
+
+
+-- type inspection functions
+-- -------------------------
+
+-- determines the argument types of a tuple type (EXPORTED)
+--
+tupleTyArgs    :: Type -> [Type]
+tupleTyArgs ty  =
+  case splitTyConApp_maybe ty of
+    Just (tyCon, argTys) | isTupleTyCon tyCon -> argTys
+    _					      -> 
+      pprPanic "NDPCoreUtils.tupleTyArgs: wrong type: " (ppr ty)
+
+-- determines the argument and result type of a function type (EXPORTED)
+--
+funTyArgs :: Type -> (Type, Type)
+funTyArgs  = splitFunTy
+
+-- for a type of the form `[:t:]', yield `t' (EXPORTED)
+--
+-- * if the type has any other form, a fatal error occurs
+--
+parrElemTy    :: Type -> Type
+parrElemTy ty  = 
+  case splitTyConApp_maybe ty of
+    Just (tyCon, [argTy]) | tyConName tyCon == parrTyConName -> argTy
+    _							     -> 
+      pprPanic "NDPCoreUtils.parrElemTy: wrong type: " (ppr ty)
+
+
+-- Core generation functions
+-- -------------------------
+
+-- make a tuple construction expression from a list of argument types and
+-- argument values (EXPORTED)
+--
+-- * the two lists need to be of the same length
+--
+mkTuple                                  :: [Type] -> [CoreExpr] -> CoreExpr
+mkTuple []  []                            = Var unitDataConId
+mkTuple [_] [e]                           = e
+mkTuple ts  es  | length ts == length es  = 
+  mkConApp (tupleCon Boxed (length es)) (map Type ts ++ es)
+mkTuple _   _                             =
+  panic "NDPCoreUtils.mkTuple: mismatch between number of types and exprs!"
+
+-- make a boxed integer from an unboxed one (EXPORTED)
+--
+mkInt   :: CoreExpr -> CoreExpr
+mkInt e  = mkConApp intDataCon [e]
+
+
+-- query functions
+-- ---------------
+
+-- checks whether a given case alternative is a default alternative (EXPORTED)
+--
+isDefault                 :: CoreAlt -> Bool
+isDefault (DEFAULT, _, _)  = True
+isDefault _                = False
+
+-- check whether a list of case alternatives belongs to a case over a
+-- literal type (EXPORTED) 
+--
+isLit			      :: [CoreAlt] -> Bool
+isLit ((DEFAULT, _, _ ):alts)  = isLit alts
+isLit ((LitAlt _, _, _):_   )  = True
+isLit _                        = False
+
+-- FIXME: this function should get a more expressive name and maybe also a
+--	  more detailed return type (depends on how the analysis goes)
+isSimpleExpr:: CoreExpr -> Bool
+isSimpleExpr _ =
+  -- FIXME
+  False
+
+
+--  Substitution
+--  -------------
+
+substIdEnv:: IdEnv Id -> CoreExpr -> CoreExpr
+substIdEnv env e@(Lit _) = e
+substIdEnv env e@(Var id)  =
+  case (lookupVarEnv env id) of
+    Just v -> (Var v)
+    _      -> e
+substIdEnv env (App e arg) =
+  App (substIdEnv env e) (substIdEnv env arg)
+substIdEnv env (Lam b expr) =
+  Lam b (substIdEnv (delVarEnv env b) expr)
+substIdEnv env (Let (NonRec b expr1) expr2) =
+  Let (NonRec b (substIdEnv env expr1)) 
+         (substIdEnv (delVarEnv env b) expr2)
+substIdEnv env (Let (Rec bnds) expr) = 
+   let 
+     newEnv  = delVarEnvList env (map fst bnds)
+     newExpr = substIdEnv newEnv expr 
+     substBnd (b,e) = (b, substIdEnv newEnv e)      
+   in Let (Rec (map substBnd bnds)) newExpr
+substIdEnv env (Case expr b alts) =
+   Case (substIdEnv newEnv expr) b (map substAlt alts)
+   where
+     newEnv = delVarEnv env b
+     substAlt (c, bnds, expr) =
+       (c, bnds, substIdEnv (delVarEnvList env bnds) expr)
+substIdEnv env (Note n expr) =
+  Note n (substIdEnv env expr)
+substIdEnv env e@(Type t) = e
\ No newline at end of file
diff --git a/ghc/compiler/ndpFlatten/PArrAnal.hs b/ghc/compiler/ndpFlatten/PArrAnal.hs
new file mode 100644
index 0000000000000000000000000000000000000000..0c25805d2c98bbca09aa575753442782b00c31f0
--- /dev/null
+++ b/ghc/compiler/ndpFlatten/PArrAnal.hs
@@ -0,0 +1,202 @@
+--  $Id$
+--
+--  Copyright (c) 2002 Manuel M T Chakravarty & Gabriele Keller
+--  
+--  Analysis phase for an optimised flattening transformation
+--
+--- DESCRIPTION ---------------------------------------------------------------
+--
+--  This module implements an analysis phase that identifies Core expressions
+--  that need not be transformed during flattening.  The expressions when
+--  executed in a parallel context are implemented as an iteration over the
+--  original scalar computation, instead of vectorising the computation.  This
+--  usually improves efficiency by increasing locality and also reduces code
+--  size. 
+--
+--- DOCU ----------------------------------------------------------------------
+--
+--  Language: Haskell 98 with C preprocessor
+--
+-- Analyse the expression and annotate each simple subexpression accordingly. 
+--
+--  The result of the analysis is stored in a new field in IdInfo (has yet to
+--  be extended)
+--
+--  A simple expression is any expression which is not a function, not of
+--  recursive type and does not contain a value of PArray type. Polymorphic
+--  variables are simple expressions even though they might be instantiated to
+--  a parray value or function.
+--
+--- TODO ----------------------------------------------------------------------
+--
+
+module PArrAnal (
+  markScalarExprs	-- :: [CoreBind] -> [CoreBind]
+) where
+
+import Panic   (panic)
+import Outputable (pprPanic, ppr)
+import CoreSyn (CoreBind)
+
+import TypeRep      (Type(..))
+import Var (Var(..),Id)
+import Literal      (Literal)
+import CoreSyn (Expr(..),CoreExpr,Bind(..))
+-- 
+
+data ArrayUsage = Prim | NonPrim | Array 
+                | PolyExpr (Id -> Maybe (ArrayUsage -> ArrayUsage))
+                | PolyFun (ArrayUsage -> ArrayUsage)
+
+         
+arrUsage:: CoreExpr -> ArrayUsage
+arrUsage (Var id)  = varArrayUsage id
+arrUsage (Lit lit) = litArrayUsage lit
+arrUsage (App expr1 expr2) =
+  let
+    arr1 = arrUsage expr1
+    arr2 = arrUsage expr2
+  in 
+  case (arr1, arr2) of   
+    (_,        Array)  -> Array
+    (PolyFun f, _)     -> f arr2
+    (_, _)             -> arr1
+
+arrUsage (Lam b expr) =
+  bindType (b, expr)
+
+arrUsage (Let (NonRec b expr1) expr2) =
+  arrUsage (App (Lam b expr2) expr1)
+
+arrUsage (Let (Rec bnds) expr) =
+  let 
+    t1 = foldr combineArrayUsage Prim (map bindType bnds)
+    t2 = arrUsage expr
+  in if isArrayUsage t1 then Array else t2
+
+arrUsage (Case expr b alts) = 
+  let 
+    t1 = arrUsage expr
+    t2 = scanType (map (arrUsage . (\ (_,_,x) -> x)) alts)
+  in scanType [t1, t2]
+
+arrUsage (Note n expr) =
+  arrUsage expr
+
+arrUsage (Type t) =
+  typeArrayUsage  t
+
+bindType (b, expr) =
+  let
+    bT    = varArrayUsage b
+    exprT = arrUsage expr
+  in case (bT, exprT) of
+       (Array, _) -> Array
+       _          -> exprT
+
+scanType:: [ArrayUsage] -> ArrayUsage
+scanType [t]        = t
+scanType (Array:ts) = Array
+scanType (_:ts)     = scanType ts
+  
+
+
+-- the code expression represents a built-in function which generates
+-- an array
+isArrayGen:: CoreExpr -> Bool
+isArrayGen _ = 
+  panic "PArrAnal: isArrayGen: not yet implemented"
+
+isArrayCon:: CoreExpr -> Bool
+isArrayCon _ = 
+  panic "PArrAnal: isArrayCon: not yet implemented"
+
+markScalarExprs:: [CoreBind] -> [CoreBind]
+markScalarExprs _ =
+  panic "PArrAnal.markScalarExprs: not implemented yet"
+
+
+varArrayUsage:: Id -> ArrayUsage
+varArrayUsage =
+  panic "PArrAnal.varArrayUsage: not yet implented"
+
+litArrayUsage:: Literal -> ArrayUsage
+litArrayUsage =
+  panic "PArrAnal.litArrayUsage: not yet implented"
+
+
+typeArrayUsage:: Type -> ArrayUsage
+typeArrayUsage (TyVarTy tvar) = 
+  PolyExpr (tIdFun tvar)
+typeArrayUsage (AppTy _ _) =
+   panic "PArrAnal.typeArrayUsage: AppTy case not yet implemented"
+typeArrayUsage (TyConApp tc tcargs) =
+  let
+    tcargsAU = map typeArrayUsage tcargs
+    tcCombine  = foldr combineArrayUsage Prim tcargsAU
+  in auCon tcCombine
+typeArrayUsage t@(SourceTy _) =
+  pprPanic "PArrAnal.typeArrayUsage: encountered 'SourceType - shouldn't be here!"
+           (ppr t)                 
+ 
+
+combineArrayUsage:: ArrayUsage -> ArrayUsage -> ArrayUsage 
+combineArrayUsage Array _  = Array 
+combineArrayUsage _ Array  = Array 
+combineArrayUsage (PolyExpr f1) (PolyExpr f2) =
+  PolyExpr f'   
+  where 
+    f' var = 
+      let
+        f1lookup = f1 var
+        f2lookup = f2 var
+       in 
+       case (f1lookup, f2lookup) of
+         (Nothing, _) -> f2lookup
+         (_, Nothing) -> f1lookup
+         (Just f1', Just f2') -> Just ( \e -> (combineArrayUsage (f1' e) (f2' e)))
+combineArrayUsage (PolyFun f) (PolyExpr g) = 
+        panic ("PArrAnal.typeArrayUsage: PolyFun as argument in data" ++
+               " constructor - should not (?) happen\n")
+combineArrayUsage (PolyExpr g) (PolyFun f)  = 
+        panic ("PArrAnal.typeArrayUsage: PolyFun as argument in data" ++
+               " constructor - should not (?) happen\n")
+combineArrayUsage NonPrim _ = NonPrim
+combineArrayUsage _ NonPrim = NonPrim
+combineArrayUsage Prim Prim = Prim
+
+
+isArrayUsage:: ArrayUsage -> Bool
+isArrayUsage Array = True
+isArrayUsage _     = False
+
+--  Functions to serve as arguments for PolyExpr
+--  ---------------------------------------------
+
+tIdFun:: Var -> Var -> Maybe (ArrayUsage -> ArrayUsage) 
+tIdFun t tcomp =
+  if t == tcomp then
+     Just auId
+  else
+     Nothing  
+
+-- Functions to serve as argument for PolyFun
+-- -------------------------------------------
+
+auId:: ArrayUsage -> ArrayUsage 
+auId = id
+
+auCon:: ArrayUsage -> ArrayUsage
+auCon Prim = NonPrim
+auCon (PolyExpr f) = PolyExpr f'
+  where f' v  = case f v of
+                   Nothing -> Nothing
+                   Just g  -> Just  ( \e -> (auCon (g e)))
+auCon (PolyFun f)  = PolyFun (auCon . f)
+auCon _    = Array
+
+-- traversal of Core expressions
+-- -----------------------------
+
+-- FIXME: implement
+
diff --git a/ghc/compiler/ndpFlatten/TODO b/ghc/compiler/ndpFlatten/TODO
new file mode 100644
index 0000000000000000000000000000000000000000..e59660920574f4410eddf5dccffde3c76fe88a8a
--- /dev/null
+++ b/ghc/compiler/ndpFlatten/TODO
@@ -0,0 +1,202 @@
+	           TODO List for Flattening Support in GHC	     -*-text-*-
+		   =======================================
+
+Middle-End Related
+~~~~~~~~~~~~~~~~~~
+
+Flattening Transformation
+~~~~~~~~~~~~~~~~~~~~~~~~~
+
+* Complete and test
+
+* Complete the analysis
+
+* Type transformation: The ideal solution would probably be to add some
+  generic machinery, so that we can define all the rules for handling the type
+  and value transformations in a library.  (The PrelPArr for WayNDP.)
+
+
+Library Related
+~~~~~~~~~~~~~~~
+
+* The problem with re-exporting PrelPArr from Prelude is that it would also be
+  visible when -fparr is not given.  There should be a mechanism to implicitly
+  import more than one module (like PERVASIVE modules in M3)
+
+* We need a PrelPArr-like library for when flattening is used, too.  In fact,
+  we need some library routines that are on the level of merely vectorised
+  code (eg, for the dummy default vectors), and then, all the `PArrays' stuff
+  implementing fast unboxed arrays and fusion.
+
+* Enum is a problem.  Ideally, we would like `enumFromToP' and
+  `enumFromThenToP' to be members of `Enum'.  On the other hand, we really do
+  not want to change `Enum'.  The solution for the moment is to define
+
+    enumFromTo x y       = mapP toEnum [:fromEnum x .. fromEnum y:]
+    enumFromThenTo x y z = mapP toEnum [:fromEnum x, fromEnum y .. fromEnum z:]
+
+  like the Haskell Report does for the list versions.  This is hopefully
+  efficient enough as array fusion should fold the two traversals into one.
+  [DONE]
+
+
+DOCU that should go into the Commentary
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+The type constructor [::]
+-------------------------
+
+The array type constructor [::] is quite similar to [] (list constructor) in
+that GHC has to know about it (in TysWiredIn); however, there are some
+differences:
+
+* [::] is an abstract type, whereas [] is not
+
+* if flattening is switched on, all occurrences of the type are actually
+  removed by appropriate program transformations.
+
+It is the module PrelPArr that actually implements nested parallel arrays.
+[::] is eliminated only if, in addition to array support, flattening is
+activated.  Flattening is just an option rather than the only method to
+implement those arrays.
+
+  Flags: -fparr	      -- syntactic support for parallel arrays (via `PrelPArr')
+			 * Dynamic hsc option; can be reversed with -fno-parr
+	 -fflatten    -- flattening transformation
+			 * Static hsc option
+	 -ndp	      -- this a way option, which implies -fparr and -fflatten
+			 (way options are handled by the driver and are not
+			 directly seen by hsc)
+	 -ddump-vect  -- dump Core after vectorisation
+		         * Dynamic hsc option
+
+* PrelPArr implements array variants of the Prelude list functions plus some
+  extra functions (also, some list functions (eg, those generating infinite
+  lists) have been left out).
+
+* prelude/PrelNames has been extended with all the names from PrelPArr that
+  need to be known inside the compiler
+
+* The variable GhcSupportsPArr, which can be set in build.mk decides whether
+  `PrelPArr' is to be compiled or not.  (We probably need to suppress compiling
+  PrelPArr in WayNDP, or rather replace it with a different PrelPArr.)
+
+* Say something about `TysWiredIn.parrTyCon' as soon as we know how it
+  actually works... 
+
+Parser and AST Notes:
+- Parser and AST are quite straightforward.  Essentially, the list cases are
+  duplicated with a name containing `PArr' or `parr' and modified to fit the
+  slightly different semantics (ie, finite length, strict).
+- The value and pattern `[::]' is an empty explicit parallel array (ie,
+  something of the form `ExplicitPArr ty []' in the AST).  This is in contrast
+  to lists, which use the nil-constructor instead.  In the case of parallel
+  arrays, using a constructor would be rather awkward, as it is not a
+  constructor-based type.
+- Thus, array patterns have the general form `[:p1, p2, ..., pn:]', where n >=
+  0.  Hence, two array patterns overlap iff they have the same length.
+- The type constructor for parallel arrays is internally represented as a
+  `TyCon.AlgTyCon' with a wired-in definition in `TysWiredIn'.  
+
+Desugarer Notes:
+- Desugaring of patterns involving parallel arrays:
+  * In Match.tidy1, we use fake array constructors; ie, any pattern `[:p1, ...,
+    pn:]' is replaced by the expression `MkPArr<n> p1 ... pn', where
+    `MkPArr<n>' is the n-ary array constructor.  These constructors are fake,
+    because they are never used to actually represent array values; in fact,
+    they are removed again before pattern compilation is finished.  However,
+    the use of these fake constructors implies that we need not modify large
+    parts of the machinery of the pattern matching compiler, as array patterns
+    are handled like any other constructor pattern.
+  * Check.simplify_pat introduces the same fake constructors as Match.tidy1;
+    they are removed again by Check.make_con.
+  * In DsUtils.mkCoAlgCaseMatchResult, we catch the case of array patterns and
+    generate code as the following example illustrates, where the LHS is the
+    code that would be produced if array constructors really existed:
+
+      case v of pa {
+	MkPArr1 x1       -> e1
+	MkPArr2 x2 x3 x4 -> e2
+	DFT	         -> e3
+      }
+
+    =>
+
+      case lengthP v of
+        Int# i# -> 
+	  case i# of l {
+	    1   -> let x1 = v!:0                       in e1
+	    3   -> let x2 = v!:0; x3 = v!:1; x4 = v!:2 in e2
+	    DFT ->					      e3
+	  }
+  * The desugaring of array comprehensions is in `DsListComp', but follows
+    rules that are different from those for translating list comprehensions.
+    Denotationally, it boils down to the same, but the operational
+    requirements for an efficient implementation of array comprehensions are
+    rather different.
+
+    [:e | qss:] = <<[:e | qss:]>> () [:():]
+
+    <<[:e' |           :]>> pa ea = mapP (\pa -> e') ea
+    <<[:e' | b     , qs:]>> pa ea = <<[:e' | qs:]>> pa (filterP (\pa -> b) ea)
+    <<[:e' | p <- e, qs:]>> pa ea = 
+      let ef = filterP (\x -> case x of {p -> True; _ -> False}) e
+      in
+      <<[:e' | qs:]>> (pa, p) (crossP ea ef)
+    <<[:e' | let ds, qs:]>> pa ea = 
+      <<[:e' | qs:]>> (pa, (x_1, ..., x_n)) 
+		      (mapP (\v@pa -> (v, let ds in (x_1, ..., x_n))) ea)
+    where
+      {x_1, ..., x_n} = DV (ds)		-- Defined Variables
+    <<[:e' | qs | qss:]>>   pa ea = 
+      <<[:e' | qss:]>> (pa, (x_1, ..., x_n)) 
+		       (zipP ea <<[:(x_1, ..., x_n) | qs:]>>)
+    where
+      {x_1, ..., x_n} = DV (qs)
+
+    Moreover, we have
+
+      crossP       :: [:a:] -> [:b:] -> [:(a, b):]
+      crossP a1 a2  = let
+			len1 = lengthP a1
+			len2 = lengthP a2
+			x1   = concatP $ mapP (replicateP len2) a1
+			x2   = concatP $ replicateP len1 a2
+		      in
+		      zipP x1 x2
+
+    For a more efficient implementation of `crossP', see `PrelPArr'.
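+
+    For instance, evaluating the definition above by hand,
+
+      crossP [:1, 2:] [:10, 20:]  ==  [:(1, 10), (1, 20), (2, 10), (2, 20):]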
+
+    Optimisations: 
+    - In the `p <- e' rule, if `pa = ()', drop it and simplify the `crossP ea
+      e' to `e'.
+    - We assume that fusion will optimise sequences of array processing
+      combinators.
+    - Do we want to have the following function?
+
+        mapFilterP :: (a -> Maybe b) -> [:a:] -> [:b:]
+
+      Even with fusion, `(mapP (\p -> e) . filterP (\p -> b))' may still result
+      in redundant pattern matching operations.  (Let's postpone this until we
+      have seen what the Simplifier does to the generated code.)
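+
+    As a worked example (a sketch obtained by applying the translation scheme
+    and the first optimisation above by hand, and dropping the `filterP' that
+    a plain variable pattern gives rise to, as it always succeeds):
+
+      [:x + y | x <- xs, y <- ys:]
+        ==>  mapP (\(x, y) -> x + y) (crossP xs ys)
+
+      [:x * x | x <- xs, x > 0:]
+        ==>  mapP (\x -> x * x) (filterP (\x -> x > 0) xs)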
+
+Flattening Notes:
+* The story about getting access to all the names like "fst" etc that we need
+  to generate during flattening is quite involved.  To have a reasonable
+  chance of getting at the required names, we need to put flattening in
+  between the desugarer and the simplifier as an extra pass in
+  HscMain.hscMain.  After that point, the persistent compiler state is zapped
+  (for heap space reduction reasons, I guess) and nothing remains of the
+  imported interfaces in one-shot mode.
+
+  Moreover, to get the Ids that we need into the type environment, we need to
+  force the renamer to include them.  This is done in
+  RnEnv.getImplicitModuleFVs, which computes all implicitly imported names.
+  We let it add the names from FlattenInfo.namesNeededForFlattening.
+
+  Given all these arrangements, FlattenMonad can obtain the needed Ids from
+  the persistent compiler state without much further hassle.
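+
+  For orientation only (the real `FlattenInfo' module is not part of this
+  excerpt; the exact set of names is an assumption, and making the set empty
+  when flattening is not activated is not modelled here), the free-variable
+  set could look roughly like this, using only names added to `PrelNames' by
+  this patch:
+
+    module FlattenInfo (namesNeededForFlattening) where
+
+    import NameSet   (FreeVars, mkFVs)
+    import PrelNames (fstName, sndName, andName, orName,
+                      lengthPName, replicatePName, mapPName, filterPName,
+                      zipPName, crossPName, indexPName, bpermutePName,
+                      bpermuteDftPName, indexOfPName)
+
+    -- names to which the flattening pass generates references; RnEnv adds
+    -- them to the ubiquitous names so that the renamer pulls in their
+    -- definitions
+    namesNeededForFlattening :: FreeVars
+    namesNeededForFlattening  = mkFVs [fstName, sndName, andName, orName,
+                                       lengthPName, replicatePName, mapPName,
+                                       filterPName, zipPName, crossPName,
+                                       indexPName, bpermutePName,
+                                       bpermuteDftPName, indexOfPName]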
+
+  [It might be worthwhile to document in the non-Flattening part of the
+  Commentary that the persistent compiler state is zapped after desugaring and
+  how the free variables determined by the renamer imply which names are
+  imported.] 
diff --git a/ghc/compiler/parser/Lex.lhs b/ghc/compiler/parser/Lex.lhs
index dfc3945b6e8ce67227fb23ccf717c8b02f3d4969..06fe82f43cf8b5f1da2279d9c7c0d4135e2af9a1 100644
--- a/ghc/compiler/parser/Lex.lhs
+++ b/ghc/compiler/parser/Lex.lhs
@@ -23,7 +23,7 @@ module Lex (
 
 	-- Monad for parser
 	Token(..), lexer, ParseResult(..), PState(..),
-	checkVersion, 
+	checkVersion, ExtFlags(..), mkPState, 
 	StringBuffer,
 
 	P, thenP, thenP_, returnP, mapP, failP, failMsgP,
@@ -55,6 +55,7 @@ import GlaExts
 import Ctype
 import Char		( chr, ord )
 import PrelRead 	( readRational__ ) -- Glasgow non-std
+import PrelBits		( Bits(..) )	   -- non-std
 \end{code}
 
 %************************************************************************
@@ -192,6 +193,8 @@ data Token
   | ITccurlybar                 -- |}, for type applications
   | ITvccurly
   | ITobrack
+  | ITopabrack			-- [:, for parallel arrays with -fparr
+  | ITcpabrack			-- :], for parallel arrays with -fparr
   | ITcbrack
   | IToparen
   | ITcparen
@@ -387,7 +390,8 @@ The lexical analyser
 
 Lexer state:
 
-	- (glaexts) lexing an interface file or -fglasgow-exts
+	- (exts)  lexing a source with extensions, eg, an interface file or 
+		  with -fglasgow-exts
 	- (bol)   pointer to beginning of line (for column calculations)
 	- (buf)   pointer to beginning of token
 	- (buf)   pointer to current char
@@ -397,7 +401,7 @@ Lexer state:
 lexer :: (Token -> P a) -> P a
 lexer cont buf s@(PState{
 		    loc = loc,
-		    glasgow_exts = glaexts,
+		    extsBitmap = exts,
 		    bol = bol,
 		    atbol = atbol,
 		    context = ctx
@@ -444,7 +448,7 @@ lexer cont buf s@(PState{
 			          (map toUpper (lexemeToString buf2)) in
 		  case lookupUFM pragmaKeywordsFM lexeme of
 			-- ignore RULES pragmas when -fglasgow-exts is off
-			Just ITrules_prag | not (flag glaexts) ->
+			Just ITrules_prag | not (glaExtsEnabled exts) ->
 			   skip_to_end (stepOnBy# buf 2#) s'
 			Just ITline_prag -> 
 			   line_prag skip_to_end buf2 s'
@@ -481,7 +485,7 @@ lexer cont buf s@(PState{
 		       atbol = atbol}
 
 	      	 is_a_token | atbol /=# 0# = lexBOL cont buf s'
-	      		    | otherwise    = lexToken cont glaexts buf s'
+	      		    | otherwise    = lexToken cont exts buf s'
 
 -- {-# LINE .. #-} pragmas.  yeuch.
 line_prag cont buf s@PState{loc=loc} =
@@ -541,7 +545,7 @@ skipNestedComment' orig_loc cont buf = loop buf
 lexBOL :: (Token -> P a) -> P a
 lexBOL cont buf s@(PState{
 		    loc = loc,
-		    glasgow_exts = glaexts,
+		    extsBitmap = exts,
 		    bol = bol,
 		    atbol = atbol,
 		    context = ctx
@@ -553,7 +557,7 @@ lexBOL cont buf s@(PState{
 	        --trace ("col = " ++ show (I# col) ++ ", layout: inserting ';'") $
 		cont ITsemi buf s{atbol = 0#}
 	else
-		lexToken cont glaexts buf s{atbol = 0#}
+		lexToken cont exts buf s{atbol = 0#}
   where
 	col = currentIndex# buf -# bol
 
@@ -572,18 +576,21 @@ lexBOL cont buf s@(PState{
 
 
 lexToken :: (Token -> P a) -> Int# -> P a
-lexToken cont glaexts buf =
+lexToken cont exts buf =
 -- trace "lexToken" $
   case currentChar# buf of
 
     -- special symbols ----------------------------------------------------
-    '('# | flag glaexts && lookAhead# buf 1# `eqChar#` '#'# 
+    '('# | glaExtsEnabled exts && lookAhead# buf 1# `eqChar#` '#'# 
 		-> cont IToubxparen (setCurrentPos# buf 2#)
 	 | otherwise
 		-> cont IToparen (incLexeme buf)
 
     ')'# -> cont ITcparen    (incLexeme buf)
-    '['# -> cont ITobrack    (incLexeme buf)
+    '['# | parrEnabled exts && lookAhead# buf 1# `eqChar#` ':'# ->
+	    cont ITopabrack  (setCurrentPos# buf 2#)
+	 | otherwise -> 
+	    cont ITobrack    (incLexeme buf)
     ']'# -> cont ITcbrack    (incLexeme buf)
     ','# -> cont ITcomma     (incLexeme buf)
     ';'# -> cont ITsemi      (incLexeme buf)
@@ -592,26 +599,31 @@ lexToken cont glaexts buf =
 		(_:ctx') -> cont ITccurly (incLexeme buf) s{context=ctx'}
 		_  	 -> lexError "too many '}'s" buf s
     '|'# -> case lookAhead# buf 1# of
-	         '}'#  | flag glaexts -> cont ITccurlybar 
-                                              (setCurrentPos# buf 2#)
-                 _                    -> lex_sym cont (incLexeme buf)
+	         '}'#  | glaExtsEnabled exts -> cont ITccurlybar 
+                                                     (setCurrentPos# buf 2#)
+                 _                           -> lex_sym cont (incLexeme buf)
+    ':'# -> case lookAhead# buf 1# of
+	         ']'#  | parrEnabled exts    -> cont ITcpabrack
+                                                     (setCurrentPos# buf 2#)
+                 _                           -> lex_sym cont (incLexeme buf)
 
                 
     '#'# -> case lookAhead# buf 1# of
-		')'#  | flag glaexts -> cont ITcubxparen (setCurrentPos# buf 2#)
+		')'#  | glaExtsEnabled exts 
+		     -> cont ITcubxparen (setCurrentPos# buf 2#)
 		'-'# -> case lookAhead# buf 2# of
 			   '}'# -> cont ITclose_prag (setCurrentPos# buf 3#)
 			   _    -> lex_sym cont (incLexeme buf)
 		_    -> lex_sym cont (incLexeme buf)
 
-    '`'# | flag glaexts && lookAhead# buf 1# `eqChar#` '`'#
+    '`'# | glaExtsEnabled exts && lookAhead# buf 1# `eqChar#` '`'#
 		-> lex_cstring cont (setCurrentPos# buf 2#)
 	 | otherwise
 	   	-> cont ITbackquote (incLexeme buf)
 
-    '{'# ->	-- look for "{-##" special iface pragma
+    '{'# ->	-- look for "{-##" special iface pragma   -- for Emacs: -}
             case lookAhead# buf 1# of
-           '|'# | flag glaexts 
+           '|'# | glaExtsEnabled exts 
                 -> cont ITocurlybar (setCurrentPos# buf 2#)
 	   '-'# -> case lookAhead# buf 2# of
 		    '#'# -> case lookAhead# buf 3# of
@@ -626,11 +638,11 @@ lexToken cont glaexts buf =
 	   _ -> (layoutOff `thenP_` cont ITocurly)  (incLexeme buf) 
 
     -- strings/characters -------------------------------------------------
-    '\"'#{-"-} -> lex_string cont glaexts [] (incLexeme buf)
-    '\''#      -> lex_char (char_end cont) glaexts (incLexeme buf)
+    '\"'#{-"-} -> lex_string cont exts [] (incLexeme buf)
+    '\''#      -> lex_char (char_end cont) exts (incLexeme buf)
 
     -- strictness and cpr pragmas and __scc treated specially.
-    '_'# | flag glaexts ->
+    '_'# | glaExtsEnabled exts ->
 	 case lookAhead# buf 1# of
 	   '_'# -> case lookAhead# buf 2# of
 	   	    'S'# -> 
@@ -642,15 +654,15 @@ lexToken cont glaexts buf =
 	   	    's'# -> 
 			case prefixMatch (stepOnBy# buf 3#) "cc" of
 		               Just buf' -> lex_scc cont (stepOverLexeme buf')
-		     	       Nothing   -> lex_id cont glaexts buf
-		    _ -> lex_id cont glaexts buf
-	   _    -> lex_id cont glaexts buf
+		     	       Nothing   -> lex_id cont exts buf
+		    _ -> lex_id cont exts buf
+	   _    -> lex_id cont exts buf
 
 	-- Hexadecimal and octal constants
     '0'# | (ch `eqChar#` 'x'# || ch `eqChar#` 'X'#) && is_hexdigit ch2
-		-> readNum (after_lexnum cont glaexts) buf' is_hexdigit 16 hex
+		-> readNum (after_lexnum cont exts) buf' is_hexdigit 16 hex
 	 | (ch `eqChar#` 'o'# || ch `eqChar#` 'O'#) && is_octdigit ch2
-		-> readNum (after_lexnum cont glaexts) buf' is_octdigit  8 oct_or_dec
+		-> readNum (after_lexnum cont exts) buf' is_octdigit  8 oct_or_dec
 	where ch   = lookAhead# buf 1#
 	      ch2  = lookAhead# buf 2#
 	      buf' = setCurrentPos# buf 2#
@@ -662,14 +674,14 @@ lexToken cont glaexts buf =
 	       trace "lexIface: misplaced NUL?" $ 
 	       cont (ITunknown "\NUL") (stepOn buf)
 
-    '?'# | flag glaexts && is_lower (lookAhead# buf 1#) ->
+    '?'# | glaExtsEnabled exts && is_lower (lookAhead# buf 1#) ->
 	    lex_ip ITdupipvarid cont (incLexeme buf)
-    '%'# | flag glaexts && is_lower (lookAhead# buf 1#) ->
+    '%'# | glaExtsEnabled exts && is_lower (lookAhead# buf 1#) ->
 	    lex_ip ITsplitipvarid cont (incLexeme buf)
-    c | is_digit  c -> lex_num cont glaexts 0 buf
+    c | is_digit  c -> lex_num cont exts 0 buf
       | is_symbol c -> lex_sym cont buf
-      | is_upper  c -> lex_con cont glaexts buf
-      | is_ident  c -> lex_id  cont glaexts buf
+      | is_upper  c -> lex_con cont exts buf
+      | is_ident  c -> lex_id  cont exts buf
       | otherwise   -> lexError "illegal character" buf
 
 -- Int# is unlifted, and therefore faster than Bool for flags.
@@ -693,51 +705,51 @@ lex_prag cont buf
 -------------------------------------------------------------------------------
 -- Strings & Chars
 
-lex_string cont glaexts s buf
+lex_string cont exts s buf
   = case currentChar# buf of
 	'"'#{-"-} -> 
 	   let buf' = incLexeme buf
                s' = mkFastStringNarrow (map chr (reverse s)) 
            in case currentChar# buf' of
-		'#'# | flag glaexts -> if all (<= 0xFF) s
+		'#'# | glaExtsEnabled exts -> if all (<= 0xFF) s
                     then cont (ITprimstring s') (incLexeme buf')
                     else lexError "primitive string literal must contain only characters <= \'\\xFF\'" buf'
 		_                   -> cont (ITstring s') buf'
 
 	-- ignore \& in a string, deal with string gaps
 	'\\'# | next_ch `eqChar#` '&'# 
-		-> lex_string cont glaexts s buf'
+		-> lex_string cont exts s buf'
 	      | is_space next_ch
-		-> lex_stringgap cont glaexts s (incLexeme buf)
+		-> lex_stringgap cont exts s (incLexeme buf)
 
 	    where next_ch = lookAhead# buf 1#
 		  buf' = setCurrentPos# buf 2#
 
-	_ -> lex_char (lex_next_string cont s) glaexts buf
+	_ -> lex_char (lex_next_string cont s) exts buf
 
-lex_stringgap cont glaexts s buf
+lex_stringgap cont exts s buf
   = let buf' = incLexeme buf in
     case currentChar# buf of
-	'\n'# -> \st@PState{loc = loc} -> lex_stringgap cont glaexts s buf' 
+	'\n'# -> \st@PState{loc = loc} -> lex_stringgap cont exts s buf' 
 		  st{loc = incSrcLine loc}
-	'\\'# -> lex_string cont glaexts s buf'
-	c | is_space c -> lex_stringgap cont glaexts s buf'
+	'\\'# -> lex_string cont exts s buf'
+	c | is_space c -> lex_stringgap cont exts s buf'
 	other -> charError buf'
 
-lex_next_string cont s glaexts c buf = lex_string cont glaexts (c:s) buf
+lex_next_string cont s exts c buf = lex_string cont exts (c:s) buf
 
 lex_char :: (Int# -> Int -> P a) -> Int# -> P a
-lex_char cont glaexts buf
+lex_char cont exts buf
   = case currentChar# buf of
-	'\\'# -> lex_escape (cont glaexts) (incLexeme buf)
-	c | is_any c -> cont glaexts (I# (ord# c)) (incLexeme buf)
+	'\\'# -> lex_escape (cont exts) (incLexeme buf)
+	c | is_any c -> cont exts (I# (ord# c)) (incLexeme buf)
 	other -> charError buf
 
-char_end cont glaexts c buf
+char_end cont exts c buf
   = case currentChar# buf of
 	'\''# -> let buf' = incLexeme buf in
 		 case currentChar# buf' of
-			'#'# | flag glaexts 
+			'#'# | glaExtsEnabled exts 
 				-> cont (ITprimchar c) (incLexeme buf')
 			_   	-> cont (ITchar c) buf'
 	_     -> charError buf
@@ -892,7 +904,7 @@ lex_scc cont buf =
 -- Numbers
 
 lex_num :: (Token -> P a) -> Int# -> Integer -> P a
-lex_num cont glaexts acc buf =
+lex_num cont exts acc buf =
  case scanNumLit acc buf of
      (acc',buf') ->
        case currentChar# buf' of
@@ -919,18 +931,18 @@ lex_num cont glaexts acc buf =
 		    v = readRational__ (lexemeToString l)
 
 		in case currentChar# l of -- glasgow exts only
-		      '#'# | flag glaexts -> let l' = incLexeme l in
+		      '#'# | glaExtsEnabled exts -> let l' = incLexeme l in
 			      case currentChar# l' of
 				'#'# -> cont (ITprimdouble v) (incLexeme l')
 				_    -> cont (ITprimfloat  v) l'
 		      _ -> cont (ITrational v) l
 
-         _ -> after_lexnum cont glaexts acc' buf'
+         _ -> after_lexnum cont exts acc' buf'
 		
-after_lexnum cont glaexts i buf
+after_lexnum cont exts i buf
   = case currentChar# buf of
-	'#'# | flag glaexts -> cont (ITprimint i) (incLexeme buf)
-	_    -> cont (ITinteger i) buf
+	'#'# | glaExtsEnabled exts -> cont (ITprimint i) (incLexeme buf)
+	_                          -> cont (ITinteger i) buf
 
 -----------------------------------------------------------------------------
 -- C "literal literal"s  (i.e. things like ``NULL'', ``stdout'' etc.)
@@ -953,11 +965,11 @@ lex_ip ip_constr cont buf =
    buf' -> cont (ip_constr (tailFS lexeme)) buf'
 	where lexeme = lexemeToFastString buf'
 
-lex_id cont glaexts buf =
+lex_id cont exts buf =
  let buf1 = expandWhile# is_ident buf in
  seq buf1 $
 
- case (if flag glaexts 
+ case (if glaExtsEnabled exts 
 	then expandWhile# (eqChar# '#'#) buf1 -- slurp trailing hashes
 	else buf1) 				of { buf' ->
 
@@ -970,7 +982,7 @@ lex_id cont glaexts buf =
 
  let var_token = cont (ITvarid lexeme) buf' in
 
- if not (flag glaexts)
+ if not (glaExtsEnabled exts)
    then var_token
    else
 
@@ -996,11 +1008,11 @@ lex_sym cont buf =
 -- The argument buf is the StringBuffer representing the lexeme
 -- identified so far, where the next character is upper-case.
 
-lex_con cont glaexts buf =
+lex_con cont exts buf =
  -- trace ("con: "{-++unpackFS lexeme-}) $
  let empty_buf = stepOverLexeme buf in
- case expandWhile# is_ident empty_buf    of { buf1 ->
- case slurp_trailing_hashes buf1 glaexts of { con_buf ->
+ case expandWhile# is_ident empty_buf of { buf1 ->
+ case slurp_trailing_hashes buf1 exts of { con_buf ->
 
  let all_buf = mergeLexemes buf con_buf
      
@@ -1014,13 +1026,13 @@ lex_con cont glaexts buf =
  in
 
  case currentChar# all_buf of
-     '.'# -> maybe_qualified cont glaexts all_lexeme 
+     '.'# -> maybe_qualified cont exts all_lexeme 
 		(incLexeme all_buf) just_a_conid
      _    -> just_a_conid
   }}
 
 
-maybe_qualified cont glaexts mod buf just_a_conid =
+maybe_qualified cont exts mod buf just_a_conid =
  -- trace ("qid: "{-++unpackFS lexeme-}) $
  case currentChar# buf of
   '['# -> 	-- Special case for []
@@ -1031,7 +1043,7 @@ maybe_qualified cont glaexts mod buf just_a_conid =
   '('# ->  -- Special case for (,,,)
 	   -- This *is* necessary to deal with e.g. "instance C PrelBase.(,,)"
     case lookAhead# buf 1# of
-     '#'# | flag glaexts -> case lookAhead# buf 2# of
+     '#'# | glaExtsEnabled exts -> case lookAhead# buf 2# of
 		','# -> lex_ubx_tuple cont mod (setCurrentPos# buf 3#) 
 				just_a_conid
 		_    -> just_a_conid
@@ -1041,14 +1053,14 @@ maybe_qualified cont glaexts mod buf just_a_conid =
 
   '-'# -> case lookAhead# buf 1# of
             '>'# -> cont (ITqconid (mod,SLIT("(->)"))) (setCurrentPos# buf 2#)
-            _    -> lex_id3 cont glaexts mod buf just_a_conid
+            _    -> lex_id3 cont exts mod buf just_a_conid
 
-  _    -> lex_id3 cont glaexts mod buf just_a_conid
+  _    -> lex_id3 cont exts mod buf just_a_conid
 
 
-lex_id3 cont glaexts mod buf just_a_conid
+lex_id3 cont exts mod buf just_a_conid
   | is_upper (currentChar# buf) =
-     lex_con cont glaexts buf
+     lex_con cont exts buf
 
   | is_symbol (currentChar# buf) =
      let 
@@ -1075,7 +1087,7 @@ lex_id3 cont glaexts mod buf just_a_conid
     	    then just_a_conid
     	    else
 
-     case slurp_trailing_hashes buf1 glaexts of { buf' ->
+     case slurp_trailing_hashes buf1 exts of { buf' ->
 
      let
       lexeme	  = lexemeToFastString buf'
@@ -1091,9 +1103,9 @@ lex_id3 cont glaexts mod buf just_a_conid
 			   -> just_a_conid	   -- avoid M.where etc.
      }}}
 
-slurp_trailing_hashes buf glaexts
-  | flag glaexts = expandWhile# (`eqChar#` '#'#) buf
-  | otherwise    = buf
+slurp_trailing_hashes buf exts
+  | glaExtsEnabled exts = expandWhile# (`eqChar#` '#'#) buf
+  | otherwise		= buf
 
 
 mk_var_token pk_str
@@ -1204,11 +1216,11 @@ data ParseResult a
   | PFailed Message
 
 data PState = PState { 
-	loc           :: SrcLoc,
-	glasgow_exts  :: Int#,
-	bol           :: Int#,
-	atbol         :: Int#,
-	context	      :: [LayoutContext]
+	loc        :: SrcLoc,
+	extsBitmap :: Int#,	-- bitmap that determines permitted extensions
+	bol        :: Int#,
+	atbol      :: Int#,
+	context	   :: [LayoutContext]
      }
 
 type P a = StringBuffer		-- Input string
@@ -1356,6 +1368,48 @@ checkVersion mb@Nothing  buf s@(PState{loc = loc})
  | "hi-boot" `isSuffixOf` (_UNPK_ (srcLocFile loc)) = POk s ()
  | otherwise = PFailed (ifaceVersionErr mb loc ([]::[Token]){-Todo-})
 
+
+-- for reasons of efficiency, flags indicating language extensions (eg,
+-- -fglasgow-exts or -fparr) are represented by a bitmap stored in an unboxed
+-- integer
+
+glaExtsBit, ffiBit, parrBit :: Int
+glaExtsBit = 0
+ffiBit	   = 1	-- FIXME: not used yet; still part of `glaExtsBit'
+parrBit	   = 2
+
+glaExtsEnabled, ffiEnabled, parrEnabled :: Int# -> Bool
+glaExtsEnabled flags = testBit (I# flags) glaExtsBit
+ffiEnabled     flags = testBit (I# flags) ffiBit
+parrEnabled    flags = testBit (I# flags) parrBit
+
+-- convenient record-based bitmap for the interface to the rest of the world
+--
+data ExtFlags = ExtFlags {
+		  glasgowExtsEF :: Bool,
+--		  ffiEF		:: Bool,  -- commented out to avoid warnings
+		  parrEF	:: Bool	  -- while not used yet
+		}
+
+-- create a parse state
+--
+mkPState          :: SrcLoc -> ExtFlags -> PState
+mkPState loc exts  = PState {
+		       loc        = loc,
+		       extsBitmap = case bitmap of {I# bits -> bits},
+		       bol	  = 0#,
+		       atbol      = 1#,
+		       context    = []
+		     }
+		     where
+		       bitmap =     glaExtsBit `setBitIf` glasgowExtsEF exts
+--			        .|. ffiBit     `setBitIf` ffiEF		exts
+				.|. parrBit    `setBitIf` parrEF	exts
+                       --
+		       b `setBitIf` cond | cond      = bit b
+					 | otherwise = 0
+
+
 -----------------------------------------------------------------
 
 ifaceParseErr :: StringBuffer -> SrcLoc -> Message
diff --git a/ghc/compiler/parser/ParseUtil.lhs b/ghc/compiler/parser/ParseUtil.lhs
index 8d579373875fd0b193d615d212e30d6da51272e6..73f31fa0577de2bfd874cbbbbfbddf2dee2b24c5 100644
--- a/ghc/compiler/parser/ParseUtil.lhs
+++ b/ghc/compiler/parser/ParseUtil.lhs
@@ -226,6 +226,8 @@ checkPat e [] = case e of
 	HsPar e		   -> checkPat e [] `thenP` (returnP . ParPatIn)
 	ExplicitList _ es  -> mapP (\e -> checkPat e []) es `thenP` \ps ->
 			      returnP (ListPatIn ps)
+	ExplicitPArr _ es  -> mapP (\e -> checkPat e []) es `thenP` \ps ->
+			      returnP (PArrPatIn ps)
 
 	ExplicitTuple es b -> mapP (\e -> checkPat e []) es `thenP` \ps ->
 			      returnP (TuplePatIn ps b)
diff --git a/ghc/compiler/parser/Parser.y b/ghc/compiler/parser/Parser.y
index e3f305fa921d60682ca086835306a6a3dc6eb81c..ec7af294b74732e00a51b67d477d837e0e82652f 100644
--- a/ghc/compiler/parser/Parser.y
+++ b/ghc/compiler/parser/Parser.y
@@ -1,6 +1,6 @@
 {-								-*-haskell-*-
 -----------------------------------------------------------------------------
-$Id: Parser.y,v 1.83 2002/02/04 03:40:32 chak Exp $
+$Id: Parser.y,v 1.84 2002/02/11 08:20:44 chak Exp $
 
 Haskell grammar.
 
@@ -18,9 +18,9 @@ import RdrHsSyn
 import Lex
 import ParseUtil
 import RdrName
-import PrelNames	( mAIN_Name, unitTyCon_RDR, funTyCon_RDR, listTyCon_RDR,
-			  tupleTyCon_RDR, unitCon_RDR, nilCon_RDR, tupleCon_RDR
-			)
+import PrelNames	( mAIN_Name, unitTyCon_RDR, funTyCon_RDR, 
+			  listTyCon_RDR, parrTyCon_RDR, tupleTyCon_RDR, 
+			  unitCon_RDR, nilCon_RDR, tupleCon_RDR )
 import ForeignCall	( Safety(..), CExportSpec(..), CCallSpec(..), 
 			  CCallConv(..), CCallTarget(..), defaultCCallConv,
 			  DNCallSpec(..) )
@@ -175,6 +175,8 @@ Conflicts: 21 shift/reduce, -=chak[4Feb2]
  vccurly	{ ITvccurly } -- virtual close curly (from layout)
  '['		{ ITobrack }
  ']'		{ ITcbrack }
+ '[:'		{ ITopabrack }
+ ':]'		{ ITcpabrack }
  '('		{ IToparen }
  ')'		{ ITcparen }
  '(#'		{ IToubxparen }
@@ -662,6 +664,7 @@ atype :: { RdrNameHsType }
 	| '(' type ',' types ')'	{ HsTupleTy (mkHsTupCon tcName Boxed  ($2:$4)) ($2 : reverse $4) }
 	| '(#' types '#)'		{ HsTupleTy (mkHsTupCon tcName Unboxed     $2) (reverse $2)	 }
 	| '[' type ']'			{ HsListTy $2 }
+	| '[:' type ':]'		{ HsPArrTy $2 }
 	| '(' ctype ')'		        { $2 }
 -- Generics
         | INTEGER                       { HsNumTy $1 }
@@ -883,6 +886,7 @@ aexp1	:: { RdrNameHsExpr }
 	| '(' exp ',' texps ')'		{ ExplicitTuple ($2 : reverse $4) Boxed}
 	| '(#' texps '#)'		{ ExplicitTuple (reverse $2)      Unboxed }
 	| '[' list ']'                  { $2 }
+	| '[:' parr ':]'                { $2 }
 	| '(' infixexp qop ')'		{ (SectionL $2 (HsVar $3))  }
 	| '(' qopm infixexp ')'		{ (SectionR $2 $3) }
 	| qvar '@' aexp			{ EAsPat $1 $3 }
@@ -931,6 +935,35 @@ quals :: { [RdrNameStmt] }
 	: quals ',' stmt		{ $3 : $1 }
 	| stmt				{ [$1] }
 
+-----------------------------------------------------------------------------
+-- Parallel array expressions
+
+-- The rules below are little bit contorted; see the list case for details.
+-- Note that, in contrast to lists, we only have finite arithmetic sequences.
+-- Moreover, we allow explicit arrays with no element (represented by the nil
+-- constructor in the list case).
+
+parr :: { RdrNameHsExpr }
+	: 				{ ExplicitPArr placeHolderType [] }
+	| exp				{ ExplicitPArr placeHolderType [$1] }
+	| lexps 			{ ExplicitPArr placeHolderType 
+						       (reverse $1) }
+	| exp '..' exp	 		{ PArrSeqIn (FromTo $1 $3) }
+	| exp ',' exp '..' exp		{ PArrSeqIn (FromThenTo $1 $3 $5) }
+	| exp srcloc pquals		{% let {
+					     body [qs] = qs;
+					     body  qss = [ParStmt 
+							   (map reverse qss)]}
+					   in
+					   returnP $ 
+					     HsDo PArrComp 
+						  (reverse (ResultStmt $1 $2 
+							    : body $3))
+						  $2
+					}
+
+-- We are reusing `lexps' and `pquals' from the list case.
+
 -----------------------------------------------------------------------------
 -- Case alternatives
 
@@ -1047,6 +1080,7 @@ gtycon 	:: { RdrName }
 	| '(' ')'			{ unitTyCon_RDR }
 	| '(' '->' ')'			{ funTyCon_RDR }
 	| '[' ']'			{ listTyCon_RDR }
+	| '[:' ':]'			{ parrTyCon_RDR }
 	| '(' commas ')'		{ tupleTyCon_RDR $2 }
 
 gcon 	:: { RdrName }
@@ -1054,6 +1088,7 @@ gcon 	:: { RdrName }
 	| '[' ']'		{ nilCon_RDR }
 	| '(' commas ')'	{ tupleCon_RDR $2 }
  	| qcon			{ $1 }
+-- the case of '[:' ':]' is part of the production `parr'
 
 var 	:: { RdrName }
 	: varid			{ $1 }
diff --git a/ghc/compiler/parser/RdrHsSyn.lhs b/ghc/compiler/parser/RdrHsSyn.lhs
index 7629070b5862d6aca00b2b22b41182358ced5533..5df53ae23716c20589da47bcf7f7f0e183fd89ae 100644
--- a/ghc/compiler/parser/RdrHsSyn.lhs
+++ b/ghc/compiler/parser/RdrHsSyn.lhs
@@ -143,6 +143,7 @@ extract_tys tys = foldr extract_ty [] tys
 
 extract_ty (HsAppTy ty1 ty2)          acc = extract_ty ty1 (extract_ty ty2 acc)
 extract_ty (HsListTy ty)              acc = extract_ty ty acc
+extract_ty (HsPArrTy ty)              acc = extract_ty ty acc
 extract_ty (HsTupleTy _ tys)          acc = foldr extract_ty acc tys
 extract_ty (HsFunTy ty1 ty2)          acc = extract_ty ty1 (extract_ty ty2 acc)
 extract_ty (HsPredTy p)		      acc = extract_pred p acc
diff --git a/ghc/compiler/prelude/PrelNames.lhs b/ghc/compiler/prelude/PrelNames.lhs
index be714d1a9af9280b41cdadb45f89fa0713dd0c84..8a823302a1bb95e93834d5db45c1053556740ae6 100644
--- a/ghc/compiler/prelude/PrelNames.lhs
+++ b/ghc/compiler/prelude/PrelNames.lhs
@@ -148,7 +148,11 @@ knownKeyNames
 	returnMName,
 	failMName,
 	fromRationalName,
-    
+
+        -- not class methods, but overloaded (for parallel arrays)
+	enumFromToPName,
+	enumFromThenToPName,
+
 	deRefStablePtrName,
 	newStablePtrName,
 	bindIOName,
@@ -171,6 +175,20 @@ knownKeyNames
 	buildName,
 	augmentName,
 
+        -- Parallel array operations
+	nullPName,
+	lengthPName,
+	replicatePName,
+	mapPName,
+	filterPName,
+	zipPName,
+	crossPName,
+	indexPName,
+	toPName,
+	bpermutePName,
+	bpermuteDftPName,
+	indexOfPName,
+
 	-- FFI primitive types that are not wired-in.
 	int8TyConName,
 	int16TyConName,
@@ -190,7 +208,19 @@ knownKeyNames
 	assertName,
 	runSTRepName,
 	printName,
-	splitIdName, fstIdName, sndIdName	-- Used by splittery
+	splitName, fstName, sndName,	-- Used by splittery
+
+	-- Others (needed for flattening and not mentioned before)
+	andName,
+	orName,
+	eqCharName, 
+	eqIntName,
+	eqFloatName, 
+	eqDoubleName, 
+	neqCharName, 
+	neqIntName,
+	neqFloatName, 
+	neqDoubleName
     ]
 \end{code}
 
@@ -210,6 +240,7 @@ pREL_SHOW_Name    = mkModuleName "PrelShow"
 pREL_READ_Name    = mkModuleName "PrelRead"
 pREL_NUM_Name     = mkModuleName "PrelNum"
 pREL_LIST_Name    = mkModuleName "PrelList"
+pREL_PARR_Name    = mkModuleName "PrelPArr"
 pREL_TUP_Name     = mkModuleName "PrelTup"
 pREL_PACK_Name    = mkModuleName "PrelPack"
 pREL_CONC_Name    = mkModuleName "PrelConc"
@@ -364,8 +395,8 @@ nilDataConName 	  = dataQual pREL_BASE_Name SLIT("[]") nilDataConKey
 consDataConName	  = dataQual pREL_BASE_Name SLIT(":") consDataConKey
 
 -- PrelTup
-fstIdName	  = varQual pREL_TUP_Name SLIT("fst") fstIdKey
-sndIdName	  = varQual pREL_TUP_Name SLIT("snd") sndIdKey
+fstName		  = varQual pREL_TUP_Name SLIT("fst") fstIdKey
+sndName		  = varQual pREL_TUP_Name SLIT("snd") sndIdKey
 
 -- Generics
 crossTyConName     = tcQual   pREL_BASE_Name SLIT(":*:") crossTyConKey
@@ -377,14 +408,25 @@ genUnitTyConName   = tcQual   pREL_BASE_Name SLIT("Unit") genUnitTyConKey
 genUnitDataConName = dataQual pREL_BASE_Name SLIT("Unit") genUnitDataConKey
 
 -- Random PrelBase functions
-unsafeCoerceName  = varQual pREL_BASE_Name SLIT("unsafeCoerce") unsafeCoerceIdKey
+unsafeCoerceName  = varQual pREL_BASE_Name SLIT("unsafeCoerce") 
+							     unsafeCoerceIdKey
 otherwiseIdName   = varQual pREL_BASE_Name SLIT("otherwise") otherwiseIdKey
-appendName	  = varQual pREL_BASE_Name SLIT("++") appendIdKey
-foldrName	  = varQual pREL_BASE_Name SLIT("foldr") foldrIdKey
-mapName	   	  = varQual pREL_BASE_Name SLIT("map") mapIdKey
-buildName	  = varQual pREL_BASE_Name SLIT("build") buildIdKey
-augmentName	  = varQual pREL_BASE_Name SLIT("augment") augmentIdKey
-eqStringName	  = varQual pREL_BASE_Name SLIT("eqString") eqStringIdKey
+appendName	  = varQual pREL_BASE_Name SLIT("++")	     appendIdKey
+foldrName	  = varQual pREL_BASE_Name SLIT("foldr")     foldrIdKey
+mapName	   	  = varQual pREL_BASE_Name SLIT("map")	     mapIdKey
+buildName	  = varQual pREL_BASE_Name SLIT("build")     buildIdKey
+augmentName	  = varQual pREL_BASE_Name SLIT("augment")   augmentIdKey
+eqStringName	  = varQual pREL_BASE_Name SLIT("eqString")  eqStringIdKey
+andName		  = varQual pREL_BASE_Name SLIT("&&")	     andIdKey
+orName		  = varQual pREL_BASE_Name SLIT("||")	     orIdKey
+eqCharName	  = varQual pREL_GHC_Name  SLIT("eqChar#")   eqCharIdKey
+eqIntName	  = varQual pREL_GHC_Name  SLIT("==#")       eqIntIdKey
+eqFloatName	  = varQual pREL_GHC_Name  SLIT("eqFloat#")  eqFloatIdKey
+eqDoubleName	  = varQual pREL_GHC_Name  SLIT("==##")	     eqDoubleIdKey
+neqCharName	  = varQual pREL_GHC_Name  SLIT("neqChar#")  neqCharIdKey
+neqIntName	  = varQual pREL_GHC_Name  SLIT("/=#")       neqIntIdKey
+neqFloatName	  = varQual pREL_GHC_Name  SLIT("neqFloat#") neqFloatIdKey
+neqDoubleName	  = varQual pREL_GHC_Name  SLIT("/=##")	     neqDoubleIdKey
 
 -- Strings
 unpackCStringName       = varQual pREL_BASE_Name SLIT("unpackCString#") unpackCStringIdKey
@@ -455,6 +497,10 @@ enumFromToName	   = varQual pREL_ENUM_Name SLIT("enumFromTo") enumFromToClassOpK
 enumFromThenName   = varQual pREL_ENUM_Name SLIT("enumFromThen") enumFromThenClassOpKey
 enumFromThenToName = varQual pREL_ENUM_Name SLIT("enumFromThenTo") enumFromThenToClassOpKey
 
+-- Overloaded via Class Enum
+enumFromToPName	   = varQual pREL_PARR_Name SLIT("enumFromToP") enumFromToPIdKey
+enumFromThenToPName= varQual pREL_PARR_Name SLIT("enumFromThenToP") enumFromThenToPIdKey
+
 -- Class Bounded
 boundedClassName  = clsQual pREL_ENUM_Name SLIT("Bounded") boundedClassKey
 
@@ -463,6 +509,23 @@ concatName	  = varQual pREL_LIST_Name SLIT("concat") concatIdKey
 filterName	  = varQual pREL_LIST_Name SLIT("filter") filterIdKey
 zipName	   	  = varQual pREL_LIST_Name SLIT("zip") zipIdKey
 
+-- parallel array types and functions
+parrTyConName	  = tcQual  pREL_PARR_Name SLIT("[::]")       parrTyConKey
+parrDataConName   = dataQual pREL_PARR_Name SLIT("PArr")      parrDataConKey
+nullPName	  = varQual pREL_PARR_Name SLIT("nullP")      nullPIdKey
+lengthPName	  = varQual pREL_PARR_Name SLIT("lengthP")    lengthPIdKey
+replicatePName	  = varQual pREL_PARR_Name SLIT("replicateP") replicatePIdKey
+mapPName	  = varQual pREL_PARR_Name SLIT("mapP")       mapPIdKey
+filterPName	  = varQual pREL_PARR_Name SLIT("filterP")    filterPIdKey
+zipPName	  = varQual pREL_PARR_Name SLIT("zipP")       zipPIdKey
+crossPName	  = varQual pREL_PARR_Name SLIT("crossP")     crossPIdKey
+indexPName	  = varQual pREL_PARR_Name SLIT("!:")	      indexPIdKey
+toPName	          = varQual pREL_PARR_Name SLIT("toP")	      toPIdKey
+bpermutePName     = varQual pREL_PARR_Name SLIT("bpermuteP")  bpermutePIdKey
+bpermuteDftPName  = varQual pREL_PARR_Name SLIT("bpermuteDftP") 
+							      bpermuteDftPIdKey
+indexOfPName      = varQual pREL_PARR_Name SLIT("indexOfP")   indexOfPIdKey
+
 -- IOBase things
 ioTyConName	  = tcQual   pREL_IO_BASE_Name SLIT("IO") ioTyConKey
 ioDataConName     = dataQual pREL_IO_BASE_Name SLIT("IO") ioDataConKey
@@ -500,7 +563,7 @@ funPtrDataConName = dataQual pREL_PTR_Name SLIT("FunPtr") funPtrDataConKey
 byteArrayTyConName	  = tcQual pREL_BYTEARR_Name  SLIT("ByteArray") byteArrayTyConKey
 mutableByteArrayTyConName = tcQual pREL_BYTEARR_Name  SLIT("MutableByteArray") mutableByteArrayTyConKey
 
--- Forign objects and weak pointers
+-- Foreign objects and weak pointers
 foreignObjTyConName   = tcQual   fOREIGNOBJ_Name SLIT("ForeignObj") foreignObjTyConKey
 foreignObjDataConName = dataQual fOREIGNOBJ_Name SLIT("ForeignObj") foreignObjDataConKey
 foreignPtrTyConName   = tcQual   pREL_FOREIGN_Name SLIT("ForeignPtr") foreignPtrTyConKey
@@ -516,7 +579,7 @@ getTagName	   = varQual pREL_GHC_Name SLIT("getTag#") getTagIdKey
 runSTRepName	   = varQual pREL_ST_Name  SLIT("runSTRep") runSTRepIdKey
 
 -- The "split" Id for splittable implicit parameters
-splitIdName = varQual pREL_SPLIT_Name SLIT("split") splitIdKey
+splitName          = varQual pREL_SPLIT_Name SLIT("split") splitIdKey
 \end{code}
 
 %************************************************************************
@@ -588,6 +651,7 @@ populate the occurrence list above.
 funTyCon_RDR  		= nameRdrName funTyConName
 nilCon_RDR    		= nameRdrName nilDataConName
 listTyCon_RDR 		= nameRdrName listTyConName
+parrTyCon_RDR 		= nameRdrName parrTyConName
 ioTyCon_RDR		= nameRdrName ioTyConName
 intTyCon_RDR 		= nameRdrName intTyConName
 eq_RDR 			= nameRdrName eqName
@@ -767,6 +831,9 @@ crossTyConKey		      		= mkPreludeTyConUnique 79
 plusTyConKey		      		= mkPreludeTyConUnique 80
 genUnitTyConKey				= mkPreludeTyConUnique 81
 
+-- Parallel array type constructor
+parrTyConKey				= mkPreludeTyConUnique 82
+
 unitTyConKey = mkTupleTyConUnique Boxed 0
 \end{code}
 
@@ -803,6 +870,9 @@ crossDataConKey		      		= mkPreludeDataConUnique 20
 inlDataConKey		      		= mkPreludeDataConUnique 21
 inrDataConKey		      		= mkPreludeDataConUnique 22
 genUnitDataConKey			= mkPreludeDataConUnique 23
+
+-- Data constructor for parallel arrays
+parrDataConKey				= mkPreludeDataConUnique 24
 \end{code}
 
 %************************************************************************
@@ -868,6 +938,35 @@ runSTRepIdKey		      = mkPreludeMiscIdUnique 54
 
 dollarMainKey		      = mkPreludeMiscIdUnique 55
 runMainKey		      = mkPreludeMiscIdUnique 56
+
+andIdKey		      = mkPreludeMiscIdUnique 57
+orIdKey			      = mkPreludeMiscIdUnique 58
+eqCharIdKey		      = mkPreludeMiscIdUnique 59
+eqIntIdKey		      = mkPreludeMiscIdUnique 60
+eqFloatIdKey		      = mkPreludeMiscIdUnique 61
+eqDoubleIdKey		      = mkPreludeMiscIdUnique 62
+neqCharIdKey		      = mkPreludeMiscIdUnique 63
+neqIntIdKey		      = mkPreludeMiscIdUnique 64
+neqFloatIdKey		      = mkPreludeMiscIdUnique 65
+neqDoubleIdKey		      = mkPreludeMiscIdUnique 66
+
+-- NB: Currently a gap of three slots (67--69)
+
+-- Parallel array functions
+nullPIdKey	              = mkPreludeMiscIdUnique 70
+lengthPIdKey		      = mkPreludeMiscIdUnique 71
+replicatePIdKey		      = mkPreludeMiscIdUnique 72
+mapPIdKey		      = mkPreludeMiscIdUnique 73
+filterPIdKey		      = mkPreludeMiscIdUnique 74
+zipPIdKey		      = mkPreludeMiscIdUnique 75
+crossPIdKey		      = mkPreludeMiscIdUnique 76
+indexPIdKey		      = mkPreludeMiscIdUnique 77
+toPIdKey		      = mkPreludeMiscIdUnique 78
+enumFromToPIdKey              = mkPreludeMiscIdUnique 79
+enumFromThenToPIdKey          = mkPreludeMiscIdUnique 80
+bpermutePIdKey		      = mkPreludeMiscIdUnique 81
+bpermuteDftPIdKey	      = mkPreludeMiscIdUnique 82
+indexOfPIdKey		      = mkPreludeMiscIdUnique 83
 \end{code}
 
 Certain class operations from Prelude classes.  They get their own
diff --git a/ghc/compiler/prelude/TysWiredIn.lhs b/ghc/compiler/prelude/TysWiredIn.lhs
index 18bf9a0caa835eb19e68e5787b52ba1092d9f2f6..ade3426f96604882ae5aceb3e87df0b9c38c5d2c 100644
--- a/ghc/compiler/prelude/TysWiredIn.lhs
+++ b/ghc/compiler/prelude/TysWiredIn.lhs
@@ -69,7 +69,11 @@ module TysWiredIn (
 	voidTy,
 	wordDataCon,
 	wordTy,
-	wordTyCon
+	wordTyCon,
+
+        -- parallel arrays
+	mkPArrTy,
+	parrTyCon, parrFakeCon, isPArrTyCon, isPArrFakeCon
     ) where
 
 #include "HsVersions.h"
@@ -88,18 +92,19 @@ import Name		( Name, nameRdrName, nameUnique, nameOccName,
 			  nameModule, mkWiredInName )
 import OccName		( mkOccFS, tcName, dataName, mkWorkerOcc, mkGenOcc1, mkGenOcc2 )
 import RdrName		( rdrNameOcc )
-import DataCon		( DataCon, mkDataCon, dataConId )
+import DataCon		( DataCon, mkDataCon, dataConId, dataConSourceArity )
 import Var		( TyVar, tyVarKind )
 import TyCon		( TyCon, AlgTyConFlavour(..), tyConDataCons,
-			  mkTupleTyCon, mkAlgTyCon
+			  mkTupleTyCon, mkAlgTyCon, tyConName
 			)
 
 import BasicTypes	( Arity, RecFlag(..), Boxity(..), isBoxed, StrictnessMark(..) )
 
-import Type		( Type, mkTyConTy, mkTyConApp, mkTyVarTys, 
+import Type		( Type, mkTyConTy, mkTyConApp, mkTyVarTy, mkTyVarTys, 
 			  mkArrowKinds, liftedTypeKind, unliftedTypeKind,
 			  ThetaType )
-import Unique		( incrUnique, mkTupleTyConUnique, mkTupleDataConUnique )
+import Unique		( incrUnique, mkTupleTyConUnique,
+			  mkTupleDataConUnique, mkPArrDataConUnique )
 import PrelNames
 import Array
 
@@ -130,6 +135,7 @@ data_tycons = genericTyCons ++
     	      , intTyCon
     	      , integerTyCon
     	      , listTyCon
+	      , parrTyCon
     	      , wordTyCon
     	      ]
 
@@ -539,6 +545,100 @@ mkTupleTy boxity arity tys = mkTyConApp (tupleTyCon boxity arity) tys
 unitTy    = mkTupleTy Boxed 0 []
 \end{code}
 
+%************************************************************************
+%*									*
+\subsection[TysWiredIn-PArr]{The @[::]@ type}
+%*									*
+%************************************************************************
+
+Special syntax for parallel arrays needs some wired in definitions.
+
+\begin{code}
+-- construct a type representing the application of the parallel array
+-- constructor 
+--
+mkPArrTy    :: Type -> Type
+mkPArrTy ty  = mkTyConApp parrTyCon [ty]
+
+-- represents the type constructor of parallel arrays
+--
+-- * this must match the definition in `PrelPArr'
+--
+-- NB: Although the constructor is given here, it will not be accessible in
+--     user code as it is not in the environment of any compiled module except
+--     `PrelPArr'.
+--
+parrTyCon :: TyCon
+parrTyCon  = tycon
+  where
+    tycon   = mkAlgTyCon 
+		parrTyConName 
+		kind
+		tyvars
+		[]               -- No context
+		[(True, False)]
+		[parrDataCon]	 -- The constructor defined in `PrelPArr'
+		1		 -- The real definition has one constructor
+		[]		 -- No record selectors
+		DataTyCon
+		NonRecursive
+		genInfo
+    tyvars  = alpha_tyvar
+    mod     = nameModule parrTyConName
+    kind    = mkArrowKinds (map tyVarKind tyvars) liftedTypeKind
+    genInfo = mk_tc_gen_info mod (nameUnique parrTyConName) parrTyConName tycon
+
+parrDataCon :: DataCon
+parrDataCon  = pcDataCon 
+	         parrDataConName 
+		 alpha_tyvar		-- forall'ed type variables
+		 []			-- context
+		 [intPrimTy,		-- 1st argument: Int#
+		  mkTyConApp		-- 2nd argument: Array# a
+		    arrayPrimTyCon 
+		    alpha_ty] 
+		 parrTyCon
+
+-- check whether a type constructor is the constructor for parallel arrays
+--
+isPArrTyCon    :: TyCon -> Bool
+isPArrTyCon tc  = tyConName tc == parrTyConName
+
+-- fake array constructors
+--
+-- * these constructors are never really used to represent array values;
+--   however, they are very convenient during desugaring (and, in particular,
+--   in the pattern matching compiler) to treat array patterns just like
+--   yet another constructor pattern
+--
+parrFakeCon                        :: Arity -> DataCon
+parrFakeCon i | i > mAX_TUPLE_SIZE  = mkPArrFakeCon  i	-- build one specially
+parrFakeCon i                       = parrFakeConArr!i
+
+-- pre-defined set of constructors
+--
+parrFakeConArr :: Array Int DataCon
+parrFakeConArr  = array (0, mAX_TUPLE_SIZE) [(i, mkPArrFakeCon i)   
+					    | i <- [0..mAX_TUPLE_SIZE]]
+
+-- build a fake parallel array constructor for the given arity
+--
+mkPArrFakeCon       :: Int -> DataCon
+mkPArrFakeCon arity  = pcDataCon name [tyvar] [] tyvarTys parrTyCon
+  where
+	tyvar     = head alphaTyVars
+	tyvarTys  = replicate arity $ mkTyVarTy tyvar
+        nameStr   = _PK_ ("MkPArr" ++ show arity)
+	name      = mkWiredInName mod (mkOccFS dataName nameStr) uniq
+	uniq      = mkPArrDataConUnique arity
+	mod	  = mkPrelModule pREL_PARR_Name
+
+-- checks whether a data constructor is a fake constructor for parallel arrays
+--
+isPArrFakeCon      :: DataCon -> Bool
+isPArrFakeCon dcon  = dcon == parrFakeCon (dataConSourceArity dcon)
+\end{code}
+
 %************************************************************************
 %*                                                                      *
 \subsection{Wired In Type Constructors for Representation Types}
diff --git a/ghc/compiler/rename/ParseIface.y b/ghc/compiler/rename/ParseIface.y
index b71b71f1d91caad4b6375281a55e1f4dddc5eec2..cbeaeed1d7659663b44ef1b79dcc6891c843f57a 100644
--- a/ghc/compiler/rename/ParseIface.y
+++ b/ghc/compiler/rename/ParseIface.y
@@ -1,4 +1,4 @@
-{-	Notes about the syntax of interface files
+{-	Notes about the syntax of interface files		  -*-haskell-*-
 	~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 The header
 ~~~~~~~~~~
@@ -166,6 +166,8 @@ import FastString	( tailFS )
  '|}'		{ ITccurlybar } 			-- special symbols
  '['		{ ITobrack }
  ']'		{ ITcbrack }
+ '[:'		{ ITopabrack }
+ ':]'		{ ITcpabrack }
  '('		{ IToparen }
  ')'		{ ITcparen }
  '(#'		{ IToubxparen }
@@ -388,10 +390,10 @@ maybe_idinfo  : {- empty -} 	{ \_ -> [] }
     -}
 
 pragma	:: { Maybe (ParseResult [HsIdInfo RdrName]) }
-pragma	: src_loc PRAGMA	{ Just (parseIdInfo $2 PState{ bol = 0#, atbol = 1#,
-							context = [],
-							glasgow_exts = 1#,
-							loc = $1 })
+pragma	: src_loc PRAGMA	{ let exts = ExtFlags {glasgowExtsEF = True,
+						       parrEF	     = True}
+				  in
+				  Just (parseIdInfo $2 (mkPState $1 exts))
 				}
 
 -----------------------------------------------------------------------------
@@ -401,10 +403,9 @@ pragma	: src_loc PRAGMA	{ Just (parseIdInfo $2 PState{ bol = 0#, atbol = 1#,
 rules_and_deprecs_part :: { () -> ([RdrNameRuleDecl], IfaceDeprecs) }
 rules_and_deprecs_part
   : {- empty -}		{ \_ -> ([], Nothing) }
-  | src_loc PRAGMA	{ \_ -> case parseRules $2 PState{ bol = 0#, atbol = 1#,
-							   context = [],
-							   glasgow_exts = 1#,
-							   loc = $1 } of
+  | src_loc PRAGMA	{ \_ -> let exts = ExtFlags {glasgowExtsEF = True,
+						     parrEF	   = True}
+				in case parseRules $2 (mkPState $1 exts) of
 					POk _ rds   -> rds
 					PFailed err -> pprPanic "Rules/Deprecations parse failed" err
 			}
@@ -557,6 +558,7 @@ atype		:  qtc_name 			  	{ HsTyVar $1 }
 		|  '(' types2 ')'	  		{ HsTupleTy (mkHsTupCon tcName Boxed   $2) $2 }
 		|  '(#' types0 '#)'			{ HsTupleTy (mkHsTupCon tcName Unboxed $2) $2 }
 		|  '[' type ']'		  		{ HsListTy  $2 }
+		|  '[:' type ':]'			{ HsPArrTy $2 }
 		|  '{' qcls_name atypes '}'		{ mkHsDictTy $2 $3 }
 		|  '{' ipvar_name '::' type '}'		{ mkHsIParamTy $2 $4 }
 		|  '(' type ')'		  		{ $2 }
@@ -586,6 +588,7 @@ tatype		:  qtc_name 			  	{ HsTyVar $1 }
 		|  '(' types2 ')'	  		{ HsTupleTy (mkHsTupCon tcName Boxed   $2) $2 }
 		|  '(#' types0 '#)'			{ HsTupleTy (mkHsTupCon tcName Unboxed $2) $2 }
 		|  '[' type ']'		  		{ HsListTy  $2 }
+		|  '[:' type ':]'			{ HsPArrTy $2 }
 		|  '{' qcls_name atypes '}'		{ mkHsDictTy $2 $3 }
 		|  '{' ipvar_name '::' type '}'		{ mkHsIParamTy $2 $4 }
 		|  '(' type ')'		  		{ $2 }
diff --git a/ghc/compiler/rename/RnEnv.lhs b/ghc/compiler/rename/RnEnv.lhs
index 331b0d0c3a8e17d3d8af10e19935e804df050085..d12aab932503c50329f6da2093344a9921d4727b 100644
--- a/ghc/compiler/rename/RnEnv.lhs
+++ b/ghc/compiler/rename/RnEnv.lhs
@@ -10,6 +10,7 @@ module RnEnv where		-- Export everything
 
 import {-# SOURCE #-} RnHiFiles
 
+import FlattenInfo      ( namesNeededForFlattening )
 import HsSyn
 import RdrHsSyn		( RdrNameIE, RdrNameHsType, extractHsTyRdrTyVars )
 import RdrName		( RdrName, rdrNameModule, rdrNameOcc, isQual, isUnqual, isOrig,
@@ -439,6 +440,9 @@ ubiquitousNames
 	-- Add occurrences for very frequently used types.
 	--  	 (e.g. we don't want to be bothered with making funTyCon a
 	--	  free var at every function application!)
+  `plusFV`
+    namesNeededForFlattening
+        -- this will be empty unless flattening is activated
 
 checkMain ghci_mode mod_name gbl_env
 	-- LOOKUP main IF WE'RE IN MODULE Main
@@ -447,7 +451,8 @@ checkMain ghci_mode mod_name gbl_env
 	-- so that the type checker will find them
 	--
 	-- We have to return the main_name separately, because it's a
-	-- bona fide 'use', and should be recorded as such, but the others aren't
+	-- bona fide 'use', and should be recorded as such, but the others
+	-- aren't 
   | mod_name /= mAIN_Name
   = returnRn (Nothing, emptyFVs, emptyFVs)
 
diff --git a/ghc/compiler/rename/RnExpr.lhs b/ghc/compiler/rename/RnExpr.lhs
index 846812df39771a842caa3a0f93f5e674890edcd5..cda67c42554c7ed853c579797ebe257ffe5a2561 100644
--- a/ghc/compiler/rename/RnExpr.lhs
+++ b/ghc/compiler/rename/RnExpr.lhs
@@ -28,18 +28,21 @@ import RnTypes		( rnHsTypeFVs )
 import RnHiFiles	( lookupFixityRn )
 import CmdLineOpts	( DynFlag(..), opt_IgnoreAsserts )
 import Literal		( inIntRange, inCharRange )
-import BasicTypes	( Fixity(..), FixityDirection(..), IPName(..), defaultFixity, negateFixity )
+import BasicTypes	( Fixity(..), FixityDirection(..), IPName(..),
+			  defaultFixity, negateFixity )
 import PrelNames	( hasKey, assertIdKey, 
 			  eqClassName, foldrName, buildName, eqStringName,
 			  cCallableClassName, cReturnableClassName, 
 			  monadClassName, enumClassName, ordClassName,
-			  ratioDataConName, splitIdName, fstIdName, sndIdName,
+			  ratioDataConName, splitName, fstName, sndName,
 			  ioDataConName, plusIntegerName, timesIntegerName,
-			  assertErr_RDR
-			)
+			  assertErr_RDR,
+			  replicatePName, mapPName, filterPName,
+			  falseDataConName, trueDataConName, crossPName,
+			  zipPName, lengthPName, indexPName, toPName,
+			  enumFromToPName, enumFromThenToPName )
 import TysPrim		( charPrimTyCon, addrPrimTyCon, intPrimTyCon, 
-			  floatPrimTyCon, doublePrimTyCon
-			)
+			  floatPrimTyCon, doublePrimTyCon )
 import TysWiredIn	( intTyCon )
 import Name		( NamedThing(..), mkSysLocalName, nameSrcLoc )
 import NameSet
@@ -132,6 +135,13 @@ rnPat (ListPatIn pats)
   = mapFvRn rnPat pats			`thenRn` \ (patslist, fvs) ->
     returnRn (ListPatIn patslist, fvs `addOneFV` listTyCon_name)
 
+rnPat (PArrPatIn pats)
+  = mapFvRn rnPat pats			`thenRn` \ (patslist, fvs) ->
+    returnRn (PArrPatIn patslist, 
+	      fvs `plusFV` implicit_fvs `addOneFV` parrTyCon_name)
+  where
+    implicit_fvs = mkFVs [lengthPName, indexPName]
+
 rnPat (TuplePatIn pats boxed)
   = mapFvRn rnPat pats					   `thenRn` \ (patslist, fvs) ->
     returnRn (TuplePatIn patslist boxed, fvs `addOneFV` tycon_name)
@@ -278,7 +288,7 @@ rnExpr (HsIPVar v)
   = newIPName v			`thenRn` \ name ->
     let 
 	fvs = case name of
-		Linear _  -> mkFVs [splitIdName, fstIdName, sndIdName]
+		Linear _  -> mkFVs [splitName, fstName, sndName]
 		Dupable _ -> emptyFVs 
     in   
     returnRn (HsIPVar name, fvs)
@@ -381,16 +391,24 @@ rnExpr e@(HsDo do_or_lc stmts src_loc)
     }					`thenRn_`
     returnRn (HsDo do_or_lc stmts' src_loc, fvs `plusFV` implicit_fvs)
   where
-    implicit_fvs = mkFVs [foldrName, buildName, monadClassName]
+    implicit_fvs = case do_or_lc of
+      PArrComp -> mkFVs [replicatePName, mapPName, filterPName,
+			 falseDataConName, trueDataConName, crossPName,
+			 zipPName]
+      _        -> mkFVs [foldrName, buildName, monadClassName]
 	-- Monad stuff should not be necessary for a list comprehension
 	-- but the typechecker looks up the bind and return Ids anyway
 	-- Oh well.
 
-
 rnExpr (ExplicitList _ exps)
   = rnExprs exps		 	`thenRn` \ (exps', fvs) ->
     returnRn  (ExplicitList placeHolderType exps', fvs `addOneFV` listTyCon_name)
 
+rnExpr (ExplicitPArr _ exps)
+  = rnExprs exps		 	`thenRn` \ (exps', fvs) ->
+    returnRn  (ExplicitPArr placeHolderType exps', 
+	       fvs `addOneFV` toPName `addOneFV` parrTyCon_name)
+
 rnExpr (ExplicitTuple exps boxity)
   = rnExprs exps	 			`thenRn` \ (exps', fvs) ->
     returnRn (ExplicitTuple exps' boxity, fvs `addOneFV` tycon_name)
@@ -449,6 +467,28 @@ rnExpr (ArithSeqIn seq)
        rnExpr expr3	`thenRn` \ (expr3', fvExpr3) ->
        returnRn (FromThenTo expr1' expr2' expr3',
 		  plusFVs [fvExpr1, fvExpr2, fvExpr3])
+
+rnExpr (PArrSeqIn seq)
+  = rn_seq seq	 		       `thenRn` \ (new_seq, fvs) ->
+    returnRn (PArrSeqIn new_seq, 
+	      fvs `plusFV` mkFVs [enumFromToPName, enumFromThenToPName])
+  where
+
+    -- the parser shouldn't generate these two
+    --
+    rn_seq (From     _  ) = panic "RnExpr.rnExpr: Infinite parallel array!"
+    rn_seq (FromThen _ _) = panic "RnExpr.rnExpr: Infinite parallel array!"
+
+    rn_seq (FromTo expr1 expr2)
+     = rnExpr expr1	`thenRn` \ (expr1', fvExpr1) ->
+       rnExpr expr2	`thenRn` \ (expr2', fvExpr2) ->
+       returnRn (FromTo expr1' expr2', fvExpr1 `plusFV` fvExpr2)
+    rn_seq (FromThenTo expr1 expr2 expr3)
+     = rnExpr expr1	`thenRn` \ (expr1', fvExpr1) ->
+       rnExpr expr2	`thenRn` \ (expr2', fvExpr2) ->
+       rnExpr expr3	`thenRn` \ (expr3', fvExpr3) ->
+       returnRn (FromThenTo expr1' expr2' expr3',
+		  plusFVs [fvExpr1, fvExpr2, fvExpr3])
 \end{code}
 
 These three are pattern syntax appearing in expressions.
diff --git a/ghc/compiler/rename/RnHiFiles.lhs b/ghc/compiler/rename/RnHiFiles.lhs
index 4eb5504718f200299d5b70689a3556b3bff3e254..7c405de3344e062231908115ba1573d37b47698c 100644
--- a/ghc/compiler/rename/RnHiFiles.lhs
+++ b/ghc/compiler/rename/RnHiFiles.lhs
@@ -559,15 +559,14 @@ readIface file_path
 	Left io_error  -> bale_out (text (show io_error)) ;
 	Right contents -> 
 
-    case parseIface contents init_parser_state of
+    case parseIface contents (mkPState loc exts) of
 	POk _ iface          -> returnRn (Right iface)
 	PFailed err 	     -> bale_out err
     }
   where
-    init_parser_state = PState{ bol = 0#, atbol = 1#,
-				context = [],
-				glasgow_exts = 1#,
-				loc = mkSrcLoc (mkFastString file_path) 1 }
+    exts = ExtFlags {glasgowExtsEF = True,
+		     parrEF	   = True}
+    loc  = mkSrcLoc (mkFastString file_path) 1
 
     bale_out err = returnRn (Left (badIfaceFile file_path err))
 \end{code}
diff --git a/ghc/compiler/rename/RnHsSyn.lhs b/ghc/compiler/rename/RnHsSyn.lhs
index 58a1acc024776e1ea155777987d3cbd049e7e79c..539a81e41db7b14d800e72b547ac8b2851f5a447 100644
--- a/ghc/compiler/rename/RnHsSyn.lhs
+++ b/ghc/compiler/rename/RnHsSyn.lhs
@@ -11,7 +11,7 @@ module RnHsSyn where
 import HsSyn
 import HsCore
 import Class		( FunDep, DefMeth(..) )
-import TysWiredIn	( tupleTyCon, listTyCon, charTyCon )
+import TysWiredIn	( tupleTyCon, listTyCon, parrTyCon, charTyCon )
 import Name		( Name, getName, isTyVarName )
 import NameSet
 import BasicTypes	( Boxity )
@@ -56,9 +56,10 @@ type RenamedDeprecation		= DeprecDecl		Name
 These free-variable finders returns tycons and classes too.
 
 \begin{code}
-charTyCon_name, listTyCon_name :: Name
+charTyCon_name, listTyCon_name, parrTyCon_name :: Name
 charTyCon_name    = getName charTyCon
 listTyCon_name    = getName listTyCon
+parrTyCon_name    = getName parrTyCon
 
 tupleTyCon_name :: Boxity -> Int -> Name
 tupleTyCon_name boxity n = getName (tupleTyCon boxity n)
@@ -75,6 +76,7 @@ extractHsTyNames ty
   where
     get (HsAppTy ty1 ty2)      = get ty1 `unionNameSets` get ty2
     get (HsListTy ty)          = unitNameSet listTyCon_name `unionNameSets` get ty
+    get (HsPArrTy ty)          = unitNameSet parrTyCon_name `unionNameSets` get ty
     get (HsTupleTy con tys)    = hsTupConFVs con `unionNameSets` extractHsTyNames_s tys
     get (HsFunTy ty1 ty2)      = get ty1 `unionNameSets` get ty2
     get (HsPredTy p)	       = extractHsPredTyNames p
diff --git a/ghc/compiler/rename/RnTypes.lhs b/ghc/compiler/rename/RnTypes.lhs
index 61a14ef2056ee1d068c3931944044427176c6ae8..2d544f5d79fa7eb58ee5841fbe40a6798eb0bc1f 100644
--- a/ghc/compiler/rename/RnTypes.lhs
+++ b/ghc/compiler/rename/RnTypes.lhs
@@ -115,6 +115,10 @@ rnHsType doc (HsListTy ty)
   = rnHsType doc ty				`thenRn` \ ty' ->
     returnRn (HsListTy ty')
 
+rnHsType doc (HsPArrTy ty)
+  = rnHsType doc ty				`thenRn` \ ty' ->
+    returnRn (HsPArrTy ty')
+
 -- Unboxed tuples are allowed to have poly-typed arguments.  These
 -- sometimes crop up as a result of CPR worker-wrappering dictionaries.
 rnHsType doc (HsTupleTy (HsTupCon _ boxity arity) tys)
diff --git a/ghc/compiler/simplCore/SimplCore.lhs b/ghc/compiler/simplCore/SimplCore.lhs
index e8db09447b272ece71408a50c726271dcf2c575e..598b985cc3514a11c9979fa37898d1e911fd2885 100644
--- a/ghc/compiler/simplCore/SimplCore.lhs
+++ b/ghc/compiler/simplCore/SimplCore.lhs
@@ -8,8 +8,9 @@ module SimplCore ( core2core, simplifyExpr ) where
 
 #include "HsVersions.h"
 
-import CmdLineOpts	( CoreToDo(..), SimplifierSwitch(..), SimplifierMode(..),
-			  DynFlags, DynFlag(..), dopt, dopt_CoreToDo
+import CmdLineOpts	( CoreToDo(..), SimplifierSwitch(..),
+			  SimplifierMode(..), DynFlags, DynFlag(..), dopt,
+			  dopt_CoreToDo
 			)
 import CoreSyn
 import CoreFVs		( ruleRhsFreeVars )
diff --git a/ghc/compiler/typecheck/TcExpr.lhs b/ghc/compiler/typecheck/TcExpr.lhs
index 603955947a055cc30cf9aa19909d569a1d59b92f..56fc0e37a6582775da321dad9cd56a1a1f319413 100644
--- a/ghc/compiler/typecheck/TcExpr.lhs
+++ b/ghc/compiler/typecheck/TcExpr.lhs
@@ -16,8 +16,8 @@ import TcHsSyn		( TcExpr, TcRecordBinds, simpleHsLitTy  )
 
 import TcMonad
 import TcUnify		( tcSub, tcGen, (<$>),
-			  unifyTauTy, unifyFunTy, unifyListTy, unifyTupleTy
-			)
+			  unifyTauTy, unifyFunTy, unifyListTy, unifyPArrTy,
+			  unifyTupleTy )
 import BasicTypes	( RecFlag(..),  isMarkedStrict )
 import Inst		( InstOrigin(..), 
 			  LIE, mkLIE, emptyLIE, unitLIE, plusLIE, plusLIEs,
@@ -52,11 +52,12 @@ import Name		( Name )
 import TyCon		( TyCon, tyConTyVars, isAlgTyCon, tyConDataCons )
 import Subst		( mkTopTyVarSubst, substTheta, substTy )
 import VarSet		( elemVarSet )
-import TysWiredIn	( boolTy, mkListTy, listTyCon )
+import TysWiredIn	( boolTy, mkListTy, mkPArrTy, listTyCon, parrTyCon )
 import PrelNames	( cCallableClassName, 
 			  cReturnableClassName, 
 			  enumFromName, enumFromThenName, 
 			  enumFromToName, enumFromThenToName,
+			  enumFromToPName, enumFromThenToPName,
 			  thenMName, failMName, returnMName, ioTyConName
 			)
 import Outputable
@@ -323,6 +324,15 @@ tcMonoExpr in_expr@(ExplicitList _ exprs) res_ty	-- Non-empty list
       = tcAddErrCtxt (listCtxt expr) $
 	tcMonoExpr expr elt_ty
 
+tcMonoExpr in_expr@(ExplicitPArr _ exprs) res_ty	-- maybe empty
+  = unifyPArrTy res_ty                        `thenTc` \ elt_ty ->  
+    mapAndUnzipTc (tc_elt elt_ty) exprs	      `thenTc` \ (exprs', lies) ->
+    returnTc (ExplicitPArr elt_ty exprs', plusLIEs lies)
+  where
+    tc_elt elt_ty expr
+      = tcAddErrCtxt (parrCtxt expr) $
+	tcMonoExpr expr elt_ty
+
 tcMonoExpr (ExplicitTuple exprs boxity) res_ty
   = unifyTupleTy boxity (length exprs) res_ty	`thenTc` \ arg_tys ->
     mapAndUnzipTc (\ (expr, arg_ty) -> tcMonoExpr expr arg_ty)
@@ -541,6 +551,36 @@ tcMonoExpr in_expr@(ArithSeqIn seq@(FromThenTo expr1 expr2 expr3)) res_ty
     returnTc (ArithSeqOut (HsVar (instToId eft))
 			  (FromThenTo expr1' expr2' expr3'),
 	      lie1 `plusLIE` lie2 `plusLIE` lie3 `plusLIE` unitLIE eft)
+
+tcMonoExpr in_expr@(PArrSeqIn seq@(FromTo expr1 expr2)) res_ty
+  = tcAddErrCtxt (parrSeqCtxt in_expr) $
+    unifyPArrTy  res_ty         			`thenTc`    \ elt_ty ->  
+    tcMonoExpr expr1 elt_ty				`thenTc`    \ (expr1',lie1) ->
+    tcMonoExpr expr2 elt_ty				`thenTc`    \ (expr2',lie2) ->
+    tcLookupGlobalId enumFromToPName			`thenNF_Tc` \ sel_id ->
+    newMethod (PArrSeqOrigin seq) sel_id [elt_ty]	`thenNF_Tc` \ enum_from_to ->
+
+    returnTc (PArrSeqOut (HsVar (instToId enum_from_to))
+			 (FromTo expr1' expr2'),
+	      lie1 `plusLIE` lie2 `plusLIE` unitLIE enum_from_to)
+
+tcMonoExpr in_expr@(PArrSeqIn seq@(FromThenTo expr1 expr2 expr3)) res_ty
+  = tcAddErrCtxt  (parrSeqCtxt in_expr) $
+    unifyPArrTy  res_ty         			`thenTc`    \ elt_ty ->  
+    tcMonoExpr expr1 elt_ty				`thenTc`    \ (expr1',lie1) ->
+    tcMonoExpr expr2 elt_ty				`thenTc`    \ (expr2',lie2) ->
+    tcMonoExpr expr3 elt_ty				`thenTc`    \ (expr3',lie3) ->
+    tcLookupGlobalId enumFromThenToPName		`thenNF_Tc` \ sel_id ->
+    newMethod (PArrSeqOrigin seq) sel_id [elt_ty]	`thenNF_Tc` \ eft ->
+
+    returnTc (PArrSeqOut (HsVar (instToId eft))
+			 (FromThenTo expr1' expr2' expr3'),
+	      lie1 `plusLIE` lie2 `plusLIE` lie3 `plusLIE` unitLIE eft)
+
+tcMonoExpr (PArrSeqIn _) _ 
+  = panic "TcExpr.tcMonoExpr: Infinite parallel array!"
+    -- the parser shouldn't have generated it and the renamer shouldn't have
+    -- let it through
 \end{code}
 
 %************************************************************************
@@ -688,6 +728,27 @@ tcExpr_id expr         = newTyVarTy openTypeKind	`thenNF_Tc` \ id_ty ->
 %************************************************************************
 
 \begin{code}
+-- I don't like this lumping together of do expressions and list/array
+-- comprehensions; creating the monad instances is entirely pointless in the
+-- latter case; I'll leave the list case as it is for the moment, but handle
+-- arrays separately (it would be better to handle arrays and lists together)
+-- -=chak
+--
+tcDoStmts PArrComp stmts src_loc res_ty
+  =
+    ASSERT( not (null stmts) )
+    tcAddSrcLoc src_loc	$
+
+    unifyPArrTy res_ty			      `thenTc` \elt_ty              ->
+    let tc_ty = mkTyConTy parrTyCon
+	m_ty  = (mkPArrTy, elt_ty)
+    in
+    tcStmts (DoCtxt PArrComp) m_ty stmts      `thenTc` \(stmts', stmts_lie) ->
+    returnTc (HsDoOut PArrComp stmts'
+		      undefined undefined undefined  -- don't touch!
+		      res_ty src_loc,
+	      stmts_lie)
+
 tcDoStmts do_or_lc stmts src_loc res_ty
   =	-- get the Monad and MonadZero classes
 	-- create type consisting of a fresh monad tyvar
@@ -697,10 +758,14 @@ tcDoStmts do_or_lc stmts src_loc res_ty
 	-- If it's a comprehension we're dealing with, 
 	-- force it to be a list comprehension.
 	-- (as of Haskell 98, monad comprehensions are no more.)
+	-- Similarly, array comprehensions must involve parallel array types
+	--   -=chak
     (case do_or_lc of
        ListComp -> unifyListTy res_ty			`thenTc` \ elt_ty ->
 		   returnNF_Tc (mkTyConTy listTyCon, (mkListTy, elt_ty))
 
+       PArrComp -> panic "TcExpr.tcDoStmts: How did we get here?!?"
+
        _	-> newTyVarTy (mkArrowKind liftedTypeKind liftedTypeKind)	`thenNF_Tc` \ m_ty ->
 		   newTyVarTy liftedTypeKind 					`thenNF_Tc` \ elt_ty ->
 		   unifyTauTy res_ty (mkAppTy m_ty elt_ty)			`thenTc_`
@@ -874,6 +939,9 @@ Boring and alphabetical:
 arithSeqCtxt expr
   = hang (ptext SLIT("In an arithmetic sequence:")) 4 (ppr expr)
 
+parrSeqCtxt expr
+  = hang (ptext SLIT("In a parallel array sequence:")) 4 (ppr expr)
+
 caseCtxt expr
   = hang (ptext SLIT("In the case expression:")) 4 (ppr expr)
 
@@ -887,6 +955,9 @@ exprSigCtxt expr
 listCtxt expr
   = hang (ptext SLIT("In the list element:")) 4 (ppr expr)
 
+parrCtxt expr
+  = hang (ptext SLIT("In the parallel array element:")) 4 (ppr expr)
+
 predCtxt expr
   = hang (ptext SLIT("In the predicate expression:")) 4 (ppr expr)
 
diff --git a/ghc/compiler/typecheck/TcHsSyn.lhs b/ghc/compiler/typecheck/TcHsSyn.lhs
index 2c8ce2594166d32e730f30d38ccadb4f3db29e30..39661e487542078ae7f7022bf74eaa50e856a84a 100644
--- a/ghc/compiler/typecheck/TcHsSyn.lhs
+++ b/ghc/compiler/typecheck/TcHsSyn.lhs
@@ -54,7 +54,7 @@ import TysPrim	  ( charPrimTy, intPrimTy, floatPrimTy,
 		    doublePrimTy, addrPrimTy
 		  )
 import TysWiredIn ( charTy, stringTy, intTy, integerTy,
-		    mkListTy, mkTupleTy, unitTy )
+		    mkListTy, mkPArrTy, mkTupleTy, unitTy )
 import CoreSyn    ( Expr )
 import Var	  ( isId )
 import BasicTypes ( RecFlag(..), Boxity(..), IPName(..), ipNameName )
@@ -161,6 +161,7 @@ outPatType (LazyPat pat)	= outPatType pat
 outPatType (AsPat var pat)	= idType var
 outPatType (ConPat _ ty _ _ _)	= ty
 outPatType (ListPat ty _)	= mkListTy ty
+outPatType (PArrPat ty _)	= mkPArrTy ty
 outPatType (TuplePat pats box)	= mkTupleTy box (length pats) (map outPatType pats)
 outPatType (RecPat _ ty _ _ _)  = ty
 outPatType (SigPat _ ty _)	= ty
@@ -190,6 +191,7 @@ collectTypedPatBinders (AsPat a pat)	       = a : collectTypedPatBinders pat
 collectTypedPatBinders (SigPat pat _ _)	       = collectTypedPatBinders pat
 collectTypedPatBinders (ConPat _ _ _ _ pats)   = concat (map collectTypedPatBinders pats)
 collectTypedPatBinders (ListPat t pats)        = concat (map collectTypedPatBinders pats)
+collectTypedPatBinders (PArrPat t pats)        = concat (map collectTypedPatBinders pats)
 collectTypedPatBinders (TuplePat pats _)       = concat (map collectTypedPatBinders pats)
 collectTypedPatBinders (RecPat _ _ _ _ fields) = concat (map (\ (f,pat,_) -> collectTypedPatBinders pat)
 							  fields)
@@ -493,6 +495,11 @@ zonkExpr (ExplicitList ty exprs)
     mapNF_Tc zonkExpr exprs	`thenNF_Tc` \ new_exprs ->
     returnNF_Tc (ExplicitList new_ty new_exprs)
 
+zonkExpr (ExplicitPArr ty exprs)
+  = zonkTcTypeToType ty		`thenNF_Tc` \ new_ty ->
+    mapNF_Tc zonkExpr exprs	`thenNF_Tc` \ new_exprs ->
+    returnNF_Tc (ExplicitPArr new_ty new_exprs)
+
 zonkExpr (ExplicitTuple exprs boxed)
   = mapNF_Tc zonkExpr exprs  	`thenNF_Tc` \ new_exprs ->
     returnNF_Tc (ExplicitTuple new_exprs boxed)
@@ -514,12 +521,18 @@ zonkExpr (RecordUpdOut expr in_ty out_ty dicts rbinds)
 
 zonkExpr (ExprWithTySig _ _) = panic "zonkExpr:ExprWithTySig"
 zonkExpr (ArithSeqIn _)      = panic "zonkExpr:ArithSeqIn"
+zonkExpr (PArrSeqIn _)       = panic "zonkExpr:PArrSeqIn"
 
 zonkExpr (ArithSeqOut expr info)
   = zonkExpr expr	`thenNF_Tc` \ new_expr ->
     zonkArithSeq info	`thenNF_Tc` \ new_info ->
     returnNF_Tc (ArithSeqOut new_expr new_info)
 
+zonkExpr (PArrSeqOut expr info)
+  = zonkExpr expr	`thenNF_Tc` \ new_expr ->
+    zonkArithSeq info	`thenNF_Tc` \ new_info ->
+    returnNF_Tc (PArrSeqOut new_expr new_info)
+
 zonkExpr (HsCCall fun args may_gc is_casm result_ty)
   = mapNF_Tc zonkExpr args 	`thenNF_Tc` \ new_args ->
     zonkTcTypeToType result_ty	`thenNF_Tc` \ new_result_ty ->
@@ -667,6 +680,11 @@ zonkPat (ListPat ty pats)
     zonkPats pats		`thenNF_Tc` \ (new_pats, ids) ->
     returnNF_Tc (ListPat new_ty new_pats, ids)
 
+zonkPat (PArrPat ty pats)
+  = zonkTcTypeToType ty	`thenNF_Tc` \ new_ty ->
+    zonkPats pats		`thenNF_Tc` \ (new_pats, ids) ->
+    returnNF_Tc (PArrPat new_ty new_pats, ids)
+
 zonkPat (TuplePat pats boxed)
   = zonkPats pats   		`thenNF_Tc` \ (new_pats, ids) ->
     returnNF_Tc (TuplePat new_pats boxed, ids)
diff --git a/ghc/compiler/typecheck/TcMType.lhs b/ghc/compiler/typecheck/TcMType.lhs
index eb37c4629b8867e72d68ec6dbf6648e1d77616e7..49ef3f982cbb091025adc1aac244b3d1db393715 100644
--- a/ghc/compiler/typecheck/TcMType.lhs
+++ b/ghc/compiler/typecheck/TcMType.lhs
@@ -1070,5 +1070,3 @@ nonBoxedPrimCCallErr clas inst_ty
   = hang (ptext SLIT("Unacceptable instance type for ccall-ish class"))
 	 4 (pprClassPred clas [inst_ty])
 \end{code}
-
-
diff --git a/ghc/compiler/typecheck/TcMonad.lhs b/ghc/compiler/typecheck/TcMonad.lhs
index 11cb6bdab94ddde75cfa402af7f412360ecae200..dceff864fb232890019ce1f791f05c2b3c31f034 100644
--- a/ghc/compiler/typecheck/TcMonad.lhs
+++ b/ghc/compiler/typecheck/TcMonad.lhs
@@ -659,6 +659,7 @@ data InstOrigin
   | PatOrigin RenamedPat
 
   | ArithSeqOrigin RenamedArithSeqInfo -- [x..], [x..y] etc
+  | PArrSeqOrigin  RenamedArithSeqInfo -- [:x..y:] and [:x,y..z:]
 
   | SignatureOrigin		-- A dict created from a type signature
   | Rank2Origin			-- A dict created when typechecking the argument
@@ -715,6 +716,8 @@ pprInstLoc (orig, locn, ctxt)
 	= hsep [ptext SLIT("the pattern"), quotes (ppr pat)]
     pp_orig (ArithSeqOrigin seq)
 	= hsep [ptext SLIT("the arithmetic sequence"), quotes (ppr seq)]
+    pp_orig (PArrSeqOrigin seq)
+	= hsep [ptext SLIT("the parallel array sequence"), quotes (ppr seq)]
     pp_orig (SignatureOrigin)
 	=  ptext SLIT("a type signature")
     pp_orig (Rank2Origin)
diff --git a/ghc/compiler/typecheck/TcMonoType.lhs b/ghc/compiler/typecheck/TcMonoType.lhs
index 21d554d45603bd2c69cca724fe2fd24d008e9046..1d33e945b1991c774d0104c5fefb797983b8a904 100644
--- a/ghc/compiler/typecheck/TcMonoType.lhs
+++ b/ghc/compiler/typecheck/TcMonoType.lhs
@@ -49,7 +49,7 @@ import TyCon		( TyCon, isSynTyCon, tyConKind )
 import Class		( classTyCon )
 import Name		( Name )
 import NameSet
-import TysWiredIn	( mkListTy, mkTupleTy, genUnitTyCon )
+import TysWiredIn	( mkListTy, mkPArrTy, mkTupleTy, genUnitTyCon )
 import BasicTypes	( Boxity(..) )
 import SrcLoc		( SrcLoc )
 import Util		( lengthIs )
@@ -267,6 +267,10 @@ kcHsType (HsListTy ty)
   = kcLiftedType ty		`thenTc` \ tau_ty ->
     returnTc liftedTypeKind
 
+kcHsType (HsPArrTy ty)
+  = kcLiftedType ty		`thenTc` \ tau_ty ->
+    returnTc liftedTypeKind
+
 kcHsType (HsTupleTy (HsTupCon _ boxity _) tys)
   = mapTc kcTypeType tys	`thenTc_`
     returnTc (case boxity of
@@ -400,6 +404,10 @@ tc_type (HsListTy ty)
   = tc_type ty	`thenTc` \ tau_ty ->
     returnTc (mkListTy tau_ty)
 
+tc_type (HsPArrTy ty)
+  = tc_type ty	`thenTc` \ tau_ty ->
+    returnTc (mkPArrTy tau_ty)
+
 tc_type (HsTupleTy (HsTupCon _ boxity arity) tys)
   = ASSERT( tys `lengthIs` arity )
     tc_types tys	`thenTc` \ tau_tys ->
diff --git a/ghc/compiler/typecheck/TcPat.lhs b/ghc/compiler/typecheck/TcPat.lhs
index 0c40272c61be70eb152cec848d75d36edbe9cf44..51a04dda9002fd821b010a71527d2a4993959a7d 100644
--- a/ghc/compiler/typecheck/TcPat.lhs
+++ b/ghc/compiler/typecheck/TcPat.lhs
@@ -27,8 +27,9 @@ import TcMType 		( tcInstTyVars, newTyVarTy, getTcTyVar, putTcTyVar )
 import TcType		( TcType, TcTyVar, TcSigmaType,
 			  mkTyConApp, mkClassPred, liftedTypeKind, tcGetTyVar_maybe,
 			  isHoleTyVar, openTypeKind )
-import TcUnify		( tcSub, unifyTauTy, unifyListTy, unifyTupleTy, 
-			  mkCoercion, idCoercion, isIdCoercion, (<$>), PatCoFn )
+import TcUnify		( tcSub, unifyTauTy, unifyListTy, unifyPArrTy,
+			  unifyTupleTy,  mkCoercion, idCoercion, isIdCoercion,
+			  (<$>), PatCoFn )
 import TcMonoType	( tcHsSigType, UserTypeCtxt(..) )
 
 import TysWiredIn	( stringTy )
@@ -159,7 +160,7 @@ tcPat tc_bndr (SigPatIn pat sig) pat_ty
 
 %************************************************************************
 %*									*
-\subsection{Explicit lists and tuples}
+\subsection{Explicit lists, parallel arrays, and tuples}
 %*									*
 %************************************************************************
 
@@ -170,6 +171,12 @@ tcPat tc_bndr pat_in@(ListPatIn pats) pat_ty
     tcPats tc_bndr pats (repeat elem_ty)	`thenTc` \ (pats', lie_req, tvs, ids, lie_avail) ->
     returnTc (ListPat elem_ty pats', lie_req, tvs, ids, lie_avail)
 
+tcPat tc_bndr pat_in@(PArrPatIn pats) pat_ty
+  = tcAddErrCtxt (patCtxt pat_in)		$
+    unifyPArrTy pat_ty				`thenTc` \ elem_ty ->
+    tcPats tc_bndr pats (repeat elem_ty)	`thenTc` \ (pats', lie_req, tvs, ids, lie_avail) ->
+    returnTc (PArrPat elem_ty pats', lie_req, tvs, ids, lie_avail)
+
 tcPat tc_bndr pat_in@(TuplePatIn pats boxity) pat_ty
   = tcAddErrCtxt (patCtxt pat_in)	$
 
diff --git a/ghc/compiler/typecheck/TcSimplify.lhs b/ghc/compiler/typecheck/TcSimplify.lhs
index ca9180fae91515621516cb14ad2b15f7b0351f6b..edf065988a77840648758bd78ef069887de56299 100644
--- a/ghc/compiler/typecheck/TcSimplify.lhs
+++ b/ghc/compiler/typecheck/TcSimplify.lhs
@@ -52,7 +52,7 @@ import NameSet		( NameSet, mkNameSet, elemNameSet )
 import Class		( classBigSig )
 import FunDeps		( oclose, grow, improve, pprEquationDoc )
 import PrelInfo		( isNumericClass, isCreturnableClass, isCcallishClass, 
-			  splitIdName, fstIdName, sndIdName )
+			  splitName, fstName, sndName )
 
 import Subst		( mkTopTyVarSubst, substTheta, substTy )
 import TysWiredIn	( unitTy, pairTyCon )
@@ -1192,8 +1192,8 @@ split n split_id avail wanted
 		    returnNF_Tc (andMonoBindList binds', concat rhss')
 
 	  do_one rhs = tcGetUnique 			`thenNF_Tc` \ uniq -> 
-		       tcLookupGlobalId fstIdName	`thenNF_Tc` \ fst_id -> 
-		       tcLookupGlobalId sndIdName	`thenNF_Tc` \ snd_id -> 
+		       tcLookupGlobalId fstName		`thenNF_Tc` \ fst_id ->
+		       tcLookupGlobalId sndName		`thenNF_Tc` \ snd_id ->
 		       let 
 			  x = mkUserLocal occ uniq pair_ty loc
 		       in
@@ -1416,7 +1416,7 @@ isAvailable avails wanted = lookupFM avails wanted
 addLinearAvailable :: Avails -> Avail -> Inst -> NF_TcM (Avails, [Inst])
 addLinearAvailable avails avail wanted
   | need_split avail
-  = tcLookupGlobalId splitIdName		`thenNF_Tc` \ split_id ->
+  = tcLookupGlobalId splitName			`thenNF_Tc` \ split_id ->
     newMethodAtLoc (instLoc wanted) split_id 
 		   [linearInstType wanted]	`thenNF_Tc` \ (split_inst,_) ->
     returnNF_Tc (addToFM avails wanted (Linear 2 split_inst avail), [split_inst])
diff --git a/ghc/compiler/typecheck/TcUnify.lhs b/ghc/compiler/typecheck/TcUnify.lhs
index 8ee07bc1f21021596f6c3426a0738f6bbcb5fd5a..56ae76492deb3b2a14b7aee832f19218edec44db 100644
--- a/ghc/compiler/typecheck/TcUnify.lhs
+++ b/ghc/compiler/typecheck/TcUnify.lhs
@@ -11,7 +11,7 @@ module TcUnify (
 
 	-- Various unifications
   unifyTauTy, unifyTauTyList, unifyTauTyLists, 
-  unifyFunTy, unifyListTy, unifyTupleTy,
+  unifyFunTy, unifyListTy, unifyPArrTy, unifyTupleTy,
   unifyKind, unifyKinds, unifyOpenTypeKind,
 
 	-- Coercions
@@ -51,7 +51,7 @@ import TcMType		( getTcTyVar, putTcTyVar, tcInstType,
 			  newTyVarTy, newTyVarTys, newBoxityVar, newHoleTyVarTy,
 			  zonkTcType, zonkTcTyVars, zonkTcTyVar )
 import TcSimplify	( tcSimplifyCheck )
-import TysWiredIn	( listTyCon, mkListTy, mkTupleTy )
+import TysWiredIn	( listTyCon, parrTyCon, mkListTy, mkPArrTy, mkTupleTy )
 import TcEnv		( TcTyThing(..), tcExtendGlobalTyVars, tcGetGlobalTyVars, tcLEnvElts )
 import TyCon		( tyConArity, isTupleTyCon, tupleTyConBoxity )
 import PprType		( pprType )
@@ -734,6 +734,26 @@ unify_list_ty_help ty	-- Revert to ordinary unification
   = newTyVarTy liftedTypeKind		`thenNF_Tc` \ elt_ty ->
     unifyTauTy ty (mkListTy elt_ty)	`thenTc_`
     returnTc elt_ty
+
+-- variant for parallel arrays
+--
+unifyPArrTy :: TcType              -- expected parallel array type
+	    -> TcM TcType	   -- parr element type
+
+unifyPArrTy ty@(TyVarTy tyvar)
+  = getTcTyVar tyvar	`thenNF_Tc` \ maybe_ty ->
+    case maybe_ty of
+      Just ty' -> unifyPArrTy ty'
+      _        -> unify_parr_ty_help ty
+unifyPArrTy ty
+  = case tcSplitTyConApp_maybe ty of
+      Just (tycon, [arg_ty]) | tycon == parrTyCon -> returnTc arg_ty
+      _  					  -> unify_parr_ty_help ty
+
+unify_parr_ty_help ty	-- Revert to ordinary unification
+  = newTyVarTy liftedTypeKind		`thenNF_Tc` \ elt_ty ->
+    unifyTauTy ty (mkPArrTy elt_ty)	`thenTc_`
+    returnTc elt_ty
 \end{code}
 
 \begin{code}
diff --git a/ghc/compiler/types/PprType.lhs b/ghc/compiler/types/PprType.lhs
index 0285731658fa4d77d06b8688e7885ec1c1c55b7a..39ae2ee8589ba8eb8be33422b283b0fd9fe659ba 100644
--- a/ghc/compiler/types/PprType.lhs
+++ b/ghc/compiler/types/PprType.lhs
@@ -151,10 +151,18 @@ ppr_ty ctxt_prec ty@(TyConApp tycon tys)
     [ty] <- tys
   = brackets (ppr_ty tOP_PREC ty)
 
+	-- PARALLEL ARRAY CASE
+  | tycon `hasKey` parrTyConKey,
+    [ty] <- tys
+  = pabrackets (ppr_ty tOP_PREC ty)
+
 	-- GENERAL CASE
   | otherwise
   = ppr_tc_app ctxt_prec tycon tys
 
+  where
+    pabrackets p = ptext SLIT("[:") <> p <> ptext SLIT(":]")
+
 
 ppr_ty ctxt_prec ty@(ForAllTy _ _)
   = getPprStyle $ \ sty -> 
diff --git a/ghc/lib/std/Makefile b/ghc/lib/std/Makefile
index 9248da24eb4056b06114f8020c7c414e46f1b84f..fe934632c590d8d3a81df81846a9130484ad2533 100644
--- a/ghc/lib/std/Makefile
+++ b/ghc/lib/std/Makefile
@@ -58,6 +58,7 @@ PrelIO_HC_OPTS       += -fno-ignore-asserts
 # Special options
 PrelStorable_HC_OPTS = -monly-3-regs
 PrelCError_HC_OPTS   = +RTS -K4m -RTS
+PrelPArr_HC_OPTS     = -fparr
 
 #-----------------------------------------------------------------------------
 # 	Dependency generation
diff --git a/ghc/lib/std/PrelPArr.hs b/ghc/lib/std/PrelPArr.hs
new file mode 100644
index 0000000000000000000000000000000000000000..ca9ea0e8a5a6648a143dae9fe8792f1844fa38b9
--- /dev/null
+++ b/ghc/lib/std/PrelPArr.hs
@@ -0,0 +1,644 @@
+--  $Id: PrelPArr.hs,v 1.2 2002/02/11 08:20:49 chak Exp $
+--
+--  Copyright (c) [2001..2002] Manuel M T Chakravarty & Gabriele Keller
+--
+--  Basic implementation of Parallel Arrays.
+--
+--- DESCRIPTION ---------------------------------------------------------------
+--
+--  This module serves two purposes: (1) it defines the interface to the
+--  parallel array extension of the Prelude and (2) it provides a vanilla
+--  implementation of parallel arrays that does not require the array code
+--  to be flattened.  The implementation is not very optimised.
+--
+--- DOCU ----------------------------------------------------------------------
+--
+--  Language: Haskell 98 plus unboxed values and parallel arrays
+--
+--  The semantic difference between standard Haskell arrays (aka "lazy
+--  arrays") and parallel arrays (aka "strict arrays") is that the evaluation
+--  of two different elements of a lazy array is independent, whereas in a
+--  strict array either none or all elements are evaluated.  In other words,
+--  when a parallel array is evaluated to WHNF, all its elements will be
+--  evaluated to WHNF.  The name parallel array indicates that all array
+--  elements may, in general, be evaluated to WHNF in parallel without any
+--  need to resort to speculative evaluation.  This parallel evaluation
+--  semantics is also beneficial in the sequential case, as it facilitates
+--  loop-based array processing as known from classic array-based languages,
+--  such as Fortran.
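+--
+--  As a small illustration of this semantics (an example only; `mapP' is
+--  defined below), evaluating
+--
+--    mapP (* 2) [:1, 2, 3:]
+--
+--  to WHNF yields the array [:2, 4, 6:] with all three elements already in
+--  WHNF; the three multiplications may be performed in parallel.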
+--
+--  The interface of this module is essentially a variant of the list
+--  component of the Prelude, but also includes some functions (such as
+--  permutations) that are not provided for lists.  The following list
+--  operations are not supported on parallel arrays, as they would require the
+--  availability of infinite parallel arrays: `iterate', `repeat', and `cycle'.
+--
+--  The current implementation is quite simple and entirely based on boxed
+--  arrays.  One disadvantage of boxed arrays is that every newly allocated
+--  array must immediately be initialised with an error thunk to keep the
+--  garbage collector happy, even if it is guaranteed that the array is
+--  fully initialised with proper values before it crosses the user-visible
+--  interface boundary.  Currently, no effort is made to use
+--  raw memory copy operations to speed things up.
+--
+--- TODO ----------------------------------------------------------------------
+--
+--  * We probably want a standard library `PArray' in addition to the prelude
+--    extension in the same way as the standard library `List' complements the
+--    list functions from the prelude.
+--
+--  * Currently, functions that emphasise the constructor-based definition of
+--    lists (such as head, last, tail, and init) are not supported.
+--
+--    Is it worthwhile to support the string processing functions lines,
+--    words, unlines, and unwords?  (Currently, they are not implemented.)
+--
+--    It can, however, be argued that it would be worthwhile to include them
+--    for completeness' sake; maybe only in the standard library `PArray'.
+--
+--  * Prescans are often more useful for array programming than scans.  Shall
+--    we include them in the Prelude or the library?
+--
+--  * Due to the use of the iterator `loop', we could define some fusion rules
+--    in this module.
+--
+--  * We might want to add bounds checks that can be deactivated.
+--
+
+{-# OPTIONS -fno-implicit-prelude #-}
+
+module PrelPArr (
+  [::],			-- abstract
+
+  mapP,			-- :: (a -> b) -> [:a:] -> [:b:]
+  (+:+),		-- :: [:a:] -> [:a:] -> [:a:]
+  filterP,		-- :: (a -> Bool) -> [:a:] -> [:a:]
+  concatP,		-- :: [:[:a:]:] -> [:a:]
+  concatMapP,		-- :: (a -> [:b:]) -> [:a:] -> [:b:]
+--  head, last, tail, init,   -- it's not wise to use them on arrays
+  nullP,	        -- :: [:a:] -> Bool
+  lengthP,		-- :: [:a:] -> Int
+  (!:),			-- :: [:a:] -> Int -> a
+  foldlP,		-- :: (a -> b -> a) -> a -> [:b:] -> a
+  foldl1P,		-- :: (a -> a -> a) ->      [:a:] -> a
+  scanlP,		-- :: (a -> b -> a) -> a -> [:b:] -> [:a:]
+  scanl1P,		-- :: (a -> a -> a) ->      [:a:] -> [:a:]
+  foldrP,		-- :: (a -> b -> b) -> b -> [:a:] -> b
+  foldr1P,		-- :: (a -> a -> a) ->      [:a:] -> a
+  scanrP,		-- :: (a -> b -> b) -> b -> [:a:] -> [:b:]
+  scanr1P,		-- :: (a -> a -> a) ->      [:a:] -> [:a:]
+--  iterate, repeat,	      -- parallel arrays must be finite
+  replicateP,		-- :: Int -> a -> [:a:]
+--  cycle,		      -- parallel arrays must be finite
+  takeP,		-- :: Int -> [:a:] -> [:a:]
+  dropP,		-- :: Int -> [:a:] -> [:a:]
+  splitAtP,		-- :: Int -> [:a:] -> ([:a:],[:a:])
+  takeWhileP,		-- :: (a -> Bool) -> [:a:] -> [:a:]
+  dropWhileP,		-- :: (a -> Bool) -> [:a:] -> [:a:]
+  spanP,		-- :: (a -> Bool) -> [:a:] -> ([:a:], [:a:])
+  breakP,		-- :: (a -> Bool) -> [:a:] -> ([:a:], [:a:])
+--  lines, words, unlines, unwords,  -- is string processing really needed
+  reverseP,	        -- :: [:a:] -> [:a:]
+  andP,			-- :: [:Bool:] -> Bool
+  orP, 			-- :: [:Bool:] -> Bool
+  anyP,			-- :: (a -> Bool) -> [:a:] -> Bool
+  allP,			-- :: (a -> Bool) -> [:a:] -> Bool
+  elemP,		-- :: (Eq a) => a -> [:a:] -> Bool
+  notElemP,		-- :: (Eq a) => a -> [:a:] -> Bool
+  lookupP,		-- :: (Eq a) => a -> [:(a, b):] -> Maybe b
+  sumP,			-- :: (Num a) => [:a:] -> a
+  productP, 		-- :: (Num a) => [:a:] -> a
+  maximumP,		-- :: (Ord a) => [:a:] -> a
+  minimumP,		-- :: (Ord a) => [:a:] -> a
+  zipP,			-- :: [:a:] -> [:b:]          -> [:(a, b)   :]
+  zip3P,		-- :: [:a:] -> [:b:] -> [:c:] -> [:(a, b, c):]
+  zipWithP,		-- :: (a -> b -> c)      -> [:a:] -> [:b:] -> [:c:]
+  zipWith3P,		-- :: (a -> b -> c -> d) -> [:a:]->[:b:]->[:c:]->[:d:]
+  unzipP,		-- :: [:(a, b)   :] -> ([:a:], [:b:])
+  unzip3P,		-- :: [:(a, b, c):] -> ([:a:], [:b:], [:c:])
+
+  -- overloaded functions
+  --
+  enumFromToP,		-- :: Enum a => a -> a      -> [:a:]
+  enumFromThenToP,	-- :: Enum a => a -> a -> a -> [:a:]
+
+  -- the following functions are not available on lists
+  --
+  toP,			-- :: [a] -> [:a:]
+  fromP,		-- :: [:a:] -> [a]
+  sliceP,		-- :: Int -> Int -> [:e:] -> [:e:]
+  foldP,		-- :: (e -> e -> e) -> e -> [:e:] -> e
+  fold1P,		-- :: (e -> e -> e) ->      [:e:] -> e
+  permuteP,		-- :: [:Int:] -> [:e:] ->          [:e:]
+  bpermuteP,		-- :: [:Int:] -> [:e:] ->          [:e:]
+  bpermuteDftP,		-- :: [:Int:] -> [:e:] -> [:e:] -> [:e:]
+  crossP,		-- :: [:a:] -> [:b:] -> [:(a, b):]
+  indexOfP		-- :: (a -> Bool) -> [:a:] -> [:Int:]
+) where
+
+import PrelBase
+import PrelST   (ST(..), STRep, runST)
+import PrelList
+import PrelShow
+import PrelRead
+
+infixl 9  !:
+infixr 5  +:+
+infix  4  `elemP`, `notElemP`
+
+
+-- representation of parallel arrays
+-- ---------------------------------
+
+-- this rather straightforward implementation maps parallel arrays to the
+-- internal representation used for standard Haskell arrays in GHC's Prelude
+-- (EXPORTED ABSTRACTLY)
+--
+-- * This definition *must* be kept in sync with `TysWiredIn.parrTyCon'!
+--
+data [::] e = PArr Int# (Array# e)
+
+
+-- exported operations on parallel arrays
+-- --------------------------------------
+
+-- operations corresponding to list operations
+--
+
+mapP   :: (a -> b) -> [:a:] -> [:b:]
+mapP f  = fst . loop (mapEFL f) noAL
+
+(+:+)     :: [:a:] -> [:a:] -> [:a:]
+a1 +:+ a2  = fst $ loop (mapEFL sel) noAL (enumFromToP 0 (len1 + len2 - 1))
+		       -- we can't use the [:x..y:] form here for tedious
+		       -- reasons to do with the typechecker and the fact that
+		       -- `enumFromToP' is defined in the same module
+	     where
+	       len1 = lengthP a1
+	       len2 = lengthP a2
+	       --
+	       sel i | i < len1  = a1!:i
+		     | otherwise = a2!:(i - len1)
+
+filterP   :: (a -> Bool) -> [:a:] -> [:a:]
+filterP p  = fst . loop (filterEFL p) noAL
+
+concatP     :: [:[:a:]:] -> [:a:]
+concatP xss  = foldlP (+:+) [::] xss
+
+concatMapP   :: (a -> [:b:]) -> [:a:] -> [:b:]
+concatMapP f  = concatP . mapP f
+
+--  head, last, tail, init,   -- it's not wise to use them on arrays
+
+nullP      :: [:a:] -> Bool
+nullP [::]  = True
+nullP _     = False
+
+lengthP             :: [:a:] -> Int
+lengthP (PArr n# _)  = I# n#
+
+(!:) :: [:a:] -> Int -> a
+(!:)  = indexPArr
+
+foldlP     :: (a -> b -> a) -> a -> [:b:] -> a
+foldlP f z  = snd . loop (foldEFL (flip f)) z
+
+foldl1P        :: (a -> a -> a) -> [:a:] -> a
+foldl1P f [::]  = error "Prelude.foldl1P: empty array"
+foldl1P f a     = snd $ loopFromTo 1 (lengthP a - 1) (foldEFL (flip f)) (a!:0) a
+
+scanlP     :: (a -> b -> a) -> a -> [:b:] -> [:a:]
+scanlP f z  = fst . loop (scanEFL (flip f)) z
+
+scanl1P        :: (a -> a -> a) -> [:a:] -> [:a:]
+scanl1P f [::]  = error "Prelude.scanl1P: empty array"
+scanl1P f a     = fst $ loopFromTo 1 (lengthP a - 1) (scanEFL (flip f)) (a!:0) a
+
+foldrP :: (a -> b -> b) -> b -> [:a:] -> b
+foldrP  = error "Prelude.foldrP: not implemented yet" -- FIXME
+
+foldr1P :: (a -> a -> a) -> [:a:] -> a
+foldr1P  = error "Prelude.foldr1P: not implemented yet" -- FIXME
+
+scanrP :: (a -> b -> b) -> b -> [:a:] -> [:b:]
+scanrP  = error "Prelude.scanrP: not implemented yet" -- FIXME
+
+scanr1P :: (a -> a -> a) -> [:a:] -> [:a:]
+scanr1P  = error "Prelude.scanr1P: not implemented yet" -- FIXME
+
+--  iterate, repeat	      -- parallel arrays must be finite
+
+replicateP             :: Int -> a -> [:a:]
+{-# INLINE replicateP #-}
+replicateP n e  = runST (do
+  marr# <- newArray n e
+  mkPArr n marr#)
+
+--  cycle		      -- parallel arrays must be finite
+
+takeP   :: Int -> [:a:] -> [:a:]
+takeP n  = sliceP 0 (n - 1)
+
+dropP     :: Int -> [:a:] -> [:a:]
+dropP n a  = sliceP n (lengthP a - 1) a
+
+splitAtP      :: Int -> [:a:] -> ([:a:],[:a:])
+splitAtP n xs  = (takeP n xs, dropP n xs)
+
+takeWhileP :: (a -> Bool) -> [:a:] -> [:a:]
+takeWhileP  = error "Prelude.takeWhileP: not implemented yet" -- FIXME
+
+dropWhileP :: (a -> Bool) -> [:a:] -> [:a:]
+dropWhileP  = error "Prelude.dropWhileP: not implemented yet" -- FIXME
+
+spanP :: (a -> Bool) -> [:a:] -> ([:a:], [:a:])
+spanP  = error "Prelude.spanP: not implemented yet" -- FIXME
+
+breakP   :: (a -> Bool) -> [:a:] -> ([:a:], [:a:])
+breakP p  = spanP (not . p)
+
+--  lines, words, unlines, unwords,  -- is string processing really needed
+
+reverseP   :: [:a:] -> [:a:]
+reverseP a  = permuteP (enumFromThenToP (len - 1) (len - 2) 0) a
+		       -- we can't use the [:x, y..z:] form here for tedious
+		       -- reasons to do with the typechecker and the fact that
+		       -- `enumFromThenToP' is defined in the same module
+	      where
+	        len = lengthP a
+
+andP :: [:Bool:] -> Bool
+andP  = foldP (&&) True
+
+orP :: [:Bool:] -> Bool
+orP  = foldP (||) False
+
+anyP   :: (a -> Bool) -> [:a:] -> Bool
+anyP p  = orP . mapP p
+
+allP :: (a -> Bool) -> [:a:] -> Bool
+allP p  = andP . mapP p
+
+elemP   :: (Eq a) => a -> [:a:] -> Bool
+elemP x  = anyP (== x)
+
+notElemP   :: (Eq a) => a -> [:a:] -> Bool
+notElemP x  = allP (/= x)
+
+lookupP :: (Eq a) => a -> [:(a, b):] -> Maybe b
+lookupP  = error "Prelude.lookupP: not implemented yet" -- FIXME
+
+sumP :: (Num a) => [:a:] -> a
+sumP  = foldP (+) 0
+
+productP :: (Num a) => [:a:] -> a
+productP  = foldP (*) 1
+
+maximumP      :: (Ord a) => [:a:] -> a
+maximumP [::]  = error "Prelude.maximumP: empty parallel array"
+maximumP xs    = fold1P max xs
+
+minimumP :: (Ord a) => [:a:] -> a
+minimumP [::]  = error "Prelude.minimumP: empty parallel array"
+minimumP xs    = fold1P min xs
+
+zipP :: [:a:] -> [:b:] -> [:(a, b):]
+zipP  = zipWithP (,)
+
+zip3P :: [:a:] -> [:b:] -> [:c:] -> [:(a, b, c):]
+zip3P  = zipWith3P (,,)
+
+zipWithP         :: (a -> b -> c) -> [:a:] -> [:b:] -> [:c:]
+zipWithP f a1 a2  = let 
+		      len1 = lengthP a1
+		      len2 = lengthP a2
+		      len  = len1 `min` len2
+		    in
+		    fst $ loopFromTo 0 (len - 1) combine 0 a1
+		    where
+		      combine e1 i = (Just $ f e1 (a2!:i), i + 1)
+
+zipWith3P :: (a -> b -> c -> d) -> [:a:]->[:b:]->[:c:]->[:d:]
+zipWith3P f a1 a2 a3 = let 
+			len1 = lengthP a1
+			len2 = lengthP a2
+			len3 = lengthP a3
+			len  = len1 `min` len2 `min` len3
+		      in
+		      fst $ loopFromTo 0 (len - 1) combine 0 a1
+		      where
+			combine e1 i = (Just $ f e1 (a2!:i) (a3!:i), i + 1)
+
+unzipP   :: [:(a, b):] -> ([:a:], [:b:])
+unzipP a  = (fst $ loop (mapEFL fst) noAL a, fst $ loop (mapEFL snd) noAL a)
+-- FIXME: these two functions should be optimised using a tupled custom loop
+unzip3P   :: [:(a, b, c):] -> ([:a:], [:b:], [:c:])
+unzip3P a  = (fst $ loop (mapEFL fst3) noAL a, 
+	      fst $ loop (mapEFL snd3) noAL a,
+	      fst $ loop (mapEFL trd3) noAL a)
+	     where
+	       fst3 (a, _, _) = a
+	       snd3 (_, b, _) = b
+	       trd3 (_, _, c) = c
+
+-- instances
+--
+
+instance Eq a => Eq [:a:] where
+  a1 == a2 | lengthP a1 == lengthP a2 = andP (zipWithP (==) a1 a2)
+	   | otherwise		      = False
+
+instance Ord a => Ord [:a:] where
+  compare a1 a2 = case foldlP combineOrdering EQ (zipWithP compare a1 a2) of
+		    EQ | lengthP a1 == lengthP a2 -> EQ
+		       | lengthP a1 <  lengthP a2 -> LT
+		       | otherwise		  -> GT
+		    other			  -> other
+		  where
+		    combineOrdering EQ    EQ    = EQ
+		    combineOrdering EQ    other = other
+		    combineOrdering other _     = other
+
+instance Functor [::] where
+  fmap = mapP
+
+instance Monad [::] where
+  m >>= k  = foldrP ((+:+) . k      ) [::] m
+  m >>  k  = foldrP ((+:+) . const k) [::] m
+  return x = [:x:]
+  fail _   = [::]
+
+instance Show a => Show [:a:]  where
+  showsPrec _  = showPArr . fromP
+    where
+      showPArr []     s = "[::]" ++ s
+      showPArr (x:xs) s = "[:" ++ shows x (showPArr' xs s)
+
+      showPArr' []     s = ":]" ++ s
+      showPArr' (y:ys) s = ',' : shows y (showPArr' ys s)
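+
+-- For example (an illustration only), `show (toP [1, 2, 3 :: Int])' produces
+-- the string "[:1,2,3:]", mirroring the source syntax of array literals.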
+
+instance Read a => Read [:a:]  where
+  readsPrec _ a = [(toP v, rest) | (v, rest) <- readPArr a]
+    where
+      readPArr = readParen False (\r -> do
+					  ("[:",s) <- lex r
+					  readPArr1 s)
+      readPArr1 s = 
+	(do { (":]", t) <- lex s; return ([], t) }) ++
+	(do { (x, t) <- reads s; (xs, u) <- readPArr2 t; return (x:xs, u) })
+
+      readPArr2 s = 
+	(do { (":]", t) <- lex s; return ([], t) }) ++
+	(do { (",", t) <- lex s; (x, u) <- reads t; (xs, v) <- readPArr2 u; 
+	      return (x:xs, v) })
+
+-- overloaded functions
+-- 
+
+-- Ideally, we would like `enumFromToP' and `enumFromThenToP' to be members of
+-- `Enum'.  On the other hand, we really do not want to change `Enum'.  Thus,
+-- for the moment, we hope that the compiler is sufficiently clever to
+-- properly fuse the following definition.
+
+enumFromToP	:: Enum a => a -> a -> [:a:]
+enumFromToP x y  = mapP toEnum (eftInt (fromEnum x) (fromEnum y))
+  where
+    eftInt x y = scanlP (+) x $ replicateP (y - x + 1) 1
+
+enumFromThenToP	      :: Enum a => a -> a -> a -> [:a:]
+enumFromThenToP x y z  = 
+  mapP toEnum (efttInt (fromEnum x) (fromEnum y) (fromEnum z))
+  where
+    efttInt x y z = scanlP (+) x $ 
+		      replicateP ((z - x) `div` delta + 1) delta
+      where
+       delta = y - x
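+
+-- For example (an illustration only), `enumFromToP 1 5' yields the array
+-- [:1, 2, 3, 4, 5:]; the special syntax `[:1..5:]' is mapped to exactly this
+-- call by the typechecker (cf. `PArrSeqIn' in `TcExpr').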
+
+-- the following functions are not available on lists
+--
+
+-- create an array from a list (EXPORTED)
+--
+toP   :: [a] -> [:a:]
+toP l  = fst $ loop store l (replicateP (length l) ())
+	 where
+	   store _ (x:xs) = (Just x, xs)
+
+-- convert an array to a list (EXPORTED)
+--
+fromP   :: [:a:] -> [a]
+fromP a  = [a!:i | i <- [0..lengthP a - 1]]
+
+-- cut a subarray out of an array (EXPORTED)
+--
+sliceP :: Int -> Int -> [:e:] -> [:e:]
+sliceP from to a = 
+  fst $ loopFromTo (0 `max` from) (to `min` (lengthP a - 1)) (mapEFL id) noAL a
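+
+-- For example (an illustration only), `sliceP 1 2 (toP [10, 11, 12, 13])'
+-- yields [:11, 12:]; the `from' index is clamped below at 0 and the `to'
+-- index above at the last valid index.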
+
+-- parallel folding (EXPORTED)
+--
+-- * the first argument must be associative; otherwise, the result is undefined
+--
+foldP :: (e -> e -> e) -> e -> [:e:] -> e
+foldP  = foldlP
+
+-- parallel folding without explicit neutral (EXPORTED)
+--
+-- * the first argument must be associative; otherwise, the result is undefined
+--
+fold1P :: (e -> e -> e) -> [:e:] -> e
+fold1P  = foldl1P
+
+-- permute an array according to the permutation vector in the first argument
+-- (EXPORTED)
+--
+permuteP       :: [:Int:] -> [:e:] -> [:e:]
+permuteP is es  = fst $ loop (mapEFL (es!:)) noAL is
+
+-- permute an array according to the back-permutation vector in the first
+-- argument (EXPORTED)
+--
+-- * the permutation vector must represent a surjective function; otherwise,
+--   the result is undefined
+--
+bpermuteP       :: [:Int:] -> [:e:] -> [:e:]
+bpermuteP is es  = error "Prelude.bpermuteP: not implemented yet" -- FIXME
+
+-- permute an array according to the back-permutation vector in the first
+-- argument, which need not be surjective (EXPORTED)
+--
+-- * any elements in the result that are not covered by the back-permutation
+--   vector assume the value of the corresponding position of the third
+--   argument 
+--
+bpermuteDftP       :: [:Int:] -> [:e:] -> [:e:] -> [:e:]
+bpermuteDftP is es  = error "Prelude.bpermuteDftP: not implemented yet"-- FIXME
+
+-- computes the cross combination of two arrays (EXPORTED)
+--
+crossP       :: [:a:] -> [:b:] -> [:(a, b):]
+crossP a1 a2  = fst $ loop combine (0, 0) $ replicateP len ()
+		where
+		  len1 = lengthP a1
+		  len2 = lengthP a2
+		  len  = len1 * len2
+		  --
+		  combine _ (i, j) = (Just $ (a1!:i, a2!:j), next)
+				     where
+				       next | (i + 1) == len1 = (0    , j + 1)
+					    | otherwise       = (i + 1, j)
+
+{- An alternative implementation
+   * The one above is certainly better for flattened code, but here where we
+     are handling boxed arrays, the trade-off is less clear.  However, I
+     think the above one is still better.
+
+crossP a1 a2  = let
+		  len1 = lengthP a1
+		  len2 = lengthP a2
+		  x1   = concatP $ mapP (replicateP len2) a1
+		  x2   = concatP $ replicateP len1 a2
+		in
+		zipP x1 x2
+ -}
+
+-- computes an index array for all elements of the second argument for which
+-- the predicate yields `True' (EXPORTED)
+--
+indexOfP     :: (a -> Bool) -> [:a:] -> [:Int:]
+indexOfP p a  = fst $ loop calcIdx 0 a
+		where
+		  calcIdx e idx | p e       = (Just idx, idx + 1)
+				| otherwise = (Nothing , idx + 1)
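+
+-- For example (an illustration only), `indexOfP (> 2) (toP [1, 4, 2, 5])'
+-- yields the index array [:1, 3:].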
+
+
+-- auxiliary functions
+-- -------------------
+
+-- internally used mutable boxed arrays
+--
+data MPArr s e = MPArr Int# (MutableArray# s e)
+
+-- allocate a new mutable array that is pre-initialised with a given value
+--
+newArray             :: Int -> e -> ST s (MPArr s e)
+{-# INLINE newArray #-}
+newArray (I# n#) e  = ST $ \s1# ->
+  case newArray# n# e s1# of { (# s2#, marr# #) ->
+  (# s2#, MPArr n# marr# #)}
+
+-- convert a mutable array into the external parallel array representation
+--
+mkPArr                           :: Int -> MPArr s e -> ST s [:e:]
+{-# INLINE mkPArr #-}
+mkPArr (I# n#) (MPArr _ marr#)  = ST $ \s1# ->
+  case unsafeFreezeArray# marr# s1#   of { (# s2#, arr# #) ->
+  (# s2#, PArr n# arr# #) }
+
+-- general array iterator
+--
+-- * corresponds to `loopA' from ``Functional Array Fusion'', Chakravarty &
+--   Keller, ICFP 2001
+--
+loop :: (e -> acc -> (Maybe e', acc))    -- mapping & folding, once per element
+     -> acc				 -- initial acc value
+     -> [:e:]				 -- input array
+     -> ([:e':], acc)
+{-# INLINE loop #-}
+loop mf acc arr = loopFromTo 0 (lengthP arr - 1) mf acc arr
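+
+-- usage sketch for `loop' (the helper `mapAndSumP' below is an illustration
+-- only; it is neither exported nor used elsewhere in this module): a map and
+-- a sum can share a single traversal by pairing a `Just' output element with
+-- an updated accumulator
+--
+mapAndSumP   :: Num a => (a -> b) -> [:a:] -> ([:b:], a)
+mapAndSumP f  = loop (\e acc -> (Just (f e), acc + e)) 0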
+
+-- general array iterator with bounds
+--
+loopFromTo :: Int			 -- from index
+	   -> Int			 -- to index
+	   -> (e -> acc -> (Maybe e', acc))
+	   -> acc
+	   -> [:e:]
+	   -> ([:e':], acc)
+{-# INLINE loopFromTo #-}
+loopFromTo from to mf start arr = runST (do
+  marr      <- newArray (to - from + 1) noElem
+  (n', acc) <- trans from to marr arr mf start
+  arr       <- mkPArr n' marr
+  return (arr, acc))
+  where
+    noElem = error "PrelPArr.loopFromTo: I do not exist!"
+	     -- unlike standard Haskell arrays, this value represents an
+	     -- internal error
+
+-- the actual loop body of `loop'
+--
+-- * for this to be really efficient, it has to be translated with the
+--   constructor specialisation phase "SpecConstr" switched on; as of GHC 5.03
+--   this requires an optimisation level of at least -O2
+--
+trans :: Int				-- index of first elem to process
+      -> Int				-- index of last elem to process
+      -> MPArr s e'			-- destination array
+      -> [:e:]				-- source array
+      -> (e -> acc -> (Maybe e', acc))	-- mutator
+      -> acc				-- initial accumulator
+      -> ST s (Int, acc)		-- final destination length/final acc
+{-# INLINE trans #-}
+trans from to marr arr mf start = trans' from 0 start
+  where
+    trans' arrOff marrOff acc 
+      | arrOff > to = return (marrOff, acc)
+      | otherwise   = do
+		        let (oe', acc') = mf (arr `indexPArr` arrOff) acc
+			marrOff' <- case oe' of
+				      Nothing -> return marrOff 
+				      Just e' -> do
+					writeMPArr marr marrOff e'
+					return $ marrOff + 1
+                        trans' (arrOff + 1) marrOff' acc'
+
+
+-- common patterns for using `loop'
+--
+
+-- initial value for the accumulator when the accumulator is not needed
+--
+noAL :: ()
+noAL  = ()
+
+-- `loop' mutator that maps a function over array elements
+--
+mapEFL   :: (e -> e') -> (e -> () -> (Maybe e', ()))
+{-# INLINE mapEFL #-}
+mapEFL f  = \e a -> (Just $ f e, ())
+
+-- `loop' mutator that filters elements according to a predicate
+--
+filterEFL   :: (e -> Bool) -> (e -> () -> (Maybe e, ()))
+{-# INLINE filterEFL #-}
+filterEFL p  = \e a -> if p e then (Just e, ()) else (Nothing, ())
+
+-- `loop' mutator for array folding
+--
+foldEFL   :: (e -> acc -> acc) -> (e -> acc -> (Maybe (), acc))
+{-# INLINE foldEFL #-}
+foldEFL f  = \e a -> (Nothing, f e a)
+
+-- `loop' mutator for array scanning
+--
+scanEFL   :: (e -> acc -> acc) -> (e -> acc -> (Maybe acc, acc))
+{-# INLINE scanEFL #-}
+scanEFL f  = \e a -> (Just a, f e a)
+
+-- elementary array operations
+--
+
+-- unlifted array indexing 
+--
+indexPArr                       :: [:e:] -> Int -> e
+{-# INLINE indexPArr #-}
+indexPArr (PArr _ arr#) (I# i#)  = 
+  case indexArray# arr# i# of (# e #) -> e
+
+-- encapsulate writing to a mutable array in the `ST' monad
+--
+writeMPArr                           :: MPArr s e -> Int -> e -> ST s ()
+{-# INLINE writeMPArr #-}
+writeMPArr (MPArr _ marr#) (I# i#) e  = ST $ \s# ->
+  case writeArray# marr# i# e s# of s'# -> (# s'#, () #)