#Top-level dirs:
^alex/
^common-rts/
^CONTRIB/
^dll/
^greencard/
^green-card/
^haddock/
^haggis/
^happy/
^hdirect/
^hood/
^hslibs/
^hws/
^hx/
^literate/
^mhms/
^mkworld/
^nofib(/|$)
^lib/
^misc/
^mkworld/
^runtime/
^testsuite(/|$)
# bindists
^ghc-
^bin-manifest-
#Packages:
^libraries/Cabal(/|$)
^libraries/ALUT(/|$)
^libraries/GLUT(/|$)
^libraries/HGL(/|$)
^libraries/HUnit(/|$)
^libraries/HaXml(/|$)
^libraries/Japi(/|$)
^libraries/OpenAL(/|$)
^libraries/OpenGL(/|$)
^libraries/QuickCheck(/|$)
^libraries/Win32(/|$)
^libraries/X11(/|$)
^libraries/array(/|$)
^libraries/arrows(/|$)
^libraries/base(/|$)
^libraries/base3-compat(/|$)
^libraries/bytestring(/|$)
^libraries/cgi(/|$)
^libraries/concurrent(/|$)
^libraries/containers(/|$)
^libraries/directory(/|$)
^libraries/editline(/|$)
^libraries/fgl(/|$)
^libraries/filepath(/|$)
^libraries/getopt(/|$)
^libraries/ghc-prim(/|$)
^libraries/haskell-src(/|$)
^libraries/haskell98(/|$)
^libraries/hpc(/|$)
^libraries/html(/|$)
^libraries/integer-.*(/|$)
^libraries/old-locale(/|$)
^libraries/old-time(/|$)
^libraries/monads(/|$)
^libraries/mtl(/|$)
^libraries/ndp(/|$)
^libraries/network(/|$)
^libraries/packedstring(/|$)
^libraries/parsec(/|$)
^libraries/parallel(/|$)
^libraries/pretty(/|$)
^libraries/process(/|$)
^libraries/random(/|$)
^libraries/readline(/|$)
^libraries/regex-base(/|$)
^libraries/regex-compat(/|$)
^libraries/regex-posix(/|$)
^libraries/st(/|$)
^libraries/stm(/|$)
^libraries/template-haskell(/|$)
^libraries/time(/|$)
^libraries/timeout(/|$)
^libraries/unique(/|$)
^libraries/unix(/|$)
^libraries/xhtml(/|$)
# Other library bits that get generated:
^libraries/bootstrapping/
^libraries/stamp/
^libraries/cabal-bin$
^libraries/ifBuildable(/|$)
^libraries/installPackage(/|$)
^libraries/index.html
^libraries/doc-index.*\.html
^libraries/haddock-util.js
^libraries/haddock.css
^libraries/haskell_icon.gif
^libraries/minus.gif
^libraries/plus.gif
^libraries/libraries.txt
# It's often useful to have somewhere in the build tree to install to
^inst(/|$)
# Boring file regexps:
\.hi$
\.hi-boot$
\.o-boot$
\.p_o$
\.t_o$
\.debug_o$
\.thr_o$
\.thr_p_o$
\.thr_debug_o$
\.o$
\.a$
\.o\.cmd$
# *.ko files aren't boring by default because they might
# be Korean translations rather than kernel modules.
# \.ko$
\.ko\.cmd$
\.mod\.c$
(^|/)\.tmp_versions($|/)
(^|/)CVS($|/)
(^|/)RCS($|/)
~$
#(^|/)\.[^/]
(^|/)_darcs($|/)
\.bak$
\.BAK$
\.orig$
(^|/)vssver\.scc$
\.swp$
(^|/)MT($|/)
(^|/)\{arch\}($|/)
(^|/).arch-ids($|/)
(^|/),
\.class$
\.prof$
(^|/)\.DS_Store$
(^|/)BitKeeper($|/)
(^|/)ChangeSet($|/)
(^|/)\.svn($|/)
(^|/)\.git($|/)
\.git-ignore$
\.py[co]$
\#
\.cvsignore$
(^|/)Thumbs\.db$
\.depend$
\.depend-.*$
^compiler/primop-
^compiler/cmm/CmmLex.hs$
^compiler/cmm/CmmParse.hs$
^compiler/ghci/LibFFI.hs$
^compiler/ghci/LibFFI_hsc.c$
^compiler/main/Config.hs$
^compiler/main/ParsePkgConf.hs$
^compiler/parser/Parser.y$
^compiler/parser/Parser.hs$
^compiler/parser/Lexer.hs$
^compiler/parser/ParserCore.hs$
^compiler/parser/HaddockLex.hs
^compiler/parser/HaddockParse.hs
^compiler/prelude/primops.txt$
^compiler/stage1($|/)
^compiler/stage2($|/)
^compiler/stage3($|/)
^compiler/utils/Fingerprint.hs$
^compiler/utils/Fingerprint_hsc.c$
^mk/build.mk$
^mk/validate.mk$
^mk/are-validating.mk$
^mk/config.h.in$
^mk/config.h$
^mk/config.mk$
^mk/stamp-h$
^stage3.package.conf$
^inplace-datadir(/|$)
(^|/)autom4te.cache($|/)
^rts/AutoApply.*cmm$
^rts/sm/Evac_thr.c$
^rts/sm/Scav_thr.c$
package.conf.inplace$
package.conf.installed$
(^|/)config.log$
(^|/)config.status$
(^|/)configure$
^ghc.spec$
^docs/users_guide/ug-book.xml$
^docs/man/flags.xml$
^docs/man/flags.xsl$
^docs/man/ghc.1$
^extra-gcc-opts$
# ignore scripts like push-monk
^push-
^pull-
# Common log file names; testlog is made by validate
^testlog
^log
^utils/[a-zA-Z0-9-]+/dist-install(/|$)
^utils/[a-zA-Z0-9-]+/dist-inplace(/|$)
^utils/[a-zA-Z0-9-]+/install-inplace(/|$)
^compiler/Makefile-stage[1-3](/|$)
^compiler/dist-stage[1-3](/|$)
^ghc/dist-stage[1-3](/|$)
^ghc/stage[1-3]-inplace(/|$)
^utils/ext-core/Driver$
^utils/ext-core/PrimEnv.hs$
^utils/genapply/genapply$
^utils/genprimopcode/Lexer.hs$
^utils/genprimopcode/Parser.hs$
^utils/genprimopcode/genprimopcode$
^utils/ghc-pkg/Version.hs$
^utils/ghc-pkg/ghc-pkg-inplace$
^utils/ghc-pkg/ghc-pkg-inplace.bin$
^utils/ghc-pkg/ghc-pkg-inplace.hs$
^utils/ghc-pkg/ghc-pkg.bin$
^utils/hasktags/hasktags$
^utils/hasktags/hasktags-inplace$
^utils/hp2ps/hp2ps$
^utils/hpc/HpcParser.hs$
^utils/hpc/hpc$
^utils/hpc/hpc-inplace$
^utils/hsc2hs(/|$)
^utils/haddock(/|$)
^utils/lndir/lndir$
^utils/mkdependC/mkdependC$
^utils/mkdirhier/mkdirhier$
^utils/prof/cgprof/cgprof$
^utils/prof/ghcprof-inplace$
^utils/pwd/pwd$
^utils/pwd/pwd-inplace$
^utils/runghc/runghc$
^utils/runghc/runghc-inplace$
^utils/runghc/runhaskell$
^utils/runstdtest/runstdtest$
^utils/unlit/unlit$
^driver/ghci/ghc-pkg-inplace$
^driver/ghci/ghci-inplace$
^driver/mangler/ghc-asm$
^driver/mangler/ghc-asm.prl$
^driver/package.conf$
^driver/package.conf.inplace.old$
^driver/split/ghc-split$
^driver/split/ghc-split.prl$
^driver/stamp-pkg-conf-rts$
^includes/DerivedConstants.h$
^includes/GHCConstants.h$
^includes/ghcautoconf.h$
^includes/ghcplatform.h$
^includes/mkDerivedConstantsHdr$
^includes/mkGHCConstants$
^libffi/build($|/)
^libffi/ffi.h$
^libffi/stamp.ffi.static$
# http://editorconfig.org
root = true
[*.hs]
indent_style = space
indent_size = 2
trim_trailing_whitespace = true
insert_final_newline = true
charset = utf-8
end_of_line = lf
[Makefile]
indent_style = tab
[*.c]
indent_style = space
indent_size = 2
--command sh -c "HADRIAN_ARGS=-j exec ./hadrian/ghci-multi -j"
--reload compiler
--reload ghc
--reload includes
--restart hadrian/ghci-multi
5eecb20a0368b599d03930e2dbb0e91540de4cb2
# Configure git to ignore commits listed in this file with:
#
# git config blame.ignoreRevsFile .git-ignore-revs
# Module Hierarchy Renamings
af332442123878c1b61d236dce46418efcbe8750
255418da5d264fb2758bc70925adb2094f34adc3
1941ef4f050c0dfcb68229641fcbbde3a10f1072
528df8ecb4e2f9c78b1ae4ab7ff8230644e9b643
18a346a4b5a02b8c62e8eedb91b35c2d8e754b96
817f93eac4d13f680e8e3e7a25eb403b1864f82e
1b1067d14b656bbbfa7c47f156ec2700c9751549
240f5bf6f53515535be5bf3ef7632aa69ae21e3e
1500f0898e85316c7c97a2f759d83278a072ab0e
3ca52151881451ce5b3a7740d003e811b586140d
cf739945b8b28ff463dc44925348f20b3c1f22cb
da7f74797e8c322006eba385c9cbdce346dd1d43
6e2d9ee25bce06ae51d2f1cf8df4f7422106a383
d491a6795d507eabe35d8aec63c534d29f2d305b
99a9f51bf8207c79241fc0b685fadeb222a61292
eb6082358cdb5f271a8e4c74044a12f97352c52f
5119296440e6846c553c72b8a93afc5ecfa576f0
447864a94a1679b5b079e08bb7208a0005381cef
# convert CRLF into LF on checkin
# don't convert anything on checkout
* text=auto eol=lf
mk/win32-tarballs.md5sum text=auto eol=LF
testsuite/tests/parser/should_run/T25375.hs text=auto eol=crlf
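# For illustration, the effective attributes for a path can be checked with
# `git check-attr`, e.g.
#   git check-attr text eol -- testsuite/tests/parser/should_run/T25375.hs
# which should report "eol: crlf" for this file.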
# -----------------------------------------------------------------------------
# generic generated file patterns
Thumbs.db
.DS_Store
*~
*#
#*#
*.bak
*.BAK
*.orig
*.prof
*.rej
*.patch
*.stackdump
*.hi
*.hi-boot
*.hie
*.hie-boot
*.o-boot
*.p_o
*.t_o
*.debug_o
*.thr_o
*.thr_p_o
*.thr_debug_o
*.o
*.a
*.o.cmd
*.depend*
*.dyn_o
*.dyn_hi
__pycache__
.mypy_cache
*.SYMDEF*
a.out
log
tags
TAGS
autom4te.cache
config.log
config.status
configure
# GHC's own aclocal.m4 is generated by aclocal
/aclocal.m4
# Temporarily generated configure files
confdefs.h
# Hadrian files
stage0
stage1
stage2
# Ignore _build, _validatebuild and any other custom build directories headed by _
_*
*/generated/
*/ghc-stage1
.shake.*
.hadrian_ghci
.hadrian_ghci_multi/
.hie-bios
hadrian/bootstrap/jq-bin
# -----------------------------------------------------------------------------
# Ignore any overlapped darcs repos and back up files
*-darcs-backup*
_darcs/
# -----------------------------------------------------------------------------
# sub-repositories
/ghc-tarballs/
# -----------------------------------------------------------------------------
# Cabal dist directories
/driver/ghc/dist/
/driver/haddock/dist/
/driver/ghci/dist/
/libffi/dist-install/
/libraries/*/dist-boot/
/libraries/*/dist-install/
/libraries/*/dist-newstyle/
/libraries/dist-haddock/
/linters/*/dist-install/
/utils/*/dist*/
/compiler/stage1/
/compiler/stage2/
/compiler/stage3/
/ghc/stage1/
/ghc/stage2/
/ghc/stage3/
/utils/iserv/stage2*/
# -----------------------------------------------------------------------------
# specific generated files
/.gitlab/jobs-metadata.json
/bindist-list
/bindist-list.uniq
/bindistprep/
/bindisttest/HelloWorld
/bindisttest/
/bootstrapping/
/ch01.html
/ch02.html
/compiler/dist/
/compiler/Bytecodes.h
/compiler/ClosureTypes.h
/compiler/FunTypes.h
/compiler/MachRegs.h
/compiler/MachRegs
/compiler/GHC/CmmToLlvm/Version/Bounds.hs
/compiler/ghc.cabal
/compiler/ghc.cabal.old
/distrib/configure.ac
/distrib/ghc.iss
/docs/index.html
/docs/man
/docs/users_guide/.log
/docs/users_guide/users_guide
/docs/users_guide/ghc.1
/docs/users_guide/flags.pyc
/docs/users_guide/ghc_config.py
/docs/users_guide/ghc_config.pyc
/docs/users_guide/users_guide.pdf
/docs/users_guide/build-html
/docs/users_guide/build-pdf
/docs/users_guide/build-man
/docs/users_guide/.doctrees-*
/docs/users_guide/.doctrees/
/docs/users_guide/ghc_packages.pyc
/docs/users_guide/utils.pyc
/driver/ghci/ghc-pkg-inplace
/driver/ghci/ghci-inplace
/driver/ghci/ghci-wrapper.cabal
/driver/ghci/ghci.res
/driver/ghci/cwrapper.c
/driver/ghci/cwrapper.h
/driver/ghci/getLocation.c
/driver/ghci/getLocation.h
/driver/ghci/isMinTTY.c
/driver/ghci/isMinTTY.h
/driver/package.conf
/driver/package.conf.inplace.old
/settings
/ghc.spec
/ghc/ghc-bin.cabal
/index.html
/inplace/
/libffi/build/
/libffi/ffi.h
/libffi/package.conf.inplace
/libffi/package.conf.inplace.raw
/libffi/stamp*
/libffi/package.conf.install
/libffi/package.conf.install.raw
/libraries/bootstrapping.conf
/libraries/prologue.txt
/libraries/doc-index*.html
/libraries/frames.html
/libraries/ghc-boot/GNUmakefile
/libraries/ghc-boot/ghc-boot.cabal
/libraries/ghc-boot-th/GNUmakefile
/libraries/ghc-boot-th/ghc-boot-th.cabal
/libraries/ghc-boot-th-next/ghc-boot-th-next.cabal
/libraries/ghc-boot-th/ghc.mk
/libraries/ghc-heap/ghc-heap.cabal
/libraries/ghc-internal/ghc-internal.cabal
/libraries/ghc-experimental/ghc-experimental.cabal
/libraries/base/base.cabal
/libraries/ghci/GNUmakefile
/libraries/ghci/ghci.cabal
/libraries/ghci/ghc.mk
/libraries/haddock-util.js
/libraries/hslogo-16.png
/libraries/index-frames.html
/libraries/index.html
/libraries/libiserv/libiserv.cabal
/libraries/minus.gif
/libraries/ocean.css
/libraries/plus.gif
/libraries/synopsis.png
/libraries/stamp/
/libraries/template-haskell/template-haskell.cabal
/linter.log
/mk/are-validating.mk
/mk/build.mk
/mk/config.mk
/mk/config.mk.old
/mk/system-cxx-std-lib-1.0.conf
/mk/install.mk
/mk/project.mk
/mk/project.mk.old
/mk/validate.mk
/stage3.package.conf
/testsuite_summary*.txt
/testsuite*.xml
/testlog*
/utils/iserv/iserv.cabal
/utils/iserv-proxy/iserv-proxy.cabal
/utils/remote-iserv/remote-iserv.cabal
/utils/mkUserGuidePart/mkUserGuidePart.cabal
/utils/runghc/runghc.cabal
/utils/gen-dll/gen-dll.cabal
/utils/ghc-pkg/ghc-pkg.cabal
utils/unlit/fs.*
libraries/ghc-internal/include/fs.h
libraries/ghc-internal/cbits/fs.c
missing-win32-tarballs
/extra-gcc-opts
/sdistprep
.tm_properties
VERSION
GIT_COMMIT_ID
# -------------------------------------------------------------------------------------
# when using a docker image, one can mount the source code directory as the home folder
# -------------------------------------------------------------------------------------
.arcrc
.ghc
.bash_history
.gitconfig
# Should be equal to testdir_suffix from testsuite/driver/testlib.py.
*.run
# -----------------------------------------------------------------------------
# ghc.nix
ghc.nix/
# gdb
.gdb_history
.gdbinit
# -----------------------------------------------------------------------------
# Tooling
# direnv
.envrc
.direnv
# Visual Studio Code
.vscode
# Tooling - ghcide
*.hiedb
# clangd
.clangd
dist-newstyle/
variables:
GIT_SSL_NO_VERIFY: "1"
# Commit of ghc/ci-images repository from which to pull Docker images
DOCKER_REV: 94df7d589f0ded990826bc7a4d7f5a40d6055a4f
# Sequential version number of all cached things.
# Bump to invalidate GitLab CI cache.
CACHE_REV: 11
# Disable shallow clones; they break our linting rules
GIT_DEPTH: 0
# Always start with a fresh clone to avoid non-hermetic builds
GIT_STRATEGY: clone
# Overridden by individual jobs
CONFIGURE_ARGS: ""
# Overridden by individual jobs
CONFIGURE_WRAPPER: ""
GIT_SUBMODULE_STRATEGY: "normal"
# GitLab recommends using https:, not ssh:, to clone submodules. See #25528.
GIT_SUBMODULE_FORCE_HTTPS: 1
# Makes ci.sh isolate CABAL_DIR
HERMETIC: "YES"
# Reduce XZ compression level for regular jobs (it is bumped to 9 for release
# and nightly jobs). In my experiments I got the following bindist sizes and
# compression times for each compression level (with the quick flavour):
#
#   XZ_OPT        Time     Size
#   -9            4m06s    112 MB
#   -8            4m00s    114 MB
#   -7            3m50s    116 MB
#   -6 (default)  3m40s    118 MB
#   -5            2m47s    123 MB
#   -4            1m57s    134 MB
#   -3            1m03s    129 MB
#   -2            49.73s   136 MB
#   -1            37.72s   142 MB
#   -0            34.40s   156 MB
#
XZ_OPT: "-1"
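# For illustration: xz reads XZ_OPT from the environment, so the compression
# step of a bindist job behaves roughly like
#   XZ_OPT="-1" tar -cJf ghc-<target>.tar.xz ghc-<version>/
# (paths here are placeholders; the real invocation is made by the build
# scripts).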
default:
interruptible: true
stages:
- not-interruptible
- tool-lint # Source linting of the tools
- quick-build # A very quick smoke-test to weed out broken commits
- full-build # Build all the things
- packaging # Source distribution, etc.
- testing # head.hackage correctness and compiler performance testing
- deploy # push documentation
# Note [The CI Story]
# ~~~~~~~~~~~~~~~~~~~
#
# There are a few different types of pipelines. Among them:
#
# 1. marge-bot merges to `master`. Here we perform an exhaustive validation
# across all of the platforms which we support. In addition, we push
# performance metric notes upstream, providing a persistent record of the
# performance characteristics of the compiler.
#
# 2. merge requests. Here we perform a slightly less exhaustive battery of
# testing. Namely we omit some configurations (e.g. the unregisterised job).
# These use the merge request's base commit for performance metric
# comparisons.
#
# These and other pipelines are defined implicitly by the rules of individual
# jobs.
#
# At the top level, however, we can declare that pipelines (of whatever type)
# only run when:
#
# 1. Processing a merge request (as mentioned above)
#
# 2. Processing a tag
#
# 3. Pushing to master on the root ghc/ghc repo (as mentioned above)
#
# 4. Pushing to a release branch on the root ghc/ghc repo
#
# 5. Somebody manually triggers a pipeline from the GitLab UI
#
# In particular, note that pipelines don't automatically run just when changes
# are pushed to a feature branch.
workflow:
rules:
- if: $CI_MERGE_REQUEST_ID
- if: $CI_COMMIT_TAG
# N.B.: If we weren't explicit about CI_PROJECT_ID, the following rule would
# cause a duplicate pipeline for merge requests coming from the master
# branch of a fork.
- if: $CI_PROJECT_ID == "1" && $CI_COMMIT_BRANCH == "master"
- if: $CI_PROJECT_ID == "1" && $CI_COMMIT_BRANCH =~ /ghc-[0-9]+\.[0-9]+/
- if: '$CI_PIPELINE_SOURCE == "web"'
# which versions of GHC to allow bootstrap with
.bootstrap_matrix : &bootstrap_matrix
matrix:
- GHC_VERSION: 9.8.1
DOCKER_IMAGE: "registry.gitlab.haskell.org/ghc/ci-images/x86_64-linux-deb12-ghc9_8:$DOCKER_REV"
- GHC_VERSION: 9.10.1
DOCKER_IMAGE: "registry.gitlab.haskell.org/ghc/ci-images/x86_64-linux-deb12-ghc9_10:$DOCKER_REV"
# Allow linters to fail on draft MRs.
# This must be explicitly transcluded in lint jobs which
# override `rules:`
.drafts-can-fail-lint: &drafts-can-fail-lint
if: "$CI_MERGE_REQUEST_TITLE =~ /^\\s*(Draft|wip|WIP):/"
allow_failure: true
.lint:
stage: tool-lint
tags:
- lint
rules:
- *drafts-can-fail-lint
- when: always
.nightly: &nightly
variables:
XZ_OPT: "-9"
rules:
- if: $NIGHTLY
artifacts:
when: always
expire_in: 8 weeks
.release: &release
variables:
BUILD_FLAVOUR: "release"
XZ_OPT: "-9"
IGNORE_PERF_FAILURES: "all"
HADDOCK_HYPERLINKED_SOURCES: "YES"
artifacts:
when: always
expire_in: 1 year
rules:
- if: '$RELEASE_JOB == "yes"'
.full-ci: &full-ci
- if: '$CI_MERGE_REQUEST_LABELS =~ /.*full-ci.*/'
- if: '$CI_MERGE_REQUEST_LABELS =~ /.*marge_bot_batch_merge_job.*/'
- if: '$CI_COMMIT_BRANCH == "master"'
- if: '$CI_COMMIT_BRANCH =~ /ghc-[0-9]+\.[0-9]+/'
############################################################
# Runner Tags
############################################################
#
# * x86_64-linux: Any Docker-capable x86_64 Linux machine
# * aarch64-linux: Any Docker-capable AArch64 Linux machine
# * x86_64-windows: A x86_64 Windows machine
# * lint: Any Docker-capable x86_64 Linux machine; distinct from
# x86_64-linux to ensure low-latency availability.
#
####
# HACK
###
#
# Since 58cfcc65 the default for jobs has been "interruptible", which means
# that when new commits are pushed to a branch which already has a running
# pipeline, the old pipelines for this branch are cancelled.
#
# This includes the master branch, and in particular, new commits merged
# to the master branch will cancel the nightly job.
#
# The semantics of pipeline cancelling are actually a bit more complicated
# though. The interruptible flag is *per job*, but once a pipeline has run
# *any* non-interruptible job, then the whole pipeline is considered
# non-interruptible (ref
# https://gitlab.com/gitlab-org/gitlab/-/issues/32837). This leads to the
# hack below: by default all jobs are `interruptible: true`, but for
# pipelines we definitely want to run there is a dummy job which runs
# first and is `interruptible: false`. This has the effect of "dirtying" the
# whole pipeline so that it can no longer be cancelled.
#
# For now, this solves the immediate problem of making sure nightly
# jobs are not cancelled.
# In the future we may want to enable this job for the master branch as
# well, but making that change might mean we need more CI capacity than is
# currently available.
not-interruptible:
stage: not-interruptible
script: "true"
interruptible: false
image: "debian:10"
variables:
GIT_STRATEGY: none
tags:
- lint
rules:
# - if: '$CI_COMMIT_BRANCH == "master"'
# when: always
- if: $NIGHTLY
when: always
############################################################
# Validate jobs
############################################################
# These jobs are generated by running the ./.gitlab/generate_jobs script
include: '.gitlab/jobs.yaml'
############################################################
# tool linting
############################################################
ghc-linters:
stage: tool-lint
image: "registry.gitlab.haskell.org/ghc/ci-images/x86_64-linux-deb12:$DOCKER_REV"
extends: .lint-params
variables:
BUILD_FLAVOUR: default
script:
- .gitlab/ci.sh configure
- timeout 10m .gitlab/ci.sh run_hadrian test --test-root-dirs="testsuite/tests/linters"
dependencies: []
rules:
- if: $CI_MERGE_REQUEST_ID
- *drafts-can-fail-lint
# Run mypy Python typechecker on linter scripts.
lint-linters:
image: "registry.gitlab.haskell.org/ghc/ci-images/linters:$DOCKER_REV"
extends: .lint
script:
- mypy testsuite/tests/linters/regex-linters/*.py
dependencies: []
# Check that .T files all parse by listing broken tests.
lint-testsuite:
image: "registry.gitlab.haskell.org/ghc/ci-images/x86_64-linux-deb9:$DOCKER_REV"
extends: .lint
script:
- make -Ctestsuite list_broken TEST_HC=$GHC
dependencies: []
# Run mypy Python typechecker on testsuite driver
typecheck-testsuite:
image: "registry.gitlab.haskell.org/ghc/ci-images/linters:$DOCKER_REV"
extends: .lint
script:
- mypy testsuite/driver/runtests.py
dependencies: []
# We allow the submodule checker to fail when run on merge requests (to
# accommodate, e.g., haddock changes not yet upstream) but not on `master` or
# Marge jobs.
.lint-submods:
image: "registry.gitlab.haskell.org/ghc/ci-images/x86_64-linux-deb12:$DOCKER_REV"
extends: .lint-params
variables:
BUILD_FLAVOUR: default
script:
- .gitlab/ci.sh configure
- .gitlab/ci.sh run_hadrian stage0:exe:lint-submodule-refs
- git fetch "$CI_MERGE_REQUEST_PROJECT_URL" $CI_MERGE_REQUEST_TARGET_BRANCH_NAME
- base="$(git merge-base FETCH_HEAD $CI_COMMIT_SHA)"
- "echo Linting submodule changes between $base..$CI_COMMIT_SHA"
- git submodule foreach git remote update
- _build/stageBoot/bin/lint-submodule-refs . $(git rev-list $base..$CI_COMMIT_SHA)
dependencies: []
lint-author:
image: "registry.gitlab.haskell.org/ghc/ci-images/x86_64-linux-deb12:$DOCKER_REV"
extends: .lint-params
variables:
BUILD_FLAVOUR: default
script:
- git fetch "$CI_MERGE_REQUEST_PROJECT_URL" $CI_MERGE_REQUEST_TARGET_BRANCH_NAME
- base="$(git merge-base FETCH_HEAD $CI_COMMIT_SHA)"
- "echo Linting authors between $base..$CI_COMMIT_SHA"
- .gitlab/ci.sh lint_author $base $CI_COMMIT_SHA
dependencies: []
rules:
- if: $CI_MERGE_REQUEST_ID
- *drafts-can-fail-lint
lint-ci-config:
image: nixos/nix:2.25.2
extends: .lint
# We don't need history/submodules in this job
variables:
GIT_DEPTH: 1
GIT_SUBMODULE_STRATEGY: none
before_script:
- echo "experimental-features = nix-command flakes" >> /etc/nix/nix.conf
# Note [Nix-in-Docker]
# ~~~~~~~~~~~~~~~~~~~~
# The nixos/nix default config is max-jobs=1 and cores equal to the number
# of logical cores, which doesn't play nicely with our $CPUS convention. We
# fix it before invoking any nix build to avoid oversubscription while still
# allowing a reasonable degree of parallelism.
# FIXME: Disabling build-users-group=nixbld is a workaround for a Nix-in-Docker issue. See
# https://gitlab.haskell.org/ghc/head.hackage/-/issues/38#note_560487 for
# discussion.
- echo "cores = $CPUS" >> /etc/nix/nix.conf
- echo "max-jobs = $CPUS" >> /etc/nix/nix.conf
- nix run nixpkgs#gnused -- -i -e 's/ nixbld//' /etc/nix/nix.conf
script:
- nix run .gitlab/generate-ci#generate-jobs
# Exits with 1 (failing the job) if .gitlab/generate_jobs changed the output of the generated config
- nix shell nixpkgs#git -c git diff --exit-code
# And run this to generate the .gitlab/jobs-metadata.json
- nix run .gitlab/generate-ci#generate-job-metadata
artifacts:
when: always
paths:
- .gitlab/jobs-metadata.json
- .gitlab/jobs.yaml
dependencies: []
lint-submods:
extends: .lint-submods
# Allow failure on merge requests since any necessary submodule patches may
# not be upstreamed yet.
rules:
- if: '$CI_MERGE_REQUEST_LABELS =~ /.*marge_bot_batch_merge_job.*/'
allow_failure: false
# Don't run on nightly because the program needs a base commit to check.
- if: $NIGHTLY
when: never
- allow_failure: true
lint-submods-branch:
extends: .lint-submods
variables:
BUILD_FLAVOUR: default
script:
- .gitlab/ci.sh configure
- .gitlab/ci.sh run_hadrian stage0:exe:lint-submodule-refs
- "echo Linting submodule changes between $CI_COMMIT_BEFORE_SHA..$CI_COMMIT_SHA"
- git submodule foreach git remote update
- _build/stageBoot/bin/lint-submodule-refs . $(git rev-list $CI_COMMIT_BEFORE_SHA..$CI_COMMIT_SHA)
rules:
- if: '$CI_COMMIT_BRANCH == "master"'
- if: '$CI_COMMIT_BRANCH =~ /ghc-[0-9]+\.[0-9]+/'
- *drafts-can-fail-lint
############################################################
# GHC source code linting
############################################################
.lint-params:
needs: []
image: "registry.gitlab.haskell.org/ghc/ci-images/x86_64-linux-deb12:$DOCKER_REV"
extends: .lint
before_script:
- export PATH="/opt/toolchain/bin:$PATH"
# workaround for docker permissions
- sudo chown ghc:ghc -R .
- .gitlab/ci.sh setup
after_script:
- .gitlab/ci.sh save_cache
- cat ci-timings
variables:
GHC_FLAGS: -Werror
cache:
key: lint-$CACHE_REV
paths:
- cabal-cache
# Disabled due to #22830
.hlint-ghc-and-base:
extends: .lint-params
image: "registry.gitlab.haskell.org/ghc/ci-images/linters:$DOCKER_REV"
variables:
BUILD_FLAVOUR: default
script:
- .gitlab/ci.sh setup
- .gitlab/ci.sh configure
- .gitlab/ci.sh run_hadrian lint:ghc-internal
- .gitlab/ci.sh run_hadrian lint:ghc-experimental
- .gitlab/ci.sh run_hadrian lint:base
- .gitlab/ci.sh run_hadrian lint:compiler
############################################################
# GHC-in-GHCi (Hadrian)
############################################################
hadrian-ghc-in-ghci:
stage: quick-build
needs:
- job: lint-linters
- job: lint-submods
optional: true
image: "registry.gitlab.haskell.org/ghc/ci-images/x86_64-linux-deb12:$DOCKER_REV"
before_script:
# workaround for docker permissions
- sudo chown ghc:ghc -R .
variables:
GHC_FLAGS: -Werror
tags:
- x86_64-linux
script:
- git clean -xdf && git submodule foreach git clean -xdf
- . .gitlab/ci.sh setup
- . .gitlab/ci.sh configure
# Enable -Werror when building hadrian
- "echo 'package hadrian' > hadrian/cabal.project.local"
- "echo ' ghc-options: -Werror' >> hadrian/cabal.project.local"
# Load ghc-in-ghci then immediately exit and check the modules loaded
- export CORES="$(mk/detect-cpu-count.sh)"
- echo ":q" | HADRIAN_ARGS=-j$CORES hadrian/ghci -j$CORES | tail -n2 | grep "Ok,"
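# On a clean load GHCi prints a final line of the form "Ok, <N> modules
# loaded.", so the grep above fails this job whenever the load is not clean.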
after_script:
- .gitlab/ci.sh save_cache
- cat ci-timings
cache:
key: hadrian-ghci-$CACHE_REV
paths:
- cabal-cache
############################################################
# Hadrian Multi-Repl
############################################################
hadrian-multi:
stage: testing
needs:
- job: x86_64-linux-fedora33-release
optional: true
- job: nightly-x86_64-linux-fedora33-release
optional: true
- job: release-x86_64-linux-fedora33-release
optional: true
dependencies: null
image: "registry.gitlab.haskell.org/ghc/ci-images/x86_64-linux-fedora33:$DOCKER_REV"
before_script:
# workaround for docker permissions
- sudo chown ghc:ghc -R .
variables:
GHC_FLAGS: "-Werror -Wno-error=incomplete-record-selectors -Wwarn=deprecations -Wwarn=unused-imports"
# -Wno-error=incomplete-record-selectors is present because -Wall now
# includes -Wincomplete-record-selectors, and hadrian-multi has many, many
# warnings about incomplete record selectors. A better fix would be to
# remove the use of incomplete record selectors, since each of them represents
# a potential crash.
CONFIGURE_ARGS: --enable-bootstrap-with-devel-snapshot
tags:
- x86_64-linux
script:
- export BOOT_HC=$GHC
- root=$(pwd)/ghc
- ls
- |
mkdir tmp
tar -xf ghc-x86_64-linux-fedora33-release.tar.xz -C tmp
pushd tmp/ghc-*/
./configure --prefix=$root
make install
popd
rm -Rf tmp
- export HC=$root/bin/ghc
# At this point, GHC means the GHC used to configure the tree with
- export GHC=$root/bin/ghc
- . .gitlab/ci.sh setup
- . .gitlab/ci.sh configure
# From now on, GHC means the GHC used for hadrian
- export GHC=$BOOT_HC
- export CORES="$(mk/detect-cpu-count.sh)"
# Load hadrian-multi then immediately exit and check the modules loaded
- echo ":q" | HADRIAN_ARGS=-j$CORES hadrian/ghci-multi -j$CORES | tail -n2 | grep "Ok,"
after_script:
- .gitlab/ci.sh save_cache
cache:
key: hadrian-ghci-$CACHE_REV
paths:
- cabal-cache
rules:
- *full-ci
############################################################
# stack-hadrian-build
############################################################
# Verify that Hadrian builds with stack. Note that we don't actually perform a
# build of GHC itself; we merely test that the Hadrian executable builds and
# works (by invoking `hadrian --version`).
stack-hadrian-build:
extends: hadrian-ghc-in-ghci
stage: quick-build
script:
- . .gitlab/ci.sh setup
- . .gitlab/ci.sh configure
- hadrian/build-stack --version
####################################
# Testing reinstallable ghc codepath
####################################
test-cabal-reinstall-x86_64-linux-deb10:
extends: nightly-x86_64-linux-deb10-validate
stage: full-build
variables:
REINSTALL_GHC: "yes"
BUILD_FLAVOUR: validate
TEST_ENV: "x86_64-linux-deb10-cabal-install"
rules:
- if: $NIGHTLY
- if: '$CI_MERGE_REQUEST_LABELS =~ /.*test-reinstall.*/'
########################################
# Testing ABI is invariant across builds
########################################
abi-test-nightly:
stage: full-build
needs:
- job: nightly-x86_64-linux-fedora33-release-hackage
- job: nightly-x86_64-linux-fedora33-release
tags:
- x86_64-linux
image: "registry.gitlab.haskell.org/ghc/ci-images/x86_64-linux-fedora33:$DOCKER_REV"
dependencies: null
before_script:
- mkdir -p normal
- mkdir -p hackage
- tar -xf ghc-x86_64-linux-fedora33-release.tar.xz -C normal/
- tar -xf ghc-x86_64-linux-fedora33-release-hackage_docs.tar.xz -C hackage/
script:
- .gitlab/ci.sh compare_interfaces_of "normal/ghc-*" "hackage/ghc-*"
artifacts:
paths:
- out
rules:
# This job is broken. Disabling it until some kind soul can finish its
# implementation. #23269
- when: never
- if: $NIGHTLY
############################################################
# Packaging
############################################################
doc-tarball:
stage: packaging
needs:
- job: x86_64-linux-deb12-numa-slow-validate
optional: true
- job: nightly-x86_64-linux-deb12-validate
optional: true
- job: release-x86_64-linux-deb12-release
optional: true
- job: x86_64-windows-validate
optional: true
- job: nightly-x86_64-windows-validate
optional: true
- job: release-x86_64-windows-release
optional: true
tags:
- x86_64-linux
image: "registry.gitlab.haskell.org/ghc/ci-images/x86_64-linux-deb12:$DOCKER_REV"
dependencies: null
variables:
LINUX_BINDIST: "ghc-x86_64-linux-deb12.tar.xz"
WINDOWS_BINDIST: "ghc-x86_64-windows.tar.xz"
artifacts:
expose_as: "Documentation Preview"
paths:
- haddock.html.tar.xz
- docs/haddock/
- libraries.html.tar.xz
- docs/libraries/
- users_guide.html.tar.xz
- docs/users_guide/
- docs/index.html
- Haddock.pdf
- users_guide.pdf
script:
- |
mv "ghc-x86_64-linux-deb12-numa-slow-validate.tar.xz" "$LINUX_BINDIST" \
|| mv "ghc-x86_64-linux-deb12-validate.tar.xz" "$LINUX_BINDIST" \
|| mv "ghc-x86_64-linux-deb12-release.tar.xz" "$LINUX_BINDIST" \
|| true
mv "ghc-x86_64-windows-validate.tar.xz" "$WINDOWS_BINDIST" \
|| mv "ghc-x86_64-windows-release.tar.xz" "$WINDOWS_BINDIST" \
|| true
if [ ! -f "$LINUX_BINDIST" ]; then
echo "Error: $LINUX_BINDIST does not exist. Did the Debian 12 job fail?"
exit 1
fi
if [ ! -f "$WINDOWS_BINDIST" ]; then
echo "Error: $WINDOWS_BINDIST does not exist. Did the 64-bit Windows job fail?"
exit 1
fi
- rm -Rf docs
- bash -ex distrib/mkDocs/mkDocs $LINUX_BINDIST $WINDOWS_BINDIST
- mv docs/*.tar.xz docs/*.pdf .
- ls -lh
hackage-doc-tarball:
stage: packaging
needs:
- job: nightly-x86_64-linux-fedora33-release-hackage
optional: true
- job: release-x86_64-linux-fedora33-release-hackage
optional: true
- job: source-tarball
tags:
- x86_64-linux
image: "registry.gitlab.haskell.org/ghc/ci-images/x86_64-linux-deb12:$DOCKER_REV"
dependencies: null
variables:
# Don't clone the git repo.
GIT_STRATEGY: none
# Don't attempt to boot a source tarball
NO_BOOT: "1"
artifacts:
paths:
- hackage_docs
before_script:
- tar -xf ghc-*[0-9]-src.tar.xz
- tar -xf ghc-x86_64-linux-fedora33-release.tar.xz -C ghc*/
script:
- cd ghc*/
- mv .gitlab/rel_eng/upload_ghc_libs.py .
- . .gitlab/ci.sh setup
- . .gitlab/ci.sh configure
- ./upload_ghc_libs.py prepare --bindist ghc*linux/
- mv .upload-libs/docs ../hackage_docs
rules:
- if: $NIGHTLY
- if: '$RELEASE_JOB == "yes"'
source-tarball:
stage: full-build
needs:
- hadrian-ghc-in-ghci
tags:
- x86_64-linux
image: "registry.gitlab.haskell.org/ghc/ci-images/x86_64-linux-deb12:$DOCKER_REV"
dependencies: []
artifacts:
paths:
- ghc-*.tar.xz
script:
- sudo chown ghc:ghc -R .
- . .gitlab/ci.sh setup
- . .gitlab/ci.sh configure
- ./hadrian/build source-dist
- mv _build/source-dist/*.xz .
rules:
- if: $NIGHTLY
- if: '$RELEASE_JOB == "yes"'
- if: '$CI_MERGE_REQUEST_LABELS =~ /.*test-bootstrap.*/'
- *full-ci
generate-hadrian-bootstrap-sources:
stage: full-build
needs:
- hadrian-ghc-in-ghci
tags:
- x86_64-linux
image: "$DOCKER_IMAGE"
dependencies: []
parallel: *bootstrap_matrix
artifacts:
paths:
- hadrian-bootstrap-sources-*.tar.gz
script:
- bash -c "[ $($GHC --numeric-version) = $GHC_VERSION ] || { echo $GHC_VERSION is not the same as the version of $GHC && exit 1; }"
- python3 ./hadrian/bootstrap/bootstrap.py -w $GHC fetch -o hadrian-bootstrap-sources-$GHC_VERSION
rules:
- if: $NIGHTLY
- if: '$RELEASE_JOB == "yes"'
- if: '$CI_MERGE_REQUEST_LABELS =~ /.*test-bootstrap.*/'
- *full-ci
package-hadrian-bootstrap-sources:
stage: full-build
tags:
- x86_64-linux
needs: ["generate-hadrian-bootstrap-sources"]
image: "registry.gitlab.haskell.org/ghc/ci-images/x86_64-linux-deb12:$DOCKER_REV"
artifacts:
paths:
- hadrian-bootstrap-sources-all.tar.gz
script:
- tar -czvf hadrian-bootstrap-sources-all.tar.gz hadrian-bootstrap-sources-*.tar.gz
rules:
- if: $NIGHTLY
- if: '$RELEASE_JOB == "yes"'
- if: '$CI_MERGE_REQUEST_LABELS =~ /.*test-bootstrap.*/'
- *full-ci
test-bootstrap:
stage: full-build
needs: [generate-hadrian-bootstrap-sources, source-tarball]
tags:
- x86_64-linux
image: "$DOCKER_IMAGE"
parallel: *bootstrap_matrix
dependencies: null
script:
- sudo chown ghc:ghc -R .
- mkdir test-bootstrap
- tar -xf ghc-*[0-9]-src.tar.xz -C test-bootstrap
- tar -xf ghc-*-testsuite.tar.xz -C test-bootstrap
- cp hadrian-bootstrap-sources-$GHC_VERSION.tar.gz test-bootstrap/ghc-*
- pushd test-bootstrap/ghc-*
- python3 ./hadrian/bootstrap/bootstrap.py -w $GHC --bootstrap-sources hadrian-bootstrap-sources-$GHC_VERSION.tar.gz
- export HADRIAN_PATH="$PWD/_build/bin/hadrian"
- .gitlab/ci.sh setup
# Bootstrapping should not depend on happy or alex, so unset HAPPY and ALEX
# so that the build fails if they are invoked.
- unset HAPPY; unset ALEX
# Check the commands are not available; the parentheses are crucial, as they start a subshell
- (! command -v alex --version)
- (! command -v happy --version)
- .gitlab/ci.sh configure
- .gitlab/ci.sh build_hadrian
- .gitlab/ci.sh test_hadrian
- popd
- rm -Rf test-bootstrap
variables:
# Don't record performance benchmarks
TEST_ENV: ""
BIN_DIST_NAME: "ghc-x86_64-deb12-linux"
BUILD_FLAVOUR: "validate"
NO_BOOT: "1"
rules:
- if: $NIGHTLY
- if: '$CI_MERGE_REQUEST_LABELS =~ /.*test-bootstrap.*/'
- *full-ci
- if: '$RELEASE_JOB == "yes"'
when: always
variables:
BUILD_FLAVOUR: "release"
############################################################
# Testing via head.hackage
############################################################
# Triggering jobs in the ghc/head.hackage project requires that we have a job
# token for that repository. Furthermore the head.hackage CI job must have
# access to an unprivileged access token with the ability to query the ghc/ghc
# project such that it can find the job ID of the fedora33 job for the current
# pipeline.
#
# hackage-lint: Can be triggered on any MR, normal validate pipeline or nightly build.
# Runs head.hackage with -dlint and a slow-validate bindist
#
# hackage-label-lint: Triggered on MRs with the "user-facing" label; runs the slow-validate
# head.hackage build with -dlint.
#
# nightly-hackage-lint: Runs automatically on nightly pipelines with slow-validate + dlint config.
#
# nightly-hackage-perf: Runs automatically on nightly pipelines with a release build and eventlogging enabled.
#
# release-hackage-lint: Runs automatically on release pipelines with -dlint on a release bindist.
.hackage:
stage: testing
variables:
UPSTREAM_PROJECT_PATH: "$CI_PROJECT_PATH"
UPSTREAM_PROJECT_ID: "$CI_PROJECT_ID"
UPSTREAM_PIPELINE_ID: "$CI_PIPELINE_ID"
RELEASE_JOB: "$RELEASE_JOB"
trigger:
project: "ghc/head.hackage"
branch: "upstream-testing"
strategy: "depend"
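# Note: with `strategy: "depend"` the triggering job waits for the downstream
# head.hackage pipeline and mirrors its status, so a downstream failure fails
# this job unless allow_failure is set.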
hackage-lint:
needs:
- job: x86_64-linux-deb12-numa-slow-validate
optional: true
artifacts: false
- job: nightly-x86_64-linux-deb12-numa-slow-validate
optional: true
artifacts: false
- job: nightly-aarch64-linux-deb12-validate
optional: true
artifacts: false
- job: aarch64-linux-deb12-validate
optional: true
artifacts: false
extends: .hackage
variables:
SLOW_VALIDATE: 1
EXTRA_HC_OPTS: "-dlint"
# Not for release jobs, because there isn't a slow-validate bindist. There is an
# automatic pipeline for release bindists (see release-hackage-lint).
rules:
- if: '$RELEASE_JOB != "yes"'
when: manual
hackage-label-lint:
needs:
- job: x86_64-linux-deb12-numa-slow-validate
optional: true
artifacts: false
- job: aarch64-linux-deb12-validate
optional: true
artifacts: false
extends: .hackage
variables:
SLOW_VALIDATE: 1
EXTRA_HC_OPTS: "-dlint"
rules:
- if: '$CI_MERGE_REQUEST_LABELS =~ /.*user-facing.*/'
# The head.hackage job is split into two jobs because enabling `-dlint`
# affects the total allocation numbers for the simplifier portion significantly.
nightly-hackage-lint:
needs:
- job: nightly-x86_64-linux-deb12-numa-slow-validate
optional: true
artifacts: false
- job: nightly-aarch64-linux-deb12-validate
optional: true
artifacts: false
rules:
- if: $NIGHTLY
variables:
NIGHTLY: "$NIGHTLY"
extends: .hackage
variables:
SLOW_VALIDATE: 1
EXTRA_HC_OPTS: "-dlint"
nightly-hackage-perf:
needs:
- job: nightly-x86_64-linux-fedora33-release
optional: true
artifacts: false
- job: nightly-aarch64-linux-deb12-validate
optional: true
artifacts: false
rules:
- if: $NIGHTLY
variables:
NIGHTLY: "$NIGHTLY"
extends: .hackage
variables:
# Generate logs for nightly builds which include timing information.
EXTRA_HC_OPTS: "-ddump-timings"
# Ask head.hackage to generate eventlogs
EVENTLOGGING: 1
release-hackage-lint:
needs:
- job: release-x86_64-linux-fedora33-release
optional: true
artifacts: false
- job: release-aarch64-linux-deb12-release+no_split_sections
optional: true
artifacts: false
rules:
- if: '$RELEASE_JOB == "yes"'
extends: .hackage
# The ghcup metadata pipeline requires all prior jobs to
# pass. The hackage job can easily fail due to API changes
# or similar - so we allow it to fail.
allow_failure: true
variables:
# No slow-validate bindist on release pipeline
EXTRA_HC_OPTS: "-dlint"
############################################################
# Testing via test-primops
############################################################
# Triggering jobs in the ghc/test-primops project
.test-primops:
stage: testing
variables:
UPSTREAM_PROJECT_PATH: "$CI_PROJECT_PATH"
UPSTREAM_PROJECT_ID: "$CI_PROJECT_ID"
UPSTREAM_PIPELINE_ID: "$CI_PIPELINE_ID"
trigger:
project: "ghc/test-primops"
branch: "upstream-testing"
strategy: "depend"
.test-primops-validate-template:
needs:
- job: x86_64-linux-deb12-validate
artifacts: false
- job: aarch64-linux-deb12-validate
artifacts: false
- job: aarch64-darwin-validate
artifacts: false
- job: x86_64-darwin-validate
artifacts: false
extends: .test-primops
test-primops-label:
extends: .test-primops-validate-template
rules:
- if: '$CI_MERGE_REQUEST_LABELS =~ /.*test-primops.*/'
# We do not use *.full-ci here since that would imply running in nightly
# where we do not have the normal validate jobs. We have the -nightly job
# below to handle this case.
- if: '$CI_MERGE_REQUEST_LABELS =~ /.*full-ci.*/'
test-primops-nightly:
extends: .test-primops
needs:
- job: nightly-x86_64-linux-deb12-validate
artifacts: false
- job: nightly-aarch64-linux-deb12-validate
artifacts: false
- job: nightly-aarch64-darwin-validate
artifacts: false
- job: nightly-x86_64-darwin-validate
artifacts: false
rules:
- if: $NIGHTLY
test-primops-release:
extends: .test-primops
rules:
- if: '$RELEASE_JOB == "yes"'
############################################################
# Nofib testing
# (Disabled: See #21859)
############################################################
perf-nofib:
# Dependencies used by perf-nofib can't be built when some compiler changes
# aren't (yet) supported by head.hackage.
# Hence we allow this job to fail.
allow_failure: true
stage: testing
needs:
- job: x86_64-linux-fedora33-release
optional: true
- job: nightly-x86_64-linux-fedora33-release
optional: true
- job: release-x86_64-linux-fedora33-release
optional: true
image: "registry.gitlab.haskell.org/ghc/ci-images/x86_64-linux-fedora33:$DOCKER_REV"
rules:
- when: never
- *full-ci
tags:
- x86_64-linux
before_script:
- cd nofib
- "cabal update --index=$HACKAGE_INDEX_STATE --project-file=cabal.project.head-hackage"
script:
- root=$(pwd)/ghc
- |
mkdir tmp
tar -xf ../ghc-x86_64-linux-fedora33-release.tar.xz -C tmp
pushd tmp/ghc-*/
./configure --prefix=$root
make install
popd
rm -Rf tmp
- export PATH=$root/bin:$PATH
- cabal install -w "$root/bin/ghc" --lib regex-compat unboxed-ref parallel random-1.2.1 --allow-newer --package-env local.env --project-file=cabal.project.head-hackage
- export GHC_ENVIRONMENT="$(pwd)/local.env"
- "make HC=$root/bin/ghc BOOT_HC=$root/bin/ghc boot mode=fast -j$CPUS"
- "make HC=$root/bin/ghc BOOT_HC=$root/bin/ghc EXTRA_RUNTEST_OPTS='-cachegrind +RTS -V0 -RTS' NoFibRuns=1 mode=fast -j$CPUS 2>&1 | tee nofib.log"
artifacts:
expire_in: 12 week
when: always
paths:
- nofib/nofib.log
############################################################
# Ad-hoc performance testing
############################################################
perf:
stage: testing
needs:
- job: x86_64-linux-fedora33-release
optional: true
- job: nightly-x86_64-linux-fedora33-release
optional: true
- job: release-x86_64-linux-fedora33-release
optional: true
dependencies: null
image: "registry.gitlab.haskell.org/ghc/ci-images/x86_64-linux-fedora33:$DOCKER_REV"
tags:
- x86_64-linux-perf
script:
- root=$(pwd)/ghc
- |
mkdir tmp
tar -xf ghc-x86_64-linux-fedora33-release.tar.xz -C tmp
pushd tmp/ghc-*/
./configure --prefix=$root
make install
popd
rm -Rf tmp
- export BOOT_HC=$(which ghc)
- export HC=$root/bin/ghc
- .gitlab/ci.sh perf_test
artifacts:
expire_in: 2 year
when: always
paths:
- out
rules:
- *full-ci
############################################################
# ABI testing
############################################################
abi-test:
stage: testing
needs:
- job: x86_64-linux-fedora33-release
optional: true
- job: nightly-x86_64-linux-fedora33-release
optional: true
- job: release-x86_64-linux-fedora33-release
optional: true
dependencies: null
image: "registry.gitlab.haskell.org/ghc/ci-images/x86_64-linux-fedora33:$DOCKER_REV"
rules:
- if: $CI_MERGE_REQUEST_ID
- if: '$CI_COMMIT_BRANCH == "master"'
- if: '$CI_COMMIT_BRANCH =~ /ghc-[0-9]+\.[0-9]+/'
tags:
- x86_64-linux
script:
- root=$(pwd)/ghc
- |
mkdir tmp
tar -xf ghc-x86_64-linux-fedora33-release.tar.xz -C tmp
pushd tmp/ghc-*/
./configure --prefix=$root
make install
popd
rm -Rf tmp
- export BOOT_HC=$(which ghc)
- export HC=$root/bin/ghc
- .gitlab/ci.sh abi_test
artifacts:
paths:
- out
rules:
- *full-ci
- if: '$CI_MERGE_REQUEST_LABELS =~ /.*test-abi.*/'
############################################################
# Documentation deployment via GitLab Pages
############################################################
pages:
stage: deploy
needs: [doc-tarball]
dependencies: null
image: ghcci/x86_64-linux-deb9:0.2
# See #18973
allow_failure: true
tags:
- x86_64-linux
script:
- mkdir -p public/doc
# haddock docs are not in the hadrian-produced doc tarballs at the moment
# - tar -xf haddock.html.tar.xz -C public/doc
- tar -xf libraries.html.tar.xz -C public/doc
- tar -xf users_guide.html.tar.xz -C public/doc
- |
cat >public/index.html <<EOF
<!DOCTYPE HTML>
<meta charset="UTF-8">
<meta http-equiv="refresh" content="1; url=doc/">
EOF
- cp -f docs/index.html public/doc
rules:
# N.B. only run this on ghc/ghc since the deployed pages are quite large
# and we only serve GitLab Pages for ghc/ghc.
- if: '$CI_COMMIT_BRANCH == "master" && $CI_PROJECT_NAMESPACE == "ghc"'
- if: '$CI_MERGE_REQUEST_LABELS =~ /.*publish-docs.*/'
artifacts:
paths:
- public
#############################################################
# Generation of GHCUp metadata
#############################################################
project-version:
stage: packaging
image: "registry.gitlab.haskell.org/ghc/ci-images/x86_64-linux-deb12:$DOCKER_REV"
tags:
- x86_64-linux
variables:
BUILD_FLAVOUR: default
script:
# Calculate the project version
- sudo chown ghc:ghc -R .
- .gitlab/ci.sh setup
- .gitlab/ci.sh configure
- echo "ProjectVersion=$(cat VERSION)" > version.sh
needs: []
dependencies: []
artifacts:
paths:
- version.sh
.ghcup-metadata:
stage: deploy
image: nixos/nix:2.25.2
dependencies: null
tags:
- x86_64-linux
variables:
BUILD_FLAVOUR: default
GIT_SUBMODULE_STRATEGY: "none"
before_script:
- echo "experimental-features = nix-command flakes" >> /etc/nix/nix.conf
# FIXME: See Note [Nix-in-Docker]
- echo "cores = $CPUS" >> /etc/nix/nix.conf
- echo "max-jobs = $CPUS" >> /etc/nix/nix.conf
- nix run nixpkgs#gnused -- -i -e 's/ nixbld//' /etc/nix/nix.conf
- nix-channel --update
- cat version.sh
# Calculate the project version
- . ./version.sh
# Download existing ghcup metadata for the correct year
- PipelineYear="$(date -d $CI_PIPELINE_CREATED_AT +%Y)"
- nix shell nixpkgs#wget -c wget "https://ghc.gitlab.haskell.org/ghcup-metadata/ghcup-nightlies-$PipelineYear-0.0.7.yaml" -O ghcup-0.0.7.yaml
- nix run .gitlab/generate-ci#generate-job-metadata
artifacts:
paths:
- metadata_test.yaml
- version.sh
ghcup-metadata-nightly:
extends: .ghcup-metadata
# Explicit needs for validate pipeline because we only need certain bindists
needs:
- job: nightly-x86_64-linux-fedora33-release
artifacts: false
- job: nightly-x86_64-linux-centos7-validate
artifacts: false
- job: nightly-x86_64-linux-ubuntu22_04-validate
artifacts: false
- job: nightly-x86_64-linux-ubuntu20_04-validate
artifacts: false
- job: nightly-x86_64-linux-ubuntu18_04-validate
artifacts: false
- job: nightly-x86_64-linux-rocky8-validate
artifacts: false
- job: nightly-x86_64-darwin-validate
artifacts: false
- job: nightly-aarch64-darwin-validate
artifacts: false
- job: nightly-x86_64-windows-validate
artifacts: false
- job: nightly-x86_64-linux-alpine3_12-validate
artifacts: false
- job: nightly-x86_64-linux-alpine3_20-validate
artifacts: false
- job: nightly-x86_64-linux-deb9-validate
artifacts: false
- job: nightly-i386-linux-deb10-validate
artifacts: false
- job: nightly-i386-linux-deb12-validate
artifacts: false
- job: nightly-x86_64-linux-deb10-validate
artifacts: false
- job: nightly-aarch64-linux-deb10-validate
artifacts: false
- job: nightly-x86_64-linux-deb11-validate
artifacts: false
- job: nightly-x86_64-linux-deb12-validate
artifacts: false
- job: nightly-aarch64-linux-deb12-validate
artifacts: false
- job: nightly-aarch64-linux-alpine3_18-validate
artifacts: false
- job: source-tarball
artifacts: false
- job: project-version
script:
- nix shell -f .gitlab/rel_eng -c ghcup-metadata --metadata ghcup-0.0.7.yaml --date="$(date -d $CI_PIPELINE_CREATED_AT +%Y-%m-%d)" --pipeline-id="$CI_PIPELINE_ID" --version="$ProjectVersion" > "metadata_test.yaml"
rules:
- if: $NIGHTLY
# Update the ghcup metadata with information about this nightly pipeline
ghcup-metadata-nightly-push:
stage: deploy
image: "registry.gitlab.haskell.org/ghc/ci-images/x86_64-linux-fedora33:$DOCKER_REV"
dependencies: null
tags:
- x86_64-linux
variables:
BUILD_FLAVOUR: default
GIT_SUBMODULE_STRATEGY: "none"
needs:
- job: ghcup-metadata-nightly
artifacts: true
script:
- git clone https://gitlab.haskell.org/ghc/ghcup-metadata.git
- PipelineYear="$(date -d $CI_PIPELINE_CREATED_AT +%Y)"
- cp metadata_test.yaml "ghcup-metadata/ghcup-nightlies-$PipelineYear-0.0.7.yaml"
- cp metadata_test.yaml "ghcup-metadata/ghcup-nightlies-0.0.7.yaml"
- cd ghcup-metadata
- git config user.email "ghc-ci@gitlab-haskell.org"
- git config user.name "GHC GitLab CI"
- git remote add gitlab_origin https://oauth2:$PROJECT_PUSH_TOKEN@gitlab.haskell.org/ghc/ghcup-metadata.git
- git add .
- git commit -m "Update metadata"
- git push gitlab_origin HEAD:updates
rules:
# Only run the update on scheduled nightly pipelines, i.e. once a day
- if: $NIGHTLY && $CI_PIPELINE_SOURCE == "schedule" && $CI_COMMIT_BRANCH == "master"
ghcup-metadata-release:
# No explicit needs for release pipeline as we assume we need everything and everything will pass.
extends: .ghcup-metadata
script:
- nix shell -f .gitlab/rel_eng -c ghcup-metadata --release-mode --metadata ghcup-0.0.7.yaml --date="$(date -d $CI_PIPELINE_CREATED_AT +%Y-%m-%d)" --pipeline-id="$CI_PIPELINE_ID" --version="$ProjectVersion" --fragment
- nix shell -f .gitlab/rel_eng -c ghcup-metadata --release-mode --metadata ghcup-0.0.7.yaml --date="$(date -d $CI_PIPELINE_CREATED_AT +%Y-%m-%d)" --pipeline-id="$CI_PIPELINE_ID" --version="$ProjectVersion" > "metadata_test.yaml"
rules:
- if: '$RELEASE_JOB == "yes"'
.ghcup-metadata-testing:
stage: deploy
variables:
UPSTREAM_PROJECT_PATH: "$CI_PROJECT_PATH"
UPSTREAM_PROJECT_ID: "$CI_PROJECT_ID"
UPSTREAM_PIPELINE_ID: "$CI_PIPELINE_ID"
RELEASE_JOB: "$RELEASE_JOB"
# Do not inherit global variables (such as CONFIGURE_ARGS) as these take
# precedence over the variables defined in the downstream job.
inherit:
variables: false
trigger:
project: "ghc/ghcup-ci"
branch: "upstream-testing"
strategy: "depend"
forward:
yaml_variables: true
pipeline_variables: false
ghcup-metadata-testing-nightly:
needs:
- job: ghcup-metadata-nightly
artifacts: false
extends: .ghcup-metadata-testing
variables:
NIGHTLY: "$NIGHTLY"
UPSTREAM_JOB_NAME: "ghcup-metadata-nightly"
rules:
- if: '$NIGHTLY == "1"'
ghcup-metadata-testing-release:
needs:
- job: ghcup-metadata-release
artifacts: false
extends: .ghcup-metadata-testing
variables:
UPSTREAM_JOB_NAME: "ghcup-metadata-release"
rules:
- if: '$RELEASE_JOB == "yes"'
when: manual
# Where the GitLab happens
## Updating PERF_NOTES_PUSH_CREDENTIALS
This CI variable is used by test-metrics.sh to push performance data as a git
note to https://gitlab.haskell.org/ghc/ghc-performance-notes.
The current token will expire on 2025-07-02.
### STEPS
Set and fetch the updated token:
```
GITLAB_WRITE=<Your Gitlab API token>
one_year_later="$(date --date='1 year' --iso-8601)"
curl -X POST --header "PRIVATE-TOKEN: $GITLAB_WRITE" -H "Content-Type: application/json" \
--data '{"name":"test-metrics.sh", "scopes":["write_repository"], "expires_at":"$one_year_later"}' \
https://gitlab.haskell.org/api/v4/projects/117/access_tokens \
| jq .token
```
Update the variable:
```
GITLAB_WRITE=<Your Gitlab API token>
NEW_VALUE=<Output from the above>
curl --fail-with-body --request PUT --header "PRIVATE-TOKEN: $GITLAB_WRITE" \
"https://gitlab.haskell.org/api/v4/projects/1/variables/PERF_NOTES_PUSH_CREDENTIALS" \
--form "value=$NEW_VALUE"
```
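Optionally, verify the new value is in place (read-only check; this uses GitLab's
standard variables GET endpoint and assumes the same project id as above):
```
curl --fail-with-body --header "PRIVATE-TOKEN: $GITLAB_WRITE" \
"https://gitlab.haskell.org/api/v4/projects/1/variables/PERF_NOTES_PUSH_CREDENTIALS"
```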
#!/usr/bin/env bash
# shellcheck disable=SC2230
# shellcheck disable=SC1090
# This is the primary driver of the GitLab CI infrastructure.
# Run `ci.sh usage` for usage information.
set -Eeuo pipefail
# Configuration:
# N.B. You may want to also update the index-state in hadrian/cabal.project.
HACKAGE_INDEX_STATE="2025-01-27T17:45:32Z"
MIN_HAPPY_VERSION="1.20"
MIN_ALEX_VERSION="3.2.6"
TOP="$(pwd)"
if [ ! -d "$TOP/.gitlab" ]; then
echo "This script expects to be run from the root of a ghc checkout"
fi
CABAL_CACHE="$TOP/${CABAL_CACHE:-cabal-cache}"
source "$TOP/.gitlab/common.sh"
function time_it() {
local name="$1"
shift
local start=$(date +%s)
local res=0
set +e
( set -e ; $@ )
res=$?
set -e
local end=$(date +%s)
local delta=$(expr $end - $start)
echo "$name took $delta seconds"
printf "%15s | $delta" > ci-timings
return $res
}
function usage() {
cat <<EOF
$0 - GHC continuous integration driver
Common Modes:
usage Show this usage message.
setup Prepare environment for a build.
configure Run ./configure.
clean Clean the tree
shell Run an interactive shell with a configured build environment.
save_test_output Generate unexpected-test-output.tar.gz
save_cache Preserve the cabal cache
Hadrian build system
build_hadrian Build GHC via the Hadrian build system
test_hadrian Test GHC via the Hadrian build system
Environment variables affecting both build systems:
CROSS_TARGET Triple of cross-compilation target.
VERBOSE Set to non-empty for verbose build output
RUNTEST_ARGS Arguments passed to runtest.py
MSYSTEM (Windows-only) Which platform to build from (CLANG64).
IGNORE_PERF_FAILURES
Whether to ignore perf failures (one of "increases",
"decreases", or "all")
HERMETIC Take measures to avoid looking at anything in \$HOME
CONFIGURE_ARGS Arguments passed to configure script.
CONFIGURE_WRAPPER Wrapper for the configure script (e.g. Emscripten's emconfigure).
ENABLE_NUMA Whether to enable numa support for the build (disabled by default)
INSTALL_CONFIGURE_ARGS
Arguments passed to the binary distribution configure script
during installation of test toolchain.
NIX_SYSTEM On Darwin, the target platform of the desired toolchain
(either "x86_64-darwin" or "aarch64-darwin")
NO_BOOT Whether to run ./boot or not, used when testing the source dist
Environment variables determining build configuration of Hadrian system:
BUILD_FLAVOUR Which flavour to build.
REINSTALL_GHC Build and test a reinstalled "stage3" ghc built using cabal-install
This tests the "reinstall" configuration
CROSS_EMULATOR The emulator to use for testing of cross-compilers.
Environment variables determining bootstrap toolchain (Linux):
GHC Path of GHC executable to use for bootstrapping.
CABAL Path of cabal-install executable to use for bootstrapping.
ALEX Path of alex executable to use for bootstrapping.
HAPPY Path of happy executable to use for bootstrapping.
Environment variables determining bootstrap toolchain (non-Linux):
GHC_VERSION Which GHC version to fetch for bootstrapping.
CABAL_INSTALL_VERSION
Cabal-install version to fetch for bootstrapping.
EOF
}
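# Illustrative only: a typical CI job drives this script through a sequence of
# the modes above, roughly as follows (assuming the environment variables
# documented in the usage text, e.g. BUILD_FLAVOUR and BIN_DIST_NAME, are set):
#
#   .gitlab/ci.sh setup
#   .gitlab/ci.sh configure
#   .gitlab/ci.sh build_hadrian
#   .gitlab/ci.sh test_hadrian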
function setup_locale() {
# Musl doesn't provide locale support at all...
if ! which locale > /dev/null; then
info "No locale executable. Skipping locale setup..."
return
fi
# BSD grep terminates early with -q, consequently locale -a will get a
# SIGPIPE and the pipeline will fail with pipefail.
shopt -o -u pipefail
if locale -a | grep -q C.UTF-8; then
# Debian
export LANG=C.UTF-8
elif locale -a | grep -q C.utf8; then
# Fedora calls it this
export LANG=C.utf8
elif locale -a | grep -q en_US.UTF-8; then
# Centos doesn't have C.UTF-8
export LANG=en_US.UTF-8
elif locale -a | grep -q en_US.utf8; then
# Centos doesn't have C.UTF-8
export LANG=en_US.utf8
else
error "Failed to find usable locale"
info "Available locales:"
locale -a
fail "No usable locale, aborting..."
fi
info "Using locale $LANG..."
export LC_ALL=$LANG
shopt -o -s pipefail
}
function mingw_init() {
case "$MSYSTEM" in
CLANG64)
target_triple="x86_64-unknown-mingw32"
boot_triple="x86_64-unknown-mingw32" # triple of bootstrap GHC
;;
*)
fail "win32-init: Unknown MSYSTEM $MSYSTEM"
;;
esac
# Bring mingw toolchain into PATH.
# This is extracted from /etc/profile since this script inexplicably fails to
# run under gitlab-runner.
# shellcheck disable=SC1091
source /etc/msystem
MINGW_MOUNT_POINT="${MINGW_PREFIX}"
PATH="$MINGW_MOUNT_POINT/bin:$PATH"
# We always use mingw64 Python to avoid path length issues like #17483.
export PYTHON="/mingw64/bin/python3"
# And need to use sphinx-build from the environment
export SPHINXBUILD="/mingw64/bin/sphinx-build.exe"
}
# This will contain GHC's local native toolchain
toolchain="$TOP/toolchain"
mkdir -p "$toolchain/bin"
PATH="$toolchain/bin:$PATH"
export METRICS_FILE="$TOP/performance-metrics.tsv"
cores="$(mk/detect-cpu-count.sh)"
# Use a local temporary directory to ensure that concurrent builds don't
# interfere with one another
mkdir -p "$TOP/tmp"
export TMP="$TOP/tmp"
export TEMP="$TOP/tmp"
function show_tool() {
local tool="$1"
info "$tool = ${!tool}"
${!tool} --version
}
function set_toolchain_paths() {
case "$(uname -m)-$(uname)" in
# Linux toolchains are included in the Docker image
*-Linux) toolchain_source="env" ;;
# Darwin toolchains are provided via .gitlab/darwin/toolchain.nix
*-Darwin) toolchain_source="nix" ;;
*) toolchain_source="extracted" ;;
esac
case "$toolchain_source" in
extracted)
# These are populated by setup_toolchain
GHC="$toolchain/bin/ghc$exe"
CABAL="$toolchain/bin/cabal$exe"
HAPPY="$toolchain/bin/happy$exe"
ALEX="$toolchain/bin/alex$exe"
if [ "$(uname)" = "FreeBSD" ]; then
GHC=/usr/local/bin/ghc
fi
;;
nix)
if [[ ! -f toolchain.sh ]]; then
case "$NIX_SYSTEM" in
x86_64-darwin|aarch64-darwin) ;;
*) fail "unknown NIX_SYSTEM" ;;
esac
info "Building toolchain for $NIX_SYSTEM"
nix-build --quiet .gitlab/darwin/toolchain.nix --argstr system "$NIX_SYSTEM" -o toolchain.sh
fi
source toolchain.sh
;;
env)
# These are generally set by the Docker image but
# we provide these handy fallbacks in case the
# script isn't run from within a GHC CI docker image.
: ${GHC:=$(which ghc)}
: ${CABAL:=$(which cabal)}
: ${HAPPY:=$(which happy)}
: ${ALEX:=$(which alex)}
;;
*) fail "bad toolchain_source"
esac
export GHC
export CABAL
export HAPPY
export ALEX
if [[ "${CROSS_TARGET:-}" == *"wasm"* ]]; then
source "/home/ghc/.ghc-wasm/env"
fi
}
function cabal_update() {
# In principle -w shouldn't be necessary here but with
# cabal-install 3.8.1.0 it is, due to cabal#8447.
run "$CABAL" update -w "$GHC" "hackage.haskell.org,${HACKAGE_INDEX_STATE}"
}
# Extract GHC toolchain
function setup() {
echo "=== TIMINGS ===" > ci-timings
if [ -d "$CABAL_CACHE" ]; then
info "Extracting cabal cache from $CABAL_CACHE to $CABAL_DIR..."
mkdir -p "$CABAL_DIR"
cp -Rf "$CABAL_CACHE"/* "$CABAL_DIR"
fi
case $toolchain_source in
extracted) time_it "setup" setup_toolchain ;;
*) ;;
esac
cabal_update || fail "cabal update failed"
# Make sure that git works
git config user.email "ghc-ci@gitlab-haskell.org"
git config user.name "GHC GitLab CI"
info "====================================================="
info "Toolchain versions"
info "====================================================="
show_tool GHC
show_tool CABAL
show_tool HAPPY
show_tool ALEX
info "====================================================="
info "ghc --info"
info "====================================================="
$GHC --info
}
function fetch_ghc() {
if [ ! -e "$GHC" ]; then
local v="$GHC_VERSION"
if [[ -z "$v" ]]; then
fail "neither GHC nor GHC_VERSION are not set"
fi
start_section "fetch GHC"
url="https://downloads.haskell.org/~ghc/${GHC_VERSION}/ghc-${GHC_VERSION}-${boot_triple}.tar.xz"
info "Fetching GHC binary distribution from $url..."
curl "$url" > ghc.tar.xz || fail "failed to fetch GHC binary distribution"
$TAR -xJf ghc.tar.xz || fail "failed to extract GHC binary distribution"
case "$(uname)" in
MSYS_*|MINGW*)
cp -r ghc-${GHC_VERSION}*/* "$toolchain"
;;
*)
pushd ghc-${GHC_VERSION}*
./configure --prefix="$toolchain"
"$MAKE" install
popd
;;
esac
rm -Rf "ghc-${GHC_VERSION}" ghc.tar.xz
end_section "fetch GHC"
fi
}
function fetch_cabal() {
if [ ! -e "$CABAL" ]; then
local v="$CABAL_INSTALL_VERSION"
if [[ -z "$v" ]]; then
fail "neither CABAL nor CABAL_INSTALL_VERSION are not set"
fi
start_section "fetch cabal"
case "$(uname)" in
# N.B. Windows uses zip whereas all others use .tar.xz
MSYS_*|MINGW*)
case "$MSYSTEM" in
CLANG64) cabal_arch="x86_64" ;;
*) fail "unknown MSYSTEM $MSYSTEM" ;;
esac
url="https://downloads.haskell.org/~cabal/cabal-install-$v/cabal-install-$v-$cabal_arch-windows.zip"
info "Fetching cabal binary distribution from $url..."
curl "$url" > "$TMP/cabal.zip"
unzip "$TMP/cabal.zip"
mv cabal.exe "$CABAL"
;;
*)
local base_url="https://downloads.haskell.org/~cabal/cabal-install-$v/"
case "$(uname)" in
Darwin) cabal_url="$base_url/cabal-install-$v-x86_64-apple-darwin17.7.0.tar.xz" ;;
FreeBSD) cabal_url="$base_url/cabal-install-$v-x86_64-freebsd14.tar.xz" ;;
*) fail "don't know where to fetch cabal-install for $(uname)"
esac
echo "Fetching cabal-install from $cabal_url"
curl "$cabal_url" > cabal.tar.xz
tmp="$(tar -tJf cabal.tar.xz | head -n1)"
$TAR -xJf cabal.tar.xz
# Check if the bindist has directory structure
if [[ "$tmp" = "cabal" ]]; then
mv cabal "$toolchain/bin"
else
mv "$tmp/cabal" "$toolchain/bin"
fi
;;
esac
end_section "fetch cabal"
fi
}
# For non-Docker platforms we prepare the bootstrap toolchain
# here. For Docker platforms this is done in the Docker image
# build.
function setup_toolchain() {
fetch_ghc
fetch_cabal
cabal_update
local cabal_install="$CABAL v2-install \
--with-compiler=$GHC \
--index-state=$HACKAGE_INDEX_STATE \
--installdir=$toolchain/bin \
--ignore-project \
--overwrite-policy=always"
# Avoid symlinks on Windows
case "$(uname)" in
MSYS_*|MINGW*) cabal_install="$cabal_install --install-method=copy" ;;
*) ;;
esac
info "Building happy..."
$cabal_install happy --constraint="happy>=$MIN_HAPPY_VERSION"
info "Building alex..."
$cabal_install alex --constraint="alex>=$MIN_ALEX_VERSION"
}
function cleanup_submodules() {
start_section "clean submodules"
if [ -d .git ]; then
info "Cleaning submodules..."
# On Windows submodules can inexplicably get into funky states where git
# believes that the submodule is initialized yet its associated repository
# is not valid. Avoid failing in this case with the following insanity.
git submodule sync || git submodule deinit --force --all
git submodule update --init
git submodule foreach git clean -xdf
else
info "Not cleaning submodules, not in a git repo"
fi;
end_section "clean submodules"
}
function configure() {
case "${CONFIGURE_WRAPPER:-}" in
emconfigure) source "$EMSDK/emsdk_env.sh" ;;
*) ;;
esac
if [[ -z "${NO_BOOT:-}" ]]; then
start_section "booting"
run python3 boot
end_section "booting"
fi
read -r -a args <<< "${CONFIGURE_ARGS:-}"
if [[ -n "${target_triple:-}" ]]; then
args+=("--target=$target_triple")
fi
if [[ -n "${ENABLE_NUMA:-}" ]]; then
args+=("--enable-numa")
else
args+=("--disable-numa")
fi
if [[ -n ${HAPPY:-} ]]; then
args+=("HAPPY=$HAPPY")
fi
if [[ -n ${ALEX:-} ]]; then
args+=("ALEX=$ALEX")
fi
start_section "configuring"
# See https://stackoverflow.com/questions/7577052 for a rationale for the
# args[@] symbol-soup below.
run ${CONFIGURE_WRAPPER:-} ./configure \
--enable-tarballs-autodownload \
"${args[@]+"${args[@]}"}" \
GHC="$GHC" \
|| ( cat config.log; fail "configure failed" )
end_section "configuring"
}
function fetch_perf_notes() {
info "Fetching perf notes..."
"$TOP/.gitlab/test-metrics.sh" pull
}
function push_perf_notes() {
if [[ -z "${TEST_ENV:-}" ]]; then
return
fi
# TODO: Remove this check, see #25299
# It is easy to forget to update this when testing a new cross platform
if [[ -n "${CROSS_TARGET:-}" ]] && [[ "${CROSS_TARGET:-}" != *"javascript"* ]] && [[ "${CROSS_TARGET:-}" != *"wasm"* ]]; then
info "Can't test cross-compiled build."
return
fi
info "Pushing perf notes..."
"$TOP/.gitlab/test-metrics.sh" push
}
# Figure out which commit should be used by the testsuite driver as a
# performance baseline. See Note [The CI Story].
function determine_metric_baseline() {
if [ -n "${CI_MERGE_REQUEST_DIFF_BASE_SHA:-}" ]; then
PERF_BASELINE_COMMIT="$CI_MERGE_REQUEST_DIFF_BASE_SHA"
export PERF_BASELINE_COMMIT
info "Using $PERF_BASELINE_COMMIT for performance metric baseline..."
fi
}
function check_msys2_deps() {
# Ensure that GHC on Windows doesn't have any dynamic dependencies on msys2
case "$(uname)" in
MSYS_*|MINGW*)
sysroot="$(cygpath "$SYSTEMROOT")"
PATH="$sysroot/System32:$sysroot;$sysroot/Wbem" $@ \
|| fail "'$@' failed; there may be unwanted dynamic dependencies."
;;
esac
}
# If RELEASE_JOB = yes then we skip builds with a validate flavour.
# This has the effect of
# (1) Skipping validate jobs when trying to do release builds
# (2) Ensuring we don't accidentally build release builds with the validate flavour.
#
# We should never try to build a validate build in a release pipeline, so this is
# very defensive in case we have made a mistake somewhere.
function check_release_build() {
if [ "${RELEASE_JOB:-}" == "yes" ] && [[ "${BUILD_FLAVOUR:-}" == *"validate"* ]]
then
info "Exiting build because this is a validate build in a release job"
exit 0;
fi
}
function build_hadrian() {
if [ -z "${BIN_DIST_NAME:-}" ]; then
fail "BIN_DIST_NAME not set"
fi
if [ -n "${BIN_DIST_PREP_TAR_COMP:-}" ]; then
fail "BIN_DIST_PREP_TAR_COMP must not be set for hadrian (you mean BIN_DIST_NAME)"
fi
check_release_build
# Just to be sure, use the same hackage index state when building Hadrian.
echo "index-state: $HACKAGE_INDEX_STATE" > hadrian/cabal.project.local
# We can safely enable parallel compression for x64. By the time
# hadrian calls tar/xz to produce bindist, there's no other build
# work taking place.
if [[ "${CI_JOB_NAME:-}" != *"i386"* ]]; then
export XZ_OPT="${XZ_OPT:-} -T$cores"
fi
if [[ -n "${REINSTALL_GHC:-}" ]]; then
run_hadrian build-cabal -V
else
case "$(uname)" in
MSYS_*|MINGW*)
run_hadrian test:all_deps reloc-binary-dist -V
mv _build/reloc-bindist/ghc*.tar.xz "$BIN_DIST_NAME.tar.xz"
;;
*)
run_hadrian test:all_deps binary-dist -V
mv _build/bindist/ghc*.tar.xz "$BIN_DIST_NAME.tar.xz"
;;
esac
fi
}
# Runs `make DESTDIR=$1 install` and then
# merges the file tree to the actual destination $2,
# ensuring that `DESTDIR` is properly honoured by the
# build system
function make_install_destdir() {
local destdir=$1
local instdir=$2
mkdir -p "$destdir"
mkdir -p "$instdir"
run "$MAKE" DESTDIR="$destdir" install || fail "make install failed"
# check for empty dir portably
# https://superuser.com/a/667100
if find "$instdir" -mindepth 1 -maxdepth 1 | read; then
fail "$instdir is not empty!"
fi
info "merging file tree from $destdir to $instdir"
cp -a "$destdir/$instdir"/* "$instdir"/
"$instdir"/bin/${cross_prefix}ghc-pkg recache
}
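# Illustrative usage, mirroring the call made in install_bindist below:
#
#   make_install_destdir "$TOP"/destdir "$TOP/_build/install"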
# install the binary distribution in directory $1 to $2.
function install_bindist() {
case "${CONFIGURE_WRAPPER:-}" in
emconfigure) source "$EMSDK/emsdk_env.sh" ;;
*) ;;
esac
local bindist="$1"
local instdir="$2"
pushd "$bindist"
case "$(uname)" in
MSYS_*|MINGW*)
mkdir -p "$instdir"
cp -a * "$instdir"
;;
*)
read -r -a args <<< "${INSTALL_CONFIGURE_ARGS:-}"
# FIXME: The bindist configure script shouldn't need to be reminded of
# the target platform. See #21970.
if [ -n "${CROSS_TARGET:-}" ]; then
args+=( "--target=$CROSS_TARGET" "--host=$CROSS_TARGET" )
fi
run ${CONFIGURE_WRAPPER:-} ./configure \
--prefix="$instdir" \
"${args[@]+"${args[@]}"}" || fail "bindist configure failed"
make_install_destdir "$TOP"/destdir "$instdir"
# And check the `--info` of the installed compiler, sometimes useful in CI log.
"$instdir"/bin/ghc --info
;;
esac
popd
}
function test_hadrian() {
check_msys2_deps _build/stage1/bin/ghc --version
check_release_build
# Ensure that statically-linked builds are actually static
if [[ "${BUILD_FLAVOUR}" = *static* ]]; then
bad_execs=""
for binary in _build/stage1/bin/*; do
if ldd "${binary}" &> /dev/null; then
warn "${binary} is not static!"
ldd "${binary}"
echo
bad_execs="$bad_execs $binary"
fi
done
if [ -n "$bad_execs" ]; then
fail "the following executables contain dynamic-object references: $bad_execs"
fi
fi
if [[ "${CROSS_EMULATOR:-}" == "NOT_SET" ]]; then
info "Cannot test cross-compiled build without CROSS_EMULATOR being set."
return
# special case for JS backend
elif [ -n "${CROSS_TARGET:-}" ] && [ "${CROSS_EMULATOR:-}" == "js-emulator" ]; then
# The JS backend doesn't support CROSS_EMULATOR logic yet
unset CROSS_EMULATOR
# run "hadrian test" directly, not using the bindist, even though it did get installed.
# This is a temporary solution, See !9515 for the status of hadrian support.
run_hadrian \
test \
--summary-junit=./junit.xml \
--test-have-intree-files \
--docs=none \
"runtest.opts+=${RUNTEST_ARGS:-}" \
"runtest.opts+=--unexpected-output-dir=$TOP/unexpected-test-output" \
|| fail "cross-compiled hadrian main testsuite"
elif [[ -n "${CROSS_TARGET:-}" ]] && [[ "${CROSS_TARGET:-}" == *"wasm"* ]]; then
run_hadrian \
test \
--summary-junit=./junit.xml \
"runtest.opts+=${RUNTEST_ARGS:-}" \
"runtest.opts+=--unexpected-output-dir=$TOP/unexpected-test-output" \
|| fail "hadrian main testsuite targetting $CROSS_TARGET"
elif [ -n "${CROSS_TARGET:-}" ]; then
local instdir="$TOP/_build/install"
local test_compiler="$instdir/bin/${cross_prefix}ghc$exe"
install_bindist _build/bindist/ghc-*/ "$instdir"
echo 'main = putStrLn "hello world"' > expected
run "$test_compiler" -package ghc "$TOP/.gitlab/hello.hs" -o hello
${CROSS_EMULATOR:-} ./hello > actual
run diff expected actual
elif [[ -n "${REINSTALL_GHC:-}" ]]; then
run_hadrian \
test \
--test-root-dirs=testsuite/tests/stage1 \
--test-compiler=stage-cabal \
--test-root-dirs=testsuite/tests/perf \
--test-root-dirs=testsuite/tests/typecheck \
"runtest.opts+=${RUNTEST_ARGS:-}" \
"runtest.opts+=--unexpected-output-dir=$TOP/unexpected-test-output" \
|| fail "hadrian cabal-install test"
else
local instdir="$TOP/_build/install"
local test_compiler="$instdir/bin/${cross_prefix}ghc$exe"
install_bindist _build/bindist/ghc-*/ "$instdir"
if [[ "${WINDOWS_HOST}" == "no" ]] && [ -z "${CROSS_TARGET:-}" ]
then
run_hadrian \
test \
--test-root-dirs=testsuite/tests/stage1 \
--test-compiler=stage1 \
"runtest.opts+=${RUNTEST_ARGS:-}" || fail "hadrian stage1 test"
info "STAGE1_TEST=$?"
fi
# Ensure the resulting compiler has the correct bignum-flavour,
# except for cross-compilers as they may not support the interpreter
if [ -z "${CROSS_TARGET:-}" ]
then
test_compiler_backend=$(${test_compiler} -e "GHC.Num.Backend.backendName")
if [ $test_compiler_backend != "\"$BIGNUM_BACKEND\"" ]; then
fail "Test compiler has a different BIGNUM_BACKEND ($test_compiler_backend) than requested ($BIGNUM_BACKEND)"
fi
fi
# If we are doing a release job, check the compiler can build a profiled executable
if [ "${RELEASE_JOB:-}" == "yes" ]; then
echo "main = print ()" > proftest.hs
run ${test_compiler} -prof proftest.hs || fail "hadrian profiled libs test"
rm proftest.hs
fi
run_hadrian \
test \
--summary-junit=./junit.xml \
--test-have-intree-files \
--test-compiler="${test_compiler}" \
"runtest.opts+=${RUNTEST_ARGS:-}" \
"runtest.opts+=--unexpected-output-dir=$TOP/unexpected-test-output" \
|| fail "hadrian main testsuite"
info "STAGE2_TEST=$?"
fi
}
function summarise_hi_files() {
hi_files=$(find . -type f -name "*.hi" | sort)
for iface in $hi_files; do echo "$iface $($HC --show-iface "$iface" | grep " ABI hash:")"; done | tee $OUT/abis
for iface in $hi_files; do echo "$iface $($HC --show-iface "$iface" | grep " interface hash:")"; done | tee $OUT/interfaces
for iface in $hi_files; do
fname="$OUT/$(dirname "$iface")"
mkdir -p "$fname"
$HC --show-iface "$iface" > "$OUT/$iface"
done
}
function summarise_o_files() {
OBJDUMP=$(if test "$(uname)" == "Darwin"; then echo "objdump -m"; else echo "objdump"; fi)
o_files=$(find . -type f -name "*.o" | sort)
for o in $o_files; do
fname="$OUT/objs/$(dirname "$o")"
mkdir -p "$fname"
# Save object dumps for later comparison, dropping the first line, which prints the file path
$OBJDUMP --all-headers "$o" | tail -n+2 > "$OUT/objs/$o.all-headers"
$OBJDUMP --disassemble-all "$o" | tail -n+2 > "$OUT/objs/$o.disassemble-all"
done
}
function cabal_abi_test() {
if [ -z "$OUT" ]; then
fail "OUT not set"
fi
cp -r libraries/Cabal $DIR
pushd $DIR
echo $PWD
start_section "Cabal test: $OUT"
mkdir -p "$OUT"
"$HC" \
-hidir tmp -odir tmp -fforce-recomp -haddock \
-iCabal/Cabal/src -XNoPolyKinds Distribution.Simple -j"$cores" \
-fobject-determinism \
"$@" 2>&1 | sed '1d' | tee $OUT/log
summarise_hi_files
summarise_o_files
popd
end_section "Cabal test: $OUT"
}
function cabal_test() {
if [ -z "$OUT" ]; then
fail "OUT not set"
fi
start_section "Cabal test: $OUT"
mkdir -p "$OUT"
run "$HC" \
-hidir tmp -odir tmp -fforce-recomp \
-dumpdir "$OUT/dumps" -ddump-timings \
+RTS --machine-readable "-t$OUT/rts.log" -RTS \
-ilibraries/Cabal/Cabal/src -XNoPolyKinds Distribution.Simple \
"$@" 2>&1 | tee $OUT/log
rm -Rf tmp
end_section "Cabal test: $OUT"
}
function run_perf_test() {
if [ -z "$HC" ]; then
fail "HC not set"
fi
mkdir -p out
git -C libraries/Cabal/ rev-parse HEAD > out/cabal_commit
$HC --print-project-git-commit-id > out/ghc_commit
OUT=out/Cabal-O0 cabal_test -O0
OUT=out/Cabal-O1 cabal_test -O1
OUT=out/Cabal-O2 cabal_test -O2
}
function check_interfaces(){
difference=$(diff "$1/$3" "$2/$3") || warn "diff failed"
if [ -z "$difference" ]
then
info "$1 and $2 $3 match"
else
echo $difference
for line in $(echo "$difference" | tr ' ' '\n' | grep ".hi" | sort | uniq); do
diff "$1/$line" "$2/$line"
done
fail "$4"
fi
}
function check_objects(){
# Big fast check
if diff -r "$1" "$2"
then
echo "Objects are the same"
else
echo "--------------------------------------------------------------------------------"
echo "Comparing all objects (1. headers, 2. disassembly). Stopping at first failure..."
echo "--------------------------------------------------------------------------------"
pushd "$1" >/dev/null
OBJ_DUMPS=$(find . -type f -name "*.all-headers" -or -name "*.disassemble-all")
popd >/dev/null
for dump in $OBJ_DUMPS
do
if diff "$1/$dump" "$2/$dump"
then
fail "Mismatched object: $dump"
fi
done
fail "Some objects are mismatched, but theres no diff with --all-headers or --disassemble-all. Perhaps try objdump -s"
fi
}
function abi_test() {
for i in {1..10}; do info "iteration $i"; run_abi_test; done
}
function run_abi_test() {
if [ -z "$HC" ]; then
fail "HC not set"
fi
mkdir -p out
OUT="$PWD/out/run1" DIR=$(mktemp -d XXXX-looooooooong) cabal_abi_test -O1 -haddock
# Count uniques in reverse in one of the runs to expose more non-determinism
OUT="$PWD/out/run2" DIR=$(mktemp -d XXXX-short) cabal_abi_test -O1 -haddock -dunique-increment=-1 -dinitial-unique=16777215
check_interfaces out/run1 out/run2 abis "Mismatched ABI hash"
check_interfaces out/run1 out/run2 interfaces "Mismatched interface hashes"
check_objects out/run1 out/run2
}
function save_test_output() {
tar -czf unexpected-test-output.tar.gz unexpected-test-output
}
function save_cache () {
info "Storing cabal cache from $CABAL_DIR to $CABAL_CACHE..."
rm -Rf "$CABAL_CACHE"
cp -Rf "$CABAL_DIR" "$CABAL_CACHE"
}
function clean() {
rm -R tmp
run rm -Rf _build
}
function run_hadrian() {
if [ -z "${BUILD_FLAVOUR:-}" ]; then
fail "BUILD_FLAVOUR not set"
fi
read -r -a args <<< "${HADRIAN_ARGS:-}"
if [ -n "${VERBOSE:-}" ]; then args+=("-V"); fi
# Before running the compiler, unset GitLab CI environment variables as these
# can destabilise the performance tests (see #20341)
(unset $(compgen -v | grep CI_*);
run "${HADRIAN_PATH:-hadrian/build-cabal}" \
--flavour="$BUILD_FLAVOUR" \
-j"$cores" \
--broken-test="${BROKEN_TESTS:-}" \
--bignum=$BIGNUM_BACKEND \
"${args[@]+"${args[@]}"}" \
"$@")
}
# A convenience function to allow debugging in the CI environment.
function shell() {
local cmd="${@: 1}"
if [ -z "$cmd" ]; then
cmd="bash -i"
fi
run $cmd
}
function lint_author(){
base=$1
head=$2
for email in $(git log --format='%ae' $base..$head); do
if [ $email == "ghc-ci@gitlab-haskell.org" ];
then
fail "Commit has GHC CI author, please amend the author information."
fi
done
}
function abi_of(){
DIR=$(realpath $1)
mkdir -p "$OUT"
pushd $DIR
summarise_hi_files
popd
}
# Checks that the interfaces in folder $1 match the interfaces in folder $2
function compare_interfaces_of(){
OUT=$PWD/out/run1 abi_of $1
OUT=$PWD/out/run2 abi_of $2
check_interfaces out/run1 out/run2 abis "Mismatched ABI hash"
check_interfaces out/run1 out/run2 interfaces "Mismatched interface hashes"
}
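# Illustrative invocation via the mode dispatcher at the bottom of this script
# (the two directory arguments here are hypothetical):
#
#   .gitlab/ci.sh compare_interfaces_of _build_a/stage1/lib _build_b/stage1/lib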
setup_locale
# Platform-specific environment initialization
if [ -n "${HERMETIC:-}" ]; then
export CABAL_DIR="$TOP/cabal"
# We previously set HOME=/nonexistent but apparently nix wants $HOME to exist
# so sadly we must settle for someplace writable.
export HOME="$TOP/tmp-home"
else
BIN_DIST_NAME="${BIN_DIST_NAME:-}"
case "$(uname)" in
MSYS_*|MINGW*) CABAL_DIR="$APPDATA/cabal" ;;
*) CABAL_DIR="$HOME/.cabal" ;;
esac
fi
case "$(uname)" in
MSYS_*|MINGW*)
exe=".exe"
# N.B. cabal-install expects CABAL_DIR to be a Windows path
CABAL_DIR="$(cygpath -w "$CABAL_DIR")"
WINDOWS_HOST="yes"
;;
*)
exe=""
WINDOWS_HOST="no"
;;
esac
MAKE="make"
TAR="tar"
case "$(uname)" in
MSYS_*|MINGW*) mingw_init ;;
Darwin) boot_triple="x86_64-apple-darwin" ;;
FreeBSD)
boot_triple="x86_64-portbld-freebsd"
MAKE="gmake"
TAR="gtar"
;;
Linux) ;;
*) fail "uname $(uname) is not supported" ;;
esac
if [ -n "${CROSS_TARGET:-}" ]; then
info "Cross-compiling for $CROSS_TARGET..."
target_triple="$CROSS_TARGET"
cross_prefix="$target_triple-"
else
cross_prefix=""
fi
echo "Branch name ${CI_MERGE_REQUEST_SOURCE_BRANCH_NAME:-}"
# Ignore performance improvements in @marge-bot batches.
# See #19562.
if [ "${CI_MERGE_REQUEST_SOURCE_BRANCH_NAME:-}" == "wip/marge_bot_batch_merge_job" ]; then
if [ -z "${IGNORE_PERF_FAILURES:-}" ]; then
IGNORE_PERF_FAILURES="decreases"
echo "Ignoring perf failures"
fi
fi
echo "CI_COMMIT_BRANCH: ${CI_COMMIT_BRANCH:-}"
echo "CI_PROJECT_PATH: ${CI_PROJECT_PATH:-}"
if [ "${CI_COMMIT_BRANCH:-}" == "master" ] && [ "${CI_PROJECT_PATH:-}" == "ghc/ghc" ]; then
if [ -z "${IGNORE_PERF_FAILURES:-}" ]; then
IGNORE_PERF_FAILURES="decreases"
echo "Ignoring perf failures"
fi
fi
if [ -n "${IGNORE_PERF_FAILURES:-}" ]; then
RUNTEST_ARGS=( "${RUNTEST_ARGS[@]:-}" "--ignore-perf-failures=$IGNORE_PERF_FAILURES" )
fi
if [[ -z ${BIGNUM_BACKEND:-} ]]; then BIGNUM_BACKEND=gmp; fi
determine_metric_baseline
set_toolchain_paths
case ${1:-help} in
help|usage) usage ;;
setup) setup && cleanup_submodules ;;
configure) time_it "configure" configure ;;
build_hadrian) time_it "build" build_hadrian ;;
# N.B. Always push notes, even if the build fails. This is okay to do as the
# testsuite driver doesn't record notes for tests that fail due to
# correctness.
test_hadrian)
fetch_perf_notes
res=0
time_it "test" test_hadrian || res=$?
push_perf_notes
exit $res ;;
run_hadrian) shift; run_hadrian "$@" ;;
perf_test) run_perf_test ;;
abi_test) abi_test ;;
cabal_test) cabal_test ;;
lint_author) shift; lint_author "$@" ;;
compare_interfaces_of) shift; compare_interfaces_of "$@" ;;
clean) clean ;;
save_test_output) save_test_output ;;
save_cache) save_cache ;;
shell) shift; shell "$@" ;;
*) fail "unknown mode $1" ;;
esac
# Common bash utilities
# ----------------------
# Colors
BLACK="0;30"
GRAY="1;30"
RED="0;31"
LT_RED="1;31"
BROWN="0;33"
LT_BROWN="1;33"
GREEN="0;32"
LT_GREEN="1;32"
BLUE="0;34"
LT_BLUE="1;34"
PURPLE="0;35"
LT_PURPLE="1;35"
CYAN="0;36"
LT_CYAN="1;36"
WHITE="1;37"
LT_GRAY="0;37"
# GitLab Pipelines log section delimiters
# https://gitlab.com/gitlab-org/gitlab-foss/issues/14664
start_section() {
name="$1"
echo -e "section_start:$(date +%s):$name\015\033[0K"
}
end_section() {
name="$1"
echo -e "section_end:$(date +%s):$name\015\033[0K"
}
echo_color() {
local color="$1"
local msg="$2"
echo -e "\033[${color}m${msg}\033[0m"
}
error() { echo_color "${RED}" "$1"; }
warn() { echo_color "${LT_BROWN}" "$1"; }
info() { echo_color "${LT_BLUE}" "$1"; }
fail() { error "error: $1"; exit 1; }
function run() {
info "Running $*..."
"$@" || ( error "$* failed"; return 1; )
}
{
"niv": {
"branch": "master",
"description": "Easy dependency management for Nix projects",
"homepage": "https://github.com/nmattia/niv",
"owner": "nmattia",
"repo": "niv",
"rev": "e0ca65c81a2d7a4d82a189f1e23a48d59ad42070",
"sha256": "1pq9nh1d8nn3xvbdny8fafzw87mj7gsmp6pxkdl65w2g18rmcmzx",
"type": "tarball",
"url": "https://github.com/nmattia/niv/archive/e0ca65c81a2d7a4d82a189f1e23a48d59ad42070.tar.gz",
"url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz"
},
"nixpkgs": {
"branch": "nixos-unstable",
"description": "Nix Packages collection",
"homepage": "",
"owner": "nixos",
"repo": "nixpkgs",
"rev": "2893f56de08021cffd9b6b6dfc70fd9ccd51eb60",
"sha256": "1anwxmjpm21msnnlrjdz19w31bxnbpn4kgf93sn3npihi7wf4a8h",
"type": "tarball",
"url": "https://github.com/nixos/nixpkgs/archive/2893f56de08021cffd9b6b6dfc70fd9ccd51eb60.tar.gz",
"url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz"
}
}
# This file has been generated by Niv.
let
#
# The fetchers. fetch_<type> fetches specs of type <type>.
#
fetch_file = pkgs: name: spec:
let
name' = sanitizeName name + "-src";
in
if spec.builtin or true then
builtins_fetchurl { inherit (spec) url sha256; name = name'; }
else
pkgs.fetchurl { inherit (spec) url sha256; name = name'; };
fetch_tarball = pkgs: name: spec:
let
name' = sanitizeName name + "-src";
in
if spec.builtin or true then
builtins_fetchTarball { name = name'; inherit (spec) url sha256; }
else
pkgs.fetchzip { name = name'; inherit (spec) url sha256; };
fetch_git = name: spec:
let
ref =
if spec ? ref then spec.ref else
if spec ? branch then "refs/heads/${spec.branch}" else
if spec ? tag then "refs/tags/${spec.tag}" else
abort "In git source '${name}': Please specify `ref`, `tag` or `branch`!";
in
builtins.fetchGit { url = spec.repo; inherit (spec) rev; inherit ref; };
fetch_local = spec: spec.path;
fetch_builtin-tarball = name: throw
''[${name}] The niv type "builtin-tarball" is deprecated. You should instead use `builtin = true`.
$ niv modify ${name} -a type=tarball -a builtin=true'';
fetch_builtin-url = name: throw
''[${name}] The niv type "builtin-url" will soon be deprecated. You should instead use `builtin = true`.
$ niv modify ${name} -a type=file -a builtin=true'';
#
# Various helpers
#
# https://github.com/NixOS/nixpkgs/pull/83241/files#diff-c6f540a4f3bfa4b0e8b6bafd4cd54e8bR695
sanitizeName = name:
(
concatMapStrings (s: if builtins.isList s then "-" else s)
(
builtins.split "[^[:alnum:]+._?=-]+"
((x: builtins.elemAt (builtins.match "\\.*(.*)" x) 0) name)
)
);
# The set of packages used when specs are fetched using non-builtins.
mkPkgs = sources: system:
let
sourcesNixpkgs =
import (builtins_fetchTarball { inherit (sources.nixpkgs) url sha256; }) { inherit system; };
hasNixpkgsPath = builtins.any (x: x.prefix == "nixpkgs") builtins.nixPath;
hasThisAsNixpkgsPath = <nixpkgs> == ./.;
in
if builtins.hasAttr "nixpkgs" sources
then sourcesNixpkgs
else if hasNixpkgsPath && ! hasThisAsNixpkgsPath then
import <nixpkgs> {}
else
abort
''
Please specify either <nixpkgs> (through -I or NIX_PATH=nixpkgs=...) or
add a package called "nixpkgs" to your sources.json.
'';
# The actual fetching function.
fetch = pkgs: name: spec:
if ! builtins.hasAttr "type" spec then
abort "ERROR: niv spec ${name} does not have a 'type' attribute"
else if spec.type == "file" then fetch_file pkgs name spec
else if spec.type == "tarball" then fetch_tarball pkgs name spec
else if spec.type == "git" then fetch_git name spec
else if spec.type == "local" then fetch_local spec
else if spec.type == "builtin-tarball" then fetch_builtin-tarball name
else if spec.type == "builtin-url" then fetch_builtin-url name
else
abort "ERROR: niv spec ${name} has unknown type ${builtins.toJSON spec.type}";
# If the environment variable NIV_OVERRIDE_${name} is set, then use
# the path directly as opposed to the fetched source.
replace = name: drv:
let
saneName = stringAsChars (c: if isNull (builtins.match "[a-zA-Z0-9]" c) then "_" else c) name;
ersatz = builtins.getEnv "NIV_OVERRIDE_${saneName}";
in
if ersatz == "" then drv else
# this turns the string into an actual Nix path (for both absolute and
# relative paths)
if builtins.substring 0 1 ersatz == "/" then /. + ersatz else /. + builtins.getEnv "PWD" + "/${ersatz}";
# Ports of functions for older nix versions
# a Nix version of mapAttrs if the built-in doesn't exist
mapAttrs = builtins.mapAttrs or (
f: set: with builtins;
listToAttrs (map (attr: { name = attr; value = f attr set.${attr}; }) (attrNames set))
);
# https://github.com/NixOS/nixpkgs/blob/0258808f5744ca980b9a1f24fe0b1e6f0fecee9c/lib/lists.nix#L295
range = first: last: if first > last then [] else builtins.genList (n: first + n) (last - first + 1);
# https://github.com/NixOS/nixpkgs/blob/0258808f5744ca980b9a1f24fe0b1e6f0fecee9c/lib/strings.nix#L257
stringToCharacters = s: map (p: builtins.substring p 1 s) (range 0 (builtins.stringLength s - 1));
# https://github.com/NixOS/nixpkgs/blob/0258808f5744ca980b9a1f24fe0b1e6f0fecee9c/lib/strings.nix#L269
stringAsChars = f: s: concatStrings (map f (stringToCharacters s));
concatMapStrings = f: list: concatStrings (map f list);
concatStrings = builtins.concatStringsSep "";
# https://github.com/NixOS/nixpkgs/blob/8a9f58a375c401b96da862d969f66429def1d118/lib/attrsets.nix#L331
optionalAttrs = cond: as: if cond then as else {};
# fetchTarball version that is compatible between all the versions of Nix
builtins_fetchTarball = { url, name ? null, sha256 }@attrs:
let
inherit (builtins) lessThan nixVersion fetchTarball;
in
if lessThan nixVersion "1.12" then
fetchTarball ({ inherit url; } // (optionalAttrs (!isNull name) { inherit name; }))
else
fetchTarball attrs;
# fetchurl version that is compatible between all the versions of Nix
builtins_fetchurl = { url, name ? null, sha256 }@attrs:
let
inherit (builtins) lessThan nixVersion fetchurl;
in
if lessThan nixVersion "1.12" then
fetchurl ({ inherit url; } // (optionalAttrs (!isNull name) { inherit name; }))
else
fetchurl attrs;
# Create the final "sources" from the config
mkSources = config:
mapAttrs (
name: spec:
if builtins.hasAttr "outPath" spec
then abort
"The values in sources.json should not have an 'outPath' attribute"
else
spec // { outPath = replace name (fetch config.pkgs name spec); }
) config.sources;
# The "config" used by the fetchers
mkConfig =
{ sourcesFile ? if builtins.pathExists ./sources.json then ./sources.json else null
, sources ? if isNull sourcesFile then {} else builtins.fromJSON (builtins.readFile sourcesFile)
, system ? builtins.currentSystem
, pkgs ? mkPkgs sources system
}: rec {
# The sources, i.e. the attribute set of spec name to spec
inherit sources;
# The "pkgs" (evaluated nixpkgs) to use for e.g. non-builtin fetchers
inherit pkgs;
};
in
mkSources (mkConfig {}) // { __functor = _: settings: mkSources (mkConfig settings); }
{ system }:
let
sources = import ./nix/sources.nix;
nixpkgsSrc = sources.nixpkgs;
pkgs = import nixpkgsSrc { inherit system; };
hostPkgs = import nixpkgsSrc { };
in
let
hsPkgs = pkgs.haskellPackages;
alex = hsPkgs.alex;
happy = hsPkgs.happy;
targetTriple = pkgs.stdenv.targetPlatform.config;
ghcBindists = let version = ghc.version; in {
aarch64-darwin = hostPkgs.fetchurl {
url = "https://downloads.haskell.org/ghc/${version}/ghc-${version}-aarch64-apple-darwin.tar.xz";
sha256 = "sha256-c1GTMJf3/yiW/t4QL532EswD5JVlgA4getkfsxj4TaA=";
};
x86_64-darwin = hostPkgs.fetchurl {
url = "https://downloads.haskell.org/ghc/${version}/ghc-${version}-x86_64-apple-darwin.tar.xz";
sha256 = "sha256-LrYniMG0phsvyW6dhQC+3ompvzcxnwAe6GezEqqzoTQ=";
};
};
ghc = pkgs.stdenv.mkDerivation rec {
# Using 9.6.2 because of #24050
version = "9.6.2";
name = "ghc";
src = ghcBindists.${pkgs.stdenv.hostPlatform.system};
configureFlags = [
"CC=/usr/bin/clang"
"CLANG=/usr/bin/clang"
"AR=/usr/bin/ar"
"LLC=${llvm}/bin/llc"
"OPT=${llvm}/bin/opt"
"LLVMAS=${llvm_clang}/bin/clang"
"CONF_CC_OPTS_STAGE2=--target=${targetTriple}"
"CONF_CXX_OPTS_STAGE2=--target=${targetTriple}"
"CONF_GCC_LINKER_OPTS_STAGE2=--target=${targetTriple}"
];
buildPhase = "true";
# This is a horrible hack because the configure script invokes /usr/bin/clang
# without a `--target` flag. Then, depending on whether the `nix` binary itself is
# a native x86 or arm64 binary, /usr/bin/clang thinks it needs to run in
# x86 or arm64 mode.
# The correct answer for the check in question is the first one we try, so by replacing
# the condition with true we still select the right C++ standard library.
preConfigure = ''
sed "s/\"\$CC\" -o actest actest.o \''${1} 2>\/dev\/null/true/i" configure > configure.new
mv configure.new configure
chmod +x configure
cat configure
'';
# N.B. Work around #20253.
nativeBuildInputs = [ pkgs.gnused ];
postInstallPhase = ''
settings="$out/lib/ghc-${version}/settings"
sed -i -e "s%\"llc\"%\"${llvm}/bin/llc\"%" $settings
sed -i -e "s%\"opt\"%\"${llvm}/bin/opt\"%" $settings
sed -i -e "s%\"clang\"%\"/usr/bin/clang\"%" $settings
sed -i -e 's%("C compiler command", "")%("C compiler command", "/usr/bin/clang")%' $settings
sed -i -e 's%("C compiler flags", "")%("C compiler flags", "--target=${targetTriple}")%' $settings
sed -i -e 's%("C++ compiler flags", "")%("C++ compiler flags", "--target=${targetTriple}")%' $settings
sed -i -e 's%("C compiler link flags", "")%("C compiler link flags", "--target=${targetTriple}")%' $settings
'';
# Sanity check: verify that we can compile hello world.
doInstallCheck = true;
installCheckPhase = ''
unset DYLD_LIBRARY_PATH
$out/bin/ghc --info
cd $TMP
mkdir test-ghc; cd test-ghc
cat > main.hs << EOF
{-# LANGUAGE TemplateHaskell #-}
module Main where
main = putStrLn \$([|"yes"|])
EOF
$out/bin/ghc --make -v3 main.hs || exit 1
echo compilation ok
[ $(./main) == "yes" ]
'';
};
ourtexlive = with pkgs;
texlive.combine {
inherit (texlive)
scheme-medium collection-xetex fncychap titlesec tabulary varwidth
framed capt-of wrapfig needspace dejavu-otf helvetic upquote;
};
fonts = with pkgs; makeFontsConf { fontDirectories = [ dejavu_fonts ]; };
llvm = pkgs.llvm_15;
llvm_clang = pkgs.llvmPackages_15.clang-unwrapped;
in
pkgs.writeTextFile {
name = "toolchain";
text = ''
export PATH
PATH="${pkgs.autoconf}/bin:$PATH"
PATH="${pkgs.automake}/bin:$PATH"
export FONTCONFIG_FILE=${fonts}
export XELATEX="${ourtexlive}/bin/xelatex"
export MAKEINDEX="${ourtexlive}/bin/makeindex"
export HAPPY="${happy}/bin/happy"
export ALEX="${alex}/bin/alex"
export GHC="${ghc}/bin/ghc"
export LLC="${llvm}/bin/llc"
export OPT="${llvm}/bin/opt"
export LLVMAS="${llvm_clang}/bin/clang"
export SPHINXBUILD="${pkgs.python3Packages.sphinx}/bin/sphinx-build"
export CABAL_INSTALL="${pkgs.cabal-install}/bin/cabal"
export CABAL="$CABAL_INSTALL"
sdk_path="$(xcrun --sdk macosx --show-sdk-path)"
: ''${CONFIGURE_ARGS:=}
CONFIGURE_ARGS+="''${CONFIGURE_ARGS:+ }--with-ffi-libraries=$sdk_path/usr/lib --with-ffi-includes=$sdk_path/usr/include/ffi --build=${targetTriple}"
export CONFIGURE_ARGS
'';
}
Copyright (c) 2023, The GHC Developers
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following
disclaimer in the documentation and/or other materials provided
with the distribution.
* Neither the name of The GHC Developers nor the names of other
contributors may be used to endorse or promote products derived
from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# generate-ci
This is the generator for GHC's GitLab CI infrastructure. In particular, this
generates two outputs:
* `.gitlab/jobs.yaml`, which is a YAML (or, strictly speaking, JSON)
file which defines the bulk of the validation, nightly, and release jobs of
GHC's CI. This is committed to the GHC repository and must be updated
whenever `gen_ci.hs` is modified.
* `.gitlab/jobs-metadata.json`, which is a mapping between platforms and
produced binary distribution names used when producing `ghcup` metadata
for nightly pipeline artifacts (see the `.ghcup-metadata` job in
`/.gitlab-ci.yaml`).
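As a convenience, the generated metadata mapping can be inspected locally with
`jq` (illustrative; assumes `jq` is installed and the file has been generated):
```sh
jq . .gitlab/jobs-metadata.json
```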
## Modifying the CI configuration (nix)
The jobs are defined in `gen_ci.hs`. After modifying this you can run
```sh
nix run .gitlab/generate-ci#generate-jobs
```
from the top of the GHC repository to update the generated configuration.
## Modifying the CI configuration (without nix)
One can run `update-ci` without Nix as follows (assuming one has `jq`,
`cabal-install`, and GHC installed):
```sh
$ cabal build generate-ci
$ PATH="$(dirname $(cabal list-bin generate-ci)):$PATH"
$ ./generate-jobs
```
{
"nodes": {
"flake-utils": {
"inputs": {
"systems": "systems"
},
"locked": {
"lastModified": 1710146030,
"narHash": "sha256-SZ5L6eA7HJ/nmkzGG7/ISclqe6oZdOZTNoesiInkXPQ=",
"owner": "numtide",
"repo": "flake-utils",
"rev": "b1d9ab70662946ef0850d488da1c9019f3a9752a",
"type": "github"
},
"original": {
"owner": "numtide",
"repo": "flake-utils",
"type": "github"
}
},
"nixpkgs": {
"locked": {
"lastModified": 1724334015,
"narHash": "sha256-5sfvc0MswIRNdRWioUhG58rGKGn2o90Ck6l6ClpwQqA=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "6d204f819efff3d552a88d0a44b5aaaee172b784",
"type": "github"
},
"original": {
"id": "nixpkgs",
"type": "indirect"
}
},
"root": {
"inputs": {
"flake-utils": "flake-utils",
"nixpkgs": "nixpkgs"
}
},
"systems": {
"locked": {
"lastModified": 1681028828,
"narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
"owner": "nix-systems",
"repo": "default",
"rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
"type": "github"
},
"original": {
"owner": "nix-systems",
"repo": "default",
"type": "github"
}
}
},
"root": "root",
"version": 7
}
{
description = "GHC CI Generator";
inputs.flake-utils.url = "github:numtide/flake-utils";
outputs = { self, nixpkgs, flake-utils }:
flake-utils.lib.eachDefaultSystem (system:
let pkgs = nixpkgs.legacyPackages.${system}; in
{
packages = rec {
# The Haskell generator executable
generate-ci = pkgs.haskellPackages.callCabal2nix "generate-ci" ./. {};
# Wrapper scripts
generate-job-metadata = pkgs.runCommand "generate-job-metadata" {
nativeBuildInputs = with pkgs; [ makeWrapper ];
} ''
mkdir -p $out/bin
makeWrapper ${./generate-job-metadata} $out/bin/generate-job-metadata \
--prefix PATH : ${with pkgs; lib.makeBinPath [ generate-ci gitMinimal ]}
'';
generate-jobs = pkgs.runCommand "generate-jobs" {
nativeBuildInputs = with pkgs; [ makeWrapper ];
} ''
mkdir -p $out/bin
makeWrapper ${./generate-jobs} $out/bin/generate-jobs \
--prefix PATH : ${with pkgs; lib.makeBinPath [ generate-ci jq gitMinimal ]}
'';
default = generate-jobs;
};
apps = rec {
generate-jobs = flake-utils.lib.mkApp {
drv = self.packages.${system}.generate-jobs;
};
generate-job-metadata = flake-utils.lib.mkApp {
drv = self.packages.${system}.generate-job-metadata;
};
default = generate-jobs;
};
}
);
}
cabal-version: 3.0
name: generate-ci
version: 0.1.0.0
license: BSD-3-Clause
license-file: LICENSE
build-type: Simple
executable generate-ci
main-is: gen_ci.hs
ghc-options: -Wall
build-depends: base,
containers,
bytestring,
aeson >= 1.8.1
default-language: Haskell2010