Skip to content
Snippets Groups Projects

Compare revisions

Changes are shown as if the source revision was being merged into the target revision. Learn more about comparing revisions.

Source

Select target project
No results found

Target

Select target project
  • ghc/ghc
  • bgamari/ghc
  • syd/ghc
  • ggreif/ghc
  • watashi/ghc
  • RolandSenn/ghc
  • mpickering/ghc
  • DavidEichmann/ghc
  • carter/ghc
  • harpocrates/ghc
  • ethercrow/ghc
  • mijicd/ghc
  • adamse/ghc
  • alexbiehl/ghc
  • gridaphobe/ghc
  • trofi/ghc
  • supersven/ghc
  • ppk/ghc
  • ulysses4ever/ghc
  • AndreasK/ghc
  • ghuntley/ghc
  • shayne-fletcher-da/ghc
  • fgaz/ghc
  • yav/ghc
  • osa1/ghc
  • mbbx6spp/ghc
  • JulianLeviston/ghc
  • reactormonk/ghc
  • rae/ghc
  • takenobu-hs/ghc
  • michalt/ghc
  • andrewthad/ghc
  • hsyl20/ghc
  • scottgw/ghc
  • sjakobi/ghc
  • angerman/ghc
  • RyanGlScott/ghc
  • hvr/ghc
  • howtonotwin/ghc
  • chessai/ghc
  • m-renaud/ghc
  • brprice/ghc
  • stevehartdata/ghc
  • sighingnow/ghc
  • kgardas/ghc
  • ckoparkar/ghc
  • alp/ghc
  • smaeul/ghc
  • kakkun61/ghc
  • sykloid/ghc
  • newhoggy/ghc
  • toonn/ghc
  • nineonine/ghc
  • Phyx/ghc
  • ezyang/ghc
  • tweag/ghc
  • langston/ghc
  • ndmitchell/ghc
  • rockbmb/ghc
  • artempyanykh/ghc
  • mniip/ghc
  • mynguyenbmc/ghc
  • alexfmpe/ghc
  • crockeea/ghc
  • nh2/ghc
  • vaibhavsagar/ghc
  • phadej/ghc
  • Haskell-mouse/ghc
  • lolotp/ghc
  • spacekitteh/ghc
  • michaelpj/ghc
  • mgsloan/ghc
  • HPCohen/ghc
  • tmobile/ghc
  • radrow/ghc
  • simonmar/ghc
  • _deepfire/ghc
  • Ericson2314/ghc
  • leitao/ghc
  • fumieval/ghc
  • trac-isovector/ghc
  • cblp/ghc
  • xich/ghc
  • ciil/ghc
  • erthalion/ghc
  • xldenis/ghc
  • autotaker/ghc
  • haskell-wasm/ghc
  • kcsongor/ghc
  • agander/ghc
  • Baranowski/ghc
  • trac-dredozubov/ghc
  • 23Skidoo/ghc
  • iustin/ghc
  • ningning/ghc
  • josefs/ghc
  • kabuhr/ghc
  • gallais/ghc
  • dten/ghc
  • expipiplus1/ghc
  • Pluralia/ghc
  • rohanjr/ghc
  • intricate/ghc
  • kirelagin/ghc
  • Javran/ghc
  • DanielG/ghc
  • trac-mizunashi_mana/ghc
  • pparkkin/ghc
  • bollu/ghc
  • ntc2/ghc
  • jaspervdj/ghc
  • JoshMeredith/ghc
  • wz1000/ghc
  • zkourouma/ghc
  • code5hot/ghc
  • jdprice/ghc
  • tdammers/ghc
  • J-mie6/ghc
  • trac-lantti/ghc
  • ch1bo/ghc
  • cgohla/ghc
  • lucamolteni/ghc
  • acairncross/ghc
  • amerocu/ghc
  • chreekat/ghc
  • txsmith/ghc
  • trupill/ghc
  • typetetris/ghc
  • sergv/ghc
  • fryguybob/ghc
  • erikd/ghc
  • trac-roland/ghc
  • setupminimal/ghc
  • Friede80/ghc
  • SkyWriter/ghc
  • xplorld/ghc
  • abrar/ghc
  • obsidiansystems/ghc
  • Icelandjack/ghc
  • adinapoli/ghc
  • trac-matthewbauer/ghc
  • heatsink/ghc
  • dwijnand/ghc
  • Cmdv/ghc
  • alinab/ghc
  • pepeiborra/ghc
  • fommil/ghc
  • luochen1990/ghc
  • rlupton20/ghc
  • applePrincess/ghc
  • lehins/ghc
  • ronmrdechai/ghc
  • leeadam/ghc
  • harendra/ghc
  • mightymosquito1991/ghc
  • trac-gershomb/ghc
  • lucajulian/ghc
  • Rizary/ghc
  • VictorCMiraldo/ghc
  • jamesbrock/ghc
  • andrewdmeier/ghc
  • luke/ghc
  • pranaysashank/ghc
  • cocreature/ghc
  • hithroc/ghc
  • obreitwi/ghc
  • slrtbtfs/ghc
  • kaol/ghc
  • yairchu/ghc
  • Mathemagician98/ghc
  • trac-taylorfausak/ghc
  • leungbk/ghc
  • MichaWiedenmann/ghc
  • chris-martin/ghc
  • TDecki/ghc
  • adithyaov/ghc
  • trac-gelisam/ghc
  • Lysxia/ghc
  • complyue/ghc
  • bwignall/ghc
  • sternmull/ghc
  • sonika/ghc
  • leif/ghc
  • broadwaylamb/ghc
  • myszon/ghc
  • danbroooks/ghc
  • Mechachleopteryx/ghc
  • zardyh/ghc
  • trac-vdukhovni/ghc
  • OmarKhaledAbdo/ghc
  • arrowd/ghc
  • Bodigrim/ghc
  • matheus23/ghc
  • cardenaso11/ghc
  • trac-Athas/ghc
  • mb720/ghc
  • DylanZA/ghc
  • liff/ghc
  • typedrat/ghc
  • trac-claude/ghc
  • jbm/ghc
  • Gertjan423/ghc
  • PHO/ghc
  • JKTKops/ghc
  • kockahonza/ghc
  • msakai/ghc
  • Sir4ur0n/ghc
  • barambani/ghc
  • vishnu.c/ghc
  • dcoutts/ghc
  • trac-runeks/ghc
  • trac-MaxGabriel/ghc
  • lexi.lambda/ghc
  • strake/ghc
  • spavikevik/ghc
  • JakobBruenker/ghc
  • rmanne/ghc
  • gdziadkiewicz/ghc
  • ani/ghc
  • iliastsi/ghc
  • smunix/ghc
  • judah/ghc
  • blackgnezdo/ghc
  • emilypi/ghc
  • trac-bpfoley/ghc
  • muesli4/ghc
  • trac-gkaracha/ghc
  • Kleidukos/ghc
  • nek0/ghc
  • TristanCacqueray/ghc
  • dwulive/ghc
  • mbakke/ghc
  • arybczak/ghc
  • Yang123321/ghc
  • maksbotan/ghc
  • QuietMisdreavus/ghc
  • trac-olshanskydr/ghc
  • emekoi/ghc
  • samuela/ghc
  • josephcsible/ghc
  • dramforever/ghc
  • lpsmith/ghc
  • DenisFrezzato/ghc
  • michivi/ghc
  • jneira/ghc
  • jeffhappily/ghc
  • Ivan-Yudin/ghc
  • nakaji-dayo/ghc
  • gdevanla/ghc
  • galen/ghc
  • fendor/ghc
  • yaitskov/ghc
  • rcythr/ghc
  • awpr/ghc
  • jeremyschlatter/ghc
  • Aver1y/ghc
  • mitchellvitez/ghc
  • merijn/ghc
  • tomjaguarpaw1/ghc
  • trac-NoidedSuper/ghc
  • erewok/ghc
  • trac-junji.hashimoto/ghc
  • adamwespiser/ghc
  • bjaress/ghc
  • jhrcek/ghc
  • leonschoorl/ghc
  • lukasz-golebiewski/ghc
  • sheaf/ghc
  • last-g/ghc
  • carassius1014/ghc
  • eschwartz/ghc
  • dwincort/ghc
  • felixwiemuth/ghc
  • TimWSpence/ghc
  • marcusmonteirodesouza/ghc
  • WJWH/ghc
  • vtols/ghc
  • theobat/ghc
  • BinderDavid/ghc
  • ckoparkar0/ghc
  • alexander-kjeldaas/ghc
  • dme2/ghc
  • philderbeast/ghc
  • aaronallen8455/ghc
  • rayshih/ghc
  • benkard/ghc
  • mpardalos/ghc
  • saidelman/ghc
  • leiftw/ghc
  • ca333/ghc
  • bwroga/ghc
  • nmichael44/ghc
  • trac-crobbins/ghc
  • felixonmars/ghc
  • adityagupta1089/ghc
  • hgsipiere/ghc
  • treeowl/ghc
  • alexpeits/ghc
  • CraigFe/ghc
  • dnlkrgr/ghc
  • kerckhove_ts/ghc
  • cptwunderlich/ghc
  • eiais/ghc
  • hahohihu/ghc
  • sanchayan/ghc
  • lemmih/ghc
  • sehqlr/ghc
  • trac-dbeacham/ghc
  • luite/ghc
  • trac-f-a/ghc
  • vados/ghc
  • luntain/ghc
  • fatho/ghc
  • alexbiehl-gc/ghc
  • dcbdan/ghc
  • tvh/ghc
  • liam-ly/ghc
  • timbobbarnes/ghc
  • GovanifY/ghc
  • shanth2600/ghc
  • gliboc/ghc
  • duog/ghc
  • moxonsghost/ghc
  • zander/ghc
  • masaeedu/ghc
  • georgefst/ghc
  • guibou/ghc
  • nicuveo/ghc
  • mdebruijne/ghc
  • stjordanis/ghc
  • emiflake/ghc
  • wygulmage/ghc
  • frasertweedale/ghc
  • coot/ghc
  • aratamizuki/ghc
  • tsandstr/ghc
  • mrBliss/ghc
  • Anton-Latukha/ghc
  • tadfisher/ghc
  • vapourismo/ghc
  • Sorokin-Anton/ghc
  • basile-henry/ghc
  • trac-mightybyte/ghc
  • AbsoluteNikola/ghc
  • cobrien99/ghc
  • songzh/ghc
  • blamario/ghc
  • aj4ayushjain/ghc
  • trac-utdemir/ghc
  • tangcl/ghc
  • hdgarrood/ghc
  • maerwald/ghc
  • arjun/ghc
  • ratherforky/ghc
  • haskieLambda/ghc
  • EmilGedda/ghc
  • Bogicevic/ghc
  • eddiejessup/ghc
  • kozross/ghc
  • AlistairB/ghc
  • 3Rafal/ghc
  • christiaanb/ghc
  • trac-bit/ghc
  • matsumonkie/ghc
  • trac-parsonsmatt/ghc
  • chisui/ghc
  • jaro/ghc
  • trac-kmiyazato/ghc
  • davidsd/ghc
  • Tritlo/ghc
  • I-B-3/ghc
  • lykahb/ghc
  • AriFordsham/ghc
  • turion1/ghc
  • berberman/ghc
  • christiantakle/ghc
  • zyklotomic/ghc
  • trac-ocramz/ghc
  • CSEdd/ghc
  • doyougnu/ghc
  • mmhat/ghc
  • why-not-try-calmer/ghc
  • plutotulp/ghc
  • kjekac/ghc
  • Manvi07/ghc
  • teo/ghc
  • cactus/ghc
  • CarrieMY/ghc
  • abel/ghc
  • yihming/ghc
  • tsakki/ghc
  • jessicah/ghc
  • oliverbunting/ghc
  • meld/ghc
  • friedbrice/ghc
  • Joald/ghc
  • abarbu/ghc
  • DigitalBrains1/ghc
  • sterni/ghc
  • alexDarcy/ghc
  • hexchain/ghc
  • minimario/ghc
  • zliu41/ghc
  • tommd/ghc
  • jazcarate/ghc
  • peterbecich/ghc
  • alirezaghey/ghc
  • solomon/ghc
  • mikael.urankar/ghc
  • davjam/ghc
  • int-index/ghc
  • MorrowM/ghc
  • nrnrnr/ghc
  • Sonfamm/ghc-test-only
  • afzt1/ghc
  • nguyenhaibinh-tpc/ghc
  • trac-lierdakil/ghc
  • MichaWiedenmann1/ghc
  • jmorag/ghc
  • Ziharrk/ghc
  • trac-MitchellSalad/ghc
  • juampe/ghc
  • jwaldmann/ghc
  • snowleopard/ghc
  • juhp/ghc
  • normalcoder/ghc
  • ksqsf/ghc
  • trac-jberryman/ghc
  • roberth/ghc
  • 1ntEgr8/ghc
  • epworth/ghc
  • MrAdityaAlok/ghc
  • JunmingZhao42/ghc
  • jappeace/ghc
  • trac-Gabriel439/ghc
  • alt-romes/ghc
  • HugoPeters1024/ghc
  • 10ne1/ghc-fork
  • agentultra/ghc
  • Garfield1002/ghc
  • ChickenProp/ghc
  • clyring/ghc
  • MaxHearnden/ghc
  • jumper149/ghc
  • vem/ghc
  • ketzacoatl/ghc
  • Rosuavio/ghc
  • jackohughes/ghc
  • p4l1ly/ghc
  • konsumlamm/ghc
  • shlevy/ghc
  • torsten.schmits/ghc
  • andremarianiello/ghc
  • amesgen/ghc
  • googleson78/ghc
  • InfiniteVerma/ghc
  • uhbif19/ghc
  • yiyunliu/ghc
  • raehik/ghc
  • mrkun/ghc
  • telser/ghc
  • 1Jajen1/ghc
  • slotThe/ghc
  • WinstonHartnett/ghc
  • mpilgrem/ghc
  • dreamsmasher/ghc
  • schuelermine/ghc
  • trac-Viwor/ghc
  • undergroundquizscene/ghc
  • evertedsphere/ghc
  • coltenwebb/ghc
  • oberblastmeister/ghc
  • agrue/ghc
  • lf-/ghc
  • zacwood9/ghc
  • steshaw/ghc
  • high-cloud/ghc
  • SkamDart/ghc
  • PiDelport/ghc
  • maoif/ghc
  • RossPaterson/ghc
  • CharlesTaylor7/ghc
  • ribosomerocker/ghc
  • trac-ramirez7/ghc
  • daig/ghc
  • NicolasT/ghc
  • FinleyMcIlwaine/ghc
  • lawtonnichols/ghc
  • jmtd/ghc
  • ozkutuk/ghc
  • wildsebastian/ghc
  • lrzlin/ghc
  • tobias/ghc
  • fw/ghc
  • hawkinsw/ghc
  • type-dance/ghc
  • rui314/ghc
  • ocharles/ghc
  • wavewave/ghc
  • TheKK/ghc
  • nomeata/ghc
  • trac-csabahruska/ghc
  • jonathanjameswatson/ghc
  • L-as/ghc
  • Axman6/ghc
  • barracuda156/ghc
  • trac-jship/ghc
  • jake-87/ghc
  • meooow/ghc
  • rebeccat/ghc
  • hamana55/ghc
  • Enigmage/ghc
  • kokobd/ghc
  • agevelt/ghc
  • gshen42/ghc
  • chrismwendt/ghc
  • MangoIV/ghc
  • teto/ghc
  • Sookr1/ghc
  • trac-thomasjm/ghc
  • barci2/ghc-dev
  • trac-m4dc4p/ghc
  • dixonary/ghc
  • breakerzirconia/ghc
  • alexsio27444/ghc
  • glocq/ghc
  • sourabhxyz/ghc
  • ryantrinkle/ghc
  • Jade/ghc
  • scedfaliako/ghc
  • martijnbastiaan/ghc
  • trac-george.colpitts/ghc
  • ammarbinfaisal/ghc
  • mimi.vx/ghc
  • lortabac/ghc
  • trac-zyla/ghc
  • benbellick/ghc
  • aadaa-fgtaa/ghc
  • jvanbruegge/ghc
  • archbung/ghc
  • gilmi/ghc
  • mfonism/ghc
  • alex-mckenna/ghc
  • Ei30metry/ghc
  • DiegoDiverio/ghc
  • jorgecunhamendes/ghc
  • liesnikov/ghc
  • akrmn/ghc
  • trac-simplifierticks/ghc
  • jacco/ghc
  • rhendric/ghc
  • damhiya/ghc
  • ryndubei/ghc
  • DaveBarton/ghc
  • trac-Profpatsch/ghc
  • GZGavinZhao/ghc
  • ncfavier/ghc
  • jameshaydon/ghc
  • ajccosta/ghc
  • dschrempf/ghc
  • cydparser/ghc
  • LinuxUserGD/ghc
  • elodielander/ghc
  • facundominguez/ghc
  • psilospore/ghc
  • lachrimae/ghc
  • dylan-thinnes/ghc-type-errors-plugin
  • hamishmack/ghc
  • Leary/ghc
  • lzszt/ghc
  • lyokha/ghc
  • trac-glaubitz/ghc
  • Rewbert/ghc
  • andreabedini/ghc
  • Jasagredo/ghc
  • sol/ghc
  • OlegAlexander/ghc
  • trac-sthibaul/ghc
  • avdv/ghc
  • Wendaolee/ghc
  • ur4t/ghc
  • daylily/ghc
  • boltzmannrain/ghc
  • mmzk1526/ghc
  • trac-fizzixnerd/ghc
  • soulomoon/ghc
  • rwmjones/ghc
  • j14i/ghc
  • tracsis/ghc
  • gesh/ghc
  • flip101/ghc
  • eldritch-cookie/ghc
  • LemonjamesD/ghc
  • pgujjula/ghc
  • skeuchel/ghc
  • noteed/ghc
  • Torrekie/ghc
  • jlwoodwa/ghc
  • ayanamists/ghc
  • husong998/ghc
  • trac-edmundnoble/ghc
  • josephf/ghc
  • contrun/ghc
  • baulig/ghc
  • edsko/ghc
  • mzschr/ghc-issue-24732
  • ulidtko/ghc
  • Arsen/ghc
  • trac-sjoerd_visscher/ghc
  • crumbtoo/ghc
  • L0neGamer/ghc
  • DrewFenwick/ghc
  • benz0li/ghc
  • MaciejWas/ghc
  • jordanrule/ghc
  • trac-qqwy/ghc
  • LiamGoodacre/ghc
  • isomorpheme/ghc
  • trac-danidiaz/ghc
  • Kariim/ghc
  • taimoorzaeem/ghc
  • hololeap/ghc
  • ticat-fp/ghc
  • meritamen/ghc
  • criskell/ghc
  • trac-kraai/ghc
  • aergus/ghc
  • jdral/ghc
  • SamB/ghc
  • Tristian/ghc
  • ywgrit/ghc
  • KatsuPatrick/ghc
  • OsePedro/ghc
  • mpscholten/ghc
  • zaquest/ghc
  • fangyi-zhou/ghc
  • augyg/ghc
  • rkirkman/ghc
  • gulin.serge/ghc-windows-aarch64-bootstrap
  • iris/ghc
  • kwxm/ghc
  • maralorn/ghc
  • rafl/ghc
  • nikshalark/ghc
  • mrcjkb/ghc
  • blackheaven/ghc
  • laurenyim/ghc
  • bolt12/ghc
  • Xitian9/ghc
  • wenkokke/ghc
  • kephas/ghc
651 results
Show changes
Commits on Source (14130)
Showing
with 2822 additions and 1044 deletions
# Configure the environment for an AppVeyor Windows (msys2/MINGW64) build.
# Invoked as: script.sh {prepare|build|test}
MSYSTEM=MINGW64
THREADS=9
SKIP_PERF_TESTS=YES
BUILD_FLAVOUR=
source /etc/profile || true # a terrible, terrible workaround for msys2 brokenness
# Don't set -e until after /etc/profile is sourced
set -ex
cd "$APPVEYOR_BUILD_FOLDER"
case "$1" in
"prepare")
    # Prepare the tree.
    # Use https:// rather than the unauthenticated git:// protocol: GitHub
    # no longer serves git:// at all, and it was unverified in any case.
    git config remote.origin.url https://github.com/ghc/ghc.git
    # Rewrite submodule URLs under ghc/packages/ to the flattened
    # ghc/packages- repositories. Register rewrites for both the old
    # git:// form (still present in older .gitmodules) and the https://
    # form, so either spelling resolves.
    git config --global url."https://github.com/ghc/packages-".insteadOf git://github.com/ghc/packages/
    git config --global --add url."https://github.com/ghc/packages-".insteadOf https://github.com/ghc/packages/
    git submodule init
    git submodule --quiet update --recursive
    ;;
"build")
    # Build the compiler with the requested flavour (empty = default).
    ./boot
    cat <<EOF >> mk/build.mk
BuildFlavour=$BUILD_FLAVOUR
ifneq "\$(BuildFlavour)" ""
include mk/flavours/\$(BuildFlavour).mk
endif
EOF
    ./configure --enable-tarballs-autodownload
    make -j"$THREADS"
    ;;
"test")
    # Package a binary distribution, upload the artifact, then run the
    # testsuite.
    make binary-dist
    curl https://ghc-artifacts.s3.amazonaws.com/tools/ghc-artifact-collector-x86_64-windows --output ghc-artifact-collector
    ./ghc-artifact-collector *.tar.xz
    make test THREADS="$THREADS"
    ;;
*)
    echo "$0: unknown mode $1"
    exit 1
    ;;
esac
# http://editorconfig.org
# Top-most EditorConfig file: editors stop searching parent directories.
root = true
# Haskell sources: 2-space indent, LF line endings, UTF-8, tidy whitespace.
[*.hs]
indent_style = space
indent_size = 2
trim_trailing_whitespace = true
insert_final_newline = true
charset = utf-8
end_of_line = lf
# Makefiles: recipe lines must be introduced by hard tabs.
[Makefile]
indent_style = tab
# C sources: 2-space indent.
[*.c]
indent_style = space
indent_size = 2
--command sh ./hadrian/ghci.sh --command sh -c "HADRIAN_ARGS=-j exec ./hadrian/ghci-multi -j"
--reload compiler --reload compiler
--reload ghc --reload ghc
--reload includes --reload includes
--restart hadrian/ --restart hadrian/ghci-multi
5eecb20a0368b599d03930e2dbb0e91540de4cb2
# Configure git to ignore commits listed in this file with:
#
# git config blame.ignoreRevsFile .git-ignore-revs
# Module Hierarchy Renamings
af332442123878c1b61d236dce46418efcbe8750
255418da5d264fb2758bc70925adb2094f34adc3
1941ef4f050c0dfcb68229641fcbbde3a10f1072
528df8ecb4e2f9c78b1ae4ab7ff8230644e9b643
18a346a4b5a02b8c62e8eedb91b35c2d8e754b96
817f93eac4d13f680e8e3e7a25eb403b1864f82e
1b1067d14b656bbbfa7c47f156ec2700c9751549
240f5bf6f53515535be5bf3ef7632aa69ae21e3e
1500f0898e85316c7c97a2f759d83278a072ab0e
3ca52151881451ce5b3a7740d003e811b586140d
cf739945b8b28ff463dc44925348f20b3c1f22cb
da7f74797e8c322006eba385c9cbdce346dd1d43
6e2d9ee25bce06ae51d2f1cf8df4f7422106a383
d491a6795d507eabe35d8aec63c534d29f2d305b
99a9f51bf8207c79241fc0b685fadeb222a61292
eb6082358cdb5f271a8e4c74044a12f97352c52f
5119296440e6846c553c72b8a93afc5ecfa576f0
447864a94a1679b5b079e08bb7208a0005381cef
# convert CRLF into LF on checkin
# don't convert anything on checkout
* text=auto eol=lf
mk/win32-tarballs.md5sum text=auto eol=LF mk/win32-tarballs.md5sum text=auto eol=LF
testsuite/tests/parser/should_run/T25375.hs text=auto eol=crlf
...@@ -17,6 +17,8 @@ Thumbs.db ...@@ -17,6 +17,8 @@ Thumbs.db
*.hi *.hi
*.hi-boot *.hi-boot
*.hie
*.hie-boot
*.o-boot *.o-boot
*.p_o *.p_o
*.t_o *.t_o
...@@ -33,6 +35,7 @@ Thumbs.db ...@@ -33,6 +35,7 @@ Thumbs.db
__pycache__ __pycache__
.mypy_cache .mypy_cache
*.SYMDEF* *.SYMDEF*
a.out
log log
tags tags
...@@ -42,6 +45,8 @@ autom4te.cache ...@@ -42,6 +45,8 @@ autom4te.cache
config.log config.log
config.status config.status
configure configure
# GHC's own aclocal.m4 is generated by aclocal
/aclocal.m4
# Temporarily generated configure files # Temporarily generated configure files
confdefs.h confdefs.h
...@@ -50,11 +55,15 @@ confdefs.h ...@@ -50,11 +55,15 @@ confdefs.h
stage0 stage0
stage1 stage1
stage2 stage2
_build # Ignore _build, _validatebuild and any other custom build directories headed by _
_*
*/generated/ */generated/
*/ghc-stage1 */ghc-stage1
.shake.* .shake.*
.hadrian_ghci .hadrian_ghci
.hadrian_ghci_multi/
.hie-bios
hadrian/bootstrap/jq-bin
# ----------------------------------------------------------------------------- # -----------------------------------------------------------------------------
# Ignore any overlapped darcs repos and back up files # Ignore any overlapped darcs repos and back up files
...@@ -73,12 +82,12 @@ _darcs/ ...@@ -73,12 +82,12 @@ _darcs/
/driver/ghc/dist/ /driver/ghc/dist/
/driver/haddock/dist/ /driver/haddock/dist/
/driver/ghci/dist/ /driver/ghci/dist/
/includes/dist-*/
/libffi/dist-install/ /libffi/dist-install/
/libraries/*/dist-boot/ /libraries/*/dist-boot/
/libraries/*/dist-install/ /libraries/*/dist-install/
/libraries/*/dist-newstyle/
/libraries/dist-haddock/ /libraries/dist-haddock/
/rts/dist/ /linters/*/dist-install/
/utils/*/dist*/ /utils/*/dist*/
/compiler/stage1/ /compiler/stage1/
/compiler/stage2/ /compiler/stage2/
...@@ -91,6 +100,7 @@ _darcs/ ...@@ -91,6 +100,7 @@ _darcs/
# ----------------------------------------------------------------------------- # -----------------------------------------------------------------------------
# specific generated files # specific generated files
/.gitlab/jobs-metadata.json
/bindist-list /bindist-list
/bindist-list.uniq /bindist-list.uniq
/bindistprep/ /bindistprep/
...@@ -100,12 +110,19 @@ _darcs/ ...@@ -100,12 +110,19 @@ _darcs/
/ch01.html /ch01.html
/ch02.html /ch02.html
/compiler/dist/ /compiler/dist/
/compiler/Bytecodes.h
/compiler/ClosureTypes.h
/compiler/FunTypes.h
/compiler/MachRegs.h
/compiler/MachRegs
/compiler/GHC/CmmToLlvm/Version/Bounds.hs
/compiler/ghc.cabal /compiler/ghc.cabal
/compiler/ghc.cabal.old /compiler/ghc.cabal.old
/distrib/configure.ac /distrib/configure.ac
/distrib/ghc.iss /distrib/ghc.iss
/docs/man
/docs/index.html /docs/index.html
/docs/man
/docs/users_guide/.log
/docs/users_guide/users_guide /docs/users_guide/users_guide
/docs/users_guide/ghc.1 /docs/users_guide/ghc.1
/docs/users_guide/flags.pyc /docs/users_guide/flags.pyc
...@@ -121,16 +138,19 @@ _darcs/ ...@@ -121,16 +138,19 @@ _darcs/
/docs/users_guide/utils.pyc /docs/users_guide/utils.pyc
/driver/ghci/ghc-pkg-inplace /driver/ghci/ghc-pkg-inplace
/driver/ghci/ghci-inplace /driver/ghci/ghci-inplace
/driver/ghci/ghci-wrapper.cabal
/driver/ghci/ghci.res /driver/ghci/ghci.res
/driver/ghci/cwrapper.c
/driver/ghci/cwrapper.h
/driver/ghci/getLocation.c
/driver/ghci/getLocation.h
/driver/ghci/isMinTTY.c
/driver/ghci/isMinTTY.h
/driver/package.conf /driver/package.conf
/driver/package.conf.inplace.old /driver/package.conf.inplace.old
/settings /settings
/ghc.spec /ghc.spec
/ghc/ghc-bin.cabal /ghc/ghc-bin.cabal
/includes/dist/
/includes/ghcautoconf.h
/includes/ghcplatform.h
/includes/ghcversion.h
/index.html /index.html
/inplace/ /inplace/
/libffi/build/ /libffi/build/
...@@ -148,8 +168,12 @@ _darcs/ ...@@ -148,8 +168,12 @@ _darcs/
/libraries/ghc-boot/ghc-boot.cabal /libraries/ghc-boot/ghc-boot.cabal
/libraries/ghc-boot-th/GNUmakefile /libraries/ghc-boot-th/GNUmakefile
/libraries/ghc-boot-th/ghc-boot-th.cabal /libraries/ghc-boot-th/ghc-boot-th.cabal
/libraries/ghc-boot-th-next/ghc-boot-th-next.cabal
/libraries/ghc-boot-th/ghc.mk /libraries/ghc-boot-th/ghc.mk
/libraries/ghc-heap/ghc-heap.cabal /libraries/ghc-heap/ghc-heap.cabal
/libraries/ghc-internal/ghc-internal.cabal
/libraries/ghc-experimental/ghc-experimental.cabal
/libraries/base/base.cabal
/libraries/ghci/GNUmakefile /libraries/ghci/GNUmakefile
/libraries/ghci/ghci.cabal /libraries/ghci/ghci.cabal
/libraries/ghci/ghc.mk /libraries/ghci/ghc.mk
...@@ -167,19 +191,13 @@ _darcs/ ...@@ -167,19 +191,13 @@ _darcs/
/linter.log /linter.log
/mk/are-validating.mk /mk/are-validating.mk
/mk/build.mk /mk/build.mk
/mk/config.h
/mk/config.h.in
/mk/config.mk /mk/config.mk
/mk/config.mk.old /mk/config.mk.old
/mk/system-cxx-std-lib-1.0.conf
/mk/install.mk /mk/install.mk
/mk/project.mk /mk/project.mk
/mk/project.mk.old /mk/project.mk.old
/mk/validate.mk /mk/validate.mk
/rts/rts.cabal
/rts/package.conf.inplace
/rts/package.conf.inplace.raw
/rts/package.conf.install
/rts/package.conf.install.raw
/stage3.package.conf /stage3.package.conf
/testsuite_summary*.txt /testsuite_summary*.txt
/testsuite*.xml /testsuite*.xml
...@@ -190,11 +208,10 @@ _darcs/ ...@@ -190,11 +208,10 @@ _darcs/
/utils/mkUserGuidePart/mkUserGuidePart.cabal /utils/mkUserGuidePart/mkUserGuidePart.cabal
/utils/runghc/runghc.cabal /utils/runghc/runghc.cabal
/utils/gen-dll/gen-dll.cabal /utils/gen-dll/gen-dll.cabal
utils/lndir/fs.* /utils/ghc-pkg/ghc-pkg.cabal
utils/unlit/fs.* utils/unlit/fs.*
rts/fs.* libraries/ghc-internal/include/fs.h
libraries/base/include/fs.h libraries/ghc-internal/cbits/fs.c
libraries/base/cbits/fs.c
missing-win32-tarballs missing-win32-tarballs
/extra-gcc-opts /extra-gcc-opts
...@@ -215,11 +232,6 @@ GIT_COMMIT_ID ...@@ -215,11 +232,6 @@ GIT_COMMIT_ID
# Should be equal to testdir_suffix from testsuite/driver/testlib.py. # Should be equal to testdir_suffix from testsuite/driver/testlib.py.
*.run *.run
# -----------------------------------------------------------------------------
# Output of ghc-in-ghci
/.ghci-objects/
# ----------------------------------------------------------------------------- # -----------------------------------------------------------------------------
# ghc.nix # ghc.nix
ghc.nix/ ghc.nix/
...@@ -228,8 +240,19 @@ ghc.nix/ ...@@ -228,8 +240,19 @@ ghc.nix/
.gdb_history .gdb_history
.gdbinit .gdbinit
# Tooling - direnv # -----------------------------------------------------------------------------
.envrc # Tooling
# direnv
.envrc
.direnv
# Tooling - vscode # Visual Studio Code
.vscode .vscode
# Tooling - ghcide
*.hiedb
# clangd
.clangd
dist-newstyle/
...@@ -2,38 +2,154 @@ variables: ...@@ -2,38 +2,154 @@ variables:
GIT_SSL_NO_VERIFY: "1" GIT_SSL_NO_VERIFY: "1"
# Commit of ghc/ci-images repository from which to pull Docker images # Commit of ghc/ci-images repository from which to pull Docker images
DOCKER_REV: e517150438cd9df9564fb91adc4b42e2667b2bc1 DOCKER_REV: 6e9f8f17086e56e83adae4a8a9d63e2fec3cb6c7
# Sequential version number capturing the versions of all tools fetched by # Sequential version number of all cached things.
# .gitlab/win32-init.sh. # Bump to invalidate GitLab CI cache.
WINDOWS_TOOLCHAIN_VERSION: 1 CACHE_REV: 11
# Disable shallow clones; they break our linting rules # Disable shallow clones; they break our linting rules
GIT_DEPTH: 0 GIT_DEPTH: 0
before_script: # Always start with a fresh clone to avoid non-hermetic builds
- git submodule sync --recursive GIT_STRATEGY: clone
- git submodule update --init --recursive
- git checkout .gitmodules # Overridden by individual jobs
- "git fetch https://gitlab.haskell.org/ghc/ghc-performance-notes.git refs/notes/perf:refs/notes/perf || true" CONFIGURE_ARGS: ""
# Overridden by individual jobs
CONFIGURE_WRAPPER: ""
GIT_SUBMODULE_STRATEGY: "normal"
# GitLab recommends using https:, not ssh:, to clone submodules. See #25528.
GIT_SUBMODULE_FORCE_HTTPS: 1
# Makes ci.sh isolate CABAL_DIR
HERMETIC: "YES"
# Reduce XZ compression level for regular jobs (it is bumped to 9 for releases
# and nightly jobs). In my experiments I've got the following bindist size in
# the given time for each compression level (with the quick flavour):
#
# XZ_OPT Time Size
# -9 4m06s 112 MB
# -8 4m00s 114 MB
# -7 3m50s 116 MB
# -6 (default) 3m40s 118 MB
# -5 2m47s 123 MB
# -4 1m57s 134 MB
# -3 1m03s 129 MB
# -2 49.73s 136 MB
# -1 37.72s 142 MB
# -0 34.40s 156 MB
#
XZ_OPT: "-1"
default:
interruptible: true
stages: stages:
- lint # Source linting - not-interruptible
- build # A quick smoke-test to weed out broken commits - tool-lint # Source linting of the tools
- full-build # Build all the things - quick-build # A very quick smoke-test to weed out broken commits
- cleanup # See Note [Cleanup after the shell executor] - full-build # Build all the things
- packaging # Source distribution, etc. - packaging # Source distribution, etc.
- testing # head.hackage correctness and compiler performance testing - testing # head.hackage correctness and compiler performance testing
- deploy # push documentation - deploy # push documentation
# N.B.Don't run on wip/ branches, instead on run on merge requests. # Note [The CI Story]
.only-default: &only-default # ~~~~~~~~~~~~~~~~~~~
only: #
- master # There are a few different types of pipelines. Among them:
- /ghc-[0-9]+\.[0-9]+/ #
- merge_requests # 1. marge-bot merges to `master`. Here we perform an exhaustive validation
- tags # across all of the platforms which we support. In addition, we push
- web # performance metric notes upstream, providing a persistent record of the
# performance characteristics of the compiler.
#
# 2. merge requests. Here we perform a slightly less exhaustive battery of
# testing. Namely we omit some configurations (e.g. the unregisterised job).
# These use the merge request's base commit for performance metric
# comparisons.
#
# These and other pipelines are defined implicitly by the rules of individual
# jobs.
#
# At the top level, however, we can declare that pipelines (of whatever type)
# only run when:
#
# 1. Processing a merge request (as mentioned above)
#
# 2. Processing a tag
#
# 3. Pushing to master on the root ghc/ghc repo (as mentioned above)
#
# 4. Pushing to a release branch on the root ghc/ghc repo
#
# 5. Somebody manually triggers a pipeline from the GitLab UI
#
# In particular, note that pipelines don't automatically run just when changes
# are pushed to a feature branch.
workflow:
rules:
- if: $CI_MERGE_REQUEST_ID
- if: $CI_COMMIT_TAG
# N.B.: If we weren't explicit about CI_PROJECT_ID, the following rule would
# cause a duplicate pipeline for merge requests coming from the master
# branch of a fork.
- if: $CI_PROJECT_ID == "1" && $CI_COMMIT_BRANCH == "master"
- if: $CI_PROJECT_ID == "1" && $CI_COMMIT_BRANCH =~ /ghc-[0-9]+\.[0-9]+/
- if: '$CI_PIPELINE_SOURCE == "web"'
# which versions of GHC to allow bootstrap with
.bootstrap_matrix : &bootstrap_matrix
matrix:
- GHC_VERSION: 9.8.1
DOCKER_IMAGE: "registry.gitlab.haskell.org/ghc/ci-images/x86_64-linux-deb12-ghc9_8:$DOCKER_REV"
- GHC_VERSION: 9.10.1
DOCKER_IMAGE: "registry.gitlab.haskell.org/ghc/ci-images/x86_64-linux-deb12-ghc9_10:$DOCKER_REV"
# Allow linters to fail on draft MRs.
# This must be explicitly transcluded in lint jobs which
# override `rules:`
.drafts-can-fail-lint: &drafts-can-fail-lint
if: "$CI_MERGE_REQUEST_TITLE =~ /^\\s*(Draft|wip|WIP):/"
allow_failure: true
.lint:
stage: tool-lint
tags:
- lint
rules:
- *drafts-can-fail-lint
- when: always
.nightly: &nightly
variables:
XZ_OPT: "-9"
rules:
- if: $NIGHTLY
artifacts:
when: always
expire_in: 8 weeks
.release: &release
variables:
BUILD_FLAVOUR: "release"
XZ_OPT: "-9"
IGNORE_PERF_FAILURES: "all"
HADDOCK_HYPERLINKED_SOURCES: "YES"
artifacts:
when: always
expire_in: 1 year
rules:
- if: '$RELEASE_JOB == "yes"'
.full-ci: &full-ci
- if: '$CI_MERGE_REQUEST_LABELS =~ /.*full-ci.*/'
- if: '$CI_MERGE_REQUEST_LABELS =~ /.*marge_bot_batch_merge_job.*/'
- if: '$CI_COMMIT_BRANCH == "master"'
- if: '$CI_COMMIT_BRANCH =~ /ghc-[0-9]+\.[0-9]+/'
############################################################ ############################################################
# Runner Tags # Runner Tags
...@@ -46,857 +162,436 @@ stages: ...@@ -46,857 +162,436 @@ stages:
# x86_64-linux to ensure low-latency availability. # x86_64-linux to ensure low-latency availability.
# #
####
# HACK
###
#
# Since 58cfcc65 the default for jobs has been "interruptible", this means
# that when new commits are pushed to a branch which already has a running
# pipeline then the old pipelines for this branch are cancelled.
#
# This includes the master branch, and in particular, new commits merged
# to the master branch will cancel the nightly job.
#
# The semantics of pipeline cancelling are actually a bit more complicated
# though. The interruptible flag is *per job*, but once a pipeline has run
# *any* non-interruptible job, then the whole pipeline is considered
# non-interruptible (ref
# https://gitlab.com/gitlab-org/gitlab/-/issues/32837). This leads to the
# hack in this MR where by default all jobs are `interruptible: True`, but
# for pipelines we definitely want to run, there is a dummy job which
# happens first, which is `interruptible: false`. This has the effect of
# dirtying the whole pipeline, marking it as non-interruptible.
#
# For now, this patch solves the immediate problem of making sure nightly
# jobs are not cancelled.
# In the future, we may want to enable this job also for the master
# branch, making that change might mean we need more CI capacity than
# currently available.
not-interruptible:
stage: not-interruptible
script: "true"
interruptible: false
image: "debian:10"
variables:
GIT_STRATEGY: none
tags:
- lint
rules:
# - if: '$CI_COMMIT_BRANCH == "master"'
# when: always
- if: $NIGHTLY
when: always
############################################################
# Validate jobs
############################################################
# These jobs are generated by running the ./.gitlab/generate_jobs script
include: '.gitlab/jobs.yaml'
############################################################ ############################################################
# Linting # tool linting
############################################################ ############################################################
ghc-linters: ghc-linters:
stage: lint stage: tool-lint
image: "registry.gitlab.haskell.org/ghc/ci-images/linters:$DOCKER_REV" image: "registry.gitlab.haskell.org/ghc/ci-images/x86_64-linux-deb12:$DOCKER_REV"
extends: .lint-params
variables:
BUILD_FLAVOUR: default
script: script:
- git fetch "$CI_MERGE_REQUEST_PROJECT_URL" $CI_MERGE_REQUEST_TARGET_BRANCH_NAME - .gitlab/ci.sh configure
- base="$(git merge-base FETCH_HEAD $CI_COMMIT_SHA)" - timeout 10m .gitlab/ci.sh run_hadrian test --test-root-dirs="testsuite/tests/linters"
- "echo Linting changes between $base..$CI_COMMIT_SHA"
# - validate-commit-msg .git $(git rev-list $base..$CI_COMMIT_SHA)
- validate-whitespace .git $(git rev-list $base..$CI_COMMIT_SHA)
- .gitlab/linters/check-makefiles.py $base $CI_COMMIT_SHA
- .gitlab/linters/check-cpp.py $base $CI_COMMIT_SHA
- .gitlab/linters/check-version-number.sh
dependencies: [] dependencies: []
tags: rules:
- lint - if: $CI_MERGE_REQUEST_ID
only: - *drafts-can-fail-lint
refs:
- merge_requests
# Run mypy Python typechecker on linter scripts. # Run mypy Python typechecker on linter scripts.
lint-linters: lint-linters:
<<: *only-default
stage: lint
image: "registry.gitlab.haskell.org/ghc/ci-images/linters:$DOCKER_REV" image: "registry.gitlab.haskell.org/ghc/ci-images/linters:$DOCKER_REV"
extends: .lint
script: script:
- mypy .gitlab/linters/*.py - mypy testsuite/tests/linters/regex-linters/*.py
dependencies: [] dependencies: []
tags:
- lint
# Check that .T files all parse by listing broken tests. # Check that .T files all parse by listing broken tests.
lint-testsuite: lint-testsuite:
<<: *only-default
stage: lint
image: "registry.gitlab.haskell.org/ghc/ci-images/x86_64-linux-deb9:$DOCKER_REV" image: "registry.gitlab.haskell.org/ghc/ci-images/x86_64-linux-deb9:$DOCKER_REV"
extends: .lint
script: script:
- make -Ctestsuite list_broken TEST_HC=ghc - make -Ctestsuite list_broken TEST_HC=$GHC
dependencies: [] dependencies: []
tags:
- lint
# Run mypy Python typechecker on testsuite driver # Run mypy Python typechecker on testsuite driver
typecheck-testsuite: typecheck-testsuite:
<<: *only-default
stage: lint
image: "registry.gitlab.haskell.org/ghc/ci-images/linters:$DOCKER_REV" image: "registry.gitlab.haskell.org/ghc/ci-images/linters:$DOCKER_REV"
extends: .lint
script: script:
- mypy testsuite/driver/runtests.py - mypy testsuite/driver/runtests.py
dependencies: [] dependencies: []
tags:
- lint
# We allow the submodule checker to fail when run on merge requests (to # We allow the submodule checker to fail when run on merge requests (to
# accomodate, e.g., haddock changes not yet upstream) but not on `master` or # accommodate, e.g., haddock changes not yet upstream) but not on `master` or
# Marge jobs. # Marge jobs.
.lint-submods: .lint-submods:
<<: *only-default image: "registry.gitlab.haskell.org/ghc/ci-images/x86_64-linux-deb12:$DOCKER_REV"
stage: lint extends: .lint-params
image: "registry.gitlab.haskell.org/ghc/ci-images/linters:$DOCKER_REV" variables:
BUILD_FLAVOUR: default
script: script:
- .gitlab/ci.sh configure
- .gitlab/ci.sh run_hadrian stage0:exe:lint-submodule-refs
- git fetch "$CI_MERGE_REQUEST_PROJECT_URL" $CI_MERGE_REQUEST_TARGET_BRANCH_NAME - git fetch "$CI_MERGE_REQUEST_PROJECT_URL" $CI_MERGE_REQUEST_TARGET_BRANCH_NAME
- base="$(git merge-base FETCH_HEAD $CI_COMMIT_SHA)" - base="$(git merge-base FETCH_HEAD $CI_COMMIT_SHA)"
- "echo Linting submodule changes between $base..$CI_COMMIT_SHA" - "echo Linting submodule changes between $base..$CI_COMMIT_SHA"
- git submodule foreach git remote update - git submodule foreach git remote update
# TODO: Fix submodule linter - _build/stageBoot/bin/lint-submodule-refs . $(git rev-list $base..$CI_COMMIT_SHA)
- submodchecker .git $(git rev-list $base..$CI_COMMIT_SHA) || true
dependencies: [] dependencies: []
tags:
- lint
lint-submods-marge: # We allow the submodule checker to fail when run on merge requests (to
extends: .lint-submods # accommodate, e.g., haddock changes not yet upstream) but not on `master` or
only: # Marge jobs.
refs: lint-author:
- merge_requests image: "registry.gitlab.haskell.org/ghc/ci-images/x86_64-linux-deb12:$DOCKER_REV"
variables: extends: .lint-params
- "$CI_MERGE_REQUEST_LABELS =~ /.*marge_bot_batch_merge_job.*/" variables:
BUILD_FLAVOUR: default
script:
- git fetch "$CI_MERGE_REQUEST_PROJECT_URL" $CI_MERGE_REQUEST_TARGET_BRANCH_NAME
- base="$(git merge-base FETCH_HEAD $CI_COMMIT_SHA)"
- "echo Linting authors between $base..$CI_COMMIT_SHA"
- .gitlab/ci.sh lint_author $base $CI_COMMIT_SHA
dependencies: []
rules:
- if: $CI_MERGE_REQUEST_ID
- *drafts-can-fail-lint
lint-ci-config:
image: nixos/nix:2.25.2
extends: .lint
# We don't need history/submodules in this job
variables:
GIT_DEPTH: 1
GIT_SUBMODULE_STRATEGY: none
before_script:
- echo "experimental-features = nix-command flakes" >> /etc/nix/nix.conf
# Note [Nix-in-Docker]
# ~~~~~~~~~~~~~~~~~~~~
# The nixos/nix default config is max-jobs=1 and cores=$(logical
# cores num) which doesn't play nice with our $CPUS convention. We
# fix it before invoking any nix build to avoid oversubscribing
# while allowing a reasonable degree of parallelism.
# FIXME: Disabling build-users-group=nixbld is a workaround for a Nix-in-Docker issue. See
# https://gitlab.haskell.org/ghc/head.hackage/-/issues/38#note_560487 for
# discussion.
- echo "cores = $CPUS" >> /etc/nix/nix.conf
- echo "max-jobs = $CPUS" >> /etc/nix/nix.conf
- nix run nixpkgs#gnused -- -i -e 's/ nixbld//' /etc/nix/nix.conf
script:
- nix run .gitlab/generate-ci#generate-jobs
# 1 if .gitlab/generate_jobs changed the output of the generated config
- nix shell nixpkgs#git -c git diff --exit-code
# And run this to generate the .gitlab/jobs-metadata.json
- nix run .gitlab/generate-ci#generate-job-metadata
artifacts:
when: always
paths:
- .gitlab/jobs-metadata.json
- .gitlab/jobs.yaml
dependencies: []
lint-submods-mr: lint-submods:
extends: .lint-submods extends: .lint-submods
# Allow failure since any necessary submodule patches may not be upstreamed # Allow failure on merge requests since any necessary submodule patches may
# yet. # not be upstreamed yet.
allow_failure: true rules:
only: - if: '$CI_MERGE_REQUEST_LABELS =~ /.*marge_bot_batch_merge_job.*/'
refs: allow_failure: false
- merge_requests # Don't run on nightly because the program needs a base commit to check.
except: - if: $NIGHTLY
variables: when: never
- "$CI_MERGE_REQUEST_LABELS =~ /.*marge_bot_batch_merge_job.*/" - allow_failure: true
lint-submods-branch: lint-submods-branch:
extends: .lint-submods extends: .lint-submods
variables:
BUILD_FLAVOUR: default
script: script:
- .gitlab/ci.sh configure
- .gitlab/ci.sh run_hadrian stage0:exe:lint-submodule-refs
- "echo Linting submodule changes between $CI_COMMIT_BEFORE_SHA..$CI_COMMIT_SHA" - "echo Linting submodule changes between $CI_COMMIT_BEFORE_SHA..$CI_COMMIT_SHA"
- git submodule foreach git remote update - git submodule foreach git remote update
- submodchecker .git $(git rev-list $CI_COMMIT_BEFORE_SHA..$CI_COMMIT_SHA) - _build/stageBoot/bin/lint-submodule-refs . $(git rev-list $CI_COMMIT_BEFORE_SHA..$CI_COMMIT_SHA)
only: rules:
refs: - if: '$CI_COMMIT_BRANCH == "master"'
- master - if: '$CI_COMMIT_BRANCH =~ /ghc-[0.9]+\.[0-9]+/'
- /ghc-[0-9]+\.[0-9]+/ - *drafts-can-fail-lint
.lint-changelogs:
<<: *only-default
stage: lint
image: "registry.gitlab.haskell.org/ghc/ci-images/linters:$DOCKER_REV"
dependencies: []
tags:
- lint
script:
- |
grep TBA libraries/*/changelog.md && (
echo "Error: Found \"TBA\"s in changelogs."
exit 1
)
lint-changelogs:
extends: .lint-changelogs
# Allow failure since this isn't a final release.
allow_failure: true
only:
refs:
- /ghc-[0-9]+\.[0-9]+/
lint-release-changelogs:
extends: .lint-changelogs
only:
refs:
- /ghc-[0-9]+\.[0-9]+\.[0-9]+-.*/
############################################################ ############################################################
# Validation via Pipelines (hadrian) # GHC source code linting
############################################################ ############################################################
.validate-hadrian: .lint-params:
<<: *only-default needs: []
script: image: "registry.gitlab.haskell.org/ghc/ci-images/x86_64-linux-deb12:$DOCKER_REV"
- cabal update extends: .lint
- git clean -xdf && git submodule foreach git clean -xdf before_script:
- .gitlab/prepare-system.sh - export PATH="/opt/toolchain/bin:$PATH"
- if [[ -d ./cabal-cache ]]; then cp -R ./.cabal-cache ~/.cabal-cache; fi # workaround for docker permissions
- ./boot - sudo chown ghc:ghc -R .
- ./configure $CONFIGURE_ARGS - .gitlab/ci.sh setup
- hadrian/build.cabal.sh -j`mk/detect-cpu-count.sh` --docs=no-sphinx binary-dist after_script:
- hadrian/build.cabal.sh -j`mk/detect-cpu-count.sh` --docs=no-sphinx test --summary-junit=./junit.xml - .gitlab/ci.sh save_cache
- mv _build/bindist/ghc*.tar.xz ghc.tar.xz - cat ci_timings.txt
variables:
GHC_FLAGS: -Werror
cache: cache:
key: hadrian key: lint-$CACHE_REV
paths: paths:
- cabal-cache - cabal-cache
artifacts:
reports:
junit: junit.xml
expire_in: 2 week
paths:
- ghc.tar.xz
- junit.xml
validate-x86_64-linux-deb9-hadrian: # Disabled due to #22830
extends: .validate-hadrian .hlint-ghc-and-base:
stage: build extends: .lint-params
image: "registry.gitlab.haskell.org/ghc/ci-images/x86_64-linux-deb9:$DOCKER_REV" image: "registry.gitlab.haskell.org/ghc/ci-images/linters:$DOCKER_REV"
before_script: variables:
# workaround for docker permissions BUILD_FLAVOUR: default
- sudo chown ghc:ghc -R . script:
- git submodule sync --recursive - .gitlab/ci.sh setup
- git submodule update --init --recursive - .gitlab/ci.sh configure
- git checkout .gitmodules - .gitlab/ci.sh run_hadrian lint:ghc-internal
- "git fetch https://gitlab.haskell.org/ghc/ghc-performance-notes.git refs/notes/perf:refs/notes/perf || true" - .gitlab/ci.sh run_hadrian lint:ghc-experimental
tags: - .gitlab/ci.sh run_hadrian lint:base
- x86_64-linux - .gitlab/ci.sh run_hadrian lint:compiler
############################################################
# GHC-in-GHCi (Hadrian)
############################################################
hadrian-ghc-in-ghci: hadrian-ghc-in-ghci:
<<: *only-default stage: quick-build
stage: build needs:
image: "registry.gitlab.haskell.org/ghc/ci-images/x86_64-linux-deb9:$DOCKER_REV" - job: lint-linters
- job: lint-submods
optional: true
image: "registry.gitlab.haskell.org/ghc/ci-images/x86_64-linux-deb12:$DOCKER_REV"
before_script: before_script:
# workaround for docker permissions # workaround for docker permissions
- sudo chown ghc:ghc -R . - sudo chown ghc:ghc -R .
- git submodule sync --recursive variables:
- git submodule update --init --recursive GHC_FLAGS: -Werror
- git checkout .gitmodules
tags: tags:
- x86_64-linux - x86_64-linux
script: script:
- cabal update
- cd hadrian; cabal new-build --project-file=ci.project; cd ..
- git clean -xdf && git submodule foreach git clean -xdf - git clean -xdf && git submodule foreach git clean -xdf
- .gitlab/prepare-system.sh - . .gitlab/ci.sh setup
- if [[ -d ./cabal-cache ]]; then cp -R ./.cabal-cache ~/.cabal-cache; fi - . .gitlab/ci.sh configure
- ./boot # Enable -Werror when building hadrian
- ./configure $CONFIGURE_ARGS - "echo 'package hadrian' > hadrian/cabal.project.local"
- "echo ' ghc-options: -Werror' >> hadrian/cabal.project.local"
# Load ghc-in-ghci then immediately exit and check the modules loaded # Load ghc-in-ghci then immediately exit and check the modules loaded
- echo ":q" | hadrian/ghci.sh | tail -n2 | grep "Ok," - export CORES="$(mk/detect-cpu-count.sh)"
- echo ":q" | HADRIAN_ARGS=-j$CORES hadrian/ghci -j$CORES | tail -n2 | grep "Ok,"
after_script:
- .gitlab/ci.sh save_cache
- cat ci_timings.txt
cache: cache:
key: hadrian-ghci key: hadrian-ghci-$CACHE_REV
paths: paths:
- cabal-cache - cabal-cache
############################################################ ############################################################
# Validation via Pipelines (make) # Hadrian Multi-Repl
############################################################ ############################################################
.validate: hadrian-multi:
<<: *only-default stage: testing
variables: needs:
TEST_TYPE: test - job: x86_64-linux-fedora33-release
before_script: optional: true
- git clean -xdf && git submodule foreach git clean -xdf - job: nightly-x86_64-linux-fedora33-release
script: optional: true
- ./boot - job: release-x86_64-linux-fedora33-release
- ./configure $CONFIGURE_ARGS optional: true
- | dependencies: null
THREADS=`mk/detect-cpu-count.sh` image: "registry.gitlab.haskell.org/ghc/ci-images/x86_64-linux-fedora33:$DOCKER_REV"
make V=0 -j$THREADS WERROR=-Werror
- |
make bindisttest
- |
make binary-dist TAR_COMP_OPTS="-1"
- |
# Prepare to push git notes.
METRICS_FILE=$(mktemp)
git config user.email "ben+ghc-ci@smart-cactus.org"
git config user.name "GHC GitLab CI"
- |
THREADS=`mk/detect-cpu-count.sh`
make $TEST_TYPE THREADS=$THREADS JUNIT_FILE=../../junit.xml METRICS_FILE=$METRICS_FILE
- |
# Push git notes.
METRICS_FILE=$METRICS_FILE .gitlab/push-test-metrics.sh
dependencies: []
artifacts:
reports:
junit: junit.xml
expire_in: 2 week
paths:
- ghc-*.tar.xz
- junit.xml
#################################
# x86_64-darwin
#################################
validate-x86_64-darwin:
extends: .validate
stage: full-build
tags:
- x86_64-darwin
variables:
GHC_VERSION: 8.6.5
CABAL_INSTALL_VERSION: 2.4.1.0
BIN_DIST_PREP_TAR_COMP: "bindistprep/ghc-x86_64-apple-darwin.tar.xz"
MACOSX_DEPLOYMENT_TARGET: "10.7"
# Only Sierra and onwards supports clock_gettime. See #12858
ac_cv_func_clock_gettime: "no"
LANG: "en_US.UTF-8"
CONFIGURE_ARGS: --with-intree-gmp
TEST_ENV: "x86_64-darwin"
before_script: before_script:
- git clean -xdf && git submodule foreach git clean -xdf # workaround for docker permissions
- git submodule sync --recursive - sudo chown ghc:ghc -R .
- git submodule update --init --recursive
- git checkout .gitmodules
- "git fetch https://gitlab.haskell.org/ghc/ghc-performance-notes.git refs/notes/perf:refs/notes/perf || true"
- bash .gitlab/darwin-init.sh
- PATH="`pwd`/toolchain/bin:$PATH"
after_script:
- cp -Rf $HOME/.cabal cabal-cache
artifacts:
when: always
expire_in: 2 week
cache:
key: "darwin-$GHC_VERSION"
paths:
- cabal-cache
- toolchain
# Disabled because of OS X CI capacity
.validate-x86_64-darwin-hadrian:
<<: *only-default
stage: full-build
tags:
- x86_64-darwin
variables: variables:
GHC_VERSION: 8.6.3 GHC_FLAGS: "-Werror=-Wno-error=incomplete-record-selectors -Wwarn=deprecations -Wwarn=unused-imports"
MACOSX_DEPLOYMENT_TARGET: "10.7" # -Wno-error=incomplete-record-selectors is present because -Wall now
ac_cv_func_clock_gettime: "no" # includes -Wincomplete-record-selectors, and hadrian-multi has many, many
LANG: "en_US.UTF-8" # warnings about incomplete record selectors. A better fix would be to
CONFIGURE_ARGS: --with-intree-gmp # remove the use of incomplete record selectors, since each of them represents
TEST_ENV: "x86_64-darwin" # a potential crash.
before_script: CONFIGURE_ARGS: --enable-bootstrap-with-devel-snapshot
- git clean -xdf && git submodule foreach git clean -xdf
- git submodule sync --recursive
- git submodule update --init --recursive
- git checkout .gitmodules
- "git fetch https://gitlab.haskell.org/ghc/ghc-performance-notes.git refs/notes/perf:refs/notes/perf || true"
- bash .gitlab/darwin-init.sh
- PATH="`pwd`/toolchain/bin:$PATH"
script:
- cabal update
- ./boot
- ./configure $CONFIGURE_ARGS
- hadrian/build.cabal.sh -j`mk/detect-cpu-count.sh` --docs=no-sphinx binary-dist
- hadrian/build.cabal.sh -j`mk/detect-cpu-count.sh` --docs=no-sphinx test --summary-junit=./junit.xml
- mv _build/bindist/ghc*.tar.xz ghc.tar.xz
after_script:
- cp -Rf $HOME/.cabal cabal-cache
artifacts:
when: always
expire_in: 2 week
reports:
junit: junit.xml
paths:
- ghc.tar.xz
- junit.xml
.validate-linux:
extends: .validate
tags: tags:
- x86_64-linux - x86_64-linux
before_script: script:
- git clean -xdf && git submodule foreach git clean -xdf - export BOOT_HC=$GHC
- git submodule sync --recursive - root=$(pwd)/ghc
- git submodule update --init --recursive - ls
- git checkout .gitmodules
- "git fetch https://gitlab.haskell.org/ghc/ghc-performance-notes.git refs/notes/perf:refs/notes/perf || true"
# Build hyperlinked sources for documentation when building releases
- | - |
if [[ -n "$CI_COMMIT_TAG" ]]; then mkdir tmp
echo "EXTRA_HADDOCK_OPTS += --hyperlinked-source --quickjump" >> mk/build.mk tar -xf ghc-x86_64-linux-fedora33-release.tar.xz -C tmp
fi pushd tmp/ghc-*/
./configure --prefix=$root
- .gitlab/prepare-system.sh make install
# workaround for docker permissions popd
- sudo chown ghc:ghc -R . rm -Rf tmp
- export HC=$root/bin/ghc
# This GHC means, use this GHC to configure with
- export GHC=$root/bin/ghc
- . .gitlab/ci.sh setup
- . .gitlab/ci.sh configure
# Now GHC means, use this GHC for hadrian
- export GHC=$BOOT_HC
- export CORES="$(mk/detect-cpu-count.sh)"
# Load hadrian-multi then immediately exit and check the modules loaded
- echo ":q" | HADRIAN_ARGS=-j$CORES hadrian/ghci-multi -j$CORES | tail -n2 | grep "Ok,"
after_script: after_script:
- cp -Rf $HOME/.cabal cabal-cache - .gitlab/ci.sh save_cache
cache: cache:
key: linux key: hadrian-ghci-$CACHE_REV
paths: paths:
- cabal-cache - cabal-cache
- toolchain rules:
- *full-ci
#################################
# aarch64-linux-deb9
#################################
.build-aarch64-linux-deb9:
extends: .validate-linux
stage: full-build
image: "registry.gitlab.haskell.org/ghc/ci-images/aarch64-linux-deb9:$DOCKER_REV"
allow_failure: true
variables:
TEST_ENV: "aarch64-linux-deb9"
BIN_DIST_PREP_TAR_COMP: "bindistprep/ghc-aarch64-linux-deb9.tar.xz"
# Inexplicably makeindex fails
BUILD_SPHINX_PDF: "NO"
cache:
key: linux-aarch64-deb9
tags:
- aarch64-linux
validate-aarch64-linux-deb9:
extends: .build-aarch64-linux-deb9
artifacts:
when: always
expire_in: 2 week
nightly-aarch64-linux-deb9:
extends: .build-aarch64-linux-deb9
artifacts:
expire_in: 2 year
variables:
TEST_TYPE: slowtest
only:
variables:
- $NIGHTLY
#################################
# i386-linux-deb9
#################################
.build-i386-linux-deb9:
extends: .validate-linux
stage: full-build
image: "registry.gitlab.haskell.org/ghc/ci-images/i386-linux-deb9:$DOCKER_REV"
variables:
TEST_ENV: "i386-linux-deb9"
BIN_DIST_PREP_TAR_COMP: "bindistprep/ghc-i386-deb9-linux.tar.xz"
cache:
key: linux-i386-deb9
validate-i386-linux-deb9:
extends: .build-i386-linux-deb9
artifacts:
when: always
expire_in: 2 week
nightly-i386-linux-deb9:
extends: .build-i386-linux-deb9
variables:
TEST_TYPE: slowtest
artifacts:
when: always
expire_in: 2 week
only:
variables:
- $NIGHTLY
#################################
# x86_64-linux-deb9
#################################
.build-x86_64-linux-deb9:
extends: .validate-linux
stage: full-build
image: "registry.gitlab.haskell.org/ghc/ci-images/x86_64-linux-deb9:$DOCKER_REV"
variables:
TEST_ENV: "x86_64-linux-deb9"
BIN_DIST_PREP_TAR_COMP: "bindistprep/ghc-x86_64-deb9-linux.tar.xz"
cache:
key: linux-x86_64-deb9
# Disabled to reduce CI load
.validate-x86_64-linux-deb9:
extends: .build-x86_64-linux-deb9
artifacts:
when: always
expire_in: 2 week
nightly-x86_64-linux-deb9:
extends: .build-x86_64-linux-deb9
artifacts:
expire_in: 2 year
variables:
TEST_TYPE: slowtest
only:
variables:
- $NIGHTLY
# N.B. Has DEBUG assertions enabled in stage2
validate-x86_64-linux-deb9-debug:
extends: .build-x86_64-linux-deb9
stage: build
variables:
BUILD_FLAVOUR: validate
# Ensure that stage2 also has DEBUG enabled
ValidateSpeed: SLOW
# Override validate flavour default; see #16890.
BUILD_SPHINX_PDF: "YES"
TEST_TYPE: slowtest
TEST_ENV: "x86_64-linux-deb9-debug"
artifacts:
when: always
expire_in: 2 week
# Disabled to alleviate CI load
.validate-x86_64-linux-deb9-llvm:
extends: .build-x86_64-linux-deb9
stage: full-build
variables:
BUILD_FLAVOUR: perf-llvm
TEST_ENV: "x86_64-linux-deb9-llvm"
nightly-x86_64-linux-deb9-llvm:
extends: .build-x86_64-linux-deb9
stage: full-build
variables:
BUILD_FLAVOUR: perf-llvm
TEST_ENV: "x86_64-linux-deb9-llvm"
only:
variables:
- $NIGHTLY
validate-x86_64-linux-deb9-integer-simple:
extends: .build-x86_64-linux-deb9
stage: full-build
variables:
INTEGER_LIBRARY: integer-simple
TEST_ENV: "x86_64-linux-deb9-integer-simple"
BIN_DIST_PREP_TAR_COMP: "bindistprep/ghc-x86_64-deb9-linux-integer-simple.tar.xz"
nightly-x86_64-linux-deb9-integer-simple:
extends: .build-x86_64-linux-deb9
stage: full-build
variables:
INTEGER_LIBRARY: integer-simple
TEST_ENV: "x86_64-linux-deb9-integer-simple"
TEST_TYPE: slowtest
artifacts:
expire_in: 2 year
only:
variables:
- $NIGHTLY
validate-x86_64-linux-deb9-unreg:
extends: .build-x86_64-linux-deb9
stage: full-build
variables:
CONFIGURE_ARGS: --enable-unregisterised
TEST_ENV: "x86_64-linux-deb9-unreg"
release-x86_64-linux-deb9-dwarf:
extends: .validate-linux
stage: build
image: "registry.gitlab.haskell.org/ghc/ci-images/x86_64-linux-deb9:$DOCKER_REV"
variables:
CONFIGURE_ARGS: "--enable-dwarf-unwind"
BUILD_FLAVOUR: dwarf
TEST_ENV: "x86_64-linux-deb9-dwarf"
artifacts:
when: always
expire_in: 2 week
cache:
key: linux-x86_64-deb9
#################################
# x86_64-linux-deb8
#################################
release-x86_64-linux-deb8:
extends: .validate-linux
stage: full-build
image: "registry.gitlab.haskell.org/ghc/ci-images/x86_64-linux-deb8:$DOCKER_REV"
variables:
TEST_ENV: "x86_64-linux-deb8"
BIN_DIST_PREP_TAR_COMP: "bindistprep/ghc-x86_64-deb8-linux.tar.xz"
only:
- tags
cache:
key: linux-x86_64-deb8
artifacts:
when: always
expire_in: 2 week
#################################
# x86_64-linux-alpine
#################################
.build-x86_64-linux-alpine:
extends: .validate-linux
stage: full-build
image: "registry.gitlab.haskell.org/ghc/ci-images/x86_64-linux-alpine:$DOCKER_REV"
# There are currently a few failing tests
allow_failure: true
variables:
BUILD_SPHINX_PDF: "NO"
TEST_ENV: "x86_64-linux-alpine"
BIN_DIST_PREP_TAR_COMP: "bindistprep/ghc-x86_64-alpine-linux.tar.xz"
# Can't use ld.gold due to #13958.
CONFIGURE_ARGS: "--disable-ld-override"
cache:
key: linux-x86_64-alpine
artifacts:
when: always
expire_in: 2 week
release-x86_64-linux-alpine:
extends: .build-x86_64-linux-alpine
only:
- tags
nightly-x86_64-linux-alpine:
extends: .build-x86_64-linux-alpine
only:
variables:
- $NIGHTLY
#################################
# x86_64-linux-centos7
#################################
release-x86_64-linux-centos7:
extends: .validate-linux
stage: full-build
image: "registry.gitlab.haskell.org/ghc/ci-images/x86_64-linux-centos7:$DOCKER_REV"
variables:
# The sphinx release shipped with Centos 7 fails to build out documentation
BUILD_SPHINX_HTML: "NO"
BUILD_SPHINX_PDF: "NO"
TEST_ENV: "x86_64-linux-centos7"
BIN_DIST_PREP_TAR_COMP: "bindistprep/ghc-x86_64-centos7-linux.tar.xz"
only:
- tags
cache:
key: linux-x86_64-centos7
artifacts:
when: always
expire_in: 2 week
#################################
# x86_64-linux-fedora27
#################################
validate-x86_64-linux-fedora27:
extends: .validate-linux
stage: full-build
image: "registry.gitlab.haskell.org/ghc/ci-images/x86_64-linux-fedora27:$DOCKER_REV"
variables:
TEST_ENV: "x86_64-linux-fedora27"
BIN_DIST_PREP_TAR_COMP: "bindistprep/ghc-x86_64-fedora27-linux.tar.xz"
cache:
key: linux-x86_64-fedora27
artifacts:
when: always
expire_in: 2 week
############################################################ ############################################################
# Validation via Pipelines (Windows) # stack-hadrian-build
############################################################ ############################################################
.build-windows: # Verify that Hadrian builds with stack. Note that we don't actually perform a
<<: *only-default # build of GHC itself; we merely test that the Hadrian executable builds and
before_script: # works (by invoking `hadrian --version`).
- git clean -xdf stack-hadrian-build:
- git submodule foreach git clean -xdf extends: hadrian-ghc-in-ghci
stage: quick-build
# Use a local temporary directory to ensure that concurrent builds don't
# interfere with one another
- |
mkdir tmp
set TMP=%cd%\tmp
set TEMP=%cd%\tmp
- set PATH=C:\msys64\usr\bin;%PATH%
- git submodule sync --recursive
- git submodule update --init --recursive
- git checkout .gitmodules
- "git fetch https://gitlab.haskell.org/ghc/ghc-performance-notes.git refs/notes/perf:refs/notes/perf || true"
- bash .gitlab/win32-init.sh
after_script:
- rd /s /q tmp
- robocopy /np /nfl /ndl /e "%APPDATA%\cabal" cabal-cache
- bash -c 'make clean || true'
dependencies: []
variables:
FORCE_SYMLINKS: 1
LANG: "en_US.UTF-8"
SPHINXBUILD: "/mingw64/bin/sphinx-build.exe"
cache:
paths:
- cabal-cache
- ghc-8.6.5
- ghc-tarballs
.build-windows-hadrian:
extends: .build-windows
stage: full-build
variables:
GHC_VERSION: "8.6.5"
# due to #16574 this currently fails
allow_failure: true
script: script:
- | - . .gitlab/ci.sh setup
python boot - . .gitlab/ci.sh configure
bash -c './configure --enable-tarballs-autodownload GHC=`pwd`/toolchain/bin/ghc HAPPY=`pwd`/toolchain/bin/happy ALEX=`pwd`/toolchain/bin/alex' - hadrian/build-stack --version
- bash -c "PATH=`pwd`/toolchain/bin:$PATH hadrian/build.cabal.sh -j`mk/detect-cpu-count.sh` --flavour=Quick --docs=no-sphinx binary-dist"
- mv _build/bindist/ghc*.tar.xz ghc.tar.xz
- bash -c "PATH=`pwd`/toolchain/bin:$PATH hadrian/build.cabal.sh -j`mk/detect-cpu-count.sh` --flavour=quick test --summary-junit=./junit.xml --skip-perf"
# skipping perf tests for now since we build a quick-flavoured GHC,
# which might result in some broken perf tests?
tags:
- x86_64-windows
artifacts:
reports:
junit: junit.xml
expire_in: 2 week
when: always
paths:
- ghc.tar.xz
- junit.xml
validate-x86_64-windows-hadrian: ####################################
extends: .build-windows-hadrian # Testing reinstallable ghc codepath
variables: ####################################
MSYSTEM: MINGW64
cache:
key: "x86_64-windows-hadrian-$WINDOWS_TOOLCHAIN_VERSION"
nightly-i386-windows-hadrian:
extends: .build-windows-hadrian
variables:
MSYSTEM: MINGW32
only:
variables:
- $NIGHTLY
cache:
key: "i386-windows-hadrian-$WINDOWS_TOOLCHAIN_VERSION"
.build-windows-make: test-cabal-reinstall-x86_64-linux-deb10:
extends: .build-windows extends: nightly-x86_64-linux-deb10-validate
stage: full-build stage: full-build
# due to #16084
allow_failure: true
variables: variables:
BUILD_FLAVOUR: "quick" REINSTALL_GHC: "yes"
GHC_VERSION: "8.6.5" BUILD_FLAVOUR: validate
BIN_DIST_PREP_TAR_COMP: "bindistprep/ghc-x86_64-mingw32.tar.xz" TEST_ENV: "x86_64-linux-deb10-cabal-install"
script: rules:
- | - if: $NIGHTLY
python boot - if: '$CI_MERGE_REQUEST_LABELS =~ /.*test-reinstall.*/'
bash -c './configure --enable-tarballs-autodownload GHC=`pwd`/toolchain/bin/ghc HAPPY=`pwd`/toolchain/bin/happy ALEX=`pwd`/toolchain/bin/alex $CONFIGURE_ARGS'
- bash -c "echo include mk/flavours/${BUILD_FLAVOUR}.mk > mk/build.mk"
- bash -c "echo 'GhcLibHcOpts+=-haddock' >> mk/build.mk"
- bash -c "PATH=`pwd`/toolchain/bin:$PATH make -j`mk/detect-cpu-count.sh`"
- bash -c "PATH=`pwd`/toolchain/bin:$PATH make bindisttest"
- bash -c "PATH=`pwd`/toolchain/bin:$PATH make binary-dist TAR_COMP_OPTS=-1"
- bash -c 'make V=0 test THREADS=`mk/detect-cpu-count.sh` JUNIT_FILE=../../junit.xml'
tags:
- x86_64-windows
artifacts:
when: always
expire_in: 2 week
reports:
junit: junit.xml
paths:
- ghc-*.tar.xz
- junit.xml
validate-x86_64-windows: ########################################
extends: .build-windows-make # Testing ABI is invariant across builds
variables: ########################################
MSYSTEM: MINGW64
CONFIGURE_ARGS: "--target=x86_64-unknown-mingw32"
cache:
key: "x86_64-windows-$WINDOWS_TOOLCHAIN_VERSION"
nightly-x86_64-windows: abi-test-nightly:
extends: .build-windows-make
stage: full-build stage: full-build
variables: needs:
BUILD_FLAVOUR: "validate" - job: nightly-x86_64-linux-fedora33-release-hackage
MSYSTEM: MINGW64 - job: nightly-x86_64-linux-fedora33-release
CONFIGURE_ARGS: "--target=x86_64-unknown-mingw32"
only:
variables:
- $NIGHTLY
cache:
key: "x86_64-windows-$WINDOWS_TOOLCHAIN_VERSION"
# Normal Windows validate builds are profiled; that won't do for releases.
release-x86_64-windows:
extends: validate-x86_64-windows
variables:
MSYSTEM: MINGW64
BUILD_FLAVOUR: "perf"
CONFIGURE_ARGS: "--target=x86_64-unknown-mingw32"
only:
- tags
release-i386-windows:
extends: .build-windows-make
only:
- tags
variables:
MSYSTEM: MINGW32
BUILD_FLAVOUR: "perf"
CONFIGURE_ARGS: "--target=i386-unknown-mingw32"
# Due to #15934
BUILD_PROF_LIBS: "NO"
cache:
key: "i386-windows-$WINDOWS_TOOLCHAIN_VERSION"
nightly-i386-windows:
extends: .build-windows-make
only:
variables:
- $NIGHTLY
variables:
MSYSTEM: MINGW32
CONFIGURE_ARGS: "--target=i386-unknown-mingw32"
# Due to #15934
BUILD_PROF_LIBS: "NO"
cache:
key: "i386-windows-$WINDOWS_TOOLCHAIN_VERSION"
############################################################
# Cleanup
############################################################
# Note [Cleaning up after shell executor]
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
#
# As noted in [1], gitlab-runner's shell executor doesn't clean up its working
# directory after builds. Unfortunately, we are forced to use the shell executor
# on Darwin. To avoid running out of disk space we add a stage at the end of
# the build to remove the /.../GitLabRunner/builds directory. Since we only run a
# single build at a time on Darwin this should be safe.
#
# We used to have a similar cleanup job on Windows as well however it ended up
# being quite fragile as we have multiple Windows builders yet there is no
# guarantee that the cleanup job is run on the same machine as the build itself
# was run. Consequently we were forced to instead handle cleanup with a separate
# cleanup cron job on Windows.
#
# [1] https://gitlab.com/gitlab-org/gitlab-runner/issues/3856
# See Note [Cleanup after shell executor]
cleanup-darwin:
<<: *only-default
stage: cleanup
tags: tags:
- x86_64-darwin - x86_64-linux
when: always image: "registry.gitlab.haskell.org/ghc/ci-images/x86_64-linux-fedora33:$DOCKER_REV"
dependencies: [] dependencies: null
before_script: before_script:
- echo "Time to clean up" - mkdir -p normal
- mkdir -p hackage
- tar -xf ghc-x86_64-linux-fedora33-release.tar.xz -C normal/
- tar -xf ghc-x86_64-linux-fedora33-release-hackage_docs.tar.xz -C hackage/
script: script:
- echo "Let's go" - .gitlab/ci.sh compare_interfaces_of "normal/ghc-*" "hackage/ghc-*"
after_script: artifacts:
- BUILD_DIR=$CI_PROJECT_DIR paths:
- echo "Cleaning $BUILD_DIR" - out
- cd $HOME rules:
- rm -Rf $BUILD_DIR/* # This job is broken. Disabling it until some kind soul can finish its
- exit 0 # implementation. #23269
- when: never
- if: $NIGHTLY
############################################################ ############################################################
# Packaging # Packaging
############################################################ ############################################################
doc-tarball: doc-tarball:
<<: *only-default
stage: packaging stage: packaging
needs:
- job: x86_64-linux-deb12-numa-slow-validate
optional: true
- job: nightly-x86_64-linux-deb12-validate
optional: true
- job: release-x86_64-linux-deb12-release
optional: true
- job: x86_64-windows-validate
optional: true
- job: nightly-x86_64-windows-validate
optional: true
- job: release-x86_64-windows-release
optional: true
tags: tags:
- x86_64-linux - x86_64-linux
image: "registry.gitlab.haskell.org/ghc/ci-images/x86_64-linux-deb9:$DOCKER_REV" image: "registry.gitlab.haskell.org/ghc/ci-images/x86_64-linux-deb12:$DOCKER_REV"
dependencies: dependencies: null
- validate-x86_64-linux-deb9-debug
- validate-x86_64-windows
variables: variables:
LINUX_BINDIST: "ghc-x86_64-deb9-linux.tar.xz" LINUX_BINDIST: "ghc-x86_64-linux-deb12.tar.xz"
WINDOWS_BINDIST: "ghc-x86_64-mingw32.tar.xz" WINDOWS_BINDIST: "ghc-x86_64-windows.tar.xz"
# Due to Windows allow_failure
allow_failure: true
artifacts: artifacts:
expose_as: "Documentation Preview"
paths: paths:
- haddock.html.tar.xz - haddock.html.tar.xz
- docs/haddock/
- libraries.html.tar.xz - libraries.html.tar.xz
- docs/libraries/
- users_guide.html.tar.xz - users_guide.html.tar.xz
- index.html - docs/users_guide/
- "*.pdf" - docs/index.html
- Haddock.pdf
- users_guide.pdf
script: script:
- | - |
mv "ghc-x86_64-linux-deb12-numa-slow-validate.tar.xz" "$LINUX_BINDIST" \
|| mv "ghc-x86_64-linux-deb12-validate.tar.xz" "$LINUX_BINDIST" \
|| mv "ghc-x86_64-linux-deb12-release.tar.xz" "$LINUX_BINDIST" \
|| true
mv "ghc-x86_64-windows-validate.tar.xz" "$WINDOWS_BINDIST" \
|| mv "ghc-x86_64-windows-release.tar.xz" "$WINDOWS_BINDIST" \
|| true
if [ ! -f "$LINUX_BINDIST" ]; then if [ ! -f "$LINUX_BINDIST" ]; then
echo "Error: $LINUX_BINDIST does not exist. Did the Debian 9 job fail?" echo "Error: $LINUX_BINDIST does not exist. Did the Debian 9 job fail?"
exit 1 exit 1
...@@ -907,30 +602,148 @@ doc-tarball: ...@@ -907,30 +602,148 @@ doc-tarball:
fi fi
- rm -Rf docs - rm -Rf docs
- bash -ex distrib/mkDocs/mkDocs $LINUX_BINDIST $WINDOWS_BINDIST - bash -ex distrib/mkDocs/mkDocs $LINUX_BINDIST $WINDOWS_BINDIST
- mv docs/*.tar.xz docs/*.pdf .
- ls -lh - ls -lh
- mv docs/*.tar.xz docs/index.html .
source-tarball: hackage-doc-tarball:
stage: packaging stage: packaging
needs:
- job: nightly-x86_64-linux-fedora33-release-hackage
optional: true
- job: release-x86_64-linux-fedora33-release-hackage
optional: true
- job: source-tarball
tags: tags:
- x86_64-linux - x86_64-linux
image: "registry.gitlab.haskell.org/ghc/ci-images/x86_64-linux-deb9:$DOCKER_REV" image: "registry.gitlab.haskell.org/ghc/ci-images/x86_64-linux-deb12:$DOCKER_REV"
dependencies: null
variables:
# Don't clone the git repo..
GIT_STRATEGY: none
# Don't attempt to boot a source tarball
NO_BOOT: "1"
artifacts:
paths:
- hackage_docs
before_script:
- tar -xf ghc-*[0-9]-src.tar.xz
- tar -xf ghc-x86_64-linux-fedora33-release.tar.xz -C ghc*/
script:
- cd ghc*/
- mv .gitlab/rel_eng/upload_ghc_libs.py .
- . .gitlab/ci.sh setup
- . .gitlab/ci.sh configure
- ./upload_ghc_libs.py prepare --bindist ghc*linux/
- mv .upload-libs/docs ../hackage_docs
rules:
- if: $NIGHTLY
- if: '$RELEASE_JOB == "yes"'
source-tarball:
stage: full-build
needs:
- hadrian-ghc-in-ghci
tags:
- x86_64-linux
image: "registry.gitlab.haskell.org/ghc/ci-images/x86_64-linux-deb12:$DOCKER_REV"
dependencies: [] dependencies: []
only:
- tags
artifacts: artifacts:
paths: paths:
- ghc-*.tar.xz - ghc-*.tar.xz
- version
script: script:
- mk/get-win32-tarballs.sh download all - sudo chown ghc:ghc -R .
- ./boot - . .gitlab/ci.sh setup
- ./configure - . .gitlab/ci.sh configure
- make sdist - ./hadrian/build source-dist
- mv sdistprep/*.xz . - mv _build/source-dist/*.xz .
- make show! --quiet VALUE=ProjectVersion > version rules:
- source version - if: $NIGHTLY
- echo "$ProjectVersion" > version - if: '$RELEASE_JOB == "yes"'
- if: '$CI_MERGE_REQUEST_LABELS =~ /.*test-bootstrap.*/'
- *full-ci
generate-hadrian-bootstrap-sources:
stage: full-build
needs:
- hadrian-ghc-in-ghci
tags:
- x86_64-linux
image: "$DOCKER_IMAGE"
dependencies: []
parallel: *bootstrap_matrix
artifacts:
paths:
- hadrian-bootstrap-sources-*.tar.gz
script:
- bash -c "[ $($GHC --numeric-version) = $GHC_VERSION ] || { echo $GHC_VERSION is not the same as the version of $GHC && exit 1; }"
- python3 ./hadrian/bootstrap/bootstrap.py -w $GHC fetch -o hadrian-bootstrap-sources-$GHC_VERSION
rules:
- if: $NIGHTLY
- if: '$RELEASE_JOB == "yes"'
- if: '$CI_MERGE_REQUEST_LABELS =~ /.*test-bootstrap.*/'
- *full-ci
package-hadrian-bootstrap-sources:
stage: full-build
tags:
- x86_64-linux
needs: ["generate-hadrian-bootstrap-sources"]
image: "registry.gitlab.haskell.org/ghc/ci-images/x86_64-linux-deb12:$DOCKER_REV"
artifacts:
paths:
- hadrian-bootstrap-sources-all.tar.gz
script:
- tar -czvf hadrian-bootstrap-sources-all.tar.gz hadrian-bootstrap-sources-*.tar.gz
rules:
- if: $NIGHTLY
- if: '$RELEASE_JOB == "yes"'
- if: '$CI_MERGE_REQUEST_LABELS =~ /.*test-bootstrap.*/'
- *full-ci
test-bootstrap:
stage: full-build
needs: [generate-hadrian-bootstrap-sources, source-tarball]
tags:
- x86_64-linux
image: "$DOCKER_IMAGE"
parallel: *bootstrap_matrix
dependencies: null
script:
- sudo chown ghc:ghc -R .
- mkdir test-bootstrap
- tar -xf ghc-*[0-9]-src.tar.xz -C test-bootstrap
- tar -xf ghc-*-testsuite.tar.xz -C test-bootstrap
- cp hadrian-bootstrap-sources-$GHC_VERSION.tar.gz test-bootstrap/ghc-*
- pushd test-bootstrap/ghc-*
- python3 ./hadrian/bootstrap/bootstrap.py -w $GHC --bootstrap-sources hadrian-bootstrap-sources-$GHC_VERSION.tar.gz
- export HADRIAN_PATH="$PWD/_build/bin/hadrian"
- .gitlab/ci.sh setup
# Bootstrapping should not depend on HAPPY or ALEX so set them to false
# so the build fails if they are invoked.
- unset HAPPY; unset ALEX
# Check the commands are not available, parens are crucial to start a subshell
- (! command -v alex --version)
- (! command -v happy --version)
- .gitlab/ci.sh configure
- .gitlab/ci.sh build_hadrian
- .gitlab/ci.sh test_hadrian
- popd
- rm -Rf test-bootstrap
variables:
# Don't record performance benchmarks
TEST_ENV: ""
BIN_DIST_NAME: "ghc-x86_64-deb12-linux"
BUILD_FLAVOUR: "validate"
NO_BOOT: "1"
rules:
- if: $NIGHTLY
- if: '$CI_MERGE_REQUEST_LABELS =~ /.*test-bootstrap.*/'
- *full-ci
- if: '$RELEASE_JOB == "yes"'
when: always
variables:
BUILD_FLAVOUR: "release"
############################################################ ############################################################
...@@ -940,73 +753,355 @@ source-tarball: ...@@ -940,73 +753,355 @@ source-tarball:
# Triggering jobs in the ghc/head.hackage project requires that we have a job # Triggering jobs in the ghc/head.hackage project requires that we have a job
# token for that repository. Furthermore the head.hackage CI job must have # token for that repository. Furthermore the head.hackage CI job must have
# access to an unprivileged access token with the ability to query the ghc/ghc # access to an unprivileged access token with the ability to query the ghc/ghc
# project such that it can find the job ID of the fedora27 job for the current # project such that it can find the job ID of the fedora33 job for the current
# pipeline. # pipeline.
#
# hackage-lint: Can be triggered on any MR, normal validate pipeline or nightly build.
# Runs head.hackage with -dlint and a slow-validate bindist
#
# hackage-label-lint: Triggered on MRs with "user-facing" label, runs the slow-validate
# head.hackage build with -dlint.
#
# nightly-hackage-lint: Runs automatically on nightly pipelines with slow-validate + dlint config.
#
# nightly-hackage-perf: Runs automatically on nightly pipelines with release build and eventlogging enabled.
#
# release-hackage-lint: Runs automatically on release pipelines with -dlint on a release bindist.
.hackage: .hackage:
<<: *only-default
stage: testing stage: testing
image: ghcci/x86_64-linux-deb9:0.2
tags:
- x86_64-linux
dependencies: []
variables: variables:
HEAD_HACKAGE_PROJECT_ID: "78" UPSTREAM_PROJECT_PATH: "$CI_PROJECT_PATH"
script: UPSTREAM_PROJECT_ID: "$CI_PROJECT_ID"
- bash .gitlab/start-head.hackage.sh UPSTREAM_PIPELINE_ID: "$CI_PIPELINE_ID"
RELEASE_JOB: "$RELEASE_JOB"
hackage: trigger:
project: "ghc/head.hackage"
branch: "upstream-testing"
strategy: "depend"
hackage-lint:
needs:
- job: x86_64-linux-deb12-numa-slow-validate
optional: true
artifacts: false
- job: nightly-x86_64-linux-deb12-numa-slow-validate
optional: true
artifacts: false
- job: nightly-aarch64-linux-deb12-validate
optional: true
artifacts: false
- job: aarch64-linux-deb12-validate
optional: true
artifacts: false
extends: .hackage extends: .hackage
variables:
SLOW_VALIDATE: 1
EXTRA_HC_OPTS: "-dlint"
# No for release jobs because there isn't a slow-valdate bindist. There is an
# automatic pipeline for release bindists (see release-hackage-lint)
rules:
- if: '$RELEASE_JOB != "yes"'
when: manual when: manual
hackage-label: hackage-label-lint:
needs:
- job: x86_64-linux-deb12-numa-slow-validate
optional: true
artifacts: false
- job: aarch64-linux-deb12-validate
optional: true
artifacts: false
extends: .hackage extends: .hackage
only: variables:
variables: SLOW_VALIDATE: 1
- $CI_MERGE_REQUEST_LABELS =~ /.*user-facing.*/ EXTRA_HC_OPTS: "-dlint"
rules:
nightly-hackage: - if: '$CI_MERGE_REQUEST_LABELS =~ /.*user-facing.*/'
# The head.hackage job is split into two jobs because enabling `-dlint`
# affects the total allocation numbers for the simplifier portion significantly.
nightly-hackage-lint:
needs:
- job: nightly-x86_64-linux-deb12-numa-slow-validate
optional: true
artifacts: false
- job: nightly-aarch64-linux-deb12-validate
optional: true
artifacts: false
rules:
- if: $NIGHTLY
variables:
NIGHTLY: "$NIGHTLY"
extends: .hackage extends: .hackage
only: variables:
variables: SLOW_VALIDATE: 1
- $NIGHTLY EXTRA_HC_OPTS: "-dlint"
nightly-hackage-perf:
needs:
- job: nightly-x86_64-linux-fedora33-release
optional: true
artifacts: false
- job: nightly-aarch64-linux-deb12-validate
optional: true
artifacts: false
rules:
- if: $NIGHTLY
variables:
NIGHTLY: "$NIGHTLY"
extends: .hackage
variables:
# Generate logs for nightly builds which include timing information.
EXTRA_HC_OPTS: "-ddump-timings"
# Ask head.hackage to generate eventlogs
EVENTLOGGING: 1
release-hackage-lint:
needs:
- job: release-x86_64-linux-fedora33-release
optional: true
artifacts: false
- job: release-aarch64-linux-deb12-release+no_split_sections
optional: true
artifacts: false
rules:
- if: '$RELEASE_JOB == "yes"'
extends: .hackage
# The ghcup metadata pipeline requires all prior jobs to
# pass. The hackage job can easily fail due to API changes
# or similar - so we allow it to fail.
allow_failure: true
variables:
# No slow-validate bindist on release pipeline
EXTRA_HC_OPTS: "-dlint"
############################################################
# Testing via test-primops
############################################################
# Triggering jobs in the ghc/test-primops project
.test-primops:
stage: testing
variables:
UPSTREAM_PROJECT_PATH: "$CI_PROJECT_PATH"
UPSTREAM_PROJECT_ID: "$CI_PROJECT_ID"
UPSTREAM_PIPELINE_ID: "$CI_PIPELINE_ID"
trigger:
project: "ghc/test-primops"
branch: "upstream-testing"
strategy: "depend"
.test-primops-validate-template:
needs:
- job: x86_64-linux-deb12-validate
artifacts: false
- job: aarch64-linux-deb12-validate
artifacts: false
- job: aarch64-darwin-validate
artifacts: false
- job: x86_64-darwin-validate
artifacts: false
extends: .test-primops
test-primops-label:
extends: .test-primops-validate-template
rules:
- if: '$CI_MERGE_REQUEST_LABELS =~ /.*test-primops.*/'
# We do not use *.full-ci here since that would imply running in nightly
# where we do not have the normal validate jobs. We have the -nightly job
# below to handle this case.
- if: '$CI_MERGE_REQUEST_LABELS =~ /.*full-ci.*/'
test-primops-nightly:
extends: .test-primops
needs:
- job: nightly-x86_64-linux-deb12-validate
artifacts: false
- job: nightly-aarch64-linux-deb12-validate
artifacts: false
- job: nightly-aarch64-darwin-validate
artifacts: false
- job: nightly-x86_64-darwin-validate
artifacts: false
rules:
- if: $NIGHTLY
test-primops-release:
extends: .test-primops
rules:
- if: '$RELEASE_JOB == "yes"'
############################################################ ############################################################
# Nofib testing # Nofib testing
# (Disabled: See #21859)
############################################################ ############################################################
perf-nofib: perf-nofib:
# Dependencies used by perf-nofib can't be built when some compiler changes
# aren't (yet) supported by head.hackage.
# Hence we allow this job to fail.
allow_failure: true
stage: testing stage: testing
dependencies: needs:
- release-x86_64-linux-deb9-dwarf - job: x86_64-linux-fedora33-release
image: "registry.gitlab.haskell.org/ghc/ci-images/x86_64-linux-deb9:$DOCKER_REV" optional: true
only: - job: nightly-x86_64-linux-fedora33-release
refs: optional: true
- merge_requests - job: release-x86_64-linux-fedora33-release
- master optional: true
- /ghc-[0-9]+\.[0-9]+/ image: "registry.gitlab.haskell.org/ghc/ci-images/x86_64-linux-fedora33:$DOCKER_REV"
rules:
- when: never
- *full-ci
tags: tags:
- x86_64-linux - x86_64-linux
before_script:
- cd nofib
- "cabal update --index=$HACKAGE_INDEX_STATE --project-file=cabal.project.head-hackage"
script: script:
- root=$(pwd)/ghc - root=$(pwd)/ghc
- | - |
mkdir tmp mkdir tmp
tar -xf ghc-*-x86_64-unknown-linux.tar.xz -C tmp tar -xf ../ghc-x86_64-linux-fedora33-release.tar.xz -C tmp
pushd tmp/ghc-*/ pushd tmp/ghc-*/
./configure --prefix=$root ./configure --prefix=$root
make install make install
popd popd
rm -Rf tmp rm -Rf tmp
- export BOOT_HC=$(which ghc)
- cabal update; cabal install -w $BOOT_HC regex-compat
- export PATH=$root/bin:$PATH - export PATH=$root/bin:$PATH
- make -C nofib boot mode=fast -j$CPUS - cabal install -w "$root/bin/ghc" --lib regex-compat unboxed-ref parallel random-1.2.1 --allow-newer --package-env local.env --project-file=cabal.project.head-hackage
- "make -C nofib EXTRA_RUNTEST_OPTS='-cachegrind +RTS -V0 -RTS' NoFibRuns=1 mode=fast -j$CPUS 2>&1 | tee nofib.log" - export GHC_ENVIRONMENT="$(pwd)/local.env"
- "make HC=$root/bin/ghc BOOT_HC=$root/bin/ghc boot mode=fast -j$CPUS"
- "make HC=$root/bin/ghc BOOT_HC=$root/bin/ghc EXTRA_RUNTEST_OPTS='-cachegrind +RTS -V0 -RTS' NoFibRuns=1 mode=fast -j$CPUS 2>&1 | tee nofib.log"
artifacts: artifacts:
expire_in: 12 week expire_in: 12 week
when: always when: always
paths: paths:
- nofib.log - nofib/nofib.log
############################################################
# Ad-hoc performance testing
############################################################
perf:
stage: testing
needs:
- job: x86_64-linux-fedora33-release
optional: true
- job: nightly-x86_64-linux-fedora33-release
optional: true
- job: release-x86_64-linux-fedora33-release
optional: true
dependencies: null
image: "registry.gitlab.haskell.org/ghc/ci-images/x86_64-linux-fedora33:$DOCKER_REV"
tags:
- x86_64-linux-perf
script:
- root=$(pwd)/ghc
- |
mkdir tmp
tar -xf ghc-x86_64-linux-fedora33-release.tar.xz -C tmp
pushd tmp/ghc-*/
./configure --prefix=$root
make install
popd
rm -Rf tmp
- export BOOT_HC=$(which ghc)
- export HC=$root/bin/ghc
- .gitlab/ci.sh perf_test
artifacts:
expire_in: 2 year
when: always
paths:
- out
rules:
- *full-ci
############################################################
# ABI testing
############################################################
abi-test:
stage: testing
needs:
- job: x86_64-linux-fedora33-release
optional: true
- job: nightly-x86_64-linux-fedora33-release
optional: true
- job: release-x86_64-linux-fedora33-release
optional: true
dependencies: null
image: "registry.gitlab.haskell.org/ghc/ci-images/x86_64-linux-fedora33:$DOCKER_REV"
rules:
- if: $CI_MERGE_REQUEST_ID
- if: '$CI_COMMIT_BRANCH == "master"'
- if: '$CI_COMMIT_BRANCH =~ /ghc-[0-9]+\.[0-9]+/'
tags:
- x86_64-linux
script:
- root=$(pwd)/ghc
- |
mkdir tmp
tar -xf ghc-x86_64-linux-fedora33-release.tar.xz -C tmp
pushd tmp/ghc-*/
./configure --prefix=$root
make install
popd
rm -Rf tmp
- export BOOT_HC=$(which ghc)
- export HC=$root/bin/ghc
- .gitlab/ci.sh abi_test
artifacts:
paths:
- out
rules:
- *full-ci
- if: '$CI_MERGE_REQUEST_LABELS =~ /.*test-abi.*/'
############################################################
# ghc-wasm-meta integration testing
############################################################
.ghc-wasm-meta:
stage: testing
variables:
UPSTREAM_GHC_PIPELINE_ID: $CI_PIPELINE_ID
UPSTREAM_GHC_PROJECT_ID: $CI_PROJECT_ID
rules:
- if: '$CI_MERGE_REQUEST_LABELS =~ /.*test-wasm.*/'
trigger:
project: haskell-wasm/ghc-wasm-meta
branch: master
strategy: depend
ghc-wasm-meta-gmp:
extends: .ghc-wasm-meta
needs:
- job: x86_64-linux-alpine3_20-wasm-cross_wasm32-wasi-release+host_fully_static+text_simdutf
artifacts: false
variables:
UPSTREAM_GHC_FLAVOUR: gmp
UPSTREAM_GHC_JOB_NAME: x86_64-linux-alpine3_20-wasm-cross_wasm32-wasi-release+host_fully_static+text_simdutf
ghc-wasm-meta-native:
extends: .ghc-wasm-meta
needs:
- job: x86_64-linux-alpine3_20-wasm-int_native-cross_wasm32-wasi-release+host_fully_static+text_simdutf
artifacts: false
variables:
UPSTREAM_GHC_FLAVOUR: native
UPSTREAM_GHC_JOB_NAME: x86_64-linux-alpine3_20-wasm-int_native-cross_wasm32-wasi-release+host_fully_static+text_simdutf
ghc-wasm-meta-unreg:
extends: .ghc-wasm-meta
needs:
- job: x86_64-linux-alpine3_20-wasm-unreg-cross_wasm32-wasi-release+host_fully_static+text_simdutf
artifacts: false
variables:
UPSTREAM_GHC_FLAVOUR: unreg
UPSTREAM_GHC_JOB_NAME: x86_64-linux-alpine3_20-wasm-unreg-cross_wasm32-wasi-release+host_fully_static+text_simdutf
############################################################ ############################################################
# Documentation deployment via GitLab Pages # Documentation deployment via GitLab Pages
...@@ -1014,16 +1109,17 @@ perf-nofib: ...@@ -1014,16 +1109,17 @@ perf-nofib:
pages: pages:
stage: deploy stage: deploy
dependencies: needs: [doc-tarball]
- doc-tarball dependencies: null
image: ghcci/x86_64-linux-deb9:0.2 image: ghcci/x86_64-linux-deb9:0.2
# Due to Windows allow_failure # See #18973
allow_failure: true allow_failure: true
tags: tags:
- x86_64-linux - x86_64-linux
script: script:
- mkdir -p public/doc - mkdir -p public/doc
- tar -xf haddock.html.tar.xz -C public/doc # haddock docs are not in the hadrian produce doc tarballs at the moment
# - tar -xf haddock.html.tar.xz -C public/doc
- tar -xf libraries.html.tar.xz -C public/doc - tar -xf libraries.html.tar.xz -C public/doc
- tar -xf users_guide.html.tar.xz -C public/doc - tar -xf users_guide.html.tar.xz -C public/doc
- | - |
...@@ -1032,9 +1128,201 @@ pages: ...@@ -1032,9 +1128,201 @@ pages:
<meta charset="UTF-8"> <meta charset="UTF-8">
<meta http-equiv="refresh" content="1; url=doc/"> <meta http-equiv="refresh" content="1; url=doc/">
EOF EOF
- cp -f index.html public/doc - cp -f docs/index.html public/doc
only: rules:
- master # N.B. only run this on ghc/ghc since the deployed pages are quite large
# and we only serve GitLab Pages for ghc/ghc.
- if: '$CI_COMMIT_BRANCH == "master" && $CI_PROJECT_NAMESPACE == "ghc"'
- if: '$CI_MERGE_REQUEST_LABELS =~ /.*publish-docs.*/'
artifacts: artifacts:
paths: paths:
- public - public
#############################################################
# Generation of GHCUp metadata
#############################################################
project-version:
stage: packaging
image: "registry.gitlab.haskell.org/ghc/ci-images/x86_64-linux-deb12:$DOCKER_REV"
tags:
- x86_64-linux
variables:
BUILD_FLAVOUR: default
script:
# Calculate the project version
- sudo chown ghc:ghc -R .
- .gitlab/ci.sh setup
- .gitlab/ci.sh configure
- echo "ProjectVersion=$(cat VERSION)" > version.sh
needs: []
dependencies: []
artifacts:
paths:
- version.sh
.ghcup-metadata:
stage: deploy
image: nixos/nix:2.25.2
dependencies: null
tags:
- x86_64-linux
variables:
BUILD_FLAVOUR: default
GIT_SUBMODULE_STRATEGY: "none"
before_script:
- echo "experimental-features = nix-command flakes" >> /etc/nix/nix.conf
# FIXME: See Note [Nix-in-Docker]
- echo "cores = $CPUS" >> /etc/nix/nix.conf
- echo "max-jobs = $CPUS" >> /etc/nix/nix.conf
- nix run nixpkgs#gnused -- -i -e 's/ nixbld//' /etc/nix/nix.conf
- nix-channel --update
- cat version.sh
# Calculate the project version
- . ./version.sh
# Download existing ghcup metadata for the correct year
- PipelineYear="$(date -d $CI_PIPELINE_CREATED_AT +%Y)"
- nix shell nixpkgs#wget -c wget "https://ghc.gitlab.haskell.org/ghcup-metadata/ghcup-nightlies-$PipelineYear-0.0.7.yaml" -O ghcup-0.0.7.yaml
- nix run .gitlab/generate-ci#generate-job-metadata
artifacts:
paths:
- metadata_test.yaml
- version.sh
ghcup-metadata-nightly:
extends: .ghcup-metadata
# Explicit needs for validate pipeline because we only need certain bindists
needs:
- job: nightly-x86_64-linux-fedora33-release
artifacts: false
- job: nightly-x86_64-linux-ubuntu24_04-validate
artifacts: false
- job: nightly-x86_64-linux-ubuntu22_04-validate
artifacts: false
- job: nightly-x86_64-linux-ubuntu20_04-validate
artifacts: false
- job: nightly-x86_64-linux-ubuntu18_04-validate
artifacts: false
- job: nightly-x86_64-linux-rocky8-validate
artifacts: false
- job: nightly-x86_64-darwin-validate
artifacts: false
- job: nightly-aarch64-darwin-validate
artifacts: false
- job: nightly-x86_64-windows-validate
artifacts: false
- job: nightly-x86_64-linux-alpine3_12-validate
artifacts: false
- job: nightly-x86_64-linux-alpine3_20-validate
artifacts: false
- job: nightly-x86_64-linux-deb9-validate
artifacts: false
- job: nightly-i386-linux-deb10-validate
artifacts: false
- job: nightly-i386-linux-deb12-validate
artifacts: false
- job: nightly-x86_64-linux-deb10-validate
artifacts: false
- job: nightly-aarch64-linux-deb10-validate
artifacts: false
- job: nightly-x86_64-linux-deb11-validate
artifacts: false
- job: nightly-x86_64-linux-deb12-validate
artifacts: false
- job: nightly-aarch64-linux-deb12-validate
artifacts: false
- job: nightly-aarch64-linux-alpine3_18-validate
artifacts: false
- job: source-tarball
artifacts: false
- job: project-version
script:
- nix shell -f .gitlab/rel_eng -c ghcup-metadata --metadata ghcup-0.0.7.yaml --date="$(date -d $CI_PIPELINE_CREATED_AT +%Y-%m-%d)" --pipeline-id="$CI_PIPELINE_ID" --version="$ProjectVersion" > "metadata_test.yaml"
rules:
- if: $NIGHTLY
# Update the ghcup metadata with information about this nightly pipeline
ghcup-metadata-nightly-push:
stage: deploy
image: "registry.gitlab.haskell.org/ghc/ci-images/x86_64-linux-fedora33:$DOCKER_REV"
dependencies: null
tags:
- x86_64-linux
variables:
BUILD_FLAVOUR: default
GIT_SUBMODULE_STRATEGY: "none"
needs:
- job: ghcup-metadata-nightly
artifacts: true
script:
- git clone https://gitlab.haskell.org/ghc/ghcup-metadata.git
- PipelineYear="$(date -d $CI_PIPELINE_CREATED_AT +%Y)"
- cp metadata_test.yaml "ghcup-metadata/ghcup-nightlies-$PipelineYear-0.0.7.yaml"
- cp metadata_test.yaml "ghcup-metadata/ghcup-nightlies-0.0.7.yaml"
- cd ghcup-metadata
- git config user.email "ghc-ci@gitlab-haskell.org"
- git config user.name "GHC GitLab CI"
- git remote add gitlab_origin https://oauth2:$PROJECT_PUSH_TOKEN@gitlab.haskell.org/ghc/ghcup-metadata.git
- git add .
- git commit -m "Update metadata"
- git push gitlab_origin HEAD:updates
rules:
# Only run the update on scheduled nightly pipelines, ie once a day
- if: $NIGHTLY && $CI_PIPELINE_SOURCE == "schedule" && $CI_COMMIT_BRANCH == "master"
ghcup-metadata-release:
# No explicit needs for release pipeline as we assume we need everything and everything will pass.
extends: .ghcup-metadata
script:
- nix shell -f .gitlab/rel_eng -c ghcup-metadata --release-mode --metadata ghcup-0.0.7.yaml --date="$(date -d $CI_PIPELINE_CREATED_AT +%Y-%m-%d)" --pipeline-id="$CI_PIPELINE_ID" --version="$ProjectVersion" --fragment
- nix shell -f .gitlab/rel_eng -c ghcup-metadata --release-mode --metadata ghcup-0.0.7.yaml --date="$(date -d $CI_PIPELINE_CREATED_AT +%Y-%m-%d)" --pipeline-id="$CI_PIPELINE_ID" --version="$ProjectVersion" > "metadata_test.yaml"
rules:
- if: '$RELEASE_JOB == "yes"'
.ghcup-metadata-testing:
stage: deploy
variables:
UPSTREAM_PROJECT_PATH: "$CI_PROJECT_PATH"
UPSTREAM_PROJECT_ID: "$CI_PROJECT_ID"
UPSTREAM_PIPELINE_ID: "$CI_PIPELINE_ID"
RELEASE_JOB: "$RELEASE_JOB"
# Do not inherit global variables (such as CONFIGURE_ARGS) as these take
# precedence over the variables defined in the downstream job.
inherit:
variables: false
trigger:
project: "ghc/ghcup-ci"
branch: "upstream-testing"
strategy: "depend"
forward:
yaml_variables: true
pipeline_variables: false
ghcup-metadata-testing-nightly:
needs:
- job: ghcup-metadata-nightly
artifacts: false
extends: .ghcup-metadata-testing
variables:
NIGHTLY: "$NIGHTLY"
UPSTREAM_JOB_NAME: "ghcup-metadata-nightly"
rules:
- if: '$NIGHTLY == "1"'
ghcup-metadata-testing-release:
needs:
- job: ghcup-metadata-release
artifacts: false
extends: .ghcup-metadata-testing
variables:
UPSTREAM_JOB_NAME: "ghcup-metadata-release"
rules:
- if: '$RELEASE_JOB == "yes"'
when: manual
# Where the GitLab happens
## Updating PERF_NOTES_PUSH_CREDENTIALS
This CI variable is used by test-metrics.sh to push performance data as a git
note to https://gitlab.haskell.org/ghc/ghc-performance-notes.
The current token will expire on 2025-07-02.
### STEPS
Set and fetch the updated token:
```
GITLAB_WRITE=<Your Gitlab API token>
one_year_later="$(date --date='1 year' --iso-8601)"
curl -X POST --header "PRIVATE-TOKEN: $GITLAB_WRITE" -H "Content-Type: application/json" \
  --data "{\"name\":\"test-metrics.sh\", \"scopes\":[\"write_repository\"], \"expires_at\":\"$one_year_later\"}" \
https://gitlab.haskell.org/api/v4/projects/117/access_tokens \
| jq .token
```
Update the variable:
```
GITLAB_WRITE=<Your Gitlab API token>
NEW_VALUE=<Output from the above>
curl --fail-with-body --request PUT --header "PRIVATE-TOKEN: $GITLAB_WRITE" \
"https://gitlab.haskell.org/api/v4/projects/1/variables/PERF_NOTES_PUSH_CREDENTIALS" \
--form "value=$NEW_VALUE"
```
#!/usr/bin/env bash
# shellcheck disable=SC2230
# shellcheck disable=SC1090

# This is the primary driver of the GitLab CI infrastructure.
# Run `ci.sh usage` for usage information.

set -Eeuo pipefail

# Configuration:
# N.B. You may want to also update the index-state in hadrian/cabal.project.
HACKAGE_INDEX_STATE="2025-01-27T17:45:32Z"
MIN_HAPPY_VERSION="1.20"
MIN_ALEX_VERSION="3.2.6"

TOP="$(pwd)"
if [ ! -d "$TOP/.gitlab" ]; then
  # NOTE(review): this only warns and carries on; confirm whether it should
  # `exit 1` when run outside a GHC checkout.
  echo "This script expects to be run from the root of a ghc checkout"
fi

# Directory in which the cabal store/index is preserved between CI jobs.
CABAL_CACHE="$TOP/${CABAL_CACHE:-cabal-cache}"

# Shared helpers (info/error/fail/run/start_section/end_section, ...) —
# presumably defined in common.sh; that file is not visible here.
source "$TOP/.gitlab/common.sh"
function time_it() {
  # Run a command, report how long it took, and append the timing to
  # ci_timings.txt.
  #
  #   $1   - label used in the report and the timings file
  #   rest - the command (and its arguments) to run
  #
  # Returns the command's exit status.
  local name="$1"
  shift
  local start=$(date +%s)
  local res=0
  set +e
  # Run in a subshell with -e so a failure aborts the timed command but
  # not this script; errexit is restored immediately afterwards.
  # "$@" is quoted so arguments containing spaces survive intact.
  ( set -e ; "$@" )
  res=$?
  set -e
  local end=$(date +%s)
  # Arithmetic expansion instead of the external `expr`.
  local delta=$((end - start))
  echo "$name took $delta seconds"

  if [[ ! -e ci_timings.txt ]]; then
    echo "=== TIMINGS ===" > ci_timings.txt
  fi
  printf "%15s | %s\n" "$name" "$delta" >> ci_timings.txt

  return $res
}
function usage() {
  # Print the help text for this script.
  # Fix: HAPPY previously claimed to be the "Path of alex executable".
  cat <<EOF
$0 - GHC continuous integration driver

Common Modes:

  usage             Show this usage message.
  setup             Prepare environment for a build.
  configure         Run ./configure.
  clean             Clean the tree
  shell             Run an interactive shell with a configured build environment.
  save_test_output  Generate unexpected-test-output.tar.gz
  save_cache        Preserve the cabal cache

Hadrian build system
  build_hadrian     Build GHC via the Hadrian build system
  test_hadrian      Test GHC via the Hadrian build system

Environment variables affecting both build systems:

  CROSS_TARGET      Triple of cross-compilation target.
  VERBOSE           Set to non-empty for verbose build output
  RUNTEST_ARGS      Arguments passed to runtest.py
  MSYSTEM           (Windows-only) Which platform to build from (CLANG64).
  IGNORE_PERF_FAILURES
                    Whether to ignore perf failures (one of "increases",
                    "decreases", or "all")
  HERMETIC          Take measures to avoid looking at anything in \$HOME
  CONFIGURE_ARGS    Arguments passed to configure script.
  CONFIGURE_WRAPPER Wrapper for the configure script (e.g. Emscripten's emconfigure).
  ENABLE_NUMA       Whether to enable numa support for the build (disabled by default)
  INSTALL_CONFIGURE_ARGS
                    Arguments passed to the binary distribution configure script
                    during installation of test toolchain.
  NIX_SYSTEM        On Darwin, the target platform of the desired toolchain
                    (either "x86-64-darwin" or "aarch-darwin")
  NO_BOOT           Whether to run ./boot or not, used when testing the source dist

Environment variables determining build configuration of Hadrian system:

  BUILD_FLAVOUR     Which flavour to build.
  REINSTALL_GHC     Build and test a reinstalled "stage3" ghc built using cabal-install
                    This tests the "reinstall" configuration
  CROSS_EMULATOR    The emulator to use for testing of cross-compilers.

Environment variables determining bootstrap toolchain (Linux):

  GHC               Path of GHC executable to use for bootstrapping.
  CABAL             Path of cabal-install executable to use for bootstrapping.
  ALEX              Path of alex executable to use for bootstrapping.
  HAPPY             Path of happy executable to use for bootstrapping.

Environment variables determining bootstrap toolchain (non-Linux):

  GHC_VERSION       Which GHC version to fetch for bootstrapping.
  CABAL_INSTALL_VERSION
                    Cabal-install version to fetch for bootstrapping.
EOF
}
function setup_locale() {
  # Select a usable UTF-8 locale and export LANG/LC_ALL, trying the
  # candidates in distro-preference order.
  # Musl doesn't provide locale support at all...
  if ! which locale > /dev/null; then
    info "No locale executable. Skipping locale setup..."
    return
  fi

  # BSD grep terminates early with -q, consequently locale -a will get a
  # SIGPIPE and the pipeline will fail with pipefail.
  shopt -o -u pipefail

  # Debian spells it C.UTF-8, Fedora C.utf8; Centos lacks C.UTF-8
  # entirely, so fall back to the en_US spellings there.
  local candidate
  local chosen=""
  for candidate in C.UTF-8 C.utf8 en_US.UTF-8 en_US.utf8; do
    if locale -a | grep -q "$candidate"; then
      chosen="$candidate"
      break
    fi
  done

  if [[ -z "$chosen" ]]; then
    error "Failed to find usable locale"
    info "Available locales:"
    locale -a
    fail "No usable locale, aborting..."
  fi

  export LANG="$chosen"
  info "Using locale $LANG..."
  export LC_ALL=$LANG
  shopt -o -s pipefail
}
function mingw_init() {
  # Prepare the MSYS2/mingw build environment: sets target_triple and
  # boot_triple, extends PATH with the mingw toolchain, and pins the
  # Python/Sphinx executables to use. Fails on unsupported MSYSTEM values.
  case "$MSYSTEM" in
    CLANG64)
      target_triple="x86_64-unknown-mingw32"
      boot_triple="x86_64-unknown-mingw32" # triple of bootstrap GHC
      ;;
    *)
      fail "win32-init: Unknown MSYSTEM $MSYSTEM"
      ;;
  esac

  # Bring mingw toolchain into PATH.
  # This is extracted from /etc/profile since this script inexplicably fails to
  # run under gitlab-runner.
  # shellcheck disable=SC1091
  source /etc/msystem
  MINGW_MOUNT_POINT="${MINGW_PREFIX}"
  PATH="$MINGW_MOUNT_POINT/bin:$PATH"

  # We always use mingw64 Python to avoid path length issues like #17483.
  export PYTHON="/mingw64/bin/python3"
  # And need to use sphinx-build from the environment
  export SPHINXBUILD="/mingw64/bin/sphinx-build.exe"
}
# This will contain GHC's local native toolchain
toolchain="$TOP/toolchain"
mkdir -p "$toolchain/bin"
PATH="$toolchain/bin:$PATH"

# Destination for performance metrics collected during the build/tests.
export METRICS_FILE="$TOP/performance-metrics.tsv"

# CPU count as detected by the tree's helper script.
cores="$(mk/detect-cpu-count.sh)"

# Use a local temporary directory to ensure that concurrent builds don't
# interfere with one another
mkdir -p "$TOP/tmp"
export TMP="$TOP/tmp"
export TEMP="$TOP/tmp"
function show_tool() {
  # Log the resolved path and version of one toolchain tool.
  # $1 - name of the variable (e.g. GHC) whose value is the tool's path.
  local tool="$1"
  local tool_path="${!tool}"
  info "$tool = $tool_path"
  # Deliberately unquoted, as in the rest of this script, so a value
  # containing options still word-splits.
  $tool_path --version
}
function set_toolchain_paths() {
  # Decide where the bootstrap toolchain comes from on this platform and
  # export GHC/CABAL/HAPPY/ALEX accordingly.
  #   env       - tools already on PATH (Linux Docker images)
  #   nix       - built via .gitlab/darwin/toolchain.nix (Darwin)
  #   extracted - fetched into $toolchain by setup_toolchain (everything else)
  case "$(uname -m)-$(uname)" in
    # Linux toolchains are included in the Docker image
    *-Linux) toolchain_source="env" ;;
    # Darwin toolchains are provided via .gitlab/darwin/toolchain.nix
    *-Darwin) toolchain_source="nix" ;;
    *) toolchain_source="extracted" ;;
  esac

  case "$toolchain_source" in
    extracted)
      # These are populated by setup_toolchain
      GHC="$toolchain/bin/ghc$exe"
      CABAL="$toolchain/bin/cabal$exe"
      HAPPY="$toolchain/bin/happy$exe"
      ALEX="$toolchain/bin/alex$exe"
      # FreeBSD uses the system-packaged GHC instead.
      if [ "$(uname)" = "FreeBSD" ]; then
        GHC=/usr/local/bin/ghc
      fi
      ;;
    nix)
      # Build the nix toolchain once and cache the resulting env script
      # as toolchain.sh; subsequent invocations just source it.
      if [[ ! -f toolchain.sh ]]; then
        case "$NIX_SYSTEM" in
          x86_64-darwin|aarch64-darwin) ;;
          *) fail "unknown NIX_SYSTEM" ;;
        esac
        info "Building toolchain for $NIX_SYSTEM"
        nix-build --quiet .gitlab/darwin/toolchain.nix --argstr system "$NIX_SYSTEM" -o toolchain.sh
      fi
      source toolchain.sh
      ;;
    env)
      # These are generally set by the Docker image but
      # we provide these handy fallbacks in case the
      # script isn't run from within a GHC CI docker image.
      : ${GHC:=$(which ghc)}
      : ${CABAL:=$(which cabal)}
      : ${HAPPY:=$(which happy)}
      : ${ALEX:=$(which alex)}
      ;;
    *) fail "bad toolchain_source"
  esac

  export GHC
  export CABAL
  export HAPPY
  export ALEX

  # The wasm cross toolchain ships its own environment script.
  if [[ "${CROSS_TARGET:-}" == *"wasm"* ]]; then
    source "/home/ghc/.ghc-wasm/env"
  fi
}
function cabal_update() {
  # Refresh the Hackage package index, pinned to HACKAGE_INDEX_STATE for
  # reproducible builds.
  # In principle -w shouldn't be necessary here but with
  # cabal-install 3.8.1.0 it is, due to cabal#8447.
  run "$CABAL" update -w "$GHC" "hackage.haskell.org,${HACKAGE_INDEX_STATE}"
}
# Prepare the build environment: restore the cabal cache, fetch/extract the
# bootstrap toolchain where needed, refresh the package index, configure
# git, and log the toolchain versions.
function setup() {
  if [ -d "$CABAL_CACHE" ]; then
    info "Extracting cabal cache from $CABAL_CACHE to $CABAL_DIR..."
    mkdir -p "$CABAL_DIR"
    cp -Rf "$CABAL_CACHE"/* "$CABAL_DIR"
  fi

  # Only the "extracted" toolchain source needs an explicit setup step;
  # "env" and "nix" are handled by set_toolchain_paths.
  case $toolchain_source in
    extracted) time_it "setup" setup_toolchain ;;
    *) ;;
  esac

  cabal_update || fail "cabal update failed"

  # Make sure that git works
  git config user.email "ghc-ci@gitlab-haskell.org"
  git config user.name "GHC GitLab CI"

  info "====================================================="
  info "Toolchain versions"
  info "====================================================="
  show_tool GHC
  show_tool CABAL
  show_tool HAPPY
  show_tool ALEX

  info "====================================================="
  info "ghc --info"
  info "====================================================="
  $GHC --info
}
function fetch_ghc() {
  # Download the bootstrap GHC bindist (version $GHC_VERSION for triple
  # $boot_triple) and install it into $toolchain, unless $GHC already
  # points at an existing executable. Fails if neither GHC nor
  # GHC_VERSION is set.
  if [ ! -e "$GHC" ]; then
    local v="$GHC_VERSION"
    if [[ -z "$v" ]]; then
      # Fix: message previously read "are not set" (double negative).
      fail "neither GHC nor GHC_VERSION is set"
    fi

    start_section fetch-ghc "Fetch GHC"
    url="https://downloads.haskell.org/~ghc/${GHC_VERSION}/ghc-${GHC_VERSION}-${boot_triple}.tar.xz"
    info "Fetching GHC binary distribution from $url..."
    curl "$url" > ghc.tar.xz || fail "failed to fetch GHC binary distribution"
    $TAR -xJf ghc.tar.xz || fail "failed to extract GHC binary distribution"
    case "$(uname)" in
      MSYS_*|MINGW*)
        # Windows bindists are relocatable: copy contents into place.
        cp -r ghc-${GHC_VERSION}*/* "$toolchain"
        ;;
      *)
        # Elsewhere use the bindist's own configure/install machinery.
        pushd ghc-${GHC_VERSION}*
        ./configure --prefix="$toolchain"
        "$MAKE" install
        popd
        ;;
    esac
    rm -Rf "ghc-${GHC_VERSION}" ghc.tar.xz
    end_section fetch-ghc
  fi
}
function fetch_cabal() {
  # Download a cabal-install binary distribution (version
  # $CABAL_INSTALL_VERSION) and install the executable where $CABAL
  # points, unless $CABAL already exists. Fails if neither CABAL nor
  # CABAL_INSTALL_VERSION is set.
  if [ ! -e "$CABAL" ]; then
    local v="$CABAL_INSTALL_VERSION"
    if [[ -z "$v" ]]; then
      # Fix: message previously read "are not set" (double negative).
      fail "neither CABAL nor CABAL_INSTALL_VERSION is set"
    fi

    start_section fetch-cabal "Fetch Cabal"
    case "$(uname)" in
      # N.B. Windows uses zip whereas all others use .tar.xz
      MSYS_*|MINGW*)
        case "$MSYSTEM" in
          CLANG64) cabal_arch="x86_64" ;;
          *) fail "unknown MSYSTEM $MSYSTEM" ;;
        esac
        url="https://downloads.haskell.org/~cabal/cabal-install-$v/cabal-install-$v-$cabal_arch-windows.zip"
        info "Fetching cabal binary distribution from $url..."
        curl "$url" > "$TMP/cabal.zip"
        unzip "$TMP/cabal.zip"
        mv cabal.exe "$CABAL"
        ;;
      *)
        local base_url="https://downloads.haskell.org/~cabal/cabal-install-$v/"
        case "$(uname)" in
          Darwin) cabal_url="$base_url/cabal-install-$v-x86_64-apple-darwin17.7.0.tar.xz" ;;
          FreeBSD) cabal_url="$base_url/cabal-install-$v-x86_64-freebsd14.tar.xz" ;;
          *) fail "don't know where to fetch cabal-install for $(uname)"
        esac
        echo "Fetching cabal-install from $cabal_url"
        curl "$cabal_url" > cabal.tar.xz
        # Some bindists contain a bare `cabal` binary, others a wrapping
        # directory; inspect the archive's first entry to tell which.
        tmp="$(tar -tJf cabal.tar.xz | head -n1)"
        $TAR -xJf cabal.tar.xz
        # Check if the bindist has directory structure
        if [[ "$tmp" = "cabal" ]]; then
          mv cabal "$toolchain/bin"
        else
          mv "$tmp/cabal" "$toolchain/bin"
        fi
        ;;
    esac
    end_section fetch-cabal
  fi
}
# For non-Docker platforms we prepare the bootstrap toolchain
# here. For Docker platforms this is done in the Docker image
# build.
function setup_toolchain() {
  # Fetch GHC and cabal-install, then build happy and alex with them into
  # $toolchain/bin.
  start_section setup-toolchain "Setup toolchain"
  fetch_ghc
  fetch_cabal
  cabal_update

  # Deliberately a plain string (not an array): it is word-split into a
  # command plus options when invoked below.
  local cabal_install="$CABAL v2-install \
    --with-compiler=$GHC \
    --index-state=$HACKAGE_INDEX_STATE \
    --installdir=$toolchain/bin \
    --ignore-project \
    --overwrite-policy=always"

  # Avoid symlinks on Windows
  case "$(uname)" in
    MSYS_*|MINGW*) cabal_install="$cabal_install --install-method=copy" ;;
    *) ;;
  esac

  info "Building happy..."
  $cabal_install happy --constraint="happy>=$MIN_HAPPY_VERSION"

  info "Building alex..."
  $cabal_install alex --constraint="alex>=$MIN_ALEX_VERSION"

  end_section setup-toolchain
}
# Restore every git submodule to a pristine, freshly-checked-out state.
# No-op when the working directory is not a git checkout (e.g. a source
# tarball).
function cleanup_submodules() {
  start_section clean-submodules "Clean submodules"
  if [ ! -d .git ]; then
    info "Not cleaning submodules, not in a git repo"
  else
    info "Cleaning submodules..."
    # On Windows submodules can inexplicably get into funky states where git
    # believes that the submodule is initialized yet its associated repository
    # is not valid. Avoid failing in this case with the following insanity.
    git submodule sync || git submodule deinit --force --all
    git submodule update --init
    git submodule foreach git clean -xdf
  fi
  end_section clean-submodules
}
# Run GHC's ./configure step, optionally through CONFIGURE_WRAPPER
# (e.g. emconfigure for the JS backend). Boots the source tree first
# unless NO_BOOT is set. Extra flags come from $CONFIGURE_ARGS plus
# cross-target/NUMA/tool overrides taken from the environment.
function configure() {
  case "${CONFIGURE_WRAPPER:-}" in
    # The emscripten environment must be sourced before configuring.
    emconfigure) source "$EMSDK/emsdk_env.sh" ;;
    *) ;;
  esac

  if [[ -z "${NO_BOOT:-}" ]]; then
    start_section "booting"
    run python3 boot
    end_section "booting"
  fi

  # Word-split the user-supplied CONFIGURE_ARGS string into an array.
  read -r -a args <<< "${CONFIGURE_ARGS:-}"
  if [[ -n "${target_triple:-}" ]]; then
    args+=("--target=$target_triple")
  fi
  if [[ -n "${ENABLE_NUMA:-}" ]]; then
    args+=("--enable-numa")
  else
    args+=("--disable-numa")
  fi
  if [[ -n ${HAPPY:-} ]]; then
    args+=("HAPPY=$HAPPY")
  fi
  if [[ -n ${ALEX:-} ]]; then
    args+=("ALEX=$ALEX")
  fi

  start_section "configuring"
  # See https://stackoverflow.com/questions/7577052 for a rationale for the
  # args[@] symbol-soup below.
  run ${CONFIGURE_WRAPPER:-} ./configure \
    --enable-tarballs-autodownload \
    "${args[@]+"${args[@]}"}" \
    GHC="$GHC" \
    || ( cat config.log; fail "configure failed" )
  end_section "configuring"
}
# Pull historical performance metrics (stored as git notes) so the
# testsuite driver has a baseline to compare against.
function fetch_perf_notes() {
  info "Fetching perf notes..."
  local metrics_script="$TOP/.gitlab/test-metrics.sh"
  "$metrics_script" pull
}
# Upload freshly-collected performance metrics as git notes.
# Skipped unless TEST_ENV is set, and skipped for cross-compiled
# targets we cannot actually run tests for (everything except the
# javascript and wasm backends).
function push_perf_notes() {
  [[ -n "${TEST_ENV:-}" ]] || return 0

  # TODO: Remove this check, see #25299
  # It is easy to forget to update this when testing a new cross platform
  local cross="${CROSS_TARGET:-}"
  if [[ -n "$cross" ]]; then
    case "$cross" in
      *javascript*|*wasm*) ;;
      *)
        info "Can't test cross-compiled build."
        return 0
        ;;
    esac
  fi

  info "Pushing perf notes..."
  "$TOP/.gitlab/test-metrics.sh" push
}
# Figure out which commit should be used by the testsuite driver as a
# performance baseline. See Note [The CI Story].
function determine_metric_baseline() {
  # The merge-request diff base, if GitLab provides one, is the natural
  # performance baseline; otherwise leave PERF_BASELINE_COMMIT unset.
  local base="${CI_MERGE_REQUEST_DIFF_BASE_SHA:-}"
  if [ -z "$base" ]; then
    return 0
  fi
  export PERF_BASELINE_COMMIT="$base"
  info "Using $PERF_BASELINE_COMMIT for performance metric baseline..."
}
# Run the given command with PATH restricted to the Windows system
# directories, ensuring that GHC on Windows doesn't have any dynamic
# dependencies on msys2 (DLLs that would be missing outside the msys2
# shell). No-op on non-Windows platforms.
function check_msys2_deps() {
  case "$(uname)" in
    MSYS_*|MINGW*)
      local sysroot
      sysroot="$(cygpath "$SYSTEMROOT")"
      # N.B. PATH entries are ':'-separated in the msys2 bash; the previous
      # ';' here silently fused $sysroot and $sysroot/Wbem into one bogus
      # entry. Also quote "$@" so argument boundaries survive.
      PATH="$sysroot/System32:$sysroot:$sysroot/Wbem" "$@" \
          || fail "'$*' failed; there may be unwanted dynamic dependencies."
      ;;
  esac
}
# If RELEASE_JOB = yes then we skip builds with a validate flavour.
# This has the effect of
# (1) Skipping validate jobs when trying to do release builds
# (2) Ensured we don't accidentally build release builds with validate flavour.
#
# We should never try to build a validate build in a release pipeline so this is
# very defensive in case we have made a mistake somewhere.
function check_release_build() {
  # Bail out of the whole job (successfully) when a validate flavour
  # sneaks into a release pipeline.
  local flavour="${BUILD_FLAVOUR:-}"
  if [ "${RELEASE_JOB:-}" == "yes" ] && [[ "$flavour" == *"validate"* ]]; then
    info "Exiting build because this is a validate build in a release job"
    exit 0
  fi
}
# Build GHC via hadrian and produce a binary distribution named
# "$BIN_DIST_NAME.tar.xz" — except for REINSTALL_GHC jobs, which build
# the stage-cabal compiler instead and produce no bindist tarball.
function build_hadrian() {
  start_section build-hadrian "Build via Hadrian"
  if [ -z "${BIN_DIST_NAME:-}" ]; then
    fail "BIN_DIST_NAME not set"
  fi
  if [ -n "${BIN_DIST_PREP_TAR_COMP:-}" ]; then
    fail "BIN_DIST_PREP_TAR_COMP must not be set for hadrian (you mean BIN_DIST_NAME)"
  fi

  check_release_build

  # Just to be sure, use the same hackage index state when building Hadrian.
  echo "index-state: $HACKAGE_INDEX_STATE" > hadrian/cabal.project.local

  # We can safely enable parallel compression for x64. By the time
  # hadrian calls tar/xz to produce bindist, there's no other build
  # work taking place.
  if [[ "${CI_JOB_NAME:-}" != *"i386"* ]]; then
    export XZ_OPT="${XZ_OPT:-} -T$cores"
  fi

  if [[ -n "${REINSTALL_GHC:-}" ]]; then
    run_hadrian build-cabal -V
  else
    case "$(uname)" in
      MSYS_*|MINGW*)
        # Windows bindists are built relocatable.
        run_hadrian test:all_deps reloc-binary-dist -V
        mv _build/reloc-bindist/ghc*.tar.xz "$BIN_DIST_NAME.tar.xz"
        ;;
      *)
        run_hadrian test:all_deps binary-dist -V
        mv _build/bindist/ghc*.tar.xz "$BIN_DIST_NAME.tar.xz"
        ;;
    esac
  fi
  end_section build-hadrian
}
# run's `make DESTDIR=$1 install` and then
# merges the file tree to the actual destination $2,
# ensuring that `DESTDIR` is properly honoured by the
# build system
function make_install_destdir() {
  # $1: staging directory to pass to `make install` as DESTDIR
  # $2: the real installation prefix the tree is merged into
  local destdir=$1
  local instdir=$2

  mkdir -p "$destdir"
  mkdir -p "$instdir"
  run "$MAKE" DESTDIR="$destdir" install || fail "make install failed"
  # If anything appeared directly under $instdir, the build system wrote
  # past DESTDIR straight into the prefix — i.e. DESTDIR was not honoured.
  # This is why the emptiness check runs *after* `make install`.
  # check for empty dir portably
  # https://superuser.com/a/667100
  if find "$instdir" -mindepth 1 -maxdepth 1 | read; then
    fail "$instdir is not empty!"
  fi
  info "merging file tree from $destdir to $instdir"
  cp -a "$destdir/$instdir"/* "$instdir"/
  # Refresh the package database cache now that it lives at its final path.
  "$instdir"/bin/${cross_prefix}ghc-pkg recache
}
# install the binary distribution in directory $1 to $2.
function install_bindist() {
  # $1: directory containing the unpacked binary distribution
  # $2: installation prefix to install it to
  start_section install-bindist "Install bindist"
  case "${CONFIGURE_WRAPPER:-}" in
    emconfigure) source "$EMSDK/emsdk_env.sh" ;;
    *) ;;
  esac
  local bindist="$1"
  local instdir="$2"
  pushd "$bindist"
  case "$(uname)" in
    MSYS_*|MINGW*)
      # Windows bindists are relocatable: a plain copy suffices.
      mkdir -p "$instdir"
      cp -a * "$instdir"
      ;;
    *)
      read -r -a args <<< "${INSTALL_CONFIGURE_ARGS:-}"

      if [[ "${CROSS_TARGET:-no_cross_target}" =~ "mingw" ]]; then
        # We suppose that host target = build target.
        # By the fact above it is clearly turning out which host value is
        # for currently built compiler.
        # The fix for #21970 will probably remove this if-branch.
        local -r CROSS_HOST_GUESS=$($SHELL ./config.guess)
        args+=( "--target=$CROSS_TARGET" "--host=$CROSS_HOST_GUESS" )

      # FIXME: The bindist configure script shouldn't need to be reminded of
      # the target platform. See #21970.
      elif [ -n "${CROSS_TARGET:-}" ]; then
        args+=( "--target=$CROSS_TARGET" "--host=$CROSS_TARGET" )
      fi

      run ${CONFIGURE_WRAPPER:-} ./configure \
        --prefix="$instdir" \
        "${args[@]+"${args[@]}"}" || fail "bindist configure failed"
      make_install_destdir "$TOP"/destdir "$instdir"

      # And check the `--info` of the installed compiler, sometimes useful in CI log.
      "$instdir/bin/${cross_prefix}ghc$exe" --info
      ;;
  esac
  popd
  end_section install-bindist
}
# Run the testsuite against the hadrian-built compiler. The strategy
# depends on the job kind: JS/wasm cross builds, other cross builds,
# REINSTALL_GHC builds and ordinary bindist builds each take a
# different branch below.
function test_hadrian() {
  start_section test-hadrian "Test via Hadrian"
  check_msys2_deps _build/stage1/bin/ghc --version
  check_release_build

  # Ensure that statically-linked builds are actually static
  if [[ "${BUILD_FLAVOUR}" = *static* ]]; then
    bad_execs=""
    for binary in _build/stage1/bin/*; do
      # `ldd` exiting successfully means the binary has dynamic references.
      if ldd "${binary}" &> /dev/null; then
        warn "${binary} is not static!"
        ldd "${binary}"
        echo
        bad_execs="$bad_execs $binary"
      fi
    done
    if [ -n "$bad_execs" ]; then
      fail "the following executables contain dynamic-object references: $bad_execs"
    fi
  fi

  if [[ "${CROSS_EMULATOR:-}" == "NOT_SET" ]]; then
    info "Cannot test cross-compiled build without CROSS_EMULATOR being set."
    return
  # special case for JS backend
  elif [ -n "${CROSS_TARGET:-}" ] && [ "${CROSS_EMULATOR:-}" == "js-emulator" ]; then
    # The JS backend doesn't support CROSS_EMULATOR logic yet
    unset CROSS_EMULATOR
    # run "hadrian test" directly, not using the bindist, even though it did get installed.
    # This is a temporary solution, See !9515 for the status of hadrian support.
    run_hadrian \
      test \
      --summary-junit=./junit.xml \
      --test-have-intree-files \
      --docs=none \
      "runtest.opts+=${RUNTEST_ARGS:-}" \
      "runtest.opts+=--unexpected-output-dir=$TOP/unexpected-test-output" \
      || fail "cross-compiled hadrian main testsuite"
  elif [[ -n "${CROSS_TARGET:-}" ]] && [[ "${CROSS_TARGET:-}" == *"wasm"* ]]; then
    run_hadrian \
      test \
      --summary-junit=./junit.xml \
      "runtest.opts+=${RUNTEST_ARGS:-}" \
      "runtest.opts+=--unexpected-output-dir=$TOP/unexpected-test-output" \
      || fail "hadrian main testsuite targetting $CROSS_TARGET"
  elif [ -n "${CROSS_TARGET:-}" ]; then
    # Other cross targets: install the bindist, then smoke-test a
    # compiled "hello world" under the emulator.
    local instdir="$TOP/_build/install"
    local test_compiler="$instdir/bin/${cross_prefix}ghc$exe"
    install_bindist _build/bindist/ghc-*/ "$instdir"
    echo 'main = putStrLn "hello world"' > expected
    run "$test_compiler" -package ghc "$TOP/.gitlab/hello.hs" -o hello
    if [[ "${CROSS_TARGET:-no_cross_target}" =~ "mingw" ]]; then
      ${CROSS_EMULATOR:-} ./hello.exe > actual
    else
      ${CROSS_EMULATOR:-} ./hello > actual
    fi
    # We have to use `-w` to make the test more stable across supported
    # platforms, i.e. Windows:
    # $ cmp expected actual
    # differ: byte 30, line 1
    # $ diff expected actual
    # 1c1
    # < main = putStrLn "hello world"
    # ---
    # > main = putStrLn "hello world"
    run diff -w expected actual
  elif [[ -n "${REINSTALL_GHC:-}" ]]; then
    run_hadrian \
      test \
      --test-root-dirs=testsuite/tests/stage1 \
      --test-compiler=stage-cabal \
      --test-root-dirs=testsuite/tests/perf \
      --test-root-dirs=testsuite/tests/typecheck \
      "runtest.opts+=${RUNTEST_ARGS:-}" \
      "runtest.opts+=--unexpected-output-dir=$TOP/unexpected-test-output" \
      || fail "hadrian cabal-install test"
  else
    local instdir="$TOP/_build/install"
    local test_compiler="$instdir/bin/${cross_prefix}ghc$exe"
    install_bindist _build/bindist/ghc-*/ "$instdir"

    # Run the stage1 testsuite on non-Windows, non-cross builds.
    if [[ "${WINDOWS_HOST}" == "no" ]] && [ -z "${CROSS_TARGET:-}" ]
    then
      run_hadrian \
        test \
        --test-root-dirs=testsuite/tests/stage1 \
        --test-compiler=stage1 \
        "runtest.opts+=${RUNTEST_ARGS:-}" || fail "hadrian stage1 test"
      info "STAGE1_TEST=$?"
    fi

    # Ensure the resulting compiler has the correct bignum-flavour,
    # except for cross-compilers as they may not support the interpreter
    if [ -z "${CROSS_TARGET:-}" ]
    then
      test_compiler_backend=$(${test_compiler} -e "GHC.Num.Backend.backendName")
      if [ $test_compiler_backend != "\"$BIGNUM_BACKEND\"" ]; then
        fail "Test compiler has a different BIGNUM_BACKEND ($test_compiler_backend) than requested ($BIGNUM_BACKEND)"
      fi
    fi

    # If we are doing a release job, check the compiler can build a profiled executable
    if [ "${RELEASE_JOB:-}" == "yes" ]; then
      echo "main = print ()" > proftest.hs
      run ${test_compiler} -prof proftest.hs || fail "hadrian profiled libs test"
      rm proftest.hs
    fi

    run_hadrian \
      test \
      --summary-junit=./junit.xml \
      --test-have-intree-files \
      --test-compiler="${test_compiler}" \
      "runtest.opts+=${RUNTEST_ARGS:-}" \
      "runtest.opts+=--unexpected-output-dir=$TOP/unexpected-test-output" \
      || fail "hadrian main testsuite"
    info "STAGE2_TEST=$?"
  fi
  end_section test-hadrian
}
# Record, for every .hi file under the current directory, its ABI hash
# and interface hash (into $OUT/abis and $OUT/interfaces) plus the full
# --show-iface dump (mirrored under $OUT/<path>). Requires HC and OUT.
function summarise_hi_files() {
  hi_files=$(find . -type f -name "*.hi" | sort)
  # Quote the tee targets so an $OUT containing spaces does not split.
  for iface in $hi_files; do echo "$iface $($HC --show-iface "$iface" | grep " ABI hash:")"; done | tee "$OUT/abis"
  for iface in $hi_files; do echo "$iface $($HC --show-iface "$iface" | grep " interface hash:")"; done | tee "$OUT/interfaces"
  for iface in $hi_files; do
    fname="$OUT/$(dirname "$iface")"
    mkdir -p "$fname"
    $HC --show-iface "$iface" > "$OUT/$iface"
  done
}
# Dump the headers and disassembly of every .o file under the current
# directory into $OUT/objs for later comparison by check_objects.
function summarise_o_files() {
  # $OBJDUMP may contain a flag ("objdump -m" on Darwin), so it is
  # deliberately left unquoted below to allow word splitting.
  # NOTE(review): the need for `-m` on Darwin's objdump is assumed from
  # this code alone — confirm against the cctools/llvm objdump in use.
  OBJDUMP=$(if test "$(uname)" == "Darwin"; then echo "objdump -m"; else echo "objdump"; fi)
  o_files=$(find . -type f -name "*.o" | sort)
  for o in $o_files; do
    fname="$OUT/objs/$(dirname "$o")"
    mkdir -p "$fname"
    # To later compare object dumps except for the first line which prints the file path
    $OBJDUMP --all-headers "$o" | tail -n+2 > "$OUT/objs/$o.all-headers"
    $OBJDUMP --disassemble-all "$o" | tail -n+2 > "$OUT/objs/$o.disassemble-all"
  done
}
# Compile Cabal's Distribution.Simple (from a copy of the tree placed in
# $DIR) with -fobject-determinism, then summarise the resulting
# interface and object files under $OUT. Extra compiler flags are
# forwarded. Requires OUT, DIR, HC and cores to be set.
function cabal_abi_test() {
  if [ -z "$OUT" ]; then
    fail "OUT not set"
  fi

  cp -r libraries/Cabal $DIR
  pushd $DIR
  echo $PWD

  start_section cabal-abi-test "Cabal ABI test: $OUT"
  mkdir -p "$OUT"
  # `sed '1d'` drops the first line of the compiler output before logging
  # — presumably because it is not stable between the two runs being
  # compared; confirm if the log format changes.
  "$HC" \
    -hidir tmp -odir tmp -fforce-recomp -haddock \
    -iCabal/Cabal/src -XNoPolyKinds Distribution.Simple -j"$cores" \
    -fobject-determinism \
    "$@" 2>&1 | sed '1d' | tee $OUT/log
  summarise_hi_files
  summarise_o_files
  popd
  end_section cabal-abi-test
}
# Performance-test compilation of Cabal's Distribution.Simple,
# recording compiler timings, dumps and RTS statistics under $OUT.
# Extra compiler flags (e.g. -O1) are forwarded. Requires HC and OUT.
function cabal_test() {
  if [ -z "$OUT" ]; then
    fail "OUT not set"
  fi

  start_section cabal-test "Cabal test: $OUT"
  mkdir -p "$OUT"
  run "$HC" \
    -hidir tmp -odir tmp -fforce-recomp \
    -dumpdir "$OUT/dumps" -ddump-timings \
    +RTS --machine-readable "-t$OUT/rts.log" -RTS \
    -ilibraries/Cabal/Cabal/src -XNoPolyKinds Distribution.Simple \
    "$@" 2>&1 | tee $OUT/log
  rm -Rf tmp
  end_section cabal-test
}
# Compile Cabal at each optimisation level, collecting metrics under
# out/Cabal-O{0,1,2}. Also records the Cabal and GHC commits used.
# Requires HC to point at the compiler under test.
function run_perf_test() {
  if [ -z "$HC" ]; then
    fail "HC not set"
  fi
  mkdir -p out
  git -C libraries/Cabal/ rev-parse HEAD > out/cabal_commit
  $HC --print-project-git-commit-id > out/ghc_commit
  local opt
  for opt in O0 O1 O2; do
    OUT="out/Cabal-$opt" cabal_test "-$opt"
  done
}
# Compare summary file $3 between result directories $1 and $2. On any
# mismatch, print per-interface diffs and abort with message $4.
function check_interfaces(){
  local difference
  difference=$(diff "$1/$3" "$2/$3") || warn "diff failed"
  if [ -z "$difference" ]
  then
    info "$1 and $2 $3 match"
  else
    # Quote the expansion so the diff keeps its line structure instead of
    # being flattened onto a single line.
    echo "$difference"
    # Diff each individual .hi file mentioned in the summary diff so the
    # log shows *what* changed, not just that something did. Escape the
    # dot so e.g. "xhi" tokens are not matched by accident.
    for line in $(echo "$difference" | tr ' ' '\n' | grep "\.hi" | sort | uniq); do
      diff "$1/$line" "$2/$line"
    done
    fail "$4"
  fi
}
# Compare the object-file dumps under result directories $1 and $2,
# failing with the first mismatching dump found.
function check_objects(){
  # Big fast check
  if diff -r "$1" "$2"
  then
    echo "Objects are the same"
  else
    echo "--------------------------------------------------------------------------------"
    echo "Comparing all objects (1. headers, 2. disassembly). Stopping at first failure..."
    echo "--------------------------------------------------------------------------------"
    pushd "$1" >/dev/null
    OBJ_DUMPS=$(find . -type f -name "*.all-headers" -or -name "*.disassemble-all")
    popd >/dev/null
    for dump in $OBJ_DUMPS
    do
      # N.B. `diff` exits non-zero when files differ; the previous code had
      # this condition inverted and aborted on *matching* dumps instead.
      if ! diff "$1/$dump" "$2/$dump"
      then
        fail "Mismatched object: $dump"
      fi
    done
    fail "Some objects are mismatched, but theres no diff with --all-headers or --disassemble-all. Perhaps try objdump -s"
  fi
}
# Repeat the ABI determinism check several times to flush out flaky
# non-determinism.
function abi_test() {
  local i
  for i in $(seq 1 10); do
    info "iteration $i"
    run_abi_test
  done
}
# Build Cabal twice with different unique supplies and verify that ABI
# hashes, interface hashes and object files all agree — i.e. that the
# compiler's output is deterministic. Requires HC to be set.
function run_abi_test() {
  if [ -z "$HC" ]; then
    fail "HC not set"
  fi
  mkdir -p out
  # The differing scratch-dir name lengths also vary the paths baked into
  # the builds, exercising path-independence of the output.
  OUT="$PWD/out/run1" DIR=$(mktemp -d XXXX-looooooooong) cabal_abi_test -O1 -haddock
  # Count uniques in reverse one of the runs to get more non-determinism exposed
  OUT="$PWD/out/run2" DIR=$(mktemp -d XXXX-short) cabal_abi_test -O1 -haddock -dunique-increment=-1 -dinitial-unique=16777215
  check_interfaces out/run1 out/run2 abis "Mismatched ABI hash"
  check_interfaces out/run1 out/run2 interfaces "Mismatched interface hashes"
  check_objects out/run1 out/run2
}
# Bundle any unexpected testsuite output into a tarball for upload as a
# CI artifact.
function save_test_output() {
  local archive="unexpected-test-output.tar.gz"
  tar -c -z -f "$archive" unexpected-test-output
}
# Persist the local cabal state directory into the CI cache location,
# replacing any previous snapshot.
function save_cache () {
  info "Storing cabal cache from $CABAL_DIR to $CABAL_CACHE..."
  rm -R -f "$CABAL_CACHE"
  cp -R -f "$CABAL_DIR" "$CABAL_CACHE"
}
# Remove build byproducts. `-f` keeps this idempotent: without it a
# missing ./tmp would abort the whole script under `set -e`.
function clean() {
  rm -Rf tmp
  run rm -Rf _build
}
# Invoke hadrian with the configured flavour, parallelism, bignum
# backend and any extra HADRIAN_ARGS, forwarding all given arguments.
function run_hadrian() {
  if [ -z "${BUILD_FLAVOUR:-}" ]; then
    fail "BUILD_FLAVOUR not set"
  fi
  read -r -a args <<< "${HADRIAN_ARGS:-}"
  if [ -n "${VERBOSE:-}" ]; then args+=("-V"); fi
  # Before running the compiler, unset variables gitlab env vars as these
  # can destabilise the performance test (see #20341)
  # (the subshell keeps the unset from leaking into our environment)
  (unset $(compgen -v | grep CI_*);
   run "${HADRIAN_PATH:-hadrian/build-cabal}" \
       --flavour="$BUILD_FLAVOUR" \
       -j"$cores" \
       --broken-test="${BROKEN_TESTS:-}" \
       --bignum=$BIGNUM_BACKEND \
       "${args[@]+"${args[@]}"}" \
       "$@")
}
# A convenience function to allow debugging in the CI environment.
# Run the given command in the CI environment, defaulting to an
# interactive bash shell when no command is given.
function shell() {
  if [ $# -eq 0 ]; then
    run bash -i
  else
    # Preserve the caller's argument boundaries; the previous
    # implementation flattened "$@" into a single string and re-split it
    # on whitespace, breaking quoted arguments.
    run "$@"
  fi
}
# Fail when any commit in the range $1..$2 carries the GHC CI bot's
# author email, which means the author information was never amended.
function lint_author(){
  local base=$1
  local head=$2
  local email
  for email in $(git log --format='%ae' "$base..$head"); do
    # Quote the operands so an empty or odd value cannot break the test.
    if [ "$email" == "ghc-ci@gitlab-haskell.org" ];
    then
      fail "Commit has GHC CI author, please amend the author information."
    fi
  done
}
# Summarise the interface files found under directory $1 into $OUT.
# N.B. DIR is intentionally left global (it mirrors cabal_abi_test's use).
function abi_of(){
  DIR=$(realpath "$1")
  mkdir -p "$OUT"
  pushd "$DIR"
  summarise_hi_files
  popd
}
# Checks that the interfaces in folder $1 match the interfaces in folder $2
# Summarise both folders' interfaces and fail unless the ABI hashes and
# interface hashes agree.
function compare_interfaces_of(){
  OUT=$PWD/out/run1 abi_of "$1"
  OUT=$PWD/out/run2 abi_of "$2"
  check_interfaces out/run1 out/run2 abis "Mismatched ABI hash"
  check_interfaces out/run1 out/run2 interfaces "Mismatched interface hashes"
}
# ---------------------------------------------------------------------------
# Entry point: one-time environment setup, then dispatch on mode "$1".
# ---------------------------------------------------------------------------
setup_locale

# Platform-specific environment initialization
if [ -n "${HERMETIC:-}" ]; then
  export CABAL_DIR="$TOP/cabal"
  # We previously set HOME=/nonexistent but apparently nix wants $HOME to exist
  # so sadly we must settle for someplace writable.
  export HOME="$TOP/tmp-home"
else
  BIN_DIST_NAME="${BIN_DIST_NAME:-}"
  case "$(uname)" in
    MSYS_*|MINGW*) CABAL_DIR="$APPDATA/cabal" ;;
    *) CABAL_DIR="$HOME/.cabal" ;;
  esac
fi

# Executable suffix and host-OS flag.
case "$(uname)" in
  MSYS_*|MINGW*)
    exe=".exe"
    # N.B. cabal-install expects CABAL_DIR to be a Windows path
    CABAL_DIR="$(cygpath -w "$CABAL_DIR")"
    WINDOWS_HOST="yes"
    ;;
  *)
    exe=""
    WINDOWS_HOST="no"
    ;;
esac

# Per-OS build tool selection (FreeBSD needs the GNU variants).
MAKE="make"
TAR="tar"
case "$(uname)" in
  MSYS_*|MINGW*) mingw_init ;;
  Darwin) boot_triple="x86_64-apple-darwin" ;;
  FreeBSD)
    boot_triple="x86_64-portbld-freebsd"
    MAKE="gmake"
    TAR="gtar"
    ;;
  Linux) ;;
  *) fail "uname $(uname) is not supported" ;;
esac

if [ -n "${CROSS_TARGET:-}" ]; then
  info "Cross-compiling for $CROSS_TARGET..."
  target_triple="$CROSS_TARGET"
  cross_prefix="$target_triple-"
else
  cross_prefix=""
fi

echo "Branch name ${CI_MERGE_REQUEST_SOURCE_BRANCH_NAME:-}"
# Ignore performance improvements in @marge-bot batches.
# See #19562.
if [ "${CI_MERGE_REQUEST_SOURCE_BRANCH_NAME:-}" == "wip/marge_bot_batch_merge_job" ]; then
  if [ -z "${IGNORE_PERF_FAILURES:-}" ]; then
    IGNORE_PERF_FAILURES="decreases"
    echo "Ignoring perf failures"
  fi
fi
echo "CI_COMMIT_BRANCH: ${CI_COMMIT_BRANCH:-}"
echo "CI_PROJECT_PATH: ${CI_PROJECT_PATH:-}"
# Likewise ignore perf improvements on upstream master.
if [ "${CI_COMMIT_BRANCH:-}" == "master" ] && [ "${CI_PROJECT_PATH:-}" == "ghc/ghc" ]; then
  if [ -z "${IGNORE_PERF_FAILURES:-}" ]; then
    IGNORE_PERF_FAILURES="decreases"
    echo "Ignoring perf failures"
  fi
fi
if [ -n "${IGNORE_PERF_FAILURES:-}" ]; then
  RUNTEST_ARGS=( "${RUNTEST_ARGS[@]:-}" "--ignore-perf-failures=$IGNORE_PERF_FAILURES" )
fi

if [[ -z ${BIGNUM_BACKEND:-} ]]; then BIGNUM_BACKEND=gmp; fi

determine_metric_baseline

set_toolchain_paths

# Mode dispatch.
case ${1:-help} in
  help|usage) usage ;;
  setup) setup && cleanup_submodules ;;
  configure) time_it "configure" configure ;;
  build_hadrian) time_it "build" build_hadrian ;;
  # N.B. Always push notes, even if the build fails. This is okay to do as the
  # testsuite driver doesn't record notes for tests that fail due to
  # correctness.
  test_hadrian)
    fetch_perf_notes
    res=0
    time_it "test" test_hadrian || res=$?
    push_perf_notes
    exit $res ;;
  run_hadrian) shift; run_hadrian "$@" ;;
  perf_test) run_perf_test ;;
  abi_test) abi_test ;;
  cabal_test) cabal_test ;;
  lint_author) shift; lint_author "$@" ;;
  compare_interfaces_of) shift; compare_interfaces_of "$@" ;;
  clean) clean ;;
  save_test_output) save_test_output ;;
  save_cache) save_cache ;;
  shell) shift; shell "$@" ;;
  *) fail "unknown mode $1" ;;
esac
# Circle CI "backend" for Gitlab CI
# =================================
#
# Usage example:
# .gitlab/circle-ci-job.sh validate-x86_64-linux
#
# There are two things to configure to get artifacts to be
# uploaded to gitlab properly:
#
# - At https://<gitlab host>/admin/application_settings, expand the
# Continuous Integration and Deployment section and set the
# "Maximum artifacts size (MB)" field to something large enough
# to contain the bindists (the test reports are tiny in comparison).
# 500MB seems to work fine, but 200MB might be sufficient.
#
# - If gitlab is exposed behind some form of proxy (e.g nginx), make sure
# the maximum client request body size is large enough to contain all the
# artifacts of a build. For nginx, this would be the following configuration
# option: https://nginx.org/en/docs/http/ngx_http_core_module.html#client_max_body_size
# (which can be set with services.nginx.clientMaxBodySize on nixos).
#!/usr/bin/env sh
# Submit this commit as a Circle CI job of the given type via the ghc-ci
# proxy at $GHCCI_URL, poll until the job finishes, then mirror its
# artifacts and (on failure) its log tail into this GitLab job.
# NOTE(review): this script uses `==` in `[ ... ]`, a bashism under
# /bin/sh on some platforms — confirm the runner's sh accepts it.
set -e

GHCCI_URL="localhost:8888"

# Preconditions: job type argument plus the GitLab-provided identifiers.
[ $# -gt 0 ] || (echo You need to pass the Circle CI job type as argument to this script; exit 1)
[ ${CI_RUNNER_ID:-} ] || (echo "CI_RUNNER_ID is not set"; exit 1)
[ ${CI_JOB_ID:-} ] || (echo "CI_JOB_ID is not set"; exit 1)
[ ${CI_COMMIT_SHA:-} ] || (echo "CI_COMMIT_SHA is not set"; exit 1)
[ ${CI_REPOSITORY_URL:-} ] || (echo "CI_REPOSITORY_URL is not set"; exit 1)
[ ${CI_PIPELINE_ID:-} ] || (echo "CI_PIPELINE_ID is not set"; exit 1)

# the first argument to this script is the Circle CI job type:
# validate-x86_64-linux, validate-i386-linux, ...
CIRCLE_JOB="circleci-$1"

# Derive the GitLab user and project name from the repository URL
# (https://host/user/project.git → 4th and 5th '/'-separated fields).
gitlab_user=$(echo $CI_REPOSITORY_URL | cut -d/ -f4)
gitlab_repo=$(echo $CI_REPOSITORY_URL | cut -d/ -f5 | cut -d. -f1)

BODY="{ \"jobType\": \"$CIRCLE_JOB\", \"source\": { \"user\": \"$gitlab_user\", \"project\":\"$gitlab_repo\", \"commit\":\"$CI_COMMIT_SHA\" }, \"pipelineID\": $CI_PIPELINE_ID, \"runnerID\": $CI_RUNNER_ID, \"jobID\": $CI_JOB_ID }"

# Ask the proxy to start the Circle CI job.
RESP=$(curl -s -XPOST -H "Content-Type: application/json" -d "$BODY" \
       http://${GHCCI_URL}/job)

if [ $? -eq 0 ]; then
    build_num=$(echo $RESP | jq '.build_num')
    circle_url=$(echo $RESP | jq '.url')
else
    echo "Couldn't submit job"
    echo $RESP
    exit 1
fi

echo Circle CI build number: $build_num
echo Circle CI build page: $circle_url

# Poll the job every 30 seconds until its 'outcome' field is non-null.
outcome="null"
STATUS_URL="http://${GHCCI_URL}/job/${build_num}"
STATUS_RESP=""

while [ "$outcome" == "null" ]; do
    sleep 30s
    STATUS_RESP=$(curl -s $STATUS_URL)
    if [ $? -eq 0 ]; then
        new_outcome=$(echo $STATUS_RESP | jq '.outcome')
        jq_exitcode=$?
        if [ "$new_outcome" == "null" ] && [ $jq_exitcode -ne 0 ]; then
            echo "Couldn't read 'outcome' field in JSON:"
            echo $STATUS_RESP
            echo "Skipping"
        else
            outcome="$new_outcome"
        fi
    else
        echo "curl failed:"
        echo $STATUS_RESP
        echo "Skipping"
    fi
done

if [ "$outcome" == "\"success\"" ]; then
    echo The build passed
    artifactsBody=$(curl -s http://${GHCCI_URL}/job/${build_num}/artifacts)
    (echo $artifactsBody | jq '.[] | .url' | xargs wget -q) || echo "No artifacts"
    exit 0
else
    echo The build failed

    artifactsBody=$(curl -s http://${GHCCI_URL}/job/${build_num}/artifacts)
    (echo $artifactsBody | jq '.[] | .url' | xargs wget -q) || echo "No artifacts"

    # Surface the failing step's name, command, log URL and the last 50
    # lines of its output directly in this job's log.
    failing_step=$(echo $STATUS_RESP | jq '.steps | .[] | .actions | .[] | select(.status != "success")')
    failing_step_name=$(echo $failing_step | jq '.name' | sed -e 's/^"//' -e 's/"$//' -e 's/\\r\\n/\n/')
    echo "Failing step: $failing_step_name"

    failing_cmds=$(echo $failing_step | jq '.bash_command' | sed -e 's/^"//' -e 's/"$//' -e 's/\\r\\n/\n/')
    echo "Failing command(s):"
    echo $failing_cmds

    log_url=$(echo $failing_step | jq '.output_url' | sed -e 's/^"//' -e 's/"$//' -e 's/\\r\\n/\n/')
    echo "Log url: $log_url"

    last_log_lines=$(curl -s $log_url | gunzip | jq '.[] | select(.type == "out") | .message' | sed -e 's/^"//' -e 's/"$//' -e 's/\\r\\n/\n/' | tail -50)
    echo End of the build log:
    echo $last_log_lines

    exit 1
fi
# Common bash utilities
# ----------------------
# Colors
# ANSI SGR color codes ("<bold>;<color>") consumed by echo_color below.
BLACK="0;30"
GRAY="1;30"
RED="0;31"
LT_RED="1;31"
BROWN="0;33"
LT_BROWN="1;33"
GREEN="0;32"
LT_GREEN="1;32"
BLUE="0;34"
LT_BLUE="1;34"
PURPLE="0;35"
LT_PURPLE="1;35"
CYAN="0;36"
LT_CYAN="1;36"
WHITE="1;37"
LT_GRAY="0;37"
# GitLab Pipelines log section delimiters
# https://docs.gitlab.com/ci/jobs/job_logs/#custom-collapsible-sections
# Open a collapsible GitLab log section named $1 with display text $2
# (defaulting to the section name).
function start_section () {
  local section_title="${1}"
  local section_description="${2:-$section_title}"
  local timestamp
  timestamp="$(date +%s)"
  echo -e "section_start:${timestamp}:${section_title}[collapsed=true]\r\e[0K${section_description}"
}
# Close the GitLab log section previously opened with start_section $1.
function end_section () {
  local section_title="${1}"
  local timestamp
  timestamp="$(date +%s)"
  echo -e "section_end:${timestamp}:${section_title}\r\e[0K"
}
# Print message $2 wrapped in the ANSI SGR color code $1.
# N.B. `echo -e` also expands escape sequences occurring in the message
# itself, so callers should not pass untrusted backslash sequences.
echo_color() {
  local color="$1"
  local msg="$2"
  echo -e "\033[${color}m${msg}\033[0m"
}

# Logging helpers; `fail` additionally terminates the script with status 1.
error() { echo_color "${RED}" "$1"; }
warn() { echo_color "${LT_BROWN}" "$1"; }
info() { echo_color "${LT_BLUE}" "$1"; }
fail() { error "error: $1"; exit 1; }
# Log and execute a command; on failure, report the error and propagate
# exit status 1 to the caller instead of aborting.
function run() {
  info "Running $*..."
  # Use a brace group rather than a subshell: `return` inside `( ... )`
  # only exits the subshell, whereas here it genuinely returns 1 from
  # this function on failure.
  "$@" || { error "$* failed"; return 1; }
}
#!/bin/bash
# Bootstrap a macOS build toolchain (GHC, cabal-install, happy, alex)
# into ./toolchain. Requires GHC_VERSION and CABAL_INSTALL_VERSION.
set -e

toolchain="$(pwd)/toolchain"
PATH="$toolchain/bin:$PATH"

# Restore the cabal cache from a previous CI run, if present.
if [ -d "$(pwd)/cabal-cache" ]; then
  cp -Rf cabal-cache "$HOME/.cabal"
fi

# Install a bootstrap GHC from the official bindist.
if [ ! -e "$toolchain/bin/ghc" ]; then
  mkdir -p tmp
  cd tmp
  ghc_tarball="https://downloads.haskell.org/~ghc/$GHC_VERSION/ghc-$GHC_VERSION-x86_64-apple-darwin.tar.xz"
  echo "Fetching GHC from $ghc_tarball"
  curl "$ghc_tarball" | tar -xJ
  cd "ghc-$GHC_VERSION"
  ./configure --prefix="$toolchain"
  make install
  cd ../..
  rm -Rf tmp
fi

# Install cabal-install from the official bindist.
if [ ! -e "$toolchain/bin/cabal" ]; then
  cabal_tarball="https://downloads.haskell.org/~cabal/cabal-install-$CABAL_INSTALL_VERSION/cabal-install-$CABAL_INSTALL_VERSION-x86_64-apple-darwin-sierra.tar.xz"
  echo "Fetching cabal-install from $cabal_tarball"
  # N.B. the bindist is xz-compressed: use -J. The previous `tar -xz`
  # forced gzip decompression and cannot unpack a .tar.xz stream.
  curl "$cabal_tarball" | tar -xJ
  mv cabal "$toolchain/bin"
fi

if [ ! -e "$toolchain/bin/happy" ]; then
  cabal update
  cabal new-install happy --symlink-bindir="$toolchain/bin"
fi

if [ ! -e "$toolchain/bin/alex" ]; then
  cabal update
  cabal new-install alex --symlink-bindir="$toolchain/bin"
fi
{
"niv": {
"branch": "master",
"description": "Easy dependency management for Nix projects",
"homepage": "https://github.com/nmattia/niv",
"owner": "nmattia",
"repo": "niv",
"rev": "e0ca65c81a2d7a4d82a189f1e23a48d59ad42070",
"sha256": "1pq9nh1d8nn3xvbdny8fafzw87mj7gsmp6pxkdl65w2g18rmcmzx",
"type": "tarball",
"url": "https://github.com/nmattia/niv/archive/e0ca65c81a2d7a4d82a189f1e23a48d59ad42070.tar.gz",
"url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz"
},
"nixpkgs": {
"branch": "nixos-unstable",
"description": "Nix Packages collection",
"homepage": "",
"owner": "nixos",
"repo": "nixpkgs",
"rev": "2893f56de08021cffd9b6b6dfc70fd9ccd51eb60",
"sha256": "1anwxmjpm21msnnlrjdz19w31bxnbpn4kgf93sn3npihi7wf4a8h",
"type": "tarball",
"url": "https://github.com/nixos/nixpkgs/archive/2893f56de08021cffd9b6b6dfc70fd9ccd51eb60.tar.gz",
"url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz"
}
}
# This file has been generated by Niv.
# (niv's standard source-fetching machinery: reads ../nix/sources.json
# and exposes each entry with an `outPath` produced by the matching
# fetcher. Do not edit by hand — regenerate with `niv`.)
let

  #
  # The fetchers. fetch_<type> fetches specs of type <type>.
  #

  fetch_file = pkgs: name: spec:
    let
      name' = sanitizeName name + "-src";
    in
      if spec.builtin or true then
        builtins_fetchurl { inherit (spec) url sha256; name = name'; }
      else
        pkgs.fetchurl { inherit (spec) url sha256; name = name'; };

  fetch_tarball = pkgs: name: spec:
    let
      name' = sanitizeName name + "-src";
    in
      if spec.builtin or true then
        builtins_fetchTarball { name = name'; inherit (spec) url sha256; }
      else
        pkgs.fetchzip { name = name'; inherit (spec) url sha256; };

  fetch_git = name: spec:
    let
      # Prefer an explicit ref, then branch, then tag.
      ref =
        if spec ? ref then spec.ref else
        if spec ? branch then "refs/heads/${spec.branch}" else
        if spec ? tag then "refs/tags/${spec.tag}" else
        abort "In git source '${name}': Please specify `ref`, `tag` or `branch`!";
    in
      builtins.fetchGit { url = spec.repo; inherit (spec) rev; inherit ref; };

  fetch_local = spec: spec.path;

  fetch_builtin-tarball = name: throw
    ''[${name}] The niv type "builtin-tarball" is deprecated. You should instead use `builtin = true`.
    $ niv modify ${name} -a type=tarball -a builtin=true'';

  fetch_builtin-url = name: throw
    ''[${name}] The niv type "builtin-url" will soon be deprecated. You should instead use `builtin = true`.
    $ niv modify ${name} -a type=file -a builtin=true'';

  #
  # Various helpers
  #

  # https://github.com/NixOS/nixpkgs/pull/83241/files#diff-c6f540a4f3bfa4b0e8b6bafd4cd54e8bR695
  sanitizeName = name:
    (
      concatMapStrings (s: if builtins.isList s then "-" else s)
        (
          builtins.split "[^[:alnum:]+._?=-]+"
            ((x: builtins.elemAt (builtins.match "\\.*(.*)" x) 0) name)
        )
    );

  # The set of packages used when specs are fetched using non-builtins.
  mkPkgs = sources: system:
    let
      sourcesNixpkgs =
        import (builtins_fetchTarball { inherit (sources.nixpkgs) url sha256; }) { inherit system; };
      hasNixpkgsPath = builtins.any (x: x.prefix == "nixpkgs") builtins.nixPath;
      hasThisAsNixpkgsPath = <nixpkgs> == ./.;
    in
      if builtins.hasAttr "nixpkgs" sources
      then sourcesNixpkgs
      else if hasNixpkgsPath && ! hasThisAsNixpkgsPath then
        import <nixpkgs> {}
      else
        abort
          ''
            Please specify either <nixpkgs> (through -I or NIX_PATH=nixpkgs=...) or
            add a package called "nixpkgs" to your sources.json.
          '';

  # The actual fetching function.
  fetch = pkgs: name: spec:
    if ! builtins.hasAttr "type" spec then
      abort "ERROR: niv spec ${name} does not have a 'type' attribute"
    else if spec.type == "file" then fetch_file pkgs name spec
    else if spec.type == "tarball" then fetch_tarball pkgs name spec
    else if spec.type == "git" then fetch_git name spec
    else if spec.type == "local" then fetch_local spec
    else if spec.type == "builtin-tarball" then fetch_builtin-tarball name
    else if spec.type == "builtin-url" then fetch_builtin-url name
    else
      abort "ERROR: niv spec ${name} has unknown type ${builtins.toJSON spec.type}";

  # If the environment variable NIV_OVERRIDE_${name} is set, then use
  # the path directly as opposed to the fetched source.
  replace = name: drv:
    let
      saneName = stringAsChars (c: if isNull (builtins.match "[a-zA-Z0-9]" c) then "_" else c) name;
      ersatz = builtins.getEnv "NIV_OVERRIDE_${saneName}";
    in
      if ersatz == "" then drv else
        # this turns the string into an actual Nix path (for both absolute and
        # relative paths)
        if builtins.substring 0 1 ersatz == "/" then /. + ersatz else /. + builtins.getEnv "PWD" + "/${ersatz}";

  # Ports of functions for older nix versions

  # a Nix version of mapAttrs if the built-in doesn't exist
  mapAttrs = builtins.mapAttrs or (
    f: set: with builtins;
      listToAttrs (map (attr: { name = attr; value = f attr set.${attr}; }) (attrNames set))
  );

  # https://github.com/NixOS/nixpkgs/blob/0258808f5744ca980b9a1f24fe0b1e6f0fecee9c/lib/lists.nix#L295
  range = first: last: if first > last then [] else builtins.genList (n: first + n) (last - first + 1);

  # https://github.com/NixOS/nixpkgs/blob/0258808f5744ca980b9a1f24fe0b1e6f0fecee9c/lib/strings.nix#L257
  stringToCharacters = s: map (p: builtins.substring p 1 s) (range 0 (builtins.stringLength s - 1));

  # https://github.com/NixOS/nixpkgs/blob/0258808f5744ca980b9a1f24fe0b1e6f0fecee9c/lib/strings.nix#L269
  stringAsChars = f: s: concatStrings (map f (stringToCharacters s));
  concatMapStrings = f: list: concatStrings (map f list);
  concatStrings = builtins.concatStringsSep "";

  # https://github.com/NixOS/nixpkgs/blob/8a9f58a375c401b96da862d969f66429def1d118/lib/attrsets.nix#L331
  optionalAttrs = cond: as: if cond then as else {};

  # fetchTarball version that is compatible between all the versions of Nix
  builtins_fetchTarball = { url, name ? null, sha256 }@attrs:
    let
      inherit (builtins) lessThan nixVersion fetchTarball;
    in
      if lessThan nixVersion "1.12" then
        fetchTarball ({ inherit url; } // (optionalAttrs (!isNull name) { inherit name; }))
      else
        fetchTarball attrs;

  # fetchurl version that is compatible between all the versions of Nix
  builtins_fetchurl = { url, name ? null, sha256 }@attrs:
    let
      inherit (builtins) lessThan nixVersion fetchurl;
    in
      if lessThan nixVersion "1.12" then
        fetchurl ({ inherit url; } // (optionalAttrs (!isNull name) { inherit name; }))
      else
        fetchurl attrs;

  # Create the final "sources" from the config
  mkSources = config:
    mapAttrs (
      name: spec:
        if builtins.hasAttr "outPath" spec
        then abort
          "The values in sources.json should not have an 'outPath' attribute"
        else
          spec // { outPath = replace name (fetch config.pkgs name spec); }
    ) config.sources;

  # The "config" used by the fetchers
  mkConfig =
    { sourcesFile ? if builtins.pathExists ./sources.json then ./sources.json else null
    , sources ? if isNull sourcesFile then {} else builtins.fromJSON (builtins.readFile sourcesFile)
    , system ? builtins.currentSystem
    , pkgs ? mkPkgs sources system
    }: rec {
      # The sources, i.e. the attribute set of spec name to spec
      inherit sources;
      # The "pkgs" (evaluated nixpkgs) to use for e.g. non-builtin fetchers
      inherit pkgs;
    };

in
mkSources (mkConfig {}) // { __functor = _: settings: mkSources (mkConfig settings); }
{ system }:
let
sources = import ./nix/sources.nix;
nixpkgsSrc = sources.nixpkgs;
pkgs = import nixpkgsSrc { inherit system; };
hostPkgs = import nixpkgsSrc { };
in
let
hsPkgs = pkgs.haskellPackages;
alex = hsPkgs.alex;
happy = hsPkgs.happy;
targetTriple = pkgs.stdenv.targetPlatform.config;
ghcBindists = let version = ghc.version; in {
aarch64-darwin = hostPkgs.fetchurl {
url = "https://downloads.haskell.org/ghc/${version}/ghc-${version}-aarch64-apple-darwin.tar.xz";
sha256 = "sha256-c1GTMJf3/yiW/t4QL532EswD5JVlgA4getkfsxj4TaA=";
};
x86_64-darwin = hostPkgs.fetchurl {
url = "https://downloads.haskell.org/ghc/${version}/ghc-${version}-x86_64-apple-darwin.tar.xz";
sha256 = "sha256-LrYniMG0phsvyW6dhQC+3ompvzcxnwAe6GezEqqzoTQ=";
};
};
# Bootstrap GHC: repack the upstream binary distribution so that it uses the
# system clang and the pinned LLVM toolchain, targeting ${targetTriple}.
ghc = pkgs.stdenv.mkDerivation rec {
  # Using 9.6.2 because of #24050
  version = "9.6.2";
  name = "ghc";
  src = ghcBindists.${pkgs.stdenv.hostPlatform.system};
  # Point the bindist's configure at the system clang/ar and the pinned
  # LLVM tools, and force the cross/native target triple for stage2.
  configureFlags = [
    "CC=/usr/bin/clang"
    "CLANG=/usr/bin/clang"
    "AR=/usr/bin/ar"
    "LLC=${llvm}/bin/llc"
    "OPT=${llvm}/bin/opt"
    "LLVMAS=${llvm_clang}/bin/clang"
    "CONF_CC_OPTS_STAGE2=--target=${targetTriple}"
    "CONF_CXX_OPTS_STAGE2=--target=${targetTriple}"
    "CONF_GCC_LINKER_OPTS_STAGE2=--target=${targetTriple}"
  ];
  # A bindist has nothing to build; installPhase ("make install") does the work.
  buildPhase = "true";
  # This is a horrible hack because the configure script invokes /usr/bin/clang
  # without a `--target` flag. Then depending on whether the `nix` binary itself is
  # a native x86 or arm64 binary means that /usr/bin/clang thinks it needs to run in
  # x86 or arm64 mode.
  # The correct answer for the check in question is the first one we try, so by replacing
  # the condition to true; we select the right C++ standard library still.
  preConfigure = ''
    sed "s/\"\$CC\" -o actest actest.o \''${1} 2>\/dev\/null/true/i" configure > configure.new
    mv configure.new configure
    chmod +x configure
    cat configure
  '';
  # N.B. Work around #20253.
  nativeBuildInputs = [ pkgs.gnused ];
  # Rewrite the installed compiler's `settings` file so it refers to the
  # pinned LLVM tools and the system clang by absolute path, and carries the
  # `--target` flag that the bare tool names would otherwise lack.
  #
  # NOTE(review): this attribute was previously named `postInstallPhase`,
  # which is not a hook that stdenv's genericBuild ever runs (the standard
  # hook, executed at the end of installPhase, is `postInstall`) — so these
  # sed rewrites silently never happened. Renamed accordingly.
  postInstall = ''
    settings="$out/lib/ghc-${version}/settings"
    sed -i -e "s%\"llc\"%\"${llvm}/bin/llc\"%" $settings
    sed -i -e "s%\"opt\"%\"${llvm}/bin/opt\"%" $settings
    sed -i -e "s%\"clang\"%\"/usr/bin/clang\"%" $settings
    sed -i -e 's%("C compiler command", "")%("C compiler command", "/usr/bin/clang")%' $settings
    sed -i -e 's%("C compiler flags", "")%("C compiler flags", "--target=${targetTriple}")%' $settings
    sed -i -e 's%("C++ compiler flags", "")%("C++ compiler flags", "--target=${targetTriple}")%' $settings
    sed -i -e 's%("C compiler link flags", "")%("C compiler link flags", "--target=${targetTriple}")%' $settings
  '';
  # Sanity check: verify that we can compile hello world (with TH, which
  # exercises the runtime linker/code loading path).
  doInstallCheck = true;
  installCheckPhase = ''
    unset DYLD_LIBRARY_PATH
    $out/bin/ghc --info
    cd $TMP
    mkdir test-ghc; cd test-ghc
    cat > main.hs << EOF
    {-# LANGUAGE TemplateHaskell #-}
    module Main where
    main = putStrLn \$([|"yes"|])
    EOF
    $out/bin/ghc --make -v3 main.hs || exit 1
    echo compilation ok
    [ $(./main) == "yes" ]
  '';
};
# TeX Live environment for the documentation build: XeTeX plus the handful
# of style files the GHC user's guide PDF needs.
ourtexlive =
  pkgs.texlive.combine {
    inherit (pkgs.texlive)
      scheme-medium
      collection-xetex
      fncychap
      titlesec
      tabulary
      varwidth
      framed
      capt-of
      wrapfig
      needspace
      dejavu-otf
      helvetic
      upquote;
  };
# Fontconfig configuration restricted to the DejaVu fonts, so document
# rendering sees a deterministic font set.
fonts = pkgs.makeFontsConf {
  fontDirectories = [ pkgs.dejavu_fonts ];
};
# Pinned LLVM 15 toolchain: `llvm` provides llc/opt for GHC's LLVM backend,
# and `llvm_clang` (unwrapped, so no nix compiler-wrapper flags are injected)
# serves as the LLVM assembler.
llvm = pkgs.llvm_15;
llvm_clang = pkgs.llvmPackages_15.clang-unwrapped;
in
# A shell fragment (meant to be sourced by CI) that puts the toolchain
# assembled above — autoconf/automake, fonts, TeX Live, happy/alex, the
# bootstrap GHC, LLVM, sphinx, cabal — into the environment, and appends
# macOS-specific flags (Xcode SDK libffi paths, build triple) to
# CONFIGURE_ARGS.  The `''${...}` occurrences are Nix escapes for literal
# shell `${...}`, expanded when the script is sourced, not at build time.
pkgs.writeTextFile {
name = "toolchain";
text = ''
export PATH
PATH="${pkgs.autoconf}/bin:$PATH"
PATH="${pkgs.automake}/bin:$PATH"
export FONTCONFIG_FILE=${fonts}
export XELATEX="${ourtexlive}/bin/xelatex"
export MAKEINDEX="${ourtexlive}/bin/makeindex"
export HAPPY="${happy}/bin/happy"
export ALEX="${alex}/bin/alex"
export GHC="${ghc}/bin/ghc"
export LLC="${llvm}/bin/llc"
export OPT="${llvm}/bin/opt"
export LLVMAS="${llvm_clang}/bin/clang"
export SPHINXBUILD="${pkgs.python3Packages.sphinx}/bin/sphinx-build"
export CABAL_INSTALL="${pkgs.cabal-install}/bin/cabal"
export CABAL="$CABAL_INSTALL"
sdk_path="$(xcrun --sdk macosx --show-sdk-path)"
: ''${CONFIGURE_ARGS:=}
CONFIGURE_ARGS+="''${CONFIGURE_ARGS:+ }--with-ffi-libraries=$sdk_path/usr/lib --with-ffi-includes=$sdk_path/usr/include/ffi --build=${targetTriple}"
export CONFIGURE_ARGS
'';
}
Copyright (c) 2023, The GHC Developers
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following
disclaimer in the documentation and/or other materials provided
with the distribution.
* Neither the name of The GHC Developers nor the names of other
contributors may be used to endorse or promote products derived
from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# generate-ci
This is the generator for GHC's GitLab CI infrastructure. In particular, this
generates two outputs:
* `.gitlab/jobs.yaml`, which is a YAML (or, strictly speaking, JSON)
file which defines the bulk of the validation, nightly, and release jobs of
GHC's CI. This is committed to the GHC repository and must be updated
whenever `gen_ci.hs` is modified.
* `.gitlab/jobs-metadata.json`, which is a mapping between platforms and
produced binary distribution names used when producing `ghcup` metadata
for nightly pipeline artifacts (see the `.ghcup-metadata` job in
`/.gitlab-ci.yaml`).
## Modifying the CI configuration (nix)
The jobs are defined in `gen_ci.hs`. After modifying this you can run
```sh
nix run .gitlab/generate-ci#generate-jobs
```
from the top of the GHC repository to update the generated configuration.
## Modifying the CI configuration (without nix)
One can regenerate the CI configuration without Nix as follows (assuming one
has `jq`, `cabal-install`, and GHC installed):
```sh
$ cabal build generate-ci
$ PATH="$(dirname $(cabal list-bin generate-ci)):$PATH"
$ ./generate-jobs
```
{
"nodes": {
"flake-utils": {
"inputs": {
"systems": "systems"
},
"locked": {
"lastModified": 1710146030,
"narHash": "sha256-SZ5L6eA7HJ/nmkzGG7/ISclqe6oZdOZTNoesiInkXPQ=",
"owner": "numtide",
"repo": "flake-utils",
"rev": "b1d9ab70662946ef0850d488da1c9019f3a9752a",
"type": "github"
},
"original": {
"owner": "numtide",
"repo": "flake-utils",
"type": "github"
}
},
"nixpkgs": {
"locked": {
"lastModified": 1724334015,
"narHash": "sha256-5sfvc0MswIRNdRWioUhG58rGKGn2o90Ck6l6ClpwQqA=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "6d204f819efff3d552a88d0a44b5aaaee172b784",
"type": "github"
},
"original": {
"id": "nixpkgs",
"type": "indirect"
}
},
"root": {
"inputs": {
"flake-utils": "flake-utils",
"nixpkgs": "nixpkgs"
}
},
"systems": {
"locked": {
"lastModified": 1681028828,
"narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
"owner": "nix-systems",
"repo": "default",
"rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
"type": "github"
},
"original": {
"owner": "nix-systems",
"repo": "default",
"type": "github"
}
}
},
"root": "root",
"version": 7
}
{
  description = "GHC CI Generator";

  inputs.flake-utils.url = "github:numtide/flake-utils";

  outputs = { self, nixpkgs, flake-utils }:
    flake-utils.lib.eachDefaultSystem (system:
      let
        pkgs = nixpkgs.legacyPackages.${system};
      in
      {
        packages = rec {
          # The Haskell generator executable
          generate-ci = pkgs.haskellPackages.callCabal2nix "generate-ci" ./. {};

          # Wrapper scripts around the generator
          generate-job-metadata = pkgs.runCommand "generate-job-metadata" {
            nativeBuildInputs = [ pkgs.makeWrapper ];
          } ''
            mkdir -p $out/bin
            makeWrapper ${./generate-job-metadata} $out/bin/generate-job-metadata \
              --prefix PATH : ${pkgs.lib.makeBinPath [ generate-ci pkgs.gitMinimal ]}
          '';

          generate-jobs = pkgs.runCommand "generate-jobs" {
            nativeBuildInputs = [ pkgs.makeWrapper ];
          } ''
            mkdir -p $out/bin
            makeWrapper ${./generate-jobs} $out/bin/generate-jobs \
              --prefix PATH : ${pkgs.lib.makeBinPath [ generate-ci pkgs.jq pkgs.gitMinimal ]}
          '';

          default = generate-jobs;
        };

        # `nix run` entry points mirroring the packages above.
        apps = rec {
          generate-jobs = flake-utils.lib.mkApp {
            drv = self.packages.${system}.generate-jobs;
          };
          generate-job-metadata = flake-utils.lib.mkApp {
            drv = self.packages.${system}.generate-job-metadata;
          };
          default = generate-jobs;
        };
      }
    );
}