
Compare revisions

Changes are shown as if the source revision was being merged into the target revision.

Commits on Source (69)
Showing with 1521 additions and 91 deletions
@@ -6,7 +6,7 @@ variables:
# Sequential version number of all cached things.
# Bump to invalidate GitLab CI cache.
CACHE_REV: 8
CACHE_REV: 9
# Disable shallow clones; they break our linting rules
GIT_DEPTH: 0
@@ -166,7 +166,9 @@ not-interruptible:
stage: not-interruptible
script: "true"
interruptible: false
image: "registry.gitlab.haskell.org/ghc/ci-images/x86_64-linux-deb10:$DOCKER_REV"
image: "debian:10"
variables:
GIT_STRATEGY: none
tags:
- lint
rules:
@@ -560,7 +562,7 @@ hackage-doc-tarball:
- tar -xf ghc-x86_64-linux-fedora33-release.tar.xz -C ghc*/
script:
- cd ghc*/
- mv .gitlab/upload_ghc_libs.py .
- mv .gitlab/rel_eng/upload_ghc_libs.py .
- .gitlab/ci.sh setup
- .gitlab/ci.sh configure
- ./upload_ghc_libs.py prepare --bindist ghc*linux/
@@ -579,6 +581,7 @@ source-tarball:
paths:
- ghc-*.tar.xz
script:
- sudo chown ghc:ghc -R .
- ./boot
- ./configure
- ./hadrian/build source-dist
@@ -632,6 +635,7 @@ test-bootstrap:
parallel: *bootstrap_matrix
dependencies: null
script:
- sudo chown ghc:ghc -R .
- mkdir test-bootstrap
- tar -xf ghc-*[0-9]-src.tar.xz -C test-bootstrap
- tar -xf ghc-*-testsuite.tar.xz -C test-bootstrap
@@ -940,3 +944,138 @@ pages:
artifacts:
paths:
- public
#############################################################
# Generation of GHCUp metadata
#############################################################
# TODO: MP: This way of determining the project version is sadly very slow.
# It seems overkill to have to set up a complete environment and build hadrian just
# to get it to generate a single file containing the version information.
project-version:
stage: packaging
image: "registry.gitlab.haskell.org/ghc/ci-images/x86_64-linux-deb10:$DOCKER_REV"
tags:
- x86_64-linux
variables:
BUILD_FLAVOUR: default
script:
# Calculate the project version
- sudo chown ghc:ghc -R .
- .gitlab/ci.sh setup
- .gitlab/ci.sh configure
- .gitlab/ci.sh run_hadrian VERSION
- echo "ProjectVersion=$(cat VERSION)" > version.sh
needs: []
dependencies: []
artifacts:
paths:
- version.sh
rules:
- if: '$NIGHTLY'
- if: '$RELEASE_JOB == "yes"'
.ghcup-metadata:
stage: deploy
image: "nixos/nix:2.12.0"
dependencies: null
tags:
- x86_64-linux
variables:
BUILD_FLAVOUR: default
GIT_SUBMODULE_STRATEGY: "none"
before_script:
- echo "experimental-features = nix-command flakes" >> /etc/nix/nix.conf
- nix-channel --update
- cat version.sh
# Calculate the project version
- . ./version.sh
# Download existing ghcup metadata
- nix shell --extra-experimental-features nix-command --extra-experimental-features flakes nixpkgs#wget -c wget "https://raw.githubusercontent.com/haskell/ghcup-metadata/develop/ghcup-0.0.7.yaml"
- .gitlab/generate_job_metadata
artifacts:
paths:
- metadata_test.yaml
- version.sh
ghcup-metadata-nightly:
extends: .ghcup-metadata
# Explicit needs for validate pipeline because we only need certain bindists
needs:
- job: nightly-x86_64-linux-fedora33-release
artifacts: false
- job: nightly-x86_64-linux-centos7-validate
artifacts: false
- job: nightly-x86_64-darwin-validate
artifacts: false
- job: nightly-aarch64-darwin-validate
artifacts: false
- job: nightly-x86_64-windows-validate
artifacts: false
- job: nightly-x86_64-linux-alpine3_12-int_native-validate+fully_static
artifacts: false
- job: nightly-x86_64-linux-deb9-validate
artifacts: false
- job: nightly-i386-linux-deb9-validate
artifacts: false
- job: nightly-x86_64-linux-deb10-validate
artifacts: false
- job: nightly-aarch64-linux-deb10-validate
artifacts: false
- job: nightly-x86_64-linux-deb11-validate
artifacts: false
- job: source-tarball
artifacts: false
- job: project-version
script:
- nix shell --extra-experimental-features nix-command -f .gitlab/rel_eng -c ghcup-metadata --metadata ghcup-0.0.7.yaml --pipeline-id="$CI_PIPELINE_ID" --version="$ProjectVersion" > "metadata_test.yaml"
rules:
- if: $NIGHTLY
ghcup-metadata-release:
# No explicit needs for release pipeline as we assume we need everything and everything will pass.
extends: .ghcup-metadata
script:
- nix shell --extra-experimental-features nix-command -f .gitlab/rel_eng -c ghcup-metadata --release-mode --metadata ghcup-0.0.7.yaml --pipeline-id="$CI_PIPELINE_ID" --version="$ProjectVersion" > "metadata_test.yaml"
rules:
- if: '$RELEASE_JOB == "yes"'
.ghcup-metadata-testing:
stage: deploy
variables:
UPSTREAM_PROJECT_PATH: "$CI_PROJECT_PATH"
UPSTREAM_PROJECT_ID: "$CI_PROJECT_ID"
UPSTREAM_PIPELINE_ID: "$CI_PIPELINE_ID"
RELEASE_JOB: "$RELEASE_JOB"
trigger:
project: "ghc/ghcup-ci"
branch: "upstream-testing"
strategy: "depend"
ghcup-metadata-testing-nightly:
needs:
- job: ghcup-metadata-nightly
artifacts: false
extends: .ghcup-metadata-testing
variables:
NIGHTLY: "$NIGHTLY"
UPSTREAM_JOB_NAME: "ghcup-metadata-nightly"
rules:
- if: '$NIGHTLY == "1"'
ghcup-metadata-testing-release:
needs:
- job: ghcup-metadata-release
artifacts: false
extends: .ghcup-metadata-testing
variables:
UPSTREAM_JOB_NAME: "ghcup-metadata-release"
rules:
- if: '$RELEASE_JOB == "yes"'
when: manual
@@ -17,6 +17,7 @@ import Data.List (intercalate)
import Data.Set (Set)
import qualified Data.Set as S
import System.Environment
import Data.Maybe
{-
Note [Generating the CI pipeline]
@@ -84,6 +85,16 @@ names of jobs to update these other places.
3. The ghc-head-from script downloads release artifacts based on a pipeline change.
4. Some subsequent CI jobs have explicit dependencies (for example docs-tarball, perf, perf-nofib)
Note [Generation Modes]
~~~~~~~~~~~~~~~~~~~~~~~
There are two different modes this script can operate in:
* `gitlab`: Generates a job.yaml which defines all the pipelines for the platforms
* `metadata`: Generates a file which maps a platform to the "default" validate and
nightly pipeline. This file is intended to be used when generating
ghcup metadata.
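
For example (a sketch; the generate_jobs and generate_jobs_metadata wrapper
scripts below invoke the two modes like this):

  $ cabal run gen_ci -- gitlab jobs.json
  $ cabal run gen_ci -- metadata jobs-metadata.json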
-}
-----------------------------------------------------------------------------
@@ -98,7 +109,13 @@ data Opsys
| Windows deriving (Eq)
data LinuxDistro
= Debian11 | Debian10 | Debian9 | Fedora33 | Ubuntu2004 | Centos7 | Alpine deriving (Eq)
= Debian11 | Debian10 | Debian9
| Fedora33
| Ubuntu2004
| Centos7
| Alpine
| Rocky8
deriving (Eq)
data Arch = Amd64 | AArch64 | I386
@@ -256,6 +273,7 @@ distroName Fedora33 = "fedora33"
distroName Ubuntu2004 = "ubuntu20_04"
distroName Centos7 = "centos7"
distroName Alpine = "alpine3_12"
distroName Rocky8 = "rocky8"
opsysName :: Opsys -> String
opsysName (Linux distro) = "linux-" ++ distroName distro
@@ -337,6 +355,9 @@ instance (Ord k, Semigroup v) => Monoid (MonoidalMap k v) where
mminsertWith :: Ord k => (a -> a -> a) -> k -> a -> MonoidalMap k a -> MonoidalMap k a
mminsertWith f k v (MonoidalMap m) = MonoidalMap (Map.insertWith f k v m)
mmlookup :: Ord k => k -> MonoidalMap k a -> Maybe a
mmlookup k (MonoidalMap m) = Map.lookup k m
type Variables = MonoidalMap String [String]
(=:) :: String -> String -> Variables
@@ -399,6 +420,9 @@ distroVariables Alpine = mconcat
distroVariables Centos7 = mconcat [
"HADRIAN_ARGS" =: "--docs=no-sphinx"
]
distroVariables Rocky8 = mconcat [
"HADRIAN_ARGS" =: "--docs=no-sphinx"
]
distroVariables Fedora33 = mconcat
-- LLC/OPT do not work for some reason in our fedora images
-- These tests fail with this error: T11649 T5681 T7571 T8131b
@@ -567,6 +591,7 @@ data Job
, jobArtifacts :: Artifacts
, jobCache :: Cache
, jobRules :: OnOffRules
, jobPlatform :: (Arch, Opsys)
}
instance ToJSON Job where
@@ -590,9 +615,11 @@ instance ToJSON Job where
]
-- | Build a job description from the system description and 'BuildConfig'
job :: Arch -> Opsys -> BuildConfig -> (String, Job)
job arch opsys buildConfig = (jobName, Job {..})
job :: Arch -> Opsys -> BuildConfig -> NamedJob Job
job arch opsys buildConfig = NamedJob { name = jobName, jobInfo = Job {..} }
where
jobPlatform = (arch, opsys)
jobRules = emptyRules
jobName = testEnv arch opsys buildConfig
@@ -702,20 +729,20 @@ delVariable k j = j { jobVariables = MonoidalMap $ Map.delete k $ unMonoidalMap
-- Building the standard jobs
--
-- | Make a normal validate CI job
validate :: Arch -> Opsys -> BuildConfig -> (String, Job)
validate :: Arch -> Opsys -> BuildConfig -> NamedJob Job
validate = job
-- | Make a normal nightly CI job
nightly :: Arch -> Opsys -> BuildConfig -> ([Char], Job)
nightly :: Arch -> Opsys -> BuildConfig -> NamedJob Job
nightly arch opsys bc =
let (n, j) = job arch opsys bc
in ("nightly-" ++ n, addJobRule Nightly . keepArtifacts "8 weeks" . highCompression $ j)
let NamedJob n j = job arch opsys bc
in NamedJob { name = "nightly-" ++ n, jobInfo = addJobRule Nightly . keepArtifacts "8 weeks" . highCompression $ j}
-- | Make a normal release CI job
release :: Arch -> Opsys -> BuildConfig -> ([Char], Job)
release :: Arch -> Opsys -> BuildConfig -> NamedJob Job
release arch opsys bc =
let (n, j) = job arch opsys (bc { buildFlavour = Release })
in ("release-" ++ n, addJobRule ReleaseOnly . keepArtifacts "1 year" . ignorePerfFailures . highCompression $ j)
let NamedJob n j = job arch opsys (bc { buildFlavour = Release })
in NamedJob { name = "release-" ++ n, jobInfo = addJobRule ReleaseOnly . keepArtifacts "1 year" . ignorePerfFailures . highCompression $ j}
-- Specific job modification functions
@@ -758,17 +785,33 @@ addValidateRule t = modifyValidateJobs (addJobRule t)
disableValidate :: JobGroup Job -> JobGroup Job
disableValidate = addValidateRule Disable
data NamedJob a = NamedJob { name :: String, jobInfo :: a } deriving Functor
renameJob :: (String -> String) -> NamedJob a -> NamedJob a
renameJob f (NamedJob n i) = NamedJob (f n) i
instance ToJSON a => ToJSON (NamedJob a) where
toJSON nj = object
[ "name" A..= name nj
, "jobInfo" A..= jobInfo nj ]
-- Jobs are grouped into either triples or pairs depending on whether the
-- job is just validate and nightly, or also release.
data JobGroup a = StandardTriple { v :: (String, a)
, n :: (String, a)
, r :: (String, a) }
| ValidateOnly { v :: (String, a)
, n :: (String, a) } deriving Functor
data JobGroup a = StandardTriple { v :: NamedJob a
, n :: NamedJob a
, r :: NamedJob a }
| ValidateOnly { v :: NamedJob a
, n :: NamedJob a } deriving Functor
instance ToJSON a => ToJSON (JobGroup a) where
toJSON jg = object
[ "n" A..= n jg
, "r" A..= r jg
]
rename :: (String -> String) -> JobGroup a -> JobGroup a
rename f (StandardTriple (nv, v) (nn, n) (nr, r)) = StandardTriple (f nv, v) (f nn, n) (f nr, r)
rename f (ValidateOnly (nv, v) (nn, n)) = ValidateOnly (f nv, v) (f nn, n)
rename f (StandardTriple nv nn nr) = StandardTriple (renameJob f nv) (renameJob f nn) (renameJob f nr)
rename f (ValidateOnly nv nn) = ValidateOnly (renameJob f nv) (renameJob f nn)
-- | Construct a 'JobGroup' which consists of a validate, nightly and release build with
-- a specific config.
@@ -789,13 +832,21 @@ validateBuilds :: Arch -> Opsys -> BuildConfig -> JobGroup Job
validateBuilds a op bc = ValidateOnly (validate a op bc) (nightly a op bc)
flattenJobGroup :: JobGroup a -> [(String, a)]
flattenJobGroup (StandardTriple a b c) = [a,b,c]
flattenJobGroup (ValidateOnly a b) = [a, b]
flattenJobGroup (StandardTriple a b c) = map flattenNamedJob [a,b,c]
flattenJobGroup (ValidateOnly a b) = map flattenNamedJob [a, b]
flattenNamedJob :: NamedJob a -> (String, a)
flattenNamedJob (NamedJob n i) = (n, i)
-- | Specification for all the jobs we want to build.
jobs :: Map String Job
jobs = Map.fromList $ concatMap (filter is_enabled_job . flattenJobGroup)
jobs = Map.fromList $ concatMap (filter is_enabled_job . flattenJobGroup) job_groups
where
is_enabled_job (_, Job {jobRules = OnOffRules {..}}) = not $ Disable `S.member` rule_set
job_groups :: [JobGroup Job]
job_groups =
[ disableValidate (standardBuilds Amd64 (Linux Debian10))
, standardBuildsWithConfig Amd64 (Linux Debian10) dwarf
, validateBuilds Amd64 (Linux Debian10) nativeInt
@@ -810,6 +861,7 @@ jobs = Map.fromList $ concatMap (filter is_enabled_job . flattenJobGroup)
-- not being at EOL until April 2023 and they still need tinfo5.
, disableValidate (standardBuildsWithConfig Amd64 (Linux Debian9) (splitSectionsBroken vanilla))
, disableValidate (standardBuilds Amd64 (Linux Ubuntu2004))
, disableValidate (standardBuilds Amd64 (Linux Rocky8))
, disableValidate (standardBuildsWithConfig Amd64 (Linux Centos7) (splitSectionsBroken vanilla))
-- Fedora33 job is always built with perf so there's one job in the normal
-- validate pipeline which is built with perf.
@@ -823,7 +875,7 @@ jobs = Map.fromList $ concatMap (filter is_enabled_job . flattenJobGroup)
, allowFailureGroup (addValidateRule FreeBSDLabel (standardBuilds Amd64 FreeBSD13))
, standardBuilds AArch64 Darwin
, standardBuildsWithConfig AArch64 (Linux Debian10) (splitSectionsBroken vanilla)
, disableValidate (standardBuildsWithConfig AArch64 (Linux Debian10) llvm)
, disableValidate (validateBuilds AArch64 (Linux Debian10) llvm)
, standardBuildsWithConfig I386 (Linux Debian9) (splitSectionsBroken vanilla)
, standardBuildsWithConfig Amd64 (Linux Alpine) (splitSectionsBroken static)
, disableValidate (allowFailureGroup (standardBuildsWithConfig Amd64 (Linux Alpine) staticNativeInt))
@@ -838,10 +890,7 @@ jobs = Map.fromList $ concatMap (filter is_enabled_job . flattenJobGroup)
]
where
is_enabled_job (_, Job {jobRules = OnOffRules {..}}) = not $ Disable `S.member` rule_set
hackage_doc_job = rename (<> "-hackage") . modifyJobs (addVariable "HADRIAN_ARGS" "--haddock-base-url")
tsan_jobs =
modifyJobs
( addVariable "TSAN_OPTIONS" "suppressions=$CI_PROJECT_DIR/rts/.tsan-suppressions"
@@ -865,10 +914,59 @@ jobs = Map.fromList $ concatMap (filter is_enabled_job . flattenJobGroup)
, buildFlavour = Release -- TODO: This needs to be validate but wasm backend doesn't pass yet
}
mkPlatform :: Arch -> Opsys -> String
mkPlatform arch opsys = archName arch <> "-" <> opsysName opsys
-- | This map tells us for a specific arch/opsys combo what the job name for
-- nightly/release pipelines is. This is used by the ghcup metadata generation so that
-- things like bindist names etc are kept in-sync.
--
-- For cases where there is just one job group for a platform, that group is used directly.
--
-- Otherwise:
-- * Prefer jobs which have a corresponding release pipeline
-- * Explicitly require tie-breaking for other cases.
platform_mapping :: Map String (JobGroup BindistInfo)
platform_mapping = Map.map go $
Map.fromListWith combine [ (uncurry mkPlatform (jobPlatform (jobInfo $ v j)), j) | j <- job_groups ]
where
whitelist = [ "x86_64-linux-alpine3_12-int_native-validate+fully_static"
, "x86_64-linux-deb10-validate"
, "x86_64-linux-fedora33-release"
, "x86_64-windows-validate"
]
combine a b
| name (v a) `elem` whitelist = a -- Explicitly selected
| name (v b) `elem` whitelist = b
| hasReleaseBuild a, not (hasReleaseBuild b) = a -- Has release build, but other doesn't
| hasReleaseBuild b, not (hasReleaseBuild a) = b
| otherwise = error (show (name (v a)) ++ show (name (v b)))
go = fmap (BindistInfo . unwords . fromJust . mmlookup "BIN_DIST_NAME" . jobVariables)
hasReleaseBuild (StandardTriple{}) = True
hasReleaseBuild (ValidateOnly{}) = False
data BindistInfo = BindistInfo { bindistName :: String }
instance ToJSON BindistInfo where
toJSON (BindistInfo n) = object [ "bindistName" A..= n ]
main :: IO ()
main = do
as <- getArgs
ass <- getArgs
case ass of
-- See Note [Generation Modes]
("gitlab":as) -> write_result as jobs
("metadata":as) -> write_result as platform_mapping
_ -> error "gen_ci.hs <gitlab|metadata> [file.json]"
write_result as obj =
(case as of
[] -> B.putStrLn
(fp:_) -> B.writeFile fp)
(A.encode jobs)
(A.encode obj)
#! /usr/bin/env nix-shell
#!nix-shell -i bash -p cabal-install "haskell.packages.ghc924.ghcWithPackages (pkgs: with pkgs; [aeson])" git jq
cd "$(dirname "${BASH_SOURCE[0]}")"
cabal run gen_ci -- metadata jobs-metadata.json
@@ -7,7 +7,7 @@ set -euo pipefail
cd "$(dirname "${BASH_SOURCE[0]}")"
tmp=$(mktemp)
cabal run gen_ci -- $tmp
cabal run gen_ci -- gitlab $tmp
rm -f jobs.yaml
echo "### THIS IS A GENERATED FILE, DO NOT MODIFY DIRECTLY" > jobs.yaml
cat $tmp | jq | tee -a jobs.yaml
{-# OPTIONS_GHC -Wno-missing-fields #-}
{-# OPTIONS_GHC -Wall -Wno-missing-fields #-}
import GHC hiding (parseModule)
import GHC.Data.StringBuffer
@@ -9,6 +9,7 @@ import GHC.Platform
import GHC.Plugins
import GHC.Settings
import GHC.Settings.Config
import System.Mem.Weak
fakeSettings :: Settings
fakeSettings =
@@ -41,5 +42,6 @@ parse dflags src = do
main :: IO ()
main = do
_ <- mkWeak runGhc runGhc Nothing
m <- parse fakeDynFlags "main = putStrLn \"hello world\""
putStrLn $ showSDoc fakeDynFlags $ ppr m
@@ -1812,6 +1812,66 @@
"XZ_OPT": "-9"
}
},
"nightly-x86_64-linux-rocky8-validate": {
"after_script": [
".gitlab/ci.sh save_cache",
".gitlab/ci.sh clean",
"cat ci_timings"
],
"allow_failure": false,
"artifacts": {
"expire_in": "8 weeks",
"paths": [
"ghc-x86_64-linux-rocky8-validate.tar.xz",
"junit.xml"
],
"reports": {
"junit": "junit.xml"
},
"when": "always"
},
"cache": {
"key": "x86_64-linux-rocky8-$CACHE_REV",
"paths": [
"cabal-cache",
"toolchain"
]
},
"dependencies": [],
"image": "registry.gitlab.haskell.org/ghc/ci-images/x86_64-linux-rocky8:$DOCKER_REV",
"needs": [
{
"artifacts": false,
"job": "hadrian-ghc-in-ghci"
}
],
"rules": [
{
"if": "($CI_MERGE_REQUEST_LABELS !~ /.*fast-ci.*/) && ($RELEASE_JOB != \"yes\") && ($NIGHTLY) && (\"true\" == \"true\") && (\"true\" == \"true\") && (\"true\" == \"true\")",
"when": "on_success"
}
],
"script": [
"sudo chown ghc:ghc -R .",
".gitlab/ci.sh setup",
".gitlab/ci.sh configure",
".gitlab/ci.sh build_hadrian",
".gitlab/ci.sh test_hadrian"
],
"stage": "full-build",
"tags": [
"x86_64-linux"
],
"variables": {
"BIGNUM_BACKEND": "gmp",
"BIN_DIST_NAME": "ghc-x86_64-linux-rocky8-validate",
"BUILD_FLAVOUR": "validate",
"CONFIGURE_ARGS": "",
"HADRIAN_ARGS": "--docs=no-sphinx",
"TEST_ENV": "x86_64-linux-rocky8-validate",
"XZ_OPT": "-9"
}
},
"nightly-x86_64-linux-ubuntu20_04-validate": {
"after_script": [
".gitlab/ci.sh save_cache",
@@ -2053,66 +2113,6 @@
"XZ_OPT": "-9"
}
},
"release-aarch64-linux-deb10-release+llvm": {
"after_script": [
".gitlab/ci.sh save_cache",
".gitlab/ci.sh clean",
"cat ci_timings"
],
"allow_failure": false,
"artifacts": {
"expire_in": "1 year",
"paths": [
"ghc-aarch64-linux-deb10-release+llvm.tar.xz",
"junit.xml"
],
"reports": {
"junit": "junit.xml"
},
"when": "always"
},
"cache": {
"key": "aarch64-linux-deb10-$CACHE_REV",
"paths": [
"cabal-cache",
"toolchain"
]
},
"dependencies": [],
"image": "registry.gitlab.haskell.org/ghc/ci-images/aarch64-linux-deb10:$DOCKER_REV",
"needs": [
{
"artifacts": false,
"job": "hadrian-ghc-in-ghci"
}
],
"rules": [
{
"if": "($CI_MERGE_REQUEST_LABELS !~ /.*fast-ci.*/) && ($RELEASE_JOB == \"yes\") && ($NIGHTLY == null) && (\"true\" == \"true\") && (\"true\" == \"true\") && (\"true\" == \"true\")",
"when": "on_success"
}
],
"script": [
"sudo chown ghc:ghc -R .",
".gitlab/ci.sh setup",
".gitlab/ci.sh configure",
".gitlab/ci.sh build_hadrian",
".gitlab/ci.sh test_hadrian"
],
"stage": "full-build",
"tags": [
"aarch64-linux"
],
"variables": {
"BIGNUM_BACKEND": "gmp",
"BIN_DIST_NAME": "ghc-aarch64-linux-deb10-release+llvm",
"BUILD_FLAVOUR": "release+llvm",
"CONFIGURE_ARGS": "",
"IGNORE_PERF_FAILURES": "all",
"TEST_ENV": "aarch64-linux-deb10-release+llvm",
"XZ_OPT": "-9"
}
},
"release-aarch64-linux-deb10-release+no_split_sections": {
"after_script": [
".gitlab/ci.sh save_cache",
@@ -2977,6 +2977,67 @@
"XZ_OPT": "-9"
}
},
"release-x86_64-linux-rocky8-release": {
"after_script": [
".gitlab/ci.sh save_cache",
".gitlab/ci.sh clean",
"cat ci_timings"
],
"allow_failure": false,
"artifacts": {
"expire_in": "1 year",
"paths": [
"ghc-x86_64-linux-rocky8-release.tar.xz",
"junit.xml"
],
"reports": {
"junit": "junit.xml"
},
"when": "always"
},
"cache": {
"key": "x86_64-linux-rocky8-$CACHE_REV",
"paths": [
"cabal-cache",
"toolchain"
]
},
"dependencies": [],
"image": "registry.gitlab.haskell.org/ghc/ci-images/x86_64-linux-rocky8:$DOCKER_REV",
"needs": [
{
"artifacts": false,
"job": "hadrian-ghc-in-ghci"
}
],
"rules": [
{
"if": "($CI_MERGE_REQUEST_LABELS !~ /.*fast-ci.*/) && ($RELEASE_JOB == \"yes\") && ($NIGHTLY == null) && (\"true\" == \"true\") && (\"true\" == \"true\") && (\"true\" == \"true\")",
"when": "on_success"
}
],
"script": [
"sudo chown ghc:ghc -R .",
".gitlab/ci.sh setup",
".gitlab/ci.sh configure",
".gitlab/ci.sh build_hadrian",
".gitlab/ci.sh test_hadrian"
],
"stage": "full-build",
"tags": [
"x86_64-linux"
],
"variables": {
"BIGNUM_BACKEND": "gmp",
"BIN_DIST_NAME": "ghc-x86_64-linux-rocky8-release",
"BUILD_FLAVOUR": "release",
"CONFIGURE_ARGS": "",
"HADRIAN_ARGS": "--docs=no-sphinx",
"IGNORE_PERF_FAILURES": "all",
"TEST_ENV": "x86_64-linux-rocky8-release",
"XZ_OPT": "-9"
}
},
"release-x86_64-linux-ubuntu20_04-release": {
"after_script": [
".gitlab/ci.sh save_cache",
let sources = import ./nix/sources.nix; in
{ nixpkgs ? (import sources.nixpkgs {}) }:
with nixpkgs;
let
fetch-gitlab-artifacts = nixpkgs.callPackage ./fetch-gitlab-artifacts {};
mk-ghcup-metadata = nixpkgs.callPackage ./mk-ghcup-metadata { fetch-gitlab=fetch-gitlab-artifacts;};
bindistPrepEnv = pkgs.buildFHSUserEnv {
name = "enter-fhs";
targetPkgs = pkgs: with pkgs; [
# all
gcc binutils gnumake gmp ncurses5 git elfutils
# source-release.sh
xorg.lndir curl python3 which automake autoconf m4 file
haskell.compiler.ghc8107 haskellPackages.happy haskellPackages.alex
];
runScript = "$SHELL -x";
};
scripts = stdenv.mkDerivation {
name = "rel-eng-scripts";
nativeBuildInputs = [ makeWrapper ];
preferLocalBuild = true;
buildCommand = ''
mkdir -p $out/bin
makeWrapper ${./upload.sh} $out/bin/upload.sh \
--prefix PATH : ${moreutils}/bin \
--prefix PATH : ${lftp}/bin \
--prefix PATH : ${lzip}/bin \
--prefix PATH : ${zip}/bin \
--prefix PATH : ${s3cmd}/bin \
--prefix PATH : ${gnupg}/bin \
--prefix PATH : ${pinentry}/bin \
--prefix PATH : ${parallel}/bin \
--prefix PATH : ${python3}/bin \
--set ENTER_FHS_ENV ${bindistPrepEnv}/bin/enter-fhs \
--set BASH ${bash}/bin/bash
makeWrapper ${./upload_ghc_libs.py} $out/bin/upload-ghc-libs
'';
};
in
symlinkJoin {
name = "ghc-rel-eng";
preferLocalBuild = true;
paths = [
scripts
fetch-gitlab-artifacts
mk-ghcup-metadata
];
}
result
fetch-gitlab
out
# fetch-gitlab-artifacts
This script is used to fetch and rename GHC binary distributions from GitLab
Pipelines for upload to `downloads.haskell.org`.
## Workflow
1. Configure a `python-gitlab` profile for <https://gitlab.haskell.org/>:
```
$ cat > $HOME/.python-gitlab.cfg <<EOF
[haskell]
url = https://gitlab.haskell.org/
private_token = $PRIVATE_GITLAB_TOKEN
ssl_verify = true
api_version = 4
EOF
```
1. Push a release tag to ghc/ghc>
1. Wait until the CI pipeline completes
1. Run `fetch-gitlab -p $PIPELINE_ID -r $RELEASE_NAME` where `$PIPELINE_ID` is
the ID of the GitLab release pipeline and `$RELEASE_NAME` is the name of the
GHC release (e.g. `8.8.1` or `8.8.1-alpha1`)
1. The binary distributions will be placed in the `out` directory.
\ No newline at end of file
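
For example, a hypothetical invocation (the pipeline ID here is illustrative):

```
$ fetch-gitlab -p 12345 -r 8.8.1 -o out
```

By default the tool reads the `haskell` profile from `$HOME/.python-gitlab.cfg`;
pass `-P` to select a different profile.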
{ nix-gitignore, python3Packages, unzip }:
let
fetch-gitlab = { buildPythonPackage, python-gitlab, unzip }:
buildPythonPackage {
pname = "fetch-gitlab";
version = "0.0.1";
src = nix-gitignore.gitignoreSource [] ./.;
propagatedBuildInputs = [ python3Packages.python-gitlab unzip ];
preferLocalBuild = true;
};
in
python3Packages.callPackage fetch-gitlab { inherit unzip; }
import logging
from pathlib import Path
import subprocess
import gitlab
import json
logging.basicConfig(level=logging.INFO)
def strip_prefix(s, prefix):
if s.startswith(prefix):
return s[len(prefix):]
else:
return None
def job_triple(job_name):
bindists = {
'release-x86_64-windows-release': 'x86_64-unknown-mingw32',
'release-x86_64-windows-int_native-release': 'x86_64-unknown-mingw32-int_native',
'release-x86_64-ubuntu20_04-release': 'x86_64-ubuntu20_04-linux',
'release-x86_64-linux-fedora33-release+debug_info': 'x86_64-fedora33-linux-dwarf',
'release-x86_64-linux-fedora33-release': 'x86_64-fedora33-linux',
'release-x86_64-linux-fedora27-release': 'x86_64-fedora27-linux',
'release-x86_64-linux-deb11-release': 'x86_64-deb11-linux',
'release-x86_64-linux-deb10-release+debug_info': 'x86_64-deb10-linux-dwarf',
'release-x86_64-linux-deb10-release': 'x86_64-deb10-linux',
'release-x86_64-linux-deb9-release': 'x86_64-deb9-linux',
'release-x86_64-linux-centos7-release': 'x86_64-centos7-linux',
'release-x86_64-linux-alpine3_12-release+fully_static': 'x86_64-alpine3_12-linux-static',
'release-x86_64-linux-alpine3_12-int_native-release+fully_static': 'x86_64-alpine3_12-linux-static-int_native',
'release-x86_64-darwin-release': 'x86_64-apple-darwin',
'release-i386-linux-deb9-release': 'i386-deb9-linux',
'release-armv7-linux-deb10-release': 'armv7-deb10-linux',
'release-aarch64-linux-deb10-release': 'aarch64-deb10-linux',
'release-aarch64-darwin-release': 'aarch64-apple-darwin',
'source-tarball': 'src',
'package-hadrian-bootstrap-sources': 'hadrian-bootstrap-sources',
'doc-tarball': 'docs',
'hackage-doc-tarball': 'hackage_docs',
}
# Some bindists use the +no_split_sections transformer due to upstream
# toolchain bugs.
bindists.update({
f'{k}+no_split_sections': v
for k,v in bindists.items()
})
if job_name in bindists:
return bindists[job_name]
else:
#return strip_prefix(job.name, 'validate-')
return None
def fetch_artifacts(release: str, pipeline_id: int,
dest_dir: Path, gl: gitlab.Gitlab):
dest_dir.mkdir(exist_ok=True)
# Write the pipeline id into output directory
with open(f"{dest_dir}/metadata.json", 'w') as out: json.dump({ "pipeline_id": pipeline_id }, out)
proj = gl.projects.get('ghc/ghc')
pipeline = proj.pipelines.get(pipeline_id)
tmpdir = Path("fetch-gitlab")
tmpdir.mkdir(exist_ok=True)
for pipeline_job in pipeline.jobs.list(all=True):
if len(pipeline_job.artifacts) == 0:
logging.info(f'job {pipeline_job.name} ({pipeline_job.id}) has no artifacts')
continue
job = proj.jobs.get(pipeline_job.id)
triple = job_triple(job.name)
if triple is None:
logging.info(f'ignoring {job.name}')
continue
#artifactZips = [ artifact
# for artifact in job.artifacts
# if artifact['filename'] == 'artifacts.zip' ]
try:
destdir = tmpdir / job.name
zip_name = Path(f"{tmpdir}/{job.name}.zip")
if not zip_name.exists() or zip_name.stat().st_size == 0:
logging.info(f'downloading archive {zip_name} for job {job.name} (job {job.id})...')
with open(zip_name, 'wb') as f:
job.artifacts(streamed=True, action=f.write)
if zip_name.stat().st_size == 0:
logging.info(f'artifact archive for job {job.name} (job {job.id}) is empty')
continue
subprocess.run(['unzip', '-bo', zip_name, '-d', destdir])
bindist_files = list(destdir.glob('ghc*.tar.xz'))
if job.name == 'source-tarball':
for f in bindist_files:
dest = dest_dir / f.name
logging.info(f'extracted {job.name} to {dest}')
f.replace(dest)
elif job.name == 'package-hadrian-bootstrap-sources':
all_bootstrap_sources = destdir / 'hadrian-bootstrap-sources-all.tar.gz'
dest = dest_dir / 'hadrian-bootstrap-sources'
dest.mkdir()
subprocess.run(['tar', '-xf', all_bootstrap_sources, '-C', dest])
logging.info(f'extracted {job.name}/{all_bootstrap_sources} to {dest}')
elif job.name == 'doc-tarball':
dest = dest_dir / 'docs'
dest.mkdir()
doc_files = list(destdir.glob('*.tar.xz'))
for f in doc_files:
subprocess.run(['tar', '-xf', f, '-C', dest])
logging.info(f'extracted docs {f} to {dest}')
index_path = destdir / 'index.html'
index_path.replace(dest / 'index.html')
elif job.name == 'hackage-doc-tarball':
dest = dest_dir / 'hackage_docs'
logging.info(f'moved hackage_docs to {dest}')
(destdir / 'hackage_docs').replace(dest)
else:
dest = dest_dir / f'ghc-{release}-{triple}.tar.xz'
if dest.exists():
logging.info(f'bindist {dest} already exists')
continue
if len(bindist_files) == 0:
logging.warning('Bindist does not exist')
continue
bindist = bindist_files[0]
logging.info(f'extracted {job.name} to {dest}')
bindist.replace(dest)
except Exception as e:
logging.error(f'Error fetching job {job.name}: {e}')
pass
def main():
import argparse
parser = argparse.ArgumentParser()
parser.add_argument('--pipeline', '-p', required=True, type=int, help="pipeline id")
parser.add_argument('--release', '-r', required=True, type=str, help="release name")
parser.add_argument('--output', '-o', type=Path, default=Path.cwd(), help="output directory")
parser.add_argument('--profile', '-P', default='haskell',
help='python-gitlab.cfg profile name')
args = parser.parse_args()
gl = gitlab.Gitlab.from_config(args.profile)
fetch_artifacts(args.release, args.pipeline,
dest_dir=args.output, gl=gl)
#!/usr/bin/env python
from distutils.core import setup
setup(name='fetch-gitlab',
author='Ben Gamari',
author_email='ben@smart-cactus.org',
py_modules=['fetch_gitlab'],
entry_points={
'console_scripts': [
'fetch-gitlab=fetch_gitlab:main',
]
}
)
result
fetch-gitlab
out
# mk-ghcup-metadata
This script is used to automatically generate metadata suitable for consumption by
GHCUp.
# Usage
```
nix run -f .gitlab/rel_eng/ -c ghcup-metadata
```
```
options:
-h, --help show this help message and exit
--metadata METADATA Path to GHCUp metadata
--pipeline-id PIPELINE_ID
Which pipeline to generate metadata for
--release-mode Generate metadata which points to downloads folder
--fragment Output the generated fragment rather than whole modified file
--version VERSION Version of the GHC compiler
```
The script also requires the `.gitlab/jobs-metadata.json` file, which can be generated
by running the `.gitlab/generate_jobs_metadata` script if you want to run it locally.
## CI Pipelines
The metadata is generated by the nightly and release pipelines.
* Nightly pipelines generate metadata where the bindist URLs point immediately to
nightly artifacts.
* Release jobs can pass the `--release-mode` flag which downloads the artifacts from
the pipeline but the final download URLs for users point into the downloads folder.
The mapping from platform to bindist is not clever; it is just what the GHCUp
developers tell us to use.
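
For example, the release pipeline invokes the tool roughly as follows (the
pipeline ID and version here are illustrative):

```
nix run -f .gitlab/rel_eng/ -c ghcup-metadata --release-mode \
  --metadata ghcup-0.0.7.yaml --pipeline-id=12345 --version=9.6.1 > metadata_test.yaml
```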
## Testing Pipelines
The metadata is tested by the `ghcup-ci` repo which is triggered by the
`ghcup-metadata-testing-nightly` job.
This job sets the following variables which are then used by the downstream job
to collect the metadata from the correct place:
* `UPSTREAM_PIPELINE_ID` - The pipeline ID which the generated metadata lives in
* `UPSTREAM_PROJECT_ID` - The project ID for the upstream project (almost always `1` (for ghc/ghc))
* `UPSTREAM_JOB_NAME` - The job which the metadata belongs to (i.e. `ghcup-metadata-nightly`)
* `UPSTREAM_PROJECT_PATH` - The path of the upstream project (almost always ghc/ghc)
Nightly pipelines are tested automatically, but release pipelines are triggered manually
since the testing requires the bindists to be uploaded into the final release folder.
{ nix-gitignore, python3Packages, fetch-gitlab }:
let
ghcup-metadata = { buildPythonPackage, python-gitlab, pyyaml }:
buildPythonPackage {
pname = "ghcup-metadata";
version = "0.0.1";
src = nix-gitignore.gitignoreSource [] ./.;
propagatedBuildInputs = [fetch-gitlab python-gitlab pyyaml ];
preferLocalBuild = true;
};
in
python3Packages.callPackage ghcup-metadata { }
#! /usr/bin/env nix-shell
#! nix-shell -i python3 -p curl "python3.withPackages (ps:[ps.pyyaml ps.python-gitlab ])"
"""
A tool for generating metadata suitable for GHCUp
There are two ways to prepare metadata:
* From a nightly pipeline.
* From a release pipeline.
In any case the script takes the same arguments:
* --metadata: The path to existing GHCup metadata to which we want to add the new entry.
* --version: GHC version of the pipeline
* --pipeline-id: The pipeline to generate metadata for
* --release-mode: Download from a release pipeline but generate URLs to point to downloads folder.
* --fragment: Only print out the updated fragment rather than the modified file
The script will then download the relevant bindists to compute the hashes. The
generated metadata is printed to stdout.
The metadata can then be used by passing the `--url-source` flag to ghcup.
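
For example (illustrative, assuming ghcup accepts a local file URL here):

    ghcup --url-source=file://$PWD/metadata.yaml list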
"""
from subprocess import run, check_call
from getpass import getpass
import shutil
from pathlib import Path
from typing import NamedTuple, Callable, List, Dict, Optional
import tempfile
import re
import pickle
import os
import yaml
import gitlab
from urllib.request import urlopen
import hashlib
import sys
import json
import urllib.parse
import fetch_gitlab
def eprint(*args, **kwargs):
print(*args, file=sys.stderr, **kwargs)
gl = gitlab.Gitlab('https://gitlab.haskell.org', per_page=100)
# TODO: Take this file as an argument
metadata_file = ".gitlab/jobs-metadata.json"
release_base = "https://downloads.haskell.org/~ghc/{version}/ghc-{version}-{bindistName}"
eprint(f"Reading job metadata from {metadata_file}.")
with open(metadata_file, 'r') as f:
job_mapping = json.load(f)
eprint(f"Supported platforms: {job_mapping.keys()}")
# An Artifact specifies precisely, for a given job, what the bindist to download is called.
class Artifact(NamedTuple):
job_name: str
name: str
subdir: str
# A PlatformSpec provides a specification which is agnostic to the job.
# PlatformSpecs are converted into Artifacts by looking in the jobs-metadata.json file.
class PlatformSpec(NamedTuple):
name: str
subdir: str
source_artifact = Artifact('source-tarball', 'ghc-{version}-src.tar.xz', 'ghc-{version}' )
def debian(arch, n):
return linux_platform(arch, "{arch}-linux-deb{n}".format(arch=arch, n=n))
def darwin(arch):
return PlatformSpec ( '{arch}-darwin'.format(arch=arch)
, 'ghc-{version}-{arch}-apple-darwin'.format(arch=arch, version="{version}") )
windowsArtifact = PlatformSpec ( 'x86_64-windows'
, 'ghc-{version}-x86_64-unknown-mingw32' )
def centos(n):
return linux_platform("x86_64", "x86_64-linux-centos{n}".format(n=n))
def fedora(n):
return linux_platform("x86_64", "x86_64-linux-fedora{n}".format(n=n))
def alpine(n):
return linux_platform("x86_64", "x86_64-linux-alpine{n}".format(n=n))
def linux_platform(arch, opsys):
return PlatformSpec( opsys, 'ghc-{version}-{arch}-unknown-linux'.format(version="{version}", arch=arch) )
base_url = 'https://gitlab.haskell.org/ghc/ghc/-/jobs/{job_id}/artifacts/raw/{artifact_name}'
hash_cache = {}
# Download a URL and return its hash
def download_and_hash(url):
if url in hash_cache: return hash_cache[url]
eprint ("Opening {}".format(url))
response = urlopen(url)
sz = response.headers['content-length']
hasher = hashlib.sha256()
CHUNK = 2**22
for n, text in enumerate(iter(lambda: response.read(CHUNK), b'')):
if not text: break
eprint("{:.2f}% {} / {} of {}".format (((n + 1) * CHUNK) / int(sz) * 100, (n + 1) * CHUNK, sz, url))
hasher.update(text)
digest = hasher.hexdigest()
hash_cache[url] = digest
return digest
# Make the metadata for one platform.
def mk_one_metadata(release_mode, version, job_map, artifact):
job_id = job_map[artifact.job_name].id
url = base_url.format(job_id=job_id, artifact_name=urllib.parse.quote_plus(artifact.name.format(version=version)))
# In --release-mode, the URL in the metadata needs to point into the downloads folder
# rather than the pipeline.
if release_mode:
final_url = release_base.format( version=version
, bindistName=urllib.parse.quote_plus(f"{fetch_gitlab.job_triple(artifact.job_name)}.tar.xz"))
else:
final_url = url
eprint(f"Making metadata for: {artifact}")
eprint(f"Bindist URL: {url}")
eprint(f"Download URL: {final_url}")
# Download and hash from the release pipeline; this must not change during upload anyway.
h = download_and_hash(url)
res = { "dlUri": final_url, "dlSubdir": artifact.subdir.format(version=version), "dlHash" : h }
eprint(res)
return res
# Turns a platform into an Artifact respecting pipeline_type
# Looks up the right job to use from the .gitlab/jobs-metadata.json file
def mk_from_platform(pipeline_type, platform):
info = job_mapping[platform.name][pipeline_type]
eprint(f"From {platform.name} / {pipeline_type} selecting {info['name']}")
return Artifact(info['name'] , f"{info['jobInfo']['bindistName']}.tar.xz", platform.subdir)
# Generate the new metadata for a specific GHC mode etc
def mk_new_yaml(release_mode, version, pipeline_type, job_map):
def mk(platform):
eprint("\n=== " + platform.name + " " + ('=' * (75 - len(platform.name))))
return mk_one_metadata(release_mode, version, job_map, mk_from_platform(pipeline_type, platform))
# Here are all the bindists we can distribute
centos7 = mk(centos(7))
fedora33 = mk(fedora(33))
darwin_x86 = mk(darwin("x86_64"))
darwin_arm64 = mk(darwin("aarch64"))
windows = mk(windowsArtifact)
alpine3_12 = mk(alpine("3_12"))
deb9 = mk(debian("x86_64", 9))
deb10 = mk(debian("x86_64", 10))
deb11 = mk(debian("x86_64", 11))
deb10_arm64 = mk(debian("aarch64", 10))
deb9_i386 = mk(debian("i386", 9))
source = mk_one_metadata(release_mode, version, job_map, source_artifact)
# The actual metadata. This is not a precise science; it is just what the ghcup
# developers want.
a64 = { "Linux_Debian": { "< 10": deb9
, "(>= 10 && < 11)": deb10
, ">= 11": deb11
, "unknown_versioning": deb11 }
, "Linux_Ubuntu" : { "unknown_versioning": deb10
, "( >= 16 && < 19 )": deb9
}
, "Linux_Mint" : { "< 20": deb9
, ">= 20": deb10 }
, "Linux_CentOS" : { "( >= 7 && < 8 )" : centos7
, "unknown_versioning" : centos7 }
, "Linux_Fedora" : { ">= 33": fedora33
, "unknown_versioning": centos7 }
, "Linux_RedHat" : { "unknown_versioning": centos7 }
#MP: Replace here with Rocky8 when that job is in the pipeline
, "Linux_UnknownLinux" : { "unknown_versioning": fedora33 }
, "Darwin" : { "unknown_versioning" : darwin_x86 }
, "Windows" : { "unknown_versioning" : windows }
, "Linux_Alpine" : { "unknown_versioning": alpine3_12 }
}
a32 = { "Linux_Debian": { "<10": deb9_i386, "unknown_versioning": deb9_i386 }
, "Linux_Ubuntu": { "unknown_versioning": deb9_i386 }
, "Linux_Mint" : { "unknown_versioning": deb9_i386 }
, "Linux_UnknownLinux" : { "unknown_versioning": deb9_i386 }
}
arm64 = { "Linux_UnknownLinux": { "unknown_versioning": deb10_arm64 }
, "Darwin": { "unknown_versioning": darwin_arm64 }
}
if release_mode:
version_parts = version.split('.')
if len(version_parts) == 3:
final_version = version
elif len(version_parts) == 4:
final_version = '.'.join(version_parts[:2] + [str(int(version_parts[2]) + 1)])
change_log = f"https://downloads.haskell.org/~ghc/{version}/docs/users_guide/{final_version}-notes.html"
else:
change_log = "https://gitlab.haskell.org"
return { "viTags": ["Latest", "TODO_base_version"]
# Check that this link exists
, "viChangeLog": change_log
, "viSourceDL": source
, "viArch": { "A_64": a64
, "A_32": a32
, "A_ARM64": arm64
}
}
def main() -> None:
import argparse
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument('--metadata', required=True, type=Path, help='Path to GHCUp metadata')
parser.add_argument('--pipeline-id', required=True, type=int, help='Which pipeline to generate metadata for')
parser.add_argument('--release-mode', action='store_true', help='Generate metadata which points to downloads folder')
parser.add_argument('--fragment', action='store_true', help='Output the generated fragment rather than whole modified file')
# TODO: We could work out the --version from the project-version CI job.
parser.add_argument('--version', required=True, type=str, help='Version of the GHC compiler')
args = parser.parse_args()
project = gl.projects.get(1, lazy=True)
pipeline = project.pipelines.get(args.pipeline_id)
jobs = pipeline.jobs.list()
job_map = { job.name: job for job in jobs }
# Bit of a hacky way to determine what pipeline we are dealing with but
# the aarch64-darwin job should stay stable for a long time.
if 'nightly-aarch64-darwin-validate' in job_map:
pipeline_type = 'n'
if args.release_mode:
raise Exception("Incompatible arguments: nightly pipeline but using --release-mode")
elif 'release-aarch64-darwin-release' in job_map:
pipeline_type = 'r'
else:
raise Exception("Not a nightly nor release pipeline")
eprint(f"Pipeline Type: {pipeline_type}")
new_yaml = mk_new_yaml(args.release_mode, args.version, pipeline_type, job_map)
if args.fragment:
print(yaml.dump({ args.version : new_yaml }))
else:
with open(args.metadata, 'r') as file:
ghcup_metadata = yaml.safe_load(file)
ghcup_metadata['ghcupDownloads']['GHC'][args.version] = new_yaml
print(yaml.dump(ghcup_metadata))
if __name__ == '__main__':
main()
#!/usr/bin/env python
from distutils.core import setup
setup(name='ghcup-metadata',
author='Matthew Pickering',
author_email='matthew@well-typed.com',
py_modules=['mk_ghcup_metadata'],
entry_points={
'console_scripts': [
'ghcup-metadata=mk_ghcup_metadata:main',
]
}
)
{
"binutils-gdb": {
"branch": "master",
"repo": "https://sourceware.org/git/binutils-gdb.git",
"rev": "49c843e6d2d0577200e7c1d2d02855f21a3a9dde",
"type": "git"
},
"gdb-walkers": {
"branch": "master",
"description": "Bring mdb walkers to gdb, also add other helpful commands.",
"homepage": "",
"owner": "hardenedapple",
"repo": "gdb-walkers",
"rev": "c0701c4c87852bd09e21ca313c48dd4a649cfd0d",
"sha256": "1sd61a90lg8bkddl8lp15qady1wvbjmhjgm0d3lb813nwimlka9y",
"type": "tarball",
"url": "https://github.com/hardenedapple/gdb-walkers/archive/c0701c4c87852bd09e21ca313c48dd4a649cfd0d.tar.gz",
"url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz"
},
"niv": {
"branch": "master",
"description": "Easy dependency management for Nix projects",
"homepage": "https://github.com/nmattia/niv",
"owner": "nmattia",
"repo": "niv",
"rev": "82e5cd1ad3c387863f0545d7591512e76ab0fc41",
"sha256": "090l219mzc0gi33i3psgph6s2pwsc8qy4lyrqjdj4qzkvmaj65a7",
"type": "tarball",
"url": "https://github.com/nmattia/niv/archive/82e5cd1ad3c387863f0545d7591512e76ab0fc41.tar.gz",
"url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz"
},
"nixpkgs": {
"branch": "nixos-22.11",
"description": "Nix Packages collection",
"homepage": "",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "2d10e73416ec1449ef74aeac7faf2cf8c556ff5a",
"sha256": "00s89np0sqr3jxxp5h9nrpqy30fy4vsrmis6mmryrrmjqh09lpfv",
"type": "tarball",
"url": "https://github.com/NixOS/nixpkgs/archive/2d10e73416ec1449ef74aeac7faf2cf8c556ff5a.tar.gz",
"url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz"
},
"processor-trace": {
"branch": "master",
"description": "libipt - an Intel(R) Processor Trace decoder library",
"homepage": "",
"owner": "01org",
"repo": "processor-trace",
"rev": "c848a85c3104e2f5780741f85de5c9e65476ece2",
"sha256": "1ml8g6pm2brlcqp90yvgc780xf64d6k2km7fiqs88wvhlwsl7vzf",
"type": "tarball",
"url": "https://github.com/01org/processor-trace/archive/c848a85c3104e2f5780741f85de5c9e65476ece2.tar.gz",
"url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz"
},
"rr": {
"branch": "master",
"description": "Record and Replay Framework",
"homepage": "http://rr-project.org/",
"owner": "rr-debugger",
"repo": "rr",
"rev": "e77b5f8ca4b360daffd31cf72cb6b093fa9e0b62",
"sha256": "sha256:1gxphqcv1yw2ffmjp0d2cv0mpccr00pf9jhf44rq57jqdsvlfn2c",
"type": "tarball",
"url": "https://github.com/rr-debugger/rr/archive/3f87444659d1f063397fabc7791ed3b13b15c798.tar.gz",
"url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz"
}
}
# This file has been generated by Niv.
let
#
# The fetchers. fetch_<type> fetches specs of type <type>.
#
fetch_file = pkgs: name: spec:
let
name' = sanitizeName name + "-src";
in
if spec.builtin or true then
builtins_fetchurl { inherit (spec) url sha256; name = name'; }
else
pkgs.fetchurl { inherit (spec) url sha256; name = name'; };
fetch_tarball = pkgs: name: spec:
let
name' = sanitizeName name + "-src";
in
if spec.builtin or true then
builtins_fetchTarball { name = name'; inherit (spec) url sha256; }
else
pkgs.fetchzip { name = name'; inherit (spec) url sha256; };
fetch_git = name: spec:
let
ref =
if spec ? ref then spec.ref else
if spec ? branch then "refs/heads/${spec.branch}" else
if spec ? tag then "refs/tags/${spec.tag}" else
abort "In git source '${name}': Please specify `ref`, `tag` or `branch`!";
submodules = if spec ? submodules then spec.submodules else false;
submoduleArg =
let
nixSupportsSubmodules = builtins.compareVersions builtins.nixVersion "2.4" >= 0;
emptyArgWithWarning =
if submodules == true
then
builtins.trace
(
"The niv input \"${name}\" uses submodules "
+ "but your nix's (${builtins.nixVersion}) builtins.fetchGit "
+ "does not support them"
)
{}
else {};
in
if nixSupportsSubmodules
then { inherit submodules; }
else emptyArgWithWarning;
in
builtins.fetchGit
({ url = spec.repo; inherit (spec) rev; inherit ref; } // submoduleArg);
fetch_local = spec: spec.path;
fetch_builtin-tarball = name: throw
''[${name}] The niv type "builtin-tarball" is deprecated. You should instead use `builtin = true`.
$ niv modify ${name} -a type=tarball -a builtin=true'';
fetch_builtin-url = name: throw
''[${name}] The niv type "builtin-url" will soon be deprecated. You should instead use `builtin = true`.
$ niv modify ${name} -a type=file -a builtin=true'';
#
# Various helpers
#
# https://github.com/NixOS/nixpkgs/pull/83241/files#diff-c6f540a4f3bfa4b0e8b6bafd4cd54e8bR695
sanitizeName = name:
(
concatMapStrings (s: if builtins.isList s then "-" else s)
(
builtins.split "[^[:alnum:]+._?=-]+"
((x: builtins.elemAt (builtins.match "\\.*(.*)" x) 0) name)
)
);
# The set of packages used when specs are fetched using non-builtins.
mkPkgs = sources: system:
let
sourcesNixpkgs =
import (builtins_fetchTarball { inherit (sources.nixpkgs) url sha256; }) { inherit system; };
hasNixpkgsPath = builtins.any (x: x.prefix == "nixpkgs") builtins.nixPath;
hasThisAsNixpkgsPath = <nixpkgs> == ./.;
in
if builtins.hasAttr "nixpkgs" sources
then sourcesNixpkgs
else if hasNixpkgsPath && ! hasThisAsNixpkgsPath then
import <nixpkgs> {}
else
abort
''
Please specify either <nixpkgs> (through -I or NIX_PATH=nixpkgs=...) or
add a package called "nixpkgs" to your sources.json.
'';
# The actual fetching function.
fetch = pkgs: name: spec:
if ! builtins.hasAttr "type" spec then
abort "ERROR: niv spec ${name} does not have a 'type' attribute"
else if spec.type == "file" then fetch_file pkgs name spec
else if spec.type == "tarball" then fetch_tarball pkgs name spec
else if spec.type == "git" then fetch_git name spec
else if spec.type == "local" then fetch_local spec
else if spec.type == "builtin-tarball" then fetch_builtin-tarball name
else if spec.type == "builtin-url" then fetch_builtin-url name
else
abort "ERROR: niv spec ${name} has unknown type ${builtins.toJSON spec.type}";
# If the environment variable NIV_OVERRIDE_${name} is set, then use
# the path directly as opposed to the fetched source.
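# For example (path is illustrative):
#   NIV_OVERRIDE_nixpkgs=/home/user/nixpkgs nix-build ...
# makes this file use the local checkout instead of the pinned tarball.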
replace = name: drv:
let
saneName = stringAsChars (c: if isNull (builtins.match "[a-zA-Z0-9]" c) then "_" else c) name;
ersatz = builtins.getEnv "NIV_OVERRIDE_${saneName}";
in
if ersatz == "" then drv else
# this turns the string into an actual Nix path (for both absolute and
# relative paths)
if builtins.substring 0 1 ersatz == "/" then /. + ersatz else /. + builtins.getEnv "PWD" + "/${ersatz}";
# Ports of functions for older nix versions
# a Nix version of mapAttrs if the built-in doesn't exist
mapAttrs = builtins.mapAttrs or (
f: set: with builtins;
listToAttrs (map (attr: { name = attr; value = f attr set.${attr}; }) (attrNames set))
);
# https://github.com/NixOS/nixpkgs/blob/0258808f5744ca980b9a1f24fe0b1e6f0fecee9c/lib/lists.nix#L295
range = first: last: if first > last then [] else builtins.genList (n: first + n) (last - first + 1);
# https://github.com/NixOS/nixpkgs/blob/0258808f5744ca980b9a1f24fe0b1e6f0fecee9c/lib/strings.nix#L257
stringToCharacters = s: map (p: builtins.substring p 1 s) (range 0 (builtins.stringLength s - 1));
# https://github.com/NixOS/nixpkgs/blob/0258808f5744ca980b9a1f24fe0b1e6f0fecee9c/lib/strings.nix#L269
stringAsChars = f: s: concatStrings (map f (stringToCharacters s));
concatMapStrings = f: list: concatStrings (map f list);
concatStrings = builtins.concatStringsSep "";
# https://github.com/NixOS/nixpkgs/blob/8a9f58a375c401b96da862d969f66429def1d118/lib/attrsets.nix#L331
optionalAttrs = cond: as: if cond then as else {};
# fetchTarball version that is compatible between all the versions of Nix
builtins_fetchTarball = { url, name ? null, sha256 }@attrs:
let
inherit (builtins) lessThan nixVersion fetchTarball;
in
if lessThan nixVersion "1.12" then
fetchTarball ({ inherit url; } // (optionalAttrs (!isNull name) { inherit name; }))
else
fetchTarball attrs;
# fetchurl version that is compatible between all the versions of Nix
builtins_fetchurl = { url, name ? null, sha256 }@attrs:
let
inherit (builtins) lessThan nixVersion fetchurl;
in
if lessThan nixVersion "1.12" then
fetchurl ({ inherit url; } // (optionalAttrs (!isNull name) { inherit name; }))
else
fetchurl attrs;
# Create the final "sources" from the config
mkSources = config:
mapAttrs (
name: spec:
if builtins.hasAttr "outPath" spec
then abort
"The values in sources.json should not have an 'outPath' attribute"
else
spec // { outPath = replace name (fetch config.pkgs name spec); }
) config.sources;
# The "config" used by the fetchers
mkConfig =
{ sourcesFile ? if builtins.pathExists ./sources.json then ./sources.json else null
, sources ? if isNull sourcesFile then {} else builtins.fromJSON (builtins.readFile sourcesFile)
, system ? builtins.currentSystem
, pkgs ? mkPkgs sources system
}: rec {
# The sources, i.e. the attribute set of spec name to spec
inherit sources;
# The "pkgs" (evaluated nixpkgs) to use for e.g. non-builtin fetchers
inherit pkgs;
};
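# Typical consumer usage, as a sketch (the ./nix/sources.nix location is
# the niv convention, not something this file enforces):
#   let sources = import ./nix/sources.nix;
#   in import sources.nixpkgs {}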
in
mkSources (mkConfig {}) // { __functor = _: settings: mkSources (mkConfig settings); }
#!/usr/bin/env bash
set -e
# This is a script for preparing and uploading a release of GHC.
#
# Usage,
# 1. Update $ver
# 2. Set $SIGNING_KEY to your key id (prefixed with '=')
# 3. Create a directory and place the source and binary tarballs there
# 4. Run this script from that directory
#
# You can also invoke the script with an argument to perform only
# a subset of the usual release,
#
# upload.sh recompress produce lzip, bzip2, and zip tarballs from the xz tarballs
#
# upload.sh gen_hashes generate hashes of the release tarballs
#
# upload.sh prepare_docs (deprecated) prepare the documentation directory
# (this should be unnecessary as the script which
# fetches artifacts should create this folder from
# the doc-tarball job)
#
# upload.sh upload_docs upload documentation to hackage from the hackage_docs folder
#
# upload.sh upload upload the tarballs and documentation
# to downloads.haskell.org
#
# Prerequisites: moreutils
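# Example invocation (version and key are illustrative):
#   ver=9.6.1 SIGNING_KEY='=Your Name <you@example.com>' ./upload.sh gen_hashes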
if [ -z "$SIGNING_KEY" ]; then
SIGNING_KEY="=Benjamin Gamari <ben@well-typed.com>"
fi
# Infer release name from directory name
if [ -z "$rel_name" ]; then
rel_name="$(basename $(pwd))"
fi
# Infer version from tarball names
if [ -z "$ver" ]; then
ver="$(ls ghc-*.tar.* | sed -ne 's/ghc-\([0-9]\+\.[0-9]\+\.[0-9]\+\(\.[0-9]\+\)\?\).\+/\1/p' | head -n1)"
if [ -z "$ver" ]; then echo "Failed to infer \$ver"; exit 1; fi
fi
host="gitlab-storage.haskell.org"
usage() {
echo "Usage: [rel_name=<name>] ver=7.10.3-rc2 $0 <action>"
echo
echo "where,"
echo " ver gives the version number (e.g. the name of the tarballs, in the case of"
echo " a release candidate something like 7.10.3.20150820, otherwise just 7.10.3)"
echo " rel_name gives the release name (e.g. in the case of a release candidate 7.10.3-rc2"
echo " otherwise just 7.10.3)"
echo "and <action> is one of,"
echo " [nothing] do everything below"
echo " recompress produce lzip and gzip tarballs from xz tarballs"
echo " gen_hashes generated hashes of the release tarballs"
echo " sign sign hashes of the release tarballs"
echo " prepare_docs prepare the documentation directory"
echo " upload_docs upload documentation downloads.haskell.org"
echo " upload upload the tarballs and documentation to downloads.haskell.org"
echo " purge_all purge entire release from the CDN"
echo " purge_file file purge a given file from the CDN"
echo " verify verify the signatures in this directory"
echo
}
if [ -z "$ver" ]; then
usage
exit 1
fi
if [ -z "$rel_name" ]; then
rel_name="$ver"
fi
# returns the set of files that must have hashes generated.
function hash_files() {
echo $(find -maxdepth 1 \
-iname '*.xz' \
-o -iname '*.lz' \
-o -iname '*.bz2' \
-o -iname '*.zip' \
)
echo $(find -maxdepth 1 -iname '*.patch')
}
function gen_hashes() {
echo -n "Hashing..."
sha1sum $(hash_files) >| SHA1SUMS &
sha256sum $(hash_files) >| SHA256SUMS &
wait
echo "done"
}
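# The output follows the usual coreutils checksum format; a SHA256SUMS
# line looks like (hash shortened for illustration):
#   3a7d1f...  ./ghc-9.6.1-src.tar.xz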
function sign() {
# Unset DISPLAY so that pinentry falls back to the terminal
DISPLAY=
eval "$(gpg-agent --daemon --sh --pinentry-program "$(which pinentry)")"
for i in $(hash_files) SHA1SUMS SHA256SUMS; do
if [ -e "$i" ] && [ -e "$i.sig" ] && [ "$i.sig" -nt "$i" ]; then
echo "Skipping signing of $i"
continue
elif [ -e "$i.sig" ] && gpg2 --verify "$i.sig"; then
# Don't re-sign if the current signature is valid
touch "$i.sig"
continue
fi
echo "Signing $i"
rm -f "$i.sig"
gpg2 --use-agent --detach-sign --local-user="$SIGNING_KEY" "$i"
done
}
function verify() {
if [ $(find -iname '*.sig' | wc -l) -eq 0 ]; then
echo "No signatures to verify"
return
fi
for i in *.sig; do
echo
echo "Verifying $i"
gpg2 --verify "$i" "$(basename "$i" .sig)"
done
}
function upload() {
verify
chmod ugo+r,o-w -R .
dir="$(echo "$rel_name" | sed s/-release//)"
lftp -c " \
open -u ghc: sftp://$host && \
mirror -P20 -c --reverse --exclude=fetch-gitlab --exclude=out . ghc/$dir && \
wait all;"
chmod ugo-w *.xz *.bz2 *.zip
}
function purge_all() {
# Purge the CDN cache; compute $dir here so this also works when the
# action is invoked on its own rather than after upload()
local dir="$(echo "$rel_name" | sed s/-release//)"
curl -X PURGE http://downloads.haskell.org/ghc/
curl -X PURGE http://downloads.haskell.org/~ghc/
curl -X PURGE http://downloads.haskell.org/ghc/$dir
curl -X PURGE http://downloads.haskell.org/ghc/$dir/
curl -X PURGE http://downloads.haskell.org/~ghc/$dir
curl -X PURGE http://downloads.haskell.org/~ghc/$dir/
for i in *; do
purge_file "$i"
done
}
function purge_file() {
local i="$1"
curl -X PURGE http://downloads.haskell.org/~ghc/$rel_name/$i
curl -X PURGE http://downloads.haskell.org/~ghc/$rel_name/$i/
curl -X PURGE http://downloads.haskell.org/~ghc/$rel_name/$i/docs/
curl -X PURGE http://downloads.haskell.org/ghc/$rel_name/$i
curl -X PURGE http://downloads.haskell.org/ghc/$rel_name/$i/
curl -X PURGE http://downloads.haskell.org/ghc/$rel_name/$i/docs/
}
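# The PURGE requests above assume the CDN in front of
# downloads.haskell.org honours Varnish-style cache invalidation;
# a plain HTTP server would simply reject the method.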
function prepare_docs() {
echo "THIS COMMAND IS DEPRECATED, THE DOCS FOLDER SHOULD BE PREPARED BY THE FETCH SCRIPT"
local tmp
rm -Rf docs
if [ -z "$GHC_TREE" ]; then
tmp="$(mktemp -d)"
tar -xf "ghc-$ver-src.tar.xz" -C "$tmp"
GHC_TREE="$tmp/ghc-$ver"
fi
mkdocs="$GHC_TREE/distrib/mkDocs/mkDocs"
if [ ! -e "$mkdocs" ]; then
echo "Couldn't find GHC mkDocs at $mkdocs."
echo "Perhaps you need to override GHC_TREE?"
rm -Rf "$tmp"
exit 1
fi
windows_bindist="$(ls ghc-$ver-x86_64-unknown-mingw32.tar.xz | head -n1)"
linux_bindist="$(ls ghc-$ver-x86_64-deb9-linux.tar.xz | head -n1)"
echo "Windows bindist: $windows_bindist"
echo "Linux bindist: $linux_bindist"
$ENTER_FHS_ENV $mkdocs $linux_bindist $windows_bindist
if [ -d "$tmp" ]; then rm -Rf "$tmp"; fi
mkdir -p docs/html
tar -Jxf "$linux_bindist"
cp -R "ghc-$ver/docs/users_guide/build-html/users_guide docs/html/users_guide"
#cp -R ghc-$ver/utils/haddock/doc/haddock docs/html/haddock
rm -R "ghc-$ver"
tar -Jxf docs/libraries.html.tar.xz -C docs/html
mv docs/index.html docs/html
}
function recompress() {
combine <(basename -s .xz *.xz) not <(basename -s .lz *.lz) | \
parallel 'echo "Recompressing {}.xz to {}.lz"; unxz -c {}.xz | lzip - -o {}.lz'
for darwin_bindist in $(ls ghc-*-darwin.tar.xz); do
local dest="$(basename $darwin_bindist .xz).bz2"
if [[ ! -f "$dest" ]]; then
echo "Recompressing Darwin bindist to bzip2..."
unxz -c "$darwin_bindist" | bzip2 > "$dest"
fi
done
for windows_bindist in $(ls ghc-*-mingw32*.tar.xz); do
local tmp="$(mktemp -d tmp.XXX)"
local dest="$(realpath $(basename $windows_bindist .tar.xz).zip)"
echo $dest
if [[ ! -f "$dest" ]]; then
echo "Recompressing Windows bindist to zip..."
tar -C "$tmp" -xf "$windows_bindist"
ls $tmp
(cd "$tmp"; zip -9 -r "$dest" *)
fi
rm -R "$tmp"
done
}
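# For illustration: given ghc-9.6.1-x86_64-deb9-linux.tar.xz this produces
# ghc-9.6.1-x86_64-deb9-linux.tar.lz; Darwin bindists additionally get a
# .tar.bz2 and Windows bindists a .zip.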
function upload_docs() {
local tmp="$(mktemp -d)"
tar -xf ghc-$ver-src.tar.xz -C "$tmp"
GHC_TREE="$tmp/ghc-$ver"
local args=( "$@" )
if [[ -n "$PUBLISH" ]]; then
echo "Publishing to Hackage..."
args+=( "--publish" )
fi
"$GHC_TREE/.gitlab/rel_eng/upload_ghc_libs.py" upload --docs=hackage_docs ${args[@]}
}
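# e.g. PUBLISH=1 ./upload.sh upload_docs passes --publish through to
# upload_ghc_libs.py.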
if [ "x$1" == "x" ]; then
recompress
gen_hashes
sign
if [ ! -d docs ]; then
prepare_docs || { rm -R docs; exit 1; }
fi
if [ -d hackage_docs ]; then
upload_docs
fi
upload
purge_all
else
"$@"
fi