Skip to content
Snippets Groups Projects

Compare revisions

Changes are shown as if the source revision was being merged into the target revision. Learn more about comparing revisions.

Source

Select target project
No results found
Select Git revision
  • FinleyMcIlwaine-master-patch-27418
  • alex
  • bifunctors-5.5.8
  • ci-test
  • drop-9.2-support
  • extra
  • finley/hls-9.6-patches
  • finley/hls-9.6-patches-2
  • fix-1
  • foliage
  • ghc-bignum
  • ghc-from
  • hgeometry-deriveds
  • master
  • rts-stats
  • small-improvements
  • staging
  • sv/ghcide-findercache
  • teo/more-c-deps
  • upstream-testing
  • wip/T93
  • wip/aarch64-ci
  • wip/add-ihaskell
  • wip/atomic-counters
  • wip/benchmarks
  • wip/bgamari
  • wip/bifunctor
  • wip/broken-what34
  • wip/bump
  • wip/constraints
  • wip/docker-bump
  • wip/docker-env
  • wip/fix-ghc-lib-parser
  • wip/fix-repo
  • wip/fix-restricted-eval
  • wip/fix-upstream-url
  • wip/flake-aarch64
  • wip/foliage
  • wip/ghc-11112-b
  • wip/ghc-debug-bump
  • wip/ghc-debug-commit
  • wip/ghc-debug-tests
  • wip/ghc-exactprint
  • wip/ghc-tcplugins
  • wip/ghcide
  • wip/ghcide-2.9.0.0
  • wip/happy-1.20.0-GHC9.6
  • wip/liquidhaskell
  • wip/luite/fix-ghcide-910
  • wip/mi_globals_liquidhaskell_boot
  • wip/microstache
  • wip/monomorphic-list
  • wip/no-update-branch-on-mr
  • wip/parallel-builds
  • wip/pizza-rolls
  • wip/romes/extend
  • wip/romes/hpt-refactor
  • wip/sand-witch/revert-th-argpat
  • wip/separate-logs
  • wip/signal-9
  • wip/stackage-snapshots
  • wip/stackage-wip
  • wip/staged-pipeline
  • wip/teo/bump-ghc-debug
  • wip/teo/containers-0.8
  • wip/teo/linear-broken
  • wip/text-2.0
  • wip/update-doctest-patch
  • wip/vty-5.36
  • wip/windows
  • wip/zubin-stackage
71 results

Target

Select target project
  • ghc/head.hackage
  • RyanGlScott/head.hackage
  • vaibhavsagar/head.hackage
  • phadej/head.hackage
  • jessoune29/head.hackage
  • alanz/head.hackage
  • clint/head.hackage
  • osa1/head.hackage
  • supersven/head.hackage
  • fendor/head.hackage
  • hsyl20/head.hackage
  • adinapoli/head.hackage
  • alexbiehl/head.hackage
  • mimi.vx/head.hackage
  • Kleidukos/head.hackage
  • wz1000/head.hackage
  • alinab/head.hackage
  • teo/head.hackage
  • duog/head.hackage
  • sheaf/head.hackage
  • expipiplus1/head.hackage
  • drsooch/head.hackage
  • tobias/head.hackage
  • brandonchinn178/head.hackage
  • mpickering/hooks-setup-testing
  • Mikolaj/head.hackage
  • RandomMoonwalker/head.hackage
  • facundominguez/head.hackage
  • trac-fizzixnerd/head.hackage
  • neil.mayhew/head.hackage
  • jappeace/head.hackage
31 results
Select Git revision
  • 2020-06-08
  • bv-sized-libBF
  • core-lint
  • drop-old-patches
  • gitlab-ci
  • gitlab-ci-nix
  • hackage-deploy
  • master
  • parsley
  • patch-tool-fixes
  • save-compiler-info
  • wip/monomorphic-list
12 results
Show changes
Commits on Source (680)
patches/* -text
...@@ -9,29 +9,40 @@ ...@@ -9,29 +9,40 @@
# To accomplish this we use the ci executable in ./ci. This drives a set of # To accomplish this we use the ci executable in ./ci. This drives a set of
# cabal v2-build builds and preserves their results. # cabal v2-build builds and preserves their results.
# #
# The compiler to be tested can be taken from a number of sources. The # The execution flow looks something like:
# build-master, build-9.0, and build-9.2 jobs form the validation pipeline of the #
# head.hackage repository. In addition, other GitLab projects (e.g. ghc/ghc>) # - Gitlab runner
# - (nix run)
# - run-ci
# - ./run-ci (the Nix package just wraps the script)
# - (nix run) (when USE_NIX=1)
# - head-hackage-ci $EXTRA_OPTS (a Cabal project in ci/)
# - ci/Main.hs
# - TestPatches.testPatches <$> TestPatches.config
# - option '--test-package'
# - <something similar for building the packages>
#
# EXTRA_OPTS are injected into the execution flow inside ./run-ci, which in turn
# sources them from ci/config.sh.
#
# The compiler to be tested can be taken from a number of sources.
# head.hackage's own validation pipeline runs against GHC HEAD and the three
# supported major versions. In addition, other GitLab projects (e.g. ghc/ghc>)
# can trigger a multi-project pipeline, specifying a GHC binary distribution # can trigger a multi-project pipeline, specifying a GHC binary distribution
# via either the GHC_TARBALL or UPSTREAM_* variables. # via either the GHC_TARBALL or UPSTREAM_* variables.
#
stages:
- test
- update-repo
- deploy
variables: variables:
# Which nixos/nix Docker image tag to use # Which lix-project/lix Docker image tag to use
DOCKER_TAG: "2.3" DOCKER_TAG: "2.93.3"
# Default GHC bindist
GHC_TARBALL: "https://gitlab.haskell.org/api/v4/projects/1/jobs/artifacts/master/raw/ghc-x86_64-fedora27-linux.tar.xz?job=validate-x86_64-linux-fedora27"
# Default this to ghc/ghc> to make it more convenient to run from the web # Default this to ghc/ghc> to make it more convenient to run from the web
# interface. # interface.
UPSTREAM_PROJECT_ID: 1 UPSTREAM_PROJECT_ID: 1
UPSTREAM_PROJECT_PATH: "ghc/ghc" UPSTREAM_PROJECT_PATH: "ghc/ghc"
GIT_SUBMODULE_STRATEGY: recursive
# CPUS is set by the runner, as usual. # CPUS is set by the runner, as usual.
# EXTRA_HC_OPTS are passed to via --ghc-options to GHC during the package # EXTRA_HC_OPTS are passed to via --ghc-options to GHC during the package
...@@ -44,7 +55,7 @@ variables: ...@@ -44,7 +55,7 @@ variables:
# Multi-project pipeline variables: # Multi-project pipeline variables:
# #
# These are set by the "upstream" pipeline for `build-pipeline` pipelines: # These are set by the "upstream" pipeline for downstream pipelines:
# #
# UPSTREAM_PROJECT_PATH: The path of the upstream project (e.g. `ghc/ghc`) # UPSTREAM_PROJECT_PATH: The path of the upstream project (e.g. `ghc/ghc`)
# UPSTREAM_PIPELINE_ID: The ID of the upstream pipeline # UPSTREAM_PIPELINE_ID: The ID of the upstream pipeline
...@@ -54,167 +65,31 @@ variables: ...@@ -54,167 +65,31 @@ variables:
# UPSTREAM_COMMIT_SHA: The ref or commit SHA of the GHC build to be tested # UPSTREAM_COMMIT_SHA: The ref or commit SHA of the GHC build to be tested
# #
# We explictly set the locale to avoid happy choking up on UTF-8 source code. See #31
LANG: "C.UTF-8"
# A build triggered from a ghc/ghc> pipeline. stages:
build-pipeline: - generate
extends: .build - dispatch
before_script:
- |
if [ -n "$UPSTREAM_COMMIT_SHA" ]; then
# N.B. We can't use this if the upstream pipeline might be in-progress
# since the below URL cannot provide an artifact until a pipeline has
# run to completion on the requested branch. This is in general
# not the case for GHC pipelines. Consequently, in this case we will
# usually rather provide UPSTREAM_PIPELINE_ID.
echo "Pulling binary distribution from commit $UPSTREAM_COMMIT_SHA of project $UPSTREAM_PROJECT_PATH..."
GHC_TARBALL="https://gitlab.haskell.org/$UPSTREAM_PROJECT_PATH/-/jobs/artifacts/$UPSTREAM_COMMIT_SHA/raw/ghc-x86_64-fedora27-linux.tar.xz?job=validate-x86_64-linux-fedora27"
elif [ -n "$UPSTREAM_PIPELINE_ID" ]; then
job_name="validate-x86_64-linux-fedora27"
echo "Pulling ${job_name} binary distribution from Pipeline $UPSTREAM_PIPELINE_ID..."
job_id=$(nix run -f ci/default.nix \
-c find-job $UPSTREAM_PROJECT_ID $UPSTREAM_PIPELINE_ID $job_name)
echo "Using job $job_id..."
GHC_TARBALL="https://gitlab.haskell.org/$UPSTREAM_PROJECT_PATH/-/jobs/$job_id/artifacts/raw/ghc-x86_64-fedora27-linux.tar.xz"
fi
rules:
- if: '$UPSTREAM_COMMIT_SHA || $UPSTREAM_PIPELINE_ID'
when: always
- when: never
# Build against the master branch
build-master:
extends: .build
variables:
GHC_TARBALL: "https://gitlab.haskell.org/api/v4/projects/1/jobs/artifacts/master/raw/ghc-x86_64-fedora27-linux.tar.xz?job=validate-x86_64-linux-fedora27"
EXTRA_HC_OPTS: "-dcore-lint -ddump-timings"
rules:
- if: '$UPSTREAM_COMMIT_SHA || $UPSTREAM_PIPELINE_ID'
when: never
- when: always
# Build against the 9.0 branch
build-9.0:
extends: .build
variables:
GHC_TARBALL: "https://gitlab.haskell.org/api/v4/projects/1/jobs/artifacts/ghc-9.0/raw/ghc-x86_64-fedora27-linux.tar.xz?job=validate-x86_64-linux-fedora27"
EXTRA_HC_OPTS: "-dcore-lint"
rules:
- if: '$UPSTREAM_COMMIT_SHA || $UPSTREAM_PIPELINE_ID'
when: never
- when: always
# Build against the 9.2 branch
build-9.2:
extends: .build
variables:
GHC_TARBALL: "https://gitlab.haskell.org/api/v4/projects/1/jobs/artifacts/ghc-9.2/raw/ghc-x86_64-fedora27-linux.tar.xz?job=validate-x86_64-linux-fedora27"
EXTRA_HC_OPTS: "-dcore-lint"
rules:
- if: '$UPSTREAM_COMMIT_SHA || $UPSTREAM_PIPELINE_ID'
when: never
- when: always
.build:
stage: test
tags:
- x86_64-linux
image: "nixos/nix:$DOCKER_TAG"
cache:
key: build-HEAD
paths:
- store.nar
before_script:
- |
if [ -e store.nar ]; then
echo "Extracting cached Nix store..."
nix-store --import -vv < store.nar || echo "invalid cache"
else
echo "No cache found"
fi
script:
# Install GHC
- echo "Bindist tarball is $GHC_TARBALL"
- nix run -f ./ci -c curl -L "$GHC_TARBALL" > ghc.tar.xz
- |
nix build \
-f ci/ghc-from-artifact.nix \
--arg ghcTarball ./ghc.tar.xz \
--out-link ghc
- export GHC=`pwd`/ghc/bin/ghc
- rm -Rf $HOME/.cabal/packages/local ci/run
# Build CI executable
- |
nix-build ./ci -j$CPUS --no-build-output
nix-store --export \
$(nix-store -qR --include-outputs \
$(nix-instantiate --quiet ./ci)) \
> store.nar
# Test it
- nix run -f ./ci -c run-ci
after_script:
- ls -lh
- |
nix run -f ./ci -c \
tar -cJf results.tar.xz -C ci/run \
results.json logs compiler-info
artifacts:
when: always
paths:
- results.tar.xz
# Build and deploy a Hackage repository
update-repo:
stage: update-repo
tags:
- x86_64-linux
- head.hackage
image: "nixos/nix:$DOCKER_TAG"
generate-pipeline:
variables: variables:
KEYS_TARBALL: https://downloads.haskell.org/ghc/head.hackage-keys.tar.enc GIT_SUBMODULE_STRATEGY: none
# KEYS_TARBALL_KEY provided by protected variable image: alpine:latest
tags: [x86_64-linux]
rules: stage: generate
- if: '$CI_COMMIT_BRANCH == "master"' script: ./ci/generate-pipeline.sh
script:
- nix-channel --add https://nixos.org/channels/nixpkgs-unstable nixpkgs
- nix-channel --update
- nix build -f ci/default.nix
- nix run -f ci/default.nix -c build-repo.sh extract-keys
- nix run -f ci/default.nix -c build-repo.sh build-repo
dependencies:
- build-master
after_script:
- rm -Rf keys
artifacts:
paths:
- repo
pages:
stage: deploy
tags:
- x86_64-linux
- head.hackage
image: "nixos/nix:$DOCKER_TAG"
script:
- mv repo public
dependencies:
- update-repo
rules:
- if: '$CI_COMMIT_BRANCH == "master"'
artifacts: artifacts:
paths: paths:
- public - gitlab-generated-pipeline.yml
run-pipeline:
stage: dispatch
trigger:
strategy: depend
forward:
pipeline_variables: true
include:
- artifact: gitlab-generated-pipeline.yml
job: generate-pipeline
[submodule "tests/ghc-debug"]
path = tests/ghc-debug
url = https://gitlab.haskell.org/ghc/ghc-debug.git
[submodule "tests/text"]
path = tests/text
url = https://github.com/haskell/text.git
[submodule "tests/bytestring"]
path = tests/bytestring
url = https://github.com/haskell/bytestring.git
[submodule "tests/containers"]
path = tests/containers
url = https://github.com/haskell/containers.git
Copyright 2023 The GHC Team
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
3. Neither the name of the copyright holder nor the names of its contributors
may be used to endorse or promote products derived from this software without
specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS “AS IS” AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
...@@ -6,12 +6,11 @@ Submit PRs with patch(es) relative to the source tarball(s) of ...@@ -6,12 +6,11 @@ Submit PRs with patch(es) relative to the source tarball(s) of
existing Hackage package(s). existing Hackage package(s).
- The patches MUST apply cleanly by `patch -p1` when inside the - The patches MUST apply cleanly by `patch -p1` when inside the
original unpacked source-tarball. (Travis CI will verify this when original unpacked source-tarball. (CI will verify this when
you submit a PR). you submit a PR).
- The patches SHOULD work with at least GHC HEAD and the most recent - The patches SHOULD work with at least GHC HEAD and a set of recent stable
stable released GHC version (currently this means with GHC 9.0, GHC 9.2, and released GHC versions (currently this means with GHC 9.10, 9.12, 9.14 and 9.15).
GHC 9.3).
- The patches SHOULD ideally result in the same code being compiled, - The patches SHOULD ideally result in the same code being compiled,
as one of the main purposes of these patches is to make regression as one of the main purposes of these patches is to make regression
...@@ -23,6 +22,12 @@ existing Hackage package(s). ...@@ -23,6 +22,12 @@ existing Hackage package(s).
This repo contains `<pkg-id>.patch` files in the This repo contains `<pkg-id>.patch` files in the
[`patches/`](./patches/) folder (where `<pkg-id>` refers to a specific [`patches/`](./patches/) folder (where `<pkg-id>` refers to a specific
release of a package, e.g. `lens-4.15.3`). release of a package, e.g. `lens-4.15.3`).
Adding a patch forces the system to use that specific version,
so empty patch files may exist to force the system to use that a
newer version, instead of a previous patch if available.
For example consider a patched `th-abstraction-0.5.0`, and an empty patch `th-abstraction-0.6.0`,
if we were to remove the empty patch, `0.6.0`, certain libraries such
as `generics-sop` fail to build, because it's forced to use `0.5.0`.
Once merged to `master`, all package releases whose `<pkg-id>` is Once merged to `master`, all package releases whose `<pkg-id>` is
mentioned will enter the *HEAD.hackage* package index; if there is a mentioned will enter the *HEAD.hackage* package index; if there is a
...@@ -57,6 +62,21 @@ repository head.hackage.ghc.haskell.org ...@@ -57,6 +62,21 @@ repository head.hackage.ghc.haskell.org
active-repositories: hackage.haskell.org, head.hackage.ghc.haskell.org:override active-repositories: hackage.haskell.org, head.hackage.ghc.haskell.org:override
``` ```
The use of `:override` forces cabal's constraint solver to pick versions of
libraries that have corresponding patches in head.hackage whenever possible.
This may or may not be what you want depending on your use case. If you wish
to permit cabal to choose build plans that include different versions of
libraries than what are patched in head.hackage, skip the `:override`:
```cabal
active-repositories: hackage.haskell.org, head.hackage.ghc.haskell.org
```
Also see
https://cabal.readthedocs.io/en/3.12/cabal-project-description-file.html#cfg-field-active-repositories.
`HEAD.hackage` doesn't bump the bounds of boot packages + certain other packages to avoid the busywork of bumping them. When using `HEAD.hackage`, you should use `--allow-newer` for these packages. The full list is [here](https://gitlab.haskell.org/ghc/head.hackage/-/blob/90570e1c4606c1d7d3d41797ec1b32d1b984067b/ci/MakeConstraints.hs#L40-49).
### As an add-on remote repository ### As an add-on remote repository
It is *not* recommended to add the `HEAD.hackage` repository index to It is *not* recommended to add the `HEAD.hackage` repository index to
...@@ -122,38 +142,32 @@ initialize it as a git repository, and the patch. ...@@ -122,38 +142,32 @@ initialize it as a git repository, and the patch.
### Adding a patch ### Adding a patch
The `scripts/patch-tool` script is a tool for conveniently authoring and updating The `scripts/patch-tool` script is a tool for conveniently authoring and updating patches. For example, to patch the `doctest` package, you can run the following steps:
patches. For instance, if you find that the `doctest` package needs to be
patched first run:
```
$ scripts/patch-tool unpack doctest
```
This will extract a `doctest` source tree to `packages/doctest-$version` and
initialize it as a git repository. You can now proceed to edit the tree as
necessary and run
```
$ scripts/patch-tool update-patches
```
This will create an appropriately-named patch in `patches/` from the edits in
the `doctest` tree.
### Usage with `nix` 1. `scripts/patch-tool unpack doctest`
1. Modify files in `packages/doctest-$version/` as necessary
1. Build/test as normal, e.g. `cabal build doctest`
1. `scripts/patch-tool update-patches`
1. Commit the patch
When contributing a patch, one needs to be mindful of [Hackage revisions].
head.hackage doesn't combine patches with the revisions of a package. Instead,
a patch is applied on the unrevised package (also called revision 0). This
implies that when contributing patches, it might be necessary to additionally
include the changes that are already in some revision. Moreover, this also
implies that if a patch only contains changes that are already present in
revisions, then contributing the patch to head.hackage is useless as the changes
are already available for building.
[Hackage revisions]: https://github.com/haskell-infra/hackage-trustees/blob/master/revisions-information.md
`default.nix` is a [Nix](https://nixos.org/nix/) expression which can be used to
build `head.hackage` packages using GHC 8.6.1-alpha2:
```
$ nix build -f ./. haskellPackages.servant
```
It can also be used to build a compiler from a local source tree and use this to
build `head.hackage` packages:
```
$ nix build -f ./. --arg ghc "(import ghc-from-source.nix {ghc-path=$GHC_TREE;})"
```
### GitLab CI ### GitLab CI
GHC's GitLab instance uses GitLab CI and the `head-hackage-ci` tool (contained GHC's GitLab instance uses GitLab CI and the `head-hackage-ci` tool (contained
in the `ci/` directory) to test the `head.hackage` patchset against GHC releases in the `ci/` directory) to test the `head.hackage` patchset against GHC releases
and snapshots. and snapshots. It can also compile head.hackage using a patch to GHC; just add
the `user-facing` label to a GHC MR, and the existing CI infrastructure will
invoke head.hackage.
To run a similar build locally start by downloading and installing a binary To run a similar build locally start by downloading and installing a binary
distribution appropriate for your distribution and then call the `run-ci` script: distribution appropriate for your distribution and then call the `run-ci` script:
...@@ -166,6 +180,15 @@ $ ./run-ci ...@@ -166,6 +180,15 @@ $ ./run-ci
This will build all packages having patches and produce a textual summary, as This will build all packages having patches and produce a textual summary, as
well as a JSON file (`result.json`) describing the outcome. well as a JSON file (`result.json`) describing the outcome.
If you are using nix you can run:
```
nix-shell ci/ --command run-ci
```
Note that we currently rely on IOG's Hydra instance for caching of flake
outputs to ensure that they aren't rebuilt with every job.
### Hackage repository ### Hackage repository
......
-- Need an empty file for cabal.project.local to work
-- https://github.com/haskell/cabal/issues/9168
...@@ -14,27 +14,65 @@ import Text.PrettyPrint.ANSI.Leijen (Doc, vcat, (<+>)) ...@@ -14,27 +14,65 @@ import Text.PrettyPrint.ANSI.Leijen (Doc, vcat, (<+>))
import Utils import Utils
-- These dependencies cause issues when testing boot libraries because the test-suites
-- introduce circular dependencies. One way to solve the circularity is to select
-- older version of packages (namely unix) which doesn't have the bytestring dependency (<= 2.5)
-- but we want to use the newer version of unix and just not use the optional
-- features of optparse-applicative nor tasty.
extraConstraints :: [String]
extraConstraints = [
"optparse-applicative -process"
, "tasty -unix"
]
-- These packages we must use the installed version, because there's no way to upgrade
-- them
bootPkgs :: S.Set Cabal.PackageName bootPkgs :: S.Set Cabal.PackageName
bootPkgs = S.fromList bootPkgs = S.fromList
[ "base" [ "base"
, "template-haskell" , "template-haskell"
, "time"
, "Cabal"
, "ghc" , "ghc"
, "ghc-prim" , "ghc-prim"
, "integer-gmp" , "integer-gmp"
, "bytestring"
, "text"
, "binary"
, "ghc-bignum" , "ghc-bignum"
] ]
-- These packages are installed, but we can install newer versions if the build plan
-- allows.. so we --allow-newer them in order to help find more build plans.
allowNewerPkgs :: S.Set Cabal.PackageName
allowNewerPkgs = S.fromList
[ "time"
, "binary"
, "bytestring"
, "Cabal"
, "containers"
, "deepseq"
, "text"
, "ghc-boot"
, "ghc-boot-th" ] `S.union` bootPkgs
constraints :: [String] -> Doc
constraints constraints =
"constraints:" PP.<$$> PP.indent 2 constraintsDoc
where
constraintsDoc = PP.vcat $ PP.punctuate "," (map PP.text constraints)
allowNewer :: S.Set Cabal.PackageName -> Doc allowNewer :: S.Set Cabal.PackageName -> Doc
allowNewer pkgs = allowNewer pkgs =
"allow-newer:" PP.<$$> PP.indent 2 pkgsDoc "allow-newer:" PP.<$$> PP.indent 2 pkgsDoc
where where
pkgsDoc = PP.vcat $ PP.punctuate "," $ map prettyPackageName $ S.toList pkgs pkgsDoc = PP.vcat $ PP.punctuate "," $ map prettyPackageName $ S.toList pkgs
installedConstraints :: S.Set Cabal.PackageName -> S.Set Cabal.PackageName -> Doc
installedConstraints bootPkgs patchedPkgs =
"constraints:" PP.<$$> PP.indent 2 pkgsDoc
where
pkgsDoc = PP.vcat $ PP.punctuate ","
[ prettyPackageName bootPkg <+> "installed"
| bootPkg <- S.toList bootPkgs
, bootPkg `S.notMember` patchedPkgs
]
versionConstraints :: [(Cabal.PackageName, Version)] -> Doc versionConstraints :: [(Cabal.PackageName, Version)] -> Doc
versionConstraints pkgs = versionConstraints pkgs =
"constraints:" PP.<$$> PP.indent 2 body "constraints:" PP.<$$> PP.indent 2 body
...@@ -62,9 +100,14 @@ makeConstraints :: FilePath -- ^ patch directory ...@@ -62,9 +100,14 @@ makeConstraints :: FilePath -- ^ patch directory
-> IO Doc -> IO Doc
makeConstraints patchDir = do makeConstraints patchDir = do
patches <- findPatchedPackages patchDir patches <- findPatchedPackages patchDir
let doc = PP.vcat let patchedPkgs = S.fromList $ map fst patches
[ allowNewer bootPkgs doc = PP.vcat
[ allowNewer allowNewerPkgs
, ""
, installedConstraints bootPkgs patchedPkgs
, "" , ""
, versionConstraints patches , versionConstraints patches
, ""
, constraints extraConstraints
] ]
return doc return doc
...@@ -77,7 +77,7 @@ The below is all orchestrated by `run-ci.sh`: ...@@ -77,7 +77,7 @@ The below is all orchestrated by `run-ci.sh`:
the outcome of the build the outcome of the build
1. Write a JSON report (of type `Types.RunResult ()`) to `result.json` 1. Write a JSON report (of type `Types.RunResult ()`) to `result.json`
1. Examine the failed units and determine whether there were any unexpected failures. 1. Examine the failed packages and determine whether there were any unexpected failures.
### Build plans and empty patches ### Build plans and empty patches
......
...@@ -11,7 +11,7 @@ module TestPatches ...@@ -11,7 +11,7 @@ module TestPatches
import Control.Monad import Control.Monad
import Data.Foldable import Data.Foldable
import Data.List (intercalate) import Data.List (intercalate, partition)
import Data.Maybe import Data.Maybe
import Data.Text (Text) import Data.Text (Text)
import GHC.Generics import GHC.Generics
...@@ -64,38 +64,51 @@ buildToolPackage (BuildToolPackages pkgs) name = name `S.member` pkgs ...@@ -64,38 +64,51 @@ buildToolPackage (BuildToolPackages pkgs) name = name `S.member` pkgs
data Config = Config { configPatchDir :: FilePath data Config = Config { configPatchDir :: FilePath
, configCompiler :: FilePath , configCompiler :: FilePath
, configLoggingWrapper :: Maybe FilePath
, configGhcOptions :: [String] , configGhcOptions :: [String]
, configCabalOptions :: [String] , configCabalOptions :: [String]
, configOnlyPackages :: Maybe (S.Set Cabal.PackageName) , configOnlyPackages :: Maybe (S.Set Cabal.PackageName)
, configConcurrency :: Int , configConcurrency :: Int
, configExtraCabalFragments :: [FilePath] , configExtraCabalFragments :: [FilePath]
, configExtraPackages :: [(Cabal.PackageName, Version)] , configExtraPackages :: [(Cabal.PackageName, Version)]
, configTestPackages :: [(Cabal.PackageName, FilePath)]
, configExpectedBrokenPkgs :: BrokenPackages , configExpectedBrokenPkgs :: BrokenPackages
, configBuildToolPkgs :: BuildToolPackages , configBuildToolPkgs :: BuildToolPackages
} }
cabalOptions :: Config -> [String] cabalOptions :: Config -> [String]
cabalOptions cfg = cabalOptions cfg =
let
compilerOption =
maybe
[ "-w", configCompiler cfg ]
(\l -> [ "-w", l, "--with-hc-pkg", configCompiler cfg <> "-pkg" ])
(configLoggingWrapper cfg)
in
configCabalOptions cfg ++ configCabalOptions cfg ++
[ "-w", configCompiler cfg compilerOption
] ++ concatMap (\opt -> ["--ghc-options", opt]) (configGhcOptions cfg)
config :: Parser TestPatches.Config config :: Parser TestPatches.Config
config = config =
TestPatches.Config TestPatches.Config
<$> patchDir <$> patchDir
<*> compiler <*> compiler
<*> loggingWrapper
<*> ghcOptions <*> ghcOptions
<*> cabalOptions <*> cabalOptions
<*> onlyPackages <*> onlyPackages
<*> concurrency <*> concurrency
<*> extraCabalFragments <*> extraCabalFragments
<*> extraPackages <*> extraPackages
<*> testPackages
<*> expectedBrokenPkgs <*> expectedBrokenPkgs
<*> buildToolPkgs <*> buildToolPkgs
where where
patchDir = option str (short 'p' <> long "patches" <> help "patch directory" <> value "./patches") patchDir = option str (short 'p' <> long "patches" <> help "patch directory" <> value "./patches")
compiler = option str (short 'w' <> long "with-compiler" <> help "path of compiler") compiler = option str (short 'w' <> long "with-compiler" <> help "path of compiler")
loggingWrapper =
fmap Just (option str (long "logging-wrapper" <> help "path of compiler logging wrapper"))
<|> pure Nothing
ghcOptions = many $ option str (short 'f' <> long "ghc-option" <> help "flag to pass to compiler") ghcOptions = many $ option str (short 'f' <> long "ghc-option" <> help "flag to pass to compiler")
cabalOptions = many $ option str (short 'F' <> long "cabal-option" <> help "flag to pass to cabal-install") cabalOptions = many $ option str (short 'F' <> long "cabal-option" <> help "flag to pass to cabal-install")
onlyPackages = onlyPackages =
...@@ -104,6 +117,7 @@ config = ...@@ -104,6 +117,7 @@ config =
concurrency = option auto (short 'j' <> long "concurrency" <> value 1 <> help "number of concurrent builds") concurrency = option auto (short 'j' <> long "concurrency" <> value 1 <> help "number of concurrent builds")
extraCabalFragments = many $ option str (long "extra-cabal-fragment" <> help "path of extra configuration to include in cabal project files") extraCabalFragments = many $ option str (long "extra-cabal-fragment" <> help "path of extra configuration to include in cabal project files")
extraPackages = many $ option pkgVer (short 'P' <> long "extra-package" <> help "other, un-patched packages to test") extraPackages = many $ option pkgVer (short 'P' <> long "extra-package" <> help "other, un-patched packages to test")
testPackages = many $ option pkgNamePath (short 'T' <> long "test-package" <> help "A package to run tests for")
expectedBrokenPkgs = expectedBrokenPkgs =
fmap (BrokenPackages . S.fromList) $ many fmap (BrokenPackages . S.fromList) $ many
$ option $ option
...@@ -128,6 +142,19 @@ config = ...@@ -128,6 +142,19 @@ config =
, "expected to be in form of PKG_NAME==VERSION" , "expected to be in form of PKG_NAME==VERSION"
] ]
pkgNamePath :: ReadM (Cabal.PackageName, FilePath)
pkgNamePath = str >>= parse . T.pack
where
parse s
| [name, fp] <- T.splitOn "=" s
= pure (Cabal.mkPackageName $ T.unpack name, T.unpack fp)
| otherwise
= fail $ unlines
[ "Invalid test package specified:"
, "expected to be in form of PKG_NAME=FILEPATH"
]
pkgName :: ReadM Cabal.PackageName pkgName :: ReadM Cabal.PackageName
pkgName = str >>= maybe (fail "invalid package name") pure . simpleParse pkgName = str >>= maybe (fail "invalid package name") pure . simpleParse
...@@ -152,14 +179,28 @@ testPatches cfg = do ...@@ -152,14 +179,28 @@ testPatches cfg = do
, patchedPackageResult = res , patchedPackageResult = res
} }
return [tpatch] return [tpatch]
testedPatches <- fold <$> mapConcurrentlyN (fromIntegral $ configConcurrency cfg) build (S.toList packages') testedPatches <- fold <$> mapConcurrentlyN (fromIntegral $ configConcurrency cfg) build (S.toList packages')
let runResult = RunResult testedPatches compInfo
print $ resultSummary (configExpectedBrokenPkgs cfg) runResult let test :: (Cabal.PackageName, FilePath) -> IO ([TestedPatch LogOutput])
test (pname, fpath) = do
res <- testPackage cfg (pname, fpath)
let tpatch = TestedPatch { patchedPackageName = PkgName $ T.pack $ display pname
, patchedPackageVersion = Ver $ []
, patchedPackageResult = res
}
return [tpatch]
testResults <- fold <$> mapM test (configTestPackages cfg)
let runResult = RunResult { testedPatches = testedPatches
, testedTests = testResults
, compilerInfo = compInfo
}
let (okay, msg) = resultSummary (configExpectedBrokenPkgs cfg) runResult
print msg
BSL.writeFile "results.json" . encode =<< writeLogs "logs" runResult BSL.writeFile "results.json" . encode =<< writeLogs "logs" runResult
let failedBuilds = failedUnits (configExpectedBrokenPkgs cfg) runResult
planningFailures = planningErrors runResult
okay = null failedBuilds && null planningFailures
unless okay $ exitWith $ ExitFailure 1 unless okay $ exitWith $ ExitFailure 1
writeLogs :: FilePath -> RunResult LogOutput -> IO (RunResult ()) writeLogs :: FilePath -> RunResult LogOutput -> IO (RunResult ())
...@@ -191,37 +232,70 @@ failedUnits broken = M.filter didFail . runResultUnits ...@@ -191,37 +232,70 @@ failedUnits broken = M.filter didFail . runResultUnits
planningErrors :: RunResult log -> [(PkgName, Ver)] planningErrors :: RunResult log -> [(PkgName, Ver)]
planningErrors runResult = planningErrors runResult =
[ (patchedPackageName tpatch, patchedPackageVersion tpatch) [ (patchedPackageName tpatch, patchedPackageVersion tpatch)
| tpatch <- testedPatches runResult | tpatch <- testedPatches runResult ++ testedTests runResult
, PackagePlanningFailed _ <- pure $ patchedPackageResult tpatch , PackagePlanningFailed _ <- pure $ patchedPackageResult tpatch
] ]
resultSummary :: forall log. BrokenPackages -> RunResult log -> Doc resultSummary :: forall log. BrokenPackages -> RunResult log -> (Bool, Doc)
resultSummary broken runResult = vcat resultSummary broken runResult = (ok, msg)
[ "Total units built:" <+> pshow (length allUnits)
, ""
, pshow (length planningErrs) <+> "had no valid install plan:"
, PP.indent 4 $ vcat $ map (uncurry prettyPkgVer) planningErrs
, ""
, pshow (length failedUnits) <+> "units failed to build:"
, PP.indent 4 $ vcat
[ prettyPkgVer (pkgName binfo) (version binfo) <+> expectedDoc
| (binfo, _) <- M.elems failedUnits
, let expectedDoc
| failureExpected broken (pkgName binfo) = PP.parens $ PP.yellow $ PP.text "expected"
| otherwise = mempty
]
, ""
, pshow (length failedDependsUnits) <+> "units failed to build due to unbuildable dependencies."
]
where where
ok = null planningErrs
&& null failedTests
&& null failedTestsBuild
&& null failedUnits
msg = vcat
[ "Total packages built:" <+> pshow (length allUnits)
, ""
, pshow (length expectedPlanningErrs) <+> "had no valid install plan (expected):"
, PP.indent 4 $ vcat $ map (uncurry prettyPkgVer) expectedPlanningErrs
, ""
, pshow (length planningErrs) <+> "had no valid install plan:"
, PP.indent 4 $ vcat $ map (uncurry prettyPkgVer) planningErrs
, ""
, pshow (length failedUnits) <+> "packages failed to build:"
, PP.indent 4 $ vcat
[ prettyPkgVer (pkgName binfo) (version binfo)
| (binfo, _) <- M.elems failedUnits ]
, pshow (length expectedFailedUnits) <+> "packages failed to build (expected):"
, PP.indent 4 $ vcat
[ prettyPkgVer (pkgName binfo) (version binfo)
| (binfo, _) <- M.elems expectedFailedUnits ]
, pshow (length failedTargetUnits) <+> "target packages failed to build:"
, PP.indent 4 $ vcat
[ prettyPkgVer pkg ver
| (pkg, ver) <- failedTargetUnits ]
, ""
, pshow (length failedDependsUnits) <+> "packages failed to build due to unbuildable dependencies."
, ""
, pshow (length failedTestsBuild) <+> "testsuites failed build."
, PP.indent 4 $ vcat
[ prettyPkgName pkg_name | pkg_name <- failedTestsBuild ]
, pshow (length failedTests) <+> "testsuites failed."
, PP.indent 4 $ vcat
[ prettyPkgName pkg_name | pkg_name <- failedTests ]
]
allUnits = runResultUnits runResult allUnits = runResultUnits runResult
planningErrs = planningErrors runResult (expectedPlanningErrs, planningErrs) =
partition (failureExpected broken . fst) (planningErrors runResult)
failedTests = [ pkg_name | (TestedPatch pkg_name ver (PackageResult (PackageBuildSucceeded PackageTestsFailed) _)) <- testedTests runResult ]
failedUnits :: M.Map UnitId (BuildInfo, BuildResult log) failedTestsBuild = [ pkg_name | (TestedPatch pkg_name ver (PackageResult PackageBuildFailed _)) <- testedTests runResult ]
failedUnits = M.filter failed allUnits
failedTargetUnits =
[ (patchedPackageName tp, patchedPackageVersion tp)
| tp <- testedPatches runResult
, not $ isSuccessfulPackageResult (patchedPackageResult tp)
]
failedUnits, expectedFailedUnits :: M.Map UnitId (BuildInfo, BuildResult log)
(expectedFailedUnits, failedUnits) = M.partition splitExpected (M.filter failed allUnits)
where failed (_, BuildFailed _) = True where failed (_, BuildFailed _) = True
failed _ = False failed _ = False
splitExpected (binfo, _) = failureExpected broken (pkgName binfo)
failedDependsUnits :: M.Map UnitId (S.Set UnitId) failedDependsUnits :: M.Map UnitId (S.Set UnitId)
failedDependsUnits = M.filter (not . S.null) (failedDeps allUnits) failedDependsUnits = M.filter (not . S.null) (failedDeps allUnits)
...@@ -231,10 +305,15 @@ toPkgName = PkgName . T.pack . display ...@@ -231,10 +305,15 @@ toPkgName = PkgName . T.pack . display
toVer :: Version -> Ver toVer :: Version -> Ver
toVer = Ver . versionNumbers toVer = Ver . versionNumbers
prettyPkgName :: PkgName -> Doc
prettyPkgName (PkgName pname) =
PP.blue (PP.text $ T.unpack pname)
-- | For @cabal-plan@ types. -- | For @cabal-plan@ types.
prettyPkgVer :: PkgName -> Ver -> Doc prettyPkgVer :: PkgName -> Ver -> Doc
prettyPkgVer (PkgName pname) (Ver ver) = prettyPkgVer pname (Ver ver) =
PP.blue (PP.text $ T.unpack pname) <+> PP.green (PP.text $ intercalate "." $ map show ver) prettyPkgName pname
<+> PP.green (PP.text $ intercalate "." $ map show ver)
-- | For @Cabal@ types. -- | For @Cabal@ types.
prettyPackageVersion :: Cabal.PackageName -> Version -> Doc prettyPackageVersion :: Cabal.PackageName -> Version -> Doc
...@@ -250,6 +329,7 @@ buildPackage cfg pname version = do ...@@ -250,6 +329,7 @@ buildPackage cfg pname version = do
createDirectoryIfMissing True dirName createDirectoryIfMissing True dirName
copyFile "cabal.project" (dirName </> "cabal.project") copyFile "cabal.project" (dirName </> "cabal.project")
appendFile (dirName </> "cabal.project") "packages: .\n" appendFile (dirName </> "cabal.project") "packages: .\n"
appendFile (dirName </> "cabal.project") $ "package *\n ghc-options:" ++ unwords (configGhcOptions cfg)
TIO.writeFile TIO.writeFile
(dirName </> concat ["test-", display pname, ".cabal"]) (dirName </> concat ["test-", display pname, ".cabal"])
(makeTestCabalFile cfg pname version) (makeTestCabalFile cfg pname version)
...@@ -258,8 +338,35 @@ buildPackage cfg pname version = do ...@@ -258,8 +338,35 @@ buildPackage cfg pname version = do
code <- runProcess $ setWorkingDir dirName code <- runProcess $ setWorkingDir dirName
$ proc "cabal" $ proc "cabal"
$ ["new-build"] ++ cabalOptions cfg $ ["new-build"] ++ cabalOptions cfg
whatHappened ("=> Build of" <+> prettyPackageVersion pname version) cfg pname dirName code Nothing
where
dirName = "test-" ++ display pname ++ "-" ++ display version
testPackage :: Config -> (Cabal.PackageName, FilePath) -> IO (PackageResult LogOutput)
testPackage cfg (pname, fpath) = do
logMsg $ "=> Testing" <+> prettyPackageName pname
-- figure out what happened -- prepare the test package
createDirectoryIfMissing True dirName
copyFile "cabal.project" (dirName </> "cabal.project")
appendFile (dirName </> "cabal.project") ("packages: " ++ fpath ++ "\n")
-- run the build
code <- runProcess $ setWorkingDir dirName
$ proc "cabal"
$ ["new-build", Cabal.unPackageName pname, "--enable-tests"] ++ cabalOptions cfg
case code of
ExitSuccess -> do
runCode <- runProcess $ setWorkingDir dirName
$ proc "cabal"
$ ["new-test", Cabal.unPackageName pname, "--enable-tests"] ++ cabalOptions cfg
whatHappened ("=> Test of" <+> prettyPackageName pname) cfg pname dirName code (Just runCode)
_ ->
whatHappened ("=> Test of" <+> prettyPackageName pname) cfg pname dirName code Nothing
where
dirName = "test-" ++ display pname
whatHappened herald cfg pname dirName code runCode = do
compilerId <- getCompilerId (configCompiler cfg)
let planPath = dirName </> "dist-newstyle" </> "cache" </> "plan.json" let planPath = dirName </> "dist-newstyle" </> "cache" </> "plan.json"
planExists <- doesFileExist planPath planExists <- doesFileExist planPath
case planExists of case planExists of
...@@ -269,20 +376,29 @@ buildPackage cfg pname version = do ...@@ -269,20 +376,29 @@ buildPackage cfg pname version = do
let logDir = cabalDir </> "logs" </> compilerId let logDir = cabalDir </> "logs" </> compilerId
results <- mapM (checkUnit logDir) (pjUnits plan) results <- mapM (checkUnit logDir) (pjUnits plan)
logMsg $ logMsg $
let result = case code of let result = case fromMaybe code runCode of
ExitSuccess -> PP.cyan "succeeded" ExitSuccess -> PP.cyan "succeeded"
ExitFailure n -> PP.red "failed" <+> PP.parens ("code" <+> pshow n) ExitFailure n -> PP.red "failed" <+> PP.parens ("code" <+> pshow n)
in "=> Build of" <+> prettyPackageVersion pname version <+> result in herald <+> result
-- N.B. we remove the build directory on failure to ensure -- N.B. we remove the build directory on failure to ensure
-- that we re-extract the source if the user re-runs after -- that we re-extract the source if the user re-runs after
-- modifying a patch. -- modifying a patch.
unless (code == ExitSuccess) $ removeDirectoryRecursive dirName unless (code == ExitSuccess) $ removeDirectoryRecursive dirName
return $ PackageResult (code == ExitSuccess) (mergeInfoPlan (planToBuildInfo plan) results) return $ PackageResult codesToStatus (mergeInfoPlan (planToBuildInfo plan) results)
False -> do False -> do
logMsg $ PP.red $ "=> Planning for" <+> prettyPackageVersion pname version <+> "failed" logMsg $ PP.red $ "=> Planning for" <+> herald <+> "failed"
removeDirectoryRecursive dirName removeDirectoryRecursive dirName
return $ PackagePlanningFailed mempty return $ PackagePlanningFailed mempty
where where
codesToStatus =
case code of
ExitSuccess -> PackageBuildSucceeded $
case runCode of
Nothing -> NoTests
Just rCode -> case rCode of
ExitSuccess -> PackageTestsSucceeded
_ -> PackageTestsFailed
_ -> PackageBuildFailed
planToBuildInfo :: PlanJson -> M.Map UnitId BuildInfo planToBuildInfo :: PlanJson -> M.Map UnitId BuildInfo
planToBuildInfo plan = M.fromList planToBuildInfo plan = M.fromList
[ (uId unit, info) [ (uId unit, info)
...@@ -309,9 +425,18 @@ buildPackage cfg pname version = do ...@@ -309,9 +425,18 @@ buildPackage cfg pname version = do
case exists of case exists of
True -> do True -> do
buildLog <- TE.decodeUtf8With TE.lenientDecode <$> BS.readFile logPath buildLog <- TE.decodeUtf8With TE.lenientDecode <$> BS.readFile logPath
let PkgId (PkgName unitPkgName) _pvers = uPId unit
if | T.null buildLog if | T.null buildLog
-> return $ BuildFailed (LogOutput buildLog) -> return $ BuildFailed (LogOutput buildLog)
| any isInstallingLine $ take 20 $ reverse $ T.lines buildLog | any isInstallingLine $ take 20 $ reverse $ T.lines buildLog
-- Note that it's not enough to check for isInstallingLine, as
-- it's possible for packages with custom Setup.hs scripts to
-- fail even after installation has completed (e.g., Agda, as
-- reported in #47). But only apply this check to the package
-- being tested, as we only want to label the tested package as
-- failing, not any of its dependencies.
, not (Cabal.unPackageName pname == T.unpack unitPkgName) ||
isPackageBuildSucceeded codesToStatus
-> return $ BuildSucceeded (LogOutput buildLog) -> return $ BuildSucceeded (LogOutput buildLog)
| otherwise | otherwise
-> return $ BuildFailed (LogOutput buildLog) -> return $ BuildFailed (LogOutput buildLog)
...@@ -330,7 +455,7 @@ buildPackage cfg pname version = do ...@@ -330,7 +455,7 @@ buildPackage cfg pname version = do
where where
err = M.mapMissing $ \_ _ -> error "error merging" err = M.mapMissing $ \_ _ -> error "error merging"
dirName = "test-" ++ display pname ++ "-" ++ display version
makeTestCabalFile :: Config -> Cabal.PackageName -> Version -> T.Text makeTestCabalFile :: Config -> Cabal.PackageName -> Version -> T.Text
makeTestCabalFile cfg pname' ver' = makeTestCabalFile cfg pname' ver' =
...@@ -392,7 +517,6 @@ setup cfg = do ...@@ -392,7 +517,6 @@ setup cfg = do
constraints <- MakeConstraints.makeConstraints (configPatchDir cfg) constraints <- MakeConstraints.makeConstraints (configPatchDir cfg)
appendFile "cabal.project" $ show $ vcat $ appendFile "cabal.project" $ show $ vcat $
[ "with-compiler: " <> PP.text (configCompiler cfg) [ "with-compiler: " <> PP.text (configCompiler cfg)
, MakeConstraints.allowNewer MakeConstraints.bootPkgs
, constraints , constraints
] ++ map PP.text extraFragments ] ++ map PP.text extraFragments
......
...@@ -3,12 +3,17 @@ ...@@ -3,12 +3,17 @@
{-# LANGUAGE DeriveAnyClass #-} {-# LANGUAGE DeriveAnyClass #-}
{-# LANGUAGE DeriveTraversable #-} {-# LANGUAGE DeriveTraversable #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-} {-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE NamedFieldPuns #-}
module Types module Types
( RunResult(..) ( RunResult(..)
, PackageStatus(..)
, isPackageBuildSucceeded
, PackageTestStatus(..)
, runResultUnits , runResultUnits
, TestedPatch(..) , TestedPatch(..)
, PackageResult(..) , PackageResult(..)
, isSuccessfulPackageResult
, BuildInfo(..) , BuildInfo(..)
, BuildResult(..) , BuildResult(..)
, LogOutput(..) , LogOutput(..)
...@@ -47,17 +52,34 @@ data BuildResult log ...@@ -47,17 +52,34 @@ data BuildResult log
deriving stock (Show, Generic, Functor, Foldable, Traversable) deriving stock (Show, Generic, Functor, Foldable, Traversable)
deriving anyclass (ToJSON, FromJSON) deriving anyclass (ToJSON, FromJSON)
data PackageTestStatus = NoTests | PackageTestsFailed | PackageTestsSucceeded
deriving stock (Show, Generic)
deriving anyclass (ToJSON, FromJSON)
data PackageStatus = PackageBuildFailed | PackageBuildSucceeded PackageTestStatus
deriving stock (Show, Generic)
deriving anyclass (ToJSON, FromJSON)
isPackageBuildSucceeded :: PackageStatus -> Bool
isPackageBuildSucceeded PackageBuildSucceeded{} = True
isPackageBuildSucceeded PackageBuildFailed = False
-- | The result of an attempt to tested a patch -- | The result of an attempt to tested a patch
data PackageResult log data PackageResult log
= PackagePlanningFailed { planningError :: T.Text } = PackagePlanningFailed { planningError :: T.Text }
-- ^ Our attempt to build the package resulting in no viable install plan. -- ^ Our attempt to build the package resulting in no viable install plan.
| PackageResult { packageBuilt :: Bool | PackageResult { packageStatus :: PackageStatus
, units :: M.Map UnitId (BuildInfo, BuildResult log) , units :: M.Map UnitId (BuildInfo, BuildResult log)
} }
-- ^ We attempted to build the package. -- ^ We attempted to build the package.
deriving stock (Show, Generic, Functor, Foldable, Traversable) deriving stock (Show, Generic, Functor, Foldable, Traversable)
deriving anyclass (ToJSON, FromJSON) deriving anyclass (ToJSON, FromJSON)
isSuccessfulPackageResult :: PackageResult log -> Bool
isSuccessfulPackageResult PackagePlanningFailed{} = False
isSuccessfulPackageResult PackageResult{packageStatus} = isPackageBuildSucceeded packageStatus
-- | Information about a patch which we tested. -- | Information about a patch which we tested.
data TestedPatch log data TestedPatch log
= TestedPatch { patchedPackageName :: PkgName = TestedPatch { patchedPackageName :: PkgName
...@@ -70,6 +92,7 @@ data TestedPatch log ...@@ -70,6 +92,7 @@ data TestedPatch log
-- | The result of a CI run. -- | The result of a CI run.
data RunResult log data RunResult log
= RunResult { testedPatches :: [TestedPatch log] = RunResult { testedPatches :: [TestedPatch log]
, testedTests :: [TestedPatch log]
, compilerInfo :: CompilerInfo , compilerInfo :: CompilerInfo
} }
deriving stock (Show, Generic, Functor, Foldable, Traversable) deriving stock (Show, Generic, Functor, Foldable, Traversable)
...@@ -78,7 +101,7 @@ data RunResult log ...@@ -78,7 +101,7 @@ data RunResult log
runResultUnits :: RunResult log -> M.Map UnitId (BuildInfo, BuildResult log) runResultUnits :: RunResult log -> M.Map UnitId (BuildInfo, BuildResult log)
runResultUnits runResult = M.unions runResultUnits runResult = M.unions
[ units [ units
| tpatch <- testedPatches runResult | tpatch <- testedPatches runResult ++ testedTests runResult
, PackageResult _ units <- pure $ patchedPackageResult tpatch , PackageResult _ units <- pure $ patchedPackageResult tpatch
] ]
......
...@@ -12,4 +12,5 @@ with pkgs; ...@@ -12,4 +12,5 @@ with pkgs;
hexpat = [ expat ]; hexpat = [ expat ];
hgmp = [ gmp ]; hgmp = [ gmp ];
posix-api = [ systemd ]; posix-api = [ systemd ];
lame = [ lame ];
} }
...@@ -63,10 +63,12 @@ repository $REPO_NAME ...@@ -63,10 +63,12 @@ repository $REPO_NAME
EOF EOF
} }
build_index() { build_index_page() {
local commit="$CI_COMMIT_SHA" local commit="$CI_COMMIT_SHA"
local commit_url="https://gitlab.haskell.org/ghc/head.hackage/commit/$commit" local commit_url="https://gitlab.haskell.org/ghc/head.hackage/commit/$commit"
build_repository_blurb >repo/cabal.project.local build_repository_blurb >repo/repo.cabal.project
cat repo/repo.cabal.project > repo/cabal.project
build_constraints >> repo/cabal.project
cat >repo/ci.html <<EOF cat >repo/ci.html <<EOF
<!DOCTYPE html> <!DOCTYPE html>
...@@ -123,22 +125,24 @@ EOF ...@@ -123,22 +125,24 @@ EOF
<p>The source of this package repository is at <a href="https://gitlab.haskell.org/ghc/head.hackage">gitlab.haskell.org/ghc/head.hackage</a>. <p>The source of this package repository is at <a href="https://gitlab.haskell.org/ghc/head.hackage">gitlab.haskell.org/ghc/head.hackage</a>.
<p>To use package repository with <code>cabal-install</code> add the following <p>To use package repository with <code>cabal-install >= 3.6</code> simply run the following:
to your project's <code>cabal.project.local</code> and run
<code>cabal v2-update</code>: (consider using <code>scripts/head.hackage.sh update</code> as <code>v2-update</code> is broken, <a href="https://github.com/haskell/cabal/issues/5952">Cabal bug #5952</a>)
<pre><code> <pre><code>
$(cat repo/cabal.project.local) $ curl https://ghc.gitlab.haskell.org/head.hackage/cabal.project >> cabal.project.local
$ cabal update
</code></pre> </code></pre>
<p>Finally, you may want to add the <a <p>This will add the following <code>source-repository</code> stanza to your project's <code>cabal.project.local</code>:
href="cabal.constraints">constraints</a> to your project to ensure that <pre><code>
cabal chooses the patched releases. $(cat repo/repo.cabal.project)
</code></pre>
as well as the version constraints in <a
href="cabal.constraints"><code>cabal.constraints</code></a>.
<p>If you find a package that doesn't build with a recent GHC <p>If you find a package that doesn't build with a recent GHC
pre-release see the <a pre-release see the <a
href="https://gitlab.haskell.org/ghc/head.hackage#adding-a-patch">contributor href="https://gitlab.haskell.org/ghc/head.hackage/-/blob/master/README.md#adding-a-patch">contributor
documentation</a> for instructions on how to contribute a patch. documentation</a> for instructions on how to contribute a patch.
<p>If you encounter other trouble refer to the <p>If you encounter other trouble refer to the
<a href="https://gitlab.haskell.org/ghc/head.hackage">head.hackage <a href="https://gitlab.haskell.org/ghc/head.hackage">head.hackage
documentation</a> or documentation</a> or
...@@ -169,7 +173,14 @@ build_repo() { ...@@ -169,7 +173,14 @@ build_repo() {
cabal update cabal update
cabal fetch acme-box-0.0.0.0 cabal fetch acme-box-0.0.0.0
mkdir -p repo/package mkdir -p repo/package
cp $HOME/.cabal/packages/hackage.haskell.org/acme-box/0.0.0.0/acme-box-0.0.0.0.tar.gz repo/package
# if ~/.cabal exists cabal-install will use that, otherwise packages go into $XDG_CACHE_HOME/cabal
if [ -d "$HOME/.cabal" ]; then
cp "$HOME/.cabal/packages/hackage.haskell.org/acme-box/0.0.0.0/acme-box-0.0.0.0.tar.gz" repo/package
else
cp "${XDG_CACHE_HOME:-$HOME/.cache}/cabal/packages/hackage.haskell.org/acme-box/0.0.0.0/acme-box-0.0.0.0.tar.gz" repo/package
fi
log "Bootstrapping repository..." log "Bootstrapping repository..."
hackage-repo-tool bootstrap --keys=./keys --repo=./repo hackage-repo-tool bootstrap --keys=./keys --repo=./repo
...@@ -188,8 +199,8 @@ build_repo() { ...@@ -188,8 +199,8 @@ build_repo() {
log "Building constraints..." log "Building constraints..."
build_constraints > repo/cabal.constraints build_constraints > repo/cabal.constraints
log "Building index..." log "Building index page..."
build_index build_index_page
rm -R tmp rm -R tmp
} }
...@@ -201,7 +212,7 @@ case $1 in ...@@ -201,7 +212,7 @@ case $1 in
build-repository-blurb) build_repository_blurb ;; build-repository-blurb) build_repository_blurb ;;
build-index) build-index)
build_constraints > repo/cabal.constraints build_constraints > repo/cabal.constraints
build_index ;; build_index_page ;;
*) *)
echo "error: Unknown command $1." echo "error: Unknown command $1."
echo echo
......
...@@ -2,5 +2,5 @@ packages: . ...@@ -2,5 +2,5 @@ packages: .
source-repository-package source-repository-package
type: git type: git
location: https://github.com/bgamari/hackage-overlay-repo-tool location: https://gitlab.haskell.org/ghc/hackage-overlay-repo-tool
tag: ae385155d319cd1fa85f359c2b903abb0cf47b90 tag: 52f54229b08c6e86dd163dd42a78b22c10ffb099
-- This cabal project file is included in the CI configuration to
-- preclude the solver from using ancient package versions. See, for instance,
-- ghc/ghc#23048.
index-state:
hackage.haskell.org 2025-05-11T00:00:00Z,
head.hackage HEAD
constraints: th-abstraction >= 0.4
-- aeson is now quite old, don't attempt to use it
constraints: aeson >= 1.0,
-- don't allow uvector, which is deprecated; vector should be used instead
constraints: uvector == 0.0
-- containers-0.5 generally won't build with any recent compiler
constraints: containers >= 0.6
-- earlier transformers-compat versions don't include the Eq constraint on Eq1
-- instances needed by GHC 9.6
constraints: transformers-compat >= 0.7
...@@ -23,6 +23,11 @@ function only_package() { ...@@ -23,6 +23,11 @@ function only_package() {
done done
} }
function test_package() {
echo "Adding $@ to --test-package list"
EXTRA_OPTS="$EXTRA_OPTS --test-package=$1=$2"
}
# Return the version number of the most recent release of the given package # Return the version number of the most recent release of the given package
function latest_version() { function latest_version() {
pkg=$1 pkg=$1
...@@ -59,6 +64,18 @@ function ghc_commit() { ...@@ -59,6 +64,18 @@ function ghc_commit() {
$GHC --print-project-git-commit-id $GHC --print-project-git-commit-id
} }
function ghc_arch() {
$GHC --print-host-platform
}
# ======================================================================
# Baseline constraints
#
# These constraints are applied to preclude the solver from producing build
# plans using ancient, under-constrained package versions.
EXTRA_OPTS="$EXTRA_OPTS --extra-cabal-fragment=$(pwd)/config.cabal.project"
# ====================================================================== # ======================================================================
# The lists begin here # The lists begin here
# #
...@@ -68,18 +85,35 @@ function ghc_commit() { ...@@ -68,18 +85,35 @@ function ghc_commit() {
version="$(ghc_version)" version="$(ghc_version)"
commit="$(ghc_commit)" commit="$(ghc_commit)"
arch="$(ghc_arch)"
echo "Found GHC $version, commit $commit." echo "Found GHC $version, commit $commit."
case $version in case $version in
9.0.*) 9.10.*)
# package ticket # package ticket
broken liquidhaskell-boot 350
# singletons-base only supports the latest ghc
broken singletons-base 00000
;; ;;
9.2.*) 9.12.*)
# package ticket # package ticket
broken liquidhaskell-boot 350
;; ;;
9.3.*) 9.14.*)
# package ticket # package ticket
broken ghcide 00000
broken Agda 26154
# broken until strict-containers is updated to work with the new containers
broken strict-containers 00000
;;
9.15.*)
# package ticket
broken ghcide 00000
# broken until strict-containers is updated to work with the new containers
broken strict-containers 00000
broken liquidhaskell-boot 26513
;; ;;
*) *)
...@@ -87,33 +121,113 @@ case $version in ...@@ -87,33 +121,113 @@ case $version in
;; ;;
esac esac
case $arch in
x86_64-*-*)
# package ticket
;;
aarch64-*-*)
# These just don't build on aarch64
# package ticket
broken charsetdetect 00000
broken packman 00000
;;
*)
echo "$arch is unknown to head.hackage, assuming nothing is broken."
;;
esac
# Extra packages # Extra packages
# ============== # ==============
# #
# These are packages which we don't have patches for but want to test anyways. # These are packages which we don't have patches for but want to test anyways.
extra_package lens extra_package lens 5.2.3
extra_package aeson extra_package generic-lens 2.2.2.0
extra_package criterion extra_package optics 0.4.2.1
extra_package scotty extra_package aeson 2.2.3.0
extra_package generic-lens extra_package criterion 1.6.3.0
extra_package microstache extra_package scotty 0.21
extra_package singletons extra_package generic-lens 2.2.2.0
extra_package servant extra_package microstache 1.0.2.3
extra_package hgmp extra_package singletons-base 3.5
extra_package servant 0.20.1
extra_package hgmp 0.1.2.1
extra_package Agda 2.7.0.1
extra_package mmark 0.0.7.6
extra_package doctest 0.24.0
extra_package tasty 1.5.3
extra_package pandoc 3.1.11.1
extra_package servant-conduit 0.16
extra_package servant-machines 0.16
extra_package linear-generics 0.2.3
extra_package futhark 0.25.13
extra_package generic-random 1.5.0.1
extra_package lame 0.2.2
extra_package inspection-testing 0.5.0.3
extra_package ghcide 2.9.0.0
extra_package ghc-typelits-extra 0.4.7
# This package is affected by https://gitlab.haskell.org/ghc/ghc/-/issues/22912
extra_package vector-space 0.16
# Build-tool packages # Build-tool packages
build_tool_package alex build_tool_package alex
build_tool_package happy build_tool_package happy
build_tool_package c2hs
# Quick build configuration # $BUILD_MODE controls how head.hackage runs.
# ========================= # ===========================================
#
# Four build modes exist: FULL, QUICK, TEST, and COMPAT.
# #
# If $QUICK is defined we build the "quick" configuration, which builds a small # FULL.
# subset of the overall package set. We do this during the merge request # ------
# validation pipeline. # Build all patched + extra packages.
#
# QUICK.
# ------
# Build the "quick" configuration, which builds a small subset of the overall
# package set. (Also runs tests!) We do this during the merge request validation
# pipeline. Note: If "$QUICK" is non-null, it is used as a backwards-compat
# synonym for BUILD_MODE=QUICK.
#
# TEST.
# -----
# Just build the local test packages and run the tests.
#
# COMPAT: FULL + TEST.
# --------------------
# Backwards-compat default build mode.
#
: ${BUILD_MODE:=COMPAT}
if [ -n "$QUICK" ]; then if [ -n "$QUICK" ]; then
only_package Cabal BUILD_MODE=QUICK
only_package microlens
only_package free
only_package optparse-applicative
fi fi
case "$BUILD_MODE" in
FULL) ;;
QUICK)
only_package tasty
only_package Cabal
only_package microlens
only_package free
only_package optparse-applicative
test_package system-test "$(pwd)/../tests/ghc-debug/**/*.cabal"
test_package ghc-tests "$(pwd)/../tests/ghc-tests"
;;
TEST)
# FIXME: I specify a single "only_package" to prevent all the other
# packages from being built. Morally, I really want to say "build
# nothing at all besides the tests".
only_package tasty
test_package system-test "$(pwd)/../tests/ghc-debug/**/*.cabal"
test_package ghc-tests "$(pwd)/../tests/ghc-tests"
test_package all "$(pwd)/../tests/text"
test_package bytestring-tests "$(pwd)/../tests/bytestring"
test_package all "$(pwd)/../tests/containers/containers-tests"
;;
COMPAT)
test_package system-test "$(pwd)/../tests/ghc-debug/**/*.cabal"
test_package ghc-tests "$(pwd)/../tests/ghc-tests"
;;
esac
let sources = import ./nix/sources.nix; { sources ? import ./nix/sources.nix, nixpkgs ? (import sources.nixpkgs.outPath {}) }:
in
{ nixpkgs ? (import sources.nixpkgs.outPath {}) }:
with nixpkgs; with nixpkgs;
let let
haskellPackages = nixpkgs.haskellPackages.override { all-cabal-hashes = sources.all-cabal-hashes.outPath; }; haskellPackages = nixpkgs.haskellPackages;
hackage-repo-tool = hackage-repo-tool =
let src = sources.hackage-security.outPath; let src = sources.hackage-security.outPath;
in haskellPackages.callCabal2nix "hackage-repo-tool" "${src}/hackage-repo-tool" { in nixpkgs.haskell.lib.doJailbreak (haskellPackages.callCabal2nix "hackage-repo-tool" "${src}/hackage-repo-tool" {});
optparse-applicative = haskellPackages.callHackage "optparse-applicative" "0.15.1.0" {};
};
overlay-tool = overlay-tool =
let src = sources.overlay-tool; let src = sources.overlay-tool;
in haskellPackages.callCabal2nix "hackage-overlay-repo-tool" src { }; in nixpkgs.haskell.lib.doJailbreak (haskellPackages.callCabal2nix "hackage-overlay-repo-tool" src { });
head-hackage-ci = head-hackage-ci =
let let
src = nixpkgs.nix-gitignore.gitignoreSource [] ./.; src = nixpkgs.nix-gitignore.gitignoreSource [] ./.;
in haskellPackages.callCabal2nix "head-hackage-ci" src {}; in haskellPackages.callCabal2nix "head-hackage-ci" src {};
buildDeps = import ./build-deps.nix { pkgs = nixpkgs; };
buildDepsFragment = buildDepsFragment =
let let
buildDeps = import ./build-deps.nix { pkgs = nixpkgs; };
mkCabalFragment = pkgName: deps: mkCabalFragment = pkgName: deps:
with pkgs.lib; with pkgs.lib;
...@@ -46,32 +42,45 @@ let ...@@ -46,32 +42,45 @@ let
let let
deps = [ deps = [
bash curl gnutar findutils patch rsync openssl bash curl gnutar findutils patch rsync openssl
cabal-install ghc gcc binutils-unwrapped pwgen gnused haskellPackages.cabal-install haskellPackages.ghc gcc binutils-unwrapped pwgen gnused
hackage-repo-tool overlay-tool python3 jq hackage-repo-tool overlay-tool python3 jq pkg-config
git # cabal-install wants this to fetch source-repository-packages git # cabal-install wants this to fetch source-repository-packages
]; ];
pkg_config_depends = lib.makeSearchPathOutput "dev" "lib/pkgconfig" (lib.concatLists (lib.attrValues buildDeps));
in in
runCommand "repo" { runCommand "repo" {
nativeBuildInputs = [ makeWrapper ]; nativeBuildInputs = [ makeWrapper ];
cabalDepsSrc = buildDepsFragment;
} '' } ''
mkdir -p $out/bin mkdir -p $out/bin
makeWrapper ${head-hackage-ci}/bin/head-hackage-ci $out/bin/head-hackage-ci \ makeWrapper ${head-hackage-ci}/bin/head-hackage-ci $out/bin/head-hackage-ci \
--prefix PATH : ${stdenv.lib.makeBinPath deps}:$out/bin --prefix PATH : ${lib.makeBinPath deps}:$out/bin
makeWrapper ${./build-repo.sh} $out/bin/build-repo.sh \ makeWrapper ${./build-repo.sh} $out/bin/build-repo.sh \
--prefix PATH : ${stdenv.lib.makeBinPath deps}:$out/bin --prefix PATH : ${lib.makeBinPath deps}:$out/bin
makeWrapper ${./discover_tarball.sh} $out/bin/discover_tarball.sh \
--prefix PATH : ${lib.makeBinPath deps}:$out/bin
makeWrapper ${../run-ci} $out/bin/run-ci \ makeWrapper ${../run-ci} $out/bin/run-ci \
--prefix PATH : ${stdenv.lib.makeBinPath deps}:$out/bin \ --prefix PATH : ${lib.makeBinPath deps}:$out/bin \
--prefix PKG_CONFIG_PATH : ${pkg_config_depends} \
--set USE_NIX 1 \ --set USE_NIX 1 \
--set CI_CONFIG ${./config.sh} --set CI_CONFIG ${./config.sh}
makeWrapper ${./find-job.sh} $out/bin/find-job \ makeWrapper ${./find-job.sh} $out/bin/find-job \
--prefix PATH : ${stdenv.lib.makeBinPath deps}:$out/bin --prefix PATH : ${lib.makeBinPath deps}:$out/bin
makeWrapper ${./find-latest-job.sh} $out/bin/find-latest-job \
--prefix PATH : ${lib.makeBinPath deps}:$out/bin
makeWrapper ${xz}/bin/xz $out/bin/xz makeWrapper ${xz}/bin/xz $out/bin/xz
makeWrapper ${curl}/bin/curl $out/bin/curl makeWrapper ${curl}/bin/curl $out/bin/curl
''; '';
in in
build-repo mkShell {
name = "head-hackage-build-env";
buildInputs = [ build-repo ];
cabalDepsSrc = buildDepsFragment;
}
#! /usr/bin/env bash
#
# Determine which GHC binary-distribution artifact to use and print its URL
# on stdout (diagnostics go to stderr).
#
# Inputs (environment):
#   UPSTREAM_BRANCH_NAME   upstream GHC branch; selects deb10 vs deb12 bindists
#   NIGHTLY                non-empty => use the nightly job variant
#   RELEASE_JOB            "yes"     => use the release job variant
#   SLOW_VALIDATE          non-empty => use the numa-slow-validate variant (x86_64 only)
#   UPSTREAM_COMMIT_SHA / UPSTREAM_PIPELINE_ID / UPSTREAM_BRANCH_NAME
#                          how to locate the artifact (tried in that order)
#   UPSTREAM_PROJECT_PATH / UPSTREAM_PROJECT_ID
set -e

cd "$(dirname "${BASH_SOURCE[0]}")"

ARCH="$(uname -m)"

>&2 echo "NIGHTLY=${NIGHTLY}"
>&2 echo "RELEASE_JOB=${RELEASE_JOB}"
>&2 echo "SLOW_VALIDATE=${SLOW_VALIDATE}"
>&2 echo "ARCH=${ARCH}"

# NOTE: If you add a new JOB_NAME here then you also might need to modify the
# `needs` field in ghc/ghc .gitlab-ci.yml to avoid triggering the downstream job
# too early.

# Set BINDIST_NAME and JOB_NAME for the current $ARCH and job flavour
# (NIGHTLY / RELEASE_JOB / SLOW_VALIDATE).
#
# Arguments:
#   $1 - Debian version tag of the bindist (deb10 or deb12)
#   $2 - job basename used on x86_64 when SLOW_VALIDATE is not requested,
#        e.g. "x86_64-linux-fedora33-release" or "x86_64-linux-deb12-validate"
select_bindist() {
    local deb=$1
    local fallback=$2
    case "$ARCH" in
        aarch64)
            if [ -n "$NIGHTLY" ]; then
                BINDIST_NAME="ghc-aarch64-linux-$deb-validate.tar.xz"
                JOB_NAME="nightly-aarch64-linux-$deb-validate"
            elif [ "$RELEASE_JOB" == "yes" ]; then
                BINDIST_NAME="ghc-aarch64-linux-$deb-release+no_split_sections.tar.xz"
                JOB_NAME="release-aarch64-linux-$deb-release+no_split_sections"
            else
                BINDIST_NAME="ghc-aarch64-linux-$deb-validate.tar.xz"
                JOB_NAME="aarch64-linux-$deb-validate"
            fi
            ;;
        *)
            if [ -n "$SLOW_VALIDATE" ]; then
                BINDIST_NAME="ghc-x86_64-linux-$deb-numa-slow-validate.tar.xz"
                if [ -n "$NIGHTLY" ]; then
                    JOB_NAME="nightly-x86_64-linux-$deb-numa-slow-validate"
                elif [ "$RELEASE_JOB" == "yes" ]; then
                    # There is no slow-validate release pipeline; bail out.
                    echo "No slow validate build in release job"
                    exit 2
                else
                    JOB_NAME="x86_64-linux-$deb-numa-slow-validate"
                fi
            else
                BINDIST_NAME="ghc-$fallback.tar.xz"
                if [ -n "$NIGHTLY" ]; then
                    JOB_NAME="nightly-$fallback"
                elif [ "$RELEASE_JOB" == "yes" ]; then
                    JOB_NAME="release-$fallback"
                else
                    JOB_NAME="$fallback"
                fi
            fi
            ;;
    esac
}

case "$UPSTREAM_BRANCH_NAME" in
    ghc-9.10)
        # Before the update to deb12, we use the deb10 bindists
        select_bindist deb10 x86_64-linux-fedora33-release
        ;;
    ghc-9.12|ghc-9.14) # Post update to use deb12
        select_bindist deb12 x86_64-linux-fedora33-release
        ;;
    *)
        select_bindist deb12 x86_64-linux-deb12-validate
        ;;
esac

>&2 echo "BINDIST_NAME=${BINDIST_NAME}"
>&2 echo "JOB_NAME=${JOB_NAME}"

if [ -n "$UPSTREAM_COMMIT_SHA" ]; then
    # N.B. We can't use this if the upstream pipeline might be in-progress
    # since the below URL cannot provide an artifact until a pipeline has
    # run to completion on the requested branch. This is in general
    # not the case for GHC pipelines. Consequently, in this case we will
    # usually rather provide UPSTREAM_PIPELINE_ID.
    >&2 echo "Pulling binary distribution from commit $UPSTREAM_COMMIT_SHA of project $UPSTREAM_PROJECT_PATH..."
    # NOTE(review): unlike the branches below, this branch only assigns
    # GHC_TARBALL and never echoes it — presumably the caller sources this
    # script in that mode; confirm before changing.
    GHC_TARBALL="https://gitlab.haskell.org/$UPSTREAM_PROJECT_PATH/-/jobs/artifacts/$UPSTREAM_COMMIT_SHA/raw/$BINDIST_NAME?job=$JOB_NAME"
elif [ -n "$UPSTREAM_PIPELINE_ID" ]; then
    >&2 echo "Pulling ${JOB_NAME} binary distribution from Pipeline $UPSTREAM_PIPELINE_ID..."
    # Quote all arguments: job names may contain '+' and similar characters.
    job_id=$(find-job "$UPSTREAM_PROJECT_ID" "$UPSTREAM_PIPELINE_ID" "$JOB_NAME")
    >&2 echo "Using job $job_id..."
    echo "https://gitlab.haskell.org/$UPSTREAM_PROJECT_PATH/-/jobs/$job_id/artifacts/raw/$BINDIST_NAME"
elif [ -n "$UPSTREAM_BRANCH_NAME" ]; then
    >&2 echo "Finding ${JOB_NAME} binary distribution from $UPSTREAM_BRANCH_NAME..."
    job_id=$(find-latest-job "$UPSTREAM_PROJECT_ID" "$UPSTREAM_BRANCH_NAME" "$JOB_NAME")
    >&2 echo "Using job $job_id..."
    echo "https://gitlab.haskell.org/$UPSTREAM_PROJECT_PATH/-/jobs/$job_id/artifacts/raw/$BINDIST_NAME"
fi
...@@ -6,22 +6,24 @@ project_id=$1 ...@@ -6,22 +6,24 @@ project_id=$1
pipeline_id=$2 pipeline_id=$2
job_name=$3 job_name=$3
resp=$(mktemp)
# Access token is a protected environment variable in the head.hackage project and # Access token is a protected environment variable in the head.hackage project and
# is necessary for this query to succeed. Sadly job tokens only seem to # is necessary for this query to succeed. Sadly job tokens only seem to
# give us access to the project being built. # give us access to the project being built.
curl \ curl \
--silent --show-error \ --silent --show-error \
-H "Private-Token: $ACCESS_TOKEN" \ -H "Private-Token: $ACCESS_TOKEN" \
"https://gitlab.haskell.org/api/v4/projects/$project_id/pipelines/$pipeline_id/jobs?scope[]=success" \ "https://gitlab.haskell.org/api/v4/projects/$project_id/pipelines/$pipeline_id/jobs?scope[]=success&per_page=50" \
> resp.json > "$resp"
job_id=$(jq ". | map(select(.name == \"$job_name\")) | .[0].id" < resp.json) job_id=$(jq ". | map(select(.name == \"$job_name\")) | .[0].id" < "$resp")
if [ "$job_id" = "null" ]; then if [ "$job_id" = "null" ]; then
echo "Error finding job $job_name for $pipeline_id in project $project_id:" >&2 echo "Error finding job $job_name for $pipeline_id in project $project_id:" >&2
cat resp.json >&2 cat "$resp" >&2
rm resp.json rm "$resp"
exit 1 exit 1
else else
rm resp.json rm "$resp"
echo -n "$job_id" echo -n "$job_id"
fi fi
#!/usr/bin/env bash
#
# find-latest-job PROJECT_ID BRANCH_NAME JOB_NAME
#
# Look through the finished pipelines of BRANCH_NAME in PROJECT_ID (newest
# first, up to 50) and print, via find-job, the id of the first successful
# job called JOB_NAME.  Exits non-zero if no such job is found.
set -e

project_id=$1
branch_name=$2
job_name=$3

resp=$(mktemp)
# Clean up the temp file on every exit path (the original leaked it when a
# job was found and on the "no job in any pipeline" path).
trap 'rm -f "$resp"' EXIT

# Access token is a protected environment variable in the head.hackage project and
# is necessary for this query to succeed. Sadly job tokens only seem to
# give us access to the project being built.
curl \
    --silent --show-error \
    -H "Private-Token: $ACCESS_TOKEN" \
    "https://gitlab.haskell.org/api/v4/projects/$project_id/pipelines/?ref=$branch_name&scope=finished&per_page=50" \
    > "$resp"

pipeline_ids=$(jq '. | map(.id)' < "$resp")
if [ "$pipeline_ids" = "null" ]; then
    # Fixed: the original message interpolated $pipeline_id, which is never
    # defined in this script (copy-paste from find-job.sh).
    echo "Error finding job $job_name for branch $branch_name in project $project_id:" >&2
    cat "$resp" >&2
    exit 1
else
    # Try each pipeline in turn; the first one containing the job wins.
    for pid in $(printf '%s' "$pipeline_ids" | jq '.[]'); do
        if find-job "$project_id" "$pid" "$job_name"; then
            exit 0
        fi
    done
    echo "Error finding job $job_name for $branch_name project $project_id:" >&2
    exit 1
fi
#!/bin/sh
#
# Decide which child pipeline definition this CI run should use and copy it
# to gitlab-generated-pipeline.yml for GitLab to pick up.

PIPELINE_TYPE=validation
if [ -n "$PIPELINE_OVERRIDE" ]; then
    PIPELINE_TYPE="$PIPELINE_OVERRIDE"
# Triggered by GHC
#
# The deprecated, ambiguous `[ ... -a/-o ... ]` operators are replaced by
# separate test invocations combined with && / || (POSIX-recommended form).
elif [ "$CI_PIPELINE_SOURCE" = "pipeline" ] && { [ -n "$UPSTREAM_COMMIT_SHA" ] || [ -n "$UPSTREAM_PIPELINE_ID" ]; }; then
    PIPELINE_TYPE=downstream
# Nightly repo update, to be scheduled
#
# SCHEDULE_TYPE must be set when creating the scheduled job. It is used to
# explicitly identify which schedule we want.
elif [ "$CI_PIPELINE_SOURCE" = "schedule" ] && [ "$SCHEDULE_TYPE" = "update-repo" ]; then
    PIPELINE_TYPE=update-repo
fi

# Copy the given pipeline definition ($1) into place.
mk_pipeline () {
    echo "Generating $PIPELINE_TYPE pipeline"
    cp -v "$1" gitlab-generated-pipeline.yml
}

case "$PIPELINE_TYPE" in
    validation)
        mk_pipeline ci/pipelines/validation.yml
        ;;
    downstream)
        mk_pipeline ci/pipelines/downstream.yml
        ;;
    update-repo)
        mk_pipeline ci/pipelines/update-repo.yml
        ;;
    update-branch)
        mk_pipeline ci/pipelines/update-branch.yml
        ;;
    *)
        echo "Unknown pipeline type: $PIPELINE_TYPE"
        exit 1
        ;;
esac