Compare revisions
patches/* -text
......@@ -2,177 +2,94 @@
# ===========================
#
# This is the GitLab CI automation that drives GHC's head.hackage testing.
# The goal is to be able to test GHC by building a (small) subset of Hackage.
# The goal is to be able to test GHC by building a subset of Hackage.
# Moreover, we want to be able to collect logs of failed builds as well as
# performance metrics from builds that succeed.
#
# To accomplish this we use head.hackage's native Nix support and the
# ghc-artefact-nix expression to make GHC binary distributions usable from
# within Nix. These components are tied together by ./scripts/build-all.nix,
# which contains the list of packages which we build as well as some simple
# configuration to minimize the cost of the builds.
# To accomplish this we use the ci executable in ./ci. This drives a set of
# cabal v2-build builds and preserves their results.
#
# The execution flow looks something like:
#
# - Gitlab runner
# - (nix run)
# - run-ci
# - ./run-ci (the Nix package just wraps the script)
# - (nix run) (when USE_NIX=1)
# - head-hackage-ci $EXTRA_OPTS (a Cabal project in ci/)
# - ci/Main.hs
# - TestPatches.testPatches <$> TestPatches.config
# - option '--test-package'
# - <something similar for building the packages>
#
# EXTRA_OPTS are injected into the execution flow inside ./run-ci, which in turn
# sources them from ci/config.sh.
#
# The compiler to be tested can be taken from a number of sources.
# head.hackage's own validation pipeline runs against GHC HEAD and the three
# supported major versions. In addition, other GitLab projects (e.g. ghc/ghc>)
# can trigger a multi-project pipeline, specifying a GHC binary distribution
# via either the GHC_TARBALL or UPSTREAM_* variables.
#
stages:
- test
- update-repo
- deploy
variables:
# Which nixos/nix Docker image tag to use
DOCKER_TAG: "2.3"
# Default GHC bindist
GHC_TARBALL: "https://gitlab.haskell.org/api/v4/projects/1/jobs/artifacts/master/raw/ghc-x86_64-fedora27-linux.tar.xz?job=validate-x86_64-linux-fedora27"
DOCKER_TAG: "2.13.1"
# Project ID of ghc/ghc
GHC_PROJECT_ID: "1"
# Default this to ghc/ghc> to make it more convenient to run from the web
# interface.
UPSTREAM_PROJECT_ID: 1
UPSTREAM_PROJECT_PATH: "ghc/ghc"
# ACCESS_TOKEN provided via protected environment variable
GIT_SUBMODULE_STRATEGY: recursive
# EXTRA_HC_OPTS provided by GHC job. These are passed via --ghc-options to
# GHC during the package builds. This is instantiated with, e.g., -dcore-lint
# during GHC validation builds.
# CPUS is set by the runner, as usual.
# A build triggered from a ghc/ghc> pipeline.
build-pipeline:
extends: .build
before_script:
- |
if [ -n "$GHC_PIPELINE_ID" ]; then
job_name="validate-x86_64-linux-fedora27"
job_id=$(nix run -f scripts/build-all.nix find-job \
--arg bindistTarball $GHC_TARBALL \
-c find-job.sh $GHC_PROJECT_ID $GHC_PIPELINE_ID $job_name)
echo "Pulling ${job_name} binary distribution from Pipeline $GHC_PIPELINE_ID (job $job_id)..."
fi
only:
- api
- pipelines
- triggers
- web
# EXTRA_HC_OPTS are passed via --ghc-options to GHC during the package
# builds. This is instantiated with, e.g., -dcore-lint during GHC validation
# builds.
# Build against the master branch
build-master:
extends: .build
variables:
GHC_TARBALL: "https://gitlab.haskell.org/api/v4/projects/1/jobs/artifacts/master/raw/ghc-x86_64-fedora27-linux.tar.xz?job=validate-x86_64-linux-fedora27"
EXTRA_HC_OPTS: "-dcore-lint"
only:
- branches
- merge_requests
# Build against the 8.10 branch
build-8.10:
extends: .build
variables:
GHC_TARBALL: "https://gitlab.haskell.org/api/v4/projects/1/jobs/artifacts/ghc-8.10/raw/ghc-x86_64-fedora27-linux.tar.xz?job=validate-x86_64-linux-fedora27"
EXTRA_HC_OPTS: "-dcore-lint"
only:
- branches
- merge_requests
.build:
stage: test
# ONLY_PACKAGES can be passed to restrict the set of packages that are built.
tags:
- x86_64-linux
image: "nixos/nix:$DOCKER_TAG"
cache:
key: build-HEAD
paths:
- store.nar
# EXTRA_OPTS are passed directly to test-patches.
before_script:
- |
if [ -e store.nar ]; then
echo "Extracting cached Nix store..."
nix-store --import -vv < store.nar || echo "invalid cache"
else
echo "No cache found"
fi
# Multi-project pipeline variables:
#
# These are set by the "upstream" pipeline for downstream pipelines:
#
# UPSTREAM_PROJECT_PATH: The path of the upstream project (e.g. `ghc/ghc`)
# UPSTREAM_PIPELINE_ID: The ID of the upstream pipeline
#
# Instead of UPSTREAM_PIPELINE_ID you can also pass:
script:
# Install GHC
- echo "Bindist tarball is $GHC_TARBALL"
- |
nix build \
-f https://github.com/mpickering/ghc-artefact-nix/archive/master.tar.gz \
--argstr url $GHC_TARBALL \
--out-link ghc \
ghcHEAD
- export GHC=`pwd`/ghc/bin/ghc
- rm -Rf $HOME/.cabal/packages/local ci/run
# Build CI executable
- |
nix-build ./ci -j$CPUS --no-build-output
nix-store --export \
$(nix-store -qR --include-outputs \
$(nix-instantiate --quiet ./ci)) \
> store.nar
# Test it
- nix run -f ./ci -c run-ci
# UPSTREAM_COMMIT_SHA: The ref or commit SHA of the GHC build to be tested
#
after_script:
- ls -lh
- |
nix run -f ./ci -c \
tar -cJf results.tar.xz -C ci/run \
results.json logs
# We explicitly set the locale to avoid happy choking on UTF-8 source code. See #31
LANG: "C.UTF-8"
artifacts:
when: always
paths:
- results.tar.xz
# Build and deploy a Hackage repository
update-repo:
stage: update-repo
tags:
- x86_64-linux
- head.hackage
stages:
- generate
- dispatch
image: "nixos/nix:$DOCKER_TAG"
generate-pipeline:
variables:
KEYS_TARBALL: https://downloads.haskell.org/ghc/head.hackage-keys.tar.enc
# KEYS_TARBALL_KEY provided by protected variable
only:
- master
script:
- nix-channel --add https://nixos.org/channels/nixpkgs-unstable nixpkgs
- nix-channel --update
- nix build -f ci/default.nix
- nix run -f ci/default.nix -c build-repo.sh extract-keys
- nix run -f ci/default.nix -c build-repo.sh build-repo
dependencies:
- build-master
after_script:
- rm -Rf keys
artifacts:
paths:
- repo
pages:
stage: deploy
tags:
- x86_64-linux
- head.hackage
image: "nixos/nix:$DOCKER_TAG"
script:
- mv repo public
dependencies:
- update-repo
only:
- master
GIT_SUBMODULE_STRATEGY: none
image: alpine:latest
tags: [x86_64-linux]
stage: generate
script: ./ci/generate-pipeline.sh
artifacts:
paths:
- public
- gitlab-generated-pipeline.yml
run-pipeline:
stage: dispatch
trigger:
strategy: depend
forward:
pipeline_variables: true
include:
- artifact: gitlab-generated-pipeline.yml
job: generate-pipeline
[submodule "tests/ghc-debug"]
path = tests/ghc-debug
url = https://gitlab.haskell.org/ghc/ghc-debug.git
[submodule "tests/text"]
path = tests/text
url = https://github.com/haskell/text.git
[submodule "tests/bytestring"]
path = tests/bytestring
url = https://github.com/haskell/bytestring.git
[submodule "tests/containers"]
path = tests/containers
url = https://github.com/haskell/containers.git
Copyright 2023 The GHC Team
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
3. Neither the name of the copyright holder nor the names of its contributors
may be used to endorse or promote products derived from this software without
specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS “AS IS” AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
......@@ -6,12 +6,11 @@ Submit PRs with patch(es) relative to the source tarball(s) of
existing Hackage package(s).
- The patches MUST apply cleanly by `patch -p1` when inside the
original unpacked source-tarball. (Travis CI will verify this when
original unpacked source-tarball. (CI will verify this when
you submit a PR).
- The patches SHOULD work with at least GHC HEAD and the most recent
stable released GHC version (currently this means with GHC 8.10.1 and
GHC 8.11).
- The patches SHOULD work with at least GHC HEAD and a set of recent stable
released GHC versions (currently this means with GHC 9.6, 9.8, 9.10, 9.12 and 9.13).
- The patches SHOULD ideally result in the same code being compiled,
as one of the main purposes of these patches is to make regression
......@@ -23,6 +22,12 @@ existing Hackage package(s).
This repo contains `<pkg-id>.patch` files in the
[`patches/`](./patches/) folder (where `<pkg-id>` refers to a specific
release of a package, e.g. `lens-4.15.3`).
Adding a patch forces the build to use that specific version, so empty
patch files may exist to force the use of a newer version instead of an
older, previously patched one.
For example, consider a patched `th-abstraction-0.5.0` alongside an empty
patch for `th-abstraction-0.6.0`: if we removed the empty patch for `0.6.0`,
certain libraries such as `generics-sop` would fail to build, because they
would be forced to use `0.5.0`.
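An empty patch is literally just an empty file under `patches/`. A minimal
sketch of adding one for the example above (the same `touch` workflow
described in the CI documentation under "Build plans and empty patches"):
```
# admit th-abstraction-0.6.0 into build plans without patching its source
$ touch patches/th-abstraction-0.6.0.patch
```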
Once merged to `master`, all package releases whose `<pkg-id>` is
mentioned will enter the *HEAD.hackage* package index; if there is a
......@@ -42,6 +47,35 @@ allowing to maximise sharing via the nix-style package-db cache store.
## How to use
If you know what you are looking for, here it is:
```cabal
repository head.hackage.ghc.haskell.org
url: https://ghc.gitlab.haskell.org/head.hackage/
secure: True
key-threshold: 3
root-keys:
f76d08be13e9a61a377a85e2fb63f4c5435d40f8feb3e12eb05905edb8cdea89
26021a13b401500c8eb2761ca95c61f2d625bfef951b939a8124ed12ecf07329
7541f32a4ccca4f97aea3b22f5e593ba2c0267546016b992dfadcd2fe944e55d
active-repositories: hackage.haskell.org, head.hackage.ghc.haskell.org:override
```
The use of `:override` forces cabal's constraint solver to pick versions of
libraries that have corresponding patches in head.hackage whenever possible.
This may or may not be what you want, depending on your use case. If you wish
to permit cabal to choose build plans that include versions of libraries other
than those patched in head.hackage, omit the `:override`:
```cabal
active-repositories: hackage.haskell.org, head.hackage.ghc.haskell.org
```
Also see
https://cabal.readthedocs.io/en/3.12/cabal-project-description-file.html#cfg-field-active-repositories.
`HEAD.hackage` doesn't bump the bounds of boot packages and certain other packages, to avoid the busywork of bumping them. When using `HEAD.hackage`, you should pass `--allow-newer` for these packages. The full list is [here](https://gitlab.haskell.org/ghc/head.hackage/-/blob/90570e1c4606c1d7d3d41797ec1b32d1b984067b/ci/MakeConstraints.hs#L40-49).
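For example, a hedged sketch of a local invocation (the packages listed are
only a few representative entries from that list, and `my-package` is a
placeholder target):
```
$ cabal build my-package \
    --allow-newer=base,template-haskell,ghc-prim,Cabal
```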
### As an add-on remote repository
......@@ -108,50 +142,53 @@ initialize it as a git repository, and the patch.
### Adding a patch
The `scripts/patch-tool` script is a tool for conveniently authoring and updating
patches. For instance, if you find that the `doctest` package needs to be
patched, first run:
```
$ scripts/patch-tool unpack doctest
```
This will extract a `doctest` source tree to `packages/doctest-$version` and
initialize it as a git repository. You can now proceed to edit the tree as
necessary and run
```
$ scripts/patch-tool update-patches
```
This will create an appropriately-named patch in `patches/` from the edits in
the `doctest` tree.
The `scripts/patch-tool` script is a tool for conveniently authoring and updating patches. For example, to patch the `doctest` package, you can run the following steps (a concrete session is sketched after the list):
### Usage with `nix`
1. `scripts/patch-tool unpack doctest`
1. Modify files in `packages/doctest-$version/` as necessary
1. Build/test as normal, e.g. `cabal build doctest`
1. `scripts/patch-tool update-patches`
1. Commit the patch
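Concretely, the steps above might look like the following session (a sketch;
the directory under `packages/` depends on the unpacked version, and the
commit message is illustrative):
```
$ scripts/patch-tool unpack doctest
# ... edit files under packages/doctest-<version>/ as needed ...
$ cabal build doctest                 # check that the patched tree compiles
$ scripts/patch-tool update-patches   # regenerates patches/doctest-<version>.patch
$ git add patches/ && git commit -m "doctest: fix build with GHC HEAD"
```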
When contributing a patch, one needs to be mindful of [Hackage revisions].
head.hackage doesn't combine patches with the revisions of a package. Instead,
a patch is applied to the unrevised package (also called revision 0). This
implies that when contributing patches, it might be necessary to additionally
include the changes that are already in some revision. Moreover, this also
implies that if a patch only contains changes that are already present in
revisions, then contributing the patch to head.hackage is useless, as the
changes are already available for building.
[Hackage revisions]: https://github.com/haskell-infra/hackage-trustees/blob/master/revisions-information.md
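For instance, to see which metadata changes a revision introduced (and hence
what a patch may additionally need to carry), you can diff revision 0 against
the current cabal file. A sketch, with `foo-1.0` as a placeholder package and
assuming Hackage's usual `revision/0.cabal` endpoint:
```
$ curl -sL https://hackage.haskell.org/package/foo-1.0/revision/0.cabal > rev0.cabal
$ curl -sL https://hackage.haskell.org/package/foo-1.0/foo.cabal > latest.cabal
$ diff -u rev0.cabal latest.cabal
```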
`default.nix` is a [Nix](https://nixos.org/nix/) expression which can be used to
build `head.hackage` packages using GHC 8.6.1-alpha2:
```
$ nix build -f ./. haskellPackages.servant
```
It can also be used to build a compiler from a local source tree and use this to
build `head.hackage` packages:
```
$ nix build -f ./. --arg ghc "(import ghc-from-source.nix {ghc-path=$GHC_TREE;})"
```
### GitLab CI
GHC's GitLab instance uses GitLab CI and the `head-hackage-ci` tool (contained
in the `ci/` directory) to test the `head.hackage` patchset against GHC releases
and snapshots.
and snapshots. It can also compile head.hackage using a patch to GHC; just add
the `user-facing` label to a GHC MR, and the existing CI infrastructure will
invoke head.hackage.
To run a similar build locally, start by downloading and installing a binary
distribution appropriate for your platform, and then call the `run-ci` script:
```
$ export GHC_TARBALL=./ghc-x86_64-fedora27-linux.tar.xz
# for extra correctness assurance...
$ export GHC=/path/to/my/ghc
# enable Core Linting for extra correctness assurance...
$ export EXTRA_HC_OPTS=-dcore-lint
$ ./run-ci
```
This will build all packages having patches and produce a textual summary, as
well as a JSON file (`result.json`) describing the outcome.
If you are using nix you can run:
```
nix-shell ci/ --command run-ci
```
Note that we currently rely on IOG's Hydra instance for caching of flake
outputs to ensure that they aren't rebuilt with every job.
### Hackage repository
......
-- Need an empty file for cabal.project.local to work
-- https://github.com/haskell/cabal/issues/9168
......@@ -14,22 +14,65 @@ import Text.PrettyPrint.ANSI.Leijen (Doc, vcat, (<+>))
import Utils
-- These dependencies cause issues when testing boot libraries because the test-suites
-- introduce circular dependencies. One way to solve the circularity is to select an
-- older version of unix (<= 2.5), which doesn't have the bytestring dependency, but
-- we want to use the newer version of unix and simply not use the optional features
-- of optparse-applicative and tasty.
extraConstraints :: [String]
extraConstraints = [
"optparse-applicative -process"
, "tasty -unix"
]
-- For these packages we must use the installed version, because there's no way
-- to upgrade them.
bootPkgs :: S.Set Cabal.PackageName
bootPkgs = S.fromList
[ "base"
, "template-haskell"
, "time"
, "Cabal"
, "ghc"
, "ghc-prim"
, "integer-gmp"
, "ghc-bignum"
]
-- These packages are installed, but we can install newer versions if the build plan
-- allows, so we --allow-newer them in order to help find more build plans.
allowNewerPkgs :: S.Set Cabal.PackageName
allowNewerPkgs = S.fromList
[ "time"
, "binary"
, "bytestring"
, "Cabal"
, "containers"
, "deepseq"
, "text"
, "ghc-boot"
, "ghc-boot-th" ] `S.union` bootPkgs
constraints :: [String] -> Doc
constraints constraints =
"constraints:" PP.<$$> PP.indent 2 constraintsDoc
where
constraintsDoc = PP.vcat $ PP.punctuate "," (map PP.text constraints)
allowNewer :: S.Set Cabal.PackageName -> Doc
allowNewer pkgs =
"allow-newer:" PP.<$$> PP.indent 2 pkgsDoc
where
pkgsDoc = PP.vcat $ PP.punctuate "," $ map prettyPackageName $ S.toList pkgs
installedConstraints :: S.Set Cabal.PackageName -> S.Set Cabal.PackageName -> Doc
installedConstraints bootPkgs patchedPkgs =
"constraints:" PP.<$$> PP.indent 2 pkgsDoc
where
pkgsDoc = PP.vcat $ PP.punctuate ","
[ prettyPackageName bootPkg <+> "installed"
| bootPkg <- S.toList bootPkgs
, bootPkg `S.notMember` patchedPkgs
]
versionConstraints :: [(Cabal.PackageName, Version)] -> Doc
versionConstraints pkgs =
"constraints:" PP.<$$> PP.indent 2 body
......@@ -57,9 +100,14 @@ makeConstraints :: FilePath -- ^ patch directory
-> IO Doc
makeConstraints patchDir = do
patches <- findPatchedPackages patchDir
let doc = PP.vcat
[ allowNewer bootPkgs
let patchedPkgs = S.fromList $ map fst patches
doc = PP.vcat
[ allowNewer allowNewerPkgs
, ""
, installedConstraints bootPkgs patchedPkgs
, ""
, versionConstraints patches
, ""
, constraints extraConstraints
]
return doc
......@@ -25,7 +25,7 @@ Naturally, many Haskell packages have dependencies on native libraries.
* *from nixpkgs*: Here we use [nix][nix] and the [nixpkgs][nixpkgs] package set
to provide native libraries. These dependencies are defined in
`ci/build-deps.nix`. This mode is
`ci/build-deps.nix`. This mode is
[nix]: https://nixos.org/nix/
[nixpkgs]: https://github.com/NixOS/nixpkgs
......@@ -49,7 +49,7 @@ The below is all orchestrated by `run-ci.sh`:
1. Build a set of command-line arguments destined for `head-hackage-ci`
from the broken-package set above and a set of "extra" packages
defined in `config.sh`
defined in `config.sh`
1. If we are using `nixpkgs` to get native libraries: compute a
`cabal.project` fragment from the dependency information in
......@@ -77,5 +77,28 @@ The below is all orchestrated by `run-ci.sh`:
the outcome of the build
1. Write a JSON report (of type `Types.RunResult ()`) to `result.json`
1. Examine the failed units and determine whether there were any unexpected failures.
1. Examine the failed packages and determine whether there were any unexpected failures.
### Build plans and empty patches
When testing a package, the CI driver will construct a build plan that favors
versions of Hackage libraries with `head.hackage` patches over versions of the same
library that lack patches. For example, if CI tests a library that depends on
library `foo` that has two Hackage releases, 0.1 and 0.2, then if `foo-0.1` has
a patch but `foo-0.2` does not, then the driver will include `foo-0.1` in the
build plan even though `foo-0.2` has a more recent version number. This is done
to reduce the likelihood of subsequent Hackage releases of `foo` breaking the
CI due to API changes.
Sometimes, this approach can work against you. Suppose that another library
`bar` also depends on `foo`. Moreover, `bar` requires the use of `foo-0.2` and
excludes `foo-0.1` in its version bounds. Because `foo-0.1` has a patch but
`foo-0.2` does not, however, the CI driver will insist on using `foo-0.1` when
constructing build plans, which means that it will fail to find a valid build
plan for `bar`!
The simplest way to fix this sort of problem is to add a patch for `foo-0.2`.
If there are patches for both `foo-0.1` and `foo-0.2` present, then the CI
driver will admit build plans with either version of `foo`. In the event that
`foo-0.2` already compiles with all supported versions of GHC, you can simply
add an empty patch by running `touch patches/foo-0.2.patch`.
......@@ -11,7 +11,7 @@ module TestPatches
import Control.Monad
import Data.Foldable
import Data.List (intercalate)
import Data.List (intercalate, partition)
import Data.Maybe
import Data.Text (Text)
import GHC.Generics
......@@ -64,38 +64,51 @@ buildToolPackage (BuildToolPackages pkgs) name = name `S.member` pkgs
data Config = Config { configPatchDir :: FilePath
, configCompiler :: FilePath
, configLoggingWrapper :: Maybe FilePath
, configGhcOptions :: [String]
, configCabalOptions :: [String]
, configOnlyPackages :: Maybe (S.Set Cabal.PackageName)
, configConcurrency :: Int
, configExtraCabalFragments :: [FilePath]
, configExtraPackages :: [(Cabal.PackageName, Version)]
, configTestPackages :: [(Cabal.PackageName, FilePath)]
, configExpectedBrokenPkgs :: BrokenPackages
, configBuildToolPkgs :: BuildToolPackages
}
cabalOptions :: Config -> [String]
cabalOptions cfg =
let
compilerOption =
maybe
[ "-w", configCompiler cfg ]
(\l -> [ "-w", l, "--with-hc-pkg", configCompiler cfg <> "-pkg" ])
(configLoggingWrapper cfg)
in
configCabalOptions cfg ++
[ "-w", configCompiler cfg
] ++ concatMap (\opt -> ["--ghc-options", opt]) (configGhcOptions cfg)
compilerOption
config :: Parser TestPatches.Config
config =
TestPatches.Config
<$> patchDir
<*> compiler
<*> loggingWrapper
<*> ghcOptions
<*> cabalOptions
<*> onlyPackages
<*> concurrency
<*> extraCabalFragments
<*> extraPackages
<*> testPackages
<*> expectedBrokenPkgs
<*> buildToolPkgs
where
patchDir = option str (short 'p' <> long "patches" <> help "patch directory" <> value "./patches")
compiler = option str (short 'w' <> long "with-compiler" <> help "path of compiler")
loggingWrapper =
fmap Just (option str (long "logging-wrapper" <> help "path of compiler logging wrapper"))
<|> pure Nothing
ghcOptions = many $ option str (short 'f' <> long "ghc-option" <> help "flag to pass to compiler")
cabalOptions = many $ option str (short 'F' <> long "cabal-option" <> help "flag to pass to cabal-install")
onlyPackages =
......@@ -104,6 +117,7 @@ config =
concurrency = option auto (short 'j' <> long "concurrency" <> value 1 <> help "number of concurrent builds")
extraCabalFragments = many $ option str (long "extra-cabal-fragment" <> help "path of extra configuration to include in cabal project files")
extraPackages = many $ option pkgVer (short 'P' <> long "extra-package" <> help "other, un-patched packages to test")
testPackages = many $ option pkgNamePath (short 'T' <> long "test-package" <> help "A package to run tests for")
expectedBrokenPkgs =
fmap (BrokenPackages . S.fromList) $ many
$ option
......@@ -128,12 +142,26 @@ config =
, "expected to be in form of PKG_NAME==VERSION"
]
pkgNamePath :: ReadM (Cabal.PackageName, FilePath)
pkgNamePath = str >>= parse . T.pack
where
parse s
| [name, fp] <- T.splitOn "=" s
= pure (Cabal.mkPackageName $ T.unpack name, T.unpack fp)
| otherwise
= fail $ unlines
[ "Invalid test package specified:"
, "expected to be in form of PKG_NAME=FILEPATH"
]
pkgName :: ReadM Cabal.PackageName
pkgName = str >>= maybe (fail "invalid package name") pure . simpleParse
testPatches :: Config -> IO ()
testPatches cfg = do
setup cfg
compInfo <- getCompilerInfo cfg
packages <- findPatchedPackages (configPatchDir cfg)
packages <- return (packages ++ configExtraPackages cfg)
let packages' :: S.Set (Cabal.PackageName, Version)
......@@ -151,14 +179,28 @@ testPatches cfg = do
, patchedPackageResult = res
}
return [tpatch]
testedPatches <- fold <$> mapConcurrentlyN (fromIntegral $ configConcurrency cfg) build (S.toList packages')
let runResult = RunResult testedPatches
print $ resultSummary (configExpectedBrokenPkgs cfg) runResult
let test :: (Cabal.PackageName, FilePath) -> IO ([TestedPatch LogOutput])
test (pname, fpath) = do
res <- testPackage cfg (pname, fpath)
let tpatch = TestedPatch { patchedPackageName = PkgName $ T.pack $ display pname
, patchedPackageVersion = Ver $ []
, patchedPackageResult = res
}
return [tpatch]
testResults <- fold <$> mapM test (configTestPackages cfg)
let runResult = RunResult { testedPatches = testedPatches
, testedTests = testResults
, compilerInfo = compInfo
}
let (okay, msg) = resultSummary (configExpectedBrokenPkgs cfg) runResult
print msg
BSL.writeFile "results.json" . encode =<< writeLogs "logs" runResult
let failedBuilds = failedUnits (configExpectedBrokenPkgs cfg) runResult
planningFailures = planningErrors runResult
okay = null failedBuilds && null planningFailures
unless okay $ exitWith $ ExitFailure 1
writeLogs :: FilePath -> RunResult LogOutput -> IO (RunResult ())
......@@ -190,37 +232,70 @@ failedUnits broken = M.filter didFail . runResultUnits
planningErrors :: RunResult log -> [(PkgName, Ver)]
planningErrors runResult =
[ (patchedPackageName tpatch, patchedPackageVersion tpatch)
| tpatch <- testedPatches runResult
| tpatch <- testedPatches runResult ++ testedTests runResult
, PackagePlanningFailed _ <- pure $ patchedPackageResult tpatch
]
resultSummary :: forall log. BrokenPackages -> RunResult log -> Doc
resultSummary broken runResult = vcat
[ "Total units built:" <+> pshow (length allUnits)
, ""
, pshow (length planningErrs) <+> "had no valid install plan:"
, PP.indent 4 $ vcat $ map (uncurry prettyPkgVer) planningErrs
, ""
, pshow (length failedUnits) <+> "units failed to build:"
, PP.indent 4 $ vcat
[ prettyPkgVer (pkgName binfo) (version binfo) <+> expectedDoc
| (binfo, _) <- M.elems failedUnits
, let expectedDoc
| failureExpected broken (pkgName binfo) = PP.parens $ PP.yellow $ PP.text "expected"
| otherwise = mempty
]
, ""
, pshow (length failedDependsUnits) <+> "units failed to build due to unbuildable dependencies."
]
resultSummary :: forall log. BrokenPackages -> RunResult log -> (Bool, Doc)
resultSummary broken runResult = (ok, msg)
where
ok = null planningErrs
&& null failedTests
&& null failedTestsBuild
&& null failedUnits
msg = vcat
[ "Total packages built:" <+> pshow (length allUnits)
, ""
, pshow (length expectedPlanningErrs) <+> "had no valid install plan (expected):"
, PP.indent 4 $ vcat $ map (uncurry prettyPkgVer) expectedPlanningErrs
, ""
, pshow (length planningErrs) <+> "had no valid install plan:"
, PP.indent 4 $ vcat $ map (uncurry prettyPkgVer) planningErrs
, ""
, pshow (length failedUnits) <+> "packages failed to build:"
, PP.indent 4 $ vcat
[ prettyPkgVer (pkgName binfo) (version binfo)
| (binfo, _) <- M.elems failedUnits ]
, pshow (length expectedFailedUnits) <+> "packages failed to build (expected):"
, PP.indent 4 $ vcat
[ prettyPkgVer (pkgName binfo) (version binfo)
| (binfo, _) <- M.elems expectedFailedUnits ]
, pshow (length failedTargetUnits) <+> "target packages failed to build:"
, PP.indent 4 $ vcat
[ prettyPkgVer pkg ver
| (pkg, ver) <- failedTargetUnits ]
, ""
, pshow (length failedDependsUnits) <+> "packages failed to build due to unbuildable dependencies."
, ""
, pshow (length failedTestsBuild) <+> "testsuites failed build."
, PP.indent 4 $ vcat
[ prettyPkgName pkg_name | pkg_name <- failedTestsBuild ]
, pshow (length failedTests) <+> "testsuites failed."
, PP.indent 4 $ vcat
[ prettyPkgName pkg_name | pkg_name <- failedTests ]
]
allUnits = runResultUnits runResult
planningErrs = planningErrors runResult
(expectedPlanningErrs, planningErrs) =
partition (failureExpected broken . fst) (planningErrors runResult)
failedTests = [ pkg_name | (TestedPatch pkg_name ver (PackageResult (PackageBuildSucceeded PackageTestsFailed) _)) <- testedTests runResult ]
failedTestsBuild = [ pkg_name | (TestedPatch pkg_name ver (PackageResult PackageBuildFailed _)) <- testedTests runResult ]
failedUnits :: M.Map UnitId (BuildInfo, BuildResult log)
failedUnits = M.filter failed allUnits
failedTargetUnits =
[ (patchedPackageName tp, patchedPackageVersion tp)
| tp <- testedPatches runResult
, not $ isSuccessfulPackageResult (patchedPackageResult tp)
]
failedUnits, expectedFailedUnits :: M.Map UnitId (BuildInfo, BuildResult log)
(expectedFailedUnits, failedUnits) = M.partition splitExpected (M.filter failed allUnits)
where failed (_, BuildFailed _) = True
failed _ = False
splitExpected (binfo, _) = failureExpected broken (pkgName binfo)
failedDependsUnits :: M.Map UnitId (S.Set UnitId)
failedDependsUnits = M.filter (not . S.null) (failedDeps allUnits)
......@@ -230,10 +305,15 @@ toPkgName = PkgName . T.pack . display
toVer :: Version -> Ver
toVer = Ver . versionNumbers
prettyPkgName :: PkgName -> Doc
prettyPkgName (PkgName pname) =
PP.blue (PP.text $ T.unpack pname)
-- | For @cabal-plan@ types.
prettyPkgVer :: PkgName -> Ver -> Doc
prettyPkgVer (PkgName pname) (Ver ver) =
PP.blue (PP.text $ T.unpack pname) <+> PP.green (PP.text $ intercalate "." $ map show ver)
prettyPkgVer pname (Ver ver) =
prettyPkgName pname
<+> PP.green (PP.text $ intercalate "." $ map show ver)
-- | For @Cabal@ types.
prettyPackageVersion :: Cabal.PackageName -> Version -> Doc
......@@ -249,6 +329,7 @@ buildPackage cfg pname version = do
createDirectoryIfMissing True dirName
copyFile "cabal.project" (dirName </> "cabal.project")
appendFile (dirName </> "cabal.project") "packages: .\n"
appendFile (dirName </> "cabal.project") $ "package *\n ghc-options:" ++ unwords (configGhcOptions cfg)
TIO.writeFile
(dirName </> concat ["test-", display pname, ".cabal"])
(makeTestCabalFile cfg pname version)
......@@ -257,8 +338,35 @@ buildPackage cfg pname version = do
code <- runProcess $ setWorkingDir dirName
$ proc "cabal"
$ ["new-build"] ++ cabalOptions cfg
whatHappened ("=> Build of" <+> prettyPackageVersion pname version) cfg pname dirName code Nothing
where
dirName = "test-" ++ display pname ++ "-" ++ display version
-- figure out what happened
testPackage :: Config -> (Cabal.PackageName, FilePath) -> IO (PackageResult LogOutput)
testPackage cfg (pname, fpath) = do
logMsg $ "=> Testing" <+> prettyPackageName pname
-- prepare the test package
createDirectoryIfMissing True dirName
copyFile "cabal.project" (dirName </> "cabal.project")
appendFile (dirName </> "cabal.project") ("packages: " ++ fpath ++ "\n")
-- run the build
code <- runProcess $ setWorkingDir dirName
$ proc "cabal"
$ ["new-build", Cabal.unPackageName pname, "--enable-tests"] ++ cabalOptions cfg
case code of
ExitSuccess -> do
runCode <- runProcess $ setWorkingDir dirName
$ proc "cabal"
$ ["new-test", Cabal.unPackageName pname, "--enable-tests"] ++ cabalOptions cfg
whatHappened ("=> Test of" <+> prettyPackageName pname) cfg pname dirName code (Just runCode)
_ ->
whatHappened ("=> Test of" <+> prettyPackageName pname) cfg pname dirName code Nothing
where
dirName = "test-" ++ display pname
whatHappened herald cfg pname dirName code runCode = do
compilerId <- getCompilerId (configCompiler cfg)
let planPath = dirName </> "dist-newstyle" </> "cache" </> "plan.json"
planExists <- doesFileExist planPath
case planExists of
......@@ -268,15 +376,29 @@ buildPackage cfg pname version = do
let logDir = cabalDir </> "logs" </> compilerId
results <- mapM (checkUnit logDir) (pjUnits plan)
logMsg $
let result = case code of
let result = case fromMaybe code runCode of
ExitSuccess -> PP.cyan "succeeded"
ExitFailure n -> PP.red "failed" <+> PP.parens ("code" <+> pshow n)
in "=> Build of" <+> prettyPackageVersion pname version <+> result
return $ PackageResult (code == ExitSuccess) (mergeInfoPlan (planToBuildInfo plan) results)
in herald <+> result
-- N.B. we remove the build directory on failure to ensure
-- that we re-extract the source if the user re-runs after
-- modifying a patch.
unless (code == ExitSuccess) $ removeDirectoryRecursive dirName
return $ PackageResult codesToStatus (mergeInfoPlan (planToBuildInfo plan) results)
False -> do
logMsg $ PP.red $ "=> Planning for" <+> prettyPackageVersion pname version <+> "failed"
logMsg $ PP.red $ "=> Planning for" <+> herald <+> "failed"
removeDirectoryRecursive dirName
return $ PackagePlanningFailed mempty
where
codesToStatus =
case code of
ExitSuccess -> PackageBuildSucceeded $
case runCode of
Nothing -> NoTests
Just rCode -> case rCode of
ExitSuccess -> PackageTestsSucceeded
_ -> PackageTestsFailed
_ -> PackageBuildFailed
planToBuildInfo :: PlanJson -> M.Map UnitId BuildInfo
planToBuildInfo plan = M.fromList
[ (uId unit, info)
......@@ -303,9 +425,18 @@ buildPackage cfg pname version = do
case exists of
True -> do
buildLog <- TE.decodeUtf8With TE.lenientDecode <$> BS.readFile logPath
let PkgId (PkgName unitPkgName) _pvers = uPId unit
if | T.null buildLog
-> return $ BuildFailed (LogOutput buildLog)
| any isInstallingLine $ take 5 $ reverse $ T.lines buildLog
| any isInstallingLine $ take 20 $ reverse $ T.lines buildLog
-- Note that it's not enough to check for isInstallingLine, as
-- it's possible for packages with custom Setup.hs scripts to
-- fail even after installation has completed (e.g., Agda, as
-- reported in #47). But only apply this check to the package
-- being tested, as we only want to label the tested package as
-- failing, not any of its dependencies.
, not (Cabal.unPackageName pname == T.unpack unitPkgName) ||
isPackageBuildSucceeded codesToStatus
-> return $ BuildSucceeded (LogOutput buildLog)
| otherwise
-> return $ BuildFailed (LogOutput buildLog)
......@@ -324,7 +455,7 @@ buildPackage cfg pname version = do
where
err = M.mapMissing $ \_ _ -> error "error merging"
dirName = "test-" ++ display pname ++ "-" ++ display version
makeTestCabalFile :: Config -> Cabal.PackageName -> Version -> T.Text
makeTestCabalFile cfg pname' ver' =
......@@ -347,6 +478,12 @@ makeTestCabalFile cfg pname' ver' =
| otherwise
= "build-depends: " <> pname <> " == " <> ver
getCompilerInfo :: Config -> IO CompilerInfo
getCompilerInfo cfg = do
(out,err) <- readProcess_ $ proc (configCompiler cfg) ["--info"]
BSL.writeFile "compiler-info" out
return $ CompilerInfo $ read $ T.unpack $ TE.decodeUtf8 $ BSL.toStrict out
setup :: Config -> IO ()
setup cfg = do
keysExist <- doesDirectoryExist "keys"
......@@ -380,7 +517,7 @@ setup cfg = do
constraints <- MakeConstraints.makeConstraints (configPatchDir cfg)
appendFile "cabal.project" $ show $ vcat $
[ "with-compiler: " <> PP.text (configCompiler cfg)
, MakeConstraints.allowNewer MakeConstraints.bootPkgs
, constraints
] ++ map PP.text extraFragments
runProcess_ $ proc "cabal" ["new-update"]
......
......@@ -3,15 +3,21 @@
{-# LANGUAGE DeriveAnyClass #-}
{-# LANGUAGE DeriveTraversable #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE NamedFieldPuns #-}
module Types
( RunResult(..)
, PackageStatus(..)
, isPackageBuildSucceeded
, PackageTestStatus(..)
, runResultUnits
, TestedPatch(..)
, PackageResult(..)
, isSuccessfulPackageResult
, BuildInfo(..)
, BuildResult(..)
, LogOutput(..)
, CompilerInfo(..)
) where
import Cabal.Plan
......@@ -46,17 +52,34 @@ data BuildResult log
deriving stock (Show, Generic, Functor, Foldable, Traversable)
deriving anyclass (ToJSON, FromJSON)
data PackageTestStatus = NoTests | PackageTestsFailed | PackageTestsSucceeded
deriving stock (Show, Generic)
deriving anyclass (ToJSON, FromJSON)
data PackageStatus = PackageBuildFailed | PackageBuildSucceeded PackageTestStatus
deriving stock (Show, Generic)
deriving anyclass (ToJSON, FromJSON)
isPackageBuildSucceeded :: PackageStatus -> Bool
isPackageBuildSucceeded PackageBuildSucceeded{} = True
isPackageBuildSucceeded PackageBuildFailed = False
-- | The result of an attempt to test a patch
data PackageResult log
= PackagePlanningFailed { planningError :: T.Text }
-- ^ Our attempt to build the package resulted in no viable install plan.
| PackageResult { packageBuilt :: Bool
| PackageResult { packageStatus :: PackageStatus
, units :: M.Map UnitId (BuildInfo, BuildResult log)
}
-- ^ We attempted to build the package.
deriving stock (Show, Generic, Functor, Foldable, Traversable)
deriving anyclass (ToJSON, FromJSON)
isSuccessfulPackageResult :: PackageResult log -> Bool
isSuccessfulPackageResult PackagePlanningFailed{} = False
isSuccessfulPackageResult PackageResult{packageStatus} = isPackageBuildSucceeded packageStatus
-- | Information about a patch which we tested.
data TestedPatch log
= TestedPatch { patchedPackageName :: PkgName
......@@ -68,14 +91,17 @@ data TestedPatch log
-- | The result of a CI run.
data RunResult log
= RunResult { testedPatches :: [TestedPatch log] }
= RunResult { testedPatches :: [TestedPatch log]
, testedTests :: [TestedPatch log]
, compilerInfo :: CompilerInfo
}
deriving stock (Show, Generic, Functor, Foldable, Traversable)
deriving anyclass (ToJSON, FromJSON)
runResultUnits :: RunResult log -> M.Map UnitId (BuildInfo, BuildResult log)
runResultUnits runResult = M.unions
[ units
| tpatch <- testedPatches runResult
| tpatch <- testedPatches runResult ++ testedTests runResult
, PackageResult _ units <- pure $ patchedPackageResult tpatch
]
......@@ -83,3 +109,7 @@ runResultUnits runResult = M.unions
newtype LogOutput = LogOutput { getLogOutput :: T.Text }
deriving stock (Eq, Ord, Show)
deriving newtype (ToJSON, FromJSON)
newtype CompilerInfo = CompilerInfo [(String, String)]
deriving stock (Show, Generic)
deriving anyclass (ToJSON, FromJSON)
# To update nixpkgs, bump rev and baseNixpkgs's sha256.
let
rev = "c2fd152c98dc618c9c06b1551faee17e79a03b7f";
in
fetchTarball {
url = "https://github.com/NixOS/nixpkgs/archive/${rev}.tar.gz";
sha256 = "07cafslsgdymzcfcpgzw8fqv07r4wslsz7xijni1l1n2agcj2436";
}
......@@ -10,4 +10,7 @@ with pkgs;
bzlib = [ bzip2 ];
hmatrix = [ blas liblapack ];
hexpat = [ expat ];
hgmp = [ gmp ];
posix-api = [ systemd ];
lame = [ lame ];
}
......@@ -11,6 +11,10 @@ set -e
cipher=aes-256-cbc
if [ -z "$PATCHES" ]; then PATCHES=./patches; fi
log() {
echo "$1"
}
# For use by administrator.
gen_keys_tarball() {
hackage-repo-tool create-keys --keys=./keys
......@@ -59,10 +63,12 @@ repository $REPO_NAME
EOF
}
build_index() {
build_index_page() {
local commit="$CI_COMMIT_SHA"
local commit_url="https://gitlab.haskell.org/ghc/head.hackage/commit/$commit"
build_repository_blurb >repo/cabal.project.local
build_repository_blurb >repo/repo.cabal.project
cat repo/repo.cabal.project > repo/cabal.project
build_constraints >> repo/cabal.project
cat >repo/ci.html <<EOF
<!DOCTYPE html>
......@@ -119,22 +125,24 @@ EOF
<p>The source of this package repository is at <a href="https://gitlab.haskell.org/ghc/head.hackage">gitlab.haskell.org/ghc/head.hackage</a>.
<p>To use package repository with <code>cabal-install</code> add the following
to your project's <code>cabal.project.local</code> and run
<code>cabal v2-update</code>: (consider using <code>scripts/head.hackage.sh update</code> as <code>v2-update</code> is broken, <a href="https://github.com/haskell/cabal/issues/5952">Cabal bug #5952</a>)
<p>To use the package repository with <code>cabal-install >= 3.6</code>, simply run the following:
<pre><code>
$(cat repo/cabal.project.local)
$ curl https://ghc.gitlab.haskell.org/head.hackage/cabal.project >> cabal.project.local
$ cabal update
</code></pre>
<p>Finally, you may want to add the <a
href="cabal.constraints">constraints</a> to your project to ensure that
cabal chooses the patched releases.
<p>This will add the following <code>repository</code> stanza to your project's <code>cabal.project.local</code>:
<pre><code>
$(cat repo/repo.cabal.project)
</code></pre>
as well as the version constraints in <a
href="cabal.constraints"><code>cabal.constraints</code></a>.
<p>If you find a package that doesn't build with a recent GHC
pre-release see the <a
href="https://gitlab.haskell.org/ghc/head.hackage#adding-a-patch">contributor
href="https://gitlab.haskell.org/ghc/head.hackage/-/blob/master/README.md#adding-a-patch">contributor
documentation</a> for instructions on how to contribute a patch.
<p>If you encounter other trouble refer to the
<a href="https://gitlab.haskell.org/ghc/head.hackage">head.hackage
documentation</a> or
......@@ -157,28 +165,42 @@ build_constraints() {
# Build the hackage repository
build_repo() {
log "Building Hackage repository in $(pwd)/repo..."
# hackage-repo-tool bootstrap fails unless there is at least one package in the
# repo. Seed things with acme-box.
log "Fetching acme-box..."
cabal update
cabal fetch acme-box-0.0.0.0
mkdir -p repo/package
cp $HOME/.cabal/packages/hackage.haskell.org/acme-box/0.0.0.0/acme-box-0.0.0.0.tar.gz repo/package
mkdir -p tmp
cp -R $PATCHES tmp/patches
# if ~/.cabal exists, cabal-install will use that; otherwise packages go into $XDG_CACHE_HOME/cabal
if [ -d "$HOME/.cabal" ]; then
cp "$HOME/.cabal/packages/hackage.haskell.org/acme-box/0.0.0.0/acme-box-0.0.0.0.tar.gz" repo/package
else
cp "${XDG_CACHE_HOME:-$HOME/.cache}/cabal/packages/hackage.haskell.org/acme-box/0.0.0.0/acme-box-0.0.0.0.tar.gz" repo/package
fi
log "Bootstrapping repository..."
hackage-repo-tool bootstrap --keys=./keys --repo=./repo
mkdir -p template tmp/patches.cache
log "Patching packages..."
mkdir -p tmp template tmp/patches.cache
cp -R $PATCHES tmp/patches
tool \
--patches=./tmp/patches \
--repo-cache=./cache \
--keys=./keys \
--repo-name=head.hackage \
--repo-url=http://hackage.haskell.org/ \
--template=template \
./repo
log "Building constraints..."
build_constraints > repo/cabal.constraints
build_index
log "Building index page..."
build_index_page
rm -R tmp
}
......@@ -190,7 +212,7 @@ case $1 in
build-repository-blurb) build_repository_blurb ;;
build-index)
build_constraints > repo/cabal.constraints
build_index ;;
build_index_page ;;
*)
echo "error: Unknown command $1."
echo
......
......@@ -2,5 +2,5 @@ packages: .
source-repository-package
type: git
location: https://github.com/bgamari/hackage-overlay-repo-tool
tag: 18eb61c830ad908d36d343f400a1588af6b9a03a
location: https://gitlab.haskell.org/ghc/hackage-overlay-repo-tool
tag: 52f54229b08c6e86dd163dd42a78b22c10ffb099
-- This cabal project file is included in the CI configuration to
-- preclude the solver from using ancient package versions. See, for instance,
-- ghc/ghc#23048.
index-state:
hackage.haskell.org 2025-05-11T00:00:00Z,
head.hackage HEAD
constraints: th-abstraction >= 0.4
-- aeson < 1.0 is now quite old, don't attempt to use it
constraints: aeson >= 1.0,
-- don't allow uvector, which is deprecated; vector should be used instead
constraints: uvector == 0.0
-- containers-0.5 generally won't build with any recent compiler
constraints: containers >= 0.6
-- earlier transformers-compat versions don't include the Eq constraint on Eq1
-- instances needed by GHC 9.6
constraints: transformers-compat >= 0.7
......@@ -16,6 +16,18 @@ function broken() {
EXTRA_OPTS="$EXTRA_OPTS --expect-broken=$pkg_name"
}
function only_package() {
echo "Adding $@ to --only package list"
for pkg in $@; do
EXTRA_OPTS="$EXTRA_OPTS --only=$pkg"
done
}
function test_package() {
echo "Adding $@ to --test-package list"
EXTRA_OPTS="$EXTRA_OPTS --test-package=$1=$2"
}
# Return the version number of the most recent release of the given package
function latest_version() {
pkg=$1
......@@ -52,6 +64,18 @@ function ghc_commit() {
$GHC --print-project-git-commit-id
}
function ghc_arch() {
$GHC --print-host-platform
}
# ======================================================================
# Baseline constraints
#
# These constraints are applied to preclude the solver from producing build
# plans using ancient, under-constrained package versions.
EXTRA_OPTS="$EXTRA_OPTS --extra-cabal-fragment=$(pwd)/config.cabal.project"
# ======================================================================
# The lists begin here
#
......@@ -61,16 +85,38 @@ function ghc_commit() {
version="$(ghc_version)"
commit="$(ghc_commit)"
arch="$(ghc_arch)"
echo "Found GHC $version, commit $commit."
case $version in
8.10.*)
# package ticket
broken "hgeometry-ipe" 17566
9.6.*)
# package ticket
broken liquidhaskell-boot 350
# singletons-base only supports the latest ghc
broken singletons-base 00000
;;
9.8.*)
# package ticket
broken liquidhaskell-boot 350
# singletons-base only supports the latest ghc
broken singletons-base 00000
;;
9.10.*)
# package ticket
broken liquidhaskell-boot 350
# singletons-base only supports the latest ghc
broken singletons-base 00000
;;
9.12.*)
# package ticket
broken liquidhaskell-boot 350
;;
8.11.*)
# package ticket
broken "primitive" 18291
9.13.*)
# package ticket
broken ghcide 00000
;;
*)
......@@ -78,15 +124,113 @@ case $version in
;;
esac
case $arch in
x86_64-*-*)
# package ticket
;;
aarch64-*-*)
# These just don't build on aarch64
# package ticket
broken charsetdetect 00000
broken packman 00000
;;
*)
echo "$arch is unknown to head.hackage, assuming nothing is broken."
;;
esac
# Extra packages
extra_package lens
extra_package aeson
extra_package criterion
extra_package scotty
extra_package generic-lens
extra_package microstache
extra_package singletons
extra_package servant
# ==============
#
# These are packages which we don't have patches for but want to test anyway.
extra_package lens 5.2.3
extra_package generic-lens 2.2.2.0
extra_package optics 0.4.2.1
extra_package aeson 2.2.3.0
extra_package criterion 1.6.3.0
extra_package scotty 0.21
extra_package generic-lens 2.2.2.0
extra_package microstache 1.0.2.3
extra_package singletons-base 3.5
extra_package servant 0.20.1
extra_package hgmp 0.1.2.1
extra_package Agda 2.7.0.1
extra_package mmark 0.0.7.6
extra_package doctest 0.24.0
extra_package tasty 1.5.3
extra_package pandoc 3.1.11.1
extra_package servant-conduit 0.16
extra_package servant-machines 0.16
extra_package linear-generics 0.2.3
extra_package futhark 0.25.13
extra_package generic-random 1.5.0.1
extra_package lame 0.2.2
extra_package inspection-testing 0.5.0.3
extra_package ghcide 2.9.0.0
extra_package ghc-typelits-extra 0.4.7
# This package is affected by https://gitlab.haskell.org/ghc/ghc/-/issues/22912
extra_package vector-space 0.16
# Build-tool packages
build_tool_package alex
build_tool_package happy
build_tool_package c2hs
# $BUILD_MODE controls how head.hackage runs.
# ===========================================
#
# Four build modes exist: FULL, QUICK, TEST, and COMPAT.
#
# FULL.
# ------
# Build all patched + extra packages.
#
# QUICK.
# ------
# Build the "quick" configuration, which builds a small subset of the overall
# package set. (Also runs tests!) We do this during the merge request validation
# pipeline. Note: If "$QUICK" is non-null, it is used as a backwards-compat
# synonym for BUILD_MODE=QUICK.
#
# TEST.
# -----
# Just build the local test packages and run the tests.
#
# COMPAT: FULL + TEST.
# --------------------
# Backwards-compat default build mode.
#
: ${BUILD_MODE:=COMPAT}
if [ -n "$QUICK" ]; then
BUILD_MODE=QUICK
fi
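# For example (a sketch; this assumes run-ci propagates the caller's
# environment when it sources this file, so the COMPAT fallback above only
# applies when BUILD_MODE is unset):
#
#   BUILD_MODE=TEST ./run-ci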
case "$BUILD_MODE" in
FULL) ;;
QUICK)
only_package tasty
only_package Cabal
only_package microlens
only_package free
only_package optparse-applicative
test_package system-test "$(pwd)/../tests/ghc-debug/**/*.cabal"
test_package ghc-tests "$(pwd)/../tests/ghc-tests"
;;
TEST)
# FIXME: I specify a single "only_package" to prevent all the other
# packages from being built. Morally, I really want to say "build
# nothing at all besides the tests".
only_package tasty
test_package system-test "$(pwd)/../tests/ghc-debug/**/*.cabal"
test_package ghc-tests "$(pwd)/../tests/ghc-tests"
test_package all "$(pwd)/../tests/text"
test_package bytestring-tests "$(pwd)/../tests/bytestring"
test_package all "$(pwd)/../tests/containers/containers-tests"
;;
COMPAT)
test_package system-test "$(pwd)/../tests/ghc-debug/**/*.cabal"
test_package ghc-tests "$(pwd)/../tests/ghc-tests"
;;
esac
{ nixpkgs ? (import (import ./base-nixpkgs.nix) {}) }:
{ sources ? import ./nix/sources.nix, nixpkgs ? (import sources.nixpkgs.outPath {}) }:
with nixpkgs;
let
haskellPackages = nixpkgs.haskellPackages;
hackage-repo-tool =
let
src = fetchFromGitHub {
owner = "haskell";
repo = "hackage-security";
rev = "474768743f407edef988d4153f081b2c662fe84f";
sha256 = "1n46ablyna72zl9whirfv72l715wlmi6jahbf02asbsxkx3b7xnm";
};
in haskellPackages.callCabal2nix "hackage-repo-tool" "${src}/hackage-repo-tool" {};
let src = sources.hackage-security.outPath;
in nixpkgs.haskell.lib.doJailbreak (haskellPackages.callCabal2nix "hackage-repo-tool" "${src}/hackage-repo-tool" {});
overlay-tool =
let
src = fetchFromGitHub {
owner = "bgamari";
repo = "hackage-overlay-repo-tool";
rev = "7aac81e9bc468b103dd78b9c662672c86fe236f7";
sha256 = "0i4iw8nbhvc2xx05c0hbnnjyhap3b4xsclmxnmfa6dsa2ym02jc0";
};
in haskellPackages.callCabal2nix "hackage-overlay-repo-tool" src {};
let src = sources.overlay-tool;
in nixpkgs.haskell.lib.doJailbreak (haskellPackages.callCabal2nix "hackage-overlay-repo-tool" src { });
head-hackage-ci =
let
src = nixpkgs.nix-gitignore.gitignoreSource [] ./.;
in haskellPackages.callCabal2nix "head-hackage-ci" src {};
buildDeps = import ./build-deps.nix { pkgs = nixpkgs; };
buildDepsFragment =
let
buildDeps = import ./build-deps.nix { pkgs = nixpkgs; };
mkCabalFragment = pkgName: deps:
with pkgs.lib;
......@@ -51,31 +42,45 @@ let
let
deps = [
bash curl gnutar findutils patch rsync openssl
cabal-install ghc gcc binutils-unwrapped pwgen gnused
hackage-repo-tool overlay-tool python3 jq
haskellPackages.cabal-install haskellPackages.ghc gcc binutils-unwrapped pwgen gnused
hackage-repo-tool overlay-tool python3 jq pkg-config
git # cabal-install wants this to fetch source-repository-packages
];
pkg_config_depends = lib.makeSearchPathOutput "dev" "lib/pkgconfig" (lib.concatLists (lib.attrValues buildDeps));
in
runCommand "repo" {
nativeBuildInputs = [ makeWrapper ];
cabalDepsSrc = buildDepsFragment;
} ''
mkdir -p $out/bin
makeWrapper ${head-hackage-ci}/bin/head-hackage-ci $out/bin/head-hackage-ci \
--prefix PATH : ${stdenv.lib.makeBinPath deps}:$out/bin
--prefix PATH : ${lib.makeBinPath deps}:$out/bin
makeWrapper ${./build-repo.sh} $out/bin/build-repo.sh \
--prefix PATH : ${stdenv.lib.makeBinPath deps}:$out/bin
--prefix PATH : ${lib.makeBinPath deps}:$out/bin
makeWrapper ${./discover_tarball.sh} $out/bin/discover_tarball.sh \
--prefix PATH : ${lib.makeBinPath deps}:$out/bin
makeWrapper ${../run-ci} $out/bin/run-ci \
--prefix PATH : ${stdenv.lib.makeBinPath deps}:$out/bin \
--prefix PATH : ${lib.makeBinPath deps}:$out/bin \
--prefix PKG_CONFIG_PATH : ${pkg_config_depends} \
--set USE_NIX 1 \
--set CI_CONFIG ${./config.sh}
makeWrapper ${./find-job.sh} $out/bin/find-job \
--prefix PATH : ${stdenv.lib.makeBinPath deps}:$out/bin
--prefix PATH : ${lib.makeBinPath deps}:$out/bin
makeWrapper ${./find-latest-job.sh} $out/bin/find-latest-job \
--prefix PATH : ${lib.makeBinPath deps}:$out/bin
makeWrapper ${xz}/bin/xz $out/bin/xz
makeWrapper ${curl}/bin/curl $out/bin/curl
'';
in
build-repo
mkShell {
name = "head-hackage-build-env";
buildInputs = [ build-repo ];
cabalDepsSrc = buildDepsFragment;
}
#! /usr/bin/env bash
set -e
cd "$(dirname "${BASH_SOURCE[0]}")"
ARCH="$(uname -m)"
>&2 echo "NIGHTLY=${NIGHTLY}"
>&2 echo "RELEASE_JOB=${RELEASE_JOB}"
>&2 echo "SLOW_VALIDATE=${SLOW_VALIDATE}"
>&2 echo "ARCH=${ARCH}"
# NOTE: If you add a new JOB_NAME here then you also might need to modify the
# `needs` field in ghc/ghc .gitlab-ci.yml to avoid triggering the downstream job
# too early.
# Before the update to deb12, we use the deb10 bindists
case $UPSTREAM_BRANCH_NAME in
ghc-9.6|ghc-9.8|ghc-9.10)
case $ARCH in
aarch64)
if [ -n "$NIGHTLY" ]; then
BINDIST_NAME="ghc-aarch64-linux-deb10-validate.tar.xz"
JOB_NAME="nightly-aarch64-linux-deb10-validate"
elif [ "$RELEASE_JOB" == "yes" ]; then
BINDIST_NAME="ghc-aarch64-linux-deb10-release+no_split_sections.tar.xz"
JOB_NAME="release-aarch64-linux-deb10-release+no_split_sections"
else
BINDIST_NAME="ghc-aarch64-linux-deb10-validate.tar.xz"
JOB_NAME="aarch64-linux-deb10-validate"
fi
;;
*)
if [ -n "$SLOW_VALIDATE" ]; then
BINDIST_NAME="ghc-x86_64-linux-deb10-numa-slow-validate.tar.xz"
if [ -n "$NIGHTLY" ]; then
JOB_NAME="nightly-x86_64-linux-deb10-numa-slow-validate"
elif [ "$RELEASE_JOB" == "yes" ]; then
echo "No slow validate build in release job"
exit 2
else
JOB_NAME="x86_64-linux-deb10-numa-slow-validate"
fi
else
BINDIST_NAME="ghc-x86_64-linux-fedora33-release.tar.xz"
if [ -n "$NIGHTLY" ]; then
JOB_NAME="nightly-x86_64-linux-fedora33-release"
elif [ "$RELEASE_JOB" == "yes" ]; then
JOB_NAME="release-x86_64-linux-fedora33-release"
else
JOB_NAME="x86_64-linux-fedora33-release"
fi
fi
;;
esac
;;
*) # Post update to use deb12
case $ARCH in
aarch64)
if [ -n "$NIGHTLY" ]; then
BINDIST_NAME="ghc-aarch64-linux-deb12-validate.tar.xz"
JOB_NAME="nightly-aarch64-linux-deb12-validate"
elif [ "$RELEASE_JOB" == "yes" ]; then
BINDIST_NAME="ghc-aarch64-linux-deb12-release+no_split_sections.tar.xz"
JOB_NAME="release-aarch64-linux-deb12-release+no_split_sections"
else
BINDIST_NAME="ghc-aarch64-linux-deb12-validate.tar.xz"
JOB_NAME="aarch64-linux-deb12-validate"
fi
;;
*)
if [ -n "$SLOW_VALIDATE" ]; then
BINDIST_NAME="ghc-x86_64-linux-deb12-numa-slow-validate.tar.xz"
if [ -n "$NIGHTLY" ]; then
JOB_NAME="nightly-x86_64-linux-deb12-numa-slow-validate"
elif [ "$RELEASE_JOB" == "yes" ]; then
echo "No slow validate build in release job"
exit 2
else
JOB_NAME="x86_64-linux-deb12-numa-slow-validate"
fi
else
BINDIST_NAME="ghc-x86_64-linux-fedora33-release.tar.xz"
if [ -n "$NIGHTLY" ]; then
JOB_NAME="nightly-x86_64-linux-fedora33-release"
elif [ "$RELEASE_JOB" == "yes" ]; then
JOB_NAME="release-x86_64-linux-fedora33-release"
else
JOB_NAME="x86_64-linux-fedora33-release"
fi
fi
;;
esac
;;
esac
>&2 echo "BINDIST_NAME=${BINDIST_NAME}"
>&2 echo "JOB_NAME=${JOB_NAME}"
if [ -n "$UPSTREAM_COMMIT_SHA" ]; then
# N.B. We can't use this if the upstream pipeline might be in-progress
# since the below URL cannot provide an artifact until a pipeline has
# run to completion on the requested branch. This is in general
# not the case for GHC pipelines. Consequently, in this case we will
# usually rather provide UPSTREAM_PIPELINE_ID.
>&2 echo "Pulling binary distribution from commit $UPSTREAM_COMMIT_SHA of project $UPSTREAM_PROJECT_PATH..."
GHC_TARBALL="https://gitlab.haskell.org/$UPSTREAM_PROJECT_PATH/-/jobs/artifacts/$UPSTREAM_COMMIT_SHA/raw/$BINDIST_NAME?job=$JOB_NAME"
elif [ -n "$UPSTREAM_PIPELINE_ID" ]; then
job_name=$JOB_NAME
>&2 echo "Pulling ${job_name} binary distribution from Pipeline $UPSTREAM_PIPELINE_ID..."
job_id=$(find-job $UPSTREAM_PROJECT_ID $UPSTREAM_PIPELINE_ID $job_name)
>&2 echo "Using job $job_id..."
echo "https://gitlab.haskell.org/$UPSTREAM_PROJECT_PATH/-/jobs/$job_id/artifacts/raw/$BINDIST_NAME"
elif [ -n "$UPSTREAM_BRANCH_NAME" ]; then
job_name=$JOB_NAME
>&2 echo "Finding ${job_name} binary distribution from $UPSTREAM_BRANCH_NAME..."
job_id=$(find-latest-job "$UPSTREAM_PROJECT_ID" "$UPSTREAM_BRANCH_NAME" "$JOB_NAME")
>&2 echo "Using job $job_id..."
echo "https://gitlab.haskell.org/$UPSTREAM_PROJECT_PATH/-/jobs/$job_id/artifacts/raw/$BINDIST_NAME"
fi
......@@ -6,22 +6,24 @@ project_id=$1
pipeline_id=$2
job_name=$3
resp=$(mktemp)
# Access token is a protected environment variable in the head.hackage project and
# is necessary for this query to succeed. Sadly job tokens only seem to
# give us access to the project being built.
curl \
--silent --show-error \
-H "Private-Token: $ACCESS_TOKEN" \
"https://gitlab.haskell.org/api/v4/projects/$project_id/pipelines/$pipeline_id/jobs?scope[]=success" \
> resp.json
"https://gitlab.haskell.org/api/v4/projects/$project_id/pipelines/$pipeline_id/jobs?scope[]=success&per_page=50" \
> "$resp"
job_id=$(jq ". | map(select(.name == \"$job_name\")) | .[0].id" < resp.json)
job_id=$(jq ". | map(select(.name == \"$job_name\")) | .[0].id" < "$resp")
if [ "$job_id" = "null" ]; then
echo "Error finding job $job_name for $pipeline_id in project $project_id:" >&2
cat resp.json >&2
rm resp.json
cat "$resp" >&2
rm "$resp"
exit 1
else
rm resp.json
rm "$resp"
echo -n "$job_id"
fi
#!/usr/bin/env bash
set -e
project_id=$1
branch_name=$2
job_name=$3
resp=$(mktemp)
# Access token is a protected environment variable in the head.hackage project and
# is necessary for this query to succeed. Sadly job tokens only seem to
# give us access to the project being built.
curl \
--silent --show-error \
-H "Private-Token: $ACCESS_TOKEN" \
"https://gitlab.haskell.org/api/v4/projects/$project_id/pipelines/?ref=$branch_name&scope=finished&per_page=50" \
> "$resp"
job_ids=$(jq ". | map(.id) " < "$resp")
if [ "$job_ids" = "null" ]; then
echo "Error finding job $job_name for $pipeline_id in project $project_id:" >&2
cat "$resp" >&2
rm "$resp"
exit 1
else
for i in $(echo $job_ids | jq '.[]'); do
if find-job $project_id $i $job_name; then
exit 0
fi
done
echo "Error finding job $job_name for $branch_name project $project_id:" >&2
exit 1
fi