Compare revisions

Changes are shown as if the source revision was being merged into the target revision.

Commits on Source (311)
patches/* -text
...@@ -9,31 +9,32 @@
 # To accomplish this we use the ci executable in ./ci. This drives a set of
 # cabal v2-build builds and preserves their results.
 #
-# The compiler to be tested can be taken from a number of sources. The
-# build-master and build-9-* jobs form the validation pipeline of the
-# head.hackage repository. In addition, other GitLab projects (e.g. ghc/ghc>)
+# The execution flow looks something like:
+#
+# - Gitlab runner
+#   - (nix run)
+#     - run-ci
+#       - ./run-ci (the Nix package just wraps the script)
+#         - (nix run) (when USE_NIX=1)
+#         - head-hackage-ci $EXTRA_OPTS (a Cabal project in ci/)
+#           - ci/Main.hs
+#           - TestPatches.testPatches <$> TestPatches.config
+#             - option '--test-package'
+#             - <something similar for building the packages>
+#
+# EXTRA_OPTS are injected into the execution flow inside ./run-ci, which in turn
+# sources them from ci/config.sh.
+#
+# The compiler to be tested can be taken from a number of sources.
+# head.hackage's own validation pipeline runs against GHC HEAD and the three
+# supported major versions. In addition, other GitLab projects (e.g. ghc/ghc>)
 # can trigger a multi-project pipeline, specifying a GHC binary distribution
 # via either the GHC_TARBALL or UPSTREAM_* variables.
 #
-# Prevent duplicate workflows
-workflow:
-  rules:
-    - if: $CI_PIPELINE_SOURCE == "merge_request_event"
-    - if: $CI_PIPELINE_SOURCE == "pipeline" # Triggered by GHC
-    - if: $CI_PIPELINE_SOURCE == "scheduled"
-    - if: $CI_PIPELINE_SOURCE == "web"
-    - if: $CI_COMMIT_BRANCH && $CI_OPEN_MERGE_REQUESTS
-      when: never
-    - if: $CI_COMMIT_BRANCH
-stages:
-  - test
-  - update-repo
-  - deploy
 variables:
   # Which nixos/nix Docker image tag to use
-  DOCKER_TAG: "2.3.12"
+  DOCKER_TAG: "2.13.1"
   # Default this to ghc/ghc> to make it more convenient to run from the web
   # interface.
...@@ -54,7 +55,7 @@ variables:
   # Multi-project pipeline variables:
   #
-  # These are set by the "upstream" pipeline for `build-pipeline` pipelines:
+  # These are set by the "upstream" pipeline for downstream pipelines:
   #
   # UPSTREAM_PROJECT_PATH: The path of the upstream project (e.g. `ghc/ghc`)
   # UPSTREAM_PIPELINE_ID: The ID of the upstream pipeline
...@@ -64,279 +65,31 @@ variables:
   # UPSTREAM_COMMIT_SHA: The ref or commit SHA of the GHC build to be tested
   #
-  # We explictly set the locale to avoid happy chocking up on UTF-8 source code. See #31
+  # We explicitly set the locale to avoid happy choking up on UTF-8 source code. See #31
   LANG: "C.UTF-8"
stages:
- generate
- dispatch
# A build triggered from a ghc/ghc> pipeline.
.build-pipeline:
extends: .build
before_script:
- |
if [ -e store.nar ]; then
echo "Extracting cached Nix store..."
nix-store --import -vv < store.nar || echo "invalid cache"
else
echo "No cache found"
fi
- GHC_TARBALL=$(nix run -f ./ci -c discover_tarball.sh)
rules:
- if: '$UPSTREAM_COMMIT_SHA || $UPSTREAM_PIPELINE_ID'
when: always
- when: never
build-pipeline:
extends: .build-pipeline
variables:
BUILD_MODE: FULL
test-pipeline:
extends: .build-pipeline
variables:
BUILD_MODE: TEST
allow_failure: true
# Build against the master branch
.build-master:
extends: .build
before_script:
- |
if [ -e store.nar ]; then
echo "Extracting cached Nix store..."
nix-store --import -vv < store.nar || echo "invalid cache"
else
echo "No cache found"
fi
- GHC_TARBALL=$(nix run -f ./ci -c discover_tarball.sh)
variables:
UPSTREAM_BRANCH_NAME: master
EXTRA_HC_OPTS: "-dcore-lint"
SLOW_VALIDATE: 1
rules:
- if: '$UPSTREAM_COMMIT_SHA || $UPSTREAM_PIPELINE_ID'
when: never
- when: always
build-master:
extends: .build-master
variables:
BUILD_MODE: FULL
test-master:
extends: .build-master
variables:
BUILD_MODE: TEST
allow_failure: true
# Build against the 9.2 branch
.build-9.2:
extends: .build
variables:
GHC_TARBALL: "https://gitlab.haskell.org/api/v4/projects/1/jobs/artifacts/ghc-9.2/raw/ghc-x86_64-fedora27-linux.tar.xz?job=validate-x86_64-linux-fedora27"
EXTRA_HC_OPTS: "-dcore-lint"
rules:
- if: '$UPSTREAM_COMMIT_SHA || $UPSTREAM_PIPELINE_ID'
when: never
- when: always
build-9.2:
extends: .build-9.2
variables:
BUILD_MODE: FULL
test-9.2:
extends: .build-9.2
variables:
BUILD_MODE: TEST
allow_failure: true
# Build against the 9.4 branch
.build-9.4:
extends: .build
before_script:
- |
if [ -e store.nar ]; then
echo "Extracting cached Nix store..."
nix-store --import -vv < store.nar || echo "invalid cache"
else
echo "No cache found"
fi
- GHC_TARBALL=$(nix run -f ./ci -c discover_tarball.sh)
variables:
UPSTREAM_BRANCH_NAME: ghc-9.4
EXTRA_HC_OPTS: "-dcore-lint"
rules:
- if: '$UPSTREAM_COMMIT_SHA || $UPSTREAM_PIPELINE_ID'
when: never
- when: always
build-9.4:
extends: .build-9.4
variables:
BUILD_MODE: FULL
test-9.4:
extends: .build-9.4
variables:
BUILD_MODE: TEST
allow_failure: true
# Build against the 9.6 branch
.build-9.6:
extends: .build
before_script:
- |
if [ -e store.nar ]; then
echo "Extracting cached Nix store..."
nix-store --import -vv < store.nar || echo "invalid cache"
else
echo "No cache found"
fi
- GHC_TARBALL=$(nix run -f ./ci -c discover_tarball.sh)
variables:
UPSTREAM_BRANCH_NAME: ghc-9.6
EXTRA_HC_OPTS: "-dcore-lint"
rules:
- if: '$UPSTREAM_COMMIT_SHA || $UPSTREAM_PIPELINE_ID'
when: never
- when: always
build-9.6:
extends: .build-9.6
variables:
BUILD_MODE: FULL
test-9.6:
extends: .build-9.6
variables:
BUILD_MODE: TEST
allow_failure: true
.build:
stage: test
interruptible: true
tags:
- x86_64-linux
image: "nixos/nix:$DOCKER_TAG"
cache:
key: build-HEAD
when: always
paths:
- store.nar
before_script:
- |
if [ -e store.nar ]; then
echo "Extracting cached Nix store..."
nix-store --import -vv < store.nar || echo "invalid cache"
else
echo "No cache found"
fi
script:
# Install GHC
- echo "Bindist tarball is $GHC_TARBALL"
- nix run -f ./ci -c curl -L "$GHC_TARBALL" > ghc.tar.xz
- |
nix build \
-f ci/ghc-from-artifact.nix \
--arg ghcTarball ./ghc.tar.xz \
--out-link ghc
- export GHC=`pwd`/ghc/bin/ghc
- rm -Rf $HOME/.cabal/packages/local ci/run
# Build CI executable
- |
nix-build ./ci -j$CPUS --no-build-output
nix-store --export \
$(nix-store -qR --include-outputs \
$(nix-instantiate --quiet ./ci)) \
> store.nar
# Test it
- nix run -f ./ci -c run-ci
after_script:
- ls -lh
- |
nix run -f ./ci -c \
tar -cJf results.tar.xz -C ci/run \
results.json logs compiler-info eventlogs
artifacts:
when: always
paths:
- results.tar.xz
# Build and deploy a Hackage repository
update-repo:
stage: update-repo
cache:
key: build-HEAD
when: always
paths:
- store.nar
tags:
- x86_64-linux
- head.hackage
image: "nixos/nix:$DOCKER_TAG"
-  variables:
-    KEYS_TARBALL: https://downloads.haskell.org/ghc/head.hackage-keys.tar.enc
-    # KEYS_TARBALL_KEY provided by protected variable
-  rules:
-    - if: '$CI_COMMIT_BRANCH == "master"'
-  script:
-    - nix-channel --add https://nixos.org/channels/nixpkgs-unstable nixpkgs
-    - nix-channel --update
-    - nix build -f ci/default.nix
-    - nix run -f ci/default.nix -c build-repo.sh extract-keys
-    - nix run -f ci/default.nix -c build-repo.sh build-repo
-  dependencies:
-    - build-master
-  after_script:
-    - rm -Rf keys
-  artifacts:
-    paths:
-      - repo
-pages:
-  stage: deploy
-  tags:
-    - x86_64-linux
-    - head.hackage
-  image: "nixos/nix:$DOCKER_TAG"
-  script:
-    - mv repo public
-  dependencies:
-    - update-repo
-  rules:
-    - if: '$CI_COMMIT_BRANCH == "master"'
-  artifacts:
-    paths:
-      - public
+generate-pipeline:
+  variables:
+    GIT_SUBMODULE_STRATEGY: none
+  image: alpine:latest
+  tags: [x86_64-linux]
+  stage: generate
+  script: ./ci/generate-pipeline.sh
+  artifacts:
+    paths:
+      - gitlab-generated-pipeline.yml
+run-pipeline:
+  stage: dispatch
+  trigger:
+    strategy: depend
+    forward:
+      pipeline_variables: true
+    include:
+      - artifact: gitlab-generated-pipeline.yml
+        job: generate-pipeline
# This job is for updating a special branch called "upstream-testing" which is used
# when we want to trigger jobs to run on head.hackage. This branch is not protected so
# the permissions about who can trigger jobs on upstream-testing are much more liberal than
# who can trigger jobs which run on master (which have the side-effect of updating the repo).
update-branch:
stage: test
image: "nixos/nix:$DOCKER_TAG"
tags:
- x86_64-linux
script:
- nix run nixpkgs.git -c git remote -v
- nix run nixpkgs.git -c git reset --hard origin/master
- nix run nixpkgs.git -c git push "https://gitlab-ci-token:$PROJECT_ACCESS_TOKEN@$CI_SERVER_HOST/$CI_PROJECT_PATH.git" +HEAD:upstream-testing -o ci.skip
rules:
- if: $CI_COMMIT_BRANCH == "master"
Copyright 2023 The GHC Team
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
3. Neither the name of the copyright holder nor the names of its contributors
may be used to endorse or promote products derived from this software without
specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS “AS IS” AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
...@@ -9,9 +9,8 @@ existing Hackage package(s).
   original unpacked source-tarball. (CI will verify this when
   you submit a PR).
 
-- The patches SHOULD work with at least GHC HEAD and the most recent
-  stable released GHC version (currently this means with GHC 9.2, 9.4, and
-  9.5).
+- The patches SHOULD work with at least GHC HEAD and a set of recent stable
+  released GHC versions (currently this means GHC 9.8, 9.10, 9.12, 9.14, and 9.15).
 
 - The patches SHOULD ideally result in the same code being compiled,
   as one of the main purposes of these patches is to make regression
...@@ -23,6 +22,12 @@ existing Hackage package(s).
 This repo contains `<pkg-id>.patch` files in the
 [`patches/`](./patches/) folder (where `<pkg-id>` refers to a specific
 release of a package, e.g. `lens-4.15.3`).
+Adding a patch for a release pins the build to that specific version, so an
+empty patch file may be added solely to force a newer release to be chosen
+over an older, already-patched one.
+For example, consider a patched `th-abstraction-0.5.0` and an empty patch for
+`th-abstraction-0.6.0`: if the empty patch were removed, `th-abstraction-0.5.0`
+would be forced, and libraries such as `generics-sop` would fail to build.
 
 Once merged to `master`, all package releases whose `<pkg-id>` is
 mentioned will enter the *HEAD.hackage* package index; if there is a
...@@ -68,7 +73,9 @@ active-repositories: hackage.haskell.org, head.hackage.ghc.haskell.org
 ```
 
 Also see
-https://cabal.readthedocs.io/en/latest/cabal-project.html#cfg-field-active-repositories.
+https://cabal.readthedocs.io/en/3.12/cabal-project-description-file.html#cfg-field-active-repositories.
+
+`HEAD.hackage` doesn't bump the bounds of boot packages and certain other packages, to avoid the busywork of bumping them. When using `HEAD.hackage`, you should use `--allow-newer` for these packages. The full list is [here](https://gitlab.haskell.org/ghc/head.hackage/-/blob/90570e1c4606c1d7d3d41797ec1b32d1b984067b/ci/MakeConstraints.hs#L40-49).
 
 ### As an add-on remote repository
...@@ -135,33 +142,25 @@ initialize it as a git repository, and the patch.
 ### Adding a patch
 
-The `scripts/patch-tool` script is a tool for conveniently authoring and updating
-patches. For instance, if you find that the `doctest` package needs to be
-patched first run:
-```
-$ scripts/patch-tool unpack doctest
-```
-This will extract a `doctest` source tree to `packages/doctest-$version` and
-initialize it as a git repository. You can now proceed to edit the tree as
-necessary and run
-```
-$ scripts/patch-tool update-patches
-```
-This will create an appropriately-named patch in `patches/` from the edits in
-the `doctest` tree.
-
-### Usage with `nix`
-
-`default.nix` is a [Nix](https://nixos.org/nix/) expression which can be used to
-build `head.hackage` packages using GHC 8.6.1-alpha2:
-```
-$ nix build -f ./. haskellPackages.servant
-```
-It can also be used to build a compiler from a local source tree and use this to
-build `head.hackage` packages:
-```
-$ nix build -f ./. --arg ghc "(import ghc-from-source.nix {ghc-path=$GHC_TREE;})"
-```
+The `scripts/patch-tool` script is a tool for conveniently authoring and updating patches. For example, to patch the `doctest` package, you can run the following steps:
+
+1. `scripts/patch-tool unpack doctest`
+1. Modify files in `packages/doctest-$version/` as necessary
+1. Build/test as normal, e.g. `cabal build doctest`
+1. `scripts/patch-tool update-patches`
+1. Commit the patch
+
+When contributing a patch, one needs to be mindful of [Hackage revisions].
+head.hackage doesn't combine patches with the revisions of a package. Instead,
+a patch is applied to the unrevised package (also called revision 0). This
+implies that when contributing patches, it might be necessary to additionally
+include the changes that are already in some revision. Moreover, this also
+implies that if a patch only contains changes that are already present in
+revisions, then contributing the patch to head.hackage is pointless, as the
+changes are already available for building.
+
+[Hackage revisions]: https://github.com/haskell-infra/hackage-trustees/blob/master/revisions-information.md
 
 ### GitLab CI
 
 GHC's GitLab instance uses GitLab CI and the `head-hackage-ci` tool (contained
...@@ -181,6 +180,15 @@ $ ./run-ci
 This will build all packages having patches and produce a textual summary, as
 well as a JSON file (`result.json`) describing the outcome.
+
+If you are using nix you can run:
+```
+nix-shell ci/ --command run-ci
+```
+
+Note that we currently rely on IOG's Hydra instance for caching of flake
+outputs to ensure that they aren't rebuilt with every job.
 
 ### Hackage repository
......
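The `result.json` report mentioned above is plain JSON, so it can also be inspected by other tooling. Below is a small, self-contained Haskell sketch (not part of the repository) that lists the patched packages covered by a report. It only assumes the field names visible in the `ci/` types shown later in this diff (`testedPatches`, `patchedPackageName`), that `patchedPackageName` serialises as a JSON string, and aeson >= 2.0; everything else is treated as opaque JSON.

```
{-# LANGUAGE OverloadedStrings #-}
-- Sketch: inspect the result.json written by run-ci. Only the field names
-- shown in ci/ (testedPatches, patchedPackageName) are relied upon; the rest
-- of the schema is treated as opaque JSON. Assumes aeson >= 2.0.
module InspectResults (main) where

import Data.Aeson (Value (..), eitherDecodeFileStrict)
import qualified Data.Aeson.KeyMap as KM
import qualified Data.Text as T
import qualified Data.Vector as V

main :: IO ()
main = do
  parsed <- eitherDecodeFileStrict "result.json"
  case parsed of
    Left err -> putStrLn ("could not parse result.json: " ++ err)
    Right (Object o)
      | Just (Array patches) <- KM.lookup "testedPatches" o -> do
          putStrLn (show (V.length patches) ++ " patched packages were tested")
          mapM_ (mapM_ putStrLn . pkgName) (V.toList patches)
    Right _ -> putStrLn "unexpected shape for result.json"
  where
    -- Assumption: patchedPackageName encodes as a plain JSON string.
    pkgName (Object p)
      | Just (String n) <- KM.lookup "patchedPackageName" p = Just (T.unpack n)
    pkgName _ = Nothing
```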
-- Need an empty file for cabal.project.local to work
-- https://github.com/haskell/cabal/issues/9168
...@@ -45,7 +45,11 @@ allowNewerPkgs = S.fromList
   , "binary"
   , "bytestring"
   , "Cabal"
-  , "text" ] `S.union` bootPkgs
+  , "containers"
+  , "deepseq"
+  , "text"
+  , "ghc-boot"
+  , "ghc-boot-th" ] `S.union` bootPkgs
 
 constraints :: [String] -> Doc
 constraints constraints =
......
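The `allowNewerPkgs` set above ultimately surfaces to users as the `--allow-newer` advice in the README. As a rough, hypothetical illustration (this is not the actual `MakeConstraints` code, which renders through its `Doc` type and a larger `bootPkgs` set), such a set can be turned into an `allow-newer:` line for a cabal.project fragment like so:

```
-- Hypothetical sketch in the spirit of ci/MakeConstraints.hs: render an
-- allow-newer line from a set of package names. Only the packages visible in
-- the hunk above are listed; the real set also includes bootPkgs.
module AllowNewerSketch (allowNewerLine, main) where

import Data.List (intercalate)
import qualified Data.Set as S

allowNewerPkgs :: S.Set String
allowNewerPkgs = S.fromList
  [ "binary", "bytestring", "Cabal", "containers"
  , "deepseq", "text", "ghc-boot", "ghc-boot-th"
  ]

-- e.g. "allow-newer: Cabal, binary, bytestring, ..."
allowNewerLine :: S.Set String -> String
allowNewerLine pkgs = "allow-newer: " ++ intercalate ", " (S.toAscList pkgs)

main :: IO ()
main = putStrLn (allowNewerLine allowNewerPkgs)
```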
...@@ -77,7 +77,7 @@ The below is all orchestrated by `run-ci.sh`:
    the outcome of the build
 1. Write a JSON report (of type `Types.RunResult ()`) to `result.json`
-1. Examine the failed units and determine whether there were any unexpected failures.
+1. Examine the failed packages and determine whether there were any unexpected failures.
 
 ### Build plans and empty patches
......
...@@ -11,7 +11,7 @@ module TestPatches
 import Control.Monad
 import Data.Foldable
-import Data.List (intercalate)
+import Data.List (intercalate, partition)
 import Data.Maybe
 import Data.Text (Text)
 import GHC.Generics
...@@ -193,7 +193,10 @@ testPatches cfg = do
   testResults <- fold <$> mapM test (configTestPackages cfg)
-  let runResult = RunResult testedPatches testResults compInfo
+  let runResult = RunResult { testedPatches = testedPatches
+                            , testedTests = testResults
+                            , compilerInfo = compInfo
+                            }
   let (okay, msg) = resultSummary (configExpectedBrokenPkgs cfg) runResult
   print msg
...@@ -242,21 +245,28 @@ resultSummary broken runResult = (ok, msg)
          && null failedTestsBuild
          && null failedUnits
     msg = vcat
-      [ "Total units built:" <+> pshow (length allUnits)
+      [ "Total packages built:" <+> pshow (length allUnits)
+      , ""
+      , pshow (length expectedPlanningErrs) <+> "had no valid install plan (expected):"
+      , PP.indent 4 $ vcat $ map (uncurry prettyPkgVer) expectedPlanningErrs
       , ""
       , pshow (length planningErrs) <+> "had no valid install plan:"
       , PP.indent 4 $ vcat $ map (uncurry prettyPkgVer) planningErrs
       , ""
-      , pshow (length failedUnits) <+> "units failed to build:"
+      , pshow (length failedUnits) <+> "packages failed to build:"
       , PP.indent 4 $ vcat
         [ prettyPkgVer (pkgName binfo) (version binfo)
         | (binfo, _) <- M.elems failedUnits ]
-      , pshow (length expectedFailedUnits) <+> "units failed to build (expected):"
+      , pshow (length expectedFailedUnits) <+> "packages failed to build (expected):"
       , PP.indent 4 $ vcat
         [ prettyPkgVer (pkgName binfo) (version binfo)
         | (binfo, _) <- M.elems expectedFailedUnits ]
+      , pshow (length failedTargetUnits) <+> "target packages failed to build:"
+      , PP.indent 4 $ vcat
+        [ prettyPkgVer pkg ver
+        | (pkg, ver) <- failedTargetUnits ]
       , ""
-      , pshow (length failedDependsUnits) <+> "units failed to build due to unbuildable dependencies."
+      , pshow (length failedDependsUnits) <+> "packages failed to build due to unbuildable dependencies."
       , ""
       , pshow (length failedTestsBuild) <+> "testsuites failed build."
       , PP.indent 4 $ vcat
...@@ -266,12 +276,19 @@ resultSummary broken runResult = (ok, msg)
         [ prettyPkgName pkg_name | pkg_name <- failedTests ]
       ]
     allUnits = runResultUnits runResult
-    planningErrs = planningErrors runResult
+    (expectedPlanningErrs, planningErrs) =
+      partition (failureExpected broken . fst) (planningErrors runResult)
     failedTests = [ pkg_name | (TestedPatch pkg_name ver (PackageResult (PackageBuildSucceeded PackageTestsFailed) _)) <- testedTests runResult ]
     failedTestsBuild = [ pkg_name | (TestedPatch pkg_name ver (PackageResult PackageBuildFailed _)) <- testedTests runResult ]
+    failedTargetUnits =
+      [ (patchedPackageName tp, patchedPackageVersion tp)
+      | tp <- testedPatches runResult
+      , not $ isSuccessfulPackageResult (patchedPackageResult tp)
+      ]
     failedUnits, expectedFailedUnits :: M.Map UnitId (BuildInfo, BuildResult log)
     (expectedFailedUnits, failedUnits) = M.partition splitExpected (M.filter failed allUnits)
       where failed (_, BuildFailed _) = True
......
...@@ -3,6 +3,7 @@
 {-# LANGUAGE DeriveAnyClass #-}
 {-# LANGUAGE DeriveTraversable #-}
 {-# LANGUAGE GeneralizedNewtypeDeriving #-}
+{-# LANGUAGE NamedFieldPuns #-}
 
 module Types
   ( RunResult(..)
...@@ -12,6 +13,7 @@ module Types
   , runResultUnits
   , TestedPatch(..)
   , PackageResult(..)
+  , isSuccessfulPackageResult
   , BuildInfo(..)
   , BuildResult(..)
   , LogOutput(..)
...@@ -74,6 +76,10 @@ data PackageResult log
   deriving stock (Show, Generic, Functor, Foldable, Traversable)
   deriving anyclass (ToJSON, FromJSON)
 
+isSuccessfulPackageResult :: PackageResult log -> Bool
+isSuccessfulPackageResult PackagePlanningFailed{} = False
+isSuccessfulPackageResult PackageResult{packageStatus} = isPackageBuildSucceeded packageStatus
+
 -- | Information about a patch which we tested.
 data TestedPatch log
   = TestedPatch { patchedPackageName :: PkgName
......
...@@ -12,4 +12,5 @@ with pkgs;
   hexpat = [ expat ];
   hgmp = [ gmp ];
   posix-api = [ systemd ];
+  lame = [ lame ];
 }
...@@ -63,10 +63,12 @@ repository $REPO_NAME
 EOF
 }
 
-build_index() {
+build_index_page() {
     local commit="$CI_COMMIT_SHA"
     local commit_url="https://gitlab.haskell.org/ghc/head.hackage/commit/$commit"
-    build_repository_blurb >repo/cabal.project.local
+    build_repository_blurb >repo/repo.cabal.project
+    cat repo/repo.cabal.project > repo/cabal.project
+    build_constraints >> repo/cabal.project
 
     cat >repo/ci.html <<EOF
 <!DOCTYPE html>
...@@ -123,16 +125,18 @@ EOF
 <p>The source of this package repository is at <a href="https://gitlab.haskell.org/ghc/head.hackage">gitlab.haskell.org/ghc/head.hackage</a>.
 
-<p>To use package repository with <code>cabal-install</code> add the following
-to your project's <code>cabal.project.local</code> and run
-<code>cabal v2-update</code>: (consider using <code>scripts/head.hackage.sh update</code> as <code>v2-update</code> is broken, <a href="https://github.com/haskell/cabal/issues/5952">Cabal bug #5952</a>)
+<p>To use package repository with <code>cabal-install >= 3.6</code> simply run the following:
 <pre><code>
-$(cat repo/cabal.project.local)
+$ curl https://ghc.gitlab.haskell.org/head.hackage/cabal.project >> cabal.project.local
+$ cabal update
 </code></pre>
-<p>Finally, you may want to add the <a
-href="cabal.constraints">constraints</a> to your project to ensure that
-cabal chooses the patched releases.
+<p>This will add the following <code>source-repository</code> stanza to your project's <code>cabal.project.local</code>:
+<pre><code>
+$(cat repo/repo.cabal.project)
+</code></pre>
+as well as the version constraints in <a
+href="cabal.constraints"><code>cabal.constraints</code></a>.
 
 <p>If you find a package that doesn't build with a recent GHC
 pre-release see the <a
...@@ -169,7 +173,14 @@ build_repo() {
     cabal update
     cabal fetch acme-box-0.0.0.0
     mkdir -p repo/package
-    cp $HOME/.cabal/packages/hackage.haskell.org/acme-box/0.0.0.0/acme-box-0.0.0.0.tar.gz repo/package
+    # if ~/.cabal exists cabal-install will use that, otherwise packages go into $XDG_CACHE_HOME/cabal
+    if [ -d "$HOME/.cabal" ]; then
+        cp "$HOME/.cabal/packages/hackage.haskell.org/acme-box/0.0.0.0/acme-box-0.0.0.0.tar.gz" repo/package
+    else
+        cp "${XDG_CACHE_HOME:-$HOME/.cache}/cabal/packages/hackage.haskell.org/acme-box/0.0.0.0/acme-box-0.0.0.0.tar.gz" repo/package
+    fi
 
     log "Bootstrapping repository..."
     hackage-repo-tool bootstrap --keys=./keys --repo=./repo
...@@ -188,8 +199,8 @@ build_repo() {
     log "Building constraints..."
     build_constraints > repo/cabal.constraints
 
-    log "Building index..."
-    build_index
+    log "Building index page..."
+    build_index_page
 
     rm -R tmp
 }
...@@ -201,7 +212,7 @@ case $1 in
     build-repository-blurb) build_repository_blurb ;;
     build-index)
        build_constraints > repo/cabal.constraints
-       build_index ;;
+       build_index_page ;;
     *)
        echo "error: Unknown command $1."
        echo
......
...@@ -2,5 +2,5 @@ packages: .
 source-repository-package
   type: git
-  location: https://github.com/bgamari/hackage-overlay-repo-tool
-  tag: fb950449022edd8fa270777ac6f0de1fefc15edc
+  location: https://gitlab.haskell.org/ghc/hackage-overlay-repo-tool
+  tag: 52f54229b08c6e86dd163dd42a78b22c10ffb099
-- This cabal project file is included in the CI configuration to
-- preclude the solver from using ancient package versions. See, for instance,
-- ghc/ghc#23048.
index-state:
hackage.haskell.org 2025-05-11T00:00:00Z,
head.hackage HEAD
constraints: th-abstraction >= 0.4
-- aeson is now quite old, don't attempt to use it
constraints: aeson >= 1.0,
-- don't allow uvector, which is deprecated; vector should be used instead
constraints: uvector == 0.0
-- containers-0.5 generally won't build with any recent compiler
constraints: containers >= 0.6
-- earlier transformers-compat versions don't include the Eq constraint on Eq1
-- instances needed by GHC 9.6
constraints: transformers-compat >= 0.7
...@@ -64,6 +64,18 @@ function ghc_commit() {
   $GHC --print-project-git-commit-id
 }
function ghc_arch() {
$GHC --print-host-platform
}
# ======================================================================
# Baseline constraints
#
# These constraints are applied to preclude the solver from producing build
# plans using ancient, under-constrained package versions.
EXTRA_OPTS="$EXTRA_OPTS --extra-cabal-fragment=$(pwd)/config.cabal.project"
# ====================================================================== # ======================================================================
# The lists begin here # The lists begin here
# #
...@@ -73,22 +85,42 @@ function ghc_commit() { ...@@ -73,22 +85,42 @@ function ghc_commit() {
version="$(ghc_version)" version="$(ghc_version)"
commit="$(ghc_commit)" commit="$(ghc_commit)"
arch="$(ghc_arch)"
echo "Found GHC $version, commit $commit." echo "Found GHC $version, commit $commit."
case $version in case $version in
9.2.*) 9.8.*)
# package ticket
broken liquidhaskell-boot 350
# singletons-base only supports the latest ghc
broken singletons-base 00000
;;
9.10.*)
# package ticket # package ticket
broken liquidhaskell-boot 350
# singletons-base only supports the latest ghc
broken singletons-base 00000
;; ;;
9.4.*) 9.12.*)
# package ticket # package ticket
broken liquidhaskell-boot 350
;; ;;
9.6.*) 9.14.*)
# package ticket # package ticket
broken ghcide 00000
broken Agda 26154
# broken until strict-containers is updated to work with the new containers
broken strict-containers 00000
;; ;;
9.7.*) 9.15.*)
# package ticket # package ticket
broken ghcide 00000
broken Agda 26154
# broken until strict-containers is updated to work with the new containers
broken strict-containers 00000
;; ;;
*) *)
...@@ -96,28 +128,60 @@ case $version in ...@@ -96,28 +128,60 @@ case $version in
;; ;;
esac esac
case $arch in
x86_64-*-*)
# package ticket
;;
aarch64-*-*)
# These just don't build on aarch64
# package ticket
broken charsetdetect 00000
broken packman 00000
;;
*)
echo "$arch is unknown to head.hackage, assuming nothing is broken."
;;
esac
# Extra packages # Extra packages
# ============== # ==============
# #
# These are packages which we don't have patches for but want to test anyways. # These are packages which we don't have patches for but want to test anyways.
extra_package lens extra_package lens 5.2.3
extra_package aeson extra_package generic-lens 2.2.2.0
extra_package criterion extra_package optics 0.4.2.1
extra_package scotty extra_package aeson 2.2.3.0
extra_package generic-lens extra_package criterion 1.6.3.0
extra_package microstache extra_package scotty 0.21
extra_package singletons-base 3.1 extra_package generic-lens 2.2.2.0
extra_package servant extra_package microstache 1.0.2.3
extra_package hgmp extra_package singletons-base 3.5
extra_package Agda extra_package servant 0.20.1
extra_package mmark extra_package hgmp 0.1.2.1
extra_package doctest 0.21.0 extra_package Agda 2.7.0.1
extra_package tasty extra_package mmark 0.0.7.6
extra_package pandoc extra_package doctest 0.24.0
extra_package tasty 1.5.3
extra_package pandoc 3.1.11.1
extra_package servant-conduit 0.16
extra_package servant-machines 0.16
extra_package linear-generics 0.2.3
extra_package futhark 0.25.13
extra_package generic-random 1.5.0.1
extra_package lame 0.2.2
extra_package inspection-testing 0.5.0.3
extra_package ghcide 2.9.0.0
extra_package ghc-typelits-extra 0.4.7
# This package is affected by https://gitlab.haskell.org/ghc/ghc/-/issues/22912
extra_package vector-space 0.16
# Build-tool packages # Build-tool packages
build_tool_package alex build_tool_package alex
build_tool_package happy build_tool_package happy
build_tool_package c2hs
# $BUILD_MODE controls how head.hackage runs. # $BUILD_MODE controls how head.hackage runs.
# =========================================== # ===========================================
...@@ -155,7 +219,7 @@ case "$BUILD_MODE" in ...@@ -155,7 +219,7 @@ case "$BUILD_MODE" in
only_package microlens only_package microlens
only_package free only_package free
only_package optparse-applicative only_package optparse-applicative
test_package system-test "$(pwd)/../tests/ghc-debug/test/" test_package system-test "$(pwd)/../tests/ghc-debug/**/*.cabal"
test_package ghc-tests "$(pwd)/../tests/ghc-tests" test_package ghc-tests "$(pwd)/../tests/ghc-tests"
;; ;;
TEST) TEST)
...@@ -163,14 +227,14 @@ case "$BUILD_MODE" in ...@@ -163,14 +227,14 @@ case "$BUILD_MODE" in
# packages from being built. Morally, I really want to say "build # packages from being built. Morally, I really want to say "build
# nothing at all besides the tests". # nothing at all besides the tests".
only_package tasty only_package tasty
test_package system-test "$(pwd)/../tests/ghc-debug/test/" test_package system-test "$(pwd)/../tests/ghc-debug/**/*.cabal"
test_package ghc-tests "$(pwd)/../tests/ghc-tests" test_package ghc-tests "$(pwd)/../tests/ghc-tests"
test_package all "$(pwd)/../tests/text" test_package all "$(pwd)/../tests/text"
test_package bytestring-tests "$(pwd)/../tests/bytestring" test_package bytestring-tests "$(pwd)/../tests/bytestring"
test_package all "$(pwd)/../tests/containers/containers-tests" test_package all "$(pwd)/../tests/containers/containers-tests"
;; ;;
COMPAT) COMPAT)
test_package system-test "$(pwd)/../tests/ghc-debug/test/" test_package system-test "$(pwd)/../tests/ghc-debug/**/*.cabal"
test_package ghc-tests "$(pwd)/../tests/ghc-tests" test_package ghc-tests "$(pwd)/../tests/ghc-tests"
;; ;;
esac esac
let sources = import ./nix/sources.nix; { sources ? import ./nix/sources.nix, nixpkgs ? (import sources.nixpkgs.outPath {}) }:
in
{ nixpkgs ? (import sources.nixpkgs.outPath {}) }:
with nixpkgs; with nixpkgs;
let let
haskellPackages = nixpkgs.haskell.packages.ghc925.override { all-cabal-hashes = sources.all-cabal-hashes.outPath; }; haskellPackages = nixpkgs.haskellPackages;
hackage-repo-tool = hackage-repo-tool =
let src = sources.hackage-security.outPath; let src = sources.hackage-security.outPath;
...@@ -45,7 +42,7 @@ let ...@@ -45,7 +42,7 @@ let
let let
deps = [ deps = [
bash curl gnutar findutils patch rsync openssl bash curl gnutar findutils patch rsync openssl
cabal-install ghc gcc binutils-unwrapped pwgen gnused haskellPackages.cabal-install haskellPackages.ghc gcc binutils-unwrapped pwgen gnused
hackage-repo-tool overlay-tool python3 jq pkg-config hackage-repo-tool overlay-tool python3 jq pkg-config
git # cabal-install wants this to fetch source-repository-packages git # cabal-install wants this to fetch source-repository-packages
]; ];
...@@ -55,7 +52,6 @@ let ...@@ -55,7 +52,6 @@ let
in in
runCommand "repo" { runCommand "repo" {
nativeBuildInputs = [ makeWrapper ]; nativeBuildInputs = [ makeWrapper ];
cabalDepsSrc = buildDepsFragment;
} '' } ''
mkdir -p $out/bin mkdir -p $out/bin
makeWrapper ${head-hackage-ci}/bin/head-hackage-ci $out/bin/head-hackage-ci \ makeWrapper ${head-hackage-ci}/bin/head-hackage-ci $out/bin/head-hackage-ci \
...@@ -83,4 +79,8 @@ let ...@@ -83,4 +79,8 @@ let
makeWrapper ${curl}/bin/curl $out/bin/curl makeWrapper ${curl}/bin/curl $out/bin/curl
''; '';
in in
build-repo mkShell {
name = "head-hackage-build-env";
buildInputs = [ build-repo ];
cabalDepsSrc = buildDepsFragment;
}
...@@ -4,30 +4,96 @@ set -e ...@@ -4,30 +4,96 @@ set -e
cd "$(dirname "${BASH_SOURCE[0]}")" cd "$(dirname "${BASH_SOURCE[0]}")"
ARCH="$(uname -m)"
>&2 echo "NIGHTLY=${NIGHTLY}" >&2 echo "NIGHTLY=${NIGHTLY}"
>&2 echo "RELEASE_JOB=${RELEASE_JOB}" >&2 echo "RELEASE_JOB=${RELEASE_JOB}"
>&2 echo "SLOW_VALIDATE=${SLOW_VALIDATE}" >&2 echo "SLOW_VALIDATE=${SLOW_VALIDATE}"
>&2 echo "ARCH=${ARCH}"
if [ -n "$SLOW_VALIDATE" ]; then # NOTE: If you add a new JOB_NAME here then you also might need to modify the
BINDIST_NAME="ghc-x86_64-linux-deb10-numa-slow-validate.tar.xz" # `needs` field in ghc/ghc .gitlab-ci.yml to avoid triggering the downstream job
if [ -n "$NIGHTLY" ]; then # too early.
JOB_NAME="nightly-x86_64-linux-deb10-numa-slow-validate"
elif [ "$RELEASE_JOB" == "yes" ]; then # Before the update to deb12, we use the deb10 bindists
echo "No slow validate build in release job" case $UPSTREAM_BRANCH_NAME in
exit 2 ghc-9.8|ghc-9.10)
else case $ARCH in
JOB_NAME="x86_64-linux-deb10-numa-slow-validate" aarch64)
fi if [ -n "$NIGHTLY" ]; then
else BINDIST_NAME="ghc-aarch64-linux-deb10-validate.tar.xz"
BINDIST_NAME="ghc-x86_64-linux-fedora33-release.tar.xz" JOB_NAME="nightly-aarch64-linux-deb10-validate"
if [ -n "$NIGHTLY" ]; then elif [ "$RELEASE_JOB" == "yes" ]; then
JOB_NAME="nightly-x86_64-linux-fedora33-release" BINDIST_NAME="ghc-aarch64-linux-deb10-release+no_split_sections.tar.xz"
elif [ "$RELEASE_JOB" == "yes" ]; then JOB_NAME="release-aarch64-linux-deb10-release+no_split_sections"
JOB_NAME="release-x86_64-linux-fedora33-release" else
else BINDIST_NAME="ghc-aarch64-linux-deb10-validate.tar.xz"
JOB_NAME="x86_64-linux-fedora33-release" JOB_NAME="aarch64-linux-deb10-validate"
fi fi
fi ;;
*)
if [ -n "$SLOW_VALIDATE" ]; then
BINDIST_NAME="ghc-x86_64-linux-deb10-numa-slow-validate.tar.xz"
if [ -n "$NIGHTLY" ]; then
JOB_NAME="nightly-x86_64-linux-deb10-numa-slow-validate"
elif [ "$RELEASE_JOB" == "yes" ]; then
echo "No slow validate build in release job"
exit 2
else
JOB_NAME="x86_64-linux-deb10-numa-slow-validate"
fi
else
BINDIST_NAME="ghc-x86_64-linux-fedora33-release.tar.xz"
if [ -n "$NIGHTLY" ]; then
JOB_NAME="nightly-x86_64-linux-fedora33-release"
elif [ "$RELEASE_JOB" == "yes" ]; then
JOB_NAME="release-x86_64-linux-fedora33-release"
else
JOB_NAME="x86_64-linux-fedora33-release"
fi
fi
;;
esac
;;
*) # Post update to use deb12
case $ARCH in
aarch64)
if [ -n "$NIGHTLY" ]; then
BINDIST_NAME="ghc-aarch64-linux-deb12-validate.tar.xz"
JOB_NAME="nightly-aarch64-linux-deb12-validate"
elif [ "$RELEASE_JOB" == "yes" ]; then
BINDIST_NAME="ghc-aarch64-linux-deb12-release+no_split_sections.tar.xz"
JOB_NAME="release-aarch64-linux-deb12-release+no_split_sections"
else
BINDIST_NAME="ghc-aarch64-linux-deb12-validate.tar.xz"
JOB_NAME="aarch64-linux-deb12-validate"
fi
;;
*)
if [ -n "$SLOW_VALIDATE" ]; then
BINDIST_NAME="ghc-x86_64-linux-deb12-numa-slow-validate.tar.xz"
if [ -n "$NIGHTLY" ]; then
JOB_NAME="nightly-x86_64-linux-deb12-numa-slow-validate"
elif [ "$RELEASE_JOB" == "yes" ]; then
echo "No slow validate build in release job"
exit 2
else
JOB_NAME="x86_64-linux-deb12-numa-slow-validate"
fi
else
BINDIST_NAME="ghc-x86_64-linux-fedora33-release.tar.xz"
if [ -n "$NIGHTLY" ]; then
JOB_NAME="nightly-x86_64-linux-fedora33-release"
elif [ "$RELEASE_JOB" == "yes" ]; then
JOB_NAME="release-x86_64-linux-fedora33-release"
else
JOB_NAME="x86_64-linux-fedora33-release"
fi
fi
;;
esac
;;
esac
>&2 echo "BINDIST_NAME=${BINDIST_NAME}" >&2 echo "BINDIST_NAME=${BINDIST_NAME}"
>&2 echo "JOB_NAME=${JOB_NAME}" >&2 echo "JOB_NAME=${JOB_NAME}"
......
#!/bin/sh
PIPELINE_TYPE=validation
if [ -n "$PIPELINE_OVERRIDE" ]; then
PIPELINE_TYPE="$PIPELINE_OVERRIDE"
# Triggered by GHC
elif [ "$CI_PIPELINE_SOURCE" = "pipeline" -a '(' -n "$UPSTREAM_COMMIT_SHA" -o -n "$UPSTREAM_PIPELINE_ID" ')' ]; then
PIPELINE_TYPE=downstream
# Nightly repo update, to be scheduled
#
# SCHEDULE_TYPE must be set when creating the scheduled job. It is used to
# explicitly identify which schedule we want.
elif [ "$CI_PIPELINE_SOURCE" = "schedule" -a "$SCHEDULE_TYPE" = "update-repo" ]; then
PIPELINE_TYPE=update-repo
fi
mk_pipeline () {
echo "Generating $PIPELINE_TYPE pipeline"
cp -v $1 gitlab-generated-pipeline.yml
}
case "$PIPELINE_TYPE" in
validation)
mk_pipeline ci/pipelines/validation.yml
;;
downstream)
mk_pipeline ci/pipelines/downstream.yml
;;
update-repo)
mk_pipeline ci/pipelines/update-repo.yml
;;
update-branch)
mk_pipeline ci/pipelines/update-branch.yml
;;
*)
echo "Unknown pipeline type: $PIPELINE_TYPE"
exit 1
;;
esac
{
"all-cabal-hashes": {
"branch": "hackage",
"description": "A repository containing all cabal files, with added metadata for package hashes",
"homepage": null,
"owner": "commercialhaskell",
"repo": "all-cabal-hashes",
"rev": "b0a2944a580a29defa7e68ebc6298bf9d851d86a",
"sha256": "163m68dcqc0i13s631g18sx5phgabjcvkl0nvkmcgsaqd8fnbzfc",
"type": "file",
"url": "https://github.com/commercialhaskell/all-cabal-hashes/archive/b0a2944a580a29defa7e68ebc6298bf9d851d86a.tar.gz",
"url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz"
},
"ghc-artefact-nix": {
"branch": "master",
"description": "Create environments with GHC HEAD artefacts",
"homepage": null,
"owner": "mpickering",
"repo": "ghc-artefact-nix",
"rev": "3684936ecde09234f51410e07ccd1c7f48d4f4ac",
"sha256": "17aly3bz9im5zcz9vazi3a4dqpw6xsqsk2afwb1r6mxn92wfzcys",
"type": "tarball",
"url": "https://github.com/mpickering/ghc-artefact-nix/archive/3684936ecde09234f51410e07ccd1c7f48d4f4ac.tar.gz",
"url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz"
},
"hackage-security": {
"branch": "master",
"description": "Hackage security framework based on TUF (The Update Framework)",
"homepage": "http://hackage.haskell.org/package/hackage-security",
"owner": "haskell",
"repo": "hackage-security",
"rev": "048844cb006eb880e256d7393928d6fd422ab6dd",
"sha256": "0rlx9shfa46c61pdn5inr2d3cyfkhz9xwajd5rjpfdqllkmh7c77",
"type": "tarball",
"url": "https://github.com/haskell/hackage-security/archive/048844cb006eb880e256d7393928d6fd422ab6dd.tar.gz",
"url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz"
},
"niv": {
"branch": "master",
"description": "Easy dependency management for Nix projects",
"homepage": "https://github.com/nmattia/niv",
"owner": "nmattia",
"repo": "niv",
"rev": "5830a4dd348d77e39a0f3c4c762ff2663b602d4c",
"sha256": "1d3lsrqvci4qz2hwjrcnd8h5vfkg8aypq3sjd4g3izbc8frwz5sm",
"type": "tarball",
"url": "https://github.com/nmattia/niv/archive/5830a4dd348d77e39a0f3c4c762ff2663b602d4c.tar.gz",
"url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz"
},
"nixpkgs": {
"branch": "master",
"description": "Nix Packages collection",
"homepage": "",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "33e0d99cbedf2acfd7340d2150837fbb28039a64",
"sha256": "15ll14rycfarqd7isyfms1fhszw9k36ars58gvdw3bkka5mj48cr",
"type": "tarball",
"url": "https://github.com/NixOS/nixpkgs/archive/33e0d99cbedf2acfd7340d2150837fbb28039a64.tar.gz",
"url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz"
},
"overlay-tool": {
"branch": "master",
"description": null,
"homepage": null,
"owner": "bgamari",
"repo": "hackage-overlay-repo-tool",
"rev": "8fdc6a32292db3b54d02b6689ec6b2af10059a0d",
"sha256": "1af6v50mhigl4djfixiiwmb126zjbgnnndaqjhvj11smn90ikp90",
"type": "tarball",
"url": "https://github.com/bgamari/hackage-overlay-repo-tool/archive/8fdc6a32292db3b54d02b6689ec6b2af10059a0d.tar.gz",
"url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz"
}
}
# This file has been generated by Niv. # extract sources from the flake.lock using flake-compat
(import
let (
let lock = builtins.fromJSON (builtins.readFile ../../flake.lock); in
# fetchTarball {
# The fetchers. fetch_<type> fetches specs of type <type>. url = "https://github.com/edolstra/flake-compat/archive/${lock.nodes.flake-compat.locked.rev}.tar.gz";
# sha256 = lock.nodes.flake-compat.locked.narHash;
}
fetch_file = pkgs: name: spec: )
let { src = ../../.; }
name' = sanitizeName name + "-src"; ).defaultNix.inputs
in
if spec.builtin or true then
builtins_fetchurl { inherit (spec) url sha256; name = name'; }
else
pkgs.fetchurl { inherit (spec) url sha256; name = name'; };
fetch_tarball = pkgs: name: spec:
let
name' = sanitizeName name + "-src";
in
if spec.builtin or true then
builtins_fetchTarball { name = name'; inherit (spec) url sha256; }
else
pkgs.fetchzip { name = name'; inherit (spec) url sha256; };
fetch_git = name: spec:
let
ref =
if spec ? ref then spec.ref else
if spec ? branch then "refs/heads/${spec.branch}" else
if spec ? tag then "refs/tags/${spec.tag}" else
abort "In git source '${name}': Please specify `ref`, `tag` or `branch`!";
in
builtins.fetchGit { url = spec.repo; inherit (spec) rev; inherit ref; };
fetch_local = spec: spec.path;
fetch_builtin-tarball = name: throw
''[${name}] The niv type "builtin-tarball" is deprecated. You should instead use `builtin = true`.
$ niv modify ${name} -a type=tarball -a builtin=true'';
fetch_builtin-url = name: throw
''[${name}] The niv type "builtin-url" will soon be deprecated. You should instead use `builtin = true`.
$ niv modify ${name} -a type=file -a builtin=true'';
#
# Various helpers
#
# https://github.com/NixOS/nixpkgs/pull/83241/files#diff-c6f540a4f3bfa4b0e8b6bafd4cd54e8bR695
sanitizeName = name:
(
concatMapStrings (s: if builtins.isList s then "-" else s)
(
builtins.split "[^[:alnum:]+._?=-]+"
((x: builtins.elemAt (builtins.match "\\.*(.*)" x) 0) name)
)
);
# The set of packages used when specs are fetched using non-builtins.
mkPkgs = sources: system:
let
sourcesNixpkgs =
import (builtins_fetchTarball { inherit (sources.nixpkgs) url sha256; }) { inherit system; };
hasNixpkgsPath = builtins.any (x: x.prefix == "nixpkgs") builtins.nixPath;
hasThisAsNixpkgsPath = <nixpkgs> == ./.;
in
if builtins.hasAttr "nixpkgs" sources
then sourcesNixpkgs
else if hasNixpkgsPath && ! hasThisAsNixpkgsPath then
import <nixpkgs> {}
else
abort
''
Please specify either <nixpkgs> (through -I or NIX_PATH=nixpkgs=...) or
add a package called "nixpkgs" to your sources.json.
'';
# The actual fetching function.
fetch = pkgs: name: spec:
if ! builtins.hasAttr "type" spec then
abort "ERROR: niv spec ${name} does not have a 'type' attribute"
else if spec.type == "file" then fetch_file pkgs name spec
else if spec.type == "tarball" then fetch_tarball pkgs name spec
else if spec.type == "git" then fetch_git name spec
else if spec.type == "local" then fetch_local spec
else if spec.type == "builtin-tarball" then fetch_builtin-tarball name
else if spec.type == "builtin-url" then fetch_builtin-url name
else
abort "ERROR: niv spec ${name} has unknown type ${builtins.toJSON spec.type}";
# If the environment variable NIV_OVERRIDE_${name} is set, then use
# the path directly as opposed to the fetched source.
replace = name: drv:
let
saneName = stringAsChars (c: if isNull (builtins.match "[a-zA-Z0-9]" c) then "_" else c) name;
ersatz = builtins.getEnv "NIV_OVERRIDE_${saneName}";
in
if ersatz == "" then drv else
# this turns the string into an actual Nix path (for both absolute and
# relative paths)
if builtins.substring 0 1 ersatz == "/" then /. + ersatz else /. + builtins.getEnv "PWD" + "/${ersatz}";
# Ports of functions for older nix versions
# a Nix version of mapAttrs if the built-in doesn't exist
mapAttrs = builtins.mapAttrs or (
f: set: with builtins;
listToAttrs (map (attr: { name = attr; value = f attr set.${attr}; }) (attrNames set))
);
# https://github.com/NixOS/nixpkgs/blob/0258808f5744ca980b9a1f24fe0b1e6f0fecee9c/lib/lists.nix#L295
range = first: last: if first > last then [] else builtins.genList (n: first + n) (last - first + 1);
# https://github.com/NixOS/nixpkgs/blob/0258808f5744ca980b9a1f24fe0b1e6f0fecee9c/lib/strings.nix#L257
stringToCharacters = s: map (p: builtins.substring p 1 s) (range 0 (builtins.stringLength s - 1));
# https://github.com/NixOS/nixpkgs/blob/0258808f5744ca980b9a1f24fe0b1e6f0fecee9c/lib/strings.nix#L269
stringAsChars = f: s: concatStrings (map f (stringToCharacters s));
concatMapStrings = f: list: concatStrings (map f list);
concatStrings = builtins.concatStringsSep "";
# https://github.com/NixOS/nixpkgs/blob/8a9f58a375c401b96da862d969f66429def1d118/lib/attrsets.nix#L331
optionalAttrs = cond: as: if cond then as else {};
# fetchTarball version that is compatible between all the versions of Nix
builtins_fetchTarball = { url, name ? null, sha256 }@attrs:
let
inherit (builtins) lessThan nixVersion fetchTarball;
in
if lessThan nixVersion "1.12" then
fetchTarball ({ inherit url; } // (optionalAttrs (!isNull name) { inherit name; }))
else
fetchTarball attrs;
# fetchurl version that is compatible between all the versions of Nix
builtins_fetchurl = { url, name ? null, sha256 }@attrs:
let
inherit (builtins) lessThan nixVersion fetchurl;
in
if lessThan nixVersion "1.12" then
fetchurl ({ inherit url; } // (optionalAttrs (!isNull name) { inherit name; }))
else
fetchurl attrs;
# Create the final "sources" from the config
mkSources = config:
mapAttrs (
name: spec:
if builtins.hasAttr "outPath" spec
then abort
"The values in sources.json should not have an 'outPath' attribute"
else
spec // { outPath = replace name (fetch config.pkgs name spec); }
) config.sources;
# The "config" used by the fetchers
mkConfig =
{ sourcesFile ? if builtins.pathExists ./sources.json then ./sources.json else null
, sources ? if isNull sourcesFile then {} else builtins.fromJSON (builtins.readFile sourcesFile)
, system ? builtins.currentSystem
, pkgs ? mkPkgs sources system
}: rec {
# The sources, i.e. the attribute set of spec name to spec
inherit sources;
# The "pkgs" (evaluated nixpkgs) to use for e.g. non-builtin fetchers
inherit pkgs;
};
in
mkSources (mkConfig {}) // { __functor = _: settings: mkSources (mkConfig settings); }
# Defines .run-ci
include: ci/pipelines/lib/run-head-hackage-ci.yml
# A build triggered from a ghc/ghc> pipeline.
.downstream:
extends: .run-ci
parallel:
matrix:
- ARCH: aarch64
- ARCH: x86_64
tags:
- ${ARCH}-linux
build-downstream:
extends: .downstream
variables:
BUILD_MODE: FULL
test-downstream:
extends: .downstream
variables:
BUILD_MODE: TEST
allow_failure: true