...
 
@@ -57,6 +57,7 @@ build-master:
extends: .build
variables:
GHC_TARBALL: "https://gitlab.haskell.org/api/v4/projects/1/jobs/artifacts/master/raw/ghc-x86_64-fedora27-linux.tar.xz?job=validate-x86_64-linux-fedora27"
EXTRA_HC_OPTS: "-dcore-lint"
only:
- branches
- merge_requests
@@ -65,7 +66,18 @@ build-master:
build-8.8:
extends: .build
variables:
GHC_TARBALL: "https://gitlab.haskell.org/api/v4/projects/1/jobs/artifacts/ghc-8.8/raw/ghc-8.8.1-x86_64-unknown-linux.tar.xz?job=validate-x86_64-linux-fedora27"
GHC_TARBALL: "https://gitlab.haskell.org/api/v4/projects/1/jobs/artifacts/ghc-8.8/raw/ghc-x86_64-fedora27-linux.tar.xz?job=validate-x86_64-linux-fedora27"
EXTRA_HC_OPTS: "-dcore-lint"
only:
- branches
- merge_requests
# Build against the 8.10 branch
build-8.10:
extends: .build
variables:
GHC_TARBALL: "https://gitlab.haskell.org/api/v4/projects/1/jobs/artifacts/ghc-8.10/raw/ghc-x86_64-fedora27-linux.tar.xz?job=validate-x86_64-linux-fedora27"
EXTRA_HC_OPTS: "-dcore-lint"
only:
- branches
- merge_requests
@@ -75,7 +87,6 @@ build-8.8:
tags:
- x86_64-linux
- head.hackage
image: nixos/nix
@@ -84,7 +95,17 @@ build-8.8:
paths:
- store.nar
before_script:
- |
if [ -e store.nar ]; then
echo "Extracting cached Nix store..."
nix-store --import -vv < store.nar || echo "invalid cache"
else
echo "No cache found"
fi
script:
# Install GHC
- echo "Bindist tarball is $GHC_TARBALL"
- |
nix build \
@@ -92,28 +113,29 @@ build-8.8:
--argstr url $GHC_TARBALL \
--out-link ghc \
ghcHEAD
- GHC=`pwd`/ghc/bin/ghc
- rm -Rf $HOME/.cabal/pacakages/local tmp; mkdir -p tmp; cd tmp
- export GHC=`pwd`/ghc/bin/ghc
- rm -Rf $HOME/.cabal/packages/local ci/run
# Build CI executable
- |
EXTRA_OPTS="--cabal-option=-j$CPUS" # Use cabal's build parallelism
if [ -n "$EXTRA_HC_OPTS" ]; then
EXTRA_OPTS="$EXTRA_OPTS --ghc-option=\"$EXTRA_HC_OPTS\""
fi
nix-build ./ci -j$CPUS --no-build-output
nix-store --export \
$(nix-store -qR --include-outputs \
$(nix-instantiate --quiet ./ci)) \
> store.nar
# Test it
- nix run -f ./ci -c run-ci
nix eval --raw -f ../ci cabalDepsSrc > deps.cabal.project
nix run -f ../ci -c \
head-hackage-ci \
test-patches \
--extra-cabal-fragment=$(pwd)/deps.cabal.project \
--patches=../patches \
--with-compiler=$GHC \
$EXTRA_OPTS
- nix run -f ../ci -c xz results.json
after_script:
- ls -lh
- |
nix run -f ./ci -c \
tar -cJf results.tar.xz -C ci/run \
results.json logs
artifacts:
when: always
paths:
- results.json.xz
- results.tar.xz
# Build and deploy a Hackage repository
update-repo:
@@ -135,11 +157,9 @@ update-repo:
script:
- nix-channel --add https://nixos.org/channels/nixpkgs-unstable nixpkgs
- nix-channel --update
- nix build -f scripts/build-repo.nix
- nix run -f scripts/build-repo.nix -c build-repo.sh extract-build-repo
- nix run -f '<nixpkgs>' gnutar -c tar -zxf logs.tar.gz
- mv logs repo
- cp summary.dot.svg repo
- nix build -f ci/default.nix
- nix run -f ci/default.nix -c build-repo.sh extract-keys
- nix run -f ci/default.nix -c build-repo.sh build-repo
dependencies:
- build-master
......
@@ -17,24 +17,17 @@ existing Hackage package(s).
as one of the main purposes of these patches is to make regression
testing possible. I.e. try to avoid conditional compilation.
- If only the `.cabal` file needs to be modified, a `.cabal` file
SHOULD be used instead of a `.patch` file. If the changes to the
`.cabal` file are too invasive (e.g. removing modules, changing the
structure of the package etc), a `.patch` file must be used.
## How this works
This repo contains `<pkg-id>.patch` and `<pkg-id>.cabal` files in the
This repo contains `<pkg-id>.patch` files in the
[`patches/`](./patches/) folder (where `<pkg-id>` refers to a specific
release of a package, e.g. `lens-4.15.3`).
Once merged to `master`, all package releases whose `<pkg-id>` is
mentioned will enter the *HEAD.hackage* package index; if there is a
`.patch` file, the respective release tarballs are patched
(i.e. mutated!). If there is a `.cabal` file, it is included as a
revision in the package index. Consequently, if there is only a
`.cabal` file and no `.patch` file, the original source `.tar.gz` is
included in verbatim (i.e. *not* mutated).
(i.e. mutated!).
If this operation succeeds, the `HEAD.hackage` package index at
http://HEAD.hackage.haskell.org/ is updated to contain the new index
@@ -144,41 +137,28 @@ $ nix build -f ./. --arg ghc "(import ghc-from-source.nix {ghc-path=$GHC_TREE;})
```
### GitLab CI
[GHC's GitLab instance](https://gitlab.haskell.org/ghc/head.hackage) uses
GitLab CI and `nix` to build a subset of the head.hackage package set using GHC
snapshots.
GHC's GitLab instance uses GitLab CI and the `head-hackage-ci` tool (contained
in the `ci/` directory) to test the `head.hackage` patchset against GHC releases
and snapshots.
To run a similar build locally simply download a binary distribution from a
`x86_64-fedora27-linux` CI job and run:
To run a similar build locally, start by downloading and installing a binary
distribution appropriate for your Linux distribution, then call the `run-ci` script:
```
$ export GHC_TARBALL=./ghc-x86_64-fedora27-linux.tar.xz
# for extra correctness assurance...
$ export EXTRA_HC_OPTS=-dcore-lint
$ scripts/build-nix.sh
$ ./run-ci
```
This will build the set of packages defined by the `testedPackages` list in
`scripts/build-all.nix`.
This will build all packages having patches and produce a textual summary, as
well as a JSON file (`result.json`) describing the outcome.
After building `testedPackages` (allowing for failures) the script job runs
`scripts/summarize.py`, which produces a few artifacts:
* a JSON summary (`summary.json`) which includes the full dependency graph as
well as which package builds failed
* a DOT graph (`summary.dot`) showing the package depedencies and their build
success. This can be rendered with `scripts/render-graph.sh`.
* a directory (`./logs`) of build logs
Note that `build-nix.sh` can also be used to build packages not included in
`testedPackages`:
```
$ scripts/build-nix.sh pandoc
```
### Hackage repository
[GHC's GitLab instance](https://gitlab.haskell.org/ghc/head.hackage) uses
GitLab CI to deploy a Hackage repository with the patches provided by
`head.hackage`. See the [repository]() for usage instructions.
GHC's GitLab instance uses GitLab CI to deploy a Hackage repository with the
patches provided by `head.hackage`. See the
[repository](http://ghc.gitlab.haskell.org/head.hackage/) for usage
instructions.
### Travis CI
......
run
dist
dist-newstyle
# head.hackage CI driver
This is `head-hackage-ci`, the application that drives GHC's `head.hackage`
continuous integration infrastructure. The general goals here are threefold:
1. test that the patches in the `head.hackage` repository build
2. use these patches to smoke-test GHC pre-releases and nightly builds
3. provide the patches as a Hackage repository
(<https://ghc.gitlab.haskell.org/head-hackage>) for consumption by end-users
We accomplish this via the `head-hackage-ci` executable. This executable has
two modes:
* `test-patches` is the primary driver of the CI testing process
* `make-constraints` is a utility for producing `cabal-install` constraints
to ensure that only patched versions of packages are used in install plans
Naturally, many Haskell packages have dependencies on native libraries.
`head-hackage-ci` supports two ways of providing these libraries:
* *from the host system*: Here we just rely on the host system to provide
native libraries; it is up to the user to ensure that the necessary packages
are installed.
* *from nixpkgs*: Here we use [nix][nix] and the [nixpkgs][nixpkgs] package set
to provide native libraries. These dependencies are defined in
`ci/build-deps.nix`. This mode is the one used by GHC's GitLab CI pipeline.
[nix]: https://nixos.org/nix/
[nixpkgs]: https://github.com/NixOS/nixpkgs
## Test procedure
The testing part of the CI process (goals (1) and (2) above) uses `head-hackage-ci`'s
`test-patches` mode together with a few shell scripts (namely `ci/config.sh` and
`ci/build-repo.sh`, plus `ci/build-deps.nix` in the case of a Nix-based build).
All of the below is orchestrated by the `run-ci` script:
1. Call `ci/config.sh` to determine the configuration of the run. This does
the following:
1. Identify the version of the compiler being tested (provided by the user
via the `GHC` environment variable)
1. Use the compiler version to find the set of packages that we expect to be broken.
1. Build a set of command-line arguments destined for `head-hackage-ci`
from the broken-package set above and a set of "extra" packages
defined in `config.sh`
1. If we are using `nixpkgs` to get native libraries: compute a
`cabal.project` fragment from the dependency information in
`ci/build-deps.nix` (this logic lives in `ci/default.nix`).
1. Call `head-hackage-ci test-patches` with the computed arguments. This does the following:
1. Determine the set of packages to test (based on the contents of the
`patches/` directory and any additional packages provided via the
`--extra-package` flag)
1. Build a local Hackage repository of patched packages using the
`build-repo.sh` script (which itself depends upon
`hackage-repo-tool` and `hackage-overlay-repo-tool`)
1. Build a `cabal.project` file containing configuration (e.g. the location
of the local repository, the location of the compiler, where native
dependencies are found, etc.)
1. Call `cabal v2-update` to inform `cabal` of the patched package repository
1. For each package to test:
1. Create a new working directory
1. Copy the previously constructed `cabal.project` into our working directory
1. Construct a dummy Cabal package depending only on the package under test
1. Call `cabal new-build` to build the package
1. Examine the state of `plan.json` and the cabal `logs` directory to work out
the outcome of the build
1. Write a JSON report (of type `Types.RunResult ()`) to `result.json` (one way to consume this report is sketched below)
1. Examine the failed units and determine whether there were any unexpected failures.
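For reference, here is a minimal sketch (not part of the repository) of one way to consume that report. It assumes the driver's `Types` module (shown below) is importable and that `aeson` and `containers` are available; the `Summarize` module name is made up for illustration.

```haskell
module Summarize (main) where

-- Summarise result.json: count the units that failed to build.
import qualified Data.Aeson as Aeson
import qualified Data.Map.Strict as M

import Types (BuildResult (..), RunResult, runResultUnits)

main :: IO ()
main = do
  parsed <- Aeson.eitherDecodeFileStrict "result.json"
              :: IO (Either String (RunResult ()))
  case parsed of
    Left err  -> fail ("could not parse result.json: " ++ err)
    Right res -> do
      let isFailure (_, BuildFailed{}) = True
          isFailure _                  = False
          failed = M.filter isFailure (runResultUnits res)
      putStrLn (show (M.size failed) ++ " unit(s) failed to build")
```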
{-# LANGUAGE DerivingStrategies #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE DeriveAnyClass #-}
{-# LANGUAGE DeriveTraversable #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
module Types
( RunResult(..)
, runResultUnits
, TestedPatch(..)
, PackageResult(..)
, BuildInfo(..)
, BuildResult(..)
, LogOutput(..)
) where
import Cabal.Plan
@@ -29,41 +33,53 @@ data BuildInfo
deriving anyclass (ToJSON, FromJSON)
-- | The result of a unit build.
data BuildResult
= BuildSucceeded { buildLog :: T.Text }
data BuildResult log
= BuildSucceeded { buildLog :: log }
-- ^ the build succeeded.
| BuildPreexisted
-- ^ the unit pre-existed in the global package database.
| BuildFailed { buildLog :: T.Text }
| BuildFailed { buildLog :: log }
-- ^ the build failed
| BuildNotAttempted
-- ^ the build was not attempted either because a dependency failed or it
-- is an executable or testsuite component
deriving stock (Show, Generic)
deriving stock (Show, Generic, Functor, Foldable, Traversable)
deriving anyclass (ToJSON, FromJSON)
-- | The result of an attempt to test a patch.
data PackageResult
data PackageResult log
= PackagePlanningFailed { planningError :: T.Text }
-- ^ Our attempt to build the package resulted in no viable install plan.
| PackageResult { packageBuilt :: Bool
, units :: M.Map UnitId (BuildInfo, BuildResult)
, units :: M.Map UnitId (BuildInfo, BuildResult log)
}
-- ^ We attempted to build the package.
deriving stock (Show, Generic)
deriving stock (Show, Generic, Functor, Foldable, Traversable)
deriving anyclass (ToJSON, FromJSON)
-- | Information about a patch which we tested.
data TestedPatch
data TestedPatch log
= TestedPatch { patchedPackageName :: PkgName
, patchedPackageVersion :: Ver
, patchedPackageResult :: PackageResult
, patchedPackageResult :: PackageResult log
}
deriving stock (Show, Generic)
deriving stock (Show, Generic, Functor, Foldable, Traversable)
deriving anyclass (ToJSON, FromJSON)
-- | The result of a CI run.
data RunResult
= RunResult { testedPatches :: [TestedPatch] }
deriving stock (Show, Generic)
data RunResult log
= RunResult { testedPatches :: [TestedPatch log] }
deriving stock (Show, Generic, Functor, Foldable, Traversable)
deriving anyclass (ToJSON, FromJSON)
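-- | Collect the per-unit build results of all tested patches into a single map.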
runResultUnits :: RunResult log -> M.Map UnitId (BuildInfo, BuildResult log)
runResultUnits runResult = M.unions
[ units
| tpatch <- testedPatches runResult
, PackageResult _ units <- pure $ patchedPackageResult tpatch
]
-- | Logged output from a build.
newtype LogOutput = LogOutput { getLogOutput :: T.Text }
deriving stock (Eq, Ord, Show)
deriving newtype (ToJSON, FromJSON)
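One thing the new `log` type parameter and the derived `Functor`/`Foldable` instances make straightforward is separating the (potentially large) build logs from the structure of a report, e.g. producing the `RunResult ()` mentioned in the CI driver README above. An illustrative sketch, not part of the patch; the helper names are made up:

```haskell
module LogHelpers where

import Data.Foldable (toList)
import Types (LogOutput, RunResult)

-- | Drop the logs, keeping only the report structure (derived Functor).
stripLogs :: RunResult LogOutput -> RunResult ()
stripLogs = fmap (const ())

-- | Collect every captured log from a run (derived Foldable).
collectLogs :: RunResult LogOutput -> [LogOutput]
collectLogs = toList
```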
{ pkgs }:
let
# Maps Haskell package names to a list of their native library dependency
# attributes.
pkgDeps= with pkgs;
{
zlib = [ zlib ];
digest = [ zlib ];
};
mkCabalFragment = pkgName: deps:
with pkgs.lib;
let
libDirs = concatStringsSep " " (map (dep: getOutput "lib" dep + "/lib") deps);
includeDirs = concatStringsSep " " (map (dep: getOutput "dev" dep + "/include") deps);
in ''
package ${pkgName}
extra-lib-dirs: ${libDirs}
extra-include-dirs: ${includeDirs}
'';
in
pkgs.lib.concatStringsSep "\n" (pkgs.lib.mapAttrsToList mkCabalFragment pkgDeps)
# Maps Haskell package names to a list of the Nixpkgs attributes corresponding
# to their native library dependencies.
with pkgs;
{
zlib = [ zlib ];
digest = [ zlib ];
regex-pcre = [ pcre ];
bzlib = [ bzip2 ];
}
@@ -164,12 +164,13 @@ build_repo() {
mkdir -p repo/package
cp $HOME/.cabal/packages/hackage.haskell.org/acme-box/0.0.0.0/acme-box-0.0.0.0.tar.gz repo/package
cp -R $PATCHES patches
mkdir -p tmp
cp -R $PATCHES tmp/patches
hackage-repo-tool bootstrap --keys=./keys --repo=./repo
mkdir -p template patches.cache
mkdir -p template tmp/patches.cache
tool \
--patches=./patches \
--patches=./tmp/patches \
--repo-cache=./cache \
--keys=./keys \
--repo-name=head.hackage \
@@ -178,15 +179,13 @@
build_constraints > repo/cabal.constraints
build_index
rm -R tmp
}
case $1 in
gen-keys) gen_keys_tarball ;;
extract-keys) extract_keys_tarball ;;
build-repo) build_repo ;;
extract-build-repo)
extract_keys_tarball
build_repo ;;
build-constraints) build_constraints ;;
build-repository-blurb) build_repository_blurb ;;
build-index)
......
packages: .
source-repository-package
type: git
location: https://github.com/bgamari/hackage-overlay-repo-tool
tag: 18eb61c830ad908d36d343f400a1588af6b9a03a
# vi: set filetype=sh
# Packages expected not to build due to GHC bugs. This is `source`'d by the CI
# script and the arguments in BROKEN_ARGS are added to the hackage-ci
# command-line.
# Mark the named package as broken.
#
# Usage:
# broken $pkg_name $ghc_ticket_number
#
function broken() {
pkg_name="$1"
ticket="$2"
echo "Marking $pkg_name as broken due to #$ticket"
EXTRA_OPTS="$EXTRA_OPTS --expect-broken=$pkg_name"
}
# Return the version number of the most recent release of the given package
function latest_version() {
pkg=$1
curl -s -H "Accept: application/json" -L -X GET http://hackage.haskell.org/package/$pkg/preferred | jq '.["normal-version"] | .[0]' -r
}
# Add a package to the set of packages that lack patches but are nevertheless
# tested.
function extra_package() {
pkg_name="$1"
version="$2"
if [ -z "$version" ]; then
version=$(latest_version $pkg_name)
fi
echo "Adding $pkg_name-$version to extra package set"
EXTRA_OPTS="$EXTRA_OPTS --extra-package=$pkg_name==$version"
}
# Mark a package to be declared with build-tool-depends, not build-depends.
# This is necessary for packages that do not have a library component.
function build_tool_package() {
pkg_name="$1"
echo "Adding $pkg_name as a build-tool package"
EXTRA_OPTS="$EXTRA_OPTS --build-tool-package=$pkg_name"
}
if [ -z "$GHC" ]; then GHC=ghc; fi
function ghc_version() {
$GHC --version | sed 's/.*version \([0-9]*\.\([0-9]*\.\)*\)/\1/'
}
# ======================================================================
# The lists begin here
#
# For instance:
#
# broken "lens" 17988
version="$(ghc_version)"
echo "Found GHC $version."
case $version in
8.8.*)
# package ticket
broken "parameterized-utils" 17056
;;
8.11.*)
# package ticket
broken "JuicyPixels" 17590
broken "free-algebras" 17710
;;
*)
echo "No broken packages for GHC $version"
;;
esac
# Extra packages
extra_package lens
extra_package aeson
extra_package criterion
extra_package scotty
extra_package generic-lens
extra_package microstache
extra_package singletons
extra_package servant
# Build-tool packages
build_tool_package alex
@@ -17,29 +17,48 @@ let
src = fetchFromGitHub {
owner = "bgamari";
repo = "hackage-overlay-repo-tool";
rev = "18eb61c830ad908d36d343f400a1588af6b9a03a";
sha256 = "1y1fw5x9lyd533lm67s7iyzb4640y8lya11sdjia0yd1j5if6s40";
rev = "7aac81e9bc468b103dd78b9c662672c86fe236f7";
sha256 = "0i4iw8nbhvc2xx05c0hbnnjyhap3b4xsclmxnmfa6dsa2ym02jc0";
};
in haskellPackages.callCabal2nix "hackage-overlay-repo-tool" src {};
head-hackage-ci =
haskellPackages.callCabal2nix "head-hackage-ci" ./. {};
let
src = nixpkgs.nix-gitignore.gitignoreSource [] ./.;
in haskellPackages.callCabal2nix "head-hackage-ci" src {};
buildDepsFragment =
let
buildDeps = import ./build-deps.nix { pkgs = nixpkgs; };
buildDeps = import ./build-deps.nix { pkgs = nixpkgs; };
mkCabalFragment = pkgName: deps:
with pkgs.lib;
let
libDirs = concatStringsSep " " (map (dep: getOutput "lib" dep + "/lib") deps);
includeDirs = concatStringsSep " " (map (dep: getOutput "dev" dep + "/include") deps);
in ''
package ${pkgName}
extra-lib-dirs: ${libDirs}
extra-include-dirs: ${includeDirs}
'';
in
pkgs.lib.concatStringsSep "\n"
(pkgs.lib.mapAttrsToList mkCabalFragment buildDeps);
buildDepsFile = pkgs.writeText "deps.cabal.project" buildDeps;
buildDepsFile = pkgs.writeText "deps.cabal.project" buildDepsFragment;
build-repo =
let
deps = [
bash curl gnutar findutils patch rsync openssl
cabal-install ghc gcc binutils-unwrapped pwgen gnused
hackage-repo-tool overlay-tool python3
hackage-repo-tool overlay-tool python3 jq
git # cabal-install wants this to fetch source-repository-packages
];
in
runCommand "repo" {
nativeBuildInputs = [ makeWrapper ];
cabalDepsSrc = buildDeps;
cabalDepsSrc = buildDepsFragment;
} ''
mkdir -p $out/bin
makeWrapper ${head-hackage-ci}/bin/head-hackage-ci $out/bin/head-hackage-ci \
@@ -48,6 +67,14 @@ let
makeWrapper ${./build-repo.sh} $out/bin/build-repo.sh \
--prefix PATH : ${stdenv.lib.makeBinPath deps}:$out/bin
makeWrapper ${../run-ci} $out/bin/run-ci \
--prefix PATH : ${stdenv.lib.makeBinPath deps}:$out/bin \
--set USE_NIX 1 \
--set CI_CONFIG ${./config.sh}
makeWrapper ${./find-job.sh} $out/bin/find-job \
--prefix PATH : ${stdenv.lib.makeBinPath deps}:$out/bin
makeWrapper ${xz}/bin/xz $out/bin/xz
'';
in
......
#!/usr/bin/env bash
set -e
project_id=$1
pipeline_id=$2
job_name=$3
# Access token is a protected environment variable in the head.hackage project and
# is necessary for this query to succeed. Sadly job tokens only seem to
# give us access to the project being built.
curl \
--silent --show-error \
-H "Private-Token: $ACCESS_TOKEN" \
"https://gitlab.haskell.org/api/v4/projects/$project_id/pipelines/$pipeline_id/jobs?scope[]=success" \
> resp.json
job_id=$(jq ". | map(select(.name == \"$job_name\")) | .[0].id" < resp.json)
if [ "$job_id" = "null" ]; then
echo "Error finding job $job_name for $pipeline_id in project $project_id:" >&2
cat resp.json >&2
rm resp.json
exit 1
else
rm resp.json
echo -n "$job_id"
fi
diff --git a/Cabal.cabal b/Cabal.cabal
index b97d346..bbe236e 100644
--- a/Cabal.cabal
+++ b/Cabal.cabal
@@ -1,5 +1,6 @@
name: Cabal
version: 2.4.1.0
+x-revision: 2
copyright: 2003-2018, Cabal Development Team (see AUTHORS file)
license: BSD3
license-file: LICENSE
@@ -183,7 +184,7 @@ flag bundled-binary-generic
library
build-depends:
array >= 0.4.0.1 && < 0.6,
- base >= 4.6 && < 5,
+ base >= 4.8 && < 4.13,
bytestring >= 0.10.0.0 && < 0.11,
containers >= 0.5.0.0 && < 0.7,
deepseq >= 1.3.0.1 && < 1.5,
diff --git a/Distribution/Compat/ReadP.hs b/Distribution/Compat/ReadP.hs
index 1f5a989..e314592 100644
--- a/Distribution/Compat/ReadP.hs
+++ b/Distribution/Compat/ReadP.hs
@@ -1,3 +1,4 @@
+{-# LANGUAGE CPP #-}
{-# LANGUAGE GADTs #-}
-----------------------------------------------------------------------------
-- |
@@ -113,7 +114,9 @@ instance Monad (P s) where
(Result x p) >>= k = k x `mplus` (p >>= k)
(Final r) >>= k = final [ys' | (x,s) <- r, ys' <- run (k x) s]
+#if !(MIN_VERSION_base(4,13,0))
fail = Fail.fail
+#endif
instance Fail.MonadFail (P s) where
fail _ = Fail
@@ -172,7 +175,9 @@ instance s ~ Char => Alternative (Parser r s) where
instance Monad (Parser r s) where
return = pure
+#if !(MIN_VERSION_base(4,13,0))
fail = Fail.fail
+#endif
R m >>= f = R (\k -> m (\a -> let R m' = f a in m' k))
instance Fail.MonadFail (Parser r s) where
diff --git a/Distribution/ParseUtils.hs b/Distribution/ParseUtils.hs
index 0e79049..f4b805c 100644
--- a/Distribution/ParseUtils.hs
+++ b/Distribution/ParseUtils.hs
@@ -19,6 +19,7 @@
-- This module is meant to be local-only to Distribution...
{-# OPTIONS_HADDOCK hide #-}
+{-# LANGUAGE CPP #-}
{-# LANGUAGE Rank2Types #-}
module Distribution.ParseUtils (
LineNo, PError(..), PWarning(..), locatedErrorMsg, syntaxError, warning,
@@ -107,7 +108,9 @@ instance Monad ParseResult where
ParseOk ws x >>= f = case f x of
ParseFailed err -> ParseFailed err
ParseOk ws' x' -> ParseOk (ws'++ws) x'
+#if !(MIN_VERSION_base(4,13,0))
fail = Fail.fail
+#endif
instance Fail.MonadFail ParseResult where
fail s = ParseFailed (FromString s Nothing)
diff --git a/Distribution/Parsec/Class.hs b/Distribution/Parsec/Class.hs
index d65ea54..d182360 100644
--- a/Distribution/Parsec/Class.hs
+++ b/Distribution/Parsec/Class.hs
@@ -1,3 +1,4 @@
+{-# LANGUAGE CPP #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE RankNTypes #-}
@@ -55,7 +56,7 @@ class Parsec a where
--
-- * knows @cabal-version@ we work with
--
-class (P.CharParsing m, MonadPlus m) => CabalParsing m where
+class (P.CharParsing m, MonadPlus m, Fail.MonadFail m) => CabalParsing m where
parsecWarning :: PWarnType -> String -> m ()
parsecHaskellString :: m String
@@ -116,7 +117,9 @@ instance Monad ParsecParser where
(>>) = (*>)
{-# INLINE (>>) #-}
+#if !(MIN_VERSION_base(4,13,0))
fail = Fail.fail
+#endif
instance MonadPlus ParsecParser where
mzero = empty
diff --git a/Distribution/Simple/PreProcess.hs b/Distribution/Simple/PreProcess.hs
index 4933028..2af2316 100644
--- a/Distribution/Simple/PreProcess.hs
+++ b/Distribution/Simple/PreProcess.hs
@@ -122,6 +122,12 @@ data PreProcessor = PreProcessor {
-- preprocessor's output name format.
type PreProcessorExtras = FilePath -> IO [FilePath]
+-- | A newtype around 'PreProcessorExtras', useful for storing
+-- 'PreProcessorExtras' inside of another type constructor (e.g., a list)
+-- without impredicativity (recall that the 'IO' type, which is contained in
+-- 'PreProcessorExtras', is a synonym for @'HasCallStack' => Prelude.IO@, which
+-- is a polymorphic type).
+newtype WrappedPreProcessorExtras = WrapPPE { unWrapPPE :: PreProcessorExtras }
mkSimplePreProcessor :: (FilePath -> FilePath -> Verbosity -> IO ())
-> (FilePath, FilePath)
@@ -694,8 +700,8 @@ knownSuffixHandlers =
]
-- |Standard preprocessors with possible extra C sources: c2hs, hsc2hs.
-knownExtrasHandlers :: [ PreProcessorExtras ]
-knownExtrasHandlers = [ ppC2hsExtras, ppHsc2hsExtras ]
+knownExtrasHandlers :: [ WrappedPreProcessorExtras ]
+knownExtrasHandlers = [ WrapPPE ppC2hsExtras, WrapPPE ppHsc2hsExtras ]
-- | Find any extra C sources generated by preprocessing that need to
-- be added to the component (addresses issue #238).
@@ -732,7 +738,7 @@ preprocessExtras verbosity comp lbi = case comp of
pp :: FilePath -> IO [FilePath]
pp dir = (map (dir </>) . filter not_sub . concat)
<$> for knownExtrasHandlers
- (withLexicalCallStack (\f -> f dir))
+ (withLexicalCallStack (\f -> f dir) . unWrapPPE)
-- TODO: This is a terrible hack to work around #3545 while we don't
-- reorganize the directory layout. Basically, for the main
-- library, we might accidentally pick up autogenerated sources for
diff --git a/src/Data/Decimal.hs b/src/Data/Decimal.hs
index 340b49b..5fbd685 100644
--- a/src/Data/Decimal.hs
+++ b/src/Data/Decimal.hs
@@ -66,8 +66,8 @@ import Text.ParserCombinators.ReadP
-- will return \"1.500\". Conversely the "Read" instance will use the decimal
-- places to determine the precision.
data DecimalRaw i = Decimal {
- decimalPlaces :: ! Word8,
- decimalMantissa :: ! i}
+ decimalPlaces :: !Word8,
+ decimalMantissa :: !i}
deriving (Typeable)
diff --git a/src/Graphics/Text/TrueType/CharacterMap.hs b/src/Graphics/Text/TrueType/CharacterMap.hs
index 2663806..75d2655 100644
--- a/src/Graphics/Text/TrueType/CharacterMap.hs
+++ b/src/Graphics/Text/TrueType/CharacterMap.hs
@@ -102,7 +102,7 @@ instance NFData CharacterMaps where
rnf (CharacterMaps maps) = rnf maps `seq` ()
instance Binary CharacterMaps where
- put _ = fail "Unimplemented"
+ put _ = error "Unimplemented"
get = do
startIndex <- bytesRead
versionNumber <- getWord16be
@@ -192,7 +192,7 @@ instance Ord CharacterTable where
compare _ _ = GT
instance Binary CharacterTable where
- put _ = fail "Binary.put CharacterTable - Unimplemented"
+ put _ = error "Binary.put CharacterTable - Unimplemented"
get = do
format <- getWord16be
case format of
@@ -305,7 +305,7 @@ instance CharMappeable Format0 where
langIdOfCharMap = _format0Language
instance Binary Format0 where
- put _ = fail "Binary.Format0.put - unimplemented"
+ put _ = error "Binary.Format0.put - unimplemented"
get = do
tableSize <- getWord16be
when (tableSize /= 262) $
@@ -347,7 +347,7 @@ instance Binary Format2SubHeader where
instance Binary Format2 where
- put _ = fail "Format2.put - unimplemented"
+ put _ = error "Format2.put - unimplemented"
get = do
_tableSize <- getWord16be
lang <- getWord16be
@@ -376,7 +376,7 @@ instance CharMappeable Format6 where
langIdOfCharMap = _format6Language
instance Binary Format6 where
- put _ = fail "Format6.put - unimplemented"
+ put _ = error "Format6.put - unimplemented"
get = do
_length <- getWord16be
language <- getWord16be
diff --git a/src/Graphics/Text/TrueType/Glyph.hs b/src/Graphics/Text/TrueType/Glyph.hs
index 5209222..aa0c500 100644
--- a/src/Graphics/Text/TrueType/Glyph.hs
+++ b/src/Graphics/Text/TrueType/Glyph.hs
@@ -313,7 +313,7 @@ getSimpleOutline counterCount = do
where breaker array ix = VU.splitAt (fromIntegral ix + 1) array
instance Binary Glyph where
- put _ = fail "Glyph.put - unimplemented"
+ put _ = error "Glyph.put - unimplemented"
get = do
hdr <- get
case _glfNumberOfContours hdr of
diff --git a/src/Graphics/Text/TrueType/Header.hs b/src/Graphics/Text/TrueType/Header.hs
index 2c425e5..abd6589 100644
--- a/src/Graphics/Text/TrueType/Header.hs
+++ b/src/Graphics/Text/TrueType/Header.hs
@@ -100,7 +100,7 @@ instance NFData FontHeader where
rnf (FontHeader {}) = ()
instance Binary FontHeader where
- put _ = fail "Unimplemented"
+ put _ = error "Unimplemented"
get =
FontHeader <$> get <*> get <*> g32 <*> g32 <*> get
<*> g16 <*> g64 <*> g64 <*> get <*> get
diff --git a/src/Graphics/Text/TrueType/MaxpTable.hs b/src/Graphics/Text/TrueType/MaxpTable.hs
index a0508c6..29c773c 100644
--- a/src/Graphics/Text/TrueType/MaxpTable.hs
+++ b/src/Graphics/Text/TrueType/MaxpTable.hs
@@ -50,7 +50,7 @@ instance NFData MaxpTable where
rnf (MaxpTable {}) = ()
instance Binary MaxpTable where
- put _ = fail "Unimplemented"
+ put _ = error "Unimplemented"
get = MaxpTable
<$> get <*> g16 <*> g16 <*> g16 <*> g16 <*> g16
<*> g16 <*> g16 <*> g16 <*> g16 <*> g16 <*> g16
diff --git a/src/Graphics/Text/TrueType/Name.hs b/src/Graphics/Text/TrueType/Name.hs
index 8c23605..c05a55f 100644
--- a/src/Graphics/Text/TrueType/Name.hs
+++ b/src/Graphics/Text/TrueType/Name.hs
@@ -35,7 +35,7 @@ instance NFData NameTable where
rnf (NameTable {}) = ()
instance Binary NameTable where
- put _ = fail "Binary.put NameTable - unimplemented"
+ put _ = error "Binary.put NameTable - unimplemented"
get = do
nameFormatId <- getWord16be
when (nameFormatId /= 0) $
diff --git a/Network/TCP.hs b/Network/TCP.hs
index 6f20319..ce01117 100644
--- a/Network/TCP.hs
+++ b/Network/TCP.hs
@@ -89,7 +89,7 @@ instance Eq EndPoint where
map toLower host1 == map toLower host2 && port1 == port2
data Conn a
- = MkConn { connSock :: ! Socket
+ = MkConn { connSock :: !Socket
, connHandle :: Handle
, connBuffer :: BufferOp a
, connInput :: Maybe a
diff --git a/System/Unix/Chroot.hs b/System/Unix/Chroot.hs
index 06bf3dd..12de580 100644
--- a/System/Unix/Chroot.hs
+++ b/System/Unix/Chroot.hs
@@ -1,4 +1,4 @@
-{-# LANGUAGE ForeignFunctionInterface #-}
+{-# LANGUAGE CPP, ForeignFunctionInterface #-}
-- | This module, except for useEnv, is copied from the build-env package.
module System.Unix.Chroot
( fchroot
@@ -45,7 +45,11 @@ chroot fp = withCString fp $ \cfp -> throwErrnoIfMinus1_ "chroot" (c_chroot cfp)
fchroot :: (MonadIO m, MonadMask m) => FilePath -> m a -> m a
fchroot path action =
do origWd <- liftIO $ getWorkingDirectory
- rootFd <- liftIO $ openFd "/" ReadOnly Nothing defaultFileFlags
+ rootFd <- liftIO $ openFd "/" ReadOnly
+#if !(MIN_VERSION_unix(2,8,0))
+ Nothing
+#endif
+ defaultFileFlags
liftIO $ chroot path
liftIO $ changeWorkingDirectory "/"
action `finally` (liftIO $ breakFree origWd rootFd)
diff --git a/src/FRP/Yampa/Event.hs b/src/FRP/Yampa/Event.hs
index 804fe46..0639f3f 100644
--- a/src/FRP/Yampa/Event.hs
+++ b/src/FRP/Yampa/Event.hs
@@ -1,3 +1,4 @@
+{-# LANGUAGE CPP #-}
{-# OPTIONS_GHC -fno-warn-warnings-deprecations #-}
-----------------------------------------------------------------------------------------
-- |
@@ -90,6 +91,7 @@ module FRP.Yampa.Event where
import Control.Applicative
import Control.DeepSeq (NFData(..))
+import qualified Control.Monad.Fail as Fail
import FRP.Yampa.Diagnostics
@@ -175,9 +177,14 @@ instance Monad Event where
-- | See 'pure'.
return = pure
+#if !(MIN_VERSION_base(4,13,0))
-- | Fail with 'NoEvent'.
- fail _ = NoEvent
+ fail = Fail.fail
+#endif
+instance Fail.MonadFail Event where
+ -- | Fail with 'NoEvent'.
+ fail _ = NoEvent
-- | Alternative instance
instance Alternative Event where
diff --git a/src/Data/Active.hs b/src/Data/Active.hs
index c5e1b3f..4db447b 100644
--- a/src/Data/Active.hs
+++ b/src/Data/Active.hs
@@ -162,31 +162,6 @@ import Linear.Affine
-- Time
------------------------------------------------------------
--- | An abstract type for representing /points in time/. Note that
--- literal numeric values may be used as @Time@s, thanks to the the
--- 'Num' and 'Fractional' instances.
-newtype Time n = Time { unTime :: n }
- deriving (Eq, Ord, Show, Read, Enum, Num, Fractional, Real, RealFrac, Functor)
-
-makeWrapped ''Time
-
--- | A convenient wrapper function to convert a numeric value into a time.
-toTime :: n -> Time n
-toTime = Time
-
--- | A convenient unwrapper function to turn a time into a numeric value.
-fromTime :: Time n -> n
-fromTime = unTime
-
-instance Affine Time where
- type Diff Time = Duration
- (Time t1) .-. (Time t2) = Duration (t1 - t2)
- (Time t) .+^ (Duration d) = Time (t + d)
-
--- instance Deadline Time a where
--- -- choose tm deadline (if before / at deadline) (if after deadline)
--- choose t1 t2 a b = if t1 <= t2 then a else b
-
-- | An abstract type representing /elapsed time/ between two points
-- in time. Note that durations can be negative. Literal numeric
-- values may be used as @Duration@s thanks to the 'Num' and
@@ -218,6 +193,31 @@ instance Num n => Monoid (Duration n) where
mappend = (<>)
mempty = 0
+-- | An abstract type for representing /points in time/. Note that
+-- literal numeric values may be used as @Time@s, thanks to the the
+-- 'Num' and 'Fractional' instances.
+newtype Time n = Time { unTime :: n }
+ deriving (Eq, Ord, Show, Read, Enum, Num, Fractional, Real, RealFrac, Functor)
+
+makeWrapped ''Time
+
+-- | A convenient wrapper function to convert a numeric value into a time.
+toTime :: n -> Time n
+toTime = Time
+
+-- | A convenient unwrapper function to turn a time into a numeric value.
+fromTime :: Time n -> n
+fromTime = unTime
+
+instance Affine Time where
+ type Diff Time = Duration
+ (Time t1) .-. (Time t2) = Duration (t1 - t2)
+ (Time t) .+^ (Duration d) = Time (t + d)
+
+-- instance Deadline Time a where
+-- -- choose tm deadline (if before / at deadline) (if after deadline)
+-- choose t1 t2 a b = if t1 <= t2 then a else b
+
-- | An @Era@ is a concrete span of time, that is, a pair of times
-- representing the start and end of the era. @Era@s form a
-- semigroup: the combination of two @Era@s is the smallest @Era@
diff --git a/Simulation/Aivika/Agent.hs b/Simulation/Aivika/Agent.hs
index 7eae961..01ff777 100644
--- a/Simulation/Aivika/Agent.hs
+++ b/Simulation/Aivika/Agent.hs
@@ -211,7 +211,7 @@ selectState st =
"Use the setStateTransition function to define " ++
"the transition state: activateState."
ProcessingMode ->
- do x0 @ (Just st0) <- readIORef (agentStateRef agent)
+ do x0@(Just st0) <- readIORef (agentStateRef agent)
invokeEvent p $ traversePath x0 st
-- | Set the activation computation for the specified state.
diff --git a/Simulation/Aivika/DoubleLinkedList.hs b/Simulation/Aivika/DoubleLinkedList.hs
index 66322c8..47a4a6f 100644
--- a/Simulation/Aivika/DoubleLinkedList.hs
+++ b/Simulation/Aivika/DoubleLinkedList.hs
@@ -201,10 +201,10 @@ listRemoveBy x p = readIORef (listHead x) >>= loop
(Nothing, Nothing) ->
do writeIORef (listHead x) Nothing
writeIORef (listTail x) Nothing
- (Nothing, head' @ (Just item')) ->
+ (Nothing, head'@(Just item')) ->
do writeIORef (itemPrev item') Nothing
writeIORef (listHead x) head'
- (tail' @ (Just item'), Nothing) ->
+ (tail'@(Just item'), Nothing) ->
do writeIORef (itemNext item') Nothing
writeIORef (listTail x) tail'
(Just prev', Just next') ->
diff --git a/Simulation/Aivika/Internal/Cont.hs b/Simulation/Aivika/Internal/Cont.hs
index 379c2dc..f7ce5f9 100644
--- a/Simulation/Aivika/Internal/Cont.hs
+++ b/Simulation/Aivika/Internal/Cont.hs
@@ -777,7 +777,7 @@ freezeContReentering c a m =
writeIORef rc Nothing
case c of
Nothing -> return Nothing
- z @ (Just c) ->
+ z@(Just c) ->
do f <- invokeEvent p $
contPreemptionBegun $
contId $ contAux c
diff --git a/Simulation/Aivika/Stream.hs b/Simulation/Aivika/Stream.hs
index 74d126c..7daa04c 100644
--- a/Simulation/Aivika/Stream.hs
+++ b/Simulation/Aivika/Stream.hs
@@ -289,7 +289,7 @@ rightStream (Cons s) = Cons y where
-- | Replace the 'Left' values.
replaceLeftStream :: Stream (Either a b) -> Stream c -> Stream (Either c b)
-replaceLeftStream (Cons sab) (ys0 @ ~(Cons sc)) = Cons z where
+replaceLeftStream (Cons sab) (ys0@(~(Cons sc))) = Cons z where
z = do (a, xs) <- sab
case a of
Left _ ->
@@ -300,7 +300,7 @@ replaceLeftStream (Cons sab) (ys0 @ ~(Cons sc)) = Cons z where
-- | Replace the 'Right' values.
replaceRightStream :: Stream (Either a b) -> Stream c -> Stream (Either a c)
-replaceRightStream (Cons sab) (ys0 @ ~(Cons sc)) = Cons z where
+replaceRightStream (Cons sab) (ys0@(~(Cons sc))) = Cons z where
z = do (a, xs) <- sab
case a of
Right _ ->
diff --git a/Simulation/Aivika/Trans/DoubleLinkedList.hs b/Simulation/Aivika/Trans/DoubleLinkedList.hs
index 0dbb4b1..4c530a4 100644
--- a/Simulation/Aivika/Trans/DoubleLinkedList.hs
+++ b/Simulation/Aivika/Trans/DoubleLinkedList.hs
@@ -211,10 +211,10 @@ listRemoveBy x p = readRef (listHead x) >>= loop
(Nothing, Nothing) ->
do writeRef (listHead x) Nothing
writeRef (listTail x) Nothing
- (Nothing, head' @ (Just item')) ->
+ (Nothing, head'@(Just item')) ->
do writeRef (itemPrev item') Nothing
writeRef (listHead x) head'
- (tail' @ (Just item'), Nothing) ->
+ (tail'@(Just item'), Nothing) ->
do writeRef (itemNext item') Nothing
writeRef (listTail x) tail'
(Just prev', Just next') ->
diff --git a/Simulation/Aivika/Trans/Internal/Cont.hs b/Simulation/Aivika/Trans/Internal/Cont.hs
index 26736d3..d241f12 100644
--- a/Simulation/Aivika/Trans/Internal/Cont.hs
+++ b/Simulation/Aivika/Trans/Internal/Cont.hs
@@ -821,7 +821,7 @@ freezeContReentering c a m =
invokeEvent p $ writeRef rc Nothing
case c of
Nothing -> return Nothing
- z @ (Just c) ->
+ z@(Just c) ->
do f <- invokeEvent p $
contPreemptionBegun $
contId $ contAux c
diff --git a/Simulation/Aivika/Trans/Stream.hs b/Simulation/Aivika/Trans/Stream.hs
index 1cc5017..82af37c 100644
--- a/Simulation/Aivika/Trans/Stream.hs
+++ b/Simulation/Aivika/Trans/Stream.hs
@@ -323,7 +323,7 @@ rightStream (Cons s) = Cons y where
-- | Replace the 'Left' values.
replaceLeftStream :: MonadDES m => Stream m (Either a b) -> Stream m c -> Stream m (Either c b)
{-# INLINABLE replaceLeftStream #-}
-replaceLeftStream (Cons sab) (ys0 @ ~(Cons sc)) = Cons z where
+replaceLeftStream (Cons sab) (ys0@(~(Cons sc))) = Cons z where
z = do (a, xs) <- sab
case a of
Left _ ->
@@ -335,7 +335,7 @@ replaceLeftStream (Cons sab) (ys0 @ ~(Cons sc)) = Cons z where
-- | Replace the 'Right' values.
replaceRightStream :: MonadDES m => Stream m (Either a b) -> Stream m c -> Stream m (Either a c)
{-# INLINABLE replaceRightStream #-}
-replaceRightStream (Cons sab) (ys0 @ ~(Cons sc)) = Cons z where
+replaceRightStream (Cons sab) (ys0@(~(Cons sc))) = Cons z where
z = do (a, xs) <- sab
case a of
Right _ ->
diff --git a/src/DFAMin.hs b/src/DFAMin.hs
index e75c593..e338c8b 100644
--- a/src/DFAMin.hs
+++ b/src/DFAMin.hs
@@ -32,9 +32,9 @@ import Data.List as List
-- end;
minimizeDFA :: Ord a => DFA Int a -> DFA Int a
-minimizeDFA dfa@ DFA { dfa_start_states = starts,
- dfa_states = statemap
- }
+minimizeDFA dfa@DFA { dfa_start_states = starts,
+ dfa_states = statemap
+ }
= DFA { dfa_start_states = starts,
dfa_states = Map.fromList states }
where
diff --git a/src/Data/Barbie/Internal/Constraints.hs b/src/Data/Barbie/Internal/Constraints.hs
index d1da2f2..7087ed2 100644
--- a/src/Data/Barbie/Internal/Constraints.hs
+++ b/src/Data/Barbie/Internal/Constraints.hs
@@ -179,7 +179,7 @@ gbaddDictsDefault
class GAllBC (repbf :: * -> *) where
type GAllB (c :: k -> Constraint) repbf :: Constraint
-class GAllBC repbx => GConstraintsB c (f :: k -> *) repbx repbf repbdf where
+class GAllBC repbx => GConstraintsB c f repbx repbf repbdf where
gbaddDicts :: GAllB c repbx => repbf x -> repbdf x
diff --git a/src/Data/Barbie/Internal/Product.hs b/src/Data/Barbie/Internal/Product.hs
index 31abc4b..6fef250 100644
--- a/src/Data/Barbie/Internal/Product.hs
+++ b/src/Data/Barbie/Internal/Product.hs
@@ -168,7 +168,7 @@ gbuniqDefault x
= toN (gbuniq @f @f @_ @(RepN (b f)) @(RepN (b (f `Product` f))) x)
{-# INLINE gbuniqDefault #-}
-class GProductB (f :: k -> *) (g :: k -> *) repbf repbg repbfg where
+class GProductB f g repbf repbg repbfg where
gbprod :: repbf x -> repbg x -> repbfg x
gbuniq :: (forall a . f a) -> repbf x
diff --git a/System/IO/Posix/MMap.hs b/System/IO/Posix/MMap.hs
index adf71e2..3525c84 100644
--- a/System/IO/Posix/MMap.hs
+++ b/System/IO/Posix/MMap.hs
@@ -1,4 +1,4 @@
-{-# LANGUAGE ForeignFunctionInterface #-}
+{-# LANGUAGE CPP, ForeignFunctionInterface #-}
--------------------------------------------------------------------
-- |
-- Module : System.IO.Posix.MMap
@@ -98,7 +98,11 @@ import System.Posix
--
unsafeMMapFile :: FilePath -> IO ByteString
unsafeMMapFile f = do
- fd <- openFd f ReadOnly Nothing defaultFileFlags
+ fd <- openFd f ReadOnly
+#if !(MIN_VERSION_unix(2,8,0))
+ Nothing
+#endif
+ defaultFileFlags
always (closeFd fd) $ do
stat <- getFdStatus fd
let size = fromIntegral (fileSize stat)
diff --git a/System/IO/Posix/MMap/Lazy.hs b/System/IO/Posix/MMap/Lazy.hs
index 16c9539..ffb7535 100644
--- a/System/IO/Posix/MMap/Lazy.hs
+++ b/System/IO/Posix/MMap/Lazy.hs
@@ -91,7 +91,11 @@ import System.Posix
--
unsafeMMapFile :: FilePath -> IO ByteString
unsafeMMapFile path = do
- fd <- openFd path ReadOnly Nothing defaultFileFlags
+ fd <- openFd path ReadOnly
+#if !(MIN_VERSION_unix(2,8,0))
+ Nothing
+#endif
+ defaultFileFlags
always (closeFd fd) $ do
stat <- getFdStatus fd
let size = fromIntegral (fileSize stat)
diff --git a/Codec/Compression/BZip/Stream.hsc b/Codec/Compression/BZip/Stream.hsc
index 5a8c785..39913da 100644
--- a/Codec/Compression/BZip/Stream.hsc
+++ b/Codec/Compression/BZip/Stream.hsc
@@ -70,10 +70,12 @@ import Data.ByteString.Internal (nullForeignPtr)
import System.IO.Unsafe (unsafeInterleaveIO)
import System.IO (hPutStrLn, stderr)
import Control.Applicative (Applicative(..))
-import Control.Monad (liftM, ap)
+import Control.Monad (liftM, ap, fail)
+import Control.Monad.Fail (MonadFail)
+import qualified Control.Monad.Fail as MonadFail
import Control.Exception (assert)
-import Prelude hiding (length)
+import Prelude hiding (fail, length)
#include "bzlib.h"
@@ -233,6 +235,11 @@ instance Monad Stream where
-- m >>= f = (m `thenZ` \a -> consistencyCheck `thenZ_` returnZ a) `thenZ` f
(>>) = thenZ_
return = returnZ
+#if !(MIN_VERSION_base(4,13,0))
+ fail = MonadFail.fail
+#endif
+
+instance MonadFail Stream where
fail = (finalise >>) . failZ
returnZ :: a -> Stream a
diff --git a/bzlib.cabal b/bzlib.cabal
index 362577d..0557026 100644
--- a/bzlib.cabal
+++ b/bzlib.cabal
@@ -30,7 +30,7 @@ library
Codec.Compression.BZip.Internal
other-modules: Codec.Compression.BZip.Stream
extensions: CPP, ForeignFunctionInterface
- build-depends: base >= 3 && < 5,
+ build-depends: base >= 4.9 && < 5,
bytestring == 0.9.* || == 0.10.*
includes: bzlib.h
ghc-options: -Wall
--
2.24.0
diff --git a/Math/Combinat/Groups/Braid.hs b/Math/Combinat/Groups/Braid.hs
index 3760cac..7030ea2 100644
--- a/Math/Combinat/Groups/Braid.hs
+++ b/Math/Combinat/Groups/Braid.hs
@@ -282,7 +282,7 @@ isPureBraid braid = (braidPermutation braid == P.identity n) where
-- we got the two-line notation of the permutation.
--
braidPermutation :: KnownNat n => Braid n -> Permutation
-braidPermutation braid@ (Braid gens) = perm where
+braidPermutation braid@(Braid gens) = perm where
n = numberOfStrands braid
perm = _braidPermutation n (map brGenIdx gens)
diff --git a/Math/Combinat/Numbers/Primes.hs b/Math/Combinat/Numbers/Primes.hs
index 6cf837f..0122d6d 100644
--- a/Math/Combinat/Numbers/Primes.hs
+++ b/Math/Combinat/Numbers/Primes.hs
@@ -54,10 +54,10 @@ primesTMWE = 2:3:5:7: gaps 11 wheel (fold3t $ roll 11 wheel primes') where
pairs ((x:xs):ys:t) = (x : union xs ys) : pairs t
wheel = 2:4:2:4:6:2:6:4:2:4:6:6:2:6:4:2:6:4:6:8:4:2:4:2:
4:8:6:4:6:2:4:6:2:6:6:4:2:4:6:2:6:4:2:4:2:10:2:10:wheel
- gaps k ws@(w:t) cs@ ~(c:u)
+ gaps k ws@(w:t) cs@(~(c:u))
| k==c = gaps (k+w) t u
| True = k : gaps (k+w) t cs
- roll k ws@(w:t) ps@ ~(p:u)
+ roll k ws@(w:t) ps@(~(p:u))
| k==p = scanl (\c d->c+p*d) (p*p) ws : roll (k+w) t u
| True = roll (k+w) t ps
diff --git a/Math/Combinat/Tableaux/LittlewoodRichardson.hs b/Math/Combinat/Tableaux/LittlewoodRichardson.hs
index a6a58e3..3e4229f 100644
--- a/Math/Combinat/Tableaux/LittlewoodRichardson.hs
+++ b/Math/Combinat/Tableaux/LittlewoodRichardson.hs
@@ -212,7 +212,7 @@ lrScalar :: SkewPartition -> SkewPartition -> Int
lrScalar lambdaMu alphaBeta = _lrScalar (fromSkewPartition lambdaMu) (fromSkewPartition alphaBeta)
_lrScalar :: (Partition,Partition) -> (Partition,Partition) -> Int
-_lrScalar (plam @(Partition lam ) , pmu @(Partition mu0) )
+_lrScalar (plam@(Partition lam ) , pmu@(Partition mu0) )
(palpha@(Partition alpha) , pbeta@(Partition beta)) =
if not (pmu `isSubPartitionOf` plam )
|| not (pbeta `isSubPartitionOf` palpha)