# Questions about this file?
# See https://gitlab.haskell.org/ghc/ghc/wikis/continuous-integration/usage.
version: 2
aliases:
- &defaults
working_directory: ~/ghc
- &prepare
run:
name: prepare-system
command: .circleci/prepare-system.sh
- &submodules
run:
name: submodules
command: .circleci/fetch-submodules.sh
- &buildenv
# ideally we would simply set THREADS here instead of re-detecting it every
# time we need it below. Unfortunately, there is no way to set an environment
# variable with the result of a shell script.
SKIP_PERF_TESTS: NO
VERBOSE: 2
- &boot
run:
name: Boot
command: |
./boot
- &set_git_identity
run:
name: Set Git Identity
command: |
git config user.email "ghc-circleci@haskell.org"
git config user.name "GHC CircleCI"
- &configure_unix
run:
name: Configure
command: ./configure
- &configure_unix_32
run:
name: Configure
command: |
setarch i386 ./configure --with-ghc=/opt/ghc-i386/8.4.2/bin/ghc
- &configure_bsd
run:
name: Configure
command: ./configure --target=x86_64-unknown-freebsd10
- &configure_unreg
run:
name: Configure
command: ./configure --enable-unregisterised
- &make
run:
name: Build
command: "make -j`mk/detect-cpu-count.sh` V=0"
- &build_hadrian
run:
name: Build GHC using Hadrian
command: |
cabal update
hadrian/build.sh -j`mk/detect-cpu-count.sh`
- &test
run:
name: Test
command: |
mkdir -p test-results
METRICS_FILE=$(mktemp)
echo "export METRICS_FILE=$METRICS_FILE" >> $BASH_ENV
make test THREADS=`mk/detect-cpu-count.sh` SKIP_PERF_TESTS=$SKIP_PERF_TESTS TEST_ENV=$TEST_ENV JUNIT_FILE=../../test-results/junit.xml METRICS_FILE=$METRICS_FILE
- &store_test_results
store_test_results:
path: test-results
- &push_perf_note
run:
name: Push Performance Git Notes
command: .circleci/push-test-metrics.sh
- &store_test_artifacts
store_artifacts:
# we might want to add the whole output of the test phase
# too at some point
path: test-results/junit.xml
- &slowtest
run:
name: Full Test
command: |
mkdir -p test-results
make slowtest SKIP_PERF_TESTS=YES THREADS=`mk/detect-cpu-count.sh` JUNIT_FILE=../../test-results/junit.xml
- &bindist
run:
name: Create bindist
# Reduce compression effort to 2 to speed up bindist creation
command: make binary-dist TAR_COMP_OPTS="-2" && mv ghc*.tar.xz ghc.tar.xz
# Building bindist takes ~15 minutes without output, account for
# that.
no_output_timeout: "30m"
- &store_bindist
store_artifacts:
path: ghc.tar.xz
- &only_release_tags
tags:
only: /^ghc-.*/
- &ignore_gitlab_branches
branches:
ignore: /^gitlab\/.*/
jobs:
"validate-x86_64-freebsd":
docker:
- image: ghcci/x86_64-freebsd
environment:
TARGET: FreeBSD
<<: *buildenv
GHC_COLLECTOR_FLAVOR: x86_64-freebsd
TEST_ENV: x86_64-freebsd
steps:
- checkout
- *set_git_identity
- *prepare
- *submodules
- *boot
- *configure_bsd
- *make
- *bindist
- *store_bindist
- *test
- *store_test_results
- *store_test_artifacts
- *push_perf_note
"validate-x86_64-darwin":
macos:
xcode: "9.0"
environment:
# Disable sphinx PDF output as MacTeX apparently doesn't provide xelatex
BUILD_SPHINX_PDF: "NO"
MACOSX_DEPLOYMENT_TARGET: "10.7"
# Only Sierra and later support clock_gettime. See #12858
ac_cv_func_clock_gettime: "no"
GHC_COLLECTOR_FLAVOR: x86_64-darwin
<<: *buildenv
TEST_ENV: x86_64-darwin
steps:
- checkout
- *set_git_identity
- *prepare
- *submodules
- *boot
- *configure_unix
- *make
- *bindist
- *store_bindist
- *test
- *store_test_results
- *store_test_artifacts
- *push_perf_note
"validate-x86_64-linux-deb8":
docker:
- image: ghcci/x86_64-linux-deb8:0.1
environment:
<<: *buildenv
GHC_COLLECTOR_FLAVOR: x86_64-linux
TEST_ENV: x86_64-linux-deb8
steps:
- checkout
- *set_git_identity
- *prepare
- *submodules
- *boot
- *configure_unix
- *make
- *bindist
- *store_bindist
- *test
- *store_test_results
- *store_test_artifacts
- *push_perf_note
"validate-x86_64-linux-deb9-integer-simple":
docker:
- image: ghcci/x86_64-linux-deb9:0.2
environment:
<<: *buildenv
INTEGER_LIBRARY: integer-simple
GHC_COLLECTOR_FLAVOR: x86_64-linux-deb9
steps:
- checkout
- *set_git_identity
- *prepare
- *submodules
- *boot
- *configure_unix
- *make
- *bindist
- *store_bindist
- *test
- *store_test_results
- *store_test_artifacts
- *push_perf_note
"validate-x86_64-linux-deb9":
docker:
- image: ghcci/x86_64-linux-deb9:0.1
environment:
<<: *buildenv
GHC_COLLECTOR_FLAVOR: x86_64-linux-deb9
TEST_ENV: x86_64-linux-deb9
steps:
- checkout
- *set_git_identity
- *prepare
- *submodules
- *boot
- *configure_unix
- *make
- *bindist
- *store_bindist
- *test
- *store_test_results
- *store_test_artifacts
- *push_perf_note
"validate-hadrian-x86_64-linux-deb8":
docker:
- image: ghcci/x86_64-linux-deb8:0.1
environment:
<<: *buildenv
steps:
- checkout
- *set_git_identity
- *prepare
- *submodules
- *boot
- *configure_unix
- *build_hadrian
"validate-x86_64-linux-deb8-unreg":
docker:
- image: ghcci/x86_64-linux-deb8:0.1
environment:
<<: *buildenv
TEST_ENV: x86_64-linux-deb8-unreg
steps:
- checkout
- *set_git_identity
- *prepare
- *submodules
- *boot
- *configure_unreg
- *make
- *test
- *store_test_results
- *push_perf_note
- *store_test_artifacts
"validate-x86_64-linux-deb9-llvm":
docker:
- image: ghcci/x86_64-linux-deb9:0.2
environment:
<<: *buildenv
BUILD_FLAVOUR: perf-llvm
TEST_ENV: x86_64-linux-deb9-llvm
steps:
- run:
name: Verify that llc works
command: llc
- checkout
- *set_git_identity
- *prepare
- *submodules
- *boot
- *configure_unix
- *make
- *test
- *store_test_results
- *store_test_artifacts
- *push_perf_note
# Nightly build with -DDEBUG using devel2 flavour
"validate-x86_64-linux-deb8-debug":
docker:
- image: ghcci/x86_64-linux-deb8:0.1
environment:
BUILD_FLAVOUR: devel2
<<: *buildenv
TEST_ENV: x86_64-linux-deb8-debug
SKIP_PERF_TESTS: YES
steps:
- checkout
- *set_git_identity
- *prepare
- *submodules
- *boot
- *configure_unix
- *make
- *test
- *store_test_results
- *store_test_artifacts
- *push_perf_note
"validate-i386-linux-deb9":
docker:
- image: ghcci/i386-linux-deb9:0.2
environment:
<<: *buildenv
GHC_COLLECTOR_FLAVOR: i386-linux-deb9
TEST_ENV: i386-linux-deb9
steps:
- checkout
- *set_git_identity
- *prepare
- *submodules
- *boot
- *configure_unix_32
- *make
- *bindist
- *store_bindist
- *test
- *store_test_results
- *store_test_artifacts
- *push_perf_note
"validate-i386-linux-deb8":
docker:
- image: ghcci/i386-linux-deb8:0.1
environment:
<<: *buildenv
GHC_COLLECTOR_FLAVOR: i386-linux
TEST_ENV: i386-linux-deb8
steps:
- checkout
- *set_git_identity
- *prepare
- *submodules
- *boot
- *configure_unix_32
- *make
- *bindist
- *store_bindist
- *test
- *store_test_results
- *store_test_artifacts
- *push_perf_note
"validate-x86_64-linux-centos7":
docker:
- image: ghcci/x86_64-linux-centos7:0.1
environment:
<<: *buildenv
GHC_COLLECTOR_FLAVOR: x86_64-centos7
LANG: en_US.UTF-8
TEST_ENV: x86_64-centos7
# Sphinx is too old
BUILD_SPHINX_PDF: NO
steps:
- checkout
- *set_git_identity
- *prepare
- *submodules
- *boot
- *configure_unix
- *make
- *bindist
- *store_bindist
- *test
- *store_test_results
- *store_test_artifacts
- *push_perf_note
"validate-x86_64-linux-fedora27":
docker:
- image: ghcci/x86_64-linux-fedora27:0.1
environment:
<<: *buildenv
GHC_COLLECTOR_FLAVOR: x86_64-fedora
TEST_ENV: x86_64-linux-fedora27
steps:
- checkout
- *set_git_identity
- *prepare
- *submodules
- *boot
- *configure_unix
- *make
- *bindist
- *store_bindist
- *test
- *store_test_results
- *store_test_artifacts
- *push_perf_note
"slow-validate-x86_64-linux-deb8":
docker:
- image: ghcci/x86_64-linux-deb8:0.1
environment:
<<: *buildenv
GHC_COLLECTOR_FLAVOR: x86_64-linux-deb8
steps:
- checkout
- *prepare
- *submodules
- *boot
- *configure_unix
- *make
- *slowtest
- *store_test_results
- *store_test_artifacts
- *push_perf_note
workflows:
version: 2
validate:
jobs:
- validate-x86_64-linux-deb8:
filters:
<<: [*only_release_tags, *ignore_gitlab_branches]
# FreeBSD disabled: https://github.com/haskell/unix/issues/102
# - validate-x86_64-freebsd
- validate-x86_64-darwin:
filters:
<<: [*only_release_tags, *ignore_gitlab_branches]
- validate-x86_64-linux-deb8-llvm:
filters:
<<: *ignore_gitlab_branches
- validate-i386-linux-deb8:
filters:
<<: [*only_release_tags, *ignore_gitlab_branches]
- validate-x86_64-linux-deb9:
filters:
<<: [*only_release_tags, *ignore_gitlab_branches]
- validate-i386-linux-deb9:
filters:
<<: [*only_release_tags, *ignore_gitlab_branches]
- validate-x86_64-linux-centos7:
filters:
<<: [*only_release_tags, *ignore_gitlab_branches]
- validate-hadrian-x86_64-linux-deb8:
filters:
<<: *ignore_gitlab_branches
- validate-x86_64-linux-fedora27:
filters:
<<: [*only_release_tags, *ignore_gitlab_branches]
nightly:
triggers:
- schedule:
cron: "0 0 * * *"
filters:
branches:
only:
- master
jobs:
- validate-x86_64-linux-deb8-unreg
- validate-x86_64-linux-deb8-llvm
- validate-x86_64-linux-deb8-debug
- validate-x86_64-linux-deb9
- validate-x86_64-linux-deb9-integer-simple
- slow-validate-x86_64-linux-deb8
notify:
webhooks:
- url: https://phabricator.haskell.org/harbormaster/hook/circleci/
#!/usr/bin/env bash
set -euo pipefail
# Use github.com/ghc for those submodule repositories we couldn't connect to.
git config remote.origin.url git://github.com/ghc/ghc.git
git config --global url."git://github.com/ghc/packages-".insteadOf git://github.com/ghc/packages/
git submodule init # Don't be quiet, we want to show these urls.
git submodule --quiet update --recursive # Now we can be quiet again.
#!/usr/bin/env bash
# vim: sw=2 et
set -euo pipefail
fail() {
echo "ERROR: $*" >&2
exit 1
}
if [ "$CIRCLE_REPOSITORY_URL" != "git@github.com:ghc/ghc.git" ]; then
exit 0
fi
GHC_ORIGIN=git@git.haskell.org:ghc
# Add git.haskell.org as a known host.
echo "|1|F3mPVCE55+KfApNIMYQ3Dv39sGE=|1bRkvJEJhAN2R0LE/lAjFCEJGl0= ecdsa-sha2-nistp256 AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBBUZS9jGBkE5UzpSo6irnIgcQcfzvbuIOsFc8+N61FwtZncRntbaKPuUimOFPgeaUZLl6Iajz6IIs7aduU0/v+I=" >> ~/.ssh/known_hosts
echo "|1|2VUMjYSRVpT2qJPA0rA9ap9xILY=|5OThkI4ED9V0J+Es7D5FOD55Klk= ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQC+3TLluLAO4lkW60W+N2DFkS+WoRFGqLwHzgd1ifxG9TIm31wChPY3E/hgMnJmgGqWCF4UDUemmyCycEaL7FtKfzjTAclg9EfpQnozyE3T5hIo2WL7SN5O8ttG/bYGuDnn14jLnWwJyN4oz/znWFiDG9e2Oc9YFNlQ+PK8ae5xR4gqBB7EOoj9J1EiPqG2OXRr5Mei3TLsRDU6fnz/e4oFJpKWWeN6M63oePv0qoaGjxcrATZUWsuWrxVMmYo9kP1xRuFJbAUw2m4uVP+793SW1zxySi1HBMtJG+gCDdZZSwYbkV1hassLWBHv1qPttncfX8Zek3Z3VolaTmfWJTo9" >> ~/.ssh/known_hosts
# Check that a git note doesn't already exist.
# This is a precaution, as we reset refs/notes/perf and we want to avoid data loss.
if [ $(git notes --ref=perf list | wc -l) -ne 0 ]
then
fail "Found an existing git note on HEAD. Expected no git note."
fi
# Assert that the METRICS_FILE exists and can be read.
if [ "$METRICS_FILE" = "" ] || ! [ -r $METRICS_FILE ]
then
fail "Metrics file not found: $METRICS_FILE"
fi
# Reset the git notes and append the metrics file to the notes, then push and return the result.
# This is favoured over a git notes merge as it avoids potential data loss/duplication from the merge strategy.
function reset_append_note_push {
git fetch -f $GHC_ORIGIN refs/notes/perf:refs/notes/perf || true
echo "git notes --ref=perf append -F $METRICS_FILE HEAD"
git notes --ref=perf append -F $METRICS_FILE HEAD
git push $GHC_ORIGIN refs/notes/perf
}
# Push the metrics file as a git note. This may fail if another task pushes a note first. In that case
# the latest note is fetched and appended.
MAX_RETRY=20
until reset_append_note_push || [ $MAX_RETRY -le 0 ]
do
((MAX_RETRY--))
echo ""
echo "Failed to push git notes. Fetching, appending, and retrying... $MAX_RETRY retries left."
done
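# Aside (not part of this script): a hypothetical way to read the pushed
# metrics back from a local checkout, assuming the same refs/notes/perf ref
# used above:
#
#   git fetch git@git.haskell.org:ghc refs/notes/perf:refs/notes/perf
#   git notes --ref=perf show HEAD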
@@ -2,7 +2,7 @@ variables:
   GIT_SSL_NO_VERIFY: "1"
   # Commit of ghc/ci-images repository from which to pull Docker images
-  DOCKER_REV: ac65f31dcffb09cd7ca7aaa70f447fcbb19f427f
+  DOCKER_REV: 88e952f165f48cfb956ac9a2486a9263aa4f777c
   # Sequential version number capturing the versions of all tools fetched by
   # .gitlab/win32-init.sh.
@@ -27,6 +27,7 @@ stages:
   - hackage    # head.hackage testing
   - deploy     # push documentation
+# N.B. Don't run on wip/ branches; instead, run on merge requests.
 .only-default: &only-default
   only:
     - master
@@ -70,16 +71,38 @@ ghc-linters:
       refs:
         - merge_requests
+lint-linters:
+  <<: *only-default
+  stage: lint
+  image: "registry.gitlab.haskell.org/ghc/ci-images/linters:$DOCKER_REV"
+  script:
+    - mypy .gitlab/linters/*.py
+  dependencies: []
+  tags:
+    - lint
+
+lint-testsuite:
+  <<: *only-default
+  stage: lint
+  image: "registry.gitlab.haskell.org/ghc/ci-images/x86_64-linux-deb9:$DOCKER_REV"
+  script:
+    - make -Ctestsuite list_broken TEST_HC=ghc
+  dependencies: []
+  tags:
+    - lint
+
 # We allow the submodule checker to fail when run on merge requests (to
 # accomodate, e.g., haddock changes not yet upstream) but not on `master` or
 # Marge jobs.
 .lint-submods:
+  <<: *only-default
   stage: lint
   image: "registry.gitlab.haskell.org/ghc/ci-images/linters:$DOCKER_REV"
   script:
    - git fetch "$CI_MERGE_REQUEST_PROJECT_URL" $CI_MERGE_REQUEST_TARGET_BRANCH_NAME
    - base="$(git merge-base FETCH_HEAD $CI_COMMIT_SHA)"
    - "echo Linting submodule changes between $base..$CI_COMMIT_SHA"
+   - git submodule foreach git remote update
    - submodchecker .git $(git rev-list $base..$CI_COMMIT_SHA)
  dependencies: []
  tags:
@@ -109,6 +132,7 @@ lint-submods-branch:
   extends: .lint-submods
   script:
     - "echo Linting submodule changes between $CI_COMMIT_BEFORE_SHA..$CI_COMMIT_SHA"
+    - git submodule foreach git remote update
     - submodchecker .git $(git rev-list $CI_COMMIT_BEFORE_SHA..$CI_COMMIT_SHA)
   only:
     refs:
@@ -116,6 +140,7 @@ lint-submods-branch:
       - /ghc-[0-9]+\.[0-9]+/
 .lint-changelogs:
+  <<: *only-default
   stage: lint
   image: "registry.gitlab.haskell.org/ghc/ci-images/linters:$DOCKER_REV"
   dependencies: []
@@ -149,11 +174,10 @@ lint-release-changelogs:
 .validate-hadrian:
   <<: *only-default
-  allow_failure: true
   script:
     - cabal update
     - git clean -xdf && git submodule foreach git clean -xdf
-    - bash .circleci/prepare-system.sh
+    - .gitlab/prepare-system.sh
    - if [[ -d ./cabal-cache ]]; then cp -R ./.cabal-cache ~/.cabal-cache; fi
    - ./boot
    - ./configure $CONFIGURE_ARGS
@@ -204,7 +228,7 @@ hadrian-ghc-in-ghci:
     - cabal update
     - cd hadrian; cabal new-build --project-file=ci.project; cd ..
     - git clean -xdf && git submodule foreach git clean -xdf
-    - bash .circleci/prepare-system.sh
+    - .gitlab/prepare-system.sh
    - if [[ -d ./cabal-cache ]]; then cp -R ./.cabal-cache ~/.cabal-cache; fi
    - ./boot
    - ./configure $CONFIGURE_ARGS
@@ -352,7 +376,7 @@ validate-x86_64-darwin:
         echo "EXTRA_HADDOCK_OPTS += --hyperlinked-source --quickjump" >> mk/build.mk
       fi
-    - bash .circleci/prepare-system.sh
+    - .gitlab/prepare-system.sh
     # workaround for docker permissions
     - sudo chown ghc:ghc -R .
   after_script:
@@ -375,6 +399,8 @@ validate-x86_64-darwin:
   variables:
     TEST_ENV: "aarch64-linux-deb9"
     BIN_DIST_PREP_TAR_COMP: "bindistprep/ghc-aarch64-linux-deb9.tar.xz"
+    # Inexplicably makeindex fails
+    BUILD_SPHINX_PDF: "NO"
   cache:
     key: linux-aarch64-deb9
   tags:
@@ -441,7 +467,8 @@ nightly-i386-linux-deb9:
   cache:
     key: linux-x86_64-deb9
-validate-x86_64-linux-deb9:
+# Disabled to reduce CI load
+.validate-x86_64-linux-deb9:
   extends: .build-x86_64-linux-deb9
   artifacts:
     when: always
@@ -463,14 +490,29 @@ validate-x86_64-linux-deb9-debug:
   stage: build
   variables:
     BUILD_FLAVOUR: validate
+    TEST_TYPE: slowtest
     TEST_ENV: "x86_64-linux-deb9-debug"
+  artifacts:
+    when: always
+    expire_in: 2 week
+
+# Disabled to alleviate CI load
+.validate-x86_64-linux-deb9-llvm:
+  extends: .build-x86_64-linux-deb9
+  stage: full-build
+  variables:
+    BUILD_FLAVOUR: perf-llvm
+    TEST_ENV: "x86_64-linux-deb9-llvm"
+
-validate-x86_64-linux-deb9-llvm:
+nightly-x86_64-linux-deb9-llvm:
   extends: .build-x86_64-linux-deb9
   stage: full-build
   variables:
     BUILD_FLAVOUR: perf-llvm
     TEST_ENV: "x86_64-linux-deb9-llvm"
+  only:
+    variables:
+      - $NIGHTLY
 validate-x86_64-linux-deb9-integer-simple:
   extends: .build-x86_64-linux-deb9
@@ -504,11 +546,10 @@ release-x86_64-linux-deb9-dwarf:
   extends: .validate-linux
   stage: build
   image: "registry.gitlab.haskell.org/ghc/ci-images/x86_64-linux-deb9:$DOCKER_REV"
-  allow_failure: true
   variables:
     CONFIGURE_ARGS: "--enable-dwarf-unwind"
     BUILD_FLAVOUR: dwarf
-    TEST_ENV: "x86_64-linux-deb9"
+    TEST_ENV: "x86_64-linux-deb9-dwarf"
   artifacts:
     when: always
     expire_in: 2 week
@@ -535,6 +576,30 @@ release-x86_64-linux-deb8:
     when: always
     expire_in: 2 week
+#################################
+# x86_64-linux-alpine
+#################################
+release-x86_64-linux-alpine:
+  extends: .validate-linux
+  stage: full-build
+  image: "registry.gitlab.haskell.org/ghc/ci-images/x86_64-linux-alpine:$DOCKER_REV"
+  # There are currently a few failing tests
+  allow_failure: true
+  variables:
+    BUILD_SPHINX_PDF: "NO"
+    TEST_ENV: "x86_64-linux-alpine"
+    BIN_DIST_PREP_TAR_COMP: "bindistprep/ghc-x86_64-alpine-linux.tar.xz"
+    # Can't use ld.gold due to #13958.
+    CONFIGURE_ARGS: "--disable-ld-override"
+  only:
+    - tags
+  cache:
+    key: linux-x86_64-alpine
+  artifacts:
+    when: always
+    expire_in: 2 week
 #################################
 # x86_64-linux-centos7
 #################################
@@ -776,7 +841,7 @@ doc-tarball:
     - x86_64-linux
   image: ghcci/x86_64-linux-deb9:0.2
   dependencies:
-    - validate-x86_64-linux-deb9
+    - validate-x86_64-linux-deb9-debug
     - validate-x86_64-windows
   variables:
     LINUX_BINDIST: "ghc-x86_64-deb9-linux.tar.xz"
...
@@ -12,8 +12,10 @@ from linter import run_linters, RegexpLinter
 linters = [
     RegexpLinter(r'--interactive',
-                 message = "Warning: Use `$(TEST_HC_OPTS_INTERACTIVE)` instead of `--interactive -ignore-dot-ghci -v0`.")
+                 message = "Warning: Use `$(TEST_HC_OPTS_INTERACTIVE)` instead of `--interactive -ignore-dot-ghci -v0`."
+                 ).add_path_filter(lambda path: path.suffix == '.T')
 ]
 if __name__ == '__main__':
-    run_linters(linters, subdir='testsuite')
+    run_linters(linters,
+                subdir='testsuite')
@@ -7,10 +7,11 @@ import sys
 import re
 import textwrap
 import subprocess
-from typing import List, Optional
+from pathlib import Path
+from typing import List, Optional, Callable, Sequence
 from collections import namedtuple
-def lint_failure(file, line_no, line_content, message):
+def lint_failure(file, line_no: int, line_content: str, message: str):
     """ Print a lint failure message. """
     wrapper = textwrap.TextWrapper(initial_indent='  ',
                                    subsequent_indent='    ')
@@ -29,7 +30,7 @@ def lint_failure(file, line_no, line_content, message):
     print(textwrap.dedent(msg))
-def get_changed_files(base_commit, head_commit,
+def get_changed_files(base_commit: str, head_commit: str,
                       subdir: str = '.'):
     """ Get the files changed by the given range of commits. """
     cmd = ['git', 'diff', '--name-only',
@@ -46,12 +47,21 @@ class Linter(object):
     """
     def __init__(self):
         self.warnings = []  # type: List[Warning]
+        self.path_filters = []  # type: List[Callable[[Path], bool]]
     def add_warning(self, w: Warning):
         self.warnings.append(w)
-    def lint(self, path):
-        pass
+    def add_path_filter(self, f: Callable[[Path], bool]) -> "Linter":
+        self.path_filters.append(f)
+        return self
+
+    def do_lint(self, path: Path):
+        if all(f(path) for f in self.path_filters):
+            self.lint(path)
+
+    def lint(self, path: Path):
+        raise NotImplementedError
 class LineLinter(Linter):
     """
@@ -59,32 +69,32 @@ class LineLinter(Linter):
     the given line from a file and calls :func:`add_warning` for any lint
     issues found.
     """
-    def lint(self, path):
-        if os.path.isfile(path):
-            with open(path, 'r') as f:
+    def lint(self, path: Path):
+        if path.is_file():
+            with path.open('r') as f:
                 for line_no, line in enumerate(f):
                     self.lint_line(path, line_no+1, line)
-    def lint_line(self, path, line_no, line):
-        pass
+    def lint_line(self, path: Path, line_no: int, line: str):
+        raise NotImplementedError
 class RegexpLinter(LineLinter):
     """
     A :class:`RegexpLinter` produces the given warning message for
     all lines matching the given regular expression.
     """
-    def __init__(self, regex, message):
+    def __init__(self, regex: str, message: str):
         LineLinter.__init__(self)
         self.re = re.compile(regex)
         self.message = message
-    def lint_line(self, path, line_no, line):
+    def lint_line(self, path: Path, line_no: int, line: str):
         if self.re.search(line):
             w = Warning(path=path, line_no=line_no, line_content=line[:-1],
                         message=self.message)
             self.add_warning(w)
-def run_linters(linters: List[Linter],
+def run_linters(linters: Sequence[Linter],
                 subdir: str = '.') -> None:
     import argparse
     parser = argparse.ArgumentParser()
@@ -96,7 +106,7 @@ def run_linters(linters: List[Linter],
         if path.startswith('.gitlab/linters'):
             continue
         for linter in linters:
-            linter.lint(path)
+            linter.do_lint(Path(path))
     warnings = [warning
                 for linter in linters
...
@@ -86,10 +86,21 @@ read over this page carefully:
   <https://gitlab.haskell.org/ghc/ghc/wikis/building/using>
+A web based code explorer for the GHC source code with semantic analysis
+and type information of the GHC sources is available at:
+
+  <https://haskell-code-explorer.mfix.io/>
+
+Look for `GHC` in `Package-name`. For example, here is the link to
+[GHC-8.6.5](https://haskell-code-explorer.mfix.io/package/ghc-8.6.5).
 If you want to watch issues and code review activities, the following page is a good start:
   <https://gitlab.haskell.org/ghc/ghc/activity>
 How to communicate with us
 ==========================
...
@@ -511,6 +511,7 @@ AC_DEFUN([FP_SETTINGS],
     then
         SettingsCCompilerCommand="$(basename $CC)"
         SettingsCCompilerFlags="$CONF_CC_OPTS_STAGE2"
+        SettingsCxxCompilerFlags="$CONF_CXX_OPTS_STAGE2"
         SettingsHaskellCPPCommand="$(basename $HaskellCPPCmd)"
         SettingsHaskellCPPFlags="$HaskellCPPArgs"
         SettingsLdCommand="$(basename $LdCmd)"
@@ -564,6 +565,7 @@ AC_DEFUN([FP_SETTINGS],
         SettingsOptCommand="$OptCmd"
     fi
     SettingsCCompilerFlags="$CONF_CC_OPTS_STAGE2"
+    SettingsCxxCompilerFlags="$CONF_CXX_OPTS_STAGE2"
     SettingsCCompilerLinkFlags="$CONF_GCC_LINKER_OPTS_STAGE2"
     SettingsCCompilerSupportsNoPie="$CONF_GCC_SUPPORTS_NO_PIE"
     SettingsLdFlags="$CONF_LD_LINKER_OPTS_STAGE2"
@@ -571,6 +573,7 @@ AC_DEFUN([FP_SETTINGS],
     AC_SUBST(SettingsHaskellCPPCommand)
     AC_SUBST(SettingsHaskellCPPFlags)
     AC_SUBST(SettingsCCompilerFlags)
+    AC_SUBST(SettingsCxxCompilerFlags)
     AC_SUBST(SettingsCCompilerLinkFlags)
     AC_SUBST(SettingsCCompilerSupportsNoPie)
     AC_SUBST(SettingsLdCommand)
@@ -866,7 +869,7 @@ case $TargetPlatform in
     esac ;;
 i386-unknown-mingw32) fptools_cv_leading_underscore=yes;;
 x86_64-unknown-mingw32) fptools_cv_leading_underscore=no;;
-*) AC_RUN_IFELSE([AC_LANG_SOURCE([[#ifdef HAVE_NLIST_H
+*) AC_RUN_IFELSE([AC_LANG_SOURCE([[#if defined(HAVE_NLIST_H)
 #include <nlist.h>
 struct nlist xYzzY1[] = {{"xYzzY1", 0},{0}};
 struct nlist xYzzY2[] = {{"_xYzzY2", 0},{0}};
@@ -876,7 +879,7 @@ int main(argc, argv)
 int argc;
 char **argv;
 {
-#ifdef HAVE_NLIST_H
+#if defined(HAVE_NLIST_H)
     if(nlist(argv[0], xYzzY1) == 0 && xYzzY1[0].n_value != 0)
         exit(1);
     if(nlist(argv[0], xYzzY2) == 0 && xYzzY2[0].n_value != 0)
@@ -1650,16 +1653,16 @@ then
   [fptools_cv_timer_create_works],
   [AC_TRY_RUN([
 #include <stdio.h>
-#ifdef HAVE_STDLIB_H
+#if defined(HAVE_STDLIB_H)
 #include <stdlib.h>
 #endif
-#ifdef HAVE_TIME_H
+#if defined(HAVE_TIME_H)
 #include <time.h>
 #endif
-#ifdef HAVE_SIGNAL_H
+#if defined(HAVE_SIGNAL_H)
 #include <signal.h>
 #endif
-#ifdef HAVE_UNISTD_H
+#if defined(HAVE_UNISTD_H)
 #include <unistd.h>
 #endif
...
@@ -567,7 +567,7 @@ lambdas if it is not applied to enough arguments; e.g. (#14561)
 The desugar has special magic to detect such cases: DsExpr.badUseOfLevPolyPrimop.
 And we want that magic to apply to levity-polymorphic compulsory-inline things.
 The easiest way to do this is for hasNoBinding to return True of all things
-that have compulsory unfolding. A very Ids with a compulsory unfolding also
+that have compulsory unfolding. Some Ids with a compulsory unfolding also
 have a binding, but it does not harm to say they don't here, and its a very
 simple way to fix #14561.
...
@@ -29,6 +29,7 @@ module MkId (
         nullAddrId, seqId, lazyId, lazyIdKey,
         coercionTokenId, magicDictId, coerceId,
         proxyHashId, noinlineId, noinlineIdName,
+        coerceName,
         -- Re-export error Ids
         module PrelRules
@@ -71,6 +72,7 @@ import DynFlags
 import Outputable
 import FastString
 import ListSetOps
+import Var (VarBndr(Bndr))
 import qualified GHC.LanguageExtensions as LangExt
 import Data.Maybe       ( maybeToList )
@@ -298,6 +300,24 @@ so the data constructor for T:C had a single argument, namely the
 predicate (C a).  But now we treat that as an ordinary argument, not
 part of the theta-type, so all is well.
+Note [Newtype workers]
+~~~~~~~~~~~~~~~~~~~~~~
+A newtype does not really have a worker. Instead, newtype constructors
+just unfold into a cast. But we need *something* for, say, MkAge to refer
+to. So, we do this:
+
+* The Id used as the newtype worker will have a compulsory unfolding to
+  a cast. See Note [Compulsory newtype unfolding]
+
+* This Id is labeled as a DataConWrapId. We don't want to use a DataConWorkId,
+  as those have special treatment in the back end.
+
+* There is no top-level binding, because the compulsory unfolding
+  means that it will be inlined (to a cast) at every call site.
+
+We probably should have a NewtypeWorkId, but these Ids disappear as soon as
+we desugar anyway, so it seems a step too far.
 Note [Compulsory newtype unfolding]
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 Newtype wrappers, just like workers, have compulsory unfoldings.
@@ -320,6 +340,32 @@ effect whether a wrapper is present or not:
 We'd like 'map Age' to match the LHS. For this to happen, Age
 must be unfolded, otherwise we'll be stuck. This is tested in T16208.
+It also allows for the possibility of levity polymorphic newtypes
+with wrappers (with -XUnliftedNewtypes):
+
+  newtype N (a :: TYPE r) = MkN a
+
+With -XUnliftedNewtypes, this is allowed -- even though MkN is levity-
+polymorphic. It's OK because MkN evaporates in the compiled code, becoming
+just a cast. That is, it has a compulsory unfolding. As long as its
+argument is not levity-polymorphic (which it can't be, according to
+Note [Levity polymorphism invariants] in CoreSyn), and it's saturated,
+no levity-polymorphic code ends up in the code generator. The saturation
+condition is effectively checked by Note [Detecting forced eta expansion]
+in DsExpr.
+
+However, if we make a *wrapper* for a newtype, we get into trouble.
+The saturation condition is no longer checked (because hasNoBinding
+returns False) and indeed we generate a forbidden levity-polymorphic
+binding.
+
+The solution is simple, though: just make the newtype wrappers
+as ephemeral as the newtype workers. In other words, give the wrappers
+compulsory unfoldings and no bindings. The compulsory unfolding is given
+in wrap_unf in mkDataConRep, and the lack of a binding happens in
+TidyPgm.getTyConImplicitBinds, where we say that a newtype has no implicit
+bindings.
 ************************************************************************
 *                                                                      *
 \subsection{Dictionary selectors}
@@ -447,6 +493,8 @@ mkDataConWorkId :: Name -> DataCon -> Id
 mkDataConWorkId wkr_name data_con
   | isNewTyCon tycon
   = mkGlobalId (DataConWrapId data_con) wkr_name wkr_ty nt_work_info
+      -- See Note [Newtype workers]
   | otherwise
   = mkGlobalId (DataConWorkId data_con) wkr_name wkr_ty alg_wkr_info
@@ -575,6 +623,7 @@ But if we inline the wrapper, we get
      map (\a. case i of I# i# a -> Foo i# a) (f a)
 and now case-of-known-constructor eliminates the redundant allocation.
 -}
 mkDataConRep :: DynFlags
@@ -604,7 +653,7 @@ mkDataConRep dflags fam_envs wrap_name mb_bangs data_con
              -- We need to get the CAF info right here because TidyPgm
              -- does not tidy the IdInfo of implicit bindings (like the wrapper)
              -- so it not make sure that the CAF info is sane
-             `setNeverLevPoly` wrap_ty
+             `setLevityInfoWithType` wrap_ty
     wrap_sig = mkClosedStrictSig wrap_arg_dmds (dataConCPR data_con)
@@ -1403,19 +1452,23 @@ coerceId = pcMiscPrelId coerceName ty info
   where
     info = noCafIdInfo `setInlinePragInfo` alwaysInlinePragma
                        `setUnfoldingInfo`  mkCompulsoryUnfolding rhs
-                       `setNeverLevPoly`   ty
-    eqRTy     = mkTyConApp coercibleTyCon  [ liftedTypeKind
-                                           , alphaTy, betaTy ]
-    eqRPrimTy = mkTyConApp eqReprPrimTyCon [ liftedTypeKind
-                                           , liftedTypeKind
-                                           , alphaTy, betaTy ]
-    ty        = mkSpecForAllTys [alphaTyVar, betaTyVar] $
-                mkInvisFunTy eqRTy $
-                mkVisFunTy alphaTy betaTy
-    [eqR,x,eq] = mkTemplateLocals [eqRTy, alphaTy, eqRPrimTy]
-    rhs = mkLams [alphaTyVar, betaTyVar, eqR, x] $
-          mkWildCase (Var eqR) eqRTy betaTy $
+    eqRTy     = mkTyConApp coercibleTyCon  [ tYPE r, a, b ]
+    eqRPrimTy = mkTyConApp eqReprPrimTyCon [ tYPE r, tYPE r, a, b ]
+    ty        = mkForAllTys [ Bndr rv Inferred
+                            , Bndr av Specified
+                            , Bndr bv Specified
+                            ] $
+                mkInvisFunTy eqRTy $
+                mkVisFunTy a b
+
+    bndrs@[rv,av,bv] = mkTemplateKiTyVar runtimeRepTy
+                        (\r -> [tYPE r, tYPE r])
+
+    [r, a, b]  = mkTyVarTys bndrs
+    [eqR,x,eq] = mkTemplateLocals [eqRTy, a, eqRPrimTy]
+    rhs = mkLams (bndrs ++ [eqR, x]) $
+          mkWildCase (Var eqR) eqRTy b $
           [(DataAlt coercibleDataCon, [eq], Cast (Var x) (mkCoVarCo eq))]
 {-
...
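An illustrative aside (not part of the commits above), expanding on Note [Newtype workers]: a use of a newtype constructor compiles to a cast rather than to a real data constructor application, the same cast that `coerce` inserts. A minimal sketch with hypothetical names:

{-# LANGUAGE NoImplicitPrelude #-}
module NewtypeCastSketch where

import Prelude
import Data.Coerce (coerce)

-- A plain newtype: MkAge has no runtime representation of its own.
newtype Age = MkAge Int

-- 'MkAge' and 'coerce' both become casts between Int and Age in Core;
-- no constructor is allocated at runtime.
birthday :: Age -> Age
birthday a = MkAge (coerce a + 1)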
@@ -4,6 +4,7 @@
 -}
 {-# LANGUAGE CPP #-}
+{-# LANGUAGE DeriveFunctor #-}
 {-# LANGUAGE PatternSynonyms #-}
 #if !defined(GHC_LOADED_INTO_GHCI)
@@ -148,20 +149,18 @@ pattern UniqResult x y = (# x, y #)
 #else
 data UniqResult result = UniqResult !result {-# UNPACK #-} !UniqSupply
+  deriving (Functor)
 #endif
 -- | A monad which just gives the ability to obtain 'Unique's
 newtype UniqSM result = USM { unUSM :: UniqSupply -> UniqResult result }
+    deriving (Functor)
 instance Monad UniqSM where
   (>>=) = thenUs
   (>>)  = (*>)
-instance Functor UniqSM where
-    fmap f (USM x) = USM (\us0 -> case x us0 of
-                            UniqResult r us1 -> UniqResult (f r) us1)
 instance Applicative UniqSM where
     pure = returnUs
     (USM f) <*> (USM x) = USM $ \us0 -> case f us0 of
...
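An illustrative aside (not part of the commits above): `deriving (Functor)` on a state-passing newtype like `UniqSM` generates the same `fmap` as the hand-written instance that the patch deletes. A minimal sketch with hypothetical names:

{-# LANGUAGE DeriveFunctor #-}
module SupplySketch where

-- A state-passing newtype in the shape of UniqSM.
newtype Supply s a = Supply { runSupply :: s -> (a, s) }
  deriving (Functor)

-- The derived instance is equivalent to the hand-written one removed above:
--   fmap f (Supply g) = Supply (\s -> case g s of (a, s') -> (f a, s'))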
@@ -5,6 +5,7 @@
 -- CmmLint: checking the correctness of Cmm statements and expressions
 --
 -----------------------------------------------------------------------------
+{-# LANGUAGE DeriveFunctor #-}
 {-# LANGUAGE GADTs #-}
 module CmmLint (
   cmmLint, cmmLintGraph
@@ -24,7 +25,7 @@ import PprCmm ()
 import Outputable
 import DynFlags
-import Control.Monad (liftM, ap)
+import Control.Monad (ap)
 -- Things to check:
 --   - invariant on CmmBlock in CmmExpr (see comment there)
@@ -212,9 +213,7 @@ checkCond _ expr
 -- just a basic error monad:
 newtype CmmLint a = CmmLint { unCL :: DynFlags -> Either SDoc a }
+  deriving (Functor)
-instance Functor CmmLint where
-      fmap = liftM
 instance Applicative CmmLint where
       pure a = CmmLint (\_ -> Right a)
...
@@ -198,6 +198,8 @@ necessary to the stack to accommodate it (e.g. 2).
 ----------------------------------------------------------------------------- -}
 {
+{-# LANGUAGE TupleSections #-}
 module CmmParse ( parseCmmFile ) where
 import GhcPrelude
@@ -808,7 +810,7 @@ foreign_formals :: { [CmmParse (LocalReg, ForeignHint)] }
         | foreign_formal ',' foreign_formals   { $1 : $3 }
 foreign_formal :: { CmmParse (LocalReg, ForeignHint) }
-        : local_lreg            { do e <- $1; return (e, (inferCmmHint (CmmReg (CmmLocal e)))) }
+        : local_lreg            { do e <- $1; return (e, inferCmmHint (CmmReg (CmmLocal e))) }
         | STRING local_lreg     {% do h <- parseCmmHint $1;
                                       return $ do
                                          e <- $2; return (e,h) }
@@ -999,36 +1001,36 @@ machOps = listToUFM $
 callishMachOps :: UniqFM ([CmmExpr] -> (CallishMachOp, [CmmExpr]))
 callishMachOps = listToUFM $
         map (\(x, y) -> (mkFastString x, y)) [
-        ( "write_barrier", (,) MO_WriteBarrier ),
+        ( "write_barrier", (MO_WriteBarrier,)),
         ( "memcpy", memcpyLikeTweakArgs MO_Memcpy ),
         ( "memset", memcpyLikeTweakArgs MO_Memset ),
        ( "memmove", memcpyLikeTweakArgs MO_Memmove ),
        ( "memcmp", memcpyLikeTweakArgs MO_Memcmp ),
-        ("prefetch0", (,) $ MO_Prefetch_Data 0),
-        ("prefetch1", (,) $ MO_Prefetch_Data 1),
-        ("prefetch2", (,) $ MO_Prefetch_Data 2),
-        ("prefetch3", (,) $ MO_Prefetch_Data 3),
-        ( "popcnt8",  (,) $ MO_PopCnt W8  ),
-        ( "popcnt16", (,) $ MO_PopCnt W16 ),
-        ( "popcnt32", (,) $ MO_PopCnt W32 ),
-        ( "popcnt64", (,) $ MO_PopCnt W64 ),
-        ( "pdep8",  (,) $ MO_Pdep W8  ),
-        ( "pdep16", (,) $ MO_Pdep W16 ),
-        ( "pdep32", (,) $ MO_Pdep W32 ),
-        ( "pdep64", (,) $ MO_Pdep W64 ),
-        ( "pext8",  (,) $ MO_Pext W8  ),
-        ( "pext16", (,) $ MO_Pext W16 ),
-        ( "pext32", (,) $ MO_Pext W32 ),
-        ( "pext64", (,) $ MO_Pext W64 ),
-        ( "cmpxchg8",  (,) $ MO_Cmpxchg W8  ),
-        ( "cmpxchg16", (,) $ MO_Cmpxchg W16 ),
-        ( "cmpxchg32", (,) $ MO_Cmpxchg W32 ),
-        ( "cmpxchg64", (,) $ MO_Cmpxchg W64 )
+        ("prefetch0", (MO_Prefetch_Data 0,)),
+        ("prefetch1", (MO_Prefetch_Data 1,)),
+        ("prefetch2", (MO_Prefetch_Data 2,)),
+        ("prefetch3", (MO_Prefetch_Data 3,)),
+        ( "popcnt8",  (MO_PopCnt W8,)),
+        ( "popcnt16", (MO_PopCnt W16,)),
+        ( "popcnt32", (MO_PopCnt W32,)),
+        ( "popcnt64", (MO_PopCnt W64,)),
+        ( "pdep8",  (MO_Pdep W8,)),
+        ( "pdep16", (MO_Pdep W16,)),
+        ( "pdep32", (MO_Pdep W32,)),
+        ( "pdep64", (MO_Pdep W64,)),
+        ( "pext8",  (MO_Pext W8,)),
+        ( "pext16", (MO_Pext W16,)),
+        ( "pext32", (MO_Pext W32,)),
+        ( "pext64", (MO_Pext W64,)),
+        ( "cmpxchg8",  (MO_Cmpxchg W8,)),
+        ( "cmpxchg16", (MO_Cmpxchg W16,)),
+        ( "cmpxchg32", (MO_Cmpxchg W32,)),
+        ( "cmpxchg64", (MO_Cmpxchg W64,))
         -- ToDo: the rest, maybe
         -- edit: which rest?
...
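An illustrative aside (not part of the commits above): with TupleSections, the new `(MO_PopCnt W8,)` spellings above are sugar for the `(,) $ MO_PopCnt W8` sections they replace. A minimal sketch with hypothetical names:

{-# LANGUAGE TupleSections #-}
module TupleSectionSketch where

-- A left tuple section is sugar for a lambda supplying the missing component:
--   ("write_barrier",)  ==  \y -> ("write_barrier", y)  ==  (,) "write_barrier"
pairs :: [(String, Int)]
pairs = map ("count",) [1, 2, 3]   -- [("count",1),("count",2),("count",3)]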
+{-# LANGUAGE DeriveFunctor #-}
 {-# LANGUAGE GADTs #-}
 {-# LANGUAGE RankNTypes #-}
 {-# LANGUAGE ScopedTypeVariables #-}
 {-# LANGUAGE TypeFamilies #-}
+{-# LANGUAGE StandaloneDeriving #-}
 module Hoopl.Block
     ( C
     , O
@@ -64,14 +66,8 @@ data MaybeC ex t where
   JustC    :: t -> MaybeC C t
   NothingC ::      MaybeC O t
-instance Functor (MaybeO ex) where
-  fmap _ NothingO = NothingO
-  fmap f (JustO a) = JustO (f a)
-
-instance Functor (MaybeC ex) where
-  fmap _ NothingC = NothingC
-  fmap f (JustC a) = JustC (f a)
+deriving instance Functor (MaybeO ex)
+deriving instance Functor (MaybeC ex)
 -- -----------------------------------------------------------------------------
 -- The Block type
...
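An illustrative aside (not part of the commits above): StandaloneDeriving is what lets DeriveFunctor handle GADTs like MaybeO/MaybeC, whose constructors fix the index in their return types. A minimal sketch with hypothetical names, under the assumption that an attached deriving clause would be rejected for such non-vanilla constructors:

{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE StandaloneDeriving #-}
module GadtDerivingSketch where

data C  -- index types mirroring Hoopl's C (closed) and O (open)
data O

-- Which constructor is available depends on the index 'ex'.
data MaybeThing ex t where
  JustThing    :: t -> MaybeThing C t
  NothingThing ::      MaybeThing O t

-- A standalone deriving declaration produces the obvious fmap over 't'.
deriving instance Functor (MaybeThing ex)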
-{-# LANGUAGE CPP, GADTs #-}
+{-# LANGUAGE CPP, DeriveFunctor, GADTs, PatternSynonyms #-}
 -----------------------------------------------------------------------------
 --
@@ -61,7 +61,7 @@ import Data.Map (Map)
 import Data.Word
 import System.IO
 import qualified Data.Map as Map
-import Control.Monad (liftM, ap)
+import Control.Monad (ap)
 import qualified Data.Array.Unsafe as U ( castSTUArray )
 import Data.Array.ST
@@ -1082,10 +1082,7 @@ pprExternDecl lbl
   <> semi
 type TEState = (UniqSet LocalReg, Map CLabel ())
-newtype TE a = TE { unTE :: TEState -> (a, TEState) }
-instance Functor TE where
-   fmap = liftM
+newtype TE a = TE { unTE :: TEState -> (a, TEState) } deriving (Functor)
 instance Applicative TE where
    pure a = TE $ \s -> (a, s)
...
@@ -577,7 +577,7 @@ isSimpleScrut _ _ = return False
 isSimpleOp :: StgOp -> [StgArg] -> FCode Bool
 -- True iff the op cannot block or allocate
-isSimpleOp (StgFCallOp (CCall (CCallSpec _ _ safe)) _ _) _ = return $! not (playSafe safe)
+isSimpleOp (StgFCallOp (CCall (CCallSpec _ _ safe)) _) _ = return $! not (playSafe safe)
 -- dataToTag# evalautes its argument, see Note [dataToTag#] in primops.txt.pp
 isSimpleOp (StgPrimOp DataToTagOp) _ = return False
 isSimpleOp (StgPrimOp op) stg_args = do
...
+{-# LANGUAGE DeriveFunctor #-}
 -- | Our extended FCode monad.
 -- We add a mapping from names to CmmExpr, to support local variable names in
@@ -53,7 +54,7 @@ import UniqFM
 import Unique
 import UniqSupply
-import Control.Monad (liftM, ap)
+import Control.Monad (ap)
 -- | The environment contains variable definitions or blockids.
 data Named
@@ -73,6 +74,7 @@ type Decls = [(FastString,Named)]
 -- and a list of local declarations. Returns the resulting list of declarations.
 newtype CmmParse a
         = EC { unEC :: String -> Env -> Decls -> FCode (Decls, a) }
+        deriving (Functor)
 type ExtCode = CmmParse ()
@@ -82,9 +84,6 @@ returnExtFC a = EC $ \_ _ s -> return (s, a)
 thenExtFC :: CmmParse a -> (a -> CmmParse b) -> CmmParse b
 thenExtFC (EC m) k = EC $ \c e s -> do (s',r) <- m c e s; unEC (k r) c e s'
-instance Functor CmmParse where
-      fmap = liftM
 instance Applicative CmmParse where
       pure = returnExtFC
       (<*>) = ap
...
@@ -619,6 +619,9 @@ typeToStgFArgType typ
   | tycon == mutableByteArrayPrimTyCon = StgByteArrayType
   | otherwise = StgPlainType
   where
-  -- should be a tycon app, since this is a foreign call
+  -- Should be a tycon app, since this is a foreign call. We look
+  -- through newtypes so the offset does not change if a user replaces
+  -- a type in a foreign function signature with a representationally
+  -- equivalent newtype.
   tycon = tyConAppTyCon (unwrapType typ)
+{-# LANGUAGE DeriveFunctor #-}
 {-# LANGUAGE GADTs #-}
 -----------------------------------------------------------------------------
@@ -111,9 +112,7 @@ import Data.List
 --------------------------------------------------------
 newtype FCode a = FCode { doFCode :: CgInfoDownwards -> CgState -> (a, CgState) }
+  deriving (Functor)
-instance Functor FCode where
-  fmap f (FCode g) = FCode $ \i s -> case g i s of (a, s') -> (f a, s')
 instance Applicative FCode where
   pure val = FCode (\_info_down state -> (val, state))
...