diff --git a/.circleci/config.yml b/.circleci/config.yml index 062adcb5ec..e2be3f6528 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -60,14 +60,14 @@ version: 2 jobs: stackage-lts22: docker: - - image: haskell:9.6.5-slim-buster + - image: haskell:9.6.6-slim-bullseye environment: - STACK_FILE: "stack-lts22.yaml" <<: *defaults - stackage-nightly: + stackage-lts23: docker: - - image: haskell:9.8.2-slim-buster + - image: haskell:9.8.4-slim-bullseye environment: - STACK_FILE: "stack.yaml" <<: *defaults @@ -77,4 +77,4 @@ workflows: multiple-ghcs: jobs: - stackage-lts22 - - stackage-nightly + - stackage-lts23 diff --git a/.github/actions/bindist-actions/action-centos7/action.yaml b/.github/actions/bindist-actions/action-centos7/action.yaml new file mode 100644 index 0000000000..66f97295f0 --- /dev/null +++ b/.github/actions/bindist-actions/action-centos7/action.yaml @@ -0,0 +1,23 @@ +description: Container for centos7 +inputs: + stage: + description: which stage to build + required: true + version: + description: which GHC version to build/test + required: false +name: action-centos7 +runs: + entrypoint: .github/scripts/entrypoint.sh + env: + GHC_VERSION: ${{ inputs.version }} + INSTALL: sed -i 's/mirrorlist/#mirrorlist/g' /etc/yum.repos.d/CentOS-* && sed + -i 's|#baseurl=http://mirror.centos.org|baseurl=http://vault.centos.org|g' /etc/yum.repos.d/CentOS-* + && yum -y install epel-release && yum install -y + STAGE: ${{ inputs.stage }} + TOOLS: autoconf automake binutils bzip2 coreutils curl elfutils-devel elfutils-libs + findutils gcc gcc-c++ git gmp gmp-devel jq lbzip2 make ncurses ncurses-compat-libs + ncurses-devel openssh-clients patch perl pxz python3 sqlite sudo wget which + xz zlib-devel patchelf + image: centos:7 + using: docker diff --git a/.github/actions/bindist-actions/action-deb10/action.yaml b/.github/actions/bindist-actions/action-deb10/action.yaml new file mode 100644 index 0000000000..da96b04669 --- /dev/null +++ b/.github/actions/bindist-actions/action-deb10/action.yaml @@ -0,0 +1,21 @@ +description: Container for deb10 +inputs: + stage: + description: which stage to build + required: true + version: + description: which GHC version to build/test + required: false +name: action-deb10 +runs: + entrypoint: .github/scripts/entrypoint.sh + env: + GHC_VERSION: ${{ inputs.version }} + INSTALL: apt-get update && apt-get install -y + STAGE: ${{ inputs.stage }} + TOOLS: libnuma-dev zlib1g-dev libgmp-dev libgmp10 libssl-dev liblzma-dev libbz2-dev + git wget lsb-release software-properties-common gnupg2 apt-transport-https gcc + autoconf automake build-essential curl ghc gzip libffi-dev libncurses-dev libncurses5 + libtinfo5 patchelf + image: debian:10 + using: docker diff --git a/.github/actions/bindist-actions/action-deb11/action.yaml b/.github/actions/bindist-actions/action-deb11/action.yaml new file mode 100644 index 0000000000..8ffe78e1db --- /dev/null +++ b/.github/actions/bindist-actions/action-deb11/action.yaml @@ -0,0 +1,21 @@ +description: Container for deb11 +inputs: + stage: + description: which stage to build + required: true + version: + description: which GHC version to build/test + required: false +name: action-deb11 +runs: + entrypoint: .github/scripts/entrypoint.sh + env: + GHC_VERSION: ${{ inputs.version }} + INSTALL: apt-get update && apt-get install -y + STAGE: ${{ inputs.stage }} + TOOLS: libnuma-dev zlib1g-dev libgmp-dev libgmp10 libssl-dev liblzma-dev libbz2-dev + git wget lsb-release software-properties-common gnupg2 apt-transport-https gcc + 
autoconf automake build-essential curl ghc gzip libffi-dev libncurses-dev libncurses5 + libtinfo5 patchelf + image: debian:11 + using: docker diff --git a/.github/actions/bindist-actions/action-deb9/action.yaml b/.github/actions/bindist-actions/action-deb9/action.yaml new file mode 100644 index 0000000000..693e3845a5 --- /dev/null +++ b/.github/actions/bindist-actions/action-deb9/action.yaml @@ -0,0 +1,24 @@ +description: Container for deb9 +inputs: + stage: + description: which stage to build + required: true + version: + description: which GHC version to build/test + required: false +name: action-deb9 +runs: + entrypoint: .github/scripts/entrypoint.sh + env: + GHC_VERSION: ${{ inputs.version }} + INSTALL: sed -i s/deb.debian.org/archive.debian.org/g /etc/apt/sources.list && + sed -i 's|security.debian.org|archive.debian.org/|g' /etc/apt/sources.list && + sed -i /-updates/d /etc/apt/sources.list && apt-get update && apt-get install + -y + STAGE: ${{ inputs.stage }} + TOOLS: libnuma-dev zlib1g-dev libgmp-dev libgmp10 libssl-dev liblzma-dev libbz2-dev + git wget lsb-release software-properties-common gnupg2 apt-transport-https gcc + autoconf automake build-essential curl ghc gzip libffi-dev libncurses-dev libncurses5 + libtinfo5 patchelf + image: debian:9 + using: docker diff --git a/.github/actions/bindist-actions/action-fedora27/action.yaml b/.github/actions/bindist-actions/action-fedora27/action.yaml new file mode 100644 index 0000000000..e77b944a5e --- /dev/null +++ b/.github/actions/bindist-actions/action-fedora27/action.yaml @@ -0,0 +1,21 @@ +description: Container for fedora27 +inputs: + stage: + description: which stage to build + required: true + version: + description: which GHC version to build/test + required: false +name: action-fedora27 +runs: + entrypoint: .github/scripts/entrypoint.sh + env: + GHC_VERSION: ${{ inputs.version }} + INSTALL: dnf install -y + STAGE: ${{ inputs.stage }} + TOOLS: autoconf automake binutils bzip2 coreutils curl elfutils-devel elfutils-libs + findutils gcc gcc-c++ git gmp gmp-devel jq lbzip2 make ncurses ncurses-compat-libs + ncurses-devel openssh-clients patch perl pxz python3 sqlite sudo wget which + xz zlib-devel patchelf + image: fedora:27 + using: docker diff --git a/.github/actions/bindist-actions/action-fedora33/action.yaml b/.github/actions/bindist-actions/action-fedora33/action.yaml new file mode 100644 index 0000000000..d20c8feccd --- /dev/null +++ b/.github/actions/bindist-actions/action-fedora33/action.yaml @@ -0,0 +1,21 @@ +description: Container for fedora33 +inputs: + stage: + description: which stage to build + required: true + version: + description: which GHC version to build/test + required: false +name: action-fedora33 +runs: + entrypoint: .github/scripts/entrypoint.sh + env: + GHC_VERSION: ${{ inputs.version }} + INSTALL: dnf install -y + STAGE: ${{ inputs.stage }} + TOOLS: autoconf automake binutils bzip2 coreutils curl elfutils-devel elfutils-libs + findutils gcc gcc-c++ git gmp gmp-devel jq lbzip2 make ncurses ncurses-compat-libs + ncurses-devel openssh-clients patch perl pxz python3 sqlite sudo wget which + xz zlib-devel patchelf + image: fedora:33 + using: docker diff --git a/.github/actions/bindist-actions/action-mint193/action.yaml b/.github/actions/bindist-actions/action-mint193/action.yaml new file mode 100644 index 0000000000..e1269e0e56 --- /dev/null +++ b/.github/actions/bindist-actions/action-mint193/action.yaml @@ -0,0 +1,21 @@ +description: Container for mint193 +inputs: + stage: + description: which stage to 
build + required: true + version: + description: which GHC version to build/test + required: false +name: action-mint193 +runs: + entrypoint: .github/scripts/entrypoint.sh + env: + GHC_VERSION: ${{ inputs.version }} + INSTALL: apt-get update && apt-get install -y + STAGE: ${{ inputs.stage }} + TOOLS: libnuma-dev zlib1g-dev libgmp-dev libgmp10 libssl-dev liblzma-dev libbz2-dev + git wget lsb-release software-properties-common gnupg2 apt-transport-https gcc + autoconf automake build-essential curl ghc gzip libffi-dev libncurses-dev libncurses5 + libtinfo5 patchelf + image: linuxmintd/mint19.3-amd64 + using: docker diff --git a/.github/actions/bindist-actions/action-mint202/action.yaml b/.github/actions/bindist-actions/action-mint202/action.yaml new file mode 100644 index 0000000000..adea7272f1 --- /dev/null +++ b/.github/actions/bindist-actions/action-mint202/action.yaml @@ -0,0 +1,21 @@ +description: Container for mint202 +inputs: + stage: + description: which stage to build + required: true + version: + description: which GHC version to build/test + required: false +name: action-mint202 +runs: + entrypoint: .github/scripts/entrypoint.sh + env: + GHC_VERSION: ${{ inputs.version }} + INSTALL: apt-get update && apt-get install -y + STAGE: ${{ inputs.stage }} + TOOLS: libnuma-dev zlib1g-dev libgmp-dev libgmp10 libssl-dev liblzma-dev libbz2-dev + git wget lsb-release software-properties-common gnupg2 apt-transport-https gcc + autoconf automake build-essential curl ghc gzip libffi-dev libncurses-dev libncurses5 + libtinfo5 patchelf + image: linuxmintd/mint20.2-amd64 + using: docker diff --git a/.github/actions/bindist-actions/action-ubuntu1804/action.yaml b/.github/actions/bindist-actions/action-ubuntu1804/action.yaml new file mode 100644 index 0000000000..6a6f4662a0 --- /dev/null +++ b/.github/actions/bindist-actions/action-ubuntu1804/action.yaml @@ -0,0 +1,21 @@ +description: Container for ubuntu1804 +inputs: + stage: + description: which stage to build + required: true + version: + description: which GHC version to build/test + required: false +name: action-ubuntu1804 +runs: + entrypoint: .github/scripts/entrypoint.sh + env: + GHC_VERSION: ${{ inputs.version }} + INSTALL: apt-get update && apt-get install -y + STAGE: ${{ inputs.stage }} + TOOLS: libnuma-dev zlib1g-dev libgmp-dev libgmp10 libssl-dev liblzma-dev libbz2-dev + git wget lsb-release software-properties-common gnupg2 apt-transport-https gcc + autoconf automake build-essential curl ghc gzip libffi-dev libncurses-dev libncurses5 + libtinfo5 patchelf + image: ubuntu:18.04 + using: docker diff --git a/.github/actions/bindist-actions/action-ubuntu2004/action.yaml b/.github/actions/bindist-actions/action-ubuntu2004/action.yaml new file mode 100644 index 0000000000..3a5b57a370 --- /dev/null +++ b/.github/actions/bindist-actions/action-ubuntu2004/action.yaml @@ -0,0 +1,21 @@ +description: Container for ubuntu2004 +inputs: + stage: + description: which stage to build + required: true + version: + description: which GHC version to build/test + required: false +name: action-ubuntu2004 +runs: + entrypoint: .github/scripts/entrypoint.sh + env: + GHC_VERSION: ${{ inputs.version }} + INSTALL: apt-get update && apt-get install -y + STAGE: ${{ inputs.stage }} + TOOLS: libnuma-dev zlib1g-dev libgmp-dev libgmp10 libssl-dev liblzma-dev libbz2-dev + git wget lsb-release software-properties-common gnupg2 apt-transport-https gcc + autoconf automake build-essential curl ghc gzip libffi-dev libncurses-dev libncurses5 + libtinfo5 patchelf + image: 
ubuntu:20.04 + using: docker diff --git a/.github/actions/bindist-actions/action-ubuntu2204/action.yaml b/.github/actions/bindist-actions/action-ubuntu2204/action.yaml new file mode 100644 index 0000000000..857776507d --- /dev/null +++ b/.github/actions/bindist-actions/action-ubuntu2204/action.yaml @@ -0,0 +1,21 @@ +description: Container for ubuntu2204 +inputs: + stage: + description: which stage to build + required: true + version: + description: which GHC version to build/test + required: false +name: action-ubuntu2204 +runs: + entrypoint: .github/scripts/entrypoint.sh + env: + GHC_VERSION: ${{ inputs.version }} + INSTALL: apt-get update && apt-get install -y + STAGE: ${{ inputs.stage }} + TOOLS: libnuma-dev zlib1g-dev libgmp-dev libgmp10 libssl-dev liblzma-dev libbz2-dev + git wget lsb-release software-properties-common gnupg2 apt-transport-https gcc + autoconf automake build-essential curl ghc gzip libffi-dev libncurses-dev libncurses5 + libtinfo5 patchelf + image: ubuntu:22.04 + using: docker diff --git a/.github/actions/bindist-actions/action-unknown/action.yaml b/.github/actions/bindist-actions/action-unknown/action.yaml new file mode 100644 index 0000000000..96cf0593e9 --- /dev/null +++ b/.github/actions/bindist-actions/action-unknown/action.yaml @@ -0,0 +1,21 @@ +description: Container for unknown +inputs: + stage: + description: which stage to build + required: true + version: + description: which GHC version to build/test + required: false +name: action-unknown +runs: + entrypoint: .github/scripts/entrypoint.sh + env: + GHC_VERSION: ${{ inputs.version }} + INSTALL: yum -y install epel-release && yum install -y --allowerasing + STAGE: ${{ inputs.stage }} + TOOLS: autoconf automake binutils bzip2 coreutils curl elfutils-devel elfutils-libs + findutils gcc gcc-c++ git gmp gmp-devel jq lbzip2 make ncurses ncurses-compat-libs + ncurses-devel openssh-clients patch perl pxz python3 sqlite sudo wget which + xz zlib-devel patchelf + image: rockylinux:8 + using: docker diff --git a/.github/actions/setup-build/action.yml b/.github/actions/setup-build/action.yml index 67d64ac09e..da1ece3140 100644 --- a/.github/actions/setup-build/action.yml +++ b/.github/actions/setup-build/action.yml @@ -7,7 +7,7 @@ inputs: cabal: description: "Cabal version" required: false - default: "3.10.2.0" + default: "3.14.2.0" os: description: "Operating system: Linux, Windows or macOS" required: true @@ -31,7 +31,7 @@ runs: sudo chown -R $USER /usr/local/.ghcup shell: bash - - uses: haskell-actions/setup@v2.7.3 + - uses: haskell-actions/setup@v2.7.10 id: HaskEnvSetup with: ghc-version : ${{ inputs.ghc }} @@ -116,3 +116,18 @@ runs: - name: "Remove freeze file" run: rm -f cabal.project.freeze shell: bash + + # Make sure to clear all unneeded `ghcup` caches. + # At some point, we were running out of disk space, see issue + # https://github.com/haskell/haskell-language-server/issues/4386 for details. + # + # Using "printf" debugging (`du -sh *` and `df -h /`) and binary searching, + # we figured out that `ghcup` caches are taking up a sizable portion of the + # disk space. + # Thus, we remove anything we don't need, especially caches and temporary files. + # For good measure, we also make sure no other tooling versions are + # installed besides the ones we explicitly want.
+ - name: "Remove ghcup caches" + if: runner.os == 'Linux' + run: ghcup gc --ghc-old --share-dir --hls-no-ghc --cache --tmpdirs --unset + shell: bash diff --git a/.github/generate-ci/LICENSE b/.github/generate-ci/LICENSE new file mode 100644 index 0000000000..261eeb9e9f --- /dev/null +++ b/.github/generate-ci/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." 
+ + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. 
We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/.github/generate-ci/README.mkd b/.github/generate-ci/README.mkd new file mode 100644 index 0000000000..fef645ea12 --- /dev/null +++ b/.github/generate-ci/README.mkd @@ -0,0 +1,5 @@ +# generate-ci + +This is the generator for the release bindist CI. + +Edit ./gen_ci.hs to change configuration and run "./generate-jobs" to regenerate diff --git a/.github/generate-ci/cabal.project b/.github/generate-ci/cabal.project new file mode 100644 index 0000000000..e6fdbadb43 --- /dev/null +++ b/.github/generate-ci/cabal.project @@ -0,0 +1 @@ +packages: . diff --git a/.github/generate-ci/gen_ci.hs b/.github/generate-ci/gen_ci.hs new file mode 100644 index 0000000000..1cdba1ca41 --- /dev/null +++ b/.github/generate-ci/gen_ci.hs @@ -0,0 +1,615 @@ +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE RecordWildCards #-} + +import Control.Monad +import Data.Maybe + +import Data.Aeson hiding ( encode ) +import Data.Aeson.Types (Pair) +import qualified Data.Aeson.Key as K +import Data.Yaml + +import qualified Data.ByteString as BS + +import qualified Data.List as L + +import System.Directory +import System.FilePath +import System.Environment + +------------------------------------------------------------------------------- +-- Configuration parameters +------------------------------------------------------------------------------- + +data Opsys + = Linux Distro + | Darwin + | Windows deriving (Eq) + +osName :: Opsys -> String +osName Darwin = "mac" +osName Windows = "windows" +osName (Linux d) = "linux-" ++ distroName d + +data Distro + = Debian9 + | Debian10 + | Debian11 + | Ubuntu1804 + | Ubuntu2004 + | Ubuntu2204 + | Mint193 + | Mint202 + | Fedora27 + | Fedora33 + | Centos7 + | Rocky8 + deriving (Eq, Enum, Bounded) + +allDistros :: [Distro] +allDistros = [minBound .. maxBound] + +data Arch = Amd64 | AArch64 +archName :: Arch -> String +archName Amd64 = "x86_64" +archName AArch64 = "aarch64" + +artifactName :: Arch -> Opsys -> String +artifactName arch opsys = archName arch ++ "-" ++ case opsys of + Linux distro -> "linux-" ++ distroName distro + Darwin -> "apple-darwin" + Windows -> "mingw64" + +data GHC + = GHC948 + | GHC967 + | GHC984 + | GHC9101 + | GHC9122 + deriving (Eq, Enum, Bounded) + +ghcVersion :: GHC -> String +ghcVersion GHC948 = "9.4.8" +ghcVersion GHC967 = "9.6.7" +ghcVersion GHC984 = "9.8.4" +ghcVersion GHC9101 = "9.10.1" +ghcVersion GHC9122 = "9.12.2" + +ghcVersionIdent :: GHC -> String +ghcVersionIdent = filter (/= '.') . ghcVersion + +allGHCs :: [GHC] +allGHCs = [minBound .. 
maxBound] + +data Stage = Build GHC | Bindist | Test + +------------------------------------------------------------------------------- +-- Distro Configuration +------------------------------------------------------------------------------- + +distroImage :: Distro -> String +distroImage Debian9 = "debian:9" +distroImage Debian10 = "debian:10" +distroImage Debian11 = "debian:11" +distroImage Ubuntu1804 = "ubuntu:18.04" +distroImage Ubuntu2004 = "ubuntu:20.04" +distroImage Ubuntu2204 = "ubuntu:22.04" +distroImage Mint193 = "linuxmintd/mint19.3-amd64" +distroImage Mint202 = "linuxmintd/mint20.2-amd64" +distroImage Fedora27 = "fedora:27" +distroImage Fedora33 = "fedora:33" +distroImage Centos7 = "centos:7" +distroImage Rocky8 = "rockylinux:8" + +distroName :: Distro -> String +distroName Debian9 = "deb9" +distroName Debian10 = "deb10" +distroName Debian11 = "deb11" +distroName Ubuntu1804 = "ubuntu1804" +distroName Ubuntu2004 = "ubuntu2004" +distroName Ubuntu2204 = "ubuntu2204" +distroName Mint193 = "mint193" +distroName Mint202 = "mint202" +distroName Fedora27 = "fedora27" +distroName Fedora33 = "fedora33" +distroName Centos7 = "centos7" +distroName Rocky8 = "unknown" + +distroInstall :: Distro -> String +distroInstall Debian9 = "sed -i s/deb.debian.org/archive.debian.org/g /etc/apt/sources.list && sed -i 's|security.debian.org|archive.debian.org/|g' /etc/apt/sources.list && sed -i /-updates/d /etc/apt/sources.list && apt-get update && apt-get install -y" +distroInstall Debian10 = "apt-get update && apt-get install -y" +distroInstall Debian11 = "apt-get update && apt-get install -y" +distroInstall Ubuntu1804 = "apt-get update && apt-get install -y" +distroInstall Ubuntu2004 = "apt-get update && apt-get install -y" +distroInstall Ubuntu2204 = "apt-get update && apt-get install -y" +distroInstall Mint193 = "apt-get update && apt-get install -y" +distroInstall Mint202 = "apt-get update && apt-get install -y" +distroInstall Fedora27 = "dnf install -y" +distroInstall Fedora33 = "dnf install -y" +distroInstall Centos7 = "sed -i 's/mirrorlist/#mirrorlist/g' /etc/yum.repos.d/CentOS-* && sed -i 's|#baseurl=http://mirror.centos.org|baseurl=http://vault.centos.org|g' /etc/yum.repos.d/CentOS-* && yum -y install epel-release && yum install -y" +distroInstall Rocky8 = "yum -y install epel-release && yum install -y --allowerasing" + +distroTools :: Distro -> String +distroTools Debian9 = "libnuma-dev zlib1g-dev libgmp-dev libgmp10 libssl-dev liblzma-dev libbz2-dev git wget lsb-release software-properties-common gnupg2 apt-transport-https gcc autoconf automake build-essential curl ghc gzip libffi-dev libncurses-dev libncurses5 libtinfo5 patchelf" +distroTools Debian10 = "libnuma-dev zlib1g-dev libgmp-dev libgmp10 libssl-dev liblzma-dev libbz2-dev git wget lsb-release software-properties-common gnupg2 apt-transport-https gcc autoconf automake build-essential curl ghc gzip libffi-dev libncurses-dev libncurses5 libtinfo5 patchelf" +distroTools Debian11 = "libnuma-dev zlib1g-dev libgmp-dev libgmp10 libssl-dev liblzma-dev libbz2-dev git wget lsb-release software-properties-common gnupg2 apt-transport-https gcc autoconf automake build-essential curl ghc gzip libffi-dev libncurses-dev libncurses5 libtinfo5 patchelf" +distroTools Ubuntu1804 = "libnuma-dev zlib1g-dev libgmp-dev libgmp10 libssl-dev liblzma-dev libbz2-dev git wget lsb-release software-properties-common gnupg2 apt-transport-https gcc autoconf automake build-essential curl ghc gzip libffi-dev libncurses-dev libncurses5 libtinfo5 patchelf" +distroTools 
Ubuntu2004 = "libnuma-dev zlib1g-dev libgmp-dev libgmp10 libssl-dev liblzma-dev libbz2-dev git wget lsb-release software-properties-common gnupg2 apt-transport-https gcc autoconf automake build-essential curl ghc gzip libffi-dev libncurses-dev libncurses5 libtinfo5 patchelf" +distroTools Ubuntu2204 = "libnuma-dev zlib1g-dev libgmp-dev libgmp10 libssl-dev liblzma-dev libbz2-dev git wget lsb-release software-properties-common gnupg2 apt-transport-https gcc autoconf automake build-essential curl ghc gzip libffi-dev libncurses-dev libncurses5 libtinfo5 patchelf" +distroTools Mint193 = "libnuma-dev zlib1g-dev libgmp-dev libgmp10 libssl-dev liblzma-dev libbz2-dev git wget lsb-release software-properties-common gnupg2 apt-transport-https gcc autoconf automake build-essential curl ghc gzip libffi-dev libncurses-dev libncurses5 libtinfo5 patchelf" +distroTools Mint202 = "libnuma-dev zlib1g-dev libgmp-dev libgmp10 libssl-dev liblzma-dev libbz2-dev git wget lsb-release software-properties-common gnupg2 apt-transport-https gcc autoconf automake build-essential curl ghc gzip libffi-dev libncurses-dev libncurses5 libtinfo5 patchelf" +distroTools Fedora27 = "autoconf automake binutils bzip2 coreutils curl elfutils-devel elfutils-libs findutils gcc gcc-c++ git gmp gmp-devel jq lbzip2 make ncurses ncurses-compat-libs ncurses-devel openssh-clients patch perl pxz python3 sqlite sudo wget which xz zlib-devel patchelf" +distroTools Fedora33 = "autoconf automake binutils bzip2 coreutils curl elfutils-devel elfutils-libs findutils gcc gcc-c++ git gmp gmp-devel jq lbzip2 make ncurses ncurses-compat-libs ncurses-devel openssh-clients patch perl pxz python3 sqlite sudo wget which xz zlib-devel patchelf" +distroTools Centos7 = "autoconf automake binutils bzip2 coreutils curl elfutils-devel elfutils-libs findutils gcc gcc-c++ git gmp gmp-devel jq lbzip2 make ncurses ncurses-compat-libs ncurses-devel openssh-clients patch perl pxz python3 sqlite sudo wget which xz zlib-devel patchelf" +distroTools Rocky8 = "autoconf automake binutils bzip2 coreutils curl elfutils-devel elfutils-libs findutils gcc gcc-c++ git gmp gmp-devel jq lbzip2 make ncurses ncurses-compat-libs ncurses-devel openssh-clients patch perl pxz python3 sqlite sudo wget which xz zlib-devel patchelf" + +------------------------------------------------------------------------------- +-- OS/runner Config +------------------------------------------------------------------------------- + +baseEnv :: [(Key,Value)] +baseEnv = [ "AWS_SECRET_ACCESS_KEY" .= str "${{ secrets.AWS_SECRET_ACCESS_KEY }}" + , "AWS_ACCESS_KEY_ID" .= str "${{ secrets.AWS_ACCESS_KEY_ID }}" + , "S3_HOST" .= str "${{ secrets.S3_HOST }}" + , "TZ" .= str "Asia/Singapore" + ] + +-- | Environment configuration +envVars :: Arch -> Opsys -> Value +envVars arch os = object $ + baseEnv + ++ [ "TARBALL_EXT" .= str (case os of + Windows -> "zip" + _ -> "tar.xz") + , "ARCH" .= str (case arch of + Amd64 -> "64" + AArch64 -> "ARM64") + , "ADD_CABAL_ARGS" .= str (case (os,arch) of + (Linux _, Amd64) -> "--enable-split-sections" + _ -> "") + , "ARTIFACT" .= artifactName arch os + ] + ++ [ "DEBIAN_FRONTEND" .= str "noninteractive" + | Linux _ <- [os] + ] + ++ [ "MACOSX_DEPLOYMENT_TARGET" .= str "10.13" + | Darwin <- [os] + ] + ++ [ "HOMEBREW_CHANGE_ARCH_TO_ARM" .= str "1" + | Darwin <- [os], AArch64 <- [arch] + ] + +-- | Runner selection +runner :: Arch -> Opsys -> [Value] +runner Amd64 (Linux _) = ["ubuntu-latest"] +runner AArch64 (Linux _) = ["self-hosted", "Linux", "ARM64", "maerwald"] +runner Amd64 
Darwin = ["macOS-13"] +runner AArch64 Darwin = ["self-hosted", "macOS", "ARM64"] +runner Amd64 Windows = ["windows-latest"] +runner AArch64 Windows = error "aarch64 windows not supported" + +-- | Runner selection for bindist jobs +bindistRunner :: Arch -> Opsys -> [Value] +bindistRunner Amd64 (Linux _) = ["self-hosted", "linux-space", "maerwald"] +bindistRunner AArch64 (Linux _) = ["self-hosted", "Linux", "ARM64", "maerwald"] +bindistRunner Amd64 Darwin = ["macOS-13"] +bindistRunner AArch64 Darwin = ["self-hosted", "macOS", "ARM64"] +bindistRunner Amd64 Windows = ["windows-latest"] +bindistRunner AArch64 Windows = error "aarch64 windows not supported" + +------------------------------------------------------------------------------- +-- Action generatation +------------------------------------------------------------------------------- +-- Each x86-linux job has its own action, living in a separate file +-- The contents of the file are derived from the 'Action' datatype +-- +-- We do this so that we can run the build in the right kind of OS container, +-- but not be forced to run the checkout and upload artifact in the same container +-- +-- This is because we want to use container images that are not supported by +-- github provided actions, see for instance https://github.com/actions/upload-artifact/issues/489 +------------------------------------------------------------------------------- + +-- | Container actions for x86-linux runners. +-- Each of these corresponds to a separate action file, +-- called 'actionName', located at 'actionPath' +data Action + = Action + { actionName :: String + , actionDistro :: Distro + } + +actionDir :: FilePath +actionDir = "./.github/actions/bindist-actions/" + +actionPath :: Distro -> FilePath +actionPath d = actionDir ++ distroActionName d + +instance ToJSON Action where + toJSON Action{..} = object + [ "name" .= actionName + , "description" .= str ("Container for " ++ distroName actionDistro) + , "inputs" .= object + [ "stage" .= object + [ "description" .= str "which stage to build" + , "required" .= True + ] + , "version" .= object + [ "description" .= str "which GHC version to build/test" + , "required" .= False + ] + ] + , "runs" .= object + [ "using" .= str "docker" + , "image" .= distroImage actionDistro + , "entrypoint" .= str ".github/scripts/entrypoint.sh" + , "env" .= object + [ "STAGE" .= str "${{ inputs.stage }}" + , "INSTALL" .= distroInstall actionDistro + , "TOOLS" .= distroTools actionDistro + , "GHC_VERSION" .= str "${{ inputs.version }}" + ] + ] + ] + +configAction :: Config -> Maybe Action +configAction (MkConfig Amd64 (Linux d) _) = Just $ Action (distroActionName d) d +configAction _ = Nothing + +distroActionName :: Distro -> String +distroActionName d = "action-" ++ distroName d + +customAction :: Distro -> Stage -> Value +customAction d st = flip (ghAction stepName (actionPath d)) [] $ case st of + Build v -> + [ "stage" .= str "BUILD" + , "version" .= ghcVersion v + ] + Test -> + [ "stage" .= str "TEST" + ] + Bindist -> + [ "stage" .= str "BINDIST" + ] + where + stepName = case st of + Build v -> "Build " ++ ghcVersion v + Test -> "Test" + Bindist -> "Bindist" + +------------------------------------------------------------------------------- +-- CI generation +------------------------------------------------------------------------------- +-- This is the code that generates the bindist workflow + +-- | Global CI config type +data CI = CI [Config] + +data Config = MkConfig Arch Opsys [GHC] + +instance ToJSON CI where + toJSON 
(CI cs) = object + [ "name" .= str "Build and release" + , "on" .= object [ "push" .= object ["tags" .= [str "*"]] + , "schedule" .= [object ["cron" .= str "0 2 * * 1"]] + ] + , "env" .= object + [ "CABAL_CACHE_DISABLE" .= str "${{ vars.CABAL_CACHE_DISABLE }}" + , "CABAL_CACHE_NONFATAL" .= str "${{ vars.CABAL_CACHE_NONFATAL }}" + ] + , "jobs" .= object (concatMap (getConfigJobs . makeJobs) cs ++ [releaseJob cs]) + ] + +type Job = Pair + +data ConfigJobs = ConfigJobs { buildJobs :: [Job], bindistJob :: Job, testJob :: Job} + +getConfigJobs :: ConfigJobs -> [Job] +getConfigJobs ConfigJobs{..} = buildJobs ++ [bindistJob, testJob] + +makeJobs :: Config -> ConfigJobs +makeJobs (MkConfig arch os vs) = + ConfigJobs + { buildJobs = [ buildJob arch os ver | ver <- vs ] + , bindistJob = mkBindistJob arch os vs + , testJob = mkTestJob arch os + } + +buildJobName :: Arch -> Opsys -> GHC -> String +buildJobName arch os version = L.intercalate "-" ["build",archName arch, osName os, ghcVersionIdent version] + +testJobName :: Arch -> Opsys -> String +testJobName arch os = L.intercalate "-" ["test",archName arch, osName os] + +bindistJobName :: Arch -> Opsys -> String +bindistJobName arch os = L.intercalate "-" ["bindist",archName arch, osName os] + +bindistName :: Arch -> Opsys -> String +bindistName arch os = "bindist-" ++ artifactName arch os + +setupAction :: Arch -> Opsys -> [Value] +-- some +setupAction AArch64 (Linux Ubuntu2004) = + [ ghRun "clean and git config for aarch64-linux" "bash" [] $ unlines + [ "find . -mindepth 1 -maxdepth 1 -exec rm -rf -- {} +" + , "git config --global --get-all safe.directory | grep '^\\*$' || git config --global --add safe.directory \"*\"" + ] + ] +setupAction _ _ = [] + +releaseJob :: [Config] -> Job +releaseJob cs = + "release" .= object + [ "name" .= str "release" + , "runs-on" .= str "ubuntu-latest" + , "needs" .= [testJobName arch os | MkConfig arch os _ <- cs] + , "if" .= str "startsWith(github.ref, 'refs/tags/')" + , "steps" .= ( [ checkoutAction ] + ++ [ downloadArtifacts (bindistName arch os) "./out" | MkConfig arch os _ <- cs] + ++ [ ghRun "Prepare release" "bash" [] $ unlines + [ "sudo apt-get update && sudo apt-get install -y tar xz-utils" + , "cd out/plan.json" + , "tar cf plan_json.tar *" + , "mv plan_json.tar ../" + , "cd ../.." 
+ , "export RELEASE=$GITHUB_REF_NAME" + , "git archive --format=tar.gz -o \"out/haskell-language-server-${RELEASE}-src.tar.gz\" --prefix=\"haskell-language-server-${RELEASE}/\" HEAD" + ] + , ghAction "Release" "softprops/action-gh-release@v2" + [ "draft" .= True + , "files" .= unlines + [ "./out/*.zip" + , "./out/*.tar.xz" + , "./out/*.tar.gz" + , "./out/*.tar" + ] + ] [] + ]) + ] + + + +buildJob :: Arch -> Opsys -> GHC -> Job +buildJob arch os v = + K.fromString (buildJobName arch os v) .= object + [ "runs-on" .= runner arch os + , "name" .= str (buildJobName arch os v ++ " (Build binaries)") + , "environment" .= str "CI" + , "env" .= thisEnv + , "steps" .= + ( setupAction arch os + ++ [ checkoutAction ] + ++ buildStep arch os + ++ [uploadArtifacts ("artifacts-"++buildJobName arch os v) outputname]) + ] + + where thisEnv = envVars arch os + art = artifactName arch os + outputname + | Windows <- os = "./out/*" + | otherwise = ("out-"++art++"-"++ghcVersion v++".tar") + buildStep Amd64 (Linux d) = [customAction d (Build v)] + buildStep AArch64 (Linux Ubuntu2004) = + [ ghAction "Build aarch64-linux binaries" "docker://hasufell/arm64v8-ubuntu-haskell:focal" + [ "args" .= str "bash .github/scripts/build.sh" ] + [ "GHC_VERSION" .= ghcVersion v ] + , ghAction "Tar aarch64-linux binaries" "docker://hasufell/arm64v8-ubuntu-haskell:focal" + [ "args" .= str "bash .github/scripts/tar.sh" ] + [ "GHC_VERSION" .= ghcVersion v ] + ] + buildStep AArch64 (Linux _) = error "aarch64-linux non-ubuntu not supported" + + buildStep Amd64 Darwin = [ghRun "Run build" "sh" ["GHC_VERSION" .= ghcVersion v] $ unlines $ + [ "brew install coreutils tree" + , "bash .github/scripts/build.sh" + , "tar cf out-${ARTIFACT}-${GHC_VERSION}.tar out/ store/" + ] + ] + buildStep AArch64 Darwin = [ghRun "Run build" "sh" ["GHC_VERSION" .= ghcVersion v] $ unlines $ + [ "bash .github/scripts/brew.sh git coreutils autoconf automake tree" + , "export PATH=\"$HOME/.brew/bin:$HOME/.brew/sbin:$PATH\"" + , "export LD=ld" + , "bash .github/scripts/build.sh" + , "tar cf out-${ARTIFACT}-${GHC_VERSION}.tar out/ store/" + ] + ] + + buildStep Amd64 Windows = [ghRun "Run build" "pwsh" ["GHC_VERSION" .= ghcVersion v] $ unlines $ + [ "$env:CHERE_INVOKING = 1" + , "$env:MSYS2_PATH_TYPE = \"inherit\"" + , "$ErrorActionPreference = \"Stop\"" + , "C:\\msys64\\usr\\bin\\bash -lc \"bash .github/scripts/build.sh\"" + ] + ] + buildStep AArch64 Windows = error "aarch64 windows not supported" + +mkBindistJob :: Arch -> Opsys -> [GHC] -> Job +mkBindistJob arch os vs = + K.fromString (bindistJobName arch os) .= object + [ "runs-on" .= bindistRunner arch os + , "name" .= (bindistJobName arch os ++ " (Prepare bindist)") + , "needs" .= [buildJobName arch os ver | ver <- vs] + , "env" .= thisEnv + , "steps" .= + ( setupAction arch os + ++ [ checkoutAction ] + ++ [downloadArtifacts ("artifacts-"++buildJobName arch os v) outputPath | v <- vs] + ++ bindistStep arch os + ++ [ uploadArtifacts (bindistName arch os) "./out/*.tar.xz\n./out/plan.json/*\n./out/*.zip" ]) + ] + where thisEnv = envVars arch os + + outputPath + | Windows <- os = "./out" + | otherwise = "./" + + bindistStep Amd64 (Linux d) = [customAction d Bindist] + bindistStep AArch64 (Linux Ubuntu2004) = + [ ghAction "Unpack aarch64-linux binaries" "docker://hasufell/arm64v8-ubuntu-haskell:focal" + [ "args" .= str "bash .github/scripts/untar.sh" ] + [ ] + , ghAction "Tar aarch64-linux binaries" "docker://hasufell/arm64v8-ubuntu-haskell:focal" + [ "args" .= str "bash .github/scripts/bindist.sh" ] + [ ] + ] + 
bindistStep AArch64 (Linux _) = error "aarch64-linux non-ubuntu not supported" + + bindistStep Amd64 Darwin = [ghRun "Create bindist" "sh" [] $ unlines $ + [ "brew install coreutils tree" + , "for bindist in out-*.tar ; do" + , " tar xf \"${bindist}\"" + , "done" + , "unset bindist" + , "bash .github/scripts/bindist.sh" + ] + ] + bindistStep AArch64 Darwin = [ghRun "Run build" "sh" [] $ unlines $ + [ "bash .github/scripts/brew.sh git coreutils llvm@13 autoconf automake tree" + , "export PATH=\"$HOME/.brew/bin:$HOME/.brew/sbin:$HOME/.brew/opt/llvm@13/bin:$PATH\"" + , "export CC=\"$HOME/.brew/opt/llvm@13/bin/clang\"" + , "export CXX=\"$HOME/.brew/opt/llvm@13/bin/clang++\"" + , "export LD=ld" + , "export AR=\"$HOME/.brew/opt/llvm@13/bin/llvm-ar\"" + , "export RANLIB=\"$HOME/.brew/opt/llvm@13/bin/llvm-ranlib\"" + , "for bindist in out-*.tar ; do" + , " tar xf \"${bindist}\"" + , "done" + , "unset bindist" + , "bash .github/scripts/bindist.sh" + ] + ] + + bindistStep Amd64 Windows = [ghRun "Run build" "pwsh" [] $ unlines $ + [ "C:\\msys64\\usr\\bin\\bash -lc \"pacman --disable-download-timeout --noconfirm -Syuu\"" + , "C:\\msys64\\usr\\bin\\bash -lc \"pacman --disable-download-timeout --noconfirm -Syuu\"" + , "C:\\msys64\\usr\\bin\\bash -lc \"pacman --disable-download-timeout --noconfirm -S unzip zip git\"" + , "taskkill /F /FI \"MODULES eq msys-2.0.dll\"" + , "$env:CHERE_INVOKING = 1" + , "$env:MSYS2_PATH_TYPE = \"inherit\"" + , "C:\\msys64\\usr\\bin\\bash -lc \"bash .github/scripts/bindist.sh\"" + ] + ] + bindistStep AArch64 Windows = error "aarch64 windows not supported" + +mkTestJob :: Arch -> Opsys -> Job +mkTestJob arch os = + K.fromString (testJobName arch os) .= object + [ "runs-on" .= runner arch os + , "name" .= str (testJobName arch os ++ " (Test binaries)") + , "needs" .= [bindistJobName arch os] + , "environment" .= str "CI" + , "env" .= thisEnv + , "steps" .= + ( setupAction arch os + ++ [ checkoutAction , downloadArtifacts (bindistName arch os) "./out" ] + ++ testStep arch os) + ] + where thisEnv = envVars arch os + + testStep Amd64 (Linux d) = [customAction d Test] + testStep AArch64 (Linux Ubuntu2004) = + [ ghAction "Run test" "docker://hasufell/arm64v8-ubuntu-haskell:focal" + [ "args" .= str "bash .github/scripts/test.sh" ] + [ ] + ] + testStep AArch64 (Linux _) = error "aarch64-linux non-ubuntu not supported" + + testStep Amd64 Darwin = [ghRun "Run test" "sh" [] $ unlines $ + [ "brew install coreutils tree" + , "bash .github/scripts/test.sh" + ] + ] + testStep AArch64 Darwin = [ghRun "Run test" "sh" [] $ unlines $ + [ "bash .github/scripts/brew.sh git coreutils llvm@13 autoconf automake tree" + , "export PATH=\"$HOME/.brew/bin:$HOME/.brew/sbin:$HOME/.brew/opt/llvm@13/bin:$PATH\"" + , "export CC=\"$HOME/.brew/opt/llvm@13/bin/clang\"" + , "export CXX=\"$HOME/.brew/opt/llvm@13/bin/clang++\"" + , "export LD=ld" + , "export AR=\"$HOME/.brew/opt/llvm@13/bin/llvm-ar\"" + , "export RANLIB=\"$HOME/.brew/opt/llvm@13/bin/llvm-ranlib\"" + , "bash .github/scripts/test.sh" + ] + ] + + testStep Amd64 Windows = + [ ghRun "install windows deps" "pwsh" [] $ unlines $ + [ "C:\\msys64\\usr\\bin\\bash -lc \"pacman --disable-download-timeout --noconfirm -Syuu\"" + , "C:\\msys64\\usr\\bin\\bash -lc \"pacman --disable-download-timeout --noconfirm -Syuu\"" + , "C:\\msys64\\usr\\bin\\bash -lc \"pacman --disable-download-timeout --noconfirm -S make mingw-w64-x86_64-clang curl autoconf mingw-w64-x86_64-pkgconf ca-certificates base-devel gettext autoconf make libtool automake python p7zip patch unzip zip 
git\"" + , "taskkill /F /FI \"MODULES eq msys-2.0.dll\"" + ] + , ghRun "Run test" "pwsh" [] $ unlines $ + [ "$env:CHERE_INVOKING = 1" + , "$env:MSYS2_PATH_TYPE = \"inherit\"" + , "C:\\msys64\\usr\\bin\\bash -lc \"bash .github/scripts/test.sh\"" + ] + ] + testStep AArch64 Windows = error "aarch64 windows not supported" + + +ciConfigs :: [Config] +ciConfigs = + [ MkConfig Amd64 Darwin allGHCs + , MkConfig AArch64 Darwin allGHCs + , MkConfig Amd64 Windows allGHCs + , MkConfig AArch64 (Linux Ubuntu2004) allGHCs] + ++ [ MkConfig Amd64 (Linux distro) allGHCs | distro <- allDistros ] + +main :: IO () +main = do + [root] <- getArgs + setCurrentDirectory root + removeDirectoryRecursive actionDir + createDirectoryIfMissing True actionDir + forM_ (mapMaybe configAction ciConfigs) $ \a -> do + let path = actionPath (actionDistro a) + createDirectoryIfMissing True path + BS.writeFile (path </> "action.yaml") $ encode a + BS.putStr "### DO NOT EDIT - GENERATED FILE\n" + BS.putStr "### This file was generated by ./.github/generate-ci/gen_ci.hs\n" + BS.putStr "### Edit that file and run ./.github/generate-ci/generate-jobs to regenerate\n" + BS.putStr $ encode $ CI ciConfigs + + +------------------------------------------------------------------------------- +-- Utils +------------------------------------------------------------------------------- + +str :: String -> String +str = id + +ghAction :: String -> String -> [(Key,Value)] -> [(Key,Value)] -> Value +ghAction name uses args env = object $ + [ "name" .= name + , "uses" .= uses + ] + ++ case args of + [] -> [] + xs -> [ "with" .= object xs ] + ++ case env of + [] -> [] + xs -> [ "env" .= object xs ] + +ghRun :: String -> String -> [(Key,Value)] -> String -> Value +ghRun name shell env script = object $ + [ "name" .= name + , "shell" .= shell + , "run" .= script + ] + ++ case env of + [] -> [] + xs -> [ "env" .= object xs ] + +checkoutAction :: Value +checkoutAction = ghAction "Checkout" "actions/checkout@v4" [] [] + +uploadArtifacts :: String -> String -> Value +uploadArtifacts name path = ghAction "Upload artifact" "actions/upload-artifact@v4" + [ "if-no-files-found" .= str "error" + , "retention-days" .= (2 :: Int) + , "name" .= name + , "path" .= path + ] [] + +downloadArtifacts :: String -> String -> Value +downloadArtifacts name path = ghAction "Download artifacts" "actions/download-artifact@v4" [ "name" .= name, "path" .= path ] [] diff --git a/.github/generate-ci/generate-ci.cabal b/.github/generate-ci/generate-ci.cabal new file mode 100644 index 0000000000..ae9e9d3f52 --- /dev/null +++ b/.github/generate-ci/generate-ci.cabal @@ -0,0 +1,18 @@ +cabal-version: 3.0 +name: generate-ci +version: 0.1.0.0 +license: Apache-2.0 +license-file: LICENSE +build-type: Simple + +executable generate-ci + main-is: gen_ci.hs + ghc-options: -Wall + build-depends: base, + bytestring, + containers, + directory, + filepath, + aeson, + yaml >= 0.11.11.2 + default-language: Haskell2010 diff --git a/.github/generate-ci/generate-jobs b/.github/generate-ci/generate-jobs new file mode 100755 index 0000000000..4cffc82d2a --- /dev/null +++ b/.github/generate-ci/generate-jobs @@ -0,0 +1,9 @@ +#!/usr/bin/env bash + +set -e + +root="$(git rev-parse --show-toplevel)/" +cd "$root/.github/generate-ci/" + +cabal run -v0 generate-ci "$root" > ../workflows/release.yaml + diff --git a/.github/mergify.yml b/.github/mergify.yml index 15e2dd2653..c0b76f7eec 100644 --- a/.github/mergify.yml +++ b/.github/mergify.yml @@ -2,26 +2,19 @@ queue_rules: - name: default # Mergify always respects
the branch protection settings # so we can left empty mergify own ones - conditions: [] - -pull_request_rules: - - name: Automatically merge pull requests - conditions: + queue_conditions: - label=merge me - '#approved-reviews-by>=1' - actions: - queue: - method: squash - name: default - # The queue action automatically updates PRs that - # have entered the queue, but in order to do that - # they must have passed CI. Since our CI is a bit - # flaky, PRs can fail to get in, which then means - # they don't get updated, which is extra annoying. - # This just adds the updating as an independent - # step. + merge_conditions: [] + merge_method: squash + +pull_request_rules: - name: Automatically update pull requests conditions: - label=merge me actions: update: + - name: refactored queue action rule + conditions: [] + actions: + queue: diff --git a/.github/scripts/bindist.sh b/.github/scripts/bindist.sh index 72e8fe4676..b50aeb2aca 100644 --- a/.github/scripts/bindist.sh +++ b/.github/scripts/bindist.sh @@ -5,10 +5,7 @@ set -eux . .github/scripts/env.sh . .github/scripts/common.sh -# ensure ghcup -if ! command -v ghcup ; then - install_ghcup -fi +install_ghcup # create tarball/zip case "${TARBALL_EXT}" in @@ -24,8 +21,8 @@ case "${TARBALL_EXT}" in # from the oldest version in the list : "${GHCS:="$(cd "$CI_PROJECT_DIR/out/${ARTIFACT}" && rm -f ./*.json && for ghc in * ; do printf "%s\n" "$ghc" ; done | sort -r | tr '\n' ' ')"}" emake --version - emake GHCUP=ghcup ARTIFACT="${ARTIFACT}" GHCS="${GHCS}" bindist - emake GHCUP=ghcup ARTIFACT="${ARTIFACT}" bindist-tar + emake GHCUP=ghcup ARTIFACT="${ARTIFACT}" GHCS="${GHCS}" bindist || fail_with_ghcup_logs "make bindist failed" + emake GHCUP=ghcup ARTIFACT="${ARTIFACT}" bindist-tar || fail_with_ghcup_logs "make bindist failed" ;; *) fail "Unknown TARBALL_EXT: ${TARBALL_EXT}" diff --git a/.github/scripts/brew.sh b/.github/scripts/brew.sh index 0f889c6299..4066dfb885 100644 --- a/.github/scripts/brew.sh +++ b/.github/scripts/brew.sh @@ -19,9 +19,7 @@ mkdir -p $CI_PROJECT_DIR/.brew_cache export HOMEBREW_CACHE=$CI_PROJECT_DIR/.brew_cache mkdir -p $CI_PROJECT_DIR/.brew_logs export HOMEBREW_LOGS=$CI_PROJECT_DIR/.brew_logs -mkdir -p /private/tmp/.brew_tmp -export HOMEBREW_TEMP=/private/tmp/.brew_tmp +export HOMEBREW_TEMP=$(mktemp -d) #brew update brew install ${1+"$@"} - diff --git a/.github/scripts/build.sh b/.github/scripts/build.sh index d27a940e14..1c0eae6252 100644 --- a/.github/scripts/build.sh +++ b/.github/scripts/build.sh @@ -11,7 +11,9 @@ uname pwd env -# ensure ghcup +# Ensure ghcup is present and properly configured. +# Sets up the vanilla channel, as HLS CI provides binaries +# for GHCup's vanilla channel. 
install_ghcup # ensure cabal-cache @@ -19,7 +21,7 @@ download_cabal_cache "$HOME/.local/bin/cabal-cache" # build -ghcup install ghc "${GHC_VERSION}" +ghcup install ghc "${GHC_VERSION}" || fail_with_ghcup_logs "install ghc" ghcup set ghc "${GHC_VERSION}" sed -i.bak -e '/DELETE MARKER FOR CI/,/END DELETE/d' cabal.project # see comment in cabal.project ecabal update diff --git a/.github/scripts/common.sh b/.github/scripts/common.sh index dde41675cf..a10d84045e 100644 --- a/.github/scripts/common.sh +++ b/.github/scripts/common.sh @@ -139,7 +139,7 @@ install_ghcup() { source "$(dirname "${GHCUP_BIN}")/env" # make sure we use the vanilla channel for installing binaries # see https://github.com/haskell/ghcup-metadata/pull/166#issuecomment-1893075575 - ghcup config set url-source https://raw.githubusercontent.com/haskell/ghcup-metadata/master/ghcup-vanilla-0.0.8.yaml + ghcup config set url-source https://raw.githubusercontent.com/haskell/ghcup-metadata/refs/heads/master/ghcup-vanilla-0.0.9.yaml ghcup install cabal --set "${BOOTSTRAP_HASKELL_CABAL_VERSION}" fi } @@ -182,6 +182,10 @@ error() { echo_color "${RED}" "$1"; } warn() { echo_color "${LT_BROWN}" "$1"; } info() { echo_color "${LT_BLUE}" "$1"; } +fail_with_ghcup_logs() { + cat /github/workspace/.ghcup/logs/* + fail "$1" +} fail() { error "error: $1"; exit 1; } run() { diff --git a/.github/scripts/entrypoint.sh b/.github/scripts/entrypoint.sh new file mode 100755 index 0000000000..f02e4ec17a --- /dev/null +++ b/.github/scripts/entrypoint.sh @@ -0,0 +1,32 @@ +#!/bin/bash + +set -x + +bash -c "$INSTALL curl bash git tree $TOOLS" + +unset INSTALL +unset TOOLS + +if [ "${ARTIFACT}" = "x86_64-linux-unknown" ]; then + echo "NAME=Linux" > /etc/os-release + echo "ID=linux" >> /etc/os-release + echo "PRETTY_NAME=Linux" >> /etc/os-release +fi + +case "$STAGE" in + "BUILD") + bash .github/scripts/build.sh + tar cf out-${ARTIFACT}-${GHC_VERSION}.tar out/ store/ + ;; + "BINDIST") + set -eux + for bindist in out-*.tar ; do + tar -xf "${bindist}" + done + unset bindist + bash .github/scripts/bindist.sh + ;; + "TEST") + bash .github/scripts/test.sh +esac + diff --git a/.github/scripts/env.sh b/.github/scripts/env.sh index 2486869453..90e7219661 100644 --- a/.github/scripts/env.sh +++ b/.github/scripts/env.sh @@ -11,7 +11,7 @@ fi export PATH="$HOME/.local/bin:$PATH" export BOOTSTRAP_HASKELL_NONINTERACTIVE=1 -export BOOTSTRAP_HASKELL_CABAL_VERSION="${CABAL_VER:-3.10.2.0}" +export BOOTSTRAP_HASKELL_CABAL_VERSION="${CABAL_VER:-3.10.3.0}" export BOOTSTRAP_HASKELL_ADJUST_CABAL_CONFIG=no export BOOTSTRAP_HASKELL_INSTALL_NO_STACK=yes export BOOTSTRAP_HASKELL_ADJUST_BASHRC=1 diff --git a/.github/scripts/test.sh b/.github/scripts/test.sh index 04cf680779..ad6676fd51 100644 --- a/.github/scripts/test.sh +++ b/.github/scripts/test.sh @@ -8,14 +8,25 @@ set -eux . .github/scripts/env.sh . .github/scripts/common.sh -test_package="bytestring-0.11.1.0" -test_module="Data/ByteString.hs" +test_package="text-2.1.2" +test_module="src/Data/Text.hs" create_cradle() { echo "cradle:" > hie.yaml echo " cabal:" >> hie.yaml } + +# Tests and benchmarks can't be built on some GHC versions, such as GHC 9.10.1 on Windows. +# Disable these packages for now, building bytestring-0.12.1.0 works completely fine.
+create_cabal_project() { + echo "packages: ./" > cabal.project + echo "" >> cabal.project + echo "tests: False" >> cabal.project + echo "benchmarks: False" >> cabal.project + + echo "flags: -simdutf -pure-haskell" >> cabal.project +} + enter_test_package() { local tmp_dir tmp_dir=$(mktempdir) @@ -38,7 +49,7 @@ test_all_hls() { bin_noexe=${bin/.exe/} if ! [[ "${bin_noexe}" =~ "haskell-language-server-wrapper" ]] && ! [[ "${bin_noexe}" =~ "~" ]] ; then if ghcup install ghc --set "${bin_noexe/haskell-language-server-/}" ; then - "${hls}" typecheck "${test_module}" || fail "failed to typecheck with HLS for GHC ${bin_noexe/haskell-language-server-/}" + "${hls}" --debug typecheck "${test_module}" || fail "failed to typecheck with HLS for GHC ${bin_noexe/haskell-language-server-/}" # After running the test, free up disk space by deleting the unneeded GHC version. # Helps us staying beneath the 14GB SSD disk limit. @@ -49,7 +60,7 @@ test_all_hls() { fi done # install the recommended GHC version so the wrapper can launch HLS - ghcup install ghc --set recommended + ghcup install ghc --set 9.10.1 "$bindir/haskell-language-server-wrapper${ext}" typecheck "${test_module}" || fail "failed to typecheck with HLS wrapper" } @@ -60,7 +71,7 @@ env # ensure ghcup install_ghcup -ghcup install ghc --set 9.4.5 +ghcup install ghc --set 9.4.8 (cd .. && ecabal update) # run cabal update outside project dir @@ -77,6 +88,7 @@ case "${TARBALL_EXT}" in enter_test_package create_cradle + create_cabal_project test_all_hls "$GHCUP_BIN" ;; @@ -106,6 +118,7 @@ case "${TARBALL_EXT}" in enter_test_package create_cradle + create_cabal_project test_all_hls "$(ghcup whereis bindir)" ;; diff --git a/.github/workflows/bench.yml b/.github/workflows/bench.yml index f3834cac6c..c8953d4d2b 100644 --- a/.github/workflows/bench.yml +++ b/.github/workflows/bench.yml @@ -17,7 +17,6 @@ on: jobs: pre_job: runs-on: ubuntu-latest - if: contains(github.event.pull_request.labels.*.name, 'performance') outputs: should_skip: ${{ steps.skip_check.outputs.should_skip }} steps: @@ -53,8 +52,8 @@ jobs: # see discussion https://github.com/haskell/haskell-language-server/pull/4118 # also possible to add more GHCs if we performs better in the future. 
ghc: - - '9.6' - '9.8' + - '9.10' os: - ubuntu-latest @@ -62,7 +61,7 @@ jobs: # change of the strategy may require changing the bootstrapping/run code steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: # By default, the `pull_request` event has a `GITHUB_SHA` env variable @@ -101,46 +100,47 @@ jobs: tar -czf cabal.tar.gz * - name: Upload workspace - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: workspace-${{ matrix.ghc }}-${{ matrix.os }} retention-days: 1 path: workspace.tar.gz - name: Upload .cabal - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: cabal-home-${{ matrix.ghc }}-${{ matrix.os }} retention-days: 1 path: ~/.cabal/cabal.tar.gz bench_example: + if: contains(github.event.pull_request.labels.*.name, 'performance') needs: [bench_init, pre_job] runs-on: ${{ matrix.os }} strategy: fail-fast: false matrix: - ghc: ['9.6', '9.8'] + ghc: ['9.8', '9.10'] os: [ubuntu-latest] - cabal: ['3.10'] + cabal: ['3.14'] example: ['cabal', 'lsp-types'] steps: - - uses: haskell-actions/setup@v2.7.3 + - uses: haskell-actions/setup@v2.7.11 with: ghc-version : ${{ matrix.ghc }} cabal-version: ${{ matrix.cabal }} enable-stack: false - name: Download cabal home - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 with: name: cabal-home-${{ matrix.ghc }}-${{ matrix.os }} path: . - name: Download workspace - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 with: name: workspace-${{ matrix.ghc }}-${{ matrix.os }} path: . @@ -165,7 +165,7 @@ jobs: run: find bench-results -name "*.csv" -or -name "*.svg" -or -name "*.html" | xargs tar -czf benchmark-artifacts.tar.gz - name: Archive benchmarking artifacts - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: bench-results-${{ matrix.example }}-${{ runner.os }}-${{ matrix.ghc }} path: benchmark-artifacts.tar.gz @@ -175,7 +175,7 @@ jobs: run: find bench-results -name "*.log" -or -name "*.hp" | xargs tar -czf benchmark-logs.tar.gz - name: Archive benchmark logs - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: bench-logs-${{ matrix.example }}-${{ runner.os }}-${{ matrix.ghc }} path: benchmark-logs.tar.gz diff --git a/.github/workflows/caching.yml b/.github/workflows/caching.yml index b9e25eee4f..569d380951 100644 --- a/.github/workflows/caching.yml +++ b/.github/workflows/caching.yml @@ -89,10 +89,6 @@ jobs: - ubuntu-latest - macOS-latest - windows-latest - exclude: - # We disable this this combo in test.yml due to long path issues, so we also need to disable it here - - os: windows-latest - ghc: "9.2" steps: - uses: actions/checkout@v3 @@ -105,7 +101,7 @@ jobs: # Fetching from github cache is faster than doing it from hackage # Sources does not change per ghc and ghc version son only doing it # for one matrix job (it is arbitrary) - - if: steps.compiled-deps.outputs.cache-hit != 'true' && runner.os == 'Linux' && matrix.ghc == '9.2' + - if: steps.compiled-deps.outputs.cache-hit != 'true' && runner.os == 'Linux' && matrix.ghc == '9.6' name: Download sources run: | cabal $cabalBuild --only-download --enable-benchmarks --enable-tests @@ -120,7 +116,7 @@ jobs: # We build ghcide with benchs and test enabled to include its dependencies in the cache # (including shake-bench) # Only for the same ghc and os used in the bench workflow, so we save cache space - - if: steps.compiled-deps.outputs.cache-hit != 'true' && runner.os == 'Linux' && matrix.ghc == '9.2' + - if: 
steps.compiled-deps.outputs.cache-hit != 'true' && runner.os == 'Linux' && matrix.ghc == '9.6' name: Build ghcide benchmark run: | cabal $cabalBuild ghcide --enable-benchmarks --enable-tests diff --git a/.github/workflows/nix.yml b/.github/workflows/nix.yml index 7eabbc6d2f..f62a8d1cd1 100644 --- a/.github/workflows/nix.yml +++ b/.github/workflows/nix.yml @@ -49,12 +49,12 @@ jobs: steps: - uses: actions/checkout@v3 - - uses: cachix/install-nix-action@V27 + - uses: cachix/install-nix-action@v31 with: extra_nix_config: | experimental-features = nix-command flakes nix_path: nixpkgs=channel:nixos-unstable - - uses: cachix/cachix-action@v15 + - uses: cachix/cachix-action@v16 with: name: haskell-language-server authToken: ${{ secrets.HLS_CACHIX_AUTH_TOKEN }} diff --git a/.github/workflows/pre-commit.yml b/.github/workflows/pre-commit.yml index 2775ca37ad..40d79afbf2 100644 --- a/.github/workflows/pre-commit.yml +++ b/.github/workflows/pre-commit.yml @@ -27,7 +27,7 @@ jobs: - uses: ./.github/actions/setup-build with: # select a stable GHC version - ghc: 9.2 + ghc: 9.6 os: ${{ runner.os }} shorten-hls: false @@ -53,7 +53,7 @@ jobs: ${{ inputs.cache-prefix }}${{ env.cache-name }}-${{ inputs.os }}-${{ inputs.ghc }}- ${{ inputs.cache-prefix }}${{ env.cache-name }}-${{ inputs.os }}- - - uses: actions/setup-python@v5 + - uses: actions/setup-python@v3 - uses: pre-commit/action@v3.0.1 with: extra_args: --files ${{ needs.file-diff.outputs.git-diff }} diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml index 5dffaaa915..5eb3076d29 100644 --- a/.github/workflows/release.yaml +++ b/.github/workflows/release.yaml @@ -1,1023 +1,4225 @@ -name: Build and release - -on: - push: - tags: - - '*' - schedule: - - cron: '0 2 * * 1' +### DO NOT EDIT - GENERATED FILE +### This file was generated by ./.github/generate-ci/gen_ci.hs +### Edit that file and run ./.github/generate-ci/generate-jobs to regenerate env: CABAL_CACHE_DISABLE: ${{ vars.CABAL_CACHE_DISABLE }} CABAL_CACHE_NONFATAL: ${{ vars.CABAL_CACHE_NONFATAL }} - jobs: - build-linux: - name: Build linux binaries - ## We need the environment here, to have access to the `vars` context. - ## Allows us to specify: `CABAL_CACHE_DISABLE=yes`. - ## The environments can be seen in https://github.com/haskell/haskell-language-server/settings/environments - ## assuming you have the proper permissions. - environment: CI - runs-on: ubuntu-latest + bindist-aarch64-linux-ubuntu2004: + env: + ADD_CABAL_ARGS: '' + ARCH: ARM64 + ARTIFACT: aarch64-linux-ubuntu2004 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + name: bindist-aarch64-linux-ubuntu2004 (Prepare bindist) + needs: + - build-aarch64-linux-ubuntu2004-948 + - build-aarch64-linux-ubuntu2004-967 + - build-aarch64-linux-ubuntu2004-984 + - build-aarch64-linux-ubuntu2004-9101 + - build-aarch64-linux-ubuntu2004-9122 + runs-on: + - self-hosted + - Linux + - ARM64 + - maerwald + steps: + - name: clean and git config for aarch64-linux + run: | + find . 
-mindepth 1 -maxdepth 1 -exec rm -rf -- {} + + git config --global --get-all safe.directory | grep '^\*$' || git config --global --add safe.directory "*" + shell: bash + - name: Checkout + uses: actions/checkout@v4 + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-aarch64-linux-ubuntu2004-948 + path: ./ + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-aarch64-linux-ubuntu2004-967 + path: ./ + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-aarch64-linux-ubuntu2004-984 + path: ./ + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-aarch64-linux-ubuntu2004-9101 + path: ./ + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-aarch64-linux-ubuntu2004-9122 + path: ./ + - name: Unpack aarch64-linux binaries + uses: docker://hasufell/arm64v8-ubuntu-haskell:focal + with: + args: bash .github/scripts/untar.sh + - name: Tar aarch64-linux binaries + uses: docker://hasufell/arm64v8-ubuntu-haskell:focal + with: + args: bash .github/scripts/bindist.sh + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: bindist-aarch64-linux-ubuntu2004 + path: |- + ./out/*.tar.xz + ./out/plan.json/* + ./out/*.zip + retention-days: 2 + bindist-aarch64-mac: + env: + ADD_CABAL_ARGS: '' + ARCH: ARM64 + ARTIFACT: aarch64-apple-darwin + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + HOMEBREW_CHANGE_ARCH_TO_ARM: '1' + MACOSX_DEPLOYMENT_TARGET: '10.13' + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + name: bindist-aarch64-mac (Prepare bindist) + needs: + - build-aarch64-mac-948 + - build-aarch64-mac-967 + - build-aarch64-mac-984 + - build-aarch64-mac-9101 + - build-aarch64-mac-9122 + runs-on: + - self-hosted + - macOS + - ARM64 + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-aarch64-mac-948 + path: ./ + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-aarch64-mac-967 + path: ./ + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-aarch64-mac-984 + path: ./ + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-aarch64-mac-9101 + path: ./ + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-aarch64-mac-9122 + path: ./ + - name: Run build + run: | + bash .github/scripts/brew.sh git coreutils llvm@13 autoconf automake tree + export PATH="$HOME/.brew/bin:$HOME/.brew/sbin:$HOME/.brew/opt/llvm@13/bin:$PATH" + export CC="$HOME/.brew/opt/llvm@13/bin/clang" + export CXX="$HOME/.brew/opt/llvm@13/bin/clang++" + export LD=ld + export AR="$HOME/.brew/opt/llvm@13/bin/llvm-ar" + export RANLIB="$HOME/.brew/opt/llvm@13/bin/llvm-ranlib" + for bindist in out-*.tar ; do + tar xf "${bindist}" + done + unset bindist + bash .github/scripts/bindist.sh + shell: sh + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: bindist-aarch64-apple-darwin + path: |- + ./out/*.tar.xz + ./out/plan.json/* + ./out/*.zip + retention-days: 2 + bindist-x86_64-linux-centos7: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: 
x86_64-linux-centos7 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + name: bindist-x86_64-linux-centos7 (Prepare bindist) + needs: + - build-x86_64-linux-centos7-948 + - build-x86_64-linux-centos7-967 + - build-x86_64-linux-centos7-984 + - build-x86_64-linux-centos7-9101 + - build-x86_64-linux-centos7-9122 + runs-on: + - self-hosted + - linux-space + - maerwald + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-x86_64-linux-centos7-948 + path: ./ + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-x86_64-linux-centos7-967 + path: ./ + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-x86_64-linux-centos7-984 + path: ./ + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-x86_64-linux-centos7-9101 + path: ./ + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-x86_64-linux-centos7-9122 + path: ./ + - name: Bindist + uses: ./.github/actions/bindist-actions/action-centos7 + with: + stage: BINDIST + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: bindist-x86_64-linux-centos7 + path: |- + ./out/*.tar.xz + ./out/plan.json/* + ./out/*.zip + retention-days: 2 + bindist-x86_64-linux-deb10: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-deb10 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + name: bindist-x86_64-linux-deb10 (Prepare bindist) + needs: + - build-x86_64-linux-deb10-948 + - build-x86_64-linux-deb10-967 + - build-x86_64-linux-deb10-984 + - build-x86_64-linux-deb10-9101 + - build-x86_64-linux-deb10-9122 + runs-on: + - self-hosted + - linux-space + - maerwald + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-x86_64-linux-deb10-948 + path: ./ + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-x86_64-linux-deb10-967 + path: ./ + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-x86_64-linux-deb10-984 + path: ./ + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-x86_64-linux-deb10-9101 + path: ./ + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-x86_64-linux-deb10-9122 + path: ./ + - name: Bindist + uses: ./.github/actions/bindist-actions/action-deb10 + with: + stage: BINDIST + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: bindist-x86_64-linux-deb10 + path: |- + ./out/*.tar.xz + ./out/plan.json/* + ./out/*.zip + retention-days: 2 + bindist-x86_64-linux-deb11: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-deb11 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ 
secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + name: bindist-x86_64-linux-deb11 (Prepare bindist) + needs: + - build-x86_64-linux-deb11-948 + - build-x86_64-linux-deb11-967 + - build-x86_64-linux-deb11-984 + - build-x86_64-linux-deb11-9101 + - build-x86_64-linux-deb11-9122 + runs-on: + - self-hosted + - linux-space + - maerwald + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-x86_64-linux-deb11-948 + path: ./ + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-x86_64-linux-deb11-967 + path: ./ + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-x86_64-linux-deb11-984 + path: ./ + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-x86_64-linux-deb11-9101 + path: ./ + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-x86_64-linux-deb11-9122 + path: ./ + - name: Bindist + uses: ./.github/actions/bindist-actions/action-deb11 + with: + stage: BINDIST + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: bindist-x86_64-linux-deb11 + path: |- + ./out/*.tar.xz + ./out/plan.json/* + ./out/*.zip + retention-days: 2 + bindist-x86_64-linux-deb9: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-deb9 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + name: bindist-x86_64-linux-deb9 (Prepare bindist) + needs: + - build-x86_64-linux-deb9-948 + - build-x86_64-linux-deb9-967 + - build-x86_64-linux-deb9-984 + - build-x86_64-linux-deb9-9101 + - build-x86_64-linux-deb9-9122 + runs-on: + - self-hosted + - linux-space + - maerwald + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-x86_64-linux-deb9-948 + path: ./ + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-x86_64-linux-deb9-967 + path: ./ + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-x86_64-linux-deb9-984 + path: ./ + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-x86_64-linux-deb9-9101 + path: ./ + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-x86_64-linux-deb9-9122 + path: ./ + - name: Bindist + uses: ./.github/actions/bindist-actions/action-deb9 + with: + stage: BINDIST + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: bindist-x86_64-linux-deb9 + path: |- + ./out/*.tar.xz + ./out/plan.json/* + ./out/*.zip + retention-days: 2 + bindist-x86_64-linux-fedora27: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-fedora27 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + name: bindist-x86_64-linux-fedora27 (Prepare bindist) + needs: + - build-x86_64-linux-fedora27-948 + - build-x86_64-linux-fedora27-967 + - build-x86_64-linux-fedora27-984 
+ - build-x86_64-linux-fedora27-9101 + - build-x86_64-linux-fedora27-9122 + runs-on: + - self-hosted + - linux-space + - maerwald + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-x86_64-linux-fedora27-948 + path: ./ + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-x86_64-linux-fedora27-967 + path: ./ + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-x86_64-linux-fedora27-984 + path: ./ + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-x86_64-linux-fedora27-9101 + path: ./ + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-x86_64-linux-fedora27-9122 + path: ./ + - name: Bindist + uses: ./.github/actions/bindist-actions/action-fedora27 + with: + stage: BINDIST + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: bindist-x86_64-linux-fedora27 + path: |- + ./out/*.tar.xz + ./out/plan.json/* + ./out/*.zip + retention-days: 2 + bindist-x86_64-linux-fedora33: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-fedora33 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + name: bindist-x86_64-linux-fedora33 (Prepare bindist) + needs: + - build-x86_64-linux-fedora33-948 + - build-x86_64-linux-fedora33-967 + - build-x86_64-linux-fedora33-984 + - build-x86_64-linux-fedora33-9101 + - build-x86_64-linux-fedora33-9122 + runs-on: + - self-hosted + - linux-space + - maerwald + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-x86_64-linux-fedora33-948 + path: ./ + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-x86_64-linux-fedora33-967 + path: ./ + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-x86_64-linux-fedora33-984 + path: ./ + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-x86_64-linux-fedora33-9101 + path: ./ + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-x86_64-linux-fedora33-9122 + path: ./ + - name: Bindist + uses: ./.github/actions/bindist-actions/action-fedora33 + with: + stage: BINDIST + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: bindist-x86_64-linux-fedora33 + path: |- + ./out/*.tar.xz + ./out/plan.json/* + ./out/*.zip + retention-days: 2 + bindist-x86_64-linux-mint193: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-mint193 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + name: bindist-x86_64-linux-mint193 (Prepare bindist) + needs: + - build-x86_64-linux-mint193-948 + - build-x86_64-linux-mint193-967 + - build-x86_64-linux-mint193-984 + - build-x86_64-linux-mint193-9101 + - build-x86_64-linux-mint193-9122 + runs-on: + - self-hosted + - linux-space + - maerwald + steps: + - 
name: Checkout + uses: actions/checkout@v4 + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-x86_64-linux-mint193-948 + path: ./ + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-x86_64-linux-mint193-967 + path: ./ + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-x86_64-linux-mint193-984 + path: ./ + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-x86_64-linux-mint193-9101 + path: ./ + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-x86_64-linux-mint193-9122 + path: ./ + - name: Bindist + uses: ./.github/actions/bindist-actions/action-mint193 + with: + stage: BINDIST + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: bindist-x86_64-linux-mint193 + path: |- + ./out/*.tar.xz + ./out/plan.json/* + ./out/*.zip + retention-days: 2 + bindist-x86_64-linux-mint202: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-mint202 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + name: bindist-x86_64-linux-mint202 (Prepare bindist) + needs: + - build-x86_64-linux-mint202-948 + - build-x86_64-linux-mint202-967 + - build-x86_64-linux-mint202-984 + - build-x86_64-linux-mint202-9101 + - build-x86_64-linux-mint202-9122 + runs-on: + - self-hosted + - linux-space + - maerwald + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-x86_64-linux-mint202-948 + path: ./ + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-x86_64-linux-mint202-967 + path: ./ + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-x86_64-linux-mint202-984 + path: ./ + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-x86_64-linux-mint202-9101 + path: ./ + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-x86_64-linux-mint202-9122 + path: ./ + - name: Bindist + uses: ./.github/actions/bindist-actions/action-mint202 + with: + stage: BINDIST + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: bindist-x86_64-linux-mint202 + path: |- + ./out/*.tar.xz + ./out/plan.json/* + ./out/*.zip + retention-days: 2 + bindist-x86_64-linux-ubuntu1804: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-ubuntu1804 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + name: bindist-x86_64-linux-ubuntu1804 (Prepare bindist) + needs: + - build-x86_64-linux-ubuntu1804-948 + - build-x86_64-linux-ubuntu1804-967 + - build-x86_64-linux-ubuntu1804-984 + - build-x86_64-linux-ubuntu1804-9101 + - build-x86_64-linux-ubuntu1804-9122 + runs-on: + - self-hosted + - linux-space + - maerwald + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: 
artifacts-build-x86_64-linux-ubuntu1804-948 + path: ./ + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-x86_64-linux-ubuntu1804-967 + path: ./ + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-x86_64-linux-ubuntu1804-984 + path: ./ + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-x86_64-linux-ubuntu1804-9101 + path: ./ + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-x86_64-linux-ubuntu1804-9122 + path: ./ + - name: Bindist + uses: ./.github/actions/bindist-actions/action-ubuntu1804 + with: + stage: BINDIST + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: bindist-x86_64-linux-ubuntu1804 + path: |- + ./out/*.tar.xz + ./out/plan.json/* + ./out/*.zip + retention-days: 2 + bindist-x86_64-linux-ubuntu2004: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-ubuntu2004 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + name: bindist-x86_64-linux-ubuntu2004 (Prepare bindist) + needs: + - build-x86_64-linux-ubuntu2004-948 + - build-x86_64-linux-ubuntu2004-967 + - build-x86_64-linux-ubuntu2004-984 + - build-x86_64-linux-ubuntu2004-9101 + - build-x86_64-linux-ubuntu2004-9122 + runs-on: + - self-hosted + - linux-space + - maerwald + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-x86_64-linux-ubuntu2004-948 + path: ./ + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-x86_64-linux-ubuntu2004-967 + path: ./ + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-x86_64-linux-ubuntu2004-984 + path: ./ + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-x86_64-linux-ubuntu2004-9101 + path: ./ + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-x86_64-linux-ubuntu2004-9122 + path: ./ + - name: Bindist + uses: ./.github/actions/bindist-actions/action-ubuntu2004 + with: + stage: BINDIST + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: bindist-x86_64-linux-ubuntu2004 + path: |- + ./out/*.tar.xz + ./out/plan.json/* + ./out/*.zip + retention-days: 2 + bindist-x86_64-linux-ubuntu2204: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-ubuntu2204 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + name: bindist-x86_64-linux-ubuntu2204 (Prepare bindist) + needs: + - build-x86_64-linux-ubuntu2204-948 + - build-x86_64-linux-ubuntu2204-967 + - build-x86_64-linux-ubuntu2204-984 + - build-x86_64-linux-ubuntu2204-9101 + - build-x86_64-linux-ubuntu2204-9122 + runs-on: + - self-hosted + - linux-space + - maerwald + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-x86_64-linux-ubuntu2204-948 + path: ./ + - name: Download 
artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-x86_64-linux-ubuntu2204-967 + path: ./ + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-x86_64-linux-ubuntu2204-984 + path: ./ + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-x86_64-linux-ubuntu2204-9101 + path: ./ + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-x86_64-linux-ubuntu2204-9122 + path: ./ + - name: Bindist + uses: ./.github/actions/bindist-actions/action-ubuntu2204 + with: + stage: BINDIST + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: bindist-x86_64-linux-ubuntu2204 + path: |- + ./out/*.tar.xz + ./out/plan.json/* + ./out/*.zip + retention-days: 2 + bindist-x86_64-linux-unknown: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-unknown + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + name: bindist-x86_64-linux-unknown (Prepare bindist) + needs: + - build-x86_64-linux-unknown-948 + - build-x86_64-linux-unknown-967 + - build-x86_64-linux-unknown-984 + - build-x86_64-linux-unknown-9101 + - build-x86_64-linux-unknown-9122 + runs-on: + - self-hosted + - linux-space + - maerwald + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-x86_64-linux-unknown-948 + path: ./ + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-x86_64-linux-unknown-967 + path: ./ + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-x86_64-linux-unknown-984 + path: ./ + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-x86_64-linux-unknown-9101 + path: ./ + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-x86_64-linux-unknown-9122 + path: ./ + - name: Bindist + uses: ./.github/actions/bindist-actions/action-unknown + with: + stage: BINDIST + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: bindist-x86_64-linux-unknown + path: |- + ./out/*.tar.xz + ./out/plan.json/* + ./out/*.zip + retention-days: 2 + bindist-x86_64-mac: + env: + ADD_CABAL_ARGS: '' + ARCH: '64' + ARTIFACT: x86_64-apple-darwin + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + MACOSX_DEPLOYMENT_TARGET: '10.13' + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + name: bindist-x86_64-mac (Prepare bindist) + needs: + - build-x86_64-mac-948 + - build-x86_64-mac-967 + - build-x86_64-mac-984 + - build-x86_64-mac-9101 + - build-x86_64-mac-9122 + runs-on: + - macOS-13 + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-x86_64-mac-948 + path: ./ + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-x86_64-mac-967 + path: ./ + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-x86_64-mac-984 + path: ./ + - name: Download artifacts + uses: 
actions/download-artifact@v4 + with: + name: artifacts-build-x86_64-mac-9101 + path: ./ + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-x86_64-mac-9122 + path: ./ + - name: Create bindist + run: | + brew install coreutils tree + for bindist in out-*.tar ; do + tar xf "${bindist}" + done + unset bindist + bash .github/scripts/bindist.sh + shell: sh + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: bindist-x86_64-apple-darwin + path: |- + ./out/*.tar.xz + ./out/plan.json/* + ./out/*.zip + retention-days: 2 + bindist-x86_64-windows: env: + ADD_CABAL_ARGS: '' + ARCH: '64' + ARTIFACT: x86_64-mingw64 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: zip + TZ: Asia/Singapore + name: bindist-x86_64-windows (Prepare bindist) + needs: + - build-x86_64-windows-948 + - build-x86_64-windows-967 + - build-x86_64-windows-984 + - build-x86_64-windows-9101 + - build-x86_64-windows-9122 + runs-on: + - windows-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-x86_64-windows-948 + path: ./out + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-x86_64-windows-967 + path: ./out + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-x86_64-windows-984 + path: ./out + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-x86_64-windows-9101 + path: ./out + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-x86_64-windows-9122 + path: ./out + - name: Run build + run: | + C:\msys64\usr\bin\bash -lc "pacman --disable-download-timeout --noconfirm -Syuu" + C:\msys64\usr\bin\bash -lc "pacman --disable-download-timeout --noconfirm -Syuu" + C:\msys64\usr\bin\bash -lc "pacman --disable-download-timeout --noconfirm -S unzip zip git" + taskkill /F /FI "MODULES eq msys-2.0.dll" + $env:CHERE_INVOKING = 1 + $env:MSYS2_PATH_TYPE = "inherit" + C:\msys64\usr\bin\bash -lc "bash .github/scripts/bindist.sh" + shell: pwsh + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: bindist-x86_64-mingw64 + path: |- + ./out/*.tar.xz + ./out/plan.json/* + ./out/*.zip + retention-days: 2 + build-aarch64-linux-ubuntu2004-9101: + env: + ADD_CABAL_ARGS: '' + ARCH: ARM64 + ARTIFACT: aarch64-linux-ubuntu2004 AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive S3_HOST: ${{ secrets.S3_HOST }} TARBALL_EXT: tar.xz - ARCH: 64 - DEBIAN_FRONTEND: noninteractive - TZ: Asia/Singapore - strategy: - fail-fast: false - matrix: - ghc: ["9.8.2", "9.6.5", "9.4.8", "9.2.8"] - platform: [ { image: "debian:9" - , installCmd: "sed -i s/deb.debian.org/archive.debian.org/g /etc/apt/sources.list && sed -i 's|security.debian.org|archive.debian.org/|g' /etc/apt/sources.list && sed -i /-updates/d /etc/apt/sources.list && apt-get update && apt-get install -y" - , toolRequirements: "libnuma-dev zlib1g-dev libgmp-dev libgmp10 libssl-dev liblzma-dev libbz2-dev git wget lsb-release software-properties-common gnupg2 apt-transport-https gcc autoconf automake build-essential curl ghc gzip libffi-dev libncurses-dev libncurses5 libtinfo5 
patchelf" - , DISTRO: "Debian" - , ARTIFACT: "x86_64-linux-deb9" - , ADD_CABAL_ARGS: "--enable-split-sections" - }, - { image: "debian:10" - , installCmd: "apt-get update && apt-get install -y" - , toolRequirements: "libnuma-dev zlib1g-dev libgmp-dev libgmp10 libssl-dev liblzma-dev libbz2-dev git wget lsb-release software-properties-common gnupg2 apt-transport-https gcc autoconf automake build-essential curl ghc gzip libffi-dev libncurses-dev libncurses5 libtinfo5 patchelf" - , DISTRO: "Debian" - , ARTIFACT: "x86_64-linux-deb10" - , ADD_CABAL_ARGS: "--enable-split-sections" - }, - { image: "debian:11" - , installCmd: "apt-get update && apt-get install -y" - , toolRequirements: "libnuma-dev zlib1g-dev libgmp-dev libgmp10 libssl-dev liblzma-dev libbz2-dev git wget lsb-release software-properties-common gnupg2 apt-transport-https gcc autoconf automake build-essential curl ghc gzip libffi-dev libncurses-dev libncurses5 libtinfo5 patchelf" - , DISTRO: "Debian" - , ARTIFACT: "x86_64-linux-deb11" - , ADD_CABAL_ARGS: "--enable-split-sections" - }, - { image: "ubuntu:18.04" - , installCmd: "apt-get update && apt-get install -y" - , toolRequirements: "libnuma-dev zlib1g-dev libgmp-dev libgmp10 libssl-dev liblzma-dev libbz2-dev git wget lsb-release software-properties-common gnupg2 apt-transport-https gcc autoconf automake build-essential curl ghc gzip libffi-dev libncurses-dev libncurses5 libtinfo5 patchelf" - , DISTRO: "Ubuntu" - , ARTIFACT: "x86_64-linux-ubuntu18.04" - , ADD_CABAL_ARGS: "--enable-split-sections" - }, - { image: "ubuntu:20.04" - , installCmd: "apt-get update && apt-get install -y" - , toolRequirements: "libnuma-dev zlib1g-dev libgmp-dev libgmp10 libssl-dev liblzma-dev libbz2-dev git wget lsb-release software-properties-common gnupg2 apt-transport-https gcc autoconf automake build-essential curl ghc gzip libffi-dev libncurses-dev libncurses5 libtinfo5 patchelf" - , DISTRO: "Ubuntu" - , ARTIFACT: "x86_64-linux-ubuntu20.04" - , ADD_CABAL_ARGS: "--enable-split-sections" - }, - { image: "ubuntu:22.04" - , installCmd: "apt-get update && apt-get install -y" - , toolRequirements: "libnuma-dev zlib1g-dev libgmp-dev libgmp10 libssl-dev liblzma-dev libbz2-dev git wget lsb-release software-properties-common gnupg2 apt-transport-https gcc autoconf automake build-essential curl ghc gzip libffi-dev libncurses-dev libncurses5 libtinfo5 patchelf" - , DISTRO: "Ubuntu" - , ARTIFACT: "x86_64-linux-ubuntu22.04" - , ADD_CABAL_ARGS: "--enable-split-sections" - }, - { image: "linuxmintd/mint19.3-amd64" - , installCmd: "apt-get update && apt-get install -y" - , toolRequirements: "libnuma-dev zlib1g-dev libgmp-dev libgmp10 libssl-dev liblzma-dev libbz2-dev git wget lsb-release software-properties-common gnupg2 apt-transport-https gcc autoconf automake build-essential curl ghc gzip libffi-dev libncurses-dev libncurses5 libtinfo5 patchelf" - , DISTRO: "Mint" - , ARTIFACT: "x86_64-linux-mint19.3" - , ADD_CABAL_ARGS: "--enable-split-sections" - }, - { image: "linuxmintd/mint20.2-amd64" - , installCmd: "apt-get update && apt-get install -y" - , toolRequirements: "libnuma-dev zlib1g-dev libgmp-dev libgmp10 libssl-dev liblzma-dev libbz2-dev git wget lsb-release software-properties-common gnupg2 apt-transport-https gcc autoconf automake build-essential curl ghc gzip libffi-dev libncurses-dev libncurses5 libtinfo5 patchelf" - , DISTRO: "Mint" - , ARTIFACT: "x86_64-linux-mint20.2" - , ADD_CABAL_ARGS: "--enable-split-sections" - }, - { image: "fedora:27" - , installCmd: "dnf install -y" - , toolRequirements: "autoconf 
automake binutils bzip2 coreutils curl elfutils-devel elfutils-libs findutils gcc gcc-c++ git gmp gmp-devel jq lbzip2 make ncurses ncurses-compat-libs ncurses-devel openssh-clients patch perl pxz python3 sqlite sudo wget which xz zlib-devel patchelf" - , DISTRO: "Fedora" - , ARTIFACT: "x86_64-linux-fedora27" - , ADD_CABAL_ARGS: "--enable-split-sections" - }, - { image: "fedora:33" - , installCmd: "dnf install -y" - , toolRequirements: "autoconf automake binutils bzip2 coreutils curl elfutils-devel elfutils-libs findutils gcc gcc-c++ git gmp gmp-devel jq lbzip2 make ncurses ncurses-compat-libs ncurses-devel openssh-clients patch perl pxz python3 sqlite sudo wget which xz zlib-devel patchelf" - , DISTRO: "Fedora" - , ARTIFACT: "x86_64-linux-fedora33" - , ADD_CABAL_ARGS: "--enable-split-sections" - }, - { image: "centos:7" - , installCmd: "yum -y install epel-release && yum install -y" - , toolRequirements: "autoconf automake binutils bzip2 coreutils curl elfutils-devel elfutils-libs findutils gcc gcc-c++ git gmp gmp-devel jq lbzip2 make ncurses ncurses-compat-libs ncurses-devel openssh-clients patch perl pxz python3 sqlite sudo wget which xz zlib-devel patchelf" - , DISTRO: "CentOS" - , ARTIFACT: "x86_64-linux-centos7" - , ADD_CABAL_ARGS: "--enable-split-sections" - } - ] - # TODO: rm - # Instead of manually adding the Unknown Linux Bindist jobs here, - # it should be part of the matrix above. - # However, due to GHC 9.4 shenanigans, we need some special logic. - # https://gitlab.haskell.org/ghc/ghc/-/issues/22268 - # - # Perhaps we can migrate *all* unknown linux builds to a uniform - # image. - include: - - ghc: 9.2.8 - platform: - { image: "rockylinux:8" - , installCmd: "yum -y install epel-release && yum install -y --allowerasing" - , toolRequirements: "autoconf automake binutils bzip2 coreutils curl elfutils-devel elfutils-libs findutils gcc gcc-c++ git gmp gmp-devel jq lbzip2 make ncurses ncurses-compat-libs ncurses-devel openssh-clients patch perl pxz python3 sqlite sudo wget which xz zlib-devel patchelf" - , DISTRO: "Unknown" - , ARTIFACT: "x86_64-linux-unknown" - , ADD_CABAL_ARGS: "--enable-split-sections" - } - - ghc: 9.4.8 - platform: - { image: "fedora:27" - , installCmd: "dnf install -y" - , toolRequirements: "autoconf automake binutils bzip2 coreutils curl elfutils-devel elfutils-libs findutils gcc gcc-c++ git gmp gmp-devel jq lbzip2 make ncurses ncurses-compat-libs ncurses-devel openssh-clients patch perl pxz python3 sqlite sudo wget which xz zlib-devel patchelf" - , DISTRO: "Unknown" - , ARTIFACT: "x86_64-linux-unknown" - , ADD_CABAL_ARGS: "--enable-split-sections" - } - - ghc: 9.6.5 - platform: - { image: "rockylinux:8" - , installCmd: "yum -y install epel-release && yum install -y --allowerasing" - , toolRequirements: "autoconf automake binutils bzip2 coreutils curl elfutils-devel elfutils-libs findutils gcc gcc-c++ git gmp gmp-devel jq lbzip2 make ncurses ncurses-compat-libs ncurses-devel openssh-clients patch perl pxz python3 sqlite sudo wget which xz zlib-devel patchelf" - , DISTRO: "Unknown" - , ARTIFACT: "x86_64-linux-unknown" - , ADD_CABAL_ARGS: "--enable-split-sections" - } - - ghc: 9.8.2 - platform: - { image: "rockylinux:8" - , installCmd: "yum -y install epel-release && yum install -y --allowerasing" - , toolRequirements: "autoconf automake binutils bzip2 coreutils curl elfutils-devel elfutils-libs findutils gcc gcc-c++ git gmp gmp-devel jq lbzip2 make ncurses ncurses-compat-libs ncurses-devel openssh-clients patch perl pxz python3 sqlite sudo wget which xz 
zlib-devel patchelf" - , DISTRO: "Unknown" - , ARTIFACT: "x86_64-linux-unknown" - , ADD_CABAL_ARGS: "--enable-split-sections" - } - container: - image: ${{ matrix.platform.image }} - steps: - - name: Install requirements - shell: sh - run: | - ${{ matrix.platform.installCmd }} curl bash git ${{ matrix.platform.toolRequirements }} - - - if: matrix.platform.DISTRO == 'Unknown' - run: | - echo "NAME=Linux" > /etc/os-release - echo "ID=linux" >> /etc/os-release - echo "PRETTY_NAME=Linux" >> /etc/os-release - - - uses: actions/checkout@v3 - - - name: Run build - run: | - bash .github/scripts/build.sh - tar cf out-${ARTIFACT}-${GHC_VERSION}.tar out/ store/ - - env: - ARTIFACT: ${{ matrix.platform.ARTIFACT }} - DISTRO: ${{ matrix.platform.DISTRO }} - ADD_CABAL_ARGS: ${{ matrix.platform.ADD_CABAL_ARGS }} - GHC_VERSION: ${{ matrix.ghc }} - - - if: always() - name: Upload artifact - uses: actions/upload-artifact@v3 - with: - if-no-files-found: error - retention-days: 2 - name: artifacts-${{ matrix.platform.ARTIFACT }} - path: | - ./out-${{ matrix.platform.ARTIFACT }}-${{ matrix.ghc }}.tar - - build-arm: - name: Build ARM binary - ## We need the environment here, to have access to the `vars` context. - ## Allows us to specify: `CABAL_CACHE_DISABLE=yes`. - ## The environments can be seen in https://github.com/haskell/haskell-language-server/settings/environments - ## assuming you have the proper permissions. - environment: CI - runs-on: [self-hosted, Linux, ARM64, maerwald] - env: - AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} - AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} - S3_HOST: ${{ secrets.S3_HOST }} - TARBALL_EXT: tar.xz - ADD_CABAL_ARGS: "" - DEBIAN_FRONTEND: noninteractive - TZ: Asia/Singapore - ARTIFACT: "aarch64-linux-ubuntu20" + TZ: Asia/Singapore + environment: CI + name: build-aarch64-linux-ubuntu2004-9101 (Build binaries) + runs-on: + - self-hosted + - Linux + - ARM64 + - maerwald + steps: + - name: clean and git config for aarch64-linux + run: | + find . -mindepth 1 -maxdepth 1 -exec rm -rf -- {} + + git config --global --get-all safe.directory | grep '^\*$' || git config --global --add safe.directory "*" + shell: bash + - name: Checkout + uses: actions/checkout@v4 + - env: + GHC_VERSION: 9.10.1 + name: Build aarch64-linux binaries + uses: docker://hasufell/arm64v8-ubuntu-haskell:focal + with: + args: bash .github/scripts/build.sh + - env: + GHC_VERSION: 9.10.1 + name: Tar aarch64-linux binaries + uses: docker://hasufell/arm64v8-ubuntu-haskell:focal + with: + args: bash .github/scripts/tar.sh + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-aarch64-linux-ubuntu2004-9101 + path: out-aarch64-linux-ubuntu2004-9.10.1.tar + retention-days: 2 + build-aarch64-linux-ubuntu2004-9122: + env: + ADD_CABAL_ARGS: '' ARCH: ARM64 - DISTRO: Ubuntu - strategy: - fail-fast: true - matrix: - ghc: ["9.8.2", "9.6.5", "9.4.8", "9.2.8" ] - steps: - - uses: docker://arm64v8/ubuntu:focal - name: Cleanup (aarch64 linux) - with: - args: "find . 
-mindepth 1 -maxdepth 1 -exec rm -rf -- {} +" - - - name: git config - run: | - git config --global --get-all safe.directory | grep '^\*$' || git config --global --add safe.directory "*" - shell: bash - - - name: Checkout code - uses: actions/checkout@v3 - - - uses: docker://hasufell/arm64v8-ubuntu-haskell:focal - name: Run build (aarch64 linux) - with: - args: bash .github/scripts/build.sh - env: - GHC_VERSION: ${{ matrix.ghc }} - - - uses: docker://hasufell/arm64v8-ubuntu-haskell:focal - name: Run build (aarch64 linux) - with: - args: bash .github/scripts/tar.sh - env: - GHC_VERSION: ${{ matrix.ghc }} - - - if: always() - name: Upload artifact - uses: actions/upload-artifact@v3 - with: - if-no-files-found: error - retention-days: 2 - name: artifacts-arm - path: | - ./out-${{ env.ARTIFACT }}-${{ matrix.ghc }}.tar - - build-mac-x86_64: - name: Build binary (Mac x86_64) - ## We need the environment here, to have access to the `vars` context. - ## Allows us to specify: `CABAL_CACHE_DISABLE=yes`. - ## The environments can be seen in https://github.com/haskell/haskell-language-server/settings/environments - ## assuming you have the proper permissions. - environment: CI - runs-on: macOS-11 - env: - MACOSX_DEPLOYMENT_TARGET: 10.13 - AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} - AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} - S3_HOST: ${{ secrets.S3_HOST }} - ADD_CABAL_ARGS: "" - ARTIFACT: "x86_64-apple-darwin" - ARCH: 64 - TARBALL_EXT: tar.xz - DISTRO: na - strategy: - fail-fast: false - matrix: - ghc: ["9.8.2", "9.6.5", "9.4.8", "9.2.8"] - steps: - - name: Checkout code - uses: actions/checkout@v3 - - - name: Run build - run: | - brew install coreutils tree - bash .github/scripts/build.sh - tar cf out-${ARTIFACT}-${GHC_VERSION}.tar out/ store/ - env: - GHC_VERSION: ${{ matrix.ghc }} - - - if: always() - name: Upload artifact - uses: actions/upload-artifact@v3 - with: - if-no-files-found: error - retention-days: 2 - name: artifacts-mac-x86_64 - path: | - ./out-${{ env.ARTIFACT }}-${{ matrix.ghc }}.tar - - build-mac-aarch64: - name: Build binary (Mac aarch64) - ## We need the environment here, to have access to the `vars` context. - ## Allows us to specify: `CABAL_CACHE_DISABLE=yes`. - ## The environments can be seen in https://github.com/haskell/haskell-language-server/settings/environments - ## assuming you have the proper permissions. - environment: CI - runs-on: [self-hosted, macOS, ARM64] - env: - MACOSX_DEPLOYMENT_TARGET: 10.13 - AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} - AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} - S3_HOST: ${{ secrets.S3_HOST }} - ADD_CABAL_ARGS: "" - ARTIFACT: "aarch64-apple-darwin" + ARTIFACT: aarch64-linux-ubuntu2004 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: build-aarch64-linux-ubuntu2004-9122 (Build binaries) + runs-on: + - self-hosted + - Linux + - ARM64 + - maerwald + steps: + - name: clean and git config for aarch64-linux + run: | + find . 
-mindepth 1 -maxdepth 1 -exec rm -rf -- {} + + git config --global --get-all safe.directory | grep '^\*$' || git config --global --add safe.directory "*" + shell: bash + - name: Checkout + uses: actions/checkout@v4 + - env: + GHC_VERSION: 9.12.2 + name: Build aarch64-linux binaries + uses: docker://hasufell/arm64v8-ubuntu-haskell:focal + with: + args: bash .github/scripts/build.sh + - env: + GHC_VERSION: 9.12.2 + name: Tar aarch64-linux binaries + uses: docker://hasufell/arm64v8-ubuntu-haskell:focal + with: + args: bash .github/scripts/tar.sh + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-aarch64-linux-ubuntu2004-9122 + path: out-aarch64-linux-ubuntu2004-9.12.2.tar + retention-days: 2 + build-aarch64-linux-ubuntu2004-948: + env: + ADD_CABAL_ARGS: '' ARCH: ARM64 + ARTIFACT: aarch64-linux-ubuntu2004 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} TARBALL_EXT: tar.xz - DISTRO: na - HOMEBREW_CHANGE_ARCH_TO_ARM: 1 - strategy: - fail-fast: false - matrix: - ghc: ["9.8.2", "9.6.5", "9.4.8", "9.2.8"] - steps: - - name: Checkout code - uses: actions/checkout@v3 - - - name: Run build - run: | - bash .github/scripts/brew.sh git coreutils autoconf automake tree - export PATH="$HOME/.brew/bin:$HOME/.brew/sbin:$PATH" - export LD=ld - bash .github/scripts/build.sh - tar cf out-${ARTIFACT}-${GHC_VERSION}.tar out/ store/ - env: - GHC_VERSION: ${{ matrix.ghc }} - - - if: always() - name: Upload artifact - uses: actions/upload-artifact@v3 - with: - if-no-files-found: error - retention-days: 2 - name: artifacts-mac-aarch64 - path: | - ./out-${{ env.ARTIFACT }}-${{ matrix.ghc }}.tar - - build-win: - name: Build binary (Win) - ## We need the environment here, to have access to the `vars` context. - ## Allows us to specify: `CABAL_CACHE_DISABLE=yes`. - ## The environments can be seen in https://github.com/haskell/haskell-language-server/settings/environments - ## assuming you have the proper permissions. 
- environment: CI - runs-on: windows-latest - env: - AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} - AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} - S3_HOST: ${{ secrets.S3_HOST }} - ADD_CABAL_ARGS: "" - ARTIFACT: "x86_64-mingw64" - ARCH: 64 - TARBALL_EXT: "zip" - DISTRO: na - strategy: - fail-fast: false - matrix: - ghc: ["9.8.2", "9.6.5", "9.4.8", "9.2.8"] - steps: - - name: install windows deps - shell: pwsh - run: | - C:\msys64\usr\bin\bash -lc "pacman --disable-download-timeout --noconfirm -Syuu" - C:\msys64\usr\bin\bash -lc "pacman --disable-download-timeout --noconfirm -Syuu" - C:\msys64\usr\bin\bash -lc "pacman --disable-download-timeout --noconfirm -S make mingw-w64-x86_64-clang curl autoconf mingw-w64-x86_64-pkgconf ca-certificates base-devel gettext autoconf make libtool automake python p7zip patch unzip zip git" - taskkill /F /FI "MODULES eq msys-2.0.dll" - - - name: Checkout code - uses: actions/checkout@v3 - - - name: Run build (windows) - run: | - $env:CHERE_INVOKING = 1 - $env:MSYS2_PATH_TYPE = "inherit" - $ErrorActionPreference = "Stop" - C:\msys64\usr\bin\bash -lc "bash .github/scripts/build.sh" - shell: pwsh - env: - GHC_VERSION: ${{ matrix.ghc }} - - - if: always() - name: Upload artifact - uses: actions/upload-artifact@v3 - with: - if-no-files-found: error - retention-days: 2 - name: artifacts-win - path: | - ./out/* - - bindist-linux: - name: Tar linux bindists (linux) - runs-on: [self-hosted, linux-space, maerwald] - needs: ["build-linux"] - env: - TARBALL_EXT: tar.xz - ARCH: 64 - DEBIAN_FRONTEND: noninteractive - TZ: Asia/Singapore - strategy: - fail-fast: false - matrix: - include: - - image: debian:9 - installCmd: sed -i s/deb.debian.org/archive.debian.org/g /etc/apt/sources.list && sed -i 's|security.debian.org|archive.debian.org/|g' /etc/apt/sources.list && sed -i /-updates/d /etc/apt/sources.list && apt-get update && apt-get install -y - toolRequirements: libnuma-dev zlib1g-dev libgmp-dev libgmp10 libssl-dev liblzma-dev libbz2-dev git wget lsb-release software-properties-common gnupg2 apt-transport-https gcc autoconf automake build-essential curl ghc gzip libffi-dev libncurses-dev libncurses5 libtinfo5 patchelf tree - DISTRO: Debian - ARTIFACT: "x86_64-linux-deb9" - - image: debian:10 - installCmd: apt-get update && apt-get install -y - toolRequirements: libnuma-dev zlib1g-dev libgmp-dev libgmp10 libssl-dev liblzma-dev libbz2-dev git wget lsb-release software-properties-common gnupg2 apt-transport-https gcc autoconf automake build-essential curl ghc gzip libffi-dev libncurses-dev libncurses5 libtinfo5 patchelf tree - DISTRO: Debian - ARTIFACT: "x86_64-linux-deb10" - - image: debian:11 - installCmd: apt-get update && apt-get install -y - toolRequirements: libnuma-dev zlib1g-dev libgmp-dev libgmp10 libssl-dev liblzma-dev libbz2-dev git wget lsb-release software-properties-common gnupg2 apt-transport-https gcc autoconf automake build-essential curl ghc gzip libffi-dev libncurses-dev libncurses5 libtinfo5 patchelf tree - DISTRO: Debian - ARTIFACT: "x86_64-linux-deb11" - - image: ubuntu:18.04 - installCmd: apt-get update && apt-get install -y - toolRequirements: libnuma-dev zlib1g-dev libgmp-dev libgmp10 libssl-dev liblzma-dev libbz2-dev git wget lsb-release software-properties-common gnupg2 apt-transport-https gcc autoconf automake build-essential curl ghc gzip libffi-dev libncurses-dev libncurses5 libtinfo5 patchelf tree - DISTRO: Ubuntu - ARTIFACT: "x86_64-linux-ubuntu18.04" - - image: ubuntu:20.04 - installCmd: apt-get update && apt-get 
install -y - toolRequirements: libnuma-dev zlib1g-dev libgmp-dev libgmp10 libssl-dev liblzma-dev libbz2-dev git wget lsb-release software-properties-common gnupg2 apt-transport-https gcc autoconf automake build-essential curl ghc gzip libffi-dev libncurses-dev libncurses5 libtinfo5 patchelf tree - DISTRO: Ubuntu - ARTIFACT: "x86_64-linux-ubuntu20.04" - - image: ubuntu:22.04 - installCmd: apt-get update && apt-get install -y - toolRequirements: libnuma-dev zlib1g-dev libgmp-dev libgmp10 libssl-dev liblzma-dev libbz2-dev git wget lsb-release software-properties-common gnupg2 apt-transport-https gcc autoconf automake build-essential curl ghc gzip libffi-dev libncurses-dev libncurses5 libtinfo5 patchelf tree - DISTRO: Ubuntu - ARTIFACT: "x86_64-linux-ubuntu22.04" - - image: fedora:27 - installCmd: dnf install -y - toolRequirements: autoconf automake binutils bzip2 coreutils curl elfutils-devel elfutils-libs findutils gcc gcc-c++ git gmp gmp-devel jq lbzip2 make ncurses ncurses-compat-libs ncurses-devel openssh-clients patch perl pxz python3 sqlite sudo wget which xz zlib-devel patchelf tree - DISTRO: Fedora - ARTIFACT: "x86_64-linux-fedora27" - - image: fedora:33 - installCmd: dnf install -y - toolRequirements: autoconf automake binutils bzip2 coreutils curl elfutils-devel elfutils-libs findutils gcc gcc-c++ git gmp gmp-devel jq lbzip2 make ncurses ncurses-compat-libs ncurses-devel openssh-clients patch perl pxz python3 sqlite sudo wget which xz zlib-devel patchelf tree - DISTRO: Fedora - ARTIFACT: "x86_64-linux-fedora33" - - image: centos:7 - installCmd: yum -y install epel-release && yum install -y - toolRequirements: autoconf automake binutils bzip2 coreutils curl elfutils-devel elfutils-libs findutils gcc gcc-c++ git gmp gmp-devel jq lbzip2 make ncurses ncurses-compat-libs ncurses-devel openssh-clients patch perl pxz python3 sqlite sudo wget which xz zlib-devel patchelf tree - DISTRO: CentOS - ARTIFACT: "x86_64-linux-centos7" - - image: linuxmintd/mint19.3-amd64 - installCmd: apt-get update && apt-get install -y - toolRequirements: libnuma-dev zlib1g-dev libgmp-dev libgmp10 libssl-dev liblzma-dev libbz2-dev git wget lsb-release software-properties-common gnupg2 apt-transport-https gcc autoconf automake build-essential curl ghc gzip libffi-dev libncurses-dev libncurses5 libtinfo5 patchelf tree - DISTRO: Mint - ARTIFACT: "x86_64-linux-mint19.3" - - image: "fedora:33" - installCmd: "dnf install -y" - toolRequirements: "autoconf automake binutils bzip2 coreutils curl elfutils-devel elfutils-libs findutils gcc gcc-c++ git gmp gmp-devel jq lbzip2 make ncurses ncurses-compat-libs ncurses-devel openssh-clients patch perl pxz python3 sqlite sudo wget which xz zlib-devel patchelf tree" - DISTRO: "Unknown" - ARTIFACT: "x86_64-linux-unknown" - - image: linuxmintd/mint20.2-amd64 - installCmd: apt-get update && apt-get install -y - toolRequirements: libnuma-dev zlib1g-dev libgmp-dev libgmp10 libssl-dev liblzma-dev libbz2-dev git wget lsb-release software-properties-common gnupg2 apt-transport-https gcc autoconf automake build-essential curl ghc gzip libffi-dev libncurses-dev libncurses5 libtinfo5 patchelf tree - DISTRO: Mint - ARTIFACT: "x86_64-linux-mint20.2" - container: - image: ${{ matrix.image }} - steps: - - name: Install requirements - shell: sh - run: | - ${{ matrix.installCmd }} curl bash git ${{ matrix.toolRequirements }} - - - if: matrix.DISTRO == 'Unknown' - run: | - echo "NAME=Linux" > /etc/os-release - echo "ID=linux" >> /etc/os-release - echo "PRETTY_NAME=Linux" >> /etc/os-release - - - 
uses: actions/checkout@v3 - - - uses: actions/download-artifact@v3 - with: - name: artifacts-${{ matrix.ARTIFACT }} - path: ./ - - - name: Create bindist - run: | - set -eux - for bindist in out-*.tar ; do - tar -xf "${bindist}" - done - unset bindist - bash .github/scripts/bindist.sh - env: - ARTIFACT: ${{ matrix.ARTIFACT }} - - - name: Upload bindist - uses: actions/upload-artifact@v3 - with: - if-no-files-found: error - name: bindists-${{ matrix.ARTIFACT }} - path: | - ./out/*.tar.xz - ./out/plan.json/* - - - uses: geekyeggo/delete-artifact@v2 - with: - name: artifacts-${{ matrix.ARTIFACT }} - - bindist-arm: - name: Tar linux bindists (arm) - runs-on: [self-hosted, Linux, ARM64, maerwald] - needs: ["build-arm"] + TZ: Asia/Singapore + environment: CI + name: build-aarch64-linux-ubuntu2004-948 (Build binaries) + runs-on: + - self-hosted + - Linux + - ARM64 + - maerwald + steps: + - name: clean and git config for aarch64-linux + run: | + find . -mindepth 1 -maxdepth 1 -exec rm -rf -- {} + + git config --global --get-all safe.directory | grep '^\*$' || git config --global --add safe.directory "*" + shell: bash + - name: Checkout + uses: actions/checkout@v4 + - env: + GHC_VERSION: 9.4.8 + name: Build aarch64-linux binaries + uses: docker://hasufell/arm64v8-ubuntu-haskell:focal + with: + args: bash .github/scripts/build.sh + - env: + GHC_VERSION: 9.4.8 + name: Tar aarch64-linux binaries + uses: docker://hasufell/arm64v8-ubuntu-haskell:focal + with: + args: bash .github/scripts/tar.sh + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-aarch64-linux-ubuntu2004-948 + path: out-aarch64-linux-ubuntu2004-9.4.8.tar + retention-days: 2 + build-aarch64-linux-ubuntu2004-967: env: + ADD_CABAL_ARGS: '' + ARCH: ARM64 + ARTIFACT: aarch64-linux-ubuntu2004 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: build-aarch64-linux-ubuntu2004-967 (Build binaries) + runs-on: + - self-hosted + - Linux + - ARM64 + - maerwald + steps: + - name: clean and git config for aarch64-linux + run: | + find . -mindepth 1 -maxdepth 1 -exec rm -rf -- {} + + git config --global --get-all safe.directory | grep '^\*$' || git config --global --add safe.directory "*" + shell: bash + - name: Checkout + uses: actions/checkout@v4 + - env: + GHC_VERSION: 9.6.7 + name: Build aarch64-linux binaries + uses: docker://hasufell/arm64v8-ubuntu-haskell:focal + with: + args: bash .github/scripts/build.sh + - env: + GHC_VERSION: 9.6.7 + name: Tar aarch64-linux binaries + uses: docker://hasufell/arm64v8-ubuntu-haskell:focal + with: + args: bash .github/scripts/tar.sh + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-aarch64-linux-ubuntu2004-967 + path: out-aarch64-linux-ubuntu2004-9.6.7.tar + retention-days: 2 + build-aarch64-linux-ubuntu2004-984: + env: + ADD_CABAL_ARGS: '' ARCH: ARM64 + ARTIFACT: aarch64-linux-ubuntu2004 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} DEBIAN_FRONTEND: noninteractive - ARTIFACT: "aarch64-linux-ubuntu20" - TZ: Asia/Singapore - steps: - - uses: docker://arm64v8/ubuntu:focal - name: Cleanup (aarch64 linux) - with: - args: "find . 
-mindepth 1 -maxdepth 1 -exec rm -rf -- {} +" - - - name: git config - run: | - git config --global --get-all safe.directory | grep '^\*$' || git config --global --add safe.directory "*" - shell: bash - - - uses: actions/checkout@v3 - - - uses: actions/download-artifact@v3 - with: - name: artifacts-arm - path: ./ - - - uses: docker://hasufell/arm64v8-ubuntu-haskell:focal - name: Unpack - with: - args: bash .github/scripts/untar.sh - - - uses: docker://hasufell/arm64v8-ubuntu-haskell:focal - name: Create bindist (aarch64 linux) - with: - args: bash .github/scripts/bindist.sh - - - name: Upload bindist - uses: actions/upload-artifact@v3 - with: - if-no-files-found: error - name: bindists-arm - path: | - ./out/*.tar.xz - ./out/plan.json/* - - - uses: geekyeggo/delete-artifact@v2 - with: - name: artifacts-arm - - bindist-mac-x86_64: - name: Tar bindists (Mac x86_64) - runs-on: macOS-11 - needs: ["build-mac-x86_64"] - env: - TARBALL_EXT: tar.xz - ARCH: 64 - ARTIFACT: "x86_64-apple-darwin" - steps: - - uses: actions/checkout@v3 - - - uses: actions/download-artifact@v3 - with: - name: artifacts-mac-x86_64 - path: ./ - - - name: Create bindist - run: | - brew install coreutils tree - for bindist in out-*.tar ; do - tar xf "${bindist}" - done - unset bindist - bash .github/scripts/bindist.sh - - - name: Upload bindist - uses: actions/upload-artifact@v3 - with: - if-no-files-found: error - name: bindists-mac-x86_64 - path: | - ./out/*.tar.xz - ./out/plan.json/* - - - uses: geekyeggo/delete-artifact@v2 - with: - name: artifacts-mac-x86_64 - - bindist-mac-aarch64: - name: Tar bindists (Mac aarch64) - runs-on: [self-hosted, macOS, ARM64] - needs: ["build-mac-aarch64"] + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: build-aarch64-linux-ubuntu2004-984 (Build binaries) + runs-on: + - self-hosted + - Linux + - ARM64 + - maerwald + steps: + - name: clean and git config for aarch64-linux + run: | + find . 
-mindepth 1 -maxdepth 1 -exec rm -rf -- {} + + git config --global --get-all safe.directory | grep '^\*$' || git config --global --add safe.directory "*" + shell: bash + - name: Checkout + uses: actions/checkout@v4 + - env: + GHC_VERSION: 9.8.4 + name: Build aarch64-linux binaries + uses: docker://hasufell/arm64v8-ubuntu-haskell:focal + with: + args: bash .github/scripts/build.sh + - env: + GHC_VERSION: 9.8.4 + name: Tar aarch64-linux binaries + uses: docker://hasufell/arm64v8-ubuntu-haskell:focal + with: + args: bash .github/scripts/tar.sh + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-aarch64-linux-ubuntu2004-984 + path: out-aarch64-linux-ubuntu2004-9.8.4.tar + retention-days: 2 + build-aarch64-mac-9101: env: + ADD_CABAL_ARGS: '' + ARCH: ARM64 + ARTIFACT: aarch64-apple-darwin + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + HOMEBREW_CHANGE_ARCH_TO_ARM: '1' + MACOSX_DEPLOYMENT_TARGET: '10.13' + S3_HOST: ${{ secrets.S3_HOST }} TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: build-aarch64-mac-9101 (Build binaries) + runs-on: + - self-hosted + - macOS + - ARM64 + steps: + - name: Checkout + uses: actions/checkout@v4 + - env: + GHC_VERSION: 9.10.1 + name: Run build + run: | + bash .github/scripts/brew.sh git coreutils autoconf automake tree + export PATH="$HOME/.brew/bin:$HOME/.brew/sbin:$PATH" + export LD=ld + bash .github/scripts/build.sh + tar cf out-${ARTIFACT}-${GHC_VERSION}.tar out/ store/ + shell: sh + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-aarch64-mac-9101 + path: out-aarch64-apple-darwin-9.10.1.tar + retention-days: 2 + build-aarch64-mac-9122: + env: + ADD_CABAL_ARGS: '' ARCH: ARM64 - ARTIFACT: "aarch64-apple-darwin" - steps: - - uses: actions/checkout@v3 - - - uses: actions/download-artifact@v3 - with: - name: artifacts-mac-aarch64 - path: ./ - - - name: Create bindist - run: | - bash .github/scripts/brew.sh git coreutils llvm@13 autoconf automake tree - export PATH="$HOME/.brew/bin:$HOME/.brew/sbin:$HOME/.brew/opt/llvm@13/bin:$PATH" - export CC="$HOME/.brew/opt/llvm@13/bin/clang" - export CXX="$HOME/.brew/opt/llvm@13/bin/clang++" - export LD=ld - export AR="$HOME/.brew/opt/llvm@13/bin/llvm-ar" - export RANLIB="$HOME/.brew/opt/llvm@13/bin/llvm-ranlib" - for bindist in out-*.tar ; do - tar xf "${bindist}" - done - unset bindist - bash .github/scripts/bindist.sh - - - name: Upload bindist - uses: actions/upload-artifact@v3 - with: - if-no-files-found: error - name: bindists-mac-aarch64 - path: | - ./out/*.tar.xz - ./out/plan.json/* - - - uses: geekyeggo/delete-artifact@v2 - with: - name: artifacts-mac-aarch64 - - bindist-win: - name: Tar bindists (Windows) - runs-on: windows-latest - needs: ["build-win"] + ARTIFACT: aarch64-apple-darwin + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + HOMEBREW_CHANGE_ARCH_TO_ARM: '1' + MACOSX_DEPLOYMENT_TARGET: '10.13' + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: build-aarch64-mac-9122 (Build binaries) + runs-on: + - self-hosted + - macOS + - ARM64 + steps: + - name: Checkout + uses: actions/checkout@v4 + - env: + GHC_VERSION: 9.12.2 + name: Run build + run: | + bash .github/scripts/brew.sh git coreutils autoconf automake tree + export PATH="$HOME/.brew/bin:$HOME/.brew/sbin:$PATH" + export 
LD=ld + bash .github/scripts/build.sh + tar cf out-${ARTIFACT}-${GHC_VERSION}.tar out/ store/ + shell: sh + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-aarch64-mac-9122 + path: out-aarch64-apple-darwin-9.12.2.tar + retention-days: 2 + build-aarch64-mac-948: env: - TARBALL_EXT: zip - ARTIFACT: "x86_64-mingw64" - ARCH: 64 - steps: - - name: install windows deps - shell: pwsh - run: | - C:\msys64\usr\bin\bash -lc "pacman --disable-download-timeout --noconfirm -Syuu" - C:\msys64\usr\bin\bash -lc "pacman --disable-download-timeout --noconfirm -Syuu" - C:\msys64\usr\bin\bash -lc "pacman --disable-download-timeout --noconfirm -S unzip zip git" - taskkill /F /FI "MODULES eq msys-2.0.dll" - - - uses: actions/checkout@v3 - - - uses: actions/download-artifact@v3 - with: - name: artifacts-win - path: ./out - - - name: Create bindist - run: | - $env:CHERE_INVOKING = 1 - $env:MSYS2_PATH_TYPE = "inherit" - C:\msys64\usr\bin\bash -lc "bash .github/scripts/bindist.sh" - shell: pwsh - - - name: Upload bindist - uses: actions/upload-artifact@v3 - with: - if-no-files-found: error - name: bindists-win - path: | - ./out/*.zip - ./out/plan.json/* - - - uses: geekyeggo/delete-artifact@v2 - with: - name: artifacts-win - - test-linux: - name: Test linux binaries - runs-on: ubuntu-latest - needs: ["bindist-linux"] - env: - TARBALL_EXT: tar.xz - ARCH: 64 - DEBIAN_FRONTEND: noninteractive - TZ: Asia/Singapore - strategy: - fail-fast: false - matrix: - include: - - image: debian:9 - installCmd: sed -i s/deb.debian.org/archive.debian.org/g /etc/apt/sources.list && sed -i 's|security.debian.org|archive.debian.org/|g' /etc/apt/sources.list && sed -i /-updates/d /etc/apt/sources.list && apt-get update && apt-get install -y - toolRequirements: libnuma-dev zlib1g-dev libgmp-dev libgmp10 libssl-dev liblzma-dev libbz2-dev git wget lsb-release software-properties-common gnupg2 apt-transport-https gcc autoconf automake build-essential curl ghc gzip libffi-dev libncurses-dev libncurses5 libtinfo5 patchelf tree - DISTRO: Debian - ARTIFACT: "x86_64-linux-deb9" - - image: debian:10 - installCmd: apt-get update && apt-get install -y - toolRequirements: libnuma-dev zlib1g-dev libgmp-dev libgmp10 libssl-dev liblzma-dev libbz2-dev git wget lsb-release software-properties-common gnupg2 apt-transport-https gcc autoconf automake build-essential curl ghc gzip libffi-dev libncurses-dev libncurses5 libtinfo5 patchelf tree - DISTRO: Debian - ARTIFACT: "x86_64-linux-deb10" - - image: debian:11 - installCmd: apt-get update && apt-get install -y - toolRequirements: libnuma-dev zlib1g-dev libgmp-dev libgmp10 libssl-dev liblzma-dev libbz2-dev git wget lsb-release software-properties-common gnupg2 apt-transport-https gcc autoconf automake build-essential curl ghc gzip libffi-dev libncurses-dev libncurses5 libtinfo5 patchelf tree - DISTRO: Debian - ARTIFACT: "x86_64-linux-deb11" - - image: ubuntu:18.04 - installCmd: apt-get update && apt-get install -y - toolRequirements: libnuma-dev zlib1g-dev libgmp-dev libgmp10 libssl-dev liblzma-dev libbz2-dev git wget lsb-release software-properties-common gnupg2 apt-transport-https gcc autoconf automake build-essential curl ghc gzip libffi-dev libncurses-dev libncurses5 libtinfo5 patchelf tree - DISTRO: Ubuntu - ARTIFACT: "x86_64-linux-ubuntu18.04" - - image: ubuntu:20.04 - installCmd: apt-get update && apt-get install -y - toolRequirements: libnuma-dev zlib1g-dev libgmp-dev libgmp10 libssl-dev liblzma-dev libbz2-dev git wget lsb-release 
software-properties-common gnupg2 apt-transport-https gcc autoconf automake build-essential curl ghc gzip libffi-dev libncurses-dev libncurses5 libtinfo5 patchelf tree - DISTRO: Ubuntu - ARTIFACT: "x86_64-linux-ubuntu20.04" - - image: ubuntu:22.04 - installCmd: apt-get update && apt-get install -y - toolRequirements: libnuma-dev zlib1g-dev libgmp-dev libgmp10 libssl-dev liblzma-dev libbz2-dev git wget lsb-release software-properties-common gnupg2 apt-transport-https gcc autoconf automake build-essential curl ghc gzip libffi-dev libncurses-dev libncurses5 libtinfo5 patchelf tree - DISTRO: Ubuntu - ARTIFACT: "x86_64-linux-ubuntu22.04" - - image: fedora:27 - installCmd: dnf install -y - toolRequirements: autoconf automake binutils bzip2 coreutils curl elfutils-devel elfutils-libs findutils gcc gcc-c++ git gmp gmp-devel jq lbzip2 make ncurses ncurses-compat-libs ncurses-devel openssh-clients patch perl pxz python3 sqlite sudo wget which xz zlib-devel patchelf tree - DISTRO: Fedora - ARTIFACT: "x86_64-linux-fedora27" - - image: fedora:33 - installCmd: dnf install -y - toolRequirements: autoconf automake binutils bzip2 coreutils curl elfutils-devel elfutils-libs findutils gcc gcc-c++ git gmp gmp-devel jq lbzip2 make ncurses ncurses-compat-libs ncurses-devel openssh-clients patch perl pxz python3 sqlite sudo wget which xz zlib-devel patchelf tree - DISTRO: Fedora - ARTIFACT: "x86_64-linux-fedora33" - - image: centos:7 - installCmd: yum -y install epel-release && yum install -y - toolRequirements: autoconf automake binutils bzip2 coreutils curl elfutils-devel elfutils-libs findutils gcc gcc-c++ git gmp gmp-devel jq lbzip2 make ncurses ncurses-compat-libs ncurses-devel openssh-clients patch perl pxz python3 sqlite sudo wget which xz zlib-devel patchelf tree - DISTRO: CentOS - ARTIFACT: "x86_64-linux-centos7" - - image: linuxmintd/mint19.3-amd64 - installCmd: apt-get update && apt-get install -y - toolRequirements: libnuma-dev zlib1g-dev libgmp-dev libgmp10 libssl-dev liblzma-dev libbz2-dev git wget lsb-release software-properties-common gnupg2 apt-transport-https gcc autoconf automake build-essential curl ghc gzip libffi-dev libncurses-dev libncurses5 libtinfo5 patchelf tree - DISTRO: Mint - ARTIFACT: "x86_64-linux-mint19.3" - - image: "fedora:33" - installCmd: dnf install -y - toolRequirements: autoconf automake binutils bzip2 coreutils curl elfutils-devel elfutils-libs findutils gcc gcc-c++ git gmp gmp-devel jq lbzip2 make ncurses ncurses-compat-libs ncurses-devel openssh-clients patch perl pxz python3 sqlite sudo wget which xz zlib-devel patchelf tree - DISTRO: "Unknown" - ARTIFACT: "x86_64-linux-unknown" - - image: linuxmintd/mint20.2-amd64 - installCmd: apt-get update && apt-get install -y - toolRequirements: libnuma-dev zlib1g-dev libgmp-dev libgmp10 libssl-dev liblzma-dev libbz2-dev git wget lsb-release software-properties-common gnupg2 apt-transport-https gcc autoconf automake build-essential curl ghc gzip libffi-dev libncurses-dev libncurses5 libtinfo5 patchelf tree - DISTRO: Mint - ARTIFACT: "x86_64-linux-mint20.2" - container: - image: ${{ matrix.image }} - steps: - - name: Install requirements - shell: sh - run: | - ${{ matrix.installCmd }} curl bash git ${{ matrix.toolRequirements }} - - - if: matrix.DISTRO == 'Unknown' - run: | - echo "NAME=Linux" > /etc/os-release - echo "ID=linux" >> /etc/os-release - echo "PRETTY_NAME=Linux" >> /etc/os-release - - - uses: actions/checkout@v3 - - - uses: actions/download-artifact@v3 - with: - name: bindists-${{ matrix.ARTIFACT }} - path: ./out - - - 
name: Run test - run: bash .github/scripts/test.sh - env: - ARTIFACT: ${{ matrix.ARTIFACT }} - DISTRO: ${{ matrix.DISTRO }} - - test-arm: - name: Test ARM binary - runs-on: [self-hosted, Linux, ARM64, maerwald] - needs: ["bindist-arm"] - env: - TARBALL_EXT: tar.xz - DEBIAN_FRONTEND: noninteractive - TZ: Asia/Singapore - ARTIFACT: "aarch64-linux-ubuntu20" + ADD_CABAL_ARGS: '' ARCH: ARM64 - DISTRO: Ubuntu - steps: - - uses: docker://arm64v8/ubuntu:focal - name: Cleanup (aarch64 linux) - with: - args: "find . -mindepth 1 -maxdepth 1 -exec rm -rf -- {} +" - - - name: git config - run: | - git config --global --get-all safe.directory | grep '^\*$' || git config --global --add safe.directory "*" - shell: bash - - - name: Checkout code - uses: actions/checkout@v3 - - - uses: actions/download-artifact@v3 - with: - name: bindists-arm - path: ./out - - - uses: docker://hasufell/arm64v8-ubuntu-haskell:focal - name: Run test (aarch64 linux) - with: - args: bash .github/scripts/test.sh - - test-mac-x86_64: - name: Test binary (Mac x86_64) - runs-on: macOS-11 - needs: ["bindist-mac-x86_64"] - env: - MACOSX_DEPLOYMENT_TARGET: 10.13 - ARTIFACT: "x86_64-apple-darwin" - ARCH: 64 - TARBALL_EXT: tar.xz - DISTRO: na - steps: - - name: Checkout code - uses: actions/checkout@v3 - - - uses: actions/download-artifact@v3 - with: - name: bindists-mac-x86_64 - path: ./out - - - name: Run test (mac) - run: | - brew install coreutils tree - bash .github/scripts/test.sh - - test-mac-aarch64: - name: Test binary (Mac aarch64) - runs-on: [self-hosted, macOS, ARM64] - needs: ["bindist-mac-aarch64"] - env: - MACOSX_DEPLOYMENT_TARGET: 10.13 - ARTIFACT: "aarch64-apple-darwin" + ARTIFACT: aarch64-apple-darwin + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + HOMEBREW_CHANGE_ARCH_TO_ARM: '1' + MACOSX_DEPLOYMENT_TARGET: '10.13' + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: build-aarch64-mac-948 (Build binaries) + runs-on: + - self-hosted + - macOS + - ARM64 + steps: + - name: Checkout + uses: actions/checkout@v4 + - env: + GHC_VERSION: 9.4.8 + name: Run build + run: | + bash .github/scripts/brew.sh git coreutils autoconf automake tree + export PATH="$HOME/.brew/bin:$HOME/.brew/sbin:$PATH" + export LD=ld + bash .github/scripts/build.sh + tar cf out-${ARTIFACT}-${GHC_VERSION}.tar out/ store/ + shell: sh + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-aarch64-mac-948 + path: out-aarch64-apple-darwin-9.4.8.tar + retention-days: 2 + build-aarch64-mac-967: + env: + ADD_CABAL_ARGS: '' ARCH: ARM64 + ARTIFACT: aarch64-apple-darwin + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + HOMEBREW_CHANGE_ARCH_TO_ARM: '1' + MACOSX_DEPLOYMENT_TARGET: '10.13' + S3_HOST: ${{ secrets.S3_HOST }} TARBALL_EXT: tar.xz - DISTRO: n - HOMEBREW_CHANGE_ARCH_TO_ARM: 1 - steps: - - name: Checkout code - uses: actions/checkout@v3 - - - uses: actions/download-artifact@v3 - with: - name: bindists-mac-aarch64 - path: ./out - - - name: Run test (mac) - run: | - bash .github/scripts/brew.sh git coreutils llvm@13 autoconf automake tree - export PATH="$HOME/.brew/bin:$HOME/.brew/sbin:$HOME/.brew/opt/llvm@13/bin:$PATH" - export CC="$HOME/.brew/opt/llvm@13/bin/clang" - export CXX="$HOME/.brew/opt/llvm@13/bin/clang++" - export LD=ld - export AR="$HOME/.brew/opt/llvm@13/bin/llvm-ar" - export 
RANLIB="$HOME/.brew/opt/llvm@13/bin/llvm-ranlib" - bash .github/scripts/test.sh - - test-win: - name: Test binary (Win) - runs-on: windows-latest - needs: ["bindist-win"] - env: - ARTIFACT: "x86_64-mingw64" - ARCH: 64 - TARBALL_EXT: zip - DISTRO: na - strategy: - fail-fast: false - steps: - - name: install windows deps - shell: pwsh - run: | - C:\msys64\usr\bin\bash -lc "pacman --disable-download-timeout --noconfirm -Syuu" - C:\msys64\usr\bin\bash -lc "pacman --disable-download-timeout --noconfirm -Syuu" - C:\msys64\usr\bin\bash -lc "pacman --disable-download-timeout --noconfirm -S make mingw-w64-x86_64-clang curl autoconf mingw-w64-x86_64-pkgconf ca-certificates base-devel gettext autoconf make libtool automake python p7zip patch unzip zip git" - taskkill /F /FI "MODULES eq msys-2.0.dll" - - - name: Checkout code - uses: actions/checkout@v3 - - - uses: actions/download-artifact@v3 - with: - name: bindists-win - path: ./out - - - name: Run test (windows) - run: | - $env:CHERE_INVOKING = 1 - $env:MSYS2_PATH_TYPE = "inherit" - C:\msys64\usr\bin\bash -lc "bash .github/scripts/test.sh" - shell: pwsh - - release: - name: release - needs: ["test-linux", "test-mac-x86_64", "test-mac-aarch64", "test-win", "test-arm"] - runs-on: ubuntu-latest - if: startsWith(github.ref, 'refs/tags/') + TZ: Asia/Singapore + environment: CI + name: build-aarch64-mac-967 (Build binaries) + runs-on: + - self-hosted + - macOS + - ARM64 + steps: + - name: Checkout + uses: actions/checkout@v4 + - env: + GHC_VERSION: 9.6.7 + name: Run build + run: | + bash .github/scripts/brew.sh git coreutils autoconf automake tree + export PATH="$HOME/.brew/bin:$HOME/.brew/sbin:$PATH" + export LD=ld + bash .github/scripts/build.sh + tar cf out-${ARTIFACT}-${GHC_VERSION}.tar out/ store/ + shell: sh + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-aarch64-mac-967 + path: out-aarch64-apple-darwin-9.6.7.tar + retention-days: 2 + build-aarch64-mac-984: + env: + ADD_CABAL_ARGS: '' + ARCH: ARM64 + ARTIFACT: aarch64-apple-darwin + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + HOMEBREW_CHANGE_ARCH_TO_ARM: '1' + MACOSX_DEPLOYMENT_TARGET: '10.13' + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: build-aarch64-mac-984 (Build binaries) + runs-on: + - self-hosted + - macOS + - ARM64 + steps: + - name: Checkout + uses: actions/checkout@v4 + - env: + GHC_VERSION: 9.8.4 + name: Run build + run: | + bash .github/scripts/brew.sh git coreutils autoconf automake tree + export PATH="$HOME/.brew/bin:$HOME/.brew/sbin:$PATH" + export LD=ld + bash .github/scripts/build.sh + tar cf out-${ARTIFACT}-${GHC_VERSION}.tar out/ store/ + shell: sh + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-aarch64-mac-984 + path: out-aarch64-apple-darwin-9.8.4.tar + retention-days: 2 + build-x86_64-linux-centos7-9101: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-centos7 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: build-x86_64-linux-centos7-9101 (Build binaries) + runs-on: + - ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Build 9.10.1 + uses: 
./.github/actions/bindist-actions/action-centos7 + with: + stage: BUILD + version: 9.10.1 + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-x86_64-linux-centos7-9101 + path: out-x86_64-linux-centos7-9.10.1.tar + retention-days: 2 + build-x86_64-linux-centos7-9122: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-centos7 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: build-x86_64-linux-centos7-9122 (Build binaries) + runs-on: + - ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Build 9.12.2 + uses: ./.github/actions/bindist-actions/action-centos7 + with: + stage: BUILD + version: 9.12.2 + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-x86_64-linux-centos7-9122 + path: out-x86_64-linux-centos7-9.12.2.tar + retention-days: 2 + build-x86_64-linux-centos7-948: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-centos7 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: build-x86_64-linux-centos7-948 (Build binaries) + runs-on: + - ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Build 9.4.8 + uses: ./.github/actions/bindist-actions/action-centos7 + with: + stage: BUILD + version: 9.4.8 + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-x86_64-linux-centos7-948 + path: out-x86_64-linux-centos7-9.4.8.tar + retention-days: 2 + build-x86_64-linux-centos7-967: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-centos7 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: build-x86_64-linux-centos7-967 (Build binaries) + runs-on: + - ubuntu-latest steps: - - name: Checkout code - uses: actions/checkout@v3 - - - uses: actions/download-artifact@v3 - with: - path: ./out - name: bindists-x86_64-linux-deb9 - - uses: actions/download-artifact@v3 - with: - path: ./out - name: bindists-x86_64-linux-deb10 - - uses: actions/download-artifact@v3 - with: - path: ./out - name: bindists-x86_64-linux-deb11 - - uses: actions/download-artifact@v3 - with: - path: ./out - name: bindists-x86_64-linux-ubuntu18.04 - - uses: actions/download-artifact@v3 - with: - path: ./out - name: bindists-x86_64-linux-ubuntu20.04 - - uses: actions/download-artifact@v3 - with: - path: ./out - name: bindists-x86_64-linux-ubuntu22.04 - - uses: actions/download-artifact@v3 - with: - path: ./out - name: bindists-x86_64-linux-fedora27 - - uses: actions/download-artifact@v3 - with: - path: ./out - name: bindists-x86_64-linux-fedora33 - - uses: actions/download-artifact@v3 - with: - path: ./out - name: bindists-x86_64-linux-centos7 - - uses: actions/download-artifact@v3 - with: - path: ./out - name: bindists-x86_64-linux-unknown - - uses: actions/download-artifact@v3 - with: 
- path: ./out - name: bindists-x86_64-linux-mint19.3 - - uses: actions/download-artifact@v3 - with: - path: ./out - name: bindists-x86_64-linux-mint20.2 - - - uses: actions/download-artifact@v3 - with: - name: bindists-arm - path: ./out - - - uses: actions/download-artifact@v3 - with: - name: bindists-mac-x86_64 - path: ./out - - - uses: actions/download-artifact@v3 - with: - name: bindists-mac-aarch64 - path: ./out - - - uses: actions/download-artifact@v3 - with: - name: bindists-win - path: ./out - - - name: Install requirements - run: | - sudo apt-get update && sudo apt-get install -y tar xz-utils - shell: bash - - - name: tar plan.json - run: | - cd out/plan.json - tar cf plan_json.tar * - mv plan_json.tar ../ - shell: bash - - - name: build source tarball - run: | - export RELEASE=$GITHUB_REF_NAME - git archive --format=tar.gz -o "out/haskell-language-server-${RELEASE}-src.tar.gz" --prefix="haskell-language-server-${RELEASE}/" HEAD - shell: bash - - - name: Release - uses: softprops/action-gh-release@v2 - with: - draft: true - files: | - ./out/*.zip - ./out/*.tar.xz - ./out/*.tar.gz - ./out/*.tar + - name: Checkout + uses: actions/checkout@v4 + - name: Build 9.6.7 + uses: ./.github/actions/bindist-actions/action-centos7 + with: + stage: BUILD + version: 9.6.7 + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-x86_64-linux-centos7-967 + path: out-x86_64-linux-centos7-9.6.7.tar + retention-days: 2 + build-x86_64-linux-centos7-984: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-centos7 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: build-x86_64-linux-centos7-984 (Build binaries) + runs-on: + - ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Build 9.8.4 + uses: ./.github/actions/bindist-actions/action-centos7 + with: + stage: BUILD + version: 9.8.4 + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-x86_64-linux-centos7-984 + path: out-x86_64-linux-centos7-9.8.4.tar + retention-days: 2 + build-x86_64-linux-deb10-9101: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-deb10 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: build-x86_64-linux-deb10-9101 (Build binaries) + runs-on: + - ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Build 9.10.1 + uses: ./.github/actions/bindist-actions/action-deb10 + with: + stage: BUILD + version: 9.10.1 + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-x86_64-linux-deb10-9101 + path: out-x86_64-linux-deb10-9.10.1.tar + retention-days: 2 + build-x86_64-linux-deb10-9122: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-deb10 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: 
build-x86_64-linux-deb10-9122 (Build binaries) + runs-on: + - ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Build 9.12.2 + uses: ./.github/actions/bindist-actions/action-deb10 + with: + stage: BUILD + version: 9.12.2 + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-x86_64-linux-deb10-9122 + path: out-x86_64-linux-deb10-9.12.2.tar + retention-days: 2 + build-x86_64-linux-deb10-948: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-deb10 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: build-x86_64-linux-deb10-948 (Build binaries) + runs-on: + - ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Build 9.4.8 + uses: ./.github/actions/bindist-actions/action-deb10 + with: + stage: BUILD + version: 9.4.8 + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-x86_64-linux-deb10-948 + path: out-x86_64-linux-deb10-9.4.8.tar + retention-days: 2 + build-x86_64-linux-deb10-967: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-deb10 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: build-x86_64-linux-deb10-967 (Build binaries) + runs-on: + - ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Build 9.6.7 + uses: ./.github/actions/bindist-actions/action-deb10 + with: + stage: BUILD + version: 9.6.7 + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-x86_64-linux-deb10-967 + path: out-x86_64-linux-deb10-9.6.7.tar + retention-days: 2 + build-x86_64-linux-deb10-984: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-deb10 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: build-x86_64-linux-deb10-984 (Build binaries) + runs-on: + - ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Build 9.8.4 + uses: ./.github/actions/bindist-actions/action-deb10 + with: + stage: BUILD + version: 9.8.4 + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-x86_64-linux-deb10-984 + path: out-x86_64-linux-deb10-9.8.4.tar + retention-days: 2 + build-x86_64-linux-deb11-9101: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-deb11 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: build-x86_64-linux-deb11-9101 (Build binaries) + runs-on: + - ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Build 9.10.1 + uses: ./.github/actions/bindist-actions/action-deb11 + with: + stage: 
BUILD + version: 9.10.1 + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-x86_64-linux-deb11-9101 + path: out-x86_64-linux-deb11-9.10.1.tar + retention-days: 2 + build-x86_64-linux-deb11-9122: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-deb11 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: build-x86_64-linux-deb11-9122 (Build binaries) + runs-on: + - ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Build 9.12.2 + uses: ./.github/actions/bindist-actions/action-deb11 + with: + stage: BUILD + version: 9.12.2 + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-x86_64-linux-deb11-9122 + path: out-x86_64-linux-deb11-9.12.2.tar + retention-days: 2 + build-x86_64-linux-deb11-948: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-deb11 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: build-x86_64-linux-deb11-948 (Build binaries) + runs-on: + - ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Build 9.4.8 + uses: ./.github/actions/bindist-actions/action-deb11 + with: + stage: BUILD + version: 9.4.8 + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-x86_64-linux-deb11-948 + path: out-x86_64-linux-deb11-9.4.8.tar + retention-days: 2 + build-x86_64-linux-deb11-967: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-deb11 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: build-x86_64-linux-deb11-967 (Build binaries) + runs-on: + - ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Build 9.6.7 + uses: ./.github/actions/bindist-actions/action-deb11 + with: + stage: BUILD + version: 9.6.7 + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-x86_64-linux-deb11-967 + path: out-x86_64-linux-deb11-9.6.7.tar + retention-days: 2 + build-x86_64-linux-deb11-984: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-deb11 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: build-x86_64-linux-deb11-984 (Build binaries) + runs-on: + - ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Build 9.8.4 + uses: ./.github/actions/bindist-actions/action-deb11 + with: + stage: BUILD + version: 9.8.4 + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-x86_64-linux-deb11-984 + path: out-x86_64-linux-deb11-9.8.4.tar + retention-days: 
2 + build-x86_64-linux-deb9-9101: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-deb9 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: build-x86_64-linux-deb9-9101 (Build binaries) + runs-on: + - ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Build 9.10.1 + uses: ./.github/actions/bindist-actions/action-deb9 + with: + stage: BUILD + version: 9.10.1 + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-x86_64-linux-deb9-9101 + path: out-x86_64-linux-deb9-9.10.1.tar + retention-days: 2 + build-x86_64-linux-deb9-9122: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-deb9 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: build-x86_64-linux-deb9-9122 (Build binaries) + runs-on: + - ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Build 9.12.2 + uses: ./.github/actions/bindist-actions/action-deb9 + with: + stage: BUILD + version: 9.12.2 + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-x86_64-linux-deb9-9122 + path: out-x86_64-linux-deb9-9.12.2.tar + retention-days: 2 + build-x86_64-linux-deb9-948: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-deb9 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: build-x86_64-linux-deb9-948 (Build binaries) + runs-on: + - ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Build 9.4.8 + uses: ./.github/actions/bindist-actions/action-deb9 + with: + stage: BUILD + version: 9.4.8 + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-x86_64-linux-deb9-948 + path: out-x86_64-linux-deb9-9.4.8.tar + retention-days: 2 + build-x86_64-linux-deb9-967: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-deb9 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: build-x86_64-linux-deb9-967 (Build binaries) + runs-on: + - ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Build 9.6.7 + uses: ./.github/actions/bindist-actions/action-deb9 + with: + stage: BUILD + version: 9.6.7 + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-x86_64-linux-deb9-967 + path: out-x86_64-linux-deb9-9.6.7.tar + retention-days: 2 + build-x86_64-linux-deb9-984: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-deb9 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + 
DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: build-x86_64-linux-deb9-984 (Build binaries) + runs-on: + - ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Build 9.8.4 + uses: ./.github/actions/bindist-actions/action-deb9 + with: + stage: BUILD + version: 9.8.4 + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-x86_64-linux-deb9-984 + path: out-x86_64-linux-deb9-9.8.4.tar + retention-days: 2 + build-x86_64-linux-fedora27-9101: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-fedora27 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: build-x86_64-linux-fedora27-9101 (Build binaries) + runs-on: + - ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Build 9.10.1 + uses: ./.github/actions/bindist-actions/action-fedora27 + with: + stage: BUILD + version: 9.10.1 + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-x86_64-linux-fedora27-9101 + path: out-x86_64-linux-fedora27-9.10.1.tar + retention-days: 2 + build-x86_64-linux-fedora27-9122: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-fedora27 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: build-x86_64-linux-fedora27-9122 (Build binaries) + runs-on: + - ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Build 9.12.2 + uses: ./.github/actions/bindist-actions/action-fedora27 + with: + stage: BUILD + version: 9.12.2 + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-x86_64-linux-fedora27-9122 + path: out-x86_64-linux-fedora27-9.12.2.tar + retention-days: 2 + build-x86_64-linux-fedora27-948: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-fedora27 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: build-x86_64-linux-fedora27-948 (Build binaries) + runs-on: + - ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Build 9.4.8 + uses: ./.github/actions/bindist-actions/action-fedora27 + with: + stage: BUILD + version: 9.4.8 + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-x86_64-linux-fedora27-948 + path: out-x86_64-linux-fedora27-9.4.8.tar + retention-days: 2 + build-x86_64-linux-fedora27-967: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-fedora27 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: build-x86_64-linux-fedora27-967 
(Build binaries) + runs-on: + - ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Build 9.6.7 + uses: ./.github/actions/bindist-actions/action-fedora27 + with: + stage: BUILD + version: 9.6.7 + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-x86_64-linux-fedora27-967 + path: out-x86_64-linux-fedora27-9.6.7.tar + retention-days: 2 + build-x86_64-linux-fedora27-984: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-fedora27 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: build-x86_64-linux-fedora27-984 (Build binaries) + runs-on: + - ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Build 9.8.4 + uses: ./.github/actions/bindist-actions/action-fedora27 + with: + stage: BUILD + version: 9.8.4 + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-x86_64-linux-fedora27-984 + path: out-x86_64-linux-fedora27-9.8.4.tar + retention-days: 2 + build-x86_64-linux-fedora33-9101: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-fedora33 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: build-x86_64-linux-fedora33-9101 (Build binaries) + runs-on: + - ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Build 9.10.1 + uses: ./.github/actions/bindist-actions/action-fedora33 + with: + stage: BUILD + version: 9.10.1 + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-x86_64-linux-fedora33-9101 + path: out-x86_64-linux-fedora33-9.10.1.tar + retention-days: 2 + build-x86_64-linux-fedora33-9122: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-fedora33 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: build-x86_64-linux-fedora33-9122 (Build binaries) + runs-on: + - ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Build 9.12.2 + uses: ./.github/actions/bindist-actions/action-fedora33 + with: + stage: BUILD + version: 9.12.2 + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-x86_64-linux-fedora33-9122 + path: out-x86_64-linux-fedora33-9.12.2.tar + retention-days: 2 + build-x86_64-linux-fedora33-948: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-fedora33 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: build-x86_64-linux-fedora33-948 (Build binaries) + runs-on: + - ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Build 9.4.8 + uses: 
./.github/actions/bindist-actions/action-fedora33 + with: + stage: BUILD + version: 9.4.8 + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-x86_64-linux-fedora33-948 + path: out-x86_64-linux-fedora33-9.4.8.tar + retention-days: 2 + build-x86_64-linux-fedora33-967: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-fedora33 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: build-x86_64-linux-fedora33-967 (Build binaries) + runs-on: + - ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Build 9.6.7 + uses: ./.github/actions/bindist-actions/action-fedora33 + with: + stage: BUILD + version: 9.6.7 + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-x86_64-linux-fedora33-967 + path: out-x86_64-linux-fedora33-9.6.7.tar + retention-days: 2 + build-x86_64-linux-fedora33-984: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-fedora33 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: build-x86_64-linux-fedora33-984 (Build binaries) + runs-on: + - ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Build 9.8.4 + uses: ./.github/actions/bindist-actions/action-fedora33 + with: + stage: BUILD + version: 9.8.4 + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-x86_64-linux-fedora33-984 + path: out-x86_64-linux-fedora33-9.8.4.tar + retention-days: 2 + build-x86_64-linux-mint193-9101: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-mint193 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: build-x86_64-linux-mint193-9101 (Build binaries) + runs-on: + - ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Build 9.10.1 + uses: ./.github/actions/bindist-actions/action-mint193 + with: + stage: BUILD + version: 9.10.1 + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-x86_64-linux-mint193-9101 + path: out-x86_64-linux-mint193-9.10.1.tar + retention-days: 2 + build-x86_64-linux-mint193-9122: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-mint193 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: build-x86_64-linux-mint193-9122 (Build binaries) + runs-on: + - ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Build 9.12.2 + uses: ./.github/actions/bindist-actions/action-mint193 + with: + stage: BUILD + version: 9.12.2 + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + 
if-no-files-found: error + name: artifacts-build-x86_64-linux-mint193-9122 + path: out-x86_64-linux-mint193-9.12.2.tar + retention-days: 2 + build-x86_64-linux-mint193-948: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-mint193 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: build-x86_64-linux-mint193-948 (Build binaries) + runs-on: + - ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Build 9.4.8 + uses: ./.github/actions/bindist-actions/action-mint193 + with: + stage: BUILD + version: 9.4.8 + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-x86_64-linux-mint193-948 + path: out-x86_64-linux-mint193-9.4.8.tar + retention-days: 2 + build-x86_64-linux-mint193-967: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-mint193 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: build-x86_64-linux-mint193-967 (Build binaries) + runs-on: + - ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Build 9.6.7 + uses: ./.github/actions/bindist-actions/action-mint193 + with: + stage: BUILD + version: 9.6.7 + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-x86_64-linux-mint193-967 + path: out-x86_64-linux-mint193-9.6.7.tar + retention-days: 2 + build-x86_64-linux-mint193-984: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-mint193 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: build-x86_64-linux-mint193-984 (Build binaries) + runs-on: + - ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Build 9.8.4 + uses: ./.github/actions/bindist-actions/action-mint193 + with: + stage: BUILD + version: 9.8.4 + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-x86_64-linux-mint193-984 + path: out-x86_64-linux-mint193-9.8.4.tar + retention-days: 2 + build-x86_64-linux-mint202-9101: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-mint202 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: build-x86_64-linux-mint202-9101 (Build binaries) + runs-on: + - ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Build 9.10.1 + uses: ./.github/actions/bindist-actions/action-mint202 + with: + stage: BUILD + version: 9.10.1 + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-x86_64-linux-mint202-9101 + path: out-x86_64-linux-mint202-9.10.1.tar + retention-days: 2 + build-x86_64-linux-mint202-9122: + env: 
+ ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-mint202 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: build-x86_64-linux-mint202-9122 (Build binaries) + runs-on: + - ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Build 9.12.2 + uses: ./.github/actions/bindist-actions/action-mint202 + with: + stage: BUILD + version: 9.12.2 + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-x86_64-linux-mint202-9122 + path: out-x86_64-linux-mint202-9.12.2.tar + retention-days: 2 + build-x86_64-linux-mint202-948: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-mint202 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: build-x86_64-linux-mint202-948 (Build binaries) + runs-on: + - ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Build 9.4.8 + uses: ./.github/actions/bindist-actions/action-mint202 + with: + stage: BUILD + version: 9.4.8 + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-x86_64-linux-mint202-948 + path: out-x86_64-linux-mint202-9.4.8.tar + retention-days: 2 + build-x86_64-linux-mint202-967: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-mint202 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: build-x86_64-linux-mint202-967 (Build binaries) + runs-on: + - ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Build 9.6.7 + uses: ./.github/actions/bindist-actions/action-mint202 + with: + stage: BUILD + version: 9.6.7 + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-x86_64-linux-mint202-967 + path: out-x86_64-linux-mint202-9.6.7.tar + retention-days: 2 + build-x86_64-linux-mint202-984: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-mint202 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: build-x86_64-linux-mint202-984 (Build binaries) + runs-on: + - ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Build 9.8.4 + uses: ./.github/actions/bindist-actions/action-mint202 + with: + stage: BUILD + version: 9.8.4 + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-x86_64-linux-mint202-984 + path: out-x86_64-linux-mint202-9.8.4.tar + retention-days: 2 + build-x86_64-linux-ubuntu1804-9101: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-ubuntu1804 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ 
secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: build-x86_64-linux-ubuntu1804-9101 (Build binaries) + runs-on: + - ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Build 9.10.1 + uses: ./.github/actions/bindist-actions/action-ubuntu1804 + with: + stage: BUILD + version: 9.10.1 + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-x86_64-linux-ubuntu1804-9101 + path: out-x86_64-linux-ubuntu1804-9.10.1.tar + retention-days: 2 + build-x86_64-linux-ubuntu1804-9122: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-ubuntu1804 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: build-x86_64-linux-ubuntu1804-9122 (Build binaries) + runs-on: + - ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Build 9.12.2 + uses: ./.github/actions/bindist-actions/action-ubuntu1804 + with: + stage: BUILD + version: 9.12.2 + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-x86_64-linux-ubuntu1804-9122 + path: out-x86_64-linux-ubuntu1804-9.12.2.tar + retention-days: 2 + build-x86_64-linux-ubuntu1804-948: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-ubuntu1804 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: build-x86_64-linux-ubuntu1804-948 (Build binaries) + runs-on: + - ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Build 9.4.8 + uses: ./.github/actions/bindist-actions/action-ubuntu1804 + with: + stage: BUILD + version: 9.4.8 + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-x86_64-linux-ubuntu1804-948 + path: out-x86_64-linux-ubuntu1804-9.4.8.tar + retention-days: 2 + build-x86_64-linux-ubuntu1804-967: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-ubuntu1804 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: build-x86_64-linux-ubuntu1804-967 (Build binaries) + runs-on: + - ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Build 9.6.7 + uses: ./.github/actions/bindist-actions/action-ubuntu1804 + with: + stage: BUILD + version: 9.6.7 + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-x86_64-linux-ubuntu1804-967 + path: out-x86_64-linux-ubuntu1804-9.6.7.tar + retention-days: 2 + build-x86_64-linux-ubuntu1804-984: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-ubuntu1804 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + 
TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: build-x86_64-linux-ubuntu1804-984 (Build binaries) + runs-on: + - ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Build 9.8.4 + uses: ./.github/actions/bindist-actions/action-ubuntu1804 + with: + stage: BUILD + version: 9.8.4 + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-x86_64-linux-ubuntu1804-984 + path: out-x86_64-linux-ubuntu1804-9.8.4.tar + retention-days: 2 + build-x86_64-linux-ubuntu2004-9101: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-ubuntu2004 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: build-x86_64-linux-ubuntu2004-9101 (Build binaries) + runs-on: + - ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Build 9.10.1 + uses: ./.github/actions/bindist-actions/action-ubuntu2004 + with: + stage: BUILD + version: 9.10.1 + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-x86_64-linux-ubuntu2004-9101 + path: out-x86_64-linux-ubuntu2004-9.10.1.tar + retention-days: 2 + build-x86_64-linux-ubuntu2004-9122: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-ubuntu2004 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: build-x86_64-linux-ubuntu2004-9122 (Build binaries) + runs-on: + - ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Build 9.12.2 + uses: ./.github/actions/bindist-actions/action-ubuntu2004 + with: + stage: BUILD + version: 9.12.2 + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-x86_64-linux-ubuntu2004-9122 + path: out-x86_64-linux-ubuntu2004-9.12.2.tar + retention-days: 2 + build-x86_64-linux-ubuntu2004-948: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-ubuntu2004 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: build-x86_64-linux-ubuntu2004-948 (Build binaries) + runs-on: + - ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Build 9.4.8 + uses: ./.github/actions/bindist-actions/action-ubuntu2004 + with: + stage: BUILD + version: 9.4.8 + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-x86_64-linux-ubuntu2004-948 + path: out-x86_64-linux-ubuntu2004-9.4.8.tar + retention-days: 2 + build-x86_64-linux-ubuntu2004-967: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-ubuntu2004 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: build-x86_64-linux-ubuntu2004-967 
(Build binaries) + runs-on: + - ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Build 9.6.7 + uses: ./.github/actions/bindist-actions/action-ubuntu2004 + with: + stage: BUILD + version: 9.6.7 + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-x86_64-linux-ubuntu2004-967 + path: out-x86_64-linux-ubuntu2004-9.6.7.tar + retention-days: 2 + build-x86_64-linux-ubuntu2004-984: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-ubuntu2004 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: build-x86_64-linux-ubuntu2004-984 (Build binaries) + runs-on: + - ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Build 9.8.4 + uses: ./.github/actions/bindist-actions/action-ubuntu2004 + with: + stage: BUILD + version: 9.8.4 + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-x86_64-linux-ubuntu2004-984 + path: out-x86_64-linux-ubuntu2004-9.8.4.tar + retention-days: 2 + build-x86_64-linux-ubuntu2204-9101: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-ubuntu2204 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: build-x86_64-linux-ubuntu2204-9101 (Build binaries) + runs-on: + - ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Build 9.10.1 + uses: ./.github/actions/bindist-actions/action-ubuntu2204 + with: + stage: BUILD + version: 9.10.1 + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-x86_64-linux-ubuntu2204-9101 + path: out-x86_64-linux-ubuntu2204-9.10.1.tar + retention-days: 2 + build-x86_64-linux-ubuntu2204-9122: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-ubuntu2204 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: build-x86_64-linux-ubuntu2204-9122 (Build binaries) + runs-on: + - ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Build 9.12.2 + uses: ./.github/actions/bindist-actions/action-ubuntu2204 + with: + stage: BUILD + version: 9.12.2 + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-x86_64-linux-ubuntu2204-9122 + path: out-x86_64-linux-ubuntu2204-9.12.2.tar + retention-days: 2 + build-x86_64-linux-ubuntu2204-948: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-ubuntu2204 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: build-x86_64-linux-ubuntu2204-948 (Build binaries) + runs-on: + - ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 
+ - name: Build 9.4.8 + uses: ./.github/actions/bindist-actions/action-ubuntu2204 + with: + stage: BUILD + version: 9.4.8 + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-x86_64-linux-ubuntu2204-948 + path: out-x86_64-linux-ubuntu2204-9.4.8.tar + retention-days: 2 + build-x86_64-linux-ubuntu2204-967: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-ubuntu2204 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: build-x86_64-linux-ubuntu2204-967 (Build binaries) + runs-on: + - ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Build 9.6.7 + uses: ./.github/actions/bindist-actions/action-ubuntu2204 + with: + stage: BUILD + version: 9.6.7 + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-x86_64-linux-ubuntu2204-967 + path: out-x86_64-linux-ubuntu2204-9.6.7.tar + retention-days: 2 + build-x86_64-linux-ubuntu2204-984: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-ubuntu2204 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: build-x86_64-linux-ubuntu2204-984 (Build binaries) + runs-on: + - ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Build 9.8.4 + uses: ./.github/actions/bindist-actions/action-ubuntu2204 + with: + stage: BUILD + version: 9.8.4 + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-x86_64-linux-ubuntu2204-984 + path: out-x86_64-linux-ubuntu2204-9.8.4.tar + retention-days: 2 + build-x86_64-linux-unknown-9101: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-unknown + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: build-x86_64-linux-unknown-9101 (Build binaries) + runs-on: + - ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Build 9.10.1 + uses: ./.github/actions/bindist-actions/action-unknown + with: + stage: BUILD + version: 9.10.1 + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-x86_64-linux-unknown-9101 + path: out-x86_64-linux-unknown-9.10.1.tar + retention-days: 2 + build-x86_64-linux-unknown-9122: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-unknown + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: build-x86_64-linux-unknown-9122 (Build binaries) + runs-on: + - ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Build 9.12.2 + uses: ./.github/actions/bindist-actions/action-unknown + with: + stage: BUILD + version: 9.12.2 + - name: 
Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-x86_64-linux-unknown-9122 + path: out-x86_64-linux-unknown-9.12.2.tar + retention-days: 2 + build-x86_64-linux-unknown-948: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-unknown + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: build-x86_64-linux-unknown-948 (Build binaries) + runs-on: + - ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Build 9.4.8 + uses: ./.github/actions/bindist-actions/action-unknown + with: + stage: BUILD + version: 9.4.8 + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-x86_64-linux-unknown-948 + path: out-x86_64-linux-unknown-9.4.8.tar + retention-days: 2 + build-x86_64-linux-unknown-967: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-unknown + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: build-x86_64-linux-unknown-967 (Build binaries) + runs-on: + - ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Build 9.6.7 + uses: ./.github/actions/bindist-actions/action-unknown + with: + stage: BUILD + version: 9.6.7 + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-x86_64-linux-unknown-967 + path: out-x86_64-linux-unknown-9.6.7.tar + retention-days: 2 + build-x86_64-linux-unknown-984: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-unknown + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: build-x86_64-linux-unknown-984 (Build binaries) + runs-on: + - ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Build 9.8.4 + uses: ./.github/actions/bindist-actions/action-unknown + with: + stage: BUILD + version: 9.8.4 + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-x86_64-linux-unknown-984 + path: out-x86_64-linux-unknown-9.8.4.tar + retention-days: 2 + build-x86_64-mac-9101: + env: + ADD_CABAL_ARGS: '' + ARCH: '64' + ARTIFACT: x86_64-apple-darwin + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + MACOSX_DEPLOYMENT_TARGET: '10.13' + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: build-x86_64-mac-9101 (Build binaries) + runs-on: + - macOS-13 + steps: + - name: Checkout + uses: actions/checkout@v4 + - env: + GHC_VERSION: 9.10.1 + name: Run build + run: | + brew install coreutils tree + bash .github/scripts/build.sh + tar cf out-${ARTIFACT}-${GHC_VERSION}.tar out/ store/ + shell: sh + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-x86_64-mac-9101 + path: 
out-x86_64-apple-darwin-9.10.1.tar + retention-days: 2 + build-x86_64-mac-9122: + env: + ADD_CABAL_ARGS: '' + ARCH: '64' + ARTIFACT: x86_64-apple-darwin + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + MACOSX_DEPLOYMENT_TARGET: '10.13' + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: build-x86_64-mac-9122 (Build binaries) + runs-on: + - macOS-13 + steps: + - name: Checkout + uses: actions/checkout@v4 + - env: + GHC_VERSION: 9.12.2 + name: Run build + run: | + brew install coreutils tree + bash .github/scripts/build.sh + tar cf out-${ARTIFACT}-${GHC_VERSION}.tar out/ store/ + shell: sh + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-x86_64-mac-9122 + path: out-x86_64-apple-darwin-9.12.2.tar + retention-days: 2 + build-x86_64-mac-948: + env: + ADD_CABAL_ARGS: '' + ARCH: '64' + ARTIFACT: x86_64-apple-darwin + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + MACOSX_DEPLOYMENT_TARGET: '10.13' + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: build-x86_64-mac-948 (Build binaries) + runs-on: + - macOS-13 + steps: + - name: Checkout + uses: actions/checkout@v4 + - env: + GHC_VERSION: 9.4.8 + name: Run build + run: | + brew install coreutils tree + bash .github/scripts/build.sh + tar cf out-${ARTIFACT}-${GHC_VERSION}.tar out/ store/ + shell: sh + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-x86_64-mac-948 + path: out-x86_64-apple-darwin-9.4.8.tar + retention-days: 2 + build-x86_64-mac-967: + env: + ADD_CABAL_ARGS: '' + ARCH: '64' + ARTIFACT: x86_64-apple-darwin + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + MACOSX_DEPLOYMENT_TARGET: '10.13' + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: build-x86_64-mac-967 (Build binaries) + runs-on: + - macOS-13 + steps: + - name: Checkout + uses: actions/checkout@v4 + - env: + GHC_VERSION: 9.6.7 + name: Run build + run: | + brew install coreutils tree + bash .github/scripts/build.sh + tar cf out-${ARTIFACT}-${GHC_VERSION}.tar out/ store/ + shell: sh + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-x86_64-mac-967 + path: out-x86_64-apple-darwin-9.6.7.tar + retention-days: 2 + build-x86_64-mac-984: + env: + ADD_CABAL_ARGS: '' + ARCH: '64' + ARTIFACT: x86_64-apple-darwin + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + MACOSX_DEPLOYMENT_TARGET: '10.13' + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: build-x86_64-mac-984 (Build binaries) + runs-on: + - macOS-13 + steps: + - name: Checkout + uses: actions/checkout@v4 + - env: + GHC_VERSION: 9.8.4 + name: Run build + run: | + brew install coreutils tree + bash .github/scripts/build.sh + tar cf out-${ARTIFACT}-${GHC_VERSION}.tar out/ store/ + shell: sh + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-x86_64-mac-984 + path: out-x86_64-apple-darwin-9.8.4.tar + retention-days: 2 + build-x86_64-windows-9101: + env: + ADD_CABAL_ARGS: '' + ARCH: '64' + 
ARTIFACT: x86_64-mingw64 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: zip + TZ: Asia/Singapore + environment: CI + name: build-x86_64-windows-9101 (Build binaries) + runs-on: + - windows-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - env: + GHC_VERSION: 9.10.1 + name: Run build + run: | + $env:CHERE_INVOKING = 1 + $env:MSYS2_PATH_TYPE = "inherit" + $ErrorActionPreference = "Stop" + C:\msys64\usr\bin\bash -lc "bash .github/scripts/build.sh" + shell: pwsh + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-x86_64-windows-9101 + path: ./out/* + retention-days: 2 + build-x86_64-windows-9122: + env: + ADD_CABAL_ARGS: '' + ARCH: '64' + ARTIFACT: x86_64-mingw64 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: zip + TZ: Asia/Singapore + environment: CI + name: build-x86_64-windows-9122 (Build binaries) + runs-on: + - windows-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - env: + GHC_VERSION: 9.12.2 + name: Run build + run: | + $env:CHERE_INVOKING = 1 + $env:MSYS2_PATH_TYPE = "inherit" + $ErrorActionPreference = "Stop" + C:\msys64\usr\bin\bash -lc "bash .github/scripts/build.sh" + shell: pwsh + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-x86_64-windows-9122 + path: ./out/* + retention-days: 2 + build-x86_64-windows-948: + env: + ADD_CABAL_ARGS: '' + ARCH: '64' + ARTIFACT: x86_64-mingw64 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: zip + TZ: Asia/Singapore + environment: CI + name: build-x86_64-windows-948 (Build binaries) + runs-on: + - windows-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - env: + GHC_VERSION: 9.4.8 + name: Run build + run: | + $env:CHERE_INVOKING = 1 + $env:MSYS2_PATH_TYPE = "inherit" + $ErrorActionPreference = "Stop" + C:\msys64\usr\bin\bash -lc "bash .github/scripts/build.sh" + shell: pwsh + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-x86_64-windows-948 + path: ./out/* + retention-days: 2 + build-x86_64-windows-967: + env: + ADD_CABAL_ARGS: '' + ARCH: '64' + ARTIFACT: x86_64-mingw64 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: zip + TZ: Asia/Singapore + environment: CI + name: build-x86_64-windows-967 (Build binaries) + runs-on: + - windows-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - env: + GHC_VERSION: 9.6.7 + name: Run build + run: | + $env:CHERE_INVOKING = 1 + $env:MSYS2_PATH_TYPE = "inherit" + $ErrorActionPreference = "Stop" + C:\msys64\usr\bin\bash -lc "bash .github/scripts/build.sh" + shell: pwsh + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-x86_64-windows-967 + path: ./out/* + retention-days: 2 + build-x86_64-windows-984: + env: + ADD_CABAL_ARGS: '' + ARCH: '64' + ARTIFACT: x86_64-mingw64 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + S3_HOST: ${{ secrets.S3_HOST }} + 
TARBALL_EXT: zip + TZ: Asia/Singapore + environment: CI + name: build-x86_64-windows-984 (Build binaries) + runs-on: + - windows-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - env: + GHC_VERSION: 9.8.4 + name: Run build + run: | + $env:CHERE_INVOKING = 1 + $env:MSYS2_PATH_TYPE = "inherit" + $ErrorActionPreference = "Stop" + C:\msys64\usr\bin\bash -lc "bash .github/scripts/build.sh" + shell: pwsh + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-x86_64-windows-984 + path: ./out/* + retention-days: 2 + release: + if: startsWith(github.ref, 'refs/tags/') + name: release + needs: + - test-x86_64-mac + - test-aarch64-mac + - test-x86_64-windows + - test-aarch64-linux-ubuntu2004 + - test-x86_64-linux-deb9 + - test-x86_64-linux-deb10 + - test-x86_64-linux-deb11 + - test-x86_64-linux-ubuntu1804 + - test-x86_64-linux-ubuntu2004 + - test-x86_64-linux-ubuntu2204 + - test-x86_64-linux-mint193 + - test-x86_64-linux-mint202 + - test-x86_64-linux-fedora27 + - test-x86_64-linux-fedora33 + - test-x86_64-linux-centos7 + - test-x86_64-linux-unknown + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: bindist-x86_64-apple-darwin + path: ./out + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: bindist-aarch64-apple-darwin + path: ./out + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: bindist-x86_64-mingw64 + path: ./out + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: bindist-aarch64-linux-ubuntu2004 + path: ./out + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: bindist-x86_64-linux-deb9 + path: ./out + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: bindist-x86_64-linux-deb10 + path: ./out + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: bindist-x86_64-linux-deb11 + path: ./out + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: bindist-x86_64-linux-ubuntu1804 + path: ./out + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: bindist-x86_64-linux-ubuntu2004 + path: ./out + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: bindist-x86_64-linux-ubuntu2204 + path: ./out + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: bindist-x86_64-linux-mint193 + path: ./out + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: bindist-x86_64-linux-mint202 + path: ./out + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: bindist-x86_64-linux-fedora27 + path: ./out + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: bindist-x86_64-linux-fedora33 + path: ./out + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: bindist-x86_64-linux-centos7 + path: ./out + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: bindist-x86_64-linux-unknown + path: ./out + - name: Prepare release + run: | + sudo apt-get update && sudo apt-get install -y tar xz-utils + cd out/plan.json + tar cf plan_json.tar * + mv plan_json.tar ../ + cd ../.. 
+ export RELEASE=$GITHUB_REF_NAME + git archive --format=tar.gz -o "out/haskell-language-server-${RELEASE}-src.tar.gz" --prefix="haskell-language-server-${RELEASE}/" HEAD + shell: bash + - name: Release + uses: softprops/action-gh-release@v2 + with: + draft: true + files: | + ./out/*.zip + ./out/*.tar.xz + ./out/*.tar.gz + ./out/*.tar + test-aarch64-linux-ubuntu2004: + env: + ADD_CABAL_ARGS: '' + ARCH: ARM64 + ARTIFACT: aarch64-linux-ubuntu2004 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: test-aarch64-linux-ubuntu2004 (Test binaries) + needs: + - bindist-aarch64-linux-ubuntu2004 + runs-on: + - self-hosted + - Linux + - ARM64 + - maerwald + steps: + - name: clean and git config for aarch64-linux + run: | + find . -mindepth 1 -maxdepth 1 -exec rm -rf -- {} + + git config --global --get-all safe.directory | grep '^\*$' || git config --global --add safe.directory "*" + shell: bash + - name: Checkout + uses: actions/checkout@v4 + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: bindist-aarch64-linux-ubuntu2004 + path: ./out + - name: Run test + uses: docker://hasufell/arm64v8-ubuntu-haskell:focal + with: + args: bash .github/scripts/test.sh + test-aarch64-mac: + env: + ADD_CABAL_ARGS: '' + ARCH: ARM64 + ARTIFACT: aarch64-apple-darwin + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + HOMEBREW_CHANGE_ARCH_TO_ARM: '1' + MACOSX_DEPLOYMENT_TARGET: '10.13' + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: test-aarch64-mac (Test binaries) + needs: + - bindist-aarch64-mac + runs-on: + - self-hosted + - macOS + - ARM64 + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: bindist-aarch64-apple-darwin + path: ./out + - name: Run test + run: | + bash .github/scripts/brew.sh git coreutils llvm@13 autoconf automake tree + export PATH="$HOME/.brew/bin:$HOME/.brew/sbin:$HOME/.brew/opt/llvm@13/bin:$PATH" + export CC="$HOME/.brew/opt/llvm@13/bin/clang" + export CXX="$HOME/.brew/opt/llvm@13/bin/clang++" + export LD=ld + export AR="$HOME/.brew/opt/llvm@13/bin/llvm-ar" + export RANLIB="$HOME/.brew/opt/llvm@13/bin/llvm-ranlib" + bash .github/scripts/test.sh + shell: sh + test-x86_64-linux-centos7: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-centos7 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: test-x86_64-linux-centos7 (Test binaries) + needs: + - bindist-x86_64-linux-centos7 + runs-on: + - ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: bindist-x86_64-linux-centos7 + path: ./out + - name: Test + uses: ./.github/actions/bindist-actions/action-centos7 + with: + stage: TEST + test-x86_64-linux-deb10: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-deb10 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: 
noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: test-x86_64-linux-deb10 (Test binaries) + needs: + - bindist-x86_64-linux-deb10 + runs-on: + - ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: bindist-x86_64-linux-deb10 + path: ./out + - name: Test + uses: ./.github/actions/bindist-actions/action-deb10 + with: + stage: TEST + test-x86_64-linux-deb11: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-deb11 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: test-x86_64-linux-deb11 (Test binaries) + needs: + - bindist-x86_64-linux-deb11 + runs-on: + - ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: bindist-x86_64-linux-deb11 + path: ./out + - name: Test + uses: ./.github/actions/bindist-actions/action-deb11 + with: + stage: TEST + test-x86_64-linux-deb9: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-deb9 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: test-x86_64-linux-deb9 (Test binaries) + needs: + - bindist-x86_64-linux-deb9 + runs-on: + - ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: bindist-x86_64-linux-deb9 + path: ./out + - name: Test + uses: ./.github/actions/bindist-actions/action-deb9 + with: + stage: TEST + test-x86_64-linux-fedora27: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-fedora27 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: test-x86_64-linux-fedora27 (Test binaries) + needs: + - bindist-x86_64-linux-fedora27 + runs-on: + - ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: bindist-x86_64-linux-fedora27 + path: ./out + - name: Test + uses: ./.github/actions/bindist-actions/action-fedora27 + with: + stage: TEST + test-x86_64-linux-fedora33: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-fedora33 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: test-x86_64-linux-fedora33 (Test binaries) + needs: + - bindist-x86_64-linux-fedora33 + runs-on: + - ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: bindist-x86_64-linux-fedora33 + path: ./out + - name: Test + uses: ./.github/actions/bindist-actions/action-fedora33 + with: + stage: TEST + 
test-x86_64-linux-mint193: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-mint193 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: test-x86_64-linux-mint193 (Test binaries) + needs: + - bindist-x86_64-linux-mint193 + runs-on: + - ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: bindist-x86_64-linux-mint193 + path: ./out + - name: Test + uses: ./.github/actions/bindist-actions/action-mint193 + with: + stage: TEST + test-x86_64-linux-mint202: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-mint202 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: test-x86_64-linux-mint202 (Test binaries) + needs: + - bindist-x86_64-linux-mint202 + runs-on: + - ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: bindist-x86_64-linux-mint202 + path: ./out + - name: Test + uses: ./.github/actions/bindist-actions/action-mint202 + with: + stage: TEST + test-x86_64-linux-ubuntu1804: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-ubuntu1804 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: test-x86_64-linux-ubuntu1804 (Test binaries) + needs: + - bindist-x86_64-linux-ubuntu1804 + runs-on: + - ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: bindist-x86_64-linux-ubuntu1804 + path: ./out + - name: Test + uses: ./.github/actions/bindist-actions/action-ubuntu1804 + with: + stage: TEST + test-x86_64-linux-ubuntu2004: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-ubuntu2004 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: test-x86_64-linux-ubuntu2004 (Test binaries) + needs: + - bindist-x86_64-linux-ubuntu2004 + runs-on: + - ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: bindist-x86_64-linux-ubuntu2004 + path: ./out + - name: Test + uses: ./.github/actions/bindist-actions/action-ubuntu2004 + with: + stage: TEST + test-x86_64-linux-ubuntu2204: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-ubuntu2204 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: test-x86_64-linux-ubuntu2204 (Test binaries) + needs: + - 
bindist-x86_64-linux-ubuntu2204 + runs-on: + - ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: bindist-x86_64-linux-ubuntu2204 + path: ./out + - name: Test + uses: ./.github/actions/bindist-actions/action-ubuntu2204 + with: + stage: TEST + test-x86_64-linux-unknown: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-unknown + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: test-x86_64-linux-unknown (Test binaries) + needs: + - bindist-x86_64-linux-unknown + runs-on: + - ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: bindist-x86_64-linux-unknown + path: ./out + - name: Test + uses: ./.github/actions/bindist-actions/action-unknown + with: + stage: TEST + test-x86_64-mac: + env: + ADD_CABAL_ARGS: '' + ARCH: '64' + ARTIFACT: x86_64-apple-darwin + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + MACOSX_DEPLOYMENT_TARGET: '10.13' + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: test-x86_64-mac (Test binaries) + needs: + - bindist-x86_64-mac + runs-on: + - macOS-13 + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: bindist-x86_64-apple-darwin + path: ./out + - name: Run test + run: | + brew install coreutils tree + bash .github/scripts/test.sh + shell: sh + test-x86_64-windows: + env: + ADD_CABAL_ARGS: '' + ARCH: '64' + ARTIFACT: x86_64-mingw64 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: zip + TZ: Asia/Singapore + environment: CI + name: test-x86_64-windows (Test binaries) + needs: + - bindist-x86_64-windows + runs-on: + - windows-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: bindist-x86_64-mingw64 + path: ./out + - name: install windows deps + run: | + C:\msys64\usr\bin\bash -lc "pacman --disable-download-timeout --noconfirm -Syuu" + C:\msys64\usr\bin\bash -lc "pacman --disable-download-timeout --noconfirm -Syuu" + C:\msys64\usr\bin\bash -lc "pacman --disable-download-timeout --noconfirm -S make mingw-w64-x86_64-clang curl autoconf mingw-w64-x86_64-pkgconf ca-certificates base-devel gettext autoconf make libtool automake python p7zip patch unzip zip git" + taskkill /F /FI "MODULES eq msys-2.0.dll" + shell: pwsh + - name: Run test + run: | + $env:CHERE_INVOKING = 1 + $env:MSYS2_PATH_TYPE = "inherit" + C:\msys64\usr\bin\bash -lc "bash .github/scripts/test.sh" + shell: pwsh +name: Build and release +'on': + push: + tags: + - '*' + schedule: + - cron: 0 2 * * 1 diff --git a/.github/workflows/supported-ghc-versions.json b/.github/workflows/supported-ghc-versions.json index 387811c11b..e46e627b7c 100644 --- a/.github/workflows/supported-ghc-versions.json +++ b/.github/workflows/supported-ghc-versions.json @@ -1 +1 @@ -["9.10", "9.8", "9.6", "9.4" , "9.2" ] +["9.12", "9.10", "9.8", "9.6", "9.4"] diff --git a/.github/workflows/test.yml 
b/.github/workflows/test.yml index 84e75963d6..71a9e85443 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -78,9 +78,6 @@ jobs: - true - false exclude: - # Don't do anything for windows on 9.2, it has particularly bad long-path issues - - os: windows-latest - ghc: "9.2" # Exclude the test configuration on macos, it's sufficiently similar to other OSs # that it mostly just burns CI time. Buiding is still useful since it catches # solver issues. @@ -142,7 +139,7 @@ jobs: run: cabal test hls-refactor-plugin-tests || cabal test hls-refactor-plugin-tests # TODO enable when it supports 9.10 - - if: matrix.test && matrix.ghc != '9.10' + - if: matrix.test && matrix.ghc != '9.10' && matrix.ghc != '9.12' name: Test hls-floskell-plugin run: cabal test hls-floskell-plugin-tests || cabal test hls-floskell-plugin-tests @@ -159,17 +156,15 @@ jobs: run: cabal test hls-eval-plugin-tests || cabal test hls-eval-plugin-tests # TODO enable when it supports 9.10 - - if: matrix.test && matrix.ghc != '9.10' + - if: matrix.test && matrix.ghc != '9.10' && matrix.ghc != '9.12' name: Test hls-splice-plugin run: cabal test hls-splice-plugin-tests || cabal test hls-splice-plugin-tests - # TODO enable when it supports 9.10 - - if: matrix.test && matrix.ghc != '9.2' && matrix.ghc != '9.10' + - if: matrix.test && matrix.ghc != '9.12' name: Test hls-stan-plugin run: cabal test hls-stan-plugin-tests || cabal test hls-stan-plugin-tests - # TODO enable when it supports 9.10 - - if: matrix.test && matrix.ghc != '9.10' + - if: matrix.test name: Test hls-stylish-haskell-plugin run: cabal test hls-stylish-haskell-plugin-tests || cabal test hls-stylish-haskell-plugin-tests @@ -230,12 +225,12 @@ jobs: name: Test hls-explicit-record-fields-plugin test suite run: cabal test hls-explicit-record-fields-plugin-tests || cabal test hls-explicit-record-fields-plugin-tests - ## version needs to be limited since the tests depend on cabal-fmt which only builds using specific ghc versions - - if: matrix.test && matrix.ghc == '9.2' + # versions need to be limited since the tests depend on cabal-fmt which only builds with ghc <9.10 + - if: matrix.test && matrix.ghc != '9.10' && matrix.ghc != '9.12' name: Test hls-cabal-fmt-plugin test suite run: cabal test hls-cabal-fmt-plugin-tests --flag=isolateCabalfmtTests || cabal test hls-cabal-fmt-plugin-tests --flag=isolateCabalfmtTests - - if: matrix.test + - if: matrix.test && matrix.ghc != '9.12' name: Test hls-cabal-gild-plugin test suite run: cabal test hls-cabal-gild-plugin-tests --flag=isolateCabalGildTests || cabal test hls-cabal-gild-plugin-tests --flag=isolateCabalGildTests @@ -244,7 +239,7 @@ jobs: run: cabal test hls-cabal-plugin-tests || cabal test hls-cabal-plugin-tests # TODO enable when it supports 9.10 - - if: matrix.test && matrix.ghc != '9.10' + - if: matrix.test && matrix.ghc != '9.10' && matrix.ghc != '9.12' name: Test hls-retrie-plugin test suite run: cabal test hls-retrie-plugin-tests || cabal test hls-retrie-plugin-tests diff --git a/.gitignore b/.gitignore index 29ead939cc..2413a1fcf5 100644 --- a/.gitignore +++ b/.gitignore @@ -7,6 +7,9 @@ cabal.project.local .tasty-rerun-log +# emacs +/.dir-locals.el + # shake build information _build/ diff --git a/.hlint.yaml b/.hlint.yaml index 0bf0e0a313..edc6886871 100644 --- a/.hlint.yaml +++ b/.hlint.yaml @@ -110,6 +110,7 @@ - CompletionTests #Previously part of GHCIDE Main tests - DiagnosticTests #Previously part of GHCIDE Main tests - FindDefinitionAndHoverTests #Previously part of GHCIDE Main tests + - 
FindImplementationAndHoverTests #Previously part of GHCIDE Main tests - TestUtils #Previously part of GHCIDE Main tests - CodeLensTests #Previously part of GHCIDE Main tests @@ -134,6 +135,7 @@ - Ide.Plugin.Eval.Parse.Comments - Ide.Plugin.Eval.CodeLens - FindDefinitionAndHoverTests #Previously part of GHCIDE Main tests + - FindImplementationAndHoverTests #Previously part of GHCIDE Main tests - name: [Prelude.init, Data.List.init] within: diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 87de7c4790..03edd673b7 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,32 +1,23 @@ -{ - "repos": [ - { - "hooks": [ - { - "entry": "stylish-haskell --inplace", - "exclude": "(^Setup.hs$|test/testdata/.*$|test/data/.*$|test/manual/lhs/.*$|^hie-compat/.*$|^plugins/hls-tactics-plugin/.*$|^ghcide/src/Development/IDE/GHC/Compat.hs$|^plugins/hls-refactor-plugin/src/Development/IDE/GHC/Compat/ExactPrint.hs$|^ghcide/src/Development/IDE/GHC/Compat/Core.hs$|^ghcide/src/Development/IDE/Spans/Pragmas.hs$|^ghcide/src/Development/IDE/LSP/Outline.hs$|^plugins/hls-splice-plugin/src/Ide/Plugin/Splice.hs$|^ghcide/src/Development/IDE/Core/Rules.hs$|^ghcide/src/Development/IDE/Core/Compile.hs$|^plugins/hls-refactor-plugin/src/Development/IDE/GHC/ExactPrint.hs$|^plugins/hls-refactor-plugin/src/Development/IDE/Plugin/CodeAction/ExactPrint.hs$)", - "files": "\\.l?hs$", - "id": "stylish-haskell", - "language": "system", - "name": "stylish-haskell", - "pass_filenames": true, - "types": [ - "file" - ] - } - ], - "repo": "local" - }, - { - "repo": "https://github.com/pre-commit/pre-commit-hooks", - "rev": "v4.1.0", - "hooks": [ - { - "id": "mixed-line-ending", - "args": ["--fix", "lf"], - "exclude": "test/testdata/.*CRLF.*?\\.hs$" - } - ] - } - ] -} +# https://pre-commit.com/ +# https://github.com/pre-commit/pre-commit +repos: + - hooks: + - entry: stylish-haskell --inplace + exclude: >- + (^Setup.hs$|test/testdata/.*$|test/data/.*$|test/manual/lhs/.*$|^hie-compat/.*$|^plugins/hls-tactics-plugin/.*$|^ghcide/src/Development/IDE/GHC/Compat.hs$|^plugins/hls-refactor-plugin/src/Development/IDE/GHC/Compat/ExactPrint.hs$|^ghcide/src/Development/IDE/GHC/Compat/Core.hs$|^ghcide/src/Development/IDE/Spans/Pragmas.hs$|^ghcide/src/Development/IDE/LSP/Outline.hs$|^plugins/hls-splice-plugin/src/Ide/Plugin/Splice.hs$|^ghcide/src/Development/IDE/Core/Rules.hs$|^ghcide/src/Development/IDE/Core/Compile.hs$|^plugins/hls-refactor-plugin/src/Development/IDE/GHC/ExactPrint.hs$|^plugins/hls-refactor-plugin/src/Development/IDE/Plugin/CodeAction/ExactPrint.hs$|^plugins/hls-eval-plugin/src/Ide/Plugin/Eval/Handlers.hs$) + files: \.l?hs$ + id: stylish-haskell + language: system + name: stylish-haskell + pass_filenames: true + types: + - file + repo: local + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: v4.1.0 + hooks: + - id: mixed-line-ending + args: + - '--fix' + - lf + exclude: test/testdata/.*CRLF.*?\.hs$ diff --git a/.readthedocs.yaml b/.readthedocs.yaml index c420108677..f5135a9af1 100644 --- a/.readthedocs.yaml +++ b/.readthedocs.yaml @@ -1,6 +1,7 @@ version: 2 sphinx: + builder: "html" configuration: docs/conf.py build: diff --git a/ChangeLog.md b/ChangeLog.md index 34465b5910..3c8441f26d 100644 --- a/ChangeLog.md +++ b/ChangeLog.md @@ -1,5 +1,275 @@ # Changelog for haskell-language-server +## 2.10.0.0 + +- Bindists for GHC 9.12.2 + - This is only basic support, many plugins are not yet compatible. 
+- Bindists for GHC 9.8.4 +- Bindists for GHC 9.6.7 +- `hls-cabal-plugin` features + - Support for `cabal-add` + - Goto Definition for common sections + - Outline of .cabal files +- Fix handling of LSP resolve requests +- Display Inlay Hints + - Records + - Imports + +### Pull Requests + +- Fix cabal check for Hackage release + ([#4528](https://github.com/haskell/haskell-language-server/pull/4528)) by @fendor +- GHC 9.12 support + ([#4527](https://github.com/haskell/haskell-language-server/pull/4527)) by @wz1000 +- Bump cachix/install-nix-action from 30 to 31 + ([#4525](https://github.com/haskell/haskell-language-server/pull/4525)) by @dependabot[bot] +- Bump cachix/cachix-action from 15 to 16 + ([#4523](https://github.com/haskell/haskell-language-server/pull/4523)) by @dependabot[bot] +- Bump haskell-actions/setup from 2.7.9 to 2.7.10 + ([#4522](https://github.com/haskell/haskell-language-server/pull/4522)) by @dependabot[bot] +- Bump haskell-actions/setup from 2.7.9 to 2.7.10 in /.github/actions/setup-build + ([#4521](https://github.com/haskell/haskell-language-server/pull/4521)) by @dependabot[bot] +- Move ghcide-test to stand alone dir + ([#4520](https://github.com/haskell/haskell-language-server/pull/4520)) by @soulomoon +- refactor: remove unnecessary instance and use of unsafeCoerce + ([#4518](https://github.com/haskell/haskell-language-server/pull/4518)) by @MangoIV +- convert `pre-commit-config.yaml` from JSON to YAML + ([#4513](https://github.com/haskell/haskell-language-server/pull/4513)) by @peterbecich +- Enable bench for 9.10 + ([#4512](https://github.com/haskell/haskell-language-server/pull/4512)) by @soulomoon +- Bugfix: Explicit record fields inlay hints for polymorphic records + ([#4510](https://github.com/haskell/haskell-language-server/pull/4510)) by @wczyz +- Capitalization of "Replace" + ([#4509](https://github.com/haskell/haskell-language-server/pull/4509)) by @dschrempf +- document eval plugin not supporting multiline expressions + ([#4495](https://github.com/haskell/haskell-language-server/pull/4495)) by @noughtmare +- Documentation: Imrpove "Contributing" (and amend Sphinx builders) + ([#4494](https://github.com/haskell/haskell-language-server/pull/4494)) by @dschrempf +- Documentation: HLS plugin tutorial improvements + ([#4491](https://github.com/haskell/haskell-language-server/pull/4491)) by @dschrempf +- Nix tooling (minor changes) + ([#4490](https://github.com/haskell/haskell-language-server/pull/4490)) by @dschrempf +- Bump haskell-actions/setup from 2.7.8 to 2.7.9 + ([#4483](https://github.com/haskell/haskell-language-server/pull/4483)) by @dependabot[bot] +- Bump haskell-actions/setup from 2.7.8 to 2.7.9 in /.github/actions/setup-build + ([#4482](https://github.com/haskell/haskell-language-server/pull/4482)) by @dependabot[bot] +- Rework bindist CI + ([#4481](https://github.com/haskell/haskell-language-server/pull/4481)) by @wz1000 +- Remove Unsafe Dynflags deadcode, they don't exist any more! 
+ ([#4480](https://github.com/haskell/haskell-language-server/pull/4480)) by @fendor +- Implement fallback handler for `*/resolve` requests + ([#4478](https://github.com/haskell/haskell-language-server/pull/4478)) by @fendor +- Bump haskell-actions/setup from 2.7.7 to 2.7.8 + ([#4477](https://github.com/haskell/haskell-language-server/pull/4477)) by @dependabot[bot] +- Bump haskell-actions/setup from 2.7.7 to 2.7.8 in /.github/actions/setup-build + ([#4476](https://github.com/haskell/haskell-language-server/pull/4476)) by @dependabot[bot] +- Bump haskell-actions/setup from 2.7.6 to 2.7.7 + ([#4471](https://github.com/haskell/haskell-language-server/pull/4471)) by @dependabot[bot] +- Bump haskell-actions/setup from 2.7.6 to 2.7.7 in /.github/actions/setup-build + ([#4470](https://github.com/haskell/haskell-language-server/pull/4470)) by @dependabot[bot] +- Allow building with GHC 9.8.4 + ([#4459](https://github.com/haskell/haskell-language-server/pull/4459)) by @fendor +- Update python read-the-docs dependencies to latest + ([#4457](https://github.com/haskell/haskell-language-server/pull/4457)) by @fendor +- More tests and better docs for cabal-add + ([#4455](https://github.com/haskell/haskell-language-server/pull/4455)) by @VenInf +- ci(mergify): upgrade configuration to current format + ([#4454](https://github.com/haskell/haskell-language-server/pull/4454)) by @mergify[bot] +- Support record positional construction inlay hints + ([#4447](https://github.com/haskell/haskell-language-server/pull/4447)) by @jetjinser +- Build HLS with GHC 9.8.3 + ([#4444](https://github.com/haskell/haskell-language-server/pull/4444)) by @fendor +- Don't suggest -Wno-deferred-out-of-scope-variables + ([#4441](https://github.com/haskell/haskell-language-server/pull/4441)) by @jeukshi +- Enable hls-stan-plugin for GHC 9.10.1 + ([#4437](https://github.com/haskell/haskell-language-server/pull/4437)) by @fendor +- Enhance formatting of the `cabal-version` error message + ([#4436](https://github.com/haskell/haskell-language-server/pull/4436)) by @fendor +- Support structured diagnostics 2 + ([#4433](https://github.com/haskell/haskell-language-server/pull/4433)) by @noughtmare +- Cabal ignore if for completions (#4289) + ([#4427](https://github.com/haskell/haskell-language-server/pull/4427)) by @SamuelLess +- Fix cabal-add testdata for hls-cabal-plugin-tests + ([#4426](https://github.com/haskell/haskell-language-server/pull/4426)) by @fendor +- gracefully handle errors for unsupported cabal version + ([#4425](https://github.com/haskell/haskell-language-server/pull/4425)) by @fridewald +- Fix pre-commit in CI + ([#4424](https://github.com/haskell/haskell-language-server/pull/4424)) by @fendor +- link executables dynamically to speed up linking + ([#4423](https://github.com/haskell/haskell-language-server/pull/4423)) by @develop7 +- Cabal plugin: implement check for package.yaml in a stack project + ([#4422](https://github.com/haskell/haskell-language-server/pull/4422)) by @JMoss-dev +- Fix exporting operator pattern synonym + ([#4420](https://github.com/haskell/haskell-language-server/pull/4420)) by @pbrinkmeier +- Add docs about running tests for new contributors + ([#4418](https://github.com/haskell/haskell-language-server/pull/4418)) by @pbrinkmeier +- Bump cachix/install-nix-action from 29 to 30 + ([#4413](https://github.com/haskell/haskell-language-server/pull/4413)) by @dependabot[bot] +- Bump cachix/install-nix-action from V27 to 29 + ([#4411](https://github.com/haskell/haskell-language-server/pull/4411)) by 
@dependabot[bot] +- Avoid expectFail in the test suite + ([#4402](https://github.com/haskell/haskell-language-server/pull/4402)) by @sgillespie +- Fix typos in hls-cabal-fmt-plugin + ([#4399](https://github.com/haskell/haskell-language-server/pull/4399)) by @fendor +- Jump to instance definition and explain typeclass evidence + ([#4392](https://github.com/haskell/haskell-language-server/pull/4392)) by @fendor +- Update cabal-add dependency + ([#4389](https://github.com/haskell/haskell-language-server/pull/4389)) by @VenInf +- Improve error message for `--probe-tools` + ([#4387](https://github.com/haskell/haskell-language-server/pull/4387)) by @sgillespie +- Documentation for build-depends on hover + ([#4385](https://github.com/haskell/haskell-language-server/pull/4385)) by @VenInf +- Bump haskell-actions/setup from 2.7.3 to 2.7.6 + ([#4384](https://github.com/haskell/haskell-language-server/pull/4384)) by @dependabot[bot] +- Bump haskell-actions/setup from 2.7.5 to 2.7.6 in /.github/actions/setup-build + ([#4383](https://github.com/haskell/haskell-language-server/pull/4383)) by @dependabot[bot] +- Clear GHCup caches in CI to not run out of space in CI + ([#4382](https://github.com/haskell/haskell-language-server/pull/4382)) by @fendor +- Cabal go to module's definition + ([#4380](https://github.com/haskell/haskell-language-server/pull/4380)) by @VenInf +- Add Goto Definition for cabal common sections + ([#4375](https://github.com/haskell/haskell-language-server/pull/4375)) by @ChristophHochrainer +- cabal-add integration as a CodeAction + ([#4360](https://github.com/haskell/haskell-language-server/pull/4360)) by @VenInf +- Bump haskell-actions/setup from 2.7.3 to 2.7.5 in /.github/actions/setup-build + ([#4354](https://github.com/haskell/haskell-language-server/pull/4354)) by @dependabot[bot] +- Support Inlay hints for record wildcards + ([#4351](https://github.com/haskell/haskell-language-server/pull/4351)) by @jetjinser +- Remove componentInternalUnits + ([#4350](https://github.com/haskell/haskell-language-server/pull/4350)) by @soulomoon +- Fix core file location in `GetLinkable` + ([#4347](https://github.com/haskell/haskell-language-server/pull/4347)) by @soulomoon +- Release 2.9.0.1 + ([#4346](https://github.com/haskell/haskell-language-server/pull/4346)) by @wz1000 +- Using captureKicksDiagnostics to speed up multiple plugin tests + ([#4339](https://github.com/haskell/haskell-language-server/pull/4339)) by @komikat +- Get files from Shake VFS from within plugin handlers + ([#4328](https://github.com/haskell/haskell-language-server/pull/4328)) by @awjchen +- Cabal plugin outline view + ([#4323](https://github.com/haskell/haskell-language-server/pull/4323)) by @VenInf +- Add missing documentation for cabal formatters + ([#4322](https://github.com/haskell/haskell-language-server/pull/4322)) by @fendor +- Provide explicit import in inlay hints + ([#4235](https://github.com/haskell/haskell-language-server/pull/4235)) by @jetjinser +- Add codeactions for cabal field names + ([#3273](https://github.com/haskell/haskell-language-server/pull/3273)) by @dyniec + +## 2.9.0.1 + +- Bindists for GHC 9.6.6 + +## 2.9.0.0 + +- Bindists for GHC 9.10.1 by @wz1000, @jhrcek, @michaelpj +- More hls-graph reliability improvements by @soulomoon +- Refactoring of test suite runners by @soulomoon +- Fixes in multiple home units support by @wz1000 + +### Pull Requests + +- Fix quadratic memory usage in GetLocatedImports + ([#4318](https://github.com/haskell/haskell-language-server/pull/4318)) by @mpickering +- 
Bump stack configs + CI to 9.6.5 and 9.8.2 + ([#4316](https://github.com/haskell/haskell-language-server/pull/4316)) by @jhrcek +- Add support for Fourmolu 0.16 + ([#4314](https://github.com/haskell/haskell-language-server/pull/4314)) by @ brandonchinn178 +- Code action to remove redundant record field import (fixes #4220) + ([#4308](https://github.com/haskell/haskell-language-server/pull/4308)) by @battermann +- Use restricted monad for plugins (#4057) + ([#4304](https://github.com/haskell/haskell-language-server/pull/4304)) by @awjchen +- 4301 we need to implement utility to wait for all runnning keys in hls graph done + ([#4302](https://github.com/haskell/haskell-language-server/pull/4302)) by @soulomoon +- Call useWithStale instead of useWithStaleFast when calling ParseCabalFields + ([#4294](https://github.com/haskell/haskell-language-server/pull/4294)) by @VeryMilkyJoe +- test: add test documenting #806 + ([#4292](https://github.com/haskell/haskell-language-server/pull/4292)) by @develop7 +- ghcide: drop ghc-check and ghc-paths dependency + ([#4291](https://github.com/haskell/haskell-language-server/pull/4291)) by @wz1000 +- Limit number of valid hole fits to 10 + ([#4288](https://github.com/haskell/haskell-language-server/pull/4288)) by @akshaymankar +- Add common stanza to completion data + ([#4286](https://github.com/haskell/haskell-language-server/pull/4286)) by @VeryMilkyJoe +- FindImports: ThisPkg means some home unit, not "this" unit + ([#4284](https://github.com/haskell/haskell-language-server/pull/4284)) by @wz1000 +- Remove redudant absolutization in session loader + ([#4280](https://github.com/haskell/haskell-language-server/pull/4280)) by @soulomoon +- Bump to new lsp versions + ([#4279](https://github.com/haskell/haskell-language-server/pull/4279)) by @michaelpj +- Put more test code into pre-commit + ([#4275](https://github.com/haskell/haskell-language-server/pull/4275)) by @soulomoon +- Delete library ghcide test utils + ([#4274](https://github.com/haskell/haskell-language-server/pull/4274)) by @soulomoon +- Delete testUtil from ghcide-tests + ([#4272](https://github.com/haskell/haskell-language-server/pull/4272)) by @soulomoon +- CI change, only run bench on performance label + ([#4271](https://github.com/haskell/haskell-language-server/pull/4271)) by @soulomoon +- Migrate WatchedFileTests + ([#4269](https://github.com/haskell/haskell-language-server/pull/4269)) by @soulomoon +- Migrate UnitTests + ([#4268](https://github.com/haskell/haskell-language-server/pull/4268)) by @soulomoon +- Migrate SafeTests + ([#4267](https://github.com/haskell/haskell-language-server/pull/4267)) by @soulomoon +- Migrate SymlinkTests + ([#4266](https://github.com/haskell/haskell-language-server/pull/4266)) by @soulomoon +- Remove unused and outdated CHANGELOG files + ([#4264](https://github.com/haskell/haskell-language-server/pull/4264)) by @fendor +- Enable cabal flaky test + ([#4263](https://github.com/haskell/haskell-language-server/pull/4263)) by @soulomoon +- Migrate RootUriTests + ([#4261](https://github.com/haskell/haskell-language-server/pull/4261)) by @soulomoon +- Migrate PreprocessorTests + ([#4260](https://github.com/haskell/haskell-language-server/pull/4260)) by @soulomoon +- Migrate PluginSimpleTests + ([#4259](https://github.com/haskell/haskell-language-server/pull/4259)) by @soulomoon +- Migrate ClientSettingsTests + ([#4258](https://github.com/haskell/haskell-language-server/pull/4258)) by @soulomoon +- Unify critical session running in hls + 
([#4256](https://github.com/haskell/haskell-language-server/pull/4256)) by @soulomoon +- Bump cachix/cachix-action from 14 to 15 + ([#4255](https://github.com/haskell/haskell-language-server/pull/4255)) by @dependabot[bot] +- Bump haskell-actions/setup from 2.7.2 to 2.7.3 + ([#4254](https://github.com/haskell/haskell-language-server/pull/4254)) by @dependabot[bot] +- Bump haskell-actions/setup from 2.7.2 to 2.7.3 in /.github/actions/setup-build + ([#4253](https://github.com/haskell/haskell-language-server/pull/4253)) by @dependabot[bot] +- Shorter file names completion + ([#4252](https://github.com/haskell/haskell-language-server/pull/4252)) by @VenInf +- Fix progress start delay + ([#4249](https://github.com/haskell/haskell-language-server/pull/4249)) by @michaelpj +- Bump cachix/install-nix-action from 26 to 27 + ([#4245](https://github.com/haskell/haskell-language-server/pull/4245)) by @dependabot[bot] +- Bump haskell-actions/setup from 2.7.1 to 2.7.2 + ([#4244](https://github.com/haskell/haskell-language-server/pull/4244)) by @dependabot[bot] +- Bump haskell-actions/setup from 2.7.1 to 2.7.2 in /.github/actions/setup-build + ([#4243](https://github.com/haskell/haskell-language-server/pull/4243)) by @dependabot[bot] +- Enable test for #717 + ([#4241](https://github.com/haskell/haskell-language-server/pull/4241)) by @soulomoon +- Remove Pepe from CODEOWNERS + ([#4239](https://github.com/haskell/haskell-language-server/pull/4239)) by @michaelpj +- Fix resultBuilt(dirty mechanism) in hls-graph + ([#4238](https://github.com/haskell/haskell-language-server/pull/4238)) by @soulomoon +- Support for 9.10 + ([#4233](https://github.com/haskell/haskell-language-server/pull/4233)) by @wz1000 +- Refactor hls-test-util and reduce getCurrentDirectory after initilization + ([#4231](https://github.com/haskell/haskell-language-server/pull/4231)) by @soulomoon +- [Migrate BootTests] part of #4173 Migrate ghcide tests to hls test utils + ([#4227](https://github.com/haskell/haskell-language-server/pull/4227)) by @soulomoon +- Actually enable pedantic flag in ci flags job + ([#4224](https://github.com/haskell/haskell-language-server/pull/4224)) by @jhrcek +- Cleanup cabal files, ghc compat code, fix ghc warnings + ([#4222](https://github.com/haskell/haskell-language-server/pull/4222)) by @jhrcek +- Another attempt at using the lsp API for some progress reporting + ([#4218](https://github.com/haskell/haskell-language-server/pull/4218)) by @michaelpj +- [Migrate diagnosticTests] part of #4173 Migrate ghcide tests to hls test utils + ([#4207](https://github.com/haskell/haskell-language-server/pull/4207)) by @soulomoon +- Prepare release 2.8.0.0 + ([#4191](https://github.com/haskell/haskell-language-server/pull/4191)) by @wz1000 +- Stabilize the build system by correctly house keeping the dirtykeys and rule values [flaky test #4185 #4093] + ([#4190](https://github.com/haskell/haskell-language-server/pull/4190)) by @soulomoon +- hls-cabal-plugin: refactor context search to use `readFields` + ([#4186](https://github.com/haskell/haskell-language-server/pull/4186)) by @fendor +- 3944 extend the properties api to better support nested configuration + ([#3952](https://github.com/haskell/haskell-language-server/pull/3952)) by @soulomoon + ## 2.8.0.0 - Bindists for GHC 9.6.5 diff --git a/RELEASING.md b/RELEASING.md index 73f887b9fc..a48b32cb93 100644 --- a/RELEASING.md +++ b/RELEASING.md @@ -3,10 +3,9 @@ ## Release checklist - [ ] check ghcup supports new GHC releases if any -- [ ] set the supported GHCs in workflow 
file `.github/workflows/release.yaml` - - There is currently a list of GHC versions for each major platform. Search for `ghc: [` to find all lists. - - Look for `TODO:` to find locations that require extra care for GHC versions. - [ ] check all plugins still work if release includes code changes +- [ ] set the supported GHCs in workflow file `.github/generate-ci/gen_ci.hs` +- [ ] regenerate the CI via `./.github/generate-ci/generate-jobs` - [ ] bump package versions in all `*.cabal` files (same version as hls) - HLS uses lockstep versioning. The core packages and all plugins use the same version number, and only support exactly this version. - Exceptions: @@ -21,6 +20,7 @@ - Generate a ChangeLog via `./GenChangelogs.hs ` - `` is the git tag you want to generate the ChangeLog from. - `` is a github access key: https://github.com/settings/tokens +- [ ] update https://haskell-language-server.readthedocs.io/en/latest/support/ghc-version-support.html#current-ghc-version-support-status - [ ] create release branch as `wip/` - `git switch -c wip/` - [ ] create release tag as `` @@ -50,7 +50,6 @@ - [ ] publish release on github - [ ] upload hackage packages - requires credentials -- [ ] update https://haskell-language-server.readthedocs.io/en/latest/support/ghc-version-support.html#current-ghc-version-support-status - [ ] Supported tools table needs to be updated: - https://www.haskell.org/ghcup/install/#supported-platforms - https://github.com/haskell/ghcup-hs/blob/master/docs/install.md#supported-platforms diff --git a/cabal.project b/cabal.project index 18ce3e76f5..f79f33e7db 100644 --- a/cabal.project +++ b/cabal.project @@ -7,11 +7,14 @@ packages: ./hls-plugin-api ./hls-test-utils -index-state: 2024-06-13T17:12:34Z + +index-state: 2025-05-06T13:26:29Z tests: True test-show-details: direct +benchmarks: True + write-ghc-environment-files: never -- Many of our tests only work single-threaded, and the only way to @@ -41,17 +44,16 @@ constraints: bitvec -simd, -if impl(ghc >= 9.9) +-- Some of the formatters need the latest Cabal-syntax version, +-- but 'cabal-install-parsers-0.6.2' only has Cabal-syntax (>=3.12.0.0 && <3.13). +-- So, we relax the upper bounds here. +-- fourmolu-0.18.0 and ormolu-0.8 depend on Cabal-syntax == 3.14.*, while +-- cabal-add depends on cabal-install-parsers. +allow-newer: + cabal-install-parsers:Cabal-syntax, + +if impl(ghc >= 9.11) benchmarks: False - constraints: - lens >= 5.3.2, - -- See - -- https://github.com/haskell/stylish-haskell/issues/479 - -- https://github.com/ennocramer/floskell/pull/82 - -- https://github.com/ndmitchell/hlint/pull/1594 - haskell-language-server -stylishHaskell -hlint -retrie -splice -floskell, allow-newer: - haddock-library:base, - haddock-library:containers, -else - benchmarks: True + cabal-install-parsers:base, + cabal-install-parsers:time, diff --git a/docs/Makefile b/docs/Makefile index d4bb2cbb9e..bb113155fa 100644 --- a/docs/Makefile +++ b/docs/Makefile @@ -1,5 +1,4 @@ # Minimal makefile for Sphinx documentation -# # You can set these variables from the command line, and also # from the environment for the first two. @@ -8,13 +7,7 @@ SPHINXBUILD ?= sphinx-build SOURCEDIR = . BUILDDIR = _build -# Put it first so that "make" without argument is like "make help". -help: - @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) +.PHONY: Makefile -.PHONY: help Makefile - -# Catch-all target: route all unknown targets to Sphinx using the new -# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). 
-%: Makefile - @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) +html: Makefile + @$(SPHINXBUILD) -n -W "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) diff --git a/docs/configuration.md b/docs/configuration.md index 4edc2c7936..9da816c09e 100644 --- a/docs/configuration.md +++ b/docs/configuration.md @@ -41,9 +41,11 @@ This option obviously would not make sense for language servers for other langua Here is a list of the additional settings currently supported by `haskell-language-server`, along with their setting key (you may not need to know this) and default: - Formatting provider (`haskell.formattingProvider`, default `ormolu`): what formatter to use; one of `floskell`, `ormolu`, `fourmolu`, or `stylish-haskell`. +- Cabal formatting provider (`haskell.cabalFormattingProvider`, default `cabal-gild`): what formatter to use for cabal files; one of `cabal-gild` or `cabal-fmt`. - Max completions (`haskell.maxCompletions`, default 40): maximum number of completions sent to the LSP client. - Check project (`haskell.checkProject`, default true): whether to typecheck the entire project on initial load. As it is activated by default, it can lead to poor performance in large projects. - Check parents (`haskell.checkParents`, default `CheckOnSave`): when to typecheck reverse dependencies of a file; one of `NeverCheck`, `CheckOnSave` (means dependent/parent modules will only be checked when you save), or `AlwaysCheck` (means re-typechecking them on every change). +- Session loading preference (`haskell.sessionLoading`, default `singleComponent`): how to load sessions; one of `singleComponent` (means always loading only a single component when a new component is discovered) or `multipleComponents` (means always preferring loading multiple components in the cradle at once). `multipleComponents` might not always be possible if the tool doesn't support loading multiple components at once. The cradle can decide how to handle these situations, and whether to honour the preference at all. #### Generic plugin configuration @@ -61,7 +63,7 @@ Plugins have a generic config to control their behaviour. The schema of such con - `haskell.plugin.eval.config.diff`, default true: When reloading haddock test results in changes, mark it with WAS/NOW. - `haskell.plugin.eval.config.exception`, default false: When the command results in an exception, mark it with `*** Exception:`. - `rename`: - - `haskell.plugin.rename.config.diff`, default false: Enables renaming across modules (experimental) + - `haskell.plugin.rename.config.crossModule`, default false: Enables renaming across modules (experimental) - `ghcide-completions`: - `haskell.plugin.ghcide-completions.config.snippetsOn`, default true: Inserts snippets when using code completions. - `haskell.plugin.ghcide-completions.config.autoExtendOn`, default true: Extends the import list automatically when completing an out-of-scope identifier. diff --git a/docs/contributing/contributing.md b/docs/contributing/contributing.md index c38ce0421d..134a03b89c 100644 --- a/docs/contributing/contributing.md +++ b/docs/contributing/contributing.md @@ -2,9 +2,9 @@ The Haskell tooling dream is near, we need your help! -## How to contact the haskell ide team +## How to contact the Haskell Language Server (HLS) team -- Join the [haskell-language-server channel](https://matrix.to/#/#haskell-language-server:matrix.org) in [matrix](https://matrix.org/) (primary communication channel).
+- Join the [haskell-language-server channel](https://matrix.to/#/#haskell-language-server:matrix.org) on [matrix](https://matrix.org/) (primary communication channel). - Join [our IRC channel](https://web.libera.chat/?channels=#haskell-language-server) at `#haskell-language-server` on [`libera`](https://libera.chat/) (secondary communication channel - all messages in this IRC channel are automatically bridged to the Matrix channel). - Visit [the project GitHub repo](https://github.com/haskell/haskell-language-server) to view the source code, or open issues or pull requests. @@ -17,7 +17,7 @@ $ git clone https://github.com/haskell/haskell-language-server The project can then be built with both `cabal build` and `stack build`. -### Using Cabal +### Building with Cabal ```shell # If you have not run `cabal update` in a while @@ -26,15 +26,15 @@ $ cabal update $ cabal build ``` -### Using Stack +### Building with Stack ```shell $ stack build ``` -### Using Nix +### Building with Nix -The instructions below show how to set up a Cachix binary cache and open a nix shell for local development. +The instructions below show how to set up a Cachix binary cache and open a Nix shell for local development. ```shell $ cachix use haskell-language-server @@ -45,19 +45,19 @@ $ cabal build #### Flakes support -If you are using nix 2.4 style command (enabled by `experimental-features = nix-command`), +If you are using Nix 2.4 style commands (enabled by `experimental-features = nix-command`), you can use `nix develop` instead of `nix-shell` to enter the development shell. To enter the shell with specific GHC versions: -* `nix develop` - default GHC version -* `nix develop .#shell-ghc90` - GHC 9.0.1 (substitute GHC version as appropriate) +* `nix develop` - default GHC version, +* `nix develop .#shell-ghc90` - GHC 9.0.1 (substitute GHC version as appropriate). -If you are looking for a Nix expression to create haskell-language-server binaries, see https://github.com/haskell/haskell-language-server/issues/122 +If you are looking for a Nix expression to create `haskell-language-server` binaries, see https://github.com/haskell/haskell-language-server/issues/122 ## Testing The tests make use of the [Tasty](https://github.com/feuerbach/tasty) test framework. -There are two test suites in the main haskell-language-server package, functional tests, and wrapper tests. +There are two test suites in the main `haskell-language-server` package: functional tests and wrapper tests. Some of the wrapper tests expect `stack` to be present on the system, or else they fail. Other project packages, like the core library or plugins, can have their own test suite. @@ -81,10 +81,18 @@ Running just the wrapper tests ```bash $ cabal test wrapper-test ``` +Running just the tests for a specific plugin + +```bash +$ cabal test hls-<plugin-name>-plugin-tests +# E.g. +$ cabal test hls-refactor-plugin-tests +``` + Running a subset of tests Tasty supports providing -[Patterns](https://github.com/feuerbach/tasty#patterns) as command +[patterns](https://github.com/feuerbach/tasty#patterns) as command line arguments, to select the specific tests to run. ```bash @@ -92,11 +100,10 @@ $ cabal test func-test --test-option "-p hlint" ``` The above recompiles everything every time you use a different test option though.
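+If you are unsure which test names you can match with `-p`, you can first ask Tasty to list them. A small sketch (this assumes the test suite exposes Tasty's standard `--list-tests` flag):
+
+```bash
+# Print the names of the tests in the functional test suite without running them
+$ cabal test func-test --test-option "--list-tests"
+```
+
+This also goes through `--test-option`, so it carries the same recompilation cost as the pattern example above.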
- -An alternative, which only recompiles when tests (or dependencies) change: +An alternative, which only recompiles when tests (or dependencies) change is to pass the `TASTY_PATTERN` environment variable: ```bash -$ cabal run haskell-language-server:func-test -- -p "hlint enables" +$ TASTY_PATTERN='hlint' cabal test func-test ``` ## Using HLS on HLS code @@ -119,7 +126,7 @@ If you want to test HLS while hacking on it (you can even test it on HLS codebas 3. (Every time you change the HLS code) Rebuild HLS 4. (Every time you change the HLS code) Restart the LSP workspace -### Find the path to the hacked HLS you build +### Find the path to your HLS build Note that unless you change the GHC version or the HLS version between builds, the path should remain the same, this is why you need to set it only once. #### Using Cabal @@ -138,9 +145,9 @@ $ echo $(pwd)/$(stack path --dist-dir)/build/haskell-language-server/haskell-lan /haskell-language-server ``` -### Configure your editor to use it +### Configuring your editor to use your HLS build -#### VS Code +#### Configuring VS Code When using VS Code you can set up each project to use a specific HLS executable: - If it doesn't already exist in your project directory, create a directory called `.vscode`. @@ -151,7 +158,7 @@ When using VS Code you can set up each project to use a specific HLS executable: } ``` -#### Emacs +#### Configuring Emacs There are several ways to configure the HLS server path: - `M-x customize-grouplsp-haskellLsp Haskell Server Path` - Evaluate `(setq lsp-haskell-server-path "/path/to/your/hacked/haskell-language-server")` @@ -173,63 +180,56 @@ There are several ways to configure the HLS server path: The project includes a [`.editorconfig`](https://editorconfig.org) [file](https://github.com/haskell/haskell-language-server/blob/master/.editorconfig) with the editor basic settings used by the project. However, most editors will need some action to honour those settings automatically. -For example vscode needs to have installed a specific [extension](https://marketplace.visualstudio.com/items?itemName=EditorConfig.EditorConfig). +For example VS Code needs to have installed a specific [extension](https://marketplace.visualstudio.com/items?itemName=EditorConfig.EditorConfig). Please, try to follow those basic settings to keep the codebase as uniform as possible. ### Formatter pre-commit hook -We are using [pre-commit](https://pre-commit.com/) to configure git pre-commit hook for formatting. Although it is possible to run formatting manually, we recommend you to use it to set pre-commit hook as our CI checks pre-commit hook is applied or not. +We are using [pre-commit](https://pre-commit.com/) to configure the git pre-commit hook for formatting. Although it is possible to format code manually, we recommend you to use the pre-commit hook as our CI checks if the hook was applied or not. -If you are using Nix or Gitpod, pre-commit hook is automatically installed. Otherwise, follow instructions on -[https://pre-commit.com/](https://pre-commit.com/) to install the `pre-commit` tool, then run the following command: +If you are using Nix or Gitpod, the pre-commit hook is automatically installed. Otherwise, follow the instructions on +[https://pre-commit.com/](https://pre-commit.com/) to install the `pre-commit` tool. Then run the following command: ```sh pre-commit install ``` -#### Why some components are excluded from automatic formatting? +#### Why are some components excluded from automatic formatting? 
-- `test/testdata` and `test/data` are there as we want to test formatting plugins. -- `hie-compat` is there as we want to keep its code as close to GHC as possible. +- `test/testdata` and `test/data` are excluded because we want to test formatting plugins. +- `hie-compat` is excluded because we want to keep its code as close to GHC as possible. -## Introduction tutorial +## Plugin tutorial -See the [tutorial](./plugin-tutorial.md) on writing a plugin in HLS. +See the [tutorial on writing a plugin in HLS](./plugin-tutorial.md). ## Measuring, benchmarking and tracing ### Benchmarks -If you are touching performance sensitive code, take the time to run a differential -benchmark between HEAD and master using the benchHist script. This assumes that -"master" points to the upstream master. - -Run the benchmarks with `cabal bench`. - -It should take around 25 minutes and the results will be stored in the `bench-results` folder. To interpret the results, see the comments in the `bench/Main.hs` module. +If you are touching performance sensitive code, take the time to run a differential benchmark between `HEAD` and `origin/master` (see [bench/README](https://github.com/haskell/haskell-language-server/blob/master/bench/README.md)). -More details in [bench/README](https://github.com/haskell/haskell-language-server/blob/master/bench/README.md) +Run the benchmarks with `cabal bench`. The runtime is about 25 minutes and the results will be stored in the `bench-results` folder. To interpret the results, see the comments in the [bench/Main.hs](https://github.com/haskell/haskell-language-server/blob/master/bench/Main.hs) module. ### Tracing -HLS records opentelemetry [eventlog traces](https://downloads.haskell.org/~ghc/latest/docs/html/users_guide/runtime_control.html#rts-eventlog) via [opentelemetry](https://hackage.haskell.org/package/opentelemetry). To generate the traces, build with `-eventlog` and run with `+RTS -l`. To visualize the traces, install [Tracy](https://github.com/wolfpld/tracy) and use [eventlog-to-tracy](https://hackage.haskell.org/package/opentelemetry-extra) to open the generated eventlog. +HLS records [eventlog traces](https://downloads.haskell.org/~ghc/latest/docs/html/users_guide/runtime_control.html#rts-eventlog) via [opentelemetry](https://hackage.haskell.org/package/opentelemetry). To generate the traces, build with `-eventlog` and run with `+RTS -l`. To visualize the traces, install [Tracy](https://github.com/wolfpld/tracy) and use [eventlog-to-tracy](https://hackage.haskell.org/package/opentelemetry-extra) to open the generated eventlog. ## Adding support for a new editor Adding support for new editors is fairly easy if the editor already has good support for generic LSP-based extensions. -In that case, there will likely be an editor-specific support system for this (like `lsp-mode` for Emacs). -This will typically provide instructions for how to support new languages. +In that case, there will likely be an editor-specific support system (e.g., `lsp-mode` for Emacs). +The support system will typically provide instructions for how to add support for new languages. -In some cases you may need to write a small bit of additional client support, or expose a way for the user to set the server's [configuration options](../configuration.md#configuring-haskell-language-server) and -for them to configure how the server is started. 
+In some cases you may need to write a small bit of additional client support, or expose a way for the user to set the server's [configuration options](../configuration.md#configuring-haskell-language-server) and for them to configure how the server is started. -## Building the docs +## Building the documentation -The docs are built with [Sphinx](https://www.sphinx-doc.org/en/master/) and [ReadTheDocs](https://docs.readthedocs.io/en/stable/index.html), the documentation for both is helpful. +The documentation is built with [Sphinx](https://www.sphinx-doc.org/en/master/) and [ReadTheDocs](https://docs.readthedocs.io/en/stable/index.html), the documentation of both is helpful. -To build the docs you need to install some Python prerequisites. You can either `pip install -r docs/requirements.txt`, or simply enter a `nix-shell`. +You need to install some Python prerequisites. You can either `pip install -r docs/requirements.txt`, or simply enter a `nix-shell`. -Then to build and preview the docs: +Then to build and preview the documentation: ``` cd docs @@ -237,9 +237,9 @@ make html firefox _build/html/index.html ``` -Alternatively, you can build the entire thing as a Nix derivation from the flake with `nix build .#docs`. +Alternatively, you can build the documentation as a Nix derivation from the Flake with `nix build .#docs`. -The docs are also built and previewed on every PR, so you can check them from the PR status. +The documentation is also built and previewed on every PR, so you can check them from the PR status. ## Working on code actions @@ -248,8 +248,8 @@ To make HLS easier to maintain, please follow these design guidelines when addin 1. Prefer `ghc-exactprint` to manual text parsing. 2. Prefer `ghc-exactprint` to manual code generation. 3. Code generating actions should not try to format the generated code. Assume that the user is also leveraging HLS for automated code formatting. -4. Put new code actions in their own plugin unless they are very closely aligned with an existing ghcide code action. +4. Put new code actions in their own plugin unless they are very closely aligned with an existing code action. ## Sponsorship -If you want to contribute financially you can do so via [open-collective](https://opencollective.com/haskell-language-server). In the past the funding has been used to sponsor [summer student projects](https://mpickering.github.io/ide/posts/2021-07-22-summer-of-hls.html). +If you want to contribute financially, you can do so via [open-collective](https://opencollective.com/haskell-language-server). In the past, the funding was used to sponsor [summer student projects](https://mpickering.github.io/ide/posts/2021-07-22-summer-of-hls.html). diff --git a/docs/contributing/plugin-tutorial.md b/docs/contributing/plugin-tutorial.md index 63d0de1a58..c952ef9eb2 100644 --- a/docs/contributing/plugin-tutorial.md +++ b/docs/contributing/plugin-tutorial.md @@ -1,23 +1,24 @@ # Let’s write a Haskell Language Server plugin +Originally written by Pepe Iborra, maintained by the Haskell community. -Haskell Language Server is an LSP server for the Haskell programming language. It builds on several previous efforts -to create a Haskell IDE, you can find many more details on the history and architecture in the [IDE 2020](https://mpickering.github.io/ide/index.html) community page. +Haskell Language Server (HLS) is an LSP server for the Haskell programming language. It builds on several previous efforts +to create a Haskell IDE. 
You can find many more details on the history and architecture in the [IDE 2020](https://mpickering.github.io/ide/index.html) community page. In this article we are going to cover the creation of an HLS plugin from scratch: a code lens to display explicit import lists. -Along the way we will learn about HLS, its plugin model, and the relationship with ghcide and LSP. +Along the way we will learn about HLS, its plugin model, and the relationship with `ghcide` and LSP. ## Introduction Writing plugins for HLS is a joy. Personally, I enjoy the ability to tap into the gigantic bag of goodies that is GHC, as well as the IDE integration thanks to LSP. -In the last couple of months I have written various HLS (and ghcide) plugins for things like: +In the last couple of months I have written various HLS (and `ghcide`) plugins for things like: 1. Suggest imports for variables not in scope, 2. Remove redundant imports, -2. Evaluate code in comments (a la doctest), -3. Integrate the retrie refactoring library. +2. Evaluate code in comments (à la [doctest](https://docs.python.org/3/library/doctest.html)), +3. Integrate the [retrie](https://github.com/facebookincubator/retrie) refactoring library. -These plugins are small but meaningful steps towards a more polished IDE experience, and in writing them I didn't have to worry about performance, UI, distribution, or even think for the most part, since it's always another tool (usually GHC) doing all the heavy lifting. The plugins also make these tools much more accessible to all the users of HLS. +These plugins are small but meaningful steps towards a more polished IDE experience, and in writing them I didn't have to worry about performance, UI, distribution, or even think for the most part, since it's always another tool (usually GHC) doing all the heavy lifting. The plugins also make these tools much more accessible to all users of HLS. ## The task @@ -27,14 +28,14 @@ Here is a visual statement of what we want to accomplish: And here is the gist of the algorithm: -1. Request the type checking artefacts from the ghcide subsystem -2. Extract the actual import lists from the type checked AST, +1. Request the type checking artifacts from the `ghcide` subsystem +2. Extract the actual import lists from the type-checked AST, 3. Ask GHC to produce the minimal import lists for this AST, -4. For every import statement without a explicit import list, find out the minimal import list, and produce a code lens to display it together with a command to graft it on. +4. For every import statement without an explicit import list, find out the minimal import list, and produce a code lens to display it together with a command to graft it on. ## Setup -To get started, let’s fetch the HLS repo and build it. You need at least GHC 9.0 for this: +To get started, let’s fetch the HLS repository and build it. You need at least GHC 9.0 for this: ``` git clone --recursive http://github.com/haskell/haskell-language-server hls @@ -43,7 +44,7 @@ cabal update cabal build ``` -If you run into any issues trying to build the binaries, the #haskell-language-server IRC chat room in +If you run into any issues trying to build the binaries, the `#haskell-language-server` IRC chat room in [Libera Chat](https://libera.chat/) is always a good place to ask for help. Once cabal is done take a note of the location of the `haskell-language-server` binary and point your LSP client to it. In VSCode this is done by editing the "Haskell Server Executable Path" setting. 
This way you can simply test your changes by reloading your editor after rebuilding the binary. @@ -67,19 +68,18 @@ data PluginDescriptor = , pluginRenameProvider :: !(Maybe RenameProvider) } ``` -A plugin has a unique id, a set of rules, a set of command handlers, and a set of "providers": +A plugin has a unique ID, a set of rules, a set of command handlers, and a set of "providers": -* Rules add new targets to the Shake build graph defined in ghcide. 99% of plugins need not define any new rules. +* Rules add new targets to the Shake build graph defined in `ghcide`. 99% of plugins need not define any new rules. * Commands are an LSP abstraction for actions initiated by the user which are handled in the server. These actions can be long running and involve multiple modules. Many plugins define command handlers. * Providers are a query-like abstraction where the LSP client asks the server for information. These queries must be fulfilled as quickly as possible. The HLS codebase includes several plugins under the namespace `Ide.Plugin.*`, the most relevant are: -- The ghcide plugin, which embeds ghcide as a plugin (ghcide is also the engine under HLS). -- The example and example2 plugins, offering a dubious welcome to new contributors -- The ormolu, fourmolu, floskell and stylish-haskell plugins, a testament to the code formatting wars of our community. -- The eval plugin, a code lens provider to evaluate code in comments -- The retrie plugin, a code actions provider to execute retrie commands +- The `ghcide` plugin, which embeds `ghcide` as a plugin (`ghcide` is also the engine under HLS), +- The `ormolu`, `fourmolu`, `floskell` and `stylish-haskell` plugins, a testament to the code formatting wars of our community, +- The `eval` plugin, a code lens provider to evaluate code in comments, +- The `retrie` plugin, a code actions provider to execute retrie commands. I would recommend looking at the existing plugins for inspiration and reference. @@ -134,11 +134,11 @@ Providers are functions that receive some inputs and produce an IO computation t All providers receive an `LSP.LspFuncs` value, which is a record of functions to perform LSP actions. Most providers can safely ignore this argument, since the LSP interaction is automatically managed by HLS. Some of its capabilities are: -- Querying the LSP client capabilities -- Manual progress reporting and cancellation, for plugins that provide long running commands (like the Retrie plugin), -- Custom user interactions via [message dialogs](https://microsoft.github.io/language-server-protocol/specification#window_showMessage). For instance, the Retrie plugin uses this to report skipped modules. +- Querying the LSP client capabilities, +- Manual progress reporting and cancellation, for plugins that provide long running commands (like the `retrie` plugin), +- Custom user interactions via [message dialogs](https://microsoft.github.io/language-server-protocol/specification#window_showMessage). For instance, the `retrie` plugin uses this to report skipped modules. -The second argument plugins receive is `IdeState`, which encapsulates all the ghcide state including the build graph. This allows to request ghcide rule results, which leverages Shake to parallelize and reuse previous results as appropriate. Rule types are instances of the `RuleResult` type family, and +The second argument, which plugins receive, is `IdeState`. `IdeState` encapsulates all the `ghcide` state including the build graph. 
This allows to request `ghcide` rule results, which leverages Shake to parallelize and reuse previous results as appropriate. Rule types are instances of the `RuleResult` type family, and most of them are defined in `Development.IDE.Core.RuleTypes`. Some relevant rule types are: ```haskell -- | The parse tree for the file using GetFileContents @@ -157,7 +157,7 @@ type instance RuleResult GhcSessionDeps = HscEnvEq type instance RuleResult GetModSummary = ModSummary ``` -The `use` family of combinators allow to request rule results. For example, the following code is used in the Eval plugin to request a GHC session and a module summary (for the imports) in order to set up an interactive evaluation environment +The `use` family of combinators allows to request rule results. For example, the following code is used in the `eval` plugin to request a GHC session and a module summary (for the imports) in order to set up an interactive evaluation environment ```haskell let nfp = toNormalizedFilePath' fp session <- runAction "runEvalCmd.ghcSession" state $ use_ GhcSessionDeps nfp @@ -167,7 +167,7 @@ The `use` family of combinators allow to request rule results. For example, the There are three flavours of `use` combinators: 1. `use*` combinators block and propagate errors, -2. `useWithStale*` combinators block and switch to stale data in case of error, +2. `useWithStale*` combinators block and switch to stale data in case of an error, 3. `useWithStaleFast*` combinators return immediately with stale data if any, or block otherwise. ## LSP abstractions @@ -199,7 +199,7 @@ To keep things simple our plugin won't make use of the unresolved facility, embe ## The explicit imports plugin -To provide code lenses, our plugin must define a code lens provider as well as a Command handler. +To provide code lenses, our plugin must define a code lens provider as well as a command handler. The code at `Ide.Plugin.Example` shows how the convenience `defaultPluginDescriptor` function is used to bootstrap the plugin and how to add the desired providers: @@ -221,7 +221,7 @@ Our plugin provider has two components that need to be fleshed out. Let's start importLensCommand :: PluginCommand ``` -`PluginCommand` is a type synonym defined in `LSP.Types` as: +`PluginCommand` is a data type defined in `LSP.Types` as: ```haskell data PluginCommand = forall a. (FromJSON a) => @@ -241,7 +241,7 @@ type CommandFunction a = ``` `CommandFunction` takes in the familiar `LspFuncs` and `IdeState` arguments, together with a JSON encoded argument. -I recommend checking the LSP spec in order to understand how commands work, but briefly the LSP server (us) initially sends a command descriptor to the client, in this case as part of a code lens. When the client decides to execute the command on behalf of a user action (in this case a click on the code lens), the client sends this descriptor back to the LSP server which then proceeds to handle and execute the command. The latter part is implemented by the `commandFunc` field of our `PluginCommand` value. +I recommend checking the LSP specifications in order to understand how commands work, but briefly the LSP server (us) initially sends a command descriptor to the client, in this case as part of a code lens. When the client decides to execute the command on behalf of a user action (in this case a click on the code lens), the client sends this descriptor back to the LSP server which then proceeds to handle and execute the command. 
The latter part is implemented by the `commandFunc` field of our `PluginCommand` value. For our command, we are going to have a very simple handler that receives a diff (`WorkspaceEdit`) and returns it to the client. The diff will be generated by our code lens provider and sent as part of the code lens to the LSP client, who will send it back to our command handler when the user activates @@ -270,10 +270,10 @@ runImportCommand _lspFuncs _state (ImportCommandParams edit) = do The code lens provider implements all the steps of the algorithm described earlier: -> 1. Request the type checking artefacts from the ghcide subsystem -> 2. Extract the actual import lists from the type checked AST, +> 1. Request the type checking artefacts from the `ghcide` subsystem +> 2. Extract the actual import lists from the type-checked AST, > 3. Ask GHC to produce the minimal import lists for this AST, -> 4. For every import statement without a explicit import list, find out what's the minimal import list, and produce a code lens to display it together with a diff to graft the import list in. +> 4. For every import statement without an explicit import list, find out the minimal import list, and produce a code lens to display it together with a command to graft it on. The provider takes the usual `LspFuncs` and `IdeState` argument, as well as a `CodeLensParams` value containing the URI for a file, and returns an IO action producing either an error or a list of code lenses for that file. @@ -282,7 +282,7 @@ for a file, and returns an IO action producing either an error or a list of code provider :: CodeLensProvider provider _lspFuncs -- LSP functions, not used state -- ghcide state, used to retrieve typechecking artifacts - pId -- plugin Id + pId -- Plugin ID CodeLensParams{_textDocument = TextDocumentIdentifier{_uri}} -- VSCode uses URIs instead of file paths -- haskell-lsp provides conversion functions @@ -292,7 +292,7 @@ provider _lspFuncs -- LSP functions, not used tmr <- runAction "importLens" state $ use TypeCheck nfp -- We also need a GHC session with all the dependencies hsc <- runAction "importLens" state $ use GhcSessionDeps nfp - -- Use the GHC api to extract the "minimal" imports + -- Use the GHC API to extract the "minimal" imports (imports, mbMinImports) <- extractMinimalImports hsc tmr case mbMinImports of @@ -309,10 +309,10 @@ provider _lspFuncs -- LSP functions, not used = return $ Right (List []) ``` -Note how simple it is to retrieve the type checking artifacts for the module as well as a fully setup Ghc session via the Ghcide rules. +Note how simple it is to retrieve the type checking artifacts for the module as well as a fully setup GHC session via the `ghcide` rules. The function `extractMinimalImports` extracts the import statements from the AST and generates the minimal import lists, implementing steps 2 and 3 of the algorithm. 
-The details of the GHC api are not relevant to this tutorial, but the code is terse and easy to read: +The details of the GHC API are not relevant to this tutorial, but the code is terse and easy to read: ```haskell extractMinimalImports @@ -320,7 +320,7 @@ extractMinimalImports -> Maybe TcModuleResult -> IO ([LImportDecl GhcRn], Maybe [LImportDecl GhcRn]) extractMinimalImports (Just hsc)) (Just (tmrModule -> TypecheckedModule{..})) = do - -- extract the original imports and the typechecking environment + -- Extract the original imports and the typechecking environment let (tcEnv,_) = tm_internals_ Just (_, imports, _, _) = tm_renamed_source ParsedModule{ pm_parsed_source = L loc _} = tm_parsed_module @@ -337,7 +337,7 @@ extractMinimalImports (Just hsc)) (Just (tmrModule -> TypecheckedModule{..})) = extractMinimalImports _ _ = return ([], Nothing) ``` -The function `generateLens` implements the last piece of the algorithm, step 4, producing a code lens for an import statement that lacks an import list. Note how the code lens includes an `ImportCommandParams` value +The function `generateLens` implements step 4 of the algorithm, producing a code lens for an import statement that lacks an import list. Note how the code lens includes an `ImportCommandParams` value that contains a workspace edit that rewrites the import statement, as expected by our command provider. ```haskell @@ -355,24 +355,24 @@ generateLens pId uri minImports (L src imp) | RealSrcSpan l <- src , Just explicit <- Map.lookup (srcSpanStart src) minImports , L _ mn <- ideclName imp - -- (almost) no one wants to see an explicit import list for Prelude + -- (Almost) no one wants to see an explicit import list for Prelude , mn /= moduleName pRELUDE = do -- The title of the command is just the minimal explicit import decl let title = T.pack $ prettyPrint explicit - -- the range of the code lens is the span of the original import decl + -- The range of the code lens is the span of the original import decl _range :: Range = realSrcSpanToRange l - -- the code lens has no extra data + -- The code lens has no extra data _xdata = Nothing - -- an edit that replaces the whole declaration with the explicit one + -- An edit that replaces the whole declaration with the explicit one edit = WorkspaceEdit (Just editsMap) Nothing editsMap = HashMap.fromList [(uri, List [importEdit])] importEdit = TextEdit _range title - -- the command argument is simply the edit + -- The command argument is simply the edit _arguments = Just [toJSON $ ImportCommandParams edit] - -- create the command + -- Create the command _command <- Just <$> mkLspCommand pId importCommandId title _arguments - -- create and return the code lens + -- Create and return the code lens return $ Just CodeLens{..} | otherwise = return Nothing @@ -380,13 +380,13 @@ generateLens pId uri minImports (L src imp) ## Wrapping up -There's only one haskell code change left to do at this point: "link" the plugin in the `HlsPlugins` HLS module. -However integrating the plugin in haskell-language-server itself will need some changes in config files. The best way is looking for the id (f.e. 
`hls-class-plugin`) of an existing plugin: -- `./cabal*.project` and `./stack*.yaml`: add the plugin package in the `packages` field -- `./haskell-language-server.cabal`: add a conditional block with the plugin package dependency -- `./.github/workflows/test.yml`: add a block to run the test suite of the plugin -- `./.github/workflows/hackage.yml`: add the plugin to the component list to release the plugin package to hackage -- `./*.nix`: add the plugin to nix builds +There's only one Haskell code change left to do at this point: "link" the plugin in the `HlsPlugins` HLS module. +However integrating the plugin in HLS itself will need some changes in configuration files. The best way is looking for the ID (f.e. `hls-class-plugin`) of an existing plugin: +- `./cabal*.project` and `./stack*.yaml`: add the plugin package in the `packages` field, +- `./haskell-language-server.cabal`: add a conditional block with the plugin package dependency, +- `./.github/workflows/test.yml`: add a block to run the test suite of the plugin, +- `./.github/workflows/hackage.yml`: add the plugin to the component list to release the plugin package to Hackage, +- `./*.nix`: add the plugin to Nix builds. The full code as used in this tutorial, including imports, can be found in [this Gist](https://gist.github.com/pepeiborra/49b872b2e9ad112f61a3220cdb7db967) as well as in this [branch](https://github.com/pepeiborra/ide/blob/imports-lens/src/Ide/Plugin/ImportLens.hs) diff --git a/docs/features.md b/docs/features.md index a701a45b82..1eab0054b4 100644 --- a/docs/features.md +++ b/docs/features.md @@ -81,6 +81,16 @@ Known limitations: - Only works for [local definitions](https://github.com/haskell/haskell-language-server/issues/708). +## Jump to implementation + +Provided by: `ghcide` + +Jump to the implementation instance of a type class method. + +Known limitations: + +- Only works for [local definitions](https://github.com/haskell/haskell-language-server/issues/708). + ## Jump to note definition Provided by: `hls-notes-plugin` @@ -111,6 +121,7 @@ Completions for language pragmas. ## Formatting Format your code with various Haskell code formatters. +The default Haskell code formatter is `ormolu`, and the Haskell formatter can be configured via the `formattingProvider` option. | Formatter | Provided by | | --------------- | ---------------------------- | @@ -119,12 +130,17 @@ Format your code with various Haskell code formatters. | Ormolu | `hls-ormolu-plugin` | | Stylish Haskell | `hls-stylish-haskell-plugin` | +--- + Format your cabal files with a cabal code formatter. +The default cabal code formatter is `cabal-gild`, which needs to be available on the `$PATH`, +or the location needs to be explicitly provided. +To change the cabal formatter, edit the `cabalFormattingProvider` option. | Formatter | Provided by | |-----------------|------------------------------| | cabal-fmt | `hls-cabal-fmt-plugin` | - +| cabal-gild | `hls-cabal-gild-plugin` | ## Document symbols @@ -310,6 +326,14 @@ Code action kind: `quickfix` Correct common misspelling of SPDX Licenses such as `BSD-3-Clause`. +### Add dependency to `cabal` file + +Provided by: `hls-cabal-plugin` + +Code action kind: `quickfix` + +Add a missing package dependency to your `.cabal` file. + ## Code lenses ### Add type signature @@ -322,7 +346,7 @@ Shows the type signature for bindings without type signatures, and adds it with Provided by: `hls-eval-plugin` -Evaluates code blocks in comments with a click. 
[Tutorial](https://github.com/haskell/haskell-language-server/blob/master/plugins/hls-eval-plugin/README.md). +Evaluates code blocks in comments with a click. A code action is also provided. [Tutorial](https://github.com/haskell/haskell-language-server/blob/master/plugins/hls-eval-plugin/README.md). ![Eval Demo](../plugins/hls-eval-plugin/demo.gif) @@ -387,6 +411,17 @@ Known limitations: - Cross-module renaming requires all components to be indexed, which sometimes causes [partial renames in multi-component projects](https://github.com/haskell/haskell-language-server/issues/2193). +To eagerly load all components, you need to + +- set `haskell.sessionLoading` to `multipleComponents`, +- set `hie.yaml` to load all components (currently only cabal supports this), + ```yaml + cradle: + cabal: + component: all + ``` +- and enable tests and benchmarks in `cabal.project` with `tests: True` and `benchmarks: True`. + ## Semantic tokens Provided by: `hls-semantic-tokens-plugin` diff --git a/docs/index.rst b/docs/index.rst index 0cf743688c..e3e8fab81c 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -1,7 +1,7 @@ haskell-language-server ======================= -Official Haskell Language Server implementation. :ref:`Read more`. +Official Haskell Language Server implementation. :ref:`Read more`. .. toctree:: :maxdepth: 2 diff --git a/docs/installation.md b/docs/installation.md index 4d021e2040..4a1147ade5 100644 --- a/docs/installation.md +++ b/docs/installation.md @@ -120,14 +120,16 @@ built against the official Fedora ghc package. ## FreeBSD -HLS is available for installation from official binary packages. Use +HLS is available for installation via [devel/hs-haskell-language-server](https://www.freshports.org/devel/hs-haskell-language-server) +port or from official binary packages. Use ```bash pkg install hs-haskell-language-server ``` -to install it. At the moment, HLS installed this way only supports the same GHC -version as the ports one. +to install it. HLS installed this way targets the same GHC version that the [lang/ghc](https://www.freshports.org/lang/ghc) +port produces. Use the `pkg search haskell-language` command to list HLS packages +for other GHCs. 
## Gentoo diff --git a/docs/requirements.txt b/docs/requirements.txt index bb67e0bf03..4bdb963497 100644 --- a/docs/requirements.txt +++ b/docs/requirements.txt @@ -1,4 +1,4 @@ -Sphinx~=5.3.0 -sphinx-rtd-theme~=1.1.0 -myst-parser~=1.0.0 -docutils<0.19 +Sphinx~=8.1.3 +sphinx-rtd-theme~=3.0.2 +myst-parser~=4.0.0 +docutils~=0.21.2 diff --git a/docs/support/ghc-version-support.md b/docs/support/ghc-version-support.md index 488a5a1310..aa29c60c0a 100644 --- a/docs/support/ghc-version-support.md +++ b/docs/support/ghc-version-support.md @@ -15,39 +15,44 @@ Support status (see the support policy below for more details): - "full support": this GHC version is currently actively supported, and most [tier 2 plugins](./plugin-support.md) work - "deprecated": this GHC version was supported in the past, but is now deprecated -| GHC version | Last supporting HLS version | Support status | -|--------------|--------------------------------------------------------------------------------------|-----------------------------------------------------------------------------| -| 9.8.2 | [latest](https://github.com/haskell/haskell-language-server/releases/latest) | full support | -| 9.8.1 | [2.6.0.0](https://github.com/haskell/haskell-language-server/releases/tag/2.6.0.0) | full support | -| 9.6.5 | [latest](https://github.com/haskell/haskell-language-server/releases/latest) | full support | -| 9.6.4 | [2.6.0.0](https://github.com/haskell/haskell-language-server/releases/tag/2.6.0.0) | full support | -| 9.6.3 | [2.5.0.0](https://github.com/haskell/haskell-language-server/releases/tag/2.5.0.0) | full support | -| 9.6.2 | [2.2.0.0](https://github.com/haskell/haskell-language-server/releases/tag/2.2.0.0) | deprecated | -| 9.6.1 | [2.0.0.0](https://github.com/haskell/haskell-language-server/releases/tag/2.0.0.0) | deprecated | -| 9.4.8 | [latest](https://github.com/haskell/haskell-language-server/releases/latest) | full support | -| 9.4.7 | [2.5.0.0](https://github.com/haskell/haskell-language-server/releases/tag/2.5.0.0) | deprecated | -| 9.4.6 | [2.2.0.0](https://github.com/haskell/haskell-language-server/releases/tag/2.2.0.0) | deprecated | -| 9.4.5 | [2.2.0.0](https://github.com/haskell/haskell-language-server/releases/tag/2.2.0.0) | deprecated | -| 9.4.4 | [1.10.0.0](https://github.com/haskell/haskell-language-server/releases/tag/1.10.0.0) | deprecated | -| 9.4.3 | [1.9.1.0](https://github.com/haskell/haskell-language-server/releases/tag/1.9.1.0) | deprecated | -| 9.4.(1,2) | [1.8.0.0](https://github.com/haskell/haskell-language-server/releases/tag/1.8.0.0) | deprecated | -| 9.2.8 | [latest](https://github.com/haskell/haskell-language-server/releases/latest) | full support | -| 9.2.7 | [2.0.0.1](https://github.com/haskell/haskell-language-server/releases/tag/2.0.0.1) | deprecated | -| 9.2.(5,6) | [1.9.1.0](https://github.com/haskell/haskell-language-server/releases/tag/1.9.1.0) | deprecated | -| 9.2.(3,4) | [1.8.0.0](https://github.com/haskell/haskell-language-server/releases/tag/1.8.0.0) | deprecated | -| 9.2.(1,2) | [1.7.0.0](https://github.com/haskell/haskell-language-server/releases/tag/1.7.0.0) | deprecated | -| 9.0.2 | [2.4.0.0](https://github.com/haskell/haskell-language-server/releases/tag/2.4.0.0) | deprecated | -| 9.0.1 | [1.6.1.0](https://github.com/haskell/haskell-language-server/releases/tag/1.6.1.0) | deprecated | -| 8.10.7 | [2.2.0.0](https://github.com/haskell/haskell-language-server/releases/tag/2.2.0.0) | deprecated | -| 8.10.6 | 
[1.6.1.0](https://github.com/haskell/haskell-language-server/releases/tag/1.6.1.0) | deprecated | -| 8.10.5 | [1.5.1](https://github.com/haskell/haskell-language-server/releases/tag/1.5.1) | deprecated | -| 8.10.(4,3,2) | [1.4.0](https://github.com/haskell/haskell-language-server/releases/tag/1.4.0) | deprecated | -| 8.10.1 | [0.9.0](https://github.com/haskell/haskell-language-server/releases/tag/0.9.0) | deprecated | -| 8.8.4 | [1.8.0](https://github.com/haskell/haskell-language-server/releases/1.8.0) | deprecated | -| 8.8.3 | [1.5.1](https://github.com/haskell/haskell-language-server/releases/1.5.1) | deprecated | -| 8.8.2 | [1.2.0](https://github.com/haskell/haskell-language-server/releases/tag/1.2.0) | deprecated | -| 8.6.5 | [1.8.0.0](https://github.com/haskell/haskell-language-server/releases/tag/1.8.0.0) | deprecated | -| 8.6.4 | [1.4.0](https://github.com/haskell/haskell-language-server/releases/tag/1.4.0) | deprecated | +| GHC version | Last supporting HLS version | Support status | +| ------------ | ------------------------------------------------------------------------------------ | -------------- | +| 9.12.2 | [latest](https://github.com/haskell/haskell-language-server/releases/latest) | full support | +| 9.10.1 | [latest](https://github.com/haskell/haskell-language-server/releases/latest) | full support | +| 9.8.4 | [latest](https://github.com/haskell/haskell-language-server/releases/latest) | full support | +| 9.8.2 | [2.9.0.1](https://github.com/haskell/haskell-language-server/releases/tag/2.9.0.1) | deprecated | +| 9.8.1 | [2.6.0.0](https://github.com/haskell/haskell-language-server/releases/tag/2.6.0.0) | deprecated | +| 9.6.7 | [latest](https://github.com/haskell/haskell-language-server/releases/latest) | full support | +| 9.6.6 | [2.9.0.1](https://github.com/haskell/haskell-language-server/releases/tag/2.9.0.1) | deprecated | +| 9.6.5 | [2.9.0.1](https://github.com/haskell/haskell-language-server/releases/tag/2.9.0.1) | deprecated | +| 9.6.4 | [2.6.0.0](https://github.com/haskell/haskell-language-server/releases/tag/2.6.0.0) | deprecated | +| 9.6.3 | [2.5.0.0](https://github.com/haskell/haskell-language-server/releases/tag/2.5.0.0) | deprecated | +| 9.6.2 | [2.2.0.0](https://github.com/haskell/haskell-language-server/releases/tag/2.2.0.0) | deprecated | +| 9.6.1 | [2.0.0.0](https://github.com/haskell/haskell-language-server/releases/tag/2.0.0.0) | deprecated | +| 9.4.8 | [latest](https://github.com/haskell/haskell-language-server/releases/latest) | full support | +| 9.4.7 | [2.5.0.0](https://github.com/haskell/haskell-language-server/releases/tag/2.5.0.0) | deprecated | +| 9.4.6 | [2.2.0.0](https://github.com/haskell/haskell-language-server/releases/tag/2.2.0.0) | deprecated | +| 9.4.5 | [2.2.0.0](https://github.com/haskell/haskell-language-server/releases/tag/2.2.0.0) | deprecated | +| 9.4.4 | [1.10.0.0](https://github.com/haskell/haskell-language-server/releases/tag/1.10.0.0) | deprecated | +| 9.4.3 | [1.9.1.0](https://github.com/haskell/haskell-language-server/releases/tag/1.9.1.0) | deprecated | +| 9.4.(1,2) | [1.8.0.0](https://github.com/haskell/haskell-language-server/releases/tag/1.8.0.0) | deprecated | +| 9.2.8 | [2.9.0.0](https://github.com/haskell/haskell-language-server/releases/tag/2.9.0.0) | deprecated | +| 9.2.7 | [2.0.0.1](https://github.com/haskell/haskell-language-server/releases/tag/2.0.0.1) | deprecated | +| 9.2.(5,6) | [1.9.1.0](https://github.com/haskell/haskell-language-server/releases/tag/1.9.1.0) | deprecated | +| 9.2.(3,4) | 
[1.8.0.0](https://github.com/haskell/haskell-language-server/releases/tag/1.8.0.0) | deprecated | +| 9.2.(1,2) | [1.7.0.0](https://github.com/haskell/haskell-language-server/releases/tag/1.7.0.0) | deprecated | +| 9.0.2 | [2.4.0.0](https://github.com/haskell/haskell-language-server/releases/tag/2.4.0.0) | deprecated | +| 9.0.1 | [1.6.1.0](https://github.com/haskell/haskell-language-server/releases/tag/1.6.1.0) | deprecated | +| 8.10.7 | [2.2.0.0](https://github.com/haskell/haskell-language-server/releases/tag/2.2.0.0) | deprecated | +| 8.10.6 | [1.6.1.0](https://github.com/haskell/haskell-language-server/releases/tag/1.6.1.0) | deprecated | +| 8.10.5 | [1.5.1](https://github.com/haskell/haskell-language-server/releases/tag/1.5.1) | deprecated | +| 8.10.(4,3,2) | [1.4.0](https://github.com/haskell/haskell-language-server/releases/tag/1.4.0) | deprecated | +| 8.10.1 | [0.9.0](https://github.com/haskell/haskell-language-server/releases/tag/0.9.0) | deprecated | +| 8.8.4 | [1.8.0](https://github.com/haskell/haskell-language-server/releases/tag/1.8.0) | deprecated | +| 8.8.3 | [1.5.1](https://github.com/haskell/haskell-language-server/releases/tag/1.5.1) | deprecated | +| 8.8.2 | [1.2.0](https://github.com/haskell/haskell-language-server/releases/tag/1.2.0) | deprecated | +| 8.6.5 | [1.8.0.0](https://github.com/haskell/haskell-language-server/releases/tag/1.8.0.0) | deprecated | +| 8.6.4 | [1.4.0](https://github.com/haskell/haskell-language-server/releases/tag/1.4.0) | deprecated | GHC versions not in the list have never been supported by HLS. LTS stands for [Stackage](https://www.stackage.org/) Long Term Support. @@ -75,26 +80,62 @@ Major versions of GHC which are not supported by HLS on master are extremely unl ## GHC version deprecation policy -### Major versions +### Base policy -A major GHC version is a "legacy" version if it is 3 or more major versions behind the latest GHC version that is +This is the static part of the policy that can be checked by a machine. -1. Fully supported by HLS -2. Used in the a Stackage LTS +#### Major versions -For example, if 9.2 is the latest major version fully supported by HLS and used in a Stackage LTS, then the 8.8 major version and older will be legacy. +HLS will support major versions of GHC until they are older than _both_ -HLS will support all non-legacy major versions of GHC. +1. The major version of GHC used in the current Stackage LTS; and +2. The major version of GHC recommended by GHCup -### Minor versions +For example, if + +1. Stackage LTS uses GHC 9.2; and +2. GHCup recommends GHC 9.4 + +then HLS will support back to GHC 9.2. + +#### Minor versions For the latest supported major GHC version we will support at least 2 minor versions. For the rest of the supported major GHC versions, we will support at least the latest minor version in Stackage LTS (so 1 minor version). -### Announcements +### Extended policy + +This is the part of the policy that needs evaluation by a human, possibly followed +by a discussion.
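To make the machine-checkable base policy above concrete, the following is a minimal Haskell sketch of the major-version rule (illustrative only, not part of this patch): GHC major series are modelled as plain `(Int, Int)` pairs, and the Stackage LTS and GHCup inputs are assumed to be looked up elsewhere.

```haskell
-- Sketch of the "Base policy" major-version rule described above.
-- A GHC major series is written as an (Int, Int) pair, e.g. (9, 6) for GHC 9.6.
type GhcMajor = (Int, Int)

-- | A major series stays supported as long as it is not older than *both*
-- the Stackage LTS GHC and the GHCup-recommended GHC, i.e. as long as it is
-- at least as new as the older of the two reference versions.
isSupported :: GhcMajor -> GhcMajor -> GhcMajor -> Bool
isSupported stackageLtsGhc ghcupRecommendedGhc candidate =
  candidate >= min stackageLtsGhc ghcupRecommendedGhc

-- The example from the policy text: Stackage LTS on 9.2 and GHCup
-- recommending 9.4 means support reaches back to 9.2, so 9.0 is dropped.
main :: IO ()
main = do
  print (isSupported (9, 2) (9, 4) (9, 2)) -- True
  print (isSupported (9, 2) (9, 4) (9, 0)) -- False
```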
+ +#### Ecosystem factors + +To establish and apply the policy we take the following ecosystem factors into account: + +- Support status of HLS +- The most recent [stackage](https://www.stackage.org/) LTS snapshot +- The GHC version recommended by GHCup +- The GHC versions used in the most popular [Linux distributions](https://repology.org/project/ghc/versions) +- The reliability of different GHC versions on the major operating systems (Linux, Windows, macOS) +- The [Haskell Survey results](https://taylor.fausak.me/2022/11/18/haskell-survey-results/#s2q4) -We will warn users about the upcoming deprecation of a GHC version in the notes of the release *prior* to the deprecation itself. +### Supporting a GHC version beyond normal deprecation time + +In cases where the base policy demands a deprecation, but ecosystem factors +suggest that the GHC version is still widely used (e.g. in the latest [Haskell Survey results](https://taylor.fausak.me/2022/11/18/haskell-survey-results/#s2q4)), +the deprecation should be suspended for the next release and the situation re-evaluated for the release after that. + +When we decide to keep supporting an old version, we should track it as follows: + +1. open a ticket on the HLS issue tracker to discuss deprecating said GHC version + - explain the reason the GHC version wasn't deprecated (context) + - explain the maintenance burden it causes (reason) + - evaluate whether it impacts the next HLS release (impact) +2. discuss whether ecosystem factors changed + - e.g. if Haskell Survey results show that 25% or more of users are still on the GHC version in question, then dropping should be avoided +3. if dropping is still undesired, but the maintenance burden is also high, then put out a call for help and contact the Haskell Foundation (HF) for additional funding to support this GHC version +4. if no help or funding has been received within 2 releases (roughly 3-6 months), then drop the version regardless ### Why deprecate older versions of GHC? @@ -107,12 +148,3 @@ We will warn users about the upcoming deprecation of a GHC version in the notes So we need to limit the GHC support to save maintainers and contributors time and reduce CI resources. At same time we aim to support the right balance of GHC versions to minimize the impact on users. - -### What factors do we take into account when deprecating a version?
- -To establish and apply the policy we take into account: - -- Completeness: support includes all plugins and features -- The most recent [stackage](https://www.stackage.org/) LTS snapshot -- The GHC versions used in the most popular [linux distributions](https://repology.org/project/ghc/versions) -- The reliability of different ghc versions on the major operating systems (Linux, Windows, MacOS) diff --git a/docs/support/plugin-support.md b/docs/support/plugin-support.md index 70c6472c1f..7e0d7220e8 100644 --- a/docs/support/plugin-support.md +++ b/docs/support/plugin-support.md @@ -37,33 +37,34 @@ For example, a plugin to provide a formatter which has itself been abandoned has ## Current plugin support tiers -| Plugin | Tier | Unsupported GHC versions | -|-------------------------------------|------|--------------------------| -| ghcide core plugins | 1 | | -| `hls-call-hierarchy-plugin` | 1 | | -| `hls-code-range-plugin` | 1 | | -| `hls-explicit-imports-plugin` | 1 | | -| `hls-pragmas-plugin` | 1 | | -| `hls-refactor-plugin` | 1 | | -| `hls-alternate-number-plugin` | 2 | | -| `hls-cabal-fmt-plugin` | 2 | | -| `hls-class-plugin` | 2 | | -| `hls-change-type-signature-plugin` | 2 | | -| `hls-eval-plugin` | 2 | | -| `hls-explicit-fixity-plugin` | 2 | | -| `hls-explicit-record-fields-plugin` | 2 | | -| `hls-fourmolu-plugin` | 2 | | -| `hls-gadt-plugin` | 2 | | -| `hls-hlint-plugin` | 2 | | -| `hls-module-name-plugin` | 2 | | -| `hls-notes-plugin` | 2 | | -| `hls-qualify-imported-names-plugin` | 2 | | -| `hls-ormolu-plugin` | 2 | | -| `hls-rename-plugin` | 2 | | -| `hls-stylish-haskell-plugin` | 2 | | -| `hls-overloaded-record-dot-plugin` | 2 | | -| `hls-semantic-tokens-plugin` | 2 | | -| `hls-floskell-plugin` | 3 | | -| `hls-stan-plugin` | 3 | 9.2.(4-8) | -| `hls-retrie-plugin` | 3 | | -| `hls-splice-plugin` | 3 | | +| Plugin | Tier | Unsupported GHC versions | +| ------------------------------------ | ---- | ------------------------ | +| ghcide core plugins | 1 | | +| `hls-call-hierarchy-plugin` | 1 | | +| `hls-code-range-plugin` | 1 | | +| `hls-explicit-imports-plugin` | 1 | | +| `hls-pragmas-plugin` | 1 | | +| `hls-refactor-plugin` | 2 | | +| `hls-alternate-number-format-plugin` | 2 | | +| `hls-cabal-fmt-plugin` | 2 | | +| `hls-cabal-gild-plugin` | 2 | | +| `hls-class-plugin` | 2 | | +| `hls-change-type-signature-plugin` | 2 | | +| `hls-eval-plugin` | 2 | | +| `hls-explicit-fixity-plugin` | 2 | | +| `hls-explicit-record-fields-plugin` | 2 | | +| `hls-fourmolu-plugin` | 2 | | +| `hls-gadt-plugin` | 2 | | +| `hls-hlint-plugin` | 2 | 9.10.1 | +| `hls-module-name-plugin` | 2 | | +| `hls-notes-plugin` | 2 | | +| `hls-qualify-imported-names-plugin` | 2 | | +| `hls-ormolu-plugin` | 2 | | +| `hls-rename-plugin` | 2 | | +| `hls-stylish-haskell-plugin` | 2 | | +| `hls-overloaded-record-dot-plugin` | 2 | | +| `hls-semantic-tokens-plugin` | 2 | | +| `hls-floskell-plugin` | 3 | 9.10.1, 9.12.2 | +| `hls-stan-plugin` | 3 | 9.12.2 | +| `hls-retrie-plugin` | 3 | 9.10.1, 9.12.2 | +| `hls-splice-plugin` | 3 | 9.10.1, 9.12.2 | diff --git a/docs/troubleshooting.md b/docs/troubleshooting.md index 8a60854ccb..428fbe32f2 100644 --- a/docs/troubleshooting.md +++ b/docs/troubleshooting.md @@ -189,7 +189,7 @@ stack install haskell-language-server You also can leverage `ghcup compile hls`: ```bash -ghcup compile hls -v 1.9.0.0 --ghc 9.2.5 +ghcup compile hls -v 2.9.0.0 --ghc 9.6.5 ``` ### Preprocessors diff --git a/docs/what-is-hls.md b/docs/what-is-hls.md index 960eef3f1b..8b46076121 100644 --- a/docs/what-is-hls.md 
+++ b/docs/what-is-hls.md @@ -1,6 +1,6 @@ -# What is haskell-language-server? +# What is the Haskell Language Server? -The `haskell-language-server` (HLS) project is an implementation of a server (a "language server") for the [Language Server Protocol](https://microsoft.github.io/language-server-protocol/) (LSP). +The Haskell Language Server (HLS) is an implementation of a server (a "language server") for the [Language Server Protocol](https://microsoft.github.io/language-server-protocol/) (LSP). A language server talks to a client (typically an editor), which can ask the server to perform various operations, such as reporting errors or providing code completions. The advantage of this system is that clients and servers can interoperate more easily so long as they all speak the LSP protocol. In the case of HLS, that means that it can be used with many different editors, since editor support for the LSP protocol is now widespread. @@ -35,7 +35,7 @@ Here are a few pieces of jargon that you may come across in the HLS docs or when - *Semantic highlighting*: Special syntax highlighting performed by the server. - *Method*: A LSP method is a function in the LSP protocol that the client can invoke to perform some action, e.g. ask for completions at a point. -## haskell-language-server +## Haskell Language Server ### HLS and its wrapper @@ -51,7 +51,7 @@ Plugins can also be disabled independently to allow users to customize the behav These plugins all (currently) live in the HLS repository and are developed in tandem with the core HLS functionality. -See the [configuration page](./configuration.md#generic-plugin-configuration) for more on configuring plugins. +See the [configuration page](./configuration.md#Generic plugin configuration) for more on configuring plugins. 
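For a rough idea of what such a plugin looks like in code, here is a minimal descriptor (illustrative only, not part of this patch) built with `defaultPluginDescriptor`, the same helper the test plugin further down in this diff uses; the module name and description string are made up.

```haskell
{-# LANGUAGE OverloadedStrings #-}
-- A minimal, do-nothing HLS plugin: it registers no handlers, but it shows
-- the shape every plugin shares, a PluginId turned into a PluginDescriptor.
module ExamplePlugin (descriptor) where

import Ide.Types (PluginDescriptor, PluginId, defaultPluginDescriptor)

descriptor :: PluginId -> PluginDescriptor ideState
descriptor pluginId =
  defaultPluginDescriptor pluginId "An example plugin that provides no features"
```

Real plugins extend such a descriptor with handlers for specific LSP methods, and the per-plugin configuration mentioned above is what lets users switch those handlers on or off.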
### hie-bios diff --git a/exe/Wrapper.hs b/exe/Wrapper.hs index 3b80f37c49..2c2401ab6a 100644 --- a/exe/Wrapper.hs +++ b/exe/Wrapper.hs @@ -9,7 +9,6 @@ module Main where import Control.Monad.Extra import Data.Default -import Data.Either.Extra (eitherToMaybe) import Data.Foldable import Data.List import Data.List.Extra (trimEnd) @@ -76,8 +75,11 @@ main = do putStrLn $ showProgramVersionOfInterest programsOfInterest putStrLn "Tool versions in your project" cradle <- findProjectCradle' recorder False - ghcVersion <- runExceptT $ getRuntimeGhcVersion' cradle - putStrLn $ showProgramVersion "ghc" $ mkVersion =<< eitherToMaybe ghcVersion + runExceptT (getRuntimeGhcVersion' cradle) >>= \case + Left err -> + T.hPutStrLn stderr (prettyError err NoShorten) + Right ghcVersion -> + putStrLn $ showProgramVersion "ghc" $ mkVersion ghcVersion VersionMode PrintVersion -> putStrLn hlsVer diff --git a/flake.lock b/flake.lock index ed5b4a4d7a..3fb48889a5 100644 --- a/flake.lock +++ b/flake.lock @@ -3,11 +3,11 @@ "flake-compat": { "flake": false, "locked": { - "lastModified": 1696426674, - "narHash": "sha256-kvjfFW7WAETZlt09AgDn1MrtKzP7t90Vf7vypd3OL1U=", + "lastModified": 1733328505, + "narHash": "sha256-NeCCThCEP3eCl2l/+27kNNK7QrwZB1IJCrXfrbv5oqU=", "owner": "edolstra", "repo": "flake-compat", - "rev": "0f9255e01c2351cc7d116c072cb317785dd33b33", + "rev": "ff81ac966bb2cae68946d5ed5fc4994f96d0ffec", "type": "github" }, "original": { @@ -21,11 +21,11 @@ "systems": "systems" }, "locked": { - "lastModified": 1710146030, - "narHash": "sha256-SZ5L6eA7HJ/nmkzGG7/ISclqe6oZdOZTNoesiInkXPQ=", + "lastModified": 1731533236, + "narHash": "sha256-l0KFg5HjrsfsO/JpG+r7fRrqm12kzFHyUHqHCVpMMbI=", "owner": "numtide", "repo": "flake-utils", - "rev": "b1d9ab70662946ef0850d488da1c9019f3a9752a", + "rev": "11707dc2f618dd54ca8739b309ec4fc024de578b", "type": "github" }, "original": { @@ -36,11 +36,11 @@ }, "nixpkgs": { "locked": { - "lastModified": 1718149104, - "narHash": "sha256-Ds1QpobBX2yoUDx9ZruqVGJ/uQPgcXoYuobBguyKEh8=", + "lastModified": 1739019272, + "narHash": "sha256-7Fu7oazPoYCbDzb9k8D/DdbKrC3aU1zlnc39Y8jy/s8=", "owner": "NixOS", "repo": "nixpkgs", - "rev": "e913ae340076bbb73d9f4d3d065c2bca7caafb16", + "rev": "fa35a3c8e17a3de613240fea68f876e5b4896aec", "type": "github" }, "original": { diff --git a/flake.nix b/flake.nix index 16b4ce5ea2..934333cff0 100644 --- a/flake.nix +++ b/flake.nix @@ -4,7 +4,7 @@ inputs = { nixpkgs.url = "github:NixOS/nixpkgs/nixpkgs-unstable"; flake-utils.url = "github:numtide/flake-utils"; - # for default.nix + # For default.nix flake-compat = { url = "github:edolstra/flake-compat"; flake = false; @@ -12,7 +12,7 @@ }; outputs = - inputs@{ self, nixpkgs, flake-utils, ... }: + { nixpkgs, flake-utils, ... }: flake-utils.lib.eachSystem [ "x86_64-linux" "x86_64-darwin" "aarch64-linux" "aarch64-darwin" ] (system: let @@ -27,8 +27,10 @@ name = "hls-docs"; src = pkgs.lib.sourceFilesBySuffices ./. [ ".py" ".rst" ".md" ".png" ".gif" ".svg" ".cabal" ]; buildInputs = [ pythonWithPackages ]; - # -n gives warnings on missing link targets, -W makes warnings into errors - buildPhase = ''cd docs; sphinx-build -n -W . 
$out''; + buildPhase = '' + cd docs + make --makefile=${./docs/Makefile} html BUILDDIR=$out + ''; dontInstall = true; }; @@ -50,25 +52,21 @@ mkDevShell = hpkgs: with pkgs; mkShell { name = "haskell-language-server-dev-ghc${hpkgs.ghc.version}"; - # For binary Haskell tools, we use the default nixpkgs GHC - # This removes a rebuild with a different GHC version - # The drawback of this approach is that our shell may pull two GHC - # version in scope. + # For binary Haskell tools, we use the default Nixpkgs GHC version. + # This removes a rebuild with a different GHC version. The drawback of + # this approach is that our shell may pull two GHC versions in scope. buildInputs = [ - # our compiling toolchain + # Compiler toolchain hpkgs.ghc pkgs.haskellPackages.cabal-install - # Dependencies needed to build some parts of hackage + # Dependencies needed to build some parts of Hackage gmp zlib ncurses # Changelog tooling (gen-hls-changelogs pkgs.haskellPackages) # For the documentation pythonWithPackages - # @guibou: I'm not sure this is needed. - hlint (pkgs.haskell.lib.justStaticExecutables (pkgs.haskell.lib.dontCheck pkgs.haskellPackages.opentelemetry-extra)) capstone - # ormolu stylish-haskell pre-commit ] ++ lib.optionals (!stdenv.isDarwin) @@ -92,7 +90,7 @@ ''; }; - in with pkgs; rec { + in rec { # Developement shell with only dev tools devShells = { default = mkDevShell pkgs.haskellPackages; @@ -102,9 +100,7 @@ shell-ghc910 = mkDevShell pkgs.haskell.packages.ghc910; }; - packages = { - docs = docs; - }; + packages = { inherit docs; }; # The attributes for the default shell and package changed in recent versions of Nix, # these are here for backwards compatibility with the old versions. diff --git a/ghcide-bench/src/Experiments.hs b/ghcide-bench/src/Experiments.hs index 525f07a37d..c53ffd0a7c 100644 --- a/ghcide-bench/src/Experiments.hs +++ b/ghcide-bench/src/Experiments.hs @@ -266,7 +266,7 @@ experiments = flip allM docs $ \DocumentPositions{..} -> do bottom <- pred . length . 
T.lines <$> documentContents doc diags <- getCurrentDiagnostics doc - case requireDiagnostic diags (DiagnosticSeverity_Error, (fromIntegral bottom, 8), "Found hole", Nothing) of + case requireDiagnostic diags (DiagnosticSeverity_Error, (fromIntegral bottom, 8), "Found hole", Just "GHC-88464", Nothing) of Nothing -> pure True Just _err -> pure False ), @@ -857,11 +857,9 @@ getBuildEdgesCount = tryCallTestPlugin GetBuildEdgesCount getRebuildsCount :: Session (Either (TResponseError @ClientToServer (Method_CustomMethod "test")) Int) getRebuildsCount = tryCallTestPlugin GetRebuildsCount --- Copy&paste from ghcide/test/Development.IDE.Test getStoredKeys :: Session [Text] getStoredKeys = callTestPlugin GetStoredKeys --- Copy&paste from ghcide/test/Development.IDE.Test tryCallTestPlugin :: (A.FromJSON b) => TestRequest -> Session (Either (TResponseError @ClientToServer (Method_CustomMethod "test")) b) tryCallTestPlugin cmd = do let cm = SMethod_CustomMethod (Proxy @"test") @@ -873,7 +871,6 @@ tryCallTestPlugin cmd = do A.Success a -> Right a A.Error e -> error e --- Copy&paste from ghcide/test/Development.IDE.Test callTestPlugin :: (A.FromJSON b) => TestRequest -> Session b callTestPlugin cmd = do res <- tryCallTestPlugin cmd diff --git a/ghcide/test/LICENSE b/ghcide-test/LICENSE similarity index 100% rename from ghcide/test/LICENSE rename to ghcide-test/LICENSE diff --git a/ghcide/test/data/TH/THA.hs b/ghcide-test/data/TH/THA.hs similarity index 100% rename from ghcide/test/data/TH/THA.hs rename to ghcide-test/data/TH/THA.hs diff --git a/ghcide/test/data/TH/THB.hs b/ghcide-test/data/TH/THB.hs similarity index 100% rename from ghcide/test/data/TH/THB.hs rename to ghcide-test/data/TH/THB.hs diff --git a/ghcide/test/data/TH/THC.hs b/ghcide-test/data/TH/THC.hs similarity index 100% rename from ghcide/test/data/TH/THC.hs rename to ghcide-test/data/TH/THC.hs diff --git a/ghcide/test/data/TH/hie.yaml b/ghcide-test/data/TH/hie.yaml similarity index 100% rename from ghcide/test/data/TH/hie.yaml rename to ghcide-test/data/TH/hie.yaml diff --git a/ghcide/test/data/THCoreFile/THA.hs b/ghcide-test/data/THCoreFile/THA.hs similarity index 100% rename from ghcide/test/data/THCoreFile/THA.hs rename to ghcide-test/data/THCoreFile/THA.hs diff --git a/ghcide/test/data/THCoreFile/THB.hs b/ghcide-test/data/THCoreFile/THB.hs similarity index 100% rename from ghcide/test/data/THCoreFile/THB.hs rename to ghcide-test/data/THCoreFile/THB.hs diff --git a/ghcide/test/data/THCoreFile/THC.hs b/ghcide-test/data/THCoreFile/THC.hs similarity index 100% rename from ghcide/test/data/THCoreFile/THC.hs rename to ghcide-test/data/THCoreFile/THC.hs diff --git a/ghcide/test/data/THCoreFile/hie.yaml b/ghcide-test/data/THCoreFile/hie.yaml similarity index 100% rename from ghcide/test/data/THCoreFile/hie.yaml rename to ghcide-test/data/THCoreFile/hie.yaml diff --git a/ghcide/test/data/THLoading/A.hs b/ghcide-test/data/THLoading/A.hs similarity index 100% rename from ghcide/test/data/THLoading/A.hs rename to ghcide-test/data/THLoading/A.hs diff --git a/ghcide/test/data/THLoading/B.hs b/ghcide-test/data/THLoading/B.hs similarity index 100% rename from ghcide/test/data/THLoading/B.hs rename to ghcide-test/data/THLoading/B.hs diff --git a/ghcide/test/data/THLoading/THA.hs b/ghcide-test/data/THLoading/THA.hs similarity index 100% rename from ghcide/test/data/THLoading/THA.hs rename to ghcide-test/data/THLoading/THA.hs diff --git a/ghcide/test/data/THLoading/THB.hs b/ghcide-test/data/THLoading/THB.hs similarity index 100% rename from 
ghcide/test/data/THLoading/THB.hs rename to ghcide-test/data/THLoading/THB.hs diff --git a/ghcide/test/data/THLoading/hie.yaml b/ghcide-test/data/THLoading/hie.yaml similarity index 100% rename from ghcide/test/data/THLoading/hie.yaml rename to ghcide-test/data/THLoading/hie.yaml diff --git a/ghcide/test/data/THNewName/A.hs b/ghcide-test/data/THNewName/A.hs similarity index 100% rename from ghcide/test/data/THNewName/A.hs rename to ghcide-test/data/THNewName/A.hs diff --git a/ghcide/test/data/THNewName/B.hs b/ghcide-test/data/THNewName/B.hs similarity index 100% rename from ghcide/test/data/THNewName/B.hs rename to ghcide-test/data/THNewName/B.hs diff --git a/ghcide/test/data/THNewName/C.hs b/ghcide-test/data/THNewName/C.hs similarity index 100% rename from ghcide/test/data/THNewName/C.hs rename to ghcide-test/data/THNewName/C.hs diff --git a/ghcide/test/data/THNewName/hie.yaml b/ghcide-test/data/THNewName/hie.yaml similarity index 100% rename from ghcide/test/data/THNewName/hie.yaml rename to ghcide-test/data/THNewName/hie.yaml diff --git a/ghcide/test/data/THUnboxed/THA.hs b/ghcide-test/data/THUnboxed/THA.hs similarity index 100% rename from ghcide/test/data/THUnboxed/THA.hs rename to ghcide-test/data/THUnboxed/THA.hs diff --git a/ghcide/test/data/THUnboxed/THB.hs b/ghcide-test/data/THUnboxed/THB.hs similarity index 100% rename from ghcide/test/data/THUnboxed/THB.hs rename to ghcide-test/data/THUnboxed/THB.hs diff --git a/ghcide/test/data/THUnboxed/THC.hs b/ghcide-test/data/THUnboxed/THC.hs similarity index 100% rename from ghcide/test/data/THUnboxed/THC.hs rename to ghcide-test/data/THUnboxed/THC.hs diff --git a/ghcide/test/data/THUnboxed/hie.yaml b/ghcide-test/data/THUnboxed/hie.yaml similarity index 100% rename from ghcide/test/data/THUnboxed/hie.yaml rename to ghcide-test/data/THUnboxed/hie.yaml diff --git a/ghcide/test/data/boot/A.hs b/ghcide-test/data/boot/A.hs similarity index 100% rename from ghcide/test/data/boot/A.hs rename to ghcide-test/data/boot/A.hs diff --git a/ghcide/test/data/boot/A.hs-boot b/ghcide-test/data/boot/A.hs-boot similarity index 100% rename from ghcide/test/data/boot/A.hs-boot rename to ghcide-test/data/boot/A.hs-boot diff --git a/ghcide/test/data/boot/B.hs b/ghcide-test/data/boot/B.hs similarity index 100% rename from ghcide/test/data/boot/B.hs rename to ghcide-test/data/boot/B.hs diff --git a/ghcide/test/data/boot/C.hs b/ghcide-test/data/boot/C.hs similarity index 100% rename from ghcide/test/data/boot/C.hs rename to ghcide-test/data/boot/C.hs diff --git a/ghcide/test/data/boot/hie.yaml b/ghcide-test/data/boot/hie.yaml similarity index 100% rename from ghcide/test/data/boot/hie.yaml rename to ghcide-test/data/boot/hie.yaml diff --git a/ghcide/test/data/boot2/A.hs b/ghcide-test/data/boot2/A.hs similarity index 100% rename from ghcide/test/data/boot2/A.hs rename to ghcide-test/data/boot2/A.hs diff --git a/ghcide/test/data/boot2/B.hs b/ghcide-test/data/boot2/B.hs similarity index 100% rename from ghcide/test/data/boot2/B.hs rename to ghcide-test/data/boot2/B.hs diff --git a/ghcide/test/data/boot2/B.hs-boot b/ghcide-test/data/boot2/B.hs-boot similarity index 100% rename from ghcide/test/data/boot2/B.hs-boot rename to ghcide-test/data/boot2/B.hs-boot diff --git a/ghcide/test/data/boot2/C.hs b/ghcide-test/data/boot2/C.hs similarity index 100% rename from ghcide/test/data/boot2/C.hs rename to ghcide-test/data/boot2/C.hs diff --git a/ghcide/test/data/boot2/D.hs b/ghcide-test/data/boot2/D.hs similarity index 100% rename from ghcide/test/data/boot2/D.hs rename to 
ghcide-test/data/boot2/D.hs diff --git a/ghcide/test/data/boot2/E.hs b/ghcide-test/data/boot2/E.hs similarity index 100% rename from ghcide/test/data/boot2/E.hs rename to ghcide-test/data/boot2/E.hs diff --git a/ghcide/test/data/boot2/hie.yaml b/ghcide-test/data/boot2/hie.yaml similarity index 100% rename from ghcide/test/data/boot2/hie.yaml rename to ghcide-test/data/boot2/hie.yaml diff --git a/ghcide/test/data/cabal-exe/a/a.cabal b/ghcide-test/data/cabal-exe/a/a.cabal similarity index 100% rename from ghcide/test/data/cabal-exe/a/a.cabal rename to ghcide-test/data/cabal-exe/a/a.cabal diff --git a/ghcide/test/data/cabal-exe/a/src/Main.hs b/ghcide-test/data/cabal-exe/a/src/Main.hs similarity index 100% rename from ghcide/test/data/cabal-exe/a/src/Main.hs rename to ghcide-test/data/cabal-exe/a/src/Main.hs diff --git a/ghcide/test/data/cabal-exe/cabal.project b/ghcide-test/data/cabal-exe/cabal.project similarity index 100% rename from ghcide/test/data/cabal-exe/cabal.project rename to ghcide-test/data/cabal-exe/cabal.project diff --git a/ghcide/test/data/cabal-exe/hie.yaml b/ghcide-test/data/cabal-exe/hie.yaml similarity index 100% rename from ghcide/test/data/cabal-exe/hie.yaml rename to ghcide-test/data/cabal-exe/hie.yaml diff --git a/ghcide/test/data/hover/Bar.hs b/ghcide-test/data/hover/Bar.hs similarity index 100% rename from ghcide/test/data/hover/Bar.hs rename to ghcide-test/data/hover/Bar.hs diff --git a/ghcide/test/data/hover/Foo.hs b/ghcide-test/data/hover/Foo.hs similarity index 100% rename from ghcide/test/data/hover/Foo.hs rename to ghcide-test/data/hover/Foo.hs diff --git a/ghcide/test/data/hover/GotoHover.hs b/ghcide-test/data/hover/GotoHover.hs similarity index 100% rename from ghcide/test/data/hover/GotoHover.hs rename to ghcide-test/data/hover/GotoHover.hs diff --git a/ghcide-test/data/hover/GotoImplementation.hs b/ghcide-test/data/hover/GotoImplementation.hs new file mode 100644 index 0000000000..12038857c6 --- /dev/null +++ b/ghcide-test/data/hover/GotoImplementation.hs @@ -0,0 +1,30 @@ +{-# LANGUAGE GADTs, GeneralisedNewtypeDeriving, DerivingStrategies #-} +{-# OPTIONS_GHC -Wno-missing-methods #-} +module GotoImplementation where + +data AAA = AAA +instance Num AAA where +aaa :: Num x => x +aaa = 1 +aaa1 :: AAA = aaa + +class BBB a where + bbb :: a -> a +instance BBB AAA where + bbb = const AAA +bbbb :: AAA +bbbb = bbb AAA + +ccc :: Show a => a -> String +ccc d = show d + +newtype Q k = Q k + deriving newtype (Eq, Show) +ddd :: (Show k, Eq k) => k -> String +ddd k = if Q k == Q k then show k else "" +ddd1 = ddd (Q 0) + +data GadtTest a where + GadtTest :: Int -> GadtTest Int +printUsingEvidence :: Show a => GadtTest a -> String +printUsingEvidence (GadtTest i) = show i diff --git a/ghcide/test/data/hover/RecordDotSyntax.hs b/ghcide-test/data/hover/RecordDotSyntax.hs similarity index 100% rename from ghcide/test/data/hover/RecordDotSyntax.hs rename to ghcide-test/data/hover/RecordDotSyntax.hs diff --git a/ghcide/test/data/hover/hie.yaml b/ghcide-test/data/hover/hie.yaml similarity index 64% rename from ghcide/test/data/hover/hie.yaml rename to ghcide-test/data/hover/hie.yaml index e2b3e97c5d..de7cc991cc 100644 --- a/ghcide/test/data/hover/hie.yaml +++ b/ghcide-test/data/hover/hie.yaml @@ -1 +1 @@ -cradle: {direct: {arguments: ["Foo", "Bar", "GotoHover", "RecordDotSyntax"]}} +cradle: {direct: {arguments: ["Foo", "Bar", "GotoHover", "RecordDotSyntax", "GotoImplementation"]}} diff --git a/ghcide/test/data/ignore-fatal/IgnoreFatal.hs 
b/ghcide-test/data/ignore-fatal/IgnoreFatal.hs similarity index 74% rename from ghcide/test/data/ignore-fatal/IgnoreFatal.hs rename to ghcide-test/data/ignore-fatal/IgnoreFatal.hs index 77b11c5bb3..b73787f166 100644 --- a/ghcide/test/data/ignore-fatal/IgnoreFatal.hs +++ b/ghcide-test/data/ignore-fatal/IgnoreFatal.hs @@ -1,7 +1,7 @@ -- "missing signature" is declared a fatal warning in the cabal file, -- but is ignored in this module. -{-# OPTIONS_GHC -fno-warn-missing-signatures #-} +{-# OPTIONS_GHC -Wno-missing-signatures #-} module IgnoreFatal where diff --git a/ghcide/test/data/ignore-fatal/cabal.project b/ghcide-test/data/ignore-fatal/cabal.project similarity index 100% rename from ghcide/test/data/ignore-fatal/cabal.project rename to ghcide-test/data/ignore-fatal/cabal.project diff --git a/ghcide/test/data/ignore-fatal/hie.yaml b/ghcide-test/data/ignore-fatal/hie.yaml similarity index 100% rename from ghcide/test/data/ignore-fatal/hie.yaml rename to ghcide-test/data/ignore-fatal/hie.yaml diff --git a/ghcide/test/data/ignore-fatal/ignore-fatal.cabal b/ghcide-test/data/ignore-fatal/ignore-fatal.cabal similarity index 100% rename from ghcide/test/data/ignore-fatal/ignore-fatal.cabal rename to ghcide-test/data/ignore-fatal/ignore-fatal.cabal diff --git a/ghcide/test/data/multi-unit-reexport/a-1.0.0-inplace b/ghcide-test/data/multi-unit-reexport/a-1.0.0-inplace similarity index 100% rename from ghcide/test/data/multi-unit-reexport/a-1.0.0-inplace rename to ghcide-test/data/multi-unit-reexport/a-1.0.0-inplace diff --git a/ghcide/test/data/multi-unit-reexport/a/A.hs b/ghcide-test/data/multi-unit-reexport/a/A.hs similarity index 100% rename from ghcide/test/data/multi-unit-reexport/a/A.hs rename to ghcide-test/data/multi-unit-reexport/a/A.hs diff --git a/ghcide/test/data/multi-unit-reexport/b-1.0.0-inplace b/ghcide-test/data/multi-unit-reexport/b-1.0.0-inplace similarity index 100% rename from ghcide/test/data/multi-unit-reexport/b-1.0.0-inplace rename to ghcide-test/data/multi-unit-reexport/b-1.0.0-inplace diff --git a/ghcide/test/data/multi-unit-reexport/b/B.hs b/ghcide-test/data/multi-unit-reexport/b/B.hs similarity index 100% rename from ghcide/test/data/multi-unit-reexport/b/B.hs rename to ghcide-test/data/multi-unit-reexport/b/B.hs diff --git a/ghcide/test/data/multi-unit-reexport/c-1.0.0-inplace b/ghcide-test/data/multi-unit-reexport/c-1.0.0-inplace similarity index 100% rename from ghcide/test/data/multi-unit-reexport/c-1.0.0-inplace rename to ghcide-test/data/multi-unit-reexport/c-1.0.0-inplace diff --git a/ghcide/test/data/multi-unit-reexport/c/C.hs b/ghcide-test/data/multi-unit-reexport/c/C.hs similarity index 100% rename from ghcide/test/data/multi-unit-reexport/c/C.hs rename to ghcide-test/data/multi-unit-reexport/c/C.hs diff --git a/ghcide/test/data/multi-unit-reexport/cabal.project b/ghcide-test/data/multi-unit-reexport/cabal.project similarity index 100% rename from ghcide/test/data/multi-unit-reexport/cabal.project rename to ghcide-test/data/multi-unit-reexport/cabal.project diff --git a/ghcide/test/data/multi-unit-reexport/hie.yaml b/ghcide-test/data/multi-unit-reexport/hie.yaml similarity index 100% rename from ghcide/test/data/multi-unit-reexport/hie.yaml rename to ghcide-test/data/multi-unit-reexport/hie.yaml diff --git a/ghcide/test/data/multi-unit/a-1.0.0-inplace b/ghcide-test/data/multi-unit/a-1.0.0-inplace similarity index 100% rename from ghcide/test/data/multi-unit/a-1.0.0-inplace rename to ghcide-test/data/multi-unit/a-1.0.0-inplace diff --git 
a/ghcide/test/data/multi-unit/a/A.hs b/ghcide-test/data/multi-unit/a/A.hs similarity index 100% rename from ghcide/test/data/multi-unit/a/A.hs rename to ghcide-test/data/multi-unit/a/A.hs diff --git a/ghcide/test/data/multi-unit/b-1.0.0-inplace b/ghcide-test/data/multi-unit/b-1.0.0-inplace similarity index 100% rename from ghcide/test/data/multi-unit/b-1.0.0-inplace rename to ghcide-test/data/multi-unit/b-1.0.0-inplace diff --git a/ghcide/test/data/multi-unit/b/B.hs b/ghcide-test/data/multi-unit/b/B.hs similarity index 100% rename from ghcide/test/data/multi-unit/b/B.hs rename to ghcide-test/data/multi-unit/b/B.hs diff --git a/ghcide/test/data/multi-unit/c-1.0.0-inplace b/ghcide-test/data/multi-unit/c-1.0.0-inplace similarity index 100% rename from ghcide/test/data/multi-unit/c-1.0.0-inplace rename to ghcide-test/data/multi-unit/c-1.0.0-inplace diff --git a/ghcide/test/data/multi-unit/c/C.hs b/ghcide-test/data/multi-unit/c/C.hs similarity index 100% rename from ghcide/test/data/multi-unit/c/C.hs rename to ghcide-test/data/multi-unit/c/C.hs diff --git a/ghcide/test/data/multi-unit/cabal.project b/ghcide-test/data/multi-unit/cabal.project similarity index 100% rename from ghcide/test/data/multi-unit/cabal.project rename to ghcide-test/data/multi-unit/cabal.project diff --git a/ghcide/test/data/multi-unit/hie.yaml b/ghcide-test/data/multi-unit/hie.yaml similarity index 100% rename from ghcide/test/data/multi-unit/hie.yaml rename to ghcide-test/data/multi-unit/hie.yaml diff --git a/ghcide/test/data/multi/a/A.hs b/ghcide-test/data/multi/a/A.hs similarity index 100% rename from ghcide/test/data/multi/a/A.hs rename to ghcide-test/data/multi/a/A.hs diff --git a/ghcide/test/data/multi/a/a.cabal b/ghcide-test/data/multi/a/a.cabal similarity index 100% rename from ghcide/test/data/multi/a/a.cabal rename to ghcide-test/data/multi/a/a.cabal diff --git a/ghcide/test/data/multi/b/B.hs b/ghcide-test/data/multi/b/B.hs similarity index 100% rename from ghcide/test/data/multi/b/B.hs rename to ghcide-test/data/multi/b/B.hs diff --git a/ghcide/test/data/multi/b/b.cabal b/ghcide-test/data/multi/b/b.cabal similarity index 100% rename from ghcide/test/data/multi/b/b.cabal rename to ghcide-test/data/multi/b/b.cabal diff --git a/ghcide/test/data/multi/c/C.hs b/ghcide-test/data/multi/c/C.hs similarity index 100% rename from ghcide/test/data/multi/c/C.hs rename to ghcide-test/data/multi/c/C.hs diff --git a/ghcide/test/data/multi/c/c.cabal b/ghcide-test/data/multi/c/c.cabal similarity index 100% rename from ghcide/test/data/multi/c/c.cabal rename to ghcide-test/data/multi/c/c.cabal diff --git a/ghcide/test/data/multi/cabal.project b/ghcide-test/data/multi/cabal.project similarity index 100% rename from ghcide/test/data/multi/cabal.project rename to ghcide-test/data/multi/cabal.project diff --git a/ghcide/test/data/multi/hie.yaml b/ghcide-test/data/multi/hie.yaml similarity index 100% rename from ghcide/test/data/multi/hie.yaml rename to ghcide-test/data/multi/hie.yaml diff --git a/ghcide/test/data/plugin-knownnat/KnownNat.hs b/ghcide-test/data/plugin-knownnat/KnownNat.hs similarity index 100% rename from ghcide/test/data/plugin-knownnat/KnownNat.hs rename to ghcide-test/data/plugin-knownnat/KnownNat.hs diff --git a/ghcide/test/data/plugin-knownnat/cabal.project b/ghcide-test/data/plugin-knownnat/cabal.project similarity index 100% rename from ghcide/test/data/plugin-knownnat/cabal.project rename to ghcide-test/data/plugin-knownnat/cabal.project diff --git a/ghcide/test/data/plugin-knownnat/plugin.cabal 
b/ghcide-test/data/plugin-knownnat/plugin.cabal similarity index 100% rename from ghcide/test/data/plugin-knownnat/plugin.cabal rename to ghcide-test/data/plugin-knownnat/plugin.cabal diff --git a/ghcide/test/data/recomp/A.hs b/ghcide-test/data/recomp/A.hs similarity index 100% rename from ghcide/test/data/recomp/A.hs rename to ghcide-test/data/recomp/A.hs diff --git a/ghcide/test/data/recomp/B.hs b/ghcide-test/data/recomp/B.hs similarity index 100% rename from ghcide/test/data/recomp/B.hs rename to ghcide-test/data/recomp/B.hs diff --git a/ghcide/test/data/recomp/P.hs b/ghcide-test/data/recomp/P.hs similarity index 100% rename from ghcide/test/data/recomp/P.hs rename to ghcide-test/data/recomp/P.hs diff --git a/ghcide/test/data/recomp/hie.yaml b/ghcide-test/data/recomp/hie.yaml similarity index 100% rename from ghcide/test/data/recomp/hie.yaml rename to ghcide-test/data/recomp/hie.yaml diff --git a/ghcide/test/data/references/Main.hs b/ghcide-test/data/references/Main.hs similarity index 100% rename from ghcide/test/data/references/Main.hs rename to ghcide-test/data/references/Main.hs diff --git a/ghcide/test/data/references/OtherModule.hs b/ghcide-test/data/references/OtherModule.hs similarity index 100% rename from ghcide/test/data/references/OtherModule.hs rename to ghcide-test/data/references/OtherModule.hs diff --git a/ghcide/test/data/references/OtherOtherModule.hs b/ghcide-test/data/references/OtherOtherModule.hs similarity index 100% rename from ghcide/test/data/references/OtherOtherModule.hs rename to ghcide-test/data/references/OtherOtherModule.hs diff --git a/ghcide/test/data/references/References.hs b/ghcide-test/data/references/References.hs similarity index 100% rename from ghcide/test/data/references/References.hs rename to ghcide-test/data/references/References.hs diff --git a/ghcide/test/data/references/hie.yaml b/ghcide-test/data/references/hie.yaml similarity index 100% rename from ghcide/test/data/references/hie.yaml rename to ghcide-test/data/references/hie.yaml diff --git a/ghcide/test/data/rootUri/dirA/Foo.hs b/ghcide-test/data/rootUri/dirA/Foo.hs similarity index 100% rename from ghcide/test/data/rootUri/dirA/Foo.hs rename to ghcide-test/data/rootUri/dirA/Foo.hs diff --git a/ghcide/test/data/rootUri/dirA/foo.cabal b/ghcide-test/data/rootUri/dirA/foo.cabal similarity index 100% rename from ghcide/test/data/rootUri/dirA/foo.cabal rename to ghcide-test/data/rootUri/dirA/foo.cabal diff --git a/ghcide/test/data/rootUri/dirB/Foo.hs b/ghcide-test/data/rootUri/dirB/Foo.hs similarity index 100% rename from ghcide/test/data/rootUri/dirB/Foo.hs rename to ghcide-test/data/rootUri/dirB/Foo.hs diff --git a/ghcide/test/data/rootUri/dirB/foo.cabal b/ghcide-test/data/rootUri/dirB/foo.cabal similarity index 100% rename from ghcide/test/data/rootUri/dirB/foo.cabal rename to ghcide-test/data/rootUri/dirB/foo.cabal diff --git a/ghcide/test/data/symlink/hie.yaml b/ghcide-test/data/symlink/hie.yaml similarity index 100% rename from ghcide/test/data/symlink/hie.yaml rename to ghcide-test/data/symlink/hie.yaml diff --git a/ghcide/test/data/symlink/other_loc/.gitkeep b/ghcide-test/data/symlink/other_loc/.gitkeep similarity index 100% rename from ghcide/test/data/symlink/other_loc/.gitkeep rename to ghcide-test/data/symlink/other_loc/.gitkeep diff --git a/ghcide/test/data/symlink/some_loc/Sym.hs b/ghcide-test/data/symlink/some_loc/Sym.hs similarity index 100% rename from ghcide/test/data/symlink/some_loc/Sym.hs rename to ghcide-test/data/symlink/some_loc/Sym.hs diff --git 
a/ghcide/test/data/symlink/src/Foo.hs b/ghcide-test/data/symlink/src/Foo.hs similarity index 100% rename from ghcide/test/data/symlink/src/Foo.hs rename to ghcide-test/data/symlink/src/Foo.hs diff --git a/ghcide-test/data/watched-files/reload/reload.cabal b/ghcide-test/data/watched-files/reload/reload.cabal new file mode 100644 index 0000000000..d9d5607a94 --- /dev/null +++ b/ghcide-test/data/watched-files/reload/reload.cabal @@ -0,0 +1,12 @@ +cabal-version: 2.4 +name: reload +version: 0.1.0.0 +author: Lin Jian +maintainer: me@linj.tech +build-type: Simple + +library + exposed-modules: MyLib + build-depends: base + hs-source-dirs: src + default-language: Haskell2010 diff --git a/ghcide-test/data/watched-files/reload/src/MyLib.hs b/ghcide-test/data/watched-files/reload/src/MyLib.hs new file mode 100644 index 0000000000..bbb506d001 --- /dev/null +++ b/ghcide-test/data/watched-files/reload/src/MyLib.hs @@ -0,0 +1,6 @@ +module MyLib (someFunc) where + +import Data.List.Split + +someFunc :: IO () +someFunc = putStrLn "someFunc" diff --git a/ghcide/test/data/working-dir/a/A.hs b/ghcide-test/data/working-dir/a/A.hs similarity index 100% rename from ghcide/test/data/working-dir/a/A.hs rename to ghcide-test/data/working-dir/a/A.hs diff --git a/ghcide/test/data/working-dir/a/B.hs b/ghcide-test/data/working-dir/a/B.hs similarity index 100% rename from ghcide/test/data/working-dir/a/B.hs rename to ghcide-test/data/working-dir/a/B.hs diff --git a/ghcide/test/data/working-dir/a/a.cabal b/ghcide-test/data/working-dir/a/a.cabal similarity index 100% rename from ghcide/test/data/working-dir/a/a.cabal rename to ghcide-test/data/working-dir/a/a.cabal diff --git a/ghcide/test/data/working-dir/a/wdtest b/ghcide-test/data/working-dir/a/wdtest similarity index 100% rename from ghcide/test/data/working-dir/a/wdtest rename to ghcide-test/data/working-dir/a/wdtest diff --git a/ghcide/test/data/working-dir/cabal.project b/ghcide-test/data/working-dir/cabal.project similarity index 100% rename from ghcide/test/data/working-dir/cabal.project rename to ghcide-test/data/working-dir/cabal.project diff --git a/ghcide/test/data/working-dir/hie.yaml b/ghcide-test/data/working-dir/hie.yaml similarity index 100% rename from ghcide/test/data/working-dir/hie.yaml rename to ghcide-test/data/working-dir/hie.yaml diff --git a/ghcide/test/exe/AsyncTests.hs b/ghcide-test/exe/AsyncTests.hs similarity index 100% rename from ghcide/test/exe/AsyncTests.hs rename to ghcide-test/exe/AsyncTests.hs diff --git a/ghcide/test/exe/BootTests.hs b/ghcide-test/exe/BootTests.hs similarity index 100% rename from ghcide/test/exe/BootTests.hs rename to ghcide-test/exe/BootTests.hs diff --git a/ghcide/test/exe/CPPTests.hs b/ghcide-test/exe/CPPTests.hs similarity index 96% rename from ghcide/test/exe/CPPTests.hs rename to ghcide-test/exe/CPPTests.hs index 91a59adc76..762e6632f1 100644 --- a/ghcide/test/exe/CPPTests.hs +++ b/ghcide-test/exe/CPPTests.hs @@ -42,7 +42,7 @@ tests = ," failed" ,"#endif" ] - expectDiagnostics [("A.hs", [(DiagnosticSeverity_Error, (3, 2), "Variable not in scope: worked")])] + expectDiagnostics [("A.hs", [(DiagnosticSeverity_Error, (3, 2), "Variable not in scope: worked", Just "GHC-88464")])] ] where expectError :: T.Text -> Cursor -> Session () @@ -50,7 +50,7 @@ tests = _ <- createDoc "Testing.hs" "haskell" content expectDiagnostics [ ( "Testing.hs", - [(DiagnosticSeverity_Error, cursor, "error: unterminated")] + [(DiagnosticSeverity_Error, cursor, "error: unterminated", Nothing)] ) ] expectNoMoreDiagnostics 0.5 diff --git 
a/ghcide/test/exe/ClientSettingsTests.hs b/ghcide-test/exe/ClientSettingsTests.hs similarity index 100% rename from ghcide/test/exe/ClientSettingsTests.hs rename to ghcide-test/exe/ClientSettingsTests.hs diff --git a/ghcide/test/exe/CodeLensTests.hs b/ghcide-test/exe/CodeLensTests.hs similarity index 100% rename from ghcide/test/exe/CodeLensTests.hs rename to ghcide-test/exe/CodeLensTests.hs diff --git a/ghcide/test/exe/CompletionTests.hs b/ghcide-test/exe/CompletionTests.hs similarity index 97% rename from ghcide/test/exe/CompletionTests.hs rename to ghcide-test/exe/CompletionTests.hs index 26d8d17fc2..a980d47233 100644 --- a/ghcide/test/exe/CompletionTests.hs +++ b/ghcide-test/exe/CompletionTests.hs @@ -276,8 +276,7 @@ nonLocalCompletionTests = where brokenForWinGhc = knownBrokenOnWindows "Windows has strange things in scope for some reason" brokenForWinOldGhc = - knownBrokenInSpecificEnv [HostOS Windows, GhcVer GHC92] "Windows (GHC == 9.2) has strange things in scope for some reason" - . knownBrokenInSpecificEnv [HostOS Windows, GhcVer GHC94] "Windows (GHC == 9.4) has strange things in scope for some reason" + knownBrokenInSpecificEnv [HostOS Windows, GhcVer GHC94] "Windows (GHC == 9.4) has strange things in scope for some reason" . knownBrokenInSpecificEnv [HostOS Windows, GhcVer GHC96] "Windows (GHC == 9.6) has strange things in scope for some reason" . knownBrokenInSpecificEnv [HostOS Windows, GhcVer GHC98] "Windows (GHC == 9.8) has strange things in scope for some reason" @@ -564,13 +563,10 @@ completionDocTests = _ <- waitForDiagnostics compls <- getCompletions doc pos rcompls <- forM compls $ \item -> do - if isJust (item ^. L.data_) - then do - rsp <- request SMethod_CompletionItemResolve item - case rsp ^. L.result of - Left err -> liftIO $ assertFailure ("completionItem/resolve failed with: " <> show err) - Right x -> pure x - else pure item + rsp <- request SMethod_CompletionItemResolve item + case rsp ^. 
L.result of + Left err -> liftIO $ assertFailure ("completionItem/resolve failed with: " <> show err) + Right x -> pure x let compls' = [ -- We ignore doc uris since it points to the local path which determined by specific machines case mn of diff --git a/ghcide/test/exe/Config.hs b/ghcide-test/exe/Config.hs similarity index 84% rename from ghcide/test/exe/Config.hs rename to ghcide-test/exe/Config.hs index cd58fd5ead..c98023e90e 100644 --- a/ghcide/test/exe/Config.hs +++ b/ghcide-test/exe/Config.hs @@ -5,6 +5,8 @@ module Config( mkIdeTestFs , dummyPlugin + -- * runners for testing specific plugins + , testSessionWithPlugin -- * runners for testing with dummy plugin , runWithDummyPlugin , testWithDummyPlugin @@ -26,13 +28,16 @@ module Config( , withLongTimeout , lspTestCaps , lspTestCapsNoFileWatches + , testDataDir ) where import Control.Exception (bracket_) import Control.Lens.Setter ((.~)) +import Control.Monad (unless) import Data.Foldable (traverse_) import Data.Function ((&)) import qualified Data.Text as T +import Development.IDE (Pretty) import Development.IDE.Test (canonicalizeUri) import Ide.Types (defaultPluginDescriptor) import qualified Language.LSP.Protocol.Lens as L @@ -43,11 +48,21 @@ import Test.Hls import qualified Test.Hls.FileSystem as FS testDataDir :: FilePath -testDataDir = "ghcide" "test" "data" +testDataDir = "ghcide-test" "data" mkIdeTestFs :: [FS.FileTree] -> FS.VirtualFileTree mkIdeTestFs = FS.mkVirtualFileTree testDataDir +-- * Run with some injected plugin +-- testSessionWithPlugin :: FS.VirtualFileTree -> (FilePath -> Session a) -> IO a +testSessionWithPlugin :: Pretty b => FS.VirtualFileTree -> PluginTestDescriptor b -> (FilePath -> Session a) -> IO a +testSessionWithPlugin fs plugin = runSessionWithTestConfig def + { testPluginDescriptor = plugin + , testDirLocation = Right fs + , testConfigCaps = lspTestCaps + , testShiftRoot = True + } + -- * A dummy plugin for testing ghcIde dummyPlugin :: PluginTestDescriptor () dummyPlugin = mkPluginTestDescriptor (\_ pid -> defaultPluginDescriptor pid "dummyTestPlugin") "core" @@ -100,6 +115,7 @@ pattern R x y x' y' = Range (Position x y) (Position x' y') data Expect = ExpectRange Range -- Both gotoDef and hover should report this range + | ExpectRanges [Range] -- definition lookup with multiple results | ExpectLocation Location -- | ExpectDefRange Range -- Only gotoDef should report this range | ExpectHoverRange Range -- Only hover should report this range @@ -108,6 +124,7 @@ data Expect | ExpectHoverTextRegex T.Text -- the hover message must match this pattern | ExpectExternFail -- definition lookup in other file expected to fail | ExpectNoDefinitions + | ExpectNoImplementations | ExpectNoHover -- | ExpectExtern -- TODO: as above, but expected to succeed: need some more info in here, once we have some working examples deriving Eq @@ -124,12 +141,16 @@ checkDefs (defToLocation -> defs) mkExpectations = traverse_ check =<< mkExpecta check (ExpectRange expectedRange) = do def <- assertOneDefinitionFound defs assertRangeCorrect def expectedRange + check (ExpectRanges ranges) = + traverse_ (assertHasRange defs) ranges check (ExpectLocation expectedLocation) = do def <- assertOneDefinitionFound defs liftIO $ do canonActualLoc <- canonicalizeLocation def canonExpectedLoc <- canonicalizeLocation expectedLocation canonActualLoc @?= canonExpectedLoc + check ExpectNoImplementations = do + liftIO $ assertBool "Expecting no implementations" $ null defs check ExpectNoDefinitions = do liftIO $ assertBool "Expecting no definitions" 
$ null defs check ExpectExternFail = liftIO $ assertFailure "Expecting to fail to find in external file" @@ -142,6 +163,10 @@ checkDefs (defToLocation -> defs) mkExpectations = traverse_ check =<< mkExpecta assertRangeCorrect Location{_range = foundRange} expectedRange = liftIO $ expectedRange @=? foundRange + assertHasRange actualRanges expectedRange = do + let hasRange = any (\Location{_range=foundRange} -> foundRange == expectedRange) actualRanges + unless hasRange $ liftIO $ assertFailure $ + "expected range: " <> show expectedRange <> "\nbut got ranges: " <> show defs canonicalizeLocation :: Location -> IO Location canonicalizeLocation (Location uri range) = Location <$> canonicalizeUri uri <*> pure range diff --git a/ghcide/test/exe/CradleTests.hs b/ghcide-test/exe/CradleTests.hs similarity index 93% rename from ghcide/test/exe/CradleTests.hs rename to ghcide-test/exe/CradleTests.hs index cdfbb06ea2..046b8bbf2f 100644 --- a/ghcide/test/exe/CradleTests.hs +++ b/ghcide-test/exe/CradleTests.hs @@ -10,7 +10,6 @@ import Control.Applicative.Combinators import Control.Lens ((^.)) import Control.Monad.IO.Class (liftIO) import qualified Data.Text as T -import Development.IDE.GHC.Compat (GhcVersion (..)) import Development.IDE.GHC.Util import Development.IDE.Plugin.Test (WaitForIdeRuleResult (..)) import Development.IDE.Test (expectDiagnostics, @@ -30,7 +29,8 @@ import Language.LSP.Protocol.Types hiding import Language.LSP.Test import System.FilePath import System.IO.Extra hiding (withTempDir) -import Test.Hls (ignoreForGhcVersions) +import Test.Hls.Util (EnvSpec (..), OS (..), + ignoreInEnv) import Test.Tasty import Test.Tasty.HUnit @@ -41,11 +41,9 @@ tests = testGroup "cradle" ,testGroup "ignore-fatal" [ignoreFatalWarning] ,testGroup "loading" [loadCradleOnlyonce, retryFailedCradle] ,testGroup "multi" (multiTests "multi") - ,ignoreForGhcVersions [GHC92] "multiple units not supported on 9.2" - $ testGroup "multi-unit" (multiTests "multi-unit") + ,testGroup "multi-unit" (multiTests "multi-unit") ,testGroup "sub-directory" [simpleSubDirectoryTest] - ,ignoreForGhcVersions [GHC92] "multiple units not supported on 9.2" - $ testGroup "multi-unit-rexport" [multiRexportTest] + ,testGroup "multi-unit-rexport" [multiRexportTest] ] loadCradleOnlyonce :: TestTree @@ -113,7 +111,7 @@ simpleSubDirectoryTest = mainSource <- liftIO $ readFileUtf8 mainPath _mdoc <- createDoc mainPath "haskell" mainSource expectDiagnosticsWithTags - [("a/src/Main.hs", [(DiagnosticSeverity_Warning,(2,0), "Top-level binding", Nothing)]) -- So that we know P has been loaded + [("a/src/Main.hs", [(DiagnosticSeverity_Warning,(2,0), "Top-level binding", Just "GHC-38417", Nothing)]) -- So that we know P has been loaded ] expectNoMoreDiagnostics 0.5 @@ -173,7 +171,8 @@ simpleMultiTest3 variant = -- Like simpleMultiTest but open the files in component 'a' in a separate session simpleMultiDefTest :: FilePath -> TestTree -simpleMultiDefTest variant = testCase (multiTestName variant "def-test") $ runWithExtraFiles variant $ \dir -> do +simpleMultiDefTest variant = ignoreForWindows $ testCase testName $ + runWithExtraFiles variant $ \dir -> do let aPath = dir "a/A.hs" bPath = dir "b/B.hs" adoc <- liftIO $ runInDir dir $ do @@ -188,6 +187,11 @@ simpleMultiDefTest variant = testCase (multiTestName variant "def-test") $ runWi let fooL = mkL (adoc ^. 
L.uri) 2 0 2 3 checkDefs locs (pure [fooL]) expectNoMoreDiagnostics 0.5 + where + testName = multiTestName variant "def-test" + ignoreForWindows + | testName == "simple-multi-def-test" = ignoreInEnv [HostOS Windows] "Test is flaky on Windows, see #4270" + | otherwise = id multiRexportTest :: TestTree multiRexportTest = @@ -211,7 +215,7 @@ sessionDepsArePickedUp = testWithDummyPluginEmpty' "cradle: {direct: {arguments: []}}" -- Open without OverloadedStrings and expect an error. doc <- createDoc "Foo.hs" "haskell" fooContent - expectDiagnostics [("Foo.hs", [(DiagnosticSeverity_Error, (3, 6), "Couldn't match type")])] + expectDiagnostics [("Foo.hs", [(DiagnosticSeverity_Error, (3, 6), "Couldn't match type", Just "GHC-83865")])] -- Update hie.yaml to enable OverloadedStrings. liftIO $ diff --git a/ghcide/test/exe/DependentFileTest.hs b/ghcide-test/exe/DependentFileTest.hs similarity index 98% rename from ghcide/test/exe/DependentFileTest.hs rename to ghcide-test/exe/DependentFileTest.hs index d2d19cf88d..1f243819e3 100644 --- a/ghcide/test/exe/DependentFileTest.hs +++ b/ghcide-test/exe/DependentFileTest.hs @@ -46,7 +46,7 @@ tests = testGroup "addDependentFile" _fooDoc <- createDoc "Foo.hs" "haskell" fooContent doc <- createDoc "Baz.hs" "haskell" bazContent expectDiagnostics - [("Foo.hs", [(DiagnosticSeverity_Error, (4,11), "Couldn't match type")])] + [("Foo.hs", [(DiagnosticSeverity_Error, (4,11), "Couldn't match type", Just "GHC-83865")])] -- Now modify the dependent file liftIO $ writeFile depFilePath "B" sendNotification SMethod_WorkspaceDidChangeWatchedFiles $ DidChangeWatchedFilesParams diff --git a/ghcide/test/exe/DiagnosticTests.hs b/ghcide-test/exe/DiagnosticTests.hs similarity index 92% rename from ghcide/test/exe/DiagnosticTests.hs rename to ghcide-test/exe/DiagnosticTests.hs index 660dcb3241..615e6ad69e 100644 --- a/ghcide/test/exe/DiagnosticTests.hs +++ b/ghcide-test/exe/DiagnosticTests.hs @@ -48,7 +48,7 @@ tests = testGroup "diagnostics" [ testWithDummyPluginEmpty "fix syntax error" $ do let content = T.unlines [ "module Testing wher" ] doc <- createDoc "Testing.hs" "haskell" content - expectDiagnostics [("Testing.hs", [(DiagnosticSeverity_Error, (0, 15), "parse error")])] + expectDiagnostics [("Testing.hs", [(DiagnosticSeverity_Error, (0, 15), "parse error", Just "GHC-58481")])] let change = TextDocumentContentChangeEvent $ InL TextDocumentContentChangePartial { _range = Range (Position 0 15) (Position 0 19) , _rangeLength = Nothing @@ -67,18 +67,18 @@ tests = testGroup "diagnostics" , _text = "wher" } changeDoc doc [change] - expectDiagnostics [("Testing.hs", [(DiagnosticSeverity_Error, (0, 15), "parse error")])] + expectDiagnostics [("Testing.hs", [(DiagnosticSeverity_Error, (0, 15), "parse error", Just "GHC-58481")])] , testWithDummyPluginEmpty "update syntax error" $ do let content = T.unlines [ "module Testing(missing) where" ] doc <- createDoc "Testing.hs" "haskell" content - expectDiagnostics [("Testing.hs", [(DiagnosticSeverity_Error, (0, 15), "Not in scope: 'missing'")])] + expectDiagnostics [("Testing.hs", [(DiagnosticSeverity_Error, (0, 15), "Not in scope: 'missing'", Just "GHC-76037")])] let change = TextDocumentContentChangeEvent $ InL TextDocumentContentChangePartial { _range = Range (Position 0 15) (Position 0 16) , _rangeLength = Nothing , _text = "l" } changeDoc doc [change] - expectDiagnostics [("Testing.hs", [(DiagnosticSeverity_Error, (0, 15), "Not in scope: 'lissing'")])] + expectDiagnostics [("Testing.hs", [(DiagnosticSeverity_Error, (0, 15), "Not in scope: 
'lissing'", Just "GHC-76037")])] , testWithDummyPluginEmpty "variable not in scope" $ do let content = T.unlines [ "module Testing where" @@ -90,8 +90,8 @@ tests = testGroup "diagnostics" _ <- createDoc "Testing.hs" "haskell" content expectDiagnostics [ ( "Testing.hs" - , [ (DiagnosticSeverity_Error, (2, 15), "Variable not in scope: ab") - , (DiagnosticSeverity_Error, (4, 11), "Variable not in scope: cd") + , [ (DiagnosticSeverity_Error, (2, 15), "Variable not in scope: ab", Just "GHC-88464") + , (DiagnosticSeverity_Error, (4, 11), "Variable not in scope: cd", Just "GHC-88464") ] ) ] @@ -104,7 +104,7 @@ tests = testGroup "diagnostics" _ <- createDoc "Testing.hs" "haskell" content expectDiagnostics [ ( "Testing.hs" - , [(DiagnosticSeverity_Error, (2, 14), "Couldn't match type '[Char]' with 'Int'")] + , [(DiagnosticSeverity_Error, (2, 14), "Couldn't match type '[Char]' with 'Int'", Just "GHC-83865")] ) ] , testWithDummyPluginEmpty "typed hole" $ do @@ -116,7 +116,7 @@ tests = testGroup "diagnostics" _ <- createDoc "Testing.hs" "haskell" content expectDiagnostics [ ( "Testing.hs" - , [(DiagnosticSeverity_Error, (2, 8), "Found hole: _ :: Int -> String")] + , [(DiagnosticSeverity_Error, (2, 8), "Found hole: _ :: Int -> String", Just "GHC-88464")] ) ] @@ -131,17 +131,17 @@ tests = testGroup "diagnostics" , "b :: Float" , "b = True"] bMessage = "Couldn't match expected type 'Float' with actual type 'Bool'" - expectedDs aMessage = - [ ("A.hs", [(DiagnosticSeverity_Error, (2,4), aMessage)]) - , ("B.hs", [(DiagnosticSeverity_Error, (3,4), bMessage)])] - deferralTest title binding msg = testWithDummyPluginEmpty title $ do + expectedDs aMessage aCode = + [ ("A.hs", [(DiagnosticSeverity_Error, (2,4), aMessage, aCode)]) + , ("B.hs", [(DiagnosticSeverity_Error, (3,4), bMessage, Just "GHC-83865")])] + deferralTest title binding msg code = testWithDummyPluginEmpty title $ do _ <- createDoc "A.hs" "haskell" $ sourceA binding _ <- createDoc "B.hs" "haskell" sourceB - expectDiagnostics $ expectedDs msg + expectDiagnostics $ expectedDs msg code in - [ deferralTest "type error" "True" "Couldn't match expected type" - , deferralTest "typed hole" "_" "Found hole" - , deferralTest "out of scope var" "unbound" "Variable not in scope" + [ deferralTest "type error" "True" "Couldn't match expected type" (Just "GHC-83865") + , deferralTest "typed hole" "_" "Found hole" (Just "GHC-88464") + , deferralTest "out of scope var" "unbound" "Variable not in scope" (Just "GHC-88464") ] , testWithDummyPluginEmpty "remove required module" $ do @@ -158,14 +158,14 @@ tests = testGroup "diagnostics" , _text = "" } changeDoc docA [change] - expectDiagnostics [("ModuleB.hs", [(DiagnosticSeverity_Error, (1, 0), "Could not find module")])] + expectDiagnostics [("ModuleB.hs", [(DiagnosticSeverity_Error, (1, 0), "Could not find module", Nothing)])] , testWithDummyPluginEmpty "add missing module" $ do let contentB = T.unlines [ "module ModuleB where" , "import ModuleA ()" ] _ <- createDoc "ModuleB.hs" "haskell" contentB - expectDiagnostics [("ModuleB.hs", [(DiagnosticSeverity_Error, (1, 7), "Could not find module")])] + expectDiagnostics [("ModuleB.hs", [(DiagnosticSeverity_Error, (1, 7), "Could not find module", Nothing)])] let contentA = T.unlines [ "module ModuleA where" ] _ <- createDoc "ModuleA.hs" "haskell" contentA expectDiagnostics [("ModuleB.hs", [])] @@ -185,7 +185,7 @@ tests = testGroup "diagnostics" , "import ModuleA ()" ] _ <- createDoc (tmpDir "ModuleB.hs") "haskell" contentB - expectDiagnostics [(tmpDir "ModuleB.hs", 
[(DiagnosticSeverity_Error, (1, 7), "Could not find module")])] + expectDiagnostics [(tmpDir "ModuleB.hs", [(DiagnosticSeverity_Error, (1, 7), "Could not find module", Nothing)])] let contentA = T.unlines [ "module ModuleA where" ] _ <- createDoc (tmpDir "ModuleA.hs") "haskell" contentA expectDiagnostics [(tmpDir "ModuleB.hs", [])] @@ -202,10 +202,10 @@ tests = testGroup "diagnostics" _ <- createDoc "ModuleB.hs" "haskell" contentB expectDiagnostics [ ( "ModuleA.hs" - , [(DiagnosticSeverity_Error, (1, 7), "Cyclic module dependency between ModuleA, ModuleB")] + , [(DiagnosticSeverity_Error, (1, 7), "Cyclic module dependency between ModuleA, ModuleB", Nothing)] ) , ( "ModuleB.hs" - , [(DiagnosticSeverity_Error, (1, 7), "Cyclic module dependency between ModuleA, ModuleB")] + , [(DiagnosticSeverity_Error, (1, 7), "Cyclic module dependency between ModuleA, ModuleB", Nothing)] ) ] , let contentA = T.unlines [ "module ModuleA where" , "import ModuleB" ] @@ -222,8 +222,8 @@ tests = testGroup "diagnostics" ]) $ do _ <- createDoc "ModuleD.hs" "haskell" contentD expectDiagnostics - [ ( "ModuleB.hs" , [(DiagnosticSeverity_Error, (1, 7), "Cyclic module dependency between ModuleA, ModuleB")]) - , ( "ModuleA.hs" , [(DiagnosticSeverity_Error, (1, 7), "Cyclic module dependency between ModuleA, ModuleB")]) + [ ( "ModuleB.hs" , [(DiagnosticSeverity_Error, (1, 7), "Cyclic module dependency between ModuleA, ModuleB", Nothing)]) + , ( "ModuleA.hs" , [(DiagnosticSeverity_Error, (1, 7), "Cyclic module dependency between ModuleA, ModuleB", Nothing)]) ] , testWithDummyPluginEmpty "cyclic module dependency with hs-boot" $ do let contentA = T.unlines @@ -243,7 +243,7 @@ tests = testGroup "diagnostics" _ <- createDoc "ModuleA.hs" "haskell" contentA _ <- createDoc "ModuleB.hs" "haskell" contentB _ <- createDoc "ModuleB.hs-boot" "haskell" contentBboot - expectDiagnostics [("ModuleB.hs", [(DiagnosticSeverity_Warning, (3,0), "Top-level binding")])] + expectDiagnostics [("ModuleB.hs", [(DiagnosticSeverity_Warning, (3,0), "Top-level binding", Just "GHC-38417")])] , testWithDummyPlugin "bidirectional module dependency with hs-boot" (mkIdeTestFs [directCradle ["ModuleA", "ModuleB"]]) $ do @@ -268,7 +268,7 @@ tests = testGroup "diagnostics" _ <- createDoc "ModuleA.hs-boot" "haskell" contentAboot _ <- createDoc "ModuleB.hs" "haskell" contentB _ <- createDoc "ModuleB.hs-boot" "haskell" contentBboot - expectDiagnostics [("ModuleB.hs", [(DiagnosticSeverity_Warning, (3,0), "Top-level binding")])] + expectDiagnostics [("ModuleB.hs", [(DiagnosticSeverity_Warning, (3,0), "Top-level binding", Just "GHC-38417")])] , testWithDummyPluginEmpty "correct reference used with hs-boot" $ do let contentB = T.unlines [ "module ModuleB where" @@ -294,7 +294,7 @@ tests = testGroup "diagnostics" _ <- createDoc "ModuleA.hs" "haskell" contentA _ <- createDoc "ModuleA.hs-boot" "haskell" contentAboot _ <- createDoc "ModuleC.hs" "haskell" contentC - expectDiagnostics [("ModuleC.hs", [(DiagnosticSeverity_Warning, (3,0), "Top-level binding")])] + expectDiagnostics [("ModuleC.hs", [(DiagnosticSeverity_Warning, (3,0), "Top-level binding", Just "GHC-38417")])] , testWithDummyPluginEmpty "redundant import" $ do let contentA = T.unlines ["module ModuleA where"] let contentB = T.unlines @@ -306,7 +306,7 @@ tests = testGroup "diagnostics" _ <- createDoc "ModuleB.hs" "haskell" contentB expectDiagnosticsWithTags [ ( "ModuleB.hs" - , [(DiagnosticSeverity_Warning, (2, 0), "The import of 'ModuleA' is redundant", Just DiagnosticTag_Unnecessary)] + , 
[(DiagnosticSeverity_Warning, (2, 0), "The import of 'ModuleA' is redundant", Nothing, Just DiagnosticTag_Unnecessary)] ) ] , testWithDummyPluginEmpty "redundant import even without warning" $ do @@ -320,7 +320,7 @@ tests = testGroup "diagnostics" ] _ <- createDoc "ModuleA.hs" "haskell" contentA _ <- createDoc "ModuleB.hs" "haskell" contentB - expectDiagnostics [("ModuleB.hs", [(DiagnosticSeverity_Warning, (3,0), "Top-level binding")])] + expectDiagnostics [("ModuleB.hs", [(DiagnosticSeverity_Warning, (3,0), "Top-level binding", Just "GHC-38417")])] , testWithDummyPluginEmpty "package imports" $ do let thisDataListContent = T.unlines [ "module Data.List where" @@ -348,14 +348,14 @@ tests = testGroup "diagnostics" else if ghcVersion >= GHC94 then "Variable not in scope: map" -- See https://gitlab.haskell.org/ghc/ghc/-/issues/22130 else - "Not in scope: \8216ThisList.map\8217") + "Not in scope: \8216ThisList.map\8217", Just "GHC-88464") ,(DiagnosticSeverity_Error, (7, 9), if ghcVersion >= GHC96 then "Variable not in scope: BaseList.x" else if ghcVersion >= GHC94 then "Variable not in scope: x" -- See https://gitlab.haskell.org/ghc/ghc/-/issues/22130 else - "Not in scope: \8216BaseList.x\8217") + "Not in scope: \8216BaseList.x\8217", Just "GHC-88464") ] ) ] @@ -373,7 +373,7 @@ tests = testGroup "diagnostics" -- where appropriate. The warning should use an unqualified name 'Ord', not -- something like 'GHC.Classes.Ord'. The choice of redundant-constraints to -- test this is fairly arbitrary. - , [(DiagnosticSeverity_Warning, (2, if ghcVersion >= GHC94 then 7 else 0), "Redundant constraint: Ord a") + , [(DiagnosticSeverity_Warning, (2, if ghcVersion >= GHC94 then 7 else 0), "Redundant constraint: Ord a", Just "GHC-30606") ] ) ] @@ -439,7 +439,7 @@ tests = testGroup "diagnostics" _ <- createDoc "Foo.hs" "haskell" fooContent expectDiagnostics [ ( "Foo.hs" - , [(DiagnosticSeverity_Warning, (1, 0), "Top-level binding with no type signature:") + , [(DiagnosticSeverity_Warning, (1, 0), "Top-level binding with no type signature:", Nothing) ] ) ] @@ -453,7 +453,7 @@ tests = testGroup "diagnostics" _ <- createDoc "Foo.hs" "haskell" fooContent expectDiagnostics [ ( "Foo.hs" - , [(DiagnosticSeverity_Warning, (3, 0), "Defined but not used:") + , [(DiagnosticSeverity_Warning, (3, 0), "Defined but not used:", Nothing) ] ) ] @@ -469,13 +469,13 @@ tests = testGroup "diagnostics" bdoc <- createDoc bPath "haskell" bSource _pdoc <- createDoc pPath "haskell" pSource expectDiagnostics - [("P.hs", [(DiagnosticSeverity_Warning,(4,0), "Top-level binding")])] -- So that we know P has been loaded + [("P.hs", [(DiagnosticSeverity_Warning,(4,0), "Top-level binding", Just "GHC-38417")])] -- So that we know P has been loaded -- Change y from Int to B which introduces a type error in A (imported from P) changeDoc bdoc [TextDocumentContentChangeEvent . InR . 
TextDocumentContentChangeWholeDocument $ T.unlines ["module B where", "y :: Bool", "y = undefined"]] expectDiagnostics - [("A.hs", [(DiagnosticSeverity_Error, (5, 4), "Couldn't match expected type 'Int' with actual type 'Bool'")]) + [("A.hs", [(DiagnosticSeverity_Error, (5, 4), "Couldn't match expected type 'Int' with actual type 'Bool'", Just "GHC-83865")]) ] -- Open A and edit to fix the type error @@ -485,8 +485,8 @@ tests = testGroup "diagnostics" expectDiagnostics [ ( "P.hs", - [ (DiagnosticSeverity_Error, (4, 6), "Couldn't match expected type 'Int' with actual type 'Bool'"), - (DiagnosticSeverity_Warning, (4, 0), "Top-level binding") + [ (DiagnosticSeverity_Error, (4, 6), "Couldn't match expected type 'Int' with actual type 'Bool'", Just "GHC-83865"), + (DiagnosticSeverity_Warning, (4, 0), "Top-level binding", Just "GHC-38417") ] ), ("A.hs", []) @@ -496,14 +496,14 @@ tests = testGroup "diagnostics" , testWithDummyPluginEmpty "deduplicate missing module diagnostics" $ do let fooContent = T.unlines [ "module Foo() where" , "import MissingModule" ] doc <- createDoc "Foo.hs" "haskell" fooContent - expectDiagnostics [("Foo.hs", [(DiagnosticSeverity_Error, (1,7), "Could not find module 'MissingModule'")])] + expectDiagnostics [("Foo.hs", [(DiagnosticSeverity_Error, (1,7), "Could not find module 'MissingModule'", Nothing)])] changeDoc doc [TextDocumentContentChangeEvent . InR . TextDocumentContentChangeWholeDocument $ "module Foo() where" ] expectDiagnostics [] changeDoc doc [TextDocumentContentChangeEvent . InR . TextDocumentContentChangeWholeDocument $ T.unlines [ "module Foo() where" , "import MissingModule" ] ] - expectDiagnostics [("Foo.hs", [(DiagnosticSeverity_Error, (1,7), "Could not find module 'MissingModule'")])] + expectDiagnostics [("Foo.hs", [(DiagnosticSeverity_Error, (1,7), "Could not find module 'MissingModule'", Nothing)])] , testGroup "Cancellation" [ cancellationTestGroup "edit header" editHeader yesSession noParse noTc @@ -564,7 +564,7 @@ cancellationTemplate (edit, undoEdit) mbKey = testCase (maybe "-" fst mbKey) $ r ] -- for the example above we expect one warning - let missingSigDiags = [(DiagnosticSeverity_Warning, (3, 0), "Top-level binding") ] + let missingSigDiags = [(DiagnosticSeverity_Warning, (3, 0), "Top-level binding", Just "GHC-38417") ] typeCheck doc >> expectCurrentDiagnostics doc missingSigDiags -- Now we edit the document and wait for the given key (if any) diff --git a/ghcide/test/exe/ExceptionTests.hs b/ghcide-test/exe/ExceptionTests.hs similarity index 99% rename from ghcide/test/exe/ExceptionTests.hs rename to ghcide-test/exe/ExceptionTests.hs index 756e7e0547..a95f91e97c 100644 --- a/ghcide/test/exe/ExceptionTests.hs +++ b/ghcide-test/exe/ExceptionTests.hs @@ -8,7 +8,7 @@ import Control.Monad.Error.Class (MonadError (throwError)) import Control.Monad.IO.Class (liftIO) import qualified Data.Aeson as A import Data.Default (Default (..)) -import Data.Text as T +import qualified Data.Text as T import Development.IDE.Core.Shake (IdeState (..)) import qualified Development.IDE.LSP.Notifications as Notifications import Development.IDE.Plugin.HLS (toResponseError) diff --git a/ghcide/test/exe/FindDefinitionAndHoverTests.hs b/ghcide-test/exe/FindDefinitionAndHoverTests.hs similarity index 57% rename from ghcide/test/exe/FindDefinitionAndHoverTests.hs rename to ghcide-test/exe/FindDefinitionAndHoverTests.hs index 63d8dd7ab7..e46141df4e 100644 --- a/ghcide/test/exe/FindDefinitionAndHoverTests.hs +++ b/ghcide-test/exe/FindDefinitionAndHoverTests.hs @@ -13,6 
+13,7 @@ import Language.LSP.Test import System.Info.Extra (isWindows) import Config +import Control.Category ((>>>)) import Control.Lens ((^.)) import Development.IDE.Test (expectDiagnostics, standardizeQuotes) @@ -53,7 +54,27 @@ tests = let _ -> liftIO $ assertFailure $ "test not expecting this kind of hover info" <> show hover extractLineColFromHoverMsg :: T.Text -> [T.Text] - extractLineColFromHoverMsg = T.splitOn ":" . head . T.splitOn "*" . last . T.splitOn (sourceFileName <> ":") + extractLineColFromHoverMsg = + -- Hover messages contain multiple lines, and we are looking for the definition + -- site + T.lines + -- The line we are looking for looks like: "*Defined at /tmp/GotoHover.hs:22:3*" + -- So filter by the start of the line + >>> mapMaybe (T.stripPrefix "*Defined at") + -- There can be multiple definitions per hover message! + -- See the test "field in record definition" for example. + -- The tests check against the last line that contains the above line. + >>> last + -- [" /tmp/", "22:3*"] + >>> T.splitOn (sourceFileName <> ":") + -- "22:3*" + >>> last + -- ["22:3", ""] + >>> T.splitOn "*" + -- "22:3" + >>> head + -- ["22", "3"] + >>> T.splitOn ":" checkHoverRange :: Range -> Maybe Range -> T.Text -> Session () checkHoverRange expectedRange rangeInHover msg = @@ -88,8 +109,8 @@ tests = let , testGroup "hover" $ mapMaybe snd tests , testGroup "hover compile" [checkFileCompiles sourceFilePath $ expectDiagnostics - [ ( "GotoHover.hs", [(DiagnosticSeverity_Error, (62, 7), "Found hole: _")]) - , ( "GotoHover.hs", [(DiagnosticSeverity_Error, (65, 8), "Found hole: _")]) + [ ( "GotoHover.hs", [(DiagnosticSeverity_Error, (62, 7), "Found hole: _", Just "GHC-88464")]) + , ( "GotoHover.hs", [(DiagnosticSeverity_Error, (65, 8), "Found hole: _", Just "GHC-88464")]) ]] , testGroup "type-definition" typeDefinitionTests , testGroup "hover-record-dot-syntax" recordDotSyntaxTests ] @@ -119,8 +140,9 @@ tests = let hover = (getHover , checkHover) -- search locations expectations on results - fffL4 = fffR ^. L.start; fffR = mkRange 8 4 8 7 ; fff = [ExpectRange fffR] - fffL8 = Position 12 4 ; + -- TODO: Lookup of record field should return exactly one result + fffL4 = fffR ^. 
L.start; fffR = mkRange 8 4 8 7; fff = [ExpectRanges [fffR, mkRange 7 23 9 16]] + fffL8 = Position 12 4 ; fff' = [ExpectRange fffR] fffL14 = Position 18 7 ; aL20 = Position 19 15 aaaL14 = Position 18 20 ; aaa = [mkR 11 0 11 3] @@ -148,13 +170,19 @@ tests = let ; constr = [ExpectHoverText ["Monad m"]] eitL40 = Position 44 28 ; kindE = [ExpectHoverText [":: Type -> Type -> Type\n"]] intL40 = Position 44 34 ; kindI = [ExpectHoverText [":: Type\n"]] - tvrL40 = Position 44 37 ; kindV = [ExpectHoverText [":: * -> *\n"]] - intL41 = Position 45 20 ; litI = [ExpectHoverText ["7518"]] - chrL36 = Position 41 24 ; litC = [ExpectHoverText ["'f'"]] - txtL8 = Position 12 14 ; litT = [ExpectHoverText ["\"dfgy\""]] - lstL43 = Position 47 12 ; litL = [ExpectHoverText ["[8391 :: Int, 6268]"]] + -- TODO: Kind signature of type variables should be `Type -> Type` + tvrL40 = Position 44 37 ; kindV = [ExpectHoverText ["m"]]; kindV' = [ExpectHoverText [":: * -> *\n"]] + -- TODO: Hover of integer literal should be `7518` + intL41 = Position 45 20 ; litI = [ExpectHoverText ["_ :: Int"]]; litI' = [ExpectHoverText ["7518"]] + -- TODO: Hover info of char literal should be `'f'` + chrL36 = Position 41 24 ; litC = [ExpectHoverText ["_ :: Char"]]; litC' = [ExpectHoverText ["'f'"]] + -- TODO: Hover info of Text literal should be `"dfgy"` + txtL8 = Position 12 14 ; litT = [ExpectHoverText ["_ :: Text"]]; litT' = [ExpectHoverText ["\"dfgy\""]] + -- TODO: Hover info of List literal should be `[8391 :: Int, 6268]` + lstL43 = Position 47 12 ; litL = [ExpectHoverText ["[Int]"]]; litL' = [ExpectHoverText ["[8391 :: Int, 6268]"]] outL45 = Position 49 3 ; outSig = [ExpectHoverText ["outer", "Bool"], mkR 50 0 50 5] - innL48 = Position 52 5 ; innSig = [ExpectHoverText ["inner", "Char"], mkR 49 2 49 7] + -- TODO: Hover info of local function signature should be `inner :: Bool` + innL48 = Position 52 5 ; innSig = [ExpectHoverText ["inner"], mkR 53 2 53 7]; innSig' = [ExpectHoverText ["inner", "Char"], mkR 49 2 49 7] holeL60 = Position 62 7 ; hleInfo = [ExpectHoverText ["_ ::"]] holeL65 = Position 65 8 ; hleInfo2 = [ExpectHoverText ["_ :: a -> Maybe a"]] cccL17 = Position 17 16 ; docLink = [ExpectHoverTextRegex "\\*Defined in 'GHC.Types'\\* \\*\\(ghc-prim-[0-9.]+\\)\\*\n\n"] @@ -167,46 +195,46 @@ tests = let mkFindTests -- def hover look expect [ -- It suggests either going to the constructor or to the field - test broken yes fffL4 fff "field in record definition" - , test yes yes fffL8 fff "field in record construction #1102" - , test yes yes fffL14 fff "field name used as accessor" -- https://github.com/haskell/ghcide/pull/120 in Calculate.hs - , test yes yes aaaL14 aaa "top-level name" -- https://github.com/haskell/ghcide/pull/120 - , test yes yes dcL7 tcDC "data constructor record #1029" - , test yes yes dcL12 tcDC "data constructor plain" -- https://github.com/haskell/ghcide/pull/121 - , test yes yes tcL6 tcData "type constructor #1028" -- https://github.com/haskell/ghcide/pull/147 - , test yes yes xtcL5 xtc "type constructor external #717,1028" - , test yes yes xvL20 xvMsg "value external package #717" -- https://github.com/haskell/ghcide/pull/120 - , test yes yes vvL16 vv "plain parameter" -- https://github.com/haskell/ghcide/pull/120 - , test yes yes aL18 apmp "pattern match name" -- https://github.com/haskell/ghcide/pull/120 - , test yes yes opL16 op "top-level operator #713" -- https://github.com/haskell/ghcide/pull/120 - , test yes yes opL18 opp "parameter operator" -- https://github.com/haskell/ghcide/pull/120 - , test yes 
yes b'L19 bp "name in backticks" -- https://github.com/haskell/ghcide/pull/120 - , test yes yes clL23 cls "class in instance declaration #1027" - , test yes yes clL25 cls "class in signature #1027" -- https://github.com/haskell/ghcide/pull/147 - , test yes yes eclL15 ecls "external class in signature #717,1027" - , test yes yes dnbL29 dnb "do-notation bind #1073" - , test yes yes dnbL30 dnb "do-notation lookup" - , test yes yes lcbL33 lcb "listcomp bind #1073" - , test yes yes lclL33 lcb "listcomp lookup" - , test yes yes mclL36 mcl "top-level fn 1st clause" - , test yes yes mclL37 mcl "top-level fn 2nd clause #1030" - , test yes yes spaceL37 space "top-level fn on space #1002" - , test no yes docL41 doc "documentation #1129" - , test no yes eitL40 kindE "kind of Either #1017" - , test no yes intL40 kindI "kind of Int #1017" - , test no broken tvrL40 kindV "kind of (* -> *) type variable #1017" - , test no broken intL41 litI "literal Int in hover info #1016" - , test no broken chrL36 litC "literal Char in hover info #1016" - , test no broken txtL8 litT "literal Text in hover info #1016" - , test no broken lstL43 litL "literal List in hover info #1016" - , test yes yes cmtL68 lackOfdEq "no Core symbols #3280" - , test no yes docL41 constr "type constraint in hover info #1012" - , test no yes outL45 outSig "top-level signature #767" - , test broken broken innL48 innSig "inner signature #767" - , test no yes holeL60 hleInfo "hole without internal name #831" - , test no yes holeL65 hleInfo2 "hole with variable" - , test no yes cccL17 docLink "Haddock html links" - , testM yes yes imported importedSig "Imported symbol" + test (broken fff') yes fffL4 fff "field in record definition" + , test yes yes fffL8 fff' "field in record construction #1102" + , test yes yes fffL14 fff' "field name used as accessor" -- https://github.com/haskell/ghcide/pull/120 in Calculate.hs + , test yes yes aaaL14 aaa "top-level name" -- https://github.com/haskell/ghcide/pull/120 + , test yes yes dcL7 tcDC "data constructor record #1029" + , test yes yes dcL12 tcDC "data constructor plain" -- https://github.com/haskell/ghcide/pull/121 + , test yes yes tcL6 tcData "type constructor #1028" -- https://github.com/haskell/ghcide/pull/147 + , test yes yes xtcL5 xtc "type constructor external #717,1028" + , test yes yes xvL20 xvMsg "value external package #717" -- https://github.com/haskell/ghcide/pull/120 + , test yes yes vvL16 vv "plain parameter" -- https://github.com/haskell/ghcide/pull/120 + , test yes yes aL18 apmp "pattern match name" -- https://github.com/haskell/ghcide/pull/120 + , test yes yes opL16 op "top-level operator #713" -- https://github.com/haskell/ghcide/pull/120 + , test yes yes opL18 opp "parameter operator" -- https://github.com/haskell/ghcide/pull/120 + , test yes yes b'L19 bp "name in backticks" -- https://github.com/haskell/ghcide/pull/120 + , test yes yes clL23 cls "class in instance declaration #1027" + , test yes yes clL25 cls "class in signature #1027" -- https://github.com/haskell/ghcide/pull/147 + , test yes yes eclL15 ecls "external class in signature #717,1027" + , test yes yes dnbL29 dnb "do-notation bind #1073" + , test yes yes dnbL30 dnb "do-notation lookup" + , test yes yes lcbL33 lcb "listcomp bind #1073" + , test yes yes lclL33 lcb "listcomp lookup" + , test yes yes mclL36 mcl "top-level fn 1st clause" + , test yes yes mclL37 mcl "top-level fn 2nd clause #1030" + , test yes yes spaceL37 space "top-level fn on space #1002" + , test no yes docL41 doc "documentation #1129" + , test no yes 
eitL40 kindE "kind of Either #1017" + , test no yes intL40 kindI "kind of Int #1017" + , test no (broken kindV') tvrL40 kindV "kind of (* -> *) type variable #1017" + , test no (broken litI') intL41 litI "literal Int in hover info #1016" + , test no (broken litC') chrL36 litC "literal Char in hover info #1016" + , test no (broken litT') txtL8 litT "literal Text in hover info #1016" + , test no (broken litL') lstL43 litL "literal List in hover info #1016" + , test yes yes cmtL68 lackOfdEq "no Core symbols #3280" + , test no yes docL41 constr "type constraint in hover info #1012" + , test no yes outL45 outSig "top-level signature #767" + , test yes (broken innSig') innL48 innSig "inner signature #767" + , test no yes holeL60 hleInfo "hole without internal name #831" + , test no yes holeL65 hleInfo2 "hole with variable" + , test no yes cccL17 docLink "Haddock html links" + , testM yes yes imported importedSig "Imported symbol" , if isWindows then -- Flaky on Windows: https://github.com/haskell/haskell-language-server/issues/2997 testM no yes reexported reexportedSig "Imported symbol (reexported)" @@ -215,14 +243,12 @@ tests = let , test no yes thLocL57 thLoc "TH Splice Hover" , test yes yes import310 pkgTxt "show package name and its version" ] - where yes, broken :: (TestTree -> Maybe TestTree) - yes = Just -- test should run and pass - broken = Just . (`xfail` "known broken") + where yes :: (TestTree -> Maybe TestTree) + yes = Just -- test should run and pass no = const Nothing -- don't run this test at all --skip = const Nothing -- unreliable, don't run - -xfail :: TestTree -> String -> TestTree -xfail = flip expectFailBecause + broken :: [Expect] -> TestTree -> Maybe TestTree + broken _ = yes checkFileCompiles :: FilePath -> Session () -> TestTree checkFileCompiles fp diag = diff --git a/ghcide-test/exe/FindImplementationAndHoverTests.hs b/ghcide-test/exe/FindImplementationAndHoverTests.hs new file mode 100644 index 0000000000..221be90dd2 --- /dev/null +++ b/ghcide-test/exe/FindImplementationAndHoverTests.hs @@ -0,0 +1,228 @@ +{-# LANGUAGE ExplicitNamespaces #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE ViewPatterns #-} + +module FindImplementationAndHoverTests (tests) where + +import Control.Monad +import Data.Foldable +import Data.Maybe +import Data.Text (Text) +import qualified Data.Text as T +import Language.LSP.Test +import Text.Regex.TDFA ((=~)) + +import Config +import Development.IDE.Test (standardizeQuotes) +import Test.Hls +import Test.Hls.FileSystem (copyDir) + +tests :: TestTree +tests = let + tst :: (TextDocumentIdentifier -> Position -> Session a, a -> Session [Expect] -> Session ()) -> Position -> String -> Session [Expect] -> String -> TestTree + tst (get, check) pos sfp targetRange title = + testWithDummyPlugin title (mkIdeTestFs [copyDir "hover"]) $ do + doc <- openDoc sfp "haskell" + waitForProgressDone + _x <- waitForTypecheck doc + found <- get doc pos + check found targetRange + + checkHover :: (HasCallStack) => Maybe Hover -> Session [Expect] -> Session () + checkHover hover expectations = traverse_ check =<< expectations where + + check :: (HasCallStack) => Expect -> Session () + check expected = + case hover of + Nothing -> unless (expected == ExpectNoHover) $ liftIO $ assertFailure "no hover found" + Just Hover{_contents = (InL MarkupContent{_value = standardizeQuotes -> msg}) + ,_range = _rangeInHover } -> + case expected of + ExpectRange _expectedRange -> liftIO $ assertFailure $ "ExpectRange assertion not implemented, yet." 
+ ExpectHoverRange _expectedRange -> liftIO $ assertFailure $ "ExpectHoverRange assertion not implemented, yet." + ExpectHoverText snippets -> liftIO $ traverse_ (`assertFoundIn` msg) snippets + ExpectHoverExcludeText snippets -> liftIO $ traverse_ (`assertNotFoundIn` msg) snippets + ExpectHoverTextRegex re -> liftIO $ assertBool ("Regex not found in " <> T.unpack msg) (msg =~ re :: Bool) + ExpectNoHover -> liftIO $ assertFailure $ "Expected no hover but got " <> show hover + _ -> pure () -- all other expectations not relevant to hover + _ -> liftIO $ assertFailure $ "test not expecting this kind of hover info" <> show hover + + assertFoundIn :: T.Text -> T.Text -> Assertion + assertFoundIn part whole = assertBool + (T.unpack $ "failed to find: `" <> part <> "` in hover message:\n" <> whole) + (part `T.isInfixOf` whole) + + assertNotFoundIn :: T.Text -> T.Text -> Assertion + assertNotFoundIn part whole = assertBool + (T.unpack $ "found unexpected: `" <> part <> "` in hover message:\n" <> whole) + (not . T.isInfixOf part $ whole) + + sourceFilePath = T.unpack sourceFileName + sourceFileName = "GotoImplementation.hs" + + mkFindTests tests = testGroup "goto implementation" + [ testGroup "implementation" $ mapMaybe fst allTests + , testGroup "hover" $ mapMaybe snd allTests + ] + where + allTests = tests ++ recordDotSyntaxTests + + recordDotSyntaxTests = + -- We get neither new hover information nor 'Goto Implementation' locations for record-dot-syntax + [ test' "RecordDotSyntax.hs" yes yes (Position 17 6) [ExpectNoImplementations, ExpectHoverText ["_ :: [Char]"]] "hover over parent" + , test' "RecordDotSyntax.hs" yes yes (Position 17 18) [ExpectNoImplementations, ExpectHoverText ["_ :: Integer"]] "hover over dot shows child" + , test' "RecordDotSyntax.hs" yes yes (Position 17 25) [ExpectNoImplementations, ExpectHoverText ["_ :: MyChild"]] "hover over child" + , test' "RecordDotSyntax.hs" yes yes (Position 17 27) [ExpectNoImplementations, ExpectHoverText ["_ :: [Char]"]] "hover over grandchild" + ] + + test :: (HasCallStack) => (TestTree -> a) -> (TestTree -> b) -> Position -> [Expect] -> String -> (a, b) + test runImpl runHover look expect = testM runImpl runHover look (return expect) + + testM :: (HasCallStack) => (TestTree -> a) + -> (TestTree -> b) + -> Position + -> Session [Expect] + -> String + -> (a, b) + testM = testM' sourceFilePath + + test' :: (HasCallStack) => FilePath -> (TestTree -> a) -> (TestTree -> b) -> Position -> [Expect] -> String -> (a, b) + test' sourceFile runImpl runHover look expect = testM' sourceFile runImpl runHover look (return expect) + + testM' :: (HasCallStack) + => FilePath + -> (TestTree -> a) + -> (TestTree -> b) + -> Position + -> Session [Expect] + -> String + -> (a, b) + testM' sourceFile runImpl runHover look expect title = + ( runImpl $ tst impl look sourceFile expect title + , runHover $ tst hover look sourceFile expect title ) where + impl = (getImplementations, checkDefs) + hover = (getHover , checkHover) + + aaaL = Position 8 15; aaaR = mkRange 5 9 5 16; + aaa = + [ ExpectRanges [aaaR] + , ExpectHoverText (evidenceBoundByConstraint "Num" "AAA") + ] + + bbbL = Position 15 8; bbbR = mkRange 12 9 12 16; + bbb = + [ ExpectRanges [bbbR] + , ExpectHoverText (evidenceBoundByConstraint "BBB" "AAA") + ] + cccL = Position 18 11; + ccc = + [ ExpectNoImplementations + , ExpectHoverText (evidenceBySignatureOrPattern "Show" "a") + ] + dddShowR = mkRange 21 26 21 30; dddEqR = mkRange 21 22 21 24 + dddL1 = Position 23 16; + ddd1 = + [ ExpectRanges [dddEqR] + , 
ExpectHoverText + [ constraintEvidence "Eq" "(Q k)" + , evidenceGoal' "'forall k. Eq k => Eq (Q k)'" + , boundByInstanceOf "Eq" + , evidenceGoal "Eq" "k" + , boundByTypeSigOrPattern + ] + ] + dddL2 = Position 23 29; + ddd2 = + [ ExpectNoImplementations + , ExpectHoverText (evidenceBySignatureOrPattern "Show" "k") + ] + dddL3 = Position 24 8; + ddd3 = + [ ExpectRanges [dddEqR, dddShowR] + , ExpectHoverText + [ constraintEvidence "Show" "(Q Integer)" + , evidenceGoal' "'forall k. Show k => Show (Q k)'" + , boundByInstance + , evidenceGoal "Show" "Integer" + , usingExternalInstance + , constraintEvidence "Eq" "(Q Integer)" + , evidenceGoal' "'forall k. Eq k => Eq (Q k)'" + , boundByInstance + , evidenceGoal "Eq" "Integer" + , usingExternalInstance + ] + ] + gadtL = Position 29 35; + gadt = + [ ExpectNoImplementations + , ExpectHoverText + [ constraintEvidence "Show" "Int" + , evidenceGoal "Show" "a" + , boundByTypeSigOrPattern + , evidenceGoal' "'a ~ Int'" + , boundByPattern + ] + ] + in + mkFindTests + -- impl hover look expect + [ + test yes yes aaaL aaa "locally defined class instance" + , test yes yes bbbL bbb "locally defined class and instance" + , test yes yes cccL ccc "bound by type signature" + , test yes yes dddL1 ddd1 "newtype Eq evidence" + , test yes yes dddL2 ddd2 "Show evidence" + , test yes yes dddL3 ddd3 "evidence construction" + , test yes yes gadtL gadt "GADT evidence" + ] + where yes :: (TestTree -> Maybe TestTree) + yes = Just -- test should run and pass + no = const Nothing -- don't run this test at all + +-- ---------------------------------------------------------------------------- +-- Helper functions for creating hover message verification +-- ---------------------------------------------------------------------------- + +evidenceBySignatureOrPattern :: Text -> Text -> [Text] +evidenceBySignatureOrPattern tyclass varname = + [ constraintEvidence tyclass varname + , boundByTypeSigOrPattern + ] + +evidenceBoundByConstraint :: Text -> Text -> [Text] +evidenceBoundByConstraint tyclass varname = + [ constraintEvidence tyclass varname + , boundByInstanceOf tyclass + ] + +boundByTypeSigOrPattern :: Text +boundByTypeSigOrPattern = "bound by type signature or pattern" + +boundByInstance :: Text +boundByInstance = + "bound by an instance of" + +boundByInstanceOf :: Text -> Text +boundByInstanceOf tyvar = + "bound by an instance of class " <> tyvar + +boundByPattern :: Text +boundByPattern = + "bound by a pattern" + +usingExternalInstance :: Text +usingExternalInstance = + "using an external instance" + +constraintEvidence :: Text -> Text -> Text +constraintEvidence tyclass varname = "Evidence of constraint " <> quotedName tyclass varname + +-- | A goal in the evidence tree. 
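+-- For example, @evidenceGoal "Eq" "k"@ produces the expected hover line @- 'Eq k'@.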
+evidenceGoal :: Text -> Text -> Text +evidenceGoal tyclass varname = "- " <> quotedName tyclass varname + +evidenceGoal' :: Text -> Text +evidenceGoal' t = "- " <> t + +quotedName :: Text -> Text -> Text +quotedName tyclass varname = "'" <> tyclass <> " " <> varname <> "'" diff --git a/ghcide-test/exe/FuzzySearch.hs b/ghcide-test/exe/FuzzySearch.hs new file mode 100644 index 0000000000..1d2a5ac181 --- /dev/null +++ b/ghcide-test/exe/FuzzySearch.hs @@ -0,0 +1,52 @@ +module FuzzySearch (tests) where + +import Data.Maybe (isJust, mapMaybe) +import Data.Text (Text) +import qualified Data.Text as Text +import Prelude hiding (filter) +import Test.Tasty +import Test.Tasty.HUnit +import Test.Tasty.QuickCheck +import Text.Fuzzy.Parallel + +tests :: TestTree +tests = + testGroup + "Fuzzy search" + [ testGroup "match" + [ testCase "empty" $ + match "" "" @?= Just 0 + , testCase "camel case" $ + match "myImportantField" "myImportantField" @?= Just 262124 + , testCase "a" $ + mapMaybe (matchInput "a") ["", "a", "aa", "aaa", "A", "AA", "aA", "Aa"] + @?= [("a",3),("aa",3),("aaa",3),("aA",3),("Aa",1)] + , testCase "lowercase words" $ + mapMaybe (matchInput "abc") ["abc", "abcd", "axbc", "axbxc", "def"] + @?= [("abc", 25), ("abcd", 25), ("axbc", 7), ("axbxc", 5)] + , testCase "lower upper mix" $ + mapMaybe (matchInput "abc") ["abc", "aBc", "axbC", "axBxC", "def"] + @?= [("abc", 25), ("aBc", 25), ("axbC", 7), ("axBxC", 5)] + , testCase "prefixes" $ + mapMaybe (matchInput "alpha") (Text.inits "alphabet") + @?= [("alpha", 119), ("alphab", 119), ("alphabe", 119), ("alphabet", 119)] + , testProperty "x `isSubsequenceOf` y => match x y returns Just" + prop_matchIfSubsequence + ] + ] + where + matchInput :: Text -> Text -> Maybe (Text, Int) + matchInput needle candidate = (candidate,) <$> match needle candidate + +prop_matchIfSubsequence :: Property +prop_matchIfSubsequence = + forAll genNonEmptyText $ \haystack -> + forAll (genSubsequence haystack) $ \needle -> + isJust (match needle haystack) + where + genNonEmptyText = + Text.pack <$> listOf1 (elements $ ['a'..'z'] ++ ['A'..'Z'] ++ ['0'..'9']) + + genSubsequence :: Text -> Gen Text + genSubsequence = + fmap Text.pack . sublistOf . Text.unpack diff --git a/ghcide/test/exe/GarbageCollectionTests.hs b/ghcide-test/exe/GarbageCollectionTests.hs similarity index 98% rename from ghcide/test/exe/GarbageCollectionTests.hs rename to ghcide-test/exe/GarbageCollectionTests.hs index 8c0c428c1a..5cc9935352 100644 --- a/ghcide/test/exe/GarbageCollectionTests.hs +++ b/ghcide-test/exe/GarbageCollectionTests.hs @@ -72,7 +72,7 @@ tests = testGroup "garbage collection" changeDoc doc [TextDocumentContentChangeEvent . 
InR $ TextDocumentContentChangeWholeDocument edit] builds <- waitForTypecheck doc liftIO $ assertBool "it still builds" builds - expectCurrentDiagnostics doc [(DiagnosticSeverity_Error, (2,4), "Couldn't match expected type")] + expectCurrentDiagnostics doc [(DiagnosticSeverity_Error, (2,4), "Couldn't match expected type", Just "GHC-83865")] ] ] where diff --git a/ghcide/test/exe/HaddockTests.hs b/ghcide-test/exe/HaddockTests.hs similarity index 100% rename from ghcide/test/exe/HaddockTests.hs rename to ghcide-test/exe/HaddockTests.hs diff --git a/ghcide/test/exe/HieDbRetry.hs b/ghcide-test/exe/HieDbRetry.hs similarity index 100% rename from ghcide/test/exe/HieDbRetry.hs rename to ghcide-test/exe/HieDbRetry.hs diff --git a/ghcide/test/exe/HighlightTests.hs b/ghcide-test/exe/HighlightTests.hs similarity index 100% rename from ghcide/test/exe/HighlightTests.hs rename to ghcide-test/exe/HighlightTests.hs diff --git a/ghcide/test/exe/IfaceTests.hs b/ghcide-test/exe/IfaceTests.hs similarity index 91% rename from ghcide/test/exe/IfaceTests.hs rename to ghcide-test/exe/IfaceTests.hs index 330d372d73..d7dc533550 100644 --- a/ghcide/test/exe/IfaceTests.hs +++ b/ghcide-test/exe/IfaceTests.hs @@ -50,8 +50,8 @@ ifaceTHTest = testWithExtraFiles "iface-th-test" "TH" $ \dir -> do -- Check that the change propagates to C changeDoc cdoc [TextDocumentContentChangeEvent . InR $ TextDocumentContentChangeWholeDocument cSource] expectDiagnostics - [("THC.hs", [(DiagnosticSeverity_Error, (4, 4), "Couldn't match expected type '()' with actual type 'Bool'")]) - ,("THB.hs", [(DiagnosticSeverity_Warning, (4,1), "Top-level binding")])] + [("THC.hs", [(DiagnosticSeverity_Error, (4, 4), "Couldn't match expected type '()' with actual type 'Bool'", Just "GHC-83865")]) + ,("THB.hs", [(DiagnosticSeverity_Warning, (4,1), "Top-level binding", Just "GHC-38417")])] closeDoc cdoc ifaceErrorTest :: TestTree @@ -65,7 +65,7 @@ ifaceErrorTest = testWithExtraFiles "iface-error-test-1" "recomp" $ \dir -> do bdoc <- createDoc bPath "haskell" bSource expectDiagnostics - [("P.hs", [(DiagnosticSeverity_Warning,(4,0), "Top-level binding")])] -- So what we know P has been loaded + [("P.hs", [(DiagnosticSeverity_Warning,(4,0), "Top-level binding", Just "GHC-38417")])] -- So what we know P has been loaded -- Change y from Int to B changeDoc bdoc [ TextDocumentContentChangeEvent . InR . TextDocumentContentChangeWholeDocument $ @@ -77,7 +77,7 @@ ifaceErrorTest = testWithExtraFiles "iface-error-test-1" "recomp" $ \dir -> do -- Check that the error propagates to A expectDiagnostics - [("A.hs", [(DiagnosticSeverity_Error, (5, 4), "Couldn't match expected type 'Int' with actual type 'Bool'")])] + [("A.hs", [(DiagnosticSeverity_Error, (5, 4), "Couldn't match expected type 'Int' with actual type 'Bool'", Just "GHC-83865")])] -- Check that we wrote the interfaces for B when we saved hidir <- getInterfaceFilesDir bdoc @@ -86,7 +86,7 @@ ifaceErrorTest = testWithExtraFiles "iface-error-test-1" "recomp" $ \dir -> do pdoc <- openDoc pPath "haskell" expectDiagnostics - [("P.hs", [(DiagnosticSeverity_Warning,(4,0), "Top-level binding")]) + [("P.hs", [(DiagnosticSeverity_Warning,(4,0), "Top-level binding", Just "GHC-38417")]) ] changeDoc pdoc [TextDocumentContentChangeEvent . InR . TextDocumentContentChangeWholeDocument $ pSource <> "\nfoo = y :: Bool" ] -- Now in P we have @@ -98,8 +98,8 @@ ifaceErrorTest = testWithExtraFiles "iface-error-test-1" "recomp" $ \dir -> do -- - The diagnostic for A has already been received. 
Ghcide does not repeat diagnostics -- - P is being typechecked with the last successful artifacts for A. expectDiagnostics - [("P.hs", [(DiagnosticSeverity_Warning,(4,0), "Top-level binding")]) - ,("P.hs", [(DiagnosticSeverity_Warning,(6,0), "Top-level binding")]) + [("P.hs", [(DiagnosticSeverity_Warning,(4,0), "Top-level binding", Just "GHC-38417")]) + ,("P.hs", [(DiagnosticSeverity_Warning,(6,0), "Top-level binding", Just "GHC-38417")]) ] expectNoMoreDiagnostics 2 @@ -114,7 +114,7 @@ ifaceErrorTest2 = testWithExtraFiles "iface-error-test-2" "recomp" $ \dir -> do bdoc <- createDoc bPath "haskell" bSource pdoc <- createDoc pPath "haskell" pSource expectDiagnostics - [("P.hs", [(DiagnosticSeverity_Warning,(4,0), "Top-level binding")])] -- So that we know P has been loaded + [("P.hs", [(DiagnosticSeverity_Warning,(4,0), "Top-level binding", Just "GHC-38417")])] -- So that we know P has been loaded -- Change y from Int to B changeDoc bdoc [TextDocumentContentChangeEvent . InR . TextDocumentContentChangeWholeDocument $ @@ -130,9 +130,9 @@ ifaceErrorTest2 = testWithExtraFiles "iface-error-test-2" "recomp" $ \dir -> do expectDiagnostics -- As in the other test, P is being typechecked with the last successful artifacts for A -- (ot thanks to -fdeferred-type-errors) - [("A.hs", [(DiagnosticSeverity_Error, (5, 4), "Couldn't match expected type 'Int' with actual type 'Bool'")]) - ,("P.hs", [(DiagnosticSeverity_Warning, (4, 0), "Top-level binding")]) - ,("P.hs", [(DiagnosticSeverity_Warning, (6, 0), "Top-level binding")]) + [("A.hs", [(DiagnosticSeverity_Error, (5, 4), "Couldn't match expected type 'Int' with actual type 'Bool'", Just "GHC-83865")]) + ,("P.hs", [(DiagnosticSeverity_Warning, (4, 0), "Top-level binding", Just "GHC-38417")]) + ,("P.hs", [(DiagnosticSeverity_Warning, (6, 0), "Top-level binding", Just "GHC-38417")]) ] expectNoMoreDiagnostics 2 @@ -156,7 +156,7 @@ ifaceErrorTest3 = testWithExtraFiles "iface-error-test-3" "recomp" $ \dir -> do -- In this example the interface file for A should not exist (modulo the cache folder) -- Despite that P still type checks, as we can generate an interface file for A thanks to -fdeferred-type-errors expectDiagnostics - [("A.hs", [(DiagnosticSeverity_Error, (5, 4), "Couldn't match expected type 'Int' with actual type 'Bool'")]) - ,("P.hs", [(DiagnosticSeverity_Warning,(4,0), "Top-level binding")]) + [("A.hs", [(DiagnosticSeverity_Error, (5, 4), "Couldn't match expected type 'Int' with actual type 'Bool'", Just "GHC-83865")]) + ,("P.hs", [(DiagnosticSeverity_Warning,(4,0), "Top-level binding", Just "GHC-38417")]) ] expectNoMoreDiagnostics 2 diff --git a/ghcide/test/exe/InitializeResponseTests.hs b/ghcide-test/exe/InitializeResponseTests.hs similarity index 97% rename from ghcide/test/exe/InitializeResponseTests.hs rename to ghcide-test/exe/InitializeResponseTests.hs index 6192a8aeed..f13344e368 100644 --- a/ghcide/test/exe/InitializeResponseTests.hs +++ b/ghcide-test/exe/InitializeResponseTests.hs @@ -33,9 +33,7 @@ tests = withResource acquire release tests where , chk "NO signature help" _signatureHelpProvider Nothing , chk " goto definition" _definitionProvider (Just $ InR (DefinitionOptions (Just False))) , chk " goto type definition" _typeDefinitionProvider (Just $ InR (InL (TypeDefinitionOptions (Just False)))) - -- BUG in lsp-test, this test fails, just change the accepted response - -- for now - , chk "NO goto implementation" _implementationProvider Nothing + , chk " goto implementation" _implementationProvider (Just $ InR (InL 
(ImplementationOptions (Just False)))) , chk " find references" _referencesProvider (Just $ InR (ReferenceOptions (Just False))) , chk " doc highlight" _documentHighlightProvider (Just $ InR (DocumentHighlightOptions (Just False))) , chk " doc symbol" _documentSymbolProvider (Just $ InR (DocumentSymbolOptions (Just False) Nothing)) diff --git a/ghcide/test/exe/LogType.hs b/ghcide-test/exe/LogType.hs similarity index 100% rename from ghcide/test/exe/LogType.hs rename to ghcide-test/exe/LogType.hs diff --git a/ghcide/test/exe/Main.hs b/ghcide-test/exe/Main.hs similarity index 93% rename from ghcide/test/exe/Main.hs rename to ghcide-test/exe/Main.hs index 6c8091840d..c8d927072c 100644 --- a/ghcide/test/exe/Main.hs +++ b/ghcide-test/exe/Main.hs @@ -45,12 +45,13 @@ import DependentFileTest import DiagnosticTests import ExceptionTests import FindDefinitionAndHoverTests +import FindImplementationAndHoverTests import GarbageCollectionTests import HaddockTests import HighlightTests import IfaceTests import InitializeResponseTests -import LogType () +import LogType () import NonLspCommandLine import OpenCloseTest import OutlineTests @@ -58,6 +59,7 @@ import PluginSimpleTests import PositionMappingTests import PreprocessorTests import ReferenceTests +import ResolveTests import RootUriTests import SafeTests import SymlinkTests @@ -78,6 +80,7 @@ main = do , OutlineTests.tests , HighlightTests.tests , FindDefinitionAndHoverTests.tests + , FindImplementationAndHoverTests.tests , PluginSimpleTests.tests , PreprocessorTests.tests , THTests.tests @@ -96,6 +99,7 @@ main = do , AsyncTests.tests , ClientSettingsTests.tests , ReferenceTests.tests + , ResolveTests.tests , GarbageCollectionTests.tests , HieDbRetry.tests , ExceptionTests.tests diff --git a/ghcide/test/exe/NonLspCommandLine.hs b/ghcide-test/exe/NonLspCommandLine.hs similarity index 90% rename from ghcide/test/exe/NonLspCommandLine.hs rename to ghcide-test/exe/NonLspCommandLine.hs index a0940625b5..b2b41071d4 100644 --- a/ghcide/test/exe/NonLspCommandLine.hs +++ b/ghcide-test/exe/NonLspCommandLine.hs @@ -14,6 +14,7 @@ import System.Process.Extra (CreateProcess (cwd), proc, readCreateProcessWithExitCode) import Test.Tasty import Test.Tasty.HUnit +import Config (testDataDir) -- A test to ensure that the command line ghcide workflow stays working @@ -44,7 +45,7 @@ withTempDir f = System.IO.Extra.withTempDir $ canonicalizePath >=> f copyTestDataFiles :: FilePath -> FilePath -> IO () copyTestDataFiles dir prefix = do -- Copy all the test data files to the temporary workspace - testDataFiles <- getDirectoryFilesIO ("ghcide/test/data" prefix) ["//*"] + testDataFiles <- getDirectoryFilesIO (testDataDir prefix) ["//*"] for_ testDataFiles $ \f -> do createDirectoryIfMissing True $ dir takeDirectory f - copyFile ("ghcide/test/data" prefix f) (dir f) + copyFile (testDataDir prefix f) (dir f) diff --git a/ghcide/test/exe/OpenCloseTest.hs b/ghcide-test/exe/OpenCloseTest.hs similarity index 100% rename from ghcide/test/exe/OpenCloseTest.hs rename to ghcide-test/exe/OpenCloseTest.hs diff --git a/ghcide/test/exe/OutlineTests.hs b/ghcide-test/exe/OutlineTests.hs similarity index 100% rename from ghcide/test/exe/OutlineTests.hs rename to ghcide-test/exe/OutlineTests.hs diff --git a/ghcide/test/exe/PluginSimpleTests.hs b/ghcide-test/exe/PluginSimpleTests.hs similarity index 98% rename from ghcide/test/exe/PluginSimpleTests.hs rename to ghcide-test/exe/PluginSimpleTests.hs index 05eb76ba81..c160d2461c 100644 --- a/ghcide/test/exe/PluginSimpleTests.hs +++ 
b/ghcide-test/exe/PluginSimpleTests.hs @@ -41,6 +41,6 @@ tests = expectDiagnostics [ ( "KnownNat.hs", - [(DiagnosticSeverity_Error, (9, 15), "Variable not in scope: c")] + [(DiagnosticSeverity_Error, (9, 15), "Variable not in scope: c", Just "GHC-88464")] ) ] diff --git a/ghcide/test/exe/PositionMappingTests.hs b/ghcide-test/exe/PositionMappingTests.hs similarity index 100% rename from ghcide/test/exe/PositionMappingTests.hs rename to ghcide-test/exe/PositionMappingTests.hs diff --git a/ghcide/test/exe/PreprocessorTests.hs b/ghcide-test/exe/PreprocessorTests.hs similarity index 92% rename from ghcide/test/exe/PreprocessorTests.hs rename to ghcide-test/exe/PreprocessorTests.hs index 1846a31964..24e2e80a10 100644 --- a/ghcide/test/exe/PreprocessorTests.hs +++ b/ghcide-test/exe/PreprocessorTests.hs @@ -22,6 +22,6 @@ tests = testWithDummyPluginEmpty "preprocessor" $ do _ <- createDoc "Testing.hs" "haskell" content expectDiagnostics [ ( "Testing.hs", - [(DiagnosticSeverity_Error, (2, 8), "Variable not in scope: z")] + [(DiagnosticSeverity_Error, (2, 8), "Variable not in scope: z", Nothing)] -- TODO: Why doesn't this work with expected code "GHC-88464"? ) ] diff --git a/ghcide/test/exe/Progress.hs b/ghcide-test/exe/Progress.hs similarity index 100% rename from ghcide/test/exe/Progress.hs rename to ghcide-test/exe/Progress.hs diff --git a/ghcide/test/exe/ReferenceTests.hs b/ghcide-test/exe/ReferenceTests.hs similarity index 85% rename from ghcide/test/exe/ReferenceTests.hs rename to ghcide-test/exe/ReferenceTests.hs index bc69a8fdbf..50c263c4fc 100644 --- a/ghcide/test/exe/ReferenceTests.hs +++ b/ghcide-test/exe/ReferenceTests.hs @@ -1,3 +1,4 @@ +{-# LANGUAGE DataKinds #-} {-# LANGUAGE GADTs #-} {-# LANGUAGE OverloadedStrings #-} @@ -30,13 +31,15 @@ import Ide.PluginUtils (toAbsolute) import Ide.Types import System.FilePath (addTrailingPathSeparator, ()) -import Test.Hls (FromServerMessage' (..), +import Test.Hls (BrokenBehavior (..), + ExpectBroken (..), + FromServerMessage' (..), SMethod (..), TCustomMessage (..), - TNotificationMessage (..)) + TNotificationMessage (..), + unCurrent) import Test.Hls.FileSystem (copyDir) import Test.Tasty -import Test.Tasty.ExpectedFailure import Test.Tasty.HUnit @@ -90,25 +93,25 @@ tests = testGroup "references" , ("Main.hs", 10, 0) ] - , expectFailBecause "references provider does not respect includeDeclaration parameter" $ - referenceTest "works when we ask to exclude declarations" + -- TODO: references provider does not respect includeDeclaration parameter + , referenceTestExpectFail "works when we ask to exclude declarations" ("References.hs", 4, 7) NoExcludeDeclaration - [ ("References.hs", 6, 0) - , ("References.hs", 6, 14) - , ("References.hs", 9, 7) - , ("References.hs", 10, 11) - ] - - , referenceTest "INCORRECTLY returns declarations when we ask to exclude them" - ("References.hs", 4, 7) - NoExcludeDeclaration - [ ("References.hs", 4, 6) - , ("References.hs", 6, 0) - , ("References.hs", 6, 14) - , ("References.hs", 9, 7) - , ("References.hs", 10, 11) - ] + (BrokenIdeal + [ ("References.hs", 6, 0) + , ("References.hs", 6, 14) + , ("References.hs", 9, 7) + , ("References.hs", 10, 11) + ] + ) + (BrokenCurrent + [ ("References.hs", 4, 6) + , ("References.hs", 6, 0) + , ("References.hs", 6, 14) + , ("References.hs", 9, 7) + , ("References.hs", 10, 11) + ] + ) ] , testGroup "can get references to non FOIs" @@ -161,7 +164,7 @@ data IncludeDeclaration = YesIncludeDeclaration | NoExcludeDeclaration -getReferences' :: SymbolLocation -> IncludeDeclaration -> 
Session ([Location]) +getReferences' :: SymbolLocation -> IncludeDeclaration -> Session [Location] getReferences' (file, l, c) includeDeclaration = do doc <- openDoc file "haskell" getReferences doc (Position l c) $ toBool includeDeclaration @@ -204,6 +207,17 @@ referenceTest name loc includeDeclaration expected = where docs = map fst3 expected +referenceTestExpectFail + :: (HasCallStack) + => String + -> SymbolLocation + -> IncludeDeclaration + -> ExpectBroken 'Ideal [SymbolLocation] + -> ExpectBroken 'Current [SymbolLocation] + -> TestTree +referenceTestExpectFail name loc includeDeclaration _ = + referenceTest name loc includeDeclaration . unCurrent + type SymbolLocation = (FilePath, UInt, UInt) expectSameLocations :: (HasCallStack) => FilePath -> [Location] -> [SymbolLocation] -> Assertion diff --git a/ghcide-test/exe/ResolveTests.hs b/ghcide-test/exe/ResolveTests.hs new file mode 100644 index 0000000000..4fc917c56b --- /dev/null +++ b/ghcide-test/exe/ResolveTests.hs @@ -0,0 +1,199 @@ +{-# LANGUAGE DeriveAnyClass #-} +{-# LANGUAGE DerivingStrategies #-} +{-# LANGUAGE OverloadedStrings #-} +module ResolveTests (tests) where + +import Config +import Control.Lens +import Data.Aeson +import qualified Data.Maybe as Maybe +import Data.Text (Text) +import qualified Data.Text as T +import GHC.Generics +import Ide.Logger +import Ide.Types (PluginDescriptor (..), PluginId, + defaultPluginDescriptor, + mkPluginHandler, + mkResolveHandler) +import qualified Language.LSP.Protocol.Lens as J +import qualified Language.LSP.Protocol.Lens as JL +import Language.LSP.Protocol.Message (SomeMethod (..)) +import qualified Language.LSP.Protocol.Message as LSP +import Language.LSP.Protocol.Types +import Language.LSP.Test hiding (resolveCompletion) +import Test.Hls (IdeState, SMethod (..), liftIO, + mkPluginTestDescriptor, + someMethodToMethodString, + waitForAllProgressDone) +import qualified Test.Hls.FileSystem as FS +import Test.Tasty +import Test.Tasty.HUnit + +tests :: TestTree +tests = testGroup "resolve" + [ testGroup "with and without data" resolveRequests + ] + +removeData :: JL.HasData_ s (Maybe a) => s -> s +removeData param = param & JL.data_ .~ Nothing + +simpleTestSession :: TestName -> Session () -> TestTree +simpleTestSession name act = + testCase name $ runWithResolvePlugin (mkIdeTestFs [FS.directCradle ["A.hs"]]) (const act) + +runWithResolvePlugin :: FS.VirtualFileTree -> (FilePath -> Session a) -> IO a +runWithResolvePlugin fs = + testSessionWithPlugin fs + (mkPluginTestDescriptor resolvePluginDescriptor "resolve-plugin") + +data CompletionItemResolveData = CompletionItemResolveData + { completionItemResolve_number :: Int + } + deriving stock (Show, Eq, Ord, Generic) + deriving anyclass (ToJSON, FromJSON) + +data CodeActionResolve = CodeActionResolve + { codeActionResolve_number :: Int + } + deriving stock (Show, Eq, Ord, Generic) + deriving anyclass (ToJSON, FromJSON) + +data CodeLensResolve = CodeLensResolve + { codeLensResolve_number :: Int + } + deriving stock (Show, Eq, Ord, Generic) + deriving anyclass (ToJSON, FromJSON) + +resolvePluginDescriptor :: Recorder (WithPriority Text) -> PluginId -> PluginDescriptor IdeState +resolvePluginDescriptor recorder pid = (defaultPluginDescriptor pid "Test Plugin for Resolve Requests") + { pluginHandlers = mconcat + [ mkResolveHandler LSP.SMethod_CompletionItemResolve $ \_ _ param _ CompletionItemResolveData{} -> pure param + , mkPluginHandler LSP.SMethod_TextDocumentCompletion $ \_ _ _ -> do + pure $ InL + [ defCompletionItem "test item without 
data" + , defCompletionItem "test item with data" + & J.data_ .~ Just (toJSON $ CompletionItemResolveData 100) + ] + , mkResolveHandler LSP.SMethod_CodeActionResolve $ \_ _ param _ CodeActionResolve{} -> pure param + , mkPluginHandler LSP.SMethod_TextDocumentCodeAction $ \_ _ _ -> do + logWith recorder Debug "Why is the handler not called?" + pure $ InL + [ InR $ defCodeAction "test item without data" + , InR $ defCodeAction "test item with data" + & J.data_ .~ Just (toJSON $ CodeActionResolve 70) + ] + , mkResolveHandler LSP.SMethod_CodeLensResolve $ \_ _ param _ CodeLensResolve{} -> pure param + , mkPluginHandler LSP.SMethod_TextDocumentCodeLens $ \_ _ _ -> do + pure $ InL + [ defCodeLens "test item without data" + , defCodeLens "test item with data" + & J.data_ .~ Just (toJSON $ CodeLensResolve 50) + ] + ] + } + +resolveRequests :: [TestTree] +resolveRequests = + [ simpleTestSession "completion resolve" $ do + doc <- createDoc "A.hs" "haskell" $ T.unlines + [ "module A where" + , "data Foo = Foo { foo :: Int }" + , "bar = Foo 4" + ] + waitForAllProgressDone + items <- getCompletions doc (Position 2 7) + let resolveCompItems = filter (\i -> "test item" `T.isPrefixOf` (i ^. J.label)) items + liftIO $ assertEqual "There must be exactly two results" 2 (length resolveCompItems) + -- This must not throw an error. + _ <- traverse (resolveCompletion . removeData) resolveCompItems + pure () + , simpleTestSession "codeAction resolve" $ do + doc <- createDoc "A.hs" "haskell" $ T.unlines + [ "module A where" + , "data Foo = Foo { foo :: Int }" + , "bar = Foo 4" + ] + waitForAllProgressDone + -- Cant use 'getAllCodeActions', as this lsp-test function queries the diagnostic + -- locations and we don't have diagnostics in these tests. + cas <- Maybe.mapMaybe (preview _R) <$> getCodeActions doc (Range (Position 0 0) (Position 1 0)) + let resolveCas = filter (\i -> "test item" `T.isPrefixOf` (i ^. J.title)) cas + liftIO $ assertEqual "There must be exactly two results" 2 (length resolveCas) + -- This must not throw an error. + _ <- traverse (resolveCodeAction . removeData) resolveCas + pure () + , simpleTestSession "codelens resolve" $ do + doc <- createDoc "A.hs" "haskell" $ T.unlines + [ "module A where" + , "data Foo = Foo { foo :: Int }" + , "bar = Foo 4" + ] + waitForAllProgressDone + cd <- getCodeLenses doc + let resolveCodeLenses = filter (\i -> case i ^. J.command of + Just cmd -> "test item" `T.isPrefixOf` (cmd ^. J.title) + Nothing -> False + ) cd + liftIO $ assertEqual "There must be exactly two results" 2 (length resolveCodeLenses) + -- This must not throw an error. + _ <- traverse (resolveCodeLens . 
removeData) resolveCodeLenses + pure () + ] + +defCompletionItem :: T.Text -> CompletionItem +defCompletionItem lbl = CompletionItem + { _label = lbl + , _labelDetails = Nothing + , _kind = Nothing + , _tags = Nothing + , _detail = Nothing + , _documentation = Nothing + , _deprecated = Nothing + , _preselect = Nothing + , _sortText = Nothing + , _filterText = Nothing + , _insertText = Just "insertion" + , _insertTextFormat = Nothing + , _insertTextMode = Nothing + , _textEdit = Nothing + , _textEditText = Nothing + , _additionalTextEdits = Nothing + , _commitCharacters = Nothing + , _command = Nothing + , _data_ = Nothing + } + +defCodeAction :: T.Text -> CodeAction +defCodeAction lbl = CodeAction + { _title = lbl + , _kind = Just CodeActionKind_Refactor + , _diagnostics = Nothing + , _isPreferred = Nothing + , _disabled = Nothing + , _edit = Nothing + , _command = Just $ Command + { _title = lbl + , _command = lbl + , _arguments = Nothing + } + , _data_ = Nothing + } + +defCodeLens :: T.Text -> CodeLens +defCodeLens lbl = CodeLens + { _range = mkRange 0 0 1 0 + , _command = Just $ Command + { _title = lbl + , _command = lbl + , _arguments = Nothing + } + , _data_ = Nothing + } + +-- TODO: expose this from lsp-test +resolveCompletion :: CompletionItem -> Session CompletionItem +resolveCompletion item = do + rsp <- request SMethod_CompletionItemResolve item + case rsp ^. JL.result of + Left err -> liftIO $ assertFailure (someMethodToMethodString (SomeMethod SMethod_CompletionItemResolve) <> " failed with: " <> show err) + Right x -> pure x diff --git a/ghcide/test/exe/RootUriTests.hs b/ghcide-test/exe/RootUriTests.hs similarity index 100% rename from ghcide/test/exe/RootUriTests.hs rename to ghcide-test/exe/RootUriTests.hs diff --git a/ghcide/test/exe/SafeTests.hs b/ghcide-test/exe/SafeTests.hs similarity index 100% rename from ghcide/test/exe/SafeTests.hs rename to ghcide-test/exe/SafeTests.hs diff --git a/ghcide/test/exe/SymlinkTests.hs b/ghcide-test/exe/SymlinkTests.hs similarity index 96% rename from ghcide/test/exe/SymlinkTests.hs rename to ghcide-test/exe/SymlinkTests.hs index ade13bfc41..dda41922f0 100644 --- a/ghcide/test/exe/SymlinkTests.hs +++ b/ghcide-test/exe/SymlinkTests.hs @@ -22,6 +22,6 @@ tests = liftIO $ createFileLink (dir "some_loc" "Sym.hs") (dir "other_loc" "Sym.hs") let fooPath = dir "src" "Foo.hs" _ <- openDoc fooPath "haskell" - expectDiagnosticsWithTags [("src" "Foo.hs", [(DiagnosticSeverity_Warning, (2, 0), "The import of 'Sym' is redundant", Just DiagnosticTag_Unnecessary)])] + expectDiagnosticsWithTags [("src" "Foo.hs", [(DiagnosticSeverity_Warning, (2, 0), "The import of 'Sym' is redundant", Nothing, Just DiagnosticTag_Unnecessary)])] pure () ] diff --git a/ghcide/test/exe/THTests.hs b/ghcide-test/exe/THTests.hs similarity index 94% rename from ghcide/test/exe/THTests.hs rename to ghcide-test/exe/THTests.hs index 42a5650ed7..59b06431f5 100644 --- a/ghcide/test/exe/THTests.hs +++ b/ghcide-test/exe/THTests.hs @@ -43,7 +43,7 @@ tests = ] _ <- createDoc "A.hs" "haskell" sourceA _ <- createDoc "B.hs" "haskell" sourceB - expectDiagnostics [ ( "B.hs", [(DiagnosticSeverity_Error, (6, 29), "Variable not in scope: n")] ) ] + expectDiagnostics [ ( "B.hs", [(DiagnosticSeverity_Error, (6, 29), "Variable not in scope: n", Just "GHC-88464")] ) ] , testWithDummyPluginEmpty "newtype-closure" $ do let sourceA = T.unlines @@ -91,7 +91,7 @@ tests = , "main = $a (putStrLn \"success!\")"] _ <- createDoc "A.hs" "haskell" sourceA _ <- createDoc "B.hs" "haskell" sourceB - 
expectDiagnostics [ ( "B.hs", [(DiagnosticSeverity_Warning, (4, 0), "Top-level binding with no type signature: main :: IO ()")] ) ] + expectDiagnostics [ ( "B.hs", [(DiagnosticSeverity_Warning, (4, 0), "Top-level binding with no type signature: main :: IO ()", Just "GHC-38417")] ) ] , testCase "findsTHnewNameConstructor" $ runWithExtraFiles "THNewName" $ \dir -> do -- This test defines a TH value with the meaning "data A = A" in A.hs @@ -102,7 +102,7 @@ tests = let cPath = dir "C.hs" _ <- openDoc cPath "haskell" - expectDiagnostics [ ( cPath, [(DiagnosticSeverity_Warning, (3, 0), "Top-level binding with no type signature: a :: A")] ) ] + expectDiagnostics [ ( cPath, [(DiagnosticSeverity_Warning, (3, 0), "Top-level binding with no type signature: a :: A", Just "GHC-38417")] ) ] ] @@ -135,7 +135,7 @@ thReloadingTest unboxed = testCase name $ runWithExtraFiles dir $ \dir -> do bdoc <- createDoc bPath "haskell" bSource cdoc <- createDoc cPath "haskell" cSource - expectDiagnostics [("THB.hs", [(DiagnosticSeverity_Warning, (4,1), "Top-level binding")])] + expectDiagnostics [("THB.hs", [(DiagnosticSeverity_Warning, (4,1), "Top-level binding", Just "GHC-38417")])] -- Change th from () to Bool let aSource' = T.unlines $ init (T.lines aSource) ++ ["th_a = [d| a = False|]"] @@ -145,9 +145,9 @@ thReloadingTest unboxed = testCase name $ runWithExtraFiles dir $ \dir -> do -- Check that the change propagates to C expectDiagnostics - [("THC.hs", [(DiagnosticSeverity_Error, (4, 4), "Couldn't match expected type '()' with actual type 'Bool'")]) - ,("THC.hs", [(DiagnosticSeverity_Warning, (6,0), "Top-level binding")]) - ,("THB.hs", [(DiagnosticSeverity_Warning, (4,1), "Top-level bindin")]) + [("THC.hs", [(DiagnosticSeverity_Error, (4, 4), "Couldn't match expected type '()' with actual type 'Bool'", Just "GHC-83865")]) + ,("THC.hs", [(DiagnosticSeverity_Warning, (6,0), "Top-level binding", Just "GHC-38417")]) + ,("THB.hs", [(DiagnosticSeverity_Warning, (4,1), "Top-level bindin", Just "GHC-38417")]) ] closeDoc adoc @@ -170,7 +170,7 @@ thLinkingTest unboxed = testCase name $ runWithExtraFiles dir $ \dir -> do adoc <- createDoc aPath "haskell" aSource bdoc <- createDoc bPath "haskell" bSource - expectDiagnostics [("THB.hs", [(DiagnosticSeverity_Warning, (4,1), "Top-level binding")])] + expectDiagnostics [("THB.hs", [(DiagnosticSeverity_Warning, (4,1), "Top-level binding", Just "GHC-38417")])] let aSource' = T.unlines $ init (init (T.lines aSource)) ++ ["th :: DecsQ", "th = [d| a = False|]"] changeDoc adoc [TextDocumentContentChangeEvent . InR $ TextDocumentContentChangeWholeDocument aSource'] @@ -180,7 +180,7 @@ thLinkingTest unboxed = testCase name $ runWithExtraFiles dir $ \dir -> do changeDoc bdoc [TextDocumentContentChangeEvent . 
InR $ TextDocumentContentChangeWholeDocument bSource'] _ <- waitForDiagnostics - expectCurrentDiagnostics bdoc [(DiagnosticSeverity_Warning, (4,1), "Top-level binding")] + expectCurrentDiagnostics bdoc [(DiagnosticSeverity_Warning, (4,1), "Top-level binding", Just "GHC-38417")] closeDoc adoc closeDoc bdoc diff --git a/ghcide/test/exe/UnitTests.hs b/ghcide-test/exe/UnitTests.hs similarity index 97% rename from ghcide/test/exe/UnitTests.hs rename to ghcide-test/exe/UnitTests.hs index 68e6f3e1f0..b2940ab27f 100644 --- a/ghcide/test/exe/UnitTests.hs +++ b/ghcide-test/exe/UnitTests.hs @@ -51,7 +51,7 @@ tests = do let uri = Uri "file://" uriToFilePath' uri @?= Just "" , testCase "showDiagnostics prints ranges 1-based (like vscode)" $ do - let diag = ("", Diagnostics.ShowDiag, Diagnostic + let diag = Diagnostics.FileDiagnostic "" Diagnostics.ShowDiag Diagnostic { _codeDescription = Nothing , _data_ = Nothing , _range = Range @@ -64,7 +64,7 @@ tests = do , _message = "" , _relatedInformation = Nothing , _tags = Nothing - }) + } Diagnostics.NoStructuredMessage let shown = T.unpack (Diagnostics.showDiagnostics [diag]) let expected = "1:2-3:4" assertBool (unwords ["expected to find range", expected, "in diagnostic", shown]) $ diff --git a/ghcide/test/exe/WatchedFileTests.hs b/ghcide-test/exe/WatchedFileTests.hs similarity index 77% rename from ghcide/test/exe/WatchedFileTests.hs rename to ghcide-test/exe/WatchedFileTests.hs index a4683ecbc4..1c2ded9109 100644 --- a/ghcide/test/exe/WatchedFileTests.hs +++ b/ghcide-test/exe/WatchedFileTests.hs @@ -3,11 +3,14 @@ module WatchedFileTests (tests) where -import Config (testWithDummyPluginEmpty') +import Config (mkIdeTestFs, + testWithDummyPlugin', + testWithDummyPluginEmpty') import Control.Applicative.Combinators import Control.Monad.IO.Class (liftIO) import qualified Data.Aeson as A import qualified Data.Text as T +import qualified Data.Text.IO as T import Development.IDE.Test (expectDiagnostics) import Language.LSP.Protocol.Message import Language.LSP.Protocol.Types hiding @@ -18,6 +21,7 @@ import Language.LSP.Protocol.Types hiding import Language.LSP.Test import System.Directory import System.FilePath +import Test.Hls.FileSystem import Test.Tasty import Test.Tasty.HUnit @@ -60,15 +64,26 @@ tests = testGroup "watched files" ,"a :: ()" ,"a = b" ] - expectDiagnostics [("A.hs", [(DiagnosticSeverity_Error, (3, 4), "Couldn't match expected type '()' with actual type 'Bool'")])] + expectDiagnostics [("A.hs", [(DiagnosticSeverity_Error, (3, 4), "Couldn't match expected type '()' with actual type 'Bool'", Just "GHC-83865")])] -- modify B off editor liftIO $ writeFile (sessionDir </> "B.hs") $ unlines ["module B where" ,"b :: Int" ,"b = 0"] - sendNotification SMethod_WorkspaceDidChangeWatchedFiles $ DidChangeWatchedFilesParams $ + sendNotification SMethod_WorkspaceDidChangeWatchedFiles $ DidChangeWatchedFilesParams [FileEvent (filePathToUri $ sessionDir </> "B.hs") FileChangeType_Changed ] - expectDiagnostics [("A.hs", [(DiagnosticSeverity_Error, (3, 4), "Couldn't match expected type '()' with actual type 'Int'")])] + expectDiagnostics [("A.hs", [(DiagnosticSeverity_Error, (3, 4), "Couldn't match expected type '()' with actual type 'Int'", Just "GHC-83865")])] + , testWithDummyPlugin' "reload HLS after .cabal file changes" (mkIdeTestFs [copyDir ("watched-files" </> "reload")]) $ \sessionDir -> do + let hsFile = "src" </> "MyLib.hs" + _ <- openDoc hsFile "haskell" + expectDiagnostics [(hsFile, [(DiagnosticSeverity_Error, (2, 7), "Could not load module \8216Data.List.Split\8217",
Nothing)])] + let cabalFile = "reload.cabal" + cabalContent <- liftIO $ T.readFile cabalFile + let fix = T.replace "build-depends: base" "build-depends: base, split" + liftIO $ T.writeFile cabalFile (fix cabalContent) + sendNotification SMethod_WorkspaceDidChangeWatchedFiles $ DidChangeWatchedFilesParams + [ FileEvent (filePathToUri $ sessionDir </> cabalFile) FileChangeType_Changed ] + expectDiagnostics [(hsFile, [])] ] ] diff --git a/ghcide/test/manual/lhs/Bird.lhs b/ghcide-test/manual/lhs/Bird.lhs similarity index 100% rename from ghcide/test/manual/lhs/Bird.lhs rename to ghcide-test/manual/lhs/Bird.lhs diff --git a/ghcide/test/manual/lhs/Main.hs b/ghcide-test/manual/lhs/Main.hs similarity index 100% rename from ghcide/test/manual/lhs/Main.hs rename to ghcide-test/manual/lhs/Main.hs diff --git a/ghcide/test/manual/lhs/Test.lhs b/ghcide-test/manual/lhs/Test.lhs similarity index 100% rename from ghcide/test/manual/lhs/Test.lhs rename to ghcide-test/manual/lhs/Test.lhs diff --git a/ghcide/test/preprocessor/Main.hs b/ghcide-test/preprocessor/Main.hs similarity index 100% rename from ghcide/test/preprocessor/Main.hs rename to ghcide-test/preprocessor/Main.hs diff --git a/ghcide/ghcide.cabal b/ghcide/ghcide.cabal index 87db32c2bc..c28c36296c 100644 --- a/ghcide/ghcide.cabal +++ b/ghcide/ghcide.cabal @@ -2,7 +2,7 @@ cabal-version: 3.4 build-type: Simple category: Development name: ghcide -version: 2.8.0.0 +version: 2.10.0.0 license: Apache-2.0 license-file: LICENSE author: Digital Asset and Ghcide contributors @@ -14,15 +14,10 @@ homepage: https://github.com/haskell/haskell-language-server/tree/master/ghcide#readme bug-reports: https://github.com/haskell/haskell-language-server/issues -tested-with: GHC ==9.10.1 || ==9.8.2 || ==9.6.5 || ==9.4.8 || ==9.2.8 +tested-with: GHC ==9.10.1 || ==9.8.2 || ==9.6.5 || ==9.4.8 extra-source-files: CHANGELOG.md README.md - test/data/**/*.cabal - test/data/**/*.hs - test/data/**/*.hs-boot - test/data/**/*.project - test/data/**/*.yaml source-repository head type: git @@ -78,19 +73,21 @@ library , Glob , haddock-library >=1.8 && <1.12 , hashable - , hie-bios ^>=0.14.0 + , hie-bios ^>=0.15.0 , hie-compat ^>=0.3.0.0 - , hiedb ^>= 0.6.0.0 - , hls-graph == 2.8.0.0 - , hls-plugin-api == 2.8.0.0 + , hiedb ^>= 0.6.0.2 + , hls-graph == 2.10.0.0 + , hls-plugin-api == 2.10.0.0 , implicit-hie >= 0.1.4.0 && < 0.1.5 , lens + , lens-aeson , list-t , lsp ^>=2.7 , lsp-types ^>=2.3 , mtl , opentelemetry >=0.6.1 , optparse-applicative + , os-string , parallel , prettyprinter >=1.7 , prettyprinter-ansi-terminal @@ -150,7 +147,9 @@ library Development.IDE.GHC.Compat Development.IDE.GHC.Compat.Core Development.IDE.GHC.Compat.CmdLine + Development.IDE.GHC.Compat.Driver Development.IDE.GHC.Compat.Env + Development.IDE.GHC.Compat.Error Development.IDE.GHC.Compat.Iface Development.IDE.GHC.Compat.Logger Development.IDE.GHC.Compat.Outputable @@ -209,20 +208,6 @@ library ghc-options: -Werror -flag test-exe - description: Build the ghcide-test-preprocessor executable - default: True - -executable ghcide-test-preprocessor - import: warnings - default-language: GHC2021 - hs-source-dirs: test/preprocessor - main-is: Main.hs - build-depends: base >=4 && <5 - - if !flag(test-exe) - buildable: False - flag executable description: Build the ghcide executable default: True diff --git a/ghcide/session-loader/Development/IDE/Session.hs b/ghcide/session-loader/Development/IDE/Session.hs index 613052edf1..a2dbbb1e15 100644 --- a/ghcide/session-loader/Development/IDE/Session.hs +++ 
b/ghcide/session-loader/Development/IDE/Session.hs @@ -8,7 +8,7 @@ module Development.IDE.Session (SessionLoadingOptions(..) ,CacheDirs(..) ,loadSessionWithOptions - ,setInitialDynFlags + ,getInitialGhcLibDirDefault ,getHieDbLoc ,retryOnSqliteBusy ,retryOnException @@ -62,8 +62,7 @@ import Development.IDE.Graph (Action) import qualified Development.IDE.Session.Implicit as GhcIde import Development.IDE.Types.Diagnostics import Development.IDE.Types.Exports -import Development.IDE.Types.HscEnvEq (HscEnvEq, newHscEnvEq, - newHscEnvEqPreserveImportPaths) +import Development.IDE.Types.HscEnvEq (HscEnvEq, newHscEnvEq) import Development.IDE.Types.Location import Development.IDE.Types.Options import GHC.ResponseFile @@ -101,36 +100,32 @@ import Data.Foldable (for_) import Data.HashMap.Strict (HashMap) import Data.HashSet (HashSet) import qualified Data.HashSet as Set +import qualified Data.Set as OS import Database.SQLite.Simple import Development.IDE.Core.Tracing (withTrace) import Development.IDE.Core.WorkerThread (awaitRunInThread, withWorkerQueue) +import qualified Development.IDE.GHC.Compat.Util as Compat import Development.IDE.Session.Diagnostics (renderCradleError) import Development.IDE.Types.Shake (WithHieDb, WithHieDbShield (..), toNoFileKey) +import GHC.Data.Graph.Directed import HieDb.Create import HieDb.Types -import HieDb.Utils import Ide.PluginUtils (toAbsolute) import qualified System.Random as Random import System.Random (RandomGen) import Text.ParserCombinators.ReadP (readP_to_S) - --- See Note [Guidelines For Using CPP In GHCIDE Import Statements] - -#if MIN_VERSION_ghc(9,3,0) -import qualified Data.Set as OS -import qualified Development.IDE.GHC.Compat.Util as Compat -import GHC.Data.Graph.Directed - -import GHC.Data.Bag import GHC.Driver.Env (hsc_all_home_unit_ids) import GHC.Driver.Errors.Types import GHC.Types.Error (errMsgDiagnostic, singleMessage) import GHC.Unit.State + +#if MIN_VERSION_ghc(9,13,0) +import GHC.Driver.Make (checkHomeUnitsClosed) #endif data Log @@ -245,13 +240,6 @@ data SessionLoadingOptions = SessionLoadingOptions , getCacheDirs :: String -> [String] -> IO CacheDirs -- | Return the GHC lib dir to use for the 'unsafeGlobalDynFlags' , getInitialGhcLibDir :: Recorder (WithPriority Log) -> FilePath -> IO (Maybe LibDir) -#if !MIN_VERSION_ghc(9,3,0) - , fakeUid :: UnitId - -- ^ unit id used to tag the internal component built by ghcide - -- To reuse external interface files the unit ids must match, - -- thus make sure to build them with `--this-unit-id` set to the - -- same value as the ghcide fake uid -#endif } instance Default SessionLoadingOptions where @@ -260,9 +248,6 @@ instance Default SessionLoadingOptions where ,loadCradle = loadWithImplicitCradle ,getCacheDirs = getCacheDirsDefault ,getInitialGhcLibDir = getInitialGhcLibDirDefault -#if !MIN_VERSION_ghc(9,3,0) - ,fakeUid = Compat.toUnitId (Compat.stringToUnit "main") -#endif } -- | Find the cradle for a given 'hie.yaml' configuration. 
@@ -303,15 +288,6 @@ getInitialGhcLibDirDefault recorder rootDir = do logWith recorder Warning LogGetInitialGhcLibDirDefaultCradleNone pure Nothing --- | Sets `unsafeGlobalDynFlags` on using the hie-bios cradle and returns the GHC libdir -setInitialDynFlags :: Recorder (WithPriority Log) -> FilePath -> SessionLoadingOptions -> IO (Maybe LibDir) -setInitialDynFlags recorder rootDir SessionLoadingOptions{..} = do - libdir <- getInitialGhcLibDir recorder rootDir - dynFlags <- mapM dynFlagsForPrinting libdir - logWith recorder Debug LogSettingInitialDynFlags - mapM_ setUnsafeGlobalDynFlags dynFlags - pure libdir - -- | If the action throws exception that satisfies predicate then we sleep for -- a duration determined by the random exponential backoff formula, -- `uniformRandom(0, min (maxDelay, (baseDelay * 2) ^ retryAttempt))`, and try @@ -540,26 +516,17 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do _inplace = map rawComponentUnitId $ NE.toList all_deps all_deps' <- forM all_deps $ \RawComponentInfo{..} -> do - -- Remove all inplace dependencies from package flags for - -- components in this HscEnv -#if MIN_VERSION_ghc(9,3,0) - let (df2, uids) = (rawComponentDynFlags, []) -#else - let (df2, uids) = _removeInplacePackages fakeUid _inplace rawComponentDynFlags -#endif let prefix = show rawComponentUnitId -- See Note [Avoiding bad interface files] - let hscComponents = sort $ map show uids - cacheDirOpts = hscComponents ++ componentOptions opts + let cacheDirOpts = componentOptions opts cacheDirs <- liftIO $ getCacheDirs prefix cacheDirOpts - processed_df <- setCacheDirs recorder cacheDirs df2 + processed_df <- setCacheDirs recorder cacheDirs rawComponentDynFlags -- The final component information, mostly the same but the DynFlags don't -- contain any packages which are also loaded -- into the same component. pure $ ComponentInfo { componentUnitId = rawComponentUnitId , componentDynFlags = processed_df - , componentInternalUnits = uids , componentTargets = rawComponentTargets , componentFP = rawComponentFP , componentCOptions = rawComponentCOptions @@ -585,8 +552,8 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do -- For GHC's supporting multi component sessions, we create a shared -- HscEnv but set the active component accordingly hscEnv <- emptyHscEnv ideNc _libDir - let new_cache = newComponentCache recorder optExtensions hieYaml _cfp hscEnv - all_target_details <- new_cache old_deps new_deps rootDir + let new_cache = newComponentCache recorder optExtensions _cfp hscEnv + all_target_details <- new_cache old_deps new_deps this_dep_info <- getDependencyInfo $ maybeToList hieYaml let (all_targets, this_flags_map, this_options) @@ -599,10 +566,11 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do this_flags = (this_error_env, this_dep_info) this_error_env = ([this_error], Nothing) this_error = ideErrorWithSource (Just "cradle") (Just DiagnosticSeverity_Error) _cfp - $ T.unlines - [ "No cradle target found. Is this file listed in the targets of your cradle?" - , "If you are using a .cabal file, please ensure that this module is listed in either the exposed-modules or other-modules section" - ] + (T.unlines + [ "No cradle target found. Is this file listed in the targets of your cradle?" 
+ , "If you are using a .cabal file, please ensure that this module is listed in either the exposed-modules or other-modules section" + ]) + Nothing void $ modifyVar' fileToFlags $ Map.insert hieYaml this_flags_map void $ modifyVar' filesMap $ flip HM.union (HM.fromList (map ((,hieYaml) . fst) $ concatMap toFlagsMap all_targets)) @@ -730,7 +698,15 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do let ncfp = toNormalizedFilePath' (toAbsolutePath file) cachedHieYamlLocation <- HM.lookup ncfp <$> readVar filesMap hieYaml <- cradleLoc file - sessionOpts (join cachedHieYamlLocation <|> hieYaml, file) `Safe.catch` \e -> + let + -- Each one of deps will be registered as a FileSystemWatcher in the GhcSession action + -- so that we can get a workspace/didChangeWatchedFiles notification when a dep changes. + -- The GlobPattern of a FileSystemWatcher can be absolute or relative. + -- We use the absolute one because it is supported by more LSP clients. + -- Here we make sure deps are absolute and later we use those absolute deps as GlobPattern. + absolutePathsCradleDeps (eq, deps) + = (eq, fmap toAbsolutePath deps) + (absolutePathsCradleDeps <$> sessionOpts (join cachedHieYamlLocation <|> hieYaml, file)) `Safe.catch` \e -> return (([renderPackageSetupException file e], Nothing), maybe [] pure hieYaml) returnWithVersion $ \file -> do @@ -771,24 +747,13 @@ cradleToOptsAndLibDir recorder loadConfig cradle file old_fps = do PreferSingleComponentLoading -> LoadFile PreferMultiComponentLoading -> LoadWithContext old_fps -#if MIN_VERSION_ghc(9,3,0) emptyHscEnv :: NameCache -> FilePath -> IO HscEnv -#else -emptyHscEnv :: IORef NameCache -> FilePath -> IO HscEnv -#endif emptyHscEnv nc libDir = do -- We call setSessionDynFlags so that the loader is initialised -- We need to do this before we call initUnits. env <- runGhc (Just libDir) $ getSessionDynFlags >>= setSessionDynFlags >> getSession - -- On GHC 9.2 calling setSessionDynFlags caches the unit databases - -- for an empty environment. This prevents us from reading the - -- package database subsequently. So clear the unit db cache in - -- hsc_unit_dbs pure $ setNameCache nc (hscSetFlags ((hsc_dflags env){useUnicode = True }) env) -#if !MIN_VERSION_ghc(9,3,0) - {hsc_unit_dbs = Nothing} -#endif data TargetDetails = TargetDetails { @@ -826,23 +791,23 @@ toFlagsMap TargetDetails{..} = [ (l, (targetEnv, targetDepends)) | l <- targetLocations] -#if MIN_VERSION_ghc(9,3,0) setNameCache :: NameCache -> HscEnv -> HscEnv -#else -setNameCache :: IORef NameCache -> HscEnv -> HscEnv -#endif setNameCache nc hsc = hsc { hsc_NC = nc } -#if MIN_VERSION_ghc(9,3,0) +#if MIN_VERSION_ghc(9,13,0) +-- Moved back to implementation in GHC. +checkHomeUnitsClosed' :: UnitEnv -> OS.Set UnitId -> [DriverMessages] +checkHomeUnitsClosed' ue _ = checkHomeUnitsClosed ue +#elif MIN_VERSION_ghc(9,3,0) -- This function checks the important property that if both p and q are home units -- then any dependency of p, which transitively depends on q is also a home unit. 
-- GHC had an implementation of this function, but it was horribly inefficient -- We should move back to the GHC implementation on compilers where -- https://gitlab.haskell.org/ghc/ghc/-/merge_requests/12162 is included -checkHomeUnitsClosed' :: UnitEnv -> OS.Set UnitId -> [DriverMessages] +checkHomeUnitsClosed' :: UnitEnv -> OS.Set UnitId -> Maybe (Compat.MsgEnvelope DriverMessage) checkHomeUnitsClosed' ue home_id_set - | OS.null bad_unit_ids = [] - | otherwise = [singleMessage $ GHC.mkPlainErrorMsgEnvelope rootLoc $ DriverHomePackagesNotClosed (OS.toList bad_unit_ids)] + | OS.null bad_unit_ids = Nothing + | otherwise = Just (GHC.mkPlainErrorMsgEnvelope rootLoc $ DriverHomePackagesNotClosed (OS.toList bad_unit_ids)) where bad_unit_ids = upwards_closure OS.\\ home_id_set rootLoc = mkGeneralSrcSpan (Compat.fsLit "<command line>") @@ -865,7 +830,7 @@ checkHomeUnitsClosed' ue home_id_set where go rest this this_uis = plusUniqMap_C OS.union - (addToUniqMap_C OS.union external_depends this (OS.fromList $ this_deps)) + (addToUniqMap_C OS.union external_depends this (OS.fromList this_deps)) rest where external_depends = mapUniqMap (OS.fromList . unitDepends) @@ -899,14 +864,12 @@ checkHomeUnitsClosed' ue home_id_set newComponentCache :: Recorder (WithPriority Log) -> [String] -- ^ File extensions to consider - -> Maybe FilePath -- ^ Path to cradle -> NormalizedFilePath -- ^ Path to file that caused the creation of this component -> HscEnv -- ^ An empty HscEnv -> [ComponentInfo] -- ^ New components to be loaded -> [ComponentInfo] -- ^ old, already existing components - -> FilePath -- ^ root dir, see Note [Root Directory] -> IO [ [TargetDetails] ] -newComponentCache recorder exts cradlePath _cfp hsc_env old_cis new_cis dir = do +newComponentCache recorder exts _cfp hsc_env old_cis new_cis = do let cis = Map.unionWith unionCIs (mkMap new_cis) (mkMap old_cis) -- When we have multiple components with the same uid, -- prefer the new one over the old. @@ -920,18 +883,22 @@ newComponentCache recorder exts cradlePath _cfp hsc_env old_cis new_cis dir = do hscEnv' <- -- Set up a multi component session with the other units on GHC 9.4 Compat.initUnits dfs hsc_env -#if MIN_VERSION_ghc(9,3,0) - let closure_errs = checkHomeUnitsClosed' (hsc_unit_env hscEnv') (hsc_all_home_unit_ids hscEnv') - multi_errs = map (ideErrorWithSource (Just "cradle") (Just DiagnosticSeverity_Warning) _cfp . T.pack . Compat.printWithoutUniques) closure_errs + let closure_errs = maybeToList $ checkHomeUnitsClosed' (hsc_unit_env hscEnv') (hsc_all_home_unit_ids hscEnv') + closure_err_to_multi_err err = + ideErrorWithSource + (Just "cradle") (Just DiagnosticSeverity_Warning) _cfp + (T.pack (Compat.printWithoutUniques (singleMessage err))) +#if MIN_VERSION_ghc(9,5,0) + (Just (fmap GhcDriverMessage err)) +#else + Nothing +#endif + multi_errs = map closure_err_to_multi_err closure_errs bad_units = OS.fromList $ concat $ do - x <- bagToList $ mapBag errMsgDiagnostic $ unionManyBags $ map Compat.getMessages closure_errs + x <- map errMsgDiagnostic closure_errs DriverHomePackagesNotClosed us <- pure x pure us isBad ci = (homeUnitId_ (componentDynFlags ci)) `OS.member` bad_units -#else - let isBad = const False - multi_errs = [] -#endif -- Whenever we spin up a session on Linux, dynamically load libm.so.6 -- in. 
We need this in case the binary is statically linked, in which -- case the interactive session will fail when trying to load @@ -951,26 +918,12 @@ newComponentCache recorder exts cradlePath _cfp hsc_env old_cis new_cis dir = do forM (Map.elems cis) $ \ci -> do let df = componentDynFlags ci - let createHscEnvEq = maybe newHscEnvEqPreserveImportPaths (newHscEnvEq dir) cradlePath thisEnv <- do -#if MIN_VERSION_ghc(9,3,0) -- In GHC 9.4 we have multi component support, and we have initialised all the units -- above. -- We just need to set the current unit here pure $ hscSetActiveUnitId (homeUnitId_ df) hscEnv' -#else - -- This initializes the units for GHC 9.2 - -- Add the options for the current component to the HscEnv - -- We want to call `setSessionDynFlags` instead of `hscSetFlags` - -- because `setSessionDynFlags` also initializes the package database, - -- which we need for any changes to the package flags in the dynflags - -- to be visible. - -- See #2693 - evalGhcEnv hscEnv' $ do - _ <- setSessionDynFlags df - getSession -#endif - henv <- createHscEnvEq thisEnv (zip uids dfs) + henv <- newHscEnvEq thisEnv let targetEnv = (if isBad ci then multi_errs else [], Just henv) targetDepends = componentDependencyInfo ci logWith recorder Debug $ LogNewComponentCache (targetEnv, targetDepends) @@ -1076,10 +1029,6 @@ data ComponentInfo = ComponentInfo -- | Processed DynFlags. Does not contain inplace packages such as local -- libraries. Can be used to actually load this Component. , componentDynFlags :: DynFlags - -- | Internal units, such as local libraries, that this component - -- is loaded with. These have been extracted from the original - -- ComponentOptions. - , componentInternalUnits :: [UnitId] -- | All targets of this components. , componentTargets :: [GHC.Target] -- | Filepath which caused the creation of this component @@ -1201,7 +1150,6 @@ setOptions cfp (ComponentOptions theOpts compRoot _) dflags rootDir = do initOne this_opts = do (dflags', targets') <- addCmdOpts this_opts dflags let dflags'' = -#if MIN_VERSION_ghc(9,3,0) case unitIdString (homeUnitId_ dflags') of -- cabal uses main for the unit id of all executable packages -- This makes multi-component sessions confused about what @@ -1210,13 +1158,10 @@ setOptions cfp (ComponentOptions theOpts compRoot _) dflags rootDir = do -- This works because there won't be any dependencies on the -- executable unit. "main" -> - let hash = B.unpack $ B16.encode $ H.finalize $ H.updates H.init (map B.pack $ this_opts) + let hash = B.unpack $ B16.encode $ H.finalize $ H.updates H.init (map B.pack this_opts) hashed_uid = Compat.toUnitId (Compat.stringToUnit ("main-"++hash)) in setHomeUnitId_ hashed_uid dflags' _ -> dflags' -#else - dflags' -#endif let targets = makeTargetsAbsolute root targets' root = case workingDirectory dflags'' of @@ -1236,14 +1181,7 @@ setOptions cfp (ComponentOptions theOpts compRoot _) dflags rootDir = do Compat.setUpTypedHoles $ makeDynFlagsAbsolute compRoot -- makeDynFlagsAbsolute already accounts for workingDirectory dflags'' - -- initPackages parses the -package flags and - -- sets up the visibility for each component. - -- Throws if a -package flag cannot be satisfied. 
- -- This only works for GHC <9.2 - -- For GHC >= 9.2, we need to modify the unit env in the hsc_dflags, which - -- is done later in newComponentCache - final_flags <- liftIO $ wrapPackageSetupException $ Compat.oldInitUnits dflags''' - return (final_flags, targets) + return (dflags''', targets) setIgnoreInterfacePragmas :: DynFlags -> DynFlags setIgnoreInterfacePragmas df = @@ -1289,12 +1227,6 @@ data PackageSetupException instance Exception PackageSetupException --- | Wrap any exception as a 'PackageSetupException' -wrapPackageSetupException :: IO a -> IO a -wrapPackageSetupException = handleAny $ \case - e | Just (pkgE :: PackageSetupException) <- fromException e -> throwIO pkgE - e -> (throwIO . PackageSetupException . show) e - showPackageSetupException :: PackageSetupException -> String showPackageSetupException GhcVersionMismatch{..} = unwords ["ghcide compiled against GHC" @@ -1308,6 +1240,6 @@ showPackageSetupException PackageSetupException{..} = unwords , "failed to load packages:", message <> "." , "\nPlease ensure that ghcide is compiled with the same GHC installation as the project."] -renderPackageSetupException :: FilePath -> PackageSetupException -> (NormalizedFilePath, ShowDiagnostic, Diagnostic) +renderPackageSetupException :: FilePath -> PackageSetupException -> FileDiagnostic renderPackageSetupException fp e = - ideErrorWithSource (Just "cradle") (Just DiagnosticSeverity_Error) (toNormalizedFilePath' fp) (T.pack $ showPackageSetupException e) + ideErrorWithSource (Just "cradle") (Just DiagnosticSeverity_Error) (toNormalizedFilePath' fp) (T.pack $ showPackageSetupException e) Nothing diff --git a/ghcide/session-loader/Development/IDE/Session/Diagnostics.hs b/ghcide/session-loader/Development/IDE/Session/Diagnostics.hs index a8e35e5965..2890c87966 100644 --- a/ghcide/session-loader/Development/IDE/Session/Diagnostics.hs +++ b/ghcide/session-loader/Development/IDE/Session/Diagnostics.hs @@ -2,6 +2,7 @@ module Development.IDE.Session.Diagnostics where import Control.Applicative +import Control.Lens import Control.Monad import qualified Data.Aeson as Aeson import Data.List @@ -27,13 +28,17 @@ data CradleErrorDetails = Depicts the cradle error in a user-friendly way. 
-} renderCradleError :: CradleError -> Cradle a -> NormalizedFilePath -> FileDiagnostic -renderCradleError (CradleError deps _ec ms) cradle nfp - | HieBios.isCabalCradle cradle = - let (fp, showDiag, diag) = ideErrorWithSource (Just "cradle") (Just DiagnosticSeverity_Error) nfp $ T.unlines $ map T.pack userFriendlyMessage in - (fp, showDiag, diag{_data_ = Just $ Aeson.toJSON CradleErrorDetails{cabalProjectFiles=absDeps}}) - | otherwise = ideErrorWithSource (Just "cradle") (Just DiagnosticSeverity_Error) nfp $ T.unlines $ map T.pack userFriendlyMessage +renderCradleError cradleError cradle nfp = + let noDetails = + ideErrorWithSource (Just "cradle") (Just DiagnosticSeverity_Error) nfp (T.unlines $ map T.pack userFriendlyMessage) Nothing + in + if HieBios.isCabalCradle cradle + then noDetails & fdLspDiagnosticL %~ \diag -> diag{_data_ = Just $ Aeson.toJSON CradleErrorDetails{cabalProjectFiles=absDeps}} + else noDetails where - absDeps = fmap (cradleRootDir cradle </>) deps + ms = cradleErrorStderr cradleError + + absDeps = fmap (cradleRootDir cradle </>) (cradleErrorDependencies cradleError) userFriendlyMessage :: [String] userFriendlyMessage | HieBios.isCabalCradle cradle = fromMaybe ms $ fileMissingMessage <|> mkUnknownModuleMessage diff --git a/ghcide/src/Development/IDE.hs b/ghcide/src/Development/IDE.hs index 547ac9a115..8741c98c37 100644 --- a/ghcide/src/Development/IDE.hs +++ b/ghcide/src/Development/IDE.hs @@ -10,7 +10,9 @@ import Development.IDE.Core.Actions as X (getAtPoint, getDefinition, getTypeDefinition) import Development.IDE.Core.FileExists as X (getFileExists) -import Development.IDE.Core.FileStore as X (getFileContents) +import Development.IDE.Core.FileStore as X (getFileContents, + getFileModTimeContents, + getUriContents) import Development.IDE.Core.IdeConfiguration as X (IdeConfiguration (..), isWorkspaceFile) import Development.IDE.Core.OfInterest as X (getFilesOfInterestUntracked) @@ -50,7 +52,6 @@ import Development.IDE.Graph as X (Action, RuleResult, import Development.IDE.Plugin as X import Development.IDE.Types.Diagnostics as X import Development.IDE.Types.HscEnvEq as X (HscEnvEq (..), - hscEnv, - hscEnvWithImportPaths) + hscEnv) import Development.IDE.Types.Location as X import Ide.Logger as X diff --git a/ghcide/src/Development/IDE/Core/Actions.hs b/ghcide/src/Development/IDE/Core/Actions.hs index 4c808f21d9..0d55a73120 100644 --- a/ghcide/src/Development/IDE/Core/Actions.hs +++ b/ghcide/src/Development/IDE/Core/Actions.hs @@ -3,6 +3,7 @@ module Development.IDE.Core.Actions ( getAtPoint , getDefinition , getTypeDefinition +, getImplementationDefinition , highlightAtPoint , refsAtPoint , workspaceSymbols @@ -66,56 +67,68 @@ getAtPoint file pos = runMaybeT $ do !pos' <- MaybeT (return $ fromCurrentPosition mapping pos) MaybeT $ liftIO $ fmap (first (toCurrentRange mapping =<<)) <$> AtPoint.atPoint opts hf dkMap env pos' --- | For each Location, determine if we have the PositionMapping --- for the correct file. If not, get the correct position mapping --- and then apply the position mapping to the location. -toCurrentLocations +-- | Converts locations in the source code to their current positions, +-- taking into account changes that may have occurred due to edits. 
+toCurrentLocation :: PositionMapping -> NormalizedFilePath - -> [Location] - -> IdeAction [Location] -toCurrentLocations mapping file = mapMaybeM go + -> Location + -> IdeAction (Maybe Location) +toCurrentLocation mapping file (Location uri range) = + -- The Location we are going to might be in a different + -- file than the one we are calling gotoDefinition from. + -- So we check that the location file matches the file + -- we are in. + if nUri == normalizedFilePathToUri file + -- The Location matches the file, so use the PositionMapping + -- we have. + then pure $ Location uri <$> toCurrentRange mapping range + -- The Location does not match the file, so get the correct + -- PositionMapping and use that instead. + else do + otherLocationMapping <- fmap (fmap snd) $ runMaybeT $ do + otherLocationFile <- MaybeT $ pure $ uriToNormalizedFilePath nUri + useWithStaleFastMT GetHieAst otherLocationFile + pure $ Location uri <$> (flip toCurrentRange range =<< otherLocationMapping) where - go :: Location -> IdeAction (Maybe Location) - go (Location uri range) = - -- The Location we are going to might be in a different - -- file than the one we are calling gotoDefinition from. - -- So we check that the location file matches the file - -- we are in. - if nUri == normalizedFilePathToUri file - -- The Location matches the file, so use the PositionMapping - -- we have. - then pure $ Location uri <$> toCurrentRange mapping range - -- The Location does not match the file, so get the correct - -- PositionMapping and use that instead. - else do - otherLocationMapping <- fmap (fmap snd) $ runMaybeT $ do - otherLocationFile <- MaybeT $ pure $ uriToNormalizedFilePath nUri - useWithStaleFastMT GetHieAst otherLocationFile - pure $ Location uri <$> (flip toCurrentRange range =<< otherLocationMapping) - where - nUri :: NormalizedUri - nUri = toNormalizedUri uri + nUri :: NormalizedUri + nUri = toNormalizedUri uri -- | Goto Definition. 
-getDefinition :: NormalizedFilePath -> Position -> IdeAction (Maybe [Location]) +getDefinition :: NormalizedFilePath -> Position -> IdeAction (Maybe [(Location, Identifier)]) getDefinition file pos = runMaybeT $ do ide@ShakeExtras{ withHieDb, hiedbWriter } <- ask opts <- liftIO $ getIdeOptionsIO ide - (HAR _ hf _ _ _, mapping) <- useWithStaleFastMT GetHieAst file + (hf, mapping) <- useWithStaleFastMT GetHieAst file (ImportMap imports, _) <- useWithStaleFastMT GetImportMap file !pos' <- MaybeT (pure $ fromCurrentPosition mapping pos) - locations <- AtPoint.gotoDefinition withHieDb (lookupMod hiedbWriter) opts imports hf pos' - MaybeT $ Just <$> toCurrentLocations mapping file locations + locationsWithIdentifier <- AtPoint.gotoDefinition withHieDb (lookupMod hiedbWriter) opts imports hf pos' + mapMaybeM (\(location, identifier) -> do + fixedLocation <- MaybeT $ toCurrentLocation mapping file location + pure $ Just (fixedLocation, identifier) + ) locationsWithIdentifier + -getTypeDefinition :: NormalizedFilePath -> Position -> IdeAction (Maybe [Location]) +getTypeDefinition :: NormalizedFilePath -> Position -> IdeAction (Maybe [(Location, Identifier)]) getTypeDefinition file pos = runMaybeT $ do ide@ShakeExtras{ withHieDb, hiedbWriter } <- ask opts <- liftIO $ getIdeOptionsIO ide (hf, mapping) <- useWithStaleFastMT GetHieAst file !pos' <- MaybeT (return $ fromCurrentPosition mapping pos) - locations <- AtPoint.gotoTypeDefinition withHieDb (lookupMod hiedbWriter) opts hf pos' - MaybeT $ Just <$> toCurrentLocations mapping file locations + locationsWithIdentifier <- AtPoint.gotoTypeDefinition withHieDb (lookupMod hiedbWriter) opts hf pos' + mapMaybeM (\(location, identifier) -> do + fixedLocation <- MaybeT $ toCurrentLocation mapping file location + pure $ Just (fixedLocation, identifier) + ) locationsWithIdentifier + +getImplementationDefinition :: NormalizedFilePath -> Position -> IdeAction (Maybe [Location]) +getImplementationDefinition file pos = runMaybeT $ do + ide@ShakeExtras{ withHieDb, hiedbWriter } <- ask + opts <- liftIO $ getIdeOptionsIO ide + (hf, mapping) <- useWithStaleFastMT GetHieAst file + !pos' <- MaybeT (pure $ fromCurrentPosition mapping pos) + locs <- AtPoint.gotoImplementation withHieDb (lookupMod hiedbWriter) opts hf pos' + traverse (MaybeT . 
toCurrentLocation mapping file) locs highlightAtPoint :: NormalizedFilePath -> Position -> IdeAction (Maybe [DocumentHighlight]) highlightAtPoint file pos = runMaybeT $ do diff --git a/ghcide/src/Development/IDE/Core/Compile.hs b/ghcide/src/Development/IDE/Core/Compile.hs index af1c97a457..ed5e14a70a 100644 --- a/ghcide/src/Development/IDE/Core/Compile.hs +++ b/ghcide/src/Development/IDE/Core/Compile.hs @@ -36,90 +36,90 @@ module Development.IDE.Core.Compile , sourceTypecheck , sourceParser , shareUsages + , setNonHomeFCHook ) where -import Control.Concurrent.Extra -import Control.Concurrent.STM.Stats hiding (orElse) -import Control.DeepSeq (NFData (..), force, rnf) -import Control.Exception (evaluate) +import Control.Concurrent.STM.Stats hiding (orElse) +import Control.DeepSeq (NFData (..), force, + rnf) +import Control.Exception (evaluate) import Control.Exception.Safe -import Control.Lens hiding (List, pre, (<.>)) +import Control.Lens hiding (List, pre, + (<.>)) import Control.Monad.Extra import Control.Monad.IO.Class import Control.Monad.Trans.Except -import qualified Control.Monad.Trans.State.Strict as S -import Data.Aeson (toJSON) -import Data.Bifunctor (first, second) +import qualified Control.Monad.Trans.State.Strict as S +import Data.Aeson (toJSON) +import Data.Bifunctor (first, second) import Data.Binary -import qualified Data.ByteString as BS +import qualified Data.ByteString as BS import Data.Coerce -import qualified Data.DList as DL +import qualified Data.DList as DL import Data.Functor import Data.Generics.Aliases import Data.Generics.Schemes -import qualified Data.HashMap.Strict as HashMap -import Data.IntMap (IntMap) +import qualified Data.HashMap.Strict as HashMap +import Data.IntMap (IntMap) import Data.IORef import Data.List.Extra -import qualified Data.Map.Strict as Map +import qualified Data.List.NonEmpty as NE +import qualified Data.Map.Strict as Map import Data.Maybe -import Data.Proxy (Proxy (Proxy)) -import qualified Data.Text as T -import Data.Time (UTCTime (..)) -import Data.Tuple.Extra (dupe) -import Data.Unique as Unique +import Data.Proxy (Proxy (Proxy)) +import qualified Data.Text as T +import Data.Time (UTCTime (..), getCurrentTime) +import Data.Tuple.Extra (dupe) import Debug.Trace -import Development.IDE.Core.FileStore (resetInterfaceStore) +import Development.IDE.Core.FileStore (resetInterfaceStore) import Development.IDE.Core.Preprocessor +import Development.IDE.Core.ProgressReporting (progressUpdate) import Development.IDE.Core.RuleTypes import Development.IDE.Core.Shake -import Development.IDE.Core.Tracing (withTrace) -import Development.IDE.GHC.Compat hiding (loadInterface, - parseHeader, parseModule, - tcRnModule, writeHieFile, assert) -import qualified Development.IDE.GHC.Compat as Compat -import qualified Development.IDE.GHC.Compat as GHC -import qualified Development.IDE.GHC.Compat.Util as Util +import Development.IDE.Core.Tracing (withTrace) +import Development.IDE.GHC.Compat hiding (assert, + loadInterface, + parseHeader, + parseModule, + tcRnModule, + writeHieFile) +import qualified Development.IDE.GHC.Compat as Compat +import qualified Development.IDE.GHC.Compat as GHC +import qualified Development.IDE.GHC.Compat.Util as Util import Development.IDE.GHC.CoreFile import Development.IDE.GHC.Error -import Development.IDE.GHC.Orphans () +import Development.IDE.GHC.Orphans () import Development.IDE.GHC.Util import Development.IDE.GHC.Warnings import Development.IDE.Types.Diagnostics import Development.IDE.Types.Location import 
Development.IDE.Types.Options -import GHC (ForeignHValue, - GetDocsFailure (..), - parsedSource) -import qualified GHC.LanguageExtensions as LangExt +import GHC (ForeignHValue, + GetDocsFailure (..), + parsedSource, ModLocation (..)) +import qualified GHC.LanguageExtensions as LangExt import GHC.Serialized -import HieDb hiding (withHieDb) -import qualified Language.LSP.Protocol.Message as LSP -import Language.LSP.Protocol.Types (DiagnosticTag (..)) -import qualified Language.LSP.Protocol.Types as LSP -import qualified Language.LSP.Server as LSP -import Prelude hiding (mod) +import HieDb hiding (withHieDb) +import qualified Language.LSP.Protocol.Message as LSP +import Language.LSP.Protocol.Types (DiagnosticTag (..)) +import qualified Language.LSP.Server as LSP +import Prelude hiding (mod) import System.Directory import System.FilePath -import System.IO.Extra (fixIO, newTempFileWithin) +import System.IO.Extra (fixIO, + newTempFileWithin) -import qualified GHC as G +import qualified Data.Set as Set +import qualified GHC as G +import qualified GHC.Runtime.Loader as Loader import GHC.Tc.Gen.Splice +import GHC.Types.Error import GHC.Types.ForeignStubs import GHC.Types.HpcInfo import GHC.Types.TypeEnv -- See Note [Guidelines For Using CPP In GHCIDE Import Statements] -#if !MIN_VERSION_ghc(9,3,0) -import Data.Map (Map) -import GHC.Unit.Module.Graph (ModuleGraph) -import Unsafe.Coerce -#endif - -#if MIN_VERSION_ghc(9,3,0) -import qualified Data.Set as Set -#endif #if MIN_VERSION_ghc(9,5,0) import GHC.Core.Lint.Interactive @@ -127,12 +127,18 @@ import GHC.Driver.Config.CoreToStg.Prep #endif #if MIN_VERSION_ghc(9,7,0) -import Data.Foldable (toList) +import Data.Foldable (toList) import GHC.Unit.Module.Warnings #else -import Development.IDE.Core.FileStore (shareFilePath) +import Development.IDE.Core.FileStore (shareFilePath) #endif +import Development.IDE.GHC.Compat.Driver (hscTypecheckRenameWithDiagnostics) + +import Development.IDE.Import.DependencyInformation +import GHC.Driver.Env ( hsc_all_home_unit_ids ) +import Development.IDE.Import.FindImports + --Simple constants to make sure the source is consistently named sourceTypecheck :: T.Text sourceTypecheck = "typecheck" @@ -160,13 +166,18 @@ computePackageDeps -> IO (Either [FileDiagnostic] [UnitId]) computePackageDeps env pkg = do case lookupUnit env pkg of - Nothing -> return $ Left [ideErrorText (toNormalizedFilePath' noFilePath) $ - T.pack $ "unknown package: " ++ show pkg] + Nothing -> + return $ Left + [ ideErrorText + (toNormalizedFilePath' noFilePath) + (T.pack $ "unknown package: " ++ show pkg) + ] Just pkgInfo -> return $ Right $ unitDepends pkgInfo -newtype TypecheckHelpers +data TypecheckHelpers = TypecheckHelpers { getLinkables :: [NormalizedFilePath] -> IO [LinkableResult] -- ^ hls-graph action to get linkables for files + , getModuleGraph :: IO DependencyInformation } typecheckModule :: IdeDefer @@ -178,24 +189,28 @@ typecheckModule (IdeDefer defer) hsc tc_helpers pm = do let modSummary = pm_mod_summary pm dflags = ms_hspp_opts modSummary initialized <- catchSrcErrors (hsc_dflags hsc) "typecheck (initialize plugins)" - (initPlugins hsc modSummary) + (Loader.initializePlugins (hscSetFlags (ms_hspp_opts modSummary) hsc)) case initialized of Left errs -> return (errs, Nothing) - Right (modSummary', hscEnv) -> do - (warnings, etcm) <- withWarnings sourceTypecheck $ \tweak -> + Right hscEnv -> do + etcm <- let - session = tweak (hscSetFlags dflags hscEnv) - -- TODO: maybe settings ms_hspp_opts is unnecessary? 
- mod_summary'' = modSummary' { ms_hspp_opts = hsc_dflags session} + -- TODO: maybe setting ms_hspp_opts is unnecessary? + mod_summary' = modSummary { ms_hspp_opts = hsc_dflags hscEnv} in catchSrcErrors (hsc_dflags hscEnv) sourceTypecheck $ do - tcRnModule session tc_helpers $ demoteIfDefer pm{pm_mod_summary = mod_summary''} - let errorPipeline = unDefer . hideDiag dflags . tagDiag - diags = map errorPipeline warnings - deferredError = any fst diags + tcRnModule hscEnv tc_helpers $ demoteIfDefer pm{pm_mod_summary = mod_summary'} case etcm of - Left errs -> return (map snd diags ++ errs, Nothing) - Right tcm -> return (map snd diags, Just $ tcm{tmrDeferredError = deferredError}) + Left errs -> return (errs, Nothing) + Right tcm -> + let addReason diag = + map (Just (diagnosticReason (errMsgDiagnostic diag)),) $ + diagFromErrMsg sourceTypecheck (hsc_dflags hscEnv) diag + errorPipeline = map (unDefer . hideDiag dflags . tagDiag) . addReason + diags = concatMap errorPipeline $ Compat.getMessages $ tmrWarnings tcm + deferredError = any fst diags + in + return (map snd diags, Just $ tcm{tmrDeferredError = deferredError}) where demoteIfDefer = if defer then demoteTypeErrorsToWarnings else id @@ -223,11 +238,7 @@ captureSplicesAndDeps TypecheckHelpers{..} env k = do -- come from in the IORef,, as these are the modules on whose implementation -- we depend. compile_bco_hook :: IORef (ModuleEnv BS.ByteString) -> HscEnv -> SrcSpan -> CoreExpr -#if MIN_VERSION_ghc(9,3,0) -> IO (ForeignHValue, [Linkable], PkgsLoaded) -#else - -> IO ForeignHValue -#endif compile_bco_hook var hsc_env srcspan ds_expr = do { let dflags = hsc_dflags hsc_env @@ -247,10 +258,8 @@ captureSplicesAndDeps TypecheckHelpers{..} env k = do ; let iNTERACTIVELoc = G.ModLocation{ ml_hs_file = Nothing, ml_hi_file = panic "hscCompileCoreExpr':ml_hi_file", ml_obj_file = panic "hscCompileCoreExpr':ml_obj_file", -#if MIN_VERSION_ghc(9,3,0) ml_dyn_obj_file = panic "hscCompileCoreExpr':ml_dyn_obj_file", ml_dyn_hi_file = panic "hscCompileCoreExpr':ml_dyn_hi_file", -#endif ml_hie_file = panic "hscCompileCoreExpr':ml_hie_file" } ; let ictxt = hsc_IC hsc_env @@ -259,9 +268,7 @@ captureSplicesAndDeps TypecheckHelpers{..} env k = do myCoreToStgExpr (hsc_logger hsc_env) (hsc_dflags hsc_env) ictxt -#if MIN_VERSION_ghc(9,3,0) True -- for bytecode -#endif (icInteractiveModule ictxt) iNTERACTIVELoc prepd_expr @@ -271,6 +278,9 @@ captureSplicesAndDeps TypecheckHelpers{..} env k = do (icInteractiveModule ictxt) stg_expr [] Nothing +#if MIN_VERSION_ghc(9,11,0) + [] -- spt_entries +#endif -- Exclude wired-in names because we may not have read -- their interface files, so getLinkDeps will fail @@ -279,11 +289,7 @@ captureSplicesAndDeps TypecheckHelpers{..} env k = do -- Find the linkables for the modules we need ; let needed_mods = mkUniqSet [ -#if MIN_VERSION_ghc(9,3,0) mod -- We need the whole module for 9.4 because of multiple home units modules may have different unit ids -#else - moduleName mod -- On <= 9.2, just the name is enough because all unit ids will be the same -#endif | n <- concatMap (uniqDSetToList . 
bcoFreeNames) $ bc_bcos bcos , not (isWiredInName n) -- Exclude wired-in names @@ -291,51 +297,33 @@ captureSplicesAndDeps TypecheckHelpers{..} env k = do , moduleUnitId mod `elem` home_unit_ids -- Only care about stuff from the home package set ] home_unit_ids = -#if MIN_VERSION_ghc(9,3,0) map fst (hugElts $ hsc_HUG hsc_env) -#else - [homeUnitId_ dflags] -#endif mods_transitive = getTransitiveMods hsc_env needed_mods -- If we don't support multiple home units, ModuleNames are sufficient because all the units will be the same mods_transitive_list = -#if MIN_VERSION_ghc(9,3,0) mapMaybe nodeKeyToInstalledModule $ Set.toList mods_transitive -#else - -- Non det OK as we will put it into maps later anyway - map (Compat.installedModule (homeUnitId_ dflags)) $ nonDetEltsUniqSet mods_transitive -#endif -#if MIN_VERSION_ghc(9,3,0) - ; moduleLocs <- readIORef (fcModuleCache $ hsc_FC hsc_env) -#else - ; moduleLocs <- readIORef (hsc_FC hsc_env) -#endif - ; lbs <- getLinkables [toNormalizedFilePath' file + ; moduleLocs <- getModuleGraph + ; lbs <- getLinkables [file | installedMod <- mods_transitive_list - , let ifr = fromJust $ lookupInstalledModuleEnv moduleLocs installedMod - file = case ifr of - InstalledFound loc _ -> - fromJust $ ml_hs_file loc - _ -> panic "hscCompileCoreExprHook: module not found" + , let file = fromJust $ lookupModuleFile (installedMod { moduleUnit = RealUnit (Definite $ moduleUnit installedMod) }) moduleLocs ] ; let hsc_env' = loadModulesHome (map linkableHomeMod lbs) hsc_env -#if MIN_VERSION_ghc(9,3,0) {- load it -} - ; (fv_hvs, lbss, pkgs) <- loadDecls (hscInterp hsc_env') hsc_env' srcspan bcos - ; let hval = ((expectJust "hscCompileCoreExpr'" $ lookup (idName binding_id) fv_hvs), lbss, pkgs) +#if MIN_VERSION_ghc(9,11,0) + ; bco_time <- getCurrentTime + ; (fv_hvs, lbss, pkgs) <- loadDecls (hscInterp hsc_env') hsc_env' srcspan $ + Linkable bco_time (icInteractiveModule ictxt) $ NE.singleton $ BCOs bcos #else - {- load it -} - ; fv_hvs <- loadDecls (hscInterp hsc_env') hsc_env' srcspan bcos - ; let hval = expectJust "hscCompileCoreExpr'" $ lookup (idName binding_id) fv_hvs + ; (fv_hvs, lbss, pkgs) <- loadDecls (hscInterp hsc_env') hsc_env' srcspan bcos #endif + ; let hval = (expectJust "hscCompileCoreExpr'" $ lookup (idName binding_id) fv_hvs, lbss, pkgs) ; modifyIORef' var (flip extendModuleEnvList [(mi_module $ hm_iface hm, linkableHash lb) | lb <- lbs, let hm = linkableHomeMod lb]) ; return hval } -#if MIN_VERSION_ghc(9,3,0) -- TODO: support backpack nodeKeyToInstalledModule :: NodeKey -> Maybe InstalledModule -- We shouldn't get boot files here, but to be safe, never map them to an installed module @@ -346,28 +334,13 @@ captureSplicesAndDeps TypecheckHelpers{..} env k = do nodeKeyToInstalledModule _ = Nothing moduleToNodeKey :: Module -> NodeKey moduleToNodeKey mod = NodeKey_Module $ ModNodeKeyWithUid (GWIB (moduleName mod) NotBoot) (moduleUnitId mod) -#endif -- Compute the transitive set of linkables required getTransitiveMods hsc_env needed_mods -#if MIN_VERSION_ghc(9,3,0) = Set.unions (Set.fromList (map moduleToNodeKey mods) : [ dep | m <- mods , Just dep <- [Map.lookup (moduleToNodeKey m) (mgTransDeps (hsc_mod_graph hsc_env))] ]) where mods = nonDetEltsUniqSet needed_mods -- OK because we put them into a set immediately after -#else - = go emptyUniqSet needed_mods - where - hpt = hsc_HPT hsc_env - go seen new - | isEmptyUniqSet new = seen - | otherwise = go seen' new' - where - seen' = seen `unionUniqSets` new - new' = new_deps `minusUniqSet` seen' - new_deps = 
unionManyUniqSets [ mkUniqSet $ getDependentMods $ hm_iface mod_info - | mod_info <- eltsUDFM $ udfmIntersectUFM hpt (getUniqSet new)] -#endif -- | Add a Hook to the DynFlags which captures and returns the -- typechecked splices before they are run. This information @@ -408,12 +381,12 @@ tcRnModule hsc_env tc_helpers pmod = do let ms = pm_mod_summary pmod hsc_env_tmp = hscSetFlags (ms_hspp_opts ms) hsc_env - ((tc_gbl_env', mrn_info), splices, mod_env) + (((tc_gbl_env', mrn_info), warning_messages), splices, mod_env) <- captureSplicesAndDeps tc_helpers hsc_env_tmp $ \hscEnvTmp -> - do hscTypecheckRename hscEnvTmp ms $ - HsParsedModule { hpm_module = parsedSource pmod, - hpm_src_files = pm_extra_src_files pmod, - hpm_annotations = pm_annotations pmod } + do hscTypecheckRenameWithDiagnostics hscEnvTmp ms $ + HsParsedModule { hpm_module = parsedSource pmod + , hpm_src_files = pm_extra_src_files pmod + } let rn_info = case mrn_info of Just x -> x Nothing -> error "no renamed info tcRnModule" @@ -422,7 +395,7 @@ tcRnModule hsc_env tc_helpers pmod = do mod_env_anns = map (\(mod, hash) -> Annotation (ModuleTarget mod) $ toSerialized BS.unpack hash) (moduleEnvToList mod_env) tc_gbl_env = tc_gbl_env' { tcg_ann_env = extendAnnEnvList (tcg_ann_env tc_gbl_env') mod_env_anns } - pure (TcModuleResult pmod rn_info tc_gbl_env splices False mod_env) + pure (TcModuleResult pmod rn_info tc_gbl_env splices False mod_env warning_messages) -- Note [Clearing mi_globals after generating an iface] @@ -442,12 +415,8 @@ tcRnModule hsc_env tc_helpers pmod = do -- HLS. To avoid GHC from pessimising HLS, we filter out certain dependency information -- that we track ourselves. See also Note [Recompilation avoidance in the presence of TH] filterUsages :: [Usage] -> [Usage] -#if MIN_VERSION_ghc(9,3,0) filterUsages = filter $ \case UsageHomeModuleInterface{} -> False _ -> True -#else -filterUsages = id -#endif -- | Mitigation for https://gitlab.haskell.org/ghc/ghc/-/issues/22744 -- Important to do this immediately after reading the unit before @@ -473,7 +442,14 @@ mkHiFileResultNoCompile session tcm = do details <- makeSimpleDetails hsc_env_tmp tcGblEnv sf <- finalSafeMode (ms_hspp_opts ms) tcGblEnv iface' <- mkIfaceTc hsc_env_tmp sf details ms Nothing tcGblEnv - let iface = iface' { mi_globals = Nothing, mi_usages = filterUsages (mi_usages iface') } -- See Note [Clearing mi_globals after generating an iface] + -- See Note [Clearing mi_globals after generating an iface] + let iface = iface' +#if MIN_VERSION_ghc(9,11,0) + & set_mi_top_env Nothing + & set_mi_usages (filterUsages (mi_usages iface')) +#else + { mi_globals = Nothing, mi_usages = filterUsages (mi_usages iface') } +#endif pure $! 
mkHiFileResult ms iface details (tmrRuntimeModules tcm) Nothing mkHiFileResultCompile @@ -498,8 +474,9 @@ mkHiFileResultCompile se session' tcm simplified_guts = catchErrs $ do (cg_binds guts) #endif details -#if MIN_VERSION_ghc(9,3,0) ms +#if MIN_VERSION_ghc(9,11,0) + (tcg_import_decls (tmrTypechecked tcm)) #endif simplified_guts @@ -507,7 +484,17 @@ mkHiFileResultCompile se session' tcm simplified_guts = catchErrs $ do #if MIN_VERSION_ghc(9,4,2) Nothing #endif - let final_iface = final_iface' {mi_globals = Nothing, mi_usages = filterUsages (mi_usages final_iface')} -- See Note [Clearing mi_globals after generating an iface] +#if MIN_VERSION_ghc(9,11,0) + NoStubs [] +#endif + -- See Note [Clearing mi_globals after generating an iface] + let final_iface = final_iface' +#if MIN_VERSION_ghc(9,11,0) + & set_mi_top_env Nothing + & set_mi_usages (filterUsages (mi_usages final_iface')) +#else + {mi_globals = Nothing, mi_usages = filterUsages (mi_usages final_iface')} +#endif -- Write the core file now core_file <- do @@ -515,7 +502,7 @@ mkHiFileResultCompile se session' tcm simplified_guts = catchErrs $ do core_file = codeGutsToCoreFile iface_hash guts iface_hash = getModuleHash final_iface core_hash1 <- atomicFileWrite se core_fp $ \fp -> - writeBinCoreFile fp core_file + writeBinCoreFile (hsc_dflags session) fp core_file -- We want to drop references to guts and read in a serialized, compact version -- of the core file from disk (as it is deserialised lazily) -- This is because we don't want to keep the guts in memory for every file in @@ -552,17 +539,9 @@ mkHiFileResultCompile se session' tcm simplified_guts = catchErrs $ do -- Run corePrep first as we want to test the final version of the program that will -- get translated to STG/Bytecode -#if MIN_VERSION_ghc(9,3,0) prepd_binds -#else - (prepd_binds , _) -#endif <- corePrep unprep_binds data_tycons -#if MIN_VERSION_ghc(9,3,0) prepd_binds' -#else - (prepd_binds', _) -#endif <- corePrep unprep_binds' data_tycons let binds = noUnfoldings $ (map flattenBinds . (:[])) prepd_binds binds' = noUnfoldings $ (map flattenBinds . (:[])) prepd_binds' @@ -599,8 +578,14 @@ mkHiFileResultCompile se session' tcm simplified_guts = catchErrs $ do source = "compile" catchErrs x = x `catches` [ Handler $ return . (,Nothing) . diagFromGhcException source dflags - , Handler $ return . (,Nothing) . diagFromString source DiagnosticSeverity_Error (noSpan "") - . (("Error during " ++ T.unpack source) ++) . show @SomeException + , Handler $ \diag -> + return + ( diagFromString + source DiagnosticSeverity_Error (noSpan "") + ("Error during " ++ T.unpack source ++ show @SomeException diag) + Nothing + , Nothing + ) ] -- | Whether we should run the -O0 simplifier when generating core. 
@@ -659,11 +644,7 @@ generateObjectCode session summary guts = do let env' = tweak (hscSetFlags (ms_hspp_opts summary) session) target = platformDefaultBackend (hsc_dflags env') newFlags = setBackend target $ updOptLevel 0 $ setOutputFile -#if MIN_VERSION_ghc(9,3,0) (Just dot_o) -#else - dot_o -#endif $ hsc_dflags env' session' = hscSetFlags newFlags session #if MIN_VERSION_ghc(9,4,2) @@ -674,18 +655,16 @@ generateObjectCode session summary guts = do (ms_location summary) fp obj <- compileFile session' driverNoStop (outputFilename, Just (As False)) -#if MIN_VERSION_ghc(9,3,0) case obj of Nothing -> throwGhcExceptionIO $ Panic "compileFile didn't generate object code" Just x -> pure x -#else - return obj -#endif - let unlinked = DotO dot_o_fp -- Need time to be the modification time for recompilation checking t <- liftIO $ getModificationTime dot_o_fp - let linkable = LM t mod [unlinked] - +#if MIN_VERSION_ghc(9,11,0) + let linkable = Linkable t mod (pure $ DotO dot_o_fp ModuleObject) +#else + let linkable = LM t mod [DotO dot_o_fp] +#endif pure (map snd warnings, linkable) newtype CoreFileTime = CoreFileTime UTCTime @@ -694,15 +673,22 @@ generateByteCode :: CoreFileTime -> HscEnv -> ModSummary -> CgGuts -> IO (IdeRes generateByteCode (CoreFileTime time) hscEnv summary guts = do fmap (either (, Nothing) (second Just)) $ catchSrcErrors (hsc_dflags hscEnv) "bytecode" $ do +#if MIN_VERSION_ghc(9,11,0) + (warnings, (_, bytecode)) <- +#else (warnings, (_, bytecode, sptEntries)) <- +#endif withWarnings "bytecode" $ \_tweak -> do let session = _tweak (hscSetFlags (ms_hspp_opts summary) hscEnv) -- TODO: maybe settings ms_hspp_opts is unnecessary? summary' = summary { ms_hspp_opts = hsc_dflags session } hscInteractive session (mkCgInteractiveGuts guts) (ms_location summary') - let unlinked = BCOs bytecode sptEntries - let linkable = LM time (ms_mod summary) [unlinked] +#if MIN_VERSION_ghc(9,11,0) + let linkable = Linkable time (ms_mod summary) (pure $ BCOs bytecode) +#else + let linkable = LM time (ms_mod summary) [BCOs bytecode sptEntries] +#endif pure (map snd warnings, linkable) demoteTypeErrorsToWarnings :: ParsedModule -> ParsedModule @@ -725,34 +711,23 @@ update_pm_mod_summary :: (ModSummary -> ModSummary) -> ParsedModule -> ParsedMod update_pm_mod_summary up pm = pm{pm_mod_summary = up $ pm_mod_summary pm} -#if MIN_VERSION_ghc(9,3,0) unDefer :: (Maybe DiagnosticReason, FileDiagnostic) -> (Bool, FileDiagnostic) unDefer (Just (WarningWithFlag Opt_WarnDeferredTypeErrors) , fd) = (True, upgradeWarningToError fd) unDefer (Just (WarningWithFlag Opt_WarnTypedHoles) , fd) = (True, upgradeWarningToError fd) unDefer (Just (WarningWithFlag Opt_WarnDeferredOutOfScopeVariables), fd) = (True, upgradeWarningToError fd) -#else -unDefer :: (WarnReason, FileDiagnostic) -> (Bool, FileDiagnostic) -unDefer (Reason Opt_WarnDeferredTypeErrors , fd) = (True, upgradeWarningToError fd) -unDefer (Reason Opt_WarnTypedHoles , fd) = (True, upgradeWarningToError fd) -unDefer (Reason Opt_WarnDeferredOutOfScopeVariables, fd) = (True, upgradeWarningToError fd) -#endif unDefer ( _ , fd) = (False, fd) upgradeWarningToError :: FileDiagnostic -> FileDiagnostic -upgradeWarningToError (nfp, sh, fd) = - (nfp, sh, fd{_severity = Just DiagnosticSeverity_Error, _message = warn2err $ _message fd}) where +upgradeWarningToError = + fdLspDiagnosticL %~ \diag -> diag {_severity = Just DiagnosticSeverity_Error, _message = warn2err $ _message diag} + where warn2err :: T.Text -> T.Text warn2err = T.intercalate ": error:" . 
T.splitOn ": warning:" -#if MIN_VERSION_ghc(9,3,0) hideDiag :: DynFlags -> (Maybe DiagnosticReason, FileDiagnostic) -> (Maybe DiagnosticReason, FileDiagnostic) -hideDiag originalFlags (w@(Just (WarningWithFlag warning)), (nfp, _sh, fd)) -#else -hideDiag :: DynFlags -> (WarnReason, FileDiagnostic) -> (WarnReason, FileDiagnostic) -hideDiag originalFlags (w@(Reason warning), (nfp, _sh, fd)) -#endif +hideDiag originalFlags (w@(Just (WarningWithFlag warning)), fd) | not (wopt warning originalFlags) - = (w, (nfp, HideDiag, fd)) + = (w, fd { fdShouldShowDiagnostic = HideDiag }) hideDiag _originalFlags t = t -- | Warnings which lead to a diagnostic tag @@ -773,29 +748,21 @@ unnecessaryDeprecationWarningFlags ] -- | Add a unnecessary/deprecated tag to the required diagnostics. -#if MIN_VERSION_ghc(9,3,0) tagDiag :: (Maybe DiagnosticReason, FileDiagnostic) -> (Maybe DiagnosticReason, FileDiagnostic) -#else -tagDiag :: (WarnReason, FileDiagnostic) -> (WarnReason, FileDiagnostic) -#endif #if MIN_VERSION_ghc(9,7,0) -tagDiag (w@(Just (WarningWithCategory cat)), (nfp, sh, fd)) +tagDiag (w@(Just (WarningWithCategory cat)), fd) | cat == defaultWarningCategory -- default warning category is for deprecations - = (w, (nfp, sh, fd { _tags = Just $ DiagnosticTag_Deprecated : concat (_tags fd) })) -tagDiag (w@(Just (WarningWithFlags warnings)), (nfp, sh, fd)) + = (w, fd & fdLspDiagnosticL %~ \diag -> diag { _tags = Just $ DiagnosticTag_Deprecated : concat (_tags diag) }) +tagDiag (w@(Just (WarningWithFlags warnings)), fd) | tags <- mapMaybe requiresTag (toList warnings) - = (w, (nfp, sh, fd { _tags = Just $ tags ++ concat (_tags fd) })) -#elif MIN_VERSION_ghc(9,3,0) -tagDiag (w@(Just (WarningWithFlag warning)), (nfp, sh, fd)) - | Just tag <- requiresTag warning - = (w, (nfp, sh, fd { _tags = Just $ tag : concat (_tags fd) })) + = (w, fd & fdLspDiagnosticL %~ \diag -> diag { _tags = Just $ tags ++ concat (_tags diag) }) #else -tagDiag (w@(Reason warning), (nfp, sh, fd)) +tagDiag (w@(Just (WarningWithFlag warning)), fd) | Just tag <- requiresTag warning - = (w, (nfp, sh, fd { _tags = Just $ tag : concat (_tags fd) })) + = (w, fd & fdLspDiagnosticL %~ \diag -> diag { _tags = Just $ tag : concat (_tags diag) }) #endif - where + where requiresTag :: WarningFlag -> Maybe DiagnosticTag #if !MIN_VERSION_ghc(9,7,0) -- doesn't exist on 9.8, we use WarningWithCategory instead @@ -822,33 +789,43 @@ atomicFileWrite se targetPath write = do (write tempFilePath >>= \x -> renameFile tempFilePath targetPath >> atomically (resetInterfaceStore se (toNormalizedFilePath' targetPath)) >> pure x) `onException` cleanUp -generateHieAsts :: HscEnv -> TcModuleResult -> IO ([FileDiagnostic], Maybe (HieASTs Type)) +generateHieAsts :: HscEnv -> TcModuleResult +#if MIN_VERSION_ghc(9,11,0) + -> IO ([FileDiagnostic], Maybe (HieASTs Type, NameEntityInfo)) +#else + -> IO ([FileDiagnostic], Maybe (HieASTs Type)) +#endif generateHieAsts hscEnv tcm = handleGenerationErrors' dflags "extended interface generation" $ runHsc hscEnv $ do -- These varBinds use unitDataConId but it could be anything as the id name is not used -- during the hie file generation process. It's a workaround for the fact that the hie modules -- don't export an interface which allows for additional information to be added to hie files. 
- let fake_splice_binds = Util.listToBag (map (mkVarBind unitDataConId) (spliceExpressions $ tmrTopLevelSplices tcm)) + let fake_splice_binds = +#if !MIN_VERSION_ghc(9,11,0) + Util.listToBag $ +#endif + map (mkVarBind unitDataConId) (spliceExpressions $ tmrTopLevelSplices tcm) real_binds = tcg_binds $ tmrTypechecked tcm + all_binds = +#if MIN_VERSION_ghc(9,11,0) + fake_splice_binds ++ real_binds +#else + fake_splice_binds `Util.unionBags` real_binds +#endif ts = tmrTypechecked tcm :: TcGblEnv top_ev_binds = tcg_ev_binds ts :: Util.Bag EvBind insts = tcg_insts ts :: [ClsInst] tcs = tcg_tcs ts :: [TyCon] - run ts $ -#if MIN_VERSION_ghc(9,3,0) - pure $ Just $ -#else - Just <$> + hie_asts = GHC.enrichHie all_binds (tmrRenamed tcm) top_ev_binds insts tcs + + pure $ Just $ +#if MIN_VERSION_ghc(9,11,0) + hie_asts (tcg_type_env ts) +#elif MIN_VERSION_ghc(9,3,0) + hie_asts #endif - GHC.enrichHie (fake_splice_binds `Util.unionBags` real_binds) (tmrRenamed tcm) top_ev_binds insts tcs where dflags = hsc_dflags hscEnv - run _ts = -- ts is only used in GHC 9.2 -#if !MIN_VERSION_ghc(9,3,0) - fmap (join . snd) . liftIO . initDs hscEnv _ts -#else - id -#endif spliceExpressions :: Splices -> [LHsExpr GhcTc] spliceExpressions Splices{..} = @@ -890,7 +867,6 @@ spliceExpressions Splices{..} = -- indexHieFile :: ShakeExtras -> ModSummary -> NormalizedFilePath -> Util.Fingerprint -> Compat.HieFile -> IO () indexHieFile se mod_summary srcPath !hash hf = do - IdeOptions{optProgressStyle} <- getIdeOptionsIO se atomically $ do pending <- readTVar indexPending case HashMap.lookup srcPath pending of @@ -911,69 +887,14 @@ indexHieFile se mod_summary srcPath !hash hf = do unless newerScheduled $ do -- Using bracket, so even if an exception happen during withHieDb call, -- the `post` (which clean the progress indicator) will still be called. - bracket_ (pre optProgressStyle) post $ + bracket_ pre post $ withHieDb (\db -> HieDb.addRefsFromLoaded db targetPath (HieDb.RealFile $ fromNormalizedFilePath srcPath) hash hf') where mod_location = ms_location mod_summary targetPath = Compat.ml_hie_file mod_location HieDbWriter{..} = hiedbWriter se - -- Get a progress token to report progress and update it for the current file - pre style = do - tok <- modifyVar indexProgressToken $ fmap dupe . \case - x@(Just _) -> pure x - -- Create a token if we don't already have one - Nothing -> do - case lspEnv se of - Nothing -> pure Nothing - Just env -> LSP.runLspT env $ do - u <- LSP.ProgressToken . LSP.InR . T.pack . show . 
hashUnique <$> liftIO Unique.newUnique - -- TODO: Wait for the progress create response to use the token - _ <- LSP.sendRequest LSP.SMethod_WindowWorkDoneProgressCreate (LSP.WorkDoneProgressCreateParams u) (const $ pure ()) - LSP.sendNotification LSP.SMethod_Progress $ LSP.ProgressParams u $ - toJSON $ LSP.WorkDoneProgressBegin - { _kind = LSP.AString @"begin" - , _title = "Indexing" - , _cancellable = Nothing - , _message = Nothing - , _percentage = Nothing - } - pure (Just u) - - (!done, !remaining) <- atomically $ do - done <- readTVar indexCompleted - remaining <- HashMap.size <$> readTVar indexPending - pure (done, remaining) - let - progressFrac :: Double - progressFrac = fromIntegral done / fromIntegral (done + remaining) - progressPct :: LSP.UInt - progressPct = floor $ 100 * progressFrac - - whenJust (lspEnv se) $ \env -> whenJust tok $ \token -> LSP.runLspT env $ - LSP.sendNotification LSP.SMethod_Progress $ LSP.ProgressParams token $ - toJSON $ - case style of - Percentage -> LSP.WorkDoneProgressReport - { _kind = LSP.AString @"report" - , _cancellable = Nothing - , _message = Nothing - , _percentage = Just progressPct - } - Explicit -> LSP.WorkDoneProgressReport - { _kind = LSP.AString @"report" - , _cancellable = Nothing - , _message = Just $ - T.pack " (" <> T.pack (show done) <> "/" <> T.pack (show $ done + remaining) <> ")..." - , _percentage = Nothing - } - NoProgress -> LSP.WorkDoneProgressReport - { _kind = LSP.AString @"report" - , _cancellable = Nothing - , _message = Nothing - , _percentage = Nothing - } - + pre = progressUpdate indexProgressReporting ProgressStarted -- Report the progress once we are done indexing this file post = do mdone <- atomically $ do @@ -988,20 +909,16 @@ indexHieFile se mod_summary srcPath !hash hf = do when (coerce $ ideTesting se) $ LSP.sendNotification (LSP.SMethod_CustomMethod (Proxy @"ghcide/reference/ready")) $ toJSON $ fromNormalizedFilePath srcPath - whenJust mdone $ \done -> - modifyVar_ indexProgressToken $ \tok -> do - whenJust (lspEnv se) $ \env -> LSP.runLspT env $ - whenJust tok $ \token -> - LSP.sendNotification LSP.SMethod_Progress $ LSP.ProgressParams token $ - toJSON $ - LSP.WorkDoneProgressEnd - { _kind = LSP.AString @"end" - , _message = Just $ "Finished indexing " <> T.pack (show done) <> " files" - } - -- We are done with the current indexing cycle, so destroy the token - pure Nothing - -writeAndIndexHieFile :: HscEnv -> ShakeExtras -> ModSummary -> NormalizedFilePath -> [GHC.AvailInfo] -> HieASTs Type -> BS.ByteString -> IO [FileDiagnostic] + whenJust mdone $ \_ -> progressUpdate indexProgressReporting ProgressCompleted + +writeAndIndexHieFile + :: HscEnv -> ShakeExtras -> ModSummary -> NormalizedFilePath -> [GHC.AvailInfo] +#if MIN_VERSION_ghc(9,11,0) + -> (HieASTs Type, NameEntityInfo) +#else + -> HieASTs Type +#endif + -> BS.ByteString -> IO [FileDiagnostic] writeAndIndexHieFile hscEnv se mod_summary srcPath exports ast source = handleGenerationErrors dflags "extended interface write/compression" $ do hf <- runHsc hscEnv $ @@ -1028,16 +945,25 @@ handleGenerationErrors :: DynFlags -> T.Text -> IO () -> IO [FileDiagnostic] handleGenerationErrors dflags source action = action >> return [] `catches` [ Handler $ return . diagFromGhcException source dflags - , Handler $ return . diagFromString source DiagnosticSeverity_Error (noSpan "") - . (("Error during " ++ T.unpack source) ++) . 
show @SomeException + , Handler $ \(exception :: SomeException) -> return $ + diagFromString + source DiagnosticSeverity_Error (noSpan "") + ("Error during " ++ T.unpack source ++ show exception) + Nothing ] handleGenerationErrors' :: DynFlags -> T.Text -> IO (Maybe a) -> IO ([FileDiagnostic], Maybe a) handleGenerationErrors' dflags source action = fmap ([],) action `catches` [ Handler $ return . (,Nothing) . diagFromGhcException source dflags - , Handler $ return . (,Nothing) . diagFromString source DiagnosticSeverity_Error (noSpan "") - . (("Error during " ++ T.unpack source) ++) . show @SomeException + , Handler $ \(exception :: SomeException) -> + return + ( diagFromString + source DiagnosticSeverity_Error (noSpan "") + ("Error during " ++ T.unpack source ++ show exception) + Nothing + , Nothing + ) ] @@ -1046,9 +972,46 @@ handleGenerationErrors' dflags source action = -- Add the current ModSummary to the graph, along with the -- HomeModInfo's of all direct dependencies (by induction hypothesis all -- transitive dependencies will be contained in envs) -mergeEnvs :: HscEnv -> ModuleGraph -> ModSummary -> [HomeModInfo] -> [HscEnv] -> IO HscEnv -mergeEnvs env mg ms extraMods envs = do -#if MIN_VERSION_ghc(9,3,0) +mergeEnvs :: HscEnv + -> ModuleGraph + -> DependencyInformation + -> ModSummary + -> [HomeModInfo] + -> [HscEnv] + -> IO HscEnv +mergeEnvs env mg dep_info ms extraMods envs = do +#if MIN_VERSION_ghc(9,11,0) + return $! loadModulesHome extraMods $ + let newHug = foldl' mergeHUG (hsc_HUG env) (map hsc_HUG envs) in + (hscUpdateHUG (const newHug) env){ + hsc_mod_graph = mg, + hsc_FC = (hsc_FC env) + { addToFinderCache = \gwib@(GWIB im _) val -> + if moduleUnit im `elem` hsc_all_home_unit_ids env + then pure () + else addToFinderCache (hsc_FC env) gwib val + , lookupFinderCache = \gwib@(GWIB im _) -> + if moduleUnit im `elem` hsc_all_home_unit_ids env + then case lookupModuleFile (im { moduleUnit = RealUnit (Definite $ moduleUnit im) }) dep_info of + Nothing -> pure Nothing + Just fs -> let ml = fromJust $ do + id <- lookupPathToId (depPathIdMap dep_info) fs + artifactModLocation (idToModLocation (depPathIdMap dep_info) id) + in pure $ Just $ InstalledFound ml im + else lookupFinderCache (hsc_FC env) gwib + } + } + + where + mergeHUG (UnitEnvGraph a) (UnitEnvGraph b) = UnitEnvGraph $ Map.unionWith mergeHUE a b + mergeHUE a b = a { homeUnitEnv_hpt = mergeUDFM (homeUnitEnv_hpt a) (homeUnitEnv_hpt b) } + mergeUDFM = plusUDFM_C combineModules + + combineModules a b + | HsSrcFile <- mi_hsc_src (hm_iface a) = a + | otherwise = b + +#elif MIN_VERSION_ghc(9,3,0) let im = Compat.installedModule (toUnitId $ moduleUnit $ ms_mod ms) (moduleName (ms_mod ms)) ifr = InstalledFound (ms_location ms) im curFinderCache = Compat.extendInstalledModuleEnv Compat.emptyInstalledModuleEnv im ifr @@ -1082,33 +1045,9 @@ mergeEnvs env mg ms extraMods envs = do fcModules' <- newIORef $! foldl' (plusInstalledModuleEnv combineModuleLocations) cur fcModules fcFiles' <- newIORef $! Map.unions fcFiles pure $ FinderCache fcModules' fcFiles' - -#else - prevFinderCache <- concatFC <$> mapM (readIORef . hsc_FC) envs - let im = Compat.installedModule (toUnitId $ moduleUnit $ ms_mod ms) (moduleName (ms_mod ms)) - ifr = InstalledFound (ms_location ms) im - newFinderCache <- newIORef $! Compat.extendInstalledModuleEnv prevFinderCache im ifr - return $! 
loadModulesHome extraMods $ - env{ - hsc_HPT = foldMapBy mergeUDFM emptyUDFM hsc_HPT envs, - hsc_FC = newFinderCache, - hsc_mod_graph = mg - } - - where - mergeUDFM = plusUDFM_C combineModules - combineModules a b - | HsSrcFile <- mi_hsc_src (hm_iface a) = a - | otherwise = b - -- required because 'FinderCache': - -- 1) doesn't have a 'Monoid' instance, - -- 2) is abstract and doesn't export constructors - -- To work around this, we coerce to the underlying type - -- To remove this, I plan to upstream the missing Monoid instance - concatFC :: [FinderCache] -> FinderCache - concatFC = unsafeCoerce (mconcat @(Map InstalledModule InstalledFindResult)) #endif + withBootSuffix :: HscSource -> ModLocation -> ModLocation withBootSuffix HsBootFile = addBootSuffixLocnOut withBootSuffix _ = id @@ -1152,24 +1091,16 @@ getModSummaryFromImports env fp _modTime mContents = do convImport (L _ i) = ( -#if !MIN_VERSION_ghc(9,3,0) - fmap sl_fs -#endif (ideclPkgQual i) , reLoc $ ideclName i) msrImports = implicit_imports ++ imps -#if MIN_VERSION_ghc(9,3,0) rn_pkg_qual = renameRawPkgQual (hsc_unit_env ppEnv) rn_imps = fmap (\(pk, lmn@(L _ mn)) -> (rn_pkg_qual mn pk, lmn)) srcImports = rn_imps $ map convImport src_idecls textualImports = rn_imps $ map convImport (implicit_imports ++ ordinary_imps) ghc_prim_import = not (null _ghc_prim_imports) -#else - srcImports = map convImport src_idecls - textualImports = map convImport (implicit_imports ++ ordinary_imps) -#endif -- Force bits that might keep the string buffer and DynFlags alive unnecessarily @@ -1185,18 +1116,14 @@ getModSummaryFromImports env fp _modTime mContents = do let modl = mkHomeModule (hscHomeUnit ppEnv) mod sourceType = if "-boot" `isSuffixOf` takeExtension fp then HsBootFile else HsSrcFile - msrModSummary2 = + msrModSummary = ModSummary { ms_mod = modl , ms_hie_date = Nothing -#if MIN_VERSION_ghc(9,3,0) , ms_dyn_obj_date = Nothing , ms_ghc_prim_import = ghc_prim_import , ms_hs_hash = _src_hash -#else - , ms_hs_date = _modTime -#endif , ms_hsc_src = sourceType -- The contents are used by the GetModSummary rule , ms_hspp_buf = Just contents @@ -1210,8 +1137,8 @@ getModSummaryFromImports env fp _modTime mContents = do , ms_textual_imps = textualImports } - msrFingerprint <- liftIO $ computeFingerprint opts msrModSummary2 - (msrModSummary, msrHscEnv) <- liftIO $ initPlugins ppEnv msrModSummary2 + msrFingerprint <- liftIO $ computeFingerprint opts msrModSummary + msrHscEnv <- liftIO $ Loader.initializePlugins (hscSetFlags (ms_hspp_opts msrModSummary) ppEnv) return ModSummaryResult{..} where -- Compute a fingerprint from the contents of `ModSummary`, @@ -1221,19 +1148,24 @@ getModSummaryFromImports env fp _modTime mContents = do put $ Util.uniq $ moduleNameFS $ moduleName ms_mod forM_ (ms_srcimps ++ ms_textual_imps) $ \(mb_p, m) -> do put $ Util.uniq $ moduleNameFS $ unLoc m -#if MIN_VERSION_ghc(9,3,0) case mb_p of - G.NoPkgQual -> pure () + G.NoPkgQual -> pure () G.ThisPkg uid -> put $ getKey $ getUnique uid G.OtherPkg uid -> put $ getKey $ getUnique uid -#else - whenJust mb_p $ put . Util.uniq -#endif return $! 
Util.fingerprintFingerprints $ [ Util.fingerprintString fp , fingerPrintImports + , modLocationFingerprint ms_location ] ++ map Util.fingerprintString opts + modLocationFingerprint :: ModLocation -> Util.Fingerprint + modLocationFingerprint ModLocation{..} = Util.fingerprintFingerprints $ + Util.fingerprintString <$> [ fromMaybe "" ml_hs_file + , ml_hi_file + , ml_dyn_hi_file + , ml_obj_file + , ml_dyn_obj_file + , ml_hie_file] -- | Parse only the module header parseHeader @@ -1250,7 +1182,7 @@ parseHeader dflags filename contents = do let loc = mkRealSrcLoc (Util.mkFastString filename) 1 1 case unP Compat.parseHeader (initParserState (initParserOpts dflags) contents loc) of PFailedWithErrorMessages msgs -> - throwE $ diagFromErrMsgs sourceParser dflags $ msgs dflags + throwE $ diagFromGhcErrorMessages sourceParser dflags $ msgs dflags POk pst rdr_module -> do let (warns, errs) = renderMessages $ getPsMessages pst @@ -1264,9 +1196,9 @@ parseHeader dflags filename contents = do -- errors are those from which a parse tree just can't -- be produced. unless (null errs) $ - throwE $ diagFromErrMsgs sourceParser dflags errs + throwE $ diagFromGhcErrorMessages sourceParser dflags errs - let warnings = diagFromErrMsgs sourceParser dflags warns + let warnings = diagFromGhcErrorMessages sourceParser dflags warns return (warnings, rdr_module) -- | Given a buffer, flags, and file path, produce a @@ -1283,20 +1215,29 @@ parseFileContents env customPreprocessor filename ms = do dflags = ms_hspp_opts ms contents = fromJust $ ms_hspp_buf ms case unP Compat.parseModule (initParserState (initParserOpts dflags) contents loc) of - PFailedWithErrorMessages msgs -> throwE $ diagFromErrMsgs sourceParser dflags $ msgs dflags + PFailedWithErrorMessages msgs -> + throwE $ diagFromGhcErrorMessages sourceParser dflags $ msgs dflags POk pst rdr_module -> let - hpm_annotations = mkApiAnns pst psMessages = getPsMessages pst in do - let IdePreprocessedSource preproc_warns errs parsed = customPreprocessor rdr_module - - unless (null errs) $ - throwE $ diagFromStrings sourceParser DiagnosticSeverity_Error errs - - let preproc_warnings = diagFromStrings sourceParser DiagnosticSeverity_Warning preproc_warns - (parsed', msgs) <- liftIO $ applyPluginsParsedResultAction env ms hpm_annotations parsed psMessages + let IdePreprocessedSource preproc_warns preproc_errs parsed = customPreprocessor rdr_module + let attachNoStructuredError (span, msg) = (span, msg, Nothing) + + unless (null preproc_errs) $ + throwE $ + diagFromStrings + sourceParser + DiagnosticSeverity_Error + (fmap attachNoStructuredError preproc_errs) + + let preproc_warning_file_diagnostics = + diagFromStrings + sourceParser + DiagnosticSeverity_Warning + (fmap attachNoStructuredError preproc_warns) + (parsed', msgs) <- liftIO $ applyPluginsParsedResultAction env ms parsed psMessages let (warns, errors) = renderMessages msgs -- Just because we got a `POk`, it doesn't mean there @@ -1309,8 +1250,7 @@ parseFileContents env customPreprocessor filename ms = do -- errors are those from which a parse tree just can't -- be produced. 
unless (null errors) $ - throwE $ diagFromErrMsgs sourceParser dflags errors - + throwE $ diagFromGhcErrorMessages sourceParser dflags errors -- To get the list of extra source files, we take the list -- that the parser gave us, @@ -1323,11 +1263,7 @@ parseFileContents env customPreprocessor filename ms = do -- - filter out the .hs/.lhs source filename if we have one -- let n_hspp = normalise filename -#if MIN_VERSION_ghc(9,3,0) TempDir tmp_dir = tmpDir dflags -#else - tmp_dir = tmpDir dflags -#endif srcs0 = nubOrd $ filter (not . (tmp_dir `isPrefixOf`)) $ filter (/= n_hspp) $ map normalise @@ -1343,9 +1279,9 @@ parseFileContents env customPreprocessor filename ms = do -- filter them out: srcs2 <- liftIO $ filterM doesFileExist srcs1 - let pm = ParsedModule ms parsed' srcs2 hpm_annotations - warnings = diagFromErrMsgs sourceParser dflags warns - pure (warnings ++ preproc_warnings, pm) + let pm = ParsedModule ms parsed' srcs2 + warnings = diagFromGhcErrorMessages sourceParser dflags warns + pure (warnings ++ preproc_warning_file_diagnostics, pm) loadHieFile :: Compat.NameCacheUpdater -> FilePath -> IO GHC.HieFile loadHieFile ncu f = do @@ -1420,6 +1356,7 @@ data RecompilationInfo m , old_value :: Maybe (HiFileResult, FileVersion) , get_file_version :: NormalizedFilePath -> m (Maybe FileVersion) , get_linkable_hashes :: [NormalizedFilePath] -> m [BS.ByteString] + , get_module_graph :: m DependencyInformation , regenerate :: Maybe LinkableType -> m ([FileDiagnostic], Maybe HiFileResult) -- ^ Action to regenerate an interface } @@ -1474,14 +1411,9 @@ loadInterface session ms linkableNeeded RecompilationInfo{..} = do -- ncu and read_dflags are only used in GHC >= 9.4 let _ncu = hsc_NC sessionWithMsDynFlags _read_dflags = hsc_dflags sessionWithMsDynFlags -#if MIN_VERSION_ghc(9,3,0) read_result <- liftIO $ readIface _read_dflags _ncu mod iface_file -#else - read_result <- liftIO $ initIfaceCheck (text "readIface") sessionWithMsDynFlags - $ readIface mod iface_file -#endif case read_result of - Util.Failed{} -> return Nothing + Util.Failed{} -> return Nothing -- important to call `shareUsages` here before checkOldIface -- consults `mi_usages` Util.Succeeded iface -> return $ Just (shareUsages iface) @@ -1489,13 +1421,9 @@ loadInterface session ms linkableNeeded RecompilationInfo{..} = do -- If mb_old_iface is nothing then checkOldIface will load it for us -- given that the source is unmodified (recomp_iface_reqd, mb_checked_iface) -#if MIN_VERSION_ghc(9,3,0) <- liftIO $ checkOldIface sessionWithMsDynFlags ms _old_iface >>= \case UpToDateItem x -> pure (UpToDate, Just x) OutOfDateItem reason x -> pure (NeedsRecompile reason, x) -#else - <- liftIO $ checkOldIface sessionWithMsDynFlags ms _sourceMod mb_old_iface -#endif let do_regenerate _reason = withTrace "regenerate interface" $ \setTag -> do setTag "Module" $ moduleNameString $ moduleName mod @@ -1511,7 +1439,7 @@ loadInterface session ms linkableNeeded RecompilationInfo{..} = do | not (mi_used_th iface) = emptyModuleEnv | otherwise = parseRuntimeDeps (md_anns details) -- Peform the fine grained recompilation check for TH - maybe_recomp <- checkLinkableDependencies session get_linkable_hashes runtime_deps + maybe_recomp <- checkLinkableDependencies session get_linkable_hashes get_module_graph runtime_deps case maybe_recomp of Just msg -> do_regenerate msg Nothing @@ -1548,20 +1476,10 @@ parseRuntimeDeps anns = mkModuleEnv $ mapMaybe go anns -- the runtime dependencies of the module, to check if any of them are out of date -- Hopefully 
'runtime_deps' will be empty if the module didn't actually use TH -- See Note [Recompilation avoidance in the presence of TH] -checkLinkableDependencies :: MonadIO m => HscEnv -> ([NormalizedFilePath] -> m [BS.ByteString]) -> ModuleEnv BS.ByteString -> m (Maybe RecompileRequired) -checkLinkableDependencies hsc_env get_linkable_hashes runtime_deps = do -#if MIN_VERSION_ghc(9,3,0) - moduleLocs <- liftIO $ readIORef (fcModuleCache $ hsc_FC hsc_env) -#else - moduleLocs <- liftIO $ readIORef (hsc_FC hsc_env) -#endif - let go (mod, hash) = do - ifr <- lookupInstalledModuleEnv moduleLocs $ Compat.installedModule (toUnitId $ moduleUnit mod) (moduleName mod) - case ifr of - InstalledFound loc _ -> do - hs <- ml_hs_file loc - pure (toNormalizedFilePath' hs,hash) - _ -> Nothing +checkLinkableDependencies :: MonadIO m => HscEnv -> ([NormalizedFilePath] -> m [BS.ByteString]) -> m DependencyInformation -> ModuleEnv BS.ByteString -> m (Maybe RecompileRequired) +checkLinkableDependencies hsc_env get_linkable_hashes get_module_graph runtime_deps = do + graph <- get_module_graph + let go (mod, hash) = (,hash) <$> lookupModuleFile mod graph hs_files = mapM go (moduleEnvToList runtime_deps) case hs_files of Nothing -> error "invalid module graph" @@ -1575,27 +1493,16 @@ checkLinkableDependencies hsc_env get_linkable_hashes runtime_deps = do recompBecause :: String -> RecompileRequired recompBecause = -#if MIN_VERSION_ghc(9,3,0) NeedsRecompile . -#endif RecompBecause -#if MIN_VERSION_ghc(9,3,0) . CustomReason -#endif -#if MIN_VERSION_ghc(9,3,0) data SourceModified = SourceModified | SourceUnmodified deriving (Eq, Ord, Show) -#endif showReason :: RecompileRequired -> String -showReason UpToDate = "UpToDate" -#if MIN_VERSION_ghc(9,3,0) -showReason (NeedsRecompile MustCompile) = "MustCompile" -showReason (NeedsRecompile s) = printWithoutUniques s -#else -showReason MustCompile = "MustCompile" -showReason (RecompBecause s) = s -#endif +showReason UpToDate = "UpToDate" +showReason (NeedsRecompile MustCompile) = "MustCompile" +showReason (NeedsRecompile s) = printWithoutUniques s mkDetailsFromIface :: HscEnv -> ModIface -> IO ModDetails mkDetailsFromIface session iface = do @@ -1610,11 +1517,7 @@ coreFileToCgGuts session iface details core_file = do this_mod = mi_module iface types_var <- newIORef (md_types details) let hsc_env' = hscUpdateHPT act (session { -#if MIN_VERSION_ghc(9,3,0) hsc_type_env_vars = knotVarsFromModuleEnv (mkModuleEnv [(this_mod, types_var)]) -#else - hsc_type_env_var = Just (this_mod, types_var) -#endif }) core_binds <- initIfaceCheck (text "l") hsc_env' $ typecheckCoreFile this_mod types_var core_file -- Implicit binds aren't saved, so we need to regenerate them ourselves. 
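-- Aside: a hedged sketch of the idea behind the fine-grained TH check in
-- 'checkLinkableDependencies' above. Every module loaded at compile time for
-- TH has the hash of its core file recorded in 'runtime_deps'; the check
-- resolves each such module to its source file via the DependencyInformation
-- and requests recompilation as soon as a recorded hash no longer matches the
-- current one, roughly along these lines (names below are illustrative only):
--
-- > staleRuntimeDeps :: [(NormalizedFilePath, BS.ByteString)]  -- recorded hashes
-- >                  -> [BS.ByteString]                        -- current hashes, same order
-- >                  -> Maybe RecompileRequired
-- > staleRuntimeDeps recorded current =
-- >   case [ fp | ((fp, old), new) <- zip recorded current, old /= new ] of
-- >     []    -> Nothing
-- >     stale -> Just $ recompBecause $ "out-of-date runtime dependencies: " <> show stale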
@@ -1622,11 +1525,13 @@ coreFileToCgGuts session iface details core_file = do tyCons = typeEnvTyCons (md_types details) #if MIN_VERSION_ghc(9,5,0) -- In GHC 9.6, the implicit binds are tidied and part of core_binds - pure $ CgGuts this_mod tyCons core_binds [] NoStubs [] mempty (emptyHpcInfo False) Nothing [] -#elif MIN_VERSION_ghc(9,3,0) - pure $ CgGuts this_mod tyCons (_implicit_binds ++ core_binds) [] NoStubs [] mempty (emptyHpcInfo False) Nothing [] + pure $ CgGuts this_mod tyCons core_binds [] NoStubs [] mempty +#if !MIN_VERSION_ghc(9,11,0) + (emptyHpcInfo False) +#endif + Nothing [] #else - pure $ CgGuts this_mod tyCons (_implicit_binds ++ core_binds) NoStubs [] [] (emptyHpcInfo False) Nothing [] + pure $ CgGuts this_mod tyCons (_implicit_binds ++ core_binds) [] NoStubs [] mempty (emptyHpcInfo False) Nothing [] #endif coreFileToLinkable :: LinkableType -> HscEnv -> ModSummary -> ModIface -> ModDetails -> CoreFile -> UTCTime -> IO ([FileDiagnostic], Maybe HomeModInfo) @@ -1643,45 +1548,23 @@ coreFileToLinkable linkableType session ms iface details core_file t = do getDocsBatch :: HscEnv -> [Name] -#if MIN_VERSION_ghc(9,3,0) -> IO [Either String (Maybe [HsDoc GhcRn], IntMap (HsDoc GhcRn))] -#else - -> IO [Either String (Maybe HsDocString, IntMap HsDocString)] -#endif getDocsBatch hsc_env _names = do res <- initIfaceLoad hsc_env $ forM _names $ \name -> case nameModule_maybe name of Nothing -> return (Left $ NameHasNoModule name) Just mod -> do ModIface { -#if MIN_VERSION_ghc(9,3,0) mi_docs = Just Docs{ docs_mod_hdr = mb_doc_hdr , docs_decls = dmap , docs_args = amap } -#else - mi_doc_hdr = mb_doc_hdr - , mi_decl_docs = DeclDocMap dmap - , mi_arg_docs = ArgDocMap amap -#endif } <- loadSysInterface (text "getModuleInterface") mod -#if MIN_VERSION_ghc(9,3,0) if isNothing mb_doc_hdr && isNullUniqMap dmap && isNullUniqMap amap -#else - if isNothing mb_doc_hdr && Map.null dmap && null amap -#endif then pure (Left (NoDocsInIface mod $ compiled name)) else pure (Right ( -#if MIN_VERSION_ghc(9,3,0) lookupUniqMap dmap name, -#else - Map.lookup name dmap , -#endif -#if MIN_VERSION_ghc(9,3,0) lookupWithDefaultUniqMap amap mempty name)) -#else - Map.findWithDefault mempty name amap)) -#endif return $ map (first $ T.unpack . printOutputable) res where compiled n = @@ -1711,7 +1594,7 @@ lookupName hsc_env name = exceptionHandle $ do res <- initIfaceLoad hsc_env $ importDecl name case res of Util.Succeeded x -> return (Just x) - _ -> return Nothing + _ -> return Nothing where exceptionHandle x = x `catch` \(_ :: IOEnvFailure) -> pure Nothing @@ -1722,6 +1605,22 @@ pathToModuleName = mkModuleName . 
map rep rep ':' = '_' rep c = c +-- | Initialising plugins looks in the finder cache, but we know that the plugin doesn't come from a home module, so don't +-- error out when we don't find it +setNonHomeFCHook :: HscEnv -> HscEnv +setNonHomeFCHook hsc_env = +#if MIN_VERSION_ghc(9,11,0) + hsc_env { hsc_FC = (hsc_FC hsc_env) + { lookupFinderCache = \m@(GWIB im _) -> + if moduleUnit im `elem` hsc_all_home_unit_ids hsc_env + then pure (Just $ InstalledNotFound [] Nothing) + else lookupFinderCache (hsc_FC hsc_env) m + } + } +#else + hsc_env +#endif + {- Note [Guidelines For Using CPP In GHCIDE Import Statements] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ GHCIDE's interface with GHC is extensive, and unfortunately, because we have diff --git a/ghcide/src/Development/IDE/Core/FileStore.hs b/ghcide/src/Development/IDE/Core/FileStore.hs index 6c0cb875b0..3de21e175d 100644 --- a/ghcide/src/Development/IDE/Core/FileStore.hs +++ b/ghcide/src/Development/IDE/Core/FileStore.hs @@ -3,7 +3,10 @@ {-# LANGUAGE TypeFamilies #-} module Development.IDE.Core.FileStore( + getFileModTimeContents, getFileContents, + getUriContents, + getVersionedTextDoc, setFileModified, setSomethingModified, fileStoreRules, @@ -18,12 +21,13 @@ module Development.IDE.Core.FileStore( isWatchSupported, registerFileWatches, shareFilePath, - Log(..) + Log(..), ) where import Control.Concurrent.STM.Stats (STM, atomically) import Control.Concurrent.STM.TQueue (writeTQueue) import Control.Exception +import Control.Lens ((^.)) import Control.Monad.Extra import Control.Monad.IO.Class import qualified Data.Binary as B @@ -33,6 +37,7 @@ import qualified Data.HashMap.Strict as HashMap import Data.IORef import qualified Data.Text as T import qualified Data.Text as Text +import Data.Text.Utf16.Rope.Mixed (Rope) import Data.Time import Data.Time.Clock.POSIX import Development.IDE.Core.FileUtils @@ -56,13 +61,16 @@ import Ide.Logger (Pretty (pretty), logWith, viaShow, (<+>)) import qualified Ide.Logger as L -import Ide.Plugin.Config (CheckParents (..), - Config) +import Ide.Types +import qualified Language.LSP.Protocol.Lens as L import Language.LSP.Protocol.Message (toUntypedRegistration) import qualified Language.LSP.Protocol.Message as LSP import Language.LSP.Protocol.Types (DidChangeWatchedFilesRegistrationOptions (DidChangeWatchedFilesRegistrationOptions), FileSystemWatcher (..), - _watchers) + TextDocumentIdentifier (..), + VersionedTextDocumentIdentifier (..), + _watchers, + uriToNormalizedFilePath) import qualified Language.LSP.Protocol.Types as LSP import qualified Language.LSP.Server as LSP import Language.LSP.VFS @@ -175,20 +183,20 @@ getFileContentsRule recorder = define (cmapWithPrio LogShake recorder) $ \GetFil getFileContentsImpl :: NormalizedFilePath - -> Action ([FileDiagnostic], Maybe (FileVersion, Maybe T.Text)) + -> Action ([FileDiagnostic], Maybe (FileVersion, Maybe Rope)) getFileContentsImpl file = do -- need to depend on modification time to introduce a dependency with Cutoff time <- use_ GetModificationTime file res <- do mbVirtual <- getVirtualFile file - pure $ virtualFileText <$> mbVirtual + pure $ _file_text <$> mbVirtual pure ([], Just (time, res)) -- | Returns the modification time and the contents. -- For VFS paths, the modification time is the current time. 
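-- A minimal sketch of adapting a caller to the Rope-based contents accessors
-- introduced here (assuming "Data.Text.Utf16.Rope.Mixed" is imported qualified
-- as Rope, as Rules.hs and PluginUtils.hs do): the in-memory contents convert
-- to plain Text with 'Rope.toText' wherever Text is still required.
--
-- > getFileContentsAsText :: NormalizedFilePath -> Action (Maybe T.Text)
-- > getFileContentsAsText nfp = fmap Rope.toText <$> getFileContents nfp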
-getFileContents :: NormalizedFilePath -> Action (UTCTime, Maybe T.Text) -getFileContents f = do - (fv, txt) <- use_ GetFileContents f +getFileModTimeContents :: NormalizedFilePath -> Action (UTCTime, Maybe Rope) +getFileModTimeContents f = do + (fv, contents) <- use_ GetFileContents f modTime <- case modificationTime fv of Just t -> pure t Nothing -> do @@ -198,7 +206,29 @@ getFileContents f = do _ -> do posix <- getModTime $ fromNormalizedFilePath f pure $ posixSecondsToUTCTime posix - return (modTime, txt) + return (modTime, contents) + +getFileContents :: NormalizedFilePath -> Action (Maybe Rope) +getFileContents f = snd <$> use_ GetFileContents f + +getUriContents :: NormalizedUri -> Action (Maybe Rope) +getUriContents uri = + join <$> traverse getFileContents (uriToNormalizedFilePath uri) + +-- | Given a text document identifier, annotate it with the latest version. +-- +-- Like Language.LSP.Server.Core.getVersionedTextDoc, but gets the virtual file +-- from the Shake VFS rather than the LSP VFS. +getVersionedTextDoc :: TextDocumentIdentifier -> Action VersionedTextDocumentIdentifier +getVersionedTextDoc doc = do + let uri = doc ^. L.uri + mvf <- + maybe (pure Nothing) getVirtualFile $ + uriToNormalizedFilePath $ toNormalizedUri uri + let ver = case mvf of + Just (VirtualFile lspver _ _) -> lspver + Nothing -> 0 + return (VersionedTextDocumentIdentifier uri ver) fileStoreRules :: Recorder (WithPriority Log) -> (NormalizedFilePath -> Action Bool) -> Rules () fileStoreRules recorder isWatched = do @@ -303,4 +333,3 @@ shareFilePath k = unsafePerformIO $ do Just v -> (km, v) Nothing -> (HashMap.insert k k km, k) {-# NOINLINE shareFilePath #-} - diff --git a/ghcide/src/Development/IDE/Core/OfInterest.hs b/ghcide/src/Development/IDE/Core/OfInterest.hs index abcf6342a8..2a594c1021 100644 --- a/ghcide/src/Development/IDE/Core/OfInterest.hs +++ b/ghcide/src/Development/IDE/Core/OfInterest.hs @@ -29,6 +29,7 @@ import Development.IDE.Graph import Control.Concurrent.STM.Stats (atomically, modifyTVar') import Data.Aeson (toJSON) +import qualified Data.Aeson as Aeson import qualified Data.ByteString as BS import Data.Maybe (catMaybes) import Development.IDE.Core.ProgressReporting @@ -141,7 +142,7 @@ kick = do toJSON $ map fromNormalizedFilePath files signal (Proxy @"kick/start") - liftIO $ progressUpdate progress KickStarted + liftIO $ progressUpdate progress ProgressNewStarted -- Update the exports map results <- uses GenerateCore files @@ -152,7 +153,7 @@ kick = do let mguts = catMaybes results void $ liftIO $ atomically $ modifyTVar' exportsMap (updateExportsMapMg mguts) - liftIO $ progressUpdate progress KickCompleted + liftIO $ progressUpdate progress ProgressCompleted GarbageCollectVar var <- getIdeGlobalAction garbageCollectionScheduled <- liftIO $ readVar var diff --git a/ghcide/src/Development/IDE/Core/PluginUtils.hs b/ghcide/src/Development/IDE/Core/PluginUtils.hs index 76c88421c9..6ba633df26 100644 --- a/ghcide/src/Development/IDE/Core/PluginUtils.hs +++ b/ghcide/src/Development/IDE/Core/PluginUtils.hs @@ -1,6 +1,6 @@ {-# LANGUAGE GADTs #-} module Development.IDE.Core.PluginUtils -(-- Wrapped Action functions +(-- * Wrapped Action functions runActionE , runActionMT , useE @@ -9,13 +9,13 @@ module Development.IDE.Core.PluginUtils , usesMT , useWithStaleE , useWithStaleMT --- Wrapped IdeAction functions +-- * Wrapped IdeAction functions , runIdeActionE , runIdeActionMT , useWithStaleFastE , useWithStaleFastMT , uriToFilePathE --- Wrapped PositionMapping functions +-- * Wrapped 
PositionMapping functions , toCurrentPositionE , toCurrentPositionMT , fromCurrentPositionE @@ -23,8 +23,16 @@ module Development.IDE.Core.PluginUtils , toCurrentRangeE , toCurrentRangeMT , fromCurrentRangeE -, fromCurrentRangeMT) where - +, fromCurrentRangeMT +-- * Diagnostics +, activeDiagnosticsInRange +, activeDiagnosticsInRangeMT +-- * Formatting handlers +, mkFormattingHandlers) where + +import Control.Concurrent.STM +import Control.Lens ((^.)) +import Control.Monad.Error.Class (MonadError (throwError)) import Control.Monad.Extra import Control.Monad.IO.Class import Control.Monad.Reader (runReaderT) @@ -32,7 +40,10 @@ import Control.Monad.Trans.Except import Control.Monad.Trans.Maybe import Data.Functor.Identity import qualified Data.Text as T +import qualified Data.Text.Utf16.Rope.Mixed as Rope +import Development.IDE.Core.FileStore import Development.IDE.Core.PositionMapping +import Development.IDE.Core.Service (runAction) import Development.IDE.Core.Shake (IdeAction, IdeRule, IdeState (shakeExtras), mkDelayedAction, @@ -40,11 +51,17 @@ import Development.IDE.Core.Shake (IdeAction, IdeRule, import qualified Development.IDE.Core.Shake as Shake import Development.IDE.GHC.Orphans () import Development.IDE.Graph hiding (ShakeValue) +import Development.IDE.Types.Diagnostics import Development.IDE.Types.Location (NormalizedFilePath) import qualified Development.IDE.Types.Location as Location import qualified Ide.Logger as Logger import Ide.Plugin.Error +import Ide.PluginUtils (rangesOverlap) +import Ide.Types +import qualified Language.LSP.Protocol.Lens as LSP +import Language.LSP.Protocol.Message (SMethod (..)) import qualified Language.LSP.Protocol.Types as LSP +import qualified StmContainers.Map as STM -- ---------------------------------------------------------------------------- -- Action wrappers @@ -162,3 +179,77 @@ fromCurrentRangeE mapping = maybeToExceptT (PluginInvalidUserState "fromCurrentR -- |MaybeT version of `fromCurrentRange` fromCurrentRangeMT :: Monad m => PositionMapping -> LSP.Range -> MaybeT m LSP.Range fromCurrentRangeMT mapping = MaybeT . pure . fromCurrentRange mapping + +-- ---------------------------------------------------------------------------- +-- Diagnostics +-- ---------------------------------------------------------------------------- + +-- | @'activeDiagnosticsInRangeMT' shakeExtras nfp range@ computes the +-- 'FileDiagnostic' 's that HLS produced and overlap with the given @range@. +-- +-- This function is to be used whenever we need an authoritative source of truth +-- for which diagnostics are shown to the user. +-- These diagnostics can be used to provide various IDE features, for example +-- CodeActions, CodeLenses, or refactorings. +-- +-- However, why do we need this when computing 'CodeAction's? A 'CodeActionParam' +-- has the 'CodeActionContext' which already contains the diagnostics! +-- But according to the LSP docs, the server shouldn't rely that these Diagnostic +-- are actually up-to-date and accurately reflect the state of the document. +-- +-- From the LSP docs: +-- > An array of diagnostics known on the client side overlapping the range +-- > provided to the `textDocument/codeAction` request. They are provided so +-- > that the server knows which errors are currently presented to the user +-- > for the given range. There is no guarantee that these accurately reflect +-- > the error state of the resource. The primary parameter +-- > to compute code actions is the provided range. 
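--
-- For example, a code action handler is expected to do something along these
-- lines (a sketch; 'mkActionFor' is a hypothetical helper turning a diagnostic
-- into a fix and is not part of this change):
--
-- > codeActionsFor :: Shake.ShakeExtras -> NormalizedFilePath -> LSP.Range -> IO [LSP.CodeAction]
-- > codeActionsFor extras nfp range = do
-- >   mDiags <- activeDiagnosticsInRange extras nfp range
-- >   pure $ maybe [] (map mkActionFor) mDiags
--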
+-- +-- Thus, even when the client sends us the context, we should compute the +-- diagnostics on the server side. +activeDiagnosticsInRangeMT :: MonadIO m => Shake.ShakeExtras -> NormalizedFilePath -> LSP.Range -> MaybeT m [FileDiagnostic] +activeDiagnosticsInRangeMT ide nfp range = do + MaybeT $ liftIO $ atomically $ do + mDiags <- STM.lookup (LSP.normalizedFilePathToUri nfp) (Shake.publishedDiagnostics ide) + case mDiags of + Nothing -> pure Nothing + Just fileDiags -> do + pure $ Just $ filter diagRangeOverlaps fileDiags + where + diagRangeOverlaps = \fileDiag -> + rangesOverlap range (fileDiag ^. fdLspDiagnosticL . LSP.range) + +-- | Just like 'activeDiagnosticsInRangeMT'. See the docs of 'activeDiagnosticsInRangeMT' for details. +activeDiagnosticsInRange :: MonadIO m => Shake.ShakeExtras -> NormalizedFilePath -> LSP.Range -> m (Maybe [FileDiagnostic]) +activeDiagnosticsInRange ide nfp range = runMaybeT (activeDiagnosticsInRangeMT ide nfp range) + +-- ---------------------------------------------------------------------------- +-- Formatting handlers +-- ---------------------------------------------------------------------------- + +-- `mkFormattingHandlers` was moved here from hls-plugin-api package so that +-- `mkFormattingHandlers` can refer to `IdeState`. `IdeState` is defined in the +-- ghcide package, but hls-plugin-api does not depend on ghcide, so `IdeState` +-- is not in scope there. + +mkFormattingHandlers :: FormattingHandler IdeState -> PluginHandlers IdeState +mkFormattingHandlers f = mkPluginHandler SMethod_TextDocumentFormatting ( provider SMethod_TextDocumentFormatting) + <> mkPluginHandler SMethod_TextDocumentRangeFormatting (provider SMethod_TextDocumentRangeFormatting) + where + provider :: forall m. FormattingMethod m => SMethod m -> PluginMethodHandler IdeState m + provider m ide _pid params + | Just nfp <- LSP.uriToNormalizedFilePath $ LSP.toNormalizedUri uri = do + contentsMaybe <- liftIO $ runAction "mkFormattingHandlers" ide $ getFileContents nfp + case contentsMaybe of + Just contents -> do + let (typ, mtoken) = case m of + SMethod_TextDocumentFormatting -> (FormatText, params ^. LSP.workDoneToken) + SMethod_TextDocumentRangeFormatting -> (FormatRange (params ^. LSP.range), params ^. LSP.workDoneToken) + _ -> Prelude.error "mkFormattingHandlers: impossible" + f ide mtoken typ (Rope.toText contents) nfp opts + Nothing -> throwError $ PluginInvalidParams $ T.pack $ "Formatter plugin: could not get file contents for " ++ show uri + + | otherwise = throwError $ PluginInvalidParams $ T.pack $ "Formatter plugin: uriToFilePath failed for: " ++ show uri + where + uri = params ^. LSP.textDocument . LSP.uri + opts = params ^. LSP.options diff --git a/ghcide/src/Development/IDE/Core/Preprocessor.hs b/ghcide/src/Development/IDE/Core/Preprocessor.hs index 24a754870d..b3614d89ad 100644 --- a/ghcide/src/Development/IDE/Core/Preprocessor.hs +++ b/ghcide/src/Development/IDE/Core/Preprocessor.hs @@ -28,15 +28,11 @@ import Development.IDE.GHC.Error import Development.IDE.Types.Diagnostics import Development.IDE.Types.Location import qualified GHC.LanguageExtensions as LangExt +import qualified GHC.Runtime.Loader as Loader +import GHC.Utils.Logger (LogFlags (..)) import System.FilePath import System.IO.Extra --- See Note [Guidelines For Using CPP In GHCIDE Import Statements] - -#if MIN_VERSION_ghc(9,3,0) -import GHC.Utils.Logger (LogFlags (..)) -#endif - -- | Given a file and some contents, apply any necessary preprocessors, -- e.g. unlit/cpp. 
Return the resulting buffer and the DynFlags it implies. preprocessor :: HscEnv -> FilePath -> Maybe Util.StringBuffer -> ExceptT [FileDiagnostic] IO (Util.StringBuffer, [String], HscEnv, Util.Fingerprint) @@ -88,11 +84,7 @@ preprocessor env filename mbContents = do where logAction :: IORef [CPPLog] -> LogActionCompat logAction cppLogs dflags _reason severity srcSpan _style msg = do -#if MIN_VERSION_ghc(9,3,0) let cppLog = CPPLog (fromMaybe SevWarning severity) srcSpan $ T.pack $ renderWithContext (log_default_user_context dflags) msg -#else - let cppLog = CPPLog severity srcSpan $ T.pack $ showSDoc dflags msg -#endif modifyIORef cppLogs (cppLog :) @@ -112,7 +104,7 @@ data CPPDiag diagsFromCPPLogs :: FilePath -> [CPPLog] -> [FileDiagnostic] diagsFromCPPLogs filename logs = - map (\d -> (toNormalizedFilePath' filename, ShowDiag, cppDiagToDiagnostic d)) $ + map (\d -> ideErrorFromLspDiag (cppDiagToDiagnostic d) (toNormalizedFilePath' filename) Nothing) $ go [] logs where -- On errors, CPP calls logAction with a real span for the initial log and @@ -152,17 +144,13 @@ parsePragmasIntoHscEnv -> Util.StringBuffer -> IO (Either [FileDiagnostic] ([String], HscEnv)) parsePragmasIntoHscEnv env fp contents = catchSrcErrors dflags0 "pragmas" $ do -#if MIN_VERSION_ghc(9,3,0) let (_warns,opts) = getOptions (initParserOpts dflags0) contents fp -#else - let opts = getOptions dflags0 contents fp -#endif -- Force bits that might keep the dflags and stringBuffer alive unnecessarily evaluate $ rnf opts (dflags, _, _) <- parseDynamicFilePragma dflags0 opts - hsc_env' <- initializePlugins (hscSetFlags dflags env) + hsc_env' <- Loader.initializePlugins (hscSetFlags dflags env) return (map unLoc opts, hscSetFlags (disableWarningsAsErrors $ hsc_dflags hsc_env') hsc_env') where dflags0 = hsc_dflags env diff --git a/ghcide/src/Development/IDE/Core/ProgressReporting.hs b/ghcide/src/Development/IDE/Core/ProgressReporting.hs index b8c8a34d6f..3d8a2bf989 100644 --- a/ghcide/src/Development/IDE/Core/ProgressReporting.hs +++ b/ghcide/src/Development/IDE/Core/ProgressReporting.hs @@ -1,17 +1,25 @@ +{-# LANGUAGE ScopedTypeVariables #-} +{-# LANGUAGE TypeFamilies #-} + module Development.IDE.Core.ProgressReporting - ( ProgressEvent(..) - , ProgressReporting(..) - , noProgressReporting - , progressReporting - -- utilities, reexported for use in Core.Shake - , mRunLspT - , mRunLspTCallback - -- for tests - , recordProgress - , InProgressState(..) 
+ ( ProgressEvent (..), + PerFileProgressReporting (..), + ProgressReporting, + noPerFileProgressReporting, + progressReporting, + progressReportingNoTrace, + -- utilities, reexported for use in Core.Shake + mRunLspT, + mRunLspTCallback, + -- for tests + recordProgress, + InProgressState (..), + progressStop, + progressUpdate ) - where +where +import Control.Concurrent.STM (STM) import Control.Concurrent.STM.Stats (TVar, atomically, atomicallyNamed, modifyTVar', newTVarIO, readTVar, retry) @@ -23,7 +31,6 @@ import Control.Monad.Trans.Class (lift) import Data.Functor (($>)) import qualified Data.Text as T import Development.IDE.GHC.Orphans () -import Development.IDE.Graph hiding (ShakeValue) import Development.IDE.Types.Location import Development.IDE.Types.Options import qualified Focus @@ -33,119 +40,197 @@ import Language.LSP.Server (ProgressAmount (..), withProgress) import qualified Language.LSP.Server as LSP import qualified StmContainers.Map as STM -import UnliftIO (Async, async, cancel) +import UnliftIO (Async, async, bracket, cancel) data ProgressEvent - = KickStarted - | KickCompleted + = ProgressNewStarted + | ProgressCompleted + | ProgressStarted -data ProgressReporting = ProgressReporting - { progressUpdate :: ProgressEvent -> IO () - , inProgress :: forall a. NormalizedFilePath -> Action a -> Action a - , progressStop :: IO () +data ProgressReporting = ProgressReporting + { _progressUpdate :: ProgressEvent -> IO (), + _progressStop :: IO () + -- ^ we are using IO here because creating and stopping the `ProgressReporting` + -- is different from how we use it. } -noProgressReporting :: IO ProgressReporting -noProgressReporting = return $ ProgressReporting - { progressUpdate = const $ pure () - , inProgress = const id - , progressStop = pure () +data PerFileProgressReporting = PerFileProgressReporting + { + inProgress :: forall a. NormalizedFilePath -> IO a -> IO a, + -- ^ see Note [ProgressReporting API and InProgressState] + progressReportingInner :: ProgressReporting } +class ProgressReporter a where + progressUpdate :: a -> ProgressEvent -> IO () + progressStop :: a -> IO () + +instance ProgressReporter ProgressReporting where + progressUpdate = _progressUpdate + progressStop = _progressStop + +instance ProgressReporter PerFileProgressReporting where + progressUpdate = _progressUpdate . progressReportingInner + progressStop = _progressStop . progressReportingInner + +{- Note [ProgressReporting API and InProgressState] +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +The progress of tasks can be tracked in two ways: + +1. `ProgressReporting`: we have an internal state that actively tracks the progress. + Changes to the progress are made directly to this state. + +2. `ProgressReporting`: there is an external state that tracks the progress. + The external state is converted into an STM Int for the purpose of reporting progress. + +The `inProgress` function is only useful when we are using `ProgressReporting`. 
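As an illustration of the second style, the caller owns the counters and hands the
reporter nothing more than STM actions that read them (a sketch; the actual work
between the two progressUpdate calls is elided):

    exampleNoTrace :: Maybe (LSP.LanguageContextEnv c) -> IO ()
    exampleNoTrace lspEnv = do
      todo <- newTVarIO (10 :: Int)
      done <- newTVarIO 0
      reporter <- progressReportingNoTrace (readTVar todo) (readTVar done)
                      lspEnv (T.pack "Indexing") NoProgress
      progressUpdate reporter ProgressStarted
      -- work happens here, bumping 'done' via atomically . modifyTVar' (+ 1)
      progressUpdate reporter ProgressCompleted
      progressStop reporter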
+-} + +noProgressReporting :: ProgressReporting +noProgressReporting = ProgressReporting + { _progressUpdate = const $ pure (), + _progressStop = pure () + } +noPerFileProgressReporting :: IO PerFileProgressReporting +noPerFileProgressReporting = + return $ + PerFileProgressReporting + { inProgress = const id, + progressReportingInner = noProgressReporting + } + -- | State used in 'delayedProgressReporting' data State - = NotStarted - | Stopped - | Running (Async ()) + = NotStarted + | Stopped + | Running (Async ()) -- | State transitions used in 'delayedProgressReporting' data Transition = Event ProgressEvent | StopProgress updateState :: IO () -> Transition -> State -> IO State -updateState _ _ Stopped = pure Stopped -updateState start (Event KickStarted) NotStarted = Running <$> async start -updateState start (Event KickStarted) (Running job) = cancel job >> Running <$> async start -updateState _ (Event KickCompleted) (Running job) = cancel job $> NotStarted -updateState _ (Event KickCompleted) st = pure st -updateState _ StopProgress (Running job) = cancel job $> Stopped -updateState _ StopProgress st = pure st +updateState _ _ Stopped = pure Stopped +updateState start (Event ProgressNewStarted) NotStarted = Running <$> async start +updateState start (Event ProgressNewStarted) (Running job) = cancel job >> Running <$> async start +updateState start (Event ProgressStarted) NotStarted = Running <$> async start +updateState _ (Event ProgressStarted) (Running job) = return (Running job) +updateState _ (Event ProgressCompleted) (Running job) = cancel job $> NotStarted +updateState _ (Event ProgressCompleted) st = pure st +updateState _ StopProgress (Running job) = cancel job $> Stopped +updateState _ StopProgress st = pure st -- | Data structure to track progress across the project -data InProgressState = InProgressState - { todoVar :: TVar Int -- ^ Number of files to do - , doneVar :: TVar Int -- ^ Number of files done - , currentVar :: STM.Map NormalizedFilePath Int - } +-- see Note [ProgressReporting API and InProgressState] +data InProgressState + = InProgressState + { -- | Number of files to do + todoVar :: TVar Int, + -- | Number of files done + doneVar :: TVar Int, + currentVar :: STM.Map NormalizedFilePath Int + } newInProgress :: IO InProgressState newInProgress = InProgressState <$> newTVarIO 0 <*> newTVarIO 0 <*> STM.newIO recordProgress :: InProgressState -> NormalizedFilePath -> (Int -> Int) -> IO () -recordProgress InProgressState{..} file shift = do - (prev, new) <- atomicallyNamed "recordProgress" $ STM.focus alterPrevAndNew file currentVar - atomicallyNamed "recordProgress2" $ do - case (prev,new) of - (Nothing,0) -> modifyTVar' doneVar (+1) >> modifyTVar' todoVar (+1) - (Nothing,_) -> modifyTVar' todoVar (+1) - (Just 0, 0) -> pure () - (Just 0, _) -> modifyTVar' doneVar pred - (Just _, 0) -> modifyTVar' doneVar (+1) - (Just _, _) -> pure () +recordProgress InProgressState {..} file shift = do + (prev, new) <- atomicallyNamed "recordProgress" $ STM.focus alterPrevAndNew file currentVar + atomicallyNamed "recordProgress2" $ case (prev, new) of + (Nothing, 0) -> modifyTVar' doneVar (+ 1) >> modifyTVar' todoVar (+ 1) + (Nothing, _) -> modifyTVar' todoVar (+ 1) + (Just 0, 0) -> pure () + (Just 0, _) -> modifyTVar' doneVar pred + (Just _, 0) -> modifyTVar' doneVar (+ 1) + (Just _, _) -> pure () where alterPrevAndNew = do - prev <- Focus.lookup - Focus.alter alter - new <- Focus.lookupWithDefault 0 - return (prev, new) + prev <- Focus.lookup + Focus.alter alter + new <- 
Focus.lookupWithDefault 0 + return (prev, new) alter x = let x' = maybe (shift 0) shift x in Just x' -progressReporting - :: Maybe (LSP.LanguageContextEnv c) - -> ProgressReportingStyle - -> IO ProgressReporting -progressReporting Nothing _optProgressStyle = noProgressReporting -progressReporting (Just lspEnv) optProgressStyle = do - inProgressState <- newInProgress - progressState <- newVar NotStarted - let progressUpdate event = updateStateVar $ Event event - progressStop = updateStateVar StopProgress - updateStateVar = modifyVar_ progressState . updateState (lspShakeProgressNew inProgressState) - inProgress = updateStateForFile inProgressState - return ProgressReporting{..} - where - lspShakeProgressNew :: InProgressState -> IO () - lspShakeProgressNew InProgressState{..} = - LSP.runLspT lspEnv $ withProgress "Processing" Nothing NotCancellable $ \update -> loop update 0 - where - loop _ _ | optProgressStyle == NoProgress = forever $ liftIO $ threadDelay maxBound - loop update prevPct = do - (todo, done, nextPct) <- liftIO $ atomically $ do - todo <- readTVar todoVar - done <- readTVar doneVar - let nextFrac :: Double - nextFrac = if todo == 0 then 0 else fromIntegral done / fromIntegral todo - nextPct :: UInt - nextPct = floor $ 100 * nextFrac - when (nextPct == prevPct) retry - pure (todo, done, nextPct) - - _ <- update (ProgressAmount (Just nextPct) (Just $ T.pack $ show done <> "/" <> show todo)) - loop update nextPct - updateStateForFile inProgress file = actionBracket (f succ) (const $ f pred) . const - -- This functions are deliberately eta-expanded to avoid space leaks. - -- Do not remove the eta-expansion without profiling a session with at - -- least 1000 modifications. - where - f shift = recordProgress inProgress file shift - -mRunLspT :: Applicative m => Maybe (LSP.LanguageContextEnv c ) -> LSP.LspT c m () -> m () + +-- | `progressReportingNoTrace` initiates a new progress reporting session. +-- It functions similarly to `progressReporting`, but it utilizes an external state for progress tracking. +-- Refer to Note [ProgressReporting API and InProgressState] for more details. +progressReportingNoTrace :: + STM Int -> + STM Int -> + Maybe (LSP.LanguageContextEnv c) -> + T.Text -> + ProgressReportingStyle -> + IO ProgressReporting +progressReportingNoTrace _ _ Nothing _title _optProgressStyle = return noProgressReporting +progressReportingNoTrace todo done (Just lspEnv) title optProgressStyle = do + progressState <- newVar NotStarted + let _progressUpdate event = liftIO $ updateStateVar $ Event event + _progressStop = updateStateVar StopProgress + updateStateVar = modifyVar_ progressState . updateState (progressCounter lspEnv title optProgressStyle todo done) + return ProgressReporting {..} + +-- | `progressReporting` initiates a new progress reporting session. +-- It necessitates the active tracking of progress using the `inProgress` function. +-- Refer to Note [ProgressReporting API and InProgressState] for more details. 
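--
-- A sketch of typical usage of the per-file style: wrap the work for each file
-- in 'inProgress' so the reporter's counters are maintained automatically
-- (the 'pure ()' below stands in for the real per-file work):
--
-- > perFileExample :: PerFileProgressReporting -> [NormalizedFilePath] -> IO ()
-- > perFileExample reporter files =
-- >   mapM_ (\f -> inProgress reporter f (pure ())) files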
+progressReporting :: + Maybe (LSP.LanguageContextEnv c) -> + T.Text -> + ProgressReportingStyle -> + IO PerFileProgressReporting +progressReporting Nothing _title _optProgressStyle = noPerFileProgressReporting +progressReporting (Just lspEnv) title optProgressStyle = do + inProgressState <- newInProgress + progressReportingInner <- progressReportingNoTrace (readTVar $ todoVar inProgressState) + (readTVar $ doneVar inProgressState) (Just lspEnv) title optProgressStyle + let + inProgress :: NormalizedFilePath -> IO a -> IO a + inProgress = updateStateForFile inProgressState + return PerFileProgressReporting {..} + where + updateStateForFile inProgress file = UnliftIO.bracket (liftIO $ f succ) (const $ liftIO $ f pred) . const + where + -- This functions are deliberately eta-expanded to avoid space leaks. + -- Do not remove the eta-expansion without profiling a session with at + -- least 1000 modifications. + + f = recordProgress inProgress file + +-- Kill this to complete the progress session +progressCounter :: + LSP.LanguageContextEnv c -> + T.Text -> + ProgressReportingStyle -> + STM Int -> + STM Int -> + IO () +progressCounter lspEnv title optProgressStyle getTodo getDone = + LSP.runLspT lspEnv $ withProgress title Nothing NotCancellable $ \update -> loop update 0 + where + loop _ _ | optProgressStyle == NoProgress = forever $ liftIO $ threadDelay maxBound + loop update prevPct = do + (todo, done, nextPct) <- liftIO $ atomically $ do + todo <- getTodo + done <- getDone + let nextFrac :: Double + nextFrac = if todo == 0 then 0 else fromIntegral done / fromIntegral todo + nextPct :: UInt + nextPct = floor $ 100 * nextFrac + when (nextPct == prevPct) retry + pure (todo, done, nextPct) + + _ <- update (ProgressAmount (Just nextPct) (Just $ T.pack $ show done <> "/" <> show todo)) + loop update nextPct + +mRunLspT :: (Applicative m) => Maybe (LSP.LanguageContextEnv c) -> LSP.LspT c m () -> m () mRunLspT (Just lspEnv) f = LSP.runLspT lspEnv f mRunLspT Nothing _ = pure () -mRunLspTCallback :: Monad m - => Maybe (LSP.LanguageContextEnv c) - -> (LSP.LspT c m a -> LSP.LspT c m a) - -> m a - -> m a +mRunLspTCallback :: + (Monad m) => + Maybe (LSP.LanguageContextEnv c) -> + (LSP.LspT c m a -> LSP.LspT c m a) -> + m a -> + m a mRunLspTCallback (Just lspEnv) f g = LSP.runLspT lspEnv $ f (lift g) mRunLspTCallback Nothing _ g = g diff --git a/ghcide/src/Development/IDE/Core/RuleTypes.hs b/ghcide/src/Development/IDE/Core/RuleTypes.hs index 3d60669f5c..c4f88de047 100644 --- a/ghcide/src/Development/IDE/Core/RuleTypes.hs +++ b/ghcide/src/Development/IDE/Core/RuleTypes.hs @@ -24,7 +24,8 @@ import qualified Data.Map as M import Data.Time.Clock.POSIX import Data.Typeable import Development.IDE.GHC.Compat hiding - (HieFileResult) + (HieFileResult, + assert) import Development.IDE.GHC.Compat.Util import Development.IDE.GHC.CoreFile import Development.IDE.GHC.Util @@ -35,11 +36,12 @@ import Development.IDE.Types.KnownTargets import GHC.Generics (Generic) import Data.ByteString (ByteString) -import Data.Text (Text) +import Data.Text.Utf16.Rope.Mixed (Rope) import Development.IDE.Import.FindImports (ArtifactsLocation) import Development.IDE.Spans.Common import Development.IDE.Spans.LocalBindings import Development.IDE.Types.Diagnostics +import GHC.Driver.Errors.Types (WarningMessages) import GHC.Serialized (Serialized) import Ide.Logger (Pretty (..), viaShow) @@ -82,7 +84,7 @@ type instance RuleResult GetKnownTargets = KnownTargets type instance RuleResult GenerateCore = ModGuts data GenerateCore = GenerateCore - 
deriving (Eq, Show, Typeable, Generic) + deriving (Eq, Show, Generic) instance Hashable GenerateCore instance NFData GenerateCore @@ -102,12 +104,12 @@ instance NFData LinkableResult where rnf = rwhnf data GetLinkable = GetLinkable - deriving (Eq, Show, Typeable, Generic) + deriving (Eq, Show, Generic) instance Hashable GetLinkable instance NFData GetLinkable data GetImportMap = GetImportMap - deriving (Eq, Show, Typeable, Generic) + deriving (Eq, Show, Generic) instance Hashable GetImportMap instance NFData GetImportMap @@ -157,6 +159,8 @@ data TcModuleResult = TcModuleResult -- ^ Which modules did we need at runtime while compiling this file? -- Used for recompilation checking in the presence of TH -- Stores the hash of their core file + , tmrWarnings :: WarningMessages + -- ^ Structured warnings for this module. } instance Show TcModuleResult where show = show . pm_mod_summary . tmrParsed @@ -275,10 +279,12 @@ type instance RuleResult GetModIfaceFromDiskAndIndex = HiFileResult type instance RuleResult GetModIface = HiFileResult -- | Get the contents of a file, either dirty (if the buffer is modified) or Nothing to mean use from disk. -type instance RuleResult GetFileContents = (FileVersion, Maybe Text) +type instance RuleResult GetFileContents = (FileVersion, Maybe Rope) type instance RuleResult GetFileExists = Bool +type instance RuleResult GetFileHash = Fingerprint + type instance RuleResult AddWatchedFile = Bool @@ -329,16 +335,22 @@ instance Hashable GetFileContents instance NFData GetFileContents data GetFileExists = GetFileExists - deriving (Eq, Show, Typeable, Generic) + deriving (Eq, Show, Generic) instance NFData GetFileExists instance Hashable GetFileExists +data GetFileHash = GetFileHash + deriving (Eq, Show, Generic) + +instance NFData GetFileHash +instance Hashable GetFileHash + data FileOfInterestStatus = OnDisk | Modified { firstOpen :: !Bool -- ^ was this file just opened } - deriving (Eq, Show, Typeable, Generic) + deriving (Eq, Show, Generic) instance Hashable FileOfInterestStatus instance NFData FileOfInterestStatus @@ -346,7 +358,7 @@ instance Pretty FileOfInterestStatus where pretty = viaShow data IsFileOfInterestResult = NotFOI | IsFOI FileOfInterestStatus - deriving (Eq, Show, Typeable, Generic) + deriving (Eq, Show, Generic) instance Hashable IsFileOfInterestResult instance NFData IsFileOfInterestResult @@ -378,17 +390,17 @@ type instance RuleResult GetModSummary = ModSummaryResult type instance RuleResult GetModSummaryWithoutTimestamps = ModSummaryResult data GetParsedModule = GetParsedModule - deriving (Eq, Show, Typeable, Generic) + deriving (Eq, Show, Generic) instance Hashable GetParsedModule instance NFData GetParsedModule data GetParsedModuleWithComments = GetParsedModuleWithComments - deriving (Eq, Show, Typeable, Generic) + deriving (Eq, Show, Generic) instance Hashable GetParsedModuleWithComments instance NFData GetParsedModuleWithComments data GetLocatedImports = GetLocatedImports - deriving (Eq, Show, Typeable, Generic) + deriving (Eq, Show, Generic) instance Hashable GetLocatedImports instance NFData GetLocatedImports @@ -396,42 +408,42 @@ instance NFData GetLocatedImports type instance RuleResult NeedsCompilation = Maybe LinkableType data NeedsCompilation = NeedsCompilation - deriving (Eq, Show, Typeable, Generic) + deriving (Eq, Show, Generic) instance Hashable NeedsCompilation instance NFData NeedsCompilation data GetModuleGraph = GetModuleGraph - deriving (Eq, Show, Typeable, Generic) + deriving (Eq, Show, Generic) instance Hashable 
GetModuleGraph instance NFData GetModuleGraph data ReportImportCycles = ReportImportCycles - deriving (Eq, Show, Typeable, Generic) + deriving (Eq, Show, Generic) instance Hashable ReportImportCycles instance NFData ReportImportCycles data TypeCheck = TypeCheck - deriving (Eq, Show, Typeable, Generic) + deriving (Eq, Show, Generic) instance Hashable TypeCheck instance NFData TypeCheck data GetDocMap = GetDocMap - deriving (Eq, Show, Typeable, Generic) + deriving (Eq, Show, Generic) instance Hashable GetDocMap instance NFData GetDocMap data GetHieAst = GetHieAst - deriving (Eq, Show, Typeable, Generic) + deriving (Eq, Show, Generic) instance Hashable GetHieAst instance NFData GetHieAst data GetBindings = GetBindings - deriving (Eq, Show, Typeable, Generic) + deriving (Eq, Show, Generic) instance Hashable GetBindings instance NFData GetBindings data GhcSession = GhcSession - deriving (Eq, Show, Typeable, Generic) + deriving (Eq, Show, Generic) instance Hashable GhcSession instance NFData GhcSession @@ -440,7 +452,7 @@ newtype GhcSessionDeps = GhcSessionDeps_ -- Required for interactive evaluation, but leads to more cache invalidations fullModSummary :: Bool } - deriving newtype (Eq, Typeable, Hashable, NFData) + deriving newtype (Eq, Hashable, NFData) instance Show GhcSessionDeps where show (GhcSessionDeps_ False) = "GhcSessionDeps" @@ -450,45 +462,45 @@ pattern GhcSessionDeps :: GhcSessionDeps pattern GhcSessionDeps = GhcSessionDeps_ False data GetModIfaceFromDisk = GetModIfaceFromDisk - deriving (Eq, Show, Typeable, Generic) + deriving (Eq, Show, Generic) instance Hashable GetModIfaceFromDisk instance NFData GetModIfaceFromDisk data GetModIfaceFromDiskAndIndex = GetModIfaceFromDiskAndIndex - deriving (Eq, Show, Typeable, Generic) + deriving (Eq, Show, Generic) instance Hashable GetModIfaceFromDiskAndIndex instance NFData GetModIfaceFromDiskAndIndex data GetModIface = GetModIface - deriving (Eq, Show, Typeable, Generic) + deriving (Eq, Show, Generic) instance Hashable GetModIface instance NFData GetModIface data IsFileOfInterest = IsFileOfInterest - deriving (Eq, Show, Typeable, Generic) + deriving (Eq, Show, Generic) instance Hashable IsFileOfInterest instance NFData IsFileOfInterest data GetModSummaryWithoutTimestamps = GetModSummaryWithoutTimestamps - deriving (Eq, Show, Typeable, Generic) + deriving (Eq, Show, Generic) instance Hashable GetModSummaryWithoutTimestamps instance NFData GetModSummaryWithoutTimestamps data GetModSummary = GetModSummary - deriving (Eq, Show, Typeable, Generic) + deriving (Eq, Show, Generic) instance Hashable GetModSummary instance NFData GetModSummary -- See Note [Client configuration in Rules] -- | Get the client config stored in the ide state data GetClientSettings = GetClientSettings - deriving (Eq, Show, Typeable, Generic) + deriving (Eq, Show, Generic) instance Hashable GetClientSettings instance NFData GetClientSettings type instance RuleResult GetClientSettings = Hashed (Maybe Value) -data AddWatchedFile = AddWatchedFile deriving (Eq, Show, Typeable, Generic) +data AddWatchedFile = AddWatchedFile deriving (Eq, Show, Generic) instance Hashable AddWatchedFile instance NFData AddWatchedFile @@ -508,7 +520,7 @@ data IdeGhcSession = IdeGhcSession instance Show IdeGhcSession where show _ = "IdeGhcSession" instance NFData IdeGhcSession where rnf !_ = () -data GhcSessionIO = GhcSessionIO deriving (Eq, Show, Typeable, Generic) +data GhcSessionIO = GhcSessionIO deriving (Eq, Show, Generic) instance Hashable GhcSessionIO instance NFData GhcSessionIO diff --git 
a/ghcide/src/Development/IDE/Core/Rules.hs b/ghcide/src/Development/IDE/Core/Rules.hs index 13f6db6f69..74eddf55f1 100644 --- a/ghcide/src/Development/IDE/Core/Rules.hs +++ b/ghcide/src/Development/IDE/Core/Rules.hs @@ -64,6 +64,7 @@ import Control.Concurrent.Strict import Control.DeepSeq import Control.Exception (evaluate) import Control.Exception.Safe +import Control.Lens ((%~), (&), (.~)) import Control.Monad.Extra import Control.Monad.IO.Unlift import Control.Monad.Reader @@ -91,6 +92,7 @@ import Data.Maybe import Data.Proxy import qualified Data.Text as T import qualified Data.Text.Encoding as T +import qualified Data.Text.Utf16.Rope.Mixed as Rope import Data.Time (UTCTime (..)) import Data.Time.Clock.POSIX (posixSecondsToUTCTime) import Data.Tuple.Extra @@ -99,6 +101,7 @@ import Development.IDE.Core.Compile import Development.IDE.Core.FileExists hiding (Log, LogShake) import Development.IDE.Core.FileStore (getFileContents, + getFileModTimeContents, getModTime) import Development.IDE.Core.IdeConfiguration import Development.IDE.Core.OfInterest hiding (Log, @@ -159,6 +162,7 @@ import Ide.Types (DynFlagsModificat import Language.LSP.Protocol.Message (SMethod (SMethod_CustomMethod, SMethod_WindowShowMessage)) import Language.LSP.Protocol.Types (MessageType (MessageType_Info), ShowMessageParams (ShowMessageParams)) +import qualified Language.LSP.Protocol.Lens as JL import Language.LSP.Server (LspT) import qualified Language.LSP.Server as LSP import Language.LSP.VFS @@ -167,19 +171,10 @@ import System.Directory (doesFileExist) import System.Info.Extra (isWindows) -import GHC.Fingerprint - --- See Note [Guidelines For Using CPP In GHCIDE Import Statements] - -#if !MIN_VERSION_ghc(9,3,0) -import GHC (mgModSummaries) -#endif - -#if MIN_VERSION_ghc(9,3,0) import qualified Data.IntMap as IM -#endif - +import GHC.Fingerprint +import GHC.Driver.Env (hsc_all_home_unit_ids) data Log = LogShake Shake.Log @@ -230,10 +225,10 @@ toIdeResult = either (, Nothing) (([],) . Just) -- TODO: return text --> return rope getSourceFileSource :: NormalizedFilePath -> Action BS.ByteString getSourceFileSource nfp = do - (_, msource) <- getFileContents nfp + msource <- getFileContents nfp case msource of Nothing -> liftIO $ BS.readFile (fromNormalizedFilePath nfp) - Just source -> pure $ T.encodeUtf8 source + Just source -> pure $ T.encodeUtf8 $ Rope.toText source -- | Parse the contents of a haskell file. 
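
With GetFileContents now producing a Rope instead of a Text (see the RuleResult change earlier in this patch), callers convert the buffer explicitly, exactly as getSourceFileSource does above. A minimal sketch of that conversion pattern, assuming the text-rope package's Data.Text.Utf16.Rope.Mixed API; the helper name is illustrative, not from the patch:

    import qualified Data.ByteString as BS
    import qualified Data.Text.Encoding as T
    import qualified Data.Text.Utf16.Rope.Mixed as Rope

    -- Prefer the in-memory buffer (a Rope) when the file is open in the
    -- editor; otherwise fall back to reading the file from disk.
    sourceBytes :: Maybe Rope.Rope -> FilePath -> IO BS.ByteString
    sourceBytes Nothing     path = BS.readFile path
    sourceBytes (Just rope) _    = pure (T.encodeUtf8 (Rope.toText rope))
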
getParsedModule :: NormalizedFilePath -> Action (Maybe ParsedModule) @@ -324,18 +319,11 @@ getLocatedImportsRule recorder = (KnownTargets targets targetsMap) <- useNoFile_ GetKnownTargets let imports = [(False, imp) | imp <- ms_textual_imps ms] ++ [(True, imp) | imp <- ms_srcimps ms] env_eq <- use_ GhcSession file - let env = hscEnvWithImportPaths env_eq - let import_dirs = deps env_eq + let env = hscEnv env_eq + let import_dirs = map (second homeUnitEnv_dflags) $ hugElts $ hsc_HUG env let dflags = hsc_dflags env - isImplicitCradle = isNothing $ envImportPaths env_eq - let dflags' = if isImplicitCradle - then addRelativeImport file (moduleName $ ms_mod ms) dflags - else dflags opt <- getIdeOptions let getTargetFor modName nfp - | isImplicitCradle = do - itExists <- getFileExists nfp - return $ if itExists then Just nfp else Nothing | Just (TargetFile nfp') <- HM.lookup (TargetFile nfp) targetsMap = do -- reuse the existing NormalizedFilePath in order to maximize sharing itExists <- getFileExists nfp' @@ -346,10 +334,11 @@ getLocatedImportsRule recorder = nfp' = HM.lookupDefault nfp nfp ttmap itExists <- getFileExists nfp' return $ if itExists then Just nfp' else Nothing - | otherwise - = return Nothing + | otherwise = do + itExists <- getFileExists nfp + return $ if itExists then Just nfp else Nothing (diags, imports') <- fmap unzip $ forM imports $ \(isSource, (mbPkgName, modName)) -> do - diagOrImp <- locateModule (hscSetFlags dflags' env) import_dirs (optExtensions opt) getTargetFor modName mbPkgName isSource + diagOrImp <- locateModule (hscSetFlags dflags env) import_dirs (optExtensions opt) getTargetFor modName mbPkgName isSource case diagOrImp of Left diags -> pure (diags, Just (modName, Nothing)) Right (FileImport path) -> pure ([], Just (modName, Just path)) @@ -500,17 +489,9 @@ reportImportCyclesRule recorder = where cycleErrorInFile f (PartOfCycle imp fs) | f `elem` fs = Just (imp, fs) cycleErrorInFile _ _ = Nothing - toDiag imp mods = (fp , ShowDiag , ) $ Diagnostic - { _range = rng - , _severity = Just DiagnosticSeverity_Error - , _source = Just "Import cycle detection" - , _message = "Cyclic module dependency between " <> showCycle mods - , _code = Nothing - , _relatedInformation = Nothing - , _tags = Nothing - , _codeDescription = Nothing - , _data_ = Nothing - } + toDiag imp mods = + ideErrorWithSource (Just "Import cycle detection") (Just DiagnosticSeverity_Error) fp ("Cyclic module dependency between " <> showCycle mods) Nothing + & fdLspDiagnosticL %~ JL.range .~ rng where rng = fromMaybe noRange $ srcSpanToRange (getLoc imp) fp = toNormalizedFilePath' $ fromMaybe noFilePath $ srcSpanToFilename (getLoc imp) getModuleName file = do @@ -539,7 +520,12 @@ persistentHieFileRule recorder = addPersistentRule GetHieAst $ \file -> runMaybe getHieAstRuleDefinition :: NormalizedFilePath -> HscEnv -> TcModuleResult -> Action (IdeResult HieAstResult) getHieAstRuleDefinition f hsc tmr = do - (diags, masts) <- liftIO $ generateHieAsts hsc tmr + (diags, masts') <- liftIO $ generateHieAsts hsc tmr +#if MIN_VERSION_ghc(9,11,0) + let masts = fst <$> masts' +#else + let masts = masts' +#endif se <- getShakeExtras isFoi <- use_ IsFileOfInterest f @@ -549,7 +535,7 @@ getHieAstRuleDefinition f hsc tmr = do LSP.sendNotification (SMethod_CustomMethod (Proxy @"ghcide/reference/ready")) $ toJSON $ fromNormalizedFilePath f pure [] - _ | Just asts <- masts -> do + _ | Just asts <- masts' -> do source <- getSourceFileSource f let exports = tcg_exports $ tmrTypechecked tmr modSummary = tmrModSummary 
tmr @@ -630,6 +616,13 @@ knownFilesRule recorder = defineEarlyCutOffNoFile (cmapWithPrio LogShake recorde fs <- knownTargets pure (LBS.toStrict $ B.encode $ hash fs, unhashed fs) +getFileHashRule :: Recorder (WithPriority Log) -> Rules () +getFileHashRule recorder = + defineEarlyCutoff (cmapWithPrio LogShake recorder) $ Rule $ \GetFileHash file -> do + void $ use_ GetModificationTime file + fileHash <- liftIO $ Util.getFileHash (fromNormalizedFilePath file) + return (Just (fingerprintToBS fileHash), ([], Just fileHash)) + getModuleGraphRule :: Recorder (WithPriority Log) -> Rules () getModuleGraphRule recorder = defineEarlyCutOffNoFile (cmapWithPrio LogShake recorder) $ \GetModuleGraph -> do fs <- toKnownFiles <$> useNoFile_ GetKnownTargets @@ -641,7 +634,6 @@ dependencyInfoForFiles fs = do let (all_fs, _all_ids) = unzip $ HM.toList $ pathToIdMap $ rawPathIdMap rawDepInfo msrs <- uses GetModSummaryWithoutTimestamps all_fs let mss = map (fmap msrModSummary) msrs -#if MIN_VERSION_ghc(9,3,0) let deps = map (\i -> IM.lookup (getFilePathId i) (rawImports rawDepInfo)) _all_ids nodeKeys = IM.fromList $ catMaybes $ zipWith (\fi mms -> (getFilePathId fi,) . NodeKey_Module . msKey <$> mms) _all_ids mss mns = catMaybes $ zipWith go mss deps @@ -651,14 +643,6 @@ dependencyInfoForFiles fs = do go (Just ms) _ = Just $ ModuleNode [] ms go _ _ = Nothing mg = mkModuleGraph mns -#else - let mg = mkModuleGraph $ - -- We don't do any instantiation for backpack at this point of time, so it is OK to use - -- 'extendModSummaryNoDeps'. - -- This may have to change in the future. - map extendModSummaryNoDeps $ - catMaybes mss -#endif pure (fingerprintToBS $ Util.fingerprintFingerprints $ map (maybe fingerprint0 msrFingerprint) msrs, processDependencyInformation rawDepInfo bm mg) -- This is factored out so it can be directly called from the GetModIface @@ -675,6 +659,7 @@ typeCheckRuleDefinition hsc pm = do unlift <- askUnliftIO let dets = TypecheckHelpers { getLinkables = unliftIO unlift . uses_ GetLinkable + , getModuleGraph = unliftIO unlift $ useNoFile_ GetModuleGraph } addUsageDependencies $ liftIO $ typecheckModule defer hsc dets pm @@ -776,7 +761,6 @@ ghcSessionDepsDefinition fullModSummary GhcSessionDepsConfig{..} env file = do then depModuleGraph <$> useNoFile_ GetModuleGraph else do let mgs = map hsc_mod_graph depSessions -#if MIN_VERSION_ghc(9,3,0) -- On GHC 9.4+, the module graph contains not only ModSummary's but each `ModuleNode` in the graph -- also points to all the direct descendants of the current module. To get the keys for the descendants -- we must get their `ModSummary`s @@ -785,17 +769,10 @@ ghcSessionDepsDefinition fullModSummary GhcSessionDepsConfig{..} env file = do return $!! map (NodeKey_Module . msKey) dep_mss let module_graph_nodes = nubOrdOn mkNodeKey (ModuleNode final_deps ms : concatMap mgModSummaries' mgs) -#else - let module_graph_nodes = - -- We don't do any instantiation for backpack at this point of time, so it is OK to use - -- 'extendModSummaryNoDeps'. - -- This may have to change in the future. - map extendModSummaryNoDeps $ - nubOrdOn ms_mod (ms : concatMap mgModSummaries mgs) -#endif liftIO $ evaluate $ liftRnf rwhnf module_graph_nodes return $ mkModuleGraph module_graph_nodes - session' <- liftIO $ mergeEnvs hsc mg ms inLoadOrder depSessions + de <- useNoFile_ GetModuleGraph + session' <- liftIO $ mergeEnvs hsc mg de ms inLoadOrder depSessions -- Here we avoid a call to to `newHscEnvEqWithImportPaths`, which creates a new -- ExportsMap when it is called. 
We only need to create the ExportsMap once per @@ -824,9 +801,11 @@ getModIfaceFromDiskRule recorder = defineEarlyCutoff (cmapWithPrio LogShake reco , old_value = m_old , get_file_version = use GetModificationTime_{missingFileDiagnostics = False} , get_linkable_hashes = \fs -> map (snd . fromJust . hirCoreFp) <$> uses_ GetModIface fs + , get_module_graph = useNoFile_ GetModuleGraph , regenerate = regenerateHiFile session f ms } - r <- loadInterface (hscEnv session) ms linkableType recompInfo + hsc_env' <- setFileCacheHook (hscEnv session) + r <- loadInterface hsc_env' ms linkableType recompInfo case r of (diags, Nothing) -> return (Nothing, (diags, Nothing)) (diags, Just x) -> do @@ -894,23 +873,18 @@ getModSummaryRule displayTHWarning recorder = do defineEarlyCutoff (cmapWithPrio LogShake recorder) $ Rule $ \GetModSummary f -> do session' <- hscEnv <$> use_ GhcSession f modify_dflags <- getModifyDynFlags dynFlagsModifyGlobal - let session = hscSetFlags (modify_dflags $ hsc_dflags session') session' - (modTime, mFileContent) <- getFileContents f + let session = setNonHomeFCHook $ hscSetFlags (modify_dflags $ hsc_dflags session') session' -- TODO wz1000 + (modTime, mFileContent) <- getFileModTimeContents f let fp = fromNormalizedFilePath f modS <- liftIO $ runExceptT $ - getModSummaryFromImports session fp modTime (textToStringBuffer <$> mFileContent) + getModSummaryFromImports session fp modTime (textToStringBuffer . Rope.toText <$> mFileContent) case modS of Right res -> do -- Check for Template Haskell when (uses_th_qq $ msrModSummary res) $ do DisplayTHWarning act <- getIdeGlobalAction liftIO act -#if MIN_VERSION_ghc(9,3,0) let bufFingerPrint = ms_hs_hash (msrModSummary res) -#else - bufFingerPrint <- liftIO $ - fingerprintFromStringBuffer $ fromJust $ ms_hspp_buf $ msrModSummary res -#endif let fingerPrint = Util.fingerprintFingerprints [ msrFingerprint res, bufFingerPrint ] return ( Just (fingerprintToBS fingerPrint) , ([], Just res)) @@ -921,9 +895,6 @@ getModSummaryRule displayTHWarning recorder = do case mbMs of Just res@ModSummaryResult{..} -> do let ms = msrModSummary { -#if !MIN_VERSION_ghc(9,3,0) - ms_hs_date = error "use GetModSummary instead of GetModSummaryWithoutTimestamps", -#endif ms_hspp_buf = error "use GetModSummary instead of GetModSummaryWithoutTimestamps" } fp = fingerprintToBS msrFingerprint @@ -933,8 +904,9 @@ getModSummaryRule displayTHWarning recorder = do generateCore :: RunSimplifier -> NormalizedFilePath -> Action (IdeResult ModGuts) generateCore runSimplifier file = do packageState <- hscEnv <$> use_ GhcSessionDeps file + hsc' <- setFileCacheHook packageState tm <- use_ TypeCheck file - liftIO $ compileModule runSimplifier packageState (tmrModSummary tm) (tmrTypechecked tm) + liftIO $ compileModule runSimplifier hsc' (tmrModSummary tm) (tmrTypechecked tm) generateCoreRule :: Recorder (WithPriority Log) -> Rules () generateCoreRule recorder = @@ -949,14 +921,15 @@ getModIfaceRule recorder = defineEarlyCutoff (cmapWithPrio LogShake recorder) $ tmr <- use_ TypeCheck f linkableType <- getLinkableType f hsc <- hscEnv <$> use_ GhcSessionDeps f + hsc' <- setFileCacheHook hsc let compile = fmap ([],) $ use GenerateCore f se <- getShakeExtras - (diags, !mbHiFile) <- writeCoreFileIfNeeded se hsc linkableType compile tmr + (diags, !mbHiFile) <- writeCoreFileIfNeeded se hsc' linkableType compile tmr let fp = hiFileFingerPrint <$> mbHiFile hiDiags <- case mbHiFile of Just hiFile | OnDisk <- status - , not (tmrDeferredError tmr) -> liftIO $ writeHiFile se hsc hiFile + , 
not (tmrDeferredError tmr) -> liftIO $ writeHiFile se hsc' hiFile _ -> pure [] return (fp, (diags++hiDiags, mbHiFile)) NotFOI -> do @@ -980,12 +953,21 @@ incrementRebuildCount = do count <- getRebuildCountVar <$> getIdeGlobalAction liftIO $ atomically $ modifyTVar' count (+1) +setFileCacheHook :: HscEnv -> Action HscEnv +setFileCacheHook old_hsc_env = do +#if MIN_VERSION_ghc(9,11,0) + unlift <- askUnliftIO + return $ old_hsc_env { hsc_FC = (hsc_FC old_hsc_env) { lookupFileCache = unliftIO unlift . use_ GetFileHash . toNormalizedFilePath' } } +#else + return old_hsc_env +#endif + -- | Also generates and indexes the `.hie` file, along with the `.o` file if needed -- Invariant maintained is that if the `.hi` file was successfully written, then the -- `.hie` and `.o` file (if needed) were also successfully written regenerateHiFile :: HscEnvEq -> NormalizedFilePath -> ModSummary -> Maybe LinkableType -> Action ([FileDiagnostic], Maybe HiFileResult) regenerateHiFile sess f ms compNeeded = do - let hsc = hscEnv sess + hsc <- setFileCacheHook (hscEnv sess) opt <- getIdeOptions -- Embed haddocks in the interface file @@ -1081,10 +1063,16 @@ usePropertyByPathAction path plId p = do getLinkableRule :: Recorder (WithPriority Log) -> Rules () getLinkableRule recorder = defineEarlyCutoff (cmapWithPrio LogShake recorder) $ Rule $ \GetLinkable f -> do - ModSummaryResult{msrModSummary = ms} <- use_ GetModSummary f - HiFileResult{hirModIface, hirModDetails, hirCoreFp} <- use_ GetModIface f - let obj_file = ml_obj_file (ms_location ms) - core_file = ml_core_file (ms_location ms) + HiFileResult{hirModSummary, hirModIface, hirModDetails, hirCoreFp} <- use_ GetModIface f + let obj_file = ml_obj_file (ms_location hirModSummary) + core_file = ml_core_file (ms_location hirModSummary) +#if MIN_VERSION_ghc(9,11,0) + mkLinkable t mod l = Linkable t mod (pure l) + dotO o = DotO o ModuleObject +#else + mkLinkable t mod l = LM t mod [l] + dotO = DotO +#endif case hirCoreFp of Nothing -> error $ "called GetLinkable for a file without a linkable: " ++ show f Just (bin_core, fileHash) -> do @@ -1097,7 +1085,7 @@ getLinkableRule recorder = core_t <- liftIO $ getModTime core_file (warns, hmi) <- case linkableType of -- Bytecode needs to be regenerated from the core file - BCOLinkable -> liftIO $ coreFileToLinkable linkableType (hscEnv session) ms hirModIface hirModDetails bin_core (posixSecondsToUTCTime core_t) + BCOLinkable -> liftIO $ coreFileToLinkable linkableType (hscEnv session) hirModSummary hirModIface hirModDetails bin_core (posixSecondsToUTCTime core_t) -- Object code can be read from the disk ObjectLinkable -> do -- object file is up to date if it is newer than the core file @@ -1110,10 +1098,15 @@ getLinkableRule recorder = else pure Nothing case mobj_time of Just obj_t - | obj_t >= core_t -> pure ([], Just $ HomeModInfo hirModIface hirModDetails (justObjects $ LM (posixSecondsToUTCTime obj_t) (ms_mod ms) [DotO obj_file])) - _ -> liftIO $ coreFileToLinkable linkableType (hscEnv session) ms hirModIface hirModDetails bin_core (error "object doesn't have time") + | obj_t >= core_t -> pure ([], Just $ HomeModInfo hirModIface hirModDetails (justObjects $ mkLinkable (posixSecondsToUTCTime obj_t) (ms_mod hirModSummary) (dotO obj_file))) + _ -> liftIO $ coreFileToLinkable linkableType (hscEnv session) hirModSummary hirModIface hirModDetails bin_core (error "object doesn't have time") -- Record the linkable so we know not to unload it, and unload old versions - whenJust ((homeModInfoByteCode =<< hmi) <|> 
(homeModInfoObject =<< hmi)) $ \(LM time mod _) -> do + whenJust ((homeModInfoByteCode =<< hmi) <|> (homeModInfoObject =<< hmi)) +#if MIN_VERSION_ghc(9,11,0) + $ \(Linkable time mod _) -> do +#else + $ \(LM time mod _) -> do +#endif compiledLinkables <- getCompiledLinkables <$> getIdeGlobalAction liftIO $ modifyVar compiledLinkables $ \old -> do let !to_keep = extendModuleEnv old mod time @@ -1127,7 +1120,9 @@ getLinkableRule recorder = --just before returning it to be loaded. This has a substantial effect on recompile --times as the number of loaded modules and splices increases. -- - unload (hscEnv session) (map (\(mod', time') -> LM time' mod' []) $ moduleEnvToList to_keep) + --We use a dummy DotA linkable part to fake a NativeCode linkable. + --The unload function doesn't care about the exact linkable parts. + unload (hscEnv session) (map (\(mod', time') -> mkLinkable time' mod' (DotA "dummy")) $ moduleEnvToList to_keep) return (to_keep, ()) return (fileHash <$ hmi, (warns, LinkableResult <$> hmi <*> pure fileHash)) @@ -1225,12 +1220,13 @@ mainRule recorder RulesConfig{..} = do reportImportCyclesRule recorder typeCheckRule recorder getDocMapRule recorder - loadGhcSession recorder GhcSessionDepsConfig{fullModuleGraph} + loadGhcSession recorder def{fullModuleGraph} getModIfaceFromDiskRule recorder getModIfaceFromDiskAndIndexRule recorder getModIfaceRule recorder getModSummaryRule templateHaskellWarning recorder getModuleGraphRule recorder + getFileHashRule recorder knownFilesRule recorder getClientSettingsRule recorder getHieAstsRule recorder diff --git a/ghcide/src/Development/IDE/Core/Shake.hs b/ghcide/src/Development/IDE/Core/Shake.hs index 25493da9a4..97150339d0 100644 --- a/ghcide/src/Development/IDE/Core/Shake.hs +++ b/ghcide/src/Development/IDE/Core/Shake.hs @@ -73,7 +73,8 @@ module Development.IDE.Core.Shake( garbageCollectDirtyKeysOlderThan, Log(..), VFSModified(..), getClientConfigAction, - ThreadQueue(..) 
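
mainRule above now also registers getFileHashRule, making the new GetFileHash key (declared in RuleTypes earlier in this patch) available to every other rule; on GHC 9.11 and later, setFileCacheHook additionally points GHC's hsc_FC file cache at it. Because the rule serialises the hash with fingerprintToBS for early cutoff, dependents rebuild only when the file content actually changes. A hedged consumer sketch; the helper name is illustrative:

    -- Illustrative only: depend on GetFileHash from any Action and get a
    -- Fingerprint that is refreshed only when GetModificationTime changes.
    fileFingerprint :: NormalizedFilePath -> Action Fingerprint
    fileFingerprint = use_ GetFileHash
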
+ ThreadQueue(..), + runWithSignal ) where import Control.Concurrent.Async @@ -82,7 +83,7 @@ import Control.Concurrent.STM.Stats (atomicallyNamed) import Control.Concurrent.Strict import Control.DeepSeq import Control.Exception.Extra hiding (bracket_) -import Control.Lens ((&), (?~)) +import Control.Lens ((%~), (&), (?~)) import Control.Monad.Extra import Control.Monad.IO.Class import Control.Monad.Reader @@ -123,9 +124,14 @@ import Development.IDE.Core.FileUtils (getModTime) import Development.IDE.Core.PositionMapping import Development.IDE.Core.ProgressReporting import Development.IDE.Core.RuleTypes +import Development.IDE.Types.Options as Options +import qualified Language.LSP.Protocol.Message as LSP +import qualified Language.LSP.Server as LSP + import Development.IDE.Core.Tracing import Development.IDE.Core.WorkerThread import Development.IDE.GHC.Compat (NameCache, + NameCacheUpdater, initNameCache, knownKeyNames) import Development.IDE.GHC.Orphans () @@ -146,25 +152,21 @@ import qualified Development.IDE.Types.Exports as ExportsMap import Development.IDE.Types.KnownTargets import Development.IDE.Types.Location import Development.IDE.Types.Monitoring (Monitoring (..)) -import Development.IDE.Types.Options import Development.IDE.Types.Shake import qualified Focus import GHC.Fingerprint import GHC.Stack (HasCallStack) +import GHC.TypeLits (KnownSymbol) import HieDb.Types import Ide.Logger hiding (Priority) import qualified Ide.Logger as Logger import Ide.Plugin.Config import qualified Ide.PluginUtils as HLS -import Ide.Types (IdePlugins (IdePlugins), - PluginDescriptor (pluginId), - PluginId) -import Language.LSP.Diagnostics +import Ide.Types import qualified Language.LSP.Protocol.Lens as L import Language.LSP.Protocol.Message import Language.LSP.Protocol.Types import qualified Language.LSP.Protocol.Types as LSP -import qualified Language.LSP.Server as LSP import Language.LSP.VFS hiding (start) import qualified "list-t" ListT import OpenTelemetry.Eventlog hiding (addEvent) @@ -173,19 +175,8 @@ import qualified StmContainers.Map as STM import System.FilePath hiding (makeRelative) import System.IO.Unsafe (unsafePerformIO) import System.Time.Extra +import UnliftIO (MonadUnliftIO (withRunInIO)) --- See Note [Guidelines For Using CPP In GHCIDE Import Statements] - -#if !MIN_VERSION_ghc(9,3,0) -import Data.IORef -import Development.IDE.GHC.Compat (NameCacheUpdater (NCU), - mkSplitUniqSupply, - upNameCache) -#endif - -#if MIN_VERSION_ghc(9,3,0) -import Development.IDE.GHC.Compat (NameCacheUpdater) -#endif data Log = LogCreateHieDbExportsMapStart @@ -252,11 +243,10 @@ instance Pretty Log where -- a worker thread. 
data HieDbWriter = HieDbWriter - { indexQueue :: IndexQueue - , indexPending :: TVar (HMap.HashMap NormalizedFilePath Fingerprint) -- ^ Avoid unnecessary/out of date indexing - , indexCompleted :: TVar Int -- ^ to report progress - , indexProgressToken :: Var (Maybe LSP.ProgressToken) - -- ^ This is a Var instead of a TVar since we need to do IO to initialise/update, so we need a lock + { indexQueue :: IndexQueue + , indexPending :: TVar (HMap.HashMap NormalizedFilePath Fingerprint) -- ^ Avoid unnecessary/out of date indexing + , indexCompleted :: TVar Int -- ^ to report progress + , indexProgressReporting :: ProgressReporting } -- | Actions to queue up on the index worker thread @@ -290,7 +280,7 @@ data ShakeExtras = ShakeExtras ,state :: Values ,diagnostics :: STMDiagnosticStore ,hiddenDiagnostics :: STMDiagnosticStore - ,publishedDiagnostics :: STM.Map NormalizedUri [Diagnostic] + ,publishedDiagnostics :: STM.Map NormalizedUri [FileDiagnostic] -- ^ This represents the set of diagnostics that we have published. -- Due to debouncing not every change might get published. @@ -306,7 +296,7 @@ data ShakeExtras = ShakeExtras -- positions in a version of that document to positions in the latest version -- First mapping is delta from previous version and second one is an -- accumulation to the current version. - ,progress :: ProgressReporting + ,progress :: PerFileProgressReporting ,ideTesting :: IdeTesting -- ^ Whether to enable additional lsp messages used by the test suite for checking invariants ,restartShakeSession @@ -315,11 +305,7 @@ data ShakeExtras = ShakeExtras -> [DelayedAction ()] -> IO [Key] -> IO () -#if MIN_VERSION_ghc(9,3,0) ,ideNc :: NameCache -#else - ,ideNc :: IORef NameCache -#endif -- | A mapping of module name to known target (or candidate targets, if missing) ,knownTargetsVar :: TVar (Hashed KnownTargets) -- | A mapping of exported identifiers for local modules. 
Updated on kick @@ -677,12 +663,7 @@ shakeOpen recorder lspEnv defaultConfig idePlugins debouncer restartQueue = tRestartQueue threadQueue loaderQueue = tLoaderQueue threadQueue -#if MIN_VERSION_ghc(9,3,0) ideNc <- initNameCache 'r' knownKeyNames -#else - us <- mkSplitUniqSupply 'r' - ideNc <- newIORef (initNameCache us knownKeyNames) -#endif shakeExtras <- do globals <- newTVarIO HMap.empty state <- STM.newIO @@ -697,7 +678,10 @@ shakeOpen recorder lspEnv defaultConfig idePlugins debouncer indexPending <- newTVarIO HMap.empty indexCompleted <- newTVarIO 0 semanticTokensId <- newTVarIO 0 - indexProgressToken <- newVar Nothing + indexProgressReporting <- progressReportingNoTrace + (liftM2 (+) (length <$> readTVar indexPending) (readTVar indexCompleted)) + (readTVar indexCompleted) + lspEnv "Indexing" optProgressStyle let hiedbWriter = HieDbWriter{..} exportsMap <- newTVarIO mempty -- lazily initialize the exports map with the contents of the hiedb @@ -710,8 +694,8 @@ shakeOpen recorder lspEnv defaultConfig idePlugins debouncer progress <- if reportProgress - then progressReporting lspEnv optProgressStyle - else noProgressReporting + then progressReporting lspEnv "Processing" optProgressStyle + else noPerFileProgressReporting actionQueue <- newQueue let clientCapabilities = maybe def LSP.resClientCapabilities lspEnv @@ -775,6 +759,7 @@ shakeShut IdeState{..} = do for_ runner cancelShakeSession void $ shakeDatabaseProfile shakeDb progressStop $ progress shakeExtras + progressStop $ indexProgressReporting $ hiedbWriter shakeExtras stopMonitoring @@ -1080,13 +1065,8 @@ askShake :: IdeAction ShakeExtras askShake = ask -#if MIN_VERSION_ghc(9,3,0) mkUpdater :: NameCache -> NameCacheUpdater mkUpdater = id -#else -mkUpdater :: IORef NameCache -> NameCacheUpdater -mkUpdater ref = NCU (upNameCache ref) -#endif -- | A (maybe) stale result now, and an up to date one later data FastResult a = FastResult { stale :: Maybe (a,PositionMapping), uptoDate :: IO (Maybe a) } @@ -1192,7 +1172,7 @@ defineEarlyCutoff recorder (Rule op) = addRule $ \(Q (key, file)) (old :: Maybe extras <- getShakeExtras let diagnostics ver diags = do traceDiagnostics diags - updateFileDiagnostics recorder file ver (newKey key) extras . map (\(_,y,z) -> (y,z)) $ diags + updateFileDiagnostics recorder file ver (newKey key) extras diags defineEarlyCutoff' diagnostics (==) key file old mode $ const $ op key file defineEarlyCutoff recorder (RuleNoDiagnostics op) = addRule $ \(Q (key, file)) (old :: Maybe BS.ByteString) mode -> otTracedAction key file mode traceA $ \traceDiagnostics -> do let diagnostics _ver diags = do @@ -1211,7 +1191,7 @@ defineEarlyCutoff recorder (RuleWithOldValue op) = addRule $ \(Q (key, file)) (o extras <- getShakeExtras let diagnostics ver diags = do traceDiagnostics diags - updateFileDiagnostics recorder file ver (newKey key) extras . 
map (\(_,y,z) -> (y,z)) $ diags + updateFileDiagnostics recorder file ver (newKey key) extras diags defineEarlyCutoff' diagnostics (==) key file old mode $ op key file defineNoFile :: IdeRule k v => Recorder (WithPriority Log) -> (k -> Action v) -> Rules () @@ -1238,7 +1218,8 @@ defineEarlyCutoff' defineEarlyCutoff' doDiagnostics cmp key file mbOld mode action = do ShakeExtras{state, progress, dirtyKeys} <- getShakeExtras options <- getIdeOptions - (if optSkipProgress options key then id else inProgress progress file) $ do + let trans g x = withRunInIO $ \run -> g (run x) + (if optSkipProgress options key then id else trans (inProgress progress file)) $ do val <- case mbOld of Just old | mode == RunDependenciesSame -> do mbValue <- liftIO $ atomicallyNamed "define - read 1" $ getValues state key file @@ -1265,7 +1246,7 @@ defineEarlyCutoff' doDiagnostics cmp key file mbOld mode action = do (mbBs, (diags, mbRes)) <- actionCatch (do v <- action staleV; liftIO $ evaluate $ force v) $ \(e :: SomeException) -> do - pure (Nothing, ([ideErrorText file $ T.pack $ show e | not $ isBadDependency e],Nothing)) + pure (Nothing, ([ideErrorText file (T.pack $ show (key, file) ++ show e) | not $ isBadDependency e],Nothing)) ver <- estimateFileVersionUnsafely key mbRes file (bs, res) <- case mbRes of @@ -1347,52 +1328,51 @@ updateFileDiagnostics :: MonadIO m -> Maybe Int32 -> Key -> ShakeExtras - -> [(ShowDiagnostic,Diagnostic)] -- ^ current results + -> [FileDiagnostic] -- ^ current results -> m () -updateFileDiagnostics recorder fp ver k ShakeExtras{diagnostics, hiddenDiagnostics, publishedDiagnostics, debouncer, lspEnv, ideTesting} current0 = +updateFileDiagnostics recorder fp ver k ShakeExtras{diagnostics, hiddenDiagnostics, publishedDiagnostics, debouncer, lspEnv, ideTesting} current0 = do liftIO $ withTrace ("update diagnostics " <> fromString(fromNormalizedFilePath fp)) $ \ addTag -> do addTag "key" (show k) - let (currentShown, currentHidden) = partition ((== ShowDiag) . fst) current + let (currentShown, currentHidden) = partition ((== ShowDiag) . fdShouldShowDiagnostic) current uri = filePathToUri' fp addTagUnsafe :: String -> String -> String -> a -> a addTagUnsafe msg t x v = unsafePerformIO(addTag (msg <> t) x) `seq` v - update :: (forall a. String -> String -> a -> a) -> [Diagnostic] -> STMDiagnosticStore -> STM [Diagnostic] + update :: (forall a. String -> String -> a -> a) -> [FileDiagnostic] -> STMDiagnosticStore -> STM [FileDiagnostic] update addTagUnsafeMethod new store = addTagUnsafeMethod "count" (show $ Prelude.length new) $ setStageDiagnostics addTagUnsafeMethod uri ver (renderKey k) new store - current = second diagsFromRule <$> current0 + current = map (fdLspDiagnosticL %~ diagsFromRule) current0 addTag "version" (show ver) mask_ $ do -- Mask async exceptions to ensure that updated diagnostics are always -- published. Otherwise, we might never publish certain diagnostics if -- an exception strikes between modifyVar but before -- publishDiagnosticsNotification. 
- newDiags <- liftIO $ atomicallyNamed "diagnostics - update" $ update (addTagUnsafe "shown ") (map snd currentShown) diagnostics - _ <- liftIO $ atomicallyNamed "diagnostics - hidden" $ update (addTagUnsafe "hidden ") (map snd currentHidden) hiddenDiagnostics + newDiags <- liftIO $ atomicallyNamed "diagnostics - update" $ update (addTagUnsafe "shown ") currentShown diagnostics + _ <- liftIO $ atomicallyNamed "diagnostics - hidden" $ update (addTagUnsafe "hidden ") currentHidden hiddenDiagnostics let uri' = filePathToUri' fp let delay = if null newDiags then 0.1 else 0 registerEvent debouncer delay uri' $ withTrace ("report diagnostics " <> fromString (fromNormalizedFilePath fp)) $ \tag -> do - join $ mask_ $ do - lastPublish <- atomicallyNamed "diagnostics - publish" $ STM.focus (Focus.lookupWithDefault [] <* Focus.insert newDiags) uri' publishedDiagnostics - let action = when (lastPublish /= newDiags) $ case lspEnv of + join $ mask_ $ do + lastPublish <- atomicallyNamed "diagnostics - publish" $ STM.focus (Focus.lookupWithDefault [] <* Focus.insert newDiags) uri' publishedDiagnostics + let action = when (lastPublish /= newDiags) $ case lspEnv of Nothing -> -- Print an LSP event. - logWith recorder Info $ LogDiagsDiffButNoLspEnv (map (fp, ShowDiag,) newDiags) + logWith recorder Info $ LogDiagsDiffButNoLspEnv newDiags Just env -> LSP.runLspT env $ do liftIO $ tag "count" (show $ Prelude.length newDiags) liftIO $ tag "key" (show k) LSP.sendNotification SMethod_TextDocumentPublishDiagnostics $ - LSP.PublishDiagnosticsParams (fromNormalizedUri uri') (fmap fromIntegral ver) newDiags - return action + LSP.PublishDiagnosticsParams (fromNormalizedUri uri') (fmap fromIntegral ver) (map fdLspDiagnostic newDiags) + return action where diagsFromRule :: Diagnostic -> Diagnostic diagsFromRule c@Diagnostic{_range} | coerce ideTesting = c & L.relatedInformation ?~ - [ - DiagnosticRelatedInformation + [ DiagnosticRelatedInformation (Location (filePathToUri $ fromNormalizedFilePath fp) _range ) (T.pack $ show k) - ] + ] | otherwise = c @@ -1403,26 +1383,28 @@ actionLogger :: Action (Recorder (WithPriority Log)) actionLogger = shakeRecorder <$> getShakeExtras -------------------------------------------------------------------------------- -type STMDiagnosticStore = STM.Map NormalizedUri StoreItem +type STMDiagnosticStore = STM.Map NormalizedUri StoreItem' +data StoreItem' = StoreItem' (Maybe Int32) FileDiagnosticsBySource +type FileDiagnosticsBySource = Map.Map (Maybe T.Text) (SL.SortedList FileDiagnostic) -getDiagnosticsFromStore :: StoreItem -> [Diagnostic] -getDiagnosticsFromStore (StoreItem _ diags) = concatMap SL.fromSortedList $ Map.elems diags +getDiagnosticsFromStore :: StoreItem' -> [FileDiagnostic] +getDiagnosticsFromStore (StoreItem' _ diags) = concatMap SL.fromSortedList $ Map.elems diags updateSTMDiagnostics :: (forall a. String -> String -> a -> a) -> STMDiagnosticStore -> NormalizedUri -> Maybe Int32 -> - DiagnosticsBySource -> - STM [LSP.Diagnostic] + FileDiagnosticsBySource -> + STM [FileDiagnostic] updateSTMDiagnostics addTag store uri mv newDiagsBySource = getDiagnosticsFromStore . 
fromJust <$> STM.focus (Focus.alter update *> Focus.lookup) uri store where - update (Just(StoreItem mvs dbs)) + update (Just(StoreItem' mvs dbs)) | addTag "previous version" (show mvs) $ addTag "previous count" (show $ Prelude.length $ filter (not.null) $ Map.elems dbs) False = undefined - | mvs == mv = Just (StoreItem mv (newDiagsBySource <> dbs)) - update _ = Just (StoreItem mv newDiagsBySource) + | mvs == mv = Just (StoreItem' mv (newDiagsBySource <> dbs)) + update _ = Just (StoreItem' mv newDiagsBySource) -- | Sets the diagnostics for a file and compilation step -- if you want to clear the diagnostics call this with an empty list @@ -1431,9 +1413,9 @@ setStageDiagnostics -> NormalizedUri -> Maybe Int32 -- ^ the time that the file these diagnostics originate from was last edited -> T.Text - -> [LSP.Diagnostic] + -> [FileDiagnostic] -> STMDiagnosticStore - -> STM [LSP.Diagnostic] + -> STM [FileDiagnostic] setStageDiagnostics addTag uri ver stage diags ds = updateSTMDiagnostics addTag ds uri ver updatedDiags where !updatedDiags = Map.singleton (Just stage) $! SL.toSortedList diags @@ -1442,7 +1424,7 @@ getAllDiagnostics :: STMDiagnosticStore -> STM [FileDiagnostic] getAllDiagnostics = - fmap (concatMap (\(k,v) -> map (fromUri k,ShowDiag,) $ getDiagnosticsFromStore v)) . ListT.toList . STM.listT + fmap (concatMap (\(_,v) -> getDiagnosticsFromStore v)) . ListT.toList . STM.listT updatePositionMapping :: IdeState -> VersionedTextDocumentIdentifier -> [TextDocumentContentChangeEvent] -> STM () updatePositionMapping IdeState{shakeExtras = ShakeExtras{positionMapping}} VersionedTextDocumentIdentifier{..} changes = @@ -1464,3 +1446,19 @@ updatePositionMappingHelper ver changes mappingForUri = snd $ EM.mapAccumRWithKey (\acc _k (delta, _) -> let new = addOldDelta delta acc in (new, (delta, acc))) zeroMapping (EM.insert ver (mkDelta changes, zeroMapping) mappingForUri) + +-- | sends a signal whenever shake session is run/restarted +-- being used in cabal and hlint plugin tests to know when its time +-- to look for file diagnostics +kickSignal :: KnownSymbol s => Bool -> Maybe (LSP.LanguageContextEnv c) -> [NormalizedFilePath] -> Proxy s -> Action () +kickSignal testing lspEnv files msg = when testing $ liftIO $ mRunLspT lspEnv $ + LSP.sendNotification (LSP.SMethod_CustomMethod msg) $ + toJSON $ map fromNormalizedFilePath files + +-- | Add kick start/done signal to rule +runWithSignal :: (KnownSymbol s0, KnownSymbol s1, IdeRule k v) => Proxy s0 -> Proxy s1 -> [NormalizedFilePath] -> k -> Action () +runWithSignal msgStart msgEnd files rule = do + ShakeExtras{ideTesting = Options.IdeTesting testing, lspEnv} <- getShakeExtras + kickSignal testing lspEnv files msgStart + void $ uses rule files + kickSignal testing lspEnv files msgEnd diff --git a/ghcide/src/Development/IDE/Core/Tracing.hs b/ghcide/src/Development/IDE/Core/Tracing.hs index b55dcc7af5..34839faaee 100644 --- a/ghcide/src/Development/IDE/Core/Tracing.hs +++ b/ghcide/src/Development/IDE/Core/Tracing.hs @@ -61,7 +61,7 @@ withTelemetryRecorder k = withSpan "Logger" $ \sp -> -- | Returns a logger that produces telemetry events in a single span. 
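
The kickSignal and runWithSignal helpers defined above give plugin test suites a deterministic pair of custom notifications around a batch of rule invocations, sent only when IdeTesting is enabled. A hedged usage sketch; the method names and the choice of the TypeCheck rule are illustrative stand-ins, not what any particular plugin uses:

    {-# LANGUAGE DataKinds #-}
    {-# LANGUAGE TypeApplications #-}
    import Data.Proxy (Proxy (..))

    -- Illustrative only: announce a start signal, typecheck the files of
    -- interest, then announce completion so tests know diagnostics are ready.
    kickTypecheck :: [NormalizedFilePath] -> Action ()
    kickTypecheck files =
      runWithSignal (Proxy @"kick/start") (Proxy @"kick/done") files TypeCheck
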
telemetryLogRecorder :: SpanInFlight -> Recorder (WithPriority (Doc a)) telemetryLogRecorder sp = Recorder $ \WithPriority {..} -> - liftIO $ addEvent sp (fromString $ show priority) (encodeUtf8 $ trim $ renderStrict $ layoutCompact $ payload) + liftIO $ addEvent sp (fromString $ show priority) (encodeUtf8 $ trim $ renderStrict $ layoutCompact payload) where -- eventlog message size is limited by EVENT_PAYLOAD_SIZE_MAX = STG_WORD16_MAX trim = T.take (fromIntegral(maxBound :: Word16) - 10) diff --git a/ghcide/src/Development/IDE/Core/WorkerThread.hs b/ghcide/src/Development/IDE/Core/WorkerThread.hs index a38da77f38..6d141c7ef3 100644 --- a/ghcide/src/Development/IDE/Core/WorkerThread.hs +++ b/ghcide/src/Development/IDE/Core/WorkerThread.hs @@ -10,10 +10,13 @@ module Development.IDE.Core.WorkerThread (withWorkerQueue, awaitRunInThread) where -import Control.Concurrent.Async (withAsync) +import Control.Concurrent.Async (AsyncCancelled (AsyncCancelled), + withAsync) import Control.Concurrent.STM import Control.Concurrent.Strict (newBarrier, signalBarrier, waitBarrier) +import Control.Exception.Safe (Exception (fromException), + SomeException, throwIO, try) import Control.Monad (forever) import Control.Monad.Cont (ContT (ContT)) @@ -42,13 +45,15 @@ withWorkerQueue workerAction = ContT $ \mainAction -> do workerAction l -- | 'awaitRunInThread' queues up an 'IO' action to be run by a worker thread, --- and then blocks until the result is computed. +-- and then blocks until the result is computed. If the action throws an +-- non-async exception, it is rethrown in the calling thread. awaitRunInThread :: TQueue (IO ()) -> IO result -> IO result awaitRunInThread q act = do -- Take an action from TQueue, run it and -- use barrier to wait for the result barrier <- newBarrier - atomically $ writeTQueue q $ do - res <- act - signalBarrier barrier res - waitBarrier barrier + atomically $ writeTQueue q $ try act >>= signalBarrier barrier + resultOrException <- waitBarrier barrier + case resultOrException of + Left e -> throwIO (e :: SomeException) + Right r -> return r diff --git a/ghcide/src/Development/IDE/GHC/CPP.hs b/ghcide/src/Development/IDE/GHC/CPP.hs index b0ec869e24..bb4c4e4e81 100644 --- a/ghcide/src/Development/IDE/GHC/CPP.hs +++ b/ghcide/src/Development/IDE/GHC/CPP.hs @@ -22,11 +22,7 @@ import GHC.Settings -- See Note [Guidelines For Using CPP In GHCIDE Import Statements] -#if !MIN_VERSION_ghc(9,3,0) -import qualified GHC.Driver.Pipeline as Pipeline -#endif - -#if MIN_VERSION_ghc(9,3,0) && !MIN_VERSION_ghc(9,5,0) +#if !MIN_VERSION_ghc(9,5,0) import qualified GHC.Driver.Pipeline.Execute as Pipeline #endif @@ -34,6 +30,10 @@ import qualified GHC.Driver.Pipeline.Execute as Pipeline import qualified GHC.SysTools.Cpp as Pipeline #endif +#if MIN_VERSION_ghc(9,10,2) +import qualified GHC.SysTools.Tasks as Pipeline +#endif + #if MIN_VERSION_ghc(9,11,0) import qualified GHC.SysTools.Tasks as Pipeline #endif @@ -56,7 +56,7 @@ doCpp env input_fn output_fn = #if MIN_VERSION_ghc(9,5,0) let cpp_opts = Pipeline.CppOpts { cppLinePragmas = True -#if MIN_VERSION_ghc(9,11,0) +#if MIN_VERSION_ghc(9,10,2) , sourceCodePreprocessor = Pipeline.SCPHsCpp #elif MIN_VERSION_ghc(9,10,0) , useHsCpp = True diff --git a/ghcide/src/Development/IDE/GHC/Compat.hs b/ghcide/src/Development/IDE/GHC/Compat.hs index 8e138ce56b..6a2ae5b77a 100644 --- a/ghcide/src/Development/IDE/GHC/Compat.hs +++ b/ghcide/src/Development/IDE/GHC/Compat.hs @@ -98,20 +98,9 @@ module Development.IDE.GHC.Compat( extract_cons, recDotDot, -#if 
!MIN_VERSION_ghc(9,3,0) - Dependencies(dep_mods), - NameCacheUpdater(NCU), - extendModSummaryNoDeps, - emsModSummary, - nonDetNameEnvElts, - nonDetOccEnvElts, - upNameCache, -#endif -#if MIN_VERSION_ghc(9,3,0) Dependencies(dep_direct_mods), NameCacheUpdater, -#endif #if MIN_VERSION_ghc(9,5,0) XModulePs(..), @@ -180,6 +169,7 @@ import GHC.Builtin.Uniques import GHC.ByteCode.Types import GHC.CoreToStg import GHC.Data.Maybe +import GHC.Driver.Config.Stg.Pipeline import GHC.Driver.Env as Env import GHC.Iface.Env import GHC.Linker.Loader (loadDecls, loadExpr) @@ -192,24 +182,12 @@ import GHC.Types.IPE import GHC.Types.SrcLoc (combineRealSrcSpans) import GHC.Unit.Home.ModInfo (HomePackageTable, lookupHpt) +import GHC.Unit.Module.Deps (Dependencies (dep_direct_mods), + Usage (..)) import GHC.Unit.Module.ModIface -- See Note [Guidelines For Using CPP In GHCIDE Import Statements] -#if !MIN_VERSION_ghc(9,3,0) -import Data.IORef -import GHC.Runtime.Interpreter -import GHC.Unit.Module.Deps (Dependencies (dep_mods), - Usage (..)) -import GHC.Unit.Module.ModSummary -#endif - -#if MIN_VERSION_ghc(9,3,0) -import GHC.Driver.Config.Stg.Pipeline -import GHC.Unit.Module.Deps (Dependencies (dep_direct_mods), - Usage (..)) -#endif - #if !MIN_VERSION_ghc(9,5,0) import GHC.Core.Lint (lintInteractiveExpr) #endif @@ -234,35 +212,19 @@ nonDetFoldOccEnv :: (a -> b -> b) -> b -> OccEnv a -> b nonDetFoldOccEnv = foldOccEnv #endif -#if !MIN_VERSION_ghc(9,3,0) -nonDetOccEnvElts :: OccEnv a -> [a] -nonDetOccEnvElts = occEnvElts -#endif type ModIfaceAnnotation = Annotation -#if !MIN_VERSION_ghc(9,3,0) -nonDetNameEnvElts :: NameEnv a -> [a] -nonDetNameEnvElts = nameEnvElts -#endif myCoreToStgExpr :: Logger -> DynFlags -> InteractiveContext -#if MIN_VERSION_ghc(9,3,0) -> Bool -#endif -> Module -> ModLocation -> CoreExpr -> IO ( Id -#if MIN_VERSION_ghc(9,3,0) ,[CgStgTopBinding] -- output program -#else - ,[StgTopBinding] -- output program -#endif , InfoTableProvMap , CollectedCCs ) myCoreToStgExpr logger dflags ictxt -#if MIN_VERSION_ghc(9,3,0) for_bytecode -#endif this_mod ml prepd_expr = do {- Create a temporary binding (just because myCoreToStg needs a binding for the stg2stg step) -} @@ -278,30 +240,20 @@ myCoreToStgExpr logger dflags ictxt myCoreToStg logger dflags ictxt -#if MIN_VERSION_ghc(9,3,0) for_bytecode -#endif this_mod ml [NonRec bco_tmp_id prepd_expr] return (bco_tmp_id, stg_binds, prov_map, collected_ccs) myCoreToStg :: Logger -> DynFlags -> InteractiveContext -#if MIN_VERSION_ghc(9,3,0) -> Bool -#endif -> Module -> ModLocation -> CoreProgram -#if MIN_VERSION_ghc(9,3,0) -> IO ( [CgStgTopBinding] -- output program -#else - -> IO ( [StgTopBinding] -- output program -#endif , InfoTableProvMap , CollectedCCs ) -- CAF cost centre info (declared and used) myCoreToStg logger dflags ictxt -#if MIN_VERSION_ghc(9,3,0) for_bytecode -#endif this_mod ml prepd_binds = do let (stg_binds, denv, cost_centre_info) = {-# SCC "Core2Stg" #-} @@ -321,7 +273,6 @@ myCoreToStg logger dflags ictxt stg_binds2 #endif <- {-# SCC "Stg2Stg" #-} -#if MIN_VERSION_ghc(9,3,0) stg2stg logger #if MIN_VERSION_ghc(9,5,0) (interactiveInScope ictxt) @@ -329,9 +280,6 @@ myCoreToStg logger dflags ictxt ictxt #endif (initStgPipelineOpts dflags for_bytecode) this_mod stg_binds -#else - stg2stg logger dflags ictxt this_mod stg_binds -#endif return (stg_binds2, denv, cost_centre_info) @@ -342,11 +290,7 @@ reLocA = reLoc #endif getDependentMods :: ModIface -> [ModuleName] -#if MIN_VERSION_ghc(9,3,0) getDependentMods = map (gwib_mod . snd) . S.toList . 
dep_direct_mods . mi_deps -#else -getDependentMods = map gwib_mod . dep_mods . mi_deps -#endif simplifyExpr :: DynFlags -> HscEnv -> CoreExpr -> IO CoreExpr #if MIN_VERSION_ghc(9,5,0) @@ -366,57 +310,48 @@ corePrepExpr _ = GHC.corePrepExpr renderMessages :: PsMessages -> (Bag WarnMsg, Bag ErrMsg) renderMessages msgs = -#if MIN_VERSION_ghc(9,3,0) - let renderMsgs extractor = (fmap . fmap) renderDiagnosticMessageWithHints . getMessages $ extractor msgs +#if MIN_VERSION_ghc(9,5,0) + let renderMsgs extractor = (fmap . fmap) GhcPsMessage . getMessages $ extractor msgs in (renderMsgs psWarnings, renderMsgs psErrors) #else - msgs + let renderMsgs extractor = (fmap . fmap) renderDiagnosticMessageWithHints . getMessages $ extractor msgs + in (renderMsgs psWarnings, renderMsgs psErrors) #endif +#if MIN_VERSION_ghc(9,5,0) +pattern PFailedWithErrorMessages :: forall a b. (b -> Bag (MsgEnvelope GhcMessage)) -> ParseResult a +#else pattern PFailedWithErrorMessages :: forall a b. (b -> Bag (MsgEnvelope DecoratedSDoc)) -> ParseResult a +#endif pattern PFailedWithErrorMessages msgs -#if MIN_VERSION_ghc(9,3,0) - <- PFailed (const . fmap (fmap renderDiagnosticMessageWithHints) . getMessages . getPsErrorMessages -> msgs) +#if MIN_VERSION_ghc(9,5,0) + <- PFailed (const . fmap (fmap GhcPsMessage) . getMessages . getPsErrorMessages -> msgs) #else - <- PFailed (const . fmap pprError . getErrorMessages -> msgs) + <- PFailed (const . fmap (fmap renderDiagnosticMessageWithHints) . getMessages . getPsErrorMessages -> msgs) #endif {-# COMPLETE POk, PFailedWithErrorMessages #-} hieExportNames :: HieFile -> [(SrcSpan, Name)] hieExportNames = nameListFromAvails . hie_exports -#if MIN_VERSION_ghc(9,3,0) type NameCacheUpdater = NameCache -#else - -lookupNameCache :: Module -> OccName -> NameCache -> (NameCache, Name) --- Lookup up the (Module,OccName) in the NameCache --- If you find it, return it; if not, allocate a fresh original name and extend --- the NameCache. --- Reason: this may the first occurrence of (say) Foo.bar we have encountered. --- If we need to explore its value we will load Foo.hi; but meanwhile all we --- need is a Name for it. -lookupNameCache mod occ name_cache = - case lookupOrigNameCache (nsNames name_cache) mod occ of { - Just name -> (name_cache, name); - Nothing -> - case takeUniqFromSupply (nsUniqs name_cache) of { - (uniq, us) -> - let - name = mkExternalName uniq mod occ noSrcSpan - new_cache = extendNameCache (nsNames name_cache) mod occ name - in (name_cache{ nsUniqs = us, nsNames = new_cache }, name) }} - -upNameCache :: IORef NameCache -> (NameCache -> (NameCache, c)) -> IO c -upNameCache = updNameCache -#endif mkHieFile' :: ModSummary -> [Avail.AvailInfo] +#if MIN_VERSION_ghc(9,11,0) + -> (HieASTs Type, NameEntityInfo) +#else -> HieASTs Type +#endif -> BS.ByteString -> Hsc HieFile -mkHieFile' ms exports asts src = do +mkHieFile' ms exports +#if MIN_VERSION_ghc(9,11,0) + (asts, entityInfo) +#else + asts +#endif + src = do let Just src_file = ml_hs_file $ ms_location ms (asts',arr) = compressTypes asts return $ HieFile @@ -424,6 +359,9 @@ mkHieFile' ms exports asts src = do , hie_module = ms_mod ms , hie_types = arr , hie_asts = asts' +#if MIN_VERSION_ghc(9,11,0) + , hie_entity_infos = entityInfo +#endif -- mkIfaceExports sorts the AvailInfos for stability , hie_exports = mkIfaceExports exports , hie_hs_src = src @@ -515,18 +453,20 @@ generatedNodeInfo :: HieAST a -> Maybe (NodeInfo a) generatedNodeInfo = Map.lookup GeneratedInfo . getSourcedNodeInfo . 
sourcedNodeInfo data GhcVersion - = GHC92 - | GHC94 + = GHC94 | GHC96 | GHC98 | GHC910 + | GHC912 deriving (Eq, Ord, Show, Enum) ghcVersionStr :: String ghcVersionStr = VERSION_ghc ghcVersion :: GhcVersion -#if MIN_VERSION_GLASGOW_HASKELL(9,10,0,0) +#if MIN_VERSION_GLASGOW_HASKELL(9,12,0,0) +ghcVersion = GHC912 +#elif MIN_VERSION_GLASGOW_HASKELL(9,10,0,0) ghcVersion = GHC910 #elif MIN_VERSION_GLASGOW_HASKELL(9,8,0,0) ghcVersion = GHC98 @@ -534,8 +474,6 @@ ghcVersion = GHC98 ghcVersion = GHC96 #elif MIN_VERSION_GLASGOW_HASKELL(9,4,0,0) ghcVersion = GHC94 -#elif MIN_VERSION_GLASGOW_HASKELL(9,2,0,0) -ghcVersion = GHC92 #endif simpleNodeInfoCompat :: FastStringCompat -> FastStringCompat -> NodeInfo a @@ -568,16 +506,7 @@ loadModulesHome -> HscEnv -> HscEnv loadModulesHome mod_infos e = -#if MIN_VERSION_ghc(9,3,0) hscUpdateHUG (\hug -> foldl' (flip addHomeModInfoToHug) hug mod_infos) (e { hsc_type_env_vars = emptyKnotVars }) -#else - let !new_modules = addListToHpt (hsc_HPT e) [(mod_name x, x) | x <- mod_infos] - in e { hsc_HPT = new_modules - , hsc_type_env_var = Nothing - } - where - mod_name = moduleName . mi_module . hm_iface -#endif recDotDot :: HsRecFields (GhcPass p) arg -> Maybe Int recDotDot x = diff --git a/ghcide/src/Development/IDE/GHC/Compat/CmdLine.hs b/ghcide/src/Development/IDE/GHC/Compat/CmdLine.hs index 00db02aa8c..7c9efb37e8 100644 --- a/ghcide/src/Development/IDE/GHC/Compat/CmdLine.hs +++ b/ghcide/src/Development/IDE/GHC/Compat/CmdLine.hs @@ -13,26 +13,7 @@ module Development.IDE.GHC.Compat.CmdLine ( , liftEwM ) where -#if MIN_VERSION_ghc(9,3,0) import GHC.Driver.CmdLine -import GHC.Driver.Session (CmdLineP (..), getCmdLineState, - processCmdLineP, putCmdLineState) -#else -import Control.Monad.IO.Class -import GHC (Located) -import GHC.Driver.CmdLine -#endif +import GHC.Driver.Session (CmdLineP (..), getCmdLineState, + processCmdLineP, putCmdLineState) -#if !MIN_VERSION_ghc(9,3,0) --- | A helper to parse a set of flags from a list of command-line arguments, handling --- response files. -processCmdLineP - :: forall s m. 
MonadIO m - => [Flag (CmdLineP s)] -- ^ valid flags to match against - -> s -- ^ current state - -> [Located String] -- ^ arguments to parse - -> m (([Located String], [Err], [Warn]), s) - -- ^ (leftovers, errors, warnings) -processCmdLineP activeFlags s0 args = - pure $ runCmdLine (processArgs activeFlags args) s0 -#endif diff --git a/ghcide/src/Development/IDE/GHC/Compat/Core.hs b/ghcide/src/Development/IDE/GHC/Compat/Core.hs index 06f798d1ff..3f19cd7489 100644 --- a/ghcide/src/Development/IDE/GHC/Compat/Core.hs +++ b/ghcide/src/Development/IDE/GHC/Compat/Core.hs @@ -58,9 +58,6 @@ module Development.IDE.GHC.Compat.Core ( pattern ExposePackage, parseDynamicFlagsCmdLine, parseDynamicFilePragma, -#if !MIN_VERSION_ghc(9,3,0) - WarnReason(..), -#endif wWarningFlags, updOptLevel, -- slightly unsafe @@ -72,12 +69,14 @@ module Development.IDE.GHC.Compat.Core ( IfaceTyCon(..), ModIface, ModIface_(..), +#if MIN_VERSION_ghc(9,11,0) + pattern ModIface, + set_mi_top_env, + set_mi_usages, +#endif HscSource(..), WhereFrom(..), loadInterface, -#if !MIN_VERSION_ghc(9,3,0) - SourceModified(..), -#endif loadModuleInterface, RecompileRequired(..), mkPartialIface, @@ -236,7 +235,11 @@ module Development.IDE.GHC.Compat.Core ( ModuleOrigin(..), PackageName(..), -- * Linker +#if MIN_VERSION_ghc(9,11,0) + LinkablePart(..), +#else Unlinked(..), +#endif Linkable(..), unload, -- * Hooks @@ -359,7 +362,6 @@ module Development.IDE.GHC.Compat.Core ( module GHC.Parser.Header, module GHC.Parser.Lexer, module GHC.Utils.Panic, -#if MIN_VERSION_ghc(9,3,0) CompileReason(..), hsc_type_env_vars, hscUpdateHUG, hsc_HUG, @@ -372,7 +374,6 @@ module Development.IDE.GHC.Compat.Core ( module GHC.Unit.Finder.Types, module GHC.Unit.Env, module GHC.Driver.Phases, -#endif #if !MIN_VERSION_ghc(9,4,0) pattern HsFieldBind, hfbAnn, @@ -460,7 +461,7 @@ import GHC.Tc.Types.Evidence hiding ((<.>)) import GHC.Tc.Utils.Env import GHC.Tc.Utils.Monad hiding (Applicative (..), IORef, MonadFix (..), MonadIO (..), allM, - anyM, concatMapM, mapMaybeM, + anyM, concatMapM, mapMaybeM, foldMapM, (<$>)) import GHC.Tc.Utils.TcType as TcType import qualified GHC.Types.Avail as Avail @@ -495,12 +496,15 @@ import qualified GHC.Utils.Panic.Plain as Plain import Data.Foldable (toList) import GHC.Core.Multiplicity (scaledThing) import GHC.Data.Bag +import qualified GHC.Data.Strict as Strict +import qualified GHC.Driver.Config.Finder as GHC +import qualified GHC.Driver.Config.Tidy as GHC import GHC.Driver.Env -import GHC.Hs (HsModule (..)) -#if !MIN_VERSION_ghc(9,9,0) -import GHC.Hs (SrcSpanAnn') -#endif -import GHC.Hs.Decls hiding (FunDep) +import GHC.Driver.Env as GHCi +import GHC.Driver.Env.KnotVars +import GHC.Driver.Errors.Types +import GHC.Hs (HsModule (..)) +import GHC.Hs.Decls hiding (FunDep) import GHC.Hs.Doc import GHC.Hs.Expr import GHC.Hs.Extension @@ -524,73 +528,66 @@ import GHC.Types.SourceText import GHC.Types.Target (Target (..), TargetId (..)) import GHC.Types.TyThing import GHC.Types.TyThing.Ppr +import GHC.Types.Unique +import GHC.Types.Unique.Map +import GHC.Unit.Env import GHC.Unit.Finder hiding (mkHomeModLocation) +import qualified GHC.Unit.Finder as GHC +import GHC.Unit.Finder.Types import GHC.Unit.Home.ModInfo +import GHC.Unit.Module.Graph import GHC.Unit.Module.Imported import GHC.Unit.Module.ModDetails import GHC.Unit.Module.ModGuts +#if !MIN_VERSION_ghc(9,9,0) +import GHC.Hs (SrcSpanAnn') +#endif import GHC.Unit.Module.ModIface (IfaceExport, ModIface, - ModIface_ (..), mi_fix) + ModIface_ (..), mi_fix +#if MIN_VERSION_ghc(9,11,0) + , 
pattern ModIface + , set_mi_top_env + , set_mi_usages +#endif + ) import GHC.Unit.Module.ModSummary (ModSummary (..)) +import GHC.Utils.Error (mkPlainErrorMsgEnvelope) +import GHC.Utils.Panic +import GHC.Utils.TmpFs import Language.Haskell.Syntax hiding (FunDep) --- See Note [Guidelines For Using CPP In GHCIDE Import Statements] -#if !MIN_VERSION_ghc(9,3,0) -import GHC.Types.SourceFile (SourceModified (..)) -import qualified GHC.Unit.Finder as GHC -import GHC.Unit.Module.Graph (mkModuleGraph) -#endif +-- See Note [Guidelines For Using CPP In GHCIDE Import Statements] -#if MIN_VERSION_ghc(9,3,0) -import qualified GHC.Data.Strict as Strict -import qualified GHC.Driver.Config.Finder as GHC -import qualified GHC.Driver.Config.Tidy as GHC -import GHC.Driver.Env as GHCi -import GHC.Driver.Env.KnotVars -import GHC.Driver.Errors.Types -import GHC.Types.Unique -import GHC.Types.Unique.Map -import GHC.Unit.Env -import qualified GHC.Unit.Finder as GHC -import GHC.Unit.Finder.Types -import GHC.Unit.Module.Graph -import GHC.Utils.Error (mkPlainErrorMsgEnvelope) -import GHC.Utils.Panic -import GHC.Utils.TmpFs +#if MIN_VERSION_ghc(9,11,0) +import System.OsPath #endif #if !MIN_VERSION_ghc(9,7,0) import GHC.Types.Avail (greNamePrintableName) #endif +#if !MIN_VERSION_ghc(9,9,0) +import GHC.Hs (SrcSpanAnn') +#endif + mkHomeModLocation :: DynFlags -> ModuleName -> FilePath -> IO Module.ModLocation -#if MIN_VERSION_ghc(9,3,0) -mkHomeModLocation df mn f = pure $ GHC.mkHomeModLocation (GHC.initFinderOpts df) mn f +#if MIN_VERSION_ghc(9,11,0) +mkHomeModLocation df mn f = + let osf = unsafeEncodeUtf f + in pure $ GHC.mkHomeModLocation (GHC.initFinderOpts df) mn osf #else -mkHomeModLocation = GHC.mkHomeModLocation +mkHomeModLocation df mn f = pure $ GHC.mkHomeModLocation (GHC.initFinderOpts df) mn f #endif -#if MIN_VERSION_ghc(9,3,0) pattern RealSrcSpan :: SrcLoc.RealSrcSpan -> Maybe BufSpan -> SrcLoc.SrcSpan -#else -pattern RealSrcSpan :: SrcLoc.RealSrcSpan -> Maybe BufSpan -> SrcLoc.SrcSpan -#endif -#if MIN_VERSION_ghc(9,3,0) pattern RealSrcSpan x y <- SrcLoc.RealSrcSpan x ((\case Strict.Nothing -> Nothing; Strict.Just a -> Just a) -> y) where RealSrcSpan x y = SrcLoc.RealSrcSpan x (case y of Nothing -> Strict.Nothing; Just a -> Strict.Just a) -#else -pattern RealSrcSpan x y = SrcLoc.RealSrcSpan x y -#endif {-# COMPLETE RealSrcSpan, UnhelpfulSpan #-} -#if MIN_VERSION_ghc(9,3,0) pattern RealSrcLoc :: SrcLoc.RealSrcLoc -> Strict.Maybe BufPos-> SrcLoc.SrcLoc -#else -pattern RealSrcLoc :: SrcLoc.RealSrcLoc -> Maybe BufPos-> SrcLoc.SrcLoc -#endif pattern RealSrcLoc x y = SrcLoc.RealSrcLoc x y {-# COMPLETE RealSrcLoc, UnhelpfulLoc #-} @@ -664,6 +661,8 @@ instance HasSrcSpan (EpAnn a) where #if MIN_VERSION_ghc(9,9,0) instance HasSrcSpan (SrcLoc.GenLocated (EpAnn ann) a) where getLoc (L l _) = getLoc l +instance HasSrcSpan (SrcLoc.GenLocated (GHC.EpaLocation) a) where + getLoc = GHC.getHasLoc #else instance HasSrcSpan (SrcSpanAnn' ann) where getLoc = GHC.locA @@ -703,7 +702,7 @@ initObjLinker env = loadDLL :: HscEnv -> String -> IO (Maybe String) loadDLL env str = do res <- GHCi.loadDLL (GHCi.hscInterp env) str -#if MIN_VERSION_ghc(9,11,0) +#if MIN_VERSION_ghc(9,11,0) || (MIN_VERSION_ghc(9, 8, 3) && !MIN_VERSION_ghc(9, 9, 0)) || (MIN_VERSION_ghc(9, 10, 2) && !MIN_VERSION_ghc(9, 11, 0)) pure $ case res of Left err_msg -> Just err_msg @@ -718,12 +717,6 @@ unload hsc_env linkables = (GHCi.hscInterp hsc_env) hsc_env linkables -#if !MIN_VERSION_ghc(9,3,0) -setOutputFile :: FilePath -> DynFlags -> DynFlags -setOutputFile f d = 
d { - outputFile_ = Just f - } -#endif isSubspanOfA :: LocatedAn la a -> LocatedAn lb b -> Bool isSubspanOfA a b = SrcLoc.isSubspanOf (GHC.getLocA a) (GHC.getLocA b) @@ -744,7 +737,7 @@ pattern GRE{gre_name, gre_par, gre_lcl, gre_imp} <- RdrName.GRE #endif ,gre_par, gre_lcl, gre_imp = (toList -> gre_imp)} -collectHsBindsBinders :: CollectPass p => Bag (XRec p (HsBindLR p idR)) -> [IdP p] +collectHsBindsBinders :: CollectPass p => LHsBindsLR p idR -> [IdP p] collectHsBindsBinders x = GHC.collectHsBindsBinders CollNoDictBinders x @@ -752,54 +745,28 @@ collectHsBindsBinders x = GHC.collectHsBindsBinders CollNoDictBinders x makeSimpleDetails :: HscEnv -> TcGblEnv -> IO ModDetails makeSimpleDetails hsc_env = GHC.makeSimpleDetails -#if MIN_VERSION_ghc(9,3,0) (hsc_logger hsc_env) -#else - hsc_env -#endif mkIfaceTc :: HscEnv -> GHC.SafeHaskellMode -> ModDetails -> ModSummary -> Maybe CoreProgram -> TcGblEnv -> IO ModIface mkIfaceTc hscEnv shm md _ms _mcp = #if MIN_VERSION_ghc(9,5,0) GHC.mkIfaceTc hscEnv shm md _ms _mcp -- mcp::Maybe CoreProgram is only used in GHC >= 9.6 -#elif MIN_VERSION_ghc(9,3,0) - GHC.mkIfaceTc hscEnv shm md _ms -- ms::ModSummary is only used in GHC >= 9.4 #else - GHC.mkIfaceTc hscEnv shm md + GHC.mkIfaceTc hscEnv shm md _ms -- ms::ModSummary is only used in GHC >= 9.4 #endif mkBootModDetailsTc :: HscEnv -> TcGblEnv -> IO ModDetails mkBootModDetailsTc session = GHC.mkBootModDetailsTc -#if MIN_VERSION_ghc(9,3,0) (hsc_logger session) -#else - session -#endif -#if !MIN_VERSION_ghc(9,3,0) -type TidyOpts = HscEnv -#endif initTidyOpts :: HscEnv -> IO TidyOpts initTidyOpts = -#if MIN_VERSION_ghc(9,3,0) GHC.initTidyOpts -#else - pure -#endif -#if MIN_VERSION_ghc(9,3,0) driverNoStop :: StopPhase driverNoStop = NoStop -#else -driverNoStop :: Phase -driverNoStop = StopLn -#endif -#if !MIN_VERSION_ghc(9,3,0) -hscUpdateHPT :: (HomePackageTable -> HomePackageTable) -> HscEnv -> HscEnv -hscUpdateHPT k session = session { hsc_HPT = k (hsc_HPT session) } -#endif #if !MIN_VERSION_ghc(9,4,0) pattern HsFieldBind :: XHsRecField id -> id -> arg -> Bool -> HsRecField' id arg @@ -846,11 +813,7 @@ field_label = id #endif mkSimpleTarget :: DynFlags -> FilePath -> Target -#if MIN_VERSION_ghc(9,3,0) mkSimpleTarget df fp = Target (TargetFile fp Nothing) True (homeUnitId_ df) Nothing -#else -mkSimpleTarget _ fp = Target (TargetFile fp Nothing) True Nothing -#endif #if MIN_VERSION_ghc(9,7,0) lookupGlobalRdrEnv gre_env occ = lookupGRE gre_env (LookupOccName occ AllRelevantGREs) diff --git a/ghcide/src/Development/IDE/GHC/Compat/Driver.hs b/ghcide/src/Development/IDE/GHC/Compat/Driver.hs new file mode 100644 index 0000000000..3ad063936e --- /dev/null +++ b/ghcide/src/Development/IDE/GHC/Compat/Driver.hs @@ -0,0 +1,155 @@ +-- ============================================================================ +-- DO NOT EDIT +-- This module copies parts of the driver code in GHC.Driver.Main to provide +-- `hscTypecheckRenameWithDiagnostics`. 
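
This compat shim exposes the typechecker's messages alongside its result, which is presumably what feeds the structured warnings (tmrWarnings) added to TcModuleResult earlier in this patch. A hedged sketch of a caller, assuming GHC.Types.Error.partitionMessages is available; the helper name is not from the patch:

    import GHC.Types.Error (partitionMessages)

    -- Illustrative only: typecheck, then split the collected messages so the
    -- warnings can be kept as structured diagnostics while errors are reported.
    typecheckCollectingWarnings
      :: HscEnv -> ModSummary -> HsParsedModule
      -> IO ((TcGblEnv, RenamedStuff), Messages GhcMessage, Messages GhcMessage)
    typecheckCollectingWarnings env ms pm = do
      (res, msgs) <- hscTypecheckRenameWithDiagnostics env ms pm
      let (warns, errs) = partitionMessages msgs
      pure (res, warns, errs)
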
+-- Issue to add this function: https://gitlab.haskell.org/ghc/ghc/-/issues/24996 +-- MR to add this function: https://gitlab.haskell.org/ghc/ghc/-/merge_requests/12891 +-- ============================================================================ + +{-# LANGUAGE CPP #-} + +module Development.IDE.GHC.Compat.Driver + ( hscTypecheckRenameWithDiagnostics + ) where + +#if MIN_VERSION_ghc(9,11,0) + +import GHC.Driver.Main (hscTypecheckRenameWithDiagnostics) + +#else + +import Control.Monad +import GHC.Core +import GHC.Data.FastString +import GHC.Data.Maybe +import GHC.Driver.Env +import GHC.Driver.Errors.Types +import GHC.Driver.Main +import GHC.Driver.Session +import GHC.Hs +import GHC.Hs.Dump +import GHC.Iface.Ext.Ast (mkHieFile) +import GHC.Iface.Ext.Binary (hie_file_result, readHieFile, + writeHieFile) +import GHC.Iface.Ext.Debug (diffFile, validateScopes) +import GHC.Iface.Ext.Types (getAsts, hie_asts, hie_module) +import GHC.Tc.Module +import GHC.Tc.Utils.Monad +import GHC.Types.SourceFile +import GHC.Types.SrcLoc +import GHC.Unit +import GHC.Unit.Module.ModDetails +import GHC.Unit.Module.ModIface +import GHC.Unit.Module.ModSummary +import GHC.Utils.Error +import GHC.Utils.Logger +import GHC.Utils.Outputable +import GHC.Utils.Panic.Plain + +hscTypecheckRenameWithDiagnostics :: HscEnv -> ModSummary -> HsParsedModule + -> IO ((TcGblEnv, RenamedStuff), Messages GhcMessage) +hscTypecheckRenameWithDiagnostics hsc_env mod_summary rdr_module = + runHsc' hsc_env $ hsc_typecheck True mod_summary (Just rdr_module) + +-- ============================================================================ +-- DO NOT EDIT - Refer to top of file +-- ============================================================================ +hsc_typecheck :: Bool -- ^ Keep renamed source? 
+ -> ModSummary -> Maybe HsParsedModule + -> Hsc (TcGblEnv, RenamedStuff) +hsc_typecheck keep_rn mod_summary mb_rdr_module = do + hsc_env <- getHscEnv + let hsc_src = ms_hsc_src mod_summary + dflags = hsc_dflags hsc_env + home_unit = hsc_home_unit hsc_env + outer_mod = ms_mod mod_summary + mod_name = moduleName outer_mod + outer_mod' = mkHomeModule home_unit mod_name + inner_mod = homeModuleNameInstantiation home_unit mod_name + src_filename = ms_hspp_file mod_summary + real_loc = realSrcLocSpan $ mkRealSrcLoc (mkFastString src_filename) 1 1 + keep_rn' = gopt Opt_WriteHie dflags || keep_rn + massert (isHomeModule home_unit outer_mod) + tc_result <- if hsc_src == HsigFile && not (isHoleModule inner_mod) + then ioMsgMaybe $ hoistTcRnMessage $ tcRnInstantiateSignature hsc_env outer_mod' real_loc + else + do hpm <- case mb_rdr_module of + Just hpm -> return hpm + Nothing -> hscParse' mod_summary + tc_result0 <- tcRnModule' mod_summary keep_rn' hpm + if hsc_src == HsigFile + then +#if MIN_VERSION_ghc(9,5,0) + do (iface, _) <- liftIO $ hscSimpleIface hsc_env Nothing tc_result0 mod_summary +#else + do (iface, _) <- liftIO $ hscSimpleIface hsc_env tc_result0 mod_summary +#endif + ioMsgMaybe $ hoistTcRnMessage $ + tcRnMergeSignatures hsc_env hpm tc_result0 iface + else return tc_result0 + rn_info <- extract_renamed_stuff mod_summary tc_result + return (tc_result, rn_info) + +-- ============================================================================ +-- DO NOT EDIT - Refer to top of file +-- ============================================================================ +extract_renamed_stuff :: ModSummary -> TcGblEnv -> Hsc RenamedStuff +extract_renamed_stuff mod_summary tc_result = do + let rn_info = getRenamedStuff tc_result + + dflags <- getDynFlags + logger <- getLogger + liftIO $ putDumpFileMaybe logger Opt_D_dump_rn_ast "Renamer" + FormatHaskell (showAstData NoBlankSrcSpan NoBlankEpAnnotations rn_info) + + -- Create HIE files + when (gopt Opt_WriteHie dflags) $ do + -- I assume this fromJust is safe because `-fwrite-hie-file` + -- enables the option which keeps the renamed source. 
+ hieFile <- mkHieFile mod_summary tc_result (fromJust rn_info) + let out_file = ml_hie_file $ ms_location mod_summary + liftIO $ writeHieFile out_file hieFile + liftIO $ putDumpFileMaybe logger Opt_D_dump_hie "HIE AST" FormatHaskell (ppr $ hie_asts hieFile) + + -- Validate HIE files + when (gopt Opt_ValidateHie dflags) $ do + hs_env <- Hsc $ \e w -> return (e, w) + liftIO $ do + -- Validate Scopes + case validateScopes (hie_module hieFile) $ getAsts $ hie_asts hieFile of + [] -> putMsg logger $ text "Got valid scopes" + xs -> do + putMsg logger $ text "Got invalid scopes" + mapM_ (putMsg logger) xs + -- Roundtrip testing + file' <- readHieFile (hsc_NC hs_env) out_file + case diffFile hieFile (hie_file_result file') of + [] -> + putMsg logger $ text "Got no roundtrip errors" + xs -> do + putMsg logger $ text "Got roundtrip errors" + let logger' = updateLogFlags logger (log_set_dopt Opt_D_ppr_debug) + mapM_ (putMsg logger') xs + return rn_info + +-- ============================================================================ +-- DO NOT EDIT - Refer to top of file +-- ============================================================================ +#if MIN_VERSION_ghc(9,5,0) +hscSimpleIface :: HscEnv + -> Maybe CoreProgram + -> TcGblEnv + -> ModSummary + -> IO (ModIface, ModDetails) +hscSimpleIface hsc_env mb_core_program tc_result summary + = runHsc hsc_env $ hscSimpleIface' mb_core_program tc_result summary +#else +hscSimpleIface :: HscEnv + -> TcGblEnv + -> ModSummary + -> IO (ModIface, ModDetails) +hscSimpleIface hsc_env tc_result summary + = runHsc hsc_env $ hscSimpleIface' tc_result summary +#endif + +#endif diff --git a/ghcide/src/Development/IDE/GHC/Compat/Env.hs b/ghcide/src/Development/IDE/GHC/Compat/Env.hs index bc963e2104..988739e3b8 100644 --- a/ghcide/src/Development/IDE/GHC/Compat/Env.hs +++ b/ghcide/src/Development/IDE/GHC/Compat/Env.hs @@ -4,11 +4,7 @@ -- 'UnitEnv' and some DynFlags compat functions. 
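(Editor's note: a minimal sketch, not part of the patch, showing how the DynFlags compat setters defined later in this module compose; `configureHomeUnit` is a hypothetical name.)

-- Point a unit's DynFlags at its home unit id and working directory; both
-- setters are plain DynFlags -> DynFlags updates, so they compose directly.
configureHomeUnit :: UnitId -> FilePath -> DynFlags -> DynFlags
configureHomeUnit uid dir = setHomeUnitId_ uid . setWorkingDirectory dir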
module Development.IDE.GHC.Compat.Env ( Env.HscEnv(hsc_FC, hsc_NC, hsc_IC, hsc_mod_graph -#if MIN_VERSION_ghc(9,3,0) , hsc_type_env_vars -#else - , hsc_type_env_var -#endif ), Env.hsc_HPT, InteractiveContext(..), @@ -19,9 +15,6 @@ module Development.IDE.GHC.Compat.Env ( Env.hsc_logger, Env.hsc_tmpfs, Env.hsc_unit_env, -#if !MIN_VERSION_ghc(9,3,0) - Env.hsc_unit_dbs, -#endif Env.hsc_hooks, hscSetHooks, TmpFs, @@ -64,6 +57,7 @@ module Development.IDE.GHC.Compat.Env ( import GHC (setInteractiveDynFlags) import GHC.Driver.Backend as Backend +import GHC.Driver.Env (HscEnv, hscSetActiveUnitId) import qualified GHC.Driver.Env as Env import GHC.Driver.Hooks (Hooks) import GHC.Driver.Session @@ -76,41 +70,12 @@ import GHC.Unit.Types (UnitId) import GHC.Utils.Logger import GHC.Utils.TmpFs --- See Note [Guidelines For Using CPP In GHCIDE Import Statements] - -#if !MIN_VERSION_ghc(9,3,0) -import qualified Data.Set as S -import GHC.Driver.Env (HscEnv, hsc_EPS) -#endif - -#if MIN_VERSION_ghc(9,3,0) -import GHC.Driver.Env (HscEnv, hscSetActiveUnitId) -#endif - - -#if !MIN_VERSION_ghc(9,3,0) -hscSetActiveUnitId :: UnitId -> HscEnv -> HscEnv -hscSetActiveUnitId _ env = env -reexportedModules :: HscEnv -> S.Set a -reexportedModules _ = S.empty -#endif - -#if MIN_VERSION_ghc(9,3,0) hsc_EPS :: HscEnv -> UnitEnv hsc_EPS = Env.hsc_unit_env -#endif -#if !MIN_VERSION_ghc(9,3,0) -workingDirectory :: a -> Maybe b -workingDirectory _ = Nothing - -setWorkingDirectory :: FilePath -> DynFlags -> DynFlags -setWorkingDirectory = const id -#else setWorkingDirectory :: FilePath -> DynFlags -> DynFlags setWorkingDirectory p d = d { workingDirectory = Just p } -#endif setHomeUnitId_ :: UnitId -> DynFlags -> DynFlags setHomeUnitId_ uid df = df { Session.homeUnitId_ = uid } diff --git a/ghcide/src/Development/IDE/GHC/Compat/Error.hs b/ghcide/src/Development/IDE/GHC/Compat/Error.hs new file mode 100644 index 0000000000..06b6a9876b --- /dev/null +++ b/ghcide/src/Development/IDE/GHC/Compat/Error.hs @@ -0,0 +1,68 @@ +{-# LANGUAGE CPP #-} +{-# LANGUAGE TemplateHaskell #-} +module Development.IDE.GHC.Compat.Error ( + -- * Top-level error types and lens for easy access + MsgEnvelope(..), + msgEnvelopeErrorL, + GhcMessage(..), + -- * Error messages for the typechecking and renamer phase + TcRnMessage (..), + TcRnMessageDetailed (..), + stripTcRnMessageContext, + -- * Parsing error message + PsMessage(..), + -- * Desugaring diagnostic + DsMessage (..), + -- * Driver error message + DriverMessage (..), + -- * General Diagnostics + Diagnostic(..), + -- * Prisms for error selection + _TcRnMessage, + _GhcPsMessage, + _GhcDsMessage, + _GhcDriverMessage, + ) where + +import Control.Lens +import GHC.Driver.Errors.Types +import GHC.HsToCore.Errors.Types +import GHC.Tc.Errors.Types +import GHC.Types.Error + +_TcRnMessage :: Prism' GhcMessage TcRnMessage +_TcRnMessage = prism' GhcTcRnMessage (\case + GhcTcRnMessage tcRnMsg -> Just tcRnMsg + _ -> Nothing) + +_GhcPsMessage :: Prism' GhcMessage PsMessage +_GhcPsMessage = prism' GhcPsMessage (\case + GhcPsMessage psMsg -> Just psMsg + _ -> Nothing) + +_GhcDsMessage :: Prism' GhcMessage DsMessage +_GhcDsMessage = prism' GhcDsMessage (\case + GhcDsMessage dsMsg -> Just dsMsg + _ -> Nothing) + +_GhcDriverMessage :: Prism' GhcMessage DriverMessage +_GhcDriverMessage = prism' GhcDriverMessage (\case + GhcDriverMessage driverMsg -> Just driverMsg + _ -> Nothing) + +-- | Some 'TcRnMessage's are nested in other constructors for additional context. 
+-- For example, 'TcRnWithHsDocContext' and 'TcRnMessageWithInfo'. +-- However, in some occasions you don't need the additional context and you just want +-- the error message. @'stripTcRnMessageContext'@ recursively unwraps these constructors, +-- until there are no more constructors with additional context. +-- +stripTcRnMessageContext :: TcRnMessage -> TcRnMessage +stripTcRnMessageContext = \case +#if MIN_VERSION_ghc(9, 6, 1) + TcRnWithHsDocContext _ tcMsg -> stripTcRnMessageContext tcMsg +#endif + TcRnMessageWithInfo _ (TcRnMessageDetailed _ tcMsg) -> stripTcRnMessageContext tcMsg + msg -> msg + +msgEnvelopeErrorL :: Lens' (MsgEnvelope e) e +msgEnvelopeErrorL = lens errMsgDiagnostic (\envelope e -> envelope { errMsgDiagnostic = e } ) diff --git a/ghcide/src/Development/IDE/GHC/Compat/Iface.hs b/ghcide/src/Development/IDE/GHC/Compat/Iface.hs index 7a5fc10029..0a16f676e7 100644 --- a/ghcide/src/Development/IDE/GHC/Compat/Iface.hs +++ b/ghcide/src/Development/IDE/GHC/Compat/Iface.hs @@ -9,25 +9,22 @@ module Development.IDE.GHC.Compat.Iface ( import Development.IDE.GHC.Compat.Env import Development.IDE.GHC.Compat.Outputable import GHC +import GHC.Driver.Session (targetProfile) import qualified GHC.Iface.Load as Iface import GHC.Unit.Finder.Types (FindResult) -- See Note [Guidelines For Using CPP In GHCIDE Import Statements] -#if MIN_VERSION_ghc(9,3,0) -import GHC.Driver.Session (targetProfile) -#endif - #if MIN_VERSION_ghc(9,7,0) import GHC.Iface.Errors.Ppr (missingInterfaceErrorDiagnostic) import GHC.Iface.Errors.Types (IfaceMessage) #endif writeIfaceFile :: HscEnv -> FilePath -> ModIface -> IO () -#if MIN_VERSION_ghc(9,3,0) +#if MIN_VERSION_ghc(9,11,0) +writeIfaceFile env fp iface = Iface.writeIface (hsc_logger env) (targetProfile $ hsc_dflags env) (Iface.flagsToIfCompression $ hsc_dflags env) fp iface +#elif MIN_VERSION_ghc(9,3,0) writeIfaceFile env fp iface = Iface.writeIface (hsc_logger env) (targetProfile $ hsc_dflags env) fp iface -#else -writeIfaceFile env fp iface = Iface.writeIface (hsc_logger env) (hsc_dflags env) fp iface #endif cannotFindModule :: HscEnv -> ModuleName -> FindResult -> SDoc diff --git a/ghcide/src/Development/IDE/GHC/Compat/Logger.hs b/ghcide/src/Development/IDE/GHC/Compat/Logger.hs index 24922069ec..32ec11da4c 100644 --- a/ghcide/src/Development/IDE/GHC/Compat/Logger.hs +++ b/ghcide/src/Development/IDE/GHC/Compat/Logger.hs @@ -14,20 +14,14 @@ import Development.IDE.GHC.Compat.Env as Env import Development.IDE.GHC.Compat.Outputable +import GHC.Types.Error import GHC.Utils.Logger as Logger import GHC.Utils.Outputable --- See Note [Guidelines For Using CPP In GHCIDE Import Statements] - -#if MIN_VERSION_ghc(9,3,0) -import GHC.Types.Error -#endif - putLogHook :: Logger -> HscEnv -> HscEnv putLogHook logger env = env { hsc_logger = logger } -#if MIN_VERSION_ghc(9,3,0) type LogActionCompat = LogFlags -> Maybe DiagnosticReason -> Maybe Severity -> SrcSpan -> PrintUnqualified -> SDoc -> IO () -- alwaysQualify seems to still do the right thing here, according to the "unqualified warnings" test. @@ -41,11 +35,3 @@ logActionCompat logAction logFlags (MCDiagnostic severity wr) loc = logAction lo #endif logActionCompat logAction logFlags _cls loc = logAction logFlags Nothing Nothing loc alwaysQualify -#else -type LogActionCompat = DynFlags -> WarnReason -> Severity -> SrcSpan -> PrintUnqualified -> SDoc -> IO () - --- alwaysQualify seems to still do the right thing here, according to the "unqualified warnings" test. 
-logActionCompat :: LogActionCompat -> LogAction -logActionCompat logAction dynFlags wr severity loc = logAction dynFlags wr severity loc alwaysQualify - -#endif diff --git a/ghcide/src/Development/IDE/GHC/Compat/Outputable.hs b/ghcide/src/Development/IDE/GHC/Compat/Outputable.hs index c751f7ae0b..d1053ebffc 100644 --- a/ghcide/src/Development/IDE/GHC/Compat/Outputable.hs +++ b/ghcide/src/Development/IDE/GHC/Compat/Outputable.hs @@ -24,7 +24,6 @@ module Development.IDE.GHC.Compat.Outputable ( initDiagOpts, pprMessages, #endif -#if MIN_VERSION_ghc(9,3,0) DiagnosticReason(..), renderDiagnosticMessageWithHints, pprMsgEnvelopeBagWithLoc, @@ -34,10 +33,6 @@ module Development.IDE.GHC.Compat.Outputable ( errMsgDiagnostic, unDecorated, diagnosticMessage, -#else - pprWarning, - pprError, -#endif -- * Error infrastructure DecoratedSDoc, MsgEnvelope, @@ -53,33 +48,24 @@ module Development.IDE.GHC.Compat.Outputable ( textDoc, ) where +import Data.Maybe +import GHC.Driver.Config.Diagnostic import GHC.Driver.Env import GHC.Driver.Ppr import GHC.Driver.Session +import GHC.Parser.Errors.Types import qualified GHC.Types.Error as Error import GHC.Types.Name.Ppr import GHC.Types.Name.Reader import GHC.Types.SourceError import GHC.Types.SrcLoc import GHC.Unit.State +import GHC.Utils.Error import GHC.Utils.Outputable as Out import GHC.Utils.Panic -- See Note [Guidelines For Using CPP In GHCIDE Import Statements] -#if !MIN_VERSION_ghc(9,3,0) -import GHC.Parser.Errors -import qualified GHC.Parser.Errors.Ppr as Ppr -import GHC.Utils.Error hiding (mkWarnMsg) -#endif - -#if MIN_VERSION_ghc(9,3,0) -import Data.Maybe -import GHC.Driver.Config.Diagnostic -import GHC.Parser.Errors.Types -import GHC.Utils.Error -#endif - #if MIN_VERSION_ghc(9,5,0) import GHC.Driver.Errors.Types (DriverMessage, GhcMessage) #endif @@ -114,41 +100,29 @@ printSDocQualifiedUnsafe unqual doc = doc' = pprWithUnitState emptyUnitState doc -#if !MIN_VERSION_ghc(9,3,0) -pprWarning :: PsWarning -> MsgEnvelope DecoratedSDoc -pprWarning = - Ppr.pprWarning - -pprError :: PsError -> MsgEnvelope DecoratedSDoc -pprError = - Ppr.pprError -#endif formatErrorWithQual :: DynFlags -> MsgEnvelope DecoratedSDoc -> String formatErrorWithQual dflags e = showSDoc dflags (pprNoLocMsgEnvelope e) -#if MIN_VERSION_ghc(9,3,0) pprNoLocMsgEnvelope :: MsgEnvelope DecoratedSDoc -> SDoc -#else -pprNoLocMsgEnvelope :: Error.RenderableDiagnostic e => MsgEnvelope e -> SDoc -#endif pprNoLocMsgEnvelope (MsgEnvelope { errMsgDiagnostic = e , errMsgContext = unqual }) = sdocWithContext $ \_ctx -> withErrStyle unqual $ #if MIN_VERSION_ghc(9,7,0) formatBulleted e -#elif MIN_VERSION_ghc(9,3,0) - formatBulleted _ctx $ e #else - formatBulleted _ctx $ Error.renderDiagnostic e + formatBulleted _ctx e #endif +#if MIN_VERSION_ghc(9,5,0) +type ErrMsg = MsgEnvelope GhcMessage +type WarnMsg = MsgEnvelope GhcMessage +#else type ErrMsg = MsgEnvelope DecoratedSDoc -#if MIN_VERSION_ghc(9,3,0) type WarnMsg = MsgEnvelope DecoratedSDoc #endif @@ -165,7 +139,6 @@ mkPrintUnqualifiedDefault env = mkPrintUnqualified (hsc_unit_env env) #endif -#if MIN_VERSION_ghc(9,3,0) renderDiagnosticMessageWithHints :: forall a. 
Diagnostic a => a -> DecoratedSDoc renderDiagnosticMessageWithHints a = Error.unionDecoratedSDoc (diagnosticMessage @@ -173,16 +146,9 @@ renderDiagnosticMessageWithHints a = Error.unionDecoratedSDoc (defaultDiagnosticOpts @a) #endif a) (mkDecorated $ map ppr $ diagnosticHints a) -#endif -#if MIN_VERSION_ghc(9,3,0) mkWarnMsg :: DynFlags -> Maybe DiagnosticReason -> b -> SrcSpan -> PrintUnqualified -> SDoc -> MsgEnvelope DecoratedSDoc mkWarnMsg df reason _logFlags l st doc = fmap renderDiagnosticMessageWithHints $ mkMsgEnvelope (initDiagOpts df) l st (mkPlainDiagnostic (fromMaybe WarningWithoutFlag reason) [] doc) -#else -mkWarnMsg :: a -> b -> DynFlags -> SrcSpan -> PrintUnqualified -> SDoc -> MsgEnvelope DecoratedSDoc -mkWarnMsg _ _ = - const Error.mkWarnMsg -#endif textDoc :: String -> SDoc textDoc = text diff --git a/ghcide/src/Development/IDE/GHC/Compat/Parser.hs b/ghcide/src/Development/IDE/GHC/Compat/Parser.hs index 0dc40673bc..7ae9c2bab9 100644 --- a/ghcide/src/Development/IDE/GHC/Compat/Parser.hs +++ b/ghcide/src/Development/IDE/GHC/Compat/Parser.hs @@ -5,22 +5,20 @@ module Development.IDE.GHC.Compat.Parser ( initParserOpts, initParserState, - ApiAnns, PsSpan(..), pattern HsParsedModule, type GHC.HsParsedModule, Development.IDE.GHC.Compat.Parser.hpm_module, Development.IDE.GHC.Compat.Parser.hpm_src_files, - Development.IDE.GHC.Compat.Parser.hpm_annotations, pattern ParsedModule, Development.IDE.GHC.Compat.Parser.pm_parsed_source, type GHC.ParsedModule, Development.IDE.GHC.Compat.Parser.pm_mod_summary, Development.IDE.GHC.Compat.Parser.pm_extra_src_files, - Development.IDE.GHC.Compat.Parser.pm_annotations, - mkApiAnns, -- * API Annotations +#if !MIN_VERSION_ghc(9,11,0) Anno.AnnKeywordId(..), +#endif pattern EpaLineComment, pattern EpaBlockComment ) where @@ -38,17 +36,8 @@ import GHC (EpaCommentTok (..), pm_mod_summary, pm_parsed_source) import qualified GHC -import GHC.Hs (hpm_module, hpm_src_files) - --- See Note [Guidelines For Using CPP In GHCIDE Import Statements] - -#if !MIN_VERSION_ghc(9,3,0) -import qualified GHC.Driver.Config as Config -#endif - -#if MIN_VERSION_ghc(9,3,0) import qualified GHC.Driver.Config.Parser as Config -#endif +import GHC.Hs (hpm_module, hpm_src_files) @@ -60,34 +49,28 @@ initParserState :: ParserOpts -> StringBuffer -> RealSrcLoc -> PState initParserState = Lexer.initParserState --- GHC 9.2 does not have ApiAnns anymore packaged in ParsedModule. Now the --- annotations are found in the ast. 
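(Editor's note: with the API annotations now living in the AST, comment text is read directly off the re-exported EpaCommentTok patterns. The helper below is a hypothetical illustration, not part of the patch.)

commentText :: EpaCommentTok -> Maybe String
commentText (EpaLineComment s)  = Just s   -- "-- ..." style comments
commentText (EpaBlockComment s) = Just s   -- "{- ... -}" style comments
commentText _                   = Nothing  -- doc comments and other tokens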
-type ApiAnns = () - #if MIN_VERSION_ghc(9,5,0) -pattern HsParsedModule :: Located (HsModule GhcPs) -> [FilePath] -> ApiAnns -> GHC.HsParsedModule +pattern HsParsedModule :: Located (HsModule GhcPs) -> [FilePath] -> GHC.HsParsedModule #else -pattern HsParsedModule :: Located HsModule -> [FilePath] -> ApiAnns -> GHC.HsParsedModule +pattern HsParsedModule :: Located HsModule -> [FilePath] -> GHC.HsParsedModule #endif pattern HsParsedModule { hpm_module , hpm_src_files - , hpm_annotations - } <- ( (,()) -> (GHC.HsParsedModule{..}, hpm_annotations)) + } <- GHC.HsParsedModule{..} where - HsParsedModule hpm_module hpm_src_files _hpm_annotations = + HsParsedModule hpm_module hpm_src_files = GHC.HsParsedModule hpm_module hpm_src_files -pattern ParsedModule :: ModSummary -> ParsedSource -> [FilePath] -> ApiAnns -> GHC.ParsedModule +pattern ParsedModule :: ModSummary -> ParsedSource -> [FilePath] -> GHC.ParsedModule pattern ParsedModule { pm_mod_summary , pm_parsed_source , pm_extra_src_files - , pm_annotations - } <- ( (,()) -> (GHC.ParsedModule{..}, pm_annotations)) + } <- GHC.ParsedModule{..} where - ParsedModule ms parsed extra_src_files _anns = + ParsedModule ms parsed extra_src_files = GHC.ParsedModule { pm_mod_summary = ms , pm_parsed_source = parsed @@ -95,6 +78,4 @@ pattern ParsedModule } {-# COMPLETE ParsedModule :: GHC.ParsedModule #-} -mkApiAnns :: PState -> ApiAnns -mkApiAnns = const () diff --git a/ghcide/src/Development/IDE/GHC/Compat/Plugins.hs b/ghcide/src/Development/IDE/GHC/Compat/Plugins.hs index c8c96b1e1f..35bf48374b 100644 --- a/ghcide/src/Development/IDE/GHC/Compat/Plugins.hs +++ b/ghcide/src/Development/IDE/GHC/Compat/Plugins.hs @@ -7,8 +7,6 @@ module Development.IDE.GHC.Compat.Plugins ( defaultPlugin, PluginWithArgs(..), applyPluginsParsedResultAction, - initializePlugins, - initPlugins, -- * Static plugins StaticPlugin(..), @@ -20,79 +18,32 @@ module Development.IDE.GHC.Compat.Plugins ( ) where import Development.IDE.GHC.Compat.Core -import Development.IDE.GHC.Compat.Env (hscSetFlags, hsc_dflags) -import Development.IDE.GHC.Compat.Parser as Parser +import Development.IDE.GHC.Compat.Parser as Parser -import qualified GHC.Driver.Env as Env -import GHC.Driver.Plugins (Plugin (..), - PluginWithArgs (..), - StaticPlugin (..), - defaultPlugin, - withPlugins) -import qualified GHC.Runtime.Loader as Loader +import qualified GHC.Driver.Env as Env +import GHC.Driver.Plugins (ParsedResult (..), + Plugin (..), + PluginWithArgs (..), + PsMessages (..), + StaticPlugin (..), + defaultPlugin, + staticPlugins, withPlugins) +import qualified GHC.Parser.Lexer as Lexer --- See Note [Guidelines For Using CPP In GHCIDE Import Statements] - -#if !MIN_VERSION_ghc(9,3,0) -import Data.Bifunctor (bimap) -import Development.IDE.GHC.Compat.Outputable as Out -import Development.IDE.GHC.Compat.Util (Bag) -#endif - -#if MIN_VERSION_ghc(9,3,0) -import GHC.Driver.Plugins (ParsedResult (..), - PsMessages (..), - staticPlugins) -import qualified GHC.Parser.Lexer as Lexer -#endif - -#if !MIN_VERSION_ghc(9,3,0) -type PsMessages = (Bag WarnMsg, Bag ErrMsg) -#endif getPsMessages :: PState -> PsMessages getPsMessages pst = -#if MIN_VERSION_ghc(9,3,0) uncurry PsMessages $ Lexer.getPsMessages pst -#else - bimap (fmap pprWarning) (fmap pprError) $ getMessages pst -#endif -applyPluginsParsedResultAction :: HscEnv -> ModSummary -> Parser.ApiAnns -> ParsedSource -> PsMessages -> IO (ParsedSource, PsMessages) -applyPluginsParsedResultAction env ms hpm_annotations parsed msgs = do +applyPluginsParsedResultAction :: 
HscEnv -> ModSummary -> ParsedSource -> PsMessages -> IO (ParsedSource, PsMessages) +applyPluginsParsedResultAction env ms parsed msgs = do -- Apply parsedResultAction of plugins let applyPluginAction p opts = parsedResultAction p opts ms -#if MIN_VERSION_ghc(9,3,0) - fmap (\result -> (hpm_module (parsedResultModule result), (parsedResultMessages result))) $ runHsc env $ withPlugins -#else - fmap (\parsed_module -> (hpm_module parsed_module, msgs)) $ runHsc env $ withPlugins -#endif -#if MIN_VERSION_ghc(9,3,0) + fmap (\result -> (hpm_module (parsedResultModule result), parsedResultMessages result)) $ runHsc env $ withPlugins (Env.hsc_plugins env) -#else - env -#endif applyPluginAction -#if MIN_VERSION_ghc(9,3,0) - (ParsedResult (HsParsedModule parsed [] hpm_annotations) msgs) -#else - (HsParsedModule parsed [] hpm_annotations) -#endif - -initializePlugins :: HscEnv -> IO HscEnv -initializePlugins env = do - Loader.initializePlugins env + (ParsedResult (HsParsedModule parsed []) msgs) --- | Plugins aren't stored in ModSummary anymore since GHC 9.2, but this --- function still returns it for compatibility with 8.10 -initPlugins :: HscEnv -> ModSummary -> IO (ModSummary, HscEnv) -initPlugins session modSummary = do - session1 <- initializePlugins (hscSetFlags (ms_hspp_opts modSummary) session) - return (modSummary{ms_hspp_opts = hsc_dflags session1}, session1) hsc_static_plugins :: HscEnv -> [StaticPlugin] -#if MIN_VERSION_ghc(9,3,0) hsc_static_plugins = staticPlugins . Env.hsc_plugins -#else -hsc_static_plugins = Env.hsc_static_plugins -#endif diff --git a/ghcide/src/Development/IDE/GHC/Compat/Units.hs b/ghcide/src/Development/IDE/GHC/Compat/Units.hs index 0456e3135a..f7f634e448 100644 --- a/ghcide/src/Development/IDE/GHC/Compat/Units.hs +++ b/ghcide/src/Development/IDE/GHC/Compat/Units.hs @@ -5,7 +5,6 @@ module Development.IDE.GHC.Compat.Units ( -- * UnitState UnitState, initUnits, - oldInitUnits, unitState, getUnitName, explicitUnits, @@ -53,10 +52,17 @@ import Development.IDE.GHC.Compat.Env import Development.IDE.GHC.Compat.Outputable import Prelude hiding (mod) +import Control.Monad +import qualified Data.List.NonEmpty as NE +import qualified Data.Map.Strict as Map +import qualified GHC import qualified GHC.Data.ShortText as ST +import qualified GHC.Driver.Session as DynFlags +import GHC.Types.PkgQual (PkgQual (NoPkgQual)) import GHC.Types.Unique.Set import GHC.Unit.External import qualified GHC.Unit.Finder as GHC +import GHC.Unit.Home.ModInfo import qualified GHC.Unit.Info as UnitInfo import GHC.Unit.State (LookupResult, UnitInfo, UnitInfoMap, @@ -69,40 +75,19 @@ import GHC.Unit.State (LookupResult, UnitInfo, import qualified GHC.Unit.State as State import GHC.Unit.Types --- See Note [Guidelines For Using CPP In GHCIDE Import Statements] - -#if !MIN_VERSION_ghc(9,3,0) -import GHC.Data.FastString -import GHC.Unit.Env -import GHC.Unit.Finder hiding - (findImportedModule) -import qualified GHC.Unit.Types as Unit -#endif - -#if MIN_VERSION_ghc(9,3,0) -import Control.Monad -import qualified Data.List.NonEmpty as NE -import qualified Data.Map.Strict as Map -import qualified GHC -import qualified GHC.Driver.Session as DynFlags -import GHC.Types.PkgQual (PkgQual (NoPkgQual)) -import GHC.Unit.Home.ModInfo -#endif - type PreloadUnitClosure = UniqSet UnitId unitState :: HscEnv -> UnitState unitState = ue_units . 
hsc_unit_env -#if MIN_VERSION_ghc(9,3,0) createUnitEnvFromFlags :: NE.NonEmpty DynFlags -> HomeUnitGraph createUnitEnvFromFlags unitDflags = let newInternalUnitEnv dflags = mkHomeUnitEnv dflags emptyHomePackageTable Nothing unitEnvList = NE.map (\dflags -> (homeUnitId_ dflags, newInternalUnitEnv dflags)) unitDflags in - unitEnv_new (Map.fromList (NE.toList (unitEnvList))) + unitEnv_new (Map.fromList (NE.toList unitEnvList)) initUnits :: [DynFlags] -> HscEnv -> IO HscEnv initUnits unitDflags env = do @@ -135,25 +120,11 @@ initUnits unitDflags env = do , ue_eps = ue_eps (hsc_unit_env env) } pure $ hscSetFlags dflags1 $ hscSetUnitEnv unit_env env -#else -initUnits :: [DynFlags] -> HscEnv -> IO HscEnv -initUnits _df env = pure env -- Can't do anything here, oldInitUnits should already be called -#endif - --- | oldInitUnits only needs to modify DynFlags for GHC <9.2 --- For GHC >= 9.2, we need to set the hsc_unit_env also, that is --- done later by initUnits -oldInitUnits :: DynFlags -> IO DynFlags -oldInitUnits = pure explicitUnits :: UnitState -> [Unit] explicitUnits ue = -#if MIN_VERSION_ghc(9,3,0) map fst $ State.explicitUnits ue -#else - State.explicitUnits ue -#endif listVisibleModuleNames :: HscEnv -> [ModuleName] listVisibleModuleNames env = @@ -166,11 +137,7 @@ getUnitName env i = lookupModuleWithSuggestions :: HscEnv -> ModuleName -#if MIN_VERSION_ghc(9,3,0) -> GHC.PkgQual -#else - -> Maybe FastString -#endif -> LookupResult lookupModuleWithSuggestions env modname mpkg = State.lookupModuleWithSuggestions (unitState env) modname mpkg @@ -204,10 +171,6 @@ defUnitId = Definite installedModule :: unit -> ModuleName -> GenModule unit installedModule = Module -#if !MIN_VERSION_ghc(9,3,0) -moduleUnitId :: Module -> UnitId -moduleUnitId = Unit.toUnitId . Unit.moduleUnit -#endif filterInplaceUnits :: [UnitId] -> [PackageFlag] -> ([UnitId], [PackageFlag]) filterInplaceUnits us packageFlags = @@ -225,11 +188,7 @@ showSDocForUser' env = showSDocForUser (hsc_dflags env) (unitState env) findImportedModule :: HscEnv -> ModuleName -> IO (Maybe Module) findImportedModule env mn = do -#if MIN_VERSION_ghc(9,3,0) res <- GHC.findImportedModule env mn NoPkgQual -#else - res <- GHC.findImportedModule env mn Nothing -#endif case res of Found _ mod -> pure . 
pure $ mod _ -> pure Nothing diff --git a/ghcide/src/Development/IDE/GHC/Compat/Util.hs b/ghcide/src/Development/IDE/GHC/Compat/Util.hs index 2c60c35b15..1f9e3a1609 100644 --- a/ghcide/src/Development/IDE/GHC/Compat/Util.hs +++ b/ghcide/src/Development/IDE/GHC/Compat/Util.hs @@ -68,6 +68,7 @@ module Development.IDE.GHC.Compat.Util ( import Control.Exception.Safe (MonadCatch, catch, try) import GHC.Data.Bag +import GHC.Data.Bool import GHC.Data.BooleanFormula import GHC.Data.EnumSet import GHC.Data.FastString @@ -79,13 +80,3 @@ import GHC.Types.Unique.DFM import GHC.Utils.Fingerprint import GHC.Utils.Outputable (pprHsString) import GHC.Utils.Panic hiding (try) - --- See Note [Guidelines For Using CPP In GHCIDE Import Statements] - -#if !MIN_VERSION_ghc(9,3,0) -import GHC.Utils.Misc -#endif - -#if MIN_VERSION_ghc(9,3,0) -import GHC.Data.Bool -#endif diff --git a/ghcide/src/Development/IDE/GHC/CoreFile.hs b/ghcide/src/Development/IDE/GHC/CoreFile.hs index ec210a1207..015c5e3aff 100644 --- a/ghcide/src/Development/IDE/GHC/CoreFile.hs +++ b/ghcide/src/Development/IDE/GHC/CoreFile.hs @@ -26,6 +26,9 @@ import GHC.CoreToIface import GHC.Fingerprint import GHC.Iface.Binary import GHC.Iface.Env +#if MIN_VERSION_ghc(9,11,0) +import qualified GHC.Iface.Load as Iface +#endif import GHC.Iface.Recomp.Binary (fingerprintBinMem) import GHC.IfaceToCore import GHC.Types.Id.Make @@ -87,14 +90,20 @@ readBinCoreFile name_cache fat_hi_path = do return (file, fp) -- | Write a core file -writeBinCoreFile :: FilePath -> CoreFile -> IO Fingerprint -writeBinCoreFile core_path fat_iface = do +writeBinCoreFile :: DynFlags -> FilePath -> CoreFile -> IO Fingerprint +writeBinCoreFile _dflags core_path fat_iface = do bh <- openBinMem initBinMemSize let quietTrace = QuietBinIFace - putWithUserData quietTrace bh fat_iface + putWithUserData + quietTrace +#if MIN_VERSION_ghc(9,11,0) + (Iface.flagsToIfCompression _dflags) +#endif + bh + fat_iface -- And send the result to the file writeBinMem bh core_path @@ -141,7 +150,11 @@ getClassImplicitBinds cls | (op, val_index) <- classAllSelIds cls `zip` [0..] ] get_defn :: Id -> CoreBind -get_defn identifier = NonRec identifier (unfoldingTemplate (realIdUnfolding identifier)) +get_defn identifier = NonRec identifier templ + where + templ = case maybeUnfoldingTemplate (realIdUnfolding identifier) of + Nothing -> error "get_dfn: no unfolding template" + Just x -> x toIfaceTopBndr1 :: Module -> Id -> IfaceId toIfaceTopBndr1 mod identifier @@ -197,7 +210,7 @@ tcIfaceId = fmap getIfaceId . tcIfaceDecl False <=< unmangle_decl_name name' <- newIfaceName (mkVarOcc $ getOccString name) pure $ ifid{ ifName = name' } | otherwise = pure ifid - unmangle_decl_name _ifid = error $ "tcIfaceId: got non IfaceId: " + unmangle_decl_name _ifid = error "tcIfaceId: got non IfaceId: " -- invariant: 'IfaceId' is always a 'IfaceId' constructor getIfaceId (AnId identifier) = identifier getIfaceId _ = error "tcIfaceId: got non Id" diff --git a/ghcide/src/Development/IDE/GHC/Error.hs b/ghcide/src/Development/IDE/GHC/Error.hs index 16663f8afd..8f919a3bf2 100644 --- a/ghcide/src/Development/IDE/GHC/Error.hs +++ b/ghcide/src/Development/IDE/GHC/Error.hs @@ -1,11 +1,15 @@ -{-# LANGUAGE CPP #-} +{-# LANGUAGE CPP #-} +{-# LANGUAGE DisambiguateRecordFields #-} -- Copyright (c) 2019 The DAML Authors. All rights reserved. 
-- SPDX-License-Identifier: Apache-2.0 module Development.IDE.GHC.Error ( -- * Producing Diagnostic values - diagFromErrMsgs + diagFromGhcErrorMessages + , diagFromErrMsgs , diagFromErrMsg + , diagFromSDocErrMsgs + , diagFromSDocErrMsg , diagFromString , diagFromStrings , diagFromGhcException @@ -33,10 +37,13 @@ module Development.IDE.GHC.Error , toDSeverity ) where +import Control.Lens import Data.Maybe import Data.String (fromString) import qualified Data.Text as T -import Development.IDE.GHC.Compat (DecoratedSDoc, MsgEnvelope, +import Data.Tuple.Extra (uncurry3) +import Development.IDE.GHC.Compat (GhcMessage, MsgEnvelope, + errMsgDiagnostic, errMsgSeverity, errMsgSpan, formatErrorWithQual, srcErrorMessages) @@ -51,30 +58,51 @@ import Language.LSP.VFS (CodePointPosition (CodePoint CodePointRange (CodePointRange)) -diagFromText :: T.Text -> D.DiagnosticSeverity -> SrcSpan -> T.Text -> FileDiagnostic -diagFromText diagSource sev loc msg = (toNormalizedFilePath' $ fromMaybe noFilePath $ srcSpanToFilename loc,ShowDiag,) - Diagnostic - { _range = fromMaybe noRange $ srcSpanToRange loc - , _severity = Just sev - , _source = Just diagSource -- not shown in the IDE, but useful for ghcide developers - , _message = msg - , _code = Nothing - , _relatedInformation = Nothing - , _tags = Nothing - , _codeDescription = Nothing - , _data_ = Nothing - } +diagFromText :: T.Text -> D.DiagnosticSeverity -> SrcSpan -> T.Text -> Maybe (MsgEnvelope GhcMessage) -> FileDiagnostic +diagFromText diagSource sev loc msg origMsg = + D.ideErrorWithSource + (Just diagSource) (Just sev) + (toNormalizedFilePath' $ fromMaybe noFilePath $ srcSpanToFilename loc) + msg origMsg + & fdLspDiagnosticL %~ \diag -> diag { D._range = fromMaybe noRange $ srcSpanToRange loc } -- | Produce a GHC-style error from a source span and a message. -diagFromErrMsg :: T.Text -> DynFlags -> MsgEnvelope DecoratedSDoc -> [FileDiagnostic] -diagFromErrMsg diagSource dflags e = - [ diagFromText diagSource sev (errMsgSpan e) - $ T.pack $ formatErrorWithQual dflags e - | Just sev <- [toDSeverity $ errMsgSeverity e]] +diagFromErrMsg :: T.Text -> DynFlags -> MsgEnvelope GhcMessage -> [FileDiagnostic] +diagFromErrMsg diagSource dflags origErr = + let err = fmap (\e -> (Compat.renderDiagnosticMessageWithHints e, Just origErr)) origErr + in + diagFromSDocWithOptionalOrigMsg diagSource dflags err + +-- | Compatibility function for creating '[FileDiagnostic]' from +-- a 'Compat.Bag' of GHC error messages. +-- The function signature changes based on the GHC version. +-- While this is not desirable, it avoids more CPP statements in code +-- that implements actual logic. +#if MIN_VERSION_ghc(9,5,0) +diagFromGhcErrorMessages :: T.Text -> DynFlags -> Compat.Bag (MsgEnvelope GhcMessage) -> [FileDiagnostic] +diagFromGhcErrorMessages sourceParser dflags errs = + diagFromErrMsgs sourceParser dflags errs +#else +diagFromGhcErrorMessages :: T.Text -> DynFlags -> Compat.Bag (MsgEnvelope Compat.DecoratedSDoc) -> [FileDiagnostic] +diagFromGhcErrorMessages sourceParser dflags errs = + diagFromSDocErrMsgs sourceParser dflags errs +#endif -diagFromErrMsgs :: T.Text -> DynFlags -> Compat.Bag (MsgEnvelope DecoratedSDoc) -> [FileDiagnostic] +diagFromErrMsgs :: T.Text -> DynFlags -> Compat.Bag (MsgEnvelope GhcMessage) -> [FileDiagnostic] diagFromErrMsgs diagSource dflags = concatMap (diagFromErrMsg diagSource dflags) . 
Compat.bagToList +diagFromSDocErrMsg :: T.Text -> DynFlags -> MsgEnvelope Compat.DecoratedSDoc -> [FileDiagnostic] +diagFromSDocErrMsg diagSource dflags err = + diagFromSDocWithOptionalOrigMsg diagSource dflags (fmap (,Nothing) err) + +diagFromSDocErrMsgs :: T.Text -> DynFlags -> Compat.Bag (MsgEnvelope Compat.DecoratedSDoc) -> [FileDiagnostic] +diagFromSDocErrMsgs diagSource dflags = concatMap (diagFromSDocErrMsg diagSource dflags) . Compat.bagToList + +diagFromSDocWithOptionalOrigMsg :: T.Text -> DynFlags -> MsgEnvelope (Compat.DecoratedSDoc, Maybe (MsgEnvelope GhcMessage)) -> [FileDiagnostic] +diagFromSDocWithOptionalOrigMsg diagSource dflags err = + [ diagFromText diagSource sev (errMsgSpan err) (T.pack (formatErrorWithQual dflags (fmap fst err))) (snd (errMsgDiagnostic err)) + | Just sev <- [toDSeverity $ errMsgSeverity err]] + -- | Convert a GHC SrcSpan to a DAML compiler Range srcSpanToRange :: SrcSpan -> Maybe Range srcSpanToRange (UnhelpfulSpan _) = Nothing @@ -157,27 +185,19 @@ spanContainsRange srcSpan range = (range `isSubrangeOf`) <$> srcSpanToRange srcS -- | Convert a GHC severity to a DAML compiler Severity. Severities below -- "Warning" level are dropped (returning Nothing). toDSeverity :: GHC.Severity -> Maybe D.DiagnosticSeverity -#if !MIN_VERSION_ghc(9,3,0) -toDSeverity SevOutput = Nothing -toDSeverity SevInteractive = Nothing -toDSeverity SevDump = Nothing -toDSeverity SevInfo = Just DiagnosticSeverity_Information -toDSeverity SevFatal = Just DiagnosticSeverity_Error -#else -toDSeverity SevIgnore = Nothing -#endif -toDSeverity SevWarning = Just DiagnosticSeverity_Warning -toDSeverity SevError = Just DiagnosticSeverity_Error +toDSeverity SevIgnore = Nothing +toDSeverity SevWarning = Just DiagnosticSeverity_Warning +toDSeverity SevError = Just DiagnosticSeverity_Error -- | Produce a bag of GHC-style errors (@ErrorMessages@) from the given -- (optional) locations and message strings. -diagFromStrings :: T.Text -> D.DiagnosticSeverity -> [(SrcSpan, String)] -> [FileDiagnostic] -diagFromStrings diagSource sev = concatMap (uncurry (diagFromString diagSource sev)) +diagFromStrings :: T.Text -> D.DiagnosticSeverity -> [(SrcSpan, String, Maybe (MsgEnvelope GhcMessage))] -> [FileDiagnostic] +diagFromStrings diagSource sev = concatMap (uncurry3 (diagFromString diagSource sev)) -- | Produce a GHC-style error from a source span and a message. -diagFromString :: T.Text -> D.DiagnosticSeverity -> SrcSpan -> String -> [FileDiagnostic] -diagFromString diagSource sev sp x = [diagFromText diagSource sev sp $ T.pack x] +diagFromString :: T.Text -> D.DiagnosticSeverity -> SrcSpan -> String -> Maybe (MsgEnvelope GhcMessage) -> [FileDiagnostic] +diagFromString diagSource sev sp x origMsg = [diagFromText diagSource sev sp (T.pack x) origMsg] -- | Produces an "unhelpful" source span with the given string. @@ -207,15 +227,11 @@ catchSrcErrors dflags fromWhere ghcM = do Right <$> ghcM where ghcExceptionToDiagnostics = return . Left . diagFromGhcException fromWhere dflags - sourceErrorToDiagnostics = return . Left . diagFromErrMsgs fromWhere dflags -#if MIN_VERSION_ghc(9,3,0) - . fmap (fmap Compat.renderDiagnosticMessageWithHints) . Compat.getMessages -#endif - . 
srcErrorMessages - + sourceErrorToDiagnostics diag = pure $ Left $ + diagFromErrMsgs fromWhere dflags (Compat.getMessages (srcErrorMessages diag)) diagFromGhcException :: T.Text -> DynFlags -> GhcException -> [FileDiagnostic] -diagFromGhcException diagSource dflags exc = diagFromString diagSource DiagnosticSeverity_Error (noSpan "") (showGHCE dflags exc) +diagFromGhcException diagSource dflags exc = diagFromString diagSource DiagnosticSeverity_Error (noSpan "") (showGHCE dflags exc) Nothing showGHCE :: DynFlags -> GhcException -> String showGHCE dflags exc = case exc of diff --git a/ghcide/src/Development/IDE/GHC/Orphans.hs b/ghcide/src/Development/IDE/GHC/Orphans.hs index d7a85948cf..4e832f9ee2 100644 --- a/ghcide/src/Development/IDE/GHC/Orphans.hs +++ b/ghcide/src/Development/IDE/GHC/Orphans.hs @@ -7,7 +7,9 @@ -- | Orphan instances for GHC. -- Note that the 'NFData' instances may not be law abiding. module Development.IDE.GHC.Orphans() where -import Development.IDE.GHC.Compat +import Development.IDE.GHC.Compat hiding + (DuplicateRecordFields, + FieldSelectors) import Development.IDE.GHC.Util import Control.DeepSeq @@ -17,26 +19,18 @@ import Data.Hashable import Data.String (IsString (fromString)) import Data.Text (unpack) +import Data.Bifunctor (Bifunctor (..)) import GHC.ByteCode.Types import GHC.Data.Bag import GHC.Data.FastString import qualified GHC.Data.StringBuffer as SB -import GHC.Types.SrcLoc - --- See Note [Guidelines For Using CPP In GHCIDE Import Statements] - -#if !MIN_VERSION_ghc(9,3,0) -import GHC.Types.Unique (getKey) -import GHC.Unit.Module.Graph (ModuleGraph) -#endif - -import Data.Bifunctor (Bifunctor (..)) import GHC.Parser.Annotation - -#if MIN_VERSION_ghc(9,3,0) +import GHC.Types.FieldLabel (DuplicateRecordFields (DuplicateRecordFields, NoDuplicateRecordFields), + FieldSelectors (FieldSelectors, NoFieldSelectors)) import GHC.Types.PkgQual +import GHC.Types.SrcLoc -#endif +-- See Note [Guidelines For Using CPP In GHCIDE Import Statements] #if MIN_VERSION_ghc(9,5,0) import GHC.Unit.Home.ModInfo @@ -57,6 +51,17 @@ instance Show ModDetails where show = const "" instance NFData ModDetails where rnf = rwhnf instance NFData SafeHaskellMode where rnf = rwhnf instance Show Linkable where show = unpack . 
printOutputable +#if MIN_VERSION_ghc(9,11,0) +instance NFData Linkable where rnf (Linkable a b c) = rnf a `seq` rnf b `seq` rnf c +instance NFData LinkableObjectSort where rnf = rwhnf +instance NFData LinkablePart where + rnf (DotO a b) = rnf a `seq` rnf b + rnf (DotA f) = rnf f + rnf (DotDLL f) = rnf f + rnf (BCOs a) = seqCompiledByteCode a + rnf (CoreBindings wcb) = rnf wcb + rnf (LazyBCOs a b) = seqCompiledByteCode a `seq` rnf b +#else instance NFData Linkable where rnf (LM a b c) = rnf a `seq` rnf b `seq` rnf c instance NFData Unlinked where rnf (DotO f) = rnf f @@ -66,13 +71,23 @@ instance NFData Unlinked where #if MIN_VERSION_ghc(9,5,0) rnf (CoreBindings wcb) = rnf wcb rnf (LoadedBCOs us) = rnf us +#endif +#endif +#if MIN_VERSION_ghc(9,5,0) instance NFData WholeCoreBindings where +#if MIN_VERSION_ghc(9,11,0) + rnf (WholeCoreBindings bs m ml f) = rnf bs `seq` rnf m `seq` rnf ml `seq` rnf f +#else rnf (WholeCoreBindings bs m ml) = rnf bs `seq` rnf m `seq` rnf ml +#endif instance NFData ModLocation where +#if MIN_VERSION_ghc(9,11,0) + rnf (OsPathModLocation mf f1 f2 f3 f4 f5) = rnf mf `seq` rnf f1 `seq` rnf f2 `seq` rnf f3 `seq` rnf f4 `seq` rnf f5 +#else rnf (ModLocation mf f1 f2 f3 f4 f5) = rnf mf `seq` rnf f1 `seq` rnf f2 `seq` rnf f3 `seq` rnf f4 `seq` rnf f5 - +#endif #endif instance Show PackageFlag where show = unpack . printOutputable @@ -88,9 +103,6 @@ instance NFData SB.StringBuffer where rnf = rwhnf instance Show Module where show = moduleNameString . moduleName -#if !MIN_VERSION_ghc(9,3,0) -instance Outputable a => Show (GenLocated SrcSpan a) where show = unpack . printOutputable -#endif #if !MIN_VERSION_ghc(9,5,0) instance (NFData l, NFData e) => NFData (GenLocated l e) where @@ -131,12 +143,6 @@ instance Show HieFile where instance NFData HieFile where rnf = rwhnf -#if !MIN_VERSION_ghc(9,3,0) -deriving instance Eq SourceModified -deriving instance Show SourceModified -instance NFData SourceModified where - rnf = rwhnf -#endif instance Hashable ModuleName where hashWithSalt salt = hashWithSalt salt . 
show @@ -222,7 +228,6 @@ instance NFData ModuleGraph where rnf = rwhnf instance NFData HomeModInfo where rnf (HomeModInfo iface dets link) = rwhnf iface `seq` rnf dets `seq` rnf link -#if MIN_VERSION_ghc(9,3,0) instance NFData PkgQual where rnf NoPkgQual = () rnf (ThisPkg uid) = rnf uid @@ -233,7 +238,6 @@ instance NFData UnitId where instance NFData NodeKey where rnf = rwhnf -#endif #if MIN_VERSION_ghc(9,5,0) instance NFData HomeModLinkable where @@ -246,8 +250,27 @@ instance NFData (HsExpr (GhcPass Renamed)) where instance NFData (Pat (GhcPass Renamed)) where rnf = rwhnf +instance NFData (HsExpr (GhcPass Typechecked)) where + rnf = rwhnf + +instance NFData (Pat (GhcPass Typechecked)) where + rnf = rwhnf + instance NFData Extension where rnf = rwhnf instance NFData (UniqFM Name [Name]) where rnf (ufmToIntMap -> m) = rnf m + +#if !MIN_VERSION_ghc(9,5,0) +instance NFData DuplicateRecordFields where + rnf DuplicateRecordFields = () + rnf NoDuplicateRecordFields = () + +instance NFData FieldSelectors where + rnf FieldSelectors = () + rnf NoFieldSelectors = () + +instance NFData FieldLabel where + rnf (FieldLabel a b c d) = rnf a `seq` rnf b `seq` rnf c `seq` rnf d +#endif diff --git a/ghcide/src/Development/IDE/GHC/Util.hs b/ghcide/src/Development/IDE/GHC/Util.hs index 03384aec92..a6e0c10461 100644 --- a/ghcide/src/Development/IDE/GHC/Util.hs +++ b/ghcide/src/Development/IDE/GHC/Util.hs @@ -255,7 +255,6 @@ ioe_dupHandlesNotCompatible h = -- Tracing exactprint terms -- | Print a GHC value in `defaultUserStyle` without unique symbols. --- It uses `showSDocUnsafe` with `unsafeGlobalDynFlags` internally. -- -- This is the most common print utility. -- It will do something additionally compared to what the 'Outputable' instance does. diff --git a/ghcide/src/Development/IDE/GHC/Warnings.hs b/ghcide/src/Development/IDE/GHC/Warnings.hs index ff82af1d65..fe77ea8456 100644 --- a/ghcide/src/Development/IDE/GHC/Warnings.hs +++ b/ghcide/src/Development/IDE/GHC/Warnings.hs @@ -6,14 +6,36 @@ module Development.IDE.GHC.Warnings(withWarnings) where import Control.Concurrent.Strict -import Data.List +import Control.Lens (over) import qualified Data.Text as T import Development.IDE.GHC.Compat import Development.IDE.GHC.Error import Development.IDE.Types.Diagnostics -import Language.LSP.Protocol.Types (type (|?) (..)) +{- + Note [withWarnings and its dangers] + ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + withWarnings collects warnings by registering a custom logger which extracts + the SDocs of those warnings. If you receive warnings this way, you will not + get them in a structured form. In the medium term we'd like to remove all + uses of withWarnings to get structured messages everywhere we can. + + For the time being, withWarnings is no longer used for anything in the main + typecheckModule codepath, but it is still used for bytecode/object code + generation, as well as a few other places. + + I suspect some of these functions (e.g. codegen) will need deeper changes to + be able to get diagnostics as a list, though I don't have great evidence for + that atm. I haven't taken a look to see if those functions that are wrapped + with this could produce diagnostics another way. + + It would be good for someone to take a look. 
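(Editor's aside, not part of the Note: the call pattern being described is roughly the following, where the wrapped action applies the supplied HscEnv transformer before running, so the hooked logger can intercept the warnings; `genWithWarnings` is a hypothetical caller.)

genWithWarnings :: HscEnv -> (HscEnv -> IO a)
                -> IO ([(Maybe DiagnosticReason, FileDiagnostic)], a)
genWithWarnings env act =
  -- withWarnings hands us an HscEnv -> HscEnv that installs its log hook;
  -- the action must run with that modified environment for collection to work.
  withWarnings (T.pack "codegen") $ \tweakEnv -> act (tweakEnv env)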
What we've done so far gives us + diagnostics for renaming and typechecking, and doesn't require us to copy + too much code from GHC or make any deeper changes, and lets us get started + with the bulk of the useful plugin work, but it would be good to have all + diagnostics with structure be collected that way. +-} -- | Take a GHC monadic action (e.g. @typecheckModule pm@ for some -- parsed module 'pm@') and produce a "decorated" action that will @@ -24,42 +46,16 @@ import Language.LSP.Protocol.Types (type (|?) (..)) -- https://github.com/ghc/ghc/blob/5f1d949ab9e09b8d95319633854b7959df06eb58/compiler/main/GHC.hs#L623-L640 -- which basically says that log_action is taken from the ModSummary when GHC feels like it. -- The given argument lets you refresh a ModSummary log_action -#if MIN_VERSION_ghc(9,3,0) +-- +-- Also, See Note [withWarnings and its dangers] for some commentary on this function. withWarnings :: T.Text -> ((HscEnv -> HscEnv) -> IO a) -> IO ([(Maybe DiagnosticReason, FileDiagnostic)], a) -#else -withWarnings :: T.Text -> ((HscEnv -> HscEnv) -> IO a) -> IO ([(WarnReason, FileDiagnostic)], a) -#endif withWarnings diagSource action = do warnings <- newVar [] let newAction :: DynFlags -> LogActionCompat newAction dynFlags logFlags wr _ loc prUnqual msg = do - let wr_d = map ((wr,) . third3 (attachReason wr)) $ diagFromErrMsg diagSource dynFlags $ mkWarnMsg dynFlags wr logFlags loc prUnqual msg + let wr_d = map ((wr,) . over fdLspDiagnosticL (attachReason wr)) $ diagFromSDocErrMsg diagSource dynFlags (mkWarnMsg dynFlags wr logFlags loc prUnqual msg) modifyVar_ warnings $ return . (wr_d:) newLogger env = pushLogHook (const (logActionCompat (newAction (hsc_dflags env)))) (hsc_logger env) res <- action $ \env -> putLogHook (newLogger env) env warns <- readVar warnings return (reverse $ concat warns, res) - where - third3 :: (c -> d) -> (a, b, c) -> (a, b, d) - third3 f (a, b, c) = (a, b, f c) - -#if MIN_VERSION_ghc(9,3,0) -attachReason :: Maybe DiagnosticReason -> Diagnostic -> Diagnostic -attachReason Nothing d = d -attachReason (Just wr) d = d{_code = InR <$> showReason wr} - where - showReason = \case - WarningWithFlag flag -> showFlag flag - _ -> Nothing -#else -attachReason :: WarnReason -> Diagnostic -> Diagnostic -attachReason wr d = d{_code = InR <$> showReason wr} - where - showReason = \case - NoReason -> Nothing - Reason flag -> showFlag flag - ErrReason flag -> showFlag =<< flag -#endif - -showFlag :: WarningFlag -> Maybe T.Text -showFlag flag = ("-W" <>) . T.pack . flagSpecName <$> find ((== flag) . 
flagSpecFlag) wWarningFlags diff --git a/ghcide/src/Development/IDE/Import/DependencyInformation.hs b/ghcide/src/Development/IDE/Import/DependencyInformation.hs index 95478fa25c..d6e0f5614c 100644 --- a/ghcide/src/Development/IDE/Import/DependencyInformation.hs +++ b/ghcide/src/Development/IDE/Import/DependencyInformation.hs @@ -20,6 +20,7 @@ module Development.IDE.Import.DependencyInformation , insertImport , pathToId , idToPath + , idToModLocation , reachableModules , processDependencyInformation , transitiveDeps @@ -47,21 +48,14 @@ import Data.List.NonEmpty (NonEmpty (..), nonEmpty) import qualified Data.List.NonEmpty as NonEmpty import Data.Maybe import Data.Tuple.Extra hiding (first, second) +import Development.IDE.GHC.Compat import Development.IDE.GHC.Orphans () -import GHC.Generics (Generic) -import Prelude hiding (mod) - import Development.IDE.Import.FindImports (ArtifactsLocation (..)) import Development.IDE.Types.Diagnostics import Development.IDE.Types.Location +import GHC.Generics (Generic) +import Prelude hiding (mod) -import Development.IDE.GHC.Compat - --- See Note [Guidelines For Using CPP In GHCIDE Import Statements] - -#if !MIN_VERSION_ghc(9,3,0) -import GHC.Unit.Module.Graph (ModuleGraph) -#endif -- | The imports for a given module. newtype ModuleImports = ModuleImports diff --git a/ghcide/src/Development/IDE/Import/FindImports.hs b/ghcide/src/Development/IDE/Import/FindImports.hs index 3e3fc4d942..79614f1809 100644 --- a/ghcide/src/Development/IDE/Import/FindImports.hs +++ b/ghcide/src/Development/IDE/Import/FindImports.hs @@ -14,29 +14,23 @@ module Development.IDE.Import.FindImports ) where import Control.DeepSeq -import Development.IDE.GHC.Compat as Compat -import Development.IDE.GHC.Error as ErrUtils -import Development.IDE.GHC.Orphans () -import Development.IDE.Types.Diagnostics -import Development.IDE.Types.Location - --- standard imports import Control.Monad.Extra import Control.Monad.IO.Class import Data.List (find, isSuffixOf) import Data.Maybe import qualified Data.Set as S +import Development.IDE.GHC.Compat as Compat +import Development.IDE.GHC.Error as ErrUtils +import Development.IDE.GHC.Orphans () +import Development.IDE.Types.Diagnostics +import Development.IDE.Types.Location +import GHC.Types.PkgQual +import GHC.Unit.State import System.FilePath --- See Note [Guidelines For Using CPP In GHCIDE Import Statements] -#if !MIN_VERSION_ghc(9,3,0) -import Development.IDE.GHC.Compat.Util -#endif - -#if MIN_VERSION_ghc(9,3,0) -import GHC.Types.PkgQual -import GHC.Unit.State +#if MIN_VERSION_ghc(9,11,0) +import GHC.Driver.DynFlags #endif data Import @@ -105,12 +99,11 @@ locateModuleFile import_dirss exts targetFor isSource modName = do -- It only returns Just for unit-ids which are possible to import into the -- current module. In particular, it will return Nothing for 'main' components -- as they can never be imported into another package. 
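(Editor's sketch, not part of the patch: how the per-unit import directories are typically assembled from the home units and queried; `lookupImportDirs` is a hypothetical helper.)

lookupImportDirs :: HscEnv -> UnitId -> [(UnitId, DynFlags)]
                 -> Maybe ([FilePath], S.Set ModuleName)
lookupImportDirs env uid =
  -- Units for which mkImportDirs yields Nothing simply drop out of the
  -- lookup table; the rest map to their import paths and re-exports.
  lookup uid . mapMaybe (mkImportDirs env)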
-#if MIN_VERSION_ghc(9,3,0) mkImportDirs :: HscEnv -> (UnitId, DynFlags) -> Maybe (UnitId, ([FilePath], S.Set ModuleName)) -mkImportDirs _env (i, flags) = Just (i, (importPaths flags, reexportedModules flags)) +#if MIN_VERSION_ghc(9,11,0) +mkImportDirs _env (i, flags) = Just (i, (importPaths flags, S.fromList $ map reexportTo $ reexportedModules flags)) #else -mkImportDirs :: HscEnv -> (UnitId, DynFlags) -> Maybe (PackageName, (UnitId, [FilePath], S.Set ModuleName)) -mkImportDirs env (i, flags) = (, (i, importPaths flags, S.empty)) <$> getUnitName env i +mkImportDirs _env (i, flags) = Just (i, (importPaths flags, reexportedModules flags)) #endif -- | locate a module in either the file system or the package database. Where we go from *daml to @@ -122,42 +115,22 @@ locateModule -> [String] -- ^ File extensions -> (ModuleName -> NormalizedFilePath -> m (Maybe NormalizedFilePath)) -- ^ does file exist predicate -> Located ModuleName -- ^ Module name -#if MIN_VERSION_ghc(9,3,0) -> PkgQual -- ^ Package name -#else - -> Maybe FastString -- ^ Package name -#endif -> Bool -- ^ Is boot module -> m (Either [FileDiagnostic] Import) locateModule env comp_info exts targetFor modName mbPkgName isSource = do case mbPkgName of -#if MIN_VERSION_ghc(9,3,0) -- 'ThisPkg' just means some home module, not the current unit ThisPkg uid | Just (dirs, reexports) <- lookup uid import_paths -> lookupLocal uid dirs reexports | otherwise -> return $ Left $ notFoundErr env modName $ LookupNotFound [] -#else - -- "this" means that we should only look in the current package - Just "this" -> do - lookupLocal (homeUnitId_ dflags) (importPaths dflags) S.empty -#endif -- if a package name is given we only go look for a package -#if MIN_VERSION_ghc(9,3,0) OtherPkg uid | Just (dirs, reexports) <- lookup uid import_paths -> lookupLocal uid dirs reexports -#else - Just pkgName - | Just (uid, dirs, reexports) <- lookup (PackageName pkgName) import_paths - -> lookupLocal uid dirs reexports -#endif | otherwise -> lookupInPackageDB -#if MIN_VERSION_ghc(9,3,0) NoPkgQual -> do -#else - Nothing -> do -#endif -- Reexports for current unit have to be empty because they only apply to other units depending on the -- current unit. If we set the reexports to be the actual reexports then we risk looping forever trying @@ -181,7 +154,11 @@ locateModule env comp_info exts targetFor modName mbPkgName isSource = do -- about which module unit a imports. -- Without multi-component support it is hard to recontruct the dependency environment so -- unit a will have both unit b and unit c in scope. +#if MIN_VERSION_ghc(9,11,0) + map (\uid -> let this_df = homeUnitEnv_dflags (ue_findHomeUnitEnv uid ue) in (uid, importPaths this_df, S.fromList $ map reexportTo $ reexportedModules this_df)) hpt_deps +#else map (\uid -> let this_df = homeUnitEnv_dflags (ue_findHomeUnitEnv uid ue) in (uid, importPaths this_df, reexportedModules this_df)) hpt_deps +#endif ue = hsc_unit_env env units = homeUnitEnv_units $ ue_findHomeUnitEnv (homeUnitId_ dflags) ue hpt_deps :: [UnitId] @@ -196,11 +173,7 @@ locateModule env comp_info exts targetFor modName mbPkgName isSource = do -- This is particularly important for Paths_* modules which get generated for every component but unless you use it in -- each component will end up being found in the wrong place and cause a multi-cradle match failure. 
_import_paths' = -- import_paths' is only used in GHC < 9.4 -#if MIN_VERSION_ghc(9,3,0) import_paths -#else - map snd import_paths -#endif toModLocation uid file = liftIO $ do loc <- mkHomeModLocation dflags (unLoc modName) (fromNormalizedFilePath file) @@ -226,7 +199,7 @@ notFoundErr env modName reason = mkError' $ ppr' $ cannotFindModule env modName0 $ lookupToFindResult reason where dfs = hsc_dflags env - mkError' = diagFromString "not found" DiagnosticSeverity_Error (Compat.getLoc modName) + mkError' doc = diagFromString "not found" DiagnosticSeverity_Error (Compat.getLoc modName) doc Nothing modName0 = unLoc modName ppr' = showSDoc dfs -- We convert the lookup result to a find result to reuse GHC's cannotFindModule pretty printer. @@ -263,10 +236,5 @@ notFound = NotFound , fr_suggestions = [] } -#if MIN_VERSION_ghc(9,3,0) noPkgQual :: PkgQual noPkgQual = NoPkgQual -#else -noPkgQual :: Maybe a -noPkgQual = Nothing -#endif diff --git a/ghcide/src/Development/IDE/LSP/HoverDefinition.hs b/ghcide/src/Development/IDE/LSP/HoverDefinition.hs index aea3449bf3..0ba6e22530 100644 --- a/ghcide/src/Development/IDE/LSP/HoverDefinition.hs +++ b/ghcide/src/Development/IDE/LSP/HoverDefinition.hs @@ -7,8 +7,10 @@ module Development.IDE.LSP.HoverDefinition ( Log(..) -- * For haskell-language-server , hover + , foundHover , gotoDefinition , gotoTypeDefinition + , gotoImplementation , documentHighlight , references , wsSymbols @@ -46,9 +48,11 @@ instance Pretty Log where gotoDefinition :: Recorder (WithPriority Log) -> IdeState -> TextDocumentPositionParams -> ExceptT PluginError (HandlerM c) (MessageResult Method_TextDocumentDefinition) hover :: Recorder (WithPriority Log) -> IdeState -> TextDocumentPositionParams -> ExceptT PluginError (HandlerM c) (Hover |? Null) gotoTypeDefinition :: Recorder (WithPriority Log) -> IdeState -> TextDocumentPositionParams -> ExceptT PluginError (HandlerM c) (MessageResult Method_TextDocumentTypeDefinition) +gotoImplementation :: Recorder (WithPriority Log) -> IdeState -> TextDocumentPositionParams -> ExceptT PluginError (HandlerM c) (MessageResult Method_TextDocumentImplementation) documentHighlight :: Recorder (WithPriority Log) -> IdeState -> TextDocumentPositionParams -> ExceptT PluginError (HandlerM c) ([DocumentHighlight] |? Null) -gotoDefinition = request "Definition" getDefinition (InR $ InR Null) (InL . Definition. InR) -gotoTypeDefinition = request "TypeDefinition" getTypeDefinition (InR $ InR Null) (InL . Definition. InR) +gotoDefinition = request "Definition" getDefinition (InR $ InR Null) (InL . Definition . InR . map fst) +gotoTypeDefinition = request "TypeDefinition" getTypeDefinition (InR $ InR Null) (InL . Definition . InR . map fst) +gotoImplementation = request "Implementation" getImplementationDefinition (InR $ InR Null) (InL . Definition . 
InR) hover = request "Hover" getAtPoint (InR Null) foundHover documentHighlight = request "DocumentHighlight" highlightAtPoint (InR Null) InL diff --git a/ghcide/src/Development/IDE/LSP/Outline.hs b/ghcide/src/Development/IDE/LSP/Outline.hs index 1c9d1971b3..af2a0f1c97 100644 --- a/ghcide/src/Development/IDE/LSP/Outline.hs +++ b/ghcide/src/Development/IDE/LSP/Outline.hs @@ -19,21 +19,17 @@ import Development.IDE.Core.Shake import Development.IDE.GHC.Compat import Development.IDE.GHC.Error (rangeToRealSrcSpan, realSrcSpanToRange) -import Development.IDE.Types.Location import Development.IDE.GHC.Util (printOutputable) +import Development.IDE.Types.Location import Ide.Types -import Language.LSP.Protocol.Types (DocumentSymbol (..), +import Language.LSP.Protocol.Message +import Language.LSP.Protocol.Types (DocumentSymbol (..), DocumentSymbolParams (DocumentSymbolParams, _textDocument), SymbolKind (..), TextDocumentIdentifier (TextDocumentIdentifier), - type (|?) (InL, InR), uriToFilePath) -import Language.LSP.Protocol.Message + type (|?) (InL, InR), + uriToFilePath) --- See Note [Guidelines For Using CPP In GHCIDE Import Statements] - -#if !MIN_VERSION_ghc(9,3,0) -import qualified Data.Text as T -#endif moduleOutline :: PluginMethodHandler IdeState Method_TextDocumentDocumentSymbol @@ -118,21 +114,13 @@ documentSymbolForDecl (L (locA -> (RealSrcSpan l _)) (TyClD _ DataDecl { tcdLNam , L (locA -> RealSrcSpan l' _) n <- cs , let l'' = case con of L (locA -> RealSrcSpan l''' _) _ -> l''' - _ -> l' + _ -> l' ] } where cvtFld :: LFieldOcc GhcPs -> Maybe DocumentSymbol -#if MIN_VERSION_ghc(9,3,0) cvtFld (L (locA -> RealSrcSpan l' _) n) = Just $ (defDocumentSymbol l' :: DocumentSymbol) -#else - cvtFld (L (RealSrcSpan l' _) n) = Just $ (defDocumentSymbol l' :: DocumentSymbol) -#endif -#if MIN_VERSION_ghc(9,3,0) { _name = printOutputable (unLoc (foLabel n)) -#else - { _name = printOutputable (unLoc (rdrNameFieldOcc n)) -#endif , _kind = SymbolKind_Field } cvtFld _ = Nothing @@ -148,23 +136,13 @@ documentSymbolForDecl (L (locA -> (RealSrcSpan l _)) (InstD _ ClsInstD { cid_ins documentSymbolForDecl (L (locA -> (RealSrcSpan l _)) (InstD _ DataFamInstD { dfid_inst = DataFamInstDecl FamEqn { feqn_tycon, feqn_pats } })) = Just (defDocumentSymbol l :: DocumentSymbol) { _name = -#if MIN_VERSION_ghc(9,3,0) - printOutputable $ pprHsArgsApp (unLoc feqn_tycon) Prefix (feqn_pats) -#else - printOutputable (unLoc feqn_tycon) <> " " <> T.unwords - (map printOutputable feqn_pats) -#endif + printOutputable $ pprHsArgsApp (unLoc feqn_tycon) Prefix feqn_pats , _kind = SymbolKind_Interface } documentSymbolForDecl (L (locA -> (RealSrcSpan l _)) (InstD _ TyFamInstD { tfid_inst = TyFamInstDecl _ FamEqn { feqn_tycon, feqn_pats } })) = Just (defDocumentSymbol l :: DocumentSymbol) { _name = -#if MIN_VERSION_ghc(9,3,0) - printOutputable $ pprHsArgsApp (unLoc feqn_tycon) Prefix (feqn_pats) -#else - printOutputable (unLoc feqn_tycon) <> " " <> T.unwords - (map printOutputable feqn_pats) -#endif + printOutputable $ pprHsArgsApp (unLoc feqn_tycon) Prefix feqn_pats , _kind = SymbolKind_Interface } documentSymbolForDecl (L (locA -> (RealSrcSpan l _)) (DerivD _ DerivDecl { deriv_type })) = @@ -267,18 +245,16 @@ hsConDeclsBinders cons get_flds_h98 :: HsConDeclH98Details GhcPs -> [LFieldOcc GhcPs] get_flds_h98 (RecCon flds) = get_flds (reLoc flds) - get_flds_h98 _ = [] + get_flds_h98 _ = [] get_flds_gadt :: HsConDeclGADTDetails GhcPs -> [LFieldOcc GhcPs] #if MIN_VERSION_ghc(9,9,0) get_flds_gadt (RecConGADT _ flds) = get_flds (reLoc flds) 
-#elif MIN_VERSION_ghc(9,3,0) - get_flds_gadt (RecConGADT flds _) = get_flds (reLoc flds) #else - get_flds_gadt (RecConGADT flds) = get_flds (reLoc flds) + get_flds_gadt (RecConGADT flds _) = get_flds (reLoc flds) #endif - get_flds_gadt _ = [] + get_flds_gadt _ = [] get_flds :: Located [LConDeclField GhcPs] -> [LFieldOcc GhcPs] diff --git a/ghcide/src/Development/IDE/LSP/Server.hs b/ghcide/src/Development/IDE/LSP/Server.hs index e2b234557d..605250491b 100644 --- a/ghcide/src/Development/IDE/LSP/Server.hs +++ b/ghcide/src/Development/IDE/LSP/Server.hs @@ -41,7 +41,7 @@ requestHandler m k = LSP.requestHandler m $ \TRequestMessage{_method,_id,_params trace x = otTracedHandler "Request" (show _method) $ \sp -> do traceWithSpan sp _params x - writeChan chan $ ReactorRequest (_id) (trace $ LSP.runLspT env $ resp' =<< k ide _params) (LSP.runLspT env . resp' . Left) + writeChan chan $ ReactorRequest _id (trace $ LSP.runLspT env $ resp' =<< k ide _params) (LSP.runLspT env . resp' . Left) notificationHandler :: forall m c. PluginMethod Notification m => diff --git a/ghcide/src/Development/IDE/Main.hs b/ghcide/src/Development/IDE/Main.hs index d4c80e23a6..62b71c3ab6 100644 --- a/ghcide/src/Development/IDE/Main.hs +++ b/ghcide/src/Development/IDE/Main.hs @@ -16,7 +16,6 @@ import Control.Concurrent.MVar (newEmptyMVar, putMVar, tryReadMVar) import Control.Concurrent.STM.Stats (dumpSTMStats) import Control.Exception.Safe (SomeException, - catchAny, displayException) import Control.Monad.Extra (concatMapM, unless, when) @@ -32,7 +31,7 @@ import Data.List.Extra (intercalate, import Data.Maybe (catMaybes, isJust) import qualified Data.Text as T import Development.IDE (Action, - Priority (Debug, Error), + Priority (Debug), Rules, hDuplicateTo') import Development.IDE.Core.Debouncer (Debouncer, newAsyncDebouncer) @@ -72,9 +71,9 @@ import qualified Development.IDE.Plugin.HLS.GhcIde as GhcIde import qualified Development.IDE.Plugin.Test as Test import Development.IDE.Session (SessionLoadingOptions, getHieDbLoc, + getInitialGhcLibDirDefault, loadSessionWithOptions, - retryOnSqliteBusy, - setInitialDynFlags) + retryOnSqliteBusy) import qualified Development.IDE.Session as Session import Development.IDE.Types.Location (NormalizedUri, toNormalizedFilePath') @@ -136,7 +135,6 @@ data Log | LogLspStart [PluginId] | LogLspStartDuration !Seconds | LogShouldRunSubset !Bool - | LogSetInitialDynFlagsException !SomeException | LogConfigurationChange T.Text | LogService Service.Log | LogShake Shake.Log @@ -160,8 +158,6 @@ instance Pretty Log where "Started LSP server in" <+> pretty (showDuration duration) LogShouldRunSubset shouldRunSubset -> "shouldRunSubset:" <+> pretty shouldRunSubset - LogSetInitialDynFlagsException e -> - "setInitialDynFlags:" <+> pretty (displayException e) LogConfigurationChange msg -> "Configuration changed:" <+> pretty msg LogService msg -> pretty msg LogShake msg -> pretty msg @@ -329,13 +325,6 @@ defaultMain recorder Arguments{..} = withHeapStats (cmapWithPrio LogHeapStats re getIdeState env rootPath withHieDb threadQueue = do t <- ioT logWith recorder Info $ LogLspStartDuration t - -- We want to set the global DynFlags right now, so that we can use - -- `unsafeGlobalDynFlags` even before the project is configured - _mlibdir <- - setInitialDynFlags (cmapWithPrio LogSession recorder) rootPath argsSessionLoadingOptions - -- TODO: should probably catch/log/rethrow at top level instead - `catchAny` (\e -> logWith recorder Error (LogSetInitialDynFlagsException e) >> pure Nothing) - sessionLoader <- 
loadSessionWithOptions (cmapWithPrio LogSession recorder) argsSessionLoadingOptions rootPath (tLoaderQueue threadQueue) config <- LSP.runLspT env LSP.getConfig let def_options = argsIdeOptions config sessionLoader @@ -435,7 +424,7 @@ defaultMain recorder Arguments{..} = withHeapStats (cmapWithPrio LogHeapStats re let root = argsProjectRoot dbLoc <- getHieDbLoc root hPutStrLn stderr $ "Using hiedb at: " ++ dbLoc - mlibdir <- setInitialDynFlags (cmapWithPrio LogSession recorder) root def + mlibdir <- getInitialGhcLibDirDefault (cmapWithPrio LogSession recorder) root rng <- newStdGen case mlibdir of Nothing -> exitWith $ ExitFailure 1 diff --git a/ghcide/src/Development/IDE/Plugin/Completions.hs b/ghcide/src/Development/IDE/Plugin/Completions.hs index 98ca6dc592..0564855177 100644 --- a/ghcide/src/Development/IDE/Plugin/Completions.hs +++ b/ghcide/src/Development/IDE/Plugin/Completions.hs @@ -19,6 +19,7 @@ import qualified Data.HashSet as Set import Data.Maybe import qualified Data.Text as T import Development.IDE.Core.Compile +import Development.IDE.Core.FileStore (getUriContents) import Development.IDE.Core.PluginUtils import Development.IDE.Core.PositionMapping import Development.IDE.Core.RuleTypes @@ -133,11 +134,7 @@ resolveCompletion ide _pid comp@CompletionItem{_detail,_documentation,_data_} ur $ runIdeActionE "CompletionResolve.GhcSessionDeps" (shakeExtras ide) $ useWithStaleFastE GhcSessionDeps file let nc = ideNc $ shakeExtras ide -#if MIN_VERSION_ghc(9,3,0) name <- liftIO $ lookupNameCache nc mod occ -#else - name <- liftIO $ upNameCache nc (lookupNameCache mod occ) -#endif mdkm <- liftIO $ runIdeAction "CompletionResolve.GetDocMap" (shakeExtras ide) $ useWithStaleFast GetDocMap file let (dm,km) = case mdkm of Just (DKMap docMap tyThingMap, _) -> (docMap,tyThingMap) @@ -169,8 +166,9 @@ getCompletionsLSP ide plId CompletionParams{_textDocument=TextDocumentIdentifier uri ,_position=position ,_context=completionContext} = ExceptT $ do - contents <- pluginGetVirtualFile $ toNormalizedUri uri - fmap Right $ case (contents, uriToFilePath' uri) of + contentsMaybe <- + liftIO $ runAction "Completion" ide $ getUriContents $ toNormalizedUri uri + fmap Right $ case (contentsMaybe, uriToFilePath' uri) of (Just cnts, Just path) -> do let npath = toNormalizedFilePath' path (ideOpts, compls, moduleExports, astres) <- liftIO $ runIdeAction "Completion" (shakeExtras ide) $ do @@ -204,7 +202,7 @@ getCompletionsLSP ide plId pure (opts, fmap (,pm,binds) compls, moduleExports, astres) case compls of Just (cci', parsedMod, bindMap) -> do - let pfix = getCompletionPrefix position cnts + let pfix = getCompletionPrefixFromRope position cnts case (pfix, completionContext) of (PosPrefixInfo _ "" _ _, Just CompletionContext { _triggerCharacter = Just "."}) -> return (InL []) diff --git a/ghcide/src/Development/IDE/Plugin/Completions/Logic.hs b/ghcide/src/Development/IDE/Plugin/Completions/Logic.hs index 867c47719a..9fdc196cd5 100644 --- a/ghcide/src/Development/IDE/Plugin/Completions/Logic.hs +++ b/ghcide/src/Development/IDE/Plugin/Completions/Logic.hs @@ -74,11 +74,6 @@ import GHC.Plugins (Depth (AllTheWay), -- See Note [Guidelines For Using CPP In GHCIDE Import Statements] -#if !MIN_VERSION_ghc(9,3,0) -import GHC.Plugins (defaultSDocContext, - renderWithContext) -#endif - #if MIN_VERSION_ghc(9,5,0) import Language.Haskell.Syntax.Basic #endif @@ -514,13 +509,8 @@ findRecordCompl uri mn DataDecl {tcdLName, tcdDataDefn} = result -- -- is encoded as @[[arg1, arg2], [arg3], [arg4]]@ -- Hence, we must concat 
nested arguments into one to get all the fields. -#if MIN_VERSION_ghc(9,3,0) extract ConDeclField{..} = map (foLabel . unLoc) cd_fld_names -#else - extract ConDeclField{..} - = map (rdrNameFieldOcc . unLoc) cd_fld_names -#endif -- XConDeclField extract _ = [] findRecordCompl _ _ _ = [] diff --git a/ghcide/src/Development/IDE/Plugin/Completions/Types.hs b/ghcide/src/Development/IDE/Plugin/Completions/Types.hs index 2d950d66a9..338b969bab 100644 --- a/ghcide/src/Development/IDE/Plugin/Completions/Types.hs +++ b/ghcide/src/Development/IDE/Plugin/Completions/Types.hs @@ -16,7 +16,6 @@ import Data.Aeson import Data.Aeson.Types import Data.Hashable (Hashable) import Data.Text (Text) -import Data.Typeable (Typeable) import Development.IDE.GHC.Compat import Development.IDE.Graph (RuleResult) import Development.IDE.Spans.Common () @@ -31,12 +30,12 @@ type instance RuleResult LocalCompletions = CachedCompletions type instance RuleResult NonLocalCompletions = CachedCompletions data LocalCompletions = LocalCompletions - deriving (Eq, Show, Typeable, Generic) + deriving (Eq, Show, Generic) instance Hashable LocalCompletions instance NFData LocalCompletions data NonLocalCompletions = NonLocalCompletions - deriving (Eq, Show, Typeable, Generic) + deriving (Eq, Show, Generic) instance Hashable NonLocalCompletions instance NFData NonLocalCompletions diff --git a/ghcide/src/Development/IDE/Plugin/HLS.hs b/ghcide/src/Development/IDE/Plugin/HLS.hs index fd48d86ae6..f5190e9274 100644 --- a/ghcide/src/Development/IDE/Plugin/HLS.hs +++ b/ghcide/src/Development/IDE/Plugin/HLS.hs @@ -10,7 +10,10 @@ module Development.IDE.Plugin.HLS ) where import Control.Exception (SomeException) +import Control.Lens ((^.)) import Control.Monad +import qualified Control.Monad.Extra as Extra +import Control.Monad.IO.Class (MonadIO) import Control.Monad.Trans.Except (runExceptT) import qualified Data.Aeson as A import Data.Bifunctor (first) @@ -22,7 +25,7 @@ import qualified Data.List as List import Data.List.NonEmpty (NonEmpty, nonEmpty, toList) import qualified Data.List.NonEmpty as NE import qualified Data.Map as Map -import Data.Maybe (mapMaybe) +import Data.Maybe (isNothing, mapMaybe) import Data.Some import Data.String import Data.Text (Text) @@ -39,6 +42,7 @@ import Ide.Plugin.Error import Ide.Plugin.HandleRequestTypes import Ide.PluginUtils (getClientConfig) import Ide.Types as HLS +import qualified Language.LSP.Protocol.Lens as JL import Language.LSP.Protocol.Message import Language.LSP.Protocol.Types import qualified Language.LSP.Server as LSP @@ -58,6 +62,7 @@ data Log | LogNoPluginForMethod (Some SMethod) | LogInvalidCommandIdentifier | ExceptionInPlugin PluginId (Some SMethod) SomeException + | LogResolveDefaultHandler (Some SMethod) instance Pretty Log where pretty = \case @@ -71,6 +76,8 @@ instance Pretty Log where ExceptionInPlugin plId (Some method) exception -> "Exception in plugin " <> viaShow plId <> " while processing " <> pretty method <> ": " <> viaShow exception + LogResolveDefaultHandler (Some method) -> + "No plugin can handle" <+> pretty method <+> "request. Return object unchanged." instance Show Log where show = renderString . layoutCompact . 
pretty noPluginHandles :: Recorder (WithPriority Log) -> SMethod m -> [(PluginId, HandleRequestResult)] -> IO (Either (TResponseError m) c) @@ -250,8 +257,16 @@ extensiblePlugins recorder plugins = mempty { P.pluginHandlers = handlers } let (fs, dfs) = List.partition (\(_, desc, _) -> handlesRequest m params desc config == HandlesRequest) fs' let disabledPluginsReason = (\(x, desc, _) -> (x, handlesRequest m params desc config)) <$> dfs -- Clients generally don't display ResponseErrors so instead we log any that we come across + -- However, some clients do display ResponseErrors! See for example the issues: + -- https://github.com/haskell/haskell-language-server/issues/4467 + -- https://github.com/haskell/haskell-language-server/issues/4451 case nonEmpty fs of - Nothing -> liftIO $ noPluginHandles recorder m disabledPluginsReason + Nothing -> do + liftIO (fallbackResolveHandler recorder m params) >>= \case + Nothing -> + liftIO $ noPluginHandles recorder m disabledPluginsReason + Just result -> + pure $ Right result Just neFs -> do let plidsAndHandlers = fmap (\(plid,_,handler) -> (plid,handler)) neFs es <- runHandlerM $ runConcurrently exceptionInPlugin m plidsAndHandlers ide params @@ -272,6 +287,72 @@ extensiblePlugins recorder plugins = mempty { P.pluginHandlers = handlers } Just xs -> do pure $ Right $ combineResponses m config caps params xs +-- | Fallback Handler for resolve requests. +-- For all kinds of `*/resolve` requests, if they don't have a 'data_' value, +-- produce the original item, since no other plugin has any resolve data. +-- +-- This is an internal handler, so it cannot be turned off and should be opaque +-- to the end-user. +-- This function does not take the ServerCapabilities into account, and assumes +-- clients will only send these requests, if and only if the Language Server +-- advertised support for it. +-- +-- See Note [Fallback Handler for LSP resolve requests] for justification and reasoning. +fallbackResolveHandler :: MonadIO m => Recorder (WithPriority Log) -> SMethod s -> MessageParams s -> m (Maybe (MessageResult s)) +fallbackResolveHandler recorder m params = do + let result = case m of + SMethod_InlayHintResolve + | noResolveData params -> Just params + SMethod_CompletionItemResolve + | noResolveData params -> Just params + SMethod_CodeActionResolve + | noResolveData params -> Just params + SMethod_WorkspaceSymbolResolve + | noResolveData params -> Just params + SMethod_CodeLensResolve + | noResolveData params -> Just params + SMethod_DocumentLinkResolve + | noResolveData params -> Just params + _ -> Nothing + logResolveHandling result + pure result + where + noResolveData :: JL.HasData_ p (Maybe a) => p -> Bool + noResolveData p = isNothing $ p ^. JL.data_ + + -- We only log if we are handling the request. + -- If we don't handle this request, this should be logged + -- on call-site. + logResolveHandling p = Extra.whenJust p $ \_ -> do + logWith recorder Debug $ LogResolveDefaultHandler (Some m) + +{- Note [Fallback Handler for LSP resolve requests] +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +We have a special fallback for `*/resolve` requests. + +We had multiple reports, where `resolve` requests (such as +`completion/resolve` and `codeAction/resolve`) are rejected +by HLS since the `_data_` field of the respective LSP feature has not been +populated by HLS. 
+This makes sense, as we only support `resolve` for certain kinds of +`CodeAction`/`Completions`, when they contain particularly expensive +properties, such as documentation or non-local type signatures. + +So what to do? We can see two options: + +1. Be dumb and permissive: if no plugin wants to resolve a request, then + just respond positively with the original item! Potentially this masks + real issues, but may not be too bad. If a plugin thinks it can + handle the request but it then fails to resolve it, we should still return a failure. +2. Try and be smart: we try to figure out requests that we're "supposed" to + resolve (e.g. those with a data field), and fail if no plugin wants to handle those. + This is possible since we set data. + So as long as we maintain the invariant that only things which need resolving get + data, then it could be okay. + +In 'fallbackResolveHandler', we implement the option (2). +-} -- --------------------------------------------------------------------- diff --git a/ghcide/src/Development/IDE/Plugin/HLS/GhcIde.hs b/ghcide/src/Development/IDE/Plugin/HLS/GhcIde.hs index ec5c6bf84b..ada0f9e682 100644 --- a/ghcide/src/Development/IDE/Plugin/HLS/GhcIde.hs +++ b/ghcide/src/Development/IDE/Plugin/HLS/GhcIde.hs @@ -51,6 +51,8 @@ descriptor recorder plId = (defaultPluginDescriptor plId desc) Hover.gotoDefinition recorder ide TextDocumentPositionParams{..}) <> mkPluginHandler SMethod_TextDocumentTypeDefinition (\ide _ TypeDefinitionParams{..} -> Hover.gotoTypeDefinition recorder ide TextDocumentPositionParams{..}) + <> mkPluginHandler SMethod_TextDocumentImplementation (\ide _ ImplementationParams{..} -> + Hover.gotoImplementation recorder ide TextDocumentPositionParams{..}) <> mkPluginHandler SMethod_TextDocumentDocumentHighlight (\ide _ DocumentHighlightParams{..} -> Hover.documentHighlight recorder ide TextDocumentPositionParams{..}) <> mkPluginHandler SMethod_TextDocumentReferences (Hover.references recorder) diff --git a/ghcide/src/Development/IDE/Plugin/TypeLenses.hs b/ghcide/src/Development/IDE/Plugin/TypeLenses.hs index 51d25e995b..40ce1dda7b 100644 --- a/ghcide/src/Development/IDE/Plugin/TypeLenses.hs +++ b/ghcide/src/Development/IDE/Plugin/TypeLenses.hs @@ -1,3 +1,4 @@ +{-# LANGUAGE CPP #-} {-# LANGUAGE DeriveAnyClass #-} {-# LANGUAGE OverloadedLabels #-} {-# LANGUAGE TypeFamilies #-} @@ -26,7 +27,8 @@ import Data.List (find) import qualified Data.Map as Map import Data.Maybe (catMaybes, maybeToList) import qualified Data.Text as T -import Development.IDE (GhcSession (..), +import Development.IDE (FileDiagnostic (..), + GhcSession (..), HscEnvEq (hscEnv), RuleResult, Rules, Uri, define, srcSpanToRange, @@ -126,9 +128,10 @@ codeLensProvider ideState pId CodeLensParams{_textDocument = TextDocumentIdentif -- We don't actually pass any data to resolve, however we need this -- dummy type to make sure HLS resolves our lens [ CodeLens _range Nothing (Just $ toJSON TypeLensesResolve) - | (dFile, _, diag@Diagnostic{_range}) <- diags - , dFile == nfp - , isGlobalDiagnostic diag] + | diag <- diags + , let lspDiag@Diagnostic {_range} = fdLspDiagnostic diag + , fdFilePath diag == nfp + , isGlobalDiagnostic lspDiag] -- The second option is to generate lenses from the GlobalBindingTypeSig -- rule. This is the only type that needs to have the range adjusted -- with PositionMapping. 
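The Note [Fallback Handler for LSP resolve requests] above means routing of `*/resolve` requests is driven entirely by the `_data_` field: items that carry a payload are resolved by the plugin that produced them, items without one are simply echoed back by the fallback. A minimal sketch of what that looks like from a plugin's side; `MyLensData` and the module name are hypothetical, only the positional `CodeLens` constructor shape is taken from the TypeLenses code above:

```haskell
{-# LANGUAGE DeriveGeneric #-}
module FallbackResolveExample where

import Data.Aeson (FromJSON, ToJSON, toJSON)
import GHC.Generics (Generic)
import Language.LSP.Protocol.Types (CodeLens (..), Range)

-- Hypothetical resolve payload; any serialisable marker works.
data MyLensData = MyLensData
  deriving (Generic)

instance ToJSON MyLensData
instance FromJSON MyLensData

-- A lens the owning plugin will resolve later: it carries a '_data_' payload,
-- so 'codeLens/resolve' is dispatched to that plugin's resolve handler.
resolvableLens :: Range -> CodeLens
resolvableLens range = CodeLens range Nothing (Just (toJSON MyLensData))

-- A lens without a payload: if a client nevertheless sends 'codeLens/resolve'
-- for it, the fallback handler above returns it unchanged instead of erroring.
plainLens :: Range -> CodeLens
plainLens range = CodeLens range Nothing Nothing
```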
@@ -319,7 +322,11 @@ gblBindingType (Just hsc) (Just gblEnv) = do let name = idName identifier hasSig name $ do env <- tcInitTidyEnv +#if MIN_VERSION_ghc(9,11,0) + let ty = tidyOpenType env (idType identifier) +#else let (_, ty) = tidyOpenType env (idType identifier) +#endif pure $ GlobalBindingTypeSig name (printName name <> " :: " <> showDoc (pprSigmaType ty)) (name `elemNameSet` exports) patToSig p = do let name = patSynName p diff --git a/ghcide/src/Development/IDE/Spans/AtPoint.hs b/ghcide/src/Development/IDE/Spans/AtPoint.hs index 434c684b96..a577cae32e 100644 --- a/ghcide/src/Development/IDE/Spans/AtPoint.hs +++ b/ghcide/src/Development/IDE/Spans/AtPoint.hs @@ -10,6 +10,7 @@ module Development.IDE.Spans.AtPoint ( atPoint , gotoDefinition , gotoTypeDefinition + , gotoImplementation , documentHighlight , pointCommand , referencesAtPoint @@ -23,6 +24,10 @@ module Development.IDE.Spans.AtPoint ( , LookupModule ) where + +import GHC.Data.FastString (lengthFS) +import qualified GHC.Utils.Outputable as O + import Development.IDE.GHC.Error import Development.IDE.GHC.Orphans () import Development.IDE.Types.Location @@ -31,6 +36,7 @@ import Language.LSP.Protocol.Types hiding import Prelude hiding (mod) -- compiler and infrastructure +import Development.IDE.Core.Compile (setNonHomeFCHook) import Development.IDE.Core.PositionMapping import Development.IDE.Core.RuleTypes import Development.IDE.GHC.Compat @@ -52,9 +58,13 @@ import qualified Data.Text as T import qualified Data.Array as A import Data.Either -import Data.List (isSuffixOf) import Data.List.Extra (dropEnd1, nubOrd) + +import Data.Either.Extra (eitherToMaybe) +import Data.List (isSuffixOf, sortOn) +import Data.Tree +import qualified Data.Tree as T import Data.Version (showVersion) import Development.IDE.Types.Shake (WithHieDb) import HieDb hiding (pointCommand, @@ -171,14 +181,19 @@ documentHighlight hf rf pos = pure highlights highlights = do n <- ns ref <- fromMaybe [] (M.lookup (Right n) rf) - pure $ makeHighlight ref - makeHighlight (sp,dets) = - DocumentHighlight (realSrcSpanToRange sp) (Just $ highlightType $ identInfo dets) + maybeToList (makeHighlight n ref) + makeHighlight n (sp,dets) + | isTvNameSpace (nameNameSpace n) && isBadSpan n sp = Nothing + | otherwise = Just $ DocumentHighlight (realSrcSpanToRange sp) (Just $ highlightType $ identInfo dets) highlightType s = if any (isJust . getScopeFromContext) s then DocumentHighlightKind_Write else DocumentHighlightKind_Read + isBadSpan :: Name -> RealSrcSpan -> Bool + isBadSpan n sp = srcSpanStartLine sp /= srcSpanEndLine sp || (srcSpanEndCol sp - srcSpanStartCol sp > lengthFS (occNameFS $ nameOccName n)) + +-- | Locate the type definition of the name at a given position. gotoTypeDefinition :: MonadIO m => WithHieDb @@ -186,7 +201,7 @@ gotoTypeDefinition -> IdeOptions -> HieAstResult -> Position - -> MaybeT m [Location] + -> MaybeT m [(Location, Identifier)] gotoTypeDefinition withHieDb lookupModule ideOpts srcSpans pos = lift $ typeLocationsAtPoint withHieDb lookupModule ideOpts pos srcSpans @@ -197,12 +212,25 @@ gotoDefinition -> LookupModule m -> IdeOptions -> M.Map ModuleName NormalizedFilePath - -> HieASTs a + -> HieAstResult -> Position - -> MaybeT m [Location] + -> MaybeT m [(Location, Identifier)] gotoDefinition withHieDb getHieFile ideOpts imports srcSpans pos = lift $ locationsAtPoint withHieDb getHieFile ideOpts imports pos srcSpans +-- | Locate the implementation definition of the name at a given position. +-- Goto Implementation for an overloaded function. 
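To make the intent of the new implementation handler concrete, here is a small, self-contained source example (not part of the patch) of what "go to implementation" on an overloaded call is expected to do: the evidence for the call below is the `Pretty Bool` dictionary, so the request should land on that instance declaration.

```haskell
module GotoImplementationExample where

class Pretty a where
  display :: a -> String

instance Pretty Bool where
  display True  = "yes"
  display False = "no"

greeting :: String
greeting = display True
-- ^ textDocument/implementation on this use of 'display' is expected to jump
--   to 'instance Pretty Bool' above, because that is the evidence the call
--   elaborates to.
```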
+gotoImplementation + :: MonadIO m + => WithHieDb + -> LookupModule m + -> IdeOptions + -> HieAstResult + -> Position + -> MaybeT m [Location] +gotoImplementation withHieDb getHieFile ideOpts srcSpans pos + = lift $ instanceLocationsAtPoint withHieDb getHieFile ideOpts pos srcSpans + -- | Synopsis for the name at a given position. atPoint :: IdeOptions @@ -211,13 +239,13 @@ atPoint -> HscEnv -> Position -> IO (Maybe (Maybe Range, [T.Text])) -atPoint IdeOptions{} (HAR _ hf _ _ (kind :: HieKind hietype)) (DKMap dm km) env pos = +atPoint IdeOptions{} (HAR _ (hf :: HieASTs a) rf _ (kind :: HieKind hietype)) (DKMap dm km) env pos = listToMaybe <$> sequence (pointCommand hf pos hoverInfo) where -- Hover info for values/data hoverInfo :: HieAST hietype -> IO (Maybe Range, [T.Text]) hoverInfo ast = do - prettyNames <- mapM prettyName filteredNames + prettyNames <- mapM prettyName names pure (Just range, prettyNames ++ pTypes) where pTypes :: [T.Text] @@ -234,24 +262,34 @@ atPoint IdeOptions{} (HAR _ hf _ _ (kind :: HieKind hietype)) (DKMap dm km) env info :: NodeInfo hietype info = nodeInfoH kind ast + -- We want evidence variables to be displayed last. + -- Evidence trees contain information of secondary relevance. names :: [(Identifier, IdentifierDetails hietype)] - names = M.assocs $ nodeIdentifiers info - - -- Check for evidence bindings - isInternal :: (Identifier, IdentifierDetails a) -> Bool - isInternal (Right _, dets) = - any isEvidenceContext $ identInfo dets - isInternal (Left _, _) = False - - filteredNames :: [(Identifier, IdentifierDetails hietype)] - filteredNames = filter (not . isInternal) names + names = sortOn (any isEvidenceUse . identInfo . snd) $ M.assocs $ nodeIdentifiers info prettyName :: (Either ModuleName Name, IdentifierDetails hietype) -> IO T.Text - prettyName (Right n, dets) = pure $ T.unlines $ - wrapHaskell (printOutputable n <> maybe "" (" :: " <>) ((prettyType <$> identType dets) <|> maybeKind)) - : maybeToList (pretty (definedAt n) (prettyPackageName n)) - ++ catMaybes [ T.unlines . spanDocToMarkdown <$> lookupNameEnv dm n - ] + prettyName (Right n, dets) + -- We want to print evidence variable using a readable tree structure. + -- Evidence variables contain information why a particular instance or + -- type equality was chosen, paired with location information. + | any isEvidenceUse (identInfo dets) = + let + -- The evidence tree may not be present for some reason, e.g., the 'Name' is not + -- present in the tree. + -- Thus, we need to handle it here, but in practice, this should never be 'Nothing'. + evidenceTree = maybe "" (printOutputable . renderEvidenceTree) (getEvidenceTree rf n) + in + pure $ evidenceTree <> "\n" + -- Identifier details that are not evidence variables are used to display type information and + -- documentation of that name. + | otherwise = + let + typeSig = wrapHaskell (printOutputable n <> maybe "" (" :: " <>) ((prettyType <$> identType dets) <|> maybeKind)) + definitionLoc = maybeToList (pretty (definedAt n) (prettyPackageName n)) + docs = maybeToList (T.unlines . spanDocToMarkdown <$> lookupNameEnv dm n) + in + pure $ T.unlines $ + [typeSig] ++ definitionLoc ++ docs where maybeKind = fmap printOutputable $ safeTyThingType =<< lookupNameEnv km n pretty Nothing Nothing = Nothing pretty (Just define) Nothing = Just $ define <> "\n" @@ -269,7 +307,7 @@ atPoint IdeOptions{} (HAR _ hf _ _ (kind :: HieKind hietype)) (DKMap dm km) env -- the package(with version) this `ModuleName` belongs to. 
packageNameForImportStatement :: ModuleName -> IO T.Text packageNameForImportStatement mod = do - mpkg <- findImportedModule env mod :: IO (Maybe Module) + mpkg <- findImportedModule (setNonHomeFCHook env) mod :: IO (Maybe Module) let moduleName = printOutputable mod case mpkg >>= packageNameWithVersion of Nothing -> pure moduleName @@ -285,7 +323,7 @@ atPoint IdeOptions{} (HAR _ hf _ _ (kind :: HieKind hietype)) (DKMap dm km) env version = T.pack $ showVersion (unitPackageVersion conf) pure $ pkgName <> "-" <> version - -- Type info for the current node, it may contains several symbols + -- Type info for the current node, it may contain several symbols -- for one range, like wildcard types :: [hietype] types = nodeType info @@ -294,9 +332,12 @@ atPoint IdeOptions{} (HAR _ hf _ _ (kind :: HieKind hietype)) (DKMap dm km) env prettyTypes = map (("_ :: "<>) . prettyType) types prettyType :: hietype -> T.Text - prettyType t = case kind of - HieFresh -> printOutputable t - HieFromDisk full_file -> printOutputable $ hieTypeToIface $ recoverFullType t (hie_types full_file) + prettyType = printOutputable . expandType + + expandType :: a -> SDoc + expandType t = case kind of + HieFresh -> ppr t + HieFromDisk full_file -> ppr $ hieTypeToIface $ recoverFullType t (hie_types full_file) definedAt :: Name -> Maybe T.Text definedAt name = @@ -306,6 +347,67 @@ atPoint IdeOptions{} (HAR _ hf _ _ (kind :: HieKind hietype)) (DKMap dm km) env UnhelpfulLoc {} | isInternalName name || isSystemName name -> Nothing _ -> Just $ "*Defined " <> printOutputable (pprNameDefnLoc name) <> "*" + -- We want to render the root constraint even if it is a let, + -- but we don't want to render any subsequent lets + renderEvidenceTree :: Tree (EvidenceInfo a) -> SDoc + -- However, if the root constraint is simply a (Show (,), Show [], Show Int, Show Bool)@ + -- + -- It is also quite helpful to look at the @.hie@ file directly to see how the + -- evidence information is presented on disk. @hiedb dump @ + renderEvidenceTree (T.Node (EvidenceInfo{evidenceDetails=Just (EvLetBind _,_,_)}) [x]) + = renderEvidenceTree x + renderEvidenceTree (T.Node (EvidenceInfo{evidenceDetails=Just (EvLetBind _,_,_), ..}) xs) + = hang (text "Evidence of constraint `" O.<> expandType evidenceType O.<> "`") 2 $ + vcat $ text "constructed using:" : map renderEvidenceTree' xs + renderEvidenceTree (T.Node (EvidenceInfo{..}) _) + = hang (text "Evidence of constraint `" O.<> expandType evidenceType O.<> "`") 2 $ + vcat $ printDets evidenceSpan evidenceDetails : map (text . T.unpack) (maybeToList $ definedAt evidenceVar) + + -- renderEvidenceTree' skips let bound evidence variables and prints the children directly + renderEvidenceTree' (T.Node (EvidenceInfo{evidenceDetails=Just (EvLetBind _,_,_)}) xs) + = vcat (map renderEvidenceTree' xs) + renderEvidenceTree' (T.Node (EvidenceInfo{..}) _) + = hang (text "- `" O.<> expandType evidenceType O.<> "`") 2 $ + vcat $ + printDets evidenceSpan evidenceDetails : map (text . 
T.unpack) (maybeToList $ definedAt evidenceVar) + + printDets :: RealSrcSpan -> Maybe (EvVarSource, Scope, Maybe Span) -> SDoc + printDets _ Nothing = text "using an external instance" + printDets ospn (Just (src,_,mspn)) = pprSrc + $$ text "at" <+> text (T.unpack $ srcSpanToMdLink location) + where + location = realSrcSpanToLocation spn + -- Use the bind span if we have one, else use the occurrence span + spn = fromMaybe ospn mspn + pprSrc = case src of + -- Users don't know what HsWrappers are + EvWrapperBind -> "bound by type signature or pattern" + _ -> ppr src + +-- | Find 'Location's of type definition at a specific point and return them along with their 'Identifier's. typeLocationsAtPoint :: forall m . MonadIO m @@ -314,14 +416,14 @@ typeLocationsAtPoint -> IdeOptions -> Position -> HieAstResult - -> m [Location] + -> m [(Location, Identifier)] typeLocationsAtPoint withHieDb lookupModule _ideOptions pos (HAR _ ast _ _ hieKind) = case hieKind of HieFromDisk hf -> let arr = hie_types hf ts = concat $ pointCommand ast pos getts unfold = map (arr A.!) - getts x = nodeType ni ++ (mapMaybe identType $ M.elems $ nodeIdentifiers ni) + getts x = nodeType ni ++ mapMaybe identType (M.elems $ nodeIdentifiers ni) where ni = nodeInfo' x getTypes' ts' = flip concatMap (unfold ts') $ \case HTyVarTy n -> [n] @@ -332,12 +434,12 @@ typeLocationsAtPoint withHieDb lookupModule _ideOptions pos (HAR _ ast _ _ hieKi HQualTy a b -> getTypes' [a,b] HCastTy a -> getTypes' [a] _ -> [] - in fmap nubOrd $ concatMapM (fmap (fromMaybe []) . nameToLocation withHieDb lookupModule) (getTypes' ts) + in fmap nubOrd $ concatMapM (\n -> fmap (maybe [] (fmap (,Right n))) (nameToLocation withHieDb lookupModule n)) (getTypes' ts) HieFresh -> let ts = concat $ pointCommand ast pos getts - getts x = nodeType ni ++ (mapMaybe identType $ M.elems $ nodeIdentifiers ni) + getts x = nodeType ni ++ mapMaybe identType (M.elems $ nodeIdentifiers ni) where ni = nodeInfo x - in fmap nubOrd $ concatMapM (fmap (fromMaybe []) . nameToLocation withHieDb lookupModule) (getTypes ts) + in fmap nubOrd $ concatMapM (\n -> fmap (maybe [] (fmap (,Right n))) (nameToLocation withHieDb lookupModule n)) (getTypes ts) namesInType :: Type -> [Name] namesInType (TyVarTy n) = [varName n] @@ -350,24 +452,46 @@ namesInType (LitTy _) = [] namesInType _ = [] getTypes :: [Type] -> [Name] -getTypes ts = concatMap namesInType ts +getTypes = concatMap namesInType +-- | Find 'Location's of definition at a specific point and return them along with their 'Identifier's. locationsAtPoint - :: forall m a + :: forall m . MonadIO m => WithHieDb -> LookupModule m -> IdeOptions -> M.Map ModuleName NormalizedFilePath -> Position - -> HieASTs a - -> m [Location] -locationsAtPoint withHieDb lookupModule _ideOptions imports pos ast = + -> HieAstResult + -> m [(Location, Identifier)] +locationsAtPoint withHieDb lookupModule _ideOptions imports pos (HAR _ ast _rm _ _) = let ns = concat $ pointCommand ast pos (M.keys . getNodeIds) zeroPos = Position 0 0 zeroRange = Range zeroPos zeroPos - modToLocation m = fmap (\fs -> pure $ Location (fromNormalizedUri $ filePathToUri' fs) zeroRange) $ M.lookup m imports - in fmap (nubOrd . concat) $ mapMaybeM (either (pure . modToLocation) $ nameToLocation withHieDb lookupModule) ns + modToLocation m = fmap (\fs -> pure (Location (fromNormalizedUri $ filePathToUri' fs) zeroRange)) $ M.lookup m imports + in fmap (nubOrd . 
concat) $ mapMaybeM + (either (\m -> pure ((fmap $ fmap (,Left m)) (modToLocation m))) + (\n -> fmap (fmap $ fmap (,Right n)) (nameToLocation withHieDb lookupModule n))) + ns + +-- | Find 'Location's of a implementation definition at a specific point. +instanceLocationsAtPoint + :: forall m + . MonadIO m + => WithHieDb + -> LookupModule m + -> IdeOptions + -> Position + -> HieAstResult + -> m [Location] +instanceLocationsAtPoint withHieDb lookupModule _ideOptions pos (HAR _ ast _rm _ _) = + let ns = concat $ pointCommand ast pos (M.keys . getNodeIds) + evTrees = mapMaybe (eitherToMaybe >=> getEvidenceTree _rm) ns + evNs = concatMap (map (evidenceVar) . T.flatten) evTrees + in fmap (nubOrd . concat) $ mapMaybeM + (nameToLocation withHieDb lookupModule) + evNs -- | Given a 'Name' attempt to find the location where it is defined. nameToLocation :: MonadIO m => WithHieDb -> LookupModule m -> Name -> m (Maybe [Location]) diff --git a/ghcide/src/Development/IDE/Spans/Common.hs b/ghcide/src/Development/IDE/Spans/Common.hs index dbdacfcd5c..ee8a8c18bc 100644 --- a/ghcide/src/Development/IDE/Spans/Common.hs +++ b/ghcide/src/Development/IDE/Spans/Common.hs @@ -13,22 +13,26 @@ module Development.IDE.Spans.Common ( , spanDocToMarkdownForTest , DocMap , TyThingMap +, srcSpanToMdLink ) where import Control.DeepSeq +import Data.Bifunctor (second) import Data.List.Extra import Data.Maybe import qualified Data.Text as T -import GHC.Generics - +import Development.IDE.GHC.Util +import qualified Documentation.Haddock.Parser as H +import qualified Documentation.Haddock.Types as H import GHC +import GHC.Generics +import System.FilePath -import Data.Bifunctor (second) +import Control.Lens import Development.IDE.GHC.Compat import Development.IDE.GHC.Orphans () -import Development.IDE.GHC.Util -import qualified Documentation.Haddock.Parser as H -import qualified Documentation.Haddock.Types as H +import qualified Language.LSP.Protocol.Lens as JL +import Language.LSP.Protocol.Types type DocMap = NameEnv SpanDoc type TyThingMap = NameEnv TyThing @@ -54,13 +58,8 @@ safeTyThingId (AConLike (RealDataCon dataCon)) = Just (dataConWrapId dataCon) safeTyThingId _ = Nothing -- Possible documentation for an element in the code -#if MIN_VERSION_ghc(9,3,0) data SpanDoc = SpanDocString [HsDocString] SpanDocUris -#else -data SpanDoc - = SpanDocString HsDocString SpanDocUris -#endif | SpanDocText [T.Text] SpanDocUris deriving stock (Eq, Show, Generic) deriving anyclass NFData @@ -97,11 +96,7 @@ spanDocToMarkdown :: SpanDoc -> [T.Text] spanDocToMarkdown = \case (SpanDocString docs uris) -> let doc = T.pack $ haddockToMarkdown $ H.toRegular $ H._doc $ H.parseParas Nothing $ -#if MIN_VERSION_ghc(9,3,0) renderHsDocStrings docs -#else - unpackHDS docs -#endif in go [doc] uris (SpanDocText txt uris) -> go txt uris where @@ -118,7 +113,13 @@ spanDocUrisToMarkdown (SpanDocUris mdoc msrc) = catMaybes [ linkify "Documentation" <$> mdoc , linkify "Source" <$> msrc ] - where linkify title uri = "[" <> title <> "](" <> uri <> ")" + +-- | Generate a markdown link. +-- +-- >>> linkify "Title" "uri" +-- "[Title](Uri)" +linkify :: T.Text -> T.Text -> T.Text +linkify title uri = "[" <> title <> "](" <> uri <> ")" spanDocToMarkdownForTest :: String -> String spanDocToMarkdownForTest @@ -224,3 +225,35 @@ splitForList s = case lines s of [] -> "" (first:rest) -> unlines $ first : map ((" " ++) . 
trimStart) rest + +-- | Generate a source link for the 'Location' according to VSCode's supported form: +-- https://github.com/microsoft/vscode/blob/b3ec8181fc49f5462b5128f38e0723ae85e295c2/src/vs/platform/opener/common/opener.ts#L151-L160 +-- +srcSpanToMdLink :: Location -> T.Text +srcSpanToMdLink location = + let + uri = location ^. JL.uri + range = location ^. JL.range + -- LSP 'Range' starts at '0', but link locations start at '1'. + intText n = T.pack $ show (n + 1) + srcRangeText = + T.concat + [ "L" + , intText (range ^. JL.start . JL.line) + , "," + , intText (range ^. JL.start . JL.character) + , "-L" + , intText (range ^. JL.end . JL.line) + , "," + , intText (range ^. JL.end . JL.character) + ] + + -- If the 'Location' is a 'FilePath', display it in shortened form. + -- This avoids some redundancy and better readability for the user. + title = case uriToFilePath uri of + Just fp -> T.pack (takeFileName fp) <> ":" <> intText (range ^. JL.start . JL.line) + Nothing -> getUri uri + + srcLink = getUri uri <> "#" <> srcRangeText + in + linkify title srcLink diff --git a/ghcide/src/Development/IDE/Spans/Documentation.hs b/ghcide/src/Development/IDE/Spans/Documentation.hs index 6ab7b6ba9e..85f2ef1037 100644 --- a/ghcide/src/Development/IDE/Spans/Documentation.hs +++ b/ghcide/src/Development/IDE/Spans/Documentation.hs @@ -41,16 +41,8 @@ mkDocMap -> IO DocAndTyThingMap mkDocMap env rm this_mod = do -#if MIN_VERSION_ghc(9,3,0) (Just Docs{docs_decls = UniqMap this_docs}) <- extractDocs (hsc_dflags env) this_mod -#else - (_ , DeclDocMap this_docs, _) <- extractDocs this_mod -#endif -#if MIN_VERSION_ghc(9,3,0) d <- foldrM getDocs (fmap (\(_, x) -> (map hsDocString x) `SpanDocString` SpanDocUris Nothing Nothing) this_docs) names -#else - d <- foldrM getDocs (mkNameEnv $ M.toList $ fmap (`SpanDocString` SpanDocUris Nothing Nothing) this_docs) names -#endif k <- foldrM getType (tcg_type_env this_mod) names pure $ DKMap d k where @@ -84,11 +76,7 @@ getDocumentationsTryGhc env names = do Left _ -> return [] Right res -> zipWithM unwrap res names where -#if MIN_VERSION_ghc(9,3,0) unwrap (Right (Just docs, _)) n = SpanDocString (map hsDocString docs) <$> getUris n -#else - unwrap (Right (Just docs, _)) n = SpanDocString docs <$> getUris n -#endif unwrap _ n = mkSpanDocText n mkSpanDocText name = diff --git a/ghcide/src/Development/IDE/Spans/Pragmas.hs b/ghcide/src/Development/IDE/Spans/Pragmas.hs index a2b4981a38..4df16c6704 100644 --- a/ghcide/src/Development/IDE/Spans/Pragmas.hs +++ b/ghcide/src/Development/IDE/Spans/Pragmas.hs @@ -15,6 +15,8 @@ import qualified Data.List as List import qualified Data.Maybe as Maybe import Data.Text (Text, pack) import qualified Data.Text as Text +import Data.Text.Utf16.Rope.Mixed (Rope) +import qualified Data.Text.Utf16.Rope.Mixed as Rope import Development.IDE (srcSpanToRange, IdeState, NormalizedFilePath, GhcSession (..), getFileContents, hscEnv, runAction) import Development.IDE.GHC.Compat import Development.IDE.GHC.Compat.Util @@ -27,10 +29,10 @@ import qualified Data.Text as T import Development.IDE.Core.PluginUtils import qualified Language.LSP.Protocol.Lens as L -getNextPragmaInfo :: DynFlags -> Maybe Text -> NextPragmaInfo -getNextPragmaInfo dynFlags mbSourceText = - if | Just sourceText <- mbSourceText - , let sourceStringBuffer = stringToStringBuffer (Text.unpack sourceText) +getNextPragmaInfo :: DynFlags -> Maybe Rope -> NextPragmaInfo +getNextPragmaInfo dynFlags mbSource = + if | Just source <- mbSource + , let sourceStringBuffer = 
stringToStringBuffer (Text.unpack (Rope.toText source)) , POk _ parserState <- parsePreDecl dynFlags sourceStringBuffer -> case parserState of ParserStateNotDone{ nextPragma } -> nextPragma @@ -56,7 +58,7 @@ insertNewPragma (NextPragmaInfo nextPragmaLine _) newPragma = LSP.TextEdit prag getFirstPragma :: MonadIO m => PluginId -> IdeState -> NormalizedFilePath -> ExceptT PluginError m NextPragmaInfo getFirstPragma (PluginId pId) state nfp = do (hscEnv -> hsc_dflags -> sessionDynFlags, _) <- runActionE (T.unpack pId <> ".GhcSession") state $ useWithStaleE GhcSession nfp - (_, fileContents) <- liftIO $ runAction (T.unpack pId <> ".GetFileContents") state $ getFileContents nfp + fileContents <- liftIO $ runAction (T.unpack pId <> ".GetFileContents") state $ getFileContents nfp pure $ getNextPragmaInfo sessionDynFlags fileContents -- Pre-declaration comments parser ----------------------------------------------------- diff --git a/ghcide/src/Development/IDE/Types/Diagnostics.hs b/ghcide/src/Development/IDE/Types/Diagnostics.hs index 8189ff89c1..89e1f2d12f 100644 --- a/ghcide/src/Development/IDE/Types/Diagnostics.hs +++ b/ghcide/src/Development/IDE/Types/Diagnostics.hs @@ -1,32 +1,69 @@ -- Copyright (c) 2019 The DAML Authors. All rights reserved. -- SPDX-License-Identifier: Apache-2.0 +{-# LANGUAGE CPP #-} +{-# LANGUAGE DeriveGeneric #-} +{-# LANGUAGE TemplateHaskell #-} module Development.IDE.Types.Diagnostics ( LSP.Diagnostic(..), ShowDiagnostic(..), - FileDiagnostic, + FileDiagnostic(..), + fdFilePathL, + fdLspDiagnosticL, + fdShouldShowDiagnosticL, + fdStructuredMessageL, + StructuredMessage(..), + _NoStructuredMessage, + _SomeStructuredMessage, IdeResult, LSP.DiagnosticSeverity(..), DiagnosticStore, ideErrorText, ideErrorWithSource, + ideErrorFromLspDiag, showDiagnostics, showDiagnosticsColored, - IdeResultNoDiagnosticsEarlyCutoff) where +#if MIN_VERSION_ghc(9,5,0) + showGhcCode, +#endif + IdeResultNoDiagnosticsEarlyCutoff, + attachReason, + attachedReason) where +import Control.Applicative ((<|>)) import Control.DeepSeq +import Control.Lens +import qualified Data.Aeson as JSON +import qualified Data.Aeson.Lens as JSON import Data.ByteString (ByteString) +import Data.Foldable import Data.Maybe as Maybe import qualified Data.Text as T +import Development.IDE.GHC.Compat (GhcMessage, MsgEnvelope, + WarningFlag, flagSpecFlag, + flagSpecName, wWarningFlags) import Development.IDE.Types.Location +import GHC.Generics +#if MIN_VERSION_ghc(9,5,0) +import GHC.Types.Error (DiagnosticCode (..), + DiagnosticReason (..), + diagnosticCode, + diagnosticReason, + errMsgDiagnostic) +#else +import GHC.Types.Error (DiagnosticReason (..), + diagnosticReason, + errMsgDiagnostic) +#endif import Language.LSP.Diagnostics -import Language.LSP.Protocol.Types as LSP (Diagnostic (..), - DiagnosticSeverity (..)) +import Language.LSP.Protocol.Lens (data_) +import Language.LSP.Protocol.Types as LSP import Prettyprinter import Prettyprinter.Render.Terminal (Color (..), color) import qualified Prettyprinter.Render.Terminal as Terminal import Prettyprinter.Render.Text +import Text.Printf (printf) -- | The result of an IDE operation. Warnings and errors are in the Diagnostic, @@ -44,26 +81,102 @@ type IdeResult v = ([FileDiagnostic], Maybe v) -- | an IdeResult with a fingerprint type IdeResultNoDiagnosticsEarlyCutoff v = (Maybe ByteString, Maybe v) +-- | Produce a 'FileDiagnostic' for the given 'NormalizedFilePath' +-- with an error message. 
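With `getNextPragmaInfo` now taking the file contents as a `Rope` rather than `Text`, an ad-hoc caller has to build the rope explicitly. A sketch under the assumption that `getNextPragmaInfo` and `insertNewPragma` are exported from the module above and that `insertNewPragma` takes a GHC `Extension`; the import locations below are assumptions, not part of the patch:

```haskell
import qualified Data.Text as T
import qualified Data.Text.Utf16.Rope.Mixed as Rope
import Development.IDE.GHC.Compat (DynFlags)
import Development.IDE.Spans.Pragmas (getNextPragmaInfo, insertNewPragma)
import GHC.LanguageExtensions.Type (Extension (LambdaCase))
import qualified Language.LSP.Protocol.Types as LSP

-- Compute the TextEdit that inserts {-# LANGUAGE LambdaCase #-} after any
-- pragmas already present at the top of the given source text.
lambdaCaseEdit :: DynFlags -> T.Text -> LSP.TextEdit
lambdaCaseEdit dflags source =
  let info = getNextPragmaInfo dflags (Just (Rope.fromText source))
  in insertNewPragma info LambdaCase
```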
ideErrorText :: NormalizedFilePath -> T.Text -> FileDiagnostic -ideErrorText = ideErrorWithSource (Just "compiler") (Just DiagnosticSeverity_Error) +ideErrorText nfp msg = + ideErrorWithSource (Just "compiler") (Just DiagnosticSeverity_Error) nfp msg Nothing + +-- | Create a 'FileDiagnostic' from an existing 'LSP.Diagnostic' for a +-- specific 'NormalizedFilePath'. +-- The optional 'MsgEnvelope GhcMessage' is the original error message +-- that was used for creating the 'LSP.Diagnostic'. +-- It is included here, to allow downstream consumers, such as HLS plugins, +-- to provide LSP features based on the structured error messages. +-- Additionally, if available, we insert the ghc error code into the +-- 'LSP.Diagnostic'. These error codes are used in https://errors.haskell.org/ +-- to provide documentation and explanations for error messages. +ideErrorFromLspDiag + :: LSP.Diagnostic + -> NormalizedFilePath + -> Maybe (MsgEnvelope GhcMessage) + -> FileDiagnostic +ideErrorFromLspDiag lspDiag fdFilePath mbOrigMsg = + let fdShouldShowDiagnostic = ShowDiag + fdStructuredMessage = + case mbOrigMsg of + Nothing -> NoStructuredMessage + Just msg -> SomeStructuredMessage msg + fdLspDiagnostic = + lspDiag + & attachReason (fmap (diagnosticReason . errMsgDiagnostic) mbOrigMsg) + & setGhcCode mbOrigMsg + in + FileDiagnostic {..} + +-- | Set the code of the 'LSP.Diagnostic' to the GHC diagnostic code which is linked +-- to https://errors.haskell.org/. +setGhcCode :: Maybe (MsgEnvelope GhcMessage) -> LSP.Diagnostic -> LSP.Diagnostic +#if MIN_VERSION_ghc(9,5,0) +setGhcCode mbOrigMsg diag = + let mbGhcCode = do + origMsg <- mbOrigMsg + code <- diagnosticCode (errMsgDiagnostic origMsg) + pure (InR (showGhcCode code)) + in + diag { _code = mbGhcCode <|> _code diag } +#else +setGhcCode _ diag = diag +#endif + +#if MIN_VERSION_ghc(9,9,0) +-- DiagnosticCode only got a show instance in 9.10.1 +showGhcCode :: DiagnosticCode -> T.Text +showGhcCode = T.pack . show +#elif MIN_VERSION_ghc(9,5,0) +showGhcCode :: DiagnosticCode -> T.Text +showGhcCode (DiagnosticCode prefix c) = T.pack $ prefix ++ "-" ++ printf "%05d" c +#endif + +attachedReason :: Traversal' Diagnostic (Maybe JSON.Value) +attachedReason = data_ . non (JSON.object []) . JSON.atKey "attachedReason" + +attachReason :: Maybe DiagnosticReason -> Diagnostic -> Diagnostic +attachReason Nothing = id +attachReason (Just wr) = attachedReason .~ fmap JSON.toJSON (showReason wr) + where + showReason = \case + WarningWithFlag flag -> Just $ catMaybes [showFlag flag] +#if MIN_VERSION_ghc(9,7,0) + WarningWithFlags flags -> Just $ catMaybes (fmap showFlag $ toList flags) +#endif + _ -> Nothing + +showFlag :: WarningFlag -> Maybe T.Text +showFlag flag = ("-W" <>) . T.pack . flagSpecName <$> find ((== flag) . 
flagSpecFlag) wWarningFlags ideErrorWithSource :: Maybe T.Text -> Maybe DiagnosticSeverity - -> a + -> NormalizedFilePath -> T.Text - -> (a, ShowDiagnostic, Diagnostic) -ideErrorWithSource source sev fp msg = (fp, ShowDiag, LSP.Diagnostic { - _range = noRange, - _severity = sev, - _code = Nothing, - _source = source, - _message = msg, - _relatedInformation = Nothing, - _tags = Nothing, - _codeDescription = Nothing, - _data_ = Nothing - }) + -> Maybe (MsgEnvelope GhcMessage) + -> FileDiagnostic +ideErrorWithSource source sev fdFilePath msg origMsg = + let lspDiagnostic = + LSP.Diagnostic { + _range = noRange, + _severity = sev, + _code = Nothing, + _source = source, + _message = msg, + _relatedInformation = Nothing, + _tags = Nothing, + _codeDescription = Nothing, + _data_ = Nothing + } + in + ideErrorFromLspDiag lspDiagnostic fdFilePath origMsg -- | Defines whether a particular diagnostic should be reported -- back to the user. @@ -80,13 +193,78 @@ data ShowDiagnostic instance NFData ShowDiagnostic where rnf = rwhnf +-- | A Maybe-like wrapper for a GhcMessage that doesn't try to compare, show, or +-- force the GhcMessage inside, so that we can derive Show, Eq, Ord, NFData on +-- FileDiagnostic. FileDiagnostic only uses this as metadata so we can safely +-- ignore it in fields. +-- +-- Instead of pattern matching on these constructors directly, consider 'Prism' from +-- the 'lens' package. This allows to conveniently pattern match deeply into the 'MsgEnvelope GhcMessage' +-- constructor. +-- The module 'Development.IDE.GHC.Compat.Error' implements additional 'Lens's and 'Prism's, +-- allowing you to avoid importing GHC modules directly. +-- +-- For example, to pattern match on a 'TcRnMessage' you can use the lens: +-- +-- @ +-- message ^? _SomeStructuredMessage . msgEnvelopeErrorL . _TcRnMessage +-- @ +-- +-- This produces a value of type `Maybe TcRnMessage`. +-- +-- Further, consider utility functions such as 'stripTcRnMessageContext', which strip +-- context from error messages which may be more convenient in certain situations. +data StructuredMessage + = NoStructuredMessage + | SomeStructuredMessage (MsgEnvelope GhcMessage) + deriving (Generic) + +instance Show StructuredMessage where + show NoStructuredMessage = "NoStructuredMessage" + show SomeStructuredMessage {} = "SomeStructuredMessage" + +instance Eq StructuredMessage where + (==) NoStructuredMessage NoStructuredMessage = True + (==) SomeStructuredMessage {} SomeStructuredMessage {} = True + (==) _ _ = False + +instance Ord StructuredMessage where + compare NoStructuredMessage NoStructuredMessage = EQ + compare SomeStructuredMessage {} SomeStructuredMessage {} = EQ + compare NoStructuredMessage SomeStructuredMessage {} = GT + compare SomeStructuredMessage {} NoStructuredMessage = LT + +instance NFData StructuredMessage where + rnf NoStructuredMessage = () + rnf SomeStructuredMessage {} = () + -- | Human readable diagnostics for a specific file. -- -- This type packages a pretty printed, human readable error message -- along with the related source location so that we can display the error -- on either the console or in the IDE at the right source location. -- -type FileDiagnostic = (NormalizedFilePath, ShowDiagnostic, Diagnostic) +-- It also optionally keeps a structured diagnostic message GhcMessage in +-- StructuredMessage. 
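Putting the documentation above into a single snippet: a downstream consumer that wants the structured `TcRnMessage` out of a diagnostic composes the new prism with the lenses from `Development.IDE.GHC.Compat.Error`. A sketch, assuming that module exports the lens, the prism, and the `TcRnMessage` type as the docs suggest:

```haskell
import Control.Lens ((^?))
import Development.IDE.GHC.Compat.Error (TcRnMessage, msgEnvelopeErrorL, _TcRnMessage)
import Development.IDE.Types.Diagnostics (FileDiagnostic (..), _SomeStructuredMessage)

-- Recover the structured GHC type-checker message, if this diagnostic carries one.
-- Yields Nothing both for NoStructuredMessage and for non-TcRn messages.
structuredTcRnMessage :: FileDiagnostic -> Maybe TcRnMessage
structuredTcRnMessage diag =
  fdStructuredMessage diag
    ^? _SomeStructuredMessage . msgEnvelopeErrorL . _TcRnMessage
```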
+-- +data FileDiagnostic = FileDiagnostic + { fdFilePath :: NormalizedFilePath + , fdShouldShowDiagnostic :: ShowDiagnostic + , fdLspDiagnostic :: Diagnostic + -- | The original diagnostic that was used to produce 'fdLspDiagnostic'. + -- We keep it here, so downstream consumers, e.g. HLS plugins, can use the + -- the structured error messages and don't have to resort to parsing + -- error messages via regexes or similar. + -- + -- The optional GhcMessage inside of this StructuredMessage is ignored for + -- Eq, Ord, Show, and NFData instances. This is fine because this field + -- should only ever be metadata and should never be used to distinguish + -- between FileDiagnostics. + , fdStructuredMessage :: StructuredMessage + } + deriving (Eq, Ord, Show, Generic) + +instance NFData FileDiagnostic prettyRange :: Range -> Doc Terminal.AnsiStyle prettyRange Range{..} = f _start <> "-" <> f _end @@ -106,13 +284,17 @@ prettyDiagnostics :: [FileDiagnostic] -> Doc Terminal.AnsiStyle prettyDiagnostics = vcat . map prettyDiagnostic prettyDiagnostic :: FileDiagnostic -> Doc Terminal.AnsiStyle -prettyDiagnostic (fp, sh, LSP.Diagnostic{..}) = +prettyDiagnostic FileDiagnostic { fdFilePath, fdShouldShowDiagnostic, fdLspDiagnostic = LSP.Diagnostic{..} } = vcat - [ slabel_ "File: " $ pretty (fromNormalizedFilePath fp) - , slabel_ "Hidden: " $ if sh == ShowDiag then "no" else "yes" + [ slabel_ "File: " $ pretty (fromNormalizedFilePath fdFilePath) + , slabel_ "Hidden: " $ if fdShouldShowDiagnostic == ShowDiag then "no" else "yes" , slabel_ "Range: " $ prettyRange _range , slabel_ "Source: " $ pretty _source , slabel_ "Severity:" $ pretty $ show sev + , slabel_ "Code: " $ case _code of + Just (InR text) -> pretty text + Just (InL i) -> pretty i + Nothing -> "" , slabel_ "Message: " $ case sev of LSP.DiagnosticSeverity_Error -> annotate $ color Red @@ -150,3 +332,9 @@ srenderColored = defaultTermWidth :: Int defaultTermWidth = 80 + +makePrisms ''StructuredMessage + +makeLensesWith + (lensRules & lensField .~ mappingNamer (pure . 
(++ "L"))) + ''FileDiagnostic diff --git a/ghcide/src/Development/IDE/Types/HscEnvEq.hs b/ghcide/src/Development/IDE/Types/HscEnvEq.hs index dc2999dee6..1c2ed1732f 100644 --- a/ghcide/src/Development/IDE/Types/HscEnvEq.hs +++ b/ghcide/src/Development/IDE/Types/HscEnvEq.hs @@ -1,25 +1,21 @@ +{-# LANGUAGE CPP #-} module Development.IDE.Types.HscEnvEq ( HscEnvEq, hscEnv, newHscEnvEq, - hscEnvWithImportPaths, - newHscEnvEqPreserveImportPaths, - newHscEnvEqWithImportPaths, updateHscEnvEq, - envImportPaths, envPackageExports, envVisibleModuleNames, - deps ) where import Control.Concurrent.Async (Async, async, waitCatch) import Control.Concurrent.Strict (modifyVar, newVar) -import Control.DeepSeq (force) +import Control.DeepSeq (force, rwhnf) import Control.Exception (evaluate, mask, throwIO) import Control.Monad.Extra (eitherM, join, mapMaybeM) import Data.Either (fromRight) -import Data.Set (Set) -import qualified Data.Set as Set +import Data.IORef +import qualified Data.Map as M import Data.Unique (Unique) import qualified Data.Unique as Unique import Development.IDE.GHC.Compat hiding (newUnique) @@ -28,9 +24,11 @@ import Development.IDE.GHC.Error (catchSrcErrors) import Development.IDE.GHC.Util (lookupPackageConfig) import Development.IDE.Graph.Classes import Development.IDE.Types.Exports (ExportsMap, createExportsMap) +import GHC.Driver.Env (hsc_all_home_unit_ids) import Ide.PluginUtils (toAbsolute) import OpenTelemetry.Eventlog (withSpan) -import System.FilePath +import System.Directory (makeAbsolute) + -- | An 'HscEnv' with equality. Two values are considered equal -- if they are created with the same call to 'newHscEnvEq' or @@ -38,13 +36,6 @@ import System.FilePath data HscEnvEq = HscEnvEq { envUnique :: !Unique , hscEnv :: !HscEnv - , deps :: [(UnitId, DynFlags)] - -- ^ In memory components for this HscEnv - -- This is only used at the moment for the import dirs in - -- the DynFlags - , envImportPaths :: Maybe (Set FilePath) - -- ^ If Just, import dirs originally configured in this env - -- If Nothing, the env import dirs are unaltered , envPackageExports :: IO ExportsMap , envVisibleModuleNames :: IO (Maybe [ModuleName]) -- ^ 'listVisibleModuleNames' is a pure function, @@ -59,18 +50,32 @@ updateHscEnvEq oldHscEnvEq newHscEnv = do update <$> Unique.newUnique -- | Wrap an 'HscEnv' into an 'HscEnvEq'. -newHscEnvEq :: FilePath -> FilePath -> HscEnv -> [(UnitId, DynFlags)] -> IO HscEnvEq -newHscEnvEq root cradlePath hscEnv0 deps = do - let relativeToCradle = (takeDirectory cradlePath ) - hscEnv = removeImportPaths hscEnv0 - - -- Make Absolute since targets are also absolute - let importPathsCanon = toAbsolute root . relativeToCradle <$> importPaths (hsc_dflags hscEnv0) - - newHscEnvEqWithImportPaths (Just $ Set.fromList importPathsCanon) hscEnv deps - -newHscEnvEqWithImportPaths :: Maybe (Set FilePath) -> HscEnv -> [(UnitId, DynFlags)] -> IO HscEnvEq -newHscEnvEqWithImportPaths envImportPaths hscEnv deps = do +newHscEnvEq :: HscEnv -> IO HscEnvEq +newHscEnvEq hscEnv' = do + + mod_cache <- newIORef emptyInstalledModuleEnv + file_cache <- newIORef M.empty + -- This finder cache is for things which are outside of things which are tracked + -- by HLS. 
For example, non-home modules, dependent object files etc +#if MIN_VERSION_ghc(9,11,0) + let hscEnv = hscEnv' + { hsc_FC = FinderCache + { flushFinderCaches = \_ -> error "GHC should never call flushFinderCaches outside the driver" + , addToFinderCache = \(GWIB im _) val -> do + if moduleUnit im `elem` hsc_all_home_unit_ids hscEnv' + then error "tried to add home module to FC" + else atomicModifyIORef' mod_cache $ \c -> (extendInstalledModuleEnv c im val, ()) + , lookupFinderCache = \(GWIB im _) -> do + if moduleUnit im `elem` hsc_all_home_unit_ids hscEnv' + then error ("tried to lookup home module from FC" ++ showSDocUnsafe (ppr (im, hsc_all_home_unit_ids hscEnv'))) + else lookupInstalledModuleEnv <$> readIORef mod_cache <*> pure im + , lookupFileCache = \fp -> error ("not used by HLS" ++ fp) + } + } + +#else + let hscEnv = hscEnv' +#endif let dflags = hsc_dflags hscEnv @@ -112,23 +117,6 @@ newHscEnvEqWithImportPaths envImportPaths hscEnv deps = do return HscEnvEq{..} --- | Wrap an 'HscEnv' into an 'HscEnvEq'. -newHscEnvEqPreserveImportPaths - :: HscEnv -> [(UnitId, DynFlags)] -> IO HscEnvEq -newHscEnvEqPreserveImportPaths = newHscEnvEqWithImportPaths Nothing - --- | Unwrap the 'HscEnv' with the original import paths. --- Used only for locating imports -hscEnvWithImportPaths :: HscEnvEq -> HscEnv -hscEnvWithImportPaths HscEnvEq{..} - | Just imps <- envImportPaths - = hscSetFlags (setImportPaths (Set.toList imps) (hsc_dflags hscEnv)) hscEnv - | otherwise - = hscEnv - -removeImportPaths :: HscEnv -> HscEnv -removeImportPaths hsc = hscSetFlags (setImportPaths [] (hsc_dflags hsc)) hsc - instance Show HscEnvEq where show HscEnvEq{envUnique} = "HscEnvEq " ++ show (Unique.hashUnique envUnique) @@ -136,9 +124,9 @@ instance Eq HscEnvEq where a == b = envUnique a == envUnique b instance NFData HscEnvEq where - rnf (HscEnvEq a b c d _ _) = + rnf (HscEnvEq a b _ _) = -- deliberately skip the package exports map and visible module names - rnf (Unique.hashUnique a) `seq` b `seq` c `seq` rnf d + rnf (Unique.hashUnique a) `seq` rwhnf b instance Hashable HscEnvEq where hashWithSalt s = hashWithSalt s . envUnique diff --git a/ghcide/src/Development/IDE/Types/Options.hs b/ghcide/src/Development/IDE/Types/Options.hs index d330cd4cd3..be3ea20932 100644 --- a/ghcide/src/Development/IDE/Types/Options.hs +++ b/ghcide/src/Development/IDE/Types/Options.hs @@ -89,9 +89,9 @@ data OptHaddockParse = HaddockParse | NoHaddockParse deriving (Eq,Ord,Show,Enum) data IdePreprocessedSource = IdePreprocessedSource - { preprocWarnings :: [(GHC.SrcSpan, String)] + { preprocWarnings :: [(GHC.SrcSpan, String)] -- TODO: Future work could we make these warnings structured as well? -- ^ Warnings emitted by the preprocessor. - , preprocErrors :: [(GHC.SrcSpan, String)] + , preprocErrors :: [(GHC.SrcSpan, String)] -- TODO: Future work could we make these errors structured as well? -- ^ Errors emitted by the preprocessor. , preprocSource :: GHC.ParsedSource -- ^ New parse tree emitted by the preprocessor. 
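Circling back to the `Development.IDE.Types.Diagnostics` changes above: `attachReason` records the originating warning flag(s) as a JSON array of strings under the `"attachedReason"` key of the diagnostic's `_data_` field, and the exported `attachedReason` traversal can be used to read that information back. A sketch of such a consumer; the decoding to `[T.Text]` mirrors what `showFlag` stores:

```haskell
import Control.Lens ((^?))
import Control.Monad (join)
import qualified Data.Aeson as JSON
import qualified Data.Text as T
import Development.IDE.Types.Diagnostics (attachedReason)
import Language.LSP.Protocol.Types (Diagnostic)

-- Recover the warning flags recorded by 'attachReason',
-- e.g. ["-Wunused-imports"]; empty if nothing was attached.
attachedFlags :: Diagnostic -> [T.Text]
attachedFlags diag =
  case join (diag ^? attachedReason) of
    Just v | JSON.Success flags <- JSON.fromJSON v -> flags
    _                                              -> []
```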
diff --git a/ghcide/src/Development/IDE/Types/Shake.hs b/ghcide/src/Development/IDE/Types/Shake.hs index 2083625c43..cc8f84e3b6 100644 --- a/ghcide/src/Development/IDE/Types/Shake.hs +++ b/ghcide/src/Development/IDE/Types/Shake.hs @@ -33,10 +33,9 @@ import GHC.Generics import HieDb.Types (HieDb) import qualified StmContainers.Map as STM import Type.Reflection (SomeTypeRep (SomeTypeRep), - pattern App, pattern Con, - typeOf, typeRep, - typeRepTyCon) -import Unsafe.Coerce (unsafeCoerce) + eqTypeRep, pattern App, + type (:~~:) (HRefl), + typeOf, typeRep) -- | Intended to represent HieDb calls wrapped with (currently) retry -- functionality @@ -86,11 +85,12 @@ fromKey (Key k) -- | fromKeyType (Q (k,f)) = (typeOf k, f) fromKeyType :: Key -> Maybe (SomeTypeRep, NormalizedFilePath) -fromKeyType (Key k) = case typeOf k of - App (Con tc) a | tc == typeRepTyCon (typeRep @Q) - -> case unsafeCoerce k of - Q (_ :: (), f) -> Just (SomeTypeRep a, f) - _ -> Nothing +fromKeyType (Key k) + | App tc a <- typeOf k + , Just HRefl <- tc `eqTypeRep` (typeRep @Q) + , Q (_, f) <- k + = Just (SomeTypeRep a, f) + | otherwise = Nothing toNoFileKey :: (Show k, Typeable k, Eq k, Hashable k) => k -> Key toNoFileKey k = newKey $ Q (k, emptyFilePath) @@ -101,13 +101,11 @@ newtype Q k = Q (k, NormalizedFilePath) instance Show k => Show (Q k) where show (Q (k, file)) = show k ++ "; " ++ fromNormalizedFilePath file --- | Invariant: the 'v' must be in normal form (fully evaluated). +-- | Invariant: the @v@ must be in normal form (fully evaluated). -- Otherwise we keep repeatedly 'rnf'ing values taken from the Shake database newtype A v = A (Value v) deriving Show -instance NFData (A v) where rnf (A v) = v `seq` () - -- In the Shake database we only store one type of key/result pairs, -- namely Q (question) / A (answer). type instance RuleResult (Q k) = A (RuleResult k) diff --git a/ghcide/src/Text/Fuzzy/Parallel.hs b/ghcide/src/Text/Fuzzy/Parallel.hs index 13039e1e55..4d7a1d67e0 100644 --- a/ghcide/src/Text/Fuzzy/Parallel.hs +++ b/ghcide/src/Text/Fuzzy/Parallel.hs @@ -89,8 +89,7 @@ simpleFilter :: Int -- ^ Chunk size. 1000 works well. -> T.Text -- ^ Pattern to look for. -> [T.Text] -- ^ List of texts to check. -> [Scored T.Text] -- ^ The ones that match. -simpleFilter chunk maxRes pattern xs = - filter chunk maxRes pattern xs id +simpleFilter chunk maxRes pat xs = filter chunk maxRes pat xs id -- | The function to filter a list of values by fuzzy search on the text extracted from them, @@ -104,15 +103,15 @@ filter' :: Int -- ^ Chunk size. 1000 works well. -- ^ Custom scoring function to use for calculating how close words are -- When the function returns Nothing, this means the values are incomparable. -> [Scored t] -- ^ The list of results, sorted, highest score first. 
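The `fromKeyType` rewrite in `Development.IDE.Types.Shake` above swaps `unsafeCoerce` for a proof obtained from `eqTypeRep`: once the outer type constructor is shown to be `Q`, the `HRefl` evidence lets the pattern match on `Q` typecheck without any coercion. The same `Type.Reflection` idiom as a standalone sketch; the `Tag` wrapper is hypothetical and merely stands in for `Q`:

```haskell
{-# LANGUAGE GADTs #-}
{-# LANGUAGE PatternSynonyms #-}
{-# LANGUAGE TypeApplications #-}
{-# LANGUAGE TypeOperators #-}
module TypeRepExample where

import Data.Typeable (Typeable)
import Type.Reflection (eqTypeRep, pattern App, typeOf, typeRep,
                        type (:~~:) (HRefl))

-- Hypothetical single-field wrapper, standing in for Q above.
newtype Tag a = Tag a

-- If the value is some 'Tag x', report the TypeRep of the payload type 'x'.
-- The HRefl match teaches GHC that k ~ Tag x, so 'Tag _payload <- value'
-- needs no unsafeCoerce.
payloadTypeOf :: Typeable k => k -> Maybe String
payloadTypeOf value
  | App outer inner <- typeOf value
  , Just HRefl <- outer `eqTypeRep` typeRep @Tag
  , Tag _payload <- value
  = Just (show inner)
  | otherwise = Nothing
```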
-filter' chunkSize maxRes pattern ts extract match' = partialSortByAscScore maxRes perfectScore (concat vss) +filter' chunkSize maxRes pat ts extract match' = partialSortByAscScore maxRes perfectScore (concat vss) where -- Preserve case for the first character, make all others lowercase - pattern' = case T.uncons pattern of + pat' = case T.uncons pat of Just (c, rest) -> T.cons c (T.toLower rest) - _ -> pattern - vss = map (mapMaybe (\t -> flip Scored t <$> match' pattern' (extract t))) (chunkList chunkSize ts) + _ -> pat + vss = map (mapMaybe (\t -> flip Scored t <$> match' pat' (extract t))) (chunkList chunkSize ts) `using` parList (evalList rseq) - perfectScore = fromMaybe (error $ T.unpack pattern) $ match' pattern' pattern' + perfectScore = fromMaybe (error $ T.unpack pat) $ match' pat' pat' -- | The function to filter a list of values by fuzzy search on the text extracted from them, -- using a custom matching function which determines how close words are. @@ -122,8 +121,8 @@ filter :: Int -- ^ Chunk size. 1000 works well. -> [t] -- ^ The list of values containing the text to search in. -> (t -> T.Text) -- ^ The function to extract the text from the container. -> [Scored t] -- ^ The list of results, sorted, highest score first. -filter chunkSize maxRes pattern ts extract = - filter' chunkSize maxRes pattern ts extract match +filter chunkSize maxRes pat ts extract = + filter' chunkSize maxRes pat ts extract match -- | Return all elements of the list that have a fuzzy match against the pattern, -- the closeness of the match is determined using the custom scoring match function that is passed. @@ -136,8 +135,8 @@ simpleFilter' :: Int -- ^ Chunk size. 1000 works well. -> (T.Text -> T.Text -> Maybe Int) -- ^ Custom scoring function to use for calculating how close words are -> [Scored T.Text] -- ^ The ones that match. -simpleFilter' chunk maxRes pattern xs match' = - filter' chunk maxRes pattern xs id match' +simpleFilter' chunk maxRes pat xs match' = + filter' chunk maxRes pat xs id match' -------------------------------------------------------------------------------- chunkList :: Int -> [a] -> [[a]] diff --git a/ghcide/test/exe/FuzzySearch.hs b/ghcide/test/exe/FuzzySearch.hs deleted file mode 100644 index f565b94526..0000000000 --- a/ghcide/test/exe/FuzzySearch.hs +++ /dev/null @@ -1,129 +0,0 @@ -module FuzzySearch (tests) where - -import Data.Char (toLower) -import Data.Maybe (catMaybes) -import qualified Data.Monoid.Textual as T -import Data.Text (Text, inits, pack) -import qualified Data.Text as Text -import Prelude hiding (filter) -import System.Directory (doesFileExist) -import System.IO.Unsafe (unsafePerformIO) -import Test.QuickCheck -import Test.Tasty -import Test.Tasty.ExpectedFailure -import Test.Tasty.QuickCheck (testProperty) -import qualified Text.Fuzzy as Fuzzy -import Text.Fuzzy (Fuzzy (..)) -import Text.Fuzzy.Parallel - -tests :: TestTree -tests = - testGroup - "Fuzzy search" - [ needDictionary $ - testGroup - "match works as expected on the english dictionary" - [ testProperty "for legit words" propLegit, - testProperty "for prefixes" propPrefix, - testProperty "for typos" propTypo - ] - ] - -test :: Text -> Bool -test candidate = do - let previous = - catMaybes - [ (d,) . 
Fuzzy.score - <$> referenceImplementation candidate d "" "" id - | d <- dictionary - ] - new = catMaybes [(d,) <$> match candidate d | d <- dictionary] - previous == new - -propLegit :: Property -propLegit = forAll (elements dictionary) test - -propPrefix :: Property -propPrefix = forAll (elements dictionary >>= elements . inits) test - -propTypo :: Property -propTypo = forAll typoGen test - -typoGen :: Gen Text -typoGen = do - w <- elements dictionary - l <- elements [0 .. Text.length w -1] - let wl = Text.index w l - c <- elements [ c | c <- ['a' .. 'z'], c /= wl] - return $ replaceAt w l c - -replaceAt :: Text -> Int -> Char -> Text -replaceAt t i c = - let (l, r) = Text.splitAt i t - in l <> Text.singleton c <> r - -dictionaryPath :: FilePath -dictionaryPath = "/usr/share/dict/words" - -{-# NOINLINE dictionary #-} -dictionary :: [Text] -dictionary = unsafePerformIO $ do - existsDictionary <- doesFileExist dictionaryPath - if existsDictionary - then map pack . words <$> readFile dictionaryPath - else pure [] - -referenceImplementation :: - (T.TextualMonoid s) => - -- | Pattern in lowercase except for first character - s -> - -- | The value containing the text to search in. - t -> - -- | The text to add before each match. - s -> - -- | The text to add after each match. - s -> - -- | The function to extract the text from the container. - (t -> s) -> - -- | The original value, rendered string and score. - Maybe (Fuzzy t s) -referenceImplementation pattern t pre post extract = - if null pat then Just (Fuzzy t result totalScore) else Nothing - where - null :: (T.TextualMonoid s) => s -> Bool - null = not . T.any (const True) - - s = extract t - (totalScore, _currScore, result, pat, _) = - T.foldl' - undefined - ( \(tot, cur, res, pat, isFirst) c -> - case T.splitCharacterPrefix pat of - Nothing -> (tot, 0, res <> T.singleton c, pat, isFirst) - Just (x, xs) -> - -- the case of the first character has to match - -- otherwise use lower case since the pattern is assumed lower - let !c' = if isFirst then c else toLower c - in if x == c' - then - let cur' = cur * 2 + 1 - in ( tot + cur', - cur', - res <> pre <> T.singleton c <> post, - xs, - False - ) - else (tot, 0, res <> T.singleton c, pat, isFirst) - ) - ( 0, - 1, -- matching at the start gives a bonus (cur = 1) - mempty, - pattern, - True - ) - s - -needDictionary :: TestTree -> TestTree -needDictionary - | null dictionary = ignoreTestBecause ("not found: " <> dictionaryPath) - | otherwise = id diff --git a/haskell-language-server.cabal b/haskell-language-server.cabal index acede2ec8f..3bfbfa4f53 100644 --- a/haskell-language-server.cabal +++ b/haskell-language-server.cabal @@ -1,7 +1,7 @@ cabal-version: 3.4 category: Development name: haskell-language-server -version: 2.8.0.0 +version: 2.10.0.0 synopsis: LSP server for GHC description: Please see the README on GitHub at @@ -14,7 +14,7 @@ copyright: The Haskell IDE Team license: Apache-2.0 license-file: LICENSE build-type: Simple -tested-with: GHC ==9.10.1 || ==9.8.2 || ==9.6.5 || ==9.4.8 || ==9.2.8 +tested-with: GHC ==9.10.1 || ==9.8.2 || ==9.6.5 || ==9.4.8 extra-source-files: README.md ChangeLog.md @@ -32,6 +32,12 @@ extra-source-files: plugins/**/*.txt plugins/**/*.hs + ghcide-test/data/**/*.cabal + ghcide-test/data/**/*.hs + ghcide-test/data/**/*.hs-boot + ghcide-test/data/**/*.project + ghcide-test/data/**/*.yaml + bindist/wrapper.in source-repository head @@ -42,6 +48,8 @@ common defaults default-language: GHC2021 -- Should have been in GHC2021, an oversight default-extensions: 
ExplicitNamespaces + build-depends: + , base >=4.12 && <5 common test-defaults ghc-options: -threaded -rtsopts -with-rtsopts=-N @@ -109,7 +117,7 @@ flag cabalfmt manual: True common cabalfmt - if flag(cabalfmt) + if flag(cabalfmt) && flag(cabal) build-depends: haskell-language-server:hls-cabal-fmt-plugin cpp-options: -Dhls_cabalfmt @@ -121,38 +129,39 @@ flag isolateCabalfmtTests library hls-cabal-fmt-plugin import: defaults, pedantic, warnings - if !flag(cabalfmt) + if !flag(cabalfmt) || !flag(cabal) buildable: False exposed-modules: Ide.Plugin.CabalFmt hs-source-dirs: plugins/hls-cabal-fmt-plugin/src build-depends: - , base >=4.12 && <5 , directory , filepath - , ghcide == 2.8.0.0 - , hls-plugin-api == 2.8.0.0 + , ghcide == 2.10.0.0 + , hls-plugin-api == 2.10.0.0 , lens , lsp-types , mtl , process-extras , text +-- The `hls-cabal-plugin` is needed for tests, as we need to install notification handlers test-suite hls-cabal-fmt-plugin-tests import: defaults, pedantic, test-defaults, warnings - if !flag(cabalfmt) + if !flag(cabalfmt) || !flag(cabal) buildable: False type: exitcode-stdio-1.0 hs-source-dirs: plugins/hls-cabal-fmt-plugin/test main-is: Main.hs build-depends: - , base , directory , filepath + , haskell-language-server:hls-cabal-plugin , haskell-language-server:hls-cabal-fmt-plugin - , hls-test-utils == 2.8.0.0 + , hls-plugin-api == 2.10.0.0 + , hls-test-utils == 2.10.0.0 if flag(isolateCabalfmtTests) - build-tool-depends: cabal-fmt:cabal-fmt ^>=0.1.6 + build-tool-depends: cabal-fmt:cabal-fmt ^>=0.1.12 cpp-options: -Dhls_isolate_cabalfmt_tests ----------------------------- @@ -165,7 +174,7 @@ flag cabalgild manual: True common cabalgild - if flag(cabalgild) + if flag(cabalgild) && flag(cabal) build-depends: haskell-language-server:hls-cabal-gild-plugin cpp-options: -Dhls_cabalgild @@ -177,37 +186,39 @@ flag isolateCabalGildTests library hls-cabal-gild-plugin import: defaults, pedantic, warnings - if !flag(cabalgild) + if !flag(cabalgild) || !flag(cabal) buildable: False exposed-modules: Ide.Plugin.CabalGild hs-source-dirs: plugins/hls-cabal-gild-plugin/src build-depends: - , base >=4.12 && <5 , directory , filepath - , ghcide == 2.8.0.0 - , hls-plugin-api == 2.8.0.0 + , ghcide == 2.10.0.0 + , hls-plugin-api == 2.10.0.0 , lsp-types , text , mtl , process-extras +-- The `hls-cabal-plugin` is needed for tests, as we need to install notification handlers test-suite hls-cabal-gild-plugin-tests import: defaults, pedantic, test-defaults, warnings - if !flag(cabalgild) + if !flag(cabalgild) || !flag(cabal) buildable: False type: exitcode-stdio-1.0 hs-source-dirs: plugins/hls-cabal-gild-plugin/test main-is: Main.hs build-depends: - , base , directory , filepath + , haskell-language-server:hls-cabal-plugin , haskell-language-server:hls-cabal-gild-plugin - , hls-test-utils == 2.8.0.0 + , hls-plugin-api == 2.10.0.0 + , hls-test-utils == 2.10.0.0 if flag(isolateCabalGildTests) - build-tool-depends: cabal-gild:cabal-gild ^>=1.3 + -- https://github.com/tfausak/cabal-gild/issues/89 + build-tool-depends: cabal-gild:cabal-gild >= 1.3 && < 1.3.2 cpp-options: -Dhls_isolate_cabalgild_tests ----------------------------- @@ -231,6 +242,7 @@ library hls-cabal-plugin exposed-modules: Ide.Plugin.Cabal Ide.Plugin.Cabal.Diagnostics + Ide.Plugin.Cabal.Completion.CabalFields Ide.Plugin.Cabal.Completion.Completer.FilePath Ide.Plugin.Cabal.Completion.Completer.Module Ide.Plugin.Cabal.Completion.Completer.Paths @@ -240,13 +252,16 @@ library hls-cabal-plugin Ide.Plugin.Cabal.Completion.Completions 
Ide.Plugin.Cabal.Completion.Data Ide.Plugin.Cabal.Completion.Types + Ide.Plugin.Cabal.Definition + Ide.Plugin.Cabal.FieldSuggest Ide.Plugin.Cabal.LicenseSuggest + Ide.Plugin.Cabal.CabalAdd Ide.Plugin.Cabal.Orphans + Ide.Plugin.Cabal.Outline Ide.Plugin.Cabal.Parse build-depends: - , base >=4.12 && <5 , bytestring , Cabal-syntax >= 3.7 , containers @@ -254,18 +269,25 @@ library hls-cabal-plugin , directory , filepath , extra >=1.7.4 - , ghcide == 2.8.0.0 + , ghcide == 2.10.0.0 , hashable - , hls-plugin-api == 2.8.0.0 - , hls-graph == 2.8.0.0 + , hls-plugin-api == 2.10.0.0 + , hls-graph == 2.10.0.0 , lens , lsp ^>=2.7 , lsp-types ^>=2.3 , regex-tdfa ^>=1.3.1 , text + , text-rope , transformers , unordered-containers >=0.2.10.0 , containers + , cabal-add + , process + , aeson + , Cabal + , pretty + hs-source-dirs: plugins/hls-cabal-plugin/src test-suite hls-cabal-plugin-tests @@ -276,20 +298,24 @@ test-suite hls-cabal-plugin-tests hs-source-dirs: plugins/hls-cabal-plugin/test main-is: Main.hs other-modules: + CabalAdd Completer Context + Definition + Outline Utils build-depends: - , base , bytestring , Cabal-syntax >= 3.7 + , extra , filepath , ghcide , haskell-language-server:hls-cabal-plugin - , hls-test-utils == 2.8.0.0 + , hls-test-utils == 2.10.0.0 , lens , lsp-types , text + , hls-plugin-api ----------------------------- -- class plugin @@ -318,15 +344,14 @@ library hls-class-plugin hs-source-dirs: plugins/hls-class-plugin/src build-depends: , aeson - , base >=4.12 && <5 , containers , deepseq , extra , ghc - , ghc-exactprint >= 1.5 && < 1.10.0.0 - , ghcide == 2.8.0.0 + , ghc-exactprint >= 1.5 && < 1.13.0.0 + , ghcide == 2.10.0.0 , hls-graph - , hls-plugin-api == 2.8.0.0 + , hls-plugin-api == 2.10.0.0 , lens , lsp , mtl @@ -345,10 +370,9 @@ test-suite hls-class-plugin-tests hs-source-dirs: plugins/hls-class-plugin/test main-is: Main.hs build-depends: - , base , filepath , haskell-language-server:hls-class-plugin - , hls-test-utils == 2.8.0.0 + , hls-test-utils == 2.10.0.0 , lens , lsp-types , text @@ -380,12 +404,11 @@ library hls-call-hierarchy-plugin hs-source-dirs: plugins/hls-call-hierarchy-plugin/src build-depends: , aeson - , base >=4.12 && <5 , containers , extra - , ghcide == 2.8.0.0 - , hiedb ^>= 0.6.0.0 - , hls-plugin-api == 2.8.0.0 + , ghcide == 2.10.0.0 + , hiedb ^>= 0.6.0.2 + , hls-plugin-api == 2.10.0.0 , lens , lsp >=2.7 , sqlite-simple @@ -402,12 +425,11 @@ test-suite hls-call-hierarchy-plugin-tests main-is: Main.hs build-depends: , aeson - , base , containers , extra , filepath , haskell-language-server:hls-call-hierarchy-plugin - , hls-test-utils == 2.8.0.0 + , hls-test-utils == 2.10.0.0 , lens , lsp , lsp-test @@ -438,9 +460,9 @@ library hls-eval-plugin hs-source-dirs: plugins/hls-eval-plugin/src other-modules: Ide.Plugin.Eval.Code - Ide.Plugin.Eval.CodeLens Ide.Plugin.Eval.Config Ide.Plugin.Eval.GHC + Ide.Plugin.Eval.Handlers Ide.Plugin.Eval.Parse.Comments Ide.Plugin.Eval.Parse.Option Ide.Plugin.Eval.Rules @@ -448,7 +470,6 @@ library hls-eval-plugin build-depends: , aeson - , base >=4.12 && <5 , bytestring , containers , deepseq @@ -458,9 +479,9 @@ library hls-eval-plugin , filepath , ghc , ghc-boot-th - , ghcide == 2.8.0.0 + , ghcide == 2.10.0.0 , hls-graph - , hls-plugin-api == 2.8.0.0 + , hls-plugin-api == 2.10.0.0 , lens , lsp , lsp-types @@ -468,6 +489,7 @@ library hls-eval-plugin , mtl , parser-combinators >=1.2 , text + , text-rope , transformers , unliftio , unordered-containers @@ -485,13 +507,12 @@ test-suite hls-eval-plugin-tests ghc-options: 
-fno-ignore-asserts build-depends: , aeson - , base , containers , extra , filepath , haskell-language-server:hls-eval-plugin , hls-plugin-api - , hls-test-utils == 2.8.0.0 + , hls-test-utils == 2.10.0.0 , lens , lsp-types , text @@ -500,16 +521,16 @@ test-suite hls-eval-plugin-tests -- import lens plugin ----------------------------- +flag importLens + description: Enable importLens plugin + default: True + manual: False + common importLens if flag(importLens) build-depends: haskell-language-server:hls-explicit-imports-plugin cpp-options: -Dhls_importLens -flag importLens - description: Enable importLens plugin - default: True - manual: True - library hls-explicit-imports-plugin import: defaults, pedantic, warnings if !flag(importlens) @@ -518,13 +539,12 @@ library hls-explicit-imports-plugin hs-source-dirs: plugins/hls-explicit-imports-plugin/src build-depends: , aeson - , base >=4.12 && <5 , containers , deepseq , ghc - , ghcide == 2.8.0.0 + , ghcide == 2.10.0.0 , hls-graph - , hls-plugin-api == 2.8.0.0 + , hls-plugin-api == 2.10.0.0 , lens , lsp , mtl @@ -542,11 +562,10 @@ test-suite hls-explicit-imports-plugin-tests hs-source-dirs: plugins/hls-explicit-imports-plugin/test main-is: Main.hs build-depends: - , base , extra , filepath , haskell-language-server:hls-explicit-imports-plugin - , hls-test-utils == 2.8.0.0 + , hls-test-utils == 2.10.0.0 , lens , lsp-types , text @@ -572,13 +591,12 @@ library hls-rename-plugin exposed-modules: Ide.Plugin.Rename hs-source-dirs: plugins/hls-rename-plugin/src build-depends: - , base >=4.12 && <5 , containers - , ghcide == 2.8.0.0 + , ghcide == 2.10.0.0 , hashable - , hiedb ^>= 0.6.0.0 + , hiedb ^>= 0.6.0.2 , hie-compat - , hls-plugin-api == 2.8.0.0 + , hls-plugin-api == 2.10.0.0 , haskell-language-server:hls-refactor-plugin , lens , lsp-types @@ -599,12 +617,11 @@ test-suite hls-rename-plugin-tests main-is: Main.hs build-depends: , aeson - , base , containers , filepath , hls-plugin-api , haskell-language-server:hls-rename-plugin - , hls-test-utils == 2.8.0.0 + , hls-test-utils == 2.10.0.0 , lens , lsp-types , text @@ -619,26 +636,25 @@ flag retrie manual: True common retrie - if flag(retrie) + if flag(retrie) && (impl(ghc < 9.10) || flag(ignore-plugins-ghc-bounds)) build-depends: haskell-language-server:hls-retrie-plugin cpp-options: -Dhls_retrie library hls-retrie-plugin import: defaults, pedantic, warnings - if !flag(retrie) + if !(flag(retrie) && (impl(ghc < 9.10) || flag(ignore-plugins-ghc-bounds))) buildable: False exposed-modules: Ide.Plugin.Retrie hs-source-dirs: plugins/hls-retrie-plugin/src build-depends: , aeson - , base >=4.12 && <5 , bytestring , containers , extra , ghc - , ghcide == 2.8.0.0 + , ghcide == 2.10.0.0 , hashable - , hls-plugin-api == 2.8.0.0 + , hls-plugin-api == 2.10.0.0 , haskell-language-server:hls-refactor-plugin , lens , lsp @@ -648,6 +664,7 @@ library hls-retrie-plugin , safe-exceptions , stm , text + , text-rope , transformers , unordered-containers @@ -656,18 +673,17 @@ library hls-retrie-plugin test-suite hls-retrie-plugin-tests import: defaults, pedantic, test-defaults, warnings - if !flag(retrie) + if !(flag(retrie) && (impl(ghc < 9.10) || flag(ignore-plugins-ghc-bounds))) buildable: False type: exitcode-stdio-1.0 hs-source-dirs: plugins/hls-retrie-plugin/test main-is: Main.hs build-depends: - , base , containers , filepath , hls-plugin-api , haskell-language-server:{hls-refactor-plugin, hls-retrie-plugin} - , hls-test-utils == 2.8.0.0 + , hls-test-utils == 2.10.0.0 , text ----------------------------- @@ 
-687,39 +703,41 @@ flag hlint manual: True common hlint - if flag(hlint) + if flag(hlint) && ((impl(ghc < 9.10) || impl(ghc > 9.11)) || flag(ignore-plugins-ghc-bounds)) build-depends: haskell-language-server:hls-hlint-plugin cpp-options: -Dhls_hlint library hls-hlint-plugin import: defaults, pedantic, warnings - if !flag(hlint) + -- https://github.com/ndmitchell/hlint/pull/1594 + if !(flag(hlint)) || ((impl(ghc >= 9.10) && impl(ghc < 9.11)) && !flag(ignore-plugins-ghc-bounds)) buildable: False exposed-modules: Ide.Plugin.Hlint hs-source-dirs: plugins/hls-hlint-plugin/src build-depends: , aeson - , base >=4.12 && <5 , bytestring , containers , deepseq , filepath - , ghcide == 2.8.0.0 + , ghcide == 2.10.0.0 , hashable - , hlint >= 3.5 && < 3.9 - , hls-plugin-api == 2.8.0.0 + , hlint >= 3.5 && < 3.11 + , hls-plugin-api == 2.10.0.0 , lens - , lsp , mtl , refact , regex-tdfa , stm , temporary , text + , text-rope , transformers , unordered-containers , ghc-lib-parser-ex , apply-refact + -- + , lsp-types if flag(ghc-lib) cpp-options: -DGHC_LIB @@ -735,19 +753,21 @@ library hls-hlint-plugin test-suite hls-hlint-plugin-tests import: defaults, pedantic, test-defaults, warnings - if !flag(hlint) + if !flag(hlint) || ((impl(ghc >= 9.10) && impl(ghc < 9.11)) && !flag(ignore-plugins-ghc-bounds)) buildable: False type: exitcode-stdio-1.0 hs-source-dirs: plugins/hls-hlint-plugin/test main-is: Main.hs + -- Work around https://gitlab.haskell.org/ghc/ghc/-/issues/24648 + if os(darwin) + ghc-options: -optl-Wl,-ld_classic build-depends: aeson - , base , containers , filepath , haskell-language-server:hls-hlint-plugin , hls-plugin-api - , hls-test-utils == 2.8.0.0 + , hls-test-utils == 2.10.0.0 , lens , lsp-types , text @@ -762,20 +782,17 @@ flag stan manual: True common stan - if flag(stan) && (impl(ghc > 8.8.1) && impl(ghc <= 9.2.3) || impl(ghc >= 9.4.0) && impl(ghc < 9.10.0)) + if flag(stan) && (impl(ghc < 9.11) || flag(ignore-plugins-ghc-bounds)) build-depends: haskell-language-server:hls-stan-plugin cpp-options: -Dhls_stan library hls-stan-plugin import: defaults, pedantic, warnings - if flag(stan) && (impl(ghc > 8.8.1) && impl(ghc <= 9.2.3) || impl(ghc >= 9.4.0) && impl(ghc < 9.10.0)) - buildable: True - else + if !flag(stan) || (impl(ghc > 9.11) && !flag(ignore-plugins-ghc-bounds)) buildable: False exposed-modules: Ide.Plugin.Stan hs-source-dirs: plugins/hls-stan-plugin/src build-depends: - base , deepseq , hashable , hie-compat @@ -796,19 +813,16 @@ library hls-stan-plugin test-suite hls-stan-plugin-tests import: defaults, pedantic, test-defaults, warnings - if flag(stan) && (impl(ghc > 8.8.1) && impl(ghc <= 9.2.3) || impl(ghc >= 9.4.0) && impl(ghc < 9.10.0)) - buildable: True - else + if !flag(stan) || (impl(ghc > 9.11) && !flag(ignore-plugins-ghc-bounds)) buildable: False type: exitcode-stdio-1.0 hs-source-dirs: plugins/hls-stan-plugin/test main-is: Main.hs build-depends: - , base , filepath , haskell-language-server:hls-stan-plugin , hls-plugin-api - , hls-test-utils == 2.8.0.0 + , hls-test-utils == 2.10.0.0 , lens , lsp-types , text @@ -837,13 +851,13 @@ library hls-module-name-plugin hs-source-dirs: plugins/hls-module-name-plugin/src build-depends: , aeson - , base >=4.12 && <5 , containers , filepath - , ghcide == 2.8.0.0 - , hls-plugin-api == 2.8.0.0 + , ghcide == 2.10.0.0 + , hls-plugin-api == 2.10.0.0 , lsp , text + , text-rope , transformers @@ -855,10 +869,9 @@ test-suite hls-module-name-plugin-tests hs-source-dirs: plugins/hls-module-name-plugin/test main-is: Main.hs build-depends: - , base 
, filepath , haskell-language-server:hls-module-name-plugin - , hls-test-utils == 2.8.0.0 + , hls-test-utils == 2.10.0.0 ----------------------------- -- pragmas plugin @@ -881,12 +894,13 @@ library hls-pragmas-plugin exposed-modules: Ide.Plugin.Pragmas hs-source-dirs: plugins/hls-pragmas-plugin/src build-depends: - , base >=4.12 && <5 + , aeson , extra , fuzzy - , ghcide == 2.8.0.0 - , hls-plugin-api == 2.8.0.0 + , ghcide == 2.10.0.0 + , hls-plugin-api == 2.10.0.0 , lens + , lens-aeson , lsp , text , transformers @@ -901,10 +915,9 @@ test-suite hls-pragmas-plugin-tests main-is: Main.hs build-depends: , aeson - , base , filepath , haskell-language-server:hls-pragmas-plugin - , hls-test-utils == 2.8.0.0 + , hls-test-utils == 2.10.0.0 , lens , lsp-types , text @@ -919,13 +932,13 @@ flag splice manual: True common splice - if flag(splice) + if flag(splice) && (impl(ghc < 9.10) || flag(ignore-plugins-ghc-bounds)) build-depends: haskell-language-server:hls-splice-plugin cpp-options: -Dhls_splice library hls-splice-plugin import: defaults, pedantic, warnings - if !flag(splice) + if !(flag(splice) && (impl(ghc < 9.10) || flag(ignore-plugins-ghc-bounds))) buildable: False exposed-modules: Ide.Plugin.Splice @@ -934,13 +947,11 @@ library hls-splice-plugin hs-source-dirs: plugins/hls-splice-plugin/src build-depends: , aeson - , base >=4.12 && <5 , extra , foldl , ghc - , ghc-exactprint - , ghcide == 2.8.0.0 - , hls-plugin-api == 2.8.0.0 + , ghcide == 2.10.0.0 + , hls-plugin-api == 2.10.0.0 , haskell-language-server:hls-refactor-plugin , lens , lsp @@ -955,16 +966,15 @@ library hls-splice-plugin test-suite hls-splice-plugin-tests import: defaults, pedantic, test-defaults, warnings - if !flag(splice) + if !(flag(splice) && (impl(ghc < 9.10) || flag(ignore-plugins-ghc-bounds))) buildable: False type: exitcode-stdio-1.0 hs-source-dirs: plugins/hls-splice-plugin/test main-is: Main.hs build-depends: - , base , filepath , haskell-language-server:hls-splice-plugin - , hls-test-utils == 2.8.0.0 + , hls-test-utils == 2.10.0.0 , text ----------------------------- @@ -989,13 +999,12 @@ library hls-alternate-number-format-plugin other-modules: Ide.Plugin.Literals hs-source-dirs: plugins/hls-alternate-number-format-plugin/src build-depends: - , base >=4.12 && < 5 , containers , extra - , ghcide == 2.8.0.0 + , ghcide == 2.10.0.0 , ghc-boot-th , hls-graph - , hls-plugin-api == 2.8.0.0 + , hls-plugin-api == 2.10.0.0 , lens , lsp ^>=2.7 , mtl @@ -1018,10 +1027,9 @@ test-suite hls-alternate-number-format-plugin-tests main-is: Main.hs ghc-options: -fno-ignore-asserts build-depends: - , base >=4.12 && < 5 , filepath , haskell-language-server:hls-alternate-number-format-plugin - , hls-test-utils == 2.8.0.0 + , hls-test-utils == 2.10.0.0 , regex-tdfa , tasty-quickcheck , text @@ -1052,13 +1060,13 @@ library hls-qualify-imported-names-plugin exposed-modules: Ide.Plugin.QualifyImportedNames hs-source-dirs: plugins/hls-qualify-imported-names-plugin/src build-depends: - , base >=4.12 && <5 , containers - , ghcide == 2.8.0.0 - , hls-plugin-api == 2.8.0.0 + , ghcide == 2.10.0.0 + , hls-plugin-api == 2.10.0.0 , lens , lsp , text + , text-rope , dlist , transformers @@ -1073,11 +1081,10 @@ test-suite hls-qualify-imported-names-plugin-tests hs-source-dirs: plugins/hls-qualify-imported-names-plugin/test main-is: Main.hs build-depends: - , base , text , filepath , haskell-language-server:hls-qualify-imported-names-plugin - , hls-test-utils == 2.8.0.0 + , hls-test-utils == 2.10.0.0 ----------------------------- -- code range plugin @@ 
-1104,13 +1111,12 @@ library hls-code-range-plugin Ide.Plugin.CodeRange.ASTPreProcess hs-source-dirs: plugins/hls-code-range-plugin/src build-depends: - , base >=4.12 && <5 , containers , deepseq , extra - , ghcide == 2.8.0.0 + , ghcide == 2.10.0.0 , hashable - , hls-plugin-api == 2.8.0.0 + , hls-plugin-api == 2.10.0.0 , lens , lsp , mtl @@ -1129,11 +1135,10 @@ test-suite hls-code-range-plugin-tests Ide.Plugin.CodeRangeTest Ide.Plugin.CodeRange.RulesTest build-depends: - , base , bytestring , filepath , haskell-language-server:hls-code-range-plugin - , hls-test-utils == 2.8.0.0 + , hls-test-utils == 2.10.0.0 , lens , lsp , lsp-test @@ -1161,9 +1166,8 @@ library hls-change-type-signature-plugin exposed-modules: Ide.Plugin.ChangeTypeSignature hs-source-dirs: plugins/hls-change-type-signature-plugin/src build-depends: - , base >=4.12 && < 5 - , ghcide == 2.8.0.0 - , hls-plugin-api == 2.8.0.0 + , ghcide == 2.10.0.0 + , hls-plugin-api == 2.10.0.0 , lsp-types , regex-tdfa , syb @@ -1185,10 +1189,9 @@ test-suite hls-change-type-signature-plugin-tests hs-source-dirs: plugins/hls-change-type-signature-plugin/test main-is: Main.hs build-depends: - , base >=4.12 && < 5 , filepath , haskell-language-server:hls-change-type-signature-plugin - , hls-test-utils == 2.8.0.0 + , hls-test-utils == 2.10.0.0 , regex-tdfa , text default-extensions: @@ -1218,13 +1221,12 @@ library hls-gadt-plugin hs-source-dirs: plugins/hls-gadt-plugin/src build-depends: , aeson - , base >=4.12 && <5 , containers , extra , ghc - , ghcide == 2.8.0.0 + , ghcide == 2.10.0.0 , ghc-exactprint - , hls-plugin-api == 2.8.0.0 + , hls-plugin-api == 2.10.0.0 , haskell-language-server:hls-refactor-plugin , lens , lsp >=2.7 @@ -1242,10 +1244,9 @@ test-suite hls-gadt-plugin-tests hs-source-dirs: plugins/hls-gadt-plugin/test main-is: Main.hs build-depends: - , base , filepath , haskell-language-server:hls-gadt-plugin - , hls-test-utils == 2.8.0.0 + , hls-test-utils == 2.10.0.0 , text ----------------------------- @@ -1269,13 +1270,12 @@ library hls-explicit-fixity-plugin exposed-modules: Ide.Plugin.ExplicitFixity hs-source-dirs: plugins/hls-explicit-fixity-plugin/src build-depends: - base >=4.12 && <5 , containers , deepseq , extra - , ghcide == 2.8.0.0 + , ghcide == 2.10.0.0 , hashable - , hls-plugin-api == 2.8.0.0 + , hls-plugin-api == 2.10.0.0 , lsp >=2.7 , text @@ -1289,10 +1289,9 @@ test-suite hls-explicit-fixity-plugin-tests hs-source-dirs: plugins/hls-explicit-fixity-plugin/test main-is: Main.hs build-depends: - , base , filepath , haskell-language-server:hls-explicit-fixity-plugin - , hls-test-utils == 2.8.0.0 + , hls-test-utils == 2.10.0.0 , text ----------------------------- @@ -1315,9 +1314,8 @@ library hls-explicit-record-fields-plugin buildable: False exposed-modules: Ide.Plugin.ExplicitFields build-depends: - , base >=4.12 && <5 - , ghcide == 2.8.0.0 - , hls-plugin-api == 2.8.0.0 + , ghcide == 2.10.0.0 + , hls-plugin-api == 2.10.0.0 , lsp , lens , hls-graph @@ -1339,11 +1337,11 @@ test-suite hls-explicit-record-fields-plugin-tests hs-source-dirs: plugins/hls-explicit-record-fields-plugin/test main-is: Main.hs build-depends: - , base , filepath , text + , ghcide , haskell-language-server:hls-explicit-record-fields-plugin - , hls-test-utils == 2.8.0.0 + , hls-test-utils == 2.10.0.0 ----------------------------- -- overloaded record dot plugin @@ -1365,7 +1363,6 @@ library hls-overloaded-record-dot-plugin buildable: False exposed-modules: Ide.Plugin.OverloadedRecordDot build-depends: - , base >=4.16 && <5 , aeson , ghcide , 
hls-plugin-api @@ -1387,11 +1384,10 @@ test-suite hls-overloaded-record-dot-plugin-tests hs-source-dirs: plugins/hls-overloaded-record-dot-plugin/test main-is: Main.hs build-depends: - , base , filepath , text , haskell-language-server:hls-overloaded-record-dot-plugin - , hls-test-utils == 2.8.0.0 + , hls-test-utils == 2.10.0.0 ----------------------------- @@ -1404,21 +1400,21 @@ flag floskell manual: True common floskell - if flag(floskell) + if flag(floskell) && (impl(ghc < 9.10) || flag(ignore-plugins-ghc-bounds)) build-depends: haskell-language-server:hls-floskell-plugin cpp-options: -Dhls_floskell library hls-floskell-plugin import: defaults, pedantic, warnings - if !flag(floskell) + -- https://github.com/ennocramer/floskell/pull/82 + if !(flag(floskell) && (impl(ghc < 9.10) || flag(ignore-plugins-ghc-bounds))) buildable: False exposed-modules: Ide.Plugin.Floskell hs-source-dirs: plugins/hls-floskell-plugin/src build-depends: - , base >=4.12 && <5 , floskell ^>=0.11.0 - , ghcide == 2.8.0.0 - , hls-plugin-api == 2.8.0.0 + , ghcide == 2.10.0.0 + , hls-plugin-api == 2.10.0.0 , lsp-types ^>=2.3 , mtl , text @@ -1426,16 +1422,15 @@ library hls-floskell-plugin test-suite hls-floskell-plugin-tests import: defaults, pedantic, test-defaults, warnings - if !flag(floskell) + if !(flag(floskell) && (impl(ghc < 9.10) || flag(ignore-plugins-ghc-bounds))) buildable: False type: exitcode-stdio-1.0 hs-source-dirs: plugins/hls-floskell-plugin/test main-is: Main.hs build-depends: - , base , filepath , haskell-language-server:hls-floskell-plugin - , hls-test-utils == 2.8.0.0 + , hls-test-utils == 2.10.0.0 ----------------------------- -- fourmolu plugin @@ -1458,12 +1453,11 @@ library hls-fourmolu-plugin exposed-modules: Ide.Plugin.Fourmolu hs-source-dirs: plugins/hls-fourmolu-plugin/src build-depends: - , base >=4.12 && <5 , filepath - , fourmolu ^>= 0.14 || ^>= 0.15 || ^>= 0.16 + , fourmolu ^>= 0.14 || ^>= 0.15 || ^>= 0.16 || ^>=0.17 || ^>=0.18 , ghc-boot-th - , ghcide == 2.8.0.0 - , hls-plugin-api == 2.8.0.0 + , ghcide == 2.10.0.0 + , hls-plugin-api == 2.10.0.0 , lens , lsp , mtl @@ -1485,12 +1479,11 @@ test-suite hls-fourmolu-plugin-tests build-tool-depends: fourmolu:fourmolu build-depends: - , base >=4.12 && <5 , aeson , filepath , haskell-language-server:hls-fourmolu-plugin , hls-plugin-api - , hls-test-utils == 2.8.0.0 + , hls-test-utils == 2.10.0.0 , lsp-test ----------------------------- @@ -1514,16 +1507,15 @@ library hls-ormolu-plugin exposed-modules: Ide.Plugin.Ormolu hs-source-dirs: plugins/hls-ormolu-plugin/src build-depends: - , base >=4.12 && <5 , extra , filepath , ghc-boot-th - , ghcide == 2.8.0.0 - , hls-plugin-api == 2.8.0.0 + , ghcide == 2.10.0.0 + , hls-plugin-api == 2.10.0.0 , lsp , mtl , process-extras >= 0.7.1 - , ormolu ^>=0.1.2 || ^>= 0.2 || ^>= 0.3 || ^>= 0.5 || ^>= 0.6 || ^>= 0.7 + , ormolu ^>=0.5.3 || ^>= 0.6 || ^>= 0.7 || ^>=0.8 , text , transformers @@ -1541,12 +1533,11 @@ test-suite hls-ormolu-plugin-tests build-tool-depends: ormolu:ormolu build-depends: - , base , aeson , filepath , haskell-language-server:hls-ormolu-plugin , hls-plugin-api - , hls-test-utils == 2.8.0.0 + , hls-test-utils == 2.10.0.0 , lsp-types , ormolu @@ -1566,20 +1557,20 @@ common stylishHaskell library hls-stylish-haskell-plugin import: defaults, pedantic, warnings + -- https://github.com/haskell/stylish-haskell/issues/479 if !flag(stylishHaskell) buildable: False exposed-modules: Ide.Plugin.StylishHaskell hs-source-dirs: plugins/hls-stylish-haskell-plugin/src build-depends: - , base >=4.12 && <5 , 
directory , filepath , ghc-boot-th - , ghcide == 2.8.0.0 - , hls-plugin-api == 2.8.0.0 + , ghcide == 2.10.0.0 + , hls-plugin-api == 2.10.0.0 , lsp-types , mtl - , stylish-haskell ^>=0.12 || ^>=0.13 || ^>=0.14 + , stylish-haskell >=0.12 && <0.16 , text @@ -1591,10 +1582,9 @@ test-suite hls-stylish-haskell-plugin-tests hs-source-dirs: plugins/hls-stylish-haskell-plugin/test main-is: Main.hs build-depends: - , base , filepath , haskell-language-server:hls-stylish-haskell-plugin - , hls-test-utils == 2.8.0.0 + , hls-test-utils == 2.10.0.0 ----------------------------- -- refactor plugin @@ -1642,15 +1632,15 @@ library hls-refactor-plugin ViewPatterns hs-source-dirs: plugins/hls-refactor-plugin/src build-depends: - , base >=4.12 && <5 , ghc , bytestring , ghc-boot , regex-tdfa - , ghcide == 2.8.0.0 - , hls-plugin-api == 2.8.0.0 + , ghcide == 2.10.0.0 + , hls-plugin-api == 2.10.0.0 , lsp , text + , text-rope , transformers , unordered-containers , containers @@ -1679,14 +1669,13 @@ test-suite hls-refactor-plugin-tests other-modules: Test.AddArgument ghc-options: -O0 build-depends: - , base , data-default , directory , extra , filepath , ghcide:ghcide , haskell-language-server:hls-refactor-plugin - , hls-test-utils == 2.8.0.0 + , hls-test-utils == 2.10.0.0 , lens , lsp-test , lsp-types @@ -1729,13 +1718,12 @@ library hls-semantic-tokens-plugin hs-source-dirs: plugins/hls-semantic-tokens-plugin/src build-depends: - , base >=4.12 && <5 , containers , extra , text-rope , mtl >= 2.2 - , ghcide == 2.8.0.0 - , hls-plugin-api == 2.8.0.0 + , ghcide == 2.10.0.0 + , hls-plugin-api == 2.10.0.0 , lens , lsp >=2.6 , text @@ -1745,7 +1733,7 @@ library hls-semantic-tokens-plugin , array , deepseq , dlist - , hls-graph == 2.8.0.0 + , hls-graph == 2.10.0.0 , template-haskell , data-default , stm @@ -1763,14 +1751,13 @@ test-suite hls-semantic-tokens-plugin-tests build-depends: , aeson - , base , containers , data-default , filepath - , ghcide == 2.8.0.0 + , ghcide == 2.10.0.0 , haskell-language-server:hls-semantic-tokens-plugin - , hls-plugin-api == 2.8.0.0 - , hls-test-utils == 2.8.0.0 + , hls-plugin-api == 2.10.0.0 + , hls-test-utils == 2.10.0.0 , lens , lsp , lsp-test @@ -1799,11 +1786,10 @@ library hls-notes-plugin Ide.Plugin.Notes hs-source-dirs: plugins/hls-notes-plugin/src build-depends: - , base >=4.12 && <5 , array - , ghcide == 2.8.0.0 - , hls-graph == 2.8.0.0 - , hls-plugin-api == 2.8.0.0 + , ghcide == 2.10.0.0 + , hls-graph == 2.10.0.0 + , hls-plugin-api == 2.10.0.0 , lens , lsp >=2.7 , mtl >= 2.2 @@ -1827,10 +1813,9 @@ test-suite hls-notes-plugin-tests hs-source-dirs: plugins/hls-notes-plugin/test main-is: NotesTest.hs build-depends: - , base , filepath , haskell-language-server:hls-notes-plugin - , hls-test-utils == 2.8.0.0 + , hls-test-utils == 2.10.0.0 default-extensions: OverloadedStrings ---------------------------- @@ -1885,16 +1870,15 @@ library hs-source-dirs: src build-depends: , aeson-pretty - , base >=4.16 && <5 , data-default , directory , extra , filepath , ghc - , ghcide == 2.8.0.0 + , ghcide == 2.10.0.0 , githash >=0.1.6.1 , hie-bios - , hls-plugin-api == 2.8.0.0 + , hls-plugin-api == 2.10.0.0 , optparse-applicative , optparse-simple , prettyprinter >= 1.7 @@ -1931,7 +1915,6 @@ executable haskell-language-server ghc-options: -dynamic build-depends: - , base >=4.16 && <5 , haskell-language-server , hls-plugin-api , lsp @@ -1957,7 +1940,6 @@ executable haskell-language-server-wrapper "-with-rtsopts=-I0 -A128M" build-depends: - , base >=4.16 && <5 , data-default , directory , extra @@ 
-1988,11 +1970,9 @@ test-suite func-test type: exitcode-stdio-1.0 build-tool-depends: haskell-language-server:haskell-language-server, - ghcide:ghcide-test-preprocessor build-depends: , aeson - , base >=4.16 && <5 , bytestring , containers , deepseq @@ -2001,7 +1981,7 @@ test-suite func-test , ghcide:ghcide , hashable , hls-plugin-api - , hls-test-utils == 2.8.0.0 + , hls-test-utils == 2.10.0.0 , lens , lsp-test , lsp-types @@ -2027,7 +2007,7 @@ test-suite func-test if flag(eval) cpp-options: -Dhls_eval -- formatters - if flag(floskell) + if flag(floskell) && (impl(ghc < 9.10) || flag(ignore-plugins-ghc-bounds)) cpp-options: -Dhls_floskell if flag(fourmolu) cpp-options: -Dhls_fourmolu @@ -2044,9 +2024,8 @@ test-suite wrapper-test haskell-language-server:haskell-language-server build-depends: - , base >=4.16 && <5 , extra - , hls-test-utils == 2.8.0.0 + , hls-test-utils == 2.10.0.0 , process hs-source-dirs: test/wrapper @@ -2061,7 +2040,7 @@ benchmark benchmark hs-source-dirs: bench build-tool-depends: haskell-language-server:ghcide-bench, - hp2pretty:hp2pretty, + eventlog2html:eventlog2html, default-extensions: LambdaCase RecordWildCards @@ -2069,7 +2048,6 @@ benchmark benchmark build-depends: , aeson - , base >=4.16 && <5 , containers , data-default , directory @@ -2085,26 +2063,37 @@ benchmark benchmark , text , yaml +flag test-exe + description: Build the ghcide-test-preprocessor executable + default: True -test-suite ghcide-tests +executable ghcide-test-preprocessor import: warnings + default-language: GHC2021 + hs-source-dirs: ghcide-test/preprocessor + main-is: Main.hs + build-depends: base >=4 && <5 + + if !flag(test-exe) + buildable: False + +test-suite ghcide-tests + import: warnings, defaults type: exitcode-stdio-1.0 default-language: GHC2021 build-tool-depends: , ghcide:ghcide - , ghcide:ghcide-test-preprocessor + , haskell-language-server:ghcide-test-preprocessor , implicit-hie:gen-hie build-depends: , aeson - , base , containers , data-default , directory , enummapset , extra , filepath - , fuzzy , ghcide , hls-plugin-api , lens @@ -2112,7 +2101,6 @@ test-suite ghcide-tests , lsp , lsp-test ^>=0.17.1 , lsp-types - , monoid-subclasses , mtl , network-uri , QuickCheck @@ -2130,12 +2118,12 @@ test-suite ghcide-tests , text , text-rope , unordered-containers - , hls-test-utils == 2.8.0.0 + , hls-test-utils == 2.10.0.0 if impl(ghc <9.3) build-depends: ghc-typelits-knownnat - hs-source-dirs: ghcide/test/exe + hs-source-dirs: ghcide-test/exe ghc-options: -threaded -O0 main-is: Main.hs @@ -2152,6 +2140,7 @@ test-suite ghcide-tests DiagnosticTests ExceptionTests FindDefinitionAndHoverTests + FindImplementationAndHoverTests FuzzySearch GarbageCollectionTests HaddockTests @@ -2168,6 +2157,7 @@ test-suite ghcide-tests PreprocessorTests Progress ReferenceTests + ResolveTests RootUriTests SafeTests SymlinkTests @@ -2182,12 +2172,16 @@ test-suite ghcide-tests RecordWildCards ViewPatterns +flag ghcide-bench + description: Build the ghcide-bench executable + default: True executable ghcide-bench - default-language: GHC2021 + import: defaults + if !flag(ghcide-bench) + buildable: False build-depends: aeson, - base, bytestring, containers, data-default, @@ -2216,7 +2210,7 @@ executable ghcide-bench ViewPatterns library ghcide-bench-lib - default-language: GHC2021 + import: defaults hs-source-dirs: ghcide-bench/src ghc-options: -Wall -Wno-name-shadowing exposed-modules: @@ -2225,7 +2219,6 @@ library ghcide-bench-lib build-depends: aeson, async, - base == 4.*, binary, bytestring, deepseq, @@ 
-2252,8 +2245,8 @@ library ghcide-bench-lib test-suite ghcide-bench-test + import: defaults type: exitcode-stdio-1.0 - default-language: GHC2021 build-tool-depends: ghcide:ghcide, main-is: Main.hs @@ -2261,7 +2254,6 @@ test-suite ghcide-bench-test ghc-options: -Wunused-packages ghc-options: -threaded -Wall build-depends: - base, extra, haskell-language-server:ghcide-bench-lib, lsp-test ^>= 0.17, diff --git a/hie-compat/hie-compat.cabal b/hie-compat/hie-compat.cabal index 49bf9990a5..2b361df887 100644 --- a/hie-compat/hie-compat.cabal +++ b/hie-compat/hie-compat.cabal @@ -24,7 +24,7 @@ source-repository head library default-language: GHC2021 build-depends: - base < 4.21, array, bytestring, containers, directory, filepath, transformers + base < 4.22, array, bytestring, containers, directory, filepath, transformers build-depends: ghc >= 8.10, ghc-boot ghc-options: -Wall -Wno-name-shadowing @@ -35,7 +35,5 @@ library Compat.HieDebug Compat.HieUtils - if (impl(ghc >= 9.2) && impl(ghc < 9.3)) - hs-source-dirs: src-ghc92 src-reexport-ghc9 if (impl(ghc >= 9.4)) hs-source-dirs: src-reexport-ghc92 diff --git a/hie-compat/src-ghc92/Compat/HieAst.hs b/hie-compat/src-ghc92/Compat/HieAst.hs index f72b1283de..dffa7bc78f 100644 --- a/hie-compat/src-ghc92/Compat/HieAst.hs +++ b/hie-compat/src-ghc92/Compat/HieAst.hs @@ -72,7 +72,7 @@ import qualified Data.Array as A import qualified Data.ByteString as BS import qualified Data.Map as M import qualified Data.Set as S -import Data.Data ( Data, Typeable ) +import Data.Data ( Data ) import Data.Void ( Void, absurd ) import Control.Monad ( forM_ ) import Control.Monad.Trans.State.Strict @@ -469,7 +469,7 @@ data PScoped a = PS (Maybe Span) Scope -- ^ use site of the pattern Scope -- ^ pattern to the right of a, not including a a - deriving (Typeable, Data) -- Pattern Scope + deriving (Data) -- Pattern Scope {- Note [TyVar Scopes] ~~~~~~~~~~~~~~~~~~~ diff --git a/hls-graph/hls-graph.cabal b/hls-graph/hls-graph.cabal index 72adcc3cd1..18480293fd 100644 --- a/hls-graph/hls-graph.cabal +++ b/hls-graph/hls-graph.cabal @@ -1,6 +1,6 @@ cabal-version: 2.4 name: hls-graph -version: 2.8.0.0 +version: 2.10.0.0 synopsis: Haskell Language Server internal graph API description: Please see the README on GitHub at diff --git a/hls-graph/src/Control/Concurrent/STM/Stats.hs b/hls-graph/src/Control/Concurrent/STM/Stats.hs index 3b7c28b013..a6e7d0459b 100644 --- a/hls-graph/src/Control/Concurrent/STM/Stats.hs +++ b/hls-graph/src/Control/Concurrent/STM/Stats.hs @@ -20,7 +20,6 @@ import Control.Monad import Data.IORef import qualified Data.Map.Strict as M import Data.Time (getCurrentTime) -import Data.Typeable (Typeable) import GHC.Conc (unsafeIOToSTM) import System.IO import System.IO.Unsafe @@ -151,7 +150,6 @@ trackSTMConf (TrackSTMConf {..}) name txm = do -- 'BlockedIndefinitelyOnNamedSTM', carrying the name of the transaction and -- thus giving more helpful error messages. newtype BlockedIndefinitelyOnNamedSTM = BlockedIndefinitelyOnNamedSTM String - deriving (Typeable) instance Show BlockedIndefinitelyOnNamedSTM where showsPrec _ (BlockedIndefinitelyOnNamedSTM name) = diff --git a/hls-graph/src/Development/IDE/Graph/Internal/Types.hs b/hls-graph/src/Development/IDE/Graph/Internal/Types.hs index c70cf6ff1c..34bed42391 100644 --- a/hls-graph/src/Development/IDE/Graph/Internal/Types.hs +++ b/hls-graph/src/Development/IDE/Graph/Internal/Types.hs @@ -227,7 +227,7 @@ data GraphException = forall e. 
Exception e => GraphException { stack :: [String], -- ^ The stack of keys that led to this exception inner :: e -- ^ The underlying exception } - deriving (Typeable, Exception) + deriving (Exception) instance Show GraphException where show GraphException{..} = unlines $ @@ -249,7 +249,7 @@ instance Show Stack where show (Stack kk _) = "Stack: " <> intercalate " -> " (map show kk) newtype StackException = StackException Stack - deriving (Typeable, Show) + deriving (Show) instance Exception StackException where fromException = fromGraphException diff --git a/hls-graph/test/Example.hs b/hls-graph/test/Example.hs index c6a74e90a6..c20ea79328 100644 --- a/hls-graph/test/Example.hs +++ b/hls-graph/test/Example.hs @@ -38,7 +38,7 @@ ruleBool = addRule $ \Rule _old _mode -> do data CondRule = CondRule - deriving (Eq, Generic, Hashable, NFData, Show, Typeable) + deriving (Eq, Generic, Hashable, NFData, Show) type instance RuleResult CondRule = Bool @@ -48,7 +48,7 @@ ruleCond mv = addRule $ \CondRule _old _mode -> do return $ RunResult ChangedRecomputeDiff "" r (return ()) data BranchedRule = BranchedRule - deriving (Eq, Generic, Hashable, NFData, Show, Typeable) + deriving (Eq, Generic, Hashable, NFData, Show) type instance RuleResult BranchedRule = Int ruleWithCond :: Rules () @@ -61,7 +61,7 @@ ruleWithCond = addRule $ \BranchedRule _old _mode -> do return $ RunResult ChangedRecomputeDiff "" (2 :: Int) (return ()) data SubBranchRule = SubBranchRule - deriving (Eq, Generic, Hashable, NFData, Show, Typeable) + deriving (Eq, Generic, Hashable, NFData, Show) type instance RuleResult SubBranchRule = Int ruleSubBranch :: C.MVar Int -> Rules () @@ -70,5 +70,5 @@ ruleSubBranch mv = addRule $ \SubBranchRule _old _mode -> do return $ RunResult ChangedRecomputeDiff "" r (return ()) data CountRule = CountRule - deriving (Eq, Generic, Hashable, NFData, Show, Typeable) + deriving (Eq, Generic, Hashable, NFData, Show) type instance RuleResult CountRule = Int diff --git a/hls-plugin-api/hls-plugin-api.cabal b/hls-plugin-api/hls-plugin-api.cabal index 05d5a9ad1e..d543c435c2 100644 --- a/hls-plugin-api/hls-plugin-api.cabal +++ b/hls-plugin-api/hls-plugin-api.cabal @@ -1,6 +1,6 @@ cabal-version: 2.4 name: hls-plugin-api -version: 2.8.0.0 +version: 2.10.0.0 synopsis: Haskell Language Server API for plugin communication description: Please see the README on GitHub at @@ -66,7 +66,7 @@ library , filepath , ghc , hashable - , hls-graph == 2.8.0.0 + , hls-graph == 2.10.0.0 , lens , lens-aeson , lsp ^>=2.7 diff --git a/hls-plugin-api/src/Ide/Plugin/Config.hs b/hls-plugin-api/src/Ide/Plugin/Config.hs index 24c1b0c376..4fee92c309 100644 --- a/hls-plugin-api/src/Ide/Plugin/Config.hs +++ b/hls-plugin-api/src/Ide/Plugin/Config.hs @@ -63,19 +63,20 @@ parsePlugins (IdePlugins plugins) = A.withObject "Config.plugins" $ \o -> do -- --------------------------------------------------------------------- parsePluginConfig :: PluginConfig -> Value -> A.Parser PluginConfig -parsePluginConfig def = A.withObject "PluginConfig" $ \o -> PluginConfig +parsePluginConfig def = A.withObject "PluginConfig" $ \o -> PluginConfig <$> o .:? "globalOn" .!= plcGlobalOn def <*> o .:? "callHierarchyOn" .!= plcCallHierarchyOn def - <*> o .:? "semanticTokensOn" .!= plcSemanticTokensOn def <*> o .:? "codeActionsOn" .!= plcCodeActionsOn def <*> o .:? "codeLensOn" .!= plcCodeLensOn def + <*> o .:? "inlayHintsOn" .!= plcInlayHintsOn def <*> o .:? "diagnosticsOn" .!= plcDiagnosticsOn def -- AZ <*> o .:? "hoverOn" .!= plcHoverOn def <*> o .:? 
"symbolsOn" .!= plcSymbolsOn def <*> o .:? "completionOn" .!= plcCompletionOn def <*> o .:? "renameOn" .!= plcRenameOn def <*> o .:? "selectionRangeOn" .!= plcSelectionRangeOn def - <*> o .:? "foldingRangeOn" .!= plcFoldingRangeOn def + <*> o .:? "foldingRangeOn" .!= plcFoldingRangeOn def + <*> o .:? "semanticTokensOn" .!= plcSemanticTokensOn def <*> o .:? "config" .!= plcConfig def -- --------------------------------------------------------------------- diff --git a/hls-plugin-api/src/Ide/Plugin/ConfigUtils.hs b/hls-plugin-api/src/Ide/Plugin/ConfigUtils.hs index 1dbc97a202..8ee6110d29 100644 --- a/hls-plugin-api/src/Ide/Plugin/ConfigUtils.hs +++ b/hls-plugin-api/src/Ide/Plugin/ConfigUtils.hs @@ -88,6 +88,7 @@ pluginsToDefaultConfig IdePlugins {..} = handlersToGenericDefaultConfig PluginConfig{..} (IdeMethod m DSum.:=> _) = case m of SMethod_TextDocumentCodeAction -> ["codeActionsOn" A..= plcCodeActionsOn] SMethod_TextDocumentCodeLens -> ["codeLensOn" A..= plcCodeLensOn] + SMethod_TextDocumentInlayHint -> ["inlayHintsOn" A..= plcInlayHintsOn] SMethod_TextDocumentRename -> ["renameOn" A..= plcRenameOn] SMethod_TextDocumentHover -> ["hoverOn" A..= plcHoverOn] SMethod_TextDocumentDocumentSymbol -> ["symbolsOn" A..= plcSymbolsOn] @@ -120,6 +121,7 @@ pluginsToVSCodeExtensionSchema IdePlugins {..} = A.object $ mconcat $ singlePlug handlersToGenericSchema PluginConfig{..} (IdeMethod m DSum.:=> _) = case m of SMethod_TextDocumentCodeAction -> [toKey' "codeActionsOn" A..= schemaEntry "code actions" plcCodeActionsOn] SMethod_TextDocumentCodeLens -> [toKey' "codeLensOn" A..= schemaEntry "code lenses" plcCodeLensOn] + SMethod_TextDocumentInlayHint -> [toKey' "inlayHintsOn" A..= schemaEntry "inlay hints" plcInlayHintsOn] SMethod_TextDocumentRename -> [toKey' "renameOn" A..= schemaEntry "rename" plcRenameOn] SMethod_TextDocumentHover -> [toKey' "hoverOn" A..= schemaEntry "hover" plcHoverOn] SMethod_TextDocumentDocumentSymbol -> [toKey' "symbolsOn" A..= schemaEntry "symbols" plcSymbolsOn] diff --git a/hls-plugin-api/src/Ide/Plugin/RangeMap.hs b/hls-plugin-api/src/Ide/Plugin/RangeMap.hs index 7b1887a802..6c4b4041c9 100644 --- a/hls-plugin-api/src/Ide/Plugin/RangeMap.hs +++ b/hls-plugin-api/src/Ide/Plugin/RangeMap.hs @@ -13,6 +13,7 @@ module Ide.Plugin.RangeMap fromList, fromList', filterByRange, + elementsInRange, ) where import Development.IDE.Graph.Classes (NFData) @@ -67,6 +68,14 @@ filterByRange range = map snd . IM.dominators (rangeToInterval range) . unRangeM filterByRange range = map snd . filter (isSubrangeOf range . fst) . unRangeMap #endif +-- | Extracts all elements from a 'RangeMap' that fall within a given 'Range'. +elementsInRange :: Range -> RangeMap a -> [a] +#ifdef USE_FINGERTREE +elementsInRange range = map snd . IM.intersections (rangeToInterval range) . unRangeMap +#else +elementsInRange range = map snd . filter (flip isSubrangeOf range . fst) . unRangeMap +#endif + #ifdef USE_FINGERTREE -- NOTE(ozkutuk): In itself, this conversion is wrong. 
As Michael put it: -- "LSP Ranges have exclusive upper bounds, whereas the intervals here are diff --git a/hls-plugin-api/src/Ide/PluginUtils.hs b/hls-plugin-api/src/Ide/PluginUtils.hs index c5609065c3..e34d19f8b0 100644 --- a/hls-plugin-api/src/Ide/PluginUtils.hs +++ b/hls-plugin-api/src/Ide/PluginUtils.hs @@ -28,6 +28,7 @@ module Ide.PluginUtils allLspCmdIds', installSigUsr1Handler, subRange, + rangesOverlap, positionInRange, usePropertyLsp, -- * Escape @@ -277,6 +278,21 @@ fullRange s = Range startPos endPos subRange :: Range -> Range -> Bool subRange = isSubrangeOf + +-- | Check whether the two 'Range's overlap in any way. +-- +-- >>> rangesOverlap (mkRange 1 0 1 4) (mkRange 1 2 1 5) +-- True +-- >>> rangesOverlap (mkRange 1 2 1 5) (mkRange 1 0 1 4) +-- True +-- >>> rangesOverlap (mkRange 1 0 1 6) (mkRange 1 2 1 4) +-- True +-- >>> rangesOverlap (mkRange 1 2 1 4) (mkRange 1 0 1 6) +-- True +rangesOverlap :: Range -> Range -> Bool +rangesOverlap r1 r2 = + r1 ^. L.start <= r2 ^. L.end && r2 ^. L.start <= r1 ^. L.end + -- --------------------------------------------------------------------- allLspCmdIds' :: T.Text -> IdePlugins ideState -> [T.Text] diff --git a/hls-plugin-api/src/Ide/Types.hs b/hls-plugin-api/src/Ide/Types.hs index f786b6aac9..3a06656a77 100644 --- a/hls-plugin-api/src/Ide/Types.hs +++ b/hls-plugin-api/src/Ide/Types.hs @@ -26,12 +26,12 @@ module Ide.Types , ConfigDescriptor(..), defaultConfigDescriptor, configForPlugin , CustomConfig(..), mkCustomConfig , FallbackCodeActionParams(..) -, FormattingType(..), FormattingMethod, FormattingHandler, mkFormattingHandlers +, FormattingType(..), FormattingMethod, FormattingHandler , HasTracing(..) , PluginCommand(..), CommandId(..), CommandFunction, mkLspCommand, mkLspCmdId , PluginId(..) , PluginHandler(..), mkPluginHandler -, HandlerM, runHandlerM, pluginGetClientCapabilities, pluginGetVirtualFile, pluginGetVersionedTextDoc, pluginSendNotification, pluginSendRequest, pluginWithIndefiniteProgress +, HandlerM, runHandlerM, pluginGetClientCapabilities, pluginSendNotification, pluginSendRequest, pluginWithIndefiniteProgress , PluginHandlers(..) , PluginMethod(..) 
, PluginMethodHandler @@ -64,7 +64,6 @@ import Control.Lens (_Just, view, (.~), (?~), (^.), import Control.Monad (void) import Control.Monad.Error.Class (MonadError (throwError)) import Control.Monad.IO.Class (MonadIO) -import Control.Monad.Trans.Class (MonadTrans (lift)) import Control.Monad.Trans.Except (ExceptT, runExceptT) import Data.Aeson hiding (Null, defaultOptions) import qualified Data.Aeson.Types as A @@ -260,6 +259,7 @@ data PluginConfig = , plcCallHierarchyOn :: !Bool , plcCodeActionsOn :: !Bool , plcCodeLensOn :: !Bool + , plcInlayHintsOn :: !Bool , plcDiagnosticsOn :: !Bool , plcHoverOn :: !Bool , plcSymbolsOn :: !Bool @@ -277,6 +277,7 @@ instance Default PluginConfig where , plcCallHierarchyOn = True , plcCodeActionsOn = True , plcCodeLensOn = True + , plcInlayHintsOn = True , plcDiagnosticsOn = True , plcHoverOn = True , plcSymbolsOn = True @@ -289,12 +290,13 @@ instance Default PluginConfig where } instance ToJSON PluginConfig where - toJSON (PluginConfig g ch ca cl d h s c rn sr fr st cfg) = r + toJSON (PluginConfig g ch ca ih cl d h s c rn sr fr st cfg) = r where r = object [ "globalOn" .= g , "callHierarchyOn" .= ch , "codeActionsOn" .= ca , "codeLensOn" .= cl + , "inlayHintsOn" .= ih , "diagnosticsOn" .= d , "hoverOn" .= h , "symbolsOn" .= s @@ -501,6 +503,9 @@ instance PluginMethod Request Method_TextDocumentDefinition where instance PluginMethod Request Method_TextDocumentTypeDefinition where handlesRequest _ msgParams pluginDesc _ = pluginSupportsFileType msgParams pluginDesc +instance PluginMethod Request Method_TextDocumentImplementation where + handlesRequest _ msgParams pluginDesc _ = pluginSupportsFileType msgParams pluginDesc + instance PluginMethod Request Method_TextDocumentDocumentHighlight where handlesRequest _ msgParams pluginDesc _ = pluginSupportsFileType msgParams pluginDesc @@ -511,6 +516,12 @@ instance PluginMethod Request Method_WorkspaceSymbol where -- Unconditionally enabled, but should it really be? handlesRequest _ _ _ _ = HandlesRequest +instance PluginMethod Request Method_TextDocumentInlayHint where + handlesRequest = pluginEnabledWithFeature plcInlayHintsOn + +instance PluginMethod Request Method_InlayHintResolve where + handlesRequest = pluginEnabledResolve plcInlayHintsOn + instance PluginMethod Request Method_TextDocumentCodeLens where handlesRequest = pluginEnabledWithFeature plcCodeLensOn @@ -688,6 +699,11 @@ instance PluginRequestMethod Method_TextDocumentTypeDefinition where | Just (Just True) <- caps ^? (L.textDocument . _Just . L.typeDefinition . _Just . L.linkSupport) = foldl' mergeDefinitions x xs | otherwise = downgradeLinks $ foldl' mergeDefinitions x xs +instance PluginRequestMethod Method_TextDocumentImplementation where + combineResponses _ _ caps _ (x :| xs) + | Just (Just True) <- caps ^? (L.textDocument . _Just . L.implementation . _Just . L.linkSupport) = foldl' mergeDefinitions x xs + | otherwise = downgradeLinks $ foldl' mergeDefinitions x xs + instance PluginRequestMethod Method_TextDocumentDocumentHighlight where instance PluginRequestMethod Method_TextDocumentReferences where @@ -711,7 +727,7 @@ instance PluginRequestMethod Method_TextDocumentPrepareRename where combineResponses _ _ _ _ (x :| _) = x instance PluginRequestMethod Method_TextDocumentHover where - combineResponses _ _ _ _ (mapMaybe nullToMaybe . toList -> hs :: [Hover]) = + combineResponses _ _ _ _ (mapMaybe nullToMaybe . 
toList -> (hs :: [Hover])) = if null hs then InR Null else InL $ Hover (InL mcontent) r @@ -810,6 +826,9 @@ instance PluginRequestMethod Method_TextDocumentSemanticTokensFull where instance PluginRequestMethod Method_TextDocumentSemanticTokensFullDelta where combineResponses _ _ _ _ (x :| _) = x +instance PluginRequestMethod Method_TextDocumentInlayHint where + combineResponses _ _ _ _ x = sconcat x + takeLefts :: [a |? b] -> [a] takeLefts = mapMaybe (\x -> [res | (InL res) <- Just x]) @@ -899,29 +918,15 @@ instance GCompare IdeNotification where -- | Restricted version of 'LspM' specific to plugins. -- --- We plan to use this monad for running plugins instead of 'LspM', since there --- are parts of the LSP server state which plugins should not access directly, --- but instead only via the build system. Note that this restriction of the LSP --- server state has not yet been implemented. See 'pluginGetVirtualFile'. +-- We use this monad for running plugins instead of 'LspM', since there are +-- parts of the LSP server state which plugins should not access directly, but +-- instead only via the build system. newtype HandlerM config a = HandlerM { _runHandlerM :: LspM config a } deriving newtype (Applicative, Functor, Monad, MonadIO, MonadUnliftIO) runHandlerM :: HandlerM config a -> LspM config a runHandlerM = _runHandlerM --- | Wrapper of 'getVirtualFile' for HandlerM --- --- TODO: To be replaced by a lookup of the Shake build graph -pluginGetVirtualFile :: NormalizedUri -> HandlerM config (Maybe VirtualFile) -pluginGetVirtualFile uri = HandlerM $ getVirtualFile uri - --- | Version of 'getVersionedTextDoc' for HandlerM --- --- TODO: Should use 'pluginGetVirtualFile' instead of wrapping 'getVersionedTextDoc'. --- At the time of writing, 'getVersionedTextDoc' of the "lsp" package is implemented with 'getVirtualFile'. -pluginGetVersionedTextDoc :: TextDocumentIdentifier -> HandlerM config VersionedTextDocumentIdentifier -pluginGetVersionedTextDoc = HandlerM . getVersionedTextDoc - -- | Wrapper of 'getClientCapabilities' for HandlerM pluginGetClientCapabilities :: HandlerM config ClientCapabilities pluginGetClientCapabilities = HandlerM getClientCapabilities @@ -1183,31 +1188,8 @@ type FormattingHandler a -> FormattingOptions -> ExceptT PluginError (HandlerM Config) ([TextEdit] |? Null) -mkFormattingHandlers :: forall a. FormattingHandler a -> PluginHandlers a -mkFormattingHandlers f = mkPluginHandler SMethod_TextDocumentFormatting ( provider SMethod_TextDocumentFormatting) - <> mkPluginHandler SMethod_TextDocumentRangeFormatting (provider SMethod_TextDocumentRangeFormatting) - where - provider :: forall m. FormattingMethod m => SMethod m -> PluginMethodHandler a m - provider m ide _pid params - | Just nfp <- uriToNormalizedFilePath $ toNormalizedUri uri = do - mf <- lift $ pluginGetVirtualFile $ toNormalizedUri uri - case mf of - Just vf -> do - let (typ, mtoken) = case m of - SMethod_TextDocumentFormatting -> (FormatText, params ^. L.workDoneToken) - SMethod_TextDocumentRangeFormatting -> (FormatRange (params ^. L.range), params ^. L.workDoneToken) - _ -> Prelude.error "mkFormattingHandlers: impossible" - f ide mtoken typ (virtualFileText vf) nfp opts - Nothing -> throwError $ PluginInvalidParams $ T.pack $ "Formatter plugin: could not get file contents for " ++ show uri - - | otherwise = throwError $ PluginInvalidParams $ T.pack $ "Formatter plugin: uriToFilePath failed for: " ++ show uri - where - uri = params ^. L.textDocument . L.uri - opts = params ^. 
L.options - -- --------------------------------------------------------------------- - data FallbackCodeActionParams = FallbackCodeActionParams { fallbackWorkspaceEdit :: Maybe WorkspaceEdit diff --git a/hls-test-utils/hls-test-utils.cabal b/hls-test-utils/hls-test-utils.cabal index 299d869b7b..773f3401b5 100644 --- a/hls-test-utils/hls-test-utils.cabal +++ b/hls-test-utils/hls-test-utils.cabal @@ -1,6 +1,6 @@ cabal-version: 2.4 name: hls-test-utils -version: 2.8.0.0 +version: 2.10.0.0 synopsis: Utilities used in the tests of Haskell Language Server description: Please see the README on GitHub at @@ -43,8 +43,8 @@ library , directory , extra , filepath - , ghcide == 2.8.0.0 - , hls-plugin-api == 2.8.0.0 + , ghcide == 2.10.0.0 + , hls-plugin-api == 2.10.0.0 , lens , lsp , lsp-test ^>=0.17 diff --git a/hls-test-utils/src/Development/IDE/Test.hs b/hls-test-utils/src/Development/IDE/Test.hs index 285d91aadb..a1bd2dec0e 100644 --- a/hls-test-utils/src/Development/IDE/Test.hs +++ b/hls-test-utils/src/Development/IDE/Test.hs @@ -14,6 +14,8 @@ module Development.IDE.Test , diagnostic , expectDiagnostics , expectDiagnosticsWithTags + , ExpectedDiagnostic + , ExpectedDiagnosticWithTag , expectNoMoreDiagnostics , expectMessages , expectCurrentDiagnostics @@ -63,10 +65,13 @@ import System.FilePath (equalFilePath) import System.Time.Extra import Test.Tasty.HUnit +expectedDiagnosticWithNothing :: ExpectedDiagnostic -> ExpectedDiagnosticWithTag +expectedDiagnosticWithNothing (ds, c, t, code) = (ds, c, t, code, Nothing) + requireDiagnosticM :: (Foldable f, Show (f Diagnostic), HasCallStack) => f Diagnostic - -> (DiagnosticSeverity, Cursor, T.Text, Maybe DiagnosticTag) + -> ExpectedDiagnosticWithTag -> Assertion requireDiagnosticM actuals expected = case requireDiagnostic actuals expected of Nothing -> pure () @@ -114,25 +119,25 @@ flushMessages = do -- -- Rather than trying to assert the absence of diagnostics, introduce an -- expected diagnostic (e.g. a redundant import) and assert the singleton diagnostic. -expectDiagnostics :: HasCallStack => [(FilePath, [(DiagnosticSeverity, Cursor, T.Text)])] -> Session () +expectDiagnostics :: HasCallStack => [(FilePath, [ExpectedDiagnostic])] -> Session () expectDiagnostics = expectDiagnosticsWithTags - . map (second (map (\(ds, c, t) -> (ds, c, t, Nothing)))) + . map (second (map expectedDiagnosticWithNothing)) unwrapDiagnostic :: TServerMessage Method_TextDocumentPublishDiagnostics -> (Uri, [Diagnostic]) unwrapDiagnostic diagsNot = (diagsNot^. L.params . L.uri, diagsNot^. L.params . L.diagnostics) -expectDiagnosticsWithTags :: HasCallStack => [(String, [(DiagnosticSeverity, Cursor, T.Text, Maybe DiagnosticTag)])] -> Session () +expectDiagnosticsWithTags :: HasCallStack => [(String, [ExpectedDiagnosticWithTag])] -> Session () expectDiagnosticsWithTags expected = do - let f = getDocUri >=> liftIO . canonicalizeUri >=> pure . toNormalizedUri + let toSessionPath = getDocUri >=> liftIO . canonicalizeUri >=> pure . toNormalizedUri next = unwrapDiagnostic <$> skipManyTill anyMessage diagnostic - expected' <- Map.fromListWith (<>) <$> traverseOf (traverse . _1) f expected + expected' <- Map.fromListWith (<>) <$> traverseOf (traverse . 
_1) toSessionPath expected expectDiagnosticsWithTags' next expected' expectDiagnosticsWithTags' :: (HasCallStack, MonadIO m) => m (Uri, [Diagnostic]) -> - Map.Map NormalizedUri [(DiagnosticSeverity, Cursor, T.Text, Maybe DiagnosticTag)] -> + Map.Map NormalizedUri [ExpectedDiagnosticWithTag] -> m () expectDiagnosticsWithTags' next m | null m = do (_,actual) <- next @@ -170,14 +175,14 @@ expectDiagnosticsWithTags' next expected = go expected <> show actual go $ Map.delete canonUri m -expectCurrentDiagnostics :: HasCallStack => TextDocumentIdentifier -> [(DiagnosticSeverity, Cursor, T.Text)] -> Session () +expectCurrentDiagnostics :: HasCallStack => TextDocumentIdentifier -> [ExpectedDiagnostic] -> Session () expectCurrentDiagnostics doc expected = do diags <- getCurrentDiagnostics doc checkDiagnosticsForDoc doc expected diags -checkDiagnosticsForDoc :: HasCallStack => TextDocumentIdentifier -> [(DiagnosticSeverity, Cursor, T.Text)] -> [Diagnostic] -> Session () +checkDiagnosticsForDoc :: HasCallStack => TextDocumentIdentifier -> [ExpectedDiagnostic] -> [Diagnostic] -> Session () checkDiagnosticsForDoc TextDocumentIdentifier {_uri} expected obtained = do - let expected' = Map.singleton nuri (map (\(ds, c, t) -> (ds, c, t, Nothing)) expected) + let expected' = Map.singleton nuri (map expectedDiagnosticWithNothing expected) nuri = toNormalizedUri _uri expectDiagnosticsWithTags' (return (_uri, obtained)) expected' diff --git a/hls-test-utils/src/Development/IDE/Test/Diagnostic.hs b/hls-test-utils/src/Development/IDE/Test/Diagnostic.hs index 86c1b8bb9d..e64ab34876 100644 --- a/hls-test-utils/src/Development/IDE/Test/Diagnostic.hs +++ b/hls-test-utils/src/Development/IDE/Test/Diagnostic.hs @@ -1,7 +1,9 @@ +{-# LANGUAGE CPP #-} module Development.IDE.Test.Diagnostic where import Control.Lens ((^.)) import qualified Data.Text as T +import Development.IDE.GHC.Compat (GhcVersion (..), ghcVersion) import GHC.Stack (HasCallStack) import Language.LSP.Protocol.Lens import Language.LSP.Protocol.Types @@ -14,12 +16,41 @@ cursorPosition (line, col) = Position line col type ErrorMsg = String + +-- | Expected diagnostics have the following components: +-- +-- 1. severity +-- 2. cursor (line and column numbers) +-- 3. infix of the message +-- 4. code (e.g. GHC-87543) +type ExpectedDiagnostic = + ( DiagnosticSeverity + , Cursor + , T.Text + , Maybe T.Text + ) + +-- | Expected diagnostics with a tag have the following components: +-- +-- 1. severity +-- 2. cursor (line and column numbers) +-- 3. infix of the message +-- 4. code (e.g. GHC-87543) +-- 5. tag (unnecessary or deprecated) +type ExpectedDiagnosticWithTag = + ( DiagnosticSeverity + , Cursor + , T.Text + , Maybe T.Text + , Maybe DiagnosticTag + ) + requireDiagnostic :: (Foldable f, Show (f Diagnostic), HasCallStack) => f Diagnostic - -> (DiagnosticSeverity, Cursor, T.Text, Maybe DiagnosticTag) + -> ExpectedDiagnosticWithTag -> Maybe ErrorMsg -requireDiagnostic actuals expected@(severity, cursor, expectedMsg, expectedTag) +requireDiagnostic actuals expected@(severity, cursor, expectedMsg, mbExpectedCode, expectedTag) | any match actuals = Nothing | otherwise = Just $ "Could not find " <> show expected <> @@ -32,6 +63,15 @@ requireDiagnostic actuals expected@(severity, cursor, expectedMsg, expectedTag) && standardizeQuotes (T.toLower expectedMsg) `T.isInfixOf` standardizeQuotes (T.toLower $ d ^. message) && hasTag expectedTag (d ^. 
tags) + && codeMatches d + + codeMatches d + | ghcVersion >= GHC96 = + case (mbExpectedCode, _code d) of + (Nothing, _) -> True + (Just expectedCode, Nothing) -> False + (Just expectedCode, Just actualCode) -> InR expectedCode == actualCode + | otherwise = True hasTag :: Maybe DiagnosticTag -> Maybe [DiagnosticTag] -> Bool hasTag Nothing _ = True diff --git a/hls-test-utils/src/Test/Hls.hs b/hls-test-utils/src/Test/Hls.hs index 479f1b04d6..1193b2dd19 100644 --- a/hls-test-utils/src/Test/Hls.hs +++ b/hls-test-utils/src/Test/Hls.hs @@ -39,6 +39,10 @@ module Test.Hls -- * Helpful re-exports PluginDescriptor, IdeState, + -- * Helpers for expected test case failuers + BrokenBehavior(..), + ExpectBroken(..), + unCurrent, -- * Assertion helper functions waitForProgressDone, waitForAllProgressDone, @@ -61,7 +65,9 @@ module Test.Hls WithPriority(..), Recorder, Priority(..), - TestConfig(..), + captureKickDiagnostics, + kick, + TestConfig(..) ) where @@ -69,6 +75,7 @@ import Control.Applicative.Combinators import Control.Concurrent.Async (async, cancel, wait) import Control.Concurrent.Extra import Control.Exception.Safe +import Control.Lens ((^.)) import Control.Lens.Extras (is) import Control.Monad (guard, unless, void) import Control.Monad.Extra (forM) @@ -80,7 +87,7 @@ import qualified Data.Aeson as A import Data.ByteString.Lazy (ByteString) import Data.Default (Default, def) import qualified Data.Map as M -import Data.Maybe (fromMaybe) +import Data.Maybe (fromMaybe, mapMaybe) import Data.Proxy (Proxy (Proxy)) import qualified Data.Text as T import qualified Data.Text.Lazy as TL @@ -114,6 +121,7 @@ import Ide.PluginUtils (idePluginsToPluginDes pluginDescToIdePlugins) import Ide.Types import Language.LSP.Protocol.Capabilities +import qualified Language.LSP.Protocol.Lens as L import Language.LSP.Protocol.Message import qualified Language.LSP.Protocol.Message as LSP import Language.LSP.Protocol.Types hiding (Null) @@ -162,6 +170,15 @@ instance Pretty LogTestHarness where LogCleanup -> "Cleaned up temporary directory" LogNoCleanup -> "No cleanup of temporary directory" +data BrokenBehavior = Current | Ideal + +data ExpectBroken (k :: BrokenBehavior) a where + BrokenCurrent :: a -> ExpectBroken 'Current a + BrokenIdeal :: a -> ExpectBroken 'Ideal a + +unCurrent :: ExpectBroken 'Current a -> a +unCurrent (BrokenCurrent a) = a + -- | Run 'defaultMainWithRerun', limiting each single test case running at most 10 minutes defaultTestRunner :: TestTree -> IO () defaultTestRunner = defaultMainWithRerun . adjustOption (const $ mkTimeout 600000000) @@ -231,14 +248,14 @@ goldenWithTestConfig :: Pretty b => TestConfig b -> TestName - -> FilePath + -> VirtualFileTree -> FilePath -> FilePath -> FilePath -> (TextDocumentIdentifier -> Session ()) -> TestTree -goldenWithTestConfig config title testDataDir path desc ext act = - goldenGitDiff title (testDataDir path <.> desc <.> ext) +goldenWithTestConfig config title tree path desc ext act = + goldenGitDiff title (vftOriginalRoot tree path <.> desc <.> ext) $ runSessionWithTestConfig config $ const $ TL.encodeUtf8 . TL.fromStrict <$> do @@ -869,6 +886,17 @@ setHlsConfig config = do -- requests! 
skipManyTill anyMessage (void configurationRequest) +captureKickDiagnostics :: Session () -> Session () -> Session [Diagnostic] +captureKickDiagnostics start done = do + _ <- skipManyTill anyMessage start + messages <- manyTill anyMessage done + pure $ concat $ mapMaybe diagnostics messages + where + diagnostics :: FromServerMessage' a -> Maybe [Diagnostic] + diagnostics = \msg -> case msg of + FromServerMess SMethod_TextDocumentPublishDiagnostics diags -> Just (diags ^. L.params . L.diagnostics) + _ -> Nothing + waitForKickDone :: Session () waitForKickDone = void $ skipManyTill anyMessage nonTrivialKickDone @@ -881,6 +909,7 @@ nonTrivialKickDone = kick (Proxy @"kick/done") >>= guard . not . null nonTrivialKickStart :: Session () nonTrivialKickStart = kick (Proxy @"kick/start") >>= guard . not . null + kick :: KnownSymbol k => Proxy k -> Session [FilePath] kick proxyMsg = do NotMess TNotificationMessage{_params} <- customNotification proxyMsg diff --git a/hls-test-utils/src/Test/Hls/Util.hs b/hls-test-utils/src/Test/Hls/Util.hs index eaba6c595b..98c795f8e0 100644 --- a/hls-test-utils/src/Test/Hls/Util.hs +++ b/hls-test-utils/src/Test/Hls/Util.hs @@ -7,6 +7,7 @@ module Test.Hls.Util ( -- * Test Capabilities codeActionResolveCaps , codeActionNoResolveCaps + , codeActionNoInlayHintsCaps , codeActionSupportCaps , expectCodeAction -- * Environment specifications @@ -35,6 +36,7 @@ module Test.Hls.Util , inspectCodeAction , inspectCommand , inspectDiagnostic + , inspectDiagnosticAny , waitForDiagnosticsFrom , waitForDiagnosticsFromSource , waitForDiagnosticsFromSourceWithTimeout @@ -107,6 +109,12 @@ codeActionNoResolveCaps :: ClientCapabilities codeActionNoResolveCaps = Test.fullLatestClientCaps & (L.textDocument . _Just . L.codeAction . _Just . L.resolveSupport) .~ Nothing & (L.textDocument . _Just . L.codeAction . _Just . L.dataSupport . _Just) .~ False + +codeActionNoInlayHintsCaps :: ClientCapabilities +codeActionNoInlayHintsCaps = Test.fullLatestClientCaps + & (L.textDocument . _Just . L.codeAction . _Just . L.resolveSupport) .~ Nothing + & (L.textDocument . _Just . L.codeAction . _Just . L.dataSupport . _Just) .~ False + & (L.textDocument . _Just . L.inlayHint) .~ Nothing -- --------------------------------------------------------------------- -- Environment specification for ignoring tests -- --------------------------------------------------------------------- @@ -240,6 +248,10 @@ inspectDiagnostic :: [Diagnostic] -> [T.Text] -> IO Diagnostic inspectDiagnostic diags s = onMatch diags (\ca -> all (`T.isInfixOf` (ca ^. L.message)) s) err where err = "expected diagnostic matching '" ++ show s ++ "' but did not find one" +inspectDiagnosticAny :: [Diagnostic] -> [T.Text] -> IO Diagnostic +inspectDiagnosticAny diags s = onMatch diags (\ca -> any (`T.isInfixOf` (ca ^. 
L.message)) s) err + where err = "expected diagnostic matching one of'" ++ show s ++ "' but did not find one" + expectDiagnostic :: [Diagnostic] -> [T.Text] -> IO () expectDiagnostic diags s = void $ inspectDiagnostic diags s diff --git a/plugins/hls-alternate-number-format-plugin/src/Ide/Plugin/Literals.hs b/plugins/hls-alternate-number-format-plugin/src/Ide/Plugin/Literals.hs index 3b463509c7..c26227d933 100644 --- a/plugins/hls-alternate-number-format-plugin/src/Ide/Plugin/Literals.hs +++ b/plugins/hls-alternate-number-format-plugin/src/Ide/Plugin/Literals.hs @@ -68,13 +68,8 @@ getPattern (L (locA -> (RealSrcSpan patSpan _)) pat) = case pat of HsInt _ val -> fromIntegralLit patSpan val HsRat _ val _ -> fromFractionalLit patSpan val _ -> Nothing -#if __GLASGOW_HASKELL__ == 902 - NPat _ (L (RealSrcSpan sSpan _) overLit) _ _ -> fromOverLit overLit sSpan - NPlusKPat _ _ (L (RealSrcSpan sSpan _) overLit1) _ _ _ -> fromOverLit overLit1 sSpan -#else NPat _ (L (locA -> (RealSrcSpan sSpan _)) overLit) _ _ -> fromOverLit overLit sSpan NPlusKPat _ _ (L (locA -> (RealSrcSpan sSpan _)) overLit1) _ _ _ -> fromOverLit overLit1 sSpan -#endif _ -> Nothing getPattern _ = Nothing diff --git a/plugins/hls-cabal-fmt-plugin/src/Ide/Plugin/CabalFmt.hs b/plugins/hls-cabal-fmt-plugin/src/Ide/Plugin/CabalFmt.hs index 1af405e124..8c49f379d7 100644 --- a/plugins/hls-cabal-fmt-plugin/src/Ide/Plugin/CabalFmt.hs +++ b/plugins/hls-cabal-fmt-plugin/src/Ide/Plugin/CabalFmt.hs @@ -6,22 +6,23 @@ module Ide.Plugin.CabalFmt where import Control.Lens -import Control.Monad.Except (throwError) +import Control.Monad.Except (throwError) import Control.Monad.IO.Class -import qualified Data.Text as T -import Development.IDE hiding (pluginHandlers) -import Ide.Plugin.Error (PluginError (PluginInternalError, PluginInvalidParams)) +import qualified Data.Text as T +import Development.IDE hiding (pluginHandlers) +import Development.IDE.Core.PluginUtils (mkFormattingHandlers) +import Ide.Plugin.Error (PluginError (PluginInternalError, PluginInvalidParams)) import Ide.Plugin.Properties import Ide.PluginUtils import Ide.Types -import qualified Language.LSP.Protocol.Lens as L +import qualified Language.LSP.Protocol.Lens as L import Language.LSP.Protocol.Types -import Prelude hiding (log) +import Prelude hiding (log) import System.Directory import System.Exit import System.FilePath import System.Process.ListLike -import qualified System.Process.Text as Process +import qualified System.Process.Text as Process data Log = LogProcessInvocationFailure Int @@ -64,7 +65,7 @@ provider recorder _ _ _ (FormatRange _) _ _ _ = do throwError $ PluginInvalidParams "You cannot format a text-range using cabal-fmt." provider recorder plId ideState _ FormatText contents nfp opts = do let cabalFmtArgs = [ "--indent", show tabularSize] - cabalFmtExePath <- fmap T.unpack $ liftIO $ runAction "cabal-gild" ideState $ usePropertyAction #path plId properties + cabalFmtExePath <- fmap T.unpack $ liftIO $ runAction "cabal-fmt" ideState $ usePropertyAction #path plId properties x <- liftIO $ findExecutable cabalFmtExePath case x of Just _ -> do @@ -85,7 +86,7 @@ provider recorder plId ideState _ FormatText contents nfp opts = do pure $ InL fmtDiff Nothing -> do log Error $ LogFormatterBinNotFound cabalFmtExePath - throwError (PluginInternalError "No installation of cabal-gild could be found. Please install it globally, or provide the full path to the executable") + throwError (PluginInternalError "No installation of cabal-fmt could be found. 
Please install it globally, or provide the full path to the executable") where fp = fromNormalizedFilePath nfp tabularSize = opts ^. L.tabSize diff --git a/plugins/hls-cabal-fmt-plugin/test/Main.hs b/plugins/hls-cabal-fmt-plugin/test/Main.hs index 5069a9d153..0e458b2163 100644 --- a/plugins/hls-cabal-fmt-plugin/test/Main.hs +++ b/plugins/hls-cabal-fmt-plugin/test/Main.hs @@ -1,14 +1,26 @@ {-# LANGUAGE CPP #-} +{-# LANGUAGE LambdaCase #-} {-# LANGUAGE OverloadedStrings #-} module Main ( main ) where +import Ide.Logger +import qualified Ide.Plugin.Cabal as Cabal import qualified Ide.Plugin.CabalFmt as CabalFmt import System.Directory (findExecutable) import System.FilePath import Test.Hls +data TestLog + = LogCabalFmt CabalFmt.Log + | LogCabal Cabal.Log + +instance Pretty TestLog where + pretty = \case + LogCabalFmt msg -> pretty msg + LogCabal msg -> pretty msg + data CabalFmtFound = Found | NotFound isTestIsolated :: Bool @@ -30,8 +42,11 @@ main = do foundCabalFmt <- isCabalFmtFound defaultTestRunner (tests foundCabalFmt) -cabalFmtPlugin :: PluginTestDescriptor CabalFmt.Log -cabalFmtPlugin = mkPluginTestDescriptor CabalFmt.descriptor "cabal-fmt" +cabalFmtPlugin :: PluginTestDescriptor TestLog +cabalFmtPlugin = mconcat + [ mkPluginTestDescriptor (CabalFmt.descriptor . cmapWithPrio LogCabalFmt) "cabal-fmt" + , mkPluginTestDescriptor (Cabal.descriptor . cmapWithPrio LogCabal) "cabal" + ] tests :: CabalFmtFound -> TestTree tests found = testGroup "cabal-fmt" @@ -39,8 +54,9 @@ tests found = testGroup "cabal-fmt" cabalFmtGolden found "formats a simple document" "simple_testdata" "formatted_document" $ \doc -> do formatDoc doc (FormattingOptions 2 True Nothing Nothing Nothing) - , expectFailBecause "cabal-fmt can't expand modules if .cabal file is read from stdin. Tracking issue: https://github.com/phadej/cabal-fmt/pull/82" $ - cabalFmtGolden found "formats a document with expand:src comment" "commented_testdata" "formatted_document" $ \doc -> do + -- TODO: cabal-fmt can't expand modules if .cabal file is read from stdin. 
Tracking + -- issue: https://github.com/phadej/cabal-fmt/pull/82 + , cabalFmtGolden found "formats a document with expand:src comment" "commented_testdata" "formatted_document" $ \doc -> do formatDoc doc (FormattingOptions 2 True Nothing Nothing Nothing) , cabalFmtGolden found "formats a document with lib information" "lib_testdata" "formatted_document" $ \doc -> do diff --git a/plugins/hls-cabal-fmt-plugin/test/testdata/commented_testdata.formatted_document.cabal b/plugins/hls-cabal-fmt-plugin/test/testdata/commented_testdata.formatted_document.cabal index 28f8e040cf..933669a483 100644 --- a/plugins/hls-cabal-fmt-plugin/test/testdata/commented_testdata.formatted_document.cabal +++ b/plugins/hls-cabal-fmt-plugin/test/testdata/commented_testdata.formatted_document.cabal @@ -6,10 +6,7 @@ extra-source-files: CHANGELOG.md library -- cabal-fmt: expand src - exposed-modules: - MyLib - MyOtherLib - + exposed-modules: MyLib build-depends: base ^>=4.14.1.0 hs-source-dirs: src default-language: Haskell2010 diff --git a/plugins/hls-cabal-gild-plugin/src/Ide/Plugin/CabalGild.hs b/plugins/hls-cabal-gild-plugin/src/Ide/Plugin/CabalGild.hs index d0b220e6d0..1d698d637b 100644 --- a/plugins/hls-cabal-gild-plugin/src/Ide/Plugin/CabalGild.hs +++ b/plugins/hls-cabal-gild-plugin/src/Ide/Plugin/CabalGild.hs @@ -5,21 +5,22 @@ module Ide.Plugin.CabalGild where -import Control.Monad.Except (throwError) +import Control.Monad.Except (throwError) import Control.Monad.IO.Class -import qualified Data.Text as T -import Development.IDE hiding (pluginHandlers) -import Ide.Plugin.Error (PluginError (PluginInternalError, PluginInvalidParams)) +import qualified Data.Text as T +import Development.IDE hiding (pluginHandlers) +import Development.IDE.Core.PluginUtils (mkFormattingHandlers) +import Ide.Plugin.Error (PluginError (PluginInternalError, PluginInvalidParams)) import Ide.Plugin.Properties import Ide.PluginUtils import Ide.Types import Language.LSP.Protocol.Types -import Prelude hiding (log) +import Prelude hiding (log) import System.Directory import System.Exit import System.FilePath import System.Process.ListLike -import qualified System.Process.Text as Process +import qualified System.Process.Text as Process data Log = LogProcessInvocationFailure Int T.Text diff --git a/plugins/hls-cabal-gild-plugin/test/Main.hs b/plugins/hls-cabal-gild-plugin/test/Main.hs index 5bf519c69a..5aa5ba9fba 100644 --- a/plugins/hls-cabal-gild-plugin/test/Main.hs +++ b/plugins/hls-cabal-gild-plugin/test/Main.hs @@ -1,14 +1,26 @@ {-# LANGUAGE CPP #-} +{-# LANGUAGE LambdaCase #-} {-# LANGUAGE OverloadedStrings #-} module Main ( main ) where +import Ide.Logger +import qualified Ide.Plugin.Cabal as Cabal import qualified Ide.Plugin.CabalGild as CabalGild import System.Directory (findExecutable) import System.FilePath import Test.Hls +data TestLog + = LogCabalGild CabalGild.Log + | LogCabal Cabal.Log + +instance Pretty TestLog where + pretty = \case + LogCabalGild msg -> pretty msg + LogCabal msg -> pretty msg + data CabalGildFound = Found | NotFound isTestIsolated :: Bool @@ -30,8 +42,11 @@ main = do foundCabalFmt <- isCabalFmtFound defaultTestRunner (tests foundCabalFmt) -cabalGildPlugin :: PluginTestDescriptor CabalGild.Log -cabalGildPlugin = mkPluginTestDescriptor CabalGild.descriptor "cabal-gild" +cabalGildPlugin :: PluginTestDescriptor TestLog +cabalGildPlugin = mconcat + [ mkPluginTestDescriptor (CabalGild.descriptor . cmapWithPrio LogCabalGild) "cabal-gild" + , mkPluginTestDescriptor (Cabal.descriptor . 
cmapWithPrio LogCabal) "cabal" + ] tests :: CabalGildFound -> TestTree tests found = testGroup "cabal-gild" diff --git a/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal.hs b/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal.hs index eb9fed55d7..9a56467f3f 100644 --- a/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal.hs +++ b/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal.hs @@ -4,46 +4,70 @@ {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE TypeFamilies #-} -module Ide.Plugin.Cabal (descriptor, Log (..)) where +module Ide.Plugin.Cabal (descriptor, haskellInteractionDescriptor, Log (..)) where import Control.Concurrent.Strict import Control.DeepSeq -import Control.Lens ((^.)) +import Control.Lens ((^.)) import Control.Monad.Extra import Control.Monad.IO.Class -import Control.Monad.Trans.Class -import Control.Monad.Trans.Maybe (runMaybeT) -import qualified Data.ByteString as BS +import Control.Monad.Trans.Class (lift) +import Control.Monad.Trans.Maybe (runMaybeT) +import qualified Data.ByteString as BS import Data.Hashable -import Data.HashMap.Strict (HashMap) -import qualified Data.HashMap.Strict as HashMap -import qualified Data.List.NonEmpty as NE -import qualified Data.Text.Encoding as Encoding -import Data.Typeable -import Development.IDE as D -import Development.IDE.Core.Shake (restartShakeSession) -import qualified Development.IDE.Core.Shake as Shake -import Development.IDE.Graph (Key, alwaysRerun) -import qualified Development.IDE.Plugin.Completions.Logic as Ghcide -import qualified Development.IDE.Plugin.Completions.Types as Ghcide -import Development.IDE.Types.Shake (toKey) -import qualified Distribution.Fields as Syntax -import qualified Distribution.Parsec.Position as Syntax +import Data.HashMap.Strict (HashMap) +import qualified Data.HashMap.Strict as HashMap +import qualified Data.List as List +import qualified Data.List.NonEmpty as NE +import qualified Data.Maybe as Maybe +import Data.Proxy +import qualified Data.Text () +import qualified Data.Text as T +import qualified Data.Text.Encoding as Encoding +import Data.Text.Utf16.Rope.Mixed as Rope +import Development.IDE as D +import Development.IDE.Core.FileStore (getVersionedTextDoc) +import Development.IDE.Core.PluginUtils +import Development.IDE.Core.Shake (restartShakeSession) +import qualified Development.IDE.Core.Shake as Shake +import Development.IDE.Graph (Key, + alwaysRerun) +import Development.IDE.LSP.HoverDefinition (foundHover) +import qualified Development.IDE.Plugin.Completions.Logic as Ghcide +import Development.IDE.Types.Shake (toKey) +import qualified Distribution.CabalSpecVersion as Cabal +import qualified Distribution.Fields as Syntax +import Distribution.Package (Dependency) +import Distribution.PackageDescription (allBuildDepends, + depPkgName, + unPackageName) +import Distribution.PackageDescription.Configuration (flattenPackageDescription) +import Distribution.Parsec.Error +import qualified Distribution.Parsec.Position as Syntax import GHC.Generics -import qualified Ide.Plugin.Cabal.Completion.Completer.Types as CompleterTypes -import qualified Ide.Plugin.Cabal.Completion.Completions as Completions -import Ide.Plugin.Cabal.Completion.Types (ParseCabalFields (..), - ParseCabalFile (..)) -import qualified Ide.Plugin.Cabal.Completion.Types as Types -import qualified Ide.Plugin.Cabal.Diagnostics as Diagnostics -import qualified Ide.Plugin.Cabal.LicenseSuggest as LicenseSuggest -import Ide.Plugin.Cabal.Orphans () -import qualified Ide.Plugin.Cabal.Parse as Parse +import qualified Ide.Plugin.Cabal.CabalAdd as CabalAdd 
+import Ide.Plugin.Cabal.Completion.CabalFields as CabalFields +import qualified Ide.Plugin.Cabal.Completion.Completer.Types as CompleterTypes +import qualified Ide.Plugin.Cabal.Completion.Completions as Completions +import qualified Ide.Plugin.Cabal.Completion.Data as Data +import Ide.Plugin.Cabal.Completion.Types (ParseCabalCommonSections (ParseCabalCommonSections), + ParseCabalFields (..), + ParseCabalFile (..)) +import qualified Ide.Plugin.Cabal.Completion.Types as Types +import Ide.Plugin.Cabal.Definition (gotoDefinition) +import qualified Ide.Plugin.Cabal.Diagnostics as Diagnostics +import qualified Ide.Plugin.Cabal.FieldSuggest as FieldSuggest +import qualified Ide.Plugin.Cabal.LicenseSuggest as LicenseSuggest +import Ide.Plugin.Cabal.Orphans () +import Ide.Plugin.Cabal.Outline +import qualified Ide.Plugin.Cabal.Parse as Parse +import Ide.Plugin.Error import Ide.Types -import qualified Language.LSP.Protocol.Lens as JL -import qualified Language.LSP.Protocol.Message as LSP +import qualified Language.LSP.Protocol.Lens as JL +import qualified Language.LSP.Protocol.Message as LSP import Language.LSP.Protocol.Types -import qualified Language.LSP.VFS as VFS +import qualified Language.LSP.VFS as VFS +import Text.Regex.TDFA data Log = LogModificationTime NormalizedFilePath FileVersion @@ -55,6 +79,7 @@ data Log | LogFOI (HashMap NormalizedFilePath FileOfInterestStatus) | LogCompletionContext Types.Context Position | LogCompletions Types.Log + | LogCabalAdd CabalAdd.Log deriving (Show) instance Pretty Log where @@ -78,6 +103,25 @@ instance Pretty Log where <+> "for cursor position:" <+> pretty position LogCompletions logs -> pretty logs + LogCabalAdd logs -> pretty logs + +-- | Some actions with cabal files originate from haskell files. +-- This descriptor allows to hook into the diagnostics of haskell source files, and +-- allows us to provide code actions and commands that interact with `.cabal` files. 
+haskellInteractionDescriptor :: Recorder (WithPriority Log) -> PluginId -> PluginDescriptor IdeState +haskellInteractionDescriptor recorder plId = + (defaultPluginDescriptor plId "Provides the cabal-add code action in haskell files") + { pluginHandlers = + mconcat + [ mkPluginHandler LSP.SMethod_TextDocumentCodeAction cabalAddCodeAction + ] + , pluginCommands = [PluginCommand CabalAdd.cabalAddCommand "add a dependency to a cabal file" (CabalAdd.command cabalAddRecorder)] + , pluginRules = pure () + , pluginNotificationHandlers = mempty + } + where + cabalAddRecorder = cmapWithPrio LogCabalAdd recorder + descriptor :: Recorder (WithPriority Log) -> PluginId -> PluginDescriptor IdeState descriptor recorder plId = @@ -87,6 +131,10 @@ descriptor recorder plId = mconcat [ mkPluginHandler LSP.SMethod_TextDocumentCodeAction licenseSuggestCodeAction , mkPluginHandler LSP.SMethod_TextDocumentCompletion $ completion recorder + , mkPluginHandler LSP.SMethod_TextDocumentDocumentSymbol moduleOutline + , mkPluginHandler LSP.SMethod_TextDocumentCodeAction $ fieldSuggestCodeAction recorder + , mkPluginHandler LSP.SMethod_TextDocumentDefinition gotoDefinition + , mkPluginHandler LSP.SMethod_TextDocumentHover hover ] , pluginNotificationHandlers = mconcat @@ -160,7 +208,7 @@ cabalRules recorder plId = do log' Debug $ LogModificationTime file t contents <- case mCabalSource of Just sources -> - pure $ Encoding.encodeUtf8 sources + pure $ Encoding.encodeUtf8 $ Rope.toText sources Nothing -> do liftIO $ BS.readFile $ fromNormalizedFilePath file @@ -170,6 +218,14 @@ cabalRules recorder plId = do Right fields -> pure ([], Just fields) + define (cmapWithPrio LogShake recorder) $ \ParseCabalCommonSections file -> do + fields <- use_ ParseCabalFields file + let commonSections = Maybe.mapMaybe (\case + commonSection@(Syntax.Section (Syntax.Name _ "common") _ _) -> Just commonSection + _ -> Nothing) + fields + pure ([], Just commonSections) + define (cmapWithPrio LogShake recorder) $ \ParseCabalFile file -> do config <- getPluginConfigAction plId if not (plcGlobalOn config && plcDiagnosticsOn config) @@ -181,7 +237,7 @@ cabalRules recorder plId = do log' Debug $ LogModificationTime file t contents <- case mCabalSource of Just sources -> - pure $ Encoding.encodeUtf8 sources + pure $ Encoding.encodeUtf8 $ Rope.toText sources Nothing -> do liftIO $ BS.readFile $ fromNormalizedFilePath file @@ -193,7 +249,38 @@ cabalRules recorder plId = do let warningDiags = fmap (Diagnostics.warningDiagnostic file) pWarnings case pm of Left (_cabalVersion, pErrorNE) -> do - let errorDiags = NE.toList $ NE.map (Diagnostics.errorDiagnostic file) pErrorNE + let regexUnknownCabalBefore310 :: T.Text + -- We don't support the cabal version, this should not be an error, as the + -- user did not do anything wrong. Instead we cast it to a warning + regexUnknownCabalBefore310 = "Unsupported cabal-version [0-9]+.[0-9]*" + regexUnknownCabalVersion :: T.Text + regexUnknownCabalVersion = "Unsupported cabal format version in cabal-version field: [0-9]+.[0-9]+" + unsupportedCabalHelpText = unlines + [ "The used `cabal-version` is not fully supported by this `HLS` binary." + , "Either the `cabal-version` is unknown, or too new for this executable." + , "This means that some functionality might not work as expected." + , "If you face any issues, try downgrading to a supported `cabal-version` or upgrading `HLS` if possible." 
+ , "" + , "Supported versions are: " <> + List.intercalate ", " + (fmap Cabal.showCabalSpecVersion Data.supportedCabalVersions) + ] + errorDiags = + NE.toList $ + NE.map + ( \pe@(PError pos text) -> + if any (text =~) + [ regexUnknownCabalBefore310 + , regexUnknownCabalVersion + ] + then Diagnostics.warningDiagnostic file (Syntax.PWarning Syntax.PWTOther pos $ + unlines + [ text + , unsupportedCabalHelpText + ]) + else Diagnostics.errorDiagnostic file pe + ) + pErrorNE allDiags = errorDiags <> warningDiags pure (allDiags, Nothing) Right gpd -> do @@ -217,7 +304,7 @@ function invocation. kick :: Action () kick = do files <- HashMap.keys <$> getCabalFilesOfInterestUntracked - void $ uses Types.ParseCabalFile files + Shake.runWithSignal (Proxy @"kick/start/cabal") (Proxy @"kick/done/cabal") files Types.ParseCabalFile -- ---------------------------------------------------------------- -- Code Actions @@ -228,6 +315,117 @@ licenseSuggestCodeAction ideState _ (CodeActionParams _ _ (TextDocumentIdentifie maxCompls <- fmap maxCompletions . liftIO $ runAction "cabal-plugin.suggestLicense" ideState getClientConfigAction pure $ InL $ diags >>= (fmap InR . LicenseSuggest.licenseErrorAction maxCompls uri) +-- | CodeActions for correcting field names with typos in them. +-- +-- Provides CodeActions that fix typos in both stanzas and top-level field names. +-- The suggestions are computed based on the completion context, where we "move" a fake cursor +-- to the end of the field name and trigger cabal file completions. The completions are then +-- suggested to the user. +-- +-- TODO: Relying on completions here often does not produce the desired results, we should +-- use some sort of fuzzy matching in the future, see issue #4357. +fieldSuggestCodeAction :: Recorder (WithPriority Log) -> PluginMethodHandler IdeState 'LSP.Method_TextDocumentCodeAction +fieldSuggestCodeAction recorder ide _ (CodeActionParams _ _ (TextDocumentIdentifier uri) _ CodeActionContext{_diagnostics=diags}) = do + mContents <- liftIO $ runAction "cabal-plugin.getUriContents" ide $ getUriContents $ toNormalizedUri uri + case (,) <$> mContents <*> uriToFilePath' uri of + Nothing -> pure $ InL [] + Just (fileContents, path) -> do + -- We decide on `useWithStale` here, since `useWithStaleFast` often leads to the wrong completions being suggested. + -- In case it fails, we still will get some completion results instead of an error. + mFields <- liftIO $ runAction "cabal-plugin.fields" ide $ useWithStale ParseCabalFields $ toNormalizedFilePath path + case mFields of + Nothing -> + pure $ InL [] + Just (cabalFields, _) -> do + let fields = Maybe.mapMaybe FieldSuggest.fieldErrorName diags + results <- forM fields (getSuggestion fileContents path cabalFields) + pure $ InL $ map InR $ concat results + where + getSuggestion fileContents fp cabalFields (fieldName,Diagnostic{ _range=_range@(Range (Position lineNr col) _) }) = do + let -- Compute where we would anticipate the cursor to be. + fakeLspCursorPosition = Position lineNr (col + fromIntegral (T.length fieldName)) + lspPrefixInfo = Ghcide.getCompletionPrefixFromRope fakeLspCursorPosition fileContents + cabalPrefixInfo = Completions.getCabalPrefixInfo fp lspPrefixInfo + completions <- liftIO $ computeCompletionsAt recorder ide cabalPrefixInfo fp cabalFields + let completionTexts = fmap (^. 
JL.label) completions + pure $ FieldSuggest.fieldErrorAction uri fieldName completionTexts _range + +cabalAddCodeAction :: PluginMethodHandler IdeState 'LSP.Method_TextDocumentCodeAction +cabalAddCodeAction state plId (CodeActionParams _ _ (TextDocumentIdentifier uri) _ CodeActionContext{_diagnostics=diags}) = do + maxCompls <- fmap maxCompletions . liftIO $ runAction "cabal.cabal-add" state getClientConfigAction + let suggestions = take maxCompls $ concatMap CabalAdd.hiddenPackageSuggestion diags + case suggestions of + [] -> pure $ InL [] + _ -> + case uriToFilePath uri of + Nothing -> pure $ InL [] + Just haskellFilePath -> do + mbCabalFile <- liftIO $ CabalAdd.findResponsibleCabalFile haskellFilePath + case mbCabalFile of + Nothing -> pure $ InL [] + Just cabalFilePath -> do + verTxtDocId <- runActionE "cabalAdd.getVersionedTextDoc" state $ + lift $ getVersionedTextDoc $ TextDocumentIdentifier (filePathToUri cabalFilePath) + mbGPD <- liftIO $ runAction "cabal.cabal-add" state $ useWithStale ParseCabalFile $ toNormalizedFilePath cabalFilePath + case mbGPD of + Nothing -> pure $ InL [] + Just (gpd, _) -> do + actions <- liftIO $ CabalAdd.addDependencySuggestCodeAction plId verTxtDocId + suggestions + haskellFilePath cabalFilePath + gpd + pure $ InL $ fmap InR actions + +-- | Handler for hover messages. +-- +-- Provides a Handler for displaying message on hover. +-- If found that the filtered hover message is a dependency, +-- adds a Documentation link. +hover :: PluginMethodHandler IdeState LSP.Method_TextDocumentHover +hover ide _ msgParam = do + nfp <- getNormalizedFilePathE uri + cabalFields <- runActionE "cabal.cabal-hover" ide $ useE ParseCabalFields nfp + case CabalFields.findTextWord cursor cabalFields of + Nothing -> + pure $ InR Null + Just cursorText -> do + gpd <- runActionE "cabal.GPD" ide $ useE ParseCabalFile nfp + let depsNames = map dependencyName $ allBuildDepends $ flattenPackageDescription gpd + case filterVersion cursorText of + Nothing -> pure $ InR Null + Just txt -> + if txt `elem` depsNames + then pure $ foundHover (Nothing, [txt <> "\n", documentationText txt]) + else pure $ InR Null + where + cursor = Types.lspPositionToCabalPosition (msgParam ^. JL.position) + uri = msgParam ^. JL.textDocument . JL.uri + + dependencyName :: Dependency -> T.Text + dependencyName dep = T.pack $ unPackageName $ depPkgName dep + + -- | Removes version requirements like + -- `==1.0.0.0`, `>= 2.1.1` that could be included in + -- hover message. Assumes that the dependency consists + -- of alphanums with dashes in between. Ends with an alphanum. 
+ -- + -- Examples: + -- >>> filterVersion "imp-deps>=2.1.1" + -- "imp-deps" + filterVersion :: T.Text -> Maybe T.Text + filterVersion msg = getMatch (msg =~ regex) + where + regex :: T.Text + regex = "([a-zA-Z0-9-]*[a-zA-Z0-9])" + + getMatch :: (T.Text, T.Text, T.Text, [T.Text]) -> Maybe T.Text + getMatch (_, _, _, [dependency]) = Just dependency + getMatch (_, _, _, _) = Nothing -- impossible case + + documentationText :: T.Text -> T.Text + documentationText package = "[Documentation](https://hackage.haskell.org/package/" <> package <> ")" + + -- ---------------------------------------------------------------- -- Cabal file of Interest rules and global variable -- ---------------------------------------------------------------- @@ -244,14 +442,14 @@ newtype OfInterestCabalVar = OfInterestCabalVar (Var (HashMap NormalizedFilePath instance Shake.IsIdeGlobal OfInterestCabalVar data IsCabalFileOfInterest = IsCabalFileOfInterest - deriving (Eq, Show, Typeable, Generic) + deriving (Eq, Show, Generic) instance Hashable IsCabalFileOfInterest instance NFData IsCabalFileOfInterest type instance RuleResult IsCabalFileOfInterest = CabalFileOfInterestResult data CabalFileOfInterestResult = NotCabalFOI | IsCabalFOI FileOfInterestStatus - deriving (Eq, Show, Typeable, Generic) + deriving (Eq, Show, Generic) instance Hashable CabalFileOfInterestResult instance NFData CabalFileOfInterestResult @@ -309,10 +507,10 @@ deleteFileOfInterest recorder state f = do completion :: Recorder (WithPriority Log) -> PluginMethodHandler IdeState 'LSP.Method_TextDocumentCompletion completion recorder ide _ complParams = do - let (TextDocumentIdentifier uri) = complParams ^. JL.textDocument + let TextDocumentIdentifier uri = complParams ^. JL.textDocument position = complParams ^. JL.position - mVf <- lift $ pluginGetVirtualFile $ toNormalizedUri uri - case (,) <$> mVf <*> uriToFilePath' uri of + mContents <- liftIO $ runAction "cabal-plugin.getUriContents" ide $ getUriContents $ toNormalizedUri uri + case (,) <$> mContents <*> uriToFilePath' uri of Just (cnts, path) -> do -- We decide on `useWithStale` here, since `useWithStaleFast` often leads to the wrong completions being suggested. -- In case it fails, we still will get some completion results instead of an error. @@ -321,36 +519,36 @@ completion recorder ide _ complParams = do Nothing -> pure . InR $ InR Null Just (fields, _) -> do - let pref = Ghcide.getCompletionPrefix position cnts - let res = produceCompletions pref path fields + let lspPrefInfo = Ghcide.getCompletionPrefixFromRope position cnts + cabalPrefInfo = Completions.getCabalPrefixInfo path lspPrefInfo + let res = computeCompletionsAt recorder ide cabalPrefInfo path fields liftIO $ fmap InL res Nothing -> pure . InR $ InR Null - where - completerRecorder = cmapWithPrio LogCompletions recorder - - produceCompletions :: Ghcide.PosPrefixInfo -> FilePath -> [Syntax.Field Syntax.Position] -> IO [CompletionItem] - produceCompletions prefix fp fields = do - runMaybeT (context fields) >>= \case - Nothing -> pure [] - Just ctx -> do - logWith recorder Debug $ LogCompletionContext ctx pos - let completer = Completions.contextToCompleter ctx - let completerData = CompleterTypes.CompleterData - { getLatestGPD = do - -- We decide on useWithStaleFast here, since we mostly care about the file's meta information, - -- thus, a quick response gives us the desired result most of the time. - -- The `withStale` option is very important here, since we often call this rule with invalid cabal files. 
- mGPD <- runIdeAction "cabal-plugin.modulesCompleter.gpd" (shakeExtras ide) $ useWithStaleFast ParseCabalFile $ toNormalizedFilePath fp - pure $ fmap fst mGPD - , cabalPrefixInfo = prefInfo - , stanzaName = - case fst ctx of - Types.Stanza _ name -> name - _ -> Nothing - } - completions <- completer completerRecorder completerData - pure completions - where - pos = Ghcide.cursorPos prefix + +computeCompletionsAt :: Recorder (WithPriority Log) -> IdeState -> Types.CabalPrefixInfo -> FilePath -> [Syntax.Field Syntax.Position] -> IO [CompletionItem] +computeCompletionsAt recorder ide prefInfo fp fields = do + runMaybeT (context fields) >>= \case + Nothing -> pure [] + Just ctx -> do + logWith recorder Debug $ LogCompletionContext ctx pos + let completer = Completions.contextToCompleter ctx + let completerData = CompleterTypes.CompleterData + { getLatestGPD = do + -- We decide on useWithStaleFast here, since we mostly care about the file's meta information, + -- thus, a quick response gives us the desired result most of the time. + -- The `withStale` option is very important here, since we often call this rule with invalid cabal files. + mGPD <- runAction "cabal-plugin.modulesCompleter.gpd" ide $ useWithStale ParseCabalFile $ toNormalizedFilePath fp + pure $ fmap fst mGPD + , getCabalCommonSections = runAction "cabal-plugin.commonSections" ide $ use ParseCabalCommonSections $ toNormalizedFilePath fp + , cabalPrefixInfo = prefInfo + , stanzaName = + case fst ctx of + Types.Stanza _ name -> name + _ -> Nothing + } + completions <- completer completerRecorder completerData + pure completions + where + pos = Types.completionCursorPosition prefInfo context fields = Completions.getContext completerRecorder prefInfo fields - prefInfo = Completions.getCabalPrefixInfo fp prefix + completerRecorder = cmapWithPrio LogCompletions recorder diff --git a/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/CabalAdd.hs b/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/CabalAdd.hs new file mode 100644 index 0000000000..3b46eec128 --- /dev/null +++ b/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/CabalAdd.hs @@ -0,0 +1,326 @@ +{-# LANGUAGE AllowAmbiguousTypes #-} +{-# LANGUAGE DeriveAnyClass #-} +{-# LANGUAGE DerivingStrategies #-} +{-# LANGUAGE ExplicitNamespaces #-} +{-# LANGUAGE LambdaCase #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE PartialTypeSignatures #-} +{-# LANGUAGE RecordWildCards #-} + +module Ide.Plugin.Cabal.CabalAdd +( findResponsibleCabalFile + , addDependencySuggestCodeAction + , hiddenPackageSuggestion + , cabalAddCommand + , command + , Log +) +where + +import Control.Monad (filterM, void) +import Control.Monad.IO.Class (MonadIO, liftIO) +import Control.Monad.Trans.Class (lift) +import Control.Monad.Trans.Except +import Data.Aeson.Types (FromJSON, + ToJSON, toJSON) +import Data.ByteString (ByteString) +import qualified Data.ByteString.Char8 as B +import Data.List.NonEmpty (NonEmpty (..), + fromList) +import Data.String (IsString) +import qualified Data.Text as T +import Data.Text.Encoding (encodeUtf8) +import qualified Data.Text.Encoding as T +import Data.Text.Utf16.Rope.Mixed as Rope +import Development.IDE (IdeState, + getFileContents, + useWithStale) +import Development.IDE.Core.Rules (runAction) +import Distribution.Client.Add as Add +import Distribution.Compat.Prelude (Generic) +import Distribution.PackageDescription (GenericPackageDescription, + packageDescription, + specVersion) +import Distribution.PackageDescription.Configuration (flattenPackageDescription) +import 
Distribution.PackageDescription.Quirks (patchQuirks) +import qualified Distribution.Pretty as Pretty +import Distribution.Simple.BuildTarget (BuildTarget, + buildTargetComponentName, + readBuildTargets) +import Distribution.Simple.Utils (safeHead) +import Distribution.Verbosity (silent, + verboseNoStderr) +import Ide.Logger +import Ide.Plugin.Cabal.Completion.Types (ParseCabalFields (..), + ParseCabalFile (..)) +import Ide.Plugin.Cabal.Orphans () +import Ide.Plugin.Error +import Ide.PluginUtils (WithDeletions (SkipDeletions), + diffText, + mkLspCommand) +import Ide.Types (CommandFunction, + CommandId (CommandId), + PluginId, + pluginGetClientCapabilities, + pluginSendRequest) +import Language.LSP.Protocol.Message (SMethod (SMethod_WorkspaceApplyEdit)) +import Language.LSP.Protocol.Types (ApplyWorkspaceEditParams (ApplyWorkspaceEditParams), + ClientCapabilities, + CodeAction (CodeAction), + CodeActionKind (CodeActionKind_QuickFix), + Diagnostic (..), + Null (Null), + VersionedTextDocumentIdentifier, + WorkspaceEdit, + toNormalizedFilePath, + type (|?) (InR)) +import System.Directory (doesFileExist, + listDirectory) +import System.FilePath (dropFileName, + makeRelative, + splitPath, + takeExtension, + (</>)) +import Text.PrettyPrint (render) +import Text.Regex.TDFA + +data Log + = LogFoundResponsibleCabalFile FilePath + | LogCalledCabalAddCommand CabalAddCommandParams + | LogCreatedEdit WorkspaceEdit + | LogExecutedCommand + deriving (Show) + +instance Pretty Log where + pretty = \case + LogFoundResponsibleCabalFile fp -> "Located the responsible cabal file at " <+> pretty fp + LogCalledCabalAddCommand params -> "Called CabalAdd command with:\n" <+> pretty params + LogCreatedEdit edit -> "Created inplace edit:\n" <+> pretty edit + LogExecutedCommand -> "Executed CabalAdd command" + +cabalAddCommand :: IsString p => p +cabalAddCommand = "cabalAdd" + +data CabalAddCommandParams = + CabalAddCommandParams { cabalPath :: FilePath + , verTxtDocId :: VersionedTextDocumentIdentifier + , buildTarget :: Maybe String + , dependency :: T.Text + , version :: Maybe T.Text + } + deriving (Generic, Show) + deriving anyclass (FromJSON, ToJSON) + +instance Pretty CabalAddCommandParams where + pretty CabalAddCommandParams{..} = + "CabalAdd parameters:" <+> vcat + [ "cabal path:" <+> pretty cabalPath + , "target:" <+> pretty buildTarget + , "dependency:" <+> pretty dependency + , "version:" <+> pretty version + ] + +-- | Creates a code action that calls the `cabalAddCommand`, +-- using dependency-version suggestion pairs as input. +-- +-- Returns a disabled action if no cabal files are given. +-- +-- Takes the haskell file and cabal file paths to create a relative path +-- to the haskell file, which is used to get a `BuildTarget`. +-- +-- In the current implementation the dependency is added to the main build target +-- that was found, but if there is ever a way to get all build targets from a file, +-- it will be possible to support adding to a build target of choice.
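+--
+-- A minimal call sketch (the file paths and the suggestion pair are illustrative):
+--
+-- > actions <- addDependencySuggestCodeAction plId verTxtDocId
+-- >              [("split", "0.2.5")]
+-- >              "src/Main.hs" "example.cabal"
+-- >              gpd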
+addDependencySuggestCodeAction + :: PluginId + -> VersionedTextDocumentIdentifier -- ^ Cabal's versioned text identifier + -> [(T.Text, T.Text)] -- ^ A dependency-version suggestion pairs + -> FilePath -- ^ Path to the haskell file (source of diagnostics) + -> FilePath -- ^ Path to the cabal file (that will be edited) + -> GenericPackageDescription + -> IO [CodeAction] +addDependencySuggestCodeAction plId verTxtDocId suggestions haskellFilePath cabalFilePath gpd = do + buildTargets <- liftIO $ getBuildTargets gpd cabalFilePath haskellFilePath + case buildTargets of + -- If there are no build targets found, run `cabal-add` command with default behaviour + [] -> pure $ mkCodeAction cabalFilePath Nothing <$> suggestions + -- Otherwise provide actions for all found targets + targets -> pure $ concat [mkCodeAction cabalFilePath (Just $ buildTargetToStringRepr target) <$> + suggestions | target <- targets] + where + -- | Note the use of `pretty` function. + -- It converts the `BuildTarget` to an acceptable string representation. + -- It will be used in as the input for `cabal-add`'s `executeConfig`. + buildTargetToStringRepr target = render $ Pretty.pretty $ buildTargetComponentName target + + -- | Gives the build targets that are used in the `CabalAdd`. + -- Note the unorthodox usage of `readBuildTargets`: + -- If the relative path to the haskell file is provided, + -- the `readBuildTargets` will return build targets, where this + -- module is mentioned (in exposed-modules or other-modules). + getBuildTargets :: GenericPackageDescription -> FilePath -> FilePath -> IO [BuildTarget] + getBuildTargets gpd cabalFilePath haskellFilePath = do + let haskellFileRelativePath = makeRelative (dropFileName cabalFilePath) haskellFilePath + readBuildTargets (verboseNoStderr silent) (flattenPackageDescription gpd) [haskellFileRelativePath] + + mkCodeAction :: FilePath -> Maybe String -> (T.Text, T.Text) -> CodeAction + mkCodeAction cabalFilePath target (suggestedDep, suggestedVersion) = + let + versionTitle = if T.null suggestedVersion then T.empty else "-" <> suggestedVersion + targetTitle = case target of + Nothing -> T.empty + Just t -> " at " <> T.pack t + title = "Add dependency " <> suggestedDep <> versionTitle <> targetTitle + version = if T.null suggestedVersion then Nothing else Just suggestedVersion + + params = CabalAddCommandParams {cabalPath = cabalFilePath + , verTxtDocId = verTxtDocId + , buildTarget = target + , dependency = suggestedDep + , version=version} + command = mkLspCommand plId (CommandId cabalAddCommand) "Add missing dependency" (Just [toJSON params]) + in CodeAction title (Just CodeActionKind_QuickFix) (Just []) Nothing Nothing Nothing (Just command) Nothing + +-- | Gives a mentioned number of @(dependency, version)@ pairs +-- found in the "hidden package" diagnostic message. +-- +-- For example, if a ghc error looks like this: +-- +-- > "Could not load module ‘Data.List.Split’ +-- > It is a member of the hidden package ‘split-0.2.5’. +-- > Perhaps you need to add ‘split’ to the build-depends in your .cabal file." +-- +-- or this if PackageImports extension is used: +-- +-- > "Could not find module ‘Data.List.Split’ +-- > Perhaps you meant +-- > Data.List.Split (needs flag -package-id split-0.2.5)" +-- +-- It extracts mentioned package names and version numbers. +-- In this example, it will be @[("split", "0.2.5")]@ +-- +-- Also supports messages without a version. +-- +-- > "Perhaps you need to add ‘split’ to the build-depends in your .cabal file." 
+-- +-- Will turn into @[("split", "")]@ +hiddenPackageSuggestion :: Diagnostic -> [(T.Text, T.Text)] +hiddenPackageSuggestion diag = getMatch (msg =~ regex) + where + msg :: T.Text + msg = _message diag + regex :: T.Text -- TODO: Support multiple packages suggestion + regex = + let regex' = "([a-zA-Z0-9-]*[a-zA-Z0-9])(-([0-9\\.]*))?" + in "It is a member of the hidden package [\8216']" <> regex' <> "[\8217']" + <> "|" + <> "needs flag -package-id " <> regex' + -- Have to do this matching because `Regex.TDFA` doesn't(?) support + -- not-capturing groups like (?:message) + getMatch :: (T.Text, T.Text, T.Text, [T.Text]) -> [(T.Text, T.Text)] + getMatch (_, _, _, []) = [] + getMatch (_, _, _, [dependency, _, cleanVersion, "", "", ""]) = [(dependency, cleanVersion)] + getMatch (_, _, _, ["", "", "", dependency, _, cleanVersion]) = [(dependency, cleanVersion)] + getMatch (_, _, _, _) = [] + +command :: Recorder (WithPriority Log) -> CommandFunction IdeState CabalAddCommandParams +command recorder state _ params@(CabalAddCommandParams {cabalPath = path, verTxtDocId = verTxtDocId, buildTarget = target, dependency = dep, version = mbVer}) = do + logWith recorder Debug $ LogCalledCabalAddCommand params + let specifiedDep = case mbVer of + Nothing -> dep + Just ver -> dep <> " ^>=" <> ver + caps <- lift pluginGetClientCapabilities + let env = (state, caps, verTxtDocId) + edit <- getDependencyEdit recorder env path target (fromList [T.unpack specifiedDep]) + void $ lift $ pluginSendRequest SMethod_WorkspaceApplyEdit (ApplyWorkspaceEditParams Nothing edit) (\_ -> pure ()) + logWith recorder Debug LogExecutedCommand + pure $ InR Null + +-- | Constructs prerequisites for the @executeConfig@ +-- and runs it, given path to the cabal file and a dependency message. +-- Given the new contents of the cabal file constructs and returns the @edit@. +-- Inspired by @main@ in cabal-add, +-- Distribution.Client.Main +getDependencyEdit :: MonadIO m => Recorder (WithPriority Log) -> (IdeState, ClientCapabilities, VersionedTextDocumentIdentifier) -> + FilePath -> Maybe String -> NonEmpty String -> ExceptT PluginError m WorkspaceEdit +getDependencyEdit recorder env cabalFilePath buildTarget dependency = do + let (state, caps, verTxtDocId) = env + (mbCnfOrigContents, mbFields, mbPackDescr) <- liftIO $ runAction "cabal.cabal-add" state $ do + contents <- getFileContents $ toNormalizedFilePath cabalFilePath + inFields <- useWithStale ParseCabalFields $ toNormalizedFilePath cabalFilePath + inPackDescr <- useWithStale ParseCabalFile $ toNormalizedFilePath cabalFilePath + let mbCnfOrigContents = case contents of + (Just txt) -> Just $ encodeUtf8 $ Rope.toText txt + _ -> Nothing + let mbFields = fst <$> inFields + let mbPackDescr = fst <$> inPackDescr + pure (mbCnfOrigContents, mbFields, mbPackDescr) + + -- Check if required info was received, + -- otherwise fall back on other options. 
+ (cnfOrigContents, fields, packDescr) <- do + cnfOrigContents <- case mbCnfOrigContents of + (Just cnfOrigContents) -> pure cnfOrigContents + Nothing -> readCabalFile cabalFilePath + (fields, packDescr) <- case (mbFields, mbPackDescr) of + (Just fields, Just packDescr) -> pure (fields, packDescr) + (_, _) -> case parseCabalFile cabalFilePath cnfOrigContents of + Left err -> throwE $ PluginInternalError $ T.pack err + Right (f ,gpd) -> pure (f, gpd) + pure (cnfOrigContents, fields, packDescr) + + let inputs = do + let rcnfComponent = buildTarget + let specVer = specVersion $ packageDescription packDescr + cmp <- resolveComponent cabalFilePath (fields, packDescr) rcnfComponent + deps <- traverse (validateDependency specVer) dependency + pure (fields, packDescr, cmp, deps) + + (cnfFields, origPackDescr, cnfComponent, cnfDependencies) <- case inputs of + Left err -> throwE $ PluginInternalError $ T.pack err + Right pair -> pure pair + + case executeConfig (validateChanges origPackDescr) (Config {..}) of + Nothing -> throwE $ PluginInternalError $ T.pack $ "Cannot extend build-depends in " ++ cabalFilePath + Just newContents -> do + let edit = diffText caps (verTxtDocId, T.decodeUtf8 cnfOrigContents) (T.decodeUtf8 newContents) SkipDeletions + logWith recorder Debug $ LogCreatedEdit edit + pure edit + +-- | Given a path to a haskell file, returns the closest cabal file. +-- If a package.yaml is present in the same directory as the .cabal file, returns Nothing, because adding a dependency to a generated cabal file +-- will break propagation of changes from package.yaml to cabal files in stack projects. +-- If no cabal file was found, gives Nothing. +findResponsibleCabalFile :: FilePath -> IO (Maybe FilePath) +findResponsibleCabalFile haskellFilePath = do + let dirPath = dropFileName haskellFilePath + allDirPaths = reverse $ scanl1 (</>) (splitPath dirPath) -- sorted from most to least specific + go allDirPaths + where + go [] = pure Nothing + go (path:ps) = do + objects <- listDirectory path + let objectsWithPaths = map (\obj -> path <> obj) objects + objectsCabalExtension = filter (\c -> takeExtension c == ".cabal") objectsWithPaths + cabalFiles <- filterM (\c -> doesFileExist c) objectsCabalExtension + case safeHead cabalFiles of + Nothing -> go ps + Just cabalFile -> guardAgainstHpack path cabalFile + where + guardAgainstHpack :: FilePath -> FilePath -> IO (Maybe FilePath) + guardAgainstHpack path cabalFile = do + exists <- doesFileExist $ path </> "package.yaml" + if exists then pure Nothing else pure $ Just cabalFile + +-- | Gives the cabal file's contents or throws an error. +-- Inspired by @readCabalFile@ in cabal-add, +-- Distribution.Client.Main +-- +-- This is a fallback option! +-- Use only if the `GetFileContents` fails. +readCabalFile :: MonadIO m => FilePath -> ExceptT PluginError m ByteString +readCabalFile fileName = do + cabalFileExists <- liftIO $ doesFileExist fileName + if cabalFileExists + then snd . 
patchQuirks <$> liftIO (B.readFile fileName) + else throwE $ PluginInternalError $ T.pack ("Failed to read cabal file at " <> fileName) diff --git a/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/Completion/CabalFields.hs b/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/Completion/CabalFields.hs new file mode 100644 index 0000000000..b8cb7ce0d6 --- /dev/null +++ b/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/Completion/CabalFields.hs @@ -0,0 +1,303 @@ +module Ide.Plugin.Cabal.Completion.CabalFields + ( findStanzaForColumn + , getModulesNames + , getFieldLSPRange + , findFieldSection + , findTextWord + , findFieldLine + , getOptionalSectionName + , getAnnotation + , getFieldName + , onelineSectionArgs + , getFieldEndPosition + , getSectionArgEndPosition + , getNameEndPosition + , getFieldLineEndPosition + ) + where + +import qualified Data.ByteString as BS +import Data.List (find) +import Data.List.Extra (groupSort) +import Data.List.NonEmpty (NonEmpty) +import qualified Data.List.NonEmpty as NE +import qualified Data.Text as T +import qualified Data.Text.Encoding as T +import Data.Tuple (swap) +import qualified Distribution.Fields as Syntax +import qualified Distribution.Parsec.Position as Syntax +import Ide.Plugin.Cabal.Completion.Types +import qualified Language.LSP.Protocol.Types as LSP + +-- ---------------------------------------------------------------- +-- Cabal-syntax utilities I don't really want to write myself +-- ---------------------------------------------------------------- + +-- | Determine the context of a cursor position within a stack of stanza contexts +-- +-- If the cursor is indented more than one of the stanzas in the stack +-- the respective stanza is returned if this is never the case, the toplevel stanza +-- in the stack is returned. +findStanzaForColumn :: Int -> NonEmpty (Int, StanzaContext) -> (StanzaContext, FieldContext) +findStanzaForColumn col ctx = case NE.uncons ctx of + ((_, stanza), Nothing) -> (stanza, None) + ((indentation, stanza), Just res) + | col < indentation -> findStanzaForColumn col res + | otherwise -> (stanza, None) + +-- | Determine the field the cursor is currently a part of. +-- +-- The result is said field and its starting position +-- or Nothing if the passed list of fields is empty. +-- +-- This only looks at the row of the cursor and not at the cursor's +-- position within the row. +-- +-- TODO: we do not handle braces correctly. Add more tests! +findFieldSection :: Syntax.Position -> [Syntax.Field Syntax.Position] -> Maybe (Syntax.Field Syntax.Position) +findFieldSection _cursor [] = Nothing +findFieldSection _cursor [x] = + -- Last field. We decide later, whether we are starting + -- a new section. + Just x +findFieldSection cursor (x:y:ys) + | Syntax.positionRow (getAnnotation x) <= cursorLine && cursorLine < Syntax.positionRow (getAnnotation y) + = Just x + | otherwise = findFieldSection cursor (y:ys) + where + cursorLine = Syntax.positionRow cursor + +-- | Determine the field line the cursor is currently a part of. +-- +-- The result is said field line and its starting position +-- or Nothing if the passed list of fields is empty. +-- +-- This function assumes that elements in a field's @FieldLine@ list +-- do not share the same row. 
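+--
+-- For example (cursor positions are illustrative), given the single parsed field
+--
+-- > build-depends: base, text
+--
+-- a cursor anywhere on that row resolves to the @FieldLine@ containing @base, text@,
+-- while a cursor on any other row yields @Nothing@.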
+findFieldLine :: Syntax.Position -> [Syntax.Field Syntax.Position] -> Maybe (Syntax.FieldLine Syntax.Position) +findFieldLine _cursor [] = Nothing +findFieldLine cursor fields = + case findFieldSection cursor fields of + Nothing -> Nothing + Just (Syntax.Field _ fieldLines) -> find filterLineFields fieldLines + Just (Syntax.Section _ _ fields) -> findFieldLine cursor fields + where + cursorLine = Syntax.positionRow cursor + -- In contrast to `Field` or `Section`, `FieldLine` must have the exact + -- same line position as the cursor. + filterLineFields (Syntax.FieldLine pos _) = Syntax.positionRow pos == cursorLine + +-- | Determine the exact word at the current cursor position. +-- +-- The result is said word or Nothing if the passed list is empty +-- or the cursor position is not next to, or on a word. +-- For this function, a word is a sequence of consecutive characters +-- that are not a space or column. +-- +-- This function currently only considers words inside of a @FieldLine@. +findTextWord :: Syntax.Position -> [Syntax.Field Syntax.Position] -> Maybe T.Text +findTextWord _cursor [] = Nothing +findTextWord cursor fields = + case findFieldLine cursor fields of + Nothing -> Nothing + Just (Syntax.FieldLine pos byteString) -> + let decodedText = T.decodeUtf8 byteString + lineFieldCol = Syntax.positionCol pos + lineFieldLen = T.length decodedText + offset = cursorCol - lineFieldCol in + -- Range check if cursor is inside or or next to found line. + -- The latter comparison includes the length of the line as offset, + -- which is done to also include cursors that are at the end of a line. + -- e.g. "foo,bar|" + -- ^ + -- cursor + -- + -- Having an offset which is outside of the line is possible because of `splitAt`. + if offset >= 0 && lineFieldLen >= offset + then + let (lhs, rhs) = T.splitAt offset decodedText + strippedLhs = T.takeWhileEnd isAllowedChar lhs + strippedRhs = T.takeWhile isAllowedChar rhs + resultText = T.concat [strippedLhs, strippedRhs] in + -- It could be possible that the cursor was in-between separators, in this + -- case the resulting text would be empty, which should result in `Nothing`. + -- e.g. " foo ,| bar" + -- ^ + -- cursor + if not $ T.null resultText then Just resultText else Nothing + else + Nothing + where + cursorCol = Syntax.positionCol cursor + separators = [',', ' '] + isAllowedChar = (`notElem` separators) + +type FieldName = T.Text + +getAnnotation :: Syntax.Field ann -> ann +getAnnotation (Syntax.Field (Syntax.Name ann _) _) = ann +getAnnotation (Syntax.Section (Syntax.Name ann _) _ _) = ann + +getFieldName :: Syntax.Field ann -> FieldName +getFieldName (Syntax.Field (Syntax.Name _ fn) _) = T.decodeUtf8 fn +getFieldName (Syntax.Section (Syntax.Name _ fn) _ _) = T.decodeUtf8 fn + +getFieldLineName :: Syntax.FieldLine ann -> FieldName +getFieldLineName (Syntax.FieldLine _ fn) = T.decodeUtf8 fn + +-- | Returns the name of a section if it has a name. +-- +-- This assumes that the given section args belong to named stanza +-- in which case the stanza name is returned. +getOptionalSectionName :: [Syntax.SectionArg ann] -> Maybe T.Text +getOptionalSectionName [] = Nothing +getOptionalSectionName (x:xs) = case x of + Syntax.SecArgName _ name -> Just (T.decodeUtf8 name) + _ -> getOptionalSectionName xs + +type BuildTargetName = T.Text +type ModuleName = T.Text + +-- | Given a cabal AST returns pairs of all respective target names +-- and the module name bound to them. 
If a target is a main library gives +-- @Nothing@, otherwise @Just target-name@ +-- +-- Examples of input cabal files and the outputs: +-- +-- * Target is a main library module: +-- +-- > library +-- > exposed-modules: +-- > MyLib +-- +-- * @getModulesNames@ output: +-- +-- > [([Nothing], "MyLib")] +-- +-- * Same module names in different targets: +-- +-- > test-suite first-target +-- > other-modules: +-- > Config +-- > test-suite second-target +-- > other-modules: +-- > Config +-- +-- * @getModulesNames@ output: +-- +-- > [([Just "first-target", Just "second-target"], "Config")] +getModulesNames :: [Syntax.Field any] -> [([Maybe BuildTargetName], ModuleName)] +getModulesNames fields = map swap $ groupSort rawModuleTargetPairs + where + rawModuleTargetPairs = concatMap getSectionModuleNames sections + sections = getSectionsWithModules fields + + getSectionModuleNames :: Syntax.Field any -> [(ModuleName, Maybe BuildTargetName)] + getSectionModuleNames (Syntax.Section _ secArgs fields) = map (, getArgsName secArgs) $ concatMap getFieldModuleNames fields + getSectionModuleNames _ = [] + + getArgsName [Syntax.SecArgName _ name] = Just $ T.decodeUtf8 name + getArgsName _ = Nothing -- Can be only a main library, that has no name + -- since it's impossible to have multiple names for a build target + + getFieldModuleNames field@(Syntax.Field _ modules) = if getFieldName field == T.pack "exposed-modules" || + getFieldName field == T.pack "other-modules" + then map getFieldLineName modules + else [] + getFieldModuleNames _ = [] + +-- | Trims a given cabal AST leaving only targets and their +-- @exposed-modules@ and @other-modules@ sections. +-- +-- For example: +-- +-- * Given a cabal file like this: +-- +-- > library +-- > import: extra +-- > hs-source-dirs: source/directory +-- > ... +-- > exposed-modules: +-- > Important.Exposed.Module +-- > other-modules: +-- > Important.Other.Module +-- > +-- > test-suite tests +-- > type: type +-- > build-tool-depends: tool +-- > other-modules: +-- > Important.Other.Module +-- +-- * @getSectionsWithModules@ gives output: +-- +-- > library +-- > exposed-modules: +-- > Important.Exposed.Module +-- > other-modules: +-- > Important.Other.Module +-- > test-suite tests +-- > other-modules: +-- > Important.Other.Module +getSectionsWithModules :: [Syntax.Field any] -> [Syntax.Field any] +getSectionsWithModules fields = concatMap go fields + where + go :: Syntax.Field any -> [Syntax.Field any] + go (Syntax.Field _ _) = [] + go section@(Syntax.Section _ _ fields) = concatMap onlySectionsWithModules (section:fields) + + onlySectionsWithModules :: Syntax.Field any -> [Syntax.Field any] + onlySectionsWithModules (Syntax.Field _ _) = [] + onlySectionsWithModules (Syntax.Section name secArgs fields) + | (not . null) newFields = [Syntax.Section name secArgs newFields] + | otherwise = [] + where newFields = filter subfieldHasModule fields + + subfieldHasModule :: Syntax.Field any -> Bool + subfieldHasModule field@(Syntax.Field _ _) = getFieldName field == T.pack "exposed-modules" || + getFieldName field == T.pack "other-modules" + subfieldHasModule (Syntax.Section _ _ _) = False + +-- | Makes a single text line out of multiple +-- @SectionArg@s. Allows to display conditions, +-- flags, etc in one line, which is easier to read. +-- +-- For example, @flag@ @(@ @pedantic@ @)@ will be joined in +-- one line, instead of four @SectionArg@s separately. 
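As a concrete illustration of the joining described above, a hand-built argument list for a conditional such as flag (pedantic) collapses into a single piece of Text. The positions are made up and the ByteString literals assume OverloadedStrings.

demoSectionArgs :: [Syntax.SectionArg Syntax.Position]
demoSectionArgs =
  [ Syntax.SecArgName  (Syntax.Position 1 6)  "flag"
  , Syntax.SecArgOther (Syntax.Position 1 11) "("
  , Syntax.SecArgName  (Syntax.Position 1 13) "pedantic"
  , Syntax.SecArgOther (Syntax.Position 1 22) ")"
  ]

-- The four section args are rendered and joined with single spaces.
demoJoined :: T.Text
demoJoined = onelineSectionArgs demoSectionArgs   -- "flag ( pedantic )"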
+onelineSectionArgs :: [Syntax.SectionArg ann] -> T.Text +onelineSectionArgs sectionArgs = joinedName + where + joinedName = T.unwords $ map getName sectionArgs + + getName :: Syntax.SectionArg ann -> T.Text + getName (Syntax.SecArgName _ identifier) = T.decodeUtf8 identifier + getName (Syntax.SecArgStr _ quotedString) = T.decodeUtf8 quotedString + getName (Syntax.SecArgOther _ string) = T.decodeUtf8 string + +-- | Returns the end position of a provided field +getFieldEndPosition :: Syntax.Field Syntax.Position -> Syntax.Position +getFieldEndPosition (Syntax.Field name []) = getNameEndPosition name +getFieldEndPosition (Syntax.Field _ (x:xs)) = getFieldLineEndPosition $ NE.last (x NE.:| xs) +getFieldEndPosition (Syntax.Section name [] []) = getNameEndPosition name +getFieldEndPosition (Syntax.Section _ (x:xs) []) = getSectionArgEndPosition $ NE.last (x NE.:| xs) +getFieldEndPosition (Syntax.Section _ _ (x:xs)) = getFieldEndPosition $ NE.last (x NE.:| xs) + +-- | Returns the end position of a provided section arg +getSectionArgEndPosition :: Syntax.SectionArg Syntax.Position -> Syntax.Position +getSectionArgEndPosition (Syntax.SecArgName (Syntax.Position row col) byteString) = Syntax.Position row (col + BS.length byteString) +getSectionArgEndPosition (Syntax.SecArgStr (Syntax.Position row col) byteString) = Syntax.Position row (col + BS.length byteString) +getSectionArgEndPosition (Syntax.SecArgOther (Syntax.Position row col) byteString) = Syntax.Position row (col + BS.length byteString) + +-- | Returns the end position of a provided name +getNameEndPosition :: Syntax.Name Syntax.Position -> Syntax.Position +getNameEndPosition (Syntax.Name (Syntax.Position row col) byteString) = Syntax.Position row (col + BS.length byteString) + +-- | Returns the end position of a provided field line +getFieldLineEndPosition :: Syntax.FieldLine Syntax.Position -> Syntax.Position +getFieldLineEndPosition (Syntax.FieldLine (Syntax.Position row col) byteString) = Syntax.Position row (col + BS.length byteString) + +-- | Returns an LSP compatible range for a provided field +getFieldLSPRange :: Syntax.Field Syntax.Position -> LSP.Range +getFieldLSPRange field = LSP.Range startLSPPos endLSPPos + where + startLSPPos = cabalPositionToLSPPosition $ getAnnotation field + endLSPPos = cabalPositionToLSPPosition $ getFieldEndPosition field diff --git a/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/Completion/Completer/Paths.hs b/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/Completion/Completer/Paths.hs index 5defdbbe63..0e1053453b 100644 --- a/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/Completion/Completer/Paths.hs +++ b/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/Completion/Completer/Paths.hs @@ -7,6 +7,7 @@ import Distribution.PackageDescription (Benchmark (..), BuildInfo (..), CondTree (condTreeData), Executable (..), + ForeignLib (..), GenericPackageDescription (..), Library (..), UnqualComponentName, @@ -118,6 +119,10 @@ sourceDirsExtractionTestSuite name gpd = extractRelativeDirsFromStanza name gpd sourceDirsExtractionBenchmark :: Maybe StanzaName -> GenericPackageDescription -> [FilePath] sourceDirsExtractionBenchmark name gpd = extractRelativeDirsFromStanza name gpd condBenchmarks benchmarkBuildInfo +-- | Extracts the source directories of foreign-lib stanza with the given name. 
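A small usage sketch: given the latest parsed GenericPackageDescription, the new extractor yields the hs-source-dirs of a named foreign-library stanza; the keyword map in Data.hs further below plugs it into modulesCompleter for the module completions of such stanzas. The stanza name used here is hypothetical and the Text literal assumes OverloadedStrings.

demoForeignLibDirs :: GenericPackageDescription -> [FilePath]
demoForeignLibDirs gpd = sourceDirsExtractionForeignLib (Just "my-foreign-lib") gpd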
+sourceDirsExtractionForeignLib :: Maybe StanzaName -> GenericPackageDescription -> [FilePath] +sourceDirsExtractionForeignLib name gpd = extractRelativeDirsFromStanza name gpd condForeignLibs foreignLibBuildInfo + {- | Takes a possible stanza name, a GenericPackageDescription, a function to access the stanza information we are interested in and a function to access the build info from the specific stanza. diff --git a/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/Completion/Completer/Simple.hs b/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/Completion/Completer/Simple.hs index 853b9f4b48..b097af5cd2 100644 --- a/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/Completion/Completer/Simple.hs +++ b/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/Completion/Completer/Simple.hs @@ -1,3 +1,4 @@ +{-# LANGUAGE LambdaCase #-} {-# LANGUAGE OverloadedStrings #-} module Ide.Plugin.Cabal.Completion.Completer.Simple where @@ -7,11 +8,14 @@ import Data.Function ((&)) import qualified Data.List as List import Data.Map (Map) import qualified Data.Map as Map -import Data.Maybe (fromMaybe) +import Data.Maybe (fromMaybe, + mapMaybe) import Data.Ord (Down (Down)) import qualified Data.Text as T +import qualified Distribution.Fields as Syntax import Ide.Logger (Priority (..), logWith) +import Ide.Plugin.Cabal.Completion.CabalFields import Ide.Plugin.Cabal.Completion.Completer.Types import Ide.Plugin.Cabal.Completion.Types (CabalPrefixInfo (..), Log) @@ -41,6 +45,22 @@ constantCompleter completions _ cData = do range = completionRange prefInfo pure $ map (mkSimpleCompletionItem range . Fuzzy.original) scored +-- | Completer to be used for import fields. +-- +-- TODO: Does not exclude imports, defined after the current cursor position +-- which are not allowed according to the cabal specification +importCompleter :: Completer +importCompleter l cData = do + cabalCommonsM <- getCabalCommonSections cData + case cabalCommonsM of + Just cabalCommons -> do + let commonNames = mapMaybe (\case + Syntax.Section (Syntax.Name _ "common") commonNames _ -> getOptionalSectionName commonNames + _ -> Nothing) + cabalCommons + constantCompleter commonNames l cData + Nothing -> noopCompleter l cData + -- | Completer to be used for the field @name:@ value. -- -- This is almost always the name of the cabal file. 
However, diff --git a/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/Completion/Completer/Types.hs b/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/Completion/Completer/Types.hs index 65b7343346..968b68919b 100644 --- a/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/Completion/Completer/Types.hs +++ b/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/Completion/Completer/Types.hs @@ -3,7 +3,9 @@ module Ide.Plugin.Cabal.Completion.Completer.Types where import Development.IDE as D +import qualified Distribution.Fields as Syntax import Distribution.PackageDescription (GenericPackageDescription) +import qualified Distribution.Parsec.Position as Syntax import Ide.Plugin.Cabal.Completion.Types import Language.LSP.Protocol.Types (CompletionItem) @@ -16,9 +18,11 @@ data CompleterData = CompleterData { -- | Access to the latest available generic package description for the handled cabal file, -- relevant for some completion actions which require the file's meta information -- such as the module completers which require access to source directories - getLatestGPD :: IO (Maybe GenericPackageDescription), + getLatestGPD :: IO (Maybe GenericPackageDescription), + -- | Access to the entries of the handled cabal file as parsed by ParseCabalFields + getCabalCommonSections :: IO (Maybe [Syntax.Field Syntax.Position]), -- | Prefix info to be used for constructing completion items - cabalPrefixInfo :: CabalPrefixInfo, + cabalPrefixInfo :: CabalPrefixInfo, -- | The name of the stanza in which the completer is applied - stanzaName :: Maybe StanzaName + stanzaName :: Maybe StanzaName } diff --git a/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/Completion/Completions.hs b/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/Completion/Completions.hs index 6b3f3c9e45..83e809fb0f 100644 --- a/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/Completion/Completions.hs +++ b/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/Completion/Completions.hs @@ -8,11 +8,11 @@ import Data.List.NonEmpty (NonEmpty) import qualified Data.List.NonEmpty as NE import qualified Data.Map as Map import qualified Data.Text as T -import qualified Data.Text.Encoding as T import Development.IDE as D import qualified Development.IDE.Plugin.Completions.Types as Ghcide import qualified Distribution.Fields as Syntax import qualified Distribution.Parsec.Position as Syntax +import Ide.Plugin.Cabal.Completion.CabalFields import Ide.Plugin.Cabal.Completion.Completer.Simple import Ide.Plugin.Cabal.Completion.Completer.Snippet import Ide.Plugin.Cabal.Completion.Completer.Types (Completer) @@ -140,6 +140,7 @@ findCursorContext cursor parentHistory prefixText fields = Just field@(Syntax.Field _ _) -> classifyFieldContext parentHistory cursor field Just section@(Syntax.Section _ args sectionFields) | inSameLineAsSectionName section -> (stanzaCtx, None) -- TODO: test whether keyword in same line is parsed correctly + | getFieldName section `elem` conditionalKeywords -> findCursorContext cursor parentHistory prefixText sectionFields -- Ignore if conditionals, they are not real sections | otherwise -> findCursorContext cursor (NE.cons (Syntax.positionCol (getAnnotation section) + 1, Stanza (getFieldName section) (getOptionalSectionName args)) parentHistory) @@ -147,6 +148,7 @@ findCursorContext cursor parentHistory prefixText fields = where inSameLineAsSectionName section = Syntax.positionRow (getAnnotation section) == Syntax.positionRow cursor stanzaCtx = snd $ NE.head parentHistory + conditionalKeywords = ["if", "elif", "else"] -- | Finds the cursor's context, where the 
cursor is already found to be in a specific field -- @@ -177,57 +179,3 @@ classifyFieldContext ctx cursor field cursorColumn = Syntax.positionCol cursor fieldColumn = Syntax.positionCol (getAnnotation field) - --- ---------------------------------------------------------------- --- Cabal-syntax utilities I don't really want to write myself --- ---------------------------------------------------------------- - --- | Determine the context of a cursor position within a stack of stanza contexts --- --- If the cursor is indented more than one of the stanzas in the stack --- the respective stanza is returned if this is never the case, the toplevel stanza --- in the stack is returned. -findStanzaForColumn :: Int -> NonEmpty (Int, StanzaContext) -> (StanzaContext, FieldContext) -findStanzaForColumn col ctx = case NE.uncons ctx of - ((_, stanza), Nothing) -> (stanza, None) - ((indentation, stanza), Just res) - | col < indentation -> findStanzaForColumn col res - | otherwise -> (stanza, None) - --- | Determine the field the cursor is currently a part of. --- --- The result is said field and its starting position --- or Nothing if the passed list of fields is empty. - --- This only looks at the row of the cursor and not at the cursor's --- position within the row. --- --- TODO: we do not handle braces correctly. Add more tests! -findFieldSection :: Syntax.Position -> [Syntax.Field Syntax.Position] -> Maybe (Syntax.Field Syntax.Position) -findFieldSection _cursor [] = Nothing -findFieldSection _cursor [x] = - -- Last field. We decide later, whether we are starting - -- a new section. - Just x -findFieldSection cursor (x:y:ys) - | Syntax.positionRow (getAnnotation x) <= cursorLine && cursorLine < Syntax.positionRow (getAnnotation y) - = Just x - | otherwise = findFieldSection cursor (y:ys) - where - cursorLine = Syntax.positionRow cursor - -type FieldName = T.Text - -getAnnotation :: Syntax.Field ann -> ann -getAnnotation (Syntax.Field (Syntax.Name ann _) _) = ann -getAnnotation (Syntax.Section (Syntax.Name ann _) _ _) = ann - -getFieldName :: Syntax.Field ann -> FieldName -getFieldName (Syntax.Field (Syntax.Name _ fn) _) = T.decodeUtf8 fn -getFieldName (Syntax.Section (Syntax.Name _ fn) _ _) = T.decodeUtf8 fn - -getOptionalSectionName :: [Syntax.SectionArg ann] -> Maybe T.Text -getOptionalSectionName [] = Nothing -getOptionalSectionName (x:xs) = case x of - Syntax.SecArgName _ name -> Just (T.decodeUtf8 name) - _ -> getOptionalSectionName xs diff --git a/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/Completion/Data.hs b/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/Completion/Data.hs index 143dfaadff..03e517eae2 100644 --- a/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/Completion/Data.hs +++ b/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/Completion/Data.hs @@ -1,7 +1,4 @@ {-# LANGUAGE OverloadedStrings #-} -{-# OPTIONS_GHC -Wno-unrecognised-pragmas #-} - -{-# HLINT ignore "Redundant bracket" #-} module Ide.Plugin.Cabal.Completion.Data where @@ -19,10 +16,24 @@ import Ide.Plugin.Cabal.Completion.Completer.Types (Completer) import Ide.Plugin.Cabal.Completion.Types import Ide.Plugin.Cabal.LicenseSuggest (licenseNames) +-- | Ad-hoc data type for modelling the available top-level stanzas. +-- Not intended right now for anything else but to avoid string +-- comparisons in 'stanzaKeywordMap' and 'libExecTestBenchCommons'. 
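The intent is roughly the mapping sketched below from stanza keyword to constructor. No such function exists in the plugin (the name is hypothetical); the keyword map further down simply passes the right constructor per stanza, so this is purely illustrative.

-- Hypothetical helper, for illustration only; relies on OverloadedStrings,
-- which this module already enables.
readTopLevelStanza :: T.Text -> Maybe TopLevelStanza
readTopLevelStanza "library"         = Just Library
readTopLevelStanza "executable"      = Just Executable
readTopLevelStanza "test-suite"      = Just TestSuite
readTopLevelStanza "benchmark"       = Just Benchmark
readTopLevelStanza "foreign-library" = Just ForeignLib
readTopLevelStanza "common"          = Just Common
readTopLevelStanza _                 = Nothing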
+data TopLevelStanza + = Library + | Executable + | TestSuite + | Benchmark + | ForeignLib + | Common + -- ---------------------------------------------------------------- -- Completion Data -- ---------------------------------------------------------------- +supportedCabalVersions :: [CabalSpecVersion] +supportedCabalVersions = [CabalSpecV2_2 .. maxBound] + -- | Keyword for cabal version; required to be the top line in a cabal file cabalVersionKeyword :: Map KeyWordName Completer cabalVersionKeyword = @@ -30,7 +41,7 @@ cabalVersionKeyword = constantCompleter $ -- We only suggest cabal versions newer than 2.2 -- since we don't recommend using older ones. - map (T.pack . showCabalSpecVersion) [CabalSpecV2_2 .. maxBound] + map (T.pack . showCabalSpecVersion) supportedCabalVersions -- | Top level keywords of a cabal file. -- @@ -68,12 +79,13 @@ cabalKeywords = stanzaKeywordMap :: Map StanzaType (Map KeyWordName Completer) stanzaKeywordMap = Map.fromList - [ ("library", libraryFields <> libExecTestBenchCommons), - ("executable", executableFields <> libExecTestBenchCommons), - ("test-suite", testSuiteFields <> libExecTestBenchCommons), - ("benchmark", benchmarkFields <> libExecTestBenchCommons), - ("foreign-library", foreignLibraryFields <> libExecTestBenchCommons), - ("common", libExecTestBenchCommons), + [ ("library", libraryFields <> libExecTestBenchCommons Library), + ("executable", executableFields <> libExecTestBenchCommons Executable), + ("test-suite", testSuiteFields <> libExecTestBenchCommons TestSuite), + ("benchmark", benchmarkFields <> libExecTestBenchCommons Benchmark), + ("foreign-library", foreignLibraryFields <> libExecTestBenchCommons ForeignLib), + ("common", libExecTestBenchCommons Library), + ("common", libExecTestBenchCommons Common), ("flag", flagFields), ("source-repository", sourceRepositoryFields) ] @@ -159,10 +171,11 @@ flagFields = ("lib-version-linux:", noopCompleter) ] -libExecTestBenchCommons :: Map KeyWordName Completer -libExecTestBenchCommons = +libExecTestBenchCommons :: TopLevelStanza -> Map KeyWordName Completer +libExecTestBenchCommons st = Map.fromList - [ ("build-depends:", noopCompleter), + [ ("import:", importCompleter), + ("build-depends:", noopCompleter), ("hs-source-dirs:", directoryCompleter), ("default-extensions:", noopCompleter), ("other-extensions:", noopCompleter), @@ -179,6 +192,8 @@ libExecTestBenchCommons = ("includes:", filePathCompleter), ("install-includes:", filePathCompleter), ("include-dirs:", directoryCompleter), + ("autogen-includes:", filePathCompleter), + ("autogen-modules:", moduleCompleterByTopLevelStanza), ("c-sources:", filePathCompleter), ("cxx-sources:", filePathCompleter), ("asm-sources:", filePathCompleter), @@ -199,6 +214,26 @@ libExecTestBenchCommons = ("extra-framework-dirs:", directoryCompleter), ("mixins:", noopCompleter) ] + where + -- + moduleCompleterByTopLevelStanza = case st of + Library -> modulesCompleter sourceDirsExtractionLibrary + Executable -> modulesCompleter sourceDirsExtractionExecutable + TestSuite -> modulesCompleter sourceDirsExtractionTestSuite + Benchmark -> modulesCompleter sourceDirsExtractionBenchmark + ForeignLib -> modulesCompleter sourceDirsExtractionForeignLib + Common -> + -- TODO: We can't provide a module completer because we provide + -- module completions based on the "hs-source-dirs" after parsing the file, + -- i.e. based on the 'PackageDescription'. 
+ -- "common" stanzas are erased in the 'PackageDescription' representation, + -- thus we can't provide accurate module completers right now, as we don't + -- know what the 'hs-source-dirs' in the "common" stanza are. + -- + -- A potential fix would be to introduce an intermediate representation that + -- parses the '.cabal' file s.t. that we have access to the 'hs-source-dirs', + -- but not have erased the "common" stanza. + noopCompleter -- | Contains a map of the most commonly used licenses, weighted by their popularity. -- diff --git a/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/Completion/Types.hs b/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/Completion/Types.hs index c39362e826..59796afe2b 100644 --- a/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/Completion/Types.hs +++ b/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/Completion/Types.hs @@ -8,7 +8,6 @@ import Control.DeepSeq (NFData) import Control.Lens ((^.)) import Data.Hashable import qualified Data.Text as T -import Data.Typeable import Development.IDE as D import qualified Distribution.Fields as Syntax import qualified Distribution.PackageDescription as PD @@ -44,7 +43,7 @@ instance Pretty Log where type instance RuleResult ParseCabalFile = PD.GenericPackageDescription data ParseCabalFile = ParseCabalFile - deriving (Eq, Show, Typeable, Generic) + deriving (Eq, Show, Generic) instance Hashable ParseCabalFile @@ -53,12 +52,21 @@ instance NFData ParseCabalFile type instance RuleResult ParseCabalFields = [Syntax.Field Syntax.Position] data ParseCabalFields = ParseCabalFields - deriving (Eq, Show, Typeable, Generic) + deriving (Eq, Show, Generic) instance Hashable ParseCabalFields instance NFData ParseCabalFields +type instance RuleResult ParseCabalCommonSections = [Syntax.Field Syntax.Position] + +data ParseCabalCommonSections = ParseCabalCommonSections + deriving (Eq, Show, Generic) + +instance Hashable ParseCabalCommonSections + +instance NFData ParseCabalCommonSections + -- | The context a cursor can be in within a cabal file. -- -- We can be in stanzas or the top level, @@ -171,3 +179,10 @@ lspPositionToCabalPosition :: Position -> Syntax.Position lspPositionToCabalPosition pos = Syntax.Position (fromIntegral (pos ^. JL.line) + 1) (fromIntegral (pos ^. JL.character) + 1) + +-- | Convert an 'Syntax.Position' to a LSP 'Position'. +-- +-- Cabal Positions start their indexing at 1 while LSP starts at 0. +-- This helper makes sure, the translation is done properly. 
+cabalPositionToLSPPosition :: Syntax.Position -> Position +cabalPositionToLSPPosition (Syntax.Position start end) = Position (toEnum start -1) (toEnum end -1) diff --git a/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/Definition.hs b/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/Definition.hs new file mode 100644 index 0000000000..5f85151199 --- /dev/null +++ b/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/Definition.hs @@ -0,0 +1,206 @@ +{-# LANGUAGE DataKinds #-} +{-# LANGUAGE DuplicateRecordFields #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE RecordWildCards #-} +{-# LANGUAGE TypeFamilies #-} + +module Ide.Plugin.Cabal.Definition where + +import Control.Lens ((^.)) +import Control.Monad.Extra +import Control.Monad.IO.Class +import Data.List (find) +import qualified Data.Maybe as Maybe +import qualified Data.Text as T +import Development.IDE as D +import Development.IDE.Core.PluginUtils +import qualified Distribution.Fields as Syntax +import Distribution.PackageDescription (Benchmark (..), + BuildInfo (..), + Executable (..), + ForeignLib (..), + GenericPackageDescription, + Library (..), + LibraryName (LMainLibName, LSubLibName), + PackageDescription (..), + TestSuite (..), + library, + unUnqualComponentName) +import Distribution.PackageDescription.Configuration (flattenPackageDescription) +import qualified Distribution.Parsec.Position as Syntax +import Distribution.Utils.Generic (safeHead) +import Distribution.Utils.Path (getSymbolicPath) +import Ide.Plugin.Cabal.Completion.CabalFields as CabalFields +import Ide.Plugin.Cabal.Completion.Types (ParseCabalCommonSections (ParseCabalCommonSections), + ParseCabalFields (..), + ParseCabalFile (..)) +import qualified Ide.Plugin.Cabal.Completion.Types as Types +import Ide.Plugin.Cabal.Orphans () +import Ide.Plugin.Error +import Ide.Types +import qualified Language.LSP.Protocol.Lens as JL +import qualified Language.LSP.Protocol.Message as LSP +import Language.LSP.Protocol.Types +import System.Directory (doesFileExist) +import System.FilePath (joinPath, + takeDirectory, + (<.>), ()) + +-- | Handler for going to definitions. +-- +-- Provides a handler for going to the definition in a cabal file, +-- gathering all possible definitions by calling subfunctions. + +-- TODO: Resolve more cases for go-to definition. +gotoDefinition :: PluginMethodHandler IdeState LSP.Method_TextDocumentDefinition +gotoDefinition ide _ msgParam = do + nfp <- getNormalizedFilePathE uri + cabalFields <- runActionE "cabal-plugin.commonSections" ide $ useE ParseCabalFields nfp + -- Trim the AST tree, so multiple passes in subfunctions won't hurt the performance. + let fieldsOfInterest = maybe cabalFields (:[] ) $ CabalFields.findFieldSection cursor cabalFields + + commonSections <- runActionE "cabal-plugin.commonSections" ide $ useE ParseCabalCommonSections nfp + let mCommonSectionsDef = gotoCommonSectionDefinition uri commonSections cursor fieldsOfInterest + + mModuleDef <- do + mGPD <- liftIO $ runAction "cabal.GPD" ide $ useWithStale ParseCabalFile nfp + case mGPD of + Nothing -> pure Nothing + Just (gpd, _) -> liftIO $ gotoModulesDefinition nfp gpd cursor fieldsOfInterest + + let defs = Maybe.catMaybes [ mCommonSectionsDef + , mModuleDef + ] + -- Take first found definition. + -- We assume, that there can't be multiple definitions, + -- or the most specific definitions come first. + case safeHead defs of + Nothing -> pure $ InR $ InR Null + Just def -> pure $ InL def + where + cursor = Types.lspPositionToCabalPosition (msgParam ^. 
JL.position) + uri = msgParam ^. JL.textDocument . JL.uri + +-- | Definitions for Sections. +-- +-- Provides a Definition if cursor is pointed at an identifier, +-- otherwise gives Nothing. +-- The definition is found by traversing the sections and comparing their name to +-- the clicked identifier. +gotoCommonSectionDefinition + :: Uri -- ^ Cabal file URI + -> [Syntax.Field Syntax.Position] -- ^ Found common sections + -> Syntax.Position -- ^ Cursor position + -> [Syntax.Field Syntax.Position] -- ^ Trimmed cabal AST on a cursor + -> Maybe Definition +gotoCommonSectionDefinition uri commonSections cursor fieldsOfInterest = do + cursorText <- CabalFields.findTextWord cursor fieldsOfInterest + commonSection <- find (isSectionArgName cursorText) commonSections + Just $ Definition $ InL $ Location uri $ CabalFields.getFieldLSPRange commonSection + where + isSectionArgName name (Syntax.Section _ sectionArgName _) = name == CabalFields.onelineSectionArgs sectionArgName + isSectionArgName _ _ = False + +-- | Definitions for Modules. +-- +-- Provides a Definition if cursor is pointed at a +-- exposed-module or other-module field, otherwise gives Nothing +-- +-- Definition is found by looking for a module name, +-- the cursor is pointing to and looking for it in @BuildInfo@s. +-- Note that since a trimmed ast is provided, a @Definition@ to +-- a module with the same name as the target one, +-- but in another build target can't be given. +-- +-- See resolving @Config@ module in tests. +gotoModulesDefinition + :: NormalizedFilePath -- ^ Normalized FilePath to the cabal file + -> GenericPackageDescription + -> Syntax.Position -- ^ Cursor position + -> [Syntax.Field Syntax.Position] -- ^ Trimmed cabal AST on a cursor + -> IO (Maybe Definition) +gotoModulesDefinition nfp gpd cursor fieldsOfInterest = do + let mCursorText = CabalFields.findTextWord cursor fieldsOfInterest + moduleNames = CabalFields.getModulesNames fieldsOfInterest + mModuleName = find (isModuleName mCursorText) moduleNames + + case mModuleName of + Nothing -> pure Nothing + Just (mBuildTargetNames, moduleName) -> do + let buildInfos = foldMap (lookupBuildTargetPackageDescription + (flattenPackageDescription gpd)) + mBuildTargetNames + sourceDirs = map getSymbolicPath $ concatMap hsSourceDirs buildInfos + potentialPaths = map (\dir -> takeDirectory (fromNormalizedFilePath nfp) dir toHaskellFile moduleName) sourceDirs + allPaths <- liftIO $ filterM doesFileExist potentialPaths + -- Don't provide the range, since there is little benefit for it + let locations = map (\pth -> Location (filePathToUri pth) (mkRange 0 0 0 0)) allPaths + case safeHead locations of -- We assume there could be only one source location + Nothing -> pure Nothing + Just location -> pure $ Just $ Definition $ InL location + where + isModuleName (Just name) (_, moduleName) = name == moduleName + isModuleName _ _ = False + +-- | Gives all `buildInfo`s given a target name. +-- +-- `Maybe buildTargetName` is provided, and if it's +-- Nothing we assume, that it's a main library. +-- Otherwise looks for the provided name. 
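Two small usage sketches of the lookup (the target name "app" is hypothetical): with Nothing the main library's build info is returned, with a name every kind of named component is searched.

demoMainLibLookup :: PackageDescription -> [BuildInfo]
demoMainLibLookup pd = lookupBuildTargetPackageDescription pd Nothing
-- the main library's BuildInfo, or [] if the package has none

demoNamedLookup :: PackageDescription -> [BuildInfo]
demoNamedLookup pd = lookupBuildTargetPackageDescription pd (Just "app")
-- the BuildInfo of the executable, sub-library, foreign library, test suite
-- or benchmark named "app", if any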
+lookupBuildTargetPackageDescription :: PackageDescription -> Maybe T.Text -> [BuildInfo] +lookupBuildTargetPackageDescription (PackageDescription {..}) Nothing = + case library of + Nothing -> [] -- Target is a main library but no main library was found + Just (Library {libBuildInfo}) -> [libBuildInfo] +lookupBuildTargetPackageDescription (PackageDescription {..}) (Just buildTargetName) = + Maybe.catMaybes $ + map executableNameLookup executables <> + map subLibraryNameLookup subLibraries <> + map foreignLibsNameLookup foreignLibs <> + map testSuiteNameLookup testSuites <> + map benchmarkNameLookup benchmarks + where + executableNameLookup :: Executable -> Maybe BuildInfo + executableNameLookup (Executable {exeName, buildInfo}) = + if T.pack (unUnqualComponentName exeName) == buildTargetName + then Just buildInfo + else Nothing + subLibraryNameLookup :: Library -> Maybe BuildInfo + subLibraryNameLookup (Library {libName, libBuildInfo}) = + case libName of + (LSubLibName name) -> + if T.pack (unUnqualComponentName name) == buildTargetName + then Just libBuildInfo + else Nothing + LMainLibName -> Nothing + foreignLibsNameLookup :: ForeignLib -> Maybe BuildInfo + foreignLibsNameLookup (ForeignLib {foreignLibName, foreignLibBuildInfo}) = + if T.pack (unUnqualComponentName foreignLibName) == buildTargetName + then Just foreignLibBuildInfo + else Nothing + testSuiteNameLookup :: TestSuite -> Maybe BuildInfo + testSuiteNameLookup (TestSuite {testName, testBuildInfo}) = + if T.pack (unUnqualComponentName testName) == buildTargetName + then Just testBuildInfo + else Nothing + benchmarkNameLookup :: Benchmark -> Maybe BuildInfo + benchmarkNameLookup (Benchmark {benchmarkName, benchmarkBuildInfo}) = + if T.pack (unUnqualComponentName benchmarkName) == buildTargetName + then Just benchmarkBuildInfo + else Nothing + +-- | Converts a name of a module to a FilePath. +-- Is needed to guess the relative path to a file +-- using the name of the module. +-- We assume, that correct module naming is guaranteed. +-- +-- Warning: Generally not advised to use, if there are +-- better ways to get the path. +-- +-- Examples: (output is system dependent) +-- >>> toHaskellFile "My.Module.Lib" +-- "My/Module/Lib.hs" +-- >>> toHaskellFile "Main" +-- "Main.hs" +toHaskellFile :: T.Text -> FilePath +toHaskellFile moduleName = joinPath (map T.unpack $ T.splitOn "." 
moduleName) <.> ".hs" diff --git a/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/Diagnostics.hs b/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/Diagnostics.hs index 26156c5131..5429ac0bb9 100644 --- a/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/Diagnostics.hs +++ b/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/Diagnostics.hs @@ -11,18 +11,21 @@ module Ide.Plugin.Cabal.Diagnostics ) where -import qualified Data.Text as T -import Development.IDE (FileDiagnostic, - ShowDiagnostic (ShowDiag)) -import Distribution.Fields (showPError, showPWarning) -import qualified Distribution.Parsec as Syntax -import Ide.PluginUtils (extendNextLine) -import Language.LSP.Protocol.Types (Diagnostic (..), - DiagnosticSeverity (..), - NormalizedFilePath, - Position (Position), - Range (Range), - fromNormalizedFilePath) +import Control.Lens ((&), (.~)) +import qualified Data.Text as T +import Development.IDE (FileDiagnostic) +import Development.IDE.Types.Diagnostics (fdLspDiagnosticL, + ideErrorWithSource) +import Distribution.Fields (showPError, showPWarning) +import qualified Distribution.Parsec as Syntax +import Ide.PluginUtils (extendNextLine) +import Language.LSP.Protocol.Lens (range) +import Language.LSP.Protocol.Types (Diagnostic (..), + DiagnosticSeverity (..), + NormalizedFilePath, + Position (Position), + Range (Range), + fromNormalizedFilePath) -- | Produce a diagnostic for a fatal Cabal parser error. fatalParseErrorDiagnostic :: NormalizedFilePath -> T.Text -> FileDiagnostic @@ -63,8 +66,9 @@ positionFromCabalPosition :: Syntax.Position -> Position positionFromCabalPosition (Syntax.Position line column) = Position (fromIntegral line') (fromIntegral col') where -- LSP is zero-based, Cabal is one-based - line' = line-1 - col' = column-1 + -- Cabal can return line 0 for errors in the first line + line' = if line <= 0 then 0 else line-1 + col' = if column <= 0 then 0 else column-1 -- | Create a 'FileDiagnostic' mkDiag @@ -79,15 +83,11 @@ mkDiag -> T.Text -- ^ The message displayed by the editor -> FileDiagnostic -mkDiag file diagSource sev loc msg = (file, ShowDiag,) - Diagnostic - { _range = loc - , _severity = Just sev - , _source = Just diagSource - , _message = msg - , _code = Nothing - , _tags = Nothing - , _relatedInformation = Nothing - , _codeDescription = Nothing - , _data_ = Nothing - } +mkDiag file diagSource sev loc msg = + ideErrorWithSource + (Just diagSource) + (Just sev) + file + msg + Nothing + & fdLspDiagnosticL . 
range .~ loc diff --git a/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/FieldSuggest.hs b/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/FieldSuggest.hs new file mode 100644 index 0000000000..2e77ccb193 --- /dev/null +++ b/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/FieldSuggest.hs @@ -0,0 +1,70 @@ +{-# LANGUAGE AllowAmbiguousTypes #-} +{-# LANGUAGE ExplicitNamespaces #-} +{-# LANGUAGE FlexibleContexts #-} +{-# LANGUAGE LambdaCase #-} +{-# LANGUAGE OverloadedStrings #-} + +module Ide.Plugin.Cabal.FieldSuggest + ( fieldErrorName, + fieldErrorAction, + -- * Re-exports + T.Text, + Diagnostic (..), + ) +where + +import qualified Data.Map.Strict as Map +import qualified Data.Text as T +import Language.LSP.Protocol.Types (CodeAction (..), + CodeActionKind (..), + Diagnostic (..), Position (..), + Range (..), TextEdit (..), Uri, + WorkspaceEdit (..)) +import Text.Regex.TDFA + +-- | Generate all code actions for given file, erroneous/unknown field and suggestions +fieldErrorAction + :: Uri + -- ^ File for which the diagnostic was generated + -> T.Text + -- ^ Original (unknown) field + -> [T.Text] + -- ^ Suggestions for the given file + -> Range + -- ^ Location of diagnostic + -> [CodeAction] +fieldErrorAction uri original suggestions range = + fmap mkCodeAction suggestions + where + mkCodeAction suggestion = + let + -- Range returned by cabal here represents fragment from start of offending identifier + -- to end of line, we modify this range to be to the end of the identifier + adjustRange (Range rangeFrom@(Position lineNr col) _) = + Range rangeFrom (Position lineNr (col + fromIntegral (T.length original))) + title = "Replace with " <> suggestion' + tedit = [TextEdit (adjustRange range ) suggestion'] + edit = WorkspaceEdit (Just $ Map.singleton uri tedit) Nothing Nothing + in CodeAction title (Just CodeActionKind_QuickFix) (Just []) Nothing Nothing (Just edit) Nothing Nothing + where + -- dropping colon from the end of suggestion + suggestion' = T.dropEnd 1 suggestion + +-- | Given a diagnostic returned by 'Ide.Plugin.Cabal.Diag.errorDiagnostic', +-- if it represents an "Unknown field"-error with incorrect identifier +-- then return the incorrect identifier together with original diagnostics. 
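A minimal sketch of the expected behaviour, with a hand-built diagnostic in which only the message matters for the match: a parser message of the form Unknown field: "buil-depends" yields the offending identifier, any other message yields Nothing.

demoUnknownField :: Maybe (T.Text, Diagnostic)
demoUnknownField = fieldErrorName demoDiag
-- == Just ("buil-depends", demoDiag)

demoDiag :: Diagnostic
demoDiag = Diagnostic
  { _range = Range (Position 4 2) (Position 4 30)
  , _severity = Nothing
  , _code = Nothing
  , _codeDescription = Nothing
  , _source = Nothing
  , _message = "Unknown field: \"buil-depends\""
  , _tags = Nothing
  , _relatedInformation = Nothing
  , _data_ = Nothing
  }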
+fieldErrorName :: + Diagnostic -> + -- ^ Output of 'Ide.Plugin.Cabal.Diag.errorDiagnostic' + Maybe (T.Text, Diagnostic) + -- ^ Original (incorrect) field name with the suggested replacement +fieldErrorName diag = + mSuggestion (_message diag) >>= \case + [original] -> Just (original, diag) + _ -> Nothing + where + regex :: T.Text + regex = "Unknown field: \"(.*)\"" + mSuggestion msg = getMatch <$> (msg :: T.Text) =~~ regex + getMatch :: (T.Text, T.Text, T.Text, [T.Text]) -> [T.Text] + getMatch (_, _, _, results) = results diff --git a/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/Outline.hs b/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/Outline.hs new file mode 100644 index 0000000000..40f348f88c --- /dev/null +++ b/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/Outline.hs @@ -0,0 +1,119 @@ +{-# LANGUAGE CPP #-} +{-# LANGUAGE DataKinds #-} +{-# LANGUAGE DuplicateRecordFields #-} +{-# LANGUAGE GADTs #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE ViewPatterns #-} + +module Ide.Plugin.Cabal.Outline where + +import Control.Monad.IO.Class +import Data.Maybe +import qualified Data.Text as T +import Data.Text.Encoding (decodeUtf8) +import Development.IDE.Core.Rules +import Development.IDE.Core.Shake (IdeState (shakeExtras), + runIdeAction, + useWithStaleFast) +import Development.IDE.Types.Location (toNormalizedFilePath') +import Distribution.Fields.Field (Field (Field, Section), + Name (Name)) +import Distribution.Parsec.Position (Position) +import Ide.Plugin.Cabal.Completion.CabalFields (onelineSectionArgs) +import Ide.Plugin.Cabal.Completion.Types (ParseCabalFields (..), + cabalPositionToLSPPosition) +import Ide.Plugin.Cabal.Orphans () +import Ide.Types (PluginMethodHandler) +import Language.LSP.Protocol.Message (Method (..)) +import Language.LSP.Protocol.Types (DocumentSymbol (..)) +import qualified Language.LSP.Protocol.Types as LSP + + +moduleOutline :: PluginMethodHandler IdeState Method_TextDocumentDocumentSymbol +moduleOutline ideState _ LSP.DocumentSymbolParams {_textDocument = LSP.TextDocumentIdentifier uri} = + case LSP.uriToFilePath uri of + Just (toNormalizedFilePath' -> fp) -> do + mFields <- liftIO $ runIdeAction "cabal-plugin.fields" (shakeExtras ideState) (useWithStaleFast ParseCabalFields fp) + case fmap fst mFields of + Just fieldPositions -> pure $ LSP.InR (LSP.InL allSymbols) + where + allSymbols = mapMaybe documentSymbolForField fieldPositions + Nothing -> pure $ LSP.InL [] + Nothing -> pure $ LSP.InL [] + +-- | Creates a @DocumentSymbol@ object for the +-- cabal AST, without displaying @fieldLines@ and +-- displaying @Section Name@ and @SectionArgs@ in one line. +-- +-- @fieldLines@ are leaves of a cabal AST, so they are omitted +-- in the outline. Sections have to be displayed in one line, because +-- the AST representation looks unnatural. 
See examples: +-- +-- * part of a cabal file: +-- +-- > if impl(ghc >= 9.8) +-- > ghc-options: -Wall +-- +-- * AST representation: +-- +-- > if +-- > impl +-- > ( +-- > ghc >= 9.8 +-- > ) +-- > +-- > ghc-options: +-- > -Wall +-- +-- * resulting @DocumentSymbol@: +-- +-- > if impl(ghc >= 9.8) +-- > ghc-options: +-- > +documentSymbolForField :: Field Position -> Maybe DocumentSymbol +documentSymbolForField (Field (Name pos fieldName) _) = + Just + (defDocumentSymbol range) + { _name = decodeUtf8 fieldName, + _kind = LSP.SymbolKind_Field, + _children = Nothing + } + where + range = cabalPositionToLSPRange pos `addNameLengthToLSPRange` decodeUtf8 fieldName +documentSymbolForField (Section (Name pos fieldName) sectionArgs fields) = + Just + (defDocumentSymbol range) + { _name = joinedName, + _kind = LSP.SymbolKind_Object, + _children = + Just + (mapMaybe documentSymbolForField fields) + } + where + joinedName = decodeUtf8 fieldName <> " " <> onelineSectionArgs sectionArgs + range = cabalPositionToLSPRange pos `addNameLengthToLSPRange` joinedName + +-- | Creates a single point LSP range +-- using cabal position +cabalPositionToLSPRange :: Position -> LSP.Range +cabalPositionToLSPRange pos = LSP.Range lspPos lspPos + where + lspPos = cabalPositionToLSPPosition pos + +addNameLengthToLSPRange :: LSP.Range -> T.Text -> LSP.Range +addNameLengthToLSPRange (LSP.Range pos1 (LSP.Position line char)) name = + LSP.Range + pos1 + (LSP.Position line (char + fromIntegral (T.length name))) + +defDocumentSymbol :: LSP.Range -> DocumentSymbol +defDocumentSymbol range = DocumentSymbol + { _detail = Nothing + , _deprecated = Nothing + , _name = "" + , _kind = LSP.SymbolKind_File + , _range = range + , _selectionRange = range + , _children = Nothing + , _tags = Nothing + } diff --git a/plugins/hls-cabal-plugin/test/CabalAdd.hs b/plugins/hls-cabal-plugin/test/CabalAdd.hs new file mode 100644 index 0000000000..6517c811fe --- /dev/null +++ b/plugins/hls-cabal-plugin/test/CabalAdd.hs @@ -0,0 +1,185 @@ +{-# LANGUAGE OverloadedStrings #-} + +module CabalAdd ( + cabalAddTests, +) where + +import Control.Lens ((^.)) +import Control.Lens.Fold ((^?)) +import qualified Data.Maybe as Maybe +import qualified Data.Text as T +import qualified Data.Text.Internal.Search as T +import Distribution.Utils.Generic (safeHead) +import Ide.Plugin.Cabal.CabalAdd (hiddenPackageSuggestion) +import qualified Language.LSP.Protocol.Lens as L +import Language.LSP.Protocol.Types (Diagnostic (..), mkRange) +import System.FilePath +import Test.Hls (Session, TestTree, _R, anyMessage, + assertEqual, documentContents, + executeCodeAction, + getAllCodeActions, + getDocumentEdit, liftIO, openDoc, + skipManyTill, testCase, testGroup, + waitForDiagnosticsFrom, (@?=)) +import Utils + +cabalAddTests :: TestTree +cabalAddTests = + testGroup + "CabalAdd Tests" + [ runHaskellTestCaseSession "Code Actions - Can add hidden package to an executable" ("cabal-add-testdata" "cabal-add-exe") + (generateAddDependencyTestSession "cabal-add-exe.cabal" ("src" "Main.hs") "split" [253]) + , runHaskellTestCaseSession "Code Actions - Can add hidden package to a library" ("cabal-add-testdata" "cabal-add-lib") + (generateAddDependencyTestSession "cabal-add-lib.cabal" ("src" "MyLib.hs") "split" [348]) + , runHaskellTestCaseSession "Code Actions - Can add hidden package to a test" ("cabal-add-testdata" "cabal-add-tests") + (generateAddDependencyTestSession "cabal-add-tests.cabal" ("test" "Main.hs") "split" [478]) + , runHaskellTestCaseSession "Code Actions - Can add hidden 
package to a test with PackageImports" ("cabal-add-testdata" "cabal-add-tests") + (generateAddDependencyTestSession "cabal-add-tests.cabal" ("test" "MainPackageImports.hs") "split" [731]) + , runHaskellTestCaseSession "Code Actions - Can add hidden package to a benchmark" ("cabal-add-testdata" "cabal-add-bench") + (generateAddDependencyTestSession "cabal-add-bench.cabal" ("bench" "Main.hs") "split" [403]) + + , runHaskellTestCaseSession "Code Actions - Can add hidden package to an executable, multiple targets" ("cabal-add-testdata" "cabal-add-multitarget") + (generateAddDependencyTestSession "cabal-add-multitarget.cabal" ("src" "Main.hs") "split" [269]) + , runHaskellTestCaseSession "Code Actions - Can add hidden package to a library, multiple targets" ("cabal-add-testdata" "cabal-add-multitarget") + (generateAddDependencyTestSession "cabal-add-multitarget.cabal" ("lib" "MyLib.hs") "split" [413]) + , runHaskellTestCaseSession "Code Actions - Can add hidden package to an internal library, multiple targets" ("cabal-add-testdata" "cabal-add-multitarget") + (generateAddDependencyTestSession "cabal-add-multitarget.cabal" ("lib" "InternalLib.hs") "split" [413]) + , runHaskellTestCaseSession "Code Actions - Can add hidden package to a test, multiple targets" ("cabal-add-testdata" "cabal-add-multitarget") + (generateAddDependencyTestSession "cabal-add-multitarget.cabal" ("test" "Main.hs") "split" [655]) + , runHaskellTestCaseSession "Code Actions - Can add hidden package to a benchmark, multiple targets" ("cabal-add-testdata" "cabal-add-multitarget") + (generateAddDependencyTestSession "cabal-add-multitarget.cabal" ("bench" "Main.hs") "split" [776]) + + + , runHaskellTestCaseSession "Code Actions - Guard against HPack" ("cabal-add-testdata" "cabal-add-packageYaml") + (generatePackageYAMLTestSession ("src" "Main.hs")) + + , testHiddenPackageSuggestions "Check CabalAdd's parser, no version" + [ "It is a member of the hidden package 'base'" + , "It is a member of the hidden package 'Blammo-wai'" + , "It is a member of the hidden package 'BlastHTTP'" + , "It is a member of the hidden package 'CC-delcont-ref-tf'" + , "It is a member of the hidden package '3d-graphics-examples'" + , "It is a member of the hidden package 'AAI'" + , "It is a member of the hidden package 'AWin32Console'" + ] + [ ("base", T.empty) + , ("Blammo-wai", T.empty) + , ("BlastHTTP", T.empty) + , ("CC-delcont-ref-tf", T.empty) + , ("3d-graphics-examples", T.empty) + , ("AAI", T.empty) + , ("AWin32Console", T.empty) + ] + , testHiddenPackageSuggestions "Check CabalAdd's parser, with version" + [ "It is a member of the hidden package 'base-0.1.0.0'" + , "It is a member of the hidden package 'Blammo-wai-0.11.0'" + , "It is a member of the hidden package 'BlastHTTP-2.6.4.3'" + , "It is a member of the hidden package 'CC-delcont-ref-tf-0.0.0.2'" + , "It is a member of the hidden package '3d-graphics-examples-1.1.6'" + , "It is a member of the hidden package 'AAI-0.1'" + , "It is a member of the hidden package 'AWin32Console-1.19.1'" + ] + [ ("base","0.1.0.0") + , ("Blammo-wai", "0.11.0") + , ("BlastHTTP", "2.6.4.3") + , ("CC-delcont-ref-tf", "0.0.0.2") + , ("3d-graphics-examples", "1.1.6") + , ("AAI", "0.1") + , ("AWin32Console", "1.19.1") + ] + , testHiddenPackageSuggestions "Check CabalAdd's parser, no version, unicode comma" + [ "It is a member of the hidden package \8216base\8217" + , "It is a member of the hidden package \8216Blammo-wai\8217" + , "It is a member of the hidden package \8216BlastHTTP\8217" + , "It is a member of the 
hidden package \8216CC-delcont-ref-tf\8217" + , "It is a member of the hidden package \8216AAI\8217" + , "It is a member of the hidden package \8216AWin32Console\8217" + ] + [ ("base", T.empty) + , ("Blammo-wai", T.empty) + , ("BlastHTTP", T.empty) + , ("CC-delcont-ref-tf", T.empty) + , ("AAI", T.empty) + , ("AWin32Console", T.empty) + ] + , testHiddenPackageSuggestions "Check CabalAdd's parser, with version, unicode comma" + [ "It is a member of the hidden package \8216base-0.1.0.0\8217" + , "It is a member of the hidden package \8216Blammo-wai-0.11.0\8217" + , "It is a member of the hidden package \8216BlastHTTP-2.6.4.3\8217" + , "It is a member of the hidden package \8216CC-delcont-ref-tf-0.0.0.2\8217" + , "It is a member of the hidden package \8216AAI-0.1\8217" + , "It is a member of the hidden package \8216AWin32Console-1.19.1\8217" + ] + [ ("base","0.1.0.0") + , ("Blammo-wai", "0.11.0") + , ("BlastHTTP", "2.6.4.3") + , ("CC-delcont-ref-tf", "0.0.0.2") + , ("AAI", "0.1") + , ("AWin32Console", "1.19.1") + ] + , testHiddenPackageSuggestions "Check CabalAdd's parser, with version, unicode comma" + [ "It is a member of the hidden package \8216\&3d-graphics-examples\8217" + , "It is a member of the hidden package \8216\&3d-graphics-examples-1.1.6\8217" + ] + [ ("3d-graphics-examples", T.empty) + , ("3d-graphics-examples", "1.1.6") + ] + , testHiddenPackageSuggestions "Check CabalAdd's parser, with version, with PackageImports" + [ "(needs flag -package-id base-0.1.0.0)" + , "(needs flag -package-id Blammo-wai-0.11.0)" + , "(needs flag -package-id BlastHTTP-2.6.4.3)" + , "(needs flag -package-id CC-delcont-ref-tf-0.0.0.2)" + , "(needs flag -package-id 3d-graphics-examples-1.1.6)" + , "(needs flag -package-id AAI-0.1)" + , "(needs flag -package-id AWin32Console-1.19.1)" + ] + [ ("base","0.1.0.0") + , ("Blammo-wai", "0.11.0") + , ("BlastHTTP", "2.6.4.3") + , ("CC-delcont-ref-tf", "0.0.0.2") + , ("3d-graphics-examples", "1.1.6") + , ("AAI", "0.1") + , ("AWin32Console", "1.19.1") + ] + ] + where + generateAddDependencyTestSession :: FilePath -> FilePath -> T.Text -> [Int] -> Session () + generateAddDependencyTestSession cabalFile haskellFile dependency indicesRes = do + hsdoc <- openDoc haskellFile "haskell" + cabDoc <- openDoc cabalFile "cabal" + _ <- waitForDiagnosticsFrom hsdoc + cas <- Maybe.mapMaybe (^? _R) <$> getAllCodeActions hsdoc + let selectedCas = filter (\ca -> "Add dependency" `T.isPrefixOf` (ca ^. L.title)) cas + mapM_ executeCodeAction selectedCas + _ <- skipManyTill anyMessage $ getDocumentEdit cabDoc -- Wait for the changes in cabal file + contents <- documentContents cabDoc + liftIO $ assertEqual (T.unpack dependency <> " isn't found in the cabal file") indicesRes (T.indices dependency contents) + testHiddenPackageSuggestions :: String -> [T.Text] -> [(T.Text, T.Text)] -> TestTree + testHiddenPackageSuggestions testTitle messages suggestions = + let diags = map (\msg -> messageToDiagnostic msg ) messages + suggestions' = map (safeHead . 
hiddenPackageSuggestion) diags + assertions = zipWith (@?=) suggestions' (map Just suggestions) + testNames = map (\(f, s) -> "Check if " ++ T.unpack f ++ (if s == "" then "" else "-") ++ T.unpack s ++ " was parsed correctly") suggestions + test = testGroup testTitle $ zipWith testCase testNames assertions + in test + messageToDiagnostic :: T.Text -> Diagnostic + messageToDiagnostic msg = Diagnostic { + _range = mkRange 0 0 0 0 + , _severity = Nothing + , _code = Nothing + , _source = Nothing + , _message = msg + , _relatedInformation = Nothing + , _tags = Nothing + , _codeDescription = Nothing + , _data_ = Nothing + } + + + generatePackageYAMLTestSession :: FilePath -> Session () + generatePackageYAMLTestSession haskellFile = do + hsdoc <- openDoc haskellFile "haskell" + _ <- waitForDiagnosticsFrom hsdoc + cas <- Maybe.mapMaybe (^? _R) <$> getAllCodeActions hsdoc + let selectedCas = filter (\ca -> "Add dependency" `T.isPrefixOf` (ca ^. L.title)) cas + liftIO $ assertEqual "PackageYAML" [] selectedCas diff --git a/plugins/hls-cabal-plugin/test/Completer.hs b/plugins/hls-cabal-plugin/test/Completer.hs index e7403e9a0e..ab7165b1ac 100644 --- a/plugins/hls-cabal-plugin/test/Completer.hs +++ b/plugins/hls-cabal-plugin/test/Completer.hs @@ -1,19 +1,26 @@ {-# LANGUAGE DisambiguateRecordFields #-} {-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE QuasiQuotes #-} + module Completer where import Control.Lens ((^.), (^?)) import Control.Lens.Prism +import Control.Monad (forM_) import qualified Data.ByteString as ByteString +import qualified Data.ByteString.Char8 as BS8 import Data.Maybe (mapMaybe) import qualified Data.Text as T import qualified Development.IDE.Plugin.Completions.Types as Ghcide +import qualified Distribution.Fields as Syntax import Distribution.PackageDescription (GenericPackageDescription) import Distribution.PackageDescription.Parsec (parseGenericPackageDescriptionMaybe) +import qualified Distribution.Parsec.Position as Syntax import Ide.Plugin.Cabal.Completion.Completer.FilePath import Ide.Plugin.Cabal.Completion.Completer.Module import Ide.Plugin.Cabal.Completion.Completer.Paths +import Ide.Plugin.Cabal.Completion.Completer.Simple (importCompleter) import Ide.Plugin.Cabal.Completion.Completer.Types (CompleterData (..)) import Ide.Plugin.Cabal.Completion.Completions import Ide.Plugin.Cabal.Completion.Types (CabalPrefixInfo (..), @@ -33,7 +40,9 @@ completerTests = directoryCompleterTests, completionHelperTests, filePathExposedModulesTests, - exposedModuleCompleterTests + exposedModuleCompleterTests, + importCompleterTests, + autogenFieldCompletionTests ] basicCompleterTests :: TestTree @@ -290,23 +299,78 @@ exposedModuleCompleterTests = completions @?== [] ] where - simpleCompleterData :: Maybe StanzaName -> FilePath -> T.Text -> CompleterData - simpleCompleterData sName dir pref = do - CompleterData - { cabalPrefixInfo = simpleExposedCabalPrefixInfo pref dir, - getLatestGPD = do - cabalContents <- ByteString.readFile $ testDataDir "exposed.cabal" - pure $ parseGenericPackageDescriptionMaybe cabalContents, - stanzaName = sName - } callModulesCompleter :: Maybe StanzaName -> (Maybe StanzaName -> GenericPackageDescription -> [FilePath]) -> T.Text -> IO [T.Text] callModulesCompleter sName func prefix = do let cData = simpleCompleterData sName testDataDir prefix completer <- modulesCompleter func mempty cData pure $ fmap extract completer +-- TODO: These tests are a bit barebones at the moment, +-- since we do not take cursorposition into account at this point. 
+importCompleterTests :: TestTree +importCompleterTests = + testGroup + "Import Completer Tests" + [ testCase "All above common sections are suggested" $ do + completions <- callImportCompleter + ("defaults" `elem` completions) @? "defaults contained" + ("test-defaults" `elem` completions) @? "test-defaults contained" + -- TODO: Only common sections defined before the current stanza may be imported + , testCase "Common sections occuring below are not suggested" $ do + completions <- callImportCompleter + ("notForLib" `elem` completions) @? "notForLib contained, this needs to be fixed" + , testCase "All common sections are suggested when curser is below them" $ do + completions <- callImportCompleter + completions @?== ["defaults", "notForLib" ,"test-defaults"] + ] + where + callImportCompleter :: IO [T.Text] + callImportCompleter = do + let cData' = simpleCompleterData Nothing testDataDir "" + let cabalCommonSections = [makeCommonSection 13 0 "defaults", makeCommonSection 18 0 "test-defaults", makeCommonSection 27 0 "notForLib"] + let cData = cData' {getCabalCommonSections = pure $ Just cabalCommonSections} + completer <- importCompleter mempty cData + pure $ fmap extract completer + makeCommonSection :: Int -> Int -> String -> Syntax.Field Syntax.Position + makeCommonSection row col name = + Syntax.Section + (Syntax.Name (Syntax.Position row col) "common") + [Syntax.SecArgName (Syntax.Position row (col + 7)) (BS8.pack name)] + [] + +autogenFieldCompletionTests :: TestTree +autogenFieldCompletionTests = + testGroup "Autogen Field Completer Tests" + [ testAutogenField "library" "completion/autogen-completion.cabal" (Position 6 9) ["autogen-modules:", "autogen-includes:"] + , testAutogenField "executable" "completion/autogen-completion.cabal" (Position 11 9) ["autogen-modules:", "autogen-includes:"] + , testAutogenField "test-suite" "completion/autogen-completion.cabal" (Position 16 9) ["autogen-modules:", "autogen-includes:"] + , testAutogenField "benchmark" "completion/autogen-completion.cabal" (Position 21 9) ["autogen-modules:", "autogen-includes:"] + , testAutogenField "common" "completion/autogen-completion.cabal" (Position 24 9) ["autogen-modules:", "autogen-includes:"] + ] + + where + testAutogenField :: String -> FilePath -> Position -> [T.Text] -> TestTree + testAutogenField section file pos expected = runCabalTestCaseSession ("autogen-modules completion in " <> section) "" $ do + doc <- openDoc file "cabal" + items <- getCompletions doc pos + let labels = map (^. 
L.label) items + liftIO $ forM_ expected $ \expect -> + assertBool (T.unpack expect <> " not found in " <> section) $ + any (expect `T.isInfixOf`) labels + +simpleCompleterData :: Maybe StanzaName -> FilePath -> T.Text -> CompleterData +simpleCompleterData sName dir pref = do + CompleterData + { cabalPrefixInfo = simpleExposedCabalPrefixInfo pref dir, + getLatestGPD = do + cabalContents <- ByteString.readFile $ testDataDir "exposed.cabal" + pure $ parseGenericPackageDescriptionMaybe cabalContents, + getCabalCommonSections = undefined, + stanzaName = sName + } + mkCompleterData :: CabalPrefixInfo -> CompleterData -mkCompleterData prefInfo = CompleterData {getLatestGPD = undefined, cabalPrefixInfo = prefInfo, stanzaName = Nothing} +mkCompleterData prefInfo = CompleterData {getLatestGPD = undefined, getCabalCommonSections = undefined, cabalPrefixInfo = prefInfo, stanzaName = Nothing} exposedTestDir :: FilePath exposedTestDir = addTrailingPathSeparator $ testDataDir "src-modules" @@ -326,3 +390,41 @@ extract :: CompletionItem -> T.Text extract item = case item ^. L.textEdit of Just (InL v) -> v ^. L.newText _ -> error "" + +importTestData :: T.Text +importTestData = [trimming| +cabal-version: 3.0 +name: hls-cabal-plugin +version: 0.1.0.0 +synopsis: +homepage: +license: MIT +license-file: LICENSE +author: Fendor +maintainer: fendor@posteo.de +category: Development +extra-source-files: CHANGELOG.md + +common defaults + default-language: GHC2021 + -- Should have been in GHC2021, an oversight + default-extensions: ExplicitNamespaces + +common test-defaults + ghc-options: -threaded -rtsopts -with-rtsopts=-N + +library + import: + ^ + exposed-modules: IDE.Plugin.Cabal + build-depends: base ^>=4.14.3.0 + hs-source-dirs: src + default-language: Haskell2010 + +common notForLib + default-language: GHC2021 + +test-suite tests + import: + ^ +|] diff --git a/plugins/hls-cabal-plugin/test/Context.hs b/plugins/hls-cabal-plugin/test/Context.hs index 82d50ccf14..8e6176bc5b 100644 --- a/plugins/hls-cabal-plugin/test/Context.hs +++ b/plugins/hls-cabal-plugin/test/Context.hs @@ -154,6 +154,15 @@ getContextTests = , testCase "Top level - cursor in later line with partially written value" $ do ctx <- callGetContext (Position 5 13) "eee" topLevelData ctx @?= (TopLevel, KeyWord "name:") + , testCase "If is ignored" $ do + ctx <- callGetContext (Position 5 18) "" conditionalData + ctx @?= (Stanza "library" Nothing, None) + , testCase "Elif is ignored" $ do + ctx <- callGetContext (Position 7 18) "" conditionalData + ctx @?= (Stanza "library" Nothing, None) + , testCase "Else is ignored" $ do + ctx <- callGetContext (Position 9 18) "" conditionalData + ctx @?= (Stanza "library" Nothing, KeyWord "buildable:") , testCase "Named Stanza" $ do ctx <- callGetContext (Position 2 18) "" executableStanzaData ctx @?= (TopLevel, None) @@ -237,6 +246,18 @@ name: eee |] +conditionalData :: T.Text +conditionalData = [trimming| +cabal-version: 3.0 +name: simple-cabal +library + if os(windows) + buildable: + elif os(linux) + buildable: + else + buildable: +|] multiLineOptsData :: T.Text multiLineOptsData = [trimming| cabal-version: 3.0 diff --git a/plugins/hls-cabal-plugin/test/Definition.hs b/plugins/hls-cabal-plugin/test/Definition.hs new file mode 100644 index 0000000000..33163c03eb --- /dev/null +++ b/plugins/hls-cabal-plugin/test/Definition.hs @@ -0,0 +1,126 @@ +{-# LANGUAGE OverloadedStrings #-} + +module Definition ( + gotoDefinitionTests, +) where + +import Control.Lens ((^.)) +import Data.List.Extra (isSuffixOf) +import 
qualified Data.Text as T +import Ide.Plugin.Cabal.Definition (toHaskellFile) +import qualified Language.LSP.Protocol.Lens as L +import qualified Language.LSP.Protocol.Types as LSP +import System.FilePath +import Test.Hls +import Utils + + +gotoDefinitionTests :: TestTree +gotoDefinitionTests = testGroup "Goto Definition" + [ gotoCommonSectionDefinitionTests + , gotoModuleDefinitionTests + ] + +gotoModuleDefinitionTests :: TestTree +gotoModuleDefinitionTests = testGroup "Goto Module Definition" + [ testGoToDefinitionLink "simple cabal test" "simple-cabal" "simple-cabal.cabal" + (Position 8 23) (toTestHaskellPath "" "A") + + , testGoToDefinitionLink "library start of exposed-modules" ("goto-definition" "modules") "module-examples.cabal" + (Position 6 22) (toTestHaskellPath "src" "Library.Lib") + , testGoToDefinitionLink "library middle of exposed-modules" ("goto-definition" "modules") "module-examples.cabal" + (Position 6 29) (toTestHaskellPath "src" "Library.Lib") + , testGoToDefinitionLink "library end of exposed-modules" ("goto-definition" "modules") "module-examples.cabal" + (Position 6 33) (toTestHaskellPath "src" "Library.Lib") + , testGoToDefinitionLink "library start of other-modules" ("goto-definition" "modules") "module-examples.cabal" + (Position 9 22) (toTestHaskellPath "src" "Library.Other.OtherLib") + , testGoToDefinitionLink "library end of other-modules" ("goto-definition" "modules") "module-examples.cabal" + (Position 9 44) (toTestHaskellPath "src" "Library.Other.OtherLib") + + , testGoToDefinitionLink "executable other-modules" ("goto-definition" "modules") "module-examples.cabal" + (Position 22 10) (toTestHaskellPath ("src" "exe") "Config") + + , testGoToDefinitionLink "test-suite other-modules Config" ("goto-definition" "modules") "module-examples.cabal" + (Position 31 10) (toTestHaskellPath ("src" "test") "Config") + , testGoToDefinitionLink "test-suite other-modules Library" ("goto-definition" "modules") "module-examples.cabal" + (Position 34 10) (toTestHaskellPath ("src" "test") "Library") + + , testGoToDefinitionLink "benchmark other-modules Config" ("goto-definition" "modules") "module-examples.cabal" + (Position 45 30) (toTestHaskellPath ("src" "bench") "Config") + + , testGoToDefinitionLinkNoLocation "not existent module" ("goto-definition" "modules") "module-examples.cabal" (Position 48 25) + , testGoToDefinitionLinkNoLocation "behind module" ("goto-definition" "modules") "module-examples.cabal" (Position 9 20) + , testGoToDefinitionLinkNoLocation "after module" ("goto-definition" "modules") "module-examples.cabal" (Position 9 50) + ] + where + toTestHaskellPath :: FilePath -> T.Text -> FilePath + toTestHaskellPath dir moduleName = dir toHaskellFile moduleName + + getUriFromDefinition :: Show b => (Definition |? 
b) -> Uri + getUriFromDefinition (InL (Definition (InL loc))) = loc^.L.uri + getUriFromDefinition unk = error $ "Unexpected pattern '" ++ show unk ++ "' , expected '(InL (Definition (InL loc))'" + + testGoToDefinitionLink :: TestName -> FilePath -> FilePath -> Position -> FilePath -> TestTree + testGoToDefinitionLink testName testDir cabalFile cursorPos expectedFilePath = + runCabalTestCaseSession testName testDir $ do + doc <- openDoc cabalFile "cabal" + definitions <- getDefinitions doc cursorPos + let uri = getUriFromDefinition definitions + mFilePath = (testDir ) <$> uriToFilePath uri + case mFilePath of + Nothing -> error $ "Not possible to convert Uri " <> show uri <> " to FilePath" + Just filePath -> do + let filePathWithDir = testDir expectedFilePath + isCorrectPath = filePathWithDir `isSuffixOf` filePath + liftIO $ isCorrectPath @? ("Absolute path expected to end on " <> filePathWithDir <> + " but " <> filePath <> " was given.") + + testGoToDefinitionLinkNoLocation :: TestName -> FilePath -> FilePath -> Position -> TestTree + testGoToDefinitionLinkNoLocation testName testDir cabalFile cursorPos = + runCabalTestCaseSession testName testDir $ do + doc <- openDoc cabalFile "cabal" + empty <- getDefinitions doc cursorPos + liftIO $ empty @?= (InR $ InR LSP.Null) + +gotoCommonSectionDefinitionTests :: TestTree +gotoCommonSectionDefinitionTests = testGroup "Goto Common Section Definition" + [ positiveTest "middle of identifier" (Position 27 16) (mkRange 6 0 7 22) + , positiveTest "left of identifier" (Position 30 12) (mkRange 10 0 17 40) + , positiveTest "right of identifier" (Position 33 22) (mkRange 20 0 23 34) + , positiveTest "left of '-' in identifier" (Position 36 20) (mkRange 6 0 7 22) + , positiveTest "right of '-' in identifier" (Position 39 19) (mkRange 10 0 17 40) + , positiveTest "identifier in identifier list" (Position 42 16) (mkRange 20 0 23 34) + , positiveTest "left of ',' right of identifier" (Position 45 33) (mkRange 10 0 17 40) + , positiveTest "right of ',' left of identifier" (Position 48 34) (mkRange 6 0 7 22) + + , negativeTest "right of ',' left of space" (Position 51 23) + , negativeTest "right of ':' left of space" (Position 54 11) + , negativeTest "not a definition" (Position 57 8) + , negativeTest "empty space" (Position 59 7) + ] + where + getRangeFromDefinition :: Show b => (Definition |? b) -> Range + getRangeFromDefinition (InL (Definition (InL loc))) = loc^.L.range + getRangeFromDefinition unk = error $ "Unexpected pattern '" ++ show unk ++ "' , expected '(InL (Definition (InL loc))'" + + -- A positive test checks if the provided range is equal + -- to the expected range from the definition in the test file. + -- The test emulates a goto-definition request of an actual definition. + positiveTest :: TestName -> Position -> Range -> TestTree + positiveTest testName cursorPos expectedRange = + runCabalTestCaseSession testName ("goto-definition" "common-section") $ do + doc <- openDoc "simple-with-common.cabal" "cabal" + definitions <- getDefinitions doc cursorPos + let range = getRangeFromDefinition definitions + liftIO $ range @?= expectedRange + + -- A negative test checks if the request failed and + -- the provided result is empty, i.e. `InR $ InR Null`. + -- The test emulates a goto-definition request of anything but an + -- actual definition. 
+ negativeTest :: TestName -> Position -> TestTree + negativeTest testName cursorPos = + runCabalTestCaseSession testName ("goto-definition" "common-section") $ do + doc <- openDoc "simple-with-common.cabal" "cabal" + empty <- getDefinitions doc cursorPos + liftIO $ empty @?= (InR $ InR LSP.Null) diff --git a/plugins/hls-cabal-plugin/test/Main.hs b/plugins/hls-cabal-plugin/test/Main.hs index 6488e71e16..fcb85a081e 100644 --- a/plugins/hls-cabal-plugin/test/Main.hs +++ b/plugins/hls-cabal-plugin/test/Main.hs @@ -6,17 +6,22 @@ module Main ( main, ) where +import CabalAdd (cabalAddTests) import Completer (completerTests) import Context (contextTests) import Control.Lens ((^.)) +import Control.Lens.Fold ((^?)) import Control.Monad (guard) import qualified Data.ByteString as BS import Data.Either (isRight) +import Data.List.Extra (nubOrdOn) +import qualified Data.Maybe as Maybe import qualified Data.Text as T -import qualified Data.Text as Text +import Definition (gotoDefinitionTests) import Ide.Plugin.Cabal.LicenseSuggest (licenseErrorSuggestion) import qualified Ide.Plugin.Cabal.Parse as Lib import qualified Language.LSP.Protocol.Lens as L +import Outline (outlineTests) import System.FilePath import Test.Hls import Utils @@ -30,6 +35,10 @@ main = do , pluginTests , completerTests , contextTests + , outlineTests + , codeActionTests + , gotoDefinitionTests + , hoverTests ] -- ------------------------------------------------------------------------ @@ -79,6 +88,7 @@ codeActionUnitTests = where maxCompletions = 100 + -- ------------------------ ------------------------------------------------ -- Integration Tests -- ------------------------------------------------------------------------ @@ -90,107 +100,162 @@ pluginTests = [ testGroup "Diagnostics" [ runCabalTestCaseSession "Publishes Diagnostics on Error" "" $ do - doc <- openDoc "invalid.cabal" "cabal" - diags <- waitForDiagnosticsFromSource doc "cabal" + _ <- openDoc "invalid.cabal" "cabal" + diags <- cabalCaptureKick unknownLicenseDiag <- liftIO $ inspectDiagnostic diags ["Unknown SPDX license identifier: 'BSD3'"] liftIO $ do length diags @?= 1 unknownLicenseDiag ^. L.range @?= Range (Position 3 24) (Position 4 0) unknownLicenseDiag ^. L.severity @?= Just DiagnosticSeverity_Error + , runCabalTestCaseSession "Publishes Diagnostics on unsupported cabal version as Warning" "" $ do + _ <- openDoc "unsupportedVersion.cabal" "cabal" + diags <- cabalCaptureKick + unknownVersionDiag <- liftIO $ inspectDiagnosticAny diags ["Unsupported cabal-version 99999.0", "Unsupported cabal format version in cabal-version field: 99999.0"] + liftIO $ do + length diags @?= 1 + unknownVersionDiag ^. L.range @?= Range (Position 0 0) (Position 1 0) + unknownVersionDiag ^. L.severity @?= Just DiagnosticSeverity_Warning , runCabalTestCaseSession "Clears diagnostics" "" $ do doc <- openDoc "invalid.cabal" "cabal" - diags <- waitForDiagnosticsFrom doc + diags <- cabalCaptureKick unknownLicenseDiag <- liftIO $ inspectDiagnostic diags ["Unknown SPDX license identifier: 'BSD3'"] liftIO $ do length diags @?= 1 unknownLicenseDiag ^. L.range @?= Range (Position 3 24) (Position 4 0) unknownLicenseDiag ^. 
L.severity @?= Just DiagnosticSeverity_Error _ <- applyEdit doc $ TextEdit (Range (Position 3 20) (Position 4 0)) "BSD-3-Clause\n" - newDiags <- waitForDiagnosticsFrom doc + newDiags <- cabalCaptureKick liftIO $ newDiags @?= [] , runCabalTestCaseSession "No Diagnostics in .hs files from valid .cabal file" "simple-cabal" $ do hsDoc <- openDoc "A.hs" "haskell" expectNoMoreDiagnostics 1 hsDoc "typechecking" cabalDoc <- openDoc "simple-cabal.cabal" "cabal" expectNoMoreDiagnostics 1 cabalDoc "parsing" - , runCabalTestCaseSession "Diagnostics in .hs files from invalid .cabal file" "simple-cabal" $ do - hsDoc <- openDoc "A.hs" "haskell" - expectNoMoreDiagnostics 1 hsDoc "typechecking" - cabalDoc <- openDoc "simple-cabal.cabal" "cabal" - expectNoMoreDiagnostics 1 cabalDoc "parsing" - let theRange = Range (Position 3 20) (Position 3 23) - -- Invalid license - changeDoc - cabalDoc - [ TextDocumentContentChangeEvent $ - InL TextDocumentContentChangePartial - { _range = theRange - , _rangeLength = Nothing - , _text = "MIT3" - } - ] - cabalDiags <- waitForDiagnosticsFrom cabalDoc - unknownLicenseDiag <- liftIO $ inspectDiagnostic cabalDiags ["Unknown SPDX license identifier: 'MIT3'"] - expectNoMoreDiagnostics 1 hsDoc "typechecking" - liftIO $ do - length cabalDiags @?= 1 - unknownLicenseDiag ^. L.range @?= Range (Position 3 24) (Position 4 0) - unknownLicenseDiag ^. L.severity @?= Just DiagnosticSeverity_Error - ] - , testGroup - "Code Actions" - [ runCabalTestCaseSession "BSD-3" "" $ do - doc <- openDoc "licenseCodeAction.cabal" "cabal" - diags <- waitForDiagnosticsFromSource doc "cabal" - reduceDiag <- liftIO $ inspectDiagnostic diags ["Unknown SPDX license identifier: 'BSD3'"] - liftIO $ do - length diags @?= 1 - reduceDiag ^. L.range @?= Range (Position 3 24) (Position 4 0) - reduceDiag ^. L.severity @?= Just DiagnosticSeverity_Error - [codeAction] <- getLicenseAction "BSD-3-Clause" <$> getCodeActions doc (Range (Position 3 24) (Position 4 0)) - executeCodeAction codeAction - contents <- documentContents doc - liftIO $ - contents - @?= Text.unlines - [ "cabal-version: 3.0" - , "name: licenseCodeAction" - , "version: 0.1.0.0" - , "license: BSD-3-Clause" - , "" - , "library" - , " build-depends: base" - , " default-language: Haskell2010" - ] - , runCabalTestCaseSession "Apache-2.0" "" $ do - doc <- openDoc "licenseCodeAction2.cabal" "cabal" - diags <- waitForDiagnosticsFromSource doc "cabal" - -- test if it supports typos in license name, here 'apahe' - reduceDiag <- liftIO $ inspectDiagnostic diags ["Unknown SPDX license identifier: 'APAHE'"] - liftIO $ do - length diags @?= 1 - reduceDiag ^. L.range @?= Range (Position 3 25) (Position 4 0) - reduceDiag ^. 
L.severity @?= Just DiagnosticSeverity_Error - [codeAction] <- getLicenseAction "Apache-2.0" <$> getCodeActions doc (Range (Position 3 24) (Position 4 0)) - executeCodeAction codeAction - contents <- documentContents doc - liftIO $ - contents - @?= Text.unlines - [ "cabal-version: 3.0" - , "name: licenseCodeAction2" - , "version: 0.1.0.0" - , "license: Apache-2.0" - , "" - , "library" - , " build-depends: base" - , " default-language: Haskell2010" - ] ] ] +-- ---------------------------------------------------------------------------- +-- Code Action Tests +-- ---------------------------------------------------------------------------- + +codeActionTests :: TestTree +codeActionTests = testGroup "Code Actions" + [ runCabalTestCaseSession "BSD-3" "" $ do + doc <- openDoc "licenseCodeAction.cabal" "cabal" + diags <- waitForDiagnosticsFromSource doc "cabal" + reduceDiag <- liftIO $ inspectDiagnostic diags ["Unknown SPDX license identifier: 'BSD3'"] + liftIO $ do + length diags @?= 1 + reduceDiag ^. L.range @?= Range (Position 3 24) (Position 4 0) + reduceDiag ^. L.severity @?= Just DiagnosticSeverity_Error + [codeAction] <- getLicenseAction "BSD-3-Clause" <$> getCodeActions doc (Range (Position 3 24) (Position 4 0)) + executeCodeAction codeAction + contents <- documentContents doc + liftIO $ + contents + @?= T.unlines + [ "cabal-version: 3.0" + , "name: licenseCodeAction" + , "version: 0.1.0.0" + , "license: BSD-3-Clause" + , "" + , "library" + , " build-depends: base" + , " default-language: Haskell2010" + ] + , runCabalTestCaseSession "Apache-2.0" "" $ do + doc <- openDoc "licenseCodeAction2.cabal" "cabal" + diags <- waitForDiagnosticsFromSource doc "cabal" + -- test if it supports typos in license name, here 'apahe' + reduceDiag <- liftIO $ inspectDiagnostic diags ["Unknown SPDX license identifier: 'APAHE'"] + liftIO $ do + length diags @?= 1 + reduceDiag ^. L.range @?= Range (Position 3 25) (Position 4 0) + reduceDiag ^. L.severity @?= Just DiagnosticSeverity_Error + [codeAction] <- getLicenseAction "Apache-2.0" <$> getCodeActions doc (Range (Position 3 24) (Position 4 0)) + executeCodeAction codeAction + contents <- documentContents doc + liftIO $ + contents + @?= T.unlines + [ "cabal-version: 3.0" + , "name: licenseCodeAction2" + , "version: 0.1.0.0" + , "license: Apache-2.0" + , "" + , "library" + , " build-depends: base" + , " default-language: Haskell2010" + ] + , runCabalGoldenSession "Code Actions - Can fix field names" "code-actions" "FieldSuggestions" $ \doc -> do + _ <- waitForDiagnosticsFrom doc + cas <- Maybe.mapMaybe (^? _R) <$> getAllCodeActions doc + -- Filter out the code actions we want to invoke. + -- We only want to invoke Code Actions with certain titles, and + -- we want to invoke them only once, not once for each cursor request. + -- 'getAllCodeActions' iterates over each cursor position and requests code actions. + let selectedCas = nubOrdOn (^. L.title) $ filter + (\ca -> (ca ^. L.title) `elem` + [ "Replace with license" + , "Replace with build-type" + , "Replace with extra-doc-files" + , "Replace with ghc-options" + , "Replace with location" + , "Replace with default-language" + , "Replace with import" + , "Replace with build-depends" + , "Replace with main-is" + , "Replace with hs-source-dirs" + ]) cas + mapM_ executeCodeAction selectedCas + pure () + , cabalAddTests + ] where getLicenseAction :: T.Text -> [Command |? 
CodeAction] -> [CodeAction] getLicenseAction license codeActions = do InR action@CodeAction{_title} <- codeActions guard (_title == "Replace with " <> license) pure action + +-- ---------------------------------------------------------------------------- +-- Hover Tests +-- ---------------------------------------------------------------------------- + +hoverTests :: TestTree +hoverTests = testGroup "Hover" + [ hoverOnDependencyTests + ] + +hoverOnDependencyTests :: TestTree +hoverOnDependencyTests = testGroup "Hover Dependency" + [ hoverContainsTest "base with separated version" "hover-deps.cabal" (Position 6 25) "[Documentation](https://hackage.haskell.org/package/base)" + , hoverContainsTest "aeson with not separated version " "hover-deps.cabal" (Position 7 25) "[Documentation](https://hackage.haskell.org/package/aeson)" + , hoverContainsTest "lens no version" "hover-deps.cabal" (Position 7 42) "[Documentation](https://hackage.haskell.org/package/lens)" + + , hoverIsNullTest "name has no documentation" "hover-deps.cabal" (Position 1 25) + , hoverIsNullTest "exposed-modules has no documentation" "hover-deps.cabal" (Position 5 25) + , hoverIsNullTest "hs-source-dirs has no documentation" "hover-deps.cabal" (Position 8 25) + ] + where + hoverContainsTest :: TestName -> FilePath -> Position -> T.Text -> TestTree + hoverContainsTest testName cabalFile pos containedText = + runCabalTestCaseSession testName "hover" $ do + doc <- openDoc cabalFile "cabal" + h <- getHover doc pos + case h of + Nothing -> liftIO $ assertFailure "No hover" + Just (Hover contents _) -> case contents of + InL (MarkupContent _ txt) -> do + liftIO + $ assertBool ("Failed to find `" <> T.unpack containedText <> "` in hover message: " <> T.unpack txt) + $ containedText `T.isInfixOf` txt + _ -> liftIO $ assertFailure "Unexpected content type" + closeDoc doc + + hoverIsNullTest :: TestName -> FilePath -> Position -> TestTree + hoverIsNullTest testName cabalFile pos = + runCabalTestCaseSession testName "hover" $ do + doc <- openDoc cabalFile "cabal" + h <- getHover doc pos + liftIO $ assertBool ("Found hover `" <> show h <> "`") $ Maybe.isNothing h + closeDoc doc diff --git a/plugins/hls-cabal-plugin/test/Outline.hs b/plugins/hls-cabal-plugin/test/Outline.hs new file mode 100644 index 0000000000..cb7279e387 --- /dev/null +++ b/plugins/hls-cabal-plugin/test/Outline.hs @@ -0,0 +1,103 @@ +{-# LANGUAGE OverloadedStrings #-} + +module Outline ( + outlineTests, +) where + +import Language.LSP.Protocol.Types (DocumentSymbol (..), + Position (..), Range (..)) +import qualified Test.Hls as T +import Utils + +testSymbols :: (T.HasCallStack) => T.TestName -> FilePath -> [DocumentSymbol] -> T.TestTree +testSymbols testName path expectedSymbols = + runCabalTestCaseSession testName "outline-cabal" $ do + docId <- T.openDoc path "cabal" + symbols <- T.getDocumentSymbols docId + T.liftIO $ symbols T.@?= Right expectedSymbols + +outlineTests :: T.TestTree +outlineTests = + T.testGroup + "Cabal Outline Tests" + [ testSymbols + "cabal Field outline test" + "field.cabal" + [fieldDocumentSymbol] + , testSymbols + "cabal FieldLine outline test" + "fieldline.cabal" + [fieldLineDocumentSymbol] + , testSymbols + "cabal Section outline test" + "section.cabal" + [sectionDocumentSymbol] + , testSymbols + "cabal SectionArg outline test" + "sectionarg.cabal" + [sectionArgDocumentSymbol] + ] + where + fieldDocumentSymbol :: DocumentSymbol + fieldDocumentSymbol = + ( defDocumentSymbol + ( Range { _start = Position{_line = 0, _character = 0} + , _end = 
Position{_line = 0, _character = 8} }) + ) + { _name = "homepage" + , _kind = T.SymbolKind_Field + , _children = Nothing + } + fieldLineDocumentSymbol :: DocumentSymbol + fieldLineDocumentSymbol = + ( defDocumentSymbol + ( Range { _start = Position{_line = 0, _character = 0} + , _end = Position{_line = 0, _character = 13} }) + ) + { _name = "cabal-version" + , _kind = T.SymbolKind_Field + , _children = Nothing -- the values of fieldLine are removed from the outline + } + sectionDocumentSymbol :: DocumentSymbol + sectionDocumentSymbol = + ( defDocumentSymbol + ( Range { _start = Position{_line = 0, _character = 2} + , _end = Position{_line = 0, _character = 15} }) + ) + { _name = "build-depends" + , _kind = T.SymbolKind_Field + , _children = Nothing -- the values of fieldLine are removed from the outline + } + sectionArgDocumentSymbol :: DocumentSymbol + sectionArgDocumentSymbol = + ( defDocumentSymbol + ( Range { _start = Position{_line = 0, _character = 2} + , _end = Position{_line = 0, _character = 19} }) + ) + { _name = "if os ( windows )" + , _kind = T.SymbolKind_Object + , _children = Just $ [sectionArgChildrenDocumentSymbol] + } + sectionArgChildrenDocumentSymbol :: DocumentSymbol + sectionArgChildrenDocumentSymbol = + ( defDocumentSymbol + ( Range { _start = Position{_line = 1, _character = 4} + , _end = Position{_line = 1, _character = 17} }) + ) + { _name = "build-depends" + , _kind = T.SymbolKind_Field + , _children = Nothing + } + +defDocumentSymbol :: Range -> DocumentSymbol +defDocumentSymbol range = + DocumentSymbol + { _detail = Nothing + , _deprecated = Nothing + , _name = "" + , _kind = T.SymbolKind_File + , _range = range + , _selectionRange = range + , _children = Nothing + , _tags = Nothing + } diff --git a/plugins/hls-cabal-plugin/test/Utils.hs b/plugins/hls-cabal-plugin/test/Utils.hs index cd83ba623e..2733f94fd0 100644 --- a/plugins/hls-cabal-plugin/test/Utils.hs +++ b/plugins/hls-cabal-plugin/test/Utils.hs @@ -1,19 +1,27 @@ +{-# LANGUAGE DataKinds #-} {-# LANGUAGE DisambiguateRecordFields #-} {-# LANGUAGE OverloadedStrings #-} module Utils where +import Control.Monad (guard) import Data.List (sort) +import Data.Proxy (Proxy (Proxy)) import qualified Data.Text as T -import Ide.Plugin.Cabal (descriptor) +import Ide.Plugin.Cabal (descriptor, + haskellInteractionDescriptor) import qualified Ide.Plugin.Cabal import Ide.Plugin.Cabal.Completion.Types import System.FilePath import Test.Hls + cabalPlugin :: PluginTestDescriptor Ide.Plugin.Cabal.Log cabalPlugin = mkPluginTestDescriptor descriptor "cabal" +cabalHaskellPlugin :: PluginTestDescriptor Ide.Plugin.Cabal.Log +cabalHaskellPlugin = mkPluginTestDescriptor haskellInteractionDescriptor "cabal-haskell" + simpleCabalPrefixInfoFromPos :: Position -> T.Text -> CabalPrefixInfo simpleCabalPrefixInfoFromPos pos prefix = CabalPrefixInfo @@ -42,13 +50,35 @@ filePathComplTestDir = addTrailingPathSeparator $ testDataDir "filepath-comp runCabalTestCaseSession :: TestName -> FilePath -> Session () -> TestTree runCabalTestCaseSession title subdir = testCase title . runCabalSession subdir +runHaskellTestCaseSession :: TestName -> FilePath -> Session () -> TestTree +runHaskellTestCaseSession title subdir = testCase title . runHaskellAndCabalSession subdir + runCabalSession :: FilePath -> Session a -> IO a runCabalSession subdir = failIfSessionTimeout . runSessionWithServer def cabalPlugin (testDataDir subdir) +runHaskellAndCabalSession :: FilePath -> Session a -> IO a +runHaskellAndCabalSession subdir = + failIfSessionTimeout . 
runSessionWithServer def (cabalPlugin <> cabalHaskellPlugin) (testDataDir </> subdir) + +runCabalGoldenSession :: TestName -> FilePath -> FilePath -> (TextDocumentIdentifier -> Session ()) -> TestTree +runCabalGoldenSession title subdir fp act = goldenWithCabalDoc def cabalPlugin title testDataDir (subdir </> fp) "golden" "cabal" act + testDataDir :: FilePath testDataDir = "plugins" </> "hls-cabal-plugin" </> "test" </> "testdata" +-- | These functions are used to detect cabal kicks +-- and to look at diagnostics for cabal files. +-- Kicks are run every time a Shake session is run or restarted. +cabalKickDone :: Session () +cabalKickDone = kick (Proxy @"kick/done/cabal") >>= guard . not . null + +cabalKickStart :: Session () +cabalKickStart = kick (Proxy @"kick/start/cabal") >>= guard . not . null + +cabalCaptureKick :: Session [Diagnostic] +cabalCaptureKick = captureKickDiagnostics cabalKickStart cabalKickDone + -- | list comparison where the order in the list is irrelevant (@?==) :: (HasCallStack, Ord a, Show a) => [a] -> [a] -> Assertion (@?==) l1 l2 = sort l1 @?= sort l2 diff --git a/plugins/hls-cabal-plugin/test/testdata/cabal-add-testdata/cabal-add-bench/bench/Main.hs b/plugins/hls-cabal-plugin/test/testdata/cabal-add-testdata/cabal-add-bench/bench/Main.hs new file mode 100644 index 0000000000..e5c42398f2 --- /dev/null +++ b/plugins/hls-cabal-plugin/test/testdata/cabal-add-testdata/cabal-add-bench/bench/Main.hs @@ -0,0 +1,6 @@ +module Main (main) where + +import Data.List.Split + +main :: IO () +main = putStrLn "Test suite not yet implemented." diff --git a/plugins/hls-cabal-plugin/test/testdata/cabal-add-testdata/cabal-add-bench/cabal-add-bench.cabal b/plugins/hls-cabal-plugin/test/testdata/cabal-add-testdata/cabal-add-bench/cabal-add-bench.cabal new file mode 100644 index 0000000000..b58a6d3302 --- /dev/null +++ b/plugins/hls-cabal-plugin/test/testdata/cabal-add-testdata/cabal-add-bench/cabal-add-bench.cabal @@ -0,0 +1,17 @@ +cabal-version: 2.4 +name: cabal-add-bench +version: 0.1.0.0 +license: NONE +author: George Gerasev +maintainer: george30032002@gmail.com +build-type: Simple + +common warnings + ghc-options: -Wall + +benchmark benchmark + type: exitcode-stdio-1.0 + ghc-options: -threaded + main-is: Main.hs + hs-source-dirs: bench + build-depends: base diff --git a/plugins/hls-cabal-plugin/test/testdata/cabal-add-testdata/cabal-add-exe/cabal-add-exe.cabal b/plugins/hls-cabal-plugin/test/testdata/cabal-add-testdata/cabal-add-exe/cabal-add-exe.cabal new file mode 100644 index 0000000000..a3499bbf97 --- /dev/null +++ b/plugins/hls-cabal-plugin/test/testdata/cabal-add-testdata/cabal-add-exe/cabal-add-exe.cabal @@ -0,0 +1,15 @@ +cabal-version: 2.4 +name: cabal-add-exe +version: 0.1.0.0 +build-type: Simple + +executable cabal-add-exe + main-is: Main.hs + hs-source-dirs: src + ghc-options: -Wall + build-depends: base + default-language: Haskell2010 + +library + build-depends: base >= 4 && < 5 + ghc-options: -Wall diff --git a/plugins/hls-cabal-plugin/test/testdata/cabal-add-testdata/cabal-add-exe/src/Main.hs b/plugins/hls-cabal-plugin/test/testdata/cabal-add-testdata/cabal-add-exe/src/Main.hs new file mode 100644 index 0000000000..0bf3e99dae --- /dev/null +++ b/plugins/hls-cabal-plugin/test/testdata/cabal-add-testdata/cabal-add-exe/src/Main.hs @@ -0,0 +1,5 @@ +module Main where + +import Data.List.Split + +main = putStrLn "Hello, Haskell!"
\ No newline at end of file diff --git a/plugins/hls-cabal-plugin/test/testdata/cabal-add-testdata/cabal-add-lib/cabal-add-lib.cabal b/plugins/hls-cabal-plugin/test/testdata/cabal-add-testdata/cabal-add-lib/cabal-add-lib.cabal new file mode 100644 index 0000000000..b00b45bb6b --- /dev/null +++ b/plugins/hls-cabal-plugin/test/testdata/cabal-add-testdata/cabal-add-lib/cabal-add-lib.cabal @@ -0,0 +1,17 @@ +cabal-version: 2.4 +name: cabal-add-lib +version: 0.1.0.0 +license: NONE +author: George Gerasev +maintainer: george30032002@gmail.com +build-type: Simple + +common warnings + ghc-options: -Wall + +library + import: warnings + exposed-modules: MyLib + build-depends: base + hs-source-dirs: src + default-language: Haskell2010 diff --git a/plugins/hls-cabal-plugin/test/testdata/cabal-add-testdata/cabal-add-lib/src/MyLib.hs b/plugins/hls-cabal-plugin/test/testdata/cabal-add-testdata/cabal-add-lib/src/MyLib.hs new file mode 100644 index 0000000000..bbb506d001 --- /dev/null +++ b/plugins/hls-cabal-plugin/test/testdata/cabal-add-testdata/cabal-add-lib/src/MyLib.hs @@ -0,0 +1,6 @@ +module MyLib (someFunc) where + +import Data.List.Split + +someFunc :: IO () +someFunc = putStrLn "someFunc" diff --git a/plugins/hls-cabal-plugin/test/testdata/cabal-add-testdata/cabal-add-multitarget/bench/Main.hs b/plugins/hls-cabal-plugin/test/testdata/cabal-add-testdata/cabal-add-multitarget/bench/Main.hs new file mode 100644 index 0000000000..e5c42398f2 --- /dev/null +++ b/plugins/hls-cabal-plugin/test/testdata/cabal-add-testdata/cabal-add-multitarget/bench/Main.hs @@ -0,0 +1,6 @@ +module Main (main) where + +import Data.List.Split + +main :: IO () +main = putStrLn "Test suite not yet implemented." diff --git a/plugins/hls-cabal-plugin/test/testdata/cabal-add-testdata/cabal-add-multitarget/cabal-add-multitarget.cabal b/plugins/hls-cabal-plugin/test/testdata/cabal-add-testdata/cabal-add-multitarget/cabal-add-multitarget.cabal new file mode 100644 index 0000000000..677986768e --- /dev/null +++ b/plugins/hls-cabal-plugin/test/testdata/cabal-add-testdata/cabal-add-multitarget/cabal-add-multitarget.cabal @@ -0,0 +1,33 @@ +cabal-version: 2.4 +name: cabal-add-multitarget +version: 0.1.0.0 +build-type: Simple + +executable cabal-add-exe + main-is: Main.hs + hs-source-dirs: src + ghc-options: -Wall + build-depends: base + default-language: Haskell2010 + +library + exposed-modules: MyLib + other-modules: InternalLib + build-depends: base >= 4 && < 5 + hs-source-dirs: lib + ghc-options: -Wall + +test-suite cabal-add-tests-test + main-is: Main.hs + hs-source-dirs: test + type: exitcode-stdio-1.0 + build-depends: base + default-language: Haskell2010 + +benchmark benchmark + main-is: Main.hs + build-depends: base + hs-source-dirs: bench + type: exitcode-stdio-1.0 + ghc-options: -threaded + diff --git a/plugins/hls-cabal-plugin/test/testdata/cabal-add-testdata/cabal-add-multitarget/lib/InternalLib.hs b/plugins/hls-cabal-plugin/test/testdata/cabal-add-testdata/cabal-add-multitarget/lib/InternalLib.hs new file mode 100644 index 0000000000..5a3dd79258 --- /dev/null +++ b/plugins/hls-cabal-plugin/test/testdata/cabal-add-testdata/cabal-add-multitarget/lib/InternalLib.hs @@ -0,0 +1,6 @@ +module InternalLib (internalFunc) where + +import Data.List.Split + +internalFunc :: IO () +internalFunc = putStrLn "internalFunc" diff --git a/plugins/hls-cabal-plugin/test/testdata/cabal-add-testdata/cabal-add-multitarget/lib/MyLib.hs b/plugins/hls-cabal-plugin/test/testdata/cabal-add-testdata/cabal-add-multitarget/lib/MyLib.hs new file mode 100644 
index 0000000000..bbb506d001 --- /dev/null +++ b/plugins/hls-cabal-plugin/test/testdata/cabal-add-testdata/cabal-add-multitarget/lib/MyLib.hs @@ -0,0 +1,6 @@ +module MyLib (someFunc) where + +import Data.List.Split + +someFunc :: IO () +someFunc = putStrLn "someFunc" diff --git a/plugins/hls-cabal-plugin/test/testdata/cabal-add-testdata/cabal-add-multitarget/src/Main.hs b/plugins/hls-cabal-plugin/test/testdata/cabal-add-testdata/cabal-add-multitarget/src/Main.hs new file mode 100644 index 0000000000..0bf3e99dae --- /dev/null +++ b/plugins/hls-cabal-plugin/test/testdata/cabal-add-testdata/cabal-add-multitarget/src/Main.hs @@ -0,0 +1,5 @@ +module Main where + +import Data.List.Split + +main = putStrLn "Hello, Haskell!" \ No newline at end of file diff --git a/plugins/hls-cabal-plugin/test/testdata/cabal-add-testdata/cabal-add-multitarget/test/Main.hs b/plugins/hls-cabal-plugin/test/testdata/cabal-add-testdata/cabal-add-multitarget/test/Main.hs new file mode 100644 index 0000000000..e5c42398f2 --- /dev/null +++ b/plugins/hls-cabal-plugin/test/testdata/cabal-add-testdata/cabal-add-multitarget/test/Main.hs @@ -0,0 +1,6 @@ +module Main (main) where + +import Data.List.Split + +main :: IO () +main = putStrLn "Test suite not yet implemented." diff --git a/plugins/hls-cabal-plugin/test/testdata/cabal-add-testdata/cabal-add-packageYaml/cabal-add-packageYaml.cabal b/plugins/hls-cabal-plugin/test/testdata/cabal-add-testdata/cabal-add-packageYaml/cabal-add-packageYaml.cabal new file mode 100644 index 0000000000..3ac549aa60 --- /dev/null +++ b/plugins/hls-cabal-plugin/test/testdata/cabal-add-testdata/cabal-add-packageYaml/cabal-add-packageYaml.cabal @@ -0,0 +1,17 @@ +cabal-version: 2.4 +name: cabal-add-packageYaml +version: 0.1.0.0 +license: NONE +author: George Gerasev +maintainer: george30032002@gmail.com +build-type: Simple + +common warnings + ghc-options: -Wall + +benchmark benchmark-packageYaml + type: exitcode-stdio-1.0 + ghc-options: -threaded + main-is: Main.hs + hs-source-dirs: bench + build-depends: base diff --git a/plugins/hls-cabal-plugin/test/testdata/cabal-add-testdata/cabal-add-packageYaml/package.yaml b/plugins/hls-cabal-plugin/test/testdata/cabal-add-testdata/cabal-add-packageYaml/package.yaml new file mode 100644 index 0000000000..e69de29bb2 diff --git a/plugins/hls-cabal-plugin/test/testdata/cabal-add-testdata/cabal-add-packageYaml/src/Main.hs b/plugins/hls-cabal-plugin/test/testdata/cabal-add-testdata/cabal-add-packageYaml/src/Main.hs new file mode 100644 index 0000000000..e5c42398f2 --- /dev/null +++ b/plugins/hls-cabal-plugin/test/testdata/cabal-add-testdata/cabal-add-packageYaml/src/Main.hs @@ -0,0 +1,6 @@ +module Main (main) where + +import Data.List.Split + +main :: IO () +main = putStrLn "Test suite not yet implemented." 
diff --git a/plugins/hls-cabal-plugin/test/testdata/cabal-add-testdata/cabal-add-tests/cabal-add-tests.cabal b/plugins/hls-cabal-plugin/test/testdata/cabal-add-testdata/cabal-add-tests/cabal-add-tests.cabal new file mode 100644 index 0000000000..9adc498231 --- /dev/null +++ b/plugins/hls-cabal-plugin/test/testdata/cabal-add-testdata/cabal-add-tests/cabal-add-tests.cabal @@ -0,0 +1,26 @@ +cabal-version: 2.4 +name: cabal-add-tests +version: 0.1.0.0 +license: NONE +author: George Gerasev +maintainer: george30032002@gmail.com +build-type: Simple + +common warnings + ghc-options: -Wall + +test-suite cabal-add-tests-test + import: warnings + default-language: Haskell2010 + type: exitcode-stdio-1.0 + hs-source-dirs: test + main-is: Main.hs + build-depends: base + +test-suite cabal-add-tests-test-package-imports + import: warnings + default-language: Haskell2010 + type: exitcode-stdio-1.0 + hs-source-dirs: test + main-is: MainPackageImports.hs + build-depends: base diff --git a/plugins/hls-cabal-plugin/test/testdata/cabal-add-testdata/cabal-add-tests/test/Main.hs b/plugins/hls-cabal-plugin/test/testdata/cabal-add-testdata/cabal-add-tests/test/Main.hs new file mode 100644 index 0000000000..e5c42398f2 --- /dev/null +++ b/plugins/hls-cabal-plugin/test/testdata/cabal-add-testdata/cabal-add-tests/test/Main.hs @@ -0,0 +1,6 @@ +module Main (main) where + +import Data.List.Split + +main :: IO () +main = putStrLn "Test suite not yet implemented." diff --git a/plugins/hls-cabal-plugin/test/testdata/cabal-add-testdata/cabal-add-tests/test/MainPackageImports.hs b/plugins/hls-cabal-plugin/test/testdata/cabal-add-testdata/cabal-add-tests/test/MainPackageImports.hs new file mode 100644 index 0000000000..753dd165dd --- /dev/null +++ b/plugins/hls-cabal-plugin/test/testdata/cabal-add-testdata/cabal-add-tests/test/MainPackageImports.hs @@ -0,0 +1,8 @@ +{-# LANGUAGE PackageImports #-} + +module Main (main) where + +import "split" Data.List.Split + +main :: IO () +main = putStrLn "Test suite not yet implemented." 
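These cabal-add fixtures share one idea: each module imports Data.List.Split while its stanza only declares base, so the "Add dependency" code action exercised by the CabalAdd tests has a concrete missing package to offer. As an illustrative sketch only (not part of the patch; the exact layout and any version bounds are produced by cabal-add), applying the action to the test-suite stanza above would extend its build-depends roughly like this:

test-suite cabal-add-tests-test
    import:           warnings
    default-language: Haskell2010
    type:             exitcode-stdio-1.0
    hs-source-dirs:   test
    main-is:          Main.hs
    build-depends:    base, split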
diff --git a/plugins/hls-cabal-plugin/test/testdata/cabal-add-testdata/cabal.project b/plugins/hls-cabal-plugin/test/testdata/cabal-add-testdata/cabal.project new file mode 100644 index 0000000000..21eb1f63eb --- /dev/null +++ b/plugins/hls-cabal-plugin/test/testdata/cabal-add-testdata/cabal.project @@ -0,0 +1,6 @@ +packages: cabal-add-exe + cabal-add-lib + cabal-add-tests + cabal-add-bench + cabal-add-multitarget + cabal-add-packageYaml diff --git a/plugins/hls-cabal-plugin/test/testdata/cabal-add-testdata/hie.yaml b/plugins/hls-cabal-plugin/test/testdata/cabal-add-testdata/hie.yaml new file mode 100644 index 0000000000..f0c7014d7f --- /dev/null +++ b/plugins/hls-cabal-plugin/test/testdata/cabal-add-testdata/hie.yaml @@ -0,0 +1,2 @@ +cradle: + cabal: \ No newline at end of file diff --git a/plugins/hls-cabal-plugin/test/testdata/code-actions/FieldSuggestions.cabal b/plugins/hls-cabal-plugin/test/testdata/code-actions/FieldSuggestions.cabal new file mode 100644 index 0000000000..e32f77b614 --- /dev/null +++ b/plugins/hls-cabal-plugin/test/testdata/code-actions/FieldSuggestions.cabal @@ -0,0 +1,36 @@ +cabal-version: 3.0 +name: FieldSuggestions +version: 0.1.0 +licens: BSD-3-Clause + +buil-type: Simple + +extra-doc-fils: + ChangeLog + +-- Default warnings in HLS +common warnings + ghc-option: -Wall + -Wredundant-constraints + -Wunused-packages + -Wno-name-shadowing + -Wno-unticked-promoted-constructors + +source-repository head + type: git + loc: fake + +library + default-lang: Haskell2010 + -- Import isn't supported right now. + impor: warnings + build-dep: base + +executable my-exe + mains: Main.hs + +test-suite Test + type: exitcode-stdio-1.0 + main-is: Test.hs + hs-source-drs: + diff --git a/plugins/hls-cabal-plugin/test/testdata/code-actions/FieldSuggestions.golden.cabal b/plugins/hls-cabal-plugin/test/testdata/code-actions/FieldSuggestions.golden.cabal new file mode 100644 index 0000000000..99bf84dfd7 --- /dev/null +++ b/plugins/hls-cabal-plugin/test/testdata/code-actions/FieldSuggestions.golden.cabal @@ -0,0 +1,36 @@ +cabal-version: 3.0 +name: FieldSuggestions +version: 0.1.0 +license: BSD-3-Clause + +build-type: Simple + +extra-doc-files: + ChangeLog + +-- Default warnings in HLS +common warnings + ghc-options: -Wall + -Wredundant-constraints + -Wunused-packages + -Wno-name-shadowing + -Wno-unticked-promoted-constructors + +source-repository head + type: git + location: fake + +library + default-language: Haskell2010 + -- Import isn't supported right now. 
+ import: warnings + build-depends: base + +executable my-exe + main-is: Main.hs + +test-suite Test + type: exitcode-stdio-1.0 + main-is: Test.hs + hs-source-dirs: + diff --git a/plugins/hls-cabal-plugin/test/testdata/completion/autogen-completion.cabal b/plugins/hls-cabal-plugin/test/testdata/completion/autogen-completion.cabal new file mode 100644 index 0000000000..dd5c86d339 --- /dev/null +++ b/plugins/hls-cabal-plugin/test/testdata/completion/autogen-completion.cabal @@ -0,0 +1,25 @@ +cabal-version: 3.0 +name: autogen-completion +version: 0.1.0.0 + +library + hs-source-dirs: src + autogen- + +executable autoexe + main-is: Main.hs + hs-source-dirs: src + autogen- + +test-suite autotest + type: exitcode-stdio-1.0 + hs-source-dirs: src + autogen- + +benchmark autobench + type: exitcode-stdio-1.0 + hs-source-dirs: src + autogen- + +common defaults + autogen- diff --git a/plugins/hls-cabal-plugin/test/testdata/goto-definition/common-section/simple-with-common.cabal b/plugins/hls-cabal-plugin/test/testdata/goto-definition/common-section/simple-with-common.cabal new file mode 100644 index 0000000000..c71e369b30 --- /dev/null +++ b/plugins/hls-cabal-plugin/test/testdata/goto-definition/common-section/simple-with-common.cabal @@ -0,0 +1,62 @@ +cabal-version: 3.0 +name: simple-cabal +version: 0.1.0.0 +license: MIT + +-- Range : (6, 0) - (7, 22) +common warnings-0 + ghc-options: -Wall + +-- Range : (10, 0) - (17, 40) +common warnings-1 + ghc-options: -Wall + -Wredundant-constraints + -Wunused-packages + + -Wno-name-shadowing + + -Wno-unticked-promoted-constructors + +-- Range : (20, 0) - (23, 34) +common warnings-2 + ghc-options: -Wall + -Wredundant-constraints + -Wunused-packages + +library + + import: warnings-0 +-- ^ Position: (27, 16), middle of identifier + + import: warnings-1 +-- ^ Position: (30, 12), left of identifier + + import: warnings-2 +-- ^ Position: (33, 22), right of identifier + + import: warnings-0 +-- ^ Position: (36, 20), left of '-' in identifier + + import: warnings-1 +-- ^ Position: (39, 19), right of "-" in identifier + + import: warnings-2,warnings-1,warnings-0 +-- ^ Position: (42, 16), identifier in identifier list + + import: warnings-2,warnings-1,warnings-0 +-- ^ Position: (45, 33), left of ',' right of identifier + + import: warnings-2,warnings-1,warnings-0 +-- ^ Position: (48, 34), right of ',' left of identifier + + import: warnings-2, warnings-1,warnings-0 +-- ^ Position: (51, 37), right of ',' left of space + + import: warnings-0 +-- ^ Position: (54, 11), right of ':' left of space + + import: warnings-0 +-- ^ Position: (57, 8), not a definition + + -- EOL +-- ^ Position: (59, 7), empty space \ No newline at end of file diff --git a/plugins/hls-cabal-plugin/test/testdata/goto-definition/modules/module-examples.cabal b/plugins/hls-cabal-plugin/test/testdata/goto-definition/modules/module-examples.cabal new file mode 100644 index 0000000000..24c2bb854e --- /dev/null +++ b/plugins/hls-cabal-plugin/test/testdata/goto-definition/modules/module-examples.cabal @@ -0,0 +1,51 @@ +cabal-version: 3.0 +name: module-examples +version: 0.1.0.0 + + +library + exposed-modules: Library.Lib +-- ^ Position: (6, 22) +-- ^ Position: (6, 33) + other-modules: Library.Other.OtherLib +-- ^ Position: (9, 22) +-- ^ Position: (9, 44) + + build-depends: base + hs-source-dirs: src + default-language: Haskell2010 + +executable exec + hs-source-dirs: src/exe + main-is: Main.hs + build-depends: base + other-modules: + Config +-- ^ Position: (22, 8) +-- ^ Position: (22, 14) + +test-suite 
module-examples-test + type: exitcode-stdio-1.0 + hs-source-dirs: src/test + main-is: Main.hs + other-modules: + Config +-- ^ Position: (31, 8) +-- ^ Position: (31, 14) + Library +-- ^ Position: (34, 8) +-- ^ Position: (34, 15) + build-depends: base + +benchmark benchmark + type: exitcode-stdio-1.0 + main-is: Main.hs + hs-source-dirs: src/bench + build-depends: base + other-modules: + Config +-- ^ Position: (45, 28) +-- ^ Position: (45, 34) + NotExistent +-- ^ Position: (48, 19) +-- ^ Position: (48, 30) diff --git a/plugins/hls-cabal-plugin/test/testdata/goto-definition/modules/src/Library/Lib.hs b/plugins/hls-cabal-plugin/test/testdata/goto-definition/modules/src/Library/Lib.hs new file mode 100644 index 0000000000..e2cde3780b --- /dev/null +++ b/plugins/hls-cabal-plugin/test/testdata/goto-definition/modules/src/Library/Lib.hs @@ -0,0 +1 @@ +module Library.Lib where diff --git a/plugins/hls-cabal-plugin/test/testdata/goto-definition/modules/src/Library/Other/OtherLib.hs b/plugins/hls-cabal-plugin/test/testdata/goto-definition/modules/src/Library/Other/OtherLib.hs new file mode 100644 index 0000000000..625be777dc --- /dev/null +++ b/plugins/hls-cabal-plugin/test/testdata/goto-definition/modules/src/Library/Other/OtherLib.hs @@ -0,0 +1 @@ +module Library.Other.OtherLib where diff --git a/plugins/hls-cabal-plugin/test/testdata/goto-definition/modules/src/bench/Config.hs b/plugins/hls-cabal-plugin/test/testdata/goto-definition/modules/src/bench/Config.hs new file mode 100644 index 0000000000..6ea268c214 --- /dev/null +++ b/plugins/hls-cabal-plugin/test/testdata/goto-definition/modules/src/bench/Config.hs @@ -0,0 +1 @@ +module Config where diff --git a/plugins/hls-cabal-plugin/test/testdata/goto-definition/modules/src/bench/Main.hs b/plugins/hls-cabal-plugin/test/testdata/goto-definition/modules/src/bench/Main.hs new file mode 100644 index 0000000000..6640b61707 --- /dev/null +++ b/plugins/hls-cabal-plugin/test/testdata/goto-definition/modules/src/bench/Main.hs @@ -0,0 +1,3 @@ +module Main where + +main = undefined \ No newline at end of file diff --git a/plugins/hls-cabal-plugin/test/testdata/goto-definition/modules/src/exe/Config.hs b/plugins/hls-cabal-plugin/test/testdata/goto-definition/modules/src/exe/Config.hs new file mode 100644 index 0000000000..3a2489708e --- /dev/null +++ b/plugins/hls-cabal-plugin/test/testdata/goto-definition/modules/src/exe/Config.hs @@ -0,0 +1 @@ +module Confing where \ No newline at end of file diff --git a/plugins/hls-cabal-plugin/test/testdata/goto-definition/modules/src/exe/Main.hs b/plugins/hls-cabal-plugin/test/testdata/goto-definition/modules/src/exe/Main.hs new file mode 100644 index 0000000000..6640b61707 --- /dev/null +++ b/plugins/hls-cabal-plugin/test/testdata/goto-definition/modules/src/exe/Main.hs @@ -0,0 +1,3 @@ +module Main where + +main = undefined \ No newline at end of file diff --git a/plugins/hls-cabal-plugin/test/testdata/goto-definition/modules/src/test/Config.hs b/plugins/hls-cabal-plugin/test/testdata/goto-definition/modules/src/test/Config.hs new file mode 100644 index 0000000000..39e39fc16a --- /dev/null +++ b/plugins/hls-cabal-plugin/test/testdata/goto-definition/modules/src/test/Config.hs @@ -0,0 +1 @@ +module Config where \ No newline at end of file diff --git a/plugins/hls-cabal-plugin/test/testdata/goto-definition/modules/src/test/Library.hs b/plugins/hls-cabal-plugin/test/testdata/goto-definition/modules/src/test/Library.hs new file mode 100644 index 0000000000..7899749de8 --- /dev/null +++ 
b/plugins/hls-cabal-plugin/test/testdata/goto-definition/modules/src/test/Library.hs @@ -0,0 +1 @@ +module Library where \ No newline at end of file diff --git a/plugins/hls-cabal-plugin/test/testdata/goto-definition/modules/src/test/Main.hs b/plugins/hls-cabal-plugin/test/testdata/goto-definition/modules/src/test/Main.hs new file mode 100644 index 0000000000..e69de29bb2 diff --git a/plugins/hls-cabal-plugin/test/testdata/goto-definition/simple-with-common.cabal b/plugins/hls-cabal-plugin/test/testdata/goto-definition/simple-with-common.cabal new file mode 100644 index 0000000000..c71e369b30 --- /dev/null +++ b/plugins/hls-cabal-plugin/test/testdata/goto-definition/simple-with-common.cabal @@ -0,0 +1,62 @@ +cabal-version: 3.0 +name: simple-cabal +version: 0.1.0.0 +license: MIT + +-- Range : (6, 0) - (7, 22) +common warnings-0 + ghc-options: -Wall + +-- Range : (10, 0) - (17, 40) +common warnings-1 + ghc-options: -Wall + -Wredundant-constraints + -Wunused-packages + + -Wno-name-shadowing + + -Wno-unticked-promoted-constructors + +-- Range : (20, 0) - (23, 34) +common warnings-2 + ghc-options: -Wall + -Wredundant-constraints + -Wunused-packages + +library + + import: warnings-0 +-- ^ Position: (27, 16), middle of identifier + + import: warnings-1 +-- ^ Position: (30, 12), left of identifier + + import: warnings-2 +-- ^ Position: (33, 22), right of identifier + + import: warnings-0 +-- ^ Position: (36, 20), left of '-' in identifier + + import: warnings-1 +-- ^ Position: (39, 19), right of "-" in identifier + + import: warnings-2,warnings-1,warnings-0 +-- ^ Position: (42, 16), identifier in identifier list + + import: warnings-2,warnings-1,warnings-0 +-- ^ Position: (45, 33), left of ',' right of identifier + + import: warnings-2,warnings-1,warnings-0 +-- ^ Position: (48, 34), right of ',' left of identifier + + import: warnings-2, warnings-1,warnings-0 +-- ^ Position: (51, 37), right of ',' left of space + + import: warnings-0 +-- ^ Position: (54, 11), right of ':' left of space + + import: warnings-0 +-- ^ Position: (57, 8), not a definition + + -- EOL +-- ^ Position: (59, 7), empty space \ No newline at end of file diff --git a/plugins/hls-cabal-plugin/test/testdata/hie.yaml b/plugins/hls-cabal-plugin/test/testdata/hie.yaml new file mode 100644 index 0000000000..95d800026a --- /dev/null +++ b/plugins/hls-cabal-plugin/test/testdata/hie.yaml @@ -0,0 +1,3 @@ +cradle: + direct: + arguments: [] \ No newline at end of file diff --git a/plugins/hls-cabal-plugin/test/testdata/hover/hover-deps.cabal b/plugins/hls-cabal-plugin/test/testdata/hover/hover-deps.cabal new file mode 100644 index 0000000000..ddc4a6107a --- /dev/null +++ b/plugins/hls-cabal-plugin/test/testdata/hover/hover-deps.cabal @@ -0,0 +1,10 @@ +cabal-version: 3.0 +name: hover-deps +version: 0.1.0.0 + +library + exposed-modules: Module + build-depends: base ^>=4.14.3.0 + , aeson==1.0.0.0 , lens + hs-source-dirs: src + default-language: Haskell2010 diff --git a/plugins/hls-cabal-plugin/test/testdata/outline-cabal/field.cabal b/plugins/hls-cabal-plugin/test/testdata/outline-cabal/field.cabal new file mode 100644 index 0000000000..c3e3d80df2 --- /dev/null +++ b/plugins/hls-cabal-plugin/test/testdata/outline-cabal/field.cabal @@ -0,0 +1 @@ +homepage: \ No newline at end of file diff --git a/plugins/hls-cabal-plugin/test/testdata/outline-cabal/fieldline.cabal b/plugins/hls-cabal-plugin/test/testdata/outline-cabal/fieldline.cabal new file mode 100644 index 0000000000..998369e5f1 --- /dev/null +++ 
b/plugins/hls-cabal-plugin/test/testdata/outline-cabal/fieldline.cabal @@ -0,0 +1 @@ +cabal-version: 3.0 diff --git a/plugins/hls-cabal-plugin/test/testdata/outline-cabal/section.cabal b/plugins/hls-cabal-plugin/test/testdata/outline-cabal/section.cabal new file mode 100644 index 0000000000..8a140c7517 --- /dev/null +++ b/plugins/hls-cabal-plugin/test/testdata/outline-cabal/section.cabal @@ -0,0 +1,2 @@ + build-depends: + base >=4.16 && <5 \ No newline at end of file diff --git a/plugins/hls-cabal-plugin/test/testdata/outline-cabal/sectionarg.cabal b/plugins/hls-cabal-plugin/test/testdata/outline-cabal/sectionarg.cabal new file mode 100644 index 0000000000..060d067377 --- /dev/null +++ b/plugins/hls-cabal-plugin/test/testdata/outline-cabal/sectionarg.cabal @@ -0,0 +1,2 @@ + if os(windows) + build-depends: Win32 \ No newline at end of file diff --git a/plugins/hls-cabal-plugin/test/testdata/unsupportedVersion.cabal b/plugins/hls-cabal-plugin/test/testdata/unsupportedVersion.cabal new file mode 100644 index 0000000000..328d373cd8 --- /dev/null +++ b/plugins/hls-cabal-plugin/test/testdata/unsupportedVersion.cabal @@ -0,0 +1,3 @@ +cabal-version: 99999.0 +name: invalid +version: 0.1.0.0 \ No newline at end of file diff --git a/plugins/hls-call-hierarchy-plugin/test/Main.hs b/plugins/hls-call-hierarchy-plugin/test/Main.hs index f356a0e278..31dad633e6 100644 --- a/plugins/hls-call-hierarchy-plugin/test/Main.hs +++ b/plugins/hls-call-hierarchy-plugin/test/Main.hs @@ -114,13 +114,13 @@ prepareCallHierarchyTests = [ testCase "1" $ do let contents = T.unlines ["{-# LANGUAGE TypeFamilies #-}", "data family A"] -- Since GHC 9.10 the range also includes the family name (and its parameters if any) - range = mkRange 1 0 1 (if ghcVersion == GHC910 then 13 else 11) + range = mkRange 1 0 1 (if ghcVersion >= GHC910 then 13 else 11) selRange = mkRange 1 12 1 13 expected = mkCallHierarchyItemT "A" SymbolKind_Function range selRange oneCaseWithCreate contents 1 12 expected , testCase "2" $ do let contents = T.unlines [ "{-# LANGUAGE TypeFamilies #-}" , "data family A a"] - range = mkRange 1 0 1 (if ghcVersion == GHC910 then 15 else 11) + range = mkRange 1 0 1 (if ghcVersion >= GHC910 then 15 else 11) selRange = mkRange 1 12 1 13 expected = mkCallHierarchyItemT "A" SymbolKind_Function range selRange oneCaseWithCreate contents 1 12 expected diff --git a/plugins/hls-change-type-signature-plugin/test/Main.hs b/plugins/hls-change-type-signature-plugin/test/Main.hs index d34e19ea4f..57541b4736 100644 --- a/plugins/hls-change-type-signature-plugin/test/Main.hs +++ b/plugins/hls-change-type-signature-plugin/test/Main.hs @@ -39,7 +39,7 @@ test :: TestTree test = testGroup "changeTypeSignature" [ testRegexes , codeActionTest "TExpectedActual" 4 11 - , knownBrokenForGhcVersions [GHC92 .. GHC910] "Error Message in 9.2+ does not provide enough info" $ + , knownBrokenForGhcVersions [GHC94 .. 
GHC912] "Error Message in 9.2+ does not provide enough info" $ codeActionTest "TRigidType" 4 14 , codeActionTest "TRigidType2" 4 6 , codeActionTest "TLocalBinding" 7 22 diff --git a/plugins/hls-class-plugin/src/Ide/Plugin/Class/CodeAction.hs b/plugins/hls-class-plugin/src/Ide/Plugin/Class/CodeAction.hs index fa2a1dd46c..5ff79e2e37 100644 --- a/plugins/hls-class-plugin/src/Ide/Plugin/Class/CodeAction.hs +++ b/plugins/hls-class-plugin/src/Ide/Plugin/Class/CodeAction.hs @@ -24,6 +24,7 @@ import qualified Data.Set as Set import qualified Data.Text as T import Development.IDE import Development.IDE.Core.Compile (sourceTypecheck) +import Development.IDE.Core.FileStore (getVersionedTextDoc) import Development.IDE.Core.PluginUtils import Development.IDE.Core.PositionMapping (fromCurrentRange) import Development.IDE.GHC.Compat @@ -80,7 +81,7 @@ addMethodPlaceholders _ state _ param@AddMinimalMethodsParams{..} = do -- sensitive to the format of diagnostic messages from GHC. codeAction :: Recorder (WithPriority Log) -> PluginMethodHandler IdeState Method_TextDocumentCodeAction codeAction recorder state plId (CodeActionParams _ _ docId _ context) = do - verTxtDocId <- lift $ pluginGetVersionedTextDoc docId + verTxtDocId <- liftIO $ runAction "classplugin.codeAction.getVersionedTextDoc" state $ getVersionedTextDoc docId nfp <- getNormalizedFilePathE (verTxtDocId ^. L.uri) actions <- join <$> mapM (mkActions nfp verTxtDocId) methodDiags pure $ InL actions diff --git a/plugins/hls-class-plugin/src/Ide/Plugin/Class/ExactPrint.hs b/plugins/hls-class-plugin/src/Ide/Plugin/Class/ExactPrint.hs index 11afcfd1c4..71deb9c1d8 100644 --- a/plugins/hls-class-plugin/src/Ide/Plugin/Class/ExactPrint.hs +++ b/plugins/hls-class-plugin/src/Ide/Plugin/Class/ExactPrint.hs @@ -30,7 +30,11 @@ makeEditText pm df AddMinimalMethodsParams{..} = do pm_parsed_source pm old = T.pack $ exactPrint ps +#if MIN_VERSION_ghc_exactprint(1,10,0) + ps' = addMethodDecls ps mDecls range withSig +#else (ps', _, _) = runTransform (addMethodDecls ps mDecls range withSig) +#endif new = T.pack $ exactPrint ps' pure (old, new) @@ -40,7 +44,9 @@ makeMethodDecl df (mName, sig) = do sig' <- eitherToMaybe $ parseDecl df (T.unpack sig) $ T.unpack sig pure (name, sig') -#if MIN_VERSION_ghc(9,5,0) +#if MIN_VERSION_ghc_exactprint(1,10,0) +addMethodDecls :: ParsedSource -> [(LHsDecl GhcPs, LHsDecl GhcPs)] -> Range -> Bool -> Located (HsModule GhcPs) +#elif MIN_VERSION_ghc(9,5,0) addMethodDecls :: ParsedSource -> [(LHsDecl GhcPs, LHsDecl GhcPs)] -> Range -> Bool -> TransformT Identity (Located (HsModule GhcPs)) #else addMethodDecls :: ParsedSource -> [(LHsDecl GhcPs, LHsDecl GhcPs)] -> Range -> Bool -> TransformT Identity (Located HsModule) @@ -50,12 +56,20 @@ addMethodDecls ps mDecls range withSig | otherwise = go (map fst mDecls) where go inserting = do +#if MIN_VERSION_ghc_exactprint(1,10,0) + let allDecls = hsDecls ps +#else allDecls <- hsDecls ps +#endif case break (inRange range . 
getLoc) allDecls of (before, L l inst : after) -> let instSpan = realSrcSpan $ getLoc l +#if MIN_VERSION_ghc(9,11,0) + instCol = srcSpanStartCol instSpan - 1 +#else instCol = srcSpanStartCol instSpan +#endif #if MIN_VERSION_ghc(9,9,0) instRow = srcSpanEndLine instSpan methodEpAnn = noAnnSrcSpanDP $ deltaPos 1 (instCol + defaultIndent) @@ -91,7 +105,17 @@ addMethodDecls ps mDecls range withSig addWhere :: HsDecl GhcPs -> HsDecl GhcPs addWhere instd@(InstD xInstD (ClsInstD ext decl@ClsInstDecl{..})) = case cid_ext of -#if MIN_VERSION_ghc(9,9,0) +#if MIN_VERSION_ghc(9,11,0) + (warnings, anns, key) + | EpTok _ <- acid_where anns -> instd + | otherwise -> + InstD xInstD (ClsInstD ext decl { + cid_ext = ( warnings + , anns { acid_where = EpTok d1 } + , key + ) + }) +#elif MIN_VERSION_ghc(9,9,0) (warnings, anns, key) | any (\(AddEpAnn kw _ )-> kw == AnnWhere) anns -> instd | otherwise -> diff --git a/plugins/hls-class-plugin/src/Ide/Plugin/Class/Types.hs b/plugins/hls-class-plugin/src/Ide/Plugin/Class/Types.hs index 18c9dbae26..e66632c3c6 100644 --- a/plugins/hls-class-plugin/src/Ide/Plugin/Class/Types.hs +++ b/plugins/hls-class-plugin/src/Ide/Plugin/Class/Types.hs @@ -176,7 +176,11 @@ getInstanceBindLensRule recorder = do getBindSpanWithoutSig :: ClsInstDecl GhcRn -> [BindInfo] getBindSpanWithoutSig ClsInstDecl{..} = - let bindNames = mapMaybe go (bagToList cid_binds) + let bindNames = mapMaybe go $ +#if !MIN_VERSION_ghc(9,11,0) + bagToList +#endif + cid_binds go (L l bind) = case bind of FunBind{..} -- `Generated` tagged for Template Haskell, @@ -221,5 +225,10 @@ getInstanceBindTypeSigsRule recorder = do let name = idName id whenMaybe (isBindingName name) $ do env <- tcInitTidyEnv - let (_, ty) = tidyOpenType env (idType id) +#if MIN_VERSION_ghc(9,11,0) + let ty = +#else + let (_, ty) = +#endif + tidyOpenType env (idType id) pure $ InstanceBindTypeSig name ty diff --git a/plugins/hls-class-plugin/src/Ide/Plugin/Class/Utils.hs b/plugins/hls-class-plugin/src/Ide/Plugin/Class/Utils.hs index 129251ffe5..e73344c341 100644 --- a/plugins/hls-class-plugin/src/Ide/Plugin/Class/Utils.hs +++ b/plugins/hls-class-plugin/src/Ide/Plugin/Class/Utils.hs @@ -64,7 +64,7 @@ insertPragmaIfNotPresent :: (MonadIO m) insertPragmaIfNotPresent state nfp pragma = do (hscEnv -> hsc_dflags -> sessionDynFlags, _) <- runActionE "classplugin.insertPragmaIfNotPresent.GhcSession" state $ useWithStaleE GhcSession nfp - (_, fileContents) <- liftIO $ runAction "classplugin.insertPragmaIfNotPresent.GetFileContents" state + fileContents <- liftIO $ runAction "classplugin.insertPragmaIfNotPresent.GetFileContents" state $ getFileContents nfp (pm, _) <- runActionE "classplugin.insertPragmaIfNotPresent.GetParsedModuleWithComments" state $ useWithStaleE GetParsedModuleWithComments nfp diff --git a/plugins/hls-code-range-plugin/src/Ide/Plugin/CodeRange/Rules.hs b/plugins/hls-code-range-plugin/src/Ide/Plugin/CodeRange/Rules.hs index 2c0adc9ca5..86d5923011 100644 --- a/plugins/hls-code-range-plugin/src/Ide/Plugin/CodeRange/Rules.hs +++ b/plugins/hls-code-range-plugin/src/Ide/Plugin/CodeRange/Rules.hs @@ -29,7 +29,6 @@ import Control.Monad.Trans.Maybe (MaybeT (MaybeT), maybeToExceptT) import Control.Monad.Trans.Writer.CPS import Data.Coerce (coerce) -import Data.Data (Typeable) import Data.Foldable (traverse_) import Data.Function (on, (&)) import Data.Hashable @@ -158,7 +157,7 @@ simplify r = withChildrenSimplified = r { _codeRange_children = simplify <$> _codeRange_children r } data GetCodeRange = GetCodeRange - deriving (Eq, Show, 
Typeable, Generic) + deriving (Eq, Show, Generic) instance Hashable GetCodeRange instance NFData GetCodeRange diff --git a/plugins/hls-eval-plugin/README.md b/plugins/hls-eval-plugin/README.md index b1a50f0705..d2b39498cb 100644 --- a/plugins/hls-eval-plugin/README.md +++ b/plugins/hls-eval-plugin/README.md @@ -40,7 +40,7 @@ A test is composed by a sequence of contiguous lines, the result of their evalua "CDAB" ``` -You execute a test by clicking on the _Evaluate_ code lens that appears above it (or _Refresh_, if the test has been run previously). +You execute a test by clicking on the _Evaluate_ code lens that appears above it (or _Refresh_, if the test has been run previously). A code action is also provided. All tests in the same comment block are executed together. @@ -334,14 +334,7 @@ prop> \(l::[Int]) -> reverse (reverse l) == l ### Multiline Expressions -``` - >>> :{ - let - x = 1 - y = 2 - in x + y + multiline - :} -``` +Multiline expressions are not supported, see https://github.com/haskell/haskell-language-server/issues/1817 # Acknowledgments diff --git a/plugins/hls-eval-plugin/src/Ide/Plugin/Eval.hs b/plugins/hls-eval-plugin/src/Ide/Plugin/Eval.hs index eaf97e4a58..87553bfeba 100644 --- a/plugins/hls-eval-plugin/src/Ide/Plugin/Eval.hs +++ b/plugins/hls-eval-plugin/src/Ide/Plugin/Eval.hs @@ -13,8 +13,8 @@ module Ide.Plugin.Eval ( import Development.IDE (IdeState) import Ide.Logger (Recorder, WithPriority) -import qualified Ide.Plugin.Eval.CodeLens as CL import Ide.Plugin.Eval.Config +import qualified Ide.Plugin.Eval.Handlers as Handlers import Ide.Plugin.Eval.Rules (rules) import qualified Ide.Plugin.Eval.Types as Eval import Ide.Types (ConfigDescriptor (..), @@ -27,9 +27,12 @@ import Language.LSP.Protocol.Message -- |Plugin descriptor descriptor :: Recorder (WithPriority Eval.Log) -> PluginId -> PluginDescriptor IdeState descriptor recorder plId = - (defaultPluginDescriptor plId "Provies a code lens to evaluate expressions in doctest comments") - { pluginHandlers = mkPluginHandler SMethod_TextDocumentCodeLens (CL.codeLens recorder) - , pluginCommands = [CL.evalCommand recorder plId] + (defaultPluginDescriptor plId "Provies code action and lens to evaluate expressions in doctest comments") + { pluginHandlers = mconcat + [ mkPluginHandler SMethod_TextDocumentCodeAction (Handlers.codeAction recorder) + , mkPluginHandler SMethod_TextDocumentCodeLens (Handlers.codeLens recorder) + ] + , pluginCommands = [Handlers.evalCommand recorder plId] , pluginRules = rules recorder , pluginConfigDescriptor = defaultConfigDescriptor { configCustomConfig = mkCustomConfig properties diff --git a/plugins/hls-eval-plugin/src/Ide/Plugin/Eval/Code.hs b/plugins/hls-eval-plugin/src/Ide/Plugin/Eval/Code.hs index cc22d31da8..e8b7428b10 100644 --- a/plugins/hls-eval-plugin/src/Ide/Plugin/Eval/Code.hs +++ b/plugins/hls-eval-plugin/src/Ide/Plugin/Eval/Code.hs @@ -1,7 +1,7 @@ {-# LANGUAGE LambdaCase #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE ViewPatterns #-} -{-# OPTIONS_GHC -Wwarn -fno-warn-orphans #-} +{-# OPTIONS_GHC -Wwarn #-} -- | Expression execution module Ide.Plugin.Eval.Code (Statement, testRanges, resultRange, propSetup, testCheck, asStatements,myExecStmt) where diff --git a/plugins/hls-eval-plugin/src/Ide/Plugin/Eval/GHC.hs b/plugins/hls-eval-plugin/src/Ide/Plugin/Eval/GHC.hs index 3d3fe5f704..f0b01fca92 100644 --- a/plugins/hls-eval-plugin/src/Ide/Plugin/Eval/GHC.hs +++ b/plugins/hls-eval-plugin/src/Ide/Plugin/Eval/GHC.hs @@ -1,6 +1,6 @@ {-# LANGUAGE CPP #-} {-# LANGUAGE LambdaCase #-} 
-{-# OPTIONS_GHC -fno-warn-unused-imports -Wno-orphans #-} +{-# OPTIONS_GHC -Wno-orphans #-} -- |GHC API utilities module Ide.Plugin.Eval.GHC ( @@ -25,12 +25,10 @@ import Development.IDE.GHC.Util (printOutputable) import GHC.LanguageExtensions.Type (Extension (..)) import Ide.Plugin.Eval.Util (gStrictTry) -#if MIN_VERSION_ghc(9,3,0) import GHC (setTopSessionDynFlags, setUnitDynFlags) import GHC.Driver.Env import GHC.Driver.Session (getDynFlags) -#endif {- $setup >>> import GHC @@ -174,13 +172,9 @@ vList = vcat . map text setSessionAndInteractiveDynFlags :: DynFlags -> Ghc () setSessionAndInteractiveDynFlags df = do -#if MIN_VERSION_ghc(9,3,0) _ <- setUnitDynFlags (homeUnitId_ df) df modifySession (hscUpdateLoggerFlags . hscSetActiveUnitId (homeUnitId_ df)) df' <- getDynFlags setTopSessionDynFlags df' -#else - _ <- setSessionDynFlags df -#endif sessDyns <- getSessionDynFlags setInteractiveDynFlags sessDyns diff --git a/plugins/hls-eval-plugin/src/Ide/Plugin/Eval/CodeLens.hs b/plugins/hls-eval-plugin/src/Ide/Plugin/Eval/Handlers.hs similarity index 89% rename from plugins/hls-eval-plugin/src/Ide/Plugin/Eval/CodeLens.hs rename to plugins/hls-eval-plugin/src/Ide/Plugin/Eval/Handlers.hs index 4d9ace1163..cc80e91f77 100644 --- a/plugins/hls-eval-plugin/src/Ide/Plugin/Eval/CodeLens.hs +++ b/plugins/hls-eval-plugin/src/Ide/Plugin/Eval/Handlers.hs @@ -5,26 +5,26 @@ {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE RecordWildCards #-} {-# LANGUAGE ViewPatterns #-} -{-# OPTIONS_GHC -fno-warn-type-defaults -Wno-unused-imports #-} +{-# OPTIONS_GHC -Wno-type-defaults #-} {- | A plugin inspired by the REPLoid feature of , 's Examples and Properties and . For a full example see the "Ide.Plugin.Eval.Tutorial" module. -} -module Ide.Plugin.Eval.CodeLens ( +module Ide.Plugin.Eval.Handlers ( + codeAction, codeLens, evalCommand, ) where import Control.Applicative (Alternative ((<|>))) -import Control.Arrow (second, (>>>)) -import Control.Exception (bracket_, try) +import Control.Arrow (second) +import Control.Exception (bracket_) import qualified Control.Exception as E -import Control.Lens (_1, _3, ix, (%~), - (<&>), (^.)) -import Control.Monad (guard, join, - void, when) +import Control.Lens (ix, (%~), (^.)) +import Control.Monad (guard, void, + when) import Control.Monad.IO.Class (MonadIO (liftIO)) import Control.Monad.Trans.Except (ExceptT (..), runExceptT) @@ -40,29 +40,23 @@ import Data.Maybe (catMaybes) import Data.String (IsString) import Data.Text (Text) import qualified Data.Text as T -import Data.Typeable (Typeable) +import qualified Data.Text.Utf16.Rope.Mixed as Rope +import Development.IDE.Core.FileStore (getUriContents) import Development.IDE.Core.Rules (IdeState, runAction) import Development.IDE.Core.RuleTypes (LinkableResult (linkableHomeMod), - NeedsCompilation (NeedsCompilation), TypeCheck (..), tmrTypechecked) -import Development.IDE.Core.Shake (shakeExtras, - useNoFile_, - useWithStale_, - use_, uses_) +import Development.IDE.Core.Shake (useNoFile_, use_, + uses_) import Development.IDE.GHC.Compat hiding (typeKind, unitState) -import Development.IDE.GHC.Compat.Util (GhcException, - OverridingBool (..), - bagToList) +import Development.IDE.GHC.Compat.Util (OverridingBool (..)) import Development.IDE.GHC.Util (evalGhcEnv, - modifyDynFlags, - printOutputable) + modifyDynFlags) import Development.IDE.Import.DependencyInformation (transitiveDeps, transitiveModuleDeps) -import Development.IDE.Types.Location (toNormalizedFilePath', - uriToFilePath') +import Development.IDE.Types.Location 
(toNormalizedFilePath') import GHC (ClsInst, ExecOptions (execLineNumber, execSourceFile), FamInst, @@ -87,15 +81,13 @@ import Development.IDE.Core.RuleTypes (GetLinkable (GetL ModSummaryResult (msrModSummary)) import Development.IDE.Core.Shake (VFSModified (VFSUnmodified)) import qualified Development.IDE.GHC.Compat.Core as Compat (InteractiveImport (IIModule)) -import qualified Development.IDE.GHC.Compat.Core as SrcLoc (HasSrcSpan (getLoc), - unLoc) +import qualified Development.IDE.GHC.Compat.Core as SrcLoc (unLoc) import Development.IDE.Types.HscEnvEq (HscEnvEq (hscEnv)) import qualified GHC.LanguageExtensions.Type as LangExt (Extension (..)) -import Control.Concurrent.STM.Stats (atomically) +import Data.List.Extra (unsnoc) import Development.IDE.Core.FileStore (setSomethingModified) import Development.IDE.Core.PluginUtils -import Development.IDE.Graph (ShakeOptions (shakeExtra)) import Development.IDE.Types.Shake (toKey) import GHC.Types.SrcLoc (UnhelpfulSpanReason (UnhelpfulInteractive)) import Ide.Logger (Priority (..), @@ -103,7 +95,6 @@ import Ide.Logger (Priority (..), WithPriority, logWith) import Ide.Plugin.Error (PluginError (PluginInternalError), - handleMaybe, handleMaybeM) import Ide.Plugin.Eval.Code (Statement, asStatements, @@ -117,8 +108,7 @@ import Ide.Plugin.Eval.Config (EvalConfig (..), import Ide.Plugin.Eval.GHC (addImport, addPackages, hasPackage, - setSessionAndInteractiveDynFlags, - showDynFlags) + setSessionAndInteractiveDynFlags) import Ide.Plugin.Eval.Parse.Comments (commentsToSections) import Ide.Plugin.Eval.Parse.Option (parseSetFlags) import Ide.Plugin.Eval.Rules (queueForEvaluation, @@ -133,43 +123,56 @@ import qualified Language.LSP.Protocol.Lens as L import Language.LSP.Protocol.Message import Language.LSP.Protocol.Types import Language.LSP.Server -import Language.LSP.VFS (virtualFileText) +#if MIN_VERSION_ghc(9,11,0) +import GHC.Unit.Module.ModIface (IfaceTopEnv (..)) +#endif + +codeAction :: Recorder (WithPriority Log) -> PluginMethodHandler IdeState Method_TextDocumentCodeAction +codeAction recorder st plId CodeActionParams{_textDocument,_range} = do + rangeCommands <- mkRangeCommands recorder st plId _textDocument + pure + $ InL + [ InL command + | (testRange, command) <- rangeCommands + , _range `isSubrangeOf` testRange + ] {- | Code Lens provider NOTE: Invoked every time the document is modified, not just when the document is saved. 
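(Both entry points share `mkRangeCommands` below: the code action handler filters the generated (range, command) pairs down to the range the client asked about, while this code lens handler returns one lens per pair, so a test section evaluates the same way whichever UI triggers it.)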
-} codeLens :: Recorder (WithPriority Log) -> PluginMethodHandler IdeState Method_TextDocumentCodeLens -codeLens recorder st plId CodeLensParams{_textDocument} = +codeLens recorder st plId CodeLensParams{_textDocument} = do + rangeCommands <- mkRangeCommands recorder st plId _textDocument + pure + $ InL + [ CodeLens range (Just command) Nothing + | (range, command) <- rangeCommands + ] + +mkRangeCommands :: Recorder (WithPriority Log) -> IdeState -> PluginId -> TextDocumentIdentifier -> ExceptT PluginError (HandlerM Config) [(Range, Command)] +mkRangeCommands recorder st plId textDocument = let dbg = logWith recorder Debug perf = timed (\lbl duration -> dbg $ LogExecutionTime lbl duration) - in perf "codeLens" $ + in perf "evalMkRangeCommands" $ do - let TextDocumentIdentifier uri = _textDocument + let TextDocumentIdentifier uri = textDocument fp <- uriToFilePathE uri let nfp = toNormalizedFilePath' fp isLHS = isLiterate fp dbg $ LogCodeLensFp fp (comments, _) <- runActionE "eval.GetParsedModuleWithComments" st $ useWithStaleE GetEvalComments nfp - -- dbg "excluded comments" $ show $ DL.toList $ - -- foldMap (\(L a b) -> - -- case b of - -- AnnLineComment{} -> mempty - -- AnnBlockComment{} -> mempty - -- _ -> DL.singleton (a, b) - -- ) - -- $ apiAnnComments' pm_annotations dbg $ LogCodeLensComments comments -- Extract tests from source code let Sections{..} = commentsToSections isLHS comments tests = testsBySection nonSetupSections cmd = mkLspCommand plId evalCommandName "Evaluate=..." (Just []) - let lenses = - [ CodeLens testRange (Just cmd') Nothing + let rangeCommands = + [ (testRange, cmd') | (section, ident, test) <- tests , let (testRange, resultRange) = testRanges test - args = EvalParams (setupSections ++ [section]) _textDocument ident + args = EvalParams (setupSections ++ [section]) textDocument ident cmd' = (cmd :: Command) { _arguments = Just [toJSON args] @@ -185,9 +188,9 @@ codeLens recorder st plId CodeLensParams{_textDocument} = (length tests) (length nonSetupSections) (length setupSections) - (length lenses) + (length rangeCommands) - return $ InL lenses + pure rangeCommands where trivial (Range p p') = p == p' @@ -210,7 +213,7 @@ runEvalCmd recorder plId st mtoken EvalParams{..} = let TextDocumentIdentifier{_uri} = module_ fp <- uriToFilePathE _uri let nfp = toNormalizedFilePath' fp - mdlText <- moduleText _uri + mdlText <- moduleText st _uri -- enable codegen for the module which we need to evaluate. final_hscEnv <- liftIO $ bracket_ @@ -260,17 +263,22 @@ initialiseSessionForEval needs_quickcheck st nfp = do -- However, the eval plugin (setContext specifically) requires the rdr_env -- for the current module - so get it from the Typechecked Module and add -- it back to the iface for the current module. - rdr_env <- tcg_rdr_env . tmrTypechecked <$> use_ TypeCheck nfp + tm <- tmrTypechecked <$> use_ TypeCheck nfp + let rdr_env = tcg_rdr_env tm let linkable_hsc = loadModulesHome (map (addRdrEnv . linkableHomeMod) linkables) deps_hsc addRdrEnv hmi | iface <- hm_iface hmi , ms_mod ms == mi_module iface +#if MIN_VERSION_ghc(9,11,0) + = hmi { hm_iface = set_mi_top_env (Just $ IfaceTopEnv (forceGlobalRdrEnv (globalRdrEnvLocal rdr_env)) (mkIfaceImports $ tcg_import_decls tm)) iface} +#else = hmi { hm_iface = iface { mi_globals = Just $! 
#if MIN_VERSION_ghc(9,8,0) forceGlobalRdrEnv #endif rdr_env }} +#endif | otherwise = hmi return (ms, linkable_hsc) @@ -291,6 +299,15 @@ initialiseSessionForEval needs_quickcheck st nfp = do getSession return env2 +#if MIN_VERSION_ghc(9,11,0) +mkIfaceImports :: [ImportUserSpec] -> [IfaceImport] +mkIfaceImports = map go + where + go (ImpUserSpec decl ImpUserAll) = IfaceImport decl ImpIfaceAll + go (ImpUserSpec decl (ImpUserExplicit env)) = IfaceImport decl (ImpIfaceExplicit (forceGlobalRdrEnv env)) + go (ImpUserSpec decl (ImpUserEverythingBut ns)) = IfaceImport decl (ImpIfaceEverythingBut ns) +#endif + addFinalReturn :: Text -> [TextEdit] -> [TextEdit] addFinalReturn mdlText edits | not (null edits) && not (T.null mdlText) && T.last mdlText /= '\n' = @@ -301,16 +318,19 @@ finalReturn :: Text -> TextEdit finalReturn txt = let ls = T.lines txt l = fromIntegral $ length ls -1 - c = fromIntegral $ T.length . last $ ls + c = fromIntegral $ T.length $ maybe T.empty snd (unsnoc ls) p = Position l c in TextEdit (Range p p) "\n" -moduleText :: Uri -> ExceptT PluginError (HandlerM config) Text -moduleText uri = - handleMaybeM (PluginInternalError "mdlText") $ - (virtualFileText <$>) - <$> pluginGetVirtualFile - (toNormalizedUri uri) +moduleText :: IdeState -> Uri -> ExceptT PluginError (HandlerM config) Text +moduleText state uri = do + contents <- + handleMaybeM (PluginInternalError "mdlText") $ + liftIO $ + runAction "eval.getUriContents" state $ + getUriContents $ + toNormalizedUri uri + pure $ Rope.toText contents testsBySection :: [Section] -> [(Section, EvalId, Test)] testsBySection sections = @@ -514,7 +534,7 @@ singleLine s = [T.pack s] errorLines :: String -> [Text] errorLines = dropWhileEnd T.null - . takeWhile (not . ("CallStack" `T.isPrefixOf`)) + . takeWhile (not . (\x -> "CallStack" `T.isPrefixOf` x || "HasCallStack" `T.isPrefixOf` x)) . T.lines . 
T.pack @@ -654,7 +674,6 @@ data GhciLikeCmdException = GhciLikeCmdNotImplemented { ghciCmdName :: Text , ghciCmdArg :: Text } - deriving (Typeable) instance Show GhciLikeCmdException where showsPrec _ GhciLikeCmdNotImplemented{..} = diff --git a/plugins/hls-eval-plugin/src/Ide/Plugin/Eval/Rules.hs b/plugins/hls-eval-plugin/src/Ide/Plugin/Eval/Rules.hs index 6990c4a6e5..3d896f1da1 100644 --- a/plugins/hls-eval-plugin/src/Ide/Plugin/Eval/Rules.hs +++ b/plugins/hls-eval-plugin/src/Ide/Plugin/Eval/Rules.hs @@ -65,7 +65,13 @@ unqueueForEvaluation ide nfp = do apiAnnComments' :: ParsedModule -> [SrcLoc.RealLocated EpaCommentTok] apiAnnComments' pm = do L span (EpaComment c _) <- getEpaComments $ pm_parsed_source pm - pure (L (anchor span) c) + pure (L ( +#if MIN_VERSION_ghc(9,11,0) + epaLocationRealSrcSpan +#else + anchor +#endif + span) c) where #if MIN_VERSION_ghc(9,5,0) getEpaComments :: Development.IDE.GHC.Compat.Located (HsModule GhcPs) -> [LEpaComment] diff --git a/plugins/hls-eval-plugin/src/Ide/Plugin/Eval/Types.hs b/plugins/hls-eval-plugin/src/Ide/Plugin/Eval/Types.hs index 43ea57c956..1753ab4e6c 100644 --- a/plugins/hls-eval-plugin/src/Ide/Plugin/Eval/Types.hs +++ b/plugins/hls-eval-plugin/src/Ide/Plugin/Eval/Types.hs @@ -157,14 +157,14 @@ data Test deriving (Eq, Show, Generic, FromJSON, ToJSON, NFData) data IsEvaluating = IsEvaluating - deriving (Eq, Show, Typeable, Generic) + deriving (Eq, Show, Generic) instance Hashable IsEvaluating instance NFData IsEvaluating type instance RuleResult IsEvaluating = Bool data GetEvalComments = GetEvalComments - deriving (Eq, Show, Typeable, Generic) + deriving (Eq, Show, Generic) instance Hashable GetEvalComments instance NFData GetEvalComments diff --git a/plugins/hls-eval-plugin/src/Ide/Plugin/Eval/Util.hs b/plugins/hls-eval-plugin/src/Ide/Plugin/Eval/Util.hs index 14b47f4d95..77b133ef92 100644 --- a/plugins/hls-eval-plugin/src/Ide/Plugin/Eval/Util.hs +++ b/plugins/hls-eval-plugin/src/Ide/Plugin/Eval/Util.hs @@ -1,9 +1,8 @@ -{-# LANGUAGE CPP #-} -{-# LANGUAGE NoMonomorphismRestriction #-} -{-# OPTIONS_GHC -Wno-orphans -Wno-unused-imports #-} -{-# LANGUAGE RecordWildCards #-} +{-# LANGUAGE CPP #-} +{-# LANGUAGE RecordWildCards #-} +{-# OPTIONS_GHC -Wno-orphans #-} --- |Debug utilities +-- | Debug utilities module Ide.Plugin.Eval.Util ( timed, isLiterate, @@ -15,39 +14,31 @@ module Ide.Plugin.Eval.Util ( import Control.Exception (SomeException, evaluate, fromException) -import Control.Monad.Error.Class (MonadError (throwError)) import Control.Monad.IO.Class (MonadIO (liftIO)) import Control.Monad.Trans.Class (MonadTrans (lift)) import Control.Monad.Trans.Except (ExceptT (..), runExceptT) import Data.Aeson (Value) -import Data.Bifunctor (second) import Data.String (IsString (fromString)) -import qualified Data.Text as T -import Development.IDE (IdeState, - printOutputable) -import qualified Development.IDE.Core.PluginUtils as PluginUtils -import qualified Development.IDE.GHC.Compat.Core as Core -import qualified Development.IDE.GHC.Compat.Core as SrcLoc import Development.IDE.GHC.Compat.Outputable import Development.IDE.GHC.Compat.Util (MonadCatch, bagToList, catch) -import GHC.Exts (toList) -import GHC.Stack (HasCallStack, callStack, - srcLocFile, - srcLocStartCol, - srcLocStartLine) import Ide.Plugin.Error import Ide.Types (HandlerM, pluginSendRequest) import Language.LSP.Protocol.Message import Language.LSP.Protocol.Types -import Language.LSP.Server import System.FilePath (takeExtension) import qualified System.Time.Extra as Extra -import 
System.Time.Extra (duration, showDuration) +import System.Time.Extra (duration) import UnliftIO.Exception (catchAny) +#if !MIN_VERSION_ghc(9,8,0) +import qualified Data.Text as T +import Development.IDE (printOutputable) +import qualified Development.IDE.GHC.Compat.Core as Core +#endif + timed :: MonadIO m => (t -> Extra.Seconds -> m a) -> t -> m b -> m b timed out name op = do (secs, r) <- duration op @@ -77,7 +68,6 @@ gevaluate = liftIO . evaluate showErr :: Monad m => SomeException -> m String showErr e = -#if MIN_VERSION_ghc(9,3,0) case fromException e of -- On GHC 9.4+, the show instance adds the error message span -- We don't want this for the plugin @@ -93,7 +83,6 @@ showErr e = . errMsgDiagnostic) $ getMessages msgs _ -> -#endif return . show $ e #if MIN_VERSION_ghc(9,8,0) @@ -109,6 +98,6 @@ prettyWarnings = unlines . map prettyWarn prettyWarn :: Core.Warn -> String prettyWarn Core.Warn{..} = - T.unpack (printOutputable $ SrcLoc.getLoc warnMsg) <> ": warning:\n" - <> " " <> SrcLoc.unLoc warnMsg + T.unpack (printOutputable $ Core.getLoc warnMsg) <> ": warning:\n" + <> " " <> Core.unLoc warnMsg #endif diff --git a/plugins/hls-eval-plugin/test/Main.hs b/plugins/hls-eval-plugin/test/Main.hs index 10158531d2..7338b4384f 100644 --- a/plugins/hls-eval-plugin/test/Main.hs +++ b/plugins/hls-eval-plugin/test/Main.hs @@ -6,13 +6,15 @@ module Main ) where import Control.Lens (_Just, folded, preview, view, (^.), - (^..)) + (^..), (^?)) +import Control.Monad (join) import Data.Aeson (Value (Object), fromJSON, object, (.=)) import Data.Aeson.Types (Pair, Result (Success)) import Data.List (isInfixOf) import Data.List.Extra (nubOrdOn) import qualified Data.Map as Map +import qualified Data.Maybe as Maybe import qualified Data.Text as T import Ide.Plugin.Config (Config) import qualified Ide.Plugin.Config as Plugin @@ -59,6 +61,9 @@ tests = lenses <- getCodeLenses doc liftIO $ map (view range) lenses @?= [Range (Position 4 0) (Position 5 0)] + , goldenWithEvalForCodeAction "Evaluation of expressions via code action" "T1" "hs" + , goldenWithEvalForCodeAction "Reevaluation of expressions via code action" "T2" "hs" + , goldenWithEval "Evaluation of expressions" "T1" "hs" , goldenWithEval "Reevaluation of expressions" "T2" "hs" , goldenWithEval "Evaluation of expressions w/ imports" "T3" "hs" @@ -75,7 +80,7 @@ tests = else "-- No instance for (Num String) arising from a use of `+'\n-- In the expression: \"a\" + \"bc\"\n-- In an equation for `res': res = \"a\" + \"bc\"" - evalInFile "T8.hs" "-- >>> \"" "-- lexical error in string/character literal at end of input" + evalInFile "T8.hs" "-- >>> \"" (if ghcVersion >= GHC912 then "-- lexical error at end of input" else "-- lexical error in string/character literal at end of input") evalInFile "T8.hs" "-- >>> 3 `div` 0" "-- divide by zero" -- The default for marking exceptions is False , goldenWithEval "Applies file LANGUAGE extensions" "T9" "hs" , goldenWithEval "Evaluate a type with :kind!" "T10" "hs" @@ -89,8 +94,8 @@ tests = , goldenWithEval ":type reports an error when given with unknown +x option" "T17" "hs" , goldenWithEval "Reports an error when given with unknown command" "T18" "hs" , goldenWithEval "Returns defaulted type for :type +d reflecting the default declaration specified in the >>> prompt" "T19" "hs" - , expectFailBecause "known issue - see a note in P.R. #361" $ - goldenWithEval ":type +d reflects the `default' declaration of the module" "T20" "hs" + -- TODO: known issue - see a note in P.R. 
#361 + , goldenWithEval ":type +d reflects the `default' declaration of the module" "T20" "hs" , testCase ":type handles a multilined result properly" $ evalInFile "T21.hs" "-- >>> :type fun" $ T.unlines [ "-- fun", @@ -126,14 +131,14 @@ tests = , goldenWithEval "The default language extensions for the eval plugin are the same as those for ghci" "TSameDefaultLanguageExtensionsAsGhci" "hs" , goldenWithEval "IO expressions are supported, stdout/stderr output is ignored" "TIO" "hs" , goldenWithEvalAndFs "Property checking" cabalProjectFS "TProperty" "hs" - , knownBrokenInEnv [HostOS Windows] "The output has path separators in it, which on Windows look different. Just skip it there" $ + , knownBrokenInWindowsBeforeGHC912 "The output has path separators in it, which on Windows look different. Just skip it there" $ goldenWithEvalAndFs' "Property checking with exception" cabalProjectFS "TPropertyError" "hs" $ case ghcVersion of + GHC912 -> "ghc912.expected" GHC910 -> "ghc910.expected" GHC98 -> "ghc98.expected" GHC96 -> "ghc96.expected" GHC94 -> "ghc94.expected" - GHC92 -> "ghc92.expected" , goldenWithEval "Prelude has no special treatment, it is imported as stated in the module" "TPrelude" "hs" , goldenWithEval "Don't panic on {-# UNPACK #-} pragma" "TUNPACK" "hs" , goldenWithEval "Can handle eval inside nested comment properly" "TNested" "hs" @@ -210,11 +215,21 @@ tests = let ifaceKeys = filter ("GetModIface" `T.isPrefixOf`) keys liftIO $ ifaceKeys @?= [] ] + where + knownBrokenInWindowsBeforeGHC912 msg = + foldl (.) id + [ knownBrokenInSpecificEnv [GhcVer ghcVer, HostOS Windows] msg + | ghcVer <- [GHC94 .. GHC910] + ] goldenWithEval :: TestName -> FilePath -> FilePath -> TestTree goldenWithEval title path ext = goldenWithHaskellDocInTmpDir def evalPlugin title (mkFs $ FS.directProject (path <.> ext)) path "expected" ext executeLensesBackwards +goldenWithEvalForCodeAction :: TestName -> FilePath -> FilePath -> TestTree +goldenWithEvalForCodeAction title path ext = + goldenWithHaskellDocInTmpDir def evalPlugin title (mkFs $ FS.directProject (path <.> ext)) path "expected" ext executeCodeActionsBackwards + goldenWithEvalAndFs :: TestName -> [FS.FileTree] -> FilePath -> FilePath -> TestTree goldenWithEvalAndFs title tree path ext = goldenWithHaskellDocInTmpDir def evalPlugin title (mkFs tree) path "expected" ext executeLensesBackwards @@ -233,14 +248,24 @@ goldenWithEvalAndFs' title tree path ext expected = -- | Execute lenses backwards, to avoid affecting their position in the source file executeLensesBackwards :: TextDocumentIdentifier -> Session () executeLensesBackwards doc = do - codeLenses <- reverse <$> getCodeLenses doc + codeLenses <- getCodeLenses doc -- liftIO $ print codeLenses + executeCmdsBackwards [c | CodeLens{_command = Just c} <- codeLenses] + +executeCodeActionsBackwards :: TextDocumentIdentifier -> Session () +executeCodeActionsBackwards doc = do + codeLenses <- getCodeLenses doc + let ranges = [_range | CodeLens{_range} <- codeLenses] + -- getAllCodeActions cannot get our code actions because they have no diagnostics + codeActions <- join <$> traverse (getCodeActions doc) ranges + let cmds = Maybe.mapMaybe (^? 
_L) codeActions + executeCmdsBackwards cmds - -- Execute sequentially, nubbing elements to avoid - -- evaluating the same section with multiple tests - -- more than twice - mapM_ executeCmd $ - nubOrdOn actSectionId [c | CodeLens{_command = Just c} <- codeLenses] +-- Execute commands backwards, nubbing elements to avoid +-- evaluating the same section with multiple tests +-- more than twice +executeCmdsBackwards :: [Command] -> Session () +executeCmdsBackwards = mapM_ executeCmd . nubOrdOn actSectionId . reverse actSectionId :: Command -> Int actSectionId Command{_arguments = Just [fromJSON -> Success EvalParams{..}]} = evalId diff --git a/plugins/hls-eval-plugin/test/testdata/T20.expected.hs b/plugins/hls-eval-plugin/test/testdata/T20.expected.hs index 18d2155560..36c93b99c1 100644 --- a/plugins/hls-eval-plugin/test/testdata/T20.expected.hs +++ b/plugins/hls-eval-plugin/test/testdata/T20.expected.hs @@ -4,4 +4,4 @@ import Data.Word (Word) default (Word) -- >>> :type +d 40+ 2 --- 40+ 2 :: Word +-- 40+ 2 :: Integer diff --git a/plugins/hls-eval-plugin/test/testdata/TPropertyError.ghc92.expected.hs b/plugins/hls-eval-plugin/test/testdata/TPropertyError.ghc912.expected.hs similarity index 100% rename from plugins/hls-eval-plugin/test/testdata/TPropertyError.ghc92.expected.hs rename to plugins/hls-eval-plugin/test/testdata/TPropertyError.ghc912.expected.hs diff --git a/plugins/hls-explicit-fixity-plugin/src/Ide/Plugin/ExplicitFixity.hs b/plugins/hls-explicit-fixity-plugin/src/Ide/Plugin/ExplicitFixity.hs index 947570690b..92bc37f743 100644 --- a/plugins/hls-explicit-fixity-plugin/src/Ide/Plugin/ExplicitFixity.hs +++ b/plugins/hls-explicit-fixity-plugin/src/Ide/Plugin/ExplicitFixity.hs @@ -1,3 +1,5 @@ +{-# LANGUAGE CPP #-} +{-# LANGUAGE DataKinds #-} {-# LANGUAGE LambdaCase #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE TypeFamilies #-} @@ -59,7 +61,11 @@ hover state _ (HoverParams (TextDocumentIdentifier uri) pos _) = do in Just $ Hover (InL (mkPlainText contents')) Nothing fixityText :: (Name, Fixity) -> T.Text +#if MIN_VERSION_GLASGOW_HASKELL(9,12,0,0) + fixityText (name, Fixity precedence direction) = +#else fixityText (name, Fixity _ precedence direction) = +#endif printOutputable direction <> " " <> printOutputable precedence <> " `" <> printOutputable name <> "`" newtype FixityMap = FixityMap (M.Map Name Fixity) diff --git a/plugins/hls-explicit-fixity-plugin/test/Main.hs b/plugins/hls-explicit-fixity-plugin/test/Main.hs index 6cfcc16c60..26e94091cd 100644 --- a/plugins/hls-explicit-fixity-plugin/test/Main.hs +++ b/plugins/hls-explicit-fixity-plugin/test/Main.hs @@ -1,3 +1,4 @@ +{-# LANGUAGE DataKinds #-} {-# LANGUAGE OverloadedStrings #-} module Main where @@ -40,17 +41,30 @@ tests = testGroup "Explicit fixity" , hoverTest "signature" (Position 35 2) "infixr 9 `>>>:`" , hoverTest "operator" (Position 36 2) "infixr 9 `>>>:`" , hoverTest "escape" (Position 39 2) "infixl 3 `~\\:`" - -- Ensure that there is no one extra new line in import statement - , expectFail $ hoverTest "import" (Position 2 18) "Control.Monad***" - -- Known issue, See https://github.com/haskell/haskell-language-server/pull/2973/files#r916535742 - , expectFail $ hoverTestImport "import" (Position 4 7) "infixr 9 `>>>:`" + -- TODO: Ensure that there is no one extra new line in import statement + , hoverTestExpectFail + "import" + (Position 2 18) + (BrokenIdeal "Control.Monad***") + (BrokenCurrent "Control.Monad\n\n") + , hoverTestImport "import" (Position 4 7) "infixr 9 `>>>:`" ] hoverTest :: TestName -> Position -> 
T.Text -> TestTree hoverTest = hoverTest' "Hover.hs" + hoverTestImport :: TestName -> Position -> T.Text -> TestTree hoverTestImport = hoverTest' "HoverImport.hs" +hoverTestExpectFail + :: TestName + -> Position + -> ExpectBroken 'Ideal T.Text + -> ExpectBroken 'Current T.Text + -> TestTree +hoverTestExpectFail title pos _ = + hoverTest title pos . unCurrent + hoverTest' :: String -> TestName -> Position -> T.Text -> TestTree hoverTest' docName title pos expected = testCase title $ runSessionWithServer def plugin testDataDir $ do doc <- openDoc docName "haskell" diff --git a/plugins/hls-explicit-imports-plugin/src/Ide/Plugin/ExplicitImports.hs b/plugins/hls-explicit-imports-plugin/src/Ide/Plugin/ExplicitImports.hs index 13526c0535..f24f849476 100644 --- a/plugins/hls-explicit-imports-plugin/src/Ide/Plugin/ExplicitImports.hs +++ b/plugins/hls-explicit-imports-plugin/src/Ide/Plugin/ExplicitImports.hs @@ -6,15 +6,17 @@ {-# LANGUAGE RecordWildCards #-} {-# LANGUAGE TypeFamilies #-} {-# LANGUAGE ViewPatterns #-} + module Ide.Plugin.ExplicitImports ( descriptor , descriptorForModules , abbreviateImportTitle + , abbreviateImportTitleWithoutModule , Log(..) ) where import Control.DeepSeq -import Control.Lens ((&), (?~)) +import Control.Lens (_Just, (&), (?~), (^?)) import Control.Monad.Error.Class (MonadError (throwError)) import Control.Monad.IO.Class import Control.Monad.Trans.Class (lift) @@ -22,14 +24,18 @@ import Control.Monad.Trans.Except (ExceptT) import Control.Monad.Trans.Maybe import qualified Data.Aeson as A (ToJSON (toJSON)) import Data.Aeson.Types (FromJSON) +import Data.Char (isSpace) import qualified Data.IntMap as IM (IntMap, elems, fromList, (!?)) import Data.IORef (readIORef) +import Data.List (singleton) import qualified Data.Map.Strict as Map -import Data.Maybe (isNothing, mapMaybe) +import Data.Maybe (isJust, isNothing, + mapMaybe) import qualified Data.Set as S import Data.String (fromString) import qualified Data.Text as T +import qualified Data.Text as Text import Data.Traversable (for) import qualified Data.Unique as U (hashUnique, newUnique) @@ -44,11 +50,14 @@ import GHC.Generics (Generic) import Ide.Plugin.Error (PluginError (..), getNormalizedFilePathE, handleMaybe) -import Ide.Plugin.RangeMap (filterByRange) -import qualified Ide.Plugin.RangeMap as RM (RangeMap, fromList) +import qualified Ide.Plugin.RangeMap as RM (RangeMap, + filterByRange, + fromList) import Ide.Plugin.Resolve import Ide.PluginUtils import Ide.Types +import Language.LSP.Protocol.Lens (HasInlayHint (inlayHint), + HasTextDocument (textDocument)) import qualified Language.LSP.Protocol.Lens as L import Language.LSP.Protocol.Message import Language.LSP.Protocol.Types @@ -76,7 +85,7 @@ instance Pretty Log where descriptor :: Recorder (WithPriority Log) -> PluginId -> PluginDescriptor IdeState descriptor recorder = -- (almost) no one wants to see an explicit import list for Prelude - descriptorForModules recorder (/= moduleName pRELUDE) + descriptorForModules recorder (/= pRELUDE_NAME) descriptorForModules :: Recorder (WithPriority Log) @@ -97,17 +106,23 @@ descriptorForModules recorder modFilter plId = -- This plugin provides code lenses mkPluginHandler SMethod_TextDocumentCodeLens (lensProvider recorder) <> mkResolveHandler SMethod_CodeLensResolve (lensResolveProvider recorder) - -- This plugin provides code actions + -- This plugin provides inlay hints + <> mkPluginHandler SMethod_TextDocumentInlayHint (inlayHintProvider recorder) + -- This plugin provides code actions <> codeActionHandlers - } 
+isInlayHintsSupported :: IdeState -> Bool +isInlayHintsSupported ideState = + let clientCaps = Shake.clientCapabilities $ shakeExtras ideState + in isJust $ clientCaps ^? textDocument . _Just . inlayHint . _Just + -- | The actual command handler runImportCommand :: Recorder (WithPriority Log) -> CommandFunction IdeState IAResolveData runImportCommand recorder ideState _ eird@(ResolveOne _ _) = do wedit <- resolveWTextEdit ideState eird _ <- lift $ pluginSendRequest SMethod_WorkspaceApplyEdit (ApplyWorkspaceEditParams Nothing wedit) logErrors - return $ InR Null + return $ InR Null where logErrors (Left re) = do logWith recorder Error (LogWAEResponseError re) pure () @@ -129,12 +144,18 @@ runImportCommand _ _ _ rd = do -- the provider should produce one code lens associated to the import statement: -- > Refine imports to import Control.Monad.IO.Class (liftIO) lensProvider :: Recorder (WithPriority Log) -> PluginMethodHandler IdeState 'Method_TextDocumentCodeLens -lensProvider _ state _ CodeLensParams {_textDocument = TextDocumentIdentifier {_uri}} = do +lensProvider _ state _ CodeLensParams {_textDocument = TextDocumentIdentifier {_uri}} = do nfp <- getNormalizedFilePathE _uri (ImportActionsResult{forLens}, pm) <- runActionE "ImportActions" state $ useWithStaleE ImportActions nfp let lens = [ generateLens _uri newRange int - | (range, int) <- forLens - , Just newRange <- [toCurrentRange pm range]] + -- provide ExplicitImport only if the client does not support inlay hints + | not (isInlayHintsSupported state) + , (range, (int, ExplicitImport)) <- forLens + , Just newRange <- [toCurrentRange pm range]] <> + -- RefineImport is always provided because inlay hints cannot + [ generateLens _uri newRange int + | (range, (int, RefineImport)) <- forLens + , Just newRange <- [toCurrentRange pm range]] pure $ InL lens where -- because these are non resolved lenses we only need the range and a -- unique id to later resolve them with. These are for both refine @@ -145,12 +166,13 @@ lensProvider _ state _ CodeLensParams {_textDocument = TextDocumentIdentifier { , _range = range , _command = Nothing } + lensResolveProvider :: Recorder (WithPriority Log) -> ResolveFunction IdeState IAResolveData 'Method_CodeLensResolve lensResolveProvider _ ideState plId cl uri rd@(ResolveOne _ uid) = do nfp <- getNormalizedFilePathE uri (ImportActionsResult{forResolve}, _) <- runActionE "ImportActions" ideState $ useWithStaleE ImportActions nfp target <- handleMaybe PluginStaleResolve $ forResolve IM.!? uid - let updatedCodeLens = cl & L.command ?~ mkCommand plId target + let updatedCodeLens = cl & L.command ?~ mkCommand plId target pure updatedCodeLens where mkCommand :: PluginId -> ImportEdit -> Command mkCommand pId (ImportEdit{ieResType, ieText}) = @@ -165,6 +187,55 @@ lensResolveProvider _ ideState plId cl uri rd@(ResolveOne _ uid) = do lensResolveProvider _ _ _ _ _ rd = do throwError $ PluginInvalidParams (T.pack $ "Unexpected argument for lens resolve handler: " <> show rd) + +-- | Provide explicit imports in inlay hints. +-- Applying textEdits can make the import explicit. +-- There is currently no need to resolve inlay hints, +-- as no tooltips or commands are provided in the label. 
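-- An illustrative sketch of the hint shape the provider below emits for a plain
-- `import ExplicitA` whose only used binding is `a1`; the binding name of the
-- sketch is for illustration only, the record fields are the lsp-types ones used
-- throughout this file, and the concrete values mirror the ExplicitUsualCase
-- inlay-hint test added elsewhere in this patch.
exampleExplicitImportHint :: InlayHint
exampleExplicitImportHint = InlayHint
  { _position     = Position 2 16          -- at the end of `import ExplicitA`
  , _label        = InL "( a1 )"           -- the abbreviated explicit import list
  , _kind         = Nothing                -- neither a type nor a parameter hint
  , _textEdits    = Just [TextEdit (Range (Position 2 0) (Position 2 16))
                                   "import ExplicitA ( a1 )"] -- accepting the hint rewrites the import
  , _tooltip      = Just (InL "Make this import explicit")
  , _paddingLeft  = Just True              -- one space between the import and the hint
  , _paddingRight = Nothing
  , _data_        = Nothing                -- nothing to resolve later
  }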
+inlayHintProvider :: Recorder (WithPriority Log) -> PluginMethodHandler IdeState 'Method_TextDocumentInlayHint +inlayHintProvider _ state _ InlayHintParams {_textDocument = TextDocumentIdentifier {_uri}, _range = visibleRange} = + if isInlayHintsSupported state + then do + nfp <- getNormalizedFilePathE _uri + (ImportActionsResult {forLens, forResolve}, pm) <- runActionE "ImportActions" state $ useWithStaleE ImportActions nfp + let inlayHints = [ inlayHint + | (range, (int, _)) <- forLens + , Just newRange <- [toCurrentRange pm range] + , isSubrangeOf newRange visibleRange + , Just ie <- [forResolve IM.!? int] + , Just inlayHint <- [generateInlayHints newRange ie pm]] + pure $ InL inlayHints + -- When the client does not support inlay hints, fallback to the code lens, + -- so there is nothing to response here. + -- `[]` is no different from `null`, we chose to use all `[]` to indicate "no information" + else pure $ InL [] + where + -- The appropriate and intended position for the hint hints to begin + -- is the end of the range for the code lens. + -- import Data.Char (isSpace) + -- |--- range ----|-- IH ---| + -- |^-_paddingLeft + -- ^-_position + generateInlayHints :: Range -> ImportEdit -> PositionMapping -> Maybe InlayHint + generateInlayHints (Range _ end) ie pm = do + label <- mkLabel ie + currentEnd <- toCurrentPosition pm end + return InlayHint { _position = currentEnd + , _label = InL label + , _kind = Nothing -- neither a type nor a parameter + , _textEdits = fmap singleton $ toTEdit pm ie + , _tooltip = Just $ InL "Make this import explicit" -- simple enough, no need to resolve + , _paddingLeft = Just True -- show an extra space before the inlay hint + , _paddingRight = Nothing + , _data_ = Nothing + } + mkLabel :: ImportEdit -> Maybe T.Text + mkLabel (ImportEdit{ieResType, ieText}) = + let title ExplicitImport = Just $ abbreviateImportTitleWithoutModule ieText + title RefineImport = Nothing -- does not provide imports statements that can be refined via inlay hints + in title ieResType + + -- |For explicit imports: If there are any implicit imports, provide both one -- code action per import to make that specific import explicit, and one code -- action to turn them all into explicit imports. 
For refine imports: If there @@ -175,7 +246,7 @@ codeActionProvider _ ideState _pId (CodeActionParams _ _ TextDocumentIdentifier nfp <- getNormalizedFilePathE _uri (ImportActionsResult{forCodeActions}, pm) <- runActionE "ImportActions" ideState $ useWithStaleE ImportActions nfp newRange <- toCurrentRangeE pm range - let relevantCodeActions = filterByRange newRange forCodeActions + let relevantCodeActions = RM.filterByRange newRange forCodeActions allExplicit = [InR $ mkCodeAction "Make all imports explicit" (Just $ A.toJSON $ ExplicitAll _uri) -- We should only provide this code action if there are any code @@ -231,12 +302,14 @@ resolveWTextEdit ideState (RefineAll uri) = do pure $ mkWorkspaceEdit uri edits pm mkWorkspaceEdit :: Uri -> [ImportEdit] -> PositionMapping -> WorkspaceEdit mkWorkspaceEdit uri edits pm = - WorkspaceEdit {_changes = Just $ Map.singleton uri (mapMaybe toWEdit edits) + WorkspaceEdit {_changes = Just $ Map.singleton uri (mapMaybe (toTEdit pm) edits) , _documentChanges = Nothing , _changeAnnotations = Nothing} - where toWEdit ImportEdit{ieRange, ieText} = - let newRange = toCurrentRange pm ieRange - in (\r -> TextEdit r ieText) <$> newRange + +toTEdit :: PositionMapping -> ImportEdit -> Maybe TextEdit +toTEdit pm ImportEdit{ieRange, ieText} = + let newRange = toCurrentRange pm ieRange + in (\r -> TextEdit r ieText) <$> newRange data ImportActions = ImportActions deriving (Show, Generic, Eq, Ord) @@ -254,7 +327,7 @@ data ImportActionsResult = ImportActionsResult { -- |For providing the code lenses we need to have a range, and a unique id -- that is later resolved to the new text for each import. It is stored in -- a list, because we always need to provide all the code lens in a file. - forLens :: [(Range, Int)] + forLens :: [(Range, (Int, ResultType))] -- |For the code actions we have the same data as for the code lenses, but -- we store it in a RangeMap, because that allows us to filter on a specific -- range with better performance, and code actions are almost always only @@ -331,7 +404,7 @@ minimalImportsRule recorder modFilter = defineNoDiagnostics (cmapWithPrio LogSha -- for every minimal imports | (location, origImport, minImport@(ImportDecl{ideclName = L _ mn})) <- locationImportWithMinimal -- (almost) no one wants to see an refine import list for Prelude - , mn /= moduleName pRELUDE + , mn /= pRELUDE_NAME -- we check for the inner imports , Just innerImports <- [Map.lookup mn import2Map] -- and only get those symbols used @@ -346,7 +419,7 @@ minimalImportsRule recorder modFilter = defineNoDiagnostics (cmapWithPrio LogSha pure (u, rt) let rangeAndUnique = [ ImportAction r u rt | (u, (r, (_, rt))) <- uniqueAndRangeAndText ] pure ImportActionsResult - { forLens = (\ImportAction{..} -> (iaRange, iaUniqueId)) <$> rangeAndUnique + { forLens = (\ImportAction{..} -> (iaRange, (iaUniqueId, iaResType))) <$> rangeAndUnique , forCodeActions = RM.fromList iaRange rangeAndUnique , forResolve = IM.fromList ((\(u, (r, (te, ty))) -> (u, ImportEdit r te ty)) <$> uniqueAndRangeAndText) } @@ -413,8 +486,6 @@ isExplicitImport _ = False maxColumns :: Int maxColumns = 120 - --- | The title of the command is ideally the minimal explicit import decl, but -- we don't want to create a really massive code lens (and the decl can be extremely large!). 
-- So we abbreviate it to fit a max column size, and indicate how many more items are in the list -- after the abbreviation @@ -422,7 +493,8 @@ abbreviateImportTitle :: T.Text -> T.Text abbreviateImportTitle input = let -- For starters, we only want one line in the title - oneLineText = T.unwords $ T.lines input + -- we also need to compress multiple spaces into one + oneLineText = T.unwords $ filter (not . T.null) $ T.split isSpace input -- Now, split at the max columns, leaving space for the summary text we're going to add -- (conservatively assuming we won't need to print a number larger than 100) (prefix, suffix) = T.splitAt (maxColumns - T.length (summaryText 100)) oneLineText @@ -447,6 +519,11 @@ abbreviateImportTitle input = else actualPrefix <> suffixText in title +-- Create an import abbreviate title without module for inlay hints +abbreviateImportTitleWithoutModule :: Text.Text -> Text.Text +abbreviateImportTitleWithoutModule = abbreviateImportTitle . T.dropWhile (/= '(') + +-- | The title of the command is ideally the minimal explicit import decl, but -------------------------------------------------------------------------------- @@ -464,11 +541,7 @@ filterByImport (ImportDecl{ideclHiding = Just (_, L _ names)}) then Just res else Nothing where importedNames = S.fromList $ map (ieName . unLoc) names - res = flip Map.filter avails $ \a -> - any (`S.member` importedNames) - $ concatMap - getAvailNames - a + res = Map.filter (any (any (`S.member` importedNames) . getAvailNames)) avails allFilteredAvailsNames = S.fromList $ concatMap getAvailNames $ mconcat diff --git a/plugins/hls-explicit-imports-plugin/test/Main.hs b/plugins/hls-explicit-imports-plugin/test/Main.hs index 0fd94a807c..01fe1d469e 100644 --- a/plugins/hls-explicit-imports-plugin/test/Main.hs +++ b/plugins/hls-explicit-imports-plugin/test/Main.hs @@ -7,6 +7,7 @@ module Main ) where import Control.Lens ((^.)) +import Control.Monad (unless) import Data.Either.Extra import Data.Foldable (find) import Data.Text (Text) @@ -26,18 +27,33 @@ main = defaultTestRunner $ testGroup "import-actions" [testGroup "Refine Imports" [ codeActionGoldenTest "RefineWithOverride" 3 1 - , codeLensGoldenTest isRefineImports "RefineUsualCase" 1 - , codeLensGoldenTest isRefineImports "RefineQualified" 0 - , codeLensGoldenTest isRefineImports "RefineQualifiedExplicit" 0 + -- Although the client has inlay hints caps, refine is always provided by the code lens + , codeLensGoldenTest codeActionNoResolveCaps isRefineImports "RefineUsualCase" 1 + , codeLensGoldenTest codeActionNoResolveCaps isRefineImports "RefineQualified" 0 + , codeLensGoldenTest codeActionNoResolveCaps isRefineImports "RefineQualifiedExplicit" 0 ], testGroup "Make imports explicit" [ codeActionAllGoldenTest "ExplicitUsualCase" 3 0 , codeActionAllResolveGoldenTest "ExplicitUsualCase" 3 0 + , inlayHintsTestWithCap "ExplicitUsualCase" 2 $ (@=?) + [mkInlayHint (Position 2 16) "( a1 )" + (TextEdit (Range (Position 2 0) (Position 2 16)) "import ExplicitA ( a1 )")] + , inlayHintsTestWithoutCap "ExplicitUsualCase" 2 $ (@=?) [] , codeActionOnlyGoldenTest "ExplicitOnlyThis" 3 0 , codeActionOnlyResolveGoldenTest "ExplicitOnlyThis" 3 0 - , codeLensGoldenTest notRefineImports "ExplicitUsualCase" 0 + , inlayHintsTestWithCap "ExplicitOnlyThis" 3 $ (@=?) + [mkInlayHint (Position 3 16) "( b1 )" + (TextEdit (Range (Position 3 0) (Position 3 16)) "import ExplicitB ( b1 )")] + , inlayHintsTestWithoutCap "ExplicitOnlyThis" 3 $ (@=?) 
[] + -- Only when the client does not support inlay hints, explicit will be provided by code lens + , codeLensGoldenTest codeActionNoInlayHintsCaps notRefineImports "ExplicitUsualCase" 0 + , noCodeLensTest codeActionNoResolveCaps "ExplicitUsualCase" , codeActionBreakFile "ExplicitBreakFile" 4 0 + , inlayHintsTestWithCap "ExplicitBreakFile" 3 $ (@=?) + [mkInlayHint (Position 3 16) "( a1 )" + (TextEdit (Range (Position 3 0) (Position 3 16)) "import ExplicitA ( a1 )")] + , inlayHintsTestWithoutCap "ExplicitBreakFile" 3 $ (@=?) [] , codeActionStaleAction "ExplicitStaleAction" 4 0 , testCase "No CodeAction when exported" $ runSessionWithServer def explicitImportsPlugin testDataDir $ do @@ -49,6 +65,11 @@ main = defaultTestRunner $ testGroup "import-actions" doc <- openDoc "ExplicitExported.hs" "haskell" lenses <- getCodeLenses doc liftIO $ lenses @?= [] + , testCase "No InlayHints when exported" $ + runSessionWithServer def explicitImportsPlugin testDataDir $ do + doc <- openDoc "ExplicitExported.hs" "haskell" + inlayHints <- getInlayHints doc (pointRange 3 0) + liftIO $ inlayHints @?= [] , testGroup "Title abbreviation" [ testCase "not abbreviated" $ let i = "import " <> T.replicate 70 "F" <> " (Athing, Bthing, Cthing)" @@ -72,6 +93,20 @@ main = defaultTestRunner $ testGroup "import-actions" o = "import " <> T.replicate 80 "F" <> " (Athing, Bthing, ... (3 items))" in ExplicitImports.abbreviateImportTitle i @?= o ] + , testGroup "Title abbreviation without module" + [ testCase "not abbreviated" $ + let i = "import M (" <> T.replicate 70 "F" <> ", Athing, Bthing, Cthing)" + o = "(FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF, Athing, Bthing, Cthing)" + in ExplicitImports.abbreviateImportTitleWithoutModule i @?= o + , testCase "abbreviated that drop module name" $ + let i = "import " <> T.replicate 120 "F" <> " (Athing, Bthing, Cthing)" + o = "(Athing, Bthing, Cthing)" + in ExplicitImports.abbreviateImportTitleWithoutModule i @?= o + , testCase "abbreviated in import list" $ + let i = "import M (Athing, Bthing, " <> T.replicate 100 "F" <> ", Cthing, Dthing, Ething)" + o = "(Athing, Bthing, ... 
(4 items))" + in ExplicitImports.abbreviateImportTitleWithoutModule i @?= o + ] ]] -- code action tests @@ -84,7 +119,9 @@ codeActionAllGoldenTest fp l c = goldenWithImportActions " code action" fp codeA _ -> liftIO $ assertFailure "Unable to find CodeAction" codeActionBreakFile :: FilePath -> Int -> Int -> TestTree -codeActionBreakFile fp l c = goldenWithImportActions " code action" fp codeActionNoResolveCaps $ \doc -> do +-- If use `codeActionNoResolveCaps` instead of `codeActionNoInlayHintsCaps` here, +-- we will get a puzzling error: https://github.com/haskell/haskell-language-server/pull/4235#issuecomment-2189048997 +codeActionBreakFile fp l c = goldenWithImportActions " code action" fp codeActionNoInlayHintsCaps $ \doc -> do _ <- getCodeLenses doc changeDoc doc [edit] actions <- getCodeActions doc (pointRange l c) @@ -150,18 +187,71 @@ caTitle _ = Nothing -- code lens tests -codeLensGoldenTest :: (CodeLens -> Bool) -> FilePath -> Int -> TestTree -codeLensGoldenTest predicate fp i = goldenWithImportActions " code lens" fp codeActionNoResolveCaps $ \doc -> do +codeLensGoldenTest :: ClientCapabilities -> (CodeLens -> Bool) -> FilePath -> Int -> TestTree +codeLensGoldenTest caps predicate fp i = goldenWithImportActions " code lens" fp caps $ \doc -> do codeLenses <- getCodeLenses doc resolvedCodeLenses <- for codeLenses resolveCodeLens (CodeLens {_command = Just c}) <- pure (filter predicate resolvedCodeLenses !! i) executeCmd c +noCodeLensTest :: ClientCapabilities -> FilePath -> TestTree +noCodeLensTest caps fp = do + testCase (fp ++ " no code lens") $ run $ \_ -> do + doc <- openDoc (fp ++ ".hs") "haskell" + codeLenses <- getCodeLenses doc + resolvedCodeLenses <- for codeLenses resolveCodeLens + unless (null resolvedCodeLenses) $ + liftIO (assertFailure "Unexpected code lens") + where + run = runSessionWithTestConfig def + { testDirLocation = Left testDataDir + , testConfigCaps = caps + , testLspConfig = def + , testPluginDescriptor = explicitImportsPlugin + } + + notRefineImports :: CodeLens -> Bool notRefineImports (CodeLens _ (Just (Command text _ _)) _) | "Refine imports to" `T.isPrefixOf` text = False notRefineImports _ = True +-- inlay hints tests + +inlayHintsTest :: ClientCapabilities -> String -> FilePath -> UInt -> ([InlayHint] -> Assertion) -> TestTree +inlayHintsTest configCaps postfix fp line assert = testCase (fp ++ postfix) $ run $ \_ -> do + doc <- openDoc (fp ++ ".hs") "haskell" + inlayHints <- getInlayHints doc (lineRange line) + liftIO $ assert inlayHints + where + -- zero-based position + lineRange line = Range (Position line 0) (Position line 1000) + run = runSessionWithTestConfig def + { testDirLocation = Left testDataDir + , testPluginDescriptor = explicitImportsPlugin + , testConfigCaps = configCaps + } + +inlayHintsTestWithCap :: FilePath -> UInt -> ([InlayHint] -> Assertion) -> TestTree +inlayHintsTestWithCap = inlayHintsTest fullLatestClientCaps " inlay hints with client caps" + +inlayHintsTestWithoutCap :: FilePath -> UInt -> ([InlayHint] -> Assertion) -> TestTree +inlayHintsTestWithoutCap = inlayHintsTest codeActionNoInlayHintsCaps " inlay hints without client caps" + + +mkInlayHint :: Position -> Text -> TextEdit -> InlayHint +mkInlayHint pos label textEdit = + InlayHint + { _position = pos + , _label = InL label + , _kind = Nothing + , _textEdits = Just [textEdit] + , _tooltip = Just $ InL "Make this import explicit" + , _paddingLeft = Just True + , _paddingRight = Nothing + , _data_ = Nothing + } + -- Execute command and wait for result executeCmd :: 
Command -> Session () executeCmd cmd = do diff --git a/plugins/hls-explicit-record-fields-plugin/src/Ide/Plugin/ExplicitFields.hs b/plugins/hls-explicit-record-fields-plugin/src/Ide/Plugin/ExplicitFields.hs index a1a2017c8d..2d711979c3 100644 --- a/plugins/hls-explicit-record-fields-plugin/src/Ide/Plugin/ExplicitFields.hs +++ b/plugins/hls-explicit-record-fields-plugin/src/Ide/Plugin/ExplicitFields.hs @@ -1,90 +1,125 @@ -{-# LANGUAGE CPP #-} -{-# LANGUAGE DataKinds #-} -{-# LANGUAGE LambdaCase #-} -{-# LANGUAGE OverloadedStrings #-} -{-# LANGUAGE PatternSynonyms #-} -{-# LANGUAGE TypeFamilies #-} -{-# LANGUAGE ViewPatterns #-} +{-# LANGUAGE CPP #-} +{-# LANGUAGE DataKinds #-} +{-# LANGUAGE DuplicateRecordFields #-} +{-# LANGUAGE LambdaCase #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE PatternSynonyms #-} +{-# LANGUAGE TypeFamilies #-} +{-# LANGUAGE ViewPatterns #-} module Ide.Plugin.ExplicitFields ( descriptor , Log ) where -import Control.Lens ((&), (?~), (^.)) -import Control.Monad.IO.Class (MonadIO (liftIO)) +import Control.Arrow ((&&&)) +import Control.Lens ((&), (?~), (^.)) +import Control.Monad (replicateM) +import Control.Monad.IO.Class (MonadIO (liftIO)) +import Control.Monad.Trans.Class (lift) import Control.Monad.Trans.Maybe -import Data.Aeson (toJSON) -import Data.Generics (GenericQ, everything, - everythingBut, extQ, mkQ) -import qualified Data.IntMap.Strict as IntMap -import qualified Data.Map as Map -import Data.Maybe (fromMaybe, isJust, - maybeToList) -import Data.Text (Text) -import Data.Unique (hashUnique, newUnique) - -import Control.Monad (replicateM) -import Development.IDE (IdeState, Pretty (..), Range, - Recorder (..), Rules, - WithPriority (..), - defineNoDiagnostics, - realSrcSpanToRange, viaShow) +import Data.Aeson (ToJSON (toJSON)) +import Data.Generics (GenericQ, everything, + everythingBut, extQ, mkQ) +import qualified Data.IntMap.Strict as IntMap +import Data.List (find, intersperse) +import qualified Data.Map as Map +import Data.Maybe (fromMaybe, isJust, + mapMaybe, maybeToList) +import Data.Text (Text) +import qualified Data.Text as T +import Data.Unique (hashUnique, newUnique) +import Development.IDE (IdeState, + Location (Location), + Pretty (..), + Range (Range, _end, _start), + Recorder (..), Rules, + WithPriority (..), + defineNoDiagnostics, + getDefinition, hsep, + printName, + realSrcSpanToRange, + shakeExtras, + srcSpanToLocation, + srcSpanToRange, viaShow) import Development.IDE.Core.PluginUtils -import Development.IDE.Core.RuleTypes (TcModuleResult (..), - TypeCheck (..)) -import qualified Development.IDE.Core.Shake as Shake -import Development.IDE.GHC.Compat (HsConDetails (RecCon), - HsExpr (XExpr), - HsRecFields (..), LPat, - Outputable, getLoc, - recDotDot, unLoc) -import Development.IDE.GHC.Compat.Core (Extension (NamedFieldPuns), - GhcPass, - HsExpr (RecordCon, rcon_flds), - HsRecField, LHsExpr, - LocatedA, Name, Pass (..), - Pat (..), RealSrcSpan, - UniqFM, conPatDetails, - emptyUFM, hfbPun, hfbRHS, - hs_valds, lookupUFM, - mapConPatDetail, mapLoc, - pattern RealSrcSpan, - plusUFM_C, unitUFM) -import Development.IDE.GHC.Util (getExtensions, - printOutputable) -import Development.IDE.Graph (RuleResult) -import Development.IDE.Graph.Classes (Hashable, NFData) -import Development.IDE.Spans.Pragmas (NextPragmaInfo (..), - getFirstPragma, - insertNewPragma) -import GHC.Generics (Generic) -import Ide.Logger (Priority (..), cmapWithPrio, - logWith, (<+>)) -import Ide.Plugin.Error (PluginError (PluginInternalError, PluginStaleResolve), - 
getNormalizedFilePathE, - handleMaybe) -import Ide.Plugin.RangeMap (RangeMap) -import qualified Ide.Plugin.RangeMap as RangeMap -import Ide.Plugin.Resolve (mkCodeActionWithResolveAndCommand) -import Ide.Types (PluginDescriptor (..), - PluginId (..), - PluginMethodHandler, - ResolveFunction, - defaultPluginDescriptor) -import qualified Language.LSP.Protocol.Lens as L -import Language.LSP.Protocol.Message (Method (..)) -import Language.LSP.Protocol.Types (CodeAction (..), - CodeActionKind (CodeActionKind_RefactorRewrite), - CodeActionParams (..), - Command, TextEdit (..), - WorkspaceEdit (WorkspaceEdit), - type (|?) (InL, InR)) +import Development.IDE.Core.PositionMapping (PositionMapping, + toCurrentPosition, + toCurrentRange) +import Development.IDE.Core.RuleTypes (TcModuleResult (..), + TypeCheck (..)) +import qualified Development.IDE.Core.Shake as Shake +import Development.IDE.GHC.Compat (FieldLabel (flSelector), + FieldOcc (FieldOcc), + GenLocated (L), GhcPass, + GhcTc, + HasSrcSpan (getLoc), + HsConDetails (RecCon), + HsExpr (HsApp, HsVar, XExpr), + HsFieldBind (hfbLHS), + HsRecFields (..), + HsWrap (HsWrap), + Identifier, LPat, + Located, + NamedThing (getName), + Outputable, + TcGblEnv (tcg_binds), + Var (varName), + XXExprGhcTc (..), + conLikeFieldLabels, + nameSrcSpan, + pprNameUnqualified, + recDotDot, unLoc) +import Development.IDE.GHC.Compat.Core (Extension (NamedFieldPuns), + HsExpr (RecordCon, rcon_flds), + HsRecField, LHsExpr, + LocatedA, Name, Pat (..), + RealSrcSpan, UniqFM, + conPatDetails, emptyUFM, + hfbPun, hfbRHS, + lookupUFM, + mapConPatDetail, mapLoc, + pattern RealSrcSpan, + plusUFM_C, unitUFM) +import Development.IDE.GHC.Util (getExtensions, + printOutputable) +import Development.IDE.Graph (RuleResult) +import Development.IDE.Graph.Classes (Hashable, NFData) +import Development.IDE.Spans.Pragmas (NextPragmaInfo (..), + getFirstPragma, + insertNewPragma) +import GHC.Generics (Generic) +import Ide.Logger (Priority (..), + cmapWithPrio, logWith, + (<+>)) +import Ide.Plugin.Error (PluginError (PluginInternalError, PluginStaleResolve), + getNormalizedFilePathE, + handleMaybe) +import Ide.Plugin.RangeMap (RangeMap) +import qualified Ide.Plugin.RangeMap as RangeMap +import Ide.Plugin.Resolve (mkCodeActionWithResolveAndCommand) +import Ide.Types (PluginDescriptor (..), + PluginId (..), + PluginMethodHandler, + ResolveFunction, + defaultPluginDescriptor, + mkPluginHandler) +import qualified Language.LSP.Protocol.Lens as L +import Language.LSP.Protocol.Message (Method (..), + SMethod (SMethod_TextDocumentInlayHint)) +import Language.LSP.Protocol.Types (CodeAction (..), + CodeActionKind (CodeActionKind_RefactorRewrite), + CodeActionParams (CodeActionParams), + Command, InlayHint (..), + InlayHintLabelPart (InlayHintLabelPart), + InlayHintParams (InlayHintParams, _range, _textDocument), + TextDocumentIdentifier (TextDocumentIdentifier), + TextEdit (TextEdit), + WorkspaceEdit (WorkspaceEdit), + type (|?) 
(InL, InR)) #if __GLASGOW_HASKELL__ < 910 -import Development.IDE.GHC.Compat (HsExpansion (HsExpanded)) -#else -import Development.IDE.GHC.Compat (XXExprGhcRn (..)) +import Development.IDE.GHC.Compat (HsExpansion (HsExpanded)) #endif data Log @@ -105,8 +140,10 @@ descriptor :: Recorder (WithPriority Log) -> PluginId -> PluginDescriptor IdeSta descriptor recorder plId = let resolveRecorder = cmapWithPrio LogResolve recorder (carCommands, caHandlers) = mkCodeActionWithResolveAndCommand resolveRecorder plId codeActionProvider codeActionResolveProvider + ihDotdotHandler = mkPluginHandler SMethod_TextDocumentInlayHint (inlayHintDotdotProvider recorder) + ihPosRecHandler = mkPluginHandler SMethod_TextDocumentInlayHint (inlayHintPosRecProvider recorder) in (defaultPluginDescriptor plId "Provides a code action to make record wildcards explicit") - { pluginHandlers = caHandlers + { pluginHandlers = caHandlers <> ihDotdotHandler <> ihPosRecHandler , pluginCommands = carCommands , pluginRules = collectRecordsRule recorder *> collectNamesRule } @@ -120,12 +157,9 @@ codeActionProvider ideState _ (CodeActionParams _ _ docId range _) = do let actions = map (mkCodeAction enabledExtensions) (RangeMap.filterByRange range crCodeActions) pure $ InL actions where - mkCodeAction :: [Extension] -> Int -> Command |? CodeAction - mkCodeAction exts uid = InR CodeAction - { _title = "Expand record wildcard" - <> if NamedFieldPuns `elem` exts - then mempty - else " (needs extension: NamedFieldPuns)" + mkCodeAction :: [Extension] -> Int -> Command |? CodeAction + mkCodeAction exts uid = InR CodeAction + { _title = mkTitle exts -- TODO: `Expand positional record` without NamedFieldPuns if RecordInfoApp , _kind = Just CodeActionKind_RefactorRewrite , _diagnostics = Nothing , _isPreferred = Nothing @@ -144,17 +178,117 @@ codeActionResolveProvider ideState pId ca uri uid = do -- that this resolve is stale. 
record <- handleMaybe PluginStaleResolve $ IntMap.lookup uid crCodeActionResolve -- We should never fail to render - rendered <- handleMaybe (PluginInternalError "Failed to render") $ renderRecordInfo nameMap record - let edits = [rendered] - <> maybeToList (pragmaEdit enabledExtensions pragma) + rendered <- handleMaybe (PluginInternalError "Failed to render") $ renderRecordInfoAsTextEdit nameMap record + let shouldInsertNamedFieldPuns (RecordInfoApp _ _) = False + shouldInsertNamedFieldPuns _ = True + whenMaybe True x = x + whenMaybe False _ = Nothing + edits = [rendered] + <> maybeToList (whenMaybe (shouldInsertNamedFieldPuns record) (pragmaEdit enabledExtensions pragma)) pure $ ca & L.edit ?~ mkWorkspaceEdit edits where mkWorkspaceEdit ::[TextEdit] -> WorkspaceEdit mkWorkspaceEdit edits = WorkspaceEdit (Just $ Map.singleton uri edits) Nothing Nothing - pragmaEdit :: [Extension] -> NextPragmaInfo -> Maybe TextEdit - pragmaEdit exts pragma = if NamedFieldPuns `elem` exts - then Nothing - else Just $ insertNewPragma pragma NamedFieldPuns + +inlayHintDotdotProvider :: Recorder (WithPriority Log) -> PluginMethodHandler IdeState 'Method_TextDocumentInlayHint +inlayHintDotdotProvider _ state pId InlayHintParams {_textDocument = TextDocumentIdentifier uri, _range = visibleRange} = do + nfp <- getNormalizedFilePathE uri + pragma <- getFirstPragma pId state nfp + runIdeActionE "ExplicitFields.CollectRecords" (shakeExtras state) $ do + (crr@CRR {crCodeActions, crCodeActionResolve}, pm) <- useWithStaleFastE CollectRecords nfp + let -- Get all records with dotdot in current nfp + records = [ record + | Just range <- [toCurrentRange pm visibleRange] + , uid <- RangeMap.elementsInRange range crCodeActions + , Just record <- [IntMap.lookup uid crCodeActionResolve] ] + -- Get the definition of each dotdot of record + locations = [ fmap (,record) (getDefinition nfp pos) + | record <- records + , pos <- maybeToList $ fmap _start $ recordInfoToDotDotRange record ] + defnLocsList <- lift $ sequence locations + pure $ InL $ mapMaybe (mkInlayHint crr pragma pm) defnLocsList + where + mkInlayHint :: CollectRecordsResult -> NextPragmaInfo -> PositionMapping -> (Maybe [(Location, Identifier)], RecordInfo) -> Maybe InlayHint + mkInlayHint CRR {enabledExtensions, nameMap} pragma pm (defnLocs, record) = + let range = recordInfoToDotDotRange record + textEdits = maybeToList (renderRecordInfoAsTextEdit nameMap record) + <> maybeToList (pragmaEdit enabledExtensions pragma) + names = renderRecordInfoAsDotdotLabelName record + in do + currentEnd <- range >>= toCurrentPosition pm . _end + names' <- names + defnLocs' <- defnLocs + let excludeDotDot (Location _ (Range _ end)) = end /= currentEnd + -- find location from dotdot definitions that name equal to label name + findLocation name locations = + let -- filter locations not within dotdot range + filteredLocations = filter (excludeDotDot . fst) locations + -- checks if 'a' is equal to 'Name' if the 'Either' is 'Right a', otherwise return 'False' + nameEq = either (const False) ((==) name) + in fmap fst $ find (nameEq . 
snd) filteredLocations + valueWithLoc = [ (T.pack $ printName name, findLocation name defnLocs') | name <- names' ] + -- use `, ` to separate labels with definition location + label = intersperse (mkInlayHintLabelPart (", ", Nothing)) $ fmap mkInlayHintLabelPart valueWithLoc + pure $ InlayHint { _position = currentEnd -- at the end of dotdot + , _label = InR label + , _kind = Nothing -- neither a type nor a parameter + , _textEdits = Just textEdits -- same as CodeAction + , _tooltip = Just $ InL (mkTitle enabledExtensions) -- same as CodeAction + , _paddingLeft = Just True -- padding after dotdot + , _paddingRight = Nothing + , _data_ = Nothing + } + mkInlayHintLabelPart (value, loc) = InlayHintLabelPart value Nothing loc Nothing + + +inlayHintPosRecProvider :: Recorder (WithPriority Log) -> PluginMethodHandler IdeState 'Method_TextDocumentInlayHint +inlayHintPosRecProvider _ state _pId InlayHintParams {_textDocument = TextDocumentIdentifier uri, _range = visibleRange} = do + nfp <- getNormalizedFilePathE uri + runIdeActionE "ExplicitFields.CollectRecords" (shakeExtras state) $ do + (CRR {crCodeActions, nameMap, crCodeActionResolve}, pm) <- useWithStaleFastE CollectRecords nfp + let records = [ record + | Just range <- [toCurrentRange pm visibleRange] + , uid <- RangeMap.elementsInRange range crCodeActions + , Just record <- [IntMap.lookup uid crCodeActionResolve] ] + pure $ InL (concatMap (mkInlayHints nameMap pm) records) + where + mkInlayHints :: UniqFM Name [Name] -> PositionMapping -> RecordInfo -> [InlayHint] + mkInlayHints nameMap pm record@(RecordInfoApp _ (RecordAppExpr _ fla)) = + let textEdits = renderRecordInfoAsTextEdit nameMap record + in mapMaybe (mkInlayHint textEdits pm) fla + mkInlayHints _ _ _ = [] + + mkInlayHint :: Maybe TextEdit -> PositionMapping -> (Located FieldLabel, HsExpr GhcTc) -> Maybe InlayHint + mkInlayHint te pm (label, _) = + let (name, loc) = ((flSelector . unLoc) &&& (srcSpanToLocation . getLoc)) label + fieldDefLoc = srcSpanToLocation (nameSrcSpan name) + in do + (Location _ recRange) <- loc + currentStart <- toCurrentPosition pm (_start recRange) + pure InlayHint { _position = currentStart + , _label = InR $ pure (mkInlayHintLabelPart name fieldDefLoc) + , _kind = Nothing -- neither a type nor a parameter + , _textEdits = Just (maybeToList te) -- same as CodeAction + , _tooltip = Just $ InL "Expand positional record" -- same as CodeAction + , _paddingLeft = Nothing + , _paddingRight = Nothing + , _data_ = Nothing + } + + mkInlayHintLabelPart name loc = InlayHintLabelPart (printOutputable (pprNameUnqualified name) <> "=") Nothing loc Nothing + +mkTitle :: [Extension] -> Text +mkTitle exts = "Expand record wildcard" + <> if NamedFieldPuns `elem` exts + then mempty + else " (needs extension: NamedFieldPuns)" + + +pragmaEdit :: [Extension] -> NextPragmaInfo -> Maybe TextEdit +pragmaEdit exts pragma = if NamedFieldPuns `elem` exts + then Nothing + else Just $ insertNewPragma pragma NamedFieldPuns + collectRecordsRule :: Recorder (WithPriority Log) -> Rules () collectRecordsRule recorder = @@ -176,15 +310,11 @@ collectRecordsRule recorder = pure CRR {crCodeActions, crCodeActionResolve, nameMap, enabledExtensions} where getEnabledExtensions :: TcModuleResult -> [Extension] - getEnabledExtensions = getExtensions . tmrParsed + getEnabledExtensions = getExtensions . 
tmrParsed toRangeAndUnique (uid, recordInfo) = (recordInfoToRange recordInfo, uid) getRecords :: TcModuleResult -> [RecordInfo] -#if __GLASGOW_HASKELL__ < 910 -getRecords (tmrRenamed -> (hs_valds -> valBinds,_,_,_)) = collectRecords valBinds -#else -getRecords (tmrRenamed -> (hs_valds -> valBinds,_,_,_, _)) = collectRecords valBinds -#endif +getRecords (tcg_binds . tmrTypechecked -> valBinds) = collectRecords valBinds collectNamesRule :: Rules () collectNamesRule = defineNoDiagnostics mempty $ \CollectNames nfp -> runMaybeT $ do @@ -226,6 +356,7 @@ data CollectRecordsResult = CRR instance NFData CollectRecordsResult instance NFData RecordInfo +instance NFData RecordAppExpr instance Show CollectRecordsResult where show _ = "" @@ -248,22 +379,41 @@ instance Show CollectNamesResult where type instance RuleResult CollectNames = CollectNamesResult +data RecordAppExpr = RecordAppExpr (LHsExpr GhcTc) [(Located FieldLabel, HsExpr GhcTc)] + deriving (Generic) + data RecordInfo - = RecordInfoPat RealSrcSpan (Pat (GhcPass 'Renamed)) - | RecordInfoCon RealSrcSpan (HsExpr (GhcPass 'Renamed)) + = RecordInfoPat RealSrcSpan (Pat GhcTc) + | RecordInfoCon RealSrcSpan (HsExpr GhcTc) + | RecordInfoApp RealSrcSpan RecordAppExpr deriving (Generic) instance Pretty RecordInfo where pretty (RecordInfoPat ss p) = pretty (printOutputable ss) <> ":" <+> pretty (printOutputable p) pretty (RecordInfoCon ss e) = pretty (printOutputable ss) <> ":" <+> pretty (printOutputable e) + pretty (RecordInfoApp ss (RecordAppExpr _ fla)) + = pretty (printOutputable ss) <> ":" <+> hsep (map (pretty . printOutputable) fla) recordInfoToRange :: RecordInfo -> Range recordInfoToRange (RecordInfoPat ss _) = realSrcSpanToRange ss recordInfoToRange (RecordInfoCon ss _) = realSrcSpanToRange ss +recordInfoToRange (RecordInfoApp ss _) = realSrcSpanToRange ss + +recordInfoToDotDotRange :: RecordInfo -> Maybe Range +recordInfoToDotDotRange (RecordInfoPat _ (ConPat _ _ (RecCon flds))) = srcSpanToRange . getLoc =<< rec_dotdot flds +recordInfoToDotDotRange (RecordInfoCon _ (RecordCon _ _ flds)) = srcSpanToRange . getLoc =<< rec_dotdot flds +recordInfoToDotDotRange _ = Nothing + +renderRecordInfoAsTextEdit :: UniqFM Name [Name] -> RecordInfo -> Maybe TextEdit +renderRecordInfoAsTextEdit names (RecordInfoPat ss pat) = TextEdit (realSrcSpanToRange ss) <$> showRecordPat names pat +renderRecordInfoAsTextEdit _ (RecordInfoCon ss expr) = TextEdit (realSrcSpanToRange ss) <$> showRecordCon expr +renderRecordInfoAsTextEdit _ (RecordInfoApp ss appExpr) = TextEdit (realSrcSpanToRange ss) <$> showRecordApp appExpr + +renderRecordInfoAsDotdotLabelName :: RecordInfo -> Maybe [Name] +renderRecordInfoAsDotdotLabelName (RecordInfoPat _ pat) = showRecordPatFlds pat +renderRecordInfoAsDotdotLabelName (RecordInfoCon _ expr) = showRecordConFlds expr +renderRecordInfoAsDotdotLabelName _ = Nothing -renderRecordInfo :: UniqFM Name [Name] -> RecordInfo -> Maybe TextEdit -renderRecordInfo names (RecordInfoPat ss pat) = TextEdit (realSrcSpanToRange ss) <$> showRecordPat names pat -renderRecordInfo _ (RecordInfoCon ss expr) = TextEdit (realSrcSpanToRange ss) <$> showRecordCon expr -- | Checks if a 'Name' is referenced in the given map of names. 
The -- 'hasNonBindingOcc' check is necessary in order to make sure that only the @@ -281,16 +431,16 @@ referencedIn name names = maybe True hasNonBindingOcc $ lookupUFM names name filterReferenced :: (a -> Maybe Name) -> UniqFM Name [Name] -> [a] -> [a] filterReferenced getName names = filter (\x -> maybe True (`referencedIn` names) (getName x)) + preprocessRecordPat - :: p ~ GhcPass 'Renamed + :: p ~ GhcTc => UniqFM Name [Name] -> HsRecFields p (LPat p) -> HsRecFields p (LPat p) -preprocessRecordPat = preprocessRecord (getFieldName . unLoc) - where - getFieldName x = case unLoc (hfbRHS x) of - VarPat _ x' -> Just $ unLoc x' - _ -> Nothing +preprocessRecordPat = preprocessRecord (fmap varName . getFieldName . unLoc) + where getFieldName x = case unLoc (hfbRHS x) of + VarPat _ x' -> Just $ unLoc x' + _ -> Nothing -- No need to check the name usage in the record construction case preprocessRecordCon :: HsRecFields (GhcPass c) arg -> HsRecFields (GhcPass c) arg @@ -333,17 +483,65 @@ preprocessRecord getName names flds = flds { rec_dotdot = Nothing , rec_flds = r punsUsed = filterReferenced getName names puns' rec_flds' = no_puns <> punsUsed -showRecordPat :: Outputable (Pat (GhcPass 'Renamed)) => UniqFM Name [Name] -> Pat (GhcPass 'Renamed) -> Maybe Text +processRecordFlds + :: p ~ GhcPass c + => HsRecFields p arg + -> HsRecFields p arg +processRecordFlds flds = flds { rec_dotdot = Nothing , rec_flds = puns' } + where + no_pun_count = fromMaybe (length (rec_flds flds)) (recDotDot flds) + -- Field binds of the explicit form (e.g. `{ a = a' }`) should be drop + puns = drop no_pun_count (rec_flds flds) + -- `hsRecPun` is set to `True` in order to pretty-print the fields as field + -- puns (since there is similar mechanism in the `Outputable` instance as + -- explained above). + puns' = map (mapLoc (\fld -> fld { hfbPun = True })) puns + + +showRecordPat :: Outputable (Pat GhcTc) => UniqFM Name [Name] -> Pat GhcTc -> Maybe Text showRecordPat names = fmap printOutputable . mapConPatDetail (\case RecCon flds -> Just $ RecCon (preprocessRecordPat names flds) _ -> Nothing) +showRecordPatFlds :: Pat GhcTc -> Maybe [Name] +showRecordPatFlds (ConPat _ _ args) = do + fields <- processRecCon args + names <- mapM getFieldName (rec_flds fields) + pure names + where + processRecCon (RecCon flds) = Just $ processRecordFlds flds + processRecCon _ = Nothing +#if __GLASGOW_HASKELL__ < 911 + getOccName (FieldOcc x _) = Just $ getName x +#else + getOccName (FieldOcc _ x) = Just $ getName (unLoc x) +#endif + getOccName _ = Nothing + getFieldName = getOccName . unLoc . hfbLHS . unLoc +showRecordPatFlds _ = Nothing + showRecordCon :: Outputable (HsExpr (GhcPass c)) => HsExpr (GhcPass c) -> Maybe Text showRecordCon expr@(RecordCon _ _ flds) = Just $ printOutputable $ expr { rcon_flds = preprocessRecordCon flds } showRecordCon _ = Nothing +showRecordConFlds :: p ~ GhcTc => HsExpr p -> Maybe [Name] +showRecordConFlds (RecordCon _ _ flds) = + mapM getFieldName (rec_flds $ processRecordFlds flds) + where + getVarName (HsVar _ lidp) = Just $ getName lidp + getVarName _ = Nothing + getFieldName = getVarName . unLoc . hfbRHS . 
unLoc +showRecordConFlds _ = Nothing + +showRecordApp :: RecordAppExpr -> Maybe Text +showRecordApp (RecordAppExpr recConstr fla) + = Just $ printOutputable recConstr <> " { " + <> T.intercalate ", " (showFieldWithArg <$> fla) + <> " }" + where showFieldWithArg (field, arg) = printOutputable field <> " = " <> printOutputable arg + collectRecords :: GenericQ [RecordInfo] collectRecords = everythingBut (<>) (([], False) `mkQ` getRecPatterns `extQ` getRecCons) @@ -360,7 +558,7 @@ collectRecords = everythingBut (<>) (([], False) `mkQ` getRecPatterns `extQ` get collectNames :: GenericQ (UniqFM Name [Name]) collectNames = everything (plusUFM_C (<>)) (emptyUFM `mkQ` (\x -> unitUFM x [x])) -getRecCons :: LHsExpr (GhcPass 'Renamed) -> ([RecordInfo], Bool) +getRecCons :: LHsExpr GhcTc -> ([RecordInfo], Bool) -- When we stumble upon an occurrence of HsExpanded, we only want to follow a -- single branch. We do this here, by explicitly returning occurrences from -- traversing the original branch, and returning True, which keeps syb from @@ -369,25 +567,50 @@ getRecCons :: LHsExpr (GhcPass 'Renamed) -> ([RecordInfo], Bool) -- branch #if __GLASGOW_HASKELL__ >= 910 -getRecCons (unLoc -> XExpr (ExpandedThingRn a _)) = (collectRecords a, True) +getRecCons (unLoc -> XExpr (ExpandedThingTc a _)) = (collectRecords a, False) #else -getRecCons (unLoc -> XExpr (HsExpanded a _)) = (collectRecords a, True) +getRecCons (unLoc -> XExpr (ExpansionExpr (HsExpanded _ a))) = (collectRecords a, True) #endif getRecCons e@(unLoc -> RecordCon _ _ flds) | isJust (rec_dotdot flds) = (mkRecInfo e, False) where - mkRecInfo :: LHsExpr (GhcPass 'Renamed) -> [RecordInfo] + mkRecInfo :: LHsExpr GhcTc -> [RecordInfo] mkRecInfo expr = [ RecordInfoCon realSpan' (unLoc expr) | RealSrcSpan realSpan' _ <- [ getLoc expr ]] +getRecCons expr@(unLoc -> app@(HsApp _ _ _)) = + let fieldss = maybeToList $ getFields app [] + recInfo = concatMap mkRecInfo fieldss + in (recInfo, not (null recInfo)) + where + mkRecInfo :: RecordAppExpr -> [RecordInfo] + mkRecInfo appExpr = + [ RecordInfoApp realSpan' appExpr | RealSrcSpan realSpan' _ <- [ getLoc expr ] ] + + getFields :: HsExpr GhcTc -> [LHsExpr GhcTc] -> Maybe RecordAppExpr + getFields (HsApp _ constr@(unLoc -> expr) arg) args + | not (null fls) + = Just (RecordAppExpr constr labelWithArgs) + where fls = getExprFields expr + labelWithArgs = zipWith mkLabelWithArg fls (arg : args) + mkLabelWithArg label arg = (L (getLoc arg) label, unLoc arg) + getFields (HsApp _ constr arg) args = getFields (unLoc constr) (arg : args) + getFields _ _ = Nothing + + getExprFields :: HsExpr GhcTc -> [FieldLabel] + getExprFields (XExpr (ConLikeTc (conLikeFieldLabels -> fls) _ _)) = fls +#if __GLASGOW_HASKELL__ >= 911 + getExprFields (XExpr (WrapExpr _ expr)) = getExprFields expr +#else + getExprFields (XExpr (WrapExpr (HsWrap _ expr))) = getExprFields expr +#endif + getExprFields _ = [] getRecCons _ = ([], False) -getRecPatterns :: LPat (GhcPass 'Renamed) -> ([RecordInfo], Bool) +getRecPatterns :: LPat GhcTc -> ([RecordInfo], Bool) getRecPatterns conPat@(conPatDetails . 
unLoc -> Just (RecCon flds)) | isJust (rec_dotdot flds) = (mkRecInfo conPat, False) where - mkRecInfo :: LPat (GhcPass 'Renamed) -> [RecordInfo] + mkRecInfo :: LPat GhcTc -> [RecordInfo] mkRecInfo pat = [ RecordInfoPat realSpan' (unLoc pat) | RealSrcSpan realSpan' _ <- [ getLoc pat ]] getRecPatterns _ = ([], False) - - diff --git a/plugins/hls-explicit-record-fields-plugin/test/Main.hs b/plugins/hls-explicit-record-fields-plugin/test/Main.hs index f8e53e44a1..1a4fa5d2ba 100644 --- a/plugins/hls-explicit-record-fields-plugin/test/Main.hs +++ b/plugins/hls-explicit-record-fields-plugin/test/Main.hs @@ -4,7 +4,11 @@ module Main ( main ) where import Data.Either (rights) +import Data.Text (Text) import qualified Data.Text as T +import Development.IDE (filePathToUri', + toNormalizedFilePath') +import Development.IDE.Test (canonicalizeUri) import qualified Ide.Plugin.ExplicitFields as ExplicitFields import System.FilePath ((<.>), ()) import Test.Hls @@ -17,21 +21,236 @@ plugin = mkPluginTestDescriptor ExplicitFields.descriptor "explicit-fields" test :: TestTree test = testGroup "explicit-fields" - [ mkTest "WildcardOnly" "WildcardOnly" 12 10 12 20 - , mkTest "Unused" "Unused" 12 10 12 20 - , mkTest "Unused2" "Unused2" 12 10 12 20 - , mkTest "WithPun" "WithPun" 13 10 13 25 - , mkTest "WithExplicitBind" "WithExplicitBind" 12 10 12 32 - , mkTest "Mixed" "Mixed" 14 10 14 37 - , mkTest "Construction" "Construction" 16 5 16 15 - , mkTest "HsExpanded1" "HsExpanded1" 17 10 17 20 - , mkTest "HsExpanded2" "HsExpanded2" 23 10 23 22 - , mkTestNoAction "ExplicitBinds" "ExplicitBinds" 11 10 11 52 - , mkTestNoAction "Puns" "Puns" 12 10 12 31 - , mkTestNoAction "Infix" "Infix" 11 11 11 31 - , mkTestNoAction "Prefix" "Prefix" 10 11 10 28 + [ testGroup "code actions" + [ mkTest "WildcardOnly" "WildcardOnly" 12 10 12 20 + , mkTest "Unused" "Unused" 12 10 12 20 + , mkTest "Unused2" "Unused2" 12 10 12 20 + , mkTest "WithPun" "WithPun" 13 10 13 25 + , mkTest "WithExplicitBind" "WithExplicitBind" 12 10 12 32 + , mkTest "Mixed" "Mixed" 14 10 14 37 + , mkTest "Construction" "Construction" 16 5 16 15 + , mkTest "PositionalConstruction" "PositionalConstruction" 15 5 15 15 + , mkTest "HsExpanded1" "HsExpanded1" 17 10 17 20 + , mkTest "HsExpanded2" "HsExpanded2" 23 10 23 22 + , mkTestNoAction "ExplicitBinds" "ExplicitBinds" 11 10 11 52 + , mkTestNoAction "Puns" "Puns" 12 10 12 31 + , mkTestNoAction "Infix" "Infix" 11 11 11 31 + , mkTestNoAction "Prefix" "Prefix" 10 11 10 28 + , mkTest "PolymorphicRecordConstruction" "PolymorphicRecordConstruction" 15 5 15 15 + ] + , testGroup "inlay hints" + [ mkInlayHintsTest "Construction" Nothing 16 $ \ih -> do + let mkLabelPart' = mkLabelPartOffsetLength "Construction" + foo <- mkLabelPart' 13 6 "foo" + bar <- mkLabelPart' 14 6 "bar" + baz <- mkLabelPart' 15 6 "baz" + (@?=) ih + [defInlayHint { _position = Position 16 14 + , _label = InR [ foo, commaPart + , bar, commaPart + , baz + ] + , _textEdits = Just [ mkLineTextEdit "MyRec {foo, bar, baz}" 16 5 15 + , mkPragmaTextEdit 2 + ] + , _tooltip = Just $ InL "Expand record wildcard (needs extension: NamedFieldPuns)" + , _paddingLeft = Just True + }] + , mkInlayHintsTest "PositionalConstruction" Nothing 15 $ \ih -> do + let mkLabelPart' = mkLabelPartOffsetLengthSub1 "PositionalConstruction" + foo <- mkLabelPart' 5 4 "foo=" + bar <- mkLabelPart' 6 4 "bar=" + baz <- mkLabelPart' 7 4 "baz=" + (@?=) ih + [ defInlayHint { _position = Position 15 11 + , _label = InR [ foo ] + , _textEdits = Just [ mkLineTextEdit "MyRec { foo = a, bar = b, baz = 
c }" 15 5 16 ] + , _tooltip = Just $ InL "Expand positional record" + , _paddingLeft = Nothing + } + , defInlayHint { _position = Position 15 13 + , _label = InR [ bar ] + , _textEdits = Just [ mkLineTextEdit "MyRec { foo = a, bar = b, baz = c }" 15 5 16 ] + , _tooltip = Just $ InL "Expand positional record" + , _paddingLeft = Nothing + } + , defInlayHint { _position = Position 15 15 + , _label = InR [ baz ] + , _textEdits = Just [ mkLineTextEdit "MyRec { foo = a, bar = b, baz = c }" 15 5 16 ] + , _tooltip = Just $ InL "Expand positional record" + , _paddingLeft = Nothing + } + ] + , mkInlayHintsTest "HsExpanded1" Nothing 17 $ \ih -> do + let mkLabelPart' = mkLabelPartOffsetLength "HsExpanded1" + foo <- mkLabelPart' 11 4 "foo" + (@?=) ih + [defInlayHint { _position = Position 17 19 + , _label = InR [ foo ] + , _textEdits = Just [ mkLineTextEdit "MyRec {foo}" 17 10 20 ] + , _tooltip = Just $ InL "Expand record wildcard" + , _paddingLeft = Just True + }] + , mkInlayHintsTest "HsExpanded1" (Just " (positional)") 13 $ \ih -> do + let mkLabelPart' = mkLabelPartOffsetLengthSub1 "HsExpanded1" + foo <- mkLabelPart' 11 4 "foo=" + (@?=) ih + [defInlayHint { _position = Position 13 21 + , _label = InR [ foo ] + , _textEdits = Just [ mkLineTextEdit "MyRec { foo = 5 }" 13 15 22 ] + , _tooltip = Just $ InL "Expand positional record" + , _paddingLeft = Nothing + }] + , mkInlayHintsTest "HsExpanded2" Nothing 23 $ \ih -> do + let mkLabelPart' = mkLabelPartOffsetLength "HsExpanded2" + bar <- mkLabelPart' 14 4 "bar" + (@?=) ih + [defInlayHint { _position = Position 23 21 + , _label = InR [ bar ] + , _textEdits = Just [ mkLineTextEdit "YourRec {bar}" 23 10 22 ] + , _tooltip = Just $ InL "Expand record wildcard" + , _paddingLeft = Just True + }] + , mkInlayHintsTest "HsExpanded2" (Just " (positional)") 16 $ \ih -> do + let mkLabelPart' = mkLabelPartOffsetLengthSub1 "HsExpanded2" + foo <- mkLabelPart' 11 4 "foo=" + (@?=) ih + [defInlayHint { _position = Position 16 21 + , _label = InR [ foo ] + , _textEdits = Just [ mkLineTextEdit "MyRec { foo = 5 }" 16 15 22 ] + , _tooltip = Just $ InL "Expand positional record" + , _paddingLeft = Nothing + }] + , mkInlayHintsTest "Mixed" Nothing 14 $ \ih -> do + let mkLabelPart' = mkLabelPartOffsetLength "Mixed" + baz <- mkLabelPart' 9 4 "baz" + quux <- mkLabelPart' 10 4 "quux" + (@?=) ih + [defInlayHint { _position = Position 14 36 + , _label = InR [ baz, commaPart + , quux + ] + , _textEdits = Just [ mkLineTextEdit "MyRec {foo, bar = bar', baz}" 14 10 37 ] + , _tooltip = Just $ InL "Expand record wildcard" + , _paddingLeft = Just True + }] + , mkInlayHintsTest "Unused" Nothing 12 $ \ih -> do + let mkLabelPart' = mkLabelPartOffsetLength "Unused" + foo <- mkLabelPart' 6 4 "foo" + bar <- mkLabelPart' 7 4 "bar" + baz <- mkLabelPart' 8 4 "baz" + (@?=) ih + [defInlayHint { _position = Position 12 19 + , _label = InR [ foo, commaPart + , bar, commaPart + , baz + ] + , _textEdits = Just [ mkLineTextEdit "MyRec {foo, bar}" 12 10 20 + , mkPragmaTextEdit 2 + ] + , _tooltip = Just $ InL "Expand record wildcard (needs extension: NamedFieldPuns)" + , _paddingLeft = Just True + }] + , mkInlayHintsTest "Unused2" Nothing 12 $ \ih -> do + let mkLabelPart' = mkLabelPartOffsetLength "Unused2" + foo <- mkLabelPart' 6 4 "foo" + bar <- mkLabelPart' 7 4 "bar" + baz <- mkLabelPart' 8 4 "baz" + (@?=) ih + [defInlayHint { _position = Position 12 19 + , _label = InR [ foo, commaPart + , bar, commaPart + , baz + ] + , _textEdits = Just [ mkLineTextEdit "MyRec {foo, bar}" 12 10 20 + , mkPragmaTextEdit 2 
+ ] + , _tooltip = Just $ InL "Expand record wildcard (needs extension: NamedFieldPuns)" + , _paddingLeft = Just True + }] + , mkInlayHintsTest "WildcardOnly" Nothing 12 $ \ih -> do + let mkLabelPart' = mkLabelPartOffsetLength "WildcardOnly" + foo <- mkLabelPart' 6 4 "foo" + bar <- mkLabelPart' 7 4 "bar" + baz <- mkLabelPart' 8 4 "baz" + (@?=) ih + [defInlayHint { _position = Position 12 19 + , _label = InR [ foo, commaPart + , bar, commaPart + , baz + ] + , _textEdits = Just [ mkLineTextEdit "MyRec {foo, bar, baz}" 12 10 20 + , mkPragmaTextEdit 2 + ] + , _tooltip = Just $ InL "Expand record wildcard (needs extension: NamedFieldPuns)" + , _paddingLeft = Just True + }] + , mkInlayHintsTest "WithExplicitBind" Nothing 12 $ \ih -> do + let mkLabelPart' = mkLabelPartOffsetLength "WithExplicitBind" + bar <- mkLabelPart' 7 4 "bar" + baz <- mkLabelPart' 8 4 "baz" + (@?=) ih + [defInlayHint { _position = Position 12 31 + , _label = InR [ bar, commaPart + , baz + ] + , _textEdits = Just [ mkLineTextEdit "MyRec {foo = foo', bar, baz}" 12 10 32 + , mkPragmaTextEdit 2 + ] + , _tooltip = Just $ InL "Expand record wildcard (needs extension: NamedFieldPuns)" + , _paddingLeft = Just True + }] + , mkInlayHintsTest "WithPun" Nothing 13 $ \ih -> do + let mkLabelPart' = mkLabelPartOffsetLength "WithPun" + bar <- mkLabelPart' 8 4 "bar" + baz <- mkLabelPart' 9 4 "baz" + (@?=) ih + [defInlayHint { _position = Position 13 24 + , _label = InR [ bar, commaPart + , baz + ] + , _textEdits = Just [ mkLineTextEdit "MyRec {foo, bar, baz}" 13 10 25 ] + , _tooltip = Just $ InL "Expand record wildcard" + , _paddingLeft = Just True + }] + , mkInlayHintsTest "PolymorphicRecordConstruction" Nothing 15 $ \ih -> do + let mkLabelPart' = mkLabelPartOffsetLengthSub1 "PolymorphicRecordConstruction" + foo <- mkLabelPart' 5 4 "foo=" + bar <- mkLabelPart' 6 4 "bar=" + baz <- mkLabelPart' 7 4 "baz=" + (@?=) ih + [ defInlayHint { _position = Position 15 11 + , _label = InR [ foo ] + , _textEdits = Just [ mkLineTextEdit "MyRec { foo = a, bar = b, baz = c }" 15 5 16 ] + , _tooltip = Just $ InL "Expand positional record" + , _paddingLeft = Nothing + } + , defInlayHint { _position = Position 15 13 + , _label = InR [ bar ] + , _textEdits = Just [ mkLineTextEdit "MyRec { foo = a, bar = b, baz = c }" 15 5 16 ] + , _tooltip = Just $ InL "Expand positional record" + , _paddingLeft = Nothing + } + , defInlayHint { _position = Position 15 15 + , _label = InR [ baz ] + , _textEdits = Just [ mkLineTextEdit "MyRec { foo = a, bar = b, baz = c }" 15 5 16 ] + , _tooltip = Just $ InL "Expand positional record" + , _paddingLeft = Nothing + } + ] + ] ] +mkInlayHintsTest :: FilePath -> Maybe TestName -> UInt -> ([InlayHint] -> Assertion) -> TestTree +mkInlayHintsTest fp postfix line assert = + testCase (fp ++ concat postfix) $ + runSessionWithServer def plugin testDataDir $ do + doc <- openDoc (fp ++ ".hs") "haskell" + inlayHints <- getInlayHints doc (lineRange line) + liftIO $ assert inlayHints + where + lineRange line = Range (Position line 0) (Position line 1000) + mkTestNoAction :: TestName -> FilePath -> UInt -> UInt -> UInt -> UInt -> TestTree mkTestNoAction title fp x1 y1 x2 y2 = testCase title $ @@ -66,5 +285,60 @@ isExplicitFieldsCodeAction :: CodeAction -> Bool isExplicitFieldsCodeAction CodeAction {_title} = "Expand record wildcard" `T.isPrefixOf` _title +defInlayHint :: InlayHint +defInlayHint = + InlayHint + { _position = Position 0 0 + , _label = InR [] + , _kind = Nothing + , _textEdits = Nothing + , _tooltip = Nothing + , _paddingLeft = 
Nothing + , _paddingRight = Nothing + , _data_ = Nothing + } + +mkLabelPart :: (Text -> UInt) -> FilePath -> UInt -> UInt -> Text -> IO InlayHintLabelPart +mkLabelPart offset fp line start value = do + uri' <- uri + pure $ InlayHintLabelPart { _location = Just (location uri' line start) + , _value = value + , _tooltip = Nothing + , _command = Nothing + } + where + toUri = fromNormalizedUri . filePathToUri' . toNormalizedFilePath' + uri = canonicalizeUri $ toUri (testDataDir (fp ++ ".hs")) + location uri line char = Location uri (Range (Position line char) (Position line (char + offset value))) + +mkLabelPartOffsetLength :: FilePath -> UInt -> UInt -> Text -> IO InlayHintLabelPart +mkLabelPartOffsetLength = mkLabelPart (fromIntegral . T.length) + +mkLabelPartOffsetLengthSub1 :: FilePath -> UInt -> UInt -> Text -> IO InlayHintLabelPart +mkLabelPartOffsetLengthSub1 = mkLabelPart (fromIntegral . subtract 1 . T.length) + +commaPart :: InlayHintLabelPart +commaPart = + InlayHintLabelPart + { _location = Nothing + , _value = ", " + , _tooltip = Nothing + , _command = Nothing + } + +mkLineTextEdit :: Text -> UInt -> UInt -> UInt -> TextEdit +mkLineTextEdit newText line x y = + TextEdit + { _range = Range (Position line x) (Position line y) + , _newText = newText + } + +mkPragmaTextEdit :: UInt -> TextEdit +mkPragmaTextEdit line = + TextEdit + { _range = Range (Position line 0) (Position line 0) + , _newText = "{-# LANGUAGE NamedFieldPuns #-}\n" + } + testDataDir :: FilePath testDataDir = "plugins" "hls-explicit-record-fields-plugin" "test" "testdata" diff --git a/plugins/hls-explicit-record-fields-plugin/test/testdata/PolymorphicRecordConstruction.expected.hs b/plugins/hls-explicit-record-fields-plugin/test/testdata/PolymorphicRecordConstruction.expected.hs new file mode 100644 index 0000000000..f289508524 --- /dev/null +++ b/plugins/hls-explicit-record-fields-plugin/test/testdata/PolymorphicRecordConstruction.expected.hs @@ -0,0 +1,16 @@ +{-# LANGUAGE Haskell2010 #-} + +module PolymorphicRecordConstruction where + +data MyRec m = MyRec + { foo :: Int + , bar :: Int + , baz :: Char + } + +convertMe :: () -> MyRec () +convertMe _ = + let a = 3 + b = 5 + c = 'a' + in MyRec { foo = a, bar = b, baz = c } diff --git a/plugins/hls-explicit-record-fields-plugin/test/testdata/PolymorphicRecordConstruction.hs b/plugins/hls-explicit-record-fields-plugin/test/testdata/PolymorphicRecordConstruction.hs new file mode 100644 index 0000000000..f8b9791da5 --- /dev/null +++ b/plugins/hls-explicit-record-fields-plugin/test/testdata/PolymorphicRecordConstruction.hs @@ -0,0 +1,16 @@ +{-# LANGUAGE Haskell2010 #-} + +module PolymorphicRecordConstruction where + +data MyRec m = MyRec + { foo :: Int + , bar :: Int + , baz :: Char + } + +convertMe :: () -> MyRec () +convertMe _ = + let a = 3 + b = 5 + c = 'a' + in MyRec a b c diff --git a/plugins/hls-explicit-record-fields-plugin/test/testdata/PositionalConstruction.expected.hs b/plugins/hls-explicit-record-fields-plugin/test/testdata/PositionalConstruction.expected.hs new file mode 100644 index 0000000000..667fc25fe0 --- /dev/null +++ b/plugins/hls-explicit-record-fields-plugin/test/testdata/PositionalConstruction.expected.hs @@ -0,0 +1,16 @@ +{-# LANGUAGE Haskell2010 #-} + +module PositionalConstruction where + +data MyRec = MyRec + { foo :: Int + , bar :: Int + , baz :: Char + } + +convertMe :: () -> MyRec +convertMe _ = + let a = 3 + b = 5 + c = 'a' + in MyRec { foo = a, bar = b, baz = c } diff --git 
a/plugins/hls-explicit-record-fields-plugin/test/testdata/PositionalConstruction.hs b/plugins/hls-explicit-record-fields-plugin/test/testdata/PositionalConstruction.hs new file mode 100644 index 0000000000..0b2f8d9f86 --- /dev/null +++ b/plugins/hls-explicit-record-fields-plugin/test/testdata/PositionalConstruction.hs @@ -0,0 +1,16 @@ +{-# LANGUAGE Haskell2010 #-} + +module PositionalConstruction where + +data MyRec = MyRec + { foo :: Int + , bar :: Int + , baz :: Char + } + +convertMe :: () -> MyRec +convertMe _ = + let a = 3 + b = 5 + c = 'a' + in MyRec a b c diff --git a/plugins/hls-floskell-plugin/src/Ide/Plugin/Floskell.hs b/plugins/hls-floskell-plugin/src/Ide/Plugin/Floskell.hs index 87f9f49e5b..f78761958c 100644 --- a/plugins/hls-floskell-plugin/src/Ide/Plugin/Floskell.hs +++ b/plugins/hls-floskell-plugin/src/Ide/Plugin/Floskell.hs @@ -6,12 +6,13 @@ module Ide.Plugin.Floskell , provider ) where -import Control.Monad.Except (throwError) +import Control.Monad.Except (throwError) import Control.Monad.IO.Class -import Data.List (find) -import qualified Data.Text as T -import qualified Data.Text.Lazy as TL -import Development.IDE hiding (pluginHandlers) +import Data.List (find) +import qualified Data.Text as T +import qualified Data.Text.Lazy as TL +import Development.IDE hiding (pluginHandlers) +import Development.IDE.Core.PluginUtils (mkFormattingHandlers) import Floskell import Ide.Plugin.Error import Ide.PluginUtils diff --git a/plugins/hls-fourmolu-plugin/src/Ide/Plugin/Fourmolu.hs b/plugins/hls-fourmolu-plugin/src/Ide/Plugin/Fourmolu.hs index 7615b7d2f2..c12866d7f3 100644 --- a/plugins/hls-fourmolu-plugin/src/Ide/Plugin/Fourmolu.hs +++ b/plugins/hls-fourmolu-plugin/src/Ide/Plugin/Fourmolu.hs @@ -12,42 +12,44 @@ module Ide.Plugin.Fourmolu ( ) where import Control.Exception -import Control.Lens ((^.)) -import Control.Monad (guard) -import Control.Monad.Error.Class (MonadError (throwError)) -import Control.Monad.IO.Class (MonadIO (liftIO)) -import Control.Monad.Trans.Class (MonadTrans (lift)) -import Control.Monad.Trans.Except (ExceptT (..), runExceptT) -import Data.Bifunctor (bimap) -import Data.List (intercalate) -import Data.Maybe (catMaybes) -import Data.Text (Text) -import qualified Data.Text as T -import Data.Version (showVersion) -import Development.IDE hiding (pluginHandlers) -import Development.IDE.GHC.Compat as Compat hiding (Cpp, Warning, - hang, vcat) -import qualified Development.IDE.GHC.Compat.Util as S -import GHC.LanguageExtensions.Type (Extension (Cpp)) +import Control.Lens ((^.)) +import Control.Monad (guard) +import Control.Monad.Error.Class (MonadError (throwError)) +import Control.Monad.IO.Class (MonadIO (liftIO)) +import Control.Monad.Trans.Class (MonadTrans (lift)) +import Control.Monad.Trans.Except (ExceptT (..), runExceptT) +import Data.Bifunctor (bimap) +import Data.List (intercalate) +import Data.Maybe (catMaybes) +import Data.Text (Text) +import qualified Data.Text as T +import Data.Version (showVersion) +import Development.IDE hiding (pluginHandlers) +import Development.IDE.Core.PluginUtils (mkFormattingHandlers) +import Development.IDE.GHC.Compat as Compat hiding (Cpp, + Warning, hang, + vcat) +import qualified Development.IDE.GHC.Compat.Util as S +import GHC.LanguageExtensions.Type (Extension (Cpp)) import Ide.Plugin.Error import Ide.Plugin.Properties -import Ide.PluginUtils (makeDiffTextEdit) +import Ide.PluginUtils (makeDiffTextEdit) import Ide.Types -import Language.LSP.Protocol.Lens (HasTabSize (tabSize)) +import Language.LSP.Protocol.Lens 
(HasTabSize (tabSize)) import Language.LSP.Protocol.Message import Language.LSP.Protocol.Types -import Language.LSP.Server hiding (defaultConfig) +import Language.LSP.Server hiding (defaultConfig) import Ormolu import Ormolu.Config -import qualified Paths_fourmolu as Fourmolu +import qualified Paths_fourmolu as Fourmolu import System.Exit import System.FilePath -import System.Process.Run (cwd, proc) -import System.Process.Text (readCreateProcessWithExitCode) -import Text.Read (readMaybe) +import System.Process.Run (cwd, proc) +import System.Process.Text (readCreateProcessWithExitCode) +import Text.Read (readMaybe) #if MIN_VERSION_fourmolu(0,16,0) -import qualified Data.Yaml as Yaml +import qualified Data.Yaml as Yaml #endif descriptor :: Recorder (WithPriority LogEvent) -> PluginId -> PluginDescriptor IdeState diff --git a/plugins/hls-gadt-plugin/src/Ide/Plugin/GHC.hs b/plugins/hls-gadt-plugin/src/Ide/Plugin/GHC.hs index 7db7b0378f..7d77d7ae87 100644 --- a/plugins/hls-gadt-plugin/src/Ide/Plugin/GHC.hs +++ b/plugins/hls-gadt-plugin/src/Ide/Plugin/GHC.hs @@ -7,16 +7,20 @@ {-# OPTIONS_GHC -Wno-overlapping-patterns #-} module Ide.Plugin.GHC where +#if !MIN_VERSION_ghc(9,11,0) import Data.Functor ((<&>)) +#endif import Data.List.Extra (stripInfix) import qualified Data.Text as T import Development.IDE import Development.IDE.GHC.Compat import Development.IDE.GHC.Compat.ExactPrint -import GHC.Parser.Annotation (AddEpAnn (..), - DeltaPos (..), +import GHC.Parser.Annotation (DeltaPos (..), EpAnn (..), EpAnnComments (EpaComments)) +#if MIN_VERSION_ghc(9,11,0) +import GHC.Parser.Annotation (EpToken (..)) +#endif import Ide.PluginUtils (subRange) import Language.Haskell.GHC.ExactPrint.Parsers (parseDecl) @@ -44,6 +48,11 @@ import GHC.Parser.Annotation (EpUniToken (..), import Language.Haskell.GHC.ExactPrint.Utils (showAst) #endif +#if MIN_VERSION_ghc(9,11,0) +import GHC.Types.SrcLoc (UnhelpfulSpanReason (..)) +#else +import GHC.Parser.Annotation (AddEpAnn (..)) +#endif type GP = GhcPass Parsed @@ -97,7 +106,9 @@ h98ToGADTConDecl :: h98ToGADTConDecl dataName tyVars ctxt = \case ConDeclH98{..} -> ConDeclGADT -#if MIN_VERSION_ghc(9,9,0) +#if MIN_VERSION_ghc(9,11,0) + (AnnConDeclGADT [] [] NoEpUniTok) +#elif MIN_VERSION_ghc(9,9,0) (NoEpUniTok, con_ext) #else con_ext @@ -133,10 +144,8 @@ h98ToGADTConDecl dataName tyVars ctxt = \case #endif #if MIN_VERSION_ghc(9,9,0) renderDetails (RecCon recs) = RecConGADT NoEpUniTok recs -#elif MIN_VERSION_ghc(9,3,0) - renderDetails (RecCon recs) = RecConGADT recs noHsUniTok #else - renderDetails (RecCon recs) = RecConGADT recs + renderDetails (RecCon recs) = RecConGADT recs noHsUniTok #endif @@ -206,16 +215,16 @@ prettyGADTDecl df decl = adjustTyClD = \case Right (L _ (TyClD _ tycld)) -> Right $ adjustDataDecl tycld Right x -> Left $ "Expect TyClD but got " <> showAst x -#if MIN_VERSION_ghc(9,3,0) Left err -> Left $ printWithoutUniques err -#else - Left err -> Left $ show err -#endif adjustDataDecl DataDecl{..} = DataDecl { tcdDExt = adjustWhere tcdDExt , tcdDataDefn = tcdDataDefn - { dd_cons = + { +#if MIN_VERSION_ghc(9,11,0) + dd_ext = adjustDefnWhere (dd_ext tcdDataDefn), +#endif + dd_cons = fmap adjustCon (dd_cons tcdDataDefn) } , .. 
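[Editorial aside] The GADT-plugin hunk above relies on the compatibility idiom used throughout this patch: each GHC 9.11/9.12 AST change (EpToken, AnnConDeclGADT, the adjusted dd_ext where-token) is isolated behind a MIN_VERSION_ghc guard so one source tree keeps building against every supported compiler. A minimal sketch of that idiom, assuming only that the ghc package is a build dependency; the module and function names are illustrative and not part of the patch:

{-# LANGUAGE CPP #-}
module CompatSketch (annotationFlavour) where

-- Exactly one branch is compiled, selected by the GHC API version the plugin is built against.
annotationFlavour :: String
#if MIN_VERSION_ghc(9,11,0)
annotationFlavour = "EpToken-style exact-print annotations"
#elif MIN_VERSION_ghc(9,9,0)
annotationFlavour = "EpUniToken-style exact-print annotations"
#else
annotationFlavour = "AddEpAnn-style exact-print annotations"
#endif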
@@ -224,7 +233,11 @@ prettyGADTDecl df decl = -- Make every data constructor start with a new line and 2 spaces adjustCon :: LConDecl GP -> LConDecl GP -#if MIN_VERSION_ghc(9,9,0) +#if MIN_VERSION_ghc(9,11,0) + adjustCon (L _ r) = + let delta = EpaDelta (UnhelpfulSpan UnhelpfulNoLocationInfo) (DifferentLine 1 2) [] + in L (EpAnn delta (AnnListItem []) (EpaComments [])) r +#elif MIN_VERSION_ghc(9,9,0) adjustCon (L _ r) = let delta = EpaDelta (DifferentLine 1 3) [] in L (EpAnn delta (AnnListItem []) (EpaComments [])) r @@ -235,6 +248,10 @@ prettyGADTDecl df decl = #endif -- Adjust where annotation to the same line of the type constructor +#if MIN_VERSION_ghc(9,11,0) + -- tcdDext is just a placeholder in ghc-9.12 + adjustWhere = id +#else adjustWhere tcdDExt = tcdDExt <&> #if !MIN_VERSION_ghc(9,9,0) map @@ -244,7 +261,16 @@ prettyGADTDecl df decl = then AddEpAnn AnnWhere d1 else AddEpAnn ann l ) +#endif +#if MIN_VERSION_ghc(9,11,0) + adjustDefnWhere annDataDefn + | andd_where annDataDefn == NoEpTok = annDataDefn + | otherwise = annDataDefn {andd_where = andd_where'} + where + (EpTok (EpaSpan aw)) = andd_where annDataDefn + andd_where' = EpTok (EpaDelta aw (SameLine 1) []) +#endif -- Remove the first extra line if exist removeExtraEmptyLine s = case stripInfix "\n\n" s of Just (x, xs) -> x <> "\n" <> xs @@ -263,6 +289,10 @@ noUsed = EpAnnNotUsed #endif pattern UserTyVar' :: LIdP pass -> HsTyVarBndr flag pass +#if MIN_VERSION_ghc(9,11,0) +pattern UserTyVar' s <- HsTvb _ _ (HsBndrVar _ s) _ +#else pattern UserTyVar' s <- UserTyVar _ _ s +#endif implicitTyVars = wrapXRec @GP mkHsOuterImplicit diff --git a/plugins/hls-hlint-plugin/src/Ide/Plugin/Hlint.hs b/plugins/hls-hlint-plugin/src/Ide/Plugin/Hlint.hs index 97b9cabcae..9621f894e3 100644 --- a/plugins/hls-hlint-plugin/src/Ide/Plugin/Hlint.hs +++ b/plugins/hls-hlint-plugin/src/Ide/Plugin/Hlint.hs @@ -13,11 +13,6 @@ {-# LANGUAGE ViewPatterns #-} {-# OPTIONS_GHC -Wno-orphans #-} --- On 9.4 we get a new redundant constraint warning, but deleting the --- constraint breaks the build on earlier versions. Rather than apply --- lots of CPP, we just disable the warning until later. 
-{-# OPTIONS_GHC -Wno-redundant-constraints #-} - #ifdef GHC_LIB #define MIN_GHC_API_VERSION(x,y,z) MIN_VERSION_ghc_lib_parser(x,y,z) #else @@ -34,10 +29,8 @@ import Control.Concurrent.STM import Control.DeepSeq import Control.Exception import Control.Lens ((?~), (^.)) -import Control.Monad import Control.Monad.Error.Class (MonadError (throwError)) import Control.Monad.IO.Class (MonadIO (liftIO)) -import Control.Monad.Trans.Class (MonadTrans (lift)) import Control.Monad.Trans.Except (ExceptT (..), runExceptT) import Data.Aeson.Types (FromJSON (..), @@ -50,11 +43,14 @@ import qualified Data.Map as M import Data.Maybe import qualified Data.Text as T import qualified Data.Text.Encoding as T +import Data.Text.Utf16.Rope.Mixed (Rope) +import qualified Data.Text.Utf16.Rope.Mixed as Rope import Data.Typeable import Development.IDE hiding (Error, getExtensions) import Development.IDE.Core.Compile (sourceParser) +import Development.IDE.Core.FileStore (getVersionedTextDoc) import Development.IDE.Core.Rules (defineNoFile, getParsedModuleWithComments) import Development.IDE.Core.Shake (getDiagnostics) @@ -124,6 +120,7 @@ import System.Environment (setEnv, #endif import Development.IDE.Core.PluginUtils as PluginUtils import Text.Regex.TDFA.Text () + -- --------------------------------------------------------------------- data Log @@ -139,7 +136,7 @@ instance Pretty Log where LogShake log -> pretty log LogApplying fp res -> "Applying hint(s) for" <+> viaShow fp <> ":" <+> viaShow res LogGeneratedIdeas fp ideas -> "Generated hlint ideas for for" <+> viaShow fp <> ":" <+> viaShow ideas - LogUsingExtensions fp exts -> "Using extensions for " <+> viaShow fp <> ":" <+> pretty exts + LogUsingExtensions fp exts -> "Using extensions for " <+> viaShow fp <> ":" <> line <> indent 4 (pretty exts) LogGetIdeas fp -> "Getting hlint ideas for " <+> viaShow fp LogResolve msg -> pretty msg @@ -181,19 +178,19 @@ descriptor recorder plId = -- This rule only exists for generating file diagnostics -- so the RuleResult is empty data GetHlintDiagnostics = GetHlintDiagnostics - deriving (Eq, Show, Typeable, Generic) + deriving (Eq, Show, Generic) instance Hashable GetHlintDiagnostics instance NFData GetHlintDiagnostics type instance RuleResult GetHlintDiagnostics = () -- | Hlint rules to generate file diagnostics based on hlint hints --- | This rule is recomputed when: --- | - A file has been edited via --- | - `getIdeas` -> `getParsedModule` in any case --- | - `getIdeas` -> `getFileContents` if the hls ghc does not match the hlint default ghc --- | - The client settings have changed, to honour the `hlintOn` setting, via `getClientConfigAction` --- | - The hlint specific settings have changed, via `getHlintSettingsRule` +-- This rule is recomputed when: +-- - A file has been edited via +-- - `getIdeas` -> `getParsedModule` in any case +-- - `getIdeas` -> `getFileContents` if the hls ghc does not match the hlint default ghc +-- - The client settings have changed, to honour the `hlintOn` setting, via `getClientConfigAction` +-- - The hlint specific settings have changed, via `getHlintSettingsRule` rules :: Recorder (WithPriority Log) -> PluginId -> Rules () rules recorder plugin = do define (cmapWithPrio LogShake recorder) $ \GetHlintDiagnostics file -> do @@ -207,16 +204,16 @@ rules recorder plugin = do liftIO $ argsSettings flags action $ do - files <- getFilesOfInterestUntracked - void $ uses GetHlintDiagnostics $ Map.keys files + files <- Map.keys <$> getFilesOfInterestUntracked + Shake.runWithSignal (Proxy 
@"kick/start/hlint") (Proxy @"kick/done/hlint") files GetHlintDiagnostics where diagnostics :: NormalizedFilePath -> Either ParseError [Idea] -> [FileDiagnostic] diagnostics file (Right ideas) = - (file, ShowDiag,) <$> catMaybes [ideaToDiagnostic i | i <- ideas] + [ideErrorFromLspDiag diag file Nothing | i <- ideas, Just diag <- [ideaToDiagnostic i]] diagnostics file (Left parseErr) = - [(file, ShowDiag, parseErrorToDiagnostic parseErr)] + [ideErrorFromLspDiag (parseErrorToDiagnostic parseErr) file Nothing] ideaToDiagnostic :: Idea -> Maybe Diagnostic @@ -305,9 +302,9 @@ getIdeas recorder nfp = do then return Nothing else do flags' <- setExtensions flags - (_, contents) <- getFileContents nfp + contents <- getFileContents nfp let fp = fromNormalizedFilePath nfp - let contents' = T.unpack <$> contents + let contents' = T.unpack . Rope.toText <$> contents Just <$> liftIO (parseModuleEx flags' fp contents') setExtensions flags = do @@ -334,7 +331,7 @@ getExtensions nfp = do -- --------------------------------------------------------------------- data GetHlintSettings = GetHlintSettings - deriving (Eq, Show, Typeable, Generic) + deriving (Eq, Show, Generic) instance Hashable GetHlintSettings instance NFData GetHlintSettings instance NFData Hint where rnf = rwhnf @@ -366,14 +363,19 @@ codeActionProvider ideState _pluginId (CodeActionParams _ _ documentId _ context | let TextDocumentIdentifier uri = documentId , Just docNormalizedFilePath <- uriToNormalizedFilePath (toNormalizedUri uri) = do - verTxtDocId <- lift $ pluginGetVersionedTextDoc documentId + verTxtDocId <- + liftIO $ + runAction "Hlint.getVersionedTextDoc" ideState $ + getVersionedTextDoc documentId liftIO $ fmap (InL . map LSP.InR) $ do allDiagnostics <- atomically $ getDiagnostics ideState let numHintsInDoc = length - [diagnostic | (diagnosticNormalizedFilePath, _, diagnostic) <- allDiagnostics - , validCommand diagnostic - , diagnosticNormalizedFilePath == docNormalizedFilePath + [lspDiagnostic + | diag <- allDiagnostics + , let lspDiagnostic = fdLspDiagnostic diag + , validCommand lspDiagnostic + , fdFilePath diag == docNormalizedFilePath ] let numHintsInContext = length [diagnostic | diagnostic <- diags @@ -445,7 +447,7 @@ mkCodeAction title diagnostic data_ isPreferred = , _data_ = data_ } -mkSuppressHintTextEdits :: DynFlags -> T.Text -> T.Text -> [LSP.TextEdit] +mkSuppressHintTextEdits :: DynFlags -> Rope -> T.Text -> [LSP.TextEdit] mkSuppressHintTextEdits dynFlags fileContents hint = let NextPragmaInfo{ nextPragmaLine, lineSplitTextEdits } = getNextPragmaInfo dynFlags (Just fileContents) @@ -516,7 +518,7 @@ applyHint recorder ide nfp mhint verTxtDocId = let commands = map ideaRefactoring ideas' logWith recorder Debug $ LogGeneratedIdeas nfp commands let fp = fromNormalizedFilePath nfp - (_, mbOldContent) <- liftIO $ runAction' $ getFileContents nfp + mbOldContent <- fmap (fmap Rope.toText) $ liftIO $ runAction' $ getFileContents nfp oldContent <- maybe (liftIO $ fmap T.decodeUtf8 (BS.readFile fp)) return mbOldContent modsum <- liftIO $ runAction' $ use_ GetModSummary nfp let dflags = ms_hspp_opts $ msrModSummary modsum diff --git a/plugins/hls-hlint-plugin/test/Main.hs b/plugins/hls-hlint-plugin/test/Main.hs index 17f83e291a..7d92706051 100644 --- a/plugins/hls-hlint-plugin/test/Main.hs +++ b/plugins/hls-hlint-plugin/test/Main.hs @@ -1,3 +1,5 @@ +{-# LANGUAGE DataKinds #-} +{-# LANGUAGE OverloadedLabels #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE RecordWildCards #-} module Main @@ -5,19 +7,21 @@ module Main ) where 
import Control.Lens ((^.)) -import Control.Monad (when) +import Control.Monad (guard, when) import Data.Aeson (Value (..), object, (.=)) import Data.Functor (void) import Data.List (find) import qualified Data.Map as Map import Data.Maybe (fromJust, isJust) +import Data.Proxy (Proxy (Proxy)) import qualified Data.Text as T import Ide.Plugin.Config (Config (..)) import qualified Ide.Plugin.Config as Plugin import qualified Ide.Plugin.Hlint as HLint import qualified Language.LSP.Protocol.Lens as L -import System.FilePath (()) +import System.FilePath ((<.>), ()) import Test.Hls +import Test.Hls.FileSystem main :: IO () main = defaultTestRunner tests @@ -86,7 +90,7 @@ suggestionsTests = testGroup "hlint suggestions" [ testCase "provides 3.8 code actions including apply all" $ runHlintSession "" $ do doc <- openDoc "Base.hs" "haskell" - diags@(reduceDiag:_) <- waitForDiagnosticsFromSource doc "hlint" + diags@(reduceDiag:_) <- hlintCaptureKick liftIO $ do length diags @?= 2 -- "Eta Reduce" and "Redundant Id" @@ -124,7 +128,7 @@ suggestionsTests = , testShiftRoot = True} $ const $ do doc <- openDoc "Base.hs" "haskell" - _ <- waitForDiagnosticsFromSource doc "hlint" + _ <- hlintCaptureKick cars <- getAllCodeActions doc etaReduce <- liftIO $ inspectCommand cars ["Eta reduce"] @@ -136,7 +140,7 @@ suggestionsTests = , testCase ".hlint.yaml fixity rules are applied" $ runHlintSession "fixity" $ do doc <- openDoc "FixityUse.hs" "haskell" - expectNoMoreDiagnostics 3 doc "hlint" + testNoHlintDiagnostics doc , testCase "changing document contents updates hlint diagnostics" $ runHlintSession "" $ do doc <- openDoc "Base.hs" "haskell" @@ -150,7 +154,8 @@ suggestionsTests = } changeDoc doc [change] - expectNoMoreDiagnostics 3 doc "hlint" + -- We need to wait until hlint has been rerun and clears the diagnostic + [] <- waitForDiagnosticsFrom doc let change' = TextDocumentContentChangeEvent $ InL TextDocumentContentChangePartial @@ -166,7 +171,7 @@ suggestionsTests = testHlintDiagnostics doc , knownBrokenForHlintOnGhcLib "hlint doesn't take in account cpp flag as ghc -D argument" $ - testCase "[#554] hlint diagnostics works with CPP via language pragma" $ runHlintSession "" $ do + testCase "[#554] hlint diagnostics works with CPP via language pragma" $ runHlintSession "cpp" $ do doc <- openDoc "CppCond.hs" "haskell" testHlintDiagnostics doc @@ -186,27 +191,27 @@ suggestionsTests = testRefactor "LambdaCase.hs" "Redundant bracket" ("{-# LANGUAGE LambdaCase #-}" : expectedLambdaCase) - , expectFailBecause "apply-refact doesn't work with cpp" $ + , ignoreTestBecause "apply-refact doesn't work with cpp" $ testCase "apply hints works with CPP via -XCPP argument" $ runHlintSession "cpp" $ do testRefactor "CppCond.hs" "Redundant bracket" expectedCPP - , expectFailBecause "apply-refact doesn't work with cpp" $ + , ignoreTestBecause "apply-refact doesn't work with cpp" $ testCase "apply hints works with CPP via language pragma" $ runHlintSession "" $ do testRefactor "CppCond.hs" "Redundant bracket" ("{-# LANGUAGE CPP #-}" : expectedCPP) , testCase "hlint diagnostics ignore hints honouring .hlint.yaml" $ runHlintSession "ignore" $ do doc <- openDoc "CamelCase.hs" "haskell" - expectNoMoreDiagnostics 3 doc "hlint" + testNoHlintDiagnostics doc , testCase "hlint diagnostics ignore hints honouring ANN annotations" $ runHlintSession "" $ do doc <- openDoc "IgnoreAnn.hs" "haskell" - expectNoMoreDiagnostics 3 doc "hlint" + testNoHlintDiagnostics doc , testCase "hlint diagnostics ignore hints honouring HLINT annotations" $ 
runHlintSession "" $ do doc <- openDoc "IgnoreAnnHlint.hs" "haskell" - expectNoMoreDiagnostics 3 doc "hlint" + testNoHlintDiagnostics doc , testCase "apply-refact preserve regular comments" $ runHlintSession "" $ do testRefactor "Comments.hs" "Redundant bracket" expectedComments @@ -216,7 +221,7 @@ suggestionsTests = , testCase "applyAll is shown only when there is at least one diagnostic in range" $ runHlintSession "" $ do doc <- openDoc "TwoHints.hs" "haskell" - _ <- waitForDiagnosticsFromSource doc "hlint" + _ <- hlintCaptureKick firstLine <- map fromAction <$> getCodeActions doc (mkRange 0 0 0 0) secondLine <- map fromAction <$> getCodeActions doc (mkRange 1 0 1 0) @@ -231,22 +236,20 @@ suggestionsTests = liftIO $ hasApplyAll multiLine @? "Missing apply all code action" , testCase "hlint should warn about unused extensions" $ runHlintSession "unusedext" $ do - doc <- openDoc "UnusedExtension.hs" "haskell" - diags@(unusedExt:_) <- waitForDiagnosticsFromSource doc "hlint" + _ <- openDoc "UnusedExtension.hs" "haskell" + diags@(unusedExt:_) <- hlintCaptureKick liftIO $ do length diags @?= 1 unusedExt ^. L.code @?= Just (InR "refact:Unused LANGUAGE pragma") - , testCase "[#1279] hlint should not activate extensions like PatternSynonyms" $ runHlintSession "" $ do + , testCase "[#1279] hlint should not activate extensions like PatternSynonyms" $ runHlintSession "" $ do doc <- openDoc "PatternKeyword.hs" "haskell" - -- hlint will report a parse error if PatternSynonyms is enabled - expectNoMoreDiagnostics 3 doc "hlint" + testNoHlintDiagnostics doc , testCase "hlint should not warn about redundant irrefutable pattern with LANGUAGE Strict" $ runHlintSession "" $ do doc <- openDoc "StrictData.hs" "haskell" - - expectNoMoreDiagnostics 3 doc "hlint" + testNoHlintDiagnostics doc ] where testRefactor file caTitle expected = do @@ -273,14 +276,24 @@ suggestionsTests = , "g = 2" , "#endif", "" ] - expectedComments = [ "-- comment before header" - , "module Comments where", "" - , "{-# standalone annotation #-}", "" - , "-- standalone comment", "" - , "-- | haddock comment" - , "f = {- inline comment -} {- inline comment inside refactored code -}1 -- ending comment", "" - , "-- final comment" - ] + expectedComments = case ghcVersion of + GHC912 -> [ "-- comment before header" + , "module Comments where", "" + , "{-# standalone annotation #-}", "" + , "-- standalone comment", "" + , "-- | haddock comment" + , "f = {- inline comment -}{- inline comment inside refactored code -} 1 -- ending comment", "" + , "-- final comment" + ] + + _ -> [ "-- comment before header" + , "module Comments where", "" + , "{-# standalone annotation #-}", "" + , "-- standalone comment", "" + , "-- | haddock comment" + , "f = {- inline comment -} {- inline comment inside refactored code -}1 -- ending comment", "" + , "-- final comment" + ] expectedComments2 = [ "module TwoHintsAndComment where" , "biggest = foldr1 max -- the line above will show two hlint hints, \"eta reduce\" and \"use maximum\"" ] @@ -301,9 +314,7 @@ configTests = testGroup "hlint plugin config" [ disableHlint - diags' <- waitForDiagnosticsFrom doc - - liftIO $ noHlintDiagnostics diags' + testNoHlintDiagnostics doc , testCase "adding hlint flags to plugin configuration removes hlint diagnostics" $ runHlintSession "" $ do setIgnoringConfigurationRequests False @@ -315,9 +326,7 @@ configTests = testGroup "hlint plugin config" [ let config' = hlintConfigWithFlags ["--ignore=Redundant id", "--hint=test-hlint-config.yaml"] setHlsConfig config' - diags' <- 
waitForDiagnosticsFrom doc - - liftIO $ noHlintDiagnostics diags' + testNoHlintDiagnostics doc , testCase "adding hlint flags to plugin configuration adds hlint diagnostics" $ runHlintSession "" $ do setIgnoringConfigurationRequests False @@ -325,12 +334,12 @@ configTests = testGroup "hlint plugin config" [ doc <- openDoc "Generalise.hs" "haskell" - expectNoMoreDiagnostics 3 doc "hlint" + testNoHlintDiagnostics doc let config' = hlintConfigWithFlags ["--with-group=generalise"] setHlsConfig config' - diags' <- waitForDiagnosticsFromSource doc "hlint" + diags' <- hlintCaptureKick d <- liftIO $ inspectDiagnostic diags' ["Use <>"] liftIO $ do @@ -352,14 +361,39 @@ runHlintSession subdir = failIfSessionTimeout . } . const -noHlintDiagnostics :: [Diagnostic] -> Assertion +hlintKickDone :: Session () +hlintKickDone = kick (Proxy @"kick/done/hlint") >>= guard . not . null + +hlintKickStart :: Session () +hlintKickStart = kick (Proxy @"kick/start/hlint") >>= guard . not . null + +hlintCaptureKick :: Session [Diagnostic] +hlintCaptureKick = captureKickDiagnostics hlintKickStart hlintKickDone + +noHlintDiagnostics :: HasCallStack => [Diagnostic] -> Assertion noHlintDiagnostics diags = - Just "hlint" `notElem` map (^. L.source) diags @? "There are no hlint diagnostics" + all (not . isHlintDiagnostic) diags @? "There are hlint diagnostics" + +isHlintDiagnostic :: Diagnostic -> Bool +isHlintDiagnostic diag = + Just "hlint" == diag ^. L.source -testHlintDiagnostics :: TextDocumentIdentifier -> Session () +testHlintDiagnostics :: HasCallStack => TextDocumentIdentifier -> Session () testHlintDiagnostics doc = do - diags <- waitForDiagnosticsFromSource doc "hlint" - liftIO $ length diags > 0 @? "There are hlint diagnostics" + diags <- captureKickNonEmptyDiagnostics doc + liftIO $ length diags > 0 @? "There are no hlint diagnostics" + +captureKickNonEmptyDiagnostics :: HasCallStack => TextDocumentIdentifier -> Session [Diagnostic] +captureKickNonEmptyDiagnostics doc = do + diags <- hlintCaptureKick + if null diags + then captureKickNonEmptyDiagnostics doc + else pure diags + +testNoHlintDiagnostics :: HasCallStack => TextDocumentIdentifier -> Session () +testNoHlintDiagnostics _doc = do + diags <- hlintCaptureKick + liftIO $ noHlintDiagnostics diags hlintConfigWithFlags :: [T.Text] -> Config hlintConfigWithFlags flags = @@ -385,7 +419,7 @@ disableHlint = setHlsConfig $ def { Plugin.plugins = Map.fromList [ ("hlint", de -- Although a given hlint version supports one direct ghc, we could use several versions of hlint -- each one supporting a different ghc version. It should be a temporary situation though. 
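[Editorial aside] The kick-based helpers introduced above (hlintKickStart, hlintKickDone, hlintCaptureKick) replace the old waitForDiagnosticsFromSource polling: a test now waits for one full hlint kick cycle and inspects whatever diagnostics were published during it. A hypothetical usage sketch, assuming only the definitions from this patch and the existing Base.hs test file; the test name is illustrative:

exampleKickCapture :: TestTree
exampleKickCapture =
  testCase "collects hlint diagnostics from one kick cycle" $
    runHlintSession "" $ do
      _ <- openDoc "Base.hs" "haskell"
      -- Blocks until the kick/start/hlint .. kick/done/hlint pair has been observed,
      -- returning the diagnostics captured in between.
      diags <- hlintCaptureKick
      liftIO $ not (null diags) @? "expected at least one hlint diagnostic"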
knownBrokenForHlintOnGhcLib :: String -> TestTree -> TestTree -knownBrokenForHlintOnGhcLib = expectFailBecause +knownBrokenForHlintOnGhcLib = ignoreTestBecause -- 1's based data Point = Point { @@ -408,6 +442,10 @@ makeCodeActionNotFoundAtString :: Point -> String makeCodeActionNotFoundAtString Point {..} = "CodeAction not found at line: " <> show line <> ", column: " <> show column +-- ------------------------------------------------------------------------ +-- Test runner helpers +-- ------------------------------------------------------------------------ + ignoreHintGoldenTest :: TestName -> FilePath -> Point -> T.Text -> TestTree ignoreHintGoldenTest testCaseName goldenFilename point hintName = goldenTest testCaseName goldenFilename point (getIgnoreHintText hintName) @@ -418,8 +456,8 @@ applyHintGoldenTest testCaseName goldenFilename point hintName = do goldenTest :: TestName -> FilePath -> Point -> T.Text -> TestTree goldenTest testCaseName goldenFilename point hintText = - setupGoldenHlintTest testCaseName goldenFilename $ \document -> do - _ <- waitForDiagnosticsFromSource document "hlint" + setupGoldenHlintTest testCaseName goldenFilename codeActionNoResolveCaps $ \document -> do + _ <- hlintCaptureKick actions <- getCodeActions document $ pointToRange point case find ((== Just hintText) . getCodeActionTitle) actions of Just (InR codeAction) -> do @@ -429,16 +467,15 @@ goldenTest testCaseName goldenFilename point hintText = _ -> liftIO $ assertFailure $ makeCodeActionNotFoundAtString point -setupGoldenHlintTest :: TestName -> FilePath -> (TextDocumentIdentifier -> Session ()) -> TestTree -setupGoldenHlintTest testName path = +setupGoldenHlintTest :: TestName -> FilePath -> ClientCapabilities -> (TextDocumentIdentifier -> Session ()) -> TestTree +setupGoldenHlintTest testName path config = goldenWithTestConfig def - { testConfigCaps = codeActionNoResolveCaps + { testConfigCaps = config , testShiftRoot = True , testPluginDescriptor = hlintPlugin - , testDirLocation = Left testDir - } - testName testDir path "expected" "hs" - + , testDirLocation = Right tree + } testName tree path "expected" "hs" + where tree = mkVirtualFileTree testDir (directProject (path <.> "hs")) ignoreHintGoldenResolveTest :: TestName -> FilePath -> Point -> T.Text -> TestTree ignoreHintGoldenResolveTest testCaseName goldenFilename point hintName = @@ -450,19 +487,9 @@ applyHintGoldenResolveTest testCaseName goldenFilename point hintName = do goldenResolveTest :: TestName -> FilePath -> Point -> T.Text -> TestTree goldenResolveTest testCaseName goldenFilename point hintText = - setupGoldenHlintResolveTest testCaseName goldenFilename $ \document -> do - _ <- waitForDiagnosticsFromSource document "hlint" + setupGoldenHlintTest testCaseName goldenFilename codeActionResolveCaps $ \document -> do + _ <- hlintCaptureKick actions <- getAndResolveCodeActions document $ pointToRange point case find ((== Just hintText) . 
getCodeActionTitle) actions of Just (InR codeAction) -> executeCodeAction codeAction _ -> liftIO $ assertFailure $ makeCodeActionNotFoundAtString point - -setupGoldenHlintResolveTest :: TestName -> FilePath -> (TextDocumentIdentifier -> Session ()) -> TestTree -setupGoldenHlintResolveTest testName path = - goldenWithTestConfig def - { testConfigCaps = codeActionResolveCaps - , testShiftRoot = True - , testPluginDescriptor = hlintPlugin - , testDirLocation = Left testDir - } - testName testDir path "expected" "hs" diff --git a/plugins/hls-module-name-plugin/src/Ide/Plugin/ModuleName.hs b/plugins/hls-module-name-plugin/src/Ide/Plugin/ModuleName.hs index 5c3f4ba781..5dc053f47d 100644 --- a/plugins/hls-module-name-plugin/src/Ide/Plugin/ModuleName.hs +++ b/plugins/hls-module-name-plugin/src/Ide/Plugin/ModuleName.hs @@ -4,8 +4,6 @@ {-# LANGUAGE RecordWildCards #-} {-# LANGUAGE ViewPatterns #-} -{-# OPTIONS_GHC -Wwarn -fno-warn-type-defaults #-} - {- | Keep the module name in sync with its file path. Provide CodeLenses to: @@ -32,17 +30,18 @@ import Data.Maybe (mapMaybe) import Data.Ord (comparing) import Data.String (IsString) import qualified Data.Text as T +import qualified Data.Text.Utf16.Rope.Mixed as Rope import Development.IDE (GetParsedModule (GetParsedModule), GhcSession (GhcSession), IdeState, Pretty, Priority (Debug), Recorder, WithPriority, colon, evalGhcEnv, - hscEnvWithImportPaths, - logWith, + hscEnv, logWith, realSrcSpanToRange, rootDir, runAction, useWithStale, (<+>)) +import Development.IDE.Core.FileStore (getFileContents) import Development.IDE.Core.PluginUtils import Development.IDE.Core.PositionMapping (toCurrentRange) import Development.IDE.GHC.Compat (GenLocated (L), @@ -57,7 +56,6 @@ import Ide.PluginUtils (toAbsolute) import Ide.Types import Language.LSP.Protocol.Message import Language.LSP.Protocol.Types -import Language.LSP.VFS (virtualFileText) import System.FilePath (dropExtension, normalise, pathSeparator, splitDirectories, @@ -112,8 +110,8 @@ action recorder state uri = do nfp <- getNormalizedFilePathE uri fp <- uriToFilePathE uri - contents <- lift . pluginGetVirtualFile $ toNormalizedUri uri - let emptyModule = maybe True (T.null . T.strip . virtualFileText) contents + contents <- liftIO $ runAction "ModuleName.getFileContents" state $ getFileContents nfp + let emptyModule = maybe True (T.null . T.strip . 
Rope.toText) contents correctNames <- mapExceptT liftIO $ pathModuleNames recorder state nfp fp logWith recorder Debug (CorrectNames correctNames) @@ -140,7 +138,7 @@ pathModuleNames recorder state normFilePath filePath | firstLetter isLower $ takeFileName filePath = return ["Main"] | otherwise = do (session, _) <- runActionE "ModuleName.ghcSession" state $ useWithStaleE GhcSession normFilePath - srcPaths <- liftIO $ evalGhcEnv (hscEnvWithImportPaths session) $ importPaths <$> getSessionDynFlags + srcPaths <- liftIO $ evalGhcEnv (hscEnv session) $ importPaths <$> getSessionDynFlags logWith recorder Debug (SrcPaths srcPaths) -- Append a `pathSeparator` to make the path looks like a directory, diff --git a/plugins/hls-notes-plugin/src/Ide/Plugin/Notes.hs b/plugins/hls-notes-plugin/src/Ide/Plugin/Notes.hs index 3d9f398ece..1c40ea76b3 100644 --- a/plugins/hls-notes-plugin/src/Ide/Plugin/Notes.hs +++ b/plugins/hls-notes-plugin/src/Ide/Plugin/Notes.hs @@ -3,7 +3,6 @@ module Ide.Plugin.Notes (descriptor, Log) where import Control.Lens ((^.)) import Control.Monad.Except (throwError) import Control.Monad.IO.Class (liftIO) -import Control.Monad.Trans (lift) import qualified Data.Array as A import Data.HashMap.Strict (HashMap) import qualified Data.HashMap.Strict as HM @@ -25,7 +24,6 @@ import qualified Language.LSP.Protocol.Lens as L import Language.LSP.Protocol.Message (Method (Method_TextDocumentDefinition), SMethod (SMethod_TextDocumentDefinition)) import Language.LSP.Protocol.Types -import Language.LSP.VFS (VirtualFile (..)) import Text.Regex.TDFA (Regex, caseSensitive, defaultCompOpt, defaultExecOpt, @@ -79,8 +77,9 @@ jumpToNote state _ param | Just nfp <- uriToNormalizedFilePath uriOrig = do let Position l c = param ^. L.position - contents <- fmap _file_text . err "Error getting file contents" - =<< lift (pluginGetVirtualFile uriOrig) + contents <- + err "Error getting file contents" + =<< liftIO (runAction "notes.getfileContents" state (getFileContents nfp)) line <- err "Line not found in file" (listToMaybe $ Rope.lines $ fst (Rope.splitAtLine 1 $ snd $ Rope.splitAtLine (fromIntegral l) contents)) let noteOpt = listToMaybe $ mapMaybe (atPos $ fromIntegral c) $ matchAllText noteRefRegex line @@ -110,7 +109,7 @@ findNotesInFile file recorder = do -- the user. If not, we need to read it from disk. contentOpt <- (snd =<<) <$> use GetFileContents file content <- case contentOpt of - Just x -> pure x + Just x -> pure $ Rope.toText x Nothing -> liftIO $ readFileUtf8 $ fromNormalizedFilePath file let matches = (A.! 
1) <$> matchAllText noteRegex content m = toPositions matches content diff --git a/plugins/hls-ormolu-plugin/src/Ide/Plugin/Ormolu.hs b/plugins/hls-ormolu-plugin/src/Ide/Plugin/Ormolu.hs index 741f158eff..90c5214d8e 100644 --- a/plugins/hls-ormolu-plugin/src/Ide/Plugin/Ormolu.hs +++ b/plugins/hls-ormolu-plugin/src/Ide/Plugin/Ormolu.hs @@ -10,36 +10,37 @@ module Ide.Plugin.Ormolu ) where -import Control.Exception (Handler (..), IOException, - SomeException (..), catches, - handle) -import Control.Monad.Except (runExceptT, throwError) +import Control.Exception (Handler (..), IOException, + SomeException (..), catches, + handle) +import Control.Monad.Except (runExceptT, throwError) import Control.Monad.Extra import Control.Monad.Trans -import Control.Monad.Trans.Except (ExceptT (..), mapExceptT) -import Data.Functor ((<&>)) -import Data.List (intercalate) -import Data.Maybe (catMaybes) -import Data.Text (Text) -import qualified Data.Text as T -import Development.IDE hiding (pluginHandlers) -import Development.IDE.GHC.Compat (hsc_dflags, moduleNameString) -import qualified Development.IDE.GHC.Compat as D -import qualified Development.IDE.GHC.Compat.Util as S +import Control.Monad.Trans.Except (ExceptT (..), mapExceptT) +import Data.Functor ((<&>)) +import Data.List (intercalate) +import Data.Maybe (catMaybes) +import Data.Text (Text) +import qualified Data.Text as T +import Development.IDE hiding (pluginHandlers) +import Development.IDE.Core.PluginUtils (mkFormattingHandlers) +import Development.IDE.GHC.Compat (hsc_dflags, moduleNameString) +import qualified Development.IDE.GHC.Compat as D +import qualified Development.IDE.GHC.Compat.Util as S import GHC.LanguageExtensions.Type -import Ide.Plugin.Error (PluginError (PluginInternalError)) +import Ide.Plugin.Error (PluginError (PluginInternalError)) import Ide.Plugin.Properties import Ide.PluginUtils -import Ide.Types hiding (Config) -import qualified Ide.Types as Types +import Ide.Types hiding (Config) +import qualified Ide.Types as Types import Language.LSP.Protocol.Types -import Language.LSP.Server hiding (defaultConfig) +import Language.LSP.Server hiding (defaultConfig) import Ormolu import System.Exit import System.FilePath -import System.Process.Run (cwd, proc) -import System.Process.Text (readCreateProcessWithExitCode) -import Text.Read (readMaybe) +import System.Process.Run (cwd, proc) +import System.Process.Text (readCreateProcessWithExitCode) +import Text.Read (readMaybe) -- --------------------------------------------------------------------- diff --git a/plugins/hls-overloaded-record-dot-plugin/src/Ide/Plugin/OverloadedRecordDot.hs b/plugins/hls-overloaded-record-dot-plugin/src/Ide/Plugin/OverloadedRecordDot.hs index d5dcde3c2a..8ead286b67 100644 --- a/plugins/hls-overloaded-record-dot-plugin/src/Ide/Plugin/OverloadedRecordDot.hs +++ b/plugins/hls-overloaded-record-dot-plugin/src/Ide/Plugin/OverloadedRecordDot.hs @@ -289,27 +289,25 @@ getRecSels (unLoc -> XExpr (ExpandedThingRn a _)) = (collectRecordSelectors a, T #else getRecSels (unLoc -> XExpr (HsExpanded a _)) = (collectRecordSelectors a, True) #endif -#if __GLASGOW_HASKELL__ >= 903 -- applied record selection: "selector record" or "selector (record)" or -- "selector selector2.record2" +#if __GLASGOW_HASKELL__ >= 911 +getRecSels e@(unLoc -> HsApp _ se@(unLoc -> XExpr (HsRecSelRn _)) re) = +#else getRecSels e@(unLoc -> HsApp _ se@(unLoc -> HsRecSel _ _) re) = +#endif ( [ RecordSelectorExpr (realSrcSpanToRange realSpan') se re | RealSrcSpan realSpan' _ <- [ getLoc e ] ], False ) -- 
Record selection where the field is being applied with the "$" operator: -- "selector $ record" -getRecSels e@(unLoc -> OpApp _ se@(unLoc -> HsRecSel _ _) - (unLoc -> HsVar _ (unLoc -> d)) re) | d == dollarName = - ( [ RecordSelectorExpr (realSrcSpanToRange realSpan') se re - | RealSrcSpan realSpan' _ <- [ getLoc e ] ], False ) +#if __GLASGOW_HASKELL__ >= 911 +getRecSels e@(unLoc -> OpApp _ se@(unLoc -> XExpr (HsRecSelRn _)) #else -getRecSels e@(unLoc -> HsApp _ se@(unLoc -> HsRecFld _ _) re) = - ( [ RecordSelectorExpr (realSrcSpanToRange realSpan') se re - | RealSrcSpan realSpan' _ <- [ getLoc e ] ], False ) -getRecSels e@(unLoc -> OpApp _ se@(unLoc -> HsRecFld _ _) +getRecSels e@(unLoc -> OpApp _ se@(unLoc -> HsRecSel _ _) +#endif (unLoc -> HsVar _ (unLoc -> d)) re) | d == dollarName = ( [ RecordSelectorExpr (realSrcSpanToRange realSpan') se re | RealSrcSpan realSpan' _ <- [ getLoc e ] ], False ) -#endif getRecSels _ = ([], False) collectRecSelResult :: MonadIO m => IdeState -> NormalizedFilePath diff --git a/plugins/hls-pragmas-plugin/src/Ide/Plugin/Pragmas.hs b/plugins/hls-pragmas-plugin/src/Ide/Plugin/Pragmas.hs index 1f218fb1df..23bfd727cf 100644 --- a/plugins/hls-pragmas-plugin/src/Ide/Plugin/Pragmas.hs +++ b/plugins/hls-pragmas-plugin/src/Ide/Plugin/Pragmas.hs @@ -17,8 +17,9 @@ module Ide.Plugin.Pragmas import Control.Lens hiding (List) import Control.Monad.IO.Class (MonadIO (liftIO)) -import Control.Monad.Trans.Class (lift) +import qualified Data.Aeson as JSON import Data.Char (isAlphaNum) +import qualified Data.Foldable as Foldable import Data.List.Extra (nubOrdOn) import qualified Data.Map as M import Data.Maybe (mapMaybe) @@ -29,7 +30,7 @@ import Development.IDE.Core.Compile (sourceParser, import Development.IDE.Core.PluginUtils import Development.IDE.GHC.Compat import Development.IDE.Plugin.Completions (ghcideCompletionsPluginPriority) -import Development.IDE.Plugin.Completions.Logic (getCompletionPrefix) +import Development.IDE.Plugin.Completions.Logic (getCompletionPrefixFromRope) import Development.IDE.Plugin.Completions.Types (PosPrefixInfo (..)) import qualified Development.IDE.Spans.Pragmas as Pragmas import Ide.Plugin.Error @@ -80,7 +81,7 @@ mkCodeActionProvider mkSuggest state _plId -- ghc session to get some dynflags even if module isn't parsed (hscEnv -> hsc_dflags -> sessionDynFlags, _) <- runActionE "Pragmas.GhcSession" state $ useWithStaleE GhcSession normalizedFilePath - (_, fileContents) <- liftIO $ runAction "Pragmas.GetFileContents" state $ getFileContents normalizedFilePath + fileContents <- liftIO $ runAction "Pragmas.GetFileContents" state $ getFileContents normalizedFilePath parsedModule <- liftIO $ runAction "Pragmas.GetParsedModule" state $ getParsedModule normalizedFilePath let parsedModuleDynFlags = ms_hspp_opts . pm_mod_summary <$> parsedModule nextPragmaInfo = Pragmas.getNextPragmaInfo sessionDynFlags fileContents @@ -121,15 +122,23 @@ suggest dflags diag = -- --------------------------------------------------------------------- suggestDisableWarning :: Diagnostic -> [PragmaEdit] -suggestDisableWarning Diagnostic {_code} - | Just (LSP.InR (T.stripPrefix "-W" -> Just w)) <- _code - , w `notElem` warningBlacklist = - pure ("Disable \"" <> w <> "\" warnings", OptGHC w) +suggestDisableWarning diagnostic + | Just (Just (JSON.Array attachedReasons)) <- diagnostic ^? 
attachedReason + = + [ ("Disable \"" <> w <> "\" warnings", OptGHC w) + | JSON.String attachedReason <- Foldable.toList attachedReasons + , Just w <- [T.stripPrefix "-W" attachedReason] + , w `notElem` warningBlacklist + ] | otherwise = [] --- Don't suggest disabling type errors as a solution to all type errors warningBlacklist :: [T.Text] -warningBlacklist = ["deferred-type-errors"] +warningBlacklist = + -- Don't suggest disabling type errors as a solution to all type errors. + [ "deferred-type-errors" + -- Don't suggest disabling out of scope errors as a solution to all out of scope errors. + , "deferred-out-of-scope-variables" + ] -- --------------------------------------------------------------------- @@ -195,13 +204,13 @@ flags :: [T.Text] flags = map T.pack $ flagsForCompletion False completion :: PluginMethodHandler IdeState 'LSP.Method_TextDocumentCompletion -completion _ide _ complParams = do +completion ide _ complParams = do let (LSP.TextDocumentIdentifier uri) = complParams ^. L.textDocument position@(Position ln col) = complParams ^. L.position - contents <- lift $ pluginGetVirtualFile $ toNormalizedUri uri + contents <- liftIO $ runAction "Pragmas.GetUriContents" ide $ getUriContents $ toNormalizedUri uri fmap LSP.InL $ case (contents, uriToFilePath' uri) of (Just cnts, Just _path) -> - pure $ result $ getCompletionPrefix position cnts + pure $ result $ getCompletionPrefixFromRope position cnts where result pfix | "{-# language" `T.isPrefixOf` line diff --git a/plugins/hls-pragmas-plugin/test/Main.hs b/plugins/hls-pragmas-plugin/test/Main.hs index dc62c14860..1e38e439ab 100644 --- a/plugins/hls-pragmas-plugin/test/Main.hs +++ b/plugins/hls-pragmas-plugin/test/Main.hs @@ -73,10 +73,10 @@ codeActionTests = , codeActionTestWithPragmasSuggest "adds TypeApplications pragma" "TypeApplications" [("Add \"TypeApplications\"", "Contains TypeApplications code action")] , codeActionTestWithPragmasSuggest "after shebang" "AfterShebang" [("Add \"NamedFieldPuns\"", "Contains NamedFieldPuns code action")] , codeActionTestWithPragmasSuggest "append to existing pragmas" "AppendToExisting" [("Add \"NamedFieldPuns\"", "Contains NamedFieldPuns code action")] - , codeActionTestWithPragmasSuggest "before doc comments" "BeforeDocComment" [("Add \"NamedFieldPuns\"", "Contains NamedFieldPuns code action")] + , codeActionTestWithPragmasSuggest "before doc comments NamedFieldPuns" "BeforeDocComment" [("Add \"NamedFieldPuns\"", "Contains NamedFieldPuns code action")] , codeActionTestWithPragmasSuggest "adds TypeSynonymInstances pragma" "NeedsPragmas" [("Add \"TypeSynonymInstances\"", "Contains TypeSynonymInstances code action"), ("Add \"FlexibleInstances\"", "Contains FlexibleInstances code action")] - , codeActionTestWithDisableWarning "before doc comments" "MissingSignatures" [("Disable \"missing-signatures\" warnings", "Contains missing-signatures code action")] - , codeActionTestWithDisableWarning "before doc comments" "UnusedImports" [("Disable \"unused-imports\" warnings", "Contains unused-imports code action")] + , codeActionTestWithDisableWarning "before doc comments missing-signatures" "MissingSignatures" [("Disable \"missing-signatures\" warnings", "Contains missing-signatures code action")] + , codeActionTestWithDisableWarning "before doc comments unused-imports" "UnusedImports" [("Disable \"unused-imports\" warnings", "Contains unused-imports code action")] ] codeActionTestWithPragmasSuggest :: String -> FilePath -> [(T.Text, String)] -> TestTree @@ -109,11 +109,16 @@ codeActionTests' = _ -> 
assertFailure $ "Expected one code action, but got: " <> show cas liftIO $ (ca ^. L.title == "Add \"NamedFieldPuns\"") @? "NamedFieldPuns code action" executeCodeAction ca - , goldenWithPragmas pragmasSuggestPlugin "doesn't suggest disabling type errors" "DeferredTypeErrors" $ \doc -> do + , goldenWithPragmas pragmasDisableWarningPlugin "doesn't suggest disabling type errors" "DeferredTypeErrors" $ \doc -> do _ <- waitForDiagnosticsFrom doc cas <- map fromAction <$> getAllCodeActions doc liftIO $ "Disable \"deferred-type-errors\" warnings" `notElem` map (^. L.title) cas @? "Doesn't contain deferred-type-errors code action" liftIO $ length cas == 0 @? "Expected no code actions, but got: " <> show cas + , goldenWithPragmas pragmasDisableWarningPlugin "doesn't suggest disabling out of scope variables" "DeferredOutOfScopeVariables" $ \doc -> do + _ <- waitForDiagnosticsFrom doc + cas <- map fromAction <$> getAllCodeActions doc + liftIO $ "Disable \"deferred-out-of-scope-variables\" warnings" `notElem` map (^. L.title) cas @? "Doesn't contain deferred-out-of-scope-variables code action" + liftIO $ length cas == 0 @? "Expected no code actions, but got: " <> show cas ] completionTests :: TestTree diff --git a/plugins/hls-pragmas-plugin/test/testdata/DeferredOutOfScopeVariables.expected.hs b/plugins/hls-pragmas-plugin/test/testdata/DeferredOutOfScopeVariables.expected.hs new file mode 100644 index 0000000000..38d17261dc --- /dev/null +++ b/plugins/hls-pragmas-plugin/test/testdata/DeferredOutOfScopeVariables.expected.hs @@ -0,0 +1,5 @@ +module DeferredOutOfScopeVariables where + +f :: () +f = let x = Doesn'tExist + in undefined diff --git a/plugins/hls-pragmas-plugin/test/testdata/DeferredOutOfScopeVariables.hs b/plugins/hls-pragmas-plugin/test/testdata/DeferredOutOfScopeVariables.hs new file mode 100644 index 0000000000..38d17261dc --- /dev/null +++ b/plugins/hls-pragmas-plugin/test/testdata/DeferredOutOfScopeVariables.hs @@ -0,0 +1,5 @@ +module DeferredOutOfScopeVariables where + +f :: () +f = let x = Doesn'tExist + in undefined diff --git a/plugins/hls-qualify-imported-names-plugin/src/Ide/Plugin/QualifyImportedNames.hs b/plugins/hls-qualify-imported-names-plugin/src/Ide/Plugin/QualifyImportedNames.hs index 8b73c9114e..011910b880 100644 --- a/plugins/hls-qualify-imported-names-plugin/src/Ide/Plugin/QualifyImportedNames.hs +++ b/plugins/hls-qualify-imported-names-plugin/src/Ide/Plugin/QualifyImportedNames.hs @@ -21,6 +21,9 @@ import Data.Maybe (fromMaybe, isJust, mapMaybe) import qualified Data.Set as Set import Data.Text (Text) import qualified Data.Text as Text +import qualified Data.Text.Lines as Text.Lines +import Data.Text.Utf16.Rope.Mixed (Rope) +import qualified Data.Text.Utf16.Rope.Mixed as Rope import Development.IDE (spanContainsRange) import Development.IDE.Core.PluginUtils import Development.IDE.Core.RuleTypes (GetFileContents (GetFileContents), @@ -178,10 +181,12 @@ updateColOffset row lineOffset colOffset | row == lineOffset = colOffset | otherwise = 0 -usedIdentifiersToTextEdits :: Range -> NameEnv [ImportedBy] -> Text -> [UsedIdentifier] -> [TextEdit] -usedIdentifiersToTextEdits range nameToImportedByMap sourceText usedIdentifiers +usedIdentifiersToTextEdits :: Range -> NameEnv [ImportedBy] -> Rope -> [UsedIdentifier] -> [TextEdit] +usedIdentifiersToTextEdits range nameToImportedByMap source usedIdentifiers | let sortedUsedIdentifiers = sortOn usedIdentifierSpan usedIdentifiers = - State.evalState (makeStateComputation sortedUsedIdentifiers) (Text.lines sourceText, 0, 0) + 
State.evalState + (makeStateComputation sortedUsedIdentifiers) + (Text.Lines.lines (Rope.toTextLines source), 0, 0) where folder :: [TextEdit] -> UsedIdentifier -> State ([Text], Int, Int) [TextEdit] folder prevTextEdits UsedIdentifier{usedIdentifierName, usedIdentifierSpan} @@ -227,12 +232,12 @@ codeActionProvider ideState _pluginId (CodeActionParams _ _ documentId range _) if isJust (findLImportDeclAt range tmrParsed) then do HAR {..} <- runActionE "QualifyImportedNames.GetHieAst" ideState (useE GetHieAst normalizedFilePath) - (_, sourceTextM) <- runActionE "QualifyImportedNames.GetFileContents" ideState (useE GetFileContents normalizedFilePath) - sourceText <- handleMaybe (PluginRuleFailed "GetFileContents") sourceTextM + (_, sourceM) <- runActionE "QualifyImportedNames.GetFileContents" ideState (useE GetFileContents normalizedFilePath) + source <- handleMaybe (PluginRuleFailed "GetFileContents") sourceM let globalRdrEnv = tcg_rdr_env tmrTypechecked nameToImportedByMap = globalRdrEnvToNameToImportedByMap globalRdrEnv usedIdentifiers = refMapToUsedIdentifiers refMap - textEdits = usedIdentifiersToTextEdits range nameToImportedByMap sourceText usedIdentifiers + textEdits = usedIdentifiersToTextEdits range nameToImportedByMap source usedIdentifiers pure $ InL (makeCodeActions (documentId ^. L.uri) textEdits) else pure $ InL [] diff --git a/plugins/hls-refactor-plugin/src/Development/IDE/GHC/Compat/ExactPrint.hs b/plugins/hls-refactor-plugin/src/Development/IDE/GHC/Compat/ExactPrint.hs index d8b86217d7..7c337dcd00 100644 --- a/plugins/hls-refactor-plugin/src/Development/IDE/GHC/Compat/ExactPrint.hs +++ b/plugins/hls-refactor-plugin/src/Development/IDE/GHC/Compat/ExactPrint.hs @@ -7,7 +7,7 @@ module Development.IDE.GHC.Compat.ExactPrint , transformA ) where -import Language.Haskell.GHC.ExactPrint as ExactPrint +import Language.Haskell.GHC.ExactPrint as ExactPrint printA :: (ExactPrint ast) => ast -> String printA ast = exactPrint ast @@ -16,4 +16,4 @@ transformA :: Monad m => ast1 -> (ast1 -> TransformT m ast2) -> m ast2 transformA ast f = do (ast',_ ,_) <- runTransformFromT 0 (f ast) - return $ ast' + return ast' diff --git a/plugins/hls-refactor-plugin/src/Development/IDE/GHC/Dump.hs b/plugins/hls-refactor-plugin/src/Development/IDE/GHC/Dump.hs index 949e2a700b..c610225ef5 100644 --- a/plugins/hls-refactor-plugin/src/Development/IDE/GHC/Dump.hs +++ b/plugins/hls-refactor-plugin/src/Development/IDE/GHC/Dump.hs @@ -32,9 +32,6 @@ showAstDataHtml a0 = html $ li = tag "li" caret x = tag' [("class", text "caret")] "span" "" <+> x nested foo cts -#if !MIN_VERSION_ghc(9,3,0) - | cts == empty = foo -#endif | otherwise = foo $$ (caret $ ul cts) body cts = tag "body" $ cts $$ tag "script" (text js) header = tag "head" $ tag "style" $ text css @@ -45,9 +42,13 @@ showAstDataHtml a0 = html $ generic `ext1Q` list `extQ` string `extQ` fastString `extQ` srcSpan `extQ` realSrcSpan +#if !MIN_VERSION_ghc(9,11,0) `extQ` annotation +#endif `extQ` annotationModule +#if !MIN_VERSION_ghc(9,11,0) `extQ` annotationAddEpAnn +#endif `extQ` annotationGrhsAnn `extQ` annotationEpAnnHsCase `extQ` annotationEpAnnHsLet @@ -56,7 +57,9 @@ showAstDataHtml a0 = html $ `extQ` annotationAnnParen `extQ` annotationTrailingAnn `extQ` annotationEpaLocation +#if !MIN_VERSION_ghc(9,11,0) `extQ` addEpAnn +#endif `extQ` lit `extQ` litr `extQ` litt `extQ` sourceText `extQ` deltaPos @@ -138,7 +141,11 @@ showAstDataHtml a0 = html $ #else epaAnchor (EpaSpan r) = text "EpaSpan" <+> realSrcSpan r #endif +#if MIN_VERSION_ghc(9,11,0) + epaAnchor 
(EpaDelta s d cs) = text "EpaDelta" <+> srcSpan s <+> deltaPos d <+> showAstDataHtml' cs +#else epaAnchor (EpaDelta d cs) = text "EpaDelta" <+> deltaPos d <+> showAstDataHtml' cs +#endif #if !MIN_VERSION_ghc(9,9,0) anchorOp :: AnchorOperation -> SDoc @@ -172,8 +179,10 @@ showAstDataHtml a0 = html $ -- TODO: show annotations here (text "") +#if !MIN_VERSION_ghc(9,11,0) addEpAnn :: AddEpAnn -> SDoc addEpAnn (AddEpAnn a s) = text "AddEpAnn" <+> ppr a <+> epaAnchor s +#endif var :: Var -> SDoc var v = braces $ text "Var:" <+> ppr v @@ -211,14 +220,18 @@ showAstDataHtml a0 = html $ -- ------------------------- +#if !MIN_VERSION_ghc(9,11,0) annotation :: EpAnn [AddEpAnn] -> SDoc annotation = annotation' (text "EpAnn [AddEpAnn]") +#endif annotationModule :: EpAnn AnnsModule -> SDoc annotationModule = annotation' (text "EpAnn AnnsModule") +#if !MIN_VERSION_ghc(9,11,0) annotationAddEpAnn :: EpAnn AddEpAnn -> SDoc annotationAddEpAnn = annotation' (text "EpAnn AddEpAnn") +#endif annotationGrhsAnn :: EpAnn GrhsAnn -> SDoc annotationGrhsAnn = annotation' (text "EpAnn GrhsAnn") @@ -234,7 +247,11 @@ showAstDataHtml a0 = html $ annotationEpAnnHsLet = annotation' (text "EpAnn AnnsLet") #endif +#if MIN_VERSION_ghc(9,11,0) + annotationAnnList :: EpAnn (AnnList ()) -> SDoc +#else annotationAnnList :: EpAnn AnnList -> SDoc +#endif annotationAnnList = annotation' (text "EpAnn AnnList") annotationEpAnnImportDecl :: EpAnn EpAnnImportDecl -> SDoc @@ -259,7 +276,11 @@ showAstDataHtml a0 = html $ srcSpanAnnA :: EpAnn AnnListItem -> SDoc srcSpanAnnA = locatedAnn'' (text "SrcSpanAnnA") +#if MIN_VERSION_ghc(9,11,0) + srcSpanAnnL :: EpAnn (AnnList ()) -> SDoc +#else srcSpanAnnL :: EpAnn AnnList -> SDoc +#endif srcSpanAnnL = locatedAnn'' (text "SrcSpanAnnL") srcSpanAnnP :: EpAnn AnnPragma -> SDoc diff --git a/plugins/hls-refactor-plugin/src/Development/IDE/GHC/ExactPrint.hs b/plugins/hls-refactor-plugin/src/Development/IDE/GHC/ExactPrint.hs index e54db25d60..0f740688be 100644 --- a/plugins/hls-refactor-plugin/src/Development/IDE/GHC/ExactPrint.hs +++ b/plugins/hls-refactor-plugin/src/Development/IDE/GHC/ExactPrint.hs @@ -90,7 +90,7 @@ import GHC (DeltaPos (..), #if !MIN_VERSION_ghc(9,9,0) import Data.Default (Default) -import GHC (Anchor (..), +import GHC ( Anchor (..), AnchorOperation, EpAnn (..), NameAdornment (NameParens), @@ -106,13 +106,22 @@ import GHC.Parser.Annotation (AnnContext (..), deltaPos) import GHC.Types.SrcLoc (generatedSrcSpan) #endif +#if MIN_VERSION_ghc(9,11,0) +import GHC.Types.SrcLoc (UnhelpfulSpanReason(..)) +#endif #if MIN_VERSION_ghc(9,9,0) -import GHC (Anchor, +import GHC ( +#if !MIN_VERSION_ghc(9,11,0) + Anchor, +#endif AnnContext (..), EpAnn (..), EpaLocation, EpaLocation' (..), +#if MIN_VERSION_ghc(9,11,0) + EpToken (..), +#endif NameAdornment (..), NameAnn (..), SrcSpanAnnA, @@ -121,7 +130,6 @@ import GHC (Anchor, emptyComments, spanAsAnchor) #endif - setPrecedingLines :: #if !MIN_VERSION_ghc(9,9,0) Default t => @@ -137,7 +145,7 @@ instance Pretty Log where LogShake shakeLog -> pretty shakeLog data GetAnnotatedParsedSource = GetAnnotatedParsedSource - deriving (Eq, Show, Typeable, GHC.Generic) + deriving (Eq, Show, GHC.Generic) instance Hashable GetAnnotatedParsedSource instance NFData GetAnnotatedParsedSource @@ -158,13 +166,17 @@ getAnnotatedParsedSourceRule recorder = define (cmapWithPrio LogShake recorder) return ([], fmap annotateParsedSource pm) annotateParsedSource :: ParsedModule -> ParsedSource -annotateParsedSource (ParsedModule _ ps _ _) = +annotateParsedSource (ParsedModule _ ps 
_) = #if MIN_VERSION_ghc(9,9,0) ps #else (makeDeltaAst ps) #endif +#if MIN_VERSION_ghc(9,11,0) +type Anchor = EpaLocation +#endif + ------------------------------------------------------------------------------ {- | A transformation for grafting source trees together. Use the semigroup @@ -463,7 +475,10 @@ modifySmallestDeclWithM validSpan f a = do False -> first (DL.singleton ldecl <>) <$> modifyMatchingDecl rest modifyDeclsT' (fmap (first DL.toList) . modifyMatchingDecl) a -#if MIN_VERSION_ghc(9,9,0) +#if MIN_VERSION_ghc(9,11,0) +generatedAnchor :: DeltaPos -> Anchor +generatedAnchor dp = EpaDelta (UnhelpfulSpan UnhelpfulNoLocationInfo) dp [] +#elif MIN_VERSION_ghc(9,9,0) generatedAnchor :: DeltaPos -> Anchor generatedAnchor dp = EpaDelta dp [] #else @@ -578,9 +593,17 @@ modifyDeclsT' :: (HasDecls t, HasTransform m) => ([LHsDecl GhcPs] -> m ([LHsDecl GhcPs], r)) -> t -> m (t, r) modifyDeclsT' action t = do +#if MIN_VERSION_ghc_exactprint(1,10,0) + decls <- pure $ hsDecls t +#else decls <- liftT $ hsDecls t +#endif (decls', r) <- action decls +#if MIN_VERSION_ghc_exactprint(1,10,0) + t' <- pure $ replaceDecls t decls' +#else t' <- liftT $ replaceDecls t decls' +#endif pure (t', r) -- | Modify each LMatch in a MatchGroup @@ -755,15 +778,28 @@ eqSrcSpan l r = leftmost_smallest l r == EQ addParensToCtxt :: Maybe EpaLocation -> AnnContext -> AnnContext addParensToCtxt close_dp = addOpen . addClose where +#if MIN_VERSION_ghc(9,11,0) + addOpen it@AnnContext{ac_open = []} = it{ac_open = [EpTok (epl 0)]} +#else addOpen it@AnnContext{ac_open = []} = it{ac_open = [epl 0]} +#endif addOpen other = other addClose it +#if MIN_VERSION_ghc(9,11,0) + | Just c <- close_dp = it{ac_close = [EpTok c]} + | AnnContext{ac_close = []} <- it = it{ac_close = [EpTok (epl 0)]} +#else | Just c <- close_dp = it{ac_close = [c]} | AnnContext{ac_close = []} <- it = it{ac_close = [epl 0]} +#endif | otherwise = it epl :: Int -> EpaLocation +#if MIN_VERSION_ghc(9,11,0) +epl n = EpaDelta (UnhelpfulSpan UnhelpfulNoLocationInfo) (SameLine n) [] +#else epl n = EpaDelta (SameLine n) [] +#endif epAnn :: SrcSpan -> ann -> EpAnn ann epAnn srcSpan anns = EpAnn (spanAsAnchor srcSpan) anns emptyComments @@ -792,14 +828,25 @@ removeComma (SrcSpanAnn (EpAnn anc (AnnListItem as) cs) l) #endif addParens :: Bool -> GHC.NameAnn -> GHC.NameAnn +#if MIN_VERSION_ghc(9,11,0) addParens True it@NameAnn{} = - it{nann_adornment = NameParens, nann_open = epl 0, nann_close = epl 0 } + it{nann_adornment = NameParens (EpTok (epl 0)) (EpTok (epl 0)) } addParens True it@NameAnnCommas{} = - it{nann_adornment = NameParens, nann_open = epl 0, nann_close = epl 0 } + it{nann_adornment = NameParens (EpTok (epl 0)) (EpTok (epl 0)) } addParens True it@NameAnnOnly{} = - it{nann_adornment = NameParens, nann_open = epl 0, nann_close = epl 0 } + it{nann_adornment = NameParens (EpTok (epl 0)) (EpTok (epl 0)) } +addParens True it@NameAnnTrailing{} = + NameAnn{nann_adornment = NameParens (EpTok (epl 0)) (EpTok (epl 0)), nann_name = epl 0, nann_trailing = nann_trailing it} +#else +addParens True it@NameAnn{} = + it{nann_adornment = NameParens, nann_open=epl 0, nann_close=epl 0 } +addParens True it@NameAnnCommas{} = + it{nann_adornment = NameParens, nann_open=epl 0, nann_close=epl 0 } +addParens True it@NameAnnOnly{} = + it{nann_adornment = NameParens, nann_open=epl 0, nann_close=epl 0 } addParens True NameAnnTrailing{..} = - NameAnn{nann_adornment = NameParens, nann_open = epl 0, nann_close = epl 0, nann_name = epl 0, ..} + NameAnn{nann_adornment = NameParens, 
nann_open=epl 0, nann_close=epl 0, nann_name = epl 0, ..} +#endif addParens _ it = it removeTrailingComma :: GenLocated SrcSpanAnnA ast -> GenLocated SrcSpanAnnA ast diff --git a/plugins/hls-refactor-plugin/src/Development/IDE/Plugin/CodeAction.hs b/plugins/hls-refactor-plugin/src/Development/IDE/Plugin/CodeAction.hs index 175aced38f..0f41f988e8 100644 --- a/plugins/hls-refactor-plugin/src/Development/IDE/Plugin/CodeAction.hs +++ b/plugins/hls-refactor-plugin/src/Development/IDE/Plugin/CodeAction.hs @@ -12,7 +12,9 @@ module Development.IDE.Plugin.CodeAction fillHolePluginDescriptor, extendImportPluginDescriptor, -- * For testing - matchRegExMultipleImports + matchRegExMultipleImports, + extractNotInScopeName, + NotInScope(..) ) where import Control.Applicative ((<|>)) @@ -23,7 +25,6 @@ import Control.Arrow (second, import Control.Concurrent.STM.Stats (atomically) import Control.Monad.Extra import Control.Monad.IO.Class -import Control.Monad.Trans import Control.Monad.Trans.Except (ExceptT (ExceptT)) import Control.Monad.Trans.Maybe import Data.Char @@ -41,14 +42,17 @@ import Data.Ord (comparing) import qualified Data.Set as S import qualified Data.Text as T import qualified Data.Text.Encoding as T +import qualified Data.Text.Utf16.Rope.Mixed as Rope +import Development.IDE.Core.FileStore (getUriContents) import Development.IDE.Core.Rules import Development.IDE.Core.RuleTypes import Development.IDE.Core.Service import Development.IDE.Core.Shake hiding (Log) import Development.IDE.GHC.Compat hiding (ImplicitPrelude) -import Development.IDE.GHC.Compat.ExactPrint +#if !MIN_VERSION_ghc(9,11,0) import Development.IDE.GHC.Compat.Util +#endif import Development.IDE.GHC.Error import Development.IDE.GHC.ExactPrint import qualified Development.IDE.GHC.ExactPrint as E @@ -65,11 +69,11 @@ import Development.IDE.Plugin.Plugins.FillHole (suggestFillH import Development.IDE.Plugin.Plugins.FillTypeWildcard (suggestFillTypeWildcard) import Development.IDE.Plugin.Plugins.ImportUtils import Development.IDE.Plugin.TypeLenses (suggestSignature) +import Development.IDE.Types.Diagnostics import Development.IDE.Types.Exports import Development.IDE.Types.Location import Development.IDE.Types.Options -import GHC (AddEpAnn (AddEpAnn), - AnnsModule (am_main), +import GHC ( DeltaPos (..), EpAnn (..), LEpaComment) @@ -87,7 +91,6 @@ import Language.LSP.Protocol.Types (ApplyWorkspa CodeActionKind (CodeActionKind_QuickFix), CodeActionParams (CodeActionParams), Command, - Diagnostic (..), MessageType (..), Null (Null), ShowMessageParams (..), @@ -97,7 +100,6 @@ import Language.LSP.Protocol.Types (ApplyWorkspa WorkspaceEdit (WorkspaceEdit, _changeAnnotations, _changes, _documentChanges), type (|?) 
(InL, InR), uriToFilePath) -import Language.LSP.VFS (virtualFileText) import qualified Text.Fuzzy.Parallel as TFP import qualified Text.Regex.Applicative as RE import Text.Regex.TDFA ((=~), (=~~)) @@ -105,32 +107,46 @@ import Text.Regex.TDFA ((=~), (=~~)) -- See Note [Guidelines For Using CPP In GHCIDE Import Statements] #if !MIN_VERSION_ghc(9,9,0) -import GHC (Anchor (anchor_op), +import Development.IDE.GHC.Compat.ExactPrint (makeDeltaAst) +import GHC (AddEpAnn (AddEpAnn), + AnnsModule (am_main), + Anchor (anchor_op), AnchorOperation (..), EpaLocation (..)) #endif -#if MIN_VERSION_ghc(9,9,0) -import GHC (EpaLocation, +#if MIN_VERSION_ghc(9,9,0) && !MIN_VERSION_ghc(9,11,0) +import GHC (AddEpAnn (AddEpAnn), + AnnsModule (am_main), + EpaLocation, EpaLocation' (..), HasLoc (..)) import GHC.Types.SrcLoc (srcSpanToRealSrcSpan) #endif +#if MIN_VERSION_ghc(9,11,0) +import GHC (EpaLocation, + AnnsModule (am_where), + EpaLocation' (..), + HasLoc (..), + EpToken (..)) +import GHC.Types.SrcLoc (srcSpanToRealSrcSpan) +#endif + ------------------------------------------------------------------------------------------------- -- | Generate code actions. codeAction :: PluginMethodHandler IdeState 'Method_TextDocumentCodeAction codeAction state _ (CodeActionParams _ _ (TextDocumentIdentifier uri) range _) = do - contents <- lift $ pluginGetVirtualFile $ toNormalizedUri uri + contents <- liftIO $ runAction "hls-refactor-plugin.codeAction.getUriContents" state $ getUriContents $ toNormalizedUri uri liftIO $ do - let text = virtualFileText <$> contents - mbFile = toNormalizedFilePath' <$> uriToFilePath uri - allDiags <- atomically $ fmap (\(_, _, d) -> d) . filter (\(p, _, _) -> mbFile == Just p) <$> getDiagnostics state + let mbFile = toNormalizedFilePath' <$> uriToFilePath uri + allDiags <- atomically $ fmap fdLspDiagnostic . 
filter (\d -> mbFile == Just (fdFilePath d)) <$> getDiagnostics state (join -> parsedModule) <- runAction "GhcideCodeActions.getParsedModule" state $ getParsedModule `traverse` mbFile let - actions = caRemoveRedundantImports parsedModule text allDiags range uri - <> caRemoveInvalidExports parsedModule text allDiags range uri + textContents = fmap Rope.toText contents + actions = caRemoveRedundantImports parsedModule textContents allDiags range uri + <> caRemoveInvalidExports parsedModule textContents allDiags range uri pure $ InL actions ------------------------------------------------------------------------------------------------- @@ -249,7 +265,7 @@ extendImportHandler' ideState ExtendImport {..} it = case thingParent of Nothing -> newThing Just p -> p <> "(" <> newThing <> ")" - t <- liftMaybe $ snd <$> newImportToEdit n ps (fromMaybe "" contents) + t <- liftMaybe $ snd <$> newImportToEdit n ps (Rope.toText (fromMaybe mempty contents)) return (nfp, WorkspaceEdit {_changes=Just (M.singleton doc [t]), _documentChanges=Nothing, _changeAnnotations=Nothing}) | otherwise = mzero @@ -312,11 +328,7 @@ findSigOfBind range bind = msum [findSigOfBinds range (grhssLocalBinds grhs) -- where clause , do -#if MIN_VERSION_ghc(9,3,0) grhs <- findDeclContainingLoc (_start range) (grhssGRHSs grhs) -#else - grhs <- findDeclContainingLoc (_start range) (map reLocA $ grhssGRHSs grhs) -#endif case unLoc grhs of GRHS _ _ bd -> findSigOfExpr (unLoc bd) ] @@ -324,7 +336,7 @@ findSigOfBind range bind = findSigOfExpr :: HsExpr p -> Maybe (Sig p) findSigOfExpr = go where -#if MIN_VERSION_ghc(9,3,0) && !MIN_VERSION_ghc(9,9,0) +#if !MIN_VERSION_ghc(9,9,0) go (HsLet _ _ binds _ _) = findSigOfBinds range binds #else go (HsLet _ binds _) = findSigOfBinds range binds @@ -343,7 +355,11 @@ findSigOfBinds range = go case unLoc <$> findDeclContainingLoc (_start range) lsigs of Just sig' -> Just sig' Nothing -> do +#if MIN_VERSION_ghc(9,11,0) + lHsBindLR <- findDeclContainingLoc (_start range) binds +#else lHsBindLR <- findDeclContainingLoc (_start range) (bagToList binds) +#endif findSigOfBind range (unLoc lHsBindLR) go _ = Nothing @@ -424,7 +440,11 @@ isUnusedImportedId modName importSpan | occ <- mkVarOcc identifier, +#if MIN_VERSION_ghc(9,11,0) + impModsVals <- importedByUser . concat $ M.elems imp_mods, +#else impModsVals <- importedByUser . concat $ moduleEnvElts imp_mods, +#endif Just rdrEnv <- listToMaybe [ imv_all_exports @@ -567,7 +587,7 @@ suggestRemoveRedundantExport :: ParsedModule -> Diagnostic -> Maybe (T.Text, [Ra suggestRemoveRedundantExport ParsedModule{pm_parsed_source = L _ HsModule{..}} Diagnostic{..} | msg <- unifySpaces _message , Just export <- hsmodExports - , Just exportRange <- getLocatedRange $ export + , Just exportRange <- getLocatedRange export , exports <- unLoc export , Just (removeFromExport, !ranges) <- fmap (getRanges exports . 
notInScope) (extractNotInScopeName msg) <|> (,[_range]) <$> matchExportItem msg @@ -663,7 +683,11 @@ suggestDeleteUnusedBinding name (L _ Match{m_grhss=GRHSs{grhssLocalBinds}}) = do let go bag lsigs = +#if MIN_VERSION_ghc(9,11,0) + if null bag +#else if isEmptyBag bag +#endif then [] else concatMap (findRelatedSpanForHsBind indexedContent name lsigs) bag case grhssLocalBinds of @@ -773,7 +797,7 @@ suggestExportUnusedTopBinding srcOpt ParsedModule{pm_parsed_source = L _ HsModul printExport :: ExportsAs -> T.Text -> T.Text printExport ExportName x = parenthesizeIfNeeds False x - printExport ExportPattern x = "pattern " <> x + printExport ExportPattern x = "pattern " <> parenthesizeIfNeeds False x printExport ExportFamily x = parenthesizeIfNeeds True x printExport ExportAll x = parenthesizeIfNeeds True x <> "(..)" @@ -951,7 +975,7 @@ suggestModuleTypo Diagnostic{_range=_range,..} | "Could not find module" `T.isInfixOf` _message = case T.splitOn "Perhaps you meant" _message of [_, stuff] -> - [ ("replace with " <> modul, TextEdit _range modul) + [ ("Replace with " <> modul, TextEdit _range modul) | modul <- mapMaybe extractModule (T.lines stuff) ] _ -> [] @@ -1576,13 +1600,34 @@ extractQualifiedModuleNameFromMissingName (T.strip -> missing) modNameP = fmap snd $ RE.withMatched $ conIDP `sepBy1` RE.sym '.' +-- | A Backward compatible implementation of `lookupOccEnv_AllNameSpaces` for +-- GHC <=9.6 +-- +-- It looks for a symbol name in all known namespaces, including types, +-- variables, and fieldnames. +-- +-- Note that on GHC >= 9.8, the record selectors are not in the `mkVarOrDataOcc` +-- anymore, but are in a custom namespace, see +-- https://gitlab.haskell.org/ghc/ghc/-/wikis/migration/9.8#new-namespace-for-record-fields, +-- hence we need to use this "AllNamespaces" implementation, otherwise we'll +-- miss them. 
+lookupOccEnvAllNamespaces :: ExportsMap -> T.Text -> [IdentInfo] +#if MIN_VERSION_ghc(9,7,0) +lookupOccEnvAllNamespaces exportsMap name = Set.toList $ mconcat (lookupOccEnv_AllNameSpaces (getExportsMap exportsMap) (mkTypeOcc name)) +#else +lookupOccEnvAllNamespaces exportsMap name = maybe [] Set.toList $ + lookupOccEnv (getExportsMap exportsMap) (mkVarOrDataOcc name) + <> lookupOccEnv (getExportsMap exportsMap) (mkTypeOcc name) -- look up the modified unknown name in the export map +#endif + + constructNewImportSuggestions :: ExportsMap -> (Maybe T.Text, NotInScope) -> Maybe [T.Text] -> QualifiedImportStyle -> [ImportSuggestion] constructNewImportSuggestions exportsMap (qual, thingMissing) notTheseModules qis = nubOrdBy simpleCompareImportSuggestion [ suggestion | Just name <- [T.stripPrefix (maybe "" (<> ".") qual) $ notInScope thingMissing] -- strip away qualified module names from the unknown name - , identInfo <- maybe [] Set.toList $ lookupOccEnv (getExportsMap exportsMap) (mkVarOrDataOcc name) - <> lookupOccEnv (getExportsMap exportsMap) (mkTypeOcc name) -- look up the modified unknown name in the export map + + , identInfo <- lookupOccEnvAllNamespaces exportsMap name -- look up the modified unknown name in the export map , canUseIdent thingMissing identInfo -- check if the identifier information retrieved can be used , moduleNameText identInfo `notElem` fromMaybe [] notTheseModules -- check if the module of the identifier is allowed , suggestion <- renderNewImport identInfo -- creates a list of import suggestions for the retrieved identifier information @@ -1629,7 +1674,7 @@ data ImportSuggestion = ImportSuggestion !Int !CodeActionKind !NewImport -- which would lead to an unlawful Ord instance. simpleCompareImportSuggestion :: ImportSuggestion -> ImportSuggestion -> Ordering simpleCompareImportSuggestion (ImportSuggestion s1 _ i1) (ImportSuggestion s2 _ i2) - = flip compare s1 s2 <> compare i1 i2 + = compare s2 s1 <> compare i1 i2 newtype NewImport = NewImport {unNewImport :: T.Text} deriving (Show, Eq, Ord) @@ -1704,13 +1749,22 @@ findPositionAfterModuleName ps _hsmodName' = do #endif EpAnn _ annsModule _ -> do -- Find the first 'where' +#if MIN_VERSION_ghc(9,11,0) + whereLocation <- filterWhere $ am_where annsModule +#else whereLocation <- listToMaybe . mapMaybe filterWhere $ am_main annsModule +#endif epaLocationToLine whereLocation #if !MIN_VERSION_ghc(9,9,0) EpAnnNotUsed -> Nothing #endif +#if MIN_VERSION_ghc(9,11,0) + filterWhere (EpTok loc) = Just loc + filterWhere _ = Nothing +#else filterWhere (AddEpAnn AnnWhere loc) = Just loc filterWhere _ = Nothing +#endif epaLocationToLine :: EpaLocation -> Maybe Int #if MIN_VERSION_ghc(9,9,0) @@ -1723,12 +1777,19 @@ findPositionAfterModuleName ps _hsmodName' = do epaLocationToLine (EpaSpan sp) = Just . srcLocLine . 
realSrcSpanEnd $ sp #endif +#if MIN_VERSION_ghc(9,11,0) + epaLocationToLine (EpaDelta _ (SameLine _) priorComments) = Just $ sumCommentsOffset priorComments + -- 'priorComments' contains the comments right before the current EpaLocation + -- Summing line offset of priorComments is necessary, as 'line' is the gap between the last comment and + -- the current AST node + epaLocationToLine (EpaDelta _ (DifferentLine line _) priorComments) = Just (line + sumCommentsOffset priorComments) +#else epaLocationToLine (EpaDelta (SameLine _) priorComments) = Just $ sumCommentsOffset priorComments -- 'priorComments' contains the comments right before the current EpaLocation -- Summing line offset of priorComments is necessary, as 'line' is the gap between the last comment and -- the current AST node epaLocationToLine (EpaDelta (DifferentLine line _) priorComments) = Just (line + sumCommentsOffset priorComments) - +#endif sumCommentsOffset :: [LEpaComment] -> Int #if MIN_VERSION_ghc(9,9,0) sumCommentsOffset = sum . fmap (\(L anchor _) -> anchorOpLine anchor) @@ -1736,7 +1797,12 @@ findPositionAfterModuleName ps _hsmodName' = do sumCommentsOffset = sum . fmap (\(L anchor _) -> anchorOpLine (anchor_op anchor)) #endif -#if MIN_VERSION_ghc(9,9,0) +#if MIN_VERSION_ghc(9,11,0) + anchorOpLine :: EpaLocation' a -> Int + anchorOpLine EpaSpan{} = 0 + anchorOpLine (EpaDelta _ (SameLine _) _) = 0 + anchorOpLine (EpaDelta _ (DifferentLine line _) _) = line +#elif MIN_VERSION_ghc(9,9,0) anchorOpLine :: EpaLocation' a -> Int anchorOpLine EpaSpan{} = 0 anchorOpLine (EpaDelta (SameLine _) _) = 0 @@ -1829,7 +1895,7 @@ data NotInScope = NotInScopeDataConstructor T.Text | NotInScopeTypeConstructorOrClass T.Text | NotInScopeThing T.Text - deriving Show + deriving (Show, Eq) notInScope :: NotInScope -> T.Text notInScope (NotInScopeDataConstructor t) = t @@ -1844,6 +1910,38 @@ extractNotInScopeName x = Just $ NotInScopeDataConstructor name | Just [name] <- matchRegexUnifySpaces x "ot in scope: type constructor or class [^‘]*‘([^’]*)’" = Just $ NotInScopeTypeConstructorOrClass name + | Just [name] <- matchRegexUnifySpaces x "The data constructors of ‘([^ ]+)’ are not all in scope" + = Just $ NotInScopeDataConstructor name + | Just [name] <- matchRegexUnifySpaces x "of newtype ‘([^’]*)’ is not in scope" + = Just $ NotInScopeThing name + -- Match for HasField "foo" Bar String in the context where, e.g. x.foo is + -- used, and x :: Bar. + -- + -- This usually mean that the field is not in scope and the correct fix is to + -- import (Bar(foo)) or (Bar(..)). + -- + -- However, it is more reliable to match for the type name instead of the field + -- name, and most of the time you'll want to import the complete type with all + -- their fields instead of the specific field. + -- + -- The regex is convoluted because it accounts for: + -- + -- - Qualified (or not) `HasField` + -- - The type bar is always qualified. If it is unqualified, it means that the + -- parent module is already imported, and in this context it uses an hint + -- already available in the GHC error message. However this regex accounts for + -- qualified or not, it does not cost much and should be more robust if the + -- hint changes in the future + -- - Next regex will account for polymorphic types, which appears as `HasField + -- "foo" (Bar Int)...`, e.g. 
see the parenthesis + | Just [_module, name] <- matchRegexUnifySpaces x "No instance for [‘(].*HasField \"[^\"]+\" ([^ (.]+\\.)*([^ (.]+).*[’)]" + = Just $ NotInScopeThing name + | Just [_module, name] <- matchRegexUnifySpaces x "No instance for [‘(].*HasField \"[^\"]+\" \\(([^ .]+\\.)*([^ .]+)[^)]*\\).*[’)]" + = Just $ NotInScopeThing name + -- The order of the "Not in scope" is important, for example, some of the + -- matcher may catch the "record" value instead of the value later. + | Just [name] <- matchRegexUnifySpaces x "Not in scope: record field ‘([^’]*)’" + = Just $ NotInScopeThing name | Just [name] <- matchRegexUnifySpaces x "ot in scope: \\(([^‘ ]+)\\)" = Just $ NotInScopeThing name | Just [name] <- matchRegexUnifySpaces x "ot in scope: ([^‘ ]+)" @@ -1885,14 +1983,11 @@ extractQualifiedModuleName x -- ‘Data.Functor’ nor ‘Data.Text’ exports ‘putStrLn’. extractDoesNotExportModuleName :: T.Text -> Maybe T.Text extractDoesNotExportModuleName x - | Just [m] <- -#if MIN_VERSION_ghc(9,4,0) - matchRegexUnifySpaces x "the module ‘([^’]*)’ does not export" - <|> matchRegexUnifySpaces x "nor ‘([^’]*)’ export" -#else - matchRegexUnifySpaces x "Module ‘([^’]*)’ does not export" - <|> matchRegexUnifySpaces x "nor ‘([^’]*)’ exports" -#endif + | Just [m] <- case ghcVersion of + GHC912 -> matchRegexUnifySpaces x "The module ‘([^’]*)’ does not export" + <|> matchRegexUnifySpaces x "nor ‘([^’]*)’ export" + _ -> matchRegexUnifySpaces x "the module ‘([^’]*)’ does not export" + <|> matchRegexUnifySpaces x "nor ‘([^’]*)’ export" = Just m | otherwise = Nothing @@ -1995,11 +2090,19 @@ smallerRangesForBindingExport lies b = where unqualify = snd . breakOnEnd "." b' = wrapOperatorInParens $ unqualify b + ranges' + ( L + _ + ( IEThingWith + _ + thing + _ + inners #if MIN_VERSION_ghc(9,9,0) - ranges' (L _ (IEThingWith _ thing _ inners _)) -#else - ranges' (L _ (IEThingWith _ thing _ inners)) + _ #endif + ) + ) | T.unpack (printOutputable thing) == b' = [] | otherwise = [ locA l' | L l' x <- inners, T.unpack (printOutputable x) == b'] diff --git a/plugins/hls-refactor-plugin/src/Development/IDE/Plugin/CodeAction/Args.hs b/plugins/hls-refactor-plugin/src/Development/IDE/Plugin/CodeAction/Args.hs index 0be04656bd..53ee5200c0 100644 --- a/plugins/hls-refactor-plugin/src/Development/IDE/Plugin/CodeAction/Args.hs +++ b/plugins/hls-refactor-plugin/src/Development/IDE/Plugin/CodeAction/Args.hs @@ -24,6 +24,7 @@ import Data.IORef.Extra import qualified Data.Map as Map import Data.Maybe (fromMaybe) import qualified Data.Text as T +import qualified Data.Text.Utf16.Rope.Mixed as Rope import Development.IDE hiding (pluginHandlers) import Development.IDE.Core.Shake @@ -69,7 +70,7 @@ runGhcideCodeAction state (CodeActionParams _ _ (TextDocumentIdentifier uri) _ra caaContents <- onceIO $ runRule GetFileContents <&> \case - Just (_, txt) -> txt + Just (_, mbContents) -> fmap Rope.toText mbContents Nothing -> Nothing caaDf <- onceIO $ fmap (ms_hspp_opts . 
pm_mod_summary) <$> caaParsedModule caaAnnSource <- onceIO $ runRule GetAnnotatedParsedSource diff --git a/plugins/hls-refactor-plugin/src/Development/IDE/Plugin/CodeAction/ExactPrint.hs b/plugins/hls-refactor-plugin/src/Development/IDE/Plugin/CodeAction/ExactPrint.hs index 7326e2d7e2..2994fe726d 100644 --- a/plugins/hls-refactor-plugin/src/Development/IDE/Plugin/CodeAction/ExactPrint.hs +++ b/plugins/hls-refactor-plugin/src/Development/IDE/Plugin/CodeAction/ExactPrint.hs @@ -1,3 +1,4 @@ +{-# LANGUAGE CPP #-} {-# LANGUAGE GADTs #-} {-# LANGUAGE TypeFamilies #-} module Development.IDE.Plugin.CodeAction.ExactPrint ( @@ -35,10 +36,8 @@ import Control.Lens (_head, _last, over) import Data.Bifunctor (first) import Data.Maybe (fromMaybe, mapMaybe) import Development.IDE.Plugin.CodeAction.Util -import GHC (AddEpAnn (..), - AnnContext (..), +import GHC (AnnContext (..), AnnList (..), - AnnParen (..), DeltaPos (SameLine), EpAnn (..), IsUnicodeSyntax (NormalSyntax), @@ -46,8 +45,17 @@ import GHC (AddEpAnn (..), TrailingAnn (AddCommaAnn), emptyComments, reAnnL) + -- See Note [Guidelines For Using CPP In GHCIDE Import Statements] +#if MIN_VERSION_ghc(9,11,0) +import GHC (EpToken (..) + , AnnListBrackets (..) + , EpUniToken (..)) +#else +import GHC (AddEpAnn (..), + AnnParen (..)) +#endif #if !MIN_VERSION_ghc(9,9,0) import Data.Default (Default (..)) import GHC (addAnns, ann) @@ -179,7 +187,9 @@ appendConstraint constraintT = go . traceAst "appendConstraint" -- For singleton constraints, the close Paren DP is attached to an HsPar wrapping the constraint -- we have to reposition it manually into the AnnContext close_dp = case ctxt of -#if MIN_VERSION_ghc(9,9,0) +#if MIN_VERSION_ghc(9,11,0) + [L _ (HsParTy (_, (EpTok ap_close)) _)] -> Just ap_close +#elif MIN_VERSION_ghc(9,9,0) [L _ (HsParTy AnnParen{ap_close} _)] -> Just ap_close #else [L _ (HsParTy EpAnn{anns=AnnParen{ap_close}} _)] -> Just ap_close @@ -203,7 +213,11 @@ appendConstraint constraintT = go . 
traceAst "appendConstraint" #else let context = Just $ reAnnL annCtxt emptyComments $ L lContext [resetEntryDP constraint] #endif +#if MIN_VERSION_ghc(9,11,0) + annCtxt = AnnContext (Just (EpUniTok (epl 1) NormalSyntax)) [EpTok (epl 0) | needsParens] [EpTok (epl 0) | needsParens] +#else annCtxt = AnnContext (Just (NormalSyntax, epl 1)) [epl 0 | needsParens] [epl 0 | needsParens] +#endif needsParens = hsTypeNeedsParens sigPrec $ unLoc constraint ast <- pure $ setEntryDP (makeDeltaAst ast) (SameLine 1) @@ -346,7 +360,9 @@ extendImportViaParent df parent child (L l it@ImportDecl{..}) #endif childRdr x :: LIE GhcPs = L ll' $ IEThingWith -#if MIN_VERSION_ghc(9,9,0) +#if MIN_VERSION_ghc(9,11,0) + (Nothing, (EpTok d1, NoEpTok, NoEpTok, EpTok noAnn)) +#elif MIN_VERSION_ghc(9,9,0) (Nothing, [AddEpAnn AnnOpenP d1, AddEpAnn AnnCloseP noAnn]) #elif MIN_VERSION_ghc(9,7,0) (Nothing, addAnns mempty [AddEpAnn AnnOpenP d1, AddEpAnn AnnCloseP def] emptyComments) @@ -384,6 +400,8 @@ extendImportViaParent df parent child (L l it@ImportDecl{..}) #endif #if MIN_VERSION_ghc(9,7,0) && !MIN_VERSION_ghc(9,9,0) newl = fmap (\ann -> ann ++ [AddEpAnn AnnDotdot d0]) <$> l''' +#elif MIN_VERSION_ghc(9,11,0) + newl = (\(open, _, comma, close) -> (open, EpTok d0, comma, close)) <$> l''' #else newl = (\ann -> ann ++ [AddEpAnn AnnDotdot d0]) <$> l''' #endif @@ -427,21 +445,31 @@ extendImportViaParent df parent child (L l it@ImportDecl{..}) parentRdr <- liftParseAST df parent let childRdr = reLocA $ L srcChild $ mkRdrUnqual $ mkVarOcc child isParentOperator = hasParen parent +#if MIN_VERSION_ghc(9,11,0) + let parentLIE = reLocA $ L srcParent $ if isParentOperator then IEType (EpTok (epl 0)) parentRdr' +#else let parentLIE = reLocA $ L srcParent $ if isParentOperator then IEType (epl 0) parentRdr' +#endif else IEName #if MIN_VERSION_ghc(9,5,0) noExtField #endif parentRdr' parentRdr' = modifyAnns parentRdr $ \case +#if MIN_VERSION_ghc(9,11,0) + it@NameAnn{nann_adornment = NameParens _ _} -> it{nann_adornment=NameParens (EpTok (epl 1)) (EpTok (epl 0))} +#else it@NameAnn{nann_adornment = NameParens} -> it{nann_open = epl 1, nann_close = epl 0} +#endif other -> other childLIE = reLocA $ L srcChild $ IEName #if MIN_VERSION_ghc(9,5,0) noExtField #endif childRdr -#if MIN_VERSION_ghc(9,9,0) +#if MIN_VERSION_ghc(9,11,0) + listAnn = (Nothing, (EpTok (epl 1), NoEpTok, NoEpTok, EpTok (epl 0))) +#elif MIN_VERSION_ghc(9,9,0) listAnn = (Nothing, [AddEpAnn AnnOpenP (epl 1), AddEpAnn AnnCloseP (epl 0)]) #elif MIN_VERSION_ghc(9,7,0) listAnn = (Nothing, epAnn srcParent [AddEpAnn AnnOpenP (epl 1), AddEpAnn AnnCloseP (epl 0)]) @@ -538,7 +566,10 @@ extendHiding :: extendHiding symbol (L l idecls) mlies df = do L l' lies <- case mlies of Nothing -> do -#if MIN_VERSION_ghc(9,9,0) +#if MIN_VERSION_ghc(9,11,0) + let ann :: EpAnn (AnnList (EpToken "hiding", [EpToken ","])) + ann = noAnnSrcSpanDP0 +#elif MIN_VERSION_ghc(9,9,0) let ann = noAnnSrcSpanDP0 #else src <- uniqueSrcSpanT @@ -549,9 +580,14 @@ extendHiding symbol (L l idecls) mlies df = do #else ann' = flip (fmap.fmap) ann $ \x -> x #endif +#if MIN_VERSION_ghc(9,11,0) + {al_rest = (EpTok (epl 1), [NoEpTok]) + ,al_brackets=ListParens (EpTok (epl 1)) (EpTok (epl 0)) +#else {al_rest = [AddEpAnn AnnHiding (epl 1)] ,al_open = Just $ AddEpAnn AnnOpenP (epl 1) ,al_close = Just $ AddEpAnn AnnCloseP (epl 0) +#endif } return $ L ann' [] Just pr -> pure pr diff --git a/plugins/hls-refactor-plugin/src/Development/IDE/Plugin/CodeAction/RuleTypes.hs 
b/plugins/hls-refactor-plugin/src/Development/IDE/Plugin/CodeAction/RuleTypes.hs index c338903d35..69f3332dc0 100644 --- a/plugins/hls-refactor-plugin/src/Development/IDE/Plugin/CodeAction/RuleTypes.hs +++ b/plugins/hls-refactor-plugin/src/Development/IDE/Plugin/CodeAction/RuleTypes.hs @@ -5,7 +5,6 @@ module Development.IDE.Plugin.CodeAction.RuleTypes import Control.DeepSeq (NFData) import Data.Hashable (Hashable) -import Data.Typeable (Typeable) import Development.IDE.Graph (RuleResult) import Development.IDE.Types.Exports import Development.IDE.Types.HscEnvEq (HscEnvEq) @@ -15,7 +14,7 @@ import GHC.Generics (Generic) type instance RuleResult PackageExports = ExportsMap newtype PackageExports = PackageExports HscEnvEq - deriving (Eq, Show, Typeable, Generic) + deriving (Eq, Show, Generic) instance Hashable PackageExports instance NFData PackageExports diff --git a/plugins/hls-refactor-plugin/src/Development/IDE/Plugin/Plugins/AddArgument.hs b/plugins/hls-refactor-plugin/src/Development/IDE/Plugin/Plugins/AddArgument.hs index ed2d3b4a73..f48d8355d7 100644 --- a/plugins/hls-refactor-plugin/src/Development/IDE/Plugin/Plugins/AddArgument.hs +++ b/plugins/hls-refactor-plugin/src/Development/IDE/Plugin/Plugins/AddArgument.hs @@ -50,7 +50,9 @@ import GHC (DeltaPos (..), IsUnicodeSyntax (NormalSyntax)) import Language.Haskell.GHC.ExactPrint (d1, setEntryDP) #endif - +#if MIN_VERSION_ghc(9,11,0) +import GHC.Parser.Annotation (EpToken(..)) +#endif -- When GHC tells us that a variable is not bound, it will tell us either: -- - there is an unbound variable with a given type @@ -77,19 +79,28 @@ plugin parsedModule Diagnostic {_message, _range} -- addArgToMatch "foo" `bar arg1 arg2 = ...` -- => (`bar arg1 arg2 foo = ...`, 2) addArgToMatch :: T.Text -> GenLocated l (Match GhcPs (LocatedA (HsExpr GhcPs))) -> (GenLocated l (Match GhcPs (LocatedA (HsExpr GhcPs))), Int) +#if MIN_VERSION_ghc(9,11,0) +addArgToMatch name (L locMatch (Match xMatch ctxMatch (L l pats) rhs)) = +#else addArgToMatch name (L locMatch (Match xMatch ctxMatch pats rhs)) = - let unqualName = mkRdrUnqual $ mkVarOcc $ T.unpack name +#endif #if MIN_VERSION_ghc(9,9,0) + let unqualName = mkRdrUnqual $ mkVarOcc $ T.unpack name newPat = L noAnnSrcSpanDP1 $ VarPat NoExtField $ L noAnn unqualName -- The intention is to move `= ...` (right-hand side with equals) to the right so there's 1 space between -- the newly added pattern and the rest indentRhs :: GRHSs GhcPs (LocatedA (HsExpr GhcPs)) -> GRHSs GhcPs (LocatedA (HsExpr GhcPs)) indentRhs rhs@GRHSs{grhssGRHSs} = rhs {grhssGRHSs = fmap (`setEntryDP` (SameLine 1)) grhssGRHSs } #else + let unqualName = mkRdrUnqual $ mkVarOcc $ T.unpack name newPat = L (noAnnSrcSpanDP1 generatedSrcSpan) $ VarPat NoExtField (noLocA unqualName) indentRhs = id #endif +#if MIN_VERSION_ghc(9,11,0) + in (L locMatch (Match xMatch ctxMatch (L l (pats <> [newPat])) (indentRhs rhs)), Prelude.length pats) +#else in (L locMatch (Match xMatch ctxMatch (pats <> [newPat]) (indentRhs rhs)), Prelude.length pats) +#endif -- Attempt to insert a binding pattern into each match for the given LHsDecl; succeeds only if the function is a FunBind. 
-- Also return: @@ -126,7 +137,7 @@ appendFinalPatToMatches name = \case -- -- TODO instead of inserting a typed hole; use GHC's suggested type from the error addArgumentAction :: ParsedModule -> Range -> T.Text -> Maybe T.Text -> Either PluginError [(T.Text, [TextEdit])] -addArgumentAction (ParsedModule _ moduleSrc _ _) range name _typ = do +addArgumentAction (ParsedModule _ moduleSrc _) range name _typ = do (newSource, _, _) <- runTransformT $ do (moduleSrc', join -> matchedDeclNameMay) <- addNameAsLastArgOfMatchingDecl #if MIN_VERSION_ghc(9,9,0) @@ -171,7 +182,11 @@ addTyHoleToTySigArg loc (L annHsSig (HsSig xHsSig tyVarBndrs lsigTy)) = ( noAnn , noExtField , HsUnrestrictedArrow (EpUniTok d1 NormalSyntax) +#if MIN_VERSION_ghc(9,11,0) + , L wildCardAnn $ HsWildCardTy NoEpTok +#else , L wildCardAnn $ HsWildCardTy noExtField +#endif ) #elif MIN_VERSION_ghc(9,4,0) wildCardAnn = SrcSpanAnn (EpAnn genAnchor1 (AnnListItem []) emptyComments) generatedSrcSpan diff --git a/plugins/hls-refactor-plugin/src/Development/IDE/Plugin/Plugins/Diagnostic.hs b/plugins/hls-refactor-plugin/src/Development/IDE/Plugin/Plugins/Diagnostic.hs index d64edbd0e2..7facc8f54c 100644 --- a/plugins/hls-refactor-plugin/src/Development/IDE/Plugin/Plugins/Diagnostic.hs +++ b/plugins/hls-refactor-plugin/src/Development/IDE/Plugin/Plugins/Diagnostic.hs @@ -21,6 +21,9 @@ matchRegex message regex = case message =~~ regex of Nothing -> Nothing -- | 'matchRegex' combined with 'unifySpaces' +-- +-- >>> matchRegexUnifySpaces "hello I'm a cow" "he(ll)o" +-- Just ["ll"] matchRegexUnifySpaces :: T.Text -> T.Text -> Maybe [T.Text] matchRegexUnifySpaces message = matchRegex (unifySpaces message) diff --git a/plugins/hls-refactor-plugin/src/Development/IDE/Plugin/Plugins/FillHole.hs b/plugins/hls-refactor-plugin/src/Development/IDE/Plugin/Plugins/FillHole.hs index 8016bcc305..eb6172c7fa 100644 --- a/plugins/hls-refactor-plugin/src/Development/IDE/Plugin/Plugins/FillHole.hs +++ b/plugins/hls-refactor-plugin/src/Development/IDE/Plugin/Plugins/FillHole.hs @@ -29,7 +29,7 @@ suggestFillHole Diagnostic{_range=_range,..} Just (firstChr, _) -> let isInfixOperator = firstChr == '(' name' = getOperatorNotation isInfixHole isInfixOperator name in - ( "replace " <> holeName <> " with " <> name + ( "Replace " <> holeName <> " with " <> name , TextEdit _range (if parenthise then addParens name' else name') ) getOperatorNotation True False name = addBackticks name diff --git a/plugins/hls-refactor-plugin/test/Main.hs b/plugins/hls-refactor-plugin/test/Main.hs index f913e71b55..f3756506e9 100644 --- a/plugins/hls-refactor-plugin/test/Main.hs +++ b/plugins/hls-refactor-plugin/test/Main.hs @@ -1,3 +1,4 @@ +{-# LANGUAGE CPP #-} {-# LANGUAGE AllowAmbiguousTypes #-} {-# LANGUAGE DataKinds #-} {-# LANGUAGE DuplicateRecordFields #-} @@ -21,7 +22,6 @@ import Data.Foldable import Data.List.Extra import Data.Maybe import qualified Data.Text as T -import Data.Tuple.Extra import Development.IDE.GHC.Util import Development.IDE.Plugin.Completions.Types (extendImportCommandId) import Development.IDE.Test @@ -39,7 +39,6 @@ import qualified System.IO.Extra import System.IO.Extra hiding (withTempDir) import System.Time.Extra import Test.Tasty -import Test.Tasty.ExpectedFailure import Test.Tasty.HUnit import Text.Regex.TDFA ((=~)) @@ -48,6 +47,7 @@ import Development.IDE.Plugin.CodeAction (matchRegExMultipleImp import Test.Hls import qualified Development.IDE.GHC.ExactPrint +import Development.IDE.Plugin.CodeAction (NotInScope (..)) import qualified 
Development.IDE.Plugin.CodeAction as Refactor import qualified Test.AddArgument @@ -70,6 +70,7 @@ tests = , codeActionTests , codeActionHelperFunctionTests , completionTests + , extractNotInScopeNameTests ] initializeTests :: TestTree @@ -302,6 +303,8 @@ codeActionTests = testGroup "code actions" , suggestImportClassMethodTests , suggestImportTests , suggestAddRecordFieldImportTests + , suggestAddCoerceMissingConstructorImportTests + , suggestAddGenericMissingConstructorImportTests , suggestHideShadowTests , fixConstructorImportTests , fixModuleImportTypoTests @@ -318,6 +321,7 @@ codeActionTests = testGroup "code actions" , addImplicitParamsConstraintTests , removeExportTests , Test.AddArgument.tests + , suggestAddRecordFieldUpdateImportTests ] insertImportTests :: TestTree @@ -337,67 +341,61 @@ insertImportTests = testGroup "insert import" "WhereDeclLowerInFileWithCommentsBeforeIt.hs" "WhereDeclLowerInFileWithCommentsBeforeIt.expected.hs" "import Data.Int" - , expectFailBecause - "'findNextPragmaPosition' function doesn't account for case when shebang is not placed at top of file" - (checkImport - "Shebang not at top with spaces" - "ShebangNotAtTopWithSpaces.hs" - "ShebangNotAtTopWithSpaces.expected.hs" - "import Data.Monoid") - , expectFailBecause - "'findNextPragmaPosition' function doesn't account for case when shebang is not placed at top of file" - (checkImport - "Shebang not at top no space" - "ShebangNotAtTopNoSpace.hs" - "ShebangNotAtTopNoSpace.expected.hs" - "import Data.Monoid") - , expectFailBecause - ("'findNextPragmaPosition' function doesn't account for case " - ++ "when OPTIONS_GHC pragma is not placed at top of file") - (checkImport - "OPTIONS_GHC pragma not at top with spaces" - "OptionsNotAtTopWithSpaces.hs" - "OptionsNotAtTopWithSpaces.expected.hs" - "import Data.Monoid") - , expectFailBecause - ("'findNextPragmaPosition' function doesn't account for " - ++ "case when shebang is not placed at top of file") - (checkImport - "Shebang not at top of file" - "ShebangNotAtTop.hs" - "ShebangNotAtTop.expected.hs" - "import Data.Monoid") - , expectFailBecause - ("'findNextPragmaPosition' function doesn't account for case " - ++ "when OPTIONS_GHC is not placed at top of file") - (checkImport - "OPTIONS_GHC pragma not at top of file" - "OptionsPragmaNotAtTop.hs" - "OptionsPragmaNotAtTop.expected.hs" - "import Data.Monoid") - , expectFailBecause - ("'findNextPragmaPosition' function doesn't account for case when " - ++ "OPTIONS_GHC pragma is not placed at top of file") - (checkImport - "pragma not at top with comment at top" - "PragmaNotAtTopWithCommentsAtTop.hs" - "PragmaNotAtTopWithCommentsAtTop.expected.hs" - "import Data.Monoid") - , expectFailBecause - ("'findNextPragmaPosition' function doesn't account for case when " - ++ "OPTIONS_GHC pragma is not placed at top of file") - (checkImport - "pragma not at top multiple comments" - "PragmaNotAtTopMultipleComments.hs" - "PragmaNotAtTopMultipleComments.expected.hs" - "import Data.Monoid") - , expectFailBecause - "'findNextPragmaPosition' function doesn't account for case of multiline pragmas" - (checkImport - "after multiline language pragmas" - "MultiLinePragma.hs" - "MultiLinePragma.expected.hs" - "import Data.Monoid") + -- TODO: 'findNextPragmaPosition' function doesn't account for case when shebang is not + -- placed at top of file + , checkImport + "Shebang not at top with spaces" + "ShebangNotAtTopWithSpaces.hs" + "ShebangNotAtTopWithSpaces.expected.hs" + "import Data.Monoid" + -- TODO: 'findNextPragmaPosition' function 
doesn't account for case when shebang is not + -- placed at top of file + , checkImport + "Shebang not at top no space" + "ShebangNotAtTopNoSpace.hs" + "ShebangNotAtTopNoSpace.expected.hs" + "import Data.Monoid" + -- TODO: 'findNextPragmaPosition' function doesn't account for case when OPTIONS_GHC pragma is + -- not placed at top of file + , checkImport + "OPTIONS_GHC pragma not at top with spaces" + "OptionsNotAtTopWithSpaces.hs" + "OptionsNotAtTopWithSpaces.expected.hs" + "import Data.Monoid" + -- TODO: 'findNextPragmaPosition' function doesn't account for case when shebang is not placed + -- at top of file + , checkImport + "Shebang not at top of file" + "ShebangNotAtTop.hs" + "ShebangNotAtTop.expected.hs" + "import Data.Monoid" + -- TODO: 'findNextPragmaPosition' function doesn't account for case when OPTIONS_GHC is not + -- placed at top of file + , checkImport + "OPTIONS_GHC pragma not at top of file" + "OptionsPragmaNotAtTop.hs" + "OptionsPragmaNotAtTop.expected.hs" + "import Data.Monoid" + -- TODO: 'findNextPragmaPosition' function doesn't account for case when OPTIONS_GHC pragma is + -- not placed at top of file + , checkImport + "pragma not at top with comment at top" + "PragmaNotAtTopWithCommentsAtTop.hs" + "PragmaNotAtTopWithCommentsAtTop.expected.hs" + "import Data.Monoid" + -- TODO: 'findNextPragmaPosition' function doesn't account for case when OPTIONS_GHC pragma is + -- not placed at top of file + , checkImport + "pragma not at top multiple comments" + "PragmaNotAtTopMultipleComments.hs" + "PragmaNotAtTopMultipleComments.expected.hs" + "import Data.Monoid" + -- TODO: 'findNextPragmaPosition' function doesn't account for case of multiline pragmas + , checkImport + "after multiline language pragmas" + "MultiLinePragma.hs" + "MultiLinePragma.expected.hs" + "import Data.Monoid" , checkImport "pragmas not at top with module declaration" "PragmaNotAtTopWithModuleDecl.hs" @@ -1347,8 +1345,7 @@ extendImportTests = testGroup "extend import actions" , "b :: A" , "b = ConstructorFoo" ]) - , brokenForGHC92 "On GHC 9.2, the error doesn't contain \"perhaps you want ...\" part from which import suggestion can be extracted."
$ - testSession "extend single line import in presence of extra parens" $ template + , testSession "extend single line import in presence of extra parens" $ template [] ("Main.hs", T.unlines [ "import Data.Monoid (First)" @@ -1514,27 +1511,48 @@ extendImportTests = testGroup "extend import actions" , "x :: (:~:) [] []" , "x = Refl" ]) - , expectFailBecause "importing pattern synonyms is unsupported" - $ testSession "extend import list with pattern synonym" $ template - [("ModuleA.hs", T.unlines - [ "{-# LANGUAGE PatternSynonyms #-}" - , "module ModuleA where" - , "pattern Some x = Just x" - ]) - ] - ("ModuleB.hs", T.unlines - [ "module ModuleB where" - , "import A ()" - , "k (Some x) = x" - ]) - (Range (Position 2 3) (Position 2 7)) - ["Add pattern Some to the import list of A"] - (T.unlines - [ "module ModuleB where" - , "import A (pattern Some)" - , "k (Some x) = x" - ]) - , ignoreForGhcVersions [GHC92, GHC94] "Diagnostic message has no suggestions" $ + -- TODO: importing pattern synonyms is unsupported + , testSessionExpectFail "extend import list with pattern synonym" + (BrokenIdeal $ + template + [("ModuleA.hs", T.unlines + [ "{-# LANGUAGE PatternSynonyms #-}" + , "module ModuleA where" + , "pattern Some x = Just x" + ]) + ] + ("ModuleB.hs", T.unlines + [ "module ModuleB where" + , "import A ()" + , "k (Some x) = x" + ] + ) + (Range (Position 2 3) (Position 2 7)) + ["Add pattern Some to the import list of A"] + (T.unlines + [ "module ModuleB where" + , "import A (pattern Some)" + , "k (Some x) = x" + ] + ) + ) + (BrokenCurrent $ + noCodeActionsTemplate + [("ModuleA.hs", T.unlines + [ "{-# LANGUAGE PatternSynonyms #-}" + , "module ModuleA where" + , "pattern Some x = Just x" + ]) + ] + ("ModuleB.hs", T.unlines + [ "module ModuleB where" + , "import A ()" + , "k (Some x) = x" + ] + ) + (Range (Position 2 3) (Position 2 7)) + ) + , ignoreForGhcVersions [GHC94] "Diagnostic message has no suggestions" $ testSession "type constructor name same as data constructor name" $ template [("ModuleA.hs", T.unlines [ "module ModuleA where" @@ -1602,19 +1620,10 @@ extendImportTests = testGroup "extend import actions" codeActionTitle CodeAction{_title=x} = x template setUpModules moduleUnderTest range expectedTitles expectedContentB = do - configureCheckProject overrideCheckProject + docB <- evalProject setUpModules moduleUnderTest + codeActions <- codeActions docB range + let actualTitles = codeActionTitle <$> codeActions - mapM_ (\(fileName, contents) -> createDoc fileName "haskell" contents) setUpModules - docB <- createDoc (fst moduleUnderTest) "haskell" (snd moduleUnderTest) - _ <- waitForDiagnostics - waitForProgressDone - actionsOrCommands <- getCodeActions docB range - let codeActions = - [ ca | InR ca <- actionsOrCommands - , let title = codeActionTitle ca - , "Add" `T.isPrefixOf` title && not ("Add argument" `T.isPrefixOf` title) - ] - actualTitles = codeActionTitle <$> codeActions -- Note that we are not testing the order of the actions, as the -- order of the expected actions indicates which one we'll execute -- in this test, i.e., the first one. @@ -1629,12 +1638,36 @@ extendImportTests = testGroup "extend import actions" contentAfterAction <- documentContents docB liftIO $ expectedContentB @=? contentAfterAction + noCodeActionsTemplate setUpModules moduleUnderTest range = do + docB <- evalProject setUpModules moduleUnderTest + codeActions' <- codeActions docB range + let actualTitles = codeActionTitle <$> codeActions' + liftIO $ [] @=? 
actualTitles + + evalProject setUpModules moduleUnderTest = do + configureCheckProject overrideCheckProject + + mapM_ (\(fileName, contents) -> createDoc fileName "haskell" contents) setUpModules + docB <- createDoc (fst moduleUnderTest) "haskell" (snd moduleUnderTest) + _ <- waitForDiagnostics + waitForProgressDone + + pure docB + + codeActions docB range = do + actionsOrCommands <- getCodeActions docB range + pure $ + [ ca | InR ca <- actionsOrCommands + , let title = codeActionTitle ca + , "Add" `T.isPrefixOf` title && not ("Add argument" `T.isPrefixOf` title) + ] + fixModuleImportTypoTests :: TestTree fixModuleImportTypoTests = testGroup "fix module import typo" [ testSession "works when single module suggested" $ do doc <- createDoc "A.hs" "haskell" "import Data.Cha" _ <- waitForDiagnostics - action <- pickActionWithTitle "replace with Data.Char" =<< getCodeActions doc (R 0 0 0 10) + action <- pickActionWithTitle "Replace with Data.Char" =<< getCodeActions doc (R 0 0 0 10) executeCodeAction action contentAfterAction <- documentContents doc liftIO $ contentAfterAction @?= "import Data.Char" @@ -1643,11 +1676,11 @@ fixModuleImportTypoTests = testGroup "fix module import typo" _ <- waitForDiagnostics actions <- getCodeActions doc (R 0 0 0 10) traverse_ (assertActionWithTitle actions) - [ "replace with Data.Eq" - , "replace with Data.Int" - , "replace with Data.Ix" + [ "Replace with Data.Eq" + , "Replace with Data.Int" + , "Replace with Data.Ix" ] - replaceWithDataEq <- pickActionWithTitle "replace with Data.Eq" actions + replaceWithDataEq <- pickActionWithTitle "Replace with Data.Eq" actions executeCodeAction replaceWithDataEq contentAfterAction <- documentContents doc liftIO $ contentAfterAction @?= "import Data.Eq" @@ -1788,7 +1821,8 @@ suggestImportTests = testGroup "suggest import actions" , test True [] "f = (.|.)" [] "import Data.Bits (Bits(..))" , test True [] "f = empty" [] "import Control.Applicative (Alternative(..))" ] - , expectFailBecause "importing pattern synonyms is unsupported" $ test True [] "k (Some x) = x" [] "import B (pattern Some)" + -- TODO: Importing pattern synonyms is unsupported + , test False [] "k (Some x) = x" [] "import B (pattern Some)" ] where test = test' False @@ -1821,8 +1855,14 @@ suggestImportTests = testGroup "suggest import actions" suggestAddRecordFieldImportTests :: TestTree suggestAddRecordFieldImportTests = testGroup "suggest imports of record fields when using OverloadedRecordDot" [ testGroup "The field is suggested when an instance resolution failure occurs" - [ ignoreForGhcVersions [GHC94, GHC96] "Extension not present <9.2, and the assist is derived from the help message in >=9.4" theTest + ([ ignoreForGhcVersions [GHC94, GHC96] "Extension not present <9.2, and the assist is derived from the help message in >=9.4" theTest ] + ++ [ + theTestIndirect qualifiedGhcRecords polymorphicType + | + qualifiedGhcRecords <- [False, True] + , polymorphicType <- [False, True] + ]) ] where theTest = testSessionWithExtraFiles "hover" def $ \dir -> do @@ -1843,6 +1883,144 @@ suggestAddRecordFieldImportTests = testGroup "suggest imports of record fields w contentAfterAction <- documentContents doc liftIO $ after @=? contentAfterAction + theTestIndirect qualifiedGhcRecords polymorphicType = testGroup + ((if qualifiedGhcRecords then "qualified-" else "unqualified-") + <> ("HasField " :: String) + <> + (if polymorphicType then "polymorphic-" else "monomorphic-") + <> "type ") + . 
(\x -> [x]) $ testSessionWithExtraFiles "hover" def $ \dir -> do + -- Hopefully enable project indexing? + configureCheckProject True + + let + before = T.unlines ["{-# LANGUAGE OverloadedRecordDot #-}", "module A where", if qualifiedGhcRecords then "" else "import GHC.Records", "import C (bar)", "spam = bar.foo"] + after = T.unlines ["{-# LANGUAGE OverloadedRecordDot #-}", "module A where", if qualifiedGhcRecords then "" else "import GHC.Records", "import C (bar)", "import B (Foo(..))", "spam = bar.foo"] + cradle = "cradle: {direct: {arguments: [-hide-all-packages, -package, base, -package, text, -package-env, -, A, B, C]}}" + liftIO $ writeFileUTF8 (dir </> "hie.yaml") cradle + liftIO $ writeFileUTF8 (dir </> "B.hs") $ unlines ["module B where", if polymorphicType then "data Foo x = Foo { foo :: x }" else "data Foo = Foo { foo :: Int }"] + liftIO $ writeFileUTF8 (dir </> "C.hs") $ unlines ["module C where", "import B", "bar = Foo 10" ] + doc <- createDoc "Test.hs" "haskell" before + waitForProgressDone + _ <- waitForDiagnostics + let defLine = 4 + range = Range (Position defLine 0) (Position defLine maxBound) + actions <- getCodeActions doc range + action <- pickActionWithTitle "import B (Foo(..))" actions + executeCodeAction action + contentAfterAction <- documentContents doc + liftIO $ after @=? contentAfterAction + +suggestAddRecordFieldUpdateImportTests :: TestTree +suggestAddRecordFieldUpdateImportTests = testGroup "suggest imports of record fields in update" + [ testGroup "implicit import of type" [theTest ] ] + where + theTest = testSessionWithExtraFiles "hover" def $ \dir -> do + configureCheckProject True + + let + before = T.unlines ["module C where", "import B", "biz = bar { foo = 100 }"] + after = T.unlines ["module C where", "import B", "import A (Foo(..))", "biz = bar { foo = 100 }"] + cradle = "cradle: {direct: {arguments: [-hide-all-packages, -package, base, -package, text, -package-env, -, A, B, C]}}" + liftIO $ writeFileUTF8 (dir </> "hie.yaml") cradle + liftIO $ writeFileUTF8 (dir </> "A.hs") $ unlines ["module A where", "data Foo = Foo { foo :: Int }"] + liftIO $ writeFileUTF8 (dir </> "B.hs") $ unlines ["module B where", "import A", "bar = Foo 10" ] + doc <- createDoc "Test.hs" "haskell" before + waitForProgressDone + diags <- waitForDiagnostics + liftIO $ print diags + let defLine = 2 + range = Range (Position defLine 0) (Position defLine maxBound) + actions <- getCodeActions doc range + liftIO $ print actions + action <- pickActionWithTitle "import A (Foo(..))" actions + executeCodeAction action + contentAfterAction <- documentContents doc + liftIO $ after @=? contentAfterAction + +extractNotInScopeNameTests :: TestTree +extractNotInScopeNameTests = + testGroup "extractNotInScopeName" [ + testCase ">=ghc 910" $ Refactor.extractNotInScopeName "Not in scope: ‘foo’" @=? 
Just (NotInScopeThing "foo"), + testCase " do + configureCheckProject False + let before = T.unlines ["module A where", "import Data.Coerce (coerce)", "import Data.Semigroup (Sum)", "bar = coerce (10 :: Int) :: Sum Int"] + after = T.unlines ["module A where", "import Data.Coerce (coerce)", "import Data.Semigroup (Sum)", "import Data.Semigroup (Sum(..))", "bar = coerce (10 :: Int) :: Sum Int"] + cradle = "cradle: {direct: {arguments: [-hide-all-packages, -package, base, -package, text, -package-env, -, A]}}" + liftIO $ writeFileUTF8 (dir </> "hie.yaml") cradle + doc <- createDoc "Test.hs" "haskell" before + waitForProgressDone + _ <- waitForDiagnostics + let defLine = 3 + range = Range (Position defLine 0) (Position defLine maxBound) + actions <- getCodeActions doc range + action <- pickActionWithTitle "import Data.Semigroup (Sum(..))" actions + executeCodeAction action + contentAfterAction <- documentContents doc + liftIO $ after @=? contentAfterAction + +suggestAddGenericMissingConstructorImportTests :: TestTree +suggestAddGenericMissingConstructorImportTests = testGroup "suggest imports of type constructors when using generic deriving" + [ testGroup "The type constructors are suggested when not in scope" + [ theTest + ] + ] + where + theTest = testSessionWithExtraFiles "hover" def $ \dir -> do + configureCheckProject False + let + before = T.unlines ["module A where", "import GHC.Generics", "import Data.Semigroup (Sum)", "deriving instance Generic (Sum Int)"] + after = T.unlines ["module A where", "import GHC.Generics", "import Data.Semigroup (Sum)", "import Data.Semigroup (Sum(..))", "deriving instance Generic (Sum Int)"] + cradle = "cradle: {direct: {arguments: [-hide-all-packages, -package, base, -package, text, -package-env, -, A]}}" + liftIO $ writeFileUTF8 (dir </> "hie.yaml") cradle + doc <- createDoc "Test.hs" "haskell" before + waitForProgressDone + _ <- waitForDiagnostics + let defLine = 3 + range = Range (Position defLine 0) (Position defLine maxBound) + actions <- getCodeActions doc range + action <- pickActionWithTitle "import Data.Semigroup (Sum(..))" actions + executeCodeAction action + contentAfterAction <- documentContents doc + liftIO $ after @=?
contentAfterAction + suggestImportDisambiguationTests :: TestTree suggestImportDisambiguationTests = testGroup "suggest import disambiguation actions" @@ -1967,7 +2145,7 @@ suggestImportDisambiguationTests = testGroup "suggest import disambiguation acti compareHideFunctionTo = compareTwo "HideFunction.hs" withTarget file locs k = runWithExtraFiles "hiding" $ \dir -> do doc <- openDoc file "haskell" - void $ expectDiagnostics [(file, [(DiagnosticSeverity_Error, loc, "Ambiguous occurrence") | loc <- locs])] + void $ expectDiagnostics [(file, [(DiagnosticSeverity_Error, loc, "Ambiguous occurrence", Nothing) | loc <- locs])] -- Since GHC 9.8: GHC-87110 actions <- getAllCodeActions doc k dir doc actions withHideFunction = withTarget ("HideFunction" <.> "hs") @@ -2426,7 +2604,7 @@ deleteUnusedDefinitionTests = testGroup "delete unused definition action" where testFor sourceLines pos@(l,c) expectedTitle expectedLines = do docId <- createDoc "A.hs" "haskell" $ T.unlines sourceLines - expectDiagnostics [ ("A.hs", [(DiagnosticSeverity_Warning, pos, "not used")]) ] + expectDiagnostics [ ("A.hs", [(DiagnosticSeverity_Warning, pos, "not used", Nothing)]) ] action <- pickActionWithTitle expectedTitle =<< getCodeActions docId (R l c l c) executeCodeAction action contentAfterAction <- documentContents docId @@ -2442,8 +2620,8 @@ addTypeAnnotationsToLiteralsTest = testGroup "add type annotations to literals t , "f = 1" ] (if ghcVersion >= GHC94 - then [ (DiagnosticSeverity_Warning, (3, 4), "Defaulting the type variable") ] - else [ (DiagnosticSeverity_Warning, (3, 4), "Defaulting the following constraint") ]) + then [ (DiagnosticSeverity_Warning, (3, 4), "Defaulting the type variable", Nothing) ] + else [ (DiagnosticSeverity_Warning, (3, 4), "Defaulting the following constraint", Nothing) ]) "Add type annotation ‘Integer’ to ‘1’" [ "{-# OPTIONS_GHC -Wtype-defaults #-}" , "module A (f) where" @@ -2461,8 +2639,8 @@ addTypeAnnotationsToLiteralsTest = testGroup "add type annotations to literals t , " in x" ] (if ghcVersion >= GHC94 - then [ (DiagnosticSeverity_Warning, (4, 12), "Defaulting the type variable") ] - else [ (DiagnosticSeverity_Warning, (4, 12), "Defaulting the following constraint") ]) + then [ (DiagnosticSeverity_Warning, (4, 12), "Defaulting the type variable", Nothing) ] + else [ (DiagnosticSeverity_Warning, (4, 12), "Defaulting the following constraint", Nothing) ]) "Add type annotation ‘Integer’ to ‘3’" [ "{-# OPTIONS_GHC -Wtype-defaults #-}" , "module A where" @@ -2481,8 +2659,8 @@ addTypeAnnotationsToLiteralsTest = testGroup "add type annotations to literals t , " in x" ] (if ghcVersion >= GHC94 - then [ (DiagnosticSeverity_Warning, (4, 20), "Defaulting the type variable") ] - else [ (DiagnosticSeverity_Warning, (4, 20), "Defaulting the following constraint") ]) + then [ (DiagnosticSeverity_Warning, (4, 20), "Defaulting the type variable", Nothing) ] + else [ (DiagnosticSeverity_Warning, (4, 20), "Defaulting the following constraint", Nothing) ]) "Add type annotation ‘Integer’ to ‘5’" [ "{-# OPTIONS_GHC -Wtype-defaults #-}" , "module A where" @@ -2503,21 +2681,21 @@ addTypeAnnotationsToLiteralsTest = testGroup "add type annotations to literals t ] (if ghcVersion >= GHC94 then - [ (DiagnosticSeverity_Warning, (6, 8), "Defaulting the type variable") - , (DiagnosticSeverity_Warning, (6, 16), "Defaulting the type variable") + [ (DiagnosticSeverity_Warning, (6, 8), "Defaulting the type variable", Nothing) + , (DiagnosticSeverity_Warning, (6, 16), "Defaulting the type variable", Nothing) ] else - 
[ (DiagnosticSeverity_Warning, (6, 8), "Defaulting the following constraint") - , (DiagnosticSeverity_Warning, (6, 16), "Defaulting the following constraint") + [ (DiagnosticSeverity_Warning, (6, 8), "Defaulting the following constraint", Nothing) + , (DiagnosticSeverity_Warning, (6, 16), "Defaulting the following constraint", Nothing) ]) - "Add type annotation ‘String’ to ‘\"debug\"’" + ("Add type annotation ‘" <> stringLit <> "’ to ‘\"debug\"’") [ "{-# OPTIONS_GHC -Wtype-defaults #-}" , "{-# LANGUAGE OverloadedStrings #-}" , "module A (f) where" , "" , "import Debug.Trace" , "" - , "f = seq (\"debug\" :: String) traceShow \"debug\"" + , "f = seq (\"debug\" :: "<> stringLit <> ") traceShow \"debug\"" ] , testSession "add default type to satisfy two constraints" $ testFor @@ -2530,16 +2708,16 @@ addTypeAnnotationsToLiteralsTest = testGroup "add type annotations to literals t , "f a = traceShow \"debug\" a" ] (if ghcVersion >= GHC94 - then [ (DiagnosticSeverity_Warning, (6, 6), "Defaulting the type variable") ] - else [ (DiagnosticSeverity_Warning, (6, 6), "Defaulting the following constraint") ]) - "Add type annotation ‘String’ to ‘\"debug\"’" + then [ (DiagnosticSeverity_Warning, (6, 6), "Defaulting the type variable", Nothing) ] + else [ (DiagnosticSeverity_Warning, (6, 6), "Defaulting the following constraint", Nothing) ]) + ("Add type annotation ‘" <> stringLit <> "’ to ‘\"debug\"’") [ "{-# OPTIONS_GHC -Wtype-defaults #-}" , "{-# LANGUAGE OverloadedStrings #-}" , "module A (f) where" , "" , "import Debug.Trace" , "" - , "f a = traceShow (\"debug\" :: String) a" + , "f a = traceShow (\"debug\" :: " <> stringLit <> ") a" ] , testSession "add default type to satisfy two constraints with duplicate literals" $ testFor @@ -2552,23 +2730,24 @@ addTypeAnnotationsToLiteralsTest = testGroup "add type annotations to literals t , "f = seq (\"debug\" :: [Char]) (seq (\"debug\" :: [Char]) (traceShow \"debug\"))" ] (if ghcVersion >= GHC94 - then [ (DiagnosticSeverity_Warning, (6, 54), "Defaulting the type variable") ] - else [ (DiagnosticSeverity_Warning, (6, 54), "Defaulting the following constraint") ]) - "Add type annotation ‘String’ to ‘\"debug\"’" + then [ (DiagnosticSeverity_Warning, (6, 54), "Defaulting the type variable", Nothing) ] + else [ (DiagnosticSeverity_Warning, (6, 54), "Defaulting the following constraint", Nothing) ]) + ("Add type annotation ‘"<> stringLit <>"’ to ‘\"debug\"’") [ "{-# OPTIONS_GHC -Wtype-defaults #-}" , "{-# LANGUAGE OverloadedStrings #-}" , "module A (f) where" , "" , "import Debug.Trace" , "" - , "f = seq (\"debug\" :: [Char]) (seq (\"debug\" :: [Char]) (traceShow (\"debug\" :: String)))" + , "f = seq (\"debug\" :: [Char]) (seq (\"debug\" :: [Char]) (traceShow (\"debug\" :: "<> stringLit <> ")))" ] ] where + stringLit = if ghcVersion >= GHC912 then "[Char]" else "String" testFor sourceLines diag expectedTitle expectedLines = do docId <- createDoc "A.hs" "haskell" $ T.unlines sourceLines expectDiagnostics [ ("A.hs", diag) ] - let cursors = map snd3 diag + let cursors = map (\(_, snd, _, _) -> snd) diag (ls, cs) = minimum cursors (le, ce) = maximum cursors @@ -2612,7 +2791,7 @@ importRenameActionTests = testGroup "import rename actions" $ where check modname = checkCodeAction ("Data.Mape -> Data." <> T.unpack modname) - ("replace with Data." <> modname) + ("Replace with Data." <> modname) (T.unlines [ "module Testing where" , "import Data.Mape" @@ -2658,33 +2837,33 @@ fillTypedHoleTests = let liftIO $ expectedCode @=? 
modifiedCode in testGroup "fill typed holes" - [ check "replace _ with show" + [ check "Replace _ with show" "_" "n" "n" "show" "n" "n" - , check "replace _ with globalConvert" + , check "Replace _ with globalConvert" "_" "n" "n" "globalConvert" "n" "n" - , check "replace _convertme with localConvert" + , check "Replace _convertme with localConvert" "_convertme" "n" "n" "localConvert" "n" "n" - , check "replace _b with globalInt" + , check "Replace _b with globalInt" "_a" "_b" "_c" "_a" "globalInt" "_c" - , check "replace _c with globalInt" + , check "Replace _c with globalInt" "_a" "_b" "_c" "_a" "_b" "globalInt" - , check "replace _c with parameterInt" + , check "Replace _c with parameterInt" "_a" "_b" "_c" "_a" "_b" "parameterInt" - , check "replace _ with foo _" + , check "Replace _ with foo _" "_" "n" "n" "(foo _)" "n" "n" - , testSession "replace _toException with E.toException" $ do + , testSession "Replace _toException with E.toException" $ do let mkDoc x = T.unlines [ "module Testing where" , "import qualified Control.Exception as E" @@ -2693,7 +2872,7 @@ fillTypedHoleTests = let doc <- createDoc "Test.hs" "haskell" $ mkDoc "_toException" _ <- waitForDiagnostics actions <- getCodeActions doc (Range (Position 3 0) (Position 3 maxBound)) - chosen <- pickActionWithTitle "replace _toException with E.toException" actions + chosen <- pickActionWithTitle "Replace _toException with E.toException" actions executeCodeAction chosen modifiedCode <- documentContents doc liftIO $ mkDoc "E.toException" @=? modifiedCode @@ -2709,7 +2888,7 @@ fillTypedHoleTests = let doc <- createDoc "Test.hs" "haskell" $ mkDoc "`_`" _ <- waitForDiagnostics actions <- getCodeActions doc (Range (Position 5 16) (Position 5 19)) - chosen <- pickActionWithTitle "replace _ with foo" actions + chosen <- pickActionWithTitle "Replace _ with foo" actions executeCodeAction chosen modifiedCode <- documentContents doc liftIO $ mkDoc "`foo`" @=? modifiedCode @@ -2722,7 +2901,7 @@ fillTypedHoleTests = let doc <- createDoc "Test.hs" "haskell" $ mkDoc "_" _ <- waitForDiagnostics actions <- getCodeActions doc (Range (Position 2 13) (Position 2 14)) - chosen <- pickActionWithTitle "replace _ with (<$>)" actions + chosen <- pickActionWithTitle "Replace _ with (<$>)" actions executeCodeAction chosen modifiedCode <- documentContents doc liftIO $ mkDoc "(<$>)" @=? modifiedCode @@ -2735,7 +2914,7 @@ fillTypedHoleTests = let doc <- createDoc "Test.hs" "haskell" $ mkDoc "`_`" _ <- waitForDiagnostics actions <- getCodeActions doc (Range (Position 2 16) (Position 2 19)) - chosen <- pickActionWithTitle "replace _ with (<$>)" actions + chosen <- pickActionWithTitle "Replace _ with (<$>)" actions executeCodeAction chosen modifiedCode <- documentContents doc liftIO $ mkDoc "<$>" @=? modifiedCode @@ -3180,6 +3359,10 @@ addSigActionTests = let executeCodeAction chosenAction modifiedCode <- documentContents doc liftIO $ expectedCode @=? modifiedCode + issue806 = if ghcVersion >= GHC912 then + "hello = print" >:: "hello :: GHC.Types.ZonkAny 0 -> IO ()" -- GHC now returns ZonkAny 0 instead of Any. https://gitlab.haskell.org/ghc/ghc/-/issues/25895 + else + "hello = print" >:: "hello :: GHC.Types.Any -> IO ()" -- Documents current behavior outlined in #806 in testGroup "add signature" [ "abc = True" >:: "abc :: Bool" @@ -3188,7 +3371,7 @@ addSigActionTests = let , "(!!!) a b = a > b" >:: "(!!!) 
:: Ord a => a -> a -> Bool" , "a >>>> b = a + b" >:: "(>>>>) :: Num a => a -> a -> a" , "a `haha` b = a b" >:: "haha :: (t1 -> t2) -> t1 -> t2" - , "hello = print" >:: "hello :: GHC.Types.Any -> IO ()" -- Documents current behavior outlined in #806 + , issue806 , "pattern Some a = Just a" >:: "pattern Some :: a -> Maybe a" , "pattern Some a <- Just a" >:: "pattern Some :: a -> Maybe a" , "pattern Some a <- Just a\n where Some a = Just a" >:: "pattern Some :: a -> Maybe a" @@ -3222,7 +3405,7 @@ exportUnusedTests = testGroup "export unused actions" ] (R 2 0 2 11) "Export ‘bar’" - , ignoreForGhcVersions [GHC92, GHC94] "Diagnostic message has no suggestions" $ + , ignoreForGhcVersions [GHC94] "Diagnostic message has no suggestions" $ testSession "type is exported but not the constructor of same name" $ templateNoAction [ "{-# OPTIONS_GHC -Wunused-top-binds #-}" , "module A (Foo) where" @@ -3384,6 +3567,19 @@ exportUnusedTests = testGroup "export unused actions" , "module A (pattern Foo) where" , "pattern Foo a <- (a, _)" ] + , testSession "unused pattern synonym operator" $ template + [ "{-# OPTIONS_GHC -Wunused-top-binds #-}" + , "{-# LANGUAGE PatternSynonyms #-}" + , "module A () where" + , "pattern x :+ y = (x, y)" + ] + (R 3 0 3 12) + "Export ‘:+’" + [ "{-# OPTIONS_GHC -Wunused-top-binds #-}" + , "{-# LANGUAGE PatternSynonyms #-}" + , "module A (pattern (:+)) where" + , "pattern x :+ y = (x, y)" + ] , testSession "unused data type" $ template [ "{-# OPTIONS_GHC -Wunused-top-binds #-}" , "module A () where" @@ -3805,6 +4001,13 @@ assertActionWithTitle actions title = testSession :: TestName -> Session () -> TestTree testSession name = testCase name . run +testSessionExpectFail + :: TestName + -> ExpectBroken 'Ideal (Session ()) + -> ExpectBroken 'Current (Session ()) + -> TestTree +testSessionExpectFail name _ = testSession name . unCurrent + testSessionWithExtraFiles :: HasCallStack => FilePath -> TestName -> (FilePath -> Session ()) -> TestTree testSessionWithExtraFiles prefix name = testCase name . runWithExtraFiles prefix @@ -3845,11 +4048,7 @@ pattern R x y x' y' = Range (Position x y) (Position x' y') -- Which we need to do on macOS since the $TMPDIR can be in @/private/var@ or -- @/var@ withTempDir :: (FilePath -> IO a) -> IO a -withTempDir f = System.IO.Extra.withTempDir $ \dir -> - canonicalizePath dir >>= f +withTempDir f = System.IO.Extra.withTempDir $ (canonicalizePath >=> f) brokenForGHC94 :: String -> TestTree -> TestTree brokenForGHC94 = knownBrokenForGhcVersions [GHC94] - -brokenForGHC92 :: String -> TestTree -> TestTree -brokenForGHC92 = knownBrokenForGhcVersions [GHC92] diff --git a/plugins/hls-refactor-plugin/test/Test/AddArgument.hs b/plugins/hls-refactor-plugin/test/Test/AddArgument.hs index 2f741c0003..a0bf8b004e 100644 --- a/plugins/hls-refactor-plugin/test/Test/AddArgument.hs +++ b/plugins/hls-refactor-plugin/test/Test/AddArgument.hs @@ -35,7 +35,7 @@ tests = mkGoldenAddArgTest "AddArgFromLet" (r 2 0 2 50), mkGoldenAddArgTest "AddArgFromWhere" (r 3 0 3 50), -- TODO can we make this work for GHC 9.10? 
- knownBrokenForGhcVersions [GHC910] "In GHC 9.10 end-of-line comment annotation is in different place" $ + knownBrokenForGhcVersions [GHC910, GHC912] "In GHC 9.10 and 9.12 end-of-line comment annotation is in different place" $ mkGoldenAddArgTest "AddArgFromWhereComments" (r 3 0 3 50), mkGoldenAddArgTest "AddArgWithTypeSynSig" (r 2 0 2 50), mkGoldenAddArgTest "AddArgWithTypeSynSigContravariant" (r 2 0 2 50), diff --git a/plugins/hls-refactor-plugin/test/data/import-placement/MultiLinePragma.expected.hs b/plugins/hls-refactor-plugin/test/data/import-placement/MultiLinePragma.expected.hs index ca0b9f28dc..e9e8f4f604 100644 --- a/plugins/hls-refactor-plugin/test/data/import-placement/MultiLinePragma.expected.hs +++ b/plugins/hls-refactor-plugin/test/data/import-placement/MultiLinePragma.expected.hs @@ -3,8 +3,8 @@ {-# LANGUAGE RecordWildCards, OverloadedStrings #-} {-# OPTIONS_GHC -Wall, - -Wno-unused-imports #-} import Data.Monoid + -Wno-unused-imports #-} -- some comment diff --git a/plugins/hls-refactor-plugin/test/data/import-placement/OptionsNotAtTopWithSpaces.expected.hs b/plugins/hls-refactor-plugin/test/data/import-placement/OptionsNotAtTopWithSpaces.expected.hs index 912d6a210c..8595bca913 100644 --- a/plugins/hls-refactor-plugin/test/data/import-placement/OptionsNotAtTopWithSpaces.expected.hs +++ b/plugins/hls-refactor-plugin/test/data/import-placement/OptionsNotAtTopWithSpaces.expected.hs @@ -2,7 +2,6 @@ {-# LANGUAGE TupleSections #-} -import Data.Monoid @@ -11,6 +10,7 @@ class Semigroup a => SomeData a instance SomeData All {-# OPTIONS_GHC -Wno-unrecognised-pragmas #-} +import Data.Monoid addOne :: Int -> Int addOne x = x + 1 diff --git a/plugins/hls-refactor-plugin/test/data/import-placement/OptionsPragmaNotAtTop.expected.hs b/plugins/hls-refactor-plugin/test/data/import-placement/OptionsPragmaNotAtTop.expected.hs index 55a6c60dbb..a92bbab580 100644 --- a/plugins/hls-refactor-plugin/test/data/import-placement/OptionsPragmaNotAtTop.expected.hs +++ b/plugins/hls-refactor-plugin/test/data/import-placement/OptionsPragmaNotAtTop.expected.hs @@ -1,8 +1,8 @@ -import Data.Monoid class Semigroup a => SomeData a instance SomeData All {-# OPTIONS_GHC -Wno-unrecognised-pragmas #-} +import Data.Monoid addOne :: Int -> Int addOne x = x + 1 diff --git a/plugins/hls-refactor-plugin/test/data/import-placement/PragmaNotAtTopMultipleComments.expected.hs b/plugins/hls-refactor-plugin/test/data/import-placement/PragmaNotAtTopMultipleComments.expected.hs index eead1cb55e..cbe451714d 100644 --- a/plugins/hls-refactor-plugin/test/data/import-placement/PragmaNotAtTopMultipleComments.expected.hs +++ b/plugins/hls-refactor-plugin/test/data/import-placement/PragmaNotAtTopMultipleComments.expected.hs @@ -9,7 +9,6 @@ comment -} {-# LANGUAGE TupleSections #-} -import Data.Monoid {- some comment -} -- again @@ -18,6 +17,7 @@ instance SomeData All #! 
nix-shell --pure -i runghc -p "haskellPackages.ghcWithPackages (hp: with hp; [ turtle ])" {-# OPTIONS_GHC -Wno-unrecognised-pragmas #-} +import Data.Monoid addOne :: Int -> Int addOne x = x + 1 diff --git a/plugins/hls-refactor-plugin/test/data/import-placement/PragmaNotAtTopWithCommentsAtTop.expected.hs b/plugins/hls-refactor-plugin/test/data/import-placement/PragmaNotAtTopWithCommentsAtTop.expected.hs index 57fc1614be..57ab794a7e 100644 --- a/plugins/hls-refactor-plugin/test/data/import-placement/PragmaNotAtTopWithCommentsAtTop.expected.hs +++ b/plugins/hls-refactor-plugin/test/data/import-placement/PragmaNotAtTopWithCommentsAtTop.expected.hs @@ -4,7 +4,6 @@ -- another comment {-# LANGUAGE TupleSections #-} -import Data.Monoid {- some comment -} @@ -13,6 +12,7 @@ instance SomeData All #! nix-shell --pure -i runghc -p "haskellPackages.ghcWithPackages (hp: with hp; [ turtle ])" {-# OPTIONS_GHC -Wno-unrecognised-pragmas #-} +import Data.Monoid addOne :: Int -> Int addOne x = x + 1 diff --git a/plugins/hls-refactor-plugin/test/data/import-placement/ShebangNotAtTop.expected.hs b/plugins/hls-refactor-plugin/test/data/import-placement/ShebangNotAtTop.expected.hs index 09e503ddd3..230710232e 100644 --- a/plugins/hls-refactor-plugin/test/data/import-placement/ShebangNotAtTop.expected.hs +++ b/plugins/hls-refactor-plugin/test/data/import-placement/ShebangNotAtTop.expected.hs @@ -1,10 +1,10 @@ {-# LANGUAGE OverloadedStrings #-} -import Data.Monoid class Semigroup a => SomeData a instance SomeData All #! nix-shell --pure -i runghc -p "haskellPackages.ghcWithPackages (hp: with hp; [ turtle ])" +import Data.Monoid f :: Int -> Int f x = x * x diff --git a/plugins/hls-refactor-plugin/test/data/import-placement/ShebangNotAtTopNoSpace.expected.hs b/plugins/hls-refactor-plugin/test/data/import-placement/ShebangNotAtTopNoSpace.expected.hs index b367314238..c5977503a6 100644 --- a/plugins/hls-refactor-plugin/test/data/import-placement/ShebangNotAtTopNoSpace.expected.hs +++ b/plugins/hls-refactor-plugin/test/data/import-placement/ShebangNotAtTopNoSpace.expected.hs @@ -1,8 +1,8 @@ -import Data.Monoid class Semigroup a => SomeData a instance SomeData All #! nix-shell --pure -i runghc -p "haskellPackages.ghcWithPackages (hp: with hp; [ turtle ])" +import Data.Monoid f :: Int -> Int f x = x * x diff --git a/plugins/hls-refactor-plugin/test/data/import-placement/ShebangNotAtTopWithSpaces.expected.hs b/plugins/hls-refactor-plugin/test/data/import-placement/ShebangNotAtTopWithSpaces.expected.hs index 4c6cbe3917..8d358468da 100644 --- a/plugins/hls-refactor-plugin/test/data/import-placement/ShebangNotAtTopWithSpaces.expected.hs +++ b/plugins/hls-refactor-plugin/test/data/import-placement/ShebangNotAtTopWithSpaces.expected.hs @@ -6,7 +6,6 @@ {-# LANGUAGE TupleSections #-} -import Data.Monoid @@ -16,6 +15,7 @@ instance SomeData All #! 
nix-shell --pure -i runghc -p "haskellPackages.ghcWithPackages (hp: with hp; [ turtle ])" {-# OPTIONS_GHC -Wno-unrecognised-pragmas #-} +import Data.Monoid addOne :: Int -> Int addOne x = x + 1 diff --git a/plugins/hls-rename-plugin/src/Ide/Plugin/Rename.hs b/plugins/hls-rename-plugin/src/Ide/Plugin/Rename.hs index 2aeb16a808..7cc1122982 100644 --- a/plugins/hls-rename-plugin/src/Ide/Plugin/Rename.hs +++ b/plugins/hls-rename-plugin/src/Ide/Plugin/Rename.hs @@ -29,6 +29,7 @@ import qualified Data.Set as S import qualified Data.Text as T import Development.IDE (Recorder, WithPriority, usePropertyAction) +import Development.IDE.Core.FileStore (getVersionedTextDoc) import Development.IDE.Core.PluginUtils import Development.IDE.Core.RuleTypes import Development.IDE.Core.Service @@ -109,7 +110,7 @@ renameProvider state pluginId (RenameParams _prog (TextDocumentIdentifier uri) p let newName = mkTcOcc $ T.unpack newNameText filesRefs = collectWith locToUri refs getFileEdit (uri, locations) = do - verTxtDocId <- lift $ pluginGetVersionedTextDoc (TextDocumentIdentifier uri) + verTxtDocId <- liftIO $ runAction "rename: getVersionedTextDoc" state $ getVersionedTextDoc (TextDocumentIdentifier uri) getSrcEdit state verTxtDocId (replaceRefs newName locations) fileEdits <- mapM getFileEdit filesRefs pure $ InL $ fold fileEdits diff --git a/plugins/hls-rename-plugin/test/Main.hs b/plugins/hls-rename-plugin/test/Main.hs index cd4d3f6f88..5f7fb818ff 100644 --- a/plugins/hls-rename-plugin/test/Main.hs +++ b/plugins/hls-rename-plugin/test/Main.hs @@ -20,18 +20,13 @@ main = defaultTestRunner tests renamePlugin :: PluginTestDescriptor Rename.Log renamePlugin = mkPluginTestDescriptor Rename.descriptor "rename" --- See https://github.com/wz1000/HieDb/issues/45 -recordConstructorIssue :: String -recordConstructorIssue = "HIE references for record fields incorrect with GHC versions >= 9" - tests :: TestTree tests = testGroup "Rename" [ goldenWithRename "Data constructor" "DataConstructor" $ \doc -> rename doc (Position 0 15) "Op" , goldenWithRename "Exported function" "ExportedFunction" $ \doc -> rename doc (Position 2 1) "quux" - , ignoreForGhcVersions [GHC92] recordConstructorIssue $ - goldenWithRename "Field Puns" "FieldPuns" $ \doc -> + , goldenWithRename "Field Puns" "FieldPuns" $ \doc -> rename doc (Position 7 13) "bleh" , goldenWithRename "Function argument" "FunctionArgument" $ \doc -> rename doc (Position 3 4) "y" @@ -45,8 +40,7 @@ tests = testGroup "Rename" rename doc (Position 3 8) "baz" , goldenWithRename "Import hiding" "ImportHiding" $ \doc -> rename doc (Position 0 22) "hiddenFoo" - , ignoreForGhcVersions [GHC92] recordConstructorIssue $ - goldenWithRename "Indirect Puns" "IndirectPuns" $ \doc -> + , goldenWithRename "Indirect Puns" "IndirectPuns" $ \doc -> rename doc (Position 4 23) "blah" , goldenWithRename "Let expression" "LetExpression" $ \doc -> rename doc (Position 5 11) "foobar" @@ -58,8 +52,7 @@ tests = testGroup "Rename" rename doc (Position 3 12) "baz" , goldenWithRename "Realigns do block indentation" "RealignDo" $ \doc -> rename doc (Position 0 2) "fooBarQuux" - , ignoreForGhcVersions [GHC92] recordConstructorIssue $ - goldenWithRename "Record field" "RecordField" $ \doc -> + , goldenWithRename "Record field" "RecordField" $ \doc -> rename doc (Position 6 9) "number" , goldenWithRename "Shadowed name" "ShadowedName" $ \doc -> rename doc (Position 1 1) "baz" diff --git a/plugins/hls-retrie-plugin/src/Ide/Plugin/Retrie.hs b/plugins/hls-retrie-plugin/src/Ide/Plugin/Retrie.hs index 
ca82fc73e8..fe72d945f4 100644 --- a/plugins/hls-retrie-plugin/src/Ide/Plugin/Retrie.hs +++ b/plugins/hls-retrie-plugin/src/Ide/Plugin/Retrie.hs @@ -42,6 +42,7 @@ import Data.Monoid (First (First)) import Data.String (IsString) import qualified Data.Text as T import qualified Data.Text.Encoding as T +import qualified Data.Text.Utf16.Rope.Mixed as Rope import Development.IDE hiding (pluginHandlers) import Development.IDE.Core.Actions (lookupMod) import Development.IDE.Core.PluginUtils @@ -127,9 +128,7 @@ import Retrie.SYB (everything, extQ, import Retrie.Types import Retrie.Universe (Universe) -#if MIN_VERSION_ghc(9,3,0) import GHC.Types.PkgQual -#endif data Log = LogParsingModule FilePath @@ -503,7 +502,7 @@ data CallRetrieError | NoParse NormalizedFilePath | GHCParseError NormalizedFilePath String | NoTypeCheck NormalizedFilePath - deriving (Eq, Typeable) + deriving (Eq) instance Show CallRetrieError where show (CallRetrieInternalError msg f) = msg <> " - " <> fromNormalizedFilePath f @@ -735,11 +734,7 @@ toImportDecl AddImport {..} = GHC.ImportDecl {ideclSource = ideclSource', ..} ideclAs = toMod <$> ideclAsString ideclQualified = if ideclQualifiedBool then GHC.QualifiedPre else GHC.NotQualified -#if MIN_VERSION_ghc(9,3,0) ideclPkgQual = NoRawPkgQual -#else - ideclPkgQual = Nothing -#endif #if MIN_VERSION_ghc(9,5,0) ideclImportList = Nothing @@ -784,10 +779,10 @@ getCPPmodule recorder state session t = do return (fixities, parsed) contents <- do - (_, mbContentsVFS) <- + mbContentsVFS <- runAction "Retrie.GetFileContents" state $ getFileContents nt case mbContentsVFS of - Just contents -> return contents + Just contents -> return $ Rope.toText contents Nothing -> T.decodeUtf8 <$> BS.readFile (fromNormalizedFilePath nt) if any (T.isPrefixOf "#if" . 
T.toLower) (T.lines contents) then do diff --git a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Types.hs b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Types.hs index cda4fda6e6..7f445bf7ac 100644 --- a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Types.hs +++ b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Types.hs @@ -10,7 +10,6 @@ module Ide.Plugin.SemanticTokens.Types where import Control.DeepSeq (NFData (rnf), rwhnf) import qualified Data.Array as A import Data.Default (Default (def)) -import Data.Generics (Typeable) import Development.IDE (Pretty (pretty), RuleResult) import qualified Development.IDE.Core.Shake as Shake import Development.IDE.GHC.Compat hiding (loc) @@ -108,7 +107,7 @@ instance Show Loc where show (Loc line startChar len) = show line <> ":" <> show startChar <> "-" <> show (startChar + len) data GetSemanticTokens = GetSemanticTokens - deriving (Eq, Show, Typeable, Generic) + deriving (Eq, Show, Generic) instance Hashable GetSemanticTokens diff --git a/plugins/hls-semantic-tokens-plugin/test/SemanticTokensTest.hs b/plugins/hls-semantic-tokens-plugin/test/SemanticTokensTest.hs index f5613fa42a..a0d1648fb3 100644 --- a/plugins/hls-semantic-tokens-plugin/test/SemanticTokensTest.hs +++ b/plugins/hls-semantic-tokens-plugin/test/SemanticTokensTest.hs @@ -10,8 +10,7 @@ import Data.Functor (void) import qualified Data.List as T import Data.Map.Strict as Map hiding (map) import Data.String (fromString) -import Data.Text hiding (length, map, - unlines) +import Data.Text (Text) import qualified Data.Text as Text import qualified Data.Text.Utf16.Rope.Mixed as Rope import Data.Version (Version (..)) @@ -225,7 +224,7 @@ semanticTokensFullDeltaTests = semanticTokensTests :: TestTree semanticTokensTests = - testGroup "other semantic Token test" $ + testGroup "other semantic Token test" [ testCase "module import test" $ do let file1 = "TModuleA.hs" let file2 = "TModuleB.hs" @@ -263,10 +262,9 @@ semanticTokensTests = goldenWithSemanticTokensWithDefaultConfig "pattern bind" "TPatternSynonym", goldenWithSemanticTokensWithDefaultConfig "type family" "TTypefamily", goldenWithSemanticTokensWithDefaultConfig "TUnicodeSyntax" "TUnicodeSyntax", - goldenWithSemanticTokensWithDefaultConfig "TQualifiedName" "TQualifiedName" + goldenWithSemanticTokensWithDefaultConfig "TQualifiedName" "TQualifiedName", + goldenWithSemanticTokensWithDefaultConfig "TDoc" "TDoc" ] - -- not supported in ghc92 - ++ [goldenWithSemanticTokensWithDefaultConfig "TDoc" "TDoc" | ghcVersion > GHC92] semanticTokensDataTypeTests :: TestTree semanticTokensDataTypeTests = diff --git a/plugins/hls-splice-plugin/src/Ide/Plugin/Splice.hs b/plugins/hls-splice-plugin/src/Ide/Plugin/Splice.hs index 6e913d8367..8955b76e3c 100644 --- a/plugins/hls-splice-plugin/src/Ide/Plugin/Splice.hs +++ b/plugins/hls-splice-plugin/src/Ide/Plugin/Splice.hs @@ -38,12 +38,14 @@ import Data.Maybe (fromMaybe, listToMaybe, mapMaybe) import qualified Data.Text as T import Development.IDE +import Development.IDE.Core.FileStore (getVersionedTextDoc) import Development.IDE.Core.PluginUtils import Development.IDE.GHC.Compat as Compat import Development.IDE.GHC.Compat.ExactPrint import qualified Development.IDE.GHC.Compat.Util as Util import Development.IDE.GHC.ExactPrint import GHC.Exts +import qualified GHC.Runtime.Loader as Loader import qualified GHC.Types.Error as Error import Ide.Plugin.Error (PluginError (PluginInternalError)) import Ide.Plugin.Splice.Types @@ -191,7 +193,7 @@ 
expandTHSplice _eStyle ideState _ params@ExpandSpliceParams {..} = ExceptT $ do pure (Right edits) case res of Nothing -> pure $ Right $ InR Null - Just (Left err) -> pure $ Left $ err + Just (Left err) -> pure $ Left err Just (Right edit) -> do _ <- pluginSendRequest SMethod_WorkspaceApplyEdit (ApplyWorkspaceEditParams Nothing edit) (\_ -> pure ()) pure $ Right $ InR Null @@ -210,7 +212,7 @@ setupHscEnv ideState fp pm = do hscEnvEq <- runActionE "expandTHSplice.fallback.ghcSessionDeps" ideState $ useE GhcSessionDeps fp let ps = annotateParsedSource pm - hscEnv0 = hscEnvWithImportPaths hscEnvEq + hscEnv0 = hscEnv hscEnvEq modSum = pm_mod_summary pm hscEnv <- liftIO $ setupDynFlagsForGHCiLike hscEnv0 $ ms_hspp_opts modSum pure (ps, hscEnv, hsc_dflags hscEnv) @@ -232,7 +234,7 @@ setupDynFlagsForGHCiLike env dflags = do `gopt_set` Opt_IgnoreOptimChanges `gopt_set` Opt_IgnoreHpcChanges `gopt_unset` Opt_DiagnosticsShowCaret - initializePlugins (hscSetFlags dflags4 env) + Loader.initializePlugins (hscSetFlags dflags4 env) adjustToRange :: Uri -> Range -> WorkspaceEdit -> WorkspaceEdit adjustToRange uri ran (WorkspaceEdit mhult mlt x) = @@ -464,7 +466,7 @@ unRenamedE dflags expr = do data SearchResult r = Continue | Stop | Here r - deriving (Read, Show, Eq, Ord, Data, Typeable) + deriving (Read, Show, Eq, Ord, Data) fromSearchResult :: SearchResult a -> Maybe a fromSearchResult (Here r) = Just r @@ -474,7 +476,7 @@ fromSearchResult _ = Nothing -- TODO: Declaration Splices won't appear in HieAst; perhaps we must just use Parsed/Renamed ASTs? codeAction :: PluginMethodHandler IdeState Method_TextDocumentCodeAction codeAction state plId (CodeActionParams _ _ docId ran _) = do - verTxtDocId <- lift $ pluginGetVersionedTextDoc docId + verTxtDocId <- liftIO $ runAction "splice.codeAction.getVersionedTextDoc" state $ getVersionedTextDoc docId liftIO $ fmap (fromMaybe ( InL [])) $ runMaybeT $ do fp <- MaybeT $ pure $ uriToNormalizedFilePath $ toNormalizedUri theUri diff --git a/plugins/hls-stan-plugin/src/Ide/Plugin/Stan.hs b/plugins/hls-stan-plugin/src/Ide/Plugin/Stan.hs index 757768a574..a1efb7f150 100644 --- a/plugins/hls-stan-plugin/src/Ide/Plugin/Stan.hs +++ b/plugins/hls-stan-plugin/src/Ide/Plugin/Stan.hs @@ -187,17 +187,18 @@ rules recorder plId = do "Possible solutions:" ] ++ map (" - " <>) (inspectionSolution inspection) - return ( file, - ShowDiag, - LSP.Diagnostic - { _range = realSrcSpanToRange observationSrcSpan, - _severity = Just LSP.DiagnosticSeverity_Hint, - _code = Just (LSP.InR $ unId (inspectionId inspection)), - _source = Just "stan", - _message = message, - _relatedInformation = Nothing, - _tags = Nothing, - _codeDescription = Nothing, - _data_ = Nothing - } - ) + return $ + ideErrorFromLspDiag + LSP.Diagnostic + { _range = realSrcSpanToRange observationSrcSpan, + _severity = Just LSP.DiagnosticSeverity_Hint, + _code = Just (LSP.InR $ unId (inspectionId inspection)), + _source = Just "stan", + _message = message, + _relatedInformation = Nothing, + _tags = Nothing, + _codeDescription = Nothing, + _data_ = Nothing + } + file + Nothing diff --git a/plugins/hls-stylish-haskell-plugin/src/Ide/Plugin/StylishHaskell.hs b/plugins/hls-stylish-haskell-plugin/src/Ide/Plugin/StylishHaskell.hs index a862e57fb8..767cc061df 100644 --- a/plugins/hls-stylish-haskell-plugin/src/Ide/Plugin/StylishHaskell.hs +++ b/plugins/hls-stylish-haskell-plugin/src/Ide/Plugin/StylishHaskell.hs @@ -79,10 +79,15 @@ provider recorder ide _token typ contents fp _opts = do -- If no such file has been found, return 
default config. loadConfigFrom :: FilePath -> IO Config loadConfigFrom file = do +#if MIN_VERSION_stylish_haskell(0,15,0) + let configSearchStrategy = SearchFromDirectory (takeDirectory file) + config <- loadConfig (makeVerbose False) configSearchStrategy +#else currDir <- getCurrentDirectory setCurrentDirectory (takeDirectory file) config <- loadConfig (makeVerbose False) Nothing setCurrentDirectory currDir +#endif pure config -- | Run stylish-haskell on the given text with the given configuration. diff --git a/scripts/release/create-yaml-snippet.sh b/scripts/release/create-yaml-snippet.sh index 2fb7413f82..ed0cd6681b 100644 --- a/scripts/release/create-yaml-snippet.sh +++ b/scripts/release/create-yaml-snippet.sh @@ -34,26 +34,26 @@ cat < /dev/stdout dlHash: $(sha256sum "haskell-language-server-$RELEASE-x86_64-linux-deb11.tar.xz" | awk '{ print $1 }') Linux_Ubuntu: '( >= 16 && < 19 )': &hls-${RELEASE//./}-64-ubuntu18 - dlUri: https://downloads.haskell.org/~hls/haskell-language-server-$RELEASE/haskell-language-server-$RELEASE-x86_64-linux-ubuntu18.04.tar.xz + dlUri: https://downloads.haskell.org/~hls/haskell-language-server-$RELEASE/haskell-language-server-$RELEASE-x86_64-linux-ubuntu1804.tar.xz dlSubdir: haskell-language-server-$RELEASE - dlHash: $(sha256sum "haskell-language-server-$RELEASE-x86_64-linux-ubuntu18.04.tar.xz" | awk '{ print $1 }') + dlHash: $(sha256sum "haskell-language-server-$RELEASE-x86_64-linux-ubuntu1804.tar.xz" | awk '{ print $1 }') '( >= 20 && < 22 )': &hls-${RELEASE//./}-64-ubuntu20 - dlUri: https://downloads.haskell.org/~hls/haskell-language-server-$RELEASE/haskell-language-server-$RELEASE-x86_64-linux-ubuntu20.04.tar.xz + dlUri: https://downloads.haskell.org/~hls/haskell-language-server-$RELEASE/haskell-language-server-$RELEASE-x86_64-linux-ubuntu2004.tar.xz dlSubdir: haskell-language-server-$RELEASE - dlHash: $(sha256sum "haskell-language-server-$RELEASE-x86_64-linux-ubuntu20.04.tar.xz" | awk '{ print $1 }') + dlHash: $(sha256sum "haskell-language-server-$RELEASE-x86_64-linux-ubuntu2004.tar.xz" | awk '{ print $1 }') unknown_versioning: &hls-${RELEASE//./}-64-ubuntu22 - dlUri: https://downloads.haskell.org/~hls/haskell-language-server-$RELEASE/haskell-language-server-$RELEASE-x86_64-linux-ubuntu22.04.tar.xz + dlUri: https://downloads.haskell.org/~hls/haskell-language-server-$RELEASE/haskell-language-server-$RELEASE-x86_64-linux-ubuntu2204.tar.xz dlSubdir: haskell-language-server-$RELEASE - dlHash: $(sha256sum "haskell-language-server-$RELEASE-x86_64-linux-ubuntu22.04.tar.xz" | awk '{ print $1 }') + dlHash: $(sha256sum "haskell-language-server-$RELEASE-x86_64-linux-ubuntu2204.tar.xz" | awk '{ print $1 }') Linux_Mint: '< 20': - dlUri: https://downloads.haskell.org/~hls/haskell-language-server-$RELEASE/haskell-language-server-$RELEASE-x86_64-linux-mint19.3.tar.xz + dlUri: https://downloads.haskell.org/~hls/haskell-language-server-$RELEASE/haskell-language-server-$RELEASE-x86_64-linux-mint193.tar.xz dlSubdir: haskell-language-server-$RELEASE - dlHash: $(sha256sum "haskell-language-server-$RELEASE-x86_64-linux-mint19.3.tar.xz" | awk '{ print $1 }') + dlHash: $(sha256sum "haskell-language-server-$RELEASE-x86_64-linux-mint193.tar.xz" | awk '{ print $1 }') '(>= 20 && < 21)': - dlUri: https://downloads.haskell.org/~hls/haskell-language-server-$RELEASE/haskell-language-server-$RELEASE-x86_64-linux-mint20.2.tar.xz + dlUri: https://downloads.haskell.org/~hls/haskell-language-server-$RELEASE/haskell-language-server-$RELEASE-x86_64-linux-mint202.tar.xz dlSubdir: 
haskell-language-server-$RELEASE - dlHash: $(sha256sum "haskell-language-server-$RELEASE-x86_64-linux-mint20.2.tar.xz" | awk '{ print $1 }') + dlHash: $(sha256sum "haskell-language-server-$RELEASE-x86_64-linux-mint202.tar.xz" | awk '{ print $1 }') '>= 21': *hls-${RELEASE//./}-64-ubuntu22 Linux_Fedora: '< 33': &hls-${RELEASE//./}-64-fedora27 @@ -95,9 +95,9 @@ cat < /dev/stdout A_ARM64: Linux_UnknownLinux: unknown_versioning: - dlUri: https://downloads.haskell.org/~hls/haskell-language-server-$RELEASE/haskell-language-server-$RELEASE-aarch64-linux-ubuntu20.tar.xz + dlUri: https://downloads.haskell.org/~hls/haskell-language-server-$RELEASE/haskell-language-server-$RELEASE-aarch64-linux-ubuntu2004.tar.xz dlSubdir: haskell-language-server-$RELEASE - dlHash: $(sha256sum "haskell-language-server-$RELEASE-aarch64-linux-ubuntu20.tar.xz" | awk '{ print $1 }') + dlHash: $(sha256sum "haskell-language-server-$RELEASE-aarch64-linux-ubuntu2004.tar.xz" | awk '{ print $1 }') Darwin: unknown_versioning: dlUri: https://downloads.haskell.org/~hls/haskell-language-server-$RELEASE/haskell-language-server-$RELEASE-aarch64-apple-darwin.tar.xz diff --git a/shake-bench/shake-bench.cabal b/shake-bench/shake-bench.cabal index d5852a6310..c381089aba 100644 --- a/shake-bench/shake-bench.cabal +++ b/shake-bench/shake-bench.cabal @@ -16,7 +16,7 @@ source-repository head location: https://github.com/haskell/haskell-language-server.git library - if impl(ghc >= 9.10) + if impl(ghc > 9.11) buildable: False exposed-modules: Development.Benchmark.Rules hs-source-dirs: src diff --git a/shake-bench/src/Development/Benchmark/Rules.hs b/shake-bench/src/Development/Benchmark/Rules.hs index 98cfd717d2..8ba2b3f0df 100644 --- a/shake-bench/src/Development/Benchmark/Rules.hs +++ b/shake-bench/src/Development/Benchmark/Rules.hs @@ -131,7 +131,7 @@ type RuleResultForExample e = , IsExample e) data Configuration = Configuration {confName :: String, confValue :: ByteString} - deriving (Binary, Eq, Generic, Hashable, NFData, Show, Typeable) + deriving (Binary, Eq, Generic, Hashable, NFData, Show) type instance RuleResult GetConfigurations = [Configuration] -- | Knowledge needed to run an example @@ -535,7 +535,7 @@ heapProfileRules build = do build -/- "*/*/*/*/*.heap.svg" %> \out -> do let hpFile = dropExtension2 out <.> "hp" need [hpFile] - cmd_ ("hp2pretty" :: String) [hpFile] + cmd_ ("eventlog2html" :: String) ["--heap-profile", hpFile] liftIO $ renameFile (dropExtension hpFile <.> "svg") out dropExtension2 :: FilePath -> FilePath diff --git a/src/HlsPlugins.hs b/src/HlsPlugins.hs index f08ae187cd..87a1af7392 100644 --- a/src/HlsPlugins.hs +++ b/src/HlsPlugins.hs @@ -152,6 +152,7 @@ idePlugins recorder = pluginDescToIdePlugins allPlugins allPlugins = #if hls_cabal let pId = "cabal" in Cabal.descriptor (pluginRecorder pId) pId : + let caId = "cabalHaskellIntegration" in Cabal.haskellInteractionDescriptor (pluginRecorder caId) caId : #endif #if hls_pragmas Pragmas.suggestPragmaDescriptor "pragmas-suggest" : diff --git a/src/Ide/Arguments.hs b/src/Ide/Arguments.hs index e07e059c8e..733da2e557 100644 --- a/src/Ide/Arguments.hs +++ b/src/Ide/Arguments.hs @@ -3,7 +3,6 @@ {-# LANGUAGE CPP #-} {-# LANGUAGE RecordWildCards #-} {-# LANGUAGE TemplateHaskell #-} -{-# OPTIONS_GHC -Wno-dodgy-imports #-} -- GHC no longer exports def in GHC 8.6 and above module Ide.Arguments ( Arguments(..) 
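A note on the Typeable clean-ups that recur throughout this patch (RuleTypes.hs, Retrie.hs, the semantic-tokens Types.hs, Splice.hs, the shake-bench Rules.hs above, and the functional Config.hs test): since GHC 7.10 the compiler derives Typeable for every type automatically, so an explicit Typeable in a deriving clause, together with the import Data.Typeable (Typeable) that supported it, is redundant and can be dropped without changing behaviour. A minimal sketch of the shape these rule keys are left with, using a hypothetical GetExampleThing type that is not part of the patch:

{-# LANGUAGE DeriveGeneric #-}
module Example where

import Control.DeepSeq (NFData)
import Data.Hashable   (Hashable)
import GHC.Generics    (Generic)

-- Eq/Show/Generic are the only derivations that still need spelling out;
-- Typeable is derived automatically by GHC, and Hashable/NFData fall back
-- to their Generic-based default method implementations.
data GetExampleThing = GetExampleThing
    deriving (Eq, Show, Generic)

instance Hashable GetExampleThing
instance NFData GetExampleThing

This mirrors what the patch leaves behind for rule keys such as GetSemanticTokens and GetTestDiagnostics.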
diff --git a/src/Ide/Main.hs b/src/Ide/Main.hs index cbe3f33bb3..33b1d51a11 100644 --- a/src/Ide/Main.hs +++ b/src/Ide/Main.hs @@ -1,7 +1,6 @@ -- Copyright (c) 2019 The DAML Authors. All rights reserved. -- SPDX-License-Identifier: Apache-2.0 {-# LANGUAGE CPP #-} -{-# OPTIONS_GHC -Wno-dodgy-imports #-} -- GHC no longer exports def in GHC 8.6 and above {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE RecordWildCards #-} {-# LANGUAGE TypeFamilies #-} @@ -18,7 +17,7 @@ import Data.Text (Text) import qualified Data.Text as T import Data.Text.Lazy.Encoding (decodeUtf8) import qualified Data.Text.Lazy.IO as LT -import Development.IDE.Core.Rules hiding (Log, logToPriority) +import Development.IDE.Core.Rules hiding (Log) import Development.IDE.Core.Tracing (withTelemetryRecorder) import Development.IDE.Main (isLSP) import qualified Development.IDE.Main as IDEMain @@ -89,7 +88,7 @@ defaultMain recorder args idePlugins = do $ map describePlugin $ sortOn pluginId $ ipMap idePlugins - putStrLn $ show pluginSummary + print pluginSummary BiosMode PrintCradleType -> do dir <- IO.getCurrentDirectory diff --git a/stack-lts22.yaml b/stack-lts22.yaml index 80007a898c..8c5ba4364c 100644 --- a/stack-lts22.yaml +++ b/stack-lts22.yaml @@ -1,4 +1,4 @@ -resolver: lts-22.25 # ghc-9.6.5 +resolver: lts-22.43 # ghc-9.6.6 packages: - . @@ -19,8 +19,8 @@ allow-newer-deps: extra-deps: - Diff-0.5 - floskell-0.11.1 - - hiedb-0.6.0.1 - - hie-bios-0.14.0 + - hiedb-0.6.0.2 + - hie-bios-0.15.0 - implicit-hie-0.1.4.0 - lsp-2.7.0.0 - lsp-test-0.17.1.0 @@ -37,6 +37,9 @@ extra-deps: - trial-optparse-applicative-0.0.0.0 - trial-tomland-0.0.0.0 - validation-selective-0.2.0.0 + - cabal-add-0.1 + - cabal-install-parsers-0.6.1.1 + configure-options: ghcide: @@ -51,6 +54,8 @@ flags: ghc-lib: true retrie: BuildExecutable: false + cabal-add: + cabal-syntax: true nix: packages: [icu libcxx zlib] diff --git a/stack.yaml b/stack.yaml index 8df73f646b..085de85f97 100644 --- a/stack.yaml +++ b/stack.yaml @@ -1,4 +1,4 @@ -resolver: nightly-2024-06-12 # ghc-9.8.2 +resolver: lts-23.18 # ghc-9.8.4 packages: - . 
@@ -20,15 +20,11 @@ allow-newer-deps: extra-deps: - floskell-0.11.1 - - hiedb-0.6.0.1 - - hie-bios-0.14.0 + - hiedb-0.6.0.2 - implicit-hie-0.1.4.0 + - hie-bios-0.15.0 - hw-fingertree-0.1.2.1 - - lsp-2.7.0.0 - - lsp-test-0.17.1.0 - - lsp-types-2.3.0.0 - monad-dijkstra-0.1.1.5 - - stylish-haskell-0.14.6.0 - retrie-1.2.3 # stan dependencies not found in the stackage snapshot @@ -52,6 +48,8 @@ flags: ghc-lib: true retrie: BuildExecutable: false + cabal-add: + cabal-syntax: true nix: packages: [icu libcxx zlib] diff --git a/test/functional/Config.hs b/test/functional/Config.hs index 9d11cff3a5..874792784f 100644 --- a/test/functional/Config.hs +++ b/test/functional/Config.hs @@ -8,12 +8,11 @@ import Control.Monad import Data.Hashable import qualified Data.HashMap.Strict as HM import qualified Data.Map as Map -import Data.Typeable (Typeable) import Development.IDE (RuleResult, action, define, getFilesOfInterestUntracked, getPluginConfigAction, ideErrorText, uses_) -import Development.IDE.Test (expectDiagnostics) +import Development.IDE.Test (ExpectedDiagnostic, expectDiagnostics) import GHC.Generics import Ide.Plugin.Config import Ide.Types @@ -43,13 +42,15 @@ genericConfigTests = testGroup "generic plugin config" setHlsConfig $ changeConfig "someplugin" def{plcHoverOn = False} -- getting only the expected diagnostics means the plugin wasn't enabled expectDiagnostics standardDiagnostics - , expectFailBecause "partial config is not supported" $ - testCase "custom defaults and non overlapping user config" $ runConfigSession "diagnostics" $ do + -- TODO: Partial config is not supported + , testCase "custom defaults and non overlapping user config" $ runConfigSession "diagnostics" $ do _doc <- createDoc "Foo.hs" "haskell" "module Foo where\nfoo = False" -- test that the user config doesn't accidentally override the initial config setHlsConfig $ changeConfig testPluginId def{plcHoverOn = False} -- getting only the expected diagnostics means the plugin wasn't enabled - expectDiagnostics standardDiagnostics + expectDiagnosticsFail + (BrokenIdeal standardDiagnostics) + (BrokenCurrent testPluginDiagnostics) , testCase "custom defaults and overlapping user plugin config" $ runConfigSession "diagnostics" $ do _doc <- createDoc "Foo.hs" "haskell" "module Foo where\nfoo = False" -- test that the user config overrides the default initial config @@ -64,8 +65,8 @@ genericConfigTests = testGroup "generic plugin config" expectDiagnostics standardDiagnostics ] where - standardDiagnostics = [("Foo.hs", [(DiagnosticSeverity_Warning, (1,0), "Top-level binding")])] - testPluginDiagnostics = [("Foo.hs", [(DiagnosticSeverity_Error, (0,0), "testplugin")])] + standardDiagnostics = [("Foo.hs", [(DiagnosticSeverity_Warning, (1,0), "Top-level binding", Nothing)])] + testPluginDiagnostics = [("Foo.hs", [(DiagnosticSeverity_Error, (0,0), "testplugin", Nothing)])] runConfigSession subdir session = do failIfSessionTimeout $ @@ -100,7 +101,14 @@ genericConfigTests = testGroup "generic plugin config" data GetTestDiagnostics = GetTestDiagnostics - deriving (Eq, Show, Typeable, Generic) + deriving (Eq, Show, Generic) instance Hashable GetTestDiagnostics instance NFData GetTestDiagnostics type instance RuleResult GetTestDiagnostics = () + +expectDiagnosticsFail + :: HasCallStack + => ExpectBroken 'Ideal [(FilePath, [ExpectedDiagnostic])] + -> ExpectBroken 'Current [(FilePath, [ExpectedDiagnostic])] + -> Session () +expectDiagnosticsFail _ = expectDiagnostics . 
unCurrent diff --git a/test/functional/Main.hs b/test/functional/Main.hs index 7adf499c05..daa342f694 100644 --- a/test/functional/Main.hs +++ b/test/functional/Main.hs @@ -12,8 +12,8 @@ main :: IO () main = defaultTestRunner $ testGroup "haskell-language-server" [ Config.tests , ConfigSchema.tests - , ignoreInEnv [HostOS Windows, GhcVer GHC92] "Tests gets stuck in ci" $ Format.tests + , ignoreInEnv [HostOS Windows] "Tests gets stuck in ci" Format.tests , FunctionalBadProject.tests , HieBios.tests - , ignoreInEnv [HostOS Windows] "Tests gets stuck in ci" $ Progress.tests + , ignoreInEnv [HostOS Windows] "Tests gets stuck in ci" Progress.tests ] diff --git a/test/testdata/schema/ghc910/default-config.golden.json b/test/testdata/schema/ghc910/default-config.golden.json new file mode 100644 index 0000000000..186a90aa3e --- /dev/null +++ b/test/testdata/schema/ghc910/default-config.golden.json @@ -0,0 +1,151 @@ +{ + "cabalFormattingProvider": "cabal-gild", + "checkParents": "CheckOnSave", + "checkProject": true, + "formattingProvider": "ormolu", + "maxCompletions": 40, + "plugin": { + "alternateNumberFormat": { + "globalOn": true + }, + "cabal": { + "codeActionsOn": true, + "completionOn": true, + "diagnosticsOn": true, + "hoverOn": true, + "symbolsOn": true + }, + "cabal-fmt": { + "config": { + "path": "cabal-fmt" + } + }, + "cabal-gild": { + "config": { + "path": "cabal-gild" + } + }, + "cabalHaskellIntegration": { + "globalOn": true + }, + "callHierarchy": { + "globalOn": true + }, + "changeTypeSignature": { + "globalOn": true + }, + "class": { + "codeActionsOn": true, + "codeLensOn": true + }, + "eval": { + "codeActionsOn": true, + "codeLensOn": true, + "config": { + "diff": true, + "exception": false + } + }, + "explicit-fields": { + "codeActionsOn": true, + "inlayHintsOn": true + }, + "explicit-fixity": { + "globalOn": true + }, + "fourmolu": { + "config": { + "external": false, + "path": "fourmolu" + } + }, + "gadt": { + "globalOn": true + }, + "ghcide-code-actions-bindings": { + "globalOn": true + }, + "ghcide-code-actions-fill-holes": { + "globalOn": true + }, + "ghcide-code-actions-imports-exports": { + "globalOn": true + }, + "ghcide-code-actions-type-signatures": { + "globalOn": true + }, + "ghcide-completions": { + "config": { + "autoExtendOn": true, + "snippetsOn": true + }, + "globalOn": true + }, + "ghcide-hover-and-symbols": { + "hoverOn": true, + "symbolsOn": true + }, + "ghcide-type-lenses": { + "config": { + "mode": "always" + }, + "globalOn": true + }, + "importLens": { + "codeActionsOn": true, + "codeLensOn": true, + "inlayHintsOn": true + }, + "moduleName": { + "globalOn": true + }, + "ormolu": { + "config": { + "external": false + } + }, + "overloaded-record-dot": { + "globalOn": true + }, + "pragmas-completion": { + "globalOn": true + }, + "pragmas-disable": { + "globalOn": true + }, + "pragmas-suggest": { + "globalOn": true + }, + "qualifyImportedNames": { + "globalOn": true + }, + "rename": { + "config": { + "crossModule": false + }, + "globalOn": true + }, + "semanticTokens": { + "config": { + "classMethodToken": "method", + "classToken": "class", + "dataConstructorToken": "enumMember", + "functionToken": "function", + "moduleToken": "namespace", + "operatorToken": "operator", + "patternSynonymToken": "macro", + "recordFieldToken": "property", + "typeConstructorToken": "enum", + "typeFamilyToken": "interface", + "typeSynonymToken": "type", + "typeVariableToken": "typeParameter", + "variableToken": "variable" + }, + "globalOn": false + }, + "stan": { + "globalOn": 
false + } + }, + "sessionLoading": "singleComponent" +} diff --git a/test/testdata/schema/ghc92/vscode-extension-schema.golden.json b/test/testdata/schema/ghc910/vscode-extension-schema.golden.json similarity index 96% rename from test/testdata/schema/ghc92/vscode-extension-schema.golden.json rename to test/testdata/schema/ghc910/vscode-extension-schema.golden.json index 027fe77b5a..3220003494 100644 --- a/test/testdata/schema/ghc92/vscode-extension-schema.golden.json +++ b/test/testdata/schema/ghc910/vscode-extension-schema.golden.json @@ -35,6 +35,24 @@ "scope": "resource", "type": "boolean" }, + "haskell.plugin.cabal.hoverOn": { + "default": true, + "description": "Enables cabal hover", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.cabal.symbolsOn": { + "default": true, + "description": "Enables cabal symbols", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.cabalHaskellIntegration.globalOn": { + "default": true, + "description": "Enables cabalHaskellIntegration plugin", + "scope": "resource", + "type": "boolean" + }, "haskell.plugin.callHierarchy.globalOn": { "default": true, "description": "Enables callHierarchy plugin", @@ -59,6 +77,18 @@ "scope": "resource", "type": "boolean" }, + "haskell.plugin.eval.codeActionsOn": { + "default": true, + "description": "Enables eval code actions", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.eval.codeLensOn": { + "default": true, + "description": "Enables eval code lenses", + "scope": "resource", + "type": "boolean" + }, "haskell.plugin.eval.config.diff": { "default": true, "markdownDescription": "Enable the diff output (WAS/NOW) of eval lenses", @@ -71,15 +101,15 @@ "scope": "resource", "type": "boolean" }, - "haskell.plugin.eval.globalOn": { + "haskell.plugin.explicit-fields.codeActionsOn": { "default": true, - "description": "Enables eval plugin", + "description": "Enables explicit-fields code actions", "scope": "resource", "type": "boolean" }, - "haskell.plugin.explicit-fields.globalOn": { + "haskell.plugin.explicit-fields.inlayHintsOn": { "default": true, - "description": "Enables explicit-fields plugin", + "description": "Enables explicit-fields inlay hints", "scope": "resource", "type": "boolean" }, @@ -183,24 +213,6 @@ "scope": "resource", "type": "boolean" }, - "haskell.plugin.hlint.codeActionsOn": { - "default": true, - "description": "Enables hlint code actions", - "scope": "resource", - "type": "boolean" - }, - "haskell.plugin.hlint.config.flags": { - "default": [], - "markdownDescription": "Flags used by hlint", - "scope": "resource", - "type": "array" - }, - "haskell.plugin.hlint.diagnosticsOn": { - "default": true, - "description": "Enables hlint diagnostics", - "scope": "resource", - "type": "boolean" - }, "haskell.plugin.importLens.codeActionsOn": { "default": true, "description": "Enables importLens code actions", @@ -213,6 +225,12 @@ "scope": "resource", "type": "boolean" }, + "haskell.plugin.importLens.inlayHintsOn": { + "default": true, + "description": "Enables importLens inlay hints", + "scope": "resource", + "type": "boolean" + }, "haskell.plugin.moduleName.globalOn": { "default": true, "description": "Enables moduleName plugin", @@ -267,12 +285,6 @@ "scope": "resource", "type": "boolean" }, - "haskell.plugin.retrie.globalOn": { - "default": true, - "description": "Enables retrie plugin", - "scope": "resource", - "type": "boolean" - }, "haskell.plugin.semanticTokens.config.classMethodToken": { "default": "method", "description": "LSP semantic token type to use for 
typeclass methods", @@ -1007,9 +1019,9 @@ "scope": "resource", "type": "boolean" }, - "haskell.plugin.splice.globalOn": { - "default": true, - "description": "Enables splice plugin", + "haskell.plugin.stan.globalOn": { + "default": false, + "description": "Enables stan plugin", "scope": "resource", "type": "boolean" } diff --git a/test/testdata/schema/ghc92/default-config.golden.json b/test/testdata/schema/ghc912/default-config.golden.json similarity index 91% rename from test/testdata/schema/ghc92/default-config.golden.json rename to test/testdata/schema/ghc912/default-config.golden.json index be1a256f97..0dfbd39df2 100644 --- a/test/testdata/schema/ghc92/default-config.golden.json +++ b/test/testdata/schema/ghc912/default-config.golden.json @@ -11,7 +11,9 @@ "cabal": { "codeActionsOn": true, "completionOn": true, - "diagnosticsOn": true + "diagnosticsOn": true, + "hoverOn": true, + "symbolsOn": true }, "cabal-fmt": { "config": { @@ -23,6 +25,9 @@ "path": "cabal-gild" } }, + "cabalHaskellIntegration": { + "globalOn": true + }, "callHierarchy": { "globalOn": true }, @@ -34,14 +39,16 @@ "codeLensOn": true }, "eval": { + "codeActionsOn": true, + "codeLensOn": true, "config": { "diff": true, "exception": false - }, - "globalOn": true + } }, "explicit-fields": { - "globalOn": true + "codeActionsOn": true, + "inlayHintsOn": true }, "explicit-fixity": { "globalOn": true @@ -93,7 +100,8 @@ }, "importLens": { "codeActionsOn": true, - "codeLensOn": true + "codeLensOn": true, + "inlayHintsOn": true }, "moduleName": { "globalOn": true @@ -124,9 +132,6 @@ }, "globalOn": true }, - "retrie": { - "globalOn": true - }, "semanticTokens": { "config": { "classMethodToken": "method", @@ -144,9 +149,6 @@ "variableToken": "variable" }, "globalOn": false - }, - "splice": { - "globalOn": true } }, "sessionLoading": "singleComponent" diff --git a/test/testdata/schema/ghc912/vscode-extension-schema.golden.json b/test/testdata/schema/ghc912/vscode-extension-schema.golden.json new file mode 100644 index 0000000000..77d398438e --- /dev/null +++ b/test/testdata/schema/ghc912/vscode-extension-schema.golden.json @@ -0,0 +1,1040 @@ +{ + "haskell.plugin.alternateNumberFormat.globalOn": { + "default": true, + "description": "Enables alternateNumberFormat plugin", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.cabal-fmt.config.path": { + "default": "cabal-fmt", + "markdownDescription": "Set path to 'cabal-fmt' executable", + "scope": "resource", + "type": "string" + }, + "haskell.plugin.cabal-gild.config.path": { + "default": "cabal-gild", + "markdownDescription": "Set path to 'cabal-gild' executable", + "scope": "resource", + "type": "string" + }, + "haskell.plugin.cabal.codeActionsOn": { + "default": true, + "description": "Enables cabal code actions", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.cabal.completionOn": { + "default": true, + "description": "Enables cabal completions", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.cabal.diagnosticsOn": { + "default": true, + "description": "Enables cabal diagnostics", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.cabal.hoverOn": { + "default": true, + "description": "Enables cabal hover", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.cabal.symbolsOn": { + "default": true, + "description": "Enables cabal symbols", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.cabalHaskellIntegration.globalOn": { + "default": true, + "description": "Enables 
cabalHaskellIntegration plugin", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.callHierarchy.globalOn": { + "default": true, + "description": "Enables callHierarchy plugin", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.changeTypeSignature.globalOn": { + "default": true, + "description": "Enables changeTypeSignature plugin", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.class.codeActionsOn": { + "default": true, + "description": "Enables class code actions", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.class.codeLensOn": { + "default": true, + "description": "Enables class code lenses", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.eval.codeActionsOn": { + "default": true, + "description": "Enables eval code actions", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.eval.codeLensOn": { + "default": true, + "description": "Enables eval code lenses", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.eval.config.diff": { + "default": true, + "markdownDescription": "Enable the diff output (WAS/NOW) of eval lenses", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.eval.config.exception": { + "default": false, + "markdownDescription": "Enable marking exceptions with `*** Exception:` similarly to doctest and GHCi.", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.explicit-fields.codeActionsOn": { + "default": true, + "description": "Enables explicit-fields code actions", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.explicit-fields.inlayHintsOn": { + "default": true, + "description": "Enables explicit-fields inlay hints", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.explicit-fixity.globalOn": { + "default": true, + "description": "Enables explicit-fixity plugin", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.fourmolu.config.external": { + "default": false, + "markdownDescription": "Call out to an external \"fourmolu\" executable, rather than using the bundled library.", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.fourmolu.config.path": { + "default": "fourmolu", + "markdownDescription": "Set path to executable (for \"external\" mode).", + "scope": "resource", + "type": "string" + }, + "haskell.plugin.gadt.globalOn": { + "default": true, + "description": "Enables gadt plugin", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.ghcide-code-actions-bindings.globalOn": { + "default": true, + "description": "Enables ghcide-code-actions-bindings plugin", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.ghcide-code-actions-fill-holes.globalOn": { + "default": true, + "description": "Enables ghcide-code-actions-fill-holes plugin", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.ghcide-code-actions-imports-exports.globalOn": { + "default": true, + "description": "Enables ghcide-code-actions-imports-exports plugin", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.ghcide-code-actions-type-signatures.globalOn": { + "default": true, + "description": "Enables ghcide-code-actions-type-signatures plugin", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.ghcide-completions.config.autoExtendOn": { + "default": true, + "markdownDescription": "Extends the import list automatically when completing a out-of-scope identifier", + "scope": "resource", + "type": "boolean" + }, + 
"haskell.plugin.ghcide-completions.config.snippetsOn": { + "default": true, + "markdownDescription": "Inserts snippets when using code completions", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.ghcide-completions.globalOn": { + "default": true, + "description": "Enables ghcide-completions plugin", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.ghcide-hover-and-symbols.hoverOn": { + "default": true, + "description": "Enables ghcide-hover-and-symbols hover", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.ghcide-hover-and-symbols.symbolsOn": { + "default": true, + "description": "Enables ghcide-hover-and-symbols symbols", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.ghcide-type-lenses.config.mode": { + "default": "always", + "description": "Control how type lenses are shown", + "enum": [ + "always", + "exported", + "diagnostics" + ], + "enumDescriptions": [ + "Always displays type lenses of global bindings", + "Only display type lenses of exported global bindings", + "Follows error messages produced by GHC about missing signatures" + ], + "scope": "resource", + "type": "string" + }, + "haskell.plugin.ghcide-type-lenses.globalOn": { + "default": true, + "description": "Enables ghcide-type-lenses plugin", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.hlint.codeActionsOn": { + "default": true, + "description": "Enables hlint code actions", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.hlint.config.flags": { + "default": [], + "markdownDescription": "Flags used by hlint", + "scope": "resource", + "type": "array" + }, + "haskell.plugin.hlint.diagnosticsOn": { + "default": true, + "description": "Enables hlint diagnostics", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.importLens.codeActionsOn": { + "default": true, + "description": "Enables importLens code actions", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.importLens.codeLensOn": { + "default": true, + "description": "Enables importLens code lenses", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.importLens.inlayHintsOn": { + "default": true, + "description": "Enables importLens inlay hints", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.moduleName.globalOn": { + "default": true, + "description": "Enables moduleName plugin", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.ormolu.config.external": { + "default": false, + "markdownDescription": "Call out to an external \"ormolu\" executable, rather than using the bundled library", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.overloaded-record-dot.globalOn": { + "default": true, + "description": "Enables overloaded-record-dot plugin", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.pragmas-completion.globalOn": { + "default": true, + "description": "Enables pragmas-completion plugin", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.pragmas-disable.globalOn": { + "default": true, + "description": "Enables pragmas-disable plugin", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.pragmas-suggest.globalOn": { + "default": true, + "description": "Enables pragmas-suggest plugin", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.qualifyImportedNames.globalOn": { + "default": true, + "description": "Enables qualifyImportedNames plugin", + "scope": "resource", + "type": "boolean" + }, + 
"haskell.plugin.rename.config.crossModule": { + "default": false, + "markdownDescription": "Enable experimental cross-module renaming", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.rename.globalOn": { + "default": true, + "description": "Enables rename plugin", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.semanticTokens.config.classMethodToken": { + "default": "method", + "description": "LSP semantic token type to use for typeclass methods", + "enum": [ + "namespace", + "type", + "class", + "enum", + "interface", + "struct", + "typeParameter", + "parameter", + "variable", + "property", + "enumMember", + "event", + "function", + "method", + "macro", + "keyword", + "modifier", + "comment", + "string", + "number", + "regexp", + "operator", + "decorator" + ], + "enumDescriptions": [ + "LSP Semantic Token Type: namespace", + "LSP Semantic Token Type: type", + "LSP Semantic Token Type: class", + "LSP Semantic Token Type: enum", + "LSP Semantic Token Type: interface", + "LSP Semantic Token Type: struct", + "LSP Semantic Token Type: typeParameter", + "LSP Semantic Token Type: parameter", + "LSP Semantic Token Type: variable", + "LSP Semantic Token Type: property", + "LSP Semantic Token Type: enumMember", + "LSP Semantic Token Type: event", + "LSP Semantic Token Type: function", + "LSP Semantic Token Type: method", + "LSP Semantic Token Type: macro", + "LSP Semantic Token Type: keyword", + "LSP Semantic Token Type: modifier", + "LSP Semantic Token Type: comment", + "LSP Semantic Token Type: string", + "LSP Semantic Token Type: number", + "LSP Semantic Token Type: regexp", + "LSP Semantic Token Type: operator", + "LSP Semantic Token Type: decorator" + ], + "scope": "resource", + "type": "string" + }, + "haskell.plugin.semanticTokens.config.classToken": { + "default": "class", + "description": "LSP semantic token type to use for typeclasses", + "enum": [ + "namespace", + "type", + "class", + "enum", + "interface", + "struct", + "typeParameter", + "parameter", + "variable", + "property", + "enumMember", + "event", + "function", + "method", + "macro", + "keyword", + "modifier", + "comment", + "string", + "number", + "regexp", + "operator", + "decorator" + ], + "enumDescriptions": [ + "LSP Semantic Token Type: namespace", + "LSP Semantic Token Type: type", + "LSP Semantic Token Type: class", + "LSP Semantic Token Type: enum", + "LSP Semantic Token Type: interface", + "LSP Semantic Token Type: struct", + "LSP Semantic Token Type: typeParameter", + "LSP Semantic Token Type: parameter", + "LSP Semantic Token Type: variable", + "LSP Semantic Token Type: property", + "LSP Semantic Token Type: enumMember", + "LSP Semantic Token Type: event", + "LSP Semantic Token Type: function", + "LSP Semantic Token Type: method", + "LSP Semantic Token Type: macro", + "LSP Semantic Token Type: keyword", + "LSP Semantic Token Type: modifier", + "LSP Semantic Token Type: comment", + "LSP Semantic Token Type: string", + "LSP Semantic Token Type: number", + "LSP Semantic Token Type: regexp", + "LSP Semantic Token Type: operator", + "LSP Semantic Token Type: decorator" + ], + "scope": "resource", + "type": "string" + }, + "haskell.plugin.semanticTokens.config.dataConstructorToken": { + "default": "enumMember", + "description": "LSP semantic token type to use for data constructors", + "enum": [ + "namespace", + "type", + "class", + "enum", + "interface", + "struct", + "typeParameter", + "parameter", + "variable", + "property", + "enumMember", + "event", + "function", + "method", + 
"macro", + "keyword", + "modifier", + "comment", + "string", + "number", + "regexp", + "operator", + "decorator" + ], + "enumDescriptions": [ + "LSP Semantic Token Type: namespace", + "LSP Semantic Token Type: type", + "LSP Semantic Token Type: class", + "LSP Semantic Token Type: enum", + "LSP Semantic Token Type: interface", + "LSP Semantic Token Type: struct", + "LSP Semantic Token Type: typeParameter", + "LSP Semantic Token Type: parameter", + "LSP Semantic Token Type: variable", + "LSP Semantic Token Type: property", + "LSP Semantic Token Type: enumMember", + "LSP Semantic Token Type: event", + "LSP Semantic Token Type: function", + "LSP Semantic Token Type: method", + "LSP Semantic Token Type: macro", + "LSP Semantic Token Type: keyword", + "LSP Semantic Token Type: modifier", + "LSP Semantic Token Type: comment", + "LSP Semantic Token Type: string", + "LSP Semantic Token Type: number", + "LSP Semantic Token Type: regexp", + "LSP Semantic Token Type: operator", + "LSP Semantic Token Type: decorator" + ], + "scope": "resource", + "type": "string" + }, + "haskell.plugin.semanticTokens.config.functionToken": { + "default": "function", + "description": "LSP semantic token type to use for functions", + "enum": [ + "namespace", + "type", + "class", + "enum", + "interface", + "struct", + "typeParameter", + "parameter", + "variable", + "property", + "enumMember", + "event", + "function", + "method", + "macro", + "keyword", + "modifier", + "comment", + "string", + "number", + "regexp", + "operator", + "decorator" + ], + "enumDescriptions": [ + "LSP Semantic Token Type: namespace", + "LSP Semantic Token Type: type", + "LSP Semantic Token Type: class", + "LSP Semantic Token Type: enum", + "LSP Semantic Token Type: interface", + "LSP Semantic Token Type: struct", + "LSP Semantic Token Type: typeParameter", + "LSP Semantic Token Type: parameter", + "LSP Semantic Token Type: variable", + "LSP Semantic Token Type: property", + "LSP Semantic Token Type: enumMember", + "LSP Semantic Token Type: event", + "LSP Semantic Token Type: function", + "LSP Semantic Token Type: method", + "LSP Semantic Token Type: macro", + "LSP Semantic Token Type: keyword", + "LSP Semantic Token Type: modifier", + "LSP Semantic Token Type: comment", + "LSP Semantic Token Type: string", + "LSP Semantic Token Type: number", + "LSP Semantic Token Type: regexp", + "LSP Semantic Token Type: operator", + "LSP Semantic Token Type: decorator" + ], + "scope": "resource", + "type": "string" + }, + "haskell.plugin.semanticTokens.config.moduleToken": { + "default": "namespace", + "description": "LSP semantic token type to use for modules", + "enum": [ + "namespace", + "type", + "class", + "enum", + "interface", + "struct", + "typeParameter", + "parameter", + "variable", + "property", + "enumMember", + "event", + "function", + "method", + "macro", + "keyword", + "modifier", + "comment", + "string", + "number", + "regexp", + "operator", + "decorator" + ], + "enumDescriptions": [ + "LSP Semantic Token Type: namespace", + "LSP Semantic Token Type: type", + "LSP Semantic Token Type: class", + "LSP Semantic Token Type: enum", + "LSP Semantic Token Type: interface", + "LSP Semantic Token Type: struct", + "LSP Semantic Token Type: typeParameter", + "LSP Semantic Token Type: parameter", + "LSP Semantic Token Type: variable", + "LSP Semantic Token Type: property", + "LSP Semantic Token Type: enumMember", + "LSP Semantic Token Type: event", + "LSP Semantic Token Type: function", + "LSP Semantic Token Type: method", + "LSP Semantic Token Type: 
macro", + "LSP Semantic Token Type: keyword", + "LSP Semantic Token Type: modifier", + "LSP Semantic Token Type: comment", + "LSP Semantic Token Type: string", + "LSP Semantic Token Type: number", + "LSP Semantic Token Type: regexp", + "LSP Semantic Token Type: operator", + "LSP Semantic Token Type: decorator" + ], + "scope": "resource", + "type": "string" + }, + "haskell.plugin.semanticTokens.config.operatorToken": { + "default": "operator", + "description": "LSP semantic token type to use for operators", + "enum": [ + "namespace", + "type", + "class", + "enum", + "interface", + "struct", + "typeParameter", + "parameter", + "variable", + "property", + "enumMember", + "event", + "function", + "method", + "macro", + "keyword", + "modifier", + "comment", + "string", + "number", + "regexp", + "operator", + "decorator" + ], + "enumDescriptions": [ + "LSP Semantic Token Type: namespace", + "LSP Semantic Token Type: type", + "LSP Semantic Token Type: class", + "LSP Semantic Token Type: enum", + "LSP Semantic Token Type: interface", + "LSP Semantic Token Type: struct", + "LSP Semantic Token Type: typeParameter", + "LSP Semantic Token Type: parameter", + "LSP Semantic Token Type: variable", + "LSP Semantic Token Type: property", + "LSP Semantic Token Type: enumMember", + "LSP Semantic Token Type: event", + "LSP Semantic Token Type: function", + "LSP Semantic Token Type: method", + "LSP Semantic Token Type: macro", + "LSP Semantic Token Type: keyword", + "LSP Semantic Token Type: modifier", + "LSP Semantic Token Type: comment", + "LSP Semantic Token Type: string", + "LSP Semantic Token Type: number", + "LSP Semantic Token Type: regexp", + "LSP Semantic Token Type: operator", + "LSP Semantic Token Type: decorator" + ], + "scope": "resource", + "type": "string" + }, + "haskell.plugin.semanticTokens.config.patternSynonymToken": { + "default": "macro", + "description": "LSP semantic token type to use for pattern synonyms", + "enum": [ + "namespace", + "type", + "class", + "enum", + "interface", + "struct", + "typeParameter", + "parameter", + "variable", + "property", + "enumMember", + "event", + "function", + "method", + "macro", + "keyword", + "modifier", + "comment", + "string", + "number", + "regexp", + "operator", + "decorator" + ], + "enumDescriptions": [ + "LSP Semantic Token Type: namespace", + "LSP Semantic Token Type: type", + "LSP Semantic Token Type: class", + "LSP Semantic Token Type: enum", + "LSP Semantic Token Type: interface", + "LSP Semantic Token Type: struct", + "LSP Semantic Token Type: typeParameter", + "LSP Semantic Token Type: parameter", + "LSP Semantic Token Type: variable", + "LSP Semantic Token Type: property", + "LSP Semantic Token Type: enumMember", + "LSP Semantic Token Type: event", + "LSP Semantic Token Type: function", + "LSP Semantic Token Type: method", + "LSP Semantic Token Type: macro", + "LSP Semantic Token Type: keyword", + "LSP Semantic Token Type: modifier", + "LSP Semantic Token Type: comment", + "LSP Semantic Token Type: string", + "LSP Semantic Token Type: number", + "LSP Semantic Token Type: regexp", + "LSP Semantic Token Type: operator", + "LSP Semantic Token Type: decorator" + ], + "scope": "resource", + "type": "string" + }, + "haskell.plugin.semanticTokens.config.recordFieldToken": { + "default": "property", + "description": "LSP semantic token type to use for record fields", + "enum": [ + "namespace", + "type", + "class", + "enum", + "interface", + "struct", + "typeParameter", + "parameter", + "variable", + "property", + "enumMember", + "event", + 
"function", + "method", + "macro", + "keyword", + "modifier", + "comment", + "string", + "number", + "regexp", + "operator", + "decorator" + ], + "enumDescriptions": [ + "LSP Semantic Token Type: namespace", + "LSP Semantic Token Type: type", + "LSP Semantic Token Type: class", + "LSP Semantic Token Type: enum", + "LSP Semantic Token Type: interface", + "LSP Semantic Token Type: struct", + "LSP Semantic Token Type: typeParameter", + "LSP Semantic Token Type: parameter", + "LSP Semantic Token Type: variable", + "LSP Semantic Token Type: property", + "LSP Semantic Token Type: enumMember", + "LSP Semantic Token Type: event", + "LSP Semantic Token Type: function", + "LSP Semantic Token Type: method", + "LSP Semantic Token Type: macro", + "LSP Semantic Token Type: keyword", + "LSP Semantic Token Type: modifier", + "LSP Semantic Token Type: comment", + "LSP Semantic Token Type: string", + "LSP Semantic Token Type: number", + "LSP Semantic Token Type: regexp", + "LSP Semantic Token Type: operator", + "LSP Semantic Token Type: decorator" + ], + "scope": "resource", + "type": "string" + }, + "haskell.plugin.semanticTokens.config.typeConstructorToken": { + "default": "enum", + "description": "LSP semantic token type to use for type constructors", + "enum": [ + "namespace", + "type", + "class", + "enum", + "interface", + "struct", + "typeParameter", + "parameter", + "variable", + "property", + "enumMember", + "event", + "function", + "method", + "macro", + "keyword", + "modifier", + "comment", + "string", + "number", + "regexp", + "operator", + "decorator" + ], + "enumDescriptions": [ + "LSP Semantic Token Type: namespace", + "LSP Semantic Token Type: type", + "LSP Semantic Token Type: class", + "LSP Semantic Token Type: enum", + "LSP Semantic Token Type: interface", + "LSP Semantic Token Type: struct", + "LSP Semantic Token Type: typeParameter", + "LSP Semantic Token Type: parameter", + "LSP Semantic Token Type: variable", + "LSP Semantic Token Type: property", + "LSP Semantic Token Type: enumMember", + "LSP Semantic Token Type: event", + "LSP Semantic Token Type: function", + "LSP Semantic Token Type: method", + "LSP Semantic Token Type: macro", + "LSP Semantic Token Type: keyword", + "LSP Semantic Token Type: modifier", + "LSP Semantic Token Type: comment", + "LSP Semantic Token Type: string", + "LSP Semantic Token Type: number", + "LSP Semantic Token Type: regexp", + "LSP Semantic Token Type: operator", + "LSP Semantic Token Type: decorator" + ], + "scope": "resource", + "type": "string" + }, + "haskell.plugin.semanticTokens.config.typeFamilyToken": { + "default": "interface", + "description": "LSP semantic token type to use for type families", + "enum": [ + "namespace", + "type", + "class", + "enum", + "interface", + "struct", + "typeParameter", + "parameter", + "variable", + "property", + "enumMember", + "event", + "function", + "method", + "macro", + "keyword", + "modifier", + "comment", + "string", + "number", + "regexp", + "operator", + "decorator" + ], + "enumDescriptions": [ + "LSP Semantic Token Type: namespace", + "LSP Semantic Token Type: type", + "LSP Semantic Token Type: class", + "LSP Semantic Token Type: enum", + "LSP Semantic Token Type: interface", + "LSP Semantic Token Type: struct", + "LSP Semantic Token Type: typeParameter", + "LSP Semantic Token Type: parameter", + "LSP Semantic Token Type: variable", + "LSP Semantic Token Type: property", + "LSP Semantic Token Type: enumMember", + "LSP Semantic Token Type: event", + "LSP Semantic Token Type: function", + "LSP Semantic Token 
Type: method", + "LSP Semantic Token Type: macro", + "LSP Semantic Token Type: keyword", + "LSP Semantic Token Type: modifier", + "LSP Semantic Token Type: comment", + "LSP Semantic Token Type: string", + "LSP Semantic Token Type: number", + "LSP Semantic Token Type: regexp", + "LSP Semantic Token Type: operator", + "LSP Semantic Token Type: decorator" + ], + "scope": "resource", + "type": "string" + }, + "haskell.plugin.semanticTokens.config.typeSynonymToken": { + "default": "type", + "description": "LSP semantic token type to use for type synonyms", + "enum": [ + "namespace", + "type", + "class", + "enum", + "interface", + "struct", + "typeParameter", + "parameter", + "variable", + "property", + "enumMember", + "event", + "function", + "method", + "macro", + "keyword", + "modifier", + "comment", + "string", + "number", + "regexp", + "operator", + "decorator" + ], + "enumDescriptions": [ + "LSP Semantic Token Type: namespace", + "LSP Semantic Token Type: type", + "LSP Semantic Token Type: class", + "LSP Semantic Token Type: enum", + "LSP Semantic Token Type: interface", + "LSP Semantic Token Type: struct", + "LSP Semantic Token Type: typeParameter", + "LSP Semantic Token Type: parameter", + "LSP Semantic Token Type: variable", + "LSP Semantic Token Type: property", + "LSP Semantic Token Type: enumMember", + "LSP Semantic Token Type: event", + "LSP Semantic Token Type: function", + "LSP Semantic Token Type: method", + "LSP Semantic Token Type: macro", + "LSP Semantic Token Type: keyword", + "LSP Semantic Token Type: modifier", + "LSP Semantic Token Type: comment", + "LSP Semantic Token Type: string", + "LSP Semantic Token Type: number", + "LSP Semantic Token Type: regexp", + "LSP Semantic Token Type: operator", + "LSP Semantic Token Type: decorator" + ], + "scope": "resource", + "type": "string" + }, + "haskell.plugin.semanticTokens.config.typeVariableToken": { + "default": "typeParameter", + "description": "LSP semantic token type to use for type variables", + "enum": [ + "namespace", + "type", + "class", + "enum", + "interface", + "struct", + "typeParameter", + "parameter", + "variable", + "property", + "enumMember", + "event", + "function", + "method", + "macro", + "keyword", + "modifier", + "comment", + "string", + "number", + "regexp", + "operator", + "decorator" + ], + "enumDescriptions": [ + "LSP Semantic Token Type: namespace", + "LSP Semantic Token Type: type", + "LSP Semantic Token Type: class", + "LSP Semantic Token Type: enum", + "LSP Semantic Token Type: interface", + "LSP Semantic Token Type: struct", + "LSP Semantic Token Type: typeParameter", + "LSP Semantic Token Type: parameter", + "LSP Semantic Token Type: variable", + "LSP Semantic Token Type: property", + "LSP Semantic Token Type: enumMember", + "LSP Semantic Token Type: event", + "LSP Semantic Token Type: function", + "LSP Semantic Token Type: method", + "LSP Semantic Token Type: macro", + "LSP Semantic Token Type: keyword", + "LSP Semantic Token Type: modifier", + "LSP Semantic Token Type: comment", + "LSP Semantic Token Type: string", + "LSP Semantic Token Type: number", + "LSP Semantic Token Type: regexp", + "LSP Semantic Token Type: operator", + "LSP Semantic Token Type: decorator" + ], + "scope": "resource", + "type": "string" + }, + "haskell.plugin.semanticTokens.config.variableToken": { + "default": "variable", + "description": "LSP semantic token type to use for variables", + "enum": [ + "namespace", + "type", + "class", + "enum", + "interface", + "struct", + "typeParameter", + "parameter", + "variable", + 
"property", + "enumMember", + "event", + "function", + "method", + "macro", + "keyword", + "modifier", + "comment", + "string", + "number", + "regexp", + "operator", + "decorator" + ], + "enumDescriptions": [ + "LSP Semantic Token Type: namespace", + "LSP Semantic Token Type: type", + "LSP Semantic Token Type: class", + "LSP Semantic Token Type: enum", + "LSP Semantic Token Type: interface", + "LSP Semantic Token Type: struct", + "LSP Semantic Token Type: typeParameter", + "LSP Semantic Token Type: parameter", + "LSP Semantic Token Type: variable", + "LSP Semantic Token Type: property", + "LSP Semantic Token Type: enumMember", + "LSP Semantic Token Type: event", + "LSP Semantic Token Type: function", + "LSP Semantic Token Type: method", + "LSP Semantic Token Type: macro", + "LSP Semantic Token Type: keyword", + "LSP Semantic Token Type: modifier", + "LSP Semantic Token Type: comment", + "LSP Semantic Token Type: string", + "LSP Semantic Token Type: number", + "LSP Semantic Token Type: regexp", + "LSP Semantic Token Type: operator", + "LSP Semantic Token Type: decorator" + ], + "scope": "resource", + "type": "string" + }, + "haskell.plugin.semanticTokens.globalOn": { + "default": false, + "description": "Enables semanticTokens plugin", + "scope": "resource", + "type": "boolean" + } +} diff --git a/test/testdata/schema/ghc94/default-config.golden.json b/test/testdata/schema/ghc94/default-config.golden.json index 2859e3d720..8467b451f1 100644 --- a/test/testdata/schema/ghc94/default-config.golden.json +++ b/test/testdata/schema/ghc94/default-config.golden.json @@ -11,7 +11,9 @@ "cabal": { "codeActionsOn": true, "completionOn": true, - "diagnosticsOn": true + "diagnosticsOn": true, + "hoverOn": true, + "symbolsOn": true }, "cabal-fmt": { "config": { @@ -23,6 +25,9 @@ "path": "cabal-gild" } }, + "cabalHaskellIntegration": { + "globalOn": true + }, "callHierarchy": { "globalOn": true }, @@ -34,14 +39,16 @@ "codeLensOn": true }, "eval": { + "codeActionsOn": true, + "codeLensOn": true, "config": { "diff": true, "exception": false - }, - "globalOn": true + } }, "explicit-fields": { - "globalOn": true + "codeActionsOn": true, + "inlayHintsOn": true }, "explicit-fixity": { "globalOn": true @@ -93,7 +100,8 @@ }, "importLens": { "codeActionsOn": true, - "codeLensOn": true + "codeLensOn": true, + "inlayHintsOn": true }, "moduleName": { "globalOn": true diff --git a/test/testdata/schema/ghc94/vscode-extension-schema.golden.json b/test/testdata/schema/ghc94/vscode-extension-schema.golden.json index d113264901..1c0b19eb27 100644 --- a/test/testdata/schema/ghc94/vscode-extension-schema.golden.json +++ b/test/testdata/schema/ghc94/vscode-extension-schema.golden.json @@ -35,6 +35,24 @@ "scope": "resource", "type": "boolean" }, + "haskell.plugin.cabal.hoverOn": { + "default": true, + "description": "Enables cabal hover", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.cabal.symbolsOn": { + "default": true, + "description": "Enables cabal symbols", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.cabalHaskellIntegration.globalOn": { + "default": true, + "description": "Enables cabalHaskellIntegration plugin", + "scope": "resource", + "type": "boolean" + }, "haskell.plugin.callHierarchy.globalOn": { "default": true, "description": "Enables callHierarchy plugin", @@ -59,6 +77,18 @@ "scope": "resource", "type": "boolean" }, + "haskell.plugin.eval.codeActionsOn": { + "default": true, + "description": "Enables eval code actions", + "scope": "resource", + "type": "boolean" + }, + 
"haskell.plugin.eval.codeLensOn": { + "default": true, + "description": "Enables eval code lenses", + "scope": "resource", + "type": "boolean" + }, "haskell.plugin.eval.config.diff": { "default": true, "markdownDescription": "Enable the diff output (WAS/NOW) of eval lenses", @@ -71,15 +101,15 @@ "scope": "resource", "type": "boolean" }, - "haskell.plugin.eval.globalOn": { + "haskell.plugin.explicit-fields.codeActionsOn": { "default": true, - "description": "Enables eval plugin", + "description": "Enables explicit-fields code actions", "scope": "resource", "type": "boolean" }, - "haskell.plugin.explicit-fields.globalOn": { + "haskell.plugin.explicit-fields.inlayHintsOn": { "default": true, - "description": "Enables explicit-fields plugin", + "description": "Enables explicit-fields inlay hints", "scope": "resource", "type": "boolean" }, @@ -213,6 +243,12 @@ "scope": "resource", "type": "boolean" }, + "haskell.plugin.importLens.inlayHintsOn": { + "default": true, + "description": "Enables importLens inlay hints", + "scope": "resource", + "type": "boolean" + }, "haskell.plugin.moduleName.globalOn": { "default": true, "description": "Enables moduleName plugin", diff --git a/test/testdata/schema/ghc96/default-config.golden.json b/test/testdata/schema/ghc96/default-config.golden.json index 2859e3d720..8467b451f1 100644 --- a/test/testdata/schema/ghc96/default-config.golden.json +++ b/test/testdata/schema/ghc96/default-config.golden.json @@ -11,7 +11,9 @@ "cabal": { "codeActionsOn": true, "completionOn": true, - "diagnosticsOn": true + "diagnosticsOn": true, + "hoverOn": true, + "symbolsOn": true }, "cabal-fmt": { "config": { @@ -23,6 +25,9 @@ "path": "cabal-gild" } }, + "cabalHaskellIntegration": { + "globalOn": true + }, "callHierarchy": { "globalOn": true }, @@ -34,14 +39,16 @@ "codeLensOn": true }, "eval": { + "codeActionsOn": true, + "codeLensOn": true, "config": { "diff": true, "exception": false - }, - "globalOn": true + } }, "explicit-fields": { - "globalOn": true + "codeActionsOn": true, + "inlayHintsOn": true }, "explicit-fixity": { "globalOn": true @@ -93,7 +100,8 @@ }, "importLens": { "codeActionsOn": true, - "codeLensOn": true + "codeLensOn": true, + "inlayHintsOn": true }, "moduleName": { "globalOn": true diff --git a/test/testdata/schema/ghc96/vscode-extension-schema.golden.json b/test/testdata/schema/ghc96/vscode-extension-schema.golden.json index d113264901..1c0b19eb27 100644 --- a/test/testdata/schema/ghc96/vscode-extension-schema.golden.json +++ b/test/testdata/schema/ghc96/vscode-extension-schema.golden.json @@ -35,6 +35,24 @@ "scope": "resource", "type": "boolean" }, + "haskell.plugin.cabal.hoverOn": { + "default": true, + "description": "Enables cabal hover", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.cabal.symbolsOn": { + "default": true, + "description": "Enables cabal symbols", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.cabalHaskellIntegration.globalOn": { + "default": true, + "description": "Enables cabalHaskellIntegration plugin", + "scope": "resource", + "type": "boolean" + }, "haskell.plugin.callHierarchy.globalOn": { "default": true, "description": "Enables callHierarchy plugin", @@ -59,6 +77,18 @@ "scope": "resource", "type": "boolean" }, + "haskell.plugin.eval.codeActionsOn": { + "default": true, + "description": "Enables eval code actions", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.eval.codeLensOn": { + "default": true, + "description": "Enables eval code lenses", + "scope": "resource", + "type": 
"boolean" + }, "haskell.plugin.eval.config.diff": { "default": true, "markdownDescription": "Enable the diff output (WAS/NOW) of eval lenses", @@ -71,15 +101,15 @@ "scope": "resource", "type": "boolean" }, - "haskell.plugin.eval.globalOn": { + "haskell.plugin.explicit-fields.codeActionsOn": { "default": true, - "description": "Enables eval plugin", + "description": "Enables explicit-fields code actions", "scope": "resource", "type": "boolean" }, - "haskell.plugin.explicit-fields.globalOn": { + "haskell.plugin.explicit-fields.inlayHintsOn": { "default": true, - "description": "Enables explicit-fields plugin", + "description": "Enables explicit-fields inlay hints", "scope": "resource", "type": "boolean" }, @@ -213,6 +243,12 @@ "scope": "resource", "type": "boolean" }, + "haskell.plugin.importLens.inlayHintsOn": { + "default": true, + "description": "Enables importLens inlay hints", + "scope": "resource", + "type": "boolean" + }, "haskell.plugin.moduleName.globalOn": { "default": true, "description": "Enables moduleName plugin", diff --git a/test/testdata/schema/ghc98/default-config.golden.json b/test/testdata/schema/ghc98/default-config.golden.json index 2859e3d720..8467b451f1 100644 --- a/test/testdata/schema/ghc98/default-config.golden.json +++ b/test/testdata/schema/ghc98/default-config.golden.json @@ -11,7 +11,9 @@ "cabal": { "codeActionsOn": true, "completionOn": true, - "diagnosticsOn": true + "diagnosticsOn": true, + "hoverOn": true, + "symbolsOn": true }, "cabal-fmt": { "config": { @@ -23,6 +25,9 @@ "path": "cabal-gild" } }, + "cabalHaskellIntegration": { + "globalOn": true + }, "callHierarchy": { "globalOn": true }, @@ -34,14 +39,16 @@ "codeLensOn": true }, "eval": { + "codeActionsOn": true, + "codeLensOn": true, "config": { "diff": true, "exception": false - }, - "globalOn": true + } }, "explicit-fields": { - "globalOn": true + "codeActionsOn": true, + "inlayHintsOn": true }, "explicit-fixity": { "globalOn": true @@ -93,7 +100,8 @@ }, "importLens": { "codeActionsOn": true, - "codeLensOn": true + "codeLensOn": true, + "inlayHintsOn": true }, "moduleName": { "globalOn": true diff --git a/test/testdata/schema/ghc98/vscode-extension-schema.golden.json b/test/testdata/schema/ghc98/vscode-extension-schema.golden.json index d113264901..1c0b19eb27 100644 --- a/test/testdata/schema/ghc98/vscode-extension-schema.golden.json +++ b/test/testdata/schema/ghc98/vscode-extension-schema.golden.json @@ -35,6 +35,24 @@ "scope": "resource", "type": "boolean" }, + "haskell.plugin.cabal.hoverOn": { + "default": true, + "description": "Enables cabal hover", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.cabal.symbolsOn": { + "default": true, + "description": "Enables cabal symbols", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.cabalHaskellIntegration.globalOn": { + "default": true, + "description": "Enables cabalHaskellIntegration plugin", + "scope": "resource", + "type": "boolean" + }, "haskell.plugin.callHierarchy.globalOn": { "default": true, "description": "Enables callHierarchy plugin", @@ -59,6 +77,18 @@ "scope": "resource", "type": "boolean" }, + "haskell.plugin.eval.codeActionsOn": { + "default": true, + "description": "Enables eval code actions", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.eval.codeLensOn": { + "default": true, + "description": "Enables eval code lenses", + "scope": "resource", + "type": "boolean" + }, "haskell.plugin.eval.config.diff": { "default": true, "markdownDescription": "Enable the diff output (WAS/NOW) of 
eval lenses", @@ -71,15 +101,15 @@ "scope": "resource", "type": "boolean" }, - "haskell.plugin.eval.globalOn": { + "haskell.plugin.explicit-fields.codeActionsOn": { "default": true, - "description": "Enables eval plugin", + "description": "Enables explicit-fields code actions", "scope": "resource", "type": "boolean" }, - "haskell.plugin.explicit-fields.globalOn": { + "haskell.plugin.explicit-fields.inlayHintsOn": { "default": true, - "description": "Enables explicit-fields plugin", + "description": "Enables explicit-fields inlay hints", "scope": "resource", "type": "boolean" }, @@ -213,6 +243,12 @@ "scope": "resource", "type": "boolean" }, + "haskell.plugin.importLens.inlayHintsOn": { + "default": true, + "description": "Enables importLens inlay hints", + "scope": "resource", + "type": "boolean" + }, "haskell.plugin.moduleName.globalOn": { "default": true, "description": "Enables moduleName plugin", diff --git a/test/wrapper/testdata/stack-with-dist-newstyle/stack.yaml b/test/wrapper/testdata/stack-with-dist-newstyle/stack.yaml index e467bdb282..d95c1a7a03 100644 --- a/test/wrapper/testdata/stack-with-dist-newstyle/stack.yaml +++ b/test/wrapper/testdata/stack-with-dist-newstyle/stack.yaml @@ -1,2 +1,2 @@ # specific version does not matter -resolver: ghc-9.2.5 +resolver: ghc-9.6.5