diff --git a/.github/workflows/audit.yaml b/.github/workflows/audit.yaml index 5b463a3e0c2..275abb2d539 100644 --- a/.github/workflows/audit.yaml +++ b/.github/workflows/audit.yaml @@ -25,7 +25,7 @@ jobs: python-version: ${{inputs.python_version}} - name: Install Python packages run: | - pip install --upgrade pip six setuptools pytest codecov 'coverage[toml]<=6.2' + pip install --upgrade pip six setuptools pytest codecov coverage[toml] - name: Package audits (with coverage) if: ${{ inputs.with_coverage == 'true' }} run: | diff --git a/.github/workflows/bootstrap-test.sh b/.github/workflows/bootstrap-test.sh index fc8f93d68b5..b51db3d1b74 100755 --- a/.github/workflows/bootstrap-test.sh +++ b/.github/workflows/bootstrap-test.sh @@ -1,7 +1,7 @@ #!/bin/bash set -ex source share/spack/setup-env.sh -$PYTHON bin/spack bootstrap untrust spack-install +$PYTHON bin/spack bootstrap disable spack-install $PYTHON bin/spack -d solve zlib tree $BOOTSTRAP/store exit 0 diff --git a/.github/workflows/bootstrap.yml b/.github/workflows/bootstrap.yml index dcd6761f689..df2b0f346e1 100644 --- a/.github/workflows/bootstrap.yml +++ b/.github/workflows/bootstrap.yml @@ -42,7 +42,8 @@ jobs: shell: runuser -u spack-test -- bash {0} run: | source share/spack/setup-env.sh - spack bootstrap untrust github-actions-v0.2 + spack bootstrap disable github-actions-v0.4 + spack bootstrap disable github-actions-v0.3 spack external find cmake bison spack -d solve zlib tree ~/.spack/bootstrap/store/ @@ -79,7 +80,8 @@ jobs: shell: runuser -u spack-test -- bash {0} run: | source share/spack/setup-env.sh - spack bootstrap untrust github-actions-v0.2 + spack bootstrap disable github-actions-v0.4 + spack bootstrap disable github-actions-v0.3 spack external find cmake bison spack -d solve zlib tree ~/.spack/bootstrap/store/ @@ -143,7 +145,8 @@ jobs: - name: Bootstrap clingo run: | source share/spack/setup-env.sh - spack bootstrap untrust github-actions-v0.2 + spack bootstrap disable github-actions-v0.4 + 
spack bootstrap disable github-actions-v0.3 spack external find cmake bison spack -d solve zlib tree ~/.spack/bootstrap/store/ @@ -160,7 +163,8 @@ jobs: run: | source share/spack/setup-env.sh export PATH=/usr/local/opt/bison@2.7/bin:$PATH - spack bootstrap untrust github-actions-v0.2 + spack bootstrap disable github-actions-v0.4 + spack bootstrap disable github-actions-v0.3 spack external find --not-buildable cmake bison spack -d solve zlib tree ~/.spack/bootstrap/store/ @@ -261,7 +265,7 @@ jobs: shell: runuser -u spack-test -- bash {0} run: | source share/spack/setup-env.sh - spack bootstrap untrust spack-install + spack bootstrap disable spack-install spack -d gpg list tree ~/.spack/bootstrap/store/ @@ -298,7 +302,8 @@ jobs: run: | source share/spack/setup-env.sh spack solve zlib - spack bootstrap untrust github-actions-v0.2 + spack bootstrap disable github-actions-v0.4 + spack bootstrap disable github-actions-v0.3 spack -d gpg list tree ~/.spack/bootstrap/store/ @@ -315,7 +320,7 @@ jobs: - name: Bootstrap GnuPG run: | source share/spack/setup-env.sh - spack bootstrap untrust spack-install + spack bootstrap disable spack-install spack -d gpg list tree ~/.spack/bootstrap/store/ @@ -333,7 +338,8 @@ jobs: run: | source share/spack/setup-env.sh spack solve zlib - spack bootstrap untrust github-actions-v0.2 + spack bootstrap disable github-actions-v0.4 + spack bootstrap disable github-actions-v0.3 spack -d gpg list tree ~/.spack/bootstrap/store/ diff --git a/.github/workflows/build-containers.yml b/.github/workflows/build-containers.yml index 9e395974d34..1030ba6428c 100644 --- a/.github/workflows/build-containers.yml +++ b/.github/workflows/build-containers.yml @@ -13,7 +13,7 @@ on: paths: - '.github/workflows/build-containers.yml' - 'share/spack/docker/*' - - 'share/templates/container/*' + - 'share/spack/templates/container/*' - 'lib/spack/spack/container/*' # Let's also build & tag Spack containers on releases. 
release: @@ -80,19 +80,19 @@ jobs: fi - name: Upload Dockerfile - uses: actions/upload-artifact@3cea5372237819ed00197afe530f5a7ea3e805c8 + uses: actions/upload-artifact@83fd05a356d7e2593de66fc9913b3002723633cb with: name: dockerfiles path: dockerfiles - name: Set up QEMU - uses: docker/setup-qemu-action@8b122486cedac8393e77aa9734c3528886e4a1a8 # @v1 + uses: docker/setup-qemu-action@e81a89b1732b9c48d79cd809d8d81d79c4647a18 # @v1 - name: Set up Docker Buildx - uses: docker/setup-buildx-action@dc7b9719a96d48369863986a06765841d7ea23f6 # @v1 + uses: docker/setup-buildx-action@8c0edbc76e98fa90f69d9a2c020dcb50019dc325 # @v1 - name: Log in to GitHub Container Registry - uses: docker/login-action@49ed152c8eca782a232dede0303416e8f356c37b # @v1 + uses: docker/login-action@f4ef78c080cd8ba55a85445d5b36e214a81df20a # @v1 with: registry: ghcr.io username: ${{ github.actor }} @@ -100,13 +100,13 @@ jobs: - name: Log in to DockerHub if: github.event_name != 'pull_request' - uses: docker/login-action@49ed152c8eca782a232dede0303416e8f356c37b # @v1 + uses: docker/login-action@f4ef78c080cd8ba55a85445d5b36e214a81df20a # @v1 with: username: ${{ secrets.DOCKERHUB_USERNAME }} password: ${{ secrets.DOCKERHUB_TOKEN }} - name: Build & Deploy ${{ matrix.dockerfile[0] }} - uses: docker/build-push-action@c84f38281176d4c9cdb1626ffafcd6b3911b5d94 # @v2 + uses: docker/build-push-action@c56af957549030174b10d6867f20e78cfd7debc5 # @v2 with: context: dockerfiles/${{ matrix.dockerfile[0] }} platforms: ${{ matrix.dockerfile[1] }} diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 46dfbd572db..3b57bd9bb51 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -46,7 +46,7 @@ jobs: with: fetch-depth: 0 # For pull requests it's not necessary to checkout the code - - uses: dorny/paths-filter@b2feaf19c27470162a626bd6fa8438ae5b263721 + - uses: dorny/paths-filter@4512585405083f25c027a35db413c2b3b9006d50 id: filter with: # See https://github.com/dorny/paths-filter/issues/56 
for the syntax used below diff --git a/.github/workflows/setup_git.ps1 b/.github/workflows/setup_git.ps1 index d68f90a7ae1..0acb9a9f460 100644 --- a/.github/workflows/setup_git.ps1 +++ b/.github/workflows/setup_git.ps1 @@ -6,6 +6,10 @@ git config --global user.email "spack@example.com" git config --global user.name "Test User" git config --global core.longpaths true +# See https://github.com/git/git/security/advisories/GHSA-3wp6-j8xr-qw85 (CVE-2022-39253) +# This is needed to let some fixture in our unit-test suite run +git config --global protocol.file.allow always + if ($(git branch --show-current) -ne "develop") { git branch develop origin/develop diff --git a/.github/workflows/setup_git.sh b/.github/workflows/setup_git.sh index 4eb416720be..ee555ff71a9 100755 --- a/.github/workflows/setup_git.sh +++ b/.github/workflows/setup_git.sh @@ -2,6 +2,10 @@ git config --global user.email "spack@example.com" git config --global user.name "Test User" +# See https://github.com/git/git/security/advisories/GHSA-3wp6-j8xr-qw85 (CVE-2022-39253) +# This is needed to let some fixture in our unit-test suite run +git config --global protocol.file.allow always + # create a local pr base branch if [[ -n $GITHUB_BASE_REF ]]; then git fetch origin "${GITHUB_BASE_REF}:${GITHUB_BASE_REF}" diff --git a/.github/workflows/unit_tests.yaml b/.github/workflows/unit_tests.yaml index 29211ae2bbe..6a21d166f89 100644 --- a/.github/workflows/unit_tests.yaml +++ b/.github/workflows/unit_tests.yaml @@ -14,7 +14,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python-version: ['2.7', '3.6', '3.7', '3.8', '3.9', '3.10'] + python-version: ['2.7', '3.6', '3.7', '3.8', '3.9', '3.10', '3.11'] concretizer: ['clingo'] on_develop: - ${{ github.ref == 'refs/heads/develop' }} @@ -22,7 +22,7 @@ jobs: - python-version: 2.7 concretizer: original on_develop: ${{ github.ref == 'refs/heads/develop' }} - - python-version: '3.10' + - python-version: '3.11' concretizer: original on_develop: ${{ github.ref == 
'refs/heads/develop' }} exclude: @@ -35,6 +35,9 @@ jobs: - python-version: '3.9' concretizer: 'clingo' on_develop: false + - python-version: '3.10' + concretizer: 'clingo' + on_develop: false steps: - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # @v2 @@ -52,7 +55,12 @@ jobs: patchelf cmake bison libbison-dev kcov - name: Install Python packages run: | - pip install --upgrade pip six setuptools pytest codecov[toml] pytest-cov pytest-xdist + pip install --upgrade pip six setuptools pytest codecov[toml] pytest-xdist + # Install pytest-cov only on recent Python, to avoid stalling on Python 2.7 due + # to bugs on an unmaintained version of the package when used with xdist. + if [[ ${{ matrix.python-version }} != "2.7" ]]; then + pip install --upgrade pytest-cov + fi # ensure style checks are not skipped in unit tests for python >= 3.6 # note that true/false (i.e., 1/0) are opposite in conditions in python and bash if python -c 'import sys; sys.exit(not sys.version_info >= (3, 6))'; then @@ -61,7 +69,7 @@ jobs: - name: Pin pathlib for Python 2.7 if: ${{ matrix.python-version == 2.7 }} run: | - pip install -U pathlib2==2.3.6 + pip install -U pathlib2==2.3.6 toml - name: Setup git configuration run: | # Need this for the git tests to succeed. @@ -73,7 +81,7 @@ jobs: SPACK_PYTHON: python run: | . 
share/spack/setup-env.sh - spack bootstrap untrust spack-install + spack bootstrap disable spack-install spack -v solve zlib - name: Run unit tests env: @@ -81,11 +89,9 @@ jobs: SPACK_TEST_SOLVER: ${{ matrix.concretizer }} SPACK_TEST_PARALLEL: 2 COVERAGE: true - UNIT_TEST_COVERAGE: ${{ (matrix.concretizer == 'original' && matrix.python-version == '2.7') || (matrix.python-version == '3.10') }} + UNIT_TEST_COVERAGE: ${{ (matrix.python-version == '3.11') }} run: | share/spack/qa/run-unit-tests - coverage combine -a - coverage xml - uses: codecov/codecov-action@d9f34f8cd5cb3b3eb79b3e4b5dae3a16df499a70 with: flags: unittests,linux,${{ matrix.concretizer }} @@ -98,7 +104,7 @@ jobs: fetch-depth: 0 - uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984 # @v2 with: - python-version: '3.10' + python-version: '3.11' - name: Install System packages run: | sudo apt-get -y update @@ -106,7 +112,7 @@ jobs: sudo apt-get install -y coreutils kcov csh zsh tcsh fish dash bash - name: Install Python packages run: | - pip install --upgrade pip six setuptools pytest codecov coverage[toml]==6.2 pytest-xdist + pip install --upgrade pip six setuptools pytest codecov coverage[toml] pytest-xdist - name: Setup git configuration run: | # Need this for the git tests to succeed. 
@@ -155,7 +161,7 @@ jobs: fetch-depth: 0 - uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984 # @v2 with: - python-version: '3.10' + python-version: '3.11' - name: Install System packages run: | sudo apt-get -y update @@ -177,8 +183,6 @@ jobs: SPACK_TEST_SOLVER: clingo run: | share/spack/qa/run-unit-tests - coverage combine -a - coverage xml - uses: codecov/codecov-action@d9f34f8cd5cb3b3eb79b3e4b5dae3a16df499a70 # @v2.1.0 with: flags: unittests,linux,clingo @@ -187,7 +191,7 @@ jobs: runs-on: macos-latest strategy: matrix: - python-version: [3.8] + python-version: ["3.10"] steps: - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # @v2 with: @@ -210,15 +214,10 @@ jobs: git --version . .github/workflows/setup_git.sh . share/spack/setup-env.sh - $(which spack) bootstrap untrust spack-install + $(which spack) bootstrap disable spack-install $(which spack) solve zlib common_args=(--dist loadfile --tx '4*popen//python=./bin/spack-tmpconfig python -u ./bin/spack python' -x) - $(which spack) unit-test --cov --cov-config=pyproject.toml "${common_args[@]}" - coverage combine -a - coverage xml - # Delete the symlink going from ./lib/spack/docs/_spack_root back to - # the initial directory, since it causes ELOOP errors with codecov/actions@2 - rm lib/spack/docs/_spack_root + $(which spack) unit-test --cov --cov-config=pyproject.toml --cov-report=xml:coverage.xml "${common_args[@]}" - uses: codecov/codecov-action@d9f34f8cd5cb3b3eb79b3e4b5dae3a16df499a70 with: flags: unittests,macos diff --git a/.github/workflows/valid-style.yml b/.github/workflows/valid-style.yml index d91f6e958a3..a82c786b44a 100644 --- a/.github/workflows/valid-style.yml +++ b/.github/workflows/valid-style.yml @@ -21,7 +21,7 @@ jobs: - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # @v2 - uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984 # @v2 with: - python-version: '3.10' + python-version: '3.11' cache: 'pip' - name: Install Python Packages 
run: | @@ -40,7 +40,7 @@ jobs: fetch-depth: 0 - uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984 # @v2 with: - python-version: '3.10' + python-version: '3.11' cache: 'pip' - name: Install Python packages run: | @@ -57,4 +57,4 @@ jobs: uses: ./.github/workflows/audit.yaml with: with_coverage: ${{ inputs.with_coverage }} - python_version: '3.10' + python_version: '3.11' diff --git a/.github/workflows/windows_python.yml b/.github/workflows/windows_python.yml index adb8344d94e..05a98c4cba9 100644 --- a/.github/workflows/windows_python.yml +++ b/.github/workflows/windows_python.yml @@ -23,7 +23,7 @@ jobs: python-version: 3.9 - name: Install Python packages run: | - python -m pip install --upgrade pip six pywin32 setuptools codecov pytest-cov + python -m pip install --upgrade pip six pywin32 setuptools codecov pytest-cov clingo - name: Create local develop run: | .\spack\.github\workflows\setup_git.ps1 @@ -32,8 +32,7 @@ jobs: echo F|xcopy .\spack\share\spack\qa\configuration\windows_config.yaml $env:USERPROFILE\.spack\windows\config.yaml cd spack dir - (Get-Item '.\lib\spack\docs\_spack_root').Delete() - spack unit-test --verbose --cov --cov-config=pyproject.toml --ignore=lib/spack/spack/test/cmd + spack unit-test -x --verbose --cov --cov-config=pyproject.toml --ignore=lib/spack/spack/test/cmd coverage combine -a coverage xml - uses: codecov/codecov-action@d9f34f8cd5cb3b3eb79b3e4b5dae3a16df499a70 @@ -50,7 +49,7 @@ jobs: python-version: 3.9 - name: Install Python packages run: | - python -m pip install --upgrade pip six pywin32 setuptools codecov coverage pytest-cov + python -m pip install --upgrade pip six pywin32 setuptools codecov coverage pytest-cov clingo - name: Create local develop run: | .\spack\.github\workflows\setup_git.ps1 @@ -58,8 +57,7 @@ jobs: run: | echo F|xcopy .\spack\share\spack\qa\configuration\windows_config.yaml $env:USERPROFILE\.spack\windows\config.yaml cd spack - (Get-Item '.\lib\spack\docs\_spack_root').Delete() - spack 
unit-test --verbose --cov --cov-config=pyproject.toml lib/spack/spack/test/cmd + spack unit-test -x --verbose --cov --cov-config=pyproject.toml lib/spack/spack/test/cmd coverage combine -a coverage xml - uses: codecov/codecov-action@d9f34f8cd5cb3b3eb79b3e4b5dae3a16df499a70 @@ -83,7 +81,7 @@ jobs: echo F|xcopy .\spack\share\spack\qa\configuration\windows_config.yaml $env:USERPROFILE\.spack\windows\config.yaml spack external find cmake spack external find ninja - spack install abseil-cpp + spack -d install abseil-cpp make-installer: runs-on: windows-latest steps: @@ -111,11 +109,11 @@ jobs: echo "installer_root=$((pwd).Path)" | Out-File -FilePath $Env:GITHUB_ENV -Encoding utf8 -Append env: ProgressPreference: SilentlyContinue - - uses: actions/upload-artifact@3cea5372237819ed00197afe530f5a7ea3e805c8 + - uses: actions/upload-artifact@83fd05a356d7e2593de66fc9913b3002723633cb with: name: Windows Spack Installer Bundle path: ${{ env.installer_root }}\pkg\Spack.exe - - uses: actions/upload-artifact@3cea5372237819ed00197afe530f5a7ea3e805c8 + - uses: actions/upload-artifact@83fd05a356d7e2593de66fc9913b3002723633cb with: name: Windows Spack Installer path: ${{ env.installer_root}}\pkg\Spack.msi diff --git a/README.md b/README.md index fea23bb4086..cf4b413af8b 100644 --- a/README.md +++ b/README.md @@ -62,6 +62,7 @@ Resources: * **Slack workspace**: [spackpm.slack.com](https://spackpm.slack.com). To get an invitation, visit [slack.spack.io](https://slack.spack.io). +* [**Github Discussions**](https://github.com/spack/spack/discussions): not just for discussions, also Q&A. * **Mailing list**: [groups.google.com/d/forum/spack](https://groups.google.com/d/forum/spack) * **Twitter**: [@spackpm](https://twitter.com/spackpm). Be sure to `@mention` us! 
diff --git a/bin/spack b/bin/spack index 864fb34a778..08da29dfd26 100755 --- a/bin/spack +++ b/bin/spack @@ -49,52 +49,8 @@ spack_prefix = os.path.dirname(os.path.dirname(spack_file)) spack_lib_path = os.path.join(spack_prefix, "lib", "spack") sys.path.insert(0, spack_lib_path) -# Add external libs -spack_external_libs = os.path.join(spack_lib_path, "external") - -if sys.version_info[:2] <= (2, 7): - sys.path.insert(0, os.path.join(spack_external_libs, "py2")) - -sys.path.insert(0, spack_external_libs) - -# Here we delete ruamel.yaml in case it has been already imported from site -# (see #9206 for a broader description of the issue). -# -# Briefly: ruamel.yaml produces a .pth file when installed with pip that -# makes the site installed package the preferred one, even though sys.path -# is modified to point to another version of ruamel.yaml. -if "ruamel.yaml" in sys.modules: - del sys.modules["ruamel.yaml"] - -if "ruamel" in sys.modules: - del sys.modules["ruamel"] - -# The following code is here to avoid failures when updating -# the develop version, due to spurious argparse.pyc files remaining -# in the libs/spack/external directory, see: -# https://github.com/spack/spack/pull/25376 -# TODO: Remove in v0.18.0 or later -try: - import argparse -except ImportError: - argparse_pyc = os.path.join(spack_external_libs, "argparse.pyc") - if not os.path.exists(argparse_pyc): - raise - try: - os.remove(argparse_pyc) - import argparse # noqa: F401 - except Exception: - msg = ( - "The file\n\n\t{0}\n\nis corrupted and cannot be deleted by Spack. " - "Either delete it manually or ask some administrator to " - "delete it for you." 
- ) - print(msg.format(argparse_pyc)) - sys.exit(1) - - -import spack.main # noqa: E402 +from spack_installable.main import main # noqa: E402 # Once we've set up the system path, run the spack main method if __name__ == "__main__": - sys.exit(spack.main.main()) + sys.exit(main()) diff --git a/etc/spack/defaults/bootstrap.yaml b/etc/spack/defaults/bootstrap.yaml index a4a9b23515f..464994d171e 100644 --- a/etc/spack/defaults/bootstrap.yaml +++ b/etc/spack/defaults/bootstrap.yaml @@ -9,16 +9,15 @@ bootstrap: # may not be able to bootstrap all the software that Spack needs, # depending on its type. sources: + - name: 'github-actions-v0.4' + metadata: $spack/share/spack/bootstrap/github-actions-v0.4 - name: 'github-actions-v0.3' metadata: $spack/share/spack/bootstrap/github-actions-v0.3 - - name: 'github-actions-v0.2' - metadata: $spack/share/spack/bootstrap/github-actions-v0.2 - - name: 'github-actions-v0.1' - metadata: $spack/share/spack/bootstrap/github-actions-v0.1 - name: 'spack-install' metadata: $spack/share/spack/bootstrap/spack-install trusted: # By default we trust bootstrapping from sources and from binaries # produced on Github via the workflow + github-actions-v0.4: true github-actions-v0.3: true spack-install: true diff --git a/etc/spack/defaults/concretizer.yaml b/etc/spack/defaults/concretizer.yaml index 7311354c28b..f455aa723d9 100644 --- a/etc/spack/defaults/concretizer.yaml +++ b/etc/spack/defaults/concretizer.yaml @@ -33,4 +33,4 @@ concretizer: # environments can always be activated. When "false" perform concretization separately # on each root spec, allowing different versions and variants of the same package in # an environment. 
- unify: false \ No newline at end of file + unify: true \ No newline at end of file diff --git a/etc/spack/defaults/config.yaml b/etc/spack/defaults/config.yaml index 37ec464a8b9..55dced1cb7e 100644 --- a/etc/spack/defaults/config.yaml +++ b/etc/spack/defaults/config.yaml @@ -191,10 +191,20 @@ config: package_lock_timeout: null - # Control whether Spack embeds RPATH or RUNPATH attributes in ELF binaries. - # Has no effect on macOS. DO NOT MIX these within the same install tree. - # See the Spack documentation for details. - shared_linking: 'rpath' + # Control how shared libraries are located at runtime on Linux. See the + # the Spack documentation for details. + shared_linking: + # Spack automatically embeds runtime search paths in ELF binaries for their + # dependencies. Their type can either be "rpath" or "runpath". For glibc, rpath is + # inherited and has precedence over LD_LIBRARY_PATH; runpath is not inherited + # and of lower precedence. DO NOT MIX these within the same install tree. + type: rpath + + + # (Experimental) Embed absolute paths of dependent libraries directly in ELF + # binaries to avoid runtime search. This can improve startup time of + # executables with many dependencies, in particular on slow filesystems. + bind: false # Set to 'false' to allow installation on filesystems that doesn't allow setgid bit @@ -205,3 +215,7 @@ config: # building and installing packages. This gives information about Spack's # current progress as well as the current and total number of packages. terminal_title: false + + # Number of seconds a buildcache's index.json is cached locally before probing + # for updates, within a single Spack invocation. Defaults to 10 minutes. 
+ binary_index_ttl: 600 \ No newline at end of file diff --git a/etc/spack/defaults/packages.yaml b/etc/spack/defaults/packages.yaml index ead2d872c7d..f0596b0fe1d 100644 --- a/etc/spack/defaults/packages.yaml +++ b/etc/spack/defaults/packages.yaml @@ -27,7 +27,8 @@ packages: fuse: [libfuse] gl: [glx, osmesa] glu: [mesa-glu, openglu] - golang: [gcc] + golang: [go, gcc] + go-external-or-gccgo-bootstrap: [go-bootstrap, gcc] iconv: [libiconv] ipp: [intel-ipp] java: [openjdk, jdk, ibm-java] diff --git a/etc/spack/defaults/windows/config.yaml b/etc/spack/defaults/windows/config.yaml index 956bc97c20a..367bf831cff 100644 --- a/etc/spack/defaults/windows/config.yaml +++ b/etc/spack/defaults/windows/config.yaml @@ -1,5 +1,5 @@ config: locks: false - concretizer: original + concretizer: clingo build_stage:: - '$spack/.staging' diff --git a/lib/spack/docs/_spack_root b/lib/spack/docs/_spack_root deleted file mode 120000 index a8a4f8c2127..00000000000 --- a/lib/spack/docs/_spack_root +++ /dev/null @@ -1 +0,0 @@ -../../.. \ No newline at end of file diff --git a/lib/spack/docs/basic_usage.rst b/lib/spack/docs/basic_usage.rst index 53bd3858e04..73895449b08 100644 --- a/lib/spack/docs/basic_usage.rst +++ b/lib/spack/docs/basic_usage.rst @@ -85,7 +85,7 @@ All packages whose names or descriptions contain documentation: To get more information on a particular package from `spack list`, use `spack info`. Just supply the name of a package: -.. command-output:: spack info mpich +.. command-output:: spack info --all mpich Most of the information is self-explanatory. The *safe versions* are versions that Spack knows the checksum for, and it will use the @@ -998,11 +998,15 @@ More formally, a spec consists of the following pieces: * ``%`` Optional compiler specifier, with an optional compiler version (``gcc`` or ``gcc@4.7.3``) * ``+`` or ``-`` or ``~`` Optional variant specifiers (``+debug``, - ``-qt``, or ``~qt``) for boolean variants + ``-qt``, or ``~qt``) for boolean variants. 
Use ``++`` or ``--`` or + ``~~`` to propagate variants through the dependencies (``++debug``, + ``--qt``, or ``~~qt``). * ``name=`` Optional variant specifiers that are not restricted to - boolean variants + boolean variants. Use ``name==`` to propagate variant through the + dependencies. * ``name=`` Optional compiler flag specifiers. Valid flag names are ``cflags``, ``cxxflags``, ``fflags``, ``cppflags``, ``ldflags``, and ``ldlibs``. + Use ``name==`` to propagate compiler flags through the dependencies. * ``target= os=`` Optional architecture specifier (``target=haswell os=CNL10``) * ``^`` Dependency specs (``^callpath@1.1``) @@ -1226,6 +1230,23 @@ variants using the backwards compatibility syntax and uses only ``~`` for disabled boolean variants. The ``-`` and spaces on the command line are provided for convenience and legibility. +Spack allows variants to propagate their value to the package's +dependency by using ``++``, ``--``, and ``~~`` for boolean variants. +For example, for a ``debug`` variant: + +.. code-block:: sh + + mpileaks ++debug # enabled debug will be propagated to dependencies + mpileaks +debug # only mpileaks will have debug enabled + +To propagate the value of non-boolean variants Spack uses ``name==value``. +For example, for the ``stackstart`` variant: + +.. code-block:: sh + + mpileaks stackstart=4 # variant will be propagated to dependencies + mpileaks stackstart==4 # only mpileaks will have this variant value + ^^^^^^^^^^^^^^ Compiler Flags ^^^^^^^^^^^^^^ @@ -1233,10 +1254,15 @@ Compiler Flags Compiler flags are specified using the same syntax as non-boolean variants, but fulfill a different purpose. While the function of a variant is set by the package, compiler flags are used by the compiler wrappers to inject -flags into the compile line of the build. Additionally, compiler flags are -inherited by dependencies. 
``spack install libdwarf cppflags="-g"`` will -install both libdwarf and libelf with the ``-g`` flag injected into their -compile line. +flags into the compile line of the build. Additionally, compiler flags can +be inherited by dependencies by using ``==``. +``spack install libdwarf cppflags=="-g"`` will install both libdwarf and +libelf with the ``-g`` flag injected into their compile line. + +.. note:: + + versions of spack prior to 0.19.0 will propagate compiler flags using + the ``=`` syntax. Notice that the value of the compiler flags must be quoted if it contains any spaces. Any of ``cppflags=-O3``, ``cppflags="-O3"``, @@ -1438,7 +1464,7 @@ built. You can see what virtual packages a particular package provides by getting info on it: -.. command-output:: spack info mpich +.. command-output:: spack info --virtuals mpich Spack is unique in that its virtual packages can be versioned, just like regular packages. A particular version of a package may provide diff --git a/lib/spack/docs/bootstrapping.rst b/lib/spack/docs/bootstrapping.rst index a38e96ac2f7..86ce2eb77b9 100644 --- a/lib/spack/docs/bootstrapping.rst +++ b/lib/spack/docs/bootstrapping.rst @@ -15,15 +15,13 @@ is an entire command dedicated to the management of every aspect of bootstrappin .. command-output:: spack bootstrap --help -The first thing to know to understand bootstrapping in Spack is that each of -Spack's dependencies is bootstrapped lazily; i.e. the first time it is needed and -can't be found. You can readily check if any prerequisite for using Spack -is missing by running: +Spack is configured to bootstrap its dependencies lazily by default; i.e. the first time they are needed and +can't be found. You can readily check if any prerequisite for using Spack is missing by running: .. 
code-block:: console % spack bootstrap status - Spack v0.17.1 - python@3.8 + Spack v0.19.0 - python@3.8 [FAIL] Core Functionalities [B] MISSING "clingo": required to concretize specs @@ -48,6 +46,21 @@ they can be bootstrapped. Running a command that concretize a spec, like: triggers the bootstrapping of clingo from pre-built binaries as expected. +Users can also bootstrap all the dependencies needed by Spack in a single command, which +might be useful to setup containers or other similar environments: + +.. code-block:: console + + $ spack bootstrap now + ==> Bootstrapping clingo from pre-built binaries + ==> Fetching https://mirror.spack.io/bootstrap/github-actions/v0.3/build_cache/linux-centos7-x86_64-gcc-10.2.1-clingo-bootstrap-spack-shqedxgvjnhiwdcdrvjhbd73jaevv7wt.spec.json + ==> Fetching https://mirror.spack.io/bootstrap/github-actions/v0.3/build_cache/linux-centos7-x86_64/gcc-10.2.1/clingo-bootstrap-spack/linux-centos7-x86_64-gcc-10.2.1-clingo-bootstrap-spack-shqedxgvjnhiwdcdrvjhbd73jaevv7wt.spack + ==> Installing "clingo-bootstrap@spack%gcc@10.2.1~docs~ipo+python+static_libstdcpp build_type=Release arch=linux-centos7-x86_64" from a buildcache + ==> Bootstrapping patchelf from pre-built binaries + ==> Fetching https://mirror.spack.io/bootstrap/github-actions/v0.3/build_cache/linux-centos7-x86_64-gcc-10.2.1-patchelf-0.15.0-htk62k7efo2z22kh6kmhaselru7bfkuc.spec.json + ==> Fetching https://mirror.spack.io/bootstrap/github-actions/v0.3/build_cache/linux-centos7-x86_64/gcc-10.2.1/patchelf-0.15.0/linux-centos7-x86_64-gcc-10.2.1-patchelf-0.15.0-htk62k7efo2z22kh6kmhaselru7bfkuc.spack + ==> Installing "patchelf@0.15.0%gcc@10.2.1 ldflags="-static-libstdc++ -static-libgcc" arch=linux-centos7-x86_64" from a buildcache + ----------------------- The Bootstrapping store ----------------------- @@ -107,19 +120,19 @@ If need be, you can disable bootstrapping altogether by running: in which case it's your responsibility to ensure Spack runs in an environment where all its 
prerequisites are installed. You can -also configure Spack to skip certain bootstrapping methods by *untrusting* -them. For instance: +also configure Spack to skip certain bootstrapping methods by disabling +them specifically: .. code-block:: console - % spack bootstrap untrust github-actions - ==> "github-actions" is now untrusted and will not be used for bootstrapping + % spack bootstrap disable github-actions + ==> "github-actions" is now disabled and will not be used for bootstrapping tells Spack to skip trying to bootstrap from binaries. To add the "github-actions" method back you can: .. code-block:: console - % spack bootstrap trust github-actions + % spack bootstrap enable github-actions There is also an option to reset the bootstrapping configuration to Spack's defaults: diff --git a/lib/spack/docs/build_settings.rst b/lib/spack/docs/build_settings.rst index de580a32890..900a612c4c6 100644 --- a/lib/spack/docs/build_settings.rst +++ b/lib/spack/docs/build_settings.rst @@ -302,88 +302,31 @@ microarchitectures considered during the solve are constrained to be compatible host Spack is currently running on. For instance, if this option is set to ``true``, a user cannot concretize for ``target=icelake`` while running on an Haswell node. -.. _package-preferences: - -------------------- -Package Preferences -------------------- - -Spack can be configured to prefer certain compilers, package -versions, dependencies, and variants during concretization. -The preferred configuration can be controlled via the -``~/.spack/packages.yaml`` file for user configurations, or the -``etc/spack/packages.yaml`` site configuration. - -Here's an example ``packages.yaml`` file that sets preferred packages: - -.. 
code-block:: yaml - - packages: - opencv: - compiler: [gcc@4.9] - variants: +debug - gperftools: - version: [2.2, 2.4, 2.3] - all: - compiler: [gcc@4.4.7, 'gcc@4.6:', intel, clang, pgi] - target: [sandybridge] - providers: - mpi: [mvapich2, mpich, openmpi] - -At a high level, this example is specifying how packages should be -concretized. The opencv package should prefer using GCC 4.9 and -be built with debug options. The gperftools package should prefer version -2.2 over 2.4. Every package on the system should prefer mvapich2 for -its MPI and GCC 4.4.7 (except for opencv, which overrides this by preferring GCC 4.9). -These options are used to fill in implicit defaults. Any of them can be overwritten -on the command line if explicitly requested. - -Each ``packages.yaml`` file begins with the string ``packages:`` and -package names are specified on the next level. The special string ``all`` -applies settings to *all* packages. Underneath each package name is one -or more components: ``compiler``, ``variants``, ``version``, -``providers``, and ``target``. Each component has an ordered list of -spec ``constraints``, with earlier entries in the list being preferred -over later entries. - -Sometimes a package installation may have constraints that forbid -the first concretization rule, in which case Spack will use the first -legal concretization rule. Going back to the example, if a user -requests gperftools 2.3 or later, then Spack will install version 2.4 -as the 2.4 version of gperftools is preferred over 2.3. - -An explicit concretization rule in the preferred section will always -take preference over unlisted concretizations. In the above example, -xlc isn't listed in the compiler list. Every listed compiler from -gcc to pgi will thus be preferred over the xlc compiler. - -The syntax for the ``provider`` section differs slightly from other -concretization rules. 
A provider lists a value that packages may -``depend_on`` (e.g, MPI) and a list of rules for fulfilling that -dependency. - .. _package-requirements: -------------------- Package Requirements -------------------- -You can use the configuration to force the concretizer to choose -specific properties for packages when building them. Like preferences, -these are only applied when the package is required by some other -request (e.g. if the package is needed as a dependency of a -request to ``spack install``). +Spack can be configured to always use certain compilers, package +versions, and variants during concretization through package +requirements. -An example of where this is useful is if you have a package that -is normally built as a dependency but only under certain circumstances -(e.g. only when a variant on a dependent is active): you can make -sure that it always builds the way you want it to; this distinguishes -package configuration requirements from constraints that you add to -``spack install`` or to environments (in those cases, the associated -packages are always built). +Package requirements are useful when you find yourself repeatedly +specifying the same constraints on the command line, and wish that +Spack respects these constraints whether you mention them explicitly +or not. Another use case is specifying constraints that should apply +to all root specs in an environment, without having to repeat the +constraint everywhere. -The following is an example of how to enforce package properties in -``packages.yaml``: +Apart from that, requirements config is more flexible than constraints +on the command line, because it can specify constraints on packages +*when they occur* as a dependency. In contrast, on the command line it +is not possible to specify constraints on dependencies while also keeping +those dependencies optional. + +The package requirements configuration is specified in ``packages.yaml`` +keyed by package name: .. 
code-block:: yaml @@ -452,15 +395,15 @@ under ``all`` are disregarded. For example, with a configuration like this: cmake: require: '%gcc' -Spack requires ``cmake`` to use ``gcc`` and all other nodes (including cmake dependencies) -to use ``clang``. +Spack requires ``cmake`` to use ``gcc`` and all other nodes (including ``cmake`` +dependencies) to use ``clang``. ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Setting requirements on virtual specs ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -A requirement on a virtual spec applies whenever that virtual is present in the DAG. This -can be useful for fixing which virtual provider you want to use: +A requirement on a virtual spec applies whenever that virtual is present in the DAG. +This can be useful for fixing which virtual provider you want to use: .. code-block:: yaml @@ -470,8 +413,8 @@ can be useful for fixing which virtual provider you want to use: With the configuration above the only allowed ``mpi`` provider is ``mvapich2 %gcc``. -Requirements on the virtual spec and on the specific provider are both applied, if present. For -instance with a configuration like: +Requirements on the virtual spec and on the specific provider are both applied, if +present. For instance with a configuration like: .. code-block:: yaml @@ -483,6 +426,66 @@ instance with a configuration like: you will use ``mvapich2~cuda %gcc`` as an ``mpi`` provider. +.. _package-preferences: + +------------------- +Package Preferences +------------------- + +In some cases package requirements can be too strong, and package +preferences are the better option. Package preferences do not impose +constraints on packages for particular versions or variants values, +they rather only set defaults -- the concretizer is free to change +them if it must due to other constraints. Also note that package +preferences are of lower priority than reuse of already installed +packages. + +Here's an example ``packages.yaml`` file that sets preferred packages: + +.. 
code-block:: yaml + + packages: + opencv: + compiler: [gcc@4.9] + variants: +debug + gperftools: + version: [2.2, 2.4, 2.3] + all: + compiler: [gcc@4.4.7, 'gcc@4.6:', intel, clang, pgi] + target: [sandybridge] + providers: + mpi: [mvapich2, mpich, openmpi] + +At a high level, this example is specifying how packages are preferably +concretized. The opencv package should prefer using GCC 4.9 and +be built with debug options. The gperftools package should prefer version +2.2 over 2.4. Every package on the system should prefer mvapich2 for +its MPI and GCC 4.4.7 (except for opencv, which overrides this by preferring GCC 4.9). +These options are used to fill in implicit defaults. Any of them can be overwritten +on the command line if explicitly requested. + +Package preferences accept the following keys or components under +the specific package (or ``all``) section: ``compiler``, ``variants``, +``version``, ``providers``, and ``target``. Each component has an +ordered list of spec ``constraints``, with earlier entries in the +list being preferred over later entries. + +Sometimes a package installation may have constraints that forbid +the first concretization rule, in which case Spack will use the first +legal concretization rule. Going back to the example, if a user +requests gperftools 2.3 or later, then Spack will install version 2.4 +as the 2.4 version of gperftools is preferred over 2.3. + +An explicit concretization rule in the preferred section will always +take preference over unlisted concretizations. In the above example, +xlc isn't listed in the compiler list. Every listed compiler from +gcc to pgi will thus be preferred over the xlc compiler. + +The syntax for the ``provider`` section differs slightly from other +concretization rules. A provider lists a value that packages may +``depends_on`` (e.g., MPI) and a list of rules for fulfilling that +dependency. + .. _package_permissions: ------------------- @@ -531,3 +534,25 @@ directories inside the install prefix. 
This will ensure that even manually placed files within the install prefix are owned by the assigned group. If no group is assigned, Spack will allow the OS default behavior to go as expected. + +---------------------------- +Assigning Package Attributes +---------------------------- + +You can assign class-level attributes in the configuration: + +.. code-block:: yaml + + packages: + mpileaks: + # Override existing attributes + url: http://www.somewhereelse.com/mpileaks-1.0.tar.gz + # ... or add new ones + x: 1 + +Attributes set this way will be accessible to any method executed +in the package.py file (e.g. the ``install()`` method). Values for these +attributes may be any value parseable by yaml. + +These can only be applied to specific packages, not "all" or +virtual packages. diff --git a/lib/spack/docs/build_systems.rst b/lib/spack/docs/build_systems.rst index 12191d29f09..1ce8d6746e4 100644 --- a/lib/spack/docs/build_systems.rst +++ b/lib/spack/docs/build_systems.rst @@ -65,7 +65,6 @@ on these ideas for each distinct build system that Spack supports: build_systems/custompackage build_systems/inteloneapipackage build_systems/intelpackage - build_systems/multiplepackage build_systems/rocmpackage build_systems/sourceforgepackage diff --git a/lib/spack/docs/build_systems/autotoolspackage.rst b/lib/spack/docs/build_systems/autotoolspackage.rst index d341d28d086..88fabc0c5df 100644 --- a/lib/spack/docs/build_systems/autotoolspackage.rst +++ b/lib/spack/docs/build_systems/autotoolspackage.rst @@ -5,9 +5,9 @@ .. _autotoolspackage: ----------------- -AutotoolsPackage ----------------- +--------- +Autotools +--------- Autotools is a GNU build system that provides a build-script generator. By running the platform-independent ``./configure`` script that comes @@ -17,7 +17,7 @@ with the package, you can generate a platform-dependent Makefile. 
Phases ^^^^^^ -The ``AutotoolsPackage`` base class comes with the following phases: +The ``AutotoolsBuilder`` and ``AutotoolsPackage`` base classes come with the following phases: #. ``autoreconf`` - generate the configure script #. ``configure`` - generate the Makefiles diff --git a/lib/spack/docs/build_systems/bundlepackage.rst b/lib/spack/docs/build_systems/bundlepackage.rst index 8787dce546c..7a826f5e178 100644 --- a/lib/spack/docs/build_systems/bundlepackage.rst +++ b/lib/spack/docs/build_systems/bundlepackage.rst @@ -5,9 +5,9 @@ .. _bundlepackage: -------------- -BundlePackage -------------- +------ +Bundle +------ ``BundlePackage`` represents a set of packages that are expected to work well together, such as a collection of commonly used software libraries. The diff --git a/lib/spack/docs/build_systems/cmakepackage.rst b/lib/spack/docs/build_systems/cmakepackage.rst index 9544a7df732..7a1db842de1 100644 --- a/lib/spack/docs/build_systems/cmakepackage.rst +++ b/lib/spack/docs/build_systems/cmakepackage.rst @@ -5,9 +5,9 @@ .. _cmakepackage: ------------- -CMakePackage ------------- +----- +CMake +----- Like Autotools, CMake is a widely-used build-script generator. Designed by Kitware, CMake is the most popular build system for new C, C++, and @@ -21,7 +21,7 @@ whereas Autotools is Unix-only. Phases ^^^^^^ -The ``CMakePackage`` base class comes with the following phases: +The ``CMakeBuilder`` and ``CMakePackage`` base classes come with the following phases: #. ``cmake`` - generate the Makefile #. ``build`` - build the package @@ -130,8 +130,8 @@ Adding flags to cmake To add additional flags to the ``cmake`` call, simply override the ``cmake_args`` function. 
The following example defines values for the flags ``WHATEVER``, ``ENABLE_BROKEN_FEATURE``, ``DETECT_HDF5``, and ``THREADS`` with -and without the :meth:`~spack.build_systems.cmake.CMakePackage.define` and -:meth:`~spack.build_systems.cmake.CMakePackage.define_from_variant` helper functions: +and without the :meth:`~spack.build_systems.cmake.CMakeBuilder.define` and +:meth:`~spack.build_systems.cmake.CMakeBuilder.define_from_variant` helper functions: .. code-block:: python diff --git a/lib/spack/docs/build_systems/inteloneapipackage.rst b/lib/spack/docs/build_systems/inteloneapipackage.rst index fe6aea91bbc..c16c368dd4e 100644 --- a/lib/spack/docs/build_systems/inteloneapipackage.rst +++ b/lib/spack/docs/build_systems/inteloneapipackage.rst @@ -32,7 +32,7 @@ oneAPI packages or use:: For more information on a specific package, do:: - spack info + spack info --all Intel no longer releases new versions of Parallel Studio, which can be used in Spack via the :ref:`intelpackage`. All of its components can diff --git a/lib/spack/docs/build_systems/luapackage.rst b/lib/spack/docs/build_systems/luapackage.rst index 6332edfc205..fd70f90c492 100644 --- a/lib/spack/docs/build_systems/luapackage.rst +++ b/lib/spack/docs/build_systems/luapackage.rst @@ -5,11 +5,11 @@ .. _luapackage: ------------- -LuaPackage ------------- +--- +Lua +--- -LuaPackage is a helper for the common case of Lua packages that provide +The ``Lua`` build-system is a helper for the common case of Lua packages that provide a rockspec file. This is not meant to take a rock archive, but to build a source archive or repository that provides a rockspec, which should cover most lua packages. In the case a Lua package builds by Make rather than @@ -19,7 +19,7 @@ luarocks, prefer MakefilePackage. Phases ^^^^^^ -The ``LuaPackage`` base class comes with the following phases: +The ``LuaBuilder`` and ``LuaPackage`` base classes come with the following phases: #. 
``unpack`` - if using a rock, unpacks the rock and moves into the source directory #. ``preprocess`` - adjust sources or rockspec to fix build diff --git a/lib/spack/docs/build_systems/makefilepackage.rst b/lib/spack/docs/build_systems/makefilepackage.rst index c092432037d..5a83d612fa2 100644 --- a/lib/spack/docs/build_systems/makefilepackage.rst +++ b/lib/spack/docs/build_systems/makefilepackage.rst @@ -5,9 +5,9 @@ .. _makefilepackage: ---------------- -MakefilePackage ---------------- +-------- +Makefile +-------- The most primitive build system a package can use is a plain Makefile. Makefiles are simple to write for small projects, but they usually @@ -18,7 +18,7 @@ variables. Phases ^^^^^^ -The ``MakefilePackage`` base class comes with 3 phases: +The ``MakefileBuilder`` and ``MakefilePackage`` base classes come with 3 phases: #. ``edit`` - edit the Makefile #. ``build`` - build the project diff --git a/lib/spack/docs/build_systems/mavenpackage.rst b/lib/spack/docs/build_systems/mavenpackage.rst index 94ce128d3a2..d1237ce34c3 100644 --- a/lib/spack/docs/build_systems/mavenpackage.rst +++ b/lib/spack/docs/build_systems/mavenpackage.rst @@ -5,9 +5,9 @@ .. _mavenpackage: ------------- -MavenPackage ------------- +----- +Maven +----- Apache Maven is a general-purpose build system that does not rely on Makefiles to build software. It is designed for building and @@ -17,7 +17,7 @@ managing and Java-based project. Phases ^^^^^^ -The ``MavenPackage`` base class comes with the following phases: +The ``MavenBuilder`` and ``MavenPackage`` base classes come with the following phases: #. ``build`` - compile code and package into a JAR file #. ``install`` - copy to installation prefix diff --git a/lib/spack/docs/build_systems/mesonpackage.rst b/lib/spack/docs/build_systems/mesonpackage.rst index 5ca444dcb1c..c32b2241bc7 100644 --- a/lib/spack/docs/build_systems/mesonpackage.rst +++ b/lib/spack/docs/build_systems/mesonpackage.rst @@ -5,9 +5,9 @@ .. 
_mesonpackage: ------------- -MesonPackage ------------- +----- +Meson +----- Much like Autotools and CMake, Meson is a build system. But it is meant to be both fast and as user friendly as possible. GNOME's goal @@ -17,7 +17,7 @@ is to port modules to use the Meson build system. Phases ^^^^^^ -The ``MesonPackage`` base class comes with the following phases: +The ``MesonBuilder`` and ``MesonPackage`` base classes come with the following phases: #. ``meson`` - generate ninja files #. ``build`` - build the project diff --git a/lib/spack/docs/build_systems/multiplepackage.rst b/lib/spack/docs/build_systems/multiplepackage.rst deleted file mode 100644 index 71751f0dbf1..00000000000 --- a/lib/spack/docs/build_systems/multiplepackage.rst +++ /dev/null @@ -1,350 +0,0 @@ -.. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other - Spack Project Developers. See the top-level COPYRIGHT file for details. - - SPDX-License-Identifier: (Apache-2.0 OR MIT) - -.. _multiplepackage: - ----------------------- -Multiple Build Systems ----------------------- - -Quite frequently, a package will change build systems from one version to the -next. For example, a small project that once used a single Makefile to build -may now require Autotools to handle the increased number of files that need to -be compiled. Or, a package that once used Autotools may switch to CMake for -Windows support. In this case, it becomes a bit more challenging to write a -single build recipe for this package in Spack. - -There are several ways that this can be handled in Spack: - -#. Subclass the new build system, and override phases as needed (preferred) -#. Subclass ``Package`` and implement ``install`` as needed -#. Create separate ``*-cmake``, ``*-autotools``, etc. packages for each build system -#. Rename the old package to ``*-legacy`` and create a new package -#. Move the old package to a ``legacy`` repository and create a new package -#. 
Drop older versions that only support the older build system - -Of these options, 1 is preferred, and will be demonstrated in this -documentation. Options 3-5 have issues with concretization, so shouldn't be -used. Options 4-5 also don't support more than two build systems. Option 6 only -works if the old versions are no longer needed. Option 1 is preferred over 2 -because it makes it easier to drop the old build system entirely. - -The exact syntax of the package depends on which build systems you need to -support. Below are a couple of common examples. - -^^^^^^^^^^^^^^^^^^^^^ -Makefile -> Autotools -^^^^^^^^^^^^^^^^^^^^^ - -Let's say we have the following package: - -.. code-block:: python - - class Foo(MakefilePackage): - version("1.2.0", sha256="...") - - def edit(self, spec, prefix): - filter_file("CC=", "CC=" + spack_cc, "Makefile") - - def install(self, spec, prefix): - install_tree(".", prefix) - - -The package subclasses from :ref:`makefilepackage`, which has three phases: - -#. ``edit`` (does nothing by default) -#. ``build`` (runs ``make`` by default) -#. ``install`` (runs ``make install`` by default) - -In this case, the ``install`` phase needed to be overridden because the -Makefile did not have an install target. We also modify the Makefile to use -Spack's compiler wrappers. The default ``build`` phase is not changed. - -Starting with version 1.3.0, we want to use Autotools to build instead. -:ref:`autotoolspackage` has four phases: - -#. ``autoreconf`` (does not if a configure script already exists) -#. ``configure`` (runs ``./configure --prefix=...`` by default) -#. ``build`` (runs ``make`` by default) -#. ``install`` (runs ``make install`` by default) - -If the only version we need to support is 1.3.0, the package would look as -simple as: - -.. 
code-block:: python - - class Foo(AutotoolsPackage): - version("1.3.0", sha256="...") - - def configure_args(self): - return ["--enable-shared"] - - -In this case, we use the default methods for each phase and only override -``configure_args`` to specify additional flags to pass to ``./configure``. - -If we wanted to write a single package that supports both versions 1.2.0 and -1.3.0, it would look something like: - -.. code-block:: python - - class Foo(AutotoolsPackage): - version("1.3.0", sha256="...") - version("1.2.0", sha256="...", deprecated=True) - - def configure_args(self): - return ["--enable-shared"] - - # Remove the following once version 1.2.0 is dropped - @when("@:1.2") - def patch(self): - filter_file("CC=", "CC=" + spack_cc, "Makefile") - - @when("@:1.2") - def autoreconf(self, spec, prefix): - pass - - @when("@:1.2") - def configure(self, spec, prefix): - pass - - @when("@:1.2") - def install(self, spec, prefix): - install_tree(".", prefix) - - -There are a few interesting things to note here: - -* We added ``deprecated=True`` to version 1.2.0. This signifies that version - 1.2.0 is deprecated and shouldn't be used. However, if a user still relies - on version 1.2.0, it's still there and builds just fine. -* We moved the contents of the ``edit`` phase to the ``patch`` function. Since - ``AutotoolsPackage`` doesn't have an ``edit`` phase, the only way for this - step to be executed is to move it to the ``patch`` function, which always - gets run. -* The ``autoreconf`` and ``configure`` phases become no-ops. Since the old - Makefile-based build system doesn't use these, we ignore these phases when - building ``foo@1.2.0``. -* The ``@when`` decorator is used to override these phases only for older - versions. The default methods are used for ``foo@1.3:``. - -Once a new Spack release comes out, version 1.2.0 and everything below the -comment can be safely deleted. The result is the same as if we had written a -package for version 1.3.0 from scratch. 
- -^^^^^^^^^^^^^^^^^^ -Autotools -> CMake -^^^^^^^^^^^^^^^^^^ - -Let's say we have the following package: - -.. code-block:: python - - class Bar(AutotoolsPackage): - version("1.2.0", sha256="...") - - def configure_args(self): - return ["--enable-shared"] - - -The package subclasses from :ref:`autotoolspackage`, which has four phases: - -#. ``autoreconf`` (does not if a configure script already exists) -#. ``configure`` (runs ``./configure --prefix=...`` by default) -#. ``build`` (runs ``make`` by default) -#. ``install`` (runs ``make install`` by default) - -In this case, we use the default methods for each phase and only override -``configure_args`` to specify additional flags to pass to ``./configure``. - -Starting with version 1.3.0, we want to use CMake to build instead. -:ref:`cmakepackage` has three phases: - -#. ``cmake`` (runs ``cmake ...`` by default) -#. ``build`` (runs ``make`` by default) -#. ``install`` (runs ``make install`` by default) - -If the only version we need to support is 1.3.0, the package would look as -simple as: - -.. code-block:: python - - class Bar(CMakePackage): - version("1.3.0", sha256="...") - - def cmake_args(self): - return [self.define("BUILD_SHARED_LIBS", True)] - - -In this case, we use the default methods for each phase and only override -``cmake_args`` to specify additional flags to pass to ``cmake``. - -If we wanted to write a single package that supports both versions 1.2.0 and -1.3.0, it would look something like: - -.. 
code-block:: python - - class Bar(CMakePackage): - version("1.3.0", sha256="...") - version("1.2.0", sha256="...", deprecated=True) - - def cmake_args(self): - return [self.define("BUILD_SHARED_LIBS", True)] - - # Remove the following once version 1.2.0 is dropped - def configure_args(self): - return ["--enable-shared"] - - @when("@:1.2") - def cmake(self, spec, prefix): - configure("--prefix=" + prefix, *self.configure_args()) - - -There are a few interesting things to note here: - -* We added ``deprecated=True`` to version 1.2.0. This signifies that version - 1.2.0 is deprecated and shouldn't be used. However, if a user still relies - on version 1.2.0, it's still there and builds just fine. -* Since CMake and Autotools are so similar, we only need to override the - ``cmake`` phase, we can use the default ``build`` and ``install`` phases. -* We override ``cmake`` to run ``./configure`` for older versions. - ``configure_args`` remains the same. -* The ``@when`` decorator is used to override these phases only for older - versions. The default methods are used for ``bar@1.3:``. - -Once a new Spack release comes out, version 1.2.0 and everything below the -comment can be safely deleted. The result is the same as if we had written a -package for version 1.3.0 from scratch. - -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -Multiple build systems for the same version -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -During the transition from one build system to another, developers often -support multiple build systems at the same time. Spack can only use a single -build system for a single version. To decide which build system to use for a -particular version, take the following things into account: - -1. If the developers explicitly state that one build system is preferred over - another, use that one. -2. If one build system is considered "experimental" while another is considered - "stable", use the stable build system. -3. Otherwise, use the newer build system. 
- -The developer preference for which build system to use can change over time as -a newer build system becomes stable/recommended. - -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -Dropping support for old build systems -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -When older versions of a package don't support a newer build system, it can be -tempting to simply delete them from a package. This significantly reduces -package complexity and makes the build recipe much easier to maintain. However, -other packages or Spack users may rely on these older versions. The recommended -approach is to first support both build systems (as demonstrated above), -:ref:`deprecate ` versions that rely on the old build system, and -remove those versions and any phases that needed to be overridden in the next -Spack release. - -^^^^^^^^^^^^^^^^^^^^^^^^^^^ -Three or more build systems -^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -In rare cases, a package may change build systems multiple times. For example, -a package may start with Makefiles, then switch to Autotools, then switch to -CMake. The same logic used above can be extended to any number of build systems. -For example: - -.. 
code-block:: python - - class Baz(CMakePackage): - version("1.4.0", sha256="...") # CMake - version("1.3.0", sha256="...") # Autotools - version("1.2.0", sha256="...") # Makefile - - def cmake_args(self): - return [self.define("BUILD_SHARED_LIBS", True)] - - # Remove the following once version 1.3.0 is dropped - def configure_args(self): - return ["--enable-shared"] - - @when("@1.3") - def cmake(self, spec, prefix): - configure("--prefix=" + prefix, *self.configure_args()) - - # Remove the following once version 1.2.0 is dropped - @when("@:1.2") - def patch(self): - filter_file("CC=", "CC=" + spack_cc, "Makefile") - - @when("@:1.2") - def cmake(self, spec, prefix): - pass - - @when("@:1.2") - def install(self, spec, prefix): - install_tree(".", prefix) - - -^^^^^^^^^^^^^^^^^^^ -Additional examples -^^^^^^^^^^^^^^^^^^^ - -When writing new packages, it often helps to see examples of existing packages. -Here is an incomplete list of existing Spack packages that have changed build -systems before: - -================ ===================== ================ -Package Previous Build System New Build System -================ ===================== ================ -amber custom CMake -arpack-ng Autotools CMake -atk Autotools Meson -blast None Autotools -dyninst Autotools CMake -evtgen Autotools CMake -fish Autotools CMake -gdk-pixbuf Autotools Meson -glib Autotools Meson -glog Autotools CMake -gmt Autotools CMake -gtkplus Autotools Meson -hpl Makefile Autotools -interproscan Perl Maven -jasper Autotools CMake -kahip SCons CMake -kokkos Makefile CMake -kokkos-kernels Makefile CMake -leveldb Makefile CMake -libdrm Autotools Meson -libjpeg-turbo Autotools CMake -mesa Autotools Meson -metis None CMake -mpifileutils Autotools CMake -muparser Autotools CMake -mxnet Makefile CMake -nest Autotools CMake -neuron Autotools CMake -nsimd CMake nsconfig -opennurbs Makefile CMake -optional-lite None CMake -plasma Makefile CMake -preseq Makefile Autotools -protobuf Autotools CMake 
-py-pygobject Autotools Python -singularity Autotools Makefile -span-lite None CMake -ssht Makefile CMake -string-view-lite None CMake -superlu Makefile CMake -superlu-dist Makefile CMake -uncrustify Autotools CMake -================ ===================== ================ - -Packages that support multiple build systems can be a bit confusing to write. -Don't hesitate to open an issue or draft pull request and ask for advice from -other Spack developers! diff --git a/lib/spack/docs/build_systems/octavepackage.rst b/lib/spack/docs/build_systems/octavepackage.rst index 9a81671db6c..32e8cb61b34 100644 --- a/lib/spack/docs/build_systems/octavepackage.rst +++ b/lib/spack/docs/build_systems/octavepackage.rst @@ -5,9 +5,9 @@ .. _octavepackage: -------------- -OctavePackage -------------- +------ +Octave +------ Octave has its own build system for installing packages. @@ -15,7 +15,7 @@ Octave has its own build system for installing packages. Phases ^^^^^^ -The ``OctavePackage`` base class has a single phase: +The ``OctaveBuilder`` and ``OctavePackage`` base classes have a single phase: #. ``install`` - install the package diff --git a/lib/spack/docs/build_systems/perlpackage.rst b/lib/spack/docs/build_systems/perlpackage.rst index be81ca6ce97..4e1f613c3b7 100644 --- a/lib/spack/docs/build_systems/perlpackage.rst +++ b/lib/spack/docs/build_systems/perlpackage.rst @@ -5,9 +5,9 @@ .. _perlpackage: ------------ -PerlPackage ------------ +---- +Perl +---- Much like Octave, Perl has its own language-specific build system. @@ -16,7 +16,7 @@ build system. Phases ^^^^^^ -The ``PerlPackage`` base class comes with 3 phases that can be overridden: +The ``PerlBuilder`` and ``PerlPackage`` base classes come with 3 phases that can be overridden: #. ``configure`` - configure the package #. 
``build`` - build the package diff --git a/lib/spack/docs/build_systems/qmakepackage.rst b/lib/spack/docs/build_systems/qmakepackage.rst index 6e8bcef7ccb..215d59536e2 100644 --- a/lib/spack/docs/build_systems/qmakepackage.rst +++ b/lib/spack/docs/build_systems/qmakepackage.rst @@ -5,9 +5,9 @@ .. _qmakepackage: ------------- -QMakePackage ------------- +----- +QMake +----- Much like Autotools and CMake, QMake is a build-script generator designed by the developers of Qt. In its simplest form, Spack's @@ -29,7 +29,7 @@ variables or edit ``*.pro`` files to get things working properly. Phases ^^^^^^ -The ``QMakePackage`` base class comes with the following phases: +The ``QMakeBuilder`` and ``QMakePackage`` base classes come with the following phases: #. ``qmake`` - generate Makefiles #. ``build`` - build the project diff --git a/lib/spack/docs/build_systems/racketpackage.rst b/lib/spack/docs/build_systems/racketpackage.rst index 8ba37ceebaa..5e09ffca4a6 100644 --- a/lib/spack/docs/build_systems/racketpackage.rst +++ b/lib/spack/docs/build_systems/racketpackage.rst @@ -5,9 +5,9 @@ .. _racketpackage: -------------- -RacketPackage -------------- +------ +Racket +------ Much like Python, Racket packages and modules have their own special build system. To learn more about the specifics of Racket package system, please refer to the @@ -17,7 +17,7 @@ To learn more about the specifics of Racket package system, please refer to the Phases ^^^^^^ -The ``RacketPackage`` base class provides an ``install`` phase that +The ``RacketBuilder`` and ``RacketPackage`` base classes provide an ``install`` phase that can be overridden, corresponding to the use of: .. code-block:: console diff --git a/lib/spack/docs/build_systems/rpackage.rst b/lib/spack/docs/build_systems/rpackage.rst index 671af779b13..ebf2270e8e6 100644 --- a/lib/spack/docs/build_systems/rpackage.rst +++ b/lib/spack/docs/build_systems/rpackage.rst @@ -19,7 +19,7 @@ new Spack packages for. 
Phases ^^^^^^ -The ``RPackage`` base class has a single phase: +The ``RBuilder`` and ``RPackage`` base classes have a single phase: #. ``install`` - install the package diff --git a/lib/spack/docs/build_systems/rubypackage.rst b/lib/spack/docs/build_systems/rubypackage.rst index b64ac60b2f9..5b6ec462a6a 100644 --- a/lib/spack/docs/build_systems/rubypackage.rst +++ b/lib/spack/docs/build_systems/rubypackage.rst @@ -5,9 +5,9 @@ .. _rubypackage: ------------ -RubyPackage ------------ +---- +Ruby +---- Like Perl, Python, and R, Ruby has its own build system for installing Ruby gems. @@ -16,7 +16,7 @@ installing Ruby gems. Phases ^^^^^^ -The ``RubyPackage`` base class provides the following phases that +The ``RubyBuilder`` and ``RubyPackage`` base classes provide the following phases that can be overridden: #. ``build`` - build everything needed to install diff --git a/lib/spack/docs/build_systems/sconspackage.rst b/lib/spack/docs/build_systems/sconspackage.rst index cea0408651b..aea5dacfa72 100644 --- a/lib/spack/docs/build_systems/sconspackage.rst +++ b/lib/spack/docs/build_systems/sconspackage.rst @@ -5,9 +5,9 @@ .. _sconspackage: ------------- -SConsPackage ------------- +----- +SCons +----- SCons is a general-purpose build system that does not rely on Makefiles to build software. SCons is written in Python, and handles @@ -42,7 +42,7 @@ As previously mentioned, SCons allows developers to add subcommands like $ scons install -To facilitate this, the ``SConsPackage`` base class provides the +To facilitate this, the ``SConsBuilder`` and ``SConsPackage`` base classes provide the following phases: #. ``build`` - build the package diff --git a/lib/spack/docs/build_systems/sippackage.rst b/lib/spack/docs/build_systems/sippackage.rst index 5235015a92c..3e77968e801 100644 --- a/lib/spack/docs/build_systems/sippackage.rst +++ b/lib/spack/docs/build_systems/sippackage.rst @@ -5,9 +5,9 @@ .. 
_sippackage: ----------- -SIPPackage ----------- +--- +SIP +--- SIP is a tool that makes it very easy to create Python bindings for C and C++ libraries. It was originally developed to create PyQt, the Python bindings for @@ -22,7 +22,7 @@ provides support functions to the automatically generated code. Phases ^^^^^^ -The ``SIPPackage`` base class comes with the following phases: +The ``SIPBuilder`` and ``SIPPackage`` base classes come with the following phases: #. ``configure`` - configure the package #. ``build`` - build the package diff --git a/lib/spack/docs/build_systems/wafpackage.rst b/lib/spack/docs/build_systems/wafpackage.rst index 54fcba98d00..f91479ce43a 100644 --- a/lib/spack/docs/build_systems/wafpackage.rst +++ b/lib/spack/docs/build_systems/wafpackage.rst @@ -5,9 +5,9 @@ .. _wafpackage: ----------- -WafPackage ----------- +--- +Waf +--- Like SCons, Waf is a general-purpose build system that does not rely on Makefiles to build software. @@ -16,7 +16,7 @@ on Makefiles to build software. Phases ^^^^^^ -The ``WafPackage`` base class comes with the following phases: +The ``WafBuilder`` and ``WafPackage`` base classes come with the following phases: #. ``configure`` - configure the project #. ``build`` - build the project diff --git a/lib/spack/docs/conf.py b/lib/spack/docs/conf.py index 30123635eb7..4fc321c72d5 100644 --- a/lib/spack/docs/conf.py +++ b/lib/spack/docs/conf.py @@ -32,6 +32,9 @@ # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. 
+link_name = os.path.abspath("_spack_root") +if not os.path.exists(link_name): + os.symlink(os.path.abspath("../../.."), link_name, target_is_directory=True) sys.path.insert(0, os.path.abspath("_spack_root/lib/spack/external")) sys.path.insert(0, os.path.abspath("_spack_root/lib/spack/external/pytest-fallback")) @@ -206,6 +209,9 @@ def setup(sphinx): # Spack classes that are private and we don't want to expose ("py:class", "spack.provider_index._IndexBase"), ("py:class", "spack.repo._PrependFileLoader"), + ("py:class", "spack.build_systems._checks.BaseBuilder"), + # Spack classes that intersphinx is unable to resolve + ("py:class", "spack.version.VersionBase"), ] # The reST default role (used for this markup: `text`) to use for all documents. diff --git a/lib/spack/docs/config_yaml.rst b/lib/spack/docs/config_yaml.rst index b93384e81b4..f2159c64cce 100644 --- a/lib/spack/docs/config_yaml.rst +++ b/lib/spack/docs/config_yaml.rst @@ -224,9 +224,9 @@ them). Please note that we currently disable ccache's ``hash_dir`` feature to avoid an issue with the stage directory (see https://github.com/LLNL/spack/pull/3761#issuecomment-294352232). ------------------- -``shared_linking`` ------------------- +----------------------- +``shared_linking:type`` +----------------------- Control whether Spack embeds ``RPATH`` or ``RUNPATH`` attributes in ELF binaries so that they can find their dependencies. Has no effect on macOS. @@ -245,6 +245,52 @@ the loading object. DO NOT MIX the two options within the same install tree. +----------------------- +``shared_linking:bind`` +----------------------- + +This is an *experimental option* that controls whether Spack embeds absolute paths +to needed shared libraries in ELF executables and shared libraries on Linux. Setting +this option to ``true`` has two advantages: + +1. **Improved startup time**: when running an executable, the dynamic loader does not + have to perform a search for needed libraries, they are loaded directly. +2. 
**Reliability**: libraries loaded at runtime are those that were linked to. This + minimizes the risk of accidentally picking up system libraries. + +In the current implementation, Spack sets the soname (shared object name) of +libraries to their install path upon installation. This has two implications: + +1. binding does not apply to libraries installed *before* the option was enabled; +2. toggling the option off does *not* prevent binding of libraries installed when + the option was still enabled. + +It is also worth noting that: + +1. Applications relying on ``dlopen(3)`` will continue to work, even when they open + a library by name. This is because ``RPATH``\s are retained in binaries also + when ``bind`` is enabled. +2. ``LD_PRELOAD`` continues to work for the typical use case of overriding + symbols, such as preloading a library with a more efficient ``malloc``. + However, the preloaded library will be loaded *additionally to*, instead of + *in place of* another library with the same name --- this can be problematic + in very rare cases where libraries rely on a particular ``init`` or ``fini`` + order. + +.. note:: + + In some cases packages provide *stub libraries* that only contain an interface + for linking, but lack an implementation for runtime. An example of this is + ``libcuda.so``, provided by the CUDA toolkit; it can be used to link against, + but the library needed at runtime is the one installed with the CUDA driver. + To avoid binding those libraries, they can be marked as non-bindable using + a property in the package: + + .. 
code-block:: python + + class Example(Package): + non_bindable_shared_objects = ["libinterface.so"] + ---------------------- ``terminal_title`` ---------------------- diff --git a/lib/spack/docs/configuration.rst b/lib/spack/docs/configuration.rst index 21c8f4afaf4..26df115567f 100644 --- a/lib/spack/docs/configuration.rst +++ b/lib/spack/docs/configuration.rst @@ -405,6 +405,17 @@ Spack understands several special variables. These are: * ``$user``: name of the current user * ``$user_cache_path``: user cache directory (``~/.spack`` unless :ref:`overridden `) +* ``$architecture``: the architecture triple of the current host, as + detected by Spack. +* ``$arch``: alias for ``$architecture``. +* ``$platform``: the platform of the current host, as detected by Spack. +* ``$operating_system``: the operating system of the current host, as + detected by the ``distro`` python module. +* ``$os``: alias for ``$operating_system``. +* ``$target``: the ISA target for the current host, as detected by + ArchSpec. E.g. ``skylake`` or ``neoverse-n1``. +* ``$target_family``. The target family for the current host, as + detected by ArchSpec. E.g. ``x86_64`` or ``aarch64``. Note that, as with shell variables, you can write these as ``$varname`` or with braces to distinguish the variable from surrounding characters: @@ -549,7 +560,7 @@ down the problem: You can see above that the ``build_jobs`` and ``debug`` settings are built in and are not overridden by a configuration file. The -``verify_ssl`` setting comes from the ``--insceure`` option on the +``verify_ssl`` setting comes from the ``--insecure`` option on the command line. ``dirty`` and ``install_tree`` come from the custom scopes ``./my-scope`` and ``./my-scope-2``, and all other configuration options come from the default configuration files that ship with Spack. 
diff --git a/lib/spack/docs/developer_guide.rst b/lib/spack/docs/developer_guide.rst index c4d04cb485e..6b67ef9f77e 100644 --- a/lib/spack/docs/developer_guide.rst +++ b/lib/spack/docs/developer_guide.rst @@ -149,11 +149,9 @@ grouped by functionality. Package-related modules ^^^^^^^^^^^^^^^^^^^^^^^ -:mod:`spack.package` - Contains the :class:`~spack.package_base.Package` class, which - is the superclass for all packages in Spack. Methods on ``Package`` - implement all phases of the :ref:`package lifecycle - ` and manage the build process. +:mod:`spack.package_base` + Contains the :class:`~spack.package_base.PackageBase` class, which + is the superclass for all packages in Spack. :mod:`spack.util.naming` Contains functions for mapping between Spack package names, diff --git a/lib/spack/docs/environments.rst b/lib/spack/docs/environments.rst index 8f4e0a8256a..65b9fb6b51d 100644 --- a/lib/spack/docs/environments.rst +++ b/lib/spack/docs/environments.rst @@ -520,27 +520,33 @@ available from the yaml file. ^^^^^^^^^^^^^^^^^^^ Spec concretization ^^^^^^^^^^^^^^^^^^^ -An environment can be concretized in three different modes and the behavior active under any environment -is determined by the ``concretizer:unify`` property. By default specs are concretized *separately*, one after the other: +An environment can be concretized in three different modes and the behavior active under +any environment is determined by the ``concretizer:unify`` configuration option. + +The *default* mode is to unify all specs: .. code-block:: yaml spack: specs: - - hdf5~mpi - hdf5+mpi - zlib@1.2.8 concretizer: - unify: false + unify: true -This mode of operation permits to deploy a full software stack where multiple configurations of the same package -need to be installed alongside each other using the best possible selection of transitive dependencies. The downside -is that redundancy of installations is disregarded completely, and thus environments might be more bloated than -strictly needed. 
In the example above, for instance, if a version of ``zlib`` newer than ``1.2.8`` is known to Spack, -then it will be used for both ``hdf5`` installations. +This means that any package in the environment corresponds to a single concrete spec. In +the above example, when ``hdf5`` depends down the line on ``zlib``, it is required to +take ``zlib@1.2.8`` instead of a newer version. This mode of concretization is +particularly useful when environment views are used: if every package occurs in +only one flavor, it is usually possible to merge all install directories into a view. -If redundancy of the environment is a concern, Spack provides a way to install it *together where possible*, -i.e. trying to maximize reuse of dependencies across different specs: +A downside of unified concretization is that it can be overly strict. For example, a +concretization error would happen when both ``hdf5+mpi`` and ``hdf5~mpi`` are specified +in an environment. + +The second mode is to *unify when possible*: this makes concretization of root specs +more independent. Instead of requiring reuse of dependencies across different root +specs, it is only maximized: .. code-block:: yaml @@ -552,26 +558,27 @@ i.e. trying to maximize reuse of dependencies across different specs: concretizer: unify: when_possible -Also in this case Spack allows having multiple configurations of the same package, but privileges the reuse of -specs over other factors. Going back to our example, this means that both ``hdf5`` installations will use -``zlib@1.2.8`` as a dependency even if newer versions of that library are available. -Central installations done at HPC centers by system administrators or user support groups are a common case -that fits either of these two modes. +This means that both ``hdf5`` installations will use ``zlib@1.2.8`` as a dependency even +if newer versions of that library are available.
-Environments can also be configured to concretize all the root specs *together*, in a self-consistent way, to -ensure that each package in the environment comes with a single configuration: +The third mode of operation is to concretize root specs entirely independently by +disabling unified concretization: .. code-block:: yaml spack: specs: + - hdf5~mpi - hdf5+mpi - zlib@1.2.8 concretizer: - unify: true + unify: false -This mode of operation is usually what is required by software developers that want to deploy their development -environment and have a single view of it in the filesystem. +In this example ``hdf5`` is concretized separately, and does not consider ``zlib@1.2.8`` +as a constraint or preference. Instead, it will take the latest possible version. + +The last two concretization options are typically useful for system administrators and +user support groups providing a large software stack for their HPC center. .. note:: @@ -582,10 +589,10 @@ environment and have a single view of it in the filesystem. .. admonition:: Re-concretization of user specs - When concretizing specs *together* or *together where possible* the entire set of specs will be + When using *unified* concretization (when possible), the entire set of specs will be re-concretized after any addition of new user specs, to ensure that - the environment remains consistent / minimal. When instead the specs are concretized - separately only the new specs will be re-concretized after any addition. + the environment remains consistent / minimal. When instead unified concretization is + disabled, only the new specs will be concretized after any addition. ^^^^^^^^^^^^^ Spec Matrices @@ -987,7 +994,7 @@ A typical workflow is as follows: spack env create -d . spack -e . add perl spack -e . concretize - spack -e . env depfile > Makefile + spack -e . 
env depfile -o Makefile make -j64 This generates a ``Makefile`` from a concretized environment in the @@ -1000,7 +1007,6 @@ load, even when packages are built in parallel. By default the following phony convenience targets are available: - ``make all``: installs the environment (default target); -- ``make fetch-all``: only fetch sources of all packages; - ``make clean``: cleans files used by make, but does not uninstall packages. .. tip:: @@ -1010,8 +1016,17 @@ By default the following phony convenience targets are available: printed orderly per package install. To get synchronized output with colors, use ``make -j SPACK_COLOR=always --output-sync=recurse``. -The following advanced example shows how generated targets can be used in a -``Makefile``: +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +Specifying dependencies on generated ``make`` targets +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +An interesting question is how to include generated ``Makefile``\s in your own +``Makefile``\s. This comes up when you want to install an environment that provides +executables required in a command for a make target of your own. + +The example below shows how to accomplish this: the ``env`` target specifies +the generated ``spack/env`` target as a prerequisite, meaning that the environment +gets installed and is available for use in the ``env`` target. .. code:: Makefile @@ -1037,11 +1052,10 @@ The following advanced example shows how generated targets can be used in a include env.mk endif -When ``make`` is invoked, it first "remakes" the missing include ``env.mk`` -from its rule, which triggers concretization. When done, the generated target -``spack/env`` is available. In the above example, the ``env`` target uses this generated -target as a prerequisite, meaning that it can make use of the installed packages in -its commands. 
+This works as follows: when ``make`` is invoked, it first "remakes" the missing +include ``env.mk`` as there is a target for it. This triggers concretization of +the environment and makes spack output ``env.mk``. At that point the +generated target ``spack/env`` becomes available through ``include env.mk``. As it is typically undesirable to remake ``env.mk`` as part of ``make clean``, the include is conditional. @@ -1052,3 +1066,24 @@ the include is conditional. the ``--make-target-prefix`` flag and use the non-phony target ``/env`` as prerequisite, instead of the phony target ``/all``. + +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +Building a subset of the environment +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +The generated ``Makefile``\s contain install targets for each spec. Given the hash +of a particular spec, you can use the ``.install/`` target to install the +spec with its dependencies. There is also ``.install-deps/`` to *only* install +its dependencies. This can be useful when certain flags should only apply to +dependencies. Below we show a use case where a spec is installed with verbose +output (``spack install --verbose``) while its dependencies are installed silently: + +.. code:: console + + $ spack env depfile -o Makefile --make-target-prefix my_env + + # Install dependencies in parallel, only show a log on error. + $ make -j16 my_env/.install-deps/ SPACK_INSTALL_FLAGS=--show-log-on-error + + # Install the root spec with verbose output. + $ make -j16 my_env/.install/ SPACK_INSTALL_FLAGS=--verbose \ No newline at end of file diff --git a/lib/spack/docs/features.rst b/lib/spack/docs/features.rst index 985da967fd1..1682616adb8 100644 --- a/lib/spack/docs/features.rst +++ b/lib/spack/docs/features.rst @@ -98,40 +98,42 @@ For example, this command: .. code-block:: console - $ spack create http://www.mr511.de/software/libelf-0.8.13.tar.gz + $ spack create https://ftp.osuosl.org/pub/blfs/conglomeration/libelf/libelf-0.8.13.tar.gz creates a simple python file: .. 
code-block:: python - from spack import * + from spack.package import * - class Libelf(Package): + class Libelf(AutotoolsPackage): """FIXME: Put a proper description of your package here.""" # FIXME: Add a proper url for your package's homepage here. - homepage = "http://www.example.com" - url = "http://www.mr511.de/software/libelf-0.8.13.tar.gz" + homepage = "https://www.example.com" + url = "https://ftp.osuosl.org/pub/blfs/conglomeration/libelf/libelf-0.8.13.tar.gz" - version('0.8.13', '4136d7b4c04df68b686570afa26988ac') + # FIXME: Add a list of GitHub accounts to + # notify when the package is updated. + # maintainers = ["github_user1", "github_user2"] + + version("0.8.13", sha256="591a9b4ec81c1f2042a97aa60564e0cb79d041c52faa7416acb38bc95bd2c76d") # FIXME: Add dependencies if required. - # depends_on('foo') + # depends_on("foo") - def install(self, spec, prefix): - # FIXME: Modify the configure line to suit your build system here. - configure('--prefix={0}'.format(prefix)) - - # FIXME: Add logic to build and install here. - make() - make('install') + def configure_args(self): + # FIXME: Add arguments other than --prefix + # FIXME: If not needed delete this function + args = [] + return args It doesn't take much python coding to get from there to a working package: .. 
literalinclude:: _spack_root/var/spack/repos/builtin/packages/libelf/package.py - :lines: 6- + :lines: 5- Spack also provides wrapper functions around common commands like ``configure``, ``make``, and ``cmake`` to make writing packages diff --git a/lib/spack/docs/getting_started.rst b/lib/spack/docs/getting_started.rst index d174a454689..1bf99e09b3c 100644 --- a/lib/spack/docs/getting_started.rst +++ b/lib/spack/docs/getting_started.rst @@ -44,7 +44,7 @@ A build matrix showing which packages are working on which systems is shown belo yum install -y epel-release yum update -y yum --enablerepo epel groupinstall -y "Development Tools" - yum --enablerepo epel install -y curl findutils gcc-c++ gcc gcc-gfortran git gnupg2 hostname iproute make patch python3 python3-pip python3-setuptools unzip + yum --enablerepo epel install -y curl findutils gcc-c++ gcc gcc-gfortran git gnupg2 hostname iproute redhat-lsb-core make patch python3 python3-pip python3-setuptools unzip python3 -m pip install boto3 .. tab-item:: macOS Brew @@ -124,88 +124,41 @@ Spack provides two ways of bootstrapping ``clingo``: from pre-built binaries (default), or from sources. The fastest way to get started is to bootstrap from pre-built binaries. -.. note:: - - When bootstrapping from pre-built binaries, Spack currently requires - ``patchelf`` on Linux and ``otool`` on macOS. If ``patchelf`` is not in the - ``PATH``, Spack will build it from sources, and a C++ compiler is required. - -The first time you concretize a spec, Spack will bootstrap in the background: +The first time you concretize a spec, Spack will bootstrap automatically: .. 
code-block:: console - $ time spack spec zlib + $ spack spec zlib + ==> Bootstrapping clingo from pre-built binaries + ==> Fetching https://mirror.spack.io/bootstrap/github-actions/v0.4/build_cache/linux-centos7-x86_64-gcc-10.2.1-clingo-bootstrap-spack-ba5ijauisd3uuixtmactc36vps7yfsrl.spec.json + ==> Fetching https://mirror.spack.io/bootstrap/github-actions/v0.4/build_cache/linux-centos7-x86_64/gcc-10.2.1/clingo-bootstrap-spack/linux-centos7-x86_64-gcc-10.2.1-clingo-bootstrap-spack-ba5ijauisd3uuixtmactc36vps7yfsrl.spack + ==> Installing "clingo-bootstrap@spack%gcc@10.2.1~docs~ipo+python+static_libstdcpp build_type=Release arch=linux-centos7-x86_64" from a buildcache + ==> Bootstrapping patchelf from pre-built binaries + ==> Fetching https://mirror.spack.io/bootstrap/github-actions/v0.4/build_cache/linux-centos7-x86_64-gcc-10.2.1-patchelf-0.16.1-p72zyan5wrzuabtmzq7isa5mzyh6ahdp.spec.json + ==> Fetching https://mirror.spack.io/bootstrap/github-actions/v0.4/build_cache/linux-centos7-x86_64/gcc-10.2.1/patchelf-0.16.1/linux-centos7-x86_64-gcc-10.2.1-patchelf-0.16.1-p72zyan5wrzuabtmzq7isa5mzyh6ahdp.spack + ==> Installing "patchelf@0.16.1%gcc@10.2.1 ldflags="-static-libstdc++ -static-libgcc" build_system=autotools arch=linux-centos7-x86_64" from a buildcache Input spec -------------------------------- zlib Concretized -------------------------------- - zlib@1.2.11%gcc@7.5.0+optimize+pic+shared arch=linux-ubuntu18.04-zen - - real 0m20.023s - user 0m18.351s - sys 0m0.784s - -After this command you'll see that ``clingo`` has been installed for Spack's own use: - -.. code-block:: console - - $ spack find -b - ==> Showing internal bootstrap store at "/root/.spack/bootstrap/store" - ==> 3 installed packages - -- linux-rhel5-x86_64 / gcc@9.3.0 ------------------------------- - clingo-bootstrap@spack python@3.6 - - -- linux-ubuntu18.04-zen / gcc@7.5.0 ---------------------------- - patchelf@0.13 - -Subsequent calls to the concretizer will then be much faster: - -.. 
code-block:: console - - $ time spack spec zlib - [ ... ] - real 0m0.490s - user 0m0.431s - sys 0m0.041s - + zlib@1.2.13%gcc@9.4.0+optimize+pic+shared build_system=makefile arch=linux-ubuntu20.04-icelake If for security concerns you cannot bootstrap ``clingo`` from pre-built -binaries, you have to mark this bootstrapping method as untrusted. This makes -Spack fall back to bootstrapping from sources: +binaries, you have to disable fetching the binaries we generated with Github Actions. .. code-block:: console - $ spack bootstrap untrust github-actions-v0.2 - ==> "github-actions-v0.2" is now untrusted and will not be used for bootstrapping + $ spack bootstrap disable github-actions-v0.4 + ==> "github-actions-v0.4" is now disabled and will not be used for bootstrapping + $ spack bootstrap disable github-actions-v0.3 + ==> "github-actions-v0.3" is now disabled and will not be used for bootstrapping You can verify that the new settings are effective with: -.. code-block:: console +.. command-output:: spack bootstrap list - $ spack bootstrap list - Name: github-actions-v0.2 UNTRUSTED - - Type: buildcache - - Info: - url: https://mirror.spack.io/bootstrap/github-actions/v0.2 - homepage: https://github.com/spack/spack-bootstrap-mirrors - releases: https://github.com/spack/spack-bootstrap-mirrors/releases - - Description: - Buildcache generated from a public workflow using Github Actions. - The sha256 checksum of binaries is checked before installation. - - [ ... ] - - Name: spack-install TRUSTED - - Type: install - - Description: - Specs built from sources by Spack. May take a long time. .. note:: @@ -235,9 +188,7 @@ under the ``${HOME}/.spack`` directory. The software installed there can be quer .. 
code-block:: console - $ spack find --bootstrap - ==> Showing internal bootstrap store at "/home/spack/.spack/bootstrap/store" - ==> 3 installed packages + $ spack -b find -- linux-ubuntu18.04-x86_64 / gcc@10.1.0 ------------------------ clingo-bootstrap@spack python@3.6.9 re2c@1.2.1 @@ -246,7 +197,7 @@ In case it's needed the bootstrap store can also be cleaned with: .. code-block:: console $ spack clean -b - ==> Removing software in "/home/spack/.spack/bootstrap/store" + ==> Removing bootstrapped software and configuration in "/home/spack/.spack/bootstrap" ^^^^^^^^^^^^^^^^^^ Check Installation diff --git a/lib/spack/docs/images/adapter.png b/lib/spack/docs/images/adapter.png new file mode 100644 index 00000000000..aa889c2c324 Binary files /dev/null and b/lib/spack/docs/images/adapter.png differ diff --git a/lib/spack/docs/images/builder_package_architecture.png b/lib/spack/docs/images/builder_package_architecture.png new file mode 100644 index 00000000000..0c79b1ee7fb Binary files /dev/null and b/lib/spack/docs/images/builder_package_architecture.png differ diff --git a/lib/spack/docs/images/builder_phases.png b/lib/spack/docs/images/builder_phases.png new file mode 100644 index 00000000000..e8141651c4b Binary files /dev/null and b/lib/spack/docs/images/builder_phases.png differ diff --git a/lib/spack/docs/images/installation_pipeline.png b/lib/spack/docs/images/installation_pipeline.png new file mode 100644 index 00000000000..c1d1e18f1b9 Binary files /dev/null and b/lib/spack/docs/images/installation_pipeline.png differ diff --git a/lib/spack/docs/images/original_package_architecture.png b/lib/spack/docs/images/original_package_architecture.png new file mode 100644 index 00000000000..9fc21efcc14 Binary files /dev/null and b/lib/spack/docs/images/original_package_architecture.png differ diff --git a/lib/spack/docs/images/packaging.excalidrawlib b/lib/spack/docs/images/packaging.excalidrawlib new file mode 100644 index 00000000000..1449c485793 --- /dev/null +++ 
b/lib/spack/docs/images/packaging.excalidrawlib @@ -0,0 +1,3092 @@ +{ + "type": "excalidrawlib", + "version": 2, + "source": "https://excalidraw.com", + "libraryItems": [ + { + "status": "unpublished", + "elements": [ + { + "type": "rectangle", + "version": 601, + "versionNonce": 158569138, + "isDeleted": false, + "id": "8MYJkzMoNEhDhGH1FB83g", + "fillStyle": "hachure", + "strokeWidth": 2, + "strokeStyle": "solid", + "roughness": 2, + "opacity": 100, + "angle": 0, + "x": 445.75, + "y": 129, + "strokeColor": "#000000", + "backgroundColor": "#ced4da", + "width": 736, + "height": 651, + "seed": 448140078, + "groupIds": [], + "strokeSharpness": "sharp", + "boundElements": [], + "updated": 1664627195460, + "link": null, + "locked": false + }, + { + "type": "rectangle", + "version": 195, + "versionNonce": 1239338030, + "isDeleted": false, + "id": "2CKbNSYnk0z80hSe6axnR", + "fillStyle": "hachure", + "strokeWidth": 2, + "strokeStyle": "solid", + "roughness": 2, + "opacity": 100, + "angle": 0, + "x": 470.25, + "y": 164, + "strokeColor": "#000000", + "backgroundColor": "#228be6", + "width": 495, + "height": 455, + "seed": 566918834, + "groupIds": [], + "strokeSharpness": "sharp", + "boundElements": [ + { + "id": "IU_VoaKHNHswI8HaxNWt5", + "type": "arrow" + } + ], + "updated": 1664627105795, + "link": null, + "locked": false + }, + { + "type": "rectangle", + "version": 403, + "versionNonce": 56919410, + "isDeleted": false, + "id": "XUzv2kfpdxMahaSVVS42X", + "fillStyle": "solid", + "strokeWidth": 1, + "strokeStyle": "solid", + "roughness": 2, + "opacity": 100, + "angle": 0, + "x": 509.25, + "y": 407.5, + "strokeColor": "#000000", + "backgroundColor": "#ced4da", + "width": 396.99999999999994, + "height": 112, + "seed": 354909550, + "groupIds": [ + "LYqioPcAzrIgJBDV3IaDA", + "SsaCg2uTI9sJjhD323wkh" + ], + "strokeSharpness": "sharp", + "boundElements": [ + { + "id": "71z_J7hoepiXas8Fk5x0B", + "type": "arrow" + }, + { + "id": "IU_VoaKHNHswI8HaxNWt5", + "type": "arrow" + } + ], + 
"updated": 1664627099901, + "link": null, + "locked": false + }, + { + "type": "text", + "version": 300, + "versionNonce": 925254318, + "isDeleted": false, + "id": "lkCxvsSEn-AuBHtfj1N0d", + "fillStyle": "solid", + "strokeWidth": 1, + "strokeStyle": "solid", + "roughness": 2, + "opacity": 100, + "angle": 0, + "x": 547.25, + "y": 441, + "strokeColor": "#000000", + "backgroundColor": "#ced4da", + "width": 321, + "height": 45, + "seed": 1361827954, + "groupIds": [ + "LYqioPcAzrIgJBDV3IaDA", + "SsaCg2uTI9sJjhD323wkh" + ], + "strokeSharpness": "sharp", + "boundElements": [], + "updated": 1664627099902, + "link": null, + "locked": false, + "fontSize": 36, + "fontFamily": 1, + "text": "AutotoolsPackage", + "baseline": 32, + "textAlign": "center", + "verticalAlign": "top", + "containerId": null, + "originalText": "AutotoolsPackage" + }, + { + "type": "rectangle", + "version": 377, + "versionNonce": 1733756722, + "isDeleted": false, + "id": "aCDb2PgRdoFKA8e-GqQzR", + "fillStyle": "solid", + "strokeWidth": 1, + "strokeStyle": "solid", + "roughness": 2, + "opacity": 100, + "angle": 0, + "x": 509.25, + "y": 200, + "strokeColor": "#000000", + "backgroundColor": "#ced4da", + "width": 396.99999999999994, + "height": 112, + "seed": 175218606, + "groupIds": [ + "WEeFev8dTdo9KgzR3hPki", + "SsaCg2uTI9sJjhD323wkh" + ], + "strokeSharpness": "sharp", + "boundElements": [ + { + "id": "71z_J7hoepiXas8Fk5x0B", + "type": "arrow" + } + ], + "updated": 1664627099902, + "link": null, + "locked": false + }, + { + "type": "text", + "version": 161, + "versionNonce": 585481454, + "isDeleted": false, + "id": "fXYOlmw0CV0WFTNUDity0", + "fillStyle": "solid", + "strokeWidth": 1, + "strokeStyle": "solid", + "roughness": 2, + "opacity": 100, + "angle": 0, + "x": 627.75, + "y": 233.5, + "strokeColor": "#000000", + "backgroundColor": "#ced4da", + "width": 160, + "height": 45, + "seed": 1186724402, + "groupIds": [ + "WEeFev8dTdo9KgzR3hPki", + "SsaCg2uTI9sJjhD323wkh" + ], + "strokeSharpness": "sharp", + 
"boundElements": [], + "updated": 1664627099902, + "link": null, + "locked": false, + "fontSize": 36, + "fontFamily": 1, + "text": "ArpackNg", + "baseline": 32, + "textAlign": "center", + "verticalAlign": "top", + "containerId": null, + "originalText": "ArpackNg" + }, + { + "type": "arrow", + "version": 290, + "versionNonce": 890458354, + "isDeleted": false, + "id": "71z_J7hoepiXas8Fk5x0B", + "fillStyle": "hachure", + "strokeWidth": 4, + "strokeStyle": "solid", + "roughness": 2, + "opacity": 100, + "angle": 0, + "x": 707.8516807799414, + "y": 403, + "strokeColor": "#000000", + "backgroundColor": "transparent", + "width": 0, + "height": 85, + "seed": 247298542, + "groupIds": [], + "strokeSharpness": "sharp", + "boundElements": [], + "updated": 1664627099902, + "link": null, + "locked": false, + "startBinding": { + "focus": 0.02318227093169459, + "gap": 3, + "elementId": "XUzv2kfpdxMahaSVVS42X" + }, + "endBinding": { + "focus": -0.02318227093169459, + "gap": 6, + "elementId": "aCDb2PgRdoFKA8e-GqQzR" + }, + "lastCommittedPoint": null, + "startArrowhead": null, + "endArrowhead": "triangle", + "points": [ + [ + 0, + 0 + ], + [ + 0, + -85 + ] + ] + }, + { + "type": "text", + "version": 673, + "versionNonce": 1429991214, + "isDeleted": false, + "id": "bsoYa0EVTdXYsTx5nsFJk", + "fillStyle": "hachure", + "strokeWidth": 4, + "strokeStyle": "solid", + "roughness": 2, + "opacity": 100, + "angle": 0, + "x": 783.25, + "y": 518.3821170339361, + "strokeColor": "#000000", + "backgroundColor": "transparent", + "width": 164, + "height": 90, + "seed": 1633805298, + "groupIds": [], + "strokeSharpness": "sharp", + "boundElements": [ + { + "id": "IU_VoaKHNHswI8HaxNWt5", + "type": "arrow" + } + ], + "updated": 1664627099902, + "link": null, + "locked": false, + "fontSize": 36, + "fontFamily": 1, + "text": "Package \nHierarchy", + "baseline": 77, + "textAlign": "center", + "verticalAlign": "top", + "containerId": null, + "originalText": "Package \nHierarchy" + }, + { + "type": "rectangle", 
+ "version": 903, + "versionNonce": 1712814318, + "isDeleted": false, + "id": "qRi5xNnAOqg-SFwtYBpoN", + "fillStyle": "solid", + "strokeWidth": 1, + "strokeStyle": "solid", + "roughness": 2, + "opacity": 100, + "angle": 0, + "x": 510.25, + "y": 657.5, + "strokeColor": "#000000", + "backgroundColor": "#ced4da", + "width": 396.99999999999994, + "height": 112, + "seed": 1226050606, + "groupIds": [ + "-wCL8N0qNvseDw29hpA8g" + ], + "strokeSharpness": "sharp", + "boundElements": [ + { + "id": "IU_VoaKHNHswI8HaxNWt5", + "type": "arrow" + } + ], + "updated": 1664627118807, + "link": null, + "locked": false + }, + { + "type": "text", + "version": 623, + "versionNonce": 492299954, + "isDeleted": false, + "id": "9h25d9NB-Q9Wc79boMEnC", + "fillStyle": "solid", + "strokeWidth": 1, + "strokeStyle": "solid", + "roughness": 2, + "opacity": 100, + "angle": 0, + "x": 552.25, + "y": 691, + "strokeColor": "#000000", + "backgroundColor": "#ced4da", + "width": 313, + "height": 45, + "seed": 186946994, + "groupIds": [ + "-wCL8N0qNvseDw29hpA8g" + ], + "strokeSharpness": "sharp", + "boundElements": [], + "updated": 1664627118807, + "link": null, + "locked": false, + "fontSize": 36, + "fontFamily": 1, + "text": "Builder Forwarder", + "baseline": 32, + "textAlign": "center", + "verticalAlign": "top", + "containerId": null, + "originalText": "Builder Forwarder" + }, + { + "type": "text", + "version": 1188, + "versionNonce": 351671150, + "isDeleted": false, + "id": "IlomIIocRvEmmYro4MZ68", + "fillStyle": "hachure", + "strokeWidth": 4, + "strokeStyle": "solid", + "roughness": 2, + "opacity": 100, + "angle": 0, + "x": 1002.75, + "y": 168.5, + "strokeColor": "#000000", + "backgroundColor": "transparent", + "width": 157, + "height": 90, + "seed": 1428885362, + "groupIds": [], + "strokeSharpness": "sharp", + "boundElements": [], + "updated": 1664627188273, + "link": null, + "locked": false, + "fontSize": 36, + "fontFamily": 1, + "text": "Package\n Wrapper", + "baseline": 77, + "textAlign": 
"center", + "verticalAlign": "top", + "containerId": null, + "originalText": "Package\n Wrapper" + }, + { + "type": "arrow", + "version": 832, + "versionNonce": 1121332014, + "isDeleted": false, + "id": "IU_VoaKHNHswI8HaxNWt5", + "fillStyle": "hachure", + "strokeWidth": 4, + "strokeStyle": "dotted", + "roughness": 2, + "opacity": 100, + "angle": 0, + "x": 707.7778281289579, + "y": 653.5, + "strokeColor": "#000000", + "backgroundColor": "transparent", + "width": 7.847537838213611, + "height": 130.23576593212783, + "seed": 1301783086, + "groupIds": [], + "strokeSharpness": "sharp", + "boundElements": [], + "updated": 1664627118807, + "link": null, + "locked": false, + "startBinding": { + "elementId": "qRi5xNnAOqg-SFwtYBpoN", + "focus": 0.013062197564634722, + "gap": 4 + }, + "endBinding": { + "elementId": "XUzv2kfpdxMahaSVVS42X", + "focus": 0.056574233332975385, + "gap": 3.7642340678721666 + }, + "lastCommittedPoint": null, + "startArrowhead": null, + "endArrowhead": "triangle", + "points": [ + [ + 0, + 0 + ], + [ + -7.847537838213611, + -130.23576593212783 + ] + ] + } + ], + "id": "mulubEO9Lw-HgC00sx7G-", + "created": 1664627205632 + }, + { + "status": "unpublished", + "elements": [ + { + "type": "rectangle", + "version": 360, + "versionNonce": 699609906, + "isDeleted": false, + "id": "ai3MIBTq8Rkokk4d2NJ_k", + "fillStyle": "hachure", + "strokeWidth": 2, + "strokeStyle": "solid", + "roughness": 2, + "opacity": 100, + "angle": 0, + "x": 441.5, + "y": 56, + "strokeColor": "#000000", + "backgroundColor": "#228be6", + "width": 479, + "height": 642, + "seed": 725687342, + "groupIds": [], + "strokeSharpness": "sharp", + "boundElements": [], + "updated": 1664622926148, + "link": null, + "locked": false + }, + { + "type": "rectangle", + "version": 327, + "versionNonce": 1239118706, + "isDeleted": false, + "id": "7tuXfM91g28UGae9gJkis", + "fillStyle": "hachure", + "strokeWidth": 2, + "strokeStyle": "solid", + "roughness": 2, + "opacity": 100, + "angle": 0, + "x": 993.25, + 
"y": 53, + "strokeColor": "#000000", + "backgroundColor": "#228be6", + "width": 479, + "height": 642, + "seed": 860539570, + "groupIds": [], + "strokeSharpness": "sharp", + "boundElements": [ + { + "id": "F6E1EQxM-PyPeNjQXH6NZ", + "type": "arrow" + } + ], + "updated": 1664623054904, + "link": null, + "locked": false + }, + { + "type": "rectangle", + "version": 482, + "versionNonce": 616506034, + "isDeleted": false, + "id": "TmgDkNmbU86sH2Ssf1mL2", + "fillStyle": "solid", + "strokeWidth": 1, + "strokeStyle": "solid", + "roughness": 2, + "opacity": 100, + "angle": 0, + "x": 1030.75, + "y": 503.5, + "strokeColor": "#000000", + "backgroundColor": "#ced4da", + "width": 396.99999999999994, + "height": 112, + "seed": 329380206, + "groupIds": [ + "rqi4zfKDNJjqgRyIIknBO" + ], + "strokeSharpness": "sharp", + "boundElements": [ + { + "id": "RQl1RtMzUcPE_zXHt8Ldm", + "type": "arrow" + }, + { + "id": "F6E1EQxM-PyPeNjQXH6NZ", + "type": "arrow" + }, + { + "id": "Iey2r9ev3NqXShFhDRa3t", + "type": "arrow" + } + ], + "updated": 1664623131360, + "link": null, + "locked": false + }, + { + "type": "text", + "version": 377, + "versionNonce": 1649618094, + "isDeleted": false, + "id": "M6LF3AKrGIzDW8p00PLeg", + "fillStyle": "solid", + "strokeWidth": 1, + "strokeStyle": "solid", + "roughness": 2, + "opacity": 100, + "angle": 0, + "x": 1068.75, + "y": 537, + "strokeColor": "#000000", + "backgroundColor": "#ced4da", + "width": 321, + "height": 45, + "seed": 1690477682, + "groupIds": [ + "rqi4zfKDNJjqgRyIIknBO" + ], + "strokeSharpness": "sharp", + "boundElements": [], + "updated": 1664622926151, + "link": null, + "locked": false, + "fontSize": 36, + "fontFamily": 1, + "text": "AutotoolsPackage", + "baseline": 32, + "textAlign": "center", + "verticalAlign": "top", + "containerId": null, + "originalText": "AutotoolsPackage" + }, + { + "type": "rectangle", + "version": 466, + "versionNonce": 378147058, + "isDeleted": false, + "id": "-34MaUc1fQDbeqLTRUx91", + "fillStyle": "solid", + 
"strokeWidth": 1, + "strokeStyle": "solid", + "roughness": 2, + "opacity": 100, + "angle": 0, + "x": 1030.625, + "y": 296, + "strokeColor": "#000000", + "backgroundColor": "#ced4da", + "width": 396.99999999999994, + "height": 112, + "seed": 964531118, + "groupIds": [ + "TtAdfrQjw8FIlPZMGmWhX" + ], + "strokeSharpness": "sharp", + "boundElements": [ + { + "id": "RQl1RtMzUcPE_zXHt8Ldm", + "type": "arrow" + }, + { + "id": "7czUS_PAuM5hdRJoQRDRT", + "type": "arrow" + }, + { + "id": "Iey2r9ev3NqXShFhDRa3t", + "type": "arrow" + } + ], + "updated": 1664623131360, + "link": null, + "locked": false + }, + { + "type": "text", + "version": 250, + "versionNonce": 1826973422, + "isDeleted": false, + "id": "85YHNomCStJoIV17Sp0A6", + "fillStyle": "solid", + "strokeWidth": 1, + "strokeStyle": "solid", + "roughness": 2, + "opacity": 100, + "angle": 0, + "x": 1093.625, + "y": 329.5, + "strokeColor": "#000000", + "backgroundColor": "#ced4da", + "width": 271, + "height": 45, + "seed": 1436108338, + "groupIds": [ + "TtAdfrQjw8FIlPZMGmWhX" + ], + "strokeSharpness": "sharp", + "boundElements": [], + "updated": 1664622926151, + "link": null, + "locked": false, + "fontSize": 36, + "fontFamily": 1, + "text": "builtin.ArpackNg", + "baseline": 32, + "textAlign": "center", + "verticalAlign": "top", + "containerId": null, + "originalText": "builtin.ArpackNg" + }, + { + "type": "arrow", + "version": 476, + "versionNonce": 1270564594, + "isDeleted": false, + "id": "RQl1RtMzUcPE_zXHt8Ldm", + "fillStyle": "hachure", + "strokeWidth": 4, + "strokeStyle": "solid", + "roughness": 2, + "opacity": 100, + "angle": 0, + "x": 1233.8516807799415, + "y": 499, + "strokeColor": "#000000", + "backgroundColor": "transparent", + "width": 0, + "height": 85, + "seed": 1613426158, + "groupIds": [], + "strokeSharpness": "sharp", + "boundElements": [], + "updated": 1664622926151, + "link": null, + "locked": false, + "startBinding": { + "elementId": "TmgDkNmbU86sH2Ssf1mL2", + "focus": 0.023182270931695163, + "gap": 4.5 + 
}, + "endBinding": { + "elementId": "-34MaUc1fQDbeqLTRUx91", + "focus": -0.02381199385360952, + "gap": 6 + }, + "lastCommittedPoint": null, + "startArrowhead": null, + "endArrowhead": "triangle", + "points": [ + [ + 0, + 0 + ], + [ + 0, + -85 + ] + ] + }, + { + "type": "text", + "version": 693, + "versionNonce": 1438013742, + "isDeleted": false, + "id": "wSIdF9zegc69r2D38BVMs", + "fillStyle": "hachure", + "strokeWidth": 4, + "strokeStyle": "solid", + "roughness": 2, + "opacity": 100, + "angle": 0, + "x": 1061.75, + "y": 632.3821170339361, + "strokeColor": "#000000", + "backgroundColor": "transparent", + "width": 335, + "height": 45, + "seed": 1052094450, + "groupIds": [], + "strokeSharpness": "sharp", + "boundElements": [], + "updated": 1664622926151, + "link": null, + "locked": false, + "fontSize": 36, + "fontFamily": 1, + "text": "Old-style packages", + "baseline": 32, + "textAlign": "center", + "verticalAlign": "top", + "containerId": null, + "originalText": "Old-style packages" + }, + { + "type": "rectangle", + "version": 556, + "versionNonce": 1760787058, + "isDeleted": false, + "id": "lYxakYKLpAmo_DvzDJ27b", + "fillStyle": "solid", + "strokeWidth": 1, + "strokeStyle": "solid", + "roughness": 2, + "opacity": 100, + "angle": 0, + "x": 1030.625, + "y": 95, + "strokeColor": "#000000", + "backgroundColor": "#ced4da", + "width": 396.99999999999994, + "height": 112, + "seed": 1302932978, + "groupIds": [ + "-WCCzMWoqGFfWxksMC6LG" + ], + "strokeSharpness": "sharp", + "boundElements": [ + { + "id": "RQl1RtMzUcPE_zXHt8Ldm", + "type": "arrow" + }, + { + "id": "8Z8HX6DlXqC-qL-63w1ol", + "type": "arrow" + }, + { + "id": "ia8wHuSmOVJLvGe5blR5g", + "type": "arrow" + }, + { + "id": "7czUS_PAuM5hdRJoQRDRT", + "type": "arrow" + } + ], + "updated": 1664623123836, + "link": null, + "locked": false + }, + { + "type": "text", + "version": 341, + "versionNonce": 1412367214, + "isDeleted": false, + "id": "hF1874wuKYmbBjYAQwrVJ", + "fillStyle": "solid", + "strokeWidth": 1, + 
"strokeStyle": "solid", + "roughness": 2, + "opacity": 100, + "angle": 0, + "x": 1088.125, + "y": 128.5, + "strokeColor": "#000000", + "backgroundColor": "#ced4da", + "width": 282, + "height": 45, + "seed": 524182062, + "groupIds": [ + "-WCCzMWoqGFfWxksMC6LG" + ], + "strokeSharpness": "sharp", + "boundElements": [], + "updated": 1664622926152, + "link": null, + "locked": false, + "fontSize": 36, + "fontFamily": 1, + "text": "myrepo.ArpackNg", + "baseline": 32, + "textAlign": "center", + "verticalAlign": "top", + "containerId": null, + "originalText": "myrepo.ArpackNg" + }, + { + "type": "arrow", + "version": 593, + "versionNonce": 214413938, + "isDeleted": false, + "id": "8Z8HX6DlXqC-qL-63w1ol", + "fillStyle": "hachure", + "strokeWidth": 4, + "strokeStyle": "dotted", + "roughness": 2, + "opacity": 100, + "angle": 0, + "x": 1226.4453379157953, + "y": 297.5, + "strokeColor": "#000000", + "backgroundColor": "transparent", + "width": 2.434529927712447, + "height": 84, + "seed": 1326581486, + "groupIds": [], + "strokeSharpness": "sharp", + "boundElements": [], + "updated": 1664622926152, + "link": null, + "locked": false, + "startBinding": null, + "endBinding": { + "elementId": "lYxakYKLpAmo_DvzDJ27b", + "focus": -0.00782655608584947, + "gap": 6.5 + }, + "lastCommittedPoint": null, + "startArrowhead": null, + "endArrowhead": "triangle", + "points": [ + [ + 0, + 0 + ], + [ + 2.434529927712447, + -84 + ] + ] + }, + { + "type": "rectangle", + "version": 733, + "versionNonce": 390297266, + "isDeleted": false, + "id": "G4--cV2YGQSrSijvYiNDB", + "fillStyle": "solid", + "strokeWidth": 1, + "strokeStyle": "solid", + "roughness": 2, + "opacity": 100, + "angle": 0, + "x": 482.5, + "y": 507, + "strokeColor": "#000000", + "backgroundColor": "#ced4da", + "width": 396.99999999999994, + "height": 112, + "seed": 85080878, + "groupIds": [ + "qZhg7KFANDHKWmTH71Lm0", + "FSKOW2oS76ubMa6DTOrDh" + ], + "strokeSharpness": "sharp", + "boundElements": [ + { + "id": "RQl1RtMzUcPE_zXHt8Ldm", + 
"type": "arrow" + }, + { + "id": "BkpnKUCjV1uqDGHPNuNZK", + "type": "arrow" + }, + { + "id": "aQdIO4VQx_J6SzCz-xt64", + "type": "arrow" + }, + { + "id": "F6E1EQxM-PyPeNjQXH6NZ", + "type": "arrow" + } + ], + "updated": 1664623061069, + "link": null, + "locked": false + }, + { + "type": "text", + "version": 577, + "versionNonce": 2001681906, + "isDeleted": false, + "id": "MbNSUrN26Lx1aERuxunnt", + "fillStyle": "solid", + "strokeWidth": 1, + "strokeStyle": "solid", + "roughness": 2, + "opacity": 100, + "angle": 0, + "x": 541.5, + "y": 540.5, + "strokeColor": "#000000", + "backgroundColor": "#ced4da", + "width": 279, + "height": 45, + "seed": 950326962, + "groupIds": [ + "qZhg7KFANDHKWmTH71Lm0", + "FSKOW2oS76ubMa6DTOrDh" + ], + "strokeSharpness": "sharp", + "boundElements": [], + "updated": 1664622926152, + "link": null, + "locked": false, + "fontSize": 36, + "fontFamily": 1, + "text": "Default Builder", + "baseline": 32, + "textAlign": "center", + "verticalAlign": "top", + "containerId": null, + "originalText": "Default Builder" + }, + { + "type": "rectangle", + "version": 722, + "versionNonce": 1372930162, + "isDeleted": false, + "id": "WIS84sS48dCmi8q81Hh9F", + "fillStyle": "solid", + "strokeWidth": 1, + "strokeStyle": "solid", + "roughness": 2, + "opacity": 100, + "angle": 0, + "x": 482.5, + "y": 99, + "strokeColor": "#000000", + "backgroundColor": "#ced4da", + "width": 396.99999999999994, + "height": 112, + "seed": 1977410350, + "groupIds": [ + "_CQwHz-xftDZzy8u9u4YO", + "FSKOW2oS76ubMa6DTOrDh" + ], + "strokeSharpness": "sharp", + "boundElements": [ + { + "id": "RQl1RtMzUcPE_zXHt8Ldm", + "type": "arrow" + }, + { + "id": "BkpnKUCjV1uqDGHPNuNZK", + "type": "arrow" + }, + { + "id": "aQdIO4VQx_J6SzCz-xt64", + "type": "arrow" + }, + { + "id": "ia8wHuSmOVJLvGe5blR5g", + "type": "arrow" + } + ], + "updated": 1664623105535, + "link": null, + "locked": false + }, + { + "type": "text", + "version": 531, + "versionNonce": 1851174834, + "isDeleted": false, + "id": 
"qIbTXN1LbDYGZzSceYynz", + "fillStyle": "solid", + "strokeWidth": 1, + "strokeStyle": "solid", + "roughness": 2, + "opacity": 100, + "angle": 0, + "x": 540.5, + "y": 132.5, + "strokeColor": "#000000", + "backgroundColor": "#ced4da", + "width": 281, + "height": 45, + "seed": 221818546, + "groupIds": [ + "_CQwHz-xftDZzy8u9u4YO", + "FSKOW2oS76ubMa6DTOrDh" + ], + "strokeSharpness": "sharp", + "boundElements": [], + "updated": 1664622926152, + "link": null, + "locked": false, + "fontSize": 36, + "fontFamily": 1, + "text": "Adapter Builder", + "baseline": 32, + "textAlign": "center", + "verticalAlign": "top", + "containerId": null, + "originalText": "Adapter Builder" + }, + { + "type": "arrow", + "version": 85, + "versionNonce": 50141422, + "isDeleted": false, + "id": "aQdIO4VQx_J6SzCz-xt64", + "fillStyle": "solid", + "strokeWidth": 4, + "strokeStyle": "solid", + "roughness": 0, + "opacity": 100, + "angle": 0, + "x": 670, + "y": 505, + "strokeColor": "#000000", + "backgroundColor": "#ced4da", + "width": 2, + "height": 291, + "seed": 417372974, + "groupIds": [], + "strokeSharpness": "sharp", + "boundElements": [], + "updated": 1664622926152, + "link": null, + "locked": false, + "startBinding": { + "elementId": "G4--cV2YGQSrSijvYiNDB", + "focus": -0.05731267980406219, + "gap": 2 + }, + "endBinding": { + "elementId": "WIS84sS48dCmi8q81Hh9F", + "focus": 0.04321344955983103, + "gap": 3 + }, + "lastCommittedPoint": null, + "startArrowhead": null, + "endArrowhead": "triangle", + "points": [ + [ + 0, + 0 + ], + [ + 2, + -291 + ] + ] + }, + { + "type": "arrow", + "version": 720, + "versionNonce": 1494556718, + "isDeleted": false, + "id": "ia8wHuSmOVJLvGe5blR5g", + "fillStyle": "hachure", + "strokeWidth": 4, + "strokeStyle": "solid", + "roughness": 2, + "opacity": 100, + "angle": 4.706831282597808, + "x": 932.4285606227004, + "y": 52.69401049592016, + "strokeColor": "#c92a2a", + "backgroundColor": "transparent", + "width": 47.10077935537049, + "height": 145.9883132350331, + 
"seed": 314146734, + "groupIds": [], + "strokeSharpness": "round", + "boundElements": [], + "updated": 1664623039605, + "link": null, + "locked": false, + "startBinding": { + "elementId": "WIS84sS48dCmi8q81Hh9F", + "focus": 0.6597923311816741, + "gap": 2.0985583595166872 + }, + "endBinding": { + "elementId": "lYxakYKLpAmo_DvzDJ27b", + "focus": -0.6857137990945498, + "gap": 3.0336827015810286 + }, + "lastCommittedPoint": null, + "startArrowhead": null, + "endArrowhead": "triangle", + "points": [ + [ + 0, + 0 + ], + [ + 45.89517648378751, + 72.7218231059162 + ], + [ + -1.2056028715829825, + 145.9883132350331 + ] + ] + }, + { + "type": "text", + "version": 727, + "versionNonce": 549636846, + "isDeleted": false, + "id": "JRrvIVZ9KAv56BYbRbCLA", + "fillStyle": "hachure", + "strokeWidth": 4, + "strokeStyle": "solid", + "roughness": 2, + "opacity": 100, + "angle": 0, + "x": 527.5, + "y": 633.5, + "strokeColor": "#000000", + "backgroundColor": "transparent", + "width": 295, + "height": 45, + "seed": 2130028978, + "groupIds": [], + "strokeSharpness": "sharp", + "boundElements": [], + "updated": 1664622926153, + "link": null, + "locked": false, + "fontSize": 36, + "fontFamily": 1, + "text": "Builder Hierarchy", + "baseline": 32, + "textAlign": "center", + "verticalAlign": "top", + "containerId": null, + "originalText": "Builder Hierarchy" + }, + { + "type": "text", + "version": 281, + "versionNonce": 777063918, + "isDeleted": false, + "id": "BBj29IYUUwcEAk0aGGgEe", + "fillStyle": "solid", + "strokeWidth": 4, + "strokeStyle": "solid", + "roughness": 0, + "opacity": 100, + "angle": 0, + "x": 746, + "y": 2, + "strokeColor": "#c92a2a", + "backgroundColor": "#ced4da", + "width": 438, + "height": 35, + "seed": 344107566, + "groupIds": [], + "strokeSharpness": "sharp", + "boundElements": [], + "updated": 1664623034966, + "link": null, + "locked": false, + "fontSize": 28, + "fontFamily": 1, + "text": "Defer to the old-style package", + "baseline": 25, + "textAlign": "center", + 
"verticalAlign": "top", + "containerId": null, + "originalText": "Defer to the old-style package" + }, + { + "type": "arrow", + "version": 864, + "versionNonce": 353999662, + "isDeleted": false, + "id": "F6E1EQxM-PyPeNjQXH6NZ", + "fillStyle": "hachure", + "strokeWidth": 4, + "strokeStyle": "solid", + "roughness": 2, + "opacity": 100, + "angle": 1.5656833824867196, + "x": 932.5276780900645, + "y": 511.2079252998286, + "strokeColor": "#c92a2a", + "backgroundColor": "transparent", + "width": 47.10077935537049, + "height": 145.9883132350331, + "seed": 2119154546, + "groupIds": [], + "strokeSharpness": "round", + "boundElements": [], + "updated": 1664623061069, + "link": null, + "locked": false, + "startBinding": { + "elementId": "TmgDkNmbU86sH2Ssf1mL2", + "focus": 0.700636908798286, + "gap": 3.7338363313426726 + }, + "endBinding": { + "elementId": "G4--cV2YGQSrSijvYiNDB", + "focus": -0.7137516210459195, + "gap": 1.5235945037890133 + }, + "lastCommittedPoint": null, + "startArrowhead": null, + "endArrowhead": "triangle", + "points": [ + [ + 0, + 0 + ], + [ + 45.89517648378751, + 72.7218231059162 + ], + [ + -1.2056028715829825, + 145.9883132350331 + ] + ] + }, + { + "type": "text", + "version": 318, + "versionNonce": 1988243186, + "isDeleted": false, + "id": "VIOq-st9nvReenpiJkr7q", + "fillStyle": "solid", + "strokeWidth": 4, + "strokeStyle": "solid", + "roughness": 0, + "opacity": 100, + "angle": 0, + "x": 828, + "y": 724.5, + "strokeColor": "#c92a2a", + "backgroundColor": "#ced4da", + "width": 274, + "height": 70, + "seed": 2086072882, + "groupIds": [], + "strokeSharpness": "sharp", + "boundElements": [], + "updated": 1664623095297, + "link": null, + "locked": false, + "fontSize": 28, + "fontFamily": 1, + "text": "Fall-back to the \nAdapter base class", + "baseline": 60, + "textAlign": "center", + "verticalAlign": "top", + "containerId": null, + "originalText": "Fall-back to the \nAdapter base class" + }, + { + "type": "arrow", + "version": 971, + "versionNonce": 
1844256174, + "isDeleted": false, + "id": "7czUS_PAuM5hdRJoQRDRT", + "fillStyle": "hachure", + "strokeWidth": 4, + "strokeStyle": "solid", + "roughness": 2, + "opacity": 100, + "angle": 6.272294617229998, + "x": 1433.5276780900645, + "y": 163.20792529982862, + "strokeColor": "#c92a2a", + "backgroundColor": "transparent", + "width": 47.10077935537049, + "height": 145.9883132350331, + "seed": 142056302, + "groupIds": [], + "strokeSharpness": "round", + "boundElements": [], + "updated": 1664623123836, + "link": null, + "locked": false, + "startBinding": { + "elementId": "lYxakYKLpAmo_DvzDJ27b", + "focus": -0.8331982906950285, + "gap": 5.098981289624589 + }, + "endBinding": { + "elementId": "-34MaUc1fQDbeqLTRUx91", + "focus": 0.7587321286266477, + "gap": 5.483331940596372 + }, + "lastCommittedPoint": null, + "startArrowhead": null, + "endArrowhead": "triangle", + "points": [ + [ + 0, + 0 + ], + [ + 45.89517648378751, + 72.7218231059162 + ], + [ + -1.2056028715829825, + 145.9883132350331 + ] + ] + }, + { + "type": "arrow", + "version": 1075, + "versionNonce": 2073112366, + "isDeleted": false, + "id": "Iey2r9ev3NqXShFhDRa3t", + "fillStyle": "hachure", + "strokeWidth": 4, + "strokeStyle": "solid", + "roughness": 2, + "opacity": 100, + "angle": 6.272294617229998, + "x": 1434.451400933309, + "y": 387.7559332541056, + "strokeColor": "#c92a2a", + "backgroundColor": "transparent", + "width": 47.10077935537049, + "height": 145.9883132350331, + "seed": 840513518, + "groupIds": [], + "strokeSharpness": "round", + "boundElements": [], + "updated": 1664623131360, + "link": null, + "locked": false, + "startBinding": { + "elementId": "-34MaUc1fQDbeqLTRUx91", + "focus": -0.7723329153292293, + "gap": 6.037577244264867 + }, + "endBinding": { + "elementId": "TmgDkNmbU86sH2Ssf1mL2", + "focus": 0.808011962769455, + "gap": 6.296927895236422 + }, + "lastCommittedPoint": null, + "startArrowhead": null, + "endArrowhead": "triangle", + "points": [ + [ + 0, + 0 + ], + [ + 45.89517648378751, + 
72.7218231059162 + ], + [ + -1.2056028715829825, + 145.9883132350331 + ] + ] + } + ], + "id": "sJP5ES4-kuhrqaBed7Feh", + "created": 1664623142493 + }, + { + "status": "unpublished", + "elements": [ + { + "type": "rectangle", + "version": 351, + "versionNonce": 94847218, + "isDeleted": false, + "id": "QfhQQY4Kvx8RLvCd6qXsx", + "fillStyle": "hachure", + "strokeWidth": 2, + "strokeStyle": "solid", + "roughness": 2, + "opacity": 100, + "angle": 0, + "x": 1011.5, + "y": 249, + "strokeColor": "#000000", + "backgroundColor": "#228be6", + "width": 479, + "height": 438, + "seed": 1024685106, + "groupIds": [], + "strokeSharpness": "sharp", + "boundElements": [], + "updated": 1664612347442, + "link": null, + "locked": false + }, + { + "type": "rectangle", + "version": 156, + "versionNonce": 2082406190, + "isDeleted": false, + "id": "rMQqqzkSZsBVWvOk137wO", + "fillStyle": "hachure", + "strokeWidth": 2, + "strokeStyle": "solid", + "roughness": 2, + "opacity": 100, + "angle": 0, + "x": 511, + "y": 247, + "strokeColor": "#000000", + "backgroundColor": "#228be6", + "width": 479, + "height": 438, + "seed": 250617778, + "groupIds": [], + "strokeSharpness": "sharp", + "boundElements": [], + "updated": 1664612347443, + "link": null, + "locked": false + }, + { + "type": "rectangle", + "version": 392, + "versionNonce": 414601906, + "isDeleted": false, + "id": "h2lcAgJBn6WsPKAj3vWS8", + "fillStyle": "solid", + "strokeWidth": 1, + "strokeStyle": "solid", + "roughness": 2, + "opacity": 100, + "angle": 0, + "x": 545.5, + "y": 490.5, + "strokeColor": "#000000", + "backgroundColor": "#ced4da", + "width": 396.99999999999994, + "height": 112, + "seed": 721668433, + "groupIds": [ + "ETPwHpdW1CXh0DtqZ_2na" + ], + "strokeSharpness": "sharp", + "boundElements": [ + { + "id": "r2Lq0kGXd6aTn5T-ki1aL", + "type": "arrow" + } + ], + "updated": 1664612347443, + "link": null, + "locked": false + }, + { + "type": "text", + "version": 293, + "versionNonce": 848488814, + "isDeleted": false, + "id": 
"eaxk_MzyrjAjXKf0vmFuU", + "fillStyle": "solid", + "strokeWidth": 1, + "strokeStyle": "solid", + "roughness": 2, + "opacity": 100, + "angle": 0, + "x": 583.5, + "y": 524, + "strokeColor": "#000000", + "backgroundColor": "#ced4da", + "width": 321, + "height": 45, + "seed": 1324675135, + "groupIds": [ + "ETPwHpdW1CXh0DtqZ_2na" + ], + "strokeSharpness": "sharp", + "boundElements": [], + "updated": 1664612347443, + "link": null, + "locked": false, + "fontSize": 36, + "fontFamily": 1, + "text": "AutotoolsPackage", + "baseline": 32, + "textAlign": "center", + "verticalAlign": "top", + "containerId": null, + "originalText": "AutotoolsPackage" + }, + { + "type": "rectangle", + "version": 370, + "versionNonce": 595405938, + "isDeleted": false, + "id": "6TAhmS7GKN_ppUHjSVGLb", + "fillStyle": "solid", + "strokeWidth": 1, + "strokeStyle": "solid", + "roughness": 2, + "opacity": 100, + "angle": 0, + "x": 545.5, + "y": 283, + "strokeColor": "#000000", + "backgroundColor": "#ced4da", + "width": 396.99999999999994, + "height": 112, + "seed": 2083634783, + "groupIds": [ + "biKtN87UToAb_UBhyub5I" + ], + "strokeSharpness": "sharp", + "boundElements": [ + { + "id": "r2Lq0kGXd6aTn5T-ki1aL", + "type": "arrow" + } + ], + "updated": 1664612347443, + "link": null, + "locked": false + }, + { + "type": "text", + "version": 155, + "versionNonce": 1066372014, + "isDeleted": false, + "id": "xyXchzGRLKRPuMVGo17mr", + "fillStyle": "solid", + "strokeWidth": 1, + "strokeStyle": "solid", + "roughness": 2, + "opacity": 100, + "angle": 0, + "x": 664, + "y": 316.5, + "strokeColor": "#000000", + "backgroundColor": "#ced4da", + "width": 160, + "height": 45, + "seed": 2066951921, + "groupIds": [ + "biKtN87UToAb_UBhyub5I" + ], + "strokeSharpness": "sharp", + "boundElements": [], + "updated": 1664612347443, + "link": null, + "locked": false, + "fontSize": 36, + "fontFamily": 1, + "text": "ArpackNg", + "baseline": 32, + "textAlign": "center", + "verticalAlign": "top", + "containerId": null, + "originalText": 
"ArpackNg" + }, + { + "type": "arrow", + "version": 285, + "versionNonce": 1807928882, + "isDeleted": false, + "id": "r2Lq0kGXd6aTn5T-ki1aL", + "fillStyle": "hachure", + "strokeWidth": 4, + "strokeStyle": "solid", + "roughness": 2, + "opacity": 100, + "angle": 0, + "x": 748.6016807799414, + "y": 486, + "strokeColor": "#000000", + "backgroundColor": "transparent", + "width": 0, + "height": 85, + "seed": 1479060383, + "groupIds": [], + "strokeSharpness": "sharp", + "boundElements": [], + "updated": 1664612347443, + "link": null, + "locked": false, + "startBinding": { + "elementId": "h2lcAgJBn6WsPKAj3vWS8", + "focus": 0.02318227093169459, + "gap": 3 + }, + "endBinding": { + "elementId": "6TAhmS7GKN_ppUHjSVGLb", + "focus": -0.02318227093169459, + "gap": 6 + }, + "lastCommittedPoint": null, + "startArrowhead": null, + "endArrowhead": "triangle", + "points": [ + [ + 0, + 0 + ], + [ + 0, + -85 + ] + ] + }, + { + "type": "text", + "version": 572, + "versionNonce": 1094575598, + "isDeleted": false, + "id": "pUx1_v_UyKhu5zXISU4-f", + "fillStyle": "hachure", + "strokeWidth": 4, + "strokeStyle": "solid", + "roughness": 2, + "opacity": 100, + "angle": 0, + "x": 653, + "y": 619.3821170339361, + "strokeColor": "#000000", + "backgroundColor": "transparent", + "width": 182, + "height": 45, + "seed": 1608256017, + "groupIds": [], + "strokeSharpness": "sharp", + "boundElements": [], + "updated": 1664612347443, + "link": null, + "locked": false, + "fontSize": 36, + "fontFamily": 1, + "text": "Metadata", + "baseline": 32, + "textAlign": "center", + "verticalAlign": "top", + "containerId": null, + "originalText": "Metadata" + }, + { + "type": "rectangle", + "version": 734, + "versionNonce": 1401317810, + "isDeleted": false, + "id": "4YBPHTc5sQiOKGM9NOZwg", + "fillStyle": "solid", + "strokeWidth": 1, + "strokeStyle": "solid", + "roughness": 2, + "opacity": 100, + "angle": 0, + "x": 1045.5, + "y": 490.5, + "strokeColor": "#000000", + "backgroundColor": "#ced4da", + "width": 
396.99999999999994, + "height": 112, + "seed": 1687989426, + "groupIds": [ + "lxE4hLtUAF2X7993lwk8q" + ], + "strokeSharpness": "sharp", + "boundElements": [ + { + "id": "M8cWqpsa0-iwN_cVJeXEQ", + "type": "arrow" + } + ], + "updated": 1664612347443, + "link": null, + "locked": false + }, + { + "type": "text", + "version": 436, + "versionNonce": 1572061806, + "isDeleted": false, + "id": "P2U0ucf_QPvJcOWlMLp2K", + "fillStyle": "solid", + "strokeWidth": 1, + "strokeStyle": "solid", + "roughness": 2, + "opacity": 100, + "angle": 0, + "x": 1183, + "y": 524, + "strokeColor": "#000000", + "backgroundColor": "#ced4da", + "width": 122, + "height": 45, + "seed": 276038958, + "groupIds": [ + "lxE4hLtUAF2X7993lwk8q" + ], + "strokeSharpness": "sharp", + "boundElements": [], + "updated": 1664612347443, + "link": null, + "locked": false, + "fontSize": 36, + "fontFamily": 1, + "text": "Builder", + "baseline": 32, + "textAlign": "center", + "verticalAlign": "top", + "containerId": null, + "originalText": "Builder" + }, + { + "type": "arrow", + "version": 489, + "versionNonce": 1663911086, + "isDeleted": false, + "id": "M8cWqpsa0-iwN_cVJeXEQ", + "fillStyle": "hachure", + "strokeWidth": 4, + "strokeStyle": "dashed", + "roughness": 2, + "opacity": 100, + "angle": 0, + "x": 942, + "y": 337, + "strokeColor": "#000000", + "backgroundColor": "transparent", + "width": 303, + "height": 143, + "seed": 1698960686, + "groupIds": [], + "strokeSharpness": "sharp", + "boundElements": [], + "updated": 1664612347443, + "link": null, + "locked": false, + "startBinding": null, + "endBinding": { + "elementId": "4YBPHTc5sQiOKGM9NOZwg", + "focus": 0.04820781382766574, + "gap": 10.5 + }, + "lastCommittedPoint": null, + "startArrowhead": null, + "endArrowhead": "dot", + "points": [ + [ + 0, + 0 + ], + [ + 295, + 0 + ], + [ + 303, + 143 + ] + ] + }, + { + "type": "text", + "version": 841, + "versionNonce": 2059173614, + "isDeleted": false, + "id": "QGyg9pXnTgByg9Lw9oZKC", + "fillStyle": "hachure", + 
"strokeWidth": 4, + "strokeStyle": "solid", + "roughness": 2, + "opacity": 100, + "angle": 0, + "x": 1043.5, + "y": 621.5, + "strokeColor": "#000000", + "backgroundColor": "transparent", + "width": 401, + "height": 45, + "seed": 1012078510, + "groupIds": [], + "strokeSharpness": "sharp", + "boundElements": [], + "updated": 1664612347443, + "link": null, + "locked": false, + "fontSize": 36, + "fontFamily": 1, + "text": "Installation Procedure", + "baseline": 32, + "textAlign": "center", + "verticalAlign": "top", + "containerId": null, + "originalText": "Installation Procedure" + } + ], + "id": "tezI4Q4gBH7mr-Q_us1KO", + "created": 1664612353293 + }, + { + "status": "unpublished", + "elements": [ + { + "type": "rectangle", + "version": 273, + "versionNonce": 1078330865, + "isDeleted": false, + "id": "h2lcAgJBn6WsPKAj3vWS8", + "fillStyle": "hachure", + "strokeWidth": 1, + "strokeStyle": "solid", + "roughness": 2, + "opacity": 100, + "angle": 0, + "x": 545.5, + "y": 489, + "strokeColor": "#000000", + "backgroundColor": "transparent", + "width": 396.99999999999994, + "height": 112, + "seed": 721668433, + "groupIds": [ + "ETPwHpdW1CXh0DtqZ_2na" + ], + "strokeSharpness": "sharp", + "boundElements": [ + { + "id": "r2Lq0kGXd6aTn5T-ki1aL", + "type": "arrow" + } + ], + "updated": 1664534889868, + "link": null, + "locked": false + }, + { + "type": "text", + "version": 174, + "versionNonce": 1400524191, + "isDeleted": false, + "id": "eaxk_MzyrjAjXKf0vmFuU", + "fillStyle": "hachure", + "strokeWidth": 1, + "strokeStyle": "solid", + "roughness": 2, + "opacity": 100, + "angle": 0, + "x": 583.5, + "y": 522.5, + "strokeColor": "#000000", + "backgroundColor": "transparent", + "width": 321, + "height": 45, + "seed": 1324675135, + "groupIds": [ + "ETPwHpdW1CXh0DtqZ_2na" + ], + "strokeSharpness": "sharp", + "boundElements": [], + "updated": 1664534889868, + "link": null, + "locked": false, + "fontSize": 36, + "fontFamily": 1, + "text": "AutotoolsPackage", + "baseline": 32, + "textAlign": 
"center", + "verticalAlign": "top", + "containerId": null, + "originalText": "AutotoolsPackage" + }, + { + "type": "text", + "version": 108, + "versionNonce": 438728849, + "isDeleted": false, + "id": "xyXchzGRLKRPuMVGo17mr", + "fillStyle": "hachure", + "strokeWidth": 1, + "strokeStyle": "solid", + "roughness": 2, + "opacity": 100, + "angle": 0, + "x": 664, + "y": 316.5, + "strokeColor": "#000000", + "backgroundColor": "transparent", + "width": 160, + "height": 45, + "seed": 2066951921, + "groupIds": [ + "1wm7ikIN28k9zdVSKTLKQ" + ], + "strokeSharpness": "sharp", + "boundElements": [], + "updated": 1664540120970, + "link": null, + "locked": false, + "fontSize": 36, + "fontFamily": 1, + "text": "ArpackNg", + "baseline": 32, + "textAlign": "center", + "verticalAlign": "top", + "containerId": null, + "originalText": "ArpackNg" + }, + { + "type": "rectangle", + "version": 322, + "versionNonce": 1389146591, + "isDeleted": false, + "id": "6TAhmS7GKN_ppUHjSVGLb", + "fillStyle": "hachure", + "strokeWidth": 1, + "strokeStyle": "solid", + "roughness": 2, + "opacity": 100, + "angle": 0, + "x": 545.5, + "y": 283, + "strokeColor": "#000000", + "backgroundColor": "transparent", + "width": 396.99999999999994, + "height": 112, + "seed": 2083634783, + "groupIds": [ + "1wm7ikIN28k9zdVSKTLKQ" + ], + "strokeSharpness": "sharp", + "boundElements": [ + { + "id": "r2Lq0kGXd6aTn5T-ki1aL", + "type": "arrow" + } + ], + "updated": 1664534889868, + "link": null, + "locked": false + }, + { + "type": "arrow", + "version": 94, + "versionNonce": 787416433, + "isDeleted": false, + "id": "r2Lq0kGXd6aTn5T-ki1aL", + "fillStyle": "hachure", + "strokeWidth": 4, + "strokeStyle": "solid", + "roughness": 2, + "opacity": 100, + "angle": 0, + "x": 748.6016807799414, + "y": 486, + "strokeColor": "#000000", + "backgroundColor": "transparent", + "width": 0, + "height": 85, + "seed": 1479060383, + "groupIds": [], + "strokeSharpness": "sharp", + "boundElements": [], + "updated": 1664534889868, + "link": null, + 
"locked": false, + "startBinding": { + "elementId": "h2lcAgJBn6WsPKAj3vWS8", + "focus": 0.02318227093169459, + "gap": 3 + }, + "endBinding": { + "elementId": "6TAhmS7GKN_ppUHjSVGLb", + "focus": -0.02318227093169459, + "gap": 6 + }, + "lastCommittedPoint": null, + "startArrowhead": null, + "endArrowhead": "triangle", + "points": [ + [ + 0, + 0 + ], + [ + 0, + -85 + ] + ] + }, + { + "type": "text", + "version": 227, + "versionNonce": 117980031, + "isDeleted": false, + "id": "pUx1_v_UyKhu5zXISU4-f", + "fillStyle": "hachure", + "strokeWidth": 4, + "strokeStyle": "solid", + "roughness": 2, + "opacity": 100, + "angle": 0, + "x": 969, + "y": 386.5, + "strokeColor": "#000000", + "backgroundColor": "transparent", + "width": 442, + "height": 90, + "seed": 1608256017, + "groupIds": [], + "strokeSharpness": "sharp", + "boundElements": [], + "updated": 1664534908931, + "link": null, + "locked": false, + "fontSize": 36, + "fontFamily": 1, + "text": "Metadata \n+ Installation Procedure", + "baseline": 77, + "textAlign": "center", + "verticalAlign": "top", + "containerId": null, + "originalText": "Metadata \n+ Installation Procedure" + } + ], + "id": "_c7AOn60omrTlppZHlLQh", + "created": 1664540190548 + }, + { + "status": "unpublished", + "elements": [ + { + "type": "rectangle", + "version": 367, + "versionNonce": 963584621, + "isDeleted": false, + "id": "oAei2n-Ha1gpjnYdK7AwC", + "fillStyle": "hachure", + "strokeWidth": 1, + "strokeStyle": "solid", + "roughness": 2, + "opacity": 100, + "angle": 0, + "x": 240.5, + "y": 642.75, + "strokeColor": "#000000", + "backgroundColor": "#228be6", + "width": 392, + "height": 80, + "seed": 701868237, + "groupIds": [], + "strokeSharpness": "sharp", + "boundElements": [ + { + "id": "slfbd0bbRqA8648kZ5fns", + "type": "text" + }, + { + "id": "slfbd0bbRqA8648kZ5fns", + "type": "text" + }, + { + "type": "text", + "id": "slfbd0bbRqA8648kZ5fns" + } + ], + "updated": 1663329462351, + "link": null, + "locked": false + }, + { + "type": "text", + 
"version": 373, + "versionNonce": 1698441027, + "isDeleted": false, + "id": "slfbd0bbRqA8648kZ5fns", + "fillStyle": "hachure", + "strokeWidth": 1, + "strokeStyle": "solid", + "roughness": 2, + "opacity": 100, + "angle": 0, + "x": 245.5, + "y": 670.25, + "strokeColor": "#000000", + "backgroundColor": "transparent", + "width": 382, + "height": 25, + "seed": 1179637379, + "groupIds": [], + "strokeSharpness": "sharp", + "boundElements": [], + "updated": 1663329462351, + "link": null, + "locked": false, + "fontSize": 20, + "fontFamily": 1, + "text": "Execute the installation process", + "baseline": 18, + "textAlign": "center", + "verticalAlign": "middle", + "containerId": "oAei2n-Ha1gpjnYdK7AwC", + "originalText": "Execute the installation process" + }, + { + "type": "rectangle", + "version": 208, + "versionNonce": 844908259, + "isDeleted": false, + "id": "cLwg2WXUit_OTQmXLIdIW", + "fillStyle": "hachure", + "strokeWidth": 1, + "strokeStyle": "solid", + "roughness": 2, + "opacity": 100, + "angle": 0, + "x": 815.5, + "y": 517.5, + "strokeColor": "#000000", + "backgroundColor": "transparent", + "width": 392, + "height": 80, + "seed": 557411811, + "groupIds": [ + "D1SCf714tngJFHk8TFX8T" + ], + "strokeSharpness": "sharp", + "boundElements": [ + { + "id": "SpG_8HxzMHjM2HYK6Fgwx", + "type": "arrow" + } + ], + "updated": 1663329462351, + "link": null, + "locked": false + }, + { + "type": "text", + "version": 274, + "versionNonce": 1704611021, + "isDeleted": false, + "id": "1r8FMl26VYSKpPKlHA_Oc", + "fillStyle": "hachure", + "strokeWidth": 1, + "strokeStyle": "solid", + "roughness": 2, + "opacity": 100, + "angle": 0, + "x": 916.5, + "y": 545, + "strokeColor": "#000000", + "backgroundColor": "transparent", + "width": 207, + "height": 25, + "seed": 961881101, + "groupIds": [ + "D1SCf714tngJFHk8TFX8T" + ], + "strokeSharpness": "sharp", + "boundElements": [], + "updated": 1663329462351, + "link": null, + "locked": false, + "fontSize": 20, + "fontFamily": 1, + "text": 
"CMakeBuilder.cmake()", + "baseline": 18, + "textAlign": "left", + "verticalAlign": "top", + "containerId": null, + "originalText": "CMakeBuilder.cmake()" + }, + { + "type": "rectangle", + "version": 264, + "versionNonce": 295137923, + "isDeleted": false, + "id": "CSwjuAw6Nl67sqQ6p21ty", + "fillStyle": "hachure", + "strokeWidth": 1, + "strokeStyle": "solid", + "roughness": 2, + "opacity": 100, + "angle": 0, + "x": 815.5, + "y": 642.75, + "strokeColor": "#000000", + "backgroundColor": "transparent", + "width": 392, + "height": 80, + "seed": 1011629069, + "groupIds": [ + "D1SCf714tngJFHk8TFX8T" + ], + "strokeSharpness": "sharp", + "boundElements": [ + { + "id": "SpG_8HxzMHjM2HYK6Fgwx", + "type": "arrow" + }, + { + "id": "zvmLoAH5oICRD5og-pBvu", + "type": "arrow" + } + ], + "updated": 1663329462351, + "link": null, + "locked": false + }, + { + "type": "text", + "version": 466, + "versionNonce": 196160301, + "isDeleted": false, + "id": "WX4axTU0IR7PJb0GkR-jq", + "fillStyle": "hachure", + "strokeWidth": 1, + "strokeStyle": "solid", + "roughness": 2, + "opacity": 100, + "angle": 0, + "x": 922.5, + "y": 670.25, + "strokeColor": "#000000", + "backgroundColor": "transparent", + "width": 193, + "height": 25, + "seed": 716117827, + "groupIds": [ + "D1SCf714tngJFHk8TFX8T" + ], + "strokeSharpness": "sharp", + "boundElements": [], + "updated": 1663329462351, + "link": null, + "locked": false, + "fontSize": 20, + "fontFamily": 1, + "text": "CMakeBuilder.build()", + "baseline": 18, + "textAlign": "left", + "verticalAlign": "top", + "containerId": null, + "originalText": "CMakeBuilder.build()" + }, + { + "type": "rectangle", + "version": 301, + "versionNonce": 1545420173, + "isDeleted": false, + "id": "coUXke3Fv_DpjqG9zgEjQ", + "fillStyle": "hachure", + "strokeWidth": 1, + "strokeStyle": "solid", + "roughness": 2, + "opacity": 100, + "angle": 0, + "x": 815.5, + "y": 768, + "strokeColor": "#000000", + "backgroundColor": "transparent", + "width": 392, + "height": 80, + "seed": 
1934529891, + "groupIds": [ + "D1SCf714tngJFHk8TFX8T" + ], + "strokeSharpness": "sharp", + "boundElements": [ + { + "type": "text", + "id": "yVIbU03yFYvpXnh9xIgET" + }, + { + "id": "zvmLoAH5oICRD5og-pBvu", + "type": "arrow" + } + ], + "updated": 1663329462351, + "link": null, + "locked": false + }, + { + "type": "text", + "version": 273, + "versionNonce": 1837690307, + "isDeleted": false, + "id": "yVIbU03yFYvpXnh9xIgET", + "fillStyle": "hachure", + "strokeWidth": 1, + "strokeStyle": "solid", + "roughness": 2, + "opacity": 100, + "angle": 0, + "x": 820.5, + "y": 795.5, + "strokeColor": "#000000", + "backgroundColor": "transparent", + "width": 382, + "height": 25, + "seed": 1611291683, + "groupIds": [ + "D1SCf714tngJFHk8TFX8T" + ], + "strokeSharpness": "sharp", + "boundElements": [], + "updated": 1663329462351, + "link": null, + "locked": false, + "fontSize": 20, + "fontFamily": 1, + "text": "CMakeBuilder.install()", + "baseline": 18, + "textAlign": "center", + "verticalAlign": "middle", + "containerId": "coUXke3Fv_DpjqG9zgEjQ", + "originalText": "CMakeBuilder.install()" + }, + { + "type": "arrow", + "version": 564, + "versionNonce": 1041761261, + "isDeleted": false, + "id": "SpG_8HxzMHjM2HYK6Fgwx", + "fillStyle": "hachure", + "strokeWidth": 1, + "strokeStyle": "solid", + "roughness": 2, + "opacity": 100, + "angle": 0, + "x": 1209, + "y": 558.5, + "strokeColor": "#000000", + "backgroundColor": "transparent", + "width": 96, + "height": 109, + "seed": 732445197, + "groupIds": [ + "D1SCf714tngJFHk8TFX8T" + ], + "strokeSharpness": "round", + "boundElements": [], + "updated": 1663329462351, + "link": null, + "locked": false, + "startBinding": { + "elementId": "cLwg2WXUit_OTQmXLIdIW", + "focus": -0.7327371048252911, + "gap": 1.5 + }, + "endBinding": { + "elementId": "CSwjuAw6Nl67sqQ6p21ty", + "focus": 0.6494341563786008, + "gap": 2.5 + }, + "lastCommittedPoint": null, + "startArrowhead": null, + "endArrowhead": "triangle", + "points": [ + [ + 0, + 0 + ], + [ + 96, + 54 + 
], + [ + 1, + 109 + ] + ] + }, + { + "type": "arrow", + "version": 642, + "versionNonce": 1380728163, + "isDeleted": false, + "id": "zvmLoAH5oICRD5og-pBvu", + "fillStyle": "hachure", + "strokeWidth": 1, + "strokeStyle": "solid", + "roughness": 2, + "opacity": 100, + "angle": 0, + "x": 1216, + "y": 680, + "strokeColor": "#000000", + "backgroundColor": "transparent", + "width": 98, + "height": 124.33745608356844, + "seed": 708861581, + "groupIds": [ + "D1SCf714tngJFHk8TFX8T" + ], + "strokeSharpness": "round", + "boundElements": [], + "updated": 1663329462351, + "link": null, + "locked": false, + "startBinding": { + "elementId": "CSwjuAw6Nl67sqQ6p21ty", + "focus": -0.7839018302828619, + "gap": 8.5 + }, + "endBinding": { + "elementId": "coUXke3Fv_DpjqG9zgEjQ", + "focus": 0.7841576120638036, + "gap": 6.5 + }, + "lastCommittedPoint": null, + "startArrowhead": null, + "endArrowhead": "triangle", + "points": [ + [ + 0, + 0 + ], + [ + 96, + 54 + ], + [ + -2, + 124.33745608356844 + ] + ] + }, + { + "type": "text", + "version": 613, + "versionNonce": 909390253, + "isDeleted": false, + "id": "fAHH1YdSlMq8ioLIj36Of", + "fillStyle": "hachure", + "strokeWidth": 1, + "strokeStyle": "solid", + "roughness": 2, + "opacity": 100, + "angle": 0, + "x": 644, + "y": 353.7484662576685, + "strokeColor": "#000000", + "backgroundColor": "transparent", + "width": 166, + "height": 567.2515337423315, + "seed": 1455993539, + "groupIds": [], + "strokeSharpness": "sharp", + "boundElements": [], + "updated": 1663329499644, + "link": null, + "locked": false, + "fontSize": 493.16790307261914, + "fontFamily": 2, + "text": "{", + "baseline": 454.2515337423315, + "textAlign": "center", + "verticalAlign": "top", + "containerId": null, + "originalText": "{" + } + ], + "id": "KBV_I9pxrJD2zPuaP6vBc", + "created": 1663329511286 + }, + { + "status": "unpublished", + "elements": [ + { + "type": "rectangle", + "version": 93, + "versionNonce": 42296109, + "isDeleted": false, + "id": "cLwg2WXUit_OTQmXLIdIW", + 
"fillStyle": "hachure", + "strokeWidth": 1, + "strokeStyle": "solid", + "roughness": 2, + "opacity": 100, + "angle": 0, + "x": 625.5, + "y": 298, + "strokeColor": "#000000", + "backgroundColor": "transparent", + "width": 392, + "height": 80, + "seed": 557411811, + "groupIds": [], + "strokeSharpness": "sharp", + "boundElements": [ + { + "id": "SpG_8HxzMHjM2HYK6Fgwx", + "type": "arrow" + } + ], + "updated": 1663324636434, + "link": null, + "locked": false + }, + { + "type": "text", + "version": 99, + "versionNonce": 1537897869, + "isDeleted": false, + "id": "1r8FMl26VYSKpPKlHA_Oc", + "fillStyle": "hachure", + "strokeWidth": 1, + "strokeStyle": "solid", + "roughness": 2, + "opacity": 100, + "angle": 0, + "x": 726.5, + "y": 325.5, + "strokeColor": "#000000", + "backgroundColor": "transparent", + "width": 179, + "height": 25, + "seed": 961881101, + "groupIds": [], + "strokeSharpness": "sharp", + "boundElements": [], + "updated": 1663324636434, + "link": null, + "locked": false, + "fontSize": 20, + "fontFamily": 1, + "text": "Fetch source files", + "baseline": 18, + "textAlign": "left", + "verticalAlign": "top", + "containerId": null, + "originalText": "Fetch source files" + }, + { + "type": "rectangle", + "version": 149, + "versionNonce": 1653290435, + "isDeleted": false, + "id": "CSwjuAw6Nl67sqQ6p21ty", + "fillStyle": "hachure", + "strokeWidth": 1, + "strokeStyle": "solid", + "roughness": 2, + "opacity": 100, + "angle": 0, + "x": 625.5, + "y": 423.25, + "strokeColor": "#000000", + "backgroundColor": "transparent", + "width": 392, + "height": 80, + "seed": 1011629069, + "groupIds": [], + "strokeSharpness": "sharp", + "boundElements": [ + { + "id": "SpG_8HxzMHjM2HYK6Fgwx", + "type": "arrow" + }, + { + "id": "zvmLoAH5oICRD5og-pBvu", + "type": "arrow" + } + ], + "updated": 1663324636434, + "link": null, + "locked": false + }, + { + "type": "text", + "version": 272, + "versionNonce": 1195260909, + "isDeleted": false, + "id": "WX4axTU0IR7PJb0GkR-jq", + "fillStyle": 
"hachure", + "strokeWidth": 1, + "strokeStyle": "solid", + "roughness": 2, + "opacity": 100, + "angle": 0, + "x": 645.5, + "y": 450.75, + "strokeColor": "#000000", + "backgroundColor": "transparent", + "width": 352, + "height": 25, + "seed": 716117827, + "groupIds": [], + "strokeSharpness": "sharp", + "boundElements": [], + "updated": 1663324636434, + "link": null, + "locked": false, + "fontSize": 20, + "fontFamily": 1, + "text": "Expand them in the stage directory", + "baseline": 18, + "textAlign": "left", + "verticalAlign": "top", + "containerId": null, + "originalText": "Expand them in the stage directory" + }, + { + "type": "rectangle", + "version": 185, + "versionNonce": 2143651171, + "isDeleted": false, + "id": "coUXke3Fv_DpjqG9zgEjQ", + "fillStyle": "hachure", + "strokeWidth": 1, + "strokeStyle": "solid", + "roughness": 2, + "opacity": 100, + "angle": 0, + "x": 625.5, + "y": 548.5, + "strokeColor": "#000000", + "backgroundColor": "transparent", + "width": 392, + "height": 80, + "seed": 1934529891, + "groupIds": [], + "strokeSharpness": "sharp", + "boundElements": [ + { + "type": "text", + "id": "yVIbU03yFYvpXnh9xIgET" + }, + { + "id": "zvmLoAH5oICRD5og-pBvu", + "type": "arrow" + }, + { + "id": "5yqrFWV-hhJ4RoVewqAC0", + "type": "arrow" + } + ], + "updated": 1663324636434, + "link": null, + "locked": false + }, + { + "type": "text", + "version": 135, + "versionNonce": 1833580109, + "isDeleted": false, + "id": "yVIbU03yFYvpXnh9xIgET", + "fillStyle": "hachure", + "strokeWidth": 1, + "strokeStyle": "solid", + "roughness": 2, + "opacity": 100, + "angle": 0, + "x": 630.5, + "y": 563.5, + "strokeColor": "#000000", + "backgroundColor": "transparent", + "width": 382, + "height": 50, + "seed": 1611291683, + "groupIds": [], + "strokeSharpness": "sharp", + "boundElements": [], + "updated": 1663324636434, + "link": null, + "locked": false, + "fontSize": 20, + "fontFamily": 1, + "text": "Set the stage directory as the \ncurrent working directory", + "baseline": 43, + 
"textAlign": "center", + "verticalAlign": "middle", + "containerId": "coUXke3Fv_DpjqG9zgEjQ", + "originalText": "Set the stage directory as the current working directory" + }, + { + "type": "rectangle", + "version": 253, + "versionNonce": 1704770627, + "isDeleted": false, + "id": "tBTBRiEA6AJABK4wnKF_-", + "fillStyle": "hachure", + "strokeWidth": 1, + "strokeStyle": "solid", + "roughness": 2, + "opacity": 100, + "angle": 0, + "x": 625.5, + "y": 673.75, + "strokeColor": "#000000", + "backgroundColor": "transparent", + "width": 392, + "height": 80, + "seed": 1257829773, + "groupIds": [], + "strokeSharpness": "sharp", + "boundElements": [ + { + "id": "GoE9udjDQxUqdsYCUnbVI", + "type": "text" + }, + { + "type": "text", + "id": "GoE9udjDQxUqdsYCUnbVI" + }, + { + "id": "5yqrFWV-hhJ4RoVewqAC0", + "type": "arrow" + }, + { + "id": "v-9Voh5erXQ8iqoQ_9BVO", + "type": "arrow" + } + ], + "updated": 1663324636434, + "link": null, + "locked": false + }, + { + "type": "text", + "version": 194, + "versionNonce": 1557028205, + "isDeleted": false, + "id": "GoE9udjDQxUqdsYCUnbVI", + "fillStyle": "hachure", + "strokeWidth": 1, + "strokeStyle": "solid", + "roughness": 2, + "opacity": 100, + "angle": 0, + "x": 630.5, + "y": 701.25, + "strokeColor": "#000000", + "backgroundColor": "transparent", + "width": 382, + "height": 25, + "seed": 895792579, + "groupIds": [], + "strokeSharpness": "sharp", + "boundElements": [], + "updated": 1663324636434, + "link": null, + "locked": false, + "fontSize": 20, + "fontFamily": 1, + "text": "Fork a new build environment", + "baseline": 18, + "textAlign": "center", + "verticalAlign": "middle", + "containerId": "tBTBRiEA6AJABK4wnKF_-", + "originalText": "Fork a new build environment" + }, + { + "type": "rectangle", + "version": 321, + "versionNonce": 1675770851, + "isDeleted": false, + "id": "oAei2n-Ha1gpjnYdK7AwC", + "fillStyle": "hachure", + "strokeWidth": 1, + "strokeStyle": "solid", + "roughness": 2, + "opacity": 100, + "angle": 0, + "x": 625.5, + "y": 
799, + "strokeColor": "#000000", + "backgroundColor": "#228be6", + "width": 392, + "height": 80, + "seed": 701868237, + "groupIds": [], + "strokeSharpness": "sharp", + "boundElements": [ + { + "id": "slfbd0bbRqA8648kZ5fns", + "type": "text" + }, + { + "id": "slfbd0bbRqA8648kZ5fns", + "type": "text" + }, + { + "type": "text", + "id": "slfbd0bbRqA8648kZ5fns" + }, + { + "id": "v-9Voh5erXQ8iqoQ_9BVO", + "type": "arrow" + } + ], + "updated": 1663324636434, + "link": null, + "locked": false + }, + { + "type": "text", + "version": 328, + "versionNonce": 1868179405, + "isDeleted": false, + "id": "slfbd0bbRqA8648kZ5fns", + "fillStyle": "hachure", + "strokeWidth": 1, + "strokeStyle": "solid", + "roughness": 2, + "opacity": 100, + "angle": 0, + "x": 630.5, + "y": 826.5, + "strokeColor": "#000000", + "backgroundColor": "transparent", + "width": 382, + "height": 25, + "seed": 1179637379, + "groupIds": [], + "strokeSharpness": "sharp", + "boundElements": [], + "updated": 1663324636434, + "link": null, + "locked": false, + "fontSize": 20, + "fontFamily": 1, + "text": "Execute the installation process", + "baseline": 18, + "textAlign": "center", + "verticalAlign": "middle", + "containerId": "oAei2n-Ha1gpjnYdK7AwC", + "originalText": "Execute the installation process" + }, + { + "type": "arrow", + "version": 221, + "versionNonce": 1777917731, + "isDeleted": false, + "id": "SpG_8HxzMHjM2HYK6Fgwx", + "fillStyle": "hachure", + "strokeWidth": 1, + "strokeStyle": "solid", + "roughness": 2, + "opacity": 100, + "angle": 0, + "x": 1019, + "y": 339, + "strokeColor": "#000000", + "backgroundColor": "transparent", + "width": 96, + "height": 109, + "seed": 732445197, + "groupIds": [], + "strokeSharpness": "round", + "boundElements": [], + "updated": 1663324636434, + "link": null, + "locked": false, + "startBinding": { + "elementId": "cLwg2WXUit_OTQmXLIdIW", + "focus": -0.7533277870216306, + "gap": 7 + }, + "endBinding": { + "elementId": "CSwjuAw6Nl67sqQ6p21ty", + "focus": 0.7554869684499315, + 
"gap": 6 + }, + "lastCommittedPoint": null, + "startArrowhead": null, + "endArrowhead": "triangle", + "points": [ + [ + 0, + 0 + ], + [ + 96, + 54 + ], + [ + 1, + 109 + ] + ] + }, + { + "type": "arrow", + "version": 299, + "versionNonce": 309789379, + "isDeleted": false, + "id": "zvmLoAH5oICRD5og-pBvu", + "fillStyle": "hachure", + "strokeWidth": 1, + "strokeStyle": "solid", + "roughness": 2, + "opacity": 100, + "angle": 0, + "x": 1026, + "y": 460.5, + "strokeColor": "#000000", + "backgroundColor": "transparent", + "width": 98, + "height": 124.33745608356844, + "seed": 708861581, + "groupIds": [], + "strokeSharpness": "round", + "boundElements": [], + "updated": 1663324636435, + "link": null, + "locked": false, + "startBinding": { + "elementId": "CSwjuAw6Nl67sqQ6p21ty", + "focus": -0.7021630615640598, + "gap": 12 + }, + "endBinding": { + "elementId": "coUXke3Fv_DpjqG9zgEjQ", + "focus": 0.8530521262002744, + "gap": 12 + }, + "lastCommittedPoint": null, + "startArrowhead": null, + "endArrowhead": "triangle", + "points": [ + [ + 0, + 0 + ], + [ + 96, + 54 + ], + [ + -2, + 124.33745608356844 + ] + ] + }, + { + "type": "arrow", + "version": 301, + "versionNonce": 914472685, + "isDeleted": false, + "id": "5yqrFWV-hhJ4RoVewqAC0", + "fillStyle": "hachure", + "strokeWidth": 1, + "strokeStyle": "solid", + "roughness": 2, + "opacity": 100, + "angle": 0, + "x": 1019, + "y": 586.6789496258876, + "strokeColor": "#000000", + "backgroundColor": "transparent", + "width": 99, + "height": 123.78306157234579, + "seed": 642378381, + "groupIds": [], + "strokeSharpness": "round", + "boundElements": [], + "updated": 1663324636435, + "link": null, + "locked": false, + "startBinding": { + "elementId": "coUXke3Fv_DpjqG9zgEjQ", + "focus": -0.6501663893510815, + "gap": 7 + }, + "endBinding": { + "elementId": "tBTBRiEA6AJABK4wnKF_-", + "focus": 0.8705418381344308, + "gap": 8 + }, + "lastCommittedPoint": null, + "startArrowhead": null, + "endArrowhead": "triangle", + "points": [ + [ + 0, + 0 + ], 
+ [ + 99, + 42.82105037411236 + ], + [ + 10.000000000000227, + 123.78306157234579 + ] + ] + }, + { + "type": "arrow", + "version": 351, + "versionNonce": 984592995, + "isDeleted": false, + "id": "v-9Voh5erXQ8iqoQ_9BVO", + "fillStyle": "hachure", + "strokeWidth": 1, + "strokeStyle": "solid", + "roughness": 2, + "opacity": 100, + "angle": 0, + "x": 1031, + "y": 714.8662394200408, + "strokeColor": "#000000", + "backgroundColor": "transparent", + "width": 90, + "height": 137.7637151210173, + "seed": 698547757, + "groupIds": [], + "strokeSharpness": "round", + "boundElements": [], + "updated": 1663324636435, + "link": null, + "locked": false, + "startBinding": { + "elementId": "tBTBRiEA6AJABK4wnKF_-", + "focus": -0.6014975041597337, + "gap": 10 + }, + "endBinding": { + "elementId": "oAei2n-Ha1gpjnYdK7AwC", + "focus": 0.9573045267489712, + "gap": 12 + }, + "lastCommittedPoint": null, + "startArrowhead": null, + "endArrowhead": "triangle", + "points": [ + [ + 0, + 0 + ], + [ + 90, + 33.633760579959244 + ], + [ + 4, + 137.7637151210173 + ] + ] + } + ], + "id": "RzNgncGu1938Ma5Teh6qZ", + "created": 1663324659550 + } + ] +} \ No newline at end of file diff --git a/lib/spack/docs/index.rst b/lib/spack/docs/index.rst index 3a5e07ed183..b755f2f3768 100644 --- a/lib/spack/docs/index.rst +++ b/lib/spack/docs/index.rst @@ -56,7 +56,6 @@ or refer to the full manual below. basic_usage Tutorial: Spack 101 replace_conda_homebrew - known_issues .. toctree:: :maxdepth: 2 diff --git a/lib/spack/docs/known_issues.rst b/lib/spack/docs/known_issues.rst deleted file mode 100644 index 0e309f1829a..00000000000 --- a/lib/spack/docs/known_issues.rst +++ /dev/null @@ -1,40 +0,0 @@ -.. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other - Spack Project Developers. See the top-level COPYRIGHT file for details. - - SPDX-License-Identifier: (Apache-2.0 OR MIT) - -============ -Known Issues -============ - -This is a list of known issues in Spack. 
It provides ways of getting around these -problems if you encounter them. - ------------------------------------------------- -Spack does not seem to respect ``packages.yaml`` ------------------------------------------------- - -.. note:: - - This issue is **resolved** as of v0.19.0.dev0 commit - `8281a0c5feabfc4fe180846d6fe95cfe53420bc5`, through the introduction of package - requirements. See :ref:`package-requirements`. - -A common problem in Spack v0.18.0 up to v0.19.0.dev0 is that package, compiler and target -preferences specified in ``packages.yaml`` do not seem to be respected. Spack picks the -"wrong" compilers and their versions, package versions and variants, and -micro-architectures. - -This is however not a bug. In order to reduce the number of builds of the same -packages, the concretizer values reuse of installed packages higher than preferences -set in ``packages.yaml``. Note that ``packages.yaml`` specifies only preferences, not -hard constraints. - -There are multiple workarounds: - -1. Disable reuse during concretization: ``spack install --fresh `` when installing - from the command line, or ``spack concretize --fresh --force`` when using - environments. -2. Turn preferences into constrains, by moving them to the input spec. For example, - use ``spack spec zlib%gcc@12`` when you want to force GCC 12 even if ``zlib`` was - already installed with GCC 10. diff --git a/lib/spack/docs/packaging_guide.rst b/lib/spack/docs/packaging_guide.rst index 5662c3c3f26..2ceb4ce0517 100644 --- a/lib/spack/docs/packaging_guide.rst +++ b/lib/spack/docs/packaging_guide.rst @@ -34,24 +34,155 @@ ubiquitous in the scientific software community. Second, it's a modern language and has many powerful features to help make package writing easy. ---------------------------- -Creating & editing packages ---------------------------- + +.. 
_installation_procedure: + +-------------------------------------- +Overview of the installation procedure +-------------------------------------- + +Whenever Spack installs software, it goes through a series of predefined steps: + +.. image:: images/installation_pipeline.png + :scale: 60 % + :align: center + +All these steps are influenced by the metadata in each ``package.py`` and +by the current Spack configuration. +Since build systems are different from one another, the execution of the +last block in the figure is further expanded in a build system specific way. +An example for ``CMake`` is, for instance: + +.. image:: images/builder_phases.png + :align: center + :scale: 60 % + +The predefined steps for each build system are called "phases". +In general, the name and order in which the phases will be executed can be +obtained by either reading the API docs at :py:mod:`~.spack.build_systems`, or +using the ``spack info`` command: + +.. code-block:: console + :emphasize-lines: 13,14 + + $ spack info --phases m4 + AutotoolsPackage: m4 + Homepage: https://www.gnu.org/software/m4/m4.html + + Safe versions: + 1.4.17 ftp://ftp.gnu.org/gnu/m4/m4-1.4.17.tar.gz + + Variants: + Name Default Description + + sigsegv on Build the libsigsegv dependency + + Installation Phases: + autoreconf configure build install + + Build Dependencies: + libsigsegv + + ... + +An extensive list of available build systems and phases is provided in :ref:`installation_process`. + + +------------------------ +Writing a package recipe +------------------------ + +Since v0.19, Spack supports two ways of writing a package recipe. The most commonly used is to encode both the metadata +(directives, etc.) and the build behavior in a single class, like shown in the following example: + +.. 
code-block:: python + + class Openjpeg(CMakePackage): + """OpenJPEG is an open-source JPEG 2000 codec written in C language""" + + homepage = "https://github.com/uclouvain/openjpeg" + url = "https://github.com/uclouvain/openjpeg/archive/v2.3.1.tar.gz" + + version("2.4.0", sha256="8702ba68b442657f11aaeb2b338443ca8d5fb95b0d845757968a7be31ef7f16d") + + variant("codec", default=False, description="Build the CODEC executables") + depends_on("libpng", when="+codec") + + def url_for_version(self, version): + if version >= Version("2.1.1"): + return super(Openjpeg, self).url_for_version(version) + url_fmt = "https://github.com/uclouvain/openjpeg/archive/version.{0}.tar.gz" + return url_fmt.format(version) + + def cmake_args(self): + args = [ + self.define_from_variant("BUILD_CODEC", "codec"), + self.define("BUILD_MJ2", False), + self.define("BUILD_THIRDPARTY", False), + ] + return args + +A package encoded with a single class is backward compatible with versions of Spack +lower than v0.19, and so are custom repositories containing only recipes of this kind. +The downside is that *this format doesn't allow packagers to use more than one build system in a single recipe*. + +To do that, we have to resort to the second way Spack has of writing packages, which involves writing a +builder class explicitly. Using the same example as above, this reads: + +.. 
code-block:: python + + class Openjpeg(CMakePackage): + """OpenJPEG is an open-source JPEG 2000 codec written in C language""" + + homepage = "https://github.com/uclouvain/openjpeg" + url = "https://github.com/uclouvain/openjpeg/archive/v2.3.1.tar.gz" + + version("2.4.0", sha256="8702ba68b442657f11aaeb2b338443ca8d5fb95b0d845757968a7be31ef7f16d") + + variant("codec", default=False, description="Build the CODEC executables") + depends_on("libpng", when="+codec") + + def url_for_version(self, version): + if version >= Version("2.1.1"): + return super(Openjpeg, self).url_for_version(version) + url_fmt = "https://github.com/uclouvain/openjpeg/archive/version.{0}.tar.gz" + return url_fmt.format(version) + + class CMakeBuilder(spack.build_systems.cmake.CMakeBuilder): + def cmake_args(self): + args = [ + self.define_from_variant("BUILD_CODEC", "codec"), + self.define("BUILD_MJ2", False), + self.define("BUILD_THIRDPARTY", False), + ] + return args + +This way of writing packages allows extending the recipe to support multiple build systems, +see :ref:`multiple_build_systems` for more details. The downside is that recipes of this kind +are only understood by Spack since v0.19+. More information on the internal architecture of +Spack can be found at :ref:`package_class_structure`. + +.. note:: + + If a builder is implemented in ``package.py``, all build-specific methods must be moved + to the builder. This means that if you have a package like + + .. code-block:: python + + class Foo(CmakePackage): + def cmake_args(self): + ... + + and you add a builder to the ``package.py``, you must move ``cmake_args`` to the builder. .. _cmd-spack-create: -^^^^^^^^^^^^^^^^ -``spack create`` -^^^^^^^^^^^^^^^^ +--------------------- +Creating new packages +--------------------- -The ``spack create`` command creates a directory with the package name and -generates a ``package.py`` file with a boilerplate package template. 
If given -a URL pointing to a tarball or other software archive, ``spack create`` is -smart enough to determine basic information about the package, including its name -and build system. In most cases, ``spack create`` plus a few modifications is -all you need to get a package working. - -Here's an example: +To help creating a new package Spack provides a command that generates a ``package.py`` +file in an existing repository, with a boilerplate package template. Here's an example: .. code-block:: console @@ -87,23 +218,6 @@ You do not *have* to download all of the versions up front. You can always choose to download just one tarball initially, and run :ref:`cmd-spack-checksum` later if you need more versions. -Let's say you download 3 tarballs: - -.. code-block:: console - - How many would you like to checksum? (default is 1, q to abort) 3 - ==> Downloading... - ==> Fetching https://gmplib.org/download/gmp/gmp-6.1.2.tar.bz2 - ######################################################################## 100.0% - ==> Fetching https://gmplib.org/download/gmp/gmp-6.1.1.tar.bz2 - ######################################################################## 100.0% - ==> Fetching https://gmplib.org/download/gmp/gmp-6.1.0.tar.bz2 - ######################################################################## 100.0% - ==> Checksummed 3 versions of gmp: - ==> This package looks like it uses the autotools build system - ==> Created template for gmp package - ==> Created package file: /Users/Adam/spack/var/spack/repos/builtin/packages/gmp/package.py - Spack automatically creates a directory in the appropriate repository, generates a boilerplate template for your package, and opens up the new ``package.py`` in your favorite ``$EDITOR``: @@ -111,6 +225,14 @@ generates a boilerplate template for your package, and opens up the new .. code-block:: python :linenos: + # Copyright 2013-2022 Lawrence Livermore National Security, LLC and other + # Spack Project Developers. 
See the top-level COPYRIGHT file for details. + # + # SPDX-License-Identifier: (Apache-2.0 OR MIT) + + # ---------------------------------------------------------------------------- + # If you submit this package back to Spack as a pull request, + # please first remove this boilerplate and all FIXME comments. # # This is a template package file for Spack. We've put "FIXME" # next to all the things you'll want to change. Once you've handled @@ -123,9 +245,8 @@ generates a boilerplate template for your package, and opens up the new # spack edit gmp # # See the Spack documentation for more information on packaging. - # If you submit this package back to Spack as a pull request, - # please first remove this boilerplate and all FIXME comments. - # + # ---------------------------------------------------------------------------- + import spack.build_systems.autotools from spack.package import * @@ -133,19 +254,17 @@ generates a boilerplate template for your package, and opens up the new """FIXME: Put a proper description of your package here.""" # FIXME: Add a proper url for your package's homepage here. - homepage = "http://www.example.com" - url = "https://gmplib.org/download/gmp/gmp-6.1.2.tar.bz2" + homepage = "https://www.example.com" + url = "https://gmplib.org/download/gmp/gmp-6.1.2.tar.bz2" # FIXME: Add a list of GitHub accounts to # notify when the package is updated. - # maintainers = ['github_user1', 'github_user2'] + # maintainers = ["github_user1", "github_user2"] - version('6.1.2', '8ddbb26dc3bd4e2302984debba1406a5') - version('6.1.1', '4c175f86e11eb32d8bf9872ca3a8e11d') - version('6.1.0', '86ee6e54ebfc4a90b643a65e402c4048') + version("6.2.1", sha256="eae9326beb4158c386e39a356818031bd28f3124cf915f8c5b1dc4c7a36b4d7c") # FIXME: Add dependencies if required. 
- # depends_on('foo') + # depends_on("foo") def configure_args(self): # FIXME: Add arguments other than --prefix @@ -154,15 +273,16 @@ generates a boilerplate template for your package, and opens up the new return args The tedious stuff (creating the class, checksumming archives) has been -done for you. You'll notice that ``spack create`` correctly detected that -``gmp`` uses the Autotools build system. It created a new ``Gmp`` package -that subclasses the ``AutotoolsPackage`` base class. This base class -provides basic installation methods common to all Autotools packages: +done for you. Spack correctly detected that ``gmp`` uses the ``autotools`` +build system, so it created a new ``Gmp`` package that subclasses the +``AutotoolsPackage`` base class. + +The default installation procedure for a package subclassing the ``AutotoolsPackage`` +is to go through the typical process of: .. code-block:: bash ./configure --prefix=/path/to/installation/directory - make make check make install @@ -209,12 +329,14 @@ The rest of the tasks you need to do are as follows: Your new package may require specific flags during ``configure``. These can be added via ``configure_args``. Specifics will differ depending on the package and its build system. - :ref:`Implementing the install method ` is + :ref:`installation_process` is covered in detail later. -Passing a URL to ``spack create`` is a convenient and easy way to get -a basic package template, but what if your software is licensed and -cannot be downloaded from a URL? You can still create a boilerplate +^^^^^^^^^^^^^^^^^^^^^^^^^ +Non-downloadable software +^^^^^^^^^^^^^^^^^^^^^^^^^ + +If your software cannot be downloaded from a URL you can still create a boilerplate ``package.py`` by telling ``spack create`` what name you want to use: .. code-block:: console @@ -223,40 +345,23 @@ cannot be downloaded from a URL? 
You can still create a boilerplate This will create a simple ``intel`` package with an ``install()`` method that you can craft to install your package. - -What if ``spack create `` guessed the wrong name or build system? -For example, if your package uses the Autotools build system but does -not come with a ``configure`` script, Spack won't realize it uses -Autotools. You can overwrite the old package with ``--force`` and specify -a name with ``--name`` or a build system template to use with ``--template``: +Likewise, you can force the build system to be used with ``--template`` and, +in case it's needed, you can overwrite a package already in the repository +with ``--force``: .. code-block:: console $ spack create --name gmp https://gmplib.org/download/gmp/gmp-6.1.2.tar.bz2 $ spack create --force --template autotools https://gmplib.org/download/gmp/gmp-6.1.2.tar.bz2 -.. note:: - - If you are creating a package that uses the Autotools build system - but does not come with a ``configure`` script, you'll need to add an - ``autoreconf`` method to your package that explains how to generate - the ``configure`` script. You may also need the following dependencies: - - .. code-block:: python - - depends_on('autoconf', type='build') - depends_on('automake', type='build') - depends_on('libtool', type='build') - depends_on('m4', type='build') - A complete list of available build system templates can be found by running ``spack create --help``. .. _cmd-spack-edit: -^^^^^^^^^^^^^^ -``spack edit`` -^^^^^^^^^^^^^^ +------------------------- +Editing existing packages +------------------------- One of the easiest ways to learn how to write packages is to look at existing ones. You can edit a package file by name with the ``spack @@ -266,10 +371,15 @@ edit`` command: $ spack edit gmp -So, if you used ``spack create`` to create a package, then saved and -closed the resulting file, you can get back to it with ``spack edit``. 
-The ``gmp`` package actually lives in -``$SPACK_ROOT/var/spack/repos/builtin/packages/gmp/package.py``, +If you used ``spack create`` to create a package, you can get back to +it later with ``spack edit``. For instance, the ``gmp`` package actually +lives in: + +.. code-block:: console + + $ spack location -p gmp + ${SPACK_ROOT}/var/spack/repos/builtin/packages/gmp/package.py + but ``spack edit`` provides a much simpler shortcut and saves you the trouble of typing the full path. @@ -2422,7 +2532,7 @@ Spack provides a mechanism for dependencies to influence the environment of their dependents by overriding the :meth:`setup_dependent_run_environment ` or the -:meth:`setup_dependent_build_environment ` +:meth:`setup_dependent_build_environment ` methods. The Qt package, for instance, uses this call: @@ -3280,67 +3390,91 @@ the Python extensions provided by them: once for ``+python`` and once for ``~python``. Other than using a little extra disk space, that solution has no serious problems. -.. _installation_procedure: +.. _installation_process: ---------------------------------------- -Implementing the installation procedure ---------------------------------------- +-------------------------------- +Overriding build system defaults +-------------------------------- -The last element of a package is its **installation procedure**. This is -where the real work of installation happens, and it's the main part of -the package you'll need to customize for each piece of software. +.. note:: -Defining an installation procedure means overriding a set of methods or attributes -that will be called at some point during the installation of the package. -The package base class, usually specialized for a given build system, determines the -actual set of entities available for overriding. 
-The classes that are currently provided by Spack are: + If you code a single class in ``package.py`` all the functions shown in the table below + can be implemented with the same signature on the ``*Package`` instead of the corresponding builder. + + +Most of the time the default implementation of methods or attributes in build system base classes +is what a packager needs, and just a very few entities need to be overwritten. Typically we just +need to override methods like ``configure_args``: + +.. code-block:: python + + def configure_args(self): + args = ["--enable-cxx"] + self.enable_or_disable("libs") + if "libs=static" in self.spec: + args.append("--with-pic") + return args + +The actual set of entities available for overriding in ``package.py`` depend on +the build system. The build systems currently supported by Spack are: +----------------------------------------------------------+----------------------------------+ -| **Base Class** | **Purpose** | +| **API docs** | **Description** | +==========================================================+==================================+ -| :class:`~spack.package_base.Package` | General base class not | -| | specialized for any build system | +| :class:`~spack.build_systems.generic` | Generic build system without any | +| | base implementation | +----------------------------------------------------------+----------------------------------+ -| :class:`~spack.build_systems.makefile.MakefilePackage` | Specialized class for packages | -| | built invoking | +| :class:`~spack.build_systems.makefile` | Specialized build system for | +| | software built invoking | | | hand-written Makefiles | +----------------------------------------------------------+----------------------------------+ -| :class:`~spack.build_systems.autotools.AutotoolsPackage` | Specialized class for packages | -| | built using GNU Autotools | +| :class:`~spack.build_systems.autotools` | Specialized build system for | +| | software built using | +| | 
GNU Autotools | +----------------------------------------------------------+----------------------------------+ -| :class:`~spack.build_systems.cmake.CMakePackage` | Specialized class for packages | -| | built using CMake | +| :class:`~spack.build_systems.cmake` | Specialized build system for | +| | software built using CMake | +----------------------------------------------------------+----------------------------------+ -| :class:`~spack.build_systems.cuda.CudaPackage` | A helper class for packages that | -| | use CUDA | +| :class:`~spack.build_systems.maven` | Specialized build system for | +| | software built using Maven | +----------------------------------------------------------+----------------------------------+ -| :class:`~spack.build_systems.qmake.QMakePackage` | Specialized class for packages | -| | built using QMake | +| :class:`~spack.build_systems.meson` | Specialized build system for | +| | software built using Meson | +----------------------------------------------------------+----------------------------------+ -| :class:`~spack.build_systems.rocm.ROCmPackage` | A helper class for packages that | -| | use ROCm | +| :class:`~spack.build_systems.nmake` | Specialized build system for | +| | software built using NMake | +----------------------------------------------------------+----------------------------------+ -| :class:`~spack.build_systems.scons.SConsPackage` | Specialized class for packages | -| | built using SCons | +| :class:`~spack.build_systems.qmake` | Specialized build system for | +| | software built using QMake | +----------------------------------------------------------+----------------------------------+ -| :class:`~spack.build_systems.waf.WafPackage` | Specialized class for packages | -| | built using Waf | +| :class:`~spack.build_systems.scons` | Specialized build system for | +| | software built using SCons | +----------------------------------------------------------+----------------------------------+ -| 
:class:`~spack.build_systems.r.RPackage` | Specialized class for | +| :class:`~spack.build_systems.waf` | Specialized build system for | +| | software built using Waf | ++----------------------------------------------------------+----------------------------------+ +| :class:`~spack.build_systems.r` | Specialized build system for | | | R extensions | +----------------------------------------------------------+----------------------------------+ -| :class:`~spack.build_systems.octave.OctavePackage` | Specialized class for | +| :class:`~spack.build_systems.octave` | Specialized build system for | | | Octave packages | +----------------------------------------------------------+----------------------------------+ -| :class:`~spack.build_systems.python.PythonPackage` | Specialized class for | +| :class:`~spack.build_systems.python` | Specialized build system for | | | Python extensions | +----------------------------------------------------------+----------------------------------+ -| :class:`~spack.build_systems.perl.PerlPackage` | Specialized class for | +| :class:`~spack.build_systems.perl` | Specialized build system for | | | Perl extensions | +----------------------------------------------------------+----------------------------------+ -| :class:`~spack.build_systems.intel.IntelPackage` | Specialized class for licensed | -| | Intel software | +| :class:`~spack.build_systems.ruby` | Specialized build system for | +| | Ruby extensions | ++----------------------------------------------------------+----------------------------------+ +| :class:`~spack.build_systems.intel` | Specialized build system for | +| | licensed Intel software | ++----------------------------------------------------------+----------------------------------+ +| :class:`~spack.build_systems.oneapi` | Specialized build system for | +| | Intel onaAPI software | ++----------------------------------------------------------+----------------------------------+ +| 
:class:`~spack.build_systems.aspell_dict` | Specialized build system for | +| | Aspell dictionaries | +----------------------------------------------------------+----------------------------------+ @@ -3353,52 +3487,17 @@ The classes that are currently provided by Spack are: For example, a Python extension installed with CMake would ``extends('python')`` and subclass from :class:`~spack.build_systems.cmake.CMakePackage`. -^^^^^^^^^^^^^^^^^^^^^ -Installation pipeline -^^^^^^^^^^^^^^^^^^^^^ +^^^^^^^^^^^^^^^^^^^^^^^^^^ +Overriding builder methods +^^^^^^^^^^^^^^^^^^^^^^^^^^ -When a user runs ``spack install``, Spack: - -1. Fetches an archive for the correct version of the software. -2. Expands the archive. -3. Sets the current working directory to the root directory of the expanded archive. - -Then, depending on the base class of the package under consideration, it will execute -a certain number of **phases** that reflect the way a package of that type is usually built. -The name and order in which the phases will be executed can be obtained either reading the API -docs at :py:mod:`~.spack.build_systems`, or using the ``spack info`` command: - -.. code-block:: console - :emphasize-lines: 13,14 - - $ spack info m4 - AutotoolsPackage: m4 - Homepage: https://www.gnu.org/software/m4/m4.html - - Safe versions: - 1.4.17 ftp://ftp.gnu.org/gnu/m4/m4-1.4.17.tar.gz - - Variants: - Name Default Description - - sigsegv on Build the libsigsegv dependency - - Installation Phases: - autoreconf configure build install - - Build Dependencies: - libsigsegv - - ... - - -Typically, phases have default implementations that fit most of the common cases: +Build-system "phases" have default implementations that fit most of the common cases: .. 
literalinclude:: _spack_root/lib/spack/spack/build_systems/autotools.py - :pyobject: AutotoolsPackage.configure + :pyobject: AutotoolsBuilder.configure :linenos: -It is thus just sufficient for a packager to override a few +It is usually sufficient for a packager to override a few build system specific helper methods or attributes to provide, for instance, configure arguments: @@ -3406,31 +3505,31 @@ configure arguments: :pyobject: M4.configure_args :linenos: -.. note:: - Each specific build system has a list of attributes that can be overridden to - fine-tune the installation of a package without overriding an entire phase. To - have more information on them the place to go is the API docs of the :py:mod:`~.spack.build_systems` - module. +Each specific build system has a list of attributes and methods that can be overridden to +fine-tune the installation of a package without overriding an entire phase. To +have more information on them the place to go is the API docs of the :py:mod:`~.spack.build_systems` +module. ^^^^^^^^^^^^^^^^^^^^^^^^^^ Overriding an entire phase ^^^^^^^^^^^^^^^^^^^^^^^^^^ -In extreme cases it may be necessary to override an entire phase. Regardless -of the build system, the signature is the same. For example, the signature -for the install phase is: +Sometimes it is necessary to override an entire phase. If the ``package.py`` contains +a single class recipe, see :ref:`package_class_structure`, then the signature for a +phase is: .. code-block:: python - class Foo(Package): + class Openjpeg(CMakePackage): def install(self, spec, prefix): ... +regardless of the build system. The arguments for the phase are: + ``self`` - For those not used to Python instance methods, this is the - package itself. In this case it's an instance of ``Foo``, which - extends ``Package``. For API docs on Package objects, see - :py:class:`Package `. + This is the package object, which extends ``CMakePackage``. 
+ For API docs on Package objects, see + :py:class:`Package `. ``spec`` This is the concrete spec object created by Spack from an @@ -3445,12 +3544,111 @@ for the install phase is: The arguments ``spec`` and ``prefix`` are passed only for convenience, as they always correspond to ``self.spec`` and ``self.spec.prefix`` respectively. -As mentioned in :ref:`install-environment`, you will usually not need to refer -to dependencies explicitly in your package file, as the compiler wrappers take care of most of -the heavy lifting here. There will be times, though, when you need to refer to -the install locations of dependencies, or when you need to do something different -depending on the version, compiler, dependencies, etc. that your package is -built with. These parameters give you access to this type of information. +If the ``package.py`` encodes builders explicitly, the signature for a phase changes slightly: + +.. code-block:: python + + class CMakeBuilder(spack.build_systems.cmake.CMakeBuilder): + def install(self, pkg, spec, prefix): + ... + +In this case the package is passed as the second argument, and ``self`` is the builder instance. + +.. _multiple_build_systems: + +^^^^^^^^^^^^^^^^^^^^^^ +Multiple build systems +^^^^^^^^^^^^^^^^^^^^^^ + +There are cases where a software actively supports two build systems, or changes build systems +as it evolves, or needs different build systems on different platforms. Spack allows dealing with +these cases natively, if a recipe is written using builders explicitly. + +For instance, software that supports two build systems unconditionally should derive from +both ``*Package`` base classes, and declare the possible use of multiple build systems using +a directive: + +.. code-block:: python + + class ArpackNg(CMakePackage, AutotoolsPackage): + + build_system("cmake", "autotools", default="cmake") + +In this case the software can be built with both ``autotools`` and ``cmake``. 
Since the package +supports multiple build systems, it is necessary to declare which one is the default. The ``package.py`` +will likely contain some overriding of default builder methods: + +.. code-block:: python + + class CMakeBuilder(spack.build_systems.cmake.CMakeBuilder): + def cmake_args(self): + pass + + class Autotoolsbuilder(spack.build_systems.autotools.AutotoolsBuilder): + def configure_args(self): + pass + +In more complex cases it might happen that the build system changes according to certain conditions, +for instance across versions. That can be expressed with conditional variant values: + +.. code-block:: python + + class ArpackNg(CMakePackage, AutotoolsPackage): + + build_system( + conditional("cmake", when="@0.64:"), + conditional("autotools", when="@:0.63"), + default="cmake", + ) + +In the example the directive impose a change from ``Autotools`` to ``CMake`` going +from ``v0.63`` to ``v0.64``. + +^^^^^^^^^^^^^^^^^^ +Mixin base classes +^^^^^^^^^^^^^^^^^^ + +Besides build systems, there are other cases where common metadata and behavior can be extracted +and reused by many packages. For instance, packages that depend on ``Cuda`` or ``Rocm``, share +common dependencies and constraints. 
To factor these attributes into a single place, Spack provides +a few mixin classes in the ``spack.build_systems`` module: + ++---------------------------------------------------------------+----------------------------------+ +| **API docs** | **Description** | ++===============================================================+==================================+ +| :class:`~spack.build_systems.cuda.CudaPackage` | A helper class for packages that | +| | use CUDA | ++---------------------------------------------------------------+----------------------------------+ +| :class:`~spack.build_systems.rocm.ROCmPackage` | A helper class for packages that | +| | use ROCm | ++---------------------------------------------------------------+----------------------------------+ +| :class:`~spack.build_systems.gnu.GNUMirrorPackage` | A helper class for GNU packages | ++---------------------------------------------------------------+----------------------------------+ +| :class:`~spack.build_systems.python.PythonExtension` | A helper class for Python | +| | extensions | ++---------------------------------------------------------------+----------------------------------+ +| :class:`~spack.build_systems.sourceforge.SourceforgePackage` | A helper class for packages | +| | from sourceforge.org | ++---------------------------------------------------------------+----------------------------------+ +| :class:`~spack.build_systems.sourceware.SourcewarePackage` | A helper class for packages | +| | from sourceware.org | ++---------------------------------------------------------------+----------------------------------+ +| :class:`~spack.build_systems.xorg.XorgPackage` | A helper class for x.org | +| | packages | ++---------------------------------------------------------------+----------------------------------+ + +These classes should be used by adding them to the inheritance tree of the package that needs them, +for instance: + +.. 
code-block:: python + + class Cp2k(MakefilePackage, CudaPackage): + """CP2K is a quantum chemistry and solid state physics software package + that can perform atomistic simulations of solid state, liquid, molecular, + periodic, material, crystal, and biological systems + """ + +In the example above ``Cp2k`` inherits all the conflicts and variants that ``CudaPackage`` defines. .. _install-environment: @@ -4208,16 +4406,9 @@ In addition to invoking the right compiler, the compiler wrappers add flags to the compile line so that dependencies can be easily found. These flags are added for each dependency, if they exist: -Compile-time library search paths -* ``-L$dep_prefix/lib`` -* ``-L$dep_prefix/lib64`` - -Runtime library search paths (RPATHs) -* ``$rpath_flag$dep_prefix/lib`` -* ``$rpath_flag$dep_prefix/lib64`` - -Include search paths -* ``-I$dep_prefix/include`` +* Compile-time library search paths: ``-L$dep_prefix/lib``, ``-L$dep_prefix/lib64`` +* Runtime library search paths (RPATHs): ``$rpath_flag$dep_prefix/lib``, ``$rpath_flag$dep_prefix/lib64`` +* Include search paths: ``-I$dep_prefix/include`` An example of this would be the ``libdwarf`` build, which has one dependency: ``libelf``. Every call to ``cc`` in the ``libdwarf`` @@ -5062,6 +5253,16 @@ where each argument has the following meaning: will run. The default of ``None`` corresponds to the current directory (``'.'``). + Each call starts with the working directory set to the spec's test stage + directory (i.e., ``self.test_suite.test_dir_for_spec(self.spec)``). + +.. warning:: + + Use of the package spec's installation directory for building and running + tests is **strongly** discouraged. Doing so has caused permission errors + for shared spack instances *and* for facilities that install the software + in read-only file systems or directories. 
+ """"""""""""""""""""""""""""""""""""""""" Accessing package- and test-related files @@ -5069,10 +5270,10 @@ Accessing package- and test-related files You may need to access files from one or more locations when writing stand-alone tests. This can happen if the software's repository does not -include test source files or includes files but no way to build the -executables using the installed headers and libraries. In these -cases, you may need to reference the files relative to one or more -root directory. The properties containing package- and test-related +include test source files or includes files but has no way to build the +executables using the installed headers and libraries. In these cases, +you may need to reference the files relative to one or more root +directory. The properties containing package- (or spec-) and test-related directory paths are provided in the table below. .. list-table:: Directory-to-property mapping @@ -5081,19 +5282,22 @@ directory paths are provided in the table below. 
* - Root Directory - Package Property - Example(s) - * - Package Installation Files + * - Package (Spec) Installation - ``self.prefix`` - ``self.prefix.include``, ``self.prefix.lib`` - * - Package Dependency's Files + * - Dependency Installation - ``self.spec[''].prefix`` - ``self.spec['trilinos'].prefix.include`` - * - Test Suite Stage Files + * - Test Suite Stage - ``self.test_suite.stage`` - ``join_path(self.test_suite.stage, 'results.txt')`` - * - Staged Cached Build-time Files + * - Spec's Test Stage + - ``self.test_suite.test_dir_for_spec`` + - ``self.test_suite.test_dir_for_spec(self.spec)`` + * - Current Spec's Build-time Files - ``self.test_suite.current_test_cache_dir`` - ``join_path(self.test_suite.current_test_cache_dir, 'examples', 'foo.c')`` - * - Staged Custom Package Files + * - Current Spec's Custom Test Files - ``self.test_suite.current_test_data_dir`` - ``join_path(self.test_suite.current_test_data_dir, 'hello.f90')`` @@ -6099,3 +6303,82 @@ might write: DWARF_PREFIX = $(spack location --install-dir libdwarf) CXXFLAGS += -I$DWARF_PREFIX/include CXXFLAGS += -L$DWARF_PREFIX/lib + + +.. _package_class_structure: + +-------------------------- +Package class architecture +-------------------------- + +.. note:: + + This section aims to provide a high-level knowledge of how the package class architecture evolved + in Spack, and provides some insights on the current design. + +Packages in Spack were originally designed to support only a single build system. The overall +class structure for a package looked like: + +.. image:: images/original_package_architecture.png + :scale: 60 % + :align: center + +In this architecture the base class ``AutotoolsPackage`` was responsible for both the metadata +related to the ``autotools`` build system (e.g. dependencies or variants common to all packages +using it), and for encoding the default installation procedure. 
+ +In reality, a non-negligible number of packages are either changing their build system during the evolution of the +project, or using different build systems for different platforms. An architecture based on a single class +requires hacks or other workarounds to deal with these cases. + +To support a model more adherent to reality, Spack v0.19 changed its internal design by extracting +the attributes and methods related to building a software into a separate hierarchy: + +.. image:: images/builder_package_architecture.png + :scale: 60 % + :align: center + +In this new format each ``package.py`` contains one ``*Package`` class that gathers all the metadata, +and one or more ``*Builder`` classes that encode the installation procedure. A specific builder object +is created just before the software is built, so at a time where Spack knows which build system needs +to be used for the current installation, and receives a ``package`` object during initialization. + +^^^^^^^^^^^^^^^^^^^^^^^^ +``build_system`` variant +^^^^^^^^^^^^^^^^^^^^^^^^ + +To allow imposing conditions based on the build system, each package must a have ``build_system`` variant, +which is usually inherited from base classes. This variant allows for writing metadata that is conditional +on the build system: + +.. code-block:: python + + with when("build_system=cmake"): + depends_on("cmake", type="build") + +and also for selecting a specific build system from a spec literal, like in the following command: + +.. code-block:: console + + $ spack install arpack-ng build_system=autotools + +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +Compatibility with single-class format +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Internally, Spack always uses builders to perform operations related to the installation of a specific software. +The builders are created in the ``spack.builder.create`` function + +.. 
literalinclude:: _spack_root/lib/spack/spack/builder.py + :pyobject: create + +To achieve backward compatibility with the single-class format Spack creates in this function a special +"adapter builder", if no custom builder is detected in the recipe: + +.. image:: images/adapter.png + :scale: 60 % + :align: center + +Overall the role of the adapter is to route access to attributes of methods first through the ``*Package`` +hierarchy, and then back to the base class builder. This is schematically shown in the diagram above, where +the adapter role is to "emulate" a method resolution order like the one represented by the red arrows. diff --git a/lib/spack/docs/spack.yaml b/lib/spack/docs/spack.yaml index 2dd932b5658..778f0a3c753 100644 --- a/lib/spack/docs/spack.yaml +++ b/lib/spack/docs/spack.yaml @@ -20,6 +20,8 @@ spack: - py-docutils@:0.16 - py-sphinx-design - py-sphinx-rtd-theme + - py-pygments@:2.12 + # VCS - git - mercurial diff --git a/lib/spack/docs/tables/system_prerequisites.csv b/lib/spack/docs/tables/system_prerequisites.csv index 5f661883d34..efca033b620 100644 --- a/lib/spack/docs/tables/system_prerequisites.csv +++ b/lib/spack/docs/tables/system_prerequisites.csv @@ -1,5 +1,5 @@ Name, Supported Versions, Notes, Requirement Reason -Python, 2.7/3.6-3.10, , Interpreter for Spack +Python, 2.7/3.6-3.11, , Interpreter for Spack C/C++ Compilers, , , Building software make, , , Build software patch, , , Build software @@ -11,6 +11,7 @@ bzip2, , , Compress/Decompress archives xz, , , Compress/Decompress archives zstd, , Optional, Compress/Decompress archives file, , , Create/Use Buildcaches +lsb-release, , , Linux: identify operating system version gnupg2, , , Sign/Verify Buildcaches git, , , Manage Software Repositories svn, , Optional, Manage Software Repositories diff --git a/lib/spack/env/cc b/lib/spack/env/cc index bef7209bfac..ffdddfc0dfb 100755 --- a/lib/spack/env/cc +++ b/lib/spack/env/cc @@ -241,28 +241,28 @@ case "$command" in mode=cpp 
debug_flags="-g" ;; - cc|c89|c99|gcc|clang|armclang|icc|icx|pgcc|nvc|xlc|xlc_r|fcc|amdclang|cl.exe) + cc|c89|c99|gcc|clang|armclang|icc|icx|pgcc|nvc|xlc|xlc_r|fcc|amdclang|cl.exe|craycc) command="$SPACK_CC" language="C" comp="CC" lang_flags=C debug_flags="-g" ;; - c++|CC|g++|clang++|armclang++|icpc|icpx|dpcpp|pgc++|nvc++|xlc++|xlc++_r|FCC|amdclang++) + c++|CC|g++|clang++|armclang++|icpc|icpx|dpcpp|pgc++|nvc++|xlc++|xlc++_r|FCC|amdclang++|crayCC) command="$SPACK_CXX" language="C++" comp="CXX" lang_flags=CXX debug_flags="-g" ;; - ftn|f90|fc|f95|gfortran|flang|armflang|ifort|ifx|pgfortran|nvfortran|xlf90|xlf90_r|nagfor|frt|amdflang) + ftn|f90|fc|f95|gfortran|flang|armflang|ifort|ifx|pgfortran|nvfortran|xlf90|xlf90_r|nagfor|frt|amdflang|crayftn) command="$SPACK_FC" language="Fortran 90" comp="FC" lang_flags=F debug_flags="-g" ;; - f77|xlf|xlf_r|pgf77|amdflang) + f77|xlf|xlf_r|pgf77) command="$SPACK_F77" language="Fortran 77" comp="F77" diff --git a/lib/spack/env/cce/case-insensitive/crayCC b/lib/spack/env/cce/case-insensitive/crayCC new file mode 120000 index 00000000000..e2deb67f3b6 --- /dev/null +++ b/lib/spack/env/cce/case-insensitive/crayCC @@ -0,0 +1 @@ +../../cc \ No newline at end of file diff --git a/lib/spack/env/cce/craycc b/lib/spack/env/cce/craycc new file mode 120000 index 00000000000..82c2b8e90a3 --- /dev/null +++ b/lib/spack/env/cce/craycc @@ -0,0 +1 @@ +../cc \ No newline at end of file diff --git a/lib/spack/env/cce/crayftn b/lib/spack/env/cce/crayftn new file mode 120000 index 00000000000..82c2b8e90a3 --- /dev/null +++ b/lib/spack/env/cce/crayftn @@ -0,0 +1 @@ +../cc \ No newline at end of file diff --git a/lib/spack/external/__init__.py b/lib/spack/external/__init__.py index bcb9ed9becc..89928fae59c 100644 --- a/lib/spack/external/__init__.py +++ b/lib/spack/external/__init__.py @@ -18,7 +18,7 @@ * Homepage: https://pypi.python.org/pypi/archspec * Usage: Labeling, comparison and detection of microarchitectures -* Version: 0.1.4 (commit 
e2cfdc266174488dee78b8c9058e36d60dc1b548) +* Version: 0.2.0 (commit 77640e572725ad97f18e63a04857155752ace045) argparse -------- diff --git a/lib/spack/external/archspec/cpu/detect.py b/lib/spack/external/archspec/cpu/detect.py index 3df04585c70..a7cc4481f63 100644 --- a/lib/spack/external/archspec/cpu/detect.py +++ b/lib/spack/external/archspec/cpu/detect.py @@ -132,9 +132,15 @@ def sysctl(*args): "model name": sysctl("-n", "machdep.cpu.brand_string"), } else: - model = ( - "m1" if "Apple" in sysctl("-n", "machdep.cpu.brand_string") else "unknown" - ) + model = "unknown" + model_str = sysctl("-n", "machdep.cpu.brand_string").lower() + if "m2" in model_str: + model = "m2" + elif "m1" in model_str: + model = "m1" + elif "apple" in model_str: + model = "m1" + info = { "vendor_id": "Apple", "flags": [], @@ -322,14 +328,26 @@ def compatibility_check_for_aarch64(info, target): features = set(info.get("Features", "").split()) vendor = info.get("CPU implementer", "generic") + # At the moment it's not clear how to detect compatibility with + # a specific version of the architecture + if target.vendor == "generic" and target.name != "aarch64": + return False + arch_root = TARGETS[basename] - return ( - (target == arch_root or arch_root in target.ancestors) - and target.vendor in (vendor, "generic") - # On macOS it seems impossible to get all the CPU features with syctl info - and (target.features.issubset(features) or platform.system() == "Darwin") + arch_root_and_vendor = arch_root == target.family and target.vendor in ( + vendor, + "generic", ) + # On macOS it seems impossible to get all the CPU features + # with syctl info, but for ARM we can get the exact model + if platform.system() == "Darwin": + model_key = info.get("model", basename) + model = TARGETS[model_key] + return arch_root_and_vendor and (target == model or target in model.ancestors) + + return arch_root_and_vendor and target.features.issubset(features) + @compatibility_check(architecture_family="riscv64") 
def compatibility_check_for_riscv64(info, target): diff --git a/lib/spack/external/archspec/json/cpu/microarchitectures.json b/lib/spack/external/archspec/json/cpu/microarchitectures.json index 308f0e51525..15d32e9fa04 100644 --- a/lib/spack/external/archspec/json/cpu/microarchitectures.json +++ b/lib/spack/external/archspec/json/cpu/microarchitectures.json @@ -85,7 +85,7 @@ "intel": [ { "versions": ":", - "name": "x86-64", + "name": "pentium4", "flags": "-march={name} -mtune=generic" } ], @@ -2093,8 +2093,163 @@ ] } }, - "thunderx2": { + "armv8.1a": { "from": ["aarch64"], + "vendor": "generic", + "features": [], + "compilers": { + "gcc": [ + { + "versions": "5:", + "flags": "-march=armv8.1-a -mtune=generic" + } + ], + "clang": [ + { + "versions": ":", + "flags": "-march=armv8.1-a -mtune=generic" + } + ], + "apple-clang": [ + { + "versions": ":", + "flags": "-march=armv8.1-a -mtune=generic" + } + ], + "arm": [ + { + "versions": ":", + "flags": "-march=armv8.1-a -mtune=generic" + } + ] + } + }, + "armv8.2a": { + "from": ["armv8.1a"], + "vendor": "generic", + "features": [], + "compilers": { + "gcc": [ + { + "versions": "6:", + "flags": "-march=armv8.2-a -mtune=generic" + } + ], + "clang": [ + { + "versions": ":", + "flags": "-march=armv8.2-a -mtune=generic" + } + ], + "apple-clang": [ + { + "versions": ":", + "flags": "-march=armv8.2-a -mtune=generic" + } + ], + "arm": [ + { + "versions": ":", + "flags": "-march=armv8.2-a -mtune=generic" + } + ] + } + }, + "armv8.3a": { + "from": ["armv8.2a"], + "vendor": "generic", + "features": [], + "compilers": { + "gcc": [ + { + "versions": "6:", + "flags": "-march=armv8.3-a -mtune=generic" + } + ], + "clang": [ + { + "versions": "6:", + "flags": "-march=armv8.3-a -mtune=generic" + } + ], + "apple-clang": [ + { + "versions": ":", + "flags": "-march=armv8.3-a -mtune=generic" + } + ], + "arm": [ + { + "versions": ":", + "flags": "-march=armv8.3-a -mtune=generic" + } + ] + } + }, + "armv8.4a": { + "from": ["armv8.3a"], + "vendor": 
"generic", + "features": [], + "compilers": { + "gcc": [ + { + "versions": "8:", + "flags": "-march=armv8.4-a -mtune=generic" + } + ], + "clang": [ + { + "versions": "8:", + "flags": "-march=armv8.4-a -mtune=generic" + } + ], + "apple-clang": [ + { + "versions": ":", + "flags": "-march=armv8.4-a -mtune=generic" + } + ], + "arm": [ + { + "versions": ":", + "flags": "-march=armv8.4-a -mtune=generic" + } + ] + } + }, + "armv8.5a": { + "from": ["armv8.4a"], + "vendor": "generic", + "features": [], + "compilers": { + "gcc": [ + { + "versions": "9:", + "flags": "-march=armv8.5-a -mtune=generic" + } + ], + "clang": [ + { + "versions": "11:", + "flags": "-march=armv8.5-a -mtune=generic" + } + ], + "apple-clang": [ + { + "versions": ":", + "flags": "-march=armv8.5-a -mtune=generic" + } + ], + "arm": [ + { + "versions": ":", + "flags": "-march=armv8.5-a -mtune=generic" + } + ] + } + }, + "thunderx2": { + "from": ["armv8.1a"], "vendor": "Cavium", "features": [ "fp", @@ -2141,7 +2296,7 @@ } }, "a64fx": { - "from": ["aarch64"], + "from": ["armv8.2a"], "vendor": "Fujitsu", "features": [ "fp", @@ -2209,7 +2364,7 @@ ] } }, - "graviton": { + "cortex_a72": { "from": ["aarch64"], "vendor": "ARM", "features": [ @@ -2235,19 +2390,19 @@ }, { "versions": "6:", - "flags" : "-march=armv8-a+crc+crypto -mtune=cortex-a72" + "flags" : "-mcpu=cortex-a72" } ], "clang" : [ { "versions": "3.9:", - "flags" : "-march=armv8-a+crc+crypto" + "flags" : "-mcpu=cortex-a72" } ] } }, - "graviton2": { - "from": ["graviton"], + "neoverse_n1": { + "from": ["cortex_a72", "armv8.2a"], "vendor": "ARM", "features": [ "fp", @@ -2296,7 +2451,7 @@ }, { "versions": "9.0:", - "flags" : "-march=armv8.2-a+fp16+rcpc+dotprod+crypto -mtune=neoverse-n1" + "flags" : "-mcpu=neoverse-n1" } ], "clang" : [ @@ -2307,6 +2462,10 @@ { "versions": "5:", "flags" : "-march=armv8.2-a+fp16+rcpc+dotprod+crypto" + }, + { + "versions": "10:", + "flags" : "-mcpu=neoverse-n1" } ], "arm" : [ @@ -2317,11 +2476,11 @@ ] } }, - "graviton3": { - 
"from": ["graviton2"], + "neoverse_v1": { + "from": ["neoverse_n1", "armv8.4a"], "vendor": "ARM", "features": [ - "fp", + "fp", "asimd", "evtstrm", "aes", @@ -2384,11 +2543,11 @@ }, { "versions": "9.0:9.9", - "flags" : "-march=armv8.4-a+crypto+rcpc+sha3+sm4+sve+rng+nodotprod -mtune=neoverse-v1" + "flags" : "-mcpu=neoverse-v1" }, { "versions": "10.0:", - "flags" : "-march=armv8.4-a+crypto+rcpc+sha3+sm4+sve+rng+ssbs+i8mm+bf16+nodotprod -mtune=neoverse-v1" + "flags" : "-mcpu=neoverse-v1" } ], @@ -2404,6 +2563,10 @@ { "versions": "11:", "flags" : "-march=armv8.4-a+sve+ssbs+fp16+bf16+crypto+i8mm+rng" + }, + { + "versions": "12:", + "flags" : "-mcpu=neoverse-v1" } ], "arm" : [ @@ -2419,7 +2582,7 @@ } }, "m1": { - "from": ["aarch64"], + "from": ["armv8.4a"], "vendor": "Apple", "features": [ "fp", @@ -2484,6 +2647,76 @@ ] } }, + "m2": { + "from": ["m1", "armv8.5a"], + "vendor": "Apple", + "features": [ + "fp", + "asimd", + "evtstrm", + "aes", + "pmull", + "sha1", + "sha2", + "crc32", + "atomics", + "fphp", + "asimdhp", + "cpuid", + "asimdrdm", + "jscvt", + "fcma", + "lrcpc", + "dcpop", + "sha3", + "asimddp", + "sha512", + "asimdfhm", + "dit", + "uscat", + "ilrcpc", + "flagm", + "ssbs", + "sb", + "paca", + "pacg", + "dcpodp", + "flagm2", + "frint", + "ecv", + "bf16", + "i8mm", + "bti" + ], + "compilers": { + "gcc": [ + { + "versions": "8.0:", + "flags" : "-march=armv8.5-a -mtune=generic" + } + ], + "clang" : [ + { + "versions": "9.0:12.0", + "flags" : "-march=armv8.5-a" + }, + { + "versions": "13.0:", + "flags" : "-mcpu=apple-m1" + } + ], + "apple-clang": [ + { + "versions": "11.0:12.5", + "flags" : "-march=armv8.5-a" + }, + { + "versions": "13.0:", + "flags" : "-mcpu=vortex" + } + ] + } + }, "arm": { "from": [], "vendor": "generic", diff --git a/lib/spack/llnl/util/filesystem.py b/lib/spack/llnl/util/filesystem.py index 511f7c4f598..aece52f8436 100644 --- a/lib/spack/llnl/util/filesystem.py +++ b/lib/spack/llnl/util/filesystem.py @@ -505,8 +505,15 @@ def 
group_ids(uid=None): if uid is None: uid = getuid() - user = pwd.getpwuid(uid).pw_name - return [g.gr_gid for g in grp.getgrall() if user in g.gr_mem] + + pwd_entry = pwd.getpwuid(uid) + user = pwd_entry.pw_name + + # user's primary group id may not be listed in grp (i.e. /etc/group) + # you have to check pwd for that, so start the list with that + gids = [pwd_entry.pw_gid] + + return sorted(set(gids + [g.gr_gid for g in grp.getgrall() if user in g.gr_mem])) @system_path_filter(arg_slice=slice(1)) @@ -1083,7 +1090,11 @@ def temp_cwd(): with working_dir(tmp_dir): yield tmp_dir finally: - shutil.rmtree(tmp_dir) + kwargs = {} + if is_windows: + kwargs["ignore_errors"] = False + kwargs["onerror"] = readonly_file_handler(ignore_errors=True) + shutil.rmtree(tmp_dir, **kwargs) @contextmanager @@ -2095,7 +2106,7 @@ def find_system_libraries(libraries, shared=True): return libraries_found -def find_libraries(libraries, root, shared=True, recursive=False): +def find_libraries(libraries, root, shared=True, recursive=False, runtime=True): """Returns an iterable of full paths to libraries found in a root dir. Accepts any glob characters accepted by fnmatch: @@ -2116,6 +2127,10 @@ def find_libraries(libraries, root, shared=True, recursive=False): otherwise for static. Defaults to True. recursive (bool): if False search only root folder, if True descends top-down from the root. Defaults to False. + runtime (bool): Windows only option, no-op elsewhere. If true, + search for runtime shared libs (.DLL), otherwise, search + for .Lib files. If shared is false, this has no meaning. + Defaults to True. 
Returns: LibraryList: The libraries that have been found @@ -2130,7 +2145,9 @@ def find_libraries(libraries, root, shared=True, recursive=False): if is_windows: static_ext = "lib" - shared_ext = "dll" + # For linking (runtime=False) you need the .lib files regardless of + # whether you are doing a shared or static link + shared_ext = "dll" if runtime else "lib" else: # Used on both Linux and macOS static_ext = "a" @@ -2174,13 +2191,13 @@ def find_libraries(libraries, root, shared=True, recursive=False): return LibraryList(found_libs) -def find_all_shared_libraries(root, recursive=False): +def find_all_shared_libraries(root, recursive=False, runtime=True): """Convenience function that returns the list of all shared libraries found in the directory passed as argument. See documentation for `llnl.util.filesystem.find_libraries` for more information """ - return find_libraries("*", root=root, shared=True, recursive=recursive) + return find_libraries("*", root=root, shared=True, recursive=recursive, runtime=runtime) def find_all_static_libraries(root, recursive=False): @@ -2226,48 +2243,36 @@ def __init__(self, package, link_install_prefix=True): self.pkg = package self._addl_rpaths = set() self.link_install_prefix = link_install_prefix - self._internal_links = set() + self._additional_library_dependents = set() @property - def link_dest(self): + def library_dependents(self): """ Set of directories where package binaries/libraries are located. """ - if hasattr(self.pkg, "libs") and self.pkg.libs: - pkg_libs = set(self.pkg.libs.directories) - else: - pkg_libs = set((self.pkg.prefix.lib, self.pkg.prefix.lib64)) + return set([self.pkg.prefix.bin]) | self._additional_library_dependents - return pkg_libs | set([self.pkg.prefix.bin]) | self.internal_links - - @property - def internal_links(self): + def add_library_dependent(self, *dest): """ - linking that would need to be established within the package itself. 
Useful for links - against extension modules/build time executables/internal linkage - """ - return self._internal_links + Add paths to directories or libraries/binaries to set of + common paths that need to link against other libraries - def add_internal_links(self, *dest): - """ - Incorporate additional paths into the rpath (sym)linking scheme. - - Paths provided to this method are linked against by a package's libraries - and libraries found at these paths are linked against a package's binaries. - (i.e. /site-packages -> /bin and /bin -> /site-packages) - - Specified paths should be outside of a package's lib, lib64, and bin + Specified paths should fall outside of a package's common + link paths, i.e. the bin directories. """ - self._internal_links = self._internal_links | set(*dest) + for pth in dest: + if os.path.isfile(pth): + self._additional_library_dependents.add(os.path.dirname) + else: + self._additional_library_dependents.add(pth) @property - def link_targets(self): + def rpaths(self): """ Set of libraries this package needs to link against during runtime These packages will each be symlinked into the packages lib and binary dir """ - dependent_libs = [] for path in self.pkg.rpath: dependent_libs.extend(list(find_all_shared_libraries(path, recursive=True))) @@ -2275,18 +2280,43 @@ def link_targets(self): dependent_libs.extend(list(find_all_shared_libraries(extra_path, recursive=True))) return set(dependent_libs) - def include_additional_link_paths(self, *paths): + def add_rpath(self, *paths): """ Add libraries found at the root of provided paths to runtime linking These are libraries found outside of the typical scope of rpath linking - that require manual inclusion in a runtime linking scheme + that require manual inclusion in a runtime linking scheme. 
+ These links are unidirectional, and are only + intended to bring outside dependencies into this package Args: *paths (str): arbitrary number of paths to be added to runtime linking """ self._addl_rpaths = self._addl_rpaths | set(paths) + def _link(self, path, dest): + file_name = os.path.basename(path) + dest_file = os.path.join(dest, file_name) + if os.path.exists(dest): + try: + symlink(path, dest_file) + # For py2 compatibility, we have to catch the specific Windows error code + # associate with trying to create a file that already exists (winerror 183) + except OSError as e: + if e.winerror == 183: + # We have either already symlinked or we are encoutering a naming clash + # either way, we don't want to overwrite existing libraries + already_linked = islink(dest_file) + tty.debug( + "Linking library %s to %s failed, " % (path, dest_file) + "already linked." + if already_linked + else "library with name %s already exists at location %s." + % (file_name, dest) + ) + pass + else: + raise e + def establish_link(self): """ (sym)link packages to runtime dependencies based on RPath configuration for @@ -2298,29 +2328,8 @@ def establish_link(self): # for each binary install dir in self.pkg (i.e. 
pkg.prefix.bin, pkg.prefix.lib) # install a symlink to each dependent library - for library, lib_dir in itertools.product(self.link_targets, self.link_dest): - if not path_contains_subdirectory(library, lib_dir): - file_name = os.path.basename(library) - dest_file = os.path.join(lib_dir, file_name) - if os.path.exists(lib_dir): - try: - symlink(library, dest_file) - # For py2 compatibility, we have to catch the specific Windows error code - # associate with trying to create a file that already exists (winerror 183) - except OSError as e: - if e.winerror == 183: - # We have either already symlinked or we are encoutering a naming clash - # either way, we don't want to overwrite existing libraries - already_linked = islink(dest_file) - tty.debug( - "Linking library %s to %s failed, " % (library, dest_file) - + "already linked." - if already_linked - else "library with name %s already exists." % file_name - ) - pass - else: - raise e + for library, lib_dir in itertools.product(self.rpaths, self.library_dependents): + self._link(library, lib_dir) @system_path_filter diff --git a/lib/spack/llnl/util/lang.py b/lib/spack/llnl/util/lang.py index 76b161cbbe1..51bd710ddbb 100644 --- a/lib/spack/llnl/util/lang.py +++ b/lib/spack/llnl/util/lang.py @@ -749,6 +749,26 @@ def _n_xxx_ago(x): raise ValueError(msg) +def pretty_seconds(seconds): + """Seconds to string with appropriate units + + Arguments: + seconds (float): Number of seconds + + Returns: + str: Time string with units + """ + if seconds >= 1: + value, unit = seconds, "s" + elif seconds >= 1e-3: + value, unit = seconds * 1e3, "ms" + elif seconds >= 1e-6: + value, unit = seconds * 1e6, "us" + else: + value, unit = seconds * 1e9, "ns" + return "%.3f%s" % (value, unit) + + class RequiredAttributeError(ValueError): def __init__(self, message): super(RequiredAttributeError, self).__init__(message) @@ -1002,6 +1022,14 @@ def stable_partition( return true_items, false_items +def ensure_last(lst, *elements): + """Performs a 
stable partition of lst, ensuring that ``elements`` + occur at the end of ``lst`` in specified order. Mutates ``lst``. + Raises ``ValueError`` if any ``elements`` are not already in ``lst``.""" + for elt in elements: + lst.append(lst.pop(lst.index(elt))) + + class TypedMutableSequence(MutableSequence): """Base class that behaves like a list, just with a different type. diff --git a/lib/spack/llnl/util/lock.py b/lib/spack/llnl/util/lock.py index 6dfba50abb4..9c3bcd7a918 100644 --- a/lib/spack/llnl/util/lock.py +++ b/lib/spack/llnl/util/lock.py @@ -12,6 +12,7 @@ from typing import Dict, Tuple # novm import llnl.util.tty as tty +from llnl.util.lang import pretty_seconds import spack.util.string @@ -166,7 +167,7 @@ def _attempts_str(wait_time, nattempts): return "" attempts = spack.util.string.plural(nattempts, "attempt") - return " after {0:0.2f}s and {1}".format(wait_time, attempts) + return " after {} and {}".format(pretty_seconds(wait_time), attempts) class LockType(object): @@ -318,8 +319,8 @@ def _lock(self, op, timeout=None): raise LockROFileError(self.path) self._log_debug( - "{0} locking [{1}:{2}]: timeout {3} sec".format( - op_str.lower(), self._start, self._length, timeout + "{} locking [{}:{}]: timeout {}".format( + op_str.lower(), self._start, self._length, pretty_seconds(timeout or 0) ) ) @@ -340,7 +341,8 @@ def _lock(self, op, timeout=None): total_wait_time = time.time() - start_time return total_wait_time, num_attempts - raise LockTimeoutError("Timed out waiting for a {0} lock.".format(op_str.lower())) + total_wait_time = time.time() - start_time + raise LockTimeoutError(op_str.lower(), self.path, total_wait_time, num_attempts) def _poll_lock(self, op): """Attempt to acquire the lock in a non-blocking manner. 
Return whether @@ -780,6 +782,18 @@ class LockLimitError(LockError): class LockTimeoutError(LockError): """Raised when an attempt to acquire a lock times out.""" + def __init__(self, lock_type, path, time, attempts): + fmt = "Timed out waiting for a {} lock after {}.\n Made {} {} on file: {}" + super(LockTimeoutError, self).__init__( + fmt.format( + lock_type, + pretty_seconds(time), + attempts, + "attempt" if attempts == 1 else "attempts", + path, + ) + ) + class LockUpgradeError(LockError): """Raised when unable to upgrade from a read to a write lock.""" diff --git a/lib/spack/spack/__init__.py b/lib/spack/spack/__init__.py index e368639553c..4486221a18c 100644 --- a/lib/spack/spack/__init__.py +++ b/lib/spack/spack/__init__.py @@ -3,11 +3,20 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) -#: (major, minor, micro, dev release) tuple -spack_version_info = (0, 19, 0, "dev0") - #: PEP440 canonical ... string -spack_version = ".".join(str(s) for s in spack_version_info) +__version__ = "0.19.0.dev0" +spack_version = __version__ + + +def __try_int(v): + try: + return int(v) + except ValueError: + return v + + +#: (major, minor, micro, dev release) tuple +spack_version_info = tuple([__try_int(v) for v in __version__.split(".")]) + __all__ = ["spack_version_info", "spack_version"] -__version__ = spack_version diff --git a/lib/spack/spack/audit.py b/lib/spack/spack/audit.py index ee974a19b1e..de9fc1a05bd 100644 --- a/lib/spack/spack/audit.py +++ b/lib/spack/spack/audit.py @@ -503,6 +503,33 @@ def invalid_sha256_digest(fetcher): return errors +@package_properties +def _ensure_env_methods_are_ported_to_builders(pkgs, error_cls): + """Ensure that methods modifying the build environment are ported to builder classes.""" + errors = [] + for pkg_name in pkgs: + pkg_cls = spack.repo.path.get_pkg_class(pkg_name) + buildsystem_variant, _ = pkg_cls.variants["build_system"] + buildsystem_names = [getattr(x, "value", x) for x in buildsystem_variant.values] + builder_cls_names = 
[spack.builder.BUILDER_CLS[x].__name__ for x in buildsystem_names] + module = pkg_cls.module + has_builders_in_package_py = any( + getattr(module, name, False) for name in builder_cls_names + ) + if not has_builders_in_package_py: + continue + + for method_name in ("setup_build_environment", "setup_dependent_build_environment"): + if hasattr(pkg_cls, method_name): + msg = ( + "Package '{}' need to move the '{}' method from the package class to the" + " appropriate builder class".format(pkg_name, method_name) + ) + errors.append(error_cls(msg, [])) + + return errors + + @package_https_directives def _linting_package_file(pkgs, error_cls): """Check for correctness of links""" @@ -660,7 +687,13 @@ def _ensure_variant_defaults_are_parsable(pkgs, error_cls): errors.append(error_cls(error_msg.format(variant_name, pkg_name), [])) continue - vspec = variant.make_default() + try: + vspec = variant.make_default() + except spack.variant.MultipleValuesInExclusiveVariantError: + error_msg = "Cannot create a default value for the variant '{}' in package '{}'" + errors.append(error_cls(error_msg.format(variant_name, pkg_name), [])) + continue + try: variant.validate_or_raise(vspec, pkg_cls=pkg_cls) except spack.variant.InvalidVariantValueError: diff --git a/lib/spack/spack/binary_distribution.py b/lib/spack/spack/binary_distribution.py index c329287de8f..1e9843c4738 100644 --- a/lib/spack/spack/binary_distribution.py +++ b/lib/spack/spack/binary_distribution.py @@ -7,11 +7,13 @@ import collections import hashlib import json +import multiprocessing.pool import os import shutil import sys import tarfile import tempfile +import time import traceback import warnings from contextlib import closing @@ -22,7 +24,7 @@ import llnl.util.filesystem as fsys import llnl.util.lang import llnl.util.tty as tty -from llnl.util.filesystem import mkdirp +from llnl.util.filesystem import BaseDirectoryVisitor, mkdirp, visit_directory_tree import spack.cmd import spack.config as config @@ -41,8 +43,10 
@@ import spack.util.url as url_util import spack.util.web as web_util from spack.caches import misc_cache_location +from spack.relocate import utf8_paths_to_single_binary_regex from spack.spec import Spec from spack.stage import Stage +from spack.util.executable import which _build_cache_relative_path = "build_cache" _build_cache_keys_relative_path = "_pgp" @@ -70,6 +74,10 @@ def __init__(self, errors): super(FetchCacheError, self).__init__(self.message) +class ListMirrorSpecsError(spack.error.SpackError): + """Raised when unable to retrieve list of specs from the mirror""" + + class BinaryCacheIndex(object): """ The BinaryCacheIndex tracks what specs are available on (usually remote) @@ -105,6 +113,10 @@ def __init__(self, cache_root): # cache (_mirrors_for_spec) self._specs_already_associated = set() + # mapping from mirror urls to the time.time() of the last index fetch and a bool indicating + # whether the fetch succeeded or not. + self._last_fetch_times = {} + # _mirrors_for_spec is a dictionary mapping DAG hashes to lists of # entries indicating mirrors where that concrete spec can be found. # Each entry is a dictionary consisting of: @@ -137,6 +149,7 @@ def clear(self): self._index_file_cache = None self._local_index_cache = None self._specs_already_associated = set() + self._last_fetch_times = {} self._mirrors_for_spec = {} def _write_local_index_cache(self): @@ -242,7 +255,6 @@ def find_built_spec(self, spec, mirrors_to_check=None): } ] """ - self.regenerate_spec_cache() return self.find_by_hash(spec.dag_hash(), mirrors_to_check=mirrors_to_check) def find_by_hash(self, find_hash, mirrors_to_check=None): @@ -253,6 +265,9 @@ def find_by_hash(self, find_hash, mirrors_to_check=None): mirrors_to_check: Optional mapping containing mirrors to check. If None, just assumes all configured mirrors. """ + if find_hash not in self._mirrors_for_spec: + # Not found in the cached index, pull the latest from the server. 
+ self.update(with_cooldown=True) if find_hash not in self._mirrors_for_spec: return None results = self._mirrors_for_spec[find_hash] @@ -283,7 +298,7 @@ def update_spec(self, spec, found_list): "spec": new_entry["spec"], } - def update(self): + def update(self, with_cooldown=False): """Make sure local cache of buildcache index files is up to date. If the same mirrors are configured as the last time this was called and none of the remote buildcache indices have changed, calling this @@ -325,24 +340,41 @@ def update(self): fetch_errors = [] all_methods_failed = True + ttl = spack.config.get("config:binary_index_ttl", 600) + now = time.time() for cached_mirror_url in self._local_index_cache: cache_entry = self._local_index_cache[cached_mirror_url] cached_index_hash = cache_entry["index_hash"] cached_index_path = cache_entry["index_path"] if cached_mirror_url in configured_mirror_urls: - # May need to fetch the index and update the local caches - try: - needs_regen = self._fetch_and_cache_index( - cached_mirror_url, expect_hash=cached_index_hash - ) - all_methods_failed = False - except FetchCacheError as fetch_error: - needs_regen = False - fetch_errors.extend(fetch_error.errors) - # The need to regenerate implies a need to clear as well. 
- spec_cache_clear_needed |= needs_regen - spec_cache_regenerate_needed |= needs_regen + # Only do a fetch if the last fetch was longer than TTL ago + if ( + with_cooldown + and ttl > 0 + and cached_mirror_url in self._last_fetch_times + and now - self._last_fetch_times[cached_mirror_url][0] < ttl + ): + # We're in the cooldown period, don't try to fetch again + # If the fetch succeeded last time, consider this update a success, otherwise + # re-report the error here + if self._last_fetch_times[cached_mirror_url][1]: + all_methods_failed = False + else: + # May need to fetch the index and update the local caches + try: + needs_regen = self._fetch_and_cache_index( + cached_mirror_url, expect_hash=cached_index_hash + ) + self._last_fetch_times[cached_mirror_url] = (now, True) + all_methods_failed = False + except FetchCacheError as fetch_error: + needs_regen = False + fetch_errors.extend(fetch_error.errors) + self._last_fetch_times[cached_mirror_url] = (now, False) + # The need to regenerate implies a need to clear as well. + spec_cache_clear_needed |= needs_regen + spec_cache_regenerate_needed |= needs_regen else: # No longer have this mirror, cached index should be removed items_to_remove.append( @@ -351,6 +383,8 @@ def update(self): "cache_key": os.path.join(self._index_cache_root, cached_index_path), } ) + if cached_mirror_url in self._last_fetch_times: + del self._last_fetch_times[cached_mirror_url] spec_cache_clear_needed = True spec_cache_regenerate_needed = True @@ -369,10 +403,12 @@ def update(self): # Need to fetch the index and update the local caches try: needs_regen = self._fetch_and_cache_index(mirror_url) + self._last_fetch_times[mirror_url] = (now, True) all_methods_failed = False except FetchCacheError as fetch_error: fetch_errors.extend(fetch_error.errors) needs_regen = False + self._last_fetch_times[mirror_url] = (now, False) # Generally speaking, a new mirror wouldn't imply the need to # clear the spec cache, so leave it as is. 
if needs_regen: @@ -619,6 +655,57 @@ def read_buildinfo_file(prefix): return buildinfo +class BuildManifestVisitor(BaseDirectoryVisitor): + """Visitor that collects a list of files and symlinks + that can be checked for need of relocation. It knows how + to dedupe hardlinks and deal with symlinks to files and + directories.""" + + def __init__(self): + # Save unique identifiers of files to avoid + # relocating hardlink files for each path. + self.visited = set() + + # Lists of files we will check + self.files = [] + self.symlinks = [] + + def seen_before(self, root, rel_path): + stat_result = os.lstat(os.path.join(root, rel_path)) + identifier = (stat_result.st_dev, stat_result.st_ino) + if identifier in self.visited: + return True + else: + self.visited.add(identifier) + return False + + def visit_file(self, root, rel_path, depth): + if self.seen_before(root, rel_path): + return + self.files.append(rel_path) + + def visit_symlinked_file(self, root, rel_path, depth): + # Note: symlinks *can* be hardlinked, but it is unclear if + # symlinks can be relinked in-place (preserving inode). + # Therefore, we do *not* de-dupe hardlinked symlinks. + self.symlinks.append(rel_path) + + def before_visit_dir(self, root, rel_path, depth): + return os.path.basename(rel_path) not in (".spack", "man") + + def before_visit_symlinked_dir(self, root, rel_path, depth): + # Treat symlinked directories simply as symlinks. + self.visit_symlinked_file(root, rel_path, depth) + # Never recurse into symlinked directories. 
+ return False + + +def file_matches(path, regex): + with open(path, "rb") as f: + contents = f.read() + return bool(regex.search(contents)) + + def get_buildfile_manifest(spec): """ Return a data structure with information about a build, including @@ -634,57 +721,61 @@ def get_buildfile_manifest(spec): "link_to_relocate": [], "other": [], "binary_to_relocate_fullpath": [], + "hardlinks_deduped": True, } - exclude_list = (".spack", "man") + # Guard against filesystem footguns of hardlinks and symlinks by using + # a visitor to retrieve a list of files and symlinks, so we don't have + # to worry about hardlinks of symlinked dirs and what not. + visitor = BuildManifestVisitor() + root = spec.prefix + visit_directory_tree(root, visitor) - # Do this at during tarball creation to save time when tarball unpacked. - # Used by make_package_relative to determine binaries to change. - for root, dirs, files in os.walk(spec.prefix, topdown=True): - dirs[:] = [d for d in dirs if d not in exclude_list] + # Collect a list of prefixes for this package and it's dependencies, Spack will + # look for them to decide if text file needs to be relocated or not + prefixes = [d.prefix for d in spec.traverse(root=True, deptype="all") if not d.external] + prefixes.append(spack.hooks.sbang.sbang_install_path()) + prefixes.append(str(spack.store.layout.root)) - # Directories may need to be relocated too. 
- for directory in dirs: - dir_path_name = os.path.join(root, directory) - rel_path_name = os.path.relpath(dir_path_name, spec.prefix) - if os.path.islink(dir_path_name): - link = os.readlink(dir_path_name) - if os.path.isabs(link) and link.startswith(spack.store.layout.root): - data["link_to_relocate"].append(rel_path_name) + # Create a giant regex that matches all prefixes + regex = utf8_paths_to_single_binary_regex(prefixes) - for filename in files: - path_name = os.path.join(root, filename) - m_type, m_subtype = fsys.mime_type(path_name) - rel_path_name = os.path.relpath(path_name, spec.prefix) - added = False + # Symlinks. - if os.path.islink(path_name): - link = os.readlink(path_name) - if os.path.isabs(link): - # Relocate absolute links into the spack tree - if link.startswith(spack.store.layout.root): - data["link_to_relocate"].append(rel_path_name) - added = True + # Obvious bugs: + # 1. relative links are not relocated. + # 2. paths are used as strings. + for rel_path in visitor.symlinks: + abs_path = os.path.join(root, rel_path) + link = os.readlink(abs_path) + if os.path.isabs(link) and link.startswith(spack.store.layout.root): + data["link_to_relocate"].append(rel_path) - if relocate.needs_binary_relocation(m_type, m_subtype): - if ( - ( - m_subtype in ("x-executable", "x-sharedlib", "x-pie-executable") - and sys.platform != "darwin" - ) - or (m_subtype in ("x-mach-binary") and sys.platform == "darwin") - or (not filename.endswith(".o")) - ): - data["binary_to_relocate"].append(rel_path_name) - data["binary_to_relocate_fullpath"].append(path_name) - added = True + # Non-symlinks. + for rel_path in visitor.files: + abs_path = os.path.join(root, rel_path) + m_type, m_subtype = fsys.mime_type(abs_path) - if relocate.needs_text_relocation(m_type, m_subtype): - data["text_to_relocate"].append(rel_path_name) - added = True + if relocate.needs_binary_relocation(m_type, m_subtype): + # Why is this branch not part of needs_binary_relocation? 
:( + if ( + ( + m_subtype in ("x-executable", "x-sharedlib", "x-pie-executable") + and sys.platform != "darwin" + ) + or (m_subtype in ("x-mach-binary") and sys.platform == "darwin") + or (not rel_path.endswith(".o")) + ): + data["binary_to_relocate"].append(rel_path) + data["binary_to_relocate_fullpath"].append(abs_path) + continue + + elif relocate.needs_text_relocation(m_type, m_subtype) and file_matches(abs_path, regex): + data["text_to_relocate"].append(rel_path) + continue + + data["other"].append(abs_path) - if not added: - data["other"].append(path_name) return data @@ -698,7 +789,7 @@ def write_buildinfo_file(spec, workdir, rel=False): prefix_to_hash = dict() prefix_to_hash[str(spec.package.prefix)] = spec.dag_hash() deps = spack.build_environment.get_rpath_deps(spec.package) - for d in deps: + for d in deps + spec.dependencies(deptype="run"): prefix_to_hash[str(d.prefix)] = d.dag_hash() # Create buildinfo data and write it to disk @@ -711,6 +802,7 @@ def write_buildinfo_file(spec, workdir, rel=False): buildinfo["relocate_textfiles"] = manifest["text_to_relocate"] buildinfo["relocate_binaries"] = manifest["binary_to_relocate"] buildinfo["relocate_links"] = manifest["link_to_relocate"] + buildinfo["hardlinks_deduped"] = manifest["hardlinks_deduped"] buildinfo["prefix_to_hash"] = prefix_to_hash filename = buildinfo_file_name(workdir) with open(filename, "w") as outfile: @@ -795,37 +887,52 @@ def sign_specfile(key, force, specfile_path): spack.util.gpg.sign(key, specfile_path, signed_specfile_path, clearsign=True) -def _fetch_spec_from_mirror(spec_url): - s = None - tty.debug("fetching {0}".format(spec_url)) - _, _, spec_file = web_util.read_from_url(spec_url) - spec_file_contents = codecs.getreader("utf-8")(spec_file).read() - # Need full spec.json name or this gets confused with index.json. 
- if spec_url.endswith(".json.sig"): - specfile_json = Spec.extract_json_from_clearsig(spec_file_contents) - s = Spec.from_dict(specfile_json) - elif spec_url.endswith(".json"): - s = Spec.from_json(spec_file_contents) - elif spec_url.endswith(".yaml"): - s = Spec.from_yaml(spec_file_contents) - return s +def _read_specs_and_push_index(file_list, read_method, cache_prefix, db, temp_dir, concurrency): + """Read all the specs listed in the provided list, using thread given thread parallelism, + generate the index, and push it to the mirror. + Args: + file_list (list(str)): List of urls or file paths pointing at spec files to read + read_method: A function taking a single argument, either a url or a file path, + and which reads the spec file at that location, and returns the spec. + cache_prefix (str): prefix of the build cache on s3 where index should be pushed. + db: A spack database used for adding specs and then writing the index. + temp_dir (str): Location to write index.json and hash for pushing + concurrency (int): Number of parallel processes to use when fetching -def _read_specs_and_push_index(file_list, cache_prefix, db, db_root_dir): - for file_path in file_list: - try: - s = _fetch_spec_from_mirror(url_util.join(cache_prefix, file_path)) - except (URLError, web_util.SpackWebError) as url_err: - tty.error("Error reading specfile: {0}".format(file_path)) - tty.error(url_err) + Return: + None + """ - if s: - db.add(s, None) - db.mark(s, "in_buildcache", True) + def _fetch_spec_from_mirror(spec_url): + spec_file_contents = read_method(spec_url) + + if spec_file_contents: + # Need full spec.json name or this gets confused with index.json. 
+ if spec_url.endswith(".json.sig"): + specfile_json = Spec.extract_json_from_clearsig(spec_file_contents) + return Spec.from_dict(specfile_json) + if spec_url.endswith(".json"): + return Spec.from_json(spec_file_contents) + if spec_url.endswith(".yaml"): + return Spec.from_yaml(spec_file_contents) + + tp = multiprocessing.pool.ThreadPool(processes=concurrency) + try: + fetched_specs = tp.map( + llnl.util.lang.star(_fetch_spec_from_mirror), [(f,) for f in file_list] + ) + finally: + tp.terminate() + tp.join() + + for fetched_spec in fetched_specs: + db.add(fetched_spec, None) + db.mark(fetched_spec, "in_buildcache", True) # Now generate the index, compute its hash, and push the two files to # the mirror. - index_json_path = os.path.join(db_root_dir, "index.json") + index_json_path = os.path.join(temp_dir, "index.json") with open(index_json_path, "w") as f: db._write_to_file(f) @@ -835,7 +942,7 @@ def _read_specs_and_push_index(file_list, cache_prefix, db, db_root_dir): index_hash = compute_hash(index_string) # Write the hash out to a local file - index_hash_path = os.path.join(db_root_dir, "index.json.hash") + index_hash_path = os.path.join(temp_dir, "index.json.hash") with open(index_hash_path, "w") as f: f.write(index_hash) @@ -856,33 +963,152 @@ def _read_specs_and_push_index(file_list, cache_prefix, db, db_root_dir): ) -def generate_package_index(cache_prefix): - """Create the build cache index page. +def _specs_from_cache_aws_cli(cache_prefix): + """Use aws cli to sync all the specs into a local temporary directory. - Creates (or replaces) the "index.json" page at the location given in - cache_prefix. This page contains a link for each binary package (.yaml or - .json) under cache_prefix. + Args: + cache_prefix (str): prefix of the build cache on s3 + + Return: + List of the local file paths and a function that can read each one from the file system. 
""" + read_fn = None + file_list = None + aws = which("aws") + + def file_read_method(file_path): + with open(file_path) as fd: + return fd.read() + + tmpspecsdir = tempfile.mkdtemp() + sync_command_args = [ + "s3", + "sync", + "--exclude", + "*", + "--include", + "*.spec.json.sig", + "--include", + "*.spec.json", + "--include", + "*.spec.yaml", + cache_prefix, + tmpspecsdir, + ] + try: - file_list = ( - entry + tty.debug( + "Using aws s3 sync to download specs from {0} to {1}".format(cache_prefix, tmpspecsdir) + ) + aws(*sync_command_args, output=os.devnull, error=os.devnull) + file_list = fsys.find(tmpspecsdir, ["*.spec.json.sig", "*.spec.json", "*.spec.yaml"]) + read_fn = file_read_method + except Exception: + tty.warn("Failed to use aws s3 sync to retrieve specs, falling back to parallel fetch") + shutil.rmtree(tmpspecsdir) + + return file_list, read_fn + + +def _specs_from_cache_fallback(cache_prefix): + """Use spack.util.web module to get a list of all the specs at the remote url. + + Args: + cache_prefix (str): Base url of mirror (location of spec files) + + Return: + The list of complete spec file urls and a function that can read each one from its + remote location (also using the spack.util.web module). 
+ """ + read_fn = None + file_list = None + + def url_read_method(url): + contents = None + try: + _, _, spec_file = web_util.read_from_url(url) + contents = codecs.getreader("utf-8")(spec_file).read() + except (URLError, web_util.SpackWebError) as url_err: + tty.error("Error reading specfile: {0}".format(url)) + tty.error(url_err) + return contents + + try: + file_list = [ + url_util.join(cache_prefix, entry) for entry in web_util.list_url(cache_prefix) if entry.endswith(".yaml") or entry.endswith("spec.json") or entry.endswith("spec.json.sig") - ) + ] + read_fn = url_read_method except KeyError as inst: msg = "No packages at {0}: {1}".format(cache_prefix, inst) tty.warn(msg) - return except Exception as err: # If we got some kind of S3 (access denied or other connection # error), the first non boto-specific class in the exception # hierarchy is Exception. Just print a warning and return msg = "Encountered problem listing packages at {0}: {1}".format(cache_prefix, err) tty.warn(msg) + + return file_list, read_fn + + +def _spec_files_from_cache(cache_prefix): + """Get a list of all the spec files in the mirror and a function to + read them. + + Args: + cache_prefix (str): Base url of mirror (location of spec files) + + Return: + A tuple where the first item is a list of absolute file paths or + urls pointing to the specs that should be read from the mirror, + and the second item is a function taking a url or file path and + returning the spec read from that location. 
+ """ + callbacks = [] + if cache_prefix.startswith("s3"): + callbacks.append(_specs_from_cache_aws_cli) + + callbacks.append(_specs_from_cache_fallback) + + for specs_from_cache_fn in callbacks: + file_list, read_fn = specs_from_cache_fn(cache_prefix) + if file_list: + return file_list, read_fn + + raise ListMirrorSpecsError("Failed to get list of specs from {0}".format(cache_prefix)) + + +def generate_package_index(cache_prefix, concurrency=32): + """Create or replace the build cache index on the given mirror. The + buildcache index contains an entry for each binary package under the + cache_prefix. + + Args: + cache_prefix(str): Base url of binary mirror. + concurrency: (int): The desired threading concurrency to use when + fetching the spec files from the mirror. + + Return: + None + """ + try: + file_list, read_fn = _spec_files_from_cache(cache_prefix) + except ListMirrorSpecsError as err: + tty.error("Unabled to generate package index, {0}".format(err)) return + if any(x.endswith(".yaml") for x in file_list): + msg = ( + "The mirror in '{}' contains specs in the deprecated YAML format.\n\n\tSupport for " + "this format will be removed in v0.20, please regenerate the build cache with a " + "recent Spack\n" + ).format(cache_prefix) + warnings.warn(msg) + tty.debug("Retrieving spec descriptor files from {0} to build index".format(cache_prefix)) tmpdir = tempfile.mkdtemp() @@ -895,7 +1121,7 @@ def generate_package_index(cache_prefix): ) try: - _read_specs_and_push_index(file_list, cache_prefix, db, db_root_dir) + _read_specs_and_push_index(file_list, read_fn, cache_prefix, db, db_root_dir, concurrency) except Exception as err: msg = "Encountered problem pushing package index to {0}: {1}".format(cache_prefix, err) tty.warn(msg) @@ -1071,7 +1297,11 @@ def _build_tarball( tty.die(e) # create gzip compressed tarball of the install prefix - with closing(tarfile.open(tarfile_path, "w:gz")) as tar: + # On AMD Ryzen 3700X and an SSD disk, we have the following on 
compression speed: + # compresslevel=6 gzip default: llvm takes 4mins, roughly 2.1GB + # compresslevel=9 python default: llvm takes 12mins, roughly 2.1GB + # So we follow gzip. + with closing(tarfile.open(tarfile_path, "w:gz", compresslevel=6)) as tar: tar.add(name="%s" % workdir, arcname="%s" % os.path.basename(spec.prefix)) # remove copy of install directory shutil.rmtree(workdir) @@ -1346,6 +1576,13 @@ def download_tarball(spec, unsigned=False, mirrors_for_spec=None): # the remaining mirrors, looking for one we can use. tarball_stage = try_fetch(spackfile_url) if tarball_stage: + if ext == "yaml": + msg = ( + "Reading {} from mirror.\n\n\tThe YAML format for buildcaches is " + "deprecated and will be removed in v0.20\n" + ).format(spackfile_url) + warnings.warn(msg) + return { "tarball_stage": tarball_stage, "specfile_stage": local_specfile_stage, @@ -1418,6 +1655,38 @@ def check_package_relocatable(workdir, spec, allow_root): relocate.raise_if_not_relocatable(cur_path_names, allow_root) +def dedupe_hardlinks_if_necessary(root, buildinfo): + """Updates a buildinfo dict for old archives that did + not dedupe hardlinks. De-duping hardlinks is necessary + when relocating files in parallel and in-place. This + means we must preserve inodes when relocating.""" + + # New archives don't need this. + if buildinfo.get("hardlinks_deduped", False): + return + + # Clearly we can assume that an inode is either in the + # textfile or binary group, but let's just stick to + # a single set of visited nodes. + visited = set() + + # Note: we do *not* dedupe hardlinked symlinks, since + # it seems difficult or even impossible to relink + # symlinks while preserving inode. 
+ for key in ("relocate_textfiles", "relocate_binaries"): + if key not in buildinfo: + continue + new_list = [] + for rel_path in buildinfo[key]: + stat_result = os.lstat(os.path.join(root, rel_path)) + identifier = (stat_result.st_dev, stat_result.st_ino) + if identifier in visited: + continue + visited.add(identifier) + new_list.append(rel_path) + buildinfo[key] = new_list + + def relocate_package(spec, allow_root): """ Relocate the given package @@ -1451,7 +1720,7 @@ def relocate_package(spec, allow_root): hash_to_prefix = dict() hash_to_prefix[spec.format("{hash}")] = str(spec.package.prefix) new_deps = spack.build_environment.get_rpath_deps(spec.package) - for d in new_deps: + for d in new_deps + spec.dependencies(deptype="run"): hash_to_prefix[d.format("{hash}")] = str(d.prefix) # Spurious replacements (e.g. sbang) will cause issues with binaries # For example, the new sbang can be longer than the old one. @@ -1463,13 +1732,19 @@ def relocate_package(spec, allow_root): install_path = spack.hooks.sbang.sbang_install_path() prefix_to_prefix_text[old_sbang_install_path] = install_path + # First match specific prefix paths. Possibly the *local* install prefix + # of some dependency is in an upstream, so we cannot assume the original + # spack store root can be mapped uniformly to the new spack store root. + for orig_prefix, hash in prefix_to_hash.items(): + prefix_to_prefix_text[orig_prefix] = hash_to_prefix.get(hash, None) + prefix_to_prefix_bin[orig_prefix] = hash_to_prefix.get(hash, None) + + # Only then add the generic fallback of install prefix -> install prefix. 
prefix_to_prefix_text[old_prefix] = new_prefix prefix_to_prefix_bin[old_prefix] = new_prefix prefix_to_prefix_text[old_layout_root] = new_layout_root prefix_to_prefix_bin[old_layout_root] = new_layout_root - for orig_prefix, hash in prefix_to_hash.items(): - prefix_to_prefix_text[orig_prefix] = hash_to_prefix.get(hash, None) - prefix_to_prefix_bin[orig_prefix] = hash_to_prefix.get(hash, None) + # This is vestigial code for the *old* location of sbang. Previously, # sbang was a bash script, and it lived in the spack prefix. It is # now a POSIX script that lives in the install prefix. Old packages @@ -1480,6 +1755,9 @@ def relocate_package(spec, allow_root): tty.debug("Relocating package from", "%s to %s." % (old_layout_root, new_layout_root)) + # Old archives maybe have hardlinks repeated. + dedupe_hardlinks_if_necessary(workdir, buildinfo) + def is_backup_file(file): return file.endswith("~") @@ -1509,7 +1787,11 @@ def is_backup_file(file): old_prefix, new_prefix, ) - if "elf" in platform.binary_formats: + elif "elf" in platform.binary_formats and not rel: + # The new ELF dynamic section relocation logic only handles absolute to + # absolute relocation. 
+ relocate.new_relocate_elf_binaries(files_to_relocate, prefix_to_prefix_bin) + elif "elf" in platform.binary_formats and rel: relocate.relocate_elf_binaries( files_to_relocate, old_layout_root, @@ -1519,35 +1801,23 @@ def is_backup_file(file): old_prefix, new_prefix, ) - # Relocate links to the new install prefix - links = [link for link in buildinfo.get("relocate_links", [])] - relocate.relocate_links(links, old_layout_root, old_prefix, new_prefix) + + # Relocate links to the new install prefix + links = [os.path.join(workdir, f) for f in buildinfo.get("relocate_links", [])] + relocate.relocate_links(links, prefix_to_prefix_bin) # For all buildcaches # relocate the install prefixes in text files including dependencies - relocate.relocate_text(text_names, prefix_to_prefix_text) + relocate.unsafe_relocate_text(text_names, prefix_to_prefix_text) - paths_to_relocate = [old_prefix, old_layout_root] - paths_to_relocate.extend(prefix_to_hash.keys()) - files_to_relocate = list( - filter( - lambda pathname: not relocate.file_is_relocatable( - pathname, paths_to_relocate=paths_to_relocate - ), - map( - lambda filename: os.path.join(workdir, filename), - buildinfo["relocate_binaries"], - ), - ) - ) # relocate the install prefixes in binary files including dependencies - relocate.relocate_text_bin(files_to_relocate, prefix_to_prefix_bin) + relocate.unsafe_relocate_text_bin(files_to_relocate, prefix_to_prefix_bin) # If we are installing back to the same location # relocate the sbang location if the spack directory changed else: if old_spack_prefix != new_spack_prefix: - relocate.relocate_text(text_names, prefix_to_prefix_text) + relocate.unsafe_relocate_text(text_names, prefix_to_prefix_text) def _extract_inner_tarball(spec, filename, extract_to, unsigned, remote_checksum): @@ -1878,8 +2148,8 @@ def get_mirrors_for_spec(spec=None, mirrors_to_check=None, index_only=False): results = binary_index.find_built_spec(spec, mirrors_to_check=mirrors_to_check) - # Maybe we just didn't 
have the latest information from the mirror, so - # try to fetch directly, unless we are only considering the indices. + # The index may be out-of-date. If we aren't only considering indices, try + # to fetch directly since we know where the file should be. if not results and not index_only: results = try_direct_fetch(spec, mirrors=mirrors_to_check) # We found a spec by the direct fetch approach, we might as well diff --git a/lib/spack/spack/bootstrap.py b/lib/spack/spack/bootstrap.py index 2e0f2614e77..60f8153ae27 100644 --- a/lib/spack/spack/bootstrap.py +++ b/lib/spack/spack/bootstrap.py @@ -91,6 +91,14 @@ def _try_import_from_store(module, query_spec, query_info=None): os.path.join(candidate_spec.prefix, pkg.platlib), ] # type: list[str] path_before = list(sys.path) + + # Python 3.8+ on Windows does not search dependent DLLs in PATH, + # so we need to manually add it using os.add_dll_directory + # https://docs.python.org/3/whatsnew/3.8.html#bpo-36085-whatsnew + if sys.version_info[:2] >= (3, 8) and sys.platform == "win32": + if os.path.isdir(candidate_spec.prefix.bin): + os.add_dll_directory(candidate_spec.prefix.bin) # novermin + # NOTE: try module_paths first and last, last allows an existing version in path # to be picked up and used, possibly depending on something in the store, first # allows the bootstrap version to work when an incompatible version is in @@ -667,6 +675,11 @@ def _add_externals_if_missing(): _REF_COUNT = 0 +def is_bootstrapping(): + global _REF_COUNT + return _REF_COUNT > 0 + + @contextlib.contextmanager def ensure_bootstrap_configuration(): # The context manager is reference counted to ensure we don't swap multiple diff --git a/lib/spack/spack/build_environment.py b/lib/spack/spack/build_environment.py index 63b1309a221..9247d9f1506 100644 --- a/lib/spack/spack/build_environment.py +++ b/lib/spack/spack/build_environment.py @@ -52,6 +52,7 @@ import spack.build_systems.cmake import spack.build_systems.meson +import spack.builder import 
spack.config import spack.install_test import spack.main @@ -120,18 +121,18 @@ stat_suffix = "lib" if sys.platform == "win32" else "a" -def should_set_parallel_jobs(jobserver_support=False): - """Returns true in general, except when: - - The env variable SPACK_NO_PARALLEL_MAKE=1 is set - - jobserver_support is enabled, and a jobserver was found. - """ - if ( - jobserver_support - and "MAKEFLAGS" in os.environ - and "--jobserver" in os.environ["MAKEFLAGS"] - ): - return False - return not env_flag(SPACK_NO_PARALLEL_MAKE) +def jobserver_enabled(): + """Returns true if a posix jobserver (make) is detected.""" + return "MAKEFLAGS" in os.environ and "--jobserver" in os.environ["MAKEFLAGS"] + + +def get_effective_jobs(jobs, parallel=True, supports_jobserver=False): + """Return the number of jobs, or None if supports_jobserver and a jobserver is detected.""" + if not parallel or jobs <= 1 or env_flag(SPACK_NO_PARALLEL_MAKE): + return 1 + if supports_jobserver and jobserver_enabled(): + return None + return jobs class MakeExecutable(Executable): @@ -146,26 +147,33 @@ class MakeExecutable(Executable): """ def __init__(self, name, jobs, **kwargs): + supports_jobserver = kwargs.pop("supports_jobserver", True) super(MakeExecutable, self).__init__(name, **kwargs) + self.supports_jobserver = supports_jobserver self.jobs = jobs def __call__(self, *args, **kwargs): """parallel, and jobs_env from kwargs are swallowed and used here; remaining arguments are passed through to the superclass. 
""" - # TODO: figure out how to check if we are using a jobserver-supporting ninja, - # the two split ninja packages make this very difficult right now - parallel = should_set_parallel_jobs(jobserver_support=True) and kwargs.pop( - "parallel", self.jobs > 1 - ) + parallel = kwargs.pop("parallel", True) + jobs_env = kwargs.pop("jobs_env", None) + jobs_env_supports_jobserver = kwargs.pop("jobs_env_supports_jobserver", False) - if parallel: - args = ("-j{0}".format(self.jobs),) + args - jobs_env = kwargs.pop("jobs_env", None) - if jobs_env: - # Caller wants us to set an environment variable to - # control the parallelism. - kwargs["extra_env"] = {jobs_env: str(self.jobs)} + jobs = get_effective_jobs( + self.jobs, parallel=parallel, supports_jobserver=self.supports_jobserver + ) + if jobs is not None: + args = ("-j{0}".format(jobs),) + args + + if jobs_env: + # Caller wants us to set an environment variable to + # control the parallelism. + jobs_env_jobs = get_effective_jobs( + self.jobs, parallel=parallel, supports_jobserver=jobs_env_supports_jobserver + ) + if jobs_env_jobs is not None: + kwargs["extra_env"] = {jobs_env: str(jobs_env_jobs)} return super(MakeExecutable, self).__call__(*args, **kwargs) @@ -201,6 +209,8 @@ def clean_environment(): env.unset("CMAKE_PREFIX_PATH") env.unset("PYTHONPATH") + env.unset("R_HOME") + env.unset("R_ENVIRON") # Affects GNU make, can e.g. 
indirectly inhibit enabling parallel build # env.unset('MAKEFLAGS') @@ -314,7 +324,7 @@ def set_compiler_environment_variables(pkg, env): env.set("SPACK_LINKER_ARG", compiler.linker_arg) # Check whether we want to force RPATH or RUNPATH - if spack.config.get("config:shared_linking") == "rpath": + if spack.config.get("config:shared_linking:type") == "rpath": env.set("SPACK_DTAGS_TO_STRIP", compiler.enable_new_dtags) env.set("SPACK_DTAGS_TO_ADD", compiler.disable_new_dtags) else: @@ -322,7 +332,11 @@ def set_compiler_environment_variables(pkg, env): env.set("SPACK_DTAGS_TO_ADD", compiler.enable_new_dtags) # Set the target parameters that the compiler will add - isa_arg = spec.architecture.target.optimization_flags(compiler) + # Don't set on cray platform because the targeting module handles this + if spec.satisfies("platform=cray"): + isa_arg = "" + else: + isa_arg = spec.architecture.target.optimization_flags(compiler) env.set("SPACK_TARGET_ARGS", isa_arg) # Trap spack-tracked compiler flags as appropriate. @@ -343,7 +357,7 @@ def set_compiler_environment_variables(pkg, env): handler = pkg.flag_handler.__func__ else: handler = pkg.flag_handler.im_func - injf, envf, bsf = handler(pkg, flag, spec.compiler_flags[flag]) + injf, envf, bsf = handler(pkg, flag, spec.compiler_flags[flag][:]) inject_flags[flag] = injf or [] env_flags[flag] = envf or [] build_system_flags[flag] = bsf or [] @@ -544,7 +558,7 @@ def _set_variables_for_single_module(pkg, module): # TODO: make these build deps that can be installed if not found. 
m.make = MakeExecutable("make", jobs) m.gmake = MakeExecutable("gmake", jobs) - m.ninja = MakeExecutable("ninja", jobs) + m.ninja = MakeExecutable("ninja", jobs, supports_jobserver=False) # easy shortcut to os.environ m.env = os.environ @@ -556,9 +570,9 @@ def _set_variables_for_single_module(pkg, module): if sys.platform == "win32": m.nmake = Executable("nmake") # Standard CMake arguments - m.std_cmake_args = spack.build_systems.cmake.CMakePackage._std_args(pkg) - m.std_meson_args = spack.build_systems.meson.MesonPackage._std_args(pkg) - m.std_pip_args = spack.build_systems.python.PythonPackage._std_args(pkg) + m.std_cmake_args = spack.build_systems.cmake.CMakeBuilder.std_args(pkg) + m.std_meson_args = spack.build_systems.meson.MesonBuilder.std_args(pkg) + m.std_pip_args = spack.build_systems.python.PythonPipBuilder.std_args(pkg) # Put spack compiler paths in module scope. link_dir = spack.paths.build_env_path @@ -725,38 +739,6 @@ def get_rpaths(pkg): return list(dedupe(filter_system_paths(rpaths))) -def get_std_cmake_args(pkg): - """List of standard arguments used if a package is a CMakePackage. - - Returns: - list: standard arguments that would be used if this - package were a CMakePackage instance. - - Args: - pkg (spack.package_base.PackageBase): package under consideration - - Returns: - list: arguments for cmake - """ - return spack.build_systems.cmake.CMakePackage._std_args(pkg) - - -def get_std_meson_args(pkg): - """List of standard arguments used if a package is a MesonPackage. - - Returns: - list: standard arguments that would be used if this - package were a MesonPackage instance. 
- - Args: - pkg (spack.package_base.PackageBase): package under consideration - - Returns: - list: arguments for meson - """ - return spack.build_systems.meson.MesonPackage._std_args(pkg) - - def parent_class_modules(cls): """ Get list of superclass modules that descend from spack.package_base.PackageBase @@ -817,7 +799,8 @@ def setup_package(pkg, dirty, context="build"): platform.setup_platform_environment(pkg, env_mods) if context == "build": - pkg.setup_build_environment(env_mods) + builder = spack.builder.create(pkg) + builder.setup_build_environment(env_mods) if (not dirty) and (not env_mods.is_unset("CPATH")): tty.debug( @@ -1013,7 +996,8 @@ def add_modifications_for_dep(dep): module.__dict__.update(changes.__dict__) if context == "build": - dpkg.setup_dependent_build_environment(env, spec) + builder = spack.builder.create(dpkg) + builder.setup_dependent_build_environment(env, spec) else: dpkg.setup_dependent_run_environment(env, spec) @@ -1115,8 +1099,20 @@ def _setup_pkg_and_run( pkg.test_suite.stage, spack.install_test.TestSuite.test_log_name(pkg.spec) ) + error_msg = str(exc) + if isinstance(exc, (spack.multimethod.NoSuchMethodError, AttributeError)): + error_msg = ( + "The '{}' package cannot find an attribute while trying to build " + "from sources. This might be due to a change in Spack's package format " + "to support multiple build-systems for a single package. You can fix this " + "by updating the build recipe, and you can also report the issue as a bug. " + "More information at https://spack.readthedocs.io/en/latest/packaging_guide.html#installation-procedure" + ).format(pkg.name) + error_msg = colorize("@*R{{{}}}".format(error_msg)) + error_msg = "{}\n\n{}".format(str(exc), error_msg) + # make a pickleable exception to send to parent. 
- msg = "%s: %s" % (exc_type.__name__, str(exc)) + msg = "%s: %s" % (exc_type.__name__, error_msg) ce = ChildError( msg, diff --git a/lib/spack/spack/build_systems/_checks.py b/lib/spack/spack/build_systems/_checks.py new file mode 100644 index 00000000000..73d5bbdb93e --- /dev/null +++ b/lib/spack/spack/build_systems/_checks.py @@ -0,0 +1,124 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) +import os + +import six + +import llnl.util.lang + +import spack.builder +import spack.installer +import spack.relocate +import spack.store + + +def sanity_check_prefix(builder): + """Check that specific directories and files are created after installation. + + The files to be checked are in the ``sanity_check_is_file`` attribute of the + package object, while the directories are in the ``sanity_check_is_dir``. + + Args: + builder (spack.builder.Builder): builder that installed the package + """ + pkg = builder.pkg + + def check_paths(path_list, filetype, predicate): + if isinstance(path_list, six.string_types): + path_list = [path_list] + + for path in path_list: + abs_path = os.path.join(pkg.prefix, path) + if not predicate(abs_path): + msg = "Install failed for {0}. No such {1} in prefix: {2}" + msg = msg.format(pkg.name, filetype, path) + raise spack.installer.InstallError(msg) + + check_paths(pkg.sanity_check_is_file, "file", os.path.isfile) + check_paths(pkg.sanity_check_is_dir, "directory", os.path.isdir) + + ignore_file = llnl.util.lang.match_predicate(spack.store.layout.hidden_file_regexes) + if all(map(ignore_file, os.listdir(pkg.prefix))): + msg = "Install failed for {0}. Nothing was installed!" + raise spack.installer.InstallError(msg.format(pkg.name)) + + +def apply_macos_rpath_fixups(builder): + """On Darwin, make installed libraries more easily relocatable. 
+ + Some build systems (handrolled, autotools, makefiles) can set their own + rpaths that are duplicated by spack's compiler wrapper. This fixup + interrogates, and postprocesses if necessary, all libraries installed + by the code. + + It should be added as a @run_after to packaging systems (or individual + packages) that do not install relocatable libraries by default. + + Args: + builder (spack.builder.Builder): builder that installed the package + """ + spack.relocate.fixup_macos_rpaths(builder.spec) + + +def ensure_build_dependencies_or_raise(spec, dependencies, error_msg): + """Ensure that some build dependencies are present in the concrete spec. + + If not, raise a RuntimeError with a helpful error message. + + Args: + spec (spack.spec.Spec): concrete spec to be checked. + dependencies (list of spack.spec.Spec): list of abstract specs to be satisfied + error_msg (str): brief error message to be prepended to a longer description + + Raises: + RuntimeError: when the required build dependencies are not found + """ + assert spec.concrete, "Can ensure build dependencies only on concrete specs" + build_deps = [d.name for d in spec.dependencies(deptype="build")] + missing_deps = [x for x in dependencies if x not in build_deps] + + if not missing_deps: + return + + # Raise an exception on missing deps. + msg = ( + "{0}: missing dependencies: {1}.\n\nPlease add " + "the following lines to the package:\n\n".format(error_msg, ", ".join(missing_deps)) + ) + + for dep in missing_deps: + msg += " depends_on('{0}', type='build', when='@{1} {2}')\n".format( + dep, spec.version, "build_system=autotools" + ) + + msg += "\nUpdate the version (when='@{0}') as needed.".format(spec.version) + raise RuntimeError(msg) + + +def execute_build_time_tests(builder): + """Execute the build-time tests prescribed by builder. + + Args: + builder (Builder): builder prescribing the test callbacks. 
The name of the callbacks is + stored as a list of strings in the ``build_time_test_callbacks`` attribute. + """ + builder.pkg.run_test_callbacks(builder, builder.build_time_test_callbacks, "build") + + +def execute_install_time_tests(builder): + """Execute the install-time tests prescribed by builder. + + Args: + builder (Builder): builder prescribing the test callbacks. The name of the callbacks is + stored as a list of strings in the ``install_time_test_callbacks`` attribute. + """ + builder.pkg.run_test_callbacks(builder, builder.install_time_test_callbacks, "install") + + +class BaseBuilder(spack.builder.Builder): + """Base class for builders to register common checks""" + + # Check that self.prefix is there after installation + spack.builder.run_after("install")(sanity_check_prefix) diff --git a/lib/spack/spack/build_systems/aspell_dict.py b/lib/spack/spack/build_systems/aspell_dict.py index f1e41cc3dfa..9de8255e68b 100644 --- a/lib/spack/spack/build_systems/aspell_dict.py +++ b/lib/spack/spack/build_systems/aspell_dict.py @@ -2,18 +2,36 @@ # Spack Project Developers. See the top-level COPYRIGHT file for details. # # SPDX-License-Identifier: (Apache-2.0 OR MIT) +import llnl.util.filesystem as fs -# Why doesn't this work for me? -# from spack import * -from llnl.util.filesystem import filter_file +import spack.directives +import spack.package_base +import spack.util.executable -from spack.build_systems.autotools import AutotoolsPackage -from spack.directives import extends -from spack.package_base import ExtensionError -from spack.util.executable import which +from .autotools import AutotoolsBuilder, AutotoolsPackage + + +class AspellBuilder(AutotoolsBuilder): + """The Aspell builder is close enough to an autotools builder to allow + specializing the builder class, so to use variables that are specific + to the Aspell extensions. 
+ """ + + def configure(self, pkg, spec, prefix): + aspell = spec["aspell"].prefix.bin.aspell + prezip = spec["aspell"].prefix.bin.prezip + destdir = prefix + + sh = spack.util.executable.which("sh") + sh( + "./configure", + "--vars", + "ASPELL={0}".format(aspell), + "PREZIP={0}".format(prezip), + "DESTDIR={0}".format(destdir), + ) -# # Aspell dictionaries install their bits into their prefix.lib # and when activated they'll get symlinked into the appropriate aspell's # dict dir (see aspell's {de,}activate methods). @@ -23,12 +41,17 @@ class AspellDictPackage(AutotoolsPackage): """Specialized class for building aspell dictionairies.""" - extends("aspell") + spack.directives.extends("aspell", when="build_system=autotools") + + #: Override the default autotools builder + AutotoolsBuilder = AspellBuilder def view_destination(self, view): aspell_spec = self.spec["aspell"] if view.get_projection_for_spec(aspell_spec) != aspell_spec.prefix: - raise ExtensionError("aspell does not support non-global extensions") + raise spack.package_base.ExtensionError( + "aspell does not support non-global extensions" + ) aspell = aspell_spec.command return aspell("dump", "config", "dict-dir", output=str).strip() @@ -36,19 +59,5 @@ def view_source(self): return self.prefix.lib def patch(self): - filter_file(r"^dictdir=.*$", "dictdir=/lib", "configure") - filter_file(r"^datadir=.*$", "datadir=/lib", "configure") - - def configure(self, spec, prefix): - aspell = spec["aspell"].prefix.bin.aspell - prezip = spec["aspell"].prefix.bin.prezip - destdir = prefix - - sh = which("sh") - sh( - "./configure", - "--vars", - "ASPELL={0}".format(aspell), - "PREZIP={0}".format(prezip), - "DESTDIR={0}".format(destdir), - ) + fs.filter_file(r"^dictdir=.*$", "dictdir=/lib", "configure") + fs.filter_file(r"^datadir=.*$", "datadir=/lib", "configure") diff --git a/lib/spack/spack/build_systems/autotools.py b/lib/spack/spack/build_systems/autotools.py index 81a572de528..76e6bb27ce6 100644 --- 
a/lib/spack/spack/build_systems/autotools.py +++ b/lib/spack/spack/build_systems/autotools.py @@ -6,87 +6,140 @@ import os import os.path import stat -from subprocess import PIPE, check_call +import subprocess from typing import List # novm import llnl.util.filesystem as fs import llnl.util.tty as tty -from llnl.util.filesystem import force_remove, working_dir -from spack.build_environment import InstallError -from spack.directives import conflicts, depends_on +import spack.build_environment +import spack.builder +import spack.package_base +from spack.directives import build_system, conflicts, depends_on +from spack.multimethod import when from spack.operating_systems.mac_os import macos_version -from spack.package_base import PackageBase, run_after, run_before from spack.util.executable import Executable from spack.version import Version +from ._checks import ( + BaseBuilder, + apply_macos_rpath_fixups, + ensure_build_dependencies_or_raise, + execute_build_time_tests, + execute_install_time_tests, +) -class AutotoolsPackage(PackageBase): - """Specialized class for packages built using GNU Autotools. - This class provides four phases that can be overridden: +class AutotoolsPackage(spack.package_base.PackageBase): + """Specialized class for packages built using GNU Autotools.""" - 1. :py:meth:`~.AutotoolsPackage.autoreconf` - 2. :py:meth:`~.AutotoolsPackage.configure` - 3. :py:meth:`~.AutotoolsPackage.build` - 4. 
:py:meth:`~.AutotoolsPackage.install` + #: This attribute is used in UI queries that need to know the build + #: system base class + build_system_class = "AutotoolsPackage" + + #: Legacy buildsystem attribute used to deserialize and install old specs + legacy_buildsystem = "autotools" + + build_system("autotools") + + with when("build_system=autotools"): + depends_on("gnuconfig", type="build", when="target=ppc64le:") + depends_on("gnuconfig", type="build", when="target=aarch64:") + depends_on("gnuconfig", type="build", when="target=riscv64:") + conflicts("platform=windows") + + def flags_to_build_system_args(self, flags): + """Produces a list of all command line arguments to pass specified + compiler flags to configure.""" + # Has to be dynamic attribute due to caching. + setattr(self, "configure_flag_args", []) + for flag, values in flags.items(): + if values: + values_str = "{0}={1}".format(flag.upper(), " ".join(values)) + self.configure_flag_args.append(values_str) + # Spack's fflags are meant for both F77 and FC, therefore we + # additionaly set FCFLAGS if required. + values = flags.get("fflags", None) + if values: + values_str = "FCFLAGS={0}".format(" ".join(values)) + self.configure_flag_args.append(values_str) + + # Legacy methods (used by too many packages to change them, + # need to forward to the builder) + def enable_or_disable(self, *args, **kwargs): + return self.builder.enable_or_disable(*args, **kwargs) + + def with_or_without(self, *args, **kwargs): + return self.builder.with_or_without(*args, **kwargs) + + +@spack.builder.builder("autotools") +class AutotoolsBuilder(BaseBuilder): + """The autotools builder encodes the default way of installing software built + with autotools. It has four phases that can be overridden, if need be: + + 1. :py:meth:`~.AutotoolsBuilder.autoreconf` + 2. :py:meth:`~.AutotoolsBuilder.configure` + 3. :py:meth:`~.AutotoolsBuilder.build` + 4. 
:py:meth:`~.AutotoolsBuilder.install` + + They all have sensible defaults and for many packages the only thing necessary + is to override the helper method + :meth:`~spack.build_systems.autotools.AutotoolsBuilder.configure_args`. - They all have sensible defaults and for many packages the only thing - necessary will be to override the helper method - :meth:`~spack.build_systems.autotools.AutotoolsPackage.configure_args`. For a finer tuning you may also override: +-----------------------------------------------+--------------------+ | **Method** | **Purpose** | +===============================================+====================+ - | :py:attr:`~.AutotoolsPackage.build_targets` | Specify ``make`` | + | :py:attr:`~.AutotoolsBuilder.build_targets` | Specify ``make`` | | | targets for the | | | build phase | +-----------------------------------------------+--------------------+ - | :py:attr:`~.AutotoolsPackage.install_targets` | Specify ``make`` | + | :py:attr:`~.AutotoolsBuilder.install_targets` | Specify ``make`` | | | targets for the | | | install phase | +-----------------------------------------------+--------------------+ - | :py:meth:`~.AutotoolsPackage.check` | Run build time | + | :py:meth:`~.AutotoolsBuilder.check` | Run build time | | | tests if required | +-----------------------------------------------+--------------------+ """ #: Phases of a GNU Autotools package - phases = ["autoreconf", "configure", "build", "install"] - #: This attribute is used in UI queries that need to know the build - #: system base class - build_system_class = "AutotoolsPackage" + phases = ("autoreconf", "configure", "build", "install") - @property - def patch_config_files(self): - """ - Whether or not to update old ``config.guess`` and ``config.sub`` files - distributed with the tarball. This currently only applies to - ``ppc64le:``, ``aarch64:``, and ``riscv64`` target architectures. 
The - substitutes are taken from the ``gnuconfig`` package, which is - automatically added as a build dependency for these architectures. In - case system versions of these config files are required, the - ``gnuconfig`` package can be marked external with a prefix pointing to - the directory containing the system ``config.guess`` and ``config.sub`` - files. - """ - return ( - self.spec.satisfies("target=ppc64le:") - or self.spec.satisfies("target=aarch64:") - or self.spec.satisfies("target=riscv64:") - ) + #: Names associated with package methods in the old build-system format + legacy_methods = ( + "configure_args", + "check", + "installcheck", + ) - #: Whether or not to update ``libtool`` - #: (currently only for Arm/Clang/Fujitsu/NVHPC compilers) + #: Names associated with package attributes in the old build-system format + legacy_attributes = ( + "archive_files", + "patch_libtool", + "build_targets", + "install_targets", + "build_time_test_callbacks", + "install_time_test_callbacks", + "force_autoreconf", + "autoreconf_extra_args", + "install_libtool_archives", + "patch_config_files", + "configure_directory", + "configure_abs_path", + "build_directory", + "autoreconf_search_path_args", + ) + + #: Whether to update ``libtool`` (e.g. 
for Arm/Clang/Fujitsu/NVHPC compilers) patch_libtool = True - #: Targets for ``make`` during the :py:meth:`~.AutotoolsPackage.build` - #: phase + #: Targets for ``make`` during the :py:meth:`~.AutotoolsBuilder.build` phase build_targets = [] # type: List[str] - #: Targets for ``make`` during the :py:meth:`~.AutotoolsPackage.install` - #: phase + #: Targets for ``make`` during the :py:meth:`~.AutotoolsBuilder.install` phase install_targets = ["install"] #: Callback names for build-time test @@ -97,24 +150,40 @@ def patch_config_files(self): #: Set to true to force the autoreconf step even if configure is present force_autoreconf = False + #: Options to be passed to autoreconf when using the default implementation autoreconf_extra_args = [] # type: List[str] - #: If False deletes all the .la files in the prefix folder - #: after the installation. If True instead it installs them. + #: If False deletes all the .la files in the prefix folder after the installation. + #: If True instead it installs them. install_libtool_archives = False - depends_on("gnuconfig", type="build", when="target=ppc64le:") - depends_on("gnuconfig", type="build", when="target=aarch64:") - depends_on("gnuconfig", type="build", when="target=riscv64:") - conflicts("platform=windows") + @property + def patch_config_files(self): + """Whether to update old ``config.guess`` and ``config.sub`` files + distributed with the tarball. + + This currently only applies to ``ppc64le:``, ``aarch64:``, and + ``riscv64`` target architectures. + + The substitutes are taken from the ``gnuconfig`` package, which is + automatically added as a build dependency for these architectures. In case + system versions of these config files are required, the ``gnuconfig`` package + can be marked external, with a prefix pointing to the directory containing the + system ``config.guess`` and ``config.sub`` files. 
+ """ + return ( + self.pkg.spec.satisfies("target=ppc64le:") + or self.pkg.spec.satisfies("target=aarch64:") + or self.pkg.spec.satisfies("target=riscv64:") + ) @property def _removed_la_files_log(self): - """File containing the list of remove libtool archives""" + """File containing the list of removed libtool archives""" build_dir = self.build_directory if not os.path.isabs(self.build_directory): - build_dir = os.path.join(self.stage.path, build_dir) + build_dir = os.path.join(self.pkg.stage.path, build_dir) return os.path.join(build_dir, "removed_la_files.txt") @property @@ -125,13 +194,13 @@ def archive_files(self): files.append(self._removed_la_files_log) return files - @run_after("autoreconf") + @spack.builder.run_after("autoreconf") def _do_patch_config_files(self): - """Some packages ship with older config.guess/config.sub files and - need to have these updated when installed on a newer architecture. - In particular, config.guess fails for PPC64LE for version prior - to a 2013-06-10 build date (automake 1.13.4) and for ARM (aarch64) and - RISC-V (riscv64). + """Some packages ship with older config.guess/config.sub files and need to + have these updated when installed on a newer architecture. + + In particular, config.guess fails for PPC64LE for version prior to a + 2013-06-10 build date (automake 1.13.4) and for AArch64 and RISC-V. """ if not self.patch_config_files: return @@ -139,11 +208,11 @@ def _do_patch_config_files(self): # TODO: Expand this to select the 'config.sub'-compatible architecture # for each platform (e.g. 'config.sub' doesn't accept 'power9le', but # does accept 'ppc64le'). 
- if self.spec.satisfies("target=ppc64le:"): + if self.pkg.spec.satisfies("target=ppc64le:"): config_arch = "ppc64le" - elif self.spec.satisfies("target=aarch64:"): + elif self.pkg.spec.satisfies("target=aarch64:"): config_arch = "aarch64" - elif self.spec.satisfies("target=riscv64:"): + elif self.pkg.spec.satisfies("target=riscv64:"): config_arch = "riscv64" else: config_arch = "local" @@ -155,7 +224,7 @@ def runs_ok(script_abs_path): args = [script_abs_path] + additional_args.get(script_name, []) try: - check_call(args, stdout=PIPE, stderr=PIPE) + subprocess.check_call(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE) except Exception as e: tty.debug(e) return False @@ -163,7 +232,7 @@ def runs_ok(script_abs_path): return True # Get the list of files that needs to be patched - to_be_patched = fs.find(self.stage.path, files=["config.sub", "config.guess"]) + to_be_patched = fs.find(self.pkg.stage.path, files=["config.sub", "config.guess"]) to_be_patched = [f for f in to_be_patched if not runs_ok(f)] # If there are no files to be patched, return early @@ -171,22 +240,21 @@ def runs_ok(script_abs_path): return # Otherwise, require `gnuconfig` to be a build dependency - self._require_build_deps( - pkgs=["gnuconfig"], spec=self.spec, err="Cannot patch config files" + ensure_build_dependencies_or_raise( + spec=self.pkg.spec, dependencies=["gnuconfig"], error_msg="Cannot patch config files" ) # Get the config files we need to patch (config.sub / config.guess). to_be_found = list(set(os.path.basename(f) for f in to_be_patched)) - gnuconfig = self.spec["gnuconfig"] + gnuconfig = self.pkg.spec["gnuconfig"] gnuconfig_dir = gnuconfig.prefix # An external gnuconfig may not not have a prefix. if gnuconfig_dir is None: - raise InstallError( - "Spack could not find substitutes for GNU config " - "files because no prefix is available for the " - "`gnuconfig` package. Make sure you set a prefix " - "path instead of modules for external `gnuconfig`." 
+ raise spack.build_environment.InstallError( + "Spack could not find substitutes for GNU config files because no " + "prefix is available for the `gnuconfig` package. Make sure you set a " + "prefix path instead of modules for external `gnuconfig`." ) candidates = fs.find(gnuconfig_dir, files=to_be_found, recursive=False) @@ -203,7 +271,7 @@ def runs_ok(script_abs_path): msg += ( " or the `gnuconfig` package prefix is misconfigured as" " an external package" ) - raise InstallError(msg) + raise spack.build_environment.InstallError(msg) # Filter working substitutes candidates = [f for f in candidates if runs_ok(f)] @@ -228,7 +296,9 @@ def runs_ok(script_abs_path): and set the prefix to the directory containing the `config.guess` and `config.sub` files. """ - raise InstallError(msg.format(", ".join(to_be_found), self.name)) + raise spack.build_environment.InstallError( + msg.format(", ".join(to_be_found), self.name) + ) # Copy the good files over the bad ones for abs_path in to_be_patched: @@ -238,7 +308,7 @@ def runs_ok(script_abs_path): fs.copy(substitutes[name], abs_path) os.chmod(abs_path, mode) - @run_before("configure") + @spack.builder.run_before("configure") def _patch_usr_bin_file(self): """On NixOS file is not available in /usr/bin/file. Patch configure scripts to use file from path.""" @@ -250,7 +320,7 @@ def _patch_usr_bin_file(self): with fs.keep_modification_time(*x.filenames): x.filter(regex="/usr/bin/file", repl="file", string=True) - @run_before("configure") + @spack.builder.run_before("configure") def _set_autotools_environment_variables(self): """Many autotools builds use a version of mknod.m4 that fails when running as root unless FORCE_UNSAFE_CONFIGURE is set to 1. @@ -261,11 +331,10 @@ def _set_autotools_environment_variables(self): Without it, configure just fails halfway through, but it can still run things *before* this check. Forcing this just removes a nuisance -- this is not circumventing any real protection. 
- """ os.environ["FORCE_UNSAFE_CONFIGURE"] = "1" - @run_before("configure") + @spack.builder.run_before("configure") def _do_patch_libtool_configure(self): """Patch bugs that propagate from libtool macros into "configure" and further into "libtool". Note that patches that can be fixed by patching @@ -293,7 +362,7 @@ def _do_patch_libtool_configure(self): # Support Libtool 2.4.2 and older: x.filter(regex=r'^(\s*test \$p = "-R")(; then\s*)$', repl=r'\1 || test x-l = x"$p"\2') - @run_after("configure") + @spack.builder.run_after("configure") def _do_patch_libtool(self): """If configure generates a "libtool" script that does not correctly detect the compiler (and patch_libtool is set), patch in the correct @@ -328,31 +397,33 @@ def _do_patch_libtool(self): markers[tag] = "LIBTOOL TAG CONFIG: {0}".format(tag.upper()) # Replace empty linker flag prefixes: - if self.compiler.name == "nag": + if self.pkg.compiler.name == "nag": # Nag is mixed with gcc and g++, which are recognized correctly. # Therefore, we change only Fortran values: for tag in ["fc", "f77"]: marker = markers[tag] x.filter( regex='^wl=""$', - repl='wl="{0}"'.format(self.compiler.linker_arg), + repl='wl="{0}"'.format(self.pkg.compiler.linker_arg), start_at="# ### BEGIN {0}".format(marker), stop_at="# ### END {0}".format(marker), ) else: - x.filter(regex='^wl=""$', repl='wl="{0}"'.format(self.compiler.linker_arg)) + x.filter(regex='^wl=""$', repl='wl="{0}"'.format(self.pkg.compiler.linker_arg)) # Replace empty PIC flag values: for cc, marker in markers.items(): x.filter( regex='^pic_flag=""$', - repl='pic_flag="{0}"'.format(getattr(self.compiler, "{0}_pic_flag".format(cc))), + repl='pic_flag="{0}"'.format( + getattr(self.pkg.compiler, "{0}_pic_flag".format(cc)) + ), start_at="# ### BEGIN {0}".format(marker), stop_at="# ### END {0}".format(marker), ) # Other compiler-specific patches: - if self.compiler.name == "fj": + if self.pkg.compiler.name == "fj": x.filter(regex="-nostdlib", repl="", string=True) 
rehead = r"/\S*/" for o in [ @@ -365,12 +436,12 @@ def _do_patch_libtool(self): "crtendS.o", ]: x.filter(regex=(rehead + o), repl="", string=True) - elif self.compiler.name == "dpcpp": + elif self.pkg.compiler.name == "dpcpp": # Hack to filter out spurious predep_objects when building with Intel dpcpp # (see https://github.com/spack/spack/issues/32863): x.filter(regex=r"^(predep_objects=.*)/tmp/conftest-[0-9A-Fa-f]+\.o", repl=r"\1") x.filter(regex=r"^(predep_objects=.*)/tmp/a-[0-9A-Fa-f]+\.o", repl=r"\1") - elif self.compiler.name == "nag": + elif self.pkg.compiler.name == "nag": for tag in ["fc", "f77"]: marker = markers[tag] start_at = "# ### BEGIN {0}".format(marker) @@ -446,11 +517,8 @@ def _do_patch_libtool(self): @property def configure_directory(self): - """Returns the directory where 'configure' resides. - - :return: directory where to find configure - """ - return self.stage.source_path + """Return the directory where 'configure' resides.""" + return self.pkg.stage.source_path @property def configure_abs_path(self): @@ -463,34 +531,12 @@ def build_directory(self): """Override to provide another place to build the package""" return self.configure_directory - @run_before("autoreconf") + @spack.builder.run_before("autoreconf") def delete_configure_to_force_update(self): if self.force_autoreconf: - force_remove(self.configure_abs_path) + fs.force_remove(self.configure_abs_path) - def _require_build_deps(self, pkgs, spec, err): - """Require `pkgs` to be direct build dependencies of `spec`. Raises a - RuntimeError with a helpful error messages when any dep is missing.""" - - build_deps = [d.name for d in spec.dependencies(deptype="build")] - missing_deps = [x for x in pkgs if x not in build_deps] - - if not missing_deps: - return - - # Raise an exception on missing deps. 
- msg = ( - "{0}: missing dependencies: {1}.\n\nPlease add " - "the following lines to the package:\n\n".format(err, ", ".join(missing_deps)) - ) - - for dep in missing_deps: - msg += " depends_on('{0}', type='build', when='@{1}')\n".format(dep, spec.version) - - msg += "\nUpdate the version (when='@{0}') as needed.".format(spec.version) - raise RuntimeError(msg) - - def autoreconf(self, spec, prefix): + def autoreconf(self, pkg, spec, prefix): """Not needed usually, configure should be already there""" # If configure exists nothing needs to be done @@ -498,8 +544,10 @@ def autoreconf(self, spec, prefix): return # Else try to regenerate it, which reuquires a few build dependencies - self._require_build_deps( - pkgs=["autoconf", "automake", "libtool"], spec=spec, err="Cannot generate configure" + ensure_build_dependencies_or_raise( + spec=spec, + dependencies=["autoconf", "automake", "libtool"], + error_msg="Cannot generate configure", ) tty.msg("Configure script not found: trying to generate it") @@ -507,8 +555,8 @@ def autoreconf(self, spec, prefix): tty.warn("* If the default procedure fails, consider implementing *") tty.warn("* a custom AUTORECONF phase in the package *") tty.warn("*********************************************************") - with working_dir(self.configure_directory): - m = inspect.getmodule(self) + with fs.working_dir(self.configure_directory): + m = inspect.getmodule(self.pkg) # This line is what is needed most of the time # --install, --verbose, --force autoreconf_args = ["-ivf"] @@ -524,98 +572,66 @@ def autoreconf_search_path_args(self): spack dependencies.""" return _autoreconf_search_path_args(self.spec) - @run_after("autoreconf") + @spack.builder.run_after("autoreconf") def set_configure_or_die(self): - """Checks the presence of a ``configure`` file after the - autoreconf phase. If it is found sets a module attribute - appropriately, otherwise raises an error. + """Ensure the presence of a "configure" script, or raise. 
If the "configure" + is found, a module level attribute is set. - :raises RuntimeError: if a configure script is not found in - :py:meth:`~AutotoolsPackage.configure_directory` + Raises: + RuntimeError: if the "configure" script is not found """ - # Check if a configure script is there. If not raise a RuntimeError. + # Check if the "configure" script is there. If not raise a RuntimeError. if not os.path.exists(self.configure_abs_path): msg = "configure script not found in {0}" raise RuntimeError(msg.format(self.configure_directory)) # Monkey-patch the configure script in the corresponding module - inspect.getmodule(self).configure = Executable(self.configure_abs_path) + inspect.getmodule(self.pkg).configure = Executable(self.configure_abs_path) def configure_args(self): - """Produces a list containing all the arguments that must be passed to - configure, except ``--prefix`` which will be pre-pended to the list. - - :return: list of arguments for configure + """Return the list of all the arguments that must be passed to configure, + except ``--prefix`` which will be pre-pended to the list. """ return [] - def flags_to_build_system_args(self, flags): - """Produces a list of all command line arguments to pass specified - compiler flags to configure.""" - # Has to be dynamic attribute due to caching. - setattr(self, "configure_flag_args", []) - for flag, values in flags.items(): - if values: - values_str = "{0}={1}".format(flag.upper(), " ".join(values)) - self.configure_flag_args.append(values_str) - # Spack's fflags are meant for both F77 and FC, therefore we - # additionaly set FCFLAGS if required. - values = flags.get("fflags", None) - if values: - values_str = "FCFLAGS={0}".format(" ".join(values)) - self.configure_flag_args.append(values_str) - - def configure(self, spec, prefix): - """Runs configure with the arguments specified in - :meth:`~spack.build_systems.autotools.AutotoolsPackage.configure_args` - and an appropriately set prefix. 
+ def configure(self, pkg, spec, prefix): + """Run "configure", with the arguments specified by the builder and an + appropriately set prefix. """ - options = getattr(self, "configure_flag_args", []) + options = getattr(self.pkg, "configure_flag_args", []) options += ["--prefix={0}".format(prefix)] options += self.configure_args() - with working_dir(self.build_directory, create=True): - inspect.getmodule(self).configure(*options) + with fs.working_dir(self.build_directory, create=True): + inspect.getmodule(self.pkg).configure(*options) - def setup_build_environment(self, env): - if self.spec.platform == "darwin" and macos_version() >= Version("11"): - # Many configure files rely on matching '10.*' for macOS version - # detection and fail to add flags if it shows as version 11. - env.set("MACOSX_DEPLOYMENT_TARGET", "10.16") - - def build(self, spec, prefix): - """Makes the build targets specified by - :py:attr:``~.AutotoolsPackage.build_targets`` - """ + def build(self, pkg, spec, prefix): + """Run "make" on the build targets specified by the builder.""" # See https://autotools.io/automake/silent.html params = ["V=1"] params += self.build_targets - with working_dir(self.build_directory): - inspect.getmodule(self).make(*params) + with fs.working_dir(self.build_directory): + inspect.getmodule(self.pkg).make(*params) - def install(self, spec, prefix): - """Makes the install targets specified by - :py:attr:``~.AutotoolsPackage.install_targets`` - """ - with working_dir(self.build_directory): - inspect.getmodule(self).make(*self.install_targets) + def install(self, pkg, spec, prefix): + """Run "make" on the install targets specified by the builder.""" + with fs.working_dir(self.build_directory): + inspect.getmodule(self.pkg).make(*self.install_targets) - run_after("build")(PackageBase._run_default_build_time_test_callbacks) + spack.builder.run_after("build")(execute_build_time_tests) def check(self): - """Searches the Makefile for targets ``test`` and ``check`` - and 
runs them if found.
-        """
-        with working_dir(self.build_directory):
-            self._if_make_target_execute("test")
-            self._if_make_target_execute("check")
+        """Run "make" on the ``test`` and ``check`` targets, if found."""
+        with fs.working_dir(self.build_directory):
+            self.pkg._if_make_target_execute("test")
+            self.pkg._if_make_target_execute("check")

     def _activate_or_not(
         self, name, activation_word, deactivation_word, activation_value=None, variant=None
     ):
-        """This function contains the current implementation details of
-        :meth:`~spack.build_systems.autotools.AutotoolsPackage.with_or_without` and
-        :meth:`~spack.build_systems.autotools.AutotoolsPackage.enable_or_disable`.
+        """This function contains the current implementation details of
+        :meth:`~spack.build_systems.autotools.AutotoolsBuilder.with_or_without` and
+        :meth:`~spack.build_systems.autotools.AutotoolsBuilder.enable_or_disable`.

         Args:
             name (str): name of the option that is being activated or not
@@ -671,7 +687,7 @@ def _activate_or_not(
         Raises:
             KeyError: if name is not among known variants
         """
-        spec = self.spec
+        spec = self.pkg.spec
         args = []

         if activation_value == "prefix":
@@ -681,16 +697,16 @@ def _activate_or_not(

         # Defensively look that the name passed as argument is among
         # variants
-        if variant not in self.variants:
+        if variant not in self.pkg.variants:
             msg = '"{0}" is not a variant of "{1}"'
-            raise KeyError(msg.format(variant, self.name))
+            raise KeyError(msg.format(variant, self.pkg.name))

         if variant not in spec.variants:
             return []

         # Create a list of pairs. Each pair includes a configuration
         # option and whether or not that option is activated
-        variant_desc, _ = self.variants[variant]
+        variant_desc, _ = self.pkg.variants[variant]
         if set(variant_desc.values) == set((True, False)):
             # BoolValuedVariant carry information about a single option. 
# Nonetheless, for uniformity of treatment we'll package them @@ -718,14 +734,18 @@ def _activate_or_not( override_name = "{0}_or_{1}_{2}".format( activation_word, deactivation_word, option_value ) - line_generator = getattr(self, override_name, None) + line_generator = getattr(self, override_name, None) or getattr( + self.pkg, override_name, None + ) # If not available use a sensible default if line_generator is None: def _default_generator(is_activated): if is_activated: line = "--{0}-{1}".format(activation_word, option_value) - if activation_value is not None and activation_value(option_value): + if activation_value is not None and activation_value( + option_value + ): # NOQA=ignore=E501 line += "={0}".format(activation_value(option_value)) return line return "--{0}-{1}".format(deactivation_word, option_value) @@ -764,7 +784,7 @@ def with_or_without(self, name, activation_value=None, variant=None): def enable_or_disable(self, name, activation_value=None, variant=None): """Same as - :meth:`~spack.build_systems.autotools.AutotoolsPackage.with_or_without` + :meth:`~spack.build_systems.autotools.AutotoolsBuilder.with_or_without` but substitute ``with`` with ``enable`` and ``without`` with ``disable``. Args: @@ -781,19 +801,14 @@ def enable_or_disable(self, name, activation_value=None, variant=None): """ return self._activate_or_not(name, "enable", "disable", activation_value, variant) - run_after("install")(PackageBase._run_default_install_time_test_callbacks) + spack.builder.run_after("install")(execute_install_time_tests) def installcheck(self): - """Searches the Makefile for an ``installcheck`` target - and runs it if found. 
- """ - with working_dir(self.build_directory): - self._if_make_target_execute("installcheck") + """Run "make" on the ``installcheck`` target, if found.""" + with fs.working_dir(self.build_directory): + self.pkg._if_make_target_execute("installcheck") - # Check that self.prefix is there after installation - run_after("install")(PackageBase.sanity_check_prefix) - - @run_after("install") + @spack.builder.run_after("install") def remove_libtool_archives(self): """Remove all .la files in prefix sub-folders if the package sets ``install_libtool_archives`` to be False. @@ -803,14 +818,20 @@ def remove_libtool_archives(self): return # Remove the files and create a log of what was removed - libtool_files = fs.find(str(self.prefix), "*.la", recursive=True) + libtool_files = fs.find(str(self.pkg.prefix), "*.la", recursive=True) with fs.safe_remove(*libtool_files): fs.mkdirp(os.path.dirname(self._removed_la_files_log)) with open(self._removed_la_files_log, mode="w") as f: f.write("\n".join(libtool_files)) + def setup_build_environment(self, env): + if self.spec.platform == "darwin" and macos_version() >= Version("11"): + # Many configure files rely on matching '10.*' for macOS version + # detection and fail to add flags if it shows as version 11. + env.set("MACOSX_DEPLOYMENT_TARGET", "10.16") + # On macOS, force rpaths for shared library IDs and remove duplicate rpaths - run_after("install")(PackageBase.apply_macos_rpath_fixups) + spack.builder.run_after("install", when="platform=darwin")(apply_macos_rpath_fixups) def _autoreconf_search_path_args(spec): diff --git a/lib/spack/spack/build_systems/bundle.py b/lib/spack/spack/build_systems/bundle.py new file mode 100644 index 00000000000..fad0ba4e144 --- /dev/null +++ b/lib/spack/spack/build_systems/bundle.py @@ -0,0 +1,31 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) +import spack.builder +import spack.directives +import spack.package_base + + +class BundlePackage(spack.package_base.PackageBase): + """General purpose bundle, or no-code, package class.""" + + #: This attribute is used in UI queries that require to know which + #: build-system class we are using + build_system_class = "BundlePackage" + + #: Legacy buildsystem attribute used to deserialize and install old specs + legacy_buildsystem = "bundle" + + #: Bundle packages do not have associated source or binary code. + has_code = False + + spack.directives.build_system("bundle") + + +@spack.builder.builder("bundle") +class BundleBuilder(spack.builder.Builder): + phases = ("install",) + + def install(self, pkg, spec, prefix): + pass diff --git a/lib/spack/spack/build_systems/cached_cmake.py b/lib/spack/spack/build_systems/cached_cmake.py index 9ffd2a82abd..787def703e3 100644 --- a/lib/spack/spack/build_systems/cached_cmake.py +++ b/lib/spack/spack/build_systems/cached_cmake.py @@ -3,12 +3,14 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) import os +from typing import Tuple +import llnl.util.filesystem as fs import llnl.util.tty as tty -from llnl.util.filesystem import install, mkdirp -from spack.build_systems.cmake import CMakePackage -from spack.package_base import run_after +import spack.builder + +from .cmake import CMakeBuilder, CMakePackage def cmake_cache_path(name, value, comment=""): @@ -28,44 +30,50 @@ def cmake_cache_option(name, boolean_value, comment=""): return 'set({0} {1} CACHE BOOL "{2}")\n'.format(name, value, comment) -class CachedCMakePackage(CMakePackage): - """Specialized class for packages built using CMake initial cache. +class CachedCMakeBuilder(CMakeBuilder): - This feature of CMake allows packages to increase reproducibility, - especially between Spack- and manual builds. 
It also allows packages to - sidestep certain parsing bugs in extremely long ``cmake`` commands, and to - avoid system limits on the length of the command line.""" + #: Phases of a Cached CMake package + #: Note: the initconfig phase is used for developer builds as a final phase to stop on + phases = ("initconfig", "cmake", "build", "install") # type: Tuple[str, ...] - phases = ["initconfig", "cmake", "build", "install"] + #: Names associated with package methods in the old build-system format + legacy_methods = CMakeBuilder.legacy_methods + ( + "initconfig_compiler_entries", + "initconfig_mpi_entries", + "initconfig_hardware_entries", + "std_initconfig_entries", + "initconfig_package_entries", + ) # type: Tuple[str, ...] + + #: Names associated with package attributes in the old build-system format + legacy_attributes = CMakeBuilder.legacy_attributes + ( + "cache_name", + "cache_path", + ) # type: Tuple[str, ...] @property def cache_name(self): return "{0}-{1}-{2}@{3}.cmake".format( - self.name, - self.spec.architecture, - self.spec.compiler.name, - self.spec.compiler.version, + self.pkg.name, + self.pkg.spec.architecture, + self.pkg.spec.compiler.name, + self.pkg.spec.compiler.version, ) @property def cache_path(self): - return os.path.join(self.stage.source_path, self.cache_name) - - def flag_handler(self, name, flags): - if name in ("cflags", "cxxflags", "cppflags", "fflags"): - return (None, None, None) # handled in the cmake cache - return (flags, None, None) + return os.path.join(self.pkg.stage.source_path, self.cache_name) def initconfig_compiler_entries(self): # This will tell cmake to use the Spack compiler wrappers when run # through Spack, but use the underlying compiler when run outside of # Spack - spec = self.spec + spec = self.pkg.spec # Fortran compiler is optional if "FC" in os.environ: spack_fc_entry = cmake_cache_path("CMAKE_Fortran_COMPILER", os.environ["FC"]) - system_fc_entry = cmake_cache_path("CMAKE_Fortran_COMPILER", self.compiler.fc) + 
system_fc_entry = cmake_cache_path("CMAKE_Fortran_COMPILER", self.pkg.compiler.fc) else: spack_fc_entry = "# No Fortran compiler defined in spec" system_fc_entry = "# No Fortran compiler defined in spec" @@ -81,8 +89,8 @@ def initconfig_compiler_entries(self): " " + cmake_cache_path("CMAKE_CXX_COMPILER", os.environ["CXX"]), " " + spack_fc_entry, "else()\n", - " " + cmake_cache_path("CMAKE_C_COMPILER", self.compiler.cc), - " " + cmake_cache_path("CMAKE_CXX_COMPILER", self.compiler.cxx), + " " + cmake_cache_path("CMAKE_C_COMPILER", self.pkg.compiler.cc), + " " + cmake_cache_path("CMAKE_CXX_COMPILER", self.pkg.compiler.cxx), " " + system_fc_entry, "endif()\n", ] @@ -126,7 +134,7 @@ def initconfig_compiler_entries(self): return entries def initconfig_mpi_entries(self): - spec = self.spec + spec = self.pkg.spec if not spec.satisfies("^mpi"): return [] @@ -160,13 +168,13 @@ def initconfig_mpi_entries(self): mpiexec = os.path.join(spec["mpi"].prefix.bin, "mpiexec") if not os.path.exists(mpiexec): - msg = "Unable to determine MPIEXEC, %s tests may fail" % self.name + msg = "Unable to determine MPIEXEC, %s tests may fail" % self.pkg.name entries.append("# {0}\n".format(msg)) tty.warn(msg) else: # starting with cmake 3.10, FindMPI expects MPIEXEC_EXECUTABLE # vs the older versions which expect MPIEXEC - if self.spec["cmake"].satisfies("@3.10:"): + if self.pkg.spec["cmake"].satisfies("@3.10:"): entries.append(cmake_cache_path("MPIEXEC_EXECUTABLE", mpiexec)) else: entries.append(cmake_cache_path("MPIEXEC", mpiexec)) @@ -180,7 +188,7 @@ def initconfig_mpi_entries(self): return entries def initconfig_hardware_entries(self): - spec = self.spec + spec = self.pkg.spec entries = [ "#------------------{0}".format("-" * 60), @@ -197,13 +205,7 @@ def initconfig_hardware_entries(self): entries.append(cmake_cache_path("CUDA_TOOLKIT_ROOT_DIR", cudatoolkitdir)) cudacompiler = "${CUDA_TOOLKIT_ROOT_DIR}/bin/nvcc" entries.append(cmake_cache_path("CMAKE_CUDA_COMPILER", cudacompiler)) - - if 
spec.satisfies("^mpi"): - entries.append(cmake_cache_path("CMAKE_CUDA_HOST_COMPILER", "${MPI_CXX_COMPILER}")) - else: - entries.append( - cmake_cache_path("CMAKE_CUDA_HOST_COMPILER", "${CMAKE_CXX_COMPILER}") - ) + entries.append(cmake_cache_path("CMAKE_CUDA_HOST_COMPILER", "${CMAKE_CXX_COMPILER}")) return entries @@ -212,7 +214,7 @@ def std_initconfig_entries(self): "#------------------{0}".format("-" * 60), "# !!!! This is a generated file, edit at own risk !!!!", "#------------------{0}".format("-" * 60), - "# CMake executable path: {0}".format(self.spec["cmake"].command.path), + "# CMake executable path: {0}".format(self.pkg.spec["cmake"].command.path), "#------------------{0}\n".format("-" * 60), ] @@ -220,7 +222,7 @@ def initconfig_package_entries(self): """This method is to be overwritten by the package""" return [] - def initconfig(self, spec, prefix): + def initconfig(self, pkg, spec, prefix): cache_entries = ( self.std_initconfig_entries() + self.initconfig_compiler_entries() @@ -236,11 +238,28 @@ def initconfig(self, spec, prefix): @property def std_cmake_args(self): - args = super(CachedCMakePackage, self).std_cmake_args + args = super(CachedCMakeBuilder, self).std_cmake_args args.extend(["-C", self.cache_path]) return args - @run_after("install") + @spack.builder.run_after("install") def install_cmake_cache(self): - mkdirp(self.spec.prefix.share.cmake) - install(self.cache_path, self.spec.prefix.share.cmake) + fs.mkdirp(self.pkg.spec.prefix.share.cmake) + fs.install(self.cache_path, self.pkg.spec.prefix.share.cmake) + + +class CachedCMakePackage(CMakePackage): + """Specialized class for packages built using CMake initial cache. + + This feature of CMake allows packages to increase reproducibility, + especially between Spack- and manual builds. It also allows packages to + sidestep certain parsing bugs in extremely long ``cmake`` commands, and to + avoid system limits on the length of the command line. 
+ """ + + CMakeBuilder = CachedCMakeBuilder + + def flag_handler(self, name, flags): + if name in ("cflags", "cxxflags", "cppflags", "fflags"): + return None, None, None # handled in the cmake cache + return flags, None, None diff --git a/lib/spack/spack/build_systems/cmake.py b/lib/spack/spack/build_systems/cmake.py index 6fb75b6747d..1d0c50ea078 100644 --- a/lib/spack/spack/build_systems/cmake.py +++ b/lib/spack/spack/build_systems/cmake.py @@ -2,23 +2,26 @@ # Spack Project Developers. See the top-level COPYRIGHT file for details. # # SPDX-License-Identifier: (Apache-2.0 OR MIT) - - import inspect import os import platform import re import sys -from typing import List +from typing import List, Tuple import six +import llnl.util.filesystem as fs from llnl.util.compat import Sequence -from llnl.util.filesystem import working_dir import spack.build_environment -from spack.directives import conflicts, depends_on, variant -from spack.package_base import InstallError, PackageBase, run_after +import spack.builder +import spack.package_base +import spack.util.path +from spack.directives import build_system, depends_on, variant +from spack.multimethod import when + +from ._checks import BaseBuilder, execute_build_time_tests # Regex to extract the primary generator from the CMake generator # string. @@ -34,56 +37,141 @@ def _extract_primary_generator(generator): return primary_generator -class CMakePackage(PackageBase): +class CMakePackage(spack.package_base.PackageBase): """Specialized class for packages built using CMake For more information on the CMake build system, see: https://cmake.org/cmake/help/latest/ + """ - This class provides three phases that can be overridden: + #: This attribute is used in UI queries that need to know the build + #: system base class + build_system_class = "CMakePackage" - 1. :py:meth:`~.CMakePackage.cmake` - 2. :py:meth:`~.CMakePackage.build` - 3. 
:py:meth:`~.CMakePackage.install` + #: Legacy buildsystem attribute used to deserialize and install old specs + legacy_buildsystem = "cmake" + + build_system("cmake") + + with when("build_system=cmake"): + # https://cmake.org/cmake/help/latest/variable/CMAKE_BUILD_TYPE.html + variant( + "build_type", + default="RelWithDebInfo", + description="CMake build type", + values=("Debug", "Release", "RelWithDebInfo", "MinSizeRel"), + ) + # CMAKE_INTERPROCEDURAL_OPTIMIZATION only exists for CMake >= 3.9 + # https://cmake.org/cmake/help/latest/variable/CMAKE_INTERPROCEDURAL_OPTIMIZATION.html + variant( + "ipo", + default=False, + when="^cmake@3.9:", + description="CMake interprocedural optimization", + ) + depends_on("cmake", type="build") + depends_on("ninja", type="build", when="platform=windows") + + def flags_to_build_system_args(self, flags): + """Return a list of all command line arguments to pass the specified + compiler flags to cmake. Note CMAKE does not have a cppflags option, + so cppflags will be added to cflags, cxxflags, and fflags to mimic the + behavior in other tools. + """ + # Has to be dynamic attribute due to caching + setattr(self, "cmake_flag_args", []) + + flag_string = "-DCMAKE_{0}_FLAGS={1}" + langs = {"C": "c", "CXX": "cxx", "Fortran": "f"} + + # Handle language compiler flags + for lang, pre in langs.items(): + flag = pre + "flags" + # cmake has no explicit cppflags support -> add it to all langs + lang_flags = " ".join(flags.get(flag, []) + flags.get("cppflags", [])) + if lang_flags: + self.cmake_flag_args.append(flag_string.format(lang, lang_flags)) + + # Cmake has different linker arguments for different build types. + # We specify for each of them. + if flags["ldflags"]: + ldflags = " ".join(flags["ldflags"]) + ld_string = "-DCMAKE_{0}_LINKER_FLAGS={1}" + # cmake has separate linker arguments for types of builds. 
+ for type in ["EXE", "MODULE", "SHARED", "STATIC"]: + self.cmake_flag_args.append(ld_string.format(type, ldflags)) + + # CMake has libs options separated by language. Apply ours to each. + if flags["ldlibs"]: + libs_flags = " ".join(flags["ldlibs"]) + libs_string = "-DCMAKE_{0}_STANDARD_LIBRARIES={1}" + for lang in langs: + self.cmake_flag_args.append(libs_string.format(lang, libs_flags)) + + # Legacy methods (used by too many packages to change them, + # need to forward to the builder) + def define(self, *args, **kwargs): + return self.builder.define(*args, **kwargs) + + def define_from_variant(self, *args, **kwargs): + return self.builder.define_from_variant(*args, **kwargs) + + +@spack.builder.builder("cmake") +class CMakeBuilder(BaseBuilder): + """The cmake builder encodes the default way of building software with CMake. IT + has three phases that can be overridden: + + 1. :py:meth:`~.CMakeBuilder.cmake` + 2. :py:meth:`~.CMakeBuilder.build` + 3. :py:meth:`~.CMakeBuilder.install` They all have sensible defaults and for many packages the only thing - necessary will be to override :py:meth:`~.CMakePackage.cmake_args`. + necessary will be to override :py:meth:`~.CMakeBuilder.cmake_args`. + For a finer tuning you may also override: +-----------------------------------------------+--------------------+ | **Method** | **Purpose** | +===============================================+====================+ - | :py:meth:`~.CMakePackage.root_cmakelists_dir` | Location of the | + | :py:meth:`~.CMakeBuilder.root_cmakelists_dir` | Location of the | | | root CMakeLists.txt| +-----------------------------------------------+--------------------+ - | :py:meth:`~.CMakePackage.build_directory` | Directory where to | + | :py:meth:`~.CMakeBuilder.build_directory` | Directory where to | | | build the package | +-----------------------------------------------+--------------------+ - - The generator used by CMake can be specified by providing the - generator attribute. 
Per + The generator used by CMake can be specified by providing the ``generator`` + attribute. Per https://cmake.org/cmake/help/git-master/manual/cmake-generators.7.html, - the format is: [ - ]. The - full list of primary and secondary generators supported by CMake may - be found in the documentation for the version of CMake used; - however, at this time Spack supports only the primary generators - "Unix Makefiles" and "Ninja." Spack's CMake support is agnostic with - respect to primary generators. Spack will generate a runtime error - if the generator string does not follow the prescribed format, or if + the format is: [ - ]. + + The full list of primary and secondary generators supported by CMake may be found + in the documentation for the version of CMake used; however, at this time Spack + supports only the primary generators "Unix Makefiles" and "Ninja." Spack's CMake + support is agnostic with respect to primary generators. Spack will generate a + runtime error if the generator string does not follow the prescribed format, or if the primary generator is not supported. """ #: Phases of a CMake package - phases = ["cmake", "build", "install"] - #: This attribute is used in UI queries that need to know the build - #: system base class - build_system_class = "CMakePackage" + phases = ("cmake", "build", "install") # type: Tuple[str, ...] - build_targets = [] # type: List[str] - install_targets = ["install"] + #: Names associated with package methods in the old build-system format + legacy_methods = ("cmake_args", "check") # type: Tuple[str, ...] - build_time_test_callbacks = ["check"] + #: Names associated with package attributes in the old build-system format + legacy_attributes = ( + "generator", + "build_targets", + "install_targets", + "build_time_test_callbacks", + "archive_files", + "root_cmakelists_dir", + "std_cmake_args", + "build_dirname", + "build_directory", + ) # type: Tuple[str, ...] #: The build system generator to use. 
#: @@ -93,27 +181,14 @@ class CMakePackage(PackageBase): #: #: See https://cmake.org/cmake/help/latest/manual/cmake-generators.7.html #: for more information. + generator = "Ninja" if sys.platform == "win32" else "Unix Makefiles" - generator = "Unix Makefiles" - - if sys.platform == "win32": - generator = "Ninja" - depends_on("ninja") - - # https://cmake.org/cmake/help/latest/variable/CMAKE_BUILD_TYPE.html - variant( - "build_type", - default="RelWithDebInfo", - description="CMake build type", - values=("Debug", "Release", "RelWithDebInfo", "MinSizeRel"), - ) - - # https://cmake.org/cmake/help/latest/variable/CMAKE_INTERPROCEDURAL_OPTIMIZATION.html - variant("ipo", default=False, description="CMake interprocedural optimization") - # CMAKE_INTERPROCEDURAL_OPTIMIZATION only exists for CMake >= 3.9 - conflicts("+ipo", when="^cmake@:3.8", msg="+ipo is not supported by CMake < 3.9") - - depends_on("cmake", type="build") + #: Targets to be used during the build phase + build_targets = [] # type: List[str] + #: Targets to be used during the install phase + install_targets = ["install"] + #: Callback names for build-time test + build_time_test_callbacks = ["check"] @property def archive_files(self): @@ -126,40 +201,30 @@ def root_cmakelists_dir(self): This path is relative to the root of the extracted tarball, not to the ``build_directory``. Defaults to the current directory. 
- - :return: directory containing CMakeLists.txt """ - return self.stage.source_path + return self.pkg.stage.source_path @property def std_cmake_args(self): """Standard cmake arguments provided as a property for convenience of package writers - - :return: standard cmake arguments """ # standard CMake arguments - std_cmake_args = CMakePackage._std_args(self) - std_cmake_args += getattr(self, "cmake_flag_args", []) + std_cmake_args = CMakeBuilder.std_args(self.pkg, generator=self.generator) + std_cmake_args += getattr(self.pkg, "cmake_flag_args", []) return std_cmake_args @staticmethod - def _std_args(pkg): + def std_args(pkg, generator=None): """Computes the standard cmake arguments for a generic package""" - - try: - generator = pkg.generator - except AttributeError: - generator = CMakePackage.generator - - # Make sure a valid generator was chosen + generator = generator or "Unix Makefiles" valid_primary_generators = ["Unix Makefiles", "Ninja"] primary_generator = _extract_primary_generator(generator) if primary_generator not in valid_primary_generators: msg = "Invalid CMake generator: '{0}'\n".format(generator) msg += "CMakePackage currently supports the following " msg += "primary generators: '{0}'".format("', '".join(valid_primary_generators)) - raise InstallError(msg) + raise spack.package_base.InstallError(msg) try: build_type = pkg.spec.variants["build_type"].value @@ -171,7 +236,7 @@ def _std_args(pkg): except KeyError: ipo = False - define = CMakePackage.define + define = CMakeBuilder.define args = [ "-G", generator, @@ -251,7 +316,7 @@ def define_from_variant(self, cmake_var, variant=None): of ``cmake_var``. This utility function is similar to - :meth:`~spack.build_systems.autotools.AutotoolsPackage.with_or_without`. + :meth:`~spack.build_systems.autotools.AutotoolsBuilder.with_or_without`. 
Examples: @@ -291,122 +356,75 @@ def define_from_variant(self, cmake_var, variant=None): if variant is None: variant = cmake_var.lower() - if variant not in self.variants: - raise KeyError('"{0}" is not a variant of "{1}"'.format(variant, self.name)) + if variant not in self.pkg.variants: + raise KeyError('"{0}" is not a variant of "{1}"'.format(variant, self.pkg.name)) - if variant not in self.spec.variants: + if variant not in self.pkg.spec.variants: return "" - value = self.spec.variants[variant].value + value = self.pkg.spec.variants[variant].value if isinstance(value, (tuple, list)): # Sort multi-valued variants for reproducibility value = sorted(value) return self.define(cmake_var, value) - def flags_to_build_system_args(self, flags): - """Produces a list of all command line arguments to pass the specified - compiler flags to cmake. Note CMAKE does not have a cppflags option, - so cppflags will be added to cflags, cxxflags, and fflags to mimic the - behavior in other tools.""" - # Has to be dynamic attribute due to caching - setattr(self, "cmake_flag_args", []) - - flag_string = "-DCMAKE_{0}_FLAGS={1}" - langs = {"C": "c", "CXX": "cxx", "Fortran": "f"} - - # Handle language compiler flags - for lang, pre in langs.items(): - flag = pre + "flags" - # cmake has no explicit cppflags support -> add it to all langs - lang_flags = " ".join(flags.get(flag, []) + flags.get("cppflags", [])) - if lang_flags: - self.cmake_flag_args.append(flag_string.format(lang, lang_flags)) - - # Cmake has different linker arguments for different build types. - # We specify for each of them. - if flags["ldflags"]: - ldflags = " ".join(flags["ldflags"]) - ld_string = "-DCMAKE_{0}_LINKER_FLAGS={1}" - # cmake has separate linker arguments for types of builds. - for type in ["EXE", "MODULE", "SHARED", "STATIC"]: - self.cmake_flag_args.append(ld_string.format(type, ldflags)) - - # CMake has libs options separated by language. Apply ours to each. 
- if flags["ldlibs"]: - libs_flags = " ".join(flags["ldlibs"]) - libs_string = "-DCMAKE_{0}_STANDARD_LIBRARIES={1}" - for lang in langs: - self.cmake_flag_args.append(libs_string.format(lang, libs_flags)) - @property def build_dirname(self): - """Returns the directory name to use when building the package - - :return: name of the subdirectory for building the package - """ - return "spack-build-%s" % self.spec.dag_hash(7) + """Directory name to use when building the package.""" + return "spack-build-%s" % self.pkg.spec.dag_hash(7) @property def build_directory(self): - """Returns the directory to use when building the package - - :return: directory where to build the package - """ - return os.path.join(self.stage.path, self.build_dirname) + """Full-path to the directory to use when building the package.""" + return os.path.join(self.pkg.stage.path, self.build_dirname) def cmake_args(self): - """Produces a list containing all the arguments that must be passed to - cmake, except: + """List of all the arguments that must be passed to cmake, except: * CMAKE_INSTALL_PREFIX * CMAKE_BUILD_TYPE * BUILD_TESTING which will be set automatically. 
- - :return: list of arguments for cmake """ return [] - def cmake(self, spec, prefix): + def cmake(self, pkg, spec, prefix): """Runs ``cmake`` in the build directory""" options = self.std_cmake_args options += self.cmake_args() options.append(os.path.abspath(self.root_cmakelists_dir)) - with working_dir(self.build_directory, create=True): - inspect.getmodule(self).cmake(*options) + with fs.working_dir(self.build_directory, create=True): + inspect.getmodule(self.pkg).cmake(*options) - def build(self, spec, prefix): + def build(self, pkg, spec, prefix): """Make the build targets""" - with working_dir(self.build_directory): + with fs.working_dir(self.build_directory): if self.generator == "Unix Makefiles": - inspect.getmodule(self).make(*self.build_targets) + inspect.getmodule(self.pkg).make(*self.build_targets) elif self.generator == "Ninja": self.build_targets.append("-v") - inspect.getmodule(self).ninja(*self.build_targets) + inspect.getmodule(self.pkg).ninja(*self.build_targets) - def install(self, spec, prefix): + def install(self, pkg, spec, prefix): """Make the install targets""" - with working_dir(self.build_directory): + with fs.working_dir(self.build_directory): if self.generator == "Unix Makefiles": - inspect.getmodule(self).make(*self.install_targets) + inspect.getmodule(self.pkg).make(*self.install_targets) elif self.generator == "Ninja": - inspect.getmodule(self).ninja(*self.install_targets) + inspect.getmodule(self.pkg).ninja(*self.install_targets) - run_after("build")(PackageBase._run_default_build_time_test_callbacks) + spack.builder.run_after("build")(execute_build_time_tests) def check(self): - """Searches the CMake-generated Makefile for the target ``test`` - and runs it if found. + """Search the CMake-generated files for the targets ``test`` and ``check``, + and runs them if found. 
""" - with working_dir(self.build_directory): + with fs.working_dir(self.build_directory): if self.generator == "Unix Makefiles": - self._if_make_target_execute("test", jobs_env="CTEST_PARALLEL_LEVEL") - self._if_make_target_execute("check") + self.pkg._if_make_target_execute("test", jobs_env="CTEST_PARALLEL_LEVEL") + self.pkg._if_make_target_execute("check") elif self.generator == "Ninja": - self._if_ninja_target_execute("test", jobs_env="CTEST_PARALLEL_LEVEL") - self._if_ninja_target_execute("check") - - # Check that self.prefix is there after installation - run_after("install")(PackageBase.sanity_check_prefix) + self.pkg._if_ninja_target_execute("test", jobs_env="CTEST_PARALLEL_LEVEL") + self.pkg._if_ninja_target_execute("check") diff --git a/lib/spack/spack/build_systems/generic.py b/lib/spack/spack/build_systems/generic.py new file mode 100644 index 00000000000..628af6f2d41 --- /dev/null +++ b/lib/spack/spack/build_systems/generic.py @@ -0,0 +1,44 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) +from typing import Tuple + +import spack.builder +import spack.directives +import spack.package_base + +from ._checks import BaseBuilder, apply_macos_rpath_fixups + + +class Package(spack.package_base.PackageBase): + """General purpose class with a single ``install`` phase that needs to be + coded by packagers. + """ + + #: This attribute is used in UI queries that require to know which + #: build-system class we are using + build_system_class = "Package" + #: Legacy buildsystem attribute used to deserialize and install old specs + legacy_buildsystem = "generic" + + spack.directives.build_system("generic") + + +@spack.builder.builder("generic") +class GenericBuilder(BaseBuilder): + """A builder for a generic build system, that require packagers + to implement an "install" phase. 
+ """ + + #: A generic package has only the "install" phase + phases = ("install",) + + #: Names associated with package methods in the old build-system format + legacy_methods = () # type: Tuple[str, ...] + + #: Names associated with package attributes in the old build-system format + legacy_attributes = ("archive_files",) # type: Tuple[str, ...] + + # On macOS, force rpaths for shared library IDs and remove duplicate rpaths + spack.builder.run_after("install", when="platform=darwin")(apply_macos_rpath_fixups) diff --git a/lib/spack/spack/build_systems/intel.py b/lib/spack/spack/build_systems/intel.py index 133b5030dea..4abed70f21d 100644 --- a/lib/spack/spack/build_systems/intel.py +++ b/lib/spack/spack/build_systems/intel.py @@ -2,8 +2,6 @@ # Spack Project Developers. See the top-level COPYRIGHT file for details. # # SPDX-License-Identifier: (Apache-2.0 OR MIT) - - import glob import inspect import os @@ -26,12 +24,14 @@ import spack.error from spack.build_environment import dso_suffix -from spack.package_base import InstallError, PackageBase, run_after +from spack.package_base import InstallError from spack.util.environment import EnvironmentModifications from spack.util.executable import Executable from spack.util.prefix import Prefix from spack.version import Version, ver +from .generic import Package + # A couple of utility functions that might be useful in general. If so, they # should really be defined elsewhere, unless deemed heretical. # (Or na"ive on my part). @@ -86,7 +86,7 @@ def _expand_fields(s): return s -class IntelPackage(PackageBase): +class IntelPackage(Package): """Specialized class for licensed Intel software. This class provides two phases that can be overridden: @@ -99,9 +99,6 @@ class IntelPackage(PackageBase): to set the appropriate environment variables. 
""" - #: Phases of an Intel package - phases = ["configure", "install"] - #: This attribute is used in UI queries that need to know the build #: system base class build_system_class = "IntelPackage" @@ -1184,12 +1181,13 @@ def _determine_license_type(self): debug_print(license_type) return license_type - def configure(self, spec, prefix): + @spack.builder.run_before("install") + def configure(self): """Generates the silent.cfg file to pass to installer.sh. See https://software.intel.com/en-us/articles/configuration-file-format """ - + prefix = self.prefix # Both tokens AND values of the configuration file are validated during # the run of the underlying binary installer. Any unknown token or # unacceptable value will cause that installer to fail. Notably, this @@ -1270,7 +1268,7 @@ def install(self, spec, prefix): for f in glob.glob("%s/intel*log" % tmpdir): install(f, dst) - @run_after("install") + @spack.builder.run_after("install") def validate_install(self): # Sometimes the installer exits with an error but doesn't pass a # non-zero exit code to spack. 
Check for the existence of a 'bin' @@ -1278,7 +1276,7 @@ def validate_install(self): if not os.path.exists(self.prefix.bin): raise InstallError("The installer has failed to install anything.") - @run_after("install") + @spack.builder.run_after("install") def configure_rpath(self): if "+rpath" not in self.spec: return @@ -1296,7 +1294,7 @@ def configure_rpath(self): with open(compiler_cfg, "w") as fh: fh.write("-Xlinker -rpath={0}\n".format(compilers_lib_dir)) - @run_after("install") + @spack.builder.run_after("install") def configure_auto_dispatch(self): if self._has_compilers: if "auto_dispatch=none" in self.spec: @@ -1320,7 +1318,7 @@ def configure_auto_dispatch(self): with open(compiler_cfg, "a") as fh: fh.write("-ax{0}\n".format(",".join(ad))) - @run_after("install") + @spack.builder.run_after("install") def filter_compiler_wrappers(self): if ("+mpi" in self.spec or self.provides("mpi")) and "~newdtags" in self.spec: bin_dir = self.component_bin_dir("mpi") @@ -1328,7 +1326,7 @@ def filter_compiler_wrappers(self): f = os.path.join(bin_dir, f) filter_file("-Xlinker --enable-new-dtags", " ", f, string=True) - @run_after("install") + @spack.builder.run_after("install") def uninstall_ism(self): # The "Intel(R) Software Improvement Program" [ahem] gets installed, # apparently regardless of PHONEHOME_SEND_USAGE_DATA. @@ -1360,7 +1358,7 @@ def base_lib_dir(self): debug_print(d) return d - @run_after("install") + @spack.builder.run_after("install") def modify_LLVMgold_rpath(self): """Add libimf.so and other required libraries to the RUNPATH of LLVMgold.so. 
@@ -1391,6 +1389,3 @@ def modify_LLVMgold_rpath(self): ] ) patchelf("--set-rpath", rpath, lib) - - # Check that self.prefix is there after installation - run_after("install")(PackageBase.sanity_check_prefix) diff --git a/lib/spack/spack/build_systems/lua.py b/lib/spack/spack/build_systems/lua.py index c0d4321097c..722e9153f98 100644 --- a/lib/spack/spack/build_systems/lua.py +++ b/lib/spack/spack/build_systems/lua.py @@ -2,59 +2,82 @@ # Spack Project Developers. See the top-level COPYRIGHT file for details. # # SPDX-License-Identifier: (Apache-2.0 OR MIT) - - import os from llnl.util.filesystem import find -from spack.directives import depends_on, extends +import spack.builder +import spack.package_base +import spack.util.executable +from spack.directives import build_system, depends_on, extends from spack.multimethod import when -from spack.package_base import PackageBase -from spack.util.executable import Executable -class LuaPackage(PackageBase): +class LuaPackage(spack.package_base.PackageBase): """Specialized class for lua packages""" - phases = ["unpack", "generate_luarocks_config", "preprocess", "install"] #: This attribute is used in UI queries that need to know the build #: system base class build_system_class = "LuaPackage" - list_depth = 1 # LuaRocks requires at least one level of spidering to find versions - depends_on("lua-lang") - extends("lua", when="^lua") - with when("^lua-luajit"): - extends("lua-luajit") - depends_on("luajit") - depends_on("lua-luajit+lualinks") - with when("^lua-luajit-openresty"): - extends("lua-luajit-openresty") - depends_on("luajit") - depends_on("lua-luajit-openresty+lualinks") + #: Legacy buildsystem attribute used to deserialize and install old specs + legacy_buildsystem = "lua" - def unpack(self, spec, prefix): - if os.path.splitext(self.stage.archive_file)[1] == ".rock": - directory = self.luarocks("unpack", self.stage.archive_file, output=str) + list_depth = 1 # LuaRocks requires at least one level of spidering to find 
versions + + build_system("lua") + + with when("build_system=lua"): + depends_on("lua-lang") + extends("lua", when="^lua") + with when("^lua-luajit"): + extends("lua-luajit") + depends_on("luajit") + depends_on("lua-luajit+lualinks") + with when("^lua-luajit-openresty"): + extends("lua-luajit-openresty") + depends_on("luajit") + depends_on("lua-luajit-openresty+lualinks") + + @property + def lua(self): + return spack.util.executable.Executable(self.spec["lua-lang"].prefix.bin.lua) + + @property + def luarocks(self): + lr = spack.util.executable.Executable(self.spec["lua-lang"].prefix.bin.luarocks) + return lr + + +@spack.builder.builder("lua") +class LuaBuilder(spack.builder.Builder): + phases = ("unpack", "generate_luarocks_config", "preprocess", "install") + + #: Names associated with package methods in the old build-system format + legacy_methods = ("luarocks_args",) + + #: Names associated with package attributes in the old build-system format + legacy_attributes = () + + def unpack(self, pkg, spec, prefix): + if os.path.splitext(pkg.stage.archive_file)[1] == ".rock": + directory = pkg.luarocks("unpack", pkg.stage.archive_file, output=str) dirlines = directory.split("\n") # TODO: figure out how to scope this better os.chdir(dirlines[2]) - def _generate_tree_line(self, name, prefix): + @staticmethod + def _generate_tree_line(name, prefix): return """{{ name = "{name}", root = "{prefix}" }};""".format( name=name, prefix=prefix, ) - def _luarocks_config_path(self): - return os.path.join(self.stage.source_path, "spack_luarocks.lua") - - def generate_luarocks_config(self, spec, prefix): - spec = self.spec + def generate_luarocks_config(self, pkg, spec, prefix): + spec = self.pkg.spec table_entries = [] - for d in spec.traverse(deptypes=("build", "run"), deptype_query="run"): - if d.package.extends(self.extendee_spec): + for d in spec.traverse(deptype=("build", "run")): + if d.package.extends(self.pkg.extendee_spec): 
table_entries.append(self._generate_tree_line(d.name, d.prefix)) path = self._luarocks_config_path() @@ -71,30 +94,24 @@ def generate_luarocks_config(self, spec, prefix): ) return path - def setup_build_environment(self, env): - env.set("LUAROCKS_CONFIG", self._luarocks_config_path()) - - def preprocess(self, spec, prefix): + def preprocess(self, pkg, spec, prefix): """Override this to preprocess source before building with luarocks""" pass - @property - def lua(self): - return Executable(self.spec["lua-lang"].prefix.bin.lua) - - @property - def luarocks(self): - lr = Executable(self.spec["lua-lang"].prefix.bin.luarocks) - return lr - def luarocks_args(self): return [] - def install(self, spec, prefix): + def install(self, pkg, spec, prefix): rock = "." specs = find(".", "*.rockspec", recursive=False) if specs: rock = specs[0] rocks_args = self.luarocks_args() rocks_args.append(rock) - self.luarocks("--tree=" + prefix, "make", *rocks_args) + self.pkg.luarocks("--tree=" + prefix, "make", *rocks_args) + + def _luarocks_config_path(self): + return os.path.join(self.pkg.stage.source_path, "spack_luarocks.lua") + + def setup_build_environment(self, env): + env.set("LUAROCKS_CONFIG", self._luarocks_config_path()) diff --git a/lib/spack/spack/build_systems/makefile.py b/lib/spack/spack/build_systems/makefile.py index e2bb8c0c26c..b826144258b 100644 --- a/lib/spack/spack/build_systems/makefile.py +++ b/lib/spack/spack/build_systems/makefile.py @@ -2,62 +2,85 @@ # Spack Project Developers. See the top-level COPYRIGHT file for details. 
# # SPDX-License-Identifier: (Apache-2.0 OR MIT) - - import inspect from typing import List # novm -import llnl.util.tty as tty -from llnl.util.filesystem import working_dir +import llnl.util.filesystem as fs -from spack.directives import conflicts -from spack.package_base import PackageBase, run_after +import spack.builder +import spack.package_base +from spack.directives import build_system, conflicts + +from ._checks import ( + BaseBuilder, + apply_macos_rpath_fixups, + execute_build_time_tests, + execute_install_time_tests, +) -class MakefilePackage(PackageBase): - """Specialized class for packages that are built using editable Makefiles +class MakefilePackage(spack.package_base.PackageBase): + """Specialized class for packages built using a Makefiles.""" - This class provides three phases that can be overridden: + #: This attribute is used in UI queries that need to know the build + #: system base class + build_system_class = "MakefilePackage" + #: Legacy buildsystem attribute used to deserialize and install old specs + legacy_buildsystem = "makefile" - 1. :py:meth:`~.MakefilePackage.edit` - 2. :py:meth:`~.MakefilePackage.build` - 3. :py:meth:`~.MakefilePackage.install` + build_system("makefile") + conflicts("platform=windows", when="build_system=makefile") + + +@spack.builder.builder("makefile") +class MakefileBuilder(BaseBuilder): + """The Makefile builder encodes the most common way of building software with + Makefiles. It has three phases that can be overridden, if need be: + + 1. :py:meth:`~.MakefileBuilder.edit` + 2. :py:meth:`~.MakefileBuilder.build` + 3. :py:meth:`~.MakefileBuilder.install` + + It is usually necessary to override the :py:meth:`~.MakefileBuilder.edit` + phase (which is by default a no-op), while the other two have sensible defaults. - It is usually necessary to override the :py:meth:`~.MakefilePackage.edit` - phase, while :py:meth:`~.MakefilePackage.build` and - :py:meth:`~.MakefilePackage.install` have sensible defaults. 
For a finer tuning you may override: +-----------------------------------------------+--------------------+ | **Method** | **Purpose** | +===============================================+====================+ - | :py:attr:`~.MakefilePackage.build_targets` | Specify ``make`` | + | :py:attr:`~.MakefileBuilder.build_targets` | Specify ``make`` | | | targets for the | | | build phase | +-----------------------------------------------+--------------------+ - | :py:attr:`~.MakefilePackage.install_targets` | Specify ``make`` | + | :py:attr:`~.MakefileBuilder.install_targets` | Specify ``make`` | | | targets for the | | | install phase | +-----------------------------------------------+--------------------+ - | :py:meth:`~.MakefilePackage.build_directory` | Directory where the| + | :py:meth:`~.MakefileBuilder.build_directory` | Directory where the| | | Makefile is located| +-----------------------------------------------+--------------------+ """ - #: Phases of a package that is built with an hand-written Makefile - phases = ["edit", "build", "install"] - #: This attribute is used in UI queries that need to know the build - #: system base class - build_system_class = "MakefilePackage" + phases = ("edit", "build", "install") - #: Targets for ``make`` during the :py:meth:`~.MakefilePackage.build` - #: phase + #: Names associated with package methods in the old build-system format + legacy_methods = ("check", "installcheck") + + #: Names associated with package attributes in the old build-system format + legacy_attributes = ( + "build_targets", + "install_targets", + "build_time_test_callbacks", + "install_time_test_callbacks", + "build_directory", + ) + + #: Targets for ``make`` during the :py:meth:`~.MakefileBuilder.build` phase build_targets = [] # type: List[str] - #: Targets for ``make`` during the :py:meth:`~.MakefilePackage.install` - #: phase + #: Targets for ``make`` during the :py:meth:`~.MakefileBuilder.install` phase install_targets = ["install"] - 
conflicts("platform=windows") #: Callback names for build-time test build_time_test_callbacks = ["check"] @@ -66,53 +89,39 @@ class MakefilePackage(PackageBase): @property def build_directory(self): - """Returns the directory containing the main Makefile + """Return the directory containing the main Makefile.""" + return self.pkg.stage.source_path - :return: build directory - """ - return self.stage.source_path + def edit(self, pkg, spec, prefix): + """Edit the Makefile before calling make. The default is a no-op.""" + pass - def edit(self, spec, prefix): - """Edits the Makefile before calling make. This phase cannot - be defaulted. - """ - tty.msg("Using default implementation: skipping edit phase.") + def build(self, pkg, spec, prefix): + """Run "make" on the build targets specified by the builder.""" + with fs.working_dir(self.build_directory): + inspect.getmodule(self.pkg).make(*self.build_targets) - def build(self, spec, prefix): - """Calls make, passing :py:attr:`~.MakefilePackage.build_targets` - as targets. - """ - with working_dir(self.build_directory): - inspect.getmodule(self).make(*self.build_targets) + def install(self, pkg, spec, prefix): + """Run "make" on the install targets specified by the builder.""" + with fs.working_dir(self.build_directory): + inspect.getmodule(self.pkg).make(*self.install_targets) - def install(self, spec, prefix): - """Calls make, passing :py:attr:`~.MakefilePackage.install_targets` - as targets. - """ - with working_dir(self.build_directory): - inspect.getmodule(self).make(*self.install_targets) - - run_after("build")(PackageBase._run_default_build_time_test_callbacks) + spack.builder.run_after("build")(execute_build_time_tests) def check(self): - """Searches the Makefile for targets ``test`` and ``check`` - and runs them if found. 
- """ - with working_dir(self.build_directory): - self._if_make_target_execute("test") - self._if_make_target_execute("check") + """Run "make" on the ``test`` and ``check`` targets, if found.""" + with fs.working_dir(self.build_directory): + self.pkg._if_make_target_execute("test") + self.pkg._if_make_target_execute("check") - run_after("install")(PackageBase._run_default_install_time_test_callbacks) + spack.builder.run_after("install")(execute_install_time_tests) def installcheck(self): """Searches the Makefile for an ``installcheck`` target and runs it if found. """ - with working_dir(self.build_directory): - self._if_make_target_execute("installcheck") - - # Check that self.prefix is there after installation - run_after("install")(PackageBase.sanity_check_prefix) + with fs.working_dir(self.build_directory): + self.pkg._if_make_target_execute("installcheck") # On macOS, force rpaths for shared library IDs and remove duplicate rpaths - run_after("install")(PackageBase.apply_macos_rpath_fixups) + spack.builder.run_after("install", when="platform=darwin")(apply_macos_rpath_fixups) diff --git a/lib/spack/spack/build_systems/maven.py b/lib/spack/spack/build_systems/maven.py index 1ff1882e134..66680b5b6cf 100644 --- a/lib/spack/spack/build_systems/maven.py +++ b/lib/spack/spack/build_systems/maven.py @@ -2,60 +2,73 @@ # Spack Project Developers. See the top-level COPYRIGHT file for details. 
# # SPDX-License-Identifier: (Apache-2.0 OR MIT) +import llnl.util.filesystem as fs - -from llnl.util.filesystem import install_tree, working_dir - -from spack.directives import depends_on -from spack.package_base import PackageBase, run_after +import spack.builder +import spack.package_base +from spack.directives import build_system, depends_on +from spack.multimethod import when from spack.util.executable import which +from ._checks import BaseBuilder -class MavenPackage(PackageBase): + +class MavenPackage(spack.package_base.PackageBase): """Specialized class for packages that are built using the Maven build system. See https://maven.apache.org/index.html for more information. - - This class provides the following phases that can be overridden: - - * build - * install """ - # Default phases - phases = ["build", "install"] - # To be used in UI queries that require to know which # build-system class we are using build_system_class = "MavenPackage" - depends_on("java", type=("build", "run")) - depends_on("maven", type="build") + #: Legacy buildsystem attribute used to deserialize and install old specs + legacy_buildsystem = "maven" + + build_system("maven") + + with when("build_system=maven"): + depends_on("java", type=("build", "run")) + depends_on("maven", type="build") + + +@spack.builder.builder("maven") +class MavenBuilder(BaseBuilder): + """The Maven builder encodes the default way to build software with Maven. + It has two phases that can be overridden, if need be: + + 1. :py:meth:`~.MavenBuilder.build` + 2. 
:py:meth:`~.MavenBuilder.install` + """ + + phases = ("build", "install") + + #: Names associated with package methods in the old build-system format + legacy_methods = ("build_args",) + + #: Names associated with package attributes in the old build-system format + legacy_attributes = ("build_directory",) @property def build_directory(self): """The directory containing the ``pom.xml`` file.""" - return self.stage.source_path + return self.pkg.stage.source_path def build_args(self): """List of args to pass to build phase.""" return [] - def build(self, spec, prefix): + def build(self, pkg, spec, prefix): """Compile code and package into a JAR file.""" - - with working_dir(self.build_directory): + with fs.working_dir(self.build_directory): mvn = which("mvn") - if self.run_tests: + if self.pkg.run_tests: mvn("verify", *self.build_args()) else: mvn("package", "-DskipTests", *self.build_args()) - def install(self, spec, prefix): + def install(self, pkg, spec, prefix): """Copy to installation prefix.""" - - with working_dir(self.build_directory): - install_tree(".", prefix) - - # Check that self.prefix is there after installation - run_after("install")(PackageBase.sanity_check_prefix) + with fs.working_dir(self.build_directory): + fs.install_tree(".", prefix) diff --git a/lib/spack/spack/build_systems/meson.py b/lib/spack/spack/build_systems/meson.py index 6f5f2d6cbb8..710eecc080e 100644 --- a/lib/spack/spack/build_systems/meson.py +++ b/lib/spack/spack/build_systems/meson.py @@ -2,108 +2,131 @@ # Spack Project Developers. See the top-level COPYRIGHT file for details. 
# # SPDX-License-Identifier: (Apache-2.0 OR MIT) - - import inspect import os from typing import List # novm -from llnl.util.filesystem import working_dir +import llnl.util.filesystem as fs -from spack.directives import depends_on, variant -from spack.package_base import PackageBase, run_after +import spack.builder +import spack.package_base +from spack.directives import build_system, depends_on, variant +from spack.multimethod import when + +from ._checks import BaseBuilder, execute_build_time_tests -class MesonPackage(PackageBase): - """Specialized class for packages built using Meson +class MesonPackage(spack.package_base.PackageBase): + """Specialized class for packages built using Meson. For more information + on the Meson build system, see https://mesonbuild.com/ + """ - For more information on the Meson build system, see: - https://mesonbuild.com/ + #: This attribute is used in UI queries that need to know the build + #: system base class + build_system_class = "MesonPackage" - This class provides three phases that can be overridden: + #: Legacy buildsystem attribute used to deserialize and install old specs + legacy_buildsystem = "meson" - 1. :py:meth:`~.MesonPackage.meson` - 2. :py:meth:`~.MesonPackage.build` - 3. 
:py:meth:`~.MesonPackage.install` + build_system("meson") + + with when("build_system=meson"): + variant( + "buildtype", + default="debugoptimized", + description="Meson build type", + values=("plain", "debug", "debugoptimized", "release", "minsize"), + ) + variant( + "default_library", + default="shared", + values=("shared", "static"), + multi=True, + description="Build shared libs, static libs or both", + ) + variant("strip", default=False, description="Strip targets on install") + depends_on("meson", type="build") + depends_on("ninja", type="build") + + def flags_to_build_system_args(self, flags): + """Produces a list of all command line arguments to pass the specified + compiler flags to meson.""" + # Has to be dynamic attribute due to caching + setattr(self, "meson_flag_args", []) + + +@spack.builder.builder("meson") +class MesonBuilder(BaseBuilder): + """The Meson builder encodes the default way to build software with Meson. + The builder has three phases that can be overridden, if need be: + + 1. :py:meth:`~.MesonBuilder.meson` + 2. :py:meth:`~.MesonBuilder.build` + 3. :py:meth:`~.MesonBuilder.install` They all have sensible defaults and for many packages the only thing - necessary will be to override :py:meth:`~.MesonPackage.meson_args`. + necessary will be to override :py:meth:`~.MesonBuilder.meson_args`. 
+ For a finer tuning you may also override: +-----------------------------------------------+--------------------+ | **Method** | **Purpose** | +===============================================+====================+ - | :py:meth:`~.MesonPackage.root_mesonlists_dir` | Location of the | + | :py:meth:`~.MesonBuilder.root_mesonlists_dir` | Location of the | | | root MesonLists.txt| +-----------------------------------------------+--------------------+ - | :py:meth:`~.MesonPackage.build_directory` | Directory where to | + | :py:meth:`~.MesonBuilder.build_directory` | Directory where to | | | build the package | +-----------------------------------------------+--------------------+ - - """ - #: Phases of a Meson package - phases = ["meson", "build", "install"] - #: This attribute is used in UI queries that need to know the build - #: system base class - build_system_class = "MesonPackage" + phases = ("meson", "build", "install") + + #: Names associated with package methods in the old build-system format + legacy_methods = ("meson_args", "check") + + #: Names associated with package attributes in the old build-system format + legacy_attributes = ( + "build_targets", + "install_targets", + "build_time_test_callbacks", + "root_mesonlists_dir", + "std_meson_args", + "build_directory", + ) build_targets = [] # type: List[str] install_targets = ["install"] build_time_test_callbacks = ["check"] - variant( - "buildtype", - default="debugoptimized", - description="Meson build type", - values=("plain", "debug", "debugoptimized", "release", "minsize"), - ) - variant( - "default_library", - default="shared", - values=("shared", "static"), - multi=True, - description="Build shared libs, static libs or both", - ) - variant("strip", default=False, description="Strip targets on install") - - depends_on("meson", type="build") - depends_on("ninja", type="build") - @property def archive_files(self): """Files to archive for packages based on Meson""" - return 
[os.path.join(self.build_directory, "meson-logs/meson-log.txt")] + return [os.path.join(self.build_directory, "meson-logs", "meson-log.txt")] @property def root_mesonlists_dir(self): - """The relative path to the directory containing meson.build + """Relative path to the directory containing meson.build This path is relative to the root of the extracted tarball, not to the ``build_directory``. Defaults to the current directory. - - :return: directory containing meson.build """ - return self.stage.source_path + return self.pkg.stage.source_path @property def std_meson_args(self): - """Standard meson arguments provided as a property for - convenience of package writers - - :return: standard meson arguments + """Standard meson arguments provided as a property for convenience + of package writers. """ # standard Meson arguments - std_meson_args = MesonPackage._std_args(self) + std_meson_args = MesonBuilder.std_args(self.pkg) std_meson_args += getattr(self, "meson_flag_args", []) return std_meson_args @staticmethod - def _std_args(pkg): - """Computes the standard meson arguments for a generic package""" - + def std_args(pkg): + """Standard meson arguments for a generic package.""" try: build_type = pkg.spec.variants["buildtype"].value except KeyError: @@ -119,44 +142,33 @@ def _std_args(pkg): default_library = "shared" args = [ - "--prefix={0}".format(pkg.prefix), + "-Dprefix={0}".format(pkg.prefix), # If we do not specify libdir explicitly, Meson chooses something # like lib/x86_64-linux-gnu, which causes problems when trying to # find libraries and pkg-config files. 
# See https://github.com/mesonbuild/meson/issues/2197 - "--libdir={0}".format(pkg.prefix.lib), + "-Dlibdir={0}".format(pkg.prefix.lib), "-Dbuildtype={0}".format(build_type), "-Dstrip={0}".format(strip), "-Ddefault_library={0}".format(default_library), + # Do not automatically download and install dependencies + "-Dwrap_mode=nodownload", ] return args - def flags_to_build_system_args(self, flags): - """Produces a list of all command line arguments to pass the specified - compiler flags to meson.""" - # Has to be dynamic attribute due to caching - setattr(self, "meson_flag_args", []) - @property def build_dirname(self): - """Returns the directory name to use when building the package - - :return: name of the subdirectory for building the package - """ - return "spack-build-%s" % self.spec.dag_hash(7) + """Returns the directory name to use when building the package.""" + return "spack-build-{}".format(self.spec.dag_hash(7)) @property def build_directory(self): - """Returns the directory to use when building the package - - :return: directory where to build the package - """ - return os.path.join(self.stage.path, self.build_dirname) + """Directory to use when building the package.""" + return os.path.join(self.pkg.stage.path, self.build_dirname) def meson_args(self): - """Produces a list containing all the arguments that must be passed to - meson, except: + """List of arguments that must be passed to meson, except: * ``--prefix`` * ``--libdir`` @@ -165,40 +177,33 @@ def meson_args(self): * ``--default_library`` which will be set automatically. 
- - :return: list of arguments for meson """ return [] - def meson(self, spec, prefix): - """Runs ``meson`` in the build directory""" + def meson(self, pkg, spec, prefix): + """Run ``meson`` in the build directory""" options = [os.path.abspath(self.root_mesonlists_dir)] options += self.std_meson_args options += self.meson_args() - with working_dir(self.build_directory, create=True): - inspect.getmodule(self).meson(*options) + with fs.working_dir(self.build_directory, create=True): + inspect.getmodule(self.pkg).meson(*options) - def build(self, spec, prefix): + def build(self, pkg, spec, prefix): """Make the build targets""" options = ["-v"] options += self.build_targets - with working_dir(self.build_directory): - inspect.getmodule(self).ninja(*options) + with fs.working_dir(self.build_directory): + inspect.getmodule(self.pkg).ninja(*options) - def install(self, spec, prefix): + def install(self, pkg, spec, prefix): """Make the install targets""" - with working_dir(self.build_directory): - inspect.getmodule(self).ninja(*self.install_targets) + with fs.working_dir(self.build_directory): + inspect.getmodule(self.pkg).ninja(*self.install_targets) - run_after("build")(PackageBase._run_default_build_time_test_callbacks) + spack.builder.run_after("build")(execute_build_time_tests) def check(self): - """Searches the Meson-generated file for the target ``test`` - and runs it if found. 
- """ - with working_dir(self.build_directory): + """Search Meson-generated files for the target ``test`` and run it if found.""" + with fs.working_dir(self.build_directory): self._if_ninja_target_execute("test") self._if_ninja_target_execute("check") - - # Check that self.prefix is there after installation - run_after("install")(PackageBase.sanity_check_prefix) diff --git a/lib/spack/spack/build_systems/nmake.py b/lib/spack/spack/build_systems/nmake.py new file mode 100644 index 00000000000..bf368951604 --- /dev/null +++ b/lib/spack/spack/build_systems/nmake.py @@ -0,0 +1,102 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) +import inspect +from typing import List # novm + +import llnl.util.filesystem as fs + +import spack.builder +import spack.package_base +from spack.directives import build_system, conflicts + +from ._checks import BaseBuilder + + +class NMakePackage(spack.package_base.PackageBase): + """Specialized class for packages built using a Makefiles.""" + + #: This attribute is used in UI queries that need to know the build + #: system base class + build_system_class = "NmakePackage" + + build_system("nmake") + conflicts("platform=linux", when="build_system=nmake") + conflicts("platform=darwin", when="build_system=nmake") + conflicts("platform=cray", when="build_system=nmake") + + +@spack.builder.builder("nmake") +class NMakeBuilder(BaseBuilder): + """The NMake builder encodes the most common way of building software with + NMake on Windows. It has three phases that can be overridden, if need be: + + 1. :py:meth:`~.NMakeBuilder.edit` + 2. :py:meth:`~.NMakeBuilder.build` + 3. :py:meth:`~.NMakeBuilder.install` + + It is usually necessary to override the :py:meth:`~.NMakeBuilder.edit` + phase (which is by default a no-op), while the other two have sensible defaults. 
+ + For a finer tuning you may override: + + +--------------------------------------------+--------------------+ + | **Method** | **Purpose** | + +============================================+====================+ + | :py:attr:`~.NMakeBuilder.build_targets` | Specify ``nmake`` | + | | targets for the | + | | build phase | + +--------------------------------------------+--------------------+ + | :py:attr:`~.NMakeBuilder.install_targets` | Specify ``nmake`` | + | | targets for the | + | | install phase | + +--------------------------------------------+--------------------+ + | :py:meth:`~.NMakeBuilder.build_directory` | Directory where the| + | | Makefile is located| + +--------------------------------------------+--------------------+ + """ + + phases = ("edit", "build", "install") + + #: Names associated with package methods in the old build-system format + legacy_methods = ("check", "installcheck") + + #: Names associated with package attributes in the old build-system format + legacy_attributes = ( + "build_targets", + "install_targets", + "build_time_test_callbacks", + "install_time_test_callbacks", + "build_directory", + ) + + #: Targets for ``make`` during the :py:meth:`~.NMakeBuilder.build` phase + build_targets = [] # type: List[str] + #: Targets for ``make`` during the :py:meth:`~.NMakeBuilder.install` phase + install_targets = ["install"] + + #: Callback names for build-time test + build_time_test_callbacks = ["check"] + + #: Callback names for install-time test + install_time_test_callbacks = ["installcheck"] + + @property + def build_directory(self): + """Return the directory containing the main Makefile.""" + return self.pkg.stage.source_path + + def edit(self, pkg, spec, prefix): + """Edit the Makefile before calling make. 
The default is a no-op.""" + pass + + def build(self, pkg, spec, prefix): + """Run "make" on the build targets specified by the builder.""" + with fs.working_dir(self.build_directory): + inspect.getmodule(self.pkg).nmake(*self.build_targets) + + def install(self, pkg, spec, prefix): + """Run "make" on the install targets specified by the builder.""" + with fs.working_dir(self.build_directory): + inspect.getmodule(self.pkg).nmake(*self.install_targets) diff --git a/lib/spack/spack/build_systems/octave.py b/lib/spack/spack/build_systems/octave.py index 9916c319b0b..5b2456f098b 100644 --- a/lib/spack/spack/build_systems/octave.py +++ b/lib/spack/spack/build_systems/octave.py @@ -2,51 +2,62 @@ # Spack Project Developers. See the top-level COPYRIGHT file for details. # # SPDX-License-Identifier: (Apache-2.0 OR MIT) - import inspect -from spack.directives import extends -from spack.package_base import PackageBase, run_after +import spack.builder +import spack.package_base +from spack.directives import build_system, extends +from spack.multimethod import when + +from ._checks import BaseBuilder -class OctavePackage(PackageBase): +class OctavePackage(spack.package_base.PackageBase): """Specialized class for Octave packages. See https://www.gnu.org/software/octave/doc/v4.2.0/Installing-and-Removing-Packages.html for more information. - - This class provides the following phases that can be overridden: - - 1. 
:py:meth:`~.OctavePackage.install` - """ - # Default phases - phases = ["install"] - # To be used in UI queries that require to know which # build-system class we are using build_system_class = "OctavePackage" + #: Legacy buildsystem attribute used to deserialize and install old specs + legacy_buildsystem = "octave" - extends("octave") + build_system("octave") + + with when("build_system=octave"): + extends("octave") + + +@spack.builder.builder("octave") +class OctaveBuilder(BaseBuilder): + """The octave builder provides the following phases that can be overridden: + + 1. :py:meth:`~.OctaveBuilder.install` + """ + + phases = ("install",) + + #: Names associated with package methods in the old build-system format + legacy_methods = () + + #: Names associated with package attributes in the old build-system format + legacy_attributes = () + + def install(self, pkg, spec, prefix): + """Install the package from the archive file""" + inspect.getmodule(self.pkg).octave( + "--quiet", + "--norc", + "--built-in-docstrings-file=/dev/null", + "--texi-macros-file=/dev/null", + "--eval", + "pkg prefix %s; pkg install %s" % (prefix, self.pkg.stage.archive_file), + ) def setup_build_environment(self, env): # octave does not like those environment variables to be set: env.unset("CC") env.unset("CXX") env.unset("FC") - - def install(self, spec, prefix): - """Install the package from the archive file""" - inspect.getmodule(self).octave( - "--quiet", - "--norc", - "--built-in-docstrings-file=/dev/null", - "--texi-macros-file=/dev/null", - "--eval", - "pkg prefix %s; pkg install %s" % (prefix, self.stage.archive_file), - ) - - # Testing - - # Check that self.prefix is there after installation - run_after("install")(PackageBase.sanity_check_prefix) diff --git a/lib/spack/spack/build_systems/oneapi.py b/lib/spack/spack/build_systems/oneapi.py index 669c66fe8fc..1cb79b99015 100644 --- a/lib/spack/spack/build_systems/oneapi.py +++ b/lib/spack/spack/build_systems/oneapi.py @@ -2,11 +2,7 @@ 
# Spack Project Developers. See the top-level COPYRIGHT file for details. # # SPDX-License-Identifier: (Apache-2.0 OR MIT) - -"""Common utilities for managing intel oneapi packages. - -""" - +"""Common utilities for managing intel oneapi packages.""" import getpass import platform import shutil @@ -14,18 +10,17 @@ from llnl.util.filesystem import find_headers, find_libraries, join_path -from spack.package_base import Package from spack.util.environment import EnvironmentModifications from spack.util.executable import Executable +from .generic import Package + class IntelOneApiPackage(Package): """Base class for Intel oneAPI packages.""" homepage = "https://software.intel.com/oneapi" - phases = ["install"] - # oneAPI license does not allow mirroring outside of the # organization (e.g. University/Company). redistribute_source = False diff --git a/lib/spack/spack/build_systems/perl.py b/lib/spack/spack/build_systems/perl.py index 1f354beece0..a100b89bfd7 100644 --- a/lib/spack/spack/build_systems/perl.py +++ b/lib/spack/spack/build_systems/perl.py @@ -2,73 +2,87 @@ # Spack Project Developers. See the top-level COPYRIGHT file for details. # # SPDX-License-Identifier: (Apache-2.0 OR MIT) - - import inspect import os from llnl.util.filesystem import filter_file -from spack.directives import extends -from spack.package_base import PackageBase, run_after +import spack.builder +import spack.package_base +from spack.directives import build_system, extends +from spack.package_base import PackageBase from spack.util.executable import Executable +from ._checks import BaseBuilder, execute_build_time_tests + class PerlPackage(PackageBase): - """Specialized class for packages that are built using Perl. 
+ """Specialized class for packages that are built using Perl.""" - This class provides four phases that can be overridden if required: + #: This attribute is used in UI queries that need to know the build + #: system base class + build_system_class = "PerlPackage" + #: Legacy buildsystem attribute used to deserialize and install old specs + legacy_buildsystem = "perl" - 1. :py:meth:`~.PerlPackage.configure` - 2. :py:meth:`~.PerlPackage.build` - 3. :py:meth:`~.PerlPackage.check` - 4. :py:meth:`~.PerlPackage.install` + build_system("perl") + + extends("perl", when="build_system=perl") + + +@spack.builder.builder("perl") +class PerlBuilder(BaseBuilder): + """The perl builder provides four phases that can be overridden, if required: + + 1. :py:meth:`~.PerlBuilder.configure` + 2. :py:meth:`~.PerlBuilder.build` + 3. :py:meth:`~.PerlBuilder.check` + 4. :py:meth:`~.PerlBuilder.install` The default methods use, in order of preference: (1) Makefile.PL, (2) Build.PL. - Some packages may need to override - :py:meth:`~.PerlPackage.configure_args`, - which produces a list of arguments for - :py:meth:`~.PerlPackage.configure`. + Some packages may need to override :py:meth:`~.PerlBuilder.configure_args`, + which produces a list of arguments for :py:meth:`~.PerlBuilder.configure`. + Arguments should not include the installation base directory. 
""" #: Phases of a Perl package - phases = ["configure", "build", "install"] + phases = ("configure", "build", "install") - #: This attribute is used in UI queries that need to know the build - #: system base class - build_system_class = "PerlPackage" + #: Names associated with package methods in the old build-system format + legacy_methods = ("configure_args", "check") + + #: Names associated with package attributes in the old build-system format + legacy_attributes = () #: Callback names for build-time test build_time_test_callbacks = ["check"] - extends("perl") - def configure_args(self): - """Produces a list containing the arguments that must be passed to - :py:meth:`~.PerlPackage.configure`. Arguments should not include - the installation base directory, which is prepended automatically. + """List of arguments passed to :py:meth:`~.PerlBuilder.configure`. - :return: list of arguments for Makefile.PL or Build.PL + Arguments should not include the installation base directory, which + is prepended automatically. """ return [] - def configure(self, spec, prefix): - """Runs Makefile.PL or Build.PL with arguments consisting of + def configure(self, pkg, spec, prefix): + """Run Makefile.PL or Build.PL with arguments consisting of an appropriate installation base directory followed by the - list returned by :py:meth:`~.PerlPackage.configure_args`. + list returned by :py:meth:`~.PerlBuilder.configure_args`. 
- :raise RuntimeError: if neither Makefile.PL or Build.PL exist + Raises: + RuntimeError: if neither Makefile.PL nor Build.PL exist """ if os.path.isfile("Makefile.PL"): self.build_method = "Makefile.PL" - self.build_executable = inspect.getmodule(self).make + self.build_executable = inspect.getmodule(self.pkg).make elif os.path.isfile("Build.PL"): self.build_method = "Build.PL" - self.build_executable = Executable(os.path.join(self.stage.source_path, "Build")) + self.build_executable = Executable(os.path.join(self.pkg.stage.source_path, "Build")) else: raise RuntimeError("Unknown build_method for perl package") @@ -78,33 +92,30 @@ def configure(self, spec, prefix): options = ["Build.PL", "--install_base", prefix] options += self.configure_args() - inspect.getmodule(self).perl(*options) + inspect.getmodule(self.pkg).perl(*options) # It is possible that the shebang in the Build script that is created from # Build.PL may be too long causing the build to fail. Patching the shebang # does not happen until after install so set '/usr/bin/env perl' here in # the Build script. 
- @run_after("configure") + @spack.builder.run_after("configure") def fix_shebang(self): if self.build_method == "Build.PL": pattern = "#!{0}".format(self.spec["perl"].command.path) repl = "#!/usr/bin/env perl" filter_file(pattern, repl, "Build", backup=False) - def build(self, spec, prefix): + def build(self, pkg, spec, prefix): """Builds a Perl package.""" self.build_executable() # Ensure that tests run after build (if requested): - run_after("build")(PackageBase._run_default_build_time_test_callbacks) + spack.builder.run_after("build")(execute_build_time_tests) def check(self): """Runs built-in tests of a Perl package.""" self.build_executable("test") - def install(self, spec, prefix): + def install(self, pkg, spec, prefix): """Installs a Perl package.""" self.build_executable("install") - - # Check that self.prefix is there after installation - run_after("install")(PackageBase.sanity_check_prefix) diff --git a/lib/spack/spack/build_systems/python.py b/lib/spack/spack/build_systems/python.py index d1cecdac639..afa36980f76 100644 --- a/lib/spack/spack/build_systems/python.py +++ b/lib/spack/spack/build_systems/python.py @@ -8,93 +8,23 @@ import shutil from typing import Optional +import llnl.util.filesystem as fs +import llnl.util.lang as lang import llnl.util.tty as tty -from llnl.util.filesystem import ( - filter_file, - find, - find_all_headers, - find_libraries, - is_nonsymlink_exe_with_shebang, - path_contains_subdirectory, - same_path, - working_dir, -) -from llnl.util.lang import classproperty, match_predicate -from spack.directives import depends_on, extends +import spack.builder +import spack.multimethod +import spack.package_base +import spack.spec +from spack.directives import build_system, depends_on, extends from spack.error import NoHeadersError, NoLibrariesError, SpecError -from spack.package_base import PackageBase, run_after from spack.version import Version +from ._checks import BaseBuilder, execute_install_time_tests -class 
PythonPackage(PackageBase): - """Specialized class for packages that are built using pip.""" - #: Package name, version, and extension on PyPI - pypi = None # type: Optional[str] - - maintainers = ["adamjstewart", "pradyunsg"] - - # Default phases - phases = ["install"] - - # To be used in UI queries that require to know which - # build-system class we are using - build_system_class = "PythonPackage" - - #: Callback names for install-time test - install_time_test_callbacks = ["test"] - - extends("python") - depends_on("py-pip", type="build") - # FIXME: technically wheel is only needed when building from source, not when - # installing a downloaded wheel, but I don't want to add wheel as a dep to every - # package manually - depends_on("py-wheel", type="build") - - py_namespace = None # type: Optional[str] - - @staticmethod - def _std_args(cls): - return [ - # Verbose - "-vvv", - # Disable prompting for input - "--no-input", - # Disable the cache - "--no-cache-dir", - # Don't check to see if pip is up-to-date - "--disable-pip-version-check", - # Install packages - "install", - # Don't install package dependencies - "--no-deps", - # Overwrite existing packages - "--ignore-installed", - # Use env vars like PYTHONPATH - "--no-build-isolation", - # Don't warn that prefix.bin is not in PATH - "--no-warn-script-location", - # Ignore the PyPI package index - "--no-index", - ] - - @classproperty - def homepage(cls): - if cls.pypi: - name = cls.pypi.split("/")[0] - return "https://pypi.org/project/" + name + "/" - - @classproperty - def url(cls): - if cls.pypi: - return "https://files.pythonhosted.org/packages/source/" + cls.pypi[0] + "/" + cls.pypi - - @classproperty - def list_url(cls): - if cls.pypi: - name = cls.pypi.split("/")[0] - return "https://pypi.org/simple/" + name + "/" +class PythonExtension(spack.package_base.PackageBase): + maintainers = ["adamjstewart"] @property def import_modules(self): @@ -124,7 +54,7 @@ def import_modules(self): # Some Python libraries 
are packages: collections of modules # distributed in directories containing __init__.py files - for path in find(root, "__init__.py", recursive=True): + for path in fs.find(root, "__init__.py", recursive=True): modules.append( path.replace(root + os.sep, "", 1) .replace(os.sep + "__init__.py", "") @@ -133,7 +63,7 @@ def import_modules(self): # Some Python libraries are modules: individual *.py files # found in the site-packages directory - for path in find(root, "*.py", recursive=False): + for path in fs.find(root, "*.py", recursive=False): modules.append( path.replace(root + os.sep, "", 1).replace(".py", "").replace("/", ".") ) @@ -160,6 +90,229 @@ def skip_modules(self): """ return [] + def view_file_conflicts(self, view, merge_map): + """Report all file conflicts, excepting special cases for python. + Specifically, this does not report errors for duplicate + __init__.py files for packages in the same namespace. + """ + conflicts = list(dst for src, dst in merge_map.items() if os.path.exists(dst)) + + if conflicts and self.py_namespace: + ext_map = view.extensions_layout.extension_map(self.extendee_spec) + namespaces = set(x.package.py_namespace for x in ext_map.values()) + namespace_re = r"site-packages/{0}/__init__.py".format(self.py_namespace) + find_namespace = lang.match_predicate(namespace_re) + if self.py_namespace in namespaces: + conflicts = list(x for x in conflicts if not find_namespace(x)) + + return conflicts + + def add_files_to_view(self, view, merge_map, skip_if_exists=True): + bin_dir = self.spec.prefix.bin + python_prefix = self.extendee_spec.prefix + python_is_external = self.extendee_spec.external + global_view = fs.same_path(python_prefix, view.get_projection_for_spec(self.spec)) + for src, dst in merge_map.items(): + if os.path.exists(dst): + continue + elif global_view or not fs.path_contains_subdirectory(src, bin_dir): + view.link(src, dst) + elif not os.path.islink(src): + shutil.copy2(src, dst) + is_script = 
fs.is_nonsymlink_exe_with_shebang(src) + if is_script and not python_is_external: + fs.filter_file( + python_prefix, + os.path.abspath(view.get_projection_for_spec(self.spec)), + dst, + ) + else: + orig_link_target = os.path.realpath(src) + new_link_target = os.path.abspath(merge_map[orig_link_target]) + view.link(new_link_target, dst) + + def remove_files_from_view(self, view, merge_map): + ignore_namespace = False + if self.py_namespace: + ext_map = view.extensions_layout.extension_map(self.extendee_spec) + remaining_namespaces = set( + spec.package.py_namespace for name, spec in ext_map.items() if name != self.name + ) + if self.py_namespace in remaining_namespaces: + namespace_init = lang.match_predicate( + r"site-packages/{0}/__init__.py".format(self.py_namespace) + ) + ignore_namespace = True + + bin_dir = self.spec.prefix.bin + global_view = self.extendee_spec.prefix == view.get_projection_for_spec(self.spec) + + to_remove = [] + for src, dst in merge_map.items(): + if ignore_namespace and namespace_init(dst): + continue + + if global_view or not fs.path_contains_subdirectory(src, bin_dir): + to_remove.append(dst) + else: + os.remove(dst) + + view.remove_files(to_remove) + + def test(self): + """Attempts to import modules of the installed package.""" + + # Make sure we are importing the installed modules, + # not the ones in the source directory + for module in self.import_modules: + self.run_test( + inspect.getmodule(self).python.path, + ["-c", "import {0}".format(module)], + purpose="checking import of {0}".format(module), + work_dir="spack-test", + ) + + +class PythonPackage(PythonExtension): + """Specialized class for packages that are built using pip.""" + + #: Package name, version, and extension on PyPI + pypi = None # type: Optional[str] + + maintainers = ["adamjstewart", "pradyunsg"] + + # To be used in UI queries that require to know which + # build-system class we are using + build_system_class = "PythonPackage" + #: Legacy buildsystem attribute 
used to deserialize and install old specs + legacy_buildsystem = "python_pip" + + #: Callback names for install-time test + install_time_test_callbacks = ["test"] + + build_system("python_pip") + + with spack.multimethod.when("build_system=python_pip"): + extends("python") + depends_on("py-pip", type="build") + # FIXME: technically wheel is only needed when building from source, not when + # installing a downloaded wheel, but I don't want to add wheel as a dep to every + # package manually + depends_on("py-wheel", type="build") + + py_namespace = None # type: Optional[str] + + @lang.classproperty + def homepage(cls): + if cls.pypi: + name = cls.pypi.split("/")[0] + return "https://pypi.org/project/" + name + "/" + + @lang.classproperty + def url(cls): + if cls.pypi: + return "https://files.pythonhosted.org/packages/source/" + cls.pypi[0] + "/" + cls.pypi + + @lang.classproperty + def list_url(cls): + if cls.pypi: + name = cls.pypi.split("/")[0] + return "https://pypi.org/simple/" + name + "/" + + def update_external_dependencies(self): + """ + Ensure all external python packages have a python dependency + + If another package in the DAG depends on python, we use that + python for the dependency of the external. If not, we assume + that the external PythonPackage is installed into the same + directory as the python it depends on. 
+ """ + # TODO: Include this in the solve, rather than instantiating post-concretization + if "python" not in self.spec: + if "python" in self.spec.root: + python = self.spec.root["python"] + else: + python = spack.spec.Spec("python") + repo = spack.repo.path.repo_for_pkg(python) + python.namespace = repo.namespace + python._mark_concrete() + python.external_path = self.prefix + self.spec.add_dependency_edge(python, ("build", "link", "run")) + + @property + def headers(self): + """Discover header files in platlib.""" + + # Headers may be in either location + include = self.prefix.join(self.spec["python"].package.include) + platlib = self.prefix.join(self.spec["python"].package.platlib) + headers = fs.find_all_headers(include) + fs.find_all_headers(platlib) + + if headers: + return headers + + msg = "Unable to locate {} headers in {} or {}" + raise NoHeadersError(msg.format(self.spec.name, include, platlib)) + + @property + def libs(self): + """Discover libraries in platlib.""" + + # Remove py- prefix in package name + library = "lib" + self.spec.name[3:].replace("-", "?") + root = self.prefix.join(self.spec["python"].package.platlib) + + for shared in [True, False]: + libs = fs.find_libraries(library, root, shared=shared, recursive=True) + if libs: + return libs + + msg = "Unable to recursively locate {} libraries in {}" + raise NoLibrariesError(msg.format(self.spec.name, root)) + + +@spack.builder.builder("python_pip") +class PythonPipBuilder(BaseBuilder): + phases = ("install",) + + #: Names associated with package methods in the old build-system format + legacy_methods = ("test",) + + #: Same as legacy_methods, but the signature is different + legacy_long_methods = ("install_options", "global_options", "config_settings") + + #: Names associated with package attributes in the old build-system format + legacy_attributes = ("build_directory", "install_time_test_callbacks") + + #: Callback names for install-time test + install_time_test_callbacks = ["test"] + + 
@staticmethod + def std_args(cls): + return [ + # Verbose + "-vvv", + # Disable prompting for input + "--no-input", + # Disable the cache + "--no-cache-dir", + # Don't check to see if pip is up-to-date + "--disable-pip-version-check", + # Install packages + "install", + # Don't install package dependencies + "--no-deps", + # Overwrite existing packages + "--ignore-installed", + # Use env vars like PYTHONPATH + "--no-build-isolation", + # Don't warn that prefix.bin is not in PATH + "--no-warn-script-location", + # Ignore the PyPI package index + "--no-index", + ] + @property def build_directory(self): """The root directory of the Python package. @@ -170,11 +323,10 @@ def build_directory(self): * ``setup.cfg`` * ``setup.py`` """ - return self.stage.source_path + return self.pkg.stage.source_path def config_settings(self, spec, prefix): """Configuration settings to be passed to the PEP 517 build backend. - Requires pip 22.1+, which requires Python 3.7+. Args: @@ -211,10 +363,10 @@ def global_options(self, spec, prefix): """ return [] - def install(self, spec, prefix): + def install(self, pkg, spec, prefix): """Install everything from build directory.""" - args = PythonPackage._std_args(self) + ["--prefix=" + prefix] + args = PythonPipBuilder.std_args(pkg) + ["--prefix=" + prefix] for key, value in self.config_settings(spec, prefix).items(): if spec["py-pip"].version < Version("22.1"): @@ -223,137 +375,21 @@ def install(self, spec, prefix): "pip 22.1+. 
Add the following line to the package to fix this:\n\n" ' depends_on("py-pip@22.1:", type="build")'.format(spec.name) ) + args.append("--config-settings={}={}".format(key, value)) + for option in self.install_options(spec, prefix): args.append("--install-option=" + option) for option in self.global_options(spec, prefix): args.append("--global-option=" + option) - if self.stage.archive_file and self.stage.archive_file.endswith(".whl"): - args.append(self.stage.archive_file) + if pkg.stage.archive_file and pkg.stage.archive_file.endswith(".whl"): + args.append(pkg.stage.archive_file) else: args.append(".") - pip = inspect.getmodule(self).pip - with working_dir(self.build_directory): + pip = inspect.getmodule(pkg).pip + with fs.working_dir(self.build_directory): pip(*args) - @property - def headers(self): - """Discover header files in platlib.""" - - # Headers may be in either location - include = self.prefix.join(self.spec["python"].package.include) - platlib = self.prefix.join(self.spec["python"].package.platlib) - headers = find_all_headers(include) + find_all_headers(platlib) - - if headers: - return headers - - msg = "Unable to locate {} headers in {} or {}" - raise NoHeadersError(msg.format(self.spec.name, include, platlib)) - - @property - def libs(self): - """Discover libraries in platlib.""" - - # Remove py- prefix in package name - library = "lib" + self.spec.name[3:].replace("-", "?") - root = self.prefix.join(self.spec["python"].package.platlib) - - for shared in [True, False]: - libs = find_libraries(library, root, shared=shared, recursive=True) - if libs: - return libs - - msg = "Unable to recursively locate {} libraries in {}" - raise NoLibrariesError(msg.format(self.spec.name, root)) - - # Testing - - def test(self): - """Attempts to import modules of the installed package.""" - - # Make sure we are importing the installed modules, - # not the ones in the source directory - for module in self.import_modules: - self.run_test( - 
inspect.getmodule(self).python.path, - ["-c", "import {0}".format(module)], - purpose="checking import of {0}".format(module), - work_dir="spack-test", - ) - - run_after("install")(PackageBase._run_default_install_time_test_callbacks) - - # Check that self.prefix is there after installation - run_after("install")(PackageBase.sanity_check_prefix) - - def view_file_conflicts(self, view, merge_map): - """Report all file conflicts, excepting special cases for python. - Specifically, this does not report errors for duplicate - __init__.py files for packages in the same namespace. - """ - conflicts = list(dst for src, dst in merge_map.items() if os.path.exists(dst)) - - if conflicts and self.py_namespace: - ext_map = view.extensions_layout.extension_map(self.extendee_spec) - namespaces = set(x.package.py_namespace for x in ext_map.values()) - namespace_re = r"site-packages/{0}/__init__.py".format(self.py_namespace) - find_namespace = match_predicate(namespace_re) - if self.py_namespace in namespaces: - conflicts = list(x for x in conflicts if not find_namespace(x)) - - return conflicts - - def add_files_to_view(self, view, merge_map, skip_if_exists=True): - bin_dir = self.spec.prefix.bin - python_prefix = self.extendee_spec.prefix - python_is_external = self.extendee_spec.external - global_view = same_path(python_prefix, view.get_projection_for_spec(self.spec)) - for src, dst in merge_map.items(): - if os.path.exists(dst): - continue - elif global_view or not path_contains_subdirectory(src, bin_dir): - view.link(src, dst) - elif not os.path.islink(src): - shutil.copy2(src, dst) - is_script = is_nonsymlink_exe_with_shebang(src) - if is_script and not python_is_external: - filter_file( - python_prefix, - os.path.abspath(view.get_projection_for_spec(self.spec)), - dst, - ) - else: - orig_link_target = os.path.realpath(src) - new_link_target = os.path.abspath(merge_map[orig_link_target]) - view.link(new_link_target, dst) - - def remove_files_from_view(self, view, merge_map): 
- ignore_namespace = False - if self.py_namespace: - ext_map = view.extensions_layout.extension_map(self.extendee_spec) - remaining_namespaces = set( - spec.package.py_namespace for name, spec in ext_map.items() if name != self.name - ) - if self.py_namespace in remaining_namespaces: - namespace_init = match_predicate( - r"site-packages/{0}/__init__.py".format(self.py_namespace) - ) - ignore_namespace = True - - bin_dir = self.spec.prefix.bin - global_view = self.extendee_spec.prefix == view.get_projection_for_spec(self.spec) - - to_remove = [] - for src, dst in merge_map.items(): - if ignore_namespace and namespace_init(dst): - continue - - if global_view or not path_contains_subdirectory(src, bin_dir): - to_remove.append(dst) - else: - os.remove(dst) - - view.remove_files(to_remove) + spack.builder.run_after("install")(execute_install_time_tests) diff --git a/lib/spack/spack/build_systems/qmake.py b/lib/spack/spack/build_systems/qmake.py index c2af684592b..f18bd9812f5 100644 --- a/lib/spack/spack/build_systems/qmake.py +++ b/lib/spack/spack/build_systems/qmake.py @@ -2,82 +2,85 @@ # Spack Project Developers. See the top-level COPYRIGHT file for details. # # SPDX-License-Identifier: (Apache-2.0 OR MIT) - - import inspect from llnl.util.filesystem import working_dir -from spack.directives import depends_on -from spack.package_base import PackageBase, run_after +import spack.builder +import spack.package_base +from spack.directives import build_system, depends_on + +from ._checks import BaseBuilder, execute_build_time_tests -class QMakePackage(PackageBase): +class QMakePackage(spack.package_base.PackageBase): """Specialized class for packages built using qmake. For more information on the qmake build system, see: http://doc.qt.io/qt-5/qmake-manual.html - - This class provides three phases that can be overridden: - - 1. :py:meth:`~.QMakePackage.qmake` - 2. :py:meth:`~.QMakePackage.build` - 3. 
:py:meth:`~.QMakePackage.install` - - They all have sensible defaults and for many packages the only thing - necessary will be to override :py:meth:`~.QMakePackage.qmake_args`. """ - #: Phases of a qmake package - phases = ["qmake", "build", "install"] - #: This attribute is used in UI queries that need to know the build #: system base class build_system_class = "QMakePackage" + #: Legacy buildsystem attribute used to deserialize and install old specs + legacy_buildsystem = "qmake" + + build_system("qmake") + + depends_on("qt", type="build", when="build_system=qmake") + + +@spack.builder.builder("qmake") +class QMakeBuilder(BaseBuilder): + """The qmake builder provides three phases that can be overridden: + + 1. :py:meth:`~.QMakeBuilder.qmake` + 2. :py:meth:`~.QMakeBuilder.build` + 3. :py:meth:`~.QMakeBuilder.install` + + They all have sensible defaults and for many packages the only thing + necessary will be to override :py:meth:`~.QMakeBuilder.qmake_args`. + """ + + phases = ("qmake", "build", "install") + + #: Names associated with package methods in the old build-system format + legacy_methods = ("qmake_args", "check") + + #: Names associated with package attributes in the old build-system format + legacy_attributes = ("build_directory", "build_time_test_callbacks") #: Callback names for build-time test build_time_test_callbacks = ["check"] - depends_on("qt", type="build") - @property def build_directory(self): """The directory containing the ``*.pro`` file.""" return self.stage.source_path def qmake_args(self): - """Produces a list containing all the arguments that must be passed to - qmake - """ + """List of arguments passed to qmake.""" return [] - def qmake(self, spec, prefix): + def qmake(self, pkg, spec, prefix): """Run ``qmake`` to configure the project and generate a Makefile.""" - with working_dir(self.build_directory): - inspect.getmodule(self).qmake(*self.qmake_args()) + inspect.getmodule(self.pkg).qmake(*self.qmake_args()) - def build(self, spec, 
prefix): + def build(self, pkg, spec, prefix): """Make the build targets""" - with working_dir(self.build_directory): - inspect.getmodule(self).make() + inspect.getmodule(self.pkg).make() - def install(self, spec, prefix): + def install(self, pkg, spec, prefix): """Make the install targets""" - with working_dir(self.build_directory): - inspect.getmodule(self).make("install") - - # Tests + inspect.getmodule(self.pkg).make("install") def check(self): - """Searches the Makefile for a ``check:`` target and runs it if found.""" - + """Search the Makefile for a ``check:`` target and runs it if found.""" with working_dir(self.build_directory): self._if_make_target_execute("check") - run_after("build")(PackageBase._run_default_build_time_test_callbacks) - - # Check that self.prefix is there after installation - run_after("install")(PackageBase.sanity_check_prefix) + spack.builder.run_after("build")(execute_build_time_tests) diff --git a/lib/spack/spack/build_systems/r.py b/lib/spack/spack/build_systems/r.py index 450cae733bf..c62baa3555f 100644 --- a/lib/spack/spack/build_systems/r.py +++ b/lib/spack/spack/build_systems/r.py @@ -3,30 +3,64 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) import inspect -from typing import Optional +from typing import Optional, Tuple import llnl.util.lang as lang from spack.directives import extends -from spack.package_base import PackageBase, run_after + +from .generic import GenericBuilder, Package -class RPackage(PackageBase): +class RBuilder(GenericBuilder): + """The R builder provides a single phase that can be overridden: + + 1. :py:meth:`~.RBuilder.install` + + It has sensible defaults, and for many packages the only thing + necessary will be to add dependencies. + """ + + #: Names associated with package methods in the old build-system format + legacy_methods = ( + "configure_args", + "configure_vars", + ) + GenericBuilder.legacy_methods # type: Tuple[str, ...] 
+ + def configure_args(self): + """Arguments to pass to install via ``--configure-args``.""" + return [] + + def configure_vars(self): + """Arguments to pass to install via ``--configure-vars``.""" + return [] + + def install(self, pkg, spec, prefix): + """Installs an R package.""" + + config_args = self.configure_args() + config_vars = self.configure_vars() + + args = ["--vanilla", "CMD", "INSTALL"] + + if config_args: + args.append("--configure-args={0}".format(" ".join(config_args))) + + if config_vars: + args.append("--configure-vars={0}".format(" ".join(config_vars))) + + args.extend(["--library={0}".format(self.pkg.module.r_lib_dir), self.stage.source_path]) + + inspect.getmodule(self.pkg).R(*args) + + +class RPackage(Package): """Specialized class for packages that are built using R. For more information on the R build system, see: https://stat.ethz.ch/R-manual/R-devel/library/utils/html/INSTALL.html - - This class provides a single phase that can be overridden: - - 1. :py:meth:`~.RPackage.install` - - It has sensible defaults, and for many packages the only thing - necessary will be to add dependencies """ - phases = ["install"] - # package attributes that can be expanded to set the homepage, url, # list_url, and git values # For CRAN packages @@ -35,6 +69,8 @@ class RPackage(PackageBase): # For Bioconductor packages bioc = None # type: Optional[str] + GenericBuilder = RBuilder + maintainers = ["glennpj"] #: This attribute is used in UI queries that need to know the build @@ -70,32 +106,3 @@ def list_url(cls): def git(self): if self.bioc: return "https://git.bioconductor.org/packages/" + self.bioc - - def configure_args(self): - """Arguments to pass to install via ``--configure-args``.""" - return [] - - def configure_vars(self): - """Arguments to pass to install via ``--configure-vars``.""" - return [] - - def install(self, spec, prefix): - """Installs an R package.""" - - config_args = self.configure_args() - config_vars = self.configure_vars() - - args = 
["--vanilla", "CMD", "INSTALL"] - - if config_args: - args.append("--configure-args={0}".format(" ".join(config_args))) - - if config_vars: - args.append("--configure-vars={0}".format(" ".join(config_vars))) - - args.extend(["--library={0}".format(self.module.r_lib_dir), self.stage.source_path]) - - inspect.getmodule(self).R(*args) - - # Check that self.prefix is there after installation - run_after("install")(PackageBase.sanity_check_prefix) diff --git a/lib/spack/spack/build_systems/racket.py b/lib/spack/spack/build_systems/racket.py index 7b37d85cf2e..889cc079315 100644 --- a/lib/spack/spack/build_systems/racket.py +++ b/lib/spack/spack/build_systems/racket.py @@ -3,14 +3,15 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) import os -from typing import Optional +from typing import Optional, Tuple +import llnl.util.filesystem as fs import llnl.util.lang as lang import llnl.util.tty as tty -from llnl.util.filesystem import working_dir +import spack.builder from spack.build_environment import SPACK_NO_PARALLEL_MAKE, determine_number_of_jobs -from spack.directives import extends +from spack.directives import build_system, extends from spack.package_base import PackageBase from spack.util.environment import env_flag from spack.util.executable import Executable, ProcessError @@ -19,34 +20,52 @@ class RacketPackage(PackageBase): """Specialized class for packages that are built using Racket's `raco pkg install` and `raco setup` commands. 
- - This class provides the following phases that can be overridden: - - * install - * setup """ #: Package name, version, and extension on PyPI maintainers = ["elfprince13"] - - # Default phases - phases = ["install"] - # To be used in UI queries that require to know which # build-system class we are using build_system_class = "RacketPackage" + #: Legacy buildsystem attribute used to deserialize and install old specs + legacy_buildsystem = "racket" - extends("racket") + build_system("racket") + + extends("racket", when="build_system=racket") - pkgs = False - subdirectory = None # type: Optional[str] racket_name = None # type: Optional[str] parallel = True @lang.classproperty def homepage(cls): - if cls.pkgs: + if cls.racket_name: return "https://pkgs.racket-lang.org/package/{0}".format(cls.racket_name) + return None + + +@spack.builder.builder("racket") +class RacketBuilder(spack.builder.Builder): + """The Racket builder provides an ``install`` phase that can be overridden.""" + + phases = ("install",) + + #: Names associated with package methods in the old build-system format + legacy_methods = tuple() # type: Tuple[str, ...] 
+ + #: Names associated with package attributes in the old build-system format + legacy_attributes = ("build_directory", "build_time_test_callbacks", "subdirectory") + + #: Callback names for build-time test + build_time_test_callbacks = ["check"] + + racket_name = None # type: Optional[str] + + @property + def subdirectory(self): + if self.racket_name: + return "pkgs/{0}".format(self.pkg.racket_name) + return None @property def build_directory(self): @@ -55,25 +74,25 @@ def build_directory(self): ret = os.path.join(ret, self.subdirectory) return ret - def install(self, spec, prefix): + def install(self, pkg, spec, prefix): """Install everything from build directory.""" raco = Executable("raco") - with working_dir(self.build_directory): - allow_parallel = self.parallel and (not env_flag(SPACK_NO_PARALLEL_MAKE)) + with fs.working_dir(self.build_directory): + parallel = self.pkg.parallel and (not env_flag(SPACK_NO_PARALLEL_MAKE)) args = [ "pkg", "install", "-t", "dir", "-n", - self.racket_name, + self.pkg.racket_name, "--deps", "fail", "--ignore-implies", "--copy", "-i", "-j", - str(determine_number_of_jobs(allow_parallel)), + str(determine_number_of_jobs(parallel)), "--", os.getcwd(), ] @@ -82,9 +101,8 @@ def install(self, spec, prefix): except ProcessError: args.insert(-2, "--skip-installed") raco(*args) - tty.warn( - ( - "Racket package {0} was already installed, uninstalling via " - "Spack may make someone unhappy!" - ).format(self.racket_name) + msg = ( + "Racket package {0} was already installed, uninstalling via " + "Spack may make someone unhappy!" ) + tty.warn(msg.format(self.pkg.racket_name)) diff --git a/lib/spack/spack/build_systems/ruby.py b/lib/spack/spack/build_systems/ruby.py index fcc071f19ec..ef29f164ab2 100644 --- a/lib/spack/spack/build_systems/ruby.py +++ b/lib/spack/spack/build_systems/ruby.py @@ -2,35 +2,49 @@ # Spack Project Developers. See the top-level COPYRIGHT file for details. 
# # SPDX-License-Identifier: (Apache-2.0 OR MIT) - import glob import inspect -from spack.directives import extends -from spack.package_base import PackageBase, run_after +import spack.builder +import spack.package_base +from spack.directives import build_system, extends + +from ._checks import BaseBuilder -class RubyPackage(PackageBase): - """Specialized class for building Ruby gems. - - This class provides two phases that can be overridden if required: - - #. :py:meth:`~.RubyPackage.build` - #. :py:meth:`~.RubyPackage.install` - """ +class RubyPackage(spack.package_base.PackageBase): + """Specialized class for building Ruby gems.""" maintainers = ["Kerilk"] - #: Phases of a Ruby package - phases = ["build", "install"] - #: This attribute is used in UI queries that need to know the build #: system base class build_system_class = "RubyPackage" + #: Legacy buildsystem attribute used to deserialize and install old specs + legacy_buildsystem = "ruby" - extends("ruby") + build_system("ruby") - def build(self, spec, prefix): + extends("ruby", when="build_system=ruby") + + +@spack.builder.builder("ruby") +class RubyBuilder(BaseBuilder): + """The Ruby builder provides two phases that can be overridden if required: + + #. :py:meth:`~.RubyBuilder.build` + #. 
:py:meth:`~.RubyBuilder.install` + """ + + phases = ("build", "install") + + #: Names associated with package methods in the old build-system format + legacy_methods = () + + #: Names associated with package attributes in the old build-system format + legacy_attributes = () + + def build(self, pkg, spec, prefix): """Build a Ruby gem.""" # ruby-rake provides both rake.gemspec and Rakefile, but only @@ -38,15 +52,15 @@ def build(self, spec, prefix): gemspecs = glob.glob("*.gemspec") rakefiles = glob.glob("Rakefile") if gemspecs: - inspect.getmodule(self).gem("build", "--norc", gemspecs[0]) + inspect.getmodule(self.pkg).gem("build", "--norc", gemspecs[0]) elif rakefiles: - jobs = inspect.getmodule(self).make_jobs - inspect.getmodule(self).rake("package", "-j{0}".format(jobs)) + jobs = inspect.getmodule(self.pkg).make_jobs + inspect.getmodule(self.pkg).rake("package", "-j{0}".format(jobs)) else: # Some Ruby packages only ship `*.gem` files, so nothing to build pass - def install(self, spec, prefix): + def install(self, pkg, spec, prefix): """Install a Ruby gem. The ruby package sets ``GEM_HOME`` to tell gem where to install to.""" @@ -56,9 +70,6 @@ def install(self, spec, prefix): # if --install-dir is not used, GEM_PATH is deleted from the # environement, and Gems required to build native extensions will # not be found. Those extensions are built during `gem install`. - inspect.getmodule(self).gem( + inspect.getmodule(self.pkg).gem( "install", "--norc", "--ignore-dependencies", "--install-dir", prefix, gems[0] ) - - # Check that self.prefix is there after installation - run_after("install")(PackageBase.sanity_check_prefix) diff --git a/lib/spack/spack/build_systems/scons.py b/lib/spack/spack/build_systems/scons.py index 54700911217..2b1c36316ec 100644 --- a/lib/spack/spack/build_systems/scons.py +++ b/lib/spack/spack/build_systems/scons.py @@ -2,63 +2,79 @@ # Spack Project Developers. See the top-level COPYRIGHT file for details. 
# # SPDX-License-Identifier: (Apache-2.0 OR MIT) - - import inspect -from spack.directives import depends_on -from spack.package_base import PackageBase, run_after +import spack.builder +import spack.package_base +from spack.directives import build_system, depends_on + +from ._checks import BaseBuilder, execute_build_time_tests -class SConsPackage(PackageBase): +class SConsPackage(spack.package_base.PackageBase): """Specialized class for packages built using SCons. See http://scons.org/documentation.html for more information. - - This class provides the following phases that can be overridden: - - 1. :py:meth:`~.SConsPackage.build` - 2. :py:meth:`~.SConsPackage.install` - - Packages that use SCons as a build system are less uniform than packages - that use other build systems. Developers can add custom subcommands or - variables that control the build. You will likely need to override - :py:meth:`~.SConsPackage.build_args` to pass the appropriate variables. """ - #: Phases of a SCons package - phases = ["build", "install"] - #: To be used in UI queries that require to know which #: build-system class we are using build_system_class = "SConsPackage" + #: Legacy buildsystem attribute used to deserialize and install old specs + legacy_buildsystem = "scons" + + build_system("scons") + + depends_on("scons", type="build", when="build_system=scons") + + +@spack.builder.builder("scons") +class SConsBuilder(BaseBuilder): + """The Scons builder provides the following phases that can be overridden: + + 1. :py:meth:`~.SConsBuilder.build` + 2. :py:meth:`~.SConsBuilder.install` + + Packages that use SCons as a build system are less uniform than packages that use + other build systems. Developers can add custom subcommands or variables that + control the build. You will likely need to override + :py:meth:`~.SConsBuilder.build_args` to pass the appropriate variables. 
+ """ + + #: Phases of a SCons package + phases = ("build", "install") + + #: Names associated with package methods in the old build-system format + legacy_methods = ("install_args", "build_test") + + #: Same as legacy_methods, but the signature is different + legacy_long_methods = ("build_args",) + + #: Names associated with package attributes in the old build-system format + legacy_attributes = ("build_time_test_callbacks",) + #: Callback names for build-time test build_time_test_callbacks = ["build_test"] - depends_on("scons", type="build") - def build_args(self, spec, prefix): """Arguments to pass to build.""" return [] - def build(self, spec, prefix): + def build(self, pkg, spec, prefix): """Build the package.""" args = self.build_args(spec, prefix) + inspect.getmodule(self.pkg).scons(*args) - inspect.getmodule(self).scons(*args) - - def install_args(self, spec, prefix): + def install_args(self): """Arguments to pass to install.""" return [] - def install(self, spec, prefix): + def install(self, pkg, spec, prefix): """Install the package.""" - args = self.install_args(spec, prefix) + args = self.install_args() - inspect.getmodule(self).scons("install", *args) - - # Testing + inspect.getmodule(self.pkg).scons("install", *args) def build_test(self): """Run unit tests after build. @@ -68,7 +84,4 @@ def build_test(self): """ pass - run_after("build")(PackageBase._run_default_build_time_test_callbacks) - - # Check that self.prefix is there after installation - run_after("install")(PackageBase.sanity_check_prefix) + spack.builder.run_after("build")(execute_build_time_tests) diff --git a/lib/spack/spack/build_systems/sip.py b/lib/spack/spack/build_systems/sip.py index 4d16f6731e7..b129ca4e0ae 100644 --- a/lib/spack/spack/build_systems/sip.py +++ b/lib/spack/spack/build_systems/sip.py @@ -2,7 +2,6 @@ # Spack Project Developers. See the top-level COPYRIGHT file for details. 
# # SPDX-License-Identifier: (Apache-2.0 OR MIT) - import inspect import os import re @@ -10,28 +9,20 @@ import llnl.util.tty as tty from llnl.util.filesystem import find, join_path, working_dir -from spack.directives import depends_on, extends -from spack.package_base import PackageBase, run_after +import spack.builder +import spack.package_base +from spack.directives import build_system, depends_on, extends +from spack.multimethod import when + +from ._checks import BaseBuilder, execute_install_time_tests -class SIPPackage(PackageBase): +class SIPPackage(spack.package_base.PackageBase): """Specialized class for packages that are built using the SIP build system. See https://www.riverbankcomputing.com/software/sip/intro for more information. - - This class provides the following phases that can be overridden: - - * configure - * build - * install - - The configure phase already adds a set of default flags. To see more - options, run ``python configure.py --help``. """ - # Default phases - phases = ["configure", "build", "install"] - # To be used in UI queries that require to know which # build-system class we are using build_system_class = "SIPPackage" @@ -41,11 +32,15 @@ class SIPPackage(PackageBase): #: Callback names for install-time test install_time_test_callbacks = ["test"] + #: Legacy buildsystem attribute used to deserialize and install old specs + legacy_buildsystem = "sip" - extends("python") + build_system("sip") - depends_on("qt") - depends_on("py-sip") + with when("build_system=sip"): + extends("python") + depends_on("qt") + depends_on("py-sip") @property def import_modules(self): @@ -95,11 +90,51 @@ def python(self, *args, **kwargs): """The python ``Executable``.""" inspect.getmodule(self).python(*args, **kwargs) + def test(self): + """Attempts to import modules of the installed package.""" + + # Make sure we are importing the installed modules, + # not the ones in the source directory + for module in self.import_modules: + self.run_test( + 
inspect.getmodule(self).python.path, + ["-c", "import {0}".format(module)], + purpose="checking import of {0}".format(module), + work_dir="spack-test", + ) + + +@spack.builder.builder("sip") +class SIPBuilder(BaseBuilder): + """The SIP builder provides the following phases that can be overridden: + + * configure + * build + * install + + The configure phase already adds a set of default flags. To see more + options, run ``python configure.py --help``. + """ + + phases = ("configure", "build", "install") + + #: Names associated with package methods in the old build-system format + legacy_methods = ("configure_file", "configure_args", "build_args", "install_args") + + #: Names associated with package attributes in the old build-system format + legacy_attributes = ( + "build_targets", + "install_targets", + "build_time_test_callbacks", + "install_time_test_callbacks", + "build_directory", + ) + def configure_file(self): """Returns the name of the configure file to use.""" return "configure.py" - def configure(self, spec, prefix): + def configure(self, pkg, spec, prefix): """Configure the package.""" configure = self.configure_file() @@ -118,7 +153,7 @@ def configure(self, spec, prefix): "--bindir", prefix.bin, "--destdir", - inspect.getmodule(self).python_platlib, + inspect.getmodule(self.pkg).python_platlib, ] ) @@ -128,53 +163,35 @@ def configure_args(self): """Arguments to pass to configure.""" return [] - def build(self, spec, prefix): + def build(self, pkg, spec, prefix): """Build the package.""" args = self.build_args() - inspect.getmodule(self).make(*args) + inspect.getmodule(self.pkg).make(*args) def build_args(self): """Arguments to pass to build.""" return [] - def install(self, spec, prefix): + def install(self, pkg, spec, prefix): """Install the package.""" args = self.install_args() - inspect.getmodule(self).make("install", parallel=False, *args) + inspect.getmodule(self.pkg).make("install", parallel=False, *args) def install_args(self): """Arguments to 
pass to install.""" return [] - # Testing + spack.builder.run_after("install")(execute_install_time_tests) - def test(self): - """Attempts to import modules of the installed package.""" - - # Make sure we are importing the installed modules, - # not the ones in the source directory - for module in self.import_modules: - self.run_test( - inspect.getmodule(self).python.path, - ["-c", "import {0}".format(module)], - purpose="checking import of {0}".format(module), - work_dir="spack-test", - ) - - run_after("install")(PackageBase._run_default_install_time_test_callbacks) - - # Check that self.prefix is there after installation - run_after("install")(PackageBase.sanity_check_prefix) - - @run_after("install") + @spack.builder.run_after("install") def extend_path_setup(self): # See github issue #14121 and PR #15297 - module = self.spec["py-sip"].variants["module"].value + module = self.pkg.spec["py-sip"].variants["module"].value if module != "sip": module = module.split(".")[0] - with working_dir(inspect.getmodule(self).python_platlib): + with working_dir(inspect.getmodule(self.pkg).python_platlib): with open(os.path.join(module, "__init__.py"), "a") as f: f.write("from pkgutil import extend_path\n") f.write("__path__ = extend_path(__path__, __name__)\n") diff --git a/lib/spack/spack/build_systems/waf.py b/lib/spack/spack/build_systems/waf.py index 3571ffd525a..f972d4e2867 100644 --- a/lib/spack/spack/build_systems/waf.py +++ b/lib/spack/spack/build_systems/waf.py @@ -2,21 +2,38 @@ # Spack Project Developers. See the top-level COPYRIGHT file for details. 
# # SPDX-License-Identifier: (Apache-2.0 OR MIT) - - import inspect from llnl.util.filesystem import working_dir -from spack.directives import depends_on -from spack.package_base import PackageBase, run_after +import spack.builder +import spack.package_base +from spack.directives import build_system, depends_on + +from ._checks import BaseBuilder, execute_build_time_tests, execute_install_time_tests -class WafPackage(PackageBase): +class WafPackage(spack.package_base.PackageBase): """Specialized class for packages that are built using the Waf build system. See https://waf.io/book/ for more information. + """ - This class provides the following phases that can be overridden: + # To be used in UI queries that require to know which + # build-system class we are using + build_system_class = "WafPackage" + #: Legacy buildsystem attribute used to deserialize and install old specs + legacy_buildsystem = "waf" + + build_system("waf") + # Much like AutotoolsPackage does not require automake and autoconf + # to build, WafPackage does not require waf to build. It only requires + # python to run the waf build script. + depends_on("python@2.5:", type="build", when="build_system=waf") + + +@spack.builder.builder("waf") +class WafBuilder(BaseBuilder): + """The WAF builder provides the following phases that can be overridden: * configure * build @@ -40,12 +57,25 @@ class WafPackage(PackageBase): function, which passes ``--prefix=/path/to/installation/prefix``. 
""" - # Default phases - phases = ["configure", "build", "install"] + phases = ("configure", "build", "install") - # To be used in UI queries that require to know which - # build-system class we are using - build_system_class = "WafPackage" + #: Names associated with package methods in the old build-system format + legacy_methods = ( + "build_test", + "install_test", + "configure_args", + "build_args", + "install_args", + "build_test", + "install_test", + ) + + #: Names associated with package attributes in the old build-system format + legacy_attributes = ( + "build_time_test_callbacks", + "build_time_test_callbacks", + "build_directory", + ) # Callback names for build-time test build_time_test_callbacks = ["build_test"] @@ -53,11 +83,6 @@ class WafPackage(PackageBase): # Callback names for install-time test install_time_test_callbacks = ["install_test"] - # Much like AutotoolsPackage does not require automake and autoconf - # to build, WafPackage does not require waf to build. It only requires - # python to run the waf build script. 
- depends_on("python@2.5:", type="build") - @property def build_directory(self): """The directory containing the ``waf`` file.""" @@ -65,18 +90,18 @@ def build_directory(self): def python(self, *args, **kwargs): """The python ``Executable``.""" - inspect.getmodule(self).python(*args, **kwargs) + inspect.getmodule(self.pkg).python(*args, **kwargs) def waf(self, *args, **kwargs): """Runs the waf ``Executable``.""" - jobs = inspect.getmodule(self).make_jobs + jobs = inspect.getmodule(self.pkg).make_jobs with working_dir(self.build_directory): self.python("waf", "-j{0}".format(jobs), *args, **kwargs) - def configure(self, spec, prefix): + def configure(self, pkg, spec, prefix): """Configures the project.""" - args = ["--prefix={0}".format(self.prefix)] + args = ["--prefix={0}".format(self.pkg.prefix)] args += self.configure_args() self.waf("configure", *args) @@ -85,7 +110,7 @@ def configure_args(self): """Arguments to pass to configure.""" return [] - def build(self, spec, prefix): + def build(self, pkg, spec, prefix): """Executes the build.""" args = self.build_args() @@ -95,7 +120,7 @@ def build_args(self): """Arguments to pass to build.""" return [] - def install(self, spec, prefix): + def install(self, pkg, spec, prefix): """Installs the targets on the system.""" args = self.install_args() @@ -105,8 +130,6 @@ def install_args(self): """Arguments to pass to install.""" return [] - # Testing - def build_test(self): """Run unit tests after build. @@ -115,7 +138,7 @@ def build_test(self): """ pass - run_after("build")(PackageBase._run_default_build_time_test_callbacks) + spack.builder.run_after("build")(execute_build_time_tests) def install_test(self): """Run unit tests after install. 
@@ -125,7 +148,4 @@ def install_test(self): """ pass - run_after("install")(PackageBase._run_default_install_time_test_callbacks) - - # Check that self.prefix is there after installation - run_after("install")(PackageBase.sanity_check_prefix) + spack.builder.run_after("install")(execute_install_time_tests) diff --git a/lib/spack/spack/builder.py b/lib/spack/spack/builder.py new file mode 100644 index 00000000000..7ae36b6e0a2 --- /dev/null +++ b/lib/spack/spack/builder.py @@ -0,0 +1,574 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) +import collections +import copy +import functools +import inspect +from typing import List, Optional, Tuple + +import six + +import llnl.util.compat + +import spack.build_environment + +#: Builder classes, as registered by the "builder" decorator +BUILDER_CLS = {} + +#: An object of this kind is a shared global state used to collect callbacks during +#: class definition time, and is flushed when the class object is created at the end +#: of the class definition +#: +#: Args: +#: attribute_name (str): name of the attribute that will be attached to the builder +#: callbacks (list): container used to temporarily aggregate the callbacks +CallbackTemporaryStage = collections.namedtuple( + "CallbackTemporaryStage", ["attribute_name", "callbacks"] +) + +#: Shared global state to aggregate "@run_before" callbacks +_RUN_BEFORE = CallbackTemporaryStage(attribute_name="run_before_callbacks", callbacks=[]) +#: Shared global state to aggregate "@run_after" callbacks +_RUN_AFTER = CallbackTemporaryStage(attribute_name="run_after_callbacks", callbacks=[]) + +#: Map id(pkg) to a builder, to avoid creating multiple +#: builders for the same package object. +_BUILDERS = {} + + +def builder(build_system_name): + """Class decorator used to register the default builder + for a given build-system. 
+ + Args: + build_system_name (str): name of the build-system + """ + + def _decorator(cls): + cls.build_system = build_system_name + BUILDER_CLS[build_system_name] = cls + return cls + + return _decorator + + +def create(pkg): + """Given a package object with an associated concrete spec, + return the builder object that can install it. + + Args: + pkg (spack.package_base.PackageBase): package for which we want the builder + """ + if id(pkg) not in _BUILDERS: + _BUILDERS[id(pkg)] = _create(pkg) + return _BUILDERS[id(pkg)] + + +class _PhaseAdapter(object): + def __init__(self, builder, phase_fn): + self.builder = builder + self.phase_fn = phase_fn + + def __call__(self, spec, prefix): + return self.phase_fn(self.builder.pkg, spec, prefix) + + +def _create(pkg): + """Return a new builder object for the package object being passed as argument. + + The function inspects the build-system used by the package object and try to: + + 1. Return a custom builder, if any is defined in the same ``package.py`` file. + 2. Return a customization of more generic builders, if any is defined in the + class hierarchy (look at AspellDictPackage for an example of that) + 3. Return a run-time generated adapter builder otherwise + + The run-time generated adapter builder is capable of adapting an old-style package + to the new architecture, where the installation procedure has been extracted from + the ``*Package`` hierarchy into a ``*Builder`` hierarchy. This means that the + adapter looks for attribute or method overrides preferably in the ``*Package`` + before using the default builder implementation. + + Note that in case a builder is explicitly coded in ``package.py``, no attempt is made + to look for build-related methods in the ``*Package``. 
+ + Args: + pkg (spack.package_base.PackageBase): package object for which we need a builder + """ + package_module = inspect.getmodule(pkg) + package_buildsystem = buildsystem_name(pkg) + default_builder_cls = BUILDER_CLS[package_buildsystem] + builder_cls_name = default_builder_cls.__name__ + builder_cls = getattr(package_module, builder_cls_name, None) + if builder_cls: + return builder_cls(pkg) + + # Specialized version of a given buildsystem can subclass some + # base classes and specialize certain phases or methods or attributes. + # In that case they can store their builder class as a class level attribute. + # See e.g. AspellDictPackage as an example. + base_cls = getattr(pkg, builder_cls_name, default_builder_cls) + + # From here on we define classes to construct a special builder that adapts to the + # old, single class, package format. The adapter forwards any call or access to an + # attribute related to the installation procedure to a package object wrapped in + # a class that falls-back on calling the base builder if no override is found on the + # package. The semantic should be the same as the method in the base builder were still + # present in the base class of the package. 
+ + class _ForwardToBaseBuilder(object): + def __init__(self, wrapped_pkg_object, root_builder): + self.wrapped_package_object = wrapped_pkg_object + self.root_builder = root_builder + + package_cls = type(wrapped_pkg_object) + wrapper_cls = type(self) + bases = (package_cls, wrapper_cls) + new_cls_name = package_cls.__name__ + "Wrapper" + new_cls = type(new_cls_name, bases, {}) + new_cls.__module__ = package_cls.__module__ + self.__class__ = new_cls + self.__dict__.update(wrapped_pkg_object.__dict__) + + def __getattr__(self, item): + result = getattr(super(type(self.root_builder), self.root_builder), item) + if item in super(type(self.root_builder), self.root_builder).phases: + result = _PhaseAdapter(self.root_builder, result) + return result + + def forward_method_to_getattr(fn_name): + def __forward(self, *args, **kwargs): + return self.__getattr__(fn_name)(*args, **kwargs) + + return __forward + + # Add fallback methods for the Package object to refer to the builder. If a method + # with the same name is defined in the Package, it will override this definition + # (when _ForwardToBaseBuilder is initialized) + for method_name in ( + base_cls.phases + + base_cls.legacy_methods + + getattr(base_cls, "legacy_long_methods", tuple()) + + ("setup_build_environment", "setup_dependent_build_environment") + ): + setattr(_ForwardToBaseBuilder, method_name, forward_method_to_getattr(method_name)) + + def forward_property_to_getattr(property_name): + def __forward(self): + return self.__getattr__(property_name) + + return __forward + + for attribute_name in base_cls.legacy_attributes: + setattr( + _ForwardToBaseBuilder, + attribute_name, + property(forward_property_to_getattr(attribute_name)), + ) + + class Adapter(six.with_metaclass(_PackageAdapterMeta, base_cls)): + def __init__(self, pkg): + # Deal with custom phases in packages here + if hasattr(pkg, "phases"): + self.phases = pkg.phases + for phase in self.phases: + setattr(Adapter, phase, 
_PackageAdapterMeta.phase_method_adapter(phase)) + + # Attribute containing the package wrapped in dispatcher with a `__getattr__` + # method that will forward certain calls to the default builder. + self.pkg_with_dispatcher = _ForwardToBaseBuilder(pkg, root_builder=self) + super(Adapter, self).__init__(pkg) + + # These two methods don't follow the (self, spec, prefix) signature of phases nor + # the (self) signature of methods, so they are added explicitly to avoid using a + # catch-all (*args, **kwargs) + def setup_build_environment(self, env): + return self.pkg_with_dispatcher.setup_build_environment(env) + + def setup_dependent_build_environment(self, env, dependent_spec): + return self.pkg_with_dispatcher.setup_dependent_build_environment(env, dependent_spec) + + return Adapter(pkg) + + +def buildsystem_name(pkg): + """Given a package object with an associated concrete spec, + return the name of its build system. + + Args: + pkg (spack.package_base.PackageBase): package for which we want + the build system name + """ + try: + return pkg.spec.variants["build_system"].value + except KeyError: + # We are reading an old spec without the build_system variant + return pkg.legacy_buildsystem + + +class PhaseCallbacksMeta(type): + """Permit to register arbitrary functions during class definition and run them + later, before or after a given install phase. + + Each method decorated with ``run_before`` or ``run_after`` gets temporarily + stored in a global shared state when a class being defined is parsed by the Python + interpreter. At class definition time that temporary storage gets flushed and a list + of callbacks is attached to the class being defined. + """ + + def __new__(mcs, name, bases, attr_dict): + for temporary_stage in (_RUN_BEFORE, _RUN_AFTER): + staged_callbacks = temporary_stage.callbacks + + # We don't have callbacks in this class, move on + if not staged_callbacks: + continue + + # If we are here we have callbacks. 
To get a complete list, get first what + # was attached to parent classes, then prepend what we have registered here. + # + # The order should be: + # 1. Callbacks are registered in order within the same class + # 2. Callbacks defined in derived classes precede those defined in base + # classes + for base in bases: + callbacks_from_base = getattr(base, temporary_stage.attribute_name, None) + if callbacks_from_base: + break + callbacks_from_base = callbacks_from_base or [] + + # Set the callbacks in this class and flush the temporary stage + attr_dict[temporary_stage.attribute_name] = staged_callbacks[:] + callbacks_from_base + del temporary_stage.callbacks[:] + + return super(PhaseCallbacksMeta, mcs).__new__(mcs, name, bases, attr_dict) + + @staticmethod + def run_after(phase, when=None): + """Decorator to register a function for running after a given phase. + + Args: + phase (str): phase after which the function must run. + when (str): condition under which the function is run (if None, it is always run). + """ + + def _decorator(fn): + key = (phase, when) + item = (key, fn) + _RUN_AFTER.callbacks.append(item) + return fn + + return _decorator + + @staticmethod + def run_before(phase, when=None): + """Decorator to register a function for running before a given phase. + + Args: + phase (str): phase before which the function must run. + when (str): condition under which the function is run (if None, it is always run). + """ + + def _decorator(fn): + key = (phase, when) + item = (key, fn) + _RUN_BEFORE.callbacks.append(item) + return fn + + return _decorator + + +class BuilderMeta(PhaseCallbacksMeta, type(llnl.util.compat.Sequence)): # type: ignore + pass + + +class _PackageAdapterMeta(BuilderMeta): + """Metaclass to adapt old-style packages to the new architecture based on builders + for the installation phase. + + This class does the necessary mangling to function argument so that a call to a + builder object can delegate to a package object. 
+ """ + + @staticmethod + def phase_method_adapter(phase_name): + def _adapter(self, pkg, spec, prefix): + phase_fn = getattr(self.pkg_with_dispatcher, phase_name) + return phase_fn(spec, prefix) + + return _adapter + + @staticmethod + def legacy_long_method_adapter(method_name): + def _adapter(self, spec, prefix): + bind_method = getattr(self.pkg_with_dispatcher, method_name) + return bind_method(spec, prefix) + + return _adapter + + @staticmethod + def legacy_method_adapter(method_name): + def _adapter(self): + bind_method = getattr(self.pkg_with_dispatcher, method_name) + return bind_method() + + return _adapter + + @staticmethod + def legacy_attribute_adapter(attribute_name): + def _adapter(self): + return getattr(self.pkg_with_dispatcher, attribute_name) + + return property(_adapter) + + @staticmethod + def combine_callbacks(pipeline_attribute_name): + """This function combines callbacks from old-style packages with callbacks that might + be registered for the default builder. + + It works by: + 1. Extracting the callbacks from the old-style package + 2. Transforming those callbacks by adding an adapter that receives a builder as argument + and calls the wrapped function with ``builder.pkg`` + 3. 
Combining the list of transformed callbacks with those that might be present in the + default builder + """ + + def _adapter(self): + def unwrap_pkg(fn): + @functools.wraps(fn) + def _wrapped(builder): + return fn(builder.pkg_with_dispatcher) + + return _wrapped + + # Concatenate the current list with the one from package + callbacks_from_package = getattr(self.pkg, pipeline_attribute_name, []) + callbacks_from_package = [(key, unwrap_pkg(x)) for key, x in callbacks_from_package] + callbacks_from_builder = getattr(super(type(self), self), pipeline_attribute_name, []) + return callbacks_from_package + callbacks_from_builder + + return property(_adapter) + + def __new__(mcs, name, bases, attr_dict): + # Add ways to intercept methods and attribute calls and dispatch + # them first to a package object + default_builder_cls = bases[0] + for phase_name in default_builder_cls.phases: + attr_dict[phase_name] = _PackageAdapterMeta.phase_method_adapter(phase_name) + + for method_name in default_builder_cls.legacy_methods: + attr_dict[method_name] = _PackageAdapterMeta.legacy_method_adapter(method_name) + + # These exist e.g. for Python, see discussion in https://github.com/spack/spack/pull/32068 + for method_name in getattr(default_builder_cls, "legacy_long_methods", []): + attr_dict[method_name] = _PackageAdapterMeta.legacy_long_method_adapter(method_name) + + for attribute_name in default_builder_cls.legacy_attributes: + attr_dict[attribute_name] = _PackageAdapterMeta.legacy_attribute_adapter( + attribute_name + ) + + combine_callbacks = _PackageAdapterMeta.combine_callbacks + attr_dict[_RUN_BEFORE.attribute_name] = combine_callbacks(_RUN_BEFORE.attribute_name) + attr_dict[_RUN_AFTER.attribute_name] = combine_callbacks(_RUN_AFTER.attribute_name) + + return super(_PackageAdapterMeta, mcs).__new__(mcs, name, bases, attr_dict) + + +class InstallationPhase(object): + """Manages a single phase of the installation. 
+ + This descriptor stores at creation time the name of the method it should + search for execution. The method is retrieved at __get__ time, so that + it can be overridden by subclasses of whatever class declared the phases. + + It also provides hooks to execute arbitrary callbacks before and after + the phase. + """ + + def __init__(self, name, builder): + self.name = name + self.builder = builder + self.phase_fn = self._select_phase_fn() + self.run_before = self._make_callbacks(_RUN_BEFORE.attribute_name) + self.run_after = self._make_callbacks(_RUN_AFTER.attribute_name) + + def _make_callbacks(self, callbacks_attribute): + result = [] + callbacks = getattr(self.builder, callbacks_attribute, []) + for (phase, condition), fn in callbacks: + # Same if it is for another phase + if phase != self.name: + continue + + # If we have no condition or the callback satisfies a condition, register it + if condition is None or self.builder.pkg.spec.satisfies(condition): + result.append(fn) + return result + + def __str__(self): + msg = '{0}: executing "{1}" phase' + return msg.format(self.builder, self.name) + + def execute(self): + pkg = self.builder.pkg + self._on_phase_start(pkg) + + for callback in self.run_before: + callback(self.builder) + + self.phase_fn(pkg, pkg.spec, pkg.prefix) + + for callback in self.run_after: + callback(self.builder) + + self._on_phase_exit(pkg) + + def _select_phase_fn(self): + phase_fn = getattr(self.builder, self.name, None) + + if not phase_fn: + msg = ( + 'unexpected error: package "{0.fullname}" must implement an ' + '"{1}" phase for the "{2}" build system' + ) + raise RuntimeError(msg.format(self.builder.pkg, self.name, self.builder.build_system)) + + return phase_fn + + def _on_phase_start(self, instance): + # If a phase has a matching stop_before_phase attribute, + # stop the installation process raising a StopPhase + if getattr(instance, "stop_before_phase", None) == self.name: + raise spack.build_environment.StopPhase( + "Stopping 
before '{0}' phase".format(self.name) + ) + + def _on_phase_exit(self, instance): + # If a phase has a matching last_phase attribute, + # stop the installation process raising a StopPhase + if getattr(instance, "last_phase", None) == self.name: + raise spack.build_environment.StopPhase("Stopping at '{0}' phase".format(self.name)) + + def copy(self): + return copy.deepcopy(self) + + +class Builder(six.with_metaclass(BuilderMeta, llnl.util.compat.Sequence)): + """A builder is a class that, given a package object (i.e. associated with + concrete spec), knows how to install it. + + The builder behaves like a sequence, and when iterated over return the + "phases" of the installation in the correct order. + + Args: + pkg (spack.package_base.PackageBase): package object to be built + """ + + #: Sequence of phases. Must be defined in derived classes + phases = () # type: Tuple[str, ...] + #: Build system name. Must also be defined in derived classes. + build_system = None # type: Optional[str] + + legacy_methods = () # type: Tuple[str, ...] + legacy_attributes = () # type: Tuple[str, ...] + + #: List of glob expressions. Each expression must either be + #: absolute or relative to the package source path. + #: Matching artifacts found at the end of the build process will be + #: copied in the same directory tree as _spack_build_logfile and + #: _spack_build_envfile. + archive_files = [] # type: List[str] + + def __init__(self, pkg): + self.pkg = pkg + self.callbacks = {} + for phase in self.phases: + self.callbacks[phase] = InstallationPhase(phase, self) + + @property + def spec(self): + return self.pkg.spec + + @property + def stage(self): + return self.pkg.stage + + @property + def prefix(self): + return self.pkg.prefix + + def test(self): + # Defer tests to virtual and concrete packages + pass + + def setup_build_environment(self, env): + """Sets up the build environment for a package. 
+ + This method will be called before the current package prefix exists in + Spack's store. + + Args: + env (spack.util.environment.EnvironmentModifications): environment + modifications to be applied when the package is built. Package authors + can call methods on it to alter the build environment. + """ + if not hasattr(super(Builder, self), "setup_build_environment"): + return + super(Builder, self).setup_build_environment(env) + + def setup_dependent_build_environment(self, env, dependent_spec): + """Sets up the build environment of packages that depend on this one. + + This is similar to ``setup_build_environment``, but it is used to + modify the build environments of packages that *depend* on this one. + + This gives packages like Python and others that follow the extension + model a way to implement common environment or compile-time settings + for dependencies. + + This method will be called before the dependent package prefix exists + in Spack's store. + + Examples: + 1. Installing python modules generally requires ``PYTHONPATH`` + to point to the ``lib/pythonX.Y/site-packages`` directory in the + module's install prefix. This method could be used to set that + variable. + + Args: + env (spack.util.environment.EnvironmentModifications): environment + modifications to be applied when the dependent package is built. + Package authors can call methods on it to alter the build environment. + + dependent_spec (spack.spec.Spec): the spec of the dependent package + about to be built. This allows the extendee (self) to query + the dependent's state. 
Note that *this* package's spec is + available as ``self.spec`` + """ + if not hasattr(super(Builder, self), "setup_dependent_build_environment"): + return + super(Builder, self).setup_dependent_build_environment(env, dependent_spec) + + def __getitem__(self, idx): + key = self.phases[idx] + return self.callbacks[key] + + def __len__(self): + return len(self.phases) + + def __repr__(self): + msg = "{0}({1})" + return msg.format(type(self).__name__, self.pkg.spec.format("{name}/{hash:7}")) + + def __str__(self): + msg = '"{0}" builder for "{1}"' + return msg.format(type(self).build_system, self.pkg.spec.format("{name}/{hash:7}")) + + +# Export these names as standalone to be used in packages +run_after = PhaseCallbacksMeta.run_after +run_before = PhaseCallbacksMeta.run_before diff --git a/lib/spack/spack/ci.py b/lib/spack/spack/ci.py index 198e787dea5..aa54b71b3d4 100644 --- a/lib/spack/spack/ci.py +++ b/lib/spack/spack/ci.py @@ -4,6 +4,7 @@ # SPDX-License-Identifier: (Apache-2.0 OR MIT) import base64 +import codecs import copy import json import os @@ -11,11 +12,12 @@ import shutil import stat import subprocess +import sys import tempfile import time import zipfile -from six import iteritems +from six import iteritems, string_types from six.moves.urllib.error import HTTPError, URLError from six.moves.urllib.parse import urlencode from six.moves.urllib.request import HTTPHandler, Request, build_opener @@ -41,7 +43,6 @@ from spack.error import SpackError from spack.reporters.cdash import CDash from spack.reporters.cdash import build_stamp as cdash_build_stamp -from spack.spec import Spec from spack.util.pattern import Bunch JOB_RETRY_CONDITIONS = [ @@ -141,13 +142,6 @@ def _get_spec_string(spec): return spec.format("".join(format_elements)) -def _format_root_spec(spec, main_phase, strip_compiler): - if main_phase is False and strip_compiler is True: - return "{0}@{1} arch={2}".format(spec.name, spec.version, spec.architecture) - else: - return spec.dag_hash() - - def 
_spec_deps_key(s): return "{0}/{1}".format(s.name, s.dag_hash(7)) @@ -173,8 +167,7 @@ def _get_spec_dependencies( for entry in specs: spec_labels[entry["label"]] = { - "spec": Spec(entry["spec"]), - "rootSpec": entry["root_spec"], + "spec": entry["spec"], "needs_rebuild": entry["needs_rebuild"], } @@ -201,7 +194,7 @@ def stage_spec_jobs(specs, check_index_only=False, mirrors_to_check=None): and stages: spec_labels: A dictionary mapping the spec labels which are made of - (pkg-name/hash-prefix), to objects containing "rootSpec" and "spec" + (pkg-name/hash-prefix), to objects containing "spec" and "needs_rebuild" keys. The root spec is the spec of which this spec is a dependency and the spec is the formatted spec string for this spec. @@ -316,17 +309,14 @@ def _compute_spec_deps(spec_list, check_index_only=False, mirrors_to_check=None) ], "specs": [ { - "root_spec": "readline@7.0%apple-clang@9.1.0 arch=darwin-...", "spec": "readline@7.0%apple-clang@9.1.0 arch=darwin-highs...", "label": "readline/ip6aiun" }, { - "root_spec": "readline@7.0%apple-clang@9.1.0 arch=darwin-...", "spec": "ncurses@6.1%apple-clang@9.1.0 arch=darwin-highsi...", "label": "ncurses/y43rifz" }, { - "root_spec": "readline@7.0%apple-clang@9.1.0 arch=darwin-...", "spec": "pkgconf@1.5.4%apple-clang@9.1.0 arch=darwin-high...", "label": "pkgconf/eg355zb" } @@ -348,8 +338,6 @@ def append_dep(s, d): ) for spec in spec_list: - root_spec = spec - for s in spec.traverse(deptype=all): if s.external: tty.msg("Will not stage external pkg: {0}".format(s)) @@ -361,8 +349,7 @@ def append_dep(s, d): skey = _spec_deps_key(s) spec_labels[skey] = { - "spec": _get_spec_string(s), - "root": root_spec, + "spec": s, "needs_rebuild": not up_to_date_mirrors, } @@ -379,7 +366,6 @@ def append_dep(s, d): { "label": spec_label, "spec": spec_holder["spec"], - "root_spec": spec_holder["root"], "needs_rebuild": spec_holder["needs_rebuild"], } ) @@ -396,6 +382,14 @@ def _spec_matches(spec, match_string): return 
spec.satisfies(match_string) +def _remove_attributes(src_dict, dest_dict): + if "tags" in src_dict and "tags" in dest_dict: + # For 'tags', we remove any tags that are listed for removal + for tag in src_dict["tags"]: + while tag in dest_dict["tags"]: + dest_dict["tags"].remove(tag) + + def _copy_attributes(attrs_list, src_dict, dest_dict): for runner_attr in attrs_list: if runner_attr in src_dict: @@ -429,23 +423,23 @@ def _find_matching_config(spec, gitlab_ci): _copy_attributes(overridable_attrs, gitlab_ci, runner_attributes) - ci_mappings = gitlab_ci["mappings"] - for ci_mapping in ci_mappings: + matched = False + only_first = gitlab_ci.get("match_behavior", "first") == "first" + for ci_mapping in gitlab_ci["mappings"]: for match_string in ci_mapping["match"]: if _spec_matches(spec, match_string): + matched = True + if "remove-attributes" in ci_mapping: + _remove_attributes(ci_mapping["remove-attributes"], runner_attributes) if "runner-attributes" in ci_mapping: _copy_attributes( overridable_attrs, ci_mapping["runner-attributes"], runner_attributes ) - return runner_attributes - else: - return None + break + if matched and only_first: + break - return runner_attributes - - -def _pkg_name_from_spec_label(spec_label): - return spec_label[: spec_label.index("/")] + return runner_attributes if matched else None def _format_job_needs( @@ -521,38 +515,36 @@ def compute_affected_packages(rev1="HEAD^", rev2="HEAD"): return spack.repo.get_all_package_diffs("ARC", rev1=rev1, rev2=rev2) -def get_spec_filter_list(env, affected_pkgs, dependencies=True, dependents=True): - """Given a list of package names, and assuming an active and - concretized environment, return a set of concrete specs from - the environment corresponding to any of the affected pkgs (or - optionally to any of their dependencies/dependents). 
+def get_spec_filter_list(env, affected_pkgs): + """Given a list of package names and an active/concretized + environment, return the set of all concrete specs from the + environment that could have been affected by changing the + list of packages. Arguments: env (spack.environment.Environment): Active concrete environment affected_pkgs (List[str]): Affected package names - dependencies (bool): Include dependencies of affected packages - dependents (bool): Include dependents of affected pacakges Returns: - A list of concrete specs from the active environment including - those associated with affected packages, and possible their - dependencies and dependents as well. + A set of concrete specs from the active environment including + those associated with affected packages, their dependencies and + dependents, as well as their dependents dependencies. """ affected_specs = set() all_concrete_specs = env.all_specs() tty.debug("All concrete environment specs:") for s in all_concrete_specs: tty.debug(" {0}/{1}".format(s.name, s.dag_hash()[:7])) - for pkg in affected_pkgs: - env_matches = [s for s in all_concrete_specs if s.name == pkg] - for match in env_matches: - affected_specs.add(match) - if dependencies: - affected_specs.update(match.traverse(direction="children", root=False)) - if dependents: - affected_specs.update(match.traverse(direction="parents", root=False)) + env_matches = [s for s in all_concrete_specs if s.name in frozenset(affected_pkgs)] + visited = set() + dag_hash = lambda s: s.dag_hash() + for match in env_matches: + for parent in match.traverse(direction="parents", key=dag_hash): + affected_specs.update( + parent.traverse(direction="children", visited=visited, key=dag_hash) + ) return affected_specs @@ -613,11 +605,11 @@ def generate_gitlab_ci_yaml( cdash_handler = CDashHandler(yaml_root.get("cdash")) if "cdash" in yaml_root else None build_group = cdash_handler.build_group if cdash_handler else None - prune_untouched_packages = 
os.environ.get("SPACK_PRUNE_UNTOUCHED", None) - if prune_untouched_packages: + prune_untouched_packages = False + spack_prune_untouched = os.environ.get("SPACK_PRUNE_UNTOUCHED", None) + if spack_prune_untouched is not None and spack_prune_untouched.lower() == "true": # Requested to prune untouched packages, but assume we won't do that # unless we're actually in a git repo. - prune_untouched_packages = False rev1, rev2 = get_change_revisions() tty.debug("Got following revisions: rev1={0}, rev2={1}".format(rev1, rev2)) if rev1 and rev2: @@ -631,7 +623,15 @@ def generate_gitlab_ci_yaml( affected_specs = get_spec_filter_list(env, affected_pkgs) tty.debug("all affected specs:") for s in affected_specs: - tty.debug(" {0}".format(s.name)) + tty.debug(" {0}/{1}".format(s.name, s.dag_hash()[:7])) + + # Allow overriding --prune-dag cli opt with environment variable + prune_dag_override = os.environ.get("SPACK_PRUNE_UP_TO_DATE", None) + if prune_dag_override is not None: + prune_dag = True if prune_dag_override.lower() == "true" else False + + # If we are not doing any kind of pruning, we are rebuilding everything + rebuild_everything = not prune_dag and not prune_untouched_packages # Downstream jobs will "need" (depend on, for both scheduling and # artifacts, which include spack.lock file) this pipeline generation @@ -832,7 +832,6 @@ def generate_gitlab_ci_yaml( phase_name = phase["name"] strip_compilers = phase["strip-compilers"] - main_phase = _is_main_phase(phase_name) spec_labels, dependencies, stages = staged_phases[phase_name] for stage_jobs in stages: @@ -842,14 +841,16 @@ def generate_gitlab_ci_yaml( for spec_label in stage_jobs: spec_record = spec_labels[spec_label] - root_spec = spec_record["rootSpec"] - pkg_name = _pkg_name_from_spec_label(spec_label) - release_spec = root_spec[pkg_name] + release_spec = spec_record["spec"] release_spec_dag_hash = release_spec.dag_hash() if prune_untouched_packages: if release_spec not in affected_specs: - tty.debug("Pruning {0}, 
untouched by change.".format(release_spec.name)) + tty.debug( + "Pruning {0}/{1}, untouched by change.".format( + release_spec.name, release_spec.dag_hash()[:7] + ) + ) spec_record["needs_rebuild"] = False continue @@ -865,7 +866,7 @@ def generate_gitlab_ci_yaml( # For spack pipelines "public" and "protected" are reserved tags tags = _remove_reserved_tags(tags) if spack_pipeline_type == "spack_protected_branch": - tags.extend(["aws", "protected"]) + tags.extend(["protected"]) elif spack_pipeline_type == "spack_pull_request": tags.extend(["public"]) @@ -914,7 +915,6 @@ def generate_gitlab_ci_yaml( compiler_action = "INSTALL_MISSING" job_vars = { - "SPACK_ROOT_SPEC": _format_root_spec(root_spec, main_phase, strip_compilers), "SPACK_JOB_SPEC_DAG_HASH": release_spec_dag_hash, "SPACK_JOB_SPEC_PKG_NAME": release_spec.name, "SPACK_COMPILER_ACTION": compiler_action, @@ -931,9 +931,7 @@ def generate_gitlab_ci_yaml( # purposes, so we only get the direct dependencies. dep_jobs = [] for dep_label in dependencies[spec_label]: - dep_pkg = _pkg_name_from_spec_label(dep_label) - dep_root = spec_labels[dep_label]["rootSpec"] - dep_jobs.append(dep_root[dep_pkg]) + dep_jobs.append(spec_labels[dep_label]["spec"]) job_dependencies.extend( _format_job_needs( @@ -1017,13 +1015,15 @@ def generate_gitlab_ci_yaml( tty.debug(debug_msg) if prune_dag and not rebuild_spec: - tty.debug("Pruning {0}, does not need rebuild.".format(release_spec.name)) + tty.debug( + "Pruning {0}/{1}, does not need rebuild.".format( + release_spec.name, release_spec.dag_hash() + ) + ) continue if broken_spec_urls is not None and release_spec_dag_hash in broken_spec_urls: - known_broken_specs_encountered.append( - "{0} ({1})".format(release_spec, release_spec_dag_hash) - ) + known_broken_specs_encountered.append(release_spec_dag_hash) # Only keep track of these if we are copying rebuilt cache entries if spack_buildcache_copy: @@ -1167,7 +1167,14 @@ def generate_gitlab_ci_yaml( "after_script", ] - service_job_retries 
= {"max": 2, "when": ["runner_system_failure", "stuck_or_timeout_failure"]} + service_job_retries = { + "max": 2, + "when": [ + "runner_system_failure", + "stuck_or_timeout_failure", + "script_failure", + ], + } if job_id > 0: if temp_storage_url_prefix: @@ -1286,6 +1293,9 @@ def generate_gitlab_ci_yaml( "SPACK_JOB_TEST_DIR": rel_job_test_dir, "SPACK_LOCAL_MIRROR_DIR": rel_local_mirror_dir, "SPACK_PIPELINE_TYPE": str(spack_pipeline_type), + "SPACK_CI_STACK_NAME": os.environ.get("SPACK_CI_STACK_NAME", "None"), + "SPACK_REBUILD_CHECK_UP_TO_DATE": str(prune_dag), + "SPACK_REBUILD_EVERYTHING": str(rebuild_everything), } if remote_mirror_override: @@ -1343,13 +1353,11 @@ def generate_gitlab_ci_yaml( sorted_output = {"no-specs-to-rebuild": noop_job} if known_broken_specs_encountered: - error_msg = ( - "Pipeline generation failed due to the presence of the " - "following specs that are known to be broken in develop:\n" - ) - for broken_spec in known_broken_specs_encountered: - error_msg += "* {0}\n".format(broken_spec) - tty.die(error_msg) + tty.error("This pipeline generated hashes known to be broken on develop:") + display_broken_spec_messages(broken_specs_url, known_broken_specs_encountered) + + if not rebuild_everything: + sys.exit(1) with open(output_file, "w") as outf: outf.write(syaml.dump_config(sorted_output, default_flow_style=True)) @@ -1461,64 +1469,6 @@ def configure_compilers(compiler_action, scope=None): return None -def get_concrete_specs(env, root_spec, job_name, compiler_action): - """Build a dictionary of concrete specs relevant to a particular - rebuild job. This includes the root spec and the spec to be - rebuilt (which could be the same). - - Arguments: - - env (spack.environment.Environment): Activated spack environment - used to get concrete root spec by hash in case compiler_action - is anthing other than FIND_ANY. 
- root_spec (str): If compiler_action is FIND_ANY root_spec is - a string representation which can be turned directly into - a spec, otherwise, it's a hash used to index the activated - spack environment. - job_name (str): Name of package to be built, used to index the - concrete root spec and produce the concrete spec to be - built. - compiler_action (str): Determines how to interpret the root_spec - parameter, either as a string representation as a hash. - - Returns: - - .. code-block:: JSON - - { - "root": "", - "": "", - } - - """ - spec_map = { - "root": None, - } - - if compiler_action == "FIND_ANY": - # This corresponds to a bootstrapping phase where we need to - # rely on any available compiler to build the package (i.e. the - # compiler needed to be stripped from the spec when we generated - # the job), and thus we need to concretize the root spec again. - tty.debug("About to concretize {0}".format(root_spec)) - concrete_root = Spec(root_spec).concretized() - tty.debug("Resulting concrete root: {0}".format(concrete_root)) - else: - # in this case, either we're relying on Spack to install missing - # compiler bootstrapped in a previous phase, or else we only had one - # phase (like a site which already knows what compilers are available - # on it's runners), so we don't want to concretize that root spec - # again. The reason we take this path in the first case (bootstrapped - # compiler), is that we can't concretize a spec at this point if we're - # going to ask spack to "install_missing_compilers". 
- concrete_root = env.specs_by_hash[root_spec] - - spec_map["root"] = concrete_root - spec_map[job_name] = concrete_root[job_name] - - return spec_map - - def _push_mirror_contents(env, specfile_path, sign_binaries, mirror_url): """Unchecked version of the public API, for easier mocking""" unsigned = not sign_binaries @@ -1567,6 +1517,19 @@ def push_mirror_contents(env, specfile_path, mirror_url, sign_binaries): raise inst +def remove_other_mirrors(mirrors_to_keep, scope=None): + """Remove all mirrors from the given config scope, the exceptions being + any listed in in mirrors_to_keep, which is a list of mirror urls. + """ + mirrors_to_remove = [] + for name, mirror_url in spack.config.get("mirrors", scope=scope).items(): + if mirror_url not in mirrors_to_keep: + mirrors_to_remove.append(name) + + for mirror_name in mirrors_to_remove: + spack.mirror.remove(mirror_name, scope) + + def copy_files_to_artifacts(src, artifacts_dir): """ Copy file(s) to the given artifacts directory @@ -1982,26 +1945,35 @@ def reproduce_ci_job(url, work_dir): print("".join(inst_list)) -def process_command(cmd, cmd_args, repro_dir): +def process_command(name, commands, repro_dir): """ Create a script for and run the command. Copy the script to the reproducibility directory. Arguments: - cmd (str): name of the command being processed - cmd_args (list): string arguments to pass to the command + name (str): name of the command being processed + commands (list): list of arguments for single command or list of lists of + arguments for multiple commands. No shell escape is performed. repro_dir (str): Job reproducibility directory Returns: the exit code from processing the command """ - tty.debug("spack {0} arguments: {1}".format(cmd, cmd_args)) + tty.debug("spack {0} arguments: {1}".format(name, commands)) + + if len(commands) == 0 or isinstance(commands[0], string_types): + commands = [commands] + + # Create a string [command 1] && [command 2] && ... 
&& [command n] with commands + # quoted using double quotes. + args_to_string = lambda args: " ".join('"{}"'.format(arg) for arg in args) + full_command = " && ".join(map(args_to_string, commands)) # Write the command to a shell script - script = "{0}.sh".format(cmd) + script = "{0}.sh".format(name) with open(script, "w") as fd: - fd.write("#!/bin/bash\n\n") - fd.write("\n# spack {0} command\n".format(cmd)) - fd.write(" ".join(['"{0}"'.format(i) for i in cmd_args])) + fd.write("#!/bin/sh\n\n") + fd.write("\n# spack {0} command\n".format(name)) + fd.write(full_command) fd.write("\n") st = os.stat(script) @@ -2013,15 +1985,15 @@ def process_command(cmd, cmd_args, repro_dir): # Run the generated install.sh shell script as if it were being run in # a login shell. try: - cmd_process = subprocess.Popen(["bash", "./{0}".format(script)]) + cmd_process = subprocess.Popen(["/bin/sh", "./{0}".format(script)]) cmd_process.wait() exit_code = cmd_process.returncode except (ValueError, subprocess.CalledProcessError, OSError) as err: - tty.error("Encountered error running {0} script".format(cmd)) + tty.error("Encountered error running {0} script".format(name)) tty.error(err) exit_code = 1 - tty.debug("spack {0} exited {1}".format(cmd, exit_code)) + tty.debug("spack {0} exited {1}".format(name, exit_code)) return exit_code @@ -2060,6 +2032,75 @@ def create_buildcache(**kwargs): push_mirror_contents(env, json_path, pipeline_mirror_url, sign_binaries) +def write_broken_spec(url, pkg_name, stack_name, job_url, pipeline_url, spec_dict): + """Given a url to write to and the details of the failed job, write an entry + in the broken specs list. 
+ """ + tmpdir = tempfile.mkdtemp() + file_path = os.path.join(tmpdir, "broken.txt") + + broken_spec_details = { + "broken-spec": { + "job-name": pkg_name, + "job-stack": stack_name, + "job-url": job_url, + "pipeline-url": pipeline_url, + "concrete-spec-dict": spec_dict, + } + } + + try: + with open(file_path, "w") as fd: + fd.write(syaml.dump(broken_spec_details)) + web_util.push_to_url( + file_path, + url, + keep_original=False, + extra_args={"ContentType": "text/plain"}, + ) + except Exception as err: + # If there is an S3 error (e.g., access denied or connection + # error), the first non boto-specific class in the exception + # hierarchy is Exception. Just print a warning and return + msg = "Error writing to broken specs list {0}: {1}".format(url, err) + tty.warn(msg) + finally: + shutil.rmtree(tmpdir) + + +def read_broken_spec(broken_spec_url): + """Read data from broken specs file located at the url, return as a yaml + object. + """ + try: + _, _, fs = web_util.read_from_url(broken_spec_url) + except (URLError, web_util.SpackWebError, HTTPError): + tty.warn("Unable to read broken spec from {0}".format(broken_spec_url)) + return None + + broken_spec_contents = codecs.getreader("utf-8")(fs).read() + return syaml.load(broken_spec_contents) + + +def display_broken_spec_messages(base_url, hashes): + """Fetch the broken spec file for each of the hashes under the base_url and + print a message with some details about each one. 
+ """ + broken_specs = [(h, read_broken_spec(url_util.join(base_url, h))) for h in hashes] + for spec_hash, broken_spec in [tup for tup in broken_specs if tup[1]]: + details = broken_spec["broken-spec"] + if "job-name" in details: + item_name = "{0}/{1}".format(details["job-name"], spec_hash[:7]) + else: + item_name = spec_hash + + if "job-stack" in details: + item_name = "{0} (in stack {1})".format(item_name, details["job-stack"]) + + msg = " {0} was reported broken here: {1}".format(item_name, details["job-url"]) + tty.msg(msg) + + def run_standalone_tests(**kwargs): """Run stand-alone tests on the current spec. @@ -2095,8 +2136,9 @@ def run_standalone_tests(**kwargs): test_args = [ "spack", - "-d", - "-v", + "--color=always", + "--backtrace", + "--verbose", "test", "run", ] diff --git a/lib/spack/spack/cmd/__init__.py b/lib/spack/spack/cmd/__init__.py index e829166d396..ee1297e51ca 100644 --- a/lib/spack/spack/cmd/__init__.py +++ b/lib/spack/spack/cmd/__init__.py @@ -234,7 +234,8 @@ def parse_specs(args, **kwargs): msg = e.message if e.long_message: msg += e.long_message - if unquoted_flags: + # Unquoted flags will be read as a variant or hash + if unquoted_flags and ("variant" in msg or "hash" in msg): msg += "\n\n" msg += unquoted_flags.report() diff --git a/lib/spack/spack/cmd/bootstrap.py b/lib/spack/spack/cmd/bootstrap.py index 9a782b64eab..6a45e22f8e4 100644 --- a/lib/spack/spack/cmd/bootstrap.py +++ b/lib/spack/spack/cmd/bootstrap.py @@ -5,8 +5,10 @@ from __future__ import print_function import os.path +import platform import shutil import tempfile +import warnings import llnl.util.filesystem import llnl.util.tty @@ -28,7 +30,7 @@ # Tarball to be downloaded if binary packages are requested in a local mirror -BINARY_TARBALL = "https://github.com/spack/spack-bootstrap-mirrors/releases/download/v0.2/bootstrap-buildcache.tar.gz" +BINARY_TARBALL = "https://github.com/spack/spack-bootstrap-mirrors/releases/download/v0.4/bootstrap-buildcache.tar.gz" #: 
Subdirectory where to create the mirror LOCAL_MIRROR_DIR = "bootstrap_cache" @@ -48,8 +50,9 @@ }, } -CLINGO_JSON = "$spack/share/spack/bootstrap/github-actions-v0.2/clingo.json" -GNUPG_JSON = "$spack/share/spack/bootstrap/github-actions-v0.2/gnupg.json" +CLINGO_JSON = "$spack/share/spack/bootstrap/github-actions-v0.4/clingo.json" +GNUPG_JSON = "$spack/share/spack/bootstrap/github-actions-v0.4/gnupg.json" +PATCHELF_JSON = "$spack/share/spack/bootstrap/github-actions-v0.4/patchelf.json" # Metadata for a generated source mirror SOURCE_METADATA = { @@ -73,6 +76,8 @@ def _add_scope_option(parser): def setup_parser(subparser): sp = subparser.add_subparsers(dest="subcommand") + sp.add_parser("now", help="Spack ready, right now!") + status = sp.add_parser("status", help="get the status of Spack") status.add_argument( "--optional", @@ -89,9 +94,11 @@ def setup_parser(subparser): enable = sp.add_parser("enable", help="enable bootstrapping") _add_scope_option(enable) + enable.add_argument("name", help="name of the source to be enabled", nargs="?", default=None) disable = sp.add_parser("disable", help="disable bootstrapping") _add_scope_option(disable) + disable.add_argument("name", help="name of the source to be disabled", nargs="?", default=None) reset = sp.add_parser("reset", help="reset bootstrapping configuration to Spack defaults") spack.cmd.common.arguments.add_common_arguments(reset, ["yes_to_all"]) @@ -105,11 +112,11 @@ def setup_parser(subparser): list = sp.add_parser("list", help="list all the sources of software to bootstrap Spack") _add_scope_option(list) - trust = sp.add_parser("trust", help="trust a bootstrapping source") + trust = sp.add_parser("trust", help="(DEPRECATED) trust a bootstrapping source") _add_scope_option(trust) trust.add_argument("name", help="name of the source to be trusted") - untrust = sp.add_parser("untrust", help="untrust a bootstrapping source") + untrust = sp.add_parser("untrust", help="(DEPRECATED) untrust a bootstrapping source") 
_add_scope_option(untrust) untrust.add_argument("name", help="name of the source to be untrusted") @@ -137,9 +144,21 @@ def setup_parser(subparser): def _enable_or_disable(args): - # Set to True if we called "enable", otherwise set to false value = args.subcommand == "enable" - spack.config.set("bootstrap:enable", value, scope=args.scope) + if args.name is None: + # Set to True if we called "enable", otherwise set to false + old_value = spack.config.get("bootstrap:enable", scope=args.scope) + if old_value == value: + llnl.util.tty.msg("Bootstrapping is already {}d".format(args.subcommand)) + else: + spack.config.set("bootstrap:enable", value, scope=args.scope) + llnl.util.tty.msg("Bootstrapping has been {}d".format(args.subcommand)) + return + + if value is True: + _trust(args) + else: + _untrust(args) def _reset(args): @@ -170,6 +189,8 @@ def _reset(args): if os.path.exists(bootstrap_yaml): shutil.move(bootstrap_yaml, backup_file) + spack.config.config.clear_caches() + def _root(args): if args.path: @@ -194,30 +215,41 @@ def fmt(header, content): header_fmt = "@*b{{{0}:}} {1}" color.cprint(header_fmt.format(header, content)) - trust_str = "@*y{UNKNOWN}" + trust_str = "@*y{DISABLED}" if trusted is True: - trust_str = "@*g{TRUSTED}" + trust_str = "@*g{ENABLED}" elif trusted is False: - trust_str = "@*r{UNTRUSTED}" + trust_str = "@*r{DISABLED}" fmt("Name", source["name"] + " " + trust_str) print() - fmt(" Type", source["type"]) - print() + if trusted is True or args.verbose: + fmt(" Type", source["type"]) + print() - info_lines = ["\n"] - for key, value in source.get("info", {}).items(): - info_lines.append(" " * 4 + "@*{{{0}}}: {1}\n".format(key, value)) - if len(info_lines) > 1: - fmt(" Info", "".join(info_lines)) + info_lines = ["\n"] + for key, value in source.get("info", {}).items(): + info_lines.append(" " * 4 + "@*{{{0}}}: {1}\n".format(key, value)) + if len(info_lines) > 1: + fmt(" Info", "".join(info_lines)) - description_lines = ["\n"] - for line in 
source["description"].split("\n"): - description_lines.append(" " * 4 + line + "\n") + description_lines = ["\n"] + for line in source["description"].split("\n"): + description_lines.append(" " * 4 + line + "\n") - fmt(" Description", "".join(description_lines)) + fmt(" Description", "".join(description_lines)) trusted = spack.config.get("bootstrap:trusted", {}) + + def sort_fn(x): + x_trust = trusted.get(x["name"], None) + if x_trust is True: + return 0 + elif x_trust is None: + return 1 + return 2 + + sources = sorted(sources, key=sort_fn) for s in sources: _print_method(s, trusted.get(s["name"], None)) @@ -249,15 +281,27 @@ def _write_trust_state(args, value): spack.config.add("bootstrap:trusted:{0}:{1}".format(name, str(value)), scope=scope) +def _deprecate_command(deprecated_cmd, suggested_cmd): + msg = ( + "the 'spack bootstrap {} ...' command is deprecated and will be " + "removed in v0.20, use 'spack bootstrap {} ...' instead" + ) + warnings.warn(msg.format(deprecated_cmd, suggested_cmd)) + + def _trust(args): + if args.subcommand == "trust": + _deprecate_command("trust", "enable") _write_trust_state(args, value=True) - msg = '"{0}" is now trusted for bootstrapping' + msg = '"{0}" is now enabled for bootstrapping' llnl.util.tty.msg(msg.format(args.name)) def _untrust(args): + if args.subcommand == "untrust": + _deprecate_command("untrust", "disable") _write_trust_state(args, value=False) - msg = '"{0}" is now untrusted and will not be used for bootstrapping' + msg = '"{0}" is now disabled and will not be used for bootstrapping' llnl.util.tty.msg(msg.format(args.name)) @@ -400,10 +444,19 @@ def write_metadata(subdir, metadata): abs_directory, rel_directory = write_metadata(subdir="binaries", metadata=BINARY_METADATA) shutil.copy(spack.util.path.canonicalize_path(CLINGO_JSON), abs_directory) shutil.copy(spack.util.path.canonicalize_path(GNUPG_JSON), abs_directory) + shutil.copy(spack.util.path.canonicalize_path(PATCHELF_JSON), abs_directory) instructions += 
cmd.format("local-binaries", rel_directory) print(instructions) +def _now(args): + with spack.bootstrap.ensure_bootstrap_configuration(): + if platform.system().lower() == "linux": + spack.bootstrap.ensure_patchelf_in_path_or_raise() + spack.bootstrap.ensure_clingo_importable_or_raise() + spack.bootstrap.ensure_gpg_in_path_or_raise() + + def bootstrap(parser, args): callbacks = { "status": _status, @@ -417,5 +470,6 @@ def bootstrap(parser, args): "add": _add, "remove": _remove, "mirror": _mirror, + "now": _now, } callbacks[args.subcommand](args) diff --git a/lib/spack/spack/cmd/buildcache.py b/lib/spack/spack/cmd/buildcache.py index 6aaa5eb1c75..d25f2430391 100644 --- a/lib/spack/spack/cmd/buildcache.py +++ b/lib/spack/spack/cmd/buildcache.py @@ -8,7 +8,6 @@ import shutil import sys import tempfile -import warnings import llnl.util.tty as tty @@ -258,19 +257,6 @@ def setup_parser(subparser): ) savespecfile.set_defaults(func=save_specfile_fn) - # Copy buildcache from some directory to another mirror url - copy = subparsers.add_parser("copy", help=copy_fn.__doc__) - copy.add_argument( - "--base-dir", default=None, help="Path to mirror directory (root of existing buildcache)" - ) - copy.add_argument( - "--spec-file", - default=None, - help=("Path to spec json or yaml file representing buildcache entry to" + " copy"), - ) - copy.add_argument("--destination-url", default=None, help="Destination mirror url") - copy.set_defaults(func=copy_fn) - # Sync buildcache entries from one mirror to another sync = subparsers.add_parser("sync", help=sync_fn.__doc__) sync.add_argument( @@ -549,78 +535,6 @@ def save_specfile_fn(args): sys.exit(0) -def copy_fn(args): - """Copy a buildcache entry and all its files from one mirror, given as - '--base-dir', to some other mirror, specified as '--destination-url'. 
- The specific buildcache entry to be copied from one location to the - other is identified using the '--spec-file' argument.""" - # TODO: Remove after v0.18.0 release - msg = ( - '"spack buildcache copy" is deprecated and will be removed from ' - "Spack starting in v0.19.0" - ) - warnings.warn(msg) - - if not args.spec_file: - tty.msg("No spec yaml provided, exiting.") - sys.exit(1) - - if not args.base_dir: - tty.msg("No base directory provided, exiting.") - sys.exit(1) - - if not args.destination_url: - tty.msg("No destination mirror url provided, exiting.") - sys.exit(1) - - dest_url = args.destination_url - - if dest_url[0:7] != "file://" and dest_url[0] != "/": - tty.msg('Only urls beginning with "file://" or "/" are supported ' + "by buildcache copy.") - sys.exit(1) - - try: - with open(args.spec_file, "r") as fd: - spec = Spec.from_yaml(fd.read()) - except Exception as e: - tty.debug(e) - tty.error("Unable to concrectize spec from yaml {0}".format(args.spec_file)) - sys.exit(1) - - dest_root_path = dest_url - if dest_url[0:7] == "file://": - dest_root_path = dest_url[7:] - - build_cache_dir = bindist.build_cache_relative_path() - - tarball_rel_path = os.path.join(build_cache_dir, bindist.tarball_path_name(spec, ".spack")) - tarball_src_path = os.path.join(args.base_dir, tarball_rel_path) - tarball_dest_path = os.path.join(dest_root_path, tarball_rel_path) - - specfile_rel_path = os.path.join(build_cache_dir, bindist.tarball_name(spec, ".spec.json")) - specfile_src_path = os.path.join(args.base_dir, specfile_rel_path) - specfile_dest_path = os.path.join(dest_root_path, specfile_rel_path) - - specfile_rel_path_yaml = os.path.join( - build_cache_dir, bindist.tarball_name(spec, ".spec.yaml") - ) - specfile_src_path_yaml = os.path.join(args.base_dir, specfile_rel_path) - specfile_dest_path_yaml = os.path.join(dest_root_path, specfile_rel_path) - - # Make sure directory structure exists before attempting to copy - os.makedirs(os.path.dirname(tarball_dest_path)) - 
- # Now copy the specfile and tarball files to the destination mirror - tty.msg("Copying {0}".format(tarball_rel_path)) - shutil.copyfile(tarball_src_path, tarball_dest_path) - - tty.msg("Copying {0}".format(specfile_rel_path)) - shutil.copyfile(specfile_src_path, specfile_dest_path) - - tty.msg("Copying {0}".format(specfile_rel_path_yaml)) - shutil.copyfile(specfile_src_path_yaml, specfile_dest_path_yaml) - - def copy_buildcache_file(src_url, dest_url, local_path=None): """Copy from source url to destination url""" tmpdir = None diff --git a/lib/spack/spack/cmd/checksum.py b/lib/spack/spack/cmd/checksum.py index 3cfdcabff95..0483f4dd91f 100644 --- a/lib/spack/spack/cmd/checksum.py +++ b/lib/spack/spack/cmd/checksum.py @@ -15,7 +15,7 @@ import spack.spec import spack.stage import spack.util.crypto -from spack.package_base import preferred_version +from spack.package_base import deprecated_version, preferred_version from spack.util.naming import valid_fully_qualified_module_name from spack.version import VersionBase, ver @@ -81,6 +81,9 @@ def checksum(parser, args): if versions: remote_versions = None for version in versions: + if deprecated_version(pkg, version): + tty.warn("Version {0} is deprecated".format(version)) + version = ver(version) if not isinstance(version, VersionBase): tty.die( @@ -101,7 +104,7 @@ def checksum(parser, args): url_dict = pkg.fetch_remote_versions() if not url_dict: - tty.die("Could not find any versions for {0}".format(pkg.name)) + tty.die("Could not find any remote versions for {0}".format(pkg.name)) version_lines = spack.stage.get_checksums_for_versions( url_dict, diff --git a/lib/spack/spack/cmd/ci.py b/lib/spack/spack/cmd/ci.py index 7bb0497c811..6f29da1f4d6 100644 --- a/lib/spack/spack/cmd/ci.py +++ b/lib/spack/spack/cmd/ci.py @@ -7,7 +7,6 @@ import os import shutil import sys -import tempfile import llnl.util.filesystem as fs import llnl.util.tty as tty @@ -19,7 +18,6 @@ import spack.environment as ev import spack.hash_types as ht 
import spack.mirror -import spack.util.spack_yaml as syaml import spack.util.url as url_util import spack.util.web as web_util @@ -27,7 +25,8 @@ section = "build" level = "long" -CI_REBUILD_INSTALL_BASE_ARGS = ["spack", "-d", "-v"] +SPACK_COMMAND = "spack" +MAKE_COMMAND = "make" INSTALL_FAIL_CODE = 1 @@ -279,12 +278,14 @@ def ci_rebuild(args): ci_pipeline_id = get_env_var("CI_PIPELINE_ID") ci_job_name = get_env_var("CI_JOB_NAME") signing_key = get_env_var("SPACK_SIGNING_KEY") - root_spec = get_env_var("SPACK_ROOT_SPEC") job_spec_pkg_name = get_env_var("SPACK_JOB_SPEC_PKG_NAME") + job_spec_dag_hash = get_env_var("SPACK_JOB_SPEC_DAG_HASH") compiler_action = get_env_var("SPACK_COMPILER_ACTION") spack_pipeline_type = get_env_var("SPACK_PIPELINE_TYPE") remote_mirror_override = get_env_var("SPACK_REMOTE_MIRROR_OVERRIDE") remote_mirror_url = get_env_var("SPACK_REMOTE_MIRROR_URL") + spack_ci_stack_name = get_env_var("SPACK_CI_STACK_NAME") + rebuild_everything = get_env_var("SPACK_REBUILD_EVERYTHING") # Construct absolute paths relative to current $CI_PROJECT_DIR ci_project_dir = get_env_var("CI_PROJECT_DIR") @@ -297,7 +298,6 @@ def ci_rebuild(args): # Debug print some of the key environment variables we should have received tty.debug("pipeline_artifacts_dir = {0}".format(pipeline_artifacts_dir)) - tty.debug("root_spec = {0}".format(root_spec)) tty.debug("remote_mirror_url = {0}".format(remote_mirror_url)) tty.debug("job_spec_pkg_name = {0}".format(job_spec_pkg_name)) tty.debug("compiler_action = {0}".format(compiler_action)) @@ -326,6 +326,8 @@ def ci_rebuild(args): ) ) + full_rebuild = True if rebuild_everything and rebuild_everything.lower() == "true" else False + # If no override url exists, then just push binary package to the # normal remote mirror url. 
buildcache_mirror_url = remote_mirror_override or remote_mirror_url @@ -358,10 +360,11 @@ def ci_rebuild(args): mirror_msg = "artifact buildcache enabled, mirror url: {0}".format(pipeline_mirror_url) tty.debug(mirror_msg) - # Whatever form of root_spec we got, use it to get a map giving us concrete - # specs for this job and all of its dependencies. - spec_map = spack_ci.get_concrete_specs(env, root_spec, job_spec_pkg_name, compiler_action) - job_spec = spec_map[job_spec_pkg_name] + # Get the concrete spec to be built by this job. + try: + job_spec = env.get_one_by_hash(job_spec_dag_hash) + except AssertionError: + tty.die("Could not find environment spec with hash {0}".format(job_spec_dag_hash)) job_spec_json_file = "{0}.json".format(job_spec_pkg_name) job_spec_json_path = os.path.join(repro_dir, job_spec_json_file) @@ -425,17 +428,11 @@ def ci_rebuild(args): with open(job_spec_json_path, "w") as fd: fd.write(job_spec.to_json(hash=ht.dag_hash)) - # Write the concrete root spec json into the reproduction directory - root_spec_json_path = os.path.join(repro_dir, "root.json") - with open(root_spec_json_path, "w") as fd: - fd.write(spec_map["root"].to_json(hash=ht.dag_hash)) - # Write some other details to aid in reproduction into an artifact repro_file = os.path.join(repro_dir, "repro.json") repro_details = { "job_name": ci_job_name, "job_spec_json": job_spec_json_file, - "root_spec_json": "root.json", "ci_project_dir": ci_project_dir, } with open(repro_file, "w") as fd: @@ -449,6 +446,8 @@ def ci_rebuild(args): fd.write(spack_info.encode("utf8")) fd.write(b"\n") + pipeline_mirrors = [] + # If we decided there should be a temporary storage mechanism, add that # mirror now so it's used when we check for a hash match already # built for this spec. 
@@ -456,22 +455,29 @@ def ci_rebuild(args): spack.mirror.add( spack_ci.TEMP_STORAGE_MIRROR_NAME, pipeline_mirror_url, cfg.default_modify_scope() ) + pipeline_mirrors.append(pipeline_mirror_url) # Check configured mirrors for a built spec with a matching hash mirrors_to_check = None - if remote_mirror_override and spack_pipeline_type == "spack_protected_branch": - # Passing "mirrors_to_check" below means we *only* look in the override - # mirror to see if we should skip building, which is what we want. - mirrors_to_check = {"override": remote_mirror_override} + if remote_mirror_override: + if spack_pipeline_type == "spack_protected_branch": + # Passing "mirrors_to_check" below means we *only* look in the override + # mirror to see if we should skip building, which is what we want. + mirrors_to_check = {"override": remote_mirror_override} - # Adding this mirror to the list of configured mirrors means dependencies - # could be installed from either the override mirror or any other configured - # mirror (e.g. remote_mirror_url which is defined in the environment or - # pipeline_mirror_url), which is also what we want. - spack.mirror.add("mirror_override", remote_mirror_override, cfg.default_modify_scope()) + # Adding this mirror to the list of configured mirrors means dependencies + # could be installed from either the override mirror or any other configured + # mirror (e.g. remote_mirror_url which is defined in the environment or + # pipeline_mirror_url), which is also what we want. 
+ spack.mirror.add("mirror_override", remote_mirror_override, cfg.default_modify_scope()) + pipeline_mirrors.append(remote_mirror_override) - matches = bindist.get_mirrors_for_spec( - job_spec, mirrors_to_check=mirrors_to_check, index_only=False + matches = ( + None + if full_rebuild + else bindist.get_mirrors_for_spec( + job_spec, mirrors_to_check=mirrors_to_check, index_only=False + ) ) if matches: @@ -494,45 +500,97 @@ def ci_rebuild(args): # Now we are done and successful sys.exit(0) + # Before beginning the install, if this is a "rebuild everything" pipeline, we + # only want to keep the mirror being used by the current pipeline as it's binary + # package destination. This ensures that the when we rebuild everything, we only + # consume binary dependencies built in this pipeline. + if full_rebuild: + spack_ci.remove_other_mirrors(pipeline_mirrors, cfg.default_modify_scope()) + # No hash match anywhere means we need to rebuild spec # Start with spack arguments - install_args = [base_arg for base_arg in CI_REBUILD_INSTALL_BASE_ARGS] + spack_cmd = [SPACK_COMMAND, "--color=always", "--backtrace", "--verbose"] config = cfg.get("config") if not config["verify_ssl"]: - install_args.append("-k") + spack_cmd.append("-k") - install_args.extend( - [ - "install", - "--show-log-on-error", # Print full log on fails - "--keep-stage", - ] - ) + install_args = [] can_verify = spack_ci.can_verify_binaries() verify_binaries = can_verify and spack_is_pr_pipeline is False if not verify_binaries: install_args.append("--no-check-signature") + cdash_args = [] if cdash_handler: # Add additional arguments to `spack install` for CDash reporting. - install_args.extend(cdash_handler.args()) + cdash_args.extend(cdash_handler.args()) - # A compiler action of 'FIND_ANY' means we are building a bootstrap - # compiler or one of its deps. 
- # TODO: when compilers are dependencies, we should include --no-add - if compiler_action != "FIND_ANY": - install_args.append("--no-add") + slash_hash = "/{}".format(job_spec.dag_hash()) + deps_install_args = install_args + root_install_args = install_args + [ + "--keep-stage", + "--only=package", + "--use-buildcache=package:never,dependencies:only", + slash_hash, + ] - # TODO: once we have the concrete spec registry, use the DAG hash - # to identify the spec to install, rather than the concrete spec - # json file. - install_args.extend(["-f", job_spec_json_path]) + # ["x", "y"] -> "'x' 'y'" + args_to_string = lambda args: " ".join("'{}'".format(arg) for arg in args) + + commands = [ + # apparently there's a race when spack bootstraps? do it up front once + [ + SPACK_COMMAND, + "-e", + env.path, + "bootstrap", + "now", + ], + [ + SPACK_COMMAND, + "-e", + env.path, + "config", + "add", + "config:db_lock_timeout:120", # 2 minutes for processes to fight for a db lock + ], + [ + SPACK_COMMAND, + "-e", + env.path, + "env", + "depfile", + "-o", + "Makefile", + "--use-buildcache=package:never,dependencies:only", + "--make-target-prefix", + "ci", + slash_hash, # limit to spec we're building + ], + [ + # --output-sync requires GNU make 4.x. + # Old make errors when you pass it a flag it doesn't recognize, + # but it doesn't error or warn when you set unrecognized flags in + # this variable. 
+ "export", + "GNUMAKEFLAGS=--output-sync=recurse", + ], + [ + MAKE_COMMAND, + "SPACK={}".format(args_to_string(spack_cmd)), + "SPACK_COLOR=always", + "SPACK_INSTALL_FLAGS={}".format(args_to_string(deps_install_args)), + "-j$(nproc)", + "ci/.install-deps/{}".format(job_spec.dag_hash()), + ], + spack_cmd + ["install"] + root_install_args, + ] tty.debug("Installing {0} from source".format(job_spec.name)) - install_exit_code = spack_ci.process_command("install", install_args, repro_dir) + install_exit_code = spack_ci.process_command("install", commands, repro_dir) # Now do the post-install tasks tty.debug("spack install exited {0}".format(install_exit_code)) @@ -547,34 +605,14 @@ def ci_rebuild(args): dev_fail_hash = job_spec.dag_hash() broken_spec_path = url_util.join(broken_specs_url, dev_fail_hash) tty.msg("Reporting broken develop build as: {0}".format(broken_spec_path)) - tmpdir = tempfile.mkdtemp() - empty_file_path = os.path.join(tmpdir, "empty.txt") - - broken_spec_details = { - "broken-spec": { - "job-url": get_env_var("CI_JOB_URL"), - "pipeline-url": get_env_var("CI_PIPELINE_URL"), - "concrete-spec-dict": job_spec.to_dict(hash=ht.dag_hash), - } - } - - try: - with open(empty_file_path, "w") as efd: - efd.write(syaml.dump(broken_spec_details)) - web_util.push_to_url( - empty_file_path, - broken_spec_path, - keep_original=False, - extra_args={"ContentType": "text/plain"}, - ) - except Exception as err: - # If there is an S3 error (e.g., access denied or connection - # error), the first non boto-specific class in the exception - # hierarchy is Exception. Just print a warning and return - msg = "Error writing to broken specs list {0}: {1}".format(broken_spec_path, err) - tty.warn(msg) - finally: - shutil.rmtree(tmpdir) + spack_ci.write_broken_spec( + broken_spec_path, + job_spec_pkg_name, + spack_ci_stack_name, + get_env_var("CI_JOB_URL"), + get_env_var("CI_PIPELINE_URL"), + job_spec.to_dict(hash=ht.dag_hash), + ) # We generated the "spack install ..." 
command to "--keep-stage", copy # any logs from the staging directory to artifacts now diff --git a/lib/spack/spack/cmd/common/arguments.py b/lib/spack/spack/cmd/common/arguments.py index c9f15cfa98b..7e68ac594bf 100644 --- a/lib/spack/spack/cmd/common/arguments.py +++ b/lib/spack/spack/cmd/common/arguments.py @@ -6,6 +6,8 @@ import argparse +from llnl.util.lang import stable_partition + import spack.cmd import spack.config import spack.dependency as dep @@ -437,3 +439,57 @@ def add_s3_connection_args(subparser, add_help): subparser.add_argument( "--s3-endpoint-url", help="Endpoint URL to use to connect to this S3 mirror" ) + + +def use_buildcache(cli_arg_value): + """Translate buildcache related command line arguments into a pair of strings, + representing whether the root or its dependencies can use buildcaches. + + Argument type that accepts comma-separated subargs: + + 1. auto|only|never + 2. package:auto|only|never + 3. dependencies:auto|only|never + + Args: + cli_arg_value (str): command line argument value to be translated + + Return: + Tuple of two strings + """ + valid_keys = frozenset(["package", "dependencies"]) + valid_values = frozenset(["only", "never", "auto"]) + + # Split in args, split in key/value, and trim whitespace + args = [tuple(map(lambda x: x.strip(), part.split(":"))) for part in cli_arg_value.split(",")] + + # Verify keys and values + def is_valid(arg): + if len(arg) == 1: + return arg[0] in valid_values + if len(arg) == 2: + return arg[0] in valid_keys and arg[1] in valid_values + return False + + valid, invalid = stable_partition(args, is_valid) + + # print first error + if invalid: + raise argparse.ArgumentTypeError("invalid argument `{}`".format(":".join(invalid[0]))) + + # Default values + package = "auto" + dependencies = "auto" + + # Override in order. 
+ for arg in valid: + if len(arg) == 1: + package = dependencies = arg[0] + continue + key, val = arg + if key == "package": + package = val + else: + dependencies = val + + return package, dependencies diff --git a/lib/spack/spack/cmd/deactivate.py b/lib/spack/spack/cmd/deactivate.py index d68341037fa..2fa18fc2b19 100644 --- a/lib/spack/spack/cmd/deactivate.py +++ b/lib/spack/spack/cmd/deactivate.py @@ -91,6 +91,6 @@ def deactivate(parser, args): ) if not args.force and not spec.package.is_activated(view): - tty.die("Package %s is not activated." % specs[0].short_spec) + tty.die("Package %s is not activated." % spec.short_spec) spec.package.do_deactivate(view, force=args.force) diff --git a/lib/spack/spack/cmd/deprecate.py b/lib/spack/spack/cmd/deprecate.py index cc59475a623..5a4c390c22b 100644 --- a/lib/spack/spack/cmd/deprecate.py +++ b/lib/spack/spack/cmd/deprecate.py @@ -117,7 +117,7 @@ def deprecate(parser, args): all_deprecators = [] generator = ( - deprecate.traverse(order="post", type="link", root=True) + deprecate.traverse(order="post", deptype="link", root=True) if args.dependencies else [deprecate] ) diff --git a/lib/spack/spack/cmd/env.py b/lib/spack/spack/cmd/env.py index 86953a7bccd..558b6bead4e 100644 --- a/lib/spack/spack/cmd/env.py +++ b/lib/spack/spack/cmd/env.py @@ -3,6 +3,7 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) +import argparse import os import shutil import sys @@ -15,6 +16,8 @@ from llnl.util.tty.colify import colify from llnl.util.tty.color import colorize +import spack.cmd +import spack.cmd.common import spack.cmd.common.arguments import spack.cmd.common.arguments as arguments import spack.cmd.install @@ -24,6 +27,8 @@ import spack.environment as ev import spack.environment.shell import spack.schema.env +import spack.tengine +import spack.traverse as traverse import spack.util.string as string from spack.util.environment import EnvironmentModifications @@ -598,6 +603,15 @@ def env_depfile_setup_parser(subparser): 
dest="jobserver", help="disable POSIX jobserver support.", ) + subparser.add_argument( + "--use-buildcache", + dest="use_buildcache", + type=arguments.use_buildcache, + default="package:auto,dependencies:auto", + metavar="[{auto,only,never},][package:{auto,only,never},][dependencies:{auto,only,never}]", + help="When using `only`, redundant build dependencies are pruned from the DAG. " + "This flag is passed on to the generated spack install commands.", + ) subparser.add_argument( "-o", "--output", @@ -612,6 +626,64 @@ def env_depfile_setup_parser(subparser): choices=("make",), help="specify the depfile type. Currently only make is supported.", ) + subparser.add_argument( + metavar="specs", + dest="specs", + nargs=argparse.REMAINDER, + default=None, + help="generate a depfile only for matching specs in the environment", + ) + + +def _deptypes(use_buildcache): + """What edges should we follow for a given node? If it's a cache-only + node, then we can drop build type deps.""" + return ("link", "run") if use_buildcache == "only" else ("build", "link", "run") + + +class MakeTargetVisitor(object): + """This visitor produces an adjacency list of a (reduced) DAG, which + is used to generate Makefile targets with their prerequisites.""" + + def __init__(self, target, pkg_buildcache, deps_buildcache): + """ + Args: + target: function that maps dag_hash -> make target string + pkg_buildcache (str): "only", "never", "auto": when "only", + redundant build deps of roots are dropped + deps_buildcache (str): same as pkg_buildcache, but for non-root specs. 
+ """ + self.adjacency_list = [] + self.target = target + self.pkg_buildcache = pkg_buildcache + self.deps_buildcache = deps_buildcache + self.deptypes_root = _deptypes(pkg_buildcache) + self.deptypes_deps = _deptypes(deps_buildcache) + + def neighbors(self, node): + """Produce a list of spec to follow from node""" + deptypes = self.deptypes_root if node.depth == 0 else self.deptypes_deps + return traverse.sort_edges(node.edge.spec.edges_to_dependencies(deptype=deptypes)) + + def build_cache_flag(self, depth): + setting = self.pkg_buildcache if depth == 0 else self.deps_buildcache + if setting == "only": + return "--use-buildcache=only" + elif setting == "never": + return "--use-buildcache=never" + return "" + + def accept(self, node): + dag_hash = node.edge.spec.dag_hash() + spec_str = node.edge.spec.format( + "{name}{@version}{%compiler}{variants}{arch=architecture}" + ) + buildcache_flag = self.build_cache_flag(node.depth) + prereqs = " ".join([self.target(dep.spec.dag_hash()) for dep in self.neighbors(node)]) + self.adjacency_list.append((dag_hash, spec_str, buildcache_flag, prereqs)) + + # We already accepted this + return True def env_depfile(args): @@ -619,10 +691,6 @@ def env_depfile(args): spack.cmd.require_active_env(cmd_name="env depfile") env = ev.active_environment() - # Maps each hash in the environment to a string of install prereqs - hash_to_prereqs = {} - hash_to_spec = {} - if args.make_target_prefix is None: target_prefix = os.path.join(env.env_subdir_path, "makedeps") else: @@ -641,90 +709,56 @@ def get_target(name): def get_install_target(name): return os.path.join(target_prefix, ".install", name) - for _, spec in env.concretized_specs(): - for s in spec.traverse(root=True): - hash_to_spec[s.dag_hash()] = s - hash_to_prereqs[s.dag_hash()] = [ - get_install_target(dep.dag_hash()) for dep in s.dependencies() - ] + def get_install_deps_target(name): + return os.path.join(target_prefix, ".install-deps", name) - root_dags = [s.dag_hash() for _, s in 
env.concretized_specs()] + # What things do we build when running make? By default, we build the + # root specs. If specific specs are provided as input, we build those. + if args.specs: + abstract_specs = spack.cmd.parse_specs(args.specs) + roots = [env.matching_spec(s) for s in abstract_specs] + else: + roots = [s for _, s in env.concretized_specs()] + + # We produce a sub-DAG from the DAG induced by roots, where we drop build + # edges for those specs that are installed through a binary cache. + pkg_buildcache, dep_buildcache = args.use_buildcache + make_targets = MakeTargetVisitor(get_install_target, pkg_buildcache, dep_buildcache) + traverse.traverse_breadth_first_with_visitor( + roots, traverse.CoverNodesVisitor(make_targets, key=lambda s: s.dag_hash()) + ) # Root specs without deps are the prereqs for the environment target - root_install_targets = [get_install_target(h) for h in root_dags] + root_install_targets = [get_install_target(h.dag_hash()) for h in roots] - # All package install targets, not just roots. - all_install_targets = [get_install_target(h) for h in hash_to_spec.keys()] + # Cleanable targets... 
+ cleanable_targets = [get_install_target(h) for h, _, _, _ in make_targets.adjacency_list] + cleanable_targets.extend( + [get_install_deps_target(h) for h, _, _, _ in make_targets.adjacency_list] + ) buf = six.StringIO() - buf.write( - """SPACK ?= spack + template = spack.tengine.make_environment().get_template(os.path.join("depfile", "Makefile")) -.PHONY: {} {} - -{}: {} - -{}: {} -\t@touch $@ - -{}: -\t@mkdir -p {} - -{}: | {} -\t$(info Installing $(SPEC)) -\t{}$(SPACK) -e '{}' install $(SPACK_INSTALL_FLAGS) --only-concrete --only=package \ ---no-add /$(notdir $@) && touch $@ - -""".format( - get_target("all"), - get_target("clean"), - get_target("all"), - get_target("env"), - get_target("env"), - " ".join(root_install_targets), - get_target("dirs"), - get_target(".install"), - get_target(".install/%"), - get_target("dirs"), - "+" if args.jobserver else "", - env.path, - ) - ) - - # Targets are of the form /: [/]..., - # The prefix can be an empty string, in that case we don't add the `/`. - # The name is currently the dag hash of the spec. In principle it - # could be the package name in case of `concretization: together` so - # it can be more easily referred to, but for now we don't special case - # this. 
- fmt = "{name}{@version}{%compiler}{variants}{arch=architecture}" - - # Set SPEC for each hash - buf.write("# Set the human-readable spec for each target\n") - for dag_hash in hash_to_prereqs.keys(): - formatted_spec = hash_to_spec[dag_hash].format(fmt) - buf.write("{}: SPEC = {}\n".format(get_target("%/" + dag_hash), formatted_spec)) - buf.write("\n") - - # Set install dependencies - buf.write("# Install dependencies\n") - for parent, children in hash_to_prereqs.items(): - if not children: - continue - buf.write("{}: {}\n".format(get_install_target(parent), " ".join(children))) - buf.write("\n") - - # Clean target: remove target files but not their folders, cause - # --make-target-prefix can be any existing directory we do not control, - # including empty string (which means deleting the containing folder - # would delete the folder with the Makefile) - buf.write( - "{}:\n\trm -f -- {} {}\n".format( - get_target("clean"), get_target("env"), " ".join(all_install_targets) - ) + rendered = template.render( + { + "all_target": get_target("all"), + "env_target": get_target("env"), + "clean_target": get_target("clean"), + "cleanable_targets": " ".join(cleanable_targets), + "root_install_targets": " ".join(root_install_targets), + "dirs_target": get_target("dirs"), + "environment": env.path, + "install_target": get_target(".install"), + "install_deps_target": get_target(".install-deps"), + "any_hash_target": get_target("%"), + "jobserver_support": "+" if args.jobserver else "", + "adjacency_list": make_targets.adjacency_list, + } ) + buf.write(rendered) makefile = buf.getvalue() # Finally write to stdout/file. 
diff --git a/lib/spack/spack/cmd/extensions.py b/lib/spack/spack/cmd/extensions.py index 50f41529b2e..fa79f8df2dc 100644 --- a/lib/spack/spack/cmd/extensions.py +++ b/lib/spack/spack/cmd/extensions.py @@ -76,7 +76,7 @@ def extensions(parser, args): spec = cmd.disambiguate_spec(spec[0], env) if not spec.package.extendable: - tty.die("%s is not an extendable package." % spec[0].name) + tty.die("%s is not an extendable package." % spec.name) if not spec.package.extendable: tty.die("%s does not have extensions." % spec.short_spec) diff --git a/lib/spack/spack/cmd/info.py b/lib/spack/spack/cmd/info.py index ce69d98dbbe..6a16a153eac 100644 --- a/lib/spack/spack/cmd/info.py +++ b/lib/spack/spack/cmd/info.py @@ -210,11 +210,11 @@ def print_maintainers(pkg): def print_phases(pkg): """output installation phases""" - if hasattr(pkg, "phases") and pkg.phases: + if hasattr(pkg.builder, "phases") and pkg.builder.phases: color.cprint("") color.cprint(section_title("Installation Phases:")) phase_str = "" - for phase in pkg.phases: + for phase in pkg.builder.phases: phase_str += " {0}".format(phase) color.cprint(phase_str) diff --git a/lib/spack/spack/cmd/install.py b/lib/spack/spack/cmd/install.py index a6fdfd31f48..3f9a948a233 100644 --- a/lib/spack/spack/cmd/install.py +++ b/lib/spack/spack/cmd/install.py @@ -5,7 +5,6 @@ import argparse import os -import re import shutil import sys import textwrap @@ -32,33 +31,6 @@ level = "short" -# Pass in the value string passed to use-buildcache and get back -# the package and dependencies values. 
-def parse_use_buildcache(opt): - bc_keys = ["package:", "dependencies:", ""] - bc_values = ["only", "never", "auto"] - kv_list = re.findall("([a-z]+:)?([a-z]+)", opt) - - # Verify keys and values - bc_map = {k: v for k, v in kv_list if k in bc_keys and v in bc_values} - if not len(kv_list) == len(bc_map): - tty.error("Unrecognized arguments passed to use-buildcache") - tty.error( - "Expected: --use-buildcache " - "[[auto|only|never],[package:[auto|only|never]],[dependencies:[auto|only|never]]]" - ) - exit(1) - - for _group in ["package:", "dependencies:"]: - if _group not in bc_map: - if "" in bc_map: - bc_map[_group] = bc_map[""] - else: - bc_map[_group] = "auto" - - return bc_map["package:"], bc_map["dependencies:"] - - # Determine value of cache flag def cache_opt(default_opt, use_buildcache): if use_buildcache == "auto": @@ -73,8 +45,7 @@ def install_kwargs_from_args(args): """Translate command line arguments into a dictionary that will be passed to the package installer. """ - - pkg_use_bc, dep_use_bc = parse_use_buildcache(args.use_buildcache) + pkg_use_bc, dep_use_bc = args.use_buildcache return { "fail_fast": args.fail_fast, @@ -169,6 +140,7 @@ def setup_parser(subparser): cache_group.add_argument( "--use-buildcache", dest="use_buildcache", + type=arguments.use_buildcache, default="package:auto,dependencies:auto", metavar="[{auto,only,never},][package:{auto,only,never},][dependencies:{auto,only,never}]", help="""select the mode of buildcache for the 'package' and 'dependencies'. @@ -221,14 +193,22 @@ def setup_parser(subparser): default=False, help="(with environment) only install already concretized specs", ) - subparser.add_argument( - "--no-add", + + updateenv_group = subparser.add_mutually_exclusive_group() + updateenv_group.add_argument( + "--add", action="store_true", default=False, - help="""(with environment) partially install an environment, limiting -to concrete specs in the environment matching the arguments. 
-Non-roots remain installed implicitly.""", + help="""(with environment) add spec to the environment as a root.""", ) + updateenv_group.add_argument( + "--no-add", + action="store_false", + dest="add", + help="""(with environment) do not add spec to the environment as a +root (the default behavior).""", + ) + subparser.add_argument( "-f", "--file", @@ -317,11 +297,12 @@ def install_specs_inside_environment(specs, install_kwargs, cli_args): # the matches. Getting to this point means there were either # no matches or exactly one match. - if not m_spec and cli_args.no_add: + if not m_spec and not cli_args.add: msg = ( - "You asked to install {0} without adding it (--no-add), but no such spec " - "exists in environment" - ).format(abstract.name) + "Cannot install '{0}' because it is not in the current environment." + " You can add it to the environment with 'spack add {0}', or as part" + " of the install command with 'spack install --add {0}'" + ).format(str(abstract)) tty.die(msg) if not m_spec: @@ -331,14 +312,16 @@ def install_specs_inside_environment(specs, install_kwargs, cli_args): tty.debug("exactly one match for {0} in env -> {1}".format(m_spec.name, m_spec.dag_hash())) - if m_spec in env.roots() or cli_args.no_add: - # either the single match is a root spec (and --no-add is - # the default for roots) or --no-add was stated explicitly + if m_spec in env.roots() or not cli_args.add: + # either the single match is a root spec (in which case + # the spec is not added to the env again), or the user did + # not specify --add (in which case it is assumed we are + # installing already-concretized specs in the env) tty.debug("just install {0}".format(m_spec.name)) specs_to_install.append(m_spec) else: # the single match is not a root (i.e. 
it's a dependency), - # and --no-add was not specified, so we'll add it as a + # and --add was specified, so we'll add it as a # root before installing tty.debug("add {0} then install it".format(m_spec.name)) specs_to_add.append((abstract, concrete)) diff --git a/lib/spack/spack/cmd/spec.py b/lib/spack/spack/cmd/spec.py index b701fbc83c0..9ed5f27cab9 100644 --- a/lib/spack/spack/cmd/spec.py +++ b/lib/spack/spack/cmd/spec.py @@ -76,8 +76,7 @@ def setup_parser(subparser): "-t", "--types", action="store_true", default=False, help="show dependency types" ) arguments.add_common_arguments(subparser, ["specs"]) - - spack.cmd.common.arguments.add_concretizer_args(subparser) + arguments.add_concretizer_args(subparser) def spec(parser, args): diff --git a/lib/spack/spack/cmd/test.py b/lib/spack/spack/cmd/test.py index fcd72a123e5..4da35c8a35d 100644 --- a/lib/spack/spack/cmd/test.py +++ b/lib/spack/spack/cmd/test.py @@ -54,6 +54,12 @@ def setup_parser(subparser): run_parser.add_argument( "--externals", action="store_true", help="Test packages that are externally installed." 
) + run_parser.add_argument( + "-x", + "--explicit", + action="store_true", + help="Only test packages that are explicitly installed.", + ) run_parser.add_argument( "--keep-stage", action="store_true", help="Keep testing directory for debugging" ) @@ -188,6 +194,9 @@ def test_run(args): if args.fail_fast: spack.config.set("config:fail_fast", True, scope="command_line") + explicit = args.explicit or any + explicit_str = "explicitly " if args.explicit else "" + # Get specs to test env = ev.active_environment() hashes = env.all_hashes() if env else None @@ -195,9 +204,13 @@ def test_run(args): specs = spack.cmd.parse_specs(args.specs) if args.specs else [None] specs_to_test = [] for spec in specs: - matching = spack.store.db.query_local(spec, hashes=hashes) + matching = spack.store.db.query_local( + spec, + hashes=hashes, + explicit=explicit, + ) if spec and not matching: - tty.warn("No installed packages match spec %s" % spec) + tty.warn("No {0}installed packages match spec {1}".format(explicit_str, spec)) """ TODO: Need to write out a log message and/or CDASH Testing output that package not installed IF continue to process @@ -208,6 +221,7 @@ def test_run(args): # to ensure report package as skipped (e.g., for CI) specs_to_test.append(spec) """ + specs_to_test.extend(matching) # test_stage_dir diff --git a/lib/spack/spack/cmd/uninstall.py b/lib/spack/spack/cmd/uninstall.py index be4b74c54ad..8c112840fd6 100644 --- a/lib/spack/spack/cmd/uninstall.py +++ b/lib/spack/spack/cmd/uninstall.py @@ -5,7 +5,6 @@ from __future__ import print_function -import itertools import sys from llnl.util import tty @@ -61,6 +60,13 @@ def setup_parser(subparser): dest="force", help="remove regardless of whether other packages or environments " "depend on this one", ) + subparser.add_argument( + "--remove", + action="store_true", + dest="remove", + help="if in an environment, then the spec should also be removed from " + "the environment description", + ) arguments.add_common_arguments( 
subparser, ["recurse_dependents", "yes_to_all", "installed_specs"] ) @@ -134,13 +140,21 @@ def installed_dependents(specs, env): env (spack.environment.Environment or None): the active environment, or None Returns: - tuple: two mappings: one from specs to their dependent environments in the - active environment (or global scope if there is no environment), and one from - specs to their dependents in *inactive* environments (empty if there is no - environment + tuple: two mappings: one from specs to their dependent installs in the + active environment, and one from specs to dependent installs outside of + the active environment. + + Any of the input specs may appear in both mappings (if there are + dependents both inside and outside the current environment). + + If a dependent spec is used both by the active environment and by + an inactive environment, it will only appear in the first mapping. + + If there is not current active environment, the first mapping will be + empty. """ active_dpts = {} - inactive_dpts = {} + outside_dpts = {} env_hashes = set(env.all_hashes()) if env else set() @@ -153,12 +167,12 @@ def installed_dependents(specs, env): # dpts that are outside this environment for dpt in installed: if dpt not in specs: - if not env or dpt.dag_hash() in env_hashes: + if dpt.dag_hash() in env_hashes: active_dpts.setdefault(spec, set()).add(dpt) else: - inactive_dpts.setdefault(spec, set()).add(dpt) + outside_dpts.setdefault(spec, set()).add(dpt) - return active_dpts, inactive_dpts + return active_dpts, outside_dpts def dependent_environments(specs): @@ -228,7 +242,7 @@ def do_uninstall(env, specs, force): except spack.repo.UnknownEntityError: # The package.py file has gone away -- but still # want to uninstall. 
- spack.package_base.Package.uninstall_by_spec(item, force=True) + spack.package_base.PackageBase.uninstall_by_spec(item, force=True) # A package is ready to be uninstalled when nothing else references it, # unless we are requested to force uninstall it. @@ -262,31 +276,65 @@ def is_ready(dag_hash): def get_uninstall_list(args, specs, env): - # Gets the list of installed specs that match the ones give via cli + """Returns uninstall_list and remove_list: these may overlap (some things + may be both uninstalled and removed from the current environment). + + It is assumed we are in an environment if --remove is specified (this + method raises an exception otherwise). + + uninstall_list is topologically sorted: dependents come before + dependencies (so if a user uninstalls specs in the order provided, + the dependents will always be uninstalled first). + """ + if args.remove and not env: + raise ValueError("Can only use --remove when in an environment") + + # Gets the list of installed specs that match the ones given via cli # args.all takes care of the case where '-a' is given in the cli - uninstall_list = find_matching_specs(env, specs, args.all, args.force, args.origin) + base_uninstall_specs = set(find_matching_specs(env, specs, args.all, args.force)) - # Takes care of '-R' - active_dpts, inactive_dpts = installed_dependents(uninstall_list, env) + active_dpts, outside_dpts = installed_dependents(base_uninstall_specs, env) + # It will be useful to track the unified set of specs with dependents, as + # well as to separately track specs in the current env with dependents + spec_to_dpts = {} + for spec, dpts in active_dpts.items(): + spec_to_dpts[spec] = list(dpts) + for spec, dpts in outside_dpts.items(): + if spec in spec_to_dpts: + spec_to_dpts[spec].extend(dpts) + else: + spec_to_dpts[spec] = list(dpts) - # if we are in the global scope, we complain if you try to remove a - # spec that's in an environment. 
If we're in an environment, we'll - # just *remove* it from the environment, so we ignore this - # error when *in* an environment - spec_envs = dependent_environments(uninstall_list) - spec_envs = inactive_dependent_environments(spec_envs) + all_uninstall_specs = set(base_uninstall_specs) + if args.dependents: + for spec, lst in active_dpts.items(): + all_uninstall_specs.update(lst) + for spec, lst in outside_dpts.items(): + all_uninstall_specs.update(lst) - # Process spec_dependents and update uninstall_list - has_error = not args.force and ( - (active_dpts and not args.dependents) # dependents in the current env - or (not env and spec_envs) # there are environments that need specs + # For each spec that we intend to uninstall, this tracks the set of + # environments outside the current active environment which depend on the + # spec. There may be environments not managed directly with Spack: such + # environments would not be included here. + spec_to_other_envs = inactive_dependent_environments( + dependent_environments(all_uninstall_specs) + ) + + has_error = not args.force and ( + # There are dependents in the current env and we didn't ask to remove + # dependents + (spec_to_dpts and not args.dependents) + # An environment different than the current env (if any) depends on + # one or more of the specs to be uninstalled. There may also be + # packages in those envs which depend on the base set of packages + # to uninstall, but this covers that scenario. 
+ or (not args.remove and spec_to_other_envs) ) - # say why each problem spec is needed if has_error: - specs = set(active_dpts) - if not env: - specs.update(set(spec_envs)) # environments depend on this + # say why each problem spec is needed + specs = set(spec_to_dpts) + specs.update(set(spec_to_other_envs)) # environments depend on this for i, spec in enumerate(sorted(specs)): # space out blocks of reasons @@ -296,66 +344,86 @@ def get_uninstall_list(args, specs, env): spec_format = "{name}{@version}{%compiler}{/hash:7}" tty.info("Will not uninstall %s" % spec.cformat(spec_format), format="*r") - dependents = active_dpts.get(spec) - if dependents: + dependents = spec_to_dpts.get(spec) + if dependents and not args.dependents: print("The following packages depend on it:") spack.cmd.display_specs(dependents, **display_args) - if not env: - envs = spec_envs.get(spec) - if envs: - print("It is used by the following environments:") - colify([e.name for e in envs], indent=4) + envs = spec_to_other_envs.get(spec) + if envs: + if env: + env_context_qualifier = " other" + else: + env_context_qualifier = "" + print("It is used by the following{0} environments:".format(env_context_qualifier)) + colify([e.name for e in envs], indent=4) msgs = [] - if active_dpts: + if spec_to_dpts and not args.dependents: msgs.append("use `spack uninstall --dependents` to remove dependents too") - if spec_envs: + if spec_to_other_envs: msgs.append("use `spack env remove` to remove from environments") print() tty.die("There are still dependents.", *msgs) - elif args.dependents: - for spec, lst in active_dpts.items(): - uninstall_list.extend(lst) - uninstall_list = list(set(uninstall_list)) + # If we are in an environment, this will track specs in this environment + # which should only be removed from the environment rather than uninstalled + remove_only = set() + if args.remove and not args.force: + remove_only.update(spec_to_other_envs) + if remove_only: + tty.info( + "The following specs 
will be removed but not uninstalled because" + " they are also used by another environment: {speclist}".format( + speclist=", ".join(x.name for x in remove_only) + ) + ) - # only force-remove (don't completely uninstall) specs that still - # have external dependent envs or pkgs - removes = set(inactive_dpts) - if env: - removes.update(spec_envs) + # Compute the set of specs that should be removed from the current env. + # This may overlap (some specs may be uninstalled and also removed from + # the current environment). + if args.remove: + remove_specs = set(base_uninstall_specs) + if args.dependents: + # Any spec matched from the cli, or dependent of, should be removed + # from the environment + for spec, lst in active_dpts.items(): + remove_specs.update(lst) + else: + remove_specs = set() - # remove anything in removes from the uninstall list - uninstall_list = set(uninstall_list) - removes + all_uninstall_specs -= remove_only + # Inefficient topological sort: uninstall dependents before dependencies + all_uninstall_specs = sorted( + all_uninstall_specs, key=lambda x: sum(1 for i in x.traverse()), reverse=True + ) - return uninstall_list, removes + return list(all_uninstall_specs), list(remove_specs) def uninstall_specs(args, specs): env = ev.active_environment() uninstall_list, remove_list = get_uninstall_list(args, specs, env) - anything_to_do = set(uninstall_list).union(set(remove_list)) - if not anything_to_do: + if not uninstall_list: tty.warn("There are no package to uninstall.") return if not args.yes_to_all: - confirm_removal(anything_to_do) - - if env: - # Remove all the specs that are supposed to be uninstalled or just - # removed. 
- with env.write_transaction(): - for spec in itertools.chain(remove_list, uninstall_list): - _remove_from_env(spec, env) - env.write() + confirm_removal(uninstall_list) # Uninstall everything on the list do_uninstall(env, uninstall_list, args.force) + if env: + with env.write_transaction(): + for spec in remove_list: + _remove_from_env(spec, env) + env.write() + + env.regenerate_views() + def confirm_removal(specs): """Display the list of specs to be removed and ask for confirmation. diff --git a/lib/spack/spack/compiler.py b/lib/spack/spack/compiler.py index 2e863096995..37bc250de86 100644 --- a/lib/spack/spack/compiler.py +++ b/lib/spack/spack/compiler.py @@ -56,25 +56,25 @@ def get_compiler_version_output(compiler_path, *args, **kwargs): return _get_compiler_version_output(compiler_path, *args, **kwargs) -def tokenize_flags(flags_str): +def tokenize_flags(flags_values, propagate=False): """Given a compiler flag specification as a string, this returns a list where the entries are the flags. For compiler options which set values using the syntax "-flag value", this function groups flags and their values together. 
Any token not preceded by a "-" is considered the value of a prior flag.""" - tokens = flags_str.split() + tokens = flags_values.split() if not tokens: return [] flag = tokens[0] - flags = [] + flags_with_propagation = [] for token in tokens[1:]: if not token.startswith("-"): flag += " " + token else: - flags.append(flag) + flags_with_propagation.append((flag, propagate)) flag = token - flags.append(flag) - return flags + flags_with_propagation.append((flag, propagate)) + return flags_with_propagation #: regex for parsing linker lines @@ -311,11 +311,13 @@ def __init__( # Unfortunately have to make sure these params are accepted # in the same order they are returned by sorted(flags) # in compilers/__init__.py - self.flags = {} - for flag in spack.spec.FlagMap.valid_compiler_flags(): + self.flags = spack.spec.FlagMap(self.spec) + for flag in self.flags.valid_compiler_flags(): value = kwargs.get(flag, None) if value is not None: - self.flags[flag] = tokenize_flags(value) + values_with_propagation = tokenize_flags(value, False) + for value, propagation in values_with_propagation: + self.flags.add_flag(flag, value, propagation) # caching value for compiler reported version # used for version checks for API, e.g. 
C++11 flag diff --git a/lib/spack/spack/compilers/__init__.py b/lib/spack/spack/compilers/__init__.py index 1d61f37df79..e54f8dc96c0 100644 --- a/lib/spack/spack/compilers/__init__.py +++ b/lib/spack/spack/compilers/__init__.py @@ -49,12 +49,26 @@ "clang": "llvm+clang", "oneapi": "intel-oneapi-compilers", "rocmcc": "llvm-amdgpu", + "intel@2020:": "intel-oneapi-compilers-classic", +} + +# TODO: generating this from the previous dict causes docs errors +package_name_to_compiler_name = { + "llvm": "clang", + "intel-oneapi-compilers": "oneapi", + "llvm-amdgpu": "rocmcc", + "intel-oneapi-compilers-classic": "intel", } def pkg_spec_for_compiler(cspec): """Return the spec of the package that provides the compiler.""" - spec_str = "%s@%s" % (_compiler_to_pkg.get(cspec.name, cspec.name), cspec.versions) + for spec, package in _compiler_to_pkg.items(): + if cspec.satisfies(spec): + spec_str = "%s@%s" % (package, cspec.versions) + break + else: + spec_str = str(cspec) return spack.spec.Spec(spec_str) @@ -346,6 +360,10 @@ def compilers_for_arch(arch_spec, scope=None): return list(get_compilers(config, arch_spec=arch_spec)) +def compiler_specs_for_arch(arch_spec, scope=None): + return [c.spec for c in compilers_for_arch(arch_spec, scope)] + + class CacheReference(object): """This acts as a hashable reference to any object (regardless of whether the object itself is hashable) and also prevents the object from being diff --git a/lib/spack/spack/compilers/cce.py b/lib/spack/spack/compilers/cce.py index d572a31ff47..3ecbcdc3d66 100644 --- a/lib/spack/spack/compilers/cce.py +++ b/lib/spack/spack/compilers/cce.py @@ -2,7 +2,6 @@ # Spack Project Developers. See the top-level COPYRIGHT file for details. 
# # SPDX-License-Identifier: (Apache-2.0 OR MIT) - import os from spack.compiler import Compiler, UnsupportedCompilerFlag @@ -12,17 +11,25 @@ class Cce(Compiler): """Cray compiler environment compiler.""" + def __init__(self, *args, **kwargs): + super(Cce, self).__init__(*args, **kwargs) + # For old cray compilers on module based systems we replace + # ``version_argument`` with the old value. Cannot be a property + # as the new value is used in classmethods for path-based detection + if not self.is_clang_based: + self.version_argument = "-V" + # Subclasses use possible names of C compiler - cc_names = ["cc"] + cc_names = ["craycc", "cc"] # Subclasses use possible names of C++ compiler - cxx_names = ["CC"] + cxx_names = ["crayCC", "CC"] # Subclasses use possible names of Fortran 77 compiler - f77_names = ["ftn"] + f77_names = ["crayftn", "ftn"] # Subclasses use possible names of Fortran 90 compiler - fc_names = ["ftn"] + fc_names = ["crayftn", "ftn"] # MacPorts builds gcc versions with prefixes and -mp-X.Y suffixes. 
suffixes = [r"-mp-\d\.\d"] @@ -30,24 +37,30 @@ class Cce(Compiler): PrgEnv = "PrgEnv-cray" PrgEnv_compiler = "cce" - link_paths = { - "cc": os.path.join("cce", "cc"), - "cxx": os.path.join("cce", "case-insensitive", "CC"), - "f77": os.path.join("cce", "ftn"), - "fc": os.path.join("cce", "ftn"), - } + @property + def link_paths(self): + if self.PrgEnv in self.modules: + # Old module-based interface to cray compilers + return { + "cc": os.path.join("cce", "cc"), + "cxx": os.path.join("case-insensitive", "CC"), + "f77": os.path.join("cce", "ftn"), + "fc": os.path.join("cce", "ftn"), + } + + return { + "cc": os.path.join("cce", "craycc"), + "cxx": os.path.join("cce", "case-insensitive", "crayCC"), + "f77": os.path.join("cce", "crayftn"), + "fc": os.path.join("cce", "crayftn"), + } @property def is_clang_based(self): version = self._real_version or self.version return version >= ver("9.0") and "classic" not in str(version) - @property - def version_argument(self): - if self.is_clang_based: - return "--version" - return "-V" - + version_argument = "--version" version_regex = r"[Vv]ersion.*?(\d+(\.\d+)+)" @property diff --git a/lib/spack/spack/concretize.py b/lib/spack/spack/concretize.py index 3fbc7e8bff9..31066de0264 100644 --- a/lib/spack/spack/concretize.py +++ b/lib/spack/spack/concretize.py @@ -734,7 +734,7 @@ def concretize_specs_together(*abstract_specs, **kwargs): Returns: List of concretized specs """ - if spack.config.get("config:concretizer") == "original": + if spack.config.get("config:concretizer", "clingo") == "original": return _concretize_specs_together_original(*abstract_specs, **kwargs) return _concretize_specs_together_new(*abstract_specs, **kwargs) diff --git a/lib/spack/spack/database.py b/lib/spack/spack/database.py index df41459dff6..30ec4d8da0e 100644 --- a/lib/spack/spack/database.py +++ b/lib/spack/spack/database.py @@ -48,7 +48,10 @@ import spack.store import spack.util.lock as lk import spack.util.spack_json as sjson -from 
spack.directory_layout import DirectoryLayoutError +from spack.directory_layout import ( + DirectoryLayoutError, + InconsistentInstallDirectoryError, +) from spack.error import SpackError from spack.filesystem_view import YamlFilesystemView from spack.util.crypto import bit_length @@ -1063,7 +1066,14 @@ def _read(self): elif self.is_upstream: tty.warn("upstream not found: {0}".format(self._index_path)) - def _add(self, spec, directory_layout=None, explicit=False, installation_time=None): + def _add( + self, + spec, + directory_layout=None, + explicit=False, + installation_time=None, + allow_missing=False, + ): """Add an install record for this spec to the database. Assumes spec is installed in ``layout.path_for_spec(spec)``. @@ -1074,19 +1084,18 @@ def _add(self, spec, directory_layout=None, explicit=False, installation_time=No Args: spec: spec to be added directory_layout: layout of the spec installation - **kwargs: + explicit: + Possible values: True, False, any - explicit - Possible values: True, False, any - - A spec that was installed following a specific user - request is marked as explicit. If instead it was - pulled-in as a dependency of a user requested spec - it's considered implicit. - - installation_time - Date and time of installation + A spec that was installed following a specific user + request is marked as explicit. If instead it was + pulled-in as a dependency of a user requested spec + it's considered implicit. + installation_time: + Date and time of installation + allow_missing: if True, don't warn when installation is not found on disk + This is useful when installing specs without build deps.
""" if not spec.concrete: raise NonConcreteSpecAddError("Specs added to DB must be concrete.") @@ -1100,11 +1109,22 @@ def _add(self, spec, directory_layout=None, explicit=False, installation_time=No # Retrieve optional arguments installation_time = installation_time or _now() - for dep in spec.dependencies(deptype=_tracked_deps): - dkey = dep.dag_hash() - if dkey not in self._data: - extra_args = {"explicit": False, "installation_time": installation_time} - self._add(dep, directory_layout, **extra_args) + for edge in spec.edges_to_dependencies(deptype=_tracked_deps): + if edge.spec.dag_hash() in self._data: + continue + # allow missing build-only deps. This prevents excessive + # warnings when a spec is installed, and its build dep + # is missing a build dep; there's no need to install the + # build dep's build dep first, and there's no need to warn + # about it missing. + dep_allow_missing = allow_missing or edge.deptypes == ("build",) + self._add( + edge.spec, + directory_layout, + explicit=False, + installation_time=installation_time, + allow_missing=dep_allow_missing, + ) # Make sure the directory layout agrees whether the spec is installed if not spec.external and directory_layout: @@ -1115,13 +1135,14 @@ def _add(self, spec, directory_layout=None, explicit=False, installation_time=No installed = True self._installed_prefixes.add(path) except DirectoryLayoutError as e: - msg = ( - "{0} is being {1} in the database with prefix {2}, " - "but this directory does not contain an installation of " - "the spec, due to: {3}" - ) - action = "updated" if key in self._data else "registered" - tty.warn(msg.format(spec.short_spec, action, path, str(e))) + if not (allow_missing and isinstance(e, InconsistentInstallDirectoryError)): + msg = ( + "{0} is being {1} in the database with prefix {2}, " + "but this directory does not contain an installation of " + "the spec, due to: {3}" + ) + action = "updated" if key in self._data else "registered" + 
tty.warn(msg.format(spec.short_spec, action, path, str(e))) elif spec.external_path: path = spec.external_path installed = True diff --git a/lib/spack/spack/detection/common.py b/lib/spack/spack/detection/common.py index d79b62b8725..b9cd871fc24 100644 --- a/lib/spack/spack/detection/common.py +++ b/lib/spack/spack/detection/common.py @@ -228,7 +228,7 @@ def compute_windows_program_path_for_package(pkg): program files location, return list of best guesses Args: - pkg (spack.package_base.Package): package for which + pkg (spack.package_base.PackageBase): package for which Program Files location is to be computed """ if not is_windows: diff --git a/lib/spack/spack/directives.py b/lib/spack/spack/directives.py index c7491861eb9..7c49158c791 100644 --- a/lib/spack/spack/directives.py +++ b/lib/spack/spack/directives.py @@ -17,6 +17,7 @@ class OpenMpi(Package): The available directives are: + * ``build_system`` * ``conflicts`` * ``depends_on`` * ``extends`` @@ -59,13 +60,15 @@ class OpenMpi(Package): "patch", "variant", "resource", + "build_system", ] #: These are variant names used by Spack internally; packages can't use them reserved_names = ["patches", "dev_path"] -#: Names of possible directives. This list is populated elsewhere in the file. -directive_names = [] +#: Names of possible directives. This list is mostly populated using the @directive decorator. +#: Some directives leverage others and in that case are not automatically added. 
+directive_names = ["build_system"] _patch_order_index = 0 @@ -758,6 +761,17 @@ def _execute_resource(pkg): return _execute_resource +def build_system(*values, **kwargs): + default = kwargs.get("default", None) or values[0] + return variant( + "build_system", + values=tuple(values), + description="Build systems supported by the package", + default=default, + multi=False, + ) + + class DirectiveError(spack.error.SpackError): """This is raised when something is wrong with a package directive.""" diff --git a/lib/spack/spack/directory_layout.py b/lib/spack/spack/directory_layout.py index 28f3caab9ef..b5848f12a79 100644 --- a/lib/spack/spack/directory_layout.py +++ b/lib/spack/spack/directory_layout.py @@ -102,7 +102,7 @@ def __init__(self, root, **kwargs): @property def hidden_file_regexes(self): - return (re.escape(self.metadata_dir),) + return ("^{0}$".format(re.escape(self.metadata_dir)),) def relative_path_for_spec(self, spec): _check_concrete(spec) diff --git a/lib/spack/spack/environment/environment.py b/lib/spack/spack/environment/environment.py index 164f078748d..0f1a1d63c44 100644 --- a/lib/spack/spack/environment/environment.py +++ b/lib/spack/spack/environment/environment.py @@ -34,6 +34,7 @@ import spack.stage import spack.store import spack.subprocess_context +import spack.traverse import spack.user_environment as uenv import spack.util.cpus import spack.util.environment @@ -1356,30 +1357,25 @@ def _concretize_together(self, tests=False): if user_specs_did_not_change: return [] - # Check that user specs don't have duplicate packages - counter = collections.defaultdict(int) - for user_spec in self.user_specs: - counter[user_spec.name] += 1 - - duplicates = [] - for name, count in counter.items(): - if count > 1: - duplicates.append(name) - - if duplicates: - msg = ( - "environment that are configured to concretize specs" - " together cannot contain more than one spec for each" - " package [{0}]".format(", ".join(duplicates)) - ) - raise 
SpackEnvironmentError(msg) - # Proceed with concretization self.concretized_user_specs = [] self.concretized_order = [] self.specs_by_hash = {} - concrete_specs = spack.concretize.concretize_specs_together(*self.user_specs, tests=tests) + try: + concrete_specs = spack.concretize.concretize_specs_together( + *self.user_specs, tests=tests + ) + except spack.error.UnsatisfiableSpecError as e: + # "Enhance" the error message for multiple root specs, suggest a less strict + # form of concretization. + if len(self.user_specs) > 1: + e.message += ( + ". Consider setting `concretizer:unify` to `when_possible` " + "or `false` to relax the concretizer strictness." + ) + raise + concretized_specs = [x for x in zip(self.user_specs, concrete_specs)] for abstract, concrete in concretized_specs: self._add_concrete_spec(abstract, concrete) @@ -1411,7 +1407,7 @@ def _concretize_separately(self, tests=False): arguments.append((uspec_constraints, tests)) # Ensure we don't try to bootstrap clingo in parallel - if spack.config.get("config:concretizer") == "clingo": + if spack.config.get("config:concretizer", "clingo") == "clingo": with spack.bootstrap.ensure_bootstrap_configuration(): spack.bootstrap.ensure_clingo_importable_or_raise() @@ -1795,22 +1791,14 @@ def install_specs(self, specs=None, **install_args): def all_specs(self): """Return all specs, even those a user spec would shadow.""" - all_specs = set() - for h in self.concretized_order: - try: - spec = self.specs_by_hash[h] - except KeyError: - tty.warn( - "Environment %s appears to be corrupt: missing spec " '"%s"' % (self.name, h) - ) - continue - all_specs.update(spec.traverse()) - - return sorted(all_specs) + roots = [self.specs_by_hash[h] for h in self.concretized_order] + specs = [s for s in spack.traverse.traverse_nodes(roots, lambda s: s.dag_hash())] + specs.sort() + return specs def all_hashes(self): """Return hashes of all specs.""" - return list(set(s.dag_hash() for s in self.all_specs())) + return [s.dag_hash() for 
s in self.all_specs()] def roots(self): """Specs explicitly requested by the user *in this environment*. @@ -1847,13 +1835,20 @@ def concretized_specs(self): def get_by_hash(self, dag_hash): matches = {} - for _, root in self.concretized_specs(): - for spec in root.traverse(root=True): - dep_hash = spec.dag_hash() - if dep_hash.startswith(dag_hash): - matches[dep_hash] = spec + roots = [self.specs_by_hash[h] for h in self.concretized_order] + for spec in spack.traverse.traverse_nodes(roots, key=lambda s: s.dag_hash()): + if spec.dag_hash().startswith(dag_hash): + matches[spec.dag_hash()] = spec return list(matches.values()) + def get_one_by_hash(self, dag_hash): + """Returns the single spec from the environment which matches the + provided hash. Raises an AssertionError if no specs match or if + more than one spec matches.""" + hash_matches = self.get_by_hash(dag_hash) + assert len(hash_matches) == 1 + return hash_matches[0] + def matching_spec(self, spec): """ Given a spec (likely not concretized), find a matching concretized @@ -1945,28 +1940,27 @@ def _get_environment_specs(self, recurse_dependencies=True): If these specs appear under different user_specs, only one copy is added to the list returned. 
""" - spec_list = list() + specs = [self.specs_by_hash[h] for h in self.concretized_order] - for spec_hash in self.concretized_order: - spec = self.specs_by_hash[spec_hash] + if recurse_dependencies: + specs.extend( + spack.traverse.traverse_nodes( + specs, root=False, deptype=("link", "run"), key=lambda s: s.dag_hash() + ) + ) - specs = spec.traverse(deptype=("link", "run")) if recurse_dependencies else (spec,) - - spec_list.extend(specs) - - return spec_list + return specs def _to_lockfile_dict(self): """Create a dictionary to store a lockfile for this environment.""" concrete_specs = {} - for spec in self.specs_by_hash.values(): - for s in spec.traverse(): - dag_hash = s.dag_hash() - if dag_hash not in concrete_specs: - spec_dict = s.node_dict_with_hashes(hash=ht.dag_hash) - # Assumes no legacy formats, since this was just created. - spec_dict[ht.dag_hash.name] = s.dag_hash() - concrete_specs[dag_hash] = spec_dict + for s in spack.traverse.traverse_nodes( + self.specs_by_hash.values(), key=lambda s: s.dag_hash() + ): + spec_dict = s.node_dict_with_hashes(hash=ht.dag_hash) + # Assumes no legacy formats, since this was just created. + spec_dict[ht.dag_hash.name] = s.dag_hash() + concrete_specs[s.dag_hash()] = spec_dict hash_spec_list = zip(self.concretized_order, self.concretized_user_specs) @@ -2077,19 +2071,16 @@ def write(self, regenerate=True): # ensure the prefix/.env directory exists fs.mkdirp(self.env_subdir_path) - for spec in self.new_specs: - for dep in spec.traverse(): - if not dep.concrete: - raise ValueError( - "specs passed to environment.write() " "must be concrete!" 
- ) + for spec in spack.traverse.traverse_nodes(self.new_specs): + if not spec.concrete: + raise ValueError("specs passed to environment.write() " "must be concrete!") - root = os.path.join(self.repos_path, dep.namespace) - repo = spack.repo.create_or_construct(root, dep.namespace) - pkg_dir = repo.dirname_for_package_name(dep.name) + root = os.path.join(self.repos_path, spec.namespace) + repo = spack.repo.create_or_construct(root, spec.namespace) + pkg_dir = repo.dirname_for_package_name(spec.name) - fs.mkdirp(pkg_dir) - spack.repo.path.dump_provenance(dep, pkg_dir) + fs.mkdirp(pkg_dir) + spack.repo.path.dump_provenance(spec, pkg_dir) self._update_and_write_manifest(raw_yaml_dict, yaml_dict) diff --git a/lib/spack/spack/fetch_strategy.py b/lib/spack/spack/fetch_strategy.py index ea85c6a6824..df993978af6 100644 --- a/lib/spack/spack/fetch_strategy.py +++ b/lib/spack/spack/fetch_strategy.py @@ -865,7 +865,12 @@ def clone(self, dest=None, commit=None, branch=None, tag=None, bare=False): repo_name = get_single_file(".") if self.stage: self.stage.srcdir = repo_name - shutil.move(repo_name, dest) + shutil.copytree(repo_name, dest, symlinks=True) + shutil.rmtree( + repo_name, + ignore_errors=False, + onerror=fs.readonly_file_handler(ignore_errors=True), + ) with working_dir(dest): checkout_args = ["checkout", commit] diff --git a/lib/spack/spack/filesystem_view.py b/lib/spack/spack/filesystem_view.py index 2373ec5e452..db2d6c94774 100644 --- a/lib/spack/spack/filesystem_view.py +++ b/lib/spack/spack/filesystem_view.py @@ -95,11 +95,11 @@ def view_copy(src, dst, view, spec=None): prefix_to_projection[dep.prefix] = view.get_projection_for_spec(dep) if spack.relocate.is_binary(dst): - spack.relocate.relocate_text_bin(binaries=[dst], prefixes=prefix_to_projection) + spack.relocate.unsafe_relocate_text_bin(binaries=[dst], prefixes=prefix_to_projection) else: prefix_to_projection[spack.store.layout.root] = view._root prefix_to_projection[orig_sbang] = new_sbang - 
spack.relocate.relocate_text(files=[dst], prefixes=prefix_to_projection) + spack.relocate.unsafe_relocate_text(files=[dst], prefixes=prefix_to_projection) try: stat = os.stat(src) os.chown(dst, stat.st_uid, stat.st_gid) diff --git a/lib/spack/spack/hooks/__init__.py b/lib/spack/spack/hooks/__init__.py index 699464c9135..f30082ccf63 100644 --- a/lib/spack/spack/hooks/__init__.py +++ b/lib/spack/spack/hooks/__init__.py @@ -27,7 +27,8 @@ systems (e.g. modules, lmod, etc.) or to add other custom features. """ -import llnl.util.lang + +from llnl.util.lang import ensure_last, list_modules import spack.paths @@ -44,11 +45,11 @@ def __init__(self, hook_name): def _populate_hooks(cls): # Lazily populate the list of hooks cls._hooks = [] - relative_names = list(llnl.util.lang.list_modules(spack.paths.hooks_path)) - # We want this hook to be the last registered - relative_names.sort(key=lambda x: x == "write_install_manifest") - assert relative_names[-1] == "write_install_manifest" + relative_names = list(list_modules(spack.paths.hooks_path)) + + # Ensure that write_install_manifest comes last + ensure_last(relative_names, "absolutify_elf_sonames", "write_install_manifest") for name in relative_names: module_name = __name__ + "." + name diff --git a/lib/spack/spack/hooks/absolutify_elf_sonames.py b/lib/spack/spack/hooks/absolutify_elf_sonames.py new file mode 100644 index 00000000000..d16de2ea39d --- /dev/null +++ b/lib/spack/spack/hooks/absolutify_elf_sonames.py @@ -0,0 +1,171 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +import os + +import llnl.util.tty as tty +from llnl.util.filesystem import BaseDirectoryVisitor, visit_directory_tree +from llnl.util.lang import elide_list + +import spack.bootstrap +import spack.config +import spack.relocate +from spack.util.elf import ElfParsingError, parse_elf +from spack.util.executable import Executable + + +def is_shared_library_elf(filepath): + """Return true if filepath is most likely a shared library. + Our definition of a shared library for ELF requires: + 1. a dynamic section, + 2. a soname OR lack of interpreter. + The problem is that PIE objects (default on Ubuntu) are + ET_DYN too, and not all shared libraries have a soname... + no interpreter is typically the best indicator then.""" + try: + with open(filepath, "rb") as f: + elf = parse_elf(f, interpreter=True, dynamic_section=True) + return elf.has_pt_dynamic and (elf.has_soname or not elf.has_pt_interp) + except (IOError, OSError, ElfParsingError): + return False + + +class SharedLibrariesVisitor(BaseDirectoryVisitor): + """Visitor that collects all shared libraries in a prefix, with the + exception of an exclude list.""" + + def __init__(self, exclude_list): + + # List of file and directory names to be excluded + self.exclude_list = frozenset(exclude_list) + + # Map from (ino, dev) -> path. We need 1 path per file, if there are hardlinks, + # we don't need to store the path multiple times. + self.libraries = dict() + + # Set of (ino, dev) pairs (excluded by symlinks). + self.excluded_through_symlink = set() + + def visit_file(self, root, rel_path, depth): + # Check if excluded + basename = os.path.basename(rel_path) + if basename in self.exclude_list: + return + + filepath = os.path.join(root, rel_path) + s = os.lstat(filepath) + identifier = (s.st_ino, s.st_dev) + + # We're hitting a hardlink or symlink of an excluded lib, no need to parse.
+ if identifier in self.libraries or identifier in self.excluded_through_symlink: + return + + # Register the file if it's a shared lib that needs to be patched. + if is_shared_library_elf(filepath): + self.libraries[identifier] = rel_path + + def visit_symlinked_file(self, root, rel_path, depth): + # We don't need to follow the symlink and parse the file, since we will hit + # it by recursing the prefix anyways. We only need to check if the target + # should be excluded based on the filename of the symlink. E.g. when excluding + # libf.so, which is a symlink to libf.so.1.2.3, we keep track of the stat data + # of the latter. + basename = os.path.basename(rel_path) + if basename not in self.exclude_list: + return + + # Register the (ino, dev) pair as ignored (if the symlink is not dangling) + filepath = os.path.join(root, rel_path) + try: + s = os.stat(filepath) + except OSError: + return + self.excluded_through_symlink.add((s.st_ino, s.st_dev)) + + def before_visit_dir(self, root, rel_path, depth): + # Allow skipping over directories. E.g. `/lib/stubs` can be skipped by + # adding `"stubs"` to the exclude list. + return os.path.basename(rel_path) not in self.exclude_list + + def before_visit_symlinked_dir(self, root, rel_path, depth): + # Never enter symlinked dirs, since we don't want to leave the prefix, and + # we'll enter the target dir inside the prefix anyways since we're recursing + # everywhere. 
+ return False + + def get_shared_libraries_relative_paths(self): + """Get the libraries that should be patched, with the excluded libraries + removed.""" + for identifier in self.excluded_through_symlink: + self.libraries.pop(identifier, None) + + return [rel_path for rel_path in self.libraries.values()] + + +def patch_sonames(patchelf, root, rel_paths): + """Set the soname to the file's own path for a list of + given shared libraries.""" + fixed = [] + for rel_path in rel_paths: + filepath = os.path.join(root, rel_path) + normalized = os.path.normpath(filepath) + args = ["--set-soname", normalized, normalized] + output = patchelf(*args, output=str, error=str, fail_on_error=False) + if patchelf.returncode == 0: + fixed.append(rel_path) + else: + # Note: treat as warning to avoid (long) builds to fail post-install. + tty.warn("patchelf: failed to set soname of {}: {}".format(normalized, output.strip())) + return fixed + + +def find_and_patch_sonames(prefix, exclude_list, patchelf): + # Locate all shared libraries in the prefix dir of the spec, excluding + # the ones set in the non_bindable_shared_objects property. + visitor = SharedLibrariesVisitor(exclude_list) + visit_directory_tree(prefix, visitor) + + # Patch all sonames. + relative_paths = visitor.get_shared_libraries_relative_paths() + return patch_sonames(patchelf, prefix, relative_paths) + + +def post_install(spec): + # Skip if disabled + if not spack.config.get("config:shared_linking:bind", False): + return + + # Skip externals + if spec.external: + return + + # Only enable on platforms using ELF. + if not spec.satisfies("platform=linux") and not spec.satisfies("platform=cray"): + return + + # Disable this hook when bootstrapping, to avoid recursion. + if spack.bootstrap.is_bootstrapping(): + return + + # Should failing to locate patchelf be a hard error? 
+ patchelf_path = spack.relocate._patchelf() + if not patchelf_path: + return + patchelf = Executable(patchelf_path) + + fixes = find_and_patch_sonames(spec.prefix, spec.package.non_bindable_shared_objects, patchelf) + + if not fixes: + return + + # Unfortunately this does not end up in the build logs. + tty.info( + "{}: Patched {} {}: {}".format( + spec.name, + len(fixes), + "soname" if len(fixes) == 1 else "sonames", + ", ".join(elide_list(fixes, max_num=5)), + ) + ) diff --git a/lib/spack/spack/hooks/sbang.py b/lib/spack/spack/hooks/sbang.py index 8731ef0c079..5d2de2b8655 100644 --- a/lib/spack/spack/hooks/sbang.py +++ b/lib/spack/spack/hooks/sbang.py @@ -205,7 +205,9 @@ def install_sbang(): fs.set_install_permissions(sbang_bin_dir) # set group on sbang_bin_dir if not already set (only if set in configuration) - if group_name and grp.getgrgid(os.stat(sbang_bin_dir).st_gid).gr_name != group_name: + # TODO: after we drop python2 support, use shutil.chown to avoid gid lookups that + # can fail for remote groups + if group_name and os.stat(sbang_bin_dir).st_gid != grp.getgrnam(group_name).gr_gid: os.chown(sbang_bin_dir, os.stat(sbang_bin_dir).st_uid, grp.getgrnam(group_name).gr_gid) # copy over the fresh copy of `sbang` diff --git a/lib/spack/spack/install_test.py b/lib/spack/spack/install_test.py index da2b73032e4..3c976febb1b 100644 --- a/lib/spack/spack/install_test.py +++ b/lib/spack/spack/install_test.py @@ -43,12 +43,24 @@ def get_escaped_text_output(filename): def get_test_stage_dir(): + """Retrieves the ``config:test_stage`` path to the configured test stage + root directory + + Returns: + str: absolute path to the configured test stage root or, if none, + the default test stage path + """ return spack.util.path.canonicalize_path( spack.config.get("config:test_stage", spack.paths.default_test_path) ) def get_all_test_suites(): + """Retrieves all validly staged TestSuites + + Returns: + list: a list of TestSuite objects, which may be empty if there are none + 
""" stage_root = get_test_stage_dir() if not os.path.isdir(stage_root): return [] @@ -68,7 +80,14 @@ def valid_stage(d): def get_named_test_suites(name): - """Return a list of the names of any test suites with that name.""" + """Retrieves test suites with the provided name. + + Returns: + list: a list of matching TestSuite instances, which may be empty if none + + Raises: + TestSuiteNameError: If no name is provided + """ if not name: raise TestSuiteNameError("Test suite name is required.") @@ -77,6 +96,14 @@ def get_named_test_suites(name): def get_test_suite(name): + """Ensure there is only one matching test suite with the provided name. + + Returns: + str or None: the name if one matching test suite, else None + + Raises: + TestSuiteNameError: If there is more than one matching TestSuite + """ names = get_named_test_suites(name) if len(names) > 1: raise TestSuiteNameError('Too many suites named "{0}". May shadow hash.'.format(name)) @@ -87,12 +114,14 @@ def get_test_suite(name): def write_test_suite_file(suite): - """Write the test suite to its lock file.""" + """Write the test suite to its (JSON) lock file.""" with open(suite.stage.join(test_suite_filename), "w") as f: sjson.dump(suite.to_dict(), stream=f) def write_test_summary(num_failed, num_skipped, num_untested, num_specs): + """Write a well formatted summary of the totals for each relevant status + category.""" failed = "{0} failed, ".format(num_failed) if num_failed else "" skipped = "{0} skipped, ".format(num_skipped) if num_skipped else "" no_tests = "{0} no-tests, ".format(num_untested) if num_untested else "" @@ -108,6 +137,8 @@ def write_test_summary(num_failed, num_skipped, num_untested, num_specs): class TestSuite(object): + """The class that manages specs for ``spack test run`` execution.""" + def __init__(self, specs, alias=None): # copy so that different test suites have different package objects # even if they contain the same spec @@ -122,10 +153,12 @@ def __init__(self, specs, alias=None): 
@property def name(self): + """The name (alias or, if none, hash) of the test suite.""" return self.alias if self.alias else self.content_hash @property def content_hash(self): + """The hash used to uniquely identify the test suite.""" if not self._hash: json_text = sjson.dump(self.to_dict()) sha = hashlib.sha1(json_text.encode("utf-8")) @@ -212,48 +245,100 @@ def __call__(self, *args, **kwargs): raise TestSuiteFailure(self.fails) def ensure_stage(self): + """Ensure the test suite stage directory exists.""" if not os.path.exists(self.stage): fs.mkdirp(self.stage) @property def stage(self): + """The root test suite stage directory.""" return spack.util.prefix.Prefix(os.path.join(get_test_stage_dir(), self.content_hash)) @property def results_file(self): + """The path to the results summary file.""" return self.stage.join(results_filename) @classmethod def test_pkg_id(cls, spec): - """Build the standard install test package identifier + """The standard install test package identifier. Args: - spec (Spec): instance of the spec under test + spec (spack.spec.Spec): instance of the spec under test Returns: - (str): the install test package identifier + str: the install test package identifier """ return spec.format("{name}-{version}-{hash:7}") @classmethod def test_log_name(cls, spec): + """The standard log filename for a spec. + + Args: + spec (spack.spec.Spec): instance of the spec under test + + Returns: + str: the spec's log filename + """ return "%s-test-out.txt" % cls.test_pkg_id(spec) def log_file_for_spec(self, spec): + """The test log file path for the provided spec. + + Args: + spec (spack.spec.Spec): instance of the spec under test + + Returns: + str: the path to the spec's log file + """ return self.stage.join(self.test_log_name(spec)) def test_dir_for_spec(self, spec): + """The path to the test stage directory for the provided spec. 
+ + Args: + spec (spack.spec.Spec): instance of the spec under test + + Returns: + str: the spec's test stage directory path + """ return self.stage.join(self.test_pkg_id(spec)) @classmethod def tested_file_name(cls, spec): + """The standard test status filename for the spec. + + Args: + spec (spack.spec.Spec): instance of the spec under test + + Returns: + str: the spec's test status filename + """ return "%s-tested.txt" % cls.test_pkg_id(spec) def tested_file_for_spec(self, spec): + """The test status file path for the spec. + + Args: + spec (spack.spec.Spec): instance of the spec under test + + Returns: + str: the spec's test status file path + """ return self.stage.join(self.tested_file_name(spec)) @property def current_test_cache_dir(self): + """Path to the test stage directory where the current spec's cached + build-time files were automatically copied. + + Returns: + str: path to the current spec's staged, cached build-time files. + + Raises: + TestSuiteSpecError: If there is no spec being tested + """ if not (self.current_test_spec and self.current_base_spec): raise TestSuiteSpecError("Unknown test cache directory: no specs being tested") @@ -263,6 +348,15 @@ def current_test_cache_dir(self): @property def current_test_data_dir(self): + """Path to the test stage directory where the current spec's custom + package (data) files were automatically copied. 
+ + Returns: + str: path to the current spec's staged, custom package (data) files + + Raises: + TestSuiteSpecError: If there is no spec being tested + """ if not (self.current_test_spec and self.current_base_spec): raise TestSuiteSpecError("Unknown test data directory: no specs being tested") @@ -270,13 +364,13 @@ def current_test_data_dir(self): base_spec = self.current_base_spec return self.test_dir_for_spec(base_spec).data.join(test_spec.name) - def add_failure(self, exc, msg): - current_hash = self.current_base_spec.dag_hash() - current_failures = self.failures.get(current_hash, []) - current_failures.append((exc, msg)) - self.failures[current_hash] = current_failures - def write_test_result(self, spec, result): + """Write the spec's test result to the test suite results file. + + Args: + spec (spack.spec.Spec): instance of the spec under test + result (str): result from the spec's test execution (e.g, PASSED) + """ msg = "{0} {1}".format(self.test_pkg_id(spec), result) _add_msg_to_file(self.results_file, msg) @@ -295,6 +389,14 @@ def write_reproducibility_data(self): write_test_suite_file(self) def to_dict(self): + """Build a dictionary for the test suite. 
+ + Returns: + dict: The dictionary contains entries for up to two keys: + + specs: list of the test suite's specs in dictionary form + alias: the alias, or name, given to the test suite if provided + """ specs = [s.to_dict() for s in self.specs] d = {"specs": specs} if self.alias: @@ -303,12 +405,29 @@ def to_dict(self): @staticmethod def from_dict(d): + """Instantiates a TestSuite based on a dictionary specs and an + optional alias: + + specs: list of the test suite's specs in dictionary form + alias: the test suite alias + + + Returns: + TestSuite: Instance of TestSuite created from the specs + """ specs = [Spec.from_dict(spec_dict) for spec_dict in d["specs"]] alias = d.get("alias", None) return TestSuite(specs, alias) @staticmethod def from_file(filename): + """Instantiate a TestSuite using the specs and optional alias + provided in the given file. + + Args: + filename (str): The path to the JSON file containing the test + suite specs and optional alias. + """ try: with open(filename, "r") as f: data = sjson.load(f) @@ -324,7 +443,7 @@ def from_file(filename): def _add_msg_to_file(filename, msg): - """Add the message to the specified file + """Append the message to the specified file. 
Args: filename (str): path to the file diff --git a/lib/spack/spack/installer.py b/lib/spack/spack/installer.py index 4150302b28e..252c799304e 100644 --- a/lib/spack/spack/installer.py +++ b/lib/spack/spack/installer.py @@ -42,6 +42,7 @@ import llnl.util.filesystem as fs import llnl.util.lock as lk import llnl.util.tty as tty +from llnl.util.lang import pretty_seconds from llnl.util.tty.color import colorize from llnl.util.tty.log import log_output @@ -111,14 +112,15 @@ def _check_last_phase(pkg): Raises: ``BadInstallPhase`` if stop_before or last phase is invalid """ - if pkg.stop_before_phase and pkg.stop_before_phase not in pkg.phases: + phases = pkg.builder.phases + if pkg.stop_before_phase and pkg.stop_before_phase not in phases: raise BadInstallPhase(pkg.name, pkg.stop_before_phase) - if pkg.last_phase and pkg.last_phase not in pkg.phases: + if pkg.last_phase and pkg.last_phase not in phases: raise BadInstallPhase(pkg.name, pkg.last_phase) # If we got a last_phase, make sure it's not already last - if pkg.last_phase and pkg.last_phase == pkg.phases[-1]: + if pkg.last_phase and pkg.last_phase == phases[-1]: pkg.last_phase = None @@ -128,7 +130,7 @@ def _handle_external_and_upstream(pkg, explicit): database if it is external package. Args: - pkg (spack.package_base.Package): the package whose installation is under + pkg (spack.package_base.PackageBase): the package whose installation is under consideration explicit (bool): the package was explicitly requested by the user Return: @@ -262,6 +264,30 @@ def _hms(seconds): return " ".join(parts) +def _log_prefix(pkg_name): + """Prefix of the form "[pid]: [pkg name]: ..." when printing a status update during + the build.""" + pid = "{0}: ".format(os.getpid()) if tty.show_pid() else "" + return "{0}{1}:".format(pid, pkg_name) + + +def _print_installed_pkg(message): + """ + Output a message with a package icon. 
+ + Args: + message (str): message to be output + """ + print(colorize("@*g{[+]} ") + spack.util.path.debug_padded_filter(message)) + + +def _print_timer(pre, pkg_id, fetch, build, total): + tty.msg( + "{0} Successfully installed {1}".format(pre, pkg_id), + "Fetch: {0}. Build: {1}. Total: {2}.".format(_hms(fetch), _hms(build), _hms(total)), + ) + + def _install_from_cache(pkg, cache_only, explicit, unsigned=False): """ Extract the package from binary cache @@ -278,7 +304,10 @@ def _install_from_cache(pkg, cache_only, explicit, unsigned=False): bool: ``True`` if the package was extract from binary cache, ``False`` otherwise """ - installed_from_cache = _try_install_from_binary_cache(pkg, explicit, unsigned=unsigned) + timer = Timer() + installed_from_cache = _try_install_from_binary_cache( + pkg, explicit, unsigned=unsigned, timer=timer + ) pkg_id = package_id(pkg) if not installed_from_cache: pre = "No binary for {0} found".format(pkg_id) @@ -287,23 +316,20 @@ def _install_from_cache(pkg, cache_only, explicit, unsigned=False): tty.msg("{0}: installing from source".format(pre)) return False - + timer.stop() tty.debug("Successfully extracted {0} from binary cache".format(pkg_id)) + _print_timer( + pre=_log_prefix(pkg.name), + pkg_id=pkg_id, + fetch=timer.phases.get("search", 0) + timer.phases.get("fetch", 0), + build=timer.phases.get("install", 0), + total=timer.total, + ) _print_installed_pkg(pkg.spec.prefix) spack.hooks.post_install(pkg.spec) return True -def _print_installed_pkg(message): - """ - Output a message with a package icon. - - Args: - message (str): message to be output - """ - print(colorize("@*g{[+]} ") + spack.util.path.debug_padded_filter(message)) - - def _process_external_package(pkg, explicit): """ Helper function to run post install hooks and register external packages. 
@@ -345,7 +371,9 @@ def _process_external_package(pkg, explicit): spack.store.db.add(spec, None, explicit=explicit) -def _process_binary_cache_tarball(pkg, binary_spec, explicit, unsigned, mirrors_for_spec=None): +def _process_binary_cache_tarball( + pkg, binary_spec, explicit, unsigned, mirrors_for_spec=None, timer=None +): """ Process the binary cache tarball. @@ -357,6 +385,7 @@ def _process_binary_cache_tarball(pkg, binary_spec, explicit, unsigned, mirrors_ otherwise, ``False`` mirrors_for_spec (list): Optional list of concrete specs and mirrors obtained by calling binary_distribution.get_mirrors_for_spec(). + timer (Timer): timer to keep track of binary install phases. Return: bool: ``True`` if the package was extracted from binary cache, @@ -365,6 +394,8 @@ def _process_binary_cache_tarball(pkg, binary_spec, explicit, unsigned, mirrors_ download_result = binary_distribution.download_tarball( binary_spec, unsigned, mirrors_for_spec=mirrors_for_spec ) + if timer: + timer.phase("fetch") # see #10063 : install from source if tarball doesn't exist if download_result is None: tty.msg("{0} exists in binary cache but with different hash".format(pkg.name)) @@ -381,10 +412,12 @@ def _process_binary_cache_tarball(pkg, binary_spec, explicit, unsigned, mirrors_ pkg.installed_from_binary_cache = True spack.store.db.add(pkg.spec, spack.store.layout, explicit=explicit) + if timer: + timer.phase("install") return True -def _try_install_from_binary_cache(pkg, explicit, unsigned=False): +def _try_install_from_binary_cache(pkg, explicit, unsigned=False, timer=None): """ Try to extract the package from binary cache. 
@@ -393,16 +426,20 @@ def _try_install_from_binary_cache(pkg, explicit, unsigned=False): explicit (bool): the package was explicitly requested by the user unsigned (bool): ``True`` if binary package signatures to be checked, otherwise, ``False`` + timer (Timer): """ pkg_id = package_id(pkg) tty.debug("Searching for binary cache of {0}".format(pkg_id)) matches = binary_distribution.get_mirrors_for_spec(pkg.spec) + if timer: + timer.phase("search") + if not matches: return False return _process_binary_cache_tarball( - pkg, pkg.spec, explicit, unsigned, mirrors_for_spec=matches + pkg, pkg.spec, explicit, unsigned, mirrors_for_spec=matches, timer=timer ) @@ -523,7 +560,7 @@ def log(pkg): Copy provenance into the install directory on success Args: - pkg (spack.package_base.Package): the package that was built and installed + pkg (spack.package_base.PackageBase): the package that was built and installed """ packages_dir = spack.store.layout.build_packages_path(pkg.spec) @@ -560,7 +597,7 @@ def log(pkg): errors = six.StringIO() target_dir = os.path.join(spack.store.layout.metadata_path(pkg.spec), "archived-files") - for glob_expr in pkg.archive_files: + for glob_expr in pkg.builder.archive_files: # Check that we are trying to copy things that are # in the stage tree (not arbitrary files) abs_expr = os.path.realpath(glob_expr) @@ -766,15 +803,41 @@ def _add_bootstrap_compilers(self, compiler, architecture, pkgs, request, all_de """ packages = _packages_needed_to_bootstrap_compiler(compiler, architecture, pkgs) for (comp_pkg, is_compiler) in packages: - if package_id(comp_pkg) not in self.build_tasks: + pkgid = package_id(comp_pkg) + if pkgid not in self.build_tasks: self._add_init_task(comp_pkg, request, is_compiler, all_deps) + elif is_compiler: + # ensure it's queued as a compiler + self._modify_existing_task(pkgid, "compiler", True) + + def _modify_existing_task(self, pkgid, attr, value): + """ + Update a task in-place to modify its behavior. 
+ + Currently used to update the ``compiler`` field on tasks + that were originally created as a dependency of a compiler, + but are compilers in their own right. + + For example, ``intel-oneapi-compilers-classic`` depends on + ``intel-oneapi-compilers``, which can cause the latter to be + queued first as a non-compiler, and only later as a compiler. + """ + for i, tup in enumerate(self.build_pq): + key, task = tup + if task.pkg_id == pkgid: + tty.debug( + "Modifying task for {0} to treat it as a compiler".format(pkgid), + level=2, + ) + setattr(task, attr, value) + self.build_pq[i] = (key, task) def _add_init_task(self, pkg, request, is_compiler, all_deps): """ Creates and queus the initial build task for the package. Args: - pkg (spack.package_base.Package): the package to be built and installed + pkg (spack.package_base.PackageBase): the package to be built and installed request (BuildRequest or None): the associated install request where ``None`` can be used to indicate the package was explicitly requested by the user @@ -1034,7 +1097,7 @@ def _ensure_locked(self, lock_type, pkg): try: if lock is None: - tty.debug(msg.format("Acquiring", desc, pkg_id, timeout)) + tty.debug(msg.format("Acquiring", desc, pkg_id, pretty_seconds(timeout or 0))) op = "acquire" lock = spack.store.db.prefix_lock(pkg.spec, timeout) if timeout != lock.default_timeout: @@ -1053,14 +1116,18 @@ def _ensure_locked(self, lock_type, pkg): # must be downgraded to be a read lock # Retain the original lock timeout, which is in the lock's # default_timeout setting. 
- tty.debug(msg.format("Downgrading to", desc, pkg_id, lock.default_timeout)) + tty.debug( + msg.format( + "Downgrading to", desc, pkg_id, pretty_seconds(lock.default_timeout or 0) + ) + ) op = "downgrade to" lock.downgrade_write_to_read() else: # read -> write # Only get here if the current lock is a read lock, which # must be upgraded to be a write lock - tty.debug(msg.format("Upgrading to", desc, pkg_id, timeout)) + tty.debug(msg.format("Upgrading to", desc, pkg_id, pretty_seconds(timeout or 0))) op = "upgrade to" lock.upgrade_read_to_write(timeout) tty.debug("{0} is now {1} locked".format(pkg_id, lock_type)) @@ -1174,6 +1241,12 @@ def _add_tasks(self, request, all_deps): fail_fast = request.install_args.get("fail_fast") self.fail_fast = self.fail_fast or fail_fast + def _add_compiler_package_to_config(self, pkg): + compiler_search_prefix = getattr(pkg, "compiler_search_prefix", pkg.spec.prefix) + spack.compilers.add_compilers_to_config( + spack.compilers.find_compilers([compiler_search_prefix]) + ) + def _install_task(self, task): """ Perform the installation of the requested spec and/or dependency @@ -1199,9 +1272,7 @@ def _install_task(self, task): if use_cache and _install_from_cache(pkg, cache_only, explicit, unsigned): self._update_installed(task) if task.compiler: - spack.compilers.add_compilers_to_config( - spack.compilers.find_compilers([pkg.spec.prefix]) - ) + self._add_compiler_package_to_config(pkg) return pkg.run_tests = tests is True or tests and pkg.name in tests @@ -1229,9 +1300,7 @@ def _install_task(self, task): # If a compiler, ensure it is added to the configuration if task.compiler: - spack.compilers.add_compilers_to_config( - spack.compilers.find_compilers([pkg.spec.prefix]) - ) + self._add_compiler_package_to_config(pkg) except spack.build_environment.StopPhase as e: # A StopPhase exception means that do_install was asked to # stop early from clients, and is not an error at this point @@ -1364,7 +1433,7 @@ def _setup_install_dir(self, 
pkg): Write a small metadata file with the current spack environment. Args: - pkg (spack.package_base.Package): the package to be built and installed + pkg (spack.package_base.PackageBase): the package to be built and installed """ if not os.path.exists(pkg.spec.prefix): path = spack.util.path.debug_padded_filter(pkg.spec.prefix) @@ -1437,8 +1506,8 @@ def _flag_installed(self, pkg, dependent_ids=None): known dependents. Args: - pkg (spack.package_base.Package): Package that has been installed locally, - externally or upstream + pkg (spack.package_base.PackageBase): Package that has been installed + locally, externally or upstream dependent_ids (list or None): list of the package's dependent ids, or None if the dependent ids are limited to those maintained in the package (dependency DAG) @@ -1522,11 +1591,7 @@ def _install_action(self, task): return InstallAction.OVERWRITE def install(self): - """ - Install the requested package(s) and or associated dependencies. - - Args: - pkg (spack.package_base.Package): the package to be built and installed""" + """Install the requested package(s) and or associated dependencies.""" self._init_queue() fail_fast_err = "Terminating after first install failure" @@ -1654,9 +1719,7 @@ def install(self): # It's an already installed compiler, add it to the config if task.compiler: - spack.compilers.add_compilers_to_config( - spack.compilers.find_compilers([pkg.spec.prefix]) - ) + self._add_compiler_package_to_config(pkg) else: # At this point we've failed to get a write or a read @@ -1710,6 +1773,16 @@ def install(self): spack.hooks.on_install_cancel(task.request.pkg.spec) raise + except binary_distribution.NoChecksumException as exc: + if not task.cache_only: + # Checking hash on downloaded binary failed. + err = "Failed to install {0} from binary cache due to {1}:" + err += " Requeueing to install from source." 
+ tty.error(err.format(pkg.name, str(exc))) + task.use_cache = False + self._requeue_task(task) + continue + except (Exception, SystemExit) as exc: self._update_failed(task, True, exc) spack.hooks.on_install_failure(task.request.pkg.spec) @@ -1841,8 +1914,7 @@ def __init__(self, pkg, install_args): self.filter_fn = spack.util.path.padding_filter if padding else None # info/debug information - pid = "{0}: ".format(os.getpid()) if tty.show_pid() else "" - self.pre = "{0}{1}:".format(pid, pkg.name) + self.pre = _log_prefix(pkg.name) self.pkg_id = package_id(pkg) def run(self): @@ -1885,12 +1957,12 @@ def run(self): # Run post install hooks before build stage is removed. spack.hooks.post_install(self.pkg.spec) - build_time = self.timer.total - self.pkg._fetch_time - tty.msg( - "{0} Successfully installed {1}".format(self.pre, self.pkg_id), - "Fetch: {0}. Build: {1}. Total: {2}.".format( - _hms(self.pkg._fetch_time), _hms(build_time), _hms(self.timer.total) - ), + _print_timer( + pre=self.pre, + pkg_id=self.pkg_id, + fetch=self.pkg._fetch_time, + build=self.timer.total - self.pkg._fetch_time, + total=self.timer.total, ) _print_installed_pkg(self.pkg.prefix) @@ -1912,6 +1984,8 @@ def _install_source(self): fs.install_tree(pkg.stage.source_path, src_target) def _real_install(self): + import spack.builder + pkg = self.pkg # Do the real install in the source directory. 
@@ -1942,13 +2016,11 @@ def _real_install(self): # Spawn a daemon that reads from a pipe and redirects # everything to log_path, and provide the phase for logging - for i, (phase_name, phase_attr) in enumerate( - zip(pkg.phases, pkg._InstallPhase_phases) - ): - + builder = spack.builder.create(pkg) + for i, phase_fn in enumerate(builder): # Keep a log file for each phase log_dir = os.path.dirname(pkg.log_path) - log_file = "spack-build-%02d-%s-out.txt" % (i + 1, phase_name.lower()) + log_file = "spack-build-%02d-%s-out.txt" % (i + 1, phase_fn.name.lower()) log_file = os.path.join(log_dir, log_file) try: @@ -1966,20 +2038,20 @@ def _real_install(self): with logger.force_echo(): inner_debug_level = tty.debug_level() tty.set_debug(debug_level) - tty.msg("{0} Executing phase: '{1}'".format(self.pre, phase_name)) + msg = "{0} Executing phase: '{1}'" + tty.msg(msg.format(self.pre, phase_fn.name)) tty.set_debug(inner_debug_level) # Redirect stdout and stderr to daemon pipe - phase = getattr(pkg, phase_attr) - self.timer.phase(phase_name) + self.timer.phase(phase_fn.name) # Catch any errors to report to logging - phase(pkg.spec, pkg.prefix) - spack.hooks.on_phase_success(pkg, phase_name, log_file) + phase_fn.execute() + spack.hooks.on_phase_success(pkg, phase_fn.name, log_file) except BaseException: combine_phase_logs(pkg.phase_log_files, pkg.log_path) - spack.hooks.on_phase_error(pkg, phase_name, log_file) + spack.hooks.on_phase_error(pkg, phase_fn.name, log_file) # phase error indicates install error spack.hooks.on_install_failure(pkg.spec) @@ -2055,7 +2127,7 @@ def __init__(self, pkg, request, compiler, start, attempts, status, installed): Instantiate a build task for a package. 
Args: - pkg (spack.package_base.Package): the package to be built and installed + pkg (spack.package_base.PackageBase): the package to be built and installed request (BuildRequest or None): the associated install request where ``None`` can be used to indicate the package was explicitly requested by the user @@ -2271,7 +2343,7 @@ def __init__(self, pkg, install_args): Instantiate a build request for a package. Args: - pkg (spack.package_base.Package): the package to be built and installed + pkg (spack.package_base.PackageBase): the package to be built and installed install_args (dict): the install arguments associated with ``pkg`` """ # Ensure dealing with a package that has a concrete spec @@ -2357,7 +2429,13 @@ def get_deptypes(self, pkg): """ deptypes = ["link", "run"] include_build_deps = self.install_args.get("include_build_deps") - if not self.install_args.get("cache_only") or include_build_deps: + + if self.pkg_id == package_id(pkg): + cache_only = self.install_args.get("package_cache_only") + else: + cache_only = self.install_args.get("dependencies_cache_only") + + if not cache_only or include_build_deps: deptypes.append("build") if self.run_tests(pkg): deptypes.append("test") @@ -2386,21 +2464,31 @@ def spec(self): """The specification associated with the package.""" return self.pkg.spec - def traverse_dependencies(self): + def traverse_dependencies(self, spec=None, visited=None): """ Yield any dependencies of the appropriate type(s) Yields: (Spec) The next child spec in the DAG """ - get_spec = lambda s: s.spec + # notice: deptype is not constant across nodes, so we cannot use + # spec.traverse_edges(deptype=...). 
- deptypes = self.get_deptypes(self.pkg) - tty.debug("Processing dependencies for {0}: {1}".format(self.pkg_id, deptypes)) - for dspec in self.spec.traverse_edges( - deptype=deptypes, order="post", root=False, direction="children" - ): - yield get_spec(dspec) + if spec is None: + spec = self.spec + if visited is None: + visited = set() + deptype = self.get_deptypes(spec.package) + + for dep in spec.dependencies(deptype=deptype): + hash = dep.dag_hash() + if hash in visited: + continue + visited.add(hash) + # In Python 3: yield from self.traverse_dependencies(dep, visited) + for s in self.traverse_dependencies(dep, visited): + yield s + yield dep class InstallError(spack.error.SpackError): diff --git a/lib/spack/spack/main.py b/lib/spack/spack/main.py index 7e84be57810..a9a9d20df70 100644 --- a/lib/spack/spack/main.py +++ b/lib/spack/spack/main.py @@ -107,6 +107,9 @@ spack_working_dir = None spack_ld_library_path = os.environ.get("LD_LIBRARY_PATH", "") +#: Whether to print backtraces on error +SHOW_BACKTRACE = False + def set_working_dir(): """Change the working directory to getcwd, or spack prefix if no cwd.""" @@ -340,17 +343,21 @@ def add_command(self, cmd_name): self._remove_action(self._actions[-1]) self.subparsers = self.add_subparsers(metavar="COMMAND", dest="command") - # each command module implements a parser() function, to which we - # pass its subparser for setup. - module = spack.cmd.get_module(cmd_name) + if cmd_name not in self.subparsers._name_parser_map: + # each command module implements a parser() function, to which we + # pass its subparser for setup. 
+ module = spack.cmd.get_module(cmd_name) - # build a list of aliases - alias_list = [k for k, v in aliases.items() if v == cmd_name] + # build a list of aliases + alias_list = [k for k, v in aliases.items() if v == cmd_name] - subparser = self.subparsers.add_parser( - cmd_name, aliases=alias_list, help=module.description, description=module.description - ) - module.setup_parser(subparser) + subparser = self.subparsers.add_parser( + cmd_name, + aliases=alias_list, + help=module.description, + description=module.description, + ) + module.setup_parser(subparser) # return the callable function for the command return spack.cmd.get_command(cmd_name) @@ -527,6 +534,12 @@ def make_argument_parser(**kwargs): default="SPACK_STACKTRACE" in os.environ, help="add stacktraces to all printed statements", ) + parser.add_argument( + "--backtrace", + action="store_true", + default="SPACK_BACKTRACE" in os.environ, + help="always show backtraces for exceptions", + ) parser.add_argument( "-V", "--version", action="store_true", help="show version number and exit" ) @@ -561,8 +574,12 @@ def setup_main_options(args): # debug must be set first so that it can even affect behavior of # errors raised by spack.config. 
+ if args.debug or args.backtrace: + spack.error.debug = True + global SHOW_BACKTRACE + SHOW_BACKTRACE = True + if args.debug: - spack.error.debug = args.debug spack.util.debug.register_interrupt_handler() spack.config.set("config:debug", True, scope="command_line") spack.util.environment.tracing_enabled = True @@ -994,7 +1011,7 @@ def main(argv=None): e.die() # gracefully die on any SpackErrors except KeyboardInterrupt: - if spack.config.get("config:debug"): + if spack.config.get("config:debug") or SHOW_BACKTRACE: raise sys.stderr.write("\n") tty.error("Keyboard interrupt.") @@ -1004,12 +1021,12 @@ def main(argv=None): return signal.SIGINT except SystemExit as e: - if spack.config.get("config:debug"): + if spack.config.get("config:debug") or SHOW_BACKTRACE: traceback.print_exc() return e.code except Exception as e: - if spack.config.get("config:debug"): + if spack.config.get("config:debug") or SHOW_BACKTRACE: raise tty.error(e) return 3 diff --git a/lib/spack/spack/mixins.py b/lib/spack/spack/mixins.py index ace3681e527..b43b85aa026 100644 --- a/lib/spack/spack/mixins.py +++ b/lib/spack/spack/mixins.py @@ -6,10 +6,9 @@ """This module contains additional behavior that can be attached to any given package. """ -import collections import os import sys -from typing import Callable, DefaultDict, Dict, List # novm +from typing import Callable, DefaultDict, List # novm if sys.version_info >= (3, 5): CallbackDict = DefaultDict[str, List[Callable]] @@ -18,105 +17,7 @@ import llnl.util.filesystem -__all__ = [ - "filter_compiler_wrappers", - "PackageMixinsMeta", -] - - -class PackageMixinsMeta(type): - """This metaclass serves the purpose of implementing a declarative syntax - for package mixins. - - Mixins are implemented below in the form of a function. Each one of them - needs to register a callable that takes a single argument to be run - before or after a certain phase. 
This callable is basically a method that - gets implicitly attached to the package class by calling the mixin. - """ - - _methods_to_be_added = {} # type: Dict[str, Callable] - _add_method_before = collections.defaultdict(list) # type: CallbackDict - _add_method_after = collections.defaultdict(list) # type: CallbackDict - - @staticmethod - def register_method_before(fn, phase): # type: (Callable, str) -> None - """Registers a method to be run before a certain phase. - - Args: - fn: function taking a single argument (self) - phase (str): phase before which fn must run - """ - PackageMixinsMeta._methods_to_be_added[fn.__name__] = fn - PackageMixinsMeta._add_method_before[phase].append(fn) - - @staticmethod - def register_method_after(fn, phase): # type: (Callable, str) -> None - """Registers a method to be run after a certain phase. - - Args: - fn: function taking a single argument (self) - phase (str): phase after which fn must run - """ - PackageMixinsMeta._methods_to_be_added[fn.__name__] = fn - PackageMixinsMeta._add_method_after[phase].append(fn) - - def __init__(cls, name, bases, attr_dict): - - # Add the methods to the class being created - if PackageMixinsMeta._methods_to_be_added: - attr_dict.update(PackageMixinsMeta._methods_to_be_added) - PackageMixinsMeta._methods_to_be_added.clear() - - attr_fmt = "_InstallPhase_{0}" - - # Copy the phases that needs it to the most derived classes - # in order not to interfere with other packages in the hierarchy - phases_to_be_copied = list(PackageMixinsMeta._add_method_before.keys()) - phases_to_be_copied += list(PackageMixinsMeta._add_method_after.keys()) - - for phase in phases_to_be_copied: - - attr_name = attr_fmt.format(phase) - - # Here we want to get the attribute directly from the class (not - # from the instance), so that we can modify it and add the mixin - # method to the pipeline. 
- phase = getattr(cls, attr_name) - - # Due to MRO, we may have taken a method from a parent class - # and modifying it may influence other packages in unwanted - # manners. Solve the problem by copying the phase into the most - # derived class. - setattr(cls, attr_name, phase.copy()) - - # Insert the methods in the appropriate position - # in the installation pipeline. - - for phase in PackageMixinsMeta._add_method_before: - - attr_name = attr_fmt.format(phase) - phase_obj = getattr(cls, attr_name) - fn_list = PackageMixinsMeta._add_method_after[phase] - - for f in fn_list: - phase_obj.run_before.append(f) - - # Flush the dictionary for the next class - PackageMixinsMeta._add_method_before.clear() - - for phase in PackageMixinsMeta._add_method_after: - - attr_name = attr_fmt.format(phase) - phase_obj = getattr(cls, attr_name) - fn_list = PackageMixinsMeta._add_method_after[phase] - - for f in fn_list: - phase_obj.run_after.append(f) - - # Flush the dictionary for the next class - PackageMixinsMeta._add_method_after.clear() - - super(PackageMixinsMeta, cls).__init__(name, bases, attr_dict) +import spack.builder def filter_compiler_wrappers(*files, **kwargs): @@ -216,4 +117,4 @@ def _filter_compiler_wrappers_impl(self): if self.compiler.name == "nag": x.filter("-Wl,--enable-new-dtags", "", **filter_kwargs) - PackageMixinsMeta.register_method_after(_filter_compiler_wrappers_impl, after) + spack.builder.run_after(after)(_filter_compiler_wrappers_impl) diff --git a/lib/spack/spack/modules/lmod.py b/lib/spack/spack/modules/lmod.py index bb288336eaa..3378e533545 100644 --- a/lib/spack/spack/modules/lmod.py +++ b/lib/spack/spack/modules/lmod.py @@ -184,22 +184,10 @@ def provides(self): # If it is in the list of supported compilers family -> compiler if self.spec.name in spack.compilers.supported_compilers(): provides["compiler"] = spack.spec.CompilerSpec(str(self.spec)) - # Special case for llvm - if self.spec.name == "llvm": - provides["compiler"] = 
spack.spec.CompilerSpec(str(self.spec)) - provides["compiler"].name = "clang" - # Special case for llvm-amdgpu - if self.spec.name == "llvm-amdgpu": - provides["compiler"] = spack.spec.CompilerSpec(str(self.spec)) - provides["compiler"].name = "rocmcc" - # Special case for oneapi - if self.spec.name == "intel-oneapi-compilers": - provides["compiler"] = spack.spec.CompilerSpec(str(self.spec)) - provides["compiler"].name = "oneapi" - # Special case for oneapi classic - if self.spec.name == "intel-oneapi-compilers-classic": - provides["compiler"] = spack.spec.CompilerSpec(str(self.spec)) - provides["compiler"].name = "intel" + elif self.spec.name in spack.compilers.package_name_to_compiler_name: + # If it is the package for a supported compiler, but of a different name + cname = spack.compilers.package_name_to_compiler_name[self.spec.name] + provides["compiler"] = spack.spec.CompilerSpec("%s@%s" % (cname, self.spec.version)) # All the other tokens in the hierarchy must be virtual dependencies for x in self.hierarchy_tokens: diff --git a/lib/spack/spack/multimethod.py b/lib/spack/spack/multimethod.py index 2c6a56db1a5..66f7a887f33 100644 --- a/lib/spack/spack/multimethod.py +++ b/lib/spack/spack/multimethod.py @@ -120,32 +120,36 @@ def _get_method_by_spec(self, spec): return method return self.default or None - def __call__(self, package_self, *args, **kwargs): + def __call__(self, package_or_builder_self, *args, **kwargs): """Find the first method with a spec that matches the package's spec. If none is found, call the default or if there is none, then raise a NoSuchMethodError. """ - spec_method = self._get_method_by_spec(package_self.spec) + spec_method = self._get_method_by_spec(package_or_builder_self.spec) if spec_method: - return spec_method(package_self, *args, **kwargs) + return spec_method(package_or_builder_self, *args, **kwargs) # Unwrap the MRO of `package_self by hand. 
Note that we can't # use `super()` here, because using `super()` recursively # requires us to know the class of `package_self`, as well as # its superclasses for successive calls. We don't have that # information within `SpecMultiMethod`, because it is not # associated with the package class. - for cls in inspect.getmro(package_self.__class__)[1:]: + for cls in inspect.getmro(package_or_builder_self.__class__)[1:]: superself = cls.__dict__.get(self.__name__, None) + if isinstance(superself, SpecMultiMethod): # Check parent multimethod for method for spec. - superself_method = superself._get_method_by_spec(package_self.spec) + superself_method = superself._get_method_by_spec(package_or_builder_self.spec) if superself_method: - return superself_method(package_self, *args, **kwargs) + return superself_method(package_or_builder_self, *args, **kwargs) elif superself: - return superself(package_self, *args, **kwargs) + return superself(package_or_builder_self, *args, **kwargs) raise NoSuchMethodError( - type(package_self), self.__name__, package_self.spec, [m[0] for m in self.method_list] + type(package_or_builder_self), + self.__name__, + package_or_builder_self.spec, + [m[0] for m in self.method_list], ) diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py index 65e699bda4b..46c9da48440 100644 --- a/lib/spack/spack/package.py +++ b/lib/spack/spack/package.py @@ -17,6 +17,7 @@ import spack.util.executable from spack.build_systems.aspell_dict import AspellDictPackage from spack.build_systems.autotools import AutotoolsPackage +from spack.build_systems.bundle import BundlePackage from spack.build_systems.cached_cmake import ( CachedCMakePackage, cmake_cache_option, @@ -25,12 +26,14 @@ ) from spack.build_systems.cmake import CMakePackage from spack.build_systems.cuda import CudaPackage +from spack.build_systems.generic import Package from spack.build_systems.gnu import GNUMirrorPackage from spack.build_systems.intel import IntelPackage from 
spack.build_systems.lua import LuaPackage from spack.build_systems.makefile import MakefilePackage from spack.build_systems.maven import MavenPackage from spack.build_systems.meson import MesonPackage +from spack.build_systems.nmake import NMakePackage from spack.build_systems.octave import OctavePackage from spack.build_systems.oneapi import ( IntelOneApiLibraryPackage, @@ -38,7 +41,7 @@ IntelOneApiStaticLibraryList, ) from spack.build_systems.perl import PerlPackage -from spack.build_systems.python import PythonPackage +from spack.build_systems.python import PythonExtension, PythonPackage from spack.build_systems.qmake import QMakePackage from spack.build_systems.r import RPackage from spack.build_systems.racket import RacketPackage @@ -50,6 +53,7 @@ from spack.build_systems.sourceware import SourcewarePackage from spack.build_systems.waf import WafPackage from spack.build_systems.xorg import XorgPackage +from spack.builder import run_after, run_before from spack.dependency import all_deptypes from spack.directives import * from spack.install_test import get_escaped_text_output @@ -62,17 +66,13 @@ from spack.mixins import filter_compiler_wrappers from spack.multimethod import when from spack.package_base import ( - BundlePackage, DependencyConflictError, - Package, build_system_flags, env_flags, flatten_dependencies, inject_flags, install_dependency_symlinks, on_package_attributes, - run_after, - run_before, ) from spack.spec import InvalidSpecDetected, Spec from spack.util.executable import * diff --git a/lib/spack/spack/package_base.py b/lib/spack/spack/package_base.py index f4573e6f195..48b036e8cfa 100644 --- a/lib/spack/spack/package_base.py +++ b/lib/spack/spack/package_base.py @@ -33,7 +33,7 @@ import llnl.util.filesystem as fsys import llnl.util.tty as tty -from llnl.util.lang import classproperty, match_predicate, memoized, nullcontext +from llnl.util.lang import classproperty, memoized, nullcontext from llnl.util.link_tree import LinkTree import 
spack.compilers @@ -100,12 +100,29 @@ is_windows = sys.platform == "win32" +def deprecated_version(pkg, version): + """Return True if the version is deprecated, False otherwise. + + Arguments: + pkg (PackageBase): The package whose version is to be checked. + version (str or spack.version.VersionBase): The version being checked + """ + if not isinstance(version, VersionBase): + version = Version(version) + + for k, v in pkg.versions.items(): + if version == k and v.get("deprecated", False): + return True + + return False + + def preferred_version(pkg): """ Returns a sorted list of the preferred versions of the package. Arguments: - pkg (Package): The package whose versions are to be assessed. + pkg (PackageBase): The package whose versions are to be assessed. """ # Here we sort first on the fact that a version is marked # as preferred in the package, then on the fact that the @@ -114,77 +131,6 @@ def preferred_version(pkg): return sorted(pkg.versions, key=key_fn).pop() -class InstallPhase(object): - """Manages a single phase of the installation. - - This descriptor stores at creation time the name of the method it should - search for execution. The method is retrieved at __get__ time, so that - it can be overridden by subclasses of whatever class declared the phases. - - It also provides hooks to execute arbitrary callbacks before and after - the phase. 
- """ - - def __init__(self, name): - self.name = name - self.run_before = [] - self.run_after = [] - - def __get__(self, instance, owner): - # The caller is a class that is trying to customize - # my behavior adding something - if instance is None: - return self - # If instance is there the caller wants to execute the - # install phase, thus return a properly set wrapper - phase = getattr(instance, self.name) - - @functools.wraps(phase) - def phase_wrapper(spec, prefix): - # Check instance attributes at the beginning of a phase - self._on_phase_start(instance) - # Execute phase pre-conditions, - # and give them the chance to fail - for callback in self.run_before: - callback(instance) - phase(spec, prefix) - # Execute phase sanity_checks, - # and give them the chance to fail - for callback in self.run_after: - callback(instance) - # Check instance attributes at the end of a phase - self._on_phase_exit(instance) - - return phase_wrapper - - def _on_phase_start(self, instance): - # If a phase has a matching stop_before_phase attribute, - # stop the installation process raising a StopPhase - if getattr(instance, "stop_before_phase", None) == self.name: - from spack.build_environment import StopPhase - - raise StopPhase("Stopping before '{0}' phase".format(self.name)) - - def _on_phase_exit(self, instance): - # If a phase has a matching last_phase attribute, - # stop the installation process raising a StopPhase - if getattr(instance, "last_phase", None) == self.name: - from spack.build_environment import StopPhase - - raise StopPhase("Stopping at '{0}' phase".format(self.name)) - - def copy(self): - try: - return copy.deepcopy(self) - except TypeError: - # This bug-fix was not back-ported in Python 2.6 - # http://bugs.python.org/issue1515 - other = InstallPhase(self.name) - other.run_before.extend(self.run_before) - other.run_after.extend(self.run_after) - return other - - class WindowsRPathMeta(object): """Collection of functionality surrounding Windows RPATH 
specific features @@ -194,11 +140,30 @@ class WindowsRPathMeta(object): they would a genuine RPATH, i.e. adding directories that contain runtime library dependencies""" - def add_search_paths(self, *path): - """Add additional rpaths that are not implicitly included in the search - scheme + def win_add_library_dependent(self): + """Return extra set of directories that require linking for package + + This method should be overridden by packages that produce + binaries/libraries/python extension modules/etc that are installed into + directories outside a package's `bin`, `lib`, and `lib64` directories, + but still require linking against one of the packages dependencies, or + other components of the package itself. No-op otherwise. + + Returns: + List of additional directories that require linking """ - self.win_rpath.include_additional_link_paths(*path) + return [] + + def win_add_rpath(self): + """Return extra set of rpaths for package + + This method should be overridden by packages needing to + include additional paths to be searched by rpath. 
No-op otherwise + + Returns: + List of additional rpaths + """ + return [] def windows_establish_runtime_linkage(self): """Establish RPATH on Windows @@ -206,6 +171,8 @@ def windows_establish_runtime_linkage(self): Performs symlinking to incorporate rpath dependencies to Windows runtime search paths """ if is_windows: + self.win_rpath.add_library_dependent(*self.win_add_library_dependent()) + self.win_rpath.add_rpath(*self.win_add_rpath()) self.win_rpath.establish_link() @@ -351,23 +318,18 @@ def determine_variants(cls, objs, version_str): class PackageMeta( + spack.builder.PhaseCallbacksMeta, DetectablePackageMeta, spack.directives.DirectiveMeta, - spack.mixins.PackageMixinsMeta, spack.multimethod.MultiMethodMeta, ): """ Package metaclass for supporting directives (e.g., depends_on) and phases """ - phase_fmt = "_InstallPhase_{0}" - - # These are accessed only through getattr, by name - _InstallPhase_run_before = {} # type: Dict[str, List[Callable]] - _InstallPhase_run_after = {} # type: Dict[str, List[Callable]] - def __new__(cls, name, bases, attr_dict): """ + FIXME: REWRITE Instance creation is preceded by phase attribute transformations. Conveniently transforms attributes to permit extensible phases by @@ -375,70 +337,10 @@ def __new__(cls, name, bases, attr_dict): InstallPhase attributes in the class that will be initialized in __init__. 
""" - if "phases" in attr_dict: - # Turn the strings in 'phases' into InstallPhase instances - # and add them as private attributes - _InstallPhase_phases = [PackageMeta.phase_fmt.format(x) for x in attr_dict["phases"]] - for phase_name, callback_name in zip(_InstallPhase_phases, attr_dict["phases"]): - attr_dict[phase_name] = InstallPhase(callback_name) - attr_dict["_InstallPhase_phases"] = _InstallPhase_phases - - def _flush_callbacks(check_name): - # Name of the attribute I am going to check it exists - check_attr = PackageMeta.phase_fmt.format(check_name) - checks = getattr(cls, check_attr) - if checks: - for phase_name, funcs in checks.items(): - phase_attr = PackageMeta.phase_fmt.format(phase_name) - try: - # Search for the phase in the attribute dictionary - phase = attr_dict[phase_attr] - except KeyError: - # If it is not there it's in the bases - # and we added a check. We need to copy - # and extend - for base in bases: - phase = getattr(base, phase_attr, None) - if phase is not None: - break - - phase = attr_dict[phase_attr] = phase.copy() - getattr(phase, check_name).extend(funcs) - # Clear the attribute for the next class - setattr(cls, check_attr, {}) - - _flush_callbacks("run_before") - _flush_callbacks("run_after") - - # Reset names for packages that inherit from another - # package with a different name attr_dict["_name"] = None return super(PackageMeta, cls).__new__(cls, name, bases, attr_dict) - @staticmethod - def register_callback(check_type, *phases): - def _decorator(func): - attr_name = PackageMeta.phase_fmt.format(check_type) - check_list = getattr(PackageMeta, attr_name) - for item in phases: - checks = check_list.setdefault(item, []) - checks.append(func) - setattr(PackageMeta, attr_name, check_list) - return func - - return _decorator - - -def run_before(*phases): - """Registers a method of a package to be run before a given phase""" - return PackageMeta.register_callback("run_before", *phases) - - -def run_after(*phases): - """Registers 
a method of a package to be run after a given phase""" - return PackageMeta.register_callback("run_after", *phases) - def on_package_attributes(**attr_dict): """Decorator: executes instance function only if object has attr valuses. @@ -458,7 +360,9 @@ def _wrapper(instance, *args, **kwargs): has_all_attributes = all([hasattr(instance, key) for key in attr_dict]) if has_all_attributes: has_the_right_values = all( - [getattr(instance, key) == value for key, value in attr_dict.items()] + [ + getattr(instance, key) == value for key, value in attr_dict.items() + ] # NOQA: ignore=E501 ) if has_the_right_values: func(instance, *args, **kwargs) @@ -660,6 +564,15 @@ class PackageBase(six.with_metaclass(PackageMeta, WindowsRPathMeta, PackageViewM #: for immediate dependencies. transitive_rpaths = True + #: List of shared objects that should be replaced with a different library at + #: runtime. Typically includes stub libraries like libcuda.so. When linking + #: against a library listed here, the dependent will only record its soname + #: or filename, not its absolute path, so that the dynamic linker will search + #: for it. Note: accepts both file names and directory names, for example + #: ``["libcuda.so", "stubs"]`` will ensure libcuda.so and all libraries in the + #: stubs directory are not bound by path.""" + non_bindable_shared_objects = [] # type: List[str] + #: List of prefix-relative file paths (or a single path). If these do #: not exist after install, or if they exist but are not files, #: sanity checks fail. @@ -670,13 +583,6 @@ class PackageBase(six.with_metaclass(PackageMeta, WindowsRPathMeta, PackageViewM #: directories, sanity checks will fail. sanity_check_is_dir = [] # type: List[str] - #: List of glob expressions. Each expression must either be - #: absolute or relative to the package source path. 
- #: Matching artifacts found at the end of the build process will be - #: copied in the same directory tree as _spack_build_logfile and - #: _spack_build_envfile. - archive_files = [] # type: List[str] - #: Boolean. Set to ``True`` for packages that require a manual download. #: This is currently used by package sanity tests and generation of a #: more meaningful fetch failure error. @@ -1013,6 +919,12 @@ def url_for_version(self, version): """ return self._implement_all_urls_for_version(version)[0] + def update_external_dependencies(self): + """ + Method to override in package classes to handle external dependencies + """ + pass + def all_urls_for_version(self, version): """Return all URLs derived from version_urls(), url, urls, and list_url (if it contains a version) in a package in that order. @@ -1021,7 +933,7 @@ def all_urls_for_version(self, version): version (spack.version.Version): the version for which a URL is sought """ uf = None - if type(self).url_for_version != Package.url_for_version: + if type(self).url_for_version != PackageBase.url_for_version: uf = self.url_for_version return self._implement_all_urls_for_version(version, uf) @@ -1741,7 +1653,7 @@ def content_hash(self, content=None): from_local_sources = env and env.is_develop(self.spec) if self.has_code and not self.spec.external and not from_local_sources: message = "Missing a source id for {s.name}@{s.version}" - tty.warn(message.format(s=self)) + tty.debug(message.format(s=self)) hash_content.append("".encode("utf-8")) else: hash_content.append(source_id.encode("utf-8")) @@ -1942,9 +1854,9 @@ def do_install(self, **kwargs): even with exceptions. restage (bool): Force spack to restage the package source. skip_patch (bool): Skip patch stage of build if True. 
- stop_before (InstallPhase): stop execution before this + stop_before (str): stop execution before this installation phase (or None) - stop_at (InstallPhase): last installation phase to be executed + stop_at (str): last installation phase to be executed (or None) tests (bool or list or set): False to run no tests, True to test all packages, or a list of package names to run tests for some @@ -2174,46 +2086,6 @@ def unit_test_check(self): """ return True - def sanity_check_prefix(self): - """This function checks whether install succeeded.""" - - def check_paths(path_list, filetype, predicate): - if isinstance(path_list, six.string_types): - path_list = [path_list] - - for path in path_list: - abs_path = os.path.join(self.prefix, path) - if not predicate(abs_path): - raise InstallError( - "Install failed for %s. No such %s in prefix: %s" - % (self.name, filetype, path) - ) - - check_paths(self.sanity_check_is_file, "file", os.path.isfile) - check_paths(self.sanity_check_is_dir, "directory", os.path.isdir) - - ignore_file = match_predicate(spack.store.layout.hidden_file_regexes) - if all(map(ignore_file, os.listdir(self.prefix))): - raise InstallError("Install failed for %s. Nothing was installed!" % self.name) - - def apply_macos_rpath_fixups(self): - """On Darwin, make installed libraries more easily relocatable. - - Some build systems (handrolled, autotools, makefiles) can set their own - rpaths that are duplicated by spack's compiler wrapper. This fixup - interrogates, and postprocesses if necessary, all libraries installed - by the code. - - It should be added as a @run_after to packaging systems (or individual - packages) that do not install relocatable libraries by default. 
- """ - if "platform=darwin" not in self.spec: - return - - from spack.relocate import fixup_macos_rpaths - - fixup_macos_rpaths(self.spec) - @property def build_log_path(self): """ @@ -2251,19 +2123,6 @@ def build_system_flags(cls, name, flags): """ return None, None, flags - def setup_build_environment(self, env): - """Sets up the build environment for a package. - - This method will be called before the current package prefix exists in - Spack's store. - - Args: - env (spack.util.environment.EnvironmentModifications): environment - modifications to be applied when the package is built. Package authors - can call methods on it to alter the build environment. - """ - pass - def setup_run_environment(self, env): """Sets up the run environment for a package. @@ -2274,37 +2133,6 @@ def setup_run_environment(self, env): """ pass - def setup_dependent_build_environment(self, env, dependent_spec): - """Sets up the build environment of packages that depend on this one. - - This is similar to ``setup_build_environment``, but it is used to - modify the build environments of packages that *depend* on this one. - - This gives packages like Python and others that follow the extension - model a way to implement common environment or compile-time settings - for dependencies. - - This method will be called before the dependent package prefix exists - in Spack's store. - - Examples: - 1. Installing python modules generally requires ``PYTHONPATH`` - to point to the ``lib/pythonX.Y/site-packages`` directory in the - module's install prefix. This method could be used to set that - variable. - - Args: - env (spack.util.environment.EnvironmentModifications): environment - modifications to be applied when the dependent package is built. - Package authors can call methods on it to alter the build environment. - - dependent_spec (spack.spec.Spec): the spec of the dependent package - about to be built. This allows the extendee (self) to query - the dependent's state. 
Note that *this* package's spec is - available as ``self.spec`` - """ - pass - def setup_dependent_run_environment(self, env, dependent_spec): """Sets up the run environment of packages that depend on this one. @@ -2491,7 +2319,7 @@ def uninstall_by_spec(spec, force=False, deprecator=None): def do_uninstall(self, force=False): """Uninstall this package by spec.""" # delegate to instance-less method. - Package.uninstall_by_spec(self.spec, force) + PackageBase.uninstall_by_spec(self.spec, force) def do_deprecate(self, deprecator, link_fn): """Deprecate this package in favor of deprecator spec""" @@ -2543,7 +2371,7 @@ def do_deprecate(self, deprecator, link_fn): deprecated.package.do_deprecate(deprecator, link_fn) # Now that we've handled metadata, uninstall and replace with link - Package.uninstall_by_spec(spec, force=True, deprecator=deprecator) + PackageBase.uninstall_by_spec(spec, force=True, deprecator=deprecator) link_fn(deprecator.prefix, spec.prefix) def _check_extendable(self): @@ -2779,12 +2607,17 @@ def fetch_remote_versions(self, concurrency=128): @property def rpath(self): """Get the rpath this package links with, as a list of paths.""" - rpaths = [self.prefix.lib, self.prefix.lib64] deps = self.spec.dependencies(deptype="link") - rpaths.extend(d.prefix.lib for d in deps if os.path.isdir(d.prefix.lib)) - rpaths.extend(d.prefix.lib64 for d in deps if os.path.isdir(d.prefix.lib64)) + + # on Windows, libraries of runtime interest are typically + # stored in the bin directory if is_windows: + rpaths = [self.prefix.bin] rpaths.extend(d.prefix.bin for d in deps if os.path.isdir(d.prefix.bin)) + else: + rpaths = [self.prefix.lib, self.prefix.lib64] + rpaths.extend(d.prefix.lib for d in deps if os.path.isdir(d.prefix.lib)) + rpaths.extend(d.prefix.lib64 for d in deps if os.path.isdir(d.prefix.lib64)) return rpaths @property @@ -2794,21 +2627,25 @@ def rpath_args(self): """ return " ".join("-Wl,-rpath,%s" % p for p in self.rpath) - def _run_test_callbacks(self, 
method_names, callback_type="install"): + @property + def builder(self): + return spack.builder.create(self) + + @staticmethod + def run_test_callbacks(builder, method_names, callback_type="install"): """Tries to call all of the listed methods, returning immediately if the list is None.""" - if method_names is None: + if not builder.pkg.run_tests or method_names is None: return fail_fast = spack.config.get("config:fail_fast", False) - - with self._setup_test(verbose=False, externals=False) as logger: + with builder.pkg._setup_test(verbose=False, externals=False) as logger: # Report running each of the methods in the build log print_test_message(logger, "Running {0}-time tests".format(callback_type), True) for name in method_names: try: - fn = getattr(self, name) + fn = getattr(builder, name) msg = ("RUN-TESTS: {0}-time tests [{1}]".format(callback_type, name),) print_test_message(logger, msg, True) @@ -2818,27 +2655,13 @@ def _run_test_callbacks(self, method_names, callback_type="install"): msg = ("RUN-TESTS: method not implemented [{0}]".format(name),) print_test_message(logger, msg, True) - self.test_failures.append((e, msg)) + builder.pkg.test_failures.append((e, msg)) if fail_fast: break # Raise any collected failures here - if self.test_failures: - raise TestFailure(self.test_failures) - - @on_package_attributes(run_tests=True) - def _run_default_build_time_test_callbacks(self): - """Tries to call all the methods that are listed in the attribute - ``build_time_test_callbacks`` if ``self.run_tests is True``. - """ - self._run_test_callbacks(self.build_time_test_callbacks, "build") - - @on_package_attributes(run_tests=True) - def _run_default_install_time_test_callbacks(self): - """Tries to call all the methods that are listed in the attribute - ``install_time_test_callbacks`` if ``self.run_tests is True``. 
- """ - self._run_test_callbacks(self.install_time_test_callbacks, "install") + if builder.pkg.test_failures: + raise TestFailure(builder.pkg.test_failures) def has_test_method(pkg): @@ -2962,37 +2785,6 @@ def test_process(pkg, kwargs): build_system_flags = PackageBase.build_system_flags -class BundlePackage(PackageBase): - """General purpose bundle, or no-code, package class.""" - - #: There are no phases by default but the property is required to support - #: post-install hooks (e.g., for module generation). - phases = [] # type: List[str] - #: This attribute is used in UI queries that require to know which - #: build-system class we are using - build_system_class = "BundlePackage" - - #: Bundle packages do not have associated source or binary code. - has_code = False - - -class Package(PackageBase): - """General purpose class with a single ``install`` - phase that needs to be coded by packagers. - """ - - #: The one and only phase - phases = ["install"] - #: This attribute is used in UI queries that require to know which - #: build-system class we are using - build_system_class = "Package" - # This will be used as a registration decorator in user - # packages, if need be - run_after("install")(PackageBase.sanity_check_prefix) - # On macOS, force rpaths for shared library IDs and remove duplicate rpaths - run_after("install")(PackageBase.apply_macos_rpath_fixups) - - def install_dependency_symlinks(pkg, spec, prefix): """ Execute a dummy install and flatten dependencies. 
diff --git a/lib/spack/spack/package_prefs.py b/lib/spack/spack/package_prefs.py index 975585ac956..48d1ca129db 100644 --- a/lib/spack/spack/package_prefs.py +++ b/lib/spack/spack/package_prefs.py @@ -195,23 +195,23 @@ def _package(maybe_abstract_spec): def is_spec_buildable(spec): """Return true if the spec is configured as buildable""" - allpkgs = spack.config.get("packages") all_buildable = allpkgs.get("all", {}).get("buildable", True) + so_far = all_buildable # the default "so far" def _package(s): pkg_cls = spack.repo.path.get_pkg_class(s.name) return pkg_cls(s) - # Get the list of names for which all_buildable is overridden - reverse = [ - name + # check whether any providers for this package override the default + if any( + _package(spec).provides(name) and entry.get("buildable", so_far) != so_far for name, entry in allpkgs.items() - if entry.get("buildable", all_buildable) != all_buildable - ] - # Does this spec override all_buildable - spec_reversed = spec.name in reverse or any(_package(spec).provides(name) for name in reverse) - return not all_buildable if spec_reversed else all_buildable + ): + so_far = not so_far + + spec_buildable = allpkgs.get(spec.name, {}).get("buildable", so_far) + return spec_buildable def get_package_dir_permissions(spec): diff --git a/lib/spack/spack/patch.py b/lib/spack/spack/patch.py index 553b04fda92..06841fa73fd 100644 --- a/lib/spack/spack/patch.py +++ b/lib/spack/spack/patch.py @@ -352,7 +352,7 @@ def patch_for_package(self, sha256, pkg): Arguments: sha256 (str): sha256 hash to look up - pkg (spack.package_base.Package): Package object to get patch for. + pkg (spack.package_base.PackageBase): Package object to get patch for. We build patch objects lazily because building them requires that we have information about the package's location in its repo. 
diff --git a/lib/spack/spack/platforms/cray.py b/lib/spack/spack/platforms/cray.py index 8f87740e4c0..dadf114d90e 100644 --- a/lib/spack/spack/platforms/cray.py +++ b/lib/spack/spack/platforms/cray.py @@ -12,6 +12,7 @@ import llnl.util.tty as tty import spack.target +import spack.version from spack.operating_systems.cray_backend import CrayBackend from spack.operating_systems.cray_frontend import CrayFrontend from spack.paths import build_env_path @@ -31,6 +32,9 @@ "abudhabi": "piledriver", } +_ex_craype_dir = "/opt/cray/pe/cpe" +_xc_craype_dir = "/opt/cray/pe/cdt" + def _target_name_from_craype_target_name(name): return _craype_name_to_target_name.get(name, name) @@ -109,19 +113,48 @@ def setup_platform_environment(self, pkg, env): if os.environ.get("CRAY_LD_LIBRARY_PATH"): env.prepend_path("LD_LIBRARY_PATH", os.environ["CRAY_LD_LIBRARY_PATH"]) + @classmethod + def craype_type_and_version(cls): + if os.path.isdir(_ex_craype_dir): + craype_dir = _ex_craype_dir + craype_type = "EX" + elif os.path.isdir(_xc_craype_dir): + craype_dir = _xc_craype_dir + craype_type = "XC" + else: + return (None, None) + + # Take the default version from known symlink path + default_path = os.path.join(craype_dir, "default") + if os.path.islink(default_path): + version = spack.version.Version(os.readlink(default_path)) + return (craype_type, version) + + # If no default version, sort available versions and return latest + versions_available = [spack.version.Version(v) for v in os.listdir(craype_dir)] + versions_available.sort(reverse=True) + return (craype_type, versions_available[0]) + @classmethod def detect(cls): """ - Detect whether this system is a Cray machine. + Detect whether this system requires CrayPE module support. - We detect the Cray platform based on the availability through `module` - of the Cray programming environment. If this environment is available, - we can use it to find compilers, target modules, etc. 
If the Cray - programming environment is not available via modules, then we will - treat it as a standard linux system, as the Cray compiler wrappers - and other components of the Cray programming environment are - irrelevant without module support. + Systems with newer CrayPE (21.10 for EX systems, future work for CS and + XC systems) have compilers and MPI wrappers that can be used directly + by path. These systems are considered ``linux`` platforms. + + For systems running an older CrayPE, we detect the Cray platform based + on the availability through `module` of the Cray programming + environment. If this environment is available, we can use it to find + compilers, target modules, etc. If the Cray programming environment is + not available via modules, then we will treat it as a standard linux + system, as the Cray compiler wrappers and other components of the Cray + programming environment are irrelevant without module support. """ + craype_type, craype_version = cls.craype_type_and_version() + if craype_type == "EX" and craype_version >= spack.version.Version("21.10"): + return False return "opt/cray" in os.environ.get("MODULEPATH", "") def _default_target_from_env(self): diff --git a/lib/spack/spack/platforms/test.py b/lib/spack/spack/platforms/test.py index d181f2e9262..26fe943394e 100644 --- a/lib/spack/spack/platforms/test.py +++ b/lib/spack/spack/platforms/test.py @@ -16,9 +16,14 @@ class Test(Platform): if platform.system().lower() == "darwin": binary_formats = ["macho"] - front_end = "x86_64" - back_end = "core2" - default = "core2" + if platform.machine() == "arm64": + front_end = "aarch64" + back_end = "m1" + default = "m1" + else: + front_end = "x86_64" + back_end = "core2" + default = "core2" front_os = "redhat6" back_os = "debian6" diff --git a/lib/spack/spack/provider_index.py b/lib/spack/spack/provider_index.py index 2da7d05e94d..254492fc21f 100644 --- a/lib/spack/spack/provider_index.py +++ b/lib/spack/spack/provider_index.py @@ -175,9 +175,10 
@@ def update(self, spec): pkg_provided = self.repository.get_pkg_class(spec.name).provided for provided_spec, provider_specs in six.iteritems(pkg_provided): - for provider_spec in provider_specs: + for provider_spec_readonly in provider_specs: # TODO: fix this comment. # We want satisfaction other than flags + provider_spec = provider_spec_readonly.copy() provider_spec.compiler_flags = spec.compiler_flags.copy() if spec.satisfies(provider_spec, deps=False): diff --git a/lib/spack/spack/relocate.py b/lib/spack/spack/relocate.py index 3ef332c2049..7ac19574ce2 100644 --- a/lib/spack/spack/relocate.py +++ b/lib/spack/spack/relocate.py @@ -7,6 +7,7 @@ import os import re import shutil +from collections import OrderedDict import macholib.mach_o import macholib.MachO @@ -21,6 +22,7 @@ import spack.platforms import spack.repo import spack.spec +import spack.util.elf as elf import spack.util.executable as executable is_macos = str(spack.platforms.real_host()) == "darwin" @@ -59,23 +61,30 @@ def __init__(self, file_path, old_len, new_len): class BinaryTextReplaceError(spack.error.SpackError): - def __init__(self, old_path, new_path): - """Raised when the new install path is longer than the - old one, so binary text replacement cannot occur. + def __init__(self, msg): + msg += ( + " To fix this, compile with more padding " + "(config:install_tree:padded_length), or install to a shorter prefix." + ) + super(BinaryTextReplaceError, self).__init__(msg) - Args: - old_path (str): original path to be substituted - new_path (str): candidate path for substitution - """ - msg = "New path longer than old path: binary text" - msg += " replacement not possible." - err_msg = "The new path %s" % new_path - err_msg += " is longer than the old path %s.\n" % old_path - err_msg += "Text replacement in binaries will not work.\n" - err_msg += "Create buildcache from an install path " - err_msg += "longer than new path." 
- super(BinaryTextReplaceError, self).__init__(msg, err_msg) +class CannotGrowString(BinaryTextReplaceError): + def __init__(self, old, new): + msg = "Cannot replace {!r} with {!r} because the new prefix is longer.".format(old, new) + super(CannotGrowString, self).__init__(msg) + + +class CannotShrinkCString(BinaryTextReplaceError): + def __init__(self, old, new, full_old_string): + # Just interpolate binary string to not risk issues with invalid + # unicode, which would be really bad user experience: error in error. + # We have no clue if we actually deal with a real C-string nor what + # encoding it has. + msg = "Cannot replace {!r} with {!r} in the C-string {!r}.".format( + old, new, full_old_string + ) + super(CannotShrinkCString, self).__init__(msg) @memoized @@ -93,27 +102,15 @@ def _patchelf(): def _elf_rpaths_for(path): """Return the RPATHs for an executable or a library. - The RPATHs are obtained by ``patchelf --print-rpath PATH``. - Args: path (str): full path to the executable or library Return: - RPATHs as a list of strings. + RPATHs as a list of strings. Returns an empty array + on ELF parsing errors, or when the ELF file simply + has no rpaths. """ - # If we're relocating patchelf itself, use it - patchelf_path = path if path.endswith("/bin/patchelf") else _patchelf() - patchelf = executable.Executable(patchelf_path) - - output = "" - try: - output = patchelf("--print-rpath", path, output=str, error=str) - output = output.strip("\n") - except executable.ProcessError as e: - msg = "patchelf --print-rpath {0} produced an error [{1}]" - tty.warn(msg.format(path, str(e))) - - return output.split(":") if output else [] + return elf.get_rpaths(path) or [] def _make_relative(reference_file, path_root, paths): @@ -384,15 +381,13 @@ def _set_elf_rpaths(target, rpaths): """Replace the original RPATH of the target with the paths passed as arguments. - This function uses ``patchelf`` to set RPATHs. - Args: target: target executable. Must be an ELF object. 
rpaths: paths to be set in the RPATH Returns: A string concatenating the stdout and stderr of the call - to ``patchelf`` + to ``patchelf`` if it was invoked """ # Join the paths using ':' as a separator rpaths_str = ":".join(rpaths) @@ -463,48 +458,116 @@ def _replace_prefix_text(filename, compiled_prefixes): f.truncate() -def _replace_prefix_bin(filename, byte_prefixes): - """Replace all the occurrences of the old install prefix with a - new install prefix in binary files. +def apply_binary_replacements(f, prefix_to_prefix, suffix_safety_size=7): + """ + Given a file opened in rb+ mode, apply the string replacements as + specified by an ordered dictionary of prefix to prefix mappings. This + method takes special care of null-terminated C-strings. C-string constants + are problematic because compilers and linkers optimize readonly strings for + space by aliasing those that share a common suffix (only suffix since all + of them are null terminated). See https://github.com/spack/spack/pull/31739 + and https://github.com/spack/spack/pull/32253 for details. Our logic matches + the original prefix with a ``suffix_safety_size + 1`` lookahead for null bytes. + If no null terminator is found, we simply pad with leading /, assuming that + it's a long C-string; the full C-string after replacement has a large suffix + in common with its original value. + If there *is* a null terminator we can do the same as long as the replacement + has a sufficiently long common suffix with the original prefix. + As a last resort when the replacement does not have a long enough common suffix, + we can try to shorten the string, but this only works if the new length is + sufficiently short (typically the case when going from large padding -> normal path) + If the replacement string is longer, or all of the above fails, we error out. - The new install prefix is prefixed with ``os.sep`` until the - lengths of the prefixes are the same. 
+ Arguments: + f: file opened in rb+ mode + prefix_to_prefix (OrderedDict): OrderedDictionary where the keys are + bytes representing the old prefixes and the values are the new + suffix_safety_size (int): in case of null terminated strings, what size + of the suffix should remain to avoid aliasing issues? + """ + assert suffix_safety_size >= 0 + assert f.tell() == 0 + + # Look for exact matches of our paths, and also look if there's a null terminator + # soon after (this covers the case where we search for /abc but match /abc/ with + # a trailing dir seperator). + regex = re.compile( + b"(" + + b"|".join(re.escape(p) for p in prefix_to_prefix.keys()) + + b")([^\0]{0,%d}\0)?" % suffix_safety_size + ) + + # We *could* read binary data in chunks to avoid loading all in memory, + # but it's nasty to deal with matches across boundaries, so let's stick to + # something simple. + + for match in regex.finditer(f.read()): + # The matching prefix (old) and its replacement (new) + old = match.group(1) + new = prefix_to_prefix[old] + + # Did we find a trailing null within a N + 1 bytes window after the prefix? + null_terminated = match.end(0) > match.end(1) + + # Suffix string length, excluding the null byte + # Only makes sense if null_terminated + suffix_strlen = match.end(0) - match.end(1) - 1 + + # How many bytes are we shrinking our string? + bytes_shorter = len(old) - len(new) + + # We can't make strings larger. + if bytes_shorter < 0: + raise CannotGrowString(old, new) + + # If we don't know whether this is a null terminated C-string (we're looking + # only N + 1 bytes ahead), or if it is and we have a common suffix, we can + # simply pad with leading dir separators. 
+ elif ( + not null_terminated + or suffix_strlen >= suffix_safety_size # == is enough, but let's be defensive + or old[-suffix_safety_size + suffix_strlen :] + == new[-suffix_safety_size + suffix_strlen :] + ): + replacement = b"/" * bytes_shorter + new + + # If it *was* null terminated, all that matters is that we can leave N bytes + # of old suffix in place. Note that > is required since we also insert an + # additional null terminator. + elif bytes_shorter > suffix_safety_size: + replacement = new + match.group(2) # includes the trailing null + + # Otherwise... we can't :( + else: + raise CannotShrinkCString(old, new, match.group()[:-1]) + + f.seek(match.start()) + f.write(replacement) + + +def _replace_prefix_bin(filename, prefix_to_prefix): + """Replace all the occurrences of the old prefix with a new prefix in binary + files. See :func:`~spack.relocate.apply_binary_replacements` for details. Args: filename (str): target binary file - byte_prefixes (OrderedDict): OrderedDictionary where the keys are - precompiled regex of the old prefixes and the values are the new - prefixes (uft-8 encoded) + byte_prefixes (OrderedDict): ordered dictionary where the keys are + bytes representing the old prefixes and the values are the new + prefixes (all bytes utf-8 encoded) """ with open(filename, "rb+") as f: - data = f.read() - f.seek(0) - for orig_bytes, new_bytes in byte_prefixes.items(): - original_data_len = len(data) - # Skip this hassle if not found - if orig_bytes not in data: - continue - # We only care about this problem if we are about to replace - length_compatible = len(new_bytes) <= len(orig_bytes) - if not length_compatible: - tty.debug("Binary failing to relocate is %s" % filename) - raise BinaryTextReplaceError(orig_bytes, new_bytes) - pad_length = len(orig_bytes) - len(new_bytes) - padding = os.sep * pad_length - padding = padding.encode("utf-8") - data = data.replace(orig_bytes, new_bytes + padding) - # Really needs to be the same length - if not 
len(data) == original_data_len: - print("Length of pad:", pad_length, "should be", len(padding)) - print(new_bytes, "was to replace", orig_bytes) - raise BinaryStringReplacementError(filename, original_data_len, len(data)) - f.write(data) - f.truncate() + apply_binary_replacements(f, prefix_to_prefix) def relocate_macho_binaries( - path_names, old_layout_root, new_layout_root, prefix_to_prefix, rel, old_prefix, new_prefix + path_names, + old_layout_root, + new_layout_root, + prefix_to_prefix, + rel, + old_prefix, + new_prefix, ): """ Use macholib python package to get the rpaths, depedent libraries @@ -600,6 +663,23 @@ def _transform_rpaths(orig_rpaths, orig_root, new_prefixes): return new_rpaths +def new_relocate_elf_binaries(binaries, prefix_to_prefix): + """Take a list of binaries, and an ordered dictionary of + prefix to prefix mapping, and update the rpaths accordingly.""" + + # Transform to binary string + prefix_to_prefix = OrderedDict( + (k.encode("utf-8"), v.encode("utf-8")) for (k, v) in prefix_to_prefix.items() + ) + + for path in binaries: + try: + elf.replace_rpath_in_place_or_raise(path, prefix_to_prefix) + except elf.ElfDynamicSectionUpdateFailed as e: + # Fall back to the old `patchelf --set-rpath` method. + _set_elf_rpaths(path, e.new.decode("utf-8").split(":")) + + def relocate_elf_binaries( binaries, orig_root, new_root, new_prefixes, rel, orig_prefix, new_prefix ): @@ -707,49 +787,50 @@ def raise_if_not_relocatable(binaries, allow_root): raise InstallRootStringError(binary, spack.store.layout.root) -def relocate_links(links, orig_layout_root, orig_install_prefix, new_install_prefix): - """Relocate links to a new install prefix. - - The symbolic links are relative to the original installation prefix. - The old link target is read and the placeholder is replaced by the old - layout root. If the old link target is in the old install prefix, the new - link target is create by replacing the old install prefix with the new - install prefix. 
- - Args: - links (list): list of links to be relocated - orig_layout_root (str): original layout root - orig_install_prefix (str): install prefix of the original installation - new_install_prefix (str): install prefix where we want to relocate - """ - placeholder = _placeholder(orig_layout_root) - abs_links = [os.path.join(new_install_prefix, link) for link in links] - for abs_link in abs_links: - link_target = os.readlink(abs_link) - link_target = re.sub(placeholder, orig_layout_root, link_target) - # If the link points to a file in the original install prefix, - # compute the corresponding target in the new prefix and relink - if link_target.startswith(orig_install_prefix): - link_target = re.sub(orig_install_prefix, new_install_prefix, link_target) - os.unlink(abs_link) - symlink(link_target, abs_link) - - # If the link is absolute and has not been relocated then - # warn the user about that - if os.path.isabs(link_target) and not link_target.startswith(new_install_prefix): - msg = ( - 'Link target "{0}" for symbolic link "{1}" is outside' - " of the new install prefix {2}" - ) - tty.warn(msg.format(link_target, abs_link, new_install_prefix)) +def warn_if_link_cant_be_relocated(link, target): + if not os.path.isabs(target): + return + tty.warn('Symbolic link at "{}" to "{}" cannot be relocated'.format(link, target)) -def relocate_text(files, prefixes, concurrency=32): +def relocate_links(links, prefix_to_prefix): + """Relocate links to a new install prefix.""" + regex = re.compile("|".join(re.escape(p) for p in prefix_to_prefix.keys())) + for link in links: + old_target = os.readlink(link) + match = regex.match(old_target) + + # No match. 
+ if match is None: + warn_if_link_cant_be_relocated(link, old_target) + continue + + new_target = prefix_to_prefix[match.group()] + old_target[match.end() :] + os.unlink(link) + symlink(new_target, link) + + +def utf8_path_to_binary_regex(prefix): + """Create a (binary) regex that matches the input path in utf8""" + prefix_bytes = re.escape(prefix).encode("utf-8") + return re.compile(b"(? 0 - - # Preorder traversal yields before successors - if yield_me and order == "pre": - yield return_val(dep_spec) - - # Edge traversal yields but skips children of visited nodes - if not (key in visited and cover == "edges"): - visited.add(key) - - # This code determines direction and yields the children/parents - if direction == "children": - edges = self.edges_to_dependencies - key_fn = lambda dspec: dspec.spec.name - succ = lambda dspec: dspec.spec - elif direction == "parents": - edges = self.edges_from_dependents - key_fn = lambda dspec: dspec.parent.name - succ = lambda dspec: dspec.parent - else: - raise ValueError("Invalid traversal direction: %s" % direction) - - for dspec in sorted(edges(), key=key_fn): - dt = dspec.deptypes - if dt and not any(d in deptype for d in dt): - continue - - for child in succ(dspec).traverse_edges(visited, d + 1, deptype, dspec, **kwargs): - yield child - - # Postorder traversal yields after successors - if yield_me and order == "post": - yield return_val(dep_spec) + def traverse_edges(self, **kwargs): + """Shorthand for :meth:`~spack.traverse.traverse_edges`""" + return traverse.traverse_edges([self], **kwargs) @property def short_spec(self): @@ -1795,7 +1755,12 @@ def spec_hash(self, hash): return hash.override(self) node_dict = self.to_node_dict(hash=hash) json_text = sjson.dump(node_dict) - return spack.util.hash.b32_hash(json_text) + # This implements "frankenhashes", preserving the last 7 characters of the + # original hash when splicing so that we can avoid relocation issues + out = spack.util.hash.b32_hash(json_text) + if 
self.build_spec is not self: + return out[:-7] + self.build_spec.spec_hash(hash)[-7:] + return out def _cached_hash(self, hash, length=None, force=False): """Helper function for storing a cached hash on the spec. @@ -1930,7 +1895,14 @@ def to_node_dict(self, hash=ht.dag_hash): params = syaml.syaml_dict(sorted(v.yaml_entry() for _, v in self.variants.items())) - params.update(sorted(self.compiler_flags.items())) + # Only need the string compiler flag for yaml file + params.update( + sorted( + self.compiler_flags.yaml_entry(flag_type) + for flag_type in self.compiler_flags.keys() + ) + ) + if params: d["parameters"] = params @@ -2155,11 +2127,13 @@ def from_node_dict(node): spec.compiler = None if "parameters" in node: - for name, value in node["parameters"].items(): + for name, values in node["parameters"].items(): if name in _valid_compiler_flags: - spec.compiler_flags[name] = value + spec.compiler_flags[name] = [] + for val in values: + spec.compiler_flags.add_flag(name, val, False) else: - spec.variants[name] = vt.MultiValuedVariant.from_node_dict(name, value) + spec.variants[name] = vt.MultiValuedVariant.from_node_dict(name, values) elif "variants" in node: for name, value in node["variants"].items(): spec.variants[name] = vt.MultiValuedVariant.from_node_dict(name, value) @@ -2777,6 +2751,11 @@ def _old_concretize(self, tests=False, deprecation_warning=True): # If any spec in the DAG is deprecated, throw an error Spec.ensure_no_deprecated(self) + # Update externals as needed + for dep in self.traverse(): + if dep.external: + dep.package.update_external_dependencies() + # Now that the spec is concrete we should check if # there are declared conflicts # @@ -2800,7 +2779,9 @@ def _old_concretize(self, tests=False, deprecation_warning=True): # Check if we can produce an optimized binary (will throw if # there are declared inconsistencies) - self.architecture.target.optimization_flags(self.compiler) + # No need on platform=cray because of the targeting modules + if 
not self.satisfies("platform=cray"): + self.architecture.target.optimization_flags(self.compiler) def _patches_assigned(self): """Whether patches have been assigned to this spec by the concretizer.""" @@ -2956,7 +2937,7 @@ def concretize(self, tests=False): if a list of names activate them for the packages in the list, if True activate 'test' dependencies for all packages. """ - if spack.config.get("config:concretizer") == "clingo": + if spack.config.get("config:concretizer", "clingo") == "clingo": self._new_concretize(tests) else: self._old_concretize(tests) @@ -3590,7 +3571,9 @@ def _constrain_dependencies(self, other): ) # Update with additional constraints from other spec - for name in other.dep_difference(self): + # operate on direct dependencies only, because a concrete dep + # represented by hash may have structure that needs to be preserved + for name in other.direct_dep_difference(self): dep_spec_copy = other._get_dependency(name) dep_copy = dep_spec_copy.spec deptypes = dep_spec_copy.deptypes @@ -3611,10 +3594,10 @@ def constrained(self, other, deps=True): clone.constrain(other, deps) return clone - def dep_difference(self, other): + def direct_dep_difference(self, other): """Returns dependencies in self that are not in other.""" - mine = set(s.name for s in self.traverse(root=False)) - mine.difference_update(s.name for s in other.traverse(root=False)) + mine = set(dname for dname in self._dependencies) + mine.difference_update(dname for dname in other._dependencies) return mine def _autospec(self, spec_like): @@ -3970,12 +3953,23 @@ def __getitem__(self, name): csv = query_parameters.pop().strip() query_parameters = re.split(r"\s*,\s*", csv) + # In some cases a package appears multiple times in the same DAG for *distinct* + # specs. For example, a build-type dependency may itself depend on a package + # the current spec depends on, but their specs may differ. 
Therefore we iterate + # in an order here that prioritizes the build, test and runtime dependencies; + # only when we don't find the package do we consider the full DAG. + order = lambda: itertools.chain( + self.traverse(deptype="link"), + self.dependencies(deptype=("build", "run", "test")), + self.traverse(), # fall back to a full search + ) + try: value = next( itertools.chain( # Regular specs - (x for x in self.traverse() if x.name == name), - (x for x in self.traverse() if (not x.virtual) and x.package.provides(name)), + (x for x in order() if x.name == name), + (x for x in order() if (not x.virtual) and x.package.provides(name)), ) ) except StopIteration: @@ -4258,7 +4252,7 @@ def write_attribute(spec, attribute, color): except AttributeError: parent = ".".join(parts[:idx]) m = "Attempted to format attribute %s." % attribute - m += "Spec.%s has no attribute %s" % (parent, part) + m += "Spec %s has no attribute %s" % (parent, part) raise SpecFormatStringError(m) if isinstance(current, vn.VersionList): if current == _any_version: @@ -4604,11 +4598,13 @@ def tree(self, **kwargs): show_types = kwargs.pop("show_types", False) deptypes = kwargs.pop("deptypes", "all") recurse_dependencies = kwargs.pop("recurse_dependencies", True) + depth_first = kwargs.pop("depth_first", False) lang.check_kwargs(kwargs, self.tree) out = "" - for d, dep_spec in self.traverse_edges( - order="pre", cover=cover, depth=True, deptype=deptypes + + for d, dep_spec in traverse.traverse_tree( + [self], cover=cover, deptype=deptypes, depth_first=depth_first ): node = dep_spec.spec @@ -4880,7 +4876,7 @@ def merge_abstract_anonymous_specs(*abstract_specs): merged_spec[name].constrain(current_spec_constraint[name], deps=False) # Update with additional constraints from other spec - for name in current_spec_constraint.dep_difference(merged_spec): + for name in current_spec_constraint.direct_dep_difference(merged_spec): edge = next(iter(current_spec_constraint.edges_to_dependencies(name))) 
merged_spec._add_dependency(edge.spec.copy(), edge.deptypes) @@ -4993,7 +4989,7 @@ def __missing__(self, key): #: These are possible token types in the spec grammar. -HASH, DEP, VER, COLON, COMMA, ON, OFF, PCT, EQ, ID, VAL, FILE = range(12) +HASH, DEP, VER, COLON, COMMA, ON, D_ON, OFF, D_OFF, PCT, EQ, D_EQ, ID, VAL, FILE = range(15) #: Regex for fully qualified spec names. (e.g., builtin.hdf5) spec_id_re = r"\w[\w.-]*" @@ -5020,10 +5016,14 @@ def __init__(self): (r"\:", lambda scanner, val: self.token(COLON, val)), (r"\,", lambda scanner, val: self.token(COMMA, val)), (r"\^", lambda scanner, val: self.token(DEP, val)), + (r"\+\+", lambda scanner, val: self.token(D_ON, val)), (r"\+", lambda scanner, val: self.token(ON, val)), + (r"\-\-", lambda scanner, val: self.token(D_OFF, val)), (r"\-", lambda scanner, val: self.token(OFF, val)), + (r"\~\~", lambda scanner, val: self.token(D_OFF, val)), (r"\~", lambda scanner, val: self.token(OFF, val)), (r"\%", lambda scanner, val: self.token(PCT, val)), + (r"\=\=", lambda scanner, val: self.token(D_EQ, val)), (r"\=", lambda scanner, val: self.token(EQ, val)), # Filenames match before identifiers, so no initial filename # component is parsed as a spec (e.g., in subdir/spec.yaml/json) @@ -5035,7 +5035,7 @@ def __init__(self): (spec_id_re, lambda scanner, val: self.token(ID, val)), (r"\s+", lambda scanner, val: None), ], - [EQ], + [D_EQ, EQ], [ (r"[\S].*", lambda scanner, val: self.token(VAL, val)), (r"\s+", lambda scanner, val: None), @@ -5080,7 +5080,7 @@ def do_parse(self): if self.accept(ID): self.previous = self.token - if self.accept(EQ): + if self.accept(EQ) or self.accept(D_EQ): # We're parsing an anonymous spec beginning with a # key-value pair. 
if not specs: @@ -5157,9 +5157,10 @@ def do_parse(self): else: # If the next token can be part of a valid anonymous spec, # create the anonymous spec - if self.next.type in (VER, ON, OFF, PCT): - # Raise an error if the previous spec is already concrete - if specs and specs[-1].concrete: + if self.next.type in (VER, ON, D_ON, OFF, D_OFF, PCT): + # Raise an error if the previous spec is already + # concrete (assigned by hash) + if specs and specs[-1]._hash: raise RedundantSpecError(specs[-1], "compiler, version, " "or variant") specs.append(self.spec(None)) else: @@ -5269,22 +5270,36 @@ def spec(self, name): vlist = self.version_list() spec._add_versions(vlist) + elif self.accept(D_ON): + name = self.variant() + spec.variants[name] = vt.BoolValuedVariant(name, True, propagate=True) + elif self.accept(ON): name = self.variant() - spec.variants[name] = vt.BoolValuedVariant(name, True) + spec.variants[name] = vt.BoolValuedVariant(name, True, propagate=False) + + elif self.accept(D_OFF): + name = self.variant() + spec.variants[name] = vt.BoolValuedVariant(name, False, propagate=True) elif self.accept(OFF): name = self.variant() - spec.variants[name] = vt.BoolValuedVariant(name, False) + spec.variants[name] = vt.BoolValuedVariant(name, False, propagate=False) elif self.accept(PCT): spec._set_compiler(self.compiler()) elif self.accept(ID): self.previous = self.token - if self.accept(EQ): + if self.accept(D_EQ): + # We're adding a key-value pair to the spec self.expect(VAL) - spec._add_flag(self.previous.value, self.token.value) + spec._add_flag(self.previous.value, self.token.value, propagate=True) + self.previous = None + elif self.accept(EQ): + # We're adding a key-value pair to the spec + self.expect(VAL) + spec._add_flag(self.previous.value, self.token.value, propagate=False) self.previous = None else: # We've found the start of a new spec. 
Go back to do_parse @@ -5306,7 +5321,6 @@ def spec(self, name): else: break - spec._add_default_platform() return spec def variant(self, name=None): @@ -5448,6 +5462,12 @@ def long_message(self): ) +class ArchitecturePropagationError(spack.error.SpecError): + """Raised when the double equal symbols are used to assign + the spec's architecture. + """ + + class DuplicateDependencyError(spack.error.SpecError): """Raised when the same dependency occurs in a spec twice.""" diff --git a/lib/spack/spack/target.py b/lib/spack/spack/target.py index a85baa4d457..d51ca3aa992 100644 --- a/lib/spack/spack/target.py +++ b/lib/spack/spack/target.py @@ -3,6 +3,7 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) import functools +import warnings import six @@ -34,6 +35,14 @@ def _impl(self, other): return _impl +#: Translation table from archspec deprecated names +_DEPRECATED_ARCHSPEC_NAMES = { + "graviton": "cortex_a72", + "graviton2": "neoverse_n1", + "graviton3": "neoverse_v1", +} + + class Target(object): def __init__(self, name, module_name=None): """Target models microarchitectures and their compatibility. @@ -45,6 +54,10 @@ def __init__(self, name, module_name=None): like Cray (e.g. 
craype-compiler) """ if not isinstance(name, archspec.cpu.Microarchitecture): + if name in _DEPRECATED_ARCHSPEC_NAMES: + msg = "'target={}' is deprecated, use 'target={}' instead" + name, old_name = _DEPRECATED_ARCHSPEC_NAMES[name], name + warnings.warn(msg.format(old_name, name)) name = archspec.cpu.TARGETS.get(name, archspec.cpu.generic_microarchitecture(name)) self.microarchitecture = name self.module_name = module_name diff --git a/lib/spack/spack/tengine.py b/lib/spack/spack/tengine.py index 34db15d832a..75dae5c9c1c 100644 --- a/lib/spack/spack/tengine.py +++ b/lib/spack/spack/tengine.py @@ -11,6 +11,7 @@ import llnl.util.lang import spack.config +import spack.extensions from spack.util.path import canonicalize_path diff --git a/lib/spack/spack/test/abi.py b/lib/spack/spack/test/abi.py index 5c3d0fa83d5..91560939797 100644 --- a/lib/spack/spack/test/abi.py +++ b/lib/spack/spack/test/abi.py @@ -22,7 +22,7 @@ ("platform=linux", "arch=linux-fedora31-x86_64", True), ("platform=linux os=fedora31", "platform=linux", True), ("platform=darwin", "arch=linux-fedora31-x86_64", False), - ("os=fedora31", "platform=linux", False), # TODO should be true ? 
+ ("os=fedora31", "platform=linux", True), ], ) def test_architecture_compatibility(target, constraint, expected): diff --git a/lib/spack/spack/test/architecture.py b/lib/spack/spack/test/architecture.py index 64bb57d474b..92914be6852 100644 --- a/lib/spack/spack/test/architecture.py +++ b/lib/spack/spack/test/architecture.py @@ -8,6 +8,8 @@ import pytest +import llnl.util.filesystem as fs + import spack.concretize import spack.operating_systems import spack.platforms @@ -140,7 +142,7 @@ def test_optimization_flags(compiler_spec, target_name, expected_flags, config): (spack.spec.CompilerSpec("gcc@9.2.0"), None, "haswell", "-march=haswell -mtune=haswell"), # Check that custom string versions are accepted ( - spack.spec.CompilerSpec("gcc@foo"), + spack.spec.CompilerSpec("gcc@10foo"), "9.2.0", "icelake", "-march=icelake-client -mtune=icelake-client", @@ -196,9 +198,15 @@ def test_satisfy_strict_constraint_when_not_concrete(architecture_tuple, constra ], ) @pytest.mark.usefixtures("mock_packages", "config") -def test_concretize_target_ranges(root_target_range, dep_target_range, result): +def test_concretize_target_ranges(root_target_range, dep_target_range, result, monkeypatch): + # Monkeypatch so that all concretization is done as if the machine is core2 + monkeypatch.setattr(spack.platforms.test.Test, "default", "core2") + # use foobar=bar to make the problem simpler for the old concretizer # the new concretizer should not need that help + if spack.config.get("config:concretizer") == "original": + pytest.skip("Fixing the parser broke this test for the original concretizer.") + spec_str = "a %%gcc@10 foobar=bar target=%s ^b target=%s" % ( root_target_range, dep_target_range, @@ -208,3 +216,27 @@ def test_concretize_target_ranges(root_target_range, dep_target_range, result): spec.concretize() assert str(spec).count("arch=test-debian6-%s" % result) == 2 + + +@pytest.mark.parametrize( + "versions,default,expected", + [ + (["21.11", "21.9"], "21.11", False), + (["21.11", 
"21.9"], "21.9", True), + (["21.11", "21.9"], None, False), + ], +) +def test_cray_platform_detection(versions, default, expected, tmpdir, monkeypatch, working_env): + ex_path = str(tmpdir.join("fake_craype_dir")) + fs.mkdirp(ex_path) + + with fs.working_dir(ex_path): + for version in versions: + fs.touch(version) + if default: + os.symlink(default, "default") + + monkeypatch.setattr(spack.platforms.cray, "_ex_craype_dir", ex_path) + os.environ["MODULEPATH"] = "/opt/cray/pe" + + assert spack.platforms.cray.Cray.detect() == expected diff --git a/lib/spack/spack/test/audit.py b/lib/spack/spack/test/audit.py index eded1a92f2c..bc5dd3edca6 100644 --- a/lib/spack/spack/test/audit.py +++ b/lib/spack/spack/test/audit.py @@ -22,7 +22,7 @@ # This package has a GitHub patch URL without full_index=1 (["invalid-github-patch-url"], ["PKG-DIRECTIVES", "PKG-PROPERTIES"]), # This package has a stand-alone 'test' method in build-time callbacks - (["test-build-callbacks"], ["PKG-DIRECTIVES", "PKG-PROPERTIES"]), + (["fail-test-audit"], ["PKG-DIRECTIVES", "PKG-PROPERTIES"]), # This package has no issues (["mpileaks"], None), # This package has a conflict with a trigger which cannot constrain the constraint diff --git a/lib/spack/spack/test/bindist.py b/lib/spack/spack/test/bindist.py index 68ea2cb7340..c73612f1011 100644 --- a/lib/spack/spack/test/bindist.py +++ b/lib/spack/spack/test/bindist.py @@ -11,6 +11,8 @@ import py import pytest +from llnl.util.filesystem import join_path, visit_directory_tree + import spack.binary_distribution as bindist import spack.config import spack.hooks.sbang as sbang @@ -20,6 +22,7 @@ import spack.store import spack.util.gpg import spack.util.web as web_util +from spack.binary_distribution import get_buildfile_manifest from spack.directory_layout import DirectoryLayout from spack.paths import test_path from spack.spec import Spec @@ -453,10 +456,11 @@ def test_generate_index_missing(monkeypatch, tmpdir, mutable_config): # Update index 
buildcache_cmd("update-index", "-d", mirror_dir.strpath) - # Check dependency not in buildcache - cache_list = buildcache_cmd("list", "--allarch") - assert "libdwarf" in cache_list - assert "libelf" not in cache_list + with spack.config.override("config:binary_index_ttl", 0): + # Check dependency not in buildcache + cache_list = buildcache_cmd("list", "--allarch") + assert "libdwarf" in cache_list + assert "libelf" not in cache_list def test_generate_indices_key_error(monkeypatch, capfd): @@ -632,3 +636,57 @@ def test_FetchCacheError_pretty_printing_single(): assert "Multiple errors" not in str_e assert "RuntimeError: Oops!" in str_e assert str_e.rstrip() == str_e + + +def test_build_manifest_visitor(tmpdir): + dir = "directory" + file = os.path.join("directory", "file") + + with tmpdir.as_cwd(): + # Create a file inside a directory + os.mkdir(dir) + with open(file, "wb") as f: + f.write(b"example file") + + # Symlink the dir + os.symlink(dir, "symlink_to_directory") + + # Symlink the file + os.symlink(file, "symlink_to_file") + + # Hardlink the file + os.link(file, "hardlink_of_file") + + # Hardlinked symlinks: seems like this is only a thing on Linux, + # on Darwin the symlink *target* is hardlinked, on Linux the + # symlink *itself* is hardlinked. + if sys.platform.startswith("linux"): + os.link("symlink_to_file", "hardlink_of_symlink_to_file") + os.link("symlink_to_directory", "hardlink_of_symlink_to_directory") + + visitor = bindist.BuildManifestVisitor() + visit_directory_tree(str(tmpdir), visitor) + + # We de-dupe hardlinks of files, so there should really be just one file + assert len(visitor.files) == 1 + + # We do not de-dupe symlinks, cause it's unclear how to update symlinks + # in-place, preserving inodes. + if sys.platform.startswith("linux"): + assert len(visitor.symlinks) == 4 # includes hardlinks of symlinks. 
+ else: + assert len(visitor.symlinks) == 2 + + with tmpdir.as_cwd(): + assert not any(os.path.islink(f) or os.path.isdir(f) for f in visitor.files) + assert all(os.path.islink(f) for f in visitor.symlinks) + + +def test_text_relocate_if_needed(install_mockery, mock_fetch, monkeypatch, capfd): + spec = Spec("needs-text-relocation").concretized() + install_cmd(str(spec)) + + manifest = get_buildfile_manifest(spec) + assert join_path("bin", "exe") in manifest["text_to_relocate"] + assert join_path("bin", "otherexe") not in manifest["text_to_relocate"] + assert join_path("bin", "secretexe") not in manifest["text_to_relocate"] diff --git a/lib/spack/spack/test/build_environment.py b/lib/spack/spack/test/build_environment.py index 6ba8fc056fb..9b894a93519 100644 --- a/lib/spack/spack/test/build_environment.py +++ b/lib/spack/spack/test/build_environment.py @@ -465,7 +465,7 @@ def test_setting_dtags_based_on_config(config_setting, expected_flag, config, mo pkg = s.package env = EnvironmentModifications() - with spack.config.override("config:shared_linking", config_setting): + with spack.config.override("config:shared_linking", {"type": config_setting, "bind": False}): spack.build_environment.set_compiler_environment_variables(pkg, env) modifications = env.group_by_name() assert "SPACK_DTAGS_TO_STRIP" in modifications diff --git a/lib/spack/spack/test/build_systems.py b/lib/spack/spack/test/build_systems.py index f660596bb7d..03a920a28e3 100644 --- a/lib/spack/spack/test/build_systems.py +++ b/lib/spack/spack/test/build_systems.py @@ -13,10 +13,11 @@ import llnl.util.filesystem as fs import spack.build_systems.autotools +import spack.build_systems.cmake import spack.environment import spack.platforms import spack.repo -from spack.build_environment import ChildError, get_std_cmake_args, setup_package +from spack.build_environment import ChildError, setup_package from spack.spec import Spec from spack.util.executable import which @@ -144,7 +145,7 @@ def 
test_libtool_archive_files_are_deleted_by_default(self, mutable_database): # Assert the libtool archive is not there and we have # a log of removed files - assert not os.path.exists(s.package.libtool_archive_file) + assert not os.path.exists(s.package.builder.libtool_archive_file) search_directory = os.path.join(s.prefix, ".spack") libtool_deletion_log = fs.find(search_directory, "removed_la_files.txt", recursive=True) assert libtool_deletion_log @@ -155,11 +156,11 @@ def test_libtool_archive_files_might_be_installed_on_demand( # Install a package that creates a mock libtool archive, # patch its package to preserve the installation s = Spec("libtool-deletion").concretized() - monkeypatch.setattr(s.package, "install_libtool_archives", True) + monkeypatch.setattr(type(s.package.builder), "install_libtool_archives", True) s.package.do_install(explicit=True) # Assert libtool archives are installed - assert os.path.exists(s.package.libtool_archive_file) + assert os.path.exists(s.package.builder.libtool_archive_file) def test_autotools_gnuconfig_replacement(self, mutable_database): """ @@ -203,11 +204,12 @@ def test_autotools_gnuconfig_replacement_disabled(self, mutable_database): assert "gnuconfig version of config.guess" not in f.read() @pytest.mark.disable_clean_stage_check - def test_autotools_gnuconfig_replacement_no_gnuconfig(self, mutable_database): + def test_autotools_gnuconfig_replacement_no_gnuconfig(self, mutable_database, monkeypatch): """ Tests whether a useful error message is shown when patch_config_files is enabled, but gnuconfig is not listed as a direct build dependency. 
""" + monkeypatch.setattr(spack.platforms.test.Test, "default", "x86_64") s = Spec("autotools-config-replacement +patch_config_files ~gnuconfig") s.concretize() @@ -253,22 +255,23 @@ class TestCMakePackage(object): def test_cmake_std_args(self): # Call the function on a CMakePackage instance s = Spec("cmake-client").concretized() - assert s.package.std_cmake_args == get_std_cmake_args(s.package) + expected = spack.build_systems.cmake.CMakeBuilder.std_args(s.package) + assert s.package.builder.std_cmake_args == expected # Call it on another kind of package s = Spec("mpich").concretized() - assert get_std_cmake_args(s.package) + assert spack.build_systems.cmake.CMakeBuilder.std_args(s.package) - def test_cmake_bad_generator(self): + def test_cmake_bad_generator(self, monkeypatch): s = Spec("cmake-client").concretized() - s.package.generator = "Yellow Sticky Notes" + monkeypatch.setattr(type(s.package), "generator", "Yellow Sticky Notes", raising=False) with pytest.raises(spack.package_base.InstallError): - get_std_cmake_args(s.package) + s.package.builder.std_cmake_args def test_cmake_secondary_generator(config, mock_packages): s = Spec("cmake-client").concretized() s.package.generator = "CodeBlocks - Unix Makefiles" - assert get_std_cmake_args(s.package) + assert s.package.builder.std_cmake_args def test_define(self): s = Spec("cmake-client").concretized() @@ -361,7 +364,7 @@ def test_autotools_args_from_conditional_variant(config, mock_packages): is not met. 
When this is the case, the variant is not set in the spec.""" s = Spec("autotools-conditional-variants-test").concretized() assert "example" not in s.variants - assert len(s.package._activate_or_not("example", "enable", "disable")) == 0 + assert len(s.package.builder._activate_or_not("example", "enable", "disable")) == 0 def test_autoreconf_search_path_args_multiple(config, mock_packages, tmpdir): diff --git a/lib/spack/spack/test/builder.py b/lib/spack/spack/test/builder.py new file mode 100644 index 00000000000..bda72c8b490 --- /dev/null +++ b/lib/spack/spack/test/builder.py @@ -0,0 +1,123 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) +import os.path + +import pytest + +import spack.paths + + +@pytest.fixture() +def builder_test_repository(): + builder_test_path = os.path.join(spack.paths.repos_path, "builder.test") + with spack.repo.use_repositories(builder_test_path) as mock_repo: + yield mock_repo + + +@pytest.mark.parametrize( + "spec_str,expected_values", + [ + ( + "callbacks@2.0", + [ + ("BEFORE_INSTALL_1_CALLED", "1"), + ("BEFORE_INSTALL_2_CALLED", "1"), + ("CALLBACKS_INSTALL_CALLED", "1"), + ("AFTER_INSTALL_1_CALLED", "1"), + ("TEST_VALUE", "3"), + ("INSTALL_VALUE", "CALLBACKS"), + ], + ), + # The last callback is conditional on "@1.0", check it's being executed + ( + "callbacks@1.0", + [ + ("BEFORE_INSTALL_1_CALLED", "1"), + ("BEFORE_INSTALL_2_CALLED", "1"), + ("CALLBACKS_INSTALL_CALLED", "1"), + ("AFTER_INSTALL_1_CALLED", "1"), + ("AFTER_INSTALL_2_CALLED", "1"), + ("TEST_VALUE", "4"), + ("INSTALL_VALUE", "CALLBACKS"), + ], + ), + # The package below adds to "callbacks" using inheritance, test that using super() + # works with builder hierarchies + ( + "inheritance@1.0", + [ + ("DERIVED_BEFORE_INSTALL_CALLED", "1"), + ("BEFORE_INSTALL_1_CALLED", "1"), + ("BEFORE_INSTALL_2_CALLED", "1"), + 
("CALLBACKS_INSTALL_CALLED", "1"), + ("INHERITANCE_INSTALL_CALLED", "1"), + ("AFTER_INSTALL_1_CALLED", "1"), + ("AFTER_INSTALL_2_CALLED", "1"), + ("TEST_VALUE", "4"), + ("INSTALL_VALUE", "INHERITANCE"), + ], + ), + # Generate custom phases using a GenericBuilder + ( + "custom-phases", + [ + ("CONFIGURE_CALLED", "1"), + ("INSTALL_CALLED", "1"), + ("LAST_PHASE", "INSTALL"), + ], + ), + # Old-style package, with phase defined in base builder + ( + "old-style-autotools@1.0", + [ + ("AFTER_AUTORECONF_1_CALLED", "1"), + ], + ), + ( + "old-style-autotools@2.0", + [ + ("AFTER_AUTORECONF_2_CALLED", "1"), + ], + ), + ( + "old-style-custom-phases", + [ + ("AFTER_CONFIGURE_CALLED", "1"), + ("TEST_VALUE", "0"), + ], + ), + ], +) +@pytest.mark.usefixtures("builder_test_repository", "config") +@pytest.mark.disable_clean_stage_check +def test_callbacks_and_installation_procedure(spec_str, expected_values, working_env): + """Test the correct execution of callbacks and installation procedures for packages.""" + s = spack.spec.Spec(spec_str).concretized() + builder = spack.builder.create(s.package) + for phase_fn in builder: + phase_fn.execute() + + # Check calls have produced the expected side effects + for var_name, expected in expected_values: + assert os.environ[var_name] == expected, os.environ + + +@pytest.mark.usefixtures("builder_test_repository", "config") +@pytest.mark.parametrize( + "spec_str,method_name,expected", + [ + # Call a function defined on the package, which calls the same function defined + # on the super(builder) + ("old-style-autotools", "configure_args", ["--with-foo"]), + # Call a function defined on the package, which calls the same function defined on the + # super(pkg), which calls the same function defined in the super(builder) + ("old-style-derived", "configure_args", ["--with-bar", "--with-foo"]), + ], +) +def test_old_style_compatibility_with_super(spec_str, method_name, expected): + s = spack.spec.Spec(spec_str).concretized() + builder = 
spack.builder.create(s.package) + value = getattr(builder, method_name)() + assert value == expected diff --git a/lib/spack/spack/test/buildrequest.py b/lib/spack/spack/test/buildrequest.py index e656cfa99ef..7c986bb844a 100644 --- a/lib/spack/spack/test/buildrequest.py +++ b/lib/spack/spack/test/buildrequest.py @@ -62,3 +62,36 @@ def test_build_request_strings(install_mockery): istr = str(request) assert "package=dependent-install" in istr assert "install_args=" in istr + + +@pytest.mark.parametrize( + "package_cache_only,dependencies_cache_only,package_deptypes,dependencies_deptypes", + [ + (False, False, ["build", "link", "run"], ["build", "link", "run"]), + (True, False, ["link", "run"], ["build", "link", "run"]), + (False, True, ["build", "link", "run"], ["link", "run"]), + (True, True, ["link", "run"], ["link", "run"]), + ], +) +def test_build_request_deptypes( + install_mockery, + package_cache_only, + dependencies_cache_only, + package_deptypes, + dependencies_deptypes, +): + s = spack.spec.Spec("dependent-install").concretized() + + build_request = inst.BuildRequest( + s.package, + { + "package_cache_only": package_cache_only, + "dependencies_cache_only": dependencies_cache_only, + }, + ) + + actual_package_deptypes = build_request.get_deptypes(s.package) + actual_dependency_deptypes = build_request.get_deptypes(s["dependency-install"].package) + + assert sorted(actual_package_deptypes) == package_deptypes + assert sorted(actual_dependency_deptypes) == dependencies_deptypes diff --git a/lib/spack/spack/test/ci.py b/lib/spack/spack/test/ci.py index 2d8e65e8a5c..2a4786d51b9 100644 --- a/lib/spack/spack/test/ci.py +++ b/lib/spack/spack/test/ci.py @@ -83,37 +83,6 @@ def assert_present(config): assert_present(last_config) -@pytest.mark.skipif(sys.platform == "win32", reason="Not supported on Windows (yet)") -def test_get_concrete_specs(config, mutable_mock_env_path, mock_packages): - e = ev.create("test1") - e.add("dyninst") - e.concretize() - - dyninst_hash = 
None - hash_dict = {} - - with e as active_env: - for s in active_env.all_specs(): - hash_dict[s.name] = s.dag_hash() - if s.name == "dyninst": - dyninst_hash = s.dag_hash() - - assert dyninst_hash - - spec_map = ci.get_concrete_specs(active_env, dyninst_hash, "dyninst", "NONE") - assert "root" in spec_map - - concrete_root = spec_map["root"] - assert concrete_root.dag_hash() == dyninst_hash - - s = spec.Spec("dyninst") - print("nonconc spec name: {0}".format(s.name)) - - spec_map = ci.get_concrete_specs(active_env, s.name, s.name, "FIND_ANY") - - assert "root" in spec_map - - class FakeWebResponder(object): def __init__(self, response_code=200, content_to_read=[]): self._resp_code = response_code @@ -473,13 +442,16 @@ def test_get_spec_filter_list(mutable_mock_env_path, config, mutable_mock_repo): touched = ["libdwarf"] # traversing both directions from libdwarf in the graphs depicted - # above results in the following possibly affected env specs: - # mpileaks, callpath, dyninst, libdwarf, and libelf. Unaffected - # specs are mpich, plus hypre and it's dependencies. + # above (and additionally including dependencies of dependents of + # libdwarf) results in the following possibly affected env specs: + # mpileaks, callpath, dyninst, libdwarf, libelf, and mpich. + # Unaffected specs are hypre and its dependencies. 
affected_specs = ci.get_spec_filter_list(e1, touched) affected_pkg_names = set([s.name for s in affected_specs]) - expected_affected_pkg_names = set(["mpileaks", "callpath", "dyninst", "libdwarf", "libelf"]) + expected_affected_pkg_names = set( + ["mpileaks", "mpich", "callpath", "dyninst", "libdwarf", "libelf"] + ) assert affected_pkg_names == expected_affected_pkg_names @@ -498,7 +470,7 @@ def test_affected_specs_on_first_concretization(mutable_mock_env_path, config): @pytest.mark.skipif( - sys.platform == "win32", reason="Reliance on bash script ot supported on Windows" + sys.platform == "win32", reason="Reliance on bash script not supported on Windows" ) def test_ci_process_command(tmpdir): repro_dir = tmpdir.join("repro_dir").strpath @@ -510,7 +482,7 @@ def test_ci_process_command(tmpdir): @pytest.mark.skipif( - sys.platform == "win32", reason="Reliance on bash script ot supported on Windows" + sys.platform == "win32", reason="Reliance on bash script not supported on Windows" ) def test_ci_process_command_fail(tmpdir, monkeypatch): import subprocess @@ -557,7 +529,7 @@ def test_ci_run_standalone_tests_missing_requirements( @pytest.mark.skipif( - sys.platform == "win32", reason="Reliance on bash script ot supported on Windows" + sys.platform == "win32", reason="Reliance on bash script not supported on Windows" ) def test_ci_run_standalone_tests_not_installed_junit( tmpdir, working_env, config, mock_packages, mock_test_stage, capfd @@ -578,7 +550,7 @@ def test_ci_run_standalone_tests_not_installed_junit( @pytest.mark.skipif( - sys.platform == "win32", reason="Reliance on bash script ot supported on Windows" + sys.platform == "win32", reason="Reliance on bash script not supported on Windows" ) def test_ci_run_standalone_tests_not_installed_cdash( tmpdir, working_env, config, mock_packages, mock_test_stage, capfd diff --git a/lib/spack/spack/test/cmd/bootstrap.py b/lib/spack/spack/test/cmd/bootstrap.py index 2ff66ae35b7..81ba1e2cee9 100644 --- 
a/lib/spack/spack/test/cmd/bootstrap.py +++ b/lib/spack/spack/test/cmd/bootstrap.py @@ -109,7 +109,9 @@ def test_list_sources(capsys): assert "No method available" in output -@pytest.mark.parametrize("command,value", [("trust", True), ("untrust", False)]) +@pytest.mark.parametrize( + "command,value", [("enable", True), ("disable", False), ("trust", True), ("untrust", False)] +) def test_trust_or_untrust_sources(mutable_config, command, value): key = "bootstrap:trusted:github-actions" trusted = spack.config.get(key, default=None) @@ -135,7 +137,7 @@ def test_trust_or_untrust_fails_with_more_than_one_method(mutable_config): } with spack.config.override("bootstrap", wrong_config): with pytest.raises(RuntimeError, match="more than one"): - _bootstrap("trust", "github-actions") + _bootstrap("enable", "github-actions") @pytest.mark.parametrize("use_existing_dir", [True, False]) @@ -166,7 +168,7 @@ def test_remove_and_add_a_source(mutable_config): assert not sources # Add it back and check we restored the initial state - _bootstrap("add", "github-actions", "$spack/share/spack/bootstrap/github-actions-v0.2") + _bootstrap("add", "github-actions", "$spack/share/spack/bootstrap/github-actions-v0.3") sources = spack.bootstrap.bootstrapping_sources() assert len(sources) == 1 diff --git a/lib/spack/spack/test/cmd/checksum.py b/lib/spack/spack/test/cmd/checksum.py index a84b0eb8b75..84696bb6f8d 100644 --- a/lib/spack/spack/test/cmd/checksum.py +++ b/lib/spack/spack/test/cmd/checksum.py @@ -68,3 +68,13 @@ def test_checksum_versions(mock_packages, mock_fetch, mock_stage): output = spack_checksum("preferred-test", versions[0]) assert "Found 1 version" in output assert "version(" in output + + +def test_checksum_missing_version(mock_packages, mock_fetch, mock_stage): + output = spack_checksum("preferred-test", "99.99.99", fail_on_error=False) + assert "Could not find any remote versions" in output + + +def test_checksum_deprecated_version(mock_packages, mock_fetch, mock_stage): + 
output = spack_checksum("deprecated-versions", "1.1.0", fail_on_error=False) + assert "Version 1.1.0 is deprecated" in output diff --git a/lib/spack/spack/test/cmd/ci.py b/lib/spack/spack/test/cmd/ci.py index a8eaa2c631d..73b474c7dfd 100644 --- a/lib/spack/spack/test/cmd/ci.py +++ b/lib/spack/spack/test/cmd/ci.py @@ -190,6 +190,12 @@ def test_ci_generate_with_env( tags: - donotcare image: donotcare + - match: + - arch=test-debian6-m1 + runner-attributes: + tags: + - donotcare + image: donotcare service-job-attributes: image: donotcare tags: [donotcare] @@ -270,10 +276,10 @@ def test_ci_generate_bootstrap_gcc( spack: definitions: - bootstrap: - - gcc@3.0 - - gcc@2.0 + - gcc@9.5 + - gcc@9.0 specs: - - dyninst%gcc@3.0 + - dyninst%gcc@9.5 mirrors: some-mirror: https://my.fake.mirror gitlab-ci: @@ -286,6 +292,11 @@ def test_ci_generate_bootstrap_gcc( runner-attributes: tags: - donotcare + - match: + - arch=test-debian6-aarch64 + runner-attributes: + tags: + - donotcare """ ) @@ -338,9 +349,9 @@ def test_ci_generate_bootstrap_artifacts_buildcache( spack: definitions: - bootstrap: - - gcc@3.0 + - gcc@9.5 specs: - - dyninst%gcc@3.0 + - dyninst%gcc@9.5 mirrors: some-mirror: https://my.fake.mirror gitlab-ci: @@ -353,6 +364,11 @@ def test_ci_generate_bootstrap_artifacts_buildcache( runner-attributes: tags: - donotcare + - match: + - arch=test-debian6-aarch64 + runner-attributes: + tags: + - donotcare enable-artifacts-buildcache: True """ ) @@ -885,7 +901,6 @@ def activate_rebuild_env(tmpdir, pkg_name, rebuild_env): "SPACK_CONCRETE_ENV_DIR": rebuild_env.env_dir.strpath, "CI_PIPELINE_ID": "7192", "SPACK_SIGNING_KEY": _signing_key(), - "SPACK_ROOT_SPEC": rebuild_env.root_spec_dag_hash, "SPACK_JOB_SPEC_DAG_HASH": rebuild_env.root_spec_dag_hash, "SPACK_JOB_SPEC_PKG_NAME": pkg_name, "SPACK_COMPILER_ACTION": "NONE", @@ -917,11 +932,8 @@ def test_ci_rebuild_mock_success( pkg_name = "archive-files" rebuild_env = create_rebuild_env(tmpdir, pkg_name, broken_tests) - monkeypatch.setattr( 
- spack.cmd.ci, - "CI_REBUILD_INSTALL_BASE_ARGS", - ["echo"], - ) + monkeypatch.setattr(spack.cmd.ci, "SPACK_COMMAND", "echo") + monkeypatch.setattr(spack.cmd.ci, "MAKE_COMMAND", "echo") with rebuild_env.env_dir.as_cwd(): activate_rebuild_env(tmpdir, pkg_name, rebuild_env) @@ -966,7 +978,8 @@ def test_ci_rebuild( ci_cmd("rebuild", "--tests", fail_on_error=False) - monkeypatch.setattr(spack.cmd.ci, "CI_REBUILD_INSTALL_BASE_ARGS", ["notcommand"]) + monkeypatch.setattr(spack.cmd.ci, "SPACK_COMMAND", "notcommand") + monkeypatch.setattr(spack.cmd.ci, "MAKE_COMMAND", "notcommand") monkeypatch.setattr(spack.cmd.ci, "INSTALL_FAIL_CODE", 127) with rebuild_env.env_dir.as_cwd(): @@ -998,7 +1011,6 @@ def mystrip(s): assert "--keep-stage" in install_parts assert "--no-check-signature" not in install_parts - assert "--no-add" in install_parts assert "-f" in install_parts flag_index = install_parts.index("-f") assert "archive-files.json" in install_parts[flag_index + 1] @@ -1084,7 +1096,6 @@ def test_ci_nothing_to_rebuild( "SPACK_JOB_TEST_DIR": "test_dir", "SPACK_LOCAL_MIRROR_DIR": mirror_dir.strpath, "SPACK_CONCRETE_ENV_DIR": tmpdir.strpath, - "SPACK_ROOT_SPEC": root_spec_dag_hash, "SPACK_JOB_SPEC_DAG_HASH": root_spec_dag_hash, "SPACK_JOB_SPEC_PKG_NAME": "archive-files", "SPACK_COMPILER_ACTION": "NONE", @@ -1243,14 +1254,13 @@ def test_push_mirror_contents( with tmpdir.as_cwd(): env_cmd("create", "test", "./spack.yaml") with ev.read("test") as env: - spec_map = ci.get_concrete_specs(env, "patchelf", "patchelf", "FIND_ANY") - concrete_spec = spec_map["patchelf"] + concrete_spec = Spec("patchelf").concretized() spec_json = concrete_spec.to_json(hash=ht.dag_hash) json_path = str(tmpdir.join("spec.json")) with open(json_path, "w") as ypfd: ypfd.write(spec_json) - install_cmd("--keep-stage", json_path) + install_cmd("--add", "--keep-stage", json_path) # env, spec, json_path, mirror_url, build_id, sign_binaries ci.push_mirror_contents(env, json_path, mirror_url, True) @@ -1358,8 
+1368,15 @@ def failing_access(*args, **kwargs): assert expect_msg in std_out +@pytest.mark.parametrize("match_behavior", ["first", "merge"]) def test_ci_generate_override_runner_attrs( - tmpdir, mutable_mock_env_path, install_mockery, mock_packages, monkeypatch, ci_base_environment + tmpdir, + mutable_mock_env_path, + install_mockery, + mock_packages, + monkeypatch, + ci_base_environment, + match_behavior, ): """Test that we get the behavior we want with respect to the provision of runner attributes like tags, variables, and scripts, both when we @@ -1378,6 +1395,7 @@ def test_ci_generate_override_runner_attrs( gitlab-ci: tags: - toplevel + - toplevel2 variables: ONE: toplevelvarone TWO: toplevelvartwo @@ -1388,6 +1406,7 @@ def test_ci_generate_override_runner_attrs( - main step after_script: - post step one + match_behavior: {0} mappings: - match: - flatten-deps @@ -1400,10 +1419,12 @@ def test_ci_generate_override_runner_attrs( - dependency-install - match: - a + remove-attributes: + tags: + - toplevel2 runner-attributes: tags: - specific-a - - toplevel variables: ONE: specificvarone TWO: specificvartwo @@ -1413,10 +1434,17 @@ def test_ci_generate_override_runner_attrs( - custom main step after_script: - custom post step one + - match: + - a + runner-attributes: + tags: + - specific-a-2 service-job-attributes: image: donotcare tags: [donotcare] -""" +""".format( + match_behavior + ) ) with tmpdir.as_cwd(): @@ -1449,9 +1477,12 @@ def test_ci_generate_override_runner_attrs( assert the_elt["variables"]["ONE"] == "specificvarone" assert the_elt["variables"]["TWO"] == "specificvartwo" assert "THREE" not in the_elt["variables"] - assert len(the_elt["tags"]) == 2 + assert len(the_elt["tags"]) == (2 if match_behavior == "first" else 3) assert "specific-a" in the_elt["tags"] + if match_behavior == "merge": + assert "specific-a-2" in the_elt["tags"] assert "toplevel" in the_elt["tags"] + assert "toplevel2" not in the_elt["tags"] assert len(the_elt["before_script"]) == 1 
assert the_elt["before_script"][0] == "custom pre step one" assert len(the_elt["script"]) == 1 @@ -1466,8 +1497,9 @@ def test_ci_generate_override_runner_attrs( assert the_elt["variables"]["ONE"] == "toplevelvarone" assert the_elt["variables"]["TWO"] == "toplevelvartwo" assert "THREE" not in the_elt["variables"] - assert len(the_elt["tags"]) == 1 - assert the_elt["tags"][0] == "toplevel" + assert len(the_elt["tags"]) == 2 + assert "toplevel" in the_elt["tags"] + assert "toplevel2" in the_elt["tags"] assert len(the_elt["before_script"]) == 2 assert the_elt["before_script"][0] == "pre step one" assert the_elt["before_script"][1] == "pre step two" @@ -1484,9 +1516,10 @@ def test_ci_generate_override_runner_attrs( assert the_elt["variables"]["ONE"] == "toplevelvarone" assert the_elt["variables"]["TWO"] == "toplevelvartwo" assert the_elt["variables"]["THREE"] == "specificvarthree" - assert len(the_elt["tags"]) == 2 + assert len(the_elt["tags"]) == 3 assert "specific-one" in the_elt["tags"] assert "toplevel" in the_elt["tags"] + assert "toplevel2" in the_elt["tags"] assert len(the_elt["before_script"]) == 2 assert the_elt["before_script"][0] == "pre step one" assert the_elt["before_script"][1] == "pre step two" @@ -1506,12 +1539,12 @@ def test_ci_generate_with_workarounds( """\ spack: specs: - - callpath%gcc@3.0 + - callpath%gcc@9.5 mirrors: some-mirror: https://my.fake.mirror gitlab-ci: mappings: - - match: ['%gcc@3.0'] + - match: ['%gcc@9.5'] runner-attributes: tags: - donotcare @@ -1582,15 +1615,14 @@ def test_ci_rebuild_index( with tmpdir.as_cwd(): env_cmd("create", "test", "./spack.yaml") - with ev.read("test") as env: - spec_map = ci.get_concrete_specs(env, "callpath", "callpath", "FIND_ANY") - concrete_spec = spec_map["callpath"] + with ev.read("test"): + concrete_spec = Spec("callpath").concretized() spec_json = concrete_spec.to_json(hash=ht.dag_hash) json_path = str(tmpdir.join("spec.json")) with open(json_path, "w") as ypfd: ypfd.write(spec_json) - 
install_cmd("--keep-stage", "-f", json_path) + install_cmd("--add", "--keep-stage", "-f", json_path) buildcache_cmd("create", "-u", "-a", "-f", "--mirror-url", mirror_url, "callpath") ci_cmd("rebuild-index") @@ -1622,28 +1654,28 @@ def test_ci_generate_bootstrap_prune_dag( mirror_url = "file://{0}".format(mirror_dir.strpath) # Install a compiler, because we want to put it in a buildcache - install_cmd("gcc@10.1.0%gcc@4.5.0") + install_cmd("gcc@12.2.0%gcc@10.2.1") # Put installed compiler in the buildcache - buildcache_cmd("create", "-u", "-a", "-f", "-d", mirror_dir.strpath, "gcc@10.1.0%gcc@4.5.0") + buildcache_cmd("create", "-u", "-a", "-f", "-d", mirror_dir.strpath, "gcc@12.2.0%gcc@10.2.1") # Now uninstall the compiler - uninstall_cmd("-y", "gcc@10.1.0%gcc@4.5.0") + uninstall_cmd("-y", "gcc@12.2.0%gcc@10.2.1") monkeypatch.setattr(spack.concretize.Concretizer, "check_for_compiler_existence", False) spack.config.set("config:install_missing_compilers", True) - assert CompilerSpec("gcc@10.1.0") not in compilers.all_compiler_specs() + assert CompilerSpec("gcc@12.2.0") not in compilers.all_compiler_specs() # Configure the mirror where we put that buildcache w/ the compiler mirror_cmd("add", "test-mirror", mirror_url) - install_cmd("--no-check-signature", "a%gcc@10.1.0") + install_cmd("--no-check-signature", "b%gcc@12.2.0") # Put spec built with installed compiler in the buildcache - buildcache_cmd("create", "-u", "-a", "-f", "-d", mirror_dir.strpath, "a%gcc@10.1.0") + buildcache_cmd("create", "-u", "-a", "-f", "-d", mirror_dir.strpath, "b%gcc@12.2.0") # Now uninstall the spec - uninstall_cmd("-y", "a%gcc@10.1.0") + uninstall_cmd("-y", "b%gcc@12.2.0") filename = str(tmpdir.join("spack.yaml")) with open(filename, "w") as f: @@ -1652,9 +1684,9 @@ def test_ci_generate_bootstrap_prune_dag( spack: definitions: - bootstrap: - - gcc@10.1.0%gcc@4.5.0 + - gcc@12.2.0%gcc@10.2.1 specs: - - a%gcc@10.1.0 + - b%gcc@12.2.0 mirrors: atestm: {0} gitlab-ci: @@ -1672,6 +1704,16 @@ def 
test_ci_generate_bootstrap_prune_dag( runner-attributes: tags: - meh + - match: + - arch=test-debian6-aarch64 + runner-attributes: + tags: + - donotcare + - match: + - arch=test-debian6-m1 + runner-attributes: + tags: + - meh """.format( mirror_url ) @@ -1729,10 +1771,6 @@ def fake_get_mirrors_for_spec(spec=None, mirrors_to_check=None, index_only=False "(specs) b": [ "(bootstrap) gcc", ], - "(specs) a": [ - "(bootstrap) gcc", - "(specs) b", - ], } _validate_needs_graph(new_yaml_contents, needs_graph, False) @@ -1798,8 +1836,8 @@ def fake_stack_changed(env_path, rev1="HEAD^", rev2="HEAD"): yaml_contents = syaml.load(contents) for ci_key in yaml_contents.keys(): - if "archive-files" in ci_key or "mpich" in ci_key: - print("Error: archive-files and mpich should have been pruned") + if "archive-files" in ci_key: + print("Error: archive-files should have been pruned") assert False @@ -1959,13 +1997,16 @@ def test_ci_generate_read_broken_specs_url( spec_flattendeps.concretize() flattendeps_dag_hash = spec_flattendeps.dag_hash() - # Mark 'a' as broken (but not 'flatten-deps') - broken_spec_a_path = str(tmpdir.join(a_dag_hash)) - with open(broken_spec_a_path, "w") as bsf: - bsf.write("") - broken_specs_url = "file://{0}".format(tmpdir.strpath) + # Mark 'a' as broken (but not 'flatten-deps') + broken_spec_a_url = "{0}/{1}".format(broken_specs_url, a_dag_hash) + job_stack = "job_stack" + a_job_url = "a_job_url" + ci.write_broken_spec( + broken_spec_a_url, spec_a.name, job_stack, a_job_url, "pipeline_url", spec_a.to_dict() + ) + # Test that `spack ci generate` notices this broken spec and fails. 
filename = str(tmpdir.join("spack.yaml")) with open(filename, "w") as f: @@ -2001,11 +2042,13 @@ def test_ci_generate_read_broken_specs_url( output = ci_cmd("generate", output=str, fail_on_error=False) assert "known to be broken" in output - ex = "({0})".format(a_dag_hash) - assert ex in output + expected = "{0}/{1} (in stack {2}) was reported broken here: {3}".format( + spec_a.name, a_dag_hash[:7], job_stack, a_job_url + ) + assert expected in output - ex = "({0})".format(flattendeps_dag_hash) - assert ex not in output + not_expected = "flatten-deps/{0} (in stack".format(flattendeps_dag_hash[:7]) + assert not_expected not in output def test_ci_generate_external_signing_job( @@ -2115,34 +2158,30 @@ def test_ci_reproduce( shutil.copyfile(env.manifest_path, os.path.join(working_dir.strpath, "spack.yaml")) shutil.copyfile(env.lock_path, os.path.join(working_dir.strpath, "spack.lock")) - root_spec = None job_spec = None for h, s in env.specs_by_hash.items(): if s.name == "archive-files": - root_spec = s job_spec = s job_spec_json_path = os.path.join(working_dir.strpath, "archivefiles.json") with open(job_spec_json_path, "w") as fd: fd.write(job_spec.to_json(hash=ht.dag_hash)) - root_spec_json_path = os.path.join(working_dir.strpath, "root.json") - with open(root_spec_json_path, "w") as fd: - fd.write(root_spec.to_json(hash=ht.dag_hash)) - artifacts_root = os.path.join(working_dir.strpath, "scratch_dir") pipeline_path = os.path.join(artifacts_root, "pipeline.yml") ci_cmd("generate", "--output-file", pipeline_path, "--artifacts-root", artifacts_root) - job_name = ci.get_job_name("specs", False, job_spec, "test-debian6-core2", None) + target_name = spack.platforms.test.Test.default + job_name = ci.get_job_name( + "specs", False, job_spec, "test-debian6-%s" % target_name, None + ) repro_file = os.path.join(working_dir.strpath, "repro.json") repro_details = { "job_name": job_name, "job_spec_json": "archivefiles.json", - "root_spec_json": "root.json", "ci_project_dir": 
working_dir.strpath, } with open(repro_file, "w") as fd: diff --git a/lib/spack/spack/test/cmd/common/arguments.py b/lib/spack/spack/test/cmd/common/arguments.py index 58bf8e0ac58..d56ad59c85d 100644 --- a/lib/spack/spack/test/cmd/common/arguments.py +++ b/lib/spack/spack/test/cmd/common/arguments.py @@ -46,21 +46,27 @@ def test_negative_integers_not_allowed_for_parallel_jobs(job_parser): @pytest.mark.parametrize( - "specs,cflags,negated_variants", + "specs,cflags,propagation,negated_variants", [ - (['coreutils cflags="-O3 -g"'], ["-O3", "-g"], []), - (["coreutils", "cflags=-O3 -g"], ["-O3"], ["g"]), - (["coreutils", "cflags=-O3", "-g"], ["-O3"], ["g"]), + (['coreutils cflags="-O3 -g"'], ["-O3", "-g"], [False, False], []), + (['coreutils cflags=="-O3 -g"'], ["-O3", "-g"], [True, True], []), + (["coreutils", "cflags=-O3 -g"], ["-O3"], [False], ["g"]), + (["coreutils", "cflags==-O3 -g"], ["-O3"], [True], ["g"]), + (["coreutils", "cflags=-O3", "-g"], ["-O3"], [False], ["g"]), ], ) @pytest.mark.regression("12951") -def test_parse_spec_flags_with_spaces(specs, cflags, negated_variants): +def test_parse_spec_flags_with_spaces(specs, cflags, propagation, negated_variants): spec_list = spack.cmd.parse_specs(specs) assert len(spec_list) == 1 s = spec_list.pop() - assert s.compiler_flags["cflags"] == cflags + compiler_flags = [flag for flag in s.compiler_flags["cflags"]] + flag_propagation = [flag.propagate for flag in s.compiler_flags["cflags"]] + + assert compiler_flags == cflags + assert flag_propagation == propagation assert list(s.variants.keys()) == negated_variants for v in negated_variants: assert "~{0}".format(v) in s @@ -129,3 +135,19 @@ def test_concretizer_arguments(mutable_config, mock_packages): spec("--fresh", "zlib") assert spack.config.get("concretizer:reuse", None) is False + + +def test_use_buildcache_type(): + assert arguments.use_buildcache("only") == ("only", "only") + assert arguments.use_buildcache("never") == ("never", "never") + assert 
arguments.use_buildcache("auto") == ("auto", "auto") + assert arguments.use_buildcache("package:never,dependencies:only") == ("never", "only") + assert arguments.use_buildcache("only,package:never") == ("never", "only") + assert arguments.use_buildcache("package:only,package:never") == ("never", "auto") + assert arguments.use_buildcache("auto , package: only") == ("only", "auto") + + with pytest.raises(argparse.ArgumentTypeError): + assert arguments.use_buildcache("pkg:only,deps:never") + + with pytest.raises(argparse.ArgumentTypeError): + assert arguments.use_buildcache("sometimes") diff --git a/lib/spack/spack/test/cmd/config.py b/lib/spack/spack/test/cmd/config.py index cdf9bc00a02..56943f5d8b6 100644 --- a/lib/spack/spack/test/cmd/config.py +++ b/lib/spack/spack/test/cmd/config.py @@ -606,6 +606,14 @@ def check_config_updated(data): assert data["install_tree"]["projections"] == {"all": "{name}-{version}"} +def test_config_update_shared_linking(mutable_config): + # Old syntax: config:shared_linking:rpath/runpath + # New syntax: config:shared_linking:{type:rpath/runpath,bind:True/False} + with spack.config.override("config:shared_linking", "runpath"): + assert spack.config.get("config:shared_linking:type") == "runpath" + assert not spack.config.get("config:shared_linking:bind") + + def test_config_prefer_upstream( tmpdir_factory, install_mockery, mock_fetch, mutable_config, gen_mock_layout, monkeypatch ): @@ -634,13 +642,13 @@ def test_config_prefer_upstream( # Make sure only the non-default variants are set. 
assert packages["boost"] == { - "compiler": ["gcc@4.5.0"], + "compiler": ["gcc@10.2.1"], "variants": "+debug +graph", "version": ["1.63.0"], } - assert packages["dependency-install"] == {"compiler": ["gcc@4.5.0"], "version": ["2.0"]} + assert packages["dependency-install"] == {"compiler": ["gcc@10.2.1"], "version": ["2.0"]} # Ensure that neither variant gets listed for hdf5, since they conflict - assert packages["hdf5"] == {"compiler": ["gcc@4.5.0"], "version": ["2.3"]} + assert packages["hdf5"] == {"compiler": ["gcc@10.2.1"], "version": ["2.3"]} # Make sure a message about the conflicting hdf5's was given. assert "- hdf5" in output diff --git a/lib/spack/spack/test/cmd/dev_build.py b/lib/spack/spack/test/cmd/dev_build.py index cad706e6485..2ffbec8e73c 100644 --- a/lib/spack/spack/test/cmd/dev_build.py +++ b/lib/spack/spack/test/cmd/dev_build.py @@ -254,13 +254,18 @@ def test_dev_build_env_version_mismatch( def test_dev_build_multiple( tmpdir, mock_packages, install_mockery, mutable_mock_env_path, mock_fetch ): - """Test spack install with multiple developer builds""" + """Test spack install with multiple developer builds + + Test that only the root needs to be specified in the environment + Test that versions known only from the dev specs are included in the solve, + even if they come from a non-root + """ # setup dev-build-test-install package for dev build # Wait to concretize inside the environment to set dev_path on the specs; # without the environment, the user would need to set dev_path for both the # root and dependency if they wanted a dev build for both. 
leaf_dir = tmpdir.mkdir("leaf") - leaf_spec = spack.spec.Spec("dev-build-test-install@0.0.0") + leaf_spec = spack.spec.Spec("dev-build-test-install@1.0.0") leaf_pkg_cls = spack.repo.path.get_pkg_class(leaf_spec.name) with leaf_dir.as_cwd(): with open(leaf_pkg_cls.filename, "w") as f: @@ -283,13 +288,12 @@ def test_dev_build_multiple( """\ env: specs: - - dev-build-test-install@0.0.0 - dev-build-test-dependent@0.0.0 develop: dev-build-test-install: path: %s - spec: dev-build-test-install@0.0.0 + spec: dev-build-test-install@1.0.0 dev-build-test-dependent: spec: dev-build-test-dependent@0.0.0 path: %s @@ -300,6 +304,7 @@ def test_dev_build_multiple( env("create", "test", "./spack.yaml") with ev.read("test"): # Do concretization inside environment for dev info + # These specs are the source of truth to compare against the installs leaf_spec.concretize() root_spec.concretize() diff --git a/lib/spack/spack/test/cmd/env.py b/lib/spack/spack/test/cmd/env.py index 5cf4b09cae4..6782e651798 100644 --- a/lib/spack/spack/test/cmd/env.py +++ b/lib/spack/spack/test/cmd/env.py @@ -18,6 +18,7 @@ import spack.cmd.env import spack.environment as ev import spack.environment.shell +import spack.error import spack.modules import spack.paths import spack.repo @@ -220,7 +221,7 @@ def test_env_install_single_spec(install_mockery, mock_fetch): e = ev.read("test") with e: - install("cmake-client") + install("--add", "cmake-client") e = ev.read("test") assert e.user_specs[0].name == "cmake-client" @@ -255,7 +256,7 @@ def test_env_modifications_error_on_activate(install_mockery, mock_fetch, monkey e = ev.read("test") with e: - install("cmake-client") + install("--add", "cmake-client") def setup_error(pkg, env): raise RuntimeError("cmake-client had issues!") @@ -276,7 +277,7 @@ def test_activate_adds_transitive_run_deps_to_path(install_mockery, mock_fetch, e = ev.read("test") with e: - install("depends-on-run-env") + install("--add", "depends-on-run-env") env_variables = {} 
spack.environment.shell.activate(e).apply_modifications(env_variables) @@ -289,7 +290,7 @@ def test_env_install_same_spec_twice(install_mockery, mock_fetch): e = ev.read("test") with e: # The first installation outputs the package prefix, updates the view - out = install("cmake-client") + out = install("--add", "cmake-client") assert "Updating view at" in out # The second installation reports all packages already installed @@ -448,7 +449,7 @@ def test_env_status_broken_view( ): env_dir = str(tmpdir) with ev.Environment(env_dir): - install("trivial-install-test-package") + install("--add", "trivial-install-test-package") # switch to a new repo that doesn't include the installed package # test that Spack detects the missing package and warns the user @@ -467,7 +468,7 @@ def test_env_activate_broken_view( mutable_mock_env_path, mock_archive, mock_fetch, mock_custom_repository, install_mockery ): with ev.create("test"): - install("trivial-install-test-package") + install("--add", "trivial-install-test-package") # switch to a new repo that doesn't include the installed package # test that Spack detects the missing package and fails gracefully @@ -1056,7 +1057,9 @@ def test_roots_display_with_variants(): assert "boost +shared" in out -def test_uninstall_removes_from_env(mock_stage, mock_fetch, install_mockery): +def test_uninstall_keeps_in_env(mock_stage, mock_fetch, install_mockery): + # 'spack uninstall' without --remove should not change the environment + # spack.yaml file, just uninstall specs env("create", "test") with ev.read("test"): add("mpileaks") @@ -1064,12 +1067,32 @@ def test_uninstall_removes_from_env(mock_stage, mock_fetch, install_mockery): install("--fake") test = ev.read("test") - assert any(s.name == "mpileaks" for s in test.specs_by_hash.values()) - assert any(s.name == "libelf" for s in test.specs_by_hash.values()) + # Save this spec to check later if it is still in the env + (mpileaks_hash,) = list(x for x, y in test.specs_by_hash.items() if y.name 
== "mpileaks") + orig_user_specs = test.user_specs + orig_concretized_specs = test.concretized_order with ev.read("test"): uninstall("-ya") + test = ev.read("test") + assert test.concretized_order == orig_concretized_specs + assert test.user_specs.specs == orig_user_specs.specs + assert mpileaks_hash in test.specs_by_hash + assert not test.specs_by_hash[mpileaks_hash].package.installed + + +def test_uninstall_removes_from_env(mock_stage, mock_fetch, install_mockery): + # 'spack uninstall --remove' should update the environment + env("create", "test") + with ev.read("test"): + add("mpileaks") + add("libelf") + install("--fake") + + with ev.read("test"): + uninstall("-y", "-a", "--remove") + test = ev.read("test") assert not test.specs_by_hash assert not test.concretized_order @@ -1255,7 +1278,7 @@ def test_env_updates_view_install_package(tmpdir, mock_stage, mock_fetch, instal view_dir = tmpdir.join("view") env("create", "--with-view=%s" % view_dir, "test") with ev.read("test"): - install("--fake", "mpileaks") + install("--fake", "--add", "mpileaks") assert os.path.exists(str(view_dir.join(".spack/mpileaks"))) @@ -1275,7 +1298,7 @@ def test_env_updates_view_uninstall(tmpdir, mock_stage, mock_fetch, install_mock view_dir = tmpdir.join("view") env("create", "--with-view=%s" % view_dir, "test") with ev.read("test"): - install("--fake", "mpileaks") + install("--fake", "--add", "mpileaks") check_mpileaks_and_deps_in_view(view_dir) @@ -1324,7 +1347,7 @@ def test_env_updates_view_force_remove(tmpdir, mock_stage, mock_fetch, install_m view_dir = tmpdir.join("view") env("create", "--with-view=%s" % view_dir, "test") with ev.read("test"): - install("--fake", "mpileaks") + install("--add", "--fake", "mpileaks") check_mpileaks_and_deps_in_view(view_dir) @@ -2403,7 +2426,9 @@ def test_duplicate_packages_raise_when_concretizing_together(): e.add("mpileaks~opt") e.add("mpich") - with pytest.raises(ev.SpackEnvironmentError, match=r"cannot contain more"): + with pytest.raises( + 
spack.error.UnsatisfiableSpecError, match=r"relax the concretizer strictness" + ): e.concretize() @@ -3030,29 +3055,106 @@ def test_read_legacy_lockfile_and_reconcretize(mock_stage, mock_fetch, install_m assert current_versions == expected_versions -def test_environment_depfile_makefile(tmpdir, mock_packages): +@pytest.mark.parametrize( + "depfile_flags,expected_installs", + [ + # This installs the full environment + ( + ["--use-buildcache=never"], + [ + "dtbuild1", + "dtbuild2", + "dtbuild3", + "dtlink1", + "dtlink2", + "dtlink3", + "dtlink4", + "dtlink5", + "dtrun1", + "dtrun2", + "dtrun3", + "dttop", + ], + ), + # This prunes build deps at depth > 0 + ( + ["--use-buildcache=package:never,dependencies:only"], + [ + "dtbuild1", + "dtlink1", + "dtlink2", + "dtlink3", + "dtlink4", + "dtlink5", + "dtrun1", + "dtrun2", + "dtrun3", + "dttop", + ], + ), + # This prunes all build deps + ( + ["--use-buildcache=only"], + [ + "dtlink1", + "dtlink3", + "dtlink4", + "dtlink5", + "dtrun1", + "dtrun3", + "dttop", + ], + ), + # Test whether pruning of build deps is correct if we explicitly include one + # that is also a dependency of a root. + ( + ["--use-buildcache=only", "dttop", "dtbuild1"], + [ + "dtbuild1", + "dtlink1", + "dtlink2", + "dtlink3", + "dtlink4", + "dtlink5", + "dtrun1", + "dtrun2", + "dtrun3", + "dttop", + ], + ), + ], +) +def test_environment_depfile_makefile(depfile_flags, expected_installs, tmpdir, mock_packages): env("create", "test") make = Executable("make") makefile = str(tmpdir.join("Makefile")) with ev.read("test"): - add("libdwarf") + add("dttop") concretize() # Disable jobserver so we can do a dry run. with ev.read("test"): env( - "depfile", "-o", makefile, "--make-disable-jobserver", "--make-target-prefix", "prefix" + "depfile", + "-o", + makefile, + "--make-disable-jobserver", + "--make-target-prefix=prefix", + *depfile_flags ) # Do make dry run. 
- all_out = make("-n", "-f", makefile, output=str) + out = make("-n", "-f", makefile, output=str) - # Check whether `make` installs everything - with ev.read("test") as e: - for _, root in e.concretized_specs(): - for spec in root.traverse(root=True): - tgt = os.path.join("prefix", ".install", spec.dag_hash()) - assert "touch {}".format(tgt) in all_out + # Spack install commands are of the form "spack install ... # ", + # so we just parse the spec again, for simplicity. + specs_that_make_would_install = [ + Spec(line.split("# ")[1]).name for line in out.splitlines() if line.startswith("spack") + ] + + # Check that all specs are there (without duplicates) + assert set(specs_that_make_would_install) == set(expected_installs) + assert len(specs_that_make_would_install) == len(expected_installs) def test_environment_depfile_out(tmpdir, mock_packages): diff --git a/lib/spack/spack/test/cmd/find.py b/lib/spack/spack/test/cmd/find.py index b313a364463..9f3f453bad5 100644 --- a/lib/spack/spack/test/cmd/find.py +++ b/lib/spack/spack/test/cmd/find.py @@ -322,7 +322,7 @@ def test_find_very_long(database, config): @pytest.mark.db def test_find_show_compiler(database, config): output = find("--no-groups", "--show-full-compiler", "mpileaks") - assert "mpileaks@2.3%gcc@4.5.0" in output + assert "mpileaks@2.3%gcc@10.2.1" in output @pytest.mark.db @@ -356,7 +356,7 @@ def test_find_prefix_in_env( """Test `find` formats requiring concrete specs work in environments.""" env("create", "test") with ev.read("test"): - install("mpileaks") + install("--add", "mpileaks") find("-p") find("-l") find("-L") diff --git a/lib/spack/spack/test/cmd/info.py b/lib/spack/spack/test/cmd/info.py index 55b875e0224..2bbf4d3a7fa 100644 --- a/lib/spack/spack/test/cmd/info.py +++ b/lib/spack/spack/test/cmd/info.py @@ -43,7 +43,7 @@ def test_it_just_runs(pkg): def test_info_noversion(mock_packages, print_buffer): - """Check that a mock package with no versions or variants outputs None.""" + """Check that a 
mock package with no versions outputs None.""" info("noversion") line_iter = iter(print_buffer) @@ -52,7 +52,7 @@ def test_info_noversion(mock_packages, print_buffer): has = [desc in line for desc in ["Preferred", "Safe", "Deprecated"]] if not any(has): continue - elif "Variants" not in line: + else: continue assert "None" in next(line_iter).strip() diff --git a/lib/spack/spack/test/cmd/install.py b/lib/spack/spack/test/cmd/install.py index e74f01db7fd..67e0dd3f80a 100644 --- a/lib/spack/spack/test/cmd/install.py +++ b/lib/spack/spack/test/cmd/install.py @@ -17,6 +17,7 @@ import llnl.util.filesystem as fs import llnl.util.tty as tty +import spack.cmd.common.arguments import spack.cmd.install import spack.compilers as compilers import spack.config @@ -770,7 +771,7 @@ def test_install_only_dependencies_in_env( dep = Spec("dependency-install").concretized() root = Spec("dependent-install").concretized() - install("-v", "--only", "dependencies", "dependent-install") + install("-v", "--only", "dependencies", "--add", "dependent-install") assert os.path.exists(dep.prefix) assert not os.path.exists(root.prefix) @@ -799,7 +800,7 @@ def test_install_only_dependencies_of_all_in_env( def test_install_no_add_in_env(tmpdir, mock_fetch, install_mockery, mutable_mock_env_path): - # To test behavior of --no-add option, we create the following environment: + # To test behavior of --add option, we create the following environment: # # mpileaks # ^callpath @@ -848,18 +849,19 @@ def test_install_no_add_in_env(tmpdir, mock_fetch, install_mockery, mutable_mock # Assert using --no-add with a spec not in the env fails inst_out = install("--no-add", "boost", fail_on_error=False, output=str) - assert "no such spec exists in environment" in inst_out + assert "You can add it to the environment with 'spack add " in inst_out - # Ensure using --no-add with an ambiguous spec fails + # Without --add, ensure that install fails if the spec matches more + # than one root with 
pytest.raises(ev.SpackEnvironmentError) as err: - inst_out = install("--no-add", "a", output=str) + inst_out = install("a", output=str) assert "a matches multiple specs in the env" in str(err) - # With "--no-add", install an unambiguous dependency spec (that already - # exists as a dep in the environment) using --no-add and make sure it - # gets installed (w/ deps), but is not added to the environment. - install("--no-add", "dyninst") + # Install an unambiguous dependency spec (that already exists as a dep + # in the environment) and make sure it gets installed (w/ deps), + # but is not added to the environment. + install("dyninst") find_output = find("-l", output=str) assert "dyninst" in find_output @@ -871,31 +873,30 @@ def test_install_no_add_in_env(tmpdir, mock_fetch, install_mockery, mutable_mock assert all([s in env_specs for s in post_install_specs]) # Make sure we can install a concrete dependency spec from a spec.json - # file on disk, using the ``--no-add` option, and the spec is installed - # but not added as a root + # file on disk, and the spec is installed but not added as a root mpi_spec_json_path = tmpdir.join("{0}.json".format(mpi_spec.name)) with open(mpi_spec_json_path.strpath, "w") as fd: fd.write(mpi_spec.to_json(hash=ht.dag_hash)) - install("--no-add", "-f", mpi_spec_json_path.strpath) + install("-f", mpi_spec_json_path.strpath) assert mpi_spec not in e.roots() find_output = find("-l", output=str) assert mpi_spec.name in find_output - # Without "--no-add", install an unambiguous depependency spec (that - # already exists as a dep in the environment) without --no-add and make - # sure it is added as a root of the environment as well as installed. + # Install an unambiguous depependency spec (that already exists as a + # dep in the environment) with --add and make sure it is added as a + # root of the environment as well as installed. 
assert b_spec not in e.roots() - install("b") + install("--add", "b") assert b_spec in e.roots() assert b_spec not in e.uninstalled_specs() - # Without "--no-add", install a novel spec and make sure it is added - # as a root and installed. - install("bowtie") + # Install a novel spec with --add and make sure it is added as a root + # and installed. + install("--add", "bowtie") assert any([s.name == "bowtie" for s in e.roots()]) assert not any([s.name == "bowtie" for s in e.uninstalled_specs()]) @@ -934,7 +935,16 @@ def test_cdash_configure_warning(tmpdir, mock_fetch, install_mockery, capfd): with capfd.disabled(): with tmpdir.as_cwd(): # Test would fail if install raised an error. - install("--log-file=cdash_reports", "--log-format=cdash", "configure-warning") + + # Ensure that even on non-x86_64 architectures, there are no + # dependencies installed + spec = spack.spec.Spec("configure-warning").concretized() + spec.clear_dependencies() + specfile = "./spec.json" + with open(specfile, "w") as f: + f.write(spec.to_json()) + + install("--log-file=cdash_reports", "--log-format=cdash", specfile) # Verify Configure.xml exists with expected contents. 
report_dir = tmpdir.join("cdash_reports") assert report_dir in tmpdir.listdir() @@ -954,10 +964,10 @@ def test_compiler_bootstrap( ): monkeypatch.setattr(spack.concretize.Concretizer, "check_for_compiler_existence", False) spack.config.set("config:install_missing_compilers", True) - assert CompilerSpec("gcc@2.0") not in compilers.all_compiler_specs() + assert CompilerSpec("gcc@12.0") not in compilers.all_compiler_specs() # Test succeeds if it does not raise an error - install("a%gcc@2.0") + install("a%gcc@12.0") def test_compiler_bootstrap_from_binary_mirror( @@ -1012,11 +1022,11 @@ def test_compiler_bootstrap_already_installed( monkeypatch.setattr(spack.concretize.Concretizer, "check_for_compiler_existence", False) spack.config.set("config:install_missing_compilers", True) - assert CompilerSpec("gcc@2.0") not in compilers.all_compiler_specs() + assert CompilerSpec("gcc@12.0") not in compilers.all_compiler_specs() # Test succeeds if it does not raise an error - install("gcc@2.0") - install("a%gcc@2.0") + install("gcc@12.0") + install("a%gcc@12.0") def test_install_fails_no_args(tmpdir): @@ -1086,7 +1096,7 @@ def test_install_empty_env( ("test-install-callbacks", "undefined-install-test"), ], ) -def test_install_callbacks_fail(install_mockery, mock_fetch, name, method): +def test_installation_fail_tests(install_mockery, mock_fetch, name, method): output = install("--test=root", "--no-cache", name, fail_on_error=False) assert output.count(method) == 2 @@ -1132,7 +1142,7 @@ def install_use_buildcache(opt): "--no-check-signature", "--use-buildcache", opt, package_name, fail_on_error=True ) - pkg_opt, dep_opt = spack.cmd.install.parse_use_buildcache(opt) + pkg_opt, dep_opt = spack.cmd.common.arguments.use_buildcache(opt) validate(dep_opt, out, dependency_name) validate(pkg_opt, out, package_name) diff --git a/lib/spack/spack/test/cmd/style.py b/lib/spack/spack/test/cmd/style.py index 179973af522..fd727d088e7 100644 --- a/lib/spack/spack/test/cmd/style.py +++ 
b/lib/spack/spack/test/cmd/style.py @@ -92,14 +92,14 @@ def test_changed_files_from_git_rev_base(tmpdir, capfd): git("checkout", "-b", "main") git("config", "user.name", "test user") git("config", "user.email", "test@user.com") - git("commit", "--allow-empty", "-m", "initial commit") + git("commit", "--no-gpg-sign", "--allow-empty", "-m", "initial commit") tmpdir.ensure("bin/spack") assert changed_files(base="HEAD") == ["bin/spack"] assert changed_files(base="main") == ["bin/spack"] git("add", "bin/spack") - git("commit", "-m", "v1") + git("commit", "--no-gpg-sign", "-m", "v1") assert changed_files(base="HEAD") == [] assert changed_files(base="HEAD~") == ["bin/spack"] @@ -113,7 +113,7 @@ def test_changed_no_base(tmpdir, capfd): git("config", "user.name", "test user") git("config", "user.email", "test@user.com") git("add", ".") - git("commit", "-m", "initial commit") + git("commit", "--no-gpg-sign", "-m", "initial commit") with pytest.raises(SystemExit): changed_files(base="foobar") @@ -198,7 +198,7 @@ def external_style_root(flake8_package_with_errors, tmpdir): git("config", "user.name", "test user") git("config", "user.email", "test@user.com") git("add", ".") - git("commit", "-m", "initial commit") + git("commit", "--no-gpg-sign", "-m", "initial commit") git("branch", "-m", "develop") git("checkout", "-b", "feature") @@ -210,7 +210,7 @@ def external_style_root(flake8_package_with_errors, tmpdir): # add the buggy file on the feature branch with tmpdir.as_cwd(): git("add", str(py_file)) - git("commit", "-m", "add new file") + git("commit", "--no-gpg-sign", "-m", "add new file") yield tmpdir, py_file diff --git a/lib/spack/spack/test/cmd/test.py b/lib/spack/spack/test/cmd/test.py index e842dbc4655..fac639309d7 100644 --- a/lib/spack/spack/test/cmd/test.py +++ b/lib/spack/spack/test/cmd/test.py @@ -237,8 +237,7 @@ def test_test_list_all(mock_packages): "simple-standalone-test", "test-error", "test-fail", - "test-build-callbacks", - "test-install-callbacks", + 
"fail-test-audit", ] ) diff --git a/lib/spack/spack/test/cmd/uninstall.py b/lib/spack/spack/test/cmd/uninstall.py index a47f6c60a57..ddbd54e2529 100644 --- a/lib/spack/spack/test/cmd/uninstall.py +++ b/lib/spack/spack/test/cmd/uninstall.py @@ -3,10 +3,13 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) +import sys + import pytest import llnl.util.tty as tty +import spack.environment import spack.store from spack.main import SpackCommand, SpackCommandError @@ -166,3 +169,187 @@ def _warn(*args, **kwargs): monkeypatch.setattr(tty, "warn", _warn) # Now try to uninstall and check this doesn't trigger warnings uninstall("-y", "-a") + + +# Note: I want to use https://docs.pytest.org/en/7.1.x/how-to/skipping.html#skip-all-test-functions-of-a-class-or-module +# the style formatter insists on separating these two lines. +pytest.mark.skipif(sys.platform == "win32", reason="Envs unsupported on Windows") + + +class TestUninstallFromEnv(object): + """Tests an installation with two environments e1 and e2, which each have + shared package installations: + + e1 has dt-diamond-left -> dt-diamond-bottom + + e2 has dt-diamond-right -> dt-diamond-bottom + """ + + env = SpackCommand("env") + add = SpackCommand("add") + concretize = SpackCommand("concretize") + find = SpackCommand("find") + + @pytest.fixture + def environment_setup( + self, mutable_mock_env_path, config, mock_packages, mutable_database, install_mockery + ): + TestUninstallFromEnv.env("create", "e1") + e1 = spack.environment.read("e1") + with e1: + TestUninstallFromEnv.add("dt-diamond-left") + TestUninstallFromEnv.add("dt-diamond-bottom") + TestUninstallFromEnv.concretize() + install("--fake") + + TestUninstallFromEnv.env("create", "e2") + e2 = spack.environment.read("e2") + with e2: + TestUninstallFromEnv.add("dt-diamond-right") + TestUninstallFromEnv.add("dt-diamond-bottom") + TestUninstallFromEnv.concretize() + install("--fake") + + def test_basic_env_sanity(self, environment_setup): + for env_name in ["e1", "e2"]: 
+ e = spack.environment.read(env_name) + with e: + for _, concretized_spec in e.concretized_specs(): + assert concretized_spec.package.installed + + def test_uninstall_force_dependency_shared_between_envs(self, environment_setup): + """If you "spack uninstall -f --dependents dt-diamond-bottom" from + e1, then all packages should be uninstalled (but not removed) from + both e1 and e2. + """ + e1 = spack.environment.read("e1") + with e1: + uninstall("-f", "-y", "--dependents", "dt-diamond-bottom") + + # The specs should still be in the environment, since + # --remove was not specified + assert set(root.name for (root, _) in e1.concretized_specs()) == set( + ["dt-diamond-left", "dt-diamond-bottom"] + ) + + for _, concretized_spec in e1.concretized_specs(): + assert not concretized_spec.package.installed + + # Everything in e2 depended on dt-diamond-bottom, so should also + # have been uninstalled. The roots should be unchanged though. + e2 = spack.environment.read("e2") + with e2: + assert set(root.name for (root, _) in e2.concretized_specs()) == set( + ["dt-diamond-right", "dt-diamond-bottom"] + ) + for _, concretized_spec in e2.concretized_specs(): + assert not concretized_spec.package.installed + + def test_uninstall_remove_dependency_shared_between_envs(self, environment_setup): + """If you "spack uninstall --dependents --remove dt-diamond-bottom" from + e1, then all packages are removed from e1 (it is now empty); + dt-diamond-left is also uninstalled (since only e1 needs it) but + dt-diamond-bottom is not uninstalled (since e2 needs it). 
+ """ + e1 = spack.environment.read("e1") + with e1: + dtdiamondleft = next( + concrete + for (_, concrete) in e1.concretized_specs() + if concrete.name == "dt-diamond-left" + ) + output = uninstall("-y", "--dependents", "--remove", "dt-diamond-bottom") + assert "The following specs will be removed but not uninstalled" in output + assert not list(e1.roots()) + assert not dtdiamondleft.package.installed + + # Since -f was not specified, all specs in e2 should still be installed + # (and e2 should be unchanged) + e2 = spack.environment.read("e2") + with e2: + assert set(root.name for (root, _) in e2.concretized_specs()) == set( + ["dt-diamond-right", "dt-diamond-bottom"] + ) + for _, concretized_spec in e2.concretized_specs(): + assert concretized_spec.package.installed + + def test_uninstall_dependency_shared_between_envs_fail(self, environment_setup): + """If you "spack uninstall --dependents dt-diamond-bottom" from + e1 (without --remove or -f), then this should fail (this is needed by + e2). + """ + e1 = spack.environment.read("e1") + with e1: + output = uninstall("-y", "--dependents", "dt-diamond-bottom", fail_on_error=False) + assert "There are still dependents." in output + assert "use `spack env remove`" in output + + # The environment should be unchanged and nothing should have been + # uninstalled + assert set(root.name for (root, _) in e1.concretized_specs()) == set( + ["dt-diamond-left", "dt-diamond-bottom"] + ) + for _, concretized_spec in e1.concretized_specs(): + assert concretized_spec.package.installed + + def test_uninstall_force_and_remove_dependency_shared_between_envs(self, environment_setup): + """If you "spack uninstall -f --dependents --remove dt-diamond-bottom" from + e1, then all packages should be uninstalled and removed from e1. + All packages will also be uninstalled from e2, but the roots will + remain unchanged. 
+ """ + e1 = spack.environment.read("e1") + with e1: + dtdiamondleft = next( + concrete + for (_, concrete) in e1.concretized_specs() + if concrete.name == "dt-diamond-left" + ) + uninstall("-f", "-y", "--dependents", "--remove", "dt-diamond-bottom") + assert not list(e1.roots()) + assert not dtdiamondleft.package.installed + + e2 = spack.environment.read("e2") + with e2: + assert set(root.name for (root, _) in e2.concretized_specs()) == set( + ["dt-diamond-right", "dt-diamond-bottom"] + ) + for _, concretized_spec in e2.concretized_specs(): + assert not concretized_spec.package.installed + + def test_uninstall_keep_dependents_dependency_shared_between_envs(self, environment_setup): + """If you "spack uninstall -f --remove dt-diamond-bottom" from + e1, then dt-diamond-bottom should be uninstalled, which leaves + "dangling" references in both environments, since + dt-diamond-left and dt-diamond-right both need it. + """ + e1 = spack.environment.read("e1") + with e1: + dtdiamondleft = next( + concrete + for (_, concrete) in e1.concretized_specs() + if concrete.name == "dt-diamond-left" + ) + uninstall("-f", "-y", "--remove", "dt-diamond-bottom") + # dt-diamond-bottom was removed from the list of roots (note that + # it would still be installed since dt-diamond-left depends on it) + assert set(x.name for x in e1.roots()) == set(["dt-diamond-left"]) + assert dtdiamondleft.package.installed + + e2 = spack.environment.read("e2") + with e2: + assert set(root.name for (root, _) in e2.concretized_specs()) == set( + ["dt-diamond-right", "dt-diamond-bottom"] + ) + dtdiamondright = next( + concrete + for (_, concrete) in e2.concretized_specs() + if concrete.name == "dt-diamond-right" + ) + assert dtdiamondright.package.installed + dtdiamondbottom = next( + concrete + for (_, concrete) in e2.concretized_specs() + if concrete.name == "dt-diamond-bottom" + ) + assert not dtdiamondbottom.package.installed diff --git a/lib/spack/spack/test/cmd/view.py 
b/lib/spack/spack/test/cmd/view.py index 51af2bae2a4..67c4275ddd4 100644 --- a/lib/spack/spack/test/cmd/view.py +++ b/lib/spack/spack/test/cmd/view.py @@ -10,6 +10,7 @@ import spack.util.spack_yaml as s_yaml from spack.main import SpackCommand +from spack.spec import Spec activate = SpackCommand("activate") extensions = SpackCommand("extensions") @@ -261,3 +262,34 @@ def test_view_fails_with_missing_projections_file(tmpdir): projection_file = os.path.join(str(tmpdir), "nonexistent") with pytest.raises(SystemExit): view("symlink", "--projection-file", projection_file, viewpath, "foo") + + +@pytest.mark.parametrize("with_projection", [False, True]) +@pytest.mark.parametrize("cmd", ["symlink", "copy"]) +def test_view_files_not_ignored( + tmpdir, mock_packages, mock_archive, mock_fetch, config, install_mockery, cmd, with_projection +): + spec = Spec("view-not-ignored").concretized() + pkg = spec.package + pkg.do_install() + pkg.assert_installed(spec.prefix) + + install("view-dir-file") # Arbitrary package to add noise + + viewpath = str(tmpdir.mkdir("view_{0}".format(cmd))) + + if with_projection: + proj = str(tmpdir.join("proj.yaml")) + with open(proj, "w") as f: + f.write('{"projections":{"all":"{name}"}}') + prefix_in_view = os.path.join(viewpath, "view-not-ignored") + args = ["--projection-file", proj] + else: + prefix_in_view = viewpath + args = [] + + view(cmd, *(args + [viewpath, "view-not-ignored", "view-dir-file"])) + pkg.assert_installed(prefix_in_view) + + view("remove", viewpath, "view-not-ignored") + pkg.assert_not_installed(prefix_in_view) diff --git a/lib/spack/spack/test/concretize.py b/lib/spack/spack/test/concretize.py index 4d15d48dbe1..f2d7edf126e 100644 --- a/lib/spack/spack/test/concretize.py +++ b/lib/spack/spack/test/concretize.py @@ -3,6 +3,7 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) import os +import posixpath import sys import jinja2 @@ -81,6 +82,7 @@ def check_concretize(abstract_spec): "mpich", # compiler flags 'mpich 
cppflags="-O3"', + 'mpich cppflags=="-O3"', # with virtual "mpileaks ^mpi", "mpileaks ^mpi@:1.1", @@ -228,7 +230,7 @@ def test_concretize(self, spec): check_concretize(spec) def test_concretize_mention_build_dep(self): - spec = check_concretize("cmake-client ^cmake@3.4.3") + spec = check_concretize("cmake-client ^cmake@3.21.3") # Check parent's perspective of child to_dependencies = spec.edges_to_dependencies(name="cmake") @@ -313,8 +315,8 @@ def test_provides_handles_multiple_providers_of_same_version(self): def test_different_compilers_get_different_flags(self): client = Spec( - "cmake-client %gcc@4.7.2 platform=test os=fe target=fe" - + " ^cmake %clang@3.5 platform=test os=fe target=fe" + "cmake-client %gcc@11.1.0 platform=test os=fe target=fe" + + " ^cmake %clang@12.2.0 platform=test os=fe target=fe" ) client.concretize() cmake = client["cmake"] @@ -323,12 +325,50 @@ def test_different_compilers_get_different_flags(self): assert set(client.compiler_flags["fflags"]) == set(["-O0", "-g"]) assert not set(cmake.compiler_flags["fflags"]) + def test_concretize_compiler_flag_propagate(self): + spec = Spec("hypre cflags=='-g' ^openblas") + spec.concretize() + + assert spec.satisfies("^openblas cflags='-g'") + + @pytest.mark.skipif( + os.environ.get("SPACK_TEST_SOLVER") == "original" or sys.platform == "win32", + reason="Optional compiler propagation isn't deprecated for original concretizer", + ) + def test_concretize_compiler_flag_does_not_propagate(self): + spec = Spec("hypre cflags='-g' ^openblas") + spec.concretize() + + assert not spec.satisfies("^openblas cflags='-g'") + + @pytest.mark.skipif( + os.environ.get("SPACK_TEST_SOLVER") == "original" or sys.platform == "win32", + reason="Optional compiler propagation isn't deprecated for original concretizer", + ) + def test_concretize_propagate_compiler_flag_not_passed_to_dependent(self): + spec = Spec("hypre cflags=='-g' ^openblas cflags='-O3'") + spec.concretize() + + assert set(spec.compiler_flags["cflags"]) == 
set(["-g"]) + assert spec.satisfies("^openblas cflags='-O3'") + + def test_mixing_compilers_only_affects_subdag(self): + spack.config.set("packages:all:compiler", ["clang", "gcc"]) + spec = Spec("dt-diamond%gcc ^dt-diamond-bottom%clang").concretized() + for dep in spec.traverse(): + assert ("%clang" in dep) == (dep.name == "dt-diamond-bottom") + + def test_compiler_inherited_upwards(self): + spec = Spec("dt-diamond ^dt-diamond-bottom%clang").concretized() + for dep in spec.traverse(): + assert "%clang" in dep + def test_architecture_inheritance(self): """test_architecture_inheritance is likely to fail with an UnavailableCompilerVersionError if the architecture is concretized incorrectly. """ - spec = Spec("cmake-client %gcc@4.7.2 os=fe ^ cmake") + spec = Spec("cmake-client %gcc@11.1.0 os=fe ^ cmake") spec.concretize() assert spec["cmake"].architecture == spec.architecture @@ -337,6 +377,9 @@ def test_architecture_deep_inheritance(self, mock_targets): information from the root even when partial architecture information is provided by an intermediate dependency. 
""" + if spack.config.get("config:concretizer") == "original": + pytest.skip("Fixing the parser broke this test for the original concretizer.") + spec_str = "mpileaks %gcc@4.5.0 os=CNL target=nocona" " ^dyninst os=CNL ^callpath os=CNL" spec = Spec(spec_str).concretized() for s in spec.traverse(root=False): @@ -398,6 +441,24 @@ def test_concretize_two_virtuals_with_dual_provider_and_a_conflict(self): with pytest.raises(spack.error.SpackError): s.concretize() + @pytest.mark.skipif( + os.environ.get("SPACK_TEST_SOLVER") == "original" or sys.platform == "win32", + reason="Optional compiler propagation isn't deprecated for original concretizer", + ) + def test_concretize_propagate_disabled_variant(self): + """Test a package variant value was passed from its parent.""" + spec = Spec("hypre~~shared ^openblas") + spec.concretize() + + assert spec.satisfies("^openblas~shared") + + def test_concretize_propagated_variant_is_not_passed_to_dependent(self): + """Test a package variant value was passed from its parent.""" + spec = Spec("hypre~~shared ^openblas+shared") + spec.concretize() + + assert spec.satisfies("^openblas+shared") + @pytest.mark.skipif(sys.platform == "win32", reason="No Compiler for Arch on Win") def test_no_matching_compiler_specs(self, mock_low_high_config): # only relevant when not building compilers as needed @@ -449,7 +510,7 @@ def test_my_dep_depends_on_provider_of_my_virtual_dep(self): spec.normalize() spec.concretize() - @pytest.mark.parametrize("compiler_str", ["clang", "gcc", "gcc@4.5.0", "clang@:3.3.0"]) + @pytest.mark.parametrize("compiler_str", ["clang", "gcc", "gcc@10.2.1", "clang@:12.0.0"]) def test_compiler_inheritance(self, compiler_str): spec_str = "mpileaks %{0}".format(compiler_str) spec = Spec(spec_str).concretized() @@ -459,7 +520,7 @@ def test_compiler_inheritance(self, compiler_str): def test_external_package(self): spec = Spec("externaltool%gcc") spec.concretize() - assert spec["externaltool"].external_path == os.path.sep + 
os.path.join( + assert spec["externaltool"].external_path == posixpath.sep + posixpath.join( "path", "to", "external_tool" ) assert "externalprereq" not in spec @@ -490,10 +551,10 @@ def test_nobuild_package(self): def test_external_and_virtual(self): spec = Spec("externaltest") spec.concretize() - assert spec["externaltool"].external_path == os.path.sep + os.path.join( + assert spec["externaltool"].external_path == posixpath.sep + posixpath.join( "path", "to", "external_tool" ) - assert spec["stuff"].external_path == os.path.sep + os.path.join( + assert spec["stuff"].external_path == posixpath.sep + posixpath.join( "path", "to", "external_virtual_gcc" ) assert spec["externaltool"].compiler.satisfies("gcc") @@ -692,15 +753,15 @@ def test_adjusting_default_target_based_on_compiler( @pytest.mark.regression("8735,14730") def test_compiler_version_matches_any_entry_in_compilers_yaml(self): # Ensure that a concrete compiler with different compiler version - # doesn't match (here it's 4.5 vs. 4.5.0) + # doesn't match (here it's 10.2 vs. 
10.2.1) with pytest.raises(spack.concretize.UnavailableCompilerVersionError): - s = Spec("mpileaks %gcc@4.5") + s = Spec("mpileaks %gcc@10.2") s.concretize() # An abstract compiler with a version list could resolve to 4.5.0 - s = Spec("mpileaks %gcc@4.5:") + s = Spec("mpileaks %gcc@10.2:") s.concretize() - assert str(s.compiler.version) == "4.5.0" + assert str(s.compiler.version) == "10.2.1" def test_concretize_anonymous(self): with pytest.raises(spack.error.SpackError): @@ -717,11 +778,11 @@ def test_concretize_anonymous_dep(self, spec_str): "spec_str,expected_str", [ # Unconstrained versions select default compiler (gcc@4.5.0) - ("bowtie@1.3.0", "%gcc@4.5.0"), + ("bowtie@1.4.0", "%gcc@10.2.1"), # Version with conflicts and no valid gcc select another compiler - ("bowtie@1.2.2", "%clang@3.3"), + ("bowtie@1.3.0", "%clang@12.0.0"), # If a higher gcc is available still prefer that - ("bowtie@1.2.2 os=redhat6", "%gcc@4.7.2"), + ("bowtie@1.2.2 os=redhat6", "%gcc@11.1.0"), ], ) def test_compiler_conflicts_in_package_py(self, spec_str, expected_str): @@ -1044,11 +1105,11 @@ def test_compiler_match_is_preferred_to_newer_version(self): # that doesn't allow newer versions with gcc@4.4.0. Check # that an old version of openblas is selected, rather than # a different compiler for just that node. 
- spec_str = "simple-inheritance+openblas %gcc@4.4.0 os=redhat6" + spec_str = "simple-inheritance+openblas %gcc@10.1.0 os=redhat6" s = Spec(spec_str).concretized() - assert "openblas@0.2.13" in s - assert s["openblas"].satisfies("%gcc@4.4.0") + assert "openblas@0.2.15" in s + assert s["openblas"].satisfies("%gcc@10.1.0") @pytest.mark.regression("19981") def test_target_ranges_in_conflicts(self): @@ -1077,8 +1138,8 @@ def test_custom_compiler_version(self): if spack.config.get("config:concretizer") == "original": pytest.xfail("Known failure of the original concretizer") - s = Spec("a %gcc@foo os=redhat6").concretized() - assert "%gcc@foo" in s + s = Spec("a %gcc@10foo os=redhat6").concretized() + assert "%gcc@10foo" in s def test_all_patches_applied(self): uuidpatch = ( @@ -1270,8 +1331,8 @@ def test_external_with_non_default_variant_as_dependency(self): ("mpileaks", "os=debian6"), # To trigger the bug in 22871 we need to have the same compiler # spec available on both operating systems - ("mpileaks%gcc@4.5.0 platform=test os=debian6", "os=debian6"), - ("mpileaks%gcc@4.5.0 platform=test os=redhat6", "os=redhat6"), + ("mpileaks%gcc@10.2.1 platform=test os=debian6", "os=debian6"), + ("mpileaks%gcc@10.2.1 platform=test os=redhat6", "os=redhat6"), ], ) def test_os_selection_when_multiple_choices_are_possible(self, spec_str, expected_os): @@ -1283,7 +1344,7 @@ def test_os_selection_when_multiple_choices_are_possible(self, spec_str, expecte @pytest.mark.regression("22718") @pytest.mark.parametrize( "spec_str,expected_compiler", - [("mpileaks", "%gcc@4.5.0"), ("mpileaks ^mpich%clang@3.3", "%clang@3.3")], + [("mpileaks", "%gcc@10.2.1"), ("mpileaks ^mpich%clang@12.0.0", "%clang@12.0.0")], ) def test_compiler_is_unique(self, spec_str, expected_compiler): s = Spec(spec_str).concretized() @@ -1459,10 +1520,12 @@ def test_target_granularity(self): # The test architecture uses core2 as the default target. 
Check that when # we configure Spack for "generic" granularity we concretize for x86_64 + default_target = spack.platforms.test.Test.default + generic_target = archspec.cpu.TARGETS[default_target].generic.name s = Spec("python") - assert s.concretized().satisfies("target=core2") + assert s.concretized().satisfies("target=%s" % default_target) with spack.config.override("concretizer:targets", {"granularity": "generic"}): - assert s.concretized().satisfies("target=x86_64") + assert s.concretized().satisfies("target=%s" % generic_target) def test_host_compatible_concretization(self): if spack.config.get("config:concretizer") == "original": @@ -1643,6 +1706,28 @@ def test_best_effort_coconcretize_preferences(self, specs, expected_spec, occura counter += 1 assert counter == occurances, concrete_specs + def test_coconcretize_reuse_and_virtuals(self): + import spack.solver.asp + + if spack.config.get("config:concretizer") == "original": + pytest.skip("Original concretizer cannot reuse") + + reusable_specs = [] + for s in ["mpileaks ^mpich", "zmpi"]: + reusable_specs.extend(spack.spec.Spec(s).concretized().traverse(root=True)) + + root_specs = [spack.spec.Spec("mpileaks"), spack.spec.Spec("zmpi")] + + import spack.solver.asp + + with spack.config.override("concretizer:reuse", True): + solver = spack.solver.asp.Solver() + setup = spack.solver.asp.SpackSolverSetup() + result, _, _ = solver.driver.solve(setup, root_specs, reuse=reusable_specs) + + for spec in result.specs: + assert "zmpi" in spec + @pytest.mark.regression("30864") def test_misleading_error_message_on_version(self, mutable_database): # For this bug to be triggered we need a reusable dependency @@ -1689,12 +1774,19 @@ def test_version_weight_and_provenance(self): # version_declared("b","0.9",1,"package_py"). # version_declared("b","1.0",2,"installed"). # version_declared("b","0.9",3,"installed"). - for criterion in [ - (1, None, "number of packages to build (vs. 
reuse)"), + # + # Depending on the target, it may also use gnuconfig + result_spec = result.specs[0] + num_specs = len(list(result_spec.traverse())) + + criteria = [ + (num_specs - 1, None, "number of packages to build (vs. reuse)"), (2, 0, "version badness"), - ]: + ] + + for criterion in criteria: assert criterion in result.criteria - assert result.specs[0].satisfies("^b@1.0") + assert result_spec.satisfies("^b@1.0") @pytest.mark.regression("31169") def test_not_reusing_incompatible_os_or_compiler(self): @@ -1714,8 +1806,8 @@ def test_not_reusing_incompatible_os_or_compiler(self): setup = spack.solver.asp.SpackSolverSetup() result, _, _ = solver.driver.solve(setup, [root_spec], reuse=reusable_specs) concrete_spec = result.specs[0] - assert concrete_spec.satisfies("%gcc@4.5.0") - assert concrete_spec.satisfies("os=debian6") + assert concrete_spec.satisfies("%{}".format(s.compiler)) + assert concrete_spec.satisfies("os={}".format(s.architecture.os)) def test_git_hash_assigned_version_is_preferred(self): hash = "a" * 40 @@ -1837,3 +1929,34 @@ def test_installed_specs_disregard_conflicts(self, mutable_database, monkeypatch with spack.config.override("concretizer:reuse", True): s = Spec("mpich").concretized() assert s.satisfies("~debug") + + @pytest.mark.regression("32471") + def test_require_targets_are_allowed(self, mutable_database): + """Test that users can set target constraints under the require attribute.""" + if spack.config.get("config:concretizer") == "original": + pytest.xfail("Use case not supported by the original concretizer") + + # Configuration to be added to packages.yaml + external_conf = {"all": {"require": "target=%s" % spack.platforms.test.Test.front_end}} + spack.config.set("packages", external_conf) + + with spack.config.override("concretizer:reuse", False): + spec = Spec("mpich").concretized() + + for s in spec.traverse(): + assert s.satisfies("target=%s" % spack.platforms.test.Test.front_end) + + def 
test_external_python_extensions_have_dependency(self): + """Test that python extensions have access to a python dependency""" + external_conf = { + "py-extension1": { + "buildable": False, + "externals": [{"spec": "py-extension1@2.0", "prefix": "/fake"}], + } + } + spack.config.set("packages", external_conf) + + spec = Spec("py-extension2").concretized() + + assert "python" in spec["py-extension1"] + assert spec["python"] == spec["py-extension1"]["python"] diff --git a/lib/spack/spack/test/concretize_preferences.py b/lib/spack/spack/test/concretize_preferences.py index 9ad5a498ee8..b0ae008a72e 100644 --- a/lib/spack/spack/test/concretize_preferences.py +++ b/lib/spack/spack/test/concretize_preferences.py @@ -103,23 +103,16 @@ def test_preferred_variants_from_wildcard(self): update_packages("multivalue-variant", "variants", "foo=bar") assert_variant_values("multivalue-variant foo=*", foo=("bar",)) - def test_preferred_compilers(self): + @pytest.mark.parametrize( + "compiler_str,spec_str", + [("gcc@4.5.0", "mpileaks"), ("clang@12.0.0", "mpileaks"), ("gcc@4.5.0", "openmpi")], + ) + def test_preferred_compilers(self, compiler_str, spec_str): """Test preferred compilers are applied correctly""" - # Need to make sure the test uses an available compiler - compiler_list = spack.compilers.all_compiler_specs() - assert compiler_list - - # Try the first available compiler - compiler = str(compiler_list[0]) - update_packages("mpileaks", "compiler", [compiler]) - spec = concretize("mpileaks") - assert spec.compiler == spack.spec.CompilerSpec(compiler) - - # Try the last available compiler - compiler = str(compiler_list[-1]) - update_packages("mpileaks", "compiler", [compiler]) - spec = concretize("mpileaks os=redhat6") - assert spec.compiler == spack.spec.CompilerSpec(compiler) + spec = spack.spec.Spec(spec_str) + update_packages(spec.name, "compiler", [compiler_str]) + spec.concretize() + assert spec.compiler == spack.spec.CompilerSpec(compiler_str) def 
test_preferred_target(self, mutable_mock_repo): """Test preferred targets are applied correctly""" @@ -172,6 +165,53 @@ def test_preferred_providers(self): spec = concretize("mpileaks") assert "zmpi" in spec + def test_config_set_pkg_property_url(self, mutable_mock_repo): + """Test setting an existing attribute in the package class""" + update_packages( + "mpileaks", + "package_attributes", + {"url": "http://www.somewhereelse.com/mpileaks-1.0.tar.gz"}, + ) + spec = concretize("mpileaks") + assert spec.package.fetcher[0].url == "http://www.somewhereelse.com/mpileaks-2.3.tar.gz" + + update_packages("mpileaks", "package_attributes", {}) + spec = concretize("mpileaks") + assert spec.package.fetcher[0].url == "http://www.llnl.gov/mpileaks-2.3.tar.gz" + + def test_config_set_pkg_property_new(self, mutable_mock_repo): + """Test that you can set arbitrary attributes on the Package class""" + conf = syaml.load_config( + """\ +mpileaks: + package_attributes: + v1: 1 + v2: true + v3: yesterday + v4: "true" + v5: + x: 1 + y: 2 + v6: + - 1 + - 2 +""" + ) + spack.config.set("packages", conf, scope="concretize") + + spec = concretize("mpileaks") + assert spec.package.v1 == 1 + assert spec.package.v2 is True + assert spec.package.v3 == "yesterday" + assert spec.package.v4 == "true" + assert dict(spec.package.v5) == {"x": 1, "y": 2} + assert list(spec.package.v6) == [1, 2] + + update_packages("mpileaks", "package_attributes", {}) + spec = concretize("mpileaks") + with pytest.raises(AttributeError): + spec.package.v1 + def test_preferred(self): """ "Test packages with some version marked as preferred=True""" spec = Spec("python") @@ -341,6 +381,23 @@ def test_buildable_false_all_true_virtual(self): spec = Spec("mpich") assert spack.package_prefs.is_spec_buildable(spec) + def test_buildable_false_virtual_true_pacakge(self): + conf = syaml.load_config( + """\ +mpi: + buildable: false +mpich: + buildable: true +""" + ) + spack.config.set("packages", conf, scope="concretize") + + spec = 
Spec("zmpi") + assert not spack.package_prefs.is_spec_buildable(spec) + + spec = Spec("mpich") + assert spack.package_prefs.is_spec_buildable(spec) + def test_config_permissions_from_all(self, configure_permissions): # Although these aren't strictly about concretization, they are # configured in the same file and therefore convenient to test here. diff --git a/lib/spack/spack/test/concretize_requirements.py b/lib/spack/spack/test/concretize_requirements.py index 50cc6baa9a5..e43203b9696 100644 --- a/lib/spack/spack/test/concretize_requirements.py +++ b/lib/spack/spack/test/concretize_requirements.py @@ -7,6 +7,7 @@ import pytest +import spack.build_systems.generic import spack.config import spack.repo import spack.util.spack_yaml as syaml @@ -102,7 +103,7 @@ def fake_installs(monkeypatch, tmpdir): stage_path = str(tmpdir.ensure("fake-stage", dir=True)) universal_unused_stage = spack.stage.DIYStage(stage_path) monkeypatch.setattr( - spack.package_base.Package, "_make_stage", MakeStage(universal_unused_stage) + spack.build_systems.generic.Package, "_make_stage", MakeStage(universal_unused_stage) ) diff --git a/lib/spack/spack/test/config.py b/lib/spack/spack/test/config.py index 091efcea012..ba06f75394a 100644 --- a/lib/spack/spack/test/config.py +++ b/lib/spack/spack/test/config.py @@ -18,6 +18,7 @@ import spack.config import spack.environment as ev import spack.main +import spack.package_base import spack.paths import spack.repo import spack.schema.compilers @@ -381,6 +382,17 @@ def test_substitute_config_variables(mock_low_high_config, monkeypatch): os.path.join(mock_low_high_config.scopes["low"].path, os.path.join("foo", "bar", "baz")) ) + # test architecture information is in replacements + assert spack_path.canonicalize_path( + os.path.join("foo", "$platform", "bar") + ) == os.path.abspath(os.path.join("foo", "test", "bar")) + + host_target = spack.platforms.host().target("default_target") + host_target_family = str(host_target.microarchitecture.family) + 
assert spack_path.canonicalize_path( + os.path.join("foo", "$target_family", "bar") + ) == os.path.abspath(os.path.join("foo", host_target_family, "bar")) + packages_merge_low = {"packages": {"foo": {"variants": ["+v1"]}, "bar": {"variants": ["+v2"]}}} @@ -1174,13 +1186,13 @@ def test_license_dir_config(mutable_config, mock_packages): """Ensure license directory is customizable""" expected_dir = spack.paths.default_license_dir assert spack.config.get("config:license_dir") == expected_dir - assert spack.package.Package.global_license_dir == expected_dir + assert spack.package_base.PackageBase.global_license_dir == expected_dir assert spack.repo.path.get_pkg_class("a").global_license_dir == expected_dir rel_path = os.path.join(os.path.sep, "foo", "bar", "baz") spack.config.set("config:license_dir", rel_path) assert spack.config.get("config:license_dir") == rel_path - assert spack.package.Package.global_license_dir == rel_path + assert spack.package_base.PackageBase.global_license_dir == rel_path assert spack.repo.path.get_pkg_class("a").global_license_dir == rel_path diff --git a/lib/spack/spack/test/conftest.py b/lib/spack/spack/test/conftest.py index 6eab62a2735..4bf289be253 100644 --- a/lib/spack/spack/test/conftest.py +++ b/lib/spack/spack/test/conftest.py @@ -128,7 +128,14 @@ def mock_git_version_info(tmpdir, override_git_repos_cache_path): def commit(message): global commit_counter - git("commit", "--date", "2020-01-%02d 12:0:00 +0300" % commit_counter, "-am", message) + git( + "commit", + "--no-gpg-sign", + "--date", + "2020-01-%02d 12:0:00 +0300" % commit_counter, + "-am", + message, + ) commit_counter += 1 with working_dir(repo_path): @@ -584,6 +591,16 @@ def linux_os(): return LinuxOS(name=name, version=version) +@pytest.fixture +def ensure_debug(monkeypatch): + current_debug_level = tty.debug_level() + tty.set_debug(1) + + yield + + tty.set_debug(current_debug_level) + + @pytest.fixture(autouse=is_windows, scope="session") def platform_config(): 
spack.config.add_default_platform_scope(spack.platforms.real_host().name) @@ -1626,7 +1643,7 @@ def mock_executable(tmpdir): """ import jinja2 - shebang = "#!/bin/bash\n" if not is_windows else "@ECHO OFF" + shebang = "#!/bin/sh\n" if not is_windows else "@ECHO OFF" def _factory(name, output, subdir=("bin",)): f = tmpdir.ensure(*subdir, dir=True).join(name) @@ -1789,3 +1806,49 @@ def _spider(*args, **kwargs): @pytest.fixture(scope="function") def mock_tty_stdout(monkeypatch): monkeypatch.setattr(sys.stdout, "isatty", lambda: True) + + +@pytest.fixture +def prefix_like(): + return "package-0.0.0.a1-hashhashhashhashhashhashhashhash" + + +@pytest.fixture() +def prefix_tmpdir(tmpdir, prefix_like): + return tmpdir.mkdir(prefix_like) + + +@pytest.fixture() +def binary_with_rpaths(prefix_tmpdir): + """Factory fixture that compiles an ELF binary setting its RPATH. Relative + paths are encoded with `$ORIGIN` prepended. + """ + + def _factory(rpaths, message="Hello world!"): + source = prefix_tmpdir.join("main.c") + source.write( + """ + #include + int main(){{ + printf("{0}"); + }} + """.format( + message + ) + ) + gcc = spack.util.executable.which("gcc") + executable = source.dirpath("main.x") + # Encode relative RPATHs using `$ORIGIN` as the root prefix + rpaths = [x if os.path.isabs(x) else os.path.join("$ORIGIN", x) for x in rpaths] + rpath_str = ":".join(rpaths) + opts = [ + "-Wl,--disable-new-dtags", + "-Wl,-rpath={0}".format(rpath_str), + str(source), + "-o", + str(executable), + ] + gcc(*opts) + return executable + + return _factory diff --git a/lib/spack/spack/test/data/config/bootstrap.yaml b/lib/spack/spack/test/data/config/bootstrap.yaml index 8929d7ff35a..6adb7ab9967 100644 --- a/lib/spack/spack/test/data/config/bootstrap.yaml +++ b/lib/spack/spack/test/data/config/bootstrap.yaml @@ -1,5 +1,5 @@ bootstrap: sources: - name: 'github-actions' - metadata: $spack/share/spack/bootstrap/github-actions-v0.2 + metadata: $spack/share/spack/bootstrap/github-actions-v0.3 
trusted: {} diff --git a/lib/spack/spack/test/data/config/compilers.yaml b/lib/spack/spack/test/data/config/compilers.yaml index e0b0464976c..6f36c13b593 100644 --- a/lib/spack/spack/test/data/config/compilers.yaml +++ b/lib/spack/spack/test/data/config/compilers.yaml @@ -130,6 +130,7 @@ compilers: f77: /path/to/gfortran440 fc: /path/to/gfortran440 modules: 'None' + target: x86_64 - compiler: spec: clang@3.5 operating_system: redhat6 @@ -167,7 +168,7 @@ compilers: modules: 'None' target: x86_64 - compiler: - spec: gcc@foo + spec: gcc@10foo operating_system: redhat6 paths: cc: /path/to/gcc @@ -186,3 +187,167 @@ compilers: fc: /path/to/gfortran modules: 'None' target: x86_64 +- compiler: + spec: clang@12.0.0 + operating_system: {0.name}{0.version} + paths: + cc: /path/to/clang + cxx: /path/to/clang++ + f77: None + fc: None + modules: 'None' + target: aarch64 +- compiler: + spec: gcc@10.2.1 + operating_system: {0.name}{0.version} + paths: + cc: /path/to/gcc + cxx: /path/to/g++ + f77: None + fc: None + modules: 'None' + target: aarch64 +- compiler: + spec: clang@12.0.0 + operating_system: redhat6 + paths: + cc: /path/to/clang + cxx: /path/to/clang++ + f77: None + fc: None + modules: 'None' + target: aarch64 +- compiler: + spec: gcc@10.2.1 + operating_system: redhat6 + paths: + cc: /path/to/gcc + cxx: /path/to/g++ + f77: None + fc: None + modules: 'None' + target: aarch64 +- compiler: + spec: gcc@10.1.0 + operating_system: redhat6 + paths: + cc: /path/to/gcc + cxx: /path/to/g++ + f77: None + fc: None + modules: 'None' + target: aarch64 +- compiler: + spec: gcc@11.1.0 + operating_system: redhat6 + paths: + cc: /path/to/gcc + cxx: /path/to/g++ + f77: None + fc: None + flags: + cflags: -O0 -g + cxxflags: -O0 -g + fflags: -O0 -g + modules: 'None' + target: aarch64 +- compiler: + spec: clang@12.2.0 + operating_system: redhat6 + paths: + cc: /path/to/clang35 + cxx: /path/to/clang++35 + f77: None + fc: None + flags: + cflags: -O3 + cxxflags: -O3 + modules: 'None' + target: 
aarch64 +- compiler: + spec: gcc@10foo + operating_system: redhat6 + paths: + cc: /path/to/gcc + cxx: /path/to/g++ + f77: /path/to/gfortran + fc: /path/to/gfortran + modules: 'None' + target: aarch64 +- compiler: + spec: clang@12.0.0 + operating_system: {0.name}{0.version} + paths: + cc: /path/to/clang + cxx: /path/to/clang++ + f77: None + fc: None + modules: 'None' + target: x86_64 +- compiler: + spec: gcc@10.2.1 + operating_system: {0.name}{0.version} + paths: + cc: /path/to/gcc + cxx: /path/to/g++ + f77: None + fc: None + modules: 'None' + target: x86_64 +- compiler: + spec: clang@12.0.0 + operating_system: redhat6 + paths: + cc: /path/to/clang + cxx: /path/to/clang++ + f77: None + fc: None + modules: 'None' + target: x86_64 +- compiler: + spec: gcc@10.2.1 + operating_system: redhat6 + paths: + cc: /path/to/gcc + cxx: /path/to/g++ + f77: None + fc: None + modules: 'None' + target: x86_64 +- compiler: + spec: gcc@10.1.0 + operating_system: redhat6 + paths: + cc: /path/to/gcc + cxx: /path/to/g++ + f77: None + fc: None + modules: 'None' + target: x86_64 +- compiler: + spec: gcc@11.1.0 + operating_system: redhat6 + paths: + cc: /path/to/gcc + cxx: /path/to/g++ + f77: None + fc: None + flags: + cflags: -O0 -g + cxxflags: -O0 -g + fflags: -O0 -g + modules: 'None' + target: x86_64 +- compiler: + spec: clang@12.2.0 + operating_system: redhat6 + paths: + cc: /path/to/clang35 + cxx: /path/to/clang++35 + f77: None + fc: None + flags: + cflags: -O3 + cxxflags: -O3 + modules: 'None' + target: x86_64 diff --git a/lib/spack/spack/test/data/config/packages.yaml b/lib/spack/spack/test/data/config/packages.yaml index 7d5e06a74ce..c6ff731a7e5 100644 --- a/lib/spack/spack/test/data/config/packages.yaml +++ b/lib/spack/spack/test/data/config/packages.yaml @@ -7,18 +7,18 @@ packages: externaltool: buildable: False externals: - - spec: externaltool@1.0%gcc@4.5.0 + - spec: externaltool@1.0%gcc@10.2.1 prefix: /path/to/external_tool - - spec: externaltool@0.9%gcc@4.5.0 + - spec: 
externaltool@0.9%gcc@10.2.1 prefix: /usr - - spec: externaltool@0_8%gcc@4.5.0 + - spec: externaltool@0_8%gcc@10.2.1 prefix: /usr externalvirtual: buildable: False externals: - - spec: externalvirtual@2.0%clang@3.3 + - spec: externalvirtual@2.0%clang@12.0.0 prefix: /path/to/external_virtual_clang - - spec: externalvirtual@1.0%gcc@4.5.0 + - spec: externalvirtual@1.0%gcc@10.2.1 prefix: /path/to/external_virtual_gcc externalmodule: buildable: False @@ -49,4 +49,4 @@ packages: - spec: external-non-default-variant@3.8.7~foo~bar prefix: /usr version-test-dependency-preferred: - version: ['5.2.5'] \ No newline at end of file + version: ['5.2.5'] diff --git a/lib/spack/spack/test/database.py b/lib/spack/spack/test/database.py index fb22c6fec0d..bd06e5eb2c2 100644 --- a/lib/spack/spack/test/database.py +++ b/lib/spack/spack/test/database.py @@ -858,7 +858,7 @@ def test_mark_failed(mutable_database, monkeypatch, tmpdir, capsys): """Add coverage to mark_failed.""" def _raise_exc(lock): - raise lk.LockTimeoutError("Mock acquire_write failure") + raise lk.LockTimeoutError("write", "/mock-lock", 1.234, 10) # Ensure attempt to acquire write lock on the mark raises the exception monkeypatch.setattr(lk.Lock, "acquire_write", _raise_exc) diff --git a/lib/spack/spack/test/flag_handlers.py b/lib/spack/spack/test/flag_handlers.py index d55a059769e..3b871a2bc97 100644 --- a/lib/spack/spack/test/flag_handlers.py +++ b/lib/spack/spack/test/flag_handlers.py @@ -135,3 +135,15 @@ def test_ld_libs_cmake(self, temp_env): "-DCMAKE_CXX_STANDARD_LIBRARIES=-lfoo", "-DCMAKE_Fortran_STANDARD_LIBRARIES=-lfoo", } + + def test_flag_handler_no_modify_specs(self, temp_env): + def test_flag_handler(self, name, flags): + flags.append("-foo") + return (flags, None, None) + + s = spack.spec.Spec("cmake-client").concretized() + s.package.flag_handler = test_flag_handler + spack.build_environment.setup_package(s.package, False) + + assert not s.compiler_flags["cflags"] + assert os.environ["SPACK_CFLAGS"] == 
"-foo" diff --git a/lib/spack/spack/test/graph.py b/lib/spack/spack/test/graph.py index e7aafe4b0d2..60d041d60b4 100644 --- a/lib/spack/spack/test/graph.py +++ b/lib/spack/spack/test/graph.py @@ -103,6 +103,19 @@ def test_ascii_graph_mpileaks(config, mock_packages, monkeypatch): | o libdwarf |/ o libelf +""" + or graph_str + == r"""o mpileaks +|\ +o | callpath +|\| +| o mpich +| +o dyninst +|\ +o | libdwarf +|/ +o libelf """ ) diff --git a/lib/spack/spack/test/hooks/absolutify_elf_sonames.py b/lib/spack/spack/test/hooks/absolutify_elf_sonames.py new file mode 100644 index 00000000000..2163b776dc9 --- /dev/null +++ b/lib/spack/spack/test/hooks/absolutify_elf_sonames.py @@ -0,0 +1,81 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +import os + +import pytest + +import llnl.util.filesystem as fs + +import spack.platforms +from spack.hooks.absolutify_elf_sonames import ( + SharedLibrariesVisitor, + find_and_patch_sonames, +) +from spack.util.executable import Executable + + +def skip_unless_linux(f): + return pytest.mark.skipif( + str(spack.platforms.real_host()) != "linux", reason="only tested on linux for now" + )(f) + + +class ExecutableIntercept: + def __init__(self): + self.calls = [] + + def __call__(self, *args, **kwargs): + self.calls.append(args) + + @property + def returncode(self): + return 0 + + +@pytest.mark.requires_executables("gcc") +@skip_unless_linux +def test_shared_libraries_visitor(tmpdir): + """Integration test for soname rewriting""" + gcc = Executable("gcc") + + # Create a directory structure like this: + # ./no-soname.so # just a shared library without a soname + # ./soname.so # a shared library with a soname + # ./executable.so # an executable masquerading as a shared lib + # ./libskipme.so # a shared library with a soname + # ./mydir/parent_dir -> .. 
# a symlinked dir, causing a cycle + # ./mydir/skip_symlink -> ../libskipme # a symlink to a library + + with fs.working_dir(str(tmpdir)): + with open("hello.c", "w") as f: + f.write("int main(){return 0;}") + gcc("hello.c", "-o", "no-soname.so", "--shared") + gcc("hello.c", "-o", "soname.so", "--shared", "-Wl,-soname,example.so") + gcc("hello.c", "-pie", "-o", "executable.so") + gcc("hello.c", "-o", "libskipme.so", "-Wl,-soname,libskipme.so") + os.mkdir("my_dir") + os.symlink("..", os.path.join("my_dir", "parent_dir")) + os.symlink(os.path.join("..", "libskipme.so"), os.path.join("my_dir", "skip_symlink")) + + # Visit the whole prefix, but exclude `skip_symlink` + visitor = SharedLibrariesVisitor(exclude_list=["skip_symlink"]) + fs.visit_directory_tree(str(tmpdir), visitor) + relative_paths = visitor.get_shared_libraries_relative_paths() + + assert "no-soname.so" in relative_paths + assert "soname.so" in relative_paths + assert "executable.so" not in relative_paths + assert "libskipme.so" not in relative_paths + + # Run the full hook of finding libs and setting sonames. 
+ patchelf = ExecutableIntercept() + find_and_patch_sonames(str(tmpdir), ["skip_symlink"], patchelf) + assert len(patchelf.calls) == 2 + elf_1 = tmpdir.join("no-soname.so") + elf_2 = tmpdir.join("soname.so") + assert ("--set-soname", elf_1, elf_1) in patchelf.calls + assert ("--set-soname", elf_2, elf_2) in patchelf.calls diff --git a/lib/spack/spack/test/install.py b/lib/spack/spack/test/install.py index 9093593bede..da54216402b 100644 --- a/lib/spack/spack/test/install.py +++ b/lib/spack/spack/test/install.py @@ -144,13 +144,12 @@ def __getattr__(self, attr): return getattr(self.wrapped_stage, attr) -def test_partial_install_delete_prefix_and_stage(install_mockery, mock_fetch): +def test_partial_install_delete_prefix_and_stage(install_mockery, mock_fetch, working_env): s = Spec("canfail").concretized() instance_rm_prefix = s.package.remove_prefix try: - s.package.succeed = False s.package.remove_prefix = mock_remove_prefix with pytest.raises(MockInstallError): s.package.do_install() @@ -161,7 +160,7 @@ def test_partial_install_delete_prefix_and_stage(install_mockery, mock_fetch): # must clear failure markings for the package before re-installing it spack.store.db.clear_failure(s, True) - s.package.succeed = True + s.package.set_install_succeed() s.package.stage = MockStage(s.package.stage) s.package.do_install(restage=True) @@ -174,18 +173,20 @@ def test_partial_install_delete_prefix_and_stage(install_mockery, mock_fetch): @pytest.mark.disable_clean_stage_check -def test_failing_overwrite_install_should_keep_previous_installation(mock_fetch, install_mockery): +def test_failing_overwrite_install_should_keep_previous_installation( + mock_fetch, install_mockery, working_env +): """ Make sure that whenever `spack install --overwrite` fails, spack restores the original install prefix instead of cleaning it. 
""" # Do a successful install s = Spec("canfail").concretized() - s.package.succeed = True + s.package.set_install_succeed() # Do a failing overwrite install s.package.do_install() - s.package.succeed = False + s.package.set_install_fail() kwargs = {"overwrite": [s.dag_hash()]} with pytest.raises(Exception): @@ -238,13 +239,11 @@ def test_install_dependency_symlinks_pkg(install_mockery, mock_fetch, mutable_mo def test_install_times(install_mockery, mock_fetch, mutable_mock_repo): """Test install times added.""" - spec = Spec("dev-build-test-install-phases") - spec.concretize() - pkg = spec.package - pkg.do_install() + spec = Spec("dev-build-test-install-phases").concretized() + spec.package.do_install() # Ensure dependency directory exists after the installation. - install_times = os.path.join(pkg.prefix, ".spack", "install_times.json") + install_times = os.path.join(spec.package.prefix, ".spack", "install_times.json") assert os.path.isfile(install_times) # Ensure the phases are included @@ -346,12 +345,11 @@ def test_installed_upstream(install_upstream, mock_fetch): @pytest.mark.disable_clean_stage_check -def test_partial_install_keep_prefix(install_mockery, mock_fetch, monkeypatch): +def test_partial_install_keep_prefix(install_mockery, mock_fetch, monkeypatch, working_env): s = Spec("canfail").concretized() # If remove_prefix is called at any point in this test, that is an error - s.package.succeed = False # make the build fail - monkeypatch.setattr(spack.package_base.Package, "remove_prefix", mock_remove_prefix) + monkeypatch.setattr(spack.package_base.PackageBase, "remove_prefix", mock_remove_prefix) with pytest.raises(spack.build_environment.ChildError): s.package.do_install(keep_prefix=True) assert os.path.exists(s.package.prefix) @@ -359,7 +357,7 @@ def test_partial_install_keep_prefix(install_mockery, mock_fetch, monkeypatch): # must clear failure markings for the package before re-installing it spack.store.db.clear_failure(s, True) - s.package.succeed = 
True # make the build succeed + s.package.set_install_succeed() s.package.stage = MockStage(s.package.stage) s.package.do_install(keep_prefix=True) assert s.package.spec.installed @@ -368,14 +366,14 @@ def test_partial_install_keep_prefix(install_mockery, mock_fetch, monkeypatch): def test_second_install_no_overwrite_first(install_mockery, mock_fetch, monkeypatch): s = Spec("canfail").concretized() - monkeypatch.setattr(spack.package_base.Package, "remove_prefix", mock_remove_prefix) + monkeypatch.setattr(spack.package_base.PackageBase, "remove_prefix", mock_remove_prefix) - s.package.succeed = True + s.package.set_install_succeed() s.package.do_install() assert s.package.spec.installed # If Package.install is called after this point, it will fail - s.package.succeed = False + s.package.set_install_fail() s.package.do_install() @@ -431,7 +429,7 @@ def test_uninstall_by_spec_errors(mutable_database): @pytest.mark.disable_clean_stage_check -def test_nosource_pkg_install(install_mockery, mock_fetch, mock_packages, capfd): +def test_nosource_pkg_install(install_mockery, mock_fetch, mock_packages, capfd, ensure_debug): """Test install phases with the nosource package.""" spec = Spec("nosource").concretized() pkg = spec.package @@ -446,7 +444,9 @@ def test_nosource_pkg_install(install_mockery, mock_fetch, mock_packages, capfd) @pytest.mark.disable_clean_stage_check -def test_nosource_bundle_pkg_install(install_mockery, mock_fetch, mock_packages, capfd): +def test_nosource_bundle_pkg_install( + install_mockery, mock_fetch, mock_packages, capfd, ensure_debug +): """Test install phases with the nosource-bundle package.""" spec = Spec("nosource-bundle").concretized() pkg = spec.package @@ -589,7 +589,9 @@ def _install(src, dest): source = spec.package.stage.source_path config = os.path.join(source, "config.log") fs.touchp(config) - spec.package.archive_files = ["missing", "..", config] + monkeypatch.setattr( + type(spec.package), "archive_files", ["missing", "..", config], 
raising=False + ) spack.installer.log(spec.package) diff --git a/lib/spack/spack/test/installer.py b/lib/spack/spack/test/installer.py index 7f608d6c89f..4c85f4ba26c 100644 --- a/lib/spack/spack/test/installer.py +++ b/lib/spack/spack/test/installer.py @@ -467,6 +467,37 @@ def _conc_spec(compiler): assert packages +def test_update_tasks_for_compiler_packages_as_compiler(mock_packages, config, monkeypatch): + spec = spack.spec.Spec("trivial-install-test-package").concretized() + installer = inst.PackageInstaller([(spec.package, {})]) + + # Add a task to the queue + installer._add_init_task(spec.package, installer.build_requests[0], False, {}) + + # monkeypatch to make the list of compilers be what we test + def fake_package_list(compiler, architecture, pkgs): + return [(spec.package, True)] + + monkeypatch.setattr(inst, "_packages_needed_to_bootstrap_compiler", fake_package_list) + + installer._add_bootstrap_compilers("fake", "fake", "fake", None, {}) + + # Check that the only task is now a compiler task + assert len(installer.build_pq) == 1 + assert installer.build_pq[0][1].compiler + + +def test_bootstrapping_compilers_with_different_names_from_spec( + install_mockery, mutable_config, mock_fetch +): + with spack.config.override("config:install_missing_compilers", True): + with spack.concretize.disable_compiler_existence_check(): + spec = spack.spec.Spec("trivial-install-test-package%oneapi@22.2.0").concretized() + spec.package.do_install() + + assert spack.spec.CompilerSpec("oneapi@22.2.0") in spack.compilers.all_compiler_specs() + + def test_dump_packages_deps_ok(install_mockery, tmpdir, mock_packages): """Test happy path for dump_packages with dependencies.""" @@ -632,7 +663,10 @@ def test_check_deps_status_external(install_mockery, monkeypatch): # Mock the known dependent, b, as external so assumed to be installed monkeypatch.setattr(spack.spec.Spec, "external", True) installer._check_deps_status(request) - assert list(installer.installed)[0].startswith("b") + 
+ # exotic architectures will add dependencies on gnuconfig, which we want to ignore + installed = [x for x in installer.installed if not x.startswith("gnuconfig")] + assert installed[0].startswith("b") def test_check_deps_status_upstream(install_mockery, monkeypatch): @@ -643,7 +677,10 @@ def test_check_deps_status_upstream(install_mockery, monkeypatch): # Mock the known dependent, b, as installed upstream monkeypatch.setattr(spack.spec.Spec, "installed_upstream", True) installer._check_deps_status(request) - assert list(installer.installed)[0].startswith("b") + + # exotic architectures will add dependencies on gnuconfig, which we want to ignore + installed = [x for x in installer.installed if not x.startswith("gnuconfig")] + assert installed[0].startswith("b") def test_add_bootstrap_compilers(install_mockery, monkeypatch): diff --git a/lib/spack/spack/test/llnl/util/lang.py b/lib/spack/spack/test/llnl/util/lang.py index 94b067f3dc8..63090caa384 100644 --- a/lib/spack/spack/test/llnl/util/lang.py +++ b/lib/spack/spack/test/llnl/util/lang.py @@ -4,6 +4,7 @@ # SPDX-License-Identifier: (Apache-2.0 OR MIT) import os.path +import re import sys from datetime import datetime, timedelta from textwrap import dedent @@ -118,6 +119,14 @@ def test_pretty_string_to_date(format, pretty_string): assert t1 == t2 +def test_pretty_seconds(): + assert llnl.util.lang.pretty_seconds(2.1) == "2.100s" + assert llnl.util.lang.pretty_seconds(2.1 / 1000) == "2.100ms" + assert llnl.util.lang.pretty_seconds(2.1 / 1000 / 1000) == "2.100us" + assert llnl.util.lang.pretty_seconds(2.1 / 1000 / 1000 / 1000) == "2.100ns" + assert llnl.util.lang.pretty_seconds(2.1 / 1000 / 1000 / 1000 / 10) == "0.210ns" + + def test_match_predicate(): matcher = match_predicate(lambda x: True) assert matcher("foo") @@ -299,22 +308,25 @@ def inner(): top-level raised TypeError: ok""" ) + full_message = h.grouped_message(with_tracebacks=True) + no_line_numbers = re.sub(r"line [0-9]+,", "line xxx,", full_message) + 
assert ( - h.grouped_message(with_tracebacks=True) + no_line_numbers == dedent( """\ due to the following failures: inner method raised ValueError: wow! File "{0}", \ -line 290, in test_grouped_exception +line xxx, in test_grouped_exception inner() File "{0}", \ -line 287, in inner +line xxx, in inner raise ValueError("wow!") top-level raised TypeError: ok File "{0}", \ -line 293, in test_grouped_exception +line xxx, in test_grouped_exception raise TypeError("ok") """ ).format(__file__) diff --git a/lib/spack/spack/test/llnl/util/lock.py b/lib/spack/spack/test/llnl/util/lock.py index a9c6b64db8d..bf812bcc1db 100644 --- a/lib/spack/spack/test/llnl/util/lock.py +++ b/lib/spack/spack/test/llnl/util/lock.py @@ -1294,7 +1294,7 @@ def test_lock_in_current_directory(tmpdir): def test_attempts_str(): assert lk._attempts_str(0, 0) == "" assert lk._attempts_str(0.12, 1) == "" - assert lk._attempts_str(12.345, 2) == " after 12.35s and 2 attempts" + assert lk._attempts_str(12.345, 2) == " after 12.345s and 2 attempts" def test_lock_str(): diff --git a/lib/spack/spack/test/make_executable.py b/lib/spack/spack/test/make_executable.py index b7063e5e10d..efaff04de5e 100644 --- a/lib/spack/spack/test/make_executable.py +++ b/lib/spack/spack/test/make_executable.py @@ -53,24 +53,24 @@ def test_make_explicit(self): def test_make_one_job(self): make = MakeExecutable("make", 1) - self.assertEqual(make(output=str).strip(), "") - self.assertEqual(make("install", output=str).strip(), "install") + self.assertEqual(make(output=str).strip(), "-j1") + self.assertEqual(make("install", output=str).strip(), "-j1 install") def test_make_parallel_false(self): make = MakeExecutable("make", 8) - self.assertEqual(make(parallel=False, output=str).strip(), "") - self.assertEqual(make("install", parallel=False, output=str).strip(), "install") + self.assertEqual(make(parallel=False, output=str).strip(), "-j1") + self.assertEqual(make("install", parallel=False, output=str).strip(), "-j1 install") def 
test_make_parallel_disabled(self): make = MakeExecutable("make", 8) os.environ["SPACK_NO_PARALLEL_MAKE"] = "true" - self.assertEqual(make(output=str).strip(), "") - self.assertEqual(make("install", output=str).strip(), "install") + self.assertEqual(make(output=str).strip(), "-j1") + self.assertEqual(make("install", output=str).strip(), "-j1 install") os.environ["SPACK_NO_PARALLEL_MAKE"] = "1" - self.assertEqual(make(output=str).strip(), "") - self.assertEqual(make("install", output=str).strip(), "install") + self.assertEqual(make(output=str).strip(), "-j1") + self.assertEqual(make("install", output=str).strip(), "-j1 install") # These don't disable (false and random string) os.environ["SPACK_NO_PARALLEL_MAKE"] = "false" @@ -88,12 +88,12 @@ def test_make_parallel_precedence(self): # These should work os.environ["SPACK_NO_PARALLEL_MAKE"] = "true" - self.assertEqual(make(parallel=True, output=str).strip(), "") - self.assertEqual(make("install", parallel=True, output=str).strip(), "install") + self.assertEqual(make(parallel=True, output=str).strip(), "-j1") + self.assertEqual(make("install", parallel=True, output=str).strip(), "-j1 install") os.environ["SPACK_NO_PARALLEL_MAKE"] = "1" - self.assertEqual(make(parallel=True, output=str).strip(), "") - self.assertEqual(make("install", parallel=True, output=str).strip(), "install") + self.assertEqual(make(parallel=True, output=str).strip(), "-j1") + self.assertEqual(make("install", parallel=True, output=str).strip(), "-j1 install") # These don't disable (false and random string) os.environ["SPACK_NO_PARALLEL_MAKE"] = "false" @@ -113,3 +113,17 @@ def test_make_jobs_env(self): make(output=str, jobs_env="MAKE_PARALLELISM", _dump_env=dump_env).strip(), "-j8" ) self.assertEqual(dump_env["MAKE_PARALLELISM"], "8") + + def test_make_jobserver(self): + make = MakeExecutable("make", 8) + os.environ["MAKEFLAGS"] = "--jobserver-auth=X,Y" + self.assertEqual(make(output=str).strip(), "") + self.assertEqual(make(parallel=False, 
output=str).strip(), "-j1") + del os.environ["MAKEFLAGS"] + + def test_make_jobserver_not_supported(self): + make = MakeExecutable("make", 8, supports_jobserver=False) + os.environ["MAKEFLAGS"] = "--jobserver-auth=X,Y" + # Currently fallback on default job count, Maybe it should force -j1 ? + self.assertEqual(make(output=str).strip(), "-j8") + del os.environ["MAKEFLAGS"] diff --git a/lib/spack/spack/test/modules/lmod.py b/lib/spack/spack/test/modules/lmod.py index 58c013ef864..c0d228c2fea 100644 --- a/lib/spack/spack/test/modules/lmod.py +++ b/lib/spack/spack/test/modules/lmod.py @@ -24,7 +24,7 @@ pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows") -@pytest.fixture(params=["clang@3.3", "gcc@4.5.0"]) +@pytest.fixture(params=["clang@12.0.0", "gcc@10.2.1"]) def compiler(request): return request.param @@ -81,6 +81,15 @@ def test_file_layout(self, compiler, provider, factory, module_configuration): else: assert repetitions == 1 + def test_compilers_provided_different_name(self, factory, module_configuration): + module_configuration("complex_hierarchy") + module, spec = factory("intel-oneapi-compilers%clang@3.3") + + provides = module.conf.provides + + assert "compiler" in provides + assert provides["compiler"] == spack.spec.CompilerSpec("oneapi@3.0") + def test_simple_case(self, modulefile_content, module_configuration): """Tests the generation of a simple TCL module file.""" @@ -298,7 +307,7 @@ def test_modules_relative_to_view( ): with ev.Environment(str(tmpdir), with_view=True) as e: module_configuration("with_view") - install("cmake") + install("--add", "cmake") spec = spack.spec.Spec("cmake").concretized() diff --git a/lib/spack/spack/test/packages.py b/lib/spack/spack/test/packages.py index 07161ad83fc..3a22abcd7c2 100644 --- a/lib/spack/spack/test/packages.py +++ b/lib/spack/spack/test/packages.py @@ -321,3 +321,11 @@ def test_has_test_method_fails(capsys): captured = capsys.readouterr()[1] assert "is not a class" in 
captured + + +def test_package_deprecated_version(mock_packages, mock_fetch, mock_stage): + spec = Spec("deprecated-versions") + pkg_cls = spack.repo.path.get_pkg_class(spec.name) + + assert spack.package_base.deprecated_version(pkg_cls, "1.1.0") + assert not spack.package_base.deprecated_version(pkg_cls, "1.0.0") diff --git a/lib/spack/spack/test/packaging.py b/lib/spack/spack/test/packaging.py index f0d1c240e8e..f0294f5a74a 100644 --- a/lib/spack/spack/test/packaging.py +++ b/lib/spack/spack/test/packaging.py @@ -9,10 +9,10 @@ import argparse import os import platform -import re import shutil import stat import sys +from collections import OrderedDict import pytest @@ -28,7 +28,6 @@ from spack.fetch_strategy import FetchStrategyComposite, URLFetchStrategy from spack.paths import mock_gpg_keys_path from spack.relocate import ( - _placeholder, file_is_relocatable, macho_find_paths, macho_make_paths_normal, @@ -36,7 +35,7 @@ needs_binary_relocation, needs_text_relocation, relocate_links, - relocate_text, + unsafe_relocate_text, ) from spack.spec import Spec @@ -190,7 +189,7 @@ def test_buildcache(mock_archive, tmpdir): @pytest.mark.usefixtures("install_mockery") -def test_relocate_text(tmpdir): +def test_unsafe_relocate_text(tmpdir): spec = Spec("trivial-install-test-package") spec.concretize() with tmpdir.as_cwd(): @@ -203,7 +202,7 @@ def test_relocate_text(tmpdir): filenames = [filename] new_dir = "/opt/rh/devtoolset/" # Singleton dict doesn't matter if Ordered - relocate_text(filenames, {old_dir: new_dir}) + unsafe_relocate_text(filenames, {old_dir: new_dir}) with open(filename, "r") as script: for line in script: assert new_dir in line @@ -213,27 +212,42 @@ def test_relocate_text(tmpdir): def test_relocate_links(tmpdir): - with tmpdir.as_cwd(): - old_layout_root = os.path.join("%s" % tmpdir, "home", "spack", "opt", "spack") - old_install_prefix = os.path.join("%s" % old_layout_root, "debian6", "test") - old_binname = os.path.join(old_install_prefix, "binfile") - 
placeholder = _placeholder(old_layout_root) - re.sub(old_layout_root, placeholder, old_binname) - filenames = ["link.ln", "outsideprefix.ln"] - new_layout_root = os.path.join("%s" % tmpdir, "opt", "rh", "devtoolset") - new_install_prefix = os.path.join("%s" % new_layout_root, "test", "debian6") - new_linkname = os.path.join(new_install_prefix, "link.ln") - new_linkname2 = os.path.join(new_install_prefix, "outsideprefix.ln") - new_binname = os.path.join(new_install_prefix, "binfile") - mkdirp(new_install_prefix) - with open(new_binname, "w") as f: - f.write("\n") - os.utime(new_binname, None) - symlink(old_binname, new_linkname) - symlink("/usr/lib/libc.so", new_linkname2) - relocate_links(filenames, old_layout_root, old_install_prefix, new_install_prefix) - assert os.readlink(new_linkname) == new_binname - assert os.readlink(new_linkname2) == "/usr/lib/libc.so" + tmpdir.ensure("new_prefix_a", dir=True) + + own_prefix_path = str(tmpdir.join("prefix_a", "file")) + dep_prefix_path = str(tmpdir.join("prefix_b", "file")) + system_path = os.path.join(os.path.sep, "system", "path") + + # Old prefixes to new prefixes + prefix_to_prefix = OrderedDict( + [ + # map /prefix_a -> /new_prefix_a + (str(tmpdir.join("prefix_a")), str(tmpdir.join("new_prefix_a"))), + # map /prefix_b -> /new_prefix_b + (str(tmpdir.join("prefix_b")), str(tmpdir.join("new_prefix_b"))), + # map -> /fallback/path -- this is just to see we respect order. 
+ (str(tmpdir), os.path.join(os.path.sep, "fallback", "path")), + ] + ) + + with tmpdir.join("new_prefix_a").as_cwd(): + # To be relocated + os.symlink(own_prefix_path, "to_self") + os.symlink(dep_prefix_path, "to_dependency") + + # To be ignored + os.symlink(system_path, "to_system") + os.symlink("relative", "to_self_but_relative") + + relocate_links(["to_self", "to_dependency", "to_system"], prefix_to_prefix) + + # These two are relocated + assert os.readlink("to_self") == str(tmpdir.join("new_prefix_a", "file")) + assert os.readlink("to_dependency") == str(tmpdir.join("new_prefix_b", "file")) + + # These two are not. + assert os.readlink("to_system") == system_path + assert os.readlink("to_self_but_relative") == "relative" def test_needs_relocation(): diff --git a/lib/spack/spack/test/relocate.py b/lib/spack/spack/test/relocate.py index b79f0ba1a45..b07f8402df0 100644 --- a/lib/spack/spack/test/relocate.py +++ b/lib/spack/spack/test/relocate.py @@ -2,11 +2,13 @@ # Spack Project Developers. See the top-level COPYRIGHT file for details. # # SPDX-License-Identifier: (Apache-2.0 OR MIT) +import io import os import os.path import re import shutil import sys +from collections import OrderedDict import pytest @@ -20,6 +22,7 @@ import spack.store import spack.tengine import spack.util.executable +from spack.relocate import utf8_path_to_binary_regex, utf8_paths_to_single_binary_regex pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="Tests fail on Windows") @@ -80,42 +83,6 @@ def _factory(output): return _factory -@pytest.fixture() -def hello_world(tmpdir): - """Factory fixture that compiles an ELF binary setting its RPATH. Relative - paths are encoded with `$ORIGIN` prepended. 
- """ - - def _factory(rpaths, message="Hello world!"): - source = tmpdir.join("main.c") - source.write( - """ - #include - int main(){{ - printf("{0}"); - }} - """.format( - message - ) - ) - gcc = spack.util.executable.which("gcc") - executable = source.dirpath("main.x") - # Encode relative RPATHs using `$ORIGIN` as the root prefix - rpaths = [x if os.path.isabs(x) else os.path.join("$ORIGIN", x) for x in rpaths] - rpath_str = ":".join(rpaths) - opts = [ - "-Wl,--disable-new-dtags", - "-Wl,-rpath={0}".format(rpath_str), - str(source), - "-o", - str(executable), - ] - gcc(*opts) - return executable - - return _factory - - @pytest.fixture() def make_dylib(tmpdir_factory): """Create a shared library with unfriendly qualities. @@ -175,13 +142,13 @@ def _factory(): @pytest.fixture() -def copy_binary(): +def copy_binary(prefix_like): """Returns a function that copies a binary somewhere and returns the new location. """ def _copy_somewhere(orig_binary): - new_root = orig_binary.mkdtemp() + new_root = orig_binary.mkdtemp().mkdir(prefix_like) new_binary = new_root.join("main.x") shutil.copy(str(orig_binary), str(new_binary)) return new_binary @@ -226,24 +193,6 @@ def test_file_is_relocatable_errors(tmpdir): assert "is not an absolute path" in str(exc_info.value) -@pytest.mark.parametrize( - "patchelf_behavior,expected", - [ - ("echo ", []), - ("echo /opt/foo/lib:/opt/foo/lib64", ["/opt/foo/lib", "/opt/foo/lib64"]), - ("exit 1", []), - ], -) -def test_existing_rpaths(patchelf_behavior, expected, mock_patchelf): - # Here we are mocking an executable that is always called "patchelf" - # because that will skip the part where we try to build patchelf - # by ourselves. The executable will output some rpaths like - # `patchelf --print-rpath` would. 
- path = mock_patchelf(patchelf_behavior) - rpaths = spack.relocate._elf_rpaths_for(path) - assert rpaths == expected - - @pytest.mark.parametrize( "start_path,path_root,paths,expected", [ @@ -313,29 +262,33 @@ def test_set_elf_rpaths_warning(mock_patchelf): @pytest.mark.requires_executables("patchelf", "strings", "file", "gcc") @skip_unless_linux -def test_replace_prefix_bin(hello_world): +def test_replace_prefix_bin(binary_with_rpaths, prefix_like): + prefix = "/usr/" + prefix_like + prefix_bytes = prefix.encode("utf-8") + new_prefix = "/foo/" + prefix_like + new_prefix_bytes = new_prefix.encode("utf-8") # Compile an "Hello world!" executable and set RPATHs - executable = hello_world(rpaths=["/usr/lib", "/usr/lib64"]) + executable = binary_with_rpaths(rpaths=[prefix + "/lib", prefix + "/lib64"]) # Relocate the RPATHs - spack.relocate._replace_prefix_bin(str(executable), {b"/usr": b"/foo"}) + spack.relocate._replace_prefix_bin(str(executable), {prefix_bytes: new_prefix_bytes}) # Some compilers add rpaths so ensure changes included in final result - assert "/foo/lib:/foo/lib64" in rpaths_for(executable) + assert "%s/lib:%s/lib64" % (new_prefix, new_prefix) in rpaths_for(executable) @pytest.mark.requires_executables("patchelf", "strings", "file", "gcc") @skip_unless_linux -def test_relocate_elf_binaries_absolute_paths(hello_world, copy_binary, tmpdir): +def test_relocate_elf_binaries_absolute_paths(binary_with_rpaths, copy_binary, prefix_tmpdir): # Create an executable, set some RPATHs, copy it to another location - orig_binary = hello_world(rpaths=[str(tmpdir.mkdir("lib")), "/usr/lib64"]) + orig_binary = binary_with_rpaths(rpaths=[str(prefix_tmpdir.mkdir("lib")), "/usr/lib64"]) new_binary = copy_binary(orig_binary) spack.relocate.relocate_elf_binaries( binaries=[str(new_binary)], orig_root=str(orig_binary.dirpath()), new_root=None, # Not needed when relocating absolute paths - new_prefixes={str(tmpdir): "/foo"}, + new_prefixes={str(orig_binary.dirpath()): "/foo"}, 
rel=False, # Not needed when relocating absolute paths orig_prefix=None, @@ -348,9 +301,9 @@ def test_relocate_elf_binaries_absolute_paths(hello_world, copy_binary, tmpdir): @pytest.mark.requires_executables("patchelf", "strings", "file", "gcc") @skip_unless_linux -def test_relocate_elf_binaries_relative_paths(hello_world, copy_binary): +def test_relocate_elf_binaries_relative_paths(binary_with_rpaths, copy_binary): # Create an executable, set some RPATHs, copy it to another location - orig_binary = hello_world(rpaths=["lib", "lib64", "/opt/local/lib"]) + orig_binary = binary_with_rpaths(rpaths=["lib", "lib64", "/opt/local/lib"]) new_binary = copy_binary(orig_binary) spack.relocate.relocate_elf_binaries( @@ -369,9 +322,13 @@ def test_relocate_elf_binaries_relative_paths(hello_world, copy_binary): @pytest.mark.requires_executables("patchelf", "strings", "file", "gcc") @skip_unless_linux -def test_make_elf_binaries_relative(hello_world, copy_binary, tmpdir): - orig_binary = hello_world( - rpaths=[str(tmpdir.mkdir("lib")), str(tmpdir.mkdir("lib64")), "/opt/local/lib"] +def test_make_elf_binaries_relative(binary_with_rpaths, copy_binary, prefix_tmpdir): + orig_binary = binary_with_rpaths( + rpaths=[ + str(prefix_tmpdir.mkdir("lib")), + str(prefix_tmpdir.mkdir("lib64")), + "/opt/local/lib", + ] ) new_binary = copy_binary(orig_binary) @@ -391,26 +348,30 @@ def test_raise_if_not_relocatable(monkeypatch): @pytest.mark.requires_executables("patchelf", "strings", "file", "gcc") @skip_unless_linux -def test_relocate_text_bin(hello_world, copy_binary, tmpdir): - orig_binary = hello_world( - rpaths=[str(tmpdir.mkdir("lib")), str(tmpdir.mkdir("lib64")), "/opt/local/lib"], - message=str(tmpdir), +def test_relocate_text_bin(binary_with_rpaths, copy_binary, prefix_tmpdir): + orig_binary = binary_with_rpaths( + rpaths=[ + str(prefix_tmpdir.mkdir("lib")), + str(prefix_tmpdir.mkdir("lib64")), + "/opt/local/lib", + ], + message=str(prefix_tmpdir), ) new_binary = 
copy_binary(orig_binary) - # Check original directory is in the executabel and the new one is not - assert text_in_bin(str(tmpdir), new_binary) + # Check original directory is in the executable and the new one is not + assert text_in_bin(str(prefix_tmpdir), new_binary) assert not text_in_bin(str(new_binary.dirpath()), new_binary) # Check this call succeed orig_path_bytes = str(orig_binary.dirpath()).encode("utf-8") new_path_bytes = str(new_binary.dirpath()).encode("utf-8") - spack.relocate.relocate_text_bin([str(new_binary)], {orig_path_bytes: new_path_bytes}) + spack.relocate.unsafe_relocate_text_bin([str(new_binary)], {orig_path_bytes: new_path_bytes}) # Check original directory is not there anymore and it was # substituted with the new one - assert not text_in_bin(str(tmpdir), new_binary) + assert not text_in_bin(str(prefix_tmpdir), new_binary) assert text_in_bin(str(new_binary.dirpath()), new_binary) @@ -421,7 +382,7 @@ def test_relocate_text_bin_raise_if_new_prefix_is_longer(tmpdir): with open(fpath, "w") as f: f.write("/short") with pytest.raises(spack.relocate.BinaryTextReplaceError): - spack.relocate.relocate_text_bin([fpath], {short_prefix: long_prefix}) + spack.relocate.unsafe_relocate_text_bin([fpath], {short_prefix: long_prefix}) @pytest.mark.requires_executables("install_name_tool", "file", "cc") @@ -476,3 +437,170 @@ def test_fixup_macos_rpaths(make_dylib, make_object_file): # (this is a corner case for GCC installation) (root, filename) = make_object_file() assert not fixup_rpath(root, filename) + + +def test_text_relocation_regex_is_safe(): + # Test whether prefix regex is properly escaped + string = b"This does not match /a/, but this does: /[a-z]/." 
+ assert utf8_path_to_binary_regex("/[a-z]/").search(string).group(0) == b"/[a-z]/" + + +def test_utf8_paths_to_single_binary_regex(): + regex = utf8_paths_to_single_binary_regex(["/first/path", "/second/path", "/safe/[a-z]"]) + # Match nothing + assert not regex.search(b"text /neither/first/path text /the/second/path text") + + # Match first + string = b"contains both /first/path/subdir and /second/path/sub" + assert regex.search(string).group(0) == b"/first/path/subdir" + + # Match second + string = b"contains both /not/first/path/subdir but /second/path/subdir" + assert regex.search(string).group(0) == b"/second/path/subdir" + + # Match "unsafe" dir name + string = b"don't match /safe/a/path but do match /safe/[a-z]/file" + assert regex.search(string).group(0) == b"/safe/[a-z]/file" + + +def test_ordered_replacement(): + # This tests whether binary text replacement respects order, so that + # a long package prefix is replaced before a shorter sub-prefix like + # the root of the spack store (as a fallback). + def replace_and_expect(prefix_map, before, after=None, suffix_safety_size=7): + f = io.BytesIO(before) + spack.relocate.apply_binary_replacements(f, OrderedDict(prefix_map), suffix_safety_size) + f.seek(0) + assert f.read() == after + + # The case of having a non-null terminated common suffix. + replace_and_expect( + [ + (b"/old-spack/opt/specific-package", b"/first/specific-package"), + (b"/old-spack/opt", b"/sec/spack/opt"), + ], + b"Binary with /old-spack/opt/specific-package and /old-spack/opt", + b"Binary with /////////first/specific-package and /sec/spack/opt", + suffix_safety_size=7, + ) + + # The case of having a direct null terminated common suffix. 
+ replace_and_expect( + [ + (b"/old-spack/opt/specific-package", b"/first/specific-package"), + (b"/old-spack/opt", b"/sec/spack/opt"), + ], + b"Binary with /old-spack/opt/specific-package\0 and /old-spack/opt\0", + b"Binary with /////////first/specific-package\0 and /sec/spack/opt\0", + suffix_safety_size=7, + ) + + # Testing the order of operations (not null terminated, long enough common suffix) + replace_and_expect( + [ + (b"/old-spack/opt", b"/s/spack/opt"), + (b"/old-spack/opt/specific-package", b"/first/specific-package"), + ], + b"Binary with /old-spack/opt/specific-package and /old-spack/opt", + b"Binary with ///s/spack/opt/specific-package and ///s/spack/opt", + suffix_safety_size=7, + ) + + # Testing the order of operations (null terminated, long enough common suffix) + replace_and_expect( + [ + (b"/old-spack/opt", b"/s/spack/opt"), + (b"/old-spack/opt/specific-package", b"/first/specific-package"), + ], + b"Binary with /old-spack/opt/specific-package\0 and /old-spack/opt\0", + b"Binary with ///s/spack/opt/specific-package\0 and ///s/spack/opt\0", + suffix_safety_size=7, + ) + + # Null terminated within the lookahead window, common suffix long enough + replace_and_expect( + [(b"/old-spack/opt/specific-package", b"/opt/specific-XXXXage")], + b"Binary with /old-spack/opt/specific-package/sub\0 data", + b"Binary with ///////////opt/specific-XXXXage/sub\0 data", + suffix_safety_size=7, + ) + + # Null terminated within the lookahead window, common suffix too short, but + # shortening is enough to spare more than 7 bytes of old suffix. + replace_and_expect( + [(b"/old-spack/opt/specific-package", b"/opt/specific-XXXXXge")], + b"Binary with /old-spack/opt/specific-package/sub\0 data", + b"Binary with /opt/specific-XXXXXge/sub\0ckage/sub\0 data", # ckage/sub = 9 bytes + suffix_safety_size=7, + ) + + # Null terminated within the lookahead window, common suffix too short, + # shortening leaves exactly 7 suffix bytes untouched, amazing! 
+ replace_and_expect( + [(b"/old-spack/opt/specific-package", b"/spack/specific-XXXXXge")], + b"Binary with /old-spack/opt/specific-package/sub\0 data", + b"Binary with /spack/specific-XXXXXge/sub\0age/sub\0 data", # age/sub = 7 bytes + suffix_safety_size=7, + ) + + # Null terminated within the lookahead window, common suffix too short, + # shortening doesn't leave space for 7 bytes, sad! + error_msg = "Cannot replace {!r} with {!r} in the C-string {!r}.".format( + b"/old-spack/opt/specific-package", + b"/snacks/specific-XXXXXge", + b"/old-spack/opt/specific-package/sub", + ) + with pytest.raises(spack.relocate.CannotShrinkCString, match=error_msg): + replace_and_expect( + [(b"/old-spack/opt/specific-package", b"/snacks/specific-XXXXXge")], + b"Binary with /old-spack/opt/specific-package/sub\0 data", + # expect failure! + suffix_safety_size=7, + ) + + # Check that it works when changing suffix_safety_size. + replace_and_expect( + [(b"/old-spack/opt/specific-package", b"/snacks/specific-XXXXXXe")], + b"Binary with /old-spack/opt/specific-package/sub\0 data", + b"Binary with /snacks/specific-XXXXXXe/sub\0ge/sub\0 data", + suffix_safety_size=6, + ) + + # Finally check the case of no shortening but a long enough common suffix. 
+ replace_and_expect( + [(b"pkg-gwixwaalgczp6", b"pkg-zkesfralgczp6")], + b"Binary with pkg-gwixwaalgczp6/config\0 data", + b"Binary with pkg-zkesfralgczp6/config\0 data", + suffix_safety_size=7, + ) + + # Too short matching suffix, identical string length + error_msg = "Cannot replace {!r} with {!r} in the C-string {!r}.".format( + b"pkg-gwixwaxlgczp6", + b"pkg-zkesfrzlgczp6", + b"pkg-gwixwaxlgczp6", + ) + with pytest.raises(spack.relocate.CannotShrinkCString, match=error_msg): + replace_and_expect( + [(b"pkg-gwixwaxlgczp6", b"pkg-zkesfrzlgczp6")], + b"Binary with pkg-gwixwaxlgczp6\0 data", + # expect failure + suffix_safety_size=7, + ) + + # Finally, make sure that the regex is not greedily finding the LAST null byte + # it should find the first null byte in the window. In this test we put one null + # at a distance where we cant keep a long enough suffix, and one where we can, + # so we should expect failure when the first null is used. + error_msg = "Cannot replace {!r} with {!r} in the C-string {!r}.".format( + b"pkg-abcdef", + b"pkg-xyzabc", + b"pkg-abcdef", + ) + with pytest.raises(spack.relocate.CannotShrinkCString, match=error_msg): + replace_and_expect( + [(b"pkg-abcdef", b"pkg-xyzabc")], + b"Binary with pkg-abcdef\0/xx\0", # def\0/xx is 7 bytes. 
+ # expect failure + suffix_safety_size=7, + ) diff --git a/lib/spack/spack/test/repo.py b/lib/spack/spack/test/repo.py index fcd009edec2..402723d226c 100644 --- a/lib/spack/spack/test/repo.py +++ b/lib/spack/spack/test/repo.py @@ -113,14 +113,14 @@ def test_absolute_import_spack_packages_as_python_modules(mock_packages): assert hasattr(spack.pkg.builtin.mock, "mpileaks") assert hasattr(spack.pkg.builtin.mock.mpileaks, "Mpileaks") assert isinstance(spack.pkg.builtin.mock.mpileaks.Mpileaks, spack.package_base.PackageMeta) - assert issubclass(spack.pkg.builtin.mock.mpileaks.Mpileaks, spack.package_base.Package) + assert issubclass(spack.pkg.builtin.mock.mpileaks.Mpileaks, spack.package_base.PackageBase) def test_relative_import_spack_packages_as_python_modules(mock_packages): from spack.pkg.builtin.mock.mpileaks import Mpileaks assert isinstance(Mpileaks, spack.package_base.PackageMeta) - assert issubclass(Mpileaks, spack.package_base.Package) + assert issubclass(Mpileaks, spack.package_base.PackageBase) def test_all_virtual_packages_have_default_providers(): diff --git a/lib/spack/spack/test/rewiring.py b/lib/spack/spack/test/rewiring.py index 936ba1e78a4..085fb950bbd 100644 --- a/lib/spack/spack/test/rewiring.py +++ b/lib/spack/spack/test/rewiring.py @@ -18,7 +18,7 @@ if sys.platform == "darwin": args.extend(["/usr/bin/clang++", "install_name_tool"]) else: - args.extend(["/usr/bin/g++", "patchelf"]) + args.extend(["g++", "patchelf"]) @pytest.mark.requires_executables(*args) diff --git a/lib/spack/spack/test/sbang.py b/lib/spack/spack/test/sbang.py index ebc9a85347e..876f3f4d0f6 100644 --- a/lib/spack/spack/test/sbang.py +++ b/lib/spack/spack/test/sbang.py @@ -268,6 +268,13 @@ def test_shebang_handles_non_writable_files(script_dir, sbang_line): @pytest.fixture(scope="function") def configure_group_perms(): + # On systems with remote groups, the primary user group may be remote + # and grp does not act on remote groups. 
+ # To ensure we find a group we can operate on, we get take the first group + # listed which has the current user as a member. + gid = fs.group_ids(os.getuid())[0] + group_name = grp.getgrgid(gid).gr_name + conf = syaml.load_config( """\ all: @@ -276,7 +283,7 @@ def configure_group_perms(): write: group group: {0} """.format( - grp.getgrgid(os.getegid()).gr_name + group_name ) ) spack.config.set("packages", conf, scope="user") diff --git a/lib/spack/spack/test/spec_dag.py b/lib/spack/spack/test/spec_dag.py index 66e5a0bc2fd..898ae15f74f 100644 --- a/lib/spack/spack/test/spec_dag.py +++ b/lib/spack/spack/test/spec_dag.py @@ -1068,3 +1068,38 @@ def test_adding_same_deptype_with_the_same_name_raises( p.add_dependency_edge(c1, deptype=c1_deptypes) with pytest.raises(spack.error.SpackError): p.add_dependency_edge(c2, deptype=c2_deptypes) + + +@pytest.mark.regression("33499") +def test_indexing_prefers_direct_or_transitive_link_deps(): + # Test whether spec indexing prefers direct/transitive link type deps over deps of + # build/run/test deps, and whether it does fall back to a full dag search. + root = Spec("root") + + # Use a and z to since we typically traverse by edges sorted alphabetically. + a1 = Spec("a1") + a2 = Spec("a2") + z1 = Spec("z1") + z2 = Spec("z2") + + # Same package, different spec. + z3_flavor_1 = Spec("z3 +through_a1") + z3_flavor_2 = Spec("z3 +through_z1") + + root.add_dependency_edge(a1, deptype=("build", "run", "test")) + + # unique package as a dep of a build/run/test type dep. + a1.add_dependency_edge(a2, deptype="all") + a1.add_dependency_edge(z3_flavor_1, deptype="all") + + # chain of link type deps root -> z1 -> z2 -> z3 + root.add_dependency_edge(z1, deptype="link") + z1.add_dependency_edge(z2, deptype="link") + z2.add_dependency_edge(z3_flavor_2, deptype="link") + + # Indexing should prefer the link-type dep. 
+ assert "through_z1" in root["z3"].variants + assert "through_a1" in a1["z3"].variants + + # Ensure that the full DAG is still searched + assert root["a2"] diff --git a/lib/spack/spack/test/spec_list.py b/lib/spack/spack/test/spec_list.py index 487417dff10..8923f6e1f68 100644 --- a/lib/spack/spack/test/spec_list.py +++ b/lib/spack/spack/test/spec_list.py @@ -200,3 +200,22 @@ def test_spec_list_matrix_exclude(self, mock_packages): ] speclist = SpecList("specs", matrix) assert len(speclist.specs) == 1 + + @pytest.mark.regression("22991") + def test_spec_list_constraints_with_structure( + self, mock_packages, mock_fetch, install_mockery + ): + # Setup by getting hash and installing package with dep + libdwarf_spec = Spec("libdwarf").concretized() + libdwarf_spec.package.do_install() + + # Create matrix + matrix = { + "matrix": [["mpileaks"], ["^callpath"], ["^libdwarf/%s" % libdwarf_spec.dag_hash()]] + } + + # ensure the concrete spec was retained in the matrix entry of which + # it is a dependency + speclist = SpecList("specs", [matrix]) + assert len(speclist.specs) == 1 + assert libdwarf_spec in speclist.specs[0] diff --git a/lib/spack/spack/test/spec_semantics.py b/lib/spack/spack/test/spec_semantics.py index 7100cfdb5fb..04aa82797cf 100644 --- a/lib/spack/spack/test/spec_semantics.py +++ b/lib/spack/spack/test/spec_semantics.py @@ -247,16 +247,23 @@ def test_satisfies_virtual_dependency_versions(self): def test_satisfies_matching_variant(self): check_satisfies("mpich+foo", "mpich+foo") + check_satisfies("mpich++foo", "mpich++foo") check_satisfies("mpich~foo", "mpich~foo") + check_satisfies("mpich~~foo", "mpich~~foo") check_satisfies("mpich foo=1", "mpich foo=1") + check_satisfies("mpich foo==1", "mpich foo==1") # confirm that synonymous syntax works correctly check_satisfies("mpich+foo", "mpich foo=True") + check_satisfies("mpich++foo", "mpich foo=True") check_satisfies("mpich foo=true", "mpich+foo") + check_satisfies("mpich foo==true", "mpich++foo") 
check_satisfies("mpich~foo", "mpich foo=FALSE") + check_satisfies("mpich~~foo", "mpich foo=FALSE") check_satisfies("mpich foo=False", "mpich~foo") + check_satisfies("mpich foo==False", "mpich~foo") check_satisfies("mpich foo=*", "mpich~foo") - check_satisfies("mpich +foo", "mpich foo=*") + check_satisfies("mpich+foo", "mpich foo=*") def test_satisfies_multi_value_variant(self): # Check quoting @@ -295,6 +302,7 @@ def test_satisfies_single_valued_variant(self): # Assert that an autospec generated from a literal # gives the right result for a single valued variant assert "foobar=bar" in a + assert "foobar==bar" in a assert "foobar=baz" not in a assert "foobar=fee" not in a @@ -415,21 +423,32 @@ def test_unsatisfiable_variants(self): check_satisfies("mpich", "mpich+foo", False) check_satisfies("mpich", "mpich~foo", False) check_satisfies("mpich", "mpich foo=1", False) + check_satisfies("mpich", "mpich++foo", False) + check_satisfies("mpich", "mpich~~foo", False) + check_satisfies("mpich", "mpich foo==1", False) # 'mpich' is concrete: check_unsatisfiable("mpich", "mpich+foo", True) check_unsatisfiable("mpich", "mpich~foo", True) check_unsatisfiable("mpich", "mpich foo=1", True) + check_unsatisfiable("mpich", "mpich++foo", True) + check_unsatisfiable("mpich", "mpich~~foo", True) + check_unsatisfiable("mpich", "mpich foo==1", True) def test_unsatisfiable_variant_mismatch(self): # No matchi in specs check_unsatisfiable("mpich~foo", "mpich+foo") check_unsatisfiable("mpich+foo", "mpich~foo") check_unsatisfiable("mpich foo=True", "mpich foo=False") + check_unsatisfiable("mpich~~foo", "mpich++foo") + check_unsatisfiable("mpich++foo", "mpich~~foo") + check_unsatisfiable("mpich foo==True", "mpich foo==False") def test_satisfies_matching_compiler_flag(self): check_satisfies('mpich cppflags="-O3"', 'mpich cppflags="-O3"') check_satisfies('mpich cppflags="-O3 -Wall"', 'mpich cppflags="-O3 -Wall"') + check_satisfies('mpich cppflags=="-O3"', 'mpich cppflags=="-O3"') + 
check_satisfies('mpich cppflags=="-O3 -Wall"', 'mpich cppflags=="-O3 -Wall"') def test_satisfies_unconstrained_compiler_flag(self): # only asked for mpich, no constraints. Any will do. @@ -453,8 +472,9 @@ def test_copy_satisfies_transitive(self): assert copy[s.name].satisfies(s) def test_unsatisfiable_compiler_flag_mismatch(self): - # No matchi in specs + # No match in specs check_unsatisfiable('mpich cppflags="-O3"', 'mpich cppflags="-O2"') + check_unsatisfiable('mpich cppflags="-O3"', 'mpich cppflags=="-O3"') def test_satisfies_virtual(self): # Don't use check_satisfies: it checks constrain() too, and @@ -554,6 +574,12 @@ def test_constrain_variants(self): check_constrain("libelf+debug~foo", "libelf+debug", "libelf~foo") check_constrain("libelf+debug~foo", "libelf+debug", "libelf+debug~foo") + check_constrain("libelf++debug++foo", "libelf++debug", "libelf+debug+foo") + check_constrain("libelf debug==2 foo==1", "libelf debug==2", "libelf foo=1") + check_constrain("libelf debug==2 foo==1", "libelf debug==2", "libelf debug=2 foo=1") + + check_constrain("libelf++debug~~foo", "libelf++debug", "libelf++debug~foo") + def test_constrain_multi_value_variant(self): check_constrain( 'multivalue-variant foo="bar,baz"', @@ -582,6 +608,17 @@ def test_constrain_compiler_flags(self): 'libelf cflags="-O3" cppflags="-Wall"', ) + check_constrain( + 'libelf cflags="-O3" cppflags=="-Wall"', + 'libelf cppflags=="-Wall"', + 'libelf cflags="-O3"', + ) + check_constrain( + 'libelf cflags=="-O3" cppflags=="-Wall"', + 'libelf cflags=="-O3"', + 'libelf cflags=="-O3" cppflags=="-Wall"', + ) + def test_constrain_architecture(self): check_constrain( "libelf target=default_target os=default_os", @@ -620,6 +657,7 @@ def test_constrain_changed(self): check_constrain_changed("libelf", "~debug") check_constrain_changed("libelf", "debug=2") check_constrain_changed("libelf", 'cppflags="-O3"') + check_constrain_changed("libelf", 'cppflags=="-O3"') platform = spack.platforms.host() 
check_constrain_changed("libelf", "target=" + platform.target("default_target").name) @@ -636,6 +674,7 @@ def test_constrain_not_changed(self): check_constrain_not_changed("libelf debug=2", "debug=2") check_constrain_not_changed("libelf debug=2", "debug=*") check_constrain_not_changed('libelf cppflags="-O3"', 'cppflags="-O3"') + check_constrain_not_changed('libelf cppflags=="-O3"', 'cppflags=="-O3"') platform = spack.platforms.host() default_target = platform.target("default_target").name @@ -791,7 +830,7 @@ def test_spec_formatting_escapes(self): spec.format(fmt_str) def test_spec_deprecated_formatting(self): - spec = Spec("libelf cflags=-O2") + spec = Spec("libelf cflags==-O2") spec.concretize() # Since the default is the full spec see if the string rep of @@ -841,7 +880,7 @@ def test_spec_flags_maintain_order(self): # Spack was assembling flags in a manner that could result in # different orderings for repeated concretizations of the same # spec and config - spec_str = "libelf %gcc@4.7.2 os=redhat6" + spec_str = "libelf %gcc@11.1.0 os=redhat6" for _ in range(25): s = Spec(spec_str).concretized() assert all( diff --git a/lib/spack/spack/test/spec_syntax.py b/lib/spack/spack/test/spec_syntax.py index c9394bf8ab6..cf1ce971d01 100644 --- a/lib/spack/spack/test/spec_syntax.py +++ b/lib/spack/spack/test/spec_syntax.py @@ -277,18 +277,28 @@ def test_canonicalize(self): "x ^y@1,2:3,4%intel@1,2,3,4+a~b+c~d+e~f", "x ^y~f+e~d+c~b+a@4,2:3,1%intel@4,3,2,1" ) + default_target = spack.platforms.test.Test.default self.check_parse( - "x arch=test-redhat6-None" " ^y arch=test-None-core2" " ^z arch=linux-None-None", + "x arch=test-redhat6-None" + + (" ^y arch=test-None-%s" % default_target) + + " ^z arch=linux-None-None", "x os=fe " "^y target=be " "^z platform=linux", ) self.check_parse( - "x arch=test-debian6-core2" " ^y arch=test-debian6-core2", + ("x arch=test-debian6-%s" % default_target) + + (" ^y arch=test-debian6-%s" % default_target), "x os=default_os 
target=default_target" " ^y os=default_os target=default_target", ) self.check_parse("x ^y", "x@: ^y@:") + def test_parse_redundant_deps(self): + self.check_parse("x ^y@foo", "x ^y@foo ^y@foo") + self.check_parse("x ^y@foo+bar", "x ^y@foo ^y+bar") + self.check_parse("x ^y@foo+bar", "x ^y@foo+bar ^y") + self.check_parse("x ^y@foo+bar", "x ^y ^y@foo+bar") + def test_parse_errors(self): errors = ["x@@1.2", "x ^y@@1.2", "x@1.2::", "x::"] self._check_raises(SpecParseError, errors) @@ -477,7 +487,7 @@ def test_multiple_versions(self): self._check_raises(MultipleVersionError, multiples) def test_duplicate_dependency(self): - self._check_raises(DuplicateDependencyError, ["x ^y ^y"]) + self._check_raises(DuplicateDependencyError, ["x ^y@1 ^y@2"]) def test_duplicate_compiler(self): duplicates = [ @@ -912,3 +922,9 @@ def test_git_ref_spec_equivalences(self, mock_packages, mock_stage): assert not s_no_git.satisfies(s1) assert not s2.satisfies(s1) assert not s3.satisfies(s1) + + @pytest.mark.regression("32471") + @pytest.mark.parametrize("spec_str", ["target=x86_64", "os=redhat6", "target=x86_64:"]) + def test_platform_is_none_if_not_present(self, spec_str): + s = sp.Spec(spec_str) + assert s.architecture.platform is None, s diff --git a/lib/spack/spack/test/test_suite.py b/lib/spack/spack/test/test_suite.py index 3d8ebace395..7db59526ca3 100644 --- a/lib/spack/spack/test/test_suite.py +++ b/lib/spack/spack/test/test_suite.py @@ -7,8 +7,6 @@ import pytest -import llnl.util.tty as tty - import spack.install_test import spack.spec @@ -20,16 +18,6 @@ def _true(*args, **kwargs): return True -@pytest.fixture -def ensure_debug(monkeypatch): - current_debug_level = tty.debug_level() - tty.set_debug(1) - - yield - - tty.set_debug(current_debug_level) - - def ensure_results(filename, expected): assert os.path.exists(filename) with open(filename, "r") as fd: diff --git a/lib/spack/spack/test/traverse.py b/lib/spack/spack/test/traverse.py new file mode 100644 index 
00000000000..663f323e675 --- /dev/null +++ b/lib/spack/spack/test/traverse.py @@ -0,0 +1,211 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +import spack.traverse as traverse +from spack.spec import Spec + + +def key_by_hash(spec): + return spec.dag_hash() + + +def test_breadth_first_traversal(config, mock_packages): + # That that depth of discovery is non-decreasing + s = Spec("dttop").concretized() + depths = [ + depth + for (depth, _) in traverse.traverse_nodes( + [s], order="breadth", key=key_by_hash, depth=True + ) + ] + assert depths == sorted(depths) + + +def test_breadth_first_deptype_traversal(config, mock_packages): + s = Spec("dtuse").concretized() + + names = [ + "dtuse", + "dttop", + "dtbuild1", + "dtlink1", + "dtbuild2", + "dtlink2", + "dtlink3", + "dtlink4", + ] + + traversal = traverse.traverse_nodes( + [s], order="breadth", key=key_by_hash, deptype=("build", "link") + ) + assert [x.name for x in traversal] == names + + +def test_breadth_firsrt_traversal_deptype_with_builddeps(config, mock_packages): + s = Spec("dttop").concretized() + + names = ["dttop", "dtbuild1", "dtlink1", "dtbuild2", "dtlink2", "dtlink3", "dtlink4"] + + traversal = traverse.traverse_nodes( + [s], order="breadth", key=key_by_hash, deptype=("build", "link") + ) + assert [x.name for x in traversal] == names + + +def test_breadth_first_traversal_deptype_full(config, mock_packages): + s = Spec("dttop").concretized() + + names = [ + "dttop", + "dtbuild1", + "dtlink1", + "dtrun1", + "dtbuild2", + "dtlink2", + "dtrun2", + "dtlink3", + "dtlink5", + "dtrun3", + "dtlink4", + "dtbuild3", + ] + + traversal = traverse.traverse_nodes([s], order="breadth", key=key_by_hash, deptype="all") + assert [x.name for x in traversal] == names + + +def test_breadth_first_traversal_deptype_run(config, mock_packages): + s = 
Spec("dttop").concretized() + names = ["dttop", "dtrun1", "dtrun3"] + traversal = traverse.traverse_nodes([s], order="breadth", key=key_by_hash, deptype="run") + assert [x.name for x in traversal] == names + + +def test_breadth_first_traversal_reverse(config, mock_packages): + s = Spec("dt-diamond").concretized() + gen = traverse.traverse_nodes( + [s["dt-diamond-bottom"]], order="breadth", key=key_by_hash, direction="parents", depth=True + ) + assert [(depth, spec.name) for (depth, spec) in gen] == [ + (0, "dt-diamond-bottom"), + (1, "dt-diamond-left"), + (1, "dt-diamond-right"), + (2, "dt-diamond"), + ] + + +def test_breadth_first_traversal_multiple_roots(config, mock_packages): + # With DFS, the branch dt-diamond -> dt-diamond-left -> dt-diamond-bottom + # is followed, with BFS, dt-diamond-bottom should be traced through the second + # root dt-diamond-right at depth 1 instead. + s = Spec("dt-diamond").concretized() + roots = [s["dt-diamond"], s["dt-diamond-right"]] + gen = traverse.traverse_edges(roots, order="breadth", key=key_by_hash, depth=True, root=False) + assert [(depth, edge.parent.name, edge.spec.name) for (depth, edge) in gen] == [ + (1, "dt-diamond", "dt-diamond-left"), # edge from first root "to" depth 1 + (1, "dt-diamond-right", "dt-diamond-bottom"), # edge from second root "to" depth 1 + ] + + +def test_breadth_first_versus_depth_first_tree(config, mock_packages): + """ + The packages chain-a, chain-b, chain-c, chain-d have the following DAG: + a --> b --> c --> d # a chain + a --> c # and "skip" connections + a --> d + Here we test at what depth the nodes are discovered when using BFS vs DFS. + """ + s = Spec("chain-a").concretized() + + # BFS should find all nodes as direct deps + assert [ + (depth, edge.spec.name) + for (depth, edge) in traverse.traverse_tree([s], cover="nodes", depth_first=False) + ] == [ + (0, "chain-a"), + (1, "chain-b"), + (1, "chain-c"), + (1, "chain-d"), + ] + + # DFS will disover all nodes along the chain a -> b -> c -> d. 
+ assert [ + (depth, edge.spec.name) + for (depth, edge) in traverse.traverse_tree([s], cover="nodes", depth_first=True) + ] == [ + (0, "chain-a"), + (1, "chain-b"), + (2, "chain-c"), + (3, "chain-d"), + ] + + # When covering all edges, we should never exceed depth 2 in BFS. + assert [ + (depth, edge.spec.name) + for (depth, edge) in traverse.traverse_tree([s], cover="edges", depth_first=False) + ] == [ + (0, "chain-a"), + (1, "chain-b"), + (2, "chain-c"), + (1, "chain-c"), + (2, "chain-d"), + (1, "chain-d"), + ] + + # In DFS we see the chain again. + assert [ + (depth, edge.spec.name) + for (depth, edge) in traverse.traverse_tree([s], cover="edges", depth_first=True) + ] == [ + (0, "chain-a"), + (1, "chain-b"), + (2, "chain-c"), + (3, "chain-d"), + (1, "chain-c"), + (1, "chain-d"), + ] + + +def test_breadth_first_versus_depth_first_printing(config, mock_packages): + """Test breadth-first versus depth-first tree printing.""" + s = Spec("chain-a").concretized() + + args = {"format": "{name}", "color": False} + + dfs_tree_nodes = """\ +chain-a + ^chain-b + ^chain-c + ^chain-d +""" + assert s.tree(depth_first=True, **args) == dfs_tree_nodes + + bfs_tree_nodes = """\ +chain-a + ^chain-b + ^chain-c + ^chain-d +""" + assert s.tree(depth_first=False, **args) == bfs_tree_nodes + + dfs_tree_edges = """\ +chain-a + ^chain-b + ^chain-c + ^chain-d + ^chain-c + ^chain-d +""" + assert s.tree(depth_first=True, cover="edges", **args) == dfs_tree_edges + + bfs_tree_edges = """\ +chain-a + ^chain-b + ^chain-c + ^chain-c + ^chain-d + ^chain-d +""" + assert s.tree(depth_first=False, cover="edges", **args) == bfs_tree_edges diff --git a/lib/spack/spack/test/util/elf.py b/lib/spack/spack/test/util/elf.py new file mode 100644 index 00000000000..b57477f54b0 --- /dev/null +++ b/lib/spack/spack/test/util/elf.py @@ -0,0 +1,167 @@ +# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +import io +from collections import OrderedDict + +import pytest + +import llnl.util.filesystem as fs + +import spack.platforms +import spack.util.elf as elf +import spack.util.executable + + +# note that our elf parser is platform independent... but I guess creating an elf file +# is slightly more difficult with system tools on non-linux. +def skip_unless_linux(f): + return pytest.mark.skipif( + str(spack.platforms.real_host()) != "linux", + reason="implementation currently requires linux", + )(f) + + +@pytest.mark.requires_executables("gcc") +@skip_unless_linux +@pytest.mark.parametrize( + "linker_flag,is_runpath", + [ + ("-Wl,--disable-new-dtags", False), + ("-Wl,--enable-new-dtags", True), + ], +) +def test_elf_parsing_shared_linking(linker_flag, is_runpath, tmpdir): + gcc = spack.util.executable.which("gcc") + + with fs.working_dir(str(tmpdir)): + # Create a library to link to so we can force a dynamic section in an ELF file + with open("foo.c", "w") as f: + f.write("int foo(){return 0;}") + with open("bar.c", "w") as f: + f.write("int foo(); int _start(){return foo();}") + + # Create library and executable linking to it. 
+ gcc("-shared", "-o", "libfoo.so", "-Wl,-soname,libfoo.so.1", "-nostdlib", "foo.c") + gcc( + "-o", + "bar", + linker_flag, + "-Wl,-rpath,/first", + "-Wl,-rpath,/second", + "-Wl,--no-as-needed", + "-nostdlib", + "libfoo.so", + "bar.c", + "-o", + "bar", + ) + + with open("libfoo.so", "rb") as f: + foo_parsed = elf.parse_elf(f, interpreter=True, dynamic_section=True) + + assert not foo_parsed.has_pt_interp + assert foo_parsed.has_pt_dynamic + assert not foo_parsed.has_rpath + assert not foo_parsed.has_needed + assert foo_parsed.has_soname + assert foo_parsed.dt_soname_str == b"libfoo.so.1" + + with open("bar", "rb") as f: + bar_parsed = elf.parse_elf(f, interpreter=True, dynamic_section=True) + + assert bar_parsed.has_pt_interp + assert bar_parsed.has_pt_dynamic + assert bar_parsed.has_rpath + assert bar_parsed.has_needed + assert not bar_parsed.has_soname + assert bar_parsed.dt_rpath_str == b"/first:/second" + assert bar_parsed.dt_needed_strs == [b"libfoo.so.1"] + + +def test_broken_elf(): + # No elf magic + with pytest.raises(elf.ElfParsingError, match="Not an ELF file"): + elf.parse_elf(io.BytesIO(b"x")) + + # Incomplete ELF header + with pytest.raises(elf.ElfParsingError, match="Not an ELF file"): + elf.parse_elf(io.BytesIO(b"\x7fELF")) + + # Invalid class + with pytest.raises(elf.ElfParsingError, match="Invalid class"): + elf.parse_elf(io.BytesIO(b"\x7fELF\x09\x01" + b"\x00" * 10)) + + # Invalid data type + with pytest.raises(elf.ElfParsingError, match="Invalid data type"): + elf.parse_elf(io.BytesIO(b"\x7fELF\x01\x09" + b"\x00" * 10)) + + # 64-bit needs at least 64 bytes of header; this is only 56 bytes + with pytest.raises(elf.ElfParsingError, match="ELF header malformed"): + elf.parse_elf(io.BytesIO(b"\x7fELF\x02\x01" + b"\x00" * 50)) + + # 32-bit needs at least 52 bytes of header; this is only 46 bytes + with pytest.raises(elf.ElfParsingError, match="ELF header malformed"): + elf.parse_elf(io.BytesIO(b"\x7fELF\x01\x01" + b"\x00" * 40)) + + # Not a 
ET_DYN/ET_EXEC on a 32-bit LE ELF + with pytest.raises(elf.ElfParsingError, match="Not an ET_DYN or ET_EXEC"): + elf.parse_elf(io.BytesIO(b"\x7fELF\x01\x01" + (b"\x00" * 10) + b"\x09" + (b"\x00" * 35))) + + +def test_parser_doesnt_deal_with_nonzero_offset(): + # Currently we don't have logic to parse ELF files at nonzero offsets in a file + # This could be useful when e.g. modifying an ELF file inside a tarball or so, + # but currently we cannot. + elf_at_offset_one = io.BytesIO(b"\x00\x7fELF\x01\x01" + b"\x00" * 10) + elf_at_offset_one.read(1) + with pytest.raises(elf.ElfParsingError, match="Cannot parse at a nonzero offset"): + elf.parse_elf(elf_at_offset_one) + + +@pytest.mark.requires_executables("gcc") +@skip_unless_linux +def test_elf_get_and_replace_rpaths(binary_with_rpaths): + long_rpaths = ["/very/long/prefix-a/x", "/very/long/prefix-b/y"] + executable = str(binary_with_rpaths(rpaths=long_rpaths)) + + # Before + assert elf.get_rpaths(executable) == long_rpaths + + replacements = OrderedDict( + [ + (b"/very/long/prefix-a", b"/short-a"), + (b"/very/long/prefix-b", b"/short-b"), + (b"/very/long", b"/dont"), + ] + ) + + # Replace once: should modify the file. + assert elf.replace_rpath_in_place_or_raise(executable, replacements) + + # Replace twice: nothing to be done. + assert not elf.replace_rpath_in_place_or_raise(executable, replacements) + + # Verify the rpaths were modified correctly + assert elf.get_rpaths(executable) == ["/short-a/x", "/short-b/y"] + + # Going back to long rpaths should fail, since we've added trailing \0 + # bytes, and replacement can't assume it can write back in repeated null + # bytes -- it may correspond to zero-length strings for example. 
+ with pytest.raises( + elf.ElfDynamicSectionUpdateFailed, + match="New rpath /very/long/prefix-a/x:/very/long/prefix-b/y is " + "longer than old rpath /short-a/x:/short-b/y", + ): + elf.replace_rpath_in_place_or_raise( + executable, + OrderedDict( + [ + (b"/short-a", b"/very/long/prefix-a"), + (b"/short-b", b"/very/long/prefix-b"), + ] + ), + ) diff --git a/lib/spack/spack/test/util/unparse/unparse.py b/lib/spack/spack/test/util/unparse/unparse.py index 217f67f35d3..82148c9dc82 100644 --- a/lib/spack/spack/test/util/unparse/unparse.py +++ b/lib/spack/spack/test/util/unparse/unparse.py @@ -178,8 +178,69 @@ async def f(): """ -def assertASTEqual(ast1, ast2): - ast.dump(ast1) == ast.dump(ast2) +match_literal = """\ +match status: + case 400: + return "Bad request" + case 404 | 418: + return "Not found" + case _: + return "Something's wrong with the internet" +""" + +match_with_noop = """\ +match status: + case 400: + return "Bad request" +""" + +match_literal_and_variable = """\ +match point: + case (0, 0): + print("Origin") + case (0, y): + print(f"Y={y}") + case (x, 0): + print(f"X={x}") + case (x, y): + print(f"X={x}, Y={y}") + case _: + raise ValueError("Not a point") +""" + + +match_classes = """\ +class Point: + x: int + y: int + +def location(point): + match point: + case Point(x=0, y=0): + print("Origin is the point's location.") + case Point(x=0, y=y): + print(f"Y={y} and the point is on the y-axis.") + case Point(x=x, y=0): + print(f"X={x} and the point is on the x-axis.") + case Point(): + print("The point is located somewhere else on the plane.") + case _: + print("Not a point") +""" + +match_nested = """\ +match points: + case []: + print("No points in the list.") + case [Point(0, 0)]: + print("The origin is the only point in the list.") + case [Point(x, y)]: + print(f"A single point {x}, {y} is in the list.") + case [Point(0, y1), Point(0, y2)]: + print(f"Two points on the Y axis at {y1}, {y2} are in the list.") + case _: + print("Something else is 
found in the list.") +""" def check_ast_roundtrip(code1, filename="internal", mode="exec"): @@ -187,7 +248,9 @@ def check_ast_roundtrip(code1, filename="internal", mode="exec"): code2 = spack.util.unparse.unparse(ast1) ast2 = compile(code2, filename, mode, ast.PyCF_ONLY_AST) - assertASTEqual(ast1, ast2) + + error_msg = "Failed to roundtrip {} [mode={}]".format(filename, mode) + assert ast.dump(ast1) == ast.dump(ast2), error_msg def test_core_lib_files(): @@ -514,3 +577,12 @@ def test_async_with(): @pytest.mark.skipif(sys.version_info < (3, 5), reason="Not supported < 3.5") def test_async_with_as(): check_ast_roundtrip(async_with_as) + + +@pytest.mark.skipif(sys.version_info < (3, 10), reason="Not supported < 3.10") +@pytest.mark.parametrize( + "literal", + [match_literal, match_with_noop, match_literal_and_variable, match_classes, match_nested], +) +def test_match_literal(literal): + check_ast_roundtrip(literal) diff --git a/lib/spack/spack/traverse.py b/lib/spack/spack/traverse.py new file mode 100644 index 00000000000..6263f83896a --- /dev/null +++ b/lib/spack/spack/traverse.py @@ -0,0 +1,417 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from collections import defaultdict, namedtuple + +import spack.spec + +# Export only the high-level API. +__all__ = ["traverse_edges", "traverse_nodes", "traverse_tree"] + +#: Data class that stores a directed edge together with depth at +#: which the target vertex was found. It is passed to ``accept`` +#: and ``neighbors`` of visitors, so they can decide whether to +#: follow the edge or not. 
+EdgeAndDepth = namedtuple("EdgeAndDepth", ["edge", "depth"]) + + +def sort_edges(edges): + edges.sort(key=lambda edge: edge.spec.name) + return edges + + +class BaseVisitor(object): + """A simple visitor that accepts all edges unconditionally and follows all + edges to dependencies of a given ``deptype``.""" + + def __init__(self, deptype="all"): + self.deptype = deptype + + def accept(self, node): + """ + Arguments: + node (EdgeAndDepth): Provides the depth and the edge through which the + node was discovered + + Returns: + bool: Returns ``True`` if the node is accepted. When ``False``, this + indicates that the node won't be yielded by iterators and dependencies + are not followed. + """ + return True + + def neighbors(self, node): + return sort_edges(node.edge.spec.edges_to_dependencies(deptype=self.deptype)) + + +class ReverseVisitor(object): + """A visitor that reverses the arrows in the DAG, following dependents.""" + + def __init__(self, visitor, deptype="all"): + self.visitor = visitor + self.deptype = deptype + + def accept(self, node): + return self.visitor.accept(node) + + def neighbors(self, node): + """Return dependents, note that we actually flip the edge direction to allow + generic programming""" + spec = node.edge.spec + return sort_edges( + [edge.flip() for edge in spec.edges_from_dependents(deptype=self.deptype)] + ) + + +class CoverNodesVisitor(object): + """A visitor that traverses each node once.""" + + def __init__(self, visitor, key=id, visited=None): + self.visitor = visitor + self.key = key + self.visited = set() if visited is None else visited + + def accept(self, node): + # Covering nodes means: visit nodes once and only once. 
+ key = self.key(node.edge.spec) + + if key in self.visited: + return False + + accept = self.visitor.accept(node) + self.visited.add(key) + return accept + + def neighbors(self, node): + return self.visitor.neighbors(node) + + +class CoverEdgesVisitor(object): + """A visitor that traverses all edges once.""" + + def __init__(self, visitor, key=id, visited=None): + self.visitor = visitor + self.visited = set() if visited is None else visited + self.key = key + + def accept(self, node): + return self.visitor.accept(node) + + def neighbors(self, node): + # Covering edges means: drop dependencies of visited nodes. + key = self.key(node.edge.spec) + + if key in self.visited: + return [] + + self.visited.add(key) + return self.visitor.neighbors(node) + + +def get_visitor_from_args(cover, direction, deptype, key=id, visited=None, visitor=None): + """ + Create a visitor object from common keyword arguments. + + Arguments: + cover (str): Determines how extensively to cover the dag. Possible values: + ``nodes`` -- Visit each unique node in the dag only once. + ``edges`` -- If a node has been visited once but is reached along a + new path, it's accepted, but not recurisvely followed. This traverses + each 'edge' in the DAG once. + ``paths`` -- Explore every unique path reachable from the root. + This descends into visited subtrees and will accept nodes multiple + times if they're reachable by multiple paths. + direction (str): ``children`` or ``parents``. If ``children``, does a traversal + of this spec's children. If ``parents``, traverses upwards in the DAG + towards the root. + deptype (str or tuple): allowed dependency types + key: function that takes a spec and outputs a key for uniqueness test. + visited (set or None): a set of nodes not to follow (when using cover=nodes/edges) + visitor: An initial visitor that is used for composition. 
+ + Returns: + A visitor + """ + visitor = visitor or BaseVisitor(deptype) + if cover == "nodes": + visitor = CoverNodesVisitor(visitor, key, visited) + elif cover == "edges": + visitor = CoverEdgesVisitor(visitor, key, visited) + if direction == "parents": + visitor = ReverseVisitor(visitor, deptype) + return visitor + + +def root_specs(specs): + """Initialize a list of edges from an imaginary root node to the root specs.""" + return [ + EdgeAndDepth(edge=spack.spec.DependencySpec(parent=None, spec=s, deptypes=()), depth=0) + for s in specs + ] + + +def traverse_depth_first_edges_generator(nodes, visitor, post_order=False, root=True, depth=False): + # This is a somewhat non-standard implementation, but the reason to start with + # edges is that we don't have to deal with an artificial root node when doing DFS + # on multiple (root) specs. + for node in nodes: + if not visitor.accept(node): + continue + + yield_me = root or node.depth > 0 + + # Pre + if yield_me and not post_order: + yield (node.depth, node.edge) if depth else node.edge + + neighbors = [ + EdgeAndDepth(edge=edge, depth=node.depth + 1) for edge in visitor.neighbors(node) + ] + + # This extra branch is just for efficiency. + if len(neighbors) >= 0: + for item in traverse_depth_first_edges_generator( + neighbors, visitor, post_order, root, depth + ): + yield item + + # Post + if yield_me and post_order: + yield (node.depth, node.edge) if depth else node.edge + + +def traverse_breadth_first_edges_generator(queue, visitor, root=True, depth=False): + while len(queue) > 0: + node = queue.pop(0) + + # If the visitor doesn't accept the node, we don't yield it nor follow its edges. 
+ if not visitor.accept(node): + continue + + if root or node.depth > 0: + yield (node.depth, node.edge) if depth else node.edge + + for edge in visitor.neighbors(node): + queue.append(EdgeAndDepth(edge, node.depth + 1)) + + +def traverse_breadth_first_with_visitor(specs, visitor): + """Performs breadth first traversal for a list of specs (not a generator). + + Arguments: + specs (list): List of Spec instances. + visitor: object that implements accept and neighbors interface, see + for example BaseVisitor. + """ + queue = root_specs(specs) + while len(queue) > 0: + node = queue.pop(0) + + # If the visitor doesn't accept the node, we don't traverse it further. + if not visitor.accept(node): + continue + + for edge in visitor.neighbors(node): + queue.append(EdgeAndDepth(edge, node.depth + 1)) + + +# Helper functions for generating a tree using breadth-first traversal + + +def breadth_first_to_tree_edges(roots, deptype="all", key=id): + """This produces an adjacency list (with edges) and a map of parents. + There may be nodes that are reached through multiple edges. To print as + a tree, one should use the parents dict to verify if the path leading to + the node is through the correct parent. 
If not, the branch should be + truncated.""" + edges = defaultdict(list) + parents = dict() + + for edge in traverse_edges(roots, order="breadth", cover="edges", deptype=deptype, key=key): + parent_id = None if edge.parent is None else key(edge.parent) + child_id = key(edge.spec) + edges[parent_id].append(edge) + if child_id not in parents: + parents[child_id] = parent_id + + return edges, parents + + +def breadth_first_to_tree_nodes(roots, deptype="all", key=id): + """This produces a list of edges that forms a tree; every node has no more + that one incoming edge.""" + edges = defaultdict(list) + + for edge in traverse_edges(roots, order="breadth", cover="nodes", deptype=deptype, key=key): + parent_id = None if edge.parent is None else key(edge.parent) + edges[parent_id].append(edge) + + return edges + + +def traverse_breadth_first_tree_edges(parent_id, edges, parents, key=id, depth=0): + """Do a depth-first search on edges generated by bread-first traversal, + which can be used to produce a tree.""" + for edge in edges[parent_id]: + yield (depth, edge) + + child_id = key(edge.spec) + + # Don't follow further if we're not the parent + if parents[child_id] != parent_id: + continue + + # yield from ... in Python 3. + for item in traverse_breadth_first_tree_edges(child_id, edges, parents, key, depth + 1): + yield item + + +def traverse_breadth_first_tree_nodes(parent_id, edges, key=id, depth=0): + for edge in edges[parent_id]: + yield (depth, edge) + for item in traverse_breadth_first_tree_nodes(key(edge.spec), edges, key, depth + 1): + yield item + + +# High-level API: traverse_edges, traverse_nodes, traverse_tree. + + +def traverse_edges( + specs, + root=True, + order="pre", + cover="nodes", + direction="children", + deptype="all", + depth=False, + key=id, + visited=None, +): + """ + Generator that yields edges from the DAG, starting from a list of root specs. 
+
+    Arguments:
+
+        specs (list): List of root specs (considered to be depth 0)
+        root (bool): Yield the root nodes themselves
+        order (str): What order of traversal to use in the DAG. For depth-first
+            search this can be ``pre`` or ``post``. For BFS this should be ``breadth``.
+        cover (str): Determines how extensively to cover the dag. Possible values:
+            ``nodes`` -- Visit each unique node in the dag only once.
+            ``edges`` -- If a node has been visited once but is reached along a
+            new path, it's accepted, but not recursively followed. This traverses
+            each 'edge' in the DAG once.
+            ``paths`` -- Explore every unique path reachable from the root.
+            This descends into visited subtrees and will accept nodes multiple
+            times if they're reachable by multiple paths.
+        direction (str): ``children`` or ``parents``. If ``children``, does a traversal
+            of this spec's children. If ``parents``, traverses upwards in the DAG
+            towards the root.
+        deptype (str or tuple): allowed dependency types
+        depth (bool): When ``False``, yield just edges. When ``True`` yield
+            the tuple (depth, edge), where depth corresponds to the depth
+            at which edge.spec was discovered.
+        key: function that takes a spec and outputs a key for uniqueness test.
+        visited (set or None): a set of nodes not to follow
+
+    Returns:
+        A generator that yields ``DependencySpec`` if depth is ``False``
+        or a tuple of ``(depth, DependencySpec)`` if depth is ``True``.
+ """ + root_edges = root_specs(specs) + visitor = get_visitor_from_args(cover, direction, deptype, key, visited) + + # Depth-first + if order in ("pre", "post"): + return traverse_depth_first_edges_generator( + root_edges, visitor, order == "post", root, depth + ) + + # Breadth-first + return traverse_breadth_first_edges_generator(root_edges, visitor, root, depth) + + +def traverse_nodes( + specs, + root=True, + order="pre", + cover="nodes", + direction="children", + deptype="all", + depth=False, + key=id, + visited=None, +): + """ + Generator that yields specs from the DAG, starting from a list of root specs. + + Arguments: + specs (list): List of root specs (considered to be depth 0) + root (bool): Yield the root nodes themselves + order (str): What order of traversal to use in the DAG. For depth-first + search this can be ``pre`` or ``post``. For BFS this should be ``breadth``. + cover (str): Determines how extensively to cover the dag. Possible values: + ``nodes`` -- Visit each unique node in the dag only once. + ``edges`` -- If a node has been visited once but is reached along a + new path, it's accepted, but not recurisvely followed. This traverses + each 'edge' in the DAG once. + ``paths`` -- Explore every unique path reachable from the root. + This descends into visited subtrees and will accept nodes multiple + times if they're reachable by multiple paths. + direction (str): ``children`` or ``parents``. If ``children``, does a traversal + of this spec's children. If ``parents``, traverses upwards in the DAG + towards the root. + deptype (str or tuple): allowed dependency types + depth (bool): When ``False``, yield just edges. When ``True`` yield + the tuple ``(depth, edge)``, where depth corresponds to the depth + at which ``edge.spec`` was discovered. + key: function that takes a spec and outputs a key for uniqueness test. 
+        visited (set or None): a set of nodes not to follow
+
+    Yields:
+        By default :class:`~spack.spec.Spec`, or a tuple ``(depth, Spec)`` if depth is
+        set to ``True``.
+    """
+    for item in traverse_edges(specs, root, order, cover, direction, deptype, depth, key, visited):
+        yield (item[0], item[1].spec) if depth else item.spec
+
+
+def traverse_tree(specs, cover="nodes", deptype="all", key=id, depth_first=True):
+    """
+    Generator that yields ``(depth, DependencySpec)`` tuples in the depth-first
+    pre-order, so that a tree can be printed from it.
+
+    Arguments:
+
+        specs (list): List of root specs (considered to be depth 0)
+        cover (str): Determines how extensively to cover the dag. Possible values:
+            ``nodes`` -- Visit each unique node in the dag only once.
+            ``edges`` -- If a node has been visited once but is reached along a
+            new path, it's accepted, but not recursively followed. This traverses
+            each 'edge' in the DAG once.
+            ``paths`` -- Explore every unique path reachable from the root.
+            This descends into visited subtrees and will accept nodes multiple
+            times if they're reachable by multiple paths.
+        deptype (str or tuple): allowed dependency types
+        key: function that takes a spec and outputs a key for uniqueness test.
+        depth_first (bool): Explore the tree in depth-first or breadth-first order.
+            When setting ``depth_first=True`` and ``cover=nodes``, each spec only
+            occurs once at the shallowest level, which is useful when rendering
+            the tree in a terminal.
+
+    Returns:
+        A generator that yields ``(depth, DependencySpec)`` tuples in such an order
+        that a tree can be printed.
+    """
+    # BFS only makes sense when going over edges and nodes, for paths the tree is
+    # identical to DFS, which is much more efficient then.
+ if not depth_first and cover == "edges": + edges, parents = breadth_first_to_tree_edges(specs, deptype, key) + return traverse_breadth_first_tree_edges(None, edges, parents) + elif not depth_first and cover == "nodes": + edges = breadth_first_to_tree_nodes(specs, deptype, key) + return traverse_breadth_first_tree_nodes(None, edges) + + return traverse_edges(specs, order="pre", cover=cover, deptype=deptype, key=key, depth=True) diff --git a/lib/spack/spack/util/elf.py b/lib/spack/spack/util/elf.py new file mode 100644 index 00000000000..0b2e5a4e718 --- /dev/null +++ b/lib/spack/spack/util/elf.py @@ -0,0 +1,534 @@ +# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +import bisect +import re +import struct +import sys +from collections import namedtuple +from struct import calcsize, unpack, unpack_from + +ElfHeader = namedtuple( + "ElfHeader", + [ + "e_type", + "e_machine", + "e_version", + "e_entry", + "e_phoff", + "e_shoff", + "e_flags", + "e_ehsize", + "e_phentsize", + "e_phnum", + "e_shentsize", + "e_shnum", + "e_shstrndx", + ], +) + +SectionHeader = namedtuple( + "SectionHeader", + [ + "sh_name", + "sh_type", + "sh_flags", + "sh_addr", + "sh_offset", + "sh_size", + "sh_link", + "sh_info", + "sh_addralign", + "sh_entsize", + ], +) + +ProgramHeader32 = namedtuple( + "ProgramHeader32", + [ + "p_type", + "p_offset", + "p_vaddr", + "p_paddr", + "p_filesz", + "p_memsz", + "p_flags", + "p_align", + ], +) + +ProgramHeader64 = namedtuple( + "ProgramHeader64", + [ + "p_type", + "p_flags", + "p_offset", + "p_vaddr", + "p_paddr", + "p_filesz", + "p_memsz", + "p_align", + ], +) + + +class ELF_CONSTANTS: + MAGIC = b"\x7fELF" + CLASS32 = 1 + CLASS64 = 2 + DATA2LSB = 1 + DATA2MSB = 2 + ET_EXEC = 2 + ET_DYN = 3 + PT_LOAD = 1 + PT_DYNAMIC = 2 + PT_INTERP = 3 + DT_NULL = 0 + DT_NEEDED = 1 + DT_STRTAB = 5 + DT_SONAME = 14 + DT_RPATH 
= 15 + DT_RUNPATH = 29 + SHT_STRTAB = 3 + + +def get_byte_at(byte_array, idx): + if sys.version_info[0] < 3: + return ord(byte_array[idx]) + return byte_array[idx] + + +class ElfFile(object): + """Parsed ELF file.""" + + __slots__ = [ + "is_64_bit", + "is_little_endian", + "byte_order", + "elf_hdr", + "pt_load", + # pt_interp + "has_pt_interp", + "pt_interp_p_offset", + "pt_interp_p_filesz", + "pt_interp_str", + # pt_dynamic + "has_pt_dynamic", + "pt_dynamic_p_offset", + "pt_dynamic_p_filesz", + "pt_dynamic_strtab_offset", # string table for dynamic section + # rpath + "has_rpath", + "dt_rpath_offset", + "dt_rpath_str", + "rpath_strtab_offset", + "is_runpath", + # dt needed + "has_needed", + "dt_needed_strtab_offsets", + "dt_needed_strs", + # dt soname + "has_soname", + "dt_soname_strtab_offset", + "dt_soname_str", + ] + + def __init__(self): + self.dt_needed_strtab_offsets = [] + self.has_soname = False + self.has_rpath = False + self.has_needed = False + self.pt_load = [] + self.has_pt_dynamic = False + self.has_pt_interp = False + + +def parse_c_string(byte_string, start=0): + """ + Retrieve a C-string at a given offset in a byte string + + Arguments: + byte_string (bytes): String + start (int): Offset into the string + + Returns: + bytes: A copy of the C-string excluding the terminating null byte + """ + str_end = byte_string.find(b"\0", start) + if str_end == -1: + raise ElfParsingError("C-string is not null terminated") + return byte_string[start:str_end] + + +def read_exactly(f, num_bytes, msg): + """ + Read exactly num_bytes at the current offset, otherwise raise + a parsing error with the given error message. + + Arguments: + f: file handle + num_bytes (int): Number of bytes to read + msg (str): Error to show when bytes cannot be read + + Returns: + bytes: the ``num_bytes`` bytes that were read. 
+ """ + data = f.read(num_bytes) + if len(data) != num_bytes: + raise ElfParsingError(msg) + return data + + +def parse_program_headers(f, elf): + """ + Parse program headers + + Arguments: + f: file handle + elf (ElfFile): ELF file parser data + """ + # Forward to the program header + f.seek(elf.elf_hdr.e_phoff) + + # Here we have to make a mapping from virtual address to offset in the file. + ProgramHeader = ProgramHeader64 if elf.is_64_bit else ProgramHeader32 + ph_fmt = elf.byte_order + ("LLQQQQQQ" if elf.is_64_bit else "LLLLLLLL") + ph_size = calcsize(ph_fmt) + ph_num = elf.elf_hdr.e_phnum + + # Read all program headers in one go + data = read_exactly(f, ph_num * ph_size, "Malformed program header") + + for i in range(ph_num): + ph = ProgramHeader._make(unpack_from(ph_fmt, data, i * ph_size)) + + # Skip segments of size 0; we don't distinguish between missing segment and + # empty segments. I've see an empty PT_DYNAMIC section for an ELF file that + # contained debug data. + if ph.p_filesz == 0: + continue + + # For PT_LOAD entries: Save offsets and virtual addrs of the loaded ELF segments + # This way we can map offsets by virtual address to offsets in the file. + if ph.p_type == ELF_CONSTANTS.PT_LOAD: + elf.pt_load.append((ph.p_offset, ph.p_vaddr)) + + elif ph.p_type == ELF_CONSTANTS.PT_INTERP: + elf.pt_interp_p_offset = ph.p_offset + elf.pt_interp_p_filesz = ph.p_filesz + elf.has_pt_interp = True + + elif ph.p_type == ELF_CONSTANTS.PT_DYNAMIC: + elf.pt_dynamic_p_offset = ph.p_offset + elf.pt_dynamic_p_filesz = ph.p_filesz + elf.has_pt_dynamic = True + + # The linker sorts PT_LOAD segments by vaddr, but let's do it just to be sure, since + # patchelf for example has a flag to leave them in an arbitrary order. + elf.pt_load.sort(key=lambda x: x[1]) + + +def parse_pt_interp(f, elf): + """ + Parse the interpreter (i.e. 
absolute path to the dynamic linker)
+
+    Arguments:
+        f: file handle
+        elf (ElfFile): ELF file parser data
+    """
+    f.seek(elf.pt_interp_p_offset)
+    data = read_exactly(f, elf.pt_interp_p_filesz, "Malformed PT_INTERP entry")
+    elf.pt_interp_str = parse_c_string(data)
+
+
+def find_strtab_size_at_offset(f, elf, offset):
+    """
+    Retrieve the size of a string table section at a particular known offset
+
+    Arguments:
+        f: file handle
+        elf (ElfFile): ELF file parser data
+        offset (int): offset of the section in the file (i.e. ``sh_offset``)
+
+    Returns:
+        int: the size of the string table in bytes
+    """
+    section_hdr_fmt = elf.byte_order + ("LLQQQQLLQQ" if elf.is_64_bit else "LLLLLLLLLL")
+    section_hdr_size = calcsize(section_hdr_fmt)
+    f.seek(elf.elf_hdr.e_shoff)
+    for _ in range(elf.elf_hdr.e_shnum):
+        data = read_exactly(f, section_hdr_size, "Malformed section header")
+        sh = SectionHeader._make(unpack(section_hdr_fmt, data))
+        if sh.sh_type == ELF_CONSTANTS.SHT_STRTAB and sh.sh_offset == offset:
+            return sh.sh_size
+
+    raise ElfParsingError("Could not determine strtab size")
+
+
+def retrieve_strtab(f, elf, offset):
+    """
+    Read a full string table at the given offset, which
+    requires looking it up in the section headers.
+
+    Arguments:
+        f: file handle
+        elf (ElfFile): ELF file parser data
+        offset (int): offset of the section in the file (i.e. ``sh_offset``)
+
+    Returns:
+        bytes: the string table data
+    """
+    size = find_strtab_size_at_offset(f, elf, offset)
+    f.seek(offset)
+    return read_exactly(f, size, "Could not read string table")
+
+
+def vaddr_to_offset(elf, vaddr):
+    """
+    Given a virtual address, find the corresponding offset in the ELF file itself.
+ + Arguments: + elf (ElfFile): ELF file parser data + vaddr (int): virtual address + """ + idx = bisect.bisect_right([p_vaddr for (p_offset, p_vaddr) in elf.pt_load], vaddr) - 1 + p_offset, p_vaddr = elf.pt_load[idx] + return p_offset - p_vaddr + vaddr + + +def parse_pt_dynamic(f, elf): + """ + Parse the dynamic section of an ELF file + + Arguments: + f: file handle + elf (ElfFile): ELF file parse data + """ + dynamic_array_fmt = elf.byte_order + ("qQ" if elf.is_64_bit else "lL") + dynamic_array_size = calcsize(dynamic_array_fmt) + + current_offset = elf.pt_dynamic_p_offset + count_rpath = 0 + count_runpath = 0 + count_strtab = 0 + + f.seek(elf.pt_dynamic_p_offset) + + # In case of broken ELF files, don't read beyond the advertized size. + for _ in range(elf.pt_dynamic_p_filesz // dynamic_array_size): + data = read_exactly(f, dynamic_array_size, "Malformed dynamic array entry") + tag, val = unpack(dynamic_array_fmt, data) + if tag == ELF_CONSTANTS.DT_NULL: + break + elif tag == ELF_CONSTANTS.DT_RPATH: + count_rpath += 1 + elf.rpath_strtab_offset = val + elf.dt_rpath_offset = current_offset + elf.is_runpath = False + elf.has_rpath = True + elif tag == ELF_CONSTANTS.DT_RUNPATH: + count_runpath += 1 + elf.rpath_strtab_offset = val + elf.dt_rpath_offset = current_offset + elf.is_runpath = True + elf.has_rpath = True + elif tag == ELF_CONSTANTS.DT_STRTAB: + count_strtab += 1 + strtab_vaddr = val + elif tag == ELF_CONSTANTS.DT_NEEDED: + elf.has_needed = True + elf.dt_needed_strtab_offsets.append(val) + elif tag == ELF_CONSTANTS.DT_SONAME: + elf.has_soname = True + elf.dt_soname_strtab_offset = val + current_offset += dynamic_array_size + + # No rpath/runpath, that happens. 
+ if count_rpath == count_runpath == 0: + elf.has_rpath = False + elif count_rpath + count_runpath != 1: + raise ElfParsingError("Could not find a unique rpath/runpath.") + + if count_strtab != 1: + raise ElfParsingError("Could not find a unique strtab of for the dynamic section strings") + + # Nothing to retrieve, so don't bother getting the string table. + if not (elf.has_rpath or elf.has_soname or elf.has_needed): + return + + elf.pt_dynamic_strtab_offset = vaddr_to_offset(elf, strtab_vaddr) + string_table = retrieve_strtab(f, elf, elf.pt_dynamic_strtab_offset) + + if elf.has_needed: + elf.dt_needed_strs = list( + parse_c_string(string_table, offset) for offset in elf.dt_needed_strtab_offsets + ) + + if elf.has_soname: + elf.dt_soname_str = parse_c_string(string_table, elf.dt_soname_strtab_offset) + + if elf.has_rpath: + elf.dt_rpath_str = parse_c_string(string_table, elf.rpath_strtab_offset) + + +def parse_header(f, elf): + # Read the 32/64 bit class independent part of the header and validate + e_ident = f.read(16) + + # Require ELF magic bytes. + if len(e_ident) != 16 or e_ident[:4] != ELF_CONSTANTS.MAGIC: + raise ElfParsingError("Not an ELF file") + + # Defensively require a valid class and data. 
+ e_ident_class, e_ident_data = get_byte_at(e_ident, 4), get_byte_at(e_ident, 5) + + if e_ident_class not in (ELF_CONSTANTS.CLASS32, ELF_CONSTANTS.CLASS64): + raise ElfParsingError("Invalid class found") + + if e_ident_data not in (ELF_CONSTANTS.DATA2LSB, ELF_CONSTANTS.DATA2MSB): + raise ElfParsingError("Invalid data type") + + elf.is_64_bit = e_ident_class == ELF_CONSTANTS.CLASS64 + elf.is_little_endian = e_ident_data == ELF_CONSTANTS.DATA2LSB + + # Set up byte order and types for unpacking + elf.byte_order = "<" if elf.is_little_endian else ">" + + # Parse the rest of the header + elf_header_fmt = elf.byte_order + ("HHLQQQLHHHHHH" if elf.is_64_bit else "HHLLLLLHHHHHH") + hdr_size = calcsize(elf_header_fmt) + data = read_exactly(f, hdr_size, "ELF header malformed") + elf.elf_hdr = ElfHeader._make(unpack(elf_header_fmt, data)) + + +def _do_parse_elf(f, interpreter=True, dynamic_section=True): + # We don't (yet?) allow parsing ELF files at a nonzero offset, we just + # jump to absolute offsets as they are specified in the ELF file. + if f.tell() != 0: + raise ElfParsingError("Cannot parse at a nonzero offset") + + elf = ElfFile() + parse_header(f, elf) + + # We don't handle anything but executables and shared libraries now. + if elf.elf_hdr.e_type not in (ELF_CONSTANTS.ET_EXEC, ELF_CONSTANTS.ET_DYN): + raise ElfParsingError("Not an ET_DYN or ET_EXEC type") + + parse_program_headers(f, elf) + + # Parse PT_INTERP section + if interpreter and elf.has_pt_interp: + parse_pt_interp(f, elf) + + # Parse PT_DYNAMIC section. 
+    if dynamic_section and elf.has_pt_dynamic and len(elf.pt_load) > 0:
+        parse_pt_dynamic(f, elf)
+
+    return elf
+
+
+def parse_elf(f, interpreter=False, dynamic_section=False):
+    """Given a file handle f for an ELF file opened in binary mode, return an ElfFile
+    object that stores data about rpaths"""
+    try:
+        return _do_parse_elf(f, interpreter, dynamic_section)
+    except (DeprecationWarning, struct.error):
+        # According to the docs old versions of Python can throw DeprecationWarning
+        # instead of struct.error.
+        raise ElfParsingError("Malformed ELF file")
+
+
+def get_rpaths(path):
+    """Returns list of rpaths of the given file as UTF-8 strings, or None if the file
+    does not have any rpaths."""
+    try:
+        with open(path, "rb") as f:
+            elf = parse_elf(f, interpreter=False, dynamic_section=True)
+    except ElfParsingError:
+        return None
+
+    if not elf.has_rpath:
+        return None
+
+    # If it does, split the string in components
+    rpath = elf.dt_rpath_str
+    if sys.version_info[0] >= 3:
+        rpath = rpath.decode("utf-8")
+    return rpath.split(":")
+
+
+def replace_rpath_in_place_or_raise(path, substitutions):
+    regex = re.compile(b"|".join(re.escape(p) for p in substitutions.keys()))
+
+    try:
+        with open(path, "rb+") as f:
+            elf = parse_elf(f, interpreter=False, dynamic_section=True)
+
+            # If there's no RPATH, then there's no need to replace anything.
+            if not elf.has_rpath:
+                return False
+
+            # Get the non-empty rpaths. Sometimes there's a bunch of trailing
+            # colons ::::: used for padding, we don't add them back to make it
+            # more likely that the string doesn't grow.
+            rpaths = list(filter(len, elf.dt_rpath_str.split(b":")))
+
+            num_rpaths = len(rpaths)
+
+            if num_rpaths == 0:
+                return False
+
+            changed = False
+            for i in range(num_rpaths):
+                old_rpath = rpaths[i]
+                match = regex.match(old_rpath)
+                if match:
+                    changed = True
+                    rpaths[i] = substitutions[match.group()] + old_rpath[match.end() :]
+
+            # Nothing to replace!
+ if not changed: + return False + + new_rpath_string = b":".join(rpaths) + + pad = len(elf.dt_rpath_str) - len(new_rpath_string) + + if pad < 0: + raise ElfDynamicSectionUpdateFailed(elf.dt_rpath_str, new_rpath_string) + + # We zero out the bits we shortened because (a) it should be a + # C-string and (b) it's nice not to have spurious parts of old + # paths in the output of `strings file`. Note that we're all + # good when pad == 0; the original terminating null is used. + new_rpath_string += b"\x00" * pad + + # The rpath is at a given offset in the string table used by the + # dynamic section. + rpath_offset = elf.pt_dynamic_strtab_offset + elf.rpath_strtab_offset + + f.seek(rpath_offset) + f.write(new_rpath_string) + return True + + except ElfParsingError: + # This just means the file wasnt an elf file, so there's no point + # in updating its rpath anyways; ignore this problem. + return False + + +class ElfDynamicSectionUpdateFailed(Exception): + def __init__(self, old, new): + self.old = old + self.new = new + super(ElfDynamicSectionUpdateFailed, self).__init__( + "New rpath {} is longer than old rpath {}".format( + new.decode("utf-8"), + old.decode("utf-8"), + ) + ) + + +class ElfParsingError(Exception): + pass diff --git a/lib/spack/spack/util/executable.py b/lib/spack/spack/util/executable.py index cd8ddef6de6..6160b952662 100644 --- a/lib/spack/spack/util/executable.py +++ b/lib/spack/spack/util/executable.py @@ -10,6 +10,7 @@ import sys from six import string_types, text_type +from six.moves import shlex_quote import llnl.util.tty as tty @@ -333,7 +334,7 @@ def which(*args, **kwargs): Executable: The first executable that is found in the path """ exe = which_string(*args, **kwargs) - return Executable(exe) if exe else None + return Executable(shlex_quote(exe)) if exe else None class ProcessError(spack.error.SpackError): diff --git a/lib/spack/spack/util/gpg.py b/lib/spack/spack/util/gpg.py index 3f0d74f4b71..ffee274c3fc 100644 --- 
a/lib/spack/spack/util/gpg.py +++ b/lib/spack/spack/util/gpg.py @@ -239,7 +239,7 @@ def trust(keyfile): keys = _get_unimported_public_keys(output) # Import them - GPG("--import", keyfile) + GPG("--batch", "--import", keyfile) # Set trust to ultimate key_to_fpr = dict(public_keys_to_fingerprint()) @@ -285,7 +285,7 @@ def sign(key, file, output, clearsign=False): signature, if False creates a detached signature """ signopt = "--clearsign" if clearsign else "--detach-sign" - GPG(signopt, "--armor", "--default-key", key, "--output", output, file) + GPG(signopt, "--armor", "--local-user", key, "--output", output, file) @_autoinit diff --git a/lib/spack/spack/util/package_hash.py b/lib/spack/spack/util/package_hash.py index 4877748338c..f4435fbba46 100644 --- a/lib/spack/spack/util/package_hash.py +++ b/lib/spack/spack/util/package_hash.py @@ -64,7 +64,7 @@ def __init__(self, spec): # list of URL attributes and metadata attributes # these will be removed from packages. self.metadata_attrs = [s.url_attr for s in spack.fetch_strategy.all_strategies] - self.metadata_attrs += spack.package_base.Package.metadata_attrs + self.metadata_attrs += spack.package_base.PackageBase.metadata_attrs self.spec = spec self.in_classdef = False # used to avoid nested classdefs @@ -158,6 +158,7 @@ def __init__(self, spec): def visit_FunctionDef(self, func): conditions = [] + for dec in func.decorator_list: if isinstance(dec, ast.Call) and dec.func.id == "when": try: diff --git a/lib/spack/spack/util/path.py b/lib/spack/spack/util/path.py index 57f70e18570..690fd87ba76 100644 --- a/lib/spack/spack/util/path.py +++ b/lib/spack/spack/util/path.py @@ -27,16 +27,49 @@ __all__ = ["substitute_config_variables", "substitute_path_variables", "canonicalize_path"] +def architecture(): + # break circular import + import spack.platforms + import spack.spec + + host_platform = spack.platforms.host() + host_os = host_platform.operating_system("default_os") + host_target = 
host_platform.target("default_target") + + return spack.spec.ArchSpec((str(host_platform), str(host_os), str(host_target))) + + +def get_user(): + # User pwd where available because it accounts for effective uids when using ksu and similar + try: + # user pwd for unix systems + import pwd + + return pwd.getpwuid(os.geteuid()).pw_name + except ImportError: + # fallback on getpass + return getpass.getuser() + + # Substitutions to perform def replacements(): # break circular import from spack.util.executable import spack.paths + arch = architecture() + return { "spack": spack.paths.prefix, - "user": getpass.getuser(), + "user": get_user(), "tempdir": tempfile.gettempdir(), "user_cache_path": spack.paths.user_cache_path, + "architecture": str(arch), + "arch": str(arch), + "platform": str(arch.platform), + "operating_system": str(arch.os), + "os": str(arch.os), + "target": str(arch.target), + "target_family": str(arch.target.microarchitecture.family), } @@ -245,6 +278,13 @@ def substitute_config_variables(path, allow_env=True): - $tempdir Default temporary directory returned by tempfile.gettempdir() - $user The current user's username - $user_cache_path The user cache directory (~/.spack, unless overridden) + - $architecture The spack architecture triple for the current system + - $arch The spack architecture triple for the current system + - $platform The spack platform for the current system + - $os The OS of the current system + - $operating_system The OS of the current system + - $target The ISA target detected for the system + - $target_family The family of the target detected for the system These are substituted case-insensitively into the path, and users can use either ``$var`` or ``${var}`` syntax for the variables. 
$env is only diff --git a/lib/spack/spack/util/unparse/unparser.py b/lib/spack/spack/util/unparse/unparser.py index a46d19fa76b..c204aea25a4 100644 --- a/lib/spack/spack/util/unparse/unparser.py +++ b/lib/spack/spack/util/unparse/unparser.py @@ -1243,3 +1243,95 @@ def visit_withitem(self, node): if node.optional_vars: self.write(" as ") self.dispatch(node.optional_vars) + + def visit_Match(self, node): + self.fill("match ") + self.dispatch(node.subject) + with self.block(): + for case in node.cases: + self.dispatch(case) + + def visit_match_case(self, node): + self.fill("case ") + self.dispatch(node.pattern) + if node.guard: + self.write(" if ") + self.dispatch(node.guard) + with self.block(): + self.dispatch(node.body) + + def visit_MatchValue(self, node): + self.dispatch(node.value) + + def visit_MatchSingleton(self, node): + self._write_constant(node.value) + + def visit_MatchSequence(self, node): + with self.delimit("[", "]"): + interleave(lambda: self.write(", "), self.dispatch, node.patterns) + + def visit_MatchStar(self, node): + name = node.name + if name is None: + name = "_" + self.write("*{}".format(name)) + + def visit_MatchMapping(self, node): + def write_key_pattern_pair(pair): + k, p = pair + self.dispatch(k) + self.write(": ") + self.dispatch(p) + + with self.delimit("{", "}"): + keys = node.keys + interleave( + lambda: self.write(", "), + write_key_pattern_pair, + zip(keys, node.patterns), + ) + rest = node.rest + if rest is not None: + if keys: + self.write(", ") + self.write("**{}".format(rest)) + + def visit_MatchClass(self, node): + self.set_precedence(_Precedence.ATOM, node.cls) + self.dispatch(node.cls) + with self.delimit("(", ")"): + patterns = node.patterns + interleave(lambda: self.write(", "), self.dispatch, patterns) + attrs = node.kwd_attrs + if attrs: + + def write_attr_pattern(pair): + attr, pattern = pair + self.write("{}=".format(attr)) + self.dispatch(pattern) + + if patterns: + self.write(", ") + interleave( + lambda: 
self.write(", "), + write_attr_pattern, + zip(attrs, node.kwd_patterns), + ) + + def visit_MatchAs(self, node): + name = node.name + pattern = node.pattern + if name is None: + self.write("_") + elif pattern is None: + self.write(node.name) + else: + with self.require_parens(_Precedence.TEST, node): + self.set_precedence(_Precedence.BOR, node.pattern) + self.dispatch(node.pattern) + self.write(" as {}".format(node.name)) + + def visit_MatchOr(self, node): + with self.require_parens(_Precedence.BOR, node): + self.set_precedence(pnext(_Precedence.BOR), *node.patterns) + interleave(lambda: self.write(" | "), self.dispatch, node.patterns) diff --git a/lib/spack/spack/util/web.py b/lib/spack/spack/util/web.py index fa4119f9173..939ec669c02 100644 --- a/lib/spack/spack/util/web.py +++ b/lib/spack/spack/util/web.py @@ -784,7 +784,7 @@ def find_versions_of_archive( list_depth (int): max depth to follow links on list_url pages. Defaults to 0. concurrency (int): maximum number of concurrent requests - reference_package (spack.package_base.Package or None): a spack package + reference_package (spack.package_base.PackageBase or None): a spack package used as a reference for url detection. Uses the url_for_version method on the package to produce reference urls which, if found, are preferred. diff --git a/lib/spack/spack/variant.py b/lib/spack/spack/variant.py index 864ea264467..cfb43cfc708 100644 --- a/lib/spack/spack/variant.py +++ b/lib/spack/spack/variant.py @@ -96,7 +96,7 @@ def validate_or_raise(self, vspec, pkg_cls=None): Args: vspec (Variant): instance to be validated - pkg_cls (spack.package_base.Package): the package class + pkg_cls (spack.package_base.PackageBase): the package class that required the validation, if available Raises: @@ -245,8 +245,9 @@ class AbstractVariant(object): values. 
""" - def __init__(self, name, value): + def __init__(self, name, value, propagate=False): self.name = name + self.propagate = propagate # Stores 'value' after a bit of massaging # done by the property setter @@ -334,7 +335,7 @@ def copy(self): >>> assert a == b >>> assert a is not b """ - return type(self)(self.name, self._original_value) + return type(self)(self.name, self._original_value, self.propagate) @implicit_variant_conversion def satisfies(self, other): @@ -401,6 +402,8 @@ def __repr__(self): return "{0.__name__}({1}, {2})".format(cls, repr(self.name), repr(self._original_value)) def __str__(self): + if self.propagate: + return "{0}=={1}".format(self.name, ",".join(str(x) for x in self.value)) return "{0}={1}".format(self.name, ",".join(str(x) for x in self.value)) @@ -444,6 +447,9 @@ def __str__(self): values_str = ",".join(x[:7] for x in self.value) else: values_str = ",".join(str(x) for x in self.value) + + if self.propagate: + return "{0}=={1}".format(self.name, values_str) return "{0}={1}".format(self.name, values_str) @@ -460,6 +466,8 @@ def _value_setter(self, value): self._value = str(self._value[0]) def __str__(self): + if self.propagate: + return "{0}=={1}".format(self.name, self.value) return "{0}={1}".format(self.name, self.value) @implicit_variant_conversion @@ -523,6 +531,8 @@ def __contains__(self, item): return item is self.value def __str__(self): + if self.propagate: + return "{0}{1}".format("++" if self.value else "~~", self.name) return "{0}{1}".format("+" if self.value else "~", self.name) diff --git a/lib/spack/spack_installable/__init__.py b/lib/spack/spack_installable/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/lib/spack/spack_installable/main.py b/lib/spack/spack_installable/main.py new file mode 100644 index 00000000000..4a4001b9999 --- /dev/null +++ b/lib/spack/spack_installable/main.py @@ -0,0 +1,59 @@ +import os +import sys +from os.path import dirname as dn + + +def main(argv=None): + # Find 
spack's location and its prefix. + this_file = os.path.realpath(os.path.expanduser(__file__)) + spack_prefix = dn(dn(dn(dn(this_file)))) + + # Allow spack libs to be imported in our scripts + spack_lib_path = os.path.join(spack_prefix, "lib", "spack") + sys.path.insert(0, spack_lib_path) + + # Add external libs + spack_external_libs = os.path.join(spack_lib_path, "external") + + if sys.version_info[:2] <= (2, 7): + sys.path.insert(0, os.path.join(spack_external_libs, "py2")) + + sys.path.insert(0, spack_external_libs) + # Here we delete ruamel.yaml in case it has been already imported from site + # (see #9206 for a broader description of the issue). + # + # Briefly: ruamel.yaml produces a .pth file when installed with pip that + # makes the site installed package the preferred one, even though sys.path + # is modified to point to another version of ruamel.yaml. + if "ruamel.yaml" in sys.modules: + del sys.modules["ruamel.yaml"] + + if "ruamel" in sys.modules: + del sys.modules["ruamel"] + + # The following code is here to avoid failures when updating + # the develop version, due to spurious argparse.pyc files remaining + # in the libs/spack/external directory, see: + # https://github.com/spack/spack/pull/25376 + # TODO: Remove in v0.18.0 or later + try: + import argparse # noqa: F401 + except ImportError: + argparse_pyc = os.path.join(spack_external_libs, "argparse.pyc") + if not os.path.exists(argparse_pyc): + raise + try: + os.remove(argparse_pyc) + import argparse # noqa: F401 + except Exception: + msg = ( + "The file\n\n\t{0}\n\nis corrupted and cannot be deleted by Spack. " + "Either delete it manually or ask some administrator to " + "delete it for you." 
+ ) + print(msg.format(argparse_pyc)) + sys.exit(1) + + import spack.main # noqa: E402 + + sys.exit(spack.main.main(argv)) diff --git a/pyproject.toml b/pyproject.toml index f5fed2df4b1..30b621dec47 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,3 +1,74 @@ +[project] +name="spack" +description="The spack package manager" +dependencies=[ + "clingo", + "setuptools", + "six", + "types-six", +] +dynamic = ["version"] + +[project.scripts] +spack = "lib.spack.spack_installable.main:main" + +[tool.hatch.version] +path = "lib/spack/spack/__init__.py" + +[project.optional-dependencies] +dev = [ + "pip>=21.3", + "pytest", + "pytest-xdist", + "setuptools", + "click==8.0.2", + 'black==21.12b0', + "mypy", + "isort", + "flake8", + "vermin", +] +ci = [ + "pytest-cov", + "codecov[toml]", +] + +[build-system] +requires = ["hatchling"] +build-backend = "hatchling.build" + +[tool.hatch.build.targets.wheel] +include = [ + "/bin", + "/etc", + "/lib", + "/share", + "/var", + "CITATION.cff", + "COPYRIGHT", + "LICENSE-APACHE", + "LICENSE-MIT", + "NOTICE", + "README.md", + "SECURITY.md", +] + +[tool.hatch.envs.default] +features = [ + "dev", +] + +[tool.hatch.envs.default.scripts] +spack = "./bin/spack" +style = "./bin/spack style" +test = "./bin/spack unit-test" + +[tool.hatch.envs.ci] +features = [ + "dev", + "ci", +] + [tool.black] line-length = 99 target-version = ['py27', 'py35', 'py36', 'py37', 'py38', 'py39', 'py310'] diff --git a/share/spack/bootstrap/github-actions-v0.1/clingo.json b/share/spack/bootstrap/github-actions-v0.1/clingo.json deleted file mode 100644 index 3068ad1f191..00000000000 --- a/share/spack/bootstrap/github-actions-v0.1/clingo.json +++ /dev/null @@ -1,257 +0,0 @@ -{ - "verified": [ - { - "binaries": [ - [ - "clingo-bootstrap", - "m4ertsh3ooxdisc5tigglublasu4udfe", - "094548672362306d75012398a6f9b1e8c0f796c833163ca77cf644d84822f25f" - ] - ], - "python": "python@3.5", - "spec": "clingo-bootstrap%apple-clang platform=darwin target=x86_64" - }, - { - 
"binaries": [ - [ - "clingo-bootstrap", - "iv4gr5vscx2io23ljgdueybwatxlk6wo", - "c8110c68ec339d05155392818b21ba87b27905ad798f5f3f194d6312385dbdc2" - ] - ], - "python": "python@3.6", - "spec": "clingo-bootstrap%apple-clang platform=darwin target=x86_64" - }, - { - "binaries": [ - [ - "clingo-bootstrap", - "rwxcxsohrkp5iai3yushsltkdprjmexb", - "fbee764cac890a29bc03c472d3ba0401e915d6924a7cedac9fd8d961159b70e7" - ] - ], - "python": "python@3.7", - "spec": "clingo-bootstrap%apple-clang platform=darwin target=x86_64" - }, - { - "binaries": [ - [ - "clingo-bootstrap", - "p5on7i4hejl775ezndzfdkhvwra3hatn", - "35e32f7c1f80e99da450b52643800fd2895ee2f895109f708b5cf0da6afbedff" - ] - ], - "python": "python@3.8", - "spec": "clingo-bootstrap%apple-clang platform=darwin target=x86_64" - }, - { - "binaries": [ - [ - "clingo-bootstrap", - "omsvlh5v6fi2saw5qyqvzsbvqpvrf5yw", - "cef0e554737dbf22655094d8ae072c67539cce2a37cba1577aeb5aea18b5747c" - ] - ], - "python": "python@3.9", - "spec": "clingo-bootstrap%apple-clang platform=darwin target=x86_64" - }, - { - "binaries": [ - [ - "clingo-bootstrap", - "qguh44xegb72y4q4lar3ufjddissoumv", - "68d2d0c06690d75a794aa2c50be9d6d501fec1b566784bf87b1fc5611f84f3c9" - ] - ], - "python": "python@3.5", - "spec": "clingo-bootstrap%gcc platform=linux target=aarch64" - }, - { - "binaries": [ - [ - "clingo-bootstrap", - "tsypkz7hyylmh5pwpykaf7wcmdunrdiv", - "3b8e3e6e21e399a90c4128776cc591734f9d533f0a7e64ed8babd9cbcf616e3d" - ] - ], - "python": "python@3.6", - "spec": "clingo-bootstrap%gcc platform=linux target=aarch64" - }, - { - "binaries": [ - [ - "clingo-bootstrap", - "2w6b4q5j2zgra6t3zfrjfbvsnoi5yqmk", - "4f335e02641f6ecc7ec7d9d2b8293d07d6e7a7234034531713b760aaa507fa7c" - ] - ], - "python": "python@3.7", - "spec": "clingo-bootstrap%gcc platform=linux target=aarch64" - }, - { - "binaries": [ - [ - "clingo-bootstrap", - "ij7udwpgeghdfoswljwdtwwaylvudze7", - "b5eedf593f198e523aebf6ce127fd8ffcf3d1c980a920cdf1c5a0d2a335e4892" - ] - ], - "python": 
"python@3.8", - "spec": "clingo-bootstrap%gcc platform=linux target=aarch64" - }, - { - "binaries": [ - [ - "clingo-bootstrap", - "a5ppijpzmk7ubjem4i3zttbxp545vjuz", - "8f9755c16c0c99b5c40c420f2c1c6aec2bdff99b25444e001506527af18dd94e" - ] - ], - "python": "python@3.9", - "spec": "clingo-bootstrap%gcc platform=linux target=aarch64" - }, - { - "binaries": [ - [ - "clingo-bootstrap", - "tsnva2bxjguosntz3tk5mqbdgrjvhfcc", - "bcd093c08110309e705beebccd012260a61215eda12c1d47f3a89d4734ec7170" - ] - ], - "python": "python@3.5", - "spec": "clingo-bootstrap%gcc platform=linux target=ppc64le" - }, - { - "binaries": [ - [ - "clingo-bootstrap", - "rfu4la457mdbpoffk2g5hikj2hhoek4s", - "b6417b9b90f3f4e98caaa869393edee08fad3d3c7db37fad8b332c785d0e81e6" - ] - ], - "python": "python@3.6", - "spec": "clingo-bootstrap%gcc platform=linux target=ppc64le" - }, - { - "binaries": [ - [ - "clingo-bootstrap", - "qg3utj2csbgtdwc2slqbjmwn25rkor5r", - "a1760e064d41d364cdf53f89248f7824dad9bf97c6b999df18343b57d21c06ed" - ] - ], - "python": "python@3.7", - "spec": "clingo-bootstrap%gcc platform=linux target=ppc64le" - }, - { - "binaries": [ - [ - "clingo-bootstrap", - "uxkpc2euofdkgveonftklstnxyha5wsf", - "d6a04e7b15dae05eacce54806fa18356b392a5e2d212a55bf0960116b8e9dfef" - ] - ], - "python": "python@3.8", - "spec": "clingo-bootstrap%gcc platform=linux target=ppc64le" - }, - { - "binaries": [ - [ - "clingo-bootstrap", - "i2uoawqsmmkr2uwb42oxfsnt4djzejrj", - "6511837f87e50c40a7f4aab2ec7454e5200594821e4d1fc4a441d3be647b9acb" - ] - ], - "python": "python@3.9", - "spec": "clingo-bootstrap%gcc platform=linux target=ppc64le" - }, - { - "binaries": [ - [ - "clingo-bootstrap", - "p3bqplnwqo66lvvolmtaezck77jafrc4", - "436f9483e4028c12c32ba9f4e7e91e944bf9819ef487dfe4e42ddd1d487c93ee" - ] - ], - "python": "python@2.6", - "spec": "clingo-bootstrap%gcc platform=linux target=x86_64" - }, - { - "binaries": [ - [ - "clingo-bootstrap", - "rpa6yyujr7ilnfcowq2pbmkmh7uzrijp", - 
"3be0f4ccd412d45506f9f305ef7f6621cd246fbde97aed081595d01dafe3c397" - ] - ], - "python": "python@2.7+ucs4", - "spec": "clingo-bootstrap%gcc platform=linux target=x86_64" - }, - { - "binaries": [ - [ - "clingo-bootstrap", - "y32mbn7swer3yxvgf6tmkgekpo23uo5z", - "f6e0716bd97f2df123abcd96ec8884c525a9fd10b81e0062784e7b0d2df3f622" - ] - ], - "python": "python@2.7~ucs4", - "spec": "clingo-bootstrap%gcc platform=linux target=x86_64" - }, - { - "binaries": [ - [ - "clingo-bootstrap", - "ohtmyp5c74xt75csx4llbjs5anobryp6", - "7d613ddbca1640d761311fb00403c0cb65e279534c44a2129b8d9610f6146e78" - ] - ], - "python": "python@3.5", - "spec": "clingo-bootstrap%gcc platform=linux target=x86_64" - }, - { - "binaries": [ - [ - "clingo-bootstrap", - "vcipwnf57slgoo7busvvkzjkk7vydeb5", - "db5222760045f20ad1e5c194179d31273b8e4bfa6ade38e15cd3182d685cc05b" - ] - ], - "python": "python@3.6", - "spec": "clingo-bootstrap%gcc platform=linux target=x86_64" - }, - { - "binaries": [ - [ - "clingo-bootstrap", - "ffoyoewfd6pdwbjniodfkqusyvkrbhyi", - "14cea5f6cfd86bcb8de38ad8c1a5e44cc22955de2e7c78b825b617dccd107dbe" - ] - ], - "python": "python@3.7", - "spec": "clingo-bootstrap%gcc platform=linux target=x86_64" - }, - { - "binaries": [ - [ - "clingo-bootstrap", - "kt74l7kjzrlp3cgtj2576o33mhsrgyrw", - "e71de4beb68bb3e58bd2dcb98dc3be3a375c82781b6f7cb01bc5d552c2240bd2" - ] - ], - "python": "python@3.8", - "spec": "clingo-bootstrap%gcc platform=linux target=x86_64" - }, - { - "binaries": [ - [ - "clingo-bootstrap", - "hmnv6gk5wha64k6r3s7hid35mzvhkuot", - "b08ff59357fa184ce39b8cc0a17aaf7f0a925a449ab389e1afa4eab6ae026f2e" - ] - ], - "python": "python@3.9", - "spec": "clingo-bootstrap%gcc platform=linux target=x86_64" - } - ] -} \ No newline at end of file diff --git a/share/spack/bootstrap/github-actions-v0.1/gnupg.json b/share/spack/bootstrap/github-actions-v0.1/gnupg.json deleted file mode 100644 index d6400febfce..00000000000 --- a/share/spack/bootstrap/github-actions-v0.1/gnupg.json +++ /dev/null @@ 
-1,204 +0,0 @@ -{ - "verified": [ - { - "binaries": [ - [ - "libgpg-error", - "hph66gvb7vlinpzwoytxq27ojb7gtl2j", - "f040f513e212c428ee863d75924331f58b5f8f3946f98c757a9e0af0d0a34f63" - ], - [ - "libiconv", - "ckpif6rcf7mxdmceyv6sqvtnwfqi7fmc", - "1d745d04f7c8a1c4d17d9735eba0ee88c8acfbb213c22a15e45e58637867ed4c" - ], - [ - "npth", - "fjuoy73whriauk3bt6ma5fwet6iric7y", - "78d5d9e339ef901b0def0924a72ce87a93e0a8acb7394ec2c35be6c328154444" - ], - [ - "zlib", - "qo6otxqnn6mxpw4zhqc4wdwqmgcfjdfe", - "f00c38ecaf316cd665399ed14c233f839ae5868387ff04998e1ec949c1f4dcd6" - ], - [ - "libassuan", - "2upi74qccouj4k6d7wultp2u5fntayi3", - "f2118b102f9a94bb1e2804492689b44b260b7f6e46ac1208d5110ebffe24bf99" - ], - [ - "libgcrypt", - "xzhvvm44cfxwvgqgkpbeucpnl4dbj4p2", - "ae717e068f2f7f4eaeee4bdec4a6b20ff299c59c3d724c1661b6045fda628a9b" - ], - [ - "libksba", - "aodyg5mzfule3vimuetmzulv5mzdx37g", - "c665eb20f27b2d28fcb634fe958829165e44a27b1ad69995d5040f13d5693d52" - ], - [ - "pinentry", - "ihqcvdm5okxuvnln463l7h4flbkhrp44", - "b0c7781354eb4a7c9e979802590c0e4fb7eb53f440191367f0142eac4868f8d6" - ], - [ - "gnupg", - "47vilwybwuxved7jov7esiad3qlkv5rp", - "83f3de13b2712a05f520d16b5141797493f8b117041dd32aa5414a25d9d53439" - ] - ], - "spec": "gnupg@2.3: %apple-clang platform=darwin target=x86_64" - }, - { - "binaries": [ - [ - "libgpg-error", - "3dkguooajaaejhsebigs2e3lhk37mtem", - "09c5edd93fb479961d62d9938c1ea02b8f443babf0e79776f1539085c3422cd5" - ], - [ - "libiconv", - "i2eqtudh3zcxt5fvxuhe6n2ztuqbadtp", - "838786e029474d668b5f83a9669d227c812253c3c3f0328aa4f888542a7de830" - ], - [ - "npth", - "c3z6gg3ilutvvryxkjrboampqv5u5s2s", - "967522ae988ccce8c922f28aa2124170246f501f0a45179b919d827bf28c35d2" - ], - [ - "zlib", - "p2jozvok56voja7652dms4gvthpcjzta", - "41cbc69850640ed4466dbedc1bb4ccb0ade0c1a1e8fcd70d1e247b1387b937b5" - ], - [ - "libassuan", - "s2wx2xvt3iz3sigcdt5tvppj2m7e2bsf", - "5f766af4ff355769e3e456a9c95636a20a64f5ba32aecec633216a3d83a854f8" - ], - [ - "libgcrypt", - 
"gznmtryix6ck4x3chnuvbctz4xa3fmxl", - "0261b03f790c5320980d27bf0a471a1a4663929689ddfaeb5e568d33be8dc889" - ], - [ - "libksba", - "uxaryyfybbcw563jcwumufhfmbsawlbz", - "f45fff7a6a5c626a1474c7354fd00e18e629fcd086787336f7d62d1ead50c94f" - ], - [ - "pinentry", - "ias6sb4qi24u6i7axr5hkj4liq5dtr6l", - "a2a8e7652dceb7d080ff78726d28099f9050cb9f6e290d97f1f59f4b42521b9c" - ], - [ - "gnupg", - "qpm457bujhmfqy66euzhswokumuvufbz", - "d2371e26412e10fc702b9b2482aff776108194b84e1927644a3d64f5710fd163" - ] - ], - "spec": "gnupg@2.3: %gcc platform=linux target=aarch64" - }, - { - "binaries": [ - [ - "libgpg-error", - "4bp6dcfdbzbd4uuzvbgjyqunhjedg3lf", - "9a9947240c6af7e915aa8336bfaed8706c9129967eb9ab1895598217df91f301" - ], - [ - "libiconv", - "dscneqtpyy32r6ds24izlkki3euthnbr", - "a9dc099f6c7ee9fd6c63757cb81a59fe4b81672543d5253a50bb73bc819440ef" - ], - [ - "npth", - "jukmafxhkxo2s4udlzi5r5b6bbb3udw5", - "d2a2b11c0f1794ab0de813753221bde073508fbec19f0b15dbfd23455bc6de87" - ], - [ - "zlib", - "amqfrcbn67rochzkeu2ashklo35ayqqq", - "686fc10058d208530889bc5c3779aa2cc324b77301878a5405cf65ca53d613ba" - ], - [ - "libassuan", - "lyeih2j3miy7yugmwh37h667fogqn3fl", - "f87c474d81c890232cd8e1e4d93b5b232aa0ad428dcaa7231d7a8d182cea9ecc" - ], - [ - "libgcrypt", - "zb33zulvwcansfzu5km4d34avujnazfa", - "e67ae6a5345f9e864bd2009c1a9d7eb65a63ca2841368bebc477a770fb8dcaf5" - ], - [ - "libksba", - "yjuh2aplj23qyvaqixukd7a6eokfdgyp", - "6944fc047e8f0eb41daec734e2505016369319c64929f5ec8d3a8f99e01928d4" - ], - [ - "pinentry", - "xd7vajghgcueohv5qgahdvbjpcnrurns", - "a6b37efd6ef9f9374aa0c7d1869da990ae3668938b47ad6af50138d2ea05da02" - ], - [ - "gnupg", - "ti2ddl27nilobj2ctwsgzl52qque5o7z", - "43781437e3dfae158e7a6911313a4162d8ffa5313182438d1e6547a854f6f38a" - ] - ], - "spec": "gnupg@2.3: %gcc platform=linux target=ppc64le" - }, - { - "binaries": [ - [ - "libgpg-error", - "p7chd5hhecdkc27npia4uaoeabjit4gh", - "4b5e1f418b7afdd91755d54d38a51d5d048aa3b1e5239bcaf3453c8ca1cca4b6" - ], - [ - "libiconv", - 
"scpkgy6bmk3wcgfwzoiv7hw74drmnaoi", - "2bcb9a2868c20284ce65ab53d4f7bb4c7edccd4c14390460858e25a8bc48faa3" - ], - [ - "npth", - "6vh3jypaf7u2zez3vohn66fvo6znt35l", - "23a333c4e83910eb1f87c91caffb07f40b592561a4c44924fed9459751c017f7" - ], - [ - "zlib", - "uc25tb5r57nykfrxszsdy54trzqnk2jn", - "9e18c1146bc3dcb8454d18502013b8621ecf00d2f2d4d66d76cbe1e07f351ac8" - ], - [ - "libassuan", - "vdoskg5mldu6ixhvftwplp4zdftwxwws", - "1413b84af0c58127032e7bde86dbacf35dc65205aee1c2071718678bc57ce793" - ], - [ - "libgcrypt", - "ng7gfusjpnypmqgckq7rp4vq3bvylp3b", - "1a09e97eb2333812f8381d4737aca4d7cfd9f27ebae30eddbcf99f399ad67fec" - ], - [ - "libksba", - "p4feho36xa7dhabk766fzglwyo2dfbj6", - "000ef0f2ad3aa05c07272849be68e059ec60946970ab8875a824305afe832c9a" - ], - [ - "pinentry", - "m423kpm7k52r66q3sdctqcjxtekiyrrp", - "5739bee66271d7f0d5b9bcf5c248f1a434e9cdcb327a4a5a22fc47f565ac0de7" - ], - [ - "gnupg", - "dlapzqxrwduafgfq2evptizb7p4kgpkh", - "262177fa8f66468e589f8b3e10d17531f17a74ea0f5ac6905ac948198dca3c3c" - ] - ], - "spec": "gnupg@2.3: %gcc platform=linux target=x86_64" - } - ] -} \ No newline at end of file diff --git a/share/spack/bootstrap/github-actions-v0.2/clingo.json b/share/spack/bootstrap/github-actions-v0.2/clingo.json deleted file mode 100644 index 60e771221df..00000000000 --- a/share/spack/bootstrap/github-actions-v0.2/clingo.json +++ /dev/null @@ -1,268 +0,0 @@ -{ - "verified": [ - { - "binaries": [ - [ - "clingo-bootstrap", - "i5rx6vbyw7cyg3snajcpnuozo7l3lcab", - "c55d1c76adb82ac9fbe67725641ef7e4fe1ae11e2e8da0dc93a3efe362549127" - ] - ], - "python": "python@3.10", - "spec": "clingo-bootstrap%apple-clang platform=darwin target=x86_64" - }, - { - "binaries": [ - [ - "clingo-bootstrap", - "xoxkdgo3n332ewhbh7pz2zuevrjxkrke", - "b50e2fba026e85af3f99b3c412b4f0c88ec2fbce15b48eeb75072f1d3737f3cc" - ] - ], - "python": "python@3.5", - "spec": "clingo-bootstrap%apple-clang platform=darwin target=x86_64" - }, - { - "binaries": [ - [ - "clingo-bootstrap", - 
"sgmirxbu3bpn4rdpfs6jlyycfrkfxl5i", - "b0a574df6f5d59491a685a31a8ed99fb345c850a91df62ef232fbe0cca716ed1" - ] - ], - "python": "python@3.6", - "spec": "clingo-bootstrap%apple-clang platform=darwin target=x86_64" - }, - { - "binaries": [ - [ - "clingo-bootstrap", - "5hn7hszlizeqq3leqi6lrdmyy5ssv6zs", - "36e24bc3bd27b125fdeb30d51d2554e44288877c0ce6df5a878bb4e8a1d5847a" - ] - ], - "python": "python@3.7", - "spec": "clingo-bootstrap%apple-clang platform=darwin target=x86_64" - }, - { - "binaries": [ - [ - "clingo-bootstrap", - "qk3ecxakadq4naakng6mhdfkwauef3dn", - "9d974c0d2b546d18f0ec35e08d5ba114bf2867f7ff7c7ea990b79d019ece6380" - ] - ], - "python": "python@3.8", - "spec": "clingo-bootstrap%apple-clang platform=darwin target=x86_64" - }, - { - "binaries": [ - [ - "clingo-bootstrap", - "2omdsvzshkn2u3l5vwvwoey4es5cowfu", - "cbf72eb932ac847f87b1640f8e70e26f5261967288f7d6db19206ef352e36a88" - ] - ], - "python": "python@3.9", - "spec": "clingo-bootstrap%apple-clang platform=darwin target=x86_64" - }, - { - "binaries": [ - [ - "clingo-bootstrap", - "ifgzrctoh2ibrmitp6ushrvrnaeqtkr7", - "1c609df7351286fe09aa3452fa7ed7fedf903e9fa12cde89b916a0fc4c022949" - ] - ], - "python": "python@3.10", - "spec": "clingo-bootstrap%gcc platform=linux target=aarch64" - }, - { - "binaries": [ - [ - "clingo-bootstrap", - "esfzjhodgh5be22hvh3trg2ojzrmhzwt", - "8d070cdb2a5103cde3e6f873b1eb11d25f60464f3059d8643f943e5c9a9ec76c" - ] - ], - "python": "python@3.6", - "spec": "clingo-bootstrap%gcc platform=linux target=aarch64" - }, - { - "binaries": [ - [ - "clingo-bootstrap", - "5b4uhkhrvtvdmsnctjx2isrxciy6v2o2", - "336b8b1202a8a28a0e34a98e5780ae0e2b2370b342ce67434551009b1a7c8db9" - ] - ], - "python": "python@3.7", - "spec": "clingo-bootstrap%gcc platform=linux target=aarch64" - }, - { - "binaries": [ - [ - "clingo-bootstrap", - "czapgrrey6llnsu2m4qaamv3so2lybxm", - "16bdfe4b08ee8da38f3e2c7d5cc44a38d87696cc2b6de0971a4de25efb8ad8e4" - ] - ], - "python": "python@3.8", - "spec": "clingo-bootstrap%gcc 
platform=linux target=aarch64" - }, - { - "binaries": [ - [ - "clingo-bootstrap", - "7za6vsetahbghs4d2qe4ajtf2iyiacwx", - "730ae7e6096ec8b83a0fc9464dda62bd6c2fec1f8565bb291f4d1ffe7746703b" - ] - ], - "python": "python@3.9", - "spec": "clingo-bootstrap%gcc platform=linux target=aarch64" - }, - { - "binaries": [ - [ - "clingo-bootstrap", - "zulnxrmchldtasffqw6qacmgg4y2qumj", - "8988325db53c0c650f64372c21571ac85e9ba4577975d14ae7dba8ab7728b5fc" - ] - ], - "python": "python@3.10", - "spec": "clingo-bootstrap%gcc platform=linux target=ppc64le" - }, - { - "binaries": [ - [ - "clingo-bootstrap", - "lx54ebqzwtjpfgch7kagoxkmul56z7fa", - "81d64229299e76f9dc81f88d286bc94725e7cbcbb29ad0d66aaeaff73dd6473a" - ] - ], - "python": "python@3.6", - "spec": "clingo-bootstrap%gcc platform=linux target=ppc64le" - }, - { - "binaries": [ - [ - "clingo-bootstrap", - "isu2rjoicl4xzmbl3k2c4bg35gvejkgz", - "fcc4b052832cfd327d11f657c2b7715d981b0894ed03bbce18b23a842c7d706d" - ] - ], - "python": "python@3.7", - "spec": "clingo-bootstrap%gcc platform=linux target=ppc64le" - }, - { - "binaries": [ - [ - "clingo-bootstrap", - "ob3k3g2wjy7cw33lfobjar44sqmojyth", - "f51fd6256bfd3afc8470614d87df61e5c9dd582fcc70f707ca66ba2b7255da12" - ] - ], - "python": "python@3.8", - "spec": "clingo-bootstrap%gcc platform=linux target=ppc64le" - }, - { - "binaries": [ - [ - "clingo-bootstrap", - "norpsmparkl5dfuzdqj4537o77vjbgsl", - "477c041857b60f29ff9d6c7d2982b7eb49a2e02ebbc98af11488c32e2fb24081" - ] - ], - "python": "python@3.9", - "spec": "clingo-bootstrap%gcc platform=linux target=ppc64le" - }, - { - "binaries": [ - [ - "clingo-bootstrap", - "gypv5loj2ml73duq6sr76yg5rj25te2m", - "c855d7d32aadec37c41e51f19b83558b32bc0b946a9565dba0e659c6820bd6c3" - ] - ], - "python": "python@2.7+ucs4", - "spec": "clingo-bootstrap%gcc platform=linux target=x86_64" - }, - { - "binaries": [ - [ - "clingo-bootstrap", - "rjopyx7hum3hqhgsdyw3st7frdfgrv3p", - "0e555f9bc99b4e4152939b30b2257f4f353941d152659e716bf6123c0ce11a60" - ] - ], - 
"python": "python@2.7~ucs4", - "spec": "clingo-bootstrap%gcc platform=linux target=x86_64" - }, - { - "binaries": [ - [ - "clingo-bootstrap", - "2l45t4kw3cqqwj6vbxhfwhzlo6b3q2p4", - "6cb90de5a3d123b7408cfef693a9a78bb69c66abbfed746c1e85aa0acb848d03" - ] - ], - "python": "python@3.10", - "spec": "clingo-bootstrap%gcc platform=linux target=x86_64" - }, - { - "binaries": [ - [ - "clingo-bootstrap", - "4psiezojm7dexequtbnav77wvgcajigq", - "b3fc33b5482357613294becb54968bd74de638abeae69e27c6c4319046a7e352" - ] - ], - "python": "python@3.5", - "spec": "clingo-bootstrap%gcc platform=linux target=x86_64" - }, - { - "binaries": [ - [ - "clingo-bootstrap", - "dzhvhynye4z7oalowdcy5zt25lej3m2n", - "61c5f3e80bcc7acfc65e335f1910762df2cc5ded9d7e1e5977380a24de553dd7" - ] - ], - "python": "python@3.6", - "spec": "clingo-bootstrap%gcc platform=linux target=x86_64" - }, - { - "binaries": [ - [ - "clingo-bootstrap", - "dtwevigmwgke4g6ee5byktpmzmrp2kvx", - "636937244b58611ec2eedb4422a1076fcaf09f3998593befb5a6ff1a74e1d5f7" - ] - ], - "python": "python@3.7", - "spec": "clingo-bootstrap%gcc platform=linux target=x86_64" - }, - { - "binaries": [ - [ - "clingo-bootstrap", - "shqedxgvjnhiwdcdrvjhbd73jaevv7wt", - "b3615b2a94a8a15fddaa74cf4d9f9b3a516467a843cdeab597f72dcf6be5e31d" - ] - ], - "python": "python@3.8", - "spec": "clingo-bootstrap%gcc platform=linux target=x86_64" - }, - { - "binaries": [ - [ - "clingo-bootstrap", - "z6v6zvc6awioeompbvo735b4flr3yuyz", - "1389192bd74c1f7059d95c4a41500201cbc2905cbba553678613e0b7e3b96c71" - ] - ], - "python": "python@3.9", - "spec": "clingo-bootstrap%gcc platform=linux target=x86_64" - } - ] -} \ No newline at end of file diff --git a/share/spack/bootstrap/github-actions-v0.2/gnupg.json b/share/spack/bootstrap/github-actions-v0.2/gnupg.json deleted file mode 100644 index 2f568297892..00000000000 --- a/share/spack/bootstrap/github-actions-v0.2/gnupg.json +++ /dev/null @@ -1,204 +0,0 @@ -{ - "verified": [ - { - "binaries": [ - [ - "libiconv", - 
"d6dhoguolmllbzy2h6pnvjm3tti6uy6f", - "7fe765a87945991d4e57782ed67c4bf42a10f95582eecd6f57de80a545bde821" - ], - [ - "npth", - "x6fb7zx6n7mos5knvi6wlnaadd7r2szx", - "fd1e5a62107339f45219c32ba20b5e82aa0880c31ac86d1b245d388ca4546990" - ], - [ - "zlib", - "c5wm3jilx6zsers3sfgdisjqusoza4wr", - "7500a717c62736872aa65df4599f797ef67b21086dd6236b4c7712cfffac9bf3" - ], - [ - "libassuan", - "3qv4bprobfwes37clg764cfipdzjdbto", - "d85cd9d2c63a296300d4dcbd667421956df241109daef5e12d3ca63fa241cb14" - ], - [ - "libgcrypt", - "3y4ubdgxvgpvhxr3bk4l5mkw4gv42n7e", - "9dad7c2635344957c4db68378964d3af84ea052d45dbe8ded9a6e6e47211daa8" - ], - [ - "libgpg-error", - "doido34kfwsvwpj4c4jcocahjb5ltebw", - "20e5c238bee91d2a841f0b4bd0358ded59a0bd665d7f251fd9cd42f83e0b283b" - ], - [ - "libksba", - "mttecm7gzdv544lbzcoahchnboxysrvi", - "1c0ae64e828a597e4cf15dd997c66cd677e41f68c63db09b9551480a197052a2" - ], - [ - "pinentry", - "se7xgv7yf4ywpjnbv7voxgeuuvs77ahb", - "2fd13fbee7ca2361dc5dd09708c72d0489611301b60635cb0206bc5b94add884" - ], - [ - "gnupg", - "yannph34bpaqkhsv5mz2icwhy3epiqxd", - "1de8b4e119fa3455d0170466fa0fb8e04957fab740aec32535b4667279312b3f" - ] - ], - "spec": "gnupg@2.3: %apple-clang platform=darwin target=x86_64" - }, - { - "binaries": [ - [ - "zlib", - "t2hjzsyf3txkg64e4bq3nihe26rzzdws", - "171e720840a28af50b62141be77bc525e666cffd1fbbe2ee62673214e8b0280f" - ], - [ - "libiconv", - "yjdji2wj4njz72fyrg46jlz5f5wfbhfr", - "94c773c3d0294cf248ec1f3e9862669dfa743fe1a76de580d9425c14c8f7dcd2" - ], - [ - "npth", - "kx3vzmpysee7jxwsudarthrmyop6hzgc", - "f8cc6204fa449ce576d450396ec2cad40a75d5712c1381a61ed1681a54f9c79f" - ], - [ - "libassuan", - "e5n5l5ftzwxs4ego5furrdbegphb6hxp", - "ef0428874aa81bcb9944deed88e1fc639f629fe3d522cab3c281235ae2a53db9" - ], - [ - "libgcrypt", - "wyncpahrpqsmpk4b7nlhg5ekkjzyjdzs", - "2309548c51a17f580f036445b701feb85d2bc552b9c4404418c2f223666cfe3b" - ], - [ - "libgpg-error", - "vhcdd6jkbiday2seg3rlkbzpf6jzfdx7", - 
"79dd719538d9223d6287c0bba07b981944ab6d3ab11e5060274f1b7c727daf55" - ], - [ - "libksba", - "azcgpgncynoox3dce45hkz46bp2tb5rr", - "15d301f201a5162234261fcfccd579b0ff484131444a0b6f5c0006224bb155d6" - ], - [ - "pinentry", - "e3z5ekbv4jlsie4qooubcfvsk2sb6t7l", - "5fd27b8e47934b06554e84f1374a90a93e71e60a14dbde672a8da414b27b97f4" - ], - [ - "gnupg", - "i5agfvsmzdokuooaqhlh6vro5giwei2t", - "f1bde7a1f0c84c1bbcde5757a96cf7a3e9157c2cfa9907fde799aa8e04c0d51f" - ] - ], - "spec": "gnupg@2.3: %gcc platform=linux target=aarch64" - }, - { - "binaries": [ - [ - "zlib", - "v5rr6ba37tudzfuv2jszwikgcl4wd3cd", - "371ad4b277af7b97c7871b9931f2764c97362620c7990c5ad8fdb5c42a1d30dc" - ], - [ - "libiconv", - "bvcnx2e4bumjcgya4dczdhjb3fhqyass", - "65a00b717b3a4ee1b5ab9f84163722bdfea8eb20a2eecc9cf657c0eaac0227e9" - ], - [ - "npth", - "dkb6ez6a4c3iyrv67llwf5mzmynqdmtj", - "4d77351661d0e0130b1c89fb6c6a944aee41d701ef80d056d3fc0178a7f36075" - ], - [ - "libassuan", - "tuydcxdbb5jfvw3gri7y24b233kgotgd", - "d8775e7c1dd252437c6fa0781675b1d2202cfc0c8190e60d248928b6fca8bc9f" - ], - [ - "libgcrypt", - "kgxmg4eukwx6nn3bdera3j7cf7hxfy6n", - "6046523f10ed54be50b0211c27191b3422886984fc0c00aed1a85d1f121c42e6" - ], - [ - "libgpg-error", - "ewhrwnltlrzkpqyix2vbkf4ruq6b6ea3", - "3f3bbbf1a3cb82d39313e39bcbe3dad94a176130fc0e9a8045417d6223fb4f31" - ], - [ - "libksba", - "onxt5ry2fotgwiognwmhxlgnekuvtviq", - "3a4df13f8b880441d1df4b234a4ca01de7601d84a6627185c2b3191a34445d40" - ], - [ - "pinentry", - "fm3m4rsszzxxakcpssd34jbbe4ihrhac", - "73afa46176a7ec8f02d01a2caad3e400dc18c3c8a53f92b88a9aa9e3653db3e6" - ], - [ - "gnupg", - "gwr65ovh4wbxjgniaoqlbt3yla6rdikj", - "7a3f7afe69ca67797a339c04028ca45a9630933020b57cb56e28453197fe8a57" - ] - ], - "spec": "gnupg@2.3: %gcc platform=linux target=ppc64le" - }, - { - "binaries": [ - [ - "libiconv", - "vec3ac6t4ag3lb7ycvisafthqmpci74b", - "35d184218e525d8aaea60082fd2d0f1e80449ec32746cceda2ea0ca106e9a095" - ], - [ - "npth", - "jx3kmy3ilc66rgg5mqtbed5z6qwt3vrd", - 
"74c2c1b087667661da3e24ac83bcecf1bc2d10d69e7678d1fd232875fe295135" - ], - [ - "zlib", - "wnpbp4pu7xca24goggcy773d2y4pobbd", - "bcbd5310e8c5e75cbf33d8155448b212486dc543469d6df7e56dcecb6112ee88" - ], - [ - "libassuan", - "ynn33wutdtoo2lbjjoizgslintxst2zl", - "ac3b060690c6da0c64dcf35da047b84cc81793118fb9ff29b993f3fb9efdc258" - ], - [ - "libgcrypt", - "zzofcjer43vsxwj27c3rxapjxhsz4hlx", - "4b1977d815f657c2d6af540ea4b4ce80838cadcf4ada72a8ba142a7441e571ea" - ], - [ - "libgpg-error", - "gzr2ucybgks5jquvf4lv7iprxq5vx5le", - "a12ecb5cfd083a29d042fd309ebb5ab8fd4ace0b68b27f89b857e9a84d75b5be" - ], - [ - "libksba", - "hw4u4pam6mp3henpw476axtqaahfdy64", - "5424caf98a2d48e0ed0b9134353c242328ebeef6d2b31808d58969165e809b47" - ], - [ - "pinentry", - "hffsjitsewdgoijwgzvub6vpjwm33ywr", - "8ed7504b5b2d13ab7e1f4a0e27a882c33c5a6ebfcb43c51269333c0d6d5e1448" - ], - [ - "gnupg", - "lge4h2kjgvssyspnvutq6t3q2xual5oc", - "6080ce00fcc24185e4051a30f6d52982f86f46eee6d8a2dc4d83ab08d8195be8" - ] - ], - "spec": "gnupg@2.3: %gcc platform=linux target=x86_64" - } - ] -} \ No newline at end of file diff --git a/share/spack/bootstrap/github-actions-v0.2/metadata.yaml b/share/spack/bootstrap/github-actions-v0.2/metadata.yaml deleted file mode 100644 index f786731aa8f..00000000000 --- a/share/spack/bootstrap/github-actions-v0.2/metadata.yaml +++ /dev/null @@ -1,8 +0,0 @@ -type: buildcache -description: | - Buildcache generated from a public workflow using Github Actions. - The sha256 checksum of binaries is checked before installation. 
-info: - url: https://mirror.spack.io/bootstrap/github-actions/v0.2 - homepage: https://github.com/spack/spack-bootstrap-mirrors - releases: https://github.com/spack/spack-bootstrap-mirrors/releases diff --git a/share/spack/bootstrap/github-actions-v0.3/clingo.json b/share/spack/bootstrap/github-actions-v0.3/clingo.json deleted file mode 120000 index 049ba5f7ce7..00000000000 --- a/share/spack/bootstrap/github-actions-v0.3/clingo.json +++ /dev/null @@ -1 +0,0 @@ -../github-actions-v0.2/clingo.json \ No newline at end of file diff --git a/share/spack/bootstrap/github-actions-v0.3/clingo.json b/share/spack/bootstrap/github-actions-v0.3/clingo.json new file mode 100644 index 00000000000..60e771221df --- /dev/null +++ b/share/spack/bootstrap/github-actions-v0.3/clingo.json @@ -0,0 +1,268 @@ +{ + "verified": [ + { + "binaries": [ + [ + "clingo-bootstrap", + "i5rx6vbyw7cyg3snajcpnuozo7l3lcab", + "c55d1c76adb82ac9fbe67725641ef7e4fe1ae11e2e8da0dc93a3efe362549127" + ] + ], + "python": "python@3.10", + "spec": "clingo-bootstrap%apple-clang platform=darwin target=x86_64" + }, + { + "binaries": [ + [ + "clingo-bootstrap", + "xoxkdgo3n332ewhbh7pz2zuevrjxkrke", + "b50e2fba026e85af3f99b3c412b4f0c88ec2fbce15b48eeb75072f1d3737f3cc" + ] + ], + "python": "python@3.5", + "spec": "clingo-bootstrap%apple-clang platform=darwin target=x86_64" + }, + { + "binaries": [ + [ + "clingo-bootstrap", + "sgmirxbu3bpn4rdpfs6jlyycfrkfxl5i", + "b0a574df6f5d59491a685a31a8ed99fb345c850a91df62ef232fbe0cca716ed1" + ] + ], + "python": "python@3.6", + "spec": "clingo-bootstrap%apple-clang platform=darwin target=x86_64" + }, + { + "binaries": [ + [ + "clingo-bootstrap", + "5hn7hszlizeqq3leqi6lrdmyy5ssv6zs", + "36e24bc3bd27b125fdeb30d51d2554e44288877c0ce6df5a878bb4e8a1d5847a" + ] + ], + "python": "python@3.7", + "spec": "clingo-bootstrap%apple-clang platform=darwin target=x86_64" + }, + { + "binaries": [ + [ + "clingo-bootstrap", + "qk3ecxakadq4naakng6mhdfkwauef3dn", + 
"9d974c0d2b546d18f0ec35e08d5ba114bf2867f7ff7c7ea990b79d019ece6380" + ] + ], + "python": "python@3.8", + "spec": "clingo-bootstrap%apple-clang platform=darwin target=x86_64" + }, + { + "binaries": [ + [ + "clingo-bootstrap", + "2omdsvzshkn2u3l5vwvwoey4es5cowfu", + "cbf72eb932ac847f87b1640f8e70e26f5261967288f7d6db19206ef352e36a88" + ] + ], + "python": "python@3.9", + "spec": "clingo-bootstrap%apple-clang platform=darwin target=x86_64" + }, + { + "binaries": [ + [ + "clingo-bootstrap", + "ifgzrctoh2ibrmitp6ushrvrnaeqtkr7", + "1c609df7351286fe09aa3452fa7ed7fedf903e9fa12cde89b916a0fc4c022949" + ] + ], + "python": "python@3.10", + "spec": "clingo-bootstrap%gcc platform=linux target=aarch64" + }, + { + "binaries": [ + [ + "clingo-bootstrap", + "esfzjhodgh5be22hvh3trg2ojzrmhzwt", + "8d070cdb2a5103cde3e6f873b1eb11d25f60464f3059d8643f943e5c9a9ec76c" + ] + ], + "python": "python@3.6", + "spec": "clingo-bootstrap%gcc platform=linux target=aarch64" + }, + { + "binaries": [ + [ + "clingo-bootstrap", + "5b4uhkhrvtvdmsnctjx2isrxciy6v2o2", + "336b8b1202a8a28a0e34a98e5780ae0e2b2370b342ce67434551009b1a7c8db9" + ] + ], + "python": "python@3.7", + "spec": "clingo-bootstrap%gcc platform=linux target=aarch64" + }, + { + "binaries": [ + [ + "clingo-bootstrap", + "czapgrrey6llnsu2m4qaamv3so2lybxm", + "16bdfe4b08ee8da38f3e2c7d5cc44a38d87696cc2b6de0971a4de25efb8ad8e4" + ] + ], + "python": "python@3.8", + "spec": "clingo-bootstrap%gcc platform=linux target=aarch64" + }, + { + "binaries": [ + [ + "clingo-bootstrap", + "7za6vsetahbghs4d2qe4ajtf2iyiacwx", + "730ae7e6096ec8b83a0fc9464dda62bd6c2fec1f8565bb291f4d1ffe7746703b" + ] + ], + "python": "python@3.9", + "spec": "clingo-bootstrap%gcc platform=linux target=aarch64" + }, + { + "binaries": [ + [ + "clingo-bootstrap", + "zulnxrmchldtasffqw6qacmgg4y2qumj", + "8988325db53c0c650f64372c21571ac85e9ba4577975d14ae7dba8ab7728b5fc" + ] + ], + "python": "python@3.10", + "spec": "clingo-bootstrap%gcc platform=linux target=ppc64le" + }, + { + "binaries": [ 
+ [ + "clingo-bootstrap", + "lx54ebqzwtjpfgch7kagoxkmul56z7fa", + "81d64229299e76f9dc81f88d286bc94725e7cbcbb29ad0d66aaeaff73dd6473a" + ] + ], + "python": "python@3.6", + "spec": "clingo-bootstrap%gcc platform=linux target=ppc64le" + }, + { + "binaries": [ + [ + "clingo-bootstrap", + "isu2rjoicl4xzmbl3k2c4bg35gvejkgz", + "fcc4b052832cfd327d11f657c2b7715d981b0894ed03bbce18b23a842c7d706d" + ] + ], + "python": "python@3.7", + "spec": "clingo-bootstrap%gcc platform=linux target=ppc64le" + }, + { + "binaries": [ + [ + "clingo-bootstrap", + "ob3k3g2wjy7cw33lfobjar44sqmojyth", + "f51fd6256bfd3afc8470614d87df61e5c9dd582fcc70f707ca66ba2b7255da12" + ] + ], + "python": "python@3.8", + "spec": "clingo-bootstrap%gcc platform=linux target=ppc64le" + }, + { + "binaries": [ + [ + "clingo-bootstrap", + "norpsmparkl5dfuzdqj4537o77vjbgsl", + "477c041857b60f29ff9d6c7d2982b7eb49a2e02ebbc98af11488c32e2fb24081" + ] + ], + "python": "python@3.9", + "spec": "clingo-bootstrap%gcc platform=linux target=ppc64le" + }, + { + "binaries": [ + [ + "clingo-bootstrap", + "gypv5loj2ml73duq6sr76yg5rj25te2m", + "c855d7d32aadec37c41e51f19b83558b32bc0b946a9565dba0e659c6820bd6c3" + ] + ], + "python": "python@2.7+ucs4", + "spec": "clingo-bootstrap%gcc platform=linux target=x86_64" + }, + { + "binaries": [ + [ + "clingo-bootstrap", + "rjopyx7hum3hqhgsdyw3st7frdfgrv3p", + "0e555f9bc99b4e4152939b30b2257f4f353941d152659e716bf6123c0ce11a60" + ] + ], + "python": "python@2.7~ucs4", + "spec": "clingo-bootstrap%gcc platform=linux target=x86_64" + }, + { + "binaries": [ + [ + "clingo-bootstrap", + "2l45t4kw3cqqwj6vbxhfwhzlo6b3q2p4", + "6cb90de5a3d123b7408cfef693a9a78bb69c66abbfed746c1e85aa0acb848d03" + ] + ], + "python": "python@3.10", + "spec": "clingo-bootstrap%gcc platform=linux target=x86_64" + }, + { + "binaries": [ + [ + "clingo-bootstrap", + "4psiezojm7dexequtbnav77wvgcajigq", + "b3fc33b5482357613294becb54968bd74de638abeae69e27c6c4319046a7e352" + ] + ], + "python": "python@3.5", + "spec": "clingo-bootstrap%gcc 
platform=linux target=x86_64" + }, + { + "binaries": [ + [ + "clingo-bootstrap", + "dzhvhynye4z7oalowdcy5zt25lej3m2n", + "61c5f3e80bcc7acfc65e335f1910762df2cc5ded9d7e1e5977380a24de553dd7" + ] + ], + "python": "python@3.6", + "spec": "clingo-bootstrap%gcc platform=linux target=x86_64" + }, + { + "binaries": [ + [ + "clingo-bootstrap", + "dtwevigmwgke4g6ee5byktpmzmrp2kvx", + "636937244b58611ec2eedb4422a1076fcaf09f3998593befb5a6ff1a74e1d5f7" + ] + ], + "python": "python@3.7", + "spec": "clingo-bootstrap%gcc platform=linux target=x86_64" + }, + { + "binaries": [ + [ + "clingo-bootstrap", + "shqedxgvjnhiwdcdrvjhbd73jaevv7wt", + "b3615b2a94a8a15fddaa74cf4d9f9b3a516467a843cdeab597f72dcf6be5e31d" + ] + ], + "python": "python@3.8", + "spec": "clingo-bootstrap%gcc platform=linux target=x86_64" + }, + { + "binaries": [ + [ + "clingo-bootstrap", + "z6v6zvc6awioeompbvo735b4flr3yuyz", + "1389192bd74c1f7059d95c4a41500201cbc2905cbba553678613e0b7e3b96c71" + ] + ], + "python": "python@3.9", + "spec": "clingo-bootstrap%gcc platform=linux target=x86_64" + } + ] +} \ No newline at end of file diff --git a/share/spack/bootstrap/github-actions-v0.3/gnupg.json b/share/spack/bootstrap/github-actions-v0.3/gnupg.json deleted file mode 120000 index 1d6273a6d93..00000000000 --- a/share/spack/bootstrap/github-actions-v0.3/gnupg.json +++ /dev/null @@ -1 +0,0 @@ -../github-actions-v0.2/gnupg.json \ No newline at end of file diff --git a/share/spack/bootstrap/github-actions-v0.3/gnupg.json b/share/spack/bootstrap/github-actions-v0.3/gnupg.json new file mode 100644 index 00000000000..2f568297892 --- /dev/null +++ b/share/spack/bootstrap/github-actions-v0.3/gnupg.json @@ -0,0 +1,204 @@ +{ + "verified": [ + { + "binaries": [ + [ + "libiconv", + "d6dhoguolmllbzy2h6pnvjm3tti6uy6f", + "7fe765a87945991d4e57782ed67c4bf42a10f95582eecd6f57de80a545bde821" + ], + [ + "npth", + "x6fb7zx6n7mos5knvi6wlnaadd7r2szx", + "fd1e5a62107339f45219c32ba20b5e82aa0880c31ac86d1b245d388ca4546990" + ], + [ + "zlib", + 
"c5wm3jilx6zsers3sfgdisjqusoza4wr", + "7500a717c62736872aa65df4599f797ef67b21086dd6236b4c7712cfffac9bf3" + ], + [ + "libassuan", + "3qv4bprobfwes37clg764cfipdzjdbto", + "d85cd9d2c63a296300d4dcbd667421956df241109daef5e12d3ca63fa241cb14" + ], + [ + "libgcrypt", + "3y4ubdgxvgpvhxr3bk4l5mkw4gv42n7e", + "9dad7c2635344957c4db68378964d3af84ea052d45dbe8ded9a6e6e47211daa8" + ], + [ + "libgpg-error", + "doido34kfwsvwpj4c4jcocahjb5ltebw", + "20e5c238bee91d2a841f0b4bd0358ded59a0bd665d7f251fd9cd42f83e0b283b" + ], + [ + "libksba", + "mttecm7gzdv544lbzcoahchnboxysrvi", + "1c0ae64e828a597e4cf15dd997c66cd677e41f68c63db09b9551480a197052a2" + ], + [ + "pinentry", + "se7xgv7yf4ywpjnbv7voxgeuuvs77ahb", + "2fd13fbee7ca2361dc5dd09708c72d0489611301b60635cb0206bc5b94add884" + ], + [ + "gnupg", + "yannph34bpaqkhsv5mz2icwhy3epiqxd", + "1de8b4e119fa3455d0170466fa0fb8e04957fab740aec32535b4667279312b3f" + ] + ], + "spec": "gnupg@2.3: %apple-clang platform=darwin target=x86_64" + }, + { + "binaries": [ + [ + "zlib", + "t2hjzsyf3txkg64e4bq3nihe26rzzdws", + "171e720840a28af50b62141be77bc525e666cffd1fbbe2ee62673214e8b0280f" + ], + [ + "libiconv", + "yjdji2wj4njz72fyrg46jlz5f5wfbhfr", + "94c773c3d0294cf248ec1f3e9862669dfa743fe1a76de580d9425c14c8f7dcd2" + ], + [ + "npth", + "kx3vzmpysee7jxwsudarthrmyop6hzgc", + "f8cc6204fa449ce576d450396ec2cad40a75d5712c1381a61ed1681a54f9c79f" + ], + [ + "libassuan", + "e5n5l5ftzwxs4ego5furrdbegphb6hxp", + "ef0428874aa81bcb9944deed88e1fc639f629fe3d522cab3c281235ae2a53db9" + ], + [ + "libgcrypt", + "wyncpahrpqsmpk4b7nlhg5ekkjzyjdzs", + "2309548c51a17f580f036445b701feb85d2bc552b9c4404418c2f223666cfe3b" + ], + [ + "libgpg-error", + "vhcdd6jkbiday2seg3rlkbzpf6jzfdx7", + "79dd719538d9223d6287c0bba07b981944ab6d3ab11e5060274f1b7c727daf55" + ], + [ + "libksba", + "azcgpgncynoox3dce45hkz46bp2tb5rr", + "15d301f201a5162234261fcfccd579b0ff484131444a0b6f5c0006224bb155d6" + ], + [ + "pinentry", + "e3z5ekbv4jlsie4qooubcfvsk2sb6t7l", + 
"5fd27b8e47934b06554e84f1374a90a93e71e60a14dbde672a8da414b27b97f4" + ], + [ + "gnupg", + "i5agfvsmzdokuooaqhlh6vro5giwei2t", + "f1bde7a1f0c84c1bbcde5757a96cf7a3e9157c2cfa9907fde799aa8e04c0d51f" + ] + ], + "spec": "gnupg@2.3: %gcc platform=linux target=aarch64" + }, + { + "binaries": [ + [ + "zlib", + "v5rr6ba37tudzfuv2jszwikgcl4wd3cd", + "371ad4b277af7b97c7871b9931f2764c97362620c7990c5ad8fdb5c42a1d30dc" + ], + [ + "libiconv", + "bvcnx2e4bumjcgya4dczdhjb3fhqyass", + "65a00b717b3a4ee1b5ab9f84163722bdfea8eb20a2eecc9cf657c0eaac0227e9" + ], + [ + "npth", + "dkb6ez6a4c3iyrv67llwf5mzmynqdmtj", + "4d77351661d0e0130b1c89fb6c6a944aee41d701ef80d056d3fc0178a7f36075" + ], + [ + "libassuan", + "tuydcxdbb5jfvw3gri7y24b233kgotgd", + "d8775e7c1dd252437c6fa0781675b1d2202cfc0c8190e60d248928b6fca8bc9f" + ], + [ + "libgcrypt", + "kgxmg4eukwx6nn3bdera3j7cf7hxfy6n", + "6046523f10ed54be50b0211c27191b3422886984fc0c00aed1a85d1f121c42e6" + ], + [ + "libgpg-error", + "ewhrwnltlrzkpqyix2vbkf4ruq6b6ea3", + "3f3bbbf1a3cb82d39313e39bcbe3dad94a176130fc0e9a8045417d6223fb4f31" + ], + [ + "libksba", + "onxt5ry2fotgwiognwmhxlgnekuvtviq", + "3a4df13f8b880441d1df4b234a4ca01de7601d84a6627185c2b3191a34445d40" + ], + [ + "pinentry", + "fm3m4rsszzxxakcpssd34jbbe4ihrhac", + "73afa46176a7ec8f02d01a2caad3e400dc18c3c8a53f92b88a9aa9e3653db3e6" + ], + [ + "gnupg", + "gwr65ovh4wbxjgniaoqlbt3yla6rdikj", + "7a3f7afe69ca67797a339c04028ca45a9630933020b57cb56e28453197fe8a57" + ] + ], + "spec": "gnupg@2.3: %gcc platform=linux target=ppc64le" + }, + { + "binaries": [ + [ + "libiconv", + "vec3ac6t4ag3lb7ycvisafthqmpci74b", + "35d184218e525d8aaea60082fd2d0f1e80449ec32746cceda2ea0ca106e9a095" + ], + [ + "npth", + "jx3kmy3ilc66rgg5mqtbed5z6qwt3vrd", + "74c2c1b087667661da3e24ac83bcecf1bc2d10d69e7678d1fd232875fe295135" + ], + [ + "zlib", + "wnpbp4pu7xca24goggcy773d2y4pobbd", + "bcbd5310e8c5e75cbf33d8155448b212486dc543469d6df7e56dcecb6112ee88" + ], + [ + "libassuan", + "ynn33wutdtoo2lbjjoizgslintxst2zl", + 
"ac3b060690c6da0c64dcf35da047b84cc81793118fb9ff29b993f3fb9efdc258" + ], + [ + "libgcrypt", + "zzofcjer43vsxwj27c3rxapjxhsz4hlx", + "4b1977d815f657c2d6af540ea4b4ce80838cadcf4ada72a8ba142a7441e571ea" + ], + [ + "libgpg-error", + "gzr2ucybgks5jquvf4lv7iprxq5vx5le", + "a12ecb5cfd083a29d042fd309ebb5ab8fd4ace0b68b27f89b857e9a84d75b5be" + ], + [ + "libksba", + "hw4u4pam6mp3henpw476axtqaahfdy64", + "5424caf98a2d48e0ed0b9134353c242328ebeef6d2b31808d58969165e809b47" + ], + [ + "pinentry", + "hffsjitsewdgoijwgzvub6vpjwm33ywr", + "8ed7504b5b2d13ab7e1f4a0e27a882c33c5a6ebfcb43c51269333c0d6d5e1448" + ], + [ + "gnupg", + "lge4h2kjgvssyspnvutq6t3q2xual5oc", + "6080ce00fcc24185e4051a30f6d52982f86f46eee6d8a2dc4d83ab08d8195be8" + ] + ], + "spec": "gnupg@2.3: %gcc platform=linux target=x86_64" + } + ] +} \ No newline at end of file diff --git a/share/spack/bootstrap/github-actions-v0.4/clingo.json b/share/spack/bootstrap/github-actions-v0.4/clingo.json new file mode 100644 index 00000000000..1fa83eef1c0 --- /dev/null +++ b/share/spack/bootstrap/github-actions-v0.4/clingo.json @@ -0,0 +1,334 @@ +{ + "verified": [ + { + "binaries": [ + [ + "clingo-bootstrap", + "fk6k6buvgbwhwtigvpvi3266gllv7z2o", + "003eb7b2c62debc0bac4a7f3a3933d6a955520199b37e00c8c0761036d8dc63a" + ] + ], + "python": "python@3.10", + "spec": "clingo-bootstrap%apple-clang platform=darwin target=aarch64" + }, + { + "binaries": [ + [ + "clingo-bootstrap", + "2ezozohyngzmq74eeclsjupcawg6slse", + "bf3c559d655d5f04a2b080c640996086db2bb6bbf49f4139eed225a77b574923" + ] + ], + "python": "python@3.11", + "spec": "clingo-bootstrap%apple-clang platform=darwin target=aarch64" + }, + { + "binaries": [ + [ + "clingo-bootstrap", + "okjsfmgareef7laq432tdtgyu7bshmv2", + "7beed9fe21b52df6b56d8242b79becab7ed953af16612d6e09c595ef39591ac3" + ] + ], + "python": "python@3.6", + "spec": "clingo-bootstrap%apple-clang platform=darwin target=aarch64" + }, + { + "binaries": [ + [ + "clingo-bootstrap", + "cv7nf5ti72ywciapdy6mn7cemqv766zy", + 
"6af9e548044e4849794ee85008c8b19539b63857510c6fff544de7ccb6e53ee8" + ] + ], + "python": "python@3.7", + "spec": "clingo-bootstrap%apple-clang platform=darwin target=aarch64" + }, + { + "binaries": [ + [ + "clingo-bootstrap", + "ing4swsz5bj7guqffc277zitcky4uhu4", + "4d9008372c73797fc0bd47c92c922f810e1b3fd44dc373682a7a0780b711058c" + ] + ], + "python": "python@3.8", + "spec": "clingo-bootstrap%apple-clang platform=darwin target=aarch64" + }, + { + "binaries": [ + [ + "clingo-bootstrap", + "p2cyqcsow6k6prfryoqb7usv27hhofuq", + "5e4fd1fc552d815ce8db8b8917d9089c7782a92269754f8ca5d4f01a9406244d" + ] + ], + "python": "python@3.9", + "spec": "clingo-bootstrap%apple-clang platform=darwin target=aarch64" + }, + { + "binaries": [ + [ + "clingo-bootstrap", + "5bfjmclf6sktj4drclxe7rdwdthlkxw3", + "b811e62f82b564e9cd5e12fc3cdb19b3d4e5f2bdb98985e1bbe3d1bbd5dd3d5c" + ] + ], + "python": "python@3.10", + "spec": "clingo-bootstrap%apple-clang platform=darwin target=x86_64" + }, + { + "binaries": [ + [ + "clingo-bootstrap", + "bwmaj7wyyiivvkq5j72mholmhmytb2fl", + "468da2198479514bbbf66f4268716bce38cace1004a612bc669d21d97c596f85" + ] + ], + "python": "python@3.11", + "spec": "clingo-bootstrap%apple-clang platform=darwin target=x86_64" + }, + { + "binaries": [ + [ + "clingo-bootstrap", + "ewhsk7bcohduujp5t7hljb5uk2mfbk7k", + "919cbfc82bbb08da207e22bec4d8047c34042b90d58b9c6b438b5dcef0046e39" + ] + ], + "python": "python@3.6", + "spec": "clingo-bootstrap%apple-clang platform=darwin target=x86_64" + }, + { + "binaries": [ + [ + "clingo-bootstrap", + "263aqtzhrgzmzgpfekda7uk6wdqez76j", + "b9e579ee2a848f7287a8b625459ac5b8ce19e9e6858a86b53effaa4ae712d1b6" + ] + ], + "python": "python@3.7", + "spec": "clingo-bootstrap%apple-clang platform=darwin target=x86_64" + }, + { + "binaries": [ + [ + "clingo-bootstrap", + "yqqiddbmi2pyxgu757qfvts6hlt6525q", + "254ab94d48543472ad8a32f598dc869c49051a0b890951d7de8425c7549caa26" + ] + ], + "python": "python@3.8", + "spec": "clingo-bootstrap%apple-clang 
platform=darwin target=x86_64" + }, + { + "binaries": [ + [ + "clingo-bootstrap", + "plv3woz7rwmixdo42ew27imtqqjqnnv5", + "ec494e7043433fac6f8f404e023eea397197ff0928bf1c3f3cc0bc62d549334c" + ] + ], + "python": "python@3.9", + "spec": "clingo-bootstrap%apple-clang platform=darwin target=x86_64" + }, + { + "binaries": [ + [ + "clingo-bootstrap", + "domcqyr4qx2yl3433l5dycnehastl7zc", + "fbfc1fc14f27bbabe36a438dd70515067dbd7e0873bc748b9f34d576d5400cb4" + ] + ], + "python": "python@3.10", + "spec": "clingo-bootstrap%gcc platform=linux target=aarch64" + }, + { + "binaries": [ + [ + "clingo-bootstrap", + "b5m7ectjiuucgaoyry24hhop44edjvg7", + "5412e2b3f45d251acd976c12d238549e0c324e6481bf328f9547fafc0e810daf" + ] + ], + "python": "python@3.11", + "spec": "clingo-bootstrap%gcc platform=linux target=aarch64" + }, + { + "binaries": [ + [ + "clingo-bootstrap", + "h2uxdiwlbvrfsz5mlt2s2xvnefbuk7qx", + "4cf26cd903fe0034522e1d8a712ab7a6ae936961c1c010473ff15566665cef6b" + ] + ], + "python": "python@3.6", + "spec": "clingo-bootstrap%gcc platform=linux target=aarch64" + }, + { + "binaries": [ + [ + "clingo-bootstrap", + "4ksotxknsesu4mv2bio5ndtilo423cpy", + "9281ca638e2ec5c0b6d3ae050827a1c3696251a6274e96f3a8a89a1fdf7f0ba2" + ] + ], + "python": "python@3.7", + "spec": "clingo-bootstrap%gcc platform=linux target=aarch64" + }, + { + "binaries": [ + [ + "clingo-bootstrap", + "ho7jg4bl7degmnnnj6x6fatbcno37kqo", + "0e78a555839fbd3752473ed80c76be9007b6ce3f152fa69d8014b172e339b92f" + ] + ], + "python": "python@3.8", + "spec": "clingo-bootstrap%gcc platform=linux target=aarch64" + }, + { + "binaries": [ + [ + "clingo-bootstrap", + "oztu77xbgiige4pp2epmbqrmxt4vwnla", + "5271b271a2f6ae26838614477b2b8e5f230bceda7e0eb63f2cc36b18da3ba53d" + ] + ], + "python": "python@3.9", + "spec": "clingo-bootstrap%gcc platform=linux target=aarch64" + }, + { + "binaries": [ + [ + "clingo-bootstrap", + "33qaxq2adjrlv6ttxbu6bmueundhns2w", + "5fa731b84e354b8108ac4b7205d40e8c1a74cb2dfd590dd2d648d744a8556a1d" + ] + ], 
+ "python": "python@3.10", + "spec": "clingo-bootstrap%gcc platform=linux target=ppc64le" + }, + { + "binaries": [ + [ + "clingo-bootstrap", + "vwhszkap6e2zbzb74ywgyggflkmtavwz", + "09eed0c9b98681173f512385675f44d070cb5ebc3e08aac659a12ea1ec41d05a" + ] + ], + "python": "python@3.11", + "spec": "clingo-bootstrap%gcc platform=linux target=ppc64le" + }, + { + "binaries": [ + [ + "clingo-bootstrap", + "3pst4dqux2clmm3mpjj4jkowv3s2ixv6", + "f9d9ade557ed426f55308dd14b43c59e1b51b8f40c9847d00994a3a89182a846" + ] + ], + "python": "python@3.6", + "spec": "clingo-bootstrap%gcc platform=linux target=ppc64le" + }, + { + "binaries": [ + [ + "clingo-bootstrap", + "6wk7qj2hdglt2sjtec4mv7ibsvhw53ge", + "e06a3190e60b1d0c4d4b8f01b7a2ade9d2d3d8fdaf84757cc9741e81a5ad59a3" + ] + ], + "python": "python@3.7", + "spec": "clingo-bootstrap%gcc platform=linux target=ppc64le" + }, + { + "binaries": [ + [ + "clingo-bootstrap", + "k2kch7a6j7ilikuklhyffkqhdqb46yt5", + "2547727ce0b8295594dfa56b711631b8ab221a19c4cbd19341539b929693b0cb" + ] + ], + "python": "python@3.8", + "spec": "clingo-bootstrap%gcc platform=linux target=ppc64le" + }, + { + "binaries": [ + [ + "clingo-bootstrap", + "ivzizagt74uqxrp2mri5lbqiqkhab77p", + "2ddd5daeeabfc3b2a211f7efb3cc700991c5817b08b19c2d315084198f7d2bc8" + ] + ], + "python": "python@3.9", + "spec": "clingo-bootstrap%gcc platform=linux target=ppc64le" + }, + { + "binaries": [ + [ + "clingo-bootstrap", + "idkenmhnscjlu5gjqhpcqa4h7o2a7aow", + "44c88094abb239dd33b75c02c24fefe7f4f5646c2371f50a5bfb47b23805760b" + ] + ], + "python": "python@3.10", + "spec": "clingo-bootstrap%gcc platform=linux target=x86_64" + }, + { + "binaries": [ + [ + "clingo-bootstrap", + "cizcjj3tx4irs3uzoktrgybq73sz545f", + "d8c8d4accece4e10a4339b263ff42f0f0adc77b3fbeea1010b3d7fc48aead5b3" + ] + ], + "python": "python@3.11", + "spec": "clingo-bootstrap%gcc platform=linux target=x86_64" + }, + { + "binaries": [ + [ + "clingo-bootstrap", + "prqkzynv2nwko5mktitebgkeumuxkveu", + 
"3059fb60ff3b2dd5b36a46af37972b479fbfad348c30ec2e6b59729d93f07eed" + ] + ], + "python": "python@3.6", + "spec": "clingo-bootstrap%gcc platform=linux target=x86_64" + }, + { + "binaries": [ + [ + "clingo-bootstrap", + "axtxtavfahxuazw2wueu3tjwwu6ttdfo", + "281cf24d0a8f2372b348bb1a38a9bfd1516063f597ffdecfde6e8e3aa4e2139f" + ] + ], + "python": "python@3.7", + "spec": "clingo-bootstrap%gcc platform=linux target=x86_64" + }, + { + "binaries": [ + [ + "clingo-bootstrap", + "ba5ijauisd3uuixtmactc36vps7yfsrl", + "ea5960f47f48daeb62e6ebf7d8574ceb4bfccff6e2bae17571b0857bfd7a0bbc" + ] + ], + "python": "python@3.8", + "spec": "clingo-bootstrap%gcc platform=linux target=x86_64" + }, + { + "binaries": [ + [ + "clingo-bootstrap", + "gqcctd2ejbgvyvyt4umqpetfoogfycwu", + "8358d72dd5de00a1b7a7ffb88ba366a01ce9b700245d2940eae7395fec0e6fda" + ] + ], + "python": "python@3.9", + "spec": "clingo-bootstrap%gcc platform=linux target=x86_64" + } + ] +} \ No newline at end of file diff --git a/share/spack/bootstrap/github-actions-v0.4/gnupg.json b/share/spack/bootstrap/github-actions-v0.4/gnupg.json new file mode 100644 index 00000000000..5237d8729af --- /dev/null +++ b/share/spack/bootstrap/github-actions-v0.4/gnupg.json @@ -0,0 +1,254 @@ +{ + "verified": [ + { + "binaries": [ + [ + "zlib", + "azrxnl6yp7xeapfy7nljiopucaelofuh", + "c3f28571947a41d3c9fb0da0b340b51bdef6b9e05a59e6df7c9bc5838bacd81a" + ], + [ + "libiconv", + "id44zneq3nh4grvtekqoefl24okct4ak", + "8cf48050c8d58dc0e1d11c8b3b9d970586e1f62933f8655982f4312d1e4426ea" + ], + [ + "npth", + "lp7fobvpwlk3xugo7th2kmcnrvqqxb3b", + "ec4dda80a2485e0eda5b1ef09e6b8b020283b00ab6252981722979af04ce2ba8" + ], + [ + "libassuan", + "trhqsquxpocecfgkeif5bh2dwgu4njbp", + "33f15821d6e41238de58f2237d3e1be46b657e3337cbe73f87973fe970ab36fd" + ], + [ + "libgcrypt", + "eadvdhou2xjdhf47x3q5x2ypa4qhfqjy", + "f0d1d6b3cef5794933b78df3446ac71bdd0cc79b81a26fc33153ef13819e6b09" + ], + [ + "libgpg-error", + "yg67vozcaac75p3dnhd6c3cjpa5nsfjo", + 
"fe907bce097dec72a92a1973d73253d9e4ce4bd78ed14f8d6e647dd8e77eef15" + ], + [ + "libksba", + "m7o6qwsu2gxvlka2jbd5puzwj3z553ob", + "69d324a77b550a6c7a201f3f39835df4f14534fcf5fa28628c14039bfdb39dda" + ], + [ + "pinentry", + "6m36xv6ft3yterimp6xoozz66ych5eew", + "0b82a4b52a6bc5e6fd4913f585455ea703e0fa5c85fd9f4bb1eb5819af5084e1" + ], + [ + "gnupg", + "pyrfgqkgltgfk4yljfw2myxn6vqen2j6", + "3c41b0cf2db01ad2675f27d51edb4cf7f798be9ca0e3ac781990ff8b462cd8f6" + ] + ], + "spec": "gnupg@2.3: %apple-clang platform=darwin target=aarch64" + }, + { + "binaries": [ + [ + "libiconv", + "f6om5cmewxrhzpowei3m2g2qnijvlep4", + "ab891ac21bc9cf44723993232ce3fff6fe75d003dfb88077dea630e532db123f" + ], + [ + "npth", + "tvebgs23dhejixfe36dufivhkwnyxh3t", + "95b9852c2e69f18fb8eff3dc6fc2bb9efe38821314cac6c310523da89c8346a2" + ], + [ + "zlib", + "rlzphstv75due7yzcicuu7nfos5zuk2q", + "e5ee87fab6e51b46ab1fb1cedafc4edee380a810947d52e669a185b52636aa37" + ], + [ + "libassuan", + "ow5h7we5zrgoknsvss3yjjs4g3aci4b2", + "44cf47134b4e4cbad30b8f4ef5ac1e7e25ead1d4dc64bd44fe807a4f173977ad" + ], + [ + "libgcrypt", + "nuy3jjihjlktwggpwdrert2q5xoqk4ic", + "ebb85da4d0b4ea93e073b8faf11e4ec955752a589b0ee47cd46b825ef685e536" + ], + [ + "libgpg-error", + "w7xfbrbfdnssbfoxrsz4emt6aildxsfy", + "6973cd597db96830822a8111fe3b3cff271e8cedc26fb0cb48443c2de2cc50ad" + ], + [ + "libksba", + "74h62c57ojgmqqp6xrrrzmzgftmcv22c", + "73afeb0bfdf57623d694ea16b52e1d73bfca61954583d6737f4ab6ab05c92ca8" + ], + [ + "pinentry", + "dv7sj3xesjfhqbrcxorvbzoxzlqpac4e", + "509d6881145a33b7de69db63af84fe887e7c995ffd4e89003a25fafa45b5874b" + ], + [ + "gnupg", + "hrv7rjtbvuxkt4trjdnzovegwutciunv", + "bf39c84047508556e142b9a9808007bbcc9aa80b2b9936a3598530f5acc7c75a" + ] + ], + "spec": "gnupg@2.3: %apple-clang platform=darwin target=x86_64" + }, + { + "binaries": [ + [ + "zlib", + "mrdyh4e34orgypetqhru6romj6wlvyxm", + "ecd344c5dcae7377d8b20f14248a73d1fe350e54364f2f1e70aa4fccf1c219ed" + ], + [ + "libiconv", + "iuparzfnzuwmmhj5ytlhaobn4nz3cct4", + 
"58ef399a4bd8794a00314440e02de8c685c9c02d1b02a751923ae669556a1a91" + ], + [ + "npth", + "eltd4b6tq4gsnboeidmr7mykezykcho5", + "89b3e0c7f573009d4816b219413a07a9917758836befdfeb6c33a9e87d846b6f" + ], + [ + "libassuan", + "xfaguxawrc6z73draba5fccjxtxjvzmz", + "59ebe715532a2671cde9783aceebb1448062e7adb7307da30b0d6245529d897f" + ], + [ + "libgcrypt", + "ntb2fzwckdgb77eubdcvvj2xm5eilavw", + "92fb1ef0d57c98b16e172c6afbc995dd163f0bac1484eb11eef5305f434a5cd1" + ], + [ + "libgpg-error", + "utzxfplsbueqmj7ksxaykk6tk3xi5dmr", + "74aa95bc48c42eab0a8ca0afab51074811bf79477271123af13398029ac7394f" + ], + [ + "libksba", + "jzxmzebonsgrw5e6ij446azzocvko2vi", + "bfc11401fc94d3f6d3176fa4b95dd866ad355c0b77b9c5787acbfdffe42915b9" + ], + [ + "pinentry", + "wsjzc3l5zgieowd24p2paccrporun5cv", + "db3e475b2113ad9587017a76c9df57fc537d2dd6c5d3323119c30723b5b51330" + ], + [ + "gnupg", + "zigabpppmz5ctktqwdj5ueaxjuvm6syh", + "fd8a681dfa919d8faff256fabafe1f08238cc75c74cbcfc44acce23cf0afb34c" + ] + ], + "spec": "gnupg@2.3: %gcc platform=linux target=aarch64" + }, + { + "binaries": [ + [ + "zlib", + "c4kbhgxjlko6a367d3zl6b5qcm5swiew", + "49747756dea8dd15fc3ea8f73d03b24724fa6b22103f04b557152db59c17f799" + ], + [ + "libiconv", + "5l5cq7de7iwagndyyphpdmvdvp3pepe6", + "a46d2a89cab00d8196e6226f3831bc3ff8b7f728844d6d29013cc8999d7b7f78" + ], + [ + "npth", + "b6ifa47mma7n7mxl36yg73uwjqezbde5", + "1b16e28e692ca91a096da4976f7df451df5e3ea9aa2f03cc2d39f62646a9399b" + ], + [ + "libassuan", + "phds2cjgeo3sbluuvdj6ebdkuom6un6p", + "482bf3a4975e21e03b7d34ff69031071a2822fb182774421f648ed7ccc99f24d" + ], + [ + "libgcrypt", + "7hgqgoekgh4jiml2u55rg2zec3ouyq7z", + "edfa277010de9f060bbcb11c2011dd66fb6e714c28a189d7cd7ef2d825e85180" + ], + [ + "libgpg-error", + "th2tzwwoz7ddrygkjrxzzv4yvznxglmx", + "e7c645287270ae2ac08ff5d400bf44b2e79203e752c3ff32aed07200638a6fe0" + ], + [ + "libksba", + "ex5gt36shiwt54jg7mbgxysnwu7jjy6a", + "8cf350544821bfec19e2b52a47896ca9258fc56680e4bb0d12715416169ead4a" + ], + [ + "pinentry", + 
"aowc7abd6kvohdohxz4j225q2hh743cq", + "ad336a7eee41eebd6b8e667e7ef673b64088c0553492567245653ac6c07fdb46" + ], + [ + "gnupg", + "7i7j24llnlzwpwrfumulorq6ucx2ku2f", + "a743ffd0698db5329a8231d25fa2e13b88f63cf85198664794a91df7a2c48632" + ] + ], + "spec": "gnupg@2.3: %gcc platform=linux target=ppc64le" + }, + { + "binaries": [ + [ + "libiconv", + "vyvyow3bnokashj3wntl7pgm5nc4h7vw", + "4fb8c1a563975f339b2e98e4c5c6cd98d629bc94fcf57b8e92beedae17a4584d" + ], + [ + "npth", + "opncvl75zv6njawkgtxgt4yhii65f5nx", + "24b442a6f2cc28176a4f742d961807e5ffd853d2f9d65175944b6aa8b47d91e2" + ], + [ + "zlib", + "dcixs2nytw7vlthk55mwvog7veypnuor", + "6ab7018b621783c971192e46b6a3e2764b638b5ab5c2f3c62af24afd5a9039e0" + ], + [ + "libassuan", + "yk2555moxgj3dro6edznumguezecriso", + "ebde470fee06e5ad7527dca0eb3689ae13b7299229b51e64f97ff87b9daf9160" + ], + [ + "libgcrypt", + "imws5ss7coeeo45zr6w54xnwjfjm4cc6", + "ad20c2974c90717efa8a4c27781e5f4c14d60527dc1c224fd2e113fe52d3e958" + ], + [ + "libgpg-error", + "nbhvf75esgtjeu6nh57gu6mnikiazmjt", + "ec9f59c684dc4054706217952b8ddf610e4277ec8031c92640f086959dcf756e" + ], + [ + "libksba", + "cx425tk5tnod3523zj4izloqibr44frz", + "b2465fecbca3d022cf068766a9c01c72f6a68f9b58e78375f687b1273f6c683c" + ], + [ + "pinentry", + "pto3uq53xwl7dtbvycdp4qccacrrzs3r", + "bd9ae21dff99c34165baa680df4b4b36339e207fec2ac4fcc80103d774a1dd84" + ], + [ + "gnupg", + "5mhxefklns5hpdai3jn3rsf23kz4nol6", + "8a21155078dc51fdee7990326335e9f99192da0eb4b3490260a7399e30f20243" + ] + ], + "spec": "gnupg@2.3: %gcc platform=linux target=x86_64" + } + ] +} \ No newline at end of file diff --git a/share/spack/bootstrap/github-actions-v0.1/metadata.yaml b/share/spack/bootstrap/github-actions-v0.4/metadata.yaml similarity index 83% rename from share/spack/bootstrap/github-actions-v0.1/metadata.yaml rename to share/spack/bootstrap/github-actions-v0.4/metadata.yaml index b2439424b0c..0b483b547b2 100644 --- a/share/spack/bootstrap/github-actions-v0.1/metadata.yaml +++ 
b/share/spack/bootstrap/github-actions-v0.4/metadata.yaml @@ -3,6 +3,6 @@ description: | Buildcache generated from a public workflow using Github Actions. The sha256 checksum of binaries is checked before installation. info: - url: https://mirror.spack.io/bootstrap/github-actions/v0.1 + url: https://mirror.spack.io/bootstrap/github-actions/v0.4 homepage: https://github.com/spack/spack-bootstrap-mirrors releases: https://github.com/spack/spack-bootstrap-mirrors/releases diff --git a/share/spack/bootstrap/github-actions-v0.4/patchelf.json b/share/spack/bootstrap/github-actions-v0.4/patchelf.json new file mode 100644 index 00000000000..cab42851089 --- /dev/null +++ b/share/spack/bootstrap/github-actions-v0.4/patchelf.json @@ -0,0 +1,34 @@ +{ + "verified": [ + { + "binaries": [ + [ + "patchelf", + "kjmrsrd7akfwzlejzsdyoun7fwgmvjgk", + "2c1975adb6fbd42bdb960b67fa6b32bc2846a28e5d293d2ca7b44a38f49ecf4f" + ] + ], + "spec": "patchelf@0.13: %gcc platform=linux target=aarch64" + }, + { + "binaries": [ + [ + "patchelf", + "gxxogiws7fmzkbdc26k24id3aplly6wi", + "d45ac6b9045d510861fda0cfaa5c04d71f316df5784376f2d2915ab134619c1b" + ] + ], + "spec": "patchelf@0.13: %gcc platform=linux target=ppc64le" + }, + { + "binaries": [ + [ + "patchelf", + "p72zyan5wrzuabtmzq7isa5mzyh6ahdp", + "ed7ebae3399d96c8d2f4b38ce6f2da52d8b73b312c73babae880ed3467b464b4" + ] + ], + "spec": "patchelf@0.13: %gcc platform=linux target=x86_64" + } + ] +} \ No newline at end of file diff --git a/share/spack/gitlab/cloud_pipelines/.gitlab-ci.yml b/share/spack/gitlab/cloud_pipelines/.gitlab-ci.yml index 17c16e16d67..8098d6f5feb 100644 --- a/share/spack/gitlab/cloud_pipelines/.gitlab-ci.yml +++ b/share/spack/gitlab/cloud_pipelines/.gitlab-ci.yml @@ -91,6 +91,9 @@ protected-publish: extends: [ ".protected" ] image: "ghcr.io/spack/python-aws-bash:0.0.1" tags: ["spack", "public", "medium", "aws", "x86_64"] + retry: + max: 2 + when: ["runner_system_failure", "stuck_or_timeout_failure"] variables: AWS_ACCESS_KEY_ID: 
${PROTECTED_MIRRORS_AWS_ACCESS_KEY_ID} AWS_SECRET_ACCESS_KEY: ${PROTECTED_MIRRORS_AWS_SECRET_ACCESS_KEY} diff --git a/share/spack/gitlab/cloud_pipelines/stacks/aws-ahug-aarch64/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/aws-ahug-aarch64/spack.yaml index 67ac499003e..a149c9e88b5 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/aws-ahug-aarch64/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/aws-ahug-aarch64/spack.yaml @@ -6,6 +6,7 @@ spack: unify: false config: + build_jobs: 32 concretizer: clingo install_tree: root: /home/software/spack @@ -24,6 +25,8 @@ spack: - openmpi - mpich variants: +mpi + tbb: + require: "intel-tbb" binutils: variants: +ld +gold +headers +libiberty ~nls version: @@ -241,17 +244,25 @@ spack: gitlab-ci: script: + - uname -a || true + - grep -E 'vendor|model name' /proc/cpuinfo 2>/dev/null | sort -u || head -n10 /proc/cpuinfo 2>/dev/null || true + - nproc + - curl -Lfs 'https://github.com/JuliaBinaryWrappers/GNUMake_jll.jl/releases/download/GNUMake-v4.3.0+1/GNUMake.v4.3.0.aarch64-linux-gnu.tar.gz' -o gmake.tar.gz + - printf '2322c175fb092b426f9eb6c24ee22d94ffa6759c3d0c260b74d81abd8120122b gmake.tar.gz' | sha256sum --check --strict --quiet + - tar -xzf gmake.tar.gz -C /usr bin/make 2> /dev/null - . "./share/spack/setup-env.sh" - spack --version + - spack arch - cd ${SPACK_CONCRETE_ENV_DIR} - spack env activate --without-view . 
- spack config add "config:install_tree:projections:${SPACK_JOB_SPEC_PKG_NAME}:'morepadding/{architecture}/{compiler.name}-{compiler.version}/{name}-{version}-{hash}'" - mkdir -p ${SPACK_ARTIFACTS_ROOT}/user_data - if [[ -r /mnt/key/intermediate_ci_signing_key.gpg ]]; then spack gpg trust /mnt/key/intermediate_ci_signing_key.gpg; fi - if [[ -r /mnt/key/spack_public_key.gpg ]]; then spack gpg trust /mnt/key/spack_public_key.gpg; fi - - spack -d ci rebuild > >(tee ${SPACK_ARTIFACTS_ROOT}/user_data/pipeline_out.txt) 2> >(tee ${SPACK_ARTIFACTS_ROOT}/user_data/pipeline_err.txt >&2) + - spack --color=always --backtrace ci rebuild > >(tee ${SPACK_ARTIFACTS_ROOT}/user_data/pipeline_out.txt) 2> >(tee ${SPACK_ARTIFACTS_ROOT}/user_data/pipeline_err.txt >&2) image: { "name": "ghcr.io/spack/e4s-amazonlinux-2:v2022-03-21", "entrypoint": [""] } + match_behavior: first mappings: - match: - llvm diff --git a/share/spack/gitlab/cloud_pipelines/stacks/aws-ahug/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/aws-ahug/spack.yaml index f81eb0383e9..6117278bcf0 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/aws-ahug/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/aws-ahug/spack.yaml @@ -6,6 +6,7 @@ spack: unify: false config: + build_jobs: 32 concretizer: clingo install_tree: root: /home/software/spack @@ -214,9 +215,7 @@ spack: - '%gcc@7.3.1' - target: - #- 'target=x86_64' - 'target=x86_64_v3' - - 'target=x86_64_v4' specs: @@ -242,17 +241,25 @@ spack: gitlab-ci: script: + - uname -a || true + - grep -E 'vendor|model name' /proc/cpuinfo 2>/dev/null | sort -u || head -n10 /proc/cpuinfo 2>/dev/null || true + - nproc + - curl -Lfs 'https://github.com/JuliaBinaryWrappers/GNUMake_jll.jl/releases/download/GNUMake-v4.3.0+1/GNUMake.v4.3.0.x86_64-linux-gnu.tar.gz' -o gmake.tar.gz + - printf 'fef1f59e56d2d11e6d700ba22d3444b6e583c663d6883fd0a4f63ab8bd280f0f gmake.tar.gz' | sha256sum --check --strict --quiet + - tar -xzf gmake.tar.gz -C /usr bin/make 2> /dev/null - . 
"./share/spack/setup-env.sh" - spack --version + - spack arch - cd ${SPACK_CONCRETE_ENV_DIR} - spack env activate --without-view . - spack config add "config:install_tree:projections:${SPACK_JOB_SPEC_PKG_NAME}:'morepadding/{architecture}/{compiler.name}-{compiler.version}/{name}-{version}-{hash}'" - mkdir -p ${SPACK_ARTIFACTS_ROOT}/user_data - if [[ -r /mnt/key/intermediate_ci_signing_key.gpg ]]; then spack gpg trust /mnt/key/intermediate_ci_signing_key.gpg; fi - if [[ -r /mnt/key/spack_public_key.gpg ]]; then spack gpg trust /mnt/key/spack_public_key.gpg; fi - - spack -d ci rebuild > >(tee ${SPACK_ARTIFACTS_ROOT}/user_data/pipeline_out.txt) 2> >(tee ${SPACK_ARTIFACTS_ROOT}/user_data/pipeline_err.txt >&2) + - spack --color=always --backtrace ci rebuild > >(tee ${SPACK_ARTIFACTS_ROOT}/user_data/pipeline_out.txt) 2> >(tee ${SPACK_ARTIFACTS_ROOT}/user_data/pipeline_err.txt >&2) image: { "name": "ghcr.io/spack/e4s-amazonlinux-2:v2022-03-21", "entrypoint": [""] } + match_behavior: first mappings: - match: - llvm diff --git a/share/spack/gitlab/cloud_pipelines/stacks/aws-isc-aarch64/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/aws-isc-aarch64/spack.yaml index 248d49fd40c..379d5e2f317 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/aws-isc-aarch64/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/aws-isc-aarch64/spack.yaml @@ -6,6 +6,7 @@ spack: unify: false config: + build_jobs: 32 concretizer: clingo install_tree: root: /home/software/spack @@ -24,6 +25,8 @@ spack: - openmpi - mpich variants: +mpi + tbb: + require: "intel-tbb" binutils: variants: +ld +gold +headers +libiberty ~nls version: @@ -148,17 +151,25 @@ spack: gitlab-ci: script: + - uname -a || true + - grep -E 'vendor|model name' /proc/cpuinfo 2>/dev/null | sort -u || head -n10 /proc/cpuinfo 2>/dev/null || true + - nproc + - curl -Lfs 'https://github.com/JuliaBinaryWrappers/GNUMake_jll.jl/releases/download/GNUMake-v4.3.0+1/GNUMake.v4.3.0.aarch64-linux-gnu.tar.gz' -o gmake.tar.gz + - 
printf '2322c175fb092b426f9eb6c24ee22d94ffa6759c3d0c260b74d81abd8120122b gmake.tar.gz' | sha256sum --check --strict --quiet + - tar -xzf gmake.tar.gz -C /usr bin/make 2> /dev/null - . "./share/spack/setup-env.sh" - spack --version + - spack arch - cd ${SPACK_CONCRETE_ENV_DIR} - spack env activate --without-view . - spack config add "config:install_tree:projections:${SPACK_JOB_SPEC_PKG_NAME}:'morepadding/{architecture}/{compiler.name}-{compiler.version}/{name}-{version}-{hash}'" - mkdir -p ${SPACK_ARTIFACTS_ROOT}/user_data - if [[ -r /mnt/key/intermediate_ci_signing_key.gpg ]]; then spack gpg trust /mnt/key/intermediate_ci_signing_key.gpg; fi - if [[ -r /mnt/key/spack_public_key.gpg ]]; then spack gpg trust /mnt/key/spack_public_key.gpg; fi - - spack -d ci rebuild > >(tee ${SPACK_ARTIFACTS_ROOT}/user_data/pipeline_out.txt) 2> >(tee ${SPACK_ARTIFACTS_ROOT}/user_data/pipeline_err.txt >&2) + - spack --color=always --backtrace ci rebuild > >(tee ${SPACK_ARTIFACTS_ROOT}/user_data/pipeline_out.txt) 2> >(tee ${SPACK_ARTIFACTS_ROOT}/user_data/pipeline_err.txt >&2) image: { "name": "ghcr.io/spack/e4s-amazonlinux-2:v2022-03-21", "entrypoint": [""] } + match_behavior: first mappings: - match: - llvm diff --git a/share/spack/gitlab/cloud_pipelines/stacks/aws-isc/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/aws-isc/spack.yaml index 27de37b1f59..5feaad01cff 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/aws-isc/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/aws-isc/spack.yaml @@ -6,6 +6,7 @@ spack: unify: false config: + build_jobs: 32 concretizer: clingo install_tree: root: /home/software/spack @@ -24,6 +25,8 @@ spack: - openmpi - mpich variants: +mpi + tbb: + require: "intel-tbb" binutils: variants: +ld +gold +headers +libiberty ~nls version: @@ -129,7 +132,6 @@ spack: - target: - 'target=x86_64_v3' - - 'target=x86_64_v4' specs: @@ -160,17 +162,25 @@ spack: gitlab-ci: script: + - uname -a || true + - grep -E 'vendor|model name' /proc/cpuinfo 
2>/dev/null | sort -u || head -n10 /proc/cpuinfo 2>/dev/null || true + - nproc + - curl -Lfs 'https://github.com/JuliaBinaryWrappers/GNUMake_jll.jl/releases/download/GNUMake-v4.3.0+1/GNUMake.v4.3.0.x86_64-linux-gnu.tar.gz' -o gmake.tar.gz + - printf 'fef1f59e56d2d11e6d700ba22d3444b6e583c663d6883fd0a4f63ab8bd280f0f gmake.tar.gz' | sha256sum --check --strict --quiet + - tar -xzf gmake.tar.gz -C /usr bin/make 2> /dev/null - . "./share/spack/setup-env.sh" - spack --version + - spack arch - cd ${SPACK_CONCRETE_ENV_DIR} - spack env activate --without-view . - spack config add "config:install_tree:projections:${SPACK_JOB_SPEC_PKG_NAME}:'morepadding/{architecture}/{compiler.name}-{compiler.version}/{name}-{version}-{hash}'" - mkdir -p ${SPACK_ARTIFACTS_ROOT}/user_data - if [[ -r /mnt/key/intermediate_ci_signing_key.gpg ]]; then spack gpg trust /mnt/key/intermediate_ci_signing_key.gpg; fi - if [[ -r /mnt/key/spack_public_key.gpg ]]; then spack gpg trust /mnt/key/spack_public_key.gpg; fi - - spack -d ci rebuild > >(tee ${SPACK_ARTIFACTS_ROOT}/user_data/pipeline_out.txt) 2> >(tee ${SPACK_ARTIFACTS_ROOT}/user_data/pipeline_err.txt >&2) + - spack --color=always --backtrace ci rebuild > >(tee ${SPACK_ARTIFACTS_ROOT}/user_data/pipeline_out.txt) 2> >(tee ${SPACK_ARTIFACTS_ROOT}/user_data/pipeline_err.txt >&2) image: { "name": "ghcr.io/spack/e4s-amazonlinux-2:v2022-03-21", "entrypoint": [""] } + match_behavior: first mappings: - match: - llvm diff --git a/share/spack/gitlab/cloud_pipelines/stacks/build_systems/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/build_systems/spack.yaml index 700d32add8e..53d8cefc4af 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/build_systems/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/build_systems/spack.yaml @@ -6,6 +6,7 @@ spack: unify: when_possible config: + build_jobs: 32 install_tree: root: /home/software/spack padded_length: 512 @@ -14,6 +15,8 @@ spack: definitions: - default_specs: + - 'uncrustify 
build_system=autotools' + - 'uncrustify build_system=cmake' - lz4 # MakefilePackage - mpich~fortran # AutotoolsPackage - py-setuptools # PythonPackage @@ -32,19 +35,27 @@ spack: gitlab-ci: script: + - uname -a || true + - grep -E 'vendor|model name' /proc/cpuinfo 2>/dev/null | sort -u || head -n10 /proc/cpuinfo 2>/dev/null || true + - nproc + - curl -Lfs 'https://github.com/JuliaBinaryWrappers/GNUMake_jll.jl/releases/download/GNUMake-v4.3.0+1/GNUMake.v4.3.0.x86_64-linux-gnu.tar.gz' -o gmake.tar.gz + - printf 'fef1f59e56d2d11e6d700ba22d3444b6e583c663d6883fd0a4f63ab8bd280f0f gmake.tar.gz' | sha256sum --check --strict --quiet + - tar -xzf gmake.tar.gz -C /usr bin/make 2> /dev/null - . "./share/spack/setup-env.sh" - spack --version + - spack arch - cd ${SPACK_CONCRETE_ENV_DIR} - spack env activate --without-view . - spack config add "config:install_tree:projections:${SPACK_JOB_SPEC_PKG_NAME}:'morepadding/{architecture}/{compiler.name}-{compiler.version}/{name}-{version}-{hash}'" - if [[ -r /mnt/key/intermediate_ci_signing_key.gpg ]]; then spack gpg trust /mnt/key/intermediate_ci_signing_key.gpg; fi - if [[ -r /mnt/key/spack_public_key.gpg ]]; then spack gpg trust /mnt/key/spack_public_key.gpg; fi - - spack -d ci rebuild + - spack --color=always --backtrace ci rebuild image: name: "ghcr.io/spack/e4s-ubuntu-18.04:v2021-10-18" entrypoint: [ "" ] + match_behavior: first mappings: - match: - cmake diff --git a/share/spack/gitlab/cloud_pipelines/stacks/data-vis-sdk/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/data-vis-sdk/spack.yaml index ae9025e366d..de1e68fcd41 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/data-vis-sdk/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/data-vis-sdk/spack.yaml @@ -6,6 +6,7 @@ spack: unify: false config: + build_jobs: 32 install_tree: root: /home/software/spack padded_length: 512 @@ -47,14 +48,22 @@ spack: gitlab-ci: image: { "name": "ghcr.io/spack/e4s-ubuntu-18.04:v2021-10-18", "entrypoint": [""] } script: + - 
uname -a || true + - grep -E 'vendor|model name' /proc/cpuinfo 2>/dev/null | sort -u || head -n10 /proc/cpuinfo 2>/dev/null || true + - nproc + - curl -Lfs 'https://github.com/JuliaBinaryWrappers/GNUMake_jll.jl/releases/download/GNUMake-v4.3.0+1/GNUMake.v4.3.0.x86_64-linux-gnu.tar.gz' -o gmake.tar.gz + - printf 'fef1f59e56d2d11e6d700ba22d3444b6e583c663d6883fd0a4f63ab8bd280f0f gmake.tar.gz' | sha256sum --check --strict --quiet + - tar -xzf gmake.tar.gz -C /usr bin/make 2> /dev/null - . "./share/spack/setup-env.sh" - spack --version + - spack arch - cd ${SPACK_CONCRETE_ENV_DIR} - spack env activate --without-view . - spack config add "config:install_tree:projections:${SPACK_JOB_SPEC_PKG_NAME}:'morepadding/{architecture}/{compiler.name}-{compiler.version}/{name}-{version}-{hash}'" - if [[ -r /mnt/key/intermediate_ci_signing_key.gpg ]]; then spack gpg trust /mnt/key/intermediate_ci_signing_key.gpg; fi - if [[ -r /mnt/key/spack_public_key.gpg ]]; then spack gpg trust /mnt/key/spack_public_key.gpg; fi - - spack -d ci rebuild + - spack --color=always --backtrace ci rebuild + match_behavior: first mappings: - match: - llvm diff --git a/share/spack/gitlab/cloud_pipelines/stacks/e4s-mac/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/e4s-mac/spack.yaml index 34517c1fa92..539dcd5d592 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/e4s-mac/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/e4s-mac/spack.yaml @@ -45,12 +45,14 @@ spack: - tmp="$(mktemp -d)"; export SPACK_USER_CONFIG_PATH="$tmp"; export SPACK_USER_CACHE_PATH="$tmp" - . "./share/spack/setup-env.sh" - spack --version + - spack arch - cd ${SPACK_CONCRETE_ENV_DIR} - spack env activate --without-view . 
- spack config add "config:install_tree:projections:${SPACK_JOB_SPEC_PKG_NAME}:'morepadding/{architecture}/{compiler.name}-{compiler.version}/{name}-{version}-{hash}'" - mkdir -p ${SPACK_ARTIFACTS_ROOT}/user_data - - spack -d ci rebuild > >(tee ${SPACK_ARTIFACTS_ROOT}/user_data/pipeline_out.txt) 2> >(tee ${SPACK_ARTIFACTS_ROOT}/user_data/pipeline_err.txt >&2) + - spack --color=always --backtrace ci rebuild > >(tee ${SPACK_ARTIFACTS_ROOT}/user_data/pipeline_out.txt) 2> >(tee ${SPACK_ARTIFACTS_ROOT}/user_data/pipeline_err.txt >&2) + match_behavior: first mappings: - match: ['os=monterey'] runner-attributes: diff --git a/share/spack/gitlab/cloud_pipelines/stacks/e4s-on-power/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/e4s-on-power/spack.yaml index 24a4879277c..4ef8a090a55 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/e4s-on-power/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/e4s-on-power/spack.yaml @@ -214,13 +214,21 @@ spack: gitlab-ci: script: + - uname -a || true + - grep -E 'vendor|model name' /proc/cpuinfo 2>/dev/null | sort -u || head -n10 /proc/cpuinfo 2>/dev/null || true + - nproc + - curl -Lfs 'https://github.com/JuliaBinaryWrappers/GNUMake_jll.jl/releases/download/GNUMake-v4.3.0+1/GNUMake.v4.3.0.powerpc64le-linux-gnu.tar.gz' -o gmake.tar.gz + - printf '8096d202fe0a0c400b8c0573c4b9e009f2f10d2fa850a3f495340f16e9c42454 gmake.tar.gz' | sha256sum --check --strict --quiet + - tar -xzf gmake.tar.gz -C /usr bin/make 2> /dev/null - . "./share/spack/setup-env.sh" - spack --version + - spack arch - cd ${SPACK_CONCRETE_ENV_DIR} - spack env activate --without-view . 
- spack config add "config:install_tree:projections:${SPACK_JOB_SPEC_PKG_NAME}:'morepadding/{architecture}/{compiler.name}-{compiler.version}/{name}-{version}-{hash}'" - - spack -d ci rebuild + - spack --color=always --backtrace ci rebuild + match_behavior: first mappings: - match: - cuda diff --git a/share/spack/gitlab/cloud_pipelines/stacks/e4s-oneapi/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/e4s-oneapi/spack.yaml index 44923801260..a064faf4921 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/e4s-oneapi/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/e4s-oneapi/spack.yaml @@ -6,6 +6,7 @@ spack: unify: when_possible config: + build_jobs: 32 concretizer: clingo install_tree: root: /home/software/spack @@ -266,8 +267,15 @@ spack: gitlab-ci: script: + - uname -a || true + - grep -E 'vendor|model name' /proc/cpuinfo 2>/dev/null | sort -u || head -n10 /proc/cpuinfo 2>/dev/null || true + - nproc + - curl -Lfs 'https://github.com/JuliaBinaryWrappers/GNUMake_jll.jl/releases/download/GNUMake-v4.3.0+1/GNUMake.v4.3.0.x86_64-linux-gnu.tar.gz' -o gmake.tar.gz + - printf 'fef1f59e56d2d11e6d700ba22d3444b6e583c663d6883fd0a4f63ab8bd280f0f gmake.tar.gz' | sha256sum --check --strict --quiet + - tar -xzf gmake.tar.gz -C /usr bin/make 2> /dev/null - . "./share/spack/setup-env.sh" - spack --version + - spack arch - cd ${SPACK_CONCRETE_ENV_DIR} - spack env activate --without-view . - spack config add "config:install_tree:projections:${SPACK_JOB_SPEC_PKG_NAME}:'morepadding/{architecture}/{compiler.name}-{compiler.version}/{name}-{version}-{hash}'" @@ -278,10 +286,11 @@ spack: - . 
/bootstrap/runner/install/linux-ubuntu20.04-x86_64/gcc-9.4.0/lmod-8.7.2-ri26z7qy6ixtgpsqinswx3w6tuggluv5/lmod/8.7.2/init/bash - module use /opt/intel/oneapi/modulefiles - module load compiler - - spack -d ci rebuild > >(tee ${SPACK_ARTIFACTS_ROOT}/user_data/pipeline_out.txt) 2> >(tee ${SPACK_ARTIFACTS_ROOT}/user_data/pipeline_err.txt >&2) + - spack --color=always --backtrace ci rebuild > >(tee ${SPACK_ARTIFACTS_ROOT}/user_data/pipeline_out.txt) 2> >(tee ${SPACK_ARTIFACTS_ROOT}/user_data/pipeline_err.txt >&2) image: ecpe4s/ubuntu20.04-runner-x86_64-oneapi:2022-07-01 + match_behavior: first mappings: - match: - hipblas diff --git a/share/spack/gitlab/cloud_pipelines/stacks/e4s/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/e4s/spack.yaml index 853e293e6c6..b6583d3a4e0 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/e4s/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/e4s/spack.yaml @@ -6,6 +6,7 @@ spack: unify: false config: + build_jobs: 32 concretizer: clingo install_tree: root: /home/software/spack @@ -21,6 +22,8 @@ spack: mpi: [mpich] target: [x86_64] variants: +mpi + tbb: + require: "intel-tbb" binutils: variants: +ld +gold +headers +libiberty ~nls cuda: @@ -206,6 +209,7 @@ spack: - arborx +rocm amdgpu_target=gfx90a - cabana +rocm - caliper +rocm amdgpu_target=gfx90a + - chai ~benchmarks +rocm amdgpu_target=gfx90a - gasnet +rocm amdgpu_target=gfx90a - ginkgo +rocm amdgpu_target=gfx90a - heffte +rocm amdgpu_target=gfx90a @@ -214,8 +218,10 @@ spack: - hypre +rocm amdgpu_target=gfx90a - kokkos +rocm amdgpu_target=gfx90a - magma ~cuda +rocm amdgpu_target=gfx90a + - mfem +rocm amdgpu_target=gfx90a - papi +rocm amdgpu_target=gfx90a - petsc +rocm amdgpu_target=gfx90a + - raja ~openmp +rocm amdgpu_target=gfx90a - slate +rocm amdgpu_target=gfx90a - slepc +rocm amdgpu_target=gfx90a ^petsc +rocm amdgpu_target=gfx90a - strumpack ~slate +rocm amdgpu_target=gfx90a @@ -223,6 +229,12 @@ spack: - superlu-dist +rocm amdgpu_target=gfx90a - tasmanian 
~openmp +rocm amdgpu_target=gfx90a - tau +mpi +rocm + - trilinos@13.4.0 +amesos +amesos2 +anasazi +aztec ~belos +boost +epetra +epetraext + +ifpack ~ifpack2 +intrepid +intrepid2 +isorropia +kokkos +ml +minitensor +muelu + +nox +piro +phalanx +rol +rythmos +sacado +stk +shards +shylu ~stokhos +stratimikos + +teko +tempus +tpetra +trilinoscouplings +zoltan +zoltan2 +superlu-dist gotype=long_long + +rocm amdgpu_target=gfx90a + - umpire +rocm amdgpu_target=gfx90a - upcxx +rocm amdgpu_target=gfx90a - vtk-m ~openmp +rocm amdgpu_target=gfx90a @@ -237,7 +249,6 @@ spack: #- parsec +cuda cuda_arch=80 # parsec/mca/device/cuda/transfer.c:168: multiple definition of `parsec_CUDA_d2h_max_flows'; # ROCm failures - #- chai ~benchmarks +rocm amdgpu_target=gfx90a # umpire: Target "blt_hip" INTERFACE_INCLUDE_DIRECTORIES property contains path: "/tmp/root/spack-stage/spack-stage-umpire-2022.03.1-by6rldnpdowaaoqgxkeqejwyx5uxo2sv/spack-src/HIP_CLANG_INCLUDE_PATH-NOTFOUND/.." which is prefixed in the source directory. #- raja ~openmp +rocm amdgpu_target=gfx90a # cmake: Could NOT find ROCPRIM (missing: ROCPRIM_INCLUDE_DIRS) #- umpire +rocm amdgpu_target=gfx90a # Target "blt_hip" INTERFACE_INCLUDE_DIRECTORIES property contains path: "/tmp/root/spack-stage/spack-stage-umpire-2022.03.1-by6rldnpdowaaoqgxkeqejwyx5uxo2sv/spack-src/HIP_CLANG_INCLUDE_PATH-NOTFOUND/.." which is prefixed in the source directory. @@ -246,21 +257,29 @@ spack: gitlab-ci: script: + - uname -a || true + - grep -E 'vendor|model name' /proc/cpuinfo 2>/dev/null | sort -u || head -n10 /proc/cpuinfo 2>/dev/null || true + - nproc + - curl -Lfs 'https://github.com/JuliaBinaryWrappers/GNUMake_jll.jl/releases/download/GNUMake-v4.3.0+1/GNUMake.v4.3.0.x86_64-linux-gnu.tar.gz' -o gmake.tar.gz + - printf 'fef1f59e56d2d11e6d700ba22d3444b6e583c663d6883fd0a4f63ab8bd280f0f gmake.tar.gz' | sha256sum --check --strict --quiet + - tar -xzf gmake.tar.gz -C /usr bin/make 2> /dev/null - . 
"./share/spack/setup-env.sh" - spack --version + - spack arch - cd ${SPACK_CONCRETE_ENV_DIR} - spack env activate --without-view . - spack config add "config:install_tree:projections:${SPACK_JOB_SPEC_PKG_NAME}:'morepadding/{architecture}/{compiler.name}-{compiler.version}/{name}-{version}-{hash}'" - mkdir -p ${SPACK_ARTIFACTS_ROOT}/user_data - if [[ -r /mnt/key/intermediate_ci_signing_key.gpg ]]; then spack gpg trust /mnt/key/intermediate_ci_signing_key.gpg; fi - if [[ -r /mnt/key/spack_public_key.gpg ]]; then spack gpg trust /mnt/key/spack_public_key.gpg; fi - - spack -d ci rebuild --tests > >(tee ${SPACK_ARTIFACTS_ROOT}/user_data/pipeline_out.txt) 2> >(tee ${SPACK_ARTIFACTS_ROOT}/user_data/pipeline_err.txt >&2) + - spack --color=always --backtrace ci rebuild --tests > >(tee ${SPACK_ARTIFACTS_ROOT}/user_data/pipeline_out.txt) 2> >(tee ${SPACK_ARTIFACTS_ROOT}/user_data/pipeline_err.txt >&2) image: ecpe4s/ubuntu20.04-runner-x86_64:2022-10-01 broken-tests-packages: - gptune + match_behavior: first mappings: - match: - hipblas @@ -452,7 +471,7 @@ spack: - aws s3 sync --exclude "*" --include "*spec.json.sig*" /tmp ${SPACK_REMOTE_MIRROR_OVERRIDE}/build_cache cdash: - build-group: New PR testing workflow + build-group: E4S url: https://cdash.spack.io project: Spack Testing site: Cloud Gitlab Infrastructure diff --git a/share/spack/gitlab/cloud_pipelines/stacks/ml-cpu/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/ml-cpu/spack.yaml index e6b49b9f41c..90f8428ca35 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/ml-cpu/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/ml-cpu/spack.yaml @@ -6,6 +6,7 @@ spack: unify: false config: + build_jobs: 32 concretizer: clingo install_tree: root: /home/software/spack @@ -16,13 +17,16 @@ spack: packages: all: compiler: [gcc@11.2.0] - target: [x86_64_v4] + target: [x86_64_v3] variants: ~cuda~rocm specs: # Horovod - py-horovod + # Hugging Face + - py-transformers + # JAX # https://github.com/google/jax/issues/12614 
# - py-jax @@ -87,16 +91,24 @@ spack: gitlab-ci: script: + - uname -a || true + - grep -E 'vendor|model name' /proc/cpuinfo 2>/dev/null | sort -u || head -n10 /proc/cpuinfo 2>/dev/null || true + - nproc + - curl -Lfs 'https://github.com/JuliaBinaryWrappers/GNUMake_jll.jl/releases/download/GNUMake-v4.3.0+1/GNUMake.v4.3.0.x86_64-linux-gnu.tar.gz' -o gmake.tar.gz + - printf 'fef1f59e56d2d11e6d700ba22d3444b6e583c663d6883fd0a4f63ab8bd280f0f gmake.tar.gz' | sha256sum --check --strict --quiet + - tar -xzf gmake.tar.gz -C /usr bin/make 2> /dev/null - . "./share/spack/setup-env.sh" - spack --version + - spack arch - cd ${SPACK_CONCRETE_ENV_DIR} - spack env activate --without-view . - spack config add "config:install_tree:projections:${SPACK_JOB_SPEC_PKG_NAME}:'morepadding/{architecture}/{compiler.name}-{compiler.version}/{name}-{version}-{hash}'" - mkdir -p ${SPACK_ARTIFACTS_ROOT}/user_data - if [[ -r /mnt/key/intermediate_ci_signing_key.gpg ]]; then spack gpg trust /mnt/key/intermediate_ci_signing_key.gpg; fi - if [[ -r /mnt/key/spack_public_key.gpg ]]; then spack gpg trust /mnt/key/spack_public_key.gpg; fi - - spack -d ci rebuild > >(tee ${SPACK_ARTIFACTS_ROOT}/user_data/pipeline_out.txt) 2> >(tee ${SPACK_ARTIFACTS_ROOT}/user_data/pipeline_err.txt >&2) + - spack --color=always --backtrace ci rebuild > >(tee ${SPACK_ARTIFACTS_ROOT}/user_data/pipeline_out.txt) 2> >(tee ${SPACK_ARTIFACTS_ROOT}/user_data/pipeline_err.txt >&2) + match_behavior: first mappings: - match: - llvm diff --git a/share/spack/gitlab/cloud_pipelines/stacks/ml-cuda/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/ml-cuda/spack.yaml index eb37168665f..849b9ae08b9 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/ml-cuda/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/ml-cuda/spack.yaml @@ -6,6 +6,7 @@ spack: unify: false config: + build_jobs: 32 concretizer: clingo install_tree: root: /home/software/spack @@ -16,7 +17,7 @@ spack: packages: all: compiler: [gcc@11.2.0] - target: 
[x86_64_v4] + target: [x86_64_v3] variants: ~rocm+cuda cuda_arch=80 llvm: # https://github.com/spack/spack/issues/27999 @@ -26,6 +27,9 @@ spack: # Horovod - py-horovod + # Hugging Face + - py-transformers + # JAX # https://github.com/google/jax/issues/12614 # - py-jax @@ -90,16 +94,24 @@ spack: gitlab-ci: script: + - uname -a || true + - grep -E 'vendor|model name' /proc/cpuinfo 2>/dev/null | sort -u || head -n10 /proc/cpuinfo 2>/dev/null || true + - nproc + - curl -Lfs 'https://github.com/JuliaBinaryWrappers/GNUMake_jll.jl/releases/download/GNUMake-v4.3.0+1/GNUMake.v4.3.0.x86_64-linux-gnu.tar.gz' -o gmake.tar.gz + - printf 'fef1f59e56d2d11e6d700ba22d3444b6e583c663d6883fd0a4f63ab8bd280f0f gmake.tar.gz' | sha256sum --check --strict --quiet + - tar -xzf gmake.tar.gz -C /usr bin/make 2> /dev/null - . "./share/spack/setup-env.sh" - spack --version + - spack arch - cd ${SPACK_CONCRETE_ENV_DIR} - spack env activate --without-view . - spack config add "config:install_tree:projections:${SPACK_JOB_SPEC_PKG_NAME}:'morepadding/{architecture}/{compiler.name}-{compiler.version}/{name}-{version}-{hash}'" - mkdir -p ${SPACK_ARTIFACTS_ROOT}/user_data - if [[ -r /mnt/key/intermediate_ci_signing_key.gpg ]]; then spack gpg trust /mnt/key/intermediate_ci_signing_key.gpg; fi - if [[ -r /mnt/key/spack_public_key.gpg ]]; then spack gpg trust /mnt/key/spack_public_key.gpg; fi - - spack -d ci rebuild > >(tee ${SPACK_ARTIFACTS_ROOT}/user_data/pipeline_out.txt) 2> >(tee ${SPACK_ARTIFACTS_ROOT}/user_data/pipeline_err.txt >&2) + - spack --color=always --backtrace ci rebuild > >(tee ${SPACK_ARTIFACTS_ROOT}/user_data/pipeline_out.txt) 2> >(tee ${SPACK_ARTIFACTS_ROOT}/user_data/pipeline_err.txt >&2) + match_behavior: first mappings: - match: - llvm diff --git a/share/spack/gitlab/cloud_pipelines/stacks/ml-rocm/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/ml-rocm/spack.yaml index c437b170e4b..e49e805fc72 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/ml-rocm/spack.yaml +++ 
b/share/spack/gitlab/cloud_pipelines/stacks/ml-rocm/spack.yaml @@ -6,6 +6,7 @@ spack: unify: false config: + build_jobs: 32 concretizer: clingo install_tree: root: /home/software/spack @@ -16,7 +17,7 @@ spack: packages: all: compiler: [gcc@11.2.0] - target: [x86_64_v4] + target: [x86_64_v3] variants: ~cuda+rocm amdgpu_target=gfx90a gl: require: "osmesa" @@ -28,6 +29,9 @@ spack: # Horovod - py-horovod + # Hugging Face + - py-transformers + # JAX # https://github.com/google/jax/issues/12614 # - py-jax @@ -93,16 +97,24 @@ spack: gitlab-ci: script: + - uname -a || true + - grep -E 'vendor|model name' /proc/cpuinfo 2>/dev/null | sort -u || head -n10 /proc/cpuinfo 2>/dev/null || true + - nproc + - curl -Lfs 'https://github.com/JuliaBinaryWrappers/GNUMake_jll.jl/releases/download/GNUMake-v4.3.0+1/GNUMake.v4.3.0.x86_64-linux-gnu.tar.gz' -o gmake.tar.gz + - printf 'fef1f59e56d2d11e6d700ba22d3444b6e583c663d6883fd0a4f63ab8bd280f0f gmake.tar.gz' | sha256sum --check --strict --quiet + - tar -xzf gmake.tar.gz -C /usr bin/make 2> /dev/null - . "./share/spack/setup-env.sh" - spack --version + - spack arch - cd ${SPACK_CONCRETE_ENV_DIR} - spack env activate --without-view . 
- spack config add "config:install_tree:projections:${SPACK_JOB_SPEC_PKG_NAME}:'morepadding/{architecture}/{compiler.name}-{compiler.version}/{name}-{version}-{hash}'" - mkdir -p ${SPACK_ARTIFACTS_ROOT}/user_data - if [[ -r /mnt/key/intermediate_ci_signing_key.gpg ]]; then spack gpg trust /mnt/key/intermediate_ci_signing_key.gpg; fi - if [[ -r /mnt/key/spack_public_key.gpg ]]; then spack gpg trust /mnt/key/spack_public_key.gpg; fi - - spack -d ci rebuild > >(tee ${SPACK_ARTIFACTS_ROOT}/user_data/pipeline_out.txt) 2> >(tee ${SPACK_ARTIFACTS_ROOT}/user_data/pipeline_err.txt >&2) + - spack --color=always --backtrace ci rebuild > >(tee ${SPACK_ARTIFACTS_ROOT}/user_data/pipeline_out.txt) 2> >(tee ${SPACK_ARTIFACTS_ROOT}/user_data/pipeline_err.txt >&2) + match_behavior: first mappings: - match: - llvm diff --git a/share/spack/gitlab/cloud_pipelines/stacks/radiuss-aws-aarch64/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/radiuss-aws-aarch64/spack.yaml index 4d18961d4b3..1c597d10df0 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/radiuss-aws-aarch64/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/radiuss-aws-aarch64/spack.yaml @@ -6,6 +6,7 @@ spack: unify: false config: + build_jobs: 32 concretizer: clingo install_tree: root: /home/software/spack @@ -56,17 +57,25 @@ spack: gitlab-ci: script: + - uname -a || true + - grep -E 'vendor|model name' /proc/cpuinfo 2>/dev/null | sort -u || head -n10 /proc/cpuinfo 2>/dev/null || true + - nproc + - curl -Lfs 'https://github.com/JuliaBinaryWrappers/GNUMake_jll.jl/releases/download/GNUMake-v4.3.0+1/GNUMake.v4.3.0.aarch64-linux-gnu.tar.gz' -o gmake.tar.gz + - printf '2322c175fb092b426f9eb6c24ee22d94ffa6759c3d0c260b74d81abd8120122b gmake.tar.gz' | sha256sum --check --strict --quiet + - tar -xzf gmake.tar.gz -C /usr bin/make 2> /dev/null - . "./share/spack/setup-env.sh" - spack --version + - spack arch - cd ${SPACK_CONCRETE_ENV_DIR} - spack env activate --without-view . 
- spack config add "config:install_tree:projections:${SPACK_JOB_SPEC_PKG_NAME}:'morepadding/{architecture}/{compiler.name}-{compiler.version}/{name}-{version}-{hash}'" - mkdir -p ${SPACK_ARTIFACTS_ROOT}/user_data - if [[ -r /mnt/key/intermediate_ci_signing_key.gpg ]]; then spack gpg trust /mnt/key/intermediate_ci_signing_key.gpg; fi - if [[ -r /mnt/key/spack_public_key.gpg ]]; then spack gpg trust /mnt/key/spack_public_key.gpg; fi - - spack -d ci rebuild > >(tee ${SPACK_ARTIFACTS_ROOT}/user_data/pipeline_out.txt) 2> >(tee ${SPACK_ARTIFACTS_ROOT}/user_data/pipeline_err.txt >&2) + - spack --color=always --backtrace ci rebuild > >(tee ${SPACK_ARTIFACTS_ROOT}/user_data/pipeline_out.txt) 2> >(tee ${SPACK_ARTIFACTS_ROOT}/user_data/pipeline_err.txt >&2) image: { "name": "ghcr.io/spack/e4s-amazonlinux-2:v2022-03-21", "entrypoint": [""] } + match_behavior: first mappings: - match: - llvm diff --git a/share/spack/gitlab/cloud_pipelines/stacks/radiuss-aws/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/radiuss-aws/spack.yaml index c6c895bd365..ce93b57303a 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/radiuss-aws/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/radiuss-aws/spack.yaml @@ -6,6 +6,7 @@ spack: unify: false config: + build_jobs: 32 concretizer: clingo install_tree: root: /home/software/spack @@ -61,17 +62,25 @@ spack: gitlab-ci: script: + - uname -a || true + - grep -E 'vendor|model name' /proc/cpuinfo 2>/dev/null | sort -u || head -n10 /proc/cpuinfo 2>/dev/null || true + - nproc + - curl -Lfs 'https://github.com/JuliaBinaryWrappers/GNUMake_jll.jl/releases/download/GNUMake-v4.3.0+1/GNUMake.v4.3.0.x86_64-linux-gnu.tar.gz' -o gmake.tar.gz + - printf 'fef1f59e56d2d11e6d700ba22d3444b6e583c663d6883fd0a4f63ab8bd280f0f gmake.tar.gz' | sha256sum --check --strict --quiet + - tar -xzf gmake.tar.gz -C /usr bin/make 2> /dev/null - . 
"./share/spack/setup-env.sh" - spack --version + - spack arch - cd ${SPACK_CONCRETE_ENV_DIR} - spack env activate --without-view . - spack config add "config:install_tree:projections:${SPACK_JOB_SPEC_PKG_NAME}:'morepadding/{architecture}/{compiler.name}-{compiler.version}/{name}-{version}-{hash}'" - mkdir -p ${SPACK_ARTIFACTS_ROOT}/user_data - if [[ -r /mnt/key/intermediate_ci_signing_key.gpg ]]; then spack gpg trust /mnt/key/intermediate_ci_signing_key.gpg; fi - if [[ -r /mnt/key/spack_public_key.gpg ]]; then spack gpg trust /mnt/key/spack_public_key.gpg; fi - - spack -d ci rebuild > >(tee ${SPACK_ARTIFACTS_ROOT}/user_data/pipeline_out.txt) 2> >(tee ${SPACK_ARTIFACTS_ROOT}/user_data/pipeline_err.txt >&2) + - spack --color=always --backtrace ci rebuild > >(tee ${SPACK_ARTIFACTS_ROOT}/user_data/pipeline_out.txt) 2> >(tee ${SPACK_ARTIFACTS_ROOT}/user_data/pipeline_err.txt >&2) image: { "name": "ghcr.io/spack/e4s-amazonlinux-2:v2022-03-21", "entrypoint": [""] } + match_behavior: first mappings: - match: - llvm diff --git a/share/spack/gitlab/cloud_pipelines/stacks/radiuss/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/radiuss/spack.yaml index ebb15eb7ca0..fc7700be11f 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/radiuss/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/radiuss/spack.yaml @@ -6,6 +6,7 @@ spack: unify: false config: + build_jobs: 32 concretizer: clingo install_tree: root: /home/software/radiuss @@ -64,14 +65,22 @@ spack: gitlab-ci: image: { "name": "ghcr.io/spack/e4s-ubuntu-18.04:v2021-10-18", "entrypoint": [""] } script: + - uname -a || true + - grep -E 'vendor|model name' /proc/cpuinfo 2>/dev/null | sort -u || head -n10 /proc/cpuinfo 2>/dev/null || true + - nproc + - curl -Lfs 'https://github.com/JuliaBinaryWrappers/GNUMake_jll.jl/releases/download/GNUMake-v4.3.0+1/GNUMake.v4.3.0.x86_64-linux-gnu.tar.gz' -o gmake.tar.gz + - printf 'fef1f59e56d2d11e6d700ba22d3444b6e583c663d6883fd0a4f63ab8bd280f0f gmake.tar.gz' | sha256sum 
--check --strict --quiet + - tar -xzf gmake.tar.gz -C /usr bin/make 2> /dev/null - . "./share/spack/setup-env.sh" - spack --version + - spack arch - cd ${SPACK_CONCRETE_ENV_DIR} - spack env activate --without-view . - spack config add "config:install_tree:projections:${SPACK_JOB_SPEC_PKG_NAME}:'morepadding/{architecture}/{compiler.name}-{compiler.version}/{name}-{version}-{hash}'" - if [[ -r /mnt/key/intermediate_ci_signing_key.gpg ]]; then spack gpg trust /mnt/key/intermediate_ci_signing_key.gpg; fi - if [[ -r /mnt/key/spack_public_key.gpg ]]; then spack gpg trust /mnt/key/spack_public_key.gpg; fi - - spack -d ci rebuild + - spack --color=always --backtrace ci rebuild + match_behavior: first mappings: - match: - lbann diff --git a/share/spack/gitlab/cloud_pipelines/stacks/tutorial/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/tutorial/spack.yaml index 504b4209d27..58ee39c1139 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/tutorial/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/tutorial/spack.yaml @@ -6,6 +6,7 @@ spack: unify: false config: + build_jobs: 32 install_tree: root: /home/software/spack padded_length: 512 @@ -18,6 +19,8 @@ spack: packages: all: target: [x86_64] + tbb: + require: 'intel-tbb' definitions: - gcc_system_packages: @@ -63,17 +66,25 @@ spack: gitlab-ci: script: + - uname -a || true + - grep -E 'vendor|model name' /proc/cpuinfo 2>/dev/null | sort -u || head -n10 /proc/cpuinfo 2>/dev/null || true + - nproc + - curl -Lfs 'https://github.com/JuliaBinaryWrappers/GNUMake_jll.jl/releases/download/GNUMake-v4.3.0+1/GNUMake.v4.3.0.x86_64-linux-gnu.tar.gz' -o gmake.tar.gz + - printf 'fef1f59e56d2d11e6d700ba22d3444b6e583c663d6883fd0a4f63ab8bd280f0f gmake.tar.gz' | sha256sum --check --strict --quiet + - tar -xzf gmake.tar.gz -C /usr bin/make 2> /dev/null - . "./share/spack/setup-env.sh" - spack --version + - spack arch - spack compiler find - cd ${SPACK_CONCRETE_ENV_DIR} - spack env activate --without-view . 
- spack config add "config:install_tree:projections:${SPACK_JOB_SPEC_PKG_NAME}:'morepadding/{architecture}/{compiler.name}-{compiler.version}/{name}-{version}-{hash}'" - if [[ -r /mnt/key/intermediate_ci_signing_key.gpg ]]; then spack gpg trust /mnt/key/intermediate_ci_signing_key.gpg; fi - if [[ -r /mnt/key/spack_public_key.gpg ]]; then spack gpg trust /mnt/key/spack_public_key.gpg; fi - - spack -d ci rebuild + - spack --color=always --backtrace ci rebuild image: { "name": "ghcr.io/spack/tutorial-ubuntu-18.04:v2021-11-02", "entrypoint": [""] } + match_behavior: first mappings: - match: - cmake diff --git a/share/spack/qa/run-shell-tests b/share/spack/qa/run-shell-tests index a300bee98bf..465c68000e5 100755 --- a/share/spack/qa/run-shell-tests +++ b/share/spack/qa/run-shell-tests @@ -56,9 +56,3 @@ fish "$QA_DIR/setup-env-test.fish" # run csh and tcsh tests csh "$QA_DIR/setup-env-test.csh" tcsh "$QA_DIR/setup-env-test.csh" - -# Delete the symlink going from ./lib/spack/docs/_spack_root back to -# the initial directory, since it causes ELOOP errors with codecov/actions@2 -if [[ "$COVERAGE" == "true" ]]; then - rm lib/spack/docs/_spack_root -fi diff --git a/share/spack/qa/run-unit-tests b/share/spack/qa/run-unit-tests index 2de6b6101d3..842a00caf2c 100755 --- a/share/spack/qa/run-unit-tests +++ b/share/spack/qa/run-unit-tests @@ -66,7 +66,7 @@ fi # where it seems that otherwise the configuration file might not be located by subprocesses # in some, not better specified, cases. 
if [[ "$UNIT_TEST_COVERAGE" == "true" ]]; then - $(which spack) unit-test -x --verbose --cov --cov-config=pyproject.toml + $(which spack) unit-test -x --verbose --cov --cov-config=pyproject.toml --cov-report=xml:coverage.xml else $(which spack) unit-test -x --verbose fi @@ -74,7 +74,3 @@ fi bash "$QA_DIR/test-env-cfg.sh" - -# Delete the symlink going from ./lib/spack/docs/_spack_root back to -# the initial directory, since it causes ELOOP errors with codecov/actions@2 -rm lib/spack/docs/_spack_root diff --git a/share/spack/spack-completion.bash b/share/spack/spack-completion.bash index 1543cffd92f..2461a0b7c77 100755 --- a/share/spack/spack-completion.bash +++ b/share/spack/spack-completion.bash @@ -335,7 +335,7 @@ _spacktivate() { _spack() { if $list_options then - SPACK_COMPREPLY="-h --help -H --all-help --color -c --config -C --config-scope -d --debug --timestamp --pdb -e --env -D --env-dir -E --no-env --use-env-repo -k --insecure -l --enable-locks -L --disable-locks -m --mock -b --bootstrap -p --profile --sorted-profile --lines -v --verbose --stacktrace -V --version --print-shell-vars" + SPACK_COMPREPLY="-h --help -H --all-help --color -c --config -C --config-scope -d --debug --timestamp --pdb -e --env -D --env-dir -E --no-env --use-env-repo -k --insecure -l --enable-locks -L --disable-locks -m --mock -b --bootstrap -p --profile --sorted-profile --lines -v --verbose --stacktrace --backtrace -V --version --print-shell-vars" else SPACK_COMPREPLY="activate add arch audit blame bootstrap build-env buildcache cd change checksum ci clean clone commands compiler compilers concretize config containerize create deactivate debug dependencies dependents deprecate dev-build develop diff docs edit env extensions external fetch find gc gpg graph help info install license list load location log-parse maintainers make-installer mark mirror module patch pkg providers pydoc python reindex remove rm repo resource restage solve spec stage style tags test test-env tutorial 
undevelop uninstall unit-test unload url verify versions view" fi @@ -412,20 +412,34 @@ _spack_bootstrap() { then SPACK_COMPREPLY="-h --help" else - SPACK_COMPREPLY="status enable disable reset root list trust untrust add remove mirror" + SPACK_COMPREPLY="now status enable disable reset root list trust untrust add remove mirror" fi } +_spack_bootstrap_now() { + SPACK_COMPREPLY="-h --help" +} + _spack_bootstrap_status() { SPACK_COMPREPLY="-h --help --optional --dev" } _spack_bootstrap_enable() { - SPACK_COMPREPLY="-h --help --scope" + if $list_options + then + SPACK_COMPREPLY="-h --help --scope" + else + SPACK_COMPREPLY="" + fi } _spack_bootstrap_disable() { - SPACK_COMPREPLY="-h --help --scope" + if $list_options + then + SPACK_COMPREPLY="-h --help --scope" + else + SPACK_COMPREPLY="" + fi } _spack_bootstrap_reset() { @@ -504,7 +518,7 @@ _spack_buildcache() { then SPACK_COMPREPLY="-h --help" else - SPACK_COMPREPLY="create install list keys preview check download get-buildcache-name save-specfile copy sync update-index" + SPACK_COMPREPLY="create install list keys preview check download get-buildcache-name save-specfile sync update-index" fi } @@ -564,10 +578,6 @@ _spack_buildcache_save_specfile() { SPACK_COMPREPLY="-h --help --root-spec --root-specfile -s --specs --specfile-dir" } -_spack_buildcache_copy() { - SPACK_COMPREPLY="-h --help --base-dir --spec-file --destination-url" -} - _spack_buildcache_sync() { SPACK_COMPREPLY="-h --help --manifest-glob --src-directory --src-mirror-name --src-mirror-url --dest-directory --dest-mirror-name --dest-mirror-url" } @@ -1018,7 +1028,12 @@ _spack_env_revert() { } _spack_env_depfile() { - SPACK_COMPREPLY="-h --help --make-target-prefix --make-disable-jobserver -o --output -G --generator" + if $list_options + then + SPACK_COMPREPLY="-h --help --make-target-prefix --make-disable-jobserver --use-buildcache -o --output -G --generator" + else + _all_packages + fi } _spack_extensions() { @@ -1188,7 +1203,7 @@ _spack_info() { 
_spack_install() { if $list_options then - SPACK_COMPREPLY="-h --help --only -u --until -j --jobs --overwrite --fail-fast --keep-prefix --keep-stage --dont-restage --use-cache --no-cache --cache-only --use-buildcache --include-build-deps --no-check-signature --show-log-on-error --source -n --no-checksum --deprecated -v --verbose --fake --only-concrete --no-add -f --file --clean --dirty --test --log-format --log-file --help-cdash --cdash-upload-url --cdash-build --cdash-site --cdash-track --cdash-buildstamp -y --yes-to-all -U --fresh --reuse" + SPACK_COMPREPLY="-h --help --only -u --until -j --jobs --overwrite --fail-fast --keep-prefix --keep-stage --dont-restage --use-cache --no-cache --cache-only --use-buildcache --include-build-deps --no-check-signature --show-log-on-error --source -n --no-checksum --deprecated -v --verbose --fake --only-concrete --add --no-add -f --file --clean --dirty --test --log-format --log-file --help-cdash --cdash-upload-url --cdash-build --cdash-site --cdash-track --cdash-buildstamp -y --yes-to-all -U --fresh --reuse" else _all_packages fi @@ -1733,7 +1748,7 @@ _spack_test() { _spack_test_run() { if $list_options then - SPACK_COMPREPLY="-h --help --alias --fail-fast --fail-first --externals --keep-stage --log-format --log-file --cdash-upload-url --cdash-build --cdash-site --cdash-track --cdash-buildstamp --help-cdash --clean --dirty" + SPACK_COMPREPLY="-h --help --alias --fail-fast --fail-first --externals -x --explicit --keep-stage --log-format --log-file --cdash-upload-url --cdash-build --cdash-site --cdash-track --cdash-buildstamp --help-cdash --clean --dirty" else _installed_packages fi @@ -1809,7 +1824,7 @@ _spack_undevelop() { _spack_uninstall() { if $list_options then - SPACK_COMPREPLY="-h --help -f --force -R --dependents -y --yes-to-all -a --all --origin" + SPACK_COMPREPLY="-h --help -f --force --remove -R --dependents -y --yes-to-all -a --all --origin" else _installed_packages fi diff --git 
a/share/spack/templates/container/bootstrap-base.dockerfile b/share/spack/templates/container/bootstrap-base.dockerfile index 0674ddd5412..e002cce02e1 100644 --- a/share/spack/templates/container/bootstrap-base.dockerfile +++ b/share/spack/templates/container/bootstrap-base.dockerfile @@ -39,7 +39,7 @@ WORKDIR /root SHELL ["docker-shell"] # Creates the package cache -RUN spack spec hdf5+mpi +RUN spack bootstrap now && spack spec hdf5+mpi ENTRYPOINT ["/bin/bash", "/opt/spack/share/spack/docker/entrypoint.bash"] CMD ["interactive-shell"] diff --git a/share/spack/templates/container/leap-15.dockerfile b/share/spack/templates/container/leap-15.dockerfile index 38e4f1c0096..2427c377545 100644 --- a/share/spack/templates/container/leap-15.dockerfile +++ b/share/spack/templates/container/leap-15.dockerfile @@ -12,7 +12,6 @@ RUN zypper ref && \ git\ gzip\ patch\ - patchelf\ python3-base \ python3-boto3\ tar\ diff --git a/share/spack/templates/depfile/Makefile b/share/spack/templates/depfile/Makefile new file mode 100644 index 00000000000..dc0aeb79ea2 --- /dev/null +++ b/share/spack/templates/depfile/Makefile @@ -0,0 +1,36 @@ +SPACK ?= spack + +.PHONY: {{ all_target }} {{ clean_target }} + +{{ all_target }}: {{ env_target }} + +{{ env_target }}: {{ root_install_targets }} + @touch $@ + +{{ dirs_target }}: + @mkdir -p {{ install_target }} {{ install_deps_target }} + +# The spack install commands are of the form: +# spack -e my_env --no-add --only=package --only=concrete /hash +# This is an involved way of expressing that Spack should only install +# an individual concrete spec from the environment without deps. 
+{{ install_target }}/%: {{ install_deps_target }}/% | {{ dirs_target }} + {{ jobserver_support }}$(SPACK) -e '{{ environment }}' install $(SPACK_BUILDCACHE_FLAG) $(SPACK_INSTALL_FLAGS) --only-concrete --only=package --no-add /$(notdir $@) # $(SPEC) + @touch $@ + +{{ install_deps_target }}/%: | {{ dirs_target }} + @touch $@ + +# Set a human-readable SPEC variable for each target that has a hash +{% for (parent, name, build_cache, _) in adjacency_list -%} +{{ any_hash_target }}/{{ parent }}: SPEC = {{ name }} +{{ any_hash_target }}/{{ parent }}: SPACK_BUILDCACHE_FLAG = {{ build_cache }} +{% endfor %} + +# The Spack DAG expressed in targets: +{% for (parent, _, _, prereqs) in adjacency_list -%} +{{ install_deps_target }}/{{ parent }}: {{prereqs}} +{% endfor %} + +{{ clean_target }}: + rm -rf {{ env_target }} {{ cleanable_targets }} diff --git a/var/spack/repos/builder.test/packages/callbacks/package.py b/var/spack/repos/builder.test/packages/callbacks/package.py new file mode 100644 index 00000000000..ea6787d9b57 --- /dev/null +++ b/var/spack/repos/builder.test/packages/callbacks/package.py @@ -0,0 +1,45 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) +import os + +import spack.build_systems.generic +from spack.package import * + + +class Callbacks(Package): + """Package used to verify that callbacks on phases work correctly, including conditions""" + + homepage = "http://www.example.com" + url = "http://www.example.com/a-1.0.tar.gz" + + version("2.0", "abcdef0123456789abcdef0123456789") + version("1.0", "0123456789abcdef0123456789abcdef") + + +class GenericBuilder(spack.build_systems.generic.GenericBuilder): + def install(self, pkg, spec, prefix): + os.environ["CALLBACKS_INSTALL_CALLED"] = "1" + os.environ["INSTALL_VALUE"] = "CALLBACKS" + mkdirp(prefix.bin) + + @run_before("install") + def before_install_1(self): + os.environ["BEFORE_INSTALL_1_CALLED"] = "1" + os.environ["TEST_VALUE"] = "1" + + @run_before("install") + def before_install_2(self): + os.environ["BEFORE_INSTALL_2_CALLED"] = "1" + os.environ["TEST_VALUE"] = "2" + + @run_after("install") + def after_install_1(self): + os.environ["AFTER_INSTALL_1_CALLED"] = "1" + os.environ["TEST_VALUE"] = "3" + + @run_after("install", when="@1.0") + def after_install_2(self): + os.environ["AFTER_INSTALL_2_CALLED"] = "1" + os.environ["TEST_VALUE"] = "4" diff --git a/var/spack/repos/builder.test/packages/custom-phases/package.py b/var/spack/repos/builder.test/packages/custom-phases/package.py new file mode 100644 index 00000000000..37b26e37d0d --- /dev/null +++ b/var/spack/repos/builder.test/packages/custom-phases/package.py @@ -0,0 +1,31 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) +import os + +import spack.build_systems.generic +from spack.package import * + + +class CustomPhases(Package): + """Package used to verify that we can set custom phases on builders""" + + homepage = "http://www.example.com" + url = "http://www.example.com/a-1.0.tar.gz" + + version("2.0", "abcdef0123456789abcdef0123456789") + version("1.0", "0123456789abcdef0123456789abcdef") + + +class GenericBuilder(spack.build_systems.generic.GenericBuilder): + phases = ["configure", "install"] + + def configure(self, pkg, spec, prefix): + os.environ["CONFIGURE_CALLED"] = "1" + os.environ["LAST_PHASE"] = "CONFIGURE" + + def install(self, pkg, spec, prefix): + os.environ["INSTALL_CALLED"] = "1" + os.environ["LAST_PHASE"] = "INSTALL" + mkdirp(prefix.bin) diff --git a/var/spack/repos/builder.test/packages/gnuconfig/package.py b/var/spack/repos/builder.test/packages/gnuconfig/package.py new file mode 100644 index 00000000000..53f8a10705d --- /dev/null +++ b/var/spack/repos/builder.test/packages/gnuconfig/package.py @@ -0,0 +1,15 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) +from spack.package import * + + +class Gnuconfig(Package): + """This package is needed to allow mocking AutotoolsPackage objects""" + + homepage = "http://www.example.com" + url = "http://www.example.com/a-1.0.tar.gz" + + version("2.0", "abcdef0123456789abcdef0123456789") + version("1.0", "0123456789abcdef0123456789abcdef") diff --git a/var/spack/repos/builder.test/packages/inheritance/package.py b/var/spack/repos/builder.test/packages/inheritance/package.py new file mode 100644 index 00000000000..307d93ca802 --- /dev/null +++ b/var/spack/repos/builder.test/packages/inheritance/package.py @@ -0,0 +1,26 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. 
See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) +import os + +import spack.pkg.builder.test.callbacks +from spack.package import * + + +class Inheritance(spack.pkg.builder.test.callbacks.Callbacks): + """Package used to verify that inheritance among packages work as expected""" + + pass + + +class GenericBuilder(spack.pkg.builder.test.callbacks.GenericBuilder): + def install(self, pkg, spec, prefix): + super(GenericBuilder, self).install(pkg, spec, prefix) + os.environ["INHERITANCE_INSTALL_CALLED"] = "1" + os.environ["INSTALL_VALUE"] = "INHERITANCE" + + @run_before("install") + def derived_before_install(self): + os.environ["DERIVED_BEFORE_INSTALL_CALLED"] = "1" + os.environ["TEST_VALUE"] = "0" diff --git a/var/spack/repos/builder.test/packages/old-style-autotools/package.py b/var/spack/repos/builder.test/packages/old-style-autotools/package.py new file mode 100644 index 00000000000..56213d71585 --- /dev/null +++ b/var/spack/repos/builder.test/packages/old-style-autotools/package.py @@ -0,0 +1,50 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) +import os + +from spack.package import * + + +class OldStyleAutotools(AutotoolsPackage): + """Package used to verify that old-style packages work correctly when executing the + installation procedure. + """ + + homepage = "http://www.example.com" + url = "http://www.example.com/a-1.0.tar.gz" + + version("2.0", "abcdef0123456789abcdef0123456789") + version("1.0", "0123456789abcdef0123456789abcdef") + + def configure(self, spec, prefix): + pass + + def build(self, spec, prefix): + pass + + def install(self, spec, prefix): + mkdirp(prefix.bin) + + def configure_args(self): + """This override a function in the builder and construct the result using a method + defined in this class and a super method defined in the builder. 
+ """ + return [self.foo()] + super(OldStyleAutotools, self).configure_args() + + def foo(self): + return "--with-foo" + + @run_before("autoreconf") + def create_configure(self): + mkdirp(self.configure_directory) + touch(self.configure_abs_path) + + @run_after("autoreconf", when="@1.0") + def after_autoreconf_1(self): + os.environ["AFTER_AUTORECONF_1_CALLED"] = "1" + + @run_after("autoreconf", when="@2.0") + def after_autoreconf_2(self): + os.environ["AFTER_AUTORECONF_2_CALLED"] = "1" diff --git a/var/spack/repos/builder.test/packages/old-style-custom-phases/package.py b/var/spack/repos/builder.test/packages/old-style-custom-phases/package.py new file mode 100644 index 00000000000..afa5b52d709 --- /dev/null +++ b/var/spack/repos/builder.test/packages/old-style-custom-phases/package.py @@ -0,0 +1,34 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) +import os + +from spack.package import * + + +class OldStyleCustomPhases(AutotoolsPackage): + """Package used to verify that old-style packages work correctly when defining custom + phases (though it's not recommended for packagers to do so). 
+ """ + + homepage = "http://www.example.com" + url = "http://www.example.com/a-1.0.tar.gz" + + version("2.0", "abcdef0123456789abcdef0123456789") + version("1.0", "0123456789abcdef0123456789abcdef") + + phases = ["configure"] + + def configure(self, spec, prefix): + mkdirp(prefix.bin) + + @run_after("configure") + def after_configure(self): + os.environ["AFTER_CONFIGURE_CALLED"] = "1" + os.environ["TEST_VALUE"] = "0" + + @run_after("install") + def after_install(self): + os.environ["AFTER_INSTALL_CALLED"] = "1" + os.environ["TEST_VALUE"] = "1" diff --git a/var/spack/repos/builder.test/packages/old-style-derived/package.py b/var/spack/repos/builder.test/packages/old-style-derived/package.py new file mode 100644 index 00000000000..a7dd0262179 --- /dev/null +++ b/var/spack/repos/builder.test/packages/old-style-derived/package.py @@ -0,0 +1,21 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) +import spack.pkg.builder.test.old_style_autotools +from spack.package import * + + +class OldStyleDerived(spack.pkg.builder.test.old_style_autotools.OldStyleAutotools): + """Package used to verify that old-style packages work correctly when executing the + installation procedure. 
+ """ + + homepage = "http://www.example.com" + url = "http://www.example.com/a-1.0.tar.gz" + + version("2.0", "abcdef0123456789abcdef0123456789") + version("1.0", "0123456789abcdef0123456789abcdef") + + def configure_args(self): + return ["--with-bar"] + super(OldStyleDerived, self).configure_args() diff --git a/var/spack/repos/builder.test/repo.yaml b/var/spack/repos/builder.test/repo.yaml new file mode 100644 index 00000000000..a9031afe214 --- /dev/null +++ b/var/spack/repos/builder.test/repo.yaml @@ -0,0 +1,2 @@ +repo: + namespace: builder.test diff --git a/var/spack/repos/builtin.mock/packages/a/package.py b/var/spack/repos/builtin.mock/packages/a/package.py index b556fbf06f6..5dbcd1f9c27 100644 --- a/var/spack/repos/builtin.mock/packages/a/package.py +++ b/var/spack/repos/builtin.mock/packages/a/package.py @@ -2,7 +2,7 @@ # Spack Project Developers. See the top-level COPYRIGHT file for details. # # SPDX-License-Identifier: (Apache-2.0 OR MIT) - +import spack.build_systems.autotools from spack.package import * @@ -32,21 +32,23 @@ class A(AutotoolsPackage): parallel = False + +class AutotoolsBuilder(spack.build_systems.autotools.AutotoolsBuilder): def with_or_without_fee(self, activated): if not activated: return "--no-fee" return "--fee-all-the-time" - def autoreconf(self, spec, prefix): + def autoreconf(self, pkg, spec, prefix): pass - def configure(self, spec, prefix): + def configure(self, pkg, spec, prefix): pass - def build(self, spec, prefix): + def build(self, pkg, spec, prefix): pass - def install(self, spec, prefix): + def install(self, pkg, spec, prefix): # sanity_check_prefix requires something in the install directory # Test requires overriding the one provided by `AutotoolsPackage` mkdirp(prefix.bin) diff --git a/var/spack/repos/builtin.mock/packages/attributes-foo/package.py b/var/spack/repos/builtin.mock/packages/attributes-foo/package.py index 9010c52958e..83b41b98ac3 100644 --- a/var/spack/repos/builtin.mock/packages/attributes-foo/package.py 
+++ b/var/spack/repos/builtin.mock/packages/attributes-foo/package.py @@ -8,7 +8,6 @@ class AttributesFoo(BundlePackage): - phases = ["install"] version("1.0") provides("bar") diff --git a/var/spack/repos/builtin.mock/packages/bowtie/package.py b/var/spack/repos/builtin.mock/packages/bowtie/package.py index 24d7312b749..9bef2111724 100644 --- a/var/spack/repos/builtin.mock/packages/bowtie/package.py +++ b/var/spack/repos/builtin.mock/packages/bowtie/package.py @@ -11,8 +11,10 @@ class Bowtie(Package): homepage = "http://www.example.org" url = "http://bowtie-1.2.2.tar.bz2" + version("1.4.0", "1c837ecd990bb022d07e7aab32b09847") version("1.3.0", "1c837ecd990bb022d07e7aab32b09847") version("1.2.2", "1c837ecd990bb022d07e7aab32b09847") version("1.2.0", "1c837ecd990bb022d07e7aab32b09847") conflicts("%gcc@:4.5.0", when="@1.2.2") + conflicts("%gcc@:10.2.1", when="@:1.3.0") diff --git a/var/spack/repos/builtin.mock/packages/canfail/package.py b/var/spack/repos/builtin.mock/packages/canfail/package.py index eb35fdec562..75bb66df252 100644 --- a/var/spack/repos/builtin.mock/packages/canfail/package.py +++ b/var/spack/repos/builtin.mock/packages/canfail/package.py @@ -2,6 +2,7 @@ # Spack Project Developers. See the top-level COPYRIGHT file for details. 
# # SPDX-License-Identifier: (Apache-2.0 OR MIT) +import os from spack.package import * @@ -14,7 +15,16 @@ class Canfail(Package): version("1.0", "0123456789abcdef0123456789abcdef") - succeed = False + def set_install_succeed(self): + os.environ["CANFAIL_SUCCEED"] = "1" + + def set_install_fail(self): + os.environ.pop("CANFAIL_SUCCEED", None) + + @property + def succeed(self): + result = True if "CANFAIL_SUCCEED" in os.environ else False + return result def install(self, spec, prefix): if not self.succeed: diff --git a/var/spack/repos/builtin.mock/packages/chain-a/package.py b/var/spack/repos/builtin.mock/packages/chain-a/package.py new file mode 100644 index 00000000000..6dc7dc2c90e --- /dev/null +++ b/var/spack/repos/builtin.mock/packages/chain-a/package.py @@ -0,0 +1,34 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class ChainA(Package): + """ + Part of a collection of mock packages used for testing depth-first vs + breadth-first traversal. The DAG they form: + a --> b --> c --> d # a chain + a --> c # "skip" connection + a --> d # "skip" connection + Spack's edge order is based on the child package name. + In depth-first traversal we get a tree that looks like a chain: + a + b + c + d + In breadth-first we get the tree: + a + b + c + d + """ + + homepage = "https://example.com" + has_code = False + version("1.0") + depends_on("chain-b") + depends_on("chain-c") + depends_on("chain-d") diff --git a/var/spack/repos/builtin.mock/packages/chain-b/package.py b/var/spack/repos/builtin.mock/packages/chain-b/package.py new file mode 100644 index 00000000000..ede29c0e0fd --- /dev/null +++ b/var/spack/repos/builtin.mock/packages/chain-b/package.py @@ -0,0 +1,32 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. 
See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class ChainB(Package): + """ + Part of a collection of mock packages used for testing depth-first vs + breadth-first traversal. The DAG they form: + a --> b --> c --> d # a chain + a --> c # "skip" connection + a --> d # "skip" connection + Spack's edge order is based on the child package name. + In depth-first traversal we get a tree that looks like a chain: + a + b + c + d + In breadth-first we get the tree: + a + b + c + d + """ + + homepage = "https://example.com" + has_code = False + version("1.0") + depends_on("chain-c") diff --git a/var/spack/repos/builtin.mock/packages/chain-c/package.py b/var/spack/repos/builtin.mock/packages/chain-c/package.py new file mode 100644 index 00000000000..e9d919f0ba9 --- /dev/null +++ b/var/spack/repos/builtin.mock/packages/chain-c/package.py @@ -0,0 +1,32 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class ChainC(Package): + """ + Part of a collection of mock packages used for testing depth-first vs + breadth-first traversal. The DAG they form: + a --> b --> c --> d # a chain + a --> c # "skip" connection + a --> d # "skip" connection + Spack's edge order is based on the child package name. 
+ In depth-first traversal we get a tree that looks like a chain: + a + b + c + d + In breadth-first we get the tree: + a + b + c + d + """ + + homepage = "https://example.com" + has_code = False + version("1.0") + depends_on("chain-d") diff --git a/var/spack/repos/builtin.mock/packages/chain-d/package.py b/var/spack/repos/builtin.mock/packages/chain-d/package.py new file mode 100644 index 00000000000..f2e04089ee5 --- /dev/null +++ b/var/spack/repos/builtin.mock/packages/chain-d/package.py @@ -0,0 +1,31 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class ChainD(Package): + """ + Part of a collection of mock packages used for testing depth-first vs + breadth-first traversal. The DAG they form: + a --> b --> c --> d # a chain + a --> c # "skip" connection + a --> d # "skip" connection + Spack's edge order is based on the child package name. 
+ In depth-first traversal we get a tree that looks like a chain: + a + b + c + d + In breadth-first we get the tree: + a + b + c + d + """ + + homepage = "https://example.com" + has_code = False + version("1.0") diff --git a/var/spack/repos/builtin.mock/packages/cmake-client/package.py b/var/spack/repos/builtin.mock/packages/cmake-client/package.py index 54842fd2d8f..ed3998e550b 100644 --- a/var/spack/repos/builtin.mock/packages/cmake-client/package.py +++ b/var/spack/repos/builtin.mock/packages/cmake-client/package.py @@ -15,7 +15,7 @@ def check(condition, msg): class CmakeClient(CMakePackage): - """A dumy package that uses cmake.""" + """A dummy package that uses cmake.""" homepage = "https://www.example.com" url = "https://www.example.com/cmake-client-1.0.tar.gz" @@ -38,14 +38,16 @@ class CmakeClient(CMakePackage): did_something = False @run_after("cmake") - @run_before("cmake", "build", "install") + @run_before("cmake") + @run_before("build") + @run_before("install") def increment(self): - self.callback_counter += 1 + CmakeClient.callback_counter += 1 @run_after("cmake") @on_package_attributes(run_this=True, check_this_is_none=None) def flip(self): - self.flipped = True + CmakeClient.flipped = True @run_after("cmake") @on_package_attributes(does_not_exist=None) diff --git a/var/spack/repos/builtin.mock/packages/cmake/package.py b/var/spack/repos/builtin.mock/packages/cmake/package.py index 30c3647df21..dac2c19875b 100644 --- a/var/spack/repos/builtin.mock/packages/cmake/package.py +++ b/var/spack/repos/builtin.mock/packages/cmake/package.py @@ -18,11 +18,16 @@ def check(condition, msg): class Cmake(Package): - """A dumy package for the cmake build system.""" + """A dummy package for the cmake build system.""" homepage = "https://www.cmake.org" url = "https://cmake.org/files/v3.4/cmake-3.4.3.tar.gz" + version( + "3.23.1", + "4cb3ff35b2472aae70f542116d616e63", + url="https://cmake.org/files/v3.4/cmake-3.4.3.tar.gz", + ) version( "3.4.3", 
"4cb3ff35b2472aae70f542116d616e63", diff --git a/var/spack/repos/builtin.mock/packages/dev-build-test-dependent/package.py b/var/spack/repos/builtin.mock/packages/dev-build-test-dependent/package.py index 44374cd1a75..a5ac04d2825 100644 --- a/var/spack/repos/builtin.mock/packages/dev-build-test-dependent/package.py +++ b/var/spack/repos/builtin.mock/packages/dev-build-test-dependent/package.py @@ -7,14 +7,12 @@ from spack.package import * -class DevBuildTestDependent(Package): +class DevBuildTestDependent(MakefilePackage): homepage = "example.com" url = "fake.com" version("0.0.0", sha256="0123456789abcdef0123456789abcdef") - phases = ["edit", "install"] - filename = "dev-build-test-file.txt" original_string = "This file should be edited" replacement_string = "This file has been edited" @@ -28,5 +26,8 @@ def edit(self, spec, prefix): f.truncate() f.write(self.replacement_string) + def build(self, spec, prefix): + pass + def install(self, spec, prefix): install(self.filename, prefix) diff --git a/var/spack/repos/builtin.mock/packages/dev-build-test-install-phases/package.py b/var/spack/repos/builtin.mock/packages/dev-build-test-install-phases/package.py index fa0f6b794e4..916156c1f59 100644 --- a/var/spack/repos/builtin.mock/packages/dev-build-test-install-phases/package.py +++ b/var/spack/repos/builtin.mock/packages/dev-build-test-install-phases/package.py @@ -29,4 +29,5 @@ def three(self, spec, prefix): print("Three locomoco") def install(self, spec, prefix): + mkdirp(prefix.bin) print("install") diff --git a/var/spack/repos/builtin.mock/packages/dev-build-test-install/package.py b/var/spack/repos/builtin.mock/packages/dev-build-test-install/package.py index 185fe5552c9..ba0b1400a34 100644 --- a/var/spack/repos/builtin.mock/packages/dev-build-test-install/package.py +++ b/var/spack/repos/builtin.mock/packages/dev-build-test-install/package.py @@ -2,19 +2,15 @@ # Spack Project Developers. See the top-level COPYRIGHT file for details. 
# # SPDX-License-Identifier: (Apache-2.0 OR MIT) - - from spack.package import * -class DevBuildTestInstall(Package): +class DevBuildTestInstall(MakefilePackage): homepage = "example.com" url = "fake.com" version("0.0.0", sha256="0123456789abcdef0123456789abcdef") - phases = ["edit", "install"] - filename = "dev-build-test-file.txt" original_string = "This file should be edited" replacement_string = "This file has been edited" @@ -26,5 +22,8 @@ def edit(self, spec, prefix): f.truncate() f.write(self.replacement_string) + def build(self, spec, prefix): + pass + def install(self, spec, prefix): install(self.filename, prefix) diff --git a/var/spack/repos/builtin.mock/packages/fail-test-audit/package.py b/var/spack/repos/builtin.mock/packages/fail-test-audit/package.py new file mode 100644 index 00000000000..1e290724d1e --- /dev/null +++ b/var/spack/repos/builtin.mock/packages/fail-test-audit/package.py @@ -0,0 +1,21 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) +from spack.package import * + + +class FailTestAudit(MakefilePackage): + """Simple package with one optional dependency""" + + homepage = "http://www.example.com" + url = "http://www.example.com/a-1.0.tar.gz" + + version("1.0", "0123456789abcdef0123456789abcdef") + version("2.0", "abcdef0123456789abcdef0123456789") + + build_time_test_callbacks = ["test"] + + def test(self): + print("test: test-install-callbacks") + print("PASSED") diff --git a/var/spack/repos/builtin.mock/packages/garply/package.py b/var/spack/repos/builtin.mock/packages/garply/package.py index 819da452861..7fc3fd484a3 100644 --- a/var/spack/repos/builtin.mock/packages/garply/package.py +++ b/var/spack/repos/builtin.mock/packages/garply/package.py @@ -83,7 +83,12 @@ class Garply f.write(garply_cc % prefix.config) with open("%s/garply/garplinator.cc" % self.stage.source_path, "w") as f: f.write(garplinator_cc) - gpp = which("/usr/bin/g++") + gpp = which( + "g++", + path=":".join( + [s for s in os.environ["PATH"].split(os.pathsep) if "lib/spack/env" not in s] + ), + ) if sys.platform == "darwin": gpp = which("/usr/bin/clang++") gpp( diff --git a/var/spack/repos/builtin.mock/packages/hypre/package.py b/var/spack/repos/builtin.mock/packages/hypre/package.py index fe077f067da..7182cf628a8 100644 --- a/var/spack/repos/builtin.mock/packages/hypre/package.py +++ b/var/spack/repos/builtin.mock/packages/hypre/package.py @@ -2,6 +2,7 @@ # Spack Project Developers. See the top-level COPYRIGHT file for details. 
# # SPDX-License-Identifier: (Apache-2.0 OR MIT) +import sys from spack.package import * @@ -17,3 +18,9 @@ class Hypre(Package): depends_on("lapack") depends_on("blas") + + variant( + "shared", + default=(sys.platform != "darwin"), + description="Build shared library (disables static library)", + ) diff --git a/var/spack/repos/builtin.mock/packages/impossible-concretization/package.py b/var/spack/repos/builtin.mock/packages/impossible-concretization/package.py index c0f2ce8641c..b47e97f3622 100644 --- a/var/spack/repos/builtin.mock/packages/impossible-concretization/package.py +++ b/var/spack/repos/builtin.mock/packages/impossible-concretization/package.py @@ -16,3 +16,4 @@ class ImpossibleConcretization(Package): version(1.0, "0123456789abcdef0123456789abcdef") conflicts("target=x86_64:") + conflicts("target=aarch64:") diff --git a/var/spack/repos/builtin.mock/packages/intel-oneapi-compilers/package.py b/var/spack/repos/builtin.mock/packages/intel-oneapi-compilers/package.py new file mode 100644 index 00000000000..3ab49f1e1c9 --- /dev/null +++ b/var/spack/repos/builtin.mock/packages/intel-oneapi-compilers/package.py @@ -0,0 +1,28 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class IntelOneapiCompilers(Package): + """Simple compiler package.""" + + homepage = "http://www.example.com" + url = "http://www.example.com/oneapi-1.0.tar.gz" + + version("1.0", "0123456789abcdef0123456789abcdef") + version("2.0", "abcdef0123456789abcdef0123456789") + version("3.0", "def0123456789abcdef0123456789abc") + + @property + def compiler_search_prefix(self): + return self.prefix.foo.bar.baz.bin + + def install(self, spec, prefix): + # Create the minimal compiler that will fool `spack compiler find` + mkdirp(self.compiler_search_prefix) + with open(self.compiler_search_prefix.icx, "w") as f: + f.write('#!/bin/bash\necho "oneAPI DPC++ Compiler %s"' % str(spec.version)) + set_executable(self.compiler_search_prefix.icx) diff --git a/var/spack/repos/builtin.mock/packages/libtool-deletion/package.py b/var/spack/repos/builtin.mock/packages/libtool-deletion/package.py index a169a78d2e5..8ab87d20644 100644 --- a/var/spack/repos/builtin.mock/packages/libtool-deletion/package.py +++ b/var/spack/repos/builtin.mock/packages/libtool-deletion/package.py @@ -4,6 +4,7 @@ # SPDX-License-Identifier: (Apache-2.0 OR MIT) import os.path +import spack.build_systems.autotools from spack.package import * @@ -19,17 +20,21 @@ class LibtoolDeletion(AutotoolsPackage): def do_stage(self): mkdirp(self.stage.source_path) - def autoreconf(self, spec, prefix): + +class AutotoolsBuilder(spack.build_systems.autotools.AutotoolsBuilder): + install_libtool_archives = False + + def autoreconf(self, pkg, spec, prefix): mkdirp(os.path.dirname(self.configure_abs_path)) touch(self.configure_abs_path) - def configure(self, spec, prefix): + def configure(self, pkg, spec, prefix): pass - def build(self, spec, prefix): + def build(self, pkg, spec, prefix): pass - def install(self, spec, prefix): + def install(self, pkg, spec, prefix): mkdirp(os.path.dirname(self.libtool_archive_file)) 
touch(self.libtool_archive_file) diff --git a/var/spack/repos/builtin.mock/packages/libtool-installation/package.py b/var/spack/repos/builtin.mock/packages/libtool-installation/package.py new file mode 100644 index 00000000000..72883b90423 --- /dev/null +++ b/var/spack/repos/builtin.mock/packages/libtool-installation/package.py @@ -0,0 +1,15 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) +from spack.package import * +from spack.pkg.builtin.mock.libtool_deletion import AutotoolsBuilder as BuilderBase +from spack.pkg.builtin.mock.libtool_deletion import LibtoolDeletion + + +class LibtoolInstallation(LibtoolDeletion, AutotoolsPackage): + """Mock AutotoolsPackage to check proper installation of libtool archives.""" + + +class AutotoolsBuilder(BuilderBase): + install_libtool_archives = True diff --git a/var/spack/repos/builtin.mock/packages/needs-text-relocation/package.py b/var/spack/repos/builtin.mock/packages/needs-text-relocation/package.py new file mode 100644 index 00000000000..9b2654dd834 --- /dev/null +++ b/var/spack/repos/builtin.mock/packages/needs-text-relocation/package.py @@ -0,0 +1,27 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) +from spack.package import * + + +class NeedsTextRelocation(Package): + """A dumy package that encodes its prefix.""" + + homepage = "https://www.cmake.org" + url = "https://cmake.org/files/v3.4/cmake-3.4.3.tar.gz" + + version("0.0.0", "12345678qwertyuiasdfghjkzxcvbnm0") + + def install(self, spec, prefix): + mkdirp(prefix.bin) + + exe = join_path(prefix.bin, "exe") + with open(exe, "w") as f: + f.write(prefix) + set_executable(exe) + + otherexe = join_path(prefix.bin, "otherexe") + with open(otherexe, "w") as f: + f.write("Lorem Ipsum") + set_executable(otherexe) diff --git a/var/spack/repos/builtin.mock/packages/nosource-install/package.py b/var/spack/repos/builtin.mock/packages/nosource-install/package.py index a1ca7244489..5166480dd4f 100644 --- a/var/spack/repos/builtin.mock/packages/nosource-install/package.py +++ b/var/spack/repos/builtin.mock/packages/nosource-install/package.py @@ -16,9 +16,6 @@ class NosourceInstall(BundlePackage): depends_on("dependency-install") - # The install phase must be specified. - phases = ["install"] - # The install method must also be present. 
def install(self, spec, prefix): touch(join_path(self.prefix, "install.txt")) diff --git a/var/spack/repos/builtin.mock/packages/openblas/package.py b/var/spack/repos/builtin.mock/packages/openblas/package.py index 5b8637aca2c..8c2f26de579 100644 --- a/var/spack/repos/builtin.mock/packages/openblas/package.py +++ b/var/spack/repos/builtin.mock/packages/openblas/package.py @@ -12,11 +12,17 @@ class Openblas(Package): homepage = "http://www.openblas.net" url = "http://github.com/xianyi/OpenBLAS/archive/v0.2.15.tar.gz" + version("0.2.16", "b1190f3d3471685f17cfd1ec1d252ac9") version("0.2.15", "b1190f3d3471685f17cfd1ec1d252ac9") version("0.2.14", "b1190f3d3471685f17cfd1ec1d252ac9") version("0.2.13", "b1190f3d3471685f17cfd1ec1d252ac9") + variant("shared", default=True, description="Build shared libraries") + # See #20019 for this conflict conflicts("%gcc@:4.4", when="@0.2.14:") + # To ensure test works with newer gcc versions + conflicts("%gcc@:10.1", when="@0.2.16:") + provides("blas") diff --git a/var/spack/repos/builtin.mock/packages/quux/package.py b/var/spack/repos/builtin.mock/packages/quux/package.py index 693ef07ba44..de4f8b93279 100644 --- a/var/spack/repos/builtin.mock/packages/quux/package.py +++ b/var/spack/repos/builtin.mock/packages/quux/package.py @@ -97,7 +97,12 @@ class Quux f.write(quux_h) with open("%s/quux/quuxifier.cc" % self.stage.source_path, "w") as f: f.write(quuxifier_cc) - gpp = which("/usr/bin/g++") + gpp = which( + "g++", + path=":".join( + [s for s in os.environ["PATH"].split(os.pathsep) if "lib/spack/env" not in s] + ), + ) if sys.platform == "darwin": gpp = which("/usr/bin/clang++") gpp( diff --git a/var/spack/repos/builtin.mock/packages/test-build-callbacks/package.py b/var/spack/repos/builtin.mock/packages/test-build-callbacks/package.py index 4b4b74e9b3f..d45f0d295bc 100644 --- a/var/spack/repos/builtin.mock/packages/test-build-callbacks/package.py +++ b/var/spack/repos/builtin.mock/packages/test-build-callbacks/package.py @@ -2,9 +2,9 
@@ # Spack Project Developers. See the top-level COPYRIGHT file for details. # # SPDX-License-Identifier: (Apache-2.0 OR MIT) - +import spack.build_systems._checks as checks +import spack.build_systems.generic from spack.package import * -from spack.package_base import run_after class TestBuildCallbacks(Package): @@ -15,17 +15,16 @@ class TestBuildCallbacks(Package): version("1.0", "0123456789abcdef0123456789abcdef") - phases = ["build", "install"] - # Include undefined method (runtime failure) and 'test' (audit failure) - build_time_test_callbacks = ["undefined-build-test", "test"] - run_after("build")(Package._run_default_build_time_test_callbacks) - def build(self, spec, prefix): +class GenericBuilder(spack.build_systems.generic.GenericBuilder): + phases = ["build", "install"] + + # Include undefined method (runtime failure) + build_time_test_callbacks = ["undefined-build-test"] + run_after("build")(checks.execute_build_time_tests) + + def build(self, pkg, spec, prefix): pass - def install(self, spec, prefix): + def install(self, pkg, spec, prefix): mkdirp(prefix.bin) - - def test(self): - print("test: running test-build-callbacks") - print("PASSED") diff --git a/var/spack/repos/builtin.mock/packages/test-install-callbacks/package.py b/var/spack/repos/builtin.mock/packages/test-install-callbacks/package.py index 27a31227c3d..0d348c0d679 100644 --- a/var/spack/repos/builtin.mock/packages/test-install-callbacks/package.py +++ b/var/spack/repos/builtin.mock/packages/test-install-callbacks/package.py @@ -2,9 +2,9 @@ # Spack Project Developers. See the top-level COPYRIGHT file for details. 
# # SPDX-License-Identifier: (Apache-2.0 OR MIT) - +import spack.build_systems._checks as checks +import spack.build_systems.generic from spack.package import * -from spack.package_base import run_after class TestInstallCallbacks(Package): @@ -15,13 +15,11 @@ class TestInstallCallbacks(Package): version("1.0", "0123456789abcdef0123456789abcdef") + +class GenericBuilder(spack.build_systems.generic.GenericBuilder): # Include an undefined callback method - install_time_test_callbacks = ["undefined-install-test", "test"] - run_after("install")(Package._run_default_install_time_test_callbacks) + install_time_test_callbacks = ["undefined-install-test"] + run_after("install")(checks.execute_install_time_tests) - def install(self, spec, prefix): + def install(self, pkg, spec, prefix): mkdirp(prefix.bin) - - def test(self): - print("test: test-install-callbacks") - print("PASSED") diff --git a/var/spack/repos/builtin.mock/packages/transitive-conditional-virtual-dependency/package.py b/var/spack/repos/builtin.mock/packages/transitive-conditional-virtual-dependency/package.py index a7c46588103..9078d63b4d8 100644 --- a/var/spack/repos/builtin.mock/packages/transitive-conditional-virtual-dependency/package.py +++ b/var/spack/repos/builtin.mock/packages/transitive-conditional-virtual-dependency/package.py @@ -5,12 +5,10 @@ from spack.package import * -class TransitiveConditionalVirtualDependency(Package): +class TransitiveConditionalVirtualDependency(BundlePackage): """Depends on a package with a conditional virtual dependency.""" homepage = "https://dev.null" - has_code = False - phases = [] version("1.0") depends_on("conditional-virtual-dependency") diff --git a/var/spack/repos/builtin.mock/packages/trivial-smoke-test/package.py b/var/spack/repos/builtin.mock/packages/trivial-smoke-test/package.py index ec80640d216..35983a18f40 100644 --- a/var/spack/repos/builtin.mock/packages/trivial-smoke-test/package.py +++ 
b/var/spack/repos/builtin.mock/packages/trivial-smoke-test/package.py @@ -16,6 +16,9 @@ class TrivialSmokeTest(Package): test_source_filename = "cached_file.in" + def install(self, spec, prefix): + pass + @run_before("install") def create_extra_test_source(self): mkdirp(self.install_test_root) diff --git a/var/spack/repos/builtin.mock/packages/view-not-ignored/package.py b/var/spack/repos/builtin.mock/packages/view-not-ignored/package.py new file mode 100644 index 00000000000..3342de98999 --- /dev/null +++ b/var/spack/repos/builtin.mock/packages/view-not-ignored/package.py @@ -0,0 +1,46 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +import os.path + +from spack.package import * + + +class ViewNotIgnored(Package): + """Install files that should not be ignored by spack.""" + + homepage = "http://www.spack.org" + url = "http://www.spack.org/downloads/aml-1.0.tar.gz" + has_code = False + + version("0.1.0", sha256="cc89a8768693f1f11539378b21cdca9f0ce3fc5cb564f9b3e4154a051dcea69b") + + install_test_files = [ + "foo.spack", + ".spack.bar", + "aspack", + "bin/foo.spack", + "bin/.spack.bar", + "bin/aspack", + ] + + def install(self, spec, prefix): + for test_file in self.install_test_files: + path = os.path.join(prefix, test_file) + mkdirp(os.path.dirname(path)) + with open(path, "w") as f: + f.write(test_file) + + @classmethod + def assert_installed(cls, prefix): + for test_file in cls.install_test_files: + path = os.path.join(prefix, test_file) + assert os.path.exists(path), "Missing installed file: {}".format(path) + + @classmethod + def assert_not_installed(cls, prefix): + for test_file in cls.install_test_files: + path = os.path.join(prefix, test_file) + assert not os.path.exists(path), "File was not uninstalled: {}".format(path) diff --git a/var/spack/repos/builtin/packages/acts-dd4hep/package.py 
b/var/spack/repos/builtin/packages/acts-dd4hep/package.py index 3cc0431ce17..7b7f4405d8f 100644 --- a/var/spack/repos/builtin/packages/acts-dd4hep/package.py +++ b/var/spack/repos/builtin/packages/acts-dd4hep/package.py @@ -12,8 +12,9 @@ class ActsDd4hep(CMakePackage): homepage = "https://github.com/acts-project/acts-dd4hep" url = "https://github.com/acts-project/acts-dd4hep/archive/refs/tags/v1.0.0.tar.gz" - maintainers = ["HadrianG2", "wdconinc"] + maintainers = ["HadrienG2", "wdconinc"] + version("1.0.1", sha256="e40f34ebc30b3c33a6802c9d94136e65072d8dcee0b7db57a645f08a64ea5334") version("1.0.0", sha256="991f996944c88efa837880f919239e50d12c5c9361e220bc9422438dd608308c") depends_on("dd4hep@1.11: +dddetectors") diff --git a/var/spack/repos/builtin/packages/acts/package.py b/var/spack/repos/builtin/packages/acts/package.py index bb63510c3aa..af07f730708 100644 --- a/var/spack/repos/builtin/packages/acts/package.py +++ b/var/spack/repos/builtin/packages/acts/package.py @@ -39,7 +39,13 @@ class Acts(CMakePackage, CudaPackage): # Supported Acts versions version("main", branch="main") version("master", branch="main", deprecated=True) # For compatibility + version("20.3.0", commit="b1859b322744cb033328fd57d9e74fb5326aa56b", submodules=True) + version("20.2.0", commit="7750c1d24714314e8de716b92ebcd4a92cc4e303", submodules=True) + version("20.1.0", commit="be36226fb1be88d7be7c9b17a1c1f6e76ff0e006", submodules=True) version("20.0.0", commit="3740e6cdbfb1f75d8e481686acdfa5b16d3c41a3", submodules=True) + version("19.10.0", commit="2d07f60eb2280a46af1085600ec8327679bbb630", submodules=True) + version("19.9.0", commit="b655e18929ae0ccb6926d8e217b1b3fc02978d35", submodules=True) + version("19.8.0", commit="7582072dbaa70802264f20b392de4313afd25667", submodules=True) version("19.7.0", commit="03cf7a3ae74b632b3f89416dc27cc993c9ae4628", submodules=True) version("19.6.0", commit="333082914e6a51b381abc1cf52856829e3eb7890", submodules=True) version("19.5.0", 
commit="bf9f0270eadd8e78d283557b7c9070b80dece4a7", submodules=True) @@ -228,10 +234,23 @@ class Acts(CMakePackage, CudaPackage): description="Build python bindings for the examples", when="@14: +examples", ) + variant( + "svg", + default=False, + description="Build ActSVG display plugin", + when="@20.1:", + ) + variant( + "tbb", + default=True, + description="Build the examples with Threading Building Blocks library", + when="@19.8:19,20.1: +examples", + ) variant("analysis", default=False, description="Build analysis applications in the examples") # Build dependencies depends_on("acts-dd4hep", when="@19 +dd4hep") + depends_on("actsvg", when="@20.1: +svg") depends_on("autodiff @0.6:", when="@17: +autodiff") depends_on("autodiff @0.5.11:0.5.99", when="@1.2:16 +autodiff") depends_on("boost @1.62:1.69 +program_options +test", when="@:0.10.3") @@ -249,7 +268,7 @@ class Acts(CMakePackage, CudaPackage): depends_on("gperftools", when="+profilemem") depends_on("hepmc3 @3.2.1:", when="+hepmc3") depends_on("heppdt", when="+hepmc3 @:4.0") - depends_on("intel-tbb @2020.1:", when="+examples") + depends_on("intel-tbb @2020.1:", when="+examples +tbb") depends_on("nlohmann-json @3.9.1:", when="@0.14: +json") depends_on("pythia8", when="+pythia8") depends_on("python", when="+python") @@ -275,9 +294,9 @@ def enable_cmake_variant(cmake_label, spack_variant): enabled = spec.satisfies(spack_variant) return "-DACTS_ENABLE_{0}={1}".format(cmake_label, enabled) - def example_cmake_variant(cmake_label, spack_variant): + def example_cmake_variant(cmake_label, spack_variant, type="BUILD"): enabled = spec.satisfies("+examples +" + spack_variant) - return "-DACTS_BUILD_EXAMPLES_{0}={1}".format(cmake_label, enabled) + return "-DACTS_{0}_EXAMPLES_{1}={2}".format(type, cmake_label, enabled) def plugin_label(plugin_name): if spec.satisfies("@0.33:"): @@ -305,6 +324,7 @@ def plugin_cmake_variant(plugin_name, spack_variant): plugin_cmake_variant("CUDA", "cuda"), plugin_cmake_variant("DD4HEP", 
"dd4hep"), example_cmake_variant("DD4HEP", "dd4hep"), + plugin_cmake_variant("DIGITIZATION", "digitization"), example_cmake_variant("EDM4HEP", "edm4hep"), cmake_variant("EXAMPLES", "examples"), cmake_variant("FATRAS", "fatras"), @@ -321,8 +341,10 @@ def plugin_cmake_variant(plugin_name, spack_variant): enable_cmake_variant("MEMORY_PROFILING", "profilemem"), example_cmake_variant("PYTHIA8", "pythia8"), example_cmake_variant("PYTHON_BINDINGS", "python"), + plugin_cmake_variant("ACTSVG", "svg"), plugin_cmake_variant("SYCL", "sycl"), plugin_cmake_variant("TGEO", "tgeo"), + example_cmake_variant("TBB", "tbb", "USE"), cmake_variant(unit_tests_label, "unit_tests"), ] @@ -331,28 +353,35 @@ def plugin_cmake_variant(plugin_name, spack_variant): if spec.satisfies("@19.4.0:"): args.append("-DACTS_ENABLE_LOG_FAILURE_THRESHOLD=ON") - if spec.satisfies("+autodiff"): - args.append("-DACTS_USE_SYSTEM_AUTODIFF=ON") + # Use dependencies provided by spack + if spec.satisfies("@20.3:"): + args.append("-DACTS_USE_SYSTEM_LIBS=ON") + else: + if spec.satisfies("+autodiff"): + args.append("-DACTS_USE_SYSTEM_AUTODIFF=ON") + + if spec.satisfies("@19:20.2 +dd4hep"): + args.append("-DACTS_USE_SYSTEM_ACTSDD4HEP=ON") + + if spec.satisfies("@0.33: +json"): + args.append("-DACTS_USE_SYSTEM_NLOHMANN_JSON=ON") + elif spec.satisfies("@0.14.0:0.32 +json"): + args.append("-DACTS_USE_BUNDLED_NLOHMANN_JSON=OFF") + + if spec.satisfies("@18: +python"): + args.append("-DACTS_USE_SYSTEM_PYBIND11=ON") + + if spec.satisfies("@20.1: +svg"): + args.append("-DACTS_USE_SYSTEM_ACTSVG=ON") + + if spec.satisfies("@14: +vecmem"): + args.append("-DACTS_USE_SYSTEM_VECMEM=ON") if "+cuda" in spec: cuda_arch = spec.variants["cuda_arch"].value if cuda_arch != "none": args.append("-DCUDA_FLAGS=-arch=sm_{0}".format(cuda_arch[0])) - if spec.satisfies("@19 +dd4hep"): - args.append("-DACTS_USE_SYSTEM_ACTSDD4HEP=ON") - - if spec.satisfies("@:16"): - args.append(plugin_cmake_variant("DIGITIZATION", "digitization")) - - if 
spec.satisfies("@0.33: +json"): - args.append("-DACTS_USE_SYSTEM_NLOHMANN_JSON=ON") - elif spec.satisfies("@0.14.0: +json"): - args.append("-DACTS_USE_BUNDLED_NLOHMANN_JSON=OFF") - - if spec.satisfies("@18: +python"): - args.append("-DACTS_USE_SYSTEM_PYBIND11=ON") - if "root" in spec: cxxstd = spec["root"].variants["cxxstd"].value args.append("-DCMAKE_CXX_STANDARD={0}".format(cxxstd)) diff --git a/var/spack/repos/builtin/packages/actsvg/package.py b/var/spack/repos/builtin/packages/actsvg/package.py new file mode 100644 index 00000000000..bd2cc394d48 --- /dev/null +++ b/var/spack/repos/builtin/packages/actsvg/package.py @@ -0,0 +1,44 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +from spack.package import * + + +class Actsvg(CMakePackage): + """An SVG based C++17 plotting library for ACTS detectors and + surfaces.""" + + homepage = "https://github.com/acts-project/actsvg" + url = "https://github.com/acts-project/actsvg/archive/refs/tags/v0.4.22.zip" + list_url = "https://github.com/acts-project/actsvg/releases" + git = "https://github.com/acts-project/actsvg.git" + + maintainers = ["HadrienG2", "wdconinc"] + + version("0.4.26", sha256="a1dfad15b616cac8191a355c1a87544571c36349400e3de56b9e5be6fa73714c") + + variant( + "examples", + default=False, + description="Build the example applications", + ) + variant( + "meta", + default=True, + description="Build the meta level interface", + ) + + depends_on("boost +program_options", type="test") + depends_on("boost +program_options", when="+examples") + depends_on("googletest", when="+examples") + + def cmake_args(self): + args = [ + self.define_from_variant("ACTSVG_BUILD_EXAMPLES", "examples"), + self.define_from_variant("ACTSVG_BUILD_META", "meta"), + self.define("ACTSVG_BUILD_TESTING", self.run_tests), + ] + return args diff --git 
a/var/spack/repos/builtin/packages/adios2/package.py b/var/spack/repos/builtin/packages/adios2/package.py index 39568376e85..dd4708b841a 100644 --- a/var/spack/repos/builtin/packages/adios2/package.py +++ b/var/spack/repos/builtin/packages/adios2/package.py @@ -60,6 +60,9 @@ class Adios2(CMakePackage, CudaPackage): variant("mpi", default=True, description="Enable MPI") # Compression libraries + variant( + "libpressio", default=False, when="@2.8:", description="Enable LibPressio for compression" + ) variant("blosc", default=True, when="@2.4:", description="Enable Blosc compression") variant("bzip2", default=True, when="@2.4:", description="Enable BZip2 compression") variant("zfp", default=True, description="Enable ZFP compression") @@ -103,6 +106,7 @@ class Adios2(CMakePackage, CudaPackage): depends_on("hdf5~mpi", when="+hdf5~mpi") depends_on("hdf5+mpi", when="+hdf5+mpi") + depends_on("libpressio", when="+libpressio") depends_on("c-blosc", when="+blosc") depends_on("bzip2", when="+bzip2") depends_on("libpng@1.6:", when="+png") @@ -178,6 +182,7 @@ def cmake_args(self): from_variant("ADIOS2_USE_SZ", "sz"), from_variant("ADIOS2_USE_ZFP", "zfp"), from_variant("ADIOS2_USE_CUDA", "cuda"), + from_variant("ADIOS2_USE_LIBPRESSIO", "libpressio"), self.define("BUILD_TESTING", self.run_tests), self.define("ADIOS2_BUILD_EXAMPLES", False), self.define("ADIOS2_USE_Endian_Reverse", True), diff --git a/var/spack/repos/builtin/packages/alpgen/package.py b/var/spack/repos/builtin/packages/alpgen/package.py index 717b513cc3f..e0816eeca78 100644 --- a/var/spack/repos/builtin/packages/alpgen/package.py +++ b/var/spack/repos/builtin/packages/alpgen/package.py @@ -2,15 +2,16 @@ # Spack Project Developers. See the top-level COPYRIGHT file for details. 
# # SPDX-License-Identifier: (Apache-2.0 OR MIT) - import os +import spack.build_systems.makefile from spack.package import * -class Alpgen(MakefilePackage): - """A collection of codes for the generation of - multi-parton processes in hadronic collisions.""" +class Alpgen(CMakePackage, MakefilePackage): + """A collection of codes for the generation of multi-parton processes + in hadronic collisions. + """ homepage = "http://mlm.home.cern.ch/mlm/alpgen/" url = "http://mlm.home.cern.ch/mlm/alpgen/V2.1/v214.tgz" @@ -18,102 +19,44 @@ class Alpgen(MakefilePackage): maintainers = ["iarspider"] tags = ["hep"] - patch("alpgen-214.patch", when="recipe=cms") - patch("alpgen-214-Darwin-x86_84-gfortran.patch", when="platform=darwin recipe=cms") - patch("alpgen-2.1.4-sft.patch", when="recipe=sft", level=0) + version("2.1.4", sha256="2f43f7f526793fe5f81a3a3e1adeffe21b653a7f5851efc599ed69ea13985c5e") - depends_on("cmake", type="build", when="recipe=sft") + build_system("makefile", "cmake", default="makefile") variant( "recipe", - values=("cms", "sft"), + values=( + conditional("cms", when="build_system=makefile"), + conditional("sft", when="build_system=cmake"), + ), default="sft", - description="Select build recipe: CMS for CMS experiment, " + "SFT for ATLAS/LHCb/others.", + description="CMS for CMS experiment, SFT for ATLAS/LHCb/others.", ) - version("2.1.4", sha256="2f43f7f526793fe5f81a3a3e1adeffe21b653a7f5851efc599ed69ea13985c5e") - - phases = ["cmake", "build", "install"] - - # copied from CMakePackage - @property - def build_dirname(self): - """Returns the directory name to use when building the package - - :return: name of the subdirectory for building the package - """ - return "spack-build-%s" % self.spec.dag_hash(7) - - @property - def build_directory(self): - """Returns the directory to use when building the package - - :return: directory where to build the package - """ - return os.path.join(self.stage.path, self.build_dirname) - - @property - def 
root_cmakelists_dir(self): - """The relative path to the directory containing CMakeLists.txt - - This path is relative to the root of the extracted tarball, - not to the ``build_directory``. Defaults to the current directory. - - :return: directory containing CMakeLists.txt - """ - return self.stage.source_path - - def cmake_args(self): - """Produces a list containing all the arguments that must be passed to - cmake, except: - - * CMAKE_INSTALL_PREFIX - * CMAKE_BUILD_TYPE - - which will be set automatically. - - :return: list of arguments for cmake - """ - return [] - - @property - def std_cmake_args(self): - """Standard cmake arguments provided as a property for - convenience of package writers - - :return: standard cmake arguments - """ - # standard CMake arguments - std_cmake_args = CMakePackage._std_args(self) - std_cmake_args += getattr(self, "cmake_flag_args", []) - return std_cmake_args - - # end + patch("alpgen-214.patch", when="recipe=cms") + patch("alpgen-214-Darwin-x86_84-gfortran.patch", when="platform=darwin recipe=cms") + patch("alpgen-2.1.4-sft.patch", when="recipe=sft", level=0) def url_for_version(self, version): root = self.url.rsplit("/", 2)[0] return "{0}/V{1}/v{2}.tgz".format(root, version.up_to(2), version.joined) def patch(self): - if self.spec.satisfies("recipe=sft"): + if self.spec.satisfies("build_system=cmake"): copy(join_path(os.path.dirname(__file__), "CMakeLists.txt"), "CMakeLists.txt") - if self.spec.satisfies("recipe=cms"): + if self.spec.satisfies("build_system=makefile"): filter_file("-fno-automatic", "-fno-automatic -std=legacy", "compile.mk") copy(join_path(os.path.dirname(__file__), "cms_build.sh"), "cms_build.sh") copy(join_path(os.path.dirname(__file__), "cms_install.sh"), "cms_install.sh") - @when("recipe=cms") - def cmake(self, spec, prefix): - return - @when("recipe=cms") - def build(self, spec, prefix): +class MakefileBuilder(spack.build_systems.makefile.MakefileBuilder): + def build(self, pkg, spec, prefix): bash = 
which("bash") bash("./cms_build.sh") - @when("recipe=cms") - def install(self, spec, prefix): + def install(self, pkg, spec, prefix): bash = which("bash") bash("./cms_install.sh", prefix) @@ -121,25 +64,3 @@ def install(self, spec, prefix): set_install_permissions(root) for file in files: set_install_permissions(join_path(root, file)) - - @when("recipe=sft") - def cmake(self, spec, prefix): - """Runs ``cmake`` in the build directory""" - options = self.std_cmake_args - options += self.cmake_args() - options.append(os.path.abspath(self.root_cmakelists_dir)) - with working_dir(self.build_directory, create=True): - cmake_x = which("cmake") - cmake_x(*options) - - @when("recipe=sft") - def build(self, spec, prefix): - """Make the build targets""" - with working_dir(self.build_directory): - make() - - @when("recipe=sft") - def install(self, spec, prefix): - """Make the install targets""" - with working_dir(self.build_directory): - make("install") diff --git a/var/spack/repos/builtin/packages/alquimia/package.py b/var/spack/repos/builtin/packages/alquimia/package.py index bd99e2d817a..05d4c54128f 100644 --- a/var/spack/repos/builtin/packages/alquimia/package.py +++ b/var/spack/repos/builtin/packages/alquimia/package.py @@ -16,6 +16,7 @@ class Alquimia(CMakePackage): maintainers = ["smolins", "balay"] version("develop") + version("1.0.10", commit="b2c11b6cde321f4a495ef9fcf267cb4c7a9858a0") # tag v.1.0.10 version("1.0.9", commit="2ee3bcfacc63f685864bcac2b6868b48ad235225") # tag v.1.0.9 version("xsdk-0.6.0", commit="9a0aedd3a927d4d5e837f8fd18b74ad5a78c3821") version("xsdk-0.5.0", commit="8397c3b00a09534c5473ff3ab21f0e32bb159380") @@ -26,6 +27,7 @@ class Alquimia(CMakePackage): depends_on("mpi") depends_on("hdf5") + depends_on("pflotran@4.0.1", when="@1.0.10") depends_on("pflotran@3.0.2", when="@1.0.9") depends_on("pflotran@xsdk-0.6.0", when="@xsdk-0.6.0") depends_on("pflotran@xsdk-0.5.0", when="@xsdk-0.5.0") diff --git a/var/spack/repos/builtin/packages/aluminum/package.py 
b/var/spack/repos/builtin/packages/aluminum/package.py index 235aa0b996d..cad16d0193a 100644 --- a/var/spack/repos/builtin/packages/aluminum/package.py +++ b/var/spack/repos/builtin/packages/aluminum/package.py @@ -52,9 +52,16 @@ class Aluminum(CMakePackage, CudaPackage, ROCmPackage): ) variant("rccl", default=False, description="Builds with support for RCCL communication lib") variant( - "ofi_rccl_plugin", - default=False, - description="Builds with support for OFI libfabric enhanced RCCL communication lib", + "ofi_libfabric_plugin", + default=True, + when="+rccl", + description="Builds with support for OFI libfabric enhanced RCCL/NCCL communication lib", + ) + variant( + "ofi_libfabric_plugin", + default=True, + when="+nccl", + description="Builds with support for OFI libfabric enhanced RCCL/NCCL communication lib", ) depends_on("cmake@3.21.0:", type="build", when="@1.0.1:") @@ -68,12 +75,12 @@ class Aluminum(CMakePackage, CudaPackage, ROCmPackage): depends_on("hipcub", when="@:0.1,0.6.0: +rocm") depends_on("rccl", when="+rccl") - depends_on("aws-ofi-rccl", when="+ofi_rccl_plugin platform=cray") + depends_on("aws-ofi-rccl", when="+rccl +ofi_libfabric_plugin") + depends_on("aws-ofi-nccl", when="+nccl +ofi_libfabric_plugin") conflicts("~cuda", when="+cuda_rma", msg="CUDA RMA support requires CUDA") conflicts("+cuda", when="+rocm", msg="CUDA and ROCm support are mutually exclusive") conflicts("+nccl", when="+rccl", msg="NCCL and RCCL support are mutually exclusive") - conflicts("~rccl", when="+ofi_rccl_plugin", msg="libfabric enhancements require RCCL support") generator = "Ninja" depends_on("ninja", type="build") diff --git a/var/spack/repos/builtin/packages/aml/package.py b/var/spack/repos/builtin/packages/aml/package.py index 8ece44c1023..9c91323e95a 100644 --- a/var/spack/repos/builtin/packages/aml/package.py +++ b/var/spack/repos/builtin/packages/aml/package.py @@ -76,6 +76,7 @@ class Aml(AutotoolsPackage): depends_on("automake", type="build") 
depends_on("libtool", type="build") # Required to have pkg config macros in configure. + # Note: This does NOT work with pkg-config but requires pkgconf! depends_on("pkgconf", type="build") # Required to generate AML version in configure. depends_on("git", type="build") diff --git a/var/spack/repos/builtin/packages/amrex/package.py b/var/spack/repos/builtin/packages/amrex/package.py index 55bb65fc8f2..55281ccbaaf 100644 --- a/var/spack/repos/builtin/packages/amrex/package.py +++ b/var/spack/repos/builtin/packages/amrex/package.py @@ -14,7 +14,7 @@ class Amrex(CMakePackage, CudaPackage, ROCmPackage): mesh refinement (AMR) applications.""" homepage = "https://amrex-codes.github.io/amrex/" - url = "https://github.com/AMReX-Codes/amrex/releases/download/22.10/amrex-22.10.tar.gz" + url = "https://github.com/AMReX-Codes/amrex/releases/download/22.11/amrex-22.11.tar.gz" git = "https://github.com/AMReX-Codes/amrex.git" test_requires_compiler = True @@ -24,6 +24,7 @@ class Amrex(CMakePackage, CudaPackage, ROCmPackage): maintainers = ["WeiqunZhang", "asalmgren", "etpalmer63"] version("develop", branch="development") + version("22.11", sha256="8be9d5c6934d73b98c71c9c67ca7113f18794268f257333591d9b2449d7410c4") version("22.10", sha256="458da410d7f43e428726bfc905123e85d05786080f892ebaa26f94c5f8e79b07") version("22.09", sha256="24601fbb9d554f7b66d7db89b14ff95dadb18d51db893af7ee6c70d4b7dd4be6") version("22.08", sha256="d89167c4567fa246b06478a5b160010a0117dc58be9e879beb15be53cb08b6e9") @@ -342,8 +343,13 @@ def test(self): args = [] args.append("-S./cache/amrex/Tests/SpackSmokeTest") args.append("-DAMReX_ROOT=" + self.prefix) - args.append("-DMPI_C_COMPILER=" + self.spec["mpi"].mpicc) - args.append("-DMPI_CXX_COMPILER=" + self.spec["mpi"].mpicxx) + if "+mpi" in self.spec: + args.append("-DMPI_C_COMPILER=" + self.spec["mpi"].mpicc) + args.append("-DMPI_CXX_COMPILER=" + self.spec["mpi"].mpicxx) + + if "+cuda" in self.spec: + args.append("-DCMAKE_CUDA_COMPILER=" + 
join_path(self.spec["cuda"].prefix.bin, "nvcc")) + args.extend(self.cmake_args()) self.run_test(cmake_bin, args, purpose="Configure with CMake") diff --git a/var/spack/repos/builtin/packages/anicalculator/package.py b/var/spack/repos/builtin/packages/anicalculator/package.py new file mode 100644 index 00000000000..7d83bb4596d --- /dev/null +++ b/var/spack/repos/builtin/packages/anicalculator/package.py @@ -0,0 +1,35 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +import os + +from spack.package import * + + +class Anicalculator(Package): + """This tool will calculate the bidirectional average nucleotide identity + (gANI) and Alignment Fraction (AF) between two genomes. + + Note: A manual download is required for ANIcalculator. + Spack will search your current directory for the download file. + Alternatively, add this file to a mirror so that Spack can find it. + For instructions on how to set up a mirror, see + https://spack.readthedocs.io/en/latest/mirrors.html""" + + homepage = "https://ani.jgi.doe.gov/html/download.php?" 
+ url = "file://{0}/ANIcalculator_v1.tgz".format(os.getcwd()) + manual_download = True + + version("1", sha256="236596a9a204cbcad162fc66be3506b2530b1f48f4f84d9647ccec3ca7483a43") + + depends_on("perl@5:", type="run") + + conflicts("platform=darwin", msg="ANIcalculator requires Linux") + + def install(self, spec, prefix): + mkdirp(prefix.bin) + install("ANIcalculator", prefix.bin) + install("nsimscan", prefix.bin) + install_tree("Log", prefix.bin.Log) diff --git a/var/spack/repos/builtin/packages/any2fasta/package.py b/var/spack/repos/builtin/packages/any2fasta/package.py new file mode 100644 index 00000000000..4b9428767db --- /dev/null +++ b/var/spack/repos/builtin/packages/any2fasta/package.py @@ -0,0 +1,24 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +from spack.package import * + + +class Any2fasta(Package): + """any2fasta: Convert various sequence formats to FASTA""" + + homepage = "https://github.com/tseemann/any2fasta" + url = "https://github.com/tseemann/any2fasta/archive/refs/tags/v0.4.2.tar.gz" + + version("0.4.2", sha256="e4cb2ddccda6298f5b0aee0c10184a75307a08b584d2abbfbf0d59d37b197e73") + version("0.2.3", sha256="197cd1e18adebe28b71a1448c5107804b7093b2aa83c4bcfd8edd3fc4ed485df") + version("0.1.2", sha256="ef035595756df7dca1f8a503ee26f8479393953bc67d8870c9965b6d5ade2674") + + depends_on("perl@5.10:", type=("build", "run")) + + def install(self, spec, prefix): + mkdirp(prefix.bin) + install("any2fasta", prefix.bin) diff --git a/var/spack/repos/builtin/packages/arborx/package.py b/var/spack/repos/builtin/packages/arborx/package.py index cf9d0cc084f..9df147534e4 100644 --- a/var/spack/repos/builtin/packages/arborx/package.py +++ b/var/spack/repos/builtin/packages/arborx/package.py @@ -18,6 +18,7 @@ class Arborx(CMakePackage, CudaPackage, ROCmPackage): maintainers = ["aprokop"] version("master", 
branch="master") + version("1.3", sha256="3f1e17f029a460ab99f8396e2772cec908eefc4bf3868c8828907624a2d0ce5d") version("1.2", sha256="ed1939110b2330b7994dcbba649b100c241a2353ed2624e627a200a398096c20") version("1.1", sha256="2b5f2d2d5cec57c52f470c2bf4f42621b40271f870b4f80cb57e52df1acd90ce") version("1.0", sha256="9b5f45c8180622c907ef0b7cc27cb18ba272ac6558725d9e460c3f3e764f1075") @@ -27,6 +28,16 @@ class Arborx(CMakePackage, CudaPackage, ROCmPackage): deprecated=True, ) + # Allowed C++ standard + variant( + "cxxstd", + default="17", + values=("14", "17", "2a", "2b"), + multi=False, + description="Use the specified C++ standard when building.", + ) + conflicts("cxxstd=14", when="@1.3:") + # ArborX relies on Kokkos to provide devices, providing one-to-one matching # variants. The only way to disable those devices is to make sure Kokkos # does not provide them. @@ -49,7 +60,8 @@ class Arborx(CMakePackage, CudaPackage, ROCmPackage): # Standalone Kokkos depends_on("kokkos@3.1.00:", when="~trilinos") - depends_on("kokkos@3.4.00:", when="@1.2:~trilinos") + depends_on("kokkos@3.4.00:", when="@1.2~trilinos") + depends_on("kokkos@3.6.00:", when="@1.3:~trilinos") for backend in kokkos_backends: depends_on("kokkos+%s" % backend.lower(), when="~trilinos+%s" % backend.lower()) @@ -69,7 +81,8 @@ class Arborx(CMakePackage, CudaPackage, ROCmPackage): # - current version of Trilinos package does not allow enabling CUDA depends_on("trilinos+kokkos", when="+trilinos") depends_on("trilinos+openmp", when="+trilinos+openmp") - depends_on("trilinos@13.2.0:", when="@1.2:+trilinos") + depends_on("trilinos@13.2.0:", when="@1.2+trilinos") + depends_on("trilinos@13.4.0:", when="@1.3:+trilinos") conflicts("~serial", when="+trilinos") conflicts("+cuda", when="+trilinos") diff --git a/var/spack/repos/builtin/packages/arc/package.py b/var/spack/repos/builtin/packages/arc/package.py new file mode 100644 index 00000000000..5d2349ca3e9 --- /dev/null +++ b/var/spack/repos/builtin/packages/arc/package.py 
@@ -0,0 +1,37 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class Arc(CMakePackage): + """ARC is an automatic resiliency library designed to provide security + to lossy compressed data or other uint8_t data arrays + + forked from: https://github.com/FTHPC/ARC to support Spack after developer + left grad school + """ + + homepage = "https://github.com/FTHPC/ARC" + url = "https://github.com/FTHPC/ARC" + git = "https://github.com/robertu94/ARC" + + maintainers = ["robertu94"] + + version("master", branch="master") + version("2021-12-01", commit="49d4a5df53a082f15a6959aef434224fd7b9beac") + + depends_on("libpressio+sz+zfp", when="+examples") + + variant("examples", description="build examples", default=False) + variant("shared", description="build shared libraries", default=True) + + def cmake_args(self): + args = [ + self.define("BUILD_TESTING", self.run_tests), + self.define_from_variant("BUILD_SHARED_LIBS", "shared"), + self.define_from_variant("BUILD_EXAMPLES", "examples"), + ] + return args diff --git a/var/spack/repos/builtin/packages/arm-forge/package.py b/var/spack/repos/builtin/packages/arm-forge/package.py index 61c85810127..9d658301f35 100644 --- a/var/spack/repos/builtin/packages/arm-forge/package.py +++ b/var/spack/repos/builtin/packages/arm-forge/package.py @@ -23,6 +23,10 @@ class ArmForge(Package): # versions (and checksums) based on the target platform shows up if platform.machine() == "aarch64": + version( + "22.1.1", sha256="f352625659a5319ca26130b633cbe8cc8e5fda312c50a8cd81145051eb66855c" + ) + version("22.1", sha256="c0e2639051f75be77a440fd00f049ffd42a932a6c2459372e64657a8e5b78779") version( "22.0.4", sha256="f770781d3c5e2fccb341f6b6ea7ddbe106e26168d4bad4cad3296b2eef65cb76" ) @@ -57,6 +61,10 @@ class ArmForge(Package): ) version("21.0", 
sha256="2bcc745d0049d6b25c77c97b2d7bad7b4f804180972a2306a8599ce41f6a4573") elif platform.machine() == "ppc64le": + version( + "22.1.1", sha256="c160779ad7217582ced9924a2af90330626af34385d07f4c39b827f929f89508" + ) + version("22.1", sha256="b94a7923360a76a431b29b939191bce1d2076bc6bc0bc698f24191055328952c") version( "22.0.4", sha256="f4cb5bcbaa67f9209299fe4653186a2829760b8b16a2883913aa43766375b04c" ) @@ -91,6 +99,10 @@ class ArmForge(Package): ) version("21.0", sha256="60cfa7dd1cd131ec85e67cb660f2f84cf30bb700d8979cae1f5f88af658fd249") elif platform.machine() == "x86_64": + version( + "22.1.1", sha256="392a7b0b4a212c506dc600ca2c37001cf85780ea2248fc47701953f12ef35300" + ) + version("22.1", sha256="3a1346ec10ff1871de7a013bacc21911976f97640297fc8d88136c4f91092e69") version( "22.0.4", sha256="a2c8c1da38b9684d7c4656a98b3fc42777b03fd474cd0bf969324804f47587e5" ) diff --git a/var/spack/repos/builtin/packages/armpl-gcc/package.py b/var/spack/repos/builtin/packages/armpl-gcc/package.py index dd9d22ffa08..edb14359d8f 100644 --- a/var/spack/repos/builtin/packages/armpl-gcc/package.py +++ b/var/spack/repos/builtin/packages/armpl-gcc/package.py @@ -13,6 +13,7 @@ "sles15": "SLES-15", "centos7": "RHEL-7", "centos8": "RHEL-8", + "rocky8": "RHEL-8", "amzn2": "RHEL-7", } diff --git a/var/spack/repos/builtin/packages/arpack-ng/package.py b/var/spack/repos/builtin/packages/arpack-ng/package.py index 2a418130337..49feb262cb1 100644 --- a/var/spack/repos/builtin/packages/arpack-ng/package.py +++ b/var/spack/repos/builtin/packages/arpack-ng/package.py @@ -2,11 +2,12 @@ # Spack Project Developers. See the top-level COPYRIGHT file for details. # # SPDX-License-Identifier: (Apache-2.0 OR MIT) - +import spack.build_systems.autotools +import spack.build_systems.cmake from spack.package import * -class ArpackNg(Package): +class ArpackNg(CMakePackage, AutotoolsPackage): """ARPACK-NG is a collection of Fortran77 subroutines designed to solve large scale eigenvalue problems. 
@@ -38,6 +39,8 @@ class ArpackNg(Package): url = "https://github.com/opencollab/arpack-ng/archive/3.3.0.tar.gz" git = "https://github.com/opencollab/arpack-ng.git" + build_system("cmake", "autotools", default="cmake") + version("develop", branch="master") version("3.8.0", sha256="ada5aeb3878874383307239c9235b716a8a170c6d096a6625bfd529844df003d") version("3.7.0", sha256="972e3fc3cd0b9d6b5a737c9bf6fd07515c0d6549319d4ffb06970e64fa3cc2d6") @@ -74,19 +77,25 @@ class ArpackNg(Package): depends_on("blas") depends_on("lapack") - depends_on("automake", when="@3.3.0", type="build") - depends_on("autoconf", when="@3.3.0", type="build") - depends_on("libtool@2.4.2:", when="@3.3.0", type="build") - depends_on("cmake@2.8.6:", when="@3.4.0:", type="build") - depends_on("mpi", when="+mpi") + with when("build_system=autotools"): + depends_on("automake", type="build") + depends_on("autoconf", type="build") + depends_on("libtool@2.4.2:", type="build") + depends_on("pkgconfig", type="build") + def flag_handler(self, name, flags): spec = self.spec iflags = [] if name == "cflags": if spec.satisfies("%oneapi"): iflags.append("-Wno-error=implicit-function-declaration") + + if name == "fflags": + if self.spec.satisfies("%cce"): + iflags.append("-hnopattern") + return (iflags, None, None) @property @@ -100,36 +109,26 @@ def libs(self): return find_libraries(libraries, root=self.prefix, shared=True, recursive=True) - @when("@:3.7.0 %gcc@10:") - def setup_build_environment(self, env): - # version up to and including 3.7.0 are not ported to gcc 10 - # https://github.com/opencollab/arpack-ng/issues/242 - env.set("FFLAGS", "-fallow-argument-mismatch") - @when("@3.4.0:") - def install(self, spec, prefix): - - options = ["-DEXAMPLES=ON"] - options.extend(std_cmake_args) - options.append("-DCMAKE_INSTALL_NAME_DIR:PATH=%s/lib" % prefix) - - # Make sure we use Spack's blas/lapack: +class CMakeBuilder(spack.build_systems.cmake.CMakeBuilder): + def cmake_args(self): + spec = self.spec lapack_libs = 
spec["lapack"].libs.joined(";") blas_libs = spec["blas"].libs.joined(";") - options.extend( - [ - "-DLAPACK_FOUND=true", - "-DLAPACK_INCLUDE_DIRS={0}".format(spec["lapack"].prefix.include), - "-DLAPACK_LIBRARIES={0}".format(lapack_libs), - "-DBLAS_FOUND=true", - "-DBLAS_INCLUDE_DIRS={0}".format(spec["blas"].prefix.include), - "-DBLAS_LIBRARIES={0}".format(blas_libs), - ] - ) - - if "+mpi" in spec: - options.append("-DMPI=ON") + options = [ + self.define("EXAMPLES", "ON"), + self.define("CMAKE_INSTALL_NAME_DIR", self.prefix.lib), + self.define("LAPACK_FOUND", True), + self.define("LAPACK_INCLUDE_DIRS", spec["lapack"].prefix.include), + self.define("LAPACK_LIBRARIES", lapack_libs), + self.define("BLAS_FOUND", True), + self.define("BLAS_INCLUDE_DIRS", spec["blas"].prefix.include), + self.define("BLAS_LIBRARIES", blas_libs), + self.define_from_variant("MPI", "mpi"), + self.define_from_variant("BUILD_SHARED_LIBS", "shared"), + self.define("CMAKE_POSITION_INDEPENDENT_CODE", True), + ] # If 64-bit BLAS is used: if ( @@ -139,41 +138,28 @@ def install(self, spec, prefix): ): options.append("-DINTERFACE64=1") - if "+shared" in spec: - options.append("-DBUILD_SHARED_LIBS=ON") - else: - options.append("-DBUILD_SHARED_LIBS=OFF") - options.append("-DCMAKE_POSITION_INDEPENDENT_CODE:BOOL=true") + return options - cmake(".", *options) - make() - if self.run_tests: - make("test") - make("install") - @when("@3.3.0") - def install(self, spec, prefix): - # Apparently autotools are not bootstrapped - which("libtoolize")() - bootstrap = Executable("./bootstrap") - - options = ["--prefix=%s" % prefix] - - if "+mpi" in spec: - options.extend(["--enable-mpi", "F77=%s" % spec["mpi"].mpif77]) - - options.extend( - [ +class AutotoolsBuilder(spack.build_systems.autotools.AutotoolsBuilder): + def configure_args(self): + spec = self.spec + options = ( + self.enable_or_disable("mpi") + + [ "--with-blas={0}".format(spec["blas"].libs.ld_flags), 
"--with-lapack={0}".format(spec["lapack"].libs.ld_flags), ] + + self.enable_or_disable("shared") ) - if "+shared" not in spec: - options.append("--enable-shared=no") - bootstrap() - configure(*options) - make() - if self.run_tests: - make("check") - make("install") + if "+mpi" in spec: + options.append("F77={0}".format(spec["mpi"].mpif77)) + + return options + + @when("@:3.7.0 %gcc@10:") + def setup_build_environment(self, env): + # version up to and including 3.7.0 are not ported to gcc 10 + # https://github.com/opencollab/arpack-ng/issues/242 + env.set("FFLAGS", "-fallow-argument-mismatch") diff --git a/var/spack/repos/builtin/packages/axom/package.py b/var/spack/repos/builtin/packages/axom/package.py index d1cb7f1069d..bf6fa62da40 100644 --- a/var/spack/repos/builtin/packages/axom/package.py +++ b/var/spack/repos/builtin/packages/axom/package.py @@ -91,8 +91,8 @@ class Axom(CachedCMakePackage, CudaPackage, ROCmPackage): # Dependencies # ----------------------------------------------------------------------- # Basics - depends_on("cmake@3.8.2:", type="build") - depends_on("cmake@3.16.8:", type="build", when="+rocm") + depends_on("cmake@3.14:", type="build") + depends_on("cmake@3.21:", type="build", when="+rocm") depends_on("blt", type="build") depends_on("blt@0.5.1:", type="build", when="@0.6.2:") @@ -449,7 +449,7 @@ def initconfig_package_entries(self): entries.append("# ClangFormat disabled due to disabled devtools\n") entries.append(cmake_cache_option("ENABLE_CLANGFORMAT", False)) - if spec.satisfies("^python") or "+devtools" in spec: + if "+python" in spec or "+devtools" in spec: python_path = os.path.realpath(spec["python"].command.path) for key in path_replacements: python_path = python_path.replace(key, path_replacements[key]) diff --git a/var/spack/repos/builtin/packages/bacio/package.py b/var/spack/repos/builtin/packages/bacio/package.py index 923c6139e53..013635cbeb9 100644 --- a/var/spack/repos/builtin/packages/bacio/package.py +++ 
b/var/spack/repos/builtin/packages/bacio/package.py @@ -14,7 +14,12 @@ class Bacio(CMakePackage): homepage = "https://noaa-emc.github.io/NCEPLIBS-bacio" url = "https://github.com/NOAA-EMC/NCEPLIBS-bacio/archive/refs/tags/v2.4.1.tar.gz" - maintainers = ["t-brown", "edwardhartnett", "kgerheiser", "Hang-Lei-NOAA"] + maintainers = [ + "t-brown", + "edwardhartnett", + "AlexanderRichert-NOAA", + "Hang-Lei-NOAA", + ] version("2.5.0", sha256="540a0ed73941d70dbf5d7b21d5d0a441e76fad2bfe37dfdfea0db3e98fc0fbfb") diff --git a/var/spack/repos/builtin/packages/bear/package.py b/var/spack/repos/builtin/packages/bear/package.py index c2b22a4d0b9..567c737e13e 100644 --- a/var/spack/repos/builtin/packages/bear/package.py +++ b/var/spack/repos/builtin/packages/bear/package.py @@ -22,7 +22,7 @@ class Bear(CMakePackage): version("2.2.0", sha256="6bd61a6d64a24a61eab17e7f2950e688820c72635e1cf7ea8ea7bf9482f3b612") version("2.0.4", sha256="33ea117b09068aa2cd59c0f0f7535ad82c5ee473133779f1cc20f6f99793a63e") - depends_on("pkgconf", when="@3:") + depends_on("pkgconfig", when="@3:") depends_on("fmt", when="@3.0.0:") depends_on("grpc", when="@3.0.0:") depends_on("nlohmann-json", when="@3.0.0:") diff --git a/var/spack/repos/builtin/packages/binutils/package.py b/var/spack/repos/builtin/packages/binutils/package.py index 9ba963b019e..f5a9ae7fd90 100644 --- a/var/spack/repos/builtin/packages/binutils/package.py +++ b/var/spack/repos/builtin/packages/binutils/package.py @@ -4,6 +4,7 @@ # SPDX-License-Identifier: (Apache-2.0 OR MIT) import re +import spack.build_systems.autotools from spack.package import * @@ -40,13 +41,21 @@ class Binutils(AutotoolsPackage, GNUMirrorPackage): version("2.20.1", sha256="71d37c96451333c5c0b84b170169fdcb138bbb27397dc06281905d9717c8ed64") variant("plugins", default=True, description="enable plugins, needed for gold linker") - variant("gold", default=False, description="build the gold linker") + # When you build ld.gold you automatically get ld, even when you add the + # 
--disable-ld flag + variant("gold", default=False, when="+ld", description="build the gold linker") variant("libiberty", default=False, description="Also install libiberty.") variant("nls", default=True, description="Enable Native Language Support") variant("headers", default=False, description="Install extra headers (e.g. ELF)") variant("lto", default=False, description="Enable lto.") variant("ld", default=False, description="Enable ld.") - variant("gas", default=False, description="Enable as assembler.") + # When you build binutils with ~ld and +gas and load it in your PATH, you + # may end up with incompatibilities between a potentially older system ld + # and a recent assembler. For instance the linker on ubuntu 16.04 from + # binutils 2.26 and the assembler from binutils 2.36.1 will result in: + # "unable to initialize decompress status for section .debug_info" + # when compiling with debug symbols on gcc. + variant("gas", default=False, when="+ld", description="Enable as assembler.") variant("interwork", default=False, description="Enable interwork.") variant( "libs", @@ -81,83 +90,12 @@ class Binutils(AutotoolsPackage, GNUMirrorPackage): conflicts("+gold", when="platform=darwin", msg="Binutils cannot build linkers on macOS") - # When you build binutils with ~ld and +gas and load it in your PATH, you - # may end up with incompatibilities between a potentially older system ld - # and a recent assembler. For instance the linker on ubuntu 16.04 from - # binutils 2.26 and the assembler from binutils 2.36.1 will result in: - # "unable to initialize decompress status for section .debug_info" - # when compiling with debug symbols on gcc. 
- conflicts("+gas", "~ld", msg="Assembler not always compatible with system ld") - - # When you build ld.gold you automatically get ld, even when you add the - # --disable-ld flag - conflicts("~ld", "+gold") - @classmethod def determine_version(cls, exe): output = Executable(exe)("--version", output=str, error=str) match = re.search(r"GNU (nm|readelf).* (\S+)", output) return Version(match.group(2)).dotted.up_to(3) if match else None - def setup_build_environment(self, env): - - if self.spec.satisfies("%cce"): - env.append_flags("LDFLAGS", "-Wl,-z,muldefs") - - if "+nls" in self.spec: - env.append_flags("LDFLAGS", "-lintl") - - def configure_args(self): - spec = self.spec - - args = [ - "--disable-dependency-tracking", - "--disable-werror", - "--enable-multilib", - "--enable-64-bit-bfd", - "--enable-targets=all", - "--with-system-zlib", - "--with-sysroot=/", - ] - - args += self.enable_or_disable("libs") - args += self.enable_or_disable("lto") - args += self.enable_or_disable("ld") - args += self.enable_or_disable("gas") - args += self.enable_or_disable("interwork") - args += self.enable_or_disable("gold") - args += self.enable_or_disable("plugins") - - if "+libiberty" in spec: - args.append("--enable-install-libiberty") - else: - args.append("--disable-install-libiberty") - - if "+nls" in spec: - args.append("--enable-nls") - else: - args.append("--disable-nls") - - # To avoid namespace collisions with Darwin/BSD system tools, - # prefix executables with "g", e.g., gar, gnm; see Homebrew - # https://github.com/Homebrew/homebrew-core/blob/master/Formula/binutils.rb - if spec.satisfies("platform=darwin"): - args.append("--program-prefix=g") - - return args - - @run_after("install") - def install_headers(self): - # some packages (like TAU) need the ELF headers, so install them - # as a subdirectory in include/extras - if "+headers" in self.spec: - extradir = join_path(self.prefix.include, "extra") - mkdirp(extradir) - # grab the full binutils set of headers - 
install_tree("include", extradir) - # also grab the headers from the bfd directory - install(join_path(self.build_directory, "bfd", "*.h"), extradir) - def flag_handler(self, name, flags): spec = self.spec # Use a separate variable for injecting flags. This way, installing @@ -204,3 +142,55 @@ def test(self): self.run_test( exe, "--version", expected, installed=True, purpose=reason, skip_missing=True ) + + +class AutotoolsBuilder(spack.build_systems.autotools.AutotoolsBuilder): + def configure_args(self): + args = [ + "--disable-dependency-tracking", + "--disable-werror", + "--enable-multilib", + "--enable-64-bit-bfd", + "--enable-targets=all", + "--with-system-zlib", + "--with-sysroot=/", + ] + args += self.enable_or_disable("libs") + args += self.enable_or_disable("lto") + args += self.enable_or_disable("ld") + args += self.enable_or_disable("gas") + args += self.enable_or_disable("interwork") + args += self.enable_or_disable("gold") + args += self.enable_or_disable("nls") + args += self.enable_or_disable("plugins") + + if "+libiberty" in self.spec: + args.append("--enable-install-libiberty") + else: + args.append("--disable-install-libiberty") + + # To avoid namespace collisions with Darwin/BSD system tools, + # prefix executables with "g", e.g., gar, gnm; see Homebrew + # https://github.com/Homebrew/homebrew-core/blob/master/Formula/binutils.rb + if self.spec.satisfies("platform=darwin"): + args.append("--program-prefix=g") + + return args + + @run_after("install", when="+headers") + def install_headers(self): + # some packages (like TAU) need the ELF headers, so install them + # as a subdirectory in include/extras + extradir = join_path(self.prefix.include, "extra") + mkdirp(extradir) + # grab the full binutils set of headers + install_tree("include", extradir) + # also grab the headers from the bfd directory + install(join_path(self.build_directory, "bfd", "*.h"), extradir) + + def setup_build_environment(self, env): + if self.spec.satisfies("%cce"): + 
env.append_flags("LDFLAGS", "-Wl,-z,muldefs") + + if "+nls" in self.spec: + env.append_flags("LDFLAGS", "-lintl") diff --git a/var/spack/repos/builtin/packages/bitgroomingz/package.py b/var/spack/repos/builtin/packages/bitgroomingz/package.py new file mode 100644 index 00000000000..c10a4f15df9 --- /dev/null +++ b/var/spack/repos/builtin/packages/bitgroomingz/package.py @@ -0,0 +1,30 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class Bitgroomingz(CMakePackage): + """BGZ: Bit Grooming Compressor""" + + homepage = "https://github.com/disheng222/BitGroomingZ" + git = "https://github.com/disheng222/BitGroomingZ" + + maintainers = ["robertu94"] + + version("master", branch="master") + version("2022-10-14", commit="a018b20cca9f7d6a5396ab36230e4be6ae1cb25b") + + variant("shared", default=True, description="build shared libs") + + depends_on("zlib") + + def cmake_args(self): + args = [] + if "+shared" in self.spec: + args.append("-DBUILD_SHARED_LIBS=ON") + else: + args.append("-DBUILD_SHARED_LIBS=OFF") + return args diff --git a/var/spack/repos/builtin/packages/boost/package.py b/var/spack/repos/builtin/packages/boost/package.py index 4e2c05ada33..3fd01f23845 100644 --- a/var/spack/repos/builtin/packages/boost/package.py +++ b/var/spack/repos/builtin/packages/boost/package.py @@ -27,6 +27,7 @@ class Boost(Package): maintainers = ["hainest"] version("develop", branch="develop", submodules=True) + version("1.80.0", sha256="1e19565d82e43bc59209a168f5ac899d3ba471d55c7610c677d4ccf2c9c500c0") version("1.79.0", sha256="475d589d51a7f8b3ba2ba4eda022b170e562ca3b760ee922c146b6c65856ef39") version("1.78.0", sha256="8681f175d4bdb26c52222665793eef08490d7758529330f98d3b29dd0735bccc") version("1.77.0", sha256="fc9f85fc030e233142908241af7a846e60630aa7388de9a5fafb1f3a26840854") @@ -43,10 +44,6 @@ 
class Boost(Package): version("1.66.0", sha256="5721818253e6a0989583192f96782c4a98eb6204965316df9f5ad75819225ca9") version("1.65.1", sha256="9807a5d16566c57fd74fb522764e0b134a8bbe6b6e8967b83afefd30dcd3be81") version("1.65.0", sha256="ea26712742e2fb079c2a566a31f3266973b76e38222b9f88b387e3c8b2f9902c") - # NOTE: 1.64.0 seems fine for *most* applications, but if you need - # +python and +mpi, there seem to be errors with out-of-date - # API calls from mpi/python. - # See: https://github.com/spack/spack/issues/3963 version("1.64.0", sha256="7bcc5caace97baa948931d712ea5f37038dbb1c5d89b43ad4def4ed7cb683332") version("1.63.0", sha256="beae2529f759f6b3bf3f4969a19c2e9d6f0c503edcb2de4a61d1428519fcb3b0") version("1.62.0", sha256="36c96b0f6155c98404091d8ceb48319a28279ca0333fba1ad8611eb90afb2ca0") @@ -244,6 +241,13 @@ def libs(self): conflicts("cxxstd=98", when="+fiber") # Fiber requires >=C++11. conflicts("~context", when="+fiber") # Fiber requires Context. + # NOTE: 1.64.0 seems fine for *most* applications, but if you need + # +python and +mpi, there seem to be errors with out-of-date + # API calls from mpi/python. 
+ # See: https://github.com/spack/spack/issues/3963 + conflicts("@1.64.0", when="+python", msg="Errors with out-of-date API calls from Python") + conflicts("@1.64.0", when="+mpi", msg="Errors with out-of-date API calls from MPI") + conflicts("+taggedlayout", when="+versionedlayout") conflicts("+numpy", when="~python") @@ -261,6 +265,10 @@ def libs(self): # https://github.com/STEllAR-GROUP/hpx/issues/5442#issuecomment-878913339 conflicts("%gcc", when="@:1.76 +system platform=darwin") + # Boost 1.80 does not build with the Intel oneapi compiler + # (https://github.com/spack/spack/pull/32879#issuecomment-1265933265) + conflicts("%oneapi", when="@1.80") + # Patch fix from https://svn.boost.org/trac/boost/ticket/11856 patch("boost_11856.patch", when="@1.60.0%gcc@4.4.7") diff --git a/var/spack/repos/builtin/packages/bufr/package.py b/var/spack/repos/builtin/packages/bufr/package.py index 05c93508548..4376ac59fd4 100644 --- a/var/spack/repos/builtin/packages/bufr/package.py +++ b/var/spack/repos/builtin/packages/bufr/package.py @@ -17,7 +17,13 @@ class Bufr(CMakePackage): homepage = "https://noaa-emc.github.io/NCEPLIBS-bufr" url = "https://github.com/NOAA-EMC/NCEPLIBS-bufr/archive/refs/tags/bufr_v11.5.0.tar.gz" - maintainers = ["t-brown", "kgerheiser", "edwardhartnett", "Hang-Lei-NOAA", "jbathegit"] + maintainers = [ + "t-brown", + "AlexanderRichert-NOAA", + "edwardhartnett", + "Hang-Lei-NOAA", + "jbathegit", + ] version("11.5.0", sha256="d154839e29ef1fe82e58cf20232e9f8a4f0610f0e8b6a394b7ca052e58f97f43") diff --git a/var/spack/repos/builtin/packages/butterflypack/package.py b/var/spack/repos/builtin/packages/butterflypack/package.py index 68da397e847..5d1572b19aa 100644 --- a/var/spack/repos/builtin/packages/butterflypack/package.py +++ b/var/spack/repos/builtin/packages/butterflypack/package.py @@ -26,6 +26,7 @@ class Butterflypack(CMakePackage): maintainers = ["liuyangzhuan"] version("master", branch="master") + version("2.2.2", 
sha256="73f67073e4291877f1eee19483a8a7b3c761eaf79a75805d52105ceedead85ea") version("2.2.1", sha256="4cedc2896a6b368773ce4f9003aa2c0230baf56a4464a6b899a155e01406a232") version("2.2.0", sha256="1ce5b8461b3c4f488cee6396419e8a6f0a1bcf95254f24d7c27bfa53b391c30b") version("2.1.1", sha256="0d4a1ce540c84de37e4398f72ecf685ea0c4eabceba13015add5b445a4ca3a15") @@ -39,12 +40,16 @@ class Butterflypack(CMakePackage): version("1.0.0", sha256="86c5eb09a18522367d63ce2bacf67ca1c9813ef351a1443baaab3c53f0d77232") variant("shared", default=True, description="Build shared libraries") + variant("openmp", default=True, description="add OpenMP support") depends_on("mpi") depends_on("blas") depends_on("lapack") depends_on("scalapack") depends_on("arpack-ng") + depends_on("sed", type="build") + + conflicts("%gcc@:7", when="@2.2.1:") # https://github.com/spack/spack/issues/31818 patch("qopenmp-for-oneapi.patch", when="@2.1.1 %oneapi") @@ -66,5 +71,6 @@ def cmake_args(self): "-DTPL_ARPACK_LIBRARIES=%s" % spec["arpack-ng"].libs.joined(";"), self.define_from_variant("BUILD_SHARED_LIBS", "shared"), ] + args.append("-Denable_openmp=%s" % ("ON" if "+openmp" in spec else "OFF")) return args diff --git a/var/spack/repos/builtin/packages/ca-certificates-mozilla/package.py b/var/spack/repos/builtin/packages/ca-certificates-mozilla/package.py index f1a39062288..50862817d0b 100644 --- a/var/spack/repos/builtin/packages/ca-certificates-mozilla/package.py +++ b/var/spack/repos/builtin/packages/ca-certificates-mozilla/package.py @@ -14,6 +14,11 @@ class CaCertificatesMozilla(Package): maintainers = ["haampie"] + version( + "2022-10-11", + sha256="2cff03f9efdaf52626bd1b451d700605dc1ea000c5da56bd0fc59f8f43071040", + expand=False, + ) version( "2022-07-19", sha256="6ed95025fba2aef0ce7b647607225745624497f876d74ef6ec22b26e73e9de77", diff --git a/var/spack/repos/builtin/packages/cdo/package.py b/var/spack/repos/builtin/packages/cdo/package.py index bbc232c0bd8..85f208861a9 100644 --- 
a/var/spack/repos/builtin/packages/cdo/package.py +++ b/var/spack/repos/builtin/packages/cdo/package.py @@ -20,6 +20,11 @@ class Cdo(AutotoolsPackage): maintainers = ["skosukhin", "Try2Code"] + version( + "2.1.0", + sha256="b871346c944b05566ab21893827c74616575deaad0b20eacb472b80b1fa528cc", + url="https://code.mpimet.mpg.de/attachments/download/27481/cdo-2.1.0.tar.gz", + ) version( "2.0.6", sha256="ef120dea9032b1be80a4cfa201958c3b910107205beb6674195675f1ee8ed402", diff --git a/var/spack/repos/builtin/packages/chai/package.py b/var/spack/repos/builtin/packages/chai/package.py index 6687b5092b3..a52bcb8a085 100644 --- a/var/spack/repos/builtin/packages/chai/package.py +++ b/var/spack/repos/builtin/packages/chai/package.py @@ -112,6 +112,13 @@ def cache_name(self): self.spec.compiler.version, ) + def initconfig_compiler_entries(self): + spec = self.spec + entries = super(Chai, self).initconfig_compiler_entries() + if "+rocm" in spec: + entries.insert(0, cmake_cache_path("CMAKE_CXX_COMPILER", spec["hip"].hipcc)) + return entries + def initconfig_hardware_entries(self): spec = self.spec entries = super(Chai, self).initconfig_hardware_entries() diff --git a/var/spack/repos/builtin/packages/chameleon/package.py b/var/spack/repos/builtin/packages/chameleon/package.py index f73cd82e161..98aba276b02 100644 --- a/var/spack/repos/builtin/packages/chameleon/package.py +++ b/var/spack/repos/builtin/packages/chameleon/package.py @@ -54,8 +54,11 @@ class Chameleon(CMakePackage, CudaPackage): depends_on("starpu~cuda", when="~cuda") depends_on("starpu+cuda", when="+cuda") with when("+simgrid"): + depends_on("simgrid+msg") depends_on("starpu+simgrid") depends_on("starpu+mpi~shared+simgrid", when="+mpi") + conflicts("^simgrid@:3.31", when="@:1.1.0") + conflicts("+shared", when="+simgrid") with when("~simgrid"): depends_on("mpi", when="+mpi") depends_on("cuda", when="+cuda") @@ -90,9 +93,9 @@ def cmake_args(self): if spec.satisfies("+mpi +simgrid"): args.extend( [ - 
self.define("MPI_C_COMPILER", self.spec["simgrid"].smpicc), - self.define("MPI_CXX_COMPILER", self.spec["simgrid"].smpicxx), - self.define("MPI_Fortran_COMPILER", self.spec["simgrid"].smpifc), + self.define("CMAKE_C_COMPILER", self.spec["simgrid"].smpicc), + self.define("CMAKE_CXX_COMPILER", self.spec["simgrid"].smpicxx), + self.define("CMAKE_Fortran_COMPILER", self.spec["simgrid"].smpifc), ] ) diff --git a/var/spack/repos/builtin/packages/clara/package.py b/var/spack/repos/builtin/packages/clara/package.py index 3eaea61ce54..fadec16835b 100644 --- a/var/spack/repos/builtin/packages/clara/package.py +++ b/var/spack/repos/builtin/packages/clara/package.py @@ -2,19 +2,26 @@ # Spack Project Developers. See the top-level COPYRIGHT file for details. # # SPDX-License-Identifier: (Apache-2.0 OR MIT) - +import spack.build_systems.generic from spack.package import * -class Clara(CMakePackage): +class Clara(CMakePackage, Package): """A simple to use, composable, command line parser for C++ 11 - and beyond.""" + and beyond. 
+ """ homepage = "https://github.com/catchorg/Clara" url = "https://github.com/catchorg/Clara/archive/v1.1.5.tar.gz" maintainers = ["bvanessen"] + build_system( + conditional("generic", when="+single_header"), + conditional("cmake", when="~single_header"), + default="generic", + ) + variant("single_header", default=True, description="Install a single header only.") version("1.1.5", sha256="767dc1718e53678cbea00977adcd0a8a195802a505aec3c537664cf25a173142") @@ -24,15 +31,8 @@ class Clara(CMakePackage): version("1.1.1", sha256="10915a49a94d371f05af360d40e9cc9615ab86f200d261edf196a8ddd7efa7f8") version("1.1.0", sha256="29ca29d843150aabad702356f79009f5b30dda05ac9674a064362b7edcba5477") - @when("+single_header") - def cmake(self, spec, prefix): - pass - @when("+single_header") - def build(self, spec, prefix): - pass - - @when("+single_header") - def install(self, spec, prefix): +class GenericBuilder(spack.build_systems.generic.GenericBuilder): + def install(self, pkg, spec, prefix): mkdirp(prefix.include) install_tree("single_include", prefix.include) diff --git a/var/spack/repos/builtin/packages/clhep/package.py b/var/spack/repos/builtin/packages/clhep/package.py index 80765d00f1a..1d4f5bd9195 100644 --- a/var/spack/repos/builtin/packages/clhep/package.py +++ b/var/spack/repos/builtin/packages/clhep/package.py @@ -19,6 +19,8 @@ class Clhep(CMakePackage): maintainers = ["drbenmorgan"] + version("2.4.6.0", sha256="e8d16debb84ced28e40e9ae84789cf5a0adad45f9213fbac3ce7583e06caa7b1") + version("2.4.5.4", sha256="983fb4ea1fe423217fe9debc709569495a62a3b4540eb790d557c5a34dffbbb6") version("2.4.5.3", sha256="45f63eeb097f02fe67b86a7dadbf10d409b401c28a1a3e172db36252c3097c13") version("2.4.5.1", sha256="2517c9b344ad9f55974786ae6e7a0ef8b22f4abcbf506df91194ea2299ce3813") version("2.4.4.0", sha256="5df78c11733a091da9ae5a24ce31161d44034dd45f20455587db85f1ca1ba539") @@ -65,6 +67,11 @@ class Clhep(CMakePackage): patch("clhep-cms.patch", when="+cms", level=0) def patch(self): + # Patched 
line removed since 2.3.2.2 + # https://gitlab.cern.ch/CLHEP/CLHEP/-/commit/5da6830d69c71dc178632f7f5121a3a00e379f94 + if self.spec.satisfies("@2.3.2.2:"): + return + filter_file( "SET CMP0042 OLD", "SET CMP0042 NEW", diff --git a/var/spack/repos/builtin/packages/clingo/package.py b/var/spack/repos/builtin/packages/clingo/package.py index be477fdf5b7..a24ffb0956d 100644 --- a/var/spack/repos/builtin/packages/clingo/package.py +++ b/var/spack/repos/builtin/packages/clingo/package.py @@ -3,6 +3,7 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) +import os from spack.compiler import UnsupportedCompilerFlag from spack.package import * @@ -64,6 +65,8 @@ class Clingo(CMakePackage): depends_on("py-cffi", type=("build", "run"), when="@5.5.0: platform=cray") patch("python38.patch", when="@5.3:5.4.0") + patch("size-t.patch", when="%msvc") + patch("vs2022.patch", when="%msvc@19.30:") def patch(self): # Doxygen is optional but can't be disabled with a -D, so patch @@ -118,3 +121,9 @@ def cmake_args(self): args += ["-DCLINGO_BUILD_WITH_PYTHON=OFF"] return args + + def win_add_library_dependent(self): + if "+python" in self.spec: + return [os.path.join(self.prefix, self.spec["python"].package.platlib)] + else: + return [] diff --git a/var/spack/repos/builtin/packages/clingo/size-t.patch b/var/spack/repos/builtin/packages/clingo/size-t.patch new file mode 100644 index 00000000000..52f7db20624 --- /dev/null +++ b/var/spack/repos/builtin/packages/clingo/size-t.patch @@ -0,0 +1,22 @@ +diff --git a/libpyclingo/pyclingo.cc b/libpyclingo/pyclingo.cc +index ec4a33c..88b6669 100644 +--- a/libpyclingo/pyclingo.cc ++++ b/libpyclingo/pyclingo.cc +@@ -116,7 +116,7 @@ struct ObjectProtocoll { + Object call(char const *name, Args &&... args); + template + Object operator()(Args &&... 
args); +- ssize_t size(); ++ Py_ssize_t size(); + bool empty() { return size() == 0; } + Object getItem(Reference o); + Object getItem(char const *key); +@@ -232,7 +232,7 @@ Object ObjectProtocoll::operator()(Args &&... args) { + return PyObject_CallFunctionObjArgs(toPy_(), Reference(args).toPy()..., nullptr); + } + template +-ssize_t ObjectProtocoll::size() { ++Py_ssize_t ObjectProtocoll::size() { + auto ret = PyObject_Size(toPy_()); + if (PyErr_Occurred()) { throw PyException(); } + return ret; diff --git a/var/spack/repos/builtin/packages/clingo/vs2022.patch b/var/spack/repos/builtin/packages/clingo/vs2022.patch new file mode 100644 index 00000000000..8a27fc8679d --- /dev/null +++ b/var/spack/repos/builtin/packages/clingo/vs2022.patch @@ -0,0 +1,18 @@ +diff --git a/libpyclingo/pyclingo.cc b/libpyclingo/pyclingo.cc +index 88b6669..58e73bd 100644 +--- a/libpyclingo/pyclingo.cc ++++ b/libpyclingo/pyclingo.cc +@@ -25,6 +25,13 @@ + // NOTE: the python header has a linker pragma to link with python_d.lib + // when _DEBUG is set which is not part of official python releases + #if defined(_MSC_VER) && defined(_DEBUG) && !defined(CLINGO_UNDEF__DEBUG) ++// Workaround for a VS 2022 issue. 
++// NOTE: This workaround knowingly violates the Python.h include order requirement: ++// https://docs.python.org/3/c-api/intro.html#include-files ++# include ++# if _MSVC_STL_VERSION >= 143 ++# include ++# endif + #undef _DEBUG + #include + #define _DEBUG diff --git a/var/spack/repos/builtin/packages/cmake/package.py b/var/spack/repos/builtin/packages/cmake/package.py index e988af5ba26..272ce1bd05d 100644 --- a/var/spack/repos/builtin/packages/cmake/package.py +++ b/var/spack/repos/builtin/packages/cmake/package.py @@ -28,9 +28,12 @@ class Cmake(Package): executables = ["^cmake$"] version("master", branch="master") + version("3.24.3", sha256="b53aa10fa82bff84ccdb59065927b72d3bee49f4d86261249fc0984b3b367291") version("3.24.2", sha256="0d9020f06f3ddf17fb537dc228e1a56c927ee506b486f55fe2dc19f69bf0c8db") version("3.24.1", sha256="4931e277a4db1a805f13baa7013a7757a0cbfe5b7932882925c7061d9d1fa82b") version("3.24.0", sha256="c2b61f7cdecb1576cad25f918a8f42b8685d88a832fd4b62b9e0fa32e915a658") + version("3.23.5", sha256="f2944cde7a140b992ba5ccea2009a987a92413762250de22ebbace2319a0f47d") + version("3.23.4", sha256="aa8b6c17a5adf04de06e42c06adc7e25b21e4fe8378f44f703a861e5f6ac59c7") version("3.23.3", sha256="06fefaf0ad94989724b56f733093c2623f6f84356e5beb955957f9ce3ee28809") version("3.23.2", sha256="f316b40053466f9a416adf981efda41b160ca859e97f6a484b447ea299ff26aa") version("3.23.1", sha256="33fd10a8ec687a4d0d5b42473f10459bb92b3ae7def2b745dc10b192760869f3") @@ -307,11 +310,20 @@ def bootstrap_args(self): args = [] self.generator = make + if self.spec.satisfies("platform=windows"): + args.append("-GNinja") + self.generator = ninja + if not sys.platform == "win32": args.append("--prefix={0}".format(self.prefix)) - if spack.build_environment.should_set_parallel_jobs(jobserver_support=True): - args.append("--parallel={0}".format(make_jobs)) + jobs = spack.build_environment.get_effective_jobs( + make_jobs, + parallel=self.parallel, + 
supports_jobserver=self.generator.supports_jobserver, + ) + if jobs is not None: + args.append("--parallel={0}".format(jobs)) if "+ownlibs" in spec: # Build and link to the CMake-provided third-party libraries @@ -338,9 +350,7 @@ def bootstrap_args(self): args.append("--") else: args.append("-DCMAKE_INSTALL_PREFIX=%s" % self.prefix) - if self.spec.satisfies("platform=windows"): - args.append("-GNinja") - self.generator = ninja + args.append("-DCMAKE_BUILD_TYPE={0}".format(self.spec.variants["build_type"].value)) # Install CMake correctly, even if `spack install` runs diff --git a/var/spack/repos/builtin/packages/cosma/package.py b/var/spack/repos/builtin/packages/cosma/package.py index 7b52650c81a..6ea66510386 100644 --- a/var/spack/repos/builtin/packages/cosma/package.py +++ b/var/spack/repos/builtin/packages/cosma/package.py @@ -33,6 +33,7 @@ class Cosma(CMakePackage): variant("cuda", default=False, description="Build with cuBLAS support") variant("rocm", default=False, description="Build with rocBLAS support") variant("scalapack", default=False, description="Build with ScaLAPACK API") + variant("shared", default=False, description="Build the shared library version") depends_on("cmake@3.12:", type="build") depends_on("mpi@3:") @@ -91,10 +92,11 @@ def cosma_scalapack_cmake_arg(self): def cmake_args(self): return [ - self.define("COSMA_WITH_TESTS", "OFF"), - self.define("COSMA_WITH_APPS", "OFF"), - self.define("COSMA_WITH_PROFILING", "OFF"), - self.define("COSMA_WITH_BENCHMARKS", "OFF"), + self.define("COSMA_WITH_TESTS", False), + self.define("COSMA_WITH_APPS", False), + self.define("COSMA_WITH_PROFILING", False), + self.define("COSMA_WITH_BENCHMARKS", False), self.define("COSMA_BLAS", self.cosma_blas_cmake_arg()), self.define("COSMA_SCALAPACK", self.cosma_scalapack_cmake_arg()), + self.define_from_variant("BUILD_SHARED_LIBS", "shared"), ] diff --git a/var/spack/repos/builtin/packages/cppcheck/package.py b/var/spack/repos/builtin/packages/cppcheck/package.py index 
63ee7385e90..5d9a2f23665 100644 --- a/var/spack/repos/builtin/packages/cppcheck/package.py +++ b/var/spack/repos/builtin/packages/cppcheck/package.py @@ -14,6 +14,7 @@ class Cppcheck(CMakePackage): maintainers = ["white238"] + version("2.9", sha256="d89f3282c70814fa66669e1ea0323c0484563b3f8249c7a2dcaac2ad07651dc7") version("2.8", sha256="a5ed97a99173d2952cd93fcb028a3405a7b3b992e7168e2ae9d527b991770203") version("2.7", sha256="ac74c0973c46a052760f4ff7ca6a84616ca5795510542d195a6f122c53079291") version("2.1", sha256="ab26eeef039e5b58aac01efb8cb664f2cc16bf9879c61bc93cd00c95be89a5f7") diff --git a/var/spack/repos/builtin/packages/cppunit/cppunit-1.14-defaulted-function-deleted.patch b/var/spack/repos/builtin/packages/cppunit/cppunit-1.14-defaulted-function-deleted.patch new file mode 100644 index 00000000000..59c2d5d114b --- /dev/null +++ b/var/spack/repos/builtin/packages/cppunit/cppunit-1.14-defaulted-function-deleted.patch @@ -0,0 +1,13 @@ +diff --git a/include/cppunit/extensions/TestSuiteBuilderContext.h b/include/cppunit/extensions/TestSuiteBuilderContext.h +index 12d157e..ad1a34f 100644 +--- a/include/cppunit/extensions/TestSuiteBuilderContext.h ++++ b/include/cppunit/extensions/TestSuiteBuilderContext.h +@@ -42,8 +42,6 @@ public: + + TestSuiteBuilderContextBase(TestSuiteBuilderContextBase const &) = default; + TestSuiteBuilderContextBase(TestSuiteBuilderContextBase &&) = default; +- TestSuiteBuilderContextBase & operator =(TestSuiteBuilderContextBase const &) = default; +- TestSuiteBuilderContextBase & operator =(TestSuiteBuilderContextBase &&) = default; + + /*! \brief Adds a test to the fixture suite. 
+ * diff --git a/var/spack/repos/builtin/packages/cppunit/package.py b/var/spack/repos/builtin/packages/cppunit/package.py index be3515ab31e..2db7440cb0a 100644 --- a/var/spack/repos/builtin/packages/cppunit/package.py +++ b/var/spack/repos/builtin/packages/cppunit/package.py @@ -11,10 +11,20 @@ class Cppunit(AutotoolsPackage): homepage = "https://wiki.freedesktop.org/www/Software/cppunit/" url = "https://dev-www.libreoffice.org/src/cppunit-1.13.2.tar.gz" + git = "https://anongit.freedesktop.org/git/libreoffice/cppunit.git" - version("1.14.0", sha256="3d569869d27b48860210c758c4f313082103a5e58219a7669b52bfd29d674780") + version("master", branch="master") + version("1.15_20220904", commit="78e64f0edb4f3271a6ddbcdf9cba05138597bfca") + version( + "1.14.0", + sha256="3d569869d27b48860210c758c4f313082103a5e58219a7669b52bfd29d674780", + preferred=True, + ) version("1.13.2", sha256="3f47d246e3346f2ba4d7c9e882db3ad9ebd3fcbd2e8b732f946e0e3eeb9f429f") + # https://github.com/cms-sw/cmsdist/blob/IB/CMSSW_12_6_X/master/cppunit-1.14-defaulted-function-deleted.patch + patch("cppunit-1.14-defaulted-function-deleted.patch", when="@1.15:") + variant( "cxxstd", default="default", @@ -23,6 +33,18 @@ class Cppunit(AutotoolsPackage): description="Use the specified C++ standard when building.", ) + variant( + "libs", + default="shared,static", + values=("shared", "static"), + multi=True, + description="Build shared libs, static libs or both", + ) + + depends_on("autoconf", type="build", when="@1.15_20220904") + depends_on("automake", type="build", when="@1.15_20220904") + depends_on("libtool", type="build", when="@1.15_20220904") + def setup_build_environment(self, env): cxxstd = self.spec.variants["cxxstd"].value cxxstdflag = ( @@ -32,4 +54,6 @@ def setup_build_environment(self, env): def configure_args(self): args = ["--disable-doxygen"] + args += self.enable_or_disable("libs") + return args diff --git a/var/spack/repos/builtin/packages/crtm/package.py 
b/var/spack/repos/builtin/packages/crtm/package.py index 92f5a6c08b6..7c36edb40e1 100644 --- a/var/spack/repos/builtin/packages/crtm/package.py +++ b/var/spack/repos/builtin/packages/crtm/package.py @@ -15,6 +15,11 @@ class Crtm(CMakePackage): homepage = "https://www.jcsda.org/jcsda-project-community-radiative-transfer-model" url = "https://github.com/NOAA-EMC/EMC_crtm/archive/refs/tags/v2.3.0.tar.gz" - maintainers = ["t-brown", "edwardhartnett", "kgerheiser", "Hang-Lei-NOAA"] + maintainers = [ + "t-brown", + "edwardhartnett", + "AlexanderRichert-NOAA", + "Hang-Lei-NOAA", + ] version("2.3.0", sha256="3e2c87ae5498c33dd98f9ede5c39e33ee7f298c7317b12adeb552e3a572700ce") diff --git a/var/spack/repos/builtin/packages/cub/package.py b/var/spack/repos/builtin/packages/cub/package.py index c704b47319f..ad115965cbf 100644 --- a/var/spack/repos/builtin/packages/cub/package.py +++ b/var/spack/repos/builtin/packages/cub/package.py @@ -39,6 +39,9 @@ class Cub(Package): version("1.7.1", sha256="50b8777b83093fdfdab429a61fccdbfbbb991b3bbc08385118e5ad58e8f62e1d") version("1.4.1", sha256="7c3784cf59f02d4a88099d6a11e357032bac9eac2b9c78aaec947d1270e21871") + def setup_dependent_build_environment(self, env, dependent_spec): + env.set("CUB_DIR", self.prefix.include.cub.cmake) + def install(self, spec, prefix): mkdirp(prefix.include) install_tree("cub", join_path(prefix.include, "cub")) diff --git a/var/spack/repos/builtin/packages/cuda/package.py b/var/spack/repos/builtin/packages/cuda/package.py index e8157b54aed..2375e318afe 100644 --- a/var/spack/repos/builtin/packages/cuda/package.py +++ b/var/spack/repos/builtin/packages/cuda/package.py @@ -583,3 +583,6 @@ def libs(self): if "compat" not in parts and "stubs" not in parts: filtered_libs.append(lib) return LibraryList(filtered_libs) + + # Avoid binding stub libraries by absolute path + non_bindable_shared_objects = ["stubs"] diff --git a/var/spack/repos/builtin/packages/curl/package.py 
b/var/spack/repos/builtin/packages/curl/package.py index 8923423efb1..26be294245f 100644 --- a/var/spack/repos/builtin/packages/curl/package.py +++ b/var/spack/repos/builtin/packages/curl/package.py @@ -129,7 +129,7 @@ def determine_variants(cls, exes, version): for exe in exes: variants = "" curl = Executable(exe) - output = curl("--version", output=str, error="str") + output = curl("--version", output=str, error=str) if "nghttp2" in output: variants += "+nghttp2" protocols_match = re.search(r"Protocols: (.*)\n", output) diff --git a/var/spack/repos/builtin/packages/cusz/package.py b/var/spack/repos/builtin/packages/cusz/package.py index 8a9e1358e50..2c1e78b4b06 100644 --- a/var/spack/repos/builtin/packages/cusz/package.py +++ b/var/spack/repos/builtin/packages/cusz/package.py @@ -6,19 +6,24 @@ from spack.package import * -class Cusz(MakefilePackage): - """cuSZ is a CUDA-based error-bounded lossy compressor for scientific - data (floating point and integers). - """ +class Cusz(CMakePackage, CudaPackage): + """A GPU accelerated error-bounded lossy compression for scientific data""" - homepage = "https://szcompressor.org" - url = "https://github.com/szcompressor/cuSZ/releases/download/v0.1.2/cuSZ-0.1.2.tar.gz" - git = "https://github.com/szcompressor/cuSZ" - maintainers = ["dingwentao", "jtian0"] + homepage = "https://szcompressor.org/" + git = "https://github.com/szcompressor/cusz" + url = "https://github.com/szcompressor/cuSZ/archive/refs/tags/v0.3.tar.gz" - version("master", branch="master") - version("0.1.2", sha256="c6e89a26b295724edefc8052f62653c5a315c78eaf6d5273299a8e11a5cf7363") + maintainers = ["jtian0", "dingwentao"] - def install(self, spec, prefix): - mkdir(prefix.bin) - install("bin/cusz", prefix.bin) + conflicts("~cuda") + conflicts("cuda_arch=none", when="+cuda") + + version("develop", branch="develop") + version("0.3", sha256="0feb4f7fd64879fe147624dd5ad164adf3983f79b2e0383d35724f8d185dcb11") + + depends_on("cub", when="^ cuda@:10.2.89") + + def 
cmake_args(self): + cuda_arch = self.spec.variants["cuda_arch"].value + args = ["-DBUILD_TESTING=OFF", ("-DCMAKE_CUDA_ARCHITECTURES=%s" % cuda_arch)] + return args diff --git a/var/spack/repos/builtin/packages/czmq/package.py b/var/spack/repos/builtin/packages/czmq/package.py index 60d1cdd71aa..a0b35774627 100644 --- a/var/spack/repos/builtin/packages/czmq/package.py +++ b/var/spack/repos/builtin/packages/czmq/package.py @@ -25,6 +25,14 @@ class Czmq(AutotoolsPackage): depends_on("uuid") depends_on("libzmq") + def flag_handler(self, name, flags): + iflags = [] + if name == "cflags": + if self.spec.satisfies("%oneapi@2022.2.0:"): + iflags.append("-Wno-error=gnu-null-pointer-arithmetic") + iflags.append("-Wno-error=strict-prototypes") + return (iflags, None, None) + def autoreconf(self, spec, prefix): autogen = Executable("./autogen.sh") autogen() diff --git a/var/spack/repos/builtin/packages/damask-grid/long-lines.patch b/var/spack/repos/builtin/packages/damask-grid/long-lines.patch new file mode 100644 index 00000000000..b3e0f962ab4 --- /dev/null +++ b/var/spack/repos/builtin/packages/damask-grid/long-lines.patch @@ -0,0 +1,21 @@ +--- damask.orig/src/CMakeLists.txt 2022-10-10 11:15:50.430977247 +0200 ++++ damask/src/CMakeLists.txt 2022-10-10 11:26:53.792092659 +0200 +@@ -2,6 +2,9 @@ + if(CMAKE_Fortran_COMPILER_ID STREQUAL "GNU") + # long lines for interaction matrix + set_source_files_properties("lattice.f90" PROPERTIES COMPILE_FLAGS "-ffree-line-length-240") ++ # CHKERRQ PETSc macro ++ set_source_files_properties("parallelization.f90" PROPERTIES COMPILE_FLAGS "-ffree-line-length-none") ++ set_source_files_properties("quit.f90" PROPERTIES COMPILE_FLAGS "-ffree-line-length-none") + endif() + + file(GLOB damask-sources CONFIGURE_DEPENDS *.f90 *.c) +@@ -18,7 +21,7 @@ + file(READ ${solver-source} content) + string(FIND "${content}" "CHKERR" found) + if(NOT ${found} EQUAL -1) +- set_source_files_properties(${solver-source} PROPERTIES COMPILE_FLAGS 
"-ffree-line-length-160") ++ set_source_files_properties(${solver-source} PROPERTIES COMPILE_FLAGS "-ffree-line-length-none") + endif() + endforeach() + diff --git a/var/spack/repos/builtin/packages/damask-grid/package.py b/var/spack/repos/builtin/packages/damask-grid/package.py index 1f62e857617..29fc78a99a4 100644 --- a/var/spack/repos/builtin/packages/damask-grid/package.py +++ b/var/spack/repos/builtin/packages/damask-grid/package.py @@ -15,6 +15,9 @@ class DamaskGrid(CMakePackage): maintainers = ["MarDiehl"] + version( + "3.0.0-alpha7", sha256="442b06b824441293e72ff91b211a555c5d497aedf62be1c4332c426558b848a4" + ) version( "3.0.0-alpha6", sha256="de6748c285558dec8f730c4301bfa56b4078c130ff80e3095faf76202f8d2109" ) @@ -25,15 +28,19 @@ class DamaskGrid(CMakePackage): "3.0.0-alpha4", sha256="0bb8bde43b27d852b1fb6e359a7157354544557ad83d87987b03f5d629ce5493" ) + depends_on("petsc@3.17.1:3.18", when="@3.0.0-alpha7") depends_on("petsc@3.16.5:3.16", when="@3.0.0-alpha6") depends_on("petsc@3.14.0:3.14,3.15.1:3.16", when="@3.0.0-alpha5") depends_on("petsc@3.14.0:3.14,3.15.1:3.15", when="@3.0.0-alpha4") depends_on("pkgconfig", type="build") depends_on("cmake@3.10:", type="build") depends_on("petsc+mpi+hdf5") + depends_on("hdf5@1.12:+mpi+fortran", when="@3.0.0-alpha7:") depends_on("hdf5@1.10:+mpi+fortran") depends_on("fftw+mpi") + depends_on("libfyaml", when="@3.0.0-alpha7:") + patch("long-lines.patch", when="@3.0.0-alpha7") patch("CMakeDebugRelease.patch", when="@3.0.0-alpha4") variant( @@ -44,7 +51,7 @@ class DamaskGrid(CMakePackage): ) def patch(self): - filter_file(" -lz ", " -lz ${FFTW_LIBS} ", "CMakeLists.txt") + filter_file(" -lz", " -lz ${FFTW_LIBS}", "CMakeLists.txt") def cmake_args(self): return [ diff --git a/var/spack/repos/builtin/packages/damask-mesh/long-lines.patch b/var/spack/repos/builtin/packages/damask-mesh/long-lines.patch new file mode 100644 index 00000000000..b3e0f962ab4 --- /dev/null +++ 
b/var/spack/repos/builtin/packages/damask-mesh/long-lines.patch @@ -0,0 +1,21 @@ +--- damask.orig/src/CMakeLists.txt 2022-10-10 11:15:50.430977247 +0200 ++++ damask/src/CMakeLists.txt 2022-10-10 11:26:53.792092659 +0200 +@@ -2,6 +2,9 @@ + if(CMAKE_Fortran_COMPILER_ID STREQUAL "GNU") + # long lines for interaction matrix + set_source_files_properties("lattice.f90" PROPERTIES COMPILE_FLAGS "-ffree-line-length-240") ++ # CHKERRQ PETSc macro ++ set_source_files_properties("parallelization.f90" PROPERTIES COMPILE_FLAGS "-ffree-line-length-none") ++ set_source_files_properties("quit.f90" PROPERTIES COMPILE_FLAGS "-ffree-line-length-none") + endif() + + file(GLOB damask-sources CONFIGURE_DEPENDS *.f90 *.c) +@@ -18,7 +21,7 @@ + file(READ ${solver-source} content) + string(FIND "${content}" "CHKERR" found) + if(NOT ${found} EQUAL -1) +- set_source_files_properties(${solver-source} PROPERTIES COMPILE_FLAGS "-ffree-line-length-160") ++ set_source_files_properties(${solver-source} PROPERTIES COMPILE_FLAGS "-ffree-line-length-none") + endif() + endforeach() + diff --git a/var/spack/repos/builtin/packages/damask-mesh/package.py b/var/spack/repos/builtin/packages/damask-mesh/package.py index 3507dfdff8a..7e9590fcea5 100644 --- a/var/spack/repos/builtin/packages/damask-mesh/package.py +++ b/var/spack/repos/builtin/packages/damask-mesh/package.py @@ -15,6 +15,9 @@ class DamaskMesh(CMakePackage): maintainers = ["MarDiehl"] + version( + "3.0.0-alpha7", sha256="442b06b824441293e72ff91b211a555c5d497aedf62be1c4332c426558b848a4" + ) version( "3.0.0-alpha6", sha256="de6748c285558dec8f730c4301bfa56b4078c130ff80e3095faf76202f8d2109" ) @@ -25,13 +28,16 @@ class DamaskMesh(CMakePackage): "3.0.0-alpha4", sha256="0bb8bde43b27d852b1fb6e359a7157354544557ad83d87987b03f5d629ce5493" ) + depends_on("petsc@3.17.1:3.18", when="@3.0.0-alpha7") depends_on("petsc@3.16.5:3.16", when="@3.0.0-alpha6") depends_on("petsc@3.14.0:3.14,3.15.1:3.16", when="@3.0.0-alpha5") 
depends_on("petsc@3.14.0:3.14,3.15.1:3.15", when="@3.0.0-alpha4") depends_on("pkgconfig", type="build") depends_on("cmake@3.10:", type="build") depends_on("petsc+mpi+hdf5") + depends_on("hdf5@1.12:+mpi+fortran", when="@3.0.0-alpha7:") depends_on("hdf5@1.10:+mpi+fortran") + depends_on("libfyaml", when="@3.0.0-alpha7:") patch("CMakeDebugRelease.patch", when="@3.0.0-alpha4") diff --git a/var/spack/repos/builtin/packages/damask/package.py b/var/spack/repos/builtin/packages/damask/package.py index 849d7acd1a1..953e82912ac 100644 --- a/var/spack/repos/builtin/packages/damask/package.py +++ b/var/spack/repos/builtin/packages/damask/package.py @@ -28,10 +28,15 @@ class Damask(BundlePackage): maintainers = ["MarDiehl"] + version("3.0.0-alpha7") version("3.0.0-alpha6") version("3.0.0-alpha5") version("3.0.0-alpha4") + depends_on("damask-grid@3.0.0-alpha7", when="@3.0.0-alpha7", type="run") + depends_on("damask-mesh@3.0.0-alpha7", when="@3.0.0-alpha7", type="run") + depends_on("py-damask@3.0.0-alpha7", when="@3.0.0-alpha7", type="run") + depends_on("damask-grid@3.0.0-alpha6", when="@3.0.0-alpha6", type="run") depends_on("damask-mesh@3.0.0-alpha6", when="@3.0.0-alpha6", type="run") depends_on("py-damask@3.0.0-alpha6", when="@3.0.0-alpha6", type="run") diff --git a/var/spack/repos/builtin/packages/dbow2/package.py b/var/spack/repos/builtin/packages/dbow2/package.py index 90c3f9f1c16..0851eff37a1 100644 --- a/var/spack/repos/builtin/packages/dbow2/package.py +++ b/var/spack/repos/builtin/packages/dbow2/package.py @@ -24,5 +24,5 @@ class Dbow2(CMakePackage): # See https://github.com/spack/spack/pull/22303 for reference depends_on(Boost.with_default_variants) depends_on("opencv+calib3d+features2d+highgui+imgproc") - depends_on("dlib") + depends_on("dorian3d-dlib") depends_on("eigen", type="link") diff --git a/var/spack/repos/builtin/packages/dealii/package.py b/var/spack/repos/builtin/packages/dealii/package.py index 222691c69ec..902e310be69 100644 --- 
a/var/spack/repos/builtin/packages/dealii/package.py +++ b/var/spack/repos/builtin/packages/dealii/package.py @@ -106,8 +106,11 @@ class Dealii(CMakePackage, CudaPackage): # more precisely its variation https://github.com/dealii/dealii/pull/5572#issuecomment-349742019 # 1.68.0 has issues with serialization https://github.com/dealii/dealii/issues/7074 # adopt https://github.com/boostorg/serialization/pull/105 as a fix + # + # dealii does not build with Boost 1.80.0 + # (https://github.com/spack/spack/pull/32879#issuecomment-1265933265) depends_on( - "boost@1.59.0:1.63,1.65.1,1.67.0:+thread+system+serialization+iostreams", + "boost@1.59.0:1.63,1.65.1,1.67.0:1.79+thread+system+serialization+iostreams", patches=[ patch("boost_1.65.1_singleton.patch", level=1, when="@1.65.1"), patch("boost_1.68.0.patch", level=1, when="@1.68.0"), @@ -115,7 +118,7 @@ class Dealii(CMakePackage, CudaPackage): when="~python", ) depends_on( - "boost@1.59.0:1.63,1.65.1,1.67.0:+thread+system+serialization+iostreams+python", + "boost@1.59.0:1.63,1.65.1,1.67.0:1.79+thread+system+serialization+iostreams+python", patches=[ patch("boost_1.65.1_singleton.patch", level=1, when="@1.65.1"), patch("boost_1.68.0.patch", level=1, when="@1.68.0"), diff --git a/var/spack/repos/builtin/packages/digitrounding/package.py b/var/spack/repos/builtin/packages/digitrounding/package.py new file mode 100644 index 00000000000..3351bdf0f42 --- /dev/null +++ b/var/spack/repos/builtin/packages/digitrounding/package.py @@ -0,0 +1,30 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class Digitrounding(CMakePackage): + """Standalone version of Digit rounding compressor""" + + homepage = "https://github.com/disheng222/digitroundingZ" + git = "https://github.com/disheng222/digitroundingZ" + + maintainers = ["robertu94"] + + version("master", branch="master") + version("2020-02-27", commit="7b18679aded7a85e6f221f7f5cd4f080f322bc33") + + depends_on("zlib") + + variant("shared", default=True, description="build shared libraries") + + def cmake_args(self): + args = [] + if "+shared" in self.spec: + args.append("-DBUILD_SHARED_LIBS=ON") + else: + args.append("-DBUILD_SHARED_LIBS=OFF") + return args diff --git a/var/spack/repos/builtin/packages/dihydrogen/package.py b/var/spack/repos/builtin/packages/dihydrogen/package.py index 40ffd50d2d0..33857729977 100644 --- a/var/spack/repos/builtin/packages/dihydrogen/package.py +++ b/var/spack/repos/builtin/packages/dihydrogen/package.py @@ -83,9 +83,9 @@ class Dihydrogen(CMakePackage, CudaPackage, ROCmPackage): for val in ROCmPackage.amdgpu_targets: depends_on("aluminum amdgpu_target=%s" % val, when="amdgpu_target=%s" % val) - for when in ["+cuda", "+distconv"]: - depends_on("cuda", when=when) - depends_on("cudnn", when=when) + depends_on("roctracer-dev", when="+rocm +distconv") + + depends_on("cudnn", when="+cuda") depends_on("cub", when="^cuda@:10") # Note that #1712 forces us to enumerate the different blas variants @@ -108,8 +108,8 @@ class Dihydrogen(CMakePackage, CudaPackage, ROCmPackage): depends_on("cray-libsci", when="blas=libsci") depends_on("cray-libsci +openmp", when="blas=libsci +openmp_blas") - # Distconv builds require cuda - conflicts("~cuda", when="+distconv") + # Distconv builds require cuda or rocm + conflicts("+distconv", when="~cuda ~rocm") conflicts("+distconv", when="+half") conflicts("+rocm", when="+half") @@ -120,6 +120,8 @@ class Dihydrogen(CMakePackage, CudaPackage, ROCmPackage): 
depends_on("ninja", type="build") depends_on("cmake@3.17.0:", type="build") + depends_on("spdlog", when="@:0.1,0.2:") + depends_on("llvm-openmp", when="%apple-clang +openmp") # TODO: Debug linker errors when NVSHMEM is built with UCX @@ -155,10 +157,14 @@ def cmake_args(self): "-DH2_ENABLE_DISTCONV_LEGACY=%s" % ("+distconv" in spec), "-DH2_ENABLE_OPENMP=%s" % ("+openmp" in spec), "-DH2_ENABLE_FP16=%s" % ("+half" in spec), - "-DH2_ENABLE_HIP_ROCM=%s" % ("+rocm" in spec), "-DH2_DEVELOPER_BUILD=%s" % ("+developer" in spec), ] + if spec.version < Version("0.3"): + args.append("-DH2_ENABLE_HIP_ROCM=%s" % ("+rocm" in spec)) + else: + args.append("-DH2_ENABLE_ROCM=%s" % ("+rocm" in spec)) + if not spec.satisfies("^cmake@3.23.0"): # There is a bug with using Ninja generator in this version # of CMake @@ -181,7 +187,7 @@ def cmake_args(self): if spec.satisfies("%cce") and spec.satisfies("^cuda+allow-unsupported-compilers"): args.append("-DCMAKE_CUDA_FLAGS=-allow-unsupported-compiler") - if "+cuda" in spec or "+distconv" in spec: + if "+cuda" in spec: args.append("-DcuDNN_DIR={0}".format(spec["cudnn"].prefix)) if spec.satisfies("^cuda@:10"): @@ -209,6 +215,12 @@ def cmake_args(self): "-DHIP_CXX_COMPILER={0}".format(self.spec["hip"].hipcc), ] ) + if "platform=cray" in spec: + args.extend( + [ + "-DMPI_ASSUME_NO_BUILTIN_MPI=ON", + ] + ) archs = self.spec.variants["amdgpu_target"].value if archs != "none": arch_str = ",".join(archs) diff --git a/var/spack/repos/builtin/packages/dlib/package.py b/var/spack/repos/builtin/packages/dlib/package.py index d733d00ff9f..61813a7e828 100644 --- a/var/spack/repos/builtin/packages/dlib/package.py +++ b/var/spack/repos/builtin/packages/dlib/package.py @@ -2,20 +2,37 @@ # Spack Project Developers. See the top-level COPYRIGHT file for details. 
# # SPDX-License-Identifier: (Apache-2.0 OR MIT) - - from spack.package import * class Dlib(CMakePackage): - """DLib is a collection of C++ classes to solve common tasks in C++ - programs, as well as to offer additional functionality to use OpenCV - data and to solve computer vision problems.""" + """toolkit containing machine learning algorithms and tools + for creating complex software in C++ to solve real world problems""" - homepage = "https://github.com/dorian3d/DLib" - git = "https://github.com/dorian3d/DLib.git" + homepage = "http://dlib.net/" + url = "https://github.com/davisking/dlib/archive/v19.19.tar.gz" + git = "https://github.com/davisking/dlib" + + maintainer = ["robertu94"] version("master", branch="master") + version("19.22", sha256="5f44b67f762691b92f3e41dcf9c95dd0f4525b59cacb478094e511fdacb5c096") + version("19.21", sha256="116f52e58be04b47dab52057eaad4b5c4d5c3032d927fe23d55b0741fc4107a0") + version("19.20", sha256="fc3f0986350e8e53aceadf95a71d2f413f1eedc469abda99a462cb528741d411") + version("19.19", sha256="7af455bb422d3ae5ef369c51ee64e98fa68c39435b0fa23be2e5d593a3d45b87") - depends_on("cmake@3.0:", type="build") - depends_on("opencv+calib3d+features2d+highgui+imgproc+imgcodecs+flann") + variant("shared", default=True, description="build the shared libraries") + + depends_on("zlib") + depends_on("libpng") + depends_on("libjpeg") + depends_on("blas") + depends_on("lapack") + depends_on("libsm") + depends_on("libx11") + + def cmake_args(self): + args = [] + if "+shared" in self.spec: + args.append("-DBUILD_SHARED_LIBS=ON") + return args diff --git a/var/spack/repos/builtin/packages/dorian3d-dlib/package.py b/var/spack/repos/builtin/packages/dorian3d-dlib/package.py new file mode 100644 index 00000000000..c71d7d32ded --- /dev/null +++ b/var/spack/repos/builtin/packages/dorian3d-dlib/package.py @@ -0,0 +1,21 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. 
See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +from spack.package import * + + +class Dorian3dDlib(CMakePackage): + """DLib is a collection of C++ classes to solve common tasks in C++ + programs, as well as to offer additional functionality to use OpenCV + data and to solve computer vision problems.""" + + homepage = "https://github.com/dorian3d/DLib" + git = "https://github.com/dorian3d/DLib.git" + + version("master", branch="master") + + depends_on("cmake@3.0:", type="build") + depends_on("opencv+calib3d+features2d+highgui+imgproc+imgcodecs+flann") diff --git a/var/spack/repos/builtin/packages/eckit/package.py b/var/spack/repos/builtin/packages/eckit/package.py index 52849a831c0..6a6a31c142a 100644 --- a/var/spack/repos/builtin/packages/eckit/package.py +++ b/var/spack/repos/builtin/packages/eckit/package.py @@ -137,4 +137,12 @@ def cmake_args(self): # (the LAPACK backend is still built though): args.append(self.define("ENABLE_LAPACK", "linalg=lapack" in self.spec)) + if "+admin" in self.spec and "+termlib" in self.spec["ncurses"]: + # Make sure that libeckit_cmd is linked to a library that resolves 'setupterm', + # 'tputs', etc. That is either libncurses (when 'ncurses~termlib') or libtinfo (when + # 'ncurses+termlib'). CMake considers the latter only if CURSES_NEED_NCURSES is set to + # TRUE. Note that the installation of eckit does not fail without this but the building + # of a dependent package (e.g. fdb) might fail due to the undefined references. + args.append(self.define("CURSES_NEED_NCURSES", True)) + return args diff --git a/var/spack/repos/builtin/packages/ecp-data-vis-sdk/package.py b/var/spack/repos/builtin/packages/ecp-data-vis-sdk/package.py index 31c3fc06ea2..e0f6676600e 100644 --- a/var/spack/repos/builtin/packages/ecp-data-vis-sdk/package.py +++ b/var/spack/repos/builtin/packages/ecp-data-vis-sdk/package.py @@ -15,17 +15,22 @@ def dav_sdk_depends_on(spec, when=None, propagate=None): # ie. 
A +c ~b -> A spec = Spec(spec).name - if "+" in when and len(when.split()) == 1: - when_not = when.replace("+", "~") - # If the package is in the spec tree then it must - # be enabled in the SDK. - conflicts(when_not, "^" + spec) + # If the package is in the spec tree then it must be enabled in the SDK. + if "+" in when: + _when_variants = when.strip("+").split("+") + if any(tok in when for tok in ["~", "="]): + tty.error("Bad token in when clause, only positive boolean tokens allowed") + + for variant in _when_variants: + conflicts("~" + variant, when="^" + spec) # Skip if there is nothing to propagate if not propagate: return - # Map the propagated variants to the dependency variant + # Map the propagated variants to the dependency variant. Some packages may need + # overrides to propagate a dependency as something else, e.g., {"visit": "libsim"}. + # Most call-sites will just use a list. if not type(propagate) is dict: propagate = dict([(v, v) for v in propagate]) @@ -108,6 +113,18 @@ class EcpDataVisSdk(BundlePackage, CudaPackage, ROCmPackage): dav_sdk_depends_on("faodel+shared+mpi network=libfabric", when="+faodel", propagate=["hdf5"]) dav_sdk_depends_on("hdf5@1.12: +shared+mpi", when="+hdf5", propagate=["fortran"]) + # hdf5-vfd-gds needs cuda@11.7.1 or later, only enable when 11.7.1+ available. 
+ depends_on( + "hdf5-vfd-gds@1.0.2:", + when="+cuda+hdf5^cuda@11.7.1:", + ) + for cuda_arch in cuda_arch_variants: + depends_on( + "hdf5-vfd-gds@1.0.2: {0}".format(cuda_arch), + when="+cuda+hdf5 {0} ^cuda@11.7.1:".format(cuda_arch), + ) + conflicts("~cuda", when="^hdf5-vfd-gds@1.0.2:") + conflicts("~hdf5", when="^hdf5-vfd-gds@1.0.2:") dav_sdk_depends_on("parallel-netcdf+shared", when="+pnetcdf", propagate=["fortran"]) diff --git a/var/spack/repos/builtin/packages/elfutils/package.py b/var/spack/repos/builtin/packages/elfutils/package.py index c84cd4deee3..a865d9785a4 100644 --- a/var/spack/repos/builtin/packages/elfutils/package.py +++ b/var/spack/repos/builtin/packages/elfutils/package.py @@ -22,6 +22,10 @@ class Elfutils(AutotoolsPackage, SourcewarePackage): list_url = "https://sourceware.org/elfutils/ftp" list_depth = 1 + maintainers = ["mwkrentel"] + + version("0.188", sha256="fb8b0e8d0802005b9a309c60c1d8de32dd2951b56f0c3a3cb56d21ce01595dff") + version("0.187", sha256="e70b0dfbe610f90c4d1fe0d71af142a4e25c3c4ef9ebab8d2d72b65159d454c8") version("0.186", sha256="7f6fb9149b1673d38d9178a0d3e0fb8a1ec4f53a9f4c2ff89469609879641177") version("0.185", sha256="dc8d3e74ab209465e7f568e1b3bb9a5a142f8656e2b57d10049a73da2ae6b5a6") version("0.184", sha256="87e7d1d7f0333815dd1f62135d047a4dc4082068f361452f357997c11360644b") @@ -43,13 +47,19 @@ class Elfutils(AutotoolsPackage, SourcewarePackage): # Libraries for reading compressed DWARF sections. variant("bzip2", default=False, description="Support bzip2 compressed sections.") variant("xz", default=False, description="Support xz (lzma) compressed sections.") + variant("zstd", default=False, description="Support zstd compressed sections.", when="@0.182:") # Native language support from libintl. 
variant("nls", default=True, description="Enable Native Language Support.") # libdebuginfod support # NB: For 0.181 and newer, this enables _both_ the client and server - variant("debuginfod", default=False, description="Enable libdebuginfod support.") + variant( + "debuginfod", + default=False, + description="Enable libdebuginfod support.", + when="@0.179:", + ) # elfutils-0.185-static-inline.patch # elflint.c (buffer_left): Mark as 'inline' to avoid external linkage failure. @@ -61,6 +71,7 @@ class Elfutils(AutotoolsPackage, SourcewarePackage): depends_on("bzip2", type="link", when="+bzip2") depends_on("xz", type="link", when="+xz") + depends_on("zstd", type="link", when="+zstd") depends_on("zlib", type="link") depends_on("gettext", when="+nls") depends_on("m4", type="build") @@ -73,7 +84,6 @@ class Elfutils(AutotoolsPackage, SourcewarePackage): depends_on("curl@7.29.0:", type="link", when="+debuginfod") conflicts("%gcc@7.2.0:", when="@0.163") - conflicts("+debuginfod", when="@:0.178") provides("elf@1") @@ -107,6 +117,8 @@ def configure_args(self): else: args.append("--without-lzma") + args.extend(self.with_or_without("zstd", activation_value="prefix")) + # zlib is required args.append("--with-zlib=%s" % spec["zlib"].prefix) diff --git a/var/spack/repos/builtin/packages/environment-modules/package.py b/var/spack/repos/builtin/packages/environment-modules/package.py index f33debb6eab..3d0fb99fc1c 100644 --- a/var/spack/repos/builtin/packages/environment-modules/package.py +++ b/var/spack/repos/builtin/packages/environment-modules/package.py @@ -13,10 +13,11 @@ class EnvironmentModules(Package): """ homepage = "https://cea-hpc.github.io/modules/" - url = "https://github.com/cea-hpc/modules/releases/download/v5.1.1/modules-5.1.1.tar.gz" + url = "https://github.com/cea-hpc/modules/releases/download/v5.2.0/modules-5.2.0.tar.gz" maintainers = ["xdelaruelle"] + version("5.2.0", sha256="48f9f10864303df628a48cab17074820a6251ad8cd7d66dd62aa7798af479254") version("5.1.1", 
sha256="1985f79e0337f63d6564b08db0238cf96a276a4184def822bb8ad37996dc8295") version("5.1.0", sha256="1ab1e859b9c8bca8a8d332945366567fae4cf8dd7e312a689daaff46e7ffa949") version("5.0.1", sha256="33a598eaff0713de09e479c2636ecde188b982584e56377f234e5065a61be7ba") diff --git a/var/spack/repos/builtin/packages/esmf/package.py b/var/spack/repos/builtin/packages/esmf/package.py index dcfde5415b6..c7c9b4bb03b 100644 --- a/var/spack/repos/builtin/packages/esmf/package.py +++ b/var/spack/repos/builtin/packages/esmf/package.py @@ -255,7 +255,7 @@ def edit(self, spec, prefix): # ESMF_COMM must be set to indicate which MPI implementation # is used to build the ESMF library. if "+mpi" in spec: - if "platform=cray" in self.spec: + if "^cray-mpich" in self.spec: os.environ["ESMF_COMM"] = "mpi" elif "^mvapich2" in spec: os.environ["ESMF_COMM"] = "mvapich2" diff --git a/var/spack/repos/builtin/packages/exago/package.py b/var/spack/repos/builtin/packages/exago/package.py index b85f72e9d8d..c3a48e23af4 100644 --- a/var/spack/repos/builtin/packages/exago/package.py +++ b/var/spack/repos/builtin/packages/exago/package.py @@ -13,7 +13,7 @@ class Exago(CMakePackage, CudaPackage, ROCmPackage): homepage = "https://gitlab.pnnl.gov/exasgd/frameworks/exago" git = "https://gitlab.pnnl.gov/exasgd/frameworks/exago.git" - maintainers = ["ashermancinelli", "CameronRutherford", "pelesh"] + maintainers = ["ryandanehy", "CameronRutherford", "pelesh"] version( "1.4.1", commit="ea607c685444b5f345bfdc9a59c345f0f30adde2", submodules=True, preferred=True @@ -46,7 +46,7 @@ class Exago(CMakePackage, CudaPackage, ROCmPackage): conflicts("~hiop~ipopt", msg="ExaGO needs at least one solver enabled") - # Dependencides + # Dependencies depends_on("pkgconfig", type="build") depends_on("mpi", when="+mpi") depends_on("blas") @@ -62,6 +62,40 @@ class Exago(CMakePackage, CudaPackage, ROCmPackage): depends_on("cmake@3.18:", type="build") + # Profiling + depends_on( + "hiop+deepchecking build_type=RelWithDebInfo", 
when="+hiop build_type=RelWithDebInfo" + ) + depends_on("hiop~deepchecking build_type=Release ", when="+hiop build_type=Release ") + + # Control the package's build-type depending on the release or debug flag + for pkg in [ + ("raja", "raja"), + ("umpire", "raja"), + ("magma", "hiop+cuda"), + ("magma", "hiop+rocm"), + ("camp", "raja"), + ]: + depends_on( + "{0} build_type=Release".format(pkg[0]), when="+{0} build_type=Release".format(pkg[1]) + ) + depends_on( + "{0} build_type=RelWithDebInfo".format(pkg[0]), + when="+{0} build_type=RelWithDebInfo".format(pkg[1]), + ) + + depends_on( + "{0} build_type=Release".format("hiop+ginkgo ^ginkgo"), + when="+{0} build_type=Release".format("hiop ^hiop+ginkgo"), + ) + depends_on( + "{0} build_type=Debug".format("hiop+ginkgo ^ginkgo"), + when="+{0} build_type=RelWithDebInfo".format("hiop ^hiop+ginkgo"), + ) + # depends_on("hpctoolkit", when="with_profiling=hpctoolkit") + # depends_on("tau", when="with_profiling=tau") + # ^ need to depend when both hpctoolkit and tau + # HiOp dependency logic depends_on("hiop+raja", when="+hiop+raja") depends_on("hiop@0.3.99:", when="@0.99:+hiop") @@ -102,6 +136,17 @@ def cmake_args(self): args = [] spec = self.spec + if "~mpi" in self.spec: + args.append(self.define("CMAKE_C_COMPILER", os.environ["CC"])) + args.append(self.define("CMAKE_CXX_COMPILER", os.environ["CXX"])) + else: + args.append(self.define("CMAKE_C_COMPILER", spec["mpi"].mpicc)) + args.append(self.define("CMAKE_CXX_COMPILER", spec["mpi"].mpicxx)) + args.append(self.define("MPI_C_COMPILER", spec["mpi"].mpicc)) + args.append(self.define("MPI_CXX_COMPILER", spec["mpi"].mpicxx)) + if "+cuda" in spec: + args.append(self.define("MPI_CXX_HEADER_DIR", spec["mpi"].prefix.include)) + # NOTE: If building with spack develop on a cluster, you may want to # change the ctest launch command to use your job scheduler like so: # diff --git a/var/spack/repos/builtin/packages/exodusii/package.py b/var/spack/repos/builtin/packages/exodusii/package.py 
index c9cd90eedcd..72963398d94 100644 --- a/var/spack/repos/builtin/packages/exodusii/package.py +++ b/var/spack/repos/builtin/packages/exodusii/package.py @@ -56,6 +56,7 @@ class Exodusii(CMakePackage): version("master", branch="master") variant("mpi", default=True, description="Enables MPI parallelism.") + variant("fortran", default=False, description="Build Fortran wrapper libraries.") depends_on("cmake@2.8.11:", type="build") depends_on("mpi", when="+mpi") @@ -88,6 +89,16 @@ def cmake_args(self): "-DCMAKE_C_COMPILER={0}".format(cc_path), "-DCMAKE_CXX_COMPILER={0}".format(cxx_path), ] + if "+fortran" in spec: + fc_path = spec["mpi"].mpifc if "+mpi" in spec else self.compiler.f90 + options.extend( + [ + "-DSEACASProj_ENABLE_Fortran:BOOL=ON", + "-DCMAKE_Fortran_COMPILER={0}".format(fc_path), + "-DSEACASProj_ENABLE_SEACASExodus_for:BOOL=ON", + "-DSEACASProj_ENABLE_SEACASExoIIv2for32:BOOL=ON", + ] + ) # Python # # Handle v2016 separately because of older tribits if spec.satisfies("@:2016-08-09"): diff --git a/var/spack/repos/builtin/packages/fides/package.py b/var/spack/repos/builtin/packages/fides/package.py index a62be5696a4..2acfdb5eaec 100644 --- a/var/spack/repos/builtin/packages/fides/package.py +++ b/var/spack/repos/builtin/packages/fides/package.py @@ -18,17 +18,14 @@ class Fides(CMakePackage): version("master", branch="master") version("1.1.0", sha256="40d2e08b8d5cfdfc809eae6ed2ae0731108ce3b1383485f4934a5ec8aaa9425e") version("1.0.0", sha256="c355fdb4ca3790c1fa9a4491a0d294b8f883b6946c540ad9e5633c9fd8c8c3aa") - variant("mpi", default=True, description="build mpi support") - variant("adios2", default=True, description="build ADIOS2 support") - variant("vtk-m", default=True, description="build VTK-m support") # Certain CMake versions have been found to break for our use cases depends_on("cmake@3.14.1:3.14,3.18.2:", type="build") depends_on("mpi", when="+mpi") - depends_on("adios2~zfp", when="+adios2") - depends_on("vtk-m@:1.7", when="+vtk-m") + 
depends_on("adios2~zfp") + depends_on("vtk-m") # Fix missing implicit includes @when("%gcc@7:") diff --git a/var/spack/repos/builtin/packages/flac/package.py b/var/spack/repos/builtin/packages/flac/package.py index 0ff0527fe1d..7d6f5b6bcac 100644 --- a/var/spack/repos/builtin/packages/flac/package.py +++ b/var/spack/repos/builtin/packages/flac/package.py @@ -12,10 +12,10 @@ class Flac(AutotoolsPackage): homepage = "https://xiph.org/flac/index.html" url = "http://downloads.xiph.org/releases/flac/flac-1.3.2.tar.xz" + version("1.4.2", sha256="e322d58a1f48d23d9dd38f432672865f6f79e73a6f9cc5a5f57fcaa83eb5a8e4") version("1.3.3", sha256="213e82bd716c9de6db2f98bcadbc4c24c7e2efe8c75939a1a84e28539c4e1748") version("1.3.2", sha256="91cfc3ed61dc40f47f050a109b08610667d73477af6ef36dcad31c31a4a8d53f") version("1.3.1", sha256="4773c0099dba767d963fd92143263be338c48702172e8754b9bc5103efe1c56c") version("1.3.0", sha256="fa2d64aac1f77e31dfbb270aeb08f5b32e27036a52ad15e69a77e309528010dc") - depends_on("libvorbis") - depends_on("id3lib") + depends_on("libogg@1.1.2:") diff --git a/var/spack/repos/builtin/packages/flex/package.py b/var/spack/repos/builtin/packages/flex/package.py index 8f3d496bb57..f0754ec86d8 100644 --- a/var/spack/repos/builtin/packages/flex/package.py +++ b/var/spack/repos/builtin/packages/flex/package.py @@ -113,20 +113,19 @@ def configure_args(self): args += self.enable_or_disable("nls") return args - @run_after("install") + @run_after("install", when="+lex") def symlink_lex(self): """Install symlinks for lex compatibility.""" - if self.spec.satisfies("+lex"): - dso = dso_suffix - for dir, flex, lex in ( - (self.prefix.bin, "flex", "lex"), - (self.prefix.lib, "libfl.a", "libl.a"), - (self.prefix.lib, "libfl." + dso, "libl." + dso), - (self.prefix.lib64, "libfl.a", "libl.a"), - (self.prefix.lib64, "libfl." + dso, "libl." 
+ dso), - ): + dso = dso_suffix + for dir, flex, lex in ( + (self.prefix.bin, "flex", "lex"), + (self.prefix.lib, "libfl.a", "libl.a"), + (self.prefix.lib, "libfl." + dso, "libl." + dso), + (self.prefix.lib64, "libfl.a", "libl.a"), + (self.prefix.lib64, "libfl." + dso, "libl." + dso), + ): - if os.path.isdir(dir): - with working_dir(dir): - if os.path.isfile(flex) and not os.path.lexists(lex): - symlink(flex, lex) + if os.path.isdir(dir): + with working_dir(dir): + if os.path.isfile(flex) and not os.path.lexists(lex): + symlink(flex, lex) diff --git a/var/spack/repos/builtin/packages/flux-core/package.py b/var/spack/repos/builtin/packages/flux-core/package.py index abaca92b3f3..096655f303c 100644 --- a/var/spack/repos/builtin/packages/flux-core/package.py +++ b/var/spack/repos/builtin/packages/flux-core/package.py @@ -111,7 +111,7 @@ class FluxCore(AutotoolsPackage): variant("cuda", default=False, description="Build dependencies with support for CUDA") depends_on("libarchive", when="@0.38.0:") - depends_on("ncurses@6.2", when="@0.32.0:") + depends_on("ncurses@6.2:", when="@0.32.0:") depends_on("libzmq@4.0.4:") depends_on("czmq@3.0.1:") depends_on("hwloc@1.11.1:1", when="@:0.17.0") diff --git a/var/spack/repos/builtin/packages/fms/package.py b/var/spack/repos/builtin/packages/fms/package.py index 5f82e39918e..53400dc3e7d 100644 --- a/var/spack/repos/builtin/packages/fms/package.py +++ b/var/spack/repos/builtin/packages/fms/package.py @@ -13,11 +13,17 @@ class Fms(CMakePackage): system models.""" homepage = "https://github.com/NOAA-GFDL/FMS" - url = "https://github.com/NOAA-GFDL/FMS/archive/refs/tags/2022.02.tar.gz" + url = "https://github.com/NOAA-GFDL/FMS/archive/refs/tags/2022.04.tar.gz" git = "https://github.com/NOAA-GFDL/FMS.git" - maintainers = ["kgerheiser", "Hang-Lei-NOAA", "edwardhartnett", "rem1776"] + maintainers = [ + "AlexanderRichert-NOAA", + "Hang-Lei-NOAA", + "edwardhartnett", + "rem1776", + ] + version("2022.04", 
sha256="f741479128afc2b93ca8291a4c5bcdb024a8cbeda1a26bf77a236c0f629e1b03") version("2022.03", sha256="42d2ac53d3c889a8177a6d7a132583364c0f6e5d5cbde0d980443b6797ad4838") version("2022.02", sha256="ad4978302b219e11b883b2f52519e1ee455137ad947474abb316c8654f72c874") version("2022.01", sha256="a1cba1f536923f5953c28729a28e5431e127b45d6bc2c15d230939f0c02daa9b") diff --git a/var/spack/repos/builtin/packages/fmt/package.py b/var/spack/repos/builtin/packages/fmt/package.py index 7a79ad0da66..0c51c1a5af9 100644 --- a/var/spack/repos/builtin/packages/fmt/package.py +++ b/var/spack/repos/builtin/packages/fmt/package.py @@ -13,6 +13,7 @@ class Fmt(CMakePackage): homepage = "https://fmt.dev/" url = "https://github.com/fmtlib/fmt/releases/download/7.1.3/fmt-7.1.3.zip" + maintainers = ["msimberg"] version("9.1.0", sha256="cceb4cb9366e18a5742128cb3524ce5f50e88b476f1e54737a47ffdf4df4c996") version("9.0.0", sha256="fc96dd2d2fdf2bded630787adba892c23cb9e35c6fd3273c136b0c57d4651ad6") @@ -70,6 +71,13 @@ class Fmt(CMakePackage): # Only allow [[attributes]] on C++11 and higher patch("fmt-attributes-cpp11_4.1.0.patch", when="@4.1.0") + # Fix compilation with hipcc/dpcpp: https://github.com/fmtlib/fmt/issues/3005 + patch( + "https://github.com/fmtlib/fmt/commit/0b0f7cfbfcebd021c910078003d413354bd843e2.patch?full_index=1", + sha256="08fb707bf8b4fc890d6eed29217ead666558cbae38f9249e22ddb82212f0eb4a", + when="@9.0.0:9.1.0", + ) + def cmake_args(self): spec = self.spec args = [] diff --git a/var/spack/repos/builtin/packages/fpzip/package.py b/var/spack/repos/builtin/packages/fpzip/package.py new file mode 100644 index 00000000000..46d30da2a5d --- /dev/null +++ b/var/spack/repos/builtin/packages/fpzip/package.py @@ -0,0 +1,19 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class Fpzip(CMakePackage): + """fpzip compressor""" + + homepage = "https://github.com/llnl/fpzip" + url = "https://github.com/LLNL/fpzip/releases/download/1.3.0/fpzip-1.3.0.tar.gz" + git = "https://github.com/llnl/fpzip" + + maintainers = ["robertu94"] + + version("master", branch="master") + version("1.3.0", sha256="248df7d84259e3feaa4c4797956b2a77c3fcd734e8f8fdc51ce171dcf4f0136c") diff --git a/var/spack/repos/builtin/packages/g2/package.py b/var/spack/repos/builtin/packages/g2/package.py index 821d3110412..7870a6b2533 100644 --- a/var/spack/repos/builtin/packages/g2/package.py +++ b/var/spack/repos/builtin/packages/g2/package.py @@ -16,7 +16,12 @@ class G2(CMakePackage): homepage = "https://noaa-emc.github.io/NCEPLIBS-g2" url = "https://github.com/NOAA-EMC/NCEPLIBS-g2/archive/refs/tags/v3.4.3.tar.gz" - maintainers = ["t-brown", "kgerheiser", "Hang-Lei-NOAA", "edwardhartnett"] + maintainers = [ + "t-brown", + "AlexanderRichert-NOAA", + "Hang-Lei-NOAA", + "edwardhartnett", + ] version("3.4.5", sha256="c18e991c56964953d778632e2d74da13c4e78da35e8d04cb742a2ca4f52737b6") version("3.4.3", sha256="679ea99b225f08b168cbf10f4b29f529b5b011232f298a5442ce037ea84de17c") diff --git a/var/spack/repos/builtin/packages/g2c/package.py b/var/spack/repos/builtin/packages/g2c/package.py index ddcb1414ee0..31f00a4059c 100644 --- a/var/spack/repos/builtin/packages/g2c/package.py +++ b/var/spack/repos/builtin/packages/g2c/package.py @@ -14,7 +14,11 @@ class G2c(CMakePackage): homepage = "https://github.com/NOAA-EMC/NCEPLIBS-g2c" url = "https://github.com/NOAA-EMC/NCEPLIBS-g2c/archive/refs/tags/v1.6.4.tar.gz" - maintainers = ["kgerheiser", "Hang-Lei-NOAA", "edwardhartnett"] + maintainers = [ + "AlexanderRichert-NOAA", + "Hang-Lei-NOAA", + "edwardhartnett", + ] variant("png", default=True) variant("jasper", default=True) diff --git a/var/spack/repos/builtin/packages/g2tmpl/package.py 
b/var/spack/repos/builtin/packages/g2tmpl/package.py index 36163acd437..d4a7d5d51c6 100644 --- a/var/spack/repos/builtin/packages/g2tmpl/package.py +++ b/var/spack/repos/builtin/packages/g2tmpl/package.py @@ -14,7 +14,12 @@ class G2tmpl(CMakePackage): homepage = "https://github.com/NOAA-EMC/NCEPLIBS-g2tmpl" url = "https://github.com/NOAA-EMC/NCEPLIBS-g2tmpl/archive/refs/tags/v1.10.0.tar.gz" - maintainers = ["t-brown", "edwardhartnett", "kgerheiser", "Hang-Lei-NOAA"] + maintainers = [ + "t-brown", + "edwardhartnett", + "AlexanderRichert-NOAA", + "Hang-Lei-NOAA", + ] version("1.10.2", sha256="4063361369f3691f75288c801fa9d1a2414908b7d6c07bbf69d4165802e2a7fc") version("1.10.1", sha256="0be425e5128fabb89915a92261aa75c27a46a3e115e00c686fc311321e5d1e2a") diff --git a/var/spack/repos/builtin/packages/gasnet/package.py b/var/spack/repos/builtin/packages/gasnet/package.py index efcb23828d1..baec00cd59e 100644 --- a/var/spack/repos/builtin/packages/gasnet/package.py +++ b/var/spack/repos/builtin/packages/gasnet/package.py @@ -31,12 +31,13 @@ class Gasnet(Package, CudaPackage, ROCmPackage): maintainers = ["PHHargrove", "bonachea"] - tags = ["e4s"] + tags = ["e4s", "ecp"] version("develop", branch="develop") version("main", branch="stable") version("master", branch="master") + version("2022.9.0", sha256="6873ff4ad8ebee49da4378f2d78095a6ccc31333d6ae4cd739b9f772af11f936") version("2022.3.0", sha256="91b59aa84c0680c807e00d3d1d8fa7c33c1aed50b86d1616f93e499620a9ba09") version("2021.9.0", sha256="1b6ff6cdad5ecf76b92032ef9507e8a0876c9fc3ee0ab008de847c1fad0359ee") version("2021.3.0", sha256="8a40fb3fa8bacc3922cd4d45217816fcb60100357ab97fb622a245567ea31747") @@ -53,9 +54,9 @@ class Gasnet(Package, CudaPackage, ROCmPackage): description="The hardware-dependent network backends to enable.\n" + "(smp) = SMP conduit for single-node operation ;\n" + "(ibv) = Native InfiniBand verbs conduit ;\n" + + "(ofi) = OFI conduit over libfabric, for HPE Cray Slingshot and Intel Omni-Path ;\n" + "(udp) 
= Portable UDP conduit, for Ethernet networks ;\n" + "(mpi) = Low-performance/portable MPI conduit ;\n" - + "(ofi) = EXPERIMENTAL Portable OFI conduit over libfabric ;\n" + "(ucx) = EXPERIMENTAL UCX conduit for Mellanox IB/RoCE ConnectX-5+ ;\n" + "For detailed recommendations, consult https://gasnet.lbl.gov", ) @@ -164,6 +165,7 @@ def test(self): } os.environ["GASNET_VERBOSEENV"] = "1" # include diagnostic info + os.environ["GASNET_SPAWN_VERBOSE"] = "1" # include spawning diagnostics if "GASNET_SSH_SERVERS" not in os.environ: os.environ["GASNET_SSH_SERVERS"] = "localhost " * 4 diff --git a/var/spack/repos/builtin/packages/gcc/package.py b/var/spack/repos/builtin/packages/gcc/package.py index 67b065a107c..f36980d8e70 100644 --- a/var/spack/repos/builtin/packages/gcc/package.py +++ b/var/spack/repos/builtin/packages/gcc/package.py @@ -206,9 +206,24 @@ class Gcc(AutotoolsPackage, GNUMirrorPackage): provides("golang@:1.4", when="@5:") provides("golang@:1.6.1", when="@6:") provides("golang@:1.8", when="@7:") + provides("golang@:1.10", when="@8:") + provides("golang@:1.12", when="@9:") + provides("golang@:1.14", when="@10:") + provides("golang@:1.16", when="@11:") + provides("golang@:1.18", when="@11:") # GCC 4.6 added support for the Go programming language. # See https://gcc.gnu.org/gcc-4.6/changes.html conflicts("@:4.5", msg="support for Go has been added in GCC 4.6") + # aarch64 machines (including Macs with Apple silicon) can't use + # go-bootstrap because it pre-dates aarch64 support in Go. When not + # using an external go bootstrap go, These machines have to rely on + # Go support in gcc (which may require compiling a version of gcc + # with Go support just to satisfy this requirement). However, + # there's also a bug in some versions of GCC's Go front-end that prevents + # these versions from properly bootstrapping Go. (See issue #47771 + # https://github.com/golang/go/issues/47771 ) On the 10.x branch, we need + # at least 10.4. 
On the 11.x branch, we need at least 11.3: + provides("go-external-or-gccgo-bootstrap", when="gcc@10.4.0:10,11.3.0:target=aarch64:") # Go is not supported on macOS conflicts("platform=darwin", msg="Go not supported on MacOS") @@ -442,7 +457,7 @@ class Gcc(AutotoolsPackage, GNUMirrorPackage): @classproperty def executables(cls): - names = [r"gcc", r"[^\w]?g\+\+", r"gfortran", r"gdc"] + names = [r"gcc", r"[^\w]?g\+\+", r"gfortran", r"gdc", r"gccgo"] suffixes = [r"", r"-mp-\d+\.\d", r"-\d+\.\d", r"-\d+", r"\d\d"] return [r"".join(x) for x in itertools.product(names, suffixes)] @@ -520,6 +535,9 @@ def determine_variants(cls, exes, version_str): elif "gcc" in basename: languages.add("c") compilers["c"] = exe + elif "gccgo" in basename: + languages.add("go") + compilers["go"] = exe elif "gdc" in basename: languages.add("d") compilers["d"] = exe diff --git a/var/spack/repos/builtin/packages/gdal/package.py b/var/spack/repos/builtin/packages/gdal/package.py index c9b6f9bafd5..e0f1072964f 100644 --- a/var/spack/repos/builtin/packages/gdal/package.py +++ b/var/spack/repos/builtin/packages/gdal/package.py @@ -6,11 +6,13 @@ import os import sys +from spack.build_systems.autotools import AutotoolsBuilder +from spack.build_systems.cmake import CMakeBuilder from spack.package import * from spack.util.environment import filter_system_paths -class Gdal(CMakePackage): +class Gdal(CMakePackage, AutotoolsPackage, PythonExtension): """GDAL: Geospatial Data Abstraction Library. 
GDAL is a translator library for raster and vector geospatial data formats that @@ -28,6 +30,7 @@ class Gdal(CMakePackage): maintainers = ["adamjstewart"] + version("3.5.3", sha256="d32223ddf145aafbbaec5ccfa5dbc164147fb3348a3413057f9b1600bb5b3890") version("3.5.2", sha256="0874dfdeb9ac42e53c37be4184b19350be76f0530e1f4fa8004361635b9030c2") version("3.5.1", sha256="d12c30a9eacdeaab493c0d1c9f88eb337c9cbb5bb40744c751bdd5a5af166ab6") version("3.5.0", sha256="d49121e5348a51659807be4fb866aa840f8dbec4d1acba6d17fdefa72125bfc9") @@ -83,7 +86,9 @@ class Gdal(CMakePackage): default=False, description="Speed up computations related to the Thin Plate Spline transformer", ) - variant("arrow", default=False, when="@3.5:", description="Required for Arrow driver") + variant( + "arrow", default=False, when="build_system=cmake", description="Required for Arrow driver" + ) variant("blosc", default=False, when="@3.4:", description="Required for Zarr driver") variant("brunsli", default=True, when="@3.4:", description="Required for MRF driver") variant("bsb", default=False, when="@:2", description="Required for BSB driver") @@ -136,23 +141,41 @@ class Gdal(CMakePackage): "mrsid_lidar", default=False, when="@:3.4", description="Required for MrSID/MG4 driver" ) variant( - "mssql_ncli", default=False, when="@3.5:", description="Required for MSSQLSpatial driver" + "mssql_ncli", + default=False, + when="build_system=cmake", + description="Required for MSSQLSpatial driver", ) variant( - "mssql_odbc", default=False, when="@3.5:", description="Required for MSSQLSpatial driver" + "mssql_odbc", + default=False, + when="build_system=cmake", + description="Required for MSSQLSpatial driver", ) variant("mysql", default=False, description="Required for MySQL driver") variant("netcdf", default=False, description="Required for NetCDF driver") variant("odbc", default=False, description="Required for many OGR drivers") - variant("odbccpp", default=False, when="@3.5:", description="Required for SAP HANA 
driver") + variant( + "odbccpp", + default=False, + when="build_system=cmake", + description="Required for SAP HANA driver", + ) variant("ogdi", default=False, description="Required for OGDI driver") - variant("opencad", default=False, when="@3.5:", description="Required for CAD driver") + variant( + "opencad", default=False, when="build_system=cmake", description="Required for CAD driver" + ) variant("opencl", default=False, description="Required to accelerate warping computations") variant("openexr", default=False, when="@3.1:", description="Required for EXR driver") variant("openjpeg", default=False, description="Required for JP2OpenJPEG driver") variant("openssl", default=False, when="@2.3:", description="Required for EEDAI driver") variant("oracle", default=False, description="Required for OCI and GeoRaster drivers") - variant("parquet", default=False, when="@3.5:", description="Required for Parquet driver") + variant( + "parquet", + default=False, + when="build_system=cmake", + description="Required for Parquet driver", + ) variant("pcidsk", default=False, description="Required for PCIDSK driver") variant( "pcre", default=False, description="Required for REGEXP operator in drivers using SQLite3" @@ -194,14 +217,25 @@ class Gdal(CMakePackage): # Language bindings variant("python", default=False, description="Build Python bindings") variant("java", default=False, description="Build Java bindings") - variant("csharp", default=False, when="@3.5:", description="Build C# bindings") + variant("csharp", default=False, when="build_system=cmake", description="Build C# bindings") variant("perl", default=False, when="@:3.4", description="Build Perl bindings") variant("php", default=False, when="@:2.3", description="Build PHP bindings") + # Build system + build_system( + conditional("cmake", when="@3.5:"), + conditional("autotools", when="@:3.5"), + default="cmake", + ) + + with when("build_system=cmake"): + depends_on("cmake@3.9:", type="build") + depends_on("ninja", 
type="build") + + with when("build_system=autotools"): + depends_on("gmake", type="build") + # Required dependencies - depends_on("cmake@3.9:", when="@3.5:", type="build") - depends_on("ninja", when="@3.5:", type="build") - depends_on("gmake", when="@:3.4", type="build") depends_on("pkgconfig@0.25:", type="build") depends_on("proj@6:", when="@3:") depends_on("proj@:6", when="@2.5:2") @@ -392,30 +426,12 @@ class Gdal(CMakePackage): sha256="9f9824296e75b34b3e78284ec772a5ac8f8ba92c17253ea9ca242caf766767ce", ) - generator = "Ninja" executables = ["^gdal-config$"] @classmethod def determine_version(cls, exe): return Executable(exe)("--version", output=str, error=str).rstrip() - @property - def import_modules(self): - modules = ["osgeo"] - if self.spec.satisfies("@3.3:"): - modules.append("osgeo_utils") - else: - modules.append("osgeo.utils") - return modules - - @when("@:3.4") - def setup_build_environment(self, env): - # Needed to install Python bindings to GDAL installation - # prefix instead of Python installation prefix. - # See swig/python/GNUmakefile for more details. - env.set("PREFIX", self.prefix) - env.set("DESTDIR", "/") - def setup_run_environment(self, env): if "+java" in self.spec: class_paths = find(self.prefix, "*.jar") @@ -437,6 +453,10 @@ def patch(self): if "+java platform=darwin" in self.spec: filter_file("linux", "darwin", "swig/java/java.opt", string=True) + +class CMakeBuilder(CMakeBuilder): + generator = "Ninja" + def cmake_args(self): # https://gdal.org/build_hints.html args = [ @@ -528,11 +548,20 @@ def cmake_args(self): return args + +class AutotoolsBuilder(AutotoolsBuilder): + def setup_build_environment(self, env): + # Needed to install Python bindings to GDAL installation + # prefix instead of Python installation prefix. + # See swig/python/GNUmakefile for more details. 
+ env.set("PREFIX", self.prefix) + env.set("DESTDIR", "/") + def with_or_without(self, name, variant=None, package=None, attribute=None): if not variant: variant = name - if variant not in self.variants: + if variant not in self.pkg.variants: msg = '"{}" is not a variant of "{}"' raise KeyError(msg.format(variant, self.name)) @@ -684,49 +713,26 @@ def configure_args(self): return args - @when("@:3.4") - def cmake(self, spec, prefix): - configure(*self.configure_args()) - - @when("@:3.4") - def build(self, spec, prefix): + def build(self, pkg, spec, prefix): # https://trac.osgeo.org/gdal/wiki/GdalOgrInJavaBuildInstructionsUnix make() if "+java" in spec: with working_dir("swig/java"): make() - @when("@:3.4") def check(self): # no top-level test target if "+java" in self.spec: with working_dir("swig/java"): make("test") - @when("@:3.4") - def install(self, spec, prefix): + def install(self, pkg, spec, prefix): make("install") if "+java" in spec: with working_dir("swig/java"): make("install") install("*.jar", prefix) - @run_after("install") - def darwin_fix(self): # The shared library is not installed correctly on Darwin; fix this - if self.spec.satisfies("@:3.4 platform=darwin"): + if self.spec.satisfies("platform=darwin"): fix_darwin_install_name(self.prefix.lib) - - def test(self): - """Attempts to import modules of the installed package.""" - - if "+python" in self.spec: - # Make sure we are importing the installed modules, - # not the ones in the source directory - for module in self.import_modules: - self.run_test( - self.spec["python"].command.path, - ["-c", "import {0}".format(module)], - purpose="checking import of {0}".format(module), - work_dir="spack-test", - ) diff --git a/var/spack/repos/builtin/packages/gegelati/package.py b/var/spack/repos/builtin/packages/gegelati/package.py new file mode 100644 index 00000000000..1047bfc87aa --- /dev/null +++ b/var/spack/repos/builtin/packages/gegelati/package.py @@ -0,0 +1,25 @@ +# Copyright 2013-2022 Lawrence 
Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class Gegelati(CMakePackage): + """Class to add the a TPG library(gegelati) into Spack""" + + homepage = "https://github.com/gegelati/gegelati" + url = "https://github.com/gegelati/gegelati/archive/refs/tags/v1.2.0.tar.gz" + + # notify when the package is updated. + maintainers = ["lucascarvalhoroncoroni"] + + version("1.2.0", sha256="039997c7d6cb394f910f6c40620165b32094e0c85c170be01eb74b55488a1d4c") + + depends_on("sdl2") + depends_on("doxygen") + + def cmake_args(self): + args = [] + return args diff --git a/var/spack/repos/builtin/packages/gettext/package.py b/var/spack/repos/builtin/packages/gettext/package.py index 27c93cc50dd..b528e8a8b07 100644 --- a/var/spack/repos/builtin/packages/gettext/package.py +++ b/var/spack/repos/builtin/packages/gettext/package.py @@ -14,8 +14,11 @@ class Gettext(AutotoolsPackage, GNUMirrorPackage): homepage = "https://www.gnu.org/software/gettext/" gnu_mirror_path = "gettext/gettext-0.20.1.tar.xz" + maintainers = ["michaelkuhn"] + executables = [r"^gettext$"] + version("0.21.1", sha256="50dbc8f39797950aa2c98e939947c527e5ac9ebd2c1b99dd7b06ba33a6767ae6") version("0.21", sha256="d20fcbb537e02dcf1383197ba05bd0734ef7bf5db06bdb241eb69b7d16b73192") version("0.20.2", sha256="b22b818e644c37f6e3d1643a1943c32c3a9bff726d601e53047d2682019ceaba") version("0.20.1", sha256="53f02fbbec9e798b0faaf7c73272f83608e835c6288dd58be6c9bb54624a3800") @@ -51,7 +54,7 @@ class Gettext(AutotoolsPackage, GNUMirrorPackage): # depends_on('cvs') patch("test-verify-parallel-make-check.patch", when="@:0.19.8.1") - patch("nvhpc-builtin.patch", when="%nvhpc") + patch("nvhpc-builtin.patch", when="@:0.21.0 %nvhpc") patch("nvhpc-export-symbols.patch", when="%nvhpc") patch("nvhpc-long-width.patch", when="%nvhpc") diff --git 
a/var/spack/repos/builtin/packages/gfsio/package.py b/var/spack/repos/builtin/packages/gfsio/package.py index 1c33e3eda25..bfff24a4eb8 100644 --- a/var/spack/repos/builtin/packages/gfsio/package.py +++ b/var/spack/repos/builtin/packages/gfsio/package.py @@ -15,7 +15,12 @@ class Gfsio(CMakePackage): homepage = "https://github.com/NOAA-EMC/NCEPLIBS-gfsio" url = "https://github.com/NOAA-EMC/NCEPLIBS-gfsio/archive/refs/tags/v1.4.1.tar.gz" - maintainers = ["t-brown", "kgerheiser", "Hang-Lei-NOAA", "edwardhartnett"] + maintainers = [ + "t-brown", + "AlexanderRichert-NOAA", + "Hang-Lei-NOAA", + "edwardhartnett", + ] version("1.4.1", sha256="eab106302f520600decc4f9665d7c6a55e7b4901fab6d9ef40f29702b89b69b1") diff --git a/var/spack/repos/builtin/packages/ginkgo/package.py b/var/spack/repos/builtin/packages/ginkgo/package.py index 8899964a544..e2ebbe13773 100644 --- a/var/spack/repos/builtin/packages/ginkgo/package.py +++ b/var/spack/repos/builtin/packages/ginkgo/package.py @@ -22,9 +22,9 @@ class Ginkgo(CMakePackage, CudaPackage, ROCmPackage): version("develop", branch="develop") version("master", branch="master") - version("glu", branch="glu") - version("glu_experimental", branch="glu_experimental") - version("1.4.0", commit="f811917c1def4d0fcd8db3fe5c948ce13409e28e") # v1.4.0 + version("1.5.0.glu", branch="glu") + version("1.5.0.glu_experimental", branch="glu_experimental") + version("1.4.0", commit="f811917c1def4d0fcd8db3fe5c948ce13409e28e", preferred=True) # v1.4.0 version("1.3.0", commit="4678668c66f634169def81620a85c9a20b7cec78") # v1.3.0 version("1.2.0", commit="b4be2be961fd5db45c3d02b5e004d73550722e31") # v1.2.0 version("1.1.1", commit="08d2c5200d3c78015ac8a4fd488bafe1e4240cf5") # v1.1.1 diff --git a/var/spack/repos/builtin/packages/git/package.py b/var/spack/repos/builtin/packages/git/package.py index 1608e5eb9f1..26637127523 100644 --- a/var/spack/repos/builtin/packages/git/package.py +++ b/var/spack/repos/builtin/packages/git/package.py @@ -26,9 +26,32 @@ class 
Git(AutotoolsPackage): # Every new git release comes with a corresponding manpage resource: # https://www.kernel.org/pub/software/scm/git/git-manpages-{version}.tar.gz # https://mirrors.edge.kernel.org/pub/software/scm/git/sha256sums.asc - version("2.37.0", sha256="fc3ffe6c65c1f7c681a1ce6bb91703866e432c762731d4b57c566d696f6d62c3") - version("2.36.1", sha256="37d936fd17c81aa9ddd3dba4e56e88a45fa534ad0ba946454e8ce818760c6a2c") - version("2.35.2", sha256="0decc02a47e792f522df3183c38a61ad8fbb38927502ca6781467a6599a888cb") + version("2.38.1", sha256="620ed3df572a34e782a2be4c7d958d443469b2665eac4ae33f27da554d88b270") + version("2.37.4", sha256="a638c9bf9e45e8d48592076266adaa9b7aa272a99ee2aee2e166a649a9ba8a03") + version("2.36.3", sha256="0c831b88b0534f08051d1287505dfe45c367108ee043de6f1c0502711a7aa3a6") + version("2.35.5", sha256="2cca63fe7bebb5b4bf8efea7b46b12bb89c16ff9711b6b6d845928501d00d0a3") + version("2.34.5", sha256="26831c5e48a8c2bf6a4fede1b38e1e51ffd6dad85952cf69ac520ebd81a5ae82") + version("2.33.5", sha256="d061ed97f890befaef18b4aad80a37b40db90bcf24113c42765fee157a69c7de") + version("2.32.4", sha256="4c791b8e1d96948c9772efc21373ab9b3187af42cdebc3bcbb1a06d794d4e494") + version("2.31.5", sha256="2d4197660322937cc44cab5742deef727ba519ef7405455e33100912e3b019f2") + version("2.30.6", sha256="a6130b38843a5c80e80fb4f7ac4864d361cbf103d262b64e267264e49440d24a") + + # Deprecated versions + version( + "2.37.0", + sha256="fc3ffe6c65c1f7c681a1ce6bb91703866e432c762731d4b57c566d696f6d62c3", + deprecated=True, + ) + version( + "2.36.1", + sha256="37d936fd17c81aa9ddd3dba4e56e88a45fa534ad0ba946454e8ce818760c6a2c", + deprecated=True, + ) + version( + "2.35.2", + sha256="0decc02a47e792f522df3183c38a61ad8fbb38927502ca6781467a6599a888cb", + deprecated=True, + ) version( "2.35.1", sha256="9845a37dd01f9faaa7d8aa2078399d3aea91b43819a5efea6e2877b0af09bd43", @@ -256,18 +279,27 @@ class Git(AutotoolsPackage): ) for (_version, _sha256_manpage) in { + "2.38.1": 
"fcb27484406b64419a9f9890e95ef29af08e1f911d9d368546eddc59a18e245d", + "2.37.4": "06ed920949e717f3ab13c98327ee63cae5e3020ac657d14513ef8f843109b638", "2.37.0": "69386ab0dcdbc8398ebb97487733166033f1c7711b02b8861b1ae8f4f46e6e4e", + "2.36.6": "c5f5385c2b46270a8ce062a9c510bfa4288d9cca54efe0dff48a12ca969cfc6f", "2.36.1": "3fcd315976f06b54b0abb9c14d38c3d484f431ea4de70a706cc5dddc1799f4f7", + "2.35.5": "6cbd4d2185c7a757db21f873973fa1efb81069d8b8b8cc350ca6735cb98f45c5", "2.35.2": "86e153bdd96edd8462cb7a5c57be1b2b670b033c18272b0aa2e6a102acce50be", "2.35.1": "d90da8b28fe0088519e0dc3c9f4bc85e429c7d6ccbaadcfe94aed47fb9c95504", "2.35.0": "c0408a1c944c8e481d7f507bd90a7ee43c34617a1a7af2d76a1898dcf44fa430", + "2.34.5": "897941be5b223b9d32217adb64ea8747db2ba57be5f68be598c44d747d1061b2", "2.34.1": "220f1ed68582caeddf79c4db15e4eaa4808ec01fd11889e19232f0a74d7f31b0", "2.34.0": "fe66a69244def488306c3e05c1362ea53d8626d2a7e57cd7311df2dab1ef8356", + "2.33.5": "34648ede9ac2869190083ee826065c36165e54d9e2906b10680261b243d89890", "2.33.1": "292b08ca1b79422ff478a6221980099c5e3c0a38aba39d952063eedb68e27d93", "2.33.0": "ba9cd0f29a3632a3b78f8ed2389f0780aa6e8fcbe258259d7c584920d19ed1f7", + "2.32.4": "fa73d0eac384e594efdd4c21343545e407267ab64e970a6b395c7f1874ddb0bf", "2.32.0": "b5533c40ea1688231c0e2df51cc0d1c0272e17fe78a45ba6e60cb8f61fa4a53c", + "2.31.5": "18850fc8f1c34e51a0a98b9f974b8356a5d63a53c96fb9fe3dc2880ee84746ab", "2.31.1": "d330498aaaea6928b0abbbbb896f6f605efd8d35f23cbbb2de38c87a737d4543", "2.31.0": "a51b760c36be19113756839a9110b328a09abfff0d57f1c93ddac3974ccbc238", + "2.30.6": "6c20ab10be233e8ff7838351fa5210e972c08005ec541a5241f626cfd4adebfe", "2.30.1": "db323e1b242e9d0337363b1e538c8b879e4c46eedbf94d3bee9e65dab6d49138", "2.30.0": "e23035ae232c9a5eda57db258bc3b7f1c1060cfd66920f92c7d388b6439773a6", "2.29.2": "68b258e6d590cb78e02c0df741bbaeab94cbbac6d25de9da4fb3882ee098307b", diff --git a/var/spack/repos/builtin/packages/glib/package.py b/var/spack/repos/builtin/packages/glib/package.py index 
83dfb307b20..a0f29cb87be 100644 --- a/var/spack/repos/builtin/packages/glib/package.py +++ b/var/spack/repos/builtin/packages/glib/package.py @@ -23,6 +23,9 @@ class Glib(Package): maintainers = ["michaelkuhn"] + version("2.74.1", sha256="0ab981618d1db47845e56417b0d7c123f81a3427b2b9c93f5a46ff5bbb964964") + version("2.74.0", sha256="3652c7f072d7b031a6b5edd623f77ebc5dcd2ae698598abcc89ff39ca75add30") + version("2.72.4", sha256="8848aba518ba2f4217d144307a1d6cb9afcc92b54e5c13ac1f8c4d4608e96f0e") version("2.72.3", sha256="4a39a2f624b8512d500d5840173eda7fa85f51c109052eae806acece85d345f0") version("2.72.2", sha256="78d599a133dba7fe2036dfa8db8fb6131ab9642783fc9578b07a20995252d2de") version("2.72.1", sha256="c07e57147b254cef92ce80a0378dc0c02a4358e7de4702e9f403069781095fe2") @@ -119,7 +122,8 @@ class Glib(Package): depends_on("gettext") depends_on("perl", type=("build", "run")) depends_on("python", type=("build", "run"), when="@2.53.4:") - depends_on("pcre+utf", when="@2.48:") + depends_on("pcre2", when="@2.73.2:") + depends_on("pcre+utf", when="@2.48:2.73.1") depends_on("uuid", when="+libmount") depends_on("util-linux", when="+libmount") depends_on("iconv") @@ -137,7 +141,7 @@ class Glib(Package): # glib prefers the libc version of gettext, which breaks the build if the # external version is also found. 
patch("meson-gettext.patch", when="@2.58:2.64") - patch("meson-gettext-2.66.patch", when="@2.66:2.68,2.72:") + patch("meson-gettext-2.66.patch", when="@2.66:2.68,2.72") patch("meson-gettext-2.70.patch", when="@2.70") def url_for_version(self, version): @@ -162,7 +166,9 @@ def libs(self): return find_libraries(["libglib*"], root=self.prefix, recursive=True) def meson_args(self): - args = ["-Dgettext=external"] + args = [] + if self.spec.satisfies("@:2.72"): + args.append("-Dgettext=external") if self.spec.satisfies("@2.63.5:"): if "+libmount" in self.spec: args.append("-Dlibmount=enabled") diff --git a/var/spack/repos/builtin/packages/glvis/package.py b/var/spack/repos/builtin/packages/glvis/package.py index 7c1292a4a82..3425991a2f1 100644 --- a/var/spack/repos/builtin/packages/glvis/package.py +++ b/var/spack/repos/builtin/packages/glvis/package.py @@ -3,6 +3,9 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) +import sys + +import spack.build_systems.makefile from spack.package import * @@ -13,7 +16,7 @@ class Glvis(MakefilePackage): git = "https://github.com/glvis/glvis.git" tags = ["radiuss"] - maintainers = ["goxberry", "v-dobrev", "tzanio", "tomstitt"] + maintainers = ["v-dobrev", "tzanio", "tomstitt", "goxberry"] # glvis (like mfem) is downloaded from a URL shortener at request # of upstream author Tzanio Kolev . 
See here: @@ -39,6 +42,20 @@ class Glvis(MakefilePackage): version("develop", branch="master") + version( + "4.2", + sha256="314fb04040cd0a8128d6dac62ba67d7067c2c097364e5747182ee8371049b42a", + url="https://bit.ly/glvis-4-2", + extension=".tar.gz", + ) + + version( + "4.1", + sha256="7542c2942167533eec10d59b8331d18241798bbd86a7efbe51dc479db4127407", + url="https://bit.ly/glvis-4-1", + extension=".tar.gz", + ) + version( "4.0", sha256="68331eaea8b93968ed6bf395388c2730b27bbcb4b7809ce44277726edccd9f08", @@ -83,84 +100,77 @@ class Glvis(MakefilePackage): variant("fonts", default=True, description="Use antialiased fonts via freetype & fontconfig") depends_on("mfem@develop", when="@develop") + depends_on("mfem@4.4.0:", when="@4.2") + depends_on("mfem@4.3.0:", when="@4.1") depends_on("mfem@4.0.0:", when="@4.0") depends_on("mfem@3.4.0", when="@3.4") depends_on("mfem@3.3", when="@3.3") depends_on("mfem@3.2", when="@3.2") depends_on("mfem@3.1", when="@3.1") - depends_on("gl") - depends_on("glu") - depends_on("libx11", when="@:3.5") + with when("@:3"): + depends_on("gl") + depends_on("glu") + depends_on("libx11") - depends_on("sdl2", when="@4.0:,develop") - depends_on("glm", when="@4.0:,develop") - depends_on("glew", when="@4.0:,develop") + with when("@4.0:"): + # On Mac, we use the OpenGL framework + if sys.platform.startswith("linux"): + depends_on("gl") + depends_on("sdl2") + depends_on("glm") + # On Mac, use external glew, e.g. from Homebrew + depends_on("glew") + # On Mac, use external freetype and fontconfig, e.g. 
from /opt/X11 + depends_on("freetype") + depends_on("fontconfig") + depends_on("xxd", type="build") + + with when("+fonts"): + depends_on("freetype") + depends_on("fontconfig") depends_on("libpng", when="screenshots=png") depends_on("libtiff", when="screenshots=tiff") - depends_on("freetype", when="+fonts") - depends_on("freetype", when="@4.0:,develop") - depends_on("fontconfig", when="+fonts") - depends_on("fontconfig", when="@4.0:,develop") - depends_on("uuid", when="platform=linux") - def edit(self, spec, prefix): - def yes_no(s): - return "YES" if self.spec.satisfies(s) else "NO" +class MakefileBuilder(spack.build_systems.makefile.MakefileBuilder): + @property + def build_targets(self): + return self.common_args() - mfem = spec["mfem"] - config_mk = mfem.package.config_mk + @property + def install_targets(self): + return ["install"] + self.common_args() - args = [ + def common_args(self): + spec = self.spec + result = [ "CC={0}".format(env["CC"]), - "PREFIX={0}".format(prefix.bin), - "MFEM_DIR={0}".format(mfem.prefix), - "CONFIG_MK={0}".format(config_mk), + "PREFIX={0}".format(self.spec.prefix.bin), + "MFEM_DIR={0}".format(self.spec["mfem"].prefix), + "CONFIG_MK={0}".format(self.spec["mfem"].package.config_mk), ] - png_args = ( - [ - "PNG_OPTS=-DGLVIS_USE_LIBPNG -I{0}".format(spec["libpng"].prefix.include), - "PNG_LIBS={0}".format(spec["libpng"].libs.ld_flags), + if self.spec.satisfies("@4.0:"): + # Spack will inject the necessary include dirs and link paths via + # its compiler wrapper, so we can skip them: + result += [ + "GLM_DIR=", + "SDL_DIR=", + "GLEW_DIR=", + "FREETYPE_DIR=", + "OPENGL_DIR=", ] - if "screenshots=png" in spec - else [] - ) - - tiff_args = ( - [ - "TIFF_OPTS=-DGLVIS_USE_LIBTIFF -I{0}".format(spec["libtiff"].prefix.include), - "TIFF_LIBS={0}".format(spec["libtiff"].libs.ld_flags), - ] - if "screenshots=tiff" in spec - else [] - ) - - if "@4.0:" in spec or "@develop" in spec: - # TODO: glu and fontconfig dirs - args += [ - 
"GLM_DIR={0}".format(spec["glm"].prefix), - "SDL_DIR={0}".format(spec["sdl2"].prefix), - "GLEW_DIR={0}".format(spec["glew"].prefix), - "FREETYPE_DIR={0}".format(spec["freetype"].prefix), - "OPENGL_DIR={0}".format(spec["gl"].home), - ] - - if "screenshots=png" in spec: - args += ["GLVIS_USE_LIBPNG=YES", "GLVIS_USE_LIBTIFF=NO"] - args.extend(png_args) - elif "screenshots=tiff" in spec: - args += ["GLVIS_USE_LIBPNG=NO", "GLVIS_USE_LIBTIFF=YES"] - args.extend(tiff_args) - else: - args += ["GLVIS_USE_LIBPNG=NO", "GLVIS_USE_LIBTIFF=NO"] + # Spack will not inject include dirs like /usr/include/freetype2, + # so we need to do it ourselves: + if spec["freetype"].external: + result += ["GL_OPTS={0}".format(spec["freetype"].headers.cpp_flags)] else: gl_libs = spec["glu"].libs + spec["gl"].libs + spec["libx11"].libs - args += [ + result += [ "GL_OPTS=-I{0} -I{1} -I{2}".format( spec["libx11"].prefix.include, spec["gl"].home.include, @@ -168,26 +178,54 @@ def yes_no(s): ), "GL_LIBS={0}".format(gl_libs.ld_flags), ] + result.extend(self.fonts_args()) - if "screenshots=png" in spec: - args += ["USE_LIBPNG=YES", "USE_LIBTIFF=NO"] - args.extend(png_args) - elif "screenshots=tiff" in spec: - args += ["USE_LIBPNG=NO", "USE_LIBTIFF=YES"] - args.extend(tiff_args) - else: - args += ["USE_LIBPNG=NO", "USE_LIBTIFF=NO"] + if self.spec.satisfies("screenshots=png"): + result.extend(self.png_args()) + elif self.spec.satisfies("screenshots=tiff"): + result.extend(self.tiff_args()) + else: + result.extend(self.xwd_args()) - args.append("USE_FREETYPE={0}".format(yes_no("+fonts"))) - if "+fonts" in spec: - args += [ - "FT_OPTS=-DGLVIS_USE_FREETYPE {0} -I{1}".format( - spec["freetype"].headers.include_flags, spec["fontconfig"].prefix.include - ), - "FT_LIBS={0} {1}".format( - spec["freetype"].libs.ld_flags, spec["fontconfig"].libs.ld_flags - ), - ] + return result - self.build_targets = args - self.install_targets += args + def fonts_args(self): + if not self.spec.satisfies("+fonts"): + return 
["USE_FREETYPE=NO"] + + freetype = self.spec["freetype"] + fontconfig = self.spec["fontconfig"] + return [ + "USE_FREETYPE=YES", + "FT_OPTS=-DGLVIS_USE_FREETYPE {0} -I{1}".format( + freetype.headers.include_flags, fontconfig.prefix.include + ), + "FT_LIBS={0} {1}".format(freetype.libs.ld_flags, fontconfig.libs.ld_flags), + ] + + def xwd_args(self): + if self.spec.satisfies("@4.0:"): + return ["GLVIS_USE_LIBPNG=NO", "GLVIS_USE_LIBTIFF=NO"] + return ["USE_LIBPNG=NO", "USE_LIBTIFF=NO"] + + def png_args(self): + prefix_args = ["USE_LIBPNG=YES", "USE_LIBTIFF=NO"] + if self.spec.satisfies("@4.0:"): + prefix_args = ["GLVIS_USE_LIBPNG=YES", "GLVIS_USE_LIBTIFF=NO"] + + libpng = self.spec["libpng"] + return prefix_args + [ + "PNG_OPTS=-DGLVIS_USE_LIBPNG -I{0}".format(libpng.prefix.include), + "PNG_LIBS={0}".format(libpng.libs.ld_flags), + ] + + def tiff_args(self): + prefix_args = ["USE_LIBPNG=NO", "USE_LIBTIFF=YES"] + if self.spec.satisfies("@4.0:"): + prefix_args = ["GLVIS_USE_LIBPNG=NO", "GLVIS_USE_LIBTIFF=YES"] + + libtiff = self.spec["libtiff"] + return prefix_args + [ + "TIFF_OPTS=-DGLVIS_USE_LIBTIFF -I{0}".format(libtiff.prefix.include), + "TIFF_LIBS={0}".format(libtiff.libs.ld_flags), + ] diff --git a/var/spack/repos/builtin/packages/gmake/findprog-in-ignore-directories.patch b/var/spack/repos/builtin/packages/gmake/findprog-in-ignore-directories.patch new file mode 100644 index 00000000000..4f44eccdf37 --- /dev/null +++ b/var/spack/repos/builtin/packages/gmake/findprog-in-ignore-directories.patch @@ -0,0 +1,132 @@ +From 6e6abd0cdfe4bb96f6412aebc511f10bf254a820 Mon Sep 17 00:00:00 2001 +From: Bruno Haible +Date: Sat, 23 May 2020 12:19:34 +0200 +Subject: [PATCH] findprog-in: Ignore directories. + +Reported by Frederick Eaton via Dmitry Goncharov in +. + +* lib/findprog-in.c (find_in_given_path): When the file found is a +directory, set errno to EACCES and, during a PATH search, continue +searching. +* modules/findprog-in (Depends-on): Add sys_stat, stat. 
+--- + ChangeLog | 10 ++++++ + lib/findprog-in.c | 75 ++++++++++++++++++++++++++++++--------------- + modules/findprog-in | 2 ++ + 3 files changed, 62 insertions(+), 25 deletions(-) + +diff --git a/lib/findprog-in.c b/lib/findprog-in.c +index c254f2f58..0f76e36ca 100644 +--- a/lib/findprog-in.c ++++ b/lib/findprog-in.c +@@ -26,6 +26,7 @@ + #include + #include + #include ++#include + + #include "filename.h" + #include "concat-filename.h" +@@ -58,8 +59,8 @@ static const char * const suffixes[] = + /* Note: The cmd.exe program does a different lookup: It searches according + to the PATHEXT environment variable. + See . +- Also, it executes files ending .bat and .cmd directly without letting the +- kernel interpret the program file. */ ++ Also, it executes files ending in .bat and .cmd directly without letting ++ the kernel interpret the program file. */ + #elif defined __CYGWIN__ + "", ".exe", ".com" + #elif defined __EMX__ +@@ -136,14 +137,26 @@ find_in_given_path (const char *progname, const char *path, + call access() despite its design flaw. */ + if (eaccess (progpathname, X_OK) == 0) + { +- /* Found! */ +- if (strcmp (progpathname, progname) == 0) ++ /* Check that the progpathname does not point to a ++ directory. */ ++ struct stat statbuf; ++ ++ if (stat (progpathname, &statbuf) >= 0) + { +- free (progpathname); +- return progname; ++ if (! S_ISDIR (statbuf.st_mode)) ++ { ++ /* Found! */ ++ if (strcmp (progpathname, progname) == 0) ++ { ++ free (progpathname); ++ return progname; ++ } ++ else ++ return progpathname; ++ } ++ ++ errno = EACCES; + } +- else +- return progpathname; + } + + if (errno != ENOENT) +@@ -210,25 +223,37 @@ find_in_given_path (const char *progname, const char *path, + call access() despite its design flaw. */ + if (eaccess (progpathname, X_OK) == 0) + { +- /* Found! */ +- if (strcmp (progpathname, progname) == 0) ++ /* Check that the progpathname does not point to a ++ directory. 
*/ ++ struct stat statbuf; ++ ++ if (stat (progpathname, &statbuf) >= 0) + { +- free (progpathname); +- +- /* Add the "./" prefix for real, that +- xconcatenated_filename() optimized away. This +- avoids a second PATH search when the caller uses +- execl/execv/execlp/execvp. */ +- progpathname = +- XNMALLOC (2 + strlen (progname) + 1, char); +- progpathname[0] = '.'; +- progpathname[1] = NATIVE_SLASH; +- memcpy (progpathname + 2, progname, +- strlen (progname) + 1); +- } ++ if (! S_ISDIR (statbuf.st_mode)) ++ { ++ /* Found! */ ++ if (strcmp (progpathname, progname) == 0) ++ { ++ free (progpathname); ++ ++ /* Add the "./" prefix for real, that ++ xconcatenated_filename() optimized away. ++ This avoids a second PATH search when the ++ caller uses execl/execv/execlp/execvp. */ ++ progpathname = ++ XNMALLOC (2 + strlen (progname) + 1, char); ++ progpathname[0] = '.'; ++ progpathname[1] = NATIVE_SLASH; ++ memcpy (progpathname + 2, progname, ++ strlen (progname) + 1); ++ } ++ ++ free (path_copy); ++ return progpathname; ++ } + +- free (path_copy); +- return progpathname; ++ errno = EACCES; ++ } + } + + if (errno != ENOENT) +-- +2.38.1 + diff --git a/var/spack/repos/builtin/packages/gmake/package.py b/var/spack/repos/builtin/packages/gmake/package.py index 98792e2e381..7cccbacb01d 100644 --- a/var/spack/repos/builtin/packages/gmake/package.py +++ b/var/spack/repos/builtin/packages/gmake/package.py @@ -64,6 +64,9 @@ class Gmake(AutotoolsPackage, GNUMirrorPackage): when="@:4.2.1", ) + # See https://savannah.gnu.org/bugs/?57962 + patch("findprog-in-ignore-directories.patch", when="@4.3") + tags = ["build-tools"] executables = ["^g?make$"] diff --git a/var/spack/repos/builtin/packages/gmsh/package.py b/var/spack/repos/builtin/packages/gmsh/package.py index 5f8166b2d8c..9651333ce95 100644 --- a/var/spack/repos/builtin/packages/gmsh/package.py +++ b/var/spack/repos/builtin/packages/gmsh/package.py @@ -91,6 +91,7 @@ class Gmsh(CMakePackage): depends_on("zlib", 
when="+compression") depends_on("metis", when="+metis+external") depends_on("cgns", when="+cgns") + depends_on("cgns~scoping", when="+cgns @:4.7.1") with when("+petsc"): depends_on("petsc~int64") depends_on("petsc+mpi", when="+mpi") diff --git a/var/spack/repos/builtin/packages/gnupg/package.py b/var/spack/repos/builtin/packages/gnupg/package.py index bcf0549472f..6d9b89b72a9 100644 --- a/var/spack/repos/builtin/packages/gnupg/package.py +++ b/var/spack/repos/builtin/packages/gnupg/package.py @@ -14,7 +14,10 @@ class Gnupg(AutotoolsPackage): maintainers = ["alalazo"] + version("2.3.8", sha256="540b7a40e57da261fb10ef521a282e0021532a80fd023e75fb71757e8a4969ed") version("2.3.7", sha256="ee163a5fb9ec99ffc1b18e65faef8d086800c5713d15a672ab57d3799da83669") + version("2.2.40", sha256="1164b29a75e8ab93ea15033300149e1872a7ef6bdda3d7c78229a735f8204c28") + # Deprecated over CVE-2022-34903 version( "2.3.4", diff --git a/var/spack/repos/builtin/packages/gnutls/package.py b/var/spack/repos/builtin/packages/gnutls/package.py index d75ab7c15b4..9fc99707176 100644 --- a/var/spack/repos/builtin/packages/gnutls/package.py +++ b/var/spack/repos/builtin/packages/gnutls/package.py @@ -17,6 +17,7 @@ class Gnutls(AutotoolsPackage): homepage = "https://www.gnutls.org" url = "https://www.gnupg.org/ftp/gcrypt/gnutls/v3.5/gnutls-3.5.19.tar.xz" + version("3.7.8", sha256="c58ad39af0670efe6a8aee5e3a8b2331a1200418b64b7c51977fb396d4617114") version("3.6.15", sha256="0ea8c3283de8d8335d7ae338ef27c53a916f15f382753b174c18b45ffd481558") version("3.6.14", sha256="5630751adec7025b8ef955af4d141d00d252a985769f51b4059e5affa3d39d63") version("3.6.8", sha256="aa81944e5635de981171772857e72be231a7e0f559ae0292d2737de475383e83") diff --git a/var/spack/repos/builtin/packages/go-bootstrap/package.py b/var/spack/repos/builtin/packages/go-bootstrap/package.py index 7068a44c93a..a622ffd72a1 100644 --- a/var/spack/repos/builtin/packages/go-bootstrap/package.py +++ 
b/var/spack/repos/builtin/packages/go-bootstrap/package.py @@ -3,6 +3,8 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) +import re + from spack.package import * # THIS PACKAGE SHOULD NOT EXIST @@ -47,6 +49,25 @@ class GoBootstrap(Package): conflicts("os=monterey", msg="go-bootstrap won't build on new macOS") conflicts("target=aarch64:", msg="Go bootstrap doesn't support aarch64 architectures") + # This virtual package allows a fallback to gccgo for aarch64, + # where go-bootstrap cannot be built(aarch64 was added with Go 1.5) + provides("go-external-or-gccgo-bootstrap") + + # Support for aarch64 was added in Go 1.5, use an external package or gccgo instead: + conflicts("@:1.4", when="target=aarch64:") + + executables = ["^go$"] + + # When the user adds a go compiler using ``spack external find go-bootstrap``, + # this lets us get the version for packages.yaml. Then, the solver can avoid + # to build the bootstrap go compiler(for aarch64, it's only gccgo) from source: + @classmethod + def determine_version(cls, exe): + """Return the version of an externally provided go executable or ``None``""" + output = Executable(exe)("version", output=str, error=str) + match = re.search(r"go version go(\S+)", output) + return match.group(1) if match else None + def patch(self): if self.spec.satisfies("@:1.4.3"): # NOTE: Older versions of Go attempt to download external files that have @@ -72,7 +93,13 @@ def install(self, spec, prefix): install_tree(".", prefix) def setup_dependent_build_environment(self, env, dependent_spec): - env.set("GOROOT_BOOTSTRAP", self.spec.prefix) + """Set GOROOT_BOOTSTRAP: When using an external compiler, get its GOROOT env""" + if self.spec.external: + # Use the go compiler added by ``spack external find go-bootstrap``: + goroot = Executable(self.spec.prefix.bin.go)("env", "GOROOT", output=str) + else: + goroot = self.spec.prefix + env.set("GOROOT_BOOTSTRAP", goroot) def setup_build_environment(self, env): env.set("GOROOT_FINAL", self.spec.prefix) 
diff --git a/var/spack/repos/builtin/packages/go/package.py b/var/spack/repos/builtin/packages/go/package.py index e1425f06c86..b4f2c769595 100644 --- a/var/spack/repos/builtin/packages/go/package.py +++ b/var/spack/repos/builtin/packages/go/package.py @@ -152,14 +152,13 @@ class Go(Package): # aarch64 machines (including Macs with Apple silicon) can't use # go-bootstrap because it pre-dates aarch64 support in Go. These machines # have to rely on Go support in gcc (which may require compiling a version - # of gcc with Go support just to satisfy this requirement). However, - # there's also a bug in some versions of GCC's Go front-end that prevents - # these versions from properly bootstrapping Go. (See issue #47771 - # https://github.com/golang/go/issues/47771 ) On the 10.x branch, we need - # at least 10.4. On the 11.x branch, we need at least 11.3. + # of gcc with Go support just to satisfy this requirement) or external go: - if platform.machine() == "aarch64": - depends_on("gcc@10.4.0:10,11.3.0: languages=go", type="build") + # #27769: On M1/MacOS, platform.machine() may return arm64: + if platform.machine() in ["arm64", "aarch64"]: + # Use an external go compiler from packages.yaml/`spack external find go-bootstrap`, + # but fallback to build go-bootstrap@1.4 or to gcc with languages=go (for aarch64): + depends_on("go-external-or-gccgo-bootstrap", type="build") else: depends_on("go-bootstrap", type="build") diff --git a/var/spack/repos/builtin/packages/gobject-introspection/package.py b/var/spack/repos/builtin/packages/gobject-introspection/package.py index 9ea90684e83..624d0941f8e 100644 --- a/var/spack/repos/builtin/packages/gobject-introspection/package.py +++ b/var/spack/repos/builtin/packages/gobject-introspection/package.py @@ -2,15 +2,16 @@ # Spack Project Developers. See the top-level COPYRIGHT file for details. 
# # SPDX-License-Identifier: (Apache-2.0 OR MIT) - +import spack.build_systems.autotools import spack.hooks.sbang as sbang from spack.package import * -class GobjectIntrospection(MesonPackage): +class GobjectIntrospection(MesonPackage, AutotoolsPackage): """The GObject Introspection is used to describe the program APIs and collect them in a uniform, machine readable format.Cairo is a 2D graphics - library with support for multiple output""" + library with support for multiple output + """ homepage = "https://wiki.gnome.org/Projects/GObjectIntrospection" url = "https://download.gnome.org/sources/gobject-introspection/1.72/gobject-introspection-1.72.0.tar.xz" @@ -22,6 +23,12 @@ class GobjectIntrospection(MesonPackage): version("1.49.2", sha256="73d59470ba1a546b293f54d023fd09cca03a951005745d86d586b9e3a8dde9ac") version("1.48.0", sha256="fa275aaccdbfc91ec0bc9a6fd0562051acdba731e7d584b64a277fec60e75877") + build_system( + conditional("autotools", when="@:1.60"), + conditional("meson", when="@1.61:"), + default="meson", + ) + depends_on("pkgconfig", type="build") depends_on("bison", type="build") depends_on("flex", type="build") @@ -94,22 +101,9 @@ def setup_dependent_run_environment(self, env, dependent_spec): def parallel(self): return not self.spec.satisfies("%fj") - def meson_args(self): - return [] - @when("@:1.60") - def meson(self, spec, prefix): - """Run the AutotoolsPackage configure phase""" - configure("--prefix={0}".format(prefix)) - - @when("@:1.60") - def build(self, spec, prefix): - """Run the AutotoolsPackage build phase""" +class AutotoolsBuilderPackage(spack.build_systems.autotools.AutotoolsBuilder): + @run_before("build") + def filter_file_to_avoid_overly_long_shebangs(self): # we need to filter this file to avoid an overly long hashbang line filter_file("#!/usr/bin/env @PYTHON@", "#!@PYTHON@", "tools/g-ir-tool-template.in") - make() - - @when("@:1.60") - def install(self, spec, prefix): - """Run the AutotoolsPackage install phase""" - make("install", 
parallel=False) diff --git a/var/spack/repos/builtin/packages/gpgme/package.py b/var/spack/repos/builtin/packages/gpgme/package.py index fc02415f357..a8077130f58 100644 --- a/var/spack/repos/builtin/packages/gpgme/package.py +++ b/var/spack/repos/builtin/packages/gpgme/package.py @@ -15,6 +15,7 @@ class Gpgme(AutotoolsPackage): executables = ["^gpgme-config$"] + version("1.18.0", sha256="361d4eae47ce925dba0ea569af40e7b52c645c4ae2e65e5621bf1b6cdd8b0e9e") version("1.16.0", sha256="6c8cc4aedb10d5d4c905894ba1d850544619ee765606ac43df7405865de29ed0") version("1.12.0", sha256="b4dc951c3743a60e2e120a77892e9e864fb936b2e58e7c77e8581f4d050e8cd8") diff --git a/var/spack/repos/builtin/packages/gpi-space/package.py b/var/spack/repos/builtin/packages/gpi-space/package.py index 97dc85159f1..b5066e7e496 100644 --- a/var/spack/repos/builtin/packages/gpi-space/package.py +++ b/var/spack/repos/builtin/packages/gpi-space/package.py @@ -24,24 +24,33 @@ class GpiSpace(CMakePackage): maintainers = ["mzeyen1985", "tiberot", "rumach", "mrahn", "acastanedam"] version("latest", branch="main") + version("22.09", sha256="f938847205181081ed24896bba16302ac35bbf478b4ceecae5bb21d5a38c8556") version("22.06", sha256="d89d8a7b574430c4f151a3768073fa44d32e5cc7606fbe0f58aeedf6f5fefc0b") version("22.03", sha256="b01500b9480452aee865a0ef98cf40864f847b7e22ea572f9a6f0f5ac2ae9a1a") version("21.12.1", sha256="6c49aca95a32e66fa1e34bef542c2f380e91f86c9c2b3b0d98921901bab7abce") version("21.12", sha256="51794e2b593b8d1dc7d6310e17744842919bf44205b2cb7a79de2f2bbac3352a") version("21.09", sha256="7f3861c2bfec15a4da46378ea38b304e1462ed315cd315b81ab2c2a8ba50dd3e") + variant( + "monitor", + default=True, + description=""" + Enables the gspc-monitor application for execution monitoring. 
+ """, + ) + variant( + "iml", + default=True, + description=""" + Enables IML support + """, + ) variant( "network", default="ethernet", values=("infiniband", "ethernet"), description="GPI-2 fabric to enable", - ) - variant( - "monitor", - default=True, - description=""" - Enables the gspc-monitor application for execution monitoring. - """, + when="+iml", ) variant( "build_type", @@ -61,8 +70,9 @@ class GpiSpace(CMakePackage): depends_on("hwloc@1.10: +libudev ~libxml2 libs=static") depends_on("libssh2@1.7:") depends_on("openssl@0.9:") - depends_on("gpi-2@1.3.2:1.3.3 fabrics=infiniband", when="network=infiniband") - depends_on("gpi-2@1.3.2:1.3.3 fabrics=ethernet", when="network=ethernet") + with when("+iml"): + depends_on("gpi-2@1.3.2:1.3.3 fabrics=infiniband", when="network=infiniband") + depends_on("gpi-2@1.3.2:1.3.3 fabrics=ethernet", when="network=ethernet") depends_on("qt@5.9:5.15", when="+monitor") def cmake_args(self): @@ -71,6 +81,7 @@ def cmake_args(self): self.define("INSTALL_DO_NOT_BUNDLE", True), self.define("BUILD_TESTING", False), self.define_from_variant("GSPC_WITH_MONITOR_APP", "monitor"), + self.define_from_variant("GSPC_WITH_IML", "iml"), ] return args diff --git a/var/spack/repos/builtin/packages/gptl/package.py b/var/spack/repos/builtin/packages/gptl/package.py index 35f2c9380e0..5290d1bf78e 100644 --- a/var/spack/repos/builtin/packages/gptl/package.py +++ b/var/spack/repos/builtin/packages/gptl/package.py @@ -16,8 +16,9 @@ class Gptl(AutotoolsPackage): homepage = "https://jmrosinski.github.io/GPTL/" url = "https://github.com/jmrosinski/GPTL/releases/download/v8.0.3/gptl-8.0.3.tar.gz" - maintainers = ["edwardhartnett", "kgerheiser", "Hang-Lei-NOAA", " jmrosinski"] + maintainers = ["edwardhartnett", "AlexanderRichert-NOAA", "Hang-Lei-NOAA", "jmrosinski"] + version("8.1.1", sha256="b8ee26f7aeedd2a31d565789634e7c380023fe6b65bbf59030884f4dcbce94a5") version("8.0.3", sha256="334979c6fe78d4ed1b491ec57fb61df7a910c58fd39a3658d03ad89f077a4db6") 
version("8.0.2", sha256="011f153084ebfb52b6bf8f190835d4bae6f6b5c0ad320331356aa47a547bf2b4") @@ -34,6 +35,10 @@ def configure_args(self): if "+pmpi" in self.spec: args.append("--enable-pmpi") args.append("CC=" + self.spec["mpi"].mpicc) + args.append("CXX=" + self.spec["mpi"].mpicxx) + args.append("FC=" + self.spec["mpi"].mpifc) + args.append("F90=" + self.spec["mpi"].mpifc) + args.append("F77=" + self.spec["mpi"].mpif77) if "+papi" in self.spec: args.append("--enable-papi") diff --git a/var/spack/repos/builtin/packages/grib-util/package.py b/var/spack/repos/builtin/packages/grib-util/package.py index d49556245e0..1e66d1940bc 100644 --- a/var/spack/repos/builtin/packages/grib-util/package.py +++ b/var/spack/repos/builtin/packages/grib-util/package.py @@ -14,7 +14,7 @@ class GribUtil(CMakePackage): homepage = "https://github.com/NOAA-EMC/NCEPLIBS-grib_util" url = "https://github.com/NOAA-EMC/NCEPLIBS-grib_util/archive/refs/tags/v1.2.3.tar.gz" - maintainers = ["kgerheiser", "Hang-Lei-NOAA", "edwardhartnett"] + maintainers = ["AlexanderRichert-NOAA", "Hang-Lei-NOAA", "edwardhartnett"] version("1.2.4", sha256="f021d6df3186890b0b1781616dabf953581d71db63e7c2913360336985ccaec7") version("1.2.3", sha256="b17b08e12360bb8ad01298e615f1b4198e304b0443b6db35fe990a817e648ad5") diff --git a/var/spack/repos/builtin/packages/grid/package.py b/var/spack/repos/builtin/packages/grid/package.py index 1427e2f2c6f..d7f19a67398 100644 --- a/var/spack/repos/builtin/packages/grid/package.py +++ b/var/spack/repos/builtin/packages/grid/package.py @@ -77,7 +77,7 @@ def configure_args(self): args.append("--enable-mkl") else: if "+fftw" in spec: - args.append("--with-fftw={0}".format(self.spec["fftw"].prefix)) + args.append("--with-fftw={0}".format(self.spec["fftw-api"].prefix)) if "+lapack" in spec: args.append("--enable-lapack={0}".format(self.spec["lapack"].prefix)) # lapack is searched only as `-llapack`, so anything else diff --git a/var/spack/repos/builtin/packages/gromacs/package.py 
b/var/spack/repos/builtin/packages/gromacs/package.py index 52e66ee4cdc..95bc7dab985 100644 --- a/var/spack/repos/builtin/packages/gromacs/package.py +++ b/var/spack/repos/builtin/packages/gromacs/package.py @@ -489,6 +489,13 @@ def cmake_args(self): options.append( "-DFFTWF_LIBRARIES={0}".format(self.spec["amdfftw"].libs.joined(";")) ) + elif "^armpl-gcc" in self.spec: + options.append( + "-DFFTWF_INCLUDE_DIR={0}".format(self.spec["armpl-gcc"].headers.directories[0]) + ) + options.append( + "-DFFTWF_LIBRARY={0}".format(self.spec["armpl-gcc"].libs.joined(";")) + ) # Ensure that the GROMACS log files report how the code was patched # during the build, so that any problems are easier to diagnose. diff --git a/var/spack/repos/builtin/packages/harfbuzz/package.py b/var/spack/repos/builtin/packages/harfbuzz/package.py index 0400c0a21ab..e746ed6fbf7 100644 --- a/var/spack/repos/builtin/packages/harfbuzz/package.py +++ b/var/spack/repos/builtin/packages/harfbuzz/package.py @@ -2,11 +2,12 @@ # Spack Project Developers. See the top-level COPYRIGHT file for details. 
# # SPDX-License-Identifier: (Apache-2.0 OR MIT) - +import spack.build_systems.autotools +import spack.build_systems.meson from spack.package import * -class Harfbuzz(MesonPackage): +class Harfbuzz(MesonPackage, AutotoolsPackage): """The Harfbuzz package contains an OpenType text shaping engine.""" homepage = "https://github.com/harfbuzz/harfbuzz" @@ -14,6 +15,10 @@ class Harfbuzz(MesonPackage): git = "https://github.com/harfbuzz/harfbuzz.git" version("5.1.0", sha256="2edb95db668781aaa8d60959d21be2ff80085f31b12053cdd660d9a50ce84f05") + build_system( + conditional("autotools", when="@:2.9"), conditional("meson", when="@3:"), default="meson" + ) + version("4.2.1", sha256="bd17916513829aeff961359a5ccebba6de2f4bf37a91faee3ac29c120e3d7ee1") version("4.1.0", sha256="f7984ff4241d4d135f318a93aa902d910a170a8265b7eaf93b5d9a504eed40c8") version("4.0.1", sha256="98f68777272db6cd7a3d5152bac75083cd52a26176d87bc04c8b3929d33bce49") @@ -99,31 +104,33 @@ def flag_handler(self, name, flags): def setup_run_environment(self, env): env.prepend_path("GI_TYPELIB_PATH", join_path(self.prefix.lib, "girepository-1.0")) - def setup_dependent_build_environment(self, env, dependent_spec): - env.prepend_path("XDG_DATA_DIRS", self.prefix.share) - env.prepend_path("GI_TYPELIB_PATH", join_path(self.prefix.lib, "girepository-1.0")) - def setup_dependent_run_environment(self, env, dependent_spec): env.prepend_path("XDG_DATA_DIRS", self.prefix.share) env.prepend_path("GI_TYPELIB_PATH", join_path(self.prefix.lib, "girepository-1.0")) + def patch(self): + change_sed_delimiter("@", ";", "src/Makefile.in") + + +class SetupEnvironment(object): + def setup_dependent_build_environment(self, env, dependent_spec): + env.prepend_path("XDG_DATA_DIRS", self.prefix.share) + env.prepend_path("GI_TYPELIB_PATH", join_path(self.prefix.lib, "girepository-1.0")) + + +class MesonBuilder(spack.build_systems.meson.MesonBuilder, SetupEnvironment): def meson_args(self): - args = [] + graphite2 = "enabled" if 
self.pkg.spec.satisfies("+graphite2") else "disabled" + coretext = "enabled" if self.pkg.spec.satisfies("+coretext") else "disabled" + return [ + # disable building of gtk-doc files following #9885 and #9771 + "-Ddocs=disabled", + "-Dgraphite2={0}".format(graphite2), + "-Dcoretext={0}".format(coretext), + ] - # disable building of gtk-doc files following #9885 and #9771 - args.append("-Ddocs=disabled") - args.append( - "-Dgraphite2=" + ("enabled" if self.spec.satisfies("+graphite2") else "disabled") - ) - if "+coretext" in self.spec: - args.append("-Dcoretext=enabled") - elif "~coretext" in self.spec: - args.append("-Dcoretext=disabled") - - return args - - @when("@:2.9") +class AutotoolsBuilder(spack.build_systems.autotools.AutotoolsBuilder, SetupEnvironment): def configure_args(self): args = [] @@ -135,28 +142,6 @@ def configure_args(self): args.append("GTKDOC_MKPDF={0}".format(true)) args.append("GTKDOC_REBASE={0}".format(true)) args.extend(self.with_or_without("graphite2")) - - if "+coretext" in self.spec: - args.append("--with-coretext") - elif "~coretext" in self.spec: - args.append("--without-coretext") + args.extend(self.with_or_without("coretext")) return args - - def patch(self): - change_sed_delimiter("@", ";", "src/Makefile.in") - - @when("@:2.9") - def meson(self, spec, prefix): - """Run the AutotoolsPackage configure phase""" - configure("--prefix=" + prefix, *self.configure_args()) - - @when("@:2.9") - def build(self, spec, prefix): - """Run the AutotoolsPackage build phase""" - make() - - @when("@:2.9") - def install(self, spec, prefix): - """Run the AutotoolsPackage install phase""" - make("install") diff --git a/var/spack/repos/builtin/packages/hdf5-vfd-gds/package.py b/var/spack/repos/builtin/packages/hdf5-vfd-gds/package.py index ddd0159db86..7bc6f0e1ec4 100644 --- a/var/spack/repos/builtin/packages/hdf5-vfd-gds/package.py +++ b/var/spack/repos/builtin/packages/hdf5-vfd-gds/package.py @@ -17,19 +17,25 @@ class Hdf5VfdGds(CMakePackage, 
CudaPackage): # Versions version("master", branch="master") + version("1.0.2", sha256="f7df64ff62e057b525bc30ed6534f9c0752e52bd58b65f7c147878d6c68105ae") version("1.0.1", sha256="00e125fd149561be991f41e883824de826d8add604aebccf103a4fb82d5faac2") version("1.0.0", sha256="6b16105c7c49f13fc05784ee69b78d45fb159270c78d760689f9cd21e230ddd2") # Dependencies conflicts("~cuda") - depends_on("cmake@3.12:") + # Although cuFile predates 11.7.0, it is not installed in a location the build + # system can obtain via `find_library`. Packaging issues fixed in 11.7.1. + conflicts("^cuda@:11.7.0") + depends_on("cmake@3.12:", type="build") depends_on("hdf5@1.13.0:") def cmake_args(self): - # CMake options args = [ self.define("BUILD_TESTING", self.run_tests), ] return args + + def setup_run_environment(self, env): + env.prepend_path("HDF5_PLUGIN_PATH", self.spec.prefix.lib) diff --git a/var/spack/repos/builtin/packages/heffte/package.py b/var/spack/repos/builtin/packages/heffte/package.py index 2628cc5a87d..2c9424ca34b 100644 --- a/var/spack/repos/builtin/packages/heffte/package.py +++ b/var/spack/repos/builtin/packages/heffte/package.py @@ -3,6 +3,10 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) +import os + +from llnl.util import tty + from spack.package import * @@ -19,6 +23,7 @@ class Heffte(CMakePackage, CudaPackage, ROCmPackage): test_requires_compiler = True version("develop", branch="master") + version("2.3.0", sha256="27c0a8da8f7bc91c8715ecb640721ab7e0454e22f6e3f521fe5acc45c28d60a9") version("2.2.0", sha256="aff4f5111d3d05b269a1378bb201271c40b39e9c960c05c4ef247a31a039be58") version("2.1.0", sha256="527a3e21115231715a0342afdfaf6a8878d2dd0f02f03c92b53692340fd940b9") version("2.0.0", sha256="12f2b49a1a36c416eac174cf0cc50e729d56d68a9f68886d8c34bd45a0be26b6") @@ -30,6 +35,8 @@ class Heffte(CMakePackage, CudaPackage, ROCmPackage): patch("fortran200.patch", when="@2.0.0") depends_on("cmake@3.10:", type=("build", "run")) + depends_on("cmake@3.19:", when="@develop", 
type=("build", "run")) + depends_on("cmake@3.21:", when="@develop+rocm", type=("build", "run")) variant("shared", default=True, description="Builds with shared libraries") variant("fftw", default=False, description="Builds with support for FFTW backend") @@ -58,8 +65,10 @@ class Heffte(CMakePackage, CudaPackage, ROCmPackage): depends_on("fftw@3.3.8:", when="+fftw", type=("build", "run")) depends_on("intel-mkl@2018.0.128:", when="+mkl", type=("build", "run")) depends_on("cuda@8.0:", when="+cuda", type=("build", "run")) - depends_on("hip@3.8.0:", when="+rocm") - depends_on("rocfft@3.8.0:", when="+rocm") + depends_on("hip@3.8.0:", when="+rocm", type=("build", "run")) + depends_on("rocfft@3.8.0:", when="+rocm", type=("build", "run")) + depends_on("hip@5.2.3:", when="@develop+rocm", type=("build", "run")) + depends_on("rocfft@5.2.3:", when="@develop+rocm", type=("build", "run")) depends_on("magma@2.5.3:", when="+cuda+magma", type=("build", "run")) depends_on("magma+rocm@2.6.1:", when="+magma+rocm @2.1:", type=("build", "run")) depends_on("hipblas@3.8:", when="+magma+rocm", type=("build", "run")) @@ -104,11 +113,67 @@ def cmake_args(self): return args + def cmake_bin(self, set=True): + """(Hack) Set/get cmake dependency path. 
Sync with Tasmanian.""" + filepath = join_path(self.install_test_root, "cmake_bin_path.txt") + if set: + with open(filepath, "w") as out_file: + cmake_bin = join_path(self.spec["cmake"].prefix.bin, "cmake") + out_file.write("{0}\n".format(cmake_bin)) + elif os.path.isfile(filepath): + with open(filepath, "r") as in_file: + return in_file.read().strip() + + @run_after("install") + def setup_smoke_test(self): + install_tree( + self.prefix.share.heffte.testing, join_path(self.install_test_root, "testing") + ) + self.cmake_bin(set=True) + def test(self): - # using the tests installed in /share/heffte/testing - cmake_dir = join_path(self.prefix, "share", "heffte", "testing") - test_dir = join_path(self.test_suite.current_test_cache_dir, "test_install") - with working_dir(test_dir, create=True): - cmake(cmake_dir) - make() - make("test") + cmake_bin = self.cmake_bin(set=False) + + if not cmake_bin: + tty.msg("Skipping heffte test: cmake_bin_path.txt not found") + return + + # using the tests copied from /share/heffte/testing + cmake_dir = self.test_suite.current_test_cache_dir.testing + + options = [ + cmake_dir, + ] + if "+rocm" in self.spec: + options.append( + "-Dhip_DIR=" + join_path(self.spec["hip"].prefix, "lib", "cmake", "hip") + ) + options.append( + "-DAMDDeviceLibs_DIR=" + + join_path(self.spec["llvm-amdgpu"].prefix, "lib", "cmake", "AMDDeviceLibs") + ) + options.append( + "-Damd_comgr_DIR=" + + join_path(self.spec["comgr"].prefix, "lib", "cmake", "amd_comgr") + ) + options.append( + "-Dhsa-runtime64_DIR=" + + join_path(self.spec["hsa-rocr-dev"].prefix, "lib", "cmake", "hsa-runtime64") + ) + options.append( + "-DHSA_HEADER=" + join_path(self.spec["hsa-rocr-dev"].prefix, "include") + ) + options.append( + "-Drocfft_DIR=" + join_path(self.spec["rocfft"].prefix, "lib", "cmake", "rocfft") + ) + + if not self.run_test(cmake_bin, options=options, purpose="Generate the Makefile"): + tty.msg("Skipping heffte test: failed to generate Makefile") + return + + if not 
self.run_test("make", purpose="Build test software"): + tty.msg("Skipping heffte test: failed to build test") + return + + if not self.run_test("make", options=["test"], purpose="Run test"): + tty.msg("Failed heffte test: failed to run test") diff --git a/var/spack/repos/builtin/packages/helics/package.py b/var/spack/repos/builtin/packages/helics/package.py index 1a4f24e6ea6..04e0bd05799 100644 --- a/var/spack/repos/builtin/packages/helics/package.py +++ b/var/spack/repos/builtin/packages/helics/package.py @@ -21,6 +21,7 @@ class Helics(CMakePackage): version("develop", branch="develop", submodules=True) version("main", branch="main", submodules=True) version("master", branch="main", submodules=True) + version("3.3.1", sha256="0f6357e6781157515230d14033afc8769a02971a1870909e5697415e1db2e03f") version("3.3.0", sha256="0c2fe0eb2bfc527901a50bbdaa742a7c4b9424dc8fa326ca614157613dcd1457") version("3.2.1", sha256="9df8a7a687c7cf8ea6f157e748e57e8bf5cefa9a49a24e7c590fe9191291da95") version("3.2.0", sha256="b9cec50b9e767113b2e04a5623437885f76196cc9a58287e21f5c0f62c32cca1") diff --git a/var/spack/repos/builtin/packages/hepmc3/package.py b/var/spack/repos/builtin/packages/hepmc3/package.py index e673faf8544..99c2084cd5c 100644 --- a/var/spack/repos/builtin/packages/hepmc3/package.py +++ b/var/spack/repos/builtin/packages/hepmc3/package.py @@ -18,6 +18,7 @@ class Hepmc3(CMakePackage): maintainers = ["vvolkl"] + version("3.2.5", sha256="cd0f75c80f75549c59cc2a829ece7601c77de97cb2a5ab75790cac8e1d585032") version("3.2.4", sha256="e088fccfd1a6c2f8e1089f457101bee1e5c7a9777e9d51c6419c8a288a49e1bb") version("3.2.3", sha256="8caadacc2c969883cd1f994b622795fc885fb4b15dad8c8ae64bcbdbf0cbd47d") version("3.2.2", sha256="0e8cb4f78f804e38f7d29875db66f65e4c77896749d723548cc70fb7965e2d41") diff --git a/var/spack/repos/builtin/packages/highfive/package.py b/var/spack/repos/builtin/packages/highfive/package.py index 0b719175f43..d52d6cfff93 100644 --- 
a/var/spack/repos/builtin/packages/highfive/package.py +++ b/var/spack/repos/builtin/packages/highfive/package.py @@ -13,13 +13,17 @@ class Highfive(CMakePackage): homepage = "https://github.com/BlueBrain/HighFive" url = "https://github.com/BlueBrain/HighFive/archive/v1.2.tar.gz" + maintainers = ["alkino"] + version("2.5.0", sha256="27f55596570df3cc8b878a1681a0d4ba0fe2e3da4a0ef8d436722990d77dc93a") version("2.4.1", sha256="6826471ef5c645ebf947d29574b302991525a8a8ff1ef687aba7311d9a0ea36f") + version("2.4.0", sha256="ba0ed6d8e2e09e80849926f38c15a26cf4b80772084cea0555269a25fec02149") version("2.3.1", sha256="41728a1204bdfcdcef8cbc3ddffe5d744c5331434ce3dcef35614b831234fcd7") version("2.3", sha256="7da6815646eb4294f210cec6be24c9234d7d6ceb2bf92a01129fbba6583c5349") version("2.2.2", sha256="5bfb356705c6feb9d46a0507573028b289083ec4b4607a6f36187cb916f085a7") version("2.2.1", sha256="964c722ba916259209083564405ef9ce073b15e9412955fef9281576ea9c5b85") version("2.2", sha256="fe065f2443e38444100b43999a96916e81a0aa7e500cf768d3bf6f8392b8efee") + version("2.1.1", sha256="52cffeda0d018f020f48e5460c051d5c2031c3a3c82133a21527f186a0c1650e") version("2.0", sha256="deee33d7f578e33dccb5d04771f4e01b89a980dd9a3ff449dd79156901ee8d25") version("1.5", sha256="f194bda482ab15efa7c577ecc4fb7ee519f6d4bf83470acdb3fb455c8accb407") version("1.2", sha256="4d8f84ee1002e8fd6269b62c21d6232aea3d56ce4171609e39eb0171589aab31") diff --git a/var/spack/repos/builtin/packages/hiop/package.py b/var/spack/repos/builtin/packages/hiop/package.py index b770a786183..16325dd5208 100644 --- a/var/spack/repos/builtin/packages/hiop/package.py +++ b/var/spack/repos/builtin/packages/hiop/package.py @@ -19,9 +19,11 @@ class Hiop(CMakePackage, CudaPackage, ROCmPackage): homepage = "https://github.com/LLNL/hiop" git = "https://github.com/LLNL/hiop.git" - maintainers = ["ashermancinelli", "CameronRutherford", "pelesh"] + maintainers = ["ryandanehy", "CameronRutherford", "pelesh"] # Most recent tagged snapshot is the preferred 
version when profiling. + version("0.7.1", commit="8064ef6b2249ad2feca92a9d1e90060bad3eebc7", submodules=True) + version("0.7.0", commit="5f42ab34b419b7cf64d0fffb29d443b009dbfd75", submodules=True) version("0.6.2", commit="55652fbe923ab9107d002d0d070865bd22375b28") version("0.6.1", commit="a9e2697b00aa13ecf0ae4783dd8a41dee11dc50e") version("0.6.0", commit="21af7eb0d6427be73546cf303abc84e834a5a55d") @@ -113,7 +115,7 @@ class Hiop(CMakePackage, CudaPackage, ROCmPackage): depends_on("coinhsl+blas", when="+sparse") depends_on("metis", when="+sparse") - depends_on("ginkgo@glu_experimental", when="+ginkgo") + depends_on("ginkgo@1.5.0.glu_experimental", when="+ginkgo") conflicts( "+shared", diff --git a/var/spack/repos/builtin/packages/hip/package.py b/var/spack/repos/builtin/packages/hip/package.py index 8b9b39f9a76..9dce2189049 100644 --- a/var/spack/repos/builtin/packages/hip/package.py +++ b/var/spack/repos/builtin/packages/hip/package.py @@ -288,10 +288,22 @@ def get_paths(self): if self.spec.external: # For external packages we only assume the `hip` prefix is known, # because spack does not set prefixes of dependencies of externals. - # We assume self.spec.prefix is /opt/rocm-x.y.z/hip and rocm has a - # default installation with everything installed under - # /opt/rocm-x.y.z - rocm_prefix = Prefix(os.path.dirname(self.spec.prefix)) + hip_libs_at_top = os.path.basename(self.spec.prefix) != "hip" + # We assume self.spec.prefix is /opt/rocm-x.y.z for rocm-5.2.0 and newer + # and /opt/rocm-x.y.z/hip for older versions + if self.spec.satisfies("@5.2.0:"): + rocm_prefix = Prefix(self.spec.prefix) + else: + # We assume self.spec.prefix is /opt/rocm-x.y.z/hip and rocm has a + # default installation with everything installed under + # /opt/rocm-x.y.z + # Note that since the key hip library can also exist at the top of the + # /opt/rocm-x.y.z/lib tree, it is possible that the package is detected + # without the correct prefix. Work around it. 
+ if hip_libs_at_top: + rocm_prefix = Prefix(self.spec.prefix) + else: + rocm_prefix = Prefix(os.path.dirname(self.spec.prefix)) if not os.path.isdir(rocm_prefix): msg = "Could not determine prefix for other rocm components\n" @@ -300,7 +312,13 @@ def get_paths(self): msg += "a workaround." raise RuntimeError(msg) + if hip_libs_at_top: + hip_path = "{0}/hip".format(self.spec.prefix) + else: + hip_path = self.spec.prefix + paths = { + "hip-path": hip_path, "rocm-path": rocm_prefix, "llvm-amdgpu": rocm_prefix.llvm, "hsa-rocr-dev": rocm_prefix.hsa, @@ -309,6 +327,7 @@ def get_paths(self): } else: paths = { + "hip-path": self.spec.prefix, "rocm-path": self.spec.prefix, "llvm-amdgpu": self.spec["llvm-amdgpu"].prefix, "hsa-rocr-dev": self.spec["hsa-rocr-dev"].prefix, @@ -372,7 +391,7 @@ def set_variables(self, env): env.set("HIP_DEVICE_LIB_PATH", paths["bitcode"]) # Just the prefix of hip (used in hipcc) - env.set("HIP_PATH", paths["rocm-path"]) + env.set("HIP_PATH", paths["hip-path"]) # Used in comgr and seems necessary when using the JIT compiler, e.g. 
# hiprtcCreateProgram: diff --git a/var/spack/repos/builtin/packages/hpctoolkit/package.py b/var/spack/repos/builtin/packages/hpctoolkit/package.py index 368e115a68d..818df90af96 100644 --- a/var/spack/repos/builtin/packages/hpctoolkit/package.py +++ b/var/spack/repos/builtin/packages/hpctoolkit/package.py @@ -119,7 +119,7 @@ class Hpctoolkit(AutotoolsPackage): depends_on("libunwind@1.4: +xz+pic") depends_on("mbedtls+pic", when="@:2022.03") depends_on("xerces-c transcoder=iconv") - depends_on("xz+pic", type="link") + depends_on("xz+pic@:5.2.6", type="link") depends_on("yaml-cpp@0.7.0:", when="@2022.10:") depends_on("zlib+shared") @@ -137,10 +137,12 @@ class Hpctoolkit(AutotoolsPackage): depends_on("hpcviewer@2022.10:", type="run", when="@2022.10: +viewer") depends_on("hpcviewer", type="run", when="+viewer") - depends_on("hip@4.5:", when="+rocm") - depends_on("hsa-rocr-dev@4.5:", when="+rocm") - depends_on("roctracer-dev@4.5:", when="+rocm") - depends_on("rocprofiler-dev@4.5:", when="+rocm") + # Avoid 'link' dep, we don't actually link, and that adds rpath + # that conflicts with app. + depends_on("hip@4.5:", type=("build", "run"), when="+rocm") + depends_on("hsa-rocr-dev@4.5:", type=("build", "run"), when="+rocm") + depends_on("roctracer-dev@4.5:", type=("build", "run"), when="+rocm") + depends_on("rocprofiler-dev@4.5:", type=("build", "run"), when="+rocm") conflicts("%gcc@:7", when="@2022.10:", msg="hpctoolkit requires gnu gcc 8.x or later") conflicts("%gcc@:6", when="@2021.00:2022.06", msg="hpctoolkit requires gnu gcc 7.x or later") diff --git a/var/spack/repos/builtin/packages/hypar/package.py b/var/spack/repos/builtin/packages/hypar/package.py new file mode 100644 index 00000000000..fc50e626d71 --- /dev/null +++ b/var/spack/repos/builtin/packages/hypar/package.py @@ -0,0 +1,63 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class Hypar(AutotoolsPackage): + """ + HyPar is a finite-difference algorithm to solve hyperbolic-parabolic partial differential + equations (with source terms) on Cartesian grids. It is a unified framework that can handle + systems of PDEs with arbitrary number of spatial dimensions and solution components. It + provides the spatial discretization and time integration functions, functions to read and + write solutions from/to files, as well as functions required to solve the system on parallel + (MPI) platforms. The physical models define the physics-specific functions such as the exact + forms of the hyperbolic flux, parabolic flux, source terms, upwinding functions, etc. + """ + + homepage = "http://hypar.github.io/" + url = "https://bitbucket.org/deboghosh/hypar/get/v4.1.tar.gz" + git = "https://bitbucket.org/deboghosh/hypar.git" + + maintainers = ["debog"] + + tags = ["proxy-app", "ecp-proxy-app"] + + version("4.1", sha256="36c11dcfda006115f4656ff73790992e5caea99dbc64776c9db4e0a29b4c60da") + + variant("mpi", default=True, description="Build with MPI support") + variant("openmp", default=False, description="Build with OpenMP support") + variant("scalapack", default=False, description="Build with Scalapack Support") + variant("fftw", default=False, description="Build with FFTW support") + + depends_on("autoconf", type="build") + depends_on("automake", type="build") + depends_on("libtool", type="build") + + depends_on("mpi", when="+mpi") + depends_on("scalapack", when="+scalapack") + depends_on("blas", when="+scalapack") + depends_on("lapack", when="+scalapack") + depends_on("fftw", when="+fftw") + + patch_config_files = False + + def configure_args(self): + args = [] + spec = self.spec + if "+mpi" in spec: + args.append("--enable-mpi") + args.append("--with-mpi-dir={0}".format(spec["mpi"].prefix)) + if "+openmp" in spec: + args.append("--enable-omp") + if "+scalapack" in spec: 
+ args.append("--enable-scalapack") + args.append("--with-blas-dir={0}".format(spec["blas"].prefix)) + args.append("--with-lapack-dir={0}".format(spec["lapack"].prefix)) + args.append("--with-scalapack-dir={0}".format(spec["scalapack"].prefix)) + if "+fftw" in spec: + args.append("--enable-fftw") + args.append("--with-fftw-dir={0}".format(spec["fftw"].prefix)) + return args diff --git a/var/spack/repos/builtin/packages/hypre/package.py b/var/spack/repos/builtin/packages/hypre/package.py index 874b2321cbf..fd168400cc2 100644 --- a/var/spack/repos/builtin/packages/hypre/package.py +++ b/var/spack/repos/builtin/packages/hypre/package.py @@ -26,6 +26,7 @@ class Hypre(AutotoolsPackage, CudaPackage, ROCmPackage): test_requires_compiler = True version("develop", branch="master") + version("2.26.0", sha256="c214084bddc61a06f3758d82947f7f831e76d7e3edeac2c78bb82d597686e05d") version("2.25.0", sha256="f9fc8371d91239fca694284dab17175bfda3821d7b7a871fd2e8f9d5930f303c") version("2.24.0", sha256="f480e61fc25bf533fc201fdf79ec440be79bb8117650627d1f25151e8be2fdb5") version("2.23.0", sha256="8a9f9fb6f65531b77e4c319bf35bfc9d34bf529c36afe08837f56b635ac052e2") @@ -96,6 +97,7 @@ class Hypre(AutotoolsPackage, CudaPackage, ROCmPackage): depends_on("rocsparse", when="+rocm") depends_on("rocthrust", when="+rocm") depends_on("rocrand", when="+rocm") + depends_on("rocprim", when="+rocm") depends_on("umpire", when="+umpire") for sm_ in CudaPackage.cuda_arch_values: depends_on( @@ -240,7 +242,8 @@ def configure_args(self): rocm_pkgs = ["rocsparse", "rocthrust", "rocprim", "rocrand"] rocm_inc = "" for pkg in rocm_pkgs: - rocm_inc += spec[pkg].headers.include_flags + " " + if "^" + pkg in spec: + rocm_inc += spec[pkg].headers.include_flags + " " configure_args.extend( [ "--with-hip", diff --git a/var/spack/repos/builtin/packages/ibm-databroker/package.py b/var/spack/repos/builtin/packages/ibm-databroker/package.py index ae80f924396..9c0516844bb 100644 --- 
a/var/spack/repos/builtin/packages/ibm-databroker/package.py +++ b/var/spack/repos/builtin/packages/ibm-databroker/package.py @@ -7,7 +7,7 @@ from spack.package import * -class IbmDatabroker(CMakePackage, PythonPackage): +class IbmDatabroker(CMakePackage, PythonExtension): """The Data Broker (DBR) is a distributed, in-memory container of key-value stores enabling applications in a workflow to exchange data through one or more shared namespaces. Thanks to a small set of primitives, applications diff --git a/var/spack/repos/builtin/packages/intel-mpi/package.py b/var/spack/repos/builtin/packages/intel-mpi/package.py index 68aec8734df..93abd2b3821 100644 --- a/var/spack/repos/builtin/packages/intel-mpi/package.py +++ b/var/spack/repos/builtin/packages/intel-mpi/package.py @@ -127,6 +127,7 @@ class IntelMpi(IntelPackage): "external-libfabric", default=False, description="Enable external libfabric dependency" ) depends_on("libfabric", when="+external-libfabric", type=("build", "link", "run")) + depends_on("cpio", type="build") def setup_dependent_build_environment(self, *args): # Handle in callback, conveying client's compilers in additional arg. 
diff --git a/var/spack/repos/builtin/packages/intel-oneapi-advisor/package.py b/var/spack/repos/builtin/packages/intel-oneapi-advisor/package.py index e222ce638d1..27c46cee4a1 100644 --- a/var/spack/repos/builtin/packages/intel-oneapi-advisor/package.py +++ b/var/spack/repos/builtin/packages/intel-oneapi-advisor/package.py @@ -27,6 +27,12 @@ class IntelOneapiAdvisor(IntelOneApiPackage): ) if platform.system() == "Linux": + version( + "2022.3.1", + url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18985/l_oneapi_advisor_p_2022.3.1.15323_offline.sh", + sha256="f05b58c2f13972b3ac979e4796bcc12a234b1e077400b5d00fc5df46cd228899", + expand=False, + ) version( "2022.3.0", url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18872/l_oneapi_advisor_p_2022.3.0.8704_offline.sh", diff --git a/var/spack/repos/builtin/packages/intel-oneapi-ccl/package.py b/var/spack/repos/builtin/packages/intel-oneapi-ccl/package.py index f373af03c88..3747b965762 100644 --- a/var/spack/repos/builtin/packages/intel-oneapi-ccl/package.py +++ b/var/spack/repos/builtin/packages/intel-oneapi-ccl/package.py @@ -30,6 +30,12 @@ class IntelOneapiCcl(IntelOneApiLibraryPackage): depends_on("intel-oneapi-mpi") if platform.system() == "Linux": + version( + "2021.7.1", + url="https://registrationcenter-download.intel.com/akdlm/irc_nas/19029/l_oneapi_ccl_p_2021.7.1.16948_offline.sh", + sha256="daab05a0779db343b600253df8fea93ab0ed20bd630d89883dd651b6b540b1b2", + expand=False, + ) version( "2021.7.0", url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18891/l_oneapi_ccl_p_2021.7.0.8733_offline.sh", diff --git a/var/spack/repos/builtin/packages/intel-oneapi-compilers-classic/package.py b/var/spack/repos/builtin/packages/intel-oneapi-compilers-classic/package.py index 61b634fb9b3..1b81e0ff632 100644 --- a/var/spack/repos/builtin/packages/intel-oneapi-compilers-classic/package.py +++ b/var/spack/repos/builtin/packages/intel-oneapi-compilers-classic/package.py @@ -2,6 +2,9 @@ 
# Spack Project Developers. See the top-level COPYRIGHT file for details. # # SPDX-License-Identifier: (Apache-2.0 OR MIT) +import os + +from llnl.util.link_tree import LinkTree from spack.package import * @@ -19,8 +22,6 @@ class IntelOneapiCompilersClassic(Package): has_code = False - phases = [] - # Versions before 2021 are in the `intel` package # intel-oneapi versions before 2022 use intel@19.0.4 for ver, oneapi_ver in { @@ -35,8 +36,19 @@ class IntelOneapiCompilersClassic(Package): version(ver) depends_on("intel-oneapi-compilers@" + oneapi_ver, when="@" + ver, type="run") + @property + def oneapi_compiler_prefix(self): + oneapi_version = self.spec["intel-oneapi-compilers"].version + return self.spec["intel-oneapi-compilers"].prefix.compiler.join(str(oneapi_version)) + def setup_run_environment(self, env): - """Adds environment variables to the generated module file.""" + """Adds environment variables to the generated module file. + + These environment variables come from running: + .. 
code-block:: console + $ source {prefix}/{component}/{version}/env/vars.sh + and from setting CC/CXX/F77/FC + """ oneapi_pkg = self.spec["intel-oneapi-compilers"].package oneapi_pkg.setup_run_environment(env) @@ -46,3 +58,26 @@ def setup_run_environment(self, env): env.set("CXX", bin_prefix.icpc) env.set("F77", bin_prefix.ifort) env.set("FC", bin_prefix.ifort) + + def install(self, spec, prefix): + # If we symlink top-level directories directly, files won't show up in views + # Create real dirs and symlink files instead + self.symlink_dir(self.oneapi_compiler_prefix.linux.bin.intel64, prefix.bin) + self.symlink_dir(self.oneapi_compiler_prefix.linux.lib, prefix.lib) + self.symlink_dir(self.oneapi_compiler_prefix.linux.include, prefix.include) + self.symlink_dir(self.oneapi_compiler_prefix.linux.compiler, prefix.compiler) + self.symlink_dir(self.oneapi_compiler_prefix.documentation.en.man, prefix.man) + + def symlink_dir(self, src, dest): + # Create a real directory at dest + mkdirp(dest) + + # Symlink all files in src to dest keeping directories as dirs + for entry in os.listdir(src): + src_path = os.path.join(src, entry) + dest_path = os.path.join(dest, entry) + if os.path.isdir(src_path) and os.access(src_path, os.X_OK): + link_tree = LinkTree(src_path) + link_tree.merge(dest_path) + else: + os.symlink(src_path, dest_path) diff --git a/var/spack/repos/builtin/packages/intel-oneapi-compilers/package.py b/var/spack/repos/builtin/packages/intel-oneapi-compilers/package.py index df40c94f8a9..4869fc63d89 100644 --- a/var/spack/repos/builtin/packages/intel-oneapi-compilers/package.py +++ b/var/spack/repos/builtin/packages/intel-oneapi-compilers/package.py @@ -10,6 +10,17 @@ from spack.package import * linux_versions = [ + { + "version": "2022.2.1", + "cpp": { + "url": "https://registrationcenter-download.intel.com/akdlm/irc_nas/19049/l_dpcpp-cpp-compiler_p_2022.2.1.16991_offline.sh", + "sha256": "3f0f02f9812a0cdf01922d2df9348910c6a4cb4f9dfe50fc7477a59bbb1f7173", + }, + 
"ftn": { + "url": "https://registrationcenter-download.intel.com/akdlm/irc_nas/18998/l_fortran-compiler_p_2022.2.1.16992_offline.sh", + "sha256": "64f1d1efbcdc3ac2182bec18313ca23f800d94f69758db83a1394490d9d4b042", + }, + }, { "version": "2022.2.0", "cpp": { @@ -138,6 +149,10 @@ class IntelOneapiCompilers(IntelOneApiPackage): def component_dir(self): return "compiler" + @property + def compiler_search_prefix(self): + return self.prefix.compiler.join(str(self.version)).linux.bin + def setup_run_environment(self, env): """Adds environment variables to the generated module file. @@ -158,7 +173,7 @@ def setup_run_environment(self, env): def install(self, spec, prefix): # Copy instead of install to speed up debugging - # install_tree('/opt/intel/oneapi/compiler', self.prefix) + # install_tree("/opt/intel/oneapi/compiler", self.prefix) # install cpp super(IntelOneapiCompilers, self).install(spec, prefix) @@ -182,6 +197,13 @@ def inject_rpaths(self): # should not be patched patchelf(file, fail_on_error=False) + def write_config_file(self, flags, path, compilers): + for compiler in compilers: + p = path.join(compiler + ".cfg") + with open(p, "w") as f: + f.write(" ".join(flags)) + set_install_permissions(p) + @run_after("install") def extend_config_flags(self): # Extends compiler config files to inject additional compiler flags. @@ -196,7 +218,12 @@ def extend_config_flags(self): # TODO: it is unclear whether we should really use all elements of # _ld_library_path because it looks like the only rpath that needs to be # injected is self.component_prefix.linux.compiler.lib.intel64_lin. 
- flags_list = ["-Wl,-rpath,{}".format(d) for d in self._ld_library_path()] + common_flags = ["-Wl,-rpath,{}".format(d) for d in self._ld_library_path()] + + # Make sure that underlying clang gets the right GCC toolchain by default + llvm_flags = ["--gcc-toolchain={}".format(self.compiler.prefix)] + classic_flags = ["-gcc-name={}".format(self.compiler.cc)] + classic_flags.append("-gxx-name={}".format(self.compiler.cxx)) # Older versions trigger -Wunused-command-line-argument warnings whenever # linker flags are passed in preprocessor (-E) or compilation mode (-c). @@ -204,23 +231,16 @@ def extend_config_flags(self): # do not trigger these warnings. In some build systems these warnings can # cause feature detection to fail, so we silence them with -Wno-unused-... if self.spec.version < Version("2022.1.0"): - flags_list.append("-Wno-unused-command-line-argument") + llvm_flags.append("-Wno-unused-command-line-argument") - # Make sure that underlying clang gets the right GCC toolchain by default - flags_list.append("--gcc-toolchain={}".format(self.compiler.prefix)) - flags = " ".join(flags_list) - for cmp in [ - "icx", - "icpx", - "ifx", - join_path("intel64", "icc"), - join_path("intel64", "icpc"), - join_path("intel64", "ifort"), - ]: - cfg_file = self.component_prefix.linux.bin.join(cmp + ".cfg") - with open(cfg_file, "w") as f: - f.write(flags) - set_install_permissions(cfg_file) + self.write_config_file( + common_flags + llvm_flags, self.component_prefix.linux.bin, ["icx", "icpx", "ifx"] + ) + self.write_config_file( + common_flags + classic_flags, + self.component_prefix.linux.bin.intel64, + ["icc", "icpc", "ifort"], + ) def _ld_library_path(self): # Returns an iterable of directories that might contain shared runtime libraries diff --git a/var/spack/repos/builtin/packages/intel-oneapi-dal/package.py b/var/spack/repos/builtin/packages/intel-oneapi-dal/package.py index 7ed60a6be7b..c2d90138d4f 100644 --- 
a/var/spack/repos/builtin/packages/intel-oneapi-dal/package.py +++ b/var/spack/repos/builtin/packages/intel-oneapi-dal/package.py @@ -29,6 +29,12 @@ class IntelOneapiDal(IntelOneApiLibraryPackage): ) if platform.system() == "Linux": + version( + "2021.7.1", + url="https://registrationcenter-download.intel.com/akdlm/irc_nas/19032/l_daal_oneapi_p_2021.7.1.16996_offline.sh", + sha256="2328927480b0ba5d380028f981717b63ee323f8a1616a491a160a0a0b239e285", + expand=False, + ) version( "2021.7.0", url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18895/l_daal_oneapi_p_2021.7.0.8746_offline.sh", diff --git a/var/spack/repos/builtin/packages/intel-oneapi-dnn/package.py b/var/spack/repos/builtin/packages/intel-oneapi-dnn/package.py index 7d0602d3702..33fa28e3d6d 100644 --- a/var/spack/repos/builtin/packages/intel-oneapi-dnn/package.py +++ b/var/spack/repos/builtin/packages/intel-oneapi-dnn/package.py @@ -29,6 +29,12 @@ class IntelOneapiDnn(IntelOneApiLibraryPackage): ) if platform.system() == "Linux": + version( + "2022.2.1", + url="https://registrationcenter-download.intel.com/akdlm/irc_nas/19035/l_onednn_p_2022.2.1.16994_offline.sh", + sha256="2102964a36a5b58b529385706e6829456ee5225111c33dfce6326fff5175aace", + expand=False, + ) version( "2022.2.0", url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18933/l_onednn_p_2022.2.0.8750_offline.sh", diff --git a/var/spack/repos/builtin/packages/intel-oneapi-dpct/package.py b/var/spack/repos/builtin/packages/intel-oneapi-dpct/package.py index b35db040061..66957cf274e 100644 --- a/var/spack/repos/builtin/packages/intel-oneapi-dpct/package.py +++ b/var/spack/repos/builtin/packages/intel-oneapi-dpct/package.py @@ -22,6 +22,12 @@ class IntelOneapiDpct(IntelOneApiPackage): homepage = "https://www.intel.com/content/www/us/en/developer/tools/oneapi/dpc-compatibility-tool.html#gs.2p8km6" if platform.system() == "Linux": + version( + "2022.2.1", + 
url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18991/l_dpcpp-ct_p_2022.2.1.14994_offline.sh", + sha256="ea2fbe36de70eb3c78c97133f81e0b2a2fbcfc9525e77125a183d7af446ef3e6", + expand=False, + ) version( "2022.2.0", url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18908/l_dpcpp-ct_p_2022.2.0.8701_offline.sh", diff --git a/var/spack/repos/builtin/packages/intel-oneapi-dpl/package.py b/var/spack/repos/builtin/packages/intel-oneapi-dpl/package.py index 41279fceed2..0d8fb52d58b 100644 --- a/var/spack/repos/builtin/packages/intel-oneapi-dpl/package.py +++ b/var/spack/repos/builtin/packages/intel-oneapi-dpl/package.py @@ -25,6 +25,12 @@ class IntelOneapiDpl(IntelOneApiLibraryPackage): homepage = "https://github.com/oneapi-src/oneDPL" if platform.system() == "Linux": + version( + "2021.7.2", + url="https://registrationcenter-download.intel.com/akdlm/irc_nas/19046/l_oneDPL_p_2021.7.2.15007_offline.sh", + sha256="84d60a6b1978ff45d2c416f18ca7df542eaa8c0b18dc3abf4bb0824a91b4fc44", + expand=False, + ) version( "2021.7.1", url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18846/l_oneDPL_p_2021.7.1.8713_offline.sh", diff --git a/var/spack/repos/builtin/packages/intel-oneapi-inspector/package.py b/var/spack/repos/builtin/packages/intel-oneapi-inspector/package.py index bd0d479099d..5e6760656c4 100644 --- a/var/spack/repos/builtin/packages/intel-oneapi-inspector/package.py +++ b/var/spack/repos/builtin/packages/intel-oneapi-inspector/package.py @@ -27,6 +27,12 @@ class IntelOneapiInspector(IntelOneApiPackage): homepage = "https://software.intel.com/content/www/us/en/develop/tools/oneapi/components/inspector.html" if platform.system() == "Linux": + version( + "2022.3.1", + url="https://registrationcenter-download.intel.com/akdlm/irc_nas/19005/l_inspector_oneapi_p_2022.3.1.15318_offline.sh", + sha256="62aa2abf6928c0f4fc60ccfb69375297f823c183aea2519d7344e09c9734c1f8", + expand=False, + ) version( "2022.3.0", 
url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18924/l_inspector_oneapi_p_2022.3.0.8706_offline.sh", diff --git a/var/spack/repos/builtin/packages/intel-oneapi-ipp/package.py b/var/spack/repos/builtin/packages/intel-oneapi-ipp/package.py index 971e0234c9a..4b912c41c98 100644 --- a/var/spack/repos/builtin/packages/intel-oneapi-ipp/package.py +++ b/var/spack/repos/builtin/packages/intel-oneapi-ipp/package.py @@ -30,6 +30,12 @@ class IntelOneapiIpp(IntelOneApiLibraryPackage): ) if platform.system() == "Linux": + version( + "2021.6.2", + url="https://registrationcenter-download.intel.com/akdlm/irc_nas/19007/l_ipp_oneapi_p_2021.6.2.16995_offline.sh", + sha256="23ae49afa9f13c2bed0c8a32e447e1c6b3528685cebdd32e4aa2a9736827cc4e", + expand=False, + ) version( "2021.6.1", url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18925/l_ipp_oneapi_p_2021.6.1.8749_offline.sh", diff --git a/var/spack/repos/builtin/packages/intel-oneapi-ippcp/package.py b/var/spack/repos/builtin/packages/intel-oneapi-ippcp/package.py index f983abcdb26..c3de57f3c10 100644 --- a/var/spack/repos/builtin/packages/intel-oneapi-ippcp/package.py +++ b/var/spack/repos/builtin/packages/intel-oneapi-ippcp/package.py @@ -31,6 +31,12 @@ class IntelOneapiIppcp(IntelOneApiLibraryPackage): ) if platform.system() == "Linux": + version( + "2021.6.2", + url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18999/l_ippcp_oneapi_p_2021.6.2.15006_offline.sh", + sha256="3c285c12da98a4d16e9a5ba237c8c51780475af54b1d1162185480ac891f16ee", + expand=False, + ) version( "2021.6.1", url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18923/l_ippcp_oneapi_p_2021.6.1.8714_offline.sh", diff --git a/var/spack/repos/builtin/packages/intel-oneapi-itac/package.py b/var/spack/repos/builtin/packages/intel-oneapi-itac/package.py index 0110f05b169..98c5cac8666 100644 --- a/var/spack/repos/builtin/packages/intel-oneapi-itac/package.py +++ 
b/var/spack/repos/builtin/packages/intel-oneapi-itac/package.py @@ -30,6 +30,12 @@ class IntelOneapiItac(IntelOneApiPackage): maintainers = ["rscohn2"] if platform.system() == "Linux": + version( + "2021.7.1", + url="https://registrationcenter-download.intel.com/akdlm/irc_nas/19024/l_itac_oneapi_p_2021.7.1.15324_offline.sh", + sha256="fb26689efdb7369e211b5cf05f3e30d491a2787f24fef174b23241b997cc442f", + expand=False, + ) version( "2021.7.0", url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18886/l_itac_oneapi_p_2021.7.0.8707_offline.sh", diff --git a/var/spack/repos/builtin/packages/intel-oneapi-mkl/package.py b/var/spack/repos/builtin/packages/intel-oneapi-mkl/package.py index 0d227b39528..49ee7869f23 100644 --- a/var/spack/repos/builtin/packages/intel-oneapi-mkl/package.py +++ b/var/spack/repos/builtin/packages/intel-oneapi-mkl/package.py @@ -27,6 +27,12 @@ class IntelOneapiMkl(IntelOneApiLibraryPackage): ) if platform.system() == "Linux": + version( + "2022.2.1", + url="https://registrationcenter-download.intel.com/akdlm/irc_nas/19038/l_onemkl_p_2022.2.1.16993_offline.sh", + sha256="eedd4b795720de776b1fc5f542ae0fac37ec235cdb567f7c2ee3182e73e3e59d", + expand=False, + ) version( "2022.2.0", url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18898/l_onemkl_p_2022.2.0.8748_offline.sh", @@ -129,6 +135,7 @@ def setup_run_environment(self, env): def setup_dependent_build_environment(self, env, dependent_spec): env.set("MKLROOT", self.component_prefix) + env.append_path("PKG_CONFIG_PATH", self.component_prefix.lib.pkgconfig) def _find_mkl_libs(self, shared): libs = [] diff --git a/var/spack/repos/builtin/packages/intel-oneapi-mpi/package.py b/var/spack/repos/builtin/packages/intel-oneapi-mpi/package.py index c58320d72e6..a49899c7c54 100644 --- a/var/spack/repos/builtin/packages/intel-oneapi-mpi/package.py +++ b/var/spack/repos/builtin/packages/intel-oneapi-mpi/package.py @@ -26,6 +26,12 @@ class IntelOneapiMpi(IntelOneApiLibraryPackage): 
homepage = "https://software.intel.com/content/www/us/en/develop/tools/oneapi/components/mpi-library.html" if platform.system() == "Linux": + version( + "2021.7.1", + url="https://registrationcenter-download.intel.com/akdlm/irc_nas/19010/l_mpi_oneapi_p_2021.7.1.16815_offline.sh", + sha256="90e7804f2367d457cd4cbf7aa29f1c5676287aa9b34f93e7c9a19e4b8583fff7", + expand=False, + ) version( "2021.7.0", url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18926/l_mpi_oneapi_p_2021.7.0.8711_offline.sh", diff --git a/var/spack/repos/builtin/packages/intel-oneapi-tbb/package.py b/var/spack/repos/builtin/packages/intel-oneapi-tbb/package.py index e8df02c63d0..8935db4c400 100644 --- a/var/spack/repos/builtin/packages/intel-oneapi-tbb/package.py +++ b/var/spack/repos/builtin/packages/intel-oneapi-tbb/package.py @@ -25,6 +25,12 @@ class IntelOneapiTbb(IntelOneApiLibraryPackage): ) if platform.system() == "Linux": + version( + "2021.7.1", + url="https://registrationcenter-download.intel.com/akdlm/irc_nas/19041/l_tbb_oneapi_p_2021.7.1.15005_offline.sh", + sha256="f13a8e740d69347b5985c1be496a3259a86d64ec94933b3d26100dbc2f059fd4", + expand=False, + ) version( "2021.7.0", url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18901/l_tbb_oneapi_p_2021.7.0.8712_offline.sh", diff --git a/var/spack/repos/builtin/packages/intel-oneapi-vtune/package.py b/var/spack/repos/builtin/packages/intel-oneapi-vtune/package.py index 361c3f090d2..fb4011d2660 100644 --- a/var/spack/repos/builtin/packages/intel-oneapi-vtune/package.py +++ b/var/spack/repos/builtin/packages/intel-oneapi-vtune/package.py @@ -28,6 +28,12 @@ class IntelOneapiVtune(IntelOneApiPackage): homepage = "https://software.intel.com/content/www/us/en/develop/tools/oneapi/components/vtune-profiler.html" if platform.system() == "Linux": + version( + "2022.4.1", + url="https://registrationcenter-download.intel.com/akdlm/irc_nas/19027/l_oneapi_vtune_p_2022.4.1.16919_offline.sh", + 
sha256="eb4b4da61eea52c08fc139dbf4630e2c52cbcfaea8f1376c545c0863839366d1", + expand=False, + ) version( "2022.4.0", url="https://registrationcenter-download.intel.com/akdlm/irc_nas/18888/l_oneapi_vtune_p_2022.4.0.8705_offline.sh", diff --git a/var/spack/repos/builtin/packages/intel-tbb/package.py b/var/spack/repos/builtin/packages/intel-tbb/package.py index f0abf5a70ba..8921604b08f 100644 --- a/var/spack/repos/builtin/packages/intel-tbb/package.py +++ b/var/spack/repos/builtin/packages/intel-tbb/package.py @@ -8,10 +8,12 @@ import platform import sys +import spack.build_systems.cmake +import spack.build_systems.makefile from spack.package import * -class IntelTbb(CMakePackage): +class IntelTbb(CMakePackage, MakefilePackage): """Widely used C++ template library for task parallelism. Intel Threading Building Blocks (Intel TBB) lets you easily write parallel C++ programs that take full advantage of multicore performance, that are @@ -29,12 +31,9 @@ class IntelTbb(CMakePackage): # patches, filters and url_for_version() below as needed. 
version("master", branch="master") - version("2021.6.0-rc1", tag="v2021.6.0-rc1") - version( - "2021.5.0", - sha256="e5b57537c741400cf6134b428fc1689a649d7d38d9bb9c1b6d64f092ea28178a", - preferred=True, - ) + version("2021.7.0", sha256="2cae2a80cda7d45dc7c072e4295c675fff5ad8316691f26f40539f7e7e54c0cc") + version("2021.6.0", sha256="4897dd106d573e9dacda8509ca5af1a0e008755bf9c383ef6777ac490223031f") + version("2021.5.0", sha256="e5b57537c741400cf6134b428fc1689a649d7d38d9bb9c1b6d64f092ea28178a") version("2021.4.0", sha256="021796c7845e155e616f5ecda16daa606ebb4c6f90b996e5c08aebab7a8d3de3") version("2021.3.0", sha256="8f616561603695bbb83871875d2c6051ea28f8187dbe59299961369904d1d49e") version("2021.2.0", sha256="cee20b0a71d977416f3e3b4ec643ee4f38cedeb2a9ff015303431dd9d8d79854") @@ -77,6 +76,12 @@ class IntelTbb(CMakePackage): version("4.4.1", sha256="05737bf6dd220b31aad63d77ca59c742271f81b4cc6643aa6f93d37450ae32b5") version("4.4", sha256="93c74b6054c69c86fa49d0fce7c50061fc907cb198a7237b8dd058298fd40c0e") + build_system( + conditional("makefile", when="@:2020.3"), + conditional("cmake", when="@2021:"), + default="cmake", + ) + provides("tbb") # Clang builds incorrectly determine GCC version which in turn incorrectly @@ -164,12 +169,54 @@ def url_for_version(self, version): name = "{0}".format(version) return url.format(name) + @property + def libs(self): + shared = True if "+shared" in self.spec else False + return find_libraries("libtbb*", root=self.prefix, shared=shared, recursive=True) + + +class SetupEnvironment(object): # We set OS here in case the user has it set to something else # that TBB doesn't expect. 
def setup_build_environment(self, env): env.set("OS", platform.system()) - @when("@:2020.3") + +class CMakeBuilder(spack.build_systems.cmake.CMakeBuilder, SetupEnvironment): + def cmake_args(self): + spec = self.spec + options = [ + self.define("CMAKE_HWLOC_2_INCLUDE_PATH", spec["hwloc"].prefix.include), + self.define("CMAKE_HWLOC_2_LIBRARY_PATH", spec["hwloc"].libs), + self.define("-DTBB_CPF", True), + self.define("TBB_STRICT", False), + ] + if spec.variants["cxxstd"].value != "default": + options.append(self.define("CMAKE_CXX_STANDARD", spec.variants["cxxstd"].value)) + return options + + @run_after("install") + def install_pkgconfig(self): + # pkg-config generation is introduced in May 5, 2021. + # It must not be overwritten by spack-generated tbb.pc. + # https://github.com/oneapi-src/oneTBB/commit/478de5b1887c928e52f029d706af6ea640a877be + if self.spec.satisfies("@:2021.2.0", strict=True): + mkdirp(self.prefix.lib.pkgconfig) + + with open(join_path(self.prefix.lib.pkgconfig, "tbb.pc"), "w") as f: + f.write("prefix={0}\n".format(self.prefix)) + f.write("exec_prefix=${prefix}\n") + f.write("libdir={0}\n".format(self.prefix.lib)) + f.write("includedir={0}\n".format(self.prefix.include)) + f.write("\n") + f.write("Name: Threading Building Blocks\n") + f.write("Description: Intel's parallelism library for C++\n") + f.write("Version: {0}\n".format(self.spec.version)) + f.write("Cflags: -I${includedir}\n") + f.write("Libs: -L${libdir} -ltbb -latomic\n") + + +class MakefileBuilder(spack.build_systems.makefile.MakefileBuilder, SetupEnvironment): def coerce_to_spack(self, tbb_build_subdir): for compiler in ["icc", "gcc", "clang"]: fs = glob.glob(join_path(tbb_build_subdir, "*.%s.inc" % compiler)) @@ -186,16 +233,7 @@ def coerce_to_spack(self, tbb_build_subdir): else: of.write(lin) - @when("@:2020.3") - def cmake(self, spec, prefix): - return - - @when("@:2020.3") - def cmake_args(self): - return - - @when("@:2020.3") - def build(self, spec, prefix): + def build(self, 
pkg, spec, prefix): # Deactivate use of RTM with GCC when on an OS with a very old # assembler. if ( @@ -242,8 +280,7 @@ def build(self, spec, prefix): make_opts.append("compiler={0}".format(tbb_compiler)) make(*make_opts) - @when("@:2020.3") - def install(self, spec, prefix): + def install(self, pkg, spec, prefix): mkdirp(prefix) mkdirp(prefix.lib) @@ -270,46 +307,8 @@ def install(self, spec, prefix): with working_dir(join_path(self.stage.source_path, "cmake")): inspect.getmodule(self).cmake(*cmake_args) - @when("@:2020.3") @run_after("install") def darwin_fix(self): # Replace @rpath in ids with full path if sys.platform == "darwin": fix_darwin_install_name(self.prefix.lib) - - @property - def libs(self): - shared = True if "+shared" in self.spec else False - return find_libraries("libtbb*", root=self.prefix, shared=shared, recursive=True) - - @when("@2021.1.1:") - def cmake_args(self): - spec = self.spec - options = [] - options.append("-DCMAKE_HWLOC_2_INCLUDE_PATH=%s" % spec["hwloc"].prefix.include) - options.append("-DCMAKE_HWLOC_2_LIBRARY_PATH=%s" % spec["hwloc"].libs) - options.append("-DTBB_CPF=ON") - options.append("-DTBB_STRICT=OFF") - if spec.variants["cxxstd"].value != "default": - options.append("-DCMAKE_CXX_STANDARD=%s" % spec.variants["cxxstd"].value) - return options - - @run_after("install") - def install_pkgconfig(self): - # pkg-config generation is introduced in May 5, 2021. - # It must not be overwritten by spack-generated tbb.pc. 
- # https://github.com/oneapi-src/oneTBB/commit/478de5b1887c928e52f029d706af6ea640a877be - if self.spec.satisfies("@:2021.2.0", strict=True): - mkdirp(self.prefix.lib.pkgconfig) - - with open(join_path(self.prefix.lib.pkgconfig, "tbb.pc"), "w") as f: - f.write("prefix={0}\n".format(self.prefix)) - f.write("exec_prefix=${prefix}\n") - f.write("libdir={0}\n".format(self.prefix.lib)) - f.write("includedir={0}\n".format(self.prefix.include)) - f.write("\n") - f.write("Name: Threading Building Blocks\n") - f.write("Description: Intel's parallelism library for C++\n") - f.write("Version: {0}\n".format(self.spec.version)) - f.write("Cflags: -I${includedir}\n") - f.write("Libs: -L${libdir} -ltbb -latomic\n") diff --git a/var/spack/repos/builtin/packages/ip/package.py b/var/spack/repos/builtin/packages/ip/package.py index 1b437ce3003..d7e89485a3d 100644 --- a/var/spack/repos/builtin/packages/ip/package.py +++ b/var/spack/repos/builtin/packages/ip/package.py @@ -14,7 +14,12 @@ class Ip(CMakePackage): homepage = "https://noaa-emc.github.io/NCEPLIBS-ip" url = "https://github.com/NOAA-EMC/NCEPLIBS-ip/archive/refs/tags/v3.3.3.tar.gz" - maintainers = ["t-brown", "kgerheiser", "edwardhartnett", "Hang-Lei-NOAA"] + maintainers = [ + "t-brown", + "AlexanderRichert-NOAA", + "edwardhartnett", + "Hang-Lei-NOAA", + ] version("4.0.0", sha256="a2ef0cc4e4012f9cb0389fab6097407f4c623eb49772d96eb80c44f804aa86b8") version( diff --git a/var/spack/repos/builtin/packages/ip2/package.py b/var/spack/repos/builtin/packages/ip2/package.py index ee68a7d1a33..7a654332665 100644 --- a/var/spack/repos/builtin/packages/ip2/package.py +++ b/var/spack/repos/builtin/packages/ip2/package.py @@ -17,7 +17,12 @@ class Ip2(CMakePackage): homepage = "https://github.com/NOAA-EMC/NCEPLIBS-ip2" url = "https://github.com/NOAA-EMC/NCEPLIBS-ip2/archive/refs/tags/v1.1.2.tar.gz" - maintainers = ["t-brown", "kgerheiser", "Hang-Lei-NOAA", "edwardhartnett"] + maintainers = [ + "t-brown", + "AlexanderRichert-NOAA", + 
"Hang-Lei-NOAA", + "edwardhartnett", + ] version("1.1.2", sha256="73c6beec8fd463ec7ccba3633d8c5d53d385c43d507367efde918c2db0af42ab") diff --git a/var/spack/repos/builtin/packages/isescan/package.py b/var/spack/repos/builtin/packages/isescan/package.py index c109f68ce19..4c9eafacb62 100644 --- a/var/spack/repos/builtin/packages/isescan/package.py +++ b/var/spack/repos/builtin/packages/isescan/package.py @@ -11,8 +11,9 @@ class Isescan(Package): genome and metagenome""" homepage = "https://github.com/xiezhq/ISEScan" - url = "https://github.com/xiezhq/ISEScan/archive/v1.7.2.1.tar.gz" + url = "https://github.com/xiezhq/ISEScan/archive/refs/tags/v1.7.2.1.tar.gz" + version("1.7.2.3", sha256="90ef6bc660e471347f65864bd3563f769ed4b79b1a932195f353c5e86351ab05") version("1.7.2.1", sha256="b971a3e86a8cddaa4bcd520ba9e75425bbe93190466f81a3791ae0cb4baf5e5d") depends_on("python@3.3.3:", type="run") diff --git a/var/spack/repos/builtin/packages/json-c/package.py b/var/spack/repos/builtin/packages/json-c/package.py index 559039f21db..d13bddecb83 100644 --- a/var/spack/repos/builtin/packages/json-c/package.py +++ b/var/spack/repos/builtin/packages/json-c/package.py @@ -6,7 +6,7 @@ from spack.package import * -class JsonC(CMakePackage): +class JsonC(CMakePackage, AutotoolsPackage): """A JSON implementation in C.""" homepage = "https://github.com/json-c/json-c/wiki" @@ -20,7 +20,13 @@ class JsonC(CMakePackage): version("0.12", sha256="000c01b2b3f82dcb4261751eb71f1b084404fb7d6a282f06074d3c17078b9f3f") version("0.11", sha256="28dfc65145dc0d4df1dfe7701ac173c4e5f9347176c8983edbfac9149494448c") - depends_on("autoconf", when="@:0.13.1", type="build") + build_system( + conditional("cmake", when="@0.14:"), + conditional("autotools", when="@:0.13.1"), + default="cmake", + ) + + depends_on("autoconf", when="build_system=autotools", type="build") parallel = False @@ -32,23 +38,17 @@ def patch(self): "Makefile.in", ) - @when("@:0.13.1") - def cmake(self, spec, prefix): - configure_args = 
["--prefix=" + prefix] - configure(*configure_args) - - @when("@:0.13.1") - def build(self, spec, prefix): - make() - - @when("@:0.13.1") - def install(self, spec, prefix): - make("install") - @when("%cce@11.0.3:") def patch(self): filter_file("-Werror", "", "CMakeLists.txt") + def flag_handler(self, name, flags): + iflags = [] + if name == "cflags": + if self.spec.satisfies("%oneapi"): + iflags.append("-Wno-error=implicit-function-declaration") + return (iflags, None, None) + @run_after("install") def darwin_fix(self): # The shared library is not installed correctly on Darwin; fix this diff --git a/var/spack/repos/builtin/packages/julia/package.py b/var/spack/repos/builtin/packages/julia/package.py index 28b14c1f3ae..bf7297357d5 100644 --- a/var/spack/repos/builtin/packages/julia/package.py +++ b/var/spack/repos/builtin/packages/julia/package.py @@ -204,12 +204,13 @@ def setup_build_environment(self, env): "mpfr", "nghttp2", "openblas", - "openlibm", "pcre2", "suite-sparse", "utf8proc", "zlib", ] + if "+openlibm" in self.spec: + pkgs.append("openlibm") if self.spec.satisfies("@1.7.0:"): pkgs.append("libblastrampoline") for pkg in pkgs: diff --git a/var/spack/repos/builtin/packages/justbuild/package.py b/var/spack/repos/builtin/packages/justbuild/package.py new file mode 100644 index 00000000000..5cf9499a2d5 --- /dev/null +++ b/var/spack/repos/builtin/packages/justbuild/package.py @@ -0,0 +1,82 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +import os + +from spack.package import * + + +class Justbuild(Package): + "just, a generic build tool" + + git = "https://github.com/just-buildsystem/justbuild.git" + + homepage = "https://github.com/just-buildsystem/justbuild" + + tags = ["build-tools"] + + executables = ["^just$"] + + maintainers = ["asartori86"] + + version("master", branch="master") + + depends_on("python@3:", type=("build", "run")) + depends_on("wget", type=("build", "run")) + + sanity_check_is_file = [join_path("bin", "just"), join_path("bin", "just-mr")] + + def setup_build_environment(self, env): + ar = which("ar") + if self.spec.satisfies("%gcc@10:"): + gcc = which("gcc") + gpp = which("g++") + env.set( + "JUST_BUILD_CONF", + " {" + + ' "CC":"{0}"'.format(gcc.path) + + ', "CXX":"{0}"'.format(gpp.path) + + ', "AR":"{0}"'.format(ar.path) + + ', "COMPILER_FAMILY":"unknown"' + + ', "ENV":{' + + ' "PATH":"{0}"'.format(os.environ["PATH"]) + + " }" + + "}", + ) + elif self.spec.satisfies("%clang@11:") or spec.satisfies("%apple-clang@11:"): + clang = which("clang") + clangpp = which("clang++") + env.set( + "JUST_BUILD_CONF", + " {" + + ' "CC":"{0}"'.format(clang.path) + + ', "CXX":"{0}"'.format(clangpp.path) + + ', "AR":"{0}"'.format(ar.path) + + ', "COMPILER_FAMILY":"unknown"' + + ', "ENV":{' + + ' "PATH":"{0}"'.format(os.environ["PATH"]) + + " }" + + "}", + ) + else: + raise InstallError("please use gcc >= 10 or clang >= 11") + + def install(self, spec, prefix): + python = which("python3") + python(os.path.join("bin", "bootstrap.py"), ".", prefix) + mkdirp(prefix.bin) + install(os.path.join(prefix, "out", "bin", "just"), prefix.bin) + install(os.path.join("bin", "just-mr.py"), os.path.join(prefix.bin, "just-mr")) + + @classmethod + def determine_version(cls, exe): + import json + + try: + s = os.popen(exe + " version").read() + d = json.loads(s) + return ".".join(map(str, d["version"])) + d["suffix"].replace("~", "-") + except Exception: + 
return None diff --git a/var/spack/repos/builtin/packages/kokkos-kernels/package.py b/var/spack/repos/builtin/packages/kokkos-kernels/package.py index 58f57bb44e2..b82253feefa 100644 --- a/var/spack/repos/builtin/packages/kokkos-kernels/package.py +++ b/var/spack/repos/builtin/packages/kokkos-kernels/package.py @@ -23,6 +23,8 @@ class KokkosKernels(CMakePackage, CudaPackage): # openssl sha256 kokkos-kernels-x.y.z.tar.gz version("develop", branch="develop") version("master", branch="master") + version("3.7.00", sha256="51bc6db3995392065656848e2b152cfd1c3a95a951ab18a3934278113d59f32b") + version("3.6.01", sha256="f000b156c8c0b80e85d38587907c11d9479aaf362408b812effeda5e22b24d0d") version("3.6.00", sha256="2753643fd643b9eed9f7d370e0ff5fa957211d08a91aa75398e31cbc9e5eb0a5") version("3.5.00", sha256="a03a41a047d95f9f07cd1e1d30692afdb75b5c705ef524e19c1d02fe60ccf8d1") version("3.4.01", sha256="f504aa4afbffb58fa7c4430d0fdb8fd5690a268823fa15eb0b7d58dab9d351e6") @@ -37,6 +39,8 @@ class KokkosKernels(CMakePackage, CudaPackage): depends_on("kokkos") depends_on("kokkos@master", when="@master") depends_on("kokkos@develop", when="@develop") + depends_on("kokkos@3.7.00", when="@3.7.00") + depends_on("kokkos@3.6.01", when="@3.6.01") depends_on("kokkos@3.6.00", when="@3.6.00") depends_on("kokkos@3.5.00", when="@3.5.00") depends_on("kokkos@3.4.01", when="@3.4.01") diff --git a/var/spack/repos/builtin/packages/kokkos/package.py b/var/spack/repos/builtin/packages/kokkos/package.py index ef08372d0dc..965ab03721c 100644 --- a/var/spack/repos/builtin/packages/kokkos/package.py +++ b/var/spack/repos/builtin/packages/kokkos/package.py @@ -25,6 +25,7 @@ class Kokkos(CMakePackage, CudaPackage, ROCmPackage): version("master", branch="master") version("develop", branch="develop") + version("3.7.00", sha256="62e3f9f51c798998f6493ed36463f66e49723966286ef70a9dcba329b8443040") version("3.6.01", sha256="1b80a70c5d641da9fefbbb652e857d7c7a76a0ebad1f477c253853e209deb8db") version("3.6.00", 
sha256="53b11fffb53c5d48da5418893ac7bc814ca2fde9c86074bdfeaa967598c918f4") version("3.5.00", sha256="748f06aed63b1e77e3653cd2f896ef0d2c64cb2e2d896d9e5a57fec3ff0244ff") diff --git a/var/spack/repos/builtin/packages/landsfcutil/package.py b/var/spack/repos/builtin/packages/landsfcutil/package.py index ff3a9bf6bf5..6fc0229d4bf 100644 --- a/var/spack/repos/builtin/packages/landsfcutil/package.py +++ b/var/spack/repos/builtin/packages/landsfcutil/package.py @@ -15,7 +15,7 @@ class Landsfcutil(CMakePackage): homepage = "https://github.com/NOAA-EMC/NCEPLIBS-landsfcutil" url = "https://github.com/NOAA-EMC/NCEPLIBS-landsfcutil/archive/refs/tags/v2.4.1.tar.gz" - maintainers = ["edwardhartnett", "kgerheiser", "Hang-Lei-NOAA"] + maintainers = ["edwardhartnett", "AlexanderRichert-NOAA", "Hang-Lei-NOAA"] version("2.4.1", sha256="831c5005a480eabe9a8542b4deec838c2650f6966863ea2711cc0cc5db51ca14") diff --git a/var/spack/repos/builtin/packages/lbann/package.py b/var/spack/repos/builtin/packages/lbann/package.py index 69039783ff9..bed40b3f5ed 100644 --- a/var/spack/repos/builtin/packages/lbann/package.py +++ b/var/spack/repos/builtin/packages/lbann/package.py @@ -167,7 +167,8 @@ class Lbann(CMakePackage, CudaPackage, ROCmPackage): depends_on("dihydrogen +cuda", when="+dihydrogen +cuda") depends_on("dihydrogen ~al", when="+dihydrogen ~al") depends_on("dihydrogen +al", when="+dihydrogen +al") - depends_on("dihydrogen +distconv +cuda", when="+distconv") + depends_on("dihydrogen +distconv +cuda", when="+distconv +cuda") + depends_on("dihydrogen +distconv +rocm", when="+distconv +rocm") depends_on("dihydrogen ~half", when="+dihydrogen ~half") depends_on("dihydrogen +half", when="+dihydrogen +half") depends_on("dihydrogen ~nvshmem", when="+dihydrogen ~nvshmem") @@ -191,6 +192,8 @@ class Lbann(CMakePackage, CudaPackage, ROCmPackage): depends_on("aluminum amdgpu_target=%s" % val, when="+al amdgpu_target=%s" % val) depends_on("dihydrogen amdgpu_target=%s" % val, when="+dihydrogen 
amdgpu_target=%s" % val) + depends_on("roctracer-dev", when="+rocm +distconv") + depends_on("cudnn", when="@0.90:0.100 +cuda") depends_on("cudnn@8.0.2:", when="@:0.90,0.101: +cuda") depends_on("cub", when="@0.94:0.98.2 +cuda ^cuda@:10") @@ -334,6 +337,7 @@ def cmake_args(self): "-DLBANN_WITH_ONNX:BOOL=%s" % ("+onnx" in spec), "-DLBANN_WITH_EMBEDDED_PYTHON:BOOL=%s" % ("+python" in spec), "-DLBANN_WITH_PYTHON_FRONTEND:BOOL=%s" % ("+pfe" in spec), + "-DLBANN_WITH_ROCTRACER:BOOL=%s" % ("+rocm +distconv" in spec), "-DLBANN_WITH_TBINF=OFF", "-DLBANN_WITH_UNIT_TESTING:BOOL=%s" % ("+unit_tests" in spec), "-DLBANN_WITH_VISION:BOOL=%s" % ("+vision" in spec), @@ -424,6 +428,12 @@ def cmake_args(self): "-DHIP_CXX_COMPILER={0}".format(self.spec["hip"].hipcc), ] ) + if "platform=cray" in spec: + args.extend( + [ + "-DMPI_ASSUME_NO_BUILTIN_MPI=ON", + ] + ) archs = self.spec.variants["amdgpu_target"].value if archs != "none": arch_str = ",".join(archs) diff --git a/var/spack/repos/builtin/packages/libbigwig/package.py b/var/spack/repos/builtin/packages/libbigwig/package.py new file mode 100644 index 00000000000..9b6819b7a77 --- /dev/null +++ b/var/spack/repos/builtin/packages/libbigwig/package.py @@ -0,0 +1,26 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class Libbigwig(CMakePackage): + """A C library for reading/parsing local and remote bigWig and bigBed files.""" + + homepage = "https://github.com/dpryan79/libBigWig" + url = "https://github.com/dpryan79/libBigWig/archive/refs/tags/0.4.7.tar.gz" + maintainers = ["snehring"] + + version("0.4.7", sha256="8e057797011d93fa00e756600898af4fe6ca2d48959236efc9f296abe94916d9") + + variant("curl", default=True, description="Build with curl support") + + depends_on("curl", when="+curl") + + def cmake_args(self): + args = [] + if self.spec.satisfies("~curl"): + args.append("-DWITH_CURL=OFF") + return args diff --git a/var/spack/repos/builtin/packages/libblastrampoline/package.py b/var/spack/repos/builtin/packages/libblastrampoline/package.py index b8638e54b86..54a59458d98 100644 --- a/var/spack/repos/builtin/packages/libblastrampoline/package.py +++ b/var/spack/repos/builtin/packages/libblastrampoline/package.py @@ -15,6 +15,8 @@ class Libblastrampoline(MakefilePackage): maintainers = ["haampie", "giordano"] + version("5.2.0", sha256="5af9ff9cec16087f57109082a362419fc49152034fa90772ebcabd882007fd95") + version("5.1.1", sha256="cb5515512f6653538ce74653e46ccfba58c87b7dcb79b9655f7c3655e65498f0") version("5.1.0", sha256="55ac0c8f9cb91b2ed2db014be8394c9dadf3b5f26bd8af6dca9d6f20ca72b8fd") version("5.0.2", sha256="2e96fa62957719351da3e4dff8cd0949449073708f5564dae0a224a556432356") version("5.0.1", sha256="1066b4d157276e41ca66ca94f0f8c2900c221b49da2df3c410e6f8bf1ce9b488") diff --git a/var/spack/repos/builtin/packages/libcatalyst/package.py b/var/spack/repos/builtin/packages/libcatalyst/package.py index 5bf1fe4f8fa..07f0bf4f1c1 100644 --- a/var/spack/repos/builtin/packages/libcatalyst/package.py +++ b/var/spack/repos/builtin/packages/libcatalyst/package.py @@ -12,9 +12,12 @@ class Libcatalyst(CMakePackage): homepage = "https://gitlab.kitware.com/paraview/catalyst" git = 
"https://gitlab.kitware.com/paraview/catalyst.git" + url = "https://gitlab.kitware.com/api/v4/projects/paraview%2Fcatalyst/packages/generic/catalyst/v2.0.0/catalyst-v2.0.0.tar.gz" maintainers = ["mathstuf"] + version("2.0.0-rc3", sha256="8862bd0a4d0be2176b4272f9affda1ea4e5092087acbb99a2fe2621c33834e05") + # master as of 2021-05-12 version("2021-05-12", commit="8456ccd6015142b5a7705f79471361d4f5644fa7") diff --git a/var/spack/repos/builtin/packages/libcroco/package.py b/var/spack/repos/builtin/packages/libcroco/package.py index 2f1a5f07c23..56230050aa7 100644 --- a/var/spack/repos/builtin/packages/libcroco/package.py +++ b/var/spack/repos/builtin/packages/libcroco/package.py @@ -15,16 +15,18 @@ class Libcroco(AutotoolsPackage): version("0.6.13", sha256="767ec234ae7aa684695b3a735548224888132e063f92db585759b422570621d4") version("0.6.12", sha256="ddc4b5546c9fb4280a5017e2707fbd4839034ed1aba5b7d4372212f34f84f860") - variant("doc", default=False, description="Build documentation with gtk-doc") + # libcroco has a --enable-gtk-doc configure flag that appears to be + # ignored as of version 0.6.13. 
Until that flag is honored, the +doc + # variant is a no-op + # variant("doc", default=False, + # description="Build documentation with gtk-doc") depends_on("glib") depends_on("libxml2") - depends_on("gtk-doc", type="build", when="+doc") + depends_on("gtk-doc", type="build") depends_on("pkgconfig", type="build") def configure_args(self): - args = ["--enable-gtk-doc=" + ("yes" if self.spec.variants["doc"].value else "no")] # macOS ld does not support this flag # https://github.com/Homebrew/homebrew-core/blob/HEAD/Formula/libcroco.rb - args.append("--disable-Bsymbolic") - return args + return ["--disable-Bsymbolic"] diff --git a/var/spack/repos/builtin/packages/libdistributed/package.py b/var/spack/repos/builtin/packages/libdistributed/package.py new file mode 100644 index 00000000000..3d442b1c608 --- /dev/null +++ b/var/spack/repos/builtin/packages/libdistributed/package.py @@ -0,0 +1,50 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class Libdistributed(CMakePackage): + """a collection of facilities for MPI that create for higher level + facilities for programming in C++""" + + homepage = "https://github.com/robertu94/libdistributed" + url = "https://github.com/robertu94/libdistributed/archive/0.0.3.tar.gz" + git = "https://github.com/robertu94/libdistributed" + + maintainers = ["robertu94"] + + version("master", branch="master") + version("0.4.0", sha256="7895d268c4f9b5444e4378f60b5a28198720bc48633d0e5d072c39e3366b096c") + version("0.3.0", sha256="57443c72a5a9aa57d7f8760c878a77dcffca0b3b5ccf5124cdf5c1fad8a44ae8") + version("0.2.0", sha256="4540136d39f98a21c59a7e127cb0568266747bfff886edf0f0007be4959a09a3") + version("0.1.2", sha256="c22f93e6ea781ea33812481b19c7bc5e688e51f91991debc7f27a493ef2c78b3") + version("0.1.1", sha256="a5201fd754588034e2c87a21f0283dd9fda758d7820450179eabd68b3dae8cb6") + version("0.1.0", sha256="e10daa6d4a6dc371057e92d2b706ae16450b41ed7c0d386cffeb68e160f556c1") + version("0.0.10", sha256="3af4ce81b3ae016e80e401adfcfad856e15a76da4d2a81535cb4bd993c11104b") + version("0.0.8", sha256="78bc1fbc99e46ea0e03cb181623262be0f527767efd3249baa249cb24b794762") + version("0.0.7", sha256="b2c65752df7bc55fcdc9a5eb7b36c203667f2fb6382d3eaecdaf1504421d4d7b") + version("0.0.6", sha256="05ce6ae880aec19f6945ee5f3c2f4099343ca6b555ea6c8e005a48a6e09faf5b") + version("0.0.5", sha256="09c1e9a0b34371fa8e6f3d50671bcce7fcc3e4c7c26f3e19017b07b64695d199") + version("0.0.4", sha256="7813980011091822534d196d372b8cb6fdc12d35acd5acb42c6eeeaf10a44490") + version("0.0.3", sha256="c476b3efe20e1af4c976e89ff81b7eed01ddddae73ac66f005108747facbeff7") + version("0.0.2", sha256="c25309108fe17021fd5f06ba98386210708158c439e98326e68f66c42875e58a") + version("0.0.1", sha256="4c23ce0fd70a12ee5f8760ea00377ab6370d86b30ab42476e07453b19ea4ac44") + + depends_on("mpi@2:") + depends_on("libstdcompat@0.0.2:", when="@0.1.0:") + + def 
cmake_args(self): + args = [] + if self.run_tests: + args.append("-DBUILD_TESTING=ON") + else: + args.append("-DBUILD_TESTING=OFF") + return args + + @run_after("build") + @on_package_attributes(run_tests=True) + def test(self): + make("test") diff --git a/var/spack/repos/builtin/packages/libgcrypt/package.py b/var/spack/repos/builtin/packages/libgcrypt/package.py index e7798429f6e..1b945c87c33 100644 --- a/var/spack/repos/builtin/packages/libgcrypt/package.py +++ b/var/spack/repos/builtin/packages/libgcrypt/package.py @@ -20,6 +20,7 @@ class Libgcrypt(AutotoolsPackage): version("1.9.3", sha256="97ebe4f94e2f7e35b752194ce15a0f3c66324e0ff6af26659bbfb5ff2ec328fd") version("1.9.2", sha256="b2c10d091513b271e47177274607b1ffba3d95b188bbfa8797f948aec9053c5a") version("1.9.1", sha256="c5a67a8b9b2bd370fb415ed1ee31c7172e5683076493cf4a3678a0fbdf0265d9") + version("1.8.9", sha256="2bda4790aa5f0895d3407cf7bf6bd7727fd992f25a45a63d92fef10767fa3769") version("1.8.7", sha256="03b70f028299561b7034b8966d7dd77ef16ed139c43440925fe8782561974748") version("1.8.6", sha256="0cba2700617b99fc33864a0c16b1fa7fdf9781d9ed3509f5d767178e5fd7b975") version("1.8.5", sha256="3b4a2a94cb637eff5bdebbcaf46f4d95c4f25206f459809339cdada0eb577ac3") diff --git a/var/spack/repos/builtin/packages/libgit2/package.py b/var/spack/repos/builtin/packages/libgit2/package.py index d16090f0f94..2c6f5d1cdd2 100644 --- a/var/spack/repos/builtin/packages/libgit2/package.py +++ b/var/spack/repos/builtin/packages/libgit2/package.py @@ -18,6 +18,8 @@ class Libgit2(CMakePackage): maintainers = ["AndrewGaspar"] + version("1.5.0", sha256="8de872a0f201b33d9522b817c92e14edb4efad18dae95cf156cf240b2efff93e") + version("1.4.4", sha256="e9923e9916a32f54c661d55d79c28fa304cb23617639e68bff9f94d3e18f2d4b") version("1.4.3", sha256="f48b961e463a9e4e7e7e58b21a0fb5a9b2a1d24d9ba4d15870a0c9b8ad965163") version("1.4.2", sha256="901c2b4492976b86477569502a41c31b274b69adc177149c02099ea88404ef19") version("1.4.1", 
sha256="fccd371a271133e29d002dd207490d22a0c9b06992b874b8edb8366532a94f54") diff --git a/var/spack/repos/builtin/packages/libgpg-error/package.py b/var/spack/repos/builtin/packages/libgpg-error/package.py index acdc73f8a71..8b6924492f9 100644 --- a/var/spack/repos/builtin/packages/libgpg-error/package.py +++ b/var/spack/repos/builtin/packages/libgpg-error/package.py @@ -14,6 +14,7 @@ class LibgpgError(AutotoolsPackage): maintainers = ["alalazo"] + version("1.46", sha256="b7e11a64246bbe5ef37748de43b245abd72cfcd53c9ae5e7fc5ca59f1c81268d") version("1.45", sha256="570f8ee4fb4bff7b7495cff920c275002aea2147e9a1d220c068213267f80a26") version("1.44", sha256="8e3d2da7a8b9a104dd8e9212ebe8e0daf86aa838cc1314ba6bc4de8f2d8a1ff9") version("1.43", sha256="a9ab83ca7acc442a5bd846a75b920285ff79bdb4e3d34aa382be88ed2c3aebaf") @@ -31,8 +32,11 @@ class LibgpgError(AutotoolsPackage): patch("awk-5.patch", when="@1.36^gawk@5:") def configure_args(self): - return [ + args = [ "--enable-static", "--enable-shared", "--enable-tests" if self.run_tests else "--disable-tests", ] + if self.spec.satisfies("@1.46:"): + args.append("--enable-install-gpg-error-config") + return args diff --git a/var/spack/repos/builtin/packages/libksba/package.py b/var/spack/repos/builtin/packages/libksba/package.py index 9cb7d40c94f..8369102f316 100644 --- a/var/spack/repos/builtin/packages/libksba/package.py +++ b/var/spack/repos/builtin/packages/libksba/package.py @@ -17,6 +17,7 @@ class Libksba(AutotoolsPackage): maintainers = ["alalazo"] + version("1.6.2", sha256="fce01ccac59812bddadffacff017dac2e4762bdb6ebc6ffe06f6ed4f6192c971") version("1.6.0", sha256="dad683e6f2d915d880aa4bed5cea9a115690b8935b78a1bbe01669189307a48b") version("1.5.1", sha256="b0f4c65e4e447d9a2349f6b8c0e77a28be9531e4548ba02c545d1f46dc7bf921") version("1.5.0", sha256="ae4af129216b2d7fdea0b5bf2a788cd458a79c983bb09a43f4d525cc87aba0ba") diff --git a/var/spack/repos/builtin/packages/libpfm4/package.py 
b/var/spack/repos/builtin/packages/libpfm4/package.py index 120d2da01f8..9d0c07b63c6 100644 --- a/var/spack/repos/builtin/packages/libpfm4/package.py +++ b/var/spack/repos/builtin/packages/libpfm4/package.py @@ -12,9 +12,10 @@ class Libpfm4(MakefilePackage): the perf_events Linux kernel interface.""" homepage = "http://perfmon2.sourceforge.net" - url = "https://downloads.sourceforge.net/project/perfmon2/libpfm4/libpfm-4.8.0.tar.gz" + url = "https://downloads.sourceforge.net/project/perfmon2/libpfm4/libpfm-4.12.0.tar.gz" maintainers = ["mwkrentel"] + version("4.12.0", sha256="4b0c1f53f39a61525b69bebf532c68040c1b984d7544a8ae0844b13cd91e1ee4") version("4.11.0", sha256="5da5f8872bde14b3634c9688d980f68bda28b510268723cc12973eedbab9fecc") version("4.10.1", sha256="c61c575378b5c17ccfc5806761e4038828610de76e2e34fac9f7fa73ba844b49") version("4.9.0", sha256="db0fbe8ee28fd9beeb5d3e80b7cb3b104debcf6a9fcf5cb8b882f0662c79e4e2") diff --git a/var/spack/repos/builtin/packages/libpressio-adios2/package.py b/var/spack/repos/builtin/packages/libpressio-adios2/package.py new file mode 100644 index 00000000000..dbd06a79738 --- /dev/null +++ b/var/spack/repos/builtin/packages/libpressio-adios2/package.py @@ -0,0 +1,29 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class LibpressioAdios2(CMakePackage): + """An IO plugin to read/write ADIOS2 files for LibPressio""" + + homepage = "https://github.com/robertu94/libpressio_adios2" + url = "https://github.com/robertu94/libpressio_adios2/archive/refs/tags/0.0.1.tar.gz" + + maintainers = ["robertu94"] + + version("0.0.2", sha256="8ab4b5a0dd8038d52f54aa9b5a67b83a8f7cd096db4c5a413fe0c6caf678e402") + version("0.0.1", sha256="ab9c7e26114e8d81f8ad8aca703855079cd3441f9b72e01d9b4aeb0c57ce0746") + + depends_on("libpressio@0.85.0:+mpi", when="@0.0.2") + depends_on("libpressio@0.60.0:+mpi") + depends_on("adios2@2.8.0:+mpi") + + def cmake_args(self): + args = [ + self.define("BUILD_TESTING", self.run_tests), + self.define("LIBPRESSIO_ADIOS2_WERROR", False), + ] + return args diff --git a/var/spack/repos/builtin/packages/libpressio-errorinjector/package.py b/var/spack/repos/builtin/packages/libpressio-errorinjector/package.py new file mode 100644 index 00000000000..ce75a977ac6 --- /dev/null +++ b/var/spack/repos/builtin/packages/libpressio-errorinjector/package.py @@ -0,0 +1,25 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class LibpressioErrorinjector(CMakePackage): + """LibPressioErrorInjector injects errors into data for sensitivity studies""" + + homepage = "https://github.com/robertu94/libpressio-errorinjector" + git = "https://github.com/robertu94/libpressio-errorinjector" + + maintainers = ["robertu94"] + + version("0.8.0", commit="0bfac9a06b1ae34a872b8b599dd4ccb46aa2db4e") + version("0.7.0", commit="0b5a5b15121be248a3e5af925f9ad88b3d43fef6") + + depends_on("libpressio@0.88.0:", when="@0.8.0:") + depends_on("libpressio@:0.87.0", when="@:0.7.0") + + def cmake_args(self): + args = ["-DBUILD_TESTING=OFF"] + return args diff --git a/var/spack/repos/builtin/packages/libpressio-nvcomp/package.py b/var/spack/repos/builtin/packages/libpressio-nvcomp/package.py new file mode 100644 index 00000000000..7688b3c5017 --- /dev/null +++ b/var/spack/repos/builtin/packages/libpressio-nvcomp/package.py @@ -0,0 +1,30 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class LibpressioNvcomp(CMakePackage, CudaPackage): + """LibPressio Bindings for NVCOMP""" + + homepage = "https://github.com/robertu94/libpressio-nvcomp" + url = "https://github.com/robertu94/libpressio-nvcomp/archive/refs/tags/0.0.3.tar.gz" + git = "https://github.com/robertu94/libpressio-nvcomp" + + maintainers = ["robertu94"] + + version("0.0.3", sha256="21409d34f9281bfd7b83b74f5f8fc6d34794f3161391405538c060fb59534597") + version("0.0.2", commit="38d7aa7c283681cbe5b7f17b900f72f9f25be51c") + + depends_on("nvcomp@2.2.0:", when="@0.0.3:") + depends_on("libpressio@0.88.0:", when="@:0.0.2") + + conflicts("~cuda") + conflicts("cuda_arch=none", when="+cuda") + + def cmake_args(self): + cuda_arch = self.spec.variants["cuda_arch"].value + args = [("-DCMAKE_CUDA_ARCHITECTURES=%s" % cuda_arch)] + return args diff --git a/var/spack/repos/builtin/packages/libpressio-opt/package.py b/var/spack/repos/builtin/packages/libpressio-opt/package.py new file mode 100644 index 00000000000..4c988b63723 --- /dev/null +++ b/var/spack/repos/builtin/packages/libpressio-opt/package.py @@ -0,0 +1,49 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class LibpressioOpt(CMakePackage): + """Metacompressor which preforms optimization of compressor settings for LibPressio""" + + homepage = "https://github.com/robertu94/libpressio_opt" + git = "git@github.com:robertu94/libpressio_opt" + url = "https://github.com/robertu94/libpressio_opt/archive/refs/tags/0.11.0.tar.gz" + + maintainers = ["robertu94"] + + version("develop", branch="develop") + version("sdr-develop", branch="develop", git="git@github.com:szcompressor/SDRFramework") + version("0.13.5", sha256="cc0e6a46335aa3552b8ab57757d39855f4fba71e661f706ec99519cb2c8a2f3c") + version("0.13.4", sha256="e9f715d11fe3558a31e1d9a939150209449ec8636ded047cb0adcd3db07569ae") + version("0.13.3", sha256="98436b7fa6a53dd9cc09a9b978dc81c299501930cb8b844713080fc42d39d173") + version("0.13.2", sha256="8a16ba23b5078b0ee3a75d8a64ba64b492ecfadc221dd28ae463f4d3f4f7d847") + version("0.13.1", sha256="a831d326871c183a7e64b2015d687da3f17cf89c2d7d1d6770e3acbc1346aa8c") + version("0.13.0", sha256="6a64116dd6727e2dc05840b0e804fcaf82debde09c69e4905197462a769e998e") + version("0.12.1", sha256="e5d0b4d8b4885dfe555148e23f34e0bc904a898871dea8d412265075f1f8c858") + version("0.12.0", sha256="5f28f37de858634cf481d911f202360f078902803f82b5f49b7eec9b59948d64") + version("0.11.0", sha256="cebbc512fcaa537d2af1a6919d6e0400cdc13595d71d9b90b74ad3eb865c9767") + + depends_on("libpressio+libdistributed+mpi") + depends_on("libpressio@0.88.0:", when="@0.13.5:") + depends_on("libpressio@0.85.0:", when="@0.13.3:") + depends_on("libpressio@0.66.1:", when="@:0.13.2") + depends_on("libdistributed@0.0.11:") + depends_on("libdistributed@0.4.0:", when="@0.13.3:") + depends_on("dlib@19.22:") + + def cmake_args(self): + args = [] + if self.run_tests: + args.append("-DBUILD_TESTING=ON") + else: + args.append("-DBUILD_TESTING=OFF") + return args + + @run_after("build") + @on_package_attributes(run_tests=True) + def test(self): + 
make("test") diff --git a/var/spack/repos/builtin/packages/libpressio-rmetric/package.py b/var/spack/repos/builtin/packages/libpressio-rmetric/package.py new file mode 100644 index 00000000000..4a916904e58 --- /dev/null +++ b/var/spack/repos/builtin/packages/libpressio-rmetric/package.py @@ -0,0 +1,43 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class LibpressioRmetric(CMakePackage): + """LibPressio metric that runs R code""" + + url = "https://github.com/robertu94/libpressio-rmetric/archive/refs/tags/0.0.2.tar.gz" + git = "https://github.com/robertu94/libpressio-rmetric" + homepage = git + + maintainers = ["robertu94"] + + version("master", branch="master") + # note versions <= 0.0.3 do not build with spack + version("0.0.6", sha256="b23a79448cd32b51a7301d6cebf4e228289712dd77dd76d86821741467e9af46") + version("0.0.5", sha256="51eb192314ef083790dd0779864cab527845bd8de699b3a33cd065c248eae24c") + version("0.0.4", sha256="166af5e84d7156c828a3f0dcc5bf531793ea4ec44bbf468184fbab96e1f0a91f") + version("0.0.3", sha256="c45948f83854c87748c7ec828ca2f06d7cf6f98a34f763b68c13a4e2deb7fd79") + + depends_on("libpressio@0.88.0:", when="@0.0.5:") + depends_on("libpressio@0.85.0:", when="@:0.0.4") + depends_on("r") + depends_on("r-rcpp") + depends_on("r-rinside") + + def cmake_args(self): + args = [] + if self.run_tests: + args.append("-DBUILD_TESTING=ON") + else: + args.append("-DBUILD_TESTING=OFF") + + return args + + @run_after("build") + @on_package_attributes(run_tests=True) + def test(self): + make("test") diff --git a/var/spack/repos/builtin/packages/libpressio-sperr/package.py b/var/spack/repos/builtin/packages/libpressio-sperr/package.py new file mode 100644 index 00000000000..36a01c7531b --- /dev/null +++ b/var/spack/repos/builtin/packages/libpressio-sperr/package.py @@ -0,0 +1,25 
@@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class LibpressioSperr(CMakePackage): + """A LibPressio plugin for Sperr""" + + homepage = "https://github.com/robertu94/libpressio-sperr" + url = "https://github.com/robertu94/libpressio-sperr/archive/refs/tags/0.0.1.tar.gz" + git = homepage + + maintainers = ["robertu94"] + + depends_on("libpressio@0.88.0:", when="@0.0.3:") + depends_on("libpressio@:0.88.0", when="@:0.0.2") + depends_on("sperr") + depends_on("pkgconfig", type="build") + + version("master", branch="master") + version("0.0.2", sha256="61995d687f9e7e798e17ec7238d19d917890dc0ff5dec18293b840c4d6f8c115") + version("0.0.1", sha256="e2c164822708624b97654046b42abff704594cba6537d6d0646d485bdf2d03ca") diff --git a/var/spack/repos/builtin/packages/libpressio-tools/package.py b/var/spack/repos/builtin/packages/libpressio-tools/package.py new file mode 100644 index 00000000000..bda9019fd48 --- /dev/null +++ b/var/spack/repos/builtin/packages/libpressio-tools/package.py @@ -0,0 +1,100 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class LibpressioTools(CMakePackage): + """General Utilities for LibPressio""" + + homepage = "https://github.com/robertu94/pressio-tools" + url = "https://github.com/robertu94/pressio-tools/archive/refs/tags/0.0.15.tar.gz" + git = "https://github.com/robertu94/pressio-tools" + + maintainers = ["robertu94"] + + version("master", branch="master") + version("0.1.6", sha256="a67a364f46dea29ff1b3e5c52c0a5abf2d9d53412fb8d424f6bd71252bfa7792") + version("0.1.5", sha256="b35f495fae53df87dd2abf58c0c51ed17710b16aaa2d0842a543fddd3b2a8035") + version("0.1.4", sha256="39adc4b09a63548a416ee4b1dcc87ec8578b15a176a11a2845c276c6c211f2d0") + version("0.1.3", sha256="4e6c39061d6d829936dfeb569ea997854694ef1a46f112e306672ee1cc1567a0") + version("0.1.2", sha256="a3379fd7c53c2eb0b5cdbf0e7eed37ae2a415f737885310d3da4d34fa55c618e") + version("0.1.1", sha256="adec3ea9a12677c647fbc3a1f9909fde6f2dd5ed662ed0ee5cd753b26397643e") + version("0.1.0", sha256="e016b1785f2dc5c8a8565ff3d7b50980788e057e61905a91ef1d16da73297a06") + version("0.0.24", sha256="b369efcc17f339fdd5741d817f1b7908bd2b5df5686c5406c6b1123b0daa82c5") + version("0.0.23", sha256="08a141be14e63e491216a89d45737040fc3450c5b793e6a4819cd06f876b2b0b") + version("0.0.22", sha256="9fcb20a3bf24e139386e94b413f10087d65ed32d2eb93cc7be8e87d736da9766") + version("0.0.21", sha256="2ffe1018ff86eca0928ab8bbf568b2cf7ab739f0e191e2722a6f5071dac4a153") + version("0.0.20", sha256="cad3a1dff25ae1dc442821e72fe8f7495e098bd0ea52c3beeac1ceb721c60351") + version("0.0.19", sha256="cc8a4bb5259b7b8e14248a1a741067a865a0db36645c878d346da983e74c9521") + version("0.0.18", sha256="766fcf6c4bd475de66107d379c76805d6368d71ee83cade645f2b7cd27801718") + version("0.0.17", sha256="cf76e8a929aa128d09f8f953171d5cf395223245bc81d2ea4e22099849e40b94") + version("0.0.16", sha256="1299e441fb15666d1c8abfd40f3f52b1bf55b6bfda4bfcc71177eec37160a95e") + version("0.0.15", 
sha256="bcdf865d77969a34e2d747034ceeccf5cb766a4c11bcc856630d837f442ee33e") + + depends_on("mpi", when="+mpi") + depends_on("libpressio+libdistributed+mpi", when="+mpi") + depends_on("libpressio", when="~mpi") + depends_on("libpressio+hdf5", when="+hdf5") + + depends_on("boost") + + # 0.1.0 changed a bunch of things in the build system, make sure everything is up to date + depends_on("libpressio@0.88.0:", when="@0.1.6:") + depends_on("libpressio@0.85.0:", when="@0.1.0:0.1.5") + depends_on("libpressio-opt@0.13.3:", when="@0.1.0:+opt") + depends_on("libpressio-errorinjector@0.7.0:", when="@0.1.0:+error_injector") + depends_on("libpressio-tthresh@0.0.5:", when="@0.1.0:+tthresh") + depends_on("libpressio-rmetric@0.0.4:", when="@0.1.0:+rcpp") + depends_on("libpressio-adios2@0.0.2", when="@0.1.0:+adios2") + + depends_on("libpressio-opt", when="+opt") + depends_on("libpressio-errorinjector", when="+error_injector") + depends_on("libpressio-tthresh", when="+tthresh") + depends_on("libpressio-rmetric", when="+rcpp") + depends_on("libpressio-adios2", when="+adios2") + depends_on("libpressio-sperr", when="+sperr") + depends_on("libpressio-nvcomp", when="+nvcomp") + + variant("hdf5", default=True, description="support the hdf5 package") + variant("opt", default=False, description="support the libpressio-opt package") + variant( + "error_injector", default=False, description="support the libpressio-errorinjector package" + ) + variant("tthresh", default=False, description="depend on the GPL licensed libpressio-tthresh") + variant("rcpp", default=False, description="depend on the GPL licensed libpressio-rmetric") + variant("mpi", default=False, description="depend on MPI for distributed parallelism") + variant("adios2", default=False, description="depend on ADIOS2 for IO modules") + variant("sperr", default=False, description="depend on sperr", when="@0.1.2:") + variant("nvcomp", default=False, description="depend on nvcomp", when="@0.1.0:") + conflicts("+opt", "~mpi") + + def 
cmake_args(self): + args = [] + if "+mpi" in self.spec: + args.append("-DLIBPRESSIO_TOOLS_HAS_MPI=YES") + if "+opt" in self.spec: + args.append("-DLIBPRESSIO_TOOLS_HAS_OPT=YES") + if "+error_injector" in self.spec: + args.append("-DLIBPRESSIO_TOOLS_HAS_ERROR_INJECTOR=YES") + if "+tthresh" in self.spec: + args.append("-DLIBPRESSIO_TOOLS_HAS_TTHRESH=YES") + if "+rcpp" in self.spec: + args.append("-DLIBPRESSIO_TOOLS_HAS_RMETRIC=YES") + if "+sperr" in self.spec: + args.append("-DLIBPRESSIO_TOOLS_HAS_SPERR=YES") + if "+nvcomp" in self.spec: + args.append("-DLIBPRESSIO_TOOLS_HAS_NVCOMP=YES") + if self.run_tests: + args.append("-DBUILD_TESTING=ON") + else: + args.append("-DBUILD_TESTING=OFF") + + return args + + @run_after("build") + @on_package_attributes(run_tests=True) + def test(self): + make("test") diff --git a/var/spack/repos/builtin/packages/libpressio-tthresh/package.py b/var/spack/repos/builtin/packages/libpressio-tthresh/package.py new file mode 100644 index 00000000000..d94019328d3 --- /dev/null +++ b/var/spack/repos/builtin/packages/libpressio-tthresh/package.py @@ -0,0 +1,39 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class LibpressioTthresh(CMakePackage): + """A tthresh implementation for libpressio""" + + homepage = "https://github.com/robertu94/libpressio_tthresh" + url = "https://github.com/robertu94/libpressio_tthresh/archive/refs/tags/0.0.1.tar.gz" + git = homepage + + maintainers = ["robertu94"] + + version("main", branch="main") + version("0.0.6", sha256="e9dc4754421d892a86516c6bb892f6ff582e9ea3c242c1c052104e4f6944cbec") + version("0.0.5", sha256="af47c90e9c16825312e390a7fb30d9d128847afb69ad6c2f6608bd80f60bae23") + version("0.0.3", sha256="b0b0a4876d3362deafc2bb326be33882132e3d1666e0c5f916fd6fad74a18688") + version("0.0.1", sha256="9efcfa97a5a81e9c456f50b712adb806d9d2f2ed6039860615df0f2e9d96569e") + + depends_on("eigen") + depends_on("libpressio@0.85.0:", when="@:0.0.5") + depends_on("libpressio@0.88.0:", when="@0.0.6:") + + def cmake_args(self): + args = [] + if self.run_tests: + args.append("-DBUILD_TESTING=ON") + else: + args.append("-DBUILD_TESTING=OFF") + return args + + @run_after("build") + @on_package_attributes(run_tests=True) + def test(self): + make("test") diff --git a/var/spack/repos/builtin/packages/libpressio/package.py b/var/spack/repos/builtin/packages/libpressio/package.py new file mode 100644 index 00000000000..2a8a8521ba9 --- /dev/null +++ b/var/spack/repos/builtin/packages/libpressio/package.py @@ -0,0 +1,331 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class Libpressio(CMakePackage, CudaPackage): + """A generic abstraction for the compression of dense tensors""" + + # codarcode gets "stable" releases ~1/yr; robertu94 contains development versions + homepage = "https://github.com/codarcode/libpressio" + url = "https://github.com/robertu94/libpressio/archive/0.31.1.tar.gz" + git = "https://github.com/robertu94/libpressio" + + version("master", branch="master") + version("develop", branch="develop") + version("0.88.0", sha256="4358441f0d10559d571327162a216617d16d09569a80e13ad286e3b7c41c5b9b") + version("0.87.0", sha256="2bea685e5ed3a1528ea68ba4a281902ff77c0bebd38ff212b6e8edbfa263b572") + version("0.86.7", sha256="2a6319640a018c39aa93aaf0f027fd496d7ea7dc5ac95509313cf1b4b6b1fb00") + version("0.86.6", sha256="31ac77137c31a524c2086e1fe4d9b1d3c1bc6d8594662cd4b67878ba8887cabb") + version("0.86.5", sha256="6e6ffe7585e298061f6f5ff79a9fe7edf722a8c124a87282bae864ed6a167246") + version("0.86.4", sha256="52a1d932b30a9390e836ea4b102225b176f8feebbac598a0ab3a81a9ac83775c") + version("0.86.3", sha256="6b010e394fc916ad2233e941a49f70555dda40521e3668f2e10502c7bfa529be") + version("0.86.2", sha256="e221cb256e1b387ce1245cab5704c10d351812c003b257655d43b156b9650a89") + version("0.86.1", sha256="89b1b652215f67635da1baac81d3f927ff00f335c473322edcf24472b5a9b5a4") + version("0.86.0", sha256="867bd6ea6b632b7f6d6a89aac073cea738b574825c81ee83318802e9d3d5fbe8") + version("0.85.0", sha256="79a600fdd5c7a418a0380425e1bbeb245d5d86e1676f251e5900b69738b72423") + version("0.84.3", sha256="7b2ca198f919c1f981c88722da33ef69b564fe123d49330ad6ba17eba80c046e") + version("0.84.2", sha256="c50b599a22ab89b7ef57dbaa717f5e97f4437d2bd4b6e572274c8c98022b05da") + version("0.84.0", sha256="b22320a54dbb9f65a66af2a6f335884e7ba48abd3effe643e51e4e7cfe793b7d") + version("0.83.4", sha256="9dd0efff1c6121e964b45371d6a52895f6a8db3d5cdabbd1e951b696a3f590e3") + version("0.83.3", 
sha256="59e2bb2c1eb422c03204bfc713bc76d7bbaeaeba6430e1204577495c07eef34d") + version("0.83.2", sha256="56dd63cb3924fb57f8f53929faecf2a5211985f160cdacf38b3d001e22728427") + version("0.83.1", sha256="2afdc8421b4c0f638c8547bcdd54bdb405d1717dca32b804621c5c152adbe2a6") + version("0.83.0", sha256="7c692bbf3ebdfa508a493902eb561c85b9087dd8003547dcd54baf0b2188d9bd") + version("0.82.3", sha256="97a6a0a022d8ae60f477ce21d1ff10cc47bb2f7d3891bb3b49f4a7b166f9c2e1") + version("0.82.2", sha256="ce2d566c627a5341e1fd58261b2d38567b84d963f1045e2e4aac87e67ac06d89") + version("0.82.1", sha256="f6b41ad6f56311078e67e68063f9124f32e63a9c1c9c0c0289c75addaf9fed94") + version("0.82.0", sha256="e60f843dda8312ae4269c3ee23aad67b50f29a8830c84fb6c10309c37e442410") + version("0.81.0", sha256="51ab443a42895fefb4e0ae8eb841402f01a340f3dd30dcb372f837e36ac65070") + version("0.80.1", sha256="9168789f8714d0bbce1a03ff3a41ef24c203f807fed1fbd5ca050798ebef015f") + version("0.80.0", sha256="f93292dc258224a8ef69f33299a5deecfb45e7ea530575eeaa4ceff48093d20e") + version("0.79.0", sha256="e843d8f70369e30d0135b513926ac4a5dacd3042c307c132e80a29b7349e8501") + version("0.78.0", sha256="d9292150686d2be616cd9145c24fe6fc12374df023eee14099ffdf7071e87044") + version("0.77.0", sha256="d2f362c8b48b6ea6b3a099f3dcb0ce844e3b45fd6cf0c4130fbbf48d54d1a9b3") + version("0.76.1", sha256="09b6926efefa1b10f400dfc94927c195d1f266f34ed34cddeba11707c0cc6982") + version("0.76.0", sha256="8ec0e3bcc57511a426047748f649096cf899a07767ddbcdbfad28500e1190810") + version("0.75.1", sha256="8b9beb79507196575649d32116d13833e7dc9765370c245ac5a3640a50cb106a") + version("0.75.0", sha256="83aadd5e6172b3654b955954d13f2d9346fcd008bc901746f6f8b65a978235ee") + version("0.74.1", sha256="aab7211c244a7a640e0b2d12346463c8650ef4f8d48fc58819a20d3b27ab5f81") + version("0.74.0", sha256="2fbd54bbc4d1f3ce4b107ac625ad97c6396bff8873f2ac51dd049d93aa3f2276") + version("0.73.0", sha256="059c90ab50d2e50a1fff8bf25c0c387a9274090bf8657fa49aa1c211b4690491") + version("0.72.2", 
sha256="1f620b8af272dd2823712c1e38a69c6375febe49eb9155a3f04667ea1931ebdb") + version("0.72.1", sha256="f8ab9559c40a6a93ad0c1a894acf71e07c9fe1994f464852c9dd6f0423a6dc51") + version("0.72.0", sha256="0e6e7327a21a0cd6cf56fa4c62ba5ec1c41381ac053602d8acaa854bdfd1cb30") + version("0.71.3", sha256="f1185acdc6143fe7e417754032336ef50fec5760b08cb291962305429adf18da") + version("0.71.2", sha256="0501f6a0a9cfad62f80834d1dd77c678b000202903168aec0d2c4928ff6e581c") + version("0.71.1", sha256="cd9daa4b28da3b5e3cb36cace11b4e580a66fb14ca04a807c5a135a9448bb5df") + version("0.71.0", sha256="9b9ba9689c53e9cfa4d9fee52653ed393d2307c437dac41daceb6f98564fbcd1") + version("0.70.8", sha256="f0600cabd0341669ef1d6e838ef3496cff5200239a3b96a4941c434d71e4517c") + version("0.70.7", sha256="82722a9e7fbec3b2d79be226ba73bbf3108d3206d006a763db46d20cc044a8b5") + version("0.70.6", sha256="e76be47b0b8bd18d7ac44d59242adc45dc721465638aefd2c8564fd778d1adbd") + version("0.70.5", sha256="c6ee62643c08a7ceca7c45eb28edff0eeb070671bf0d502563b6cc8ada4bf695") + version("0.70.4", sha256="6df62154d0a8919fa91f6fce4ffb2f77584d5ddc61c85eee34557d36de9906b2") + version("0.70.3", sha256="40cca7f6d3bd19fdcf6f6c17521acdf63dfda0fb5b173c23d4521818b16a9a46") + version("0.70.2", sha256="30929e02c0ce5db8d9ff1eeca42df92e68439c7dd5a3c1fea0bb44ead2343442") + version("0.70.1", sha256="855923ca58b1c549681d368d2112d05b96fae9e3199f2a10c2013fcb2f630036") + version("0.70.0", sha256="1e987dcea76b2bd01f7e59b404267c7614a7c99b3fbc0ae745bf8e9426f489c6") + version("0.69.0", sha256="22e47deb4791778846b9c858295b756f91e1d8c884ccf246c2df2bf9b56a04d5") + version("0.68.0", sha256="c7008e6f6b4451812070ece7e9b2fb6cc2fb04971255f95c8274375a698c6794") + version("0.66.3", sha256="7423339831525a128115d446b1dd7fb7942f2aed24e0ec3778396d2c0c379678") + version("0.66.2", sha256="89a6459b6fcf1273f8afc7317e7351c09be977aeb3bb6554941166074ee2030f") + version("0.66.1", sha256="1de2d3d911fc91f7aa9f57eda467f1aadd7060a680538b82c678a5f4e7e6c5d0") + version("0.66.0", 
sha256="c3063a85c8f17df6ba1722f06eaab74fe14a53f37f5a86664c23a7f35d943f3a") + version("0.65.0", sha256="beb4f7bc73b746fe68c4333fa4d4e1dba05f5f5fb386874b83cbf7f105e83c45") + version("0.64.0", sha256="1af87b410eabee7f377b047049eae486cf3161fa67546789440f1d1e56e2324d") + version("0.63.0", sha256="32d716f52073d7ea246d01fefb420bfe5b834ebc10579edd79ebce7a87dd1a81") + version("0.62.0", sha256="248eedc764312da401aa29304275e009196ebdb5b08594a1522bb165c16874aa") + version("0.61.0", sha256="7b4304b7556d8ec0742d1b8a9280f7f788307d2a6f4d2f59cc8e8358b6c69c11") + version("0.60.0", sha256="a57fce96d50a603075a8a4a583431a1a03170df4d2894ff30f84d8c5ab4caf47") + version("0.59.0", sha256="eae5933a7b37834cf4f70424b083f99799f9381ee8bb616f3a01d4ab2e5631a6") + version("0.58.0", sha256="6b092dda66e7cc1bc4842fe54ab41248c4f136307cc955081e8052222c82aff1") + version("0.57.0", sha256="4f978616c13f311170fdc992610ad1fd727884cf0d20b6849b2c985d936c482b") + version("0.56.2", sha256="1ae20415ba50a4dcfec7992e9a571f09f075f077ebdd7c1afb9a19b158f6205d") + version("0.56.1", sha256="01b7c09f1eafff819de0079baf033f75547432be62dc10cb96691d078994a4e9") + version("0.56.0", sha256="77003c9dde0590ca37fddfbe380b29b9f897fa0dadb9b9f953819f5e9d9f08f0") + version("0.55.3", sha256="f8c6ae6ae48c4d38a82691d7de219ebf0e3f9ca38ae6ba31a64181bfd8a8c50a") + version("0.55.2", sha256="47f25f27f4bff22fd32825d5a1135522e61f9505758dde3d093cfbdaff0b3255") + version("0.55.1", sha256="39f1799d965cd0fec06f0a43dec865c360cbb206e4254f4deb4f7b7f7f3c3b2f") + version("0.55.0", sha256="fb74cfe2a8f3da51f9840082fa563c2a4ce689972c164e95b1b8a1817c4224cf") + version("0.54.0", sha256="cd28ddf054446c286f9bfae563aa463e638ee03e0353c0828a9ce44be4ce2df9") + version("0.53.2", sha256="4a7b57f1fd8e3e85ecf4a481cc907b81a71c4f44cf2c4a506cb37a6513a819a4") + version("0.53.1", sha256="1425dec7ee1a7ddf1c3086b83834ef6e49de021901a62d5bff0f2ca0c75d3455") + version("0.53.0", sha256="0afb44c2dab8dd8121d174193eb6623b29da9592e5fe1bbe344cfc9cacbec0cb") + version("0.52.2", 
sha256="c642463da0bbdd533399e43c84ea0007b1d7da98276c26bc075c7b4778f97a01") + version("0.52.1", sha256="32f211aaf4641223bf837dc71ea064931f85aa9260b9c7f379787ca907c78c3a") + version("0.52.0", sha256="2fd4cf0cc43c363b2e51cb264a919a1b43514aad979b9b5761b746fc70490130") + version("0.51.0", sha256="878d5399c4789034b7f587a478e2baf8e852f7f1f82aa59e276ddf81d325b934") + version("0.50.4", sha256="f4ab7dada0e07ecf97f88e2dd7ca6c4755fb0f4175d8d12ed3a856c45b240bde") + version("0.50.3", sha256="cc78bfc9a5d1b061098c892e9c8ff14861aa48ea95f0e9684ca4250d30c38889") + version("0.50.2", sha256="0ef1355f905d48ed01c034a8d418e9c528113d65acb3dd31951297029c5aaed4") + version("0.50.1", sha256="1500bae01ba74c330bc205b57423688c2b1aacafe1aabcaf469b221dcda9beec") + version("0.50.0", sha256="c50fb77b5c8d535fe0c930e5d4400d039ad567a571ea9711b01d6d5bd2a26fb6") + version("0.49.2", sha256="cde90e0183024dc1a78d510e2ae3effa087c86c5761f84cba0125f10abc74254") + version("0.49.1", sha256="6d1952ada65d52d2fd5d4c60bb17e51d264c2c618f9b66dadeffa1e5f849394a") + version("0.49.0", sha256="adfe5c64a5d170197893fe5a4c9338cde6cbdd5b54e52534886425101be4458f") + version("0.48.0", sha256="087a7f944240caf2d121c1520a6877beea5d30cc598d09a55138014d7953348a") + version("0.47.0", sha256="efce0f6f32e00482b80973d951a6ddc47b20c8703bd0e63ab59acc0e339d410b") + version("0.46.3", sha256="24bc5d8532a90f07ab2a412ea28ddbfc8ff7ab27cd9b4e7bd99a92b2a0b5acfd") + version("0.46.2", sha256="3ebbafa241e54cb328966ea99eab5d837c4a889f17c3b2048cc2961166a84cc4") + version("0.46.1", sha256="be7468b30a367bcbefab09ed5ac07320cd323904c9129d6b874175b39ef65cd9") + version("0.46.0", sha256="ab944358edc7e03be604749002f1f00aaf4d55d20bac2689d40bd4e66357879d") + version("0.45.0", sha256="b3307b99f82f0300dfed7dd122447a6e74ca8ad8c012d2fc60467e6e067ac226") + version("0.44.0", sha256="cec114325167731233be294aab329d54862457cb2e1f1a87d42d100da7c53aa5") + version("0.42.2", sha256="a9289260eb0a4eaf4550c2d6ad1af7e95a669a747ce425ab9a572d4ab80e2c1f") + version("0.42.1", 
sha256="5f79487568ec4625b0731f0c10efb565201602a733d1b6ac1436e8934cf8b8ec") + version("0.42.0", sha256="c08e047e202271ec15eeda53670c6082815d168009f4e993debcc0d035904d6b") + version("0.41.0", sha256="b789360d70656d99cd5e0ceebfc8828bdf129f7e2bfe6451592a735be9a0809a") + version("0.40.1", sha256="73a65f17e727191b97dfdf770dd2c285900af05e6fee93aa9ced9eadb86f58ff") + version("0.40.0", sha256="80e68172eeef0fbff128ede354eaac759a9408c3ef72c5eed871bb9430b960ff") + version("0.39.0", sha256="e62fea9bcb96529507fdd83abc991036e8ed9aa858b7d36587fce3d559420036") + version("0.38.2", sha256="5f38387d92338eac8658cd70544a5d9a609bd632090f4f69bcbc9f07ec4abd7b") + version("0.38.1", sha256="99ff1ff61408e17f67ab427c51add074f66ab7375a506ae70dcb24c47a8ea636") + version("0.38.0", sha256="e95aa6e4161324fa92fa236ea2bf08a7267a883ef4ca5fbb8bbf75e70db1ce4f") + version("0.37.0", sha256="98877fa2daf91ac91c2e0e0014684131d6efc4a1f2f77917d40fdbf424d74588") + version("0.36.0", sha256="452a3973cf359786409e064ca4b63a5f81072a9d72a52d1a4084d197f21fc26b") + version("0.35.0", sha256="50e6de305e1ffdcf423cec424e919bb0bdebee6449d34ff26a40421f09392826") + version("0.34.4", sha256="5a997c6f4b8c954a98046a851d0f3b31ce7c5be6e7e615068df4f1d7b86c9630") + version("0.34.3", sha256="1f5994862c33df4588d411b49fba20a40294627d0b325bbd5234f169eb1d4842") + version("0.34.2", sha256="3b8d3f801799023c8efe5069895723ce4e742330282774dc0873c2fa3910eeb2") + version("0.34.1", sha256="791ff249a685fab1733d4c3c936db6a064aa912d47926ad4bd26b1829f6e2178") + version("0.34.0", sha256="da62a15da103e763e34dae43be3436873e4fb550630dddc55232ae644accda02") + version("0.33.0", sha256="61200855a0846ce765b686fa368496f44534e633031811803ba2cb31f94c25b1") + version("0.32.0", sha256="187e75fc6d3f84003829d2b8aec584e99d72d65f2d82835998714ae05ae008af") + version("0.31.1", sha256="32c1fd8319fbbb844a0a252d44761f81f17c6f3549daadce47e81524d84605a4") + version("0.31.0", sha256="9d4bc8b2c1a210a58f34216cebe7cd5935039d244b7e90f7e2792bda81ff7ddc") + version("0.30.1", 
sha256="e2249bdced68d80a413de59f8393922553a8900a14e731030e362266e82a9af8") + version("0.30.0", sha256="91de53099d9381e3744e7a1ac06d2db0f9065378c4d178328b78ac797ee3ec65") + version("0.29.1", sha256="ced1e98fbd383669e59ec06d2e0c15e27dbceda9ac5569d311c538b2fe6d3876") + version("0.29.0", sha256="a417a1d0ed75bd51131b86fba990502666d8c1388ad6282b3097aa461ccf9785") + version("0.28.0", sha256="5c4e0fe8c7c80615688f271b57b35ee9d924ac07c6d3d56d0303e610338ed332") + version("0.27.1", sha256="3f7d2401ff8b113781d93c5bf374f47ca35b2f962634c6310b73620da735e63d") + version("0.27.0", sha256="387ee5958de2d986095cda2aaf39d0bf319d02eaeeea2a565aea97e6a6f31f36") + version("0.26.0", sha256="c451591d106d1671c9ddbb5c304979dd2d083e0616b2aeede62e7a6b568f828c") + + variant("blosc", default=False, description="support the blosc lossless compressors") + variant("fpzip", default=False, description="support for the FPZIP lossy compressor") + variant("hdf5", default=False, description="support reading and writing from hdf5 files") + variant("magick", default=False, description="support the imagemagick image compressors") + variant( + "mgard", default=False, description="support for the MAGARD error bounded lossy compressor" + ) + variant("python", default=False, description="build the python wrappers") + variant("sz", default=False, description="support for the SZ error bounded lossy compressor") + variant("zfp", default=False, description="support for the ZFP error bounded lossy compressor") + variant("boost", default=False, description="support older compilers using boost") + variant("petsc", default=False, description="support IO using petsc format") + variant("mpi", default=False, description="support for launching processes using mpi") + variant("lua", default=False, description="support for composite metrics using lua") + variant( + "libdistributed", default=False, description="support for distributed multi-buffer support" + ) + variant("ftk", default=False, description="build support for the 
feature tracking toolkit") + variant("digitrounding", default=False, description="build support for the digit rounding") + variant("bitgrooming", default=False, description="build support for the bitgrooming") + variant("openmp", default=False, description="build plugins that use openmp") + variant("docs", default=False, description="build and install manual pages") + variant("remote", default=False, description="build the remote launch plugin") + variant("json", default=False, description="build the JSON support") + variant("szauto", default=False, description="build szauto support") + variant("unix", default=False, description="build support for unixisms like mmap and rusage") + variant("ndzip", default=False, description="build support for the NDZIP compressor") + variant("arc", default=False, description="build support for the ARC error correction tool") + variant("netcdf", default=False, description="build support for the NDFCDF data format") + variant("sz3", default=False, description="build support for the SZ3 compressor family") + variant("mgardx", default=False, description="build support for the MGARDx compressor") + variant("bzip2", default=False, description="build support for the bzip2 compressor") + variant("qoz", default=False, description="build support for the qoz compressor") + variant( + "cusz", default=False, description="build support for the cusz compressor", when="@0.86.0:" + ) + + depends_on("boost", when="@:0.51.0+boost") + + depends_on("libstdcompat+boost", when="+boost") + depends_on("libstdcompat@0.0.14:", when="@0.79.0:") + depends_on("libstdcompat@0.0.13:", when="@0.73.0:") + depends_on("libstdcompat@0.0.10:", when="@0.71.3:") + depends_on("libstdcompat@0.0.7:", when="@0.70.3:") + depends_on("libstdcompat@0.0.6:", when="@0.70.2:") + depends_on("libstdcompat@0.0.5:", when="@0.63.0:") + depends_on("libstdcompat@0.0.3:", when="@0.60.0:") + depends_on("libstdcompat", when="@0.52.0:") + + depends_on("c-blosc", when="+blosc") + 
depends_on("fpzip", when="+fpzip") + depends_on("hdf5", when="+hdf5") + depends_on("imagemagick", when="+magick") + depends_on("mgard", when="+mgard") + depends_on("python@3:", when="+python", type=("build", "link", "run")) + depends_on("py-numpy", when="+python", type=("build", "link", "run")) + depends_on("swig@3.12:", when="+python", type="build") + depends_on("sz@2.1.8.1:", when="@0.55.2:+sz") + depends_on("sz@2.1.11.1:", when="@0.55.3:+sz") + depends_on("sz@2.1.12:", when="@0.69.0:+sz") + depends_on("fftw", when="+sz ^sz@:2.1.10") + depends_on("zfp", when="+zfp") + depends_on("petsc", when="+petsc") + depends_on("mpi@2:", when="+mpi") + depends_on("lua-sol2", when="+lua") + depends_on("libdistributed@0.0.11:", when="+libdistributed") + depends_on("libdistributed@0.4.0:", when="@0.85.0:+libdistributed") + depends_on("pkgconfig", type="build") + depends_on("ftk@master", when="+ftk") + depends_on("digitrounding", when="+digitrounding") + depends_on("bitgroomingz", when="+bitgrooming") + depends_on("cmake@3.14:", type="build") + depends_on("py-mpi4py", when="@0.54.0:+mpi+python", type=("build", "link", "run")) + depends_on("py-numcodecs", when="@0.54.0:+python", type="run") + depends_on("doxygen+graphviz", when="+docs", type="build") + depends_on("curl", when="+remote") + depends_on("nlohmann-json+multiple_headers", when="+remote") + depends_on("nlohmann-json+multiple_headers", when="+json") + depends_on("szauto", when="+szauto") + depends_on("ndzip", when="+ndzip") + depends_on("arc", when="+arc") + depends_on("netcdf-c", when="+netcdf") + depends_on("mgardx", when="+mgardx") + conflicts( + "+mgardx", when="+szauto" + ) # SZ auto and MGARDx cause symbol conflicts with each other + conflicts( + "~json", + when="@0.57.0:+remote", + msg="JSON support required for remote after version 0.57.0", + ) + depends_on("sz3", when="+sz3") + depends_on("bzip2", when="+bzip2") + depends_on("qoz", when="+qoz") + depends_on("cusz", when="+cusz") + + extends("python", 
when="+python") + + def cmake_args(self): + args = [] + if "+python" in self.spec: + args.append("-DLIBPRESSIO_PYTHON_SITELIB={0}".format(python_platlib)) + args.append("-DBUILD_PYTHON_WRAPPER=ON") + args.append("-DPython3_EXECUTABLE={0}".format(self.spec["python"].command)) + if "+mpi" in self.spec: + args.append("-DLIBPRESSIO_HAS_MPI4PY=ON") + if "+hdf5" in self.spec: + args.append("-DLIBPRESSIO_HAS_HDF=ON") + args.append("-DHDF5_ROOT=" + self.spec["hdf5"].prefix) + if "+sz" in self.spec: + args.append("-DLIBPRESSIO_HAS_SZ=ON") + if "+szauto" in self.spec: + args.append("-DLIBPRESSIO_HAS_SZ_AUTO=ON") + if "+zfp" in self.spec: + args.append("-DLIBPRESSIO_HAS_ZFP=ON") + if "+fpzip" in self.spec: + args.append("-DLIBPRESSIO_HAS_FPZIP=ON") + if "+blosc" in self.spec: + args.append("-DLIBPRESSIO_HAS_BLOSC=ON") + if "+magick" in self.spec: + args.append("-DLIBPRESSIO_HAS_MAGICK=ON") + if "+mgard" in self.spec: + args.append("-DLIBPRESSIO_HAS_MGARD=ON") + if "+petsc" in self.spec: + args.append("-DLIBPRESSIO_HAS_PETSC=ON") + if "+boost" in self.spec: + args.append("-DLIBPRESSIO_CXX_VERSION=11") + if "+mpi" in self.spec: + args.append("-DLIBPRESSIO_HAS_MPI=ON") + if "+lua" in self.spec: + args.append("-DLIBPRESSIO_HAS_LUA=ON") + if "+libdistributed" in self.spec: + args.append("-DLIBPRESSIO_HAS_LIBDISTRIBUTED=ON") + if "+ftk" in self.spec: + args.append("-DLIBPRESSIO_HAS_FTK=ON") + if "+bitgrooming" in self.spec: + args.append("-DLIBPRESSIO_HAS_BIT_GROOMING=ON") + if "+digitrounding" in self.spec: + args.append("-DLIBPRESSIO_HAS_DIGIT_ROUNDING=ON") + if "+openmp" in self.spec: + args.append("-DLIBPRESSIO_HAS_OPENMP=ON") + if "+docs" in self.spec: + args.append("-DBUILD_DOCS=ON") + args.append("-DLIBPRESSIO_INSTALL_DOCS=ON") + if "+remote" in self.spec: + args.append("-DLIBPRESSIO_HAS_REMOTELAUNCH=ON") + if "+json" in self.spec: + args.append("-DLIBPRESSIO_HAS_JSON=ON") + if "+unix" in self.spec: + args.append("-DLIBPRESSIO_HAS_LINUX=ON") + if "+ndzip" in self.spec: + 
args.append("-DLIBPRESSIO_HAS_NDZIP=ON") + if "+arc" in self.spec: + args.append("-DLIBPRESSIO_HAS_ARC=ON") + if "+netcdf" in self.spec: + args.append("-DLIBPRESSIO_HAS_NETCDF=ON") + if "+sz3" in self.spec: + args.append("-DLIBPRESSIO_HAS_SZ3=ON") + if "+cuda" in self.spec: + args.append("-DLIBPRESSIO_HAS_CUFILE=ON") + if "+mgardx" in self.spec: + args.append("-DLIBPRESSIO_HAS_MGARDx=ON") + if "+bzip2" in self.spec: + args.append("-DLIBPRESSIO_HAS_BZIP2=ON") + if "+qoz" in self.spec: + args.append("-DLIBPRESSIO_HAS_QoZ=ON") + if "+cusz" in self.spec: + args.append("-DLIBPRESSIO_HAS_CUSZ=ON") + if self.run_tests: + args.append("-DBUILD_TESTING=ON") + else: + args.append("-DBUILD_TESTING=OFF") + return args + + @run_after("build") + @on_package_attributes(run_tests=True) + def test(self): + make("test") + + @run_after("build") + def install_docs(self): + if "+docs" in self.spec: + with working_dir(self.build_directory): + make("docs") diff --git a/var/spack/repos/builtin/packages/libressl/package.py b/var/spack/repos/builtin/packages/libressl/package.py new file mode 100644 index 00000000000..04550700341 --- /dev/null +++ b/var/spack/repos/builtin/packages/libressl/package.py @@ -0,0 +1,29 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class Libressl(AutotoolsPackage): + """LibreSSL is a version of the TLS/crypto stack forked from OpenSSL + in 2014, with goals of modernizing the codebase, improving + security, and applying best practice development processes.""" + + homepage = "https://www.libressl.org" + url = "https://ftp.openbsd.org/pub/OpenBSD/LibreSSL/libressl-3.6.1.tar.gz" + + maintainers = ["eschnett"] + + version("3.6.1", sha256="acfac61316e93b919c28d62d53037ca734de85c46b4d703f19fd8395cf006774") + + variant("shared", default=True, description="Build shared libraries") + variant("static", default=False, description="Build static libraries") + + def configure_args(self): + args = [ + "--enable-shared" if "+shared" in self.spec else "--disable-shared", + "--enable-static" if "+static" in self.spec else "--disable-static", + ] + return args diff --git a/var/spack/repos/builtin/packages/libstdcompat/package.py b/var/spack/repos/builtin/packages/libstdcompat/package.py new file mode 100644 index 00000000000..481dbce1c86 --- /dev/null +++ b/var/spack/repos/builtin/packages/libstdcompat/package.py @@ -0,0 +1,98 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class Libstdcompat(CMakePackage): + """A compatibility header for C++14, 17, and 20 for C++11""" + + homepage = "https://github.com/robertu94/std_compat" + url = "https://github.com/robertu94/std_compat/archive/0.0.1.tar.gz" + git = "https://github.com/robertu94/std_compat" + + maintainers = ["robertu94"] + + version("master", branch="master") + version("0.0.15", sha256="af374a8883a32d874f6cd18cce4e4344e32f9d60754be403a5ac7114feca2a28") + version("0.0.14", sha256="9a794d43a1d79aec3350b89d8c06689b8b32cf75e2742cdfa9dc0e3f2be6f04e") + version("0.0.13", sha256="460656189e317e108a489af701fa8f33f13a93d96380788e692a1c68100e0388") + version("0.0.12", sha256="67c1d1724122a1ba7cebcd839658786680fa06a549369f4a7c36a44ad93ddd5d") + version("0.0.11", sha256="f166cd55e3cf845e4ed9eee1fb25de1f991dee5ef538c1e3ea9cbe7714863ccb") + version("0.0.10", sha256="d55ad9b7f61efa5a4bbef047f729af5ed9e44f96bb9d54f36023fa99af2bfe40") + version("0.0.9", sha256="325e816153aab0aee791e4c628e01dbc5b7aa336558d1694bd5de763f34e37e6") + version("0.0.8", sha256="3103295033fb6723dc462a8979ccfe3b571347c2a458f4cc8d8324981dedead9") + version("0.0.7", sha256="8cb4ed704aef427bbe4c86ee874a350561e6e059223e7b3d60f1e0d7300ccfe9") + version("0.0.6", sha256="cf4288422c9e9ab9e7c831c11a6a67907fe19b0da40601cc2b05e76e3be2f795") + version("0.0.5", sha256="a8599a12253b5ebdb38c6e416e7896444fd48a15167fe481985182ed17fc6883") + version("0.0.4", sha256="b2aeb4e60105635acb3f41b2c9559956e7b75d999c73b84b14be5b78daa4e6a9") + version("0.0.3", sha256="098678618a335bb2e8b25ceae8c3498f4c3056fd9e03467948bab18252afb46d") + version("0.0.2", sha256="36424399e649be38bdb21899aa45f94aebba25c66048bab2751b1b3b9fd27238") + version("0.0.1", sha256="3d63e901f4e20b9032a67086f4b4281f641ee0dea436cf15f7058faa40d8637b") + + variant( + "cpp_compat", + values=("11", "14", "17", "20", "auto"), + default="auto", + multi=False, + description="version of the c++ standard to 
use and depend on", + ) + variant("cpp_unstable", default=True, description="sets CXX_STANDARD_REQUIRED") + variant("boost", default=False, description="support older compilers using boost") + + depends_on("boost+thread", when="%gcc@:8.0.0") + depends_on("boost+thread", when="+boost") + depends_on("boost+thread", when="cpp_compat=11") + depends_on("boost+thread", when="cpp_compat=14") + + conflicts("~cpp_unstable", when="@0.0.7: cpp_compat=auto") + conflicts("+cpp_unstable", when="@:0.0.7") + conflicts("cpp_compat=11", when="@:0.0.7") + conflicts("cpp_compat=14", when="@:0.0.7") + conflicts("cpp_compat=17", when="@:0.0.7") + conflicts("cpp_compat=20", when="@:0.0.7") + + def max_cxx_version(self): + try: + self.compiler.cxx17_flag + return "17" + except Exception: + pass + try: + self.compiler.cxx14_flag + return "14" + except Exception: + pass + self.compiler.cxx11_flag + return "11" + + def cmake_args(self): + args = [] + cpp_compat = self.spec.variants["cpp_compat"].value + + if "+cpp_unstable" in self.spec: + args.append("-DSTDCOMPAT_CXX_UNSTABLE=ON") + + if cpp_compat == "auto": + args.append("-DSTDCOMPAT_CXX_VERSION=%s" % self.max_cxx_version()) + elif cpp_compat == "11": + args.append("-DSTDCOMPAT_CXX_VERSION=11") + elif cpp_compat == "14": + args.append("-DSTDCOMPAT_CXX_VERSION=14") + elif cpp_compat == "17": + args.append("-DSTDCOMPAT_CXX_VERSION=17") + elif cpp_compat == "20": + args.append("-DSTDCOMPAT_CXX_VERSION=20") + + if self.run_tests: + args.append("-DBUILD_TESTING=ON") + else: + args.append("-DBUILD_TESTING=OFF") + return args + + @run_after("build") + @on_package_attributes(run_tests=True) + def test(self): + make("test") diff --git a/var/spack/repos/builtin/packages/libtiff/package.py b/var/spack/repos/builtin/packages/libtiff/package.py index a17c1eb21f3..535a3145b60 100644 --- a/var/spack/repos/builtin/packages/libtiff/package.py +++ b/var/spack/repos/builtin/packages/libtiff/package.py @@ -3,10 +3,34 @@ # # SPDX-License-Identifier: 
(Apache-2.0 OR MIT) +from spack.build_systems.autotools import AutotoolsBuilder +from spack.build_systems.cmake import CMakeBuilder from spack.package import * +VARIANTS = [ + # Internal codecs + "ccitt", + "packbits", + "lzw", + "thunder", + "next", + "logluv", + # External codecs + "zlib", + "libdeflate", + "pixarlog", + "jpeg", + "old-jpeg", + "jpeg12", + "jbig", + "lerc", + "lzma", + "zstd", + "webp", +] -class Libtiff(AutotoolsPackage): + +class Libtiff(CMakePackage, AutotoolsPackage): """LibTIFF - Tag Image File Format (TIFF) Library and Utilities.""" homepage = "http://www.simplesystems.org/libtiff/" @@ -23,19 +47,42 @@ class Libtiff(AutotoolsPackage): version("4.0.8", sha256="59d7a5a8ccd92059913f246877db95a2918e6c04fb9d43fd74e5c3390dac2910") version("4.0.7", sha256="9f43a2cfb9589e5cecaa66e16bf87f814c945f22df7ba600d63aac4632c4f019") version("4.0.6", sha256="4d57a50907b510e3049a4bba0d7888930fdfc16ce49f1bf693e5b6247370d68c") + version("4.0.5", sha256="e25eaa83ed7fab43ddd278b9b14d91a406a4b674cedc776adb95535f897f309c") + version("4.0.4", sha256="8cb1d90c96f61cdfc0bcf036acc251c9dbe6320334da941c7a83cfe1576ef890") version("3.9.7", sha256="f5d64dd4ce61c55f5e9f6dc3920fbe5a41e02c2e607da7117a35eb5c320cef6a") - variant("zlib", default=True, description="Enable Zlib usage") - variant("libdeflate", default=False, description="Enable libdeflate usage", when="@4.2:") - variant("pixarlog", default=False, description="Enable support for Pixar log-format algorithm") - variant("jpeg", default=True, description="Enable IJG JPEG library usage") - variant("old-jpeg", default=False, description="Enable support for Old JPEG compression") - variant("jpeg12", default=False, description="Enable libjpeg 8/12bit dual mode", when="@4:") - variant("jbig", default=False, description="Enable JBIG-KIT usage") - variant("lerc", default=False, description="Enable liblerc usage", when="@4.3:") - variant("lzma", default=False, description="Enable liblzma usage", when="@4:") - variant("zstd", 
default=False, description="Enable libzstd usage", when="@4.0.10:") - variant("webp", default=False, description="Enable libwebp usage", when="@4.0.10:") + # Internal codecs + variant("ccitt", default=True, description="support for CCITT Group 3 & 4 algorithms") + variant("packbits", default=True, description="support for Macintosh PackBits algorithm") + variant("lzw", default=True, description="support for LZW algorithm") + variant("thunder", default=True, description="support for ThunderScan 4-bit RLE algorithm") + variant("next", default=True, description="support for NeXT 2-bit RLE algorithm") + variant("logluv", default=True, description="support for LogLuv high dynamic range algorithm") + + # External codecs + variant("zlib", default=True, description="use zlib") + variant("libdeflate", default=False, description="use libdeflate", when="@4.2:") + variant("pixarlog", default=False, description="support for Pixar log-format algorithm") + variant("jpeg", default=True, description="use libjpeg") + variant("old-jpeg", default=False, description="support for Old JPEG compression") + variant("jpeg12", default=False, description="enable libjpeg 8/12-bit dual mode", when="@4:") + variant("jbig", default=False, description="use ISO JBIG compression") + variant("lerc", default=False, description="use libLerc", when="@4.3:") + variant("lzma", default=False, description="use liblzma", when="@4:") + variant("zstd", default=False, description="use libzstd", when="@4.0.10:") + variant("webp", default=False, description="use libwebp", when="@4.0.10:") + + build_system( + conditional("cmake", when="@4.0.5:"), + "autotools", + default="cmake", + ) + + with when("build_system=cmake"): + depends_on("cmake@3.9:", when="@4.3:", type="build") + depends_on("cmake@2.8.11:", when="@4.0.10:4.2", type="build") + depends_on("cmake@2.8.9:", when="@4.0.6:4.0.9", type="build") + depends_on("cmake@3:", when="@4.0.5", type="build") depends_on("zlib", when="+zlib") depends_on("zlib", 
when="+pixarlog") @@ -63,17 +110,21 @@ def patch(self): 'vl_cv_prog_cc_warnings="-Wall -W"', 'vl_cv_prog_cc_warnings="-Wall"', "configure" ) + +class CMakeBuilder(CMakeBuilder): + def cmake_args(self): + args = [self.define_from_variant(var) for var in VARIANTS] + + # Remove empty strings + args = [arg for arg in args if arg] + + return args + + +class AutotoolsBuilder(AutotoolsBuilder): def configure_args(self): args = [] - args += self.enable_or_disable("zlib") - args += self.enable_or_disable("libdeflate") - args += self.enable_or_disable("pixarlog") - args += self.enable_or_disable("jpeg") - args += self.enable_or_disable("old-jpeg") - args += self.enable_or_disable("jpeg12") - args += self.enable_or_disable("jbig") - args += self.enable_or_disable("lerc") - args += self.enable_or_disable("lzma") - args += self.enable_or_disable("zstd") - args += self.enable_or_disable("webp") + for var in VARIANTS: + args.extend(self.enable_or_disable(var)) + return args diff --git a/var/spack/repos/builtin/packages/libtree/package.py b/var/spack/repos/builtin/packages/libtree/package.py index 572f459b43a..a1c97dfd767 100644 --- a/var/spack/repos/builtin/packages/libtree/package.py +++ b/var/spack/repos/builtin/packages/libtree/package.py @@ -2,11 +2,12 @@ # Spack Project Developers. See the top-level COPYRIGHT file for details. 
# # SPDX-License-Identifier: (Apache-2.0 OR MIT) - +import spack.build_systems.cmake +import spack.build_systems.makefile from spack.package import * -class Libtree(MakefilePackage): +class Libtree(MakefilePackage, CMakePackage): """ldd as a tree""" homepage = "https://github.com/haampie/libtree" @@ -35,6 +36,10 @@ class Libtree(MakefilePackage): version("1.0.4", sha256="b15a54b6f388b8bd8636e288fcb581029f1e65353660387b0096a554ad8e9e45") version("1.0.3", sha256="67ce886c191d50959a5727246cdb04af38872cd811c9ed4e3822f77a8f40b20b") + build_system( + conditional("cmake", when="@:2"), conditional("makefile", when="@3:"), default="makefile" + ) + def url_for_version(self, version): if version < Version("2.0.0"): return ( @@ -45,13 +50,8 @@ def url_for_version(self, version): return "https://github.com/haampie/libtree/archive/refs/tags/v{0}.tar.gz".format(version) - # Version 3.x (Makefile) - @when("@3:") - def install(self, spec, prefix): - make("install", "PREFIX=" + prefix) - # Version 2.x and earlier (CMake) - with when("@:2"): + with when("build_system=cmake"): variant("chrpath", default=False, description="Use chrpath for deployment") variant("strip", default=False, description="Use binutils strip for deployment") variant( @@ -70,23 +70,19 @@ def install(self, spec, prefix): depends_on("cxxopts", when="@2.0.0:2", type="build") depends_on("elfio@:3.9", when="@2.0.0:2", type="build") + +class CMakeBuilder(spack.build_systems.cmake.CMakeBuilder): def cmake_args(self): - tests_enabled = "ON" if self.run_tests else "OFF" + tests_enabled = "ON" if self.pkg.run_tests else "OFF" if self.spec.satisfies("@2.0:"): tests_define = "LIBTREE_BUILD_TESTS" else: tests_define = "BUILD_TESTING" - return [CMakePackage.define(tests_define, tests_enabled)] + return [self.define(tests_define, tests_enabled)] - @when("@:2") - def edit(self, spec, prefix): - options = CMakePackage._std_args(self) + self.cmake_args() - options.append(self.stage.source_path) - with 
working_dir(self.build_directory): - cmake(*options) - @when("@:2") - def check(self): - with working_dir(self.build_directory): - ctest("--output-on-failure") +class MakefileBuilder(spack.build_systems.makefile.MakefileBuilder): + @property + def install_targets(self): + return ["install", "PREFIX=" + self.prefix] diff --git a/var/spack/repos/builtin/packages/libxkbcommon/package.py b/var/spack/repos/builtin/packages/libxkbcommon/package.py index b7003ab5a99..054f05e9bdc 100644 --- a/var/spack/repos/builtin/packages/libxkbcommon/package.py +++ b/var/spack/repos/builtin/packages/libxkbcommon/package.py @@ -2,11 +2,12 @@ # Spack Project Developers. See the top-level COPYRIGHT file for details. # # SPDX-License-Identifier: (Apache-2.0 OR MIT) - +import spack.build_systems.autotools +import spack.build_systems.meson from spack.package import * -class Libxkbcommon(MesonPackage): +class Libxkbcommon(MesonPackage, AutotoolsPackage): """xkbcommon is a library to handle keyboard descriptions, including loading them from disk, parsing them and handling their state. 
It's mainly meant for client toolkits, window systems, and other system @@ -15,6 +16,10 @@ class Libxkbcommon(MesonPackage): homepage = "https://xkbcommon.org/" url = "https://xkbcommon.org/download/libxkbcommon-0.8.2.tar.xz" + build_system( + conditional("meson", when="@0.9:"), conditional("autotools", when="@:0.8"), default="meson" + ) + version("1.4.0", sha256="106cec5263f9100a7e79b5f7220f889bc78e7d7ffc55d2b6fdb1efefb8024031") version( "0.8.2", @@ -44,6 +49,8 @@ class Libxkbcommon(MesonPackage): depends_on("wayland@1.2.0:", when="+wayland") depends_on("wayland-protocols@1.7:", when="+wayland") + +class MesonBuilder(spack.build_systems.meson.MesonBuilder): def meson_args(self): return [ "-Dxkb-config-root={0}".format(self.spec["xkbdata"].prefix), @@ -51,26 +58,11 @@ def meson_args(self): "-Denable-wayland=" + str(self.spec.satisfies("+wayland")), ] - @when("@:0.8") + +class AutotoolsBuilder(spack.build_systems.autotools.AutotoolsBuilder): def configure_args(self): """Configure arguments are passed using meson_args functions""" return [ "--with-xkb-config-root={0}".format(self.spec["xkbdata"].prefix), "--disable-docs", - "--" + ("en" if self.spec.satisfies("+wayland") else "dis") + "able-wayland", - ] - - @when("@:0.8") - def meson(self, spec, prefix): - """Run the AutotoolsPackage configure phase in source_path""" - configure("--prefix=" + prefix, *self.configure_args()) - - @when("@:0.8") - def build(self, spec, prefix): - """Run the AutotoolsPackage build phase in source_path""" - make() - - @when("@:0.8") - def install(self, spec, prefix): - """Run the AutotoolsPackage install phase in source_path""" - make("install") + ] + self.enable_or_disable("wayland") diff --git a/var/spack/repos/builtin/packages/llvm/package.py b/var/spack/repos/builtin/packages/llvm/package.py index d7958bd6f8f..d7dc58d4f56 100644 --- a/var/spack/repos/builtin/packages/llvm/package.py +++ b/var/spack/repos/builtin/packages/llvm/package.py @@ -36,6 +36,8 @@ class Llvm(CMakePackage, 
CudaPackage): # fmt: off version("main", branch="main") + version("15.0.4", sha256="e24b4d3bf7821dcb1c901d1e09096c1f88fb00095c5a6ef893baab4836975e52") + version("15.0.3", sha256="8ac8e4c0982bf236526d737d385db5e1e66543ab217a9355d54159659eae3774") version("15.0.2", sha256="dc11d35e60ab61792baa607dff080c993b39de23fb93b3d3369ba15b0601c307") version("15.0.1", sha256="20bccb964e39f604fdc16d1258f94d2053fbdcdab2b2f6d5e20e6095ec403c00") version("15.0.0", sha256="36d83cd84e1caf2bcfda1669c029e2b949adb9860cff01e7d3246ac2348b11ae") diff --git a/var/spack/repos/builtin/packages/lua/package.py b/var/spack/repos/builtin/packages/lua/package.py index 7753bf1e092..d04d2036d31 100644 --- a/var/spack/repos/builtin/packages/lua/package.py +++ b/var/spack/repos/builtin/packages/lua/package.py @@ -26,11 +26,6 @@ class LuaImplPackage(MakefilePackage): description="Fetcher to use in the LuaRocks package manager", ) - phases = MakefilePackage.phases + ["add_luarocks"] - #: This attribute is used in UI queries that need to know the build - #: system base class - build_system_class = "LuaImplPackage" - lua_version_override = None def __init__(self, *args, **kwargs): @@ -105,7 +100,9 @@ def symlink_luajit(self): ) symlink(real_lib, "liblua" + ext) - def add_luarocks(self, spec, prefix): + @run_after("install") + def add_luarocks(self): + prefix = self.spec.prefix with working_dir(os.path.join("luarocks", "luarocks")): configure("--prefix=" + prefix, "--with-lua=" + prefix) make("build") @@ -118,7 +115,7 @@ def append_paths(self, paths, cpaths, path): def _setup_dependent_env_helper(self, env, dependent_spec): lua_paths = [] - for d in dependent_spec.traverse(deptypes=("build", "run"), deptype_query="run"): + for d in dependent_spec.traverse(deptype=("build", "run")): if d.package.extends(self.spec): lua_paths.append(os.path.join(d.prefix, self.lua_lib_dir)) lua_paths.append(os.path.join(d.prefix, self.lua_lib64_dir)) @@ -209,6 +206,12 @@ class Lua(LuaImplPackage): homepage = 
"https://www.lua.org" url = "https://www.lua.org/ftp/lua-5.3.4.tar.gz" + version("5.4.4", sha256="164c7849653b80ae67bec4b7473b884bf5cc8d2dca05653475ec2ed27b9ebf61") + version("5.4.3", sha256="f8612276169e3bfcbcfb8f226195bfc6e466fe13042f1076cbde92b7ec96bbfb") + version("5.4.2", sha256="11570d97e9d7303c0a59567ed1ac7c648340cd0db10d5fd594c09223ef2f524f") + version("5.4.1", sha256="4ba786c3705eb9db6567af29c91a01b81f1c0ac3124fdbf6cd94bdd9e53cca7d") + version("5.4.0", sha256="eac0836eb7219e421a96b7ee3692b93f0629e4cdb0c788432e3d10ce9ed47e28") + version("5.3.6", sha256="fc5fd69bb8736323f026672b1b7235da613d7177e72558893a0bdcd320466d60") version("5.3.5", sha256="0c2eed3f960446e1a3e4b9a1ca2f3ff893b6ce41942cf54d5dd59ab4b3b058ac") version("5.3.4", sha256="f681aa518233bc407e23acf0f5887c884f17436f000d453b2491a9f11a52400c") version("5.3.2", sha256="c740c7bb23a936944e1cc63b7c3c5351a8976d7867c5252c8854f7b2af9da68f") @@ -229,6 +232,7 @@ class Lua(LuaImplPackage): provides("lua-lang@5.1", when="@5.1:5.1.99") provides("lua-lang@5.2", when="@5.2:5.2.99") provides("lua-lang@5.3", when="@5.3:5.3.99") + provides("lua-lang@5.4", when="@5.4:5.4.99") depends_on("ncurses+termlib") depends_on("readline") @@ -236,22 +240,23 @@ class Lua(LuaImplPackage): patch( "http://lua.2524044.n2.nabble.com/attachment/7666421/0/pkg-config.patch", sha256="208316c2564bdd5343fa522f3b230d84bd164058957059838df7df56876cb4ae", - when="+pcfile", + when="+pcfile @:5.3.9999", ) - def install(self, spec, prefix): + def build(self, spec, prefix): if spec.satisfies("platform=darwin"): target = "macosx" else: target = "linux" make( - "INSTALL_TOP=%s" % prefix, "MYLDFLAGS=" + " ".join((spec["readline"].libs.search_flags, spec["ncurses"].libs.search_flags)), "MYLIBS=%s" % spec["ncurses"].libs.link_flags, - "CC=%s -std=gnu99 %s" % (spack_cc, self.compiler.cc_pic_flag), + "CC={0} -std=gnu99 {1}".format(spack_cc, self.compiler.cc_pic_flag), target, ) + + def install(self, spec, prefix): make("INSTALL_TOP=%s" % prefix, "install") 
if "+shared" in spec: @@ -286,7 +291,8 @@ def install(self, spec, prefix): @run_after("install") def link_pkg_config(self): if "+pcfile" in self.spec: + versioned_pc_file_name = "lua{0}.pc".format(self.version.up_to(2)) symlink( - join_path(self.prefix.lib, "pkgconfig", "lua5.3.pc"), + join_path(self.prefix.lib, "pkgconfig", versioned_pc_file_name), join_path(self.prefix.lib, "pkgconfig", "lua.pc"), ) diff --git a/var/spack/repos/builtin/packages/lz4/package.py b/var/spack/repos/builtin/packages/lz4/package.py index 4c3dbfffd2d..c2b34a5d2d6 100644 --- a/var/spack/repos/builtin/packages/lz4/package.py +++ b/var/spack/repos/builtin/packages/lz4/package.py @@ -17,6 +17,7 @@ class Lz4(MakefilePackage): homepage = "https://lz4.github.io/lz4/" url = "https://github.com/lz4/lz4/archive/v1.9.2.tar.gz" + version("1.9.4", sha256="0b0e3aa07c8c063ddf40b082bdf7e37a1562bda40a0ff5272957f3e987e0e54b") version("1.9.3", sha256="030644df4611007ff7dc962d981f390361e6c97a34e5cbc393ddfbe019ffe2c1") version("1.9.2", sha256="658ba6191fa44c92280d4aa2c271b0f4fbc0e34d249578dd05e50e76d0e5efcc") version("1.9.0", sha256="f8b6d5662fa534bd61227d313535721ae41a68c9d84058b7b7d86e143572dcfb") diff --git a/var/spack/repos/builtin/packages/mariadb-c-client/package.py b/var/spack/repos/builtin/packages/mariadb-c-client/package.py index 0eafda8f268..fe30923df6b 100644 --- a/var/spack/repos/builtin/packages/mariadb-c-client/package.py +++ b/var/spack/repos/builtin/packages/mariadb-c-client/package.py @@ -22,12 +22,16 @@ class MariadbCClient(CMakePackage): list_url = "https://downloads.mariadb.com/Connectors/c/" list_depth = 1 + version("3.3.2", sha256="7e0722e07d30bb906fac9fe10fb582cde1e148e05a83d9ca7b6fcc884b68fbce") + version("3.2.7", sha256="9d7196248e6697c09c73e173fe9b282045f55ec9d7ae743c1ebad08b9ea56dda") version("3.2.6", sha256="9c22fff9d18db7ebdcb63979882fb6b68d2036cf2eb62f043eac922cd36bdb91") + version("3.1.18", sha256="b01ecacf7531c2f36d90708845488e66462bf63627c58cb5986bd1c0833e4d9c") 
version("3.1.13", sha256="0271a5edfd64b13bca5937267474e4747d832ec62e169fc2589d2ead63746875") version("3.1.9", sha256="108d99bf2add434dcb3bd9526ba1d89a2b9a943b62dcd9d0a41fcbef8ffbf2c7") version("3.1.6", sha256="d266bb67df83c088c4fb05392713d2504c67be620894cedaf758a9561c116720") version("3.1.5", sha256="a9de5fedd1a7805c86e23be49b9ceb79a86b090ad560d51495d7ba5952a9d9d5") version("3.1.4", sha256="7a1a72fee00e4c28060f96c3efbbf38aabcbbab17903e82fce85a85002565316") + version("3.0.10", sha256="bd9aa1f137ead3dc68ed3165adc53541712076d08949800b6ccebd33da6d0ae8") version("3.0.9", sha256="7277c0caba6f50b1d07e1d682baf0b962a63e2e6af9e00e09b8dcf36a7858641") version("3.0.8", sha256="2ca368fd79e87e80497a5c9fd18922d8316af8584d87cecb35bd5897cb1efd05") version("3.0.7", sha256="f63883c9360675d111646fba5c97feb0d08e0def5873dd189d78bafbb75fa004") diff --git a/var/spack/repos/builtin/packages/mesa/package.py b/var/spack/repos/builtin/packages/mesa/package.py index e82bdc37640..caa47f8d641 100644 --- a/var/spack/repos/builtin/packages/mesa/package.py +++ b/var/spack/repos/builtin/packages/mesa/package.py @@ -2,9 +2,9 @@ # Spack Project Developers. See the top-level COPYRIGHT file for details. 
# # SPDX-License-Identifier: (Apache-2.0 OR MIT) - import sys +import spack.build_systems.meson from spack.package import * @@ -158,6 +158,28 @@ def flag_handler(self, name, flags): flags.append("-std=c99") return super(Mesa, self).flag_handler(name, flags) + @property + def libglx_headers(self): + return find_headers("GL/glx", root=self.spec.prefix.include, recursive=False) + + @property + def libglx_libs(self): + return find_libraries("libGL", root=self.spec.prefix, recursive=True) + + @property + def libosmesa_headers(self): + return find_headers("GL/osmesa", root=self.spec.prefix.include, recursive=False) + + @property + def libosmesa_libs(self): + if "platform=windows" in self.spec: + lib_name = "osmesa" + else: + lib_name = "libOSMesa" + return find_libraries(lib_name, root=self.spec.prefix, recursive=True) + + +class MesonBuilder(spack.build_systems.meson.MesonBuilder): def meson_args(self): spec = self.spec args = [ @@ -274,23 +296,3 @@ def meson_args(self): args.append("-Ddri-drivers=" + ",".join(args_dri_drivers)) return args - - @property - def libglx_headers(self): - return find_headers("GL/glx", root=self.spec.prefix.include, recursive=False) - - @property - def libglx_libs(self): - return find_libraries("libGL", root=self.spec.prefix, recursive=True) - - @property - def libosmesa_headers(self): - return find_headers("GL/osmesa", root=self.spec.prefix.include, recursive=False) - - @property - def libosmesa_libs(self): - if "platform=windows" in self.spec: - lib_name = "osmesa" - else: - lib_name = "libOSMesa" - return find_libraries(lib_name, root=self.spec.prefix, recursive=True) diff --git a/var/spack/repos/builtin/packages/meson/oneapi.patch b/var/spack/repos/builtin/packages/meson/oneapi.patch new file mode 100644 index 00000000000..e5996261db7 --- /dev/null +++ b/var/spack/repos/builtin/packages/meson/oneapi.patch @@ -0,0 +1,158 @@ +diff --git a/docs/markdown/Reference-tables.md b/docs/markdown/Reference-tables.md +index 60303dad6..421b33c07 
100644 +--- a/docs/markdown/Reference-tables.md ++++ b/docs/markdown/Reference-tables.md +@@ -20,6 +20,7 @@ These are return values of the `get_id` (Compiler family) and + | gcc | The GNU Compiler Collection | gcc | + | intel | Intel compiler (Linux and Mac) | gcc | + | intel-cl | Intel compiler (Windows) | msvc | ++| intel-llvm| Intel oneAPI LLVM-based compiler | | + | lcc | Elbrus C/C++/Fortran Compiler | | + | llvm | LLVM-based compiler (Swift, D) | | + | mono | Xamarin C# compiler | | +diff --git a/docs/markdown/snippets/oneapi_compilers.md b/docs/markdown/snippets/oneapi_compilers.md +new file mode 100644 +index 000000000..a982da22a +--- /dev/null ++++ b/docs/markdown/snippets/oneapi_compilers.md +@@ -0,0 +1,8 @@ ++## Basic support for oneAPI compilers on Linux ++ ++To use: ++ ++``` ++source /opt/intel/oneapi/setvars.sh ++CC=icx CXX=icpx FC=ifx meson setup builddir ++``` +diff --git a/mesonbuild/compilers/c.py b/mesonbuild/compilers/c.py +index b1b4a7c92..9490ee688 100644 +--- a/mesonbuild/compilers/c.py ++++ b/mesonbuild/compilers/c.py +@@ -406,6 +406,13 @@ class IntelCCompiler(IntelGnuLikeCompiler, CCompiler): + return args + + ++class IntelLLVMCCompiler(ClangCCompiler): ++ ++ ++ id = 'intel-llvm' ++ ++ ++ + class VisualStudioLikeCCompilerMixin(CompilerMixinBase): + + """Shared methods that apply to MSVC-like C compilers.""" +diff --git a/mesonbuild/compilers/cpp.py b/mesonbuild/compilers/cpp.py +index ac65df9a1..3d728f169 100644 +--- a/mesonbuild/compilers/cpp.py ++++ b/mesonbuild/compilers/cpp.py +@@ -153,7 +153,7 @@ class CPPCompiler(CLikeCompiler, Compiler): + } + + # Currently, remapping is only supported for Clang, Elbrus and GCC +- assert self.id in frozenset(['clang', 'lcc', 'gcc', 'emscripten', 'armltdclang']) ++ assert self.id in frozenset(['clang', 'lcc', 'gcc', 'emscripten', 'armltdclang', 'intel-llvm']) + + if cpp_std not in CPP_FALLBACKS: + # 'c++03' and 'c++98' don't have fallback types +@@ -593,6 +593,13 @@ class 
IntelCPPCompiler(IntelGnuLikeCompiler, CPPCompiler): + return [] + + ++class IntelLLVMCPPCompiler(ClangCPPCompiler): ++ ++ ++ id = 'intel-llvm' ++ ++ ++ + class VisualStudioLikeCPPCompilerMixin(CompilerMixinBase): + + """Mixin for C++ specific method overrides in MSVC-like compilers.""" +diff --git a/mesonbuild/compilers/detect.py b/mesonbuild/compilers/detect.py +index f4afa777d..42a4b18a5 100644 +--- a/mesonbuild/compilers/detect.py ++++ b/mesonbuild/compilers/detect.py +@@ -62,6 +62,7 @@ from .c import ( + EmscriptenCCompiler, + IntelCCompiler, + IntelClCCompiler, ++ IntelLLVMCCompiler, + NvidiaHPC_CCompiler, + PGICCompiler, + CcrxCCompiler, +@@ -83,6 +84,7 @@ from .cpp import ( + EmscriptenCPPCompiler, + IntelCPPCompiler, + IntelClCPPCompiler, ++ IntelLLVMCPPCompiler, + NvidiaHPC_CPPCompiler, + PGICPPCompiler, + CcrxCPPCompiler, +@@ -106,6 +108,7 @@ from .fortran import ( + FlangFortranCompiler, + IntelFortranCompiler, + IntelClFortranCompiler, ++ IntelLLVMFortranCompiler, + NAGFortranCompiler, + Open64FortranCompiler, + PathScaleFortranCompiler, +@@ -180,11 +183,11 @@ else: + defaults['objc'] = ['clang'] + defaults['objcpp'] = ['clang++'] + else: +- defaults['c'] = ['cc', 'gcc', 'clang', 'nvc', 'pgcc', 'icc'] +- defaults['cpp'] = ['c++', 'g++', 'clang++', 'nvc++', 'pgc++', 'icpc'] ++ defaults['c'] = ['cc', 'gcc', 'clang', 'nvc', 'pgcc', 'icc', 'icx'] ++ defaults['cpp'] = ['c++', 'g++', 'clang++', 'nvc++', 'pgc++', 'icpc', 'icpx'] + defaults['objc'] = ['cc', 'gcc', 'clang'] + defaults['objcpp'] = ['c++', 'g++', 'clang++'] +- defaults['fortran'] = ['gfortran', 'flang', 'nvfortran', 'pgfortran', 'ifort', 'g95'] ++ defaults['fortran'] = ['gfortran', 'flang', 'nvfortran', 'pgfortran', 'ifort', 'ifx', 'g95'] + defaults['cs'] = ['mcs', 'csc'] + defaults['d'] = ['ldc2', 'ldc', 'gdc', 'dmd'] + defaults['java'] = ['javac'] +@@ -617,6 +620,12 @@ def _detect_c_or_cpp_compiler(env: 'Environment', lang: str, for_machine: Machin + return cls( + ccache + compiler, version, 
for_machine, is_cross, info, + exe_wrap, full_version=full_version, linker=l) ++ if 'Intel(R) oneAPI' in out: ++ cls = IntelLLVMCCompiler if lang == 'c' else IntelLLVMCPPCompiler ++ l = guess_nix_linker(env, compiler, cls, version, for_machine) ++ return cls( ++ ccache + compiler, version, for_machine, is_cross, info, ++ exe_wrap, full_version=full_version, linker=l) + if 'TMS320C2000 C/C++' in out or 'MSP430 C/C++' in out or 'TI ARM C/C++ Compiler' in out: + lnk: T.Union[T.Type[C2000DynamicLinker], T.Type[TIDynamicLinker]] + if 'TMS320C2000 C/C++' in out: +@@ -789,6 +798,13 @@ def detect_fortran_compiler(env: 'Environment', for_machine: MachineChoice) -> C + compiler, version, for_machine, is_cross, info, + exe_wrap, full_version=full_version, linker=linker) + ++ if 'ifx (IFORT)' in out: ++ cls = IntelLLVMFortranCompiler ++ linker = guess_nix_linker(env, compiler, cls, version, for_machine) ++ return cls( ++ compiler, version, for_machine, is_cross, info, ++ exe_wrap, full_version=full_version, linker=linker) ++ + if 'PathScale EKOPath(tm)' in err: + return PathScaleFortranCompiler( + compiler, version, for_machine, is_cross, info, +diff --git a/mesonbuild/compilers/fortran.py b/mesonbuild/compilers/fortran.py +index 0a0c3ec86..e7154fe87 100644 +--- a/mesonbuild/compilers/fortran.py ++++ b/mesonbuild/compilers/fortran.py +@@ -352,6 +352,12 @@ class IntelFortranCompiler(IntelGnuLikeCompiler, FortranCompiler): + return ['-gen-dep=' + outtarget, '-gen-depformat=make'] + + ++class IntelLLVMFortranCompiler(IntelFortranCompiler): ++ ++ ++ id = 'intel-llvm' ++ ++ + class IntelClFortranCompiler(IntelVisualStudioLikeCompiler, FortranCompiler): + + file_suffixes = ('f90', 'f', 'for', 'ftn', 'fpp', ) diff --git a/var/spack/repos/builtin/packages/meson/package.py b/var/spack/repos/builtin/packages/meson/package.py index 05433d59ddf..ecec5026510 100644 --- a/var/spack/repos/builtin/packages/meson/package.py +++ b/var/spack/repos/builtin/packages/meson/package.py @@ -67,12 
+67,11 @@ class Meson(PythonPackage): patch("rpath-0.54.patch", when="@0.54:0.55") patch("rpath-0.56.patch", when="@0.56:0.57") patch("rpath-0.58.patch", when="@0.58:") - # Help meson recognize Intel OneAPI compilers - patch( - "https://patch-diff.githubusercontent.com/raw/mesonbuild/meson/pull/9850.patch?full_index=1", - sha256="9c874726ce0a06922580d3e3d6adbe74e5144b3a661ef1059f32c9c1bc478b65", - when="@0.60.0:", - ) + + # Intel OneAPI compiler support + # https://github.com/mesonbuild/meson/pull/10909 + # https://github.com/mesonbuild/meson/pull/9850 + patch("oneapi.patch", when="@0.62: %oneapi") executables = ["^meson$"] diff --git a/var/spack/repos/builtin/packages/methyldackel/package.py b/var/spack/repos/builtin/packages/methyldackel/package.py new file mode 100644 index 00000000000..6e943c4c3d1 --- /dev/null +++ b/var/spack/repos/builtin/packages/methyldackel/package.py @@ -0,0 +1,40 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class Methyldackel(MakefilePackage): + """MethylDackel (formerly named PileOMeth, which was a temporary name + derived due to it using a PILEup to extract METHylation metrics) will + process a coordinate-sorted and indexed BAM or CRAM file containing + some form of BS-seq alignments and extract per-base methylation + metrics from them. 
+ """ + + homepage = "https://github.com/dpryan79/MethylDackel" + url = "https://github.com/dpryan79/MethylDackel/archive/refs/tags/0.6.1.tar.gz" + maintainers = ["snehring"] + + version("0.6.1", sha256="eeb1da4c830bcd9f3e6663a764947d957c41337643069524a4b545812fcf4819") + + depends_on("htslib@1.11:") + depends_on("libbigwig") + depends_on("curl") + + def edit(self, spec, prefix): + filter_file(r"^prefix \?=.*$", "prefix = " + spec.prefix, "Makefile") + filter_file( + "$(LIBBIGWIG)", + join_path(spec["libbigwig"].prefix.lib64, "libBigWig.a"), + "Makefile", + string=True, + ) + filter_file( + "-IlibBigWig", + "-I" + spec["libbigwig"].prefix.include.libbigwig, + "Makefile", + string=True, + ) diff --git a/var/spack/repos/builtin/packages/metis/gklib_path.patch b/var/spack/repos/builtin/packages/metis/gklib_path.patch new file mode 100644 index 00000000000..2cee12d0446 --- /dev/null +++ b/var/spack/repos/builtin/packages/metis/gklib_path.patch @@ -0,0 +1,11 @@ +--- a/CMakeLists.txt 2022-07-20 21:17:20.352231603 +0200 ++++ b/CMakeLists.txt 2022-07-20 21:19:28.998269385 +0200 +@@ -1,7 +1,7 @@ + cmake_minimum_required(VERSION 2.8) + project(METIS) + +-set(GKLIB_PATH "GKlib" CACHE PATH "path to GKlib") ++set(GKLIB_PATH "${CMAKE_SOURCE_DIR}/GKlib" CACHE PATH "path to GKlib") + set(SHARED FALSE CACHE BOOL "build a shared library") + + if(MSVC) diff --git a/var/spack/repos/builtin/packages/metis/package.py b/var/spack/repos/builtin/packages/metis/package.py index edf99be5ee5..da6e798783d 100644 --- a/var/spack/repos/builtin/packages/metis/package.py +++ b/var/spack/repos/builtin/packages/metis/package.py @@ -2,20 +2,22 @@ # Spack Project Developers. See the top-level COPYRIGHT file for details. 
# # SPDX-License-Identifier: (Apache-2.0 OR MIT) - - import os import sys +import spack.build_systems.cmake +import spack.build_systems.makefile from spack.package import * -class Metis(Package): +class Metis(CMakePackage, MakefilePackage): """METIS is a set of serial programs for partitioning graphs, partitioning finite element meshes, and producing fill reducing orderings for sparse - matrices. The algorithms implemented in METIS are based on the - multilevel recursive-bisection, multilevel k-way, and multi-constraint - partitioning schemes.""" + matrices. + + The algorithms implemented in METIS are based on the multilevel + recursive-bisection, multilevel k-way, and multi-constraint partitioning schemes. + """ homepage = "http://glaros.dtc.umn.edu/gkhome/metis/metis/overview" url = "http://glaros.dtc.umn.edu/gkhome/fetch/sw/metis/metis-5.1.0.tar.gz" @@ -27,52 +29,29 @@ class Metis(Package): version("5.1.0", sha256="76faebe03f6c963127dbb73c13eab58c9a3faeae48779f049066a21c087c5db2") version("4.0.3", sha256="5efa35de80703c1b2c4d0de080fafbcf4e0d363a21149a1ad2f96e0144841a55") - variant("shared", default=True, description="Enables the build of shared libraries.") - variant("gdb", default=False, description="Enables gdb support (version 5+).") - variant("int64", default=False, description="Sets the bit width of METIS's index type to 64.") - variant("real64", default=False, description="Sets the bit width of METIS's real type to 64.") - - # For Metis version 5:, the build system is CMake, provide the - # `build_type` variant. 
- variant( - "build_type", - default="Release", - description="The build type for the installation (only Debug or" - " Release allowed for version 4).", - values=("Debug", "Release", "RelWithDebInfo", "MinSizeRel"), + build_system( + conditional("cmake", when="@5:"), conditional("makefile", when="@:4"), default="cmake" ) + variant("shared", default=True, description="Build shared libraries") + with when("build_system=cmake"): + variant("gdb", default=False, description="Enable gdb support") + variant("int64", default=False, description="Use index type of 64 bit") + variant("real64", default=False, description="Use real type of 64 bit") - # Prior to version 5, the (non-cmake) build system only knows about - # 'build_type=Debug|Release'. - conflicts("@:4", when="build_type=RelWithDebInfo") - conflicts("@:4", when="build_type=MinSizeRel") - conflicts("@:4", when="+gdb") - conflicts("@:4", when="+int64") - conflicts("@:4", when="+real64") + # Use the correct path to GKLIB when building out of source + patch("gklib_path.patch") + # Install both gklib_defs.h and gklib_rename.h + patch("install_gklib_defs_rename.patch") + # Disable the "misleading indentation" warning when compiling + patch("gklib_nomisleadingindentation_warning.patch", when="%gcc@6:") - depends_on("cmake@2.8:", when="@5:", type="build") + with when("build_system=makefile"): + variant("debug", default=False, description="Compile in debug mode") - patch("install_gklib_defs_rename.patch", when="@5:") - patch("gklib_nomisleadingindentation_warning.patch", when="@5: %gcc@6:") - - def setup_build_environment(self, env): - # Ignore warnings/errors re unrecognized omp pragmas on %intel - if "%intel@14:" in self.spec: - env.append_flags("CFLAGS", "-diag-disable 3180") - # Ignore some warnings to get it to compile with %nvhpc - # 111: statement is unreachable - # 177: variable "foo" was declared but never referenced - # 188: enumerated type mixed with another type - # 550: variable "foo" was set but never used - 
if "%nvhpc" in self.spec: - env.append_flags("CFLAGS", "--display_error_number") - env.append_flags("CFLAGS", "--diag_suppress 111") - env.append_flags("CFLAGS", "--diag_suppress 177") - env.append_flags("CFLAGS", "--diag_suppress 188") - env.append_flags("CFLAGS", "--diag_suppress 550") - - @when("@5:") def patch(self): + if not self.spec.satisfies("build_system=cmake"): + return + source_path = self.stage.source_path metis_header = FileFilter(join_path(source_path, "include", "metis.h")) @@ -96,18 +75,38 @@ def patch(self): join_path(source_path, "GKlib", "error.c"), ) - @when("@:4") - def install(self, spec, prefix): - # Process library spec and options - options = [] - if "+shared" in spec: - options.append("COPTIONS={0}".format(self.compiler.cc_pic_flag)) - if spec.variants["build_type"].value == "Debug": - options.append("OPTFLAGS=-g -O0") - make(*options) +class SetupEnvironment(object): + def setup_build_environment(self, env): + # Ignore warnings/errors re unrecognized omp pragmas on %intel + if "%intel@14:" in self.spec: + env.append_flags("CFLAGS", "-diag-disable 3180") + # Ignore some warnings to get it to compile with %nvhpc + # 111: statement is unreachable + # 177: variable "foo" was declared but never referenced + # 188: enumerated type mixed with another type + # 550: variable "foo" was set but never used + if "%nvhpc" in self.spec: + env.append_flags("CFLAGS", "--display_error_number") + env.append_flags("CFLAGS", "--diag_suppress 111") + env.append_flags("CFLAGS", "--diag_suppress 177") + env.append_flags("CFLAGS", "--diag_suppress 188") + env.append_flags("CFLAGS", "--diag_suppress 550") + + +class MakefileBuilder(spack.build_systems.makefile.MakefileBuilder, SetupEnvironment): + @property + def build_targets(self): + options = [] + if "+shared" in self.spec: + options.append("COPTIONS={0}".format(self.pkg.compiler.cc_pic_flag)) + if "+debug" in self.spec: + options.append("OPTFLAGS=-g -O0") + return options + + def install(self, pkg, spec, 
prefix): # Compile and install library files - ccompile = Executable(self.compiler.cc) + ccompile = Executable(pkg.compiler.cc) mkdir(prefix.bin) binfiles = ( @@ -140,7 +139,7 @@ def install(self, spec, prefix): install(sharefile, prefix.share) if "+shared" in spec: - shared_flags = [self.compiler.cc_pic_flag, "-shared"] + shared_flags = [pkg.compiler.cc_pic_flag, "-shared"] if sys.platform == "darwin": shared_suffix = "dylib" shared_flags.extend(["-Wl,-all_load", "libmetis.a"]) @@ -157,7 +156,7 @@ def install(self, spec, prefix): ccompile( "-I%s" % prefix.include, "-L%s" % prefix.lib, - (self.compiler.cc_rpath_arg + prefix.lib if "+shared" in spec else ""), + (pkg.compiler.cc_rpath_arg + prefix.lib if "+shared" in spec else ""), join_path("Programs", "io.o"), join_path("Test", "mtest.c"), "-o", @@ -166,58 +165,32 @@ def install(self, spec, prefix): "-lm", ) - if self.run_tests: - test_bin = lambda testname: join_path(prefix.bin, testname) - test_graph = lambda graphname: join_path(prefix.share, graphname) + def check(self): + test_bin = lambda testname: join_path(prefix.bin, testname) + test_graph = lambda graphname: join_path(prefix.share, graphname) - graph = test_graph("4elt.graph") - os.system("%s %s" % (test_bin("mtest"), graph)) - os.system("%s %s 40" % (test_bin("kmetis"), graph)) - os.system("%s %s" % (test_bin("onmetis"), graph)) - graph = test_graph("test.mgraph") - os.system("%s %s 2" % (test_bin("pmetis"), graph)) - os.system("%s %s 2" % (test_bin("kmetis"), graph)) - os.system("%s %s 5" % (test_bin("kmetis"), graph)) - graph = test_graph("metis.mesh") - os.system("%s %s 10" % (test_bin("partnmesh"), graph)) - os.system("%s %s 10" % (test_bin("partdmesh"), graph)) - os.system("%s %s" % (test_bin("mesh2dual"), graph)) + graph = test_graph("4elt.graph") + os.system("%s %s" % (test_bin("mtest"), graph)) + os.system("%s %s 40" % (test_bin("kmetis"), graph)) + os.system("%s %s" % (test_bin("onmetis"), graph)) + graph = test_graph("test.mgraph") + 
os.system("%s %s 2" % (test_bin("pmetis"), graph)) + os.system("%s %s 2" % (test_bin("kmetis"), graph)) + os.system("%s %s 5" % (test_bin("kmetis"), graph)) + graph = test_graph("metis.mesh") + os.system("%s %s 10" % (test_bin("partnmesh"), graph)) + os.system("%s %s 10" % (test_bin("partdmesh"), graph)) + os.system("%s %s" % (test_bin("mesh2dual"), graph)) - # FIXME: The following code should replace the testing code in the - # block above since it causes installs to fail when one or more of - # the Metis tests fail, but it currently doesn't work because the - # 'mtest', 'onmetis', and 'partnmesh' tests return error codes that - # trigger false positives for failure. - """ - Executable(test_bin('mtest'))(test_graph('4elt.graph')) - Executable(test_bin('kmetis'))(test_graph('4elt.graph'), '40') - Executable(test_bin('onmetis'))(test_graph('4elt.graph')) - Executable(test_bin('pmetis'))(test_graph('test.mgraph'), '2') - Executable(test_bin('kmetis'))(test_graph('test.mgraph'), '2') - Executable(test_bin('kmetis'))(test_graph('test.mgraph'), '5') +class CMakeBuilder(spack.build_systems.cmake.CMakeBuilder, SetupEnvironment): + def cmake_args(self): + options = [ + self.define_from_variant("SHARED", "shared"), + self.define_from_variant("GDB", "gdb"), + ] - Executable(test_bin('partnmesh'))(test_graph('metis.mesh'), '10') - Executable(test_bin('partdmesh'))(test_graph('metis.mesh'), '10') - Executable(test_bin('mesh2dual'))(test_graph('metis.mesh')) - """ - - @when("@5:") - def install(self, spec, prefix): - source_directory = self.stage.source_path - build_directory = join_path(self.stage.path, "build") - - options = CMakePackage._std_args(self) - options.append("-DGKLIB_PATH:PATH=%s/GKlib" % source_directory) - - # Normally this is available via the 'CMakePackage' object, but metis - # IS-A 'Package' (not a 'CMakePackage') to support non-cmake metis@:5. 
- build_type = spec.variants["build_type"].value - options.extend(["-DCMAKE_BUILD_TYPE:STRING={0}".format(build_type)]) - - if "+shared" in spec: - options.append("-DSHARED:BOOL=ON") - else: + if self.spec.satisfies("~shared"): # Remove all RPATH options # (RPATHxxx options somehow trigger cmake to link dynamically) rpath_options = [] @@ -226,40 +199,41 @@ def install(self, spec, prefix): rpath_options.append(o) for o in rpath_options: options.remove(o) - if "+gdb" in spec: - options.append("-DGDB:BOOL=ON") - with working_dir(build_directory, create=True): - cmake(source_directory, *options) - make() - make("install") - - # install all headers, which will be needed for ParMETIS and other programs - subdirs = ["GKlib", "libmetis", "programs"] - for subd in subdirs: - inc_dist = join_path(prefix.include, subd) - mkdirp(inc_dist) - install(join_path(source_directory, subd, "*.h"), inc_dist) - - if self.run_tests: - # FIXME: On some systems, the installed binaries for METIS cannot - # be executed without first being read. 
- ls = which("ls") - ls("-a", "-l", prefix.bin) - - for f in ["4elt", "copter2", "mdual"]: - graph = join_path(source_directory, "graphs", "%s.graph" % f) - Executable(join_path(prefix.bin, "graphchk"))(graph) - Executable(join_path(prefix.bin, "gpmetis"))(graph, "2") - Executable(join_path(prefix.bin, "ndmetis"))(graph) - - graph = join_path(source_directory, "graphs", "test.mgraph") - Executable(join_path(prefix.bin, "gpmetis"))(graph, "2") - graph = join_path(source_directory, "graphs", "metis.mesh") - Executable(join_path(prefix.bin, "mpmetis"))(graph, "2") + return options @run_after("install") + def install_headers(self): + with working_dir(self.build_directory): + # install all headers, which will be needed for ParMETIS and other programs + directories = ["GKlib", "libmetis", "programs"] + for directory in directories: + inc_dist = join_path(self.prefix.include, directory) + mkdirp(inc_dist) + install(join_path(self.stage.source_path, directory, "*.h"), inc_dist) + + def check(self): + # On some systems, the installed binaries for METIS cannot + # be executed without first being read. 
+ ls = which("ls") + ls("-a", "-l", self.prefix.bin) + + graphchk = Executable(join_path(self.prefix.bin, "graphchk")) + gpmetis = Executable(join_path(self.prefix.bin, "gpmetis")) + ndmetis = Executable(join_path(self.prefix.bin, "ndmetis")) + mpmetis = Executable(join_path(self.prefix.bin, "mpmetis")) + for f in ["4elt", "copter2", "mdual"]: + graph = join_path(self.stage.source_path, "graphs", "%s.graph" % f) + graphchk(graph) + gpmetis(graph, "2") + ndmetis(graph) + + graph = join_path(self.stage.source_path, "graphs", "test.mgraph") + gpmetis(graph, "2") + graph = join_path(self.stage.source_path, "graphs", "metis.mesh") + mpmetis(graph, "2") + + @run_after("install", when="+shared platform=darwin") def darwin_fix(self): # The shared library is not installed correctly on Darwin; fix this - if (sys.platform == "darwin") and ("+shared" in self.spec): - fix_darwin_install_name(prefix.lib) + fix_darwin_install_name(prefix.lib) diff --git a/var/spack/repos/builtin/packages/mfem/package.py b/var/spack/repos/builtin/packages/mfem/package.py index dab89f95500..984e715394a 100644 --- a/var/spack/repos/builtin/packages/mfem/package.py +++ b/var/spack/repos/builtin/packages/mfem/package.py @@ -48,6 +48,13 @@ class Mfem(Package, CudaPackage, ROCmPackage): # other version. 
version("develop", branch="master") + version( + "4.5.0", + sha256="4f201bec02fc5460a902596697b6c1deb7b15ac57c71f615b2ab4a8eb65665f7", + url="https://bit.ly/mfem-4-5", + extension="tar.gz", + ) + version( "4.4.0", sha256="37250dbef6e97b16dc9ab50973e8d68bc165bb4afcdaf91b3b72c8972c87deef", @@ -171,7 +178,9 @@ class Mfem(Package, CudaPackage, ROCmPackage): "libunwind", default=False, description="Enable backtrace on error support using Libunwind" ) variant("fms", default=False, when="@4.3.0:", description="Enable FMS I/O support") - # TODO: SIMD, Ginkgo, ADIOS2, HiOp, MKL CPardiso, Axom/Sidre + variant("ginkgo", default=False, when="@4.3.0:", description="Enable Ginkgo support") + variant("hiop", default=False, when="@4.4.0:", description="Enable HiOp support") + # TODO: SIMD, ADIOS2, MKL CPardiso, Axom/Sidre variant( "timer", default="auto", @@ -229,6 +238,7 @@ class Mfem(Package, CudaPackage, ROCmPackage): conflicts("^mpich@4:", when="@:4.3+mpi") depends_on("mpi", when="+mpi") + depends_on("hipsparse", when="@4.4.0:+rocm") depends_on("hypre@2.10.0:2.13", when="@:3.3+mpi") depends_on("hypre@:2.20.0", when="@3.4:4.2+mpi") depends_on("hypre@:2.23.0", when="@4.3.0+mpi") @@ -256,11 +266,13 @@ class Mfem(Package, CudaPackage, ROCmPackage): depends_on("sundials@2.7.0+mpi+hypre", when="@:3.3.0+sundials+mpi") depends_on("sundials@2.7.0:", when="@3.3.2:+sundials~mpi") depends_on("sundials@2.7.0:+mpi+hypre", when="@3.3.2:+sundials+mpi") - depends_on("sundials@5.0.0:5", when="@4.0.1-xsdk:+sundials~mpi") - depends_on("sundials@5.0.0:5+mpi+hypre", when="@4.0.1-xsdk:+sundials+mpi") + depends_on("sundials@5.0.0:5", when="@4.0.1-xsdk:4.4+sundials~mpi") + depends_on("sundials@5.0.0:5+mpi+hypre", when="@4.0.1-xsdk:4.4+sundials+mpi") + depends_on("sundials@5.0.0:", when="@4.5.0:+sundials~mpi") + depends_on("sundials@5.0.0:+mpi+hypre", when="@4.5.0:+sundials+mpi") for sm_ in CudaPackage.cuda_arch_values: depends_on( - "sundials@5.4.0:5+cuda cuda_arch={0}".format(sm_), + 
"sundials@5.4.0:+cuda cuda_arch={0}".format(sm_), when="@4.2.0:+sundials+cuda cuda_arch={0}".format(sm_), ) depends_on("pumi", when="+pumi~shared") @@ -320,6 +332,29 @@ class Mfem(Package, CudaPackage, ROCmPackage): depends_on("conduit@0.3.1:,master:", when="+conduit") depends_on("conduit+mpi", when="+conduit+mpi") depends_on("libfms@0.2.0:", when="+fms") + depends_on("ginkgo@1.4.0:", when="+ginkgo") + for sm_ in CudaPackage.cuda_arch_values: + depends_on( + "ginkgo+cuda cuda_arch={0}".format(sm_), + when="+ginkgo+cuda cuda_arch={0}".format(sm_), + ) + for gfx in ROCmPackage.amdgpu_targets: + depends_on( + "ginkgo+rocm amdgpu_target={0}".format(gfx), + when="+ginkgo+rocm amdgpu_target={0}".format(gfx), + ) + depends_on("hiop@0.4.6:~mpi", when="+hiop~mpi") + depends_on("hiop@0.4.6:+mpi", when="+hiop+mpi") + for sm_ in CudaPackage.cuda_arch_values: + depends_on( + "hiop+cuda cuda_arch={0}".format(sm_), + when="+hiop+cuda cuda_arch={0}".format(sm_), + ) + for gfx in ROCmPackage.amdgpu_targets: + depends_on( + "hiop+rocm amdgpu_target={0}".format(gfx), + when="+hiop+rocm amdgpu_target={0}".format(gfx), + ) # The MFEM 4.0.0 SuperLU interface fails when using hypre@2.16.0 and # superlu-dist@6.1.1. See https://github.com/mfem/mfem/issues/983. @@ -405,6 +440,12 @@ class Mfem(Package, CudaPackage, ROCmPackage): def setup_build_environment(self, env): env.unset("MFEM_DIR") env.unset("MFEM_BUILD_DIR") + # Workaround for changes made by the 'kokkos-nvcc-wrapper' package + # which can be a dependency e.g. 
through PETSc that uses Kokkos: + if "^kokkos-nvcc-wrapper" in self.spec: + env.set("MPICH_CXX", spack_cxx) + env.set("OMPI_CXX", spack_cxx) + env.set("MPICXX_CXX", spack_cxx) # # Note: Although MFEM does support CMake configuration, MFEM @@ -488,6 +529,9 @@ def find_optional_library(name, prefix): else: mfem_mpiexec = "jsrun" mfem_mpiexec_np = "-p" + elif "FLUX_JOB_ID" in os.environ: + mfem_mpiexec = "flux mini run" + mfem_mpiexec_np = "-n" metis5_str = "NO" if ("+metis" in spec) and spec["metis"].satisfies("@5:"): @@ -530,11 +574,32 @@ def find_optional_library(name, prefix): "MFEM_USE_CEED=%s" % yes_no("+libceed"), "MFEM_USE_UMPIRE=%s" % yes_no("+umpire"), "MFEM_USE_FMS=%s" % yes_no("+fms"), + "MFEM_USE_GINKGO=%s" % yes_no("+ginkgo"), + "MFEM_USE_HIOP=%s" % yes_no("+hiop"), "MFEM_MPIEXEC=%s" % mfem_mpiexec, "MFEM_MPIEXEC_NP=%s" % mfem_mpiexec_np, "MFEM_USE_EXCEPTIONS=%s" % yes_no("+exceptions"), ] + # Determine C++ standard to use: + cxxstd = None + if self.spec.satisfies("@4.0.0:"): + cxxstd = "11" + if self.spec.satisfies("^raja@2022.03.0:"): + cxxstd = "14" + if self.spec.satisfies("^umpire@2022.03.0:"): + cxxstd = "14" + if self.spec.satisfies("^sundials@6.4.0:"): + cxxstd = "14" + if self.spec.satisfies("^ginkgo"): + cxxstd = "14" + cxxstd_flag = None + if cxxstd: + if "+cuda" in spec: + cxxstd_flag = "-std=c++" + cxxstd + else: + cxxstd_flag = getattr(self.compiler, "cxx" + cxxstd + "_flag") + cxxflags = spec.compiler_flags["cxxflags"] if cxxflags: @@ -557,16 +622,16 @@ def find_optional_library(name, prefix): "-x=cu --expt-extended-lambda -arch=sm_%s" % cuda_arch, "-ccbin %s" % (spec["mpi"].mpicxx if "+mpi" in spec else env["CXX"]), ] - if self.spec.satisfies("@4.0.0:"): - if "+cuda" in spec: - cxxflags.append("-std=c++11") - else: - cxxflags.append(self.compiler.cxx11_flag) + if cxxstd_flag: + cxxflags.append(cxxstd_flag) # The cxxflags are set by the spack c++ compiler wrapper. 
We also # set CXXFLAGS explicitly, for clarity, and to properly export the # cxxflags in the variable MFEM_CXXFLAGS in config.mk. options += ["CXXFLAGS=%s" % " ".join(cxxflags)] + elif cxxstd_flag: + options += ["BASE_FLAGS=%s" % cxxstd_flag] + # Treat any 'CXXFLAGS' in the environment as extra c++ flags which are # handled through the 'CPPFLAGS' makefile variable in MFEM. Also, unset # 'CXXFLAGS' from the environment to prevent it from overriding the @@ -589,9 +654,12 @@ def find_optional_library(name, prefix): if "+cuda" in hypre: hypre_gpu_libs = " -lcusparse -lcurand" elif "+rocm" in hypre: - hypre_gpu_libs = " " + ld_flags_from_dirs( - [env["ROCM_PATH"] + "/lib"], ["rocsparse", "rocrand"] - ) + hypre_rocm_libs = LibraryList([]) + if "^rocsparse" in hypre: + hypre_rocm_libs += hypre["rocsparse"].libs + if "^rocrand" in hypre: + hypre_rocm_libs += hypre["rocrand"].libs + hypre_gpu_libs = " " + ld_flags_from_library_list(hypre_rocm_libs) options += [ "HYPRE_OPT=-I%s" % hypre.prefix.include, "HYPRE_LIB=%s%s" % (ld_flags_from_library_list(all_hypre_libs), hypre_gpu_libs), @@ -804,6 +872,33 @@ def find_optional_library(name, prefix): if "+rocm" in spec: amdgpu_target = ",".join(spec.variants["amdgpu_target"].value) options += ["HIP_CXX=%s" % spec["hip"].hipcc, "HIP_ARCH=%s" % amdgpu_target] + hip_libs = LibraryList([]) + # To use a C++ compiler that supports -xhip flag one can use + # something like this: + # options += [ + # "HIP_CXX=%s" % (spec["mpi"].mpicxx if "+mpi" in spec else spack_cxx), + # "HIP_FLAGS=-xhip --offload-arch=%s" % amdgpu_target, + # ] + # hip_libs += find_libraries("libamdhip64", spec["hip"].prefix.lib) + if "^hipsparse" in spec: # hipsparse is needed @4.4.0:+rocm + hipsparse = spec["hipsparse"] + options += ["HIP_OPT=%s" % hipsparse.headers.cpp_flags] + hip_libs += hipsparse.libs + # Note: MFEM's defaults.mk wants to find librocsparse.* in + # $(HIP_DIR)/lib, so we set HIP_DIR to be $ROCM_PATH when using + # external HIP, or the prefix of 
rocsparse (which is a + # dependency of hipsparse) when using Spack-built HIP. + if spec["hip"].external: + options += ["HIP_DIR=%s" % env["ROCM_PATH"]] + else: + options += ["HIP_DIR=%s" % hipsparse["rocsparse"].prefix] + if "%cce" in spec: + # We assume the proper Cray CCE module (cce) is loaded: + craylibs_path = env["CRAYLIBS_" + env["MACHTYPE"].capitalize()] + craylibs = ["libmodules", "libfi", "libcraymath", "libf", "libu", "libcsup"] + hip_libs += find_libraries(craylibs, craylibs_path) + if hip_libs: + options += ["HIP_LIB=%s" % ld_flags_from_library_list(hip_libs)] if "+occa" in spec: options += [ @@ -812,12 +907,18 @@ def find_optional_library(name, prefix): ] if "+raja" in spec: - raja_opt = "-I%s" % spec["raja"].prefix.include - if spec["raja"].satisfies("^camp"): - raja_opt += " -I%s" % spec["camp"].prefix.include + raja = spec["raja"] + raja_opt = "-I%s" % raja.prefix.include + raja_lib = find_libraries( + "libRAJA", raja.prefix, shared=("+shared" in raja), recursive=True + ) + if raja.satisfies("^camp"): + camp = raja["camp"] + raja_opt += " -I%s" % camp.prefix.include + raja_lib += find_optional_library("libcamp", camp.prefix) options += [ "RAJA_OPT=%s" % raja_opt, - "RAJA_LIB=%s" % ld_flags_from_dirs([spec["raja"].prefix.lib], ["RAJA"]), + "RAJA_LIB=%s" % ld_flags_from_library_list(raja_lib), ] if "+amgx" in spec: @@ -895,6 +996,24 @@ def find_optional_library(name, prefix): "FMS_LIB=%s" % ld_flags_from_library_list(libfms.libs), ] + if "+ginkgo" in spec: + ginkgo = spec["ginkgo"] + options += [ + "GINKGO_DIR=%s" % ginkgo.prefix, + "GINKGO_BUILD_TYPE=%s" % ginkgo.variants["build_type"].value, + ] + + if "+hiop" in spec: + hiop = spec["hiop"] + hiop_libs = hiop.libs + hiop_libs += spec["lapack"].libs + spec["blas"].libs + if "^magma" in hiop: + hiop_libs += hiop["magma"].libs + options += [ + "HIOP_OPT=-I%s" % hiop.prefix.include, + "HIOP_LIB=%s" % ld_flags_from_library_list(hiop_libs), + ] + make("config", *options, parallel=False) make("info", 
parallel=False) @@ -910,6 +1029,9 @@ def check_or_test(self): make("-C", "examples", "ex1p" if ("+mpi" in self.spec) else "ex1", parallel=False) # make('check', parallel=False) else: + # As of v4.5.0 and ROCm up to 5.2.3, the following miniapp crashes + # the HIP compiler, so it has to be disabled for testing with HIP: + # filter_file("PAR_MINIAPPS = hooke", "PAR_MINIAPPS =", "miniapps/hooke/makefile") make("all") make("test", parallel=False) @@ -927,7 +1049,11 @@ def install(self, spec, prefix): with working_dir("config"): os.rename("config.mk", "config.mk.orig") copy(str(self.config_mk), "config.mk") + # Add '/mfem' to MFEM_INC_DIR for miniapps that include directly + # headers like "general/forall.hpp": + filter_file("(MFEM_INC_DIR.*)$", "\\1/mfem", "config.mk") shutil.copystat("config.mk.orig", "config.mk") + # TODO: miniapps linking to libmfem-common.* will not work. prefix_share = join_path(prefix, "share", "mfem") diff --git a/var/spack/repos/builtin/packages/mfem/test_builds.sh b/var/spack/repos/builtin/packages/mfem/test_builds.sh index 9b534836ec7..ba2c6ceef3a 100755 --- a/var/spack/repos/builtin/packages/mfem/test_builds.sh +++ b/var/spack/repos/builtin/packages/mfem/test_builds.sh @@ -8,7 +8,7 @@ rocm_arch="gfx908" spack_jobs='' # spack_jobs='-j 128' -mfem='mfem@4.4.0'${compiler} +mfem='mfem@4.5.0'${compiler} mfem_dev='mfem@develop'${compiler} backends='+occa+raja+libceed' @@ -19,41 +19,41 @@ hdf5_spec='^hdf5@1.8.19:1.8' # petsc spec petsc_spec='^petsc+suite-sparse+mumps' petsc_spec_cuda='^petsc+cuda+suite-sparse+mumps' -# strumpack spec without cuda (use @master until version > 6.3.1 is released) -strumpack_spec='^strumpack@master~slate~openmp~cuda' -strumpack_cuda_spec='^strumpack@master~slate~openmp' +# superlu-dist specs +superlu_spec_cuda='^superlu-dist+cuda cuda_arch='"${cuda_arch}" +superlu_spec_rocm='^superlu-dist+rocm amdgpu_target='"${rocm_arch}" +# strumpack spec without cuda (use version > 6.3.1) 
+strumpack_spec='^strumpack~slate~openmp~cuda' +strumpack_cuda_spec='^strumpack~slate~openmp' strumpack_rocm_spec='^strumpack+rocm~slate~openmp~cuda' builds=( # preferred version: ${mfem} ${mfem}'~mpi~metis~zlib' - # TODO: add back '+gslib' when the gslib test is fixed. - # TODO: add back '+slepc' when its build is fixed. - ${mfem}"$backends"'+superlu-dist+strumpack+suite-sparse+petsc \ - +sundials+pumi+mpfr+netcdf+zlib+gnutls+libunwind+conduit \ + ${mfem}"$backends"'+superlu-dist+strumpack+suite-sparse+petsc+slepc+gslib \ + +sundials+pumi+mpfr+netcdf+zlib+gnutls+libunwind+conduit+ginkgo+hiop \ '"$backends_specs $strumpack_spec $petsc_spec $hdf5_spec" ${mfem}'~mpi \ '"$backends"'+suite-sparse+sundials+gslib+mpfr+netcdf \ - +zlib+gnutls+libunwind+conduit \ + +zlib+gnutls+libunwind+conduit+ginkgo+hiop \ '"$backends_specs $hdf5_spec"' ^sundials~mpi' # develop version, shared builds: ${mfem_dev}'+shared~static' ${mfem_dev}'+shared~static~mpi~metis~zlib' # NOTE: Shared build with +gslib works on mac but not on linux - # TODO: add back '+gslib' when the gslib test is fixed and the above NOTE + # TODO: add back '+gslib' when the above NOTE # is addressed. - # TODO: add back '+slepc' when its build is fixed. ${mfem_dev}'+shared~static \ - '"$backends"'+superlu-dist+strumpack+suite-sparse+petsc \ - +sundials+pumi+mpfr+netcdf+zlib+gnutls+libunwind+conduit \ + '"$backends"'+superlu-dist+strumpack+suite-sparse+petsc+slepc \ + +sundials+pumi+mpfr+netcdf+zlib+gnutls+libunwind+conduit+ginkgo+hiop \ '"$backends_specs $strumpack_spec $petsc_spec $hdf5_spec" # NOTE: Shared build with +gslib works on mac but not on linux # TODO: add back '+gslib' when the above NOTE is addressed. 
${mfem_dev}'+shared~static~mpi \ '"$backends"'+suite-sparse+sundials+mpfr+netcdf \ - +zlib+gnutls+libunwind+conduit \ + +zlib+gnutls+libunwind+conduit+ginkgo+hiop \ '"$backends_specs $hdf5_spec"' ^sundials~mpi' ) @@ -67,8 +67,7 @@ builds2=( ${mfem}'+sundials~mpi ^sundials~mpi' ${mfem}'+sundials' ${mfem}'+pumi' - # TODO: uncomment the next line when the gslib test is fixed. - # ${mfem}'+gslib' + ${mfem}'+gslib' ${mfem}'+netcdf~mpi' ${mfem}'+netcdf' ${mfem}'+mpfr' @@ -77,10 +76,11 @@ builds2=( ${mfem}'+conduit' ${mfem}'+umpire' ${mfem}'+petsc'" $petsc_spec" - # TODO: uncomment the next line when the slepc build is fixed. - # ${mfem}'+petsc+slepc'" $petsc_spec" - # TODO: uncomment the next line when the threadsafe build is fixed. - # ${mfem}'+threadsafe' + ${mfem}'+petsc+slepc'" $petsc_spec" + ${mfem}'+ginkgo' + ${mfem}'+hiop' + ${mfem}'+threadsafe' + # # develop version ${mfem_dev}"$backends $backends_specs" ${mfem_dev}'+superlu-dist' @@ -90,8 +90,7 @@ builds2=( ${mfem_dev}'+sundials~mpi ^sundials~mpi' ${mfem_dev}'+sundials' ${mfem_dev}'+pumi' - # TODO: uncomment the next line when the gslib test is fixed. - # ${mfem_dev}'+gslib' + ${mfem_dev}'+gslib' ${mfem_dev}'+netcdf~mpi' ${mfem_dev}'+netcdf' ${mfem_dev}'+mpfr' @@ -101,8 +100,9 @@ builds2=( ${mfem_dev}'+umpire' ${mfem_dev}'+petsc'" $petsc_spec" ${mfem_dev}'+petsc+slepc'" $petsc_spec" - # TODO: uncomment the next line when the threadsafe build is fixed. - # ${mfem_dev}'+threadsafe' + ${mfem_dev}'+ginkgo' + ${mfem_dev}'+hiop' + ${mfem_dev}'+threadsafe' ) @@ -119,27 +119,28 @@ builds_cuda=( ^raja+cuda~openmp ^hypre+cuda' # hypre without cuda: + # NOTE: PETSc tests may need PETSC_OPTIONS="-use_gpu_aware_mpi 0" # TODO: restore '+libceed' when the libCEED CUDA unit tests take less time. - # TODO: add back '+gslib' when the gslib test is fixed. - # TODO: restore '+superlu-dist' when the unit test is fixed. - # TODO: add back "+petsc+slepc $petsc_spec_cuda" when it works. 
+ # TODO: remove "^hiop+shared" when the default static build is fixed. ${mfem}'+cuda+openmp+raja+occa cuda_arch='"${cuda_arch}"' \ - +strumpack+suite-sparse \ - +sundials+pumi+mpfr+netcdf+zlib+gnutls+libunwind+conduit \ - ^raja+cuda+openmp'" $strumpack_cuda_spec"' \ - '"$hdf5_spec" + +superlu-dist+strumpack+suite-sparse+gslib+petsc+slepc \ + +sundials+pumi+mpfr+netcdf+zlib+gnutls+libunwind+conduit+ginkgo+hiop \ + ^raja+cuda+openmp ^hiop+shared'" $strumpack_cuda_spec"' \ + '"$superlu_spec_cuda $petsc_spec_cuda $hdf5_spec" # hypre with cuda: # TODO: restore '+libceed' when the libCEED CUDA unit tests take less time. - # TODO: add back '+gslib' when the gslib test is fixed. - # TODO: restore '+superlu-dist' when we support it with '^hypre+cuda'. + # TODO: restore '+superlu-dist $superlu_spec_cuda' when we support it with + # '^hypre+cuda'. # TODO: add back "+strumpack $strumpack_cuda_spec" when it's supported. # TODO: add back "+petsc+slepc $petsc_spec_cuda" when it works. + # NOTE: PETSc tests may need PETSC_OPTIONS="-use_gpu_aware_mpi 0" # TODO: add back "+sundials" when it's supported with '^hypre+cuda'. + # TODO: remove "^hiop+shared" when the default static build is fixed. ${mfem}'+cuda+openmp+raja+occa cuda_arch='"${cuda_arch}"' \ - +suite-sparse \ - +pumi+mpfr+netcdf+zlib+gnutls+libunwind+conduit \ - ^raja+cuda+openmp ^hypre+cuda \ + +suite-sparse+gslib \ + +pumi+mpfr+netcdf+zlib+gnutls+libunwind+conduit+ginkgo+hiop \ + ^raja+cuda+openmp ^hiop+shared ^hypre+cuda \ '"$hdf5_spec" # @@ -158,32 +159,38 @@ builds_cuda=( ^raja+cuda~openmp ^hypre+cuda' # hypre without cuda: + # NOTE: PETSc tests may need PETSC_OPTIONS="-use_gpu_aware_mpi 0" # TODO: restore '+libceed' when the libCEED CUDA unit tests take less time. - # TODO: add back '+gslib' when the gslib test is fixed. - # TODO: restore '+superlu-dist' when the unit test is fixed. - # TODO: add back "+petsc+slepc $petsc_spec_cuda" when it works. 
+ # TODO: remove "^hiop+shared" when the default static build is fixed. ${mfem_dev}'+cuda+openmp+raja+occa cuda_arch='"${cuda_arch}"' \ - +strumpack+suite-sparse \ - +sundials+pumi+mpfr+netcdf+zlib+gnutls+libunwind+conduit \ - ^raja+cuda+openmp'" $strumpack_cuda_spec"' \ - '"$hdf5_spec" + +superlu-dist+strumpack+suite-sparse+gslib+petsc+slepc \ + +sundials+pumi+mpfr+netcdf+zlib+gnutls+libunwind+conduit+ginkgo+hiop \ + ^raja+cuda+openmp ^hiop+shared'" $strumpack_cuda_spec"' \ + '"$superlu_spec_cuda $petsc_spec_cuda $hdf5_spec" # hypre with cuda: # TODO: restore '+libceed' when the libCEED CUDA unit tests take less time. - # TODO: add back '+gslib' when the gslib test is fixed. - # TODO: restore '+superlu-dist' when we support it with '^hypre+cuda'. + # TODO: restore '+superlu-dist $superlu_spec_cuda' when we support it with + # '^hypre+cuda'. # TODO: add back "+strumpack $strumpack_cuda_spec" when it's supported. # TODO: add back "+petsc+slepc $petsc_spec_cuda" when it works. + # NOTE: PETSc tests may need PETSC_OPTIONS="-use_gpu_aware_mpi 0" # TODO: add back "+sundials" when it's supported with '^hypre+cuda'. + # TODO: remove "^hiop+shared" when the default static build is fixed. ${mfem_dev}'+cuda+openmp+raja+occa cuda_arch='"${cuda_arch}"' \ - +suite-sparse \ - +pumi+mpfr+netcdf+zlib+gnutls+libunwind+conduit \ - ^raja+cuda+openmp ^hypre+cuda \ + +suite-sparse+gslib \ + +pumi+mpfr+netcdf+zlib+gnutls+libunwind+conduit+ginkgo+hiop \ + ^raja+cuda+openmp ^hiop+shared ^hypre+cuda \ '"$hdf5_spec" ) builds_rocm=( + # NOTE: the miniapp 'hooke' crashes the HIP compiler, so it needs to be + # disabled in Spack, e.g. with + # filter_file("PAR_MINIAPPS = hooke", "PAR_MINIAPPS =", + # "miniapps/hooke/makefile") + # hypre without rocm: ${mfem}'+rocm amdgpu_target='"${rocm_arch}" @@ -195,24 +202,30 @@ builds_rocm=( ^raja+rocm~openmp ^occa~cuda ^hypre+rocm' # hypre without rocm: - # TODO: add back '+gslib' when the gslib test is fixed. 
- # TODO: restore '+superlu-dist' when the unit test is fixed. # TODO: add "+petsc+slepc $petsc_spec_rocm" when it is supported. + # TODO: add back '+conduit' when it is no longer linked with tcmalloc*. + # TODO: add back '+hiop' when it is no longer linked with tcmalloc* through + # its magma dependency. + # TODO: add back '+ginkgo' when the Ginkgo example works. ${mfem}'+rocm+openmp+raja+occa+libceed amdgpu_target='"${rocm_arch}"' \ - +strumpack+suite-sparse \ - +sundials+pumi+mpfr+netcdf+zlib+gnutls+libunwind+conduit \ + +superlu-dist+strumpack+suite-sparse+gslib \ + +sundials+pumi+mpfr+netcdf+zlib+gnutls+libunwind \ ^raja+rocm~openmp ^occa~cuda'" $strumpack_rocm_spec"' \ - '"$hdf5_spec" + '"$superlu_spec_rocm $hdf5_spec" # hypre with rocm: - # TODO: add back '+gslib' when the gslib test is fixed. - # TODO: restore '+superlu-dist' when we support it with '^hypre+rocm'. + # TODO: restore '+superlu-dist $superlu_spec_rocm' when we support it with + # '^hypre+rocm'. # TODO: add back "+strumpack $strumpack_rocm_spec" when it's supported. # TODO: add back "+petsc+slepc $petsc_spec_rocm" when it works. + # TODO: add back '+conduit' when it is no longer linked with tcmalloc*. + # TODO: add back '+hiop' when it is no longer linked with tcmalloc* through + # its magma dependency. + # TODO: add back '+ginkgo' when the Ginkgo example works. # TODO: add back "+sundials" when it's supported with '^hypre+rocm'. 
${mfem}'+rocm+openmp+raja+occa+libceed amdgpu_target='"${rocm_arch}"' \ - +suite-sparse \ - +pumi+mpfr+netcdf+zlib+gnutls+libunwind+conduit \ + +suite-sparse+gslib \ + +pumi+mpfr+netcdf+zlib+gnutls+libunwind \ ^raja+rocm~openmp ^occa~cuda ^hypre+rocm \ '"$hdf5_spec" @@ -233,14 +246,22 @@ run_builds=("${builds[@]}" "${builds2[@]}") # run_builds=("${builds_cuda[@]}") # run_builds=("${builds_rocm[@]}") +# PETSc CUDA tests on Lassen need this: +# export PETSC_OPTIONS="-use_gpu_aware_mpi 0" + for bld in "${run_builds[@]}"; do + eval bbb="\"${bld}\"" + printf "\n%s\n" "${SEP}" printf " %s\n" "${bld}" printf "%s\n" "${SEP}" - eval bbb="\"${bld}\"" - spack spec -I $bbb || exit 1 + spack spec --fresh -I $bbb || exit 1 printf "%s\n" "${sep}" - spack install $spack_jobs --test=root $bbb || exit 2 + spack install $spack_jobs --fresh --test=root $bbb || exit 2 + + # echo ./bin/spack spec --fresh -I $bbb + # echo ./bin/spack install $spack_jobs --fresh --test=root $bbb + # echo done # Uninstall all mfem builds: diff --git a/var/spack/repos/builtin/packages/mgard/package.py b/var/spack/repos/builtin/packages/mgard/package.py new file mode 100644 index 00000000000..1cc336c7a19 --- /dev/null +++ b/var/spack/repos/builtin/packages/mgard/package.py @@ -0,0 +1,45 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class Mgard(CMakePackage, CudaPackage): + """MGARD error bounded lossy compressor + forked from https://github.com/CODARcode/MGARD with patches to support Spack""" + + # This is a research compressor with a fast evolving API. 
The fork is updated + # when releases are made with minimal changes to support spack + + homepage = "https://github.com/CODARcode/MGARD" + git = "https://github.com/robertu94/MGARD" + + maintainers = ["robertu94"] + + version("2021-11-12", commit="3c05c80a45a51bb6cc5fb5fffe7b1b16787d3366") + version("2020-10-01", commit="b67a0ac963587f190e106cc3c0b30773a9455f7a") + + depends_on("zlib") + depends_on("zstd") + depends_on("libarchive", when="@2021-11-12:") + depends_on("tclap", when="@2021-11-12:") + depends_on("yaml-cpp", when="@2021-11-12:") + depends_on("cmake@3.19:") + depends_on("nvcomp@2.0.2", when="+cuda") + conflicts("cuda_arch=none", when="+cuda") + conflicts("~cuda", when="@2021-11-12") + + def cmake_args(self): + args = ["-DBUILD_TESTING=OFF"] + if "+cuda" in self.spec: + args.append("-DMGARD_ENABLE_CUDA=ON") + cuda_arch = self.spec.variants["cuda_arch"].value + args.append("-DCUDA_ARCH_STRING={}".format(";".join(cuda_arch))) + if "75" in cuda_arch: + args.append("-DMGARD_ENABLE_CUDA_OPTIMIZE_TURING=ON") + if "70" in cuda_arch: + args.append("-DMGARD_ENABLE_CUDA_OPTIMIZE_VOLTA=ON") + + return args diff --git a/var/spack/repos/builtin/packages/mgardx/package.py b/var/spack/repos/builtin/packages/mgardx/package.py new file mode 100644 index 00000000000..8872e7cfe4c --- /dev/null +++ b/var/spack/repos/builtin/packages/mgardx/package.py @@ -0,0 +1,35 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class Mgardx(CMakePackage): + """MGARD implementation for research purposes + forked from: https://github.com/lxAltria/MGARDx""" + + # Some of the functionality of this compressor were moved to `MGARD` proper + # effectively retiring this package. This package lives on to access some of + # this functionality. Includes minor patches to support spack. 
+ + homepage = "https://github.com/lxAltria/MGARDx" + git = "https://github.com/robertu94/MGARDx" + + maintainers = ["robertu94"] + + variant("shared", description="build shared libraries", default=True) + + version("2022-01-27", commit="aabe9de1a331eaeb8eec41125dd45e30c1d03af4") + + depends_on("sz-cpp") + depends_on("pkgconfig") + depends_on("zstd") + + def cmake_args(self): + args = [ + self.define("BUILD_TESTING", self.run_tests), + self.define_from_variant("BUILD_SHARED_LIBS", "shared"), + ] + return args diff --git a/var/spack/repos/builtin/packages/mlst/package.py b/var/spack/repos/builtin/packages/mlst/package.py new file mode 100644 index 00000000000..fb2b7b585cd --- /dev/null +++ b/var/spack/repos/builtin/packages/mlst/package.py @@ -0,0 +1,36 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +from spack.package import * + + +class Mlst(Package): + """Scan contig files against traditional PubMLST typing schemes""" + + homepage = "https://github.com/tseemann/mlst" + url = "https://github.com/tseemann/mlst/archive/refs/tags/v2.22.1.tar.gz" + + version("2.22.1", sha256="a8f64d7cb961a8e422e96a19309ad8707f8792d9f755a9e5a1f5742986d19bca") + + depends_on("perl@5.26:", type="run") + depends_on("perl-moo", type="run") + depends_on("perl-list-moreutils", type="run") + depends_on("perl-json", type="run") + depends_on("perl-file-which", type="run") + depends_on("blast-plus@2.9.0:", type="run") + depends_on("any2fasta", type="run") + # dependencies for scripts + depends_on("parallel", type="run") + depends_on("curl", type="run") + + def install(self, spec, prefix): + mkdirp(prefix.bin) + mkdirp(prefix.db) + mkdirp(prefix.perl5) + install_tree("bin", prefix.bin) + install_tree("scripts", prefix.bin) + install_tree("db", prefix.db) + install_tree("perl5", prefix.perl5) diff --git 
a/var/spack/repos/builtin/packages/mmg/package.py b/var/spack/repos/builtin/packages/mmg/package.py index f0269ca4306..3cd579dff1a 100644 --- a/var/spack/repos/builtin/packages/mmg/package.py +++ b/var/spack/repos/builtin/packages/mmg/package.py @@ -5,6 +5,7 @@ import os +import spack.build_systems.cmake from spack.package import * from spack.util.executable import which @@ -43,24 +44,22 @@ class Mmg(CMakePackage): depends_on("doxygen", when="+doc") depends_on("vtk", when="+vtk") + +class CMakeBuilder(spack.build_systems.cmake.CMakeBuilder): def cmake_args(self): - args = [] - - args.append(self.define_from_variant("USE_SCOTCH", "scotch")) - args.append(self.define_from_variant("USE_VTK", "vtk")) - - if "+shared" in self.spec: - args.append("-DLIBMMG3D_SHARED=ON") - args.append("-DLIBMMG2D_SHARED=ON") - args.append("-DLIBMMGS_SHARED=ON") - args.append("-DLIBMMG_SHARED=ON") - else: - args.append("-DLIBMMG3D_STATIC=ON") - args.append("-DLIBMMG2D_STATIC=ON") - args.append("-DLIBMMGS_STATIC=ON") - args.append("-DLIBMMG_STATIC=ON") - - return args + shared_active = self.spec.satisfies("+shared") + return [ + self.define_from_variant("USE_SCOTCH", "scotch"), + self.define_from_variant("USE_VTK", "vtk"), + self.define("LIBMMG3D_SHARED", shared_active), + self.define("LIBMMG2D_SHARED", shared_active), + self.define("LIBMMGS_SHARED", shared_active), + self.define("LIBMMG_SHARED", shared_active), + self.define("LIBMMG3D_STATIC", not shared_active), + self.define("LIBMMG2D_STATIC", not shared_active), + self.define("LIBMMGS_STATIC", not shared_active), + self.define("LIBMMG_STATIC", not shared_active), + ] # parmmg requires this for its build @run_after("install") diff --git a/var/spack/repos/builtin/packages/mochi-margo/package.py b/var/spack/repos/builtin/packages/mochi-margo/package.py index 7feb7345fc5..ebbdfebe123 100644 --- a/var/spack/repos/builtin/packages/mochi-margo/package.py +++ b/var/spack/repos/builtin/packages/mochi-margo/package.py @@ -17,6 +17,7 @@ class 
MochiMargo(AutotoolsPackage): maintainers = ["carns", "mdorier", "fbudin69500", "chuckatkins"] version("main", branch="main") + version("0.10", sha256="163be090575ee267a84320b92791d83b98e9549b03bd705a166f0b5e4df53129") version("0.9.10", sha256="b205b45fe200d1b2801ea3b913fa75d709af97abf470f4ad72a08d2839f03772") version("0.9.9", sha256="9e8fce88a6bd9c1002b4a6924c935ebb2e2024e3afe6618b17e23538335bd15d") version("0.9.8", sha256="a139e804bf0b2725433c256e8315a2ba896f1fb34d9057261a4b92df783ffbbb") diff --git a/var/spack/repos/builtin/packages/mothur/package.py b/var/spack/repos/builtin/packages/mothur/package.py index 2d546c79581..3e88378a5d6 100644 --- a/var/spack/repos/builtin/packages/mothur/package.py +++ b/var/spack/repos/builtin/packages/mothur/package.py @@ -4,7 +4,6 @@ # SPDX-License-Identifier: (Apache-2.0 OR MIT) from spack.package import * -from spack.pkg.builtin.boost import Boost class Mothur(MakefilePackage): @@ -15,37 +14,81 @@ class Mothur(MakefilePackage): homepage = "https://github.com/mothur/mothur" url = "https://github.com/mothur/mothur/archive/v1.39.5.tar.gz" + version("1.48.0", sha256="9494406abd8d14b821782ab9db811f045ded9424f28f01234ee6764d4e78941d") version("1.46.1", sha256="29b500b3c92d726cde34922f697f2e47f0b7127d76d9a6fb167cc2b8ba3d00fd") version("1.43.0", sha256="12ccd95a85bec3bb1564b8feabd244ea85413973740754803d01fc71ecb0a2c1") version("1.42.1", sha256="6b61591dda289ac2d8361f9c1547ffbeeba3b9fbdff877dd286bad850bbd5539") version("1.40.5", sha256="a0fbdfa68b966d7adc4560e3787506a0dad8b47b4b996c2663cd6c0b416d101a") version("1.39.5", sha256="9f1cd691e9631a2ab7647b19eb59cd21ea643f29b22cde73d7f343372dfee342") + maintainers = ["snehring"] - variant("vsearch", default=False, description="Use vsearch") + variant( + "boost", + default=True, + description="Build with boost support (allow make.contigs to read gz files).", + ) + variant("hdf5", default=False, description="Build with hdf5 support", when="@1.41.0:") + variant( + "gsl", default=False, 
description="Build with the gnu scientific libaries", when="@1.43.0:" + ) - # TODO: replace this with an explicit list of components of Boost, - # for instance depends_on('boost +filesystem') - # See https://github.com/spack/spack/pull/22303 for reference - depends_on(Boost.with_default_variants) + depends_on("boost+iostreams+filesystem+system", when="+boost") + depends_on("gsl", when="+gsl") + depends_on("hdf5+cxx", when="+hdf5") depends_on("readline") - depends_on("vsearch@2.13.3", when="+vsearch", type="run") + depends_on("vsearch@2.13.5:", type="run") + depends_on("usearch", type="run") + depends_on("zlib", when="+boost") def edit(self, spec, prefix): - makefile = FileFilter("Makefile") - makefile.filter( - 'BOOST_LIBRARY_DIR="\\"Enter_your_boost_library_path' '_here\\""', - "BOOST_LIBRARY_DIR=%s" % self.spec["boost"].prefix.lib, - ) - makefile.filter( - 'BOOST_INCLUDE_DIR="\\"Enter_your_boost_include_path' '_here\\""', - "BOOST_INCLUDE_DIR=%s" % self.spec["boost"].prefix.include, - ) - makefile.filter( - 'MOTHUR_FILES="\\"Enter_your_default_path_' 'here\\""', "MOTHUR_FILES=%s" % prefix - ) + filter_file(r"^.*DMOTHUR_TOOLS.*$", "", "Makefile") + filter_file(r"^.*DMOTHUR_FILES.*$", "", "Makefile") + filter_file(r"(\$\(skipUchime\))", r"\1, source/", "Makefile") + if spec.satisfies("@1.40.5"): + filter_file( + r"^(#define writer_h)", "\\1 \n#include", join_path("source", "writer.h") + ) + # this includes the public domain uchime, which needs work to + # compile on newer compilers we'll use what's in usearch + filter_file(" uchime", "", "Makefile") + if spec.satisfies("+boost"): + filter_file(r"USEBOOST \?=.*$", "USEBOOST = yes", "Makefile") + filter_file( + r"^BOOST_LIBRARY_DIR .*$", + "BOOST_LIBRARY_DIR=%s" % self.spec["boost"].prefix.lib, + "Makefile", + ) + filter_file( + r"BOOST_INCLUDE_DIR .*$", + "BOOST_INCLUDE_DIR=%s" % self.spec["boost"].prefix.include, + "Makefile", + ) + if spec.satisfies("+hdf5"): + filter_file(r"USEHDF5 \?=.*$", "USEHDF5 = yes", 
"Makefile") + filter_file( + r"^HDF5_LIBRARY_DIR \?=.*$", + "HDF5_LIBRARY_DIR = " + spec["hdf5"].prefix.lib, + "Makefile", + ) + filter_file( + r"^HDF5_INCLUDE_DIR \?=.*$", + "HDF5_INCLUDE_DIR = " + spec["hdf5"].prefix.include, + "Makefile", + ) + if spec.satisfies("+gsl"): + filter_file(r"^USEGSL \?=.*$", "USEGSL = yes", "Makefile") + filter_file( + r"GSL_LIBRARY_DIR \?=.*$", + "GSL_LIBRARY_DIR = " + spec["gsl"].prefix.lib, + "Makefile", + ) + filter_file( + r"GSL_INCLUDE_DIR \?=.*$", + "GSL_INCLUDE_DIR = " + spec["gsl"].prefix.include, + "Makefile", + ) def install(self, spec, prefix): mkdirp(prefix.bin) install("mothur", prefix.bin) - install("uchime", prefix.bin) install_tree("source", prefix.include) diff --git a/var/spack/repos/builtin/packages/mpich/mpich-oneapi-config-rpath.patch b/var/spack/repos/builtin/packages/mpich/mpich-oneapi-config-rpath.patch deleted file mode 100644 index 94eafda34f5..00000000000 --- a/var/spack/repos/builtin/packages/mpich/mpich-oneapi-config-rpath.patch +++ /dev/null @@ -1,30 +0,0 @@ -diff -ruN spack-src/confdb/config.rpath spack-src-patched/confdb/config.rpath ---- spack-src/confdb/config.rpath 2022-03-29 15:13:49.000000000 -0700 -+++ spack-src-patched/confdb/config.rpath 2022-07-25 17:54:14.638367460 -0700 -@@ -76,7 +76,7 @@ - ecc*) - wl='-Wl,' - ;; -- icc* | ifort*) -+ icc* | icx* | ifort* | ifx*) - wl='-Wl,' - ;; - lf95*) -@@ -233,7 +233,7 @@ - enable_dtags_flag="${wl}--enable-new-dtags" - disable_dtags_flag="${wl}--disable-new-dtags" - else -- case $cc_basename in ifort*) -+ case $cc_basename in ifort* | ifx*) - enable_dtags_flag="${wl}--enable-new-dtags" - disable_dtags_flag="${wl}--disable-new-dtags" - ;; -@@ -380,7 +380,7 @@ - ;; - darwin* | rhapsody*) - hardcode_direct=no -- if { case $cc_basename in ifort*) true;; *) test "$GCC" = yes;; esac; }; then -+ if { case $cc_basename in ifort* | ifx*) true;; *) test "$GCC" = yes;; esac; }; then - : - else - ld_shlibs=no diff --git 
a/var/spack/repos/builtin/packages/mpich/mpich-oneapi-config-rpath/step1.patch b/var/spack/repos/builtin/packages/mpich/mpich-oneapi-config-rpath/step1.patch new file mode 100644 index 00000000000..90f53b61cf6 --- /dev/null +++ b/var/spack/repos/builtin/packages/mpich/mpich-oneapi-config-rpath/step1.patch @@ -0,0 +1,9 @@ +# This patch is applicable starting at least version 3.0. +--- a/confdb/config.rpath ++++ b/confdb/config.rpath +@@ -79 +79 @@ else +- icc* | ifort*) ++ icc* | icx* | ifort* | ifx*) +@@ -383 +383 @@ else +- if { case $cc_basename in ifort*) true;; *) test "$GCC" = yes;; esac; }; then ++ if { case $cc_basename in ifort* | ifx*) true;; *) test "$GCC" = yes;; esac; }; then diff --git a/var/spack/repos/builtin/packages/mpich/mpich-oneapi-config-rpath/step2.patch b/var/spack/repos/builtin/packages/mpich/mpich-oneapi-config-rpath/step2.patch new file mode 100644 index 00000000000..ea14a28eb8b --- /dev/null +++ b/var/spack/repos/builtin/packages/mpich/mpich-oneapi-config-rpath/step2.patch @@ -0,0 +1,6 @@ +# This patch is applicable starting version 3.1.1. 
+--- a/confdb/config.rpath ++++ b/confdb/config.rpath +@@ -236 +236 @@ if test "$with_gnu_ld" = yes; then +- case $cc_basename in ifort*) ++ case $cc_basename in ifort* | ifx*) diff --git a/var/spack/repos/builtin/packages/mpich/mpich34_yaksa_hindexed.patch b/var/spack/repos/builtin/packages/mpich/mpich34_yaksa_hindexed.patch new file mode 100644 index 00000000000..6306eab5d35 --- /dev/null +++ b/var/spack/repos/builtin/packages/mpich/mpich34_yaksa_hindexed.patch @@ -0,0 +1,131 @@ +--- a/modules/yaksa/src/frontend/types/yaksa_blkindx.c ++++ b/modules/yaksa/src/frontend/types/yaksa_blkindx.c +@@ -74,7 +74,7 @@ int yaksi_type_create_hindexed_block(int count, int blocklength, const intptr_t + if (intype->is_contig && ((outtype->ub - outtype->lb) == outtype->size)) { + outtype->is_contig = true; + for (int i = 1; i < count; i++) { +- if (array_of_displs[i] <= array_of_displs[i - 1]) { ++ if (array_of_displs[i] != array_of_displs[i - 1] + intype->extent * blocklength) { + outtype->is_contig = false; + break; + } +--- a/modules/yaksa/src/frontend/types/yaksa_indexed.c ++++ b/modules/yaksa/src/frontend/types/yaksa_indexed.c +@@ -44,8 +44,12 @@ int yaksi_type_create_hindexed(int count, const int *array_of_blocklengths, + outtype->alignment = intype->alignment; + + int is_set; ++ intptr_t last_ub; ++ int is_noncontig; + is_set = 0; +- for (int idx = 0; idx < count; idx++) { ++ last_ub = 0; ++ is_noncontig = 0; ++ for (intptr_t idx = 0; idx < count; idx++) { + if (array_of_blocklengths[idx] == 0) + continue; + +@@ -60,6 +64,11 @@ int yaksi_type_create_hindexed(int count, const int *array_of_blocklengths, + ub = array_of_displs[idx] + intype->ub; + } + ++ if (idx > 0 && lb != last_ub) { ++ is_noncontig = 1; ++ } ++ last_ub = ub; ++ + intptr_t true_lb = lb - intype->lb + intype->true_lb; + intptr_t true_ub = ub - intype->ub + intype->true_ub; + +@@ -90,26 +99,8 @@ int yaksi_type_create_hindexed(int count, const int *array_of_blocklengths, + outtype->u.hindexed.child = intype; 
+ + /* detect if the outtype is contiguous */ +- if (intype->is_contig && ((outtype->ub - outtype->lb) == outtype->size)) { ++ if (!is_noncontig && intype->is_contig && (outtype->ub - outtype->lb) == outtype->size) { + outtype->is_contig = true; +- +- int left = 0; +- while (array_of_blocklengths[left] == 0) +- left++; +- int right = left + 1; +- while (right < count && array_of_blocklengths[right] == 0) +- right++; +- while (right < count) { +- if (array_of_displs[right] <= array_of_displs[left]) { +- outtype->is_contig = false; +- break; +- } else { +- left = right; +- right++; +- while (right < count && array_of_blocklengths[right] == 0) +- right++; +- } +- } + } else { + outtype->is_contig = false; + } +--- a/modules/yaksa/src/frontend/types/yaksa_struct.c ++++ b/modules/yaksa/src/frontend/types/yaksa_struct.c +@@ -42,9 +42,13 @@ int yaksi_type_create_struct(int count, const int *array_of_blocklengths, + } + + int is_set; ++ intptr_t last_ub; ++ int is_noncontig; + is_set = 0; ++ last_ub = 0; ++ is_noncontig = 0; + outtype->alignment = 0; +- for (int idx = 0; idx < count; idx++) { ++ for (intptr_t idx = 0; idx < count; idx++) { + if (array_of_blocklengths[idx] == 0) + continue; + +@@ -61,6 +65,12 @@ int yaksi_type_create_struct(int count, const int *array_of_blocklengths, + + intptr_t true_lb = lb - array_of_intypes[idx]->lb + array_of_intypes[idx]->true_lb; + intptr_t true_ub = ub - array_of_intypes[idx]->ub + array_of_intypes[idx]->true_ub; ++ ++ if (idx > 0 && true_lb != last_ub) { ++ is_noncontig = 1; ++ } ++ last_ub = true_ub; ++ + int tree_depth = array_of_intypes[idx]->tree_depth; + if (outtype->alignment < array_of_intypes[idx]->alignment) + outtype->alignment = array_of_intypes[idx]->alignment; +@@ -94,7 +104,7 @@ int yaksi_type_create_struct(int count, const int *array_of_blocklengths, + outtype->extent = outtype->ub - outtype->lb; + + /* detect if the outtype is contiguous */ +- if ((outtype->ub - outtype->lb) == outtype->size) { ++ if (!is_noncontig 
&& (outtype->ub - outtype->lb) == outtype->size) { + outtype->is_contig = true; + + for (int i = 0; i < count; i++) { +@@ -103,24 +113,6 @@ int yaksi_type_create_struct(int count, const int *array_of_blocklengths, + break; + } + } +- +- int left = 0; +- while (array_of_blocklengths[left] == 0) +- left++; +- int right = left + 1; +- while (right < count && array_of_blocklengths[right] == 0) +- right++; +- while (right < count) { +- if (array_of_displs[right] <= array_of_displs[left]) { +- outtype->is_contig = false; +- break; +- } else { +- left = right; +- right++; +- while (right < count && array_of_blocklengths[right] == 0) +- right++; +- } +- } + } else { + outtype->is_contig = false; + } diff --git a/var/spack/repos/builtin/packages/mpich/package.py b/var/spack/repos/builtin/packages/mpich/package.py index a09b779e0ba..552f3139896 100644 --- a/var/spack/repos/builtin/packages/mpich/package.py +++ b/var/spack/repos/builtin/packages/mpich/package.py @@ -150,8 +150,14 @@ class Mpich(AutotoolsPackage, CudaPackage, ROCmPackage): filter_compiler_wrappers("mpicc", "mpicxx", "mpif77", "mpif90", "mpifort", relative_root="bin") - # https://github.com/spack/spack/issues/31678 - patch("mpich-oneapi-config-rpath.patch", when="@4.0.2 %oneapi") + # Set correct rpath flags for Intel Fortran Compiler (%oneapi) + # See https://github.com/pmodels/mpich/pull/5824 + # and https://github.com/spack/spack/issues/31678 + # We do not fetch the patch from the upstream repo because it cannot be applied to older + # versions. 
+ with when("%oneapi"): + patch("mpich-oneapi-config-rpath/step1.patch", when="@:4.0.2") + patch("mpich-oneapi-config-rpath/step2.patch", when="@3.1.1:4.0.2") # Fix using an external hwloc # See https://github.com/pmodels/mpich/issues/4038 @@ -188,6 +194,18 @@ class Mpich(AutotoolsPackage, CudaPackage, ROCmPackage): when="@4.0:4.0.2", ) + # Fix checking whether the datatype is contiguous + # https://github.com/pmodels/yaksa/pull/189 + # https://github.com/pmodels/mpich/issues/5391 + # The problem has been fixed starting version 4.0 by updating the yaksa git submodule, which + # has not been done for the 3.4.x branch. The following patch is a backport of the + # aforementioned pull request for the unreleased version of yaksa that is vendored with MPICH. + # Note that Spack builds MPICH against a non-vendored yaksa only starting version 4.0. + with when("@3.4"): + # Apply the patch only when yaksa is used: + patch("mpich34_yaksa_hindexed.patch", when="datatype-engine=yaksa") + patch("mpich34_yaksa_hindexed.patch", when="datatype-engine=auto device=ch4") + depends_on("findutils", type="build") depends_on("pkgconfig", type="build") @@ -384,8 +402,7 @@ def setup_run_environment(self, env): # their run environments the code to make the compilers available. # For Cray MPIs, the regular compiler wrappers *are* the MPI wrappers. # Cray MPIs always have cray in the module name, e.g. "cray-mpich" - external_modules = self.spec.external_modules - if external_modules and "cray" in external_modules[0]: + if self.spec.satisfies("platform=cray"): # This is intended to support external MPICH instances registered # by Spack on Cray machines prior to a879c87; users defining an # external MPICH entry for Cray should generally refer to the @@ -414,8 +431,7 @@ def setup_dependent_package(self, module, dependent_spec): # For Cray MPIs, the regular compiler wrappers *are* the MPI wrappers. # Cray MPIs always have cray in the module name, e.g. 
"cray-mpich" - external_modules = spec.external_modules - if external_modules and "cray" in external_modules[0]: + if self.spec.satisfies("platform=cray"): spec.mpicc = spack_cc spec.mpicxx = spack_cxx spec.mpifc = spack_fc @@ -508,7 +524,7 @@ def configure_args(self): if "+cuda" in spec: config_args.append("--with-cuda={0}".format(spec["cuda"].prefix)) - elif spec.satisfies("@:3.3,3.4.4:"): + elif not spec.satisfies("@3.4:3.4.3"): # Versions from 3.4 to 3.4.3 cannot handle --without-cuda # (see https://github.com/pmodels/mpich/pull/5060): config_args.append("--without-cuda") @@ -567,7 +583,7 @@ def configure_args(self): elif "datatype-engine=dataloop" in spec: config_args.append("--with-datatype-engine=dataloop") elif "datatype-engine=auto" in spec: - config_args.append("--with-datatye-engine=auto") + config_args.append("--with-datatype-engine=auto") if "+hcoll" in spec: config_args.append("--with-hcoll=" + spec["hcoll"].prefix) diff --git a/var/spack/repos/builtin/packages/mumps/mumps.src-makefile.5.5.patch b/var/spack/repos/builtin/packages/mumps/mumps.src-makefile.5.5.patch new file mode 100644 index 00000000000..d56d7ad7455 --- /dev/null +++ b/var/spack/repos/builtin/packages/mumps/mumps.src-makefile.5.5.patch @@ -0,0 +1,25 @@ +diff --git a/src/Makefile b/src/Makefile +index 2562522..9dfa43e 100644 +--- a/src/Makefile ++++ b/src/Makefile +@@ -215,7 +215,7 @@ $(libdir)/libmumps_common$(PLAT)$(LIBEXT_SHARED): $(OBJS_COMMON_MOD) $(OBJS + $(FC) $(OPTL) -shared $^ -Wl,-soname,libmumps_common$(PLAT).so -L$(libdir) $(RPATH_OPT) $(LORDERINGS) $(LIBS) $(LIBOTHERS) -o $@ + + $(libdir)/lib$(ARITH)mumps$(PLAT)$(LIBEXT): $(OBJS_MOD) $(OBJS_OTHER) +- $(AR)$@ $? ++ $(AR)$@ $? $(EXTRA_LIBS4MUMPS) + $(RANLIB) $@ + + $(libdir)/lib$(ARITH)mumps$(PLAT)$(LIBEXT_SHARED): $(OBJS_MOD) $(OBJS_OTHER) $(libdir)/libmumps_common$(PLAT)$(LIBEXT_SHARED) +@@ -434,9 +434,9 @@ $(OBJS_OTHER):$(OBJS_COMMON_MOD) $(OBJS_MOD) + + .SUFFIXES: .c .F .o + .F.o: +- $(FC) $(OPTF) $(FPIC) -I. 
-I../include $(INCS) $(IORDERINGSF) $(ORDERINGSF) -c $*.F $(OUTF)$*.o ++ $(FC) $(OPTF) $(FC_PIC_FLAG) $(FPIC) -I. -I../include $(INCS) $(IORDERINGSF) $(ORDERINGSF) -c $*.F $(OUTF)$*.o + .c.o: +- $(CC) $(OPTC) $(FPIC) -I../include $(INCS) $(CDEFS) $(IORDERINGSC) $(ORDERINGSC) -c $*.c $(OUTC)$*.o ++ $(CC) $(OPTC) $(CC_PIC_FLAG) $(FPIC) -I../include $(INCS) $(CDEFS) $(IORDERINGSC) $(ORDERINGSC) -c $*.c $(OUTC)$*.o + + $(ARITH)mumps_c.o: mumps_c.c + $(CC) $(OPTC) $(FPIC) -I../include $(INCS) $(CDEFS) -DMUMPS_ARITH=MUMPS_ARITH_$(ARITH) \ diff --git a/var/spack/repos/builtin/packages/mumps/package.py b/var/spack/repos/builtin/packages/mumps/package.py index 7d0d5c950b4..e0aff4a7567 100644 --- a/var/spack/repos/builtin/packages/mumps/package.py +++ b/var/spack/repos/builtin/packages/mumps/package.py @@ -13,9 +13,11 @@ class Mumps(Package): """MUMPS: a MUltifrontal Massively Parallel sparse direct Solver""" - homepage = "http://mumps.enseeiht.fr" - url = "http://mumps.enseeiht.fr/MUMPS_5.3.5.tar.gz" + homepage = "https://graal.ens-lyon.fr/MUMPS/index.php" + url = "https://graal.ens-lyon.fr/MUMPS/MUMPS_5.5.1.tar.gz" + version("5.5.1", sha256="1abff294fa47ee4cfd50dfd5c595942b72ebfcedce08142a75a99ab35014fa15") + version("5.5.0", sha256="e54d17c5e42a36c40607a03279e0704d239d71d38503aab68ef3bfe0a9a79c13") version("5.4.1", sha256="93034a1a9fe0876307136dcde7e98e9086e199de76f1c47da822e7d4de987fa8") version("5.4.0", sha256="c613414683e462da7c152c131cebf34f937e79b30571424060dd673368bbf627") version("5.3.5", sha256="e5d665fdb7043043f0799ae3dbe3b37e5b200d1ab7a6f7b2a4e463fd89507fa4") @@ -68,7 +70,8 @@ class Mumps(Package): # The following patches src/Makefile to fix some dependency # issues in lib[cdsz]mumps.so patch("mumps.src-makefile.5.2.patch", when="@5.2.0 +shared") - patch("mumps.src-makefile.5.3.patch", when="@5.3.0: +shared") + patch("mumps.src-makefile.5.3.patch", when="@5.3.0:5.4.1 +shared") + patch("mumps.src-makefile.5.5.patch", when="@5.5.0: +shared") conflicts("+parmetis", 
when="~mpi", msg="You cannot use the parmetis variant without mpi") conflicts("+parmetis", when="~metis", msg="You cannot use the parmetis variant without metis") diff --git a/var/spack/repos/builtin/packages/mvapich2/package.py b/var/spack/repos/builtin/packages/mvapich2/package.py index d806260c999..9f1d094ca07 100644 --- a/var/spack/repos/builtin/packages/mvapich2/package.py +++ b/var/spack/repos/builtin/packages/mvapich2/package.py @@ -358,8 +358,7 @@ def setup_dependent_build_environment(self, env, dependent_spec): def setup_compiler_environment(self, env): # For Cray MPIs, the regular compiler wrappers *are* the MPI wrappers. # Cray MPIs always have cray in the module name, e.g. "cray-mvapich" - external_modules = self.spec.external_modules - if external_modules and "cray" in external_modules[0]: + if self.spec.satisfies("platform=cray"): env.set("MPICC", spack_cc) env.set("MPICXX", spack_cxx) env.set("MPIF77", spack_fc) @@ -373,8 +372,7 @@ def setup_compiler_environment(self, env): def setup_dependent_package(self, module, dependent_spec): # For Cray MPIs, the regular compiler wrappers *are* the MPI wrappers. # Cray MPIs always have cray in the module name, e.g. 
"cray-mvapich" - external_modules = self.spec.external_modules - if external_modules and "cray" in external_modules[0]: + if self.spec.satisfies("platform=cray"): self.spec.mpicc = spack_cc self.spec.mpicxx = spack_cxx self.spec.mpifc = spack_fc diff --git a/var/spack/repos/builtin/packages/nalu-wind/package.py b/var/spack/repos/builtin/packages/nalu-wind/package.py index b66595d9398..4fc0bf61126 100644 --- a/var/spack/repos/builtin/packages/nalu-wind/package.py +++ b/var/spack/repos/builtin/packages/nalu-wind/package.py @@ -51,7 +51,7 @@ class NaluWind(CMakePackage, CudaPackage): depends_on("mpi") depends_on("yaml-cpp@0.5.3:") depends_on( - "trilinos@stable:" + "trilinos@13:" "+exodus+tpetra+muelu+belos+ifpack2+amesos2+zoltan+stk+boost" "~superlu-dist~superlu+hdf5+shards~hypre+gtest" ) diff --git a/var/spack/repos/builtin/packages/nasm/package.py b/var/spack/repos/builtin/packages/nasm/package.py index e7d76b1d8f5..5514b26b5e6 100644 --- a/var/spack/repos/builtin/packages/nasm/package.py +++ b/var/spack/repos/builtin/packages/nasm/package.py @@ -8,7 +8,7 @@ from spack.package import * -class Nasm(Package): +class Nasm(AutotoolsPackage): """NASM (Netwide Assembler) is an 80x86 assembler designed for portability and modularity. 
It includes a disassembler as well.""" @@ -17,6 +17,8 @@ class Nasm(Package): list_url = "https://www.nasm.us/pub/nasm/releasebuilds" list_depth = 1 + build_system("autotools", conditional("generic", when="platform=windows"), default="autotools") + version("2.15.05", sha256="9182a118244b058651c576baa9d0366ee05983c4d4ae1d9ddd3236a9f2304997") version("2.14.02", sha256="b34bae344a3f2ed93b2ca7bf25f1ed3fb12da89eeda6096e3551fd66adeae9fc") version("2.13.03", sha256="23e1b679d64024863e2991e5c166e19309f0fe58a9765622b35bd31be5b2cc99") @@ -31,11 +33,11 @@ class Nasm(Package): when="@2.13.03 %gcc@8:", ) - patch("msvc.mak.patch", when="@2.15.05 platform=windows") + with when("platform=windows"): + depends_on("perl") + patch("msvc.mak.patch", when="@2.15.05") - conflicts("%intel@:14", when="@2.14:", msg="Intel 14 has immature C11 support") - - depends_on("perl", when="platform=windows") + conflicts("%intel@:14", when="@2.14:", msg="Intel <= 14 lacks support for C11") def patch(self): # Remove flags not recognized by the NVIDIA compiler @@ -51,13 +53,8 @@ def patch(self): "configure", ) - def install(self, spec, prefix): - with working_dir(self.stage.source_path, create=True): - configure(*["--prefix={0}".format(self.prefix)]) - make("V=1") - make("install") - @when("platform=windows") +class GenericBuilder(spack.build_systems.generic.GenericBuilder): def install(self, spec, prefix): with working_dir(self.stage.source_path, create=True): # build NASM with nmake diff --git a/var/spack/repos/builtin/packages/ncio/package.py b/var/spack/repos/builtin/packages/ncio/package.py index bd3d0c1ad3d..c392710813d 100644 --- a/var/spack/repos/builtin/packages/ncio/package.py +++ b/var/spack/repos/builtin/packages/ncio/package.py @@ -15,7 +15,7 @@ class Ncio(CMakePackage): homepage = "https://github.com/NOAA-EMC/NCEPLIBS-ncio" url = "https://github.com/NOAA-EMC/NCEPLIBS-ncio/archive/refs/tags/v1.0.0.tar.gz" - maintainers = ["edwardhartnett", "kgerheiser", "Hang-Lei-NOAA"] + maintainers = 
["edwardhartnett", "AlexanderRichert-NOAA", "Hang-Lei-NOAA"] version("1.1.0", sha256="9de05cf3b8b1291010197737666cede3d621605806379b528d2146c4f02d08f6") version("1.0.0", sha256="2e2630b26513bf7b0665619c6c3475fe171a9d8b930e9242f5546ddf54749bd4") diff --git a/var/spack/repos/builtin/packages/ndzip/package.py b/var/spack/repos/builtin/packages/ndzip/package.py new file mode 100644 index 00000000000..be8f7ab0e7f --- /dev/null +++ b/var/spack/repos/builtin/packages/ndzip/package.py @@ -0,0 +1,40 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class Ndzip(CMakePackage, CudaPackage): + """A High-Throughput Parallel Lossless Compressor for Scientific Data + + forked from: https://github.com/fknorr/ndzip + """ + + # the upstream developer graduated and moved on to other tasks + + url = "https://github.com/celerity/ndzip" + homepage = "https://github.com/fknorr/ndzip" + git = "https://github.com/robertu94/ndzip" + + maintainers = ["robertu94"] + + version("master", branch="master") + version("2021-11-30", commit="5b3c34991005c0924a339f2ec06750729ebbf015") + + variant("cuda", description="build with cuda support", default=False) + variant("openmp", description="build with cuda support", default=False) + + def cmake_args(self): + args = [ + self.define_from_variant("NDZIP_WITH_CUDA", "cuda"), + self.define_from_variant("NDZIP_WITH_MT", "openmp"), + self.define("NDZIP_BUILD_BENCHMARK", False), + self.define("NDZIP_BUILD_TEST", self.run_tests), + self.define("NDZIP_USE_WERROR", False), + ] + if "+cuda" in self.spec and self.spec.variants["cuda_arch"].value != "none": + arch_str = ";".join(self.spec.variants["cuda_arch"].value) + args.append(self.define("CMAKE_CUDA_ARCHITECTURES", arch_str)) + return args diff --git a/var/spack/repos/builtin/packages/nemsio/package.py 
b/var/spack/repos/builtin/packages/nemsio/package.py index 0f236795277..417521bef9c 100644 --- a/var/spack/repos/builtin/packages/nemsio/package.py +++ b/var/spack/repos/builtin/packages/nemsio/package.py @@ -16,7 +16,12 @@ class Nemsio(CMakePackage): homepage = "https://noaa-emc.github.io/NCEPLIBS-nemsio" url = "https://github.com/NOAA-EMC/NCEPLIBS-nemsio/archive/refs/tags/v2.5.2.tar.gz" - maintainers = ["t-brown", "edwardhartnett", "kgerheiser", "Hang-Lei-NOAA"] + maintainers = [ + "t-brown", + "edwardhartnett", + "AlexanderRichert-NOAA", + "Hang-Lei-NOAA", + ] variant("mpi", default=True, description="Build Nemsio with MPI") # Nemsio 2.5.3 and below require MPI diff --git a/var/spack/repos/builtin/packages/nemsiogfs/package.py b/var/spack/repos/builtin/packages/nemsiogfs/package.py index 85b1a128a19..8ce3792ea1c 100644 --- a/var/spack/repos/builtin/packages/nemsiogfs/package.py +++ b/var/spack/repos/builtin/packages/nemsiogfs/package.py @@ -15,7 +15,7 @@ class Nemsiogfs(CMakePackage): homepage = "https://github.com/NOAA-EMC/NCEPLIBS-nemsiogfs" url = "https://github.com/NOAA-EMC/NCEPLIBS-nemsiogfs/archive/refs/tags/v2.5.3.tar.gz" - maintainers = ["kgerheiser", "Hang-Lei-NOAA", "edwardhartnett"] + maintainers = ["AlexanderRichert-NOAA", "Hang-Lei-NOAA", "edwardhartnett"] version("2.5.3", sha256="bf84206b08c8779787bef33e4aba18404df05f8b2fdd20fc40b3af608ae4b9af") diff --git a/var/spack/repos/builtin/packages/netcdf-c/package.py b/var/spack/repos/builtin/packages/netcdf-c/package.py index 55eb3e4b718..de3f8950cca 100644 --- a/var/spack/repos/builtin/packages/netcdf-c/package.py +++ b/var/spack/repos/builtin/packages/netcdf-c/package.py @@ -20,6 +20,7 @@ class NetcdfC(AutotoolsPackage): maintainers = ["skosukhin", "WardF"] version("main", branch="main") + version("4.9.0", sha256="9f4cb864f3ab54adb75409984c6202323d2fc66c003e5308f3cdf224ed41c0a6") version("4.8.1", sha256="bc018cc30d5da402622bf76462480664c6668b55eb16ba205a0dfb8647161dd0") version("4.8.0", 
sha256="aff58f02b1c3e91dc68f989746f652fe51ff39e6270764e484920cb8db5ad092") version("4.7.4", sha256="99930ad7b3c4c1a8e8831fb061cb02b2170fc8e5ccaeda733bd99c3b9d31666b") @@ -65,7 +66,7 @@ class NetcdfC(AutotoolsPackage): patch("4.7.3-spectrum-mpi-pnetcdf-detect.patch", when="@4.7.3:4.7.4 +parallel-netcdf") # See https://github.com/Unidata/netcdf-c/pull/2293 - patch("4.8.1-no-strict-aliasing-config.patch", when="@4.8.1:") + patch("4.8.1-no-strict-aliasing-config.patch", when="@4.8.1") variant("mpi", default=True, description="Enable parallel I/O for netcdf-4") variant("parallel-netcdf", default=False, description="Enable parallel I/O for classic files") @@ -75,6 +76,8 @@ class NetcdfC(AutotoolsPackage): variant("dap", default=False, description="Enable DAP support") variant("jna", default=False, description="Enable JNA support") variant("fsync", default=False, description="Enable fsync support") + variant("zstd", default=True, description="Enable ZStandard compression", when="@4.9.0:") + variant("optimize", default=True, description="Enable -O2 for a more optimized lib") # It's unclear if cdmremote can be enabled if '--enable-netcdf-4' is passed # to the configure script. 
Since netcdf-4 support is mandatory we comment @@ -123,6 +126,8 @@ class NetcdfC(AutotoolsPackage): # https://github.com/Unidata/netcdf-c/issues/250 depends_on("hdf5@:1.8", when="@:4.4.0") + depends_on("zstd", when="+zstd") + # The feature was introduced in version 4.1.2 # and was removed in version 4.4.0 # conflicts('+cdmremote', when='@:4.1.1,4.4:') @@ -157,6 +162,9 @@ def configure_args(self): "--enable-netcdf-4", ] + if "+optimize" in self.spec: + cflags.append("-O2") + config_args.extend(self.enable_or_disable("fsync")) # The flag was introduced in version 4.3.1 @@ -165,9 +173,7 @@ def configure_args(self): config_args += self.enable_or_disable("shared") - if "~shared" in self.spec or "+pic" in self.spec: - # We don't have shared libraries but we still want it to be - # possible to use this library in shared builds + if "+pic" in self.spec: cflags.append(self.compiler.cc_pic_flag) config_args += self.enable_or_disable("dap") @@ -236,6 +242,16 @@ def configure_args(self): if "+external-xdr" in hdf4 and hdf4["rpc"].name != "libc": libs.append(hdf4["rpc"].libs.link_flags) + if "+zstd" in self.spec: + zstd = self.spec["zstd"] + cppflags.append(zstd.headers.cpp_flags) + ldflags.append(zstd.libs.search_flags) + config_args.append("--with-plugin-dir={}".format(self.prefix.plugins)) + elif "~zstd" in self.spec: + # Prevent linking to system zstd. + # There is no explicit option to disable zstd. + config_args.append("ac_cv_lib_zstd_ZSTD_compress=no") + # Fortran support # In version 4.2+, NetCDF-C and NetCDF-Fortran have split. # Use the netcdf-fortran package to install Fortran support. @@ -247,7 +263,12 @@ def configure_args(self): return config_args + def setup_run_environment(self, env): + if "+zstd" in self.spec: + env.append_path("HDF5_PLUGIN_PATH", self.prefix.plugins) + def setup_dependent_build_environment(self, env, dependent_spec): + self.setup_run_environment(env) # Some packages, e.g. 
ncview, refuse to build if the compiler path returned by nc-config # differs from the path to the compiler that the package should be built with. Therefore, # we have to shadow nc-config from self.prefix.bin, which references the real compiler, diff --git a/var/spack/repos/builtin/packages/netcdf-fortran/package.py b/var/spack/repos/builtin/packages/netcdf-fortran/package.py index 69f4b51a3c2..2b22569b011 100644 --- a/var/spack/repos/builtin/packages/netcdf-fortran/package.py +++ b/var/spack/repos/builtin/packages/netcdf-fortran/package.py @@ -21,6 +21,7 @@ class NetcdfFortran(AutotoolsPackage): maintainers = ["skosukhin", "WardF"] + version("4.6.0", sha256="198bff6534cc85a121adc9e12f1c4bc53406c403bda331775a1291509e7b2f23") version("4.5.4", sha256="0a19b26a2b6e29fab5d29d7d7e08c24e87712d09a5cafeea90e16e0a2ab86b81") version("4.5.3", sha256="123a5c6184336891e62cf2936b9f2d1c54e8dee299cfd9d2c1a1eb05dd668a74") version("4.5.2", sha256="b959937d7d9045184e9d2040a915d94a7f4d0185f4a9dceb8f08c94b0c3304aa") diff --git a/var/spack/repos/builtin/packages/netlib-lapack/package.py b/var/spack/repos/builtin/packages/netlib-lapack/package.py index 4908fc1b9de..387657d1ca2 100644 --- a/var/spack/repos/builtin/packages/netlib-lapack/package.py +++ b/var/spack/repos/builtin/packages/netlib-lapack/package.py @@ -2,7 +2,7 @@ # Spack Project Developers. See the top-level COPYRIGHT file for details. 
# # SPDX-License-Identifier: (Apache-2.0 OR MIT) - +import spack.build_systems.cmake from spack.package import * @@ -55,6 +55,8 @@ class NetlibLapack(CMakePackage): # netlib-lapack is the reference implementation of LAPACK for ver in [ + "3.10.1", + "3.10.0", "3.9.1", "3.9.0", "3.8.0", @@ -182,82 +184,53 @@ def headers(self): lapacke_h = join_path(include_dir, "lapacke.h") return HeaderList([cblas_h, lapacke_h]) - @property - def build_directory(self): - return join_path( - self.stage.source_path, - "spack-build-shared" if self._building_shared else "spack-build-static", - ) +class CMakeBuilder(spack.build_systems.cmake.CMakeBuilder): def cmake_args(self): - args = ["-DBUILD_SHARED_LIBS:BOOL=" + ("ON" if self._building_shared else "OFF")] - - if self.spec.satisfies("+lapacke"): - args.extend(["-DLAPACKE:BOOL=ON", "-DLAPACKE_WITH_TMG:BOOL=ON"]) - else: - args.extend(["-DLAPACKE:BOOL=OFF", "-DLAPACKE_WITH_TMG:BOOL=OFF"]) - - if self.spec.satisfies("@3.6.0:"): - args.append("-DCBLAS=ON") # always build CBLAS + args = [ + self.define_from_variant("BUILD_SHARED_LIBS", "shared"), + self.define_from_variant("LAPACKE", "lapacke"), + self.define_from_variant("LAPACKE_WITH_TMG", "lapacke"), + self.define("CBLAS", self.spec.satisfies("@3.6.0:")), + ] if self.spec.satisfies("%intel"): # Intel compiler finds serious syntax issues when trying to # build CBLAS and LapackE - args.extend(["-DCBLAS=OFF", "-DLAPACKE:BOOL=OFF"]) + args.extend([self.define("CBLAS", False), self.define("LAPACKE", False)]) if self.spec.satisfies("%xl") or self.spec.satisfies("%xl_r"): # use F77 compiler if IBM XL args.extend( [ - "-DCMAKE_Fortran_COMPILER=" + self.compiler.f77, - "-DCMAKE_Fortran_FLAGS=" - + (" ".join(self.spec.compiler_flags["fflags"])) - + " -O3 -qnohot", + self.define("CMAKE_Fortran_COMPILER", self.compiler.f77), + self.define( + "CMAKE_Fortran_FLAGS", + " ".join(self.spec.compiler_flags["fflags"]) + " -O3 -qnohot", + ), ] ) # deprecated routines are commonly needed by, for example, 
suitesparse # Note that OpenBLAS spack is built with deprecated routines - args.append("-DBUILD_DEPRECATED:BOOL=ON") + args.append(self.define("BUILD_DEPRECATED", True)) if self.spec.satisfies("+external-blas"): args.extend( [ - "-DUSE_OPTIMIZED_BLAS:BOOL=ON", - "-DBLAS_LIBRARIES:PATH=" + self.spec["blas"].libs.joined(";"), + self.define("USE_OPTIMIZED_BLAS", True), + self.define("BLAS_LIBRARIES:PATH", self.spec["blas"].libs.joined(";")), ] ) if self.spec.satisfies("+xblas"): args.extend( [ - "-DXBLAS_INCLUDE_DIR=" + self.spec["netlib-xblas"].prefix.include, - "-DXBLAS_LIBRARY=" + self.spec["netlib-xblas"].libs.joined(";"), + self.define("XBLAS_INCLUDE_DIR", self.spec["netlib-xblas"].prefix.include), + self.define("XBLAS_LIBRARY", self.spec["netlib-xblas"].libs.joined(";")), ] ) - args.append("-DBUILD_TESTING:BOOL=" + ("ON" if self.run_tests else "OFF")) + args.append(self.define("BUILD_TESTING", self.pkg.run_tests)) return args - - # Build, install, and check both static and shared versions of the - # libraries when +shared - @when("+shared") - def cmake(self, spec, prefix): - for self._building_shared in (False, True): - super(NetlibLapack, self).cmake(spec, prefix) - - @when("+shared") - def build(self, spec, prefix): - for self._building_shared in (False, True): - super(NetlibLapack, self).build(spec, prefix) - - @when("+shared") - def install(self, spec, prefix): - for self._building_shared in (False, True): - super(NetlibLapack, self).install(spec, prefix) - - @when("+shared") - def check(self): - for self._building_shared in (False, True): - super(NetlibLapack, self).check() diff --git a/var/spack/repos/builtin/packages/netlib-scalapack/package.py b/var/spack/repos/builtin/packages/netlib-scalapack/package.py index 6bf280a9be4..e0becba3b90 100644 --- a/var/spack/repos/builtin/packages/netlib-scalapack/package.py +++ b/var/spack/repos/builtin/packages/netlib-scalapack/package.py @@ -45,6 +45,13 @@ class ScalapackBase(CMakePackage): when="@2.2.0", ) + def 
flag_handler(self, name, flags): + iflags = [] + if name == "fflags": + if self.spec.satisfies("%cce"): + iflags.append("-hnopattern") + return (iflags, None, None) + @property def libs(self): # Note that the default will be to search @@ -80,7 +87,7 @@ def cmake_args(self): # Work around errors of the form: # error: implicit declaration of function 'BI_smvcopy' is # invalid in C99 [-Werror,-Wimplicit-function-declaration] - if spec.satisfies("%clang") or spec.satisfies("%apple-clang"): + if spec.satisfies("%clang") or spec.satisfies("%apple-clang") or spec.satisfies("%oneapi"): c_flags.append("-Wno-error=implicit-function-declaration") options.append(self.define("CMAKE_C_FLAGS", " ".join(c_flags))) diff --git a/var/spack/repos/builtin/packages/nextflow/package.py b/var/spack/repos/builtin/packages/nextflow/package.py index 855cb8d3e7b..04b2e84ae69 100644 --- a/var/spack/repos/builtin/packages/nextflow/package.py +++ b/var/spack/repos/builtin/packages/nextflow/package.py @@ -14,6 +14,16 @@ class Nextflow(Package): maintainers = ["dialvarezs"] + version( + "22.10.1", + sha256="fa6b6faa8b213860212da413e77141a56a5e128662d21ea6603aeb9717817c4c", + expand=False, + ) + version( + "22.10.0", + sha256="6acea8bd21f7f66b1363eef900cd696d9523d2b9edb53327940f093189c1535e", + expand=False, + ) version( "22.04.4", sha256="e5ebf9942af4569db9199e8528016d9a52f73010ed476049774a76b201cd4b10", diff --git a/var/spack/repos/builtin/packages/ninja-fortran/package.py b/var/spack/repos/builtin/packages/ninja-fortran/package.py index 371b350adde..4709f51694b 100644 --- a/var/spack/repos/builtin/packages/ninja-fortran/package.py +++ b/var/spack/repos/builtin/packages/ninja-fortran/package.py @@ -86,3 +86,12 @@ def install(self, spec, prefix): # instead of 'ninja'. Install both for uniformity. 
with working_dir(prefix.bin): symlink("ninja", "ninja-build") + + def setup_dependent_package(self, module, dspec): + name = "ninja" + + module.ninja = MakeExecutable( + which_string(name, path=[self.spec.prefix.bin], required=True), + determine_number_of_jobs(parallel=self.parallel), + supports_jobserver=True, # This fork supports jobserver + ) diff --git a/var/spack/repos/builtin/packages/ninja/package.py b/var/spack/repos/builtin/packages/ninja/package.py index 5321d550411..a6ee815a2ba 100644 --- a/var/spack/repos/builtin/packages/ninja/package.py +++ b/var/spack/repos/builtin/packages/ninja/package.py @@ -25,6 +25,7 @@ class Ninja(Package): version("kitware", branch="features-for-fortran", git="https://github.com/Kitware/ninja.git") version("master", branch="master") + version("1.11.1", sha256="31747ae633213f1eda3842686f83c2aa1412e0f5691d1c14dbbcc67fe7400cea") version("1.11.0", sha256="3c6ba2e66400fe3f1ae83deb4b235faf3137ec20bd5b08c29bfc368db143e4c6") version("1.10.2", sha256="ce35865411f0490368a8fc383f29071de6690cbadc27704734978221f25e2bed") version("1.10.1", sha256="a6b6f7ac360d4aabd54e299cc1d8fa7b234cd81b9401693da21221c62569a23e") @@ -78,4 +79,5 @@ def setup_dependent_package(self, module, dspec): module.ninja = MakeExecutable( which_string(name, path=[self.spec.prefix.bin], required=True), determine_number_of_jobs(parallel=self.parallel), + supports_jobserver=self.spec.version == ver("kitware"), ) diff --git a/var/spack/repos/builtin/packages/npm/package.py b/var/spack/repos/builtin/packages/npm/package.py index a6898bcba98..3910a0142e0 100644 --- a/var/spack/repos/builtin/packages/npm/package.py +++ b/var/spack/repos/builtin/packages/npm/package.py @@ -25,6 +25,7 @@ class Npm(Package): version("3.10.5", sha256="ff019769e186152098841c1fa6325e5a79f7903a45f13bd0046a4dc8e63f845f") depends_on("node-js", type=("build", "run")) + depends_on("libvips") # npm 6.13.4 ships with node-gyp 5.0.5, which contains several Python 3 # compatibility issues on macOS. 
Manually update to node-gyp 6.0.1 for diff --git a/var/spack/repos/builtin/packages/nvcomp/package.py b/var/spack/repos/builtin/packages/nvcomp/package.py new file mode 100644 index 00000000000..353802225d5 --- /dev/null +++ b/var/spack/repos/builtin/packages/nvcomp/package.py @@ -0,0 +1,34 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class Nvcomp(CMakePackage, CudaPackage): + """Last open source release of A library for fast lossless compression + /decompression on NVIDIA GPUs + + forked from: https://github.com/NVIDIA/nvcomp after NVIDIA made this closed source + """ + + homepage = "https://github.com/NVIDIA/nvcomp" + url = "https://github.com/NVIDIA/nvcomp/archive/refs/tags/v2.0.2.tar.gz" + + # pinned to the last open source release+a few minor patches + git = "https://github.com/robertu94/nvcomp" + + maintainers = ["robertu94"] + + version("2.2.0", commit="3737f6e5028ed1887b0023ad0fc033e139d57574") + version("2.0.2", commit="5d5c194f3449486d989057f632d10954b8d11d75") + + depends_on("cuda") + conflicts("~cuda") + + def cmake_args(self): + args = ["-DBUILD_EXAMPLES=OFF", "-DBUILD_BENCHMARKS=OFF"] + cuda_arch_list = self.spec.variants["cuda_arch"].value + args.append("CMAKE_CUDA_ARCHITECTURES={0}".format(";".join(cuda_arch_list))) + return args diff --git a/var/spack/repos/builtin/packages/nvhpc/package.py b/var/spack/repos/builtin/packages/nvhpc/package.py index 1a8751559cc..2ade4bc9e64 100644 --- a/var/spack/repos/builtin/packages/nvhpc/package.py +++ b/var/spack/repos/builtin/packages/nvhpc/package.py @@ -21,6 +21,20 @@ # - package key must be in the form '{os}-{arch}' where 'os' is in the # format returned by platform.system() and 'arch' by platform.machine() _versions = { + "22.9": { + "Linux-aarch64": ( + 
"bc4473f04b49bc9a26f08c17a72360650ddf48a3b6eefacdc525d79c8d730f30", + "https://developer.download.nvidia.com/hpc-sdk/22.9/nvhpc_2022_229_Linux_aarch64_cuda_multi.tar.gz", + ), + "Linux-ppc64le": ( + "9aac31d36bb09f6653544978021f5b78c272112e7748871566f7e930f5e7475b", + "https://developer.download.nvidia.com/hpc-sdk/22.9/nvhpc_2022_229_Linux_ppc64le_cuda_multi.tar.gz", + ), + "Linux-x86_64": ( + "aebfeb826ace3dabf9699f72390ca0340f8789a8ef6fe4032e3c7b794f073ea3", + "https://developer.download.nvidia.com/hpc-sdk/22.9/nvhpc_2022_229_Linux_x86_64_cuda_multi.tar.gz", + ), + }, "22.7": { "Linux-aarch64": ( "2aae3fbfd2d0d2d09448a36166c42311368f5600c7c346f159c280b412fe924a", @@ -401,3 +415,6 @@ def libs(self): libs.append("libnvf") return find_libraries(libs, root=prefix, recursive=True) + + # Avoid binding stub libraries by absolute path + non_bindable_shared_objects = ["stubs"] diff --git a/var/spack/repos/builtin/packages/nvshmem/package.py b/var/spack/repos/builtin/packages/nvshmem/package.py index d007c6c6115..bf91fb75dd7 100644 --- a/var/spack/repos/builtin/packages/nvshmem/package.py +++ b/var/spack/repos/builtin/packages/nvshmem/package.py @@ -18,6 +18,7 @@ class Nvshmem(MakefilePackage, CudaPackage): maintainers = ["bvanessen"] + version("2.7.0-6", sha256="23ed9b0187104dc87d5d2bc1394b6f5ff29e8c19138dc019d940b109ede699df") version("2.6.0-1", sha256="fc0e8de61b034f3a079dc231b1d0955e665a9f57b5013ee98b6743647bd60417") version("2.5.0-19", sha256="dd800b40f1d296e1d3ed2a9885adcfe745c3e57582bc809860e87bd32abcdc60") version("2.4.1-3", sha256="8b6c0eab321b6352911e470f9e81a777a49e58148ec3728453b9522446dba178") diff --git a/var/spack/repos/builtin/packages/oce/package.py b/var/spack/repos/builtin/packages/oce/package.py index aece04bd2e2..101680aeac1 100644 --- a/var/spack/repos/builtin/packages/oce/package.py +++ b/var/spack/repos/builtin/packages/oce/package.py @@ -10,9 +10,9 @@ class Oce(Package): - """Open CASCADE Community Edition: - patches/improvements/experiments 
contributed by users over the official - Open CASCADE library. + """Open CASCADE Community Edition + + UNMAINTAINED: see https://github.com/tpaviot/oce/issues/745#issuecomment-992285943 """ homepage = "https://github.com/tpaviot/oce" @@ -32,9 +32,11 @@ class Oce(Package): variant("X11", default=False, description="Build with X11 enabled") depends_on("cmake@2.8:", type="build") - depends_on("tbb", when="+tbb") - conflicts("intel-tbb@2021.1:") - conflicts("intel-oneapi-tbb@2021.1:") + + with when("+tbb"): + depends_on("tbb") + depends_on("intel-tbb@:2020 build_system=makefile", when="^intel-tbb") + conflicts("intel-oneapi-tbb@2021.1:") # There is a bug in OCE which appears with Clang (version?) or GCC 6.0 # and has to do with compiler optimization, see diff --git a/var/spack/repos/builtin/packages/octopus/package.py b/var/spack/repos/builtin/packages/octopus/package.py index 5ba38a4a400..7ff01e86336 100644 --- a/var/spack/repos/builtin/packages/octopus/package.py +++ b/var/spack/repos/builtin/packages/octopus/package.py @@ -5,12 +5,13 @@ import os +import llnl.util.filesystem as fs import llnl.util.tty as tty from spack.package import * -class Octopus(Package, CudaPackage): +class Octopus(AutotoolsPackage, CudaPackage): """A real-space finite-difference (time-dependent) density-functional theory code.""" @@ -20,6 +21,7 @@ class Octopus(Package, CudaPackage): maintainers = ["fangohr", "RemiLacroix-IDRIS"] + version("12.1", sha256="e2214e958f1e9631dbe6bf020c39f1fe4d71ab0b6118ea9bd8dc38f6d7a7959a") version("12.0", sha256="70beaf08573d394a766f10346a708219b355ad725642126065d12596afbc0dcc") version("11.4", sha256="73bb872bff8165ddd8efc5b891f767cb3fe575b5a4b518416c834450a4492da7") version("11.3", sha256="0c98417071b5e38ba6cbdd409adf917837c387a010e321c0a7f94d9bd9478930") @@ -54,10 +56,10 @@ class Octopus(Package, CudaPackage): variant("nlopt", default=False, description="Compile with nlopt") variant("debug", default=False, description="Compile with debug flags") - 
depends_on("autoconf", type="build") - depends_on("automake", type="build") - depends_on("libtool", type="build") - depends_on("m4", type="build") + depends_on("autoconf", type="build", when="@develop") + depends_on("automake", type="build", when="@develop") + depends_on("libtool", type="build", when="@develop") + depends_on("m4", type="build", when="@develop") depends_on("blas") depends_on("gsl@1.9:") @@ -89,7 +91,8 @@ class Octopus(Package, CudaPackage): # TODO: etsf-io, sparskit, # feast, libfm, pfft, isf, pnfft, poke - def install(self, spec, prefix): + def configure_args(self): + spec = self.spec lapack = spec["lapack"].libs blas = spec["blas"].libs args = [] @@ -210,12 +213,7 @@ def install(self, spec, prefix): args.append(fcflags) args.append(fflags) - autoreconf("-i") - configure(*args) - make() - # short tests take forever... - # make('check-short') - make("install") + return args @run_after("install") @on_package_attributes(run_tests=True) @@ -278,7 +276,7 @@ def smoke_tests(self): purpose = "Run Octopus recipe example" with working_dir("example-recipe", create=True): print("Current working directory (in example-recipe)") - copy(join_path(os.path.dirname(__file__), "test", "recipe.inp"), "inp") + fs.copy(join_path(os.path.dirname(__file__), "test", "recipe.inp"), "inp") self.run_test( exe, options=options, @@ -304,7 +302,7 @@ def smoke_tests(self): purpose = "Run tiny calculation for He" with working_dir("example-he", create=True): print("Current working directory (in example-he)") - copy(join_path(os.path.dirname(__file__), "test", "he.inp"), "inp") + fs.copy(join_path(os.path.dirname(__file__), "test", "he.inp"), "inp") self.run_test( exe, options=options, diff --git a/var/spack/repos/builtin/packages/openblas/fix-cray-fortran-detection-pr3778.patch b/var/spack/repos/builtin/packages/openblas/fix-cray-fortran-detection-pr3778.patch new file mode 100644 index 00000000000..0d87eafb8ec --- /dev/null +++ 
b/var/spack/repos/builtin/packages/openblas/fix-cray-fortran-detection-pr3778.patch @@ -0,0 +1,54 @@ +diff -ruN spack-src/f_check spack-src-patched/f_check +--- spack-src/f_check 2022-08-07 16:36:26.000000000 -0400 ++++ spack-src-patched/f_check 2022-10-20 15:06:34.296845220 -0400 +@@ -82,10 +82,6 @@ + vendor=FUJITSU + openmp='-Kopenmp' + ;; +- *Cray*) +- vendor=CRAY +- openmp='-fopenmp' +- ;; + *GNU*|*GCC*) + + v="${data#*GCC: *\) }" +@@ -117,6 +113,10 @@ + esac + fi + ;; ++ *Cray*) ++ vendor=CRAY ++ openmp='-fopenmp' ++ ;; + *g95*) + vendor=G95 + openmp='' +diff -ruN spack-src/f_check.pl spack-src-patched/f_check.pl +--- spack-src/f_check.pl 2022-08-07 16:36:26.000000000 -0400 ++++ spack-src-patched/f_check.pl 2022-10-20 15:07:50.884794505 -0400 +@@ -76,11 +76,6 @@ + $vendor = FUJITSU; + $openmp = "-Kopenmp"; + +- } elsif ($data =~ /Cray/) { +- +- $vendor = CRAY; +- $openmp = "-fopenmp"; +- + } elsif ($data =~ /GNU/ || $data =~ /GCC/ ) { + + $data =~ s/\(+.*?\)+//g; +@@ -106,8 +101,12 @@ + $openmp = ""; + } + } ++ } elsif ($data =~ /Cray/) { ++ ++ $vendor = CRAY; ++ $openmp = "-fopenmp"; + +- } ++ } + + if ($data =~ /g95/) { + $vendor = G95; diff --git a/var/spack/repos/builtin/packages/openblas/package.py b/var/spack/repos/builtin/packages/openblas/package.py index bfb6d3e7d50..e9d3a50d466 100644 --- a/var/spack/repos/builtin/packages/openblas/package.py +++ b/var/spack/repos/builtin/packages/openblas/package.py @@ -89,6 +89,9 @@ class Openblas(MakefilePackage): # https://github.com/xianyi/OpenBLAS/pull/3712 patch("cce.patch", when="@0.3.20 %cce") + # https://github.com/xianyi/OpenBLAS/pull/3778 + patch("fix-cray-fortran-detection-pr3778.patch", when="@0.3.21") + # https://github.com/spack/spack/issues/31732 patch("f_check-oneapi.patch", when="@0.3.20 %oneapi") @@ -193,7 +196,7 @@ def flag_handler(self, name, flags): spec = self.spec iflags = [] if name == "cflags": - if spec.satisfies("@0.3.20 %oneapi"): + if spec.satisfies("@0.3.20: %oneapi"): 
iflags.append("-Wno-error=implicit-function-declaration") return (iflags, None, None) @@ -379,8 +382,9 @@ def make_defs(self): if "+consistent_fpcsr" in self.spec: make_defs += ["CONSISTENT_FPCSR=1"] - # Flang/f18 does not provide ETIME as an intrinsic - if self.spec.satisfies("%clang"): + # Flang/f18 does not provide ETIME as an intrinsic. + # Do not set TIMER variable if fortran is disabled. + if self.spec.satisfies("+fortran%clang"): make_defs.append("TIMER=INT_CPU_TIME") # Prevent errors in `as` assembler from newer instructions diff --git a/var/spack/repos/builtin/packages/opencascade/package.py b/var/spack/repos/builtin/packages/opencascade/package.py index c378ffe9b14..635e6b2d040 100644 --- a/var/spack/repos/builtin/packages/opencascade/package.py +++ b/var/spack/repos/builtin/packages/opencascade/package.py @@ -18,6 +18,11 @@ class Opencascade(CMakePackage): maintainers = ["wdconinc"] + version( + "7.6.3", + extension="tar.gz", + sha256="baae5b3a7a38825396fc45ef9d170db406339f5eeec62e21b21036afeda31200", + ) version( "7.6.0", extension="tar.gz", diff --git a/var/spack/repos/builtin/packages/opencoarrays/package.py b/var/spack/repos/builtin/packages/opencoarrays/package.py index 470d501f5d3..6c9a309c6f6 100644 --- a/var/spack/repos/builtin/packages/opencoarrays/package.py +++ b/var/spack/repos/builtin/packages/opencoarrays/package.py @@ -35,7 +35,7 @@ class Opencoarrays(CMakePackage): depends_on("mpi") # This patch removes a bunch of checks for the version of MPI available on # the system. They make the Crays hang. 
- patch("CMakeLists.patch", when="platform=cray") + patch("CMakeLists.patch", when="^cray-mpich") def cmake_args(self): args = [] diff --git a/var/spack/repos/builtin/packages/openfast/package.py b/var/spack/repos/builtin/packages/openfast/package.py index b9d1382304a..6da8c696e63 100644 --- a/var/spack/repos/builtin/packages/openfast/package.py +++ b/var/spack/repos/builtin/packages/openfast/package.py @@ -16,6 +16,8 @@ class Openfast(CMakePackage): version("develop", branch="dev") version("master", branch="main") + version("3.2.1", tag="v3.2.1") + version("3.2.0", tag="v3.2.0") version("3.1.0", tag="v3.1.0") version("3.0.0", tag="v3.0.0") version("2.6.0", tag="v2.6.0") @@ -33,6 +35,7 @@ class Openfast(CMakePackage): variant("cxx", default=False, description="Enable C++ bindings") variant("pic", default=True, description="Position independent code") variant("openmp", default=False, description="Enable OpenMP support") + variant("netcdf", default=False, description="Enable NetCDF support") # Dependencies for OpenFAST Fortran depends_on("blas") @@ -44,6 +47,7 @@ class Openfast(CMakePackage): depends_on("hdf5+mpi+cxx+hl", when="+cxx") depends_on("zlib", when="+cxx") depends_on("libxml2", when="+cxx") + depends_on("netcdf-c", when="+cxx+netcdf") def cmake_args(self): spec = self.spec @@ -52,8 +56,8 @@ def cmake_args(self): options.extend( [ - "-DBUILD_DOCUMENTATION:BOOL=OFF", - "-DBUILD_TESTING:BOOL=OFF", + self.define("BUILD_DOCUMENTATION", False), + self.define("BUILD_TESTING", False), self.define_from_variant("BUILD_SHARED_LIBS", "shared"), self.define_from_variant("DOUBLE_PRECISION", "double-precision"), self.define_from_variant("USE_DLL_INTERFACE", "dll-interface"), @@ -66,42 +70,47 @@ def cmake_args(self): blas_libs = spec["lapack"].libs + spec["blas"].libs options.extend( [ - "-DBLAS_LIBRARIES=%s" % blas_libs.joined(";"), - "-DLAPACK_LIBRARIES=%s" % blas_libs.joined(";"), + self.define("BLAS_LIBRARIES", blas_libs.joined(";")), + self.define("LAPACK_LIBRARIES", 
blas_libs.joined(";")), ] ) if "+cxx" in spec: options.extend( [ - "-DCMAKE_C_COMPILER=%s" % spec["mpi"].mpicc, - "-DCMAKE_CXX_COMPILER=%s" % spec["mpi"].mpicxx, - "-DCMAKE_Fortran_COMPILER=%s" % spec["mpi"].mpifc, - "-DMPI_CXX_COMPILER:PATH=%s" % spec["mpi"].mpicxx, - "-DMPI_C_COMPILER:PATH=%s" % spec["mpi"].mpicc, - "-DMPI_Fortran_COMPILER:PATH=%s" % spec["mpi"].mpifc, - "-DHDF5_ROOT:PATH=%s" % spec["hdf5"].prefix, - "-DYAML_ROOT:PATH=%s" % spec["yaml-cpp"].prefix, + self.define("CMAKE_CXX_COMPILER", spec["mpi"].mpicxx), + self.define("CMAKE_C_COMPILER", spec["mpi"].mpicc), + self.define("CMAKE_Fortran_COMPILER", spec["mpi"].mpifc), + self.define("MPI_CXX_COMPILER", spec["mpi"].mpicxx), + self.define("MPI_C_COMPILER", spec["mpi"].mpicc), + self.define("MPI_Fortran_COMPILER", spec["mpi"].mpifc), + self.define("HDF5_ROOT", spec["hdf5"].prefix), + self.define("YAML_ROOT", spec["yaml-cpp"].prefix), + # The following expects that HDF5 was built with CMake. + # Solves issue with OpenFAST trying to link + # to HDF5 libraries with a "-shared" prefix + # that do not exist. 
+ self.define("HDF5_NO_FIND_PACKAGE_CONFIG_FILE", True), ] ) - if "~shared" in spec: - options.extend( - [ - "-DHDF5_USE_STATIC_LIBRARIES=ON", - ] - ) + if "+netcdf" in spec: + options.extend([self.define("NETCDF_ROOT", spec["netcdf-c"].prefix)]) + + if "~shared" in spec: + options.extend( + [ + self.define("HDF5_USE_STATIC_LIBRARIES", True), + ] + ) if "+openmp" in spec: options.extend( [ - "-DOPENMP:BOOL=ON", + self.define("OPENMP", True), ] ) - if "darwin" in spec.architecture: - options.append("-DCMAKE_MACOSX_RPATH:BOOL=ON") - return options def flag_handler(self, name, flags): diff --git a/var/spack/repos/builtin/packages/openfoam/package.py b/var/spack/repos/builtin/packages/openfoam/package.py index d67a21f3cc2..6e6ed66b325 100644 --- a/var/spack/repos/builtin/packages/openfoam/package.py +++ b/var/spack/repos/builtin/packages/openfoam/package.py @@ -991,11 +991,14 @@ def foam_dict(self): ] ) - def _rule_directory(self, projdir, general=False): - """Return the wmake/rules/ General or compiler rules directory. + def _rule_directory(self, projdir, general=False, common=False): + """Return wmake/rules/ General/common, General or + compiler rules directory. Supports wmake/rules/ and wmake/rules//. """ rules_dir = os.path.join(projdir, "wmake", "rules") + if common: + return os.path.join(rules_dir, "General", "common") if general: return os.path.join(rules_dir, "General") @@ -1015,10 +1018,41 @@ def has_rule(self, projdir): raise InstallError("No wmake rule for {0} {1}".format(self.arch, self.compiler)) return True + def _rule_add_rpath(self, rpath, src, dst): + """Create {c,c++}-spack rules in the specified project directory. + The compiler rules are based on the respective {cflags,cxxflags}-Opt or + {c,c++}Opt rules with additional rpath information for the OpenFOAM libraries. + + The '-spack' rules channel spack information into OpenFOAM wmake + rules with minimal modification to OpenFOAM. 
+ The rpath is used for the installed libpath (continue to use + LD_LIBRARY_PATH for values during the build). + """ + # Note: the 'c' rules normally don't need rpath, since they are just + # used for some statically linked wmake tools, but left in anyhow. + + ok = os.path.isfile(src) + + if ok: + with open(src, "r") as infile: + with open(dst, "w") as outfile: + for line in infile: + line = line.rstrip() + outfile.write(line) + if re.match(r"^\S+DBUG\s*:?=", line): + outfile.write(" ") + outfile.write(rpath) + elif re.match(r"^\S+OPT\s*:?=", line): + if self.arch_option: + outfile.write(" ") + outfile.write(self.arch_option) + outfile.write("\n") + return ok + def create_rules(self, projdir, foam_pkg): - """Create {c,c++}-spack and mplib{USERMPI} - rules in the specified project directory. - The compiler rules are based on the respective {c,c++}Opt rules + """Create {c,c++}-spack and mplib{USERMPI} rules in the + specified project directory. + Uses General/common/{c,c++}Opt or arch-specific {c,c++}Opt rules, but with additional rpath information for the OpenFOAM libraries. 
The '-spack' rules channel spack information into OpenFOAM wmake @@ -1036,26 +1070,20 @@ def create_rules(self, projdir, foam_pkg): user_mpi = mplib_content(foam_pkg.spec) rule_dir = self._rule_directory(projdir) + comm_dir = self._rule_directory(projdir, False, True) + + # Compiler: copy existing {c,c++}Opt or General/common/{c,c++}Opt + # and modify '*DBUG' value to include rpath + + for lang in ["c", "c++"]: + gen = join_path(comm_dir, "{0}Opt".format(lang)) + src = join_path(rule_dir, "{0}Opt".format(lang)) + dst = join_path(rule_dir, "{0}{1}".format(lang, self.compile_option)) + + if not self._rule_add_rpath(rpath, src, dst): + self._rule_add_rpath(rpath, gen, dst) with working_dir(rule_dir): - # Compiler: copy existing cOpt,c++Opt and modify '*DBUG' value - for lang in ["c", "c++"]: - src = "{0}Opt".format(lang) - dst = "{0}{1}".format(lang, self.compile_option) - with open(src, "r") as infile: - with open(dst, "w") as outfile: - for line in infile: - line = line.rstrip() - outfile.write(line) - if re.match(r"^\S+DBUG\s*=", line): - outfile.write(" ") - outfile.write(rpath) - elif re.match(r"^\S+OPT\s*=", line): - if self.arch_option: - outfile.write(" ") - outfile.write(self.arch_option) - outfile.write("\n") - # MPI rules for mplib in ["mplibUSERMPI"]: with open(mplib, "w") as out: diff --git a/var/spack/repos/builtin/packages/openjdk/package.py b/var/spack/repos/builtin/packages/openjdk/package.py index f6239500250..fab50a28f85 100644 --- a/var/spack/repos/builtin/packages/openjdk/package.py +++ b/var/spack/repos/builtin/packages/openjdk/package.py @@ -18,6 +18,38 @@ # format returned by platform.system() and 'arch' by platform.machine() _versions = { + "17.0.5_8": { + "Linux-x86_64": ( + "482180725ceca472e12a8e6d1a4af23d608d78287a77d963335e2a0156a020af", + "https://github.com/adoptium/temurin17-binaries/releases/download/jdk-17.0.5%2B8/OpenJDK17U-jdk_x64_linux_hotspot_17.0.5_8.tar.gz", + ), + "Linux-aarch64": ( + 
"1c26c0e09f1641a666d6740d802beb81e12180abaea07b47c409d30c7f368109", + "https://github.com/adoptium/temurin17-binaries/releases/download/jdk-17.0.5%2B8/OpenJDK17U-jdk_aarch64_linux_hotspot_17.0.5_8.tar.gz", + ), + }, + "17.0.4.1_1": { + "Linux-x86_64": ( + "5fbf8b62c44f10be2efab97c5f5dbf15b74fae31e451ec10abbc74e54a04ff44", + "https://github.com/adoptium/temurin17-binaries/releases/download/jdk-17.0.4.1%2B1/OpenJDK17U-jdk_x64_linux_hotspot_17.0.4.1_1.tar.gz", + ), + "Linux-aarch64": ( + "3c7460de77421284b38b4e57cb1bd584a6cef55c34fc51a12270620544de2b8a", + "https://github.com/adoptium/temurin17-binaries/releases/download/jdk-17.0.4.1%2B1/OpenJDK17U-jdk_aarch64_linux_hotspot_17.0.4.1_1.tar.gz", + ), + "Linux-ppc64le": ( + "cbedd0a1428b3058d156e99e8e9bc8769e0d633736d6776a4c4d9136648f2fd1", + "https://github.com/adoptium/temurin17-binaries/releases/download/jdk-17.0.4.1%2B1/OpenJDK17U-jdk_ppc64le_linux_hotspot_17.0.4.1_1.tar.gz", + ), + "Darwin-x86_64": ( + "ac21a5a87f7cfa00212ab7c41f7eb80ca33640d83b63ad850be811c24095d61a", + "https://github.com/adoptium/temurin17-binaries/releases/download/jdk-17.0.4.1%2B1/OpenJDK17U-jdk_x64_mac_hotspot_17.0.4.1_1.tar.gz", + ), + "Darwin-aarch64": ( + "3a976943a9e6a635e68e2b06bd093fc096aad9f5894acda673d3bea0cb3a6f38", + "https://github.com/adoptium/temurin17-binaries/releases/download/jdk-17.0.4.1%2B1/OpenJDK17U-jdk_aarch64_mac_hotspot_17.0.4.1_1.tar.gz", + ), + }, "17.0.3_7": { "Linux-x86_64": ( "81f5bed21077f9fbb04909b50391620c78b9a3c376593c0992934719c0de6b73", @@ -94,6 +126,46 @@ "https://download.java.net/java/GA/jdk16.0.2/d4a915d82b4c4fbb9bde534da945d746/7/GPL/openjdk-16.0.2_linux-aarch64_bin.tar.gz", ), }, + "11.0.17_8": { + "Linux-x86_64": ( + "b8d46ed08ef4859476fe6421a7690d899ed83dce63f13fd894f994043177ef3c", + "https://github.com/adoptium/temurin11-binaries/releases/download/jdk-11.0.17%2B8/OpenJDK11U-jdk_x64_linux_hotspot_11.0.17_8.tar.gz", + ), + "Linux-aarch64": ( + 
"d18b5dd73fce9edd5c58f623a1173f9ee2d45023836b8753b96beae51673a432", + "https://github.com/adoptium/temurin11-binaries/releases/download/jdk-11.0.17%2B8/OpenJDK11U-jdk_aarch64_linux_hotspot_11.0.17_8.tar.gz", + ), + "Linux-ppc64le": ( + "18c636bd103e240d29cdb30d7867720ea9fb9ff7c645738bfb4d5b8027269263", + "https://github.com/adoptium/temurin11-binaries/releases/download/jdk-11.0.17%2B8/OpenJDK11U-jdk_ppc64le_linux_hotspot_11.0.17_8.tar.gz", + ), + "Darwin-aarch64": ( + "79b18cbd398b67a52ebaf033dfca15c7af4c1a84ec5fa68a88f3bf742bb082f7", + "https://github.com/adoptium/temurin11-binaries/releases/download/jdk-11.0.17%2B8/OpenJDK11U-jdk_aarch64_mac_hotspot_11.0.17_8.tar.gz", + ), + }, + "11.0.16.1_1": { + "Linux-x86_64": ( + "5f6b513757d386352cf91514ed5859d1ab59364b4453e1f1c57152ba2039b8e2", + "https://github.com/adoptium/temurin11-binaries/releases/download/jdk-11.0.16.1%2B1/OpenJDK11U-jdk_x64_linux_hotspot_11.0.16.1_1.tar.gz", + ), + "Linux-aarch64": ( + "2b89cabf0ce1c2cedadd92b798d6e9056bc27c71a06f5ba24ede5dc9c316e3e8", + "https://github.com/adoptium/temurin11-binaries/releases/download/jdk-11.0.16.1%2B1/OpenJDK11U-jdk_aarch64_linux_hotspot_11.0.16.1_1.tar.gz", + ), + "Linux-ppc64le": ( + "b18877871eda801ccb99bb34c5d7d77fccf6adad02514110c21389632ec91024", + "https://github.com/adoptium/temurin11-binaries/releases/download/jdk-11.0.16.1%2B1/OpenJDK11U-jdk_ppc64le_linux_hotspot_11.0.16.1_1.tar.gz", + ), + "Darwin-x86_64": ( + "723548e36e0b3e0a5a2f36a38b22ea825d3004e26054a0e254854adc57045352", + "https://github.com/adoptium/temurin11-binaries/releases/download/jdk-11.0.16.1%2B1/OpenJDK11U-jdk_x64_mac_hotspot_11.0.16.1_1.tar.gz", + ), + "Darwin-aarch64": ( + "1953f06702d45eb54bae3ccf453b57c33de827015f5623a2dfc16e1c83e6b0a1", + "https://github.com/adoptium/temurin11-binaries/releases/download/jdk-11.0.16.1%2B1/OpenJDK11U-jdk_aarch64_mac_hotspot_11.0.16.1_1.tar.gz", + ), + }, "11.0.15_10": { "Linux-x86_64": ( 
"5fdb4d5a1662f0cca73fec30f99e67662350b1fa61460fa72e91eb9f66b54d0b", @@ -341,3 +413,8 @@ def setup_dependent_run_environment(self, env, dependent_spec): class_paths = find(dependent_spec.prefix, "*.jar") classpath = os.pathsep.join(class_paths) env.prepend_path("CLASSPATH", classpath) + + # Since we provide openjdk as a binary, we can't remove an obsolete glibc + # fix that prevents us from modifying the soname of libjvm.so. If we move + # to source builds this should be possible. + non_bindable_shared_objects = ["libjvm.so"] diff --git a/var/spack/repos/builtin/packages/openmpi/package.py b/var/spack/repos/builtin/packages/openmpi/package.py index e1a34ac6b3c..9405951c352 100644 --- a/var/spack/repos/builtin/packages/openmpi/package.py +++ b/var/spack/repos/builtin/packages/openmpi/package.py @@ -1053,8 +1053,7 @@ def configure_args(self): return config_args - @when("+wrapper-rpath") - @run_after("install") + @run_after("install", when="+wrapper-rpath") def filter_rpaths(self): def filter_lang_rpaths(lang_tokens, rpath_arg): if self.compiler.cc_rpath_arg == rpath_arg: @@ -1086,8 +1085,7 @@ def filter_lang_rpaths(lang_tokens, rpath_arg): filter_lang_rpaths(["c++", "CC", "cxx"], self.compiler.cxx_rpath_arg) filter_lang_rpaths(["fort", "f77", "f90"], self.compiler.fc_rpath_arg) - @when("@:3.0.4+wrapper-rpath") - @run_after("install") + @run_after("install", when="@:3.0.4+wrapper-rpath") def filter_pc_files(self): files = find(self.spec.prefix.lib.pkgconfig, "*.pc") x = FileFilter(*[f for f in files if not os.path.islink(f)]) diff --git a/var/spack/repos/builtin/packages/openssh/package.py b/var/spack/repos/builtin/packages/openssh/package.py index 14e6581b304..245accc938f 100755 --- a/var/spack/repos/builtin/packages/openssh/package.py +++ b/var/spack/repos/builtin/packages/openssh/package.py @@ -23,6 +23,7 @@ class Openssh(AutotoolsPackage): tags = ["core-packages"] + version("9.1p1", sha256="19f85009c7e3e23787f0236fbb1578392ab4d4bf9f8ec5fe6bc1cd7e8bfdd288") 
version("9.0p1", sha256="03974302161e9ecce32153cfa10012f1e65c8f3750f573a73ab1befd5972a28a") version("8.9p1", sha256="fd497654b7ab1686dac672fb83dfb4ba4096e8b5ffcdaccd262380ae58bec5e7") version("8.8p1", sha256="4590890ea9bb9ace4f71ae331785a3a5823232435161960ed5fc86588f331fe9") diff --git a/var/spack/repos/builtin/packages/openssl/package.py b/var/spack/repos/builtin/packages/openssl/package.py index 2d420e75747..da444e30cd8 100644 --- a/var/spack/repos/builtin/packages/openssl/package.py +++ b/var/spack/repos/builtin/packages/openssl/package.py @@ -28,7 +28,12 @@ class Openssl(Package): # Uses Fake Autotools, should subclass Package executables = ["openssl"] - version("3.0.5", sha256="aa7d8d9bef71ad6525c55ba11e5f4397889ce49c2c9349dcea6d3e4f0b024a7a") + version("3.0.7", sha256="83049d042a260e696f62406ac5c08bf706fd84383f945cf21bd61e9ed95c396e") + version( + "3.0.5", + sha256="aa7d8d9bef71ad6525c55ba11e5f4397889ce49c2c9349dcea6d3e4f0b024a7a", + deprecated=True, + ) version( "3.0.4", sha256="2831843e9a668a0ab478e7020ad63d2d65e51f72977472dc73efcefbafc0c00f", @@ -52,10 +57,15 @@ class Openssl(Package): # Uses Fake Autotools, should subclass Package # The latest stable version is the 1.1.1 series. This is also our Long Term # Support (LTS) version, supported until 11th September 2023. + version( + "1.1.1s", + sha256="c5ac01e760ee6ff0dab61d6b2bbd30146724d063eb322180c6f18a6f74e4b6aa", + preferred=True, + ) version( "1.1.1q", sha256="d7939ce614029cdff0b6c20f0e2e5703158a489a72b2507b8bd51bf8c8fd10ca", - preferred=True, + deprecated=True, ) version( "1.1.1p", diff --git a/var/spack/repos/builtin/packages/ophidia-analytics-framework/package.py b/var/spack/repos/builtin/packages/ophidia-analytics-framework/package.py new file mode 100644 index 00000000000..ff15e3c3163 --- /dev/null +++ b/var/spack/repos/builtin/packages/ophidia-analytics-framework/package.py @@ -0,0 +1,49 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. 
See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class OphidiaAnalyticsFramework(AutotoolsPackage): + """Core modules and operators of the Ophidia framework""" + + homepage = "https://github.com/OphidiaBigData/ophidia-analytics-framework" + url = "https://github.com/OphidiaBigData/ophidia-analytics-framework/archive/refs/tags/v1.7.1.tar.gz" + maintainers = ["eldoo", "SoniaScard"] + version( + "1.7.1", + sha256="565050b90ce1cefc59136c835a335ca7981fec792df7a1ee9309b24c05b275d6", + ) + + depends_on("autoconf", type="build") + depends_on("automake", type="build") + depends_on("libtool", type="build") + depends_on("m4", type="build") + depends_on("pkgconfig", type="build") + + depends_on("gsl") + depends_on("mpich") + depends_on("jansson") + depends_on("libxml2") + depends_on("libssh2") + depends_on("openssl") + depends_on("mysql") + depends_on("netcdf-c") + depends_on("curl") + depends_on("ophidia-io-server") + + def autoreconf(self, spec, prefix): + autoreconf("--install", "--verbose", "--force") + + def configure_args(self): + args = [ + "--enable-parallel-netcdf", + "--with-web-server-path={0}/html".format( + self.spec["ophidia-analytics-framework"].prefix + ), + "--with-web-server-url=http://127.0.0.1/ophidia", + "--with-ophidiaio-server-path={0}".format(self.spec["ophidia-io-server"].prefix), + ] + return args diff --git a/var/spack/repos/builtin/packages/ophidia-io-server/package.py b/var/spack/repos/builtin/packages/ophidia-io-server/package.py new file mode 100644 index 00000000000..9b2ee7a03cf --- /dev/null +++ b/var/spack/repos/builtin/packages/ophidia-io-server/package.py @@ -0,0 +1,39 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class OphidiaIoServer(AutotoolsPackage): + """In-memory IO server of the Ophidia framework""" + + homepage = "https://github.com/OphidiaBigData/ophidia-io-server" + url = "https://github.com/OphidiaBigData/ophidia-io-server/archive/refs/tags/v1.7.2.tar.gz" + maintainers = ["eldoo", "SoniaScard"] + version("1.7.2", sha256="8b203c44e0e5497c00f1fdb2322f0b0a41f36900b62a33d95a4570ae1ccc2971") + + depends_on("autoconf", type="build") + depends_on("automake", type="build") + depends_on("libtool", type="build") + depends_on("m4", type="build") + + depends_on("boost@1.79.0") + depends_on("netcdf-c") + depends_on("mysql") + depends_on("bison") + depends_on("flex") + depends_on("ophidia-primitives") + + def autoreconf(self, spec, prefix): + autoreconf("--install", "--verbose", "--force") + + def configure_args(self): + args = [ + "--with-plugin-path={0}".format(self.spec["ophidia-primitives"].prefix.lib), + "--with-netcdf-path={0}".format(self.spec["netcdf-c"].prefix), + "--enable-parallel-nc4", + ] + + return args diff --git a/var/spack/repos/builtin/packages/ophidia-primitives/package.py b/var/spack/repos/builtin/packages/ophidia-primitives/package.py new file mode 100644 index 00000000000..bf85210080c --- /dev/null +++ b/var/spack/repos/builtin/packages/ophidia-primitives/package.py @@ -0,0 +1,41 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class OphidiaPrimitives(AutotoolsPackage): + """Array-based primitives for the Ophidia framework""" + + homepage = "https://github.com/OphidiaBigData/ophidia-primitives" + url = "https://github.com/OphidiaBigData/ophidia-primitives/archive/refs/tags/v1.7.1.tar.gz" + maintainers = ["eldoo", "SoniaScard"] + version( + "1.7.1", + sha256="efec5248dca8fb766abcd536344eefbe2e970fb551f03454a968e59e2df69116", + ) + + depends_on("autoconf", type="build") + depends_on("automake", type="build") + depends_on("libtool", type="build") + depends_on("m4", type="build") + depends_on("pkgconfig", type="build") + + depends_on("boost@1.79.0") + depends_on("mysql") + depends_on("libmatheval") + depends_on("zlib") + depends_on("gsl") + + def autoreconf(self, spec, prefix): + autoreconf("--install", "--verbose", "--force") + + def configure_args(self): + args = [ + "--with-gsl-lib-path={0}".format(self.spec["gsl"].prefix.lib), + "--with-gsl-header-path={0}".format(self.spec["gsl"].prefix.include), + ] + + return args diff --git a/var/spack/repos/builtin/packages/ophidia-server/package.py b/var/spack/repos/builtin/packages/ophidia-server/package.py new file mode 100644 index 00000000000..585224d68a0 --- /dev/null +++ b/var/spack/repos/builtin/packages/ophidia-server/package.py @@ -0,0 +1,47 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class OphidiaServer(AutotoolsPackage): + """Front-end server of the Ophidia framework""" + + homepage = "https://github.com/SoniaScard/ophidia-server" + url = "https://github.com/SoniaScard/ophidia-server/archive/refs/tags/v1.7.2.tar.gz" + maintainers = ["eldoo", "SoniaScard"] + version( + "1.7.2", + sha256="452587775343b266bbb5adcfeee64e7f7e9a9bbfcb2133646a831ae3e74348be", + ) + + depends_on("autoconf", type="build") + depends_on("automake", type="build") + depends_on("libtool", type="build") + depends_on("m4", type="build") + depends_on("pkg-config", type="build") + + depends_on("libmatheval") + depends_on("jansson") + depends_on("libxml2") + depends_on("libssh2") + depends_on("openssl") + depends_on("mysql") + depends_on("curl") + depends_on("ophidia-analytics-framework") + + def autoreconf(self, spec, prefix): + autoreconf("--install", "--verbose", "--force") + + def configure_args(self): + args = [ + "--with-web-server-path={0}/html".format( + self.spec["ophidia-analytics-framework"].prefix + ), + "--with-web-server-url=http://127.0.0.1/ophidia", + "--with-framework-path={0}".format(self.spec["ophidia-analytics-framework"].prefix), + ] + + return args diff --git a/var/spack/repos/builtin/packages/orca/package.py b/var/spack/repos/builtin/packages/orca/package.py index 997cd569d6e..af4078cfb7c 100644 --- a/var/spack/repos/builtin/packages/orca/package.py +++ b/var/spack/repos/builtin/packages/orca/package.py @@ -19,8 +19,14 @@ class Orca(Package): homepage = "https://cec.mpg.de" url = "file://{0}/orca_4_0_1_2_linux_x86-64_openmpi202.tar.zst".format(os.getcwd()) + maintainers = ["snehring"] manual_download = True + version( + "5.0.3", + sha256="b8b9076d1711150a6d6cb3eb30b18e2782fa847c5a86d8404b9339faef105043", + url="file://{0}/orca_5_0_3_linux_x86-64_shared_openmpi411.tar.xz".format(os.getcwd()), + ) version( "4.2.1", 
sha256="9bbb3bfdca8220b417ee898b27b2885508d8c82799adfa63dde9e72eab49a6b2", @@ -37,10 +43,10 @@ class Orca(Package): expand=False, ) - depends_on("zstd", type="build") + depends_on("zstd", when="@:4.2.1", type="build") # Map Orca version with the required OpenMPI version - openmpi_versions = {"4.0.1.2": "2.0.2", "4.2.0": "3.1.4", "4.2.1": "3.1.4"} + openmpi_versions = {"4.0.1.2": "2.0.2", "4.2.0": "3.1.4", "4.2.1": "3.1.4", "5.0.3": "4.1.2"} for orca_version, openmpi_version in openmpi_versions.items(): depends_on( "openmpi@{0}".format(openmpi_version), type="run", when="@{0}".format(orca_version) @@ -51,20 +57,21 @@ def url_for_version(self, version): return out.format(os.getcwd(), version.underscored, self.openmpi_versions[version.string]) def install(self, spec, prefix): - # we have to extract the archive ourself - # fortunately it's just full of a bunch of binaries - - vername = os.path.basename(self.stage.archive_file).split(".")[0] - - zstd = which("zstd") - zstd("-d", self.stage.archive_file, "-o", vername + ".tar") - - tar = which("tar") - tar("-xvf", vername + ".tar") - - # there are READMEs in there but they don't hurt anyone mkdirp(prefix.bin) - install_tree(vername, prefix.bin) + + if self.spec.satisfies("@:4.2.1"): + vername = os.path.basename(self.stage.archive_file).split(".")[0] + + zstd = which("zstd") + zstd("-d", self.stage.archive_file, "-o", vername + ".tar") + + tar = which("tar") + tar("-xvf", vername + ".tar") + + # there are READMEs in there but they don't hurt anyone + install_tree(vername, prefix.bin) + else: + install_tree(".", prefix.bin) # Check "mpirun" usability when building against OpenMPI # with Slurm scheduler and add a "mpirun" wrapper that @@ -72,3 +79,6 @@ def install(self, spec, prefix): if "^openmpi ~legacylaunchers schedulers=slurm" in self.spec: mpirun_srun = join_path(os.path.dirname(__file__), "mpirun_srun.sh") install(mpirun_srun, prefix.bin.mpirun) + + def setup_run_environment(self, env): + 
env.prepend_path("LD_LIBRARY_PATH", self.prefix.bin) diff --git a/var/spack/repos/builtin/packages/osu-micro-benchmarks/package.py b/var/spack/repos/builtin/packages/osu-micro-benchmarks/package.py index 6840dfe076e..bb090e63f3a 100644 --- a/var/spack/repos/builtin/packages/osu-micro-benchmarks/package.py +++ b/var/spack/repos/builtin/packages/osu-micro-benchmarks/package.py @@ -16,10 +16,11 @@ class OsuMicroBenchmarks(AutotoolsPackage, CudaPackage, ROCmPackage): and can be used for both traditional and GPU-enhanced nodes.""" homepage = "https://mvapich.cse.ohio-state.edu/benchmarks/" - url = "https://mvapich.cse.ohio-state.edu/download/mvapich/osu-micro-benchmarks-6.1.tar.gz" + url = "https://mvapich.cse.ohio-state.edu/download/mvapich/osu-micro-benchmarks-6.2.tar.gz" maintainers = ["natshineman", "harisubramoni", "MatthewLieber"] + version("6.2", sha256="bb9dbc87dcf8ec6785977a61f6fceee8febf1a682488eaab4c58cf50e4fa985f") version("6.1", sha256="ecccedc868264f75db4d9529af79005419a2775113c7fae8f4e4a8434362e4a7") version("6.0", sha256="309fb7583ff54562343b0e0df1eebde3fc245191e183be362f031ac74f4ab542") version("5.9", sha256="d619740a1c2cc7c02a9763931546b320d0fa4093c415ff3873c2958e121c0609") diff --git a/var/spack/repos/builtin/packages/papi/package.py b/var/spack/repos/builtin/packages/papi/package.py index f3fe762e448..35ea13fbd9c 100644 --- a/var/spack/repos/builtin/packages/papi/package.py +++ b/var/spack/repos/builtin/packages/papi/package.py @@ -105,6 +105,11 @@ def setup_build_environment(self, env): env.set("HSA_TOOLS_LIB", "unset") if "+rocm_smi" in spec: env.append_flags("CFLAGS", "-I%s/rocm_smi" % spec["rocm-smi-lib"].prefix.include) + # + # Intel OneAPI LLVM cannot compile papi unless the DBG enviroment variable is cleared + # + if spec.satisfies("%oneapi"): + env.set("DBG", "") setup_run_environment = setup_build_environment diff --git a/var/spack/repos/builtin/packages/parallel-netcdf/package.py b/var/spack/repos/builtin/packages/parallel-netcdf/package.py 
index 965e6a365eb..44f71946408 100644 --- a/var/spack/repos/builtin/packages/parallel-netcdf/package.py +++ b/var/spack/repos/builtin/packages/parallel-netcdf/package.py @@ -34,6 +34,7 @@ def url_for_version(self, version): return url.format(version.dotted) version("master", branch="master") + version("1.12.3", sha256="439e359d09bb93d0e58a6e3f928f39c2eae965b6c97f64e67cd42220d6034f77") version("1.12.2", sha256="3ef1411875b07955f519a5b03278c31e566976357ddfc74c2493a1076e7d7c74") version("1.12.1", sha256="56f5afaa0ddc256791c405719b6436a83b92dcd5be37fe860dea103aee8250a2") version("1.11.2", sha256="d2c18601b364c35b5acb0a0b46cd6e14cae456e0eb854e5c789cf65f3cd6a2a7") diff --git a/var/spack/repos/builtin/packages/paraview/package.py b/var/spack/repos/builtin/packages/paraview/package.py index ea522143e98..ccd14f984ce 100644 --- a/var/spack/repos/builtin/packages/paraview/package.py +++ b/var/spack/repos/builtin/packages/paraview/package.py @@ -28,7 +28,7 @@ class Paraview(CMakePackage, CudaPackage): version("master", branch="master", submodules=True) version( - "5.11.0-RC1", sha256="892c4617b3f23f6e5c9a08ecc9b3e9f16b9e2f54c044155c3c252f00b0fbafd9" + "5.11.0-RC2", sha256="b5748b1ef4b8855467c3db75ffb8739096075596229e7ba16b284946964904b9" ) version( "5.10.1", @@ -73,6 +73,13 @@ class Paraview(CMakePackage, CudaPackage): variant("pagosa", default=False, description="Build the pagosa adaptor") variant("eyedomelighting", default=False, description="Enable Eye Dome Lighting feature") variant("adios2", default=False, description="Enable ADIOS2 support", when="@5.8:") + variant("catalyst", default=False, description="Enable Catalyst 1", when="@5.7:") + variant( + "libcatalyst", + default=False, + description="Enable Catalyst 2 (libcatalyst) implementation", + when="@5.10:", + ) variant( "advanced_debug", @@ -102,7 +109,7 @@ class Paraview(CMakePackage, CudaPackage): # Python 2 support dropped with 5.9.0 conflicts("+python", when="@5.9:") conflicts("+python3", when="@:5.5") - 
conflicts("+shared", when="+cuda") + conflicts("~shared", when="+cuda") conflicts("+cuda", when="@5.8:5.10") # Legacy rendering dropped in 5.5 # See commit: https://gitlab.kitware.com/paraview/paraview/-/commit/798d328c @@ -205,6 +212,7 @@ class Paraview(CMakePackage, CudaPackage): depends_on("lz4") depends_on("xz") depends_on("zlib") + depends_on("libcatalyst", when="+libcatalyst") # Older builds of pugi export their symbols differently, # and pre-5.9 is unable to handle that. @@ -303,6 +311,15 @@ def flag_handler(self, name, flags): if (name == "cflags" or name == "cxxflags") and self.spec.satisfies("%intel"): flags.append("-no-ipo") return (None, None, flags) + + if name in ("cflags", "cxxflags"): + # Constrain the HDF5 API + if self.spec.satisfies("@:5.9 +hdf5"): + if self.spec["hdf5"].satisfies("@1.10:"): + flags.append("-DH5_USE_18_API") + elif self.spec.satisfies("@5.10: +hdf5"): + if self.spec["hdf5"].satisfies("@1.12:"): + flags.append("-DH5_USE_110_API") return (flags, None, None) def setup_run_environment(self, env): @@ -574,4 +591,13 @@ def nvariant_bool(feature): if "+advanced_debug" in spec: cmake_args.append("-DVTK_DEBUG_LEAKS:BOOL=ON") + if "+catalyst" in spec: + cmake_args.append("-DVTK_MODULE_ENABLE_ParaView_Catalyst=YES") + if "+python3" in spec: + cmake_args.append("-DVTK_MODULE_ENABLE_ParaView_PythonCatalyst=YES") + + if "+libcatalyst" in spec: + cmake_args.append("-DVTK_MODULE_ENABLE_ParaView_InSitu=YES") + cmake_args.append("-DPARAVIEW_ENABLE_CATALYST=YES") + return cmake_args diff --git a/var/spack/repos/builtin/packages/patchelf/package.py b/var/spack/repos/builtin/packages/patchelf/package.py index a19eb4d0ed3..ce1549b5aa0 100644 --- a/var/spack/repos/builtin/packages/patchelf/package.py +++ b/var/spack/repos/builtin/packages/patchelf/package.py @@ -18,6 +18,7 @@ class Patchelf(AutotoolsPackage): maintainers = ["haampie"] + version("0.16.1", sha256="1a562ed28b16f8a00456b5f9ee573bb1af7c39c1beea01d94fc0c7b3256b0406") version("0.15.0", 
sha256="53a8d58ed4e060412b8fdcb6489562b3c62be6f65cee5af30eba60f4423bfa0f") version("0.14.5", sha256="113ada3f1ace08f0a7224aa8500f1fa6b08320d8f7df05ff58585286ec5faa6f") version("0.14.3", sha256="8fabf4210499744ced101612cd5c9fd12b94af67a16297cb5d3ff682c007ffdb") diff --git a/var/spack/repos/builtin/packages/perl-carp/package.py b/var/spack/repos/builtin/packages/perl-carp/package.py new file mode 100644 index 00000000000..3b0dd441cd7 --- /dev/null +++ b/var/spack/repos/builtin/packages/perl-carp/package.py @@ -0,0 +1,19 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +from spack.package import * + + +class PerlCarp(PerlPackage): + """Carp - alternative warn and die for modules""" + + homepage = "https://metacpan.org/pod/Carp" + url = "https://cpan.metacpan.org/authors/id/X/XS/XSAWYERX/Carp-1.50.tar.gz" + + version("1.50", sha256="f5273b4e1a6d51b22996c48cb3a3cbc72fd456c4038f5c20b127e2d4bcbcebd9") + + depends_on("perl-extutils-makemaker", type=("build", "run")) + depends_on("perl-test-more", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/perl-class-method-modifiers/package.py b/var/spack/repos/builtin/packages/perl-class-method-modifiers/package.py new file mode 100644 index 00000000000..a67a0c4c193 --- /dev/null +++ b/var/spack/repos/builtin/packages/perl-class-method-modifiers/package.py @@ -0,0 +1,19 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +from spack.package import * + + +class PerlClassMethodModifiers(PerlPackage): + """Class::Method::Modifiers - Provides Moose-like method modifiers""" + + homepage = "https://metacpan.org/pod/Class::Method::Modifiers" + url = "https://cpan.metacpan.org/authors/id/E/ET/ETHER/Class-Method-Modifiers-2.13.tar.gz" + + version("2.13", sha256="ab5807f71018a842de6b7a4826d6c1f24b8d5b09fcce5005a3309cf6ea40fd63") + + depends_on("perl-carp", type=("build", "run")) + depends_on("perl-exporter-tiny", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/perl-moo/package.py b/var/spack/repos/builtin/packages/perl-moo/package.py new file mode 100644 index 00000000000..188ff7361b8 --- /dev/null +++ b/var/spack/repos/builtin/packages/perl-moo/package.py @@ -0,0 +1,23 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +from spack.package import * + + +class PerlMoo(PerlPackage): + """Moo - Minimalist Object Orientation (with Moose compatibility)""" + + homepage = "https://metacpan.org/pod/Moo" + url = "https://cpan.metacpan.org/authors/id/H/HA/HAARG/Moo-2.005004.tar.gz" + + version("2.005004", sha256="e3030b80bd554a66f6b3c27fd53b1b5909d12af05c4c11ece9a58f8d1e478928") + + depends_on("perl-carp", type=("build", "run")) + depends_on("perl-class-method-modifiers", type=("build", "run")) + depends_on("perl-exporter-tiny", type=("build", "run")) + depends_on("perl-role-tiny", type=("build", "run")) + depends_on("perl-scalar-list-utils", type=("build", "run")) + depends_on("perl-sub-quote", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/perl-role-tiny/package.py b/var/spack/repos/builtin/packages/perl-role-tiny/package.py new file mode 100644 index 00000000000..96868dbb3a4 --- /dev/null +++ 
b/var/spack/repos/builtin/packages/perl-role-tiny/package.py @@ -0,0 +1,18 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +from spack.package import * + + +class PerlRoleTiny(PerlPackage): + """Role::Tiny - Roles: a nouvelle cuisine portion size slice of Moose""" + + homepage = "https://metacpan.org/pod/Role::Tiny" + url = "https://cpan.metacpan.org/authors/id/H/HA/HAARG/Role-Tiny-2.002004.tar.gz" + + version("2.002004", sha256="d7bdee9e138a4f83aa52d0a981625644bda87ff16642dfa845dcb44d9a242b45") + + depends_on("perl-exporter-tiny", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/perl-sub-quote/package.py b/var/spack/repos/builtin/packages/perl-sub-quote/package.py new file mode 100644 index 00000000000..382afc31403 --- /dev/null +++ b/var/spack/repos/builtin/packages/perl-sub-quote/package.py @@ -0,0 +1,18 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +from spack.package import * + + +class PerlSubQuote(PerlPackage): + """Sub::Quote - Efficient generation of subroutines via string eval""" + + homepage = "https://metacpan.org/pod/Sub::Quote" + url = "https://cpan.metacpan.org/authors/id/H/HA/HAARG/Sub-Quote-2.006006.tar.gz" + + version("2.006006", sha256="6e4e2af42388fa6d2609e0e82417de7cc6be47223f576592c656c73c7524d89d") + + depends_on("perl-scalar-list-utils", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/perl/package.py b/var/spack/repos/builtin/packages/perl/package.py index 5aa009a9a9c..b268c86f2b5 100644 --- a/var/spack/repos/builtin/packages/perl/package.py +++ b/var/spack/repos/builtin/packages/perl/package.py @@ -46,45 +46,42 @@ class Perl(Package): # Perl doesn't use Autotools, it should subclass Package version("5.31.4", sha256="418a7e6fe6485cc713a86d1227ef112f0bb3f80322e3b715ffe42851d97804a5") # Maintenance releases (even numbers, recommended) + version( + "5.36.0", + sha256="e26085af8ac396f62add8a533c3a0ea8c8497d836f0689347ac5abd7b7a4e00a", + preferred=True, + ) version( "5.34.1", sha256="357951a491b0ba1ce3611263922feec78ccd581dddc24a446b033e25acf242a1", - preferred=True, ) version( "5.34.0", sha256="551efc818b968b05216024fb0b727ef2ad4c100f8cb6b43fab615fa78ae5be9a", - preferred=True, ) version( "5.32.1", sha256="03b693901cd8ae807231b1787798cf1f2e0b8a56218d07b7da44f784a7caeb2c", - preferred=True, ) version( "5.32.0", sha256="efeb1ce1f10824190ad1cadbcccf6fdb8a5d37007d0100d2d9ae5f2b5900c0b4", - preferred=True, ) version( "5.30.3", sha256="32e04c8bb7b1aecb2742a7f7ac0eabac100f38247352a73ad7fa104e39e7406f", - preferred=True, ) version( "5.30.2", sha256="66db7df8a91979eb576fac91743644da878244cf8ee152f02cd6f5cd7a731689", - preferred=True, ) version( "5.30.1", sha256="bf3d25571ff1ee94186177c2cdef87867fd6a14aa5a84f0b1fb7bf798f42f964", - preferred=True, ) version( "5.30.0", 
sha256="851213c754d98ccff042caa40ba7a796b2cee88c5325f121be5cbb61bbf975f2", - preferred=True, ) # End of life releases @@ -103,8 +100,9 @@ class Perl(Package): # Perl doesn't use Autotools, it should subclass Package extendable = True if not is_windows: + depends_on("gdbm@:1.23") # Bind us below gdbm-1.20 due to API change: https://github.com/Perl/perl5/issues/18915 - depends_on("gdbm@:1.19") + depends_on("gdbm@:1.19", when="@:5.35") # :5.28 needs gdbm@:1:14.1: https://rt-archive.perl.org/perl5/Ticket/Display.html?id=133295 depends_on("gdbm@:1.14.1", when="@:5.28.0") depends_on("berkeley-db") @@ -357,12 +355,12 @@ def install_cpanm(self): maker() maker("install") - def _setup_dependent_env(self, env, dependent_spec, deptypes): + def _setup_dependent_env(self, env, dependent_spec, deptype): """Set PATH and PERL5LIB to include the extension and any other perl extensions it depends on, assuming they were installed with INSTALL_BASE defined.""" perl_lib_dirs = [] - for d in dependent_spec.traverse(deptype=deptypes): + for d in dependent_spec.traverse(deptype=deptype): if d.package.extends(self.spec): perl_lib_dirs.append(d.prefix.lib.perl5) if perl_lib_dirs: @@ -372,10 +370,10 @@ def _setup_dependent_env(self, env, dependent_spec, deptypes): env.append_path("PATH", self.prefix.bin) def setup_dependent_build_environment(self, env, dependent_spec): - self._setup_dependent_env(env, dependent_spec, deptypes=("build", "run", "test")) + self._setup_dependent_env(env, dependent_spec, deptype=("build", "run", "test")) def setup_dependent_run_environment(self, env, dependent_spec): - self._setup_dependent_env(env, dependent_spec, deptypes=("run",)) + self._setup_dependent_env(env, dependent_spec, deptype=("run",)) def setup_dependent_package(self, module, dependent_spec): """Called before perl modules' install() methods. 
diff --git a/var/spack/repos/builtin/packages/petsc/package.py b/var/spack/repos/builtin/packages/petsc/package.py index 05799ec6bea..ad674264951 100644 --- a/var/spack/repos/builtin/packages/petsc/package.py +++ b/var/spack/repos/builtin/packages/petsc/package.py @@ -22,6 +22,9 @@ class Petsc(Package, CudaPackage, ROCmPackage): version("main", branch="main") + version("3.18.1", sha256="02f5979a22f5961bb775d527f8450db77bc6a8d2541f3b05fb586829b82e9bc8") + version("3.18.0", sha256="9da802e703ad79fb7ef0007d17f68916573011073ee9712dcd1673537f6a5f68") + version("3.17.5", sha256="a1193e6c50a1676c3972a1edf0a06eec9fac8ecc2f3771f2689a8997423e4c71") version("3.17.4", sha256="99c127486722a3ffd95a268b4ceb0976cbf217926c681a9631bd7246eab8cb2a") version("3.17.3", sha256="5c24ade5e4b32cc04935ba0db1dafe48d633bebaaa30a3033f1e58788d37875f") version("3.17.2", sha256="2313dd1ca41bf0ace68671ea6f8d4abf90011ed899f5e1e08658d3f18478359d") @@ -201,6 +204,7 @@ def check_fortran_compiler(self): patch("xcode_stub_out_of_sync.patch", when="@:3.10.4") patch("xlf_fix-dup-petscfecreate.patch", when="@3.11.0") patch("disable-DEPRECATED_ENUM.diff", when="@3.14.1 +cuda") + patch("revert-3.18.0-ver-format-for-dealii.patch", when="@3.18.0") depends_on("diffutils", type="build") @@ -217,6 +221,7 @@ def check_fortran_compiler(self): depends_on("hip", when="+rocm") depends_on("hipblas", when="+rocm") depends_on("hipsparse", when="+rocm") + depends_on("hipsolver", when="+rocm") depends_on("rocsparse", when="+rocm") depends_on("rocsolver", when="+rocm") depends_on("rocblas", when="+rocm") @@ -328,9 +333,20 @@ def check_fortran_compiler(self): depends_on("hwloc", when="+hwloc") depends_on("kokkos", when="+kokkos") depends_on("kokkos-kernels", when="+kokkos") - depends_on("kokkos+cuda+wrapper+cuda_lambda", when="+kokkos +cuda") - depends_on("kokkos-kernels+cuda", when="+kokkos +cuda") - depends_on("kokkos+rocm", when="+kokkos +rocm") + for cuda_arch in CudaPackage.cuda_arch_values: + depends_on( + 
"kokkos+cuda+cuda_lambda cuda_arch=%s" % cuda_arch, + when="+kokkos +cuda cuda_arch=%s" % cuda_arch, + ) + depends_on( + "kokkos-kernels+cuda cuda_arch=%s" % cuda_arch, + when="+kokkos +cuda cuda_arch=%s" % cuda_arch, + ) + for rocm_arch in ROCmPackage.amdgpu_targets: + depends_on( + "kokkos+rocm amdgpu_target=%s" % rocm_arch, + when="+kokkos +rocm amdgpu_target=%s" % rocm_arch, + ) phases = ["configure", "build", "install"] @@ -543,7 +559,7 @@ def configure_options(self): if not spec.satisfies("amdgpu_target=none"): hip_arch = spec.variants["amdgpu_target"].value options.append("--with-hip-arch={0}".format(hip_arch[0])) - hip_pkgs = ["hipsparse", "hipblas", "rocsparse", "rocsolver", "rocblas"] + hip_pkgs = ["hipsparse", "hipblas", "hipsolver", "rocsparse", "rocsolver", "rocblas"] hip_ipkgs = hip_pkgs + ["rocthrust", "rocprim"] hip_lpkgs = hip_pkgs if spec.satisfies("^rocrand@5.1:"): @@ -557,7 +573,7 @@ def configure_options(self): for pkg in hip_lpkgs: hip_lib += spec[pkg].libs.joined() + " " options.append("HIPPPFLAGS=%s" % hip_inc) - options.append("with-hip-lib=%s -L%s -lamdhip64" % (hip_lib, spec["hip"].prefix.lib)) + options.append("--with-hip-lib=%s -L%s -lamdhip64" % (hip_lib, spec["hip"].prefix.lib)) if "superlu-dist" in spec: if spec.satisfies("@3.10.3:3.15"): diff --git a/var/spack/repos/builtin/packages/petsc/revert-3.18.0-ver-format-for-dealii.patch b/var/spack/repos/builtin/packages/petsc/revert-3.18.0-ver-format-for-dealii.patch new file mode 100644 index 00000000000..6c6d96c2693 --- /dev/null +++ b/var/spack/repos/builtin/packages/petsc/revert-3.18.0-ver-format-for-dealii.patch @@ -0,0 +1,57 @@ +--- a/include/petscversion.h 2022-10-01 13:55:26.000000000 -0500 ++++ b/include/petscversion.h 2022-10-10 18:03:11.525297321 -0500 +@@ -2,11 +2,11 @@ + #define PETSCVERSION_H + #include + +-#define PETSC_VERSION_RELEASE 1 +-#define PETSC_VERSION_MAJOR 3 +-#define PETSC_VERSION_MINOR 18 +-#define PETSC_VERSION_SUBMINOR 0 +-#define PETSC_RELEASE_DATE "Sep 30, 
2022" ++#define PETSC_VERSION_RELEASE 1 ++#define PETSC_VERSION_MAJOR 3 ++#define PETSC_VERSION_MINOR 18 ++#define PETSC_VERSION_SUBMINOR 0 ++#define PETSC_RELEASE_DATE "Sep 30, 2022" + #define PETSC_VERSION_DATE "Sep 30, 2022" + + #if !defined(PETSC_VERSION_GIT) +@@ -17,17 +17,30 @@ + #define PETSC_VERSION_DATE_GIT "2022-09-30 20:39:36 -0500" + #endif + +-#define PETSC_VERSION_EQ(MAJOR, MINOR, SUBMINOR) ((PETSC_VERSION_MAJOR == (MAJOR)) && (PETSC_VERSION_MINOR == (MINOR)) && (PETSC_VERSION_SUBMINOR == (SUBMINOR)) && (PETSC_VERSION_RELEASE == 1)) ++#define PETSC_VERSION_EQ(MAJOR,MINOR,SUBMINOR) \ ++ ((PETSC_VERSION_MAJOR == (MAJOR)) && \ ++ (PETSC_VERSION_MINOR == (MINOR)) && \ ++ (PETSC_VERSION_SUBMINOR == (SUBMINOR)) && \ ++ (PETSC_VERSION_RELEASE == 1)) + + #define PETSC_VERSION_ PETSC_VERSION_EQ + +-#define PETSC_VERSION_LT(MAJOR, MINOR, SUBMINOR) \ +- (PETSC_VERSION_RELEASE == 1 && (PETSC_VERSION_MAJOR < (MAJOR) || (PETSC_VERSION_MAJOR == (MAJOR) && (PETSC_VERSION_MINOR < (MINOR) || (PETSC_VERSION_MINOR == (MINOR) && (PETSC_VERSION_SUBMINOR < (SUBMINOR))))))) ++#define PETSC_VERSION_LT(MAJOR,MINOR,SUBMINOR) \ ++ (PETSC_VERSION_RELEASE == 1 && \ ++ (PETSC_VERSION_MAJOR < (MAJOR) || \ ++ (PETSC_VERSION_MAJOR == (MAJOR) && \ ++ (PETSC_VERSION_MINOR < (MINOR) || \ ++ (PETSC_VERSION_MINOR == (MINOR) && \ ++ (PETSC_VERSION_SUBMINOR < (SUBMINOR))))))) ++ ++#define PETSC_VERSION_LE(MAJOR,MINOR,SUBMINOR) \ ++ (PETSC_VERSION_LT(MAJOR,MINOR,SUBMINOR) || \ ++ PETSC_VERSION_EQ(MAJOR,MINOR,SUBMINOR)) + +-#define PETSC_VERSION_LE(MAJOR, MINOR, SUBMINOR) (PETSC_VERSION_LT(MAJOR, MINOR, SUBMINOR) || PETSC_VERSION_EQ(MAJOR, MINOR, SUBMINOR)) ++#define PETSC_VERSION_GT(MAJOR,MINOR,SUBMINOR) \ ++ (0 == PETSC_VERSION_LE(MAJOR,MINOR,SUBMINOR)) + +-#define PETSC_VERSION_GT(MAJOR, MINOR, SUBMINOR) (0 == PETSC_VERSION_LE(MAJOR, MINOR, SUBMINOR)) +- +-#define PETSC_VERSION_GE(MAJOR, MINOR, SUBMINOR) (0 == PETSC_VERSION_LT(MAJOR, MINOR, SUBMINOR)) ++#define 
PETSC_VERSION_GE(MAJOR,MINOR,SUBMINOR) \ ++ (0 == PETSC_VERSION_LT(MAJOR,MINOR,SUBMINOR)) + + #endif diff --git a/var/spack/repos/builtin/packages/pflotran/package.py b/var/spack/repos/builtin/packages/pflotran/package.py index 3b00fda8e7c..6a042497b99 100644 --- a/var/spack/repos/builtin/packages/pflotran/package.py +++ b/var/spack/repos/builtin/packages/pflotran/package.py @@ -18,6 +18,7 @@ class Pflotran(AutotoolsPackage): maintainers = ["ghammond86", "balay"] version("develop") + version("4.0.1", commit="fd351a49b687e27f46eae92e9259156eea74897d") # tag v4.0.1 version("3.0.2", commit="9e07f416a66b0ad304c720b61aa41cba9a0929d5") # tag v3.0.2 version("xsdk-0.6.0", commit="46e14355c1827c057f2e1b3e3ae934119ab023b2") version("xsdk-0.5.0", commit="98a959c591b72f73373febf5f9735d2c523b4c20") @@ -27,6 +28,7 @@ class Pflotran(AutotoolsPackage): depends_on("mpi") depends_on("hdf5@1.8.12:+mpi+fortran+hl") depends_on("petsc@main:+hdf5+metis", when="@develop") + depends_on("petsc@3.18:+hdf5+metis", when="@4.0.1") depends_on("petsc@3.16:+hdf5+metis", when="@3.0.2") depends_on("petsc@3.14:+hdf5+metis", when="@xsdk-0.6.0") depends_on("petsc@3.12:+hdf5+metis", when="@xsdk-0.5.0") @@ -36,3 +38,8 @@ class Pflotran(AutotoolsPackage): @property def parallel(self): return self.spec.satisfies("@xsdk-0.4.0:") + + def flag_handler(self, name, flags): + if "%gcc@10:" in self.spec and name == "fflags": + flags.append("-fallow-argument-mismatch") + return flags, None, None diff --git a/var/spack/repos/builtin/packages/phist/package.py b/var/spack/repos/builtin/packages/phist/package.py index 3d467231811..840b6e1d671 100644 --- a/var/spack/repos/builtin/packages/phist/package.py +++ b/var/spack/repos/builtin/packages/phist/package.py @@ -21,7 +21,7 @@ class Phist(CMakePackage): """ homepage = "https://bitbucket.org/essex/phist/" - url = "https://bitbucket.org/essex/phist/get/phist-1.9.6.tar.gz" + url = "https://bitbucket.org/essex/phist/get/phist-1.11.2.tar.gz" git = 
"https://bitbucket.org/essex/phist.git" maintainers = ["jthies"] @@ -34,6 +34,9 @@ class Phist(CMakePackage): version("develop", branch="devel") version("master", branch="master") + # compatible with python@3.11: and cray-libsci as BLAS/LAPACK provider + version("1.11.2", sha256="e23f76307c26b930f7331a734b0a864ea6d7fb4a13c12f3c5d70c2c41481747b") + # updated lapack interface to work with openblas and netlib-lapack version("1.11.0", sha256="36e6cc41a13884ba0a26f7be03e3f1882b1a2d14ca04353a609c0eec0cfb7a77") @@ -128,6 +131,10 @@ class Phist(CMakePackage): description="generate Fortran 2003 bindings (requires Python3 and " "a Fortran compiler)", ) + # Build error with cray-libsci because they define macro 'I', workaround in phist-1.11.2 + conflicts("^cray-libsci", when="@:1.11.1") + # phist@1.11.2 got rid of some deprecated python code + conflicts("^python@3.11:", when="@:1.11.1") # The builtin kernels switched from the 'mpi' to the 'mpi_f08' module in # phist 1.9.6, which causes compile-time errors with mpich and older # GCC versions. 
diff --git a/var/spack/repos/builtin/packages/pika/package.py b/var/spack/repos/builtin/packages/pika/package.py index 37ae086d828..8c4c7202cd2 100644 --- a/var/spack/repos/builtin/packages/pika/package.py +++ b/var/spack/repos/builtin/packages/pika/package.py @@ -17,7 +17,8 @@ class Pika(CMakePackage, CudaPackage, ROCmPackage): git = "https://github.com/pika-org/pika.git" maintainers = ["msimberg", "albestro", "teonnik", "aurianer"] - version("0.9.0", branch="c349b2a96476d6974d2421288ca4d2e14ef9e5897d44cd7d5343165faa2d1299") + version("0.10.0", sha256="3b443b8f0f75b9a558accbaef0334a113a71b0205770e6c7ff02ea2d7c6aca5b") + version("0.9.0", sha256="c349b2a96476d6974d2421288ca4d2e14ef9e5897d44cd7d5343165faa2d1299") version("0.8.0", sha256="058e82d7c8f95badabe52bbb4682d55aadf340d67ced1226c0673b4529adc182") version("0.7.0", sha256="e1bf978c88515f7af28ee47f98b795ffee521c15b39877ea4cfb405f31d507ed") version("0.6.0", sha256="cb4ebd7b92da39ec4df7b0d05923b94299d6ee2f2f49752923ffa2266ca76568") @@ -98,7 +99,9 @@ class Pika(CMakePackage, CudaPackage, ROCmPackage): depends_on("rocblas", when="+rocm") depends_on("rocsolver", when="@0.5: +rocm") depends_on("tracy-client", when="+tracy") - depends_on("whip", when="@0.9:") + conflicts("tracy-client@0.9:", when="@:0.9") + depends_on("whip+rocm", when="@0.9: +rocm") + depends_on("whip+cuda", when="@0.9: +cuda") for cxxstd in cxxstds: depends_on("boost cxxstd={0}".format(map_cxxstd(cxxstd)), when="cxxstd={0}".format(cxxstd)) diff --git a/var/spack/repos/builtin/packages/pilercr/package.py b/var/spack/repos/builtin/packages/pilercr/package.py new file mode 100644 index 00000000000..a18c4c70f7b --- /dev/null +++ b/var/spack/repos/builtin/packages/pilercr/package.py @@ -0,0 +1,28 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +from spack.package import * + + +class Pilercr(MakefilePackage): + """Identification and analysis of CRISPR repeats.""" + + homepage = "http://www.drive5.com/pilercr/" + url = "http://www.drive5.com/pilercr/pilercr1.06.tar.gz" + + version("1.06", sha256="50175f7aa171674cda5ba255631f340f9cc7f80e8cc25135a4cb857147d91068") + + @property + def build_targets(self): + targets = [] + targets.append("GPP = {0}".format(spack_cxx)) + targets.append("CFLAGS = -O3 -DNDEBUG=1") + targets.append("LDLIBS = -lm") + return targets + + def install(self, spec, prefix): + mkdirp(prefix.bin) + install("pilercr", prefix.bin) diff --git a/var/spack/repos/builtin/packages/pinentry/package.py b/var/spack/repos/builtin/packages/pinentry/package.py index fec657a7727..9c91a1e0ce3 100644 --- a/var/spack/repos/builtin/packages/pinentry/package.py +++ b/var/spack/repos/builtin/packages/pinentry/package.py @@ -20,6 +20,7 @@ class Pinentry(AutotoolsPackage): maintainers = ["alalazo"] + version("1.2.1", sha256="457a185e5a85238fb945a955dc6352ab962dc8b48720b62fc9fa48c7540a4067") version("1.2.0", sha256="10072045a3e043d0581f91cd5676fcac7ffee957a16636adedaa4f583a616470") version("1.1.1", sha256="cd12a064013ed18e2ee8475e669b9f58db1b225a0144debdb85a68cecddba57f") version("1.1.0", sha256="68076686fa724a290ea49cdf0d1c0c1500907d1b759a3bcbfbec0293e8f56570") diff --git a/var/spack/repos/builtin/packages/pism/package.py b/var/spack/repos/builtin/packages/pism/package.py index 912752792ac..edfb3ebd3e6 100644 --- a/var/spack/repos/builtin/packages/pism/package.py +++ b/var/spack/repos/builtin/packages/pism/package.py @@ -47,6 +47,8 @@ class Pism(CMakePackage): description = "Report errors through Everytrace (requires Everytrace)" variant("everytrace", default=False, description=description) + patch("pism-petsc-3.18.diff", when="@1.1.4 ^petsc@3.18:") + # CMake build options not transferred to Spack variants # (except from CMakeLists.txt) # diff --git 
a/var/spack/repos/builtin/packages/pism/pism-petsc-3.18.diff b/var/spack/repos/builtin/packages/pism/pism-petsc-3.18.diff new file mode 100644 index 00000000000..68d47427edd --- /dev/null +++ b/var/spack/repos/builtin/packages/pism/pism-petsc-3.18.diff @@ -0,0 +1,22 @@ +--- spack-src/src/util/options.cc~ 2019-06-17 13:14:35.000000000 -0500 ++++ spack-src/src/util/options.cc 2022-10-10 19:50:05.309762538 -0500 +@@ -59,8 +59,7 @@ + memset(tmp, 0, TEMPORARY_STRING_LENGTH); + + PetscErrorCode ierr; +- ierr = PetscOptionsBegin(MPI_COMM_SELF, "", "", ""); +- PISM_CHK(ierr, "PetscOptionsBegin"); ++ PetscOptionsBegin(MPI_COMM_SELF, "", "", ""); + + ierr = PetscOptionsString(option.c_str(), + description.c_str(), +@@ -71,8 +70,7 @@ + &flag); // PETSC_TRUE if found, else PETSC_FALSE + PISM_CHK(ierr, "PetscOptionsString"); + +- ierr = PetscOptionsEnd(); +- PISM_CHK(ierr, "PetscOptionsEnd"); ++ PetscOptionsEnd(); + + std::string result = tmp; + diff --git a/var/spack/repos/builtin/packages/plasma/package.py b/var/spack/repos/builtin/packages/plasma/package.py index 937d87de9e4..f6f32d06291 100644 --- a/var/spack/repos/builtin/packages/plasma/package.py +++ b/var/spack/repos/builtin/packages/plasma/package.py @@ -2,7 +2,8 @@ # Spack Project Developers. See the top-level COPYRIGHT file for details. 
# # SPDX-License-Identifier: (Apache-2.0 OR MIT) - +import spack.build_systems.cmake +import spack.build_systems.makefile from spack.package import * @@ -23,6 +24,7 @@ class Plasma(CMakePackage): tags = ["e4s"] version("develop", git=git) + version("22.9.29", sha256="78827898b7e3830eee2e388823b9180858279f77c5eda5aa1be173765c53ade5") version("21.8.29", sha256="e0bb4d9143c8540f9f46cbccac9ed0cbea12500a864e6954fce2fe94ea057a10") version("20.9.20", sha256="2144a77b739f8dd2f0dbe5b64d94cde0e916f55c4eb170facd168c0db7fc7970") version("19.8.1", sha256="3a5db6eabf91aec782b7f27b17a7f6b8ce2c9d8e648c0e9c0ff5d87277ba4d17") @@ -37,6 +39,12 @@ class Plasma(CMakePackage): url="https://github.com/icl-utk-edu/plasma/releases/download/17.01/plasma-17.01.tar.gz", ) + build_system( + conditional("makefile", when="@:17.1"), + conditional("cmake", when="@18.9:"), + default="cmake", + ) + variant("shared", default=True, description="Build shared library (disables static library)") variant("lua", default=False, description="Build Lua support for tuning tile sizes") @@ -81,18 +89,15 @@ class Plasma(CMakePackage): def patch(self): python("tools/generate_precisions.py") - @when("@18.9.0:") + +class CMakeBuilder(spack.build_systems.cmake.CMakeBuilder): def cmake_args(self): - options = list() - - options.extend( - [ - "-DBLAS_LIBRARIES=%s" % self.spec["blas"].libs.joined(";"), - "-DLAPACK_LIBRARIES=%s" % self.spec["lapack"].libs.joined(";"), - ] - ) - - options += ["-DBUILD_SHARED_LIBS=%s" % ("ON" if ("+shared" in self.spec) else "OFF")] + options = [ + self.define("BLAS_LIBRARIES", self.spec["blas"].libs.joined(";")), + self.define("LAPACK_LIBRARIES", self.spec["lapack"].libs.joined(";")), + self.define_from_variant("BUILD_SHARED_LIBS", "shared"), + self.define_from_variant("PLASMA_DETECT_LUA", "lua"), + ] for package, provider in ( ("openblas", "openblas"), @@ -101,32 +106,13 @@ def cmake_args(self): ): if package in self.spec: for lib in ("CBLAS", "LAPACKE"): - 
options.append("-D%s_PROVIDER=%s" % (lib, provider)) - - if "lua" in self.spec: - options.append("-DPLASMA_DETECT_LUA=TRUE") + options.append(self.define("{}_PROVIDER".format(lib), provider)) return options - # Before 18.9.0 it was an Makefile package - @when("@:17.1") - def cmake(self, spec, prefix): - pass - # Before 18.9.0 it was an Makefile package - @when("@:17.1") - def build(self, spec, prefix): - pass - - # Before 18.9.0 it was an Makefile package - @when("@:17.1") - def install(self, spec, prefix): - self.edit(spec, prefix) - make() - make("install") - - @when("@:17.1") - def edit(self, spec, prefix): +class MakefileBuilder(spack.build_systems.makefile.MakefileBuilder): + def edit(self, pkg, spec, prefix): # copy "make.inc.mkl-gcc" provided by default into "make.inc" open("make.inc", "w").write(open("make.inc.mkl-gcc").read()) diff --git a/var/spack/repos/builtin/packages/polypolish/package.py b/var/spack/repos/builtin/packages/polypolish/package.py new file mode 100644 index 00000000000..1b1e99f2828 --- /dev/null +++ b/var/spack/repos/builtin/packages/polypolish/package.py @@ -0,0 +1,29 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +from spack.package import * + + +class Polypolish(Package): + """Polypolish is a tool for polishing genome assemblies with short reads. + Unlike other tools in this category, Polypolish uses SAM files where each + read has been aligned to all possible locations (not just a single best + location). 
This allows it to repair errors in repeat regions that other + alignment-based polishers cannot fix.""" + + homepage = "https://github.com/rrwick/Polypolish" + url = "https://github.com/rrwick/Polypolish/archive/refs/tags/v0.5.0.tar.gz" + + version("0.5.0", sha256="183156093c03094290951f140010b3aef6222a672bf538e9136914178775fb1f") + + depends_on("rust") + depends_on("python@3.6:", type="run") + depends_on("bwa", type="run") + + def install(self, spec, prefix): + cargo = which("cargo") + cargo("install", "--root", prefix, "--path", ".") + install("scripts/polypolish_insert_filter.py", prefix.bin) diff --git a/var/spack/repos/builtin/packages/postgresql/package.py b/var/spack/repos/builtin/packages/postgresql/package.py index c162567fada..0fce4210b68 100644 --- a/var/spack/repos/builtin/packages/postgresql/package.py +++ b/var/spack/repos/builtin/packages/postgresql/package.py @@ -100,7 +100,7 @@ def install(self, spec, prefix): with working_dir(os.path.join("src", subdir)): make("install") else: - AutotoolsPackage.install(self, spec, prefix) + super(Postgresql, self).install(spec, prefix) def setup_run_environment(self, env): spec = self.spec diff --git a/var/spack/repos/builtin/packages/prod-util/package.py b/var/spack/repos/builtin/packages/prod-util/package.py index cafad8a75f6..5129d5d0ca9 100644 --- a/var/spack/repos/builtin/packages/prod-util/package.py +++ b/var/spack/repos/builtin/packages/prod-util/package.py @@ -15,7 +15,7 @@ class ProdUtil(CMakePackage): homepage = "https://github.com/NOAA-EMC/NCEPLIBS-prod_util" url = "https://github.com/NOAA-EMC/NCEPLIBS-prod_util/archive/refs/tags/v1.2.2.tar.gz" - maintainers = ["kgerheiser", "Hang-Lei-NOAA", "edwardhartnett"] + maintainers = ["AlexanderRichert-NOAA", "Hang-Lei-NOAA", "edwardhartnett"] version("1.2.2", sha256="c51b903ea5a046cb9b545b5c04fd28647c58b4ab6182e61710f0287846350ef8") diff --git a/var/spack/repos/builtin/packages/protobuf/package.py b/var/spack/repos/builtin/packages/protobuf/package.py index 
c0fbeeaa8c3..ab78a889445 100644 --- a/var/spack/repos/builtin/packages/protobuf/package.py +++ b/var/spack/repos/builtin/packages/protobuf/package.py @@ -9,12 +9,13 @@ from spack.package import * -class Protobuf(Package): +class Protobuf(CMakePackage): """Google's data interchange format.""" homepage = "https://developers.google.com/protocol-buffers" url = "https://github.com/protocolbuffers/protobuf/archive/v3.18.0.tar.gz" + version("3.21.7", sha256="ce2fbea3c78147a41b2a922485d283137845303e5e1b6cbd7ece94b96ade7031") version("3.21.5", sha256="d7d204a59fd0d2d2387bd362c2155289d5060f32122c4d1d922041b61191d522") version("3.21.4", sha256="85d42d4485f36f8cec3e475a3b9e841d7d78523cd775de3a86dba77081f4ca25") version("3.21.3", sha256="c29d8b4b79389463c546f98b15aa4391d4ed7ec459340c47bffe15db63eb9126") @@ -65,11 +66,6 @@ class Protobuf(Package): version("3.2.0", sha256="a839d3f1519ff9d68ab908de5a0f269650ef1fc501c10f6eefd4cae51d29b86f") version("3.1.0", sha256="fb2a314f4be897491bb2446697be693d489af645cb0e165a85e7e64e07eb134d") version("3.0.2", sha256="a0a265bcc9d4e98c87416e59c33afc37cede9fb277292523739417e449b18c1e") - version( - "2.5.0", - sha256="c2665a7aa2ac1a206e61b28e014486e3de59009ea2be2bde9182e0847f38b62f", - deprecated=True, - ) variant("shared", default=True, description="Enables the build of shared libraries") variant( @@ -79,12 +75,7 @@ class Protobuf(Package): values=("Debug", "Release", "RelWithDebInfo"), ) - depends_on("cmake", when="@3.0.2:", type="build") depends_on("zlib") - depends_on("autoconf", type="build", when="@2.5.0") - depends_on("automake", type="build", when="@2.5.0") - depends_on("libtool", type="build", when="@2.5.0") - depends_on("m4", type="build", when="@2.5.0") conflicts("%gcc@:4.6", when="@3.6.0:") # Requires c++11 conflicts("%gcc@:4.6", when="@3.2.0:3.3.0") # Breaks @@ -97,12 +88,6 @@ class Protobuf(Package): # See https://github.com/protocolbuffers/protobuf/pull/7197 patch("intel-v2.patch", when="@3.7:3.11.4 %intel") - patch( - 
"protoc2.5.0_aarch64.patch", - sha256="7b44fcdb794f421174d619f83584e00a36012a16da09079e2fad9c12f7337451", - when="@2.5.0 target=aarch64:", - ) - # See https://github.com/protocolbuffers/protobuf/issues/9916 patch( "https://github.com/protocolbuffers/protobuf/pull/9936.patch?full_index=1", @@ -133,28 +118,9 @@ def cmake_args(self): args.extend(["-DCMAKE_MACOSX_RPATH=ON"]) return args - @when("@3.0.2:") - def install(self, spec, prefix): - args = self.cmake_args() - args.extend(std_cmake_args) - - source_directory = join_path(self.stage.source_path, "cmake") - build_directory = join_path(source_directory, "build") - - with working_dir(build_directory, create=True): - cmake(source_directory, *args) - make() - make("install") - - def configure_args(self): - args = [] - args.append("--prefix=%s" % self.prefix) - return args - - @when("@2.5.0") - def install(self, spec, prefix): - args = self.configure_args() - autoreconf("-ifv") - configure(*args) - make() - make("install") + @property + def root_cmakelists_dir(self): + if self.spec.satisfies("@:3.20"): + return join_path(self.stage.source_path, "cmake") + else: + return self.stage.source_path diff --git a/var/spack/repos/builtin/packages/protobuf/protoc2.5.0_aarch64.patch b/var/spack/repos/builtin/packages/protobuf/protoc2.5.0_aarch64.patch deleted file mode 100644 index aa1ebc6a9ab..00000000000 --- a/var/spack/repos/builtin/packages/protobuf/protoc2.5.0_aarch64.patch +++ /dev/null @@ -1,113 +0,0 @@ -diff -uprN /src/google/protobuf/stubs/atomicops_internals_arm_gcc.h /src/google/protobuf/stubs/atomicops_internals_arm_gcc.h ---- /src/google/protobuf/subs/atomicops_internals_arm_gcc.h 2018-08-03 08:50:58.579413324 +0000 -+++ /src/google/protobuf/stubs/atomicops_internals_arm_gcc.h 2018-08-03 08:50:58.711413322 +0000 -@@ -68,6 +68,30 @@ inline Atomic32 NoBarrier_CompareAndSwap - } while (prev_value == old_value); - return prev_value; - } -+inline Atomic64 NoBarrier_CompareAndSwap(volatile Atomic64* ptr, -+ Atomic64 
old_value, -+ Atomic64 new_value) { -+ Atomic64 prev; -+ int32_t temp; -+ -+ __asm__ __volatile__ ( // NOLINT -+ "0: \n\t" -+ "ldxr %[prev], %[ptr] \n\t" -+ "cmp %[prev], %[old_value] \n\t" -+ "bne 1f \n\t" -+ "stxr %w[temp], %[new_value], %[ptr] \n\t" -+ "cbnz %w[temp], 0b \n\t" -+ "1: \n\t" -+ : [prev]"=&r" (prev), -+ [temp]"=&r" (temp), -+ [ptr]"+Q" (*ptr) -+ : [old_value]"IJr" (old_value), -+ [new_value]"r" (new_value) -+ : "cc", "memory" -+ ); // NOLINT -+ -+ return prev; -+} - - inline Atomic32 NoBarrier_AtomicExchange(volatile Atomic32* ptr, - Atomic32 new_value) { -@@ -105,6 +129,15 @@ inline Atomic32 Acquire_CompareAndSwap(v - return NoBarrier_CompareAndSwap(ptr, old_value, new_value); - } - -+inline Atomic64 Acquire_CompareAndSwap(volatile Atomic64* ptr, -+ Atomic64 old_value, -+ Atomic64 new_value) { -+ Atomic64 prev = NoBarrier_CompareAndSwap(ptr, old_value, new_value); -+ MemoryBarrier(); -+ -+ return prev; -+} -+ - inline Atomic32 Release_CompareAndSwap(volatile Atomic32* ptr, - Atomic32 old_value, - Atomic32 new_value) { -@@ -115,8 +148,11 @@ inline void NoBarrier_Store(volatile Ato - *ptr = value; - } - --inline void MemoryBarrier() { -+/*inline void MemoryBarrier() { - pLinuxKernelMemoryBarrier(); -+}*/ -+inline void MemoryBarrier() { -+ __asm__ __volatile__ ("dmb ish" ::: "memory"); // NOLINT - } - - inline void Acquire_Store(volatile Atomic32* ptr, Atomic32 value) { -@@ -129,6 +165,15 @@ inline void Release_Store(volatile Atomi - *ptr = value; - } - -+inline void Release_Store(volatile Atomic64* ptr, Atomic64 value) { -+ __asm__ __volatile__ ( // NOLINT -+ "stlr %x[value], %[ptr] \n\t" -+ : [ptr]"=Q" (*ptr) -+ : [value]"r" (value) -+ : "memory" -+ ); // NOLINT -+} -+ - inline Atomic32 NoBarrier_Load(volatile const Atomic32* ptr) { - return *ptr; - } -@@ -139,6 +184,19 @@ inline Atomic32 Acquire_Load(volatile co - return value; - } - -+inline Atomic64 Acquire_Load(volatile const Atomic64* ptr) { -+ Atomic64 value; -+ -+ __asm__ __volatile__ ( // 
NOLINT -+ "ldar %x[value], %[ptr] \n\t" -+ : [value]"=r" (value) -+ : [ptr]"Q" (*ptr) -+ : "memory" -+ ); // NOLINT -+ -+ return value; -+} -+ - inline Atomic32 Release_Load(volatile const Atomic32* ptr) { - MemoryBarrier(); - return *ptr; -diff -uprN /src/google/protobuf/stubs/platform_macros.h /src/google/protobuf/stubs/platform_macros.h ---- /src/google/protobuf/stubs/platform_macros.h 2018-08-03 08:50:58.543413325 +0000 -+++ /src/google/protobuf/stubs/platform_macros.h 2018-08-03 08:50:58.595413324 +0000 -@@ -57,6 +57,9 @@ - #elif defined(__ppc__) - #define GOOGLE_PROTOBUF_ARCH_PPC 1 - #define GOOGLE_PROTOBUF_ARCH_32_BIT 1 -+#elif defined(__aarch64__) -+#define GOOGLE_PROTOBUF_ARCH_ARM 1 -+#define GOOGLE_PROTOBUF_ARCH_64_BIT 1 - #else - #error Host architecture was not detected as supported by protobuf - #endif - diff --git a/var/spack/repos/builtin/packages/py-about-time/package.py b/var/spack/repos/builtin/packages/py-about-time/package.py new file mode 100644 index 00000000000..ba49abb99da --- /dev/null +++ b/var/spack/repos/builtin/packages/py-about-time/package.py @@ -0,0 +1,21 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +from spack.package import * + + +class PyAboutTime(PythonPackage): + """A cool helper for tracking time and throughput of + code blocks, with beautiful human friendly renditions.""" + + homepage = "https://github.com/rsalmei/about-time" + pypi = "about-time/about-time-4.1.0.tar.gz" + + version("4.1.0", sha256="963b1f3739b0c9732eb205031762b76f1291d89b5d0c8220a8d5b154e32ce650") + version("3.1.1", sha256="586b329450c9387d1ae8c42d2db4f5b4c57a54508d0f1b7bb00322ffd5ce9f9b") + + depends_on("python@3.7:3", type=("build", "run"), when="@4:") + depends_on("py-setuptools", type="build") diff --git a/var/spack/repos/builtin/packages/py-alive-progress/package.py b/var/spack/repos/builtin/packages/py-alive-progress/package.py new file mode 100644 index 00000000000..95f4f871690 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-alive-progress/package.py @@ -0,0 +1,25 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +from spack.package import * + + +class PyAliveProgress(PythonPackage): + """A new kind of Progress Bar, with real-time + throughput, ETA, and very cool animations!""" + + homepage = "https://github.com/rsalmei/alive-progress" + pypi = "alive-progress/alive-progress-2.4.1.tar.gz" + + version("2.4.1", sha256="089757c8197f27ad972ba27e1060f6db92368d83c736884e159034fd74865323") + version("1.6.2", sha256="642e1ce98becf226c8c36bf24e10221085998c5465a357a66fb83b7dc618b43e") + + depends_on("python@2.7:3.8", type=("build", "run")) + depends_on("python@3.6:3", type=("build", "run"), when="@2:") + depends_on("python@3.7:3", type=("build", "run"), when="@2.2:") + depends_on("py-setuptools", type="build") + depends_on("py-about-time@3.1.1", type=("build", "run"), when="@2.4.1:") + depends_on("py-grapheme@0.6.0", type=("build", "run"), when="@2.4.1:") diff --git a/var/spack/repos/builtin/packages/py-asn1crypto/package.py b/var/spack/repos/builtin/packages/py-asn1crypto/package.py index 5d5576cab41..0fbd15dadba 100644 --- a/var/spack/repos/builtin/packages/py-asn1crypto/package.py +++ b/var/spack/repos/builtin/packages/py-asn1crypto/package.py @@ -13,6 +13,7 @@ class PyAsn1crypto(PythonPackage): homepage = "https://github.com/wbond/asn1crypto" pypi = "asn1crypto/asn1crypto-0.22.0.tar.gz" + version("1.5.1", sha256="13ae38502be632115abf8a24cbe5f4da52e3b5231990aff31123c805306ccb9c") version("1.4.0", sha256="f4f6e119474e58e04a2b1af817eb585b4fd72bdd89b998624712b5c99be7641c") version("0.24.0", sha256="9d5c20441baf0cb60a4ac34cc447c6c189024b6b4c6cd7877034f4965c464e49") version("0.22.0", sha256="cbbadd640d3165ab24b06ef25d1dca09a3441611ac15f6a6b452474fdf0aed1a") diff --git a/var/spack/repos/builtin/packages/py-astroid/package.py b/var/spack/repos/builtin/packages/py-astroid/package.py index 0d28062c256..555ff3822cd 100644 --- a/var/spack/repos/builtin/packages/py-astroid/package.py +++ 
b/var/spack/repos/builtin/packages/py-astroid/package.py @@ -13,6 +13,8 @@ class PyAstroid(PythonPackage): homepage = "https://github.com/PyCQA/astroid" pypi = "astroid/astroid-2.8.3.tar.gz" + version("2.12.10", sha256="81f870105d892e73bf535da77a8261aa5bde838fa4ed12bb2f435291a098c581") + version("2.12.7", sha256="cd468be9d9d03d086d4d7e6643a59bfc025762d2c895e1e22cf21feced7bb148") version("2.11.6", sha256="4f933d0bf5e408b03a6feb5d23793740c27e07340605f236496cd6ce552043d6") version("2.11.5", sha256="f4e4ec5294c4b07ac38bab9ca5ddd3914d4bf46f9006eb5c0ae755755061044e") version("2.11.4", sha256="561dc6015eecce7e696ff7e3b40434bc56831afeff783f0ea853e19c4f635c06") @@ -40,17 +42,20 @@ class PyAstroid(PythonPackage): depends_on("python@3.5:", when="@2.3.3:", type=("build", "run")) depends_on("python@3.6:", when="@2.5.6:", type=("build", "run")) depends_on("python@3.6.2:", when="@2.11.4:", type=("build", "run")) + depends_on("python@3.7.2:", when="@2.12.7:", type=("build", "run")) depends_on("py-lazy-object-proxy", type=("build", "run")) # Starting with astroid 2.3.1, astroid's dependencies were restricted # to a given minor version, c.f. commit e1b4e11. 
depends_on("py-lazy-object-proxy@1.4.0:1.4", when="@2.3.1:2.7.2", type=("build", "run")) depends_on("py-lazy-object-proxy@1.4.0:", when="@2.7.3:", type=("build", "run")) - depends_on("py-six", type=("build", "run"), when="@:2.7.2") + depends_on("py-six", when="@:2.7.2", type=("build", "run")) depends_on("py-six@1.12:1", when="@2.3.3:2.7.2", type=("build", "run")) depends_on("py-wrapt", when="@:2.2", type=("build", "run")) depends_on("py-wrapt@1.11:1.12", when="@2.3.3:2.8.2", type=("build", "run")) depends_on("py-wrapt@1.11:1.13", when="@2.8.3:2.10", type=("build", "run")) - depends_on("py-wrapt@1.11:1", when="@2.11:", type=("build", "run")) + depends_on("py-wrapt@1.11:1", when="@2.11", type=("build", "run")) + depends_on("py-wrapt@1.14:1", when="@2.12.7: ^python@3.11:", type=("build", "run")) + depends_on("py-wrapt@1.11:1", when="@2.12.7: ^python@:3.10", type=("build", "run")) depends_on("py-enum34@1.1.3:", when="^python@:3.3", type=("build", "run")) depends_on("py-singledispatch", when="^python@:3.3", type=("build", "run")) depends_on("py-backports-functools-lru-cache", when="^python@:3.2", type=("build", "run")) @@ -60,4 +65,6 @@ class PyAstroid(PythonPackage): depends_on("py-typing-extensions@3.7.4:", when="@2.7.3: ^python@:3.7", type=("build", "run")) depends_on("py-typing-extensions@3.10:", when="@2.8.3: ^python@:3.9", type=("build", "run")) depends_on("py-setuptools@17.1:", type=("build", "run")) - depends_on("py-setuptools@20:", type=("build", "run"), when="@2.7.3:") + depends_on("py-setuptools@20:", when="@2.7.3:", type=("build", "run")) + depends_on("py-setuptools@62.6:62", when="@2.12.7:", type=("build", "run")) + depends_on("py-wheel@0.37.1:0.37", when="@2.12.7:", type="build") diff --git a/var/spack/repos/builtin/packages/py-async-lru/package.py b/var/spack/repos/builtin/packages/py-async-lru/package.py index 54ad4c173b9..37f99ce2cc6 100644 --- a/var/spack/repos/builtin/packages/py-async-lru/package.py +++ 
b/var/spack/repos/builtin/packages/py-async-lru/package.py @@ -10,11 +10,20 @@ class PyAsyncLru(PythonPackage): """Simple lru_cache for asyncio""" homepage = "https://github.com/wikibusiness/async_lru" - pypi = "async_lru/async_lru-1.0.2.tar.gz" + pypi = "async-lru/async-lru-1.0.2.tar.gz" maintainers = ["iarspider"] + version("1.0.3", sha256="c2cb9b2915eb14e6cf3e717154b40f715bf90e596d73623677affd0d1fbcd32a") version("1.0.2", sha256="baa898027619f5cc31b7966f96f00e4fc0df43ba206a8940a5d1af5336a477cb") depends_on("python@3.6:", type=("build", "run")) depends_on("py-setuptools", type="build") + + def url_for_version(self, version): + url = "https://files.pythonhosted.org/packages/source/a/{0}/{0}-{1}.tar.gz" + if version >= Version("1.0.3"): + name = "async-lru" + else: + name = "async_lru" + return url.format(name, version) diff --git a/var/spack/repos/builtin/packages/py-atomicwrites/package.py b/var/spack/repos/builtin/packages/py-atomicwrites/package.py index 65321fb2faf..7057832fd66 100644 --- a/var/spack/repos/builtin/packages/py-atomicwrites/package.py +++ b/var/spack/repos/builtin/packages/py-atomicwrites/package.py @@ -12,6 +12,7 @@ class PyAtomicwrites(PythonPackage): homepage = "https://github.com/untitaker/python-atomicwrites" pypi = "atomicwrites/atomicwrites-1.3.0.tar.gz" + version("1.4.1", sha256="81b2c9071a49367a7f770170e5eec8cb66567cfbbc8c73d20ce5ca4a8d71cf11") version("1.4.0", sha256="ae70396ad1a434f9c7046fd2dd196fc04b12f9e91ffb859164193be8b6168a7a") version("1.3.0", sha256="75a9445bac02d8d058d5e1fe689654ba5a6556a1dfd8ce6ec55a0ed79866cfa6") version("1.1.5", sha256="240831ea22da9ab882b551b31d4225591e5e447a68c5e188db5b89ca1d487585") diff --git a/var/spack/repos/builtin/packages/py-auditwheel/package.py b/var/spack/repos/builtin/packages/py-auditwheel/package.py new file mode 100644 index 00000000000..ad5936f292d --- /dev/null +++ b/var/spack/repos/builtin/packages/py-auditwheel/package.py @@ -0,0 +1,23 @@ +# Copyright 2013-2022 Lawrence Livermore 
National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +from spack.package import * + + +class PyAuditwheel(PythonPackage): + """Auditing and relabeling of PEP 600 manylinux_x_y, PEP 513 manylinux1, + PEP 571 manylinux2010 and PEP 599 manylinux2014 Linux wheels.""" + + homepage = "https://github.com/pypa/auditwheel" + pypi = "auditwheel/auditwheel-5.1.2.tar.gz" + + version("5.1.2", sha256="3ee5830014931ea84af5cd065c637b6614efa03d9b88bd8fbfc924e7ed01d6ba") + + depends_on("python@3.6:", type=("build", "run")) + depends_on("py-setuptools@45:", type="build") + depends_on("py-setuptools-scm@6.2:", type="build") + depends_on("py-pyelftools@0.24:", type=("build", "run")) + depends_on("py-importlib-metadata", when="^python@:3.7", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-bakta/package.py b/var/spack/repos/builtin/packages/py-bakta/package.py new file mode 100644 index 00000000000..50e683c86cf --- /dev/null +++ b/var/spack/repos/builtin/packages/py-bakta/package.py @@ -0,0 +1,36 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +from spack.package import * + + +class PyBakta(PythonPackage): + """Bakta: rapid & standardized annotation + of bacterial genomes, MAGs & plasmids""" + + homepage = "https://github.com/oschwengers/bakta" + pypi = "bakta/bakta-1.5.1.tar.gz" + + maintainers = ["oschwengers"] + + version("1.5.1", sha256="36781612c4eaa99e6e24a00e8ab5b27dadf21c98ae6d16432f3e78c96a4adb5d") + + depends_on("python@3.8:", type=("build", "run")) + depends_on("py-setuptools", type=("build", "run")) + depends_on("py-biopython@1.78:", type=("build", "run")) + depends_on("py-xopen@1.1.0:", type=("build", "run")) + depends_on("py-requests@2.25.1:", type=("build", "run")) + depends_on("py-alive-progress@1.6.2", type=("build", "run")) + depends_on("trnascan-se@2.0.8:", type=("build", "run")) + depends_on("aragorn@1.2.38:", type=("build", "run")) + depends_on("infernal@1.1.4:", type=("build", "run")) + depends_on("pilercr@1.06:", type=("build", "run")) + depends_on("prodigal@2.6.3:", type=("build", "run")) + depends_on("hmmer@3.3.2:", type=("build", "run")) + depends_on("diamond@2.0.14:", type=("build", "run")) + depends_on("blast-plus@2.12.0:", type=("build", "run")) + depends_on("amrfinder@3.10.23:", type=("build", "run")) + depends_on("py-deepsig-biocomp@1.2.5:", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-biopython/package.py b/var/spack/repos/builtin/packages/py-biopython/package.py index bf1c4803683..71f108068e2 100644 --- a/var/spack/repos/builtin/packages/py-biopython/package.py +++ b/var/spack/repos/builtin/packages/py-biopython/package.py @@ -10,18 +10,21 @@ class PyBiopython(PythonPackage): """A distributed collaborative effort to develop Python libraries and applications which address the needs of current and future work in bioinformatics. 
- """ homepage = "https://biopython.org/wiki/Main_Page" - url = "https://biopython.org/DIST/biopython-1.65.tar.gz" + pypi = "biopython/biopython-1.79.tar.gz" version("1.79", sha256="edb07eac99d3b8abd7ba56ff4bedec9263f76dfc3c3f450e7d2e2bcdecf8559b") version("1.78", sha256="1ee0a0b6c2376680fea6642d5080baa419fd73df104a62d58a8baf7a8bbe4564") version("1.73", sha256="70c5cc27dc61c23d18bb33b6d38d70edc4b926033aea3b7434737c731c94a5e0") version("1.70", sha256="4a7c5298f03d1a45523f32bae1fffcff323ea9dce007fb1241af092f5ab2e45b") - version("1.65", sha256="463cc81db84e9bfcdfb15629511c81ed556a6c0287e670dbfe80f03c65d2a88e") + version("1.65", sha256="6d591523ba4d07a505978f6e1d7fac57e335d6d62fb5b0bcb8c40bdde5c8998e") + depends_on("python@2.6:2.7,3.3:", type=("build", "run"), when="@1.63:1.68") + depends_on("python@2.7,3.3:", type=("build", "run"), when="@1.69") + depends_on("python@2.7,3.4:", type=("build", "run"), when="@1.70:1.74") + depends_on("python@2.7,3.5:", type=("build", "run"), when="@1.75:1.76") + depends_on("python@3.6:", type=("build", "run"), when="@1.77:") depends_on("py-numpy", type=("build", "run")) depends_on("py-setuptools", type="build") - depends_on("python@3.6:", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-bokeh/package.py b/var/spack/repos/builtin/packages/py-bokeh/package.py index bd6f2d8b4fb..aa2a793558e 100644 --- a/var/spack/repos/builtin/packages/py-bokeh/package.py +++ b/var/spack/repos/builtin/packages/py-bokeh/package.py @@ -12,6 +12,7 @@ class PyBokeh(PythonPackage): homepage = "https://github.com/bokeh/bokeh" pypi = "bokeh/bokeh-0.12.2.tar.gz" + version("2.4.3", sha256="ef33801161af379665ab7a34684f2209861e3aefd5c803a21fbbb99d94874b03") version("2.4.1", sha256="d0410717d743a0ac251e62480e2ea860a7341bdcd1dbe01499a904f233c90512") version("2.4.0", sha256="6fa00ed8baab5cca33f4175792c309fa2536eaae7e90abee884501ba8c90fddb") version("2.3.3", sha256="a5fdcc181835561447fcc5a371300973fce4114692d5853addec284d1cdeb677") @@ -26,22 +27,23 
@@ class PyBokeh(PythonPackage): depends_on("python@3.7:", type=("build", "run"), when="@2.4.0:") depends_on("py-requests@1.2.3:", type=("build", "run"), when="@0.12.2") - - depends_on("py-packaging@16.8:", type=("build", "run"), when="@1.3.4:") depends_on("py-six@1.5.2:", type=("build", "run"), when="@:1.3.4") - depends_on("py-pyyaml@3.10:", type=("build", "run")) depends_on("py-python-dateutil@2.1:", type=("build", "run"), when="@:2.3.3") depends_on("py-futures@3.0.3:", type=("build", "run"), when="@:1.3.4 ^python@2.7:2.8") - depends_on("pil@4.0:", type=("build", "run"), when="@1.3.4:") - depends_on("pil@7.1.0:", type=("build", "run"), when="@2.3.3:") - depends_on("py-jinja2@2.7:", type=("build", "run")) depends_on("py-jinja2@2.9:", type=("build", "run"), when="@2.3.3:") depends_on("py-numpy@1.7.1:", type=("build", "run")) depends_on("py-numpy@1.11.3:", type=("build", "run"), when="@2.3.3:") + depends_on("py-packaging@16.8:", type=("build", "run"), when="@1.3.4:") + + depends_on("pil@4.0:", type=("build", "run"), when="@1.3.4:") + depends_on("pil@7.1.0:", type=("build", "run"), when="@2.3.3:") + + depends_on("py-pyyaml@3.10:", type=("build", "run")) + depends_on("py-tornado@4.3:", type=("build", "run")) depends_on("py-tornado@5.1:", type=("build", "run"), when="@2.3.3:") diff --git a/var/spack/repos/builtin/packages/py-bottle/package.py b/var/spack/repos/builtin/packages/py-bottle/package.py index ff79fc62e51..90cd50a7d54 100644 --- a/var/spack/repos/builtin/packages/py-bottle/package.py +++ b/var/spack/repos/builtin/packages/py-bottle/package.py @@ -13,6 +13,7 @@ class PyBottle(PythonPackage): homepage = "https://github.com/bottlepy/bottle" url = "https://github.com/bottlepy/bottle/archive/0.12.18.tar.gz" + version("0.12.23", sha256="f38c26395736ae4653cbeb94087d3bd1d2e1ad0c29b1d3e5384f5db20b63bc98") version("0.12.19", sha256="b97277f8e87d452a0aa5fbcd16cd604a189e2cc17fdb2d4eaf6baa732f8d111b") version("0.12.18", 
sha256="176721f1e26082c66fd4df76f31800933e4bb36de6814b0fda3851cb409a95e6") version("0.12.17", sha256="7df26ca1789aa0693277c4a86d564524bff03e5d3132d9405946c58739190928") diff --git a/var/spack/repos/builtin/packages/py-checkm-genome/package.py b/var/spack/repos/builtin/packages/py-checkm-genome/package.py index f544f92f2ca..7d6d73d34e1 100644 --- a/var/spack/repos/builtin/packages/py-checkm-genome/package.py +++ b/var/spack/repos/builtin/packages/py-checkm-genome/package.py @@ -13,18 +13,26 @@ class PyCheckmGenome(PythonPackage): homepage = "https://ecogenomics.github.io/CheckM" pypi = "checkm-genome/checkm-genome-1.0.11.tar.gz" + version("1.2.1", sha256="33907aa7bbf029f8345e33df80d5c89b7a719041f55ece4f7470cd061c8eff76") version("1.0.13", sha256="ffb7e4966c0fac07c7e6e7db6f6eb5b48587fa83987f8a68efbaff2afb7da82e") version("1.0.11", sha256="e475d9817d12fa771dbccc80f47758b742fc67c25261dc8ca0c0dc898c2a5190") # pip silently replaces distutils with setuptools - depends_on("py-setuptools", type="build") + + depends_on("python@2.7.0:2.7", type=("build", "run"), when="@:1.0.18") + depends_on("python@3:", type=("build", "run"), when="@1.1.0:") + depends_on("py-setuptools", type=("build", "run")) depends_on("hmmer@3.1b1:", type=("build", "run")) depends_on("pplacer", type=("build", "run")) depends_on("prodigal@2.6.1:", type=("build", "run")) - depends_on("python@2.7.0:2.7", type=("build", "run")) depends_on("py-backports-functools-lru-cache", type=("build", "run"), when="^python@:3.2") - depends_on("py-numpy@1.8.0:", type=("build", "run")) - depends_on("py-scipy@0.9.0:", type=("build", "run")) - depends_on("py-matplotlib@1.3.1:2.2.3", type=("build", "run")) - depends_on("py-pysam@0.8.3:", type=("build", "run")) - depends_on("py-dendropy@4.0.0:", type=("build", "run")) + depends_on("py-numpy@1.8.0:", type=("build", "run"), when="@0.9.5:1.0.18") + depends_on("py-numpy@1.21.3:", type=("build", "run"), when="@1.2.0:") + depends_on("py-scipy@0.9.0:", type=("build", "run"), 
when="@0.9.5:1.0.18") + depends_on("py-scipy@1.7.3:", type=("build", "run"), when="@1.2.0") + depends_on("py-matplotlib@1.3.1:", type=("build", "run"), when="@0.9.5:1.0.18") + depends_on("py-matplotlib@3.5.1:", type=("build", "run"), when="@1.2.0:") + depends_on("py-pysam@0.8.3:", type=("build", "run"), when="@1.0.5:1.0.18") + depends_on("py-pysam@0.19.0:", type=("build", "run"), when="@1.2.0:") + depends_on("py-dendropy@4.0.0:", type=("build", "run"), when="@1.0.0:1.0.18") + depends_on("py-dendropy@4.5.2:", type=("build", "run"), when="@1.2.0:") diff --git a/var/spack/repos/builtin/packages/py-cloudpickle/package.py b/var/spack/repos/builtin/packages/py-cloudpickle/package.py index 490a532d667..b4bbc9ad190 100644 --- a/var/spack/repos/builtin/packages/py-cloudpickle/package.py +++ b/var/spack/repos/builtin/packages/py-cloudpickle/package.py @@ -12,10 +12,12 @@ class PyCloudpickle(PythonPackage): homepage = "https://github.com/cloudpipe/cloudpickle" pypi = "cloudpickle/cloudpickle-0.5.2.tar.gz" + version("2.2.0", sha256="3f4219469c55453cfe4737e564b67c2a149109dabf7f242478948b895f61106f") version("1.6.0", sha256="9bc994f9e9447593bd0a45371f0e7ac7333710fcf64a4eb9834bf149f4ef2f32") version("1.2.1", sha256="603244e0f552b72a267d47a7d9b347b27a3430f58a0536037a290e7e0e212ecf") version("1.1.1", sha256="7d43c4d0c7e9735ee8a352c96f84031dabd6676170c4e5e0585a469cc4769f22") version("0.5.2", sha256="b0e63dd89ed5285171a570186751bc9b84493675e99e12789e9a5dc5490ef554") depends_on("python@3.5:", type=("build", "run"), when="@1.6.0:") + depends_on("python@3.6:", type=("build", "run"), when="@2.2.0:") depends_on("py-setuptools", type="build") diff --git a/var/spack/repos/builtin/packages/py-clustershell/package.py b/var/spack/repos/builtin/packages/py-clustershell/package.py index 2a4fd640a58..6504f8aaedf 100644 --- a/var/spack/repos/builtin/packages/py-clustershell/package.py +++ b/var/spack/repos/builtin/packages/py-clustershell/package.py @@ -12,9 +12,13 @@ class 
PyClustershell(PythonPackage): """ homepage = "https://cea-hpc.github.io/clustershell/" - url = "https://github.com/cea-hpc/clustershell/archive/v1.8.tar.gz" + url = "https://github.com/cea-hpc/clustershell/archive/v1.8.4.tar.gz" + version("1.8.4", sha256="763793f729bd1c275361717c540e01ad5fe536119eca92f14077c0995739b9d7") + version("1.8.3", sha256="86b0d524e5e50c0a15faec01d8642f0ff12ba78d50b7e7b660261be5d53fed9c") + version("1.8.2", sha256="abf5ed23b6adfc802ee65aa0208c697f617e5fb8fd0d8cb0100ee337e2721796") + version("1.8.1", sha256="0c3da87108de8b735f40b5905b8dcd8084a234849aee2a8b8d2e20b99b57100c") version("1.8", sha256="ad5a13e2d107b4095229810c35365e22ea94dfd2baf4fdcfcc68ce58ee37cee3") depends_on("py-setuptools", type="build") - depends_on("py-pyyaml") + depends_on("py-pyyaml", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-damask/package.py b/var/spack/repos/builtin/packages/py-damask/package.py index 659a3b3bc53..e7b44821bfd 100644 --- a/var/spack/repos/builtin/packages/py-damask/package.py +++ b/var/spack/repos/builtin/packages/py-damask/package.py @@ -15,6 +15,9 @@ class PyDamask(PythonPackage): maintainers = ["MarDiehl"] + version( + "3.0.0-alpha7", sha256="442b06b824441293e72ff91b211a555c5d497aedf62be1c4332c426558b848a4" + ) version( "3.0.0-alpha6", sha256="de6748c285558dec8f730c4301bfa56b4078c130ff80e3095faf76202f8d2109" ) @@ -35,4 +38,6 @@ class PyDamask(PythonPackage): depends_on("py-matplotlib", type=("build", "run")) depends_on("py-pyyaml", type=("build", "run")) + patch("setup.patch", when="@3.0.0-alpha7") + build_directory = "python" diff --git a/var/spack/repos/builtin/packages/py-damask/setup.patch b/var/spack/repos/builtin/packages/py-damask/setup.patch new file mode 100644 index 00000000000..e30bcefeefa --- /dev/null +++ b/var/spack/repos/builtin/packages/py-damask/setup.patch @@ -0,0 +1,11 @@ +--- damask.orig/python/setup.cfg 2022-10-10 11:34:22.934631052 +0200 ++++ damask/python/setup.cfg 2022-10-10 11:54:32.197194977 
+0200 +@@ -6,7 +6,7 @@ + url = https://damask.mpie.de + description = DAMASK processing tools + long_description = Pre- and post-processing tools for DAMASK +-license: AGPL3 ++license = AGPL3 + classifiers = + Intended Audience :: Science/Research + Topic :: Scientific/Engineering diff --git a/var/spack/repos/builtin/packages/py-dask/package.py b/var/spack/repos/builtin/packages/py-dask/package.py index c28a8b57479..5e117ceb108 100644 --- a/var/spack/repos/builtin/packages/py-dask/package.py +++ b/var/spack/repos/builtin/packages/py-dask/package.py @@ -138,18 +138,20 @@ class PyDask(PythonPackage): # Requirements for dask.distributed depends_on("py-dill", type=("build", "run"), when="@:0.7.5 +distributed") depends_on("py-pyzmq", type=("build", "run"), when="@:0.7.5 +distributed") - depends_on("py-distributed", type=("build", "run"), when="@0.8.2: +distributed") - depends_on("py-distributed@1.9:", type=("build", "run"), when="@0.9.0: +distributed") - depends_on("py-distributed@1.10:", type=("build", "run"), when="@0.10.0: +distributed") - depends_on("py-distributed@1.14:", type=("build", "run"), when="@0.12.0: +distributed") - depends_on("py-distributed@1.15:", type=("build", "run"), when="@0.13.0: +distributed") - depends_on("py-distributed@1.16:", type=("build", "run"), when="@0.14.1: +distributed") - depends_on("py-distributed@1.20:", type=("build", "run"), when="@0.16.0: +distributed") - depends_on("py-distributed@1.21:", type=("build", "run"), when="@0.17.0: +distributed") - depends_on("py-distributed@1.22:", type=("build", "run"), when="@0.18.0: +distributed") - depends_on("py-distributed@2.0:", type=("build", "run"), when="@2.0.0: +distributed") - depends_on("py-distributed@2020.12.0:", type=("build", "run"), when="@2020.12.0: +distributed") - depends_on("py-distributed@2021.6.2:", type=("build", "run"), when="@2021.6.2: +distributed") + depends_on("py-distributed@:2021.8.0", type=("build", "run"), when="@0.8.2: +distributed") + 
depends_on("py-distributed@1.9:2021.8.0", type=("build", "run"), when="@0.9.0: +distributed") + depends_on("py-distributed@1.10:2021.8.0", type=("build", "run"), when="@0.10.0: +distributed") + depends_on("py-distributed@1.14:2021.8.0", type=("build", "run"), when="@0.12.0: +distributed") + depends_on("py-distributed@1.15:2021.8.0", type=("build", "run"), when="@0.13.0: +distributed") + depends_on("py-distributed@1.16:2021.8.0", type=("build", "run"), when="@0.14.1: +distributed") + depends_on("py-distributed@1.20:2021.8.0", type=("build", "run"), when="@0.16.0: +distributed") + depends_on("py-distributed@1.21:2021.8.0", type=("build", "run"), when="@0.17.0: +distributed") + depends_on("py-distributed@1.22:2021.8.0", type=("build", "run"), when="@0.18.0: +distributed") + depends_on("py-distributed@2.0:2021.8.0", type=("build", "run"), when="@2.0.0: +distributed") + depends_on( + "py-distributed@2020.12.0:2021.8.0", type=("build", "run"), when="@2020.12.0: +distributed" + ) + depends_on("py-distributed@2021.6.2", type=("build", "run"), when="@2021.6.2 +distributed") # Requirements for dask.diagnostics depends_on("py-bokeh@1.0.0:", type=("build", "run"), when="@2.0.0: +diagnostics") diff --git a/var/spack/repos/builtin/packages/py-deepsig-biocomp/package.py b/var/spack/repos/builtin/packages/py-deepsig-biocomp/package.py new file mode 100644 index 00000000000..200812735e8 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-deepsig-biocomp/package.py @@ -0,0 +1,36 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +from spack.package import * + + +class PyDeepsigBiocomp(PythonPackage): + """DeepSig - Predictor of signal peptides + in proteins based on deep learning""" + + homepage = "https://deepsig.biocomp.unibo.it" + + url = "https://github.com/BolognaBiocomp/deepsig/archive/refs/tags/v1.2.5.tar.gz" + + version("1.2.5", sha256="e954b815d63c221c564c7d3fe27123d7cd2c39b191d6107369ab095d506496e0") + + depends_on("python@3.8", type=("build", "run")) + depends_on("py-setuptools", type="build") + depends_on("py-biopython@1.78:", type=("build", "run")) + depends_on("py-keras@2.4.3", type=("build", "run")) + depends_on("py-tensorflow@2.2.0", type=("build", "run")) + depends_on("py-tensorboard", type=("build", "run")) + + @run_after("install") + def create_share_folder(self): + share_dir = join_path(self.prefix, "share", "deepsig") + mkdirp(share_dir) + mv = which("mv") + for d in ("models", "tools"): + mv(d, share_dir) + + def setup_run_environment(self, env): + env.set("DEEPSIG_ROOT", self.prefix.share.deepsig) diff --git a/var/spack/repos/builtin/packages/py-dm-tree/package.py b/var/spack/repos/builtin/packages/py-dm-tree/package.py index 36dcdc910f2..1c8b7eca36d 100644 --- a/var/spack/repos/builtin/packages/py-dm-tree/package.py +++ b/var/spack/repos/builtin/packages/py-dm-tree/package.py @@ -26,20 +26,23 @@ class PyDmTree(PythonPackage): depends_on("bazel", type="build") depends_on("py-six@1.12.0:", type=("build", "run")) + # This is set later + tmp_path = None + @run_after("install") def clean(self): - remove_linked_tree(self.tmp_path) + remove_linked_tree(PyDmTree.tmp_path) def patch(self): - self.tmp_path = tempfile.mkdtemp(prefix="spack") - env["TEST_TMPDIR"] = self.tmp_path - env["HOME"] = self.tmp_path + PyDmTree.tmp_path = tempfile.mkdtemp(prefix="spack") + env["TEST_TMPDIR"] = PyDmTree.tmp_path + env["HOME"] = PyDmTree.tmp_path args = [ # Don't allow user or system .bazelrc to override build settings 
"'--nohome_rc',\n", "'--nosystem_rc',\n", # Bazel does not work properly on NFS, switch to /tmp - "'--output_user_root={0}',\n".format(self.tmp_path), + "'--output_user_root={0}',\n".format(PyDmTree.tmp_path), "'build',\n", # Spack logs don't handle colored output well "'--color=no',\n", diff --git a/var/spack/repos/builtin/packages/py-drep/package.py b/var/spack/repos/builtin/packages/py-drep/package.py new file mode 100644 index 00000000000..a7ec0b3d002 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-drep/package.py @@ -0,0 +1,44 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +from spack.package import * + + +class PyDrep(PythonPackage): + """dRep is a python program for rapidly comparing large numbers of genomes. + dRep can also "de-replicate" a genome set by identifying groups of highly + similar genomes and choosing the best representative genome for each + genome set.""" + + homepage = "https://github.com/MrOlm/drep" + pypi = "drep/drep-3.4.0.tar.gz" + + version("3.4.0", sha256="a6533eb585122c1ee66ae622b1b97450a3e1e493a3c3c1d55e79a580d5c46d40") + + variant("fastani", default=True, description="Enable fastANI support") + variant("py-checkm-genome", default=True, description="Enable CheckM support") + variant("anicalculator", default=True, description="Enable gDNA support") + variant("prodigal", default=True, description="Used with both checkM and gANI") + + depends_on("py-setuptools", type="build") + depends_on("py-numpy", type=("build", "run")) + depends_on("py-pandas", type=("build", "run")) + depends_on("py-seaborn", type=("build", "run")) + depends_on("py-matplotlib", type=("build", "run")) + depends_on("py-biopython", type=("build", "run")) + depends_on("py-scikit-learn", type=("build", "run")) + depends_on("py-tqdm", type=("build", "run")) + depends_on("py-pytest", type=("build", "run")) 
+ # Non-python dependencies + # https://drep.readthedocs.io/en/latest/installation.html#dependencies + # essential dependencies + depends_on("mash@1.1.1:", type="run") + depends_on("mummer@3.23:", type="run") + # recommended dependencies + depends_on("fastani", type="run", when="+fastani") + depends_on("py-checkm-genome@1.0.7:", type="run", when="+py-checkm-genome") + depends_on("anicalculator@1:", type="run", when="+anicalculator") + depends_on("prodigal@2.6.3:", type="run", when="+prodigal") diff --git a/var/spack/repos/builtin/packages/py-elephant/package.py b/var/spack/repos/builtin/packages/py-elephant/package.py index cdfc9092ebf..33363ac6925 100644 --- a/var/spack/repos/builtin/packages/py-elephant/package.py +++ b/var/spack/repos/builtin/packages/py-elephant/package.py @@ -16,6 +16,7 @@ class PyElephant(PythonPackage): # list of GitHub accounts to notify when the package is updated. maintainers = ["Moritz-Alexander-Kern"] + version("0.11.2", sha256="f8759fff0bbb136ae4ffc8d1eacadeea8ba56610d705c3bf207de87ada3ba240") version("0.11.1", sha256="d604a202583440fdf9d95d42cef50a410bd74fcaaa1a925b139435f27ab012ef") version("0.11.0", sha256="7b547964dbd196361edc922db2c5a7c0c886ef1effcca6c6dc7adb06f966a3be") version("0.10.0", sha256="7e69a113475e4db01b3563328953c037d37f1597d9f2edf0d51fb65e9aebf096") diff --git a/var/spack/repos/builtin/packages/py-execnet/package.py b/var/spack/repos/builtin/packages/py-execnet/package.py index d64b3131e4b..2e32631d711 100644 --- a/var/spack/repos/builtin/packages/py-execnet/package.py +++ b/var/spack/repos/builtin/packages/py-execnet/package.py @@ -14,10 +14,12 @@ class PyExecnet(PythonPackage): homepage = "https://codespeak.net/execnet" pypi = "execnet/execnet-1.7.1.tar.gz" + version("1.9.0", sha256="8f694f3ba9cc92cab508b152dcfe322153975c29bda272e2fd7f3f00f36e47c5") version("1.7.1", sha256="cacb9df31c9680ec5f95553976c4da484d407e85e41c83cb812aa014f0eddc50") version("1.4.1", 
sha256="f66dd4a7519725a1b7e14ad9ae7d3df8e09b2da88062386e08e941cafc0ef3e6") depends_on("python@2.7:2.8,3.4:", type=("build", "run")) + depends_on("python@2.7:2.8,3.5:", type=("build", "run"), when="@1.9:") depends_on("py-setuptools", type="build") depends_on("py-setuptools-scm", type="build") - depends_on("py-apipkg@1.4:", type=("build", "run")) + depends_on("py-apipkg@1.4:", type=("build", "run"), when="@:1.7") diff --git a/var/spack/repos/builtin/packages/py-exhale/package.py b/var/spack/repos/builtin/packages/py-exhale/package.py new file mode 100644 index 00000000000..06be8abaef0 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-exhale/package.py @@ -0,0 +1,27 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PyExhale(PythonPackage): + """Automatic C++ library api documentation generation: breathe doxygen in + and exhale it out.""" + + homepage = "https://github.com/svenevs/exhale" + pypi = "exhale/exhale-0.3.6.tar.gz" + + maintainers = ["svenevs"] + + version("0.3.6", sha256="ab41be313e1236bd4386e4696fb35f37ce8103c2059cf8d1f083da5411bb74d7") + + depends_on("python@3.7:", type=("build", "run")) + depends_on("py-setuptools@42:", type="build") + depends_on("py-breathe@4.32.0:", type="build") + depends_on("py-docutils@0.12:", type="build") + depends_on("py-sphinx@3:4", type="build") + depends_on("py-beautifulsoup4", type=("build", "run")) + depends_on("py-lxml", type=("build", "run")) + depends_on("py-six", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-fiona/package.py b/var/spack/repos/builtin/packages/py-fiona/package.py index 5d7dc23759f..a43daa0bb65 100644 --- a/var/spack/repos/builtin/packages/py-fiona/package.py +++ b/var/spack/repos/builtin/packages/py-fiona/package.py @@ -16,6 +16,7 @@ class PyFiona(PythonPackage): maintainers = 
["adamjstewart"] version("master", branch="master") + version("1.8.22", sha256="a82a99ce9b3e7825740157c45c9fb2259d4e92f0a886aaac25f0db40ffe1eea3") version("1.8.21", sha256="3a0edca2a7a070db405d71187214a43d2333a57b4097544a3fcc282066a58bfc") version("1.8.20", sha256="a70502d2857b82f749c09cb0dea3726787747933a2a1599b5ab787d74e3c143b") version("1.8.18", sha256="b732ece0ff8886a29c439723a3e1fc382718804bb057519d537a81308854967a") @@ -28,7 +29,7 @@ class PyFiona(PythonPackage): depends_on("gdal@1.11:", type=("build", "link", "run"), when="@1.9:") depends_on("gdal@1.8:", type=("build", "link", "run")) - depends_on("py-cython", type="build", when="@master") + depends_on("py-cython@0.29.29:", type="build", when="@master") depends_on("py-attrs@17:", type=("build", "run")) depends_on("py-certifi", type=("build", "run"), when="@1.8.18:") depends_on("py-click@4:", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-fitter/package.py b/var/spack/repos/builtin/packages/py-fitter/package.py new file mode 100644 index 00000000000..b3640e2f617 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-fitter/package.py @@ -0,0 +1,29 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) +from spack.package import * + + +class PyFitter(PythonPackage): + """fitter package provides a simple class to identify the distribution + from which a data samples is generated from. 
It uses 80 distributions + from Scipy and allows you to plot the results to check what is the + most probable distribution and the best parameters.""" + + homepage = "https://github.com/cokelaer/fitter" + pypi = "fitter/fitter-1.5.1.tar.gz" + + maintainers = ["carsonwoods"] + + version("1.5.1", sha256="893b35ad0a84c3b96b63ec203a6a79effdba98777aed966ae61709f5e1e8cf99") + + depends_on("py-setuptools", type="build") + + depends_on("py-pandas", type=("build", "run")) + depends_on("py-numpy", type=("build", "run")) + depends_on("py-scipy@0.18:", type=("build", "run")) + depends_on("py-tqdm", type=("build", "run")) + depends_on("py-joblib", type=("build", "run")) + depends_on("py-click", type=("build", "run")) + depends_on("py-matplotlib", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-gcovr/package.py b/var/spack/repos/builtin/packages/py-gcovr/package.py index 5fb58361d32..9a807854572 100644 --- a/var/spack/repos/builtin/packages/py-gcovr/package.py +++ b/var/spack/repos/builtin/packages/py-gcovr/package.py @@ -15,9 +15,14 @@ class PyGcovr(PythonPackage): homepage = "https://gcovr.com/" pypi = "gcovr/gcovr-4.2.tar.gz" + version("5.2", sha256="217195085ec94346291a87b7b1e6d9cfdeeee562b3e0f9a32b25c9530b3bce8f") version("4.2", sha256="5aae34dc81e51600cfecbbbce3c3a80ce3f7548bc0aa1faa4b74ecd18f6fca3f") - depends_on("python@2.7:,3.5:", type=("build", "run")) + depends_on("python@3.7:", when="@5.1:", type=("build", "run")) + depends_on("python@3.6:", when="@5.0", type=("build", "run")) + depends_on("python@2.7:2,3.5:", when="@:4", type=("build", "run")) + depends_on("py-setuptools", type=("build", "run")) + depends_on("py-jinja2", type=("build", "run")) depends_on("py-lxml", type=("build", "run")) - depends_on("py-setuptools", type=("build", "run")) + depends_on("py-pygments", when="@5:", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-gitpython/package.py b/var/spack/repos/builtin/packages/py-gitpython/package.py index 
6903749062a..742c9ba5c3a 100644 --- a/var/spack/repos/builtin/packages/py-gitpython/package.py +++ b/var/spack/repos/builtin/packages/py-gitpython/package.py @@ -12,6 +12,7 @@ class PyGitpython(PythonPackage): homepage = "https://gitpython.readthedocs.org" pypi = "GitPython/GitPython-3.1.12.tar.gz" + version("3.1.27", sha256="1c885ce809e8ba2d88a29befeb385fcea06338d3640712b59ca623c220bb5704") version("3.1.24", sha256="df83fdf5e684fef7c6ee2c02fc68a5ceb7e7e759d08b694088d0cacb4eba59e5") version("3.1.23", sha256="aaae7a3bfdf0a6db30dc1f3aeae47b71cd326d86b936fe2e158aa925fdf1471c") version("3.1.22", sha256="e1589f27c3cd1f33b22db1df194201b5abca6b4cc5450f13f9c371e099c1b24f") @@ -49,5 +50,8 @@ class PyGitpython(PythonPackage): "py-typing-extensions@3.7.4.0:", type=("build", "run"), when="@3.1.16: ^python@:3.7" ) depends_on( - "py-typing-extensions@3.7.4.3:", type=("build", "run"), when="@3.1.19: ^python@:3.10" + "py-typing-extensions@3.7.4.3:", type=("build", "run"), when="@3.1.19:3.1.26 ^python@:3.10" + ) + depends_on( + "py-typing-extensions@3.7.4.3:", type=("build", "run"), when="@3.1.27: ^python@:3.7" ) diff --git a/var/spack/repos/builtin/packages/py-grapheme/package.py b/var/spack/repos/builtin/packages/py-grapheme/package.py new file mode 100644 index 00000000000..d6fd724cccb --- /dev/null +++ b/var/spack/repos/builtin/packages/py-grapheme/package.py @@ -0,0 +1,21 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +from spack.package import * + + +class PyGrapheme(PythonPackage): + """A Python package for working with user perceived characters. 
More + specifically, string manipulation and calculation functions for working + with grapheme cluster groups (graphemes) as defined by the + Unicode Standard Annex #29.""" + + homepage = "https://github.com/alvinlindstam/grapheme" + pypi = "grapheme/grapheme-0.6.0.tar.gz" + + version("0.6.0", sha256="44c2b9f21bbe77cfb05835fec230bd435954275267fea1858013b102f8603cca") + + depends_on("py-setuptools", type="build") diff --git a/var/spack/repos/builtin/packages/py-grpcio-tools/package.py b/var/spack/repos/builtin/packages/py-grpcio-tools/package.py index 78c4339279f..21247b745a1 100644 --- a/var/spack/repos/builtin/packages/py-grpcio-tools/package.py +++ b/var/spack/repos/builtin/packages/py-grpcio-tools/package.py @@ -12,14 +12,17 @@ class PyGrpcioTools(PythonPackage): homepage = "https://grpc.io/" pypi = "grpcio-tools/grpcio-tools-1.42.0.tar.gz" + version("1.48.1", sha256="1178f2ea531f80cc2027ec64728df6ffc8e98cf1df61652a496eafd612127183") version("1.42.0", sha256="d0a0daa82eb2c2fb8e12b82a458d1b7c5516fe1135551da92b1a02e2cba93422") version("1.39.0", sha256="39dfe7415bc0d3860fdb8dd90607594b046b88b57dbe64284efa4820f951c805") depends_on("python@3.6:", type=("build", "run")) depends_on("py-setuptools", type="build") + depends_on("py-protobuf@3.12.0:3", when="@1.48.1:", type=("build", "run")) depends_on("py-protobuf@3.5.0.post1:3", type=("build", "run")) - depends_on("py-grpcio@1.42.0:", type=("build", "run"), when="@1.42.0:") - depends_on("py-grpcio@1.39.0:", type=("build", "run"), when="@1.39.0:1.41") + depends_on("py-grpcio@1.48.1:", when="@1.48.1:", type=("build", "run")) + depends_on("py-grpcio@1.42.0:", when="@1.42.0:", type=("build", "run")) + depends_on("py-grpcio@1.39.0:", when="@1.39.0:1.41", type=("build", "run")) depends_on("py-cython@0.23:", type="build") depends_on("openssl") depends_on("zlib") diff --git a/var/spack/repos/builtin/packages/py-gym/package.py b/var/spack/repos/builtin/packages/py-gym/package.py index 0d610934ab2..3614c7a7908 100644 --- 
a/var/spack/repos/builtin/packages/py-gym/package.py +++ b/var/spack/repos/builtin/packages/py-gym/package.py @@ -14,16 +14,16 @@ class PyGym(PythonPackage): environments.""" homepage = "https://github.com/openai/gym" - pypi = "gym/0.18.0.tar.gz" + pypi = "gym/gym-0.18.0.tar.gz" + version("0.19.0", sha256="940069b983806e1ccc400fa6d47b4e34e462accf6a4fb0acb0a5e509ad0f502d") version("0.18.0", sha256="a0dcd25c1373f3938f4cb4565f74f434fba6faefb73a42d09c9dddd0c08af53e") depends_on("python@3.6:", type=("build", "run")) depends_on("py-setuptools", type="build") - depends_on("py-scipy", type=("build", "run")) - depends_on("py-numpy@1.10.4:", type=("build", "run")) + depends_on("py-scipy", type=("build", "run"), when="@0.18.0") + depends_on("py-numpy@1.10.4:", type=("build", "run"), when="@0.18.0") + depends_on("py-numpy@1.18.0:", type=("build", "run"), when="@0.19.0") depends_on("py-pyglet@1.4.0:1.5.0", type=("build", "run"), when="@0.18.0") - depends_on("py-pyglet@1.4.0:1.5.15", type=("build", "run"), when="@0.18.1") - depends_on("pil@:8.2.0", type=("build", "run"), when="@0.18.1") depends_on("pil@:7.2.0", type=("build", "run"), when="@0.18.0") depends_on("py-cloudpickle@1.2.0:1.6", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-horovod/package.py b/var/spack/repos/builtin/packages/py-horovod/package.py index 866ba430afd..ef3c1415b2c 100644 --- a/var/spack/repos/builtin/packages/py-horovod/package.py +++ b/var/spack/repos/builtin/packages/py-horovod/package.py @@ -17,6 +17,8 @@ class PyHorovod(PythonPackage, CudaPackage): maintainers = ["adamjstewart", "aweits", "tgaddair"] version("master", branch="master", submodules=True) + version("0.26.1", tag="v0.26.1", submodules=True) + version("0.26.0", tag="v0.26.0", submodules=True) version("0.25.0", tag="v0.25.0", submodules=True) version("0.24.3", tag="v0.24.3", submodules=True) version("0.24.2", tag="v0.24.2", submodules=True) @@ -86,6 +88,7 @@ class PyHorovod(PythonPackage, CudaPackage): 
depends_on("py-pyyaml", type=("build", "run")) depends_on("py-six", type=("build", "run"), when="@:0.19") depends_on("py-dataclasses", type=("build", "run"), when="@0.20: ^python@:3.6") + depends_on("py-packaging", type=("build", "run"), when="@0.26:") # Framework dependencies depends_on("py-tensorflow@1.1.0:", type=("build", "link", "run"), when="frameworks=tensorflow") @@ -115,6 +118,7 @@ class PyHorovod(PythonPackage, CudaPackage): ) depends_on("py-petastorm@0.9.8:", type=("build", "run"), when="frameworks=spark @0.21.1:") depends_on("py-petastorm@0.11:", type=("build", "run"), when="frameworks=spark @0.22:") + depends_on("py-petastorm@0.12:", type=("build", "run"), when="frameworks=spark @0.26:") depends_on("py-pyarrow@0.15.0:", type=("build", "run"), when="frameworks=spark") depends_on("py-pyspark@2.3.2:", type=("build", "run"), when="frameworks=spark ^python@:3.7") depends_on("py-pyspark@3.0.0:", type=("build", "run"), when="frameworks=spark ^python@3.8:") @@ -122,6 +126,7 @@ class PyHorovod(PythonPackage, CudaPackage): depends_on("py-fsspec@2021.07:", type=("build", "run"), when="frameworks=spark @0.24.2:") depends_on("py-ray", type=("build", "run"), when="frameworks=ray") depends_on("py-aioredis@:1", type=("build", "run"), when="frameworks=ray @0.23:") + depends_on("py-google-api-core@:2.8", type=("build", "run"), when="frameworks=ray @0.26:") # Controller dependencies depends_on("mpi", when="controllers=mpi") diff --git a/var/spack/repos/builtin/packages/py-huggingface-hub/package.py b/var/spack/repos/builtin/packages/py-huggingface-hub/package.py index f9d5fec83a5..be59e5b60ca 100644 --- a/var/spack/repos/builtin/packages/py-huggingface-hub/package.py +++ b/var/spack/repos/builtin/packages/py-huggingface-hub/package.py @@ -14,13 +14,18 @@ class PyHuggingfaceHub(PythonPackage): homepage = "https://github.com/huggingface/huggingface_hub" pypi = "huggingface_hub/huggingface_hub-0.0.10.tar.gz" + version("0.10.1", 
sha256="5c188d5b16bec4b78449f8681f9975ff9d321c16046cc29bcf0d7e464ff29276") version("0.0.10", sha256="556765e4c7edd2d2c4c733809bae1069dca20e10ff043870ec40d53e498efae2") version("0.0.8", sha256="be5b9a7ed36437bb10a780d500154d426798ec16803ff3406f7a61107e4ebfc2") + depends_on("python@3.7:", when="@0.10:", type=("build", "run")) depends_on("python@3.6:", type=("build", "run")) depends_on("py-setuptools", type="build") depends_on("py-filelock", type=("build", "run")) depends_on("py-requests", type=("build", "run")) depends_on("py-tqdm", type=("build", "run")) + depends_on("py-pyyaml@5.1:", when="@0.10:", type=("build", "run")) + depends_on("py-typing-extensions@3.7.4.3:", when="@0.10:", type=("build", "run")) depends_on("py-typing-extensions", when="@0.0.10:", type=("build", "run")) depends_on("py-importlib-metadata", when="^python@:3.7", type=("build", "run")) + depends_on("py-packaging@20.9:", when="@0.10:", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-ilmbase/package.py b/var/spack/repos/builtin/packages/py-ilmbase/package.py index 225b9b80ae0..4e073e0af3f 100644 --- a/var/spack/repos/builtin/packages/py-ilmbase/package.py +++ b/var/spack/repos/builtin/packages/py-ilmbase/package.py @@ -16,6 +16,7 @@ class PyIlmbase(AutotoolsPackage): depends_on("ilmbase") depends_on("boost+python") + depends_on("py-numpy") # https://github.com/AcademySoftwareFoundation/openexr/issues/336 parallel = False diff --git a/var/spack/repos/builtin/packages/py-instrain/package.py b/var/spack/repos/builtin/packages/py-instrain/package.py index 90887508fe0..962ee78f7a5 100644 --- a/var/spack/repos/builtin/packages/py-instrain/package.py +++ b/var/spack/repos/builtin/packages/py-instrain/package.py @@ -17,6 +17,8 @@ class PyInstrain(PythonPackage): homepage = "https://github.com/MrOlm/instrain" pypi = "inStrain/inStrain-1.5.7.tar.gz" + variant("prodigal", default=False, description="Enables profiling on a gene by gene level") + version("1.5.7", 
sha256="c5dcb01dae244927fe987b5f0695d895ccf521c9dfd87a2cb59057ad50bd9bfa") depends_on("python@3.4.0:", type=("build", "run")) @@ -35,3 +37,9 @@ class PyInstrain(PythonPackage): depends_on("py-psutil", type=("build", "run")) depends_on("py-lmfit", type=("build", "run")) depends_on("py-numba", type=("build", "run")) + # non-python dependencies + # https://instrain.readthedocs.io/en/latest/installation.html#dependencies + # Essential dependencies + depends_on("samtools", type=("build", "run")) + # Optional dependencies + depends_on("prodigal", type=("build", "run"), when="+prodigal") diff --git a/var/spack/repos/builtin/packages/py-ipykernel/package.py b/var/spack/repos/builtin/packages/py-ipykernel/package.py index 9ac69621a0e..a32c3c8331f 100644 --- a/var/spack/repos/builtin/packages/py-ipykernel/package.py +++ b/var/spack/repos/builtin/packages/py-ipykernel/package.py @@ -14,6 +14,7 @@ class PyIpykernel(PythonPackage): pypi = "ipykernel/ipykernel-5.3.4.tar.gz" version("6.16.0", sha256="7fe42c0d58435e971dc15fd42189f20d66bf35f3056bda4f6554271bc1fa3d0d") + version("6.15.2", sha256="e7481083b438609c9c8a22d6362e8e1bc6ec94ba0741b666941e634f2d61bdf3") version("6.9.1", sha256="f95070a2dfd3147f8ab19f18ee46733310813758593745e07ec18fb08b409f1d") version("6.4.1", sha256="df3355e5eec23126bc89767a676c5f0abfc7f4c3497d118c592b83b316e8c0cd") version("6.2.0", sha256="4439459f171d77f35b7f7e72dace5d7c2dd10a5c9e2c22b173ad9048fbfe7656") diff --git a/var/spack/repos/builtin/packages/py-isal/package.py b/var/spack/repos/builtin/packages/py-isal/package.py new file mode 100644 index 00000000000..3a4efa4ef78 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-isal/package.py @@ -0,0 +1,21 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +from spack.package import * + + +class PyIsal(PythonPackage): + """Faster zlib and gzip compatible compression and decompression by + providing Python bindings for the ISA-L library.""" + + homepage = "https://github.com/pycompression/python-isal" + pypi = "isal/isal-1.1.0.tar.gz" + + version("1.1.0", sha256="1364f4e3255a57d51c01422ab3ae785a43c076d516ebf49f6a25adecf8232105") + version("1.0.0", sha256="a30369de6852109eef8ca1bdd46d7e4b5c4517846a25acfc707cbb19db66ac80") + + depends_on("python@3.7:", type=("build", "run")) + depends_on("py-setuptools@51:", type="build") diff --git a/var/spack/repos/builtin/packages/py-jupyter-server-mathjax/package.py b/var/spack/repos/builtin/packages/py-jupyter-server-mathjax/package.py index 10b9e7fc05e..1c768476c4b 100644 --- a/var/spack/repos/builtin/packages/py-jupyter-server-mathjax/package.py +++ b/var/spack/repos/builtin/packages/py-jupyter-server-mathjax/package.py @@ -12,10 +12,12 @@ class PyJupyterServerMathjax(PythonPackage): homepage = "http://jupyter.org/" pypi = "jupyter_server_mathjax/jupyter_server_mathjax-0.2.3.tar.gz" + version("0.2.6", sha256="bb1e6b6dc0686c1fe386a22b5886163db548893a99c2810c36399e9c4ca23943") version("0.2.3", sha256="564e8d1272019c6771208f577b5f9f2b3afb02b9e2bff3b34c042cef8ed84451") depends_on("python@3.6:", type=("build", "run")) depends_on("py-setuptools", type="build") - depends_on("py-wheel", type="build") depends_on("py-jupyter-packaging", type="build") - depends_on("py-jupyter-server@1.1:1", type=("build", "run")) + depends_on("py-jupyter-packaging11@:1", when="@0.2.6:", type="build") + depends_on("py-jupyter-server@1.1:1", when="@0.2.3", type=("build", "run")) + depends_on("py-jupyter-server@1.1:", when="@0.2.6:", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-jupyterlab-pygments/package.py b/var/spack/repos/builtin/packages/py-jupyterlab-pygments/package.py index 9335e2db485..5f5973273c6 100644 --- 
a/var/spack/repos/builtin/packages/py-jupyterlab-pygments/package.py +++ b/var/spack/repos/builtin/packages/py-jupyterlab-pygments/package.py @@ -11,14 +11,28 @@ class PyJupyterlabPygments(PythonPackage): """Pygments theme using JupyterLab CSS variables.""" homepage = "https://jupyter.org/" - pypi = "jupyterlab-pygments/jupyterlab_pygments-0.1.1.tar.gz" + url = "https://files.pythonhosted.org/packages/py2.py3/j/jupyterlab-pygments/jupyterlab_pygments-0.2.2-py2.py3-none-any.whl" + # We use wheels because in @0.2.2: there is a cyclic dependency between + # py-nbconvert and py-jupyter-server: + # py-nbconvert -> py-jupyterlab-pygments -> py-jupyterlab -> + # -> py-jupyter-server -> py-nbconvert + # Reported here: https://github.com/jupyterlab/jupyterlab_pygments/issues/23 - version("0.2.2", sha256="7405d7fde60819d905a9fa8ce89e4cd830e318cdad22a0030f7a901da705585d") - version("0.1.2", sha256="cfcda0873626150932f438eccf0f8bf22bfa92345b814890ab360d666b254146") - version("0.1.1", sha256="19a0ccde7daddec638363cd3d60b63a4f6544c9181d65253317b2fb492a797b9") + version( + "0.2.2", + sha256="2405800db07c9f770863bcf8049a529c3dd4d3e28536638bd7c1c01d2748309f", + expand=False, + ) + version( + "0.1.2", + sha256="abfb880fd1561987efaefcb2d2ac75145d2a5d0139b1876d5be806e32f630008", + expand=False, + ) + version( + "0.1.1", + sha256="c9535e5999f29bff90bd0fa423717dcaf247b71fad505d66b17d3217e9021fc5", + expand=False, + ) depends_on("python@3.7:", when="@0.2.2:", type=("build", "run")) - depends_on("py-setuptools", when="@:0.1.2", type="build") - depends_on("py-jupyter-packaging11", when="@0.2.2:", type="build") - depends_on("py-jupyterlab@3.1:3", when="@0.2.2:", type="build") depends_on("py-pygments@2.4.1:2", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-keyring/package.py b/var/spack/repos/builtin/packages/py-keyring/package.py index 9503f7d29b5..8c29d85bc00 100644 --- a/var/spack/repos/builtin/packages/py-keyring/package.py +++ 
b/var/spack/repos/builtin/packages/py-keyring/package.py @@ -14,6 +14,7 @@ class PyKeyring(PythonPackage): homepage = "https://github.com/jaraco/keyring" pypi = "keyring/keyring-23.0.1.tar.gz" + version("23.9.1", sha256="39e4f6572238d2615a82fcaa485e608b84b503cf080dc924c43bbbacb11c1c18") version("23.5.0", sha256="9012508e141a80bd1c0b6778d5c610dd9f8c464d75ac6774248500503f972fb9") version("23.2.1", sha256="6334aee6073db2fb1f30892697b1730105b5e9a77ce7e61fca6b435225493efe") version("23.2.0", sha256="1e1970dcecde00c59ff6033d69cee3b283cd0d7cbad78b0dc4cdd15c8a28bcf8") @@ -39,8 +40,9 @@ class PyKeyring(PythonPackage): ) depends_on("py-secretstorage", when="platform=linux", type=("build", "run")) depends_on("py-jeepney@0.4.2:", when="@21: platform=linux", type=("build", "run")) - depends_on("py-importlib-metadata@3.6:", when="@23:", type=("build", "run")) - depends_on("py-importlib-metadata@1:", when="@21:", type=("build", "run")) + depends_on("py-importlib-metadata@3.6:", when="@23: ^python@:3.9", type=("build", "run")) + depends_on("py-importlib-metadata@1:", when="@21:22", type=("build", "run")) depends_on("py-importlib-metadata", when="@20:", type=("build", "run")) + depends_on("py-jaraco-classes", when="@23.9.1:", type=("build", "run")) # TODO: additional dependency on pywin32-ctypes required for Windows diff --git a/var/spack/repos/builtin/packages/py-kornia/package.py b/var/spack/repos/builtin/packages/py-kornia/package.py index 0834cef0825..3abc0a1aac3 100644 --- a/var/spack/repos/builtin/packages/py-kornia/package.py +++ b/var/spack/repos/builtin/packages/py-kornia/package.py @@ -12,6 +12,7 @@ class PyKornia(PythonPackage): homepage = "https://www.kornia.org/" pypi = "kornia/kornia-0.5.10.tar.gz" + version("0.6.8", sha256="0985e02453c0ab4f030e8d22a3a7554dab312ffa8f8a54ec872190e6f0b58c56") version("0.6.7", sha256="7ff57c931551a1a1465aaac1fa6842a2aad650f51a0f9bf6cf0b0f7d6e5fb59c") version("0.6.6", 
sha256="e29f0f994e3bafec016b101a9a3e89c3751b4fe99ada3ac21d3febb47904faa4") version("0.6.5", sha256="14cbd8b4064b3d0fb5a8198d1b5fd9231bcd62b9039351641fca6b294b5069f0") diff --git a/var/spack/repos/builtin/packages/py-libensemble/package.py b/var/spack/repos/builtin/packages/py-libensemble/package.py index 6d5bfbc35b5..13b511811e5 100644 --- a/var/spack/repos/builtin/packages/py-libensemble/package.py +++ b/var/spack/repos/builtin/packages/py-libensemble/package.py @@ -12,13 +12,14 @@ class PyLibensemble(PythonPackage): """Library for managing ensemble-like collections of computations.""" homepage = "https://libensemble.readthedocs.io" - pypi = "libensemble/libensemble-0.9.2.tar.gz" + pypi = "libensemble/libensemble-0.9.3.tar.gz" git = "https://github.com/Libensemble/libensemble.git" - maintainers = ["shuds13"] + maintainers = ["shuds13", "jlnav"] tags = ["e4s"] version("develop", branch="develop") + version("0.9.3", sha256="00e5a65d6891feee6a686c048d8de72097b8bff164431f163be96ec130a9c390") version("0.9.2", sha256="e46598e5696f770cbff4cb90507b52867faad5654f1b80de35405a95228c909f") version("0.9.1", sha256="684e52b0ea64f5ec610e7868b7e4c9fa5fd2316a370a726870aa5fd5fb1b0ede") version("0.9.0", sha256="34976e775f0d2ba5955744560104eab214fd22cb47173440eb5136e852a8ec38") @@ -45,9 +46,6 @@ class PyLibensemble(PythonPackage): variant("tasmanian", default=False, description="Install with tasmanian") variant("pyyaml", default=False, description="Install with pyyaml") - # depends_on('python@2.7:2.8,3.3:', when='@:0.4.1') - # depends_on('python@3.5:', when='@0.5.0:') - depends_on("python@3.5:", type=("build", "run")) depends_on("py-setuptools", type="build") depends_on("py-numpy", type=("build", "run")) depends_on("py-psutil", type=("build", "run"), when="@0.7.1:") diff --git a/var/spack/repos/builtin/packages/py-lightning-lite/package.py b/var/spack/repos/builtin/packages/py-lightning-lite/package.py new file mode 100644 index 00000000000..30cddf16e06 --- /dev/null +++ 
b/var/spack/repos/builtin/packages/py-lightning-lite/package.py @@ -0,0 +1,30 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PyLightningLite(PythonPackage): + """LightningLite enables pure PyTorch users to scale their existing code on any kind + of device while retaining full control over their own loops and optimization logic.""" + + homepage = "https://github.com/Lightning-AI/lightning" + pypi = "lightning-lite/lightning-lite-1.8.0.tar.gz" + + maintainers = ["adamjstewart"] + + version("1.8.0", sha256="a71940409d3d1a5bb20f63716c86a745157ce30100f1c16600dfe33d9b657955") + + # src/lightning_lite/__setup__.py + depends_on("python@3.7:", type=("build", "run")) + depends_on("py-setuptools", type="build") + + # requirements/lite/base.txt + depends_on("py-numpy@1.17.2:", type=("build", "run")) + depends_on("py-torch@1.9:", type=("build", "run")) + depends_on("py-fsspec@2021.06.1:+http", type=("build", "run")) + depends_on("py-packaging@17:", type=("build", "run")) + depends_on("py-typing-extensions@4:", type=("build", "run")) + depends_on("py-lightning-utilities@0.3", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-lightning-utilities/package.py b/var/spack/repos/builtin/packages/py-lightning-utilities/package.py new file mode 100644 index 00000000000..963295686b8 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-lightning-utilities/package.py @@ -0,0 +1,29 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PyLightningUtilities(PythonPackage): + """Common Python utilities and GitHub Actions in Lightning Ecosystem""" + + homepage = "https://github.com/Lightning-AI/utilities" + pypi = "lightning-utilities/lightning-utilities-0.4.1.tar.gz" + + maintainers = ["adamjstewart"] + + version("0.4.1", sha256="969697b0debffd808d4cf3b74af4952f82bf6726f4ce561119037871547690a5") + version("0.4.0", sha256="961c29774c2c8303e0a2f6e6512a2e21e1d8acaf6df182865667af4a51bc176c") + version("0.3.0", sha256="d769ab9b76ebdee3243d1051d509aafee57d7947734ddc22977deef8a6427f2f") + + # setup.py + depends_on("python@3.7:", type=("build", "run")) + depends_on("py-setuptools", type="build") + + # requirements/base.txt + depends_on("py-importlib-metadata@4:", when="@0.4.1: ^python@:3.7", type=("build", "run")) + + # Historical dependencies + depends_on("py-fire", when="@0.3.0", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-matplotlib/package.py b/var/spack/repos/builtin/packages/py-matplotlib/package.py index cdebe1e2af8..04548fd89cd 100644 --- a/var/spack/repos/builtin/packages/py-matplotlib/package.py +++ b/var/spack/repos/builtin/packages/py-matplotlib/package.py @@ -37,6 +37,7 @@ class PyMatplotlib(PythonPackage): "pylab", ] + version("3.6.2", sha256="b03fd10a1709d0101c054883b550f7c4c5e974f751e2680318759af005964990") version("3.6.1", sha256="e2d1b7225666f7e1bcc94c0bc9c587a82e3e8691da4757e357e5c2515222ee37") version("3.6.0", sha256="c5108ebe67da60a9204497d8d403316228deb52b550388190c53a57394d41531") version("3.5.3", sha256="339cac48b80ddbc8bfd05daae0a3a73414651a8596904c2a881cfd1edb65f26c") diff --git a/var/spack/repos/builtin/packages/py-mdit-py-plugins/package.py b/var/spack/repos/builtin/packages/py-mdit-py-plugins/package.py index 0b33022ebe7..a096af89488 100644 --- a/var/spack/repos/builtin/packages/py-mdit-py-plugins/package.py +++ 
b/var/spack/repos/builtin/packages/py-mdit-py-plugins/package.py @@ -12,10 +12,15 @@ class PyMditPyPlugins(PythonPackage): homepage = "https://github.com/executablebooks/mdit-py-plugins/" git = "https://github.com/executablebooks/mdit-py-plugins/" - pypi = "mdit-py-plugins/mdit-py-plugins-0.2.8.tar.gz" + pypi = "mdit-py-plugins/mdit-py-plugins-0.3.1.tar.gz" + version("0.3.1", sha256="3fc13298497d6e04fe96efdd41281bfe7622152f9caa1815ea99b5c893de9441") version("0.2.8", sha256="5991cef645502e80a5388ec4fc20885d2313d4871e8b8e320ca2de14ac0c015f") - depends_on("py-setuptools", type="build") - depends_on("py-markdown-it-py@1.0:1", type=("build", "run")) - depends_on("python@3.6:3", type=("build", "run")) + depends_on("py-flit-core@3.4:3", when="@0.3.1", type=("build", "run")) + depends_on("python@3.7:", when="@0.3.1", type=("build", "run")) + depends_on("py-markdown-it-py@1.0:2", when="@0.3.1", type=("build", "run")) + + depends_on("py-setuptools", when="@:0.2", type="build") + depends_on("py-markdown-it-py@1.0:1", when="@0.2.8", type=("build", "run")) + depends_on("python@3.6:3", when="@0.2.8", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-medaka/htslib.patch b/var/spack/repos/builtin/packages/py-medaka/htslib.patch new file mode 100644 index 00000000000..2c500f8866b --- /dev/null +++ b/var/spack/repos/builtin/packages/py-medaka/htslib.patch @@ -0,0 +1,34 @@ +# patch method copied from bioconda recipe +# github.com/bioconda/bioconda-recipes/blob/master/recipes/medaka/build.sh +diff -Naur spack-src/setup.py spack-src.patched/setup.py +--- spack-src/setup.py 2022-10-26 16:10:27.824793639 -0500 ++++ spack-src.patched/setup.py 2022-10-26 16:16:57.815324146 -0500 +@@ -144,6 +144,6 @@ + 'scripts/mini_align', 'scripts/hdf2tf.py'], + zip_safe=False, + cmdclass={ +- 'build_ext': HTSBuild ++ + }, + ) +diff -Naur spack-src/build.py spack-src.patched/build.py +--- spack-src/build.py 2022-10-26 16:15:35.891735658 -0500 ++++ spack-src.patched/build.py 
2022-10-26 16:18:42.468243578 -0500 +@@ -12,7 +12,7 @@ + deflatever = "1.10" + deflate_dir = os.path.join(dir_path, 'submodules', 'libdeflate-{}'.format(deflatever)) + +-libraries=['m', 'z', 'lzma', 'bz2', 'pthread', 'curl', 'crypto'] ++libraries=["hts",'m', 'z', 'lzma', 'bz2', 'pthread', 'curl', 'crypto'] + library_dirs=[htslib_dir] + if os.getenv('WITHDEFLATE') == "1": + print("Using deflate") +@@ -52,7 +52,7 @@ + 'fastrle.c', 'medaka_trimbam.c', 'medaka_pytrimbam.c', + 'medaka_rnn_variants.c')], + extra_compile_args=extra_compile_args, +- extra_objects=['libhts.a'] ++ + ) + + cdef = [ diff --git a/var/spack/repos/builtin/packages/py-medaka/package.py b/var/spack/repos/builtin/packages/py-medaka/package.py new file mode 100644 index 00000000000..03ff3ccd80b --- /dev/null +++ b/var/spack/repos/builtin/packages/py-medaka/package.py @@ -0,0 +1,43 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +from spack.package import * + + +class PyMedaka(PythonPackage): + """medaka is a tool to create consensus sequences and variant calls from + nanopore sequencing data. This task is performed using neural networks + applied a pileup of individual sequencing reads against a draft assembly. 
+ It provides state-of-the-art results outperforming sequence-graph based + methods and signal-based methods, whilst also being faster.""" + + homepage = "https://github.com/nanoporetech/medaka" + pypi = "medaka/medaka-1.7.2.tar.gz" + + version("1.7.2", sha256="7629546ed9193ffb6b1f881a6ce74b7d13d94972e032556098577ddb43bee763") + + # disable Makefile driven build of htslib and link to system htslib instead + patch("htslib.patch", when="@1.7.2") + + depends_on("python@3.6:3.9", type=("build", "run")) + depends_on("py-setuptools", type="build") + depends_on("py-cffi@1.15.0", type=("build", "run")) + depends_on("py-edlib", type=("build", "run")) + depends_on("py-grpcio", type=("build", "run")) + depends_on("py-h5py", type=("build", "run")) + depends_on("py-intervaltree", type=("build", "run")) + depends_on("py-tensorflow@2.7.0:2.7", type=("build", "run")) + depends_on("py-numpy", type=("build", "run")) + depends_on("minimap2", type=("build", "run")) + depends_on("py-ont-fast5-api", type=("build", "run")) + depends_on("py-parasail", when="target=x86_64:", type=("build", "run")) + depends_on("py-parasail", when="target=ppc64le:", type=("build", "run")) + depends_on("py-pysam@0.16.0.1:", type=("build", "run")) + depends_on("py-pyspoa@0.0.3:", when="target=x86_64:", type=("build", "run")) + depends_on("py-pyspoa@0.0.3:", when="target=ppc64le:", type=("build", "run")) + depends_on("py-requests", type=("build", "run")) + depends_on("samtools", type=("build", "run")) + depends_on("htslib", type=("build", "run", "link")) diff --git a/var/spack/repos/builtin/packages/py-meson-python/package.py b/var/spack/repos/builtin/packages/py-meson-python/package.py index eba4d613f93..ba4f5e9df77 100644 --- a/var/spack/repos/builtin/packages/py-meson-python/package.py +++ b/var/spack/repos/builtin/packages/py-meson-python/package.py @@ -14,12 +14,21 @@ class PyMesonPython(PythonPackage): maintainers = ["eli-schwartz", "adamjstewart"] + version("0.10.0", 
sha256="08dd122c1074dbd5c55b53993a719cca73dd8216372c91217f7a550260f9e7e1") + version("0.9.0", sha256="6aa5a09ff5cce1c5308938ebbf3eab5529413c8677055ace1ac8c83d8a07b29d") + version("0.8.1", sha256="442f1fa4cf5db50eea61170a6059c10fafd70977f5dbdf3441c106cd23b05e4c") + version("0.8.0", sha256="b5c8a2727e6f6feaffc1db513244c9bdb5d0f689b45e24f4529b649b7710daf7") version("0.7.0", sha256="9fcfa350f44ca80dd4f5f9c3d251725434acf9a07d9618f382e6cc4629dcbe84") depends_on("python@3.7:", type=("build", "run")) - depends_on("py-meson@0.62:", type=("build", "run")) - depends_on("py-ninja", type=("build", "run")) + depends_on("meson@0.63:", when="@0.9:", type=("build", "run")) + depends_on("meson@0.62:", type=("build", "run")) depends_on("py-pyproject-metadata@0.5:", type=("build", "run")) depends_on("py-tomli@1:", type=("build", "run")) depends_on("py-typing-extensions@3.7.4:", when="^python@:3.7", type=("build", "run")) depends_on("py-colorama", when="platform=windows", type=("build", "run")) + + # https://github.com/FFY00/meson-python/pull/111 + conflicts("platform=darwin os=ventura", when="@:0.7") + conflicts("platform=darwin os=monterey", when="@:0.7") + conflicts("platform=darwin os=bigsur", when="@:0.7") diff --git a/var/spack/repos/builtin/packages/py-meson/package.py b/var/spack/repos/builtin/packages/py-meson/package.py deleted file mode 100644 index 0c71dc6887c..00000000000 --- a/var/spack/repos/builtin/packages/py-meson/package.py +++ /dev/null @@ -1,26 +0,0 @@ -# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other -# Spack Project Developers. See the top-level COPYRIGHT file for details. -# -# SPDX-License-Identifier: (Apache-2.0 OR MIT) - -from spack.package import * - - -class PyMeson(PythonPackage): - """A high performance build system. - - Meson is a cross-platform build system designed to be both as fast and as user - friendly as possible. It supports many languages and compilers, including GCC, - Clang, PGI, Intel, and Visual Studio. 
Its build definitions are written in a simple - non-Turing complete DSL. - """ - - homepage = "https://mesonbuild.com/" - pypi = "meson/meson-0.62.2.tar.gz" - - maintainers = ["eli-schwartz", "adamjstewart"] - - version("0.62.2", sha256="a7669e4c4110b06b743d57cc5d6432591a6677ef2402139fe4f3d42ac13380b0") - - depends_on("python@3.7:", type=("build", "run")) - depends_on("py-setuptools@42:", type="build") diff --git a/var/spack/repos/builtin/packages/py-modin/package.py b/var/spack/repos/builtin/packages/py-modin/package.py new file mode 100644 index 00000000000..14bfdce366d --- /dev/null +++ b/var/spack/repos/builtin/packages/py-modin/package.py @@ -0,0 +1,46 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PyModin(PythonPackage): + """Modin: Make your pandas code run faster by changing one line of code.""" + + homepage = "https://github.com/modin-project/modin" + pypi = "modin/modin-0.16.2.tar.gz" + + version("0.16.2", sha256="8e3f4cb478ae08dcc71b5a345781d57f29d6b95bc6ce1dc5c14d597a382f1354") + + variant( + "engine", + default="ray", + values=["ray", "dask", "python", "native"], + description="Default distribution engine. 
All engines are installed and " + "functional as long as dependencies are found at run-time", + ) + + depends_on("python@3.6:", type=("build", "run")) + depends_on("py-setuptools", type="build") + depends_on("py-pandas@1.5.1", when="^python@3.8:", type=("build", "run")) + depends_on("py-pandas@1.1.5", when="^python@:3.7", type=("build", "run")) + depends_on("py-packaging", type=("build", "run")) + depends_on("py-numpy@1.18.5:", type=("build", "run")) + depends_on("py-fsspec", type=("build", "run")) + depends_on("py-psutil", type=("build", "run")) + + with when("engine=ray"): + depends_on("py-ray@1.4:+default", type=("build", "run")) + depends_on("py-pyarrow@4.0.1:", type=("build", "run")) + depends_on("py-redis@3.5:3", type=("build", "run")) + + with when("engine=dask"): + depends_on("py-dask@2.22:", type=("build", "run")) + depends_on("py-distributed@2.22:", type=("build", "run")) + depends_on("py-pickle5", when="^python@:3.7", type=("build", "run")) + + def setup_run_environment(self, env): + # modin/config/envvars.py + env.set("MODIN_ENGINE", self.spec.variants["engine"].value) diff --git a/var/spack/repos/builtin/packages/py-mpi4py/package.py b/var/spack/repos/builtin/packages/py-mpi4py/package.py index f0133d2af49..7ce0633733d 100644 --- a/var/spack/repos/builtin/packages/py-mpi4py/package.py +++ b/var/spack/repos/builtin/packages/py-mpi4py/package.py @@ -17,6 +17,7 @@ class PyMpi4py(PythonPackage): git = "https://github.com/mpi4py/mpi4py.git" version("master", branch="master") + version("3.1.3", sha256="f1e9fae1079f43eafdd9f817cdb3fd30d709edc093b5d5dada57a461b2db3008") version("3.1.2", sha256="40dd546bece8f63e1131c3ceaa7c18f8e8e93191a762cd446a8cfcf7f9cce770") version("3.1.1", sha256="e11f8587a3b93bb24c8526addec664b586b965d83c0882b884c14dc3fd6b9f5c") version("3.1.0", sha256="134fa2b2fe6d8f91bcfcc2824cfd74b55ca3dcbff4d185b1bda009beea9232ec") @@ -28,7 +29,7 @@ class PyMpi4py(PythonPackage): depends_on("python@2.6:2.7,3.2:") depends_on("python@2.7:2.8,3.5:", 
when="@3.1:") - depends_on("py-setuptools", type="build") + depends_on("py-setuptools@40.9:", type="build") depends_on("mpi") depends_on("py-cython@0.27.0:", when="@master", type="build") depends_on("py-3to2", when="@3.1: ^python@:2", type="build") diff --git a/var/spack/repos/builtin/packages/py-mpld3/package.py b/var/spack/repos/builtin/packages/py-mpld3/package.py index e4057cfd3cb..1c75fcbaf0d 100644 --- a/var/spack/repos/builtin/packages/py-mpld3/package.py +++ b/var/spack/repos/builtin/packages/py-mpld3/package.py @@ -13,6 +13,7 @@ class PyMpld3(PythonPackage): homepage = "https://mpld3.github.io/" pypi = "mpld3/mpld3-0.3.tar.gz" + version("0.5.8", sha256="1a167dbef836dd7c66d8aa71c06a32d50bffa18725f304d93cb74fdb3545043b") version("0.5.5", sha256="b080f3535238a71024c0158280ab4f6091717c45347c41c907012f8dd6da1bd5") version("0.3", sha256="4d455884a211bf99b37ecc760759435c7bb6a5955de47d8daf4967e301878ab7") diff --git a/var/spack/repos/builtin/packages/py-myst-parser/package.py b/var/spack/repos/builtin/packages/py-myst-parser/package.py new file mode 100644 index 00000000000..c8748227521 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-myst-parser/package.py @@ -0,0 +1,27 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PyMystParser(PythonPackage): + """A Sphinx and Docutils extension to parse MyST, a rich and + extensible flavour of Markdown for authoring technical and + scientific documentation.""" + + homepage = "https://github.com/executablebooks/MyST-Parser" + pypi = "myst-parser/myst-parser-0.18.1.tar.gz" + + version("0.18.1", sha256="79317f4bb2c13053dd6e64f9da1ba1da6cd9c40c8a430c447a7b146a594c246d") + + depends_on("python@3.7:", type=("build", "run")) + depends_on("py-flit-core@3.4:3", type="build") + depends_on("py-docutils@0.15:0.19", type=("build", "run")) + depends_on("py-jinja2", type=("build", "run")) # let sphinx decide version + depends_on("py-markdown-it-py@1.0.0:2", type=("build", "run")) + depends_on("py-mdit-py-plugins@0.3.1:0.3", type=("build", "run")) + depends_on("py-pyyaml", type=("build", "run")) + depends_on("py-sphinx@4.0.0:5", type=("build", "run")) + depends_on("py-typing-extensions", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-ncbi-genome-download/package.py b/var/spack/repos/builtin/packages/py-ncbi-genome-download/package.py new file mode 100644 index 00000000000..3124f45ae38 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-ncbi-genome-download/package.py @@ -0,0 +1,21 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +from spack.package import * + + +class PyNcbiGenomeDownload(PythonPackage): + """Scripts to download genomes from the NCBI FTP servers""" + + homepage = "https://github.com/kblin/ncbi-genome-download/" + pypi = "ncbi-genome-download/ncbi-genome-download-0.3.1.tar.gz" + + version("0.3.1", sha256="74675e94f184b8d80429641b27ed6d46ed81028d95156337de6d09f8dd739c6e") + + depends_on("py-setuptools", type="build") + depends_on("py-appdirs", type=("build", "run")) + depends_on("py-requests@2.4.3:", type=("build", "run")) + depends_on("py-tqdm", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-numpy/package.py b/var/spack/repos/builtin/packages/py-numpy/package.py index 840e05a9d67..708272744ea 100644 --- a/var/spack/repos/builtin/packages/py-numpy/package.py +++ b/var/spack/repos/builtin/packages/py-numpy/package.py @@ -23,6 +23,7 @@ class PyNumpy(PythonPackage): maintainers = ["adamjstewart", "rgommers"] version("main", branch="main") + version("1.23.4", sha256="ed2cc92af0efad20198638c69bb0fc2870a58dabfba6eb722c933b48556c686c") version("1.23.3", sha256="51bf49c0cd1d52be0a240aa66f3458afc4b95d8993d2d04f0d91fa60c10af6cd") version("1.23.2", sha256="b78d00e48261fbbd04aa0d7427cf78d18401ee0abd89c7559bbf422e5b1c7d01") version("1.23.1", sha256="d748ef349bfef2e1194b59da37ed5a29c19ea8d7e6342019921ba2ba4fd8b624") diff --git a/var/spack/repos/builtin/packages/py-oauthlib/package.py b/var/spack/repos/builtin/packages/py-oauthlib/package.py index d5a7fb4ef6a..1b73e0c4b2a 100644 --- a/var/spack/repos/builtin/packages/py-oauthlib/package.py +++ b/var/spack/repos/builtin/packages/py-oauthlib/package.py @@ -15,19 +15,38 @@ class PyOauthlib(PythonPackage): homepage = "https://github.com/oauthlib/oauthlib" pypi = "oauthlib/oauthlib-3.1.0.tar.gz" + version("3.2.1", sha256="1565237372795bf6ee3e5aba5e2a85bd5a65d0e2aa5c628b9a97b7d7a0da3721") version("3.1.1", 
sha256="8f0215fcc533dd8dd1bee6f4c412d4f0cd7297307d43ac61666389e3bc3198a3") version("3.1.0", sha256="bee41cc35fcca6e988463cacc3bcb8a96224f470ca547e697b604cc697b2f889") version("3.0.1", sha256="0ce32c5d989a1827e3f1148f98b9085ed2370fc939bf524c9c851d8714797298") version("2.0.2", sha256="b3b9b47f2a263fe249b5b48c4e25a5bce882ff20a0ac34d553ce43cff55b53ac") - variant("extras", default=True, description="Build with pyjwt, blinker, cryptography") + variant( + "extras", + when="@:3.1.1", + default=True, + description="Build with pyjwt, blinker, cryptography", + ) + variant("rsa", when="@3.2.1:", default=False, description="Build with cryptography") + variant( + "signedtoken", + when="@3.2.1:", + default=False, + description="Build with cryptography and pyjwt", + ) + variant("signals", when="@3.2.1:", default=False, description="Build with blinker") depends_on("py-setuptools", type="build") depends_on("py-pyjwt@1.0.0:", type=("build", "run"), when="+extras") depends_on("py-pyjwt@2.0.0:2", type=("build", "run"), when="+extras @3.1.1:") + depends_on("py-pyjwt@2.0.0:2", type=("build", "run"), when="+signedtoken @3.2.1:") depends_on("py-blinker", type=("build", "run"), when="+extras") + depends_on("py-blinker", type=("build", "run"), when="+signals") depends_on("py-blinker@1.4:", type=("build", "run"), when="+extras @3.1.1:") + depends_on("py-blinker@1.4:", type=("build", "run"), when="+signals @3.2.1:") depends_on("py-cryptography", type=("build", "run"), when="+extras") - depends_on("py-cryptography@3.0.0:3", type=("build", "run"), when="+extras @3.1.1:") + depends_on("py-cryptography@3.0.0:3", type=("build", "run"), when="+extras @3.1.1") + depends_on("py-cryptography@3.0.0:", type=("build", "run"), when="+rsa @3.2.1:") + depends_on("py-cryptography@3.0.0:", type=("build", "run"), when="+signedtoken @3.2.1:") depends_on("python@2.7:2.8,3.4:", type=("build", "run")) depends_on("python@3.6:", type=("build", "run"), when="@3.1.1:") diff --git 
a/var/spack/repos/builtin/packages/py-onnx-runtime/package.py b/var/spack/repos/builtin/packages/py-onnx-runtime/package.py index 62ea5313957..fa956086b8e 100644 --- a/var/spack/repos/builtin/packages/py-onnx-runtime/package.py +++ b/var/spack/repos/builtin/packages/py-onnx-runtime/package.py @@ -6,7 +6,7 @@ from spack.package import * -class PyOnnxRuntime(CMakePackage, PythonPackage): +class PyOnnxRuntime(CMakePackage, PythonExtension): """ONNX Runtime is a performance-focused complete scoring engine for Open Neural Network Exchange (ONNX) models, with an open extensible architecture to continually address the diff --git a/var/spack/repos/builtin/packages/py-ont-fast5-api/package.py b/var/spack/repos/builtin/packages/py-ont-fast5-api/package.py index 3e96ca254db..730e4b8b63a 100644 --- a/var/spack/repos/builtin/packages/py-ont-fast5-api/package.py +++ b/var/spack/repos/builtin/packages/py-ont-fast5-api/package.py @@ -17,8 +17,14 @@ class PyOntFast5Api(PythonPackage): homepage = "https://github.com/nanoporetech/ont_fast5_api" pypi = "ont-fast5-api/ont-fast5-api-0.3.2.tar.gz" + version("4.1.0", sha256="afa58fb0a73ac33161fe0d13d32698b3325756c370f2f440a8a43b4b68c75f32") version("0.3.2", sha256="ae44b1bcd812e8acf8beff3db92456647c343cf19340f97cff4847de5cc905d8") + depends_on("python@3.6:", type=("build", "run"), when="@4:") depends_on("py-setuptools", type="build") depends_on("py-h5py", type=("build", "run")) + depends_on("py-h5py@2.10:", type=("build", "run"), when="@4.0.1:") depends_on("py-numpy@1.8.1:", type=("build", "run")) + depends_on("py-numpy@1.16:", type=("build", "run"), when="@3.2.0:") + depends_on("py-packaging", type=("build", "run"), when="@3.0.2:") + depends_on("py-progressbar33@2.3.1:", type=("build", "run"), when="@1.0.1:") diff --git a/var/spack/repos/builtin/packages/py-pandas/package.py b/var/spack/repos/builtin/packages/py-pandas/package.py index cb86bd502cd..ebbd69b7bb6 100644 --- a/var/spack/repos/builtin/packages/py-pandas/package.py +++ 
b/var/spack/repos/builtin/packages/py-pandas/package.py @@ -17,6 +17,7 @@ class PyPandas(PythonPackage): maintainers = ["adamjstewart"] + version("1.5.1", sha256="249cec5f2a5b22096440bd85c33106b6102e0672204abd2d5c014106459804ee") version("1.5.0", sha256="3ee61b881d2f64dd90c356eb4a4a4de75376586cd3c9341c6c0fcaae18d52977") version("1.4.4", sha256="ab6c0d738617b675183e5f28db32b5148b694ad9bba0a40c3ea26d96b431db67") version("1.4.3", sha256="2ff7788468e75917574f080cd4681b27e1a7bf36461fe968b49a87b5a54d007c") diff --git a/var/spack/repos/builtin/packages/py-parasail/package.py b/var/spack/repos/builtin/packages/py-parasail/package.py new file mode 100644 index 00000000000..ea65f9efab0 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-parasail/package.py @@ -0,0 +1,27 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +from spack.package import * + + +class PyParasail(PythonPackage): + """Python Bindings for the Parasail C Library. 
Parasail is a SIMD C (C99) + library containing implementations of the Smith-Waterman (local), + Needleman-Wunsch (global), and semi-global pairwise sequence alignment + algorithms.""" + + homepage = "https://github.com/jeffdaily/parasail-python" + pypi = "parasail/parasail-1.3.3.tar.gz" + + version("1.3.3", sha256="06f05066d9cf624c0b043f51a1e9d2964154e1edd0f9843e0838f32073e576f8") + + depends_on("perl", type="build") + depends_on("m4", type="build") + depends_on("libtool", type="build") + depends_on("autoconf", type="build") + depends_on("automake", type="build") + depends_on("py-setuptools", type="build") + depends_on("py-numpy", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-petastorm/package.py b/var/spack/repos/builtin/packages/py-petastorm/package.py index 65953b1466e..fbb7b191a33 100644 --- a/var/spack/repos/builtin/packages/py-petastorm/package.py +++ b/var/spack/repos/builtin/packages/py-petastorm/package.py @@ -16,6 +16,7 @@ class PyPetastorm(PythonPackage): maintainers = ["adamjstewart"] + version("0.12.0", sha256="79b98b87a619f34ca96a3dd42670506ce9439d321b3aab356cdf7edac8ff5c5c") version("0.11.4", sha256="7090dfc86f110e641d95798bcc75f8b1ca14cd56ed3feef491baaa6849629e51") version("0.9.8", sha256="571855224411b88b759ba5d48b288ad2ba09997ebd259292f72b9246144b8101") version("0.8.2", sha256="1bf4f26ce0b14f7334c0c29868154f1e600021a044f7565a5ad766b5ecdde911") diff --git a/var/spack/repos/builtin/packages/py-petsc4py/package.py b/var/spack/repos/builtin/packages/py-petsc4py/package.py index 555c26e4986..a276c09bae4 100644 --- a/var/spack/repos/builtin/packages/py-petsc4py/package.py +++ b/var/spack/repos/builtin/packages/py-petsc4py/package.py @@ -16,6 +16,9 @@ class PyPetsc4py(PythonPackage): maintainers = ["balay"] version("main", branch="main") + version("3.18.1", sha256="6d9d9632e2da0920c4e3905b7bac919837bdd85ecfaf1b9e461ba7e05ec4a5ce") + version("3.18.0", sha256="76bad2d35f380f698f5649c3f38eabd153b9b19b1fe3ce3a1d3de9aa5824a4d2") + 
version("3.17.5", sha256="e435d927bf22950c71c30bda538e1ae75f48f6931a63205c6fbeff6cf4393f09") version("3.17.4", sha256="216c3da074557946615d37d0826bc89f1f2e599323e2dacbdc45326d78bd50c6") version("3.17.3", sha256="c588ab4a17deebe7f0a57f966b3368d88f01d1a1c09f220f63fe8e3b37a32899") version("3.17.2", sha256="7e256e13013ce12c8e52edee35920e3d2c1deaae1b71597a3064201eba7abc1c") @@ -61,6 +64,7 @@ class PyPetsc4py(PythonPackage): depends_on("petsc+mpi", when="+mpi") depends_on("petsc~mpi", when="~mpi") depends_on("petsc@main", when="@main") + depends_on("petsc@3.18.0:3.18", when="@3.18.0:3.18") depends_on("petsc@3.17.0:3.17", when="@3.17.0:3.17") depends_on("petsc@3.16.0:3.16", when="@3.16.0:3.16") depends_on("petsc@3.15.0:3.15", when="@3.15.0:3.15") diff --git a/var/spack/repos/builtin/packages/py-picmistandard/package.py b/var/spack/repos/builtin/packages/py-picmistandard/package.py index e497a682569..3d6da0ba62b 100644 --- a/var/spack/repos/builtin/packages/py-picmistandard/package.py +++ b/var/spack/repos/builtin/packages/py-picmistandard/package.py @@ -16,6 +16,8 @@ class PyPicmistandard(PythonPackage): maintainers = ["ax3l", "dpgrote", "RemiLehe"] version("develop", branch="master") + version("0.0.21", sha256="930056a23ed92dac7930198f115b6248606b57403bffebce3d84579657c8d10b") + version("0.0.20", sha256="9c1822eaa2e4dd543b5afcfa97940516267dda3890695a6cf9c29565a41e2905") version("0.0.19", sha256="4b7ba1330964fbfd515e8ea2219966957c1386e0896b92d36bd9e134afb02f5a") version("0.0.18", sha256="68c208c0c54b4786e133bb13eef0dd4824998da4906285987ddee84e6d195e71") # 0.15 - 0.17 have broken install logic: missing requirements.txt on pypi diff --git a/var/spack/repos/builtin/packages/py-pillow/package.py b/var/spack/repos/builtin/packages/py-pillow/package.py index 8f3488fce3e..aa98037062e 100644 --- a/var/spack/repos/builtin/packages/py-pillow/package.py +++ b/var/spack/repos/builtin/packages/py-pillow/package.py @@ -15,6 +15,18 @@ class PyPillowBase(PythonPackage): # These defaults 
correspond to Pillow defaults # https://pillow.readthedocs.io/en/stable/installation.html#external-libraries + VARIANTS_IN_SETUP_CFG = ( + "zlib", + "jpeg", + "tiff", + "freetype", + "lcms", + "webp", + "webpmux", + "jpeg2000", + "imagequant", + "xcb", + ) variant("zlib", default=True, description="Compressed PNG functionality") variant("jpeg", default=True, description="JPEG functionality") variant("tiff", default=False, description="Compressed TIFF functionality") @@ -79,8 +91,7 @@ def variant_to_cfg(variant): with open("setup.cfg", "a") as setup: setup.write("[build_ext]\n") - variants = list(self.spec.variants) - for variant in variants: + for variant in self.VARIANTS_IN_SETUP_CFG: setup.write(variant_to_cfg(variant)) setup.write("rpath={0}\n".format(":".join(self.rpath))) diff --git a/var/spack/repos/builtin/packages/py-progressbar33/package.py b/var/spack/repos/builtin/packages/py-progressbar33/package.py new file mode 100644 index 00000000000..1a4b6adb114 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-progressbar33/package.py @@ -0,0 +1,18 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +from spack.package import * + + +class PyProgressbar33(PythonPackage): + """Text progress bar library for Python""" + + homepage = "https://github.com/germangh/python-progressbar" + pypi = "progressbar33/progressbar33-2.4.tar.gz" + + version("2.4", sha256="51fe0d9b3b4023db2f983eeccdfc8c9846b84db8443b9bee002c7f58f4376eff") + + depends_on("py-setuptools", type="build") diff --git a/var/spack/repos/builtin/packages/py-prompt-toolkit/package.py b/var/spack/repos/builtin/packages/py-prompt-toolkit/package.py index d4e9a04c418..b15830d6d94 100644 --- a/var/spack/repos/builtin/packages/py-prompt-toolkit/package.py +++ b/var/spack/repos/builtin/packages/py-prompt-toolkit/package.py @@ -37,6 +37,7 @@ class PyPromptToolkit(PythonPackage): "prompt_toolkit.clipboard", ] + version("3.0.31", sha256="9ada952c9d1787f52ff6d5f3484d0b4df8952787c087edf6a1f7c2cb1ea88148") version("3.0.29", sha256="bd640f60e8cecd74f0dc249713d433ace2ddc62b65ee07f96d358e0b152b6ea7") version("3.0.24", sha256="1bb05628c7d87b645974a1bad3f17612be0c29fa39af9f7688030163f680bad6") version("3.0.17", sha256="9397a7162cf45449147ad6042fa37983a081b8a73363a5253dd4072666333137") diff --git a/var/spack/repos/builtin/packages/py-protobuf/package.py b/var/spack/repos/builtin/packages/py-protobuf/package.py index 28b99194be2..ea263f13fc6 100644 --- a/var/spack/repos/builtin/packages/py-protobuf/package.py +++ b/var/spack/repos/builtin/packages/py-protobuf/package.py @@ -19,6 +19,7 @@ class PyProtobuf(PythonPackage): variant("cpp", default=False, description="Enable the cpp implementation") + version("4.21.7", sha256="71d9dba03ed3432c878a801e2ea51e034b0ea01cf3a4344fb60166cb5f6c8757") version("4.21.5", sha256="eb1106e87e095628e96884a877a51cdb90087106ee693925ec0a300468a9be3a") version( "3.20.1", @@ -63,12 +64,16 @@ class PyProtobuf(PythonPackage): deprecated=True, ) version("3.0.0", sha256="ecc40bc30f1183b418fe0ec0c90bc3b53fa1707c4205ee278c6b90479e5b6ff5") - 
version("3.0.0b2", sha256="d5b560bbc4b7d97cc2455c05cad9299d9db02d7bd11193b05684e3a86303c229") - version("3.0.0a3", sha256="b61622de5048415bfd3f2d812ad64606438ac9e25009ae84191405fe58e522c1") - version("2.6.1", sha256="8faca1fb462ee1be58d00f5efb4ca4f64bde92187fe61fde32615bbee7b3e745") - version("2.5.0", sha256="58292c459598c9297258bf57acc055f701c727f0154a86af8c0947dde37d8172") - version("2.4.1", sha256="df30b98acb6ef892da8b4776175510cff2131908fd0526b6bad960c55a830a1b") - version("2.3.0", sha256="374bb047874a506507912c3717d0ce62affbaa9a22bcb494d63d60326a0867b5") + version( + "3.0.0b2", + sha256="d5b560bbc4b7d97cc2455c05cad9299d9db02d7bd11193b05684e3a86303c229", + deprecated=True, + ) + version( + "3.0.0a3", + sha256="b61622de5048415bfd3f2d812ad64606438ac9e25009ae84191405fe58e522c1", + deprecated=True, + ) depends_on("python@3.5:", when="@3.18:", type=("build", "run")) depends_on("python@3.7:", when="@3.20:", type=("build", "run")) @@ -84,12 +89,6 @@ class PyProtobuf(PythonPackage): # Handle the 3.x series releases for ver in list(range(1, 8)) + list(range(9, 21)): depends_on("protobuf@3." + str(ver), when="+cpp @3." + str(ver)) - # Handle the 2.x series releases - for ver in list(range(3, 7)): - if ver == 5: - depends_on("protobuf@2." + str(ver), when="+cpp @2." + str(ver)) - else: - conflicts("+cpp", when="@2." 
+ str(ver)) @property def build_directory(self): diff --git a/var/spack/repos/builtin/packages/py-psutil/package.py b/var/spack/repos/builtin/packages/py-psutil/package.py index 8b401532326..c839f4354e3 100644 --- a/var/spack/repos/builtin/packages/py-psutil/package.py +++ b/var/spack/repos/builtin/packages/py-psutil/package.py @@ -11,8 +11,10 @@ class PyPsutil(PythonPackage): running processes and system utilization (CPU, memory, disks, network) in Python.""" + homepage = "https://github.com/giampaolo/psutil" pypi = "psutil/psutil-5.6.3.tar.gz" + version("5.9.2", sha256="feb861a10b6c3bb00701063b37e4afc754f8217f0f09c42280586bd6ac712b5c") version("5.8.0", sha256="0c9ccb99ab76025f2f0bbecf341d4656e9c1351db8cc8a03ccd62e318ab4b5c6") version("5.7.2", sha256="90990af1c3c67195c44c9a889184f84f5b2320dce3ee3acbd054e3ba0b4a7beb") version("5.6.3", sha256="863a85c1c0a5103a12c05a35e59d336e1d665747e531256e061213e2e90f63f3") @@ -22,4 +24,5 @@ class PyPsutil(PythonPackage): version("5.0.1", sha256="9d8b7f8353a2b2eb6eb7271d42ec99d0d264a9338a37be46424d56b4e473b39e") depends_on("python@2.6:2.8,3.4:", type=("build", "run")) + depends_on("python@2.7:2.8,3.4:", when="@5.9.2:", type=("build", "run")) depends_on("py-setuptools", type="build") diff --git a/var/spack/repos/builtin/packages/py-pybind11/package.py b/var/spack/repos/builtin/packages/py-pybind11/package.py index b8a234c6682..d50bdeef8e2 100644 --- a/var/spack/repos/builtin/packages/py-pybind11/package.py +++ b/var/spack/repos/builtin/packages/py-pybind11/package.py @@ -2,13 +2,14 @@ # Spack Project Developers. See the top-level COPYRIGHT file for details. # # SPDX-License-Identifier: (Apache-2.0 OR MIT) - import os +import spack.build_systems.cmake +import spack.build_systems.python from spack.package import * -class PyPybind11(CMakePackage, PythonPackage): +class PyPybind11(CMakePackage, PythonExtension): """pybind11 -- Seamless operability between C++11 and Python. 
pybind11 is a lightweight header-only library that exposes C++ types in @@ -16,7 +17,8 @@ class PyPybind11(CMakePackage, PythonPackage): code. Its goals and syntax are similar to the excellent Boost.Python library by David Abrahams: to minimize boilerplate code in traditional extension modules by inferring type information using compile-time - introspection.""" + introspection. + """ homepage = "https://pybind11.readthedocs.io" url = "https://github.com/pybind/pybind11/archive/v2.6.2.tar.gz" @@ -26,6 +28,7 @@ class PyPybind11(CMakePackage, PythonPackage): version("master", branch="master") version("2.10.0", sha256="eacf582fa8f696227988d08cfc46121770823839fe9e301a20fbce67e7cd70ec") + version("2.9.2", sha256="6bd528c4dbe2276635dc787b6b1f2e5316cf6b49ee3e150264e455a0d68d19c1") version("2.9.1", sha256="c6160321dc98e6e1184cc791fbeadd2907bb4a0ce0e447f2ea4ff8ab56550913") version("2.9.0", sha256="057fb68dafd972bc13afb855f3b0d8cf0fa1a78ef053e815d9af79be7ff567cb") version("2.8.1", sha256="f1bcc07caa568eb312411dde5308b1e250bd0e1bc020fae855bf9f43209940cc") @@ -45,30 +48,25 @@ class PyPybind11(CMakePackage, PythonPackage): version("2.1.1", sha256="f2c6874f1ea5b4ad4ffffe352413f7d2cd1a49f9050940805c2a082348621540") version("2.1.0", sha256="2860f2b8d0c9f65f0698289a161385f59d099b7ead1bf64e8993c486f2b93ee0") - depends_on("ninja", type="build") depends_on("py-setuptools@42:", type="build") depends_on("py-pytest", type="test") depends_on("python@2.7:2.8,3.5:", type=("build", "run")) depends_on("python@3.6:", when="@2.10.0:", type=("build", "run")) - depends_on("cmake@3.13:", type="build") - depends_on("cmake@3.18:", type="build", when="@2.6.0:") + + depends_on("py-pip", type="build") + depends_on("py-wheel", type="build") + extends("python") + + with when("build_system=cmake"): + depends_on("ninja", type="build") + depends_on("cmake@3.13:", type="build") + depends_on("cmake@3.18:", type="build", when="@2.6.0:") # compiler support conflicts("%gcc@:4.7") conflicts("%clang@:3.2") 
conflicts("%intel@:16") - build_directory = "." - - def cmake_args(self): - args = [] - args.append("-DPYTHON_EXECUTABLE:FILEPATH=%s" % self.spec["python"].command.path) - args += [self.define("PYBIND11_TEST", self.run_tests)] - return args - - def setup_build_environment(self, env): - env.set("PYBIND11_USE_CMAKE", 1) - # https://github.com/pybind/pybind11/pull/1995 @when("@:2.4") def patch(self): @@ -80,13 +78,27 @@ def patch(self): string=True, ) - def install(self, spec, prefix): - CMakePackage.install(self, spec, prefix) - PythonPackage.install(self, spec, prefix) + +class CMakeBuilder(spack.build_systems.cmake.CMakeBuilder): + def cmake_args(self): + return [ + self.define("PYTHON_EXECUTABLE:FILEPATH", self.spec["python"].command.path), + self.define("PYBIND11_TEST", self.pkg.run_tests), + ] + + def install(self, pkg, spec, prefix): + super(CMakeBuilder, self).install(pkg, spec, prefix) + python_builder = spack.build_systems.python.PythonPipBuilder(pkg) + python_builder.install(pkg, spec, prefix) + + def setup_build_environment(self, env): + env.set("PYBIND11_USE_CMAKE", 1) @run_after("install") - @on_package_attributes(run_tests=True) def install_test(self): + if not self.pkg.run_tests: + return + with working_dir("spack-test", create=True): # test include helper points to right location python = self.spec["python"].command diff --git a/var/spack/repos/builtin/packages/py-pyfr/package.py b/var/spack/repos/builtin/packages/py-pyfr/package.py index 3e1d094c218..504f0e7e6bf 100644 --- a/var/spack/repos/builtin/packages/py-pyfr/package.py +++ b/var/spack/repos/builtin/packages/py-pyfr/package.py @@ -57,7 +57,7 @@ class PyPyfr(PythonPackage, CudaPackage, ROCmPackage): # Optional dependencies depends_on("py-scipy", when="+scipy", type=("build", "run")) depends_on("metis@5.0:", when="+metis", type=("run")) - depends_on("scotch@6.0:", when="+scotch", type=("run")) + depends_on("scotch@7.0.1: +link_error_lib", when="+scotch", type=("run")) depends_on("cuda@8.0: 
+allow-unsupported-compilers", when="@:1.14.0 +cuda", type=("run")) depends_on("cuda@11.4.0: +allow-unsupported-compilers", when="@1.15.0: +cuda", type=("run")) depends_on("rocblas@5.2.0:", when="+hip", type=("run")) diff --git a/var/spack/repos/builtin/packages/py-pykml/package.py b/var/spack/repos/builtin/packages/py-pykml/package.py index ce2df0cb23f..fabaf8136d7 100644 --- a/var/spack/repos/builtin/packages/py-pykml/package.py +++ b/var/spack/repos/builtin/packages/py-pykml/package.py @@ -14,7 +14,10 @@ class PyPykml(PythonPackage): pypi = "pykml/pykml-0.1.3.tar.gz" + version("0.2.0", sha256="44a1892e7c2a649c8ae9f8e2899ff76cae79ec6749ffb64d11140b4e87d0f957") version("0.1.3", sha256="e1a133e582f0b4652a6b00bac970b446d90580664e5a634a670731c990ff9f05") + depends_on("python@2", type=("build", "run"), when="@0.1") depends_on("py-setuptools", type="build") + depends_on("py-lxml@3.3.6:", type=("build", "run"), when="@0.2.0:") depends_on("py-lxml@2.2.6:", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-pykokkos-base/package.py b/var/spack/repos/builtin/packages/py-pykokkos-base/package.py index 5d3427b4bb9..f9b02c9ae9d 100644 --- a/var/spack/repos/builtin/packages/py-pykokkos-base/package.py +++ b/var/spack/repos/builtin/packages/py-pykokkos-base/package.py @@ -8,7 +8,7 @@ from spack.package import * -class PyPykokkosBase(CMakePackage, PythonPackage): +class PyPykokkosBase(CMakePackage, PythonExtension): """Minimal set of bindings for Kokkos interoperability with Python (initialize, finalize, View, DynRankView, Kokkos-tools)""" diff --git a/var/spack/repos/builtin/packages/py-pylatex/package.py b/var/spack/repos/builtin/packages/py-pylatex/package.py new file mode 100644 index 00000000000..c55cf6c870b --- /dev/null +++ b/var/spack/repos/builtin/packages/py-pylatex/package.py @@ -0,0 +1,35 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PyPylatex(PythonPackage): + """A Python library for creating LaTeX files and snippets""" + + homepage = "https://github.com/JelteF/PyLaTeX" + pypi = "PyLaTeX/PyLaTeX-1.4.1.tar.gz" + + version("1.4.1", sha256="d3c12efb8b260771260443dce78d1e9089c09f9d0b92e6273dfca0bf5e7302fb") + + variant("docs", default=False, description="Build with Sphinx support for documentation") + variant("matrices", default=False, description="Build with matrix support") + variant("matplotlib", default=False, description="Build with matplotlib support") + variant("quantities", default=False, description="Build with quantities support") + + depends_on("python@2.7,3.3:", type=("build", "run")) + depends_on("py-setuptools", type="build") + depends_on("py-setuptools@:57", type="build", when="@:1.4.1") + depends_on("py-ordered-set", type=("build", "run")) + + # from extras section in setup.py + depends_on("py-future@0.15.2:", type=("build", "run"), when="^python@:2") + depends_on("py-sphinx", type="run", when="+docs") + depends_on("py-matplotlib", type="run", when="+matplotlib") + depends_on("py-numpy", type="run", when="+matrices") + depends_on("py-numpy", type="run", when="+quantities") + depends_on("py-quantities", type="run", when="+quantities") + + depends_on("texlive", type="run") diff --git a/var/spack/repos/builtin/packages/py-pylint/package.py b/var/spack/repos/builtin/packages/py-pylint/package.py index 60f3a67a535..675f292e8c8 100644 --- a/var/spack/repos/builtin/packages/py-pylint/package.py +++ b/var/spack/repos/builtin/packages/py-pylint/package.py @@ -25,6 +25,7 @@ class PyPylint(PythonPackage): "pylint.reporters.ureports", ] + version("2.15.0", sha256="4f3f7e869646b0bd63b3dfb79f3c0f28fc3d2d923ea220d52620fd625aed92b0") version("2.14.4", sha256="47705453aa9dce520e123a7d51843d5f0032cbfa06870f89f00927aa1f735a4a") version("2.13.5", 
sha256="dab221658368c7a05242e673c275c488670144123f4bd262b2777249c1c0de9b") version("2.11.1", sha256="2c9843fff1a88ca0ad98a256806c82c5a8f86086e7ccbdb93297d86c3f90c436") @@ -43,33 +44,35 @@ class PyPylint(PythonPackage): depends_on("python@3.6:", when="@2.8.2:", type=("build", "run")) depends_on("python@3.6.2:", when="@2.13.5:", type=("build", "run")) depends_on("python@3.7.2:", when="@2.14.0:", type=("build", "run")) + depends_on("py-setuptools-scm", when="@2.8.2", type="build") + depends_on("py-setuptools@17.1:", type="build") + depends_on("py-setuptools@62.6:62", when="@2.15.0:", type="build") + depends_on("py-wheel@0.37.1:0.37", when="@2.15.0:", type="build") + depends_on("py-dill@0.2:", when="@2.13.5:", type=("build", "run")) + depends_on("py-platformdirs@2.2.0:", when="@2.11.1:", type=("build", "run")) depends_on("py-astroid", type=("build", "run")) # note there is no working version of astroid for this - depends_on("py-astroid@1.5.1:", type=("build", "run"), when="@1.7:") - depends_on("py-astroid@1.6:1.9", type=("build", "run"), when="@1.9.4") - depends_on("py-astroid@2.0:", type=("build", "run"), when="@2.2.0:") - depends_on("py-astroid@2.2.0:2", type=("build", "run"), when="@2.3.0:2.7") - depends_on("py-astroid@2.5.6:2.6", type=("build", "run"), when="@2.8.0:2.10") - depends_on("py-astroid@2.8.0:2.8", type=("build", "run"), when="@2.11.1") - depends_on("py-astroid@2.11.2:2.11", type=("build", "run"), when="@2.13.5:") - depends_on("py-astroid@2.11.6:2.11", type=("build", "run"), when="@2.14.2:") - depends_on("py-backports-functools-lru-cache", when="^python@:2.8", type=("build", "run")) - depends_on("py-configparser", when="^python@:2.8", type=("build", "run")) - depends_on("py-dill@0.2:", when="@2.13.5:", type=("build", "run")) - depends_on("py-editdistance", type=("build", "run"), when="@:1.7") + depends_on("py-astroid@1.5.1:", when="@1.7:", type=("build", "run")) + depends_on("py-astroid@1.6:1.9", when="@1.9.4", type=("build", "run")) + 
depends_on("py-astroid@2.0:", when="@2.2.0:", type=("build", "run")) + depends_on("py-astroid@2.2.0:2", when="@2.3.0:2.7", type=("build", "run")) + depends_on("py-astroid@2.5.6:2.6", when="@2.8.0:2.10", type=("build", "run")) + depends_on("py-astroid@2.8.0:2.8", when="@2.11.1", type=("build", "run")) + depends_on("py-astroid@2.11.2:2.11", when="@2.13.5:2.13", type=("build", "run")) + depends_on("py-astroid@2.11.6:2.11", when="@2.14.2:2.14", type=("build", "run")) + depends_on("py-astroid@2.12.4:2.13", when="@2.15.0:", type=("build", "run")) depends_on("py-isort@4.2.5:", type=("build", "run")) depends_on("py-isort@4.2.5:5", when="@2.3.1:", type=("build", "run")) depends_on("py-mccabe", type=("build", "run")) depends_on("py-mccabe@0.6.0:0.6", when="@2.3.1:2.11", type=("build", "run")) depends_on("py-mccabe@0.6.0:0.7", when="@2.13:", type=("build", "run")) - depends_on("py-pip", type=("build")) # see https://github.com/spack/spack/issues/27075 - # depends_on('py-setuptools-scm@1.15.0:', type='build') - depends_on("py-setuptools-scm", type="build", when="@2.8.2") - depends_on("py-setuptools@17.1:", type="build") + depends_on("py-tomli@1.1.0:", when="@2.13.5: ^python@:3.10", type=("build", "run")) + depends_on("py-tomlkit@0.10.1:", when="@2.14.0:", type=("build", "run")) + depends_on("py-colorama@0.4.5:", when="platform=windows", type=("build", "run")) + depends_on("py-typing-extensions@3.10.0:", when="@2.11.1: ^python@:3.9", type=("build", "run")) + depends_on("py-backports-functools-lru-cache", when="^python@:2.8", type=("build", "run")) + depends_on("py-configparser", when="^python@:2.8", type=("build", "run")) + depends_on("py-editdistance", when="@:1.7", type=("build", "run")) depends_on("py-singledispatch", when="^python@:3.3", type=("build", "run")) - depends_on("py-six", type=("build", "run"), when="@1:2.3.1") - depends_on("py-toml@0.7.1:", type=("build", "run"), when="@2.8.2:2.12.2") - depends_on("py-tomli@1.1.0:", type=("build", "run"), when="@2.13.5: 
^python@:3.10") - depends_on("py-tomlkit@0.10.1:", type=("build", "run"), when="@2.14.0:") - depends_on("py-platformdirs@2.2.0:", type=("build", "run"), when="@2.11.1:") - depends_on("py-typing-extensions@3.10.0:", type=("build", "run"), when="@2.11.1: ^python@:3.9") + depends_on("py-six", when="@1:2.3.1", type=("build", "run")) + depends_on("py-toml@0.7.1:", when="@2.8.2:2.12.2", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-pymongo/package.py b/var/spack/repos/builtin/packages/py-pymongo/package.py index 4505acb9035..501484d1e38 100644 --- a/var/spack/repos/builtin/packages/py-pymongo/package.py +++ b/var/spack/repos/builtin/packages/py-pymongo/package.py @@ -17,10 +17,12 @@ class PyPymongo(PythonPackage): pypi = "pymongo/pymongo-3.9.0.tar.gz" + version("4.2.0", sha256="72f338f6aabd37d343bd9d1fdd3de921104d395766bcc5cdc4039e4c2dd97766") version("3.12.1", sha256="704879b6a54c45ad76cea7c6789c1ae7185050acea7afd15b58318fa1932ed45") version("3.9.0", sha256="4249c6ba45587b959292a727532826c5032d59171f923f7f823788f413c2a5a3") version("3.6.0", sha256="c6de26d1e171cdc449745b82f1addbc873d105b8e7335097da991c0fc664a4a8") version("3.3.0", sha256="3d45302fc2622fabf34356ba274c69df41285bac71bbd229f1587283b851b91e") depends_on("python@2.7:2.8,3.4:", type=("build", "run")) + depends_on("python@3.7:", when="@4.2.0:", type=("build", "run")) depends_on("py-setuptools", type="build") diff --git a/var/spack/repos/builtin/packages/py-pyopenssl/package.py b/var/spack/repos/builtin/packages/py-pyopenssl/package.py index 6a049cb80fe..df5d0257594 100644 --- a/var/spack/repos/builtin/packages/py-pyopenssl/package.py +++ b/var/spack/repos/builtin/packages/py-pyopenssl/package.py @@ -17,8 +17,11 @@ class PyPyopenssl(PythonPackage): homepage = "https://pyopenssl.org/" pypi = "pyOpenSSL/pyOpenSSL-19.0.0.tar.gz" + version("22.1.0", sha256="7a83b7b272dd595222d672f5ce29aa030f1fb837630ef229f62e72e395ce8968") version("19.0.0", 
sha256="aeca66338f6de19d1aa46ed634c3b9ae519a64b458f8468aec688e7e3c20f200") depends_on("py-setuptools", type="build") depends_on("py-cryptography@2.3:", type=("build", "run")) - depends_on("py-six@1.5.2:", type=("build", "run")) + depends_on("py-cryptography@38", when="@22:", type=("build", "run")) + depends_on("python@3.6:", when="@22:", type=("build", "run")) + depends_on("py-six@1.5.2:", when="@:19", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-pysam/package.py b/var/spack/repos/builtin/packages/py-pysam/package.py index 4b91d188a35..f2df9eb37aa 100644 --- a/var/spack/repos/builtin/packages/py-pysam/package.py +++ b/var/spack/repos/builtin/packages/py-pysam/package.py @@ -13,6 +13,7 @@ class PyPysam(PythonPackage): homepage = "https://github.com/pysam-developers/pysam" pypi = "pysam/pysam-0.14.1.tar.gz" + version("0.19.1", sha256="dee403cbdf232170c1e11cc24c76e7dd748fc672ad38eb0414f3b9d569b1448f") version("0.18.0", sha256="1d6d49a0b3c626fae410a93d4c80583a8b5ddaacc9b46a080b250dbcebd30a59") version("0.15.3", sha256="a98dd0a164aa664b1ab30a36f653752f00e93c13deeb66868597f4b2a30f7265") version("0.15.2", sha256="d049efd91ed5b1af515aa30280bc9cb46a92ddd15d546c9b21ee68a6ed4055d9") diff --git a/var/spack/repos/builtin/packages/py-pyspoa/package.py b/var/spack/repos/builtin/packages/py-pyspoa/package.py new file mode 100644 index 00000000000..c351b0fc9ab --- /dev/null +++ b/var/spack/repos/builtin/packages/py-pyspoa/package.py @@ -0,0 +1,20 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +from spack.package import * + + +class PyPyspoa(PythonPackage): + """Python bindings to spoa""" + + homepage = "https://github.com/nanoporetech/pyspoa" + pypi = "pyspoa/pyspoa-0.0.8.tar.gz" + + version("0.0.8", sha256="8299d18066b498a6ef294c5a33a99266ded06eeb022f67488d2caecba974b0a4") + + depends_on("py-setuptools", type="build") + depends_on("cmake@3.18.4", type="build") + depends_on("py-pybind11@2.4:", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-pythran/package.py b/var/spack/repos/builtin/packages/py-pythran/package.py index 8a05c43e065..eba40ccaf38 100644 --- a/var/spack/repos/builtin/packages/py-pythran/package.py +++ b/var/spack/repos/builtin/packages/py-pythran/package.py @@ -5,6 +5,8 @@ import sys +import llnl.util.filesystem as fs + from spack.package import * @@ -54,6 +56,13 @@ class PyPythran(PythonPackage): # https://github.com/serge-sans-paille/pythran/issues/1937 conflicts("%apple-clang@13:", when="@:0.10") + @property + def headers(self): + # Pythran is mainly meant to be used as a compiler, so return no headers to + # avoid issue https://github.com/spack/spack/issues/33237 This can be refined + # later to allow using pythran also as a library. 
+ return fs.HeaderList([]) + def patch(self): # Compiler is used at run-time to determine name of OpenMP library to search for cfg_file = join_path("pythran", "pythran-{0}.cfg".format(sys.platform)) diff --git a/var/spack/repos/builtin/packages/py-pytorch-lightning/package.py b/var/spack/repos/builtin/packages/py-pytorch-lightning/package.py index 369cc7dd5ac..5702b205095 100644 --- a/var/spack/repos/builtin/packages/py-pytorch-lightning/package.py +++ b/var/spack/repos/builtin/packages/py-pytorch-lightning/package.py @@ -14,6 +14,7 @@ class PyPytorchLightning(PythonPackage): maintainers = ["adamjstewart"] + version("1.8.0", sha256="deff9bc7978ecebc8f45e881adef65dc8d9f4554e88c3b064f80587f32ab158d") version("1.7.7", sha256="27c2dd01a18db2415168e3fa3775ccb5a1fa1e2961a50439ad9365507fe9d4ae") version("1.7.6", sha256="93266c83f8340c100e41b3777bbab26dd2c20b4df3deccce3b8a15652326b9c8") version("1.7.5", sha256="a5838ae990f0eef9a894fa863be3bc1f5594d2abba7848fb21317ba3e885d7cd") @@ -34,21 +35,26 @@ class PyPytorchLightning(PythonPackage): version("1.3.8", sha256="60b0a3e464d394864dae4c8d251afa7aa453644a19bb7672f5ee400343cdf7b0") version("1.2.10", sha256="2d8365e30ded0c20e73ce6e5b6028478ae460b8fd33727df2275666df005a301") + # src/pytorch_lightning/__setup__.py depends_on("python@3.7:", when="@1.6:", type=("build", "run")) depends_on("python@3.6:", when="@:1.5", type=("build", "run")) depends_on("py-setuptools", type="build") + + # requirements/pytorch/base.txt depends_on("py-numpy@1.17.2:", when="@1.3:", type=("build", "run")) depends_on("py-numpy@1.16.6:", when="@:1.2", type=("build", "run")) depends_on("py-torch@1.9:", when="@1.7:", type=("build", "run")) depends_on("py-torch@1.8:", when="@1.6:", type=("build", "run")) depends_on("py-torch@1.6:", when="@1.4:1.5", type=("build", "run")) depends_on("py-torch@1.4:", when="@:1.3", type=("build", "run")) + depends_on("py-lightning-lite@1.8.0", when="@1.8.0", type=("build", "run")) depends_on("py-tqdm@4.57.0:", when="@1.6.3:", 
type=("build", "run")) depends_on("py-tqdm@4.41.0:", when="@:1.6.2", type=("build", "run")) depends_on("py-pyyaml@5.4:", when="@1.6:", type=("build", "run")) depends_on("py-pyyaml@5.1:", when="@1.4:1.5", type=("build", "run")) depends_on("py-pyyaml@5.1:5.4.1", when="@1.3", type=("build", "run")) depends_on("py-pyyaml@5.1:5.3,5.5:", when="@:1.2", type=("build", "run")) + depends_on("py-fsspec@2021.06.1:+http", when="@1.8:", type=("build", "run")) depends_on("py-fsspec@2021.05.0:2021.05,2021.06.1:+http", when="@1.3:", type=("build", "run")) depends_on("py-fsspec@0.8.1:+http", when="@:1.2", type=("build", "run")) depends_on("py-tensorboard@2.9.1:", when="@1.7:", type=("build", "run")) @@ -59,14 +65,22 @@ class PyPytorchLightning(PythonPackage): depends_on("py-torchmetrics@0.4.0:", when="@1.4", type=("build", "run")) depends_on("py-torchmetrics@0.2.0:", when="@1.3", type=("build", "run")) depends_on("py-torchmetrics@0.2.0", when="@:1.2", type=("build", "run")) - depends_on("py-pydeprecate@0.3.1:", when="@1.6.4:", type=("build", "run")) - depends_on("py-pydeprecate@0.3.1:0.3", when="@1.6:1.6.3", type=("build", "run")) - depends_on("py-pydeprecate@0.3.1", when="@1.4:1.5", type=("build", "run")) - depends_on("py-pydeprecate@0.3.0", when="@1.3", type=("build", "run")) depends_on("py-packaging@17.0:", when="@1.3:", type=("build", "run")) depends_on("py-packaging", when="@:1.2", type=("build", "run")) depends_on("py-typing-extensions@4.0.0:", when="@1.6:", type=("build", "run")) depends_on("py-typing-extensions", when="@1.4:1.5", type=("build", "run")) + depends_on("py-lightning-utilities@0.3", when="@1.8:", type=("build", "run")) + + # Historical dependencies depends_on("py-future@0.17.1:", when="@:1.5", type=("build", "run")) depends_on("pil@:8.2,8.3.1:", when="@1.3", type=("build", "run")) depends_on("py-protobuf@:3.20.1", when="@1.6.4:1.6", type="build") + depends_on("py-pydeprecate@0.3.1:", when="@1.6.4:1.7", type=("build", "run")) + 
depends_on("py-pydeprecate@0.3.1:0.3", when="@1.6:1.6.3", type=("build", "run")) + depends_on("py-pydeprecate@0.3.1", when="@1.4:1.5", type=("build", "run")) + depends_on("py-pydeprecate@0.3.0", when="@1.3", type=("build", "run")) + + # https://github.com/Lightning-AI/lightning/issues/15494 + conflicts("^py-torch~distributed", when="@1.8.0") + # https://github.com/Lightning-AI/lightning/issues/10348 + conflicts("^py-torch~distributed", when="@1.5.0:1.5.2") diff --git a/var/spack/repos/builtin/packages/py-rasterio/package.py b/var/spack/repos/builtin/packages/py-rasterio/package.py index e81bd6a217d..7eddb86cba5 100644 --- a/var/spack/repos/builtin/packages/py-rasterio/package.py +++ b/var/spack/repos/builtin/packages/py-rasterio/package.py @@ -20,6 +20,7 @@ class PyRasterio(PythonPackage): maintainers = ["adamjstewart"] version("master", branch="master") + version("1.3.3", sha256="b6fb1f12489f3a678c05ddcb78a74f0b6f63836219f51c0541e505f5e5208e7d") version("1.3.2", sha256="a91b32f649bc5aa3259909349258eb7999b7e830375f63cd37ade2082066ec1c") version("1.3.1", sha256="91a22c512862e6411def675cd864eb63000ec2e0922c8bf25834c631ba80bdc1") version("1.3.0", sha256="90171035e5b201cdb85a9abd60181426366040d4ca44706958db982a030f8dc4") diff --git a/var/spack/repos/builtin/packages/py-ray/package.py b/var/spack/repos/builtin/packages/py-ray/package.py index 3f6de85cdbd..a7b07f6f190 100644 --- a/var/spack/repos/builtin/packages/py-ray/package.py +++ b/var/spack/repos/builtin/packages/py-ray/package.py @@ -8,48 +8,76 @@ class PyRay(PythonPackage): - """A system for parallel and distributed Python that unifies the ML - ecosystem.""" + """Ray provides a simple, universal API for building distributed applications.""" homepage = "https://github.com/ray-project/ray" url = "https://github.com/ray-project/ray/archive/ray-0.8.7.tar.gz" + version("2.0.1", sha256="b8b2f0a99d2ac4c001ff11c78b4521b217e2a02df95fb6270fd621412143f28b") version("0.8.7", 
sha256="2df328f1bcd3eeb4fa33119142ea0d669396f4ab2a3e78db90178757aa61534b") - build_directory = "python" + variant("default", default=False, description="Install default extras", when="@2.0.1") - depends_on("python@3.6:3.8", type=("build", "run")) - depends_on("bazel@3.2.0", type="build") - depends_on("py-setuptools", type="build") - depends_on("py-cython@0.29.14:", type="build") - depends_on("py-wheel", type="build") + depends_on("python@3.6:3.10", when="@2.0.1", type=("build", "run")) + depends_on("python@3.6:3.8", when="@0.8.7", type=("build", "run")) + depends_on("bazel@4.2.2", when="@2.0.1", type="build") + depends_on("bazel@3.2.0", when="@0.8.7", type="build") depends_on("npm", type="build") - depends_on("py-aiohttp", type=("build", "run")) - depends_on("py-aioredis", type=("build", "run")) - depends_on("py-click@7.0:", type=("build", "run")) - depends_on("py-colorama", type=("build", "run")) - depends_on("py-colorful", type=("build", "run")) + depends_on("py-setuptools", type="build") + depends_on("py-cython@0.29.26:", when="@2.0.1", type="build") + depends_on("py-cython@0.29.14:", when="@0.8.7", type="build") + depends_on("py-attrs", when="@2.0.1", type=("build", "run")) + depends_on("py-click@7:8.0.4", when="@2.0.1", type=("build", "run")) + depends_on("py-click@7.0:", when="@0.8.7", type=("build", "run")) + depends_on("py-dataclasses", when="@2.0.1 ^python@:3.6", type=("build", "run")) depends_on("py-filelock", type=("build", "run")) - depends_on("py-google", type=("build", "run")) - depends_on("py-gpustat", type=("build", "run")) - depends_on("py-grpcio@1.28.1:", type=("build", "run")) + depends_on("py-grpcio@1.32:1.43.0", when="@2.0.1 ^python@:3.9", type=("build", "run")) + depends_on("py-grpcio@1.42:1.43.0", when="@2.0.1 ^python@3.10:", type=("build", "run")) + depends_on("py-grpcio@1.28.1:", when="@0.8.7", type=("build", "run")) depends_on("py-jsonschema", type=("build", "run")) - depends_on("py-msgpack@1.0:1", type=("build", "run")) - 
depends_on("py-numpy@1.16:", type=("build", "run")) - depends_on("py-protobuf@3.8.0:", type=("build", "run")) - depends_on("py-py-spy@0.2.0:", type=("build", "run")) + depends_on("py-msgpack@1", type=("build", "run")) + depends_on("py-numpy@1.16:", when="^python@:3.8", type=("build", "run")) + depends_on("py-numpy@1.19.3:", when="^python@3.9:", type=("build", "run")) + depends_on("py-protobuf@3.15.3:3", when="@2.0.1", type=("build", "run")) + depends_on("py-protobuf@3.8.0:", when="@0.8.7", type=("build", "run")) depends_on("py-pyyaml", type=("build", "run")) + depends_on("py-frozenlist", when="@2.0.1", type=("build", "run")) depends_on("py-requests", type=("build", "run")) - depends_on("py-redis@3.3.2:3.4", type=("build", "run")) - depends_on("py-opencensus", type=("build", "run")) - depends_on("py-prometheus-client@0.7.1:", type=("build", "run")) - # If not guarded by SKIP_THIRDPARTY_INSTALL, those dependencies - # would be automatically installed via pip by the setup.py script. - depends_on("py-setproctitle", type=("build", "run")) - depends_on("py-psutil", type=("build", "run")) - # If not detected during install, the following dependency would - # be automatically downloaded and installed by the setup.py script. 
- depends_on("py-pickle5", when="^python@:3.8.1", type=("build", "run")) + depends_on("py-typing-extensions", when="@2.0.1 ^python@:3.7", type=("build", "run")) + depends_on("py-virtualenv", when="@2.0.1", type=("build", "run")) + + with when("+default"): + depends_on("py-aiohttp@3.7:", type=("build", "run")) + depends_on("py-aiohttp-cors", type=("build", "run")) + depends_on("py-colorful", type=("build", "run")) + depends_on("py-py-spy@0.2:", type=("build", "run")) + depends_on("py-gpustat@1:", type=("build", "run")) + depends_on("py-opencensus", type=("build", "run")) + depends_on("py-pydantic", type=("build", "run")) + depends_on("py-prometheus-client@0.7.1:0.13", type=("build", "run")) + depends_on("py-smart-open", type=("build", "run")) + + # Historical dependencies + with when("@0.8.7"): + depends_on("py-aiohttp", type=("build", "run")) + depends_on("py-aioredis", type=("build", "run")) + depends_on("py-colorama", type=("build", "run")) + depends_on("py-colorful", type=("build", "run")) + depends_on("py-google", type=("build", "run")) + depends_on("py-gpustat", type=("build", "run")) + depends_on("py-py-spy@0.2.0:", type=("build", "run")) + depends_on("py-redis@3.3.2:3.4", type=("build", "run")) + depends_on("py-opencensus", type=("build", "run")) + depends_on("py-prometheus-client@0.7.1:", type=("build", "run")) + # If not guarded by SKIP_THIRDPARTY_INSTALL, those dependencies + # would be automatically installed via pip by the setup.py script. + depends_on("py-setproctitle", type=("build", "run")) + depends_on("py-psutil", type=("build", "run")) + # If not detected during install, the following dependency would + # be automatically downloaded and installed by the setup.py script. 
+ depends_on("py-pickle5", when="^python@:3.8.1", type=("build", "run")) + + build_directory = "python" def setup_build_environment(self, env): env.set("SKIP_THIRDPARTY_INSTALL", "1") diff --git a/var/spack/repos/builtin/packages/py-regex/package.py b/var/spack/repos/builtin/packages/py-regex/package.py index 494af423ab4..85c3a06b4cc 100644 --- a/var/spack/repos/builtin/packages/py-regex/package.py +++ b/var/spack/repos/builtin/packages/py-regex/package.py @@ -9,8 +9,10 @@ class PyRegex(PythonPackage): """Alternative regular expression module, to replace re.""" + homepage = "https://github.com/mrabarnett/mrab-regex" pypi = "regex/regex-2020.11.13.tar.gz" + version("2022.8.17", sha256="5c77eab46f3a2b2cd8bbe06467df783543bf7396df431eb4a144cc4b89e9fb3c") version( "2020.11.13", sha256="83d6b356e116ca119db8e7c6fc2983289d87b27b3fac238cfe5dca529d884562" ) @@ -25,4 +27,5 @@ class PyRegex(PythonPackage): "2017.07.11", sha256="dbda8bdc31a1c85445f1a1b29d04abda46e5c690f8f933a9cc3a85a358969616" ) - depends_on("py-setuptools", type="build", when="@:2018,2020:") + depends_on("py-setuptools", type="build") + depends_on("python@3.6:", when="@2022.8.17:", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-requests-oauthlib/package.py b/var/spack/repos/builtin/packages/py-requests-oauthlib/package.py index 1bb3dcc6b3f..f96fefb9ea9 100644 --- a/var/spack/repos/builtin/packages/py-requests-oauthlib/package.py +++ b/var/spack/repos/builtin/packages/py-requests-oauthlib/package.py @@ -12,6 +12,7 @@ class PyRequestsOauthlib(PythonPackage): homepage = "https://github.com/requests/requests-oauthlib" pypi = "requests-oauthlib/requests-oauthlib-1.2.0.tar.gz" + version("1.3.1", sha256="75beac4a47881eeb94d5ea5d6ad31ef88856affe2332b9aafb52c6452ccf0d7a") version("1.3.0", sha256="b4261601a71fd721a8bd6d7aa1cc1d6a8a93b4a9f5e96626f8e4d91e8beeaa6a") version("1.2.0", sha256="bd6533330e8748e94bf0b214775fed487d309b8b8fe823dc45641ebcd9a32f57") version("0.3.3", 
sha256="37557b4de3eef50d2a4c65dc9382148b8331f04b1c637c414b3355feb0f007e9") diff --git a/var/spack/repos/builtin/packages/py-scikit-build/package.py b/var/spack/repos/builtin/packages/py-scikit-build/package.py index e51f323c981..1d895c005d0 100644 --- a/var/spack/repos/builtin/packages/py-scikit-build/package.py +++ b/var/spack/repos/builtin/packages/py-scikit-build/package.py @@ -22,6 +22,7 @@ class PyScikitBuild(PythonPackage): version("0.15.0", sha256="e723cd0f3489a042370b9ea988bbb9cfd7725e8b25b20ca1c7981821fcf65fb9") version("0.12.0", sha256="f851382c469bcd9a8c98b1878bcfdd13b68556279d2fd9a329be41956ae5a7fe") + version("0.11.1", sha256="da40dfd69b2456fad1349a894b90180b43712152b8a85d2a00f4ae2ce8ac9a5c") version("0.10.0", sha256="7342017cc82dd6178e3b19377389b8a8d1f8b429d9cdb315cfb1094e34a0f526") depends_on("py-setuptools@28.0.0:", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-scikit-learn/package.py b/var/spack/repos/builtin/packages/py-scikit-learn/package.py index cf91f792914..771e409a6a2 100644 --- a/var/spack/repos/builtin/packages/py-scikit-learn/package.py +++ b/var/spack/repos/builtin/packages/py-scikit-learn/package.py @@ -16,6 +16,7 @@ class PyScikitLearn(PythonPackage): maintainers = ["adamjstewart"] version("master", branch="master") + version("1.1.3", sha256="bef51978a51ec19977700fe7b86aecea49c825884f3811756b74a3b152bb4e35") version("1.1.2", sha256="7c22d1305b16f08d57751a4ea36071e2215efb4c09cb79183faa4e8e82a3dbf8") version("1.1.1", sha256="3e77b71e8e644f86c8b5be7f1c285ef597de4c384961389ee3e9ca36c445b256") version("1.1.0", sha256="80f9904f5b1356adfc32406725dd94c8cc9c8d265047d98390033a6c238cbb29") diff --git a/var/spack/repos/builtin/packages/py-seaborn/package.py b/var/spack/repos/builtin/packages/py-seaborn/package.py index 4d96e464a04..81ff8bf858d 100644 --- a/var/spack/repos/builtin/packages/py-seaborn/package.py +++ b/var/spack/repos/builtin/packages/py-seaborn/package.py @@ -17,26 +17,40 @@ class PySeaborn(PythonPackage): 
homepage = "https://seaborn.pydata.org/" pypi = "seaborn/seaborn-0.7.1.tar.gz" + version("0.12.0", sha256="893f17292d8baca616c1578ddb58eb25c72d622f54fc5ee329c8207dc9b57b23") version("0.11.2", sha256="cf45e9286d40826864be0e3c066f98536982baf701a7caa386511792d61ff4f6") version("0.11.1", sha256="44e78eaed937c5a87fc7a892c329a7cc091060b67ebd1d0d306b446a74ba01ad") version("0.9.0", sha256="76c83f794ca320fb6b23a7c6192d5e185a5fcf4758966a0c0a54baee46d41e2f") version("0.7.1", sha256="fa274344b1ee72f723bab751c40a5c671801d47a29ee9b5e69fcf63a18ce5c5d") - depends_on("python@3.6:", when="@0.10:", type="build") - depends_on("py-setuptools", type="build") + variant("stats", default=False, description="Optional statistical dependencies", when="@0.12:") + depends_on("python@3.6:", when="@0.10:", type="build") + depends_on("python@3.7:", when="@0.12:", type="build") + depends_on("py-setuptools", when="@:0.11.2", type="build") + depends_on("py-flit-core@3.2:3", when="@0.12.0:", type="build") + + depends_on("py-numpy@1.17:", when="@0.12:", type=("build", "run")) depends_on("py-numpy@1.15:", when="@0.11:", type=("build", "run")) depends_on("py-numpy@1.9.3:", when="@0.9:", type=("build", "run")) depends_on("py-numpy", type=("build", "run")) - depends_on("py-scipy@1:", when="@0.11:", type=("build", "run")) - depends_on("py-scipy@1.0.1:", when="@0.10:", type=("build", "run")) - depends_on("py-scipy@0.14:", when="@0.9.0:", type=("build", "run")) - depends_on("py-scipy", type=("build", "run")) + depends_on("py-pandas@0.25:", when="@0.12:", type=("build", "run")) depends_on("py-pandas@0.23:", when="@0.11:", type=("build", "run")) depends_on("py-pandas@0.22:", when="@0.10:", type=("build", "run")) depends_on("py-pandas@0.15.2:", when="@0.9:", type=("build", "run")) depends_on("py-pandas", type=("build", "run")) + depends_on("py-matplotlib@3.1:", when="@0.12:", type=("build", "run")) depends_on("py-matplotlib@2.2:", when="@0.11:", type=("build", "run")) depends_on("py-matplotlib@2.1.2:", 
when="@0.10:", type=("build", "run")) depends_on("py-matplotlib@1.4.3:", when="@0.9:", type=("build", "run")) depends_on("py-matplotlib", type=("build", "run")) + + depends_on("py-scipy@1:", when="@0.12: +stats", type=("build", "run")) + depends_on("py-scipy@1:", when="@0.11", type=("build", "run")) + depends_on("py-scipy@1.0.1:", when="@0.10:0.11", type=("build", "run")) + depends_on("py-scipy@0.14:", when="@0.9.0:0.11", type=("build", "run")) + depends_on("py-scipy", when="@:0.11", type=("build", "run")) + + depends_on("py-statsmodels@0.10:", when="@0.12: +stats", type=("build", "run")) + + depends_on("py-typing-extensions", when="@0.12: ^python@:3.7", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-secretstorage/package.py b/var/spack/repos/builtin/packages/py-secretstorage/package.py index 3c0f935a2d0..e7f0c9f1a88 100644 --- a/var/spack/repos/builtin/packages/py-secretstorage/package.py +++ b/var/spack/repos/builtin/packages/py-secretstorage/package.py @@ -12,6 +12,7 @@ class PySecretstorage(PythonPackage): homepage = "https://github.com/mitya57/secretstorage" pypi = "SecretStorage/SecretStorage-3.1.2.tar.gz" + version("3.3.3", sha256="2403533ef369eca6d2ba81718576c5e0f564d5cca1b58f73a8b23e7d4eeebd77") version("3.3.1", sha256="fd666c51a6bf200643495a04abb261f83229dcb6fd8472ec393df7ffc8b6f195") version("3.1.2", sha256="15da8a989b65498e29be338b3b279965f1b8f09b9668bd8010da183024c8bff6") version("2.3.1", sha256="3af65c87765323e6f64c83575b05393f9e003431959c9395d1791d51497f29b6") diff --git a/var/spack/repos/builtin/packages/py-setuptools-rust/package.py b/var/spack/repos/builtin/packages/py-setuptools-rust/package.py index fed852868a0..cab58b302c9 100644 --- a/var/spack/repos/builtin/packages/py-setuptools-rust/package.py +++ b/var/spack/repos/builtin/packages/py-setuptools-rust/package.py @@ -12,6 +12,7 @@ class PySetuptoolsRust(PythonPackage): homepage = "https://github.com/PyO3/setuptools-rust" pypi = 
"setuptools-rust/setuptools-rust-0.12.1.tar.gz" + version("1.5.1", sha256="0e05e456645d59429cb1021370aede73c0760e9360bbfdaaefb5bced530eb9d7") version("1.4.1", sha256="18ff850831f58ee21d5783825c99fad632da21e47645e9427fd7dec048029e76") version("1.2.0", sha256="0a4ada479e8c7e3d8bd7cb56e1a29acc2b2bb98c2325051b0cdcb57d7f056de8") version("0.12.1", sha256="647009e924f0ae439c7f3e0141a184a69ad247ecb9044c511dabde232d3d570e") @@ -29,8 +30,8 @@ class PySetuptoolsRust(PythonPackage): depends_on("py-setuptools@62.4:", when="@1.4.0:", type=("build", "run")) depends_on("py-setuptools@46.1:", when="@0.11.6:", type=("build", "run")) depends_on("py-setuptools", type=("build", "run")) - depends_on("py-setuptools-scm+toml@6.3.2:", when="@1.2.0:", type="build") - depends_on("py-setuptools-scm+toml@3.4.3:", when="@0.11:", type="build") + depends_on("py-setuptools-scm+toml@6.3.2:", when="@1.2.0:1.4.1", type="build") + depends_on("py-setuptools-scm+toml@3.4.3:", when="@0.11:1.1", type="build") depends_on("py-semantic-version@2.8.2:2", when="@1.2.0:", type=("build", "run")) depends_on("py-semantic-version@2.6.0:", type=("build", "run")) depends_on("py-typing-extensions@3.7.4.3:", when="@1.2.0:", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-setuptools/package.py b/var/spack/repos/builtin/packages/py-setuptools/package.py index 53b1bc157a4..6bb59b2ad6b 100644 --- a/var/spack/repos/builtin/packages/py-setuptools/package.py +++ b/var/spack/repos/builtin/packages/py-setuptools/package.py @@ -16,6 +16,11 @@ class PySetuptools(Package): maintainers = ["adamjstewart"] + version( + "65.5.0", + sha256="f62ea9da9ed6289bfe868cd6845968a2c854d1427f8548d52cae02a42b4f0356", + expand=False, + ) version( "65.0.0", sha256="fe9a97f68b064a6ddd4bacfb0b4b93a4c65a556d97ce906255540439d0c35cef", @@ -31,6 +36,11 @@ class PySetuptools(Package): sha256="045aec56a3eee5c82373a70e02db8b6da9a10f7faf61ff89a14ab66c738ed370", expand=False, ) + version( + "62.6.0", + 
sha256="c1848f654aea2e3526d17fc3ce6aeaa5e7e24e66e645b5be2171f3f6b4e5a178", + expand=False, + ) version( "62.4.0", sha256="5a844ad6e190dccc67d6d7411d119c5152ce01f7c76be4d8a1eaa314501bba77", @@ -168,11 +178,11 @@ class PySetuptools(Package): ) extends("python") - depends_on("python@3.7:", type=("build", "run"), when="@59.7:") - depends_on("python@3.6:", type=("build", "run"), when="@51:") - depends_on("python@3.5:", type=("build", "run"), when="@45:50") - depends_on("python@2.7:2.8,3.5:", type=("build", "run"), when="@44") - depends_on("python@2.7:2.8,3.4:", type=("build", "run"), when="@:43") + depends_on("python@3.7:", when="@59.7:", type=("build", "run")) + depends_on("python@3.6:", when="@51:", type=("build", "run")) + depends_on("python@3.5:", when="@45:50", type=("build", "run")) + depends_on("python@2.7:2.8,3.5:", when="@44", type=("build", "run")) + depends_on("python@2.7:2.8,3.4:", when="@:43", type=("build", "run")) depends_on("py-pip", type="build") def url_for_version(self, version): diff --git a/var/spack/repos/builtin/packages/py-shapely/package.py b/var/spack/repos/builtin/packages/py-shapely/package.py index 8c2aad9eb16..9c39acbe0bc 100644 --- a/var/spack/repos/builtin/packages/py-shapely/package.py +++ b/var/spack/repos/builtin/packages/py-shapely/package.py @@ -20,6 +20,7 @@ class PyShapely(PythonPackage): version("main", branch="main") version("master", branch="main", deprecated=True) + version("1.8.5", sha256="e82b6d60ecfb124120c88fe106a478596bbeab142116d7e7f64a364dac902a92") version("1.8.4", sha256="a195e51caafa218291f2cbaa3fef69fd3353c93ec4b65b2a4722c4cf40c3198c") version("1.8.3", sha256="1ce9da186d48efc50130af96d62ffb4d2e175235143d804ef395aad156d45bb3") version("1.8.2", sha256="572af9d5006fd5e3213e37ee548912b0341fb26724d6dc8a4e3950c10197ebb6") diff --git a/var/spack/repos/builtin/packages/py-shortbred/package.py b/var/spack/repos/builtin/packages/py-shortbred/package.py new file mode 100644 index 00000000000..44dc115f122 --- /dev/null +++ 
b/var/spack/repos/builtin/packages/py-shortbred/package.py @@ -0,0 +1,25 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +from spack.package import * + + +class PyShortbred(PythonPackage): + """ShortBRED is a system for profiling protein families of interest at + very high specificity in shotgun meta'omic sequencing data.""" + + homepage = "https://huttenhower.sph.harvard.edu/shortbred/" + pypi = "shortbred/shortbred-0.9.5.tar.gz" + + version("0.9.5", sha256="a6ac09b858f14e2c0b8622b122ec91e5d02d32c12429cad66626d7ef26df10d5") + + depends_on("python@2.7.9:", type=("build", "run")) + depends_on("py-setuptools", type="build") + depends_on("py-biopython@1.65:", type="run") + depends_on("blast-plus@2.2.28:", type="run") + depends_on("usearch@6.0.307:", type="run") + depends_on("muscle@3.8.31:", type="run") + depends_on("cdhit@4.6:", type="run") diff --git a/var/spack/repos/builtin/packages/py-simplekml/package.py b/var/spack/repos/builtin/packages/py-simplekml/package.py index dbbbb03283d..e7e5f878c4b 100644 --- a/var/spack/repos/builtin/packages/py-simplekml/package.py +++ b/var/spack/repos/builtin/packages/py-simplekml/package.py @@ -14,6 +14,8 @@ class PySimplekml(PythonPackage): homepage = "https://readthedocs.org/projects/simplekml/" pypi = "simplekml/simplekml-1.3.1.tar.gz" + version("1.3.6", sha256="cda687be2754395fcab664e908ebf589facd41e8436d233d2be37a69efb1c536") + version("1.3.5", sha256="657b4e20177299a4e80bacfafff1f91102010bc23dc0ce7a7ae43bdd4246049e") version("1.3.1", sha256="30c121368ce1d73405721730bf766721e580cae6fbb7424884c734c89ec62ad7") depends_on("python@2.6:", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-slepc4py/package.py b/var/spack/repos/builtin/packages/py-slepc4py/package.py index 2525fe46dd2..985159e822b 100644 --- 
a/var/spack/repos/builtin/packages/py-slepc4py/package.py +++ b/var/spack/repos/builtin/packages/py-slepc4py/package.py @@ -16,6 +16,8 @@ class PySlepc4py(PythonPackage): maintainers = ["joseeroman", "balay"] version("main", branch="main") + version("3.18.1", sha256="4c2bc0947d6a9cdb209e3174b7f54fe7b029220e2c90106f52844e8f8795f8f0") + version("3.18.0", sha256="aa83f46f942aca05ffcbc8be29b496f56837f564e0396f5b39cec4946654ee78") version("3.17.2", sha256="e5b235486b6901cd4ff0d94083f0e5eeacaef3a2893e1714769717ad488a3885") version("3.17.1", sha256="967d5d045526088ff5b7b2cde76f8b4d1fee3a2a68481f85224b0795e6613eb9") version("3.17.0", sha256="cab298eb794739579167fd60ff900db90476c4c93b4ae4e0204e989a6eeb3767") @@ -41,6 +43,7 @@ class PySlepc4py(PythonPackage): depends_on("py-setuptools", type="build") depends_on("py-petsc4py", type=("build", "run")) + depends_on("py-petsc4py@3.18.0:3.18", when="@3.18.0:3.18", type=("build", "run")) depends_on("py-petsc4py@3.17.0:3.17", when="@3.17.0:3.17", type=("build", "run")) depends_on("py-petsc4py@3.16.0:3.16", when="@3.16.0:3.16", type=("build", "run")) depends_on("py-petsc4py@3.15.0:3.15", when="@3.15.0:3.15", type=("build", "run")) @@ -53,6 +56,7 @@ class PySlepc4py(PythonPackage): depends_on("py-petsc4py@3.7.0:3.7", when="@3.7.0:3.7", type=("build", "run")) depends_on("slepc") + depends_on("slepc@3.18.0:3.18", when="@3.18.0:3.18") depends_on("slepc@3.17.0:3.17", when="@3.17.0:3.17") depends_on("slepc@3.16.0:3.16", when="@3.16.0:3.16") depends_on("slepc@3.15.0:3.15", when="@3.15.0:3.15") diff --git a/var/spack/repos/builtin/packages/py-sniffio/package.py b/var/spack/repos/builtin/packages/py-sniffio/package.py index 19aab104d24..257c2366046 100644 --- a/var/spack/repos/builtin/packages/py-sniffio/package.py +++ b/var/spack/repos/builtin/packages/py-sniffio/package.py @@ -13,9 +13,11 @@ class PySniffio(PythonPackage): homepage = "https://github.com/python-trio/sniffio" pypi = "sniffio/sniffio-1.1.0.tar.gz" + version("1.3.0", 
sha256="e60305c5e5d314f5389259b7f22aaa33d8f7dee49763119234af3755c55b9101") version("1.2.0", sha256="c4666eecec1d3f50960c6bdf61ab7bc350648da6c126e3cf6898d8cd4ddcd3de") version("1.1.0", sha256="8e3810100f69fe0edd463d02ad407112542a11ffdc29f67db2bf3771afb87a21") depends_on("python@3.5:", type=("build", "run")) + depends_on("python@3.7:", when="@1.3.0:", type=("build", "run")) depends_on("py-setuptools", type="build") depends_on("py-contextvars@2.1:", when="^python@:3.6", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-sphinx/package.py b/var/spack/repos/builtin/packages/py-sphinx/package.py index a5c99d50783..86de3285a1e 100644 --- a/var/spack/repos/builtin/packages/py-sphinx/package.py +++ b/var/spack/repos/builtin/packages/py-sphinx/package.py @@ -14,6 +14,10 @@ class PySphinx(PythonPackage): maintainers = ["adamjstewart"] + version("5.3.0", sha256="51026de0a9ff9fc13c05d74913ad66047e104f56a129ff73e174eb5c3ee794b5") + version("5.2.3", sha256="5b10cb1022dac8c035f75767799c39217a05fc0fe2d6fe5597560d38e44f0363") + version("5.2.2", sha256="7225c104dc06169eb73b061582c4bc84a9594042acae6c1582564de274b7df2f") + version("5.2.1", sha256="c009bb2e9ac5db487bcf53f015504005a330ff7c631bb6ab2604e0d65bae8b54") version("5.2.0", sha256="1790c2098937dcfa7871c9d102c24eccd4a8b883b67c5c1e26892fb52d102542") version("5.1.1", sha256="ba3224a4e206e1fbdecf98a4fae4992ef9b24b85ebf7b584bb340156eaf08d89") version("5.1.0", sha256="7893d10d9d852c16673f9b1b7e9eda1606b420b7810270294d6e4b44c0accacc") diff --git a/var/spack/repos/builtin/packages/py-sphinxcontrib-mermaid/package.py b/var/spack/repos/builtin/packages/py-sphinxcontrib-mermaid/package.py index 4a135a98194..c11dece3fe5 100644 --- a/var/spack/repos/builtin/packages/py-sphinxcontrib-mermaid/package.py +++ b/var/spack/repos/builtin/packages/py-sphinxcontrib-mermaid/package.py @@ -14,6 +14,7 @@ class PySphinxcontribMermaid(PythonPackage): homepage = "https://github.com/mgaitan/sphinxcontrib-mermaid" pypi = 
"sphinxcontrib-mermaid/sphinxcontrib-mermaid-0.4.0.tar.gz" + version("0.7.1", sha256="aa8a40b50ec86ad12824b62180240ca52a9bda8424455d7eb252eae9aa5d293c") version("0.4.0", sha256="0ee45ba45b9575505eacdd6212e4e545213f4f93dfa32c7eeca32720dbc3b468") depends_on("py-setuptools", type="build") diff --git a/var/spack/repos/builtin/packages/py-tensorflow-hub/0001-zlib-bump-over-CVE-use-fossils-url-which-is-more-sta.patch b/var/spack/repos/builtin/packages/py-tensorflow-hub/0001-zlib-bump-over-CVE-use-fossils-url-which-is-more-sta.patch new file mode 100644 index 00000000000..c552390eaae --- /dev/null +++ b/var/spack/repos/builtin/packages/py-tensorflow-hub/0001-zlib-bump-over-CVE-use-fossils-url-which-is-more-sta.patch @@ -0,0 +1,29 @@ +From e5a889202143ccc5a6d126197e86ee138307cbc4 Mon Sep 17 00:00:00 2001 +From: Harmen Stoppels +Date: Mon, 17 Oct 2022 09:52:27 +0200 +Subject: [PATCH] zlib: bump over CVE, use fossils url which is more stable + +--- + WORKSPACE | 6 +++--- + 1 file changed, 3 insertions(+), 3 deletions(-) + +diff --git a/WORKSPACE b/WORKSPACE +index 495ed63..36d730b 100644 +--- a/WORKSPACE ++++ b/WORKSPACE +@@ -29,9 +29,9 @@ git_repository( + http_archive( + name = "zlib", + build_file = "@com_google_protobuf//:third_party/zlib.BUILD", +- sha256 = "c3e5e9fdd5004dcb542feda5ee4f0ff0744628baf8ed2dd5d66f8ca1197cb1a1", +- strip_prefix = "zlib-1.2.11", +- urls = ["https://zlib.net/zlib-1.2.11.tar.gz"], ++ sha256 = "b3a24de97a8fdbc835b9833169501030b8977031bcb54b3b3ac13740f846ab30", ++ strip_prefix = "zlib-1.2.13", ++ urls = ["https://zlib.net/fossils/zlib-1.2.13.tar.gz"], + ) + + # Required by protobuf 3.8.0. 
+-- +2.37.0 + diff --git a/var/spack/repos/builtin/packages/py-tensorflow-hub/package.py b/var/spack/repos/builtin/packages/py-tensorflow-hub/package.py index 2ec9bc65196..58cf70414c4 100644 --- a/var/spack/repos/builtin/packages/py-tensorflow-hub/package.py +++ b/var/spack/repos/builtin/packages/py-tensorflow-hub/package.py @@ -30,11 +30,8 @@ class PyTensorflowHub(Package): depends_on("py-numpy@1.12.0:", type=("build", "run")) depends_on("py-protobuf@3.8.0:", type=("build", "run")) - patch( - "https://github.com/tensorflow/hub/commit/049192a7edd3e80eebf1735b93f57c7965381bdb.patch?full_index=1", - sha256="c8b59d17511a8ebd2a58717723b9b77514a12b43bb2e6acec6d0c1062df6e457", - when="@:0.12", - ) + # Deal with vendored zlib. + patch("0001-zlib-bump-over-CVE-use-fossils-url-which-is-more-sta.patch", when="@:0.12") def install(self, spec, prefix): tmp_path = tempfile.mkdtemp(prefix="spack") diff --git a/var/spack/repos/builtin/packages/py-tensorflow-metadata/package.py b/var/spack/repos/builtin/packages/py-tensorflow-metadata/package.py index 363579b7a5d..07e09d4bf44 100644 --- a/var/spack/repos/builtin/packages/py-tensorflow-metadata/package.py +++ b/var/spack/repos/builtin/packages/py-tensorflow-metadata/package.py @@ -22,6 +22,13 @@ class PyTensorflowMetadata(PythonPackage): version("1.10.0", sha256="e7aa81aa01433e2a75c11425affd55125b64f384baf96b71eeb3a88dca8cf2ae") version("1.5.0", sha256="f0ec8aaf62fd772ef908efe4ee5ea3bc0d67dcbf10ae118415b7b206a1d61745") + # Fix non-existing zlib URL + patch( + "https://github.com/tensorflow/metadata/commit/8df679e782f5bf2d163d63e550d8752c3812d566.patch?full_index=1", + sha256="a6b294d5e6099979192fcdb4d5b7b0388dc30b48671944d22e51a9e6bd5e1490", + when="@1.10.0", + ) + depends_on("bazel@0.24.1:", type="build") depends_on("python@3.7:3", type=("build", "run")) depends_on("py-setuptools", type="build") diff --git a/var/spack/repos/builtin/packages/py-tensorflow/package.py b/var/spack/repos/builtin/packages/py-tensorflow/package.py index 
5c6b3dc1ac2..8247cd1ceba 100644 --- a/var/spack/repos/builtin/packages/py-tensorflow/package.py +++ b/var/spack/repos/builtin/packages/py-tensorflow/package.py @@ -115,14 +115,46 @@ class PyTensorflow(Package, CudaPackage, ROCmPackage): version("1.1.0", sha256="aad4470f52fa59f54de7b9a2da727429e6755d91d756f245f952698c42a60027") version("1.0.1", sha256="deea3c65e0703da96d9c3f1162e464c51d37659dd129396af134e9e8f1ea8c05") version("1.0.0", sha256="db8b3b8f4134b7c9c1b4165492ad5d5bb78889fcd99ffdffc325e97da3e8c677") - version("0.12.0", sha256="13a1d4e98c82eae7e26fe75384de1517d6126f63ba5d302392ec02ac3ae4b1b9") - version("0.11.0", sha256="24242ff696234bb1e58d09d45169b148525ccb706f980a4a92ddd3b82c7546dc") - version("0.10.0", sha256="f32df04e8f7186aaf6723fc5396733b2f6c2fd6fe4a53a54a68b80f3ec855680") - version("0.9.0", sha256="3128c396af19518c642d3e590212291e1d93c5b047472a10cf3245b53adac9c9") - version("0.8.0", sha256="f201ba7fb7609a6416968d4e1920d87d67be693b5bc7d34b6b4a79860a9a8a4e") - version("0.7.1", sha256="ef34121432f7a522cf9f99a56cdd86e370cc5fa3ee31255ca7cb17f36b8dfc0d") - version("0.7.0", sha256="43dd3051f947aa66e6fc09dac2f86a2efe2e019736bbd091c138544b86d717ce") - version("0.6.0", sha256="f86ace45e99053b09749cd55ab79c57274d8c7460ae763c5e808d81ffbc3b657") + version( + "0.12.0", + sha256="13a1d4e98c82eae7e26fe75384de1517d6126f63ba5d302392ec02ac3ae4b1b9", + deprecated=True, + ) + version( + "0.11.0", + sha256="24242ff696234bb1e58d09d45169b148525ccb706f980a4a92ddd3b82c7546dc", + deprecated=True, + ) + version( + "0.10.0", + sha256="f32df04e8f7186aaf6723fc5396733b2f6c2fd6fe4a53a54a68b80f3ec855680", + deprecated=True, + ) + version( + "0.9.0", + sha256="3128c396af19518c642d3e590212291e1d93c5b047472a10cf3245b53adac9c9", + deprecated=True, + ) + version( + "0.8.0", + sha256="f201ba7fb7609a6416968d4e1920d87d67be693b5bc7d34b6b4a79860a9a8a4e", + deprecated=True, + ) + version( + "0.7.1", + sha256="ef34121432f7a522cf9f99a56cdd86e370cc5fa3ee31255ca7cb17f36b8dfc0d", + deprecated=True, 
+ ) + version( + "0.7.0", + sha256="43dd3051f947aa66e6fc09dac2f86a2efe2e019736bbd091c138544b86d717ce", + deprecated=True, + ) + version( + "0.6.0", + sha256="f86ace45e99053b09749cd55ab79c57274d8c7460ae763c5e808d81ffbc3b657", + deprecated=True, + ) variant("mkl", default=False, description="Build with MKL support") variant("jemalloc", default=False, description="Build with jemalloc as malloc support") @@ -440,8 +472,16 @@ class PyTensorflow(Package, CudaPackage, ROCmPackage): conflicts("~rocm", when="@2.7.4-rocm-enhanced") conflicts("+rocm", when="@:2.7.4-a,2.7.4.0:") - # TODO: why is this needed? + # zlib is vendored and downloaded directly from zlib.org (or mirrors), but + # old downloads are removed from that site immediately after a new release. + # If the tf mirrors don't work, make sure the fallback is to something existing. patch("url-zlib.patch", when="@0.10.0") + # bump to zlib 1.2.13 + patch( + "https://github.com/tensorflow/tensorflow/commit/76b9fa22857148a562f3d9b5af6843402a93c15b.patch?full_index=1", + sha256="f9e26c544da729cfd376dbd3b096030e3777d3592459add1f3c78b1b9828d493", + when="@2.9:2.10.0", + ) # TODO: why is this needed? patch("crosstool.patch", when="@0.10.0+cuda") # Avoid build error: "no such package '@io_bazel_rules_docker..." 
diff --git a/var/spack/repos/builtin/packages/py-tfdlpack/package.py b/var/spack/repos/builtin/packages/py-tfdlpack/package.py index 2bc5186d92b..1304184c2e2 100644 --- a/var/spack/repos/builtin/packages/py-tfdlpack/package.py +++ b/var/spack/repos/builtin/packages/py-tfdlpack/package.py @@ -7,7 +7,7 @@ from spack.package import * -class PyTfdlpack(CMakePackage, PythonPackage): +class PyTfdlpack(CMakePackage, PythonExtension): """Tensorflow plugin for DLPack.""" homepage = "https://github.com/VoVAllen/tf-dlpack" diff --git a/var/spack/repos/builtin/packages/py-tokenizers/package.py b/var/spack/repos/builtin/packages/py-tokenizers/package.py index 138fe084664..d306e57950a 100644 --- a/var/spack/repos/builtin/packages/py-tokenizers/package.py +++ b/var/spack/repos/builtin/packages/py-tokenizers/package.py @@ -13,15 +13,17 @@ class PyTokenizers(PythonPackage): homepage = "https://github.com/huggingface/tokenizers" pypi = "tokenizers/tokenizers-0.6.0.tar.gz" + version("0.13.1", sha256="3333d1cee5c8f47c96362ea0abc1f81c77c9b92c6c3d11cbf1d01985f0d5cf1d") version("0.10.3", sha256="1a5d3b596c6d3a237e1ad7f46c472d467b0246be7fd1a364f12576eb8db8f7e6") version("0.6.0", sha256="1da11fbfb4f73be695bed0d655576097d09a137a16dceab2f66399716afaffac") version("0.5.2", sha256="b5a235f9c71d04d4925df6c4fa13b13f1d03f9b7ac302b89f8120790c4f742bc") depends_on("py-setuptools", type="build") depends_on("py-setuptools-rust", type="build") - depends_on("rust@nightly", type="build") - # TODO: This package currently requires internet access to install. - # Also, a nightly or dev version of rust is required to build. + # A nightly or dev version of rust is required to build older versions. # https://github.com/huggingface/tokenizers/issues/176 # https://github.com/PyO3/pyo3/issues/5 + depends_on("rust@nightly", when="@:0.10", type="build") + + # TODO: This package currently requires internet access to install. 
diff --git a/var/spack/repos/builtin/packages/py-torch/package.py b/var/spack/repos/builtin/packages/py-torch/package.py index 9e5a9a8cef3..b6e5ed6b1fe 100644 --- a/var/spack/repos/builtin/packages/py-torch/package.py +++ b/var/spack/repos/builtin/packages/py-torch/package.py @@ -24,6 +24,7 @@ class PyTorch(PythonPackage, CudaPackage, ROCmPackage): import_modules = ["torch", "torch.autograd", "torch.nn", "torch.utils"] version("master", branch="master", submodules=True) + version("1.13.0", tag="v1.13.0", submodules=True) version("1.12.1", tag="v1.12.1", submodules=True) version("1.12.0", tag="v1.12.0", submodules=True) version("1.11.0", tag="v1.11.0", submodules=True) @@ -115,11 +116,6 @@ class PyTorch(PythonPackage, CudaPackage, ROCmPackage): ) # Required dependencies - depends_on("cmake@3.13:", when="@1.11:", type="build") - depends_on("cmake@3.10:", when="@1.10:", type="build") - depends_on("cmake@3.5:", type="build") - # Use Ninja generator to speed up build times, automatically used if found - depends_on("ninja@1.5:", when="@1.1:", type="build") # See python_min_version in setup.py depends_on("python@3.7:", when="@1.11:", type=("build", "link", "run")) depends_on("python@3.6.2:", when="@1.7.1:", type=("build", "link", "run")) @@ -127,18 +123,35 @@ class PyTorch(PythonPackage, CudaPackage, ROCmPackage): depends_on("python@3.5:", when="@1.5", type=("build", "link", "run")) depends_on("python@2.7:2,3.5:", when="@1.4", type=("build", "link", "run")) depends_on("python@2.7:2,3.5:3.7", when="@:1.3", type=("build", "link", "run")) + + # pyproject.toml depends_on("py-setuptools", type=("build", "run")) + depends_on("py-astunparse", when="@1.13:", type=("build", "run")) + depends_on("py-numpy@1.16.6:", type=("build", "run")) + depends_on("ninja@1.5:", when="@1.1:", type="build") + depends_on("py-pyyaml", type=("build", "run")) + depends_on("cmake@3.13:", when="@1.11:", type="build") + depends_on("cmake@3.10:", when="@1.10:", type="build") + depends_on("cmake@3.5:", 
type="build") + depends_on("py-cffi", type=("build", "run")) + depends_on("py-typing-extensions@3.6.2.1:", when="@1.7:", type=("build", "run")) depends_on("py-future", when="@1.5:", type=("build", "run")) depends_on("py-future", when="@1.1: ^python@:2", type=("build", "run")) - depends_on("py-pyyaml", type=("build", "run")) + depends_on("py-six", when="@1.13:", type=("build", "run")) + depends_on("py-requests", when="@1.13:", type=("build", "run")) + depends_on("py-dataclasses", when="@1.7: ^python@3.6", type=("build", "run")) + + # Undocumented dependencies depends_on("py-typing", when="^python@:3.4", type=("build", "run")) - depends_on("py-pybind11@2.6.2", when="@1.8:", type=("build", "link", "run")) + depends_on("py-tqdm", type="run") + depends_on("blas") + depends_on("lapack") + + # third_party + depends_on("py-pybind11@2.10.0", when="@1.13:", type=("build", "link", "run")) + depends_on("py-pybind11@2.6.2", when="@1.8:1.12", type=("build", "link", "run")) depends_on("py-pybind11@2.3.0", when="@1.1:1.7", type=("build", "link", "run")) depends_on("py-pybind11@2.2.4", when="@:1.0", type=("build", "link", "run")) - depends_on("py-dataclasses", when="@1.7: ^python@3.6", type=("build", "run")) - depends_on("py-tqdm", type="run") - # https://github.com/onnx/onnx#prerequisites - depends_on("py-numpy@1.16.6:", type=("build", "run")) depends_on("py-protobuf@3.12.2:", when="@1.10:", type=("build", "run")) depends_on("py-protobuf@:3.14", when="@:1.9", type=("build", "run")) depends_on("protobuf@3.12.2:", when="@1.10:") @@ -147,19 +160,17 @@ class PyTorch(PythonPackage, CudaPackage, ROCmPackage): # https://github.com/pytorch/pytorch/issues/78362 depends_on("py-protobuf@:3", type=("build", "run")) depends_on("protobuf@:3", type=("build", "run")) - depends_on("py-typing-extensions@3.6.2.1:", when="@1.7:", type=("build", "run")) - depends_on("blas") - depends_on("lapack") depends_on("eigen") # https://github.com/pytorch/pytorch/issues/60329 - # depends_on('cpuinfo@2020-12-17', 
when='@1.8:') - # depends_on('cpuinfo@2020-06-11', when='@1.6:1.7') + # depends_on("cpuinfo@2022-08-19", when="@1.13:") + # depends_on("cpuinfo@2020-12-17", when="@1.8:1.12") + # depends_on("cpuinfo@2020-06-11", when="@1.6:1.7") # https://github.com/shibatch/sleef/issues/427 - # depends_on('sleef@3.5.1_2020-12-22', when='@1.8:') + # depends_on("sleef@3.5.1_2020-12-22", when="@1.8:") # https://github.com/pytorch/pytorch/issues/60334 - # depends_on('sleef@3.4.0_2019-07-30', when='@1.6:1.7') + # depends_on("sleef@3.4.0_2019-07-30", when="@1.6:1.7") # https://github.com/Maratyszcza/FP16/issues/18 - # depends_on('fp16@2020-05-14', when='@1.6:') + # depends_on("fp16@2020-05-14", when="@1.6:") depends_on("pthreadpool@2021-04-13", when="@1.9:") depends_on("pthreadpool@2020-10-05", when="@1.8") depends_on("pthreadpool@2020-06-15", when="@1.6:1.7") @@ -198,22 +209,24 @@ class PyTorch(PythonPackage, CudaPackage, ROCmPackage): depends_on("miopen-hip") depends_on("rocminfo") # https://github.com/pytorch/pytorch/issues/60332 - # depends_on('xnnpack@2022-02-16', when='@1.12:+xnnpack') - # depends_on('xnnpack@2021-06-21', when='@1.10:1.11+xnnpack') - # depends_on('xnnpack@2021-02-22', when='@1.8:1.9+xnnpack') - # depends_on('xnnpack@2020-03-23', when='@1.6:1.7+xnnpack') + # depends_on("xnnpack@2022-02-16", when="@1.12:+xnnpack") + # depends_on("xnnpack@2021-06-21", when="@1.10:1.11+xnnpack") + # depends_on("xnnpack@2021-02-22", when="@1.8:1.9+xnnpack") + # depends_on("xnnpack@2020-03-23", when="@1.6:1.7+xnnpack") depends_on("mpi", when="+mpi") # https://github.com/pytorch/pytorch/issues/60270 - # depends_on('gloo@2021-05-21', when='@1.10:+gloo') - # depends_on('gloo@2021-05-04', when='@1.9+gloo') - # depends_on('gloo@2020-09-18', when='@1.7:1.8+gloo') - # depends_on('gloo@2020-03-17', when='@1.6+gloo') + # depends_on("gloo@2022-05-18", when="@1.13:+gloo") + # depends_on("gloo@2021-05-21", when="@1.10:1.12+gloo") + # depends_on("gloo@2021-05-04", when="@1.9+gloo") + # 
depends_on("gloo@2020-09-18", when="@1.7:1.8+gloo") + # depends_on("gloo@2020-03-17", when="@1.6+gloo") # https://github.com/pytorch/pytorch/issues/60331 - # depends_on('onnx@1.11.0', when='@1.12:+onnx_ml') - # depends_on('onnx@1.10.1_2021-10-08', when='@1.11+onnx_ml') - # depends_on('onnx@1.10.1', when='@1.10+onnx_ml') - # depends_on('onnx@1.8.0_2020-11-03', when='@1.8:1.9+onnx_ml') - # depends_on('onnx@1.7.0_2020-05-31', when='@1.6:1.7+onnx_ml') + # depends_on("onnx!1.12.0", when="@1.13:+onnx_ml") + # depends_on("onnx@1.11.0", when="@1.12+onnx_ml") + # depends_on("onnx@1.10.1_2021-10-08", when="@1.11+onnx_ml") + # depends_on("onnx@1.10.1", when="@1.10+onnx_ml") + # depends_on("onnx@1.8.0_2020-11-03", when="@1.8:1.9+onnx_ml") + # depends_on("onnx@1.7.0_2020-05-31", when="@1.6:1.7+onnx_ml") depends_on("mkl", when="+mkldnn") # Test dependencies @@ -247,7 +260,7 @@ class PyTorch(PythonPackage, CudaPackage, ROCmPackage): # Fixes fatal error: sleef.h: No such file or directory # https://github.com/pytorch/pytorch/pull/35359 # https://github.com/pytorch/pytorch/issues/26555 - # patch('sleef.patch', when='@:1.5') + # patch("sleef.patch", when="@:1.5") # Fixes compilation with Clang 9.0.0 and Apple Clang 11.0.3 # https://github.com/pytorch/pytorch/pull/37086 @@ -510,24 +523,24 @@ def enable_or_disable(variant, keyword="USE", var=None, newer=False): if self.spec.satisfies("@1.10:"): env.set("USE_SYSTEM_PYBIND11", "ON") # https://github.com/pytorch/pytorch/issues/60334 - # if self.spec.satisfies('@1.8:'): - # env.set('USE_SYSTEM_SLEEF', 'ON') + # if self.spec.satisfies("@1.8:"): + # env.set("USE_SYSTEM_SLEEF", "ON") if self.spec.satisfies("@1.6:"): - # env.set('USE_SYSTEM_LIBS', 'ON') + # env.set("USE_SYSTEM_LIBS", "ON") # https://github.com/pytorch/pytorch/issues/60329 - # env.set('USE_SYSTEM_CPUINFO', 'ON') + # env.set("USE_SYSTEM_CPUINFO", "ON") # https://github.com/pytorch/pytorch/issues/60270 - # env.set('USE_SYSTEM_GLOO', 'ON') + # env.set("USE_SYSTEM_GLOO", "ON") # 
https://github.com/Maratyszcza/FP16/issues/18 - # env.set('USE_SYSTEM_FP16', 'ON') + # env.set("USE_SYSTEM_FP16", "ON") env.set("USE_SYSTEM_PTHREADPOOL", "ON") env.set("USE_SYSTEM_PSIMD", "ON") env.set("USE_SYSTEM_FXDIV", "ON") env.set("USE_SYSTEM_BENCHMARK", "ON") # https://github.com/pytorch/pytorch/issues/60331 - # env.set('USE_SYSTEM_ONNX', 'ON') + # env.set("USE_SYSTEM_ONNX", "ON") # https://github.com/pytorch/pytorch/issues/60332 - # env.set('USE_SYSTEM_XNNPACK', 'ON') + # env.set("USE_SYSTEM_XNNPACK", "ON") @run_before("install") def build_amd(self): diff --git a/var/spack/repos/builtin/packages/py-torchaudio/package.py b/var/spack/repos/builtin/packages/py-torchaudio/package.py index 9fb103095e3..83e16bcf6cc 100644 --- a/var/spack/repos/builtin/packages/py-torchaudio/package.py +++ b/var/spack/repos/builtin/packages/py-torchaudio/package.py @@ -20,10 +20,80 @@ class PyTorchaudio(PythonPackage): extension.""" homepage = "https://github.com/pytorch/audio" - url = "https://github.com/pytorch/audio/archive/v0.4.0.tar.gz" + git = "https://github.com/pytorch/audio.git" - version("0.4.0", sha256="9361312319b1ab880fc348ea82b024053bca6faf477ef6a9232a5b805742dc66") + version("main", branch="main", submodules=True) + version("0.13.0", tag="v0.13.0", submodules=True) + version("0.12.1", tag="v0.12.1", submodules=True) + version("0.12.0", tag="v0.12.0", submodules=True) + version("0.11.0", tag="v0.11.0", submodules=True) + version("0.10.2", tag="v0.10.2", submodules=True) + version("0.10.1", tag="v0.10.1", submodules=True) + version("0.10.0", tag="v0.10.0", submodules=True) + version("0.9.1", tag="v0.9.1", submodules=True) + version("0.9.0", tag="v0.9.0", submodules=True) + version("0.8.2", tag="v0.8.2", submodules=True) + version("0.8.1", tag="v0.8.1", submodules=True) + version("0.8.0", tag="v0.8.0", submodules=True) + version("0.7.2", tag="v0.7.2", submodules=True) + version("0.7.0", tag="v0.7.0", submodules=True) + version("0.6.0", tag="v0.6.0", submodules=True) + 
version("0.5.1", tag="v0.5.1", submodules=True) + version("0.5.0", tag="v0.5.0", submodules=True) + version("0.4.0", tag="v0.4.0", submodules=True) + # https://github.com/pytorch/audio#dependencies + depends_on("python@3.7:3.10", when="@0.12:", type=("build", "link", "run")) + depends_on("python@3.7:3.9", when="@0.11", type=("build", "link", "run")) + depends_on("python@3.6:3.9", when="@0.7.2:0.10", type=("build", "link", "run")) + depends_on("python@3.6:3.8", when="@0.6:0.7.0", type=("build", "link", "run")) + depends_on("python@3.5:3.8", when="@0.5", type=("build", "link", "run")) + depends_on("python@2.7,3.5:3.8", when="@0.4", type=("build", "link", "run")) + + depends_on("cmake@3.18:", when="@0.10:", type="build") + depends_on("cmake@3.5:", when="@0.8:", type="build") + depends_on("ninja", when="@0.8:", type="build") depends_on("py-setuptools", type="build") - depends_on("sox@14.3.2:") - depends_on("py-torch@1.2.0:", type=("build", "run")) + depends_on("py-pybind11", when="@0.12:", type=("build", "link")) + depends_on("pkgconfig", type="build") + depends_on("sox") + + # https://github.com/pytorch/audio#dependencies + depends_on("py-torch@master", when="@main", type=("build", "link", "run")) + depends_on("py-torch@1.13.0", when="@0.13.0", type=("build", "link", "run")) + depends_on("py-torch@1.12.1", when="@0.12.1", type=("build", "link", "run")) + depends_on("py-torch@1.12.0", when="@0.12.0", type=("build", "link", "run")) + depends_on("py-torch@1.11.0", when="@0.11.0", type=("build", "link", "run")) + depends_on("py-torch@1.10.2", when="@0.10.2", type=("build", "link", "run")) + depends_on("py-torch@1.10.1", when="@0.10.1", type=("build", "link", "run")) + depends_on("py-torch@1.10.0", when="@0.10.0", type=("build", "link", "run")) + depends_on("py-torch@1.9.1", when="@0.9.1", type=("build", "link", "run")) + depends_on("py-torch@1.9.0", when="@0.9.0", type=("build", "link", "run")) + depends_on("py-torch@1.8.2", when="@0.8.2", type=("build", "link", "run")) + 
depends_on("py-torch@1.8.1", when="@0.8.1", type=("build", "link", "run")) + depends_on("py-torch@1.8.0", when="@0.8.0", type=("build", "link", "run")) + depends_on("py-torch@1.7.1", when="@0.7.2", type=("build", "link", "run")) + depends_on("py-torch@1.7.0", when="@0.7.0", type=("build", "link", "run")) + depends_on("py-torch@1.6.0", when="@0.6.0", type=("build", "link", "run")) + depends_on("py-torch@1.5.1", when="@0.5.1", type=("build", "link", "run")) + depends_on("py-torch@1.5.0", when="@0.5.0", type=("build", "link", "run")) + depends_on("py-torch@1.4.1", when="@0.4.0", type=("build", "link", "run")) + + def setup_build_environment(self, env): + # tools/setup_helpers/extension.py + env.set("BUILD_SOX", 0) + + if "+cuda" in self.spec["py-torch"]: + env.set("USE_CUDA", 1) + torch_cuda_arch_list = ";".join( + "{0:.1f}".format(float(i) / 10.0) + for i in self.spec["py-torch"].variants["cuda_arch"].value + ) + env.set("TORCH_CUDA_ARCH_LIST", torch_cuda_arch_list) + else: + env.set("USE_CUDA", 0) + + if "+rocm" in self.spec["py-torch"]: + env.set("USE_ROCM", 1) + else: + env.set("USE_ROCM", 0) diff --git a/var/spack/repos/builtin/packages/py-torchdata/package.py b/var/spack/repos/builtin/packages/py-torchdata/package.py index 939987b06ab..0d674e7d75b 100644 --- a/var/spack/repos/builtin/packages/py-torchdata/package.py +++ b/var/spack/repos/builtin/packages/py-torchdata/package.py @@ -16,6 +16,7 @@ class PyTorchdata(PythonPackage): maintainers = ["adamjstewart"] version("main", branch="main") + version("0.5.0", sha256="b4e1a7015b34e3576111d495a00a675db238bfd136629fc443078bab9383ec36") version("0.4.1", sha256="71c0aa3aca3b04a986a2cd4cc2e0be114984ca836dc4def2c700bf1bd1ff087e") version("0.4.0", sha256="b4ec446a701680faa620fcb828b98ba36a63fa79da62a1e568d4a683889172da") version("0.3.0", sha256="ac36188bf133cf5f1041a28ccb3ee82ba52d4b5d99617be37d64d740acd6cfd4") @@ -23,6 +24,7 @@ class PyTorchdata(PythonPackage): # https://github.com/pytorch/data#version-compatibility 
depends_on("python@3.7:3.10", type=("build", "run")) depends_on("py-torch@master", when="@main", type=("build", "run")) + depends_on("py-torch@1.13.0", when="@0.5.0", type=("build", "run")) depends_on("py-torch@1.12.1", when="@0.4.1", type=("build", "run")) depends_on("py-torch@1.12.0", when="@0.4.0", type=("build", "run")) depends_on("py-torch@1.11.0", when="@0.3.0", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-torchmetrics/package.py b/var/spack/repos/builtin/packages/py-torchmetrics/package.py index c87344f5744..729bcb4b601 100644 --- a/var/spack/repos/builtin/packages/py-torchmetrics/package.py +++ b/var/spack/repos/builtin/packages/py-torchmetrics/package.py @@ -14,6 +14,8 @@ class PyTorchmetrics(PythonPackage): maintainers = ["adamjstewart"] + version("0.10.2", sha256="daa29d96bff5cff04d80eec5b9f5076993d6ac9c2d2163e88b6b31f8d38f7c25") + version("0.10.1", sha256="e892ecd413e6bf63950329d1317c70f697d81d0f7e386152238062e322c8f1f3") version("0.10.0", sha256="990bafc7f76d7442894533771d0ba7492dbca2bbf2989fb32de7e9c68eb3d133") version("0.9.3", sha256="4ebfd2466021db26397636966ee1a195d3b340ba5d71bb258e764340dfc2476f") version("0.9.2", sha256="8178c9242e243318093d9b7237738a504535193d2006da6e58b0ed4003e318d2") diff --git a/var/spack/repos/builtin/packages/py-torchtext/package.py b/var/spack/repos/builtin/packages/py-torchtext/package.py index e9008168e53..a22bfa53f1d 100644 --- a/var/spack/repos/builtin/packages/py-torchtext/package.py +++ b/var/spack/repos/builtin/packages/py-torchtext/package.py @@ -11,17 +11,52 @@ class PyTorchtext(PythonPackage): """Text utilities and datasets for PyTorch.""" homepage = "https://github.com/pytorch/text" - pypi = "torchtext/torchtext-0.5.0.tar.gz" + git = "https://github.com/pytorch/text.git" maintainers = ["adamjstewart"] - version("0.5.0", sha256="7f22e24e9b939fff56b9118c78dc07aafec8dcc67164de15b9b5ed339e4179c6") + version("main", branch="main", submodules=True) + version("0.14.0", tag="v0.14.0", 
submodules=True) + version("0.13.1", tag="v0.13.1", submodules=True) + version("0.13.0", tag="v0.13.0", submodules=True) + version("0.12.0", tag="v0.12.0", submodules=True) + version("0.11.2", tag="v0.11.2", submodules=True) + version("0.11.1", tag="v0.11.1", submodules=True) + version("0.10.1", tag="v0.10.1", submodules=True) + version("0.10.0", tag="v0.10.0", submodules=True) + version("0.9.2", tag="v0.9.2", submodules=True) + version("0.8.1", tag="v0.8.1", submodules=True) + version("0.6.0", tag="0.6.0", submodules=True) + version("0.5.0", tag="0.5.0", submodules=True) - depends_on("python@2.7:2.8,3.5:", type=("build", "run")) + # https://github.com/pytorch/text#installation + depends_on("python@3.7:3.10", when="@0.13:", type=("build", "link", "run")) + depends_on("python@3.6:3.9", when="@0.8.1:0.12", type=("build", "link", "run")) + depends_on("python@3.6:3.8", when="@0.7:0.8.0", type=("build", "link", "run")) + depends_on("python@3.5:3.8", when="@0.6", type=("build", "link", "run")) + depends_on("python@2.7,3.5:3.8", when="@:0.5", type=("build", "link", "run")) + + depends_on("cmake@3.18:", when="@0.13:", type="build") + depends_on("ninja", when="@0.13:", type="build") + depends_on("py-pybind11", when="@0.8:", type=("build", "link")) depends_on("py-setuptools", type="build") depends_on("py-tqdm", type=("build", "run")) depends_on("py-requests", type=("build", "run")) - depends_on("py-torch@0.4.0:", type=("build", "run")) depends_on("py-numpy", type=("build", "run")) - depends_on("py-six", type=("build", "run")) - depends_on("py-sentencepiece", type=("build", "run")) + depends_on("py-six", when="@:0.6", type=("build", "run")) + depends_on("py-sentencepiece", when="@:0.7", type=("build", "run")) + + # https://github.com/pytorch/text#installation + depends_on("py-torch@master", when="@main", type=("build", "link", "run")) + depends_on("py-torch@1.13.0", when="@0.14.0", type=("build", "link", "run")) + depends_on("py-torch@1.12.1", when="@0.13.1", type=("build", 
"link", "run")) + depends_on("py-torch@1.12.0", when="@0.13.0", type=("build", "link", "run")) + depends_on("py-torch@1.11.0", when="@0.12.0", type=("build", "link", "run")) + depends_on("py-torch@1.10.2", when="@0.11.2", type=("build", "link", "run")) + depends_on("py-torch@1.10.1", when="@0.11.1", type=("build", "link", "run")) + depends_on("py-torch@1.9.1", when="@0.10.1", type=("build", "link", "run")) + depends_on("py-torch@1.9.0", when="@0.10.0", type=("build", "link", "run")) + depends_on("py-torch@1.8.2", when="@0.9.2", type=("build", "link", "run")) + depends_on("py-torch@1.7.1", when="@0.8.1", type=("build", "link", "run")) + depends_on("py-torch@1.5.0", when="@0.6.0", type=("build", "link", "run")) + depends_on("py-torch@1.4.1", when="@0.5.0", type=("build", "link", "run")) diff --git a/var/spack/repos/builtin/packages/py-torchvision/package.py b/var/spack/repos/builtin/packages/py-torchvision/package.py index 273732995a8..e1e58b7ccdc 100644 --- a/var/spack/repos/builtin/packages/py-torchvision/package.py +++ b/var/spack/repos/builtin/packages/py-torchvision/package.py @@ -18,6 +18,7 @@ class PyTorchvision(PythonPackage): maintainers = ["adamjstewart"] version("main", branch="main") + version("0.14.0", sha256="be1621c85c56eb40537cb74e6ec5d8e58ed8b69f8374a58bcb6ec413cb540c8b") version("0.13.1", sha256="c32fab734e62c7744dadeb82f7510ff58cc3bca1189d17b16aa99b08afc42249") version("0.13.0", sha256="2fe9139150800820d02c867a0b64b7c7fbc964d48d76fae235d6ef9215eabcf4") version("0.12.0", sha256="99e6d3d304184895ff4f6152e2d2ec1cbec89b3e057d9c940ae0125546b04e91") @@ -47,7 +48,12 @@ class PyTorchvision(PythonPackage): "backend", default="pil", description="Image backend", - values=("pil", "accimage", "png", "jpeg"), + values=[ + "pil", + "accimage", + conditional("png", when="@0.8:"), + conditional("jpeg", when="@0.8:"), + ], multi=False, ) @@ -68,6 +74,7 @@ class PyTorchvision(PythonPackage): # https://github.com/pytorch/vision#installation 
depends_on("py-torch@master", when="@main", type=("build", "link", "run")) + depends_on("py-torch@1.13.0", when="@0.14.0", type=("build", "link", "run")) depends_on("py-torch@1.12.1", when="@0.13.1", type=("build", "link", "run")) depends_on("py-torch@1.12.0", when="@0.13.0", type=("build", "link", "run")) depends_on("py-torch@1.11.0", when="@0.12.0", type=("build", "link", "run")) @@ -102,15 +109,12 @@ class PyTorchvision(PythonPackage): depends_on("pil@5.3:8.2,8.4:", when="@0.13: backend=pil", type=("build", "run")) depends_on("py-accimage", when="backend=accimage", type=("build", "run")) depends_on("libpng@1.6.0:", when="backend=png") - depends_on("jpeg") - - # Many of the datasets require additional dependencies to use. - # These can be installed after the fact. + depends_on("jpeg") # seems to be required for all backends depends_on("ffmpeg@3.1:", when="@0.4.2:") - conflicts("backend=png", when="@:0.7") - conflicts("backend=jpeg", when="@:0.7") + # Many of the datasets require additional dependencies to use. + # These can be installed after the fact. 
def setup_build_environment(self, env): include = [] @@ -138,10 +142,10 @@ def setup_build_environment(self, env): if "+cuda" in self.spec["py-torch"]: env.set("FORCE_CUDA", 1) env.set("CUDA_HOME", self.spec["cuda"].prefix) - pytorch_cuda_arch = ";".join( + torch_cuda_arch_list = ";".join( "{0:.1f}".format(float(i) / 10.0) for i in self.spec["py-torch"].variants["cuda_arch"].value ) - env.set("TORCH_CUDA_ARCH_LIST", pytorch_cuda_arch) + env.set("TORCH_CUDA_ARCH_LIST", torch_cuda_arch_list) else: env.set("FORCE_CUDA", 0) diff --git a/var/spack/repos/builtin/packages/py-transformers/package.py b/var/spack/repos/builtin/packages/py-transformers/package.py index b0e3fbd1fdb..d2643b732ca 100644 --- a/var/spack/repos/builtin/packages/py-transformers/package.py +++ b/var/spack/repos/builtin/packages/py-transformers/package.py @@ -16,24 +16,32 @@ class PyTransformers(PythonPackage): maintainers = ["adamjstewart"] + version("4.24.0", sha256="486f353a8e594002e48be0e2aba723d96eda839e63bfe274702a4b5eda85559b") version("4.6.1", sha256="83dbff763b7e7dc57cbef1a6b849655d4fcab6bffdd955c5e8bea12a4f76dc10") version("2.8.0", sha256="b9f29cdfd39c28f29e0806c321270dea337d6174a7aa60daf9625bf83dbb12ee") + depends_on("python@3.7:", when="@4.24:", type=("build", "run")) depends_on("python@3.6:", type=("build", "run")) depends_on("py-setuptools", type="build") - - depends_on("py-dataclasses", when="^python@:3.6", type=("build", "run")) - depends_on("py-importlib-metadata", when="@4.6.1: ^python@:3.7", type=("build", "run")) + depends_on("py-importlib-metadata", when="@4.6: ^python@:3.7", type=("build", "run")) depends_on("py-filelock", type=("build", "run")) - depends_on("py-huggingface-hub@0.0.8", when="@4.6.1:", type=("build", "run")) + depends_on("py-huggingface-hub@0.10:0", when="@4.24:", type=("build", "run")) + depends_on("py-huggingface-hub@0.0.8", when="@4.6.1", type=("build", "run")) + depends_on("py-numpy@1.17:", when="@4.6:", type=("build", "run")) depends_on("py-numpy", 
type=("build", "run")) - depends_on("py-numpy@1.17:", when="@4.6.1:", type=("build", "run")) - depends_on("py-packaging", when="@4.6.1:", type=("build", "run")) + depends_on("py-packaging@20:", when="@4.24:", type=("build", "run")) + depends_on("py-packaging", when="@4.6.1", type=("build", "run")) + depends_on("py-pyyaml@5.1:", when="@4.24:", type=("build", "run")) depends_on("py-regex@:2019.12.16,2019.12.18:", type=("build", "run")) depends_on("py-requests", type=("build", "run")) - depends_on("py-sacremoses", type=("build", "run")) + depends_on("py-tokenizers@0.11.1:0.11.2,0.11.4:0.13", when="@4.24:", type=("build", "run")) + depends_on("py-tokenizers@0.10.1:0.10", when="@4.6.1", type=("build", "run")) depends_on("py-tokenizers@0.5.2", when="@2.8.0", type=("build", "run")) - depends_on("py-tokenizers@0.10.1:0.10", when="@4.6.1:", type=("build", "run")) depends_on("py-tqdm@4.27:", type=("build", "run")) + + # Historical requirements + depends_on("py-dataclasses", when="@4.6.1 ^python@:3.6", type=("build", "run")) + depends_on("py-sacremoses", when="@:4.6", type=("build", "run")) depends_on("py-boto3", when="@2.8.0", type=("build", "run")) + depends_on("py-dataclasses", when="@2.8.0 ^python@:3.6", type=("build", "run")) depends_on("py-sentencepiece", when="@2.8.0", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-vermin/package.py b/var/spack/repos/builtin/packages/py-vermin/package.py index 02b7e565478..c4745cefe42 100644 --- a/var/spack/repos/builtin/packages/py-vermin/package.py +++ b/var/spack/repos/builtin/packages/py-vermin/package.py @@ -11,10 +11,11 @@ class PyVermin(PythonPackage): """Concurrently detect the minimum Python versions needed to run code.""" homepage = "https://github.com/netromdk/vermin" - url = "https://github.com/netromdk/vermin/archive/v1.4.2.tar.gz" + url = "https://github.com/netromdk/vermin/archive/v1.5.0.tar.gz" maintainers = ["netromdk"] + version("1.5.0", 
sha256="77207385c9cea1f02053a8f2e7f2e8c945394cf37c44c70ce217cada077a2d17") version("1.4.2", sha256="c9a69420b610bfb25d5a2abd7da6edf0ae4329481a857ef6c5d71f602ed5c63d") version("1.4.1", sha256="ee69d5e84f0d446e0d6574ec60c428798de6e6c8d055589f65ac02f074a7da25") version("1.4.0", sha256="984773ed6af60329e700b39c58b7584032acbc908a00b5a76d1ce5468c825c70") diff --git a/var/spack/repos/builtin/packages/py-warpx/package.py b/var/spack/repos/builtin/packages/py-warpx/package.py index b550d6dfe89..cbf4bff7991 100644 --- a/var/spack/repos/builtin/packages/py-warpx/package.py +++ b/var/spack/repos/builtin/packages/py-warpx/package.py @@ -18,7 +18,7 @@ class PyWarpx(PythonPackage): """ homepage = "https://ecp-warpx.github.io" - url = "https://github.com/ECP-WarpX/WarpX/archive/refs/tags/22.05.tar.gz" + url = "https://github.com/ECP-WarpX/WarpX/archive/refs/tags/22.10.tar.gz" git = "https://github.com/ECP-WarpX/WarpX.git" maintainers = ["ax3l", "dpgrote", "RemiLehe"] @@ -27,6 +27,8 @@ class PyWarpx(PythonPackage): # NOTE: if you update the versions here, also see warpx version("develop", branch="development") + version("22.10", sha256="3cbbbbb4d79f806b15e81c3d0e4a4401d1d03d925154682a3060efebd3b6ca3e") + version("22.09", sha256="dbef1318248c86c860cc47f7e18bbb0397818e3acdfb459e48075004bdaedea3") version("22.08", sha256="5ff7fd628e8bf615c1107e6c51bc55926f3ef2a076985444b889d292fecf56d4") version("22.07", sha256="0286adc788136cb78033cb1678d38d36e42265bcfd3d0c361a9bcc2cfcdf241b") version("22.06", sha256="e78398e215d3fc6bc5984f5d1c2ddeac290dcbc8a8e9d196e828ef6299187db9") @@ -48,6 +50,8 @@ class PyWarpx(PythonPackage): variant("mpi", default=True, description="Enable MPI support") for v in [ + "22.10", + "22.09", "22.08", "22.07", "22.06", @@ -77,7 +81,8 @@ class PyWarpx(PythonPackage): depends_on("py-picmistandard@0.0.14", type=("build", "run"), when="@21.03:21.11") depends_on("py-picmistandard@0.0.16", type=("build", "run"), when="@21.12") depends_on("py-picmistandard@0.0.18", 
type=("build", "run"), when="@22.01") - depends_on("py-picmistandard@0.0.19", type=("build", "run"), when="@22.02:") + depends_on("py-picmistandard@0.0.19", type=("build", "run"), when="@22.02:22.09") + depends_on("py-picmistandard@0.0.20", type=("build", "run"), when="@22.10:") depends_on("py-setuptools@42:", type="build") # Since we use PYWARPX_LIB_DIR to pull binaries out of the # 'warpx' spack package, we don't need py-cmake as declared diff --git a/var/spack/repos/builtin/packages/py-wrapt/package.py b/var/spack/repos/builtin/packages/py-wrapt/package.py index 281f56f0b6c..a7a57fa0250 100644 --- a/var/spack/repos/builtin/packages/py-wrapt/package.py +++ b/var/spack/repos/builtin/packages/py-wrapt/package.py @@ -12,6 +12,7 @@ class PyWrapt(PythonPackage): homepage = "https://github.com/GrahamDumpleton/wrapt" pypi = "wrapt/wrapt-1.11.2.tar.gz" + version("1.14.1", sha256="380a85cf89e0e69b7cfbe2ea9f765f004ff419f34194018a6827ac0e3edfed4d") version("1.13.3", sha256="1fea9cd438686e6682271d36f3481a9f3636195578bab9ca3382e2f5f01fc185") version("1.12.1", sha256="b62ffa81fb85f4332a4f609cab4ac40709470da05643a082ec1eb88e6d9b97d7") version("1.11.2", sha256="565a021fd19419476b9362b05eeaa094178de64f8361e44468f9e9d7843901e1") diff --git a/var/spack/repos/builtin/packages/py-xopen/package.py b/var/spack/repos/builtin/packages/py-xopen/package.py index 801dcd5da93..22ce0369e03 100644 --- a/var/spack/repos/builtin/packages/py-xopen/package.py +++ b/var/spack/repos/builtin/packages/py-xopen/package.py @@ -12,9 +12,11 @@ class PyXopen(PythonPackage): compression formats are gzip, bzip2 and xz. 
They are automatically recognized by their file extensions .gz, .bz2 or .xz.""" - homepage = "https://github.com/marcelm/xopen" + homepage = "https://github.com/pycompression/xopen" pypi = "xopen/xopen-0.1.1.tar.gz" + version("1.6.0", sha256="72219a4d690e9c90ad445c45d2119ae2a6d5d38912255631e227aceac6294353") + version("1.1.0", sha256="38277eb96313b2e8822e19e793791801a1f41bf13ee5b48616a97afc65e9adb3") version("1.0.1", sha256="79d7e425fb0930b0153eb6beba9a540ca3e07ac254ca828577ad2e8fa24105dc") version("0.9.0", sha256="1e3918c8a5cd2bd128ba05b3b883ee322349219c99c305e10114638478e3162a") version("0.8.4", sha256="dcd8f5ef5da5564f514a990573a48a0c347ee1fdbb9b6374d31592819868f7ba") @@ -22,9 +24,13 @@ class PyXopen(PythonPackage): version("0.5.0", sha256="b097cd25e8afec42b6e1780c1f6315016171b5b6936100cdf307d121e2cbab9f") version("0.1.1", sha256="d1320ca46ed464a59db4c27c7a44caf5e268301e68319f0295d06bf6a9afa6f3") + depends_on("python@3.7:", type=("build", "run"), when="@1.5.0:") + depends_on("python@3.6:", type=("build", "run"), when="@1.1.0:") depends_on("python@3.5:", type=("build", "run"), when="@0.9.0:") depends_on("python@2.7,3.4:", type=("build", "run"), when="@0.5:0.8") depends_on("python@2.6:2,3.3:", type=("build", "run"), when="@0.1.1") depends_on("py-setuptools", type="build") + depends_on("py-setuptools-scm@6.2:", type="build", when="@1.2.0:") depends_on("py-setuptools-scm", type="build") + depends_on("py-isal@1.0.0:", type=("build", "run"), when="@1.6.0: target=x86_64:") depends_on("py-bz2file", type=("build", "run"), when="@0.5: ^python@:2.8") diff --git a/var/spack/repos/builtin/packages/python/package.py b/var/spack/repos/builtin/packages/python/package.py index 48d3cae903b..1681e2524e0 100644 --- a/var/spack/repos/builtin/packages/python/package.py +++ b/var/spack/repos/builtin/packages/python/package.py @@ -44,6 +44,13 @@ class Python(Package): install_targets = ["install"] build_targets = [] # type: List[str] + version("3.11.0", 
sha256="64424e96e2457abbac899b90f9530985b51eef2905951febd935f0e73414caeb") + version( + "3.10.8", + sha256="f400c3fb394b8bef1292f6dc1292c5fadc3533039a5bc0c3e885f3e16738029a", + preferred=True, + ) + version("3.10.7", sha256="1b2e4e2df697c52d36731666979e648beeda5941d0f95740aafbf4163e5cc126") version("3.10.6", sha256="848cb06a5caa85da5c45bd7a9221bb821e33fc2bdcba088c127c58fad44e6343") version("3.10.5", sha256="18f57182a2de3b0be76dfc39fdcfd28156bb6dd23e5f08696f7492e9e3d0bf2d") version("3.10.4", sha256="f3bcc65b1d5f1dc78675c746c98fcee823c038168fc629c5935b044d0911ad28") @@ -51,11 +58,9 @@ class Python(Package): version("3.10.2", sha256="3c0ede893011319f9b0a56b44953a3d52c7abf9657c23fb4bc9ced93b86e9c97") version("3.10.1", sha256="b76117670e7c5064344b9c138e141a377e686b9063f3a8a620ff674fa8ec90d3") version("3.10.0", sha256="c4e0cbad57c90690cb813fb4663ef670b4d0f587d8171e2c42bd4c9245bd2758") - version( - "3.9.13", - sha256="829b0d26072a44689a6b0810f5b4a3933ee2a0b8a4bfc99d7c5893ffd4f97c44", - preferred=True, - ) + version("3.9.15", sha256="48d1ccb29d5fbaf1fb8f912271d09f7450e426d4dfe95978ef6aaada70ece4d8") + version("3.9.14", sha256="9201836e2c16361b2b7408680502393737d44f227333fe2e5729c7d5f6041675") + version("3.9.13", sha256="829b0d26072a44689a6b0810f5b4a3933ee2a0b8a4bfc99d7c5893ffd4f97c44") version("3.9.12", sha256="70e08462ebf265012bd2be88a63d2149d880c73e53f1712b7bbbe93750560ae8") version("3.9.11", sha256="3442400072f582ac2f0df30895558f08883b416c8c7877ea55d40d00d8a93112") version("3.9.10", sha256="1aa9c0702edbae8f6a2c95f70a49da8420aaa76b7889d3419c186bfc8c0e571e") @@ -69,6 +74,8 @@ class Python(Package): version("3.9.2", sha256="7899e8a6f7946748830d66739f2d8f2b30214dad956e56b9ba216b3de5581519") version("3.9.1", sha256="29cb91ba038346da0bd9ab84a0a55a845d872c341a4da6879f462e94c741f117") version("3.9.0", sha256="df796b2dc8ef085edae2597a41c1c0a63625ebd92487adaef2fed22b567873e8") + version("3.8.15", sha256="924d46999df82aa2eaa1de5ca51d6800ffb56b4bf52486a28f40634e3362abc4") + 
version("3.8.14", sha256="41f959c480c59211feb55d5a28851a56c7e22d02ef91035606ebb21011723c31") version("3.8.13", sha256="903b92d76354366b1d9c4434d0c81643345cef87c1600adfa36095d7b00eede4") version("3.8.12", sha256="316aa33f3b7707d041e73f246efedb297a70898c4b91f127f66dc8d80c596f1a") version("3.8.11", sha256="b77464ea80cec14581b86aeb7fb2ff02830e0abc7bcdc752b7b4bdfcd8f3e393") @@ -83,6 +90,8 @@ class Python(Package): version("3.8.2", sha256="e634a7a74776c2b89516b2e013dda1728c89c8149b9863b8cea21946daf9d561") version("3.8.1", sha256="c7cfa39a43b994621b245e029769e9126caa2a93571cee2e743b213cceac35fb") version("3.8.0", sha256="f1069ad3cae8e7ec467aa98a6565a62a48ef196cb8f1455a245a08db5e1792df") + version("3.7.15", sha256="cf2993798ae8430f3af3a00d96d9fdf320719f4042f039380dca79967c25e436") + version("3.7.14", sha256="82b2abf8978caa61a9011d166eede831b32de9cbebc0db8162900fa23437b709") version("3.7.13", sha256="e405417f50984bc5870c7e7a9f9aeb93e9d270f5ac67f667a0cd3a09439682b5") version("3.7.12", sha256="33b4daaf831be19219659466d12645f87ecec6eb21d4d9f9711018a7b66cce46") version("3.7.11", sha256="b4fba32182e16485d0a6022ba83c9251e6a1c14676ec243a9a07d3722cd4661a") @@ -412,14 +421,16 @@ class Python(Package): patch("python-2.7.17+-distutils-C++-fixup.patch", when="@2.7.17:2.7.18") patch("python-3.6.8-distutils-C++.patch", when="@3.6.8,3.7.2") patch("python-3.7.3-distutils-C++.patch", when="@3.7.3") - patch("python-3.7.4+-distutils-C++.patch", when="@3.7.4:") + patch("python-3.7.4+-distutils-C++.patch", when="@3.7.4:3.10") patch("python-3.7.4+-distutils-C++-testsuite.patch", when="@3.7.4:") + patch("python-3.11-distutils-C++.patch", when="@3.11.0:3.11") patch("cpython-windows-externals.patch", when="@:3.9.6 platform=windows") patch("tkinter.patch", when="@:2.8,3.3:3.7 platform=darwin") # Patch the setup script to deny that tcl/x11 exists rather than allowing # autodetection of (possibly broken) system components patch("tkinter-3.8.patch", when="@3.8:3.9 ~tkinter") - 
patch("tkinter-3.10.patch", when="@3.10: ~tkinter") + patch("tkinter-3.10.patch", when="@3.10.0:3.10 ~tkinter") + patch("tkinter-3.11.patch", when="@3.11.0:3.11 ~tkinter") # Ensure that distutils chooses correct compiler option for RPATH on cray: patch("cray-rpath-2.3.patch", when="@2.3:3.0.1 platform=cray") @@ -1242,12 +1253,29 @@ def libs(self): # The values of LDLIBRARY and LIBRARY aren't reliable. Intel Python uses a # static binary but installs shared libraries, so sysconfig reports # libpythonX.Y.a but only libpythonX.Y.so exists. So we add our own paths, too. - shared_libs = [ - self.config_vars["LDLIBRARY"], + + # With framework python on macOS, self.config_vars["LDLIBRARY"] can point + # to a library that is not linkable because it does not have the required + # suffix of a shared library (it is called "Python" without extention). + # The linker then falls back to libPython.tbd in the default macOS + # software tree, which security settings prohibit to link against + # (your binary is not an allowed client of /path/to/libPython.tbd). + # To avoid this, we replace the entry in config_vars with a default value. 
+ file_extension_shared = os.path.splitext(self.config_vars["LDLIBRARY"])[-1] + if file_extension_shared == "": + shared_libs = [] + else: + shared_libs = [self.config_vars["LDLIBRARY"]] + shared_libs += [ "{}python{}.{}".format(lib_prefix, py_version, dso_suffix), ] - static_libs = [ - self.config_vars["LIBRARY"], + # Like LDLIBRARY for Python on Mac OS, LIBRARY may refer to an un-linkable object + file_extension_static = os.path.splitext(self.config_vars["LIBRARY"])[-1] + if file_extension_static == "": + static_libs = [] + else: + static_libs = [self.config_vars["LIBRARY"]] + static_libs += [ "{}python{}.{}".format(lib_prefix, py_version, stat_suffix), ] diff --git a/var/spack/repos/builtin/packages/python/python-3.11-distutils-C++.patch b/var/spack/repos/builtin/packages/python/python-3.11-distutils-C++.patch new file mode 100644 index 00000000000..335e06b93c3 --- /dev/null +++ b/var/spack/repos/builtin/packages/python/python-3.11-distutils-C++.patch @@ -0,0 +1,257 @@ +diff --git a/Lib/_osx_support.py b/Lib/_osx_support.py +index aa66c8b9f4..71e6556bac 100644 +--- a/Lib/_osx_support.py ++++ b/Lib/_osx_support.py +@@ -14,13 +14,13 @@ + # configuration variables that may contain universal build flags, + # like "-arch" or "-isdkroot", that may need customization for + # the user environment +-_UNIVERSAL_CONFIG_VARS = ('CFLAGS', 'LDFLAGS', 'CPPFLAGS', 'BASECFLAGS', +- 'BLDSHARED', 'LDSHARED', 'CC', 'CXX', +- 'PY_CFLAGS', 'PY_LDFLAGS', 'PY_CPPFLAGS', +- 'PY_CORE_CFLAGS', 'PY_CORE_LDFLAGS') ++_UNIVERSAL_CONFIG_VARS = ('CFLAGS', 'CXXFLAGS', 'LDFLAGS', 'CPPFLAGS', ++ 'BASECFLAGS', 'BLDSHARED', 'LDSHARED', 'LDCXXSHARED', ++ 'CC', 'CXX', 'PY_CFLAGS', 'PY_LDFLAGS', ++ 'PY_CPPFLAGS', 'PY_CORE_LDFLAGS', 'PY_CORE_CFLAGS') + + # configuration variables that may contain compiler calls +-_COMPILER_CONFIG_VARS = ('BLDSHARED', 'LDSHARED', 'CC', 'CXX') ++_COMPILER_CONFIG_VARS = ('BLDSHARED', 'LDSHARED', 'LDCXXSHARED', 'CC', 'CXX') + + # prefix added to original configuration 
variable names + _INITPRE = '_OSX_SUPPORT_INITIAL_' +diff --git a/Lib/distutils/cygwinccompiler.py b/Lib/distutils/cygwinccompiler.py +index 66c12dd358..dddb9fd2d4 100644 +--- a/Lib/distutils/cygwinccompiler.py ++++ b/Lib/distutils/cygwinccompiler.py +@@ -123,8 +123,10 @@ def __init__(self, verbose=0, dry_run=0, force=0): + # dllwrap 2.10.90 is buggy + if self.ld_version >= "2.10.90": + self.linker_dll = "gcc" ++ self.linker_dll_cxx = "g++" + else: + self.linker_dll = "dllwrap" ++ self.linker_dll_cxx = "dllwrap" + + # ld_version >= "2.13" support -shared so use it instead of + # -mdll -static +@@ -138,9 +140,13 @@ def __init__(self, verbose=0, dry_run=0, force=0): + self.set_executables(compiler='gcc -mcygwin -O -Wall', + compiler_so='gcc -mcygwin -mdll -O -Wall', + compiler_cxx='g++ -mcygwin -O -Wall', ++ compiler_so_cxx='g++ -mcygwin -mdll -O -Wall', + linker_exe='gcc -mcygwin', + linker_so=('%s -mcygwin %s' % +- (self.linker_dll, shared_option))) ++ (self.linker_dll, shared_option)), ++ linker_exe_cxx='g++ -mcygwin', ++ linker_so_cxx=('%s -mcygwin %s' % ++ (self.linker_dll_cxx, shared_option))) + + # cygwin and mingw32 need different sets of libraries + if self.gcc_version == "2.91.57": +@@ -164,8 +170,12 @@ def _compile(self, obj, src, ext, cc_args, extra_postargs, pp_opts): + raise CompileError(msg) + else: # for other files use the C-compiler + try: +- self.spawn(self.compiler_so + cc_args + [src, '-o', obj] + +- extra_postargs) ++ if self.detect_language(src) == 'c++': ++ self.spawn(self.compiler_so_cxx + cc_args + [src, '-o', obj] + ++ extra_postargs) ++ else: ++ self.spawn(self.compiler_so + cc_args + [src, '-o', obj] + ++ extra_postargs) + except DistutilsExecError as msg: + raise CompileError(msg) + +@@ -300,9 +310,14 @@ def __init__(self, verbose=0, dry_run=0, force=0): + self.set_executables(compiler='gcc -O -Wall', + compiler_so='gcc -mdll -O -Wall', + compiler_cxx='g++ -O -Wall', ++ compiler_so_cxx='g++ -mdll -O -Wall', + linker_exe='gcc', + 
linker_so='%s %s %s' + % (self.linker_dll, shared_option, ++ entry_point), ++ linker_exe_cxx='g++', ++ linker_so_cxx='%s %s %s' ++ % (self.linker_dll_cxx, shared_option, + entry_point)) + # Maybe we should also append -mthreads, but then the finished + # dlls need another dll (mingwm10.dll see Mingw32 docs) +diff --git a/Lib/distutils/sysconfig.py b/Lib/distutils/sysconfig.py +index 3414a761e7..f1af560cc1 100644 +--- a/Lib/distutils/sysconfig.py ++++ b/Lib/distutils/sysconfig.py +@@ -216,9 +216,11 @@ def customize_compiler(compiler): + _osx_support.customize_compiler(_config_vars) + _config_vars['CUSTOMIZED_OSX_COMPILER'] = 'True' + +- (cc, cxx, cflags, ccshared, ldshared, shlib_suffix, ar, ar_flags) = \ +- get_config_vars('CC', 'CXX', 'CFLAGS', +- 'CCSHARED', 'LDSHARED', 'SHLIB_SUFFIX', 'AR', 'ARFLAGS') ++ (cc, cxx, cflags, ccshared, ldshared, ldcxxshared, shlib_suffix, ar, ar_flags) = \ ++ get_config_vars('CC', 'CXX', 'CFLAGS', 'CCSHARED', 'LDSHARED', 'LDCXXSHARED', ++ 'SHLIB_SUFFIX', 'AR', 'ARFLAGS') ++ ++ cxxflags = cflags + + if 'CC' in os.environ: + newcc = os.environ['CC'] +@@ -233,19 +235,27 @@ def customize_compiler(compiler): + cxx = os.environ['CXX'] + if 'LDSHARED' in os.environ: + ldshared = os.environ['LDSHARED'] ++ if 'LDCXXSHARED' in os.environ: ++ ldcxxshared = os.environ['LDCXXSHARED'] + if 'CPP' in os.environ: + cpp = os.environ['CPP'] + else: + cpp = cc + " -E" # not always + if 'LDFLAGS' in os.environ: + ldshared = ldshared + ' ' + os.environ['LDFLAGS'] ++ ldcxxshared = ldcxxshared + ' ' + os.environ['LDFLAGS'] + if 'CFLAGS' in os.environ: +- cflags = cflags + ' ' + os.environ['CFLAGS'] ++ cflags = os.environ['CFLAGS'] + ldshared = ldshared + ' ' + os.environ['CFLAGS'] ++ if 'CXXFLAGS' in os.environ: ++ cxxflags = os.environ['CXXFLAGS'] ++ ldcxxshared = ldcxxshared + ' ' + os.environ['CXXFLAGS'] + if 'CPPFLAGS' in os.environ: + cpp = cpp + ' ' + os.environ['CPPFLAGS'] + cflags = cflags + ' ' + os.environ['CPPFLAGS'] ++ cxxflags = cxxflags + ' ' 
+ os.environ['CPPFLAGS'] + ldshared = ldshared + ' ' + os.environ['CPPFLAGS'] ++ ldcxxshared = ldcxxshared + ' ' + os.environ['CPPFLAGS'] + if 'AR' in os.environ: + ar = os.environ['AR'] + if 'ARFLAGS' in os.environ: +@@ -254,13 +264,17 @@ def customize_compiler(compiler): + archiver = ar + ' ' + ar_flags + + cc_cmd = cc + ' ' + cflags ++ cxx_cmd = cxx + ' ' + cxxflags + compiler.set_executables( + preprocessor=cpp, + compiler=cc_cmd, + compiler_so=cc_cmd + ' ' + ccshared, +- compiler_cxx=cxx, ++ compiler_cxx=cxx_cmd, ++ compiler_so_cxx=cxx_cmd + ' ' + ccshared, + linker_so=ldshared, + linker_exe=cc, ++ linker_so_cxx=ldcxxshared, ++ linker_exe_cxx=cxx, + archiver=archiver) + + compiler.shared_lib_extension = shlib_suffix +diff --git a/Lib/distutils/unixccompiler.py b/Lib/distutils/unixccompiler.py +index d00c48981e..4a3d271fee 100644 +--- a/Lib/distutils/unixccompiler.py ++++ b/Lib/distutils/unixccompiler.py +@@ -52,14 +52,17 @@ class UnixCCompiler(CCompiler): + # are pretty generic; they will probably have to be set by an outsider + # (eg. using information discovered by the sysconfig about building + # Python extensions). 
+- executables = {'preprocessor' : None, +- 'compiler' : ["cc"], +- 'compiler_so' : ["cc"], +- 'compiler_cxx' : ["cc"], +- 'linker_so' : ["cc", "-shared"], +- 'linker_exe' : ["cc"], +- 'archiver' : ["ar", "-cr"], +- 'ranlib' : None, ++ executables = {'preprocessor' : None, ++ 'compiler' : ["cc"], ++ 'compiler_so' : ["cc"], ++ 'compiler_cxx' : ["c++"], ++ 'compiler_so_cxx' : ["c++"], ++ 'linker_so' : ["cc", "-shared"], ++ 'linker_exe' : ["cc"], ++ 'linker_so_cxx' : ["c++", "-shared"], ++ 'linker_exe_cxx' : ["c++"], ++ 'archiver' : ["ar", "-cr"], ++ 'ranlib' : None, + } + + if sys.platform[:6] == "darwin": +@@ -110,12 +113,19 @@ def preprocess(self, source, output_file=None, macros=None, + + def _compile(self, obj, src, ext, cc_args, extra_postargs, pp_opts): + compiler_so = self.compiler_so ++ compiler_so_cxx = self.compiler_so_cxx + if sys.platform == 'darwin': + compiler_so = _osx_support.compiler_fixup(compiler_so, + cc_args + extra_postargs) ++ compiler_so_cxx = _osx_support.compiler_fixup(compiler_so_cxx, ++ cc_args + extra_postargs) + try: +- self.spawn(compiler_so + cc_args + [src, '-o', obj] + +- extra_postargs) ++ if self.detect_language(src) == 'c++': ++ self.spawn(compiler_so_cxx + cc_args + [src, '-o', obj] + ++ extra_postargs) ++ else: ++ self.spawn(compiler_so + cc_args + [src, '-o', obj] + ++ extra_postargs) + except DistutilsExecError as msg: + raise CompileError(msg) + +@@ -173,30 +183,16 @@ def link(self, target_desc, objects, + ld_args.extend(extra_postargs) + self.mkpath(os.path.dirname(output_filename)) + try: +- if target_desc == CCompiler.EXECUTABLE: +- linker = self.linker_exe[:] ++ if target_lang == "c++": ++ if target_desc == CCompiler.EXECUTABLE: ++ linker = self.linker_exe_cxx[:] ++ else: ++ linker = self.linker_so_cxx[:] + else: +- linker = self.linker_so[:] +- if target_lang == "c++" and self.compiler_cxx: +- # skip over environment variable settings if /usr/bin/env +- # is used to set up the linker's environment. 
+- # This is needed on OSX. Note: this assumes that the +- # normal and C++ compiler have the same environment +- # settings. +- i = 0 +- if os.path.basename(linker[0]) == "env": +- i = 1 +- while '=' in linker[i]: +- i += 1 +- +- if os.path.basename(linker[i]) == 'ld_so_aix': +- # AIX platforms prefix the compiler with the ld_so_aix +- # script, so we need to adjust our linker index +- offset = 1 ++ if target_desc == CCompiler.EXECUTABLE: ++ linker = self.linker_exe[:] + else: +- offset = 0 +- +- linker[i+offset] = self.compiler_cxx[i] ++ linker = self.linker_so[:] + + if sys.platform == 'darwin': + linker = _osx_support.compiler_fixup(linker, ld_args) +diff --git a/Makefile.pre.in b/Makefile.pre.in +index f803391346..090f14c46c 100644 +--- a/Makefile.pre.in ++++ b/Makefile.pre.in +@@ -732,9 +732,9 @@ sharedmods: $(BUILDPYTHON) pybuilddir.txt @LIBMPDEC_INTERNAL@ @LIBEXPAT_INTERNAL + *\ -s*|s*) quiet="-q";; \ + *) quiet="";; \ + esac; \ +- echo "$(RUNSHARED) CC='$(CC)' LDSHARED='$(BLDSHARED)' OPT='$(OPT)' \ ++ echo "$(RUNSHARED) CC='$(CC)' LDSHARED='$(BLDSHARED)' CFLAGS='$(PY_CFLAGS)' \ + $(PYTHON_FOR_BUILD) $(srcdir)/setup.py $$quiet build"; \ +- $(RUNSHARED) CC='$(CC)' LDSHARED='$(BLDSHARED)' OPT='$(OPT)' \ ++ $(RUNSHARED) CC='$(CC)' LDSHARED='$(BLDSHARED)' CFLAGS='$(PY_CFLAGS)' \ + $(PYTHON_FOR_BUILD) $(srcdir)/setup.py $$quiet build + + diff --git a/var/spack/repos/builtin/packages/python/tkinter-3.11.patch b/var/spack/repos/builtin/packages/python/tkinter-3.11.patch new file mode 100644 index 00000000000..fe2d54bd43c --- /dev/null +++ b/var/spack/repos/builtin/packages/python/tkinter-3.11.patch @@ -0,0 +1,25 @@ +From a49e95e44961a0b6703ef9cb577d2ae5334c4a62 Mon Sep 17 00:00:00 2001 +From: Harmen Stoppels +Date: Thu, 3 Nov 2022 13:54:00 +0100 +Subject: [PATCH] disable tkinter explicitly + +--- + setup.py | 2 +- + 1 file changed, 1 insertion(+), 1 deletion(-) + +diff --git a/setup.py b/setup.py +index 15d0d45..642adb3 100644 +--- a/setup.py ++++ b/setup.py +@@ 
-1358,7 +1358,7 @@ class PyBuildExt(build_ext): + self.detect_decimal() + self.detect_ctypes() + self.detect_multiprocessing() +- self.detect_tkinter() ++ # self.detect_tkinter() + self.detect_uuid() + + # Uncomment the next line if you want to play with xxmodule.c +-- +2.38.1 + diff --git a/var/spack/repos/builtin/packages/qcat/package.py b/var/spack/repos/builtin/packages/qcat/package.py new file mode 100644 index 00000000000..d0753663e6a --- /dev/null +++ b/var/spack/repos/builtin/packages/qcat/package.py @@ -0,0 +1,24 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class Qcat(CMakePackage): + """Quick data compression quality analysis tool""" + + homepage = "https://github.com/szcompressor/qcat" + git = "https://github.com/szcompressor/qcat" + + maintainers = ["disheng222", "robertu94"] + + version("master", branch="master") + version("1.4", commit="f16032cf237837b1d32dde0c3daa6ad1ca4a912f") + + depends_on("zstd") + + def cmake_args(self): + args = ["-DQCAT_USE_BUNDLES=OFF"] + return args diff --git a/var/spack/repos/builtin/packages/qgis/package.py b/var/spack/repos/builtin/packages/qgis/package.py index 377c4fd10eb..158a2a021f6 100644 --- a/var/spack/repos/builtin/packages/qgis/package.py +++ b/var/spack/repos/builtin/packages/qgis/package.py @@ -240,6 +240,11 @@ def cmake_args(self): args.append("-DWITH_GRASS7=OFF") return args + def setup_run_environment(self, env): + if "+bindings" in self.spec: + # python module isn't located at the standard path + env.prepend_path("PYTHONPATH", self.prefix.share.qgis.python) + def check(self): """The tests of fail without access to an X server, cant run on build servers""" pass diff --git a/var/spack/repos/builtin/packages/qoz/package.py b/var/spack/repos/builtin/packages/qoz/package.py new file mode 100644 index 
00000000000..81ca6bd7944 --- /dev/null +++ b/var/spack/repos/builtin/packages/qoz/package.py @@ -0,0 +1,28 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class Qoz(CMakePackage): + """Quality optimized version of SZ3 is the next generation of the SZ compressor framework""" + + git = "https://github.com/robertu94/QoZ" + homepage = git + + version("2022.04.26", commit="d28a7a8c9f703075441b700202b8a1ee185ded00") + + maintainers = ["disheng222"] + + depends_on("zstd") + depends_on("gsl") + depends_on("pkgconfig") + + def cmake_args(self): + args = [ + "-DQoZ_USE_BUNDLED_ZSTD=OFF", + "-DQoZ_DEBUG_TIMINGS=OFF", + ] + return args diff --git a/var/spack/repos/builtin/packages/quantum-espresso/package.py b/var/spack/repos/builtin/packages/quantum-espresso/package.py index acc07692cf0..d4454082113 100644 --- a/var/spack/repos/builtin/packages/quantum-espresso/package.py +++ b/var/spack/repos/builtin/packages/quantum-espresso/package.py @@ -2,11 +2,12 @@ # Spack Project Developers. See the top-level COPYRIGHT file for details. # # SPDX-License-Identifier: (Apache-2.0 OR MIT) - +import spack.build_systems.cmake +import spack.build_systems.generic from spack.package import * -class QuantumEspresso(CMakePackage): +class QuantumEspresso(CMakePackage, Package): """Quantum ESPRESSO is an integrated suite of Open-Source computer codes for electronic-structure calculations and materials modeling at the nanoscale. 
It is based on density-functional theory, plane waves, and @@ -19,6 +20,8 @@ class QuantumEspresso(CMakePackage): maintainers = ["ye-luo", "danielecesarini", "bellenlau"] + build_system(conditional("cmake", when="@6.8:"), "generic", default="cmake") + version("develop", branch="develop") version("7.1", sha256="d56dea096635808843bd5a9be2dee3d1f60407c01dbeeda03f8256a3bcfc4eb6") version("7.0", sha256="85beceb1aaa1678a49e774c085866d4612d9d64108e0ac49b23152c8622880ee") @@ -56,10 +59,8 @@ class QuantumEspresso(CMakePackage): destination=".", ) - variant("cmake", default=True, description="Builds via CMake") - with when("+cmake"): + with when("build_system=cmake"): depends_on("cmake@3.14.0:", type="build") - conflicts("@:6.7", msg="+cmake works since QE v6.8") variant("libxc", default=False, description="Uses libxc") depends_on("libxc@5.1.2:", when="+libxc") @@ -72,6 +73,7 @@ class QuantumEspresso(CMakePackage): depends_on("amdfftw+openmp", when="^amdfftw") depends_on("openblas threads=openmp", when="^openblas") depends_on("amdblis threads=openmp", when="^amdblis") + depends_on("intel-mkl threads=openmp", when="^intel-mkl") # Add Cuda Fortran support # depends on NVHPC compiler, not directly on CUDA toolkit @@ -93,7 +95,7 @@ class QuantumEspresso(CMakePackage): msg="bugs with NVHPCSDK from v21.11 to v22.3, OpenMP and GPU", ) # only cmake is supported - conflicts("~cmake", msg="Only CMake supported for GPU-enabled version") + conflicts("build_system=generic", msg="Only CMake supported for GPU-enabled version") # NVTX variant for profiling # requires linking to CUDA runtime APIs , handled by CMake @@ -121,9 +123,10 @@ class QuantumEspresso(CMakePackage): with when("+elpa"): # CMake builds only support elpa without openmp - depends_on("elpa~openmp", when="+cmake") - depends_on("elpa+openmp", when="+openmp~cmake") - depends_on("elpa~openmp", when="~openmp~cmake") + depends_on("elpa~openmp", when="build_system=cmake") + with when("build_system=generic"): + 
depends_on("elpa+openmp", when="+openmp") + depends_on("elpa~openmp", when="~openmp") # Elpa is formally supported by @:5.4.0, but QE configure searches # for it in the wrong folders (or tries to download it within # the build directory). Instead of patching Elpa to provide the @@ -181,12 +184,14 @@ class QuantumEspresso(CMakePackage): with when("@7.0.1:"): # when QE doesn't use hdf5 library, the converter plugin still needs it depends_on("hdf5@1.8.16:+hl~mpi", when="hdf5=none") - conflicts("~cmake", msg="QE-to-QMCPACK wave function converter requires cmake") + conflicts( + "build_system=generic", msg="QE-to-QMCPACK wave function converter requires cmake" + ) # Enables building Electron-phonon Wannier 'epw.x' executable # http://epw.org.uk/Main/About - variant("epw", default=False, description="Builds Electron-phonon Wannier executable") - conflicts("~epw", when="+cmake", msg="epw cannot be turned off when using CMake") + variant("epw", default=True, description="Builds Electron-phonon Wannier executable") + conflicts("~epw", when="build_system=cmake", msg="epw cannot be turned off when using CMake") with when("+epw"): # The first version of Q-E to feature integrated EPW is 6.0.0, @@ -198,8 +203,10 @@ class QuantumEspresso(CMakePackage): # Constraints may be relaxed as successful reports # of different compiler+mpi combinations arrive - # TODO: enable building EPW when ~mpi and ~cmake - conflicts("~mpi", when="~cmake", msg="EPW needs MPI when ~cmake") + # TODO: enable building EPW when ~mpi and build_system=generic + conflicts( + "~mpi", when="build_system=generic", msg="EPW needs MPI when build_system=generic" + ) # EPW doesn't gets along well with OpenMPI 2.x.x conflicts("^openmpi@2.0.0:2", msg="OpenMPI version incompatible with EPW") @@ -212,19 +219,19 @@ class QuantumEspresso(CMakePackage): variant( "environ", default=False, + when="build_system=generic", description="Enables support for introducing environment effects " "into atomistic first-principles 
simulations." "See http://quantum-environ.org/about.html", ) - conflicts("+environ", when="+cmake", msg="environ doesn't work with CMake") variant( "gipaw", default=False, + when="build_system=generic", description="Builds Gauge-Including Projector Augmented-Waves executable", ) with when("+gipaw"): - conflicts("+cmake", msg="gipaw doesn't work with CMake") conflicts( "@:6.3", msg="gipaw standard support available for QE 6.3 or grater version only" ) @@ -370,6 +377,8 @@ class QuantumEspresso(CMakePackage): # extlibs_makefile updated to work with fujitsu compilers patch("fj-fox.patch", when="+patch %fj") + +class CMakeBuilder(spack.build_systems.cmake.CMakeBuilder): def cmake_args(self): spec = self.spec @@ -400,18 +409,9 @@ def cmake_args(self): return cmake_args - @when("~cmake") - def cmake(self, spec, prefix): - print("Bypass cmake stage when building via configure") - - @when("~cmake") - def build(self, spec, prefix): - print("Bypass build stage when building via configure") - - @when("~cmake") - def install(self, spec, prefix): - print("Override install stage when building via configure") +class GenericBuilder(spack.build_systems.generic.GenericBuilder): + def install(self, pkg, spec, prefix): prefix_path = prefix.bin if "@:5.4.0" in spec else prefix options = ["-prefix={0}".format(prefix_path)] @@ -577,7 +577,7 @@ def install(self, spec, prefix): # can't be applied to the '+qmcpack' variant if spec.variants["hdf5"].value != "none": if spec.satisfies("@6.1.0:6.4.0") or (spec.satisfies("@6.4.1") and "+qmcpack" in spec): - make_inc = join_path(self.stage.source_path, "make.inc") + make_inc = join_path(self.pkg.stage.source_path, "make.inc") zlib_libs = spec["zlib"].prefix.lib + " -lz" filter_file(zlib_libs, format(spec["zlib"].libs.ld_flags), make_inc) diff --git a/var/spack/repos/builtin/packages/r-annotationforge/package.py b/var/spack/repos/builtin/packages/r-annotationforge/package.py index ab4b8d0db6b..cab41691450 100644 --- 
a/var/spack/repos/builtin/packages/r-annotationforge/package.py +++ b/var/spack/repos/builtin/packages/r-annotationforge/package.py @@ -14,6 +14,7 @@ class RAnnotationforge(RPackage): bioc = "AnnotationForge" + version("1.38.1", commit="2dcedf353bc57bf80818e6adb1f7129c21886f6b") version("1.38.0", commit="1f77750562ea3a01f0f1a46c299184fc31196ffd") version("1.36.0", commit="523b5f0c3ffb77e59e1568e5f36a5a470bfeeae5") version("1.32.0", commit="3d17c2a945951c02fe152e5a8a8e9c6cb41e30f7") diff --git a/var/spack/repos/builtin/packages/r-biocparallel/package.py b/var/spack/repos/builtin/packages/r-biocparallel/package.py index f1a13d964f5..0a315307a97 100644 --- a/var/spack/repos/builtin/packages/r-biocparallel/package.py +++ b/var/spack/repos/builtin/packages/r-biocparallel/package.py @@ -15,6 +15,7 @@ class RBiocparallel(RPackage): bioc = "BiocParallel" + version("1.30.4", commit="1229ebe9f6d8305f9f61e562464f83f9ba86e699") version("1.30.2", commit="e7e109f7a94dbfbc50f926be030c7ad8c1a053db") version("1.28.3", commit="2f9d88ad83659939e7911d49c2d24d2cd599c7cc") version("1.24.1", commit="f713caa4314ec0ddeba7fe0eb599ad417efb413f") @@ -27,4 +28,5 @@ class RBiocparallel(RPackage): depends_on("r@3.5.0:", type=("build", "run"), when="@1.28.3:") depends_on("r-futile-logger", type=("build", "run")) depends_on("r-snow", type=("build", "run")) + depends_on("r-codetools", type=("build", "run"), when="@1.30.4:") depends_on("r-bh", type=("build", "run"), when="@1.12.0:") diff --git a/var/spack/repos/builtin/packages/r-biostrings/package.py b/var/spack/repos/builtin/packages/r-biostrings/package.py index b2279c62bcc..b4fa9866a0a 100644 --- a/var/spack/repos/builtin/packages/r-biostrings/package.py +++ b/var/spack/repos/builtin/packages/r-biostrings/package.py @@ -15,6 +15,7 @@ class RBiostrings(RPackage): bioc = "Biostrings" + version("2.64.1", commit="ffe263e958463bd1edb5d5d9316cfd89905be53c") version("2.64.0", commit="c7ad3c7af607bc8fe4a5e1c37f09e6c9bf70b4f6") version("2.62.0", 
commit="53ed287e03d16fa523789af3131c60375ccf587f") version("2.58.0", commit="0ec1a5455d5e9eebd14b26228906bb04e2abb197") @@ -39,6 +40,7 @@ class RBiostrings(RPackage): depends_on("r-iranges@2.9.18:", type=("build", "run"), when="@2.44.2:") depends_on("r-iranges@2.13.24:", type=("build", "run"), when="@2.48.0:") depends_on("r-iranges@2.23.9:", type=("build", "run"), when="@2.58.0:") + depends_on("r-iranges@2.30.1:", type=("build", "run"), when="@2.64.1:") depends_on("r-xvector@0.11.6:", type=("build", "run")) depends_on("r-xvector@0.19.8:", type=("build", "run"), when="@2.48.0:") depends_on("r-xvector@0.21.4:", type=("build", "run"), when="@2.50.2:") diff --git a/var/spack/repos/builtin/packages/r-clusterprofiler/package.py b/var/spack/repos/builtin/packages/r-clusterprofiler/package.py index a39b6e81422..3f3d83bb60e 100644 --- a/var/spack/repos/builtin/packages/r-clusterprofiler/package.py +++ b/var/spack/repos/builtin/packages/r-clusterprofiler/package.py @@ -15,6 +15,7 @@ class RClusterprofiler(RPackage): bioc = "clusterProfiler" + version("4.4.4", commit="9fca9a45ca1793884d8dcfd0f077353dbf75df29") version("4.4.1", commit="daad11fb80be2dd9b825e0b484815a0a2b1592a4") version("4.2.2", commit="4ebb9de8e03eedc971f54a57cf5bf1b250ed43d5") version("3.18.0", commit="064a6e612ce27e260e33af78b907bee4065ff821") diff --git a/var/spack/repos/builtin/packages/r-complexheatmap/package.py b/var/spack/repos/builtin/packages/r-complexheatmap/package.py index 2efd9ab0076..6cf4964280d 100644 --- a/var/spack/repos/builtin/packages/r-complexheatmap/package.py +++ b/var/spack/repos/builtin/packages/r-complexheatmap/package.py @@ -16,6 +16,7 @@ class RComplexheatmap(RPackage): bioc = "ComplexHeatmap" + version("2.12.1", commit="2c5fe70724219008174d4e6f83189cddbd895ec6") version("2.12.0", commit="8a5f060b06646f9d6a5032832ea72e3f183ca5d7") version("2.10.0", commit="170df82a1568e879e4019e0ff6feb0047851684f") version("2.6.2", commit="0383bada2c76dc3dde71cf6a625016b619aec4d3") diff --git 
a/var/spack/repos/builtin/packages/r-delayedmatrixstats/package.py b/var/spack/repos/builtin/packages/r-delayedmatrixstats/package.py index 15f852038d5..df0e57aa984 100644 --- a/var/spack/repos/builtin/packages/r-delayedmatrixstats/package.py +++ b/var/spack/repos/builtin/packages/r-delayedmatrixstats/package.py @@ -18,6 +18,7 @@ class RDelayedmatrixstats(RPackage): bioc = "DelayedMatrixStats" + version("1.18.1", commit="9c4658d11fc20b7d88e05b9c52140c2ca8a65768") version("1.18.0", commit="50c9aab259b6e8f68abf44b78122662a41c8bf47") version("1.16.0", commit="d44a3d765769cb022193428a77af25bf19916be7") version("1.12.3", commit="2b3091dfa9b3bab914e3a4157182063714ba86ae") @@ -40,6 +41,7 @@ class RDelayedmatrixstats(RPackage): depends_on("r-matrixstats@0.60.0:", type=("build", "run"), when="@1.16.0:") depends_on("r-sparsematrixstats", type=("build", "run"), when="@1.12.2:") depends_on("r-matrix", type=("build", "run")) + depends_on("r-matrix@1.5.0:", type=("build", "run"), when="@1.18.1:") depends_on("r-s4vectors", type=("build", "run")) depends_on("r-s4vectors@0.17.5:", type=("build", "run"), when="@1.2.0:") depends_on("r-iranges", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/r-dose/package.py b/var/spack/repos/builtin/packages/r-dose/package.py index 3b1716da587..99c36609ab0 100644 --- a/var/spack/repos/builtin/packages/r-dose/package.py +++ b/var/spack/repos/builtin/packages/r-dose/package.py @@ -18,6 +18,7 @@ class RDose(RPackage): bioc = "DOSE" + version("3.22.1", commit="6b711a0f076a9fefcb00ddef66e8f198039e6dfa") version("3.22.0", commit="242ac1b746c44fbbf281fbe6e5e4424a8dc74375") version("3.20.1", commit="bf434f24d035217822cb1b0ab08a486b9a53edb4") version("3.16.0", commit="a534a4f2ef1e54e8b92079cf1bbedb5042fd90cd") diff --git a/var/spack/repos/builtin/packages/r-edger/package.py b/var/spack/repos/builtin/packages/r-edger/package.py index ab3610f4734..2dc5026083d 100644 --- a/var/spack/repos/builtin/packages/r-edger/package.py +++ 
b/var/spack/repos/builtin/packages/r-edger/package.py @@ -19,6 +19,7 @@ class REdger(RPackage): bioc = "edgeR" + version("3.38.4", commit="f5a3bb568a23b34146ac66329a95ee4785093536") version("3.38.1", commit="e58bf52f34ec451096f593126922ad7e5d517f7e") version("3.36.0", commit="c7db03addfc42138a1901834409c02da9d873026") version("3.32.1", commit="b881d801d60e5b38413d27f149384c218621c55a") diff --git a/var/spack/repos/builtin/packages/r-enrichplot/package.py b/var/spack/repos/builtin/packages/r-enrichplot/package.py index dc7597b6ea4..24f204a5eaa 100644 --- a/var/spack/repos/builtin/packages/r-enrichplot/package.py +++ b/var/spack/repos/builtin/packages/r-enrichplot/package.py @@ -16,6 +16,7 @@ class REnrichplot(RPackage): bioc = "enrichplot" + version("1.16.2", commit="eeb21345288d96c116ac308649fa772d03760259") version("1.16.1", commit="cff77b622b2312be546714ec437aa4bc585bac87") version("1.14.1", commit="ccf3a6d9b7cd9cffd8de6d6263efdffe59d2ec36") version("1.10.2", commit="77ee04f60a07cc31151f8f47f8ee64f3a43c9760") @@ -49,5 +50,5 @@ class REnrichplot(RPackage): depends_on("r-europepmc", type=("build", "run"), when="@1.2.0:1.4.0") depends_on("r-ggplotify", type=("build", "run"), when="@1.2.0:1.4.0") depends_on("r-gridextra", type=("build", "run"), when="@1.2.0:1.4.0") - depends_on("r-cowplot", type=("build", "run")) + depends_on("r-cowplot", when="@:1.16.1") diff --git a/var/spack/repos/builtin/packages/r-ensembldb/package.py b/var/spack/repos/builtin/packages/r-ensembldb/package.py index 8e2403c6bf8..da2dc2dfe28 100644 --- a/var/spack/repos/builtin/packages/r-ensembldb/package.py +++ b/var/spack/repos/builtin/packages/r-ensembldb/package.py @@ -24,6 +24,7 @@ class REnsembldb(RPackage): bioc = "ensembldb" + version("2.20.2", commit="ac1fb8389efd88099600af298d6bb3384206f9ed") version("2.20.1", commit="e547d184730cfe5e65f59e4f3512395fb1cdba1a") version("2.18.3", commit="e2fcfc0c7700110df070a171d2d542b37ec098f3") version("2.14.0", 
commit="c7150519ed4ef38e5eac1043209863dbc7be43a1") diff --git a/var/spack/repos/builtin/packages/r-genomeinfodb/package.py b/var/spack/repos/builtin/packages/r-genomeinfodb/package.py index de08108955d..e9218f10e0b 100644 --- a/var/spack/repos/builtin/packages/r-genomeinfodb/package.py +++ b/var/spack/repos/builtin/packages/r-genomeinfodb/package.py @@ -17,6 +17,7 @@ class RGenomeinfodb(RPackage): bioc = "GenomeInfoDb" + version("1.32.4", commit="69df6a5a10027fecf6a6d1c8298f3f686b990d8f") version("1.32.2", commit="2e40af38f00ee86d2c83d140e234c1349baa27de") version("1.30.1", commit="bf8b385a2ffcecf9b41e581794056f267895863d") version("1.26.2", commit="96dd27a7e3ef476790b1475aab50dbbed7df67a2") diff --git a/var/spack/repos/builtin/packages/r-genomicalignments/package.py b/var/spack/repos/builtin/packages/r-genomicalignments/package.py index 3a2cb8c2deb..20f659508c4 100644 --- a/var/spack/repos/builtin/packages/r-genomicalignments/package.py +++ b/var/spack/repos/builtin/packages/r-genomicalignments/package.py @@ -16,6 +16,7 @@ class RGenomicalignments(RPackage): bioc = "GenomicAlignments" + version("1.32.1", commit="2553580d0b8a8a5fd7835c1446616b39f707b8a9") version("1.32.0", commit="7a660a914a04e2eb0758082b6f64c4124a887ef3") version("1.30.0", commit="2d2c5fce3529c2962fdcefd736d8b7f7c0ec2d54") version("1.26.0", commit="6c74c74ee53efcd880171126366fee4bd72357bc") diff --git a/var/spack/repos/builtin/packages/r-genomicfeatures/package.py b/var/spack/repos/builtin/packages/r-genomicfeatures/package.py index 491b8632bc1..2da07056944 100644 --- a/var/spack/repos/builtin/packages/r-genomicfeatures/package.py +++ b/var/spack/repos/builtin/packages/r-genomicfeatures/package.py @@ -20,6 +20,7 @@ class RGenomicfeatures(RPackage): bioc = "GenomicFeatures" + version("1.48.4", commit="06e37dc1847d49d91391264caec877ed33abf359") version("1.48.3", commit="b0ddea0e101e3861928f3ad353348df047d90382") version("1.46.4", commit="d3ab6fd069624904ce7fcdf75dad884473f97975") version("1.42.1", 
commit="2e82891974138b0e976799d64a8938f0be61284d") diff --git a/var/spack/repos/builtin/packages/r-ggbio/package.py b/var/spack/repos/builtin/packages/r-ggbio/package.py index b310a41ee4c..1ea9655c07e 100644 --- a/var/spack/repos/builtin/packages/r-ggbio/package.py +++ b/var/spack/repos/builtin/packages/r-ggbio/package.py @@ -21,6 +21,7 @@ class RGgbio(RPackage): bioc = "ggbio" + version("1.44.1", commit="0301d9464e304a8113ea4479185cd358855ca365") version("1.44.0", commit="cb21284a9803917fa76e116adfc456525c95f660") version("1.42.0", commit="3540047ef018957d59fba8af7d3c58e4659f8e26") version("1.38.0", commit="c39c51993f419cfc2f094e664477f25f5212a242") diff --git a/var/spack/repos/builtin/packages/r-ggtree/package.py b/var/spack/repos/builtin/packages/r-ggtree/package.py index bc3e5eddfd7..9128cffcf83 100644 --- a/var/spack/repos/builtin/packages/r-ggtree/package.py +++ b/var/spack/repos/builtin/packages/r-ggtree/package.py @@ -16,6 +16,7 @@ class RGgtree(RPackage): bioc = "ggtree" + version("3.4.4", commit="8e48d3e2ea445b6c2213f0471462108a7a72b333") version("3.4.0", commit="23f08a3da1829d1bbb6827ed1c4cf878daa4b539") version("3.2.1", commit="d3747e636fe1a6a9e09b56a3a3899208ebd05547") diff --git a/var/spack/repos/builtin/packages/r-hdf5array/package.py b/var/spack/repos/builtin/packages/r-hdf5array/package.py index 024eee78fed..ca60baa1e13 100644 --- a/var/spack/repos/builtin/packages/r-hdf5array/package.py +++ b/var/spack/repos/builtin/packages/r-hdf5array/package.py @@ -20,6 +20,7 @@ class RHdf5array(RPackage): bioc = "HDF5Array" + version("1.24.2", commit="fb213ba36631b04dfe754705f701f3a015c4fc82") version("1.24.1", commit="d002fe70c84baaadb62058ce467d6c1ea032d8f5") version("1.22.1", commit="b3f091fbc159609e8e0792d2bf9fbef52c6ceede") version("1.18.0", commit="d5bd55d170cb384fdebdf60751e1e28483782caa") diff --git a/var/spack/repos/builtin/packages/r-iranges/package.py b/var/spack/repos/builtin/packages/r-iranges/package.py index 80e44d5d7e5..4cf880670ae 100644 --- 
a/var/spack/repos/builtin/packages/r-iranges/package.py +++ b/var/spack/repos/builtin/packages/r-iranges/package.py @@ -18,6 +18,7 @@ class RIranges(RPackage): bioc = "IRanges" + version("2.30.1", commit="ead506a14d6cc89ac2f14b55a4b04496755e4e50") version("2.30.0", commit="9b5f3ca12812fb76c23b1550aa3a794384384d9b") version("2.28.0", commit="d85ee908a379e12d1e32599e999c71ab37c25e57") version("2.24.1", commit="6c61fddf4c5830f69a0f7f108888c67cd0a12b19") diff --git a/var/spack/repos/builtin/packages/r-keggrest/package.py b/var/spack/repos/builtin/packages/r-keggrest/package.py index 4a081d71326..20bf8b0e315 100644 --- a/var/spack/repos/builtin/packages/r-keggrest/package.py +++ b/var/spack/repos/builtin/packages/r-keggrest/package.py @@ -15,6 +15,7 @@ class RKeggrest(RPackage): bioc = "KEGGREST" + version("1.36.3", commit="1827cde76863aa80c83264a0dd95514654358df3") version("1.36.0", commit="591818bbc9195bfd0657cf4f5c7c771ea7f86830") version("1.34.0", commit="2056750dc202fa04a34b84c6c712e884c7cad2bd") version("1.30.1", commit="fd9970ea9df117d625257b8c6351cf85098cfbc1") diff --git a/var/spack/repos/builtin/packages/r-libpressio/package.py b/var/spack/repos/builtin/packages/r-libpressio/package.py new file mode 100644 index 00000000000..94e90c59b95 --- /dev/null +++ b/var/spack/repos/builtin/packages/r-libpressio/package.py @@ -0,0 +1,36 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class RLibpressio(RPackage): + """R package for libpressio""" + + homepage = "https://github.com/robertu94/libpressio-r" + url = "https://github.com/robertu94/libpressio-r/archive/0.0.1.tar.gz" + + maintainers = ["robertu94"] + + version("1.6.0", sha256="4f8a712e5e84a201373a104e73b10282fcf98f1c7672cc1dd5a2ff07a32d54f6") + version("1.5.0", sha256="6b0e095610f190aad5dded0dbc6c0783893d4d5e773afc80328fc8c5befeff58") + version("1.4.1", sha256="fa9d47c84ddeb4edd9c5250067a87cc1bb549b9b1dd71e2501dd39ee4e171c27") + version("1.3.2", sha256="6afc907aa3663fbb9bfc7c92ca57e15d05ecbec59f94badec24e8da99ac1422f") + version("1.3", sha256="6ade53d30446de85d2bf6aff0f0a756887f970634b97456aec8b2920a96c0726") + version("1.2", sha256="e5889abf6aabd14b25b5c11e8ecc42cfe56681b1e4f7ade9c9fc28de213981b4") + version("1.1", sha256="b86a541e095b6e41b3548f6cd734c1ff50c70edda2806ed66b5205880fbfbb96") + version("0.0.1", sha256="a508cf3ec1b06c417e0de0e1e4180f3175ead2e4ec23b374425fcf2abfaa1b88") + + variant( + "third_party", description="include support for 3rd party compressor modules", default=True + ) + + depends_on("r@3.3.0:", type=("build", "run")) + depends_on("r-rcpp", type=("build", "link", "run")) + depends_on("libpressio+json", type=("build", "link", "run")) + depends_on("libpressio@0.65.0:", type=("build", "link", "run"), when="@1.2:1.5") + depends_on("libpressio@0.88.0:", type=("build", "link", "run"), when="@1.6:") + depends_on("pkgconfig", type=("build")) + depends_on("libpressio-tools@0.1.4:", type=("build", "link", "run"), when="+third_party") diff --git a/var/spack/repos/builtin/packages/r-limma/package.py b/var/spack/repos/builtin/packages/r-limma/package.py index 6bc580bb2f4..8d92ec55058 100644 --- a/var/spack/repos/builtin/packages/r-limma/package.py +++ b/var/spack/repos/builtin/packages/r-limma/package.py @@ -14,6 +14,7 @@ class RLimma(RPackage): bioc = "limma" + version("3.52.4", 
commit="3226c29ad8c18aa7e6722f4a2c95ff8ac900437e") version("3.52.1", commit="c81c539a217ac1cf46e850f8a20266cecfafed50") version("3.50.0", commit="657b19bbc33c5c941af79aeb68967bf42ea40e23") version("3.46.0", commit="ff03542231827f39ebde6464cdbba0110e24364e") diff --git a/var/spack/repos/builtin/packages/r-matrixgenerics/package.py b/var/spack/repos/builtin/packages/r-matrixgenerics/package.py index bfada6c30de..289f23d36af 100644 --- a/var/spack/repos/builtin/packages/r-matrixgenerics/package.py +++ b/var/spack/repos/builtin/packages/r-matrixgenerics/package.py @@ -19,6 +19,7 @@ class RMatrixgenerics(RPackage): bioc = "MatrixGenerics" + version("1.8.1", commit="a4a21089e9f78275dd4a6f0df0c4b6b45c4650c7") version("1.8.0", commit="e4cc34d53bcfb9a5914afd79fda31ecd5037a47a") version("1.6.0", commit="4588a60e5cc691424c17faa281bdd7d99d83ec34") version("1.2.1", commit="abcc9ca0504e0b915cd7933a3169a8e9e5bd2fe9") diff --git a/var/spack/repos/builtin/packages/r-org-hs-eg-db/package.py b/var/spack/repos/builtin/packages/r-org-hs-eg-db/package.py index d8edda67ff8..2892bc3480f 100644 --- a/var/spack/repos/builtin/packages/r-org-hs-eg-db/package.py +++ b/var/spack/repos/builtin/packages/r-org-hs-eg-db/package.py @@ -15,6 +15,11 @@ class ROrgHsEgDb(RPackage): bioc = "org.Hs.eg.db" url = "https://www.bioconductor.org/packages/3.5/data/annotation/src/contrib/org.Hs.eg.db_3.4.1.tar.gz" + version( + "3.15.0", + sha256="1dc9bb6019e0f0a222b9ec84a1c5870cdbca480f45d9ad08e35f77278baa3c5f", + url="https://www.bioconductor.org/packages/3.15/data/annotation/src/contrib/org.Hs.eg.db_3.15.0.tar.gz", + ) version( "3.14.0", sha256="0f87b3f1925a1d7007e5ad9200bdf511788bd1d7cb76f1121feeb109889c2b00", @@ -41,3 +46,4 @@ class ROrgHsEgDb(RPackage): depends_on("r-annotationdbi@1.43.1:", type=("build", "run"), when="@3.8.2:") depends_on("r-annotationdbi@1.51.3:", type=("build", "run"), when="@3.12.0:") depends_on("r-annotationdbi@1.55.1:", type=("build", "run"), when="@3.14.0:") + 
depends_on("r-annotationdbi@1.57.1:", type=("build", "run"), when="@3.15.0:") diff --git a/var/spack/repos/builtin/packages/r-organismdbi/package.py b/var/spack/repos/builtin/packages/r-organismdbi/package.py index bd289585864..5ae90de6299 100644 --- a/var/spack/repos/builtin/packages/r-organismdbi/package.py +++ b/var/spack/repos/builtin/packages/r-organismdbi/package.py @@ -16,6 +16,7 @@ class ROrganismdbi(RPackage): bioc = "OrganismDbi" + version("1.38.1", commit="fa8da4dd42ab15e1d21fd9f8286440596d50b1ec") version("1.38.0", commit="2ca01830a6ffcd0c0018d2bdbd3de8b4df716771") version("1.36.0", commit="3e7a90d248ff09f05ccd381ff921e12373a4b330") version("1.32.0", commit="c8100c4fea17bf1b10d4efacc73a7e2866d649e3") diff --git a/var/spack/repos/builtin/packages/r-pathview/package.py b/var/spack/repos/builtin/packages/r-pathview/package.py index 01a8f0f8709..92b017f1581 100644 --- a/var/spack/repos/builtin/packages/r-pathview/package.py +++ b/var/spack/repos/builtin/packages/r-pathview/package.py @@ -20,6 +20,7 @@ class RPathview(RPackage): bioc = "pathview" + version("1.36.1", commit="f2e86b106c1cd91aac703337f968b7593a61c68d") version("1.36.0", commit="4f6be090a4089b5259d8e796d62f9830e2d63943") version("1.34.0", commit="a8788902a3bb047f8ee785966e57f84596076bbd") version("1.30.1", commit="a6a32395db408798cb076894678e90148bae6bf4") diff --git a/var/spack/repos/builtin/packages/r-rtracklayer/package.py b/var/spack/repos/builtin/packages/r-rtracklayer/package.py index e095dd3251c..a52013b3290 100644 --- a/var/spack/repos/builtin/packages/r-rtracklayer/package.py +++ b/var/spack/repos/builtin/packages/r-rtracklayer/package.py @@ -18,6 +18,7 @@ class RRtracklayer(RPackage): bioc = "rtracklayer" + version("1.56.1", commit="4c6d2201fcb102d471bd88f4f51cc34317669955") version("1.56.0", commit="1d70f7dc464ad87a1fde61588cd9ae0cb86b6e86") version("1.54.0", commit="04cdd75521a8364e67a49d7352500dd4a3e83c55") version("1.50.0", commit="d2e61f72ff5d5a94c2c487ba108a37f23bfcc1e6") diff 
--git a/var/spack/repos/builtin/packages/r-scaledmatrix/package.py b/var/spack/repos/builtin/packages/r-scaledmatrix/package.py index e99dc8dac29..b49f1235d52 100644 --- a/var/spack/repos/builtin/packages/r-scaledmatrix/package.py +++ b/var/spack/repos/builtin/packages/r-scaledmatrix/package.py @@ -16,6 +16,7 @@ class RScaledmatrix(RPackage): bioc = "ScaledMatrix" + version("1.4.1", commit="15e2efcb6b11e26c31ef2d44968355f71cc1f4fc") version("1.4.0", commit="32e6e918bc7bb64bbf75613d353ca268c7d04292") version("1.2.0", commit="d0573e14ca537b40ade7dd1c9cf0cadae60d4349") diff --git a/var/spack/repos/builtin/packages/r-scran/package.py b/var/spack/repos/builtin/packages/r-scran/package.py index fcf4f46534a..cac48a6e176 100644 --- a/var/spack/repos/builtin/packages/r-scran/package.py +++ b/var/spack/repos/builtin/packages/r-scran/package.py @@ -17,6 +17,7 @@ class RScran(RPackage): bioc = "scran" + version("1.24.1", commit="1a83eb7c948b1dc49253080c23b26cefb3a0f3b9") version("1.24.0", commit="c3f9e169c4538ce827d4f14a4141571c2366cd31") depends_on("r-singlecellexperiment", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/r-scuttle/package.py b/var/spack/repos/builtin/packages/r-scuttle/package.py index 6862bf38068..2ed9145d158 100644 --- a/var/spack/repos/builtin/packages/r-scuttle/package.py +++ b/var/spack/repos/builtin/packages/r-scuttle/package.py @@ -16,6 +16,7 @@ class RScuttle(RPackage): bioc = "scuttle" + version("1.6.3", commit="df23680da9fa4d685df77e4561467f491c850b50") version("1.6.2", commit="afdfc555151d84cc332757b4ec0b97cb7f39d2d5") version("1.4.0", commit="b335263dd56bb859b5dd3ea27ee00dffa0215313") version("1.0.4", commit="a827e2759d80e6c3510e2f8fd4bd680274206d9f") diff --git a/var/spack/repos/builtin/packages/r-singlecellexperiment/package.py b/var/spack/repos/builtin/packages/r-singlecellexperiment/package.py index d024ae131a8..47719faeb76 100644 --- a/var/spack/repos/builtin/packages/r-singlecellexperiment/package.py +++ 
b/var/spack/repos/builtin/packages/r-singlecellexperiment/package.py @@ -16,6 +16,7 @@ class RSinglecellexperiment(RPackage): bioc = "SingleCellExperiment" + version("1.18.1", commit="db7768a7cb5eca724bcf7e4cea3234992ac714a1") version("1.18.0", commit="3a72dcd97e628055b2d02294eaecca9a41aba604") version("1.16.0", commit="bb27609ba08052607fc08529ffbbbcf1eab265cb") version("1.12.0", commit="66063b74c8b0bd0fd1277c7ad425ad11823ab356") diff --git a/var/spack/repos/builtin/packages/r-treeio/package.py b/var/spack/repos/builtin/packages/r-treeio/package.py index 64b8dce4203..17cf92167da 100644 --- a/var/spack/repos/builtin/packages/r-treeio/package.py +++ b/var/spack/repos/builtin/packages/r-treeio/package.py @@ -18,6 +18,7 @@ class RTreeio(RPackage): bioc = "treeio" + version("1.20.2", commit="ed457d6fd85a50e0993c8c9acbd9b701be01a348") version("1.20.0", commit="5f7c3704fc8202c52451d092148fdcfe683f026a") version("1.18.1", commit="a06b6b3d2a64f1b22c6c8c5f97c08f5863349c83") diff --git a/var/spack/repos/builtin/packages/r/package.py b/var/spack/repos/builtin/packages/r/package.py index 130ad46c9bd..7aa60e37ba4 100644 --- a/var/spack/repos/builtin/packages/r/package.py +++ b/var/spack/repos/builtin/packages/r/package.py @@ -23,6 +23,7 @@ class R(AutotoolsPackage): maintainers = ["glennpj"] + version("4.2.2", sha256="0ff62b42ec51afa5713caee7c4fde7a0c45940ba39bef8c5c9487fef0c953df5") version("4.2.1", sha256="4d52db486d27848e54613d4ee977ad952ec08ce17807e1b525b10cd4436c643f") version("4.2.0", sha256="38eab7719b7ad095388f06aa090c5a2b202791945de60d3e2bb0eab1f5097488") version("4.1.3", sha256="15ff5b333c61094060b2a52e9c1d8ec55cc42dd029e39ca22abdaa909526fed6") @@ -106,8 +107,6 @@ def url_for_version(self, version): url = "https://cloud.r-project.org/src/base" return url + "/R-%s/R-%s.tar.gz" % (version.up_to(1), version) - filter_compiler_wrappers("Makeconf", relative_root=os.path.join("rlib", "R", "etc")) - @property def etcdir(self): return join_path(prefix, "rlib", "R", "etc") @@ 
-186,6 +185,9 @@ def copy_makeconf(self): dst_makeconf = join_path(self.etcdir, "Makeconf.spack") install(src_makeconf, dst_makeconf) + # To respect order of execution, we should filter after we made the copy above + filter_compiler_wrappers("Makeconf", relative_root=os.path.join("rlib", "R", "etc")) + # ======================================================================== # Set up environment to make install easy for R extensions. # ======================================================================== @@ -198,7 +200,7 @@ def setup_dependent_build_environment(self, env, dependent_spec): # Set R_LIBS to include the library dir for the # extension and any other R extensions it depends on. r_libs_path = [] - for d in dependent_spec.traverse(deptype=("build", "run"), deptype_query="run"): + for d in dependent_spec.traverse(deptype=("build", "run")): if d.package.extends(self.spec): r_libs_path.append(join_path(d.prefix, self.r_lib_dir)) @@ -214,16 +216,19 @@ def setup_dependent_build_environment(self, env, dependent_spec): # Use the number of make_jobs set in spack. The make program will # determine how many jobs can actually be started. 
env.set("MAKEFLAGS", "-j{0}".format(make_jobs)) + env.set("R_HOME", join_path(self.prefix, "rlib", "R")) def setup_dependent_run_environment(self, env, dependent_spec): # For run time environment set only the path for dependent_spec and # prepend it to R_LIBS + env.set("R_HOME", join_path(self.prefix, "rlib", "R")) if dependent_spec.package.extends(self.spec): env.prepend_path("R_LIBS", join_path(dependent_spec.prefix, self.r_lib_dir)) def setup_run_environment(self, env): env.prepend_path("LD_LIBRARY_PATH", join_path(self.prefix, "rlib", "R", "lib")) env.prepend_path("PKG_CONFIG_PATH", join_path(self.prefix, "rlib", "pkgconfig")) + env.set("R_HOME", join_path(self.prefix, "rlib", "R")) if "+rmath" in self.spec: env.prepend_path("LD_LIBRARY_PATH", join_path(self.prefix, "rlib")) diff --git a/var/spack/repos/builtin/packages/racket/package.py b/var/spack/repos/builtin/packages/racket/package.py index cc9e1ba25b0..7031e234ea4 100644 --- a/var/spack/repos/builtin/packages/racket/package.py +++ b/var/spack/repos/builtin/packages/racket/package.py @@ -2,13 +2,11 @@ # Spack Project Developers. See the top-level COPYRIGHT file for details. 
# # SPDX-License-Identifier: (Apache-2.0 OR MIT) - -import sys - +import spack.build_systems.makefile from spack.package import * -class Racket(Package): +class Racket(MakefilePackage): """The Racket programming language.""" homepage = "https://www.racket-lang.org" @@ -21,13 +19,6 @@ class Racket(Package): depends_on("patchutils") depends_on("libtool", type=("build")) - phases = ["configure", "build", "install"] - - def url_for_version(self, version): - return "https://mirror.racket-lang.org/installers/{0}/racket-minimal-{0}-src-builtpkgs.tgz".format( - version - ) - variant("cs", default=True, description="Build Racket CS (new ChezScheme VM)") variant("bc", default=False, description="Build Racket BC (old MZScheme VM)") variant("shared", default=True, description="Enable shared") @@ -36,12 +27,22 @@ def url_for_version(self, version): parallel = False extendable = True + def url_for_version(self, version): + return "https://mirror.racket-lang.org/installers/{0}/racket-minimal-{0}-src-builtpkgs.tgz".format( + version + ) + + +class MakefileBuilder(spack.build_systems.makefile.MakefileBuilder): + + build_directory = "src" + def toggle(self, spec, variant): toggle_text = "enable" if spec.variants[variant].value else "disable" return "--{0}-{1}".format(toggle_text, variant) - def configure(self, spec, prefix): - with working_dir("src"): + def edit(self, pkg, spec, prefix): + with working_dir(self.build_directory): configure = Executable("./configure") configure_args = [ self.toggle(spec, "cs"), @@ -49,7 +50,7 @@ def configure(self, spec, prefix): self.toggle(spec, "jit"), ] toggle_shared = self.toggle(spec, "shared") - if sys.platform == "darwin": + if spec.satisfies("platform=darwin"): configure_args += ["--enable-macprefix"] if "+xonx" in spec: configure_args += ["--enable-xonx", toggle_shared] @@ -58,16 +59,20 @@ def configure(self, spec, prefix): configure_args += ["--prefix={0}".format(prefix)] configure(*configure_args) - def build(self, spec, prefix): - with 
working_dir("src"): - if spec.variants["bc"].value: - make("bc") - if spec.variants["cs"].value: - make("cs") + @property + def build_targets(self): + result = [] + if "+bc" in self.spec: + result.append("bc") + if "+cs" in self.spec: + result.append("cs") + return result - def install(self, spec, prefix): - with working_dir("src"): - if spec.variants["bc"].value: - make("install-bc") - if spec.variants["cs"].value: - make("install-cs") + @property + def install_targets(self): + result = [] + if "+bc" in self.spec: + result.append("install-bc") + if "+cs" in self.spec: + result.append("install-cs") + return result diff --git a/var/spack/repos/builtin/packages/raja/package.py b/var/spack/repos/builtin/packages/raja/package.py index b9e1f6fe8fa..1f0bf93a675 100644 --- a/var/spack/repos/builtin/packages/raja/package.py +++ b/var/spack/repos/builtin/packages/raja/package.py @@ -104,6 +104,13 @@ def cache_name(self): self.spec.compiler.version, ) + def initconfig_compiler_entries(self): + spec = self.spec + entries = super(Raja, self).initconfig_compiler_entries() + if "+rocm" in spec: + entries.insert(0, cmake_cache_path("CMAKE_CXX_COMPILER", spec["hip"].hipcc)) + return entries + def initconfig_hardware_entries(self): spec = self.spec entries = super(Raja, self).initconfig_hardware_entries() @@ -126,12 +133,14 @@ def initconfig_hardware_entries(self): entries.append(cmake_cache_option("ENABLE_HIP", True)) entries.append(cmake_cache_path("HIP_ROOT_DIR", "{0}".format(spec["hip"].prefix))) hip_repair_cache(entries, spec) + hipcc_flags = [] + if self.spec.satisfies("@0.14.0"): + hipcc_flags.append("-std=c++14") archs = self.spec.variants["amdgpu_target"].value if archs != "none": arch_str = ",".join(archs) - entries.append( - cmake_cache_string("HIP_HIPCC_FLAGS", "--amdgpu-target={0}".format(arch_str)) - ) + hipcc_flags.append("--amdgpu-target={0}".format(arch_str)) + entries.append(cmake_cache_string("HIP_HIPCC_FLAGS", " ".join(hipcc_flags))) else: 
entries.append(cmake_cache_option("ENABLE_HIP", False)) diff --git a/var/spack/repos/builtin/packages/reframe/package.py b/var/spack/repos/builtin/packages/reframe/package.py index 90bbb503f98..f2de9f7c2a0 100644 --- a/var/spack/repos/builtin/packages/reframe/package.py +++ b/var/spack/repos/builtin/packages/reframe/package.py @@ -24,6 +24,19 @@ class Reframe(Package): maintainers = ["victorusu", "vkarak"] version("master", branch="master") + version( + "4.0.0-dev.1", sha256="6db55c20b79764fc1f0e0a13de062850007425fa2c7f54a113b96adee50741ed" + ) + version( + "4.0.0-dev.0", sha256="a96162a88a36ea0793836c492a39470010f6e63b8d9bd324c033614d27304fa6" + ) + version( + "3.12.0", + sha256="425cc546e24edd5b2dbfcdcb61dbbf723ca1a2a2977948e359e893514f5eb10f", + preferred=True, + ) + version("3.11.2", sha256="d6f36071df316d6a5ef5ce6f0477b3385d9dac5c1b82e54ae6954dc9b68f9440") + version("3.11.1", sha256="7f591cd8f4fbb2c6255cc8ea02e3814393355a8931ac883e9f57490fde699b63") version("3.11.0", sha256="3ddfef5482f0c304286a6c8f1ad0b3d75c4c61d0b9f9f8429b6157c189f2bb64") version("3.10.1", sha256="5fd649872bf93ba72a835896ea1a581b9b8c3e04150247be2359b95a7cdb89b5") version("3.10.0", sha256="b137f034be09abcf1bb8c3ceaf1a00d9c22c51c10738312eccf12c1c3e04b9ef") diff --git a/var/spack/repos/builtin/packages/rempi/package.py b/var/spack/repos/builtin/packages/rempi/package.py index 46adf68c277..fbcb1369ea4 100644 --- a/var/spack/repos/builtin/packages/rempi/package.py +++ b/var/spack/repos/builtin/packages/rempi/package.py @@ -23,6 +23,13 @@ class Rempi(AutotoolsPackage): depends_on("libtool", type="build") depends_on("libpciaccess", type="link") + def flag_handler(self, name, flags): + iflags = [] + if name == "cflags": + if self.spec.satisfies("%oneapi@2022.2.0:"): + iflags.append("-Wno-error=implicit-function-declaration") + return (iflags, None, None) + def setup_build_environment(self, env): if self.spec.satisfies("%cce"): env.set("MPICC", "mpicc") diff --git 
a/var/spack/repos/builtin/packages/riscv-gnu-toolchain/package.py b/var/spack/repos/builtin/packages/riscv-gnu-toolchain/package.py index d4f96029072..4c71ba0622e 100644 --- a/var/spack/repos/builtin/packages/riscv-gnu-toolchain/package.py +++ b/var/spack/repos/builtin/packages/riscv-gnu-toolchain/package.py @@ -21,7 +21,7 @@ class RiscvGnuToolchain(AutotoolsPackage): version("2022.08.08", tag="2022.08.08", submodules=True) # Dependencies: - depends_on("pkg-config", type="build") + depends_on("pkgconfig", type="build") depends_on("autoconf", when="@main:", type="build") depends_on("python", type="build") depends_on("gawk", type="build") diff --git a/var/spack/repos/builtin/packages/rkt-base/package.py b/var/spack/repos/builtin/packages/rkt-base/package.py index 256f90106eb..3eb21e485d7 100644 --- a/var/spack/repos/builtin/packages/rkt-base/package.py +++ b/var/spack/repos/builtin/packages/rkt-base/package.py @@ -18,5 +18,3 @@ class RktBase(RacketPackage): depends_on("racket@8.3", type=("build", "run"), when="@8.3") racket_name = "base" - pkgs = True - subdirectory = "pkgs/{0}".format(racket_name) diff --git a/var/spack/repos/builtin/packages/rkt-cext-lib/package.py b/var/spack/repos/builtin/packages/rkt-cext-lib/package.py index 8d9b257c05d..daa1ef5226f 100644 --- a/var/spack/repos/builtin/packages/rkt-cext-lib/package.py +++ b/var/spack/repos/builtin/packages/rkt-cext-lib/package.py @@ -21,5 +21,4 @@ class RktCextLib(RacketPackage): depends_on("rkt-scheme-lib@8.3", type=("build", "run"), when="@8.3") racket_name = "cext-lib" - pkgs = True subdirectory = racket_name diff --git a/var/spack/repos/builtin/packages/rkt-compiler-lib/package.py b/var/spack/repos/builtin/packages/rkt-compiler-lib/package.py index fee9b3ea9a6..6a21c774cb2 100644 --- a/var/spack/repos/builtin/packages/rkt-compiler-lib/package.py +++ b/var/spack/repos/builtin/packages/rkt-compiler-lib/package.py @@ -21,5 +21,3 @@ class RktCompilerLib(RacketPackage): depends_on("rkt-zo-lib@1.3", 
type=("build", "run"), when="@8.3") racket_name = "compiler-lib" - pkgs = True - subdirectory = "pkgs/{0}".format(racket_name) diff --git a/var/spack/repos/builtin/packages/rkt-dynext-lib/package.py b/var/spack/repos/builtin/packages/rkt-dynext-lib/package.py index 81f7ca811f9..9164571ed37 100644 --- a/var/spack/repos/builtin/packages/rkt-dynext-lib/package.py +++ b/var/spack/repos/builtin/packages/rkt-dynext-lib/package.py @@ -17,5 +17,4 @@ class RktDynextLib(RacketPackage): depends_on("rkt-base@8.3", type=("build", "run"), when="@8.3") racket_name = "dynext-lib" - pkgs = True subdirectory = racket_name diff --git a/var/spack/repos/builtin/packages/rkt-rackunit-lib/package.py b/var/spack/repos/builtin/packages/rkt-rackunit-lib/package.py index 783aebe306b..33910502583 100644 --- a/var/spack/repos/builtin/packages/rkt-rackunit-lib/package.py +++ b/var/spack/repos/builtin/packages/rkt-rackunit-lib/package.py @@ -18,5 +18,4 @@ class RktRackunitLib(RacketPackage): depends_on("rkt-testing-util-lib@8.3", type=("build", "run"), when="@8.3") racket_name = "rackunit-lib" - pkgs = True subdirectory = racket_name diff --git a/var/spack/repos/builtin/packages/rkt-scheme-lib/package.py b/var/spack/repos/builtin/packages/rkt-scheme-lib/package.py index 75346eac703..689d3af7c50 100644 --- a/var/spack/repos/builtin/packages/rkt-scheme-lib/package.py +++ b/var/spack/repos/builtin/packages/rkt-scheme-lib/package.py @@ -17,4 +17,3 @@ class RktSchemeLib(RacketPackage): depends_on("rkt-base@8.3", type=("build", "run"), when="@8.3") racket_name = "scheme-lib" - pkgs = True diff --git a/var/spack/repos/builtin/packages/rkt-testing-util-lib/package.py b/var/spack/repos/builtin/packages/rkt-testing-util-lib/package.py index 9186845fc98..aa1aafda4e4 100644 --- a/var/spack/repos/builtin/packages/rkt-testing-util-lib/package.py +++ b/var/spack/repos/builtin/packages/rkt-testing-util-lib/package.py @@ -17,5 +17,4 @@ class RktTestingUtilLib(RacketPackage): depends_on("rkt-base@8.3:", 
type=("build", "run"), when="@8.3") racket_name = "testing-util-lib" - pkgs = True subdirectory = racket_name diff --git a/var/spack/repos/builtin/packages/rkt-zo-lib/package.py b/var/spack/repos/builtin/packages/rkt-zo-lib/package.py index 66f3d498afb..a6b70cd075a 100644 --- a/var/spack/repos/builtin/packages/rkt-zo-lib/package.py +++ b/var/spack/repos/builtin/packages/rkt-zo-lib/package.py @@ -17,5 +17,3 @@ class RktZoLib(RacketPackage): depends_on("rkt-base@8.3:", type=("build", "run"), when="@1.3") racket_name = "zo-lib" - pkgs = True - subdirectory = "pkgs/{0}".format(racket_name) diff --git a/var/spack/repos/builtin/packages/rocblas/package.py b/var/spack/repos/builtin/packages/rocblas/package.py index 84f90cff4b3..92844ffda52 100644 --- a/var/spack/repos/builtin/packages/rocblas/package.py +++ b/var/spack/repos/builtin/packages/rocblas/package.py @@ -248,6 +248,11 @@ def cmake_args(self): args.append(self.define("Tensile_LIBRARY_FORMAT", "msgpack")) if self.spec.satisfies("@:4.2.0"): arch_define_name = "Tensile_ARCHITECTURE" + # Restrict the number of jobs Tensile can spawn. + # If we don't specify otherwise, Tensile creates a job per available core, + # and that consumes a lot of system memory. 
+ # https://github.com/ROCmSoftwarePlatform/Tensile/blob/93e10678a0ced7843d9332b80bc17ebf9a166e8e/Tensile/Parallel.py#L38 + args.append(self.define("Tensile_CPU_THREADS", min(16, make_jobs))) # See https://github.com/ROCmSoftwarePlatform/rocBLAS/commit/c1895ba4bb3f4f5947f3818ebd155cf71a27b634 if "auto" not in self.spec.variants["amdgpu_target"]: diff --git a/var/spack/repos/builtin/packages/rocksdb/package.py b/var/spack/repos/builtin/packages/rocksdb/package.py index b6046d28062..c005d57b602 100644 --- a/var/spack/repos/builtin/packages/rocksdb/package.py +++ b/var/spack/repos/builtin/packages/rocksdb/package.py @@ -14,6 +14,7 @@ class Rocksdb(MakefilePackage): git = "https://github.com/facebook/rocksdb.git" version("master", git=git, branch="master", submodules=True) + version("7.7.3", sha256="b8ac9784a342b2e314c821f6d701148912215666ac5e9bdbccd93cf3767cb611") version("7.2.2", sha256="c4ea6bd2e3ffe3f0f8921c699234d59108c9122d61b0ba2aa78358642a7b614e") version("6.20.3", sha256="c6502c7aae641b7e20fafa6c2b92273d935d2b7b2707135ebd9a67b092169dca") version("6.19.3", sha256="5c19ffefea2bbe4c275d0c60194220865f508f371c64f42e802b4a85f065af5b") diff --git a/var/spack/repos/builtin/packages/rsync/package.py b/var/spack/repos/builtin/packages/rsync/package.py index 357d7e7184c..29925862cfb 100644 --- a/var/spack/repos/builtin/packages/rsync/package.py +++ b/var/spack/repos/builtin/packages/rsync/package.py @@ -14,6 +14,9 @@ class Rsync(AutotoolsPackage): homepage = "https://rsync.samba.org" url = "https://download.samba.org/pub/rsync/src/rsync-3.2.4.tar.gz" + version("3.2.7", sha256="4e7d9d3f6ed10878c58c5fb724a67dacf4b6aac7340b13e488fb2dc41346f2bb") + version("3.2.6", sha256="fb3365bab27837d41feaf42e967c57bd3a47bc8f10765a3671efd6a3835454d3") + version("3.2.5", sha256="2ac4d21635cdf791867bc377c35ca6dda7f50d919a58be45057fd51600c69aba") version("3.2.4", sha256="6f761838d08052b0b6579cf7f6737d93e47f01f4da04c5d24d3447b7f2a5fad1") version("3.2.3", 
sha256="becc3c504ceea499f4167a260040ccf4d9f2ef9499ad5683c179a697146ce50e") version("3.2.2", sha256="644bd3841779507665211fd7db8359c8a10670c57e305b4aab61b4e40037afa8") diff --git a/var/spack/repos/builtin/packages/ruby/package.py b/var/spack/repos/builtin/packages/ruby/package.py index 9cc7f921514..a472509e08b 100644 --- a/var/spack/repos/builtin/packages/ruby/package.py +++ b/var/spack/repos/builtin/packages/ruby/package.py @@ -2,19 +2,17 @@ # Spack Project Developers. See the top-level COPYRIGHT file for details. # # SPDX-License-Identifier: (Apache-2.0 OR MIT) - import re -import sys -from typing import List +import spack.build_systems.autotools +import spack.build_systems.nmake from spack.package import * -is_windows = sys.platform == "win32" - -class Ruby(Package): +class Ruby(AutotoolsPackage, NMakePackage): """A dynamic, open source programming language with a focus on - simplicity and productivity.""" + simplicity and productivity. + """ maintainers = ["Kerilk"] @@ -33,24 +31,25 @@ class Ruby(Package): version("2.5.3", sha256="9828d03852c37c20fa333a0264f2490f07338576734d910ee3fd538c9520846c") version("2.2.0", sha256="7671e394abfb5d262fbcd3b27a71bf78737c7e9347fa21c39e58b0bb9c4840fc") - if not is_windows: - variant("openssl", default=True, description="Enable OpenSSL support") - variant("readline", default=False, description="Enable Readline support") - depends_on("pkgconfig", type=("build")) - depends_on("libffi") - depends_on("libx11", when="@:2.3") - depends_on("tcl", when="@:2.3") - depends_on("tk", when="@:2.3") - depends_on("readline", when="+readline") - depends_on("zlib") - with when("+openssl"): - depends_on("openssl@:1") - depends_on("openssl@:1.0", when="@:2.3") + build_system("autotools", "nmake", default="autotools") + + for _platform_condition in ("platform=linux", "platform=darwin", "platform=cray"): + with when(_platform_condition): + variant("openssl", default=True, description="Enable OpenSSL support") + variant("readline", default=False, 
description="Enable Readline support") + depends_on("pkgconfig", type="build") + depends_on("libffi") + depends_on("libx11", when="@:2.3") + depends_on("tcl", when="@:2.3") + depends_on("tk", when="@:2.3") + depends_on("readline", when="+readline") + depends_on("zlib") + with when("+openssl"): + depends_on("openssl@:1") + depends_on("openssl@:1.0", when="@:2.3") extendable = True - phases = ["configure", "build", "install"] - build_targets = [] # type: List[str] - install_targets = ["install"] + # Known build issues when Avira antivirus software is running: # https://github.com/rvm/rvm/issues/4313#issuecomment-374020379 # TODO: add check for this and warn user @@ -82,28 +81,6 @@ def url_for_version(self, version): url = "https://cache.ruby-lang.org/pub/ruby/{0}/ruby-{1}.tar.gz" return url.format(version.up_to(2), version) - def configure_args(self): - args = [] - if "+openssl" in self.spec: - args.append("--with-openssl-dir=%s" % self.spec["openssl"].prefix) - if "+readline" in self.spec: - args.append("--with-readline-dir=%s" % self.spec["readline"].prefix) - if "^tk" in self.spec: - args.append("--with-tk=%s" % self.spec["tk"].prefix) - if self.spec.satisfies("%fj"): - args.append("--disable-dtrace") - return args - - def setup_dependent_build_environment(self, env, dependent_spec): - # TODO: do this only for actual extensions. 
- # Set GEM_PATH to include dependent gem directories - for d in dependent_spec.traverse(deptype=("build", "run", "test"), root=True): - if d.package.extends(self.spec): - env.prepend_path("GEM_PATH", d.prefix) - - # The actual installation path for this gem - env.set("GEM_HOME", dependent_spec.prefix) - def setup_dependent_run_environment(self, env, dependent_spec): for d in dependent_spec.traverse(deptype=("run"), root=True): if d.package.extends(self.spec): @@ -122,31 +99,31 @@ def setup_dependent_package(self, module, dependent_spec): module.gem = Executable(self.prefix.bin.gem) module.rake = Executable(self.prefix.bin.rake) - def configure(self, spec, prefix): - with working_dir(self.stage.source_path, create=True): - if is_windows: - Executable("win32\\configure.bat")("--prefix=%s" % self.prefix) - else: - options = getattr(self, "configure_flag_args", []) - options += ["--prefix={0}".format(prefix)] - options += self.configure_args() - configure(*options) - def build(self, spec, prefix): - with working_dir(self.stage.source_path): - if is_windows: - nmake() - else: - params = ["V=1"] - params += self.build_targets - make(*params) +class SetupEnvironment(object): + def setup_dependent_build_environment(self, env, dependent_spec): + # TODO: do this only for actual extensions. 
+ # Set GEM_PATH to include dependent gem directories + for d in dependent_spec.traverse(deptype=("build", "run", "test"), root=True): + if d.package.extends(self.spec): + env.prepend_path("GEM_PATH", d.prefix) - def install(self, spec, prefix): - with working_dir(self.stage.source_path): - if is_windows: - nmake("install") - else: - make(*self.install_targets) + # The actual installation path for this gem + env.set("GEM_HOME", dependent_spec.prefix) + + +class AutotoolsBuilder(spack.build_systems.autotools.AutotoolsBuilder, SetupEnvironment): + def configure_args(self): + args = [] + if "+openssl" in self.spec: + args.append("--with-openssl-dir=%s" % self.spec["openssl"].prefix) + if "+readline" in self.spec: + args.append("--with-readline-dir=%s" % self.spec["readline"].prefix) + if "^tk" in self.spec: + args.append("--with-tk=%s" % self.spec["tk"].prefix) + if self.spec.satisfies("%fj"): + args.append("--disable-dtrace") + return args @run_after("install") def post_install(self): @@ -158,7 +135,7 @@ def post_install(self): """ if self.spec.satisfies("+openssl"): rubygems_updated_cert_path = join_path( - self.stage.source_path, "rubygems-updated-ssl-cert", "GlobalSignRootCA.pem" + self.pkg.stage.source_path, "rubygems-updated-ssl-cert", "GlobalSignRootCA.pem" ) rubygems_certs_path = join_path( self.spec.prefix.lib, @@ -171,11 +148,19 @@ def post_install(self): rbconfig = find(self.prefix, "rbconfig.rb")[0] filter_file( - r'^(\s*CONFIG\["CXX"\]\s*=\s*).*', r'\1"{0}"'.format(self.compiler.cxx), rbconfig + r'^(\s*CONFIG\["CXX"\]\s*=\s*).*', r'\1"{0}"'.format(self.pkg.compiler.cxx), rbconfig ) filter_file( - r'^(\s*CONFIG\["CC"\]\s*=\s*).*', r'\1"{0}"'.format(self.compiler.cc), rbconfig + r'^(\s*CONFIG\["CC"\]\s*=\s*).*', r'\1"{0}"'.format(self.pkg.compiler.cc), rbconfig ) filter_file( - r'^(\s*CONFIG\["MJIT_CC"\]\s*=\s*).*', r'\1"{0}"'.format(self.compiler.cc), rbconfig + r'^(\s*CONFIG\["MJIT_CC"\]\s*=\s*).*', + r'\1"{0}"'.format(self.pkg.compiler.cc), + rbconfig, ) 
+ + +class NMakeBuilder(spack.build_systems.nmake.NMakeBuilder, SetupEnvironment): + def edit(self, pkg, spec, prefix): + with working_dir(self.pkg.stage.source_path, create=True): + Executable("win32\\configure.bat")("--prefix=%s" % self.prefix) diff --git a/var/spack/repos/builtin/packages/rust/package.py b/var/spack/repos/builtin/packages/rust/package.py index a17d34e2665..2ffdf495cf1 100644 --- a/var/spack/repos/builtin/packages/rust/package.py +++ b/var/spack/repos/builtin/packages/rust/package.py @@ -54,7 +54,7 @@ class Rust(Package): ) depends_on("python@2.7:", type="build") - depends_on("python@2.7:2.8", when="@:1.43", type="build") + depends_on("python@2.7:2.8", when="@0:1.43", type="build") depends_on("gmake@3.81:", type="build") depends_on("cmake@3.4.3:", type="build") depends_on("ninja", when="@1.48.0:", type="build") @@ -63,7 +63,7 @@ class Rust(Package): depends_on("openssl@:1") depends_on("libssh2") # https://github.com/rust-lang/cargo/issues/10446 - depends_on("libgit2@:1.3", when="@:1.60") + depends_on("libgit2@:1.3", when="@0:1.60") depends_on("libgit2") # Pre-release Versions diff --git a/var/spack/repos/builtin/packages/sam2p/package.py b/var/spack/repos/builtin/packages/sam2p/package.py new file mode 100644 index 00000000000..6c4c8e1cb01 --- /dev/null +++ b/var/spack/repos/builtin/packages/sam2p/package.py @@ -0,0 +1,25 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class Sam2p(Package): + """A raster to PostScript/PDF image conversion program""" + + homepage = "https://github.com/pts/sam2p" + url = "https://github.com/pts/sam2p/archive/v0.49.4.tar.gz" + git = homepage + + maintainers = ["robertu94"] + + version("master", branch="master") + version("2021-05-04", commit="f3e9cc0a2df1880a63f9f37c96e3595bca890cfa") + + def install(self, spec, prefix): + compile_sh = Executable("./compile.sh") + compile_sh() + mkdirp(prefix.bin) + install("sam2p", prefix.bin) diff --git a/var/spack/repos/builtin/packages/scorep/package.py b/var/spack/repos/builtin/packages/scorep/package.py index cf0baf5164c..5c7870def18 100644 --- a/var/spack/repos/builtin/packages/scorep/package.py +++ b/var/spack/repos/builtin/packages/scorep/package.py @@ -124,7 +124,11 @@ def configure_args(self): if spec.satisfies("^intel-mpi"): config_args.append("--with-mpi=intel3") - elif spec.satisfies("^mpich") or spec.satisfies("^mvapich2"): + elif ( + spec.satisfies("^mpich") + or spec.satisfies("^mvapich2") + or spec.satisfies("^cray-mpich") + ): config_args.append("--with-mpi=mpich3") elif spec.satisfies("^openmpi"): config_args.append("--with-mpi=openmpi") diff --git a/var/spack/repos/builtin/packages/scotch/libscotch-scotcherr-link-7.0.1.patch b/var/spack/repos/builtin/packages/scotch/libscotch-scotcherr-link-7.0.1.patch new file mode 100644 index 00000000000..40e4ade8834 --- /dev/null +++ b/var/spack/repos/builtin/packages/scotch/libscotch-scotcherr-link-7.0.1.patch @@ -0,0 +1,20 @@ +--- a/src/libscotch/CMakeLists.txt ++++ b/src/libscotch/CMakeLists.txt +@@ -508,7 +508,7 @@ add_library(scotch + set_target_properties(scotch PROPERTIES VERSION + ${SCOTCH_VERSION}.${SCOTCH_RELEASE}.${SCOTCH_PATCHLEVEL}) + add_dependencies(scotch parser_yy_c parser_ll_c) +-target_link_libraries(scotch PUBLIC m) ++target_link_libraries(scotch PUBLIC m scotcherr) + target_include_directories(scotch 
PUBLIC + $ + $ +@@ -748,7 +748,7 @@ if(BUILD_PTSCOTCH) + set_target_properties(ptscotch PROPERTIES + VERSION ${SCOTCH_VERSION}.${SCOTCH_RELEASE}.${SCOTCH_PATCHLEVEL} + COMPILE_FLAGS -DSCOTCH_PTSCOTCH) +- target_link_libraries(ptscotch PUBLIC scotch MPI::MPI_C) ++ target_link_libraries(ptscotch PUBLIC ptscotcherr scotch MPI::MPI_C) + target_include_directories(ptscotch PUBLIC + $ + $ diff --git a/var/spack/repos/builtin/packages/scotch/package.py b/var/spack/repos/builtin/packages/scotch/package.py index 672706a6355..b957db31bf9 100644 --- a/var/spack/repos/builtin/packages/scotch/package.py +++ b/var/spack/repos/builtin/packages/scotch/package.py @@ -3,12 +3,15 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) +import spack.build_systems.cmake +import spack.build_systems.makefile from spack.package import * -class Scotch(CMakePackage): +class Scotch(CMakePackage, MakefilePackage): """Scotch is a software package for graph and mesh/hypergraph - partitioning, graph clustering, and sparse matrix ordering.""" + partitioning, graph clustering, and sparse matrix ordering. 
+ """ homepage = "https://gitlab.inria.fr/scotch/scotch" git = "https://gitlab.inria.fr/scotch/scotch.git" @@ -32,18 +35,21 @@ class Scotch(CMakePackage): version("6.0.0", sha256="8206127d038bda868dda5c5a7f60ef8224f2e368298fbb01bf13fa250e378dd4") version("5.1.10b", sha256="54c9e7fafefd49d8b2017d179d4f11a655abe10365961583baaddc4eeb6a9add") - variant("mpi", default=True, description="Activate the compilation of parallel libraries") - variant( - "compression", default=True, description="Activate the posibility to use compressed files" - ) - variant( - "esmumps", default=False, description="Activate the compilation of esmumps needed by mumps" - ) + build_system(conditional("cmake", when="@7:"), "makefile", default="cmake") + variant("mpi", default=True, description="Compile parallel libraries") + variant("compression", default=True, description="May use compressed files") + variant("esmumps", default=False, description="Compile esmumps (needed by mumps)") variant("shared", default=True, description="Build a shared version of the library") variant( "metis", default=False, description="Expose vendored METIS/ParMETIS libraries and wrappers" ) variant("int64", default=False, description="Use int64_t for SCOTCH_Num typedef") + variant( + "link_error_lib", + default=False, + when="@7.0.1", + description="Link error handling library to libscotch/libptscotch", + ) # Does not build with flex 2.6.[23] depends_on("flex@:2.6.1,2.6.4:", type="build") @@ -57,14 +63,13 @@ class Scotch(CMakePackage): patch("metis-headers-6.0.4.patch", when="@6.0.4") patch("libscotchmetis-return-6.0.5a.patch", when="@6.0.5a") + patch("libscotch-scotcherr-link-7.0.1.patch", when="@7.0.1 +link_error_lib") # Vendored dependency of METIS/ParMETIS conflicts with standard # installations conflicts("^metis", when="+metis") conflicts("^parmetis", when="+metis") - # NOTE: In cross-compiling environment parallel build - # produces weird linker errors. 
parallel = False # NOTE: Versions of Scotch up to version 6.0.0 don't include support for @@ -100,16 +105,30 @@ def libs(self): return scotchlibs + zlibs - @when("@:6") - def patch(self): - self.configure() - # NOTE: Configuration of Scotch is achieved by writing a 'Makefile.inc' - # file that contains all of the configuration variables and their desired - # values for the installation. This function writes this file based on - # the given installation variants. - @when("@:6") - def configure(self): +class CMakeBuilder(spack.build_systems.cmake.CMakeBuilder): + def cmake_args(self): + spec = self.spec + args = [ + self.define_from_variant("BUILD_LIBSCOTCHMETIS", "metis"), + self.define_from_variant("INSTALL_METIS_HEADERS", "metis"), + self.define_from_variant("BUILD_LIBESMUMPS", "esmumps"), + self.define_from_variant("BUILD_SHARED_LIBS", "shared"), + self.define_from_variant("BUILD_PTSCOTCH", "mpi"), + ] + + # TODO should we enable/disable THREADS? + + if "+int64" in spec: + args.append("-DINTSIZE=64") + + return args + + +class MakefileBuilder(spack.build_systems.makefile.MakefileBuilder): + build_directory = "src" + + def edit(self, pkg, spec, prefix): makefile_inc = [] cflags = [ "-O3", @@ -140,7 +159,7 @@ def configure(self): makefile_inc.extend( [ "LIB = .dylib", - "CLIBFLAGS = -dynamiclib {0}".format(self.compiler.cc_pic_flag), + "CLIBFLAGS = -dynamiclib {0}".format(pkg.compiler.cc_pic_flag), "RANLIB = echo", "AR = $(CC)", ( @@ -155,13 +174,13 @@ def configure(self): makefile_inc.extend( [ "LIB = .so", - "CLIBFLAGS = -shared {0}".format(self.compiler.cc_pic_flag), + "CLIBFLAGS = -shared {0}".format(pkg.compiler.cc_pic_flag), "RANLIB = echo", "AR = $(CC)", "ARFLAGS = -shared $(LDFLAGS) -o", ] ) - cflags.append(self.compiler.cc_pic_flag) + cflags.append(pkg.compiler.cc_pic_flag) else: makefile_inc.extend( [ @@ -175,9 +194,9 @@ def configure(self): # Compiler-Specific Options # - if self.compiler.name == "gcc": + if pkg.compiler.name == "gcc": 
cflags.append("-Drestrict=__restrict") - elif self.compiler.name == "intel": + elif pkg.compiler.name == "intel": cflags.append("-Drestrict=") mpicc_path = self.spec["mpi"].mpicc if "+mpi" in self.spec else "mpicc" @@ -230,8 +249,8 @@ def configure(self): with open("Makefile.inc", "w") as fh: fh.write("\n".join(makefile_inc)) - @when("@:6") - def install(self, spec, prefix): + @property + def build_targets(self): targets = ["scotch"] if "+mpi" in self.spec: targets.append("ptscotch") @@ -241,65 +260,4 @@ def install(self, spec, prefix): targets.append("esmumps") if "+mpi" in self.spec: targets.append("ptesmumps") - - with working_dir("src"): - for target in targets: - # It seems that building ptesmumps in parallel fails, for - # version prior to 6.0.0 there is no separated targets force - # ptesmumps, this library is built by the ptscotch target. This - # should explain the test for the can_make_parallel variable - can_make_parallel = not ( - target == "ptesmumps" - or (self.spec.version < Version("6.0.0") and target == "ptscotch") - ) - make(target, parallel=can_make_parallel) - - lib_ext = dso_suffix if "+shared" in self.spec else "a" - # It seems easier to remove metis wrappers from the folder that will be - # installed than to tweak the Makefiles - if "+metis" not in self.spec: - with working_dir("lib"): - force_remove("libscotchmetis.{0}".format(lib_ext)) - force_remove("libptscotchparmetis.{0}".format(lib_ext)) - - with working_dir("include"): - force_remove("metis.h") - force_remove("parmetis.h") - - if "~esmumps" in self.spec and self.spec.version < Version("6.0.0"): - with working_dir("lib"): - force_remove("libesmumps.{0}".format(lib_ext)) - force_remove("libptesmumps.{0}".format(lib_ext)) - - with working_dir("include"): - force_remove("esmumps.h") - - install_tree("bin", prefix.bin) - install_tree("lib", prefix.lib) - install_tree("include", prefix.include) - install_tree("man/man1", prefix.share.man.man1) - - @when("@:6") - def cmake(self, spec, 
prefix): - self.configure() - - @when("@:6") - def build(self, spec, prefix): - pass - - def cmake_args(self): - spec = self.spec - args = [ - self.define_from_variant("BUILD_LIBSCOTCHMETIS", "metis"), - self.define_from_variant("INSTALL_METIS_HEADERS", "metis"), - self.define_from_variant("BUILD_LIBESMUMPS", "esmumps"), - self.define_from_variant("BUILD_SHARED_LIBS", "shared"), - self.define_from_variant("BUILD_PTSCOTCH", "mpi"), - ] - - # TODO should we enable/disable THREADS? - - if "+int64" in spec: - args.append("-DINTSIZE=64") - - return args + return targets diff --git a/var/spack/repos/builtin/packages/sdl2/package.py b/var/spack/repos/builtin/packages/sdl2/package.py index 6f191fd28d2..2d01bf89013 100644 --- a/var/spack/repos/builtin/packages/sdl2/package.py +++ b/var/spack/repos/builtin/packages/sdl2/package.py @@ -3,6 +3,8 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) +import sys + from spack.package import * @@ -13,12 +15,17 @@ class Sdl2(CMakePackage): homepage = "https://wiki.libsdl.org/FrontPage" url = "https://libsdl.org/release/SDL2-2.0.5.tar.gz" + git = "https://github.com/libsdl-org/SDL.git" + list_url = "https://github.com/libsdl-org/SDL.git" + version("2.24.1", sha256="bc121588b1105065598ce38078026a414c28ea95e66ed2adab4c44d80b309e1b") + version("2.0.22", sha256="fe7cbf3127882e3fc7259a75a0cb585620272c51745d3852ab9dd87960697f2e") version("2.0.14", sha256="d8215b571a581be1332d2106f8036fcb03d12a70bae01e20f424976d275432bc") version("2.0.5", sha256="442038cf55965969f2ff06d976031813de643af9c9edc9e331bd761c242e8785") depends_on("cmake@2.8.5:", type="build") - depends_on("libxext", type="link") + if sys.platform.startswith("linux"): + depends_on("libxext", type="link") def cmake_args(self): return ["-DSSEMATH={0}".format("OFF" if self.spec.target.family == "aarch64" else "ON")] diff --git a/var/spack/repos/builtin/packages/seacas/package.py b/var/spack/repos/builtin/packages/seacas/package.py index 4d486f2fb21..a94130893c1 100644 --- 
a/var/spack/repos/builtin/packages/seacas/package.py +++ b/var/spack/repos/builtin/packages/seacas/package.py @@ -15,10 +15,13 @@ class Seacas(CMakePackage): - """The SEACAS Project contains the Exodus and IOSS libraries and a - collection of applications which create, query, modify, or - translate exodus databases. Default is to build the exodus and - IOSS libraries and the io_shell, io_info, io_modify, struc_to_unstruc apps. + """The SEACAS Project contains the Exodus and IOSS I/O libraries + and a collection of applications which create, query, modify, or + translate Exodus databases. Exodus is a finite element mesh and + results database file format. + + Default is to build the Exodus and IOSS libraries and the + io_shell, io_info, io_modify, struc_to_unstruc apps. """ homepage = "https://sandialabs.github.io/seacas/" @@ -28,6 +31,9 @@ class Seacas(CMakePackage): # ###################### Versions ########################## version("master", branch="master") + version( + "2022-10-14", sha256="cde91e7561d2352045d669a25bdf46a604d85ed1ea7f3f5028004455e4ce9d56" + ) version( "2022-05-16", sha256="22ff67045d730a2c7d5394c9034e44a2033cc82a461574f93d899e9aa713d4ae" ) @@ -123,23 +129,31 @@ class Seacas(CMakePackage): variant("x11", default=True, description="Compile with X11") # ###################### Dependencies ########################## + depends_on("cmake@3.17:", type="build") + depends_on("mpi", when="+mpi") + + # Always depends on netcdf-c depends_on("netcdf-c@4.8.0:+mpi+parallel-netcdf", when="+mpi") depends_on("netcdf-c@4.8.0:~mpi", when="~mpi") depends_on("hdf5+hl~mpi", when="~mpi") - depends_on("cgns@4.2.0:+mpi+scoping", when="+cgns +mpi") - depends_on("cgns@4.2.0:~mpi+scoping", when="+cgns ~mpi") - depends_on("fmt@8.1.0:", when="@2022-03-04:") + + depends_on("fmt@8.1.0:", when="@2022-03-04:2022-05-16") + depends_on("fmt@9.1.0:", when="@2022-10-14") + depends_on("matio", when="+matio") + depends_on("libx11", when="+x11") + + with when("+cgns"): + 
depends_on("cgns@4.2.0:+mpi+scoping", when="+mpi") + depends_on("cgns@4.2.0:~mpi+scoping", when="~mpi") with when("+adios2"): depends_on("adios2@master") depends_on("adios2~mpi", when="~mpi") depends_on("adios2+mpi", when="+mpi") - depends_on("matio", when="+matio") with when("+metis"): depends_on("metis+int64+real64") depends_on("parmetis+int64", when="+mpi") - depends_on("libx11", when="+x11") # The Faodel TPL is only supported in seacas@2021-04-05: depends_on("faodel@1.2108.1:+mpi", when="+faodel +mpi") @@ -150,11 +164,6 @@ class Seacas(CMakePackage): msg="The Faodel TPL is only compatible with @2021-04-05 and later.", ) - # MPI related dependencies - depends_on("mpi", when="+mpi") - - depends_on("cmake@3.1:", type="build") - def setup_run_environment(self, env): env.prepend_path("PYTHONPATH", self.prefix.lib) @@ -163,187 +172,185 @@ def cmake_args(self): from_variant = self.define_from_variant define = self.define + if self.spec.satisfies("@2022-10-14:"): + project_name_base = "Seacas" + else: + project_name_base = "SEACASProj" + options = [] # #################### Base Settings ####################### + options.extend( + [ + define(project_name_base + "_ENABLE_TESTS", True), + define(project_name_base + "_ENABLE_CXX11", True), + define(project_name_base + "_ENABLE_Kokkos", False), + define(project_name_base + "_HIDE_DEPRECATED_CODE", False), + from_variant("CMAKE_INSTALL_RPATH_USE_LINK_PATH", "shared"), + from_variant("BUILD_SHARED_LIBS", "shared"), + from_variant("SEACASExodus_ENABLE_THREADSAFE", "thread_safe"), + from_variant("SEACASIoss_ENABLE_THREADSAFE", "thread_safe"), + from_variant("TPL_ENABLE_X11", "x11"), + from_variant(project_name_base + "_ENABLE_Fortran", "fortran"), + ] + ) + + options.append(from_variant("TPL_ENABLE_MPI", "mpi")) if "+mpi" in spec: options.extend( [ - "-DCMAKE_C_COMPILER=%s" % spec["mpi"].mpicc, - "-DCMAKE_CXX_COMPILER=%s" % spec["mpi"].mpicxx, - "-DCMAKE_Fortran_COMPILER=%s" % spec["mpi"].mpifc, - "-DTPL_ENABLE_MPI:BOOL=ON", - 
"-DMPI_BASE_DIR:PATH=%s" % spec["mpi"].prefix, + define("CMAKE_C_COMPILER", spec["mpi"].mpicc), + define("CMAKE_CXX_COMPILER", spec["mpi"].mpicxx), + define("CMAKE_Fortran_COMPILER", spec["mpi"].mpifc), + define("MPI_BASE_DIR", spec["mpi"].prefix), ] ) - else: - "-DTPL_ENABLE_MPI:BOOL=OFF" - - options.extend( - [ - "-DSEACASProj_ENABLE_TESTS:BOOL=ON", - "-DSEACASProj_ENABLE_CXX11:BOOL=ON", - from_variant("CMAKE_INSTALL_RPATH_USE_LINK_PATH", "shared"), - from_variant("BUILD_SHARED_LIBS", "shared"), - "-DSEACASProj_ENABLE_Kokkos:BOOL=OFF", - "-DSEACASProj_HIDE_DEPRECATED_CODE:BOOL=OFF", - from_variant("SEACASExodus_ENABLE_THREADSAFE", "thread_safe"), - from_variant("SEACASIoss_ENABLE_THREADSAFE", "thread_safe"), - from_variant("SEACASProj_ENABLE_Fortran", "fortran"), - from_variant("TPL_ENABLE_X11", "x11"), - ] - ) # ########## What applications should be built ############# # Check whether they want everything; if so, do the easy way... if "+applications" in spec and "+legacy" in spec: options.extend( [ - "-DSEACASProj_ENABLE_ALL_PACKAGES:BOOL=ON", - "-DSEACASProj_ENABLE_ALL_OPTIONAL_PACKAGES:BOOL=ON", - "-DSEACASProj_ENABLE_SECONDARY_TESTED_CODE:BOOL=ON", + define(project_name_base + "_ENABLE_ALL_PACKAGES", True), + define(project_name_base + "_ENABLE_ALL_OPTIONAL_PACKAGES", True), + define(project_name_base + "_ENABLE_SECONDARY_TESTED_CODE", True), ] ) + else: # Don't want everything; handle the subsets: options.extend( [ - "-DSEACASProj_ENABLE_ALL_PACKAGES:BOOL=OFF", - "-DSEACASProj_ENABLE_ALL_OPTIONAL_PACKAGES:BOOL=OFF", - "-DSEACASProj_ENABLE_SECONDARY_TESTED_CODE:BOOL=OFF", - "-DSEACASProj_ENABLE_SEACASIoss:BOOL=ON", - "-DSEACASProj_ENABLE_SEACASExodus:BOOL=ON", - from_variant("SEACASProj_ENABLE_SEACASExodus_for", "fortran"), - from_variant("SEACASProj_ENABLE_SEACASExoIIv2for32", "fortran"), + define(project_name_base + "_ENABLE_ALL_PACKAGES", False), + define(project_name_base + "_ENABLE_ALL_OPTIONAL_PACKAGES", False), + define(project_name_base + 
"_ENABLE_SECONDARY_TESTED_CODE", False), + define(project_name_base + "_ENABLE_SEACASIoss", True), + define(project_name_base + "_ENABLE_SEACASExodus", True), + from_variant(project_name_base + "_ENABLE_SEACASExodus_for", "fortran"), + from_variant(project_name_base + "_ENABLE_SEACASExoIIv2for32", "fortran"), ] ) if "+applications" in spec: - options.extend( - [ - "-DSEACASProj_ENABLE_SEACASAprepro:BOOL=ON", - "-DSEACASProj_ENABLE_SEACASAprepro_lib:BOOL=ON", - "-DSEACASProj_ENABLE_SEACASConjoin:BOOL=ON", - "-DSEACASProj_ENABLE_SEACASCpup:BOOL=ON", - "-DSEACASProj_ENABLE_SEACASEjoin:BOOL=ON", - "-DSEACASProj_ENABLE_SEACASEpu:BOOL=ON", - "-DSEACASProj_ENABLE_SEACASExo2mat:BOOL=ON", - "-DSEACASProj_ENABLE_SEACASExo_format:BOOL=ON", - "-DSEACASProj_ENABLE_SEACASExodiff:BOOL=ON", - from_variant("SEACASProj_ENABLE_SEACASExplore", "fortran"), - from_variant("SEACASProj_ENABLE_SEACASGrepos", "fortran"), - "-DSEACASProj_ENABLE_SEACASMat2exo:BOOL=ON", - "-DSEACASProj_ENABLE_SEACASNas2exo:BOOL=ON", - "-DSEACASProj_ENABLE_SEACASNemslice:BOOL=ON", - "-DSEACASProj_ENABLE_SEACASNemspread:BOOL=ON", - "-DSEACASProj_ENABLE_SEACASSlice:BOOL=ON", - "-DSEACASProj_ENABLE_SEACASZellij:BOOL=ON", - ] - ) + # C / C++ applications + for app in ( + "Aprepro", + "Aprepro_lib", + "Conjoin", + "Cpup", + "Ejoin", + "Epu", + "Exo2mat", + "Exo_format", + "Exodiff", + "Mat2exo", + "Nas2exo", + "Nemslice", + "Nemspread", + "Slice", + "Zellij", + ): + options.append(define(project_name_base + "_ENABLE_SEACAS" + app, True)) + # Fortran-based applications + for app in ("Explore", "Grepos"): + options.append( + from_variant(project_name_base + "_ENABLE_SEACAS" + app, "fortran") + ) if "+legacy" in spec: - options.extend( - [ - define("SEACASProj_ENABLE_SEACASNemesis", True), - from_variant("SEACASProj_ENABLE_SEACASAlgebra", "fortran"), - from_variant("SEACASProj_ENABLE_SEACASBlot", "fortran"), - from_variant("SEACASProj_ENABLE_SEACASEx1ex2v2", "fortran"), - 
from_variant("SEACASProj_ENABLE_SEACASEx2ex1v2", "fortran"), - from_variant("SEACASProj_ENABLE_SEACASExomatlab", "fortran"), - from_variant("SEACASProj_ENABLE_SEACASExotec2", "fortran"), - from_variant("SEACASProj_ENABLE_SEACASExotxt", "fortran"), - from_variant("SEACASProj_ENABLE_SEACASFastq", "fortran"), - from_variant("SEACASProj_ENABLE_SEACASGen3D", "fortran"), - from_variant("SEACASProj_ENABLE_SEACASGenshell", "fortran"), - from_variant("SEACASProj_ENABLE_SEACASGjoin", "fortran"), - from_variant("SEACASProj_ENABLE_SEACASMapvar", "fortran"), - from_variant("SEACASProj_ENABLE_SEACASMapvar-kd", "fortran"), - from_variant("SEACASProj_ENABLE_SEACASNumbers", "fortran"), - from_variant("SEACASProj_ENABLE_SEACASTxtexo", "fortran"), - ] - ) + # Legacy applications -- all are fortran-based except Nemesis + options.append(define(project_name_base + "_ENABLE_SEACASNemesis", True)) + + for app in ( + "Algebra", + "Blot", + "Ex1ex2v2", + "Ex2ex1v2", + "Exomatlab", + "Exotec2", + "Exotxt", + "Fastq", + "Gen3D", + "Genshell", + "Gjoin", + "Mapvar", + "Mapvar-kd", + "Numbers", + "Txtexo", + ): + options.append( + from_variant(project_name_base + "_ENABLE_SEACAS" + app, "fortran") + ) # ##################### Dependencies ########################## # Always need NetCDF-C options.extend( [ - "-DTPL_ENABLE_Netcdf:BOOL=ON", - "-DNetCDF_ROOT:PATH=%s" % spec["netcdf-c"].prefix, + define("TPL_ENABLE_Netcdf", True), + define("NetCDF_ROOT", spec["netcdf-c"].prefix), ] ) if "+parmetis" in spec: options.extend( [ - "-DTPL_ENABLE_METIS:BOOL=ON", - "-DMETIS_LIBRARY_DIRS=%s" % spec["metis"].prefix.lib, - "-DMETIS_LIBRARY_NAMES=metis", - "-DTPL_METIS_INCLUDE_DIRS=%s" % spec["metis"].prefix.include, - "-DTPL_ENABLE_ParMETIS:BOOL=ON", - "-DParMETIS_LIBRARY_DIRS=%s;%s" - % (spec["parmetis"].prefix.lib, spec["metis"].prefix.lib), - "-DParMETIS_LIBRARY_NAMES=parmetis;metis", - "-DTPL_ParMETIS_INCLUDE_DIRS=%s;%s" - % (spec["parmetis"].prefix.include, spec["metis"].prefix.include), + 
define("TPL_ENABLE_METIS", True), + define("METIS_LIBRARY_DIRS", spec["metis"].prefix.lib), + define("METIS_LIBRARY_NAMES", "metis"), + define("TPL_METIS_INCLUDE_DIRS", spec["metis"].prefix.include), + define("TPL_ENABLE_ParMETIS", True), + define( + "ParMETIS_LIBRARY_DIRS", + [spec["parmetis"].prefix.lib, spec["metis"].prefix.lib], + ), + define("ParMETIS_LIBRARY_NAMES", ["parmetis", "metis"]), + define( + "TPL_ParMETIS_INCLUDE_DIRS", + [spec["parmetis"].prefix.include, spec["metis"].prefix.include], + ), ] ) elif "+metis" in spec: options.extend( [ - "-DTPL_ENABLE_METIS:BOOL=ON", - "-DMETIS_LIBRARY_DIRS=%s" % spec["metis"].prefix.lib, - "-DMETIS_LIBRARY_NAMES=metis", - "-DTPL_METIS_INCLUDE_DIRS=%s" % spec["metis"].prefix.include, - "-DTPL_ENABLE_ParMETIS:BOOL=OFF", + define("TPL_ENABLE_METIS", True), + define("METIS_LIBRARY_DIRS", spec["metis"].prefix.lib), + define("METIS_LIBRARY_NAMES", "metis"), + define("TPL_METIS_INCLUDE_DIRS", spec["metis"].prefix.include), + define("TPL_ENABLE_ParMETIS", False), ] ) else: options.extend( [ - "-DTPL_ENABLE_METIS:BOOL=OFF", - "-DTPL_ENABLE_ParMETIS:BOOL=OFF", + define("TPL_ENABLE_METIS", False), + define("TPL_ENABLE_ParMETIS", False), ] ) + options.append(from_variant("TPL_ENABLE_Matio", "matio")) if "+matio" in spec: - options.extend( - ["-DTPL_ENABLE_Matio:BOOL=ON", "-DMatio_ROOT:PATH=%s" % spec["matio"].prefix] - ) - else: - options.extend(["-DTPL_ENABLE_Matio:BOOL=OFF"]) + options.append(define("Matio_ROOT", spec["matio"].prefix)) + options.append(from_variant("TPL_ENABLE_CGNS", "cgns")) if "+cgns" in spec: - options.extend( - [ - "-DTPL_ENABLE_CGNS:BOOL=ON", - "-DCGNS_ROOT:PATH=%s" % spec["cgns"].prefix, - ] - ) - else: - options.extend(["-DTPL_ENABLE_CGNS:BOOL=OFF"]) + options.append(define("CGNS_ROOT", spec["cgns"].prefix)) options.append(from_variant("TPL_ENABLE_Faodel", "faodel")) - for pkg in ("Faodel", "BOOST"): if pkg.lower() in spec: options.append(define(pkg + "_ROOT", spec[pkg.lower()].prefix)) + 
options.append(from_variant("TPL_ENABLE_ADIOS2", "adios2")) if "+adios2" in spec: - options.extend( - [ - "-DTPL_ENABLE_ADIOS2:BOOL=ON", - "-DADIOS2_ROOT:PATH=%s" % spec["adios2"].prefix, - ] - ) - else: - options.extend(["-DTPL_ENABLE_ADIOS2:BOOL=OFF"]) + options.append(define("ADIOS2_ROOT", spec["adios2"].prefix)) # ################# RPath Handling ###################### if sys.platform == "darwin" and macos_version() >= Version("10.12"): # use @rpath on Sierra due to limit of dynamic loader - options.append("-DCMAKE_MACOSX_RPATH:BOOL=ON") + options.append(define("CMAKE_MACOSX_RPATH", True)) else: - options.append("-DCMAKE_INSTALL_NAME_DIR:PATH=%s" % self.prefix.lib) + options.append(define("CMAKE_INSTALL_NAME_DIR", self.prefix.lib)) return options diff --git a/var/spack/repos/builtin/packages/sentieon-genomics/package.py b/var/spack/repos/builtin/packages/sentieon-genomics/package.py index 053ce9fb3cb..06b630fea1d 100644 --- a/var/spack/repos/builtin/packages/sentieon-genomics/package.py +++ b/var/spack/repos/builtin/packages/sentieon-genomics/package.py @@ -26,6 +26,7 @@ class SentieonGenomics(Package): url = "https://s3.amazonaws.com/sentieon-release/software/sentieon-genomics-201808.01.tar.gz" maintainers = ["snehring"] + version("202112.06", sha256="18306036f01c3d41dd7ae738b18ae76fd6b666f1172dd4696cd55b4a8465270d") version("202112.05", sha256="c97b14b0484a0c0025115ad7b911453af7bdcd09874c26cbc39fd0bc5588a306") version("202112.04", sha256="154732dc752476d984908e78b1fc5120d4f23028ee165cc4a451ecc1df0e0246") version("202112.02", sha256="033943df7958550fd42b410d34ae91a8956a905fc90ca8baa93d2830f918872c") diff --git a/var/spack/repos/builtin/packages/sfcio/package.py b/var/spack/repos/builtin/packages/sfcio/package.py index 0a8dc94847a..8156e78e889 100644 --- a/var/spack/repos/builtin/packages/sfcio/package.py +++ b/var/spack/repos/builtin/packages/sfcio/package.py @@ -15,7 +15,12 @@ class Sfcio(CMakePackage): homepage = "https://noaa-emc.github.io/NCEPLIBS-sfcio" 
url = "https://github.com/NOAA-EMC/NCEPLIBS-sfcio/archive/refs/tags/v1.4.1.tar.gz" - maintainers = ["t-brown", "kgerheiser", "Hang-Lei-NOAA", "edwardhartnett"] + maintainers = [ + "t-brown", + "AlexanderRichert-NOAA", + "Hang-Lei-NOAA", + "edwardhartnett", + ] version("1.4.1", sha256="d9f900cf18ec1a839b4128c069b1336317ffc682086283443354896746b89c59") diff --git a/var/spack/repos/builtin/packages/shortbred/package.py b/var/spack/repos/builtin/packages/shortbred/package.py index 502bfd32eb7..a08fb3efc6d 100644 --- a/var/spack/repos/builtin/packages/shortbred/package.py +++ b/var/spack/repos/builtin/packages/shortbred/package.py @@ -13,7 +13,11 @@ class Shortbred(Package): homepage = "https://huttenhower.sph.harvard.edu/shortbred" url = "https://bitbucket.org/biobakery/shortbred/get/0.9.4.tar.gz" - version("0.9.4", sha256="a85e5609db79696d3f2d478408fc6abfeea7628de9f533c4e1e0ea3622b397ba") + version( + "0.9.4", + sha256="a85e5609db79696d3f2d478408fc6abfeea7628de9f533c4e1e0ea3622b397ba", + deprecated=True, + ) depends_on("blast-plus@2.2.28:") depends_on("cdhit@4.6:") diff --git a/var/spack/repos/builtin/packages/sigio/package.py b/var/spack/repos/builtin/packages/sigio/package.py index 18cdf46f8f1..4ddad9f9539 100644 --- a/var/spack/repos/builtin/packages/sigio/package.py +++ b/var/spack/repos/builtin/packages/sigio/package.py @@ -15,7 +15,12 @@ class Sigio(CMakePackage): homepage = "https://noaa-emc.github.io/NCEPLIBS-sigio" url = "https://github.com/NOAA-EMC/NCEPLIBS-sigio/archive/refs/tags/v2.3.2.tar.gz" - maintainers = ["t-brown", "kgerheiser", "Hang-Lei-NOAA", "edwardhartnett"] + maintainers = [ + "t-brown", + "AlexanderRichert-NOAA", + "Hang-Lei-NOAA", + "edwardhartnett", + ] version("2.3.2", sha256="333f3cf3a97f97103cbafcafc2ad89b24faa55b1332a98adc1637855e8a5b613") diff --git a/var/spack/repos/builtin/packages/simgrid/package.py b/var/spack/repos/builtin/packages/simgrid/package.py index 4af3ed91fd1..2e0b5f3e581 100644 --- 
a/var/spack/repos/builtin/packages/simgrid/package.py +++ b/var/spack/repos/builtin/packages/simgrid/package.py @@ -19,6 +19,7 @@ class Simgrid(CMakePackage): maintainers = ["viniciusvgp"] + version("3.32", sha256="837764eb81562f04e49dd20fbd8518d9eb1f94df00a4e4555e7ec7fa8aa341f0") version("3.31", sha256="4b44f77ad40c01cf4e3013957c9cbe39f33dec9304ff0c9c3d9056372ed4c61d") version("3.30", sha256="0cad48088c106e72efb42fb423e65d77fc9053cc03d6f3a5ff7ba4c712bb4eb8") version("3.29", sha256="83e8afd653555eeb70dc5c0737b88036c7906778ecd3c95806c6bf5535da2ccf") diff --git a/var/spack/repos/builtin/packages/singularity-eos/package.py b/var/spack/repos/builtin/packages/singularity-eos/package.py index 5475ed565d9..f9666b1abe4 100644 --- a/var/spack/repos/builtin/packages/singularity-eos/package.py +++ b/var/spack/repos/builtin/packages/singularity-eos/package.py @@ -19,6 +19,7 @@ class SingularityEos(CMakePackage, CudaPackage): maintainers = ["rbberger"] version("main", branch="main") + version("1.6.2", sha256="9c85fca679139a40cc9c72fcaeeca78a407cc1ca184734785236042de364b942") version("1.6.1", sha256="c6d92dfecf9689ffe2df615791c039f7e527e9f47799a862e26fa4e3420fe5d7") # build with kokkos, kokkos-kernels for offloading support diff --git a/var/spack/repos/builtin/packages/singularity/package.py b/var/spack/repos/builtin/packages/singularity/package.py index 2d6bf2faecb..26726fd13fb 100644 --- a/var/spack/repos/builtin/packages/singularity/package.py +++ b/var/spack/repos/builtin/packages/singularity/package.py @@ -27,6 +27,8 @@ class Singularity(SingularityBase): maintainers = ["alalazo"] version("master", branch="master") + version("3.8.7", sha256="3329f2e583f84a8343cb2c0380a1d6cbceafae7d1e633b5cbcadf7143eac859b") + version("3.8.6", sha256="bb5a3b7670ac9c7a4b3ce5b2c9f3d6b5be60e21b08d338c9dfdabb7b2a99f528") version("3.8.5", sha256="7fff78b5c07b5d4d08269bd267ac5e994390f933321e54efd6b7c86683153ce4") version("3.8.3", 
sha256="2e22eb9ee1b73fdd51b8783149f0e4d83c0d2d8a0c1edf6034157d50eeefb835") version("3.8.0", sha256="e9608b0e0a8c805218bbe795e9176484837b2f7fcb95e5469b853b3809a2412e") diff --git a/var/spack/repos/builtin/packages/singularityce/package.py b/var/spack/repos/builtin/packages/singularityce/package.py index 7133febaf64..0ee15ae5a99 100644 --- a/var/spack/repos/builtin/packages/singularityce/package.py +++ b/var/spack/repos/builtin/packages/singularityce/package.py @@ -191,5 +191,10 @@ class Singularityce(SingularityBase): maintainers = ["alalazo"] version("master", branch="master") + version("3.10.3", sha256="f87d8e212ce209c5212d6faf253b97a24b5d0b6e6b17b5e58b316cdda27a332f") + version("3.10.2", sha256="b4f279856ea4bf28a1f34f89320c02b545d6e57d4143679920e1ac4267f540e1") + version("3.10.1", sha256="e3af12edc0260bc3a3a481459a3a4457de9235025e6b37288da80e3cdc011a7a") + version("3.10.0", sha256="5e22e6cdad66c331668f6cff4544c83917bb3db90da3cf92403a394c5bf8cc8f") + version("3.9.9", sha256="1381433d64138c08e93ffacdfb4844e82c2288f1e39a9d2c631a1c4021381f2a") version("3.9.1", sha256="1ba3bb1719a420f48e9b0a6afdb5011f6c786d0f107ef272528c632fff9fd153") version("3.8.0", sha256="5fa2c0e7ef2b814d8aa170826b833f91e5031a85d85cd1292a234e6c55da1be1") diff --git a/var/spack/repos/builtin/packages/slate/package.py b/var/spack/repos/builtin/packages/slate/package.py index 7afb4f59fff..0a08a6af3c9 100644 --- a/var/spack/repos/builtin/packages/slate/package.py +++ b/var/spack/repos/builtin/packages/slate/package.py @@ -60,6 +60,9 @@ class Slate(CMakePackage, CudaPackage, ROCmPackage): depends_on("lapackpp ~cuda", when="~cuda") depends_on("lapackpp +cuda", when="+cuda") depends_on("lapackpp ~rocm", when="~rocm") + for val in CudaPackage.cuda_arch_values: + depends_on("blaspp +cuda cuda_arch=%s" % val, when="cuda_arch=%s" % val) + depends_on("lapackpp +cuda cuda_arch=%s" % val, when="cuda_arch=%s" % val) for val in ROCmPackage.amdgpu_targets: depends_on("blaspp +rocm amdgpu_target=%s" % val, 
when="amdgpu_target=%s" % val) depends_on("lapackpp +rocm amdgpu_target=%s" % val, when="amdgpu_target=%s" % val) @@ -68,7 +71,7 @@ class Slate(CMakePackage, CudaPackage, ROCmPackage): depends_on("lapackpp@2021.04.00:", when="@2021.05.01:") depends_on("lapackpp@2020.10.02", when="@2020.10.00") depends_on("lapackpp@master", when="@master") - depends_on("scalapack") + depends_on("scalapack", type="test") depends_on("hipify-clang", when="@:2021.05.02 +rocm ^hip@5:") cpp_17_msg = "Requires C++17 compiler support" @@ -92,14 +95,16 @@ def cmake_args(self): backend = "hip" backend_config = "-Dgpu_backend=%s" % backend - return [ + config = [ "-Dbuild_tests=%s" % self.run_tests, "-Duse_openmp=%s" % ("+openmp" in spec), "-DBUILD_SHARED_LIBS=%s" % ("+shared" in spec), backend_config, "-Duse_mpi=%s" % ("+mpi" in spec), - "-DSCALAPACK_LIBRARIES=%s" % spec["scalapack"].libs.joined(";"), ] + if self.run_tests: + config.append("-DSCALAPACK_LIBRARIES=%s" % spec["scalapack"].libs.joined(";")) + return config @run_after("install") def cache_test_sources(self): @@ -117,13 +122,10 @@ def test(self): test_dir = join_path(self.test_suite.current_test_cache_dir, "examples", "build") with working_dir(test_dir, create=True): cmake_bin = join_path(self.spec["cmake"].prefix.bin, "cmake") - prefixes = ";".join( - [ - self.spec["blaspp"].prefix, - self.spec["lapackpp"].prefix, - self.spec["mpi"].prefix, - ] - ) + deps = "blaspp lapackpp mpi" + if self.spec.satisfies("+rocm"): + deps += " rocblas hip llvm-amdgpu comgr hsa-rocr-dev rocsolver" + prefixes = ";".join([self.spec[x].prefix for x in deps.split()]) self.run_test(cmake_bin, ["-DCMAKE_PREFIX_PATH=" + prefixes, ".."]) make() test_args = ["-n", "4", "./ex05_blas"] diff --git a/var/spack/repos/builtin/packages/slepc/package.py b/var/spack/repos/builtin/packages/slepc/package.py index 0a19e0025f3..48590e36c67 100644 --- a/var/spack/repos/builtin/packages/slepc/package.py +++ b/var/spack/repos/builtin/packages/slepc/package.py @@ -22,6 +22,8 
@@ class Slepc(Package, CudaPackage, ROCmPackage): test_requires_compiler = True version("main", branch="main") + version("3.18.1", sha256="f6e6e16d8399c3f94d187da9d4bfdfca160de50ebda7d63f6fa8ef417597e9b4") + version("3.18.0", sha256="18af535d979a646363df01f407c75f0e3b0dd97b3fdeb20dca25b30cd89239ee") version("3.17.2", sha256="f784cca83a14156631d6e0f5726ca0778e259e1fe40c927607d5fb12d958d705") version("3.17.1", sha256="11386cd3f4c0f9727af3c1c59141cc4bf5f83bdf7c50251de0845e406816f575") version("3.17.0", sha256="d4685fed01b2351c66706cbd6d08e4083a4645df398ef5ccd68fdfeb2f86ea97") @@ -69,6 +71,7 @@ class Slepc(Package, CudaPackage, ROCmPackage): # Cannot mix release and development versions of SLEPc and PETSc: depends_on("petsc@main", when="@main") + depends_on("petsc@3.18.0:3.18", when="@3.18.0:3.18") depends_on("petsc@3.17.0:3.17", when="@3.17.0:3.17") depends_on("petsc@3.16.0:3.16", when="@3.16.0:3.16") depends_on("petsc@3.15.0:3.15", when="@3.15.0:3.15") @@ -111,12 +114,20 @@ class Slepc(Package, CudaPackage, ROCmPackage): when="@3.13.0:+blopex", ) + def revert_kokkos_nvcc_wrapper(self): + # revert changes by kokkos-nvcc-wrapper + if self.spec.satisfies("^kokkos+cuda+wrapper"): + env["MPICH_CXX"] = env["CXX"] + env["OMPI_CXX"] = env["CXX"] + env["MPICXX_CXX"] = env["CXX"] + def install(self, spec, prefix): # set SLEPC_DIR for installation # Note that one should set the current (temporary) directory instead # its symlink in spack/stage/ ! 
os.environ["SLEPC_DIR"] = os.getcwd() + self.revert_kokkos_nvcc_wrapper() if self.spec.satisfies("%cce"): filter_file( " flags = l", @@ -156,7 +167,7 @@ def install(self, spec, prefix): python("configure", "--prefix=%s" % prefix, *options) - make("MAKE_NP=%s" % make_jobs, parallel=False) + make("V=1 MAKE_NP=%s" % make_jobs, parallel=False) if self.run_tests: make("test", parallel=False) diff --git a/var/spack/repos/builtin/packages/socat/package.py b/var/spack/repos/builtin/packages/socat/package.py new file mode 100644 index 00000000000..3a01e8e5dd4 --- /dev/null +++ b/var/spack/repos/builtin/packages/socat/package.py @@ -0,0 +1,31 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class Socat(AutotoolsPackage): + """socat is a relay for bidirectional data transfer between two independent + data channels. Each of these data channels may be a file, pipe, device + (serial line etc. or a pseudo terminal), a socket (UNIX, IP4, IP6 - raw, + UDP, TCP), an SSL socket, proxy CONNECT connection, a file descriptor + (stdin etc.), the GNU line editor (readline), a program, or a combination + of two of these. 
These modes include generation of "listening" sockets, + named pipes, and pseudo terminals.""" + + homepage = "http://www.dest-unreach.org/socat/" + url = "http://www.dest-unreach.org/socat/download/socat-1.7.4.4.tar.bz2" + + maintainers = ["michaelkuhn"] + + version("1.7.4.4", sha256="fbd42bd2f0e54a3af6d01bdf15385384ab82dbc0e4f1a5e153b3e0be1b6380ac") + + depends_on("openssl") + depends_on("readline") + depends_on("ncurses") + + def configure_args(self): + args = ["--disable-libwrap"] + return args diff --git a/var/spack/repos/builtin/packages/sox/package.py b/var/spack/repos/builtin/packages/sox/package.py index 3cbb9ab3d12..8e4315a8857 100644 --- a/var/spack/repos/builtin/packages/sox/package.py +++ b/var/spack/repos/builtin/packages/sox/package.py @@ -18,7 +18,6 @@ class Sox(AutotoolsPackage): depends_on("bzip2") depends_on("flac") - depends_on("id3lib") depends_on("libvorbis") depends_on("opus") depends_on("lame", when="+mp3") diff --git a/var/spack/repos/builtin/packages/sp/package.py b/var/spack/repos/builtin/packages/sp/package.py index bfbce956c4a..a999eda820e 100644 --- a/var/spack/repos/builtin/packages/sp/package.py +++ b/var/spack/repos/builtin/packages/sp/package.py @@ -14,7 +14,12 @@ class Sp(CMakePackage): homepage = "https://noaa-emc.github.io/NCEPLIBS-sp" url = "https://github.com/NOAA-EMC/NCEPLIBS-sp/archive/refs/tags/v2.3.3.tar.gz" - maintainers = ["t-brown", "kgerheiser", "edwardhartnett", "Hang-Lei-NOAA"] + maintainers = [ + "t-brown", + "AlexanderRichert-NOAA", + "edwardhartnett", + "Hang-Lei-NOAA", + ] version("2.3.3", sha256="c0d465209e599de3c0193e65671e290e9f422f659f1da928505489a3edeab99f") diff --git a/var/spack/repos/builtin/packages/spades/package.py b/var/spack/repos/builtin/packages/spades/package.py index 7a734ed8582..621e3c574e7 100644 --- a/var/spack/repos/builtin/packages/spades/package.py +++ b/var/spack/repos/builtin/packages/spades/package.py @@ -13,6 +13,7 @@ class Spades(CMakePackage): homepage = 
"https://cab.spbu.ru/software/spades/" url = "https://github.com/ablab/spades/releases/download/v3.15.3/SPAdes-3.15.3.tar.gz" + version("3.15.5", sha256="155c3640d571f2e7b19a05031d1fd0d19bd82df785d38870fb93bd241b12bbfa") version("3.15.3", sha256="b2e5a9fd7a65aee5ab886222d6af4f7b7bc7f755da7a03941571fabd6b9e1499") version("3.15.0", sha256="6719489fa4bed6dd96d78bdd4001a30806d5469170289085836711d1ffb8b28b") version("3.14.1", sha256="d629b78f7e74c82534ac20f5b3c2eb367f245e6840a67b9ef6a76f6fac5323ca") diff --git a/var/spack/repos/builtin/packages/spdlog/package.py b/var/spack/repos/builtin/packages/spdlog/package.py index 6c8b24cd0f2..e449d853d24 100644 --- a/var/spack/repos/builtin/packages/spdlog/package.py +++ b/var/spack/repos/builtin/packages/spdlog/package.py @@ -12,6 +12,7 @@ class Spdlog(CMakePackage): homepage = "https://github.com/gabime/spdlog" url = "https://github.com/gabime/spdlog/archive/v0.9.0.tar.gz" + version("1.10.0", sha256="697f91700237dbae2326b90469be32b876b2b44888302afbc7aceb68bcfe8224") version("1.9.2", sha256="6fff9215f5cb81760be4cc16d033526d1080427d236e86d70bb02994f85e3d38") version("1.9.1", sha256="9a452cfa24408baccc9b2bc2d421d68172a7630c99e9504a14754be840d31a62") version("1.9.0", sha256="9ad181d75aaedbf47c8881e7b947a47cac3d306997e39de24dba60db633e70a7") @@ -44,13 +45,18 @@ class Spdlog(CMakePackage): version("0.9.0", sha256="bbbe5a855c8b309621352921d650449eb2f741d35d55ec50fb4d8122ddfb8f01") variant("shared", default=True, description="Build shared libraries (v1.4.0+)") + variant( + "fmt_external", + default=False, + description="Build using external fmt libraries instead of bundled one", + ) depends_on("cmake@3.2:", when="@:1.7.0", type="build") depends_on("cmake@3.10:", when="@1.8.0:", type="build") - depends_on("fmt@5.3:") - depends_on("fmt@7:", when="@1.7:") - depends_on("fmt@8:", when="@1.9:") + depends_on("fmt@5.3:", when="+fmt_external") + depends_on("fmt@7:", when="@1.7: +fmt_external") + depends_on("fmt@8:", when="@1.9: +fmt_external") 
def cmake_args(self): args = [] @@ -59,7 +65,7 @@ def cmake_args(self): args.extend( [ self.define_from_variant("SPDLOG_BUILD_SHARED", "shared"), - self.define("SPDLOG_FMT_EXTERNAL", "ON"), + self.define_from_variant("SPDLOG_FMT_EXTERNAL", "fmt_external"), # tests and examples self.define("SPDLOG_BUILD_TESTS", self.run_tests), self.define("SPDLOG_BUILD_EXAMPLE", self.run_tests), diff --git a/var/spack/repos/builtin/packages/sperr/package.py b/var/spack/repos/builtin/packages/sperr/package.py new file mode 100644 index 00000000000..5859dd22ccc --- /dev/null +++ b/var/spack/repos/builtin/packages/sperr/package.py @@ -0,0 +1,43 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class Sperr(CMakePackage): + """SPERR is a lossy scientific (floating-point) data compressor that can + perform either error-bounded or size-bounded data compression""" + + homepage = "https://github.com/NCAR/SPERR" + git = homepage + + version("2022.07.18", commit="640305d049db9e9651ebdd773e6936e2c028ff3a") + version("2022.05.26", commit="7894a5fe1b5ca5a4aaa952d1779dfc31fd741243") + + depends_on("git", type="build") + depends_on("zstd", type=("build", "link"), when="+zstd") + depends_on("pkgconfig", type=("build"), when="+zstd") + + variant("shared", description="build shared libraries", default=True) + variant("zstd", description="use Zstd for more compression", default=True) + variant("openmp", description="use openmp for acceleration", default=True) + + maintainers = ["shaomeng", "robertu94"] + + def cmake_args(self): + # ensure the compiler supports OpenMP if it is used + if "+openmp" in self.spec: + self.compiler.openmp_flag + + args = [ + self.define_from_variant("BUILD_SHARED_LIBS", "shared"), + self.define_from_variant("USE_ZSTD", "zstd"), + self.define_from_variant("USE_OMP", "openmp"), + 
"-DSPERR_PREFER_RPATH=OFF", + "-DUSE_BUNDLED_ZSTD=OFF", + "-DBUILD_CLI_UTILITIES=OFF", + "-DBUILD_UNIT_TESTS=OFF", + ] + return args diff --git a/var/spack/repos/builtin/packages/sqlite/package.py b/var/spack/repos/builtin/packages/sqlite/package.py index 5b562955827..7e22fc3aae9 100644 --- a/var/spack/repos/builtin/packages/sqlite/package.py +++ b/var/spack/repos/builtin/packages/sqlite/package.py @@ -17,6 +17,7 @@ class Sqlite(AutotoolsPackage): homepage = "https://www.sqlite.org" + version("3.39.4", sha256="f31d445b48e67e284cf206717cc170ab63cbe4fd7f79a82793b772285e78fdbb") version("3.39.2", sha256="852be8a6183a17ba47cee0bbff7400b7aa5affd283bf3beefc34fcd088a239de") version("3.38.5", sha256="5af07de982ba658fd91a03170c945f99c971f6955bc79df3266544373e39869c") version("3.38.3", sha256="61f2dd93a2e38c33468b7125967c3218bf9f4dd8365def6025e314f905dc942e") diff --git a/var/spack/repos/builtin/packages/starpu/package.py b/var/spack/repos/builtin/packages/starpu/package.py index 947a934db65..b0158964abd 100644 --- a/var/spack/repos/builtin/packages/starpu/package.py +++ b/var/spack/repos/builtin/packages/starpu/package.py @@ -69,6 +69,8 @@ class Starpu(AutotoolsPackage): variant("simgrid", default=False, description="Enable SimGrid support") variant("simgridmc", default=False, description="Enable SimGrid model checker support") variant("examples", default=True, description="Enable Examples") + variant("papi", default=False, description="Enable PAPI support", when="@master:") + variant("blocking", default=False, description="Enable blocking drivers support") depends_on("pkgconfig", type="build") depends_on("autoconf", type="build") @@ -80,14 +82,18 @@ class Starpu(AutotoolsPackage): depends_on("mpi", when="+mpi~simgrid") depends_on("cuda", when="+cuda~simgrid") depends_on("fxt", when="+fxt") + depends_on("fxt+static", when="+fxt+simgrid+mpi") depends_on("simgrid", when="+simgrid") depends_on("simgrid+smpi", when="+simgrid+mpi") depends_on("simgrid+mc", when="+simgridmc") + 
depends_on("papi", when="+papi") conflicts( "+shared", when="+mpi+simgrid", msg="Simgrid MPI cannot be built with a shared library" ) + conflicts("+papi", when="+simgrid") + def autoreconf(self, spec, prefix): if not os.path.isfile("./configure"): autogen = Executable("./autogen.sh") @@ -124,6 +130,8 @@ def configure_args(self): "--%s-build-examples" % ("enable" if "+examples" in spec else "disable"), "--%s-fortran" % ("enable" if "+fortran" in spec else "disable"), "--%s-openmp" % ("enable" if "+openmp" in spec else "disable"), + "--%s-blocking-drivers" % ("enable" if "+blocking" in spec else "disable"), + "--%s-papi" % ("enable" if "+papi" in spec else "disable"), "--%s-opencl" % ("disable" if "~opencl" in spec or "+simgrid" in spec else "enable"), "--%s-cuda" % ("disable" if "~cuda" in spec or "+simgrid" in spec else "enable"), diff --git a/var/spack/repos/builtin/packages/strumpack/package.py b/var/spack/repos/builtin/packages/strumpack/package.py index 258e0583030..e19aadcff38 100644 --- a/var/spack/repos/builtin/packages/strumpack/package.py +++ b/var/spack/repos/builtin/packages/strumpack/package.py @@ -29,6 +29,8 @@ class Strumpack(CMakePackage, CudaPackage, ROCmPackage): test_requires_compiler = True version("master", branch="master") + version("7.0.1", sha256="ddbf9c0509eaf0f8a4c70f59508787336a05eeacc8322f156117d8ce59a70a60") + version("7.0.0", sha256="18f7a0d75cc5cfdb7bbb6112a2bdda7a50fbcaefa2d8bab001f902bdf62e69e3") version("6.3.1", sha256="3f1de435aeb850c06d841655c3bc426565eb0cc0a7314b76586c2c709b03fb61") version("6.3.0", sha256="47dec831684894b7ed77c66b8a23e172b388c83580cfaf91f921564fa0b46d41") version("6.2.1", sha256="52d63ab8f565266a9b1b5f3596afd00fc3b70296179b53a1e5b99405defeca22") @@ -72,6 +74,7 @@ class Strumpack(CMakePackage, CudaPackage, ROCmPackage): depends_on("cuda", when="@4.0.0: +cuda") depends_on("zfp", when="+zfp") depends_on("hipblas", when="+rocm") + depends_on("hipsparse", type="link", when="@7.0.1: +rocm") depends_on("rocsolver", 
when="+rocm") depends_on("slate", when="+slate") depends_on("slate+cuda", when="+cuda+slate") @@ -144,8 +147,12 @@ def cmake_args(self): if "+rocm" in spec: args.append("-DHIP_ROOT_DIR={0}".format(spec["hip"].prefix)) rocm_archs = spec.variants["amdgpu_target"].value + hipcc_flags = [] + if spec.satisfies("@7.0.1: +rocm"): + hipcc_flags.append("-std=c++14") if "none" not in rocm_archs: - args.append("-DHIP_HIPCC_FLAGS=--amdgpu-target={0}".format(",".join(rocm_archs))) + hipcc_flags.append("--amdgpu-target={0}".format(",".join(rocm_archs))) + args.append("-DHIP_HIPCC_FLAGS={0}".format(" ".join(hipcc_flags))) return args diff --git a/var/spack/repos/builtin/packages/suite-sparse/package.py b/var/spack/repos/builtin/packages/suite-sparse/package.py index d2b85a741c9..3c1e3901e80 100644 --- a/var/spack/repos/builtin/packages/suite-sparse/package.py +++ b/var/spack/repos/builtin/packages/suite-sparse/package.py @@ -15,6 +15,9 @@ class SuiteSparse(Package): url = "https://github.com/DrTimothyAldenDavis/SuiteSparse/archive/v4.5.3.tar.gz" git = "https://github.com/DrTimothyAldenDavis/SuiteSparse.git" + version("5.13.0", sha256="59c6ca2959623f0c69226cf9afb9a018d12a37fab3a8869db5f6d7f83b6b147d") + version("5.12.0", sha256="5fb0064a3398111976f30c5908a8c0b40df44c6dd8f0cc4bfa7b9e45d8c647de") + version("5.11.0", sha256="fdd957ed06019465f7de73ce931afaf5d40e96e14ae57d91f60868b8c123c4c8") version("5.10.1", sha256="acb4d1045f48a237e70294b950153e48dce5b5f9ca8190e86c2b8c54ce00a7ee") version("5.10.0", sha256="4bcc974901c0173acf80c41ee0fd779eb7dce2871d4afa24a5d15b1a468f93e5") version("5.9.0", sha256="7bdd4811f1cf0767c5fdb5e435817fdadee50b0acdb598f4882ae7b8291a7f24") @@ -34,7 +37,6 @@ class SuiteSparse(Package): version("4.5.5", sha256="80d1d9960a6ec70031fecfe9adfe5b1ccd8001a7420efb50d6fa7326ef14af91") version("4.5.3", sha256="b6965f9198446a502cde48fb0e02236e75fa5700b94c7306fc36599d57b563f4") - variant("tbb", default=False, description="Build with Intel TBB") variant( "pic", 
default=True, @@ -48,21 +50,33 @@ class SuiteSparse(Package): description="Build with GraphBLAS (takes a long time to compile)", ) - depends_on("mpfr@4.0.0:", type=("build", "link"), when="@5.8.0:") - depends_on("gmp", type=("build", "link"), when="@5.8.0:") + # In @4.5.1. TBB support in SPQR seems to be broken as TBB-related linking + # flags do not seem to be used, which leads to linking errors on Linux. + # Support for TBB has been removed in version 5.11 + variant("tbb", default=False, description="Build with Intel TBB", when="@4.5.3:5.10") + depends_on("blas") depends_on("lapack") - depends_on("m4", type="build", when="@5.0.0:") - depends_on("cmake", when="+graphblas @5.2.0:", type="build") - - depends_on("metis@5.1.0", when="@4.5.1:") - # in @4.5.1. TBB support in SPQR seems to be broken as TBB-related linkng - # flags does not seem to be used, which leads to linking errors on Linux. - depends_on("tbb", when="@4.5.3:+tbb") - depends_on("cuda", when="+cuda") - patch("tbb_453.patch", when="@4.5.3:4.5.5+tbb") + depends_on("mpfr@4.0.0:", when="@5.8.0:") + depends_on("gmp", when="@5.8.0:") + depends_on("m4", type="build", when="@5.0.0:") + depends_on("cmake", when="+graphblas @5.2.0:", type="build") + depends_on("metis@5.1.0", when="@4.5.1:") + + with when("+tbb"): + depends_on("tbb") + patch("tbb_453.patch", when="@4.5.3:4.5.5") + # The @2021.x versions of tbb dropped the task_scheduler_init.h header and + # related stuff (which have long been deprecated). This appears to be + # rather problematic for suite-sparse (see e.g. 
+ # https://github.com/DrTimothyAldenDavis/SuiteSparse/blob/master/SPQR/Source/spqr_parallel.cpp) + depends_on("intel-tbb@:2020 build_system=makefile", when="^intel-tbb") + conflicts( + "^intel-oneapi-tbb@2021:", + msg="suite-sparse needs task_scheduler_init.h dropped in recent tbb libs", + ) # This patch removes unsupported flags for pgi compiler patch("pgi.patch", when="%pgi") @@ -81,22 +95,6 @@ class SuiteSparse(Package): "%gcc@:4.8", when="@5.2.0:", msg="gcc version must be at least 4.9 for suite-sparse@5.2.0:" ) - # The @2021.x versions of tbb dropped the task_scheduler_init.h header and - # related stuff (which have long been deprecated). This appears to be - # rather problematic for suite-sparse (see e.g. - # https://github.com/DrTimothyAldenDavis/SuiteSparse/blob/master/SPQR/Source/spqr_parallel.cpp) - # Have Spack complain if +tbb and trying to use a 2021.x version of tbb - conflicts( - "+tbb", - when="^intel-oneapi-tbb@2021:", - msg="suite-sparse needs task_scheduler_init.h dropped in " "recent tbb libs", - ) - conflicts( - "+tbb", - when="^intel-tbb@2021:", - msg="suite-sparse needs task_scheduler_init.h dropped in " "recent tbb libs", - ) - def symbol_suffix_blas(self, spec, args): """When using BLAS with a special symbol suffix we use defines to replace blas symbols, e.g. 
dgemm_ becomes dgemm_64_ when diff --git a/var/spack/repos/builtin/packages/sundials/package.py b/var/spack/repos/builtin/packages/sundials/package.py index fe1bb0bef5d..d345bb793b4 100644 --- a/var/spack/repos/builtin/packages/sundials/package.py +++ b/var/spack/repos/builtin/packages/sundials/package.py @@ -27,6 +27,8 @@ class Sundials(CMakePackage, CudaPackage, ROCmPackage): # Versions # ========================================================================== version("develop", branch="develop") + version("6.4.1", sha256="7bf10a8d2920591af3fba2db92548e91ad60eb7241ab23350a9b1bc51e05e8d0") + version("6.4.0", sha256="0aff803a12c6d298d05b56839197dd09858631864017e255ed89e28b49b652f1") version("6.3.0", sha256="89a22bea820ff250aa7239f634ab07fa34efe1d2dcfde29cc8d3af11455ba2a7") version("6.2.0", sha256="195d5593772fc483f63f08794d79e4bab30c2ec58e6ce4b0fb6bcc0e0c48f31d") version("6.1.1", sha256="cfaf637b792c330396a25ef787eb59d58726c35918ebbc08e33466e45d50470c") @@ -122,9 +124,17 @@ class Sundials(CMakePackage, CudaPackage, ROCmPackage): when="@6.0.0: +profiling", description="Enable Caliper instrumentation/profiling", ) + variant("ginkgo", default=False, when="@6.4.0:", description="Enable Ginkgo interfaces") variant("hypre", default=False, when="@2.7.0:", description="Enable Hypre MPI parallel vector") - variant("lapack", default=False, description="Enable LAPACK direct solvers") + variant("kokkos", default=False, when="@6.4.0:", description="Enable Kokkos vector") + variant( + "kokkos-kernels", + default=False, + when="@6.4.0:", + description="Enable KokkosKernels based matrix and linear solver", + ) variant("klu", default=False, description="Enable KLU sparse, direct solver") + variant("lapack", default=False, description="Enable LAPACK direct solvers") variant("petsc", default=False, when="@2.7.0:", description="Enable PETSc interfaces") variant("magma", default=False, when="@5.7.0:", description="Enable MAGMA interface") variant("superlu-mt", default=False, 
description="Enable SuperLU_MT sparse, direct solver") @@ -205,6 +215,7 @@ class Sundials(CMakePackage, CudaPackage, ROCmPackage): # Build dependencies depends_on("cmake@3.12:", type="build") + depends_on("cmake@3.18:", when="+cuda", type="build") # MPI related dependencies depends_on("mpi", when="+mpi") @@ -219,6 +230,23 @@ class Sundials(CMakePackage, CudaPackage, ROCmPackage): # External libraries depends_on("caliper", when="+caliper") + depends_on("ginkgo@1.5.0:", when="+ginkgo") + depends_on("kokkos", when="+kokkos") + depends_on("kokkos-kernels", when="+kokkos-kernels") + for cuda_arch in CudaPackage.cuda_arch_values: + depends_on( + "kokkos+cuda+cuda_lambda+cuda_constexpr cuda_arch=%s" % cuda_arch, + when="+kokkos +cuda cuda_arch=%s" % cuda_arch, + ) + depends_on( + "kokkos-kernels+cuda cuda_arch=%s" % cuda_arch, + when="+kokkos-kernels +cuda cuda_arch=%s" % cuda_arch, + ) + for rocm_arch in ROCmPackage.amdgpu_targets: + depends_on( + "kokkos+rocm amdgpu_target=%s" % rocm_arch, + when="+kokkos +rocm amdgpu_target=%s" % rocm_arch, + ) depends_on("lapack", when="+lapack") depends_on("hypre+mpi~int64", when="@5.7.1: +hypre ~int64") depends_on("hypre+mpi+int64", when="@5.7.1: +hypre +int64") @@ -228,7 +256,8 @@ class Sundials(CMakePackage, CudaPackage, ROCmPackage): depends_on("petsc+mpi", when="+petsc") depends_on("suite-sparse", when="+klu") depends_on("superlu-dist@6.1.1:", when="@:5.4.0 +superlu-dist") - depends_on("superlu-dist@6.3.0:", when="@5.5.0: +superlu-dist") + depends_on("superlu-dist@6.3.0:", when="@5.5.0:6.3 +superlu-dist") + depends_on("superlu-dist@7:", when="@6.4: +superlu-dist") depends_on("trilinos+tpetra", when="+trilinos") # Require that external libraries built with the same precision @@ -337,6 +366,8 @@ def cmake_args(self): from_variant("SUPERLUMT_ENABLE", "superlu-mt"), from_variant("SUPERLUDIST_ENABLE", "superlu-dist"), from_variant("Trilinos_ENABLE", "trilinos"), + from_variant("ENABLE_KOKKOS", "kokkos"), + 
from_variant("ENABLE_KOKKOS_KERNELS", "kokkos-kernels"), from_variant("EXAMPLES_INSTALL", "examples-install"), ] ) @@ -367,6 +398,25 @@ def cmake_args(self): ] ) + # Building with Ginkgo + if "+ginkgo" in spec: + gko_backends = ["REF"] + if "+openmp" in spec["ginkgo"] and "+openmp" in spec: + gko_backends.append("OMP") + if "+cuda" in spec["ginkgo"] and "+cuda" in spec: + gko_backends.append("CUDA") + if "+rocm" in spec["ginkgo"] and "+rocm" in spec: + gko_backends.append("HIP") + if "+oneapi" in spec["ginkgo"] and "+sycl" in spec: + gko_backends.append("DPCPP") + args.extend( + [ + from_variant("ENABLE_GINKGO", "ginkgo"), + define("Ginkgo_DIR", spec["ginkgo"].prefix), + define("SUNDIALS_GINKGO_BACKENDS", ";".join(gko_backends)), + ] + ) + # Building with Hypre if "+hypre" in spec: args.extend( @@ -379,6 +429,12 @@ def cmake_args(self): hypre_libs = spec["blas"].libs + spec["lapack"].libs args.extend([define("HYPRE_LIBRARIES", hypre_libs.joined(";"))]) + # Building with Kokkos and KokkosKernels + if "+kokkos" in spec: + args.extend([define("Kokkos_DIR", spec["kokkos"].prefix)]) + if "+kokkos-kernels" in spec: + args.extend([define("KokkosKernels_DIR", spec["kokkos-kernels"].prefix)]) + # Building with KLU if "+klu" in spec: args.extend( @@ -404,6 +460,9 @@ def cmake_args(self): if "+petsc" in spec: if spec.version >= Version("5"): args.append(define("PETSC_DIR", spec["petsc"].prefix)) + if "+kokkos" in spec["petsc"]: + args.append(define("Kokkos_DIR", spec["kokkos"].prefix)) + args.append(define("KokkosKernels_DIR", spec["kokkos-kernels"].prefix)) else: args.extend( [ @@ -438,15 +497,22 @@ def cmake_args(self): # Building with SuperLU_DIST if "+superlu-dist" in spec: - args.extend( - [ - define("OPENMP_ENABLE", "^superlu-dist+openmp" in spec), - define("SUPERLUDIST_INCLUDE_DIR", spec["superlu-dist"].prefix.include), - define("SUPERLUDIST_LIBRARY_DIR", spec["superlu-dist"].prefix.lib), - define("SUPERLUDIST_LIBRARIES", spec["blas"].libs), - 
define("SUPERLUDIST_OpenMP", "^superlu-dist+openmp" in spec), - ] - ) + if spec.satisfies("@6.4.0:"): + args.extend( + [ + define("SUPERLUDIST_DIR", spec["superlu-dist"].prefix), + define("SUPERLUDIST_OpenMP", "^superlu-dist+openmp" in spec), + ] + ) + else: + args.extend( + [ + define("SUPERLUDIST_INCLUDE_DIR", spec["superlu-dist"].prefix.include), + define("SUPERLUDIST_LIBRARY_DIR", spec["superlu-dist"].prefix.lib), + define("SUPERLUDIST_LIBRARIES", spec["blas"].libs), + define("SUPERLUDIST_OpenMP", "^superlu-dist+openmp" in spec), + ] + ) # Building with Trilinos if "+trilinos" in spec: diff --git a/var/spack/repos/builtin/packages/superlu-dist/package.py b/var/spack/repos/builtin/packages/superlu-dist/package.py index 21a5da15421..d429da8ce92 100644 --- a/var/spack/repos/builtin/packages/superlu-dist/package.py +++ b/var/spack/repos/builtin/packages/superlu-dist/package.py @@ -122,12 +122,15 @@ def append_from_variant(*args): if cuda_arch[0] != "none": append_define("CMAKE_CUDA_ARCHITECTURES", cuda_arch[0]) - if "+rocm" in spec and spec.satisfies("@amd"): + if "+rocm" in spec and (spec.satisfies("@amd") or spec.satisfies("@8:")): append_define("TPL_ENABLE_HIPLIB", True) append_define("HIP_ROOT_DIR", spec["hip"].prefix) rocm_archs = spec.variants["amdgpu_target"].value + mpiinc = spec["mpi"].prefix.include if "none" not in rocm_archs: - append_define("HIP_HIPCC_FLAGS", "--amdgpu-target=" + ",".join(rocm_archs)) + append_define( + "HIP_HIPCC_FLAGS", "--amdgpu-target=" + ",".join(rocm_archs) + " -I/" + mpiinc + ) append_from_variant("BUILD_SHARED_LIBS", "shared") return cmake_args diff --git a/var/spack/repos/builtin/packages/superlu/package.py b/var/spack/repos/builtin/packages/superlu/package.py index 8f20f90ea45..e8bd7c38f6e 100644 --- a/var/spack/repos/builtin/packages/superlu/package.py +++ b/var/spack/repos/builtin/packages/superlu/package.py @@ -2,15 +2,16 @@ # Spack Project Developers. See the top-level COPYRIGHT file for details. 
# # SPDX-License-Identifier: (Apache-2.0 OR MIT) - import os from llnl.util import tty +import spack.build_systems.cmake +import spack.build_systems.generic from spack.package import * -class Superlu(CMakePackage): +class Superlu(CMakePackage, Package): """SuperLU is a general purpose library for the direct solution of large, sparse, nonsymmetric systems of linear equations on high performance machines. SuperLU is designed for sequential machines.""" @@ -36,9 +37,12 @@ class Superlu(CMakePackage): url="https://crd-legacy.lbl.gov/~xiaoye/SuperLU/superlu_4.2.tar.gz", ) + build_system( + conditional("cmake", when="@5:"), conditional("autotools", when="@:4"), default="cmake" + ) + variant("pic", default=True, description="Build with position independent code") - depends_on("cmake", when="@5:", type="build") depends_on("blas") conflicts( "@:5.2.1", @@ -48,76 +52,7 @@ class Superlu(CMakePackage): test_requires_compiler = True - # CMake installation method - def cmake_args(self): - if self.version > Version("5.2.1"): - _blaslib_key = "enable_internal_blaslib" - else: - _blaslib_key = "enable_blaslib" - args = [ - self.define(_blaslib_key, False), - self.define("CMAKE_INSTALL_LIBDIR", self.prefix.lib), - self.define_from_variant("CMAKE_POSITION_INDEPENDENT_CODE", "pic"), - self.define("enable_tests", self.run_tests), - ] - - return args - # Pre-cmake installation method - @when("@:4") - def cmake(self, spec, prefix): - """Use autotools before version 5""" - config = [] - - # Define make.inc file - config.extend( - [ - "PLAT = _x86_64", - "SuperLUroot = %s" % self.stage.source_path, - # 'SUPERLULIB = $(SuperLUroot)/lib/libsuperlu$(PLAT).a', - "SUPERLULIB = $(SuperLUroot)/lib/libsuperlu_{0}.a".format(self.spec.version), - "BLASDEF = -DUSE_VENDOR_BLAS", - "BLASLIB = {0}".format(spec["blas"].libs.ld_flags), - # or BLASLIB = -L/usr/lib64 -lblas - "TMGLIB = libtmglib.a", - "LIBS = $(SUPERLULIB) $(BLASLIB)", - "ARCH = ar", - "ARCHFLAGS = cr", - "RANLIB = {0}".format("ranlib" if 
which("ranlib") else "echo"), - "CC = {0}".format(env["CC"]), - "FORTRAN = {0}".format(env["FC"]), - "LOADER = {0}".format(env["CC"]), - "CDEFS = -DAdd_", - ] - ) - - if "+pic" in spec: - config.extend( - [ - # Use these lines instead when pic_flag capability arrives - "CFLAGS = -O3 {0}".format(self.compiler.cc_pic_flag), - "NOOPTS = {0}".format(self.compiler.cc_pic_flag), - "FFLAGS = -O2 {0}".format(self.compiler.f77_pic_flag), - "LOADOPTS = {0}".format(self.compiler.cc_pic_flag), - ] - ) - else: - config.extend( - ["CFLAGS = -O3", "NOOPTS = ", "FFLAGS = -O2", "LOADOPTS = "] - ) - - # Write configuration options to make.inc file - with open("make.inc", "w") as inc: - for option in config: - inc.write("{0}\n".format(option)) - - make(parallel=False) - - # Install manually - install_tree("lib", prefix.lib) - mkdir(prefix.include) - install(join_path("SRC", "*.h"), prefix.include) - examples_src_dir = "EXAMPLE" make_hdr_file = "make.inc" @@ -221,3 +156,67 @@ def test(self): return self.run_superlu_test(test_dir, exe, args) + + +class CMakeBuilder(spack.build_systems.cmake.CMakeBuilder): + def cmake_args(self): + if self.pkg.version > Version("5.2.1"): + _blaslib_key = "enable_internal_blaslib" + else: + _blaslib_key = "enable_blaslib" + args = [ + self.define(_blaslib_key, False), + self.define("CMAKE_INSTALL_LIBDIR", self.prefix.lib), + self.define_from_variant("CMAKE_POSITION_INDEPENDENT_CODE", "pic"), + self.define("enable_tests", self.pkg.run_tests), + ] + return args + + +class GenericBuilder(spack.build_systems.generic.GenericBuilder): + def install(self, pkg, spec, prefix): + """Use autotools before version 5""" + # Define make.inc file + config = [ + "PLAT = _x86_64", + "SuperLUroot = %s" % self.pkg.stage.source_path, + # 'SUPERLULIB = $(SuperLUroot)/lib/libsuperlu$(PLAT).a', + "SUPERLULIB = $(SuperLUroot)/lib/libsuperlu_{0}.a".format(self.pkg.spec.version), + "BLASDEF = -DUSE_VENDOR_BLAS", + "BLASLIB = {0}".format(spec["blas"].libs.ld_flags), + # or BLASLIB 
= -L/usr/lib64 -lblas + "TMGLIB = libtmglib.a", + "LIBS = $(SUPERLULIB) $(BLASLIB)", + "ARCH = ar", + "ARCHFLAGS = cr", + "RANLIB = {0}".format("ranlib" if which("ranlib") else "echo"), + "CC = {0}".format(env["CC"]), + "FORTRAN = {0}".format(env["FC"]), + "LOADER = {0}".format(env["CC"]), + "CDEFS = -DAdd_", + ] + + if "+pic" in spec: + config.extend( + [ + # Use these lines instead when pic_flag capability arrives + "CFLAGS = -O3 {0}".format(self.pkg.compiler.cc_pic_flag), + "NOOPTS = {0}".format(self.pkg.compiler.cc_pic_flag), + "FFLAGS = -O2 {0}".format(self.pkg.compiler.f77_pic_flag), + "LOADOPTS = {0}".format(self.pkg.compiler.cc_pic_flag), + ] + ) + else: + config.extend( + ["CFLAGS = -O3", "NOOPTS = ", "FFLAGS = -O2", "LOADOPTS = "] + ) + + with open("make.inc", "w") as inc: + for option in config: + inc.write("{0}\n".format(option)) + + make(parallel=False) + + install_tree("lib", prefix.lib) + mkdir(prefix.include) + install(join_path("SRC", "*.h"), prefix.include) diff --git a/var/spack/repos/builtin/packages/swig/package.py b/var/spack/repos/builtin/packages/swig/package.py index d1a760ffec6..2bb9853526e 100644 --- a/var/spack/repos/builtin/packages/swig/package.py +++ b/var/spack/repos/builtin/packages/swig/package.py @@ -2,10 +2,10 @@ # Spack Project Developers. See the top-level COPYRIGHT file for details. 
# # SPDX-License-Identifier: (Apache-2.0 OR MIT) - import os import re +import spack.build_systems.autotools from spack.package import * @@ -57,10 +57,10 @@ class Swig(AutotoolsPackage, SourceforgePackage): depends_on("pcre") - _autoconf_versions = ["@master", "@fortran", "@4.0.2-fortran", "@4.1.dev1-fortran"] + AUTOCONF_VERSIONS = ["@master", "@fortran", "@4.0.2-fortran", "@4.1.dev1-fortran"] # Git releases do *not* include configure script - for _version in _autoconf_versions: + for _version in AUTOCONF_VERSIONS: depends_on("autoconf", type="build", when=_version) depends_on("automake", type="build", when=_version) depends_on("libtool", type="build", when=_version) @@ -70,8 +70,6 @@ class Swig(AutotoolsPackage, SourceforgePackage): depends_on("automake@1.15:", type="build", when="target={0}:".format(_target)) depends_on("pkgconfig", type="build") - build_directory = "spack-build" - conflicts("%nvhpc", when="@:4.0.2") @classmethod @@ -80,18 +78,6 @@ def determine_version(cls, exe): match = re.search(r"SWIG\s+Version\s+(\S+)", output) return match.group(1) if match else None - @run_after("install") - def create_symlink(self): - # CMake compatibility: see https://github.com/spack/spack/pull/6240 - with working_dir(self.prefix.bin): - os.symlink("swig", "swig{0}".format(self.spec.version.up_to(2))) - - for _version in _autoconf_versions: - - @when(_version) - def autoreconf(self, spec, prefix): - which("sh")("./autogen.sh") - @property def _installed_exe(self): return join_path(self.prefix, "bin", "swig") @@ -134,3 +120,19 @@ def _test_swiglib(self): def test(self): self._test_version() self._test_swiglib() + + +class AutotoolsBuilder(spack.build_systems.autotools.AutotoolsBuilder): + build_directory = "spack-build" + + @run_after("install") + def create_symlink(self): + # CMake compatibility: see https://github.com/spack/spack/pull/6240 + with working_dir(self.prefix.bin): + os.symlink("swig", "swig{0}".format(self.spec.version.up_to(2))) + + for _version in 
Swig.AUTOCONF_VERSIONS: + + @when(_version) + def autoreconf(self, pkg, spec, prefix): + which("sh")("./autogen.sh") diff --git a/var/spack/repos/builtin/packages/sz-cpp/package.py b/var/spack/repos/builtin/packages/sz-cpp/package.py new file mode 100644 index 00000000000..34a5365a73f --- /dev/null +++ b/var/spack/repos/builtin/packages/sz-cpp/package.py @@ -0,0 +1,29 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class SzCpp(CMakePackage): + """Refactorization of SZ in cpp""" + + homepage = "https://github.com/robertu94/meta_compressor/" + git = "https://github.com/robertu94/meta_compressor/" + + maintainers = ["robertu94"] + + version("2022-01-27", commit="9441b79abc89d4bcce53fe18edf0df53fd92d1d7") + + variant("shared", description="build shared libs", default=True) + + depends_on("zstd") + depends_on("pkgconfig") + + def cmake_args(self): + args = [ + self.define_from_variant("BUILD_SHARED_LIBS", "shared"), + self.define("BUILD_TESTING", self.run_tests), + ] + return args diff --git a/var/spack/repos/builtin/packages/sz/package.py b/var/spack/repos/builtin/packages/sz/package.py index e16657a6b67..d83e0a52d50 100644 --- a/var/spack/repos/builtin/packages/sz/package.py +++ b/var/spack/repos/builtin/packages/sz/package.py @@ -2,11 +2,12 @@ # Spack Project Developers. See the top-level COPYRIGHT file for details. 
# # SPDX-License-Identifier: (Apache-2.0 OR MIT) - +import spack.build_systems.autotools +import spack.build_systems.cmake from spack.package import * -class Sz(CMakePackage): +class Sz(CMakePackage, AutotoolsPackage): """Error-bounded Lossy Compressor for HPC Data""" homepage = "https://szcompressor.org" @@ -43,6 +44,12 @@ class Sz(CMakePackage): version("1.4.10.0", sha256="cf23cf1ffd7c69c3d3128ae9c356b6acdc03a38f92c02db5d9bfc04f3fabc506") version("1.4.9.2", sha256="9dc785274d068d04c2836955fc93518a9797bfd409b46fea5733294b7c7c18f8") + build_system( + conditional("autotools", when="@:2.1.8.0"), + conditional("cmake", when="@2.1.8.1:"), + default="cmake", + ) + variant("python", default=False, description="builds the python wrapper") variant("netcdf", default=False, description="build the netcdf reader") variant("hdf5", default=False, description="build the hdf5 filter") @@ -71,86 +78,6 @@ class Sz(CMakePackage): patch("ctags-only-if-requested.patch", when="@2.1.8.1:2.1.8.3") - @property - def build_directory(self): - """autotools needs a different build directory to work""" - if self.version >= Version("2.1.8.1"): - return "spack-build" - else: - return "." 
- - @when("@:2.1.8.0") - def cmake(self, spec, prefix): - """use autotools before 2.1.8.1""" - configure_args = ["--prefix=" + prefix] - if "+fortran" in spec: - configure_args.append("--enable-fortran") - else: - configure_args.append("--disable-fortran") - configure(*configure_args) - # at least the v2.0.2.0 tarball contains object files - # which need to be cleaned out - make("clean") - - def cmake_args(self): - """configure the package with CMake for version 2.1.8.1 and later""" - args = [] - - if "+python" in self.spec: - args.append("-DBUILD_PYTHON_WRAPPER=ON") - args.append("-DSZ_PYTHON_SITELIB={0}".format(python_platlib)) - else: - args.append("-DBUILD_PYTHON_WRAPPER=OFF") - - if "+netcdf" in self.spec: - args.append("-DBUILD_NETCDF_READER=ON") - else: - args.append("-DBUILD_NETCDF_READER=OFF") - - if "+hdf5" in self.spec: - args.append("-DBUILD_HDF5_FILTER=ON") - else: - args.append("-DBUILD_HDF5_FILTER=OFF") - - if "+pastri" in self.spec: - args.append("-DBUILD_PASTRI=ON") - else: - args.append("-DBUILD_PASTRI=OFF") - - if "+time_compression" in self.spec: - args.append("-DBUILD_TIMECMPR=ON") - else: - args.append("-DBUILD_TIMECMPR=OFF") - - if "+random_access" in self.spec: - args.append("-DBUILD_RANDOMACCESS=ON") - else: - args.append("-DBUILD_RANDOMACCESS=OFF") - - if "+fortran" in self.spec: - args.append("-DBUILD_FORTRAN=ON") - else: - args.append("-DBUILD_FORTRAN=OFF") - - if "+shared" in self.spec: - args.append("-DBUILD_SHARED_LIBS=ON") - else: - args.append("-DBUILD_SHARED_LIBS=OFF") - - if "+stats" in self.spec: - args.append("-DBUILD_STATS=ON") - else: - args.append("-DBUILD_STATS=OFF") - - args.append(self.define("BUILD_TESTS", self.run_tests)) - - return args - - @run_after("build") - @on_package_attributes(run_tests=True) - def test_build(self): - make("test") - def _test_2d_float(self): """This test performs simple 2D compression/decompression (float)""" test_data_dir = self.test_suite.current_test_data_dir @@ -232,3 +159,37 @@ def 
test(self): self._test_2d_float() # run 3D compression and decompression (float) + self._test_3d_float() + + +class AutotoolsBuilder(spack.build_systems.autotools.AutotoolsBuilder): + build_directory = "." + + def configure_args(self): + return self.enable_or_disable("fortran") + + @run_before("build") + def make_clean(self): + # at least the v2.0.2.0 tarball contains object files + # which need to be cleaned out + make("clean") + + +class CMakeBuilder(spack.build_systems.cmake.CMakeBuilder): + def cmake_args(self): + result = [ + self.define_from_variant("BUILD_NETCDF_READER", "netcdf"), + self.define_from_variant("BUILD_HDF5_FILTER", "hdf5"), + self.define_from_variant("BUILD_PASTRI", "pastri"), + self.define_from_variant("BUILD_TIMECMPR", "time_compression"), + self.define_from_variant("BUILD_RANDOMACCESS", "random_access"), + self.define_from_variant("BUILD_FORTRAN", "fortran"), + self.define_from_variant("BUILD_SHARED_LIBS", "shared"), + self.define_from_variant("BUILD_STATS", "stats"), + self.define("BUILD_TESTS", self.pkg.run_tests), + self.define_from_variant("BUILD_PYTHON_WRAPPER", "python"), + ] + + if "+python" in self.spec: + result.append(self.define("SZ_PYTHON_SITELIB", python_platlib)) + + return result diff --git a/var/spack/repos/builtin/packages/sz3/package.py b/var/spack/repos/builtin/packages/sz3/package.py new file mode 100644 index 00000000000..80a8ff3b02b --- /dev/null +++ b/var/spack/repos/builtin/packages/sz3/package.py @@ -0,0 +1,31 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class Sz3(CMakePackage): + """SZ3 is the next generation of the SZ compressor framework""" + + homepage = "https://github.com/szcompressor/SZ3" + git = "https://github.com/szcompressor/SZ3" + + maintainers = ["disheng222"] + + version("master", branch="master") + version("3.1.5.4", commit="08df24b566e6d2e419cb95553aebf4a4902a8015") + version("3.1.5.1", commit="5736a63b917e439dd62248b4ff6234e96726af5d") + version("3.1.3.1", commit="323cb17b412d657c4be681b52c34beaf933fe7af") + version("3.1.3", commit="695dff8dc326f3b165f6676d810f46add088a585") + + depends_on("zstd") + depends_on("gsl") + depends_on("pkgconfig") + + def cmake_args(self): + return [ + "-DSZ3_USE_BUNDLED_ZSTD=OFF", + "-DSZ3_DEBUG_TIMINGS=OFF", + ] diff --git a/var/spack/repos/builtin/packages/szauto/package.py b/var/spack/repos/builtin/packages/szauto/package.py new file mode 100644 index 00000000000..744149d6e28 --- /dev/null +++ b/var/spack/repos/builtin/packages/szauto/package.py @@ -0,0 +1,23 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class Szauto(CMakePackage): + """SZauto: SZ C++ Version that Supports Second-Order Prediction and Parameter Optimization""" + + homepage = "https://github.com/szcompressor/SZauto" + url = "https://github.com/szcompressor/SZauto/releases/download/1.0.0/SZauto-1.0.0.tar.gz" + git = "https://github.com/szcompressor/szauto" + + maintainers = ["disheng222", "robertu94"] + + version("master", branch="master") + version("1.2.1", sha256="55c58f58df3a874f684ef864a9247907df0501e5598c089fd2d855ae0309b03a") + version("1.0.0", commit="03f3ab0312bd1de647e9d65746add73a0e8602d2") + + depends_on("zstd") + depends_on("pkgconfig") diff --git a/var/spack/repos/builtin/packages/tasmanian/package.py b/var/spack/repos/builtin/packages/tasmanian/package.py index 6296e59a08c..e86252e1035 100644 --- a/var/spack/repos/builtin/packages/tasmanian/package.py +++ b/var/spack/repos/builtin/packages/tasmanian/package.py @@ -15,7 +15,7 @@ class Tasmanian(CMakePackage, CudaPackage, ROCmPackage): ApproximatioN is a robust library for high dimensional integration and interpolation as well as parameter calibration.""" - homepage = "http://tasmanian.ornl.gov" + homepage = "https://ornl.github.io/TASMANIAN/stable/" url = "https://github.com/ORNL/TASMANIAN/archive/v7.5.tar.gz" git = "https://github.com/ORNL/TASMANIAN.git" @@ -24,6 +24,7 @@ class Tasmanian(CMakePackage, CudaPackage, ROCmPackage): version("develop", branch="master") + version("7.9", sha256="decba62e6bbccf1bc26c6e773a8d4fd51d7f3e3e534ddd386ec41300694ce5cc") version("7.7", sha256="85fb3a7b302ea21a3b700712767a59a623d9ab93da03308fa47d4413654c3878") version("7.5", sha256="d621bd36dced4db86ef638693ba89b336762e7a3d7fedb3b5bcefb03390712b3") version("7.3", sha256="5bd1dd89cc5c84506f6900b6569b17e50becd73eb31ec85cfa11d6f1f912c4fa") @@ -44,8 +45,7 @@ class Tasmanian(CMakePackage, CudaPackage, ROCmPackage): variant("xsdkflags", default=False, description="enable XSDK 
defaults for Tasmanian") - variant("openmp", default=True, description="add OpenMP support to Tasmanian") - # tested with OpenMP 3.1 (clang4) through 4.0-4.5 (gcc 5 - 8) + variant("openmp", default=False, description="add OpenMP support to Tasmanian") variant("blas", default=False, description="add BLAS support to Tasmanian") @@ -193,7 +193,43 @@ def test(self): # using the tests copied from /share/Tasmanian/testing cmake_dir = self.test_suite.current_test_cache_dir.testing - if not self.run_test(cmake_bin, options=[cmake_dir], purpose="Generate the Makefile"): + options = [ + cmake_dir, + ] + if "+rocm" in self.spec: + options.append( + "-DAMDDeviceLibs_DIR=" + + join_path(self.spec["llvm-amdgpu"].prefix, "lib", "cmake", "AMDDeviceLibs") + ) + options.append( + "-Damd_comgr_DIR=" + + join_path(self.spec["comgr"].prefix, "lib", "cmake", "amd_comgr") + ) + options.append( + "-Dhsa-runtime64_DIR=" + + join_path(self.spec["hsa-rocr-dev"].prefix, "lib", "cmake", "hsa-runtime64") + ) + options.append( + "-DHSA_HEADER=" + join_path(self.spec["hsa-rocr-dev"].prefix, "include") + ) + options.append( + "-DCMAKE_INCLUDE_PATH=" + + join_path(self.spec["hsa-rocr-dev"].prefix, "include", "hsa") + ) + options.append( + "-Drocblas_DIR=" + + join_path(self.spec["rocblas"].prefix, "lib", "cmake", "rocblas") + ) + options.append( + "-Drocsparse_DIR=" + + join_path(self.spec["rocsparse"].prefix, "lib", "cmake", "rocsparse") + ) + options.append( + "-Drocsolver_DIR=" + + join_path(self.spec["rocsolver"].prefix, "lib", "cmake", "rocsolver") + ) + + if not self.run_test(cmake_bin, options=options, purpose="Generate the Makefile"): tty.msg("Skipping tasmanian test: failed to generate Makefile") return diff --git a/var/spack/repos/builtin/packages/tassel/package.py b/var/spack/repos/builtin/packages/tassel/package.py index 1f058850594..346a7e2c26e 100644 --- a/var/spack/repos/builtin/packages/tassel/package.py +++ b/var/spack/repos/builtin/packages/tassel/package.py @@ -12,7 +12,9 @@ 
class Tassel(Package): homepage = "https://www.maizegenetics.net/tassel" git = "https://bitbucket.org/tasseladmin/tassel-5-standalone.git" + maintainers = ["snehring"] + version("5.2.86", commit="6557864512a89932710b9f53c6005a35ad6c526e") version("5.2.39", commit="ae96ae75c3c9a9e8026140b6c775fa4685bdf531") version( "3.0.174", diff --git a/var/spack/repos/builtin/packages/tau/package.py b/var/spack/repos/builtin/packages/tau/package.py index 08b4f869cd4..78e9229084a 100644 --- a/var/spack/repos/builtin/packages/tau/package.py +++ b/var/spack/repos/builtin/packages/tau/package.py @@ -26,6 +26,7 @@ class Tau(Package): tags = ["e4s"] version("master", branch="master") + version("2.32", sha256="fc8f5cdbdae999e98e9e97b0d8d66d282cb8bb41c19d5486d48a2d2d11b4b475") version("2.31.1", sha256="bf445b9d4fe40a5672a7b175044d2133791c4dfb36a214c1a55a931aebc06b9d") version("2.31", sha256="27e73c395dd2a42b91591ce4a76b88b1f67663ef13aa19ef4297c68f45d946c2") version("2.30.2", sha256="43f84a15b71a226f8a64d966f0cb46022bcfbaefb341295ecc6fa80bb82bbfb4") diff --git a/var/spack/repos/builtin/packages/timemory/package.py b/var/spack/repos/builtin/packages/timemory/package.py index 759a7437d18..f30e5787796 100644 --- a/var/spack/repos/builtin/packages/timemory/package.py +++ b/var/spack/repos/builtin/packages/timemory/package.py @@ -8,7 +8,7 @@ from spack.package import * -class Timemory(CMakePackage, PythonPackage): +class Timemory(CMakePackage, PythonExtension): """Modular profiling toolkit and suite of libraries and tools for C/C++/Fortran/CUDA/Python""" diff --git a/var/spack/repos/builtin/packages/tioga/package.py b/var/spack/repos/builtin/packages/tioga/package.py index 1374ad76fb0..2716d13dfaa 100644 --- a/var/spack/repos/builtin/packages/tioga/package.py +++ b/var/spack/repos/builtin/packages/tioga/package.py @@ -7,15 +7,17 @@ from spack.package import * -class Tioga(CMakePackage, CudaPackage): +class Tioga(CMakePackage): """Topology Independent Overset Grid Assembly (TIOGA)""" homepage 
= "https://github.com/jsitaraman/tioga" git = "https://github.com/jsitaraman/tioga.git" - maintainers = ["jsitaraman", "sayerhs"] + maintainers = ["jrood-nrel"] - version("develop", branch="exawind") + # The original TIOGA repo has possibly been abandoned, + # so work on TIOGA has continued in the Exawind project + version("develop", git="https://github.com/Exawind/tioga.git", branch="exawind") version("master", branch="master") variant("shared", default=sys.platform != "darwin", description="Build shared libraries") @@ -23,39 +25,16 @@ class Tioga(CMakePackage, CudaPackage): variant("nodegid", default=True, description="Enable support for global Node IDs") variant("timers", default=False, description="Enable timers") variant("stats", default=False, description="Enable output of holecut stats") - variant( - "cxxstd", default="11", values=("11", "14"), multi=False, description="C++ standard to use" - ) depends_on("mpi") - depends_on("cuda@9.0.0:", when="+cuda") - - # Tioga has the fortran module file problem with parallel builds - parallel = False def cmake_args(self): - spec = self.spec - args = [ self.define_from_variant("BUILD_SHARED_LIBS", "shared"), self.define_from_variant("CMAKE_POSITION_INDEPENDENT_CODE", "pic"), - self.define_from_variant("CMAKE_CXX_STANDARD", "cxxstd"), self.define_from_variant("TIOGA_HAS_NODEGID", "nodegid"), self.define_from_variant("TIOGA_ENABLE_TIMERS", "timers"), self.define_from_variant("TIOGA_OUTPUT_STATS", "stats"), - self.define_from_variant("TIOGA_ENABLE_CUDA", "cuda"), ] - if "+cuda" in self.spec: - args.append(self.define("CMAKE_CUDA_SEPARABLE_COMPILATION", True)) - - # Currently TIOGA only supports one device arch during specialization - cuda_arch = self.spec.variants["cuda_arch"].value - if cuda_arch: - arch_sorted = list(sorted(cuda_arch, reverse=True)) - args.append(self.define("TIOGA_CUDA_SM", arch_sorted[0])) - - if "darwin" in spec.architecture: - args.append(self.define("CMAKE_MACOSX_RPATH", True)) - return args diff 
--git a/var/spack/repos/builtin/packages/tower-agent/package.py b/var/spack/repos/builtin/packages/tower-agent/package.py index d5cd01507ca..e4ec5103ccc 100644 --- a/var/spack/repos/builtin/packages/tower-agent/package.py +++ b/var/spack/repos/builtin/packages/tower-agent/package.py @@ -15,9 +15,16 @@ class TowerAgent(Package): """ homepage = "https://github.com/seqeralabs/tower-agent" + maintainers = ["marcodelapierre"] if platform.machine() == "x86_64": if platform.system() == "Linux": + version( + "0.4.5", + sha256="d3f38931ff769299b9f9f7e78d9f6a55f93914878c09117b8eaf5decd0c734ec", + url="https://github.com/seqeralabs/tower-agent/releases/download/v0.4.5/tw-agent-linux-x86_64", + expand=False, + ) version( "0.4.3", sha256="1125e64d4e3342e77fcf7f6827f045e421084654fe8faafd5389e356e0613cc0", diff --git a/var/spack/repos/builtin/packages/tower-cli/package.py b/var/spack/repos/builtin/packages/tower-cli/package.py index a10c0b6d2db..ed878fe7b42 100644 --- a/var/spack/repos/builtin/packages/tower-cli/package.py +++ b/var/spack/repos/builtin/packages/tower-cli/package.py @@ -15,9 +15,22 @@ class TowerCli(Package): """ homepage = "https://github.com/seqeralabs/tower-cli" + maintainers = ["marcodelapierre"] if platform.machine() == "x86_64": if platform.system() == "Darwin": + version( + "0.7.0", + sha256="b1b3ade4231de2c7303832bac406510c9de171d07d6384a54945903f5123f772", + url="https://github.com/seqeralabs/tower-cli/releases/download/v0.7.0/tw-0.7.0-osx-x86_64", + expand=False, + ) + version( + "0.6.5", + sha256="8e7369611f3617bad3e76264d93fe467c6039c86af9f18e26142dee5df1e7346", + url="https://github.com/seqeralabs/tower-cli/releases/download/v0.6.5/tw-0.6.5-osx-x86_64", + expand=False, + ) version( "0.6.2", sha256="2bcc17687d58d4c888e8d57b7f2f769a2940afb3266dc3c6c48b0af0cb490d91", @@ -25,6 +38,18 @@ class TowerCli(Package): expand=False, ) elif platform.system() == "Linux": + version( + "0.7.0", + 
sha256="651f564b80585c9060639f1a8fc82966f81becb0ab3e3ba34e53baf3baabff39", + url="https://github.com/seqeralabs/tower-cli/releases/download/v0.7.0/tw-0.7.0-linux-x86_64", + expand=False, + ) + version( + "0.6.5", + sha256="0d1f3a6f53694000c1764bd3b40ce141f4b8923d477e2bdfdce75c66de95be00", + url="https://github.com/seqeralabs/tower-cli/releases/download/v0.6.5/tw-0.6.5-linux-x86_64", + expand=False, + ) version( "0.6.2", sha256="02c6d141416b046b6e8b6f9723331fe0e39d37faa3561c47c152df4d33b37e50", diff --git a/var/spack/repos/builtin/packages/tracy-client/package.py b/var/spack/repos/builtin/packages/tracy-client/package.py index 2c81c7ff4d3..796cc116e50 100644 --- a/var/spack/repos/builtin/packages/tracy-client/package.py +++ b/var/spack/repos/builtin/packages/tracy-client/package.py @@ -15,6 +15,7 @@ class TracyClient(CMakePackage): maintainers = ["msimberg"] version("master", git="https://github.com/wolfpld/tracy.git", branch="master") + version("0.9", sha256="93a91544e3d88f3bc4c405bad3dbc916ba951cdaadd5fcec1139af6fa56e6bfc") version( "0.8.2", sha256="4784eddd89c17a5fa030d408392992b3da3c503c872800e9d3746d985cfcc92a", diff --git a/var/spack/repos/builtin/packages/tracy/package.py b/var/spack/repos/builtin/packages/tracy/package.py index 50d2f6564c0..e55054f31f2 100644 --- a/var/spack/repos/builtin/packages/tracy/package.py +++ b/var/spack/repos/builtin/packages/tracy/package.py @@ -15,6 +15,7 @@ class Tracy(MakefilePackage): maintainers = ["msimberg"] version("master", git="https://github.com/wolfpld/tracy.git", branch="master") + version("0.9", sha256="93a91544e3d88f3bc4c405bad3dbc916ba951cdaadd5fcec1139af6fa56e6bfc") version( "0.8.2", sha256="4784eddd89c17a5fa030d408392992b3da3c503c872800e9d3746d985cfcc92a", diff --git a/var/spack/repos/builtin/packages/trilinos/package.py b/var/spack/repos/builtin/packages/trilinos/package.py index 80765939cc8..cf6f7307c84 100644 --- a/var/spack/repos/builtin/packages/trilinos/package.py +++ 
b/var/spack/repos/builtin/packages/trilinos/package.py @@ -33,7 +33,7 @@ class Trilinos(CMakePackage, CudaPackage, ROCmPackage): url = "https://github.com/trilinos/Trilinos/archive/refs/tags/trilinos-release-12-12-1.tar.gz" git = "https://github.com/trilinos/Trilinos.git" - maintainers = ["keitat", "sethrj", "kuberry"] + maintainers = ["keitat", "sethrj", "kuberry", "jwillenbring"] tags = ["e4s"] diff --git a/var/spack/repos/builtin/packages/udunits/package.py b/var/spack/repos/builtin/packages/udunits/package.py index ea59de0ef8e..dcce887f5d6 100644 --- a/var/spack/repos/builtin/packages/udunits/package.py +++ b/var/spack/repos/builtin/packages/udunits/package.py @@ -10,11 +10,26 @@ class Udunits(AutotoolsPackage): """Automated units conversion""" homepage = "https://www.unidata.ucar.edu/software/udunits" - url = "https://artifacts.unidata.ucar.edu/repository/downloads-udunits/udunits-2.2.28.tar.gz" + url = "https://artifacts.unidata.ucar.edu/repository/downloads-udunits/2.2.28/udunits-2.2.28.tar.gz" + # Unidata now only provides the latest version of each X.Y branch. + # Older 2.2 versions have been deprecated accordingly but are still + # available in the build cache. 
version("2.2.28", sha256="590baec83161a3fd62c00efa66f6113cec8a7c461e3f61a5182167e0cc5d579e") - version("2.2.24", sha256="20bac512f2656f056385429a0e44902fdf02fc7fe01c14d56f3c724336177f95") - version("2.2.23", sha256="b745ae10753fe82cdc7cc834e6ce471ca7c25ba2662e6ff93be147cb3d4fd380") - version("2.2.21", sha256="a154d1f8428c24e92723f9e50bdb5cc00827e3f5ff9cba64d33e5409f5c03455") + version( + "2.2.24", + sha256="20bac512f2656f056385429a0e44902fdf02fc7fe01c14d56f3c724336177f95", + deprecated=True, + ) + version( + "2.2.23", + sha256="b745ae10753fe82cdc7cc834e6ce471ca7c25ba2662e6ff93be147cb3d4fd380", + deprecated=True, + ) + version( + "2.2.21", + sha256="a154d1f8428c24e92723f9e50bdb5cc00827e3f5ff9cba64d33e5409f5c03455", + deprecated=True, + ) depends_on("expat") diff --git a/var/spack/repos/builtin/packages/ufs-utils/package.py b/var/spack/repos/builtin/packages/ufs-utils/package.py index 75ec90944fe..2d893b05e54 100644 --- a/var/spack/repos/builtin/packages/ufs-utils/package.py +++ b/var/spack/repos/builtin/packages/ufs-utils/package.py @@ -15,7 +15,12 @@ class UfsUtils(CMakePackage): homepage = "https://noaa-emcufs-utils.readthedocs.io/en/latest/" url = "https://github.com/NOAA-EMC/UFS_UTILS/archive/refs/tags/ufs_utils_1_6_0.tar.gz" - maintainers = ["t-brown", "edwardhartnett", "kgerheiser", "Hang-Lei-NOAA"] + maintainers = [ + "t-brown", + "edwardhartnett", + "AlexanderRichert-NOAA", + "Hang-Lei-NOAA", + ] version("1_6_0", sha256="829ba4b50162e4202f96ec92a65b9fa824f71db65d2b63b70822db07d061cd92") diff --git a/var/spack/repos/builtin/packages/umpire/package.py b/var/spack/repos/builtin/packages/umpire/package.py index 3916b7a2343..f24fb8d6605 100644 --- a/var/spack/repos/builtin/packages/umpire/package.py +++ b/var/spack/repos/builtin/packages/umpire/package.py @@ -54,6 +54,7 @@ class Umpire(CachedCMakePackage, CudaPackage, ROCmPackage): version("0.1.4", tag="v0.1.4", submodules=True) version("0.1.3", tag="v0.1.3", submodules=True) + patch("std-filesystem-pr784.patch", 
when="@2022.03.1 +rocm ^blt@0.5.2:") patch("camp_target_umpire_3.0.0.patch", when="@3.0.0") patch("cmake_version_check.patch", when="@4.1") patch("missing_header_for_numeric_limits.patch", when="@4.1:5.0.1") @@ -148,6 +149,9 @@ def initconfig_compiler_entries(self): spec = self.spec entries = super(Umpire, self).initconfig_compiler_entries() + if "+rocm" in spec: + entries.insert(0, cmake_cache_path("CMAKE_CXX_COMPILER", spec["hip"].hipcc)) + option_prefix = "UMPIRE_" if spec.satisfies("@2022.03.0:") else "" if "+fortran" in spec and self.compiler.fc is not None: diff --git a/var/spack/repos/builtin/packages/umpire/std-filesystem-pr784.patch b/var/spack/repos/builtin/packages/umpire/std-filesystem-pr784.patch new file mode 100644 index 00000000000..677144214d6 --- /dev/null +++ b/var/spack/repos/builtin/packages/umpire/std-filesystem-pr784.patch @@ -0,0 +1,33 @@ +diff -ruN spack-src/cmake/SetupCompilerFlags.cmake spack-src-patched/cmake/SetupCompilerFlags.cmake +--- spack-src/cmake/SetupCompilerFlags.cmake 2022-10-12 08:05:03.538390165 -0700 ++++ spack-src-patched/cmake/SetupCompilerFlags.cmake 2022-10-12 09:47:56.317645003 -0700 +@@ -11,20 +11,20 @@ + + message(STATUS "Checking for std::filesystem") + +-include(CheckCXXSourceCompiles) +-check_cxx_source_compiles( +- "#include +- #include +- +- int main(int, char**) +- { ++blt_check_code_compiles(CODE_COMPILES UMPIRE_ENABLE_FILESYSTEM ++ SOURCE_STRING ++[=[ ++#include ++#include + ++int main(int, char**) ++{ + auto path = std::filesystem::path(\".\"); + (void)(path); + + return 0; +- }" +- UMPIRE_ENABLE_FILESYSTEM) ++} ++]=]) + + if (UMPIRE_ENABLE_FILESYSTEM) + message(STATUS "std::filesystem found") diff --git a/var/spack/repos/builtin/packages/uncrustify/package.py b/var/spack/repos/builtin/packages/uncrustify/package.py index a9148c01e5a..b528302ff61 100644 --- a/var/spack/repos/builtin/packages/uncrustify/package.py +++ b/var/spack/repos/builtin/packages/uncrustify/package.py @@ -6,7 +6,7 @@ from spack.package 
import * -class Uncrustify(Package): +class Uncrustify(CMakePackage, AutotoolsPackage): """Source Code Beautifier for C, C++, C#, ObjectiveC, Java, and others.""" homepage = "http://uncrustify.sourceforge.net/" @@ -31,28 +31,15 @@ class Uncrustify(Package): version("0.62", commit="5987f2") version("0.61", sha256="1df0e5a2716e256f0a4993db12f23d10195b3030326fdf2e07f8e6421e172df9") - depends_on("cmake", type="build", when="@0.64:") - depends_on("automake", type="build", when="@0.63") - depends_on("autoconf", type="build", when="@0.63") + build_system( + conditional("cmake", when="@0.64:"), + conditional("autotools", when="@:0.63"), + default="cmake", + ) - @when("@0.64:") - def install(self, spec, prefix): - with working_dir("spack-build", create=True): - cmake("..", *std_cmake_args) - make() - make("install") - - @when("@0.63") - def install(self, spec, prefix): - which("bash")("autogen.sh") - configure("--prefix={0}".format(self.prefix)) - make() - make("install") - - @when("@:0.62") - def install(self, spec, prefix): - configure("--prefix={0}".format(self.prefix)) - make() - make("install") + with when("build_system=autotools"): + depends_on("automake", type="build") + depends_on("autoconf", type="build") + depends_on("libtool", type="build", when="@0.63") patch("uncrustify-includes.patch", when="@0.73") diff --git a/var/spack/repos/builtin/packages/unifyfs/package.py b/var/spack/repos/builtin/packages/unifyfs/package.py index 3d58448a5ee..fae9eba74c8 100644 --- a/var/spack/repos/builtin/packages/unifyfs/package.py +++ b/var/spack/repos/builtin/packages/unifyfs/package.py @@ -98,6 +98,9 @@ def flag_handler(self, name, flags): if "-g" in flags: self.debug_build = True if name == "cflags": + if self.spec.satisfies("%oneapi@2022.2.0:"): + flags.append("-Wno-error=deprecated-non-prototype") + flags.append("-Wno-error=unused-function") if self.spec.satisfies("%gcc@4"): flags.append("-std=gnu99") return (None, None, flags) diff --git 
a/var/spack/repos/builtin/packages/upcxx/package.py b/var/spack/repos/builtin/packages/upcxx/package.py index ce6081a1ae9..04841436212 100644 --- a/var/spack/repos/builtin/packages/upcxx/package.py +++ b/var/spack/repos/builtin/packages/upcxx/package.py @@ -42,11 +42,12 @@ class Upcxx(Package, CudaPackage, ROCmPackage): url = "https://bitbucket.org/berkeleylab/upcxx/downloads/upcxx-2021.3.0.tar.gz" git = "https://bitbucket.org/berkeleylab/upcxx.git" - tags = ["e4s"] + tags = ["e4s", "ecp"] version("develop", branch="develop") version("master", branch="master") + version("2022.9.0", sha256="dbf15fd9ba38bfe2491f556b55640343d6303048a117c4e84877ceddb64e4c7c") version("2022.3.0", sha256="72bccfc9dfab5c2351ee964232b3754957ecfdbe6b4de640e1b1387d45019496") version("2021.9.0", sha256="9299e17602bcc8c05542cdc339897a9c2dba5b5c3838d6ef2df7a02250f42177") version("2021.3.0", sha256="3433714cd4162ffd8aad9a727c12dbf1c207b7d6664879fc41259a4b351595b7") @@ -89,7 +90,8 @@ class Upcxx(Package, CudaPackage, ROCmPackage): # UPC++ always relies on GASNet-EX. # The default (and recommendation) is to use the implicit, embedded version. - # This variant allows overriding with a particular version of GASNet-EX sources. + # This variant allows overriding with a particular version of GASNet-EX sources, + # although this is not officially supported and some combinations might be rejected. 
variant("gasnet", default=False, description="Override embedded GASNet-EX version") depends_on("gasnet conduits=none", when="+gasnet") @@ -148,6 +150,7 @@ def install(self, spec, prefix): real_cc = join_path(env["CRAYPE_DIR"], "bin", "cc") real_cxx = join_path(env["CRAYPE_DIR"], "bin", "CC") # workaround a bug in the UPC++ installer: (issue #346) + # this can be removed once the floor version reaches 2020.10.0 env["GASNET_CONFIGURE_ARGS"] += " --with-cc=" + real_cc + " --with-cxx=" + real_cxx if "+mpi" in spec: env["GASNET_CONFIGURE_ARGS"] += " --with-mpicc=" + real_cc @@ -169,6 +172,7 @@ def install(self, spec, prefix): provider = "verbs;ofi_rxm" # Append the recommended options for Cray Shasta + # This list can be pruned once the floor version reaches 2022.9.0 options.append("--with-pmi-version=cray") options.append("--with-pmi-runcmd='srun -n %N -- %C'") options.append("--disable-ibv") diff --git a/var/spack/repos/builtin/packages/upp/package.py b/var/spack/repos/builtin/packages/upp/package.py index e339cbd5948..2f187a51e8d 100644 --- a/var/spack/repos/builtin/packages/upp/package.py +++ b/var/spack/repos/builtin/packages/upp/package.py @@ -17,7 +17,7 @@ class Upp(CMakePackage): git = "https://github.com/NOAA-EMC/UPP.git" url = "https://github.com/NOAA-EMC/UPP/archive/refs/tags/upp_v10.0.10.tar.gz" - maintainers = ["kgerheiser", "edwardhartnett", "Hang-Lei-NOAA"] + maintainers = ["AlexanderRichert-NOAA", "edwardhartnett", "Hang-Lei-NOAA"] version("10.0.10", sha256="0c96a88d0e79b554d5fcee9401efcf4d6273da01d15e3413845274f73d70b66e") version("10.0.9", tag="upp_v10.0.9", submodules=True) diff --git a/var/spack/repos/builtin/packages/utf8proc/package.py b/var/spack/repos/builtin/packages/utf8proc/package.py index ae812893b68..3ef8e53863a 100644 --- a/var/spack/repos/builtin/packages/utf8proc/package.py +++ b/var/spack/repos/builtin/packages/utf8proc/package.py @@ -13,6 +13,7 @@ class Utf8proc(CMakePackage): homepage = "https://juliastrings.github.io/utf8proc/" url = 
"https://github.com/JuliaStrings/utf8proc/archive/v2.4.0.tar.gz" + version("2.8.0", sha256="a0a60a79fe6f6d54e7d411facbfcc867a6e198608f2cd992490e46f04b1bcecc") version("2.7.0", sha256="4bb121e297293c0fd55f08f83afab6d35d48f0af4ecc07523ad8ec99aa2b12a1") version("2.6.1", sha256="4c06a9dc4017e8a2438ef80ee371d45868bda2237a98b26554de7a95406b283b") version("2.6.0", sha256="b36ce1534b8035e7febd95c031215ed279ee9d31cf9b464e28b4c688133b22c5") diff --git a/var/spack/repos/builtin/packages/util-linux-uuid/package.py b/var/spack/repos/builtin/packages/util-linux-uuid/package.py index c9a1c5de96c..ef621f9e834 100644 --- a/var/spack/repos/builtin/packages/util-linux-uuid/package.py +++ b/var/spack/repos/builtin/packages/util-linux-uuid/package.py @@ -9,11 +9,12 @@ class UtilLinuxUuid(AutotoolsPackage): """Util-linux is a suite of essential utilities for any Linux system.""" - homepage = "https://github.com/karelzak/util-linux" + homepage = "https://github.com/util-linux/util-linux" url = "https://www.kernel.org/pub/linux/utils/util-linux/v2.29/util-linux-2.29.2.tar.gz" list_url = "https://www.kernel.org/pub/linux/utils/util-linux" list_depth = 1 + version("2.38.1", sha256="0820eb8eea90408047e3715424bc6be771417047f683950fecb4bdd2e2cbbc6e") version("2.37.4", sha256="c8b7b4fa541f974cc32c1c6559d9bfca33651020a456ad6ee5fc9b0cacd00151") version("2.36.2", sha256="f5dbe79057e7d68e1a46fc04083fc558b26a49499b1b3f50e4f4893150970463") version("2.36", sha256="82942cd877a989f6d12d4ce2c757fb67ec53d8c5cd9af0537141ec5f84a2eea3") diff --git a/var/spack/repos/builtin/packages/util-linux/package.py b/var/spack/repos/builtin/packages/util-linux/package.py index fa2b1ff2c9f..bc703913292 100644 --- a/var/spack/repos/builtin/packages/util-linux/package.py +++ b/var/spack/repos/builtin/packages/util-linux/package.py @@ -16,6 +16,7 @@ class UtilLinux(AutotoolsPackage): list_url = "https://www.kernel.org/pub/linux/utils/util-linux" list_depth = 1 + version("2.38.1", 
sha256="0820eb8eea90408047e3715424bc6be771417047f683950fecb4bdd2e2cbbc6e") version("2.38", sha256="c31d4e54f30b56b0f7ec8b342658c07de81378f2c067941c2b886da356f8ad42") version("2.37.4", sha256="c8b7b4fa541f974cc32c1c6559d9bfca33651020a456ad6ee5fc9b0cacd00151") version("2.37.2", sha256="15db966474e459b33fa390a6b892190a92079a73ca45384cde4c86e6ed265a86") diff --git a/var/spack/repos/builtin/packages/vasp/package.py b/var/spack/repos/builtin/packages/vasp/package.py index 1706aa28439..eac1f0431ed 100644 --- a/var/spack/repos/builtin/packages/vasp/package.py +++ b/var/spack/repos/builtin/packages/vasp/package.py @@ -46,6 +46,8 @@ class Vasp(MakefilePackage): "https://github.com/henniggroup/VASPsol", ) + variant("shmem", default=True, description="Enable use_shmem build flag") + depends_on("rsync", type="build") depends_on("blas") depends_on("lapack") @@ -151,8 +153,11 @@ def setup_build_environment(self, spack_env): "-Davoidalloc", "-Duse_bse_te", "-Dtbdyn", - "-Duse_shmem", ] + + if "+shmem" in spec: + cpp_options.append("-Duse_shmem") + if "%nvhpc" in self.spec: cpp_options.extend(['-DHOST=\\"LinuxPGI\\"', "-DPGI16", "-Dqd_emulate"]) elif "%aocc" in self.spec: diff --git a/var/spack/repos/builtin/packages/vsearch/package.py b/var/spack/repos/builtin/packages/vsearch/package.py index f975bf9e5ab..18e3b9f6895 100644 --- a/var/spack/repos/builtin/packages/vsearch/package.py +++ b/var/spack/repos/builtin/packages/vsearch/package.py @@ -11,7 +11,9 @@ class Vsearch(AutotoolsPackage): homepage = "https://github.com/torognes/vsearch" url = "https://github.com/torognes/vsearch/archive/v2.4.3.tar.gz" + maintainers = ["snehring"] + version("2.22.1", sha256="c62bf69e7cc3d011a12e3b522ba8c0c91fb90deea782359e9569677d0c991778") version("2.14.1", sha256="388529a39eb0618a09047bf91e0a8ae8c9fd851a05f8d975e299331748f97741") version("2.13.3", sha256="e5f34ece28b76403d3ba4a673eca41178fe399c35a1023dbc87d0c0da5efaa52") version("2.4.3", 
sha256="f7ffc2aec5d76bdaf1ffe7fb733102138214cec3e3846eb225455dcc3c088141") @@ -19,4 +21,6 @@ class Vsearch(AutotoolsPackage): depends_on("m4", type="build") depends_on("autoconf", type="build") depends_on("automake", type="build") + depends_on("bzip2") depends_on("libtool", type="build") + depends_on("zlib") diff --git a/var/spack/repos/builtin/packages/vtk-h/package.py b/var/spack/repos/builtin/packages/vtk-h/package.py index c1c0b3ace3b..7671426434c 100644 --- a/var/spack/repos/builtin/packages/vtk-h/package.py +++ b/var/spack/repos/builtin/packages/vtk-h/package.py @@ -179,6 +179,23 @@ def hostconfig(self): cfg.write("# cpp compiler used by spack\n") cfg.write(cmake_cache_entry("CMAKE_CXX_COMPILER", cpp_compiler)) + # use global spack compiler flags + cppflags = " ".join(spec.compiler_flags["cppflags"]) + if cppflags: + # avoid always ending up with ' ' with no flags defined + cppflags += " " + cflags = cppflags + " ".join(spec.compiler_flags["cflags"]) + if cflags: + cfg.write(cmake_cache_entry("CMAKE_C_FLAGS", cflags)) + cxxflags = cppflags + " ".join(spec.compiler_flags["cxxflags"]) + if cxxflags: + cfg.write(cmake_cache_entry("CMAKE_CXX_FLAGS", cxxflags)) + fflags = " ".join(spec.compiler_flags["fflags"]) + if self.spec.satisfies("%cce"): + fflags += " -ef" + if fflags: + cfg.write(cmake_cache_entry("CMAKE_Fortran_FLAGS", fflags)) + # shared vs static libs if "+shared" in spec: cfg.write(cmake_cache_entry("BUILD_SHARED_LIBS", "ON")) diff --git a/var/spack/repos/builtin/packages/w3emc/package.py b/var/spack/repos/builtin/packages/w3emc/package.py index ca6e14a795f..dc487f46031 100644 --- a/var/spack/repos/builtin/packages/w3emc/package.py +++ b/var/spack/repos/builtin/packages/w3emc/package.py @@ -15,7 +15,12 @@ class W3emc(CMakePackage): homepage = "https://noaa-emc.github.io/NCEPLIBS-w3emc/" url = "https://github.com/NOAA-EMC/NCEPLIBS-w3emc/archive/refs/tags/v2.9.0.tar.gz" - maintainers = ["t-brown", "kgerheiser", "Hang-Lei-NOAA", "edwardhartnett"] + 
maintainers = [ + "t-brown", + "AlexanderRichert-NOAA", + "Hang-Lei-NOAA", + "edwardhartnett", + ] version("2.9.2", sha256="eace811a1365f69b85fdf2bcd93a9d963ba72de5a7111e6fa7c0e6578b69bfbc") version("2.9.1", sha256="d3e705615bdd0b76a40751337d943d5a1ea415636f4e5368aed058f074b85df4") diff --git a/var/spack/repos/builtin/packages/w3nco/package.py b/var/spack/repos/builtin/packages/w3nco/package.py index ed736ab77d4..ef4b5510cc5 100644 --- a/var/spack/repos/builtin/packages/w3nco/package.py +++ b/var/spack/repos/builtin/packages/w3nco/package.py @@ -16,6 +16,11 @@ class W3nco(CMakePackage): homepage = "https://noaa-emc.github.io/NCEPLIBS/NCEPLIBS-w3nco/" url = "https://github.com/NOAA-EMC/NCEPLIBS-w3nco/archive/refs/tags/v2.4.1.tar.gz" - maintainers = ["t-brown", "kgerheiser", "Hang-Lei-NOAA", "edwardhartnett"] + maintainers = [ + "t-brown", + "AlexanderRichert-NOAA", + "Hang-Lei-NOAA", + "edwardhartnett", + ] version("2.4.1", sha256="48b06e0ea21d3d0fd5d5c4e7eb50b081402567c1bff6c4abf4fd4f3669070139") diff --git a/var/spack/repos/builtin/packages/warpx/package.py b/var/spack/repos/builtin/packages/warpx/package.py index f4b8c66c352..59762894de5 100644 --- a/var/spack/repos/builtin/packages/warpx/package.py +++ b/var/spack/repos/builtin/packages/warpx/package.py @@ -17,7 +17,7 @@ class Warpx(CMakePackage): """ homepage = "https://ecp-warpx.github.io" - url = "https://github.com/ECP-WarpX/WarpX/archive/refs/tags/22.05.tar.gz" + url = "https://github.com/ECP-WarpX/WarpX/archive/refs/tags/22.10.tar.gz" git = "https://github.com/ECP-WarpX/WarpX.git" maintainers = ["ax3l", "dpgrote", "MaxThevenet", "RemiLehe"] @@ -25,6 +25,8 @@ class Warpx(CMakePackage): # NOTE: if you update the versions here, also see py-warpx version("develop", branch="development") + version("22.10", sha256="3cbbbbb4d79f806b15e81c3d0e4a4401d1d03d925154682a3060efebd3b6ca3e") + version("22.09", sha256="dbef1318248c86c860cc47f7e18bbb0397818e3acdfb459e48075004bdaedea3") version("22.08", 
sha256="5ff7fd628e8bf615c1107e6c51bc55926f3ef2a076985444b889d292fecf56d4") version("22.07", sha256="0286adc788136cb78033cb1678d38d36e42265bcfd3d0c361a9bcc2cfcdf241b") version("22.06", sha256="e78398e215d3fc6bc5984f5d1c2ddeac290dcbc8a8e9d196e828ef6299187db9") diff --git a/var/spack/repos/builtin/packages/wgrib2/package.py b/var/spack/repos/builtin/packages/wgrib2/package.py index 33b9ff37e6c..2f2c1409333 100644 --- a/var/spack/repos/builtin/packages/wgrib2/package.py +++ b/var/spack/repos/builtin/packages/wgrib2/package.py @@ -3,29 +3,209 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) +import os +import re + from spack.package import * -class Wgrib2(CMakePackage): - """The wgrib2 package functionality for interacting with, reading, - writing, and manipulating GRIB2 files.""" +class Wgrib2(MakefilePackage): + """Utility for interacting with GRIB2 files""" homepage = "https://www.cpc.ncep.noaa.gov/products/wesley/wgrib2" - url = "https://github.com/NOAA-EMC/NCEPLIBS-wgrib2/archive/refs/tags/v2.0.8-cmake-v6.tar.gz" + url = "https://www.ftp.cpc.ncep.noaa.gov/wd51we/wgrib2/wgrib2.tgz.v2.0.8" - maintainers = ["t-brown", "kgerheiser", "Hang-Lei-NOAA", "edwardhartnett"] + maintainers = [ + "t-brown", + "AlexanderRichert-NOAA", + "Hang-Lei-NOAA", + "edwardhartnett", + ] version( - "2.0.8-cmake-v6", sha256="745cd008b4ce0245ea44247733e57e2b9ec6c5205d171d457e18d0ff8f87172d" + "3.1.1", + sha256="9236f6afddad76d868c2cfdf5c4227f5bdda5e85ae40c18bafb37218e49bc04a", + extension="tar.gz", + ) + version( + "3.1.0", + sha256="5757ef9016b19ae87491918e0853dce2d3616b14f8c42efe3b2f41219c16b78f", + extension="tar.gz", + ) + version( + "2.0.8", + sha256="5e6a0d6807591aa2a190d35401606f7e903d5485719655aea1c4866cc2828160", + extension="tar.gz", + ) + version( + "2.0.7", + sha256="d7f1a4f9872922c62b3c7818c022465532cca1f5666b75d3ac5735f0b2747793", + extension="tar.gz", ) - depends_on("ip2") - depends_on("jasper@:2.0.32") - depends_on("libpng") - depends_on("netcdf-c") - 
depends_on("netcdf-fortran") - depends_on("sp") + variant("netcdf3", default=True, description="Link in netcdf3 library to write netcdf3 files") + variant( + "netcdf4", default=False, description="Link in netcdf4 library to write netcdf3/4 files" + ) + variant( + "ipolates", + default="3", + description="Use to interpolate to new grids (0 = OFF, 1 = ip, 3 = ip2)", + values=("0", "1", "3"), + ) + variant("spectral", default=False, description="Spectral interpolation in -new_grid") + variant( + "fortran_api", + default=True, + description="Make wgrib2api which allows fortran code to read/write grib2", + ) + variant( + "mysql", default=False, description="Link in interface to MySQL to write to mysql database" + ) + variant( + "udf", + default=False, + description="Add commands for user-defined functions and shell commands", + ) + variant("regex", default=True, description="Use regular expression library (POSIX-2)") + variant("tigge", default=True, description="Ability for TIGGE-like variable names") + variant("proj4", default=False, description="The proj4 library is used to verify gctpc.") + variant( + "aec", default=True, description="Use the libaec library for packing with GRIB2 template" + ) + variant("g2c", default=False, description="Include NCEP g2clib (mainly for testing purposes)") + variant( + "disable_timezone", default=False, description="Some machines do not support timezones" + ) + variant( + "disable_alarm", + default=False, + description="Some machines do not support the alarm to terminate wgrib2", + ) + variant("png", default=True, description="PNG encoding") + variant("jasper", default=True, description="JPEG compression using Jasper") + variant("openmp", default=True, description="OpenMP parallelization") + variant("wmo_validation", default=False, description="WMO validation") - def cmake_args(self): - args = ["-DUSE_IPOLATES=3", "-DUSE_SPECTRAL=BOOL:ON"] - return args + conflicts("+netcdf3", when="+netcdf4") + conflicts("+openmp", 
when="%apple-clang") + + depends_on("wget", type=("build"), when="+netcdf4") + + variant_map = { + "netcdf3": "USE_NETCDF3", + "netcdf4": "USE_NETCDF4", + "spectral": "USE_SPECTRAL", + "mysql": "USE_MYSQL", + "udf": "USE_UDF", + "regex": "USE_REGEX", + "tigge": "USE_TIGGE", + "proj4": "USE_PROJ4", + "aec": "USE_AEC", + "g2c": "USE_G2CLIB", + "png": "USE_PNG", + "jasper": "USE_JASPER", + "openmp": "USE_OPENMP", + "wmo_validation": "USE_WMO_VALIDATION", + "ipolates": "USE_IPOLATES", + "disable_timezone": "DISABLE_TIMEZONE", + "disable_alarm": "DISABLE_ALARM", + "fortran_api": "MAKE_FTN_API", + } + + # Disable parallel build + parallel = False + + # Use Spack compiler wrapper flags + def inject_flags(self, name, flags): + if name == "cflags": + if self.spec.compiler.name == "apple-clang": + flags.append("-Wno-error=implicit-function-declaration") + + # When mixing Clang/gfortran need to link to -lgfortran + # Find this by searching for gfortran/../lib + if self.spec.compiler.name in ["apple-clang", "clang"]: + if "gfortran" in self.compiler.fc: + output = Executable(self.compiler.fc)("-###", output=str, error=str) + libdir = re.search("--libdir=(.+?) 
", output).group(1) + flags.append("-L{}".format(libdir)) + + return (flags, None, None) + + flag_handler = inject_flags + + def url_for_version(self, version): + url = "https://www.ftp.cpc.ncep.noaa.gov/wd51we/wgrib2/wgrib2.tgz.v{}" + return url.format(version) + + def edit(self, spec, prefix): + makefile = FileFilter("makefile") + + # ifort no longer accepts -openmp + makefile.filter(r"-openmp", "-qopenmp") + makefile.filter(r"-Wall", " ") + makefile.filter(r"-Werror=format-security", " ") + + # clang doesn"t understand --fast-math + if spec.satisfies("%clang") or spec.satisfies("%apple-clang"): + makefile.filter(r"--fast-math", "-ffast-math") + + for variant_name, makefile_option in self.variant_map.items(): + value = int(spec.variants[variant_name].value) + makefile.filter(r"^%s=.*" % makefile_option, "{}={}".format(makefile_option, value)) + + def setup_build_environment(self, env): + + if self.spec.compiler.name in "intel": + comp_sys = "intel_linux" + elif self.spec.compiler.name in ["gcc", "clang", "apple-clang"]: + comp_sys = "gnu_linux" + + env.set("COMP_SYS", comp_sys) + + def build(self, spec, prefix): + + # Get source files for netCDF4 builds + if self.spec.satisfies("+netcdf4"): + with working_dir(self.build_directory): + os.system( + "wget https://downloads.unidata.ucar.edu/netcdf-c/4.8.1/netcdf-c-4.8.1.tar.gz" + ) + os.system( + "wget https://support.hdfgroup.org/ftp/HDF5/releases/hdf5-1.12/hdf5-1.12.1/src/hdf5-1.12.1.tar.gz" + ) + + make() + + # Move wgrib2 executable to a tempoary directory + mkdir("install") + mkdir(join_path("install", "bin")) + move(join_path("wgrib2", "wgrib2"), join_path("install", "bin")) + + # Build wgrib2 library by disabling all options + # and enabling only MAKE_FTN_API=1 + if "+fortran_api" in spec: + make("clean") + make("deep-clean") + makefile = FileFilter("makefile") + + # Disable all options + for variant_name, makefile_option in self.variant_map.items(): + value = 0 + makefile.filter( + r"^%s=.*" % 
makefile_option, "{}={}".format(makefile_option, value) + ) + + # Need USE_REGEX in addition to MAKE_FTN_API to build lib + makefile.filter(r"^MAKE_FTN_API=.*", "MAKE_FTN_API=1") + makefile.filter(r"^USE_REGEX=.*", "USE_REGEX=1") + make("lib") + mkdir(join_path("install", "lib")) + mkdir(join_path("install", "include")) + + move(join_path("lib", "libwgrib2.a"), join_path("install", "lib")) + move(join_path("lib", "wgrib2api.mod"), join_path("install", "include")) + move(join_path("lib", "wgrib2lowapi.mod"), join_path("install", "include")) + + def install(self, spec, prefix): + install_tree("install/", prefix) diff --git a/var/spack/repos/builtin/packages/wi4mpi/package.py b/var/spack/repos/builtin/packages/wi4mpi/package.py index b3d0be71884..1a5081632df 100644 --- a/var/spack/repos/builtin/packages/wi4mpi/package.py +++ b/var/spack/repos/builtin/packages/wi4mpi/package.py @@ -14,6 +14,9 @@ class Wi4mpi(CMakePackage): url = "https://github.com/cea-hpc/wi4mpi/archive/v3.4.1.tar.gz" maintainers = ["adrien-cotte", "marcjoos-cea"] + version("3.6.3", sha256="c327babc892cc3c2bdddfacf3011e6fcb7e00a04e814de31f5e707cba3199c5c") + version("3.6.2", sha256="4b784d27decfff9cbd29f072ba75bb0f6c471d6edc7f1037df1ab7ccbcceffba") + version("3.6.1", sha256="14fbaf8c7ac0b7f350242a90e1be75e9f4bd0196a0d0e326b40be04ca58a2613") version("3.6.0", sha256="06f48bf506643edba51dd04bfdfbaa824363d28549f8eabf002b760ba516227b") version("3.5.0", sha256="36dd3dfed4f0f37bc817204d4810f049e624900b1b32641122f09a183135522f") version("3.4.1", sha256="92bf6738216426069bc07bff19cd7c933e33e397a941ff9f89a639380fab3737") diff --git a/var/spack/repos/builtin/packages/wrf-io/package.py b/var/spack/repos/builtin/packages/wrf-io/package.py index 5dada9ff269..40c9713137c 100644 --- a/var/spack/repos/builtin/packages/wrf-io/package.py +++ b/var/spack/repos/builtin/packages/wrf-io/package.py @@ -16,7 +16,12 @@ class WrfIo(CMakePackage): homepage = "https://noaa-emc.github.io/NCEPLIBS-wrf_io" url = 
"https://github.com/NOAA-EMC/NCEPLIBS-wrf_io/archive/refs/tags/v1.2.0.tar.gz" - maintainers = ["t-brown", "kgerheiser", "Hang-Lei-NOAA", "edwardhartnett"] + maintainers = [ + "t-brown", + "AlexanderRichert-NOAA", + "Hang-Lei-NOAA", + "edwardhartnett", + ] version("1.2.0", sha256="000cf5294a2c68460085258186e1f36c86d3d0d9c433aa969a0f92736b745617") diff --git a/var/spack/repos/builtin/packages/xsdk/package.py b/var/spack/repos/builtin/packages/xsdk/package.py index d0396f9d268..627143b41c4 100644 --- a/var/spack/repos/builtin/packages/xsdk/package.py +++ b/var/spack/repos/builtin/packages/xsdk/package.py @@ -190,7 +190,7 @@ class Xsdk(BundlePackage, CudaPackage, ROCmPackage): xsdk_depends_on("datatransferkit@3.1-rc2", when="@0.6.0 +trilinos +datatransferkit") xsdk_depends_on("petsc +trilinos", when="+trilinos @:0.6.0") - xsdk_depends_on("petsc +batch", when="platform=cray @0.5.0:") + xsdk_depends_on("petsc +batch", when="@0.5.0: ^cray-mpich") xsdk_depends_on( "petsc@main+mpi+hypre+superlu-dist+metis+hdf5~mumps+double~int64", when="@develop", @@ -398,7 +398,7 @@ class Xsdk(BundlePackage, CudaPackage, ROCmPackage): xsdk_depends_on("py-libensemble@0.5.2+petsc4py", when="@0.5.0 +libensemble") xsdk_depends_on("py-petsc4py@3.12.0", when="@0.5.0 +libensemble") - xsdk_depends_on("precice ~petsc", when="platform=cray +precice") + xsdk_depends_on("precice ~petsc", when="+precice ^cray-mpich") xsdk_depends_on("precice@develop", when="@develop +precice") xsdk_depends_on("precice@2.3.0", when="@0.7.0 +precice") xsdk_depends_on("precice@2.1.1", when="@0.6.0 +precice") diff --git a/var/spack/repos/builtin/packages/xz/package.py b/var/spack/repos/builtin/packages/xz/package.py index 440e67608ac..1224e0a9f94 100644 --- a/var/spack/repos/builtin/packages/xz/package.py +++ b/var/spack/repos/builtin/packages/xz/package.py @@ -20,10 +20,13 @@ class Xz(AutotoolsPackage, SourceforgePackage): executables = [r"^xz$"] + version("5.2.7", 
sha256="b65f1d0c2708e57716f4dd2216989a73847ac6fdb4168ffceb155767e22b834b") + version("5.2.6", sha256="13e3402e301b6018f6a71ef0e497f714c6d11e214ae82dab156b81c2a64acb25") version("5.2.5", sha256="5117f930900b341493827d63aa910ff5e011e0b994197c3b71c08a20228a42df") version("5.2.4", sha256="3313fd2a95f43d88e44264e6b015e7d03053e681860b0d5d3f9baca79c57b7bf") version("5.2.3", sha256="fd9ca16de1052aac899ad3495ad20dfa906c27b4a5070102a2ec35ca3a4740c1") version("5.2.2", sha256="6ff5f57a4b9167155e35e6da8b529de69270efb2b4cf3fbabf41a4ee793840b5") + version("5.2.1", sha256="679148f497e0bff2c1adce42dee5a23f746e71321c33ebb0f641a302e30c2a80") version("5.2.0", sha256="f7357d7455a1670229b3cca021da71dd5d13b789db62743c20624bdffc9cc4a5") variant("pic", default=False, description="Compile with position independent code.") diff --git a/var/spack/repos/builtin/packages/z3/package.py b/var/spack/repos/builtin/packages/z3/package.py index 7f8c1dcbb04..dfe3be3a9fe 100644 --- a/var/spack/repos/builtin/packages/z3/package.py +++ b/var/spack/repos/builtin/packages/z3/package.py @@ -14,6 +14,7 @@ class Z3(CMakePackage): homepage = "https://github.com/Z3Prover/z3/wiki" url = "https://github.com/Z3Prover/z3/archive/z3-4.5.0.tar.gz" + version("4.11.2", sha256="e3a82431b95412408a9c994466fad7252135c8ed3f719c986cd75c8c5f234c7e") version("4.8.16", sha256="75f95e09f3f35fef746e571d5ec88a4efba27f1bc8f1a0ef1117167486ec3dc6") version("4.8.15", sha256="2abe7f5ecb7c8023b712ffba959c55b4515f4978522a6882391de289310795ac") version("4.8.14", sha256="96a1f49a7701120cc38bfa63c02ff93be4d64c7926cea41977dedec7d87a1364") diff --git a/var/spack/repos/builtin/packages/zlib/package.py b/var/spack/repos/builtin/packages/zlib/package.py index 870629e569c..58db86f6b2a 100644 --- a/var/spack/repos/builtin/packages/zlib/package.py +++ b/var/spack/repos/builtin/packages/zlib/package.py @@ -9,10 +9,12 @@ import glob import os +import spack.build_systems.generic +import spack.build_systems.makefile from spack.package import * -class 
Zlib(Package): +class Zlib(MakefilePackage, Package): """A free, general-purpose, legally unencumbered lossless data-compression library. """ @@ -21,7 +23,12 @@ class Zlib(Package): # URL must remain http:// so Spack can bootstrap curl url = "https://zlib.net/fossils/zlib-1.2.11.tar.gz" - version("1.2.12", sha256="91844808532e5ce316b3c010929493c0244f3d37593afd6de04f71821d5136d9") + version("1.2.13", sha256="b3a24de97a8fdbc835b9833169501030b8977031bcb54b3b3ac13740f846ab30") + version( + "1.2.12", + sha256="91844808532e5ce316b3c010929493c0244f3d37593afd6de04f71821d5136d9", + deprecated=True, + ) version( "1.2.11", sha256="c3e5e9fdd5004dcb542feda5ee4f0ff0744628baf8ed2dd5d66f8ca1197cb1a1", @@ -38,10 +45,14 @@ class Zlib(Package): deprecated=True, ) + build_system("makefile", conditional("generic", when="platform=windows"), default="makefile") + variant("pic", default=True, description="Produce position-independent code (for shared libs)") variant("shared", default=True, description="Enables the build of shared libraries.") variant("optimize", default=True, description="Enable -O2 for a more optimized lib") + conflicts("build_system=makefile", when="platform=windows") + patch("w_patch.patch", when="@1.2.11%cce") patch("configure-cc.patch", when="@1.2.12") @@ -50,11 +61,31 @@ def libs(self): shared = "+shared" in self.spec return find_libraries(["libz"], root=self.prefix, recursive=True, shared=shared) - def win_install(self): - build_dir = self.stage.source_path - install_tree = {} - install_tree["bin"] = glob.glob(os.path.join(build_dir, "*.dll")) - install_tree["lib"] = glob.glob(os.path.join(build_dir, "*.lib")) + +class SetupEnvironment(object): + def setup_build_environment(self, env): + if "+pic" in self.spec: + env.append_flags("CFLAGS", self.pkg.compiler.cc_pic_flag) + if "+optimize" in self.spec: + env.append_flags("CFLAGS", "-O2") + + +class MakefileBuilder(spack.build_systems.makefile.MakefileBuilder, SetupEnvironment): + def edit(self, pkg, spec, prefix): + 
config_args = [] + if "~shared" in self.spec: + config_args.append("--static") + configure("--prefix={0}".format(prefix), *config_args) + + +class GenericBuilder(spack.build_systems.generic.GenericBuilder, SetupEnvironment): + def install(self, spec, prefix): + nmake("-f" "win32\\Makefile.msc") + build_dir = self.pkg.stage.source_path + install_tree = { + "bin": glob.glob(os.path.join(build_dir, "*.dll")), + "lib": glob.glob(os.path.join(build_dir, "*.lib")), + } compose_src_path = lambda x: os.path.join(build_dir, x) install_tree["include"] = [compose_src_path("zlib.h"), compose_src_path("zconf.h")] # Windows path seps are fine here as this method is Windows specific. @@ -71,24 +102,3 @@ def installtree(dst, tree): install(file, install_dst) installtree(self.prefix, install_tree) - - def setup_build_environment(self, env): - if "+pic" in self.spec: - env.append_flags("CFLAGS", self.compiler.cc_pic_flag) - if "+optimize" in self.spec: - env.append_flags("CFLAGS", "-O2") - - def install(self, spec, prefix): - if "platform=windows" in self.spec: - nmake("-f" "win32\\Makefile.msc") - self.win_install() - else: - config_args = [] - if "~shared" in spec: - config_args.append("--static") - configure("--prefix={0}".format(prefix), *config_args) - - make() - if self.run_tests: - make("check") - make("install")