Merge branch 'develop' into f/env-location

Commit 41cf807804 by psakievich, 2022-11-09 15:09:22 -07:00 (committed by GitHub)
776 changed files with 23415 additions and 8065 deletions

View File

@@ -25,7 +25,7 @@ jobs:
           python-version: ${{inputs.python_version}}
       - name: Install Python packages
         run: |
-          pip install --upgrade pip six setuptools pytest codecov 'coverage[toml]<=6.2'
+          pip install --upgrade pip six setuptools pytest codecov coverage[toml]
       - name: Package audits (with coverage)
         if: ${{ inputs.with_coverage == 'true' }}
         run: |

View File

@@ -1,7 +1,7 @@
 #!/bin/bash
 set -ex
 source share/spack/setup-env.sh
-$PYTHON bin/spack bootstrap untrust spack-install
+$PYTHON bin/spack bootstrap disable spack-install
 $PYTHON bin/spack -d solve zlib
 tree $BOOTSTRAP/store
 exit 0
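
Throughout this merge the ``untrust``/``trust`` subcommands are renamed to ``disable``/``enable`` (the documentation change further down shows the same rename). A minimal sketch of the renamed CLI, reusing only source names that appear in this diff:

    # formerly: spack bootstrap untrust spack-install
    spack bootstrap disable spack-install
    # formerly: spack bootstrap trust spack-install
    spack bootstrap enable spack-install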

View File

@@ -42,7 +42,8 @@ jobs:
         shell: runuser -u spack-test -- bash {0}
         run: |
           source share/spack/setup-env.sh
-          spack bootstrap untrust github-actions-v0.2
+          spack bootstrap disable github-actions-v0.4
+          spack bootstrap disable github-actions-v0.3
           spack external find cmake bison
           spack -d solve zlib
           tree ~/.spack/bootstrap/store/
@@ -79,7 +80,8 @@ jobs:
         shell: runuser -u spack-test -- bash {0}
         run: |
           source share/spack/setup-env.sh
-          spack bootstrap untrust github-actions-v0.2
+          spack bootstrap disable github-actions-v0.4
+          spack bootstrap disable github-actions-v0.3
           spack external find cmake bison
           spack -d solve zlib
           tree ~/.spack/bootstrap/store/
@@ -143,7 +145,8 @@ jobs:
       - name: Bootstrap clingo
         run: |
           source share/spack/setup-env.sh
-          spack bootstrap untrust github-actions-v0.2
+          spack bootstrap disable github-actions-v0.4
+          spack bootstrap disable github-actions-v0.3
           spack external find cmake bison
           spack -d solve zlib
           tree ~/.spack/bootstrap/store/
@@ -160,7 +163,8 @@ jobs:
         run: |
           source share/spack/setup-env.sh
           export PATH=/usr/local/opt/bison@2.7/bin:$PATH
-          spack bootstrap untrust github-actions-v0.2
+          spack bootstrap disable github-actions-v0.4
+          spack bootstrap disable github-actions-v0.3
           spack external find --not-buildable cmake bison
           spack -d solve zlib
           tree ~/.spack/bootstrap/store/
@@ -261,7 +265,7 @@ jobs:
         shell: runuser -u spack-test -- bash {0}
         run: |
           source share/spack/setup-env.sh
-          spack bootstrap untrust spack-install
+          spack bootstrap disable spack-install
           spack -d gpg list
           tree ~/.spack/bootstrap/store/
@@ -298,7 +302,8 @@ jobs:
         run: |
           source share/spack/setup-env.sh
           spack solve zlib
-          spack bootstrap untrust github-actions-v0.2
+          spack bootstrap disable github-actions-v0.4
+          spack bootstrap disable github-actions-v0.3
           spack -d gpg list
           tree ~/.spack/bootstrap/store/
@@ -315,7 +320,7 @@ jobs:
       - name: Bootstrap GnuPG
         run: |
           source share/spack/setup-env.sh
-          spack bootstrap untrust spack-install
+          spack bootstrap disable spack-install
           spack -d gpg list
           tree ~/.spack/bootstrap/store/
@@ -333,7 +338,8 @@ jobs:
         run: |
           source share/spack/setup-env.sh
           spack solve zlib
-          spack bootstrap untrust github-actions-v0.2
+          spack bootstrap disable github-actions-v0.4
+          spack bootstrap disable github-actions-v0.3
           spack -d gpg list
           tree ~/.spack/bootstrap/store/

View File

@@ -13,7 +13,7 @@ on:
     paths:
       - '.github/workflows/build-containers.yml'
       - 'share/spack/docker/*'
-      - 'share/templates/container/*'
+      - 'share/spack/templates/container/*'
       - 'lib/spack/spack/container/*'
   # Let's also build & tag Spack containers on releases.
   release:
@@ -80,19 +80,19 @@ jobs:
           fi
       - name: Upload Dockerfile
-        uses: actions/upload-artifact@3cea5372237819ed00197afe530f5a7ea3e805c8
+        uses: actions/upload-artifact@83fd05a356d7e2593de66fc9913b3002723633cb
        with:
          name: dockerfiles
          path: dockerfiles
       - name: Set up QEMU
-        uses: docker/setup-qemu-action@8b122486cedac8393e77aa9734c3528886e4a1a8 # @v1
+        uses: docker/setup-qemu-action@e81a89b1732b9c48d79cd809d8d81d79c4647a18 # @v1
       - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@dc7b9719a96d48369863986a06765841d7ea23f6 # @v1
+        uses: docker/setup-buildx-action@8c0edbc76e98fa90f69d9a2c020dcb50019dc325 # @v1
       - name: Log in to GitHub Container Registry
-        uses: docker/login-action@49ed152c8eca782a232dede0303416e8f356c37b # @v1
+        uses: docker/login-action@f4ef78c080cd8ba55a85445d5b36e214a81df20a # @v1
         with:
           registry: ghcr.io
           username: ${{ github.actor }}
@@ -100,13 +100,13 @@ jobs:
       - name: Log in to DockerHub
         if: github.event_name != 'pull_request'
-        uses: docker/login-action@49ed152c8eca782a232dede0303416e8f356c37b # @v1
+        uses: docker/login-action@f4ef78c080cd8ba55a85445d5b36e214a81df20a # @v1
         with:
           username: ${{ secrets.DOCKERHUB_USERNAME }}
           password: ${{ secrets.DOCKERHUB_TOKEN }}
       - name: Build & Deploy ${{ matrix.dockerfile[0] }}
-        uses: docker/build-push-action@c84f38281176d4c9cdb1626ffafcd6b3911b5d94 # @v2
+        uses: docker/build-push-action@c56af957549030174b10d6867f20e78cfd7debc5 # @v2
         with:
           context: dockerfiles/${{ matrix.dockerfile[0] }}
           platforms: ${{ matrix.dockerfile[1] }}

View File

@@ -46,7 +46,7 @@ jobs:
         with:
           fetch-depth: 0
       # For pull requests it's not necessary to checkout the code
-      - uses: dorny/paths-filter@b2feaf19c27470162a626bd6fa8438ae5b263721
+      - uses: dorny/paths-filter@4512585405083f25c027a35db413c2b3b9006d50
         id: filter
         with:
           # See https://github.com/dorny/paths-filter/issues/56 for the syntax used below

View File

@@ -6,6 +6,10 @@ git config --global user.email "spack@example.com"
 git config --global user.name "Test User"
 git config --global core.longpaths true
 
+# See https://github.com/git/git/security/advisories/GHSA-3wp6-j8xr-qw85 (CVE-2022-39253)
+# This is needed to let some fixture in our unit-test suite run
+git config --global protocol.file.allow always
+
 if ($(git branch --show-current) -ne "develop")
 {
   git branch develop origin/develop

View File

@@ -2,6 +2,10 @@
 git config --global user.email "spack@example.com"
 git config --global user.name "Test User"
 
+# See https://github.com/git/git/security/advisories/GHSA-3wp6-j8xr-qw85 (CVE-2022-39253)
+# This is needed to let some fixture in our unit-test suite run
+git config --global protocol.file.allow always
+
 # create a local pr base branch
 if [[ -n $GITHUB_BASE_REF ]]; then
   git fetch origin "${GITHUB_BASE_REF}:${GITHUB_BASE_REF}"
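
The ``protocol.file.allow always`` setting re-enables local file-protocol git operations that were restricted in response to CVE-2022-39253, which some unit-test fixtures rely on. Outside of throwaway CI environments, the same override can be scoped to a single command rather than set globally (standard git behavior, shown here as a hedged alternative; the repository paths are placeholders):

    # allow the file protocol for one command only, instead of --global
    git -c protocol.file.allow=always clone ./local-repo clone-dir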

View File

@@ -14,7 +14,7 @@ jobs:
     runs-on: ubuntu-latest
     strategy:
       matrix:
-        python-version: ['2.7', '3.6', '3.7', '3.8', '3.9', '3.10']
+        python-version: ['2.7', '3.6', '3.7', '3.8', '3.9', '3.10', '3.11']
         concretizer: ['clingo']
         on_develop:
           - ${{ github.ref == 'refs/heads/develop' }}
@@ -22,7 +22,7 @@ jobs:
         - python-version: 2.7
           concretizer: original
           on_develop: ${{ github.ref == 'refs/heads/develop' }}
-        - python-version: '3.10'
+        - python-version: '3.11'
           concretizer: original
           on_develop: ${{ github.ref == 'refs/heads/develop' }}
       exclude:
@@ -35,6 +35,9 @@ jobs:
         - python-version: '3.9'
           concretizer: 'clingo'
           on_develop: false
+        - python-version: '3.10'
+          concretizer: 'clingo'
+          on_develop: false
     steps:
       - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # @v2
@@ -52,7 +55,12 @@ jobs:
           patchelf cmake bison libbison-dev kcov
       - name: Install Python packages
         run: |
-          pip install --upgrade pip six setuptools pytest codecov[toml] pytest-cov pytest-xdist
+          pip install --upgrade pip six setuptools pytest codecov[toml] pytest-xdist
+          # Install pytest-cov only on recent Python, to avoid stalling on Python 2.7 due
+          # to bugs on an unmaintained version of the package when used with xdist.
+          if [[ ${{ matrix.python-version }} != "2.7" ]]; then
+            pip install --upgrade pytest-cov
+          fi
           # ensure style checks are not skipped in unit tests for python >= 3.6
           # note that true/false (i.e., 1/0) are opposite in conditions in python and bash
           if python -c 'import sys; sys.exit(not sys.version_info >= (3, 6))'; then
@@ -61,7 +69,7 @@ jobs:
       - name: Pin pathlib for Python 2.7
         if: ${{ matrix.python-version == 2.7 }}
         run: |
-          pip install -U pathlib2==2.3.6
+          pip install -U pathlib2==2.3.6 toml
       - name: Setup git configuration
         run: |
           # Need this for the git tests to succeed.
@@ -73,7 +81,7 @@ jobs:
           SPACK_PYTHON: python
         run: |
           . share/spack/setup-env.sh
-          spack bootstrap untrust spack-install
+          spack bootstrap disable spack-install
           spack -v solve zlib
       - name: Run unit tests
         env:
@@ -81,11 +89,9 @@ jobs:
           SPACK_TEST_SOLVER: ${{ matrix.concretizer }}
           SPACK_TEST_PARALLEL: 2
           COVERAGE: true
-          UNIT_TEST_COVERAGE: ${{ (matrix.concretizer == 'original' && matrix.python-version == '2.7') || (matrix.python-version == '3.10') }}
+          UNIT_TEST_COVERAGE: ${{ (matrix.python-version == '3.11') }}
         run: |
           share/spack/qa/run-unit-tests
-          coverage combine -a
-          coverage xml
       - uses: codecov/codecov-action@d9f34f8cd5cb3b3eb79b3e4b5dae3a16df499a70
         with:
           flags: unittests,linux,${{ matrix.concretizer }}
@@ -98,7 +104,7 @@ jobs:
           fetch-depth: 0
       - uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984 # @v2
         with:
-          python-version: '3.10'
+          python-version: '3.11'
       - name: Install System packages
         run: |
           sudo apt-get -y update
@@ -106,7 +112,7 @@ jobs:
           sudo apt-get install -y coreutils kcov csh zsh tcsh fish dash bash
       - name: Install Python packages
         run: |
-          pip install --upgrade pip six setuptools pytest codecov coverage[toml]==6.2 pytest-xdist
+          pip install --upgrade pip six setuptools pytest codecov coverage[toml] pytest-xdist
       - name: Setup git configuration
         run: |
           # Need this for the git tests to succeed.
@@ -155,7 +161,7 @@ jobs:
           fetch-depth: 0
       - uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984 # @v2
         with:
-          python-version: '3.10'
+          python-version: '3.11'
       - name: Install System packages
         run: |
           sudo apt-get -y update
@@ -177,8 +183,6 @@ jobs:
           SPACK_TEST_SOLVER: clingo
         run: |
           share/spack/qa/run-unit-tests
-          coverage combine -a
-          coverage xml
       - uses: codecov/codecov-action@d9f34f8cd5cb3b3eb79b3e4b5dae3a16df499a70 # @v2.1.0
         with:
           flags: unittests,linux,clingo
@@ -187,7 +191,7 @@ jobs:
     runs-on: macos-latest
     strategy:
       matrix:
-        python-version: [3.8]
+        python-version: ["3.10"]
     steps:
       - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # @v2
         with:
@@ -210,15 +214,10 @@ jobs:
           git --version
           . .github/workflows/setup_git.sh
           . share/spack/setup-env.sh
-          $(which spack) bootstrap untrust spack-install
+          $(which spack) bootstrap disable spack-install
           $(which spack) solve zlib
           common_args=(--dist loadfile --tx '4*popen//python=./bin/spack-tmpconfig python -u ./bin/spack python' -x)
-          $(which spack) unit-test --cov --cov-config=pyproject.toml "${common_args[@]}"
-          coverage combine -a
-          coverage xml
-          # Delete the symlink going from ./lib/spack/docs/_spack_root back to
-          # the initial directory, since it causes ELOOP errors with codecov/actions@2
-          rm lib/spack/docs/_spack_root
+          $(which spack) unit-test --cov --cov-config=pyproject.toml --cov-report=xml:coverage.xml "${common_args[@]}"
       - uses: codecov/codecov-action@d9f34f8cd5cb3b3eb79b3e4b5dae3a16df499a70
         with:
           flags: unittests,macos
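
With the separate ``coverage combine -a`` / ``coverage xml`` steps removed, the XML report is now produced directly by pytest-cov via ``--cov-report=xml:coverage.xml``. A rough standalone equivalent of that invocation (paths simplified for illustration; not the exact CI command line):

    # pytest-cov writes coverage.xml itself; no post-processing steps needed
    python -m pytest --cov --cov-config=pyproject.toml --cov-report=xml:coverage.xml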

View File

@@ -21,7 +21,7 @@ jobs:
       - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # @v2
       - uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984 # @v2
         with:
-          python-version: '3.10'
+          python-version: '3.11'
           cache: 'pip'
       - name: Install Python Packages
         run: |
@@ -40,7 +40,7 @@ jobs:
           fetch-depth: 0
       - uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984 # @v2
         with:
-          python-version: '3.10'
+          python-version: '3.11'
           cache: 'pip'
       - name: Install Python packages
         run: |
@@ -57,4 +57,4 @@ jobs:
     uses: ./.github/workflows/audit.yaml
     with:
       with_coverage: ${{ inputs.with_coverage }}
-      python_version: '3.10'
+      python_version: '3.11'

View File

@@ -23,7 +23,7 @@ jobs:
           python-version: 3.9
       - name: Install Python packages
         run: |
-          python -m pip install --upgrade pip six pywin32 setuptools codecov pytest-cov
+          python -m pip install --upgrade pip six pywin32 setuptools codecov pytest-cov clingo
       - name: Create local develop
         run: |
           .\spack\.github\workflows\setup_git.ps1
@@ -32,8 +32,7 @@ jobs:
           echo F|xcopy .\spack\share\spack\qa\configuration\windows_config.yaml $env:USERPROFILE\.spack\windows\config.yaml
           cd spack
           dir
-          (Get-Item '.\lib\spack\docs\_spack_root').Delete()
-          spack unit-test --verbose --cov --cov-config=pyproject.toml --ignore=lib/spack/spack/test/cmd
+          spack unit-test -x --verbose --cov --cov-config=pyproject.toml --ignore=lib/spack/spack/test/cmd
           coverage combine -a
           coverage xml
       - uses: codecov/codecov-action@d9f34f8cd5cb3b3eb79b3e4b5dae3a16df499a70
@@ -50,7 +49,7 @@ jobs:
           python-version: 3.9
       - name: Install Python packages
         run: |
-          python -m pip install --upgrade pip six pywin32 setuptools codecov coverage pytest-cov
+          python -m pip install --upgrade pip six pywin32 setuptools codecov coverage pytest-cov clingo
       - name: Create local develop
         run: |
           .\spack\.github\workflows\setup_git.ps1
@@ -58,8 +57,7 @@ jobs:
         run: |
           echo F|xcopy .\spack\share\spack\qa\configuration\windows_config.yaml $env:USERPROFILE\.spack\windows\config.yaml
           cd spack
-          (Get-Item '.\lib\spack\docs\_spack_root').Delete()
-          spack unit-test --verbose --cov --cov-config=pyproject.toml lib/spack/spack/test/cmd
+          spack unit-test -x --verbose --cov --cov-config=pyproject.toml lib/spack/spack/test/cmd
           coverage combine -a
           coverage xml
       - uses: codecov/codecov-action@d9f34f8cd5cb3b3eb79b3e4b5dae3a16df499a70
@@ -83,7 +81,7 @@ jobs:
           echo F|xcopy .\spack\share\spack\qa\configuration\windows_config.yaml $env:USERPROFILE\.spack\windows\config.yaml
           spack external find cmake
           spack external find ninja
-          spack install abseil-cpp
+          spack -d install abseil-cpp
   make-installer:
     runs-on: windows-latest
     steps:
@@ -111,11 +109,11 @@ jobs:
           echo "installer_root=$((pwd).Path)" | Out-File -FilePath $Env:GITHUB_ENV -Encoding utf8 -Append
         env:
           ProgressPreference: SilentlyContinue
-      - uses: actions/upload-artifact@3cea5372237819ed00197afe530f5a7ea3e805c8
+      - uses: actions/upload-artifact@83fd05a356d7e2593de66fc9913b3002723633cb
        with:
          name: Windows Spack Installer Bundle
          path: ${{ env.installer_root }}\pkg\Spack.exe
-      - uses: actions/upload-artifact@3cea5372237819ed00197afe530f5a7ea3e805c8
+      - uses: actions/upload-artifact@83fd05a356d7e2593de66fc9913b3002723633cb
        with:
          name: Windows Spack Installer
          path: ${{ env.installer_root}}\pkg\Spack.msi

View File

@@ -62,6 +62,7 @@ Resources:
 * **Slack workspace**: [spackpm.slack.com](https://spackpm.slack.com).
   To get an invitation, visit [slack.spack.io](https://slack.spack.io).
+* [**Github Discussions**](https://github.com/spack/spack/discussions): not just for discussions, also Q&A.
 * **Mailing list**: [groups.google.com/d/forum/spack](https://groups.google.com/d/forum/spack)
 * **Twitter**: [@spackpm](https://twitter.com/spackpm). Be sure to
   `@mention` us!

View File

@@ -49,52 +49,8 @@ spack_prefix = os.path.dirname(os.path.dirname(spack_file))
 spack_lib_path = os.path.join(spack_prefix, "lib", "spack")
 sys.path.insert(0, spack_lib_path)
 
-# Add external libs
-spack_external_libs = os.path.join(spack_lib_path, "external")
-if sys.version_info[:2] <= (2, 7):
-    sys.path.insert(0, os.path.join(spack_external_libs, "py2"))
-sys.path.insert(0, spack_external_libs)
-
-# Here we delete ruamel.yaml in case it has been already imported from site
-# (see #9206 for a broader description of the issue).
-#
-# Briefly: ruamel.yaml produces a .pth file when installed with pip that
-# makes the site installed package the preferred one, even though sys.path
-# is modified to point to another version of ruamel.yaml.
-if "ruamel.yaml" in sys.modules:
-    del sys.modules["ruamel.yaml"]
-
-if "ruamel" in sys.modules:
-    del sys.modules["ruamel"]
-
-# The following code is here to avoid failures when updating
-# the develop version, due to spurious argparse.pyc files remaining
-# in the libs/spack/external directory, see:
-# https://github.com/spack/spack/pull/25376
-# TODO: Remove in v0.18.0 or later
-try:
-    import argparse
-except ImportError:
-    argparse_pyc = os.path.join(spack_external_libs, "argparse.pyc")
-    if not os.path.exists(argparse_pyc):
-        raise
-    try:
-        os.remove(argparse_pyc)
-        import argparse  # noqa: F401
-    except Exception:
-        msg = (
-            "The file\n\n\t{0}\n\nis corrupted and cannot be deleted by Spack. "
-            "Either delete it manually or ask some administrator to "
-            "delete it for you."
-        )
-        print(msg.format(argparse_pyc))
-        sys.exit(1)
-
-import spack.main  # noqa: E402
+from spack_installable.main import main  # noqa: E402
 
 # Once we've set up the system path, run the spack main method
 if __name__ == "__main__":
-    sys.exit(spack.main.main())
+    sys.exit(main())

View File

@@ -9,16 +9,15 @@ bootstrap:
   # may not be able to bootstrap all the software that Spack needs,
   # depending on its type.
   sources:
+  - name: 'github-actions-v0.4'
+    metadata: $spack/share/spack/bootstrap/github-actions-v0.4
   - name: 'github-actions-v0.3'
     metadata: $spack/share/spack/bootstrap/github-actions-v0.3
-  - name: 'github-actions-v0.2'
-    metadata: $spack/share/spack/bootstrap/github-actions-v0.2
-  - name: 'github-actions-v0.1'
-    metadata: $spack/share/spack/bootstrap/github-actions-v0.1
   - name: 'spack-install'
     metadata: $spack/share/spack/bootstrap/spack-install
   trusted:
     # By default we trust bootstrapping from sources and from binaries
     # produced on Github via the workflow
+    github-actions-v0.4: true
     github-actions-v0.3: true
     spack-install: true
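
Because sources are tried in order, listing ``github-actions-v0.4`` first makes it the preferred binary mirror, with v0.3 as a fallback. Assuming the ``list`` subcommand that accompanies ``enable``/``disable``, the resulting configuration can be inspected with:

    # show configured bootstrapping sources and whether each is enabled
    spack bootstrap list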

View File

@@ -33,4 +33,4 @@ concretizer:
   # environments can always be activated. When "false" perform concretization separately
   # on each root spec, allowing different versions and variants of the same package in
   # an environment.
-  unify: false
+  unify: true
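
With ``unify: true`` as the default, all root specs in an environment are concretized together into one compatible DAG. An environment that needs the old per-spec behavior can opt back out; a sketch using ``spack config add`` (the environment name ``myenv`` is hypothetical):

    # restore independent concretization for a single environment
    spack -e myenv config add concretizer:unify:false
    spack -e myenv concretize --force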

View File

@@ -191,10 +191,20 @@ config:
   package_lock_timeout: null
 
-  # Control whether Spack embeds RPATH or RUNPATH attributes in ELF binaries.
-  # Has no effect on macOS. DO NOT MIX these within the same install tree.
-  # See the Spack documentation for details.
-  shared_linking: 'rpath'
+  # Control how shared libraries are located at runtime on Linux. See the
+  # the Spack documentation for details.
+  shared_linking:
+    # Spack automatically embeds runtime search paths in ELF binaries for their
+    # dependencies. Their type can either be "rpath" or "runpath". For glibc, rpath is
+    # inherited and has precedence over LD_LIBRARY_PATH; runpath is not inherited
+    # and of lower precedence. DO NOT MIX these within the same install tree.
+    type: rpath
+    # (Experimental) Embed absolute paths of dependent libraries directly in ELF
+    # binaries to avoid runtime search. This can improve startup time of
+    # executables with many dependencies, in particular on slow filesystems.
+    bind: false
 
   # Set to 'false' to allow installation on filesystems that doesn't allow setgid bit
@@ -205,3 +215,7 @@ config:
   # building and installing packages. This gives information about Spack's
   # current progress as well as the current and total number of packages.
   terminal_title: false
+
+  # Number of seconds a buildcache's index.json is cached locally before probing
+  # for updates, within a single Spack invocation. Defaults to 10 minutes.
+  binary_index_ttl: 600
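
Whether a Spack-built binary ended up with an RPATH or a RUNPATH entry under ``shared_linking:type`` can be checked with generic ELF tooling (not part of this change; the binary path is a placeholder):

    # exactly one of RPATH or RUNPATH should appear in the dynamic section
    readelf -d /path/to/installed/binary | grep -E 'R(UN)?PATH'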

View File

@@ -27,7 +27,8 @@ packages:
     fuse: [libfuse]
     gl: [glx, osmesa]
     glu: [mesa-glu, openglu]
-    golang: [gcc]
+    golang: [go, gcc]
+    go-external-or-gccgo-bootstrap: [go-bootstrap, gcc]
     iconv: [libiconv]
     ipp: [intel-ipp]
     java: [openjdk, jdk, ibm-java]
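
With ``go`` listed before ``gcc``, the concretizer now prefers the reference Go toolchain over gccgo when a package depends on the ``golang`` virtual. The candidate providers can be listed with an existing command:

    # list packages that can provide the golang virtual
    spack providers golang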

View File

@@ -1,5 +1,5 @@
 config:
   locks: false
-  concretizer: original
+  concretizer: clingo
   build_stage::
   - '$spack/.staging'

View File

@@ -1 +0,0 @@
-../../..

View File

@@ -85,7 +85,7 @@ All packages whose names or descriptions contain documentation:
 To get more information on a particular package from `spack list`, use
 `spack info`. Just supply the name of a package:
 
-.. command-output:: spack info mpich
+.. command-output:: spack info --all mpich
 
 Most of the information is self-explanatory. The *safe versions* are
 versions that Spack knows the checksum for, and it will use the
@@ -998,11 +998,15 @@ More formally, a spec consists of the following pieces:
 * ``%`` Optional compiler specifier, with an optional compiler version
   (``gcc`` or ``gcc@4.7.3``)
 * ``+`` or ``-`` or ``~`` Optional variant specifiers (``+debug``,
-  ``-qt``, or ``~qt``) for boolean variants
+  ``-qt``, or ``~qt``) for boolean variants. Use ``++`` or ``--`` or
+  ``~~`` to propagate variants through the dependencies (``++debug``,
+  ``--qt``, or ``~~qt``).
 * ``name=<value>`` Optional variant specifiers that are not restricted to
-  boolean variants
+  boolean variants. Use ``name==<value>`` to propagate variant through the
+  dependencies.
 * ``name=<value>`` Optional compiler flag specifiers. Valid flag names are
   ``cflags``, ``cxxflags``, ``fflags``, ``cppflags``, ``ldflags``, and ``ldlibs``.
+  Use ``name==<value>`` to propagate compiler flags through the dependencies.
 * ``target=<value> os=<value>`` Optional architecture specifier
   (``target=haswell os=CNL10``)
 * ``^`` Dependency specs (``^callpath@1.1``)
@@ -1226,6 +1230,23 @@ variants using the backwards compatibility syntax and uses only ``~``
 for disabled boolean variants. The ``-`` and spaces on the command
 line are provided for convenience and legibility.
 
+Spack allows variants to propagate their value to the package's
+dependency by using ``++``, ``--``, and ``~~`` for boolean variants.
+For example, for a ``debug`` variant:
+
+.. code-block:: sh
+
+   mpileaks ++debug   # enabled debug will be propagated to dependencies
+   mpileaks +debug    # only mpileaks will have debug enabled
+
+To propagate the value of non-boolean variants Spack uses ``name==value``.
+For example, for the ``stackstart`` variant:
+
+.. code-block:: sh
+
+   mpileaks stackstart=4    # only mpileaks will have this variant value
+   mpileaks stackstart==4   # variant will be propagated to dependencies
+
 ^^^^^^^^^^^^^^
 Compiler Flags
 ^^^^^^^^^^^^^^
@@ -1233,10 +1254,15 @@ Compiler Flags
 Compiler flags are specified using the same syntax as non-boolean variants,
 but fulfill a different purpose. While the function of a variant is set by
 the package, compiler flags are used by the compiler wrappers to inject
-flags into the compile line of the build. Additionally, compiler flags are
-inherited by dependencies. ``spack install libdwarf cppflags="-g"`` will
-install both libdwarf and libelf with the ``-g`` flag injected into their
-compile line.
+flags into the compile line of the build. Additionally, compiler flags can
+be inherited by dependencies by using ``==``.
+``spack install libdwarf cppflags=="-g"`` will install both libdwarf and
+libelf with the ``-g`` flag injected into their compile line.
+
+.. note::
+
+   versions of spack prior to 0.19.0 will propagate compiler flags using
+   the ``=`` syntax.
 
 Notice that the value of the compiler flags must be quoted if it
 contains any spaces. Any of ``cppflags=-O3``, ``cppflags="-O3"``,
@@ -1438,7 +1464,7 @@ built.
 You can see what virtual packages a particular package provides by
 getting info on it:
 
-.. command-output:: spack info mpich
+.. command-output:: spack info --virtuals mpich
 
 Spack is unique in that its virtual packages can be versioned, just
 like regular packages. A particular version of a package may provide

View File

@@ -15,15 +15,13 @@ is an entire command dedicated to the management of every aspect of bootstrapping:
 .. command-output:: spack bootstrap --help
 
-The first thing to know to understand bootstrapping in Spack is that each of
-Spack's dependencies is bootstrapped lazily; i.e. the first time it is needed and
-can't be found. You can readily check if any prerequisite for using Spack
-is missing by running:
+Spack is configured to bootstrap its dependencies lazily by default; i.e. the first time they are needed and
+can't be found. You can readily check if any prerequisite for using Spack is missing by running:
 
 .. code-block:: console
 
    % spack bootstrap status
-   Spack v0.17.1 - python@3.8
+   Spack v0.19.0 - python@3.8
 
    [FAIL] Core Functionalities
     [B] MISSING "clingo": required to concretize specs
@@ -48,6 +46,21 @@ they can be bootstrapped. Running a command that concretize a spec, like:
 triggers the bootstrapping of clingo from pre-built binaries as expected.
 
+Users can also bootstrap all the dependencies needed by Spack in a single command, which
+might be useful to setup containers or other similar environments:
+
+.. code-block:: console
+
+   $ spack bootstrap now
+   ==> Bootstrapping clingo from pre-built binaries
+   ==> Fetching https://mirror.spack.io/bootstrap/github-actions/v0.3/build_cache/linux-centos7-x86_64-gcc-10.2.1-clingo-bootstrap-spack-shqedxgvjnhiwdcdrvjhbd73jaevv7wt.spec.json
+   ==> Fetching https://mirror.spack.io/bootstrap/github-actions/v0.3/build_cache/linux-centos7-x86_64/gcc-10.2.1/clingo-bootstrap-spack/linux-centos7-x86_64-gcc-10.2.1-clingo-bootstrap-spack-shqedxgvjnhiwdcdrvjhbd73jaevv7wt.spack
+   ==> Installing "clingo-bootstrap@spack%gcc@10.2.1~docs~ipo+python+static_libstdcpp build_type=Release arch=linux-centos7-x86_64" from a buildcache
+   ==> Bootstrapping patchelf from pre-built binaries
+   ==> Fetching https://mirror.spack.io/bootstrap/github-actions/v0.3/build_cache/linux-centos7-x86_64-gcc-10.2.1-patchelf-0.15.0-htk62k7efo2z22kh6kmhaselru7bfkuc.spec.json
+   ==> Fetching https://mirror.spack.io/bootstrap/github-actions/v0.3/build_cache/linux-centos7-x86_64/gcc-10.2.1/patchelf-0.15.0/linux-centos7-x86_64-gcc-10.2.1-patchelf-0.15.0-htk62k7efo2z22kh6kmhaselru7bfkuc.spack
+   ==> Installing "patchelf@0.15.0%gcc@10.2.1 ldflags="-static-libstdc++ -static-libgcc" arch=linux-centos7-x86_64" from a buildcache
+
 -----------------------
 The Bootstrapping store
 -----------------------
@@ -107,19 +120,19 @@ If need be, you can disable bootstrapping altogether by running:
 in which case it's your responsibility to ensure Spack runs in an
 environment where all its prerequisites are installed. You can
-also configure Spack to skip certain bootstrapping methods by *untrusting*
-them. For instance:
+also configure Spack to skip certain bootstrapping methods by disabling
+them specifically:
 
 .. code-block:: console
 
-   % spack bootstrap untrust github-actions
-   ==> "github-actions" is now untrusted and will not be used for bootstrapping
+   % spack bootstrap disable github-actions
+   ==> "github-actions" is now disabled and will not be used for bootstrapping
 
 tells Spack to skip trying to bootstrap from binaries. To add the "github-actions" method back you can:
 
 .. code-block:: console
 
-   % spack bootstrap trust github-actions
+   % spack bootstrap enable github-actions
 
 There is also an option to reset the bootstrapping configuration to Spack's defaults:

View File

@@ -302,88 +302,31 @@ microarchitectures considered during the solve are constrained to be compatible
 host Spack is currently running on. For instance, if this option is set to ``true``, a
 user cannot concretize for ``target=icelake`` while running on an Haswell node.
 
-.. _package-preferences:
-
--------------------
-Package Preferences
--------------------
-
-Spack can be configured to prefer certain compilers, package
-versions, dependencies, and variants during concretization.
-The preferred configuration can be controlled via the
-``~/.spack/packages.yaml`` file for user configurations, or the
-``etc/spack/packages.yaml`` site configuration.
-
-Here's an example ``packages.yaml`` file that sets preferred packages:
-
-.. code-block:: yaml
-
-   packages:
-     opencv:
-       compiler: [gcc@4.9]
-       variants: +debug
-     gperftools:
-       version: [2.2, 2.4, 2.3]
-     all:
-       compiler: [gcc@4.4.7, 'gcc@4.6:', intel, clang, pgi]
-       target: [sandybridge]
-       providers:
-         mpi: [mvapich2, mpich, openmpi]
-
-At a high level, this example is specifying how packages should be
-concretized. The opencv package should prefer using GCC 4.9 and
-be built with debug options. The gperftools package should prefer version
-2.2 over 2.4. Every package on the system should prefer mvapich2 for
-its MPI and GCC 4.4.7 (except for opencv, which overrides this by preferring GCC 4.9).
-These options are used to fill in implicit defaults. Any of them can be overwritten
-on the command line if explicitly requested.
-
-Each ``packages.yaml`` file begins with the string ``packages:`` and
-package names are specified on the next level. The special string ``all``
-applies settings to *all* packages. Underneath each package name is one
-or more components: ``compiler``, ``variants``, ``version``,
-``providers``, and ``target``. Each component has an ordered list of
-spec ``constraints``, with earlier entries in the list being preferred
-over later entries.
-
-Sometimes a package installation may have constraints that forbid
-the first concretization rule, in which case Spack will use the first
-legal concretization rule. Going back to the example, if a user
-requests gperftools 2.3 or later, then Spack will install version 2.4
-as the 2.4 version of gperftools is preferred over 2.3.
-
-An explicit concretization rule in the preferred section will always
-take preference over unlisted concretizations. In the above example,
-xlc isn't listed in the compiler list. Every listed compiler from
-gcc to pgi will thus be preferred over the xlc compiler.
-
-The syntax for the ``provider`` section differs slightly from other
-concretization rules. A provider lists a value that packages may
-``depend_on`` (e.g, MPI) and a list of rules for fulfilling that
-dependency.
-
 .. _package-requirements:
 
 --------------------
 Package Requirements
 --------------------
 
-You can use the configuration to force the concretizer to choose
-specific properties for packages when building them. Like preferences,
-these are only applied when the package is required by some other
-request (e.g. if the package is needed as a dependency of a
-request to ``spack install``).
-
-An example of where this is useful is if you have a package that
-is normally built as a dependency but only under certain circumstances
-(e.g. only when a variant on a dependent is active): you can make
-sure that it always builds the way you want it to; this distinguishes
-package configuration requirements from constraints that you add to
-``spack install`` or to environments (in those cases, the associated
-packages are always built).
-
-The following is an example of how to enforce package properties in
-``packages.yaml``:
+Spack can be configured to always use certain compilers, package
+versions, and variants during concretization through package
+requirements.
+
+Package requirements are useful when you find yourself repeatedly
+specifying the same constraints on the command line, and wish that
+Spack respects these constraints whether you mention them explicitly
+or not. Another use case is specifying constraints that should apply
+to all root specs in an environment, without having to repeat the
+constraint everywhere.
+
+Apart from that, requirements config is more flexible than constraints
+on the command line, because it can specify constraints on packages
+*when they occur* as a dependency. In contrast, on the command line it
+is not possible to specify constraints on dependencies while also keeping
+those dependencies optional.
+
+The package requirements configuration is specified in ``packages.yaml``
+keyed by package name:
 
 .. code-block:: yaml
@@ -452,15 +395,15 @@ under ``all`` are disregarded. For example, with a configuration like this:
    cmake:
      require: '%gcc'
 
-Spack requires ``cmake`` to use ``gcc`` and all other nodes (including cmake dependencies)
-to use ``clang``.
+Spack requires ``cmake`` to use ``gcc`` and all other nodes (including ``cmake``
+dependencies) to use ``clang``.
 
 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 Setting requirements on virtual specs
 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 
-A requirement on a virtual spec applies whenever that virtual is present in the DAG. This
-can be useful for fixing which virtual provider you want to use:
+A requirement on a virtual spec applies whenever that virtual is present in the DAG.
+This can be useful for fixing which virtual provider you want to use:
 
 .. code-block:: yaml
@@ -470,8 +413,8 @@ can be useful for fixing which virtual provider you want to use:
 
 With the configuration above the only allowed ``mpi`` provider is ``mvapich2 %gcc``.
 
-Requirements on the virtual spec and on the specific provider are both applied, if present. For
-instance with a configuration like:
+Requirements on the virtual spec and on the specific provider are both applied, if
+present. For instance with a configuration like:
 
 .. code-block:: yaml
@@ -483,6 +426,66 @@ instance with a configuration like:
 
 you will use ``mvapich2~cuda %gcc`` as an ``mpi`` provider.
 
+.. _package-preferences:
+
+-------------------
+Package Preferences
+-------------------
+
+In some cases package requirements can be too strong, and package
+preferences are the better option. Package preferences do not impose
+constraints on packages for particular versions or variants values,
+they rather only set defaults -- the concretizer is free to change
+them if it must due to other constraints. Also note that package
+preferences are of lower priority than reuse of already installed
+packages.
+
+Here's an example ``packages.yaml`` file that sets preferred packages:
+
+.. code-block:: yaml
+
+   packages:
+     opencv:
+       compiler: [gcc@4.9]
+       variants: +debug
+     gperftools:
+       version: [2.2, 2.4, 2.3]
+     all:
+       compiler: [gcc@4.4.7, 'gcc@4.6:', intel, clang, pgi]
+       target: [sandybridge]
+       providers:
+         mpi: [mvapich2, mpich, openmpi]
+
+At a high level, this example is specifying how packages are preferably
+concretized. The opencv package should prefer using GCC 4.9 and
+be built with debug options. The gperftools package should prefer version
+2.2 over 2.4. Every package on the system should prefer mvapich2 for
+its MPI and GCC 4.4.7 (except for opencv, which overrides this by preferring GCC 4.9).
+These options are used to fill in implicit defaults. Any of them can be overwritten
+on the command line if explicitly requested.
+
+Package preferences accept the following keys or components under
+the specific package (or ``all``) section: ``compiler``, ``variants``,
+``version``, ``providers``, and ``target``. Each component has an
+ordered list of spec ``constraints``, with earlier entries in the
+list being preferred over later entries.
+
+Sometimes a package installation may have constraints that forbid
+the first concretization rule, in which case Spack will use the first
+legal concretization rule. Going back to the example, if a user
+requests gperftools 2.3 or later, then Spack will install version 2.4
+as the 2.4 version of gperftools is preferred over 2.3.
+
+An explicit concretization rule in the preferred section will always
+take preference over unlisted concretizations. In the above example,
+xlc isn't listed in the compiler list. Every listed compiler from
+gcc to pgi will thus be preferred over the xlc compiler.
+
+The syntax for the ``provider`` section differs slightly from other
+concretization rules. A provider lists a value that packages may
+``depends_on`` (e.g., MPI) and a list of rules for fulfilling that
+dependency.
+
 .. _package_permissions:
 
 -------------------
@@ -531,3 +534,25 @@ directories inside the install prefix. This will ensure that even
 manually placed files within the install prefix are owned by the
 assigned group. If no group is assigned, Spack will allow the OS
 default behavior to go as expected.
+
+----------------------------
+Assigning Package Attributes
+----------------------------
+
+You can assign class-level attributes in the configuration:
+
+.. code-block:: yaml
+
+   packages:
+     mpileaks:
+       # Override existing attributes
+       url: http://www.somewhereelse.com/mpileaks-1.0.tar.gz
+       # ... or add new ones
+       x: 1
+
+Attributes set this way will be accessible to any method executed
+in the package.py file (e.g. the ``install()`` method). Values for these
+attributes may be any value parseable by yaml.
+
+These can only be applied to specific packages, not "all" or
+virtual packages.

View File

@@ -65,7 +65,6 @@ on these ideas for each distinct build system that Spack supports:
    build_systems/custompackage
    build_systems/inteloneapipackage
    build_systems/intelpackage
-   build_systems/multiplepackage
    build_systems/rocmpackage
    build_systems/sourceforgepackage

View File

@@ -5,9 +5,9 @@
 .. _autotoolspackage:
 
-----------------
-AutotoolsPackage
-----------------
+---------
+Autotools
+---------
 
 Autotools is a GNU build system that provides a build-script generator.
 By running the platform-independent ``./configure`` script that comes
@@ -17,7 +17,7 @@ with the package, you can generate a platform-dependent Makefile.
 Phases
 ^^^^^^
 
-The ``AutotoolsPackage`` base class comes with the following phases:
+The ``AutotoolsBuilder`` and ``AutotoolsPackage`` base classes come with the following phases:
 
 #. ``autoreconf`` - generate the configure script
 #. ``configure`` - generate the Makefiles

View File

@@ -5,9 +5,9 @@
 .. _bundlepackage:
 
--------------
-BundlePackage
--------------
+------
+Bundle
+------
 
 ``BundlePackage`` represents a set of packages that are expected to work well
 together, such as a collection of commonly used software libraries. The

View File

@@ -5,9 +5,9 @@
 .. _cmakepackage:
 
-------------
-CMakePackage
-------------
+-----
+CMake
+-----
 
 Like Autotools, CMake is a widely-used build-script generator. Designed
 by Kitware, CMake is the most popular build system for new C, C++, and
@@ -21,7 +21,7 @@ whereas Autotools is Unix-only.
 Phases
 ^^^^^^
 
-The ``CMakePackage`` base class comes with the following phases:
+The ``CMakeBuilder`` and ``CMakePackage`` base classes come with the following phases:
 
 #. ``cmake`` - generate the Makefile
 #. ``build`` - build the package
@@ -130,8 +130,8 @@ Adding flags to cmake
 To add additional flags to the ``cmake`` call, simply override the
 ``cmake_args`` function. The following example defines values for the flags
 ``WHATEVER``, ``ENABLE_BROKEN_FEATURE``, ``DETECT_HDF5``, and ``THREADS`` with
-and without the :meth:`~spack.build_systems.cmake.CMakePackage.define` and
-:meth:`~spack.build_systems.cmake.CMakePackage.define_from_variant` helper functions:
+and without the :meth:`~spack.build_systems.cmake.CMakeBuilder.define` and
+:meth:`~spack.build_systems.cmake.CMakeBuilder.define_from_variant` helper functions:
 
 .. code-block:: python

View File

@@ -32,7 +32,7 @@ oneAPI packages or use::
 
 For more information on a specific package, do::
 
-   spack info <package-name>
+   spack info --all <package-name>
 
 Intel no longer releases new versions of Parallel Studio, which can be
 used in Spack via the :ref:`intelpackage`. All of its components can

View File

@@ -5,11 +5,11 @@
 .. _luapackage:
 
-------------
-LuaPackage
-------------
+---
+Lua
+---
 
-LuaPackage is a helper for the common case of Lua packages that provide
+The ``Lua`` build-system is a helper for the common case of Lua packages that provide
 a rockspec file. This is not meant to take a rock archive, but to build
 a source archive or repository that provides a rockspec, which should cover
 most lua packages. In the case a Lua package builds by Make rather than
@@ -19,7 +19,7 @@ luarocks, prefer MakefilePackage.
 Phases
 ^^^^^^
 
-The ``LuaPackage`` base class comes with the following phases:
+The ``LuaBuilder`` and ``LuaPackage`` base classes come with the following phases:
 
 #. ``unpack`` - if using a rock, unpacks the rock and moves into the source directory
 #. ``preprocess`` - adjust sources or rockspec to fix build

View File

@@ -5,9 +5,9 @@
 .. _makefilepackage:
 
----------------
-MakefilePackage
----------------
+--------
+Makefile
+--------
 
 The most primitive build system a package can use is a plain Makefile.
 Makefiles are simple to write for small projects, but they usually
@@ -18,7 +18,7 @@ variables.
 Phases
 ^^^^^^
 
-The ``MakefilePackage`` base class comes with 3 phases:
+The ``MakefileBuilder`` and ``MakefilePackage`` base classes come with 3 phases:
 
 #. ``edit`` - edit the Makefile
 #. ``build`` - build the project

View File

@@ -5,9 +5,9 @@
 .. _mavenpackage:
 
-------------
-MavenPackage
-------------
+-----
+Maven
+-----
 
 Apache Maven is a general-purpose build system that does not rely
 on Makefiles to build software. It is designed for building and
@@ -17,7 +17,7 @@ managing and Java-based project.
 Phases
 ^^^^^^
 
-The ``MavenPackage`` base class comes with the following phases:
+The ``MavenBuilder`` and ``MavenPackage`` base classes come with the following phases:
 
 #. ``build`` - compile code and package into a JAR file
 #. ``install`` - copy to installation prefix

View File

@@ -5,9 +5,9 @@
 .. _mesonpackage:
 
-------------
-MesonPackage
-------------
+-----
+Meson
+-----
 
 Much like Autotools and CMake, Meson is a build system. But it is
 meant to be both fast and as user friendly as possible. GNOME's goal
@@ -17,7 +17,7 @@ is to port modules to use the Meson build system.
 Phases
 ^^^^^^
 
-The ``MesonPackage`` base class comes with the following phases:
+The ``MesonBuilder`` and ``MesonPackage`` base classes come with the following phases:
 
 #. ``meson`` - generate ninja files
 #. ``build`` - build the project

View File

@ -1,350 +0,0 @@
.. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
Spack Project Developers. See the top-level COPYRIGHT file for details.
SPDX-License-Identifier: (Apache-2.0 OR MIT)
.. _multiplepackage:
----------------------
Multiple Build Systems
----------------------
Quite frequently, a package will change build systems from one version to the
next. For example, a small project that once used a single Makefile to build
may now require Autotools to handle the increased number of files that need to
be compiled. Or, a package that once used Autotools may switch to CMake for
Windows support. In this case, it becomes a bit more challenging to write a
single build recipe for this package in Spack.
There are several ways that this can be handled in Spack:
#. Subclass the new build system, and override phases as needed (preferred)
#. Subclass ``Package`` and implement ``install`` as needed
#. Create separate ``*-cmake``, ``*-autotools``, etc. packages for each build system
#. Rename the old package to ``*-legacy`` and create a new package
#. Move the old package to a ``legacy`` repository and create a new package
#. Drop older versions that only support the older build system
Of these options, 1 is preferred, and will be demonstrated in this
documentation. Options 3-5 have issues with concretization, so shouldn't be
used. Options 4-5 also don't support more than two build systems. Option 6 only
works if the old versions are no longer needed. Option 1 is preferred over 2
because it makes it easier to drop the old build system entirely.
The exact syntax of the package depends on which build systems you need to
support. Below are a couple of common examples.
^^^^^^^^^^^^^^^^^^^^^
Makefile -> Autotools
^^^^^^^^^^^^^^^^^^^^^
Let's say we have the following package:
.. code-block:: python
class Foo(MakefilePackage):
version("1.2.0", sha256="...")
def edit(self, spec, prefix):
filter_file("CC=", "CC=" + spack_cc, "Makefile")
def install(self, spec, prefix):
install_tree(".", prefix)
The package subclasses from :ref:`makefilepackage`, which has three phases:
#. ``edit`` (does nothing by default)
#. ``build`` (runs ``make`` by default)
#. ``install`` (runs ``make install`` by default)
In this case, the ``install`` phase needed to be overridden because the
Makefile did not have an install target. We also modify the Makefile to use
Spack's compiler wrappers. The default ``build`` phase is not changed.
Starting with version 1.3.0, we want to use Autotools to build instead.
:ref:`autotoolspackage` has four phases:
#. ``autoreconf`` (does not if a configure script already exists)
#. ``configure`` (runs ``./configure --prefix=...`` by default)
#. ``build`` (runs ``make`` by default)
#. ``install`` (runs ``make install`` by default)
If the only version we need to support is 1.3.0, the package would look as
simple as:
.. code-block:: python
class Foo(AutotoolsPackage):
version("1.3.0", sha256="...")
def configure_args(self):
return ["--enable-shared"]
In this case, we use the default methods for each phase and only override
``configure_args`` to specify additional flags to pass to ``./configure``.
If we wanted to write a single package that supports both versions 1.2.0 and
1.3.0, it would look something like:
.. code-block:: python
class Foo(AutotoolsPackage):
version("1.3.0", sha256="...")
version("1.2.0", sha256="...", deprecated=True)
def configure_args(self):
return ["--enable-shared"]
# Remove the following once version 1.2.0 is dropped
@when("@:1.2")
def patch(self):
filter_file("CC=", "CC=" + spack_cc, "Makefile")
@when("@:1.2")
def autoreconf(self, spec, prefix):
pass
@when("@:1.2")
def configure(self, spec, prefix):
pass
@when("@:1.2")
def install(self, spec, prefix):
install_tree(".", prefix)
There are a few interesting things to note here:
* We added ``deprecated=True`` to version 1.2.0. This signifies that version
1.2.0 is deprecated and shouldn't be used. However, if a user still relies
on version 1.2.0, it's still there and builds just fine.
* We moved the contents of the ``edit`` phase to the ``patch`` function. Since
``AutotoolsPackage`` doesn't have an ``edit`` phase, the only way for this
step to be executed is to move it to the ``patch`` function, which always
gets run.
* The ``autoreconf`` and ``configure`` phases become no-ops. Since the old
Makefile-based build system doesn't use these, we ignore these phases when
building ``foo@1.2.0``.
* The ``@when`` decorator is used to override these phases only for older
versions. The default methods are used for ``foo@1.3:``.
Once a new Spack release comes out, version 1.2.0 and everything below the
comment can be safely deleted. The result is the same as if we had written a
package for version 1.3.0 from scratch.
^^^^^^^^^^^^^^^^^^
Autotools -> CMake
^^^^^^^^^^^^^^^^^^
Let's say we have the following package:
.. code-block:: python
class Bar(AutotoolsPackage):
version("1.2.0", sha256="...")
def configure_args(self):
return ["--enable-shared"]
The package subclasses from :ref:`autotoolspackage`, which has four phases:
#. ``autoreconf`` (does nothing if a configure script already exists)
#. ``configure`` (runs ``./configure --prefix=...`` by default)
#. ``build`` (runs ``make`` by default)
#. ``install`` (runs ``make install`` by default)
In this case, we use the default methods for each phase and only override
``configure_args`` to specify additional flags to pass to ``./configure``.
Starting with version 1.3.0, we want to use CMake to build instead.
:ref:`cmakepackage` has three phases:
#. ``cmake`` (runs ``cmake ...`` by default)
#. ``build`` (runs ``make`` by default)
#. ``install`` (runs ``make install`` by default)
If the only version we need to support is 1.3.0, the package would look as
simple as:
.. code-block:: python
class Bar(CMakePackage):
version("1.3.0", sha256="...")
def cmake_args(self):
return [self.define("BUILD_SHARED_LIBS", True)]
In this case, we use the default methods for each phase and only override
``cmake_args`` to specify additional flags to pass to ``cmake``.
If we wanted to write a single package that supports both versions 1.2.0 and
1.3.0, it would look something like:
.. code-block:: python
class Bar(CMakePackage):
version("1.3.0", sha256="...")
version("1.2.0", sha256="...", deprecated=True)
def cmake_args(self):
return [self.define("BUILD_SHARED_LIBS", True)]
# Remove the following once version 1.2.0 is dropped
def configure_args(self):
return ["--enable-shared"]
@when("@:1.2")
def cmake(self, spec, prefix):
configure("--prefix=" + prefix, *self.configure_args())
There are a few interesting things to note here:
* We added ``deprecated=True`` to version 1.2.0. This signifies that version
1.2.0 is deprecated and shouldn't be used. However, if a user still relies
on version 1.2.0, it's still there and builds just fine.
* Since CMake and Autotools are so similar, we only need to override the
``cmake`` phase; we can use the default ``build`` and ``install`` phases.
* We override ``cmake`` to run ``./configure`` for older versions.
``configure_args`` remains the same.
* The ``@when`` decorator is used to override these phases only for older
versions. The default methods are used for ``bar@1.3:``.
Once a new Spack release comes out, version 1.2.0 and everything below the
comment can be safely deleted. The result is the same as if we had written a
package for version 1.3.0 from scratch.
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Multiple build systems for the same version
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
During the transition from one build system to another, developers often
support multiple build systems at the same time. Spack can only use a single
build system for a single version. To decide which build system to use for a
particular version, take the following things into account:
1. If the developers explicitly state that one build system is preferred over
another, use that one.
2. If one build system is considered "experimental" while another is considered
"stable", use the stable build system.
3. Otherwise, use the newer build system.
The developer preference for which build system to use can change over time as
a newer build system becomes stable/recommended.
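For instance, consider a hypothetical ``quux`` package (names and versions are illustrative) whose 1.3.0 release ships both build systems while upstream still labels its CMake support experimental. Following criterion 2 above, we keep the Autotools path for 1.3.0 and older:
.. code-block:: python

   class Quux(CMakePackage):
       version("1.4.0", sha256="...")  # CMake declared stable by upstream
       version("1.3.0", sha256="...")  # ships CMake too, but still experimental
       version("1.2.0", sha256="...")  # Autotools only

       def cmake_args(self):
           return [self.define("BUILD_SHARED_LIBS", True)]

       # Remove once 1.3.0 and older are dropped
       def configure_args(self):
           return ["--enable-shared"]

       @when("@:1.3")
       def cmake(self, spec, prefix):
           configure("--prefix=" + prefix, *self.configure_args())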
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Dropping support for old build systems
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
When older versions of a package don't support a newer build system, it can be
tempting to simply delete them from a package. This significantly reduces
package complexity and makes the build recipe much easier to maintain. However,
other packages or Spack users may rely on these older versions. The recommended
approach is to first support both build systems (as demonstrated above),
:ref:`deprecate <deprecate>` versions that rely on the old build system, and
remove those versions and any phases that needed to be overridden in the next
Spack release.
^^^^^^^^^^^^^^^^^^^^^^^^^^^
Three or more build systems
^^^^^^^^^^^^^^^^^^^^^^^^^^^
In rare cases, a package may change build systems multiple times. For example,
a package may start with Makefiles, then switch to Autotools, then switch to
CMake. The same logic used above can be extended to any number of build systems.
For example:
.. code-block:: python
class Baz(CMakePackage):
version("1.4.0", sha256="...") # CMake
version("1.3.0", sha256="...") # Autotools
version("1.2.0", sha256="...") # Makefile
def cmake_args(self):
return [self.define("BUILD_SHARED_LIBS", True)]
# Remove the following once version 1.3.0 is dropped
def configure_args(self):
return ["--enable-shared"]
@when("@1.3")
def cmake(self, spec, prefix):
configure("--prefix=" + prefix, *self.configure_args())
# Remove the following once version 1.2.0 is dropped
@when("@:1.2")
def patch(self):
filter_file("CC=", "CC=" + spack_cc, "Makefile")
@when("@:1.2")
def cmake(self, spec, prefix):
pass
@when("@:1.2")
def install(self, spec, prefix):
install_tree(".", prefix)
^^^^^^^^^^^^^^^^^^^
Additional examples
^^^^^^^^^^^^^^^^^^^
When writing new packages, it often helps to see examples of existing packages.
Here is an incomplete list of existing Spack packages that have changed build
systems before:
================ ===================== ================
Package Previous Build System New Build System
================ ===================== ================
amber custom CMake
arpack-ng Autotools CMake
atk Autotools Meson
blast None Autotools
dyninst Autotools CMake
evtgen Autotools CMake
fish Autotools CMake
gdk-pixbuf Autotools Meson
glib Autotools Meson
glog Autotools CMake
gmt Autotools CMake
gtkplus Autotools Meson
hpl Makefile Autotools
interproscan Perl Maven
jasper Autotools CMake
kahip SCons CMake
kokkos Makefile CMake
kokkos-kernels Makefile CMake
leveldb Makefile CMake
libdrm Autotools Meson
libjpeg-turbo Autotools CMake
mesa Autotools Meson
metis None CMake
mpifileutils Autotools CMake
muparser Autotools CMake
mxnet Makefile CMake
nest Autotools CMake
neuron Autotools CMake
nsimd CMake nsconfig
opennurbs Makefile CMake
optional-lite None CMake
plasma Makefile CMake
preseq Makefile Autotools
protobuf Autotools CMake
py-pygobject Autotools Python
singularity Autotools Makefile
span-lite None CMake
ssht Makefile CMake
string-view-lite None CMake
superlu Makefile CMake
superlu-dist Makefile CMake
uncrustify Autotools CMake
================ ===================== ================
Packages that support multiple build systems can be a bit confusing to write.
Don't hesitate to open an issue or draft pull request and ask for advice from
other Spack developers!

View File

@ -5,9 +5,9 @@
.. _octavepackage: .. _octavepackage:
------------- ------
OctavePackage Octave
------------- ------
Octave has its own build system for installing packages. Octave has its own build system for installing packages.
@ -15,7 +15,7 @@ Octave has its own build system for installing packages.
Phases Phases
^^^^^^ ^^^^^^
The ``OctavePackage`` base class has a single phase: The ``OctaveBuilder`` and ``OctavePackage`` base classes have a single phase:
#. ``install`` - install the package #. ``install`` - install the package

View File

@ -5,9 +5,9 @@
.. _perlpackage: .. _perlpackage:
----------- ----
PerlPackage Perl
----------- ----
Much like Octave, Perl has its own language-specific Much like Octave, Perl has its own language-specific
build system. build system.
@ -16,7 +16,7 @@ build system.
Phases Phases
^^^^^^ ^^^^^^
The ``PerlPackage`` base class comes with 3 phases that can be overridden: The ``PerlBuilder`` and ``PerlPackage`` base classes come with 3 phases that can be overridden:
#. ``configure`` - configure the package #. ``configure`` - configure the package
#. ``build`` - build the package #. ``build`` - build the package

View File

@ -5,9 +5,9 @@
.. _qmakepackage: .. _qmakepackage:
------------ -----
QMakePackage QMake
------------ -----
Much like Autotools and CMake, QMake is a build-script generator Much like Autotools and CMake, QMake is a build-script generator
designed by the developers of Qt. In its simplest form, Spack's designed by the developers of Qt. In its simplest form, Spack's
@ -29,7 +29,7 @@ variables or edit ``*.pro`` files to get things working properly.
Phases Phases
^^^^^^ ^^^^^^
The ``QMakePackage`` base class comes with the following phases: The ``QMakeBuilder`` and ``QMakePackage`` base classes come with the following phases:
#. ``qmake`` - generate Makefiles #. ``qmake`` - generate Makefiles
#. ``build`` - build the project #. ``build`` - build the project

View File

@ -5,9 +5,9 @@
.. _racketpackage: .. _racketpackage:
------------- ------
RacketPackage Racket
------------- ------
Much like Python, Racket packages and modules have their own special build system. Much like Python, Racket packages and modules have their own special build system.
To learn more about the specifics of the Racket package system, please refer to the To learn more about the specifics of the Racket package system, please refer to the
@ -17,7 +17,7 @@ To learn more about the specifics of Racket package system, please refer to the
Phases Phases
^^^^^^ ^^^^^^
The ``RacketPackage`` base class provides an ``install`` phase that The ``RacketBuilder`` and ``RacketPackage`` base classes provide an ``install`` phase that
can be overridden, corresponding to the use of: can be overridden, corresponding to the use of:
.. code-block:: console .. code-block:: console

View File

@ -19,7 +19,7 @@ new Spack packages for.
Phases Phases
^^^^^^ ^^^^^^
The ``RPackage`` base class has a single phase: The ``RBuilder`` and ``RPackage`` base classes have a single phase:
#. ``install`` - install the package #. ``install`` - install the package

View File

@ -5,9 +5,9 @@
.. _rubypackage: .. _rubypackage:
----------- ----
RubyPackage Ruby
----------- ----
Like Perl, Python, and R, Ruby has its own build system for Like Perl, Python, and R, Ruby has its own build system for
installing Ruby gems. installing Ruby gems.
@ -16,7 +16,7 @@ installing Ruby gems.
Phases Phases
^^^^^^ ^^^^^^
The ``RubyPackage`` base class provides the following phases that The ``RubyBuilder`` and ``RubyPackage`` base classes provide the following phases that
can be overridden: can be overridden:
#. ``build`` - build everything needed to install #. ``build`` - build everything needed to install

View File

@ -5,9 +5,9 @@
.. _sconspackage: .. _sconspackage:
------------ -----
SConsPackage SCons
------------ -----
SCons is a general-purpose build system that does not rely on SCons is a general-purpose build system that does not rely on
Makefiles to build software. SCons is written in Python, and handles Makefiles to build software. SCons is written in Python, and handles
@ -42,7 +42,7 @@ As previously mentioned, SCons allows developers to add subcommands like
$ scons install $ scons install
To facilitate this, the ``SConsPackage`` base class provides the To facilitate this, the ``SConsBuilder`` and ``SConsPackage`` base classes provide the
following phases: following phases:
#. ``build`` - build the package #. ``build`` - build the package

View File

@ -5,9 +5,9 @@
.. _sippackage: .. _sippackage:
---------- ---
SIPPackage SIP
---------- ---
SIP is a tool that makes it very easy to create Python bindings for C and C++ SIP is a tool that makes it very easy to create Python bindings for C and C++
libraries. It was originally developed to create PyQt, the Python bindings for libraries. It was originally developed to create PyQt, the Python bindings for
@ -22,7 +22,7 @@ provides support functions to the automatically generated code.
Phases Phases
^^^^^^ ^^^^^^
The ``SIPPackage`` base class comes with the following phases: The ``SIPBuilder`` and ``SIPPackage`` base classes come with the following phases:
#. ``configure`` - configure the package #. ``configure`` - configure the package
#. ``build`` - build the package #. ``build`` - build the package

View File

@ -5,9 +5,9 @@
.. _wafpackage: .. _wafpackage:
---------- ---
WafPackage Waf
---------- ---
Like SCons, Waf is a general-purpose build system that does not rely Like SCons, Waf is a general-purpose build system that does not rely
on Makefiles to build software. on Makefiles to build software.
@ -16,7 +16,7 @@ on Makefiles to build software.
Phases Phases
^^^^^^ ^^^^^^
The ``WafPackage`` base class comes with the following phases: The ``WafBuilder`` and ``WafPackage`` base classes come with the following phases:
#. ``configure`` - configure the project #. ``configure`` - configure the project
#. ``build`` - build the project #. ``build`` - build the project

View File

@ -32,6 +32,9 @@
# If extensions (or modules to document with autodoc) are in another directory, # If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the # add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here. # documentation root, use os.path.abspath to make it absolute, like shown here.
link_name = os.path.abspath("_spack_root")
if not os.path.exists(link_name):
os.symlink(os.path.abspath("../../.."), link_name, target_is_directory=True)
sys.path.insert(0, os.path.abspath("_spack_root/lib/spack/external")) sys.path.insert(0, os.path.abspath("_spack_root/lib/spack/external"))
sys.path.insert(0, os.path.abspath("_spack_root/lib/spack/external/pytest-fallback")) sys.path.insert(0, os.path.abspath("_spack_root/lib/spack/external/pytest-fallback"))
@ -206,6 +209,9 @@ def setup(sphinx):
# Spack classes that are private and we don't want to expose # Spack classes that are private and we don't want to expose
("py:class", "spack.provider_index._IndexBase"), ("py:class", "spack.provider_index._IndexBase"),
("py:class", "spack.repo._PrependFileLoader"), ("py:class", "spack.repo._PrependFileLoader"),
("py:class", "spack.build_systems._checks.BaseBuilder"),
# Spack classes that intersphinx is unable to resolve
("py:class", "spack.version.VersionBase"),
] ]
# The reST default role (used for this markup: `text`) to use for all documents. # The reST default role (used for this markup: `text`) to use for all documents.

View File

@ -224,9 +224,9 @@ them). Please note that we currently disable ccache's ``hash_dir``
feature to avoid an issue with the stage directory (see feature to avoid an issue with the stage directory (see
https://github.com/LLNL/spack/pull/3761#issuecomment-294352232). https://github.com/LLNL/spack/pull/3761#issuecomment-294352232).
------------------ -----------------------
``shared_linking`` ``shared_linking:type``
------------------ -----------------------
Control whether Spack embeds ``RPATH`` or ``RUNPATH`` attributes in ELF binaries Control whether Spack embeds ``RPATH`` or ``RUNPATH`` attributes in ELF binaries
so that they can find their dependencies. Has no effect on macOS. so that they can find their dependencies. Has no effect on macOS.
@ -245,6 +245,52 @@ the loading object.
DO NOT MIX the two options within the same install tree. DO NOT MIX the two options within the same install tree.
-----------------------
``shared_linking:bind``
-----------------------
This is an *experimental option* that controls whether Spack embeds absolute paths
to needed shared libraries in ELF executables and shared libraries on Linux. Setting
this option to ``true`` has two advantages:
1. **Improved startup time**: when running an executable, the dynamic loader does not
have to search for needed libraries; they are loaded directly.
2. **Reliability**: libraries loaded at runtime are those that were linked to. This
minimizes the risk of accidentally picking up system libraries.
In the current implementation, Spack sets the soname (shared object name) of
libraries to their install path upon installation. This has two implications:
1. binding does not apply to libraries installed *before* the option was enabled;
2. toggling the option off does *not* prevent binding of libraries installed when
the option was still enabled.
It is also worth noting that:
1. Applications relying on ``dlopen(3)`` will continue to work, even when they open
a library by name. This is because ``RPATH``\s are retained in binaries even
when ``bind`` is enabled.
2. ``LD_PRELOAD`` continues to work for the typical use case of overriding
symbols, such as preloading a library with a more efficient ``malloc``.
However, the preloaded library will be loaded *in addition to*, rather than
*in place of*, another library with the same name; this can be problematic
in very rare cases where libraries rely on a particular ``init`` or ``fini``
order.
.. note::
In some cases packages provide *stub libraries* that only contain an interface
for linking, but lack an implementation for runtime. An example of this is
``libcuda.so``, provided by the CUDA toolkit; it can be used to link against,
but the library needed at runtime is the one installed with the CUDA driver.
To avoid binding those libraries, they can be marked as non-bindable using
a property in the package:
.. code-block:: python
class Example(Package):
non_bindable_shared_objects = ["libinterface.so"]
---------------------- ----------------------
``terminal_title`` ``terminal_title``
---------------------- ----------------------

View File

@ -405,6 +405,17 @@ Spack understands several special variables. These are:
* ``$user``: name of the current user * ``$user``: name of the current user
* ``$user_cache_path``: user cache directory (``~/.spack`` unless * ``$user_cache_path``: user cache directory (``~/.spack`` unless
:ref:`overridden <local-config-overrides>`) :ref:`overridden <local-config-overrides>`)
* ``$architecture``: the architecture triple of the current host, as
detected by Spack.
* ``$arch``: alias for ``$architecture``.
* ``$platform``: the platform of the current host, as detected by Spack.
* ``$operating_system``: the operating system of the current host, as
detected by the ``distro`` python module.
* ``$os``: alias for ``$operating_system``.
* ``$target``: the ISA target for the current host, as detected by
ArchSpec. E.g. ``skylake`` or ``neoverse-n1``.
* ``$target_family``: the target family for the current host, as
detected by ArchSpec. E.g. ``x86_64`` or ``aarch64``.
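As an illustration, these variables can be used wherever paths appear in configuration files; a hypothetical ``config.yaml`` that keeps a separate install tree per target family and OS might read:
.. code-block:: yaml

   config:
     install_tree:
       root: /shared/spack/opt/$target_family-$os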
Note that, as with shell variables, you can write these as ``$varname`` Note that, as with shell variables, you can write these as ``$varname``
or with braces to distinguish the variable from surrounding characters: or with braces to distinguish the variable from surrounding characters:
@ -549,7 +560,7 @@ down the problem:
You can see above that the ``build_jobs`` and ``debug`` settings are You can see above that the ``build_jobs`` and ``debug`` settings are
built in and are not overridden by a configuration file. The built in and are not overridden by a configuration file. The
``verify_ssl`` setting comes from the ``--insceure`` option on the ``verify_ssl`` setting comes from the ``--insecure`` option on the
command line. ``dirty`` and ``install_tree`` come from the custom command line. ``dirty`` and ``install_tree`` come from the custom
scopes ``./my-scope`` and ``./my-scope-2``, and all other configuration scopes ``./my-scope`` and ``./my-scope-2``, and all other configuration
options come from the default configuration files that ship with Spack. options come from the default configuration files that ship with Spack.

View File

@ -149,11 +149,9 @@ grouped by functionality.
Package-related modules Package-related modules
^^^^^^^^^^^^^^^^^^^^^^^ ^^^^^^^^^^^^^^^^^^^^^^^
:mod:`spack.package` :mod:`spack.package_base`
Contains the :class:`~spack.package_base.Package` class, which Contains the :class:`~spack.package_base.PackageBase` class, which
is the superclass for all packages in Spack. Methods on ``Package`` is the superclass for all packages in Spack.
implement all phases of the :ref:`package lifecycle
<package-lifecycle>` and manage the build process.
:mod:`spack.util.naming` :mod:`spack.util.naming`
Contains functions for mapping between Spack package names, Contains functions for mapping between Spack package names,

View File

@ -520,27 +520,33 @@ available from the yaml file.
^^^^^^^^^^^^^^^^^^^ ^^^^^^^^^^^^^^^^^^^
Spec concretization Spec concretization
^^^^^^^^^^^^^^^^^^^ ^^^^^^^^^^^^^^^^^^^
An environment can be concretized in three different modes and the behavior active under any environment An environment can be concretized in three different modes and the behavior active under
is determined by the ``concretizer:unify`` property. By default specs are concretized *separately*, one after the other: any environment is determined by the ``concretizer:unify`` configuration option.
The *default* mode is to unify all specs:
.. code-block:: yaml .. code-block:: yaml
spack: spack:
specs: specs:
- hdf5~mpi
- hdf5+mpi - hdf5+mpi
- zlib@1.2.8 - zlib@1.2.8
concretizer: concretizer:
unify: false unify: true
This mode of operation permits to deploy a full software stack where multiple configurations of the same package This means that any package in the environment corresponds to a single concrete spec. In
need to be installed alongside each other using the best possible selection of transitive dependencies. The downside the above example, when ``hdf5`` depends on ``zlib`` down the line, it is required to
is that redundancy of installations is disregarded completely, and thus environments might be more bloated than take ``zlib@1.2.8`` instead of a newer version. This mode of concretization is
strictly needed. In the example above, for instance, if a version of ``zlib`` newer than ``1.2.8`` is known to Spack, particularly useful when environment views are used: if every package occurs in
then it will be used for both ``hdf5`` installations. only one flavor, it is usually possible to merge all install directories into a view.
If redundancy of the environment is a concern, Spack provides a way to install it *together where possible*, A downside of unified concretization is that it can be overly strict. For example, a
i.e. trying to maximize reuse of dependencies across different specs: concretization error would happen when both ``hdf5+mpi`` and ``hdf5~mpi`` are specified
in an environment.
The second mode is to *unify when possible*: this makes concretization of root specs
more independent. Reuse of dependencies across different root specs is not required,
only maximized:
.. code-block:: yaml .. code-block:: yaml
@ -552,26 +558,27 @@ i.e. trying to maximize reuse of dependencies across different specs:
concretizer: concretizer:
unify: when_possible unify: when_possible
Also in this case Spack allows having multiple configurations of the same package, but privileges the reuse of This means that both ``hdf5`` installations will use ``zlib@1.2.8`` as a dependency even
specs over other factors. Going back to our example, this means that both ``hdf5`` installations will use if newer versions of that library are available.
``zlib@1.2.8`` as a dependency even if newer versions of that library are available.
Central installations done at HPC centers by system administrators or user support groups are a common case
that fits either of these two modes.
Environments can also be configured to concretize all the root specs *together*, in a self-consistent way, to The third mode of operation is to concretize root specs entirely independently by
ensure that each package in the environment comes with a single configuration: disabling unified concretization:
.. code-block:: yaml .. code-block:: yaml
spack: spack:
specs: specs:
- hdf5~mpi
- hdf5+mpi - hdf5+mpi
- zlib@1.2.8 - zlib@1.2.8
concretizer: concretizer:
unify: true unify: false
This mode of operation is usually what is required by software developers that want to deploy their development In this example ``hdf5`` is concretized separately, and does not consider ``zlib@1.2.8``
environment and have a single view of it in the filesystem. as a constraint or preference. Instead, it will take the latest possible version.
The last two concretization options are typically useful for system administrators and
user support groups providing a large software stack for their HPC center.
.. note:: .. note::
@ -582,10 +589,10 @@ environment and have a single view of it in the filesystem.
.. admonition:: Re-concretization of user specs .. admonition:: Re-concretization of user specs
When concretizing specs *together* or *together where possible* the entire set of specs will be When using *unified* concretization (when possible), the entire set of specs will be
re-concretized after any addition of new user specs, to ensure that re-concretized after any addition of new user specs, to ensure that
the environment remains consistent / minimal. When instead the specs are concretized the environment remains consistent / minimal. When instead unified concretization is
separately only the new specs will be re-concretized after any addition. disabled, only the new specs will be concretized after any addition.
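The unification mode can also be switched without editing ``spack.yaml`` by hand; a sketch, assuming an anonymous environment in the current directory:
.. code-block:: console

   $ spack -e . config add concretizer:unify:when_possible
   $ spack -e . concretize --force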
^^^^^^^^^^^^^ ^^^^^^^^^^^^^
Spec Matrices Spec Matrices
@ -987,7 +994,7 @@ A typical workflow is as follows:
spack env create -d . spack env create -d .
spack -e . add perl spack -e . add perl
spack -e . concretize spack -e . concretize
spack -e . env depfile > Makefile spack -e . env depfile -o Makefile
make -j64 make -j64
This generates a ``Makefile`` from a concretized environment in the This generates a ``Makefile`` from a concretized environment in the
@ -1000,7 +1007,6 @@ load, even when packages are built in parallel.
By default the following phony convenience targets are available: By default the following phony convenience targets are available:
- ``make all``: installs the environment (default target); - ``make all``: installs the environment (default target);
- ``make fetch-all``: only fetch sources of all packages;
- ``make clean``: cleans files used by make, but does not uninstall packages. - ``make clean``: cleans files used by make, but does not uninstall packages.
.. tip:: .. tip::
@ -1010,8 +1016,17 @@ By default the following phony convenience targets are available:
printed orderly per package install. To get synchronized output with colors, printed orderly per package install. To get synchronized output with colors,
use ``make -j<N> SPACK_COLOR=always --output-sync=recurse``. use ``make -j<N> SPACK_COLOR=always --output-sync=recurse``.
The following advanced example shows how generated targets can be used in a ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
``Makefile``: Specifying dependencies on generated ``make`` targets
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
An interesting question is how to include generated ``Makefile``\s in your own
``Makefile``\s. This comes up when you want to install an environment that provides
executables required by a make target of your own.
The example below shows how to accomplish this: the ``env`` target specifies
the generated ``spack/env`` target as a prerequisite, meaning that the environment
gets installed and is available for use in the ``env`` target.
.. code:: Makefile .. code:: Makefile
@ -1037,11 +1052,10 @@ The following advanced example shows how generated targets can be used in a
include env.mk include env.mk
endif endif
When ``make`` is invoked, it first "remakes" the missing include ``env.mk`` This works as follows: when ``make`` is invoked, it first "remakes" the missing
from its rule, which triggers concretization. When done, the generated target include ``env.mk`` as there is a target for it. This triggers concretization of
``spack/env`` is available. In the above example, the ``env`` target uses this generated the environment and makes spack output ``env.mk``. At that point the
target as a prerequisite, meaning that it can make use of the installed packages in generated target ``spack/env`` becomes available through ``include env.mk``.
its commands.
As it is typically undesirable to remake ``env.mk`` as part of ``make clean``, As it is typically undesirable to remake ``env.mk`` as part of ``make clean``,
the include is conditional. the include is conditional.
@ -1052,3 +1066,24 @@ the include is conditional.
the ``--make-target-prefix`` flag and use the non-phony target the ``--make-target-prefix`` flag and use the non-phony target
``<target-prefix>/env`` as prerequisite, instead of the phony target ``<target-prefix>/env`` as prerequisite, instead of the phony target
``<target-prefix>/all``. ``<target-prefix>/all``.
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Building a subset of the environment
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
The generated ``Makefile``\s contain install targets for each spec. Given the hash
of a particular spec, you can use the ``.install/<hash>`` target to install the
spec with its dependencies. There is also ``.install-deps/<hash>`` to *only* install
its dependencies. This can be useful when certain flags should only apply to
dependencies. Below we show a use case where a spec is installed with verbose
output (``spack install --verbose``) while its dependencies are installed silently:
.. code:: console
$ spack env depfile -o Makefile --make-target-prefix my_env
# Install dependencies in parallel, only show a log on error.
$ make -j16 my_env/.install-deps/<hash> SPACK_INSTALL_FLAGS=--show-log-on-error
# Install the root spec with verbose output.
$ make -j16 my_env/.install/<hash> SPACK_INSTALL_FLAGS=--verbose
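The hashes themselves can be read off the concretized environment, for instance with ``spack find`` (the format string below is just one possible choice):
.. code-block:: console

   $ spack -e . find --format "{name} {hash}"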

View File

@ -98,40 +98,42 @@ For example, this command:
.. code-block:: console .. code-block:: console
$ spack create http://www.mr511.de/software/libelf-0.8.13.tar.gz $ spack create https://ftp.osuosl.org/pub/blfs/conglomeration/libelf/libelf-0.8.13.tar.gz
creates a simple python file: creates a simple python file:
.. code-block:: python .. code-block:: python
from spack import * from spack.package import *
class Libelf(Package): class Libelf(AutotoolsPackage):
"""FIXME: Put a proper description of your package here.""" """FIXME: Put a proper description of your package here."""
# FIXME: Add a proper url for your package's homepage here. # FIXME: Add a proper url for your package's homepage here.
homepage = "http://www.example.com" homepage = "https://www.example.com"
url = "http://www.mr511.de/software/libelf-0.8.13.tar.gz" url = "https://ftp.osuosl.org/pub/blfs/conglomeration/libelf/libelf-0.8.13.tar.gz"
version('0.8.13', '4136d7b4c04df68b686570afa26988ac') # FIXME: Add a list of GitHub accounts to
# notify when the package is updated.
# maintainers = ["github_user1", "github_user2"]
version("0.8.13", sha256="591a9b4ec81c1f2042a97aa60564e0cb79d041c52faa7416acb38bc95bd2c76d")
# FIXME: Add dependencies if required. # FIXME: Add dependencies if required.
# depends_on('foo') # depends_on("foo")
def install(self, spec, prefix): def configure_args(self):
# FIXME: Modify the configure line to suit your build system here. # FIXME: Add arguments other than --prefix
configure('--prefix={0}'.format(prefix)) # FIXME: If not needed delete this function
args = []
# FIXME: Add logic to build and install here. return args
make()
make('install')
It doesn't take much python coding to get from there to a working It doesn't take much python coding to get from there to a working
package: package:
.. literalinclude:: _spack_root/var/spack/repos/builtin/packages/libelf/package.py .. literalinclude:: _spack_root/var/spack/repos/builtin/packages/libelf/package.py
:lines: 6- :lines: 5-
Spack also provides wrapper functions around common commands like Spack also provides wrapper functions around common commands like
``configure``, ``make``, and ``cmake`` to make writing packages ``configure``, ``make``, and ``cmake`` to make writing packages

View File

@ -44,7 +44,7 @@ A build matrix showing which packages are working on which systems is shown belo
yum install -y epel-release yum install -y epel-release
yum update -y yum update -y
yum --enablerepo epel groupinstall -y "Development Tools" yum --enablerepo epel groupinstall -y "Development Tools"
yum --enablerepo epel install -y curl findutils gcc-c++ gcc gcc-gfortran git gnupg2 hostname iproute make patch python3 python3-pip python3-setuptools unzip yum --enablerepo epel install -y curl findutils gcc-c++ gcc gcc-gfortran git gnupg2 hostname iproute redhat-lsb-core make patch python3 python3-pip python3-setuptools unzip
python3 -m pip install boto3 python3 -m pip install boto3
.. tab-item:: macOS Brew .. tab-item:: macOS Brew
@ -124,88 +124,41 @@ Spack provides two ways of bootstrapping ``clingo``: from pre-built binaries
(default), or from sources. The fastest way to get started is to bootstrap from (default), or from sources. The fastest way to get started is to bootstrap from
pre-built binaries. pre-built binaries.
.. note:: The first time you concretize a spec, Spack will bootstrap automatically:
When bootstrapping from pre-built binaries, Spack currently requires
``patchelf`` on Linux and ``otool`` on macOS. If ``patchelf`` is not in the
``PATH``, Spack will build it from sources, and a C++ compiler is required.
The first time you concretize a spec, Spack will bootstrap in the background:
.. code-block:: console .. code-block:: console
$ time spack spec zlib $ spack spec zlib
==> Bootstrapping clingo from pre-built binaries
==> Fetching https://mirror.spack.io/bootstrap/github-actions/v0.4/build_cache/linux-centos7-x86_64-gcc-10.2.1-clingo-bootstrap-spack-ba5ijauisd3uuixtmactc36vps7yfsrl.spec.json
==> Fetching https://mirror.spack.io/bootstrap/github-actions/v0.4/build_cache/linux-centos7-x86_64/gcc-10.2.1/clingo-bootstrap-spack/linux-centos7-x86_64-gcc-10.2.1-clingo-bootstrap-spack-ba5ijauisd3uuixtmactc36vps7yfsrl.spack
==> Installing "clingo-bootstrap@spack%gcc@10.2.1~docs~ipo+python+static_libstdcpp build_type=Release arch=linux-centos7-x86_64" from a buildcache
==> Bootstrapping patchelf from pre-built binaries
==> Fetching https://mirror.spack.io/bootstrap/github-actions/v0.4/build_cache/linux-centos7-x86_64-gcc-10.2.1-patchelf-0.16.1-p72zyan5wrzuabtmzq7isa5mzyh6ahdp.spec.json
==> Fetching https://mirror.spack.io/bootstrap/github-actions/v0.4/build_cache/linux-centos7-x86_64/gcc-10.2.1/patchelf-0.16.1/linux-centos7-x86_64-gcc-10.2.1-patchelf-0.16.1-p72zyan5wrzuabtmzq7isa5mzyh6ahdp.spack
==> Installing "patchelf@0.16.1%gcc@10.2.1 ldflags="-static-libstdc++ -static-libgcc" build_system=autotools arch=linux-centos7-x86_64" from a buildcache
Input spec Input spec
-------------------------------- --------------------------------
zlib zlib
Concretized Concretized
-------------------------------- --------------------------------
zlib@1.2.11%gcc@7.5.0+optimize+pic+shared arch=linux-ubuntu18.04-zen zlib@1.2.13%gcc@9.4.0+optimize+pic+shared build_system=makefile arch=linux-ubuntu20.04-icelake
real 0m20.023s
user 0m18.351s
sys 0m0.784s
After this command you'll see that ``clingo`` has been installed for Spack's own use:
.. code-block:: console
$ spack find -b
==> Showing internal bootstrap store at "/root/.spack/bootstrap/store"
==> 3 installed packages
-- linux-rhel5-x86_64 / gcc@9.3.0 -------------------------------
clingo-bootstrap@spack python@3.6
-- linux-ubuntu18.04-zen / gcc@7.5.0 ----------------------------
patchelf@0.13
Subsequent calls to the concretizer will then be much faster:
.. code-block:: console
$ time spack spec zlib
[ ... ]
real 0m0.490s
user 0m0.431s
sys 0m0.041s
If for security reasons you cannot bootstrap ``clingo`` from pre-built If for security reasons you cannot bootstrap ``clingo`` from pre-built
binaries, you have to mark this bootstrapping method as untrusted. This makes binaries, you have to disable fetching the binaries we generated with GitHub Actions.
Spack fall back to bootstrapping from sources:
.. code-block:: console .. code-block:: console
$ spack bootstrap untrust github-actions-v0.2 $ spack bootstrap disable github-actions-v0.4
==> "github-actions-v0.2" is now untrusted and will not be used for bootstrapping ==> "github-actions-v0.4" is now disabled and will not be used for bootstrapping
$ spack bootstrap disable github-actions-v0.3
==> "github-actions-v0.3" is now disabled and will not be used for bootstrapping
You can verify that the new settings are effective with: You can verify that the new settings are effective with:
.. code-block:: console .. command-output:: spack bootstrap list
$ spack bootstrap list
Name: github-actions-v0.2 UNTRUSTED
Type: buildcache
Info:
url: https://mirror.spack.io/bootstrap/github-actions/v0.2
homepage: https://github.com/spack/spack-bootstrap-mirrors
releases: https://github.com/spack/spack-bootstrap-mirrors/releases
Description:
Buildcache generated from a public workflow using Github Actions.
The sha256 checksum of binaries is checked before installation.
[ ... ]
Name: spack-install TRUSTED
Type: install
Description:
Specs built from sources by Spack. May take a long time.
.. note:: .. note::
@ -235,9 +188,7 @@ under the ``${HOME}/.spack`` directory. The software installed there can be quer
.. code-block:: console .. code-block:: console
$ spack find --bootstrap $ spack -b find
==> Showing internal bootstrap store at "/home/spack/.spack/bootstrap/store"
==> 3 installed packages
-- linux-ubuntu18.04-x86_64 / gcc@10.1.0 ------------------------ -- linux-ubuntu18.04-x86_64 / gcc@10.1.0 ------------------------
clingo-bootstrap@spack python@3.6.9 re2c@1.2.1 clingo-bootstrap@spack python@3.6.9 re2c@1.2.1
@ -246,7 +197,7 @@ In case it's needed the bootstrap store can also be cleaned with:
.. code-block:: console .. code-block:: console
$ spack clean -b $ spack clean -b
==> Removing software in "/home/spack/.spack/bootstrap/store" ==> Removing bootstrapped software and configuration in "/home/spack/.spack/bootstrap"
^^^^^^^^^^^^^^^^^^ ^^^^^^^^^^^^^^^^^^
Check Installation Check Installation

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

File diff suppressed because it is too large.

View File

@ -56,7 +56,6 @@ or refer to the full manual below.
basic_usage basic_usage
Tutorial: Spack 101 <https://spack-tutorial.readthedocs.io> Tutorial: Spack 101 <https://spack-tutorial.readthedocs.io>
replace_conda_homebrew replace_conda_homebrew
known_issues
.. toctree:: .. toctree::
:maxdepth: 2 :maxdepth: 2

View File

@ -1,40 +0,0 @@
.. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
Spack Project Developers. See the top-level COPYRIGHT file for details.
SPDX-License-Identifier: (Apache-2.0 OR MIT)
============
Known Issues
============
This is a list of known issues in Spack. It provides ways of getting around these
problems if you encounter them.
------------------------------------------------
Spack does not seem to respect ``packages.yaml``
------------------------------------------------
.. note::
This issue is **resolved** as of v0.19.0.dev0 commit
`8281a0c5feabfc4fe180846d6fe95cfe53420bc5`, through the introduction of package
requirements. See :ref:`package-requirements`.
A common problem in Spack v0.18.0 up to v0.19.0.dev0 is that package, compiler and target
preferences specified in ``packages.yaml`` do not seem to be respected. Spack picks the
"wrong" compilers and their versions, package versions and variants, and
micro-architectures.
This is however not a bug. In order to reduce the number of builds of the same
packages, the concretizer values reuse of installed packages higher than preferences
set in ``packages.yaml``. Note that ``packages.yaml`` specifies only preferences, not
hard constraints.
There are multiple workarounds:
1. Disable reuse during concretization: ``spack install --fresh <spec>`` when installing
from the command line, or ``spack concretize --fresh --force`` when using
environments.
2. Turn preferences into constraints by moving them to the input spec. For example,
use ``spack spec zlib%gcc@12`` when you want to force GCC 12 even if ``zlib`` was
already installed with GCC 10.

View File

@ -34,24 +34,155 @@ ubiquitous in the scientific software community. Second, it's a modern
language and has many powerful features to help make package writing language and has many powerful features to help make package writing
easy. easy.
---------------------------
Creating & editing packages .. _installation_procedure:
---------------------------
--------------------------------------
Overview of the installation procedure
--------------------------------------
Whenever Spack installs software, it goes through a series of predefined steps:
.. image:: images/installation_pipeline.png
:scale: 60 %
:align: center
All these steps are influenced by the metadata in each ``package.py`` and
by the current Spack configuration.
Since build systems are different from one another, the execution of the
last block in the figure is further expanded in a build system specific way.
For ``CMake``, for instance, the expansion looks like this:
.. image:: images/builder_phases.png
:align: center
:scale: 60 %
The predefined steps for each build system are called "phases".
In general, the names of the phases and the order in which they are executed can be
obtained by either reading the API docs at :py:mod:`~.spack.build_systems`, or
using the ``spack info`` command:
.. code-block:: console
:emphasize-lines: 13,14
$ spack info --phases m4
AutotoolsPackage: m4
Homepage: https://www.gnu.org/software/m4/m4.html
Safe versions:
1.4.17 ftp://ftp.gnu.org/gnu/m4/m4-1.4.17.tar.gz
Variants:
Name Default Description
sigsegv on Build the libsigsegv dependency
Installation Phases:
autoreconf configure build install
Build Dependencies:
libsigsegv
...
An extensive list of available build systems and phases is provided in :ref:`installation_process`.
------------------------
Writing a package recipe
------------------------
Since v0.19, Spack supports two ways of writing a package recipe. The most commonly used is to encode both the metadata
(directives, etc.) and the build behavior in a single class, as shown in the following example:
.. code-block:: python
class Openjpeg(CMakePackage):
"""OpenJPEG is an open-source JPEG 2000 codec written in C language"""
homepage = "https://github.com/uclouvain/openjpeg"
url = "https://github.com/uclouvain/openjpeg/archive/v2.3.1.tar.gz"
version("2.4.0", sha256="8702ba68b442657f11aaeb2b338443ca8d5fb95b0d845757968a7be31ef7f16d")
variant("codec", default=False, description="Build the CODEC executables")
depends_on("libpng", when="+codec")
def url_for_version(self, version):
if version >= Version("2.1.1"):
return super(Openjpeg, self).url_for_version(version)
url_fmt = "https://github.com/uclouvain/openjpeg/archive/version.{0}.tar.gz"
return url_fmt.format(version)
def cmake_args(self):
args = [
self.define_from_variant("BUILD_CODEC", "codec"),
self.define("BUILD_MJ2", False),
self.define("BUILD_THIRDPARTY", False),
]
return args
A package encoded with a single class is backward compatible with versions of Spack
lower than v0.19, and so are custom repositories containing only recipes of this kind.
The downside is that *this format doesn't allow packagers to use more than one build system in a single recipe*.
To do that, we have to resort to the second way Spack has of writing packages, which involves writing a
builder class explicitly. Using the same example as above, this reads:
.. code-block:: python
class Openjpeg(CMakePackage):
"""OpenJPEG is an open-source JPEG 2000 codec written in C language"""
homepage = "https://github.com/uclouvain/openjpeg"
url = "https://github.com/uclouvain/openjpeg/archive/v2.3.1.tar.gz"
version("2.4.0", sha256="8702ba68b442657f11aaeb2b338443ca8d5fb95b0d845757968a7be31ef7f16d")
variant("codec", default=False, description="Build the CODEC executables")
depends_on("libpng", when="+codec")
def url_for_version(self, version):
if version >= Version("2.1.1"):
return super(Openjpeg, self).url_for_version(version)
url_fmt = "https://github.com/uclouvain/openjpeg/archive/version.{0}.tar.gz"
return url_fmt.format(version)
class CMakeBuilder(spack.build_systems.cmake.CMakeBuilder):
def cmake_args(self):
args = [
self.define_from_variant("BUILD_CODEC", "codec"),
self.define("BUILD_MJ2", False),
self.define("BUILD_THIRDPARTY", False),
]
return args
This way of writing packages allows extending the recipe to support multiple build systems;
see :ref:`multiple_build_systems` for more details. The downside is that recipes of this kind
are only understood by Spack v0.19 and later. More information on the internal architecture of
Spack can be found at :ref:`package_class_structure`.
.. note::
If a builder is implemented in ``package.py``, all build-specific methods must be moved
to the builder. This means that if you have a package like
.. code-block:: python
class Foo(CMakePackage):
def cmake_args(self):
...
and you add a builder to the ``package.py``, you must move ``cmake_args`` to the builder.
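After the move, the sketch above becomes (same hypothetical ``Foo``):
.. code-block:: python

   class Foo(CMakePackage):
       ...

   class CMakeBuilder(spack.build_systems.cmake.CMakeBuilder):
       def cmake_args(self):
           ...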
.. _cmd-spack-create: .. _cmd-spack-create:
^^^^^^^^^^^^^^^^ ---------------------
``spack create`` Creating new packages
^^^^^^^^^^^^^^^^ ---------------------
The ``spack create`` command creates a directory with the package name and To help create a new package, Spack provides a command that generates a ``package.py``
generates a ``package.py`` file with a boilerplate package template. If given file in an existing repository, with a boilerplate package template. Here's an example:
a URL pointing to a tarball or other software archive, ``spack create`` is
smart enough to determine basic information about the package, including its name
and build system. In most cases, ``spack create`` plus a few modifications is
all you need to get a package working.
Here's an example:
.. code-block:: console .. code-block:: console
@ -87,23 +218,6 @@ You do not *have* to download all of the versions up front. You can
always choose to download just one tarball initially, and run always choose to download just one tarball initially, and run
:ref:`cmd-spack-checksum` later if you need more versions. :ref:`cmd-spack-checksum` later if you need more versions.
Let's say you download 3 tarballs:
.. code-block:: console
How many would you like to checksum? (default is 1, q to abort) 3
==> Downloading...
==> Fetching https://gmplib.org/download/gmp/gmp-6.1.2.tar.bz2
######################################################################## 100.0%
==> Fetching https://gmplib.org/download/gmp/gmp-6.1.1.tar.bz2
######################################################################## 100.0%
==> Fetching https://gmplib.org/download/gmp/gmp-6.1.0.tar.bz2
######################################################################## 100.0%
==> Checksummed 3 versions of gmp:
==> This package looks like it uses the autotools build system
==> Created template for gmp package
==> Created package file: /Users/Adam/spack/var/spack/repos/builtin/packages/gmp/package.py
Spack automatically creates a directory in the appropriate repository, Spack automatically creates a directory in the appropriate repository,
generates a boilerplate template for your package, and opens up the new generates a boilerplate template for your package, and opens up the new
``package.py`` in your favorite ``$EDITOR``: ``package.py`` in your favorite ``$EDITOR``:
@ -111,6 +225,14 @@ generates a boilerplate template for your package, and opens up the new
.. code-block:: python .. code-block:: python
:linenos: :linenos:
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
# ----------------------------------------------------------------------------
# If you submit this package back to Spack as a pull request,
# please first remove this boilerplate and all FIXME comments.
# #
# This is a template package file for Spack. We've put "FIXME" # This is a template package file for Spack. We've put "FIXME"
# next to all the things you'll want to change. Once you've handled # next to all the things you'll want to change. Once you've handled
@ -123,9 +245,8 @@ generates a boilerplate template for your package, and opens up the new
# spack edit gmp # spack edit gmp
# #
# See the Spack documentation for more information on packaging. # See the Spack documentation for more information on packaging.
# If you submit this package back to Spack as a pull request, # ----------------------------------------------------------------------------
# please first remove this boilerplate and all FIXME comments. import spack.build_systems.autotools
#
from spack.package import * from spack.package import *
@ -133,19 +254,17 @@ generates a boilerplate template for your package, and opens up the new
"""FIXME: Put a proper description of your package here.""" """FIXME: Put a proper description of your package here."""
# FIXME: Add a proper url for your package's homepage here. # FIXME: Add a proper url for your package's homepage here.
homepage = "http://www.example.com" homepage = "https://www.example.com"
url = "https://gmplib.org/download/gmp/gmp-6.1.2.tar.bz2" url = "https://gmplib.org/download/gmp/gmp-6.1.2.tar.bz2"
# FIXME: Add a list of GitHub accounts to # FIXME: Add a list of GitHub accounts to
# notify when the package is updated. # notify when the package is updated.
# maintainers = ['github_user1', 'github_user2'] # maintainers = ["github_user1", "github_user2"]
version('6.1.2', '8ddbb26dc3bd4e2302984debba1406a5') version("6.2.1", sha256="eae9326beb4158c386e39a356818031bd28f3124cf915f8c5b1dc4c7a36b4d7c")
version('6.1.1', '4c175f86e11eb32d8bf9872ca3a8e11d')
version('6.1.0', '86ee6e54ebfc4a90b643a65e402c4048')
# FIXME: Add dependencies if required. # FIXME: Add dependencies if required.
# depends_on('foo') # depends_on("foo")
def configure_args(self): def configure_args(self):
# FIXME: Add arguments other than --prefix # FIXME: Add arguments other than --prefix
@ -154,15 +273,16 @@ generates a boilerplate template for your package, and opens up the new
return args return args
The tedious stuff (creating the class, checksumming archives) has been The tedious stuff (creating the class, checksumming archives) has been
done for you. You'll notice that ``spack create`` correctly detected that done for you. Spack correctly detected that ``gmp`` uses the ``autotools``
``gmp`` uses the Autotools build system. It created a new ``Gmp`` package build system, so it created a new ``Gmp`` package that subclasses the
that subclasses the ``AutotoolsPackage`` base class. This base class ``AutotoolsPackage`` base class.
provides basic installation methods common to all Autotools packages:
The default installation procedure for a package subclassing the ``AutotoolsPackage``
is to go through the typical process of:
.. code-block:: bash .. code-block:: bash
./configure --prefix=/path/to/installation/directory ./configure --prefix=/path/to/installation/directory
make make
make check make check
make install make install
@ -209,12 +329,14 @@ The rest of the tasks you need to do are as follows:
Your new package may require specific flags during ``configure``. Your new package may require specific flags during ``configure``.
These can be added via ``configure_args``. Specifics will differ These can be added via ``configure_args``. Specifics will differ
depending on the package and its build system. depending on the package and its build system.
:ref:`Implementing the install method <install-method>` is :ref:`installation_process` is
covered in detail later. covered in detail later.
Passing a URL to ``spack create`` is a convenient and easy way to get ^^^^^^^^^^^^^^^^^^^^^^^^^
a basic package template, but what if your software is licensed and Non-downloadable software
cannot be downloaded from a URL? You can still create a boilerplate ^^^^^^^^^^^^^^^^^^^^^^^^^
If your software cannot be downloaded from a URL you can still create a boilerplate
``package.py`` by telling ``spack create`` what name you want to use: ``package.py`` by telling ``spack create`` what name you want to use:
.. code-block:: console .. code-block:: console
@ -223,40 +345,23 @@ cannot be downloaded from a URL? You can still create a boilerplate
This will create a simple ``intel`` package with an ``install()`` This will create a simple ``intel`` package with an ``install()``
method that you can craft to install your package. method that you can craft to install your package.
Likewise, you can force the build system to be used with ``--template`` and,
What if ``spack create <url>`` guessed the wrong name or build system? in case it's needed, you can overwrite a package already in the repository
For example, if your package uses the Autotools build system but does with ``--force``:
not come with a ``configure`` script, Spack won't realize it uses
Autotools. You can overwrite the old package with ``--force`` and specify
a name with ``--name`` or a build system template to use with ``--template``:
.. code-block:: console .. code-block:: console
$ spack create --name gmp https://gmplib.org/download/gmp/gmp-6.1.2.tar.bz2 $ spack create --name gmp https://gmplib.org/download/gmp/gmp-6.1.2.tar.bz2
$ spack create --force --template autotools https://gmplib.org/download/gmp/gmp-6.1.2.tar.bz2 $ spack create --force --template autotools https://gmplib.org/download/gmp/gmp-6.1.2.tar.bz2
.. note::
If you are creating a package that uses the Autotools build system
but does not come with a ``configure`` script, you'll need to add an
``autoreconf`` method to your package that explains how to generate
the ``configure`` script. You may also need the following dependencies:
.. code-block:: python
depends_on('autoconf', type='build')
depends_on('automake', type='build')
depends_on('libtool', type='build')
depends_on('m4', type='build')
A complete list of available build system templates can be found by running A complete list of available build system templates can be found by running
``spack create --help``. ``spack create --help``.
.. _cmd-spack-edit: .. _cmd-spack-edit:
^^^^^^^^^^^^^^ -------------------------
``spack edit`` Editing existing packages
^^^^^^^^^^^^^^ -------------------------
One of the easiest ways to learn how to write packages is to look at One of the easiest ways to learn how to write packages is to look at
existing ones. You can edit a package file by name with the ``spack existing ones. You can edit a package file by name with the ``spack
@ -266,10 +371,15 @@ edit`` command:
$ spack edit gmp $ spack edit gmp
So, if you used ``spack create`` to create a package, then saved and If you used ``spack create`` to create a package, you can get back to
closed the resulting file, you can get back to it with ``spack edit``. it later with ``spack edit``. For instance, the ``gmp`` package actually
The ``gmp`` package actually lives in lives in:
``$SPACK_ROOT/var/spack/repos/builtin/packages/gmp/package.py``,
.. code-block:: console
$ spack location -p gmp
${SPACK_ROOT}/var/spack/repos/builtin/packages/gmp/package.py
but ``spack edit`` provides a much simpler shortcut and saves you the but ``spack edit`` provides a much simpler shortcut and saves you the
trouble of typing the full path. trouble of typing the full path.
@ -2422,7 +2532,7 @@ Spack provides a mechanism for dependencies to influence the
environment of their dependents by overriding the environment of their dependents by overriding the
:meth:`setup_dependent_run_environment <spack.package_base.PackageBase.setup_dependent_run_environment>` :meth:`setup_dependent_run_environment <spack.package_base.PackageBase.setup_dependent_run_environment>`
or the or the
:meth:`setup_dependent_build_environment <spack.package_base.PackageBase.setup_dependent_build_environment>` :meth:`setup_dependent_build_environment <spack.builder.Builder.setup_dependent_build_environment>`
methods. methods.
The Qt package, for instance, uses this call: The Qt package, for instance, uses this call:
@ -3280,67 +3390,91 @@ the Python extensions provided by them: once for ``+python`` and once
for ``~python``. Other than using a little extra disk space, that for ``~python``. Other than using a little extra disk space, that
solution has no serious problems. solution has no serious problems.
.. _installation_procedure: .. _installation_process:
--------------------------------------- --------------------------------
Implementing the installation procedure Overriding build system defaults
--------------------------------------- --------------------------------
The last element of a package is its **installation procedure**. This is .. note::
where the real work of installation happens, and it's the main part of
the package you'll need to customize for each piece of software.
Defining an installation procedure means overriding a set of methods or attributes If you code a single class in ``package.py``, all the functions shown in the table below
that will be called at some point during the installation of the package. can be implemented with the same signature on the ``*Package`` instead of the corresponding builder.
The package base class, usually specialized for a given build system, determines the
actual set of entities available for overriding.
The classes that are currently provided by Spack are: Most of the time the default implementation of methods or attributes in build system base classes
is what a packager needs, and only a very few entities need to be overridden. Typically we just
need to override methods like ``configure_args``:
.. code-block:: python
def configure_args(self):
args = ["--enable-cxx"] + self.enable_or_disable("libs")
if "libs=static" in self.spec:
args.append("--with-pic")
return args
The actual set of entities available for overriding in ``package.py`` depend on
the build system. The build systems currently supported by Spack are:
+----------------------------------------------------------+----------------------------------+ +----------------------------------------------------------+----------------------------------+
| **Base Class** | **Purpose** | | **API docs** | **Description** |
+==========================================================+==================================+ +==========================================================+==================================+
| :class:`~spack.package_base.Package` | General base class not | | :class:`~spack.build_systems.generic` | Generic build system without any |
| | specialized for any build system | | | base implementation |
+----------------------------------------------------------+----------------------------------+ +----------------------------------------------------------+----------------------------------+
| :class:`~spack.build_systems.makefile.MakefilePackage` | Specialized class for packages | | :class:`~spack.build_systems.makefile` | Specialized build system for |
| | built invoking | | | software built invoking |
| | hand-written Makefiles | | | hand-written Makefiles |
+----------------------------------------------------------+----------------------------------+ +----------------------------------------------------------+----------------------------------+
| :class:`~spack.build_systems.autotools.AutotoolsPackage` | Specialized class for packages | | :class:`~spack.build_systems.autotools` | Specialized build system for |
| | built using GNU Autotools | | | software built using |
| | GNU Autotools |
+----------------------------------------------------------+----------------------------------+ +----------------------------------------------------------+----------------------------------+
| :class:`~spack.build_systems.cmake.CMakePackage` | Specialized class for packages | | :class:`~spack.build_systems.cmake` | Specialized build system for |
| | built using CMake | | | software built using CMake |
+----------------------------------------------------------+----------------------------------+ +----------------------------------------------------------+----------------------------------+
| :class:`~spack.build_systems.cuda.CudaPackage` | A helper class for packages that | | :class:`~spack.build_systems.maven` | Specialized build system for |
| | use CUDA | | | software built using Maven |
+----------------------------------------------------------+----------------------------------+ +----------------------------------------------------------+----------------------------------+
| :class:`~spack.build_systems.qmake.QMakePackage` | Specialized class for packages | | :class:`~spack.build_systems.meson` | Specialized build system for |
| | built using QMake | | | software built using Meson |
+----------------------------------------------------------+----------------------------------+ +----------------------------------------------------------+----------------------------------+
| :class:`~spack.build_systems.rocm.ROCmPackage` | A helper class for packages that | | :class:`~spack.build_systems.nmake` | Specialized build system for |
| | use ROCm | | | software built using NMake |
+----------------------------------------------------------+----------------------------------+ +----------------------------------------------------------+----------------------------------+
| :class:`~spack.build_systems.scons.SConsPackage` | Specialized class for packages | | :class:`~spack.build_systems.qmake` | Specialized build system for |
| | built using SCons | | | software built using QMake |
+----------------------------------------------------------+----------------------------------+ +----------------------------------------------------------+----------------------------------+
| :class:`~spack.build_systems.waf.WafPackage` | Specialized class for packages | | :class:`~spack.build_systems.scons` | Specialized build system for |
| | built using Waf | | | software built using SCons |
+----------------------------------------------------------+----------------------------------+ +----------------------------------------------------------+----------------------------------+
| :class:`~spack.build_systems.r.RPackage` | Specialized class for | | :class:`~spack.build_systems.waf` | Specialized build system for |
| | software built using Waf |
+----------------------------------------------------------+----------------------------------+
| :class:`~spack.build_systems.r` | Specialized build system for |
| | R extensions | | | R extensions |
+----------------------------------------------------------+----------------------------------+ +----------------------------------------------------------+----------------------------------+
| :class:`~spack.build_systems.octave.OctavePackage` | Specialized class for | | :class:`~spack.build_systems.octave` | Specialized build system for |
| | Octave packages | | | Octave packages |
+----------------------------------------------------------+----------------------------------+ +----------------------------------------------------------+----------------------------------+
| :class:`~spack.build_systems.python.PythonPackage` | Specialized class for | | :class:`~spack.build_systems.python` | Specialized build system for |
| | Python extensions | | | Python extensions |
+----------------------------------------------------------+----------------------------------+ +----------------------------------------------------------+----------------------------------+
| :class:`~spack.build_systems.perl.PerlPackage` | Specialized class for | | :class:`~spack.build_systems.perl` | Specialized build system for |
| | Perl extensions | | | Perl extensions |
+----------------------------------------------------------+----------------------------------+ +----------------------------------------------------------+----------------------------------+
| :class:`~spack.build_systems.intel.IntelPackage` | Specialized class for licensed | | :class:`~spack.build_systems.ruby` | Specialized build system for |
| | Intel software | | | Ruby extensions |
+----------------------------------------------------------+----------------------------------+
| :class:`~spack.build_systems.intel` | Specialized build system for |
| | licensed Intel software |
+----------------------------------------------------------+----------------------------------+
| :class:`~spack.build_systems.oneapi` | Specialized build system for |
| | Intel oneAPI software |
+----------------------------------------------------------+----------------------------------+
| :class:`~spack.build_systems.aspell_dict` | Specialized build system for |
| | Aspell dictionaries |
+----------------------------------------------------------+----------------------------------+ +----------------------------------------------------------+----------------------------------+
@ -3353,52 +3487,17 @@ The classes that are currently provided by Spack are:
For example, a Python extension installed with CMake would ``extends('python')`` and For example, a Python extension installed with CMake would ``extends('python')`` and
subclass from :class:`~spack.build_systems.cmake.CMakePackage`. subclass from :class:`~spack.build_systems.cmake.CMakePackage`.
^^^^^^^^^^^^^^^^^^^^^ ^^^^^^^^^^^^^^^^^^^^^^^^^^
Installation pipeline Overriding builder methods
^^^^^^^^^^^^^^^^^^^^^ ^^^^^^^^^^^^^^^^^^^^^^^^^^
When a user runs ``spack install``, Spack: Build-system "phases" have default implementations that fit most of the common cases:
1. Fetches an archive for the correct version of the software.
2. Expands the archive.
3. Sets the current working directory to the root directory of the expanded archive.
Then, depending on the base class of the package under consideration, it will execute
a certain number of **phases** that reflect the way a package of that type is usually built.
The name and order in which the phases will be executed can be obtained either reading the API
docs at :py:mod:`~.spack.build_systems`, or using the ``spack info`` command:
.. code-block:: console
:emphasize-lines: 13,14
$ spack info m4
AutotoolsPackage: m4
Homepage: https://www.gnu.org/software/m4/m4.html
Safe versions:
1.4.17 ftp://ftp.gnu.org/gnu/m4/m4-1.4.17.tar.gz
Variants:
Name Default Description
sigsegv on Build the libsigsegv dependency
Installation Phases:
autoreconf configure build install
Build Dependencies:
libsigsegv
...
Typically, phases have default implementations that fit most of the common cases:
.. literalinclude:: _spack_root/lib/spack/spack/build_systems/autotools.py .. literalinclude:: _spack_root/lib/spack/spack/build_systems/autotools.py
:pyobject: AutotoolsPackage.configure :pyobject: AutotoolsBuilder.configure
:linenos: :linenos:
It is thus just sufficient for a packager to override a few It is usually sufficient for a packager to override a few
build system specific helper methods or attributes to provide, for instance, build system specific helper methods or attributes to provide, for instance,
configure arguments: configure arguments:
@ -3406,31 +3505,31 @@ configure arguments:
:pyobject: M4.configure_args :pyobject: M4.configure_args
:linenos: :linenos:
.. note:: Each specific build system has a list of attributes and methods that can be overridden to
Each specific build system has a list of attributes that can be overridden to fine-tune the installation of a package without overriding an entire phase. To
fine-tune the installation of a package without overriding an entire phase. To have more information on them the place to go is the API docs of the :py:mod:`~.spack.build_systems`
have more information on them the place to go is the API docs of the :py:mod:`~.spack.build_systems` module.
module.
^^^^^^^^^^^^^^^^^^^^^^^^^^ ^^^^^^^^^^^^^^^^^^^^^^^^^^
Overriding an entire phase Overriding an entire phase
^^^^^^^^^^^^^^^^^^^^^^^^^^ ^^^^^^^^^^^^^^^^^^^^^^^^^^
In extreme cases it may be necessary to override an entire phase. Regardless Sometimes it is necessary to override an entire phase. If the ``package.py`` contains
of the build system, the signature is the same. For example, the signature a single class recipe (see :ref:`package_class_structure`), then the signature for a
for the install phase is: phase is:
.. code-block:: python .. code-block:: python
class Foo(Package): class Openjpeg(CMakePackage):
def install(self, spec, prefix): def install(self, spec, prefix):
... ...
regardless of the build system. The arguments for the phase are:
``self`` ``self``
For those not used to Python instance methods, this is the This is the package object, which extends ``CMakePackage``.
package itself. In this case it's an instance of ``Foo``, which For API docs on Package objects, see
extends ``Package``. For API docs on Package objects, see :py:class:`Package <spack.package_base.PackageBase>`.
:py:class:`Package <spack.package_base.Package>`.
``spec`` ``spec``
This is the concrete spec object created by Spack from an This is the concrete spec object created by Spack from an
@ -3445,12 +3544,111 @@ for the install phase is:
The arguments ``spec`` and ``prefix`` are passed only for convenience, as they always The arguments ``spec`` and ``prefix`` are passed only for convenience, as they always
correspond to ``self.spec`` and ``self.spec.prefix`` respectively. correspond to ``self.spec`` and ``self.spec.prefix`` respectively.
As mentioned in :ref:`install-environment`, you will usually not need to refer If the ``package.py`` encodes builders explicitly, the signature for a phase changes slightly:
to dependencies explicitly in your package file, as the compiler wrappers take care of most of
the heavy lifting here. There will be times, though, when you need to refer to .. code-block:: python
the install locations of dependencies, or when you need to do something different
depending on the version, compiler, dependencies, etc. that your package is class CMakeBuilder(spack.build_systems.cmake.CMakeBuilder):
built with. These parameters give you access to this type of information. def install(self, pkg, spec, prefix):
...
In this case the package is passed as the second argument, and ``self`` is the builder instance.
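For illustration, here is a minimal sketch of a recipe written in this explicit style
(the ``Example`` package, its URL, and the extra manpage directory are hypothetical):

.. code-block:: python

   import spack.build_systems.cmake
   from spack.package import *


   class Example(CMakePackage):
       """Hypothetical package used to illustrate an explicit builder."""

       homepage = "https://www.example.com"
       url = "https://www.example.com/example-1.0.tar.gz"


   class CMakeBuilder(spack.build_systems.cmake.CMakeBuilder):
       def install(self, pkg, spec, prefix):
           # run the default CMake installation first ...
           super(CMakeBuilder, self).install(pkg, spec, prefix)
           # ... then create any extra directories by hand
           mkdirp(prefix.share.man.man1)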
.. _multiple_build_systems:
^^^^^^^^^^^^^^^^^^^^^^
Multiple build systems
^^^^^^^^^^^^^^^^^^^^^^
There are cases where a piece of software actively supports two build systems, changes
build systems as it evolves, or needs different build systems on different platforms.
Spack handles these cases natively, provided the recipe is written using builders explicitly.
For instance, software that supports two build systems unconditionally should derive from
both ``*Package`` base classes, and declare the possible use of multiple build systems using
a directive:
.. code-block:: python
class ArpackNg(CMakePackage, AutotoolsPackage):
build_system("cmake", "autotools", default="cmake")
In this case the software can be built with both ``autotools`` and ``cmake``. Since the package
supports multiple build systems, it is necessary to declare which one is the default. The ``package.py``
will likely also override some of the default builder methods:
.. code-block:: python
class CMakeBuilder(spack.build_systems.cmake.CMakeBuilder):
def cmake_args(self):
pass
class AutotoolsBuilder(spack.build_systems.autotools.AutotoolsBuilder):
def configure_args(self):
pass
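Filled in, those overrides might look like the following sketch, assuming the package
defines an ``mpi`` variant (the option names are illustrative):

.. code-block:: python

   class CMakeBuilder(spack.build_systems.cmake.CMakeBuilder):
       def cmake_args(self):
           # translate the ``mpi`` variant into the corresponding CMake cache option
           return [self.define_from_variant("MPI", "mpi")]


   class AutotoolsBuilder(spack.build_systems.autotools.AutotoolsBuilder):
       def configure_args(self):
           # the same variant, expressed as --enable-mpi / --disable-mpi
           return self.enable_or_disable("mpi")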
In more complex cases it might happen that the build system changes according to certain conditions,
for instance across versions. That can be expressed with conditional variant values:
.. code-block:: python
class ArpackNg(CMakePackage, AutotoolsPackage):
build_system(
conditional("cmake", when="@0.64:"),
conditional("autotools", when="@:0.63"),
default="cmake",
)
In this example the directive imposes a change from ``Autotools`` to ``CMake`` when going
from ``v0.63`` to ``v0.64``.
^^^^^^^^^^^^^^^^^^
Mixin base classes
^^^^^^^^^^^^^^^^^^
Besides build systems, there are other cases where common metadata and behavior can be extracted
and reused by many packages. For instance, packages that depend on ``Cuda`` or ``Rocm`` share
common dependencies and constraints. To factor these attributes into a single place, Spack provides
a few mixin classes in the ``spack.build_systems`` module:
+---------------------------------------------------------------+----------------------------------+
| **API docs** | **Description** |
+===============================================================+==================================+
| :class:`~spack.build_systems.cuda.CudaPackage` | A helper class for packages that |
| | use CUDA |
+---------------------------------------------------------------+----------------------------------+
| :class:`~spack.build_systems.rocm.ROCmPackage` | A helper class for packages that |
| | use ROCm |
+---------------------------------------------------------------+----------------------------------+
| :class:`~spack.build_systems.gnu.GNUMirrorPackage` | A helper class for GNU packages |
+---------------------------------------------------------------+----------------------------------+
| :class:`~spack.build_systems.python.PythonExtension` | A helper class for Python |
| | extensions |
+---------------------------------------------------------------+----------------------------------+
| :class:`~spack.build_systems.sourceforge.SourceforgePackage` | A helper class for packages |
| | from sourceforge.org |
+---------------------------------------------------------------+----------------------------------+
| :class:`~spack.build_systems.sourceware.SourcewarePackage` | A helper class for packages |
| | from sourceware.org |
+---------------------------------------------------------------+----------------------------------+
| :class:`~spack.build_systems.xorg.XorgPackage` | A helper class for x.org |
| | packages |
+---------------------------------------------------------------+----------------------------------+
These classes should be used by adding them to the inheritance tree of the package that needs them,
for instance:
.. code-block:: python
class Cp2k(MakefilePackage, CudaPackage):
"""CP2K is a quantum chemistry and solid state physics software package
that can perform atomistic simulations of solid state, liquid, molecular,
periodic, material, crystal, and biological systems
"""
In the example above ``Cp2k`` inherits all the conflicts and variants that ``CudaPackage`` defines.
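As a sketch of how the inherited machinery is typically used, the ``+cuda`` variant and
the ``cuda_arch`` values provided by ``CudaPackage`` can be queried like any other
variant (the flag handling below is illustrative, not taken from the real ``cp2k`` recipe):

.. code-block:: python

   class Cp2k(MakefilePackage, CudaPackage):
       def edit(self, spec, prefix):
           options = []
           if "+cuda" in spec:
               # one -arch flag per CUDA architecture requested by the user
               options += [
                   "-arch=sm_{0}".format(arch)
                   for arch in spec.variants["cuda_arch"].value
               ]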
.. _install-environment: .. _install-environment:
@ -4208,16 +4406,9 @@ In addition to invoking the right compiler, the compiler wrappers add
flags to the compile line so that dependencies can be easily found. flags to the compile line so that dependencies can be easily found.
These flags are added for each dependency, if they exist: These flags are added for each dependency, if they exist:
Compile-time library search paths * Compile-time library search paths: ``-L$dep_prefix/lib``, ``-L$dep_prefix/lib64``
* ``-L$dep_prefix/lib`` * Runtime library search paths (RPATHs): ``$rpath_flag$dep_prefix/lib``, ``$rpath_flag$dep_prefix/lib64``
* ``-L$dep_prefix/lib64`` * Include search paths: ``-I$dep_prefix/include``
Runtime library search paths (RPATHs)
* ``$rpath_flag$dep_prefix/lib``
* ``$rpath_flag$dep_prefix/lib64``
Include search paths
* ``-I$dep_prefix/include``
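As a rough sketch, for a single dependency installed under ``/path/to/dep`` (an
illustrative placeholder), a compile-and-link line is rewritten approximately as:

.. code-block:: console

   $ cc foo.c -o foo
   # effectively becomes (for gcc, whose rpath flag is -Wl,-rpath,):
   $ gcc -I/path/to/dep/include foo.c -o foo \
         -L/path/to/dep/lib -Wl,-rpath,/path/to/dep/lib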
An example of this would be the ``libdwarf`` build, which has one An example of this would be the ``libdwarf`` build, which has one
dependency: ``libelf``. Every call to ``cc`` in the ``libdwarf`` dependency: ``libelf``. Every call to ``cc`` in the ``libdwarf``
@ -5062,6 +5253,16 @@ where each argument has the following meaning:
will run. will run.
The default of ``None`` corresponds to the current directory (``'.'``). The default of ``None`` corresponds to the current directory (``'.'``).
Each call starts with the working directory set to the spec's test stage
directory (i.e., ``self.test_suite.test_dir_for_spec(self.spec)``).
.. warning::
Use of the package spec's installation directory for building and running
tests is **strongly** discouraged. Doing so has caused permission errors
for shared spack instances *and* for facilities that install the software
in read-only file systems or directories.
""""""""""""""""""""""""""""""""""""""""" """""""""""""""""""""""""""""""""""""""""
Accessing package- and test-related files Accessing package- and test-related files
@ -5069,10 +5270,10 @@ Accessing package- and test-related files
You may need to access files from one or more locations when writing You may need to access files from one or more locations when writing
stand-alone tests. This can happen if the software's repository does not stand-alone tests. This can happen if the software's repository does not
include test source files or includes files but no way to build the include test source files or includes files but has no way to build the
executables using the installed headers and libraries. In these executables using the installed headers and libraries. In these cases,
cases, you may need to reference the files relative to one or more you may need to reference the files relative to one or more root
root directory. The properties containing package- and test-related directories. The properties containing package- (or spec-) and test-related
directory paths are provided in the table below. directory paths are provided in the table below.
.. list-table:: Directory-to-property mapping .. list-table:: Directory-to-property mapping
@ -5081,19 +5282,22 @@ directory paths are provided in the table below.
* - Root Directory * - Root Directory
- Package Property - Package Property
- Example(s) - Example(s)
* - Package Installation Files * - Package (Spec) Installation
- ``self.prefix`` - ``self.prefix``
- ``self.prefix.include``, ``self.prefix.lib`` - ``self.prefix.include``, ``self.prefix.lib``
* - Package Dependency's Files * - Dependency Installation
- ``self.spec['<dependency-package>'].prefix`` - ``self.spec['<dependency-package>'].prefix``
- ``self.spec['trilinos'].prefix.include`` - ``self.spec['trilinos'].prefix.include``
* - Test Suite Stage Files * - Test Suite Stage
- ``self.test_suite.stage`` - ``self.test_suite.stage``
- ``join_path(self.test_suite.stage, 'results.txt')`` - ``join_path(self.test_suite.stage, 'results.txt')``
* - Staged Cached Build-time Files * - Spec's Test Stage
- ``self.test_suite.test_dir_for_spec``
- ``self.test_suite.test_dir_for_spec(self.spec)``
* - Current Spec's Build-time Files
- ``self.test_suite.current_test_cache_dir`` - ``self.test_suite.current_test_cache_dir``
- ``join_path(self.test_suite.current_test_cache_dir, 'examples', 'foo.c')`` - ``join_path(self.test_suite.current_test_cache_dir, 'examples', 'foo.c')``
* - Staged Custom Package Files * - Current Spec's Custom Test Files
- ``self.test_suite.current_test_data_dir`` - ``self.test_suite.current_test_data_dir``
- ``join_path(self.test_suite.current_test_data_dir, 'hello.f90')`` - ``join_path(self.test_suite.current_test_data_dir, 'hello.f90')``
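For instance, a stand-alone test method might combine these properties as in the
following sketch, where the ``hello.f90`` input and the installed ``foo`` executable
are hypothetical:

.. code-block:: python

   def test(self):
       # custom file shipped with the package recipe under its test/ directory
       data = join_path(self.test_suite.current_test_data_dir, "hello.f90")
       # run the installed ``foo`` executable on it and check the output
       self.run_test(
           "foo",
           options=[data],
           expected=["Hello"],
           purpose="run foo on the packaged test input",
       )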
@ -6099,3 +6303,82 @@ might write:
DWARF_PREFIX = $(spack location --install-dir libdwarf) DWARF_PREFIX = $(spack location --install-dir libdwarf)
CXXFLAGS += -I$DWARF_PREFIX/include CXXFLAGS += -I$DWARF_PREFIX/include
CXXFLAGS += -L$DWARF_PREFIX/lib CXXFLAGS += -L$DWARF_PREFIX/lib
.. _package_class_structure:
--------------------------
Package class architecture
--------------------------
.. note::
This section aims to provide high-level knowledge of how the package class architecture evolved
in Spack, and some insight into the current design.
Packages in Spack were originally designed to support only a single build system. The overall
class structure for a package looked like:
.. image:: images/original_package_architecture.png
:scale: 60 %
:align: center
In this architecture the base class ``AutotoolsPackage`` was responsible for both the metadata
related to the ``autotools`` build system (e.g. dependencies or variants common to all packages
using it), and for encoding the default installation procedure.
In reality, a non-negligible number of packages either change their build system as the project
evolves, or use different build systems on different platforms. An architecture based on a single class
requires hacks or other workarounds to deal with these cases.
To support a model closer to reality, Spack v0.19 changed its internal design by extracting
the attributes and methods related to building the software into a separate hierarchy:
.. image:: images/builder_package_architecture.png
:scale: 60 %
:align: center
In this new format each ``package.py`` contains one ``*Package`` class that gathers all the metadata,
and one or more ``*Builder`` classes that encode the installation procedure. A specific builder object
is created just before the software is built, at a time when Spack knows which build system is to be
used for the current installation, and it receives a ``package`` object during initialization.
^^^^^^^^^^^^^^^^^^^^^^^^
``build_system`` variant
^^^^^^^^^^^^^^^^^^^^^^^^
To allow imposing conditions based on the build system, each package must have a ``build_system`` variant,
which is usually inherited from base classes. This variant allows for writing metadata that is conditional
on the build system:
.. code-block:: python
with when("build_system=cmake"):
depends_on("cmake", type="build")
and also for selecting a specific build system from a spec literal, like in the following command:
.. code-block:: console
$ spack install arpack-ng build_system=autotools
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Compatibility with single-class format
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Internally, Spack always uses builders to perform the operations related to installing a specific piece
of software. Builders are created by the ``spack.builder.create`` function:
.. literalinclude:: _spack_root/lib/spack/spack/builder.py
:pyobject: create
To achieve backward compatibility with the single-class format, this function creates a special
"adapter builder" if no custom builder is detected in the recipe:
.. image:: images/adapter.png
:scale: 60 %
:align: center
Overall, the role of the adapter is to route access to attributes or methods first through the ``*Package``
hierarchy, and then back to the base class builder. This is shown schematically in the diagram above, where
the adapter "emulates" a method resolution order like the one represented by the red arrows.

View File

@ -20,6 +20,8 @@ spack:
- py-docutils@:0.16 - py-docutils@:0.16
- py-sphinx-design - py-sphinx-design
- py-sphinx-rtd-theme - py-sphinx-rtd-theme
- py-pygments@:2.12
# VCS # VCS
- git - git
- mercurial - mercurial

View File

@ -1,5 +1,5 @@
Name, Supported Versions, Notes, Requirement Reason Name, Supported Versions, Notes, Requirement Reason
Python, 2.7/3.6-3.10, , Interpreter for Spack Python, 2.7/3.6-3.11, , Interpreter for Spack
C/C++ Compilers, , , Building software C/C++ Compilers, , , Building software
make, , , Build software make, , , Build software
patch, , , Build software patch, , , Build software
@ -11,6 +11,7 @@ bzip2, , , Compress/Decompress archives
xz, , , Compress/Decompress archives xz, , , Compress/Decompress archives
zstd, , Optional, Compress/Decompress archives zstd, , Optional, Compress/Decompress archives
file, , , Create/Use Buildcaches file, , , Create/Use Buildcaches
lsb-release, , , Linux: identify operating system version
gnupg2, , , Sign/Verify Buildcaches gnupg2, , , Sign/Verify Buildcaches
git, , , Manage Software Repositories git, , , Manage Software Repositories
svn, , Optional, Manage Software Repositories svn, , Optional, Manage Software Repositories


8
lib/spack/env/cc vendored
View File

@ -241,28 +241,28 @@ case "$command" in
mode=cpp mode=cpp
debug_flags="-g" debug_flags="-g"
;; ;;
cc|c89|c99|gcc|clang|armclang|icc|icx|pgcc|nvc|xlc|xlc_r|fcc|amdclang|cl.exe) cc|c89|c99|gcc|clang|armclang|icc|icx|pgcc|nvc|xlc|xlc_r|fcc|amdclang|cl.exe|craycc)
command="$SPACK_CC" command="$SPACK_CC"
language="C" language="C"
comp="CC" comp="CC"
lang_flags=C lang_flags=C
debug_flags="-g" debug_flags="-g"
;; ;;
c++|CC|g++|clang++|armclang++|icpc|icpx|dpcpp|pgc++|nvc++|xlc++|xlc++_r|FCC|amdclang++) c++|CC|g++|clang++|armclang++|icpc|icpx|dpcpp|pgc++|nvc++|xlc++|xlc++_r|FCC|amdclang++|crayCC)
command="$SPACK_CXX" command="$SPACK_CXX"
language="C++" language="C++"
comp="CXX" comp="CXX"
lang_flags=CXX lang_flags=CXX
debug_flags="-g" debug_flags="-g"
;; ;;
ftn|f90|fc|f95|gfortran|flang|armflang|ifort|ifx|pgfortran|nvfortran|xlf90|xlf90_r|nagfor|frt|amdflang) ftn|f90|fc|f95|gfortran|flang|armflang|ifort|ifx|pgfortran|nvfortran|xlf90|xlf90_r|nagfor|frt|amdflang|crayftn)
command="$SPACK_FC" command="$SPACK_FC"
language="Fortran 90" language="Fortran 90"
comp="FC" comp="FC"
lang_flags=F lang_flags=F
debug_flags="-g" debug_flags="-g"
;; ;;
f77|xlf|xlf_r|pgf77|amdflang) f77|xlf|xlf_r|pgf77)
command="$SPACK_F77" command="$SPACK_F77"
language="Fortran 77" language="Fortran 77"
comp="F77" comp="F77"

View File

@ -0,0 +1 @@
../../cc

1
lib/spack/env/cce/craycc vendored Symbolic link
View File

@ -0,0 +1 @@
../cc

1
lib/spack/env/cce/crayftn vendored Symbolic link
View File

@ -0,0 +1 @@
../cc

View File

@ -18,7 +18,7 @@
* Homepage: https://pypi.python.org/pypi/archspec * Homepage: https://pypi.python.org/pypi/archspec
* Usage: Labeling, comparison and detection of microarchitectures * Usage: Labeling, comparison and detection of microarchitectures
* Version: 0.1.4 (commit e2cfdc266174488dee78b8c9058e36d60dc1b548) * Version: 0.2.0 (commit 77640e572725ad97f18e63a04857155752ace045)
argparse argparse
-------- --------

View File

@ -132,9 +132,15 @@ def sysctl(*args):
"model name": sysctl("-n", "machdep.cpu.brand_string"), "model name": sysctl("-n", "machdep.cpu.brand_string"),
} }
else: else:
model = ( model = "unknown"
"m1" if "Apple" in sysctl("-n", "machdep.cpu.brand_string") else "unknown" model_str = sysctl("-n", "machdep.cpu.brand_string").lower()
) if "m2" in model_str:
model = "m2"
elif "m1" in model_str:
model = "m1"
elif "apple" in model_str:
model = "m1"
info = { info = {
"vendor_id": "Apple", "vendor_id": "Apple",
"flags": [], "flags": [],
@ -322,14 +328,26 @@ def compatibility_check_for_aarch64(info, target):
features = set(info.get("Features", "").split()) features = set(info.get("Features", "").split())
vendor = info.get("CPU implementer", "generic") vendor = info.get("CPU implementer", "generic")
# At the moment it's not clear how to detect compatibility with
# a specific version of the architecture
if target.vendor == "generic" and target.name != "aarch64":
return False
arch_root = TARGETS[basename] arch_root = TARGETS[basename]
return ( arch_root_and_vendor = arch_root == target.family and target.vendor in (
(target == arch_root or arch_root in target.ancestors) vendor,
and target.vendor in (vendor, "generic") "generic",
# On macOS it seems impossible to get all the CPU features with syctl info
and (target.features.issubset(features) or platform.system() == "Darwin")
) )
# On macOS it seems impossible to get all the CPU features
# with sysctl info, but for ARM we can get the exact model
if platform.system() == "Darwin":
model_key = info.get("model", basename)
model = TARGETS[model_key]
return arch_root_and_vendor and (target == model or target in model.ancestors)
return arch_root_and_vendor and target.features.issubset(features)
@compatibility_check(architecture_family="riscv64") @compatibility_check(architecture_family="riscv64")
def compatibility_check_for_riscv64(info, target): def compatibility_check_for_riscv64(info, target):

View File

@ -85,7 +85,7 @@
"intel": [ "intel": [
{ {
"versions": ":", "versions": ":",
"name": "x86-64", "name": "pentium4",
"flags": "-march={name} -mtune=generic" "flags": "-march={name} -mtune=generic"
} }
], ],
@ -2093,8 +2093,163 @@
] ]
} }
}, },
"thunderx2": { "armv8.1a": {
"from": ["aarch64"], "from": ["aarch64"],
"vendor": "generic",
"features": [],
"compilers": {
"gcc": [
{
"versions": "5:",
"flags": "-march=armv8.1-a -mtune=generic"
}
],
"clang": [
{
"versions": ":",
"flags": "-march=armv8.1-a -mtune=generic"
}
],
"apple-clang": [
{
"versions": ":",
"flags": "-march=armv8.1-a -mtune=generic"
}
],
"arm": [
{
"versions": ":",
"flags": "-march=armv8.1-a -mtune=generic"
}
]
}
},
"armv8.2a": {
"from": ["armv8.1a"],
"vendor": "generic",
"features": [],
"compilers": {
"gcc": [
{
"versions": "6:",
"flags": "-march=armv8.2-a -mtune=generic"
}
],
"clang": [
{
"versions": ":",
"flags": "-march=armv8.2-a -mtune=generic"
}
],
"apple-clang": [
{
"versions": ":",
"flags": "-march=armv8.2-a -mtune=generic"
}
],
"arm": [
{
"versions": ":",
"flags": "-march=armv8.2-a -mtune=generic"
}
]
}
},
"armv8.3a": {
"from": ["armv8.2a"],
"vendor": "generic",
"features": [],
"compilers": {
"gcc": [
{
"versions": "6:",
"flags": "-march=armv8.3-a -mtune=generic"
}
],
"clang": [
{
"versions": "6:",
"flags": "-march=armv8.3-a -mtune=generic"
}
],
"apple-clang": [
{
"versions": ":",
"flags": "-march=armv8.3-a -mtune=generic"
}
],
"arm": [
{
"versions": ":",
"flags": "-march=armv8.3-a -mtune=generic"
}
]
}
},
"armv8.4a": {
"from": ["armv8.3a"],
"vendor": "generic",
"features": [],
"compilers": {
"gcc": [
{
"versions": "8:",
"flags": "-march=armv8.4-a -mtune=generic"
}
],
"clang": [
{
"versions": "8:",
"flags": "-march=armv8.4-a -mtune=generic"
}
],
"apple-clang": [
{
"versions": ":",
"flags": "-march=armv8.4-a -mtune=generic"
}
],
"arm": [
{
"versions": ":",
"flags": "-march=armv8.4-a -mtune=generic"
}
]
}
},
"armv8.5a": {
"from": ["armv8.4a"],
"vendor": "generic",
"features": [],
"compilers": {
"gcc": [
{
"versions": "9:",
"flags": "-march=armv8.5-a -mtune=generic"
}
],
"clang": [
{
"versions": "11:",
"flags": "-march=armv8.5-a -mtune=generic"
}
],
"apple-clang": [
{
"versions": ":",
"flags": "-march=armv8.5-a -mtune=generic"
}
],
"arm": [
{
"versions": ":",
"flags": "-march=armv8.5-a -mtune=generic"
}
]
}
},
"thunderx2": {
"from": ["armv8.1a"],
"vendor": "Cavium", "vendor": "Cavium",
"features": [ "features": [
"fp", "fp",
@ -2141,7 +2296,7 @@
} }
}, },
"a64fx": { "a64fx": {
"from": ["aarch64"], "from": ["armv8.2a"],
"vendor": "Fujitsu", "vendor": "Fujitsu",
"features": [ "features": [
"fp", "fp",
@ -2209,7 +2364,7 @@
] ]
} }
}, },
"graviton": { "cortex_a72": {
"from": ["aarch64"], "from": ["aarch64"],
"vendor": "ARM", "vendor": "ARM",
"features": [ "features": [
@ -2235,19 +2390,19 @@
}, },
{ {
"versions": "6:", "versions": "6:",
"flags" : "-march=armv8-a+crc+crypto -mtune=cortex-a72" "flags" : "-mcpu=cortex-a72"
} }
], ],
"clang" : [ "clang" : [
{ {
"versions": "3.9:", "versions": "3.9:",
"flags" : "-march=armv8-a+crc+crypto" "flags" : "-mcpu=cortex-a72"
} }
] ]
} }
}, },
"graviton2": { "neoverse_n1": {
"from": ["graviton"], "from": ["cortex_a72", "armv8.2a"],
"vendor": "ARM", "vendor": "ARM",
"features": [ "features": [
"fp", "fp",
@ -2296,7 +2451,7 @@
}, },
{ {
"versions": "9.0:", "versions": "9.0:",
"flags" : "-march=armv8.2-a+fp16+rcpc+dotprod+crypto -mtune=neoverse-n1" "flags" : "-mcpu=neoverse-n1"
} }
], ],
"clang" : [ "clang" : [
@ -2307,6 +2462,10 @@
{ {
"versions": "5:", "versions": "5:",
"flags" : "-march=armv8.2-a+fp16+rcpc+dotprod+crypto" "flags" : "-march=armv8.2-a+fp16+rcpc+dotprod+crypto"
},
{
"versions": "10:",
"flags" : "-mcpu=neoverse-n1"
} }
], ],
"arm" : [ "arm" : [
@ -2317,11 +2476,11 @@
] ]
} }
}, },
"graviton3": { "neoverse_v1": {
"from": ["graviton2"], "from": ["neoverse_n1", "armv8.4a"],
"vendor": "ARM", "vendor": "ARM",
"features": [ "features": [
"fp", "fp",
"asimd", "asimd",
"evtstrm", "evtstrm",
"aes", "aes",
@ -2384,11 +2543,11 @@
}, },
{ {
"versions": "9.0:9.9", "versions": "9.0:9.9",
"flags" : "-march=armv8.4-a+crypto+rcpc+sha3+sm4+sve+rng+nodotprod -mtune=neoverse-v1" "flags" : "-mcpu=neoverse-v1"
}, },
{ {
"versions": "10.0:", "versions": "10.0:",
"flags" : "-march=armv8.4-a+crypto+rcpc+sha3+sm4+sve+rng+ssbs+i8mm+bf16+nodotprod -mtune=neoverse-v1" "flags" : "-mcpu=neoverse-v1"
} }
], ],
@ -2404,6 +2563,10 @@
{ {
"versions": "11:", "versions": "11:",
"flags" : "-march=armv8.4-a+sve+ssbs+fp16+bf16+crypto+i8mm+rng" "flags" : "-march=armv8.4-a+sve+ssbs+fp16+bf16+crypto+i8mm+rng"
},
{
"versions": "12:",
"flags" : "-mcpu=neoverse-v1"
} }
], ],
"arm" : [ "arm" : [
@ -2419,7 +2582,7 @@
} }
}, },
"m1": { "m1": {
"from": ["aarch64"], "from": ["armv8.4a"],
"vendor": "Apple", "vendor": "Apple",
"features": [ "features": [
"fp", "fp",
@ -2484,6 +2647,76 @@
] ]
} }
}, },
"m2": {
"from": ["m1", "armv8.5a"],
"vendor": "Apple",
"features": [
"fp",
"asimd",
"evtstrm",
"aes",
"pmull",
"sha1",
"sha2",
"crc32",
"atomics",
"fphp",
"asimdhp",
"cpuid",
"asimdrdm",
"jscvt",
"fcma",
"lrcpc",
"dcpop",
"sha3",
"asimddp",
"sha512",
"asimdfhm",
"dit",
"uscat",
"ilrcpc",
"flagm",
"ssbs",
"sb",
"paca",
"pacg",
"dcpodp",
"flagm2",
"frint",
"ecv",
"bf16",
"i8mm",
"bti"
],
"compilers": {
"gcc": [
{
"versions": "8.0:",
"flags" : "-march=armv8.5-a -mtune=generic"
}
],
"clang" : [
{
"versions": "9.0:12.0",
"flags" : "-march=armv8.5-a"
},
{
"versions": "13.0:",
"flags" : "-mcpu=apple-m1"
}
],
"apple-clang": [
{
"versions": "11.0:12.5",
"flags" : "-march=armv8.5-a"
},
{
"versions": "13.0:",
"flags" : "-mcpu=vortex"
}
]
}
},
"arm": { "arm": {
"from": [], "from": [],
"vendor": "generic", "vendor": "generic",

View File

@ -505,8 +505,15 @@ def group_ids(uid=None):
if uid is None: if uid is None:
uid = getuid() uid = getuid()
user = pwd.getpwuid(uid).pw_name
return [g.gr_gid for g in grp.getgrall() if user in g.gr_mem] pwd_entry = pwd.getpwuid(uid)
user = pwd_entry.pw_name
# user's primary group id may not be listed in grp (i.e. /etc/group)
# you have to check pwd for that, so start the list with that
gids = [pwd_entry.pw_gid]
return sorted(set(gids + [g.gr_gid for g in grp.getgrall() if user in g.gr_mem]))
@system_path_filter(arg_slice=slice(1)) @system_path_filter(arg_slice=slice(1))
@ -1083,7 +1090,11 @@ def temp_cwd():
with working_dir(tmp_dir): with working_dir(tmp_dir):
yield tmp_dir yield tmp_dir
finally: finally:
shutil.rmtree(tmp_dir) kwargs = {}
if is_windows:
kwargs["ignore_errors"] = False
kwargs["onerror"] = readonly_file_handler(ignore_errors=True)
shutil.rmtree(tmp_dir, **kwargs)
@contextmanager @contextmanager
@ -2095,7 +2106,7 @@ def find_system_libraries(libraries, shared=True):
return libraries_found return libraries_found
def find_libraries(libraries, root, shared=True, recursive=False): def find_libraries(libraries, root, shared=True, recursive=False, runtime=True):
"""Returns an iterable of full paths to libraries found in a root dir. """Returns an iterable of full paths to libraries found in a root dir.
Accepts any glob characters accepted by fnmatch: Accepts any glob characters accepted by fnmatch:
@ -2116,6 +2127,10 @@ def find_libraries(libraries, root, shared=True, recursive=False):
otherwise for static. Defaults to True. otherwise for static. Defaults to True.
recursive (bool): if False search only root folder, recursive (bool): if False search only root folder,
if True descends top-down from the root. Defaults to False. if True descends top-down from the root. Defaults to False.
runtime (bool): Windows only option, no-op elsewhere. If true,
search for runtime shared libs (.DLL), otherwise, search
for .Lib files. If shared is false, this has no meaning.
Defaults to True.
Returns: Returns:
LibraryList: The libraries that have been found LibraryList: The libraries that have been found
@ -2130,7 +2145,9 @@ def find_libraries(libraries, root, shared=True, recursive=False):
if is_windows: if is_windows:
static_ext = "lib" static_ext = "lib"
shared_ext = "dll" # For linking (runtime=False) you need the .lib files regardless of
# whether you are doing a shared or static link
shared_ext = "dll" if runtime else "lib"
else: else:
# Used on both Linux and macOS # Used on both Linux and macOS
static_ext = "a" static_ext = "a"
@ -2174,13 +2191,13 @@ def find_libraries(libraries, root, shared=True, recursive=False):
return LibraryList(found_libs) return LibraryList(found_libs)
def find_all_shared_libraries(root, recursive=False): def find_all_shared_libraries(root, recursive=False, runtime=True):
"""Convenience function that returns the list of all shared libraries found """Convenience function that returns the list of all shared libraries found
in the directory passed as argument. in the directory passed as argument.
See documentation for `llnl.util.filesystem.find_libraries` for more information See documentation for `llnl.util.filesystem.find_libraries` for more information
""" """
return find_libraries("*", root=root, shared=True, recursive=recursive) return find_libraries("*", root=root, shared=True, recursive=recursive, runtime=runtime)
def find_all_static_libraries(root, recursive=False): def find_all_static_libraries(root, recursive=False):
@ -2226,48 +2243,36 @@ def __init__(self, package, link_install_prefix=True):
self.pkg = package self.pkg = package
self._addl_rpaths = set() self._addl_rpaths = set()
self.link_install_prefix = link_install_prefix self.link_install_prefix = link_install_prefix
self._internal_links = set() self._additional_library_dependents = set()
@property @property
def link_dest(self): def library_dependents(self):
""" """
Set of directories where package binaries/libraries are located. Set of directories where package binaries/libraries are located.
""" """
if hasattr(self.pkg, "libs") and self.pkg.libs: return set([self.pkg.prefix.bin]) | self._additional_library_dependents
pkg_libs = set(self.pkg.libs.directories)
else:
pkg_libs = set((self.pkg.prefix.lib, self.pkg.prefix.lib64))
return pkg_libs | set([self.pkg.prefix.bin]) | self.internal_links def add_library_dependent(self, *dest):
@property
def internal_links(self):
""" """
linking that would need to be established within the package itself. Useful for links Add paths to directories or libraries/binaries to the set of
against extension modules/build time executables/internal linkage common paths that need to link against other libraries
"""
return self._internal_links
def add_internal_links(self, *dest): Specified paths should fall outside of a package's common
""" link paths, i.e. the bin
Incorporate additional paths into the rpath (sym)linking scheme.
Paths provided to this method are linked against by a package's libraries
and libraries found at these paths are linked against a package's binaries.
(i.e. /site-packages -> /bin and /bin -> /site-packages)
Specified paths should be outside of a package's lib, lib64, and bin
directories. directories.
""" """
self._internal_links = self._internal_links | set(*dest) for pth in dest:
if os.path.isfile(pth):
self._additional_library_dependents.add(os.path.dirname(pth))
else:
self._additional_library_dependents.add(pth)
@property @property
def link_targets(self): def rpaths(self):
""" """
Set of libraries this package needs to link against during runtime Set of libraries this package needs to link against during runtime
These packages will each be symlinked into the packages lib and binary dir These packages will each be symlinked into the packages lib and binary dir
""" """
dependent_libs = [] dependent_libs = []
for path in self.pkg.rpath: for path in self.pkg.rpath:
dependent_libs.extend(list(find_all_shared_libraries(path, recursive=True))) dependent_libs.extend(list(find_all_shared_libraries(path, recursive=True)))
@ -2275,18 +2280,43 @@ def link_targets(self):
dependent_libs.extend(list(find_all_shared_libraries(extra_path, recursive=True))) dependent_libs.extend(list(find_all_shared_libraries(extra_path, recursive=True)))
return set(dependent_libs) return set(dependent_libs)
def include_additional_link_paths(self, *paths): def add_rpath(self, *paths):
""" """
Add libraries found at the root of provided paths to runtime linking Add libraries found at the root of provided paths to runtime linking
These are libraries found outside of the typical scope of rpath linking These are libraries found outside of the typical scope of rpath linking
that require manual inclusion in a runtime linking scheme that require manual inclusion in a runtime linking scheme.
These links are unidirectional, and are only
intended to bring outside dependencies into this package.
Args: Args:
*paths (str): arbitrary number of paths to be added to runtime linking *paths (str): arbitrary number of paths to be added to runtime linking
""" """
self._addl_rpaths = self._addl_rpaths | set(paths) self._addl_rpaths = self._addl_rpaths | set(paths)
def _link(self, path, dest):
file_name = os.path.basename(path)
dest_file = os.path.join(dest, file_name)
if os.path.exists(dest):
try:
symlink(path, dest_file)
# For py2 compatibility, we have to catch the specific Windows error code
# associated with trying to create a file that already exists (winerror 183)
except OSError as e:
if e.winerror == 183:
# We have either already symlinked or we are encountering a naming clash
# either way, we don't want to overwrite existing libraries
already_linked = islink(dest_file)
tty.debug(
"Linking library %s to %s failed, " % (path, dest_file) + "already linked."
if already_linked
else "library with name %s already exists at location %s."
% (file_name, dest)
)
pass
else:
raise e
def establish_link(self): def establish_link(self):
""" """
(sym)link packages to runtime dependencies based on RPath configuration for (sym)link packages to runtime dependencies based on RPath configuration for
@ -2298,29 +2328,8 @@ def establish_link(self):
# for each binary install dir in self.pkg (i.e. pkg.prefix.bin, pkg.prefix.lib) # for each binary install dir in self.pkg (i.e. pkg.prefix.bin, pkg.prefix.lib)
# install a symlink to each dependent library # install a symlink to each dependent library
for library, lib_dir in itertools.product(self.link_targets, self.link_dest): for library, lib_dir in itertools.product(self.rpaths, self.library_dependents):
if not path_contains_subdirectory(library, lib_dir): self._link(library, lib_dir)
file_name = os.path.basename(library)
dest_file = os.path.join(lib_dir, file_name)
if os.path.exists(lib_dir):
try:
symlink(library, dest_file)
# For py2 compatibility, we have to catch the specific Windows error code
# associate with trying to create a file that already exists (winerror 183)
except OSError as e:
if e.winerror == 183:
# We have either already symlinked or we are encoutering a naming clash
# either way, we don't want to overwrite existing libraries
already_linked = islink(dest_file)
tty.debug(
"Linking library %s to %s failed, " % (library, dest_file)
+ "already linked."
if already_linked
else "library with name %s already exists." % file_name
)
pass
else:
raise e
@system_path_filter @system_path_filter

View File

@ -749,6 +749,26 @@ def _n_xxx_ago(x):
raise ValueError(msg) raise ValueError(msg)
def pretty_seconds(seconds):
"""Seconds to string with appropriate units
Arguments:
seconds (float): Number of seconds
Returns:
str: Time string with units
"""
if seconds >= 1:
value, unit = seconds, "s"
elif seconds >= 1e-3:
value, unit = seconds * 1e3, "ms"
elif seconds >= 1e-6:
value, unit = seconds * 1e6, "us"
else:
value, unit = seconds * 1e9, "ns"
return "%.3f%s" % (value, unit)
class RequiredAttributeError(ValueError): class RequiredAttributeError(ValueError):
def __init__(self, message): def __init__(self, message):
super(RequiredAttributeError, self).__init__(message) super(RequiredAttributeError, self).__init__(message)
@ -1002,6 +1022,14 @@ def stable_partition(
return true_items, false_items return true_items, false_items
def ensure_last(lst, *elements):
"""Performs a stable partition of lst, ensuring that ``elements``
occur at the end of ``lst`` in specified order. Mutates ``lst``.
Raises ``ValueError`` if any ``elements`` are not already in ``lst``."""
for elt in elements:
lst.append(lst.pop(lst.index(elt)))
class TypedMutableSequence(MutableSequence): class TypedMutableSequence(MutableSequence):
"""Base class that behaves like a list, just with a different type. """Base class that behaves like a list, just with a different type.

View File

@ -12,6 +12,7 @@
from typing import Dict, Tuple # novm from typing import Dict, Tuple # novm
import llnl.util.tty as tty import llnl.util.tty as tty
from llnl.util.lang import pretty_seconds
import spack.util.string import spack.util.string
@ -166,7 +167,7 @@ def _attempts_str(wait_time, nattempts):
return "" return ""
attempts = spack.util.string.plural(nattempts, "attempt") attempts = spack.util.string.plural(nattempts, "attempt")
return " after {0:0.2f}s and {1}".format(wait_time, attempts) return " after {} and {}".format(pretty_seconds(wait_time), attempts)
class LockType(object): class LockType(object):
@ -318,8 +319,8 @@ def _lock(self, op, timeout=None):
raise LockROFileError(self.path) raise LockROFileError(self.path)
self._log_debug( self._log_debug(
"{0} locking [{1}:{2}]: timeout {3} sec".format( "{} locking [{}:{}]: timeout {}".format(
op_str.lower(), self._start, self._length, timeout op_str.lower(), self._start, self._length, pretty_seconds(timeout or 0)
) )
) )
@ -340,7 +341,8 @@ def _lock(self, op, timeout=None):
total_wait_time = time.time() - start_time total_wait_time = time.time() - start_time
return total_wait_time, num_attempts return total_wait_time, num_attempts
raise LockTimeoutError("Timed out waiting for a {0} lock.".format(op_str.lower())) total_wait_time = time.time() - start_time
raise LockTimeoutError(op_str.lower(), self.path, total_wait_time, num_attempts)
def _poll_lock(self, op): def _poll_lock(self, op):
"""Attempt to acquire the lock in a non-blocking manner. Return whether """Attempt to acquire the lock in a non-blocking manner. Return whether
@ -780,6 +782,18 @@ class LockLimitError(LockError):
class LockTimeoutError(LockError): class LockTimeoutError(LockError):
"""Raised when an attempt to acquire a lock times out.""" """Raised when an attempt to acquire a lock times out."""
def __init__(self, lock_type, path, time, attempts):
fmt = "Timed out waiting for a {} lock after {}.\n Made {} {} on file: {}"
super(LockTimeoutError, self).__init__(
fmt.format(
lock_type,
pretty_seconds(time),
attempts,
"attempt" if attempts == 1 else "attempts",
path,
)
)
class LockUpgradeError(LockError): class LockUpgradeError(LockError):
"""Raised when unable to upgrade from a read to a write lock.""" """Raised when unable to upgrade from a read to a write lock."""

View File

@ -3,11 +3,20 @@
# #
# SPDX-License-Identifier: (Apache-2.0 OR MIT) # SPDX-License-Identifier: (Apache-2.0 OR MIT)
#: (major, minor, micro, dev release) tuple
spack_version_info = (0, 19, 0, "dev0")
#: PEP440 canonical <major>.<minor>.<micro>.<devN> string #: PEP440 canonical <major>.<minor>.<micro>.<devN> string
spack_version = ".".join(str(s) for s in spack_version_info) __version__ = "0.19.0.dev0"
spack_version = __version__
def __try_int(v):
try:
return int(v)
except ValueError:
return v
#: (major, minor, micro, dev release) tuple
spack_version_info = tuple([__try_int(v) for v in __version__.split(".")])
__all__ = ["spack_version_info", "spack_version"] __all__ = ["spack_version_info", "spack_version"]
__version__ = spack_version

View File

@ -503,6 +503,33 @@ def invalid_sha256_digest(fetcher):
return errors return errors
@package_properties
def _ensure_env_methods_are_ported_to_builders(pkgs, error_cls):
"""Ensure that methods modifying the build environment are ported to builder classes."""
errors = []
for pkg_name in pkgs:
pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
buildsystem_variant, _ = pkg_cls.variants["build_system"]
buildsystem_names = [getattr(x, "value", x) for x in buildsystem_variant.values]
builder_cls_names = [spack.builder.BUILDER_CLS[x].__name__ for x in buildsystem_names]
module = pkg_cls.module
has_builders_in_package_py = any(
getattr(module, name, False) for name in builder_cls_names
)
if not has_builders_in_package_py:
continue
for method_name in ("setup_build_environment", "setup_dependent_build_environment"):
if hasattr(pkg_cls, method_name):
msg = (
"Package '{}' need to move the '{}' method from the package class to the"
" appropriate builder class".format(pkg_name, method_name)
)
errors.append(error_cls(msg, []))
return errors
@package_https_directives @package_https_directives
def _linting_package_file(pkgs, error_cls): def _linting_package_file(pkgs, error_cls):
"""Check for correctness of links""" """Check for correctness of links"""
@ -660,7 +687,13 @@ def _ensure_variant_defaults_are_parsable(pkgs, error_cls):
errors.append(error_cls(error_msg.format(variant_name, pkg_name), [])) errors.append(error_cls(error_msg.format(variant_name, pkg_name), []))
continue continue
vspec = variant.make_default() try:
vspec = variant.make_default()
except spack.variant.MultipleValuesInExclusiveVariantError:
error_msg = "Cannot create a default value for the variant '{}' in package '{}'"
errors.append(error_cls(error_msg.format(variant_name, pkg_name), []))
continue
try: try:
variant.validate_or_raise(vspec, pkg_cls=pkg_cls) variant.validate_or_raise(vspec, pkg_cls=pkg_cls)
except spack.variant.InvalidVariantValueError: except spack.variant.InvalidVariantValueError:

View File

@ -7,11 +7,13 @@
import collections import collections
import hashlib import hashlib
import json import json
import multiprocessing.pool
import os import os
import shutil import shutil
import sys import sys
import tarfile import tarfile
import tempfile import tempfile
import time
import traceback import traceback
import warnings import warnings
from contextlib import closing from contextlib import closing
@ -22,7 +24,7 @@
import llnl.util.filesystem as fsys import llnl.util.filesystem as fsys
import llnl.util.lang import llnl.util.lang
import llnl.util.tty as tty import llnl.util.tty as tty
from llnl.util.filesystem import mkdirp from llnl.util.filesystem import BaseDirectoryVisitor, mkdirp, visit_directory_tree
import spack.cmd import spack.cmd
import spack.config as config import spack.config as config
@ -41,8 +43,10 @@
import spack.util.url as url_util import spack.util.url as url_util
import spack.util.web as web_util import spack.util.web as web_util
from spack.caches import misc_cache_location from spack.caches import misc_cache_location
from spack.relocate import utf8_paths_to_single_binary_regex
from spack.spec import Spec from spack.spec import Spec
from spack.stage import Stage from spack.stage import Stage
from spack.util.executable import which
_build_cache_relative_path = "build_cache" _build_cache_relative_path = "build_cache"
_build_cache_keys_relative_path = "_pgp" _build_cache_keys_relative_path = "_pgp"
@ -70,6 +74,10 @@ def __init__(self, errors):
super(FetchCacheError, self).__init__(self.message) super(FetchCacheError, self).__init__(self.message)
class ListMirrorSpecsError(spack.error.SpackError):
"""Raised when unable to retrieve list of specs from the mirror"""
class BinaryCacheIndex(object): class BinaryCacheIndex(object):
""" """
The BinaryCacheIndex tracks what specs are available on (usually remote) The BinaryCacheIndex tracks what specs are available on (usually remote)
@ -105,6 +113,10 @@ def __init__(self, cache_root):
# cache (_mirrors_for_spec) # cache (_mirrors_for_spec)
self._specs_already_associated = set() self._specs_already_associated = set()
# mapping from mirror urls to the time.time() of the last index fetch and a bool indicating
# whether the fetch succeeded or not.
self._last_fetch_times = {}
# _mirrors_for_spec is a dictionary mapping DAG hashes to lists of # _mirrors_for_spec is a dictionary mapping DAG hashes to lists of
# entries indicating mirrors where that concrete spec can be found. # entries indicating mirrors where that concrete spec can be found.
# Each entry is a dictionary consisting of: # Each entry is a dictionary consisting of:
@ -137,6 +149,7 @@ def clear(self):
self._index_file_cache = None self._index_file_cache = None
self._local_index_cache = None self._local_index_cache = None
self._specs_already_associated = set() self._specs_already_associated = set()
self._last_fetch_times = {}
self._mirrors_for_spec = {} self._mirrors_for_spec = {}
def _write_local_index_cache(self): def _write_local_index_cache(self):
@ -242,7 +255,6 @@ def find_built_spec(self, spec, mirrors_to_check=None):
} }
] ]
""" """
self.regenerate_spec_cache()
return self.find_by_hash(spec.dag_hash(), mirrors_to_check=mirrors_to_check) return self.find_by_hash(spec.dag_hash(), mirrors_to_check=mirrors_to_check)
def find_by_hash(self, find_hash, mirrors_to_check=None): def find_by_hash(self, find_hash, mirrors_to_check=None):
@ -253,6 +265,9 @@ def find_by_hash(self, find_hash, mirrors_to_check=None):
mirrors_to_check: Optional mapping containing mirrors to check. If mirrors_to_check: Optional mapping containing mirrors to check. If
None, just assumes all configured mirrors. None, just assumes all configured mirrors.
""" """
if find_hash not in self._mirrors_for_spec:
# Not found in the cached index, pull the latest from the server.
self.update(with_cooldown=True)
if find_hash not in self._mirrors_for_spec: if find_hash not in self._mirrors_for_spec:
return None return None
results = self._mirrors_for_spec[find_hash] results = self._mirrors_for_spec[find_hash]
@ -283,7 +298,7 @@ def update_spec(self, spec, found_list):
"spec": new_entry["spec"], "spec": new_entry["spec"],
} }
def update(self): def update(self, with_cooldown=False):
"""Make sure local cache of buildcache index files is up to date. """Make sure local cache of buildcache index files is up to date.
If the same mirrors are configured as the last time this was called If the same mirrors are configured as the last time this was called
and none of the remote buildcache indices have changed, calling this and none of the remote buildcache indices have changed, calling this
@ -325,24 +340,41 @@ def update(self):
fetch_errors = [] fetch_errors = []
all_methods_failed = True all_methods_failed = True
ttl = spack.config.get("config:binary_index_ttl", 600)
now = time.time()
for cached_mirror_url in self._local_index_cache: for cached_mirror_url in self._local_index_cache:
cache_entry = self._local_index_cache[cached_mirror_url] cache_entry = self._local_index_cache[cached_mirror_url]
cached_index_hash = cache_entry["index_hash"] cached_index_hash = cache_entry["index_hash"]
cached_index_path = cache_entry["index_path"] cached_index_path = cache_entry["index_path"]
if cached_mirror_url in configured_mirror_urls: if cached_mirror_url in configured_mirror_urls:
# May need to fetch the index and update the local caches # Only do a fetch if the last fetch was longer than TTL ago
try: if (
needs_regen = self._fetch_and_cache_index( with_cooldown
cached_mirror_url, expect_hash=cached_index_hash and ttl > 0
) and cached_mirror_url in self._last_fetch_times
all_methods_failed = False and now - self._last_fetch_times[cached_mirror_url][0] < ttl
except FetchCacheError as fetch_error: ):
needs_regen = False # We're in the cooldown period, don't try to fetch again
fetch_errors.extend(fetch_error.errors) # If the fetch succeeded last time, consider this update a success, otherwise
# The need to regenerate implies a need to clear as well. # re-report the error here
spec_cache_clear_needed |= needs_regen if self._last_fetch_times[cached_mirror_url][1]:
spec_cache_regenerate_needed |= needs_regen all_methods_failed = False
else:
# May need to fetch the index and update the local caches
try:
needs_regen = self._fetch_and_cache_index(
cached_mirror_url, expect_hash=cached_index_hash
)
self._last_fetch_times[cached_mirror_url] = (now, True)
all_methods_failed = False
except FetchCacheError as fetch_error:
needs_regen = False
fetch_errors.extend(fetch_error.errors)
self._last_fetch_times[cached_mirror_url] = (now, False)
# The need to regenerate implies a need to clear as well.
spec_cache_clear_needed |= needs_regen
spec_cache_regenerate_needed |= needs_regen
else: else:
# No longer have this mirror, cached index should be removed # No longer have this mirror, cached index should be removed
items_to_remove.append( items_to_remove.append(
@ -351,6 +383,8 @@ def update(self):
"cache_key": os.path.join(self._index_cache_root, cached_index_path), "cache_key": os.path.join(self._index_cache_root, cached_index_path),
} }
) )
if cached_mirror_url in self._last_fetch_times:
del self._last_fetch_times[cached_mirror_url]
spec_cache_clear_needed = True spec_cache_clear_needed = True
spec_cache_regenerate_needed = True spec_cache_regenerate_needed = True
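The cooldown above keeps a (timestamp, succeeded) pair per mirror URL and skips re-fetching inside the TTL window. A minimal standalone sketch of the same idea, with hypothetical names and a default TTL matching config:binary_index_ttl:

import time

class FetchCooldown:
    """Sketch: remember the last fetch per URL and honor a TTL."""

    def __init__(self, ttl=600):
        self.ttl = ttl
        self._last = {}  # url -> (timestamp, succeeded)

    def should_fetch(self, url, now=None):
        now = time.time() if now is None else now
        entry = self._last.get(url)
        return entry is None or now - entry[0] >= self.ttl

    def record(self, url, succeeded):
        self._last[url] = (time.time(), succeeded)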
@ -369,10 +403,12 @@ def update(self):
# Need to fetch the index and update the local caches # Need to fetch the index and update the local caches
try: try:
needs_regen = self._fetch_and_cache_index(mirror_url) needs_regen = self._fetch_and_cache_index(mirror_url)
self._last_fetch_times[mirror_url] = (now, True)
all_methods_failed = False all_methods_failed = False
except FetchCacheError as fetch_error: except FetchCacheError as fetch_error:
fetch_errors.extend(fetch_error.errors) fetch_errors.extend(fetch_error.errors)
needs_regen = False needs_regen = False
self._last_fetch_times[mirror_url] = (now, False)
# Generally speaking, a new mirror wouldn't imply the need to # Generally speaking, a new mirror wouldn't imply the need to
# clear the spec cache, so leave it as is. # clear the spec cache, so leave it as is.
if needs_regen: if needs_regen:
@ -619,6 +655,57 @@ def read_buildinfo_file(prefix):
return buildinfo return buildinfo
class BuildManifestVisitor(BaseDirectoryVisitor):
"""Visitor that collects a list of files and symlinks
that can be checked for need of relocation. It knows how
to dedupe hardlinks and deal with symlinks to files and
directories."""
def __init__(self):
# Save unique identifiers of files to avoid
# relocating hardlink files for each path.
self.visited = set()
# Lists of files we will check
self.files = []
self.symlinks = []
def seen_before(self, root, rel_path):
stat_result = os.lstat(os.path.join(root, rel_path))
identifier = (stat_result.st_dev, stat_result.st_ino)
if identifier in self.visited:
return True
else:
self.visited.add(identifier)
return False
def visit_file(self, root, rel_path, depth):
if self.seen_before(root, rel_path):
return
self.files.append(rel_path)
def visit_symlinked_file(self, root, rel_path, depth):
# Note: symlinks *can* be hardlinked, but it is unclear if
# symlinks can be relinked in-place (preserving inode).
# Therefore, we do *not* de-dupe hardlinked symlinks.
self.symlinks.append(rel_path)
def before_visit_dir(self, root, rel_path, depth):
return os.path.basename(rel_path) not in (".spack", "man")
def before_visit_symlinked_dir(self, root, rel_path, depth):
# Treat symlinked directories simply as symlinks.
self.visit_symlinked_file(root, rel_path, depth)
# Never recurse into symlinked directories.
return False
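The visitor is driven by visit_directory_tree, exactly as get_buildfile_manifest does below; a hypothetical standalone use (the prefix path is invented):

from llnl.util.filesystem import visit_directory_tree

visitor = BuildManifestVisitor()
visit_directory_tree("/opt/spack/example-prefix", visitor)  # hypothetical prefix
# visitor.files: regular files, with hardlinks deduped by (st_dev, st_ino)
# visitor.symlinks: symlinks, including symlinked directories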
def file_matches(path, regex):
with open(path, "rb") as f:
contents = f.read()
return bool(regex.search(contents))
def get_buildfile_manifest(spec): def get_buildfile_manifest(spec):
""" """
Return a data structure with information about a build, including Return a data structure with information about a build, including
@ -634,57 +721,61 @@ def get_buildfile_manifest(spec):
"link_to_relocate": [], "link_to_relocate": [],
"other": [], "other": [],
"binary_to_relocate_fullpath": [], "binary_to_relocate_fullpath": [],
"hardlinks_deduped": True,
} }
exclude_list = (".spack", "man") # Guard against filesystem footguns of hardlinks and symlinks by using
# a visitor to retrieve a list of files and symlinks, so we don't have
# to worry about hardlinks of symlinked dirs and what not.
visitor = BuildManifestVisitor()
root = spec.prefix
visit_directory_tree(root, visitor)
# Do this during tarball creation to save time when the tarball is unpacked. # Collect a list of prefixes for this package and its dependencies; Spack will
# Used by make_package_relative to determine binaries to change. # look for them to decide if a text file needs to be relocated or not
for root, dirs, files in os.walk(spec.prefix, topdown=True): prefixes = [d.prefix for d in spec.traverse(root=True, deptype="all") if not d.external]
dirs[:] = [d for d in dirs if d not in exclude_list] prefixes.append(spack.hooks.sbang.sbang_install_path())
prefixes.append(str(spack.store.layout.root))
# Directories may need to be relocated too. # Create a giant regex that matches all prefixes
for directory in dirs: regex = utf8_paths_to_single_binary_regex(prefixes)
dir_path_name = os.path.join(root, directory)
rel_path_name = os.path.relpath(dir_path_name, spec.prefix)
if os.path.islink(dir_path_name):
link = os.readlink(dir_path_name)
if os.path.isabs(link) and link.startswith(spack.store.layout.root):
data["link_to_relocate"].append(rel_path_name)
for filename in files: # Symlinks.
path_name = os.path.join(root, filename)
m_type, m_subtype = fsys.mime_type(path_name)
rel_path_name = os.path.relpath(path_name, spec.prefix)
added = False
if os.path.islink(path_name): # Obvious bugs:
link = os.readlink(path_name) # 1. relative links are not relocated.
if os.path.isabs(link): # 2. paths are used as strings.
# Relocate absolute links into the spack tree for rel_path in visitor.symlinks:
if link.startswith(spack.store.layout.root): abs_path = os.path.join(root, rel_path)
data["link_to_relocate"].append(rel_path_name) link = os.readlink(abs_path)
added = True if os.path.isabs(link) and link.startswith(spack.store.layout.root):
data["link_to_relocate"].append(rel_path)
if relocate.needs_binary_relocation(m_type, m_subtype): # Non-symlinks.
if ( for rel_path in visitor.files:
( abs_path = os.path.join(root, rel_path)
m_subtype in ("x-executable", "x-sharedlib", "x-pie-executable") m_type, m_subtype = fsys.mime_type(abs_path)
and sys.platform != "darwin"
)
or (m_subtype in ("x-mach-binary") and sys.platform == "darwin")
or (not filename.endswith(".o"))
):
data["binary_to_relocate"].append(rel_path_name)
data["binary_to_relocate_fullpath"].append(path_name)
added = True
if relocate.needs_text_relocation(m_type, m_subtype): if relocate.needs_binary_relocation(m_type, m_subtype):
data["text_to_relocate"].append(rel_path_name) # Why is this branch not part of needs_binary_relocation? :(
added = True if (
(
m_subtype in ("x-executable", "x-sharedlib", "x-pie-executable")
and sys.platform != "darwin"
)
or (m_subtype in ("x-mach-binary") and sys.platform == "darwin")
or (not rel_path.endswith(".o"))
):
data["binary_to_relocate"].append(rel_path)
data["binary_to_relocate_fullpath"].append(abs_path)
continue
elif relocate.needs_text_relocation(m_type, m_subtype) and file_matches(abs_path, regex):
data["text_to_relocate"].append(rel_path)
continue
data["other"].append(abs_path)
if not added:
data["other"].append(path_name)
return data return data
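The single-regex approach is what makes text scanning cheap: each file is searched once against one alternation instead of once per prefix. A hedged sketch of the shape of that regex (utf8_paths_to_single_binary_regex lives in spack.relocate; this stand-in is only illustrative):

import re

def paths_to_single_regex(prefixes):
    # Sketch: escape each prefix and join into one binary alternation,
    # so a file's bytes are searched once for all prefixes.
    return re.compile(b"|".join(re.escape(p.encode("utf-8")) for p in prefixes))

regex = paths_to_single_regex(["/opt/spack/store", "/opt/spack/sbang"])
assert regex.search(b"#!/opt/spack/sbang/sbang")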
@ -698,7 +789,7 @@ def write_buildinfo_file(spec, workdir, rel=False):
prefix_to_hash = dict() prefix_to_hash = dict()
prefix_to_hash[str(spec.package.prefix)] = spec.dag_hash() prefix_to_hash[str(spec.package.prefix)] = spec.dag_hash()
deps = spack.build_environment.get_rpath_deps(spec.package) deps = spack.build_environment.get_rpath_deps(spec.package)
for d in deps: for d in deps + spec.dependencies(deptype="run"):
prefix_to_hash[str(d.prefix)] = d.dag_hash() prefix_to_hash[str(d.prefix)] = d.dag_hash()
# Create buildinfo data and write it to disk # Create buildinfo data and write it to disk
@ -711,6 +802,7 @@ def write_buildinfo_file(spec, workdir, rel=False):
buildinfo["relocate_textfiles"] = manifest["text_to_relocate"] buildinfo["relocate_textfiles"] = manifest["text_to_relocate"]
buildinfo["relocate_binaries"] = manifest["binary_to_relocate"] buildinfo["relocate_binaries"] = manifest["binary_to_relocate"]
buildinfo["relocate_links"] = manifest["link_to_relocate"] buildinfo["relocate_links"] = manifest["link_to_relocate"]
buildinfo["hardlinks_deduped"] = manifest["hardlinks_deduped"]
buildinfo["prefix_to_hash"] = prefix_to_hash buildinfo["prefix_to_hash"] = prefix_to_hash
filename = buildinfo_file_name(workdir) filename = buildinfo_file_name(workdir)
with open(filename, "w") as outfile: with open(filename, "w") as outfile:
@ -795,37 +887,52 @@ def sign_specfile(key, force, specfile_path):
spack.util.gpg.sign(key, specfile_path, signed_specfile_path, clearsign=True) spack.util.gpg.sign(key, specfile_path, signed_specfile_path, clearsign=True)
def _fetch_spec_from_mirror(spec_url): def _read_specs_and_push_index(file_list, read_method, cache_prefix, db, temp_dir, concurrency):
s = None """Read all the specs listed in the provided list, using thread given thread parallelism,
tty.debug("fetching {0}".format(spec_url)) generate the index, and push it to the mirror.
_, _, spec_file = web_util.read_from_url(spec_url)
spec_file_contents = codecs.getreader("utf-8")(spec_file).read()
# Need full spec.json name or this gets confused with index.json.
if spec_url.endswith(".json.sig"):
specfile_json = Spec.extract_json_from_clearsig(spec_file_contents)
s = Spec.from_dict(specfile_json)
elif spec_url.endswith(".json"):
s = Spec.from_json(spec_file_contents)
elif spec_url.endswith(".yaml"):
s = Spec.from_yaml(spec_file_contents)
return s
Args:
file_list (list(str)): List of urls or file paths pointing at spec files to read
read_method: A function taking a single argument, either a url or a file path,
and which reads the spec file at that location, and returns the spec.
cache_prefix (str): prefix of the build cache on s3 where the index should be pushed.
db: A spack database used for adding specs and then writing the index.
temp_dir (str): Location to write index.json and hash for pushing
concurrency (int): Number of parallel processes to use when fetching
def _read_specs_and_push_index(file_list, cache_prefix, db, db_root_dir): Return:
for file_path in file_list: None
try: """
s = _fetch_spec_from_mirror(url_util.join(cache_prefix, file_path))
except (URLError, web_util.SpackWebError) as url_err:
tty.error("Error reading specfile: {0}".format(file_path))
tty.error(url_err)
if s: def _fetch_spec_from_mirror(spec_url):
db.add(s, None) spec_file_contents = read_method(spec_url)
db.mark(s, "in_buildcache", True)
if spec_file_contents:
# Need full spec.json name or this gets confused with index.json.
if spec_url.endswith(".json.sig"):
specfile_json = Spec.extract_json_from_clearsig(spec_file_contents)
return Spec.from_dict(specfile_json)
if spec_url.endswith(".json"):
return Spec.from_json(spec_file_contents)
if spec_url.endswith(".yaml"):
return Spec.from_yaml(spec_file_contents)
tp = multiprocessing.pool.ThreadPool(processes=concurrency)
try:
fetched_specs = tp.map(
llnl.util.lang.star(_fetch_spec_from_mirror), [(f,) for f in file_list]
)
finally:
tp.terminate()
tp.join()
for fetched_spec in fetched_specs:
db.add(fetched_spec, None)
db.mark(fetched_spec, "in_buildcache", True)
# Now generate the index, compute its hash, and push the two files to # Now generate the index, compute its hash, and push the two files to
# the mirror. # the mirror.
index_json_path = os.path.join(db_root_dir, "index.json") index_json_path = os.path.join(temp_dir, "index.json")
with open(index_json_path, "w") as f: with open(index_json_path, "w") as f:
db._write_to_file(f) db._write_to_file(f)
@ -835,7 +942,7 @@ def _read_specs_and_push_index(file_list, cache_prefix, db, db_root_dir):
index_hash = compute_hash(index_string) index_hash = compute_hash(index_string)
# Write the hash out to a local file # Write the hash out to a local file
index_hash_path = os.path.join(db_root_dir, "index.json.hash") index_hash_path = os.path.join(temp_dir, "index.json.hash")
with open(index_hash_path, "w") as f: with open(index_hash_path, "w") as f:
f.write(index_hash) f.write(index_hash)
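The ThreadPool pattern above bounds concurrency and always tears the pool down; llnl.util.lang.star merely adapts an argument tuple to a positional call. A minimal sketch under the same assumptions (read_fn takes one location and returns file contents or None; the star-adapter is dropped since there is a single argument):

import multiprocessing.pool

def fetch_all(read_fn, locations, concurrency=32):
    # Sketch: fetch every spec file with bounded thread parallelism,
    # making sure the pool is terminated even on error.
    tp = multiprocessing.pool.ThreadPool(processes=concurrency)
    try:
        return tp.map(read_fn, locations)
    finally:
        tp.terminate()
        tp.join()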
@ -856,33 +963,152 @@ def _read_specs_and_push_index(file_list, cache_prefix, db, db_root_dir):
) )
def generate_package_index(cache_prefix): def _specs_from_cache_aws_cli(cache_prefix):
"""Create the build cache index page. """Use aws cli to sync all the specs into a local temporary directory.
Creates (or replaces) the "index.json" page at the location given in Args:
cache_prefix. This page contains a link for each binary package (.yaml or cache_prefix (str): prefix of the build cache on s3
.json) under cache_prefix.
Return:
List of the local file paths and a function that can read each one from the file system.
""" """
read_fn = None
file_list = None
aws = which("aws")
def file_read_method(file_path):
with open(file_path) as fd:
return fd.read()
tmpspecsdir = tempfile.mkdtemp()
sync_command_args = [
"s3",
"sync",
"--exclude",
"*",
"--include",
"*.spec.json.sig",
"--include",
"*.spec.json",
"--include",
"*.spec.yaml",
cache_prefix,
tmpspecsdir,
]
try: try:
file_list = ( tty.debug(
entry "Using aws s3 sync to download specs from {0} to {1}".format(cache_prefix, tmpspecsdir)
)
aws(*sync_command_args, output=os.devnull, error=os.devnull)
file_list = fsys.find(tmpspecsdir, ["*.spec.json.sig", "*.spec.json", "*.spec.yaml"])
read_fn = file_read_method
except Exception:
tty.warn("Failed to use aws s3 sync to retrieve specs, falling back to parallel fetch")
shutil.rmtree(tmpspecsdir)
return file_list, read_fn
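The aws-cli path shells out through Spack's Executable wrapper; the equivalent invocation with plain subprocess, as a hedged sketch (the bucket URL is hypothetical):

import subprocess
import tempfile

tmpspecsdir = tempfile.mkdtemp()
subprocess.check_call(
    [
        "aws", "s3", "sync",
        "--exclude", "*",
        "--include", "*.spec.json.sig",
        "--include", "*.spec.json",
        "--include", "*.spec.yaml",
        "s3://example-mirror/build_cache",  # hypothetical prefix
        tmpspecsdir,
    ],
    stdout=subprocess.DEVNULL,
    stderr=subprocess.DEVNULL,
)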
def _specs_from_cache_fallback(cache_prefix):
"""Use spack.util.web module to get a list of all the specs at the remote url.
Args:
cache_prefix (str): Base url of mirror (location of spec files)
Return:
The list of complete spec file urls and a function that can read each one from its
remote location (also using the spack.util.web module).
"""
read_fn = None
file_list = None
def url_read_method(url):
contents = None
try:
_, _, spec_file = web_util.read_from_url(url)
contents = codecs.getreader("utf-8")(spec_file).read()
except (URLError, web_util.SpackWebError) as url_err:
tty.error("Error reading specfile: {0}".format(url))
tty.error(url_err)
return contents
try:
file_list = [
url_util.join(cache_prefix, entry)
for entry in web_util.list_url(cache_prefix) for entry in web_util.list_url(cache_prefix)
if entry.endswith(".yaml") if entry.endswith(".yaml")
or entry.endswith("spec.json") or entry.endswith("spec.json")
or entry.endswith("spec.json.sig") or entry.endswith("spec.json.sig")
) ]
read_fn = url_read_method
except KeyError as inst: except KeyError as inst:
msg = "No packages at {0}: {1}".format(cache_prefix, inst) msg = "No packages at {0}: {1}".format(cache_prefix, inst)
tty.warn(msg) tty.warn(msg)
return
except Exception as err: except Exception as err:
# If we got some kind of S3 (access denied or other connection # If we got some kind of S3 (access denied or other connection
# error), the first non boto-specific class in the exception # error), the first non boto-specific class in the exception
# hierarchy is Exception. Just print a warning and return # hierarchy is Exception. Just print a warning and return
msg = "Encountered problem listing packages at {0}: {1}".format(cache_prefix, err) msg = "Encountered problem listing packages at {0}: {1}".format(cache_prefix, err)
tty.warn(msg) tty.warn(msg)
return file_list, read_fn
def _spec_files_from_cache(cache_prefix):
"""Get a list of all the spec files in the mirror and a function to
read them.
Args:
cache_prefix (str): Base url of mirror (location of spec files)
Return:
A tuple where the first item is a list of absolute file paths or
urls pointing to the specs that should be read from the mirror,
and the second item is a function taking a url or file path and
returning the spec read from that location.
"""
callbacks = []
if cache_prefix.startswith("s3"):
callbacks.append(_specs_from_cache_aws_cli)
callbacks.append(_specs_from_cache_fallback)
for specs_from_cache_fn in callbacks:
file_list, read_fn = specs_from_cache_fn(cache_prefix)
if file_list:
return file_list, read_fn
raise ListMirrorSpecsError("Failed to get list of specs from {0}".format(cache_prefix))
def generate_package_index(cache_prefix, concurrency=32):
"""Create or replace the build cache index on the given mirror. The
buildcache index contains an entry for each binary package under the
cache_prefix.
Args:
cache_prefix(str): Base url of binary mirror.
concurrency (int): The desired threading concurrency to use when
fetching the spec files from the mirror.
Return:
None
"""
try:
file_list, read_fn = _spec_files_from_cache(cache_prefix)
except ListMirrorSpecsError as err:
tty.error("Unabled to generate package index, {0}".format(err))
return return
if any(x.endswith(".yaml") for x in file_list):
msg = (
"The mirror in '{}' contains specs in the deprecated YAML format.\n\n\tSupport for "
"this format will be removed in v0.20, please regenerate the build cache with a "
"recent Spack\n"
).format(cache_prefix)
warnings.warn(msg)
tty.debug("Retrieving spec descriptor files from {0} to build index".format(cache_prefix)) tty.debug("Retrieving spec descriptor files from {0} to build index".format(cache_prefix))
tmpdir = tempfile.mkdtemp() tmpdir = tempfile.mkdtemp()
@ -895,7 +1121,7 @@ def generate_package_index(cache_prefix):
) )
try: try:
_read_specs_and_push_index(file_list, cache_prefix, db, db_root_dir) _read_specs_and_push_index(file_list, read_fn, cache_prefix, db, db_root_dir, concurrency)
except Exception as err: except Exception as err:
msg = "Encountered problem pushing package index to {0}: {1}".format(cache_prefix, err) msg = "Encountered problem pushing package index to {0}: {1}".format(cache_prefix, err)
tty.warn(msg) tty.warn(msg)
@ -1071,7 +1297,11 @@ def _build_tarball(
tty.die(e) tty.die(e)
# create gzip compressed tarball of the install prefix # create gzip compressed tarball of the install prefix
with closing(tarfile.open(tarfile_path, "w:gz")) as tar: # On AMD Ryzen 3700X and an SSD disk, we have the following on compression speed:
# compresslevel=6 gzip default: llvm takes 4mins, roughly 2.1GB
# compresslevel=9 python default: llvm takes 12mins, roughly 2.1GB
# So we follow gzip.
with closing(tarfile.open(tarfile_path, "w:gz", compresslevel=6)) as tar:
tar.add(name="%s" % workdir, arcname="%s" % os.path.basename(spec.prefix)) tar.add(name="%s" % workdir, arcname="%s" % os.path.basename(spec.prefix))
# remove copy of install directory # remove copy of install directory
shutil.rmtree(workdir) shutil.rmtree(workdir)
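The compresslevel trade-off noted in the comment is easy to reproduce: Python's tarfile defaults to gzip level 9, while the gzip tool defaults to 6. A small timing sketch on an arbitrary directory (paths are hypothetical and must exist):

import tarfile
import time
from contextlib import closing

def gzip_tarball(path, workdir, level):
    with closing(tarfile.open(path, "w:gz", compresslevel=level)) as tar:
        tar.add(name=workdir, arcname="prefix")

for level in (6, 9):
    start = time.time()
    gzip_tarball("/tmp/out-%d.tar.gz" % level, "/tmp/example-prefix", level)
    print(level, time.time() - start)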
@ -1346,6 +1576,13 @@ def download_tarball(spec, unsigned=False, mirrors_for_spec=None):
# the remaining mirrors, looking for one we can use. # the remaining mirrors, looking for one we can use.
tarball_stage = try_fetch(spackfile_url) tarball_stage = try_fetch(spackfile_url)
if tarball_stage: if tarball_stage:
if ext == "yaml":
msg = (
"Reading {} from mirror.\n\n\tThe YAML format for buildcaches is "
"deprecated and will be removed in v0.20\n"
).format(spackfile_url)
warnings.warn(msg)
return { return {
"tarball_stage": tarball_stage, "tarball_stage": tarball_stage,
"specfile_stage": local_specfile_stage, "specfile_stage": local_specfile_stage,
@ -1418,6 +1655,38 @@ def check_package_relocatable(workdir, spec, allow_root):
relocate.raise_if_not_relocatable(cur_path_names, allow_root) relocate.raise_if_not_relocatable(cur_path_names, allow_root)
def dedupe_hardlinks_if_necessary(root, buildinfo):
"""Updates a buildinfo dict for old archives that did
not dedupe hardlinks. De-duping hardlinks is necessary
when relocating files in parallel and in-place. This
means we must preserve inodes when relocating."""
# New archives don't need this.
if buildinfo.get("hardlinks_deduped", False):
return
# Clearly we can assume that an inode is either in the
# textfile or binary group, but let's just stick to
# a single set of visited nodes.
visited = set()
# Note: we do *not* dedupe hardlinked symlinks, since
# it seems difficult or even impossible to relink
# symlinks while preserving inode.
for key in ("relocate_textfiles", "relocate_binaries"):
if key not in buildinfo:
continue
new_list = []
for rel_path in buildinfo[key]:
stat_result = os.lstat(os.path.join(root, rel_path))
identifier = (stat_result.st_dev, stat_result.st_ino)
if identifier in visited:
continue
visited.add(identifier)
new_list.append(rel_path)
buildinfo[key] = new_list
def relocate_package(spec, allow_root): def relocate_package(spec, allow_root):
""" """
Relocate the given package Relocate the given package
@ -1451,7 +1720,7 @@ def relocate_package(spec, allow_root):
hash_to_prefix = dict() hash_to_prefix = dict()
hash_to_prefix[spec.format("{hash}")] = str(spec.package.prefix) hash_to_prefix[spec.format("{hash}")] = str(spec.package.prefix)
new_deps = spack.build_environment.get_rpath_deps(spec.package) new_deps = spack.build_environment.get_rpath_deps(spec.package)
for d in new_deps: for d in new_deps + spec.dependencies(deptype="run"):
hash_to_prefix[d.format("{hash}")] = str(d.prefix) hash_to_prefix[d.format("{hash}")] = str(d.prefix)
# Spurious replacements (e.g. sbang) will cause issues with binaries # Spurious replacements (e.g. sbang) will cause issues with binaries
# For example, the new sbang can be longer than the old one. # For example, the new sbang can be longer than the old one.
@ -1463,13 +1732,19 @@ def relocate_package(spec, allow_root):
install_path = spack.hooks.sbang.sbang_install_path() install_path = spack.hooks.sbang.sbang_install_path()
prefix_to_prefix_text[old_sbang_install_path] = install_path prefix_to_prefix_text[old_sbang_install_path] = install_path
# First match specific prefix paths. Possibly the *local* install prefix
# of some dependency is in an upstream, so we cannot assume the original
# spack store root can be mapped uniformly to the new spack store root.
for orig_prefix, hash in prefix_to_hash.items():
prefix_to_prefix_text[orig_prefix] = hash_to_prefix.get(hash, None)
prefix_to_prefix_bin[orig_prefix] = hash_to_prefix.get(hash, None)
# Only then add the generic fallback of install prefix -> install prefix.
prefix_to_prefix_text[old_prefix] = new_prefix prefix_to_prefix_text[old_prefix] = new_prefix
prefix_to_prefix_bin[old_prefix] = new_prefix prefix_to_prefix_bin[old_prefix] = new_prefix
prefix_to_prefix_text[old_layout_root] = new_layout_root prefix_to_prefix_text[old_layout_root] = new_layout_root
prefix_to_prefix_bin[old_layout_root] = new_layout_root prefix_to_prefix_bin[old_layout_root] = new_layout_root
for orig_prefix, hash in prefix_to_hash.items():
prefix_to_prefix_text[orig_prefix] = hash_to_prefix.get(hash, None)
prefix_to_prefix_bin[orig_prefix] = hash_to_prefix.get(hash, None)
# This is vestigial code for the *old* location of sbang. Previously, # This is vestigial code for the *old* location of sbang. Previously,
# sbang was a bash script, and it lived in the spack prefix. It is # sbang was a bash script, and it lived in the spack prefix. It is
# now a POSIX script that lives in the install prefix. Old packages # now a POSIX script that lives in the install prefix. Old packages
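Ordering matters here because the replacement map is applied in insertion order: the specific dependency prefixes must be tried before the generic store-root fallback, or a dependency that actually lives in an upstream store would be rewritten to the wrong root. A hypothetical illustration (all paths invented):

# Most specific first: a dependency that really lives upstream.
prefix_to_prefix = {
    "/old/store/linux/zlib-1.2.12-abc": "/upstream/store/linux/zlib-1.2.12-abc",
    "/old/store/linux/pkg-1.0-def": "/new/store/linux/pkg-1.0-def",
    "/old/store": "/new/store",  # generic fallback added last
}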
@ -1480,6 +1755,9 @@ def relocate_package(spec, allow_root):
tty.debug("Relocating package from", "%s to %s." % (old_layout_root, new_layout_root)) tty.debug("Relocating package from", "%s to %s." % (old_layout_root, new_layout_root))
# Old archives may have hardlinks repeated.
dedupe_hardlinks_if_necessary(workdir, buildinfo)
def is_backup_file(file): def is_backup_file(file):
return file.endswith("~") return file.endswith("~")
@ -1509,7 +1787,11 @@ def is_backup_file(file):
old_prefix, old_prefix,
new_prefix, new_prefix,
) )
if "elf" in platform.binary_formats: elif "elf" in platform.binary_formats and not rel:
# The new ELF dynamic section relocation logic only handles absolute to
# absolute relocation.
relocate.new_relocate_elf_binaries(files_to_relocate, prefix_to_prefix_bin)
elif "elf" in platform.binary_formats and rel:
relocate.relocate_elf_binaries( relocate.relocate_elf_binaries(
files_to_relocate, files_to_relocate,
old_layout_root, old_layout_root,
@ -1519,35 +1801,23 @@ def is_backup_file(file):
old_prefix, old_prefix,
new_prefix, new_prefix,
) )
# Relocate links to the new install prefix
links = [link for link in buildinfo.get("relocate_links", [])] # Relocate links to the new install prefix
relocate.relocate_links(links, old_layout_root, old_prefix, new_prefix) links = [os.path.join(workdir, f) for f in buildinfo.get("relocate_links", [])]
relocate.relocate_links(links, prefix_to_prefix_bin)
# For all buildcaches # For all buildcaches
# relocate the install prefixes in text files including dependencies # relocate the install prefixes in text files including dependencies
relocate.relocate_text(text_names, prefix_to_prefix_text) relocate.unsafe_relocate_text(text_names, prefix_to_prefix_text)
paths_to_relocate = [old_prefix, old_layout_root]
paths_to_relocate.extend(prefix_to_hash.keys())
files_to_relocate = list(
filter(
lambda pathname: not relocate.file_is_relocatable(
pathname, paths_to_relocate=paths_to_relocate
),
map(
lambda filename: os.path.join(workdir, filename),
buildinfo["relocate_binaries"],
),
)
)
# relocate the install prefixes in binary files including dependencies # relocate the install prefixes in binary files including dependencies
relocate.relocate_text_bin(files_to_relocate, prefix_to_prefix_bin) relocate.unsafe_relocate_text_bin(files_to_relocate, prefix_to_prefix_bin)
# If we are installing back to the same location # If we are installing back to the same location
# relocate the sbang location if the spack directory changed # relocate the sbang location if the spack directory changed
else: else:
if old_spack_prefix != new_spack_prefix: if old_spack_prefix != new_spack_prefix:
relocate.relocate_text(text_names, prefix_to_prefix_text) relocate.unsafe_relocate_text(text_names, prefix_to_prefix_text)
def _extract_inner_tarball(spec, filename, extract_to, unsigned, remote_checksum): def _extract_inner_tarball(spec, filename, extract_to, unsigned, remote_checksum):
@ -1878,8 +2148,8 @@ def get_mirrors_for_spec(spec=None, mirrors_to_check=None, index_only=False):
results = binary_index.find_built_spec(spec, mirrors_to_check=mirrors_to_check) results = binary_index.find_built_spec(spec, mirrors_to_check=mirrors_to_check)
# Maybe we just didn't have the latest information from the mirror, so # The index may be out-of-date. If we aren't only considering indices, try
# try to fetch directly, unless we are only considering the indices. # to fetch directly since we know where the file should be.
if not results and not index_only: if not results and not index_only:
results = try_direct_fetch(spec, mirrors=mirrors_to_check) results = try_direct_fetch(spec, mirrors=mirrors_to_check)
# We found a spec by the direct fetch approach, we might as well # We found a spec by the direct fetch approach, we might as well

View File

@ -91,6 +91,14 @@ def _try_import_from_store(module, query_spec, query_info=None):
os.path.join(candidate_spec.prefix, pkg.platlib), os.path.join(candidate_spec.prefix, pkg.platlib),
] # type: list[str] ] # type: list[str]
path_before = list(sys.path) path_before = list(sys.path)
# Python 3.8+ on Windows does not search dependent DLLs in PATH,
# so we need to manually add it using os.add_dll_directory
# https://docs.python.org/3/whatsnew/3.8.html#bpo-36085-whatsnew
if sys.version_info[:2] >= (3, 8) and sys.platform == "win32":
if os.path.isdir(candidate_spec.prefix.bin):
os.add_dll_directory(candidate_spec.prefix.bin) # novermin
# NOTE: try module_paths first and last, last allows an existing version in path # NOTE: try module_paths first and last, last allows an existing version in path
# to be picked up and used, possibly depending on something in the store, first # to be picked up and used, possibly depending on something in the store, first
# allows the bootstrap version to work when an incompatible version is in # allows the bootstrap version to work when an incompatible version is in
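The Windows branch above works around CPython bpo-36085: from 3.8 on, dependent DLLs are no longer resolved via PATH. A hedged standalone sketch of the same guard:

import os
import sys

def add_dll_dir_if_needed(path):
    # Only Python 3.8+ on Windows has (and needs) os.add_dll_directory.
    if sys.version_info[:2] >= (3, 8) and sys.platform == "win32":
        if os.path.isdir(path):
            os.add_dll_directory(path)  # novermin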
@ -667,6 +675,11 @@ def _add_externals_if_missing():
_REF_COUNT = 0 _REF_COUNT = 0
def is_bootstrapping():
global _REF_COUNT
return _REF_COUNT > 0
@contextlib.contextmanager @contextlib.contextmanager
def ensure_bootstrap_configuration(): def ensure_bootstrap_configuration():
# The context manager is reference counted to ensure we don't swap multiple # The context manager is reference counted to ensure we don't swap multiple

View File

@ -52,6 +52,7 @@
import spack.build_systems.cmake import spack.build_systems.cmake
import spack.build_systems.meson import spack.build_systems.meson
import spack.builder
import spack.config import spack.config
import spack.install_test import spack.install_test
import spack.main import spack.main
@ -120,18 +121,18 @@
stat_suffix = "lib" if sys.platform == "win32" else "a" stat_suffix = "lib" if sys.platform == "win32" else "a"
def should_set_parallel_jobs(jobserver_support=False): def jobserver_enabled():
"""Returns true in general, except when: """Returns true if a posix jobserver (make) is detected."""
- The env variable SPACK_NO_PARALLEL_MAKE=1 is set return "MAKEFLAGS" in os.environ and "--jobserver" in os.environ["MAKEFLAGS"]
- jobserver_support is enabled, and a jobserver was found.
"""
if ( def get_effective_jobs(jobs, parallel=True, supports_jobserver=False):
jobserver_support """Return the number of jobs, or None if supports_jobserver and a jobserver is detected."""
and "MAKEFLAGS" in os.environ if not parallel or jobs <= 1 or env_flag(SPACK_NO_PARALLEL_MAKE):
and "--jobserver" in os.environ["MAKEFLAGS"] return 1
): if supports_jobserver and jobserver_enabled():
return False return None
return not env_flag(SPACK_NO_PARALLEL_MAKE) return jobs
class MakeExecutable(Executable): class MakeExecutable(Executable):
@ -146,26 +147,33 @@ class MakeExecutable(Executable):
""" """
def __init__(self, name, jobs, **kwargs): def __init__(self, name, jobs, **kwargs):
supports_jobserver = kwargs.pop("supports_jobserver", True)
super(MakeExecutable, self).__init__(name, **kwargs) super(MakeExecutable, self).__init__(name, **kwargs)
self.supports_jobserver = supports_jobserver
self.jobs = jobs self.jobs = jobs
def __call__(self, *args, **kwargs): def __call__(self, *args, **kwargs):
"""parallel, and jobs_env from kwargs are swallowed and used here; """parallel, and jobs_env from kwargs are swallowed and used here;
remaining arguments are passed through to the superclass. remaining arguments are passed through to the superclass.
""" """
# TODO: figure out how to check if we are using a jobserver-supporting ninja, parallel = kwargs.pop("parallel", True)
# the two split ninja packages make this very difficult right now jobs_env = kwargs.pop("jobs_env", None)
parallel = should_set_parallel_jobs(jobserver_support=True) and kwargs.pop( jobs_env_supports_jobserver = kwargs.pop("jobs_env_supports_jobserver", False)
"parallel", self.jobs > 1
)
if parallel: jobs = get_effective_jobs(
args = ("-j{0}".format(self.jobs),) + args self.jobs, parallel=parallel, supports_jobserver=self.supports_jobserver
jobs_env = kwargs.pop("jobs_env", None) )
if jobs_env: if jobs is not None:
# Caller wants us to set an environment variable to args = ("-j{0}".format(jobs),) + args
# control the parallelism.
kwargs["extra_env"] = {jobs_env: str(self.jobs)} if jobs_env:
# Caller wants us to set an environment variable to
# control the parallelism.
jobs_env_jobs = get_effective_jobs(
self.jobs, parallel=parallel, supports_jobserver=jobs_env_supports_jobserver
)
if jobs_env_jobs is not None:
kwargs["extra_env"] = {jobs_env: str(jobs_env_jobs)}
return super(MakeExecutable, self).__call__(*args, **kwargs) return super(MakeExecutable, self).__call__(*args, **kwargs)
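Concretely, the refactored logic yields three behaviors; a few hypothetical checks against the functions defined above (assuming SPACK_NO_PARALLEL_MAKE is unset):

import os

os.environ["MAKEFLAGS"] = "-j8 --jobserver-auth=3,4"
assert jobserver_enabled()
# A jobserver-aware tool gets no -j flag at all:
assert get_effective_jobs(8, parallel=True, supports_jobserver=True) is None
# ninja is created with supports_jobserver=False, so it still gets -j8:
assert get_effective_jobs(8, parallel=True, supports_jobserver=False) == 8
# Serial builds always collapse to one job:
assert get_effective_jobs(8, parallel=False) == 1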
@ -201,6 +209,8 @@ def clean_environment():
env.unset("CMAKE_PREFIX_PATH") env.unset("CMAKE_PREFIX_PATH")
env.unset("PYTHONPATH") env.unset("PYTHONPATH")
env.unset("R_HOME")
env.unset("R_ENVIRON")
# Affects GNU make, can e.g. indirectly inhibit enabling parallel build # Affects GNU make, can e.g. indirectly inhibit enabling parallel build
# env.unset('MAKEFLAGS') # env.unset('MAKEFLAGS')
@ -314,7 +324,7 @@ def set_compiler_environment_variables(pkg, env):
env.set("SPACK_LINKER_ARG", compiler.linker_arg) env.set("SPACK_LINKER_ARG", compiler.linker_arg)
# Check whether we want to force RPATH or RUNPATH # Check whether we want to force RPATH or RUNPATH
if spack.config.get("config:shared_linking") == "rpath": if spack.config.get("config:shared_linking:type") == "rpath":
env.set("SPACK_DTAGS_TO_STRIP", compiler.enable_new_dtags) env.set("SPACK_DTAGS_TO_STRIP", compiler.enable_new_dtags)
env.set("SPACK_DTAGS_TO_ADD", compiler.disable_new_dtags) env.set("SPACK_DTAGS_TO_ADD", compiler.disable_new_dtags)
else: else:
@ -322,7 +332,11 @@ def set_compiler_environment_variables(pkg, env):
env.set("SPACK_DTAGS_TO_ADD", compiler.enable_new_dtags) env.set("SPACK_DTAGS_TO_ADD", compiler.enable_new_dtags)
# Set the target parameters that the compiler will add # Set the target parameters that the compiler will add
isa_arg = spec.architecture.target.optimization_flags(compiler) # Don't set on cray platform because the targeting module handles this
if spec.satisfies("platform=cray"):
isa_arg = ""
else:
isa_arg = spec.architecture.target.optimization_flags(compiler)
env.set("SPACK_TARGET_ARGS", isa_arg) env.set("SPACK_TARGET_ARGS", isa_arg)
# Trap spack-tracked compiler flags as appropriate. # Trap spack-tracked compiler flags as appropriate.
@ -343,7 +357,7 @@ def set_compiler_environment_variables(pkg, env):
handler = pkg.flag_handler.__func__ handler = pkg.flag_handler.__func__
else: else:
handler = pkg.flag_handler.im_func handler = pkg.flag_handler.im_func
injf, envf, bsf = handler(pkg, flag, spec.compiler_flags[flag]) injf, envf, bsf = handler(pkg, flag, spec.compiler_flags[flag][:])
inject_flags[flag] = injf or [] inject_flags[flag] = injf or []
env_flags[flag] = envf or [] env_flags[flag] = envf or []
build_system_flags[flag] = bsf or [] build_system_flags[flag] = bsf or []
@ -544,7 +558,7 @@ def _set_variables_for_single_module(pkg, module):
# TODO: make these build deps that can be installed if not found. # TODO: make these build deps that can be installed if not found.
m.make = MakeExecutable("make", jobs) m.make = MakeExecutable("make", jobs)
m.gmake = MakeExecutable("gmake", jobs) m.gmake = MakeExecutable("gmake", jobs)
m.ninja = MakeExecutable("ninja", jobs) m.ninja = MakeExecutable("ninja", jobs, supports_jobserver=False)
# easy shortcut to os.environ # easy shortcut to os.environ
m.env = os.environ m.env = os.environ
@ -556,9 +570,9 @@ def _set_variables_for_single_module(pkg, module):
if sys.platform == "win32": if sys.platform == "win32":
m.nmake = Executable("nmake") m.nmake = Executable("nmake")
# Standard CMake arguments # Standard CMake arguments
m.std_cmake_args = spack.build_systems.cmake.CMakePackage._std_args(pkg) m.std_cmake_args = spack.build_systems.cmake.CMakeBuilder.std_args(pkg)
m.std_meson_args = spack.build_systems.meson.MesonPackage._std_args(pkg) m.std_meson_args = spack.build_systems.meson.MesonBuilder.std_args(pkg)
m.std_pip_args = spack.build_systems.python.PythonPackage._std_args(pkg) m.std_pip_args = spack.build_systems.python.PythonPipBuilder.std_args(pkg)
# Put spack compiler paths in module scope. # Put spack compiler paths in module scope.
link_dir = spack.paths.build_env_path link_dir = spack.paths.build_env_path
@ -725,38 +739,6 @@ def get_rpaths(pkg):
return list(dedupe(filter_system_paths(rpaths))) return list(dedupe(filter_system_paths(rpaths)))
def get_std_cmake_args(pkg):
"""List of standard arguments used if a package is a CMakePackage.
Returns:
list: standard arguments that would be used if this
package were a CMakePackage instance.
Args:
pkg (spack.package_base.PackageBase): package under consideration
Returns:
list: arguments for cmake
"""
return spack.build_systems.cmake.CMakePackage._std_args(pkg)
def get_std_meson_args(pkg):
"""List of standard arguments used if a package is a MesonPackage.
Returns:
list: standard arguments that would be used if this
package were a MesonPackage instance.
Args:
pkg (spack.package_base.PackageBase): package under consideration
Returns:
list: arguments for meson
"""
return spack.build_systems.meson.MesonPackage._std_args(pkg)
def parent_class_modules(cls): def parent_class_modules(cls):
""" """
Get list of superclass modules that descend from spack.package_base.PackageBase Get list of superclass modules that descend from spack.package_base.PackageBase
@ -817,7 +799,8 @@ def setup_package(pkg, dirty, context="build"):
platform.setup_platform_environment(pkg, env_mods) platform.setup_platform_environment(pkg, env_mods)
if context == "build": if context == "build":
pkg.setup_build_environment(env_mods) builder = spack.builder.create(pkg)
builder.setup_build_environment(env_mods)
if (not dirty) and (not env_mods.is_unset("CPATH")): if (not dirty) and (not env_mods.is_unset("CPATH")):
tty.debug( tty.debug(
@ -1013,7 +996,8 @@ def add_modifications_for_dep(dep):
module.__dict__.update(changes.__dict__) module.__dict__.update(changes.__dict__)
if context == "build": if context == "build":
dpkg.setup_dependent_build_environment(env, spec) builder = spack.builder.create(dpkg)
builder.setup_dependent_build_environment(env, spec)
else: else:
dpkg.setup_dependent_run_environment(env, spec) dpkg.setup_dependent_run_environment(env, spec)
@ -1115,8 +1099,20 @@ def _setup_pkg_and_run(
pkg.test_suite.stage, spack.install_test.TestSuite.test_log_name(pkg.spec) pkg.test_suite.stage, spack.install_test.TestSuite.test_log_name(pkg.spec)
) )
error_msg = str(exc)
if isinstance(exc, (spack.multimethod.NoSuchMethodError, AttributeError)):
error_msg = (
"The '{}' package cannot find an attribute while trying to build "
"from sources. This might be due to a change in Spack's package format "
"to support multiple build-systems for a single package. You can fix this "
"by updating the build recipe, and you can also report the issue as a bug. "
"More information at https://spack.readthedocs.io/en/latest/packaging_guide.html#installation-procedure"
).format(pkg.name)
error_msg = colorize("@*R{{{}}}".format(error_msg))
error_msg = "{}\n\n{}".format(str(exc), error_msg)
# make a pickleable exception to send to parent. # make a pickleable exception to send to parent.
msg = "%s: %s" % (exc_type.__name__, str(exc)) msg = "%s: %s" % (exc_type.__name__, error_msg)
ce = ChildError( ce = ChildError(
msg, msg,

View File

@ -0,0 +1,124 @@
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os
import six
import llnl.util.lang
import spack.builder
import spack.installer
import spack.relocate
import spack.store
def sanity_check_prefix(builder):
"""Check that specific directories and files are created after installation.
The files to be checked are in the ``sanity_check_is_file`` attribute of the
package object, while the directories are in the ``sanity_check_is_dir``.
Args:
builder (spack.builder.Builder): builder that installed the package
"""
pkg = builder.pkg
def check_paths(path_list, filetype, predicate):
if isinstance(path_list, six.string_types):
path_list = [path_list]
for path in path_list:
abs_path = os.path.join(pkg.prefix, path)
if not predicate(abs_path):
msg = "Install failed for {0}. No such {1} in prefix: {2}"
msg = msg.format(pkg.name, filetype, path)
raise spack.installer.InstallError(msg)
check_paths(pkg.sanity_check_is_file, "file", os.path.isfile)
check_paths(pkg.sanity_check_is_dir, "directory", os.path.isdir)
ignore_file = llnl.util.lang.match_predicate(spack.store.layout.hidden_file_regexes)
if all(map(ignore_file, os.listdir(pkg.prefix))):
msg = "Install failed for {0}. Nothing was installed!"
raise spack.installer.InstallError(msg.format(pkg.name))
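A package opts into these checks simply by declaring the attributes that sanity_check_prefix reads; a hypothetical recipe:

class Example(Package):  # hypothetical package
    # Checked by sanity_check_prefix after the install phase:
    sanity_check_is_file = ["bin/example"]
    sanity_check_is_dir = ["include", "lib"]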
def apply_macos_rpath_fixups(builder):
"""On Darwin, make installed libraries more easily relocatable.
Some build systems (handrolled, autotools, makefiles) can set their own
rpaths that are duplicated by spack's compiler wrapper. This fixup
interrogates, and postprocesses if necessary, all libraries installed
by the code.
It should be added as a @run_after to packaging systems (or individual
packages) that do not install relocatable libraries by default.
Args:
builder (spack.builder.Builder): builder that installed the package
"""
spack.relocate.fixup_macos_rpaths(builder.spec)
def ensure_build_dependencies_or_raise(spec, dependencies, error_msg):
"""Ensure that some build dependencies are present in the concrete spec.
If not, raise a RuntimeError with a helpful error message.
Args:
spec (spack.spec.Spec): concrete spec to be checked.
dependencies (list of spack.spec.Spec): list of abstract specs to be satisfied
error_msg (str): brief error message to be prepended to a longer description
Raises:
RuntimeError: when the required build dependencies are not found
"""
assert spec.concrete, "Can ensure build dependencies only on concrete specs"
build_deps = [d.name for d in spec.dependencies(deptype="build")]
missing_deps = [x for x in dependencies if x not in build_deps]
if not missing_deps:
return
# Raise an exception on missing deps.
msg = (
"{0}: missing dependencies: {1}.\n\nPlease add "
"the following lines to the package:\n\n".format(error_msg, ", ".join(missing_deps))
)
for dep in missing_deps:
msg += " depends_on('{0}', type='build', when='@{1} {2}')\n".format(
dep, spec.version, "build_system=autotools"
)
msg += "\nUpdate the version (when='@{0}') as needed.".format(spec.version)
raise RuntimeError(msg)
def execute_build_time_tests(builder):
"""Execute the build-time tests prescribed by builder.
Args:
builder (Builder): builder prescribing the test callbacks. The name of the callbacks is
stored as a list of strings in the ``build_time_test_callbacks`` attribute.
"""
builder.pkg.run_test_callbacks(builder, builder.build_time_test_callbacks, "build")
def execute_install_time_tests(builder):
"""Execute the install-time tests prescribed by builder.
Args:
builder (Builder): builder prescribing the test callbacks. The name of the callbacks is
stored as a list of strings in the ``install_time_test_callbacks`` attribute.
"""
builder.pkg.run_test_callbacks(builder, builder.install_time_test_callbacks, "install")
class BaseBuilder(spack.builder.Builder):
"""Base class for builders to register common checks"""
# Check that self.prefix is there after installation
spack.builder.run_after("install")(sanity_check_prefix)

View File

@ -2,18 +2,36 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details. # Spack Project Developers. See the top-level COPYRIGHT file for details.
# #
# SPDX-License-Identifier: (Apache-2.0 OR MIT) # SPDX-License-Identifier: (Apache-2.0 OR MIT)
import llnl.util.filesystem as fs
# Why doesn't this work for me? import spack.directives
# from spack import * import spack.package_base
from llnl.util.filesystem import filter_file import spack.util.executable
from spack.build_systems.autotools import AutotoolsPackage from .autotools import AutotoolsBuilder, AutotoolsPackage
from spack.directives import extends
from spack.package_base import ExtensionError
from spack.util.executable import which class AspellBuilder(AutotoolsBuilder):
"""The Aspell builder is close enough to an autotools builder to allow
specializing the builder class, so to use variables that are specific
to the Aspell extensions.
"""
def configure(self, pkg, spec, prefix):
aspell = spec["aspell"].prefix.bin.aspell
prezip = spec["aspell"].prefix.bin.prezip
destdir = prefix
sh = spack.util.executable.which("sh")
sh(
"./configure",
"--vars",
"ASPELL={0}".format(aspell),
"PREZIP={0}".format(prezip),
"DESTDIR={0}".format(destdir),
)
#
# Aspell dictionaries install their bits into their prefix.lib # Aspell dictionaries install their bits into their prefix.lib
# and when activated they'll get symlinked into the appropriate aspell's # and when activated they'll get symlinked into the appropriate aspell's
# dict dir (see aspell's {de,}activate methods). # dict dir (see aspell's {de,}activate methods).
@ -23,12 +41,17 @@
class AspellDictPackage(AutotoolsPackage): class AspellDictPackage(AutotoolsPackage):
"""Specialized class for building aspell dictionairies.""" """Specialized class for building aspell dictionairies."""
extends("aspell") spack.directives.extends("aspell", when="build_system=autotools")
#: Override the default autotools builder
AutotoolsBuilder = AspellBuilder
def view_destination(self, view): def view_destination(self, view):
aspell_spec = self.spec["aspell"] aspell_spec = self.spec["aspell"]
if view.get_projection_for_spec(aspell_spec) != aspell_spec.prefix: if view.get_projection_for_spec(aspell_spec) != aspell_spec.prefix:
raise ExtensionError("aspell does not support non-global extensions") raise spack.package_base.ExtensionError(
"aspell does not support non-global extensions"
)
aspell = aspell_spec.command aspell = aspell_spec.command
return aspell("dump", "config", "dict-dir", output=str).strip() return aspell("dump", "config", "dict-dir", output=str).strip()
@ -36,19 +59,5 @@ def view_source(self):
return self.prefix.lib return self.prefix.lib
def patch(self): def patch(self):
filter_file(r"^dictdir=.*$", "dictdir=/lib", "configure") fs.filter_file(r"^dictdir=.*$", "dictdir=/lib", "configure")
filter_file(r"^datadir=.*$", "datadir=/lib", "configure") fs.filter_file(r"^datadir=.*$", "datadir=/lib", "configure")
def configure(self, spec, prefix):
aspell = spec["aspell"].prefix.bin.aspell
prezip = spec["aspell"].prefix.bin.prezip
destdir = prefix
sh = which("sh")
sh(
"./configure",
"--vars",
"ASPELL={0}".format(aspell),
"PREZIP={0}".format(prezip),
"DESTDIR={0}".format(destdir),
)

View File

@ -6,87 +6,140 @@
import os import os
import os.path import os.path
import stat import stat
from subprocess import PIPE, check_call import subprocess
from typing import List # novm from typing import List # novm
import llnl.util.filesystem as fs import llnl.util.filesystem as fs
import llnl.util.tty as tty import llnl.util.tty as tty
from llnl.util.filesystem import force_remove, working_dir
from spack.build_environment import InstallError import spack.build_environment
from spack.directives import conflicts, depends_on import spack.builder
import spack.package_base
from spack.directives import build_system, conflicts, depends_on
from spack.multimethod import when
from spack.operating_systems.mac_os import macos_version from spack.operating_systems.mac_os import macos_version
from spack.package_base import PackageBase, run_after, run_before
from spack.util.executable import Executable from spack.util.executable import Executable
from spack.version import Version from spack.version import Version
from ._checks import (
BaseBuilder,
apply_macos_rpath_fixups,
ensure_build_dependencies_or_raise,
execute_build_time_tests,
execute_install_time_tests,
)
class AutotoolsPackage(PackageBase):
"""Specialized class for packages built using GNU Autotools.
This class provides four phases that can be overridden: class AutotoolsPackage(spack.package_base.PackageBase):
"""Specialized class for packages built using GNU Autotools."""
1. :py:meth:`~.AutotoolsPackage.autoreconf` #: This attribute is used in UI queries that need to know the build
2. :py:meth:`~.AutotoolsPackage.configure` #: system base class
3. :py:meth:`~.AutotoolsPackage.build` build_system_class = "AutotoolsPackage"
4. :py:meth:`~.AutotoolsPackage.install`
#: Legacy buildsystem attribute used to deserialize and install old specs
legacy_buildsystem = "autotools"
build_system("autotools")
with when("build_system=autotools"):
depends_on("gnuconfig", type="build", when="target=ppc64le:")
depends_on("gnuconfig", type="build", when="target=aarch64:")
depends_on("gnuconfig", type="build", when="target=riscv64:")
conflicts("platform=windows")
def flags_to_build_system_args(self, flags):
"""Produces a list of all command line arguments to pass specified
compiler flags to configure."""
# Has to be dynamic attribute due to caching.
setattr(self, "configure_flag_args", [])
for flag, values in flags.items():
if values:
values_str = "{0}={1}".format(flag.upper(), " ".join(values))
self.configure_flag_args.append(values_str)
# Spack's fflags are meant for both F77 and FC, therefore we
# additionaly set FCFLAGS if required.
values = flags.get("fflags", None)
if values:
values_str = "FCFLAGS={0}".format(" ".join(values))
self.configure_flag_args.append(values_str)
# Legacy methods (used by too many packages to change them,
# need to forward to the builder)
def enable_or_disable(self, *args, **kwargs):
return self.builder.enable_or_disable(*args, **kwargs)
def with_or_without(self, *args, **kwargs):
return self.builder.with_or_without(*args, **kwargs)
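Thanks to this forwarding, existing recipes keep calling the helpers on the package object; e.g. a typical configure_args (variant names hypothetical):

def configure_args(self):
    args = self.enable_or_disable("shared")  # ["--enable-shared"] or ["--disable-shared"]
    args += self.with_or_without("mpi")      # ["--with-mpi"] or ["--without-mpi"]
    return args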
@spack.builder.builder("autotools")
class AutotoolsBuilder(BaseBuilder):
"""The autotools builder encodes the default way of installing software built
with autotools. It has four phases that can be overridden, if need be:
1. :py:meth:`~.AutotoolsBuilder.autoreconf`
2. :py:meth:`~.AutotoolsBuilder.configure`
3. :py:meth:`~.AutotoolsBuilder.build`
4. :py:meth:`~.AutotoolsBuilder.install`
They all have sensible defaults and for many packages the only thing necessary
is to override the helper method
:meth:`~spack.build_systems.autotools.AutotoolsBuilder.configure_args`.
They all have sensible defaults and for many packages the only thing
necessary will be to override the helper method
:meth:`~spack.build_systems.autotools.AutotoolsPackage.configure_args`.
For a finer tuning you may also override: For a finer tuning you may also override:
+-----------------------------------------------+--------------------+ +-----------------------------------------------+--------------------+
| **Method** | **Purpose** | | **Method** | **Purpose** |
+===============================================+====================+ +===============================================+====================+
| :py:attr:`~.AutotoolsPackage.build_targets` | Specify ``make`` | | :py:attr:`~.AutotoolsBuilder.build_targets` | Specify ``make`` |
| | targets for the | | | targets for the |
| | build phase | | | build phase |
+-----------------------------------------------+--------------------+ +-----------------------------------------------+--------------------+
| :py:attr:`~.AutotoolsPackage.install_targets` | Specify ``make`` | | :py:attr:`~.AutotoolsBuilder.install_targets` | Specify ``make`` |
| | targets for the | | | targets for the |
| | install phase | | | install phase |
+-----------------------------------------------+--------------------+ +-----------------------------------------------+--------------------+
| :py:meth:`~.AutotoolsPackage.check` | Run build time | | :py:meth:`~.AutotoolsBuilder.check` | Run build time |
| | tests if required | | | tests if required |
+-----------------------------------------------+--------------------+ +-----------------------------------------------+--------------------+
""" """
#: Phases of a GNU Autotools package #: Phases of a GNU Autotools package
phases = ["autoreconf", "configure", "build", "install"] phases = ("autoreconf", "configure", "build", "install")
#: This attribute is used in UI queries that need to know the build
#: system base class
build_system_class = "AutotoolsPackage"
@property #: Names associated with package methods in the old build-system format
def patch_config_files(self): legacy_methods = (
""" "configure_args",
Whether or not to update old ``config.guess`` and ``config.sub`` files "check",
distributed with the tarball. This currently only applies to "installcheck",
``ppc64le:``, ``aarch64:``, and ``riscv64`` target architectures. The )
substitutes are taken from the ``gnuconfig`` package, which is
automatically added as a build dependency for these architectures. In
case system versions of these config files are required, the
``gnuconfig`` package can be marked external with a prefix pointing to
the directory containing the system ``config.guess`` and ``config.sub``
files.
"""
return (
self.spec.satisfies("target=ppc64le:")
or self.spec.satisfies("target=aarch64:")
or self.spec.satisfies("target=riscv64:")
)
#: Whether or not to update ``libtool`` #: Names associated with package attributes in the old build-system format
#: (currently only for Arm/Clang/Fujitsu/NVHPC compilers) legacy_attributes = (
"archive_files",
"patch_libtool",
"build_targets",
"install_targets",
"build_time_test_callbacks",
"install_time_test_callbacks",
"force_autoreconf",
"autoreconf_extra_args",
"install_libtool_archives",
"patch_config_files",
"configure_directory",
"configure_abs_path",
"build_directory",
"autoreconf_search_path_args",
)
#: Whether to update ``libtool`` (e.g. for Arm/Clang/Fujitsu/NVHPC compilers)
patch_libtool = True patch_libtool = True
#: Targets for ``make`` during the :py:meth:`~.AutotoolsPackage.build` #: Targets for ``make`` during the :py:meth:`~.AutotoolsBuilder.build` phase
#: phase
build_targets = [] # type: List[str] build_targets = [] # type: List[str]
#: Targets for ``make`` during the :py:meth:`~.AutotoolsPackage.install` #: Targets for ``make`` during the :py:meth:`~.AutotoolsBuilder.install` phase
#: phase
install_targets = ["install"] install_targets = ["install"]
#: Callback names for build-time test #: Callback names for build-time test
@ -97,24 +150,40 @@ def patch_config_files(self):
#: Set to true to force the autoreconf step even if configure is present #: Set to true to force the autoreconf step even if configure is present
force_autoreconf = False force_autoreconf = False
#: Options to be passed to autoreconf when using the default implementation #: Options to be passed to autoreconf when using the default implementation
autoreconf_extra_args = [] # type: List[str] autoreconf_extra_args = [] # type: List[str]
#: If False deletes all the .la files in the prefix folder #: If False deletes all the .la files in the prefix folder after the installation.
#: after the installation. If True instead it installs them. #: If True instead it installs them.
install_libtool_archives = False install_libtool_archives = False
depends_on("gnuconfig", type="build", when="target=ppc64le:") @property
depends_on("gnuconfig", type="build", when="target=aarch64:") def patch_config_files(self):
depends_on("gnuconfig", type="build", when="target=riscv64:") """Whether to update old ``config.guess`` and ``config.sub`` files
conflicts("platform=windows") distributed with the tarball.
This currently only applies to ``ppc64le:``, ``aarch64:``, and
``riscv64`` target architectures.
The substitutes are taken from the ``gnuconfig`` package, which is
automatically added as a build dependency for these architectures. In case
system versions of these config files are required, the ``gnuconfig`` package
can be marked external, with a prefix pointing to the directory containing the
system ``config.guess`` and ``config.sub`` files.
"""
return (
self.pkg.spec.satisfies("target=ppc64le:")
or self.pkg.spec.satisfies("target=aarch64:")
or self.pkg.spec.satisfies("target=riscv64:")
)
@property @property
def _removed_la_files_log(self): def _removed_la_files_log(self):
"""File containing the list of remove libtool archives""" """File containing the list of removed libtool archives"""
build_dir = self.build_directory build_dir = self.build_directory
if not os.path.isabs(self.build_directory): if not os.path.isabs(self.build_directory):
build_dir = os.path.join(self.stage.path, build_dir) build_dir = os.path.join(self.pkg.stage.path, build_dir)
return os.path.join(build_dir, "removed_la_files.txt") return os.path.join(build_dir, "removed_la_files.txt")
@property @property
@ -125,13 +194,13 @@ def archive_files(self):
files.append(self._removed_la_files_log) files.append(self._removed_la_files_log)
return files return files
@run_after("autoreconf") @spack.builder.run_after("autoreconf")
def _do_patch_config_files(self): def _do_patch_config_files(self):
"""Some packages ship with older config.guess/config.sub files and """Some packages ship with older config.guess/config.sub files and need to
need to have these updated when installed on a newer architecture. have these updated when installed on a newer architecture.
In particular, config.guess fails for PPC64LE for versions prior
to a 2013-06-10 build date (automake 1.13.4) and for ARM (aarch64) and In particular, config.guess fails for PPC64LE for versions prior to a
RISC-V (riscv64). 2013-06-10 build date (automake 1.13.4) and for AArch64 and RISC-V.
""" """
if not self.patch_config_files: if not self.patch_config_files:
return return
@ -139,11 +208,11 @@ def _do_patch_config_files(self):
# TODO: Expand this to select the 'config.sub'-compatible architecture # TODO: Expand this to select the 'config.sub'-compatible architecture
# for each platform (e.g. 'config.sub' doesn't accept 'power9le', but # for each platform (e.g. 'config.sub' doesn't accept 'power9le', but
# does accept 'ppc64le'). # does accept 'ppc64le').
if self.spec.satisfies("target=ppc64le:"): if self.pkg.spec.satisfies("target=ppc64le:"):
config_arch = "ppc64le" config_arch = "ppc64le"
elif self.spec.satisfies("target=aarch64:"): elif self.pkg.spec.satisfies("target=aarch64:"):
config_arch = "aarch64" config_arch = "aarch64"
elif self.spec.satisfies("target=riscv64:"): elif self.pkg.spec.satisfies("target=riscv64:"):
config_arch = "riscv64" config_arch = "riscv64"
else: else:
config_arch = "local" config_arch = "local"
@ -155,7 +224,7 @@ def runs_ok(script_abs_path):
args = [script_abs_path] + additional_args.get(script_name, []) args = [script_abs_path] + additional_args.get(script_name, [])
try: try:
check_call(args, stdout=PIPE, stderr=PIPE) subprocess.check_call(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
except Exception as e: except Exception as e:
tty.debug(e) tty.debug(e)
return False return False
@ -163,7 +232,7 @@ def runs_ok(script_abs_path):
return True return True
# Get the list of files that needs to be patched # Get the list of files that needs to be patched
to_be_patched = fs.find(self.stage.path, files=["config.sub", "config.guess"]) to_be_patched = fs.find(self.pkg.stage.path, files=["config.sub", "config.guess"])
to_be_patched = [f for f in to_be_patched if not runs_ok(f)] to_be_patched = [f for f in to_be_patched if not runs_ok(f)]
# If there are no files to be patched, return early # If there are no files to be patched, return early
@ -171,22 +240,21 @@ def runs_ok(script_abs_path):
return return
# Otherwise, require `gnuconfig` to be a build dependency # Otherwise, require `gnuconfig` to be a build dependency
self._require_build_deps( ensure_build_dependencies_or_raise(
pkgs=["gnuconfig"], spec=self.spec, err="Cannot patch config files" spec=self.pkg.spec, dependencies=["gnuconfig"], error_msg="Cannot patch config files"
) )
# Get the config files we need to patch (config.sub / config.guess). # Get the config files we need to patch (config.sub / config.guess).
to_be_found = list(set(os.path.basename(f) for f in to_be_patched)) to_be_found = list(set(os.path.basename(f) for f in to_be_patched))
gnuconfig = self.spec["gnuconfig"] gnuconfig = self.pkg.spec["gnuconfig"]
gnuconfig_dir = gnuconfig.prefix gnuconfig_dir = gnuconfig.prefix
# An external gnuconfig may not have a prefix. # An external gnuconfig may not have a prefix.
if gnuconfig_dir is None: if gnuconfig_dir is None:
raise InstallError( raise spack.build_environment.InstallError(
"Spack could not find substitutes for GNU config " "Spack could not find substitutes for GNU config files because no "
"files because no prefix is available for the " "prefix is available for the `gnuconfig` package. Make sure you set a "
"`gnuconfig` package. Make sure you set a prefix " "prefix path instead of modules for external `gnuconfig`."
"path instead of modules for external `gnuconfig`."
) )
candidates = fs.find(gnuconfig_dir, files=to_be_found, recursive=False) candidates = fs.find(gnuconfig_dir, files=to_be_found, recursive=False)
@ -203,7 +271,7 @@ def runs_ok(script_abs_path):
msg += ( msg += (
" or the `gnuconfig` package prefix is misconfigured as" " an external package" " or the `gnuconfig` package prefix is misconfigured as" " an external package"
) )
raise InstallError(msg) raise spack.build_environment.InstallError(msg)
# Filter working substitutes # Filter working substitutes
candidates = [f for f in candidates if runs_ok(f)] candidates = [f for f in candidates if runs_ok(f)]
@ -228,7 +296,9 @@ def runs_ok(script_abs_path):
and set the prefix to the directory containing the `config.guess` and and set the prefix to the directory containing the `config.guess` and
`config.sub` files. `config.sub` files.
""" """
raise InstallError(msg.format(", ".join(to_be_found), self.name)) raise spack.build_environment.InstallError(
msg.format(", ".join(to_be_found), self.name)
)
# Copy the good files over the bad ones # Copy the good files over the bad ones
for abs_path in to_be_patched: for abs_path in to_be_patched:
@ -238,7 +308,7 @@ def runs_ok(script_abs_path):
fs.copy(substitutes[name], abs_path) fs.copy(substitutes[name], abs_path)
os.chmod(abs_path, mode) os.chmod(abs_path, mode)
@run_before("configure") @spack.builder.run_before("configure")
def _patch_usr_bin_file(self): def _patch_usr_bin_file(self):
"""On NixOS file is not available in /usr/bin/file. Patch configure """On NixOS file is not available in /usr/bin/file. Patch configure
scripts to use file from path.""" scripts to use file from path."""
@ -250,7 +320,7 @@ def _patch_usr_bin_file(self):
with fs.keep_modification_time(*x.filenames): with fs.keep_modification_time(*x.filenames):
x.filter(regex="/usr/bin/file", repl="file", string=True) x.filter(regex="/usr/bin/file", repl="file", string=True)
@run_before("configure") @spack.builder.run_before("configure")
def _set_autotools_environment_variables(self): def _set_autotools_environment_variables(self):
"""Many autotools builds use a version of mknod.m4 that fails when """Many autotools builds use a version of mknod.m4 that fails when
running as root unless FORCE_UNSAFE_CONFIGURE is set to 1. running as root unless FORCE_UNSAFE_CONFIGURE is set to 1.
@ -261,11 +331,10 @@ def _set_autotools_environment_variables(self):
Without it, configure just fails halfway through, but it can Without it, configure just fails halfway through, but it can
still run things *before* this check. Forcing this just removes a still run things *before* this check. Forcing this just removes a
nuisance -- this is not circumventing any real protection. nuisance -- this is not circumventing any real protection.
""" """
os.environ["FORCE_UNSAFE_CONFIGURE"] = "1" os.environ["FORCE_UNSAFE_CONFIGURE"] = "1"
@run_before("configure") @spack.builder.run_before("configure")
def _do_patch_libtool_configure(self): def _do_patch_libtool_configure(self):
"""Patch bugs that propagate from libtool macros into "configure" and """Patch bugs that propagate from libtool macros into "configure" and
further into "libtool". Note that patches that can be fixed by patching further into "libtool". Note that patches that can be fixed by patching
@ -293,7 +362,7 @@ def _do_patch_libtool_configure(self):
# Support Libtool 2.4.2 and older: # Support Libtool 2.4.2 and older:
x.filter(regex=r'^(\s*test \$p = "-R")(; then\s*)$', repl=r'\1 || test x-l = x"$p"\2') x.filter(regex=r'^(\s*test \$p = "-R")(; then\s*)$', repl=r'\1 || test x-l = x"$p"\2')
@run_after("configure") @spack.builder.run_after("configure")
def _do_patch_libtool(self): def _do_patch_libtool(self):
"""If configure generates a "libtool" script that does not correctly """If configure generates a "libtool" script that does not correctly
detect the compiler (and patch_libtool is set), patch in the correct detect the compiler (and patch_libtool is set), patch in the correct
@ -328,31 +397,33 @@ def _do_patch_libtool(self):
markers[tag] = "LIBTOOL TAG CONFIG: {0}".format(tag.upper()) markers[tag] = "LIBTOOL TAG CONFIG: {0}".format(tag.upper())
# Replace empty linker flag prefixes: # Replace empty linker flag prefixes:
if self.compiler.name == "nag": if self.pkg.compiler.name == "nag":
# Nag is mixed with gcc and g++, which are recognized correctly. # Nag is mixed with gcc and g++, which are recognized correctly.
# Therefore, we change only Fortran values: # Therefore, we change only Fortran values:
for tag in ["fc", "f77"]: for tag in ["fc", "f77"]:
marker = markers[tag] marker = markers[tag]
x.filter( x.filter(
regex='^wl=""$', regex='^wl=""$',
repl='wl="{0}"'.format(self.compiler.linker_arg), repl='wl="{0}"'.format(self.pkg.compiler.linker_arg),
start_at="# ### BEGIN {0}".format(marker), start_at="# ### BEGIN {0}".format(marker),
stop_at="# ### END {0}".format(marker), stop_at="# ### END {0}".format(marker),
) )
else: else:
x.filter(regex='^wl=""$', repl='wl="{0}"'.format(self.compiler.linker_arg)) x.filter(regex='^wl=""$', repl='wl="{0}"'.format(self.pkg.compiler.linker_arg))
# Replace empty PIC flag values: # Replace empty PIC flag values:
for cc, marker in markers.items(): for cc, marker in markers.items():
x.filter( x.filter(
regex='^pic_flag=""$', regex='^pic_flag=""$',
repl='pic_flag="{0}"'.format(getattr(self.compiler, "{0}_pic_flag".format(cc))), repl='pic_flag="{0}"'.format(
getattr(self.pkg.compiler, "{0}_pic_flag".format(cc))
),
start_at="# ### BEGIN {0}".format(marker), start_at="# ### BEGIN {0}".format(marker),
stop_at="# ### END {0}".format(marker), stop_at="# ### END {0}".format(marker),
) )
# Other compiler-specific patches: # Other compiler-specific patches:
if self.compiler.name == "fj": if self.pkg.compiler.name == "fj":
x.filter(regex="-nostdlib", repl="", string=True) x.filter(regex="-nostdlib", repl="", string=True)
rehead = r"/\S*/" rehead = r"/\S*/"
for o in [ for o in [
@ -365,12 +436,12 @@ def _do_patch_libtool(self):
"crtendS.o", "crtendS.o",
]: ]:
x.filter(regex=(rehead + o), repl="", string=True) x.filter(regex=(rehead + o), repl="", string=True)
elif self.compiler.name == "dpcpp": elif self.pkg.compiler.name == "dpcpp":
# Hack to filter out spurious predep_objects when building with Intel dpcpp # Hack to filter out spurious predep_objects when building with Intel dpcpp
# (see https://github.com/spack/spack/issues/32863): # (see https://github.com/spack/spack/issues/32863):
x.filter(regex=r"^(predep_objects=.*)/tmp/conftest-[0-9A-Fa-f]+\.o", repl=r"\1") x.filter(regex=r"^(predep_objects=.*)/tmp/conftest-[0-9A-Fa-f]+\.o", repl=r"\1")
x.filter(regex=r"^(predep_objects=.*)/tmp/a-[0-9A-Fa-f]+\.o", repl=r"\1") x.filter(regex=r"^(predep_objects=.*)/tmp/a-[0-9A-Fa-f]+\.o", repl=r"\1")
elif self.compiler.name == "nag": elif self.pkg.compiler.name == "nag":
for tag in ["fc", "f77"]: for tag in ["fc", "f77"]:
marker = markers[tag] marker = markers[tag]
start_at = "# ### BEGIN {0}".format(marker) start_at = "# ### BEGIN {0}".format(marker)
@ -446,11 +517,8 @@ def _do_patch_libtool(self):
@property @property
def configure_directory(self): def configure_directory(self):
"""Returns the directory where 'configure' resides. """Return the directory where 'configure' resides."""
return self.pkg.stage.source_path
:return: directory where to find configure
"""
return self.stage.source_path
@property @property
def configure_abs_path(self): def configure_abs_path(self):
@ -463,34 +531,12 @@ def build_directory(self):
"""Override to provide another place to build the package""" """Override to provide another place to build the package"""
return self.configure_directory return self.configure_directory
@run_before("autoreconf") @spack.builder.run_before("autoreconf")
def delete_configure_to_force_update(self): def delete_configure_to_force_update(self):
if self.force_autoreconf: if self.force_autoreconf:
force_remove(self.configure_abs_path) fs.force_remove(self.configure_abs_path)
def _require_build_deps(self, pkgs, spec, err): def autoreconf(self, pkg, spec, prefix):
"""Require `pkgs` to be direct build dependencies of `spec`. Raises a
RuntimeError with a helpful error messages when any dep is missing."""
build_deps = [d.name for d in spec.dependencies(deptype="build")]
missing_deps = [x for x in pkgs if x not in build_deps]
if not missing_deps:
return
# Raise an exception on missing deps.
msg = (
"{0}: missing dependencies: {1}.\n\nPlease add "
"the following lines to the package:\n\n".format(err, ", ".join(missing_deps))
)
for dep in missing_deps:
msg += " depends_on('{0}', type='build', when='@{1}')\n".format(dep, spec.version)
msg += "\nUpdate the version (when='@{0}') as needed.".format(spec.version)
raise RuntimeError(msg)
def autoreconf(self, spec, prefix):
"""Not needed usually, configure should be already there""" """Not needed usually, configure should be already there"""
# If configure exists nothing needs to be done # If configure exists nothing needs to be done
@ -498,8 +544,10 @@ def autoreconf(self, spec, prefix):
return return
# Else try to regenerate it, which requires a few build dependencies # Else try to regenerate it, which requires a few build dependencies
self._require_build_deps( ensure_build_dependencies_or_raise(
pkgs=["autoconf", "automake", "libtool"], spec=spec, err="Cannot generate configure" spec=spec,
dependencies=["autoconf", "automake", "libtool"],
error_msg="Cannot generate configure",
) )
tty.msg("Configure script not found: trying to generate it") tty.msg("Configure script not found: trying to generate it")
@ -507,8 +555,8 @@ def autoreconf(self, spec, prefix):
tty.warn("* If the default procedure fails, consider implementing *") tty.warn("* If the default procedure fails, consider implementing *")
tty.warn("* a custom AUTORECONF phase in the package *") tty.warn("* a custom AUTORECONF phase in the package *")
tty.warn("*********************************************************") tty.warn("*********************************************************")
with working_dir(self.configure_directory): with fs.working_dir(self.configure_directory):
m = inspect.getmodule(self) m = inspect.getmodule(self.pkg)
# This line is what is needed most of the time # This line is what is needed most of the time
# --install, --verbose, --force # --install, --verbose, --force
autoreconf_args = ["-ivf"] autoreconf_args = ["-ivf"]
@ -524,98 +572,66 @@ def autoreconf_search_path_args(self):
spack dependencies.""" spack dependencies."""
return _autoreconf_search_path_args(self.spec) return _autoreconf_search_path_args(self.spec)
@run_after("autoreconf") @spack.builder.run_after("autoreconf")
def set_configure_or_die(self): def set_configure_or_die(self):
"""Checks the presence of a ``configure`` file after the """Ensure the presence of a "configure" script, or raise. If the "configure"
autoreconf phase. If it is found sets a module attribute script is found, a module-level attribute is set.
appropriately, otherwise raises an error.
:raises RuntimeError: if a configure script is not found in Raises:
:py:meth:`~AutotoolsPackage.configure_directory` RuntimeError: if the "configure" script is not found
""" """
# Check if a configure script is there. If not raise a RuntimeError. # Check if the "configure" script is there. If not raise a RuntimeError.
if not os.path.exists(self.configure_abs_path): if not os.path.exists(self.configure_abs_path):
msg = "configure script not found in {0}" msg = "configure script not found in {0}"
raise RuntimeError(msg.format(self.configure_directory)) raise RuntimeError(msg.format(self.configure_directory))
# Monkey-patch the configure script in the corresponding module # Monkey-patch the configure script in the corresponding module
inspect.getmodule(self).configure = Executable(self.configure_abs_path) inspect.getmodule(self.pkg).configure = Executable(self.configure_abs_path)
def configure_args(self): def configure_args(self):
"""Produces a list containing all the arguments that must be passed to """Return the list of all the arguments that must be passed to configure,
configure, except ``--prefix`` which will be pre-pended to the list. except ``--prefix`` which will be pre-pended to the list.
:return: list of arguments for configure
""" """
return [] return []
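# A minimal illustrative sketch of overriding configure_args in a package;
# the package name and "shared" variant are hypothetical, the usual
# url/version boilerplate is omitted, and `AutotoolsPackage` / `variant` are
# assumed to come from `from spack.package import *` as in real package files.
from spack.package import *


class ExampleAutotools(AutotoolsPackage):
    """Hypothetical package showing a configure_args override."""

    variant("shared", default=True, description="Build shared libraries")

    def configure_args(self):
        # Plain flags first, then --enable-shared / --disable-shared derived
        # from the variant via the enable_or_disable helper documented below.
        args = ["--disable-dependency-tracking"]
        args += self.enable_or_disable("shared")
        return args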
def flags_to_build_system_args(self, flags): def configure(self, pkg, spec, prefix):
"""Produces a list of all command line arguments to pass specified """Run "configure", with the arguments specified by the builder and an
compiler flags to configure.""" appropriately set prefix.
# Has to be dynamic attribute due to caching.
setattr(self, "configure_flag_args", [])
for flag, values in flags.items():
if values:
values_str = "{0}={1}".format(flag.upper(), " ".join(values))
self.configure_flag_args.append(values_str)
# Spack's fflags are meant for both F77 and FC, therefore we
# additionaly set FCFLAGS if required.
values = flags.get("fflags", None)
if values:
values_str = "FCFLAGS={0}".format(" ".join(values))
self.configure_flag_args.append(values_str)
def configure(self, spec, prefix):
"""Runs configure with the arguments specified in
:meth:`~spack.build_systems.autotools.AutotoolsPackage.configure_args`
and an appropriately set prefix.
""" """
options = getattr(self, "configure_flag_args", []) options = getattr(self.pkg, "configure_flag_args", [])
options += ["--prefix={0}".format(prefix)] options += ["--prefix={0}".format(prefix)]
options += self.configure_args() options += self.configure_args()
with working_dir(self.build_directory, create=True): with fs.working_dir(self.build_directory, create=True):
inspect.getmodule(self).configure(*options) inspect.getmodule(self.pkg).configure(*options)
def setup_build_environment(self, env): def build(self, pkg, spec, prefix):
if self.spec.platform == "darwin" and macos_version() >= Version("11"): """Run "make" on the build targets specified by the builder."""
# Many configure files rely on matching '10.*' for macOS version
# detection and fail to add flags if it shows as version 11.
env.set("MACOSX_DEPLOYMENT_TARGET", "10.16")
def build(self, spec, prefix):
"""Makes the build targets specified by
:py:attr:``~.AutotoolsPackage.build_targets``
"""
# See https://autotools.io/automake/silent.html # See https://autotools.io/automake/silent.html
params = ["V=1"] params = ["V=1"]
params += self.build_targets params += self.build_targets
with working_dir(self.build_directory): with fs.working_dir(self.build_directory):
inspect.getmodule(self).make(*params) inspect.getmodule(self.pkg).make(*params)
def install(self, spec, prefix): def install(self, pkg, spec, prefix):
"""Makes the install targets specified by """Run "make" on the install targets specified by the builder."""
:py:attr:``~.AutotoolsPackage.install_targets`` with fs.working_dir(self.build_directory):
""" inspect.getmodule(self.pkg).make(*self.install_targets)
with working_dir(self.build_directory):
inspect.getmodule(self).make(*self.install_targets)
run_after("build")(PackageBase._run_default_build_time_test_callbacks) spack.builder.run_after("build")(execute_build_time_tests)
def check(self): def check(self):
"""Searches the Makefile for targets ``test`` and ``check`` """Run "make" on the ``test`` and ``check`` targets, if found."""
and runs them if found. with fs.working_dir(self.build_directory):
""" self.pkg._if_make_target_execute("test")
with working_dir(self.build_directory): self.pkg._if_make_target_execute("check")
self._if_make_target_execute("test")
self._if_make_target_execute("check")
def _activate_or_not( def _activate_or_not(
self, name, activation_word, deactivation_word, activation_value=None, variant=None self, name, activation_word, deactivation_word, activation_value=None, variant=None
): ):
"""This function contains the current implementation details of """This function contain the current implementation details of
:meth:`~spack.build_systems.autotools.AutotoolsPackage.with_or_without` and :meth:`~spack.build_systems.autotools.AutotoolsBuilder.with_or_without` and
:meth:`~spack.build_systems.autotools.AutotoolsPackage.enable_or_disable`. :meth:`~spack.build_systems.autotools.AutotoolsBuilder.enable_or_disable`.
Args: Args:
name (str): name of the option that is being activated or not name (str): name of the option that is being activated or not
@ -671,7 +687,7 @@ def _activate_or_not(
Raises: Raises:
KeyError: if name is not among known variants KeyError: if name is not among known variants
""" """
spec = self.spec spec = self.pkg.spec
args = [] args = []
if activation_value == "prefix": if activation_value == "prefix":
@ -681,16 +697,16 @@ def _activate_or_not(
# Defensively look that the name passed as argument is among # Defensively look that the name passed as argument is among
# variants # variants
if variant not in self.variants: if variant not in self.pkg.variants:
msg = '"{0}" is not a variant of "{1}"' msg = '"{0}" is not a variant of "{1}"'
raise KeyError(msg.format(variant, self.name)) raise KeyError(msg.format(variant, self.pkg.name))
if variant not in spec.variants: if variant not in spec.variants:
return [] return []
# Create a list of pairs. Each pair includes a configuration # Create a list of pairs. Each pair includes a configuration
# option and whether or not that option is activated # option and whether or not that option is activated
variant_desc, _ = self.variants[variant] variant_desc, _ = self.pkg.variants[variant]
if set(variant_desc.values) == set((True, False)): if set(variant_desc.values) == set((True, False)):
# BoolValuedVariant carry information about a single option. # BoolValuedVariant carry information about a single option.
# Nonetheless, for uniformity of treatment we'll package them # Nonetheless, for uniformity of treatment we'll package them
@ -718,14 +734,18 @@ def _activate_or_not(
override_name = "{0}_or_{1}_{2}".format( override_name = "{0}_or_{1}_{2}".format(
activation_word, deactivation_word, option_value activation_word, deactivation_word, option_value
) )
line_generator = getattr(self, override_name, None) line_generator = getattr(self, override_name, None) or getattr(
self.pkg, override_name, None
)
# If not available use a sensible default # If not available use a sensible default
if line_generator is None: if line_generator is None:
def _default_generator(is_activated): def _default_generator(is_activated):
if is_activated: if is_activated:
line = "--{0}-{1}".format(activation_word, option_value) line = "--{0}-{1}".format(activation_word, option_value)
if activation_value is not None and activation_value(option_value): if activation_value is not None and activation_value(
option_value
): # NOQA=ignore=E501
line += "={0}".format(activation_value(option_value)) line += "={0}".format(activation_value(option_value))
return line return line
return "--{0}-{1}".format(deactivation_word, option_value) return "--{0}-{1}".format(deactivation_word, option_value)
@ -764,7 +784,7 @@ def with_or_without(self, name, activation_value=None, variant=None):
def enable_or_disable(self, name, activation_value=None, variant=None): def enable_or_disable(self, name, activation_value=None, variant=None):
"""Same as """Same as
:meth:`~spack.build_systems.autotools.AutotoolsPackage.with_or_without` :meth:`~spack.build_systems.autotools.AutotoolsBuilder.with_or_without`
but substitute ``with`` with ``enable`` and ``without`` with ``disable``. but substitute ``with`` with ``enable`` and ``without`` with ``disable``.
Args: Args:
@ -781,19 +801,14 @@ def enable_or_disable(self, name, activation_value=None, variant=None):
""" """
return self._activate_or_not(name, "enable", "disable", activation_value, variant) return self._activate_or_not(name, "enable", "disable", activation_value, variant)
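# A minimal sketch of with_or_without with an activation value; the package,
# its "mpi" variant, and the dependency below are hypothetical, with the usual
# url/version boilerplate omitted.
from spack.package import *


class ExampleWithMpi(AutotoolsPackage):
    """Hypothetical package using with_or_without."""

    variant("mpi", default=True, description="Enable MPI support")
    depends_on("mpi", when="+mpi")

    def configure_args(self):
        # +mpi -> --with-mpi=<prefix of the mpi provider>, ~mpi -> --without-mpi
        return self.with_or_without("mpi", activation_value="prefix")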
run_after("install")(PackageBase._run_default_install_time_test_callbacks) spack.builder.run_after("install")(execute_install_time_tests)
def installcheck(self): def installcheck(self):
"""Searches the Makefile for an ``installcheck`` target """Run "make" on the ``installcheck`` target, if found."""
and runs it if found. with fs.working_dir(self.build_directory):
""" self.pkg._if_make_target_execute("installcheck")
with working_dir(self.build_directory):
self._if_make_target_execute("installcheck")
# Check that self.prefix is there after installation @spack.builder.run_after("install")
run_after("install")(PackageBase.sanity_check_prefix)
@run_after("install")
def remove_libtool_archives(self): def remove_libtool_archives(self):
"""Remove all .la files in prefix sub-folders if the package sets """Remove all .la files in prefix sub-folders if the package sets
``install_libtool_archives`` to be False. ``install_libtool_archives`` to be False.
@ -803,14 +818,20 @@ def remove_libtool_archives(self):
return return
# Remove the files and create a log of what was removed # Remove the files and create a log of what was removed
libtool_files = fs.find(str(self.prefix), "*.la", recursive=True) libtool_files = fs.find(str(self.pkg.prefix), "*.la", recursive=True)
with fs.safe_remove(*libtool_files): with fs.safe_remove(*libtool_files):
fs.mkdirp(os.path.dirname(self._removed_la_files_log)) fs.mkdirp(os.path.dirname(self._removed_la_files_log))
with open(self._removed_la_files_log, mode="w") as f: with open(self._removed_la_files_log, mode="w") as f:
f.write("\n".join(libtool_files)) f.write("\n".join(libtool_files))
def setup_build_environment(self, env):
if self.spec.platform == "darwin" and macos_version() >= Version("11"):
# Many configure files rely on matching '10.*' for macOS version
# detection and fail to add flags if it shows as version 11.
env.set("MACOSX_DEPLOYMENT_TARGET", "10.16")
# On macOS, force rpaths for shared library IDs and remove duplicate rpaths # On macOS, force rpaths for shared library IDs and remove duplicate rpaths
run_after("install")(PackageBase.apply_macos_rpath_fixups) spack.builder.run_after("install", when="platform=darwin")(apply_macos_rpath_fixups)
def _autoreconf_search_path_args(spec): def _autoreconf_search_path_args(spec):

View File

@ -0,0 +1,31 @@
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import spack.builder
import spack.directives
import spack.package_base
class BundlePackage(spack.package_base.PackageBase):
"""General purpose bundle, or no-code, package class."""
#: This attribute is used in UI queries that require to know which
#: build-system class we are using
build_system_class = "BundlePackage"
#: Legacy buildsystem attribute used to deserialize and install old specs
legacy_buildsystem = "bundle"
#: Bundle packages do not have associated source or binary code.
has_code = False
spack.directives.build_system("bundle")
@spack.builder.builder("bundle")
class BundleBuilder(spack.builder.Builder):
phases = ("install",)
def install(self, pkg, spec, prefix):
pass

View File

@ -3,12 +3,14 @@
# #
# SPDX-License-Identifier: (Apache-2.0 OR MIT) # SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os import os
from typing import Tuple
import llnl.util.filesystem as fs
import llnl.util.tty as tty import llnl.util.tty as tty
from llnl.util.filesystem import install, mkdirp
from spack.build_systems.cmake import CMakePackage import spack.builder
from spack.package_base import run_after
from .cmake import CMakeBuilder, CMakePackage
def cmake_cache_path(name, value, comment=""): def cmake_cache_path(name, value, comment=""):
@ -28,44 +30,50 @@ def cmake_cache_option(name, boolean_value, comment=""):
return 'set({0} {1} CACHE BOOL "{2}")\n'.format(name, value, comment) return 'set({0} {1} CACHE BOOL "{2}")\n'.format(name, value, comment)
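# Worked example (hypothetical values): the helpers above render CMake
# initial-cache entries, e.g.
#   cmake_cache_option("ENABLE_TESTS", True)
#     -> 'set(ENABLE_TESTS ON CACHE BOOL "")\n'
# and cmake_cache_path renders the same shape, with a quoted value and
# CACHE PATH in place of CACHE BOOL.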
class CachedCMakePackage(CMakePackage): class CachedCMakeBuilder(CMakeBuilder):
"""Specialized class for packages built using CMake initial cache.
This feature of CMake allows packages to increase reproducibility, #: Phases of a Cached CMake package
especially between Spack- and manual builds. It also allows packages to #: Note: the initconfig phase is used for developer builds as a final phase to stop on
sidestep certain parsing bugs in extremely long ``cmake`` commands, and to phases = ("initconfig", "cmake", "build", "install") # type: Tuple[str, ...]
avoid system limits on the length of the command line."""
phases = ["initconfig", "cmake", "build", "install"] #: Names associated with package methods in the old build-system format
legacy_methods = CMakeBuilder.legacy_methods + (
"initconfig_compiler_entries",
"initconfig_mpi_entries",
"initconfig_hardware_entries",
"std_initconfig_entries",
"initconfig_package_entries",
) # type: Tuple[str, ...]
#: Names associated with package attributes in the old build-system format
legacy_attributes = CMakeBuilder.legacy_attributes + (
"cache_name",
"cache_path",
) # type: Tuple[str, ...]
@property @property
def cache_name(self): def cache_name(self):
return "{0}-{1}-{2}@{3}.cmake".format( return "{0}-{1}-{2}@{3}.cmake".format(
self.name, self.pkg.name,
self.spec.architecture, self.pkg.spec.architecture,
self.spec.compiler.name, self.pkg.spec.compiler.name,
self.spec.compiler.version, self.pkg.spec.compiler.version,
) )
@property @property
def cache_path(self): def cache_path(self):
return os.path.join(self.stage.source_path, self.cache_name) return os.path.join(self.pkg.stage.source_path, self.cache_name)
def flag_handler(self, name, flags):
if name in ("cflags", "cxxflags", "cppflags", "fflags"):
return (None, None, None) # handled in the cmake cache
return (flags, None, None)
def initconfig_compiler_entries(self): def initconfig_compiler_entries(self):
# This will tell cmake to use the Spack compiler wrappers when run # This will tell cmake to use the Spack compiler wrappers when run
# through Spack, but use the underlying compiler when run outside of # through Spack, but use the underlying compiler when run outside of
# Spack # Spack
spec = self.spec spec = self.pkg.spec
# Fortran compiler is optional # Fortran compiler is optional
if "FC" in os.environ: if "FC" in os.environ:
spack_fc_entry = cmake_cache_path("CMAKE_Fortran_COMPILER", os.environ["FC"]) spack_fc_entry = cmake_cache_path("CMAKE_Fortran_COMPILER", os.environ["FC"])
system_fc_entry = cmake_cache_path("CMAKE_Fortran_COMPILER", self.compiler.fc) system_fc_entry = cmake_cache_path("CMAKE_Fortran_COMPILER", self.pkg.compiler.fc)
else: else:
spack_fc_entry = "# No Fortran compiler defined in spec" spack_fc_entry = "# No Fortran compiler defined in spec"
system_fc_entry = "# No Fortran compiler defined in spec" system_fc_entry = "# No Fortran compiler defined in spec"
@ -81,8 +89,8 @@ def initconfig_compiler_entries(self):
" " + cmake_cache_path("CMAKE_CXX_COMPILER", os.environ["CXX"]), " " + cmake_cache_path("CMAKE_CXX_COMPILER", os.environ["CXX"]),
" " + spack_fc_entry, " " + spack_fc_entry,
"else()\n", "else()\n",
" " + cmake_cache_path("CMAKE_C_COMPILER", self.compiler.cc), " " + cmake_cache_path("CMAKE_C_COMPILER", self.pkg.compiler.cc),
" " + cmake_cache_path("CMAKE_CXX_COMPILER", self.compiler.cxx), " " + cmake_cache_path("CMAKE_CXX_COMPILER", self.pkg.compiler.cxx),
" " + system_fc_entry, " " + system_fc_entry,
"endif()\n", "endif()\n",
] ]
@ -126,7 +134,7 @@ def initconfig_compiler_entries(self):
return entries return entries
def initconfig_mpi_entries(self): def initconfig_mpi_entries(self):
spec = self.spec spec = self.pkg.spec
if not spec.satisfies("^mpi"): if not spec.satisfies("^mpi"):
return [] return []
@ -160,13 +168,13 @@ def initconfig_mpi_entries(self):
mpiexec = os.path.join(spec["mpi"].prefix.bin, "mpiexec") mpiexec = os.path.join(spec["mpi"].prefix.bin, "mpiexec")
if not os.path.exists(mpiexec): if not os.path.exists(mpiexec):
msg = "Unable to determine MPIEXEC, %s tests may fail" % self.name msg = "Unable to determine MPIEXEC, %s tests may fail" % self.pkg.name
entries.append("# {0}\n".format(msg)) entries.append("# {0}\n".format(msg))
tty.warn(msg) tty.warn(msg)
else: else:
# starting with cmake 3.10, FindMPI expects MPIEXEC_EXECUTABLE # starting with cmake 3.10, FindMPI expects MPIEXEC_EXECUTABLE
# vs the older versions which expect MPIEXEC # vs the older versions which expect MPIEXEC
if self.spec["cmake"].satisfies("@3.10:"): if self.pkg.spec["cmake"].satisfies("@3.10:"):
entries.append(cmake_cache_path("MPIEXEC_EXECUTABLE", mpiexec)) entries.append(cmake_cache_path("MPIEXEC_EXECUTABLE", mpiexec))
else: else:
entries.append(cmake_cache_path("MPIEXEC", mpiexec)) entries.append(cmake_cache_path("MPIEXEC", mpiexec))
@ -180,7 +188,7 @@ def initconfig_mpi_entries(self):
return entries return entries
def initconfig_hardware_entries(self): def initconfig_hardware_entries(self):
spec = self.spec spec = self.pkg.spec
entries = [ entries = [
"#------------------{0}".format("-" * 60), "#------------------{0}".format("-" * 60),
@ -197,13 +205,7 @@ def initconfig_hardware_entries(self):
entries.append(cmake_cache_path("CUDA_TOOLKIT_ROOT_DIR", cudatoolkitdir)) entries.append(cmake_cache_path("CUDA_TOOLKIT_ROOT_DIR", cudatoolkitdir))
cudacompiler = "${CUDA_TOOLKIT_ROOT_DIR}/bin/nvcc" cudacompiler = "${CUDA_TOOLKIT_ROOT_DIR}/bin/nvcc"
entries.append(cmake_cache_path("CMAKE_CUDA_COMPILER", cudacompiler)) entries.append(cmake_cache_path("CMAKE_CUDA_COMPILER", cudacompiler))
entries.append(cmake_cache_path("CMAKE_CUDA_HOST_COMPILER", "${CMAKE_CXX_COMPILER}"))
if spec.satisfies("^mpi"):
entries.append(cmake_cache_path("CMAKE_CUDA_HOST_COMPILER", "${MPI_CXX_COMPILER}"))
else:
entries.append(
cmake_cache_path("CMAKE_CUDA_HOST_COMPILER", "${CMAKE_CXX_COMPILER}")
)
return entries return entries
@ -212,7 +214,7 @@ def std_initconfig_entries(self):
"#------------------{0}".format("-" * 60), "#------------------{0}".format("-" * 60),
"# !!!! This is a generated file, edit at own risk !!!!", "# !!!! This is a generated file, edit at own risk !!!!",
"#------------------{0}".format("-" * 60), "#------------------{0}".format("-" * 60),
"# CMake executable path: {0}".format(self.spec["cmake"].command.path), "# CMake executable path: {0}".format(self.pkg.spec["cmake"].command.path),
"#------------------{0}\n".format("-" * 60), "#------------------{0}\n".format("-" * 60),
] ]
@ -220,7 +222,7 @@ def initconfig_package_entries(self):
"""This method is to be overwritten by the package""" """This method is to be overwritten by the package"""
return [] return []
def initconfig(self, spec, prefix): def initconfig(self, pkg, spec, prefix):
cache_entries = ( cache_entries = (
self.std_initconfig_entries() self.std_initconfig_entries()
+ self.initconfig_compiler_entries() + self.initconfig_compiler_entries()
@ -236,11 +238,28 @@ def initconfig(self, spec, prefix):
@property @property
def std_cmake_args(self): def std_cmake_args(self):
args = super(CachedCMakePackage, self).std_cmake_args args = super(CachedCMakeBuilder, self).std_cmake_args
args.extend(["-C", self.cache_path]) args.extend(["-C", self.cache_path])
return args return args
@run_after("install") @spack.builder.run_after("install")
def install_cmake_cache(self): def install_cmake_cache(self):
mkdirp(self.spec.prefix.share.cmake) fs.mkdirp(self.pkg.spec.prefix.share.cmake)
install(self.cache_path, self.spec.prefix.share.cmake) fs.install(self.cache_path, self.pkg.spec.prefix.share.cmake)
class CachedCMakePackage(CMakePackage):
"""Specialized class for packages built using CMake initial cache.
This feature of CMake allows packages to increase reproducibility,
especially between Spack- and manual builds. It also allows packages to
sidestep certain parsing bugs in extremely long ``cmake`` commands, and to
avoid system limits on the length of the command line.
"""
CMakeBuilder = CachedCMakeBuilder
def flag_handler(self, name, flags):
if name in ("cflags", "cxxflags", "cppflags", "fflags"):
return None, None, None # handled in the cmake cache
return flags, None, None
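# Usage note: the generated cache is injected via "-C <cache_path>" (see
# std_cmake_args above) and installed to <prefix>/share/cmake, so the same
# initial cache can back a manual, out-of-Spack rebuild, e.g.:
#   cmake -C <prefix>/share/cmake/<cache_name> <source-dir>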

View File

@ -2,23 +2,26 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details. # Spack Project Developers. See the top-level COPYRIGHT file for details.
# #
# SPDX-License-Identifier: (Apache-2.0 OR MIT) # SPDX-License-Identifier: (Apache-2.0 OR MIT)
import inspect import inspect
import os import os
import platform import platform
import re import re
import sys import sys
from typing import List from typing import List, Tuple
import six import six
import llnl.util.filesystem as fs
from llnl.util.compat import Sequence from llnl.util.compat import Sequence
from llnl.util.filesystem import working_dir
import spack.build_environment import spack.build_environment
from spack.directives import conflicts, depends_on, variant import spack.builder
from spack.package_base import InstallError, PackageBase, run_after import spack.package_base
import spack.util.path
from spack.directives import build_system, depends_on, variant
from spack.multimethod import when
from ._checks import BaseBuilder, execute_build_time_tests
# Regex to extract the primary generator from the CMake generator # Regex to extract the primary generator from the CMake generator
# string. # string.
@ -34,56 +37,141 @@ def _extract_primary_generator(generator):
return primary_generator return primary_generator
class CMakePackage(PackageBase): class CMakePackage(spack.package_base.PackageBase):
"""Specialized class for packages built using CMake """Specialized class for packages built using CMake
For more information on the CMake build system, see: For more information on the CMake build system, see:
https://cmake.org/cmake/help/latest/ https://cmake.org/cmake/help/latest/
"""
This class provides three phases that can be overridden: #: This attribute is used in UI queries that need to know the build
#: system base class
build_system_class = "CMakePackage"
1. :py:meth:`~.CMakePackage.cmake` #: Legacy buildsystem attribute used to deserialize and install old specs
2. :py:meth:`~.CMakePackage.build` legacy_buildsystem = "cmake"
3. :py:meth:`~.CMakePackage.install`
build_system("cmake")
with when("build_system=cmake"):
# https://cmake.org/cmake/help/latest/variable/CMAKE_BUILD_TYPE.html
variant(
"build_type",
default="RelWithDebInfo",
description="CMake build type",
values=("Debug", "Release", "RelWithDebInfo", "MinSizeRel"),
)
# CMAKE_INTERPROCEDURAL_OPTIMIZATION only exists for CMake >= 3.9
# https://cmake.org/cmake/help/latest/variable/CMAKE_INTERPROCEDURAL_OPTIMIZATION.html
variant(
"ipo",
default=False,
when="^cmake@3.9:",
description="CMake interprocedural optimization",
)
depends_on("cmake", type="build")
depends_on("ninja", type="build", when="platform=windows")
def flags_to_build_system_args(self, flags):
"""Return a list of all command line arguments to pass the specified
compiler flags to cmake. Note CMAKE does not have a cppflags option,
so cppflags will be added to cflags, cxxflags, and fflags to mimic the
behavior in other tools.
"""
# Has to be dynamic attribute due to caching
setattr(self, "cmake_flag_args", [])
flag_string = "-DCMAKE_{0}_FLAGS={1}"
langs = {"C": "c", "CXX": "cxx", "Fortran": "f"}
# Handle language compiler flags
for lang, pre in langs.items():
flag = pre + "flags"
# cmake has no explicit cppflags support -> add it to all langs
lang_flags = " ".join(flags.get(flag, []) + flags.get("cppflags", []))
if lang_flags:
self.cmake_flag_args.append(flag_string.format(lang, lang_flags))
# Cmake has different linker arguments for different build types.
# We specify for each of them.
if flags["ldflags"]:
ldflags = " ".join(flags["ldflags"])
ld_string = "-DCMAKE_{0}_LINKER_FLAGS={1}"
# cmake has separate linker arguments for types of builds.
for type in ["EXE", "MODULE", "SHARED", "STATIC"]:
self.cmake_flag_args.append(ld_string.format(type, ldflags))
# CMake has libs options separated by language. Apply ours to each.
if flags["ldlibs"]:
libs_flags = " ".join(flags["ldlibs"])
libs_string = "-DCMAKE_{0}_STANDARD_LIBRARIES={1}"
for lang in langs:
self.cmake_flag_args.append(libs_string.format(lang, libs_flags))
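# Worked example (hypothetical flags dict): given
#   {"cflags": ["-O2"], "cxxflags": [], "fflags": [],
#    "cppflags": ["-DNDEBUG"], "ldflags": ["-Wl,-z,relro"], "ldlibs": []}
# the mapping above produces, among others:
#   -DCMAKE_C_FLAGS=-O2 -DNDEBUG            (cppflags folded into each language)
#   -DCMAKE_CXX_FLAGS=-DNDEBUG
#   -DCMAKE_EXE_LINKER_FLAGS=-Wl,-z,relro   (plus MODULE/SHARED/STATIC variants)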
# Legacy methods (used by too many packages to change them,
# need to forward to the builder)
def define(self, *args, **kwargs):
return self.builder.define(*args, **kwargs)
def define_from_variant(self, *args, **kwargs):
return self.builder.define_from_variant(*args, **kwargs)
@spack.builder.builder("cmake")
class CMakeBuilder(BaseBuilder):
"""The cmake builder encodes the default way of building software with CMake. IT
has three phases that can be overridden:
1. :py:meth:`~.CMakeBuilder.cmake`
2. :py:meth:`~.CMakeBuilder.build`
3. :py:meth:`~.CMakeBuilder.install`
They all have sensible defaults and for many packages the only thing They all have sensible defaults and for many packages the only thing
necessary will be to override :py:meth:`~.CMakePackage.cmake_args`. necessary will be to override :py:meth:`~.CMakeBuilder.cmake_args`.
For a finer tuning you may also override: For a finer tuning you may also override:
+-----------------------------------------------+--------------------+ +-----------------------------------------------+--------------------+
| **Method** | **Purpose** | | **Method** | **Purpose** |
+===============================================+====================+ +===============================================+====================+
| :py:meth:`~.CMakePackage.root_cmakelists_dir` | Location of the | | :py:meth:`~.CMakeBuilder.root_cmakelists_dir` | Location of the |
| | root CMakeLists.txt| | | root CMakeLists.txt|
+-----------------------------------------------+--------------------+ +-----------------------------------------------+--------------------+
| :py:meth:`~.CMakePackage.build_directory` | Directory where to | | :py:meth:`~.CMakeBuilder.build_directory` | Directory where to |
| | build the package | | | build the package |
+-----------------------------------------------+--------------------+ +-----------------------------------------------+--------------------+
The generator used by CMake can be specified by providing the ``generator``
The generator used by CMake can be specified by providing the attribute. Per
generator attribute. Per
https://cmake.org/cmake/help/git-master/manual/cmake-generators.7.html, https://cmake.org/cmake/help/git-master/manual/cmake-generators.7.html,
the format is: [<secondary-generator> - ]<primary_generator>. The the format is: [<secondary-generator> - ]<primary_generator>.
full list of primary and secondary generators supported by CMake may
be found in the documentation for the version of CMake used; The full list of primary and secondary generators supported by CMake may be found
however, at this time Spack supports only the primary generators in the documentation for the version of CMake used; however, at this time Spack
"Unix Makefiles" and "Ninja." Spack's CMake support is agnostic with supports only the primary generators "Unix Makefiles" and "Ninja." Spack's CMake
respect to primary generators. Spack will generate a runtime error support is agnostic with respect to primary generators. Spack will generate a
if the generator string does not follow the prescribed format, or if runtime error if the generator string does not follow the prescribed format, or if
the primary generator is not supported. the primary generator is not supported.
""" """
#: Phases of a CMake package #: Phases of a CMake package
phases = ["cmake", "build", "install"] phases = ("cmake", "build", "install") # type: Tuple[str, ...]
#: This attribute is used in UI queries that need to know the build
#: system base class
build_system_class = "CMakePackage"
build_targets = [] # type: List[str] #: Names associated with package methods in the old build-system format
install_targets = ["install"] legacy_methods = ("cmake_args", "check") # type: Tuple[str, ...]
build_time_test_callbacks = ["check"] #: Names associated with package attributes in the old build-system format
legacy_attributes = (
"generator",
"build_targets",
"install_targets",
"build_time_test_callbacks",
"archive_files",
"root_cmakelists_dir",
"std_cmake_args",
"build_dirname",
"build_directory",
) # type: Tuple[str, ...]
#: The build system generator to use. #: The build system generator to use.
#: #:
@ -93,27 +181,14 @@ class CMakePackage(PackageBase):
#: #:
#: See https://cmake.org/cmake/help/latest/manual/cmake-generators.7.html #: See https://cmake.org/cmake/help/latest/manual/cmake-generators.7.html
#: for more information. #: for more information.
generator = "Ninja" if sys.platform == "win32" else "Unix Makefiles"
generator = "Unix Makefiles" #: Targets to be used during the build phase
build_targets = [] # type: List[str]
if sys.platform == "win32": #: Targets to be used during the install phase
generator = "Ninja" install_targets = ["install"]
depends_on("ninja") #: Callback names for build-time test
build_time_test_callbacks = ["check"]
# https://cmake.org/cmake/help/latest/variable/CMAKE_BUILD_TYPE.html
variant(
"build_type",
default="RelWithDebInfo",
description="CMake build type",
values=("Debug", "Release", "RelWithDebInfo", "MinSizeRel"),
)
# https://cmake.org/cmake/help/latest/variable/CMAKE_INTERPROCEDURAL_OPTIMIZATION.html
variant("ipo", default=False, description="CMake interprocedural optimization")
# CMAKE_INTERPROCEDURAL_OPTIMIZATION only exists for CMake >= 3.9
conflicts("+ipo", when="^cmake@:3.8", msg="+ipo is not supported by CMake < 3.9")
depends_on("cmake", type="build")
@property @property
def archive_files(self): def archive_files(self):
@ -126,40 +201,30 @@ def root_cmakelists_dir(self):
This path is relative to the root of the extracted tarball, This path is relative to the root of the extracted tarball,
not to the ``build_directory``. Defaults to the current directory. not to the ``build_directory``. Defaults to the current directory.
:return: directory containing CMakeLists.txt
""" """
return self.stage.source_path return self.pkg.stage.source_path
@property @property
def std_cmake_args(self): def std_cmake_args(self):
"""Standard cmake arguments provided as a property for """Standard cmake arguments provided as a property for
convenience of package writers convenience of package writers
:return: standard cmake arguments
""" """
# standard CMake arguments # standard CMake arguments
std_cmake_args = CMakePackage._std_args(self) std_cmake_args = CMakeBuilder.std_args(self.pkg, generator=self.generator)
std_cmake_args += getattr(self, "cmake_flag_args", []) std_cmake_args += getattr(self.pkg, "cmake_flag_args", [])
return std_cmake_args return std_cmake_args
@staticmethod @staticmethod
def _std_args(pkg): def std_args(pkg, generator=None):
"""Computes the standard cmake arguments for a generic package""" """Computes the standard cmake arguments for a generic package"""
generator = generator or "Unix Makefiles"
try:
generator = pkg.generator
except AttributeError:
generator = CMakePackage.generator
# Make sure a valid generator was chosen
valid_primary_generators = ["Unix Makefiles", "Ninja"] valid_primary_generators = ["Unix Makefiles", "Ninja"]
primary_generator = _extract_primary_generator(generator) primary_generator = _extract_primary_generator(generator)
if primary_generator not in valid_primary_generators: if primary_generator not in valid_primary_generators:
msg = "Invalid CMake generator: '{0}'\n".format(generator) msg = "Invalid CMake generator: '{0}'\n".format(generator)
msg += "CMakePackage currently supports the following " msg += "CMakePackage currently supports the following "
msg += "primary generators: '{0}'".format("', '".join(valid_primary_generators)) msg += "primary generators: '{0}'".format("', '".join(valid_primary_generators))
raise InstallError(msg) raise spack.package_base.InstallError(msg)
try: try:
build_type = pkg.spec.variants["build_type"].value build_type = pkg.spec.variants["build_type"].value
@ -171,7 +236,7 @@ def _std_args(pkg):
except KeyError: except KeyError:
ipo = False ipo = False
define = CMakePackage.define define = CMakeBuilder.define
args = [ args = [
"-G", "-G",
generator, generator,
@ -251,7 +316,7 @@ def define_from_variant(self, cmake_var, variant=None):
of ``cmake_var``. of ``cmake_var``.
This utility function is similar to This utility function is similar to
:meth:`~spack.build_systems.autotools.AutotoolsPackage.with_or_without`. :meth:`~spack.build_systems.autotools.AutotoolsBuilder.with_or_without`.
Examples: Examples:
@ -291,122 +356,75 @@ def define_from_variant(self, cmake_var, variant=None):
if variant is None: if variant is None:
variant = cmake_var.lower() variant = cmake_var.lower()
if variant not in self.variants: if variant not in self.pkg.variants:
raise KeyError('"{0}" is not a variant of "{1}"'.format(variant, self.name)) raise KeyError('"{0}" is not a variant of "{1}"'.format(variant, self.pkg.name))
if variant not in self.spec.variants: if variant not in self.pkg.spec.variants:
return "" return ""
value = self.spec.variants[variant].value value = self.pkg.spec.variants[variant].value
if isinstance(value, (tuple, list)): if isinstance(value, (tuple, list)):
# Sort multi-valued variants for reproducibility # Sort multi-valued variants for reproducibility
value = sorted(value) value = sorted(value)
return self.define(cmake_var, value) return self.define(cmake_var, value)
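# A minimal sketch of typical use inside a package's cmake_args; the CMake
# variable names and the "shared" variant are hypothetical:
#
#     def cmake_args(self):
#         return [
#             self.define("ENABLE_TESTS", False),  # -> -DENABLE_TESTS:BOOL=OFF
#             self.define_from_variant("BUILD_SHARED_LIBS", "shared"),
#         ]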
def flags_to_build_system_args(self, flags):
"""Produces a list of all command line arguments to pass the specified
compiler flags to cmake. Note CMAKE does not have a cppflags option,
so cppflags will be added to cflags, cxxflags, and fflags to mimic the
behavior in other tools."""
# Has to be dynamic attribute due to caching
setattr(self, "cmake_flag_args", [])
flag_string = "-DCMAKE_{0}_FLAGS={1}"
langs = {"C": "c", "CXX": "cxx", "Fortran": "f"}
# Handle language compiler flags
for lang, pre in langs.items():
flag = pre + "flags"
# cmake has no explicit cppflags support -> add it to all langs
lang_flags = " ".join(flags.get(flag, []) + flags.get("cppflags", []))
if lang_flags:
self.cmake_flag_args.append(flag_string.format(lang, lang_flags))
# Cmake has different linker arguments for different build types.
# We specify for each of them.
if flags["ldflags"]:
ldflags = " ".join(flags["ldflags"])
ld_string = "-DCMAKE_{0}_LINKER_FLAGS={1}"
# cmake has separate linker arguments for types of builds.
for type in ["EXE", "MODULE", "SHARED", "STATIC"]:
self.cmake_flag_args.append(ld_string.format(type, ldflags))
# CMake has libs options separated by language. Apply ours to each.
if flags["ldlibs"]:
libs_flags = " ".join(flags["ldlibs"])
libs_string = "-DCMAKE_{0}_STANDARD_LIBRARIES={1}"
for lang in langs:
self.cmake_flag_args.append(libs_string.format(lang, libs_flags))
@property @property
def build_dirname(self): def build_dirname(self):
"""Returns the directory name to use when building the package """Directory name to use when building the package."""
return "spack-build-%s" % self.pkg.spec.dag_hash(7)
:return: name of the subdirectory for building the package
"""
return "spack-build-%s" % self.spec.dag_hash(7)
@property @property
def build_directory(self): def build_directory(self):
"""Returns the directory to use when building the package """Full-path to the directory to use when building the package."""
return os.path.join(self.pkg.stage.path, self.build_dirname)
:return: directory where to build the package
"""
return os.path.join(self.stage.path, self.build_dirname)
def cmake_args(self): def cmake_args(self):
"""Produces a list containing all the arguments that must be passed to """List of all the arguments that must be passed to cmake, except:
cmake, except:
* CMAKE_INSTALL_PREFIX * CMAKE_INSTALL_PREFIX
* CMAKE_BUILD_TYPE * CMAKE_BUILD_TYPE
* BUILD_TESTING * BUILD_TESTING
which will be set automatically. which will be set automatically.
:return: list of arguments for cmake
""" """
return [] return []
def cmake(self, spec, prefix): def cmake(self, pkg, spec, prefix):
"""Runs ``cmake`` in the build directory""" """Runs ``cmake`` in the build directory"""
options = self.std_cmake_args options = self.std_cmake_args
options += self.cmake_args() options += self.cmake_args()
options.append(os.path.abspath(self.root_cmakelists_dir)) options.append(os.path.abspath(self.root_cmakelists_dir))
with working_dir(self.build_directory, create=True): with fs.working_dir(self.build_directory, create=True):
inspect.getmodule(self).cmake(*options) inspect.getmodule(self.pkg).cmake(*options)
def build(self, spec, prefix): def build(self, pkg, spec, prefix):
"""Make the build targets""" """Make the build targets"""
with working_dir(self.build_directory): with fs.working_dir(self.build_directory):
if self.generator == "Unix Makefiles": if self.generator == "Unix Makefiles":
inspect.getmodule(self).make(*self.build_targets) inspect.getmodule(self.pkg).make(*self.build_targets)
elif self.generator == "Ninja": elif self.generator == "Ninja":
self.build_targets.append("-v") self.build_targets.append("-v")
inspect.getmodule(self).ninja(*self.build_targets) inspect.getmodule(self.pkg).ninja(*self.build_targets)
def install(self, spec, prefix): def install(self, pkg, spec, prefix):
"""Make the install targets""" """Make the install targets"""
with working_dir(self.build_directory): with fs.working_dir(self.build_directory):
if self.generator == "Unix Makefiles": if self.generator == "Unix Makefiles":
inspect.getmodule(self).make(*self.install_targets) inspect.getmodule(self.pkg).make(*self.install_targets)
elif self.generator == "Ninja": elif self.generator == "Ninja":
inspect.getmodule(self).ninja(*self.install_targets) inspect.getmodule(self.pkg).ninja(*self.install_targets)
run_after("build")(PackageBase._run_default_build_time_test_callbacks) spack.builder.run_after("build")(execute_build_time_tests)
def check(self): def check(self):
"""Searches the CMake-generated Makefile for the target ``test`` """Search the CMake-generated files for the targets ``test`` and ``check``,
and runs it if found. and run them if found.
""" """
with working_dir(self.build_directory): with fs.working_dir(self.build_directory):
if self.generator == "Unix Makefiles": if self.generator == "Unix Makefiles":
self._if_make_target_execute("test", jobs_env="CTEST_PARALLEL_LEVEL") self.pkg._if_make_target_execute("test", jobs_env="CTEST_PARALLEL_LEVEL")
self._if_make_target_execute("check") self.pkg._if_make_target_execute("check")
elif self.generator == "Ninja": elif self.generator == "Ninja":
self._if_ninja_target_execute("test", jobs_env="CTEST_PARALLEL_LEVEL") self.pkg._if_ninja_target_execute("test", jobs_env="CTEST_PARALLEL_LEVEL")
self._if_ninja_target_execute("check") self.pkg._if_ninja_target_execute("check")
# Check that self.prefix is there after installation
run_after("install")(PackageBase.sanity_check_prefix)

View File

@ -0,0 +1,44 @@
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from typing import Tuple
import spack.builder
import spack.directives
import spack.package_base
from ._checks import BaseBuilder, apply_macos_rpath_fixups
class Package(spack.package_base.PackageBase):
"""General purpose class with a single ``install`` phase that needs to be
coded by packagers.
"""
#: This attribute is used in UI queries that require to know which
#: build-system class we are using
build_system_class = "Package"
#: Legacy buildsystem attribute used to deserialize and install old specs
legacy_buildsystem = "generic"
spack.directives.build_system("generic")
@spack.builder.builder("generic")
class GenericBuilder(BaseBuilder):
"""A builder for a generic build system, that require packagers
to implement an "install" phase.
"""
#: A generic package has only the "install" phase
phases = ("install",)
#: Names associated with package methods in the old build-system format
legacy_methods = () # type: Tuple[str, ...]
#: Names associated with package attributes in the old build-system format
legacy_attributes = ("archive_files",) # type: Tuple[str, ...]
# On macOS, force rpaths for shared library IDs and remove duplicate rpaths
spack.builder.run_after("install", when="platform=darwin")(apply_macos_rpath_fixups)

View File

@ -2,8 +2,6 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details. # Spack Project Developers. See the top-level COPYRIGHT file for details.
# #
# SPDX-License-Identifier: (Apache-2.0 OR MIT) # SPDX-License-Identifier: (Apache-2.0 OR MIT)
import glob import glob
import inspect import inspect
import os import os
@ -26,12 +24,14 @@
import spack.error import spack.error
from spack.build_environment import dso_suffix from spack.build_environment import dso_suffix
from spack.package_base import InstallError, PackageBase, run_after from spack.package_base import InstallError
from spack.util.environment import EnvironmentModifications from spack.util.environment import EnvironmentModifications
from spack.util.executable import Executable from spack.util.executable import Executable
from spack.util.prefix import Prefix from spack.util.prefix import Prefix
from spack.version import Version, ver from spack.version import Version, ver
from .generic import Package
# A couple of utility functions that might be useful in general. If so, they # A couple of utility functions that might be useful in general. If so, they
# should really be defined elsewhere, unless deemed heretical. # should really be defined elsewhere, unless deemed heretical.
# (Or na"ive on my part). # (Or na"ive on my part).
@ -86,7 +86,7 @@ def _expand_fields(s):
return s return s
class IntelPackage(PackageBase): class IntelPackage(Package):
"""Specialized class for licensed Intel software. """Specialized class for licensed Intel software.
This class provides two phases that can be overridden: This class provides two phases that can be overridden:
@ -99,9 +99,6 @@ class IntelPackage(PackageBase):
to set the appropriate environment variables. to set the appropriate environment variables.
""" """
#: Phases of an Intel package
phases = ["configure", "install"]
#: This attribute is used in UI queries that need to know the build #: This attribute is used in UI queries that need to know the build
#: system base class #: system base class
build_system_class = "IntelPackage" build_system_class = "IntelPackage"
@ -1184,12 +1181,13 @@ def _determine_license_type(self):
debug_print(license_type) debug_print(license_type)
return license_type return license_type
def configure(self, spec, prefix): @spack.builder.run_before("install")
def configure(self):
"""Generates the silent.cfg file to pass to installer.sh. """Generates the silent.cfg file to pass to installer.sh.
See https://software.intel.com/en-us/articles/configuration-file-format See https://software.intel.com/en-us/articles/configuration-file-format
""" """
prefix = self.prefix
# Both tokens AND values of the configuration file are validated during # Both tokens AND values of the configuration file are validated during
# the run of the underlying binary installer. Any unknown token or # the run of the underlying binary installer. Any unknown token or
# unacceptable value will cause that installer to fail. Notably, this # unacceptable value will cause that installer to fail. Notably, this
@ -1270,7 +1268,7 @@ def install(self, spec, prefix):
for f in glob.glob("%s/intel*log" % tmpdir): for f in glob.glob("%s/intel*log" % tmpdir):
install(f, dst) install(f, dst)
@run_after("install") @spack.builder.run_after("install")
def validate_install(self): def validate_install(self):
# Sometimes the installer exits with an error but doesn't pass a # Sometimes the installer exits with an error but doesn't pass a
# non-zero exit code to spack. Check for the existence of a 'bin' # non-zero exit code to spack. Check for the existence of a 'bin'
@ -1278,7 +1276,7 @@ def validate_install(self):
if not os.path.exists(self.prefix.bin): if not os.path.exists(self.prefix.bin):
raise InstallError("The installer has failed to install anything.") raise InstallError("The installer has failed to install anything.")
@run_after("install") @spack.builder.run_after("install")
def configure_rpath(self): def configure_rpath(self):
if "+rpath" not in self.spec: if "+rpath" not in self.spec:
return return
@ -1296,7 +1294,7 @@ def configure_rpath(self):
with open(compiler_cfg, "w") as fh: with open(compiler_cfg, "w") as fh:
fh.write("-Xlinker -rpath={0}\n".format(compilers_lib_dir)) fh.write("-Xlinker -rpath={0}\n".format(compilers_lib_dir))
@run_after("install") @spack.builder.run_after("install")
def configure_auto_dispatch(self): def configure_auto_dispatch(self):
if self._has_compilers: if self._has_compilers:
if "auto_dispatch=none" in self.spec: if "auto_dispatch=none" in self.spec:
@ -1320,7 +1318,7 @@ def configure_auto_dispatch(self):
with open(compiler_cfg, "a") as fh: with open(compiler_cfg, "a") as fh:
fh.write("-ax{0}\n".format(",".join(ad))) fh.write("-ax{0}\n".format(",".join(ad)))
@run_after("install") @spack.builder.run_after("install")
def filter_compiler_wrappers(self): def filter_compiler_wrappers(self):
if ("+mpi" in self.spec or self.provides("mpi")) and "~newdtags" in self.spec: if ("+mpi" in self.spec or self.provides("mpi")) and "~newdtags" in self.spec:
bin_dir = self.component_bin_dir("mpi") bin_dir = self.component_bin_dir("mpi")
@ -1328,7 +1326,7 @@ def filter_compiler_wrappers(self):
f = os.path.join(bin_dir, f) f = os.path.join(bin_dir, f)
filter_file("-Xlinker --enable-new-dtags", " ", f, string=True) filter_file("-Xlinker --enable-new-dtags", " ", f, string=True)
@run_after("install") @spack.builder.run_after("install")
def uninstall_ism(self): def uninstall_ism(self):
# The "Intel(R) Software Improvement Program" [ahem] gets installed, # The "Intel(R) Software Improvement Program" [ahem] gets installed,
# apparently regardless of PHONEHOME_SEND_USAGE_DATA. # apparently regardless of PHONEHOME_SEND_USAGE_DATA.
@ -1360,7 +1358,7 @@ def base_lib_dir(self):
debug_print(d) debug_print(d)
return d return d
@run_after("install") @spack.builder.run_after("install")
def modify_LLVMgold_rpath(self): def modify_LLVMgold_rpath(self):
"""Add libimf.so and other required libraries to the RUNPATH of LLVMgold.so. """Add libimf.so and other required libraries to the RUNPATH of LLVMgold.so.
@ -1391,6 +1389,3 @@ def modify_LLVMgold_rpath(self):
] ]
) )
patchelf("--set-rpath", rpath, lib) patchelf("--set-rpath", rpath, lib)
# Check that self.prefix is there after installation
run_after("install")(PackageBase.sanity_check_prefix)

View File

@ -2,59 +2,82 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details. # Spack Project Developers. See the top-level COPYRIGHT file for details.
# #
# SPDX-License-Identifier: (Apache-2.0 OR MIT) # SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os import os
from llnl.util.filesystem import find from llnl.util.filesystem import find
from spack.directives import depends_on, extends import spack.builder
import spack.package_base
import spack.util.executable
from spack.directives import build_system, depends_on, extends
from spack.multimethod import when from spack.multimethod import when
from spack.package_base import PackageBase
from spack.util.executable import Executable
class LuaPackage(PackageBase): class LuaPackage(spack.package_base.PackageBase):
"""Specialized class for lua packages""" """Specialized class for lua packages"""
phases = ["unpack", "generate_luarocks_config", "preprocess", "install"]
#: This attribute is used in UI queries that need to know the build #: This attribute is used in UI queries that need to know the build
#: system base class #: system base class
build_system_class = "LuaPackage" build_system_class = "LuaPackage"
list_depth = 1 # LuaRocks requires at least one level of spidering to find versions #: Legacy buildsystem attribute used to deserialize and install old specs
depends_on("lua-lang") legacy_buildsystem = "lua"
extends("lua", when="^lua")
with when("^lua-luajit"):
extends("lua-luajit")
depends_on("luajit")
depends_on("lua-luajit+lualinks")
with when("^lua-luajit-openresty"):
extends("lua-luajit-openresty")
depends_on("luajit")
depends_on("lua-luajit-openresty+lualinks")
def unpack(self, spec, prefix): list_depth = 1 # LuaRocks requires at least one level of spidering to find versions
if os.path.splitext(self.stage.archive_file)[1] == ".rock":
directory = self.luarocks("unpack", self.stage.archive_file, output=str) build_system("lua")
with when("build_system=lua"):
depends_on("lua-lang")
extends("lua", when="^lua")
with when("^lua-luajit"):
extends("lua-luajit")
depends_on("luajit")
depends_on("lua-luajit+lualinks")
with when("^lua-luajit-openresty"):
extends("lua-luajit-openresty")
depends_on("luajit")
depends_on("lua-luajit-openresty+lualinks")
@property
def lua(self):
return spack.util.executable.Executable(self.spec["lua-lang"].prefix.bin.lua)
@property
def luarocks(self):
lr = spack.util.executable.Executable(self.spec["lua-lang"].prefix.bin.luarocks)
return lr
@spack.builder.builder("lua")
class LuaBuilder(spack.builder.Builder):
phases = ("unpack", "generate_luarocks_config", "preprocess", "install")
#: Names associated with package methods in the old build-system format
legacy_methods = ("luarocks_args",)
#: Names associated with package attributes in the old build-system format
legacy_attributes = ()
def unpack(self, pkg, spec, prefix):
if os.path.splitext(pkg.stage.archive_file)[1] == ".rock":
directory = pkg.luarocks("unpack", pkg.stage.archive_file, output=str)
dirlines = directory.split("\n") dirlines = directory.split("\n")
# TODO: figure out how to scope this better # TODO: figure out how to scope this better
os.chdir(dirlines[2]) os.chdir(dirlines[2])
def _generate_tree_line(self, name, prefix): @staticmethod
def _generate_tree_line(name, prefix):
return """{{ name = "{name}", root = "{prefix}" }};""".format( return """{{ name = "{name}", root = "{prefix}" }};""".format(
name=name, name=name,
prefix=prefix, prefix=prefix,
) )
def _luarocks_config_path(self): def generate_luarocks_config(self, pkg, spec, prefix):
return os.path.join(self.stage.source_path, "spack_luarocks.lua") spec = self.pkg.spec
def generate_luarocks_config(self, spec, prefix):
spec = self.spec
table_entries = [] table_entries = []
for d in spec.traverse(deptypes=("build", "run"), deptype_query="run"): for d in spec.traverse(deptype=("build", "run")):
if d.package.extends(self.extendee_spec): if d.package.extends(self.pkg.extendee_spec):
table_entries.append(self._generate_tree_line(d.name, d.prefix)) table_entries.append(self._generate_tree_line(d.name, d.prefix))
path = self._luarocks_config_path() path = self._luarocks_config_path()
@ -71,30 +94,24 @@ def generate_luarocks_config(self, spec, prefix):
) )
return path return path
def setup_build_environment(self, env): def preprocess(self, pkg, spec, prefix):
env.set("LUAROCKS_CONFIG", self._luarocks_config_path())
def preprocess(self, spec, prefix):
"""Override this to preprocess source before building with luarocks""" """Override this to preprocess source before building with luarocks"""
pass pass
@property
def lua(self):
return Executable(self.spec["lua-lang"].prefix.bin.lua)
@property
def luarocks(self):
lr = Executable(self.spec["lua-lang"].prefix.bin.luarocks)
return lr
def luarocks_args(self): def luarocks_args(self):
return [] return []
def install(self, spec, prefix): def install(self, pkg, spec, prefix):
rock = "." rock = "."
specs = find(".", "*.rockspec", recursive=False) specs = find(".", "*.rockspec", recursive=False)
if specs: if specs:
rock = specs[0] rock = specs[0]
rocks_args = self.luarocks_args() rocks_args = self.luarocks_args()
rocks_args.append(rock) rocks_args.append(rock)
self.luarocks("--tree=" + prefix, "make", *rocks_args) self.pkg.luarocks("--tree=" + prefix, "make", *rocks_args)
def _luarocks_config_path(self):
return os.path.join(self.pkg.stage.source_path, "spack_luarocks.lua")
def setup_build_environment(self, env):
env.set("LUAROCKS_CONFIG", self._luarocks_config_path())

View File

@ -2,62 +2,85 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details. # Spack Project Developers. See the top-level COPYRIGHT file for details.
# #
# SPDX-License-Identifier: (Apache-2.0 OR MIT) # SPDX-License-Identifier: (Apache-2.0 OR MIT)
import inspect import inspect
from typing import List # novm from typing import List # novm
import llnl.util.tty as tty import llnl.util.filesystem as fs
from llnl.util.filesystem import working_dir
from spack.directives import conflicts import spack.builder
from spack.package_base import PackageBase, run_after import spack.package_base
from spack.directives import build_system, conflicts
from ._checks import (
BaseBuilder,
apply_macos_rpath_fixups,
execute_build_time_tests,
execute_install_time_tests,
)
class MakefilePackage(PackageBase): class MakefilePackage(spack.package_base.PackageBase):
"""Specialized class for packages that are built using editable Makefiles """Specialized class for packages built using a Makefiles."""
This class provides three phases that can be overridden: #: This attribute is used in UI queries that need to know the build
#: system base class
build_system_class = "MakefilePackage"
#: Legacy buildsystem attribute used to deserialize and install old specs
legacy_buildsystem = "makefile"
1. :py:meth:`~.MakefilePackage.edit` build_system("makefile")
2. :py:meth:`~.MakefilePackage.build` conflicts("platform=windows", when="build_system=makefile")
3. :py:meth:`~.MakefilePackage.install`
@spack.builder.builder("makefile")
class MakefileBuilder(BaseBuilder):
"""The Makefile builder encodes the most common way of building software with
Makefiles. It has three phases that can be overridden, if need be:
1. :py:meth:`~.MakefileBuilder.edit`
2. :py:meth:`~.MakefileBuilder.build`
3. :py:meth:`~.MakefileBuilder.install`
It is usually necessary to override the :py:meth:`~.MakefileBuilder.edit`
phase (which is by default a no-op), while the other two have sensible defaults.
It is usually necessary to override the :py:meth:`~.MakefilePackage.edit`
phase, while :py:meth:`~.MakefilePackage.build` and
:py:meth:`~.MakefilePackage.install` have sensible defaults.
For a finer tuning you may override: For a finer tuning you may override:
+-----------------------------------------------+--------------------+ +-----------------------------------------------+--------------------+
| **Method** | **Purpose** | | **Method** | **Purpose** |
+===============================================+====================+ +===============================================+====================+
| :py:attr:`~.MakefilePackage.build_targets` | Specify ``make`` | | :py:attr:`~.MakefileBuilder.build_targets` | Specify ``make`` |
| | targets for the | | | targets for the |
| | build phase | | | build phase |
+-----------------------------------------------+--------------------+ +-----------------------------------------------+--------------------+
| :py:attr:`~.MakefilePackage.install_targets` | Specify ``make`` | | :py:attr:`~.MakefileBuilder.install_targets` | Specify ``make`` |
| | targets for the | | | targets for the |
| | install phase | | | install phase |
+-----------------------------------------------+--------------------+ +-----------------------------------------------+--------------------+
| :py:meth:`~.MakefilePackage.build_directory` | Directory where the| | :py:meth:`~.MakefileBuilder.build_directory` | Directory where the|
| | Makefile is located| | | Makefile is located|
+-----------------------------------------------+--------------------+ +-----------------------------------------------+--------------------+
""" """
#: Phases of a package that is built with a hand-written Makefile phases = ("edit", "build", "install")
phases = ["edit", "build", "install"]
#: This attribute is used in UI queries that need to know the build
#: system base class
build_system_class = "MakefilePackage"
#: Targets for ``make`` during the :py:meth:`~.MakefilePackage.build` #: Names associated with package methods in the old build-system format
#: phase legacy_methods = ("check", "installcheck")
#: Names associated with package attributes in the old build-system format
legacy_attributes = (
"build_targets",
"install_targets",
"build_time_test_callbacks",
"install_time_test_callbacks",
"build_directory",
)
#: Targets for ``make`` during the :py:meth:`~.MakefileBuilder.build` phase
build_targets = [] # type: List[str] build_targets = [] # type: List[str]
#: Targets for ``make`` during the :py:meth:`~.MakefilePackage.install` #: Targets for ``make`` during the :py:meth:`~.MakefileBuilder.install` phase
#: phase
install_targets = ["install"] install_targets = ["install"]
conflicts("platform=windows")
#: Callback names for build-time test #: Callback names for build-time test
build_time_test_callbacks = ["check"] build_time_test_callbacks = ["check"]
@ -66,53 +89,39 @@ class MakefilePackage(PackageBase):
@property @property
def build_directory(self): def build_directory(self):
"""Returns the directory containing the main Makefile """Return the directory containing the main Makefile."""
return self.pkg.stage.source_path
:return: build directory def edit(self, pkg, spec, prefix):
""" """Edit the Makefile before calling make. The default is a no-op."""
return self.stage.source_path pass
def edit(self, spec, prefix): def build(self, pkg, spec, prefix):
"""Edits the Makefile before calling make. This phase cannot """Run "make" on the build targets specified by the builder."""
be defaulted. with fs.working_dir(self.build_directory):
""" inspect.getmodule(self.pkg).make(*self.build_targets)
tty.msg("Using default implementation: skipping edit phase.")
def build(self, spec, prefix): def install(self, pkg, spec, prefix):
"""Calls make, passing :py:attr:`~.MakefilePackage.build_targets` """Run "make" on the install targets specified by the builder."""
as targets. with fs.working_dir(self.build_directory):
""" inspect.getmodule(self.pkg).make(*self.install_targets)
with working_dir(self.build_directory):
inspect.getmodule(self).make(*self.build_targets)
def install(self, spec, prefix): spack.builder.run_after("build")(execute_build_time_tests)
"""Calls make, passing :py:attr:`~.MakefilePackage.install_targets`
as targets.
"""
with working_dir(self.build_directory):
inspect.getmodule(self).make(*self.install_targets)
run_after("build")(PackageBase._run_default_build_time_test_callbacks)
def check(self): def check(self):
"""Searches the Makefile for targets ``test`` and ``check`` """Run "make" on the ``test`` and ``check`` targets, if found."""
and runs them if found. with fs.working_dir(self.build_directory):
""" self.pkg._if_make_target_execute("test")
with working_dir(self.build_directory): self.pkg._if_make_target_execute("check")
self._if_make_target_execute("test")
self._if_make_target_execute("check")
run_after("install")(PackageBase._run_default_install_time_test_callbacks) spack.builder.run_after("install")(execute_install_time_tests)
def installcheck(self): def installcheck(self):
"""Searches the Makefile for an ``installcheck`` target """Searches the Makefile for an ``installcheck`` target
and runs it if found. and runs it if found.
""" """
with working_dir(self.build_directory): with fs.working_dir(self.build_directory):
self._if_make_target_execute("installcheck") self.pkg._if_make_target_execute("installcheck")
# Check that self.prefix is there after installation
run_after("install")(PackageBase.sanity_check_prefix)
# On macOS, force rpaths for shared library IDs and remove duplicate rpaths # On macOS, force rpaths for shared library IDs and remove duplicate rpaths
run_after("install")(PackageBase.apply_macos_rpath_fixups) spack.builder.run_after("install", when="platform=darwin")(apply_macos_rpath_fixups)

View File

@ -2,60 +2,73 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details. # Spack Project Developers. See the top-level COPYRIGHT file for details.
# #
# SPDX-License-Identifier: (Apache-2.0 OR MIT) # SPDX-License-Identifier: (Apache-2.0 OR MIT)
import llnl.util.filesystem as fs
import spack.builder
from llnl.util.filesystem import install_tree, working_dir import spack.package_base
from spack.directives import build_system, depends_on
from spack.directives import depends_on from spack.multimethod import when
from spack.package_base import PackageBase, run_after
from spack.util.executable import which from spack.util.executable import which
from ._checks import BaseBuilder
class MavenPackage(PackageBase):
class MavenPackage(spack.package_base.PackageBase):
"""Specialized class for packages that are built using the """Specialized class for packages that are built using the
Maven build system. See https://maven.apache.org/index.html Maven build system. See https://maven.apache.org/index.html
for more information. for more information.
This class provides the following phases that can be overridden:
* build
* install
""" """
# Default phases
phases = ["build", "install"]
# To be used in UI queries that require to know which # To be used in UI queries that require to know which
# build-system class we are using # build-system class we are using
build_system_class = "MavenPackage" build_system_class = "MavenPackage"
depends_on("java", type=("build", "run")) #: Legacy buildsystem attribute used to deserialize and install old specs
depends_on("maven", type="build") legacy_buildsystem = "maven"
build_system("maven")
with when("build_system=maven"):
depends_on("java", type=("build", "run"))
depends_on("maven", type="build")
@spack.builder.builder("maven")
class MavenBuilder(BaseBuilder):
"""The Maven builder encodes the default way to build software with Maven.
It has two phases that can be overridden, if need be:
1. :py:meth:`~.MavenBuilder.build`
2. :py:meth:`~.MavenBuilder.install`
"""
phases = ("build", "install")
#: Names associated with package methods in the old build-system format
legacy_methods = ("build_args",)
#: Names associated with package attributes in the old build-system format
legacy_attributes = ("build_directory",)
@property @property
def build_directory(self): def build_directory(self):
"""The directory containing the ``pom.xml`` file.""" """The directory containing the ``pom.xml`` file."""
return self.stage.source_path return self.pkg.stage.source_path
def build_args(self): def build_args(self):
"""List of args to pass to build phase.""" """List of args to pass to build phase."""
return [] return []
def build(self, spec, prefix): def build(self, pkg, spec, prefix):
"""Compile code and package into a JAR file.""" """Compile code and package into a JAR file."""
with fs.working_dir(self.build_directory):
with working_dir(self.build_directory):
mvn = which("mvn") mvn = which("mvn")
if self.run_tests: if self.pkg.run_tests:
mvn("verify", *self.build_args()) mvn("verify", *self.build_args())
else: else:
mvn("package", "-DskipTests", *self.build_args()) mvn("package", "-DskipTests", *self.build_args())
def install(self, spec, prefix): def install(self, pkg, spec, prefix):
"""Copy to installation prefix.""" """Copy to installation prefix."""
with fs.working_dir(self.build_directory):
with working_dir(self.build_directory): fs.install_tree(".", prefix)
install_tree(".", prefix)
# Check that self.prefix is there after installation
run_after("install")(PackageBase.sanity_check_prefix)

View File

@ -2,108 +2,131 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details. # Spack Project Developers. See the top-level COPYRIGHT file for details.
# #
# SPDX-License-Identifier: (Apache-2.0 OR MIT) # SPDX-License-Identifier: (Apache-2.0 OR MIT)
import inspect import inspect
import os import os
from typing import List # novm from typing import List # novm
from llnl.util.filesystem import working_dir import llnl.util.filesystem as fs
from spack.directives import depends_on, variant import spack.builder
from spack.package_base import PackageBase, run_after import spack.package_base
from spack.directives import build_system, depends_on, variant
from spack.multimethod import when
from ._checks import BaseBuilder, execute_build_time_tests
class MesonPackage(PackageBase): class MesonPackage(spack.package_base.PackageBase):
"""Specialized class for packages built using Meson """Specialized class for packages built using Meson. For more information
on the Meson build system, see https://mesonbuild.com/
"""
For more information on the Meson build system, see: #: This attribute is used in UI queries that need to know the build
https://mesonbuild.com/ #: system base class
build_system_class = "MesonPackage"
This class provides three phases that can be overridden: #: Legacy buildsystem attribute used to deserialize and install old specs
legacy_buildsystem = "meson"
1. :py:meth:`~.MesonPackage.meson` build_system("meson")
2. :py:meth:`~.MesonPackage.build`
3. :py:meth:`~.MesonPackage.install` with when("build_system=meson"):
variant(
"buildtype",
default="debugoptimized",
description="Meson build type",
values=("plain", "debug", "debugoptimized", "release", "minsize"),
)
variant(
"default_library",
default="shared",
values=("shared", "static"),
multi=True,
description="Build shared libs, static libs or both",
)
variant("strip", default=False, description="Strip targets on install")
depends_on("meson", type="build")
depends_on("ninja", type="build")
def flags_to_build_system_args(self, flags):
"""Produces a list of all command line arguments to pass the specified
compiler flags to meson."""
# Has to be dynamic attribute due to caching
setattr(self, "meson_flag_args", [])
@spack.builder.builder("meson")
class MesonBuilder(BaseBuilder):
"""The Meson builder encodes the default way to build software with Meson.
The builder has three phases that can be overridden, if need be:
1. :py:meth:`~.MesonBuilder.meson`
2. :py:meth:`~.MesonBuilder.build`
3. :py:meth:`~.MesonBuilder.install`
They all have sensible defaults and for many packages the only thing They all have sensible defaults and for many packages the only thing
necessary will be to override :py:meth:`~.MesonPackage.meson_args`. necessary will be to override :py:meth:`~.MesonBuilder.meson_args`.
For a finer tuning you may also override: For a finer tuning you may also override:
+-----------------------------------------------+--------------------+ +-----------------------------------------------+--------------------+
| **Method** | **Purpose** | | **Method** | **Purpose** |
+===============================================+====================+ +===============================================+====================+
| :py:meth:`~.MesonPackage.root_mesonlists_dir` | Location of the | | :py:meth:`~.MesonBuilder.root_mesonlists_dir` | Location of the |
| | root MesonLists.txt| | | root MesonLists.txt|
+-----------------------------------------------+--------------------+ +-----------------------------------------------+--------------------+
| :py:meth:`~.MesonPackage.build_directory` | Directory where to | | :py:meth:`~.MesonBuilder.build_directory` | Directory where to |
| | build the package | | | build the package |
+-----------------------------------------------+--------------------+ +-----------------------------------------------+--------------------+
""" """
#: Phases of a Meson package phases = ("meson", "build", "install")
phases = ["meson", "build", "install"]
#: This attribute is used in UI queries that need to know the build #: Names associated with package methods in the old build-system format
#: system base class legacy_methods = ("meson_args", "check")
build_system_class = "MesonPackage"
#: Names associated with package attributes in the old build-system format
legacy_attributes = (
"build_targets",
"install_targets",
"build_time_test_callbacks",
"root_mesonlists_dir",
"std_meson_args",
"build_directory",
)
build_targets = [] # type: List[str] build_targets = [] # type: List[str]
install_targets = ["install"] install_targets = ["install"]
build_time_test_callbacks = ["check"] build_time_test_callbacks = ["check"]
variant(
"buildtype",
default="debugoptimized",
description="Meson build type",
values=("plain", "debug", "debugoptimized", "release", "minsize"),
)
variant(
"default_library",
default="shared",
values=("shared", "static"),
multi=True,
description="Build shared libs, static libs or both",
)
variant("strip", default=False, description="Strip targets on install")
depends_on("meson", type="build")
depends_on("ninja", type="build")
@property @property
def archive_files(self): def archive_files(self):
"""Files to archive for packages based on Meson""" """Files to archive for packages based on Meson"""
return [os.path.join(self.build_directory, "meson-logs/meson-log.txt")] return [os.path.join(self.build_directory, "meson-logs", "meson-log.txt")]
@property @property
def root_mesonlists_dir(self): def root_mesonlists_dir(self):
"""The relative path to the directory containing meson.build """Relative path to the directory containing meson.build
This path is relative to the root of the extracted tarball, This path is relative to the root of the extracted tarball,
not to the ``build_directory``. Defaults to the current directory. not to the ``build_directory``. Defaults to the current directory.
:return: directory containing meson.build
""" """
return self.stage.source_path return self.pkg.stage.source_path
@property @property
def std_meson_args(self): def std_meson_args(self):
"""Standard meson arguments provided as a property for """Standard meson arguments provided as a property for convenience
convenience of package writers of package writers.
:return: standard meson arguments
""" """
# standard Meson arguments # standard Meson arguments
std_meson_args = MesonPackage._std_args(self) std_meson_args = MesonBuilder.std_args(self.pkg)
std_meson_args += getattr(self, "meson_flag_args", []) std_meson_args += getattr(self, "meson_flag_args", [])
return std_meson_args return std_meson_args
@staticmethod @staticmethod
def _std_args(pkg): def std_args(pkg):
"""Computes the standard meson arguments for a generic package""" """Standard meson arguments for a generic package."""
try: try:
build_type = pkg.spec.variants["buildtype"].value build_type = pkg.spec.variants["buildtype"].value
except KeyError: except KeyError:
@ -119,44 +142,33 @@ def _std_args(pkg):
default_library = "shared" default_library = "shared"
args = [ args = [
"--prefix={0}".format(pkg.prefix), "-Dprefix={0}".format(pkg.prefix),
# If we do not specify libdir explicitly, Meson chooses something # If we do not specify libdir explicitly, Meson chooses something
# like lib/x86_64-linux-gnu, which causes problems when trying to # like lib/x86_64-linux-gnu, which causes problems when trying to
# find libraries and pkg-config files. # find libraries and pkg-config files.
# See https://github.com/mesonbuild/meson/issues/2197 # See https://github.com/mesonbuild/meson/issues/2197
"--libdir={0}".format(pkg.prefix.lib), "-Dlibdir={0}".format(pkg.prefix.lib),
"-Dbuildtype={0}".format(build_type), "-Dbuildtype={0}".format(build_type),
"-Dstrip={0}".format(strip), "-Dstrip={0}".format(strip),
"-Ddefault_library={0}".format(default_library), "-Ddefault_library={0}".format(default_library),
# Do not automatically download and install dependencies
"-Dwrap_mode=nodownload",
] ]
return args return args
def flags_to_build_system_args(self, flags):
"""Produces a list of all command line arguments to pass the specified
compiler flags to meson."""
# Has to be dynamic attribute due to caching
setattr(self, "meson_flag_args", [])
@property @property
def build_dirname(self): def build_dirname(self):
"""Returns the directory name to use when building the package """Returns the directory name to use when building the package."""
return "spack-build-{}".format(self.spec.dag_hash(7))
:return: name of the subdirectory for building the package
"""
return "spack-build-%s" % self.spec.dag_hash(7)
@property @property
def build_directory(self): def build_directory(self):
"""Returns the directory to use when building the package """Directory to use when building the package."""
return os.path.join(self.pkg.stage.path, self.build_dirname)
:return: directory where to build the package
"""
return os.path.join(self.stage.path, self.build_dirname)
def meson_args(self): def meson_args(self):
"""Produces a list containing all the arguments that must be passed to """List of arguments that must be passed to meson, except:
meson, except:
* ``--prefix`` * ``--prefix``
* ``--libdir`` * ``--libdir``
@ -165,40 +177,33 @@ def meson_args(self):
* ``--default_library`` * ``--default_library``
which will be set automatically. which will be set automatically.
:return: list of arguments for meson
""" """
return [] return []
def meson(self, spec, prefix): def meson(self, pkg, spec, prefix):
"""Runs ``meson`` in the build directory""" """Run ``meson`` in the build directory"""
options = [os.path.abspath(self.root_mesonlists_dir)] options = [os.path.abspath(self.root_mesonlists_dir)]
options += self.std_meson_args options += self.std_meson_args
options += self.meson_args() options += self.meson_args()
with working_dir(self.build_directory, create=True): with fs.working_dir(self.build_directory, create=True):
inspect.getmodule(self).meson(*options) inspect.getmodule(self.pkg).meson(*options)
def build(self, spec, prefix): def build(self, pkg, spec, prefix):
"""Make the build targets""" """Make the build targets"""
options = ["-v"] options = ["-v"]
options += self.build_targets options += self.build_targets
with working_dir(self.build_directory): with fs.working_dir(self.build_directory):
inspect.getmodule(self).ninja(*options) inspect.getmodule(self.pkg).ninja(*options)
def install(self, spec, prefix): def install(self, pkg, spec, prefix):
"""Make the install targets""" """Make the install targets"""
with working_dir(self.build_directory): with fs.working_dir(self.build_directory):
inspect.getmodule(self).ninja(*self.install_targets) inspect.getmodule(self.pkg).ninja(*self.install_targets)
run_after("build")(PackageBase._run_default_build_time_test_callbacks) spack.builder.run_after("build")(execute_build_time_tests)
def check(self): def check(self):
"""Searches the Meson-generated file for the target ``test`` """Search Meson-generated files for the target ``test`` and run it if found."""
and runs it if found. with fs.working_dir(self.build_directory):
"""
with working_dir(self.build_directory):
self._if_ninja_target_execute("test") self._if_ninja_target_execute("test")
self._if_ninja_target_execute("check") self._if_ninja_target_execute("check")
# Check that self.prefix is there after installation
run_after("install")(PackageBase.sanity_check_prefix)

View File

@ -0,0 +1,102 @@
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import inspect
from typing import List # novm
import llnl.util.filesystem as fs
import spack.builder
import spack.package_base
from spack.directives import build_system, conflicts
from ._checks import BaseBuilder
class NMakePackage(spack.package_base.PackageBase):
"""Specialized class for packages built using a Makefiles."""
#: This attribute is used in UI queries that need to know the build
#: system base class
build_system_class = "NmakePackage"
build_system("nmake")
conflicts("platform=linux", when="build_system=nmake")
conflicts("platform=darwin", when="build_system=nmake")
conflicts("platform=cray", when="build_system=nmake")
@spack.builder.builder("nmake")
class NMakeBuilder(BaseBuilder):
"""The NMake builder encodes the most common way of building software with
NMake on Windows. It has three phases that can be overridden, if need be:
1. :py:meth:`~.NMakeBuilder.edit`
2. :py:meth:`~.NMakeBuilder.build`
3. :py:meth:`~.NMakeBuilder.install`
It is usually necessary to override the :py:meth:`~.NMakeBuilder.edit`
phase (which is by default a no-op), while the other two have sensible defaults.
For a finer tuning you may override:
+--------------------------------------------+--------------------+
| **Method** | **Purpose** |
+============================================+====================+
| :py:attr:`~.NMakeBuilder.build_targets` | Specify ``nmake`` |
| | targets for the |
| | build phase |
+--------------------------------------------+--------------------+
| :py:attr:`~.NMakeBuilder.install_targets` | Specify ``nmake`` |
| | targets for the |
| | install phase |
+--------------------------------------------+--------------------+
| :py:meth:`~.NMakeBuilder.build_directory` | Directory where the|
| | Makefile is located|
+--------------------------------------------+--------------------+
"""
phases = ("edit", "build", "install")
#: Names associated with package methods in the old build-system format
legacy_methods = ("check", "installcheck")
#: Names associated with package attributes in the old build-system format
legacy_attributes = (
"build_targets",
"install_targets",
"build_time_test_callbacks",
"install_time_test_callbacks",
"build_directory",
)
#: Targets for ``make`` during the :py:meth:`~.NMakeBuilder.build` phase
build_targets = [] # type: List[str]
#: Targets for ``make`` during the :py:meth:`~.NMakeBuilder.install` phase
install_targets = ["install"]
#: Callback names for build-time test
build_time_test_callbacks = ["check"]
#: Callback names for install-time test
install_time_test_callbacks = ["installcheck"]
@property
def build_directory(self):
"""Return the directory containing the main Makefile."""
return self.pkg.stage.source_path
def edit(self, pkg, spec, prefix):
"""Edit the Makefile before calling make. The default is a no-op."""
pass
def build(self, pkg, spec, prefix):
"""Run "make" on the build targets specified by the builder."""
with fs.working_dir(self.build_directory):
inspect.getmodule(self.pkg).nmake(*self.build_targets)
def install(self, pkg, spec, prefix):
"""Run "make" on the install targets specified by the builder."""
with fs.working_dir(self.build_directory):
inspect.getmodule(self.pkg).nmake(*self.install_targets)
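
A minimal sketch of a package targeting this new builder (all names hypothetical; the conflicts above restrict it to Windows):

    from spack.package import *

    class WinTool(NMakePackage):
        """Hypothetical Windows package driven by an NMake Makefile."""

        def edit(self, spec, prefix):
            # point the checked-in Makefile at the Spack install prefix
            filter_file(r"^PREFIX=.*", "PREFIX={0}".format(prefix), "Makefile")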

View File

@ -2,51 +2,62 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details. # Spack Project Developers. See the top-level COPYRIGHT file for details.
# #
# SPDX-License-Identifier: (Apache-2.0 OR MIT) # SPDX-License-Identifier: (Apache-2.0 OR MIT)
import inspect import inspect
from spack.directives import extends import spack.builder
from spack.package_base import PackageBase, run_after import spack.package_base
from spack.directives import build_system, extends
from spack.multimethod import when
from ._checks import BaseBuilder
class OctavePackage(PackageBase): class OctavePackage(spack.package_base.PackageBase):
"""Specialized class for Octave packages. See """Specialized class for Octave packages. See
https://www.gnu.org/software/octave/doc/v4.2.0/Installing-and-Removing-Packages.html https://www.gnu.org/software/octave/doc/v4.2.0/Installing-and-Removing-Packages.html
for more information. for more information.
This class provides the following phases that can be overridden:
1. :py:meth:`~.OctavePackage.install`
""" """
# Default phases
phases = ["install"]
# To be used in UI queries that require to know which # To be used in UI queries that require to know which
# build-system class we are using # build-system class we are using
build_system_class = "OctavePackage" build_system_class = "OctavePackage"
#: Legacy buildsystem attribute used to deserialize and install old specs
legacy_buildsystem = "octave"
extends("octave") build_system("octave")
with when("build_system=octave"):
extends("octave")
@spack.builder.builder("octave")
class OctaveBuilder(BaseBuilder):
"""The octave builder provides the following phases that can be overridden:
1. :py:meth:`~.OctaveBuilder.install`
"""
phases = ("install",)
#: Names associated with package methods in the old build-system format
legacy_methods = ()
#: Names associated with package attributes in the old build-system format
legacy_attributes = ()
def install(self, pkg, spec, prefix):
"""Install the package from the archive file"""
inspect.getmodule(self.pkg).octave(
"--quiet",
"--norc",
"--built-in-docstrings-file=/dev/null",
"--texi-macros-file=/dev/null",
"--eval",
"pkg prefix %s; pkg install %s" % (prefix, self.pkg.stage.archive_file),
)
def setup_build_environment(self, env): def setup_build_environment(self, env):
# octave does not like those environment variables to be set: # octave does not like those environment variables to be set:
env.unset("CC") env.unset("CC")
env.unset("CXX") env.unset("CXX")
env.unset("FC") env.unset("FC")
def install(self, spec, prefix):
"""Install the package from the archive file"""
inspect.getmodule(self).octave(
"--quiet",
"--norc",
"--built-in-docstrings-file=/dev/null",
"--texi-macros-file=/dev/null",
"--eval",
"pkg prefix %s; pkg install %s" % (prefix, self.stage.archive_file),
)
# Testing
# Check that self.prefix is there after installation
run_after("install")(PackageBase.sanity_check_prefix)
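
Because the builder's ``install`` already drives ``pkg prefix ...; pkg install <archive>`` inside octave, a typical package body is metadata only. A minimal sketch (names and checksum are placeholders):

    from spack.package import *

    class OctaveExample(OctavePackage):
        """Hypothetical Octave extension; the default phases suffice."""

        homepage = "https://example.com/octave-example"
        url = "https://example.com/octave-example-1.0.tar.gz"
        version("1.0", sha256="0" * 64)  # placeholder checksum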

View File

@ -2,11 +2,7 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details. # Spack Project Developers. See the top-level COPYRIGHT file for details.
# #
# SPDX-License-Identifier: (Apache-2.0 OR MIT) # SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Common utilities for managing intel oneapi packages."""
"""Common utilities for managing intel oneapi packages.
"""
import getpass import getpass
import platform import platform
import shutil import shutil
@ -14,18 +10,17 @@
from llnl.util.filesystem import find_headers, find_libraries, join_path from llnl.util.filesystem import find_headers, find_libraries, join_path
from spack.package_base import Package
from spack.util.environment import EnvironmentModifications from spack.util.environment import EnvironmentModifications
from spack.util.executable import Executable from spack.util.executable import Executable
from .generic import Package
class IntelOneApiPackage(Package): class IntelOneApiPackage(Package):
"""Base class for Intel oneAPI packages.""" """Base class for Intel oneAPI packages."""
homepage = "https://software.intel.com/oneapi" homepage = "https://software.intel.com/oneapi"
phases = ["install"]
# oneAPI license does not allow mirroring outside of the # oneAPI license does not allow mirroring outside of the
# organization (e.g. University/Company). # organization (e.g. University/Company).
redistribute_source = False redistribute_source = False

View File

@ -2,73 +2,87 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details. # Spack Project Developers. See the top-level COPYRIGHT file for details.
# #
# SPDX-License-Identifier: (Apache-2.0 OR MIT) # SPDX-License-Identifier: (Apache-2.0 OR MIT)
import inspect import inspect
import os import os
from llnl.util.filesystem import filter_file from llnl.util.filesystem import filter_file
from spack.directives import extends import spack.builder
from spack.package_base import PackageBase, run_after import spack.package_base
from spack.directives import build_system, extends
from spack.package_base import PackageBase
from spack.util.executable import Executable from spack.util.executable import Executable
from ._checks import BaseBuilder, execute_build_time_tests
class PerlPackage(PackageBase): class PerlPackage(PackageBase):
"""Specialized class for packages that are built using Perl. """Specialized class for packages that are built using Perl."""
This class provides four phases that can be overridden if required: #: This attribute is used in UI queries that need to know the build
#: system base class
build_system_class = "PerlPackage"
#: Legacy buildsystem attribute used to deserialize and install old specs
legacy_buildsystem = "perl"
1. :py:meth:`~.PerlPackage.configure` build_system("perl")
2. :py:meth:`~.PerlPackage.build`
3. :py:meth:`~.PerlPackage.check` extends("perl", when="build_system=perl")
4. :py:meth:`~.PerlPackage.install`
@spack.builder.builder("perl")
class PerlBuilder(BaseBuilder):
"""The perl builder provides four phases that can be overridden, if required:
1. :py:meth:`~.PerlBuilder.configure`
2. :py:meth:`~.PerlBuilder.build`
3. :py:meth:`~.PerlBuilder.check`
4. :py:meth:`~.PerlBuilder.install`
The default methods use, in order of preference: The default methods use, in order of preference:
(1) Makefile.PL, (1) Makefile.PL,
(2) Build.PL. (2) Build.PL.
Some packages may need to override Some packages may need to override :py:meth:`~.PerlBuilder.configure_args`,
:py:meth:`~.PerlPackage.configure_args`, which produces a list of arguments for :py:meth:`~.PerlBuilder.configure`.
which produces a list of arguments for
:py:meth:`~.PerlPackage.configure`.
Arguments should not include the installation base directory. Arguments should not include the installation base directory.
""" """
#: Phases of a Perl package #: Phases of a Perl package
phases = ["configure", "build", "install"] phases = ("configure", "build", "install")
#: This attribute is used in UI queries that need to know the build #: Names associated with package methods in the old build-system format
#: system base class legacy_methods = ("configure_args", "check")
build_system_class = "PerlPackage"
#: Names associated with package attributes in the old build-system format
legacy_attributes = ()
#: Callback names for build-time test #: Callback names for build-time test
build_time_test_callbacks = ["check"] build_time_test_callbacks = ["check"]
extends("perl")
def configure_args(self): def configure_args(self):
"""Produces a list containing the arguments that must be passed to """List of arguments passed to :py:meth:`~.PerlBuilder.configure`.
:py:meth:`~.PerlPackage.configure`. Arguments should not include
the installation base directory, which is prepended automatically.
:return: list of arguments for Makefile.PL or Build.PL Arguments should not include the installation base directory, which
is prepended automatically.
""" """
return [] return []
def configure(self, spec, prefix): def configure(self, pkg, spec, prefix):
"""Runs Makefile.PL or Build.PL with arguments consisting of """Run Makefile.PL or Build.PL with arguments consisting of
an appropriate installation base directory followed by the an appropriate installation base directory followed by the
list returned by :py:meth:`~.PerlPackage.configure_args`. list returned by :py:meth:`~.PerlBuilder.configure_args`.
:raise RuntimeError: if neither Makefile.PL or Build.PL exist Raises:
RuntimeError: if neither Makefile.PL nor Build.PL exist
""" """
if os.path.isfile("Makefile.PL"): if os.path.isfile("Makefile.PL"):
self.build_method = "Makefile.PL" self.build_method = "Makefile.PL"
self.build_executable = inspect.getmodule(self).make self.build_executable = inspect.getmodule(self.pkg).make
elif os.path.isfile("Build.PL"): elif os.path.isfile("Build.PL"):
self.build_method = "Build.PL" self.build_method = "Build.PL"
self.build_executable = Executable(os.path.join(self.stage.source_path, "Build")) self.build_executable = Executable(os.path.join(self.pkg.stage.source_path, "Build"))
else: else:
raise RuntimeError("Unknown build_method for perl package") raise RuntimeError("Unknown build_method for perl package")
@ -78,33 +92,30 @@ def configure(self, spec, prefix):
options = ["Build.PL", "--install_base", prefix] options = ["Build.PL", "--install_base", prefix]
options += self.configure_args() options += self.configure_args()
inspect.getmodule(self).perl(*options) inspect.getmodule(self.pkg).perl(*options)
# It is possible that the shebang in the Build script that is created from # It is possible that the shebang in the Build script that is created from
# Build.PL may be too long causing the build to fail. Patching the shebang # Build.PL may be too long causing the build to fail. Patching the shebang
# does not happen until after install so set '/usr/bin/env perl' here in # does not happen until after install so set '/usr/bin/env perl' here in
# the Build script. # the Build script.
@run_after("configure") @spack.builder.run_after("configure")
def fix_shebang(self): def fix_shebang(self):
if self.build_method == "Build.PL": if self.build_method == "Build.PL":
pattern = "#!{0}".format(self.spec["perl"].command.path) pattern = "#!{0}".format(self.spec["perl"].command.path)
repl = "#!/usr/bin/env perl" repl = "#!/usr/bin/env perl"
filter_file(pattern, repl, "Build", backup=False) filter_file(pattern, repl, "Build", backup=False)
def build(self, spec, prefix): def build(self, pkg, spec, prefix):
"""Builds a Perl package.""" """Builds a Perl package."""
self.build_executable() self.build_executable()
# Ensure that tests run after build (if requested): # Ensure that tests run after build (if requested):
run_after("build")(PackageBase._run_default_build_time_test_callbacks) spack.builder.run_after("build")(execute_build_time_tests)
def check(self): def check(self):
"""Runs built-in tests of a Perl package.""" """Runs built-in tests of a Perl package."""
self.build_executable("test") self.build_executable("test")
def install(self, spec, prefix): def install(self, pkg, spec, prefix):
"""Installs a Perl package.""" """Installs a Perl package."""
self.build_executable("install") self.build_executable("install")
# Check that self.prefix is there after installation
run_after("install")(PackageBase.sanity_check_prefix)
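
On the package side the contract is unchanged: return extra Makefile.PL/Build.PL arguments from ``configure_args`` and let the builder prepend the installation base. A minimal sketch (name and argument hypothetical):

    from spack.package import *

    class PerlExample(PerlPackage):
        """Hypothetical CPAN distribution."""

        depends_on("expat")

        def configure_args(self):
            # the installation base directory is prepended automatically
            return ["EXPATLIBPATH={0}".format(self.spec["expat"].prefix.lib)]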

View File

@ -8,93 +8,23 @@
import shutil import shutil
from typing import Optional from typing import Optional
import llnl.util.filesystem as fs
import llnl.util.lang as lang
import llnl.util.tty as tty import llnl.util.tty as tty
from llnl.util.filesystem import (
filter_file,
find,
find_all_headers,
find_libraries,
is_nonsymlink_exe_with_shebang,
path_contains_subdirectory,
same_path,
working_dir,
)
from llnl.util.lang import classproperty, match_predicate
from spack.directives import depends_on, extends import spack.builder
import spack.multimethod
import spack.package_base
import spack.spec
from spack.directives import build_system, depends_on, extends
from spack.error import NoHeadersError, NoLibrariesError, SpecError from spack.error import NoHeadersError, NoLibrariesError, SpecError
from spack.package_base import PackageBase, run_after
from spack.version import Version from spack.version import Version
from ._checks import BaseBuilder, execute_install_time_tests
class PythonPackage(PackageBase):
"""Specialized class for packages that are built using pip."""
#: Package name, version, and extension on PyPI class PythonExtension(spack.package_base.PackageBase):
pypi = None # type: Optional[str] maintainers = ["adamjstewart"]
maintainers = ["adamjstewart", "pradyunsg"]
# Default phases
phases = ["install"]
# To be used in UI queries that require to know which
# build-system class we are using
build_system_class = "PythonPackage"
#: Callback names for install-time test
install_time_test_callbacks = ["test"]
extends("python")
depends_on("py-pip", type="build")
# FIXME: technically wheel is only needed when building from source, not when
# installing a downloaded wheel, but I don't want to add wheel as a dep to every
# package manually
depends_on("py-wheel", type="build")
py_namespace = None # type: Optional[str]
@staticmethod
def _std_args(cls):
return [
# Verbose
"-vvv",
# Disable prompting for input
"--no-input",
# Disable the cache
"--no-cache-dir",
# Don't check to see if pip is up-to-date
"--disable-pip-version-check",
# Install packages
"install",
# Don't install package dependencies
"--no-deps",
# Overwrite existing packages
"--ignore-installed",
# Use env vars like PYTHONPATH
"--no-build-isolation",
# Don't warn that prefix.bin is not in PATH
"--no-warn-script-location",
# Ignore the PyPI package index
"--no-index",
]
@classproperty
def homepage(cls):
if cls.pypi:
name = cls.pypi.split("/")[0]
return "https://pypi.org/project/" + name + "/"
@classproperty
def url(cls):
if cls.pypi:
return "https://files.pythonhosted.org/packages/source/" + cls.pypi[0] + "/" + cls.pypi
@classproperty
def list_url(cls):
if cls.pypi:
name = cls.pypi.split("/")[0]
return "https://pypi.org/simple/" + name + "/"
@property @property
def import_modules(self): def import_modules(self):
@ -124,7 +54,7 @@ def import_modules(self):
# Some Python libraries are packages: collections of modules # Some Python libraries are packages: collections of modules
# distributed in directories containing __init__.py files # distributed in directories containing __init__.py files
for path in find(root, "__init__.py", recursive=True): for path in fs.find(root, "__init__.py", recursive=True):
modules.append( modules.append(
path.replace(root + os.sep, "", 1) path.replace(root + os.sep, "", 1)
.replace(os.sep + "__init__.py", "") .replace(os.sep + "__init__.py", "")
@ -133,7 +63,7 @@ def import_modules(self):
# Some Python libraries are modules: individual *.py files # Some Python libraries are modules: individual *.py files
# found in the site-packages directory # found in the site-packages directory
for path in find(root, "*.py", recursive=False): for path in fs.find(root, "*.py", recursive=False):
modules.append( modules.append(
path.replace(root + os.sep, "", 1).replace(".py", "").replace("/", ".") path.replace(root + os.sep, "", 1).replace(".py", "").replace("/", ".")
) )
@ -160,6 +90,229 @@ def skip_modules(self):
""" """
return [] return []
def view_file_conflicts(self, view, merge_map):
"""Report all file conflicts, excepting special cases for python.
Specifically, this does not report errors for duplicate
__init__.py files for packages in the same namespace.
"""
conflicts = list(dst for src, dst in merge_map.items() if os.path.exists(dst))
if conflicts and self.py_namespace:
ext_map = view.extensions_layout.extension_map(self.extendee_spec)
namespaces = set(x.package.py_namespace for x in ext_map.values())
namespace_re = r"site-packages/{0}/__init__.py".format(self.py_namespace)
find_namespace = lang.match_predicate(namespace_re)
if self.py_namespace in namespaces:
conflicts = list(x for x in conflicts if not find_namespace(x))
return conflicts
def add_files_to_view(self, view, merge_map, skip_if_exists=True):
bin_dir = self.spec.prefix.bin
python_prefix = self.extendee_spec.prefix
python_is_external = self.extendee_spec.external
global_view = fs.same_path(python_prefix, view.get_projection_for_spec(self.spec))
for src, dst in merge_map.items():
if os.path.exists(dst):
continue
elif global_view or not fs.path_contains_subdirectory(src, bin_dir):
view.link(src, dst)
elif not os.path.islink(src):
shutil.copy2(src, dst)
is_script = fs.is_nonsymlink_exe_with_shebang(src)
if is_script and not python_is_external:
fs.filter_file(
python_prefix,
os.path.abspath(view.get_projection_for_spec(self.spec)),
dst,
)
else:
orig_link_target = os.path.realpath(src)
new_link_target = os.path.abspath(merge_map[orig_link_target])
view.link(new_link_target, dst)
def remove_files_from_view(self, view, merge_map):
ignore_namespace = False
if self.py_namespace:
ext_map = view.extensions_layout.extension_map(self.extendee_spec)
remaining_namespaces = set(
spec.package.py_namespace for name, spec in ext_map.items() if name != self.name
)
if self.py_namespace in remaining_namespaces:
namespace_init = lang.match_predicate(
r"site-packages/{0}/__init__.py".format(self.py_namespace)
)
ignore_namespace = True
bin_dir = self.spec.prefix.bin
global_view = self.extendee_spec.prefix == view.get_projection_for_spec(self.spec)
to_remove = []
for src, dst in merge_map.items():
if ignore_namespace and namespace_init(dst):
continue
if global_view or not fs.path_contains_subdirectory(src, bin_dir):
to_remove.append(dst)
else:
os.remove(dst)
view.remove_files(to_remove)
def test(self):
"""Attempts to import modules of the installed package."""
# Make sure we are importing the installed modules,
# not the ones in the source directory
for module in self.import_modules:
self.run_test(
inspect.getmodule(self).python.path,
["-c", "import {0}".format(module)],
purpose="checking import of {0}".format(module),
work_dir="spack-test",
)
class PythonPackage(PythonExtension):
"""Specialized class for packages that are built using pip."""
#: Package name, version, and extension on PyPI
pypi = None # type: Optional[str]
maintainers = ["adamjstewart", "pradyunsg"]
# To be used in UI queries that require to know which
# build-system class we are using
build_system_class = "PythonPackage"
#: Legacy buildsystem attribute used to deserialize and install old specs
legacy_buildsystem = "python_pip"
#: Callback names for install-time test
install_time_test_callbacks = ["test"]
build_system("python_pip")
with spack.multimethod.when("build_system=python_pip"):
extends("python")
depends_on("py-pip", type="build")
# FIXME: technically wheel is only needed when building from source, not when
# installing a downloaded wheel, but I don't want to add wheel as a dep to every
# package manually
depends_on("py-wheel", type="build")
py_namespace = None # type: Optional[str]
@lang.classproperty
def homepage(cls):
if cls.pypi:
name = cls.pypi.split("/")[0]
return "https://pypi.org/project/" + name + "/"
@lang.classproperty
def url(cls):
if cls.pypi:
return "https://files.pythonhosted.org/packages/source/" + cls.pypi[0] + "/" + cls.pypi
@lang.classproperty
def list_url(cls):
if cls.pypi:
name = cls.pypi.split("/")[0]
return "https://pypi.org/simple/" + name + "/"
def update_external_dependencies(self):
"""
Ensure all external python packages have a python dependency.
If another package in the DAG depends on python, we use that
python for the dependency of the external. If not, we assume
that the external PythonPackage is installed into the same
directory as the python it depends on.
"""
# TODO: Include this in the solve, rather than instantiating post-concretization
if "python" not in self.spec:
if "python" in self.spec.root:
python = self.spec.root["python"]
else:
python = spack.spec.Spec("python")
repo = spack.repo.path.repo_for_pkg(python)
python.namespace = repo.namespace
python._mark_concrete()
python.external_path = self.prefix
self.spec.add_dependency_edge(python, ("build", "link", "run"))
@property
def headers(self):
"""Discover header files in platlib."""
# Headers may be in either location
include = self.prefix.join(self.spec["python"].package.include)
platlib = self.prefix.join(self.spec["python"].package.platlib)
headers = fs.find_all_headers(include) + fs.find_all_headers(platlib)
if headers:
return headers
msg = "Unable to locate {} headers in {} or {}"
raise NoHeadersError(msg.format(self.spec.name, include, platlib))
@property
def libs(self):
"""Discover libraries in platlib."""
# Remove py- prefix in package name
library = "lib" + self.spec.name[3:].replace("-", "?")
root = self.prefix.join(self.spec["python"].package.platlib)
for shared in [True, False]:
libs = fs.find_libraries(library, root, shared=shared, recursive=True)
if libs:
return libs
msg = "Unable to recursively locate {} libraries in {}"
raise NoLibrariesError(msg.format(self.spec.name, root))
@spack.builder.builder("python_pip")
class PythonPipBuilder(BaseBuilder):
phases = ("install",)
#: Names associated with package methods in the old build-system format
legacy_methods = ("test",)
#: Same as legacy_methods, but the signature is different
legacy_long_methods = ("install_options", "global_options", "config_settings")
#: Names associated with package attributes in the old build-system format
legacy_attributes = ("build_directory", "install_time_test_callbacks")
#: Callback names for install-time test
install_time_test_callbacks = ["test"]
@staticmethod
def std_args(cls):
return [
# Verbose
"-vvv",
# Disable prompting for input
"--no-input",
# Disable the cache
"--no-cache-dir",
# Don't check to see if pip is up-to-date
"--disable-pip-version-check",
# Install packages
"install",
# Don't install package dependencies
"--no-deps",
# Overwrite existing packages
"--ignore-installed",
# Use env vars like PYTHONPATH
"--no-build-isolation",
# Don't warn that prefix.bin is not in PATH
"--no-warn-script-location",
# Ignore the PyPI package index
"--no-index",
]
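# Illustrative note (hypothetical prefix, not taken from the sources): combined
# with the pieces appended in install() below, the flags above produce an
# invocation along the lines of
#
#   pip -vvv --no-input --no-cache-dir --disable-pip-version-check \
#       install --no-deps --ignore-installed --no-build-isolation \
#       --no-warn-script-location --no-index --prefix=/opt/prefix .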
@property @property
def build_directory(self): def build_directory(self):
"""The root directory of the Python package. """The root directory of the Python package.
@ -170,11 +323,10 @@ def build_directory(self):
* ``setup.cfg`` * ``setup.cfg``
* ``setup.py`` * ``setup.py``
""" """
return self.stage.source_path return self.pkg.stage.source_path
def config_settings(self, spec, prefix): def config_settings(self, spec, prefix):
"""Configuration settings to be passed to the PEP 517 build backend. """Configuration settings to be passed to the PEP 517 build backend.
Requires pip 22.1+, which requires Python 3.7+. Requires pip 22.1+, which requires Python 3.7+.
Args: Args:
@ -211,10 +363,10 @@ def global_options(self, spec, prefix):
""" """
return [] return []
def install(self, spec, prefix): def install(self, pkg, spec, prefix):
"""Install everything from build directory.""" """Install everything from build directory."""
args = PythonPackage._std_args(self) + ["--prefix=" + prefix] args = PythonPipBuilder.std_args(pkg) + ["--prefix=" + prefix]
for key, value in self.config_settings(spec, prefix).items(): for key, value in self.config_settings(spec, prefix).items():
if spec["py-pip"].version < Version("22.1"): if spec["py-pip"].version < Version("22.1"):
@ -223,137 +375,21 @@ def install(self, spec, prefix):
"pip 22.1+. Add the following line to the package to fix this:\n\n" "pip 22.1+. Add the following line to the package to fix this:\n\n"
' depends_on("py-pip@22.1:", type="build")'.format(spec.name) ' depends_on("py-pip@22.1:", type="build")'.format(spec.name)
) )
args.append("--config-settings={}={}".format(key, value)) args.append("--config-settings={}={}".format(key, value))
for option in self.install_options(spec, prefix): for option in self.install_options(spec, prefix):
args.append("--install-option=" + option) args.append("--install-option=" + option)
for option in self.global_options(spec, prefix): for option in self.global_options(spec, prefix):
args.append("--global-option=" + option) args.append("--global-option=" + option)
if self.stage.archive_file and self.stage.archive_file.endswith(".whl"): if pkg.stage.archive_file and pkg.stage.archive_file.endswith(".whl"):
args.append(self.stage.archive_file) args.append(pkg.stage.archive_file)
else: else:
args.append(".") args.append(".")
pip = inspect.getmodule(self).pip pip = inspect.getmodule(pkg).pip
with working_dir(self.build_directory): with fs.working_dir(self.build_directory):
pip(*args) pip(*args)
@property spack.builder.run_after("install")(execute_install_time_tests)
def headers(self):
"""Discover header files in platlib."""
# Headers may be in either location
include = self.prefix.join(self.spec["python"].package.include)
platlib = self.prefix.join(self.spec["python"].package.platlib)
headers = find_all_headers(include) + find_all_headers(platlib)
if headers:
return headers
msg = "Unable to locate {} headers in {} or {}"
raise NoHeadersError(msg.format(self.spec.name, include, platlib))
@property
def libs(self):
"""Discover libraries in platlib."""
# Remove py- prefix in package name
library = "lib" + self.spec.name[3:].replace("-", "?")
root = self.prefix.join(self.spec["python"].package.platlib)
for shared in [True, False]:
libs = find_libraries(library, root, shared=shared, recursive=True)
if libs:
return libs
msg = "Unable to recursively locate {} libraries in {}"
raise NoLibrariesError(msg.format(self.spec.name, root))
# Testing
def test(self):
"""Attempts to import modules of the installed package."""
# Make sure we are importing the installed modules,
# not the ones in the source directory
for module in self.import_modules:
self.run_test(
inspect.getmodule(self).python.path,
["-c", "import {0}".format(module)],
purpose="checking import of {0}".format(module),
work_dir="spack-test",
)
run_after("install")(PackageBase._run_default_install_time_test_callbacks)
# Check that self.prefix is there after installation
run_after("install")(PackageBase.sanity_check_prefix)
def view_file_conflicts(self, view, merge_map):
"""Report all file conflicts, excepting special cases for python.
Specifically, this does not report errors for duplicate
__init__.py files for packages in the same namespace.
"""
conflicts = list(dst for src, dst in merge_map.items() if os.path.exists(dst))
if conflicts and self.py_namespace:
ext_map = view.extensions_layout.extension_map(self.extendee_spec)
namespaces = set(x.package.py_namespace for x in ext_map.values())
namespace_re = r"site-packages/{0}/__init__.py".format(self.py_namespace)
find_namespace = match_predicate(namespace_re)
if self.py_namespace in namespaces:
conflicts = list(x for x in conflicts if not find_namespace(x))
return conflicts
def add_files_to_view(self, view, merge_map, skip_if_exists=True):
bin_dir = self.spec.prefix.bin
python_prefix = self.extendee_spec.prefix
python_is_external = self.extendee_spec.external
global_view = same_path(python_prefix, view.get_projection_for_spec(self.spec))
for src, dst in merge_map.items():
if os.path.exists(dst):
continue
elif global_view or not path_contains_subdirectory(src, bin_dir):
view.link(src, dst)
elif not os.path.islink(src):
shutil.copy2(src, dst)
is_script = is_nonsymlink_exe_with_shebang(src)
if is_script and not python_is_external:
filter_file(
python_prefix,
os.path.abspath(view.get_projection_for_spec(self.spec)),
dst,
)
else:
orig_link_target = os.path.realpath(src)
new_link_target = os.path.abspath(merge_map[orig_link_target])
view.link(new_link_target, dst)
def remove_files_from_view(self, view, merge_map):
ignore_namespace = False
if self.py_namespace:
ext_map = view.extensions_layout.extension_map(self.extendee_spec)
remaining_namespaces = set(
spec.package.py_namespace for name, spec in ext_map.items() if name != self.name
)
if self.py_namespace in remaining_namespaces:
namespace_init = match_predicate(
r"site-packages/{0}/__init__.py".format(self.py_namespace)
)
ignore_namespace = True
bin_dir = self.spec.prefix.bin
global_view = self.extendee_spec.prefix == view.get_projection_for_spec(self.spec)
to_remove = []
for src, dst in merge_map.items():
if ignore_namespace and namespace_init(dst):
continue
if global_view or not path_contains_subdirectory(src, bin_dir):
to_remove.append(dst)
else:
os.remove(dst)
view.remove_files(to_remove)
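
As a minimal sketch of the resulting package format (the project name, version,
checksum, and dependency below are hypothetical, used only to illustrate the
directives and classproperties above):

from spack.package import *


class PyExample(PythonPackage):
    """Hypothetical PyPI project, built with pip through PythonPipBuilder."""

    # Expanded by the classproperties above into homepage, url, and list_url
    pypi = "example/example-1.0.tar.gz"

    # Placeholder checksum, for illustration only
    version("1.0", sha256="0000000000000000000000000000000000000000000000000000000000000000")

    # py-pip and py-wheel build dependencies come from the base class
    depends_on("py-setuptools", type="build")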

View File

@ -2,82 +2,85 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details. # Spack Project Developers. See the top-level COPYRIGHT file for details.
# #
# SPDX-License-Identifier: (Apache-2.0 OR MIT) # SPDX-License-Identifier: (Apache-2.0 OR MIT)
import inspect import inspect
from llnl.util.filesystem import working_dir from llnl.util.filesystem import working_dir
from spack.directives import depends_on import spack.builder
from spack.package_base import PackageBase, run_after import spack.package_base
from spack.directives import build_system, depends_on
from ._checks import BaseBuilder, execute_build_time_tests
class QMakePackage(PackageBase): class QMakePackage(spack.package_base.PackageBase):
"""Specialized class for packages built using qmake. """Specialized class for packages built using qmake.
For more information on the qmake build system, see: For more information on the qmake build system, see:
http://doc.qt.io/qt-5/qmake-manual.html http://doc.qt.io/qt-5/qmake-manual.html
This class provides three phases that can be overridden:
1. :py:meth:`~.QMakePackage.qmake`
2. :py:meth:`~.QMakePackage.build`
3. :py:meth:`~.QMakePackage.install`
They all have sensible defaults and for many packages the only thing
necessary will be to override :py:meth:`~.QMakePackage.qmake_args`.
""" """
#: Phases of a qmake package
phases = ["qmake", "build", "install"]
#: This attribute is used in UI queries that need to know the build #: This attribute is used in UI queries that need to know the build
#: system base class #: system base class
build_system_class = "QMakePackage" build_system_class = "QMakePackage"
#: Legacy buildsystem attribute used to deserialize and install old specs
legacy_buildsystem = "qmake"
build_system("qmake")
depends_on("qt", type="build", when="build_system=qmake")
@spack.builder.builder("qmake")
class QMakeBuilder(BaseBuilder):
"""The qmake builder provides three phases that can be overridden:
1. :py:meth:`~.QMakeBuilder.qmake`
2. :py:meth:`~.QMakeBuilder.build`
3. :py:meth:`~.QMakeBuilder.install`
They all have sensible defaults and for many packages the only thing
necessary will be to override :py:meth:`~.QMakeBuilder.qmake_args`.
"""
phases = ("qmake", "build", "install")
#: Names associated with package methods in the old build-system format
legacy_methods = ("qmake_args", "check")
#: Names associated with package attributes in the old build-system format
legacy_attributes = ("build_directory", "build_time_test_callbacks")
#: Callback names for build-time test #: Callback names for build-time test
build_time_test_callbacks = ["check"] build_time_test_callbacks = ["check"]
depends_on("qt", type="build")
@property @property
def build_directory(self): def build_directory(self):
"""The directory containing the ``*.pro`` file.""" """The directory containing the ``*.pro`` file."""
return self.stage.source_path return self.stage.source_path
def qmake_args(self): def qmake_args(self):
"""Produces a list containing all the arguments that must be passed to """List of arguments passed to qmake."""
qmake
"""
return [] return []
def qmake(self, spec, prefix): def qmake(self, pkg, spec, prefix):
"""Run ``qmake`` to configure the project and generate a Makefile.""" """Run ``qmake`` to configure the project and generate a Makefile."""
with working_dir(self.build_directory): with working_dir(self.build_directory):
inspect.getmodule(self).qmake(*self.qmake_args()) inspect.getmodule(self.pkg).qmake(*self.qmake_args())
def build(self, spec, prefix): def build(self, pkg, spec, prefix):
"""Make the build targets""" """Make the build targets"""
with working_dir(self.build_directory): with working_dir(self.build_directory):
inspect.getmodule(self).make() inspect.getmodule(self.pkg).make()
def install(self, spec, prefix): def install(self, pkg, spec, prefix):
"""Make the install targets""" """Make the install targets"""
with working_dir(self.build_directory): with working_dir(self.build_directory):
inspect.getmodule(self).make("install") inspect.getmodule(self.pkg).make("install")
# Tests
def check(self): def check(self):
"""Searches the Makefile for a ``check:`` target and runs it if found.""" """Search the Makefile for a ``check:`` target and runs it if found."""
with working_dir(self.build_directory): with working_dir(self.build_directory):
self._if_make_target_execute("check") self._if_make_target_execute("check")
run_after("build")(PackageBase._run_default_build_time_test_callbacks) spack.builder.run_after("build")(execute_build_time_tests)
# Check that self.prefix is there after installation
run_after("install")(PackageBase.sanity_check_prefix)

View File

@ -3,30 +3,64 @@
# #
# SPDX-License-Identifier: (Apache-2.0 OR MIT) # SPDX-License-Identifier: (Apache-2.0 OR MIT)
import inspect import inspect
from typing import Optional from typing import Optional, Tuple
import llnl.util.lang as lang import llnl.util.lang as lang
from spack.directives import extends from spack.directives import extends
from spack.package_base import PackageBase, run_after
from .generic import GenericBuilder, Package
class RPackage(PackageBase): class RBuilder(GenericBuilder):
"""The R builder provides a single phase that can be overridden:
1. :py:meth:`~.RBuilder.install`
It has sensible defaults, and for many packages the only thing
necessary will be to add dependencies.
"""
#: Names associated with package methods in the old build-system format
legacy_methods = (
"configure_args",
"configure_vars",
) + GenericBuilder.legacy_methods # type: Tuple[str, ...]
def configure_args(self):
"""Arguments to pass to install via ``--configure-args``."""
return []
def configure_vars(self):
"""Arguments to pass to install via ``--configure-vars``."""
return []
def install(self, pkg, spec, prefix):
"""Installs an R package."""
config_args = self.configure_args()
config_vars = self.configure_vars()
args = ["--vanilla", "CMD", "INSTALL"]
if config_args:
args.append("--configure-args={0}".format(" ".join(config_args)))
if config_vars:
args.append("--configure-vars={0}".format(" ".join(config_vars)))
args.extend(["--library={0}".format(self.pkg.module.r_lib_dir), self.stage.source_path])
inspect.getmodule(self.pkg).R(*args)
class RPackage(Package):
"""Specialized class for packages that are built using R. """Specialized class for packages that are built using R.
For more information on the R build system, see: For more information on the R build system, see:
https://stat.ethz.ch/R-manual/R-devel/library/utils/html/INSTALL.html https://stat.ethz.ch/R-manual/R-devel/library/utils/html/INSTALL.html
This class provides a single phase that can be overridden:
1. :py:meth:`~.RPackage.install`
It has sensible defaults, and for many packages the only thing
necessary will be to add dependencies
""" """
phases = ["install"]
# package attributes that can be expanded to set the homepage, url, # package attributes that can be expanded to set the homepage, url,
# list_url, and git values # list_url, and git values
# For CRAN packages # For CRAN packages
@ -35,6 +69,8 @@ class RPackage(PackageBase):
# For Bioconductor packages # For Bioconductor packages
bioc = None # type: Optional[str] bioc = None # type: Optional[str]
GenericBuilder = RBuilder
maintainers = ["glennpj"] maintainers = ["glennpj"]
#: This attribute is used in UI queries that need to know the build #: This attribute is used in UI queries that need to know the build
@ -70,32 +106,3 @@ def list_url(cls):
def git(self): def git(self):
if self.bioc: if self.bioc:
return "https://git.bioconductor.org/packages/" + self.bioc return "https://git.bioconductor.org/packages/" + self.bioc
def configure_args(self):
"""Arguments to pass to install via ``--configure-args``."""
return []
def configure_vars(self):
"""Arguments to pass to install via ``--configure-vars``."""
return []
def install(self, spec, prefix):
"""Installs an R package."""
config_args = self.configure_args()
config_vars = self.configure_vars()
args = ["--vanilla", "CMD", "INSTALL"]
if config_args:
args.append("--configure-args={0}".format(" ".join(config_args)))
if config_vars:
args.append("--configure-vars={0}".format(" ".join(config_vars)))
args.extend(["--library={0}".format(self.module.r_lib_dir), self.stage.source_path])
inspect.getmodule(self).R(*args)
# Check that self.prefix is there after installation
run_after("install")(PackageBase.sanity_check_prefix)

View File

@ -3,14 +3,15 @@
# #
# SPDX-License-Identifier: (Apache-2.0 OR MIT) # SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os import os
from typing import Optional from typing import Optional, Tuple
import llnl.util.filesystem as fs
import llnl.util.lang as lang import llnl.util.lang as lang
import llnl.util.tty as tty import llnl.util.tty as tty
from llnl.util.filesystem import working_dir
import spack.builder
from spack.build_environment import SPACK_NO_PARALLEL_MAKE, determine_number_of_jobs from spack.build_environment import SPACK_NO_PARALLEL_MAKE, determine_number_of_jobs
from spack.directives import extends from spack.directives import build_system, extends
from spack.package_base import PackageBase from spack.package_base import PackageBase
from spack.util.environment import env_flag from spack.util.environment import env_flag
from spack.util.executable import Executable, ProcessError from spack.util.executable import Executable, ProcessError
@ -19,34 +20,52 @@
class RacketPackage(PackageBase): class RacketPackage(PackageBase):
"""Specialized class for packages that are built using Racket's """Specialized class for packages that are built using Racket's
`raco pkg install` and `raco setup` commands. `raco pkg install` and `raco setup` commands.
This class provides the following phases that can be overridden:
* install
* setup
""" """
#: Package name, version, and extension on PyPI #: Package name, version, and extension on PyPI
maintainers = ["elfprince13"] maintainers = ["elfprince13"]
# Default phases
phases = ["install"]
# To be used in UI queries that require to know which # To be used in UI queries that require to know which
# build-system class we are using # build-system class we are using
build_system_class = "RacketPackage" build_system_class = "RacketPackage"
#: Legacy buildsystem attribute used to deserialize and install old specs
legacy_buildsystem = "racket"
extends("racket") build_system("racket")
extends("racket", when="build_system=racket")
pkgs = False
subdirectory = None # type: Optional[str]
racket_name = None # type: Optional[str] racket_name = None # type: Optional[str]
parallel = True parallel = True
@lang.classproperty @lang.classproperty
def homepage(cls): def homepage(cls):
if cls.pkgs: if cls.racket_name:
return "https://pkgs.racket-lang.org/package/{0}".format(cls.racket_name) return "https://pkgs.racket-lang.org/package/{0}".format(cls.racket_name)
return None
@spack.builder.builder("racket")
class RacketBuilder(spack.builder.Builder):
"""The Racket builder provides an ``install`` phase that can be overridden."""
phases = ("install",)
#: Names associated with package methods in the old build-system format
legacy_methods = tuple() # type: Tuple[str, ...]
#: Names associated with package attributes in the old build-system format
legacy_attributes = ("build_directory", "build_time_test_callbacks", "subdirectory")
#: Callback names for build-time test
build_time_test_callbacks = ["check"]
racket_name = None # type: Optional[str]
@property
def subdirectory(self):
if self.racket_name:
return "pkgs/{0}".format(self.pkg.racket_name)
return None
@property @property
def build_directory(self): def build_directory(self):
@ -55,25 +74,25 @@ def build_directory(self):
ret = os.path.join(ret, self.subdirectory) ret = os.path.join(ret, self.subdirectory)
return ret return ret
def install(self, spec, prefix): def install(self, pkg, spec, prefix):
"""Install everything from build directory.""" """Install everything from build directory."""
raco = Executable("raco") raco = Executable("raco")
with working_dir(self.build_directory): with fs.working_dir(self.build_directory):
allow_parallel = self.parallel and (not env_flag(SPACK_NO_PARALLEL_MAKE)) parallel = self.pkg.parallel and (not env_flag(SPACK_NO_PARALLEL_MAKE))
args = [ args = [
"pkg", "pkg",
"install", "install",
"-t", "-t",
"dir", "dir",
"-n", "-n",
self.racket_name, self.pkg.racket_name,
"--deps", "--deps",
"fail", "fail",
"--ignore-implies", "--ignore-implies",
"--copy", "--copy",
"-i", "-i",
"-j", "-j",
str(determine_number_of_jobs(allow_parallel)), str(determine_number_of_jobs(parallel)),
"--", "--",
os.getcwd(), os.getcwd(),
] ]
@ -82,9 +101,8 @@ def install(self, spec, prefix):
except ProcessError: except ProcessError:
args.insert(-2, "--skip-installed") args.insert(-2, "--skip-installed")
raco(*args) raco(*args)
tty.warn( msg = (
( "Racket package {0} was already installed, uninstalling via "
"Racket package {0} was already installed, uninstalling via " "Spack may make someone unhappy!"
"Spack may make someone unhappy!"
).format(self.racket_name)
) )
tty.warn(msg.format(self.pkg.racket_name))
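
Under the new layout a Racket package needs little beyond its metadata;
``racket_name`` drives both the homepage and the build subdirectory. A sketch
with a hypothetical name:

from spack.package import *


class RktExample(RacketPackage):
    """Hypothetical Racket package installed with raco."""

    racket_name = "example"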

View File

@ -2,35 +2,49 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details. # Spack Project Developers. See the top-level COPYRIGHT file for details.
# #
# SPDX-License-Identifier: (Apache-2.0 OR MIT) # SPDX-License-Identifier: (Apache-2.0 OR MIT)
import glob import glob
import inspect import inspect
from spack.directives import extends import spack.builder
from spack.package_base import PackageBase, run_after import spack.package_base
from spack.directives import build_system, extends
from ._checks import BaseBuilder
class RubyPackage(PackageBase): class RubyPackage(spack.package_base.PackageBase):
"""Specialized class for building Ruby gems. """Specialized class for building Ruby gems."""
This class provides two phases that can be overridden if required:
#. :py:meth:`~.RubyPackage.build`
#. :py:meth:`~.RubyPackage.install`
"""
maintainers = ["Kerilk"] maintainers = ["Kerilk"]
#: Phases of a Ruby package
phases = ["build", "install"]
#: This attribute is used in UI queries that need to know the build #: This attribute is used in UI queries that need to know the build
#: system base class #: system base class
build_system_class = "RubyPackage" build_system_class = "RubyPackage"
#: Legacy buildsystem attribute used to deserialize and install old specs
legacy_buildsystem = "ruby"
extends("ruby") build_system("ruby")
def build(self, spec, prefix): extends("ruby", when="build_system=ruby")
@spack.builder.builder("ruby")
class RubyBuilder(BaseBuilder):
"""The Ruby builder provides two phases that can be overridden if required:
#. :py:meth:`~.RubyBuilder.build`
#. :py:meth:`~.RubyBuilder.install`
"""
phases = ("build", "install")
#: Names associated with package methods in the old build-system format
legacy_methods = ()
#: Names associated with package attributes in the old build-system format
legacy_attributes = ()
def build(self, pkg, spec, prefix):
"""Build a Ruby gem.""" """Build a Ruby gem."""
# ruby-rake provides both rake.gemspec and Rakefile, but only # ruby-rake provides both rake.gemspec and Rakefile, but only
@ -38,15 +52,15 @@ def build(self, spec, prefix):
gemspecs = glob.glob("*.gemspec") gemspecs = glob.glob("*.gemspec")
rakefiles = glob.glob("Rakefile") rakefiles = glob.glob("Rakefile")
if gemspecs: if gemspecs:
inspect.getmodule(self).gem("build", "--norc", gemspecs[0]) inspect.getmodule(self.pkg).gem("build", "--norc", gemspecs[0])
elif rakefiles: elif rakefiles:
jobs = inspect.getmodule(self).make_jobs jobs = inspect.getmodule(self.pkg).make_jobs
inspect.getmodule(self).rake("package", "-j{0}".format(jobs)) inspect.getmodule(self.pkg).rake("package", "-j{0}".format(jobs))
else: else:
# Some Ruby packages only ship `*.gem` files, so nothing to build # Some Ruby packages only ship `*.gem` files, so nothing to build
pass pass
def install(self, spec, prefix): def install(self, pkg, spec, prefix):
"""Install a Ruby gem. """Install a Ruby gem.
The ruby package sets ``GEM_HOME`` to tell gem where to install to.""" The ruby package sets ``GEM_HOME`` to tell gem where to install to."""
@ -56,9 +70,6 @@ def install(self, spec, prefix):
# if --install-dir is not used, GEM_PATH is deleted from the # if --install-dir is not used, GEM_PATH is deleted from the
# environment, and Gems required to build native extensions will # environment, and Gems required to build native extensions will
# not be found. Those extensions are built during `gem install`. # not be found. Those extensions are built during `gem install`.
inspect.getmodule(self).gem( inspect.getmodule(self.pkg).gem(
"install", "--norc", "--ignore-dependencies", "--install-dir", prefix, gems[0] "install", "--norc", "--ignore-dependencies", "--install-dir", prefix, gems[0]
) )
# Check that self.prefix is there after installation
run_after("install")(PackageBase.sanity_check_prefix)

View File

@ -2,63 +2,79 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details. # Spack Project Developers. See the top-level COPYRIGHT file for details.
# #
# SPDX-License-Identifier: (Apache-2.0 OR MIT) # SPDX-License-Identifier: (Apache-2.0 OR MIT)
import inspect import inspect
from spack.directives import depends_on import spack.builder
from spack.package_base import PackageBase, run_after import spack.package_base
from spack.directives import build_system, depends_on
from ._checks import BaseBuilder, execute_build_time_tests
class SConsPackage(PackageBase): class SConsPackage(spack.package_base.PackageBase):
"""Specialized class for packages built using SCons. """Specialized class for packages built using SCons.
See http://scons.org/documentation.html for more information. See http://scons.org/documentation.html for more information.
This class provides the following phases that can be overridden:
1. :py:meth:`~.SConsPackage.build`
2. :py:meth:`~.SConsPackage.install`
Packages that use SCons as a build system are less uniform than packages
that use other build systems. Developers can add custom subcommands or
variables that control the build. You will likely need to override
:py:meth:`~.SConsPackage.build_args` to pass the appropriate variables.
""" """
#: Phases of a SCons package
phases = ["build", "install"]
#: To be used in UI queries that require to know which #: To be used in UI queries that require to know which
#: build-system class we are using #: build-system class we are using
build_system_class = "SConsPackage" build_system_class = "SConsPackage"
#: Legacy buildsystem attribute used to deserialize and install old specs
legacy_buildsystem = "scons"
build_system("scons")
depends_on("scons", type="build", when="build_system=scons")
@spack.builder.builder("scons")
class SConsBuilder(BaseBuilder):
"""The Scons builder provides the following phases that can be overridden:
1. :py:meth:`~.SConsBuilder.build`
2. :py:meth:`~.SConsBuilder.install`
Packages that use SCons as a build system are less uniform than packages that use
other build systems. Developers can add custom subcommands or variables that
control the build. You will likely need to override
:py:meth:`~.SConsBuilder.build_args` to pass the appropriate variables.
"""
#: Phases of a SCons package
phases = ("build", "install")
#: Names associated with package methods in the old build-system format
legacy_methods = ("install_args", "build_test")
#: Same as legacy_methods, but the signature is different
legacy_long_methods = ("build_args",)
#: Names associated with package attributes in the old build-system format
legacy_attributes = ("build_time_test_callbacks",)
#: Callback names for build-time test #: Callback names for build-time test
build_time_test_callbacks = ["build_test"] build_time_test_callbacks = ["build_test"]
depends_on("scons", type="build")
def build_args(self, spec, prefix): def build_args(self, spec, prefix):
"""Arguments to pass to build.""" """Arguments to pass to build."""
return [] return []
def build(self, spec, prefix): def build(self, pkg, spec, prefix):
"""Build the package.""" """Build the package."""
args = self.build_args(spec, prefix) args = self.build_args(spec, prefix)
inspect.getmodule(self.pkg).scons(*args)
inspect.getmodule(self).scons(*args) def install_args(self):
def install_args(self, spec, prefix):
"""Arguments to pass to install.""" """Arguments to pass to install."""
return [] return []
def install(self, spec, prefix): def install(self, pkg, spec, prefix):
"""Install the package.""" """Install the package."""
args = self.install_args(spec, prefix) args = self.install_args()
inspect.getmodule(self).scons("install", *args) inspect.getmodule(self.pkg).scons("install", *args)
# Testing
def build_test(self): def build_test(self):
"""Run unit tests after build. """Run unit tests after build.
@ -68,7 +84,4 @@ def build_test(self):
""" """
pass pass
run_after("build")(PackageBase._run_default_build_time_test_callbacks) spack.builder.run_after("build")(execute_build_time_tests)
# Check that self.prefix is there after installation
run_after("install")(PackageBase.sanity_check_prefix)

View File

@ -2,7 +2,6 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details. # Spack Project Developers. See the top-level COPYRIGHT file for details.
# #
# SPDX-License-Identifier: (Apache-2.0 OR MIT) # SPDX-License-Identifier: (Apache-2.0 OR MIT)
import inspect import inspect
import os import os
import re import re
@ -10,28 +9,20 @@
import llnl.util.tty as tty import llnl.util.tty as tty
from llnl.util.filesystem import find, join_path, working_dir from llnl.util.filesystem import find, join_path, working_dir
from spack.directives import depends_on, extends import spack.builder
from spack.package_base import PackageBase, run_after import spack.package_base
from spack.directives import build_system, depends_on, extends
from spack.multimethod import when
from ._checks import BaseBuilder, execute_install_time_tests
class SIPPackage(PackageBase): class SIPPackage(spack.package_base.PackageBase):
"""Specialized class for packages that are built using the """Specialized class for packages that are built using the
SIP build system. See https://www.riverbankcomputing.com/software/sip/intro SIP build system. See https://www.riverbankcomputing.com/software/sip/intro
for more information. for more information.
This class provides the following phases that can be overridden:
* configure
* build
* install
The configure phase already adds a set of default flags. To see more
options, run ``python configure.py --help``.
""" """
# Default phases
phases = ["configure", "build", "install"]
# To be used in UI queries that require to know which # To be used in UI queries that require to know which
# build-system class we are using # build-system class we are using
build_system_class = "SIPPackage" build_system_class = "SIPPackage"
@ -41,11 +32,15 @@ class SIPPackage(PackageBase):
#: Callback names for install-time test #: Callback names for install-time test
install_time_test_callbacks = ["test"] install_time_test_callbacks = ["test"]
#: Legacy buildsystem attribute used to deserialize and install old specs
legacy_buildsystem = "sip"
extends("python") build_system("sip")
depends_on("qt") with when("build_system=sip"):
depends_on("py-sip") extends("python")
depends_on("qt")
depends_on("py-sip")
@property @property
def import_modules(self): def import_modules(self):
@ -95,11 +90,51 @@ def python(self, *args, **kwargs):
"""The python ``Executable``.""" """The python ``Executable``."""
inspect.getmodule(self).python(*args, **kwargs) inspect.getmodule(self).python(*args, **kwargs)
def test(self):
"""Attempts to import modules of the installed package."""
# Make sure we are importing the installed modules,
# not the ones in the source directory
for module in self.import_modules:
self.run_test(
inspect.getmodule(self).python.path,
["-c", "import {0}".format(module)],
purpose="checking import of {0}".format(module),
work_dir="spack-test",
)
@spack.builder.builder("sip")
class SIPBuilder(BaseBuilder):
"""The SIP builder provides the following phases that can be overridden:
* configure
* build
* install
The configure phase already adds a set of default flags. To see more
options, run ``python configure.py --help``.
"""
phases = ("configure", "build", "install")
#: Names associated with package methods in the old build-system format
legacy_methods = ("configure_file", "configure_args", "build_args", "install_args")
#: Names associated with package attributes in the old build-system format
legacy_attributes = (
"build_targets",
"install_targets",
"build_time_test_callbacks",
"install_time_test_callbacks",
"build_directory",
)
def configure_file(self): def configure_file(self):
"""Returns the name of the configure file to use.""" """Returns the name of the configure file to use."""
return "configure.py" return "configure.py"
def configure(self, spec, prefix): def configure(self, pkg, spec, prefix):
"""Configure the package.""" """Configure the package."""
configure = self.configure_file() configure = self.configure_file()
@ -118,7 +153,7 @@ def configure(self, spec, prefix):
"--bindir", "--bindir",
prefix.bin, prefix.bin,
"--destdir", "--destdir",
inspect.getmodule(self).python_platlib, inspect.getmodule(self.pkg).python_platlib,
] ]
) )
@ -128,53 +163,35 @@ def configure_args(self):
"""Arguments to pass to configure.""" """Arguments to pass to configure."""
return [] return []
def build(self, spec, prefix): def build(self, pkg, spec, prefix):
"""Build the package.""" """Build the package."""
args = self.build_args() args = self.build_args()
inspect.getmodule(self).make(*args) inspect.getmodule(self.pkg).make(*args)
def build_args(self): def build_args(self):
"""Arguments to pass to build.""" """Arguments to pass to build."""
return [] return []
def install(self, spec, prefix): def install(self, pkg, spec, prefix):
"""Install the package.""" """Install the package."""
args = self.install_args() args = self.install_args()
inspect.getmodule(self).make("install", parallel=False, *args) inspect.getmodule(self.pkg).make("install", parallel=False, *args)
def install_args(self): def install_args(self):
"""Arguments to pass to install.""" """Arguments to pass to install."""
return [] return []
# Testing spack.builder.run_after("install")(execute_install_time_tests)
def test(self): @spack.builder.run_after("install")
"""Attempts to import modules of the installed package."""
# Make sure we are importing the installed modules,
# not the ones in the source directory
for module in self.import_modules:
self.run_test(
inspect.getmodule(self).python.path,
["-c", "import {0}".format(module)],
purpose="checking import of {0}".format(module),
work_dir="spack-test",
)
run_after("install")(PackageBase._run_default_install_time_test_callbacks)
# Check that self.prefix is there after installation
run_after("install")(PackageBase.sanity_check_prefix)
@run_after("install")
def extend_path_setup(self): def extend_path_setup(self):
# See github issue #14121 and PR #15297 # See github issue #14121 and PR #15297
module = self.spec["py-sip"].variants["module"].value module = self.pkg.spec["py-sip"].variants["module"].value
if module != "sip": if module != "sip":
module = module.split(".")[0] module = module.split(".")[0]
with working_dir(inspect.getmodule(self).python_platlib): with working_dir(inspect.getmodule(self.pkg).python_platlib):
with open(os.path.join(module, "__init__.py"), "a") as f: with open(os.path.join(module, "__init__.py"), "a") as f:
f.write("from pkgutil import extend_path\n") f.write("from pkgutil import extend_path\n")
f.write("__path__ = extend_path(__path__, __name__)\n") f.write("__path__ = extend_path(__path__, __name__)\n")

View File

@ -2,21 +2,38 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details. # Spack Project Developers. See the top-level COPYRIGHT file for details.
# #
# SPDX-License-Identifier: (Apache-2.0 OR MIT) # SPDX-License-Identifier: (Apache-2.0 OR MIT)
import inspect import inspect
from llnl.util.filesystem import working_dir from llnl.util.filesystem import working_dir
from spack.directives import depends_on import spack.builder
from spack.package_base import PackageBase, run_after import spack.package_base
from spack.directives import build_system, depends_on
from ._checks import BaseBuilder, execute_build_time_tests, execute_install_time_tests
class WafPackage(PackageBase): class WafPackage(spack.package_base.PackageBase):
"""Specialized class for packages that are built using the """Specialized class for packages that are built using the
Waf build system. See https://waf.io/book/ for more information. Waf build system. See https://waf.io/book/ for more information.
"""
This class provides the following phases that can be overridden: # To be used in UI queries that require to know which
# build-system class we are using
build_system_class = "WafPackage"
#: Legacy buildsystem attribute used to deserialize and install old specs
legacy_buildsystem = "waf"
build_system("waf")
# Much like AutotoolsPackage does not require automake and autoconf
# to build, WafPackage does not require waf to build. It only requires
# python to run the waf build script.
depends_on("python@2.5:", type="build", when="build_system=waf")
@spack.builder.builder("waf")
class WafBuilder(BaseBuilder):
"""The WAF builder provides the following phases that can be overridden:
* configure * configure
* build * build
@ -40,12 +57,25 @@ class WafPackage(PackageBase):
function, which passes ``--prefix=/path/to/installation/prefix``. function, which passes ``--prefix=/path/to/installation/prefix``.
""" """
# Default phases phases = ("configure", "build", "install")
phases = ["configure", "build", "install"]
# To be used in UI queries that require to know which #: Names associated with package methods in the old build-system format
# build-system class we are using legacy_methods = (
build_system_class = "WafPackage" "build_test",
"install_test",
"configure_args",
"build_args",
"install_args",
"build_test",
"install_test",
)
#: Names associated with package attributes in the old build-system format
legacy_attributes = (
"build_time_test_callbacks",
"build_time_test_callbacks",
"build_directory",
)
# Callback names for build-time test # Callback names for build-time test
build_time_test_callbacks = ["build_test"] build_time_test_callbacks = ["build_test"]
@ -53,11 +83,6 @@ class WafPackage(PackageBase):
# Callback names for install-time test # Callback names for install-time test
install_time_test_callbacks = ["install_test"] install_time_test_callbacks = ["install_test"]
# Much like AutotoolsPackage does not require automake and autoconf
# to build, WafPackage does not require waf to build. It only requires
# python to run the waf build script.
depends_on("python@2.5:", type="build")
@property @property
def build_directory(self): def build_directory(self):
"""The directory containing the ``waf`` file.""" """The directory containing the ``waf`` file."""
@ -65,18 +90,18 @@ def build_directory(self):
def python(self, *args, **kwargs): def python(self, *args, **kwargs):
"""The python ``Executable``.""" """The python ``Executable``."""
inspect.getmodule(self).python(*args, **kwargs) inspect.getmodule(self.pkg).python(*args, **kwargs)
def waf(self, *args, **kwargs): def waf(self, *args, **kwargs):
"""Runs the waf ``Executable``.""" """Runs the waf ``Executable``."""
jobs = inspect.getmodule(self).make_jobs jobs = inspect.getmodule(self.pkg).make_jobs
with working_dir(self.build_directory): with working_dir(self.build_directory):
self.python("waf", "-j{0}".format(jobs), *args, **kwargs) self.python("waf", "-j{0}".format(jobs), *args, **kwargs)
def configure(self, spec, prefix): def configure(self, pkg, spec, prefix):
"""Configures the project.""" """Configures the project."""
args = ["--prefix={0}".format(self.prefix)] args = ["--prefix={0}".format(self.pkg.prefix)]
args += self.configure_args() args += self.configure_args()
self.waf("configure", *args) self.waf("configure", *args)
@ -85,7 +110,7 @@ def configure_args(self):
"""Arguments to pass to configure.""" """Arguments to pass to configure."""
return [] return []
def build(self, spec, prefix): def build(self, pkg, spec, prefix):
"""Executes the build.""" """Executes the build."""
args = self.build_args() args = self.build_args()
@ -95,7 +120,7 @@ def build_args(self):
"""Arguments to pass to build.""" """Arguments to pass to build."""
return [] return []
def install(self, spec, prefix): def install(self, pkg, spec, prefix):
"""Installs the targets on the system.""" """Installs the targets on the system."""
args = self.install_args() args = self.install_args()
@ -105,8 +130,6 @@ def install_args(self):
"""Arguments to pass to install.""" """Arguments to pass to install."""
return [] return []
# Testing
def build_test(self): def build_test(self):
"""Run unit tests after build. """Run unit tests after build.
@ -115,7 +138,7 @@ def build_test(self):
""" """
pass pass
run_after("build")(PackageBase._run_default_build_time_test_callbacks) spack.builder.run_after("build")(execute_build_time_tests)
def install_test(self): def install_test(self):
"""Run unit tests after install. """Run unit tests after install.
@ -125,7 +148,4 @@ def install_test(self):
""" """
pass pass
run_after("install")(PackageBase._run_default_install_time_test_callbacks) spack.builder.run_after("install")(execute_install_time_tests)
# Check that self.prefix is there after installation
run_after("install")(PackageBase.sanity_check_prefix)

574 lib/spack/spack/builder.py Normal file
View File

@ -0,0 +1,574 @@
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import collections
import copy
import functools
import inspect
from typing import List, Optional, Tuple
import six
import llnl.util.compat
import spack.build_environment
#: Builder classes, as registered by the "builder" decorator
BUILDER_CLS = {}
#: An object of this kind is a shared global state used to collect callbacks during
#: class definition time, and is flushed when the class object is created at the end
#: of the class definition
#:
#: Args:
#: attribute_name (str): name of the attribute that will be attached to the builder
#: callbacks (list): container used to temporarily aggregate the callbacks
CallbackTemporaryStage = collections.namedtuple(
"CallbackTemporaryStage", ["attribute_name", "callbacks"]
)
#: Shared global state to aggregate "@run_before" callbacks
_RUN_BEFORE = CallbackTemporaryStage(attribute_name="run_before_callbacks", callbacks=[])
#: Shared global state to aggregate "@run_after" callbacks
_RUN_AFTER = CallbackTemporaryStage(attribute_name="run_after_callbacks", callbacks=[])
#: Map id(pkg) to a builder, to avoid creating multiple
#: builders for the same package object.
_BUILDERS = {}
def builder(build_system_name):
"""Class decorator used to register the default builder
for a given build-system.
Args:
build_system_name (str): name of the build-system
"""
def _decorator(cls):
cls.build_system = build_system_name
BUILDER_CLS[build_system_name] = cls
return cls
return _decorator
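# Illustrative usage (hypothetical build-system name):
#
#   @builder("mybuild")
#   class MyBuilder(Builder):
#       phases = ("install",)
#
# After this, packages declaring build_system("mybuild") resolve to MyBuilder
# through the BUILDER_CLS registry above.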
def create(pkg):
"""Given a package object with an associated concrete spec,
return the builder object that can install it.
Args:
pkg (spack.package_base.PackageBase): package for which we want the builder
"""
if id(pkg) not in _BUILDERS:
_BUILDERS[id(pkg)] = _create(pkg)
return _BUILDERS[id(pkg)]
class _PhaseAdapter(object):
def __init__(self, builder, phase_fn):
self.builder = builder
self.phase_fn = phase_fn
def __call__(self, spec, prefix):
return self.phase_fn(self.builder.pkg, spec, prefix)
def _create(pkg):
"""Return a new builder object for the package object being passed as argument.
The function inspects the build-system used by the package object and tries to:
1. Return a custom builder, if any is defined in the same ``package.py`` file.
2. Return a customization of more generic builders, if any is defined in the
class hierarchy (look at AspellDictPackage for an example of that)
3. Return a run-time generated adapter builder otherwise
The run-time generated adapter builder is capable of adapting an old-style package
to the new architecture, where the installation procedure has been extracted from
the ``*Package`` hierarchy into a ``*Builder`` hierarchy. This means that the
adapter looks for attribute or method overrides preferably in the ``*Package``
before using the default builder implementation.
Note that in case a builder is explicitly coded in ``package.py``, no attempt is made
to look for build-related methods in the ``*Package``.
Args:
pkg (spack.package_base.PackageBase): package object for which we need a builder
"""
package_module = inspect.getmodule(pkg)
package_buildsystem = buildsystem_name(pkg)
default_builder_cls = BUILDER_CLS[package_buildsystem]
builder_cls_name = default_builder_cls.__name__
builder_cls = getattr(package_module, builder_cls_name, None)
if builder_cls:
return builder_cls(pkg)
# Specialized versions of a given buildsystem can subclass some
# base classes and specialize certain phases or methods or attributes.
# In that case they can store their builder class as a class-level attribute.
# See AspellDictPackage for an example.
base_cls = getattr(pkg, builder_cls_name, default_builder_cls)
# From here on we define classes to construct a special builder that adapts to the
# old, single class, package format. The adapter forwards any call or access to an
# attribute related to the installation procedure to a package object wrapped in
# a class that falls back on calling the base builder if no override is found on the
# package. The semantics should be the same as if the methods in the base builder were
# still present in the base class of the package.
class _ForwardToBaseBuilder(object):
def __init__(self, wrapped_pkg_object, root_builder):
self.wrapped_package_object = wrapped_pkg_object
self.root_builder = root_builder
package_cls = type(wrapped_pkg_object)
wrapper_cls = type(self)
bases = (package_cls, wrapper_cls)
new_cls_name = package_cls.__name__ + "Wrapper"
new_cls = type(new_cls_name, bases, {})
new_cls.__module__ = package_cls.__module__
self.__class__ = new_cls
self.__dict__.update(wrapped_pkg_object.__dict__)
def __getattr__(self, item):
result = getattr(super(type(self.root_builder), self.root_builder), item)
if item in super(type(self.root_builder), self.root_builder).phases:
result = _PhaseAdapter(self.root_builder, result)
return result
def forward_method_to_getattr(fn_name):
def __forward(self, *args, **kwargs):
return self.__getattr__(fn_name)(*args, **kwargs)
return __forward
# Add fallback methods for the Package object to refer to the builder. If a method
# with the same name is defined in the Package, it will override this definition
# (when _ForwardToBaseBuilder is initialized)
for method_name in (
base_cls.phases
+ base_cls.legacy_methods
+ getattr(base_cls, "legacy_long_methods", tuple())
+ ("setup_build_environment", "setup_dependent_build_environment")
):
setattr(_ForwardToBaseBuilder, method_name, forward_method_to_getattr(method_name))
def forward_property_to_getattr(property_name):
def __forward(self):
return self.__getattr__(property_name)
return __forward
for attribute_name in base_cls.legacy_attributes:
setattr(
_ForwardToBaseBuilder,
attribute_name,
property(forward_property_to_getattr(attribute_name)),
)
class Adapter(six.with_metaclass(_PackageAdapterMeta, base_cls)):
def __init__(self, pkg):
# Deal with custom phases in packages here
if hasattr(pkg, "phases"):
self.phases = pkg.phases
for phase in self.phases:
setattr(Adapter, phase, _PackageAdapterMeta.phase_method_adapter(phase))
# Attribute containing the package wrapped in dispatcher with a `__getattr__`
# method that will forward certain calls to the default builder.
self.pkg_with_dispatcher = _ForwardToBaseBuilder(pkg, root_builder=self)
super(Adapter, self).__init__(pkg)
# These two methods don't follow the (self, spec, prefix) signature of phases nor
# the (self) signature of methods, so they are added explicitly to avoid using a
# catch-all (*args, **kwargs)
def setup_build_environment(self, env):
return self.pkg_with_dispatcher.setup_build_environment(env)
def setup_dependent_build_environment(self, env, dependent_spec):
return self.pkg_with_dispatcher.setup_dependent_build_environment(env, dependent_spec)
return Adapter(pkg)
def buildsystem_name(pkg):
"""Given a package object with an associated concrete spec,
return the name of its build system.
Args:
pkg (spack.package_base.PackageBase): package for which we want
the build system name
"""
try:
return pkg.spec.variants["build_system"].value
except KeyError:
# We are reading an old spec without the build_system variant
return pkg.legacy_buildsystem
class PhaseCallbacksMeta(type):
"""Permit to register arbitrary functions during class definition and run them
later, before or after a given install phase.
Each method decorated with ``run_before`` or ``run_after`` gets temporarily
stored in a global shared state when a class being defined is parsed by the Python
interpreter. At class definition time that temporary storage gets flushed and a list
of callbacks is attached to the class being defined.
"""
def __new__(mcs, name, bases, attr_dict):
for temporary_stage in (_RUN_BEFORE, _RUN_AFTER):
staged_callbacks = temporary_stage.callbacks
# We don't have callbacks in this class, move on
if not staged_callbacks:
continue
# If we are here we have callbacks. To get a complete list, first collect what
# was attached to parent classes, then prepend what we have registered here.
#
# The order should be:
# 1. Callbacks are registered in order within the same class
# 2. Callbacks defined in derived classes precede those defined in base
# classes
for base in bases:
callbacks_from_base = getattr(base, temporary_stage.attribute_name, None)
if callbacks_from_base:
break
callbacks_from_base = callbacks_from_base or []
# Set the callbacks in this class and flush the temporary stage
attr_dict[temporary_stage.attribute_name] = staged_callbacks[:] + callbacks_from_base
del temporary_stage.callbacks[:]
return super(PhaseCallbacksMeta, mcs).__new__(mcs, name, bases, attr_dict)
@staticmethod
def run_after(phase, when=None):
"""Decorator to register a function for running after a given phase.
Args:
phase (str): phase after which the function must run.
when (str): condition under which the function is run (if None, it is always run).
"""
def _decorator(fn):
key = (phase, when)
item = (key, fn)
_RUN_AFTER.callbacks.append(item)
return fn
return _decorator
@staticmethod
def run_before(phase, when=None):
"""Decorator to register a function for running before a given phase.
Args:
phase (str): phase before which the function must run.
when (str): condition under which the function is run (if None, it is always run).
"""
def _decorator(fn):
key = (phase, when)
item = (key, fn)
_RUN_BEFORE.callbacks.append(item)
return fn
return _decorator
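# Illustrative usage at builder-class scope (hypothetical method and variant):
#
#   @run_after("install", when="+tests")
#   def smoke_test(self):
#       ...
#
# The decorated function is staged in _RUN_AFTER and flushed onto the class by
# PhaseCallbacksMeta; InstallationPhase executes it when the phase completes.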
class BuilderMeta(PhaseCallbacksMeta, type(llnl.util.compat.Sequence)): # type: ignore
pass
class _PackageAdapterMeta(BuilderMeta):
"""Metaclass to adapt old-style packages to the new architecture based on builders
for the installation phase.
This class does the necessary mangling of function arguments so that a call to a
builder object can delegate to a package object.
"""
@staticmethod
def phase_method_adapter(phase_name):
def _adapter(self, pkg, spec, prefix):
phase_fn = getattr(self.pkg_with_dispatcher, phase_name)
return phase_fn(spec, prefix)
return _adapter
@staticmethod
def legacy_long_method_adapter(method_name):
def _adapter(self, spec, prefix):
bind_method = getattr(self.pkg_with_dispatcher, method_name)
return bind_method(spec, prefix)
return _adapter
@staticmethod
def legacy_method_adapter(method_name):
def _adapter(self):
bind_method = getattr(self.pkg_with_dispatcher, method_name)
return bind_method()
return _adapter
@staticmethod
def legacy_attribute_adapter(attribute_name):
def _adapter(self):
return getattr(self.pkg_with_dispatcher, attribute_name)
return property(_adapter)
@staticmethod
def combine_callbacks(pipeline_attribute_name):
"""This function combines callbacks from old-style packages with callbacks that might
be registered for the default builder.
It works by:
1. Extracting the callbacks from the old-style package
2. Transforming those callbacks by adding an adapter that receives a builder as argument
and calls the wrapped function with ``builder.pkg``
3. Combining the list of transformed callbacks with those that might be present in the
default builder
"""
def _adapter(self):
def unwrap_pkg(fn):
@functools.wraps(fn)
def _wrapped(builder):
return fn(builder.pkg_with_dispatcher)
return _wrapped
# Concatenate the current list with the one from package
callbacks_from_package = getattr(self.pkg, pipeline_attribute_name, [])
callbacks_from_package = [(key, unwrap_pkg(x)) for key, x in callbacks_from_package]
callbacks_from_builder = getattr(super(type(self), self), pipeline_attribute_name, [])
return callbacks_from_package + callbacks_from_builder
return property(_adapter)
def __new__(mcs, name, bases, attr_dict):
# Add ways to intercept methods and attribute calls and dispatch
# them first to a package object
default_builder_cls = bases[0]
for phase_name in default_builder_cls.phases:
attr_dict[phase_name] = _PackageAdapterMeta.phase_method_adapter(phase_name)
for method_name in default_builder_cls.legacy_methods:
attr_dict[method_name] = _PackageAdapterMeta.legacy_method_adapter(method_name)
# These exist e.g. for Python, see discussion in https://github.com/spack/spack/pull/32068
for method_name in getattr(default_builder_cls, "legacy_long_methods", []):
attr_dict[method_name] = _PackageAdapterMeta.legacy_long_method_adapter(method_name)
for attribute_name in default_builder_cls.legacy_attributes:
attr_dict[attribute_name] = _PackageAdapterMeta.legacy_attribute_adapter(
attribute_name
)
combine_callbacks = _PackageAdapterMeta.combine_callbacks
attr_dict[_RUN_BEFORE.attribute_name] = combine_callbacks(_RUN_BEFORE.attribute_name)
attr_dict[_RUN_AFTER.attribute_name] = combine_callbacks(_RUN_AFTER.attribute_name)
return super(_PackageAdapterMeta, mcs).__new__(mcs, name, bases, attr_dict)

class InstallationPhase(object):
"""Manages a single phase of the installation.
This descriptor stores at creation time the name of the method it should
search for execution. The method is retrieved at __get__ time, so that
it can be overridden by subclasses of whatever class declared the phases.
It also provides hooks to execute arbitrary callbacks before and after
the phase.
"""
def __init__(self, name, builder):
self.name = name
self.builder = builder
self.phase_fn = self._select_phase_fn()
self.run_before = self._make_callbacks(_RUN_BEFORE.attribute_name)
self.run_after = self._make_callbacks(_RUN_AFTER.attribute_name)
def _make_callbacks(self, callbacks_attribute):
result = []
callbacks = getattr(self.builder, callbacks_attribute, [])
for (phase, condition), fn in callbacks:
# Skip callbacks that are registered for a different phase
if phase != self.name:
continue
# If we have no condition or the callback satisfies a condition, register it
if condition is None or self.builder.pkg.spec.satisfies(condition):
result.append(fn)
return result
def __str__(self):
msg = '{0}: executing "{1}" phase'
return msg.format(self.builder, self.name)
def execute(self):
pkg = self.builder.pkg
self._on_phase_start(pkg)
for callback in self.run_before:
callback(self.builder)
self.phase_fn(pkg, pkg.spec, pkg.prefix)
for callback in self.run_after:
callback(self.builder)
self._on_phase_exit(pkg)
def _select_phase_fn(self):
phase_fn = getattr(self.builder, self.name, None)
if not phase_fn:
msg = (
'unexpected error: package "{0.fullname}" must implement an '
'"{1}" phase for the "{2}" build system'
)
raise RuntimeError(msg.format(self.builder.pkg, self.name, self.builder.build_system))
return phase_fn
def _on_phase_start(self, instance):
# If the package being installed has a stop_before_phase attribute
# matching this phase, stop the installation by raising a StopPhase
if getattr(instance, "stop_before_phase", None) == self.name:
raise spack.build_environment.StopPhase(
"Stopping before '{0}' phase".format(self.name)
)
def _on_phase_exit(self, instance):
# If the package being installed has a last_phase attribute matching
# this phase, stop the installation by raising a StopPhase
if getattr(instance, "last_phase", None) == self.name:
raise spack.build_environment.StopPhase("Stopping at '{0}' phase".format(self.name))
def copy(self):
return copy.deepcopy(self)

class Builder(six.with_metaclass(BuilderMeta, llnl.util.compat.Sequence)):
"""A builder is a class that, given a package object (i.e. associated with
concrete spec), knows how to install it.
The builder behaves like a sequence, and when iterated over return the
"phases" of the installation in the correct order.
Args:
pkg (spack.package_base.PackageBase): package object to be built
"""
#: Sequence of phases. Must be defined in derived classes
phases = () # type: Tuple[str, ...]
#: Build system name. Must also be defined in derived classes.
build_system = None # type: Optional[str]
legacy_methods = () # type: Tuple[str, ...]
legacy_attributes = () # type: Tuple[str, ...]
#: List of glob expressions. Each expression must either be
#: absolute or relative to the package source path.
#: Matching artifacts found at the end of the build process will be
#: copied into the same directory tree as _spack_build_logfile and
#: _spack_build_envfile.
archive_files = [] # type: List[str]
def __init__(self, pkg):
self.pkg = pkg
self.callbacks = {}
for phase in self.phases:
self.callbacks[phase] = InstallationPhase(phase, self)
@property
def spec(self):
return self.pkg.spec
@property
def stage(self):
return self.pkg.stage
@property
def prefix(self):
return self.pkg.prefix
def test(self):
# Defer tests to virtual and concrete packages
pass
def setup_build_environment(self, env):
"""Sets up the build environment for a package.
This method will be called before the current package prefix exists in
Spack's store.
Args:
env (spack.util.environment.EnvironmentModifications): environment
modifications to be applied when the package is built. Package authors
can call methods on it to alter the build environment.
"""
if not hasattr(super(Builder, self), "setup_build_environment"):
return
super(Builder, self).setup_build_environment(env)
def setup_dependent_build_environment(self, env, dependent_spec):
"""Sets up the build environment of packages that depend on this one.
This is similar to ``setup_build_environment``, but it is used to
modify the build environments of packages that *depend* on this one.
This gives packages like Python and others that follow the extension
model a way to implement common environment or compile-time settings
for dependencies.
This method will be called before the dependent package prefix exists
in Spack's store.
Examples:
1. Installing python modules generally requires ``PYTHONPATH``
to point to the ``lib/pythonX.Y/site-packages`` directory in the
module's install prefix. This method could be used to set that
variable.
Args:
env (spack.util.environment.EnvironmentModifications): environment
modifications to be applied when the dependent package is built.
Package authors can call methods on it to alter the build environment.
dependent_spec (spack.spec.Spec): the spec of the dependent package
about to be built. This allows the extendee (self) to query
the dependent's state. Note that *this* package's spec is
available as ``self.spec``
"""
if not hasattr(super(Builder, self), "setup_dependent_build_environment"):
return
super(Builder, self).setup_dependent_build_environment(env, dependent_spec)
def __getitem__(self, idx):
key = self.phases[idx]
return self.callbacks[key]
def __len__(self):
return len(self.phases)
def __repr__(self):
msg = "{0}({1})"
return msg.format(type(self).__name__, self.pkg.spec.format("{name}/{hash:7}"))
def __str__(self):
msg = '"{0}" builder for "{1}"'
return msg.format(type(self).build_system, self.pkg.spec.format("{name}/{hash:7}"))

# Export these names as standalone to be used in packages
run_after = PhaseCallbacksMeta.run_after
run_before = PhaseCallbacksMeta.run_before
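
A concrete builder under this design might look like the following minimal
sketch (the class name, build system name, and phase bodies here are
hypothetical, shown only to illustrate the pieces defined above):

    class ExampleBuilder(Builder):
        #: Iterating over the builder yields these phases, in order
        phases = ("configure", "install")
        build_system = "example"

        def configure(self, pkg, spec, prefix):
            pass  # drive the build system's configure step

        def install(self, pkg, spec, prefix):
            pass  # place the build results under the install prefix

        @run_after("install")
        def _check_install(self):
            # Runs after the "install" phase; callbacks receive the builder
            pass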

View File

@@ -4,6 +4,7 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 
 import base64
+import codecs
 import copy
 import json
 import os
@@ -11,11 +12,12 @@
 import shutil
 import stat
 import subprocess
+import sys
 import tempfile
 import time
 import zipfile
 
-from six import iteritems
+from six import iteritems, string_types
 from six.moves.urllib.error import HTTPError, URLError
 from six.moves.urllib.parse import urlencode
 from six.moves.urllib.request import HTTPHandler, Request, build_opener
@@ -41,7 +43,6 @@
 from spack.error import SpackError
 from spack.reporters.cdash import CDash
 from spack.reporters.cdash import build_stamp as cdash_build_stamp
-from spack.spec import Spec
 from spack.util.pattern import Bunch
 
 JOB_RETRY_CONDITIONS = [
@@ -141,13 +142,6 @@ def _get_spec_string(spec):
     return spec.format("".join(format_elements))
 
 
-def _format_root_spec(spec, main_phase, strip_compiler):
-    if main_phase is False and strip_compiler is True:
-        return "{0}@{1} arch={2}".format(spec.name, spec.version, spec.architecture)
-    else:
-        return spec.dag_hash()
-
-
 def _spec_deps_key(s):
     return "{0}/{1}".format(s.name, s.dag_hash(7))
@@ -173,8 +167,7 @@ def _get_spec_dependencies(
     for entry in specs:
         spec_labels[entry["label"]] = {
-            "spec": Spec(entry["spec"]),
-            "rootSpec": entry["root_spec"],
+            "spec": entry["spec"],
             "needs_rebuild": entry["needs_rebuild"],
         }
@@ -201,7 +194,7 @@ def stage_spec_jobs(specs, check_index_only=False, mirrors_to_check=None):
     and stages:
 
         spec_labels: A dictionary mapping the spec labels which are made of
-            (pkg-name/hash-prefix), to objects containing "rootSpec" and "spec"
+            (pkg-name/hash-prefix), to objects containing "spec" and "needs_rebuild"
             keys.  The root spec is the spec of which this spec is a dependency
             and the spec is the formatted spec string for this spec.
@@ -316,17 +309,14 @@ def _compute_spec_deps(spec_list, check_index_only=False, mirrors_to_check=None)
             ],
             "specs": [
                 {
-                    "root_spec": "readline@7.0%apple-clang@9.1.0 arch=darwin-...",
                     "spec": "readline@7.0%apple-clang@9.1.0 arch=darwin-highs...",
                     "label": "readline/ip6aiun"
                 },
                 {
-                    "root_spec": "readline@7.0%apple-clang@9.1.0 arch=darwin-...",
                     "spec": "ncurses@6.1%apple-clang@9.1.0 arch=darwin-highsi...",
                     "label": "ncurses/y43rifz"
                 },
                 {
-                    "root_spec": "readline@7.0%apple-clang@9.1.0 arch=darwin-...",
                     "spec": "pkgconf@1.5.4%apple-clang@9.1.0 arch=darwin-high...",
                     "label": "pkgconf/eg355zb"
                 }
@@ -348,8 +338,6 @@ def append_dep(s, d):
     )
 
     for spec in spec_list:
-        root_spec = spec
-
         for s in spec.traverse(deptype=all):
             if s.external:
                 tty.msg("Will not stage external pkg: {0}".format(s))
@@ -361,8 +349,7 @@
             skey = _spec_deps_key(s)
             spec_labels[skey] = {
-                "spec": _get_spec_string(s),
-                "root": root_spec,
+                "spec": s,
                 "needs_rebuild": not up_to_date_mirrors,
             }
@@ -379,7 +366,6 @@
             {
                 "label": spec_label,
                 "spec": spec_holder["spec"],
-                "root_spec": spec_holder["root"],
                 "needs_rebuild": spec_holder["needs_rebuild"],
             }
         )
@@ -396,6 +382,14 @@ def _spec_matches(spec, match_string):
     return spec.satisfies(match_string)
 
 
+def _remove_attributes(src_dict, dest_dict):
+    if "tags" in src_dict and "tags" in dest_dict:
+        # For 'tags', we remove any tags that are listed for removal
+        for tag in src_dict["tags"]:
+            while tag in dest_dict["tags"]:
+                dest_dict["tags"].remove(tag)
+
+
 def _copy_attributes(attrs_list, src_dict, dest_dict):
     for runner_attr in attrs_list:
         if runner_attr in src_dict:
@@ -429,23 +423,23 @@ def _find_matching_config(spec, gitlab_ci):
     _copy_attributes(overridable_attrs, gitlab_ci, runner_attributes)
 
-    ci_mappings = gitlab_ci["mappings"]
-    for ci_mapping in ci_mappings:
+    matched = False
+    only_first = gitlab_ci.get("match_behavior", "first") == "first"
+    for ci_mapping in gitlab_ci["mappings"]:
         for match_string in ci_mapping["match"]:
             if _spec_matches(spec, match_string):
+                matched = True
+                if "remove-attributes" in ci_mapping:
+                    _remove_attributes(ci_mapping["remove-attributes"], runner_attributes)
                 if "runner-attributes" in ci_mapping:
                     _copy_attributes(
                         overridable_attrs, ci_mapping["runner-attributes"], runner_attributes
                     )
-                return runner_attributes
-    else:
-        return None
+                break
+        if matched and only_first:
+            break
 
-    return runner_attributes
+    return runner_attributes if matched else None
 
 
-def _pkg_name_from_spec_label(spec_label):
-    return spec_label[: spec_label.index("/")]
-
-
 def _format_job_needs(
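
As a quick illustration of the new _remove_attributes helper above, here is a
standalone sketch (the tag values, and the plain dict standing in for the
runner attributes, are hypothetical):

    def _remove_attributes(src_dict, dest_dict):
        if "tags" in src_dict and "tags" in dest_dict:
            for tag in src_dict["tags"]:
                while tag in dest_dict["tags"]:
                    dest_dict["tags"].remove(tag)

    runner_attributes = {"tags": ["aws", "public", "aws"]}
    _remove_attributes({"tags": ["aws"]}, runner_attributes)
    print(runner_attributes)  # {'tags': ['public']}

Note also that with match_behavior left at its default of "first", the loop
above stops at the first mapping that matches the spec; any other value lets
every matching mapping contribute attributes, in order.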
@@ -521,38 +515,36 @@ def compute_affected_packages(rev1="HEAD^", rev2="HEAD"):
     return spack.repo.get_all_package_diffs("ARC", rev1=rev1, rev2=rev2)
 
 
-def get_spec_filter_list(env, affected_pkgs, dependencies=True, dependents=True):
-    """Given a list of package names, and assuming an active and
-    concretized environment, return a set of concrete specs from
-    the environment corresponding to any of the affected pkgs (or
-    optionally to any of their dependencies/dependents).
+def get_spec_filter_list(env, affected_pkgs):
+    """Given a list of package names and an active/concretized
+    environment, return the set of all concrete specs from the
+    environment that could have been affected by changing the
+    list of packages.
 
     Arguments:
 
         env (spack.environment.Environment): Active concrete environment
         affected_pkgs (List[str]): Affected package names
-        dependencies (bool): Include dependencies of affected packages
-        dependents (bool): Include dependents of affected pacakges
 
     Returns:
 
-        A list of concrete specs from the active environment including
-        those associated with affected packages, and possible their
-        dependencies and dependents as well.
+        A set of concrete specs from the active environment including
+        those associated with affected packages, their dependencies and
+        dependents, as well as their dependents dependencies.
     """
     affected_specs = set()
     all_concrete_specs = env.all_specs()
     tty.debug("All concrete environment specs:")
     for s in all_concrete_specs:
         tty.debug("  {0}/{1}".format(s.name, s.dag_hash()[:7]))
-    for pkg in affected_pkgs:
-        env_matches = [s for s in all_concrete_specs if s.name == pkg]
-        for match in env_matches:
-            affected_specs.add(match)
-            if dependencies:
-                affected_specs.update(match.traverse(direction="children", root=False))
-            if dependents:
-                affected_specs.update(match.traverse(direction="parents", root=False))
+    env_matches = [s for s in all_concrete_specs if s.name in frozenset(affected_pkgs)]
+    visited = set()
+    dag_hash = lambda s: s.dag_hash()
+    for match in env_matches:
+        for parent in match.traverse(direction="parents", key=dag_hash):
+            affected_specs.update(
+                parent.traverse(direction="children", visited=visited, key=dag_hash)
+            )
     return affected_specs
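
The effect of the rewritten traversal can be pictured on a toy DAG (the
package names are made up and plain dicts stand in for concrete specs; this
is not spack's Spec API):

    # app depends on libA and libB; libA depends on zlib. If only zlib is
    # touched, walking up to its dependents (libA, app) and then down from
    # each of those also collects libB: the dependents' dependencies.
    dependents = {"zlib": ["libA"], "libA": ["app"], "libB": ["app"], "app": []}
    dependencies = {"app": ["libA", "libB"], "libA": ["zlib"], "libB": [], "zlib": []}

    def up(node):
        seen = {node}
        for parent in dependents[node]:
            seen |= up(parent)
        return seen

    def down(node):
        seen = {node}
        for child in dependencies[node]:
            seen |= down(child)
        return seen

    affected = set()
    for changed in ["zlib"]:
        for parent in up(changed):
            affected |= down(parent)
    print(sorted(affected))  # ['app', 'libA', 'libB', 'zlib']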
@@ -613,11 +605,11 @@ def generate_gitlab_ci_yaml(
     cdash_handler = CDashHandler(yaml_root.get("cdash")) if "cdash" in yaml_root else None
     build_group = cdash_handler.build_group if cdash_handler else None
 
-    prune_untouched_packages = os.environ.get("SPACK_PRUNE_UNTOUCHED", None)
-    if prune_untouched_packages:
+    prune_untouched_packages = False
+    spack_prune_untouched = os.environ.get("SPACK_PRUNE_UNTOUCHED", None)
+    if spack_prune_untouched is not None and spack_prune_untouched.lower() == "true":
         # Requested to prune untouched packages, but assume we won't do that
         # unless we're actually in a git repo.
-        prune_untouched_packages = False
         rev1, rev2 = get_change_revisions()
         tty.debug("Got following revisions: rev1={0}, rev2={1}".format(rev1, rev2))
         if rev1 and rev2:
@@ -631,7 +623,15 @@ def generate_gitlab_ci_yaml(
             affected_specs = get_spec_filter_list(env, affected_pkgs)
             tty.debug("all affected specs:")
             for s in affected_specs:
-                tty.debug("  {0}".format(s.name))
+                tty.debug("  {0}/{1}".format(s.name, s.dag_hash()[:7]))
+
+    # Allow overriding --prune-dag cli opt with environment variable
+    prune_dag_override = os.environ.get("SPACK_PRUNE_UP_TO_DATE", None)
+    if prune_dag_override is not None:
+        prune_dag = True if prune_dag_override.lower() == "true" else False
+
+    # If we are not doing any kind of pruning, we are rebuilding everything
+    rebuild_everything = not prune_dag and not prune_untouched_packages
 
     # Downstream jobs will "need" (depend on, for both scheduling and
     # artifacts, which include spack.lock file) this pipeline generation
@@ -832,7 +832,6 @@ def generate_gitlab_ci_yaml(
         phase_name = phase["name"]
         strip_compilers = phase["strip-compilers"]
-        main_phase = _is_main_phase(phase_name)
 
         spec_labels, dependencies, stages = staged_phases[phase_name]
 
         for stage_jobs in stages:
@@ -842,14 +841,16 @@ def generate_gitlab_ci_yaml(
             for spec_label in stage_jobs:
                 spec_record = spec_labels[spec_label]
-                root_spec = spec_record["rootSpec"]
-                pkg_name = _pkg_name_from_spec_label(spec_label)
-                release_spec = root_spec[pkg_name]
+                release_spec = spec_record["spec"]
                 release_spec_dag_hash = release_spec.dag_hash()
 
                 if prune_untouched_packages:
                     if release_spec not in affected_specs:
-                        tty.debug("Pruning {0}, untouched by change.".format(release_spec.name))
+                        tty.debug(
+                            "Pruning {0}/{1}, untouched by change.".format(
+                                release_spec.name, release_spec.dag_hash()[:7]
+                            )
+                        )
                         spec_record["needs_rebuild"] = False
                         continue
@@ -865,7 +866,7 @@ def generate_gitlab_ci_yaml(
                 # For spack pipelines "public" and "protected" are reserved tags
                 tags = _remove_reserved_tags(tags)
                 if spack_pipeline_type == "spack_protected_branch":
-                    tags.extend(["aws", "protected"])
+                    tags.extend(["protected"])
                 elif spack_pipeline_type == "spack_pull_request":
                     tags.extend(["public"])
@@ -914,7 +915,6 @@ def generate_gitlab_ci_yaml(
                     compiler_action = "INSTALL_MISSING"
 
                 job_vars = {
-                    "SPACK_ROOT_SPEC": _format_root_spec(root_spec, main_phase, strip_compilers),
                     "SPACK_JOB_SPEC_DAG_HASH": release_spec_dag_hash,
                     "SPACK_JOB_SPEC_PKG_NAME": release_spec.name,
                     "SPACK_COMPILER_ACTION": compiler_action,
@@ -931,9 +931,7 @@ def generate_gitlab_ci_yaml(
                     # purposes, so we only get the direct dependencies.
                     dep_jobs = []
                     for dep_label in dependencies[spec_label]:
-                        dep_pkg = _pkg_name_from_spec_label(dep_label)
-                        dep_root = spec_labels[dep_label]["rootSpec"]
-                        dep_jobs.append(dep_root[dep_pkg])
+                        dep_jobs.append(spec_labels[dep_label]["spec"])
 
                     job_dependencies.extend(
                         _format_job_needs(
@@ -1017,13 +1015,15 @@
                     tty.debug(debug_msg)
 
                 if prune_dag and not rebuild_spec:
-                    tty.debug("Pruning {0}, does not need rebuild.".format(release_spec.name))
+                    tty.debug(
+                        "Pruning {0}/{1}, does not need rebuild.".format(
+                            release_spec.name, release_spec.dag_hash()
+                        )
+                    )
                     continue
 
                 if broken_spec_urls is not None and release_spec_dag_hash in broken_spec_urls:
-                    known_broken_specs_encountered.append(
-                        "{0} ({1})".format(release_spec, release_spec_dag_hash)
-                    )
+                    known_broken_specs_encountered.append(release_spec_dag_hash)
 
                 # Only keep track of these if we are copying rebuilt cache entries
                 if spack_buildcache_copy:
@@ -1167,7 +1167,14 @@
         "after_script",
     ]
 
-    service_job_retries = {"max": 2, "when": ["runner_system_failure", "stuck_or_timeout_failure"]}
+    service_job_retries = {
+        "max": 2,
+        "when": [
+            "runner_system_failure",
+            "stuck_or_timeout_failure",
+            "script_failure",
+        ],
+    }
 
     if job_id > 0:
         if temp_storage_url_prefix:
@@ -1286,6 +1293,9 @@
             "SPACK_JOB_TEST_DIR": rel_job_test_dir,
             "SPACK_LOCAL_MIRROR_DIR": rel_local_mirror_dir,
            "SPACK_PIPELINE_TYPE": str(spack_pipeline_type),
+            "SPACK_CI_STACK_NAME": os.environ.get("SPACK_CI_STACK_NAME", "None"),
+            "SPACK_REBUILD_CHECK_UP_TO_DATE": str(prune_dag),
+            "SPACK_REBUILD_EVERYTHING": str(rebuild_everything),
         }
 
         if remote_mirror_override:
@@ -1343,13 +1353,11 @@
     sorted_output = {"no-specs-to-rebuild": noop_job}
 
     if known_broken_specs_encountered:
-        error_msg = (
-            "Pipeline generation failed due to the presence of the "
-            "following specs that are known to be broken in develop:\n"
-        )
-        for broken_spec in known_broken_specs_encountered:
-            error_msg += "* {0}\n".format(broken_spec)
-        tty.die(error_msg)
+        tty.error("This pipeline generated hashes known to be broken on develop:")
+        display_broken_spec_messages(broken_specs_url, known_broken_specs_encountered)
+
+        if not rebuild_everything:
+            sys.exit(1)
 
     with open(output_file, "w") as outf:
         outf.write(syaml.dump_config(sorted_output, default_flow_style=True))
@@ -1461,64 +1469,6 @@ def configure_compilers(compiler_action, scope=None):
     return None
 
 
-def get_concrete_specs(env, root_spec, job_name, compiler_action):
-    """Build a dictionary of concrete specs relevant to a particular
-    rebuild job. This includes the root spec and the spec to be
-    rebuilt (which could be the same).
-
-    Arguments:
-
-        env (spack.environment.Environment): Activated spack environment
-            used to get concrete root spec by hash in case compiler_action
-            is anthing other than FIND_ANY.
-        root_spec (str): If compiler_action is FIND_ANY root_spec is
-            a string representation which can be turned directly into
-            a spec, otherwise, it's a hash used to index the activated
-            spack environment.
-        job_name (str): Name of package to be built, used to index the
-            concrete root spec and produce the concrete spec to be
-            built.
-        compiler_action (str): Determines how to interpret the root_spec
-            parameter, either as a string representation as a hash.
-
-    Returns:
-
-    .. code-block:: JSON
-
-       {
-           "root": "<spec>",
-           "<job-pkg-name>": "<spec>",
-       }
-
-    """
-    spec_map = {
-        "root": None,
-    }
-
-    if compiler_action == "FIND_ANY":
-        # This corresponds to a bootstrapping phase where we need to
-        # rely on any available compiler to build the package (i.e. the
-        # compiler needed to be stripped from the spec when we generated
-        # the job), and thus we need to concretize the root spec again.
-        tty.debug("About to concretize {0}".format(root_spec))
-        concrete_root = Spec(root_spec).concretized()
-        tty.debug("Resulting concrete root: {0}".format(concrete_root))
-    else:
-        # in this case, either we're relying on Spack to install missing
-        # compiler bootstrapped in a previous phase, or else we only had one
-        # phase (like a site which already knows what compilers are available
-        # on it's runners), so we don't want to concretize that root spec
-        # again. The reason we take this path in the first case (bootstrapped
-        # compiler), is that we can't concretize a spec at this point if we're
-        # going to ask spack to "install_missing_compilers".
-        concrete_root = env.specs_by_hash[root_spec]
-
-    spec_map["root"] = concrete_root
-    spec_map[job_name] = concrete_root[job_name]
-
-    return spec_map
-
-
 def _push_mirror_contents(env, specfile_path, sign_binaries, mirror_url):
     """Unchecked version of the public API, for easier mocking"""
     unsigned = not sign_binaries
@@ -1567,6 +1517,19 @@ def push_mirror_contents(env, specfile_path, mirror_url, sign_binaries):
         raise inst
 
 
+def remove_other_mirrors(mirrors_to_keep, scope=None):
+    """Remove all mirrors from the given config scope, the exceptions being
+    any listed in in mirrors_to_keep, which is a list of mirror urls.
+    """
+    mirrors_to_remove = []
+    for name, mirror_url in spack.config.get("mirrors", scope=scope).items():
+        if mirror_url not in mirrors_to_keep:
+            mirrors_to_remove.append(name)
+
+    for mirror_name in mirrors_to_remove:
+        spack.mirror.remove(mirror_name, scope)
+
+
 def copy_files_to_artifacts(src, artifacts_dir):
     """
     Copy file(s) to the given artifacts directory
@@ -1982,26 +1945,35 @@ def reproduce_ci_job(url, work_dir):
     print("".join(inst_list))
 
 
-def process_command(cmd, cmd_args, repro_dir):
+def process_command(name, commands, repro_dir):
     """
     Create a script for and run the command. Copy the script to the
     reproducibility directory.
 
     Arguments:
-        cmd (str): name of the command being processed
-        cmd_args (list): string arguments to pass to the command
+        name (str): name of the command being processed
+        commands (list): list of arguments for single command or list of lists of
+            arguments for multiple commands. No shell escape is performed.
         repro_dir (str): Job reproducibility directory
 
     Returns: the exit code from processing the command
     """
-    tty.debug("spack {0} arguments: {1}".format(cmd, cmd_args))
+    tty.debug("spack {0} arguments: {1}".format(name, commands))
+
+    if len(commands) == 0 or isinstance(commands[0], string_types):
+        commands = [commands]
+
+    # Create a string [command 1] && [command 2] && ... && [command n] with commands
+    # quoted using double quotes.
+    args_to_string = lambda args: " ".join('"{}"'.format(arg) for arg in args)
+    full_command = " && ".join(map(args_to_string, commands))
 
     # Write the command to a shell script
-    script = "{0}.sh".format(cmd)
+    script = "{0}.sh".format(name)
     with open(script, "w") as fd:
-        fd.write("#!/bin/bash\n\n")
-        fd.write("\n# spack {0} command\n".format(cmd))
-        fd.write(" ".join(['"{0}"'.format(i) for i in cmd_args]))
+        fd.write("#!/bin/sh\n\n")
+        fd.write("\n# spack {0} command\n".format(name))
+        fd.write(full_command)
         fd.write("\n")
 
     st = os.stat(script)
@@ -2013,15 +1985,15 @@ def process_command(cmd, cmd_args, repro_dir):
     # Run the generated install.sh shell script as if it were being run in
     # a login shell.
     try:
-        cmd_process = subprocess.Popen(["bash", "./{0}".format(script)])
+        cmd_process = subprocess.Popen(["/bin/sh", "./{0}".format(script)])
         cmd_process.wait()
         exit_code = cmd_process.returncode
     except (ValueError, subprocess.CalledProcessError, OSError) as err:
-        tty.error("Encountered error running {0} script".format(cmd))
+        tty.error("Encountered error running {0} script".format(name))
         tty.error(err)
         exit_code = 1
 
-    tty.debug("spack {0} exited {1}".format(cmd, exit_code))
+    tty.debug("spack {0} exited {1}".format(name, exit_code))
 
     return exit_code
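
The command-joining logic introduced above can be exercised in isolation (the
argument lists below are made up for illustration):

    commands = [["spack", "config", "blame", "mirrors"], ["spack", "install"]]

    # Same quoting/joining as in process_command: each argument is wrapped in
    # double quotes and consecutive commands are chained with " && ".
    args_to_string = lambda args: " ".join('"{}"'.format(arg) for arg in args)
    full_command = " && ".join(map(args_to_string, commands))
    print(full_command)
    # "spack" "config" "blame" "mirrors" && "spack" "install"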
@@ -2060,6 +2032,75 @@ def create_buildcache(**kwargs):
     push_mirror_contents(env, json_path, pipeline_mirror_url, sign_binaries)
 
 
+def write_broken_spec(url, pkg_name, stack_name, job_url, pipeline_url, spec_dict):
+    """Given a url to write to and the details of the failed job, write an entry
+    in the broken specs list.
+    """
+    tmpdir = tempfile.mkdtemp()
+    file_path = os.path.join(tmpdir, "broken.txt")
+
+    broken_spec_details = {
+        "broken-spec": {
+            "job-name": pkg_name,
+            "job-stack": stack_name,
+            "job-url": job_url,
+            "pipeline-url": pipeline_url,
+            "concrete-spec-dict": spec_dict,
+        }
+    }
+
+    try:
+        with open(file_path, "w") as fd:
+            fd.write(syaml.dump(broken_spec_details))
+        web_util.push_to_url(
+            file_path,
+            url,
+            keep_original=False,
+            extra_args={"ContentType": "text/plain"},
+        )
+    except Exception as err:
+        # If there is an S3 error (e.g., access denied or connection
+        # error), the first non boto-specific class in the exception
+        # hierarchy is Exception. Just print a warning and return
+        msg = "Error writing to broken specs list {0}: {1}".format(url, err)
+        tty.warn(msg)
+    finally:
+        shutil.rmtree(tmpdir)
+
+
+def read_broken_spec(broken_spec_url):
+    """Read data from broken specs file located at the url, return as a yaml
+    object.
+    """
+    try:
+        _, _, fs = web_util.read_from_url(broken_spec_url)
+    except (URLError, web_util.SpackWebError, HTTPError):
+        tty.warn("Unable to read broken spec from {0}".format(broken_spec_url))
+        return None
+
+    broken_spec_contents = codecs.getreader("utf-8")(fs).read()
+    return syaml.load(broken_spec_contents)
+
+
+def display_broken_spec_messages(base_url, hashes):
+    """Fetch the broken spec file for each of the hashes under the base_url and
+    print a message with some details about each one.
+    """
+    broken_specs = [(h, read_broken_spec(url_util.join(base_url, h))) for h in hashes]
+    for spec_hash, broken_spec in [tup for tup in broken_specs if tup[1]]:
+        details = broken_spec["broken-spec"]
+        if "job-name" in details:
+            item_name = "{0}/{1}".format(details["job-name"], spec_hash[:7])
+        else:
+            item_name = spec_hash
+
+        if "job-stack" in details:
+            item_name = "{0} (in stack {1})".format(item_name, details["job-stack"])
+
+        msg = "  {0} was reported broken here: {1}".format(item_name, details["job-url"])
+        tty.msg(msg)
+
+
 def run_standalone_tests(**kwargs):
     """Run stand-alone tests on the current spec.
@@ -2095,8 +2136,9 @@ def run_standalone_tests(**kwargs):
     test_args = [
         "spack",
-        "-d",
-        "-v",
+        "--color=always",
+        "--backtrace",
+        "--verbose",
         "test",
         "run",
     ]

View File

@@ -234,7 +234,8 @@ def parse_specs(args, **kwargs):
         msg = e.message
         if e.long_message:
             msg += e.long_message
 
-        if unquoted_flags:
+        # Unquoted flags will be read as a variant or hash
+        if unquoted_flags and ("variant" in msg or "hash" in msg):
             msg += "\n\n"
             msg += unquoted_flags.report()
Some files were not shown because too many files have changed in this diff