Compare commits
119 Commits
bugfix/msv
...
v1.0.0-alp
Author | SHA1 | Date | |
---|---|---|---|
![]() |
1aaf606cd1 | ||
![]() |
0f9b1c85dd | ||
![]() |
eacdfef38f | ||
![]() |
1316e4a2e3 | ||
![]() |
de51c6b894 | ||
![]() |
9fb5878ebb | ||
![]() |
cfaf130115 | ||
![]() |
5dbbb52579 | ||
![]() |
964baf9402 | ||
![]() |
2e2f76819c | ||
![]() |
848816efa4 | ||
![]() |
035131749b | ||
![]() |
05cf9b32a5 | ||
![]() |
29d273d86e | ||
![]() |
e37a7b6c91 | ||
![]() |
626c5c59c6 | ||
![]() |
229945ed27 | ||
![]() |
6989bf9661 | ||
![]() |
bab2053d54 | ||
![]() |
cc8447968e | ||
![]() |
ae1f39339b | ||
![]() |
30ebf3595b | ||
![]() |
7800c4c51b | ||
![]() |
476f2a63e2 | ||
![]() |
745a0fac8a | ||
![]() |
660fff39eb | ||
![]() |
d4b5eb2be6 | ||
![]() |
ddeab9879e | ||
![]() |
5920d31b25 | ||
![]() |
f75555136b | ||
![]() |
922b9b0e50 | ||
![]() |
da916a944e | ||
![]() |
6d224d8a6f | ||
![]() |
15f0871a6f | ||
![]() |
99489c236f | ||
![]() |
e94ee8b2f3 | ||
![]() |
ac72170e91 | ||
![]() |
fc2793f98f | ||
![]() |
a8dd481bbf | ||
![]() |
56c685e374 | ||
![]() |
42c5cc4dc8 | ||
![]() |
e0d889fb91 | ||
![]() |
4566ad9c2b | ||
![]() |
dc12cfde75 | ||
![]() |
b3cc4b4cb3 | ||
![]() |
bb65d495d9 | ||
![]() |
8ccf626306 | ||
![]() |
11065ff318 | ||
![]() |
b9b7ef424c | ||
![]() |
91b09cfeb6 | ||
![]() |
57b8167ead | ||
![]() |
aa10284a0a | ||
![]() |
ec8c6e565d | ||
![]() |
324d427292 | ||
![]() |
4bd9ff2ef0 | ||
![]() |
95115d4290 | ||
![]() |
e5f8049f3d | ||
![]() |
da48fdd864 | ||
![]() |
f8117e8182 | ||
![]() |
533973671b | ||
![]() |
cf9a148708 | ||
![]() |
d6f0ce3e5a | ||
![]() |
d6cb54da4b | ||
![]() |
f6851a56e8 | ||
![]() |
95820a91b3 | ||
![]() |
c3ddea9061 | ||
![]() |
0bd8ca4e08 | ||
![]() |
2d40025ae3 | ||
![]() |
47d01c086c | ||
![]() |
00c04bd36a | ||
![]() |
0d6a5c0f06 | ||
![]() |
f379b304a1 | ||
![]() |
ec97e7e6fe | ||
![]() |
7f093d129b | ||
![]() |
3a67dfd9e8 | ||
![]() |
dfd28bc5c0 | ||
![]() |
0d8549e282 | ||
![]() |
895e3c453e | ||
![]() |
7a429af479 | ||
![]() |
32fc8c351d | ||
![]() |
eb85f2e862 | ||
![]() |
28d42eed5e | ||
![]() |
f79354c312 | ||
![]() |
5492b9cc6d | ||
![]() |
5260acc53b | ||
![]() |
040b827dad | ||
![]() |
54bca16130 | ||
![]() |
bec6b06c16 | ||
![]() |
27e2e146e2 | ||
![]() |
1ddc0e6b52 | ||
![]() |
f56aaf1fc3 | ||
![]() |
5b3f4387b3 | ||
![]() |
55196252dd | ||
![]() |
d3a7a73a00 | ||
![]() |
21afe2af1f | ||
![]() |
646c2f42c4 | ||
![]() |
1ab3e8c776 | ||
![]() |
49978d5b6c | ||
![]() |
a1866d7a4b | ||
![]() |
6674ce6dc4 | ||
![]() |
f729353ac3 | ||
![]() |
73e0cf07cb | ||
![]() |
8842df3f94 | ||
![]() |
8d3132b26b | ||
![]() |
e342de41b2 | ||
![]() |
0415390270 | ||
![]() |
5b7caba4a6 | ||
![]() |
f59c120e0a | ||
![]() |
a0cae04302 | ||
![]() |
496ae0bb31 | ||
![]() |
4c06f83c60 | ||
![]() |
5c66cc71fe | ||
![]() |
0b11775529 | ||
![]() |
a10f3295bc | ||
![]() |
285926cb69 | ||
![]() |
da02a4a606 | ||
![]() |
37dd777a51 | ||
![]() |
7dc824d1ff | ||
![]() |
78744b11ae |
2
.flake8
2
.flake8
@@ -28,7 +28,7 @@ max-line-length = 99
|
|||||||
# - F821: undefined name `name`
|
# - F821: undefined name `name`
|
||||||
#
|
#
|
||||||
per-file-ignores =
|
per-file-ignores =
|
||||||
var/spack/*/package.py:F403,F405,F821
|
var/spack/repos/*/package.py:F403,F405,F821
|
||||||
*-ci-package.py:F403,F405,F821
|
*-ci-package.py:F403,F405,F821
|
||||||
|
|
||||||
# exclude things we usually do not want linting for.
|
# exclude things we usually do not want linting for.
|
||||||
|
3
.gitattributes
vendored
3
.gitattributes
vendored
@@ -1,3 +1,4 @@
|
|||||||
*.py diff=python
|
*.py diff=python
|
||||||
|
*.lp linguist-language=Prolog
|
||||||
lib/spack/external/* linguist-vendored
|
lib/spack/external/* linguist-vendored
|
||||||
*.bat text eol=crlf
|
*.bat text eol=crlf
|
1
.github/workflows/audit.yaml
vendored
1
.github/workflows/audit.yaml
vendored
@@ -59,6 +59,7 @@ jobs:
|
|||||||
- name: Package audits (without coverage)
|
- name: Package audits (without coverage)
|
||||||
if: ${{ runner.os == 'Windows' }}
|
if: ${{ runner.os == 'Windows' }}
|
||||||
run: |
|
run: |
|
||||||
|
. share/spack/setup-env.sh
|
||||||
spack -d audit packages
|
spack -d audit packages
|
||||||
./share/spack/qa/validate_last_exit.ps1
|
./share/spack/qa/validate_last_exit.ps1
|
||||||
spack -d audit configs
|
spack -d audit configs
|
||||||
|
2
.github/workflows/bootstrap.yml
vendored
2
.github/workflows/bootstrap.yml
vendored
@@ -26,7 +26,7 @@ jobs:
|
|||||||
dnf install -y \
|
dnf install -y \
|
||||||
bzip2 curl file gcc-c++ gcc gcc-gfortran git gzip \
|
bzip2 curl file gcc-c++ gcc gcc-gfortran git gzip \
|
||||||
make patch unzip which xz python3 python3-devel tree \
|
make patch unzip which xz python3 python3-devel tree \
|
||||||
cmake bison bison-devel libstdc++-static gawk
|
cmake bison bison-devel libstdc++-static
|
||||||
- name: Setup OpenSUSE
|
- name: Setup OpenSUSE
|
||||||
if: ${{ matrix.image == 'opensuse/leap:latest' }}
|
if: ${{ matrix.image == 'opensuse/leap:latest' }}
|
||||||
run: |
|
run: |
|
||||||
|
39
.github/workflows/ci.yaml
vendored
39
.github/workflows/ci.yaml
vendored
@@ -9,7 +9,6 @@ on:
|
|||||||
branches:
|
branches:
|
||||||
- develop
|
- develop
|
||||||
- releases/**
|
- releases/**
|
||||||
merge_group:
|
|
||||||
|
|
||||||
concurrency:
|
concurrency:
|
||||||
group: ci-${{github.ref}}-${{github.event.pull_request.number || github.run_number}}
|
group: ci-${{github.ref}}-${{github.event.pull_request.number || github.run_number}}
|
||||||
@@ -26,33 +25,29 @@ jobs:
|
|||||||
packages: ${{ steps.filter.outputs.packages }}
|
packages: ${{ steps.filter.outputs.packages }}
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||||
if: ${{ github.event_name == 'push' || github.event_name == 'merge_group' }}
|
if: ${{ github.event_name == 'push' }}
|
||||||
with:
|
with:
|
||||||
fetch-depth: 0
|
fetch-depth: 0
|
||||||
# For pull requests it's not necessary to checkout the code
|
# For pull requests it's not necessary to checkout the code
|
||||||
- uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36
|
- uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36
|
||||||
id: filter
|
id: filter
|
||||||
with:
|
with:
|
||||||
# For merge group events, compare against the target branch (main)
|
|
||||||
base: ${{ github.event_name == 'merge_group' && github.event.merge_group.base_ref || '' }}
|
|
||||||
# For merge group events, use the merge group head ref
|
|
||||||
ref: ${{ github.event_name == 'merge_group' && github.event.merge_group.head_sha || github.ref }}
|
|
||||||
# See https://github.com/dorny/paths-filter/issues/56 for the syntax used below
|
# See https://github.com/dorny/paths-filter/issues/56 for the syntax used below
|
||||||
# Don't run if we only modified packages in the
|
# Don't run if we only modified packages in the
|
||||||
# built-in repository or documentation
|
# built-in repository or documentation
|
||||||
filters: |
|
filters: |
|
||||||
bootstrap:
|
bootstrap:
|
||||||
- 'var/spack/repos/spack_repo/builtin/packages/clingo-bootstrap/**'
|
- 'var/spack/repos/builtin/packages/clingo-bootstrap/**'
|
||||||
- 'var/spack/repos/spack_repo/builtin/packages/clingo/**'
|
- 'var/spack/repos/builtin/packages/clingo/**'
|
||||||
- 'var/spack/repos/spack_repo/builtin/packages/python/**'
|
- 'var/spack/repos/builtin/packages/python/**'
|
||||||
- 'var/spack/repos/spack_repo/builtin/packages/re2c/**'
|
- 'var/spack/repos/builtin/packages/re2c/**'
|
||||||
- 'var/spack/repos/spack_repo/builtin/packages/gnupg/**'
|
- 'var/spack/repos/builtin/packages/gnupg/**'
|
||||||
- 'var/spack/repos/spack_repo/builtin/packages/libassuan/**'
|
- 'var/spack/repos/builtin/packages/libassuan/**'
|
||||||
- 'var/spack/repos/spack_repo/builtin/packages/libgcrypt/**'
|
- 'var/spack/repos/builtin/packages/libgcrypt/**'
|
||||||
- 'var/spack/repos/spack_repo/builtin/packages/libgpg-error/**'
|
- 'var/spack/repos/builtin/packages/libgpg-error/**'
|
||||||
- 'var/spack/repos/spack_repo/builtin/packages/libksba/**'
|
- 'var/spack/repos/builtin/packages/libksba/**'
|
||||||
- 'var/spack/repos/spack_repo/builtin/packages/npth/**'
|
- 'var/spack/repos/builtin/packages/npth/**'
|
||||||
- 'var/spack/repos/spack_repo/builtin/packages/pinentry/**'
|
- 'var/spack/repos/builtin/packages/pinentry/**'
|
||||||
- 'lib/spack/**'
|
- 'lib/spack/**'
|
||||||
- 'share/spack/**'
|
- 'share/spack/**'
|
||||||
- '.github/workflows/bootstrap.yml'
|
- '.github/workflows/bootstrap.yml'
|
||||||
@@ -81,11 +76,10 @@ jobs:
|
|||||||
|
|
||||||
prechecks:
|
prechecks:
|
||||||
needs: [ changes ]
|
needs: [ changes ]
|
||||||
uses: ./.github/workflows/prechecks.yml
|
uses: ./.github/workflows/valid-style.yml
|
||||||
secrets: inherit
|
secrets: inherit
|
||||||
with:
|
with:
|
||||||
with_coverage: ${{ needs.changes.outputs.core }}
|
with_coverage: ${{ needs.changes.outputs.core }}
|
||||||
with_packages: ${{ needs.changes.outputs.packages }}
|
|
||||||
|
|
||||||
import-check:
|
import-check:
|
||||||
needs: [ changes ]
|
needs: [ changes ]
|
||||||
@@ -99,7 +93,7 @@ jobs:
|
|||||||
- name: Success
|
- name: Success
|
||||||
run: |
|
run: |
|
||||||
if [ "${{ needs.prechecks.result }}" == "failure" ] || [ "${{ needs.prechecks.result }}" == "canceled" ]; then
|
if [ "${{ needs.prechecks.result }}" == "failure" ] || [ "${{ needs.prechecks.result }}" == "canceled" ]; then
|
||||||
echo "Unit tests failed."
|
echo "Unit tests failed."
|
||||||
exit 1
|
exit 1
|
||||||
else
|
else
|
||||||
exit 0
|
exit 0
|
||||||
@@ -107,7 +101,6 @@ jobs:
|
|||||||
|
|
||||||
coverage:
|
coverage:
|
||||||
needs: [ unit-tests, prechecks ]
|
needs: [ unit-tests, prechecks ]
|
||||||
if: ${{ needs.changes.outputs.core }}
|
|
||||||
uses: ./.github/workflows/coverage.yml
|
uses: ./.github/workflows/coverage.yml
|
||||||
secrets: inherit
|
secrets: inherit
|
||||||
|
|
||||||
@@ -120,10 +113,10 @@ jobs:
|
|||||||
- name: Status summary
|
- name: Status summary
|
||||||
run: |
|
run: |
|
||||||
if [ "${{ needs.unit-tests.result }}" == "failure" ] || [ "${{ needs.unit-tests.result }}" == "canceled" ]; then
|
if [ "${{ needs.unit-tests.result }}" == "failure" ] || [ "${{ needs.unit-tests.result }}" == "canceled" ]; then
|
||||||
echo "Unit tests failed."
|
echo "Unit tests failed."
|
||||||
exit 1
|
exit 1
|
||||||
elif [ "${{ needs.bootstrap.result }}" == "failure" ] || [ "${{ needs.bootstrap.result }}" == "canceled" ]; then
|
elif [ "${{ needs.bootstrap.result }}" == "failure" ] || [ "${{ needs.bootstrap.result }}" == "canceled" ]; then
|
||||||
echo "Bootstrap tests failed."
|
echo "Bootstrap tests failed."
|
||||||
exit 1
|
exit 1
|
||||||
else
|
else
|
||||||
exit 0
|
exit 0
|
||||||
|
108
.github/workflows/prechecks.yml
vendored
108
.github/workflows/prechecks.yml
vendored
@@ -1,108 +0,0 @@
|
|||||||
name: prechecks
|
|
||||||
|
|
||||||
on:
|
|
||||||
workflow_call:
|
|
||||||
inputs:
|
|
||||||
with_coverage:
|
|
||||||
required: true
|
|
||||||
type: string
|
|
||||||
with_packages:
|
|
||||||
required: true
|
|
||||||
type: string
|
|
||||||
|
|
||||||
concurrency:
|
|
||||||
group: style-${{github.ref}}-${{github.event.pull_request.number || github.run_number}}
|
|
||||||
cancel-in-progress: true
|
|
||||||
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
# Validate that the code can be run on all the Python versions supported by Spack
|
|
||||||
validate:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
|
||||||
- uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
|
|
||||||
with:
|
|
||||||
python-version: '3.13'
|
|
||||||
cache: 'pip'
|
|
||||||
cache-dependency-path: '.github/workflows/requirements/style/requirements.txt'
|
|
||||||
- name: Install Python Packages
|
|
||||||
run: |
|
|
||||||
pip install -r .github/workflows/requirements/style/requirements.txt
|
|
||||||
- name: vermin (Spack's Core)
|
|
||||||
run: |
|
|
||||||
vermin --backport importlib --backport argparse --violations --backport typing -t=3.6- -vvv lib/spack/spack/ lib/spack/llnl/ bin/
|
|
||||||
- name: vermin (Repositories)
|
|
||||||
run: |
|
|
||||||
vermin --backport importlib --backport argparse --violations --backport typing -t=3.6- -vvv var/spack/repos var/spack/test_repos
|
|
||||||
|
|
||||||
# Run style checks on the files that have been changed
|
|
||||||
style:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
|
||||||
with:
|
|
||||||
fetch-depth: 2
|
|
||||||
- uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
|
|
||||||
with:
|
|
||||||
python-version: '3.13'
|
|
||||||
cache: 'pip'
|
|
||||||
cache-dependency-path: '.github/workflows/requirements/style/requirements.txt'
|
|
||||||
- name: Install Python packages
|
|
||||||
run: |
|
|
||||||
pip install -r .github/workflows/requirements/style/requirements.txt
|
|
||||||
- name: Run style tests
|
|
||||||
run: |
|
|
||||||
bin/spack style --base HEAD^1
|
|
||||||
bin/spack license verify
|
|
||||||
pylint -j $(nproc) --disable=all --enable=unspecified-encoding --ignore-paths=lib/spack/external lib
|
|
||||||
|
|
||||||
audit:
|
|
||||||
uses: ./.github/workflows/audit.yaml
|
|
||||||
secrets: inherit
|
|
||||||
with:
|
|
||||||
with_coverage: ${{ inputs.with_coverage }}
|
|
||||||
python_version: '3.13'
|
|
||||||
|
|
||||||
verify-checksums:
|
|
||||||
# do not run if the commit message or PR description contains [skip-verify-checksums]
|
|
||||||
if: >-
|
|
||||||
${{ inputs.with_packages == 'true' &&
|
|
||||||
!contains(github.event.pull_request.body, '[skip-verify-checksums]') &&
|
|
||||||
!contains(github.event.head_commit.message, '[skip-verify-checksums]') }}
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
|
|
||||||
with:
|
|
||||||
fetch-depth: 2
|
|
||||||
- name: Verify Added Checksums
|
|
||||||
run: |
|
|
||||||
bin/spack ci verify-versions HEAD^1 HEAD
|
|
||||||
|
|
||||||
# Check that spack can bootstrap the development environment on Python 3.6 - RHEL8
|
|
||||||
bootstrap-dev-rhel8:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
container: registry.access.redhat.com/ubi8/ubi
|
|
||||||
steps:
|
|
||||||
- name: Install dependencies
|
|
||||||
run: |
|
|
||||||
dnf install -y \
|
|
||||||
bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
|
|
||||||
make patch tcl unzip which xz
|
|
||||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
|
||||||
- name: Setup repo and non-root user
|
|
||||||
run: |
|
|
||||||
git --version
|
|
||||||
git config --global --add safe.directory '*'
|
|
||||||
git fetch --unshallow
|
|
||||||
. .github/workflows/bin/setup_git.sh
|
|
||||||
useradd spack-test
|
|
||||||
chown -R spack-test .
|
|
||||||
- name: Bootstrap Spack development environment
|
|
||||||
shell: runuser -u spack-test -- bash {0}
|
|
||||||
run: |
|
|
||||||
source share/spack/setup-env.sh
|
|
||||||
spack debug report
|
|
||||||
spack -d bootstrap now --dev
|
|
||||||
spack -d style -t black
|
|
||||||
spack unit-test -V
|
|
@@ -1,8 +1,7 @@
|
|||||||
black==25.1.0
|
black==25.1.0
|
||||||
clingo==5.8.0
|
clingo==5.7.1
|
||||||
flake8==7.2.0
|
flake8==7.1.2
|
||||||
isort==6.0.1
|
isort==6.0.0
|
||||||
mypy==1.15.0
|
mypy==1.15.0
|
||||||
types-six==1.17.0.20250403
|
types-six==1.17.0.20241205
|
||||||
vermin==1.6.0
|
vermin==1.6.0
|
||||||
pylint==3.3.7
|
|
||||||
|
3
.github/workflows/unit_tests.yaml
vendored
3
.github/workflows/unit_tests.yaml
vendored
@@ -19,6 +19,9 @@ jobs:
|
|||||||
on_develop:
|
on_develop:
|
||||||
- ${{ github.ref == 'refs/heads/develop' }}
|
- ${{ github.ref == 'refs/heads/develop' }}
|
||||||
include:
|
include:
|
||||||
|
- python-version: '3.6'
|
||||||
|
os: ubuntu-20.04
|
||||||
|
on_develop: ${{ github.ref == 'refs/heads/develop' }}
|
||||||
- python-version: '3.7'
|
- python-version: '3.7'
|
||||||
os: ubuntu-22.04
|
os: ubuntu-22.04
|
||||||
on_develop: ${{ github.ref == 'refs/heads/develop' }}
|
on_develop: ${{ github.ref == 'refs/heads/develop' }}
|
||||||
|
106
.github/workflows/valid-style.yml
vendored
Normal file
106
.github/workflows/valid-style.yml
vendored
Normal file
@@ -0,0 +1,106 @@
|
|||||||
|
name: style
|
||||||
|
|
||||||
|
on:
|
||||||
|
workflow_call:
|
||||||
|
inputs:
|
||||||
|
with_coverage:
|
||||||
|
required: true
|
||||||
|
type: string
|
||||||
|
|
||||||
|
concurrency:
|
||||||
|
group: style-${{github.ref}}-${{github.event.pull_request.number || github.run_number}}
|
||||||
|
cancel-in-progress: true
|
||||||
|
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
# Validate that the code can be run on all the Python versions supported by Spack
|
||||||
|
validate:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||||
|
- uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
|
||||||
|
with:
|
||||||
|
python-version: '3.13'
|
||||||
|
cache: 'pip'
|
||||||
|
- name: Install Python Packages
|
||||||
|
run: |
|
||||||
|
pip install --upgrade pip setuptools
|
||||||
|
pip install -r .github/workflows/requirements/style/requirements.txt
|
||||||
|
- name: vermin (Spack's Core)
|
||||||
|
run: vermin --backport importlib --backport argparse --violations --backport typing -t=3.6- -vvv lib/spack/spack/ lib/spack/llnl/ bin/
|
||||||
|
- name: vermin (Repositories)
|
||||||
|
run: vermin --backport importlib --backport argparse --violations --backport typing -t=3.6- -vvv var/spack/repos
|
||||||
|
# Run style checks on the files that have been changed
|
||||||
|
style:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||||
|
with:
|
||||||
|
fetch-depth: 0
|
||||||
|
- uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
|
||||||
|
with:
|
||||||
|
python-version: '3.13'
|
||||||
|
cache: 'pip'
|
||||||
|
- name: Install Python packages
|
||||||
|
run: |
|
||||||
|
pip install --upgrade pip setuptools
|
||||||
|
pip install -r .github/workflows/requirements/style/requirements.txt
|
||||||
|
- name: Setup git configuration
|
||||||
|
run: |
|
||||||
|
# Need this for the git tests to succeed.
|
||||||
|
git --version
|
||||||
|
. .github/workflows/bin/setup_git.sh
|
||||||
|
- name: Run style tests
|
||||||
|
run: |
|
||||||
|
share/spack/qa/run-style-tests
|
||||||
|
audit:
|
||||||
|
uses: ./.github/workflows/audit.yaml
|
||||||
|
secrets: inherit
|
||||||
|
with:
|
||||||
|
with_coverage: ${{ inputs.with_coverage }}
|
||||||
|
python_version: '3.13'
|
||||||
|
# Check that spack can bootstrap the development environment on Python 3.6 - RHEL8
|
||||||
|
bootstrap-dev-rhel8:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
container: registry.access.redhat.com/ubi8/ubi
|
||||||
|
steps:
|
||||||
|
- name: Install dependencies
|
||||||
|
run: |
|
||||||
|
dnf install -y \
|
||||||
|
bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
|
||||||
|
make patch tcl unzip which xz
|
||||||
|
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||||
|
- name: Setup repo and non-root user
|
||||||
|
run: |
|
||||||
|
git --version
|
||||||
|
git config --global --add safe.directory '*'
|
||||||
|
git fetch --unshallow
|
||||||
|
. .github/workflows/bin/setup_git.sh
|
||||||
|
useradd spack-test
|
||||||
|
chown -R spack-test .
|
||||||
|
- name: Bootstrap Spack development environment
|
||||||
|
shell: runuser -u spack-test -- bash {0}
|
||||||
|
run: |
|
||||||
|
source share/spack/setup-env.sh
|
||||||
|
spack debug report
|
||||||
|
spack -d bootstrap now --dev
|
||||||
|
spack -d style -t black
|
||||||
|
spack unit-test -V
|
||||||
|
|
||||||
|
# Further style checks from pylint
|
||||||
|
pylint:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||||
|
with:
|
||||||
|
fetch-depth: 0
|
||||||
|
- uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
|
||||||
|
with:
|
||||||
|
python-version: '3.13'
|
||||||
|
cache: 'pip'
|
||||||
|
- name: Install Python packages
|
||||||
|
run: |
|
||||||
|
pip install --upgrade pip setuptools pylint
|
||||||
|
- name: Pylint (Spack Core)
|
||||||
|
run: |
|
||||||
|
pylint -j 4 --disable=all --enable=unspecified-encoding --ignore-paths=lib/spack/external lib
|
1
.gitignore
vendored
1
.gitignore
vendored
@@ -201,6 +201,7 @@ tramp
|
|||||||
|
|
||||||
# Org-mode
|
# Org-mode
|
||||||
.org-id-locations
|
.org-id-locations
|
||||||
|
*_archive
|
||||||
|
|
||||||
# flymake-mode
|
# flymake-mode
|
||||||
*_flymake.*
|
*_flymake.*
|
||||||
|
34
README.md
34
README.md
@@ -46,42 +46,18 @@ See the
|
|||||||
[Feature Overview](https://spack.readthedocs.io/en/latest/features.html)
|
[Feature Overview](https://spack.readthedocs.io/en/latest/features.html)
|
||||||
for examples and highlights.
|
for examples and highlights.
|
||||||
|
|
||||||
Installation
|
To install spack and your first package, make sure you have Python & Git.
|
||||||
----------------
|
|
||||||
|
|
||||||
To install spack, first make sure you have Python & Git.
|
|
||||||
Then:
|
Then:
|
||||||
|
|
||||||
```bash
|
$ git clone -c feature.manyFiles=true --depth=2 https://github.com/spack/spack.git
|
||||||
git clone -c feature.manyFiles=true --depth=2 https://github.com/spack/spack.git
|
$ cd spack/bin
|
||||||
```
|
$ ./spack install zlib
|
||||||
|
|
||||||
<details>
|
|
||||||
<summary>What are <code>manyFiles=true</code> and <code>--depth=2</code>?</summary>
|
|
||||||
<br>
|
|
||||||
|
|
||||||
|
> [!TIP]
|
||||||
> `-c feature.manyFiles=true` improves git's performance on repositories with 1,000+ files.
|
> `-c feature.manyFiles=true` improves git's performance on repositories with 1,000+ files.
|
||||||
>
|
>
|
||||||
> `--depth=2` prunes the git history to reduce the size of the Spack installation.
|
> `--depth=2` prunes the git history to reduce the size of the Spack installation.
|
||||||
|
|
||||||
</details>
|
|
||||||
|
|
||||||
```bash
|
|
||||||
# For bash/zsh/sh
|
|
||||||
. spack/share/spack/setup-env.sh
|
|
||||||
|
|
||||||
# For tcsh/csh
|
|
||||||
source spack/share/spack/setup-env.csh
|
|
||||||
|
|
||||||
# For fish
|
|
||||||
. spack/share/spack/setup-env.fish
|
|
||||||
```
|
|
||||||
|
|
||||||
```bash
|
|
||||||
# Now you're ready to install a package!
|
|
||||||
spack install zlib-ng
|
|
||||||
```
|
|
||||||
|
|
||||||
Documentation
|
Documentation
|
||||||
----------------
|
----------------
|
||||||
|
|
||||||
|
@@ -90,9 +90,10 @@ config:
|
|||||||
misc_cache: $user_cache_path/cache
|
misc_cache: $user_cache_path/cache
|
||||||
|
|
||||||
|
|
||||||
# Abort downloads after this many seconds if not data is received.
|
# Timeout in seconds used for downloading sources etc. This only applies
|
||||||
# Setting this to 0 will disable the timeout.
|
# to the connection phase and can be increased for slow connections or
|
||||||
connect_timeout: 30
|
# servers. 0 means no timeout.
|
||||||
|
connect_timeout: 10
|
||||||
|
|
||||||
|
|
||||||
# If this is false, tools like curl that use SSL will not verify
|
# If this is false, tools like curl that use SSL will not verify
|
||||||
|
2
etc/spack/defaults/darwin/concretizer.yaml
Normal file
2
etc/spack/defaults/darwin/concretizer.yaml
Normal file
@@ -0,0 +1,2 @@
|
|||||||
|
concretizer:
|
||||||
|
static_analysis: true
|
@@ -25,8 +25,6 @@ packages:
|
|||||||
glu: [apple-glu]
|
glu: [apple-glu]
|
||||||
unwind: [apple-libunwind]
|
unwind: [apple-libunwind]
|
||||||
uuid: [apple-libuuid]
|
uuid: [apple-libuuid]
|
||||||
apple-clang:
|
|
||||||
buildable: false
|
|
||||||
apple-gl:
|
apple-gl:
|
||||||
buildable: false
|
buildable: false
|
||||||
externals:
|
externals:
|
||||||
@@ -52,11 +50,8 @@ packages:
|
|||||||
- spec: apple-libuuid@1353.100.2
|
- spec: apple-libuuid@1353.100.2
|
||||||
prefix: /Library/Developer/CommandLineTools/SDKs/MacOSX.sdk
|
prefix: /Library/Developer/CommandLineTools/SDKs/MacOSX.sdk
|
||||||
c:
|
c:
|
||||||
prefer:
|
require: apple-clang
|
||||||
- apple-clang
|
|
||||||
cxx:
|
cxx:
|
||||||
prefer:
|
require: apple-clang
|
||||||
- apple-clang
|
|
||||||
fortran:
|
fortran:
|
||||||
prefer:
|
require: gcc
|
||||||
- gcc
|
|
||||||
|
@@ -19,14 +19,14 @@ packages:
|
|||||||
awk: [gawk]
|
awk: [gawk]
|
||||||
armci: [armcimpi]
|
armci: [armcimpi]
|
||||||
blas: [openblas, amdblis]
|
blas: [openblas, amdblis]
|
||||||
c: [gcc, llvm, intel-oneapi-compilers]
|
c: [gcc, llvm, intel-oneapi-compilers, xl, aocc]
|
||||||
cxx: [gcc, llvm, intel-oneapi-compilers]
|
cxx: [gcc, llvm, intel-oneapi-compilers, xl, aocc]
|
||||||
D: [ldc]
|
D: [ldc]
|
||||||
daal: [intel-oneapi-daal]
|
daal: [intel-oneapi-daal]
|
||||||
elf: [elfutils]
|
elf: [elfutils]
|
||||||
fftw-api: [fftw, amdfftw]
|
fftw-api: [fftw, amdfftw]
|
||||||
flame: [libflame, amdlibflame]
|
flame: [libflame, amdlibflame]
|
||||||
fortran: [gcc, llvm, intel-oneapi-compilers]
|
fortran: [gcc, llvm]
|
||||||
fortran-rt: [gcc-runtime, intel-oneapi-runtime]
|
fortran-rt: [gcc-runtime, intel-oneapi-runtime]
|
||||||
fuse: [libfuse]
|
fuse: [libfuse]
|
||||||
gl: [glx, osmesa]
|
gl: [glx, osmesa]
|
||||||
@@ -72,8 +72,6 @@ packages:
|
|||||||
permissions:
|
permissions:
|
||||||
read: world
|
read: world
|
||||||
write: user
|
write: user
|
||||||
cce:
|
|
||||||
buildable: false
|
|
||||||
cray-fftw:
|
cray-fftw:
|
||||||
buildable: false
|
buildable: false
|
||||||
cray-libsci:
|
cray-libsci:
|
||||||
@@ -88,23 +86,13 @@ packages:
|
|||||||
buildable: false
|
buildable: false
|
||||||
essl:
|
essl:
|
||||||
buildable: false
|
buildable: false
|
||||||
fj:
|
|
||||||
buildable: false
|
|
||||||
fujitsu-mpi:
|
fujitsu-mpi:
|
||||||
buildable: false
|
buildable: false
|
||||||
fujitsu-ssl2:
|
fujitsu-ssl2:
|
||||||
buildable: false
|
buildable: false
|
||||||
glibc:
|
|
||||||
buildable: false
|
|
||||||
hpcx-mpi:
|
hpcx-mpi:
|
||||||
buildable: false
|
buildable: false
|
||||||
iconv:
|
|
||||||
prefer: [libiconv]
|
|
||||||
mpt:
|
mpt:
|
||||||
buildable: false
|
buildable: false
|
||||||
musl:
|
|
||||||
buildable: false
|
|
||||||
spectrum-mpi:
|
spectrum-mpi:
|
||||||
buildable: false
|
buildable: false
|
||||||
xl:
|
|
||||||
buildable: false
|
|
||||||
|
@@ -11,4 +11,4 @@
|
|||||||
# ~/.spack/repos.yaml
|
# ~/.spack/repos.yaml
|
||||||
# -------------------------------------------------------------------------
|
# -------------------------------------------------------------------------
|
||||||
repos:
|
repos:
|
||||||
- $spack/var/spack/repos/spack_repo/builtin
|
- $spack/var/spack/repos/builtin
|
||||||
|
@@ -20,8 +20,3 @@ packages:
|
|||||||
cxx: [msvc]
|
cxx: [msvc]
|
||||||
mpi: [msmpi]
|
mpi: [msmpi]
|
||||||
gl: [wgl]
|
gl: [wgl]
|
||||||
mpi:
|
|
||||||
require:
|
|
||||||
- one_of: [msmpi]
|
|
||||||
msvc:
|
|
||||||
buildable: false
|
|
||||||
|
@@ -1291,61 +1291,55 @@ based on site policies.
|
|||||||
Variants
|
Variants
|
||||||
^^^^^^^^
|
^^^^^^^^
|
||||||
|
|
||||||
Variants are named options associated with a particular package and are
|
Variants are named options associated with a particular package. They are
|
||||||
typically used to enable or disable certain features at build time. They
|
optional, as each package must provide default values for each variant it
|
||||||
are optional, as each package must provide default values for each variant
|
makes available. Variants can be specified using
|
||||||
it makes available.
|
a flexible parameter syntax ``name=<value>``. For example,
|
||||||
|
``spack install mercury debug=True`` will install mercury built with debug
|
||||||
The names of variants available for a particular package depend on
|
flags. The names of particular variants available for a package depend on
|
||||||
what was provided by the package author. ``spack info <package>`` will
|
what was provided by the package author. ``spack info <package>`` will
|
||||||
provide information on what build variants are available.
|
provide information on what build variants are available.
|
||||||
|
|
||||||
There are different types of variants:
|
For compatibility with earlier versions, variants which happen to be
|
||||||
|
boolean in nature can be specified by a syntax that represents turning
|
||||||
|
options on and off. For example, in the previous spec we could have
|
||||||
|
supplied ``mercury +debug`` with the same effect of enabling the debug
|
||||||
|
compile time option for the libelf package.
|
||||||
|
|
||||||
1. Boolean variants. Typically used to enable or disable a feature at
|
Depending on the package a variant may have any default value. For
|
||||||
compile time. For example, a package might have a ``debug`` variant that
|
``mercury`` here, ``debug`` is ``False`` by default, and we turned it on
|
||||||
can be explicitly enabled with ``+debug`` and disabled with ``~debug``.
|
with ``debug=True`` or ``+debug``. If a variant is ``True`` by default
|
||||||
2. Single-valued variants. Often used to set defaults. For example, a package
|
you can turn it off by either adding ``-name`` or ``~name`` to the spec.
|
||||||
might have a ``compression`` variant that determines the default
|
|
||||||
compression algorithm, which users could set to ``compression=gzip`` or
|
|
||||||
``compression=zstd``.
|
|
||||||
3. Multi-valued variants. A package might have a ``fabrics`` variant that
|
|
||||||
determines which network fabrics to support. Users could set this to
|
|
||||||
``fabrics=verbs,ofi`` to enable both InfiniBand verbs and OpenFabrics
|
|
||||||
interfaces. The values are separated by commas.
|
|
||||||
|
|
||||||
The meaning of ``fabrics=verbs,ofi`` is to enable *at least* the specified
|
There are two syntaxes here because, depending on context, ``~`` and
|
||||||
fabrics, but other fabrics may be enabled as well. If the intent is to
|
``-`` may mean different things. In most shells, the following will
|
||||||
enable *only* the specified fabrics, then the ``fabrics:=verbs,ofi``
|
result in the shell performing home directory substitution:
|
||||||
syntax should be used with the ``:=`` operator.
|
|
||||||
|
|
||||||
.. note::
|
.. code-block:: sh
|
||||||
|
|
||||||
In certain shells, the the ``~`` character is expanded to the home
|
mpileaks ~debug # shell may try to substitute this!
|
||||||
directory. To avoid these issues, avoid whitespace between the package
|
mpileaks~debug # use this instead
|
||||||
name and the variant:
|
|
||||||
|
|
||||||
.. code-block:: sh
|
If there is a user called ``debug``, the ``~`` will be incorrectly
|
||||||
|
expanded. In this situation, you would want to write ``libelf
|
||||||
|
-debug``. However, ``-`` can be ambiguous when included after a
|
||||||
|
package name without spaces:
|
||||||
|
|
||||||
mpileaks ~debug # shell may try to substitute this!
|
.. code-block:: sh
|
||||||
mpileaks~debug # use this instead
|
|
||||||
|
|
||||||
Alternatively, you can use the ``-`` character to disable a variant,
|
mpileaks-debug # wrong!
|
||||||
but be aware that this requires a space between the package name and
|
mpileaks -debug # right
|
||||||
the variant:
|
|
||||||
|
|
||||||
.. code-block:: sh
|
Spack allows the ``-`` character to be part of package names, so the
|
||||||
|
above will be interpreted as a request for the ``mpileaks-debug``
|
||||||
|
package, not a request for ``mpileaks`` built without ``debug``
|
||||||
|
options. In this scenario, you should write ``mpileaks~debug`` to
|
||||||
|
avoid ambiguity.
|
||||||
|
|
||||||
mpileaks-debug # wrong: refers to a package named "mpileaks-debug"
|
When spack normalizes specs, it prints them out with no spaces boolean
|
||||||
mpileaks -debug # right: refers to a package named mpileaks with debug disabled
|
variants using the backwards compatibility syntax and uses only ``~``
|
||||||
|
for disabled boolean variants. The ``-`` and spaces on the command
|
||||||
As a last resort, ``debug=False`` can also be used to disable a boolean variant.
|
line are provided for convenience and legibility.
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
"""""""""""""""""""""""""""""""""""
|
|
||||||
Variant propagation to dependencies
|
|
||||||
"""""""""""""""""""""""""""""""""""
|
|
||||||
|
|
||||||
Spack allows variants to propagate their value to the package's
|
Spack allows variants to propagate their value to the package's
|
||||||
dependency by using ``++``, ``--``, and ``~~`` for boolean variants.
|
dependency by using ``++``, ``--``, and ``~~`` for boolean variants.
|
||||||
@@ -1415,29 +1409,27 @@ that executables will run without the need to set ``LD_LIBRARY_PATH``.
|
|||||||
|
|
||||||
.. code-block:: yaml
|
.. code-block:: yaml
|
||||||
|
|
||||||
packages:
|
compilers:
|
||||||
gcc:
|
- compiler:
|
||||||
externals:
|
spec: gcc@4.9.3
|
||||||
- spec: gcc@4.9.3
|
paths:
|
||||||
prefix: /opt/gcc
|
cc: /opt/gcc/bin/gcc
|
||||||
extra_attributes:
|
c++: /opt/gcc/bin/g++
|
||||||
compilers:
|
f77: /opt/gcc/bin/gfortran
|
||||||
c: /opt/gcc/bin/gcc
|
fc: /opt/gcc/bin/gfortran
|
||||||
cxx: /opt/gcc/bin/g++
|
environment:
|
||||||
fortran: /opt/gcc/bin/gfortran
|
unset:
|
||||||
environment:
|
- BAD_VARIABLE
|
||||||
unset:
|
set:
|
||||||
- BAD_VARIABLE
|
GOOD_VARIABLE_NUM: 1
|
||||||
set:
|
GOOD_VARIABLE_STR: good
|
||||||
GOOD_VARIABLE_NUM: 1
|
prepend_path:
|
||||||
GOOD_VARIABLE_STR: good
|
PATH: /path/to/binutils
|
||||||
prepend_path:
|
append_path:
|
||||||
PATH: /path/to/binutils
|
LD_LIBRARY_PATH: /opt/gcc/lib
|
||||||
append_path:
|
extra_rpaths:
|
||||||
LD_LIBRARY_PATH: /opt/gcc/lib
|
- /path/to/some/compiler/runtime/directory
|
||||||
extra_rpaths:
|
- /path/to/some/other/compiler/runtime/directory
|
||||||
- /path/to/some/compiler/runtime/directory
|
|
||||||
- /path/to/some/other/compiler/runtime/directory
|
|
||||||
|
|
||||||
|
|
||||||
^^^^^^^^^^^^^^^^^^^^^^^
|
^^^^^^^^^^^^^^^^^^^^^^^
|
||||||
@@ -1916,7 +1908,7 @@ diagnostics. Issues, if found, are reported to stdout:
|
|||||||
PKG-DIRECTIVES: 1 issue found
|
PKG-DIRECTIVES: 1 issue found
|
||||||
1. lammps: wrong variant in "conflicts" directive
|
1. lammps: wrong variant in "conflicts" directive
|
||||||
the variant 'adios' does not exist
|
the variant 'adios' does not exist
|
||||||
in /home/spack/spack/var/spack/repos/spack_repo/builtin/packages/lammps/package.py
|
in /home/spack/spack/var/spack/repos/builtin/packages/lammps/package.py
|
||||||
|
|
||||||
|
|
||||||
------------
|
------------
|
||||||
|
@@ -63,6 +63,7 @@ on these ideas for each distinct build system that Spack supports:
|
|||||||
build_systems/cudapackage
|
build_systems/cudapackage
|
||||||
build_systems/custompackage
|
build_systems/custompackage
|
||||||
build_systems/inteloneapipackage
|
build_systems/inteloneapipackage
|
||||||
|
build_systems/intelpackage
|
||||||
build_systems/rocmpackage
|
build_systems/rocmpackage
|
||||||
build_systems/sourceforgepackage
|
build_systems/sourceforgepackage
|
||||||
|
|
||||||
@@ -83,7 +84,7 @@ packages. You can quickly find examples by running:
|
|||||||
|
|
||||||
.. code-block:: console
|
.. code-block:: console
|
||||||
|
|
||||||
$ cd var/spack/repos/spack_repo/builtin/packages
|
$ cd var/spack/repos/builtin/packages
|
||||||
$ grep -l QMakePackage */package.py
|
$ grep -l QMakePackage */package.py
|
||||||
|
|
||||||
|
|
||||||
|
@@ -27,10 +27,10 @@ it could use the ``require`` directive as follows:
|
|||||||
|
|
||||||
Spack has a number of built-in bundle packages, such as:
|
Spack has a number of built-in bundle packages, such as:
|
||||||
|
|
||||||
* `AmdAocl <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/amd_aocl/package.py>`_
|
* `AmdAocl <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/amd-aocl/package.py>`_
|
||||||
* `EcpProxyApps <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/ecp_proxy_apps/package.py>`_
|
* `EcpProxyApps <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/ecp-proxy-apps/package.py>`_
|
||||||
* `Libc <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/libc/package.py>`_
|
* `Libc <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/libc/package.py>`_
|
||||||
* `Xsdk <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/xsdk/package.py>`_
|
* `Xsdk <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/xsdk/package.py>`_
|
||||||
|
|
||||||
where ``Xsdk`` also inherits from ``CudaPackage`` and ``RocmPackage`` and
|
where ``Xsdk`` also inherits from ``CudaPackage`` and ``RocmPackage`` and
|
||||||
``Libc`` is a virtual bundle package for the C standard library.
|
``Libc`` is a virtual bundle package for the C standard library.
|
||||||
|
@@ -199,7 +199,7 @@ a variant to control this:
|
|||||||
However, not every CMake package accepts all four of these options.
|
However, not every CMake package accepts all four of these options.
|
||||||
Grep the ``CMakeLists.txt`` file to see if the default values are
|
Grep the ``CMakeLists.txt`` file to see if the default values are
|
||||||
missing or replaced. For example, the
|
missing or replaced. For example, the
|
||||||
`dealii <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/dealii/package.py>`_
|
`dealii <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/dealii/package.py>`_
|
||||||
package overrides the default variant with:
|
package overrides the default variant with:
|
||||||
|
|
||||||
.. code-block:: python
|
.. code-block:: python
|
||||||
|
@@ -20,8 +20,8 @@ start is to look at the definitions of other build systems. This guide
|
|||||||
focuses mostly on how Spack's build systems work.
|
focuses mostly on how Spack's build systems work.
|
||||||
|
|
||||||
In this guide, we will be using the
|
In this guide, we will be using the
|
||||||
`perl <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/perl/package.py>`_ and
|
`perl <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/perl/package.py>`_ and
|
||||||
`cmake <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/cmake/package.py>`_
|
`cmake <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/cmake/package.py>`_
|
||||||
packages as examples. ``perl``'s build system is a hand-written
|
packages as examples. ``perl``'s build system is a hand-written
|
||||||
``Configure`` shell script, while ``cmake`` bootstraps itself during
|
``Configure`` shell script, while ``cmake`` bootstraps itself during
|
||||||
installation. Both of these packages require custom build systems.
|
installation. Both of these packages require custom build systems.
|
||||||
|
@@ -33,6 +33,9 @@ For more information on a specific package, do::
|
|||||||
|
|
||||||
spack info --all <package-name>
|
spack info --all <package-name>
|
||||||
|
|
||||||
|
Intel no longer releases new versions of Parallel Studio, which can be
|
||||||
|
used in Spack via the :ref:`intelpackage`. All of its components can
|
||||||
|
now be found in oneAPI.
|
||||||
|
|
||||||
Examples
|
Examples
|
||||||
========
|
========
|
||||||
@@ -47,8 +50,34 @@ Install the oneAPI compilers::
|
|||||||
|
|
||||||
spack install intel-oneapi-compilers
|
spack install intel-oneapi-compilers
|
||||||
|
|
||||||
|
Add the compilers to your ``compilers.yaml`` so spack can use them::
|
||||||
|
|
||||||
To build the ``patchelf`` Spack package with ``icx``, do::
|
spack compiler add `spack location -i intel-oneapi-compilers`/compiler/latest/bin
|
||||||
|
|
||||||
|
Verify that the compilers are available::
|
||||||
|
|
||||||
|
spack compiler list
|
||||||
|
|
||||||
|
Note that 2024 and later releases do not include ``icc``. Before 2024,
|
||||||
|
the package layout was different::
|
||||||
|
|
||||||
|
spack compiler add `spack location -i intel-oneapi-compilers`/compiler/latest/linux/bin/intel64
|
||||||
|
spack compiler add `spack location -i intel-oneapi-compilers`/compiler/latest/linux/bin
|
||||||
|
|
||||||
|
The ``intel-oneapi-compilers`` package includes 2 families of
|
||||||
|
compilers:
|
||||||
|
|
||||||
|
* ``intel``: ``icc``, ``icpc``, ``ifort``. Intel's *classic*
|
||||||
|
compilers. 2024 and later releases contain ``ifort``, but not
|
||||||
|
``icc`` and ``icpc``.
|
||||||
|
* ``oneapi``: ``icx``, ``icpx``, ``ifx``. Intel's new generation of
|
||||||
|
compilers based on LLVM.
|
||||||
|
|
||||||
|
To build the ``patchelf`` Spack package with ``icc``, do::
|
||||||
|
|
||||||
|
spack install patchelf%intel
|
||||||
|
|
||||||
|
To build with ``icx``, do ::
|
||||||
|
|
||||||
spack install patchelf%oneapi
|
spack install patchelf%oneapi
|
||||||
|
|
||||||
@@ -63,6 +92,15 @@ Install the oneAPI compilers::
|
|||||||
|
|
||||||
spack install intel-oneapi-compilers
|
spack install intel-oneapi-compilers
|
||||||
|
|
||||||
|
Add the compilers to your ``compilers.yaml`` so Spack can use them::
|
||||||
|
|
||||||
|
spack compiler add `spack location -i intel-oneapi-compilers`/compiler/latest/bin
|
||||||
|
spack compiler add `spack location -i intel-oneapi-compilers`/compiler/latest/bin
|
||||||
|
|
||||||
|
Verify that the compilers are available::
|
||||||
|
|
||||||
|
spack compiler list
|
||||||
|
|
||||||
Clone `spack-configs <https://github.com/spack/spack-configs>`_ repo and activate Intel oneAPI CPU environment::
|
Clone `spack-configs <https://github.com/spack/spack-configs>`_ repo and activate Intel oneAPI CPU environment::
|
||||||
|
|
||||||
git clone https://github.com/spack/spack-configs
|
git clone https://github.com/spack/spack-configs
|
||||||
@@ -111,7 +149,7 @@ Compilers
|
|||||||
---------
|
---------
|
||||||
|
|
||||||
To use the compilers, add some information about the installation to
|
To use the compilers, add some information about the installation to
|
||||||
``packages.yaml``. For most users, it is sufficient to do::
|
``compilers.yaml``. For most users, it is sufficient to do::
|
||||||
|
|
||||||
spack compiler add /opt/intel/oneapi/compiler/latest/bin
|
spack compiler add /opt/intel/oneapi/compiler/latest/bin
|
||||||
|
|
||||||
@@ -119,7 +157,7 @@ Adapt the paths above if you did not install the tools in the default
|
|||||||
location. After adding the compilers, using them is the same
|
location. After adding the compilers, using them is the same
|
||||||
as if you had installed the ``intel-oneapi-compilers`` package.
|
as if you had installed the ``intel-oneapi-compilers`` package.
|
||||||
Another option is to manually add the configuration to
|
Another option is to manually add the configuration to
|
||||||
``packages.yaml`` as described in :ref:`Compiler configuration
|
``compilers.yaml`` as described in :ref:`Compiler configuration
|
||||||
<compiler-config>`.
|
<compiler-config>`.
|
||||||
|
|
||||||
Before 2024, the directory structure was different::
|
Before 2024, the directory structure was different::
|
||||||
@@ -162,5 +200,15 @@ You can also use Spack-installed libraries. For example::
|
|||||||
Will update your environment CPATH, LIBRARY_PATH, and other
|
Will update your environment CPATH, LIBRARY_PATH, and other
|
||||||
environment variables for building an application with oneMKL.
|
environment variables for building an application with oneMKL.
|
||||||
|
|
||||||
|
More information
|
||||||
|
================
|
||||||
|
|
||||||
|
This section describes basic use of oneAPI, especially if it has
|
||||||
|
changed compared to Parallel Studio. See :ref:`intelpackage` for more
|
||||||
|
information on :ref:`intel-virtual-packages`,
|
||||||
|
:ref:`intel-unrelated-packages`,
|
||||||
|
:ref:`intel-integrating-external-libraries`, and
|
||||||
|
:ref:`using-mkl-tips`.
|
||||||
|
|
||||||
|
|
||||||
.. _`Intel installers`: https://software.intel.com/content/www/us/en/develop/documentation/installation-guide-for-intel-oneapi-toolkits-linux/top.html
|
.. _`Intel installers`: https://software.intel.com/content/www/us/en/develop/documentation/installation-guide-for-intel-oneapi-toolkits-linux/top.html
|
||||||
|
1077
lib/spack/docs/build_systems/intelpackage.rst
Normal file
1077
lib/spack/docs/build_systems/intelpackage.rst
Normal file
File diff suppressed because it is too large
Load Diff
@@ -91,14 +91,14 @@ there are any other variables you need to set, you can do this in the
|
|||||||
|
|
||||||
.. code-block:: python
|
.. code-block:: python
|
||||||
|
|
||||||
def setup_build_environment(self, env: EnvironmentModifications) -> None:
|
def setup_build_environment(self, env):
|
||||||
env.set("PREFIX", prefix)
|
env.set("PREFIX", prefix)
|
||||||
env.set("BLASLIB", spec["blas"].libs.ld_flags)
|
env.set("BLASLIB", spec["blas"].libs.ld_flags)
|
||||||
|
|
||||||
|
|
||||||
`cbench <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/cbench/package.py>`_
|
`cbench <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/cbench/package.py>`_
|
||||||
is a good example of a simple package that does this, while
|
is a good example of a simple package that does this, while
|
||||||
`esmf <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/esmf/package.py>`_
|
`esmf <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/esmf/package.py>`_
|
||||||
is a good example of a more complex package.
|
is a good example of a more complex package.
|
||||||
|
|
||||||
""""""""""""""""""""""
|
""""""""""""""""""""""
|
||||||
@@ -129,7 +129,7 @@ If you do need access to the spec, you can create a property like so:
|
|||||||
]
|
]
|
||||||
|
|
||||||
|
|
||||||
`cloverleaf <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/cloverleaf/package.py>`_
|
`cloverleaf <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/cloverleaf/package.py>`_
|
||||||
is a good example of a package that uses this strategy.
|
is a good example of a package that uses this strategy.
|
||||||
|
|
||||||
"""""""""""""
|
"""""""""""""
|
||||||
@@ -152,7 +152,7 @@ and a ``filter`` method to help with this. For example:
|
|||||||
makefile.filter(r"^\s*FC\s*=.*", f"FC = {spack_fc}")
|
makefile.filter(r"^\s*FC\s*=.*", f"FC = {spack_fc}")
|
||||||
|
|
||||||
|
|
||||||
`stream <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/stream/package.py>`_
|
`stream <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/stream/package.py>`_
|
||||||
is a good example of a package that involves editing a Makefile to set
|
is a good example of a package that involves editing a Makefile to set
|
||||||
the appropriate variables.
|
the appropriate variables.
|
||||||
|
|
||||||
@@ -192,7 +192,7 @@ well for storing variables:
|
|||||||
inc.write(f"{key} = {config[key]}\n")
|
inc.write(f"{key} = {config[key]}\n")
|
||||||
|
|
||||||
|
|
||||||
`elk <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/elk/package.py>`_
|
`elk <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/elk/package.py>`_
|
||||||
is a good example of a package that uses a dictionary to store
|
is a good example of a package that uses a dictionary to store
|
||||||
configuration variables.
|
configuration variables.
|
||||||
|
|
||||||
@@ -213,7 +213,7 @@ them in a list:
|
|||||||
inc.write(f"{var}\n")
|
inc.write(f"{var}\n")
|
||||||
|
|
||||||
|
|
||||||
`hpl <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/hpl/package.py>`_
|
`hpl <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/hpl/package.py>`_
|
||||||
is a good example of a package that uses a list to store
|
is a good example of a package that uses a list to store
|
||||||
configuration variables.
|
configuration variables.
|
||||||
|
|
||||||
|
@@ -12,7 +12,8 @@ The ``ROCmPackage`` is not a build system but a helper package. Like ``CudaPacka
|
|||||||
it provides standard variants, dependencies, and conflicts to facilitate building
|
it provides standard variants, dependencies, and conflicts to facilitate building
|
||||||
packages using GPUs though for AMD in this case.
|
packages using GPUs though for AMD in this case.
|
||||||
|
|
||||||
You can find the source for this package (and suggestions for setting up your ``packages.yaml`` file) at
|
You can find the source for this package (and suggestions for setting up your
|
||||||
|
``compilers.yaml`` and ``packages.yaml`` files) at
|
||||||
`<https://github.com/spack/spack/blob/develop/lib/spack/spack/build_systems/rocm.py>`__.
|
`<https://github.com/spack/spack/blob/develop/lib/spack/spack/build_systems/rocm.py>`__.
|
||||||
|
|
||||||
^^^^^^^^
|
^^^^^^^^
|
||||||
|
@@ -39,7 +39,7 @@ for "CRAN <package-name>" and you should quickly find what you want.
|
|||||||
If it isn't on CRAN, try Bioconductor, another common R repository.
|
If it isn't on CRAN, try Bioconductor, another common R repository.
|
||||||
|
|
||||||
For the purposes of this tutorial, we will be walking through
|
For the purposes of this tutorial, we will be walking through
|
||||||
`r-caret <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/r_caret/package.py>`_
|
`r-caret <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/r-caret/package.py>`_
|
||||||
as an example. If you search for "CRAN caret", you will quickly find what
|
as an example. If you search for "CRAN caret", you will quickly find what
|
||||||
you are looking for at https://cran.r-project.org/package=caret.
|
you are looking for at https://cran.r-project.org/package=caret.
|
||||||
https://cran.r-project.org is the main CRAN website. However, CRAN also
|
https://cran.r-project.org is the main CRAN website. However, CRAN also
|
||||||
@@ -337,7 +337,7 @@ Non-R dependencies
|
|||||||
^^^^^^^^^^^^^^^^^^
|
^^^^^^^^^^^^^^^^^^
|
||||||
|
|
||||||
Some packages depend on non-R libraries for linking. Check out the
|
Some packages depend on non-R libraries for linking. Check out the
|
||||||
`r-stringi <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/r_stringi/package.py>`_
|
`r-stringi <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/r-stringi/package.py>`_
|
||||||
package for an example: https://cloud.r-project.org/package=stringi.
|
package for an example: https://cloud.r-project.org/package=stringi.
|
||||||
If you search for the text "SystemRequirements", you will see:
|
If you search for the text "SystemRequirements", you will see:
|
||||||
|
|
||||||
@@ -352,7 +352,7 @@ Passing arguments to the installation
|
|||||||
|
|
||||||
Some R packages provide additional flags that can be passed to
|
Some R packages provide additional flags that can be passed to
|
||||||
``R CMD INSTALL``, often to locate non-R dependencies.
|
``R CMD INSTALL``, often to locate non-R dependencies.
|
||||||
`r-rmpi <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/r_rmpi/package.py>`_
|
`r-rmpi <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/r-rmpi/package.py>`_
|
||||||
is an example of this, and flags for linking to an MPI library. To pass
|
is an example of this, and flags for linking to an MPI library. To pass
|
||||||
these to the installation command, you can override ``configure_args``
|
these to the installation command, you can override ``configure_args``
|
||||||
like so:
|
like so:
|
||||||
|
@@ -104,10 +104,10 @@ Finding available options
|
|||||||
|
|
||||||
The first place to start when looking for a list of valid options to
|
The first place to start when looking for a list of valid options to
|
||||||
build a package is ``scons --help``. Some packages like
|
build a package is ``scons --help``. Some packages like
|
||||||
`kahip <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/kahip/package.py>`_
|
`kahip <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/kahip/package.py>`_
|
||||||
don't bother overwriting the default SCons help message, so this isn't
|
don't bother overwriting the default SCons help message, so this isn't
|
||||||
very useful, but other packages like
|
very useful, but other packages like
|
||||||
`serf <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/serf/package.py>`_
|
`serf <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/serf/package.py>`_
|
||||||
print a list of valid command-line variables:
|
print a list of valid command-line variables:
|
||||||
|
|
||||||
.. code-block:: console
|
.. code-block:: console
|
||||||
@@ -177,7 +177,7 @@ print a list of valid command-line variables:
|
|||||||
|
|
||||||
|
|
||||||
More advanced packages like
|
More advanced packages like
|
||||||
`cantera <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/cantera/package.py>`_
|
`cantera <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/cantera/package.py>`_
|
||||||
use ``scons --help`` to print a list of subcommands:
|
use ``scons --help`` to print a list of subcommands:
|
||||||
|
|
||||||
.. code-block:: console
|
.. code-block:: console
|
||||||
|
@@ -225,14 +225,8 @@ def setup(sphinx):
|
|||||||
("py:class", "llnl.util.lang.T"),
|
("py:class", "llnl.util.lang.T"),
|
||||||
("py:class", "llnl.util.lang.KT"),
|
("py:class", "llnl.util.lang.KT"),
|
||||||
("py:class", "llnl.util.lang.VT"),
|
("py:class", "llnl.util.lang.VT"),
|
||||||
("py:class", "llnl.util.lang.K"),
|
|
||||||
("py:class", "llnl.util.lang.V"),
|
|
||||||
("py:class", "llnl.util.lang.ClassPropertyType"),
|
|
||||||
("py:obj", "llnl.util.lang.KT"),
|
("py:obj", "llnl.util.lang.KT"),
|
||||||
("py:obj", "llnl.util.lang.VT"),
|
("py:obj", "llnl.util.lang.VT"),
|
||||||
("py:obj", "llnl.util.lang.ClassPropertyType"),
|
|
||||||
("py:obj", "llnl.util.lang.K"),
|
|
||||||
("py:obj", "llnl.util.lang.V"),
|
|
||||||
]
|
]
|
||||||
|
|
||||||
# The reST default role (used for this markup: `text`) to use for all documents.
|
# The reST default role (used for this markup: `text`) to use for all documents.
|
||||||
|
@@ -148,16 +148,15 @@ this can expose you to attacks. Use at your own risk.
|
|||||||
``ssl_certs``
|
``ssl_certs``
|
||||||
--------------------
|
--------------------
|
||||||
|
|
||||||
Path to custom certificates for SSL verification. The value can be a
|
Path to custom certificates for SSL verification. The value can be a
|
||||||
filesystem path, or an environment variable that expands to an absolute file path.
|
filesystem path, or an environment variable that expands to an absolute file path.
|
||||||
The default value is set to the environment variable ``SSL_CERT_FILE``
|
The default value is set to the environment variable ``SSL_CERT_FILE``
|
||||||
to use the same syntax used by many other applications that automatically
|
to use the same syntax used by many other applications that automatically
|
||||||
detect custom certificates.
|
detect custom certificates.
|
||||||
When ``url_fetch_method:curl`` the ``config:ssl_certs`` should resolve to
|
When ``url_fetch_method:curl`` the ``config:ssl_certs`` should resolve to
|
||||||
a single file. Spack will then set the environment variable ``CURL_CA_BUNDLE``
|
a single file. Spack will then set the environment variable ``CURL_CA_BUNDLE``
|
||||||
in the subprocess calling ``curl``. If additional ``curl`` arguments are required,
|
in the subprocess calling ``curl``.
|
||||||
they can be set in the config, e.g. ``url_fetch_method:'curl -k -q'``.
|
If ``url_fetch_method:urllib`` then files and directories are supported i.e.
|
||||||
If ``url_fetch_method:urllib`` then files and directories are supported i.e.
|
|
||||||
``config:ssl_certs:$SSL_CERT_FILE`` or ``config:ssl_certs:$SSL_CERT_DIR``
|
``config:ssl_certs:$SSL_CERT_FILE`` or ``config:ssl_certs:$SSL_CERT_DIR``
|
||||||
will work.
|
will work.
|
||||||
In all cases the expanded path must be absolute for Spack to use the certificates.
|
In all cases the expanded path must be absolute for Spack to use the certificates.
|
||||||
|
@@ -11,10 +11,9 @@ Configuration Files
|
|||||||
Spack has many configuration files. Here is a quick list of them, in
|
Spack has many configuration files. Here is a quick list of them, in
|
||||||
case you want to skip directly to specific docs:
|
case you want to skip directly to specific docs:
|
||||||
|
|
||||||
* :ref:`packages.yaml <compiler-config>`
|
* :ref:`compilers.yaml <compiler-config>`
|
||||||
* :ref:`concretizer.yaml <concretizer-options>`
|
* :ref:`concretizer.yaml <concretizer-options>`
|
||||||
* :ref:`config.yaml <config-yaml>`
|
* :ref:`config.yaml <config-yaml>`
|
||||||
* :ref:`include.yaml <include-yaml>`
|
|
||||||
* :ref:`mirrors.yaml <mirrors>`
|
* :ref:`mirrors.yaml <mirrors>`
|
||||||
* :ref:`modules.yaml <modules>`
|
* :ref:`modules.yaml <modules>`
|
||||||
* :ref:`packages.yaml <packages-config>`
|
* :ref:`packages.yaml <packages-config>`
|
||||||
@@ -46,12 +45,6 @@ Each Spack configuration file is nested under a top-level section
|
|||||||
corresponding to its name. So, ``config.yaml`` starts with ``config:``,
|
corresponding to its name. So, ``config.yaml`` starts with ``config:``,
|
||||||
``mirrors.yaml`` starts with ``mirrors:``, etc.
|
``mirrors.yaml`` starts with ``mirrors:``, etc.
|
||||||
|
|
||||||
.. tip::
|
|
||||||
|
|
||||||
Validation and autocompletion of Spack config files can be enabled in
|
|
||||||
your editor with the YAML language server. See `spack/schemas
|
|
||||||
<https://github.com/spack/schemas>`_ for more information.
|
|
||||||
|
|
||||||
.. _configuration-scopes:
|
.. _configuration-scopes:
|
||||||
|
|
||||||
--------------------
|
--------------------
|
||||||
@@ -101,7 +94,7 @@ are six configuration scopes. From lowest to highest:
|
|||||||
precedence over all other scopes.
|
precedence over all other scopes.
|
||||||
|
|
||||||
Each configuration directory may contain several configuration files,
|
Each configuration directory may contain several configuration files,
|
||||||
such as ``config.yaml``, ``packages.yaml``, or ``mirrors.yaml``. When
|
such as ``config.yaml``, ``compilers.yaml``, or ``mirrors.yaml``. When
|
||||||
configurations conflict, settings from higher-precedence scopes override
|
configurations conflict, settings from higher-precedence scopes override
|
||||||
lower-precedence settings.
|
lower-precedence settings.
|
||||||
|
|
||||||
|
@@ -226,9 +226,9 @@ If all is well, you'll see something like this:
|
|||||||
|
|
||||||
Modified files:
|
Modified files:
|
||||||
|
|
||||||
var/spack/repos/spack_repo/builtin/packages/hdf5/package.py
|
var/spack/repos/builtin/packages/hdf5/package.py
|
||||||
var/spack/repos/spack_repo/builtin/packages/hdf/package.py
|
var/spack/repos/builtin/packages/hdf/package.py
|
||||||
var/spack/repos/spack_repo/builtin/packages/netcdf/package.py
|
var/spack/repos/builtin/packages/netcdf/package.py
|
||||||
=======================================================
|
=======================================================
|
||||||
Flake8 checks were clean.
|
Flake8 checks were clean.
|
||||||
|
|
||||||
@@ -236,9 +236,9 @@ However, if you aren't compliant with PEP 8, flake8 will complain:
|
|||||||
|
|
||||||
.. code-block:: console
|
.. code-block:: console
|
||||||
|
|
||||||
var/spack/repos/spack_repo/builtin/packages/netcdf/package.py:26: [F401] 'os' imported but unused
|
var/spack/repos/builtin/packages/netcdf/package.py:26: [F401] 'os' imported but unused
|
||||||
var/spack/repos/spack_repo/builtin/packages/netcdf/package.py:61: [E303] too many blank lines (2)
|
var/spack/repos/builtin/packages/netcdf/package.py:61: [E303] too many blank lines (2)
|
||||||
var/spack/repos/spack_repo/builtin/packages/netcdf/package.py:106: [E501] line too long (92 > 79 characters)
|
var/spack/repos/builtin/packages/netcdf/package.py:106: [E501] line too long (92 > 79 characters)
|
||||||
Flake8 found errors.
|
Flake8 found errors.
|
||||||
|
|
||||||
Most of the error messages are straightforward, but if you don't understand what
|
Most of the error messages are straightforward, but if you don't understand what
|
||||||
@@ -280,7 +280,7 @@ All of these can be installed with Spack, e.g.
|
|||||||
|
|
||||||
.. warning::
|
.. warning::
|
||||||
|
|
||||||
Sphinx has `several required dependencies <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/py-sphinx/package.py>`_.
|
Sphinx has `several required dependencies <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/py-sphinx/package.py>`_.
|
||||||
If you're using a ``python`` from Spack and you installed
|
If you're using a ``python`` from Spack and you installed
|
||||||
``py-sphinx`` and friends, you need to make them available to your
|
``py-sphinx`` and friends, you need to make them available to your
|
||||||
``python``. The easiest way to do this is to run:
|
``python``. The easiest way to do this is to run:
|
||||||
|
@@ -154,7 +154,9 @@ Package-related modules
|
|||||||
|
|
||||||
:mod:`spack.util.naming`
|
:mod:`spack.util.naming`
|
||||||
Contains functions for mapping between Spack package names,
|
Contains functions for mapping between Spack package names,
|
||||||
Python module names, and Python class names.
|
Python module names, and Python class names. Functions like
|
||||||
|
:func:`~spack.util.naming.mod_to_class` handle mapping package
|
||||||
|
module names to class names.
|
||||||
|
|
||||||
:mod:`spack.directives`
|
:mod:`spack.directives`
|
||||||
*Directives* are functions that can be called inside a package definition
|
*Directives* are functions that can be called inside a package definition
|
||||||
|
@@ -1,34 +0,0 @@
|
|||||||
.. Copyright Spack Project Developers. See COPYRIGHT file for details.
|
|
||||||
|
|
||||||
SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
|
||||||
|
|
||||||
.. _env-vars-yaml:
|
|
||||||
|
|
||||||
=============================================
|
|
||||||
Environment Variable Settings (env_vars.yaml)
|
|
||||||
=============================================
|
|
||||||
|
|
||||||
Spack allows you to include shell environment variable modifications
|
|
||||||
for a spack environment by including an ``env_vars.yaml``. Environment
|
|
||||||
variables can be modified by setting, unsetting, appending, and prepending
|
|
||||||
variables in the shell environment.
|
|
||||||
The changes to the shell environment will take effect when the spack
|
|
||||||
environment is activated.
|
|
||||||
|
|
||||||
For example,
|
|
||||||
|
|
||||||
.. code-block:: yaml
|
|
||||||
|
|
||||||
env_vars:
|
|
||||||
set:
|
|
||||||
ENVAR_TO_SET_IN_ENV_LOAD: "FOO"
|
|
||||||
unset:
|
|
||||||
ENVAR_TO_UNSET_IN_ENV_LOAD:
|
|
||||||
prepend_path:
|
|
||||||
PATH_LIST: "path/to/prepend"
|
|
||||||
append_path:
|
|
||||||
PATH_LIST: "path/to/append"
|
|
||||||
remove_path:
|
|
||||||
PATH_LIST: "path/to/remove"
|
|
||||||
|
|
||||||
|
|
@@ -457,13 +457,6 @@ developed package in the environment are concretized to match the
|
|||||||
version (and other constraints) passed as the spec argument to the
|
version (and other constraints) passed as the spec argument to the
|
||||||
``spack develop`` command.
|
``spack develop`` command.
|
||||||
|
|
||||||
When working deep in the graph it is often desirable to have multiple specs marked
|
|
||||||
as ``develop`` so you don't have to restage and/or do full rebuilds each time you
|
|
||||||
call ``spack install``. The ``--recursive`` flag can be used in these scenarios
|
|
||||||
to ensure that all the dependents of the initial spec you provide are also marked
|
|
||||||
as develop specs. The ``--recursive`` flag requires a pre-concretized environment
|
|
||||||
so the graph can be traversed from the supplied spec all the way to the root specs.
|
|
||||||
|
|
||||||
For packages with ``git`` attributes, git branches, tags, and commits can
|
For packages with ``git`` attributes, git branches, tags, and commits can
|
||||||
also be used as valid concrete versions (see :ref:`version-specifier`).
|
also be used as valid concrete versions (see :ref:`version-specifier`).
|
||||||
This means that for a package ``foo``, ``spack develop foo@git.main`` will clone
|
This means that for a package ``foo``, ``spack develop foo@git.main`` will clone
|
||||||
@@ -667,56 +660,34 @@ a ``packages.yaml`` file) could contain:
|
|||||||
# ...
|
# ...
|
||||||
packages:
|
packages:
|
||||||
all:
|
all:
|
||||||
providers:
|
compiler: [intel]
|
||||||
mpi: [openmpi]
|
|
||||||
# ...
|
# ...
|
||||||
|
|
||||||
This configuration sets the default mpi provider to be openmpi.
|
This configuration sets the default compiler for all packages to
|
||||||
|
``intel``.
|
||||||
|
|
||||||
^^^^^^^^^^^^^^^^^^^^^^^
|
^^^^^^^^^^^^^^^^^^^^^^^
|
||||||
Included configurations
|
Included configurations
|
||||||
^^^^^^^^^^^^^^^^^^^^^^^
|
^^^^^^^^^^^^^^^^^^^^^^^
|
||||||
|
|
||||||
Spack environments allow an ``include`` heading in their yaml schema.
|
Spack environments allow an ``include`` heading in their yaml
|
||||||
This heading pulls in external configuration files and applies them to
|
schema. This heading pulls in external configuration files and applies
|
||||||
the environment.
|
them to the environment.
|
||||||
|
|
||||||
.. code-block:: yaml
|
.. code-block:: yaml
|
||||||
|
|
||||||
spack:
|
spack:
|
||||||
include:
|
include:
|
||||||
- environment/relative/path/to/config.yaml
|
- relative/path/to/config.yaml
|
||||||
- path: https://github.com/path/to/raw/config/compilers.yaml
|
- https://github.com/path/to/raw/config/compilers.yaml
|
||||||
sha256: 26e871804a92cd07bb3d611b31b4156ae93d35b6a6d6e0ef3a67871fcb1d258b
|
|
||||||
- /absolute/path/to/packages.yaml
|
- /absolute/path/to/packages.yaml
|
||||||
- path: /path/to/$os/$target/environment
|
|
||||||
optional: true
|
|
||||||
- path: /path/to/os-specific/config-dir
|
|
||||||
when: os == "ventura"
|
|
||||||
|
|
||||||
Included configuration files are required *unless* they are explicitly optional
|
|
||||||
or the entry's condition evaluates to ``false``. Optional includes are specified
|
|
||||||
with the ``optional`` clause and conditional with the ``when`` clause. (See
|
|
||||||
:ref:`include-yaml` for more information on optional and conditional entries.)
|
|
||||||
|
|
||||||
Files are listed using paths to individual files or directories containing them.
|
|
||||||
Path entries may be absolute or relative to the environment or specified as
|
|
||||||
URLs. URLs to individual files must link to the **raw** form of the file's
|
|
||||||
contents (e.g., `GitHub
|
|
||||||
<https://docs.github.com/en/repositories/working-with-files/using-files/viewing-and-understanding-files#viewing-or-copying-the-raw-file-content>`_
|
|
||||||
or `GitLab
|
|
||||||
<https://docs.gitlab.com/ee/api/repository_files.html#get-raw-file-from-repository>`_) **and** include a valid sha256 for the file.
|
|
||||||
Only the ``file``, ``ftp``, ``http`` and ``https`` protocols (or schemes) are
|
|
||||||
supported. Spack-specific, environment and user path variables can be used.
|
|
||||||
(See :ref:`config-file-variables` for more information.)
|
|
||||||
|
|
||||||
.. warning::
|
|
||||||
|
|
||||||
Recursive includes are not currently processed in a breadth-first manner
|
|
||||||
so the value of a configuration option that is altered by multiple included
|
|
||||||
files may not be what you expect. This will be addressed in a future
|
|
||||||
update.
|
|
||||||
|
|
||||||
|
Environments can include files or URLs. File paths can be relative or
|
||||||
|
absolute. URLs include the path to the text for individual files or
|
||||||
|
can be the path to a directory containing configuration files.
|
||||||
|
Spack supports ``file``, ``http``, ``https`` and ``ftp`` protocols (or
|
||||||
|
schemes). Spack-specific, environment and user path variables may be
|
||||||
|
used in these paths. See :ref:`config-file-variables` for more information.
|
||||||
|
|
||||||
^^^^^^^^^^^^^^^^^^^^^^^^
|
^^^^^^^^^^^^^^^^^^^^^^^^
|
||||||
Configuration precedence
|
Configuration precedence
|
||||||
@@ -1000,28 +971,6 @@ For example, the following environment has three root packages:
|
|||||||
This allows for a much-needed reduction in redundancy between packages
|
This allows for a much-needed reduction in redundancy between packages
|
||||||
and constraints.
|
and constraints.
|
||||||
|
|
||||||
-------------------------------
|
|
||||||
Modifying Environment Variables
|
|
||||||
-------------------------------
|
|
||||||
|
|
||||||
Spack Environments can modify the active shell's environment variables when activated. The environment can be
|
|
||||||
configured to set, unset, prepend, or append using ``env_vars`` configuration in the ``spack.yaml`` or through a config scope
|
|
||||||
file:
|
|
||||||
|
|
||||||
.. code-block:: yaml
|
|
||||||
|
|
||||||
spack:
|
|
||||||
env_vars:
|
|
||||||
set:
|
|
||||||
ENVAR_TO_SET_IN_ENV_LOAD: "FOO"
|
|
||||||
unset:
|
|
||||||
ENVAR_TO_UNSET_IN_ENV_LOAD:
|
|
||||||
prepend_path:
|
|
||||||
PATH_LIST: "path/to/prepend"
|
|
||||||
append_path:
|
|
||||||
PATH_LIST: "path/to/append"
|
|
||||||
remove_path:
|
|
||||||
PATH_LIST: "path/to/remove"
|
|
||||||
|
|
||||||
-----------------
|
-----------------
|
||||||
Environment Views
|
Environment Views
|
||||||
|
161
lib/spack/docs/example_files/spack.yaml
Normal file
161
lib/spack/docs/example_files/spack.yaml
Normal file
@@ -0,0 +1,161 @@
|
|||||||
|
spack:
|
||||||
|
definitions:
|
||||||
|
- compiler-pkgs:
|
||||||
|
- 'llvm+clang@6.0.1 os=centos7'
|
||||||
|
- 'gcc@6.5.0 os=centos7'
|
||||||
|
- 'llvm+clang@6.0.1 os=ubuntu18.04'
|
||||||
|
- 'gcc@6.5.0 os=ubuntu18.04'
|
||||||
|
- pkgs:
|
||||||
|
- readline@7.0
|
||||||
|
# - xsdk@0.4.0
|
||||||
|
- compilers:
|
||||||
|
- '%gcc@5.5.0'
|
||||||
|
- '%gcc@6.5.0'
|
||||||
|
- '%gcc@7.3.0'
|
||||||
|
- '%clang@6.0.0'
|
||||||
|
- '%clang@6.0.1'
|
||||||
|
- oses:
|
||||||
|
- os=ubuntu18.04
|
||||||
|
- os=centos7
|
||||||
|
|
||||||
|
specs:
|
||||||
|
- matrix:
|
||||||
|
- [$pkgs]
|
||||||
|
- [$compilers]
|
||||||
|
- [$oses]
|
||||||
|
exclude:
|
||||||
|
- '%gcc@7.3.0 os=centos7'
|
||||||
|
- '%gcc@5.5.0 os=ubuntu18.04'
|
||||||
|
|
||||||
|
mirrors:
|
||||||
|
cloud_gitlab: https://mirror.spack.io
|
||||||
|
|
||||||
|
compilers:
|
||||||
|
# The .gitlab-ci.yml for this project picks a Docker container which does
|
||||||
|
# not have any compilers pre-built and ready to use, so we need to fake the
|
||||||
|
# existence of those here.
|
||||||
|
- compiler:
|
||||||
|
operating_system: centos7
|
||||||
|
modules: []
|
||||||
|
paths:
|
||||||
|
cc: /not/used
|
||||||
|
cxx: /not/used
|
||||||
|
f77: /not/used
|
||||||
|
fc: /not/used
|
||||||
|
spec: gcc@5.5.0
|
||||||
|
target: x86_64
|
||||||
|
- compiler:
|
||||||
|
operating_system: centos7
|
||||||
|
modules: []
|
||||||
|
paths:
|
||||||
|
cc: /not/used
|
||||||
|
cxx: /not/used
|
||||||
|
f77: /not/used
|
||||||
|
fc: /not/used
|
||||||
|
spec: gcc@6.5.0
|
||||||
|
target: x86_64
|
||||||
|
- compiler:
|
||||||
|
operating_system: centos7
|
||||||
|
modules: []
|
||||||
|
paths:
|
||||||
|
cc: /not/used
|
||||||
|
cxx: /not/used
|
||||||
|
f77: /not/used
|
||||||
|
fc: /not/used
|
||||||
|
spec: clang@6.0.0
|
||||||
|
target: x86_64
|
||||||
|
- compiler:
|
||||||
|
operating_system: centos7
|
||||||
|
modules: []
|
||||||
|
paths:
|
||||||
|
cc: /not/used
|
||||||
|
cxx: /not/used
|
||||||
|
f77: /not/used
|
||||||
|
fc: /not/used
|
||||||
|
spec: clang@6.0.1
|
||||||
|
target: x86_64
|
||||||
|
|
||||||
|
- compiler:
|
||||||
|
operating_system: ubuntu18.04
|
||||||
|
modules: []
|
||||||
|
paths:
|
||||||
|
cc: /not/used
|
||||||
|
cxx: /not/used
|
||||||
|
f77: /not/used
|
||||||
|
fc: /not/used
|
||||||
|
spec: clang@6.0.0
|
||||||
|
target: x86_64
|
||||||
|
- compiler:
|
||||||
|
operating_system: ubuntu18.04
|
||||||
|
modules: []
|
||||||
|
paths:
|
||||||
|
cc: /not/used
|
||||||
|
cxx: /not/used
|
||||||
|
f77: /not/used
|
||||||
|
fc: /not/used
|
||||||
|
spec: clang@6.0.1
|
||||||
|
target: x86_64
|
||||||
|
- compiler:
|
||||||
|
operating_system: ubuntu18.04
|
||||||
|
modules: []
|
||||||
|
paths:
|
||||||
|
cc: /not/used
|
||||||
|
cxx: /not/used
|
||||||
|
f77: /not/used
|
||||||
|
fc: /not/used
|
||||||
|
spec: gcc@6.5.0
|
||||||
|
target: x86_64
|
||||||
|
- compiler:
|
||||||
|
operating_system: ubuntu18.04
|
||||||
|
modules: []
|
||||||
|
paths:
|
||||||
|
cc: /not/used
|
||||||
|
cxx: /not/used
|
||||||
|
f77: /not/used
|
||||||
|
fc: /not/used
|
||||||
|
spec: gcc@7.3.0
|
||||||
|
target: x86_64
|
||||||
|
|
||||||
|
gitlab-ci:
|
||||||
|
bootstrap:
|
||||||
|
- name: compiler-pkgs
|
||||||
|
compiler-agnostic: true
|
||||||
|
mappings:
|
||||||
|
- # spack-cloud-ubuntu
|
||||||
|
match:
|
||||||
|
# these are specs, if *any* match the spec under consideration, this
|
||||||
|
# 'mapping' will be used to generate the CI job
|
||||||
|
- os=ubuntu18.04
|
||||||
|
runner-attributes:
|
||||||
|
# 'tags' and 'image' go directly onto the job, 'variables' will
|
||||||
|
# be added to what we already necessarily create for the job as
|
||||||
|
# a part of the CI workflow
|
||||||
|
tags:
|
||||||
|
- spack-k8s
|
||||||
|
image:
|
||||||
|
name: scottwittenburg/spack_builder_ubuntu_18.04
|
||||||
|
entrypoint: [""]
|
||||||
|
- # spack-cloud-centos
|
||||||
|
match:
|
||||||
|
# these are specs, if *any* match the spec under consideration, this
|
||||||
|
# 'mapping' will be used to generate the CI job
|
||||||
|
- 'os=centos7'
|
||||||
|
runner-attributes:
|
||||||
|
tags:
|
||||||
|
- spack-k8s
|
||||||
|
image:
|
||||||
|
name: scottwittenburg/spack_builder_centos_7
|
||||||
|
entrypoint: [""]
|
||||||
|
|
||||||
|
cdash:
|
||||||
|
build-group: Release Testing
|
||||||
|
url: http://cdash
|
||||||
|
project: Spack Testing
|
||||||
|
site: Spack Docker-Compose Workflow
|
||||||
|
|
||||||
|
repos: []
|
||||||
|
upstreams: {}
|
||||||
|
modules:
|
||||||
|
enable: []
|
||||||
|
packages: {}
|
||||||
|
config: {}
|
@@ -131,7 +131,7 @@ creates a simple python file:
|
|||||||
It doesn't take much python coding to get from there to a working
|
It doesn't take much python coding to get from there to a working
|
||||||
package:
|
package:
|
||||||
|
|
||||||
.. literalinclude:: _spack_root/var/spack/repos/spack_repo/builtin/packages/libelf/package.py
|
.. literalinclude:: _spack_root/var/spack/repos/builtin/packages/libelf/package.py
|
||||||
:lines: 5-
|
:lines: 5-
|
||||||
|
|
||||||
Spack also provides wrapper functions around common commands like
|
Spack also provides wrapper functions around common commands like
|
||||||
|
@@ -30,7 +30,7 @@ than always choosing the latest versions or default variants.
|
|||||||
|
|
||||||
.. note::
|
.. note::
|
||||||
|
|
||||||
As a rule of thumb: requirements + constraints > strong preferences > reuse > preferences > defaults.
|
As a rule of thumb: requirements + constraints > reuse > preferences > defaults.
|
||||||
|
|
||||||
The following set of criteria (from lowest to highest precedence) explain
|
The following set of criteria (from lowest to highest precedence) explain
|
||||||
common cases where concretization output may seem surprising at first.
|
common cases where concretization output may seem surprising at first.
|
||||||
@@ -56,19 +56,7 @@ common cases where concretization output may seem surprising at first.
|
|||||||
concretizer:
|
concretizer:
|
||||||
reuse: dependencies # other options are 'true' and 'false'
|
reuse: dependencies # other options are 'true' and 'false'
|
||||||
|
|
||||||
3. :ref:`Strong preferences <package-strong-preferences>` configured in ``packages.yaml``
|
3. :ref:`Package requirements <package-requirements>` configured in ``packages.yaml``,
|
||||||
are higher priority than reuse, and can be used to strongly prefer a specific version
|
|
||||||
or variant, without erroring out if it's not possible. Strong preferences are specified
|
|
||||||
as follows:
|
|
||||||
|
|
||||||
.. code-block:: yaml
|
|
||||||
|
|
||||||
packages:
|
|
||||||
foo:
|
|
||||||
prefer:
|
|
||||||
- "@1.1: ~mpi"
|
|
||||||
|
|
||||||
4. :ref:`Package requirements <package-requirements>` configured in ``packages.yaml``,
|
|
||||||
and constraints from the command line as well as ``package.py`` files override all
|
and constraints from the command line as well as ``package.py`` files override all
|
||||||
of the above. Requirements are specified as follows:
|
of the above. Requirements are specified as follows:
|
||||||
|
|
||||||
@@ -78,8 +66,6 @@ common cases where concretization output may seem surprising at first.
|
|||||||
foo:
|
foo:
|
||||||
require:
|
require:
|
||||||
- "@1.2: +mpi"
|
- "@1.2: +mpi"
|
||||||
conflicts:
|
|
||||||
- "@1.4"
|
|
||||||
|
|
||||||
Requirements and constraints restrict the set of possible solutions, while reuse
|
Requirements and constraints restrict the set of possible solutions, while reuse
|
||||||
behavior and preferences influence what an optimal solution looks like.
|
behavior and preferences influence what an optimal solution looks like.
|
||||||
|
@@ -254,11 +254,12 @@ directory.
|
|||||||
Compiler configuration
|
Compiler configuration
|
||||||
----------------------
|
----------------------
|
||||||
|
|
||||||
Spack has the ability to build packages with multiple compilers and compiler versions.
|
Spack has the ability to build packages with multiple compilers and
|
||||||
Compilers can be made available to Spack by specifying them manually in ``packages.yaml``,
|
compiler versions. Compilers can be made available to Spack by
|
||||||
or automatically by running ``spack compiler find``.
|
specifying them manually in ``compilers.yaml`` or ``packages.yaml``,
|
||||||
For convenience, Spack will automatically detect compilers the first time it needs them,
|
or automatically by running ``spack compiler find``, but for
|
||||||
if none is available.
|
convenience Spack will automatically detect compilers the first time
|
||||||
|
it needs them.
|
||||||
|
|
||||||
.. _cmd-spack-compilers:
|
.. _cmd-spack-compilers:
|
||||||
|
|
||||||
@@ -273,11 +274,16 @@ compilers`` or ``spack compiler list``:
|
|||||||
|
|
||||||
$ spack compilers
|
$ spack compilers
|
||||||
==> Available compilers
|
==> Available compilers
|
||||||
-- gcc ubuntu20.04-x86_64 ---------------------------------------
|
-- gcc ---------------------------------------------------------
|
||||||
gcc@9.4.0 gcc@8.4.0 gcc@10.5.0
|
gcc@4.9.0 gcc@4.8.0 gcc@4.7.0 gcc@4.6.2 gcc@4.4.7
|
||||||
|
gcc@4.8.2 gcc@4.7.1 gcc@4.6.3 gcc@4.6.1 gcc@4.1.2
|
||||||
-- llvm ubuntu20.04-x86_64 --------------------------------------
|
-- intel -------------------------------------------------------
|
||||||
llvm@12.0.0 llvm@11.0.0 llvm@10.0.0
|
intel@15.0.0 intel@14.0.0 intel@13.0.0 intel@12.1.0 intel@10.0
|
||||||
|
intel@14.0.3 intel@13.1.1 intel@12.1.5 intel@12.0.4 intel@9.1
|
||||||
|
intel@14.0.2 intel@13.1.0 intel@12.1.3 intel@11.1
|
||||||
|
intel@14.0.1 intel@13.0.1 intel@12.1.2 intel@10.1
|
||||||
|
-- clang -------------------------------------------------------
|
||||||
|
clang@3.4 clang@3.3 clang@3.2 clang@3.1
|
||||||
|
|
||||||
Any of these compilers can be used to build Spack packages. More on
|
Any of these compilers can be used to build Spack packages. More on
|
||||||
how this is done is in :ref:`sec-specs`.
|
how this is done is in :ref:`sec-specs`.
|
||||||
@@ -296,22 +302,16 @@ An alias for ``spack compiler find``.
|
|||||||
``spack compiler find``
|
``spack compiler find``
|
||||||
^^^^^^^^^^^^^^^^^^^^^^^
|
^^^^^^^^^^^^^^^^^^^^^^^
|
||||||
|
|
||||||
If you do not see a compiler in the list shown by:
|
Lists the compilers currently available to Spack. If you do not see
|
||||||
|
a compiler in this list, but you want to use it with Spack, you can
|
||||||
|
simply run ``spack compiler find`` with the path to where the
|
||||||
|
compiler is installed. For example:
|
||||||
|
|
||||||
.. code-block:: console
|
.. code-block:: console
|
||||||
|
|
||||||
$ spack compiler list
|
$ spack compiler find /usr/local/tools/ic-13.0.079
|
||||||
|
==> Added 1 new compiler to ~/.spack/linux/compilers.yaml
|
||||||
but you want to use it with Spack, you can simply run ``spack compiler find`` with the
|
intel@13.0.079
|
||||||
path to where the compiler is installed. For example:
|
|
||||||
|
|
||||||
.. code-block:: console
|
|
||||||
|
|
||||||
$ spack compiler find /opt/intel/oneapi/compiler/2025.1/bin/
|
|
||||||
==> Added 1 new compiler to /home/user/.spack/packages.yaml
|
|
||||||
intel-oneapi-compilers@2025.1.0
|
|
||||||
==> Compilers are defined in the following files:
|
|
||||||
/home/user/.spack/packages.yaml
|
|
||||||
|
|
||||||
Or you can run ``spack compiler find`` with no arguments to force
|
Or you can run ``spack compiler find`` with no arguments to force
|
||||||
auto-detection. This is useful if you do not know where compilers are
|
auto-detection. This is useful if you do not know where compilers are
|
||||||
@@ -322,7 +322,7 @@ installed, but you know that new compilers have been added to your
|
|||||||
|
|
||||||
$ module load gcc/4.9.0
|
$ module load gcc/4.9.0
|
||||||
$ spack compiler find
|
$ spack compiler find
|
||||||
==> Added 1 new compiler to /home/user/.spack/packages.yaml
|
==> Added 1 new compiler to ~/.spack/linux/compilers.yaml
|
||||||
gcc@4.9.0
|
gcc@4.9.0
|
||||||
|
|
||||||
This loads the environment module for gcc-4.9.0 to add it to
|
This loads the environment module for gcc-4.9.0 to add it to
|
||||||
@@ -331,7 +331,7 @@ This loads the environment module for gcc-4.9.0 to add it to
|
|||||||
.. note::
|
.. note::
|
||||||
|
|
||||||
By default, spack does not fill in the ``modules:`` field in the
|
By default, spack does not fill in the ``modules:`` field in the
|
||||||
``packages.yaml`` file. If you are using a compiler from a
|
``compilers.yaml`` file. If you are using a compiler from a
|
||||||
module, then you should add this field manually.
|
module, then you should add this field manually.
|
||||||
See the section on :ref:`compilers-requiring-modules`.
|
See the section on :ref:`compilers-requiring-modules`.
|
||||||
|
|
||||||
@@ -341,82 +341,91 @@ This loads the environment module for gcc-4.9.0 to add it to
|
|||||||
``spack compiler info``
|
``spack compiler info``
|
||||||
^^^^^^^^^^^^^^^^^^^^^^^
|
^^^^^^^^^^^^^^^^^^^^^^^
|
||||||
|
|
||||||
If you want to see additional information on some specific compilers, you can run ``spack compiler info`` on it:
|
If you want to see specifics on a particular compiler, you can run
|
||||||
|
``spack compiler info`` on it:
|
||||||
|
|
||||||
.. code-block:: console
|
.. code-block:: console
|
||||||
|
|
||||||
$ spack compiler info gcc
|
$ spack compiler info intel@15
|
||||||
gcc@=8.4.0 languages='c,c++,fortran' arch=linux-ubuntu20.04-x86_64:
|
intel@15.0.0:
|
||||||
prefix: /usr
|
paths:
|
||||||
compilers:
|
cc = /usr/local/bin/icc-15.0.090
|
||||||
c: /usr/bin/gcc-8
|
cxx = /usr/local/bin/icpc-15.0.090
|
||||||
cxx: /usr/bin/g++-8
|
f77 = /usr/local/bin/ifort-15.0.090
|
||||||
fortran: /usr/bin/gfortran-8
|
fc = /usr/local/bin/ifort-15.0.090
|
||||||
|
modules = []
|
||||||
|
operating_system = centos6
|
||||||
|
...
|
||||||
|
|
||||||
gcc@=9.4.0 languages='c,c++,fortran' arch=linux-ubuntu20.04-x86_64:
|
This shows which C, C++, and Fortran compilers were detected by Spack.
|
||||||
prefix: /usr
|
Notice also that we didn't have to be too specific about the
|
||||||
compilers:
|
version. We just said ``intel@15``, and information about the only
|
||||||
c: /usr/bin/gcc
|
matching Intel compiler was displayed.
|
||||||
cxx: /usr/bin/g++
|
|
||||||
fortran: /usr/bin/gfortran
|
|
||||||
|
|
||||||
gcc@=10.5.0 languages='c,c++,fortran' arch=linux-ubuntu20.04-x86_64:
|
|
||||||
prefix: /usr
|
|
||||||
compilers:
|
|
||||||
c: /usr/bin/gcc-10
|
|
||||||
cxx: /usr/bin/g++-10
|
|
||||||
fortran: /usr/bin/gfortran-10
|
|
||||||
|
|
||||||
This shows the details of the compilers that were detected by Spack.
|
|
||||||
Notice also that we didn't have to be too specific about the version. We just said ``gcc``, and we got information
|
|
||||||
about all the matching compilers.
|
|
||||||
|
|
||||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||||
Manual compiler configuration
|
Manual compiler configuration
|
||||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||||
|
|
||||||
If auto-detection fails, you can manually configure a compiler by editing your ``~/.spack/packages.yaml`` file.
|
If auto-detection fails, you can manually configure a compiler by
|
||||||
You can do this by running ``spack config edit packages``, which will open the file in
|
editing your ``~/.spack/<platform>/compilers.yaml`` file. You can do this by running
|
||||||
|
``spack config edit compilers``, which will open the file in
|
||||||
:ref:`your favorite editor <controlling-the-editor>`.
|
:ref:`your favorite editor <controlling-the-editor>`.
|
||||||
|
|
||||||
Each compiler has an "external" entry in the file with some ``extra_attributes``:
|
Each compiler configuration in the file looks like this:
|
||||||
|
|
||||||
.. code-block:: yaml
|
.. code-block:: yaml
|
||||||
|
|
||||||
packages:
|
compilers:
|
||||||
gcc:
|
- compiler:
|
||||||
externals:
|
modules: []
|
||||||
- spec: gcc@10.5.0 languages='c,c++,fortran'
|
operating_system: centos6
|
||||||
prefix: /usr
|
paths:
|
||||||
extra_attributes:
|
cc: /usr/local/bin/icc-15.0.024-beta
|
||||||
compilers:
|
cxx: /usr/local/bin/icpc-15.0.024-beta
|
||||||
c: /usr/bin/gcc-10
|
f77: /usr/local/bin/ifort-15.0.024-beta
|
||||||
cxx: /usr/bin/g++-10
|
fc: /usr/local/bin/ifort-15.0.024-beta
|
||||||
fortran: /usr/bin/gfortran-10
|
spec: intel@15.0.0
|
||||||
|
|
||||||
The compiler executables are listed under ``extra_attributes:compilers``, and are keyed by language.
|
For compilers that do not support Fortran (like ``clang``), put
|
||||||
Once you save the file, the configured compilers will show up in the list displayed by ``spack compilers``.
|
``None`` for ``f77`` and ``fc``:
|
||||||
|
|
||||||
You can also add compiler flags to manually configured compilers. These flags should be specified in the
|
.. code-block:: yaml
|
||||||
``flags`` section of the compiler specification. The valid flags are ``cflags``, ``cxxflags``, ``fflags``,
|
|
||||||
|
compilers:
|
||||||
|
- compiler:
|
||||||
|
modules: []
|
||||||
|
operating_system: centos6
|
||||||
|
paths:
|
||||||
|
cc: /usr/bin/clang
|
||||||
|
cxx: /usr/bin/clang++
|
||||||
|
f77: None
|
||||||
|
fc: None
|
||||||
|
spec: clang@3.3svn
|
||||||
|
|
||||||
|
Once you save the file, the configured compilers will show up in the
|
||||||
|
list displayed by ``spack compilers``.
|
||||||
|
|
||||||
|
You can also add compiler flags to manually configured compilers. These
|
||||||
|
flags should be specified in the ``flags`` section of the compiler
|
||||||
|
specification. The valid flags are ``cflags``, ``cxxflags``, ``fflags``,
|
||||||
``cppflags``, ``ldflags``, and ``ldlibs``. For example:
|
``cppflags``, ``ldflags``, and ``ldlibs``. For example:
|
||||||
|
|
||||||
.. code-block:: yaml
|
.. code-block:: yaml
|
||||||
|
|
||||||
packages:
|
compilers:
|
||||||
gcc:
|
- compiler:
|
||||||
externals:
|
modules: []
|
||||||
- spec: gcc@10.5.0 languages='c,c++,fortran'
|
operating_system: centos6
|
||||||
prefix: /usr
|
paths:
|
||||||
extra_attributes:
|
cc: /usr/bin/gcc
|
||||||
compilers:
|
cxx: /usr/bin/g++
|
||||||
c: /usr/bin/gcc-10
|
f77: /usr/bin/gfortran
|
||||||
cxx: /usr/bin/g++-10
|
fc: /usr/bin/gfortran
|
||||||
fortran: /usr/bin/gfortran-10
|
flags:
|
||||||
flags:
|
cflags: -O3 -fPIC
|
||||||
cflags: -O3 -fPIC
|
cxxflags: -O3 -fPIC
|
||||||
cxxflags: -O3 -fPIC
|
cppflags: -O3 -fPIC
|
||||||
cppflags: -O3 -fPIC
|
spec: gcc@4.7.2
|
||||||
|
|
||||||
These flags will be treated by spack as if they were entered from
|
These flags will be treated by spack as if they were entered from
|
||||||
the command line each time this compiler is used. The compiler wrappers
|
the command line each time this compiler is used. The compiler wrappers
|
||||||
@@ -431,44 +440,95 @@ These variables should be specified in the ``environment`` section of the compil
|
|||||||
specification. The operations available to modify the environment are ``set``, ``unset``,
|
specification. The operations available to modify the environment are ``set``, ``unset``,
|
||||||
``prepend_path``, ``append_path``, and ``remove_path``. For example:
|
``prepend_path``, ``append_path``, and ``remove_path``. For example:
|
||||||
|
|
||||||
|
.. code-block:: yaml
|
||||||
|
|
||||||
|
compilers:
|
||||||
|
- compiler:
|
||||||
|
modules: []
|
||||||
|
operating_system: centos6
|
||||||
|
paths:
|
||||||
|
cc: /opt/intel/oneapi/compiler/latest/linux/bin/icx
|
||||||
|
cxx: /opt/intel/oneapi/compiler/latest/linux/bin/icpx
|
||||||
|
f77: /opt/intel/oneapi/compiler/latest/linux/bin/ifx
|
||||||
|
fc: /opt/intel/oneapi/compiler/latest/linux/bin/ifx
|
||||||
|
spec: oneapi@latest
|
||||||
|
environment:
|
||||||
|
set:
|
||||||
|
MKL_ROOT: "/path/to/mkl/root"
|
||||||
|
unset: # A list of environment variables to unset
|
||||||
|
- CC
|
||||||
|
prepend_path: # Similar for append|remove_path
|
||||||
|
LD_LIBRARY_PATH: /ld/paths/added/by/setvars/sh
|
||||||
|
|
||||||
|
.. note::
|
||||||
|
|
||||||
|
Spack is in the process of moving compilers from a separate
|
||||||
|
attribute to be handled like all other packages. As part of this
|
||||||
|
process, the ``compilers.yaml`` section will eventually be replaced
|
||||||
|
by configuration in the ``packages.yaml`` section. This new
|
||||||
|
configuration is now available, although it is not yet the default
|
||||||
|
behavior.
|
||||||
|
|
||||||
|
Compilers can also be configured as external packages in the
|
||||||
|
``packages.yaml`` config file. Any external package for a compiler
|
||||||
|
(e.g. ``gcc`` or ``llvm``) will be treated as a configured compiler
|
||||||
|
assuming the paths to the compiler executables are determinable from
|
||||||
|
the prefix.
|
||||||
|
|
||||||
|
If the paths to the compiler executable are not determinable from the
|
||||||
|
prefix, you can add them to the ``extra_attributes`` field. Similarly,
|
||||||
|
all other fields from the compilers config can be added to the
|
||||||
|
``extra_attributes`` field for an external representing a compiler.
|
||||||
|
|
||||||
|
Note that the format for the ``paths`` field in the
|
||||||
|
``extra_attributes`` section is different than in the ``compilers``
|
||||||
|
config. For compilers configured as external packages, the section is
|
||||||
|
named ``compilers`` and the dictionary maps language names (``c``,
|
||||||
|
``cxx``, ``fortran``) to paths, rather than using the names ``cc``,
|
||||||
|
``fc``, and ``f77``.
|
||||||
|
|
||||||
.. code-block:: yaml
|
.. code-block:: yaml
|
||||||
|
|
||||||
packages:
|
packages:
|
||||||
intel-oneapi-compilers:
|
gcc:
|
||||||
externals:
|
external:
|
||||||
- spec: intel-oneapi-compilers@2025.1.0
|
- spec: gcc@12.2.0 arch=linux-rhel8-skylake
|
||||||
prefix: /opt/intel/oneapi
|
prefix: /usr
|
||||||
extra_attributes:
|
extra_attributes:
|
||||||
compilers:
|
|
||||||
c: /opt/intel/oneapi/compiler/2025.1/bin/icx
|
|
||||||
cxx: /opt/intel/oneapi/compiler/2025.1/bin/icpx
|
|
||||||
fortran: /opt/intel/oneapi/compiler/2025.1/bin/ifx
|
|
||||||
environment:
|
environment:
|
||||||
set:
|
set:
|
||||||
MKL_ROOT: "/path/to/mkl/root"
|
GCC_ROOT: /usr
|
||||||
unset: # A list of environment variables to unset
|
external:
|
||||||
- CC
|
- spec: llvm+clang@15.0.0 arch=linux-rhel8-skylake
|
||||||
prepend_path: # Similar for append|remove_path
|
prefix: /usr
|
||||||
LD_LIBRARY_PATH: /ld/paths/added/by/setvars/sh
|
extra_attributes:
|
||||||
|
compilers:
|
||||||
|
c: /usr/bin/clang-with-suffix
|
||||||
|
cxx: /usr/bin/clang++-with-extra-info
|
||||||
|
fortran: /usr/bin/gfortran
|
||||||
|
extra_rpaths:
|
||||||
|
- /usr/lib/llvm/
|
||||||
|
|
||||||
^^^^^^^^^^^^^^^^^^^^^^^
|
^^^^^^^^^^^^^^^^^^^^^^^
|
||||||
Build Your Own Compiler
|
Build Your Own Compiler
|
||||||
^^^^^^^^^^^^^^^^^^^^^^^
|
^^^^^^^^^^^^^^^^^^^^^^^
|
||||||
|
|
||||||
If you are particular about which compiler/version you use, you might wish to have Spack build it for you.
|
If you are particular about which compiler/version you use, you might
|
||||||
For example:
|
wish to have Spack build it for you. For example:
|
||||||
|
|
||||||
.. code-block:: console
|
.. code-block:: console
|
||||||
|
|
||||||
$ spack install gcc@14+binutils
|
$ spack install gcc@4.9.3
|
||||||
|
|
||||||
Once the compiler is installed, you can start using it without additional configuration:
|
Once that has finished, you will need to add it to your
|
||||||
|
``compilers.yaml`` file. You can then set Spack to use it by default
|
||||||
|
by adding the following to your ``packages.yaml`` file:
|
||||||
|
|
||||||
.. code-block:: console
|
.. code-block:: yaml
|
||||||
|
|
||||||
$ spack install hdf5~mpi %gcc@14
|
packages:
|
||||||
|
all:
|
||||||
The same holds true for compilers that are made available from buildcaches, when reusing them is allowed.
|
compiler: [gcc@4.9.3]
|
||||||
|
|
||||||
.. _compilers-requiring-modules:
|
.. _compilers-requiring-modules:
|
||||||
|
|
||||||
@@ -476,26 +536,30 @@ The same holds true for compilers that are made available from buildcaches, when
|
|||||||
Compilers Requiring Modules
|
Compilers Requiring Modules
|
||||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||||
|
|
||||||
Many installed compilers will work regardless of the environment they are called with.
|
Many installed compilers will work regardless of the environment they
|
||||||
However, some installed compilers require environment variables to be set in order to run;
|
are called with. However, some installed compilers require
|
||||||
this is typical for Intel and other proprietary compilers.
|
``$LD_LIBRARY_PATH`` or other environment variables to be set in order
|
||||||
|
to run; this is typical for Intel and other proprietary compilers.
|
||||||
|
|
||||||
On typical HPC clusters, these environment modifications are usually delegated to some "module" system.
|
In such a case, you should tell Spack which module(s) to load in order
|
||||||
In such a case, you should tell Spack which module(s) to load in order to run the chosen compiler:
|
to run the chosen compiler (If the compiler does not come with a
|
||||||
|
module file, you might consider making one by hand). Spack will load
|
||||||
|
this module into the environment ONLY when the compiler is run, and
|
||||||
|
NOT in general for a package's ``install()`` method. See, for
|
||||||
|
example, this ``compilers.yaml`` file:
|
||||||
|
|
||||||
.. code-block:: yaml
|
.. code-block:: yaml
|
||||||
|
|
||||||
packages:
|
compilers:
|
||||||
gcc:
|
- compiler:
|
||||||
externals:
|
modules: [other/comp/gcc-5.3-sp3]
|
||||||
- spec: gcc@10.5.0 languages='c,c++,fortran'
|
operating_system: SuSE11
|
||||||
prefix: /opt/compilers
|
paths:
|
||||||
extra_attributes:
|
cc: /usr/local/other/SLES11.3/gcc/5.3.0/bin/gcc
|
||||||
compilers:
|
cxx: /usr/local/other/SLES11.3/gcc/5.3.0/bin/g++
|
||||||
c: /opt/compilers/bin/gcc-10
|
f77: /usr/local/other/SLES11.3/gcc/5.3.0/bin/gfortran
|
||||||
cxx: /opt/compilers/bin/g++-10
|
fc: /usr/local/other/SLES11.3/gcc/5.3.0/bin/gfortran
|
||||||
fortran: /opt/compilers/bin/gfortran-10
|
spec: gcc@5.3.0
|
||||||
modules: [gcc/10.5.0]
|
|
||||||
|
|
||||||
Some compilers require special environment settings to be loaded not just
|
Some compilers require special environment settings to be loaded not just
|
||||||
to run, but also to execute the code they build, breaking packages that
|
to run, but also to execute the code they build, breaking packages that
|
||||||
@@ -516,7 +580,7 @@ Licensed Compilers
|
|||||||
^^^^^^^^^^^^^^^^^^
|
^^^^^^^^^^^^^^^^^^
|
||||||
|
|
||||||
Some proprietary compilers require licensing to use. If you need to
|
Some proprietary compilers require licensing to use. If you need to
|
||||||
use a licensed compiler, the process is similar to a mix of
|
use a licensed compiler (eg, PGI), the process is similar to a mix of
|
||||||
build your own, plus modules:
|
build your own, plus modules:
|
||||||
|
|
||||||
#. Create a Spack package (if it doesn't exist already) to install
|
#. Create a Spack package (if it doesn't exist already) to install
|
||||||
@@ -526,21 +590,24 @@ build your own, plus modules:
|
|||||||
using Spack to load the module it just created, and running simple
|
using Spack to load the module it just created, and running simple
|
||||||
builds (eg: ``cc helloWorld.c && ./a.out``)
|
builds (eg: ``cc helloWorld.c && ./a.out``)
|
||||||
|
|
||||||
#. Add the newly-installed compiler to ``packages.yaml`` as shown above.
|
#. Add the newly-installed compiler to ``compilers.yaml`` as shown
|
||||||
|
above.
|
||||||
|
|
||||||
.. _mixed-toolchains:
|
.. _mixed-toolchains:
|
||||||
|
|
||||||
^^^^^^^^^^^^^^^^^^^^^^^^^^
|
^^^^^^^^^^^^^^^^
|
||||||
Fortran compilers on macOS
|
Mixed Toolchains
|
||||||
^^^^^^^^^^^^^^^^^^^^^^^^^^
|
^^^^^^^^^^^^^^^^
|
||||||
|
|
||||||
Modern compilers typically come with related compilers for C, C++ and
|
Modern compilers typically come with related compilers for C, C++ and
|
||||||
Fortran bundled together. When possible, results are best if the same
|
Fortran bundled together. When possible, results are best if the same
|
||||||
compiler is used for all languages.
|
compiler is used for all languages.
|
||||||
|
|
||||||
In some cases, this is not possible. For example, XCode on macOS provides no Fortran compilers.
|
In some cases, this is not possible. For example, starting with macOS El
|
||||||
The user is therefore forced to use a mixed toolchain: XCode-provided Clang for C/C++ and e.g.
|
Capitan (10.11), many packages no longer build with GCC, but XCode
|
||||||
GNU ``gfortran`` for Fortran.
|
provides no Fortran compilers. The user is therefore forced to use a
|
||||||
|
mixed toolchain: XCode-provided Clang for C/C++ and GNU ``gfortran`` for
|
||||||
|
Fortran.
|
||||||
|
|
||||||
#. You need to make sure that Xcode is installed. Run the following command:
|
#. You need to make sure that Xcode is installed. Run the following command:
|
||||||
|
|
||||||
@@ -593,25 +660,45 @@ GNU ``gfortran`` for Fortran.
|
|||||||
|
|
||||||
Note: the flag is ``-license``, not ``--license``.
|
Note: the flag is ``-license``, not ``--license``.
|
||||||
|
|
||||||
|
#. Run ``spack compiler find`` to locate Clang.
|
||||||
|
|
||||||
#. There are different ways to get ``gfortran`` on macOS. For example, you can
|
#. There are different ways to get ``gfortran`` on macOS. For example, you can
|
||||||
install GCC with Spack (``spack install gcc``), with Homebrew (``brew install
|
install GCC with Spack (``spack install gcc``), with Homebrew (``brew install
|
||||||
gcc``), or from a `DMG installer
|
gcc``), or from a `DMG installer
|
||||||
<https://github.com/fxcoudert/gfortran-for-macOS/releases>`_.
|
<https://github.com/fxcoudert/gfortran-for-macOS/releases>`_.
|
||||||
|
|
||||||
#. Run ``spack compiler find`` to locate both Apple-Clang and GCC.
|
#. The only thing left to do is to edit ``~/.spack/darwin/compilers.yaml`` to provide
|
||||||
|
the path to ``gfortran``:
|
||||||
|
|
||||||
Since languages in Spack are modeled as virtual packages, ``apple-clang`` will be used to provide
|
.. code-block:: yaml
|
||||||
C and C++, while GCC will be used for Fortran.
|
|
||||||
|
compilers:
|
||||||
|
- compiler:
|
||||||
|
# ...
|
||||||
|
paths:
|
||||||
|
cc: /usr/bin/clang
|
||||||
|
cxx: /usr/bin/clang++
|
||||||
|
f77: /path/to/bin/gfortran
|
||||||
|
fc: /path/to/bin/gfortran
|
||||||
|
spec: apple-clang@11.0.0
|
||||||
|
|
||||||
|
|
||||||
|
If you used Spack to install GCC, you can get the installation prefix by
|
||||||
|
``spack location -i gcc`` (this will only work if you have a single version
|
||||||
|
of GCC installed). Whereas for Homebrew, GCC is installed in
|
||||||
|
``/usr/local/Cellar/gcc/x.y.z``. With the DMG installer, the correct path
|
||||||
|
will be ``/usr/local/gfortran``.
|
||||||
|
|
||||||
^^^^^^^^^^^^^^^^^^^^^
|
^^^^^^^^^^^^^^^^^^^^^
|
||||||
Compiler Verification
|
Compiler Verification
|
||||||
^^^^^^^^^^^^^^^^^^^^^
|
^^^^^^^^^^^^^^^^^^^^^
|
||||||
|
|
||||||
You can verify that your compilers are configured properly by installing a simple package. For example:
|
You can verify that your compilers are configured properly by installing a
|
||||||
|
simple package. For example:
|
||||||
|
|
||||||
.. code-block:: console
|
.. code-block:: console
|
||||||
|
|
||||||
$ spack install zlib-ng%gcc@5.3.0
|
$ spack install zlib%gcc@5.3.0
|
||||||
|
|
||||||
|
|
||||||
.. _vendor-specific-compiler-configuration:
|
.. _vendor-specific-compiler-configuration:
|
||||||
@@ -620,7 +707,9 @@ You can verify that your compilers are configured properly by installing a simpl
|
|||||||
Vendor-Specific Compiler Configuration
|
Vendor-Specific Compiler Configuration
|
||||||
--------------------------------------
|
--------------------------------------
|
||||||
|
|
||||||
This section provides details on how to get vendor-specific compilers working.
|
With Spack, things usually "just work" with GCC. Not so for other
|
||||||
|
compilers. This section provides details on how to get specific
|
||||||
|
compilers working.
|
||||||
|
|
||||||
^^^^^^^^^^^^^^^
|
^^^^^^^^^^^^^^^
|
||||||
Intel Compilers
|
Intel Compilers
|
||||||
@@ -642,8 +731,8 @@ compilers:
|
|||||||
you have installed from the ``PATH`` environment variable.
|
you have installed from the ``PATH`` environment variable.
|
||||||
|
|
||||||
If you want use a version of ``gcc`` or ``g++`` other than the default
|
If you want use a version of ``gcc`` or ``g++`` other than the default
|
||||||
version on your system, you need to use either the ``--gcc-install-dir``
|
version on your system, you need to use either the ``-gcc-name``
|
||||||
or ``--gcc-toolchain`` compiler option to specify the path to the version of
|
or ``-gxx-name`` compiler option to specify the path to the version of
|
||||||
``gcc`` or ``g++`` that you want to use."
|
``gcc`` or ``g++`` that you want to use."
|
||||||
|
|
||||||
-- `Intel Reference Guide <https://software.intel.com/en-us/node/522750>`_
|
-- `Intel Reference Guide <https://software.intel.com/en-us/node/522750>`_
|
||||||
@@ -651,12 +740,76 @@ compilers:
|
|||||||
Intel compilers may therefore be configured in one of two ways with
|
Intel compilers may therefore be configured in one of two ways with
|
||||||
Spack: using modules, or using compiler flags.
|
Spack: using modules, or using compiler flags.
|
||||||
|
|
||||||
|
""""""""""""""""""""""""""
|
||||||
|
Configuration with Modules
|
||||||
|
""""""""""""""""""""""""""
|
||||||
|
|
||||||
|
One can control which GCC is seen by the Intel compiler with modules.
|
||||||
|
A module must be loaded both for the Intel Compiler (so it will run)
|
||||||
|
and GCC (so the compiler can find the intended GCC). The following
|
||||||
|
configuration in ``compilers.yaml`` illustrates this technique:
|
||||||
|
|
||||||
|
.. code-block:: yaml
|
||||||
|
|
||||||
|
compilers:
|
||||||
|
- compiler:
|
||||||
|
modules: [gcc-4.9.3, intel-15.0.24]
|
||||||
|
operating_system: centos7
|
||||||
|
paths:
|
||||||
|
cc: /opt/intel-15.0.24/bin/icc-15.0.24-beta
|
||||||
|
cxx: /opt/intel-15.0.24/bin/icpc-15.0.24-beta
|
||||||
|
f77: /opt/intel-15.0.24/bin/ifort-15.0.24-beta
|
||||||
|
fc: /opt/intel-15.0.24/bin/ifort-15.0.24-beta
|
||||||
|
spec: intel@15.0.24.4.9.3
|
||||||
|
|
||||||
|
|
||||||
|
.. note::
|
||||||
|
|
||||||
|
The version number on the Intel compiler is a combination of
|
||||||
|
the "native" Intel version number and the GNU compiler it is
|
||||||
|
targeting.
|
||||||
|
|
||||||
|
""""""""""""""""""""""""""
|
||||||
|
Command Line Configuration
|
||||||
|
""""""""""""""""""""""""""
|
||||||
|
|
||||||
|
One can also control which GCC is seen by the Intel compiler by adding
|
||||||
|
flags to the ``icc`` command:
|
||||||
|
|
||||||
|
#. Identify the location of the compiler you just installed:
|
||||||
|
|
||||||
|
.. code-block:: console
|
||||||
|
|
||||||
|
$ spack location --install-dir gcc
|
||||||
|
~/spack/opt/spack/linux-centos7-x86_64/gcc-4.9.3-iy4rw...
|
||||||
|
|
||||||
|
#. Set up ``compilers.yaml``, for example:
|
||||||
|
|
||||||
|
.. code-block:: yaml
|
||||||
|
|
||||||
|
compilers:
|
||||||
|
- compiler:
|
||||||
|
modules: [intel-15.0.24]
|
||||||
|
operating_system: centos7
|
||||||
|
paths:
|
||||||
|
cc: /opt/intel-15.0.24/bin/icc-15.0.24-beta
|
||||||
|
cxx: /opt/intel-15.0.24/bin/icpc-15.0.24-beta
|
||||||
|
f77: /opt/intel-15.0.24/bin/ifort-15.0.24-beta
|
||||||
|
fc: /opt/intel-15.0.24/bin/ifort-15.0.24-beta
|
||||||
|
flags:
|
||||||
|
cflags: -gcc-name ~/spack/opt/spack/linux-centos7-x86_64/gcc-4.9.3-iy4rw.../bin/gcc
|
||||||
|
cxxflags: -gxx-name ~/spack/opt/spack/linux-centos7-x86_64/gcc-4.9.3-iy4rw.../bin/g++
|
||||||
|
fflags: -gcc-name ~/spack/opt/spack/linux-centos7-x86_64/gcc-4.9.3-iy4rw.../bin/gcc
|
||||||
|
spec: intel@15.0.24.4.9.3
|
||||||
|
|
||||||
|
|
||||||
^^^
|
^^^
|
||||||
NAG
|
NAG
|
||||||
^^^
|
^^^
|
||||||
|
|
||||||
The Numerical Algorithms Group provides a licensed Fortran compiler.
|
The Numerical Algorithms Group provides a licensed Fortran compiler. Like Clang,
|
||||||
It is recommended to use GCC for your C/C++ compilers.
|
this requires you to set up a :ref:`mixed-toolchains`. It is recommended to use
|
||||||
|
GCC for your C/C++ compilers.
|
||||||
|
|
||||||
The NAG Fortran compilers are a bit more strict than other compilers, and many
|
The NAG Fortran compilers are a bit more strict than other compilers, and many
|
||||||
packages will fail to install with error messages like:
|
packages will fail to install with error messages like:
|
||||||
@@ -673,40 +826,44 @@ the command line:
|
|||||||
|
|
||||||
$ spack install openmpi fflags="-mismatch"
|
$ spack install openmpi fflags="-mismatch"
|
||||||
|
|
||||||
Or it can be set permanently in your ``packages.yaml``:
|
Or it can be set permanently in your ``compilers.yaml``:
|
||||||
|
|
||||||
.. code-block:: yaml
|
.. code-block:: yaml
|
||||||
|
|
||||||
packages:
|
- compiler:
|
||||||
nag:
|
modules: []
|
||||||
externals:
|
operating_system: centos6
|
||||||
- spec: nag@6.1
|
paths:
|
||||||
prefix: /opt/nag/bin
|
cc: /soft/spack/opt/spack/linux-x86_64/gcc-5.3.0/gcc-6.1.0-q2zosj3igepi3pjnqt74bwazmptr5gpj/bin/gcc
|
||||||
extra_attributes:
|
cxx: /soft/spack/opt/spack/linux-x86_64/gcc-5.3.0/gcc-6.1.0-q2zosj3igepi3pjnqt74bwazmptr5gpj/bin/g++
|
||||||
compilers:
|
f77: /soft/spack/opt/spack/linux-x86_64/gcc-4.4.7/nag-6.1-jt3h5hwt5myezgqguhfsan52zcskqene/bin/nagfor
|
||||||
fortran: /opt/nag/bin/nagfor
|
fc: /soft/spack/opt/spack/linux-x86_64/gcc-4.4.7/nag-6.1-jt3h5hwt5myezgqguhfsan52zcskqene/bin/nagfor
|
||||||
flags:
|
flags:
|
||||||
fflags: -mismatch
|
fflags: -mismatch
|
||||||
|
spec: nag@6.1
|
||||||
|
|
||||||
|
|
||||||
---------------
|
---------------
|
||||||
System Packages
|
System Packages
|
||||||
---------------
|
---------------
|
||||||
|
|
||||||
Once compilers are configured, one needs to determine which pre-installed system packages,
|
Once compilers are configured, one needs to determine which
|
||||||
if any, to use in builds. These are also configured in the ``~/.spack/packages.yaml`` file.
|
pre-installed system packages, if any, to use in builds. This is
|
||||||
For example, to use an OpenMPI installed in /opt/local, one would use:
|
configured in the file ``~/.spack/packages.yaml``. For example, to use
|
||||||
|
an OpenMPI installed in /opt/local, one would use:
|
||||||
|
|
||||||
.. code-block:: yaml
|
.. code-block:: yaml
|
||||||
|
|
||||||
packages:
|
packages:
|
||||||
openmpi:
|
openmpi:
|
||||||
buildable: False
|
externals:
|
||||||
externals:
|
- spec: openmpi@1.10.1
|
||||||
- spec: openmpi@1.10.1
|
prefix: /opt/local
|
||||||
prefix: /opt/local
|
buildable: False
|
||||||
|
|
||||||
In general, *Spack is easier to use and more reliable if it builds all of its own dependencies*.
|
In general, Spack is easier to use and more reliable if it builds all of
|
||||||
However, there are several packages for which one commonly needs to use system versions:
|
its own dependencies. However, there are several packages for which one
|
||||||
|
commonly needs to use system versions:
|
||||||
|
|
||||||
^^^
|
^^^
|
||||||
MPI
|
MPI
|
||||||
@@ -719,7 +876,8 @@ you are unlikely to get a working MPI from Spack. Instead, use an
|
|||||||
appropriate pre-installed MPI.
|
appropriate pre-installed MPI.
|
||||||
|
|
||||||
If you choose a pre-installed MPI, you should consider using the
|
If you choose a pre-installed MPI, you should consider using the
|
||||||
pre-installed compiler used to build that MPI.
|
pre-installed compiler used to build that MPI; see above on
|
||||||
|
``compilers.yaml``.
|
||||||
|
|
||||||
^^^^^^^
|
^^^^^^^
|
||||||
OpenSSL
|
OpenSSL
|
||||||
@@ -1283,9 +1441,9 @@ To configure Spack, first run the following command inside the Spack console:
|
|||||||
spack compiler find
|
spack compiler find
|
||||||
|
|
||||||
This creates a ``.staging`` directory in our Spack prefix, along with a ``windows`` subdirectory
|
This creates a ``.staging`` directory in our Spack prefix, along with a ``windows`` subdirectory
|
||||||
containing a ``packages.yaml`` file. On a fresh Windows install with the above packages
|
containing a ``compilers.yaml`` file. On a fresh Windows install with the above packages
|
||||||
installed, this command should only detect Microsoft Visual Studio and the Intel Fortran
|
installed, this command should only detect Microsoft Visual Studio and the Intel Fortran
|
||||||
compiler will be integrated within the first version of MSVC present in the ``packages.yaml``
|
compiler will be integrated within the first version of MSVC present in the ``compilers.yaml``
|
||||||
output.
|
output.
|
||||||
|
|
||||||
Spack provides a default ``config.yaml`` file for Windows that it will use unless overridden.
|
Spack provides a default ``config.yaml`` file for Windows that it will use unless overridden.
|
||||||
|
@@ -23,6 +23,7 @@ components for use by dependent packages:
|
|||||||
|
|
||||||
packages:
|
packages:
|
||||||
all:
|
all:
|
||||||
|
compiler: [rocmcc@=5.3.0]
|
||||||
variants: amdgpu_target=gfx90a
|
variants: amdgpu_target=gfx90a
|
||||||
hip:
|
hip:
|
||||||
buildable: false
|
buildable: false
|
||||||
@@ -69,15 +70,16 @@ This is in combination with the following compiler definition:
|
|||||||
|
|
||||||
.. code-block:: yaml
|
.. code-block:: yaml
|
||||||
|
|
||||||
packages:
|
compilers:
|
||||||
llvm-amdgpu:
|
- compiler:
|
||||||
externals:
|
spec: rocmcc@=5.3.0
|
||||||
- spec: llvm-amdgpu@=5.3.0
|
paths:
|
||||||
prefix: /opt/rocm-5.3.0
|
cc: /opt/rocm-5.3.0/bin/amdclang
|
||||||
compilers:
|
cxx: /opt/rocm-5.3.0/bin/amdclang++
|
||||||
c: /opt/rocm-5.3.0/bin/amdclang
|
f77: null
|
||||||
cxx: /opt/rocm-5.3.0/bin/amdclang++
|
fc: /opt/rocm-5.3.0/bin/amdflang
|
||||||
fortran: null
|
operating_system: rhel8
|
||||||
|
target: x86_64
|
||||||
|
|
||||||
This includes the following considerations:
|
This includes the following considerations:
|
||||||
|
|
||||||
|
@@ -1,65 +0,0 @@
|
|||||||
.. Copyright Spack Project Developers. See COPYRIGHT file for details.
|
|
||||||
|
|
||||||
SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
|
||||||
|
|
||||||
.. _include-yaml:
|
|
||||||
|
|
||||||
===============================
|
|
||||||
Include Settings (include.yaml)
|
|
||||||
===============================
|
|
||||||
|
|
||||||
Spack allows you to include configuration files through ``include.yaml``.
|
|
||||||
Using the ``include:`` heading results in pulling in external configuration
|
|
||||||
information to be used by any Spack command.
|
|
||||||
|
|
||||||
Included configuration files are required *unless* they are explicitly optional
|
|
||||||
or the entry's condition evaluates to ``false``. Optional includes are specified
|
|
||||||
with the ``optional`` clause and conditional with the ``when`` clause. For
|
|
||||||
example,
|
|
||||||
|
|
||||||
.. code-block:: yaml
|
|
||||||
|
|
||||||
include:
|
|
||||||
- /path/to/a/required/config.yaml
|
|
||||||
- path: /path/to/$os/$target/config
|
|
||||||
optional: true
|
|
||||||
- path: /path/to/os-specific/config-dir
|
|
||||||
when: os == "ventura"
|
|
||||||
|
|
||||||
shows all three. The first entry, ``/path/to/a/required/config.yaml``,
|
|
||||||
indicates that included ``config.yaml`` file is required (so must exist).
|
|
||||||
Use of ``optional: true`` for ``/path/to/$os/$target/config`` means
|
|
||||||
the path is only included if it exists. The condition ``os == "ventura"``
|
|
||||||
in the ``when`` clause for ``/path/to/os-specific/config-dir`` means the
|
|
||||||
path is only included when the operating system (``os``) is ``ventura``.
|
|
||||||
|
|
||||||
The same conditions and variables in `Spec List References
|
|
||||||
<https://spack.readthedocs.io/en/latest/environments.html#spec-list-references>`_
|
|
||||||
can be used for conditional activation in the ``when`` clauses.
|
|
||||||
|
|
||||||
Included files can be specified by path or by their parent directory.
|
|
||||||
Paths may be absolute, relative (to the configuration file including the path),
|
|
||||||
or specified as URLs. Only the ``file``, ``ftp``, ``http`` and ``https`` protocols (or
|
|
||||||
schemes) are supported. Spack-specific, environment and user path variables
|
|
||||||
can be used. (See :ref:`config-file-variables` for more information.)
|
|
||||||
|
|
||||||
A ``sha256`` is required for remote file URLs and must be specified as follows:
|
|
||||||
|
|
||||||
.. code-block:: yaml
|
|
||||||
|
|
||||||
include:
|
|
||||||
- path: https://github.com/path/to/raw/config/compilers.yaml
|
|
||||||
sha256: 26e871804a92cd07bb3d611b31b4156ae93d35b6a6d6e0ef3a67871fcb1d258b
|
|
||||||
|
|
||||||
Additionally, remote file URLs must link to the **raw** form of the file's
|
|
||||||
contents (e.g., `GitHub
|
|
||||||
<https://docs.github.com/en/repositories/working-with-files/using-files/viewing-and-understanding-files#viewing-or-copying-the-raw-file-content>`_
|
|
||||||
or `GitLab
|
|
||||||
<https://docs.gitlab.com/ee/api/repository_files.html#get-raw-file-from-repository>`_).
|
|
||||||
|
|
||||||
.. warning::
|
|
||||||
|
|
||||||
Recursive includes are not currently processed in a breadth-first manner
|
|
||||||
so the value of a configuration option that is altered by multiple included
|
|
||||||
files may not be what you expect. This will be addressed in a future
|
|
||||||
update.
|
|
@@ -71,11 +71,9 @@ or refer to the full manual below.
|
|||||||
|
|
||||||
configuration
|
configuration
|
||||||
config_yaml
|
config_yaml
|
||||||
include_yaml
|
|
||||||
packages_yaml
|
packages_yaml
|
||||||
build_settings
|
build_settings
|
||||||
environments
|
environments
|
||||||
env_vars_yaml
|
|
||||||
containers
|
containers
|
||||||
mirrors
|
mirrors
|
||||||
module_file_support
|
module_file_support
|
||||||
|
@@ -128,7 +128,7 @@ depend on the spec:
|
|||||||
|
|
||||||
.. code-block:: python
|
.. code-block:: python
|
||||||
|
|
||||||
def setup_run_environment(self, env: EnvironmentModifications) -> None:
|
def setup_run_environment(self, env):
|
||||||
if self.spec.satisfies("+foo"):
|
if self.spec.satisfies("+foo"):
|
||||||
env.set("FOO", "bar")
|
env.set("FOO", "bar")
|
||||||
|
|
||||||
@@ -142,7 +142,7 @@ For example, a simplified version of the ``python`` package could look like this
|
|||||||
|
|
||||||
.. code-block:: python
|
.. code-block:: python
|
||||||
|
|
||||||
def setup_dependent_run_environment(self, env: EnvironmentModifications, dependent_spec: Spec) -> None:
|
def setup_dependent_run_environment(self, env, dependent_spec):
|
||||||
if dependent_spec.package.extends(self.spec):
|
if dependent_spec.package.extends(self.spec):
|
||||||
env.prepend_path("PYTHONPATH", dependent_spec.prefix.lib.python)
|
env.prepend_path("PYTHONPATH", dependent_spec.prefix.lib.python)
|
||||||
|
|
||||||
|
@@ -486,8 +486,6 @@ present. For instance with a configuration like:
|
|||||||
|
|
||||||
you will use ``mvapich2~cuda %gcc`` as an ``mpi`` provider.
|
you will use ``mvapich2~cuda %gcc`` as an ``mpi`` provider.
|
||||||
|
|
||||||
.. _package-strong-preferences:
|
|
||||||
|
|
||||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||||
Conflicts and strong preferences
|
Conflicts and strong preferences
|
||||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||||
@@ -557,13 +555,14 @@ preferences.
|
|||||||
FAQ: :ref:`Why does Spack pick particular versions and variants? <faq-concretizer-precedence>`
|
FAQ: :ref:`Why does Spack pick particular versions and variants? <faq-concretizer-precedence>`
|
||||||
|
|
||||||
|
|
||||||
The ``target`` and ``providers`` preferences
|
Most package preferences (``compilers``, ``target`` and ``providers``)
|
||||||
can only be set globally under the ``all`` section of ``packages.yaml``:
|
can only be set globally under the ``all`` section of ``packages.yaml``:
|
||||||
|
|
||||||
.. code-block:: yaml
|
.. code-block:: yaml
|
||||||
|
|
||||||
packages:
|
packages:
|
||||||
all:
|
all:
|
||||||
|
compiler: [gcc@12.2.0, clang@12:, oneapi@2023:]
|
||||||
target: [x86_64_v3]
|
target: [x86_64_v3]
|
||||||
providers:
|
providers:
|
||||||
mpi: [mvapich2, mpich, openmpi]
|
mpi: [mvapich2, mpich, openmpi]
|
||||||
|
@@ -369,9 +369,9 @@ If you have a collection of software expected to work well together with
|
|||||||
no source code of its own, you can create a :ref:`BundlePackage <bundlepackage>`.
|
no source code of its own, you can create a :ref:`BundlePackage <bundlepackage>`.
|
||||||
Examples where bundle packages can be useful include defining suites of
|
Examples where bundle packages can be useful include defining suites of
|
||||||
applications (e.g, `EcpProxyApps
|
applications (e.g, `EcpProxyApps
|
||||||
<https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/ecp_proxy_apps/package.py>`_), commonly used libraries
|
<https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/ecp-proxy-apps/package.py>`_), commonly used libraries
|
||||||
(e.g., `AmdAocl <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/amd_aocl/package.py>`_),
|
(e.g., `AmdAocl <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/amd-aocl/package.py>`_),
|
||||||
and software development kits (e.g., `EcpDataVisSdk <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/ecp_data_vis_sdk/package.py>`_).
|
and software development kits (e.g., `EcpDataVisSdk <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/ecp-data-vis-sdk/package.py>`_).
|
||||||
|
|
||||||
These versioned packages primarily consist of dependencies on the associated
|
These versioned packages primarily consist of dependencies on the associated
|
||||||
software packages. They can include :ref:`variants <variants>` to ensure
|
software packages. They can include :ref:`variants <variants>` to ensure
|
||||||
@@ -443,7 +443,7 @@ lives in:
|
|||||||
.. code-block:: console
|
.. code-block:: console
|
||||||
|
|
||||||
$ spack location -p gmp
|
$ spack location -p gmp
|
||||||
${SPACK_ROOT}/var/spack/repos/spack_repo/builtin/packages/gmp/package.py
|
${SPACK_ROOT}/var/spack/repos/builtin/packages/gmp/package.py
|
||||||
|
|
||||||
but ``spack edit`` provides a much simpler shortcut and saves you the
|
but ``spack edit`` provides a much simpler shortcut and saves you the
|
||||||
trouble of typing the full path.
|
trouble of typing the full path.
|
||||||
@@ -457,19 +457,19 @@ live in Spack's directory structure. In general, :ref:`cmd-spack-create`
|
|||||||
handles creating package files for you, so you can skip most of the
|
handles creating package files for you, so you can skip most of the
|
||||||
details here.
|
details here.
|
||||||
|
|
||||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||||
``var/spack/repos/spack_repo/builtin/packages``
|
``var/spack/repos/builtin/packages``
|
||||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||||
|
|
||||||
A Spack installation directory is structured like a standard UNIX
|
A Spack installation directory is structured like a standard UNIX
|
||||||
install prefix (``bin``, ``lib``, ``include``, ``var``, ``opt``,
|
install prefix (``bin``, ``lib``, ``include``, ``var``, ``opt``,
|
||||||
etc.). Most of the code for Spack lives in ``$SPACK_ROOT/lib/spack``.
|
etc.). Most of the code for Spack lives in ``$SPACK_ROOT/lib/spack``.
|
||||||
Packages themselves live in ``$SPACK_ROOT/var/spack/repos/spack_repo/builtin/packages``.
|
Packages themselves live in ``$SPACK_ROOT/var/spack/repos/builtin/packages``.
|
||||||
|
|
||||||
If you ``cd`` to that directory, you will see directories for each
|
If you ``cd`` to that directory, you will see directories for each
|
||||||
package:
|
package:
|
||||||
|
|
||||||
.. command-output:: cd $SPACK_ROOT/var/spack/repos/spack_repo/builtin/packages && ls
|
.. command-output:: cd $SPACK_ROOT/var/spack/repos/builtin/packages && ls
|
||||||
:shell:
|
:shell:
|
||||||
:ellipsis: 10
|
:ellipsis: 10
|
||||||
|
|
||||||
@@ -479,7 +479,7 @@ package lives in:
|
|||||||
|
|
||||||
.. code-block:: none
|
.. code-block:: none
|
||||||
|
|
||||||
$SPACK_ROOT/var/spack/repos/spack_repo/builtin/packages/libelf/package.py
|
$SPACK_ROOT/var/spack/repos/builtin/packages/libelf/package.py
|
||||||
|
|
||||||
Alongside the ``package.py`` file, a package may contain extra
|
Alongside the ``package.py`` file, a package may contain extra
|
||||||
directories or files (like patches) that it needs to build.
|
directories or files (like patches) that it needs to build.
|
||||||
@@ -492,7 +492,7 @@ Packages are named after the directory containing ``package.py``. So,
|
|||||||
``libelf``'s ``package.py`` lives in a directory called ``libelf``.
|
``libelf``'s ``package.py`` lives in a directory called ``libelf``.
|
||||||
The ``package.py`` file defines a class called ``Libelf``, which
|
The ``package.py`` file defines a class called ``Libelf``, which
|
||||||
extends Spack's ``Package`` class. For example, here is
|
extends Spack's ``Package`` class. For example, here is
|
||||||
``$SPACK_ROOT/var/spack/repos/spack_repo/builtin/packages/libelf/package.py``:
|
``$SPACK_ROOT/var/spack/repos/builtin/packages/libelf/package.py``:
|
||||||
|
|
||||||
.. code-block:: python
|
.. code-block:: python
|
||||||
:linenos:
|
:linenos:
|
||||||
@@ -520,7 +520,7 @@ these:
|
|||||||
$ spack install libelf@0.8.13
|
$ spack install libelf@0.8.13
|
||||||
|
|
||||||
Spack sees the package name in the spec and looks for
|
Spack sees the package name in the spec and looks for
|
||||||
``libelf/package.py`` in ``var/spack/repos/spack_repo/builtin/packages``.
|
``libelf/package.py`` in ``var/spack/repos/builtin/packages``.
|
||||||
Likewise, if you run ``spack install py-numpy``, Spack looks for
|
Likewise, if you run ``spack install py-numpy``, Spack looks for
|
||||||
``py-numpy/package.py``.
|
``py-numpy/package.py``.
|
||||||
|
|
||||||
@@ -686,7 +686,7 @@ https://www.open-mpi.org/software/ompi/v2.1/downloads/openmpi-2.1.1.tar.bz2
|
|||||||
In order to handle this, you can define a ``url_for_version()`` function
|
In order to handle this, you can define a ``url_for_version()`` function
|
||||||
like so:
|
like so:
|
||||||
|
|
||||||
.. literalinclude:: _spack_root/var/spack/repos/spack_repo/builtin/packages/openmpi/package.py
|
.. literalinclude:: _spack_root/var/spack/repos/builtin/packages/openmpi/package.py
|
||||||
:pyobject: Openmpi.url_for_version
|
:pyobject: Openmpi.url_for_version
|
||||||
|
|
||||||
With the use of this ``url_for_version()``, Spack knows to download OpenMPI ``2.1.1``
|
With the use of this ``url_for_version()``, Spack knows to download OpenMPI ``2.1.1``
|
||||||
@@ -787,7 +787,7 @@ of GNU. For that, Spack goes a step further and defines a mixin class that
|
|||||||
takes care of all of the plumbing and requires packagers to just define a proper
|
takes care of all of the plumbing and requires packagers to just define a proper
|
||||||
``gnu_mirror_path`` attribute:
|
``gnu_mirror_path`` attribute:
|
||||||
|
|
||||||
.. literalinclude:: _spack_root/var/spack/repos/spack_repo/builtin/packages/autoconf/package.py
|
.. literalinclude:: _spack_root/var/spack/repos/builtin/packages/autoconf/package.py
|
||||||
:lines: 9-18
|
:lines: 9-18
|
||||||
|
|
||||||
^^^^^^^^^^^^^^^^^^^^^^^^
|
^^^^^^^^^^^^^^^^^^^^^^^^
|
||||||
@@ -1995,7 +1995,7 @@ structure like this:
|
|||||||
|
|
||||||
.. code-block:: none
|
.. code-block:: none
|
||||||
|
|
||||||
$SPACK_ROOT/var/spack/repos/spack_repo/builtin/packages/
|
$SPACK_ROOT/var/spack/repos/builtin/packages/
|
||||||
mvapich2/
|
mvapich2/
|
||||||
package.py
|
package.py
|
||||||
ad_lustre_rwcontig_open_source.patch
|
ad_lustre_rwcontig_open_source.patch
|
||||||
@@ -2133,7 +2133,7 @@ handles ``RPATH``:
|
|||||||
|
|
||||||
.. _pyside-patch:
|
.. _pyside-patch:
|
||||||
|
|
||||||
.. literalinclude:: _spack_root/var/spack/repos/spack_repo/builtin/packages/py_pyside/package.py
|
.. literalinclude:: _spack_root/var/spack/repos/builtin/packages/py-pyside/package.py
|
||||||
:pyobject: PyPyside.patch
|
:pyobject: PyPyside.patch
|
||||||
:linenos:
|
:linenos:
|
||||||
|
|
||||||
@@ -2201,7 +2201,7 @@ using the ``spack resource show`` command::
|
|||||||
|
|
||||||
$ spack resource show 3877ab54
|
$ spack resource show 3877ab54
|
||||||
3877ab548f88597ab2327a2230ee048d2d07ace1062efe81fc92e91b7f39cd00
|
3877ab548f88597ab2327a2230ee048d2d07ace1062efe81fc92e91b7f39cd00
|
||||||
path: /home/spackuser/src/spack/var/spack/repos/spack_repo/builtin/packages/m4/gnulib-pgi.patch
|
path: /home/spackuser/src/spack/var/spack/repos/builtin/packages/m4/gnulib-pgi.patch
|
||||||
applies to: builtin.m4
|
applies to: builtin.m4
|
||||||
|
|
||||||
``spack resource show`` looks up downloadable resources from package
|
``spack resource show`` looks up downloadable resources from package
|
||||||
@@ -2219,7 +2219,7 @@ wonder where the extra boost patches are coming from::
|
|||||||
^boost@1.68.0%apple-clang@9.0.0+atomic+chrono~clanglibcpp cxxstd=default +date_time~debug+exception+filesystem+graph~icu+iostreams+locale+log+math~mpi+multithreaded~numpy patches=2ab6c72d03dec6a4ae20220a9dfd5c8c572c5294252155b85c6874d97c323199,b37164268f34f7133cbc9a4066ae98fda08adf51e1172223f6a969909216870f ~pic+program_options~python+random+regex+serialization+shared+signals~singlethreaded+system~taggedlayout+test+thread+timer~versionedlayout+wave arch=darwin-highsierra-x86_64
|
^boost@1.68.0%apple-clang@9.0.0+atomic+chrono~clanglibcpp cxxstd=default +date_time~debug+exception+filesystem+graph~icu+iostreams+locale+log+math~mpi+multithreaded~numpy patches=2ab6c72d03dec6a4ae20220a9dfd5c8c572c5294252155b85c6874d97c323199,b37164268f34f7133cbc9a4066ae98fda08adf51e1172223f6a969909216870f ~pic+program_options~python+random+regex+serialization+shared+signals~singlethreaded+system~taggedlayout+test+thread+timer~versionedlayout+wave arch=darwin-highsierra-x86_64
|
||||||
$ spack resource show b37164268
|
$ spack resource show b37164268
|
||||||
b37164268f34f7133cbc9a4066ae98fda08adf51e1172223f6a969909216870f
|
b37164268f34f7133cbc9a4066ae98fda08adf51e1172223f6a969909216870f
|
||||||
path: /home/spackuser/src/spack/var/spack/repos/spack_repo/builtin/packages/dealii/boost_1.68.0.patch
|
path: /home/spackuser/src/spack/var/spack/repos/builtin/packages/dealii/boost_1.68.0.patch
|
||||||
applies to: builtin.boost
|
applies to: builtin.boost
|
||||||
patched by: builtin.dealii
|
patched by: builtin.dealii
|
||||||
|
|
||||||
@@ -2930,7 +2930,7 @@ this, Spack provides four different methods that can be overridden in a package:
|
|||||||
|
|
||||||
The Qt package, for instance, uses this call:
|
The Qt package, for instance, uses this call:
|
||||||
|
|
||||||
.. literalinclude:: _spack_root/var/spack/repos/spack_repo/builtin/packages/qt/package.py
|
.. literalinclude:: _spack_root/var/spack/repos/builtin/packages/qt/package.py
|
||||||
:pyobject: Qt.setup_dependent_build_environment
|
:pyobject: Qt.setup_dependent_build_environment
|
||||||
:linenos:
|
:linenos:
|
||||||
|
|
||||||
@@ -2958,7 +2958,7 @@ variables to be used by the dependent. This is done by implementing
|
|||||||
:meth:`setup_dependent_package <spack.package_base.PackageBase.setup_dependent_package>`. An
|
:meth:`setup_dependent_package <spack.package_base.PackageBase.setup_dependent_package>`. An
|
||||||
example of this can be found in the ``Python`` package:
|
example of this can be found in the ``Python`` package:
|
||||||
|
|
||||||
.. literalinclude:: _spack_root/var/spack/repos/spack_repo/builtin/packages/python/package.py
|
.. literalinclude:: _spack_root/var/spack/repos/builtin/packages/python/package.py
|
||||||
:pyobject: Python.setup_dependent_package
|
:pyobject: Python.setup_dependent_package
|
||||||
:linenos:
|
:linenos:
|
||||||
|
|
||||||
@@ -3785,7 +3785,7 @@ It is usually sufficient for a packager to override a few
|
|||||||
build system specific helper methods or attributes to provide, for instance,
|
build system specific helper methods or attributes to provide, for instance,
|
||||||
configure arguments:
|
configure arguments:
|
||||||
|
|
||||||
.. literalinclude:: _spack_root/var/spack/repos/spack_repo/builtin/packages/m4/package.py
|
.. literalinclude:: _spack_root/var/spack/repos/builtin/packages/m4/package.py
|
||||||
:pyobject: M4.configure_args
|
:pyobject: M4.configure_args
|
||||||
:linenos:
|
:linenos:
|
||||||
|
|
||||||
@@ -4110,7 +4110,7 @@ Shell command functions
|
|||||||
|
|
||||||
Recall the install method from ``libelf``:
|
Recall the install method from ``libelf``:
|
||||||
|
|
||||||
.. literalinclude:: _spack_root/var/spack/repos/spack_repo/builtin/packages/libelf/package.py
|
.. literalinclude:: _spack_root/var/spack/repos/builtin/packages/libelf/package.py
|
||||||
:pyobject: Libelf.install
|
:pyobject: Libelf.install
|
||||||
:linenos:
|
:linenos:
|
||||||
|
|
||||||
@@ -4901,7 +4901,7 @@ the one passed to install, only the MPI implementations all set some
|
|||||||
additional properties on it to help you out. E.g., in openmpi, you'll
|
additional properties on it to help you out. E.g., in openmpi, you'll
|
||||||
find this:
|
find this:
|
||||||
|
|
||||||
.. literalinclude:: _spack_root/var/spack/repos/spack_repo/builtin/packages/openmpi/package.py
|
.. literalinclude:: _spack_root/var/spack/repos/builtin/packages/openmpi/package.py
|
||||||
:pyobject: Openmpi.setup_dependent_package
|
:pyobject: Openmpi.setup_dependent_package
|
||||||
|
|
||||||
That code allows the ``openmpi`` package to associate an ``mpicc`` property
|
That code allows the ``openmpi`` package to associate an ``mpicc`` property
|
||||||
@@ -6001,16 +6001,16 @@ with those implemented in the package itself.
|
|||||||
* - Parent/Provider Package
|
* - Parent/Provider Package
|
||||||
- Stand-alone Tests
|
- Stand-alone Tests
|
||||||
* - `C
|
* - `C
|
||||||
<https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/c>`_
|
<https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/c>`_
|
||||||
- Compiles ``hello.c`` and runs it
|
- Compiles ``hello.c`` and runs it
|
||||||
* - `Cxx
|
* - `Cxx
|
||||||
<https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/cxx>`_
|
<https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/cxx>`_
|
||||||
- Compiles and runs several ``hello`` programs
|
- Compiles and runs several ``hello`` programs
|
||||||
* - `Fortran
|
* - `Fortran
|
||||||
<https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/fortran>`_
|
<https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/fortran>`_
|
||||||
- Compiles and runs ``hello`` programs (``F`` and ``f90``)
|
- Compiles and runs ``hello`` programs (``F`` and ``f90``)
|
||||||
* - `Mpi
|
* - `Mpi
|
||||||
<https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/mpi>`_
|
<https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/mpi>`_
|
||||||
- Compiles and runs ``mpi_hello`` (``c``, ``fortran``)
|
- Compiles and runs ``mpi_hello`` (``c``, ``fortran``)
|
||||||
* - :ref:`PythonPackage <pythonpackage>`
|
* - :ref:`PythonPackage <pythonpackage>`
|
||||||
- Imports modules listed in the ``self.import_modules`` property with defaults derived from the tarball
|
- Imports modules listed in the ``self.import_modules`` property with defaults derived from the tarball
|
||||||
@@ -6031,7 +6031,7 @@ maintainers provide additional stand-alone tests customized to the package.
|
|||||||
One example of a package that adds its own stand-alone tests to those
|
One example of a package that adds its own stand-alone tests to those
|
||||||
"inherited" by the virtual package it provides an implementation for is
|
"inherited" by the virtual package it provides an implementation for is
|
||||||
the `Openmpi package
|
the `Openmpi package
|
||||||
<https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/openmpi/package.py>`_.
|
<https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/openmpi/package.py>`_.
|
||||||
|
|
||||||
Below are snippets from running and viewing the stand-alone test results
|
Below are snippets from running and viewing the stand-alone test results
|
||||||
for ``openmpi``:
|
for ``openmpi``:
|
||||||
|
@@ -9,7 +9,7 @@ Package Repositories (repos.yaml)
|
|||||||
=================================
|
=================================
|
||||||
|
|
||||||
Spack comes with thousands of built-in package recipes in
|
Spack comes with thousands of built-in package recipes in
|
||||||
``var/spack/repos/spack_repo/builtin/``. This is a **package repository** -- a
|
``var/spack/repos/builtin/``. This is a **package repository** -- a
|
||||||
directory that Spack searches when it needs to find a package by name.
|
directory that Spack searches when it needs to find a package by name.
|
||||||
You may need to maintain packages for restricted, proprietary or
|
You may need to maintain packages for restricted, proprietary or
|
||||||
experimental software separately from the built-in repository. Spack
|
experimental software separately from the built-in repository. Spack
|
||||||
@@ -69,7 +69,7 @@ The default ``etc/spack/defaults/repos.yaml`` file looks like this:
|
|||||||
.. code-block:: yaml
|
.. code-block:: yaml
|
||||||
|
|
||||||
repos:
|
repos:
|
||||||
- $spack/var/spack/repos/spack_repo/builtin
|
- $spack/var/spack/repos/builtin
|
||||||
|
|
||||||
The file starts with ``repos:`` and contains a single ordered list of
|
The file starts with ``repos:`` and contains a single ordered list of
|
||||||
paths to repositories. Each path is on a separate line starting with
|
paths to repositories. Each path is on a separate line starting with
|
||||||
@@ -78,16 +78,16 @@ paths to repositories. Each path is on a separate line starting with
|
|||||||
.. code-block:: yaml
|
.. code-block:: yaml
|
||||||
|
|
||||||
repos:
|
repos:
|
||||||
- /opt/repos/spack_repo/local_repo
|
- /opt/local-repo
|
||||||
- $spack/var/spack/repos/spack_repo/builtin
|
- $spack/var/spack/repos/builtin
|
||||||
|
|
||||||
When Spack interprets a spec, e.g., ``mpich`` in ``spack install mpich``,
|
When Spack interprets a spec, e.g., ``mpich`` in ``spack install mpich``,
|
||||||
it searches these repositories in order (first to last) to resolve each
|
it searches these repositories in order (first to last) to resolve each
|
||||||
package name. In this example, Spack will look for the following
|
package name. In this example, Spack will look for the following
|
||||||
packages and use the first valid file:
|
packages and use the first valid file:
|
||||||
|
|
||||||
1. ``/opt/repos/spack_repo/local_repo/packages/mpich/package.py``
|
1. ``/opt/local-repo/packages/mpich/package.py``
|
||||||
2. ``$spack/var/spack/repos/spack_repo/builtin/packages/mpich/package.py``
|
2. ``$spack/var/spack/repos/builtin/packages/mpich/package.py``
|
||||||
|
|
||||||
.. note::
|
.. note::
|
||||||
|
|
||||||
@@ -101,15 +101,14 @@ Namespaces
|
|||||||
|
|
||||||
Every repository in Spack has an associated **namespace** defined in its
|
Every repository in Spack has an associated **namespace** defined in its
|
||||||
top-level ``repo.yaml`` file. If you look at
|
top-level ``repo.yaml`` file. If you look at
|
||||||
``var/spack/repos/spack_repo/builtin/repo.yaml`` in the built-in repository, you'll
|
``var/spack/repos/builtin/repo.yaml`` in the built-in repository, you'll
|
||||||
see that its namespace is ``builtin``:
|
see that its namespace is ``builtin``:
|
||||||
|
|
||||||
.. code-block:: console
|
.. code-block:: console
|
||||||
|
|
||||||
$ cat var/spack/repos/spack_repo/builtin/repo.yaml
|
$ cat var/spack/repos/builtin/repo.yaml
|
||||||
repo:
|
repo:
|
||||||
namespace: builtin
|
namespace: builtin
|
||||||
api: v2.0
|
|
||||||
|
|
||||||
Spack records the repository namespace of each installed package. For
|
Spack records the repository namespace of each installed package. For
|
||||||
example, if you install the ``mpich`` package from the ``builtin`` repo,
|
example, if you install the ``mpich`` package from the ``builtin`` repo,
|
||||||
@@ -218,15 +217,15 @@ Suppose you have three repositories: the builtin Spack repo
|
|||||||
repo containing your own prototype packages (``proto``). Suppose they
|
repo containing your own prototype packages (``proto``). Suppose they
|
||||||
contain packages as follows:
|
contain packages as follows:
|
||||||
|
|
||||||
+--------------+-----------------------------------------------+-----------------------------+
|
+--------------+------------------------------------+-----------------------------+
|
||||||
| Namespace | Path to repo | Packages |
|
| Namespace | Path to repo | Packages |
|
||||||
+==============+===============================================+=============================+
|
+==============+====================================+=============================+
|
||||||
| ``proto`` | ``~/my_spack_repos/spack_repo/proto`` | ``mpich`` |
|
| ``proto`` | ``~/proto`` | ``mpich`` |
|
||||||
+--------------+-----------------------------------------------+-----------------------------+
|
+--------------+------------------------------------+-----------------------------+
|
||||||
| ``llnl`` | ``/usr/local/repos/spack_repo/llnl`` | ``hdf5`` |
|
| ``llnl`` | ``/usr/local/llnl`` | ``hdf5`` |
|
||||||
+--------------+-----------------------------------------------+-----------------------------+
|
+--------------+------------------------------------+-----------------------------+
|
||||||
| ``builtin`` | ``$spack/var/spack/repos/spack_repo/builtin`` | ``mpich``, ``hdf5``, others |
|
| ``builtin`` | ``$spack/var/spack/repos/builtin`` | ``mpich``, ``hdf5``, others |
|
||||||
+--------------+-----------------------------------------------+-----------------------------+
|
+--------------+------------------------------------+-----------------------------+
|
||||||
|
|
||||||
Suppose that ``hdf5`` depends on ``mpich``. You can override the
|
Suppose that ``hdf5`` depends on ``mpich``. You can override the
|
||||||
built-in ``hdf5`` by adding the ``llnl`` repo to ``repos.yaml``:
|
built-in ``hdf5`` by adding the ``llnl`` repo to ``repos.yaml``:
|
||||||
@@ -234,8 +233,8 @@ built-in ``hdf5`` by adding the ``llnl`` repo to ``repos.yaml``:
|
|||||||
.. code-block:: yaml
|
.. code-block:: yaml
|
||||||
|
|
||||||
repos:
|
repos:
|
||||||
- /usr/local/repos/spack_repo/llnl
|
- /usr/local/llnl
|
||||||
- $spack/var/spack/repos/spack_repo/builtin
|
- $spack/var/spack/repos/builtin
|
||||||
|
|
||||||
``spack install hdf5`` will install ``llnl.hdf5 ^builtin.mpich``.
|
``spack install hdf5`` will install ``llnl.hdf5 ^builtin.mpich``.
|
||||||
|
|
||||||
@@ -244,9 +243,9 @@ If, instead, ``repos.yaml`` looks like this:
|
|||||||
.. code-block:: yaml
|
.. code-block:: yaml
|
||||||
|
|
||||||
repos:
|
repos:
|
||||||
- ~/my_spack_repos/spack_repo/proto
|
- ~/proto
|
||||||
- /usr/local/repos/spack_repo/llnl
|
- /usr/local/llnl
|
||||||
- $spack/var/spack/repos/spack_repo/builtin
|
- $spack/var/spack/repos/builtin
|
||||||
|
|
||||||
``spack install hdf5`` will install ``llnl.hdf5 ^proto.mpich``.
|
``spack install hdf5`` will install ``llnl.hdf5 ^proto.mpich``.
|
||||||
|
|
||||||
@@ -327,8 +326,8 @@ files, use ``spack repo list``.
|
|||||||
|
|
||||||
$ spack repo list
|
$ spack repo list
|
||||||
==> 2 package repositories.
|
==> 2 package repositories.
|
||||||
myrepo v2.0 ~/my_spack_repos/spack_repo/myrepo
|
myrepo ~/myrepo
|
||||||
builtin v2.0 ~/spack/var/spack/repos/spack_repo/builtin
|
builtin ~/spack/var/spack/repos/builtin
|
||||||
|
|
||||||
Each repository is listed with its associated namespace. To get the raw,
|
Each repository is listed with its associated namespace. To get the raw,
|
||||||
merged YAML from all configuration files, use ``spack config get repos``:
|
merged YAML from all configuration files, use ``spack config get repos``:
|
||||||
@@ -336,9 +335,9 @@ merged YAML from all configuration files, use ``spack config get repos``:
|
|||||||
.. code-block:: console
|
.. code-block:: console
|
||||||
|
|
||||||
$ spack config get repos
|
$ spack config get repos
|
||||||
repos:
|
repos:srepos:
|
||||||
- ~/my_spack_repos/spack_repo/myrepo
|
- ~/myrepo
|
||||||
- $spack/var/spack/repos/spack_repo/builtin
|
- $spack/var/spack/repos/builtin
|
||||||
|
|
||||||
Note that, unlike ``spack repo list``, this does not include the
|
Note that, unlike ``spack repo list``, this does not include the
|
||||||
namespace, which is read from each repo's ``repo.yaml``.
|
namespace, which is read from each repo's ``repo.yaml``.
|
||||||
@@ -352,54 +351,66 @@ yourself; you can use the ``spack repo create`` command.
|
|||||||
|
|
||||||
.. code-block:: console
|
.. code-block:: console
|
||||||
|
|
||||||
$ spack repo create ~/my_spack_repos myrepo
|
$ spack repo create myrepo
|
||||||
==> Created repo with namespace 'myrepo'.
|
==> Created repo with namespace 'myrepo'.
|
||||||
==> To register it with spack, run this command:
|
==> To register it with spack, run this command:
|
||||||
spack repo add ~/my_spack_repos/spack_repo/myrepo
|
spack repo add ~/myrepo
|
||||||
|
|
||||||
$ ls ~/my_spack_repos/spack_repo/myrepo
|
$ ls myrepo
|
||||||
packages/ repo.yaml
|
packages/ repo.yaml
|
||||||
|
|
||||||
$ cat ~/my_spack_repos/spack_repo/myrepo/repo.yaml
|
$ cat myrepo/repo.yaml
|
||||||
repo:
|
repo:
|
||||||
namespace: 'myrepo'
|
namespace: 'myrepo'
|
||||||
api: v2.0
|
|
||||||
|
|
||||||
Namespaces can also be nested, which can be useful if you have
|
By default, the namespace of a new repo matches its directory's name.
|
||||||
multiple package repositories for an organization. Spack will
|
You can supply a custom namespace with a second argument, e.g.:
|
||||||
create the corresponding directory structure for you:
|
|
||||||
|
|
||||||
.. code-block:: console
|
.. code-block:: console
|
||||||
|
|
||||||
$ spack repo create ~/my_spack_repos llnl.comp
|
$ spack repo create myrepo llnl.comp
|
||||||
==> Created repo with namespace 'llnl.comp'.
|
==> Created repo with namespace 'llnl.comp'.
|
||||||
==> To register it with spack, run this command:
|
==> To register it with spack, run this command:
|
||||||
spack repo add ~/my_spack_repos/spack_repo/llnl/comp
|
spack repo add ~/myrepo
|
||||||
|
|
||||||
|
$ cat myrepo/repo.yaml
|
||||||
$ cat ~/my_spack_repos/spack_repo/llnl/comp/repo.yaml
|
|
||||||
repo:
|
repo:
|
||||||
namespace: 'llnl.comp'
|
namespace: 'llnl.comp'
|
||||||
api: v2.0
|
|
||||||
|
You can also create repositories with custom structure with the ``-d/--subdirectory``
|
||||||
|
argument, e.g.:
|
||||||
|
|
||||||
|
.. code-block:: console
|
||||||
|
|
||||||
|
$ spack repo create -d applications myrepo apps
|
||||||
|
==> Created repo with namespace 'apps'.
|
||||||
|
==> To register it with Spack, run this command:
|
||||||
|
spack repo add ~/myrepo
|
||||||
|
|
||||||
|
$ ls myrepo
|
||||||
|
applications/ repo.yaml
|
||||||
|
|
||||||
|
$ cat myrepo/repo.yaml
|
||||||
|
repo:
|
||||||
|
namespace: apps
|
||||||
|
subdirectory: applications
|
||||||
|
|
||||||
^^^^^^^^^^^^^^^^^^
|
^^^^^^^^^^^^^^^^^^
|
||||||
``spack repo add``
|
``spack repo add``
|
||||||
^^^^^^^^^^^^^^^^^^
|
^^^^^^^^^^^^^^^^^^
|
||||||
|
|
||||||
Once your repository is created, you can register it with Spack with
|
Once your repository is created, you can register it with Spack with
|
||||||
``spack repo add``. You nee to specify the path to the directory that
|
``spack repo add``:
|
||||||
contains the ``repo.yaml`` file.
|
|
||||||
|
|
||||||
.. code-block:: console
|
.. code-block:: console
|
||||||
|
|
||||||
$ spack repo add ~/my_spack_repos/spack_repo/llnl/comp
|
$ spack repo add ./myrepo
|
||||||
==> Added repo with namespace 'llnl.comp'.
|
==> Added repo with namespace 'llnl.comp'.
|
||||||
|
|
||||||
$ spack repo list
|
$ spack repo list
|
||||||
==> 2 package repositories.
|
==> 2 package repositories.
|
||||||
llnl.comp v2.0 ~/my_spack_repos/spack_repo/llnl/comp
|
llnl.comp ~/myrepo
|
||||||
builtin v2.0 ~/spack/var/spack/repos/spack_repo/builtin
|
builtin ~/spack/var/spack/repos/builtin
|
||||||
|
|
||||||
|
|
||||||
This simply adds the repo to your ``repos.yaml`` file.
|
This simply adds the repo to your ``repos.yaml`` file.
|
||||||
|
|
||||||
@@ -421,43 +432,46 @@ By namespace:
|
|||||||
.. code-block:: console
|
.. code-block:: console
|
||||||
|
|
||||||
$ spack repo rm llnl.comp
|
$ spack repo rm llnl.comp
|
||||||
==> Removed repository ~/my_spack_repos/spack_repo/llnl/comp with namespace 'llnl.comp'.
|
==> Removed repository ~/myrepo with namespace 'llnl.comp'.
|
||||||
|
|
||||||
$ spack repo list
|
$ spack repo list
|
||||||
==> 1 package repository.
|
==> 1 package repository.
|
||||||
builtin ~/spack/var/spack/repos/spack_repo/builtin
|
builtin ~/spack/var/spack/repos/builtin
|
||||||
|
|
||||||
By path:
|
By path:
|
||||||
|
|
||||||
.. code-block:: console
|
.. code-block:: console
|
||||||
|
|
||||||
$ spack repo rm ~/my_spack_repos/spack_repo/llnl/comp
|
$ spack repo rm ~/myrepo
|
||||||
==> Removed repository ~/my_spack_repos/spack_repo/llnl/comp
|
==> Removed repository ~/myrepo
|
||||||
|
|
||||||
$ spack repo list
|
$ spack repo list
|
||||||
==> 1 package repository.
|
==> 1 package repository.
|
||||||
builtin ~/spack/var/spack/repos/spack_repo/builtin
|
builtin ~/spack/var/spack/repos/builtin
|
||||||
|
|
||||||
--------------------------------
|
--------------------------------
|
||||||
Repo namespaces and Python
|
Repo namespaces and Python
|
||||||
--------------------------------
|
--------------------------------
|
||||||
|
|
||||||
Package repositories are implemented as Python packages. To be precise,
|
You may have noticed that namespace notation for repositories is similar
|
||||||
they are `namespace packages
|
to the notation for namespaces in Python. As it turns out, you *can*
|
||||||
<https://packaging.python.org/en/latest/guides/packaging-namespace-packages/>`_
|
treat Spack repositories like Python packages; this is how they are
|
||||||
with ``spack_repo`` the top-level namespace, followed by the repository
|
implemented.
|
||||||
namespace as submodules. For example, the builtin repository corresponds
|
|
||||||
to the Python module ``spack_repo.builtin.packages``.
|
|
||||||
|
|
||||||
This structure allows you to extend a ``builtin`` package in your own
|
You could, for example, extend a ``builtin`` package in your own
|
||||||
repository:
|
repository:
|
||||||
|
|
||||||
.. code-block:: python
|
.. code-block:: python
|
||||||
|
|
||||||
from spack_repo.builtin.packages.mpich.package import Mpich
|
from spack.pkg.builtin.mpich import Mpich
|
||||||
|
|
||||||
class MyPackage(Mpich):
|
class MyPackage(Mpich):
|
||||||
...
|
...
|
||||||
|
|
||||||
Spack populates ``sys.path`` at runtime with the path to the root of your
|
Spack repo namespaces are actually Python namespaces tacked on under
|
||||||
package repository's ``spack_repo`` directory.
|
``spack.pkg``. The search semantics of ``repos.yaml`` are actually
|
||||||
|
implemented using Python's built-in `sys.path
|
||||||
|
<https://docs.python.org/2/library/sys.html#sys.path>`_ search. The
|
||||||
|
:py:mod:`spack.repo` module implements a custom `Python importer
|
||||||
|
<https://docs.python.org/2/library/imp.html>`_.
|
||||||
|
|
||||||
|
@@ -1,13 +1,13 @@
|
|||||||
sphinx==8.2.3
|
sphinx==8.2.1
|
||||||
sphinxcontrib-programoutput==0.18
|
sphinxcontrib-programoutput==0.18
|
||||||
sphinx_design==0.6.1
|
sphinx_design==0.6.1
|
||||||
sphinx-rtd-theme==3.0.2
|
sphinx-rtd-theme==3.0.2
|
||||||
python-levenshtein==0.27.1
|
python-levenshtein==0.26.1
|
||||||
docutils==0.21.2
|
docutils==0.21.2
|
||||||
pygments==2.19.1
|
pygments==2.19.1
|
||||||
urllib3==2.4.0
|
urllib3==2.3.0
|
||||||
pytest==8.3.5
|
pytest==8.3.4
|
||||||
isort==6.0.1
|
isort==6.0.0
|
||||||
black==25.1.0
|
black==25.1.0
|
||||||
flake8==7.2.0
|
flake8==7.1.2
|
||||||
mypy==1.11.1
|
mypy==1.11.1
|
||||||
|
13
lib/spack/external/__init__.py
vendored
13
lib/spack/external/__init__.py
vendored
@@ -11,7 +11,6 @@
|
|||||||
* Homepage: https://altgraph.readthedocs.io/en/latest/index.html
|
* Homepage: https://altgraph.readthedocs.io/en/latest/index.html
|
||||||
* Usage: dependency of macholib
|
* Usage: dependency of macholib
|
||||||
* Version: 0.17.3
|
* Version: 0.17.3
|
||||||
* License: MIT
|
|
||||||
|
|
||||||
archspec
|
archspec
|
||||||
--------
|
--------
|
||||||
@@ -19,7 +18,6 @@
|
|||||||
* Homepage: https://pypi.python.org/pypi/archspec
|
* Homepage: https://pypi.python.org/pypi/archspec
|
||||||
* Usage: Labeling, comparison and detection of microarchitectures
|
* Usage: Labeling, comparison and detection of microarchitectures
|
||||||
* Version: 0.2.5 (commit 38ce485258ffc4fc6dd6688f8dc90cb269478c47)
|
* Version: 0.2.5 (commit 38ce485258ffc4fc6dd6688f8dc90cb269478c47)
|
||||||
* License: Apache-2.0 or MIT
|
|
||||||
|
|
||||||
astunparse
|
astunparse
|
||||||
----------------
|
----------------
|
||||||
@@ -27,7 +25,6 @@
|
|||||||
* Homepage: https://github.com/simonpercivall/astunparse
|
* Homepage: https://github.com/simonpercivall/astunparse
|
||||||
* Usage: Unparsing Python ASTs for package hashes in Spack
|
* Usage: Unparsing Python ASTs for package hashes in Spack
|
||||||
* Version: 1.6.3 (plus modifications)
|
* Version: 1.6.3 (plus modifications)
|
||||||
* License: PSF-2.0
|
|
||||||
* Note: This is in ``spack.util.unparse`` because it's very heavily
|
* Note: This is in ``spack.util.unparse`` because it's very heavily
|
||||||
modified, and we want to track coverage for it.
|
modified, and we want to track coverage for it.
|
||||||
Specifically, we have modified this library to generate consistent unparsed ASTs
|
Specifically, we have modified this library to generate consistent unparsed ASTs
|
||||||
@@ -44,7 +41,6 @@
|
|||||||
* Homepage: https://github.com/python-attrs/attrs
|
* Homepage: https://github.com/python-attrs/attrs
|
||||||
* Usage: Needed by jsonschema.
|
* Usage: Needed by jsonschema.
|
||||||
* Version: 22.1.0
|
* Version: 22.1.0
|
||||||
* License: MIT
|
|
||||||
|
|
||||||
ctest_log_parser
|
ctest_log_parser
|
||||||
----------------
|
----------------
|
||||||
@@ -52,7 +48,6 @@
|
|||||||
* Homepage: https://github.com/Kitware/CMake/blob/master/Source/CTest/cmCTestBuildHandler.cxx
|
* Homepage: https://github.com/Kitware/CMake/blob/master/Source/CTest/cmCTestBuildHandler.cxx
|
||||||
* Usage: Functions to parse build logs and extract error messages.
|
* Usage: Functions to parse build logs and extract error messages.
|
||||||
* Version: Unversioned
|
* Version: Unversioned
|
||||||
* License: BSD-3-Clause
|
|
||||||
* Note: This is a homemade port of Kitware's CTest build handler.
|
* Note: This is a homemade port of Kitware's CTest build handler.
|
||||||
|
|
||||||
distro
|
distro
|
||||||
@@ -61,7 +56,6 @@
|
|||||||
* Homepage: https://pypi.python.org/pypi/distro
|
* Homepage: https://pypi.python.org/pypi/distro
|
||||||
* Usage: Provides a more stable linux distribution detection.
|
* Usage: Provides a more stable linux distribution detection.
|
||||||
* Version: 1.8.0
|
* Version: 1.8.0
|
||||||
* License: Apache-2.0
|
|
||||||
|
|
||||||
jinja2
|
jinja2
|
||||||
------
|
------
|
||||||
@@ -69,7 +63,6 @@
|
|||||||
* Homepage: https://pypi.python.org/pypi/Jinja2
|
* Homepage: https://pypi.python.org/pypi/Jinja2
|
||||||
* Usage: A modern and designer-friendly templating language for Python.
|
* Usage: A modern and designer-friendly templating language for Python.
|
||||||
* Version: 3.0.3 (last version supporting Python 3.6)
|
* Version: 3.0.3 (last version supporting Python 3.6)
|
||||||
* License: BSD-3-Clause
|
|
||||||
|
|
||||||
jsonschema
|
jsonschema
|
||||||
----------
|
----------
|
||||||
@@ -77,7 +70,6 @@
|
|||||||
* Homepage: https://pypi.python.org/pypi/jsonschema
|
* Homepage: https://pypi.python.org/pypi/jsonschema
|
||||||
* Usage: An implementation of JSON Schema for Python.
|
* Usage: An implementation of JSON Schema for Python.
|
||||||
* Version: 3.2.0 (last version before 2.7 and 3.6 support was dropped)
|
* Version: 3.2.0 (last version before 2.7 and 3.6 support was dropped)
|
||||||
* License: MIT
|
|
||||||
* Note: We don't include tests or benchmarks; just what Spack needs.
|
* Note: We don't include tests or benchmarks; just what Spack needs.
|
||||||
|
|
||||||
macholib
|
macholib
|
||||||
@@ -86,7 +78,6 @@
|
|||||||
* Homepage: https://macholib.readthedocs.io/en/latest/index.html#
|
* Homepage: https://macholib.readthedocs.io/en/latest/index.html#
|
||||||
* Usage: Manipulation of Mach-o binaries for relocating macOS buildcaches on Linux
|
* Usage: Manipulation of Mach-o binaries for relocating macOS buildcaches on Linux
|
||||||
* Version: 1.16.2
|
* Version: 1.16.2
|
||||||
* License: MIT
|
|
||||||
|
|
||||||
markupsafe
|
markupsafe
|
||||||
----------
|
----------
|
||||||
@@ -94,7 +85,6 @@
|
|||||||
* Homepage: https://pypi.python.org/pypi/MarkupSafe
|
* Homepage: https://pypi.python.org/pypi/MarkupSafe
|
||||||
* Usage: Implements a XML/HTML/XHTML Markup safe string for Python.
|
* Usage: Implements a XML/HTML/XHTML Markup safe string for Python.
|
||||||
* Version: 2.0.1 (last version supporting Python 3.6)
|
* Version: 2.0.1 (last version supporting Python 3.6)
|
||||||
* License: BSD-3-Clause
|
|
||||||
|
|
||||||
pyrsistent
|
pyrsistent
|
||||||
----------
|
----------
|
||||||
@@ -102,7 +92,6 @@
|
|||||||
* Homepage: http://github.com/tobgu/pyrsistent/
|
* Homepage: http://github.com/tobgu/pyrsistent/
|
||||||
* Usage: Needed by `jsonschema`
|
* Usage: Needed by `jsonschema`
|
||||||
* Version: 0.18.0
|
* Version: 0.18.0
|
||||||
* License: MIT
|
|
||||||
|
|
||||||
ruamel.yaml
|
ruamel.yaml
|
||||||
------
|
------
|
||||||
@@ -112,7 +101,6 @@
|
|||||||
actively maintained and has more features, including round-tripping
|
actively maintained and has more features, including round-tripping
|
||||||
comments read from config files.
|
comments read from config files.
|
||||||
* Version: 0.17.21
|
* Version: 0.17.21
|
||||||
* License: MIT
|
|
||||||
|
|
||||||
six
|
six
|
||||||
---
|
---
|
||||||
@@ -120,6 +108,5 @@
|
|||||||
* Homepage: https://pypi.python.org/pypi/six
|
* Homepage: https://pypi.python.org/pypi/six
|
||||||
* Usage: Python 2 and 3 compatibility utilities.
|
* Usage: Python 2 and 3 compatibility utilities.
|
||||||
* Version: 1.16.0
|
* Version: 1.16.0
|
||||||
* License: MIT
|
|
||||||
|
|
||||||
"""
|
"""
|
||||||
|
@@ -764,7 +764,7 @@ def copy_tree(
|
|||||||
|
|
||||||
files = glob.glob(src)
|
files = glob.glob(src)
|
||||||
if not files:
|
if not files:
|
||||||
raise OSError("No such file or directory: '{0}'".format(src), errno.ENOENT)
|
raise OSError("No such file or directory: '{0}'".format(src))
|
||||||
|
|
||||||
# For Windows hard-links and junctions, the source path must exist to make a symlink. Add
|
# For Windows hard-links and junctions, the source path must exist to make a symlink. Add
|
||||||
# all symlinks to this list while traversing the tree, then when finished, make all
|
# all symlinks to this list while traversing the tree, then when finished, make all
|
||||||
|
@@ -11,24 +11,10 @@
|
|||||||
import re
|
import re
|
||||||
import sys
|
import sys
|
||||||
import traceback
|
import traceback
|
||||||
import types
|
|
||||||
import typing
|
import typing
|
||||||
import warnings
|
import warnings
|
||||||
from datetime import datetime, timedelta
|
from datetime import datetime, timedelta
|
||||||
from typing import (
|
from typing import Callable, Dict, Iterable, List, Mapping, Optional, Tuple, TypeVar
|
||||||
Any,
|
|
||||||
Callable,
|
|
||||||
Dict,
|
|
||||||
Generic,
|
|
||||||
Iterable,
|
|
||||||
Iterator,
|
|
||||||
List,
|
|
||||||
Mapping,
|
|
||||||
Optional,
|
|
||||||
Tuple,
|
|
||||||
TypeVar,
|
|
||||||
Union,
|
|
||||||
)
|
|
||||||
|
|
||||||
# Ignore emacs backups when listing modules
|
# Ignore emacs backups when listing modules
|
||||||
ignore_modules = r"^\.#|~$"
|
ignore_modules = r"^\.#|~$"
|
||||||
@@ -437,39 +423,46 @@ def add_func_to_class(name, func):
|
|||||||
return cls
|
return cls
|
||||||
|
|
||||||
|
|
||||||
K = TypeVar("K")
|
|
||||||
V = TypeVar("V")
|
|
||||||
|
|
||||||
|
|
||||||
@lazy_lexicographic_ordering
|
@lazy_lexicographic_ordering
|
||||||
class HashableMap(typing.MutableMapping[K, V]):
|
class HashableMap(collections.abc.MutableMapping):
|
||||||
"""This is a hashable, comparable dictionary. Hash is performed on
|
"""This is a hashable, comparable dictionary. Hash is performed on
|
||||||
a tuple of the values in the dictionary."""
|
a tuple of the values in the dictionary."""
|
||||||
|
|
||||||
__slots__ = ("dict",)
|
__slots__ = ("dict",)
|
||||||
|
|
||||||
def __init__(self):
|
def __init__(self):
|
||||||
self.dict: Dict[K, V] = {}
|
self.dict = {}
|
||||||
|
|
||||||
def __getitem__(self, key: K) -> V:
|
def __getitem__(self, key):
|
||||||
return self.dict[key]
|
return self.dict[key]
|
||||||
|
|
||||||
def __setitem__(self, key: K, value: V) -> None:
|
def __setitem__(self, key, value):
|
||||||
self.dict[key] = value
|
self.dict[key] = value
|
||||||
|
|
||||||
def __iter__(self) -> Iterator[K]:
|
def __iter__(self):
|
||||||
return iter(self.dict)
|
return iter(self.dict)
|
||||||
|
|
||||||
def __len__(self) -> int:
|
def __len__(self):
|
||||||
return len(self.dict)
|
return len(self.dict)
|
||||||
|
|
||||||
def __delitem__(self, key: K) -> None:
|
def __delitem__(self, key):
|
||||||
del self.dict[key]
|
del self.dict[key]
|
||||||
|
|
||||||
def _cmp_iter(self):
|
def _cmp_iter(self):
|
||||||
for _, v in sorted(self.items()):
|
for _, v in sorted(self.items()):
|
||||||
yield v
|
yield v
|
||||||
|
|
||||||
|
def copy(self):
|
||||||
|
"""Type-agnostic clone method. Preserves subclass type."""
|
||||||
|
# Construct a new dict of my type
|
||||||
|
self_type = type(self)
|
||||||
|
clone = self_type()
|
||||||
|
|
||||||
|
# Copy everything from this dict into it.
|
||||||
|
for key in self:
|
||||||
|
clone[key] = self[key].copy()
|
||||||
|
return clone
|
||||||
|
|
||||||
|
|
||||||
def match_predicate(*args):
|
def match_predicate(*args):
|
||||||
"""Utility function for making string matching predicates.
|
"""Utility function for making string matching predicates.
|
||||||
@@ -714,24 +707,14 @@ def __init__(self, wrapped_object):
|
|||||||
|
|
||||||
|
|
||||||
class Singleton:
|
class Singleton:
|
||||||
"""Wrapper for lazily initialized singleton objects."""
|
"""Simple wrapper for lazily initialized singleton objects."""
|
||||||
|
|
||||||
def __init__(self, factory: Callable[[], object]):
|
def __init__(self, factory):
|
||||||
"""Create a new singleton to be inited with the factory function.
|
"""Create a new singleton to be inited with the factory function.
|
||||||
|
|
||||||
Most factories will simply create the object to be initialized and
|
|
||||||
return it.
|
|
||||||
|
|
||||||
In some cases, e.g. when bootstrapping some global state, the singleton
|
|
||||||
may need to be initialized incrementally. If the factory returns a generator
|
|
||||||
instead of a regular object, the singleton will assign each result yielded by
|
|
||||||
the generator to the singleton instance. This allows methods called by
|
|
||||||
the factory in later stages to refer back to the singleton.
|
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
factory (function): function taking no arguments that creates the
|
factory (function): function taking no arguments that
|
||||||
singleton instance.
|
creates the singleton instance.
|
||||||
|
|
||||||
"""
|
"""
|
||||||
self.factory = factory
|
self.factory = factory
|
||||||
self._instance = None
|
self._instance = None
|
||||||
@@ -739,16 +722,7 @@ def __init__(self, factory: Callable[[], object]):
|
|||||||
@property
|
@property
|
||||||
def instance(self):
|
def instance(self):
|
||||||
if self._instance is None:
|
if self._instance is None:
|
||||||
instance = self.factory()
|
self._instance = self.factory()
|
||||||
|
|
||||||
if isinstance(instance, types.GeneratorType):
|
|
||||||
# if it's a generator, assign every value
|
|
||||||
for value in instance:
|
|
||||||
self._instance = value
|
|
||||||
else:
|
|
||||||
# if not, just assign the result like a normal singleton
|
|
||||||
self._instance = instance
|
|
||||||
|
|
||||||
return self._instance
|
return self._instance
|
||||||
|
|
||||||
def __getattr__(self, name):
|
def __getattr__(self, name):
|
||||||
@@ -1053,28 +1027,19 @@ def __exit__(self, exc_type, exc_value, tb):
|
|||||||
return True
|
return True
|
||||||
|
|
||||||
|
|
||||||
ClassPropertyType = TypeVar("ClassPropertyType")
|
class classproperty:
|
||||||
|
|
||||||
|
|
||||||
class classproperty(Generic[ClassPropertyType]):
|
|
||||||
"""Non-data descriptor to evaluate a class-level property. The function that performs
|
"""Non-data descriptor to evaluate a class-level property. The function that performs
|
||||||
the evaluation is injected at creation time and takes an owner (i.e., the class that
|
the evaluation is injected at creation time and take an instance (could be None) and
|
||||||
originated the instance).
|
an owner (i.e. the class that originated the instance)
|
||||||
"""
|
"""
|
||||||
|
|
||||||
def __init__(self, callback: Callable[[Any], ClassPropertyType]) -> None:
|
def __init__(self, callback):
|
||||||
self.callback = callback
|
self.callback = callback
|
||||||
|
|
||||||
def __get__(self, instance, owner) -> ClassPropertyType:
|
def __get__(self, instance, owner):
|
||||||
return self.callback(owner)
|
return self.callback(owner)
|
||||||
|
|
||||||
|
|
||||||
#: A type alias that represents either a classproperty descriptor or a constant value of the same
|
|
||||||
#: type. This allows derived classes to override a computed class-level property with a constant
|
|
||||||
#: value while retaining type compatibility.
|
|
||||||
ClassProperty = Union[ClassPropertyType, classproperty[ClassPropertyType]]
|
|
||||||
|
|
||||||
|
|
||||||
class DeprecatedProperty:
|
class DeprecatedProperty:
|
||||||
"""Data descriptor to error or warn when a deprecated property is accessed.
|
"""Data descriptor to error or warn when a deprecated property is accessed.
|
||||||
|
|
||||||
|
@@ -10,21 +10,9 @@
|
|||||||
import spack.util.git
|
import spack.util.git
|
||||||
|
|
||||||
#: PEP440 canonical <major>.<minor>.<micro>.<devN> string
|
#: PEP440 canonical <major>.<minor>.<micro>.<devN> string
|
||||||
__version__ = "1.0.0.dev0"
|
__version__ = "1.0.0-alpha.4"
|
||||||
spack_version = __version__
|
spack_version = __version__
|
||||||
|
|
||||||
#: The current Package API version implemented by this version of Spack. The Package API defines
|
|
||||||
#: the Python interface for packages as well as the layout of package repositories. The minor
|
|
||||||
#: version is incremented when the package API is extended in a backwards-compatible way. The major
|
|
||||||
#: version is incremented upon breaking changes. This version is changed independently from the
|
|
||||||
#: Spack version.
|
|
||||||
package_api_version = (2, 0)
|
|
||||||
|
|
||||||
#: The minimum Package API version that this version of Spack is compatible with. This should
|
|
||||||
#: always be a tuple of the form ``(major, 0)``, since compatibility with vX.Y implies
|
|
||||||
#: compatibility with vX.0.
|
|
||||||
min_package_api_version = (1, 0)
|
|
||||||
|
|
||||||
|
|
||||||
def __try_int(v):
|
def __try_int(v):
|
||||||
try:
|
try:
|
||||||
@@ -91,6 +79,4 @@ def get_short_version() -> str:
|
|||||||
"get_version",
|
"get_version",
|
||||||
"get_spack_commit",
|
"get_spack_commit",
|
||||||
"get_short_version",
|
"get_short_version",
|
||||||
"package_api_version",
|
|
||||||
"min_package_api_version",
|
|
||||||
]
|
]
|
||||||
|
@@ -1,20 +0,0 @@
|
|||||||
# Copyright Spack Project Developers. See COPYRIGHT file for details.
|
|
||||||
#
|
|
||||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
|
||||||
"""Alias names to convert legacy compilers to builtin packages and vice-versa"""
|
|
||||||
|
|
||||||
BUILTIN_TO_LEGACY_COMPILER = {
|
|
||||||
"llvm": "clang",
|
|
||||||
"intel-oneapi-compilers": "oneapi",
|
|
||||||
"llvm-amdgpu": "rocmcc",
|
|
||||||
"intel-oneapi-compilers-classic": "intel",
|
|
||||||
"acfl": "arm",
|
|
||||||
}
|
|
||||||
|
|
||||||
LEGACY_COMPILER_TO_BUILTIN = {
|
|
||||||
"clang": "llvm",
|
|
||||||
"oneapi": "intel-oneapi-compilers",
|
|
||||||
"rocmcc": "llvm-amdgpu",
|
|
||||||
"intel": "intel-oneapi-compilers-classic",
|
|
||||||
"arm": "acfl",
|
|
||||||
}
|
|
@@ -636,7 +636,14 @@ def tarball_directory_name(spec):
|
|||||||
Return name of the tarball directory according to the convention
|
Return name of the tarball directory according to the convention
|
||||||
<os>-<architecture>/<compiler>/<package>-<version>/
|
<os>-<architecture>/<compiler>/<package>-<version>/
|
||||||
"""
|
"""
|
||||||
return spec.format_path("{architecture}/{compiler.name}-{compiler.version}/{name}-{version}")
|
if spec.original_spec_format() < 5:
|
||||||
|
compiler = spec.annotations.compiler_node_attribute
|
||||||
|
assert compiler is not None, "a compiler spec is expected"
|
||||||
|
return spec.format_path(
|
||||||
|
f"{spec.architecture}/{compiler.name}-{compiler.version}/{spec.name}-{spec.version}"
|
||||||
|
)
|
||||||
|
|
||||||
|
return spec.format_path(f"{spec.architecture.platform}/{spec.name}-{spec.version}")
|
||||||
|
|
||||||
|
|
||||||
def tarball_name(spec, ext):
|
def tarball_name(spec, ext):
|
||||||
@@ -644,9 +651,17 @@ def tarball_name(spec, ext):
|
|||||||
Return the name of the tarfile according to the convention
|
Return the name of the tarfile according to the convention
|
||||||
<os>-<architecture>-<package>-<dag_hash><ext>
|
<os>-<architecture>-<package>-<dag_hash><ext>
|
||||||
"""
|
"""
|
||||||
spec_formatted = spec.format_path(
|
if spec.original_spec_format() < 5:
|
||||||
"{architecture}-{compiler.name}-{compiler.version}-{name}-{version}-{hash}"
|
compiler = spec.annotations.compiler_node_attribute
|
||||||
)
|
assert compiler is not None, "a compiler spec is expected"
|
||||||
|
spec_formatted = (
|
||||||
|
f"{spec.architecture}-{compiler.name}-{compiler.version}-{spec.name}"
|
||||||
|
f"-{spec.version}-{spec.dag_hash()}"
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
spec_formatted = (
|
||||||
|
f"{spec.architecture.platform}-{spec.name}-{spec.version}-{spec.dag_hash()}"
|
||||||
|
)
|
||||||
return f"{spec_formatted}{ext}"
|
return f"{spec_formatted}{ext}"
|
||||||
|
|
||||||
|
|
||||||
|
@@ -234,6 +234,10 @@ def _root_spec(spec_str: str) -> str:
|
|||||||
# Add a compiler and platform requirement to the root spec.
|
# Add a compiler and platform requirement to the root spec.
|
||||||
platform = str(spack.platforms.host())
|
platform = str(spack.platforms.host())
|
||||||
|
|
||||||
|
if platform == "windows":
|
||||||
|
spec_str += " %msvc"
|
||||||
|
elif platform == "freebsd":
|
||||||
|
spec_str += " %clang"
|
||||||
spec_str += f" platform={platform}"
|
spec_str += f" platform={platform}"
|
||||||
target = archspec.cpu.host().family
|
target = archspec.cpu.host().family
|
||||||
spec_str += f" target={target}"
|
spec_str += f" target={target}"
|
||||||
|
@@ -133,7 +133,7 @@ def mypy_root_spec() -> str:
|
|||||||
|
|
||||||
def black_root_spec() -> str:
|
def black_root_spec() -> str:
|
||||||
"""Return the root spec used to bootstrap black"""
|
"""Return the root spec used to bootstrap black"""
|
||||||
return _root_spec("py-black@:25.1.0")
|
return _root_spec("py-black@:24.1.0")
|
||||||
|
|
||||||
|
|
||||||
def flake8_root_spec() -> str:
|
def flake8_root_spec() -> str:
|
||||||
|
@@ -36,11 +36,9 @@
|
|||||||
import multiprocessing
|
import multiprocessing
|
||||||
import os
|
import os
|
||||||
import re
|
import re
|
||||||
import signal
|
|
||||||
import sys
|
import sys
|
||||||
import traceback
|
import traceback
|
||||||
import types
|
import types
|
||||||
import warnings
|
|
||||||
from collections import defaultdict
|
from collections import defaultdict
|
||||||
from enum import Flag, auto
|
from enum import Flag, auto
|
||||||
from itertools import chain
|
from itertools import chain
|
||||||
@@ -115,7 +113,7 @@
|
|||||||
# set_wrapper_variables and used to pass parameters to
|
# set_wrapper_variables and used to pass parameters to
|
||||||
# Spack's compiler wrappers.
|
# Spack's compiler wrappers.
|
||||||
#
|
#
|
||||||
SPACK_COMPILER_WRAPPER_PATH = "SPACK_COMPILER_WRAPPER_PATH"
|
SPACK_ENV_PATH = "SPACK_ENV_PATH"
|
||||||
SPACK_MANAGED_DIRS = "SPACK_MANAGED_DIRS"
|
SPACK_MANAGED_DIRS = "SPACK_MANAGED_DIRS"
|
||||||
SPACK_INCLUDE_DIRS = "SPACK_INCLUDE_DIRS"
|
SPACK_INCLUDE_DIRS = "SPACK_INCLUDE_DIRS"
|
||||||
SPACK_LINK_DIRS = "SPACK_LINK_DIRS"
|
SPACK_LINK_DIRS = "SPACK_LINK_DIRS"
|
||||||
@@ -574,10 +572,12 @@ def set_package_py_globals(pkg, context: Context = Context.BUILD):
|
|||||||
module.make = DeprecatedExecutable(pkg.name, "make", "gmake")
|
module.make = DeprecatedExecutable(pkg.name, "make", "gmake")
|
||||||
module.gmake = DeprecatedExecutable(pkg.name, "gmake", "gmake")
|
module.gmake = DeprecatedExecutable(pkg.name, "gmake", "gmake")
|
||||||
module.ninja = DeprecatedExecutable(pkg.name, "ninja", "ninja")
|
module.ninja = DeprecatedExecutable(pkg.name, "ninja", "ninja")
|
||||||
|
# TODO: johnwparent: add package or builder support to define these build tools
|
||||||
|
# for now there is no entrypoint for builders to define these on their
|
||||||
|
# own
|
||||||
if sys.platform == "win32":
|
if sys.platform == "win32":
|
||||||
module.nmake = DeprecatedExecutable(pkg.name, "nmake", "msvc")
|
module.nmake = Executable("nmake")
|
||||||
module.msbuild = DeprecatedExecutable(pkg.name, "msbuild", "msvc")
|
module.msbuild = Executable("msbuild")
|
||||||
# analog to configure for win32
|
# analog to configure for win32
|
||||||
module.cscript = Executable("cscript")
|
module.cscript = Executable("cscript")
|
||||||
|
|
||||||
@@ -715,6 +715,21 @@ def get_rpath_deps(pkg: spack.package_base.PackageBase) -> List[spack.spec.Spec]
|
|||||||
return _get_rpath_deps_from_spec(pkg.spec, pkg.transitive_rpaths)
|
return _get_rpath_deps_from_spec(pkg.spec, pkg.transitive_rpaths)
|
||||||
|
|
||||||
|
|
||||||
|
def load_external_modules(pkg):
|
||||||
|
"""Traverse a package's spec DAG and load any external modules.
|
||||||
|
|
||||||
|
Traverse a package's dependencies and load any external modules
|
||||||
|
associated with them.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
pkg (spack.package_base.PackageBase): package to load deps for
|
||||||
|
"""
|
||||||
|
for dep in list(pkg.spec.traverse()):
|
||||||
|
external_modules = dep.external_modules or []
|
||||||
|
for external_module in external_modules:
|
||||||
|
load_module(external_module)
|
||||||
|
|
||||||
|
|
||||||
def setup_package(pkg, dirty, context: Context = Context.BUILD):
|
def setup_package(pkg, dirty, context: Context = Context.BUILD):
|
||||||
"""Execute all environment setup routines."""
|
"""Execute all environment setup routines."""
|
||||||
if context not in (Context.BUILD, Context.TEST):
|
if context not in (Context.BUILD, Context.TEST):
|
||||||
@@ -748,10 +763,8 @@ def setup_package(pkg, dirty, context: Context = Context.BUILD):
|
|||||||
|
|
||||||
tty.debug("setup_package: adding compiler wrappers paths")
|
tty.debug("setup_package: adding compiler wrappers paths")
|
||||||
env_by_name = env_mods.group_by_name()
|
env_by_name = env_mods.group_by_name()
|
||||||
for x in env_by_name["SPACK_COMPILER_WRAPPER_PATH"]:
|
for x in env_by_name["SPACK_ENV_PATH"]:
|
||||||
assert isinstance(
|
assert isinstance(x, PrependPath), "unexpected setting used for SPACK_ENV_PATH"
|
||||||
x, PrependPath
|
|
||||||
), "unexpected setting used for SPACK_COMPILER_WRAPPER_PATH"
|
|
||||||
env_mods.prepend_path("PATH", x.value)
|
env_mods.prepend_path("PATH", x.value)
|
||||||
|
|
||||||
# Check whether we want to force RPATH or RUNPATH
|
# Check whether we want to force RPATH or RUNPATH
|
||||||
@@ -779,7 +792,7 @@ def setup_package(pkg, dirty, context: Context = Context.BUILD):
|
|||||||
|
|
||||||
# Load modules on an already clean environment, just before applying Spack's
|
# Load modules on an already clean environment, just before applying Spack's
|
||||||
# own environment modifications. This ensures Spack controls CC/CXX/... variables.
|
# own environment modifications. This ensures Spack controls CC/CXX/... variables.
|
||||||
load_external_modules(setup_context)
|
load_external_modules(pkg)
|
||||||
|
|
||||||
# Make sure nothing's strange about the Spack environment.
|
# Make sure nothing's strange about the Spack environment.
|
||||||
validate(env_mods, tty.warn)
|
validate(env_mods, tty.warn)
|
||||||
@@ -1076,21 +1089,6 @@ def _make_runnable(self, dep: spack.spec.Spec, env: EnvironmentModifications):
|
|||||||
env.prepend_path("PATH", bin_dir)
|
env.prepend_path("PATH", bin_dir)
|
||||||
|
|
||||||
|
|
||||||
def load_external_modules(context: SetupContext) -> None:
|
|
||||||
"""Traverse a package's spec DAG and load any external modules.
|
|
||||||
|
|
||||||
Traverse a package's dependencies and load any external modules
|
|
||||||
associated with them.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
context: A populated SetupContext object
|
|
||||||
"""
|
|
||||||
for spec, _ in context.external:
|
|
||||||
external_modules = spec.external_modules or []
|
|
||||||
for external_module in external_modules:
|
|
||||||
load_module(external_module)
|
|
||||||
|
|
||||||
|
|
||||||
def _setup_pkg_and_run(
|
def _setup_pkg_and_run(
|
||||||
serialized_pkg: "spack.subprocess_context.PackageInstallContext",
|
serialized_pkg: "spack.subprocess_context.PackageInstallContext",
|
||||||
function: Callable,
|
function: Callable,
|
||||||
@@ -1189,9 +1187,11 @@ def _setup_pkg_and_run(
|
|||||||
if isinstance(e, (spack.multimethod.NoSuchMethodError, AttributeError)):
|
if isinstance(e, (spack.multimethod.NoSuchMethodError, AttributeError)):
|
||||||
process = "test the installation" if context == "test" else "build from sources"
|
process = "test the installation" if context == "test" else "build from sources"
|
||||||
error_msg = (
|
error_msg = (
|
||||||
"The '{}' package cannot find an attribute while trying to {}. You can fix this "
|
"The '{}' package cannot find an attribute while trying to {}. "
|
||||||
"by updating the {} recipe, and you can also report the issue as a build-error or "
|
"This might be due to a change in Spack's package format "
|
||||||
"a bug at https://github.com/spack/spack/issues"
|
"to support multiple build-systems for a single package. You can fix this "
|
||||||
|
"by updating the {} recipe, and you can also report the issue as a bug. "
|
||||||
|
"More information at https://spack.readthedocs.io/en/latest/packaging_guide.html#installation-procedure"
|
||||||
).format(pkg.name, process, context)
|
).format(pkg.name, process, context)
|
||||||
error_msg = colorize("@*R{{{}}}".format(error_msg))
|
error_msg = colorize("@*R{{{}}}".format(error_msg))
|
||||||
error_msg = "{}\n\n{}".format(str(e), error_msg)
|
error_msg = "{}\n\n{}".format(str(e), error_msg)
|
||||||
@@ -1216,45 +1216,15 @@ def _setup_pkg_and_run(
|
|||||||
input_pipe.close()
|
input_pipe.close()
|
||||||
|
|
||||||
|
|
||||||
class BuildProcess:
|
def start_build_process(pkg, function, kwargs):
|
||||||
def __init__(self, *, target, args) -> None:
|
|
||||||
self.p = multiprocessing.Process(target=target, args=args)
|
|
||||||
|
|
||||||
def start(self) -> None:
|
|
||||||
self.p.start()
|
|
||||||
|
|
||||||
def is_alive(self) -> bool:
|
|
||||||
return self.p.is_alive()
|
|
||||||
|
|
||||||
def join(self, *, timeout: Optional[int] = None):
|
|
||||||
self.p.join(timeout=timeout)
|
|
||||||
|
|
||||||
def terminate(self):
|
|
||||||
# Opportunity for graceful termination
|
|
||||||
self.p.terminate()
|
|
||||||
self.p.join(timeout=1)
|
|
||||||
|
|
||||||
# If the process didn't gracefully terminate, forcefully kill
|
|
||||||
if self.p.is_alive():
|
|
||||||
# TODO (python 3.6 removal): use self.p.kill() instead, consider removing this class
|
|
||||||
assert isinstance(self.p.pid, int), f"unexpected value for PID: {self.p.pid}"
|
|
||||||
os.kill(self.p.pid, signal.SIGKILL)
|
|
||||||
self.p.join()
|
|
||||||
|
|
||||||
@property
|
|
||||||
def exitcode(self):
|
|
||||||
return self.p.exitcode
|
|
||||||
|
|
||||||
|
|
||||||
def start_build_process(pkg, function, kwargs, *, timeout: Optional[int] = None):
|
|
||||||
"""Create a child process to do part of a spack build.
|
"""Create a child process to do part of a spack build.
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
|
|
||||||
pkg (spack.package_base.PackageBase): package whose environment we should set up the
|
pkg (spack.package_base.PackageBase): package whose environment we should set up the
|
||||||
child process for.
|
child process for.
|
||||||
function (typing.Callable): argless function to run in the child process.
|
function (typing.Callable): argless function to run in the child
|
||||||
timeout: maximum time allowed to finish the execution of function
|
process.
|
||||||
|
|
||||||
Usage::
|
Usage::
|
||||||
|
|
||||||
@@ -1282,14 +1252,14 @@ def child_fun():
|
|||||||
# Forward sys.stdin when appropriate, to allow toggling verbosity
|
# Forward sys.stdin when appropriate, to allow toggling verbosity
|
||||||
if sys.platform != "win32" and sys.stdin.isatty() and hasattr(sys.stdin, "fileno"):
|
if sys.platform != "win32" and sys.stdin.isatty() and hasattr(sys.stdin, "fileno"):
|
||||||
input_fd = Connection(os.dup(sys.stdin.fileno()))
|
input_fd = Connection(os.dup(sys.stdin.fileno()))
|
||||||
mflags = os.environ.get("MAKEFLAGS")
|
mflags = os.environ.get("MAKEFLAGS", False)
|
||||||
if mflags is not None:
|
if mflags:
|
||||||
m = re.search(r"--jobserver-[^=]*=(\d),(\d)", mflags)
|
m = re.search(r"--jobserver-[^=]*=(\d),(\d)", mflags)
|
||||||
if m:
|
if m:
|
||||||
jobserver_fd1 = Connection(int(m.group(1)))
|
jobserver_fd1 = Connection(int(m.group(1)))
|
||||||
jobserver_fd2 = Connection(int(m.group(2)))
|
jobserver_fd2 = Connection(int(m.group(2)))
|
||||||
|
|
||||||
p = BuildProcess(
|
p = multiprocessing.Process(
|
||||||
target=_setup_pkg_and_run,
|
target=_setup_pkg_and_run,
|
||||||
args=(
|
args=(
|
||||||
serialized_pkg,
|
serialized_pkg,
|
||||||
@@ -1323,17 +1293,14 @@ def exitcode_msg(p):
|
|||||||
typ = "exit" if p.exitcode >= 0 else "signal"
|
typ = "exit" if p.exitcode >= 0 else "signal"
|
||||||
return f"{typ} {abs(p.exitcode)}"
|
return f"{typ} {abs(p.exitcode)}"
|
||||||
|
|
||||||
p.join(timeout=timeout)
|
|
||||||
if p.is_alive():
|
|
||||||
warnings.warn(f"Terminating process, since the timeout of {timeout}s was exceeded")
|
|
||||||
p.terminate()
|
|
||||||
p.join()
|
|
||||||
|
|
||||||
try:
|
try:
|
||||||
child_result = read_pipe.recv()
|
child_result = read_pipe.recv()
|
||||||
except EOFError:
|
except EOFError:
|
||||||
|
p.join()
|
||||||
raise InstallError(f"The process has stopped unexpectedly ({exitcode_msg(p)})")
|
raise InstallError(f"The process has stopped unexpectedly ({exitcode_msg(p)})")
|
||||||
|
|
||||||
|
p.join()
|
||||||
|
|
||||||
# If returns a StopPhase, raise it
|
# If returns a StopPhase, raise it
|
||||||
if isinstance(child_result, spack.error.StopPhase):
|
if isinstance(child_result, spack.error.StopPhase):
|
||||||
# do not print
|
# do not print
|
||||||
|
@@ -16,7 +16,6 @@
|
|||||||
import spack.package_base
|
import spack.package_base
|
||||||
import spack.phase_callbacks
|
import spack.phase_callbacks
|
||||||
import spack.spec
|
import spack.spec
|
||||||
import spack.util.environment
|
|
||||||
import spack.util.prefix
|
import spack.util.prefix
|
||||||
from spack.directives import build_system, conflicts, depends_on
|
from spack.directives import build_system, conflicts, depends_on
|
||||||
from spack.multimethod import when
|
from spack.multimethod import when
|
||||||
@@ -847,9 +846,7 @@ def _remove_libtool_archives(self) -> None:
|
|||||||
with open(self._removed_la_files_log, mode="w", encoding="utf-8") as f:
|
with open(self._removed_la_files_log, mode="w", encoding="utf-8") as f:
|
||||||
f.write("\n".join(libtool_files))
|
f.write("\n".join(libtool_files))
|
||||||
|
|
||||||
def setup_build_environment(
|
def setup_build_environment(self, env):
|
||||||
self, env: spack.util.environment.EnvironmentModifications
|
|
||||||
) -> None:
|
|
||||||
if self.spec.platform == "darwin" and macos_version() >= Version("11"):
|
if self.spec.platform == "darwin" and macos_version() >= Version("11"):
|
||||||
# Many configure files rely on matching '10.*' for macOS version
|
# Many configure files rely on matching '10.*' for macOS version
|
||||||
# detection and fail to add flags if it shows as version 11.
|
# detection and fail to add flags if it shows as version 11.
|
||||||
|
@@ -2,10 +2,9 @@
|
|||||||
#
|
#
|
||||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||||
import collections.abc
|
import collections.abc
|
||||||
import enum
|
|
||||||
import os
|
import os
|
||||||
import re
|
import re
|
||||||
from typing import Optional, Tuple
|
from typing import Tuple
|
||||||
|
|
||||||
import llnl.util.filesystem as fs
|
import llnl.util.filesystem as fs
|
||||||
import llnl.util.tty as tty
|
import llnl.util.tty as tty
|
||||||
@@ -14,7 +13,6 @@
|
|||||||
import spack.spec
|
import spack.spec
|
||||||
import spack.util.prefix
|
import spack.util.prefix
|
||||||
from spack.directives import depends_on
|
from spack.directives import depends_on
|
||||||
from spack.util.executable import which_string
|
|
||||||
|
|
||||||
from .cmake import CMakeBuilder, CMakePackage
|
from .cmake import CMakeBuilder, CMakePackage
|
||||||
|
|
||||||
@@ -180,64 +178,6 @@ def initconfig_compiler_entries(self):
|
|||||||
|
|
||||||
return entries
|
return entries
|
||||||
|
|
||||||
class Scheduler(enum.Enum):
|
|
||||||
LSF = enum.auto()
|
|
||||||
SLURM = enum.auto()
|
|
||||||
FLUX = enum.auto()
|
|
||||||
|
|
||||||
def get_scheduler(self) -> Optional[Scheduler]:
|
|
||||||
spec = self.pkg.spec
|
|
||||||
|
|
||||||
# Check for Spectrum-mpi, which always uses LSF or LSF MPI variant
|
|
||||||
if spec.satisfies("^spectrum-mpi") or spec["mpi"].satisfies("schedulers=lsf"):
|
|
||||||
return self.Scheduler.LSF
|
|
||||||
|
|
||||||
# Check for Slurm MPI variants
|
|
||||||
slurm_checks = ["+slurm", "schedulers=slurm", "process_managers=slurm"]
|
|
||||||
if any(spec["mpi"].satisfies(variant) for variant in slurm_checks):
|
|
||||||
return self.Scheduler.SLURM
|
|
||||||
|
|
||||||
# TODO improve this when MPI implementations support flux
|
|
||||||
# Do this check last to avoid using a flux wrapper present next to Slurm/ LSF schedulers
|
|
||||||
if which_string("flux") is not None:
|
|
||||||
return self.Scheduler.FLUX
|
|
||||||
|
|
||||||
return None
|
|
||||||
|
|
||||||
def get_mpi_exec(self) -> Optional[str]:
|
|
||||||
spec = self.pkg.spec
|
|
||||||
scheduler = self.get_scheduler()
|
|
||||||
|
|
||||||
if scheduler == self.Scheduler.LSF:
|
|
||||||
return which_string("lrun")
|
|
||||||
|
|
||||||
elif scheduler == self.Scheduler.SLURM:
|
|
||||||
if spec["mpi"].external:
|
|
||||||
return which_string("srun")
|
|
||||||
else:
|
|
||||||
return os.path.join(spec["slurm"].prefix.bin, "srun")
|
|
||||||
|
|
||||||
elif scheduler == self.Scheduler.FLUX:
|
|
||||||
flux = which_string("flux")
|
|
||||||
return f"{flux};run" if flux else None
|
|
||||||
|
|
||||||
elif hasattr(spec["mpi"].package, "mpiexec"):
|
|
||||||
return spec["mpi"].package.mpiexec
|
|
||||||
|
|
||||||
else:
|
|
||||||
mpiexec = os.path.join(spec["mpi"].prefix.bin, "mpirun")
|
|
||||||
if not os.path.exists(mpiexec):
|
|
||||||
mpiexec = os.path.join(spec["mpi"].prefix.bin, "mpiexec")
|
|
||||||
return mpiexec
|
|
||||||
|
|
||||||
def get_mpi_exec_num_proc(self) -> str:
|
|
||||||
scheduler = self.get_scheduler()
|
|
||||||
|
|
||||||
if scheduler in [self.Scheduler.FLUX, self.Scheduler.LSF, self.Scheduler.SLURM]:
|
|
||||||
return "-n"
|
|
||||||
else:
|
|
||||||
return "-np"
|
|
||||||
|
|
||||||
def initconfig_mpi_entries(self):
|
def initconfig_mpi_entries(self):
|
||||||
spec = self.pkg.spec
|
spec = self.pkg.spec
|
||||||
|
|
||||||
@@ -257,10 +197,27 @@ def initconfig_mpi_entries(self):
|
|||||||
if hasattr(spec["mpi"], "mpifc"):
|
if hasattr(spec["mpi"], "mpifc"):
|
||||||
entries.append(cmake_cache_path("MPI_Fortran_COMPILER", spec["mpi"].mpifc))
|
entries.append(cmake_cache_path("MPI_Fortran_COMPILER", spec["mpi"].mpifc))
|
||||||
|
|
||||||
# Determine MPIEXEC
|
# Check for slurm
|
||||||
mpiexec = self.get_mpi_exec()
|
using_slurm = False
|
||||||
|
slurm_checks = ["+slurm", "schedulers=slurm", "process_managers=slurm"]
|
||||||
|
if any(spec["mpi"].satisfies(variant) for variant in slurm_checks):
|
||||||
|
using_slurm = True
|
||||||
|
|
||||||
if mpiexec is None or not os.path.exists(mpiexec.split(";")[0]):
|
# Determine MPIEXEC
|
||||||
|
if using_slurm:
|
||||||
|
if spec["mpi"].external:
|
||||||
|
# Heuristic until we have dependents on externals
|
||||||
|
mpiexec = "/usr/bin/srun"
|
||||||
|
else:
|
||||||
|
mpiexec = os.path.join(spec["slurm"].prefix.bin, "srun")
|
||||||
|
elif hasattr(spec["mpi"].package, "mpiexec"):
|
||||||
|
mpiexec = spec["mpi"].package.mpiexec
|
||||||
|
else:
|
||||||
|
mpiexec = os.path.join(spec["mpi"].prefix.bin, "mpirun")
|
||||||
|
if not os.path.exists(mpiexec):
|
||||||
|
mpiexec = os.path.join(spec["mpi"].prefix.bin, "mpiexec")
|
||||||
|
|
||||||
|
if not os.path.exists(mpiexec):
|
||||||
msg = "Unable to determine MPIEXEC, %s tests may fail" % self.pkg.name
|
msg = "Unable to determine MPIEXEC, %s tests may fail" % self.pkg.name
|
||||||
entries.append("# {0}\n".format(msg))
|
entries.append("# {0}\n".format(msg))
|
||||||
tty.warn(msg)
|
tty.warn(msg)
|
||||||
@@ -273,7 +230,10 @@ def initconfig_mpi_entries(self):
|
|||||||
entries.append(cmake_cache_path("MPIEXEC", mpiexec))
|
entries.append(cmake_cache_path("MPIEXEC", mpiexec))
|
||||||
|
|
||||||
# Determine MPIEXEC_NUMPROC_FLAG
|
# Determine MPIEXEC_NUMPROC_FLAG
|
||||||
entries.append(cmake_cache_string("MPIEXEC_NUMPROC_FLAG", self.get_mpi_exec_num_proc()))
|
if using_slurm:
|
||||||
|
entries.append(cmake_cache_string("MPIEXEC_NUMPROC_FLAG", "-n"))
|
||||||
|
else:
|
||||||
|
entries.append(cmake_cache_string("MPIEXEC_NUMPROC_FLAG", "-np"))
|
||||||
|
|
||||||
return entries
|
return entries
|
||||||
|
|
||||||
@@ -316,18 +276,23 @@ def initconfig_hardware_entries(self):
|
|||||||
entries.append("# ROCm")
|
entries.append("# ROCm")
|
||||||
entries.append("#------------------{0}\n".format("-" * 30))
|
entries.append("#------------------{0}\n".format("-" * 30))
|
||||||
|
|
||||||
rocm_root = os.path.dirname(spec["llvm-amdgpu"].prefix)
|
# Explicitly setting HIP_ROOT_DIR may be a patch that is no longer necessary
|
||||||
entries.append(cmake_cache_path("ROCM_PATH", rocm_root))
|
entries.append(cmake_cache_path("HIP_ROOT_DIR", "{0}".format(spec["hip"].prefix)))
|
||||||
|
llvm_bin = spec["llvm-amdgpu"].prefix.bin
|
||||||
|
llvm_prefix = spec["llvm-amdgpu"].prefix
|
||||||
|
# Some ROCm systems seem to point to /<path>/rocm-<ver>/ and
|
||||||
|
# others point to /<path>/rocm-<ver>/llvm
|
||||||
|
if os.path.basename(os.path.normpath(llvm_prefix)) != "llvm":
|
||||||
|
llvm_bin = os.path.join(llvm_prefix, "llvm/bin/")
|
||||||
|
entries.append(
|
||||||
|
cmake_cache_filepath("CMAKE_HIP_COMPILER", os.path.join(llvm_bin, "clang++"))
|
||||||
|
)
|
||||||
archs = self.spec.variants["amdgpu_target"].value
|
archs = self.spec.variants["amdgpu_target"].value
|
||||||
if archs[0] != "none":
|
if archs[0] != "none":
|
||||||
arch_str = ";".join(archs)
|
arch_str = ";".join(archs)
|
||||||
entries.append(cmake_cache_string("CMAKE_HIP_ARCHITECTURES", arch_str))
|
entries.append(cmake_cache_string("CMAKE_HIP_ARCHITECTURES", arch_str))
|
||||||
|
entries.append(cmake_cache_string("AMDGPU_TARGETS", arch_str))
|
||||||
llvm_bin = spec["llvm-amdgpu"].prefix.bin
|
entries.append(cmake_cache_string("GPU_TARGETS", arch_str))
|
||||||
entries.append(
|
|
||||||
cmake_cache_filepath("CMAKE_HIP_COMPILER", os.path.join(llvm_bin, "amdclang++"))
|
|
||||||
)
|
|
||||||
|
|
||||||
if spec.satisfies("%gcc"):
|
if spec.satisfies("%gcc"):
|
||||||
entries.append(
|
entries.append(
|
||||||
@@ -336,15 +301,6 @@ def initconfig_hardware_entries(self):
|
|||||||
)
|
)
|
||||||
)
|
)
|
||||||
|
|
||||||
# Extra definitions that might be required in other cases
|
|
||||||
if not spec.satisfies("^blt"):
|
|
||||||
entries.append(cmake_cache_path("HIP_ROOT_DIR", "{0}".format(spec["hip"].prefix)))
|
|
||||||
|
|
||||||
if archs[0] != "none":
|
|
||||||
arch_str = ";".join(archs)
|
|
||||||
entries.append(cmake_cache_string("AMDGPU_TARGETS", arch_str))
|
|
||||||
entries.append(cmake_cache_string("GPU_TARGETS", arch_str))
|
|
||||||
|
|
||||||
return entries
|
return entries
|
||||||
|
|
||||||
def std_initconfig_entries(self):
|
def std_initconfig_entries(self):
|
||||||
|
@@ -8,7 +8,6 @@
|
|||||||
import spack.package_base
|
import spack.package_base
|
||||||
import spack.phase_callbacks
|
import spack.phase_callbacks
|
||||||
import spack.spec
|
import spack.spec
|
||||||
import spack.util.environment
|
|
||||||
import spack.util.prefix
|
import spack.util.prefix
|
||||||
from spack.directives import build_system, depends_on
|
from spack.directives import build_system, depends_on
|
||||||
from spack.multimethod import when
|
from spack.multimethod import when
|
||||||
@@ -87,9 +86,7 @@ def check_args(self):
|
|||||||
"""Argument for ``cargo test`` during check phase"""
|
"""Argument for ``cargo test`` during check phase"""
|
||||||
return []
|
return []
|
||||||
|
|
||||||
def setup_build_environment(
|
def setup_build_environment(self, env):
|
||||||
self, env: spack.util.environment.EnvironmentModifications
|
|
||||||
) -> None:
|
|
||||||
env.set("CARGO_HOME", self.stage.path)
|
env.set("CARGO_HOME", self.stage.path)
|
||||||
|
|
||||||
def build(
|
def build(
|
||||||
|
@@ -45,12 +45,7 @@ class CompilerPackage(spack.package_base.PackageBase):
|
|||||||
compiler_languages: Sequence[str] = ["c", "cxx", "fortran"]
|
compiler_languages: Sequence[str] = ["c", "cxx", "fortran"]
|
||||||
|
|
||||||
#: Relative path to compiler wrappers
|
#: Relative path to compiler wrappers
|
||||||
compiler_wrapper_link_paths: Dict[str, str] = {}
|
link_paths: Dict[str, str] = {}
|
||||||
|
|
||||||
#: Optimization flags
|
|
||||||
opt_flags: Sequence[str] = []
|
|
||||||
#: Flags for generating debug information
|
|
||||||
debug_flags: Sequence[str] = []
|
|
||||||
|
|
||||||
def __init__(self, spec: "spack.spec.Spec"):
|
def __init__(self, spec: "spack.spec.Spec"):
|
||||||
super().__init__(spec)
|
super().__init__(spec)
|
||||||
@@ -164,7 +159,7 @@ def determine_variants(cls, exes: Sequence[Path], version_str: str) -> Tuple:
|
|||||||
#: Flag to activate OpenMP support
|
#: Flag to activate OpenMP support
|
||||||
openmp_flag: str = "-fopenmp"
|
openmp_flag: str = "-fopenmp"
|
||||||
|
|
||||||
implicit_rpath_libs: List[str] = []
|
required_libs: List[str] = []
|
||||||
|
|
||||||
def standard_flag(self, *, language: str, standard: str) -> str:
|
def standard_flag(self, *, language: str, standard: str) -> str:
|
||||||
"""Returns the flag used to enforce a given standard for a language"""
|
"""Returns the flag used to enforce a given standard for a language"""
|
||||||
|
@@ -8,7 +8,6 @@
|
|||||||
import spack.package_base
|
import spack.package_base
|
||||||
import spack.phase_callbacks
|
import spack.phase_callbacks
|
||||||
import spack.spec
|
import spack.spec
|
||||||
import spack.util.environment
|
|
||||||
import spack.util.prefix
|
import spack.util.prefix
|
||||||
from spack.directives import build_system, depends_on
|
from spack.directives import build_system, depends_on
|
||||||
from spack.multimethod import when
|
from spack.multimethod import when
|
||||||
@@ -69,9 +68,7 @@ class GoBuilder(BuilderWithDefaults):
|
|||||||
#: Callback names for install-time test
|
#: Callback names for install-time test
|
||||||
install_time_test_callbacks = ["check"]
|
install_time_test_callbacks = ["check"]
|
||||||
|
|
||||||
def setup_build_environment(
|
def setup_build_environment(self, env):
|
||||||
self, env: spack.util.environment.EnvironmentModifications
|
|
||||||
) -> None:
|
|
||||||
env.set("GO111MODULE", "on")
|
env.set("GO111MODULE", "on")
|
||||||
env.set("GOTOOLCHAIN", "local")
|
env.set("GOTOOLCHAIN", "local")
|
||||||
env.set("GOPATH", fs.join_path(self.pkg.stage.path, "go"))
|
env.set("GOPATH", fs.join_path(self.pkg.stage.path, "go"))
|
||||||
|
@@ -23,7 +23,6 @@
|
|||||||
|
|
||||||
import spack.error
|
import spack.error
|
||||||
import spack.phase_callbacks
|
import spack.phase_callbacks
|
||||||
import spack.spec
|
|
||||||
from spack.build_environment import dso_suffix
|
from spack.build_environment import dso_suffix
|
||||||
from spack.error import InstallError
|
from spack.error import InstallError
|
||||||
from spack.util.environment import EnvironmentModifications
|
from spack.util.environment import EnvironmentModifications
|
||||||
@@ -1017,7 +1016,7 @@ def libs(self):
|
|||||||
debug_print(result)
|
debug_print(result)
|
||||||
return result
|
return result
|
||||||
|
|
||||||
def setup_run_environment(self, env: EnvironmentModifications) -> None:
|
def setup_run_environment(self, env):
|
||||||
"""Adds environment variables to the generated module file.
|
"""Adds environment variables to the generated module file.
|
||||||
|
|
||||||
These environment variables come from running:
|
These environment variables come from running:
|
||||||
@@ -1050,13 +1049,11 @@ def setup_run_environment(self, env: EnvironmentModifications) -> None:
|
|||||||
env.set("F77", self.prefix.bin.ifort)
|
env.set("F77", self.prefix.bin.ifort)
|
||||||
env.set("F90", self.prefix.bin.ifort)
|
env.set("F90", self.prefix.bin.ifort)
|
||||||
|
|
||||||
def setup_dependent_build_environment(
|
def setup_dependent_build_environment(self, env, dependent_spec):
|
||||||
self, env: EnvironmentModifications, dependent_spec: spack.spec.Spec
|
|
||||||
) -> None:
|
|
||||||
# NB: This function is overwritten by 'mpi' provider packages:
|
# NB: This function is overwritten by 'mpi' provider packages:
|
||||||
#
|
#
|
||||||
# var/spack/repos/spack_repo/builtin/packages/intel_mpi/package.py
|
# var/spack/repos/builtin/packages/intel-mpi/package.py
|
||||||
# var/spack/repos/spack_repo/builtin/packages/intel_parallel_studio/package.py
|
# var/spack/repos/builtin/packages/intel-parallel-studio/package.py
|
||||||
#
|
#
|
||||||
# They call _setup_dependent_env_callback() as well, but with the
|
# They call _setup_dependent_env_callback() as well, but with the
|
||||||
# dictionary kwarg compilers_of_client{} present and populated.
|
# dictionary kwarg compilers_of_client{} present and populated.
|
||||||
@@ -1064,12 +1061,7 @@ def setup_dependent_build_environment(
|
|||||||
# Handle everything in a callback version.
|
# Handle everything in a callback version.
|
||||||
self._setup_dependent_env_callback(env, dependent_spec)
|
self._setup_dependent_env_callback(env, dependent_spec)
|
||||||
|
|
||||||
def _setup_dependent_env_callback(
|
def _setup_dependent_env_callback(self, env, dependent_spec, compilers_of_client={}):
|
||||||
self,
|
|
||||||
env: EnvironmentModifications,
|
|
||||||
dependent_spec: spack.spec.Spec,
|
|
||||||
compilers_of_client={},
|
|
||||||
) -> None:
|
|
||||||
# Expected to be called from a client's
|
# Expected to be called from a client's
|
||||||
# setup_dependent_build_environment(),
|
# setup_dependent_build_environment(),
|
||||||
# with args extended to convey the client's compilers as needed.
|
# with args extended to convey the client's compilers as needed.
|
||||||
|
@@ -8,7 +8,6 @@
|
|||||||
import spack.builder
|
import spack.builder
|
||||||
import spack.package_base
|
import spack.package_base
|
||||||
import spack.spec
|
import spack.spec
|
||||||
import spack.util.environment
|
|
||||||
import spack.util.executable
|
import spack.util.executable
|
||||||
import spack.util.prefix
|
import spack.util.prefix
|
||||||
from spack.directives import build_system, depends_on, extends
|
from spack.directives import build_system, depends_on, extends
|
||||||
@@ -115,7 +114,5 @@ def install(
|
|||||||
def _luarocks_config_path(self):
|
def _luarocks_config_path(self):
|
||||||
return os.path.join(self.pkg.stage.source_path, "spack_luarocks.lua")
|
return os.path.join(self.pkg.stage.source_path, "spack_luarocks.lua")
|
||||||
|
|
||||||
def setup_build_environment(
|
def setup_build_environment(self, env):
|
||||||
self, env: spack.util.environment.EnvironmentModifications
|
|
||||||
) -> None:
|
|
||||||
env.set("LUAROCKS_CONFIG", self._luarocks_config_path())
|
env.set("LUAROCKS_CONFIG", self._luarocks_config_path())
|
||||||
|
@@ -4,7 +4,6 @@
|
|||||||
import spack.builder
|
import spack.builder
|
||||||
import spack.package_base
|
import spack.package_base
|
||||||
import spack.spec
|
import spack.spec
|
||||||
import spack.util.environment
|
|
||||||
import spack.util.prefix
|
import spack.util.prefix
|
||||||
from spack.directives import build_system, extends
|
from spack.directives import build_system, extends
|
||||||
from spack.multimethod import when
|
from spack.multimethod import when
|
||||||
@@ -58,9 +57,7 @@ def install(
|
|||||||
"pkg prefix %s; pkg install %s" % (prefix, self.pkg.stage.archive_file),
|
"pkg prefix %s; pkg install %s" % (prefix, self.pkg.stage.archive_file),
|
||||||
)
|
)
|
||||||
|
|
||||||
def setup_build_environment(
|
def setup_build_environment(self, env):
|
||||||
self, env: spack.util.environment.EnvironmentModifications
|
|
||||||
) -> None:
|
|
||||||
# octave does not like those environment variables to be set:
|
# octave does not like those environment variables to be set:
|
||||||
env.unset("CC")
|
env.unset("CC")
|
||||||
env.unset("CXX")
|
env.unset("CXX")
|
||||||
|
@@ -106,8 +106,8 @@ def install_component(self, installer_path):
|
|||||||
|
|
||||||
bash = Executable("bash")
|
bash = Executable("bash")
|
||||||
|
|
||||||
# Installer writes files in ~/intel set HOME so it goes to staging directory
|
# Installer writes files in ~/intel set HOME so it goes to prefix
|
||||||
bash.add_default_env("HOME", join_path(self.stage.path, "home"))
|
bash.add_default_env("HOME", self.prefix)
|
||||||
# Installer checks $XDG_RUNTIME_DIR/.bootstrapper_lock_file as well
|
# Installer checks $XDG_RUNTIME_DIR/.bootstrapper_lock_file as well
|
||||||
bash.add_default_env("XDG_RUNTIME_DIR", join_path(self.stage.path, "runtime"))
|
bash.add_default_env("XDG_RUNTIME_DIR", join_path(self.stage.path, "runtime"))
|
||||||
|
|
||||||
@@ -132,7 +132,7 @@ def install_component(self, installer_path):
|
|||||||
if not isdir(install_dir):
|
if not isdir(install_dir):
|
||||||
raise RuntimeError("install failed to directory: {0}".format(install_dir))
|
raise RuntimeError("install failed to directory: {0}".format(install_dir))
|
||||||
|
|
||||||
def setup_run_environment(self, env: EnvironmentModifications) -> None:
|
def setup_run_environment(self, env):
|
||||||
"""Adds environment variables to the generated module file.
|
"""Adds environment variables to the generated module file.
|
||||||
|
|
||||||
These environment variables come from running:
|
These environment variables come from running:
|
||||||
@@ -311,4 +311,4 @@ def ld_flags(self):
|
|||||||
|
|
||||||
|
|
||||||
#: Tuple of Intel math libraries, exported to packages
|
#: Tuple of Intel math libraries, exported to packages
|
||||||
INTEL_MATH_LIBRARIES = ("intel-oneapi-mkl",)
|
INTEL_MATH_LIBRARIES = ("intel-mkl", "intel-oneapi-mkl", "intel-parallel-studio")
|
||||||
|
@@ -13,9 +13,9 @@
|
|||||||
import archspec
|
import archspec
|
||||||
|
|
||||||
import llnl.util.filesystem as fs
|
import llnl.util.filesystem as fs
|
||||||
|
import llnl.util.lang as lang
|
||||||
import llnl.util.tty as tty
|
import llnl.util.tty as tty
|
||||||
from llnl.util.filesystem import HeaderList, LibraryList, join_path
|
from llnl.util.filesystem import HeaderList, LibraryList, join_path
|
||||||
from llnl.util.lang import ClassProperty, classproperty, match_predicate
|
|
||||||
|
|
||||||
import spack.builder
|
import spack.builder
|
||||||
import spack.config
|
import spack.config
|
||||||
@@ -139,7 +139,7 @@ def view_file_conflicts(self, view, merge_map):
|
|||||||
ext_map = view.extensions_layout.extension_map(self.extendee_spec)
|
ext_map = view.extensions_layout.extension_map(self.extendee_spec)
|
||||||
namespaces = set(x.package.py_namespace for x in ext_map.values())
|
namespaces = set(x.package.py_namespace for x in ext_map.values())
|
||||||
namespace_re = r"site-packages/{0}/__init__.py".format(self.py_namespace)
|
namespace_re = r"site-packages/{0}/__init__.py".format(self.py_namespace)
|
||||||
find_namespace = match_predicate(namespace_re)
|
find_namespace = lang.match_predicate(namespace_re)
|
||||||
if self.py_namespace in namespaces:
|
if self.py_namespace in namespaces:
|
||||||
conflicts = list(x for x in conflicts if not find_namespace(x))
|
conflicts = list(x for x in conflicts if not find_namespace(x))
|
||||||
|
|
||||||
@@ -206,7 +206,7 @@ def remove_files_from_view(self, view, merge_map):
|
|||||||
spec.package.py_namespace for name, spec in ext_map.items() if name != self.name
|
spec.package.py_namespace for name, spec in ext_map.items() if name != self.name
|
||||||
)
|
)
|
||||||
if self.py_namespace in remaining_namespaces:
|
if self.py_namespace in remaining_namespaces:
|
||||||
namespace_init = match_predicate(
|
namespace_init = lang.match_predicate(
|
||||||
r"site-packages/{0}/__init__.py".format(self.py_namespace)
|
r"site-packages/{0}/__init__.py".format(self.py_namespace)
|
||||||
)
|
)
|
||||||
ignore_namespace = True
|
ignore_namespace = True
|
||||||
@@ -324,27 +324,6 @@ def get_external_python_for_prefix(self):
|
|||||||
raise StopIteration("No external python could be detected for %s to depend on" % self.spec)
|
raise StopIteration("No external python could be detected for %s to depend on" % self.spec)
|
||||||
|
|
||||||
|
|
||||||
def _homepage(cls: "PythonPackage") -> Optional[str]:
|
|
||||||
"""Get the homepage from PyPI if available."""
|
|
||||||
if cls.pypi:
|
|
||||||
name = cls.pypi.split("/")[0]
|
|
||||||
return f"https://pypi.org/project/{name}/"
|
|
||||||
return None
|
|
||||||
|
|
||||||
|
|
||||||
def _url(cls: "PythonPackage") -> Optional[str]:
|
|
||||||
if cls.pypi:
|
|
||||||
return f"https://files.pythonhosted.org/packages/source/{cls.pypi[0]}/{cls.pypi}"
|
|
||||||
return None
|
|
||||||
|
|
||||||
|
|
||||||
def _list_url(cls: "PythonPackage") -> Optional[str]:
|
|
||||||
if cls.pypi:
|
|
||||||
name = cls.pypi.split("/")[0]
|
|
||||||
return f"https://pypi.org/simple/{name}/"
|
|
||||||
return None
|
|
||||||
|
|
||||||
|
|
||||||
class PythonPackage(PythonExtension):
|
class PythonPackage(PythonExtension):
|
||||||
"""Specialized class for packages that are built using pip."""
|
"""Specialized class for packages that are built using pip."""
|
||||||
|
|
||||||
@@ -372,9 +351,25 @@ class PythonPackage(PythonExtension):
|
|||||||
|
|
||||||
py_namespace: Optional[str] = None
|
py_namespace: Optional[str] = None
|
||||||
|
|
||||||
homepage: ClassProperty[Optional[str]] = classproperty(_homepage)
|
@lang.classproperty
|
||||||
url: ClassProperty[Optional[str]] = classproperty(_url)
|
def homepage(cls) -> Optional[str]: # type: ignore[override]
|
||||||
list_url: ClassProperty[Optional[str]] = classproperty(_list_url)
|
if cls.pypi:
|
||||||
|
name = cls.pypi.split("/")[0]
|
||||||
|
return f"https://pypi.org/project/{name}/"
|
||||||
|
return None
|
||||||
|
|
||||||
|
@lang.classproperty
|
||||||
|
def url(cls) -> Optional[str]:
|
||||||
|
if cls.pypi:
|
||||||
|
return f"https://files.pythonhosted.org/packages/source/{cls.pypi[0]}/{cls.pypi}"
|
||||||
|
return None
|
||||||
|
|
||||||
|
@lang.classproperty
|
||||||
|
def list_url(cls) -> Optional[str]: # type: ignore[override]
|
||||||
|
if cls.pypi:
|
||||||
|
name = cls.pypi.split("/")[0]
|
||||||
|
return f"https://pypi.org/simple/{name}/"
|
||||||
|
return None
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def python_spec(self) -> Spec:
|
def python_spec(self) -> Spec:
|
||||||
|
@@ -3,8 +3,8 @@
|
|||||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||||
from typing import Optional, Tuple
|
from typing import Optional, Tuple
|
||||||
|
|
||||||
|
import llnl.util.lang as lang
|
||||||
from llnl.util.filesystem import mkdirp
|
from llnl.util.filesystem import mkdirp
|
||||||
from llnl.util.lang import ClassProperty, classproperty
|
|
||||||
|
|
||||||
from spack.directives import extends
|
from spack.directives import extends
|
||||||
|
|
||||||
@@ -54,32 +54,6 @@ def install(self, pkg, spec, prefix):
|
|||||||
pkg.module.R(*args)
|
pkg.module.R(*args)
|
||||||
|
|
||||||
|
|
||||||
def _homepage(cls: "RPackage") -> Optional[str]:
|
|
||||||
if cls.cran:
|
|
||||||
return f"https://cloud.r-project.org/package={cls.cran}"
|
|
||||||
elif cls.bioc:
|
|
||||||
return f"https://bioconductor.org/packages/{cls.bioc}"
|
|
||||||
return None
|
|
||||||
|
|
||||||
|
|
||||||
def _url(cls: "RPackage") -> Optional[str]:
|
|
||||||
if cls.cran:
|
|
||||||
return f"https://cloud.r-project.org/src/contrib/{cls.cran}_{str(list(cls.versions)[0])}.tar.gz"
|
|
||||||
return None
|
|
||||||
|
|
||||||
|
|
||||||
def _list_url(cls: "RPackage") -> Optional[str]:
|
|
||||||
if cls.cran:
|
|
||||||
return f"https://cloud.r-project.org/src/contrib/Archive/{cls.cran}/"
|
|
||||||
return None
|
|
||||||
|
|
||||||
|
|
||||||
def _git(cls: "RPackage") -> Optional[str]:
|
|
||||||
if cls.bioc:
|
|
||||||
return f"https://git.bioconductor.org/packages/{cls.bioc}"
|
|
||||||
return None
|
|
||||||
|
|
||||||
|
|
||||||
class RPackage(Package):
|
class RPackage(Package):
|
||||||
"""Specialized class for packages that are built using R.
|
"""Specialized class for packages that are built using R.
|
||||||
|
|
||||||
@@ -103,7 +77,24 @@ class RPackage(Package):
|
|||||||
|
|
||||||
extends("r")
|
extends("r")
|
||||||
|
|
||||||
homepage: ClassProperty[Optional[str]] = classproperty(_homepage)
|
@lang.classproperty
|
||||||
url: ClassProperty[Optional[str]] = classproperty(_url)
|
def homepage(cls):
|
||||||
list_url: ClassProperty[Optional[str]] = classproperty(_list_url)
|
if cls.cran:
|
||||||
git: ClassProperty[Optional[str]] = classproperty(_git)
|
return f"https://cloud.r-project.org/package={cls.cran}"
|
||||||
|
elif cls.bioc:
|
||||||
|
return f"https://bioconductor.org/packages/{cls.bioc}"
|
||||||
|
|
||||||
|
@lang.classproperty
|
||||||
|
def url(cls):
|
||||||
|
if cls.cran:
|
||||||
|
return f"https://cloud.r-project.org/src/contrib/{cls.cran}_{str(list(cls.versions)[0])}.tar.gz"
|
||||||
|
|
||||||
|
@lang.classproperty
|
||||||
|
def list_url(cls):
|
||||||
|
if cls.cran:
|
||||||
|
return f"https://cloud.r-project.org/src/contrib/Archive/{cls.cran}/"
|
||||||
|
|
||||||
|
@lang.classproperty
|
||||||
|
def git(cls):
|
||||||
|
if cls.bioc:
|
||||||
|
return f"https://git.bioconductor.org/packages/{cls.bioc}"
|
||||||
|
@@ -5,8 +5,8 @@
|
|||||||
from typing import Optional, Tuple
|
from typing import Optional, Tuple
|
||||||
|
|
||||||
import llnl.util.filesystem as fs
|
import llnl.util.filesystem as fs
|
||||||
|
import llnl.util.lang as lang
|
||||||
import llnl.util.tty as tty
|
import llnl.util.tty as tty
|
||||||
from llnl.util.lang import ClassProperty, classproperty
|
|
||||||
|
|
||||||
import spack.builder
|
import spack.builder
|
||||||
import spack.spec
|
import spack.spec
|
||||||
@@ -19,12 +19,6 @@
|
|||||||
from spack.util.executable import Executable, ProcessError
|
from spack.util.executable import Executable, ProcessError
|
||||||
|
|
||||||
|
|
||||||
def _homepage(cls: "RacketPackage") -> Optional[str]:
|
|
||||||
if cls.racket_name:
|
|
||||||
return f"https://pkgs.racket-lang.org/package/{cls.racket_name}"
|
|
||||||
return None
|
|
||||||
|
|
||||||
|
|
||||||
class RacketPackage(PackageBase):
|
class RacketPackage(PackageBase):
|
||||||
"""Specialized class for packages that are built using Racket's
|
"""Specialized class for packages that are built using Racket's
|
||||||
`raco pkg install` and `raco setup` commands.
|
`raco pkg install` and `raco setup` commands.
|
||||||
@@ -43,7 +37,13 @@ class RacketPackage(PackageBase):
|
|||||||
extends("racket", when="build_system=racket")
|
extends("racket", when="build_system=racket")
|
||||||
|
|
||||||
racket_name: Optional[str] = None
|
racket_name: Optional[str] = None
|
||||||
homepage: ClassProperty[Optional[str]] = classproperty(_homepage)
|
parallel = True
|
||||||
|
|
||||||
|
@lang.classproperty
|
||||||
|
def homepage(cls):
|
||||||
|
if cls.racket_name:
|
||||||
|
return "https://pkgs.racket-lang.org/package/{0}".format(cls.racket_name)
|
||||||
|
return None
|
||||||
|
|
||||||
|
|
||||||
@spack.builder.builder("racket")
|
@spack.builder.builder("racket")
|
||||||
|
@@ -59,7 +59,7 @@ def __call__(self, spec, prefix):
|
|||||||
def get_builder_class(pkg, name: str) -> Optional[Type["Builder"]]:
|
def get_builder_class(pkg, name: str) -> Optional[Type["Builder"]]:
|
||||||
"""Return the builder class if a package module defines it."""
|
"""Return the builder class if a package module defines it."""
|
||||||
cls = getattr(pkg.module, name, None)
|
cls = getattr(pkg.module, name, None)
|
||||||
if cls and spack.repo.is_package_module(cls.__module__):
|
if cls and cls.__module__.startswith(spack.repo.ROOT_PYTHON_NAMESPACE):
|
||||||
return cls
|
return cls
|
||||||
return None
|
return None
|
||||||
|
|
||||||
@@ -121,7 +121,6 @@ def __init__(self, wrapped_pkg_object, root_builder):
|
|||||||
new_cls_name,
|
new_cls_name,
|
||||||
bases,
|
bases,
|
||||||
{
|
{
|
||||||
"__module__": package_cls.__module__,
|
|
||||||
"run_tests": property(lambda x: x.wrapped_package_object.run_tests),
|
"run_tests": property(lambda x: x.wrapped_package_object.run_tests),
|
||||||
"test_requires_compiler": property(
|
"test_requires_compiler": property(
|
||||||
lambda x: x.wrapped_package_object.test_requires_compiler
|
lambda x: x.wrapped_package_object.test_requires_compiler
|
||||||
@@ -130,6 +129,7 @@ def __init__(self, wrapped_pkg_object, root_builder):
|
|||||||
"tester": property(lambda x: x.wrapped_package_object.tester),
|
"tester": property(lambda x: x.wrapped_package_object.tester),
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
|
new_cls.__module__ = package_cls.__module__
|
||||||
self.__class__ = new_cls
|
self.__class__ = new_cls
|
||||||
self.__dict__.update(wrapped_pkg_object.__dict__)
|
self.__dict__.update(wrapped_pkg_object.__dict__)
|
||||||
|
|
||||||
@@ -185,16 +185,10 @@ def __init__(self, pkg):
|
|||||||
# These two methods don't follow the (self, spec, prefix) signature of phases nor
|
# These two methods don't follow the (self, spec, prefix) signature of phases nor
|
||||||
# the (self) signature of methods, so they are added explicitly to avoid using a
|
# the (self) signature of methods, so they are added explicitly to avoid using a
|
||||||
# catch-all (*args, **kwargs)
|
# catch-all (*args, **kwargs)
|
||||||
def setup_build_environment(
|
def setup_build_environment(self, env):
|
||||||
self, env: spack.util.environment.EnvironmentModifications
|
|
||||||
) -> None:
|
|
||||||
return self.pkg_with_dispatcher.setup_build_environment(env)
|
return self.pkg_with_dispatcher.setup_build_environment(env)
|
||||||
|
|
||||||
def setup_dependent_build_environment(
|
def setup_dependent_build_environment(self, env, dependent_spec):
|
||||||
self,
|
|
||||||
env: spack.util.environment.EnvironmentModifications,
|
|
||||||
dependent_spec: spack.spec.Spec,
|
|
||||||
) -> None:
|
|
||||||
return self.pkg_with_dispatcher.setup_dependent_build_environment(env, dependent_spec)
|
return self.pkg_with_dispatcher.setup_dependent_build_environment(env, dependent_spec)
|
||||||
|
|
||||||
return Adapter(pkg)
|
return Adapter(pkg)
|
||||||
@@ -408,7 +402,7 @@ def fixup_install(self):
|
|||||||
# do something after the package is installed
|
# do something after the package is installed
|
||||||
pass
|
pass
|
||||||
|
|
||||||
def setup_build_environment(self, env: EnvironmentModifications) -> None:
|
def setup_build_environment(self, env):
|
||||||
env.set("MY_ENV_VAR", "my_value")
|
env.set("MY_ENV_VAR", "my_value")
|
||||||
|
|
||||||
class CMakeBuilder(cmake.CMakeBuilder, AnyBuilder):
|
class CMakeBuilder(cmake.CMakeBuilder, AnyBuilder):
|
||||||
|
@@ -6,7 +6,6 @@
|
|||||||
import codecs
|
import codecs
|
||||||
import json
|
import json
|
||||||
import os
|
import os
|
||||||
import pathlib
|
|
||||||
import re
|
import re
|
||||||
import shutil
|
import shutil
|
||||||
import stat
|
import stat
|
||||||
@@ -14,7 +13,7 @@
|
|||||||
import tempfile
|
import tempfile
|
||||||
import zipfile
|
import zipfile
|
||||||
from collections import namedtuple
|
from collections import namedtuple
|
||||||
from typing import Callable, Dict, List, Optional, Set, Union
|
from typing import Callable, Dict, List, Set
|
||||||
from urllib.request import Request
|
from urllib.request import Request
|
||||||
|
|
||||||
import llnl.path
|
import llnl.path
|
||||||
@@ -33,7 +32,6 @@
|
|||||||
import spack.paths
|
import spack.paths
|
||||||
import spack.repo
|
import spack.repo
|
||||||
import spack.spec
|
import spack.spec
|
||||||
import spack.store
|
|
||||||
import spack.util.git
|
import spack.util.git
|
||||||
import spack.util.gpg as gpg_util
|
import spack.util.gpg as gpg_util
|
||||||
import spack.util.spack_yaml as syaml
|
import spack.util.spack_yaml as syaml
|
||||||
@@ -42,7 +40,6 @@
|
|||||||
from spack import traverse
|
from spack import traverse
|
||||||
from spack.error import SpackError
|
from spack.error import SpackError
|
||||||
from spack.reporters.cdash import SPACK_CDASH_TIMEOUT
|
from spack.reporters.cdash import SPACK_CDASH_TIMEOUT
|
||||||
from spack.version import GitVersion, StandardVersion
|
|
||||||
|
|
||||||
from .common import (
|
from .common import (
|
||||||
IS_WINDOWS,
|
IS_WINDOWS,
|
||||||
@@ -81,45 +78,6 @@ def get_change_revisions():
|
|||||||
return None, None
|
return None, None
|
||||||
|
|
||||||
|
|
||||||
def get_added_versions(
|
|
||||||
checksums_version_dict: Dict[str, Union[StandardVersion, GitVersion]],
|
|
||||||
path: str,
|
|
||||||
from_ref: str = "HEAD~1",
|
|
||||||
to_ref: str = "HEAD",
|
|
||||||
) -> List[Union[StandardVersion, GitVersion]]:
|
|
||||||
"""Get a list of the versions added between `from_ref` and `to_ref`.
|
|
||||||
Args:
|
|
||||||
checksums_version_dict (Dict): all package versions keyed by known checksums.
|
|
||||||
path (str): path to the package.py
|
|
||||||
from_ref (str): oldest git ref, defaults to `HEAD~1`
|
|
||||||
to_ref (str): newer git ref, defaults to `HEAD`
|
|
||||||
Returns: list of versions added between refs
|
|
||||||
"""
|
|
||||||
git_exe = spack.util.git.git(required=True)
|
|
||||||
|
|
||||||
# Gather git diff
|
|
||||||
diff_lines = git_exe("diff", from_ref, to_ref, "--", path, output=str).split("\n")
|
|
||||||
|
|
||||||
# Store added and removed versions
|
|
||||||
# Removed versions are tracked here to determine when versions are moved in a file
|
|
||||||
# and show up as both added and removed in a git diff.
|
|
||||||
added_checksums = set()
|
|
||||||
removed_checksums = set()
|
|
||||||
|
|
||||||
# Scrape diff for modified versions and prune added versions if they show up
|
|
||||||
# as also removed (which means they've actually just moved in the file and
|
|
||||||
# we shouldn't need to rechecksum them)
|
|
||||||
for checksum in checksums_version_dict.keys():
|
|
||||||
for line in diff_lines:
|
|
||||||
if checksum in line:
|
|
||||||
if line.startswith("+"):
|
|
||||||
added_checksums.add(checksum)
|
|
||||||
if line.startswith("-"):
|
|
||||||
removed_checksums.add(checksum)
|
|
||||||
|
|
||||||
return [checksums_version_dict[c] for c in added_checksums - removed_checksums]
|
|
||||||
|
|
||||||
|
|
||||||
def get_stack_changed(env_path, rev1="HEAD^", rev2="HEAD"):
|
def get_stack_changed(env_path, rev1="HEAD^", rev2="HEAD"):
|
||||||
"""Given an environment manifest path and two revisions to compare, return
|
"""Given an environment manifest path and two revisions to compare, return
|
||||||
whether or not the stack was changed. Returns True if the environment
|
whether or not the stack was changed. Returns True if the environment
|
||||||
@@ -150,10 +108,10 @@ def get_stack_changed(env_path, rev1="HEAD^", rev2="HEAD"):
|
|||||||
return False
|
return False
|
||||||
|
|
||||||
|
|
||||||
def compute_affected_packages(rev1: str = "HEAD^", rev2: str = "HEAD") -> Set[str]:
|
def compute_affected_packages(rev1="HEAD^", rev2="HEAD"):
|
||||||
"""Determine which packages were added, removed or changed
|
"""Determine which packages were added, removed or changed
|
||||||
between rev1 and rev2, and return the names as a set"""
|
between rev1 and rev2, and return the names as a set"""
|
||||||
return spack.repo.get_all_package_diffs("ARC", spack.repo.builtin_repo(), rev1=rev1, rev2=rev2)
|
return spack.repo.get_all_package_diffs("ARC", rev1=rev1, rev2=rev2)
|
||||||
|
|
||||||
|
|
||||||
def get_spec_filter_list(env, affected_pkgs, dependent_traverse_depth=None):
|
def get_spec_filter_list(env, affected_pkgs, dependent_traverse_depth=None):
|
||||||
@@ -265,7 +223,7 @@ def rebuild_filter(s: spack.spec.Spec) -> RebuildDecision:
|
|||||||
|
|
||||||
def _format_pruning_message(spec: spack.spec.Spec, prune: bool, reasons: List[str]) -> str:
|
def _format_pruning_message(spec: spack.spec.Spec, prune: bool, reasons: List[str]) -> str:
|
||||||
reason_msg = ", ".join(reasons)
|
reason_msg = ", ".join(reasons)
|
||||||
spec_fmt = "{name}{@version}{/hash:7}{%compiler}"
|
spec_fmt = "{name}{@version}{%compiler}{/hash:7}"
|
||||||
|
|
||||||
if not prune:
|
if not prune:
|
||||||
status = colorize("@*g{[x]} ")
|
status = colorize("@*g{[x]} ")
|
||||||
@@ -614,40 +572,29 @@ def copy_stage_logs_to_artifacts(job_spec: spack.spec.Spec, job_log_dir: str) ->
|
|||||||
job_spec, and attempts to copy the files into the directory given
|
job_spec, and attempts to copy the files into the directory given
|
||||||
by job_log_dir.
|
by job_log_dir.
|
||||||
|
|
||||||
Parameters:
|
Args:
|
||||||
job_spec: spec associated with spack install log
|
job_spec: spec associated with spack install log
|
||||||
job_log_dir: path into which build log should be copied
|
job_log_dir: path into which build log should be copied
|
||||||
"""
|
"""
|
||||||
tty.debug(f"job spec: {job_spec}")
|
tty.debug(f"job spec: {job_spec}")
|
||||||
if not job_spec.concrete:
|
|
||||||
tty.warn("Cannot copy artifacts for non-concrete specs")
|
try:
|
||||||
|
pkg_cls = spack.repo.PATH.get_pkg_class(job_spec.name)
|
||||||
|
job_pkg = pkg_cls(job_spec)
|
||||||
|
tty.debug(f"job package: {job_pkg}")
|
||||||
|
except AssertionError:
|
||||||
|
msg = f"Cannot copy stage logs: job spec ({job_spec}) must be concrete"
|
||||||
|
tty.error(msg)
|
||||||
return
|
return
|
||||||
|
|
||||||
package_metadata_root = pathlib.Path(spack.store.STORE.layout.metadata_path(job_spec))
|
stage_dir = job_pkg.stage.path
|
||||||
if not os.path.isdir(package_metadata_root):
|
tty.debug(f"stage dir: {stage_dir}")
|
||||||
# Fallback to using the stage directory
|
for file in [
|
||||||
job_pkg = job_spec.package
|
job_pkg.log_path,
|
||||||
|
job_pkg.env_mods_path,
|
||||||
package_metadata_root = pathlib.Path(job_pkg.stage.path)
|
*spack.builder.create(job_pkg).archive_files,
|
||||||
archive_files = spack.builder.create(job_pkg).archive_files
|
]:
|
||||||
tty.warn("Package not installed, falling back to use stage dir")
|
copy_files_to_artifacts(file, job_log_dir)
|
||||||
tty.debug(f"stage dir: {package_metadata_root}")
|
|
||||||
else:
|
|
||||||
# Get the package's archived files
|
|
||||||
archive_files = []
|
|
||||||
archive_root = package_metadata_root / "archived-files"
|
|
||||||
if os.path.isdir(archive_root):
|
|
||||||
archive_files = [str(f) for f in archive_root.rglob("*") if os.path.isfile(f)]
|
|
||||||
else:
|
|
||||||
tty.debug(f"No archived files detected at {archive_root}")
|
|
||||||
|
|
||||||
# Try zipped and unzipped versions of the build log
|
|
||||||
build_log_zipped = package_metadata_root / "spack-build-out.txt.gz"
|
|
||||||
build_log = package_metadata_root / "spack-build-out.txt"
|
|
||||||
build_env_mods = package_metadata_root / "spack-build-env.txt"
|
|
||||||
|
|
||||||
for f in [build_log_zipped, build_log, build_env_mods, *archive_files]:
|
|
||||||
copy_files_to_artifacts(str(f), job_log_dir, compress_artifacts=True)
|
|
||||||
|
|
||||||
|
|
||||||
def copy_test_logs_to_artifacts(test_stage, job_test_dir):
|
def copy_test_logs_to_artifacts(test_stage, job_test_dir):
|
||||||
@@ -660,12 +607,11 @@ def copy_test_logs_to_artifacts(test_stage, job_test_dir):
|
|||||||
"""
|
"""
|
||||||
tty.debug(f"test stage: {test_stage}")
|
tty.debug(f"test stage: {test_stage}")
|
||||||
if not os.path.exists(test_stage):
|
if not os.path.exists(test_stage):
|
||||||
tty.error(f"Cannot copy test logs: job test stage ({test_stage}) does not exist")
|
msg = f"Cannot copy test logs: job test stage ({test_stage}) does not exist"
|
||||||
|
tty.error(msg)
|
||||||
return
|
return
|
||||||
|
|
||||||
copy_files_to_artifacts(
|
copy_files_to_artifacts(os.path.join(test_stage, "*", "*.txt"), job_test_dir)
|
||||||
os.path.join(test_stage, "*", "*.txt"), job_test_dir, compress_artifacts=True
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def download_and_extract_artifacts(url, work_dir) -> str:
|
def download_and_extract_artifacts(url, work_dir) -> str:
|
||||||
@@ -1304,34 +1250,35 @@ def display_broken_spec_messages(base_url, hashes):
|
|||||||
tty.msg(msg)
|
tty.msg(msg)
|
||||||
|
|
||||||
|
|
||||||
def run_standalone_tests(
|
def run_standalone_tests(**kwargs):
|
||||||
*,
|
|
||||||
cdash: Optional[CDashHandler] = None,
|
|
||||||
fail_fast: bool = False,
|
|
||||||
log_file: Optional[str] = None,
|
|
||||||
job_spec: Optional[spack.spec.Spec] = None,
|
|
||||||
repro_dir: Optional[str] = None,
|
|
||||||
timeout: Optional[int] = None,
|
|
||||||
):
|
|
||||||
"""Run stand-alone tests on the current spec.
|
"""Run stand-alone tests on the current spec.
|
||||||
|
|
||||||
Args:
|
Arguments:
|
||||||
cdash: cdash handler instance
|
kwargs (dict): dictionary of arguments used to run the tests
|
||||||
fail_fast: terminate tests after the first failure
|
|
||||||
log_file: test log file name if NOT CDash reporting
|
List of recognized keys:
|
||||||
job_spec: spec that was built
|
|
||||||
repro_dir: reproduction directory
|
* "cdash" (CDashHandler): (optional) cdash handler instance
|
||||||
timeout: maximum time (in seconds) that tests are allowed to run
|
* "fail_fast" (bool): (optional) terminate tests after the first failure
|
||||||
|
* "log_file" (str): (optional) test log file name if NOT CDash reporting
|
||||||
|
* "job_spec" (Spec): spec that was built
|
||||||
|
* "repro_dir" (str): reproduction directory
|
||||||
"""
|
"""
|
||||||
|
cdash = kwargs.get("cdash")
|
||||||
|
fail_fast = kwargs.get("fail_fast")
|
||||||
|
log_file = kwargs.get("log_file")
|
||||||
|
|
||||||
if cdash and log_file:
|
if cdash and log_file:
|
||||||
tty.msg(f"The test log file {log_file} option is ignored with CDash reporting")
|
tty.msg(f"The test log file {log_file} option is ignored with CDash reporting")
|
||||||
log_file = None
|
log_file = None
|
||||||
|
|
||||||
# Error out but do NOT terminate if there are missing required arguments.
|
# Error out but do NOT terminate if there are missing required arguments.
|
||||||
|
job_spec = kwargs.get("job_spec")
|
||||||
if not job_spec:
|
if not job_spec:
|
||||||
tty.error("Job spec is required to run stand-alone tests")
|
tty.error("Job spec is required to run stand-alone tests")
|
||||||
return
|
return
|
||||||
|
|
||||||
|
repro_dir = kwargs.get("repro_dir")
|
||||||
if not repro_dir:
|
if not repro_dir:
|
||||||
tty.error("Reproduction directory is required for stand-alone tests")
|
tty.error("Reproduction directory is required for stand-alone tests")
|
||||||
return
|
return
|
||||||
@@ -1340,9 +1287,6 @@ def run_standalone_tests(
|
|||||||
if fail_fast:
|
if fail_fast:
|
||||||
test_args.append("--fail-fast")
|
test_args.append("--fail-fast")
|
||||||
|
|
||||||
if timeout is not None:
|
|
||||||
test_args.extend(["--timeout", str(timeout)])
|
|
||||||
|
|
||||||
if cdash:
|
if cdash:
|
||||||
test_args.extend(cdash.args())
|
test_args.extend(cdash.args())
|
||||||
else:
|
else:
|
||||||
|
@@ -2,13 +2,9 @@
|
|||||||
#
|
#
|
||||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||||
import copy
|
import copy
|
||||||
import errno
|
|
||||||
import glob
|
|
||||||
import gzip
|
|
||||||
import json
|
import json
|
||||||
import os
|
import os
|
||||||
import re
|
import re
|
||||||
import shutil
|
|
||||||
import sys
|
import sys
|
||||||
import time
|
import time
|
||||||
from collections import deque
|
from collections import deque
|
||||||
@@ -29,7 +25,6 @@
|
|||||||
import spack.mirrors.mirror
|
import spack.mirrors.mirror
|
||||||
import spack.schema
|
import spack.schema
|
||||||
import spack.spec
|
import spack.spec
|
||||||
import spack.util.compression as compression
|
|
||||||
import spack.util.spack_yaml as syaml
|
import spack.util.spack_yaml as syaml
|
||||||
import spack.util.url as url_util
|
import spack.util.url as url_util
|
||||||
import spack.util.web as web_util
|
import spack.util.web as web_util
|
||||||
@@ -45,67 +40,22 @@
|
|||||||
_urlopen = web_util.urlopen
|
_urlopen = web_util.urlopen
|
||||||
|
|
||||||
|
|
||||||
def copy_gzipped(glob_or_path: str, dest: str) -> None:
|
def copy_files_to_artifacts(src, artifacts_dir):
|
||||||
"""Copy all of the files in the source glob/path to the destination.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
glob_or_path: path to file to test
|
|
||||||
dest: destination path to copy to
|
|
||||||
"""
|
|
||||||
|
|
||||||
files = glob.glob(glob_or_path)
|
|
||||||
if not files:
|
|
||||||
raise OSError("No such file or directory: '{0}'".format(glob_or_path), errno.ENOENT)
|
|
||||||
if len(files) > 1 and not os.path.isdir(dest):
|
|
||||||
raise ValueError(
|
|
||||||
"'{0}' matches multiple files but '{1}' is not a directory".format(glob_or_path, dest)
|
|
||||||
)
|
|
||||||
|
|
||||||
def is_gzipped(path):
|
|
||||||
with open(path, "rb") as fd:
|
|
||||||
return compression.GZipFileType().matches_magic(fd)
|
|
||||||
|
|
||||||
for src in files:
|
|
||||||
if is_gzipped(src):
|
|
||||||
fs.copy(src, dest)
|
|
||||||
else:
|
|
||||||
# Compress and copy in one step
|
|
||||||
src_name = os.path.basename(src)
|
|
||||||
if os.path.isdir(dest):
|
|
||||||
zipped = os.path.join(dest, f"{src_name}.gz")
|
|
||||||
elif not dest.endswith(".gz"):
|
|
||||||
zipped = f"{dest}.gz"
|
|
||||||
else:
|
|
||||||
zipped = dest
|
|
||||||
|
|
||||||
with open(src, "rb") as fin, gzip.open(zipped, "wb") as fout:
|
|
||||||
shutil.copyfileobj(fin, fout)
|
|
||||||
|
|
||||||
|
|
||||||
def copy_files_to_artifacts(
|
|
||||||
src: str, artifacts_dir: str, *, compress_artifacts: bool = False
|
|
||||||
) -> None:
|
|
||||||
"""
|
"""
|
||||||
Copy file(s) to the given artifacts directory
|
Copy file(s) to the given artifacts directory
|
||||||
|
|
||||||
Args:
|
Parameters:
|
||||||
src (str): the glob-friendly path expression for the file(s) to copy
|
src (str): the glob-friendly path expression for the file(s) to copy
|
||||||
artifacts_dir (str): the destination directory
|
artifacts_dir (str): the destination directory
|
||||||
compress_artifacts (bool): option to compress copied artifacts using Gzip
|
|
||||||
"""
|
"""
|
||||||
try:
|
try:
|
||||||
|
fs.copy(src, artifacts_dir)
|
||||||
if compress_artifacts:
|
|
||||||
copy_gzipped(src, artifacts_dir)
|
|
||||||
else:
|
|
||||||
fs.copy(src, artifacts_dir)
|
|
||||||
except Exception as err:
|
except Exception as err:
|
||||||
tty.warn(
|
msg = (
|
||||||
(
|
f"Unable to copy files ({src}) to artifacts {artifacts_dir} due to "
|
||||||
f"Unable to copy files ({src}) to artifacts {artifacts_dir} due to "
|
f"exception: {str(err)}"
|
||||||
f"exception: {str(err)}"
|
|
||||||
)
|
|
||||||
)
|
)
|
||||||
|
tty.warn(msg)
|
||||||
|
|
||||||
|
|
||||||
def win_quote(quote_str: str) -> str:
|
def win_quote(quote_str: str) -> str:
|
||||||
|
@@ -330,7 +330,7 @@ def ensure_single_spec_or_die(spec, matching_specs):
|
|||||||
if len(matching_specs) <= 1:
|
if len(matching_specs) <= 1:
|
||||||
return
|
return
|
||||||
|
|
||||||
format_string = "{name}{@version}{ arch=architecture} {%compiler.name}{@compiler.version}"
|
format_string = "{name}{@version}{%compiler.name}{@compiler.version}{ arch=architecture}"
|
||||||
args = ["%s matches multiple packages." % spec, "Matching packages:"]
|
args = ["%s matches multiple packages." % spec, "Matching packages:"]
|
||||||
args += [
|
args += [
|
||||||
colorize(" @K{%s} " % s.dag_hash(7)) + s.cformat(format_string) for s in matching_specs
|
colorize(" @K{%s} " % s.dag_hash(7)) + s.cformat(format_string) for s in matching_specs
|
||||||
@@ -436,7 +436,7 @@ def display_specs(specs, args=None, **kwargs):
|
|||||||
all_headers (bool): show headers even when arch/compiler aren't defined
|
all_headers (bool): show headers even when arch/compiler aren't defined
|
||||||
status_fn (typing.Callable): if provided, prepend install-status info
|
status_fn (typing.Callable): if provided, prepend install-status info
|
||||||
output (typing.IO): A file object to write to. Default is ``sys.stdout``
|
output (typing.IO): A file object to write to. Default is ``sys.stdout``
|
||||||
specfile_format (bool): specfile format of the current spec
|
|
||||||
"""
|
"""
|
||||||
|
|
||||||
def get_arg(name, default=None):
|
def get_arg(name, default=None):
|
||||||
@@ -458,7 +458,6 @@ def get_arg(name, default=None):
|
|||||||
all_headers = get_arg("all_headers", False)
|
all_headers = get_arg("all_headers", False)
|
||||||
output = get_arg("output", sys.stdout)
|
output = get_arg("output", sys.stdout)
|
||||||
status_fn = get_arg("status_fn", None)
|
status_fn = get_arg("status_fn", None)
|
||||||
specfile_format = get_arg("specfile_format", False)
|
|
||||||
|
|
||||||
decorator = get_arg("decorator", None)
|
decorator = get_arg("decorator", None)
|
||||||
if decorator is None:
|
if decorator is None:
|
||||||
@@ -478,10 +477,7 @@ def get_arg(name, default=None):
|
|||||||
if flags:
|
if flags:
|
||||||
ffmt += " {compiler_flags}"
|
ffmt += " {compiler_flags}"
|
||||||
vfmt = "{variants}" if variants else ""
|
vfmt = "{variants}" if variants else ""
|
||||||
format_string = nfmt + "{@version}" + vfmt + ffmt
|
format_string = nfmt + "{@version}" + ffmt + vfmt
|
||||||
|
|
||||||
if specfile_format:
|
|
||||||
format_string = "[{specfile_version}] " + format_string
|
|
||||||
|
|
||||||
def fmt(s, depth=0):
|
def fmt(s, depth=0):
|
||||||
"""Formatter function for all output specs"""
|
"""Formatter function for all output specs"""
|
||||||
|
@@ -76,6 +76,9 @@ def setup_parser(subparser: argparse.ArgumentParser):
|
|||||||
default=False,
|
default=False,
|
||||||
help="regenerate buildcache index after building package(s)",
|
help="regenerate buildcache index after building package(s)",
|
||||||
)
|
)
|
||||||
|
push.add_argument(
|
||||||
|
"--spec-file", default=None, help="create buildcache entry for spec from json or yaml file"
|
||||||
|
)
|
||||||
push.add_argument(
|
push.add_argument(
|
||||||
"--only",
|
"--only",
|
||||||
default="package,dependencies",
|
default="package,dependencies",
|
||||||
@@ -189,14 +192,28 @@ def setup_parser(subparser: argparse.ArgumentParser):
|
|||||||
default=lambda: spack.config.default_modify_scope(),
|
default=lambda: spack.config.default_modify_scope(),
|
||||||
help="configuration scope containing mirrors to check",
|
help="configuration scope containing mirrors to check",
|
||||||
)
|
)
|
||||||
|
# Unfortunately there are 3 ways to do the same thing here:
|
||||||
|
check_specs = check.add_mutually_exclusive_group()
|
||||||
|
check_specs.add_argument(
|
||||||
|
"-s", "--spec", help="check single spec instead of release specs file"
|
||||||
|
)
|
||||||
|
check_specs.add_argument(
|
||||||
|
"--spec-file",
|
||||||
|
help="check single spec from json or yaml file instead of release specs file",
|
||||||
|
)
|
||||||
arguments.add_common_arguments(check, ["specs"])
|
arguments.add_common_arguments(check, ["specs"])
|
||||||
|
|
||||||
check.set_defaults(func=check_fn)
|
check.set_defaults(func=check_fn)
|
||||||
|
|
||||||
# Download tarball and specfile
|
# Download tarball and specfile
|
||||||
download = subparsers.add_parser("download", help=download_fn.__doc__)
|
download = subparsers.add_parser("download", help=download_fn.__doc__)
|
||||||
download.add_argument("-s", "--spec", help="download built tarball for spec from mirror")
|
download_spec_or_specfile = download.add_mutually_exclusive_group(required=True)
|
||||||
|
download_spec_or_specfile.add_argument(
|
||||||
|
"-s", "--spec", help="download built tarball for spec from mirror"
|
||||||
|
)
|
||||||
|
download_spec_or_specfile.add_argument(
|
||||||
|
"--spec-file", help="download built tarball for spec (from json or yaml file) from mirror"
|
||||||
|
)
|
||||||
download.add_argument(
|
download.add_argument(
|
||||||
"-p",
|
"-p",
|
||||||
"--path",
|
"--path",
|
||||||
@@ -206,10 +223,28 @@ def setup_parser(subparser: argparse.ArgumentParser):
|
|||||||
)
|
)
|
||||||
download.set_defaults(func=download_fn)
|
download.set_defaults(func=download_fn)
|
||||||
|
|
||||||
|
# Get buildcache name
|
||||||
|
getbuildcachename = subparsers.add_parser(
|
||||||
|
"get-buildcache-name", help=get_buildcache_name_fn.__doc__
|
||||||
|
)
|
||||||
|
getbuildcachename_spec_or_specfile = getbuildcachename.add_mutually_exclusive_group(
|
||||||
|
required=True
|
||||||
|
)
|
||||||
|
getbuildcachename_spec_or_specfile.add_argument(
|
||||||
|
"-s", "--spec", help="spec string for which buildcache name is desired"
|
||||||
|
)
|
||||||
|
getbuildcachename_spec_or_specfile.add_argument(
|
||||||
|
"--spec-file", help="path to spec json or yaml file for which buildcache name is desired"
|
||||||
|
)
|
||||||
|
getbuildcachename.set_defaults(func=get_buildcache_name_fn)
|
||||||
|
|
||||||
# Given the root spec, save the yaml of the dependent spec to a file
|
# Given the root spec, save the yaml of the dependent spec to a file
|
||||||
savespecfile = subparsers.add_parser("save-specfile", help=save_specfile_fn.__doc__)
|
savespecfile = subparsers.add_parser("save-specfile", help=save_specfile_fn.__doc__)
|
||||||
savespecfile_spec_or_specfile = savespecfile.add_mutually_exclusive_group(required=True)
|
savespecfile_spec_or_specfile = savespecfile.add_mutually_exclusive_group(required=True)
|
||||||
savespecfile_spec_or_specfile.add_argument("--root-spec", help="root spec of dependent spec")
|
savespecfile_spec_or_specfile.add_argument("--root-spec", help="root spec of dependent spec")
|
||||||
|
savespecfile_spec_or_specfile.add_argument(
|
||||||
|
"--root-specfile", help="path to json or yaml file containing root spec of dependent spec"
|
||||||
|
)
|
||||||
savespecfile.add_argument(
|
savespecfile.add_argument(
|
||||||
"-s",
|
"-s",
|
||||||
"--specs",
|
"--specs",
|
||||||
@@ -345,8 +380,14 @@ def _specs_to_be_packaged(
|
|||||||
|
|
||||||
def push_fn(args):
|
def push_fn(args):
|
||||||
"""create a binary package and push it to a mirror"""
|
"""create a binary package and push it to a mirror"""
|
||||||
if args.specs:
|
if args.spec_file:
|
||||||
roots = _matching_specs(spack.cmd.parse_specs(args.specs))
|
tty.warn(
|
||||||
|
"The flag `--spec-file` is deprecated and will be removed in Spack 0.22. "
|
||||||
|
"Use positional arguments instead."
|
||||||
|
)
|
||||||
|
|
||||||
|
if args.specs or args.spec_file:
|
||||||
|
roots = _matching_specs(spack.cmd.parse_specs(args.specs or args.spec_file))
|
||||||
else:
|
else:
|
||||||
roots = spack.cmd.require_active_env(cmd_name="buildcache push").concrete_roots()
|
roots = spack.cmd.require_active_env(cmd_name="buildcache push").concrete_roots()
|
||||||
|
|
||||||
@@ -488,7 +529,22 @@ def check_fn(args: argparse.Namespace):
|
|||||||
this command uses the process exit code to indicate its result, specifically, if the
|
this command uses the process exit code to indicate its result, specifically, if the
|
||||||
exit code is non-zero, then at least one of the indicated specs needs to be rebuilt
|
exit code is non-zero, then at least one of the indicated specs needs to be rebuilt
|
||||||
"""
|
"""
|
||||||
specs_arg = args.specs
|
if args.spec_file:
|
||||||
|
specs_arg = (
|
||||||
|
args.spec_file if os.path.sep in args.spec_file else os.path.join(".", args.spec_file)
|
||||||
|
)
|
||||||
|
tty.warn(
|
||||||
|
"The flag `--spec-file` is deprecated and will be removed in Spack 0.22. "
|
||||||
|
f"Use `spack buildcache check {specs_arg}` instead."
|
||||||
|
)
|
||||||
|
elif args.spec:
|
||||||
|
specs_arg = args.spec
|
||||||
|
tty.warn(
|
||||||
|
"The flag `--spec` is deprecated and will be removed in Spack 0.23. "
|
||||||
|
f"Use `spack buildcache check {specs_arg}` instead."
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
specs_arg = args.specs
|
||||||
|
|
||||||
if specs_arg:
|
if specs_arg:
|
||||||
specs = _matching_specs(spack.cmd.parse_specs(specs_arg))
|
specs = _matching_specs(spack.cmd.parse_specs(specs_arg))
|
||||||
@@ -522,7 +578,13 @@ def download_fn(args):
|
|||||||
code indicates that the command failed to download at least one of the required buildcache
|
code indicates that the command failed to download at least one of the required buildcache
|
||||||
components
|
components
|
||||||
"""
|
"""
|
||||||
specs = _matching_specs(spack.cmd.parse_specs(args.spec))
|
if args.spec_file:
|
||||||
|
tty.warn(
|
||||||
|
"The flag `--spec-file` is deprecated and will be removed in Spack 0.22. "
|
||||||
|
"Use --spec instead."
|
||||||
|
)
|
||||||
|
|
||||||
|
specs = _matching_specs(spack.cmd.parse_specs(args.spec or args.spec_file))
|
||||||
|
|
||||||
if len(specs) != 1:
|
if len(specs) != 1:
|
||||||
tty.die("a single spec argument is required to download from a buildcache")
|
tty.die("a single spec argument is required to download from a buildcache")
|
||||||
@@ -531,6 +593,15 @@ def download_fn(args):
|
|||||||
sys.exit(1)
|
sys.exit(1)
|
||||||
|
|
||||||
|
|
||||||
|
def get_buildcache_name_fn(args):
|
||||||
|
"""get name (prefix) of buildcache entries for this spec"""
|
||||||
|
tty.warn("This command is deprecated and will be removed in Spack 0.22.")
|
||||||
|
specs = _matching_specs(spack.cmd.parse_specs(args.spec or args.spec_file))
|
||||||
|
if len(specs) != 1:
|
||||||
|
tty.die("a single spec argument is required to get buildcache name")
|
||||||
|
print(bindist.tarball_name(specs[0], ""))
|
||||||
|
|
||||||
|
|
||||||
def save_specfile_fn(args):
|
def save_specfile_fn(args):
|
||||||
"""get full spec for dependencies and write them to files in the specified output directory
|
"""get full spec for dependencies and write them to files in the specified output directory
|
||||||
|
|
||||||
@@ -538,7 +609,13 @@ def save_specfile_fn(args):
|
|||||||
successful. if any errors or exceptions are encountered, or if expected command-line arguments
|
successful. if any errors or exceptions are encountered, or if expected command-line arguments
|
||||||
are not provided, then the exit code will be non-zero
|
are not provided, then the exit code will be non-zero
|
||||||
"""
|
"""
|
||||||
specs = spack.cmd.parse_specs(args.root_spec)
|
if args.root_specfile:
|
||||||
|
tty.warn(
|
||||||
|
"The flag `--root-specfile` is deprecated and will be removed in Spack 0.22. "
|
||||||
|
"Use --root-spec instead."
|
||||||
|
)
|
||||||
|
|
||||||
|
specs = spack.cmd.parse_specs(args.root_spec or args.root_specfile)
|
||||||
|
|
||||||
if len(specs) != 1:
|
if len(specs) != 1:
|
||||||
tty.die("a single spec argument is required to save specfile")
|
tty.die("a single spec argument is required to save specfile")
|
||||||
|
@@ -5,13 +5,11 @@
|
|||||||
import json
|
import json
|
||||||
import os
|
import os
|
||||||
import shutil
|
import shutil
|
||||||
import sys
|
|
||||||
from typing import Dict
|
|
||||||
from urllib.parse import urlparse, urlunparse
|
from urllib.parse import urlparse, urlunparse
|
||||||
|
|
||||||
import llnl.util.filesystem as fs
|
import llnl.util.filesystem as fs
|
||||||
|
import llnl.util.tty as tty
|
||||||
import llnl.util.tty.color as clr
|
import llnl.util.tty.color as clr
|
||||||
from llnl.util import tty
|
|
||||||
|
|
||||||
import spack.binary_distribution as bindist
|
import spack.binary_distribution as bindist
|
||||||
import spack.ci as spack_ci
|
import spack.ci as spack_ci
|
||||||
@@ -20,20 +18,12 @@
|
|||||||
import spack.cmd.common.arguments
|
import spack.cmd.common.arguments
|
||||||
import spack.config as cfg
|
import spack.config as cfg
|
||||||
import spack.environment as ev
|
import spack.environment as ev
|
||||||
import spack.error
|
|
||||||
import spack.fetch_strategy
|
|
||||||
import spack.hash_types as ht
|
import spack.hash_types as ht
|
||||||
import spack.mirrors.mirror
|
import spack.mirrors.mirror
|
||||||
import spack.package_base
|
|
||||||
import spack.repo
|
|
||||||
import spack.spec
|
|
||||||
import spack.stage
|
|
||||||
import spack.util.executable
|
|
||||||
import spack.util.gpg as gpg_util
|
import spack.util.gpg as gpg_util
|
||||||
import spack.util.timer as timer
|
import spack.util.timer as timer
|
||||||
import spack.util.url as url_util
|
import spack.util.url as url_util
|
||||||
import spack.util.web as web_util
|
import spack.util.web as web_util
|
||||||
import spack.version
|
|
||||||
|
|
||||||
description = "manage continuous integration pipelines"
|
description = "manage continuous integration pipelines"
|
||||||
section = "build"
|
section = "build"
|
||||||
@@ -160,12 +150,6 @@ def setup_parser(subparser):
|
|||||||
default=False,
|
default=False,
|
||||||
help="stop stand-alone tests after the first failure",
|
help="stop stand-alone tests after the first failure",
|
||||||
)
|
)
|
||||||
rebuild.add_argument(
|
|
||||||
"--timeout",
|
|
||||||
type=int,
|
|
||||||
default=None,
|
|
||||||
help="maximum time (in seconds) that tests are allowed to run",
|
|
||||||
)
|
|
||||||
rebuild.set_defaults(func=ci_rebuild)
|
rebuild.set_defaults(func=ci_rebuild)
|
||||||
spack.cmd.common.arguments.add_common_arguments(rebuild, ["jobs"])
|
spack.cmd.common.arguments.add_common_arguments(rebuild, ["jobs"])
|
||||||
|
|
||||||
@@ -207,16 +191,6 @@ def setup_parser(subparser):
|
|||||||
|
|
||||||
reproduce.set_defaults(func=ci_reproduce)
|
reproduce.set_defaults(func=ci_reproduce)
|
||||||
|
|
||||||
# Verify checksums inside of ci workflows
|
|
||||||
verify_versions = subparsers.add_parser(
|
|
||||||
"verify-versions",
|
|
||||||
description=deindent(ci_verify_versions.__doc__),
|
|
||||||
help=spack.cmd.first_line(ci_verify_versions.__doc__),
|
|
||||||
)
|
|
||||||
verify_versions.add_argument("from_ref", help="git ref from which start looking at changes")
|
|
||||||
verify_versions.add_argument("to_ref", help="git ref to end looking at changes")
|
|
||||||
verify_versions.set_defaults(func=ci_verify_versions)
|
|
||||||
|
|
||||||
|
|
||||||
def ci_generate(args):
|
def ci_generate(args):
|
||||||
"""generate jobs file from a CI-aware spack file
|
"""generate jobs file from a CI-aware spack file
|
||||||
@@ -490,12 +464,10 @@ def ci_rebuild(args):
|
|||||||
job_spec.to_dict(hash=ht.dag_hash),
|
job_spec.to_dict(hash=ht.dag_hash),
|
||||||
)
|
)
|
||||||
|
|
||||||
# Copy logs and archived files from the install metadata (.spack) directory to artifacts now
|
# We generated the "spack install ..." command to "--keep-stage", copy
|
||||||
|
# any logs from the staging directory to artifacts now
|
||||||
spack_ci.copy_stage_logs_to_artifacts(job_spec, job_log_dir)
|
spack_ci.copy_stage_logs_to_artifacts(job_spec, job_log_dir)
|
||||||
|
|
||||||
# Clear the stage directory
|
|
||||||
spack.stage.purge()
|
|
||||||
|
|
||||||
# If the installation succeeded and we're running stand-alone tests for
|
# If the installation succeeded and we're running stand-alone tests for
|
||||||
# the package, run them and copy the output. Failures of any kind should
|
# the package, run them and copy the output. Failures of any kind should
|
||||||
# *not* terminate the build process or preclude creating the build cache.
|
# *not* terminate the build process or preclude creating the build cache.
|
||||||
@@ -530,7 +502,6 @@ def ci_rebuild(args):
|
|||||||
fail_fast=args.fail_fast,
|
fail_fast=args.fail_fast,
|
||||||
log_file=log_file,
|
log_file=log_file,
|
||||||
repro_dir=repro_dir,
|
repro_dir=repro_dir,
|
||||||
timeout=args.timeout,
|
|
||||||
)
|
)
|
||||||
|
|
||||||
except Exception as err:
|
except Exception as err:
|
||||||
@@ -689,158 +660,6 @@ def _gitlab_artifacts_url(url: str) -> str:
|
|||||||
return urlunparse(parsed._replace(path="/".join(parts), fragment="", query=""))
|
return urlunparse(parsed._replace(path="/".join(parts), fragment="", query=""))
|
||||||
|
|
||||||
|
|
||||||
def validate_standard_versions(
|
|
||||||
pkg: spack.package_base.PackageBase, versions: spack.version.VersionList
|
|
||||||
) -> bool:
|
|
||||||
"""Get and test the checksum of a package version based on a tarball.
|
|
||||||
Args:
|
|
||||||
pkg spack.package_base.PackageBase: Spack package for which to validate a version checksum
|
|
||||||
versions spack.version.VersionList: list of package versions to validate
|
|
||||||
Returns: bool: result of the validation. True is valid and false is failed.
|
|
||||||
"""
|
|
||||||
url_dict: Dict[spack.version.StandardVersion, str] = {}
|
|
||||||
|
|
||||||
for version in versions:
|
|
||||||
url = pkg.find_valid_url_for_version(version)
|
|
||||||
url_dict[version] = url
|
|
||||||
|
|
||||||
version_hashes = spack.stage.get_checksums_for_versions(
|
|
||||||
url_dict, pkg.name, fetch_options=pkg.fetch_options
|
|
||||||
)
|
|
||||||
|
|
||||||
valid_checksums = True
|
|
||||||
for version, sha in version_hashes.items():
|
|
||||||
if sha != pkg.versions[version]["sha256"]:
|
|
||||||
tty.error(
|
|
||||||
f"Invalid checksum found {pkg.name}@{version}\n"
|
|
||||||
f" [package.py] {pkg.versions[version]['sha256']}\n"
|
|
||||||
f" [Downloaded] {sha}"
|
|
||||||
)
|
|
||||||
valid_checksums = False
|
|
||||||
continue
|
|
||||||
|
|
||||||
tty.info(f"Validated {pkg.name}@{version} --> {sha}")
|
|
||||||
|
|
||||||
return valid_checksums
|
|
||||||
|
|
||||||
|
|
||||||
def validate_git_versions(
|
|
||||||
pkg: spack.package_base.PackageBase, versions: spack.version.VersionList
|
|
||||||
) -> bool:
|
|
||||||
"""Get and test the commit and tag of a package version based on a git repository.
|
|
||||||
Args:
|
|
||||||
pkg spack.package_base.PackageBase: Spack package for which to validate a version
|
|
||||||
versions spack.version.VersionList: list of package versions to validate
|
|
||||||
Returns: bool: result of the validation. True is valid and false is failed.
|
|
||||||
"""
|
|
||||||
valid_commit = True
|
|
||||||
for version in versions:
|
|
||||||
fetcher = spack.fetch_strategy.for_package_version(pkg, version)
|
|
||||||
with spack.stage.Stage(fetcher) as stage:
|
|
||||||
known_commit = pkg.versions[version]["commit"]
|
|
||||||
try:
|
|
||||||
stage.fetch()
|
|
||||||
except spack.error.FetchError:
|
|
||||||
tty.error(
|
|
||||||
f"Invalid commit for {pkg.name}@{version}\n"
|
|
||||||
f" {known_commit} could not be checked out in the git repository."
|
|
||||||
)
|
|
||||||
valid_commit = False
|
|
||||||
continue
|
|
||||||
|
|
||||||
# Test if the specified tag matches the commit in the package.py
|
|
||||||
# We retrieve the commit associated with a tag and compare it to the
|
|
||||||
# commit that is located in the package.py file.
|
|
||||||
if "tag" in pkg.versions[version]:
|
|
||||||
tag = pkg.versions[version]["tag"]
|
|
||||||
try:
|
|
||||||
with fs.working_dir(stage.source_path):
|
|
||||||
found_commit = fetcher.git(
|
|
||||||
"rev-list", "-n", "1", tag, output=str, error=str
|
|
||||||
).strip()
|
|
||||||
except spack.util.executable.ProcessError:
|
|
||||||
tty.error(
|
|
||||||
f"Invalid tag for {pkg.name}@{version}\n"
|
|
||||||
f" {tag} could not be found in the git repository."
|
|
||||||
)
|
|
||||||
valid_commit = False
|
|
||||||
continue
|
|
||||||
|
|
||||||
if found_commit != known_commit:
|
|
||||||
tty.error(
|
|
||||||
f"Mismatched tag <-> commit found for {pkg.name}@{version}\n"
|
|
||||||
f" [package.py] {known_commit}\n"
|
|
||||||
f" [Downloaded] {found_commit}"
|
|
||||||
)
|
|
||||||
valid_commit = False
|
|
||||||
continue
|
|
||||||
|
|
||||||
# If we have downloaded the repository, found the commit, and compared
|
|
||||||
# the tag (if specified) we can conclude that the version is pointing
|
|
||||||
# at what we would expect.
|
|
||||||
tty.info(f"Validated {pkg.name}@{version} --> {known_commit}")
|
|
||||||
|
|
||||||
return valid_commit
|
|
||||||
|
|
||||||
|
|
||||||
def ci_verify_versions(args):
|
|
||||||
"""validate version checksum & commits between git refs
|
|
||||||
This command takes a from_ref and to_ref arguments and
|
|
||||||
then parses the git diff between the two to determine which packages
|
|
||||||
have been modified verifies the new checksums inside of them.
|
|
||||||
"""
|
|
||||||
# Get a list of all packages that have been changed or added
|
|
||||||
# between from_ref and to_ref
|
|
||||||
pkgs = spack.repo.get_all_package_diffs(
|
|
||||||
"AC", spack.repo.builtin_repo(), args.from_ref, args.to_ref
|
|
||||||
)
|
|
||||||
|
|
||||||
failed_version = False
|
|
||||||
for pkg_name in pkgs:
|
|
||||||
spec = spack.spec.Spec(pkg_name)
|
|
||||||
pkg = spack.repo.PATH.get_pkg_class(spec.name)(spec)
|
|
||||||
path = spack.repo.PATH.package_path(pkg_name)
|
|
||||||
|
|
||||||
# Skip checking manual download packages and trust the maintainers
|
|
||||||
if pkg.manual_download:
|
|
||||||
tty.warn(f"Skipping manual download package: {pkg_name}")
|
|
||||||
continue
|
|
||||||
|
|
||||||
# Store versions checksums / commits for future loop
|
|
||||||
checksums_version_dict = {}
|
|
||||||
commits_version_dict = {}
|
|
||||||
for version in pkg.versions:
|
|
||||||
# If the package version defines a sha256 we'll use that as the high entropy
|
|
||||||
# string to detect which versions have been added between from_ref and to_ref
|
|
||||||
if "sha256" in pkg.versions[version]:
|
|
||||||
checksums_version_dict[pkg.versions[version]["sha256"]] = version
|
|
||||||
|
|
||||||
# If a package version instead defines a commit we'll use that as a
|
|
||||||
# high entropy string to detect new versions.
|
|
||||||
elif "commit" in pkg.versions[version]:
|
|
||||||
commits_version_dict[pkg.versions[version]["commit"]] = version
|
|
||||||
|
|
||||||
# TODO: enforce every version have a commit or a sha256 defined if not
|
|
||||||
# an infinite version (there are a lot of package's where this doesn't work yet.)
|
|
||||||
|
|
||||||
with fs.working_dir(os.path.dirname(path)):
|
|
||||||
added_checksums = spack_ci.get_added_versions(
|
|
||||||
checksums_version_dict, path, from_ref=args.from_ref, to_ref=args.to_ref
|
|
||||||
)
|
|
||||||
added_commits = spack_ci.get_added_versions(
|
|
||||||
commits_version_dict, path, from_ref=args.from_ref, to_ref=args.to_ref
|
|
||||||
)
|
|
||||||
|
|
||||||
if added_checksums:
|
|
||||||
failed_version = not validate_standard_versions(pkg, added_checksums) or failed_version
|
|
||||||
|
|
||||||
if added_commits:
|
|
||||||
failed_version = not validate_git_versions(pkg, added_commits) or failed_version
|
|
||||||
|
|
||||||
if failed_version:
|
|
||||||
sys.exit(1)
|
|
||||||
|
|
||||||
|
|
||||||
def ci(parser, args):
|
def ci(parser, args):
|
||||||
if args.func:
|
if args.func:
|
||||||
return args.func(args)
|
return args.func(args)
|
||||||
|
@@ -63,7 +63,7 @@ def setup_parser(subparser):
|
|||||||
)
|
)
|
||||||
|
|
||||||
# List
|
# List
|
||||||
list_parser = sp.add_parser("list", aliases=["ls"], help="list available compilers")
|
list_parser = sp.add_parser("list", help="list available compilers")
|
||||||
list_parser.add_argument(
|
list_parser.add_argument(
|
||||||
"--scope", action=arguments.ConfigScope, help="configuration scope to read from"
|
"--scope", action=arguments.ConfigScope, help="configuration scope to read from"
|
||||||
)
|
)
|
||||||
@@ -216,6 +216,5 @@ def compiler(parser, args):
|
|||||||
"rm": compiler_remove,
|
"rm": compiler_remove,
|
||||||
"info": compiler_info,
|
"info": compiler_info,
|
||||||
"list": compiler_list,
|
"list": compiler_list,
|
||||||
"ls": compiler_list,
|
|
||||||
}
|
}
|
||||||
action[args.compiler_command](args)
|
action[args.compiler_command](args)
|
||||||
|
@@ -350,12 +350,9 @@ def _config_change(config_path, match_spec_str=None):
|
|||||||
if spack.config.get(key_path, scope=scope):
|
if spack.config.get(key_path, scope=scope):
|
||||||
ideal_scope_to_modify = scope
|
ideal_scope_to_modify = scope
|
||||||
break
|
break
|
||||||
# If we find our key in a specific scope, that's the one we want
|
|
||||||
# to modify. Otherwise we use the default write scope.
|
|
||||||
write_scope = ideal_scope_to_modify or spack.config.default_modify_scope()
|
|
||||||
|
|
||||||
update_path = f"{key_path}:[{str(spec)}]"
|
update_path = f"{key_path}:[{str(spec)}]"
|
||||||
spack.config.add(update_path, scope=write_scope)
|
spack.config.add(update_path, scope=ideal_scope_to_modify)
|
||||||
else:
|
else:
|
||||||
raise ValueError("'config change' can currently only change 'require' sections")
|
raise ValueError("'config change' can currently only change 'require' sections")
|
||||||
|
|
||||||
|
@@ -23,7 +23,7 @@
|
|||||||
from spack.util.editor import editor
|
from spack.util.editor import editor
|
||||||
from spack.util.executable import which
|
from spack.util.executable import which
|
||||||
from spack.util.format import get_version_lines
|
from spack.util.format import get_version_lines
|
||||||
from spack.util.naming import pkg_name_to_class_name, simplify_name
|
from spack.util.naming import mod_to_class, simplify_name, valid_fully_qualified_module_name
|
||||||
|
|
||||||
description = "create a new package file"
|
description = "create a new package file"
|
||||||
section = "packaging"
|
section = "packaging"
|
||||||
@@ -95,7 +95,7 @@ class BundlePackageTemplate:
|
|||||||
|
|
||||||
def __init__(self, name: str, versions, languages: List[str]):
|
def __init__(self, name: str, versions, languages: List[str]):
|
||||||
self.name = name
|
self.name = name
|
||||||
self.class_name = pkg_name_to_class_name(name)
|
self.class_name = mod_to_class(name)
|
||||||
self.versions = versions
|
self.versions = versions
|
||||||
self.languages = languages
|
self.languages = languages
|
||||||
|
|
||||||
@@ -572,7 +572,7 @@ def edit(self, spec, prefix):
|
|||||||
class IntelPackageTemplate(PackageTemplate):
|
class IntelPackageTemplate(PackageTemplate):
|
||||||
"""Provides appropriate overrides for licensed Intel software"""
|
"""Provides appropriate overrides for licensed Intel software"""
|
||||||
|
|
||||||
base_class_name = "IntelOneApiPackage"
|
base_class_name = "IntelPackage"
|
||||||
|
|
||||||
body_def = """\
|
body_def = """\
|
||||||
# FIXME: Override `setup_environment` if necessary."""
|
# FIXME: Override `setup_environment` if necessary."""
|
||||||
@@ -874,7 +874,7 @@ def get_name(name, url):
|
|||||||
|
|
||||||
result = simplify_name(result)
|
result = simplify_name(result)
|
||||||
|
|
||||||
if not re.match(r"^[a-z0-9-]+$", result):
|
if not valid_fully_qualified_module_name(result):
|
||||||
tty.die("Package name can only contain a-z, 0-9, and '-'")
|
tty.die("Package name can only contain a-z, 0-9, and '-'")
|
||||||
|
|
||||||
return result
|
return result
|
||||||
|
@@ -55,7 +55,7 @@ def dependencies(parser, args):
|
|||||||
env = ev.active_environment()
|
env = ev.active_environment()
|
||||||
spec = spack.cmd.disambiguate_spec(specs[0], env)
|
spec = spack.cmd.disambiguate_spec(specs[0], env)
|
||||||
|
|
||||||
format_string = "{name}{@version}{/hash:7}{%compiler}"
|
format_string = "{name}{@version}{%compiler}{/hash:7}"
|
||||||
if sys.stdout.isatty():
|
if sys.stdout.isatty():
|
||||||
tty.msg("Dependencies of %s" % spec.format(format_string, color=True))
|
tty.msg("Dependencies of %s" % spec.format(format_string, color=True))
|
||||||
deps = spack.store.STORE.db.installed_relatives(
|
deps = spack.store.STORE.db.installed_relatives(
|
||||||
|
@@ -93,7 +93,7 @@ def dependents(parser, args):
|
|||||||
env = ev.active_environment()
|
env = ev.active_environment()
|
||||||
spec = spack.cmd.disambiguate_spec(specs[0], env)
|
spec = spack.cmd.disambiguate_spec(specs[0], env)
|
||||||
|
|
||||||
format_string = "{name}{@version}{/hash:7}{%compiler}"
|
format_string = "{name}{@version}{%compiler}{/hash:7}"
|
||||||
if sys.stdout.isatty():
|
if sys.stdout.isatty():
|
||||||
tty.msg("Dependents of %s" % spec.cformat(format_string))
|
tty.msg("Dependents of %s" % spec.cformat(format_string))
|
||||||
deps = spack.store.STORE.db.installed_relatives(spec, "parents", args.transitive)
|
deps = spack.store.STORE.db.installed_relatives(spec, "parents", args.transitive)
|
||||||
|
@@ -3,13 +3,11 @@
|
|||||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||||
import os
|
import os
|
||||||
import shutil
|
import shutil
|
||||||
from typing import Optional
|
|
||||||
|
|
||||||
import llnl.util.tty as tty
|
import llnl.util.tty as tty
|
||||||
|
|
||||||
import spack.cmd
|
import spack.cmd
|
||||||
import spack.config
|
import spack.config
|
||||||
import spack.environment
|
|
||||||
import spack.fetch_strategy
|
import spack.fetch_strategy
|
||||||
import spack.repo
|
import spack.repo
|
||||||
import spack.spec
|
import spack.spec
|
||||||
@@ -33,33 +31,37 @@ def setup_parser(subparser):
|
|||||||
"--no-clone",
|
"--no-clone",
|
||||||
action="store_false",
|
action="store_false",
|
||||||
dest="clone",
|
dest="clone",
|
||||||
|
default=None,
|
||||||
help="do not clone, the package already exists at the source path",
|
help="do not clone, the package already exists at the source path",
|
||||||
)
|
)
|
||||||
clone_group.add_argument(
|
clone_group.add_argument(
|
||||||
"--clone",
|
"--clone",
|
||||||
action="store_true",
|
action="store_true",
|
||||||
dest="clone",
|
dest="clone",
|
||||||
default=True,
|
default=None,
|
||||||
help=(
|
help="clone the package even if the path already exists",
|
||||||
"(default) clone the package unless the path already exists, "
|
|
||||||
"use --force to overwrite"
|
|
||||||
),
|
|
||||||
)
|
)
|
||||||
|
|
||||||
subparser.add_argument(
|
subparser.add_argument(
|
||||||
"-f", "--force", help="remove any files or directories that block cloning source code"
|
"-f", "--force", help="remove any files or directories that block cloning source code"
|
||||||
)
|
)
|
||||||
|
|
||||||
subparser.add_argument(
|
|
||||||
"-r",
|
|
||||||
"--recursive",
|
|
||||||
action="store_true",
|
|
||||||
help="traverse nodes of the graph to mark everything up to the root as a develop spec",
|
|
||||||
)
|
|
||||||
|
|
||||||
arguments.add_common_arguments(subparser, ["spec"])
|
arguments.add_common_arguments(subparser, ["spec"])
|
||||||
|
|
||||||
|
|
||||||
|
def _update_config(spec, path):
|
||||||
|
find_fn = lambda section: spec.name in section
|
||||||
|
|
||||||
|
entry = {"spec": str(spec)}
|
||||||
|
if path != spec.name:
|
||||||
|
entry["path"] = path
|
||||||
|
|
||||||
|
def change_fn(section):
|
||||||
|
section[spec.name] = entry
|
||||||
|
|
||||||
|
spack.config.change_or_add("develop", find_fn, change_fn)
|
||||||
|
|
||||||
|
|
||||||
def _retrieve_develop_source(spec: spack.spec.Spec, abspath: str) -> None:
|
def _retrieve_develop_source(spec: spack.spec.Spec, abspath: str) -> None:
|
||||||
# "steal" the source code via staging API. We ask for a stage
|
# "steal" the source code via staging API. We ask for a stage
|
||||||
# to be created, then copy it afterwards somewhere else. It would be
|
# to be created, then copy it afterwards somewhere else. It would be
|
||||||
@@ -81,43 +83,44 @@ def _retrieve_develop_source(spec: spack.spec.Spec, abspath: str) -> None:
|
|||||||
package.stage.steal_source(abspath)
|
package.stage.steal_source(abspath)
|
||||||
|
|
||||||
|
|
||||||
def assure_concrete_spec(env: spack.environment.Environment, spec: spack.spec.Spec):
|
def develop(parser, args):
|
||||||
version = spec.versions.concrete_range_as_version
|
# Note: we could put develop specs in any scope, but I assume
|
||||||
if not version:
|
# users would only ever want to do this for either (a) an active
|
||||||
# first check environment for a matching concrete spec
|
# env or (b) a specified config file (e.g. that is included by
|
||||||
matching_specs = env.all_matching_specs(spec)
|
# an environment)
|
||||||
if matching_specs:
|
# TODO: when https://github.com/spack/spack/pull/35307 is merged,
|
||||||
version = matching_specs[0].version
|
# an active env is not required if a scope is specified
|
||||||
test_spec = spack.spec.Spec(f"{spec}@{version}")
|
env = spack.cmd.require_active_env(cmd_name="develop")
|
||||||
for m_spec in matching_specs:
|
if not args.spec:
|
||||||
if not m_spec.satisfies(test_spec):
|
if args.clone is False:
|
||||||
raise SpackError(
|
raise SpackError("No spec provided to spack develop command")
|
||||||
f"{spec.name}: has multiple concrete instances in the graph that can't be"
|
|
||||||
" satisified by a single develop spec. To use `spack develop` ensure one"
|
|
||||||
" of the following:"
|
|
||||||
f"\n a) {spec.name} nodes can satisfy the same develop spec (minimally "
|
|
||||||
"this means they all share the same version)"
|
|
||||||
f"\n b) Provide a concrete develop spec ({spec.name}@[version]) to clearly"
|
|
||||||
" indicate what should be developed"
|
|
||||||
)
|
|
||||||
else:
|
|
||||||
# look up the maximum version so infintiy versions are preferred for develop
|
|
||||||
version = max(spack.repo.PATH.get_pkg_class(spec.fullname).versions.keys())
|
|
||||||
tty.msg(f"Defaulting to highest version: {spec.name}@{version}")
|
|
||||||
spec.versions = spack.version.VersionList([version])
|
|
||||||
|
|
||||||
|
# download all dev specs
|
||||||
|
for name, entry in env.dev_specs.items():
|
||||||
|
path = entry.get("path", name)
|
||||||
|
abspath = spack.util.path.canonicalize_path(path, default_wd=env.path)
|
||||||
|
|
||||||
def setup_src_code(spec: spack.spec.Spec, src_path: str, clone: bool = True, force: bool = False):
|
if os.path.exists(abspath):
|
||||||
"""
|
msg = "Skipping developer download of %s" % entry["spec"]
|
||||||
Handle checking, cloning or overwriting source code
|
msg += " because its path already exists."
|
||||||
"""
|
tty.msg(msg)
|
||||||
assert spec.versions
|
continue
|
||||||
|
|
||||||
if clone:
|
# Both old syntax `spack develop pkg@x` and new syntax `spack develop pkg@=x`
|
||||||
_clone(spec, src_path, force)
|
# are currently supported.
|
||||||
|
spec = spack.spec.parse_with_version_concrete(entry["spec"])
|
||||||
|
_retrieve_develop_source(spec, abspath)
|
||||||
|
|
||||||
if not clone and not os.path.exists(src_path):
|
if not env.dev_specs:
|
||||||
raise SpackError(f"Provided path {src_path} does not exist")
|
tty.warn("No develop specs to download")
|
||||||
|
|
||||||
|
return
|
||||||
|
|
||||||
|
specs = spack.cmd.parse_specs(args.spec)
|
||||||
|
if len(specs) > 1:
|
||||||
|
raise SpackError("spack develop requires at most one named spec")
|
||||||
|
|
||||||
|
spec = specs[0]
|
||||||
|
|
||||||
version = spec.versions.concrete_range_as_version
|
version = spec.versions.concrete_range_as_version
|
||||||
if not version:
|
if not version:
|
||||||
@@ -126,114 +129,40 @@ def setup_src_code(spec: spack.spec.Spec, src_path: str, clone: bool = True, for
|
|||||||
tty.msg(f"Defaulting to highest version: {spec.name}@{version}")
|
tty.msg(f"Defaulting to highest version: {spec.name}@{version}")
|
||||||
spec.versions = spack.version.VersionList([version])
|
spec.versions = spack.version.VersionList([version])
|
||||||
|
|
||||||
|
# If user does not specify --path, we choose to create a directory in the
|
||||||
|
# active environment's directory, named after the spec
|
||||||
|
path = args.path or spec.name
|
||||||
|
if not os.path.isabs(path):
|
||||||
|
abspath = spack.util.path.canonicalize_path(path, default_wd=env.path)
|
||||||
|
else:
|
||||||
|
abspath = path
|
||||||
|
|
||||||
def _update_config(spec, path):
|
# clone default: only if the path doesn't exist
|
||||||
find_fn = lambda section: spec.name in section
|
clone = args.clone
|
||||||
|
if clone is None:
|
||||||
|
clone = not os.path.exists(abspath)
|
||||||
|
|
||||||
entry = {"spec": str(spec)}
|
if not clone and not os.path.exists(abspath):
|
||||||
if path and path != spec.name:
|
raise SpackError("Provided path %s does not exist" % abspath)
|
||||||
entry["path"] = path
|
|
||||||
|
|
||||||
def change_fn(section):
|
if clone:
|
||||||
section[spec.name] = entry
|
if os.path.exists(abspath):
|
||||||
|
if args.force:
|
||||||
|
shutil.rmtree(abspath)
|
||||||
|
else:
|
||||||
|
msg = "Path %s already exists and cannot be cloned to." % abspath
|
||||||
|
msg += " Use `spack develop -f` to overwrite."
|
||||||
|
raise SpackError(msg)
|
||||||
|
|
||||||
spack.config.change_or_add("develop", find_fn, change_fn)
|
_retrieve_develop_source(spec, abspath)
|
||||||
|
|
||||||
|
|
||||||
def update_env(
|
|
||||||
env: spack.environment.Environment,
|
|
||||||
spec: spack.spec.Spec,
|
|
||||||
specified_path: Optional[str] = None,
|
|
||||||
build_dir: Optional[str] = None,
|
|
||||||
):
|
|
||||||
"""
|
|
||||||
Update the spack.yaml file with additions or changes from a develop call
|
|
||||||
"""
|
|
||||||
tty.debug(f"Updating develop config for {env.name} transactionally")
|
|
||||||
|
|
||||||
if not specified_path:
|
|
||||||
dev_entry = env.dev_specs.get(spec.name)
|
|
||||||
if dev_entry:
|
|
||||||
specified_path = dev_entry.get("path", None)
|
|
||||||
|
|
||||||
|
tty.debug("Updating develop config for {0} transactionally".format(env.name))
|
||||||
with env.write_transaction():
|
with env.write_transaction():
|
||||||
if build_dir is not None:
|
if args.build_directory is not None:
|
||||||
spack.config.add(
|
spack.config.add(
|
||||||
f"packages:{spec.name}:package_attributes:build_directory:{build_dir}",
|
"packages:{}:package_attributes:build_directory:{}".format(
|
||||||
|
spec.name, args.build_directory
|
||||||
|
),
|
||||||
env.scope_name,
|
env.scope_name,
|
||||||
)
|
)
|
||||||
# add develop spec and update path
|
_update_config(spec, path)
|
||||||
_update_config(spec, specified_path)
|
|
||||||
|
|
||||||
|
|
||||||
def _clone(spec: spack.spec.Spec, abspath: str, force: bool = False):
|
|
||||||
if os.path.exists(abspath):
|
|
||||||
if force:
|
|
||||||
shutil.rmtree(abspath)
|
|
||||||
else:
|
|
||||||
msg = f"Skipping developer download of {spec.name}"
|
|
||||||
msg += f" because its path {abspath} already exists."
|
|
||||||
tty.msg(msg)
|
|
||||||
return
|
|
||||||
|
|
||||||
# cloning can take a while and it's nice to get a message for the longer clones
|
|
||||||
tty.msg(f"Cloning source code for {spec}")
|
|
||||||
_retrieve_develop_source(spec, abspath)
|
|
||||||
|
|
||||||
|
|
||||||
def _abs_code_path(
|
|
||||||
env: spack.environment.Environment, spec: spack.spec.Spec, path: Optional[str] = None
|
|
||||||
):
|
|
||||||
src_path = path if path else spec.name
|
|
||||||
return spack.util.path.canonicalize_path(src_path, default_wd=env.path)
|
|
||||||
|
|
||||||
|
|
||||||
def _dev_spec_generator(args, env):
|
|
||||||
"""
|
|
||||||
Generator function to loop over all the develop specs based on how the command is called
|
|
||||||
If no specs are supplied then loop over the develop specs listed in the environment.
|
|
||||||
"""
|
|
||||||
if not args.spec:
|
|
||||||
if args.clone is False:
|
|
||||||
raise SpackError("No spec provided to spack develop command")
|
|
||||||
|
|
||||||
for name, entry in env.dev_specs.items():
|
|
||||||
path = entry.get("path", name)
|
|
||||||
abspath = spack.util.path.canonicalize_path(path, default_wd=env.path)
|
|
||||||
# Both old syntax `spack develop pkg@x` and new syntax `spack develop pkg@=x`
|
|
||||||
# are currently supported.
|
|
||||||
spec = spack.spec.parse_with_version_concrete(entry["spec"])
|
|
||||||
yield spec, abspath
|
|
||||||
else:
|
|
||||||
specs = spack.cmd.parse_specs(args.spec)
|
|
||||||
if (args.path or args.build_directory) and len(specs) > 1:
|
|
||||||
raise SpackError(
|
|
||||||
"spack develop requires at most one named spec when using the --path or"
|
|
||||||
" --build-directory arguments"
|
|
||||||
)
|
|
||||||
|
|
||||||
for spec in specs:
|
|
||||||
if args.recursive:
|
|
||||||
concrete_specs = env.all_matching_specs(spec)
|
|
||||||
if not concrete_specs:
|
|
||||||
tty.warn(
|
|
||||||
f"{spec.name} has no matching concrete specs in the environment and "
|
|
||||||
"will be skipped. `spack develop --recursive` requires a concretized"
|
|
||||||
" environment"
|
|
||||||
)
|
|
||||||
else:
|
|
||||||
for s in concrete_specs:
|
|
||||||
for node_spec in s.traverse(direction="parents", root=True):
|
|
||||||
tty.debug(f"Recursive develop for {node_spec.name}")
|
|
||||||
yield node_spec, _abs_code_path(env, node_spec, args.path)
|
|
||||||
else:
|
|
||||||
yield spec, _abs_code_path(env, spec, args.path)
|
|
||||||
|
|
||||||
|
|
||||||
def develop(parser, args):
|
|
||||||
env = spack.cmd.require_active_env(cmd_name="develop")
|
|
||||||
|
|
||||||
for spec, abspath in _dev_spec_generator(args, env):
|
|
||||||
assure_concrete_spec(env, spec)
|
|
||||||
setup_src_code(spec, abspath, clone=args.clone, force=args.force)
|
|
||||||
update_env(env, spec, args.path, args.build_directory)
|
|
||||||
|
@@ -62,7 +62,7 @@ def setup_parser(subparser):
|
|||||||
"package Spack knows how to find."
|
"package Spack knows how to find."
|
||||||
)
|
)
|
||||||
|
|
||||||
sp.add_parser("list", aliases=["ls"], help="list detectable packages, by repository and name")
|
sp.add_parser("list", help="list detectable packages, by repository and name")
|
||||||
|
|
||||||
read_cray_manifest = sp.add_parser(
|
read_cray_manifest = sp.add_parser(
|
||||||
"read-cray-manifest",
|
"read-cray-manifest",
|
||||||
@@ -259,7 +259,6 @@ def external(parser, args):
|
|||||||
action = {
|
action = {
|
||||||
"find": external_find,
|
"find": external_find,
|
||||||
"list": external_list,
|
"list": external_list,
|
||||||
"ls": external_list,
|
|
||||||
"read-cray-manifest": external_read_cray_manifest,
|
"read-cray-manifest": external_read_cray_manifest,
|
||||||
}
|
}
|
||||||
action[args.external_command](args)
|
action[args.external_command](args)
|
||||||
|
@@ -51,12 +51,6 @@ def setup_parser(subparser):
|
|||||||
"-I", "--install-status", action="store_true", help="show install status of packages"
|
"-I", "--install-status", action="store_true", help="show install status of packages"
|
||||||
)
|
)
|
||||||
|
|
||||||
subparser.add_argument(
|
|
||||||
"--specfile-format",
|
|
||||||
action="store_true",
|
|
||||||
help="show the specfile format for installed deps ",
|
|
||||||
)
|
|
||||||
|
|
||||||
subparser.add_argument(
|
subparser.add_argument(
|
||||||
"-d", "--deps", action="store_true", help="output dependencies along with found specs"
|
"-d", "--deps", action="store_true", help="output dependencies along with found specs"
|
||||||
)
|
)
|
||||||
@@ -286,7 +280,6 @@ def root_decorator(spec, string):
|
|||||||
show_flags=True,
|
show_flags=True,
|
||||||
decorator=root_decorator,
|
decorator=root_decorator,
|
||||||
variants=True,
|
variants=True,
|
||||||
specfile_format=args.specfile_format,
|
|
||||||
)
|
)
|
||||||
|
|
||||||
print()
|
print()
|
||||||
@@ -308,7 +301,6 @@ def root_decorator(spec, string):
|
|||||||
namespace=True,
|
namespace=True,
|
||||||
show_flags=True,
|
show_flags=True,
|
||||||
variants=True,
|
variants=True,
|
||||||
specfile_format=args.specfile_format,
|
|
||||||
)
|
)
|
||||||
print()
|
print()
|
||||||
|
|
||||||
@@ -398,12 +390,7 @@ def find(parser, args):
|
|||||||
if args.show_concretized:
|
if args.show_concretized:
|
||||||
display_results += concretized_but_not_installed
|
display_results += concretized_but_not_installed
|
||||||
cmd.display_specs(
|
cmd.display_specs(
|
||||||
display_results,
|
display_results, args, decorator=decorator, all_headers=True, status_fn=status_fn
|
||||||
args,
|
|
||||||
decorator=decorator,
|
|
||||||
all_headers=True,
|
|
||||||
status_fn=status_fn,
|
|
||||||
specfile_format=args.specfile_format,
|
|
||||||
)
|
)
|
||||||
|
|
||||||
# print number of installed packages last (as the list may be long)
|
# print number of installed packages last (as the list may be long)
|
||||||
|
@@ -73,7 +73,7 @@
|
|||||||
boxlib @B{dim=2} boxlib built for 2 dimensions
|
boxlib @B{dim=2} boxlib built for 2 dimensions
|
||||||
libdwarf @g{%intel} ^libelf@g{%gcc}
|
libdwarf @g{%intel} ^libelf@g{%gcc}
|
||||||
libdwarf, built with intel compiler, linked to libelf built with gcc
|
libdwarf, built with intel compiler, linked to libelf built with gcc
|
||||||
mvapich2 @B{fabrics=psm,mrail,sock} @g{%gcc}
|
mvapich2 @g{%gcc} @B{fabrics=psm,mrail,sock}
|
||||||
mvapich2, built with gcc compiler, with support for multiple fabrics
|
mvapich2, built with gcc compiler, with support for multiple fabrics
|
||||||
"""
|
"""
|
||||||
|
|
||||||
|
@@ -10,13 +10,11 @@
|
|||||||
import re
|
import re
|
||||||
import sys
|
import sys
|
||||||
from html import escape
|
from html import escape
|
||||||
from typing import Type
|
|
||||||
|
|
||||||
import llnl.util.tty as tty
|
import llnl.util.tty as tty
|
||||||
from llnl.util.tty.colify import colify
|
from llnl.util.tty.colify import colify
|
||||||
|
|
||||||
import spack.deptypes as dt
|
import spack.deptypes as dt
|
||||||
import spack.package_base
|
|
||||||
import spack.repo
|
import spack.repo
|
||||||
from spack.cmd.common import arguments
|
from spack.cmd.common import arguments
|
||||||
from spack.version import VersionList
|
from spack.version import VersionList
|
||||||
@@ -141,10 +139,10 @@ def name_only(pkgs, out):
|
|||||||
tty.msg("%d packages" % len(pkgs))
|
tty.msg("%d packages" % len(pkgs))
|
||||||
|
|
||||||
|
|
||||||
def github_url(pkg: Type[spack.package_base.PackageBase]) -> str:
|
def github_url(pkg):
|
||||||
"""Link to a package file on github."""
|
"""Link to a package file on github."""
|
||||||
mod_path = pkg.__module__.replace(".", "/")
|
url = "https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/{0}/package.py"
|
||||||
return f"https://github.com/spack/spack/blob/develop/var/spack/{mod_path}.py"
|
return url.format(pkg.name)
|
||||||
|
|
||||||
|
|
||||||
def rows_for_ncols(elts, ncols):
|
def rows_for_ncols(elts, ncols):
|
||||||
|
@@ -383,10 +383,8 @@ def modules_cmd(parser, args, module_type, callbacks=callbacks):
|
|||||||
query = " ".join(str(s) for s in args.constraint_specs)
|
query = " ".join(str(s) for s in args.constraint_specs)
|
||||||
msg = f"the constraint '{query}' matches multiple packages:\n"
|
msg = f"the constraint '{query}' matches multiple packages:\n"
|
||||||
for s in specs:
|
for s in specs:
|
||||||
spec_fmt = (
|
spec_fmt = "{hash:7} {name}{@version}{%compiler}"
|
||||||
"{hash:7} {name}{@version}{compiler_flags}{variants}"
|
spec_fmt += "{compiler_flags}{variants}{arch=architecture}"
|
||||||
"{arch=architecture} {%compiler}"
|
|
||||||
)
|
|
||||||
msg += "\t" + s.cformat(spec_fmt) + "\n"
|
msg += "\t" + s.cformat(spec_fmt) + "\n"
|
||||||
tty.die(msg, "In this context exactly *one* match is needed.")
|
tty.die(msg, "In this context exactly *one* match is needed.")
|
||||||
|
|
||||||
|
@@ -89,17 +89,17 @@ def setup_parser(subparser):
|
|||||||
|
|
||||||
def pkg_add(args):
|
def pkg_add(args):
|
||||||
"""add a package to the git stage with `git add`"""
|
"""add a package to the git stage with `git add`"""
|
||||||
spack.repo.add_package_to_git_stage(args.packages, spack.repo.builtin_repo())
|
spack.repo.add_package_to_git_stage(args.packages)
|
||||||
|
|
||||||
|
|
||||||
def pkg_list(args):
|
def pkg_list(args):
|
||||||
"""list packages associated with a particular spack git revision"""
|
"""list packages associated with a particular spack git revision"""
|
||||||
colify(spack.repo.list_packages(args.rev, spack.repo.builtin_repo()))
|
colify(spack.repo.list_packages(args.rev))
|
||||||
|
|
||||||
|
|
||||||
def pkg_diff(args):
|
def pkg_diff(args):
|
||||||
"""compare packages available in two different git revisions"""
|
"""compare packages available in two different git revisions"""
|
||||||
u1, u2 = spack.repo.diff_packages(args.rev1, args.rev2, spack.repo.builtin_repo())
|
u1, u2 = spack.repo.diff_packages(args.rev1, args.rev2)
|
||||||
|
|
||||||
if u1:
|
if u1:
|
||||||
print("%s:" % args.rev1)
|
print("%s:" % args.rev1)
|
||||||
@@ -114,23 +114,21 @@ def pkg_diff(args):
|
|||||||
|
|
||||||
def pkg_removed(args):
|
def pkg_removed(args):
|
||||||
"""show packages removed since a commit"""
|
"""show packages removed since a commit"""
|
||||||
u1, u2 = spack.repo.diff_packages(args.rev1, args.rev2, spack.repo.builtin_repo())
|
u1, u2 = spack.repo.diff_packages(args.rev1, args.rev2)
|
||||||
if u1:
|
if u1:
|
||||||
colify(sorted(u1))
|
colify(sorted(u1))
|
||||||
|
|
||||||
|
|
||||||
def pkg_added(args):
|
def pkg_added(args):
|
||||||
"""show packages added since a commit"""
|
"""show packages added since a commit"""
|
||||||
u1, u2 = spack.repo.diff_packages(args.rev1, args.rev2, spack.repo.builtin_repo())
|
u1, u2 = spack.repo.diff_packages(args.rev1, args.rev2)
|
||||||
if u2:
|
if u2:
|
||||||
colify(sorted(u2))
|
colify(sorted(u2))
|
||||||
|
|
||||||
|
|
||||||
def pkg_changed(args):
|
def pkg_changed(args):
|
||||||
"""show packages changed since a commit"""
|
"""show packages changed since a commit"""
|
||||||
packages = spack.repo.get_all_package_diffs(
|
packages = spack.repo.get_all_package_diffs(args.type, args.rev1, args.rev2)
|
||||||
args.type, spack.repo.builtin_repo(), args.rev1, args.rev2
|
|
||||||
)
|
|
||||||
|
|
||||||
if packages:
|
if packages:
|
||||||
colify(sorted(packages))
|
colify(sorted(packages))
|
||||||
|
@@ -1,12 +1,7 @@
|
|||||||
# Copyright Spack Project Developers. See COPYRIGHT file for details.
|
# Copyright Spack Project Developers. See COPYRIGHT file for details.
|
||||||
#
|
#
|
||||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||||
import os
|
|
||||||
import shutil
|
|
||||||
|
|
||||||
from llnl.util import tty
|
|
||||||
|
|
||||||
import spack.database
|
|
||||||
import spack.store
|
import spack.store
|
||||||
|
|
||||||
description = "rebuild Spack's package database"
|
description = "rebuild Spack's package database"
|
||||||
@@ -15,11 +10,4 @@
|
|||||||
|
|
||||||
|
|
||||||
def reindex(parser, args):
|
def reindex(parser, args):
|
||||||
current_index = spack.store.STORE.db._index_path
|
|
||||||
if os.path.isfile(current_index):
|
|
||||||
backup = f"{current_index}.bkp"
|
|
||||||
shutil.copy(current_index, backup)
|
|
||||||
tty.msg(f"Created a back-up copy of the DB at {backup}")
|
|
||||||
|
|
||||||
spack.store.STORE.reindex()
|
spack.store.STORE.reindex()
|
||||||
tty.msg(f"The DB at {current_index} has been reindex to v{spack.database._DB_VERSION}")
|
|
||||||
|
@@ -4,7 +4,6 @@
|
|||||||
|
|
||||||
import os
|
import os
|
||||||
import sys
|
import sys
|
||||||
from typing import List
|
|
||||||
|
|
||||||
import llnl.util.tty as tty
|
import llnl.util.tty as tty
|
||||||
|
|
||||||
@@ -25,7 +24,9 @@ def setup_parser(subparser):
|
|||||||
create_parser = sp.add_parser("create", help=repo_create.__doc__)
|
create_parser = sp.add_parser("create", help=repo_create.__doc__)
|
||||||
create_parser.add_argument("directory", help="directory to create the repo in")
|
create_parser.add_argument("directory", help="directory to create the repo in")
|
||||||
create_parser.add_argument(
|
create_parser.add_argument(
|
||||||
"namespace", help="name or namespace to identify packages in the repository"
|
"namespace",
|
||||||
|
help="namespace to identify packages in the repository (defaults to the directory name)",
|
||||||
|
nargs="?",
|
||||||
)
|
)
|
||||||
create_parser.add_argument(
|
create_parser.add_argument(
|
||||||
"-d",
|
"-d",
|
||||||
@@ -137,7 +138,7 @@ def repo_remove(args):
|
|||||||
def repo_list(args):
|
def repo_list(args):
|
||||||
"""show registered repositories and their namespaces"""
|
"""show registered repositories and their namespaces"""
|
||||||
roots = spack.config.get("repos", scope=args.scope)
|
roots = spack.config.get("repos", scope=args.scope)
|
||||||
repos: List[spack.repo.Repo] = []
|
repos = []
|
||||||
for r in roots:
|
for r in roots:
|
||||||
try:
|
try:
|
||||||
repos.append(spack.repo.from_path(r))
|
repos.append(spack.repo.from_path(r))
|
||||||
@@ -145,14 +146,17 @@ def repo_list(args):
|
|||||||
continue
|
continue
|
||||||
|
|
||||||
if sys.stdout.isatty():
|
if sys.stdout.isatty():
|
||||||
tty.msg(f"{len(repos)} package repositor" + ("y." if len(repos) == 1 else "ies."))
|
msg = "%d package repositor" % len(repos)
|
||||||
|
msg += "y." if len(repos) == 1 else "ies."
|
||||||
|
tty.msg(msg)
|
||||||
|
|
||||||
if not repos:
|
if not repos:
|
||||||
return
|
return
|
||||||
|
|
||||||
max_ns_len = max(len(r.namespace) for r in repos)
|
max_ns_len = max(len(r.namespace) for r in repos)
|
||||||
for repo in repos:
|
for repo in repos:
|
||||||
print(f"{repo.namespace:<{max_ns_len + 4}}{repo.package_api_str:<8}{repo.root}")
|
fmt = "%%-%ds%%s" % (max_ns_len + 4)
|
||||||
|
print(fmt % (repo.namespace, repo.root))
|
||||||
|
|
||||||
|
|
||||||
def repo(parser, args):
|
def repo(parser, args):
|
||||||
|
@@ -136,7 +136,20 @@ def solve(parser, args):
|
|||||||
setup_only = set(show) == {"asp"}
|
setup_only = set(show) == {"asp"}
|
||||||
unify = spack.config.get("concretizer:unify")
|
unify = spack.config.get("concretizer:unify")
|
||||||
allow_deprecated = spack.config.get("config:deprecated", False)
|
allow_deprecated = spack.config.get("config:deprecated", False)
|
||||||
if unify == "when_possible":
|
if unify != "when_possible":
|
||||||
|
# set up solver parameters
|
||||||
|
# Note: reuse and other concretizer prefs are passed as configuration
|
||||||
|
result = solver.solve(
|
||||||
|
specs,
|
||||||
|
out=output,
|
||||||
|
timers=args.timers,
|
||||||
|
stats=args.stats,
|
||||||
|
setup_only=setup_only,
|
||||||
|
allow_deprecated=allow_deprecated,
|
||||||
|
)
|
||||||
|
if not setup_only:
|
||||||
|
_process_result(result, show, required_format, kwargs)
|
||||||
|
else:
|
||||||
for idx, result in enumerate(
|
for idx, result in enumerate(
|
||||||
solver.solve_in_rounds(
|
solver.solve_in_rounds(
|
||||||
specs,
|
specs,
|
||||||
@@ -153,29 +166,3 @@ def solve(parser, args):
|
|||||||
print("% END ROUND {0}\n".format(idx))
|
print("% END ROUND {0}\n".format(idx))
|
||||||
if not setup_only:
|
if not setup_only:
|
||||||
_process_result(result, show, required_format, kwargs)
|
_process_result(result, show, required_format, kwargs)
|
||||||
elif unify:
|
|
||||||
# set up solver parameters
|
|
||||||
# Note: reuse and other concretizer prefs are passed as configuration
|
|
||||||
result = solver.solve(
|
|
||||||
specs,
|
|
||||||
out=output,
|
|
||||||
timers=args.timers,
|
|
||||||
stats=args.stats,
|
|
||||||
setup_only=setup_only,
|
|
||||||
allow_deprecated=allow_deprecated,
|
|
||||||
)
|
|
||||||
if not setup_only:
|
|
||||||
_process_result(result, show, required_format, kwargs)
|
|
||||||
else:
|
|
||||||
for spec in specs:
|
|
||||||
tty.msg("SOLVING SPEC:", spec)
|
|
||||||
result = solver.solve(
|
|
||||||
[spec],
|
|
||||||
out=output,
|
|
||||||
timers=args.timers,
|
|
||||||
stats=args.stats,
|
|
||||||
setup_only=setup_only,
|
|
||||||
allow_deprecated=allow_deprecated,
|
|
||||||
)
|
|
||||||
if not setup_only:
|
|
||||||
_process_result(result, show, required_format, kwargs)
|
|
||||||
|
@@ -6,9 +6,8 @@
|
|||||||
import os
|
import os
|
||||||
import re
|
import re
|
||||||
import sys
|
import sys
|
||||||
import warnings
|
|
||||||
from itertools import islice, zip_longest
|
from itertools import islice, zip_longest
|
||||||
from typing import Callable, Dict, List, Optional
|
from typing import Dict, List, Optional
|
||||||
|
|
||||||
import llnl.util.tty as tty
|
import llnl.util.tty as tty
|
||||||
import llnl.util.tty.color as color
|
import llnl.util.tty.color as color
|
||||||
@@ -17,9 +16,6 @@
|
|||||||
import spack.paths
|
import spack.paths
|
||||||
import spack.repo
|
import spack.repo
|
||||||
import spack.util.git
|
import spack.util.git
|
||||||
import spack.util.spack_yaml
|
|
||||||
from spack.spec_parser import SPEC_TOKENIZER, SpecTokens
|
|
||||||
from spack.tokenize import Token
|
|
||||||
from spack.util.executable import Executable, which
|
from spack.util.executable import Executable, which
|
||||||
|
|
||||||
description = "runs source code style checks on spack"
|
description = "runs source code style checks on spack"
|
||||||
@@ -59,7 +55,7 @@ def is_package(f):
|
|||||||
packages, since we allow `from spack import *` and poking globals
|
packages, since we allow `from spack import *` and poking globals
|
||||||
into packages.
|
into packages.
|
||||||
"""
|
"""
|
||||||
return f.startswith("var/spack/") and f.endswith("package.py")
|
return f.startswith("var/spack/repos/") and f.endswith("package.py")
|
||||||
|
|
||||||
|
|
||||||
#: decorator for adding tools to the list
|
#: decorator for adding tools to the list
|
||||||
@@ -202,13 +198,6 @@ def setup_parser(subparser):
|
|||||||
action="append",
|
action="append",
|
||||||
help="specify tools to skip (choose from %s)" % ", ".join(tool_names),
|
help="specify tools to skip (choose from %s)" % ", ".join(tool_names),
|
||||||
)
|
)
|
||||||
subparser.add_argument(
|
|
||||||
"--spec-strings",
|
|
||||||
action="store_true",
|
|
||||||
help="upgrade spec strings in Python, JSON and YAML files for compatibility with Spack "
|
|
||||||
"v1.0 and v0.x. Example: spack style --spec-strings $(git ls-files). Note: this flag "
|
|
||||||
"will be removed in Spack v1.0.",
|
|
||||||
)
|
|
||||||
|
|
||||||
subparser.add_argument("files", nargs=argparse.REMAINDER, help="specific files to check")
|
subparser.add_argument("files", nargs=argparse.REMAINDER, help="specific files to check")
|
||||||
|
|
||||||
@@ -380,7 +369,7 @@ def run_black(black_cmd, file_list, args):
|
|||||||
def _module_part(root: str, expr: str):
|
def _module_part(root: str, expr: str):
|
||||||
parts = expr.split(".")
|
parts = expr.split(".")
|
||||||
# spack.pkg is for repositories, don't try to resolve it here.
|
# spack.pkg is for repositories, don't try to resolve it here.
|
||||||
if expr.startswith(spack.repo.PKG_MODULE_PREFIX_V1) or expr == "spack.pkg":
|
if ".".join(parts[:2]) == spack.repo.ROOT_PYTHON_NAMESPACE:
|
||||||
return None
|
return None
|
||||||
while parts:
|
while parts:
|
||||||
f1 = os.path.join(root, "lib", "spack", *parts) + ".py"
|
f1 = os.path.join(root, "lib", "spack", *parts) + ".py"
|
||||||
@@ -518,196 +507,7 @@ def _bootstrap_dev_dependencies():
|
|||||||
spack.bootstrap.ensure_environment_dependencies()
|
spack.bootstrap.ensure_environment_dependencies()
|
||||||
|
|
||||||
|
|
||||||
IS_PROBABLY_COMPILER = re.compile(r"%[a-zA-Z_][a-zA-Z0-9\-]")
|
|
||||||
|
|
||||||
|
|
||||||
def _spec_str_reorder_compiler(idx: int, blocks: List[List[Token]]) -> None:
|
|
||||||
# only move the compiler to the back if it exists and is not already at the end
|
|
||||||
if not 0 <= idx < len(blocks) - 1:
|
|
||||||
return
|
|
||||||
# if there's only whitespace after the compiler, don't move it
|
|
||||||
if all(token.kind == SpecTokens.WS for block in blocks[idx + 1 :] for token in block):
|
|
||||||
return
|
|
||||||
# rotate left and always add at least one WS token between compiler and previous token
|
|
||||||
compiler_block = blocks.pop(idx)
|
|
||||||
if compiler_block[0].kind != SpecTokens.WS:
|
|
||||||
compiler_block.insert(0, Token(SpecTokens.WS, " "))
|
|
||||||
# delete the WS tokens from the new first block if it was at the very start, to prevent leading
|
|
||||||
# WS tokens.
|
|
||||||
while idx == 0 and blocks[0][0].kind == SpecTokens.WS:
|
|
||||||
blocks[0].pop(0)
|
|
||||||
blocks.append(compiler_block)
|
|
||||||
|
|
||||||
|
|
||||||
def _spec_str_format(spec_str: str) -> Optional[str]:
|
|
||||||
"""Given any string, try to parse as spec string, and rotate the compiler token to the end
|
|
||||||
of each spec instance. Returns the formatted string if it was changed, otherwise None."""
|
|
||||||
# We parse blocks of tokens that include leading whitespace, and move the compiler block to
|
|
||||||
# the end when we hit a dependency ^... or the end of a string.
|
|
||||||
# [@3.1][ +foo][ +bar][ %gcc@3.1][ +baz]
|
|
||||||
# [@3.1][ +foo][ +bar][ +baz][ %gcc@3.1]
|
|
||||||
|
|
||||||
current_block: List[Token] = []
|
|
||||||
blocks: List[List[Token]] = []
|
|
||||||
compiler_block_idx = -1
|
|
||||||
in_edge_attr = False
|
|
||||||
|
|
||||||
for token in SPEC_TOKENIZER.tokenize(spec_str):
|
|
||||||
if token.kind == SpecTokens.UNEXPECTED:
|
|
||||||
# parsing error, we cannot fix this string.
|
|
||||||
return None
|
|
||||||
elif token.kind in (SpecTokens.COMPILER, SpecTokens.COMPILER_AND_VERSION):
|
|
||||||
# multiple compilers are not supported in Spack v0.x, so early return
|
|
||||||
if compiler_block_idx != -1:
|
|
||||||
return None
|
|
||||||
current_block.append(token)
|
|
||||||
blocks.append(current_block)
|
|
||||||
current_block = []
|
|
||||||
compiler_block_idx = len(blocks) - 1
|
|
||||||
elif token.kind in (
|
|
||||||
SpecTokens.START_EDGE_PROPERTIES,
|
|
||||||
SpecTokens.DEPENDENCY,
|
|
||||||
SpecTokens.UNQUALIFIED_PACKAGE_NAME,
|
|
||||||
SpecTokens.FULLY_QUALIFIED_PACKAGE_NAME,
|
|
||||||
):
|
|
||||||
_spec_str_reorder_compiler(compiler_block_idx, blocks)
|
|
||||||
compiler_block_idx = -1
|
|
||||||
if token.kind == SpecTokens.START_EDGE_PROPERTIES:
|
|
||||||
in_edge_attr = True
|
|
||||||
current_block.append(token)
|
|
||||||
blocks.append(current_block)
|
|
||||||
current_block = []
|
|
||||||
elif token.kind == SpecTokens.END_EDGE_PROPERTIES:
|
|
||||||
in_edge_attr = False
|
|
||||||
current_block.append(token)
|
|
||||||
blocks.append(current_block)
|
|
||||||
current_block = []
|
|
||||||
elif in_edge_attr:
|
|
||||||
current_block.append(token)
|
|
||||||
elif token.kind in (
|
|
||||||
SpecTokens.VERSION_HASH_PAIR,
|
|
||||||
SpecTokens.GIT_VERSION,
|
|
||||||
SpecTokens.VERSION,
|
|
||||||
SpecTokens.PROPAGATED_BOOL_VARIANT,
|
|
||||||
SpecTokens.BOOL_VARIANT,
|
|
||||||
SpecTokens.PROPAGATED_KEY_VALUE_PAIR,
|
|
||||||
SpecTokens.KEY_VALUE_PAIR,
|
|
||||||
SpecTokens.DAG_HASH,
|
|
||||||
):
|
|
||||||
current_block.append(token)
|
|
||||||
blocks.append(current_block)
|
|
||||||
current_block = []
|
|
||||||
elif token.kind == SpecTokens.WS:
|
|
||||||
current_block.append(token)
|
|
||||||
else:
|
|
||||||
raise ValueError(f"unexpected token {token}")
|
|
||||||
|
|
||||||
if current_block:
|
|
||||||
blocks.append(current_block)
|
|
||||||
_spec_str_reorder_compiler(compiler_block_idx, blocks)
|
|
||||||
|
|
||||||
new_spec_str = "".join(token.value for block in blocks for token in block)
|
|
||||||
return new_spec_str if spec_str != new_spec_str else None
|
|
||||||
|
|
||||||
|
|
||||||
SpecStrHandler = Callable[[str, int, int, str, str], None]
|
|
||||||
|
|
||||||
|
|
||||||
def _spec_str_default_handler(path: str, line: int, col: int, old: str, new: str):
|
|
||||||
"""A SpecStrHandler that prints formatted spec strings and their locations."""
|
|
||||||
print(f"{path}:{line}:{col}: `{old}` -> `{new}`")
|
|
||||||
|
|
||||||
|
|
||||||
def _spec_str_fix_handler(path: str, line: int, col: int, old: str, new: str):
|
|
||||||
"""A SpecStrHandler that updates formatted spec strings in files."""
|
|
||||||
with open(path, "r", encoding="utf-8") as f:
|
|
||||||
lines = f.readlines()
|
|
||||||
new_line = lines[line - 1].replace(old, new)
|
|
||||||
if new_line == lines[line - 1]:
|
|
||||||
tty.warn(f"{path}:{line}:{col}: could not apply fix: `{old}` -> `{new}`")
|
|
||||||
return
|
|
||||||
lines[line - 1] = new_line
|
|
||||||
print(f"{path}:{line}:{col}: fixed `{old}` -> `{new}`")
|
|
||||||
with open(path, "w", encoding="utf-8") as f:
|
|
||||||
f.writelines(lines)
|
|
||||||
|
|
||||||
|
|
||||||
def _spec_str_ast(path: str, tree: ast.AST, handler: SpecStrHandler) -> None:
|
|
||||||
"""Walk the AST of a Python file and apply handler to formatted spec strings."""
|
|
||||||
has_constant = sys.version_info >= (3, 8)
|
|
||||||
for node in ast.walk(tree):
|
|
||||||
if has_constant and isinstance(node, ast.Constant) and isinstance(node.value, str):
|
|
||||||
current_str = node.value
|
|
||||||
elif not has_constant and isinstance(node, ast.Str):
|
|
||||||
current_str = node.s
|
|
||||||
else:
|
|
||||||
continue
|
|
||||||
if not IS_PROBABLY_COMPILER.search(current_str):
|
|
||||||
continue
|
|
||||||
new = _spec_str_format(current_str)
|
|
||||||
if new is not None:
|
|
||||||
handler(path, node.lineno, node.col_offset, current_str, new)
|
|
||||||
|
|
||||||
|
|
||||||
def _spec_str_json_and_yaml(path: str, data: dict, handler: SpecStrHandler) -> None:
|
|
||||||
"""Walk a YAML or JSON data structure and apply handler to formatted spec strings."""
|
|
||||||
queue = [data]
|
|
||||||
seen = set()
|
|
||||||
|
|
||||||
while queue:
|
|
||||||
current = queue.pop(0)
|
|
||||||
if id(current) in seen:
|
|
||||||
continue
|
|
||||||
seen.add(id(current))
|
|
||||||
if isinstance(current, dict):
|
|
||||||
queue.extend(current.values())
|
|
||||||
queue.extend(current.keys())
|
|
||||||
elif isinstance(current, list):
|
|
||||||
queue.extend(current)
|
|
||||||
elif isinstance(current, str) and IS_PROBABLY_COMPILER.search(current):
|
|
||||||
new = _spec_str_format(current)
|
|
||||||
if new is not None:
|
|
||||||
mark = getattr(current, "_start_mark", None)
|
|
||||||
if mark:
|
|
||||||
line, col = mark.line + 1, mark.column + 1
|
|
||||||
else:
|
|
||||||
line, col = 0, 0
|
|
||||||
handler(path, line, col, current, new)
|
|
||||||
|
|
||||||
|
|
||||||
def _check_spec_strings(
|
|
||||||
paths: List[str], handler: SpecStrHandler = _spec_str_default_handler
|
|
||||||
) -> None:
|
|
||||||
"""Open Python, JSON and YAML files, and format their string literals that look like spec
|
|
||||||
strings. A handler is called for each formatting, which can be used to print or apply fixes."""
|
|
||||||
for path in paths:
|
|
||||||
is_json_or_yaml = path.endswith(".json") or path.endswith(".yaml") or path.endswith(".yml")
|
|
||||||
is_python = path.endswith(".py")
|
|
||||||
if not is_json_or_yaml and not is_python:
|
|
||||||
continue
|
|
||||||
|
|
||||||
try:
|
|
||||||
with open(path, "r", encoding="utf-8") as f:
|
|
||||||
# skip files that are likely too large to be user code or config
|
|
||||||
if os.fstat(f.fileno()).st_size > 1024 * 1024:
|
|
||||||
warnings.warn(f"skipping {path}: too large.")
|
|
||||||
continue
|
|
||||||
if is_json_or_yaml:
|
|
||||||
_spec_str_json_and_yaml(path, spack.util.spack_yaml.load_config(f), handler)
|
|
||||||
elif is_python:
|
|
||||||
_spec_str_ast(path, ast.parse(f.read()), handler)
|
|
||||||
except (OSError, spack.util.spack_yaml.SpackYAMLError, SyntaxError, ValueError):
|
|
||||||
warnings.warn(f"skipping {path}")
|
|
||||||
continue
|
|
||||||
|
|
||||||
|
|
||||||
def style(parser, args):
|
def style(parser, args):
|
||||||
if args.spec_strings:
|
|
||||||
if not args.files:
|
|
||||||
tty.die("No files provided to check spec strings.")
|
|
||||||
handler = _spec_str_fix_handler if args.fix else _spec_str_default_handler
|
|
||||||
return _check_spec_strings(args.files, handler)
|
|
||||||
|
|
||||||
# save initial working directory for relativizing paths later
|
# save initial working directory for relativizing paths later
|
||||||
args.initial_working_dir = os.getcwd()
|
args.initial_working_dir = os.getcwd()
|
||||||
|
|
||||||
|
@@ -65,12 +65,6 @@ def setup_parser(subparser):
|
|||||||
run_parser.add_argument(
|
run_parser.add_argument(
|
||||||
"--help-cdash", action="store_true", help="show usage instructions for CDash reporting"
|
"--help-cdash", action="store_true", help="show usage instructions for CDash reporting"
|
||||||
)
|
)
|
||||||
run_parser.add_argument(
|
|
||||||
"--timeout",
|
|
||||||
type=int,
|
|
||||||
default=None,
|
|
||||||
help="maximum time (in seconds) that tests are allowed to run",
|
|
||||||
)
|
|
||||||
|
|
||||||
cd_group = run_parser.add_mutually_exclusive_group()
|
cd_group = run_parser.add_mutually_exclusive_group()
|
||||||
arguments.add_common_arguments(cd_group, ["clean", "dirty"])
|
arguments.add_common_arguments(cd_group, ["clean", "dirty"])
|
||||||
@@ -182,7 +176,7 @@ def test_run(args):
|
|||||||
for spec in specs:
|
for spec in specs:
|
||||||
matching = spack.store.STORE.db.query_local(spec, hashes=hashes, explicit=explicit)
|
matching = spack.store.STORE.db.query_local(spec, hashes=hashes, explicit=explicit)
|
||||||
if spec and not matching:
|
if spec and not matching:
|
||||||
tty.warn(f"No {explicit_str}installed packages match spec {spec}")
|
tty.warn("No {0}installed packages match spec {1}".format(explicit_str, spec))
|
||||||
|
|
||||||
# TODO: Need to write out a log message and/or CDASH Testing
|
# TODO: Need to write out a log message and/or CDASH Testing
|
||||||
# output that package not installed IF continue to process
|
# output that package not installed IF continue to process
|
||||||
@@ -198,7 +192,7 @@ def test_run(args):
|
|||||||
# test_stage_dir
|
# test_stage_dir
|
||||||
test_suite = spack.install_test.TestSuite(specs_to_test, args.alias)
|
test_suite = spack.install_test.TestSuite(specs_to_test, args.alias)
|
||||||
test_suite.ensure_stage()
|
test_suite.ensure_stage()
|
||||||
tty.msg(f"Spack test {test_suite.name}")
|
tty.msg("Spack test %s" % test_suite.name)
|
||||||
|
|
||||||
# Set up reporter
|
# Set up reporter
|
||||||
setattr(args, "package", [s.format() for s in test_suite.specs])
|
setattr(args, "package", [s.format() for s in test_suite.specs])
|
||||||
@@ -210,7 +204,6 @@ def test_run(args):
|
|||||||
dirty=args.dirty,
|
dirty=args.dirty,
|
||||||
fail_first=args.fail_first,
|
fail_first=args.fail_first,
|
||||||
externals=args.externals,
|
externals=args.externals,
|
||||||
timeout=args.timeout,
|
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@@ -17,7 +17,6 @@
|
|||||||
pytest = None # type: ignore
|
pytest = None # type: ignore
|
||||||
|
|
||||||
import llnl.util.filesystem
|
import llnl.util.filesystem
|
||||||
import llnl.util.tty as tty
|
|
||||||
import llnl.util.tty.color as color
|
import llnl.util.tty.color as color
|
||||||
from llnl.util.tty.colify import colify
|
from llnl.util.tty.colify import colify
|
||||||
|
|
||||||
@@ -237,12 +236,6 @@ def unit_test(parser, args, unknown_args):
|
|||||||
pytest_root = spack.extensions.load_extension(args.extension)
|
pytest_root = spack.extensions.load_extension(args.extension)
|
||||||
|
|
||||||
if args.numprocesses is not None and args.numprocesses > 1:
|
if args.numprocesses is not None and args.numprocesses > 1:
|
||||||
try:
|
|
||||||
import xdist # noqa: F401
|
|
||||||
except ImportError:
|
|
||||||
tty.error("parallel unit-test requires pytest-xdist module")
|
|
||||||
return 1
|
|
||||||
|
|
||||||
pytest_args.extend(
|
pytest_args.extend(
|
||||||
[
|
[
|
||||||
"--dist",
|
"--dist",
|
||||||
|
@@ -18,10 +18,6 @@ class Languages(enum.Enum):
|
|||||||
|
|
||||||
|
|
||||||
class CompilerAdaptor:
|
class CompilerAdaptor:
|
||||||
"""Provides access to compiler attributes via `Package.compiler`. Useful for
|
|
||||||
packages which do not yet access compiler properties via `self.spec[language]`.
|
|
||||||
"""
|
|
||||||
|
|
||||||
def __init__(
|
def __init__(
|
||||||
self, compiled_spec: spack.spec.Spec, compilers: Dict[Languages, spack.spec.Spec]
|
self, compiled_spec: spack.spec.Spec, compilers: Dict[Languages, spack.spec.Spec]
|
||||||
) -> None:
|
) -> None:
|
||||||
@@ -83,14 +79,6 @@ def implicit_rpaths(self) -> List[str]:
|
|||||||
result.extend(CompilerPropertyDetector(compiler).implicit_rpaths())
|
result.extend(CompilerPropertyDetector(compiler).implicit_rpaths())
|
||||||
return result
|
return result
|
||||||
|
|
||||||
@property
|
|
||||||
def opt_flags(self) -> List[str]:
|
|
||||||
return next(iter(self.compilers.values())).package.opt_flags
|
|
||||||
|
|
||||||
@property
|
|
||||||
def debug_flags(self) -> List[str]:
|
|
||||||
return next(iter(self.compilers.values())).package.debug_flags
|
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def openmp_flag(self) -> str:
|
def openmp_flag(self) -> str:
|
||||||
return next(iter(self.compilers.values())).package.openmp_flag
|
return next(iter(self.compilers.values())).package.openmp_flag
|
||||||
@@ -152,7 +140,7 @@ def c17_flag(self) -> str:
|
|||||||
@property
|
@property
|
||||||
def c23_flag(self) -> str:
|
def c23_flag(self) -> str:
|
||||||
return self.compilers[Languages.C].package.standard_flag(
|
return self.compilers[Languages.C].package.standard_flag(
|
||||||
language=Languages.C.value, standard="23"
|
language=Languages.C.value, standard="17"
|
||||||
)
|
)
|
||||||
|
|
||||||
@property
|
@property
|
||||||
@@ -202,10 +190,6 @@ def f77(self):
|
|||||||
self._lang_exists_or_raise("f77", lang=Languages.FORTRAN)
|
self._lang_exists_or_raise("f77", lang=Languages.FORTRAN)
|
||||||
return self.compilers[Languages.FORTRAN].package.fortran
|
return self.compilers[Languages.FORTRAN].package.fortran
|
||||||
|
|
||||||
@property
|
|
||||||
def stdcxx_libs(self):
|
|
||||||
return self._maybe_return_attribute("stdcxx_libs", lang=Languages.CXX)
|
|
||||||
|
|
||||||
|
|
||||||
class DeprecatedCompiler(lang.DeprecatedProperty):
|
class DeprecatedCompiler(lang.DeprecatedProperty):
|
||||||
def __init__(self) -> None:
|
def __init__(self) -> None:
|
||||||
|
@@ -7,7 +7,6 @@
|
|||||||
import os
|
import os
|
||||||
import re
|
import re
|
||||||
import sys
|
import sys
|
||||||
import warnings
|
|
||||||
from typing import Any, Dict, List, Optional, Tuple
|
from typing import Any, Dict, List, Optional, Tuple
|
||||||
|
|
||||||
import archspec.cpu
|
import archspec.cpu
|
||||||
@@ -26,6 +25,15 @@
|
|||||||
from spack.operating_systems import windows_os
|
from spack.operating_systems import windows_os
|
||||||
from spack.util.environment import get_path
|
from spack.util.environment import get_path
|
||||||
|
|
||||||
|
package_name_to_compiler_name = {
|
||||||
|
"llvm": "clang",
|
||||||
|
"intel-oneapi-compilers": "oneapi",
|
||||||
|
"llvm-amdgpu": "rocmcc",
|
||||||
|
"intel-oneapi-compilers-classic": "intel",
|
||||||
|
"acfl": "arm",
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
#: Tag used to identify packages providing a compiler
|
#: Tag used to identify packages providing a compiler
|
||||||
COMPILER_TAG = "compiler"
|
COMPILER_TAG = "compiler"
|
||||||
|
|
||||||
@@ -338,15 +346,7 @@ def from_legacy_yaml(compiler_dict: Dict[str, Any]) -> List[spack.spec.Spec]:
|
|||||||
pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
|
pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
|
||||||
pattern = re.compile(r"|".join(finder.search_patterns(pkg=pkg_cls)))
|
pattern = re.compile(r"|".join(finder.search_patterns(pkg=pkg_cls)))
|
||||||
filtered_paths = [x for x in candidate_paths if pattern.search(os.path.basename(x))]
|
filtered_paths = [x for x in candidate_paths if pattern.search(os.path.basename(x))]
|
||||||
try:
|
detected = finder.detect_specs(pkg=pkg_cls, paths=filtered_paths)
|
||||||
detected = finder.detect_specs(pkg=pkg_cls, paths=filtered_paths)
|
|
||||||
except Exception:
|
|
||||||
warnings.warn(
|
|
||||||
f"[{__name__}] cannot detect {pkg_name} from the "
|
|
||||||
f"following paths: {', '.join(filtered_paths)}"
|
|
||||||
)
|
|
||||||
continue
|
|
||||||
|
|
||||||
for s in detected:
|
for s in detected:
|
||||||
for key in ("flags", "environment", "extra_rpaths"):
|
for key in ("flags", "environment", "extra_rpaths"):
|
||||||
if key in compiler_dict:
|
if key in compiler_dict:
|
||||||
|
@@ -250,11 +250,7 @@ def implicit_rpaths(self) -> List[str]:
|
|||||||
return []
|
return []
|
||||||
|
|
||||||
link_dirs = parse_non_system_link_dirs(output)
|
link_dirs = parse_non_system_link_dirs(output)
|
||||||
all_required_libs = list(self.spec.package.implicit_rpath_libs) + [
|
all_required_libs = list(self.spec.package.required_libs) + ["libc", "libc++", "libstdc++"]
|
||||||
"libc",
|
|
||||||
"libc++",
|
|
||||||
"libstdc++",
|
|
||||||
]
|
|
||||||
dynamic_linker = self.default_dynamic_linker()
|
dynamic_linker = self.default_dynamic_linker()
|
||||||
result = DefaultDynamicLinkerFilter(dynamic_linker)(
|
result = DefaultDynamicLinkerFilter(dynamic_linker)(
|
||||||
paths_containing_libs(link_dirs, all_required_libs)
|
paths_containing_libs(link_dirs, all_required_libs)
|
||||||
|
@@ -32,10 +32,9 @@
|
|||||||
import copy
|
import copy
|
||||||
import functools
|
import functools
|
||||||
import os
|
import os
|
||||||
import os.path
|
|
||||||
import re
|
import re
|
||||||
import sys
|
import sys
|
||||||
from typing import Any, Callable, Dict, Generator, List, NamedTuple, Optional, Tuple, Union
|
from typing import Any, Callable, Dict, Generator, List, Optional, Tuple, Union
|
||||||
|
|
||||||
import jsonschema
|
import jsonschema
|
||||||
|
|
||||||
@@ -43,6 +42,7 @@
|
|||||||
|
|
||||||
import spack.error
|
import spack.error
|
||||||
import spack.paths
|
import spack.paths
|
||||||
|
import spack.platforms
|
||||||
import spack.schema
|
import spack.schema
|
||||||
import spack.schema.bootstrap
|
import spack.schema.bootstrap
|
||||||
import spack.schema.cdash
|
import spack.schema.cdash
|
||||||
@@ -54,18 +54,17 @@
|
|||||||
import spack.schema.develop
|
import spack.schema.develop
|
||||||
import spack.schema.env
|
import spack.schema.env
|
||||||
import spack.schema.env_vars
|
import spack.schema.env_vars
|
||||||
import spack.schema.include
|
|
||||||
import spack.schema.merged
|
|
||||||
import spack.schema.mirrors
|
import spack.schema.mirrors
|
||||||
import spack.schema.modules
|
import spack.schema.modules
|
||||||
import spack.schema.packages
|
import spack.schema.packages
|
||||||
import spack.schema.repos
|
import spack.schema.repos
|
||||||
import spack.schema.upstreams
|
import spack.schema.upstreams
|
||||||
import spack.schema.view
|
import spack.schema.view
|
||||||
import spack.util.remote_file_cache as rfc_util
|
|
||||||
|
# Hacked yaml for configuration files preserves line numbers.
|
||||||
import spack.util.spack_yaml as syaml
|
import spack.util.spack_yaml as syaml
|
||||||
|
import spack.util.web as web_util
|
||||||
from spack.util.cpus import cpus_available
|
from spack.util.cpus import cpus_available
|
||||||
from spack.util.spack_yaml import get_mark_from_yaml_data
|
|
||||||
|
|
||||||
from .enums import ConfigScopePriority
|
from .enums import ConfigScopePriority
|
||||||
|
|
||||||
@@ -75,7 +74,6 @@
|
|||||||
"concretizer": spack.schema.concretizer.schema,
|
"concretizer": spack.schema.concretizer.schema,
|
||||||
"definitions": spack.schema.definitions.schema,
|
"definitions": spack.schema.definitions.schema,
|
||||||
"env_vars": spack.schema.env_vars.schema,
|
"env_vars": spack.schema.env_vars.schema,
|
||||||
"include": spack.schema.include.schema,
|
|
||||||
"view": spack.schema.view.schema,
|
"view": spack.schema.view.schema,
|
||||||
"develop": spack.schema.develop.schema,
|
"develop": spack.schema.develop.schema,
|
||||||
"mirrors": spack.schema.mirrors.schema,
|
"mirrors": spack.schema.mirrors.schema,
|
||||||
@@ -123,17 +121,6 @@
|
|||||||
#: Type used for raw YAML configuration
|
#: Type used for raw YAML configuration
|
||||||
YamlConfigDict = Dict[str, Any]
|
YamlConfigDict = Dict[str, Any]
|
||||||
|
|
||||||
#: prefix for name of included configuration scopes
|
|
||||||
INCLUDE_SCOPE_PREFIX = "include"
|
|
||||||
|
|
||||||
#: safeguard for recursive includes -- maximum include depth
|
|
||||||
MAX_RECURSIVE_INCLUDES = 100
|
|
||||||
|
|
||||||
|
|
||||||
def _include_cache_location():
|
|
||||||
"""Location to cache included configuration files."""
|
|
||||||
return os.path.join(spack.paths.user_cache_path, "includes")
|
|
||||||
|
|
||||||
|
|
||||||
class ConfigScope:
|
class ConfigScope:
|
||||||
def __init__(self, name: str) -> None:
|
def __init__(self, name: str) -> None:
|
||||||
@@ -141,25 +128,6 @@ def __init__(self, name: str) -> None:
|
|||||||
self.writable = False
|
self.writable = False
|
||||||
self.sections = syaml.syaml_dict()
|
self.sections = syaml.syaml_dict()
|
||||||
|
|
||||||
#: names of any included scopes
|
|
||||||
self._included_scopes: Optional[List["ConfigScope"]] = None
|
|
||||||
|
|
||||||
@property
|
|
||||||
def included_scopes(self) -> List["ConfigScope"]:
|
|
||||||
"""Memoized list of included scopes, in the order they appear in this scope."""
|
|
||||||
if self._included_scopes is None:
|
|
||||||
self._included_scopes = []
|
|
||||||
|
|
||||||
includes = self.get_section("include")
|
|
||||||
if includes:
|
|
||||||
include_paths = [included_path(data) for data in includes["include"]]
|
|
||||||
for path in include_paths:
|
|
||||||
included_scope = include_path_scope(path)
|
|
||||||
if included_scope:
|
|
||||||
self._included_scopes.append(included_scope)
|
|
||||||
|
|
||||||
return self._included_scopes
|
|
||||||
|
|
||||||
def get_section_filename(self, section: str) -> str:
|
def get_section_filename(self, section: str) -> str:
|
||||||
raise NotImplementedError
|
raise NotImplementedError
|
||||||
|
|
||||||
@@ -465,9 +433,7 @@ def highest(self) -> ConfigScope:
|
|||||||
return next(self.scopes.reversed_values()) # type: ignore
|
return next(self.scopes.reversed_values()) # type: ignore
|
||||||
|
|
||||||
@_config_mutator
|
@_config_mutator
|
||||||
def push_scope(
|
def push_scope(self, scope: ConfigScope, priority: Optional[int] = None) -> None:
|
||||||
self, scope: ConfigScope, priority: Optional[int] = None, _depth: int = 0
|
|
||||||
) -> None:
|
|
||||||
"""Adds a scope to the Configuration, at a given priority.
|
"""Adds a scope to the Configuration, at a given priority.
|
||||||
|
|
||||||
If a priority is not given, it is assumed to be the current highest priority.
|
If a priority is not given, it is assumed to be the current highest priority.
|
||||||
@@ -476,44 +442,18 @@ def push_scope(
|
|||||||
scope: scope to be added
|
scope: scope to be added
|
||||||
priority: priority of the scope
|
priority: priority of the scope
|
||||||
"""
|
"""
|
||||||
# TODO: As a follow on to #48784, change this to create a graph of the
|
|
||||||
# TODO: includes AND ensure properly sorted such that the order included
|
|
||||||
# TODO: at the highest level is reflected in the value of an option that
|
|
||||||
# TODO: is set in multiple included files.
|
|
||||||
# before pushing the scope itself, push any included scopes recursively, at same priority
|
|
||||||
for included_scope in reversed(scope.included_scopes):
|
|
||||||
if _depth + 1 > MAX_RECURSIVE_INCLUDES: # make sure we're not recursing endlessly
|
|
||||||
mark = ""
|
|
||||||
if hasattr(included_scope, "path") and syaml.marked(included_scope.path):
|
|
||||||
mark = included_scope.path._start_mark # type: ignore
|
|
||||||
raise RecursiveIncludeError(
|
|
||||||
f"Maximum include recursion exceeded in {included_scope.name}", str(mark)
|
|
||||||
)
|
|
||||||
|
|
||||||
# record this inclusion so that remove_scope() can use it
|
|
||||||
self.push_scope(included_scope, priority=priority, _depth=_depth + 1)
|
|
||||||
|
|
||||||
tty.debug(f"[CONFIGURATION: PUSH SCOPE]: {str(scope)}, priority={priority}", level=2)
|
tty.debug(f"[CONFIGURATION: PUSH SCOPE]: {str(scope)}, priority={priority}", level=2)
|
||||||
self.scopes.add(scope.name, value=scope, priority=priority)
|
self.scopes.add(scope.name, value=scope, priority=priority)
|
||||||
|
|
||||||
@_config_mutator
|
@_config_mutator
|
||||||
def remove_scope(self, scope_name: str) -> Optional[ConfigScope]:
|
def remove_scope(self, scope_name: str) -> Optional[ConfigScope]:
|
||||||
"""Removes a scope by name, and returns it. If the scope does not exist, returns None."""
|
"""Removes a scope by name, and returns it. If the scope does not exist, returns None."""
|
||||||
|
|
||||||
try:
|
try:
|
||||||
scope = self.scopes.remove(scope_name)
|
scope = self.scopes.remove(scope_name)
|
||||||
tty.debug(f"[CONFIGURATION: REMOVE SCOPE]: {str(scope)}", level=2)
|
tty.debug(f"[CONFIGURATION: POP SCOPE]: {str(scope)}", level=2)
|
||||||
except KeyError as e:
|
except KeyError as e:
|
||||||
tty.debug(f"[CONFIGURATION: REMOVE SCOPE]: {e}", level=2)
|
tty.debug(f"[CONFIGURATION: POP SCOPE]: {e}", level=2)
|
||||||
return None
|
return None
|
||||||
|
|
||||||
# transitively remove included scopes
|
|
||||||
for included_scope in scope.included_scopes:
|
|
||||||
assert (
|
|
||||||
included_scope.name in self.scopes
|
|
||||||
), f"Included scope '{included_scope.name}' was never added to configuration!"
|
|
||||||
self.remove_scope(included_scope.name)
|
|
||||||
|
|
||||||
return scope
|
return scope
|
||||||
|
|
||||||
@property
|
@property
|
||||||
@@ -823,8 +763,6 @@ def _add_platform_scope(
|
|||||||
cfg: Configuration, name: str, path: str, priority: ConfigScopePriority, writable: bool = True
|
cfg: Configuration, name: str, path: str, priority: ConfigScopePriority, writable: bool = True
|
||||||
) -> None:
|
) -> None:
|
||||||
"""Add a platform-specific subdirectory for the current platform."""
|
"""Add a platform-specific subdirectory for the current platform."""
|
||||||
import spack.platforms # circular dependency
|
|
||||||
|
|
||||||
platform = spack.platforms.host().name
|
platform = spack.platforms.host().name
|
||||||
scope = DirectoryConfigScope(
|
scope = DirectoryConfigScope(
|
||||||
f"{name}/{platform}", os.path.join(path, platform), writable=writable
|
f"{name}/{platform}", os.path.join(path, platform), writable=writable
|
||||||
@@ -832,75 +770,6 @@ def _add_platform_scope(
|
|||||||
cfg.push_scope(scope, priority=priority)
|
cfg.push_scope(scope, priority=priority)
|
||||||
|
|
||||||
|
|
||||||
#: Class for the relevance of an optional path conditioned on a limited
|
|
||||||
#: python code that evaluates to a boolean and or explicit specification
|
|
||||||
#: as optional.
|
|
||||||
class IncludePath(NamedTuple):
|
|
||||||
path: str
|
|
||||||
when: str
|
|
||||||
sha256: str
|
|
||||||
optional: bool
|
|
||||||
|
|
||||||
|
|
||||||
def included_path(entry: Union[str, dict]) -> IncludePath:
|
|
||||||
"""Convert the included path entry into an IncludePath.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
entry: include configuration entry
|
|
||||||
|
|
||||||
Returns: converted entry, where an empty ``when`` means the path is
|
|
||||||
not conditionally included
|
|
||||||
"""
|
|
||||||
if isinstance(entry, str):
|
|
||||||
return IncludePath(path=entry, sha256="", when="", optional=False)
|
|
||||||
|
|
||||||
path = entry["path"]
|
|
||||||
sha256 = entry.get("sha256", "")
|
|
||||||
when = entry.get("when", "")
|
|
||||||
optional = entry.get("optional", False)
|
|
||||||
return IncludePath(path=path, sha256=sha256, when=when, optional=optional)
|
|
||||||
|
|
||||||
|
|
||||||
def include_path_scope(include: IncludePath) -> Optional[ConfigScope]:
|
|
||||||
"""Instantiate an appropriate configuration scope for the given path.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
include: optional include path
|
|
||||||
|
|
||||||
Returns: configuration scope
|
|
||||||
|
|
||||||
Raises:
|
|
||||||
ValueError: included path has an unsupported URL scheme, is required
|
|
||||||
but does not exist; configuration stage directory argument is missing
|
|
||||||
ConfigFileError: unable to access remote configuration file(s)
|
|
||||||
"""
|
|
||||||
# circular dependencies
|
|
||||||
import spack.spec
|
|
||||||
|
|
||||||
if (not include.when) or spack.spec.eval_conditional(include.when):
|
|
||||||
config_path = rfc_util.local_path(include.path, include.sha256, _include_cache_location)
|
|
||||||
if not config_path:
|
|
||||||
raise ConfigFileError(f"Unable to fetch remote configuration from {include.path}")
|
|
||||||
|
|
||||||
if os.path.isdir(config_path):
|
|
||||||
# directories are treated as regular ConfigScopes
|
|
||||||
config_name = f"{INCLUDE_SCOPE_PREFIX}:{os.path.basename(config_path)}"
|
|
||||||
tty.debug(f"Creating DirectoryConfigScope {config_name} for '{config_path}'")
|
|
||||||
return DirectoryConfigScope(config_name, config_path)
|
|
||||||
|
|
||||||
if os.path.exists(config_path):
|
|
||||||
# files are assumed to be SingleFileScopes
|
|
||||||
config_name = f"{INCLUDE_SCOPE_PREFIX}:{config_path}"
|
|
||||||
tty.debug(f"Creating SingleFileScope {config_name} for '{config_path}'")
|
|
||||||
return SingleFileScope(config_name, config_path, spack.schema.merged.schema)
|
|
||||||
|
|
||||||
if not include.optional:
|
|
||||||
path = f" at ({config_path})" if config_path != include.path else ""
|
|
||||||
raise ValueError(f"Required path ({include.path}) does not exist{path}")
|
|
||||||
|
|
||||||
return None
|
|
||||||
|
|
||||||
|
|
||||||
def config_paths_from_entry_points() -> List[Tuple[str, str]]:
|
def config_paths_from_entry_points() -> List[Tuple[str, str]]:
|
||||||
"""Load configuration paths from entry points
|
"""Load configuration paths from entry points
|
||||||
|
|
||||||
@@ -926,7 +795,7 @@ def config_paths_from_entry_points() -> List[Tuple[str, str]]:
|
|||||||
return config_paths
|
return config_paths
|
||||||
|
|
||||||
|
|
||||||
def create_incremental() -> Generator[Configuration, None, None]:
|
def create() -> Configuration:
|
||||||
"""Singleton Configuration instance.
|
"""Singleton Configuration instance.
|
||||||
|
|
||||||
This constructs one instance associated with this module and returns
|
This constructs one instance associated with this module and returns
|
||||||
@@ -970,25 +839,11 @@ def create_incremental() -> Generator[Configuration, None, None]:
|
|||||||
# Each scope can have per-platform overrides in subdirectories
|
# Each scope can have per-platform overrides in subdirectories
|
||||||
_add_platform_scope(cfg, name, path, priority=ConfigScopePriority.CONFIG_FILES)
|
_add_platform_scope(cfg, name, path, priority=ConfigScopePriority.CONFIG_FILES)
|
||||||
|
|
||||||
# yield the config incrementally so that each config level's init code can get
|
return cfg
|
||||||
# data from the one below. This can be tricky, but it enables us to have a
|
|
||||||
# single unified config system.
|
|
||||||
#
|
|
||||||
# TODO: think about whether we want to restrict what types of config can be used
|
|
||||||
# at each level. e.g., we may want to just more forcibly disallow remote
|
|
||||||
# config (which uses ssl and other config options) for some of the scopes,
|
|
||||||
# to make the bootstrap issues more explicit, even if allowing config scope
|
|
||||||
# init to reference lower scopes is more flexible.
|
|
||||||
yield cfg
|
|
||||||
|
|
||||||
|
|
||||||
def create() -> Configuration:
|
|
||||||
"""Create a configuration using create_incremental(), return the last yielded result."""
|
|
||||||
return list(create_incremental())[-1]
|
|
||||||
|
|
||||||
|
|
||||||
#: This is the singleton configuration instance for Spack.
|
#: This is the singleton configuration instance for Spack.
|
||||||
CONFIG: Configuration = lang.Singleton(create_incremental) # type: ignore
|
CONFIG: Configuration = lang.Singleton(create) # type: ignore
|
||||||
|
|
||||||
|
|
||||||
def add_from_file(filename: str, scope: Optional[str] = None) -> None:
|
def add_from_file(filename: str, scope: Optional[str] = None) -> None:
|
||||||
@@ -1084,8 +939,7 @@ def set(path: str, value: Any, scope: Optional[str] = None) -> None:
|
|||||||
|
|
||||||
Accepts the path syntax described in ``get()``.
|
Accepts the path syntax described in ``get()``.
|
||||||
"""
|
"""
|
||||||
result = CONFIG.set(path, value, scope)
|
return CONFIG.set(path, value, scope)
|
||||||
return result
|
|
||||||
|
|
||||||
|
|
||||||
def scopes() -> lang.PriorityOrderedMapping[str, ConfigScope]:
|
def scopes() -> lang.PriorityOrderedMapping[str, ConfigScope]:
|
||||||
@@ -1608,6 +1462,120 @@ def create_from(*scopes_or_paths: Union[ScopeWithOptionalPriority, str]) -> Conf
|
|||||||
return result
|
return result
|
||||||
|
|
||||||
|
|
||||||
|
def raw_github_gitlab_url(url: str) -> str:
|
||||||
|
"""Transform a github URL to the raw form to avoid undesirable html.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
url: url to be converted to raw form
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Raw github/gitlab url or the original url
|
||||||
|
"""
|
||||||
|
# Note we rely on GitHub to redirect the 'raw' URL returned here to the
|
||||||
|
# actual URL under https://raw.githubusercontent.com/ with '/blob'
|
||||||
|
# removed and or, '/blame' if needed.
|
||||||
|
if "github" in url or "gitlab" in url:
|
||||||
|
return url.replace("/blob/", "/raw/")
|
||||||
|
|
||||||
|
return url
|
||||||
|
|
||||||
|
|
||||||
|
def collect_urls(base_url: str) -> list:
|
||||||
|
"""Return a list of configuration URLs.
|
||||||
|
|
||||||
|
Arguments:
|
||||||
|
base_url: URL for a configuration (yaml) file or a directory
|
||||||
|
containing yaml file(s)
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
List of configuration file(s) or empty list if none
|
||||||
|
"""
|
||||||
|
if not base_url:
|
||||||
|
return []
|
||||||
|
|
||||||
|
extension = ".yaml"
|
||||||
|
|
||||||
|
if base_url.endswith(extension):
|
||||||
|
return [base_url]
|
||||||
|
|
||||||
|
# Collect configuration URLs if the base_url is a "directory".
|
||||||
|
_, links = web_util.spider(base_url, 0)
|
||||||
|
return [link for link in links if link.endswith(extension)]
|
||||||
|
|
||||||
|
|
||||||
|
def fetch_remote_configs(url: str, dest_dir: str, skip_existing: bool = True) -> str:
|
||||||
|
"""Retrieve configuration file(s) at the specified URL.
|
||||||
|
|
||||||
|
Arguments:
|
||||||
|
url: URL for a configuration (yaml) file or a directory containing
|
||||||
|
yaml file(s)
|
||||||
|
dest_dir: destination directory
|
||||||
|
skip_existing: Skip files that already exist in dest_dir if
|
||||||
|
``True``; otherwise, replace those files
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Path to the corresponding file if URL is or contains a
|
||||||
|
single file and it is the only file in the destination directory or
|
||||||
|
the root (dest_dir) directory if multiple configuration files exist
|
||||||
|
or are retrieved.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def _fetch_file(url):
|
||||||
|
raw = raw_github_gitlab_url(url)
|
||||||
|
tty.debug(f"Reading config from url {raw}")
|
||||||
|
return web_util.fetch_url_text(raw, dest_dir=dest_dir)
|
||||||
|
|
||||||
|
if not url:
|
||||||
|
raise ConfigFileError("Cannot retrieve configuration without a URL")
|
||||||
|
|
||||||
|
# Return the local path to the cached configuration file OR to the
|
||||||
|
# directory containing the cached configuration files.
|
||||||
|
config_links = collect_urls(url)
|
||||||
|
existing_files = os.listdir(dest_dir) if os.path.isdir(dest_dir) else []
|
||||||
|
|
||||||
|
paths = []
|
||||||
|
for config_url in config_links:
|
||||||
|
basename = os.path.basename(config_url)
|
||||||
|
if skip_existing and basename in existing_files:
|
||||||
|
tty.warn(
|
||||||
|
f"Will not fetch configuration from {config_url} since a "
|
||||||
|
f"version already exists in {dest_dir}"
|
||||||
|
)
|
||||||
|
path = os.path.join(dest_dir, basename)
|
||||||
|
else:
|
||||||
|
path = _fetch_file(config_url)
|
||||||
|
|
||||||
|
if path:
|
||||||
|
paths.append(path)
|
||||||
|
|
||||||
|
if paths:
|
||||||
|
return dest_dir if len(paths) > 1 else paths[0]
|
||||||
|
|
||||||
|
raise ConfigFileError(f"Cannot retrieve configuration (yaml) from {url}")
|
||||||
|
|
||||||
|
|
||||||
|
def get_mark_from_yaml_data(obj):
|
||||||
|
"""Try to get ``spack.util.spack_yaml`` mark from YAML data.
|
||||||
|
|
||||||
|
We try the object, and if that fails we try its first member (if it's a container).
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
mark if one is found, otherwise None.
|
||||||
|
"""
|
||||||
|
# mark of object itelf
|
||||||
|
mark = getattr(obj, "_start_mark", None)
|
||||||
|
if mark:
|
||||||
|
return mark
|
||||||
|
|
||||||
|
# mark of first member if it is a container
|
||||||
|
if isinstance(obj, (list, dict)):
|
||||||
|
first_member = next(iter(obj), None)
|
||||||
|
if first_member:
|
||||||
|
mark = getattr(first_member, "_start_mark", None)
|
||||||
|
|
||||||
|
return mark
|
||||||
|
|
||||||
|
|
||||||
def determine_number_of_jobs(
|
def determine_number_of_jobs(
|
||||||
*,
|
*,
|
||||||
parallel: bool = False,
|
parallel: bool = False,
|
||||||
@@ -1712,7 +1680,3 @@ def get_path(path, data):
|
|||||||
|
|
||||||
# give up and return None if nothing worked
|
# give up and return None if nothing worked
|
||||||
return None
|
return None
|
||||||
|
|
||||||
|
|
||||||
class RecursiveIncludeError(spack.error.SpackError):
|
|
||||||
"""Too many levels of recursive includes."""
|
|
||||||
|
@@ -149,12 +149,12 @@ def _getfqdn():
|
|||||||
return socket.getfqdn()
|
return socket.getfqdn()
|
||||||
|
|
||||||
|
|
||||||
def reader(version: vn.StandardVersion) -> Type["spack.spec.SpecfileReaderBase"]:
|
def reader(version: vn.ConcreteVersion) -> Type["spack.spec.SpecfileReaderBase"]:
|
||||||
reader_cls = {
|
reader_cls = {
|
||||||
vn.StandardVersion.from_string("5"): spack.spec.SpecfileV1,
|
vn.Version("5"): spack.spec.SpecfileV1,
|
||||||
vn.StandardVersion.from_string("6"): spack.spec.SpecfileV3,
|
vn.Version("6"): spack.spec.SpecfileV3,
|
||||||
vn.StandardVersion.from_string("7"): spack.spec.SpecfileV4,
|
vn.Version("7"): spack.spec.SpecfileV4,
|
||||||
vn.StandardVersion.from_string("8"): spack.spec.SpecfileV5,
|
vn.Version("8"): spack.spec.SpecfileV5,
|
||||||
}
|
}
|
||||||
return reader_cls[version]
|
return reader_cls[version]
|
||||||
|
|
||||||
@@ -649,7 +649,7 @@ def __init__(
|
|||||||
@property
|
@property
|
||||||
def db_version(self) -> vn.ConcreteVersion:
|
def db_version(self) -> vn.ConcreteVersion:
|
||||||
if self._db_version is None:
|
if self._db_version is None:
|
||||||
raise AttributeError("version not set -- DB has not been read yet")
|
raise AttributeError("db version is not yet set")
|
||||||
return self._db_version
|
return self._db_version
|
||||||
|
|
||||||
@db_version.setter
|
@db_version.setter
|
||||||
@@ -824,7 +824,7 @@ def check(cond, msg):
|
|||||||
db = fdata["database"]
|
db = fdata["database"]
|
||||||
check("version" in db, "no 'version' in JSON DB.")
|
check("version" in db, "no 'version' in JSON DB.")
|
||||||
|
|
||||||
self.db_version = vn.StandardVersion.from_string(db["version"])
|
self.db_version = vn.Version(db["version"])
|
||||||
if self.db_version > _DB_VERSION:
|
if self.db_version > _DB_VERSION:
|
||||||
raise InvalidDatabaseVersionError(self, _DB_VERSION, self.db_version)
|
raise InvalidDatabaseVersionError(self, _DB_VERSION, self.db_version)
|
||||||
elif self.db_version < _DB_VERSION:
|
elif self.db_version < _DB_VERSION:
|
||||||
@@ -896,7 +896,7 @@ def _handle_current_version_read(self, check, db):
|
|||||||
|
|
||||||
def _handle_old_db_versions_read(self, check, db, *, reindex: bool):
|
def _handle_old_db_versions_read(self, check, db, *, reindex: bool):
|
||||||
if reindex is False and not self.is_upstream:
|
if reindex is False and not self.is_upstream:
|
||||||
self.raise_explicit_database_upgrade_error()
|
self.raise_explicit_database_upgrade()
|
||||||
|
|
||||||
if not self.is_readable():
|
if not self.is_readable():
|
||||||
raise DatabaseNotReadableError(
|
raise DatabaseNotReadableError(
|
||||||
@@ -909,16 +909,13 @@ def is_readable(self) -> bool:
|
|||||||
"""Returns true if this DB can be read without reindexing"""
|
"""Returns true if this DB can be read without reindexing"""
|
||||||
return (self.db_version, _DB_VERSION) in _REINDEX_NOT_NEEDED_ON_READ
|
return (self.db_version, _DB_VERSION) in _REINDEX_NOT_NEEDED_ON_READ
|
||||||
|
|
||||||
def raise_explicit_database_upgrade_error(self):
|
def raise_explicit_database_upgrade(self):
|
||||||
"""Raises an ExplicitDatabaseUpgradeError with an appropriate message"""
|
"""Raises an ExplicitDatabaseUpgradeError with an appropriate message"""
|
||||||
raise ExplicitDatabaseUpgradeError(
|
raise ExplicitDatabaseUpgradeError(
|
||||||
f"database is v{self.db_version}, but Spack v{spack.__version__} needs v{_DB_VERSION}",
|
f"database is v{self.db_version}, but Spack v{spack.__version__} needs v{_DB_VERSION}",
|
||||||
long_message=(
|
long_message=(
|
||||||
f"\nChange config:install_tree:root to use a different store, or use `spack "
|
f"\nUse `spack reindex` to upgrade the store at {self.root} to version "
|
||||||
f"reindex` to migrate the store at {self.root} to version {_DB_VERSION}.\n\n"
|
f"{_DB_VERSION}, or change config:install_tree:root to use a different store"
|
||||||
f"If you decide to migrate the store, note that:\n"
|
|
||||||
f"1. The operation cannot be reverted, and\n"
|
|
||||||
f"2. Older Spack versions will not be able to read the store anymore\n"
|
|
||||||
),
|
),
|
||||||
)
|
)
|
||||||
|
|
||||||
@@ -1163,7 +1160,7 @@ def _add(
|
|||||||
installation_time:
|
installation_time:
|
||||||
Date and time of installation
|
Date and time of installation
|
||||||
allow_missing: if True, don't warn when installation is not found on on disk
|
allow_missing: if True, don't warn when installation is not found on on disk
|
||||||
This is useful when installing specs without build/test deps.
|
This is useful when installing specs without build deps.
|
||||||
"""
|
"""
|
||||||
if not spec.concrete:
|
if not spec.concrete:
|
||||||
raise NonConcreteSpecAddError("Specs added to DB must be concrete.")
|
raise NonConcreteSpecAddError("Specs added to DB must be concrete.")
|
||||||
@@ -1183,8 +1180,10 @@ def _add(
|
|||||||
edge.spec,
|
edge.spec,
|
||||||
explicit=False,
|
explicit=False,
|
||||||
installation_time=installation_time,
|
installation_time=installation_time,
|
||||||
# allow missing build / test only deps
|
# allow missing build-only deps. This prevents excessive warnings when a spec is
|
||||||
allow_missing=allow_missing or edge.depflag & (dt.BUILD | dt.TEST) == edge.depflag,
|
# installed, and its build dep is missing a build dep; there's no need to install
|
||||||
|
# the build dep's build dep first, and there's no need to warn about it missing.
|
||||||
|
allow_missing=allow_missing or edge.depflag == dt.BUILD,
|
||||||
)
|
)
|
||||||
|
|
||||||
# Make sure the directory layout agrees whether the spec is installed
|
# Make sure the directory layout agrees whether the spec is installed
|
||||||
|
@@ -20,7 +20,7 @@
|
|||||||
import sys
|
import sys
|
||||||
from typing import Dict, List, Optional, Set, Tuple, Union
|
from typing import Dict, List, Optional, Set, Tuple, Union
|
||||||
|
|
||||||
from llnl.util import tty
|
import llnl.util.tty
|
||||||
|
|
||||||
import spack.config
|
import spack.config
|
||||||
import spack.error
|
import spack.error
|
||||||
@@ -93,13 +93,14 @@ def _spec_is_valid(spec: spack.spec.Spec) -> bool:
|
|||||||
except spack.error.SpackError:
|
except spack.error.SpackError:
|
||||||
# It is assumed here that we can at least extract the package name from the spec so we
|
# It is assumed here that we can at least extract the package name from the spec so we
|
||||||
# can look up the implementation of determine_spec_details
|
# can look up the implementation of determine_spec_details
|
||||||
tty.warn(f"Constructed spec for {spec.name} does not have a string representation")
|
msg = f"Constructed spec for {spec.name} does not have a string representation"
|
||||||
|
llnl.util.tty.warn(msg)
|
||||||
return False
|
return False
|
||||||
|
|
||||||
try:
|
try:
|
||||||
spack.spec.Spec(str(spec))
|
spack.spec.Spec(str(spec))
|
||||||
except spack.error.SpackError:
|
except spack.error.SpackError:
|
||||||
tty.warn(
|
llnl.util.tty.warn(
|
||||||
"Constructed spec has a string representation but the string"
|
"Constructed spec has a string representation but the string"
|
||||||
" representation does not evaluate to a valid spec: {0}".format(str(spec))
|
" representation does not evaluate to a valid spec: {0}".format(str(spec))
|
||||||
)
|
)
|
||||||
@@ -108,24 +109,20 @@ def _spec_is_valid(spec: spack.spec.Spec) -> bool:
|
|||||||
return True
|
return True
|
||||||
|
|
||||||
|
|
||||||
def path_to_dict(search_paths: List[str]) -> Dict[str, str]:
|
def path_to_dict(search_paths: List[str]):
|
||||||
"""Return dictionary[fullpath]: basename from list of paths"""
|
"""Return dictionary[fullpath]: basename from list of paths"""
|
||||||
path_to_lib: Dict[str, str] = {}
|
path_to_lib = {}
|
||||||
# Reverse order of search directories so that a lib in the first
|
# Reverse order of search directories so that a lib in the first
|
||||||
# entry overrides later entries
|
# entry overrides later entries
|
||||||
for search_path in reversed(search_paths):
|
for search_path in reversed(search_paths):
|
||||||
try:
|
try:
|
||||||
dir_iter = os.scandir(search_path)
|
with os.scandir(search_path) as entries:
|
||||||
|
path_to_lib.update(
|
||||||
|
{entry.path: entry.name for entry in entries if entry.is_file()}
|
||||||
|
)
|
||||||
except OSError as e:
|
except OSError as e:
|
||||||
tty.debug(f"cannot scan '{search_path}' for external software: {e}")
|
msg = f"cannot scan '{search_path}' for external software: {str(e)}"
|
||||||
continue
|
llnl.util.tty.debug(msg)
|
||||||
with dir_iter as entries:
|
|
||||||
for entry in entries:
|
|
||||||
try:
|
|
||||||
if entry.is_file():
|
|
||||||
path_to_lib[entry.path] = entry.name
|
|
||||||
except OSError as e:
|
|
||||||
tty.debug(f"cannot scan '{search_path}' for external software: {e}")
|
|
||||||
|
|
||||||
return path_to_lib
|
return path_to_lib
|
||||||
|
|
||||||
|
@@ -7,7 +7,6 @@
|
|||||||
import collections
|
import collections
|
||||||
import concurrent.futures
|
import concurrent.futures
|
||||||
import os
|
import os
|
||||||
import pathlib
|
|
||||||
import re
|
import re
|
||||||
import sys
|
import sys
|
||||||
import traceback
|
import traceback
|
||||||
@@ -16,7 +15,6 @@
|
|||||||
|
|
||||||
import llnl.util.filesystem
|
import llnl.util.filesystem
|
||||||
import llnl.util.lang
|
import llnl.util.lang
|
||||||
import llnl.util.symlink
|
|
||||||
import llnl.util.tty
|
import llnl.util.tty
|
||||||
|
|
||||||
import spack.error
|
import spack.error
|
||||||
@@ -72,21 +70,13 @@ def dedupe_paths(paths: List[str]) -> List[str]:
|
|||||||
"""Deduplicate paths based on inode and device number. In case the list contains first a
|
"""Deduplicate paths based on inode and device number. In case the list contains first a
|
||||||
symlink and then the directory it points to, the symlink is replaced with the directory path.
|
symlink and then the directory it points to, the symlink is replaced with the directory path.
|
||||||
This ensures that we pick for example ``/usr/bin`` over ``/bin`` if the latter is a symlink to
|
This ensures that we pick for example ``/usr/bin`` over ``/bin`` if the latter is a symlink to
|
||||||
the former."""
|
the former`."""
|
||||||
seen: Dict[Tuple[int, int], str] = {}
|
seen: Dict[Tuple[int, int], str] = {}
|
||||||
|
|
||||||
linked_parent_check = lambda x: any(
|
|
||||||
[llnl.util.symlink.islink(str(y)) for y in pathlib.Path(x).parents]
|
|
||||||
)
|
|
||||||
|
|
||||||
for path in paths:
|
for path in paths:
|
||||||
identifier = file_identifier(path)
|
identifier = file_identifier(path)
|
||||||
if identifier not in seen:
|
if identifier not in seen:
|
||||||
seen[identifier] = path
|
seen[identifier] = path
|
||||||
# we also want to deprioritize paths if they contain a symlink in any parent
|
elif not os.path.islink(path):
|
||||||
# (not just the basedir): e.g. oneapi has "latest/bin",
|
|
||||||
# where "latest" is a symlink to 2025.0"
|
|
||||||
elif not (llnl.util.symlink.islink(path) or linked_parent_check(path)):
|
|
||||||
seen[identifier] = path
|
seen[identifier] = path
|
||||||
return list(seen.values())
|
return list(seen.values())
|
||||||
|
|
||||||
|
@@ -34,13 +34,11 @@ class OpenMpi(Package):
|
|||||||
import collections.abc
|
import collections.abc
|
||||||
import os
|
import os
|
||||||
import re
|
import re
|
||||||
import warnings
|
|
||||||
from typing import Any, Callable, List, Optional, Tuple, Type, Union
|
from typing import Any, Callable, List, Optional, Tuple, Type, Union
|
||||||
|
|
||||||
import llnl.util.tty.color
|
import llnl.util.tty.color
|
||||||
|
|
||||||
import spack.deptypes as dt
|
import spack.deptypes as dt
|
||||||
import spack.error
|
|
||||||
import spack.fetch_strategy
|
import spack.fetch_strategy
|
||||||
import spack.package_base
|
import spack.package_base
|
||||||
import spack.patch
|
import spack.patch
|
||||||
@@ -459,7 +457,8 @@ def _execute_extends(pkg):
|
|||||||
if dep_spec.name == "python" and not pkg.name == "python-venv":
|
if dep_spec.name == "python" and not pkg.name == "python-venv":
|
||||||
_depends_on(pkg, spack.spec.Spec("python-venv"), when=when, type=("build", "run"))
|
_depends_on(pkg, spack.spec.Spec("python-venv"), when=when, type=("build", "run"))
|
||||||
|
|
||||||
pkg.extendees[dep_spec.name] = (dep_spec, when_spec)
|
# TODO: the values of the extendees dictionary are not used. Remove in next refactor.
|
||||||
|
pkg.extendees[dep_spec.name] = (dep_spec, None)
|
||||||
|
|
||||||
return _execute_extends
|
return _execute_extends
|
||||||
|
|
||||||
@@ -610,7 +609,7 @@ def _execute_patch(
|
|||||||
return _execute_patch
|
return _execute_patch
|
||||||
|
|
||||||
|
|
||||||
def conditional(*values: Union[str, bool], when: Optional[WhenType] = None):
|
def conditional(*values: List[Any], when: Optional[WhenType] = None):
|
||||||
"""Conditional values that can be used in variant declarations."""
|
"""Conditional values that can be used in variant declarations."""
|
||||||
# _make_when_spec returns None when the condition is statically false.
|
# _make_when_spec returns None when the condition is statically false.
|
||||||
when = _make_when_spec(when)
|
when = _make_when_spec(when)
|
||||||
@@ -622,7 +621,7 @@ def conditional(*values: Union[str, bool], when: Optional[WhenType] = None):
|
|||||||
@directive("variants")
|
@directive("variants")
|
||||||
def variant(
|
def variant(
|
||||||
name: str,
|
name: str,
|
||||||
default: Optional[Union[bool, str, Tuple[str, ...]]] = None,
|
default: Optional[Any] = None,
|
||||||
description: str = "",
|
description: str = "",
|
||||||
values: Optional[Union[collections.abc.Sequence, Callable[[Any], bool]]] = None,
|
values: Optional[Union[collections.abc.Sequence, Callable[[Any], bool]]] = None,
|
||||||
multi: Optional[bool] = None,
|
multi: Optional[bool] = None,
|
||||||
@@ -652,29 +651,11 @@ def variant(
|
|||||||
DirectiveError: If arguments passed to the directive are invalid
|
DirectiveError: If arguments passed to the directive are invalid
|
||||||
"""
|
"""
|
||||||
|
|
||||||
# This validation can be removed at runtime and enforced with an audit in Spack v1.0.
|
|
||||||
# For now it's a warning to let people migrate faster.
|
|
||||||
if not (
|
|
||||||
default is None
|
|
||||||
or type(default) in (bool, str)
|
|
||||||
or (type(default) is tuple and all(type(x) is str for x in default))
|
|
||||||
):
|
|
||||||
if isinstance(default, (list, tuple)):
|
|
||||||
did_you_mean = f"default={','.join(str(x) for x in default)!r}"
|
|
||||||
else:
|
|
||||||
did_you_mean = f"default={str(default)!r}"
|
|
||||||
warnings.warn(
|
|
||||||
f"default value for variant '{name}' is not a boolean or string: default={default!r}. "
|
|
||||||
f"Did you mean {did_you_mean}?",
|
|
||||||
stacklevel=3,
|
|
||||||
category=spack.error.SpackAPIWarning,
|
|
||||||
)
|
|
||||||
|
|
||||||
def format_error(msg, pkg):
|
def format_error(msg, pkg):
|
||||||
msg += " @*r{{[{0}, variant '{1}']}}"
|
msg += " @*r{{[{0}, variant '{1}']}}"
|
||||||
return llnl.util.tty.color.colorize(msg.format(pkg.name, name))
|
return llnl.util.tty.color.colorize(msg.format(pkg.name, name))
|
||||||
|
|
||||||
if name in spack.variant.RESERVED_NAMES:
|
if name in spack.variant.reserved_names:
|
||||||
|
|
||||||
def _raise_reserved_name(pkg):
|
def _raise_reserved_name(pkg):
|
||||||
msg = "The name '%s' is reserved by Spack" % name
|
msg = "The name '%s' is reserved by Spack" % name
|
||||||
@@ -685,11 +666,7 @@ def _raise_reserved_name(pkg):
|
|||||||
# Ensure we have a sequence of allowed variant values, or a
|
# Ensure we have a sequence of allowed variant values, or a
|
||||||
# predicate for it.
|
# predicate for it.
|
||||||
if values is None:
|
if values is None:
|
||||||
if (
|
if str(default).upper() in ("TRUE", "FALSE"):
|
||||||
default in (True, False)
|
|
||||||
or type(default) is str
|
|
||||||
and default.upper() in ("TRUE", "FALSE")
|
|
||||||
):
|
|
||||||
values = (True, False)
|
values = (True, False)
|
||||||
else:
|
else:
|
||||||
values = lambda x: True
|
values = lambda x: True
|
||||||
@@ -722,15 +699,12 @@ def _raise_argument_error(pkg):
|
|||||||
# or the empty string, as the former indicates that a default
|
# or the empty string, as the former indicates that a default
|
||||||
# was not set while the latter will make the variant unparsable
|
# was not set while the latter will make the variant unparsable
|
||||||
# from the command line
|
# from the command line
|
||||||
if isinstance(default, tuple):
|
|
||||||
default = ",".join(default)
|
|
||||||
|
|
||||||
if default is None or default == "":
|
if default is None or default == "":
|
||||||
|
|
||||||
def _raise_default_not_set(pkg):
|
def _raise_default_not_set(pkg):
|
||||||
if default is None:
|
if default is None:
|
||||||
msg = "either a default was not explicitly set, or 'None' was used"
|
msg = "either a default was not explicitly set, " "or 'None' was used"
|
||||||
else:
|
elif default == "":
|
||||||
msg = "the default cannot be an empty string"
|
msg = "the default cannot be an empty string"
|
||||||
raise DirectiveError(format_error(msg, pkg))
|
raise DirectiveError(format_error(msg, pkg))
|
||||||
|
|
||||||
|
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user