Compare commits

1 commit: white238/b...hs/fix/tar

| Author | SHA1 | Date |
|---|---|---|
|  | 8b020067f4 |  |

.github/workflows/build-containers.yml (18 changes)
@@ -40,17 +40,17 @@ jobs:
       # 1: Platforms to build for
       # 2: Base image (e.g. ubuntu:22.04)
       dockerfile: [[amazon-linux, 'linux/amd64,linux/arm64', 'amazonlinux:2'],
-                   [centos-stream9, 'linux/amd64,linux/arm64', 'centos:stream9'],
-                   [leap15, 'linux/amd64,linux/arm64', 'opensuse/leap:15'],
-                   [ubuntu-focal, 'linux/amd64,linux/arm64', 'ubuntu:20.04'],
-                   [ubuntu-jammy, 'linux/amd64,linux/arm64', 'ubuntu:22.04'],
-                   [ubuntu-noble, 'linux/amd64,linux/arm64', 'ubuntu:24.04'],
-                   [almalinux8, 'linux/amd64,linux/arm64', 'almalinux:8'],
-                   [almalinux9, 'linux/amd64,linux/arm64', 'almalinux:9'],
+                   [centos-stream9, 'linux/amd64,linux/arm64,linux/ppc64le', 'centos:stream9'],
+                   [leap15, 'linux/amd64,linux/arm64,linux/ppc64le', 'opensuse/leap:15'],
+                   [ubuntu-focal, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:20.04'],
+                   [ubuntu-jammy, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:22.04'],
+                   [ubuntu-noble, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:24.04'],
+                   [almalinux8, 'linux/amd64,linux/arm64,linux/ppc64le', 'almalinux:8'],
+                   [almalinux9, 'linux/amd64,linux/arm64,linux/ppc64le', 'almalinux:9'],
                    [rockylinux8, 'linux/amd64,linux/arm64', 'rockylinux:8'],
                    [rockylinux9, 'linux/amd64,linux/arm64', 'rockylinux:9'],
-                   [fedora39, 'linux/amd64,linux/arm64', 'fedora:39'],
-                   [fedora40, 'linux/amd64,linux/arm64', 'fedora:40']]
+                   [fedora39, 'linux/amd64,linux/arm64,linux/ppc64le', 'fedora:39'],
+                   [fedora40, 'linux/amd64,linux/arm64,linux/ppc64le', 'fedora:40']]
     name: Build ${{ matrix.dockerfile[0] }}
     if: github.repository == 'spack/spack'
     steps:
.github/workflows/ci.yaml (21 changes)

@@ -9,7 +9,6 @@ on:
     branches:
       - develop
       - releases/**
-  merge_group:

 concurrency:
   group: ci-${{github.ref}}-${{github.event.pull_request.number || github.run_number}}
@@ -26,17 +25,13 @@ jobs:
       packages: ${{ steps.filter.outputs.packages }}
     steps:
       - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
-        if: ${{ github.event_name == 'push' || github.event_name == 'merge_group' }}
+        if: ${{ github.event_name == 'push' }}
         with:
           fetch-depth: 0
       # For pull requests it's not necessary to checkout the code
       - uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36
         id: filter
         with:
-          # For merge group events, compare against the target branch (main)
-          base: ${{ github.event_name == 'merge_group' && github.event.merge_group.base_ref || '' }}
-          # For merge group events, use the merge group head ref
-          ref: ${{ github.event_name == 'merge_group' && github.event.merge_group.head_sha || github.ref }}
           # See https://github.com/dorny/paths-filter/issues/56 for the syntax used below
           # Don't run if we only modified packages in the
           # built-in repository or documentation
@@ -81,15 +76,10 @@ jobs:

   prechecks:
     needs: [ changes ]
-    uses: ./.github/workflows/prechecks.yml
+    uses: ./.github/workflows/valid-style.yml
     secrets: inherit
     with:
       with_coverage: ${{ needs.changes.outputs.core }}
-      with_packages: ${{ needs.changes.outputs.packages }}
-
-  import-check:
-    needs: [ changes ]
-    uses: ./.github/workflows/import-check.yaml

   all-prechecks:
     needs: [ prechecks ]
@@ -99,7 +89,7 @@ jobs:
       - name: Success
         run: |
           if [ "${{ needs.prechecks.result }}" == "failure" ] || [ "${{ needs.prechecks.result }}" == "canceled" ]; then
             echo "Unit tests failed."
             exit 1
           else
             exit 0
@@ -107,7 +97,6 @@ jobs:

   coverage:
     needs: [ unit-tests, prechecks ]
-    if: ${{ needs.changes.outputs.core }}
     uses: ./.github/workflows/coverage.yml
     secrets: inherit

@@ -120,10 +109,10 @@ jobs:
       - name: Status summary
         run: |
           if [ "${{ needs.unit-tests.result }}" == "failure" ] || [ "${{ needs.unit-tests.result }}" == "canceled" ]; then
             echo "Unit tests failed."
             exit 1
           elif [ "${{ needs.bootstrap.result }}" == "failure" ] || [ "${{ needs.bootstrap.result }}" == "canceled" ]; then
             echo "Bootstrap tests failed."
             exit 1
           else
             exit 0
.github/workflows/coverage.yml (1 change)

@@ -33,4 +33,3 @@ jobs:
         with:
           verbose: true
           fail_ci_if_error: false
-          token: ${{ secrets.CODECOV_TOKEN }}
.github/workflows/import-check.yaml (49 changes, file removed)

@@ -1,49 +0,0 @@
-name: import-check
-
-on:
-  workflow_call:
-
-jobs:
-  # Check we don't make the situation with circular imports worse
-  import-check:
-    runs-on: ubuntu-latest
-    steps:
-    - uses: julia-actions/setup-julia@v2
-      with:
-        version: '1.10'
-    - uses: julia-actions/cache@v2
-
-    # PR: use the base of the PR as the old commit
-    - name: Checkout PR base commit
-      if: github.event_name == 'pull_request'
-      uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
-      with:
-        ref: ${{ github.event.pull_request.base.sha }}
-        path: old
-    # not a PR: use the previous commit as the old commit
-    - name: Checkout previous commit
-      if: github.event_name != 'pull_request'
-      uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
-      with:
-        fetch-depth: 2
-        path: old
-    - name: Checkout previous commit
-      if: github.event_name != 'pull_request'
-      run: git -C old reset --hard HEAD^
-
-    - name: Checkout new commit
-      uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
-      with:
-        path: new
-    - name: Install circular import checker
-      uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
-      with:
-        repository: haampie/circular-import-fighter
-        ref: 4cdb0bf15f04ab6b49041d5ef1bfd9644cce7f33
-        path: circular-import-fighter
-    - name: Install dependencies
-      working-directory: circular-import-fighter
-      run: make -j dependencies
-    - name: Circular import check
-      working-directory: circular-import-fighter
-      run: make -j compare "SPACK_ROOT=../old ../new"
@@ -1,7 +1,7 @@
-black==25.1.0
+black==24.10.0
 clingo==5.7.1
-flake8==7.1.2
+flake8==7.1.1
-isort==6.0.1
+isort==5.13.2
-mypy==1.15.0
+mypy==1.11.2
-types-six==1.17.0.20250304
+types-six==1.17.0.20241205
 vermin==1.6.0
.github/workflows/prechecks.yml → .github/workflows/valid-style.yml (renamed)

@@ -1,4 +1,4 @@
-name: prechecks
+name: style

 on:
   workflow_call:
@@ -6,9 +6,6 @@ on:
     with_coverage:
       required: true
       type: string
-    with_packages:
-      required: true
-      type: string

 concurrency:
   group: style-${{github.ref}}-${{github.event.pull_request.number || github.run_number}}
@@ -33,7 +30,6 @@ jobs:
         run: vermin --backport importlib --backport argparse --violations --backport typing -t=3.6- -vvv lib/spack/spack/ lib/spack/llnl/ bin/
       - name: vermin (Repositories)
         run: vermin --backport importlib --backport argparse --violations --backport typing -t=3.6- -vvv var/spack/repos

   # Run style checks on the files that have been changed
   style:
     runs-on: ubuntu-latest
@@ -57,25 +53,12 @@ jobs:
       - name: Run style tests
         run: |
           share/spack/qa/run-style-tests

   audit:
     uses: ./.github/workflows/audit.yaml
     secrets: inherit
     with:
       with_coverage: ${{ inputs.with_coverage }}
       python_version: '3.13'
-
-  verify-checksums:
-    if: ${{ inputs.with_packages == 'true' }}
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
-        with:
-          fetch-depth: 2
-      - name: Verify Added Checksums
-        run: |
-          bin/spack ci verify-versions HEAD^1 HEAD

   # Check that spack can bootstrap the development environment on Python 3.6 - RHEL8
   bootstrap-dev-rhel8:
     runs-on: ubuntu-latest
@@ -103,6 +86,66 @@ jobs:
           spack -d bootstrap now --dev
           spack -d style -t black
           spack unit-test -V
+  # Check we don't make the situation with circular imports worse
+  import-check:
+    runs-on: ubuntu-latest
+    steps:
+    - uses: julia-actions/setup-julia@v2
+      with:
+        version: '1.10'
+    - uses: julia-actions/cache@v2
+
+    # PR: use the base of the PR as the old commit
+    - name: Checkout PR base commit
+      if: github.event_name == 'pull_request'
+      uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
+      with:
+        ref: ${{ github.event.pull_request.base.sha }}
+        path: old
+    # not a PR: use the previous commit as the old commit
+    - name: Checkout previous commit
+      if: github.event_name != 'pull_request'
+      uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
+      with:
+        fetch-depth: 2
+        path: old
+    - name: Checkout previous commit
+      if: github.event_name != 'pull_request'
+      run: git -C old reset --hard HEAD^
+
+    - name: Checkout new commit
+      uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
+      with:
+        path: new
+    - name: Install circular import checker
+      uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
+      with:
+        repository: haampie/circular-import-fighter
+        ref: b5d6ce9be35f602cca7d5a6aa0259fca10639cca
+        path: circular-import-fighter
+    - name: Install dependencies
+      working-directory: circular-import-fighter
+      run: make -j dependencies
+    - name: Problematic imports before
+      working-directory: circular-import-fighter
+      run: make SPACK_ROOT=../old SUFFIX=.old
+    - name: Problematic imports after
+      working-directory: circular-import-fighter
+      run: make SPACK_ROOT=../new SUFFIX=.new
+    - name: Compare import cycles
+      working-directory: circular-import-fighter
+      run: |
+        edges_before="$(head -n1 solution.old)"
+        edges_after="$(head -n1 solution.new)"
+        if [ "$edges_after" -gt "$edges_before" ]; then
+          printf '\033[1;31mImport check failed: %s imports need to be deleted, ' "$edges_after"
+          printf 'previously this was %s\033[0m\n' "$edges_before"
+          printf 'Compare \033[1;97m"Problematic imports before"\033[0m and '
+          printf '\033[1;97m"Problematic imports after"\033[0m.\n'
+          exit 1
+        else
+          printf '\033[1;32mImport check passed: %s <= %s\033[0m\n' "$edges_after" "$edges_before"
+        fi

   # Further style checks from pylint
   pylint:
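Note on the "Compare import cycles" step added above: the shell gate reduces to reading the first line of each solver output and failing only when the edge count grows. A minimal Python sketch of the same logic, assuming `solution.old` and `solution.new` each begin with an integer edge count (that is what the `head -n1` in the workflow extracts):

```python
from pathlib import Path


def import_check(old: Path, new: Path) -> bool:
    """Pass iff the number of problematic import edges did not grow."""
    edges_before = int(old.read_text().splitlines()[0])
    edges_after = int(new.read_text().splitlines()[0])
    if edges_after > edges_before:
        print(f"Import check failed: {edges_after} imports need to be deleted, "
              f"previously this was {edges_before}")
        return False
    print(f"Import check passed: {edges_after} <= {edges_before}")
    return True


# usage with the files produced by the workflow's SUFFIX variables:
# import_check(Path("solution.old"), Path("solution.new"))
```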
.gitignore (1 change)

@@ -201,6 +201,7 @@ tramp

 # Org-mode
 .org-id-locations
+*_archive

 # flymake-mode
 *_flymake.*
@@ -43,28 +43,6 @@ concretizer:
     # (e.g. py-setuptools, cmake etc.)
     # "full" (experimental): allows separation of the entire build-tool stack (e.g. the entire "cmake" subDAG)
     strategy: minimal
-  # Maximum number of duplicates in a DAG, when using a strategy that allows duplicates. "default" is the
-  # number used if there isn't a more specific alternative
-  max_dupes:
-    default: 1
-    # Virtuals
-    c: 2
-    cxx: 2
-    fortran: 1
-    # Regular packages
-    cmake: 2
-    gmake: 2
-    python: 2
-    python-venv: 2
-    py-cython: 2
-    py-flit-core: 2
-    py-pip: 2
-    py-setuptools: 2
-    py-wheel: 2
-    xcb-proto: 2
-    # Compilers
-    gcc: 2
-    llvm: 2
   # Option to specify compatibility between operating systems for reuse of compilers and packages
   # Specified as a key: [list] where the key is the os that is being targeted, and the list contains the OS's
   # it can reuse. Note this is a directional compatibility so mutual compatibility between two OS's
@@ -85,7 +63,3 @@ concretizer:
   # Setting this to false yields unreproducible results, so we advise to use that value only
   # for debugging purposes (e.g. check which constraints can help Spack concretize faster).
   error_on_timeout: true
-
-  # Static analysis may reduce the concretization time by generating smaller ASP problems, in
-  # cases where there are requirements that prevent part of the search space to be explored.
-  static_analysis: false
@@ -1,5 +1,5 @@
 config:
   locks: false
   build_stage::
-    - '$user_cache_path/stage'
+    - '$spack/.staging'
   stage_name: '{name}-{version}-{hash:7}'
@@ -1761,24 +1761,19 @@ Verifying installations
 The ``spack verify`` command can be used to verify the validity of
 Spack-installed packages any time after installation.

-^^^^^^^^^^^^^^^^^^^^^^^^^
-``spack verify manifest``
-^^^^^^^^^^^^^^^^^^^^^^^^^
-
 At installation time, Spack creates a manifest of every file in the
 installation prefix. For links, Spack tracks the mode, ownership, and
 destination. For directories, Spack tracks the mode, and
 ownership. For files, Spack tracks the mode, ownership, modification
-time, hash, and size. The ``spack verify manifest`` command will check,
-for every file in each package, whether any of those attributes have
-changed. It will also check for newly added files or deleted files from
-the installation prefix. Spack can either check all installed packages
+time, hash, and size. The Spack verify command will check, for every
+file in each package, whether any of those attributes have changed. It
+will also check for newly added files or deleted files from the
+installation prefix. Spack can either check all installed packages
 using the `-a,--all` or accept specs listed on the command line to
 verify.

-The ``spack verify manifest`` command can also verify for individual files
-that they haven't been altered since installation time. If the given file
+The ``spack verify`` command can also verify for individual files that
+they haven't been altered since installation time. If the given file
 is not in a Spack installation prefix, Spack will report that it is
 not owned by any package. To check individual files instead of specs,
 use the ``-f,--files`` option.
@@ -1793,22 +1788,6 @@ check only local packages (as opposed to those used transparently from
 ``upstream`` spack instances) and the ``-j,--json`` option to output
 machine-readable json data for any errors.

-^^^^^^^^^^^^^^^^^^^^^^^^^^
-``spack verify libraries``
-^^^^^^^^^^^^^^^^^^^^^^^^^^
-
-The ``spack verify libraries`` command can be used to verify that packages
-do not have accidental system dependencies. This command scans the install
-prefixes of packages for executables and shared libraries, and resolves
-their needed libraries in their RPATHs. When needed libraries cannot be
-located, an error is reported. This typically indicates that a package
-was linked against a system library, instead of a library provided by
-a Spack package.
-
-This verification can also be enabled as a post-install hook by setting
-``config:shared_linking:missing_library_policy`` to ``error`` or ``warn``
-in :ref:`config.yaml <config-yaml>`.
-
 -----------------------
 Filesystem requirements
 -----------------------
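For orientation on the manifest check described in the hunks above: it boils down to comparing stored per-file attributes against the current filesystem. A rough, self-contained sketch of the idea (this is not Spack's implementation; the attribute names simply follow the paragraph quoted above, and SHA-256 stands in for whatever hash the manifest actually records):

```python
import hashlib
import os


def file_record(path: str) -> dict:
    """Collect the attributes the docs say the manifest tracks for files:
    mode, ownership, modification time, hash, and size."""
    st = os.stat(path, follow_symlinks=False)
    with open(path, "rb") as f:
        digest = hashlib.sha256(f.read()).hexdigest()
    return {
        "mode": st.st_mode,
        "owner": (st.st_uid, st.st_gid),
        "mtime": st.st_mtime,
        "hash": digest,
        "size": st.st_size,
    }


def changed_attributes(path: str, manifest_entry: dict) -> list:
    """Names of the attributes that differ from the recorded manifest entry."""
    current = file_record(path)
    return [key for key, value in manifest_entry.items() if current.get(key) != value]
```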
@@ -272,9 +272,9 @@ often lists dependencies and the flags needed to locate them. The
 "environment variables" section lists environment variables that the
 build system uses to pass flags to the compiler and linker.

-^^^^^^^^^^^^^^^^^^^^^^^^^
-Adding flags to configure
-^^^^^^^^^^^^^^^^^^^^^^^^^
+^^^^^^^^^^^^^^^^^^^^^^^^^^
+Addings flags to configure
+^^^^^^^^^^^^^^^^^^^^^^^^^^

 For most of the flags you encounter, you will want a variant to
 optionally enable/disable them. You can then optionally pass these
@@ -285,7 +285,7 @@ function like so:

    def configure_args(self):
        args = []
-       ...
+
        if self.spec.satisfies("+mpi"):
            args.append("--enable-mpi")
        else:
@@ -299,10 +299,7 @@ Alternatively, you can use the :ref:`enable_or_disable <autotools_enable_or_dis
 .. code-block:: python

    def configure_args(self):
-       args = []
-       ...
-       args.extend(self.enable_or_disable("mpi"))
-
-       return args
+       return [self.enable_or_disable("mpi")]


 Note that we are explicitly disabling MPI support if it is not
@@ -347,14 +344,7 @@ typically used to enable or disable some feature within the package.
        default=False,
        description="Memchecker support for debugging [degrades performance]"
    )
-   ...
-
-   def configure_args(self):
-       args = []
-       ...
-       args.extend(self.enable_or_disable("memchecker"))
-
-       return args
+   config_args.extend(self.enable_or_disable("memchecker"))

 In this example, specifying the variant ``+memchecker`` will generate
 the following configuration options:
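The ``enable_or_disable`` helper referenced throughout these hunks maps a boolean variant onto the matching configure flag. A minimal sketch of that mapping for the simple boolean case (the real helper on ``AutotoolsPackage`` also handles multi-valued variants and custom activation values, which this toy version does not attempt):

```python
def enable_or_disable(variant: str, enabled: bool) -> str:
    """Mimic the flag mapping: a boolean variant becomes
    --enable-<name> or --disable-<name>."""
    return f"--{'enable' if enabled else 'disable'}-{variant}"


# A spec with +mpi ~memchecker would contribute:
print(enable_or_disable("mpi", True))          # --enable-mpi
print(enable_or_disable("memchecker", False))  # --disable-memchecker
```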
@@ -223,10 +223,6 @@ def setup(sphinx):
     ("py:class", "spack.compiler.CompilerCache"),
     # TypeVar that is not handled correctly
     ("py:class", "llnl.util.lang.T"),
-    ("py:class", "llnl.util.lang.KT"),
-    ("py:class", "llnl.util.lang.VT"),
-    ("py:obj", "llnl.util.lang.KT"),
-    ("py:obj", "llnl.util.lang.VT"),
 ]

 # The reST default role (used for this markup: `text`) to use for all documents.
@@ -125,8 +125,6 @@ are stored in ``$spack/var/spack/cache``. These are stored indefinitely
 by default. Can be purged with :ref:`spack clean --downloads
 <cmd-spack-clean>`.

-.. _Misc Cache:
-
 --------------------
 ``misc_cache``
 --------------------
@@ -336,52 +334,3 @@ create a new alias called ``inst`` that will always call ``install -v``:

   aliases:
     inst: install -v

--------------------------------
-``concretization_cache:enable``
--------------------------------
-
-When set to ``true``, Spack will utilize a cache of solver outputs from
-successful concretization runs. When enabled, Spack will check the concretization
-cache prior to running the solver. If a previous request to solve a given
-problem is present in the cache, Spack will load the concrete specs and other
-solver data from the cache rather than running the solver. Specs not previously
-concretized will be added to the cache on a successful solve. The cache additionally
-holds solver statistics, so commands like ``spack solve`` will still return information
-about the run that produced a given solver result.
-
-This cache is a subcache of the :ref:`Misc Cache` and as such will be cleaned when the Misc
-Cache is cleaned.
-
-When ``false`` or omitted, all concretization requests will be performed from scratch.
-
-----------------------------
-``concretization_cache:url``
-----------------------------
-
-Path to the location where Spack will root the concretization cache. Currently this only supports
-paths on the local filesystem.
-
-Default location is under the :ref:`Misc Cache` at: ``$misc_cache/concretization``
-
-------------------------------------
-``concretization_cache:entry_limit``
-------------------------------------
-
-Sets a limit on the number of concretization results that Spack will cache. The limit is evaluated
-after each concretization run; if Spack has stored more results than the limit allows, the
-oldest concretization results are pruned until 10% of the limit has been removed.
-
-Setting this value to 0 disables the automatic pruning. It is expected users will be
-responsible for maintaining this cache.
-
------------------------------------
-``concretization_cache:size_limit``
------------------------------------
-
-Sets a limit on the size of the concretization cache in bytes. The limit is evaluated
-after each concretization run; if Spack has stored more results than the limit allows, the
-oldest concretization results are pruned until 10% of the limit has been removed.
-
-Setting this value to 0 disables the automatic pruning. It is expected users will be
-responsible for maintaining this cache.
@@ -14,7 +14,6 @@ case you want to skip directly to specific docs:
 * :ref:`compilers.yaml <compiler-config>`
 * :ref:`concretizer.yaml <concretizer-options>`
 * :ref:`config.yaml <config-yaml>`
-* :ref:`include.yaml <include-yaml>`
 * :ref:`mirrors.yaml <mirrors>`
 * :ref:`modules.yaml <modules>`
 * :ref:`packages.yaml <packages-config>`
@@ -361,6 +361,7 @@ and the tags associated with the class of runners to build on.
 * ``.linux_neoverse_n1``
 * ``.linux_neoverse_v1``
 * ``.linux_neoverse_v2``
+* ``.linux_power``
 * ``.linux_skylake``
 * ``.linux_x86_64``
 * ``.linux_x86_64_v4``
@@ -112,19 +112,6 @@ the original but may concretize differently in the presence of different
 explicit or default configuration settings (e.g., a different version of
 Spack or for a different user account).

-Environments created from a manifest will copy any included configs
-from relative paths inside the environment. Relative paths from
-outside the environment will cause errors, and absolute paths will be
-kept absolute. For example, if ``spack.yaml`` includes:
-
-.. code-block:: yaml
-
-   spack:
-     include: [./config.yaml]
-
-then the created environment will have its own copy of the file
-``config.yaml`` copied from the location in the original environment.
-
 Create an environment from a ``spack.lock`` file using:

 .. code-block:: console
@@ -173,7 +160,7 @@ accepts. If an environment already exists then spack will simply activate it
 and ignore the create-specific flags.

 .. code-block:: console

    $ spack env activate --create -p myenv
    # ...
    # [creates if myenv does not exist yet]
@@ -437,8 +424,8 @@ Developing Packages in a Spack Environment

 The ``spack develop`` command allows one to develop Spack packages in
 an environment. It requires a spec containing a concrete version, and
 will configure Spack to install the package from local source.
 If a version is not provided from the command line interface then spack
 will automatically pick the highest version the package has defined.
 This means any infinity versions (``develop``, ``main``, ``stable``) will be
 preferred in this selection process.
@@ -448,22 +435,15 @@ set, and Spack will ensure the package and its dependents are rebuilt
 any time the environment is installed if the package's local source
 code has been modified. Spack's native implementation to check for modifications
 is to check if ``mtime`` is newer than the installation.
 A custom check can be created by overriding the ``detect_dev_src_change`` method
 in your package class. This is particularly useful for projects using custom spack repo's
 to drive development and want to optimize performance.

 Spack ensures that all instances of a
 developed package in the environment are concretized to match the
 version (and other constraints) passed as the spec argument to the
 ``spack develop`` command.

-When working deep in the graph it is often desirable to have multiple specs marked
-as ``develop`` so you don't have to restage and/or do full rebuilds each time you
-call ``spack install``. The ``--recursive`` flag can be used in these scenarios
-to ensure that all the dependents of the initial spec you provide are also marked
-as develop specs. The ``--recursive`` flag requires a pre-concretized environment
-so the graph can be traversed from the supplied spec all the way to the root specs.
-
 For packages with ``git`` attributes, git branches, tags, and commits can
 also be used as valid concrete versions (see :ref:`version-specifier`).
 This means that for a package ``foo``, ``spack develop foo@git.main`` will clone
@@ -473,7 +453,7 @@ Further development on ``foo`` can be tested by re-installing the environment,
 and eventually committed and pushed to the upstream git repo.

 If the package being developed supports out-of-source builds then users can use the
 ``--build_directory`` flag to control the location and name of the build directory.
 This is a shortcut to set the ``package_attributes:build_directory`` in the
 ``packages`` configuration (see :ref:`assigning-package-attributes`).
 The supplied location will become the build-directory for that package in all future builds.
@@ -677,45 +657,24 @@ This configuration sets the default compiler for all packages to
 Included configurations
 ^^^^^^^^^^^^^^^^^^^^^^^

-Spack environments allow an ``include`` heading in their yaml schema.
-This heading pulls in external configuration files and applies them to
-the environment.
+Spack environments allow an ``include`` heading in their yaml
+schema. This heading pulls in external configuration files and applies
+them to the environment.

 .. code-block:: yaml

    spack:
      include:
-     - environment/relative/path/to/config.yaml
+     - relative/path/to/config.yaml
      - https://github.com/path/to/raw/config/compilers.yaml
      - /absolute/path/to/packages.yaml
-     - path: /path/to/$os/$target/environment
-       optional: true
-     - path: /path/to/os-specific/config-dir
-       when: os == "ventura"

-Included configuration files are required *unless* they are explicitly optional
-or the entry's condition evaluates to ``false``. Optional includes are specified
-with the ``optional`` clause and conditional with the ``when`` clause. (See
-:ref:`include-yaml` for more information on optional and conditional entries.)
-
-Files are listed using paths to individual files or directories containing them.
-Path entries may be absolute or relative to the environment or specified as
-URLs. URLs to individual files need link to the **raw** form of the file's
-contents (e.g., `GitHub
-<https://docs.github.com/en/repositories/working-with-files/using-files/viewing-and-understanding-files#viewing-or-copying-the-raw-file-content>`_
-or `GitLab
-<https://docs.gitlab.com/ee/api/repository_files.html#get-raw-file-from-repository>`_).
-Only the ``file``, ``ftp``, ``http`` and ``https`` protocols (or schemes) are
-supported. Spack-specific, environment and user path variables can be used.
-(See :ref:`config-file-variables` for more information.)
-
-.. warning::
-
-   Recursive includes are not currently processed in a breadth-first manner
-   so the value of a configuration option that is altered by multiple included
-   files may not be what you expect. This will be addressed in a future
-   update.
+Environments can include files or URLs. File paths can be relative or
+absolute. URLs include the path to the text for individual files or
+can be the path to a directory containing configuration files.
+Spack supports ``file``, ``http``, ``https`` and ``ftp`` protocols (or
+schemes). Spack-specific, environment and user path variables may be
+used in these paths. See :ref:`config-file-variables` for more information.

 ^^^^^^^^^^^^^^^^^^^^^^^^
 Configuration precedence
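On the ``detect_dev_src_change`` hook mentioned in the develop-docs hunks above: the docs say a custom check can replace the default ``mtime`` comparison. A hedged sketch of the kind of override described, swapping ``mtime`` for a content hash; the method name comes from the quoted docs, while the attributes used here (``dev_src_path``, ``installed_src_digest``) are hypothetical stand-ins, not real Spack package attributes:

```python
import hashlib
import pathlib


class MyPackage:  # stand-in for a spack package class, not a real base class
    dev_src_path = "path/to/dev/source"  # hypothetical: where the dev sources live
    installed_src_digest = ""            # hypothetical: digest stored at install time

    def detect_dev_src_change(self) -> bool:
        """Return True when the local dev source differs from the installed build."""
        digest = hashlib.sha256()
        for f in sorted(pathlib.Path(self.dev_src_path).rglob("*")):
            if f.is_file():
                digest.update(f.read_bytes())
        return digest.hexdigest() != self.installed_src_digest
```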
@@ -30,7 +30,7 @@ than always choosing the latest versions or default variants.

 .. note::

-   As a rule of thumb: requirements + constraints > strong preferences > reuse > preferences > defaults.
+   As a rule of thumb: requirements + constraints > reuse > preferences > defaults.

 The following set of criteria (from lowest to highest precedence) explain
 common cases where concretization output may seem surprising at first.
@@ -56,19 +56,7 @@ common cases where concretization output may seem surprising at first.
      concretizer:
        reuse: dependencies  # other options are 'true' and 'false'

-3. :ref:`Strong preferences <package-strong-preferences>` configured in ``packages.yaml``
-   are higher priority than reuse, and can be used to strongly prefer a specific version
-   or variant, without erroring out if it's not possible. Strong preferences are specified
-   as follows:
-
-   .. code-block:: yaml
-
-      packages:
-        foo:
-          prefer:
-          - "@1.1: ~mpi"
-
-4. :ref:`Package requirements <package-requirements>` configured in ``packages.yaml``,
+3. :ref:`Package requirements <package-requirements>` configured in ``packages.yaml``,
    and constraints from the command line as well as ``package.py`` files override all
    of the above. Requirements are specified as follows:
@@ -78,8 +66,6 @@ common cases where concretization output may seem surprising at first.
       foo:
         require:
         - "@1.2: +mpi"
-      conflicts:
-      - "@1.4"

 Requirements and constraints restrict the set of possible solutions, while reuse
 behavior and preferences influence what an optimal solution looks like.
@@ -1,51 +0,0 @@
-.. Copyright Spack Project Developers. See COPYRIGHT file for details.
-
-   SPDX-License-Identifier: (Apache-2.0 OR MIT)
-
-.. _include-yaml:
-
-===============================
-Include Settings (include.yaml)
-===============================
-
-Spack allows you to include configuration files through ``include.yaml``.
-Using the ``include:`` heading results in pulling in external configuration
-information to be used by any Spack command.
-
-Included configuration files are required *unless* they are explicitly optional
-or the entry's condition evaluates to ``false``. Optional includes are specified
-with the ``optional`` clause and conditional with the ``when`` clause. For
-example,
-
-.. code-block:: yaml
-
-   include:
-   - /path/to/a/required/config.yaml
-   - path: /path/to/$os/$target/config
-     optional: true
-   - path: /path/to/os-specific/config-dir
-     when: os == "ventura"
-
-shows all three. The first entry, ``/path/to/a/required/config.yaml``,
-indicates that the included ``config.yaml`` file is required (so must exist).
-Use of ``optional: true`` for ``/path/to/$os/$target/config`` means
-the path is only included if it exists. The condition ``os == "ventura"``
-in the ``when`` clause for ``/path/to/os-specific/config-dir`` means the
-path is only included when the operating system (``os``) is ``ventura``.
-
-The same conditions and variables in `Spec List References
-<https://spack.readthedocs.io/en/latest/environments.html#spec-list-references>`_
-can be used for conditional activation in the ``when`` clauses.
-
-Included files can be specified by path or by their parent directory.
-Paths may be absolute, relative (to the configuration file including the path),
-or specified as URLs. Only the ``file``, ``ftp``, ``http`` and ``https`` protocols (or
-schemes) are supported. Spack-specific, environment and user path variables
-can be used. (See :ref:`config-file-variables` for more information.)
-
-.. warning::
-
-   Recursive includes are not currently processed in a breadth-first manner
-   so the value of a configuration option that is altered by multiple included
-   files may not be what you expect. This will be addressed in a future
-   update.
@@ -71,7 +71,6 @@ or refer to the full manual below.

    configuration
    config_yaml
-   include_yaml
    packages_yaml
    build_settings
    environments
@@ -486,8 +486,6 @@ present. For instance with a configuration like:

 you will use ``mvapich2~cuda %gcc`` as an ``mpi`` provider.

-.. _package-strong-preferences:
-
 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 Conflicts and strong preferences
 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -820,69 +820,6 @@ presence of a ``SPACK_CDASH_AUTH_TOKEN`` environment variable during the
 build group on CDash called "Release Testing" (that group will be created if
 it didn't already exist).

-.. _ci_artifacts:
-
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-CI Artifacts Directory Layout
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-
-When running the CI build using the command ``spack ci rebuild`` a number of directories are created for
-storing data generated during the CI job. The default root directory for artifacts is ``job_scratch_root``.
-This can be overridden by passing the argument ``--artifacts-root`` to the ``spack ci generate`` command
-or by setting the ``SPACK_ARTIFACTS_ROOT`` environment variable in the build job scripts.
-
-The top level directories under the artifact root are ``concrete_environment``, ``logs``, ``reproduction``,
-``tests``, and ``user_data``. Spack does not restrict what is written to any of these directories nor does
-it require user specified files be written to any specific directory.
-
-------------------------
-``concrete_environment``
-------------------------
-
-The directory ``concrete_environment`` is used to communicate the ci generate processed ``spack.yaml`` and
-the concrete ``spack.lock`` for the CI environment.
-
---------
-``logs``
---------
-
-The directory ``logs`` contains the spack build log, ``spack-build-out.txt``, and the spack build environment
-modification file, ``spack-build-mod-env.txt``. Additionally all files specified by the package's ``Builder``
-property ``archive_files`` are also copied here (e.g. ``CMakeCache.txt`` in ``CMakeBuilder``).
-
-----------------
-``reproduction``
-----------------
-
-The directory ``reproduction`` is used to store the files needed by the ``spack reproduce-build`` command.
-This includes ``repro.json``, copies of all of the files in ``concrete_environment``, the concrete spec
-JSON file for the current spec being built, and all of the files written in the artifacts root directory.
-
-The ``repro.json`` file is not versioned and is only designed to work with the version of spack CI was run with.
-An example of what a ``repro.json`` may look like is here.
-
-.. code:: json
-
-   {
-     "job_name": "adios2@2.9.2 /feaevuj %gcc@11.4.0 arch=linux-ubuntu20.04-x86_64_v3 E4S ROCm External",
-     "job_spec_json": "adios2.json",
-     "ci_project_dir": "/builds/spack/spack"
-   }
-
----------
-``tests``
----------
-
-The directory ``tests`` is used to store output from running ``spack test <job spec>``. This may or may not have
-data in it depending on the package that was built and the availability of tests.
-
--------------
-``user_data``
--------------
-
-The directory ``user_data`` is used to store everything else that shouldn't be copied to the ``reproduction`` directory.
-Users may use this to store additional logs or metrics or other types of files generated by the build job.
-
 -------------------------------------
 Using a custom spack in your pipeline
 -------------------------------------
@@ -1,13 +1,13 @@
-sphinx==8.2.3
+sphinx==8.1.3
 sphinxcontrib-programoutput==0.18
 sphinx_design==0.6.1
 sphinx-rtd-theme==3.0.2
-python-levenshtein==0.27.1
+python-levenshtein==0.26.1
 docutils==0.21.2
 pygments==2.19.1
 urllib3==2.3.0
-pytest==8.3.5
+pytest==8.3.4
-isort==6.0.1
+isort==5.13.2
-black==25.1.0
+black==24.10.0
-flake8==7.1.2
+flake8==7.1.1
 mypy==1.11.1
@@ -7,7 +7,6 @@
 import fnmatch
 import glob
 import hashlib
-import io
 import itertools
 import numbers
 import os
@@ -21,7 +20,6 @@
 from contextlib import contextmanager
 from itertools import accumulate
 from typing import (
-    IO,
     Callable,
     Deque,
     Dict,
@@ -670,7 +668,7 @@ def copy(src, dest, _permissions=False):
         _permissions (bool): for internal use only

     Raises:
-        OSError: if *src* does not match any files or directories
+        IOError: if *src* does not match any files or directories
         ValueError: if *src* matches multiple files but *dest* is
             not a directory
     """
@@ -681,7 +679,7 @@ def copy(src, dest, _permissions=False):

     files = glob.glob(src)
     if not files:
-        raise OSError("No such file or directory: '{0}'".format(src))
+        raise IOError("No such file or directory: '{0}'".format(src))
     if len(files) > 1 and not os.path.isdir(dest):
         raise ValueError(
             "'{0}' matches multiple files but '{1}' is not a directory".format(src, dest)
@@ -712,7 +710,7 @@ def install(src, dest):
         dest (str): the destination file or directory

     Raises:
-        OSError: if *src* does not match any files or directories
+        IOError: if *src* does not match any files or directories
         ValueError: if *src* matches multiple files but *dest* is
             not a directory
     """
@@ -750,7 +748,7 @@ def copy_tree(
         _permissions (bool): for internal use only

     Raises:
-        OSError: if *src* does not match any files or directories
+        IOError: if *src* does not match any files or directories
         ValueError: if *src* is a parent directory of *dest*
     """
     if _permissions:
@@ -764,7 +762,7 @@ def copy_tree(

     files = glob.glob(src)
     if not files:
-        raise OSError("No such file or directory: '{0}'".format(src))
+        raise IOError("No such file or directory: '{0}'".format(src))

     # For Windows hard-links and junctions, the source path must exist to make a symlink. Add
     # all symlinks to this list while traversing the tree, then when finished, make all
@@ -845,7 +843,7 @@ def install_tree(src, dest, symlinks=True, ignore=None):
         ignore (typing.Callable): function indicating which files to ignore

     Raises:
-        OSError: if *src* does not match any files or directories
+        IOError: if *src* does not match any files or directories
         ValueError: if *src* is a parent directory of *dest*
     """
     copy_tree(src, dest, symlinks=symlinks, ignore=ignore, _permissions=True)
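The ``OSError``/``IOError`` swaps in the hunks above are cosmetic at runtime: since Python 3.3, ``IOError`` has been an alias of ``OSError``, so the docstring and ``raise`` changes do not alter behavior. A quick check:

```python
# IOError is merely an alias of OSError in Python 3 (since 3.3), so raising or
# documenting one name versus the other changes nothing at runtime.
assert IOError is OSError

try:
    raise IOError("No such file or directory: 'missing*'")
except OSError as exc:  # catches the IOError above: it is the same class
    print(type(exc).__name__)  # -> "OSError"
```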
@@ -2456,69 +2454,26 @@ class WindowsSimulatedRPath:
     and vis versa.
     """

-    def __init__(
-        self,
-        package,
-        base_modification_prefix: Optional[Union[str, pathlib.Path]] = None,
-        link_install_prefix: bool = True,
-    ):
+    def __init__(self, package, link_install_prefix=True):
         """
         Args:
             package (spack.package_base.PackageBase): Package requiring links
-            base_modification_prefix (str|pathlib.Path): Path representation indicating
-                the root directory in which to establish the simulated rpath, ie where the
-                symlinks that comprise the "rpath" behavior will be installed.
-
-                Note: This is a mutually exclusive option with `link_install_prefix` using
-                both is an error.
-
-                Default: None
             link_install_prefix (bool): Link against package's own install or stage root.
                 Packages that run their own executables during build and require rpaths to
-                the build directory during build time require this option.
-
-                Default: install
+                the build directory during build time require this option. Default: install
                 root
-
-                Note: This is a mutually exclusive option with `base_modification_prefix`, using
-                both is an error.
         """
         self.pkg = package
-        self._addl_rpaths: set[str] = set()
-        if link_install_prefix and base_modification_prefix:
-            raise RuntimeError(
-                "Invalid combination of arguments given to WindowsSimulated RPath.\n"
-                "Select either `link_install_prefix` to create an install prefix rpath"
-                " or specify a `base_modification_prefix` for any other link type. "
-                "Specifying both arguments is invalid."
-            )
-        if not (link_install_prefix or base_modification_prefix):
-            raise RuntimeError(
-                "Insufficient arguments given to WindowsSimulatedRpath.\n"
-                "WindowsSimulatedRPath requires one of link_install_prefix"
-                " or base_modification_prefix to be specified."
-                " Neither was provided."
-            )
-
+        self._addl_rpaths = set()
         self.link_install_prefix = link_install_prefix
-        if base_modification_prefix:
-            self.base_modification_prefix = pathlib.Path(base_modification_prefix)
-        else:
-            self.base_modification_prefix = pathlib.Path(self.pkg.prefix)
-        self._additional_library_dependents: set[pathlib.Path] = set()
-        if not self.link_install_prefix:
-            tty.debug(f"Generating rpath for non install context: {base_modification_prefix}")
+        self._additional_library_dependents = set()

     @property
     def library_dependents(self):
         """
         Set of directories where package binaries/libraries are located.
         """
-        base_pths = set()
-        if self.link_install_prefix:
-            base_pths.add(pathlib.Path(self.pkg.prefix.bin))
-        base_pths |= self._additional_library_dependents
-        return base_pths
+        return set([pathlib.Path(self.pkg.prefix.bin)]) | self._additional_library_dependents

     def add_library_dependent(self, *dest):
         """
@@ -2534,12 +2489,6 @@ def add_library_dependent(self, *dest):
|
|||||||
new_pth = pathlib.Path(pth).parent
|
new_pth = pathlib.Path(pth).parent
|
||||||
else:
|
else:
|
||||||
new_pth = pathlib.Path(pth)
|
new_pth = pathlib.Path(pth)
|
||||||
path_is_in_prefix = new_pth.is_relative_to(self.base_modification_prefix)
|
|
||||||
if not path_is_in_prefix:
|
|
||||||
raise RuntimeError(
|
|
||||||
f"Attempting to generate rpath symlink out of rpath context:\
|
|
||||||
{str(self.base_modification_prefix)}"
|
|
||||||
)
|
|
||||||
self._additional_library_dependents.add(new_pth)
|
self._additional_library_dependents.add(new_pth)
|
||||||
|
|
||||||
@property
|
@property
|
||||||
@@ -2628,33 +2577,6 @@ def establish_link(self):
|
|||||||
self._link(library, lib_dir)
|
self._link(library, lib_dir)
|
||||||
|
|
||||||
|
|
||||||
def make_package_test_rpath(pkg, test_dir: Union[str, pathlib.Path]):
|
|
||||||
"""Establishes a temp Windows simulated rpath for the pkg in the testing directory
|
|
||||||
so an executable can test the libraries/executables with proper access
|
|
||||||
to dependent dlls
|
|
||||||
|
|
||||||
Note: this is a no-op on all other platforms besides Windows
|
|
||||||
|
|
||||||
Args:
|
|
||||||
pkg (spack.package_base.PackageBase): the package for which the rpath should be computed
|
|
||||||
test_dir: the testing directory in which we should construct an rpath
|
|
||||||
"""
|
|
||||||
# link_install_prefix as false ensures we're not linking into the install prefix
|
|
||||||
mini_rpath = WindowsSimulatedRPath(pkg, link_install_prefix=False)
|
|
||||||
# add the testing directory as a location to install rpath symlinks
|
|
||||||
mini_rpath.add_library_dependent(test_dir)
|
|
||||||
|
|
||||||
# check for whether build_directory is available, if not
|
|
||||||
# assume the stage root is the build dir
|
|
||||||
build_dir_attr = getattr(pkg, "build_directory", None)
|
|
||||||
build_directory = build_dir_attr if build_dir_attr else pkg.stage.path
|
|
||||||
# add the build dir & build dir bin
|
|
||||||
mini_rpath.add_rpath(os.path.join(build_directory, "bin"))
|
|
||||||
mini_rpath.add_rpath(os.path.join(build_directory))
|
|
||||||
# construct rpath
|
|
||||||
mini_rpath.establish_link()
|
|
||||||
|
|
||||||
|
|
||||||
@system_path_filter
|
@system_path_filter
|
||||||
@memoized
|
@memoized
|
||||||
def can_access_dir(path):
|
def can_access_dir(path):
|
||||||
@@ -2883,20 +2805,6 @@ def keep_modification_time(*filenames):
|
|||||||
os.utime(f, (os.path.getatime(f), mtime))
|
os.utime(f, (os.path.getatime(f), mtime))
|
||||||
|
|
||||||
|
|
||||||
@contextmanager
|
|
||||||
def temporary_file_position(stream):
|
|
||||||
orig_pos = stream.tell()
|
|
||||||
yield
|
|
||||||
stream.seek(orig_pos)
|
|
||||||
|
|
||||||
|
|
||||||
@contextmanager
|
|
||||||
def current_file_position(stream: IO[str], loc: int, relative_to=io.SEEK_CUR):
|
|
||||||
with temporary_file_position(stream):
|
|
||||||
stream.seek(loc, relative_to)
|
|
||||||
yield
|
|
||||||
|
|
||||||
|
|
||||||
@contextmanager
|
@contextmanager
|
||||||
def temporary_dir(
|
def temporary_dir(
|
||||||
suffix: Optional[str] = None, prefix: Optional[str] = None, dir: Optional[str] = None
|
suffix: Optional[str] = None, prefix: Optional[str] = None, dir: Optional[str] = None
|
||||||
|
|||||||
@@ -11,11 +11,10 @@
|
|||||||
import re
|
import re
|
||||||
import sys
|
import sys
|
||||||
import traceback
|
import traceback
|
||||||
import types
|
|
||||||
import typing
|
import typing
|
||||||
import warnings
|
import warnings
|
||||||
from datetime import datetime, timedelta
|
from datetime import datetime, timedelta
|
||||||
from typing import Callable, Dict, Iterable, List, Mapping, Optional, Tuple, TypeVar
|
from typing import Callable, Dict, Iterable, List, Tuple, TypeVar
|
||||||
|
|
||||||
# Ignore emacs backups when listing modules
|
# Ignore emacs backups when listing modules
|
||||||
ignore_modules = r"^\.#|~$"
|
ignore_modules = r"^\.#|~$"
|
||||||
@@ -708,24 +707,14 @@ def __init__(self, wrapped_object):
|
|||||||
|
|
||||||
|
|
||||||
class Singleton:
|
class Singleton:
|
||||||
"""Wrapper for lazily initialized singleton objects."""
|
"""Simple wrapper for lazily initialized singleton objects."""
|
||||||
|
|
||||||
def __init__(self, factory: Callable[[], object]):
|
def __init__(self, factory):
|
||||||
"""Create a new singleton to be inited with the factory function.
|
"""Create a new singleton to be inited with the factory function.
|
||||||
|
|
||||||
Most factories will simply create the object to be initialized and
|
|
||||||
return it.
|
|
||||||
|
|
||||||
In some cases, e.g. when bootstrapping some global state, the singleton
|
|
||||||
may need to be initialized incrementally. If the factory returns a generator
|
|
||||||
instead of a regular object, the singleton will assign each result yielded by
|
|
||||||
the generator to the singleton instance. This allows methods called by
|
|
||||||
the factory in later stages to refer back to the singleton.
|
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
factory (function): function taking no arguments that creates the
|
factory (function): function taking no arguments that
|
||||||
singleton instance.
|
creates the singleton instance.
|
||||||
|
|
||||||
"""
|
"""
|
||||||
self.factory = factory
|
self.factory = factory
|
||||||
self._instance = None
|
self._instance = None
|
||||||
@@ -733,16 +722,7 @@ def __init__(self, factory: Callable[[], object]):
|
|||||||
@property
|
@property
|
||||||
def instance(self):
|
def instance(self):
|
||||||
if self._instance is None:
|
if self._instance is None:
|
||||||
instance = self.factory()
|
self._instance = self.factory()
|
||||||
|
|
||||||
if isinstance(instance, types.GeneratorType):
|
|
||||||
# if it's a generator, assign every value
|
|
||||||
for value in instance:
|
|
||||||
self._instance = value
|
|
||||||
else:
|
|
||||||
# if not, just assign the result like a normal singleton
|
|
||||||
self._instance = instance
|
|
||||||
|
|
||||||
return self._instance
|
return self._instance
|
||||||
|
|
||||||
def __getattr__(self, name):
|
def __getattr__(self, name):
|
||||||
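
For context: the generator branch removed in the hunk above lets a factory initialize a singleton incrementally. A hedged sketch of the two factory styles it distinguishes; the factory names and payloads below are illustrative, not from Spack:

# plain_factory is assigned to _instance directly; staged_factory relies on the
# removed branch, which re-assigns _instance at every yield so later stages of
# the factory can already refer back to the partially built singleton.
from llnl.util.lang import Singleton

def plain_factory():
    return {"verbose": False}

def staged_factory():
    state = {}
    yield state             # _instance becomes `state` here
    state["stage2"] = True  # a later stage can consult the singleton
    yield state

CONFIG = Singleton(plain_factory)   # behaves the same on both sides of this diff
STAGED = Singleton(staged_factory)  # only meaningful while the generator branch exists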
@@ -1100,88 +1080,3 @@ def __set__(self, instance, value):
 
 def factory(self, instance, owner):
 raise NotImplementedError("must be implemented by derived classes")
-
-
-KT = TypeVar("KT")
-VT = TypeVar("VT")
-
-
-class PriorityOrderedMapping(Mapping[KT, VT]):
-"""Mapping that iterates over key according to an integer priority. If the priority is
-the same for two keys, insertion order is what matters.
-
-The priority is set when the key/value pair is added. If not set, the highest current priority
-is used.
-"""
-
-_data: Dict[KT, VT]
-_priorities: List[Tuple[int, KT]]
-
-def __init__(self) -> None:
-self._data = {}
-# Tuple of (priority, key)
-self._priorities = []
-
-def __getitem__(self, key: KT) -> VT:
-return self._data[key]
-
-def __len__(self) -> int:
-return len(self._data)
-
-def __iter__(self):
-yield from (key for _, key in self._priorities)
-
-def __reversed__(self):
-yield from (key for _, key in reversed(self._priorities))
-
-def reversed_keys(self):
-"""Iterates over keys from the highest priority, to the lowest."""
-return reversed(self)
-
-def reversed_values(self):
-"""Iterates over values from the highest priority, to the lowest."""
-yield from (self._data[key] for _, key in reversed(self._priorities))
-
-def _highest_priority(self) -> int:
-if not self._priorities:
-return 0
-result, _ = self._priorities[-1]
-return result
-
-def add(self, key: KT, *, value: VT, priority: Optional[int] = None) -> None:
-"""Adds a key/value pair to the mapping, with a specific priority.
-
-If the priority is None, then it is assumed to be the highest priority value currently
-in the container.
-
-Raises:
-ValueError: when the same priority is already in the mapping
-"""
-if priority is None:
-priority = self._highest_priority()
-
-if key in self._data:
-self.remove(key)
-
-self._priorities.append((priority, key))
-# We rely on sort being stable
-self._priorities.sort(key=lambda x: x[0])
-self._data[key] = value
-assert len(self._data) == len(self._priorities)
-
-def remove(self, key: KT) -> VT:
-"""Removes a key from the mapping.
-
-Returns:
-The value associated with the key being removed
-
-Raises:
-KeyError: if the key is not in the mapping
-"""
-if key not in self._data:
-raise KeyError(f"cannot find {key}")
-
-popped_item = self._data.pop(key)
-self._priorities = [(p, k) for p, k in self._priorities if k != key]
-assert len(self._data) == len(self._priorities)
-return popped_item
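
A short usage sketch of the PriorityOrderedMapping deleted above, illustrating the ascending-priority, insertion-stable iteration its docstring promises (keys and priorities are illustrative):

m = PriorityOrderedMapping()
m.add("low", value=1, priority=-10)
m.add("mid", value=2, priority=0)
m.add("also-mid", value=3, priority=0)  # equal priority: insertion order is kept
assert list(m) == ["low", "mid", "also-mid"]                  # lowest priority first
assert list(m.reversed_keys()) == ["also-mid", "mid", "low"]  # highest priority first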
@@ -41,16 +41,6 @@ def __init__(self, dst, src_a=None, src_b=None):
 self.src_a = src_a
 self.src_b = src_b
 
-def __repr__(self) -> str:
-return f"MergeConflict(dst={self.dst!r}, src_a={self.src_a!r}, src_b={self.src_b!r})"
-
-
-def _samefile(a: str, b: str):
-try:
-return os.path.samefile(a, b)
-except OSError:
-return False
-
-
 class SourceMergeVisitor(BaseDirectoryVisitor):
 """
@@ -60,14 +50,9 @@ class SourceMergeVisitor(BaseDirectoryVisitor):
 - A list of merge conflicts in dst/
 """
 
-def __init__(
-self, ignore: Optional[Callable[[str], bool]] = None, normalize_paths: bool = False
-):
+def __init__(self, ignore: Optional[Callable[[str], bool]] = None):
 self.ignore = ignore if ignore is not None else lambda f: False
 
-# On case-insensitive filesystems, normalize paths to detect duplications
-self.normalize_paths = normalize_paths
-
 # When mapping <src root> to <dst root>/<projection>, we need to prepend the <projection>
 # bit to the relative path in the destination dir.
 self.projection: str = ""
@@ -86,88 +71,10 @@ def __init__(
 # and can run mkdir in order.
 self.directories: Dict[str, Tuple[str, str]] = {}
 
-# If the visitor is configured to normalize paths, keep a map of
-# normalized path to: original path, root directory + relative path
-self._directories_normalized: Dict[str, Tuple[str, str, str]] = {}
-
 # Files to link. Maps dst_rel to (src_root, src_rel). This is an ordered dict, where files
 # are guaranteed to be grouped by src_root in the order they were visited.
 self.files: Dict[str, Tuple[str, str]] = {}
 
-# If the visitor is configured to normalize paths, keep a map of
-# normalized path to: original path, root directory + relative path
-self._files_normalized: Dict[str, Tuple[str, str, str]] = {}
-
-def _in_directories(self, proj_rel_path: str) -> bool:
-"""
-Check if a path is already in the directory list
-"""
-if self.normalize_paths:
-return proj_rel_path.lower() in self._directories_normalized
-else:
-return proj_rel_path in self.directories
-
-def _directory(self, proj_rel_path: str) -> Tuple[str, str, str]:
-"""
-Get the directory that is mapped to a path
-"""
-if self.normalize_paths:
-return self._directories_normalized[proj_rel_path.lower()]
-else:
-return (proj_rel_path, *self.directories[proj_rel_path])
-
-def _del_directory(self, proj_rel_path: str):
-"""
-Remove a directory from the list of directories
-"""
-del self.directories[proj_rel_path]
-if self.normalize_paths:
-del self._directories_normalized[proj_rel_path.lower()]
-
-def _add_directory(self, proj_rel_path: str, root: str, rel_path: str):
-"""
-Add a directory to the list of directories.
-Also stores the normalized version for later lookups
-"""
-self.directories[proj_rel_path] = (root, rel_path)
-if self.normalize_paths:
-self._directories_normalized[proj_rel_path.lower()] = (proj_rel_path, root, rel_path)
-
-def _in_files(self, proj_rel_path: str) -> bool:
-"""
-Check if a path is already in the files list
-"""
-if self.normalize_paths:
-return proj_rel_path.lower() in self._files_normalized
-else:
-return proj_rel_path in self.files
-
-def _file(self, proj_rel_path: str) -> Tuple[str, str, str]:
-"""
-Get the file that is mapped to a path
-"""
-if self.normalize_paths:
-return self._files_normalized[proj_rel_path.lower()]
-else:
-return (proj_rel_path, *self.files[proj_rel_path])
-
-def _del_file(self, proj_rel_path: str):
-"""
-Remove a file from the list of files
-"""
-del self.files[proj_rel_path]
-if self.normalize_paths:
-del self._files_normalized[proj_rel_path.lower()]
-
-def _add_file(self, proj_rel_path: str, root: str, rel_path: str):
-"""
-Add a file to the list of files
-Also stores the normalized version for later lookups
-"""
-self.files[proj_rel_path] = (root, rel_path)
-if self.normalize_paths:
-self._files_normalized[proj_rel_path.lower()] = (proj_rel_path, root, rel_path)
-
 def before_visit_dir(self, root: str, rel_path: str, depth: int) -> bool:
 """
 Register a directory if dst / rel_path is not blocked by a file or ignored.
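
The _in_*/_directory/_file/_add_*/_del_* helpers removed above all share one case-folding pattern. A reduced, hypothetical sketch of that bookkeeping (the class and attribute names are illustrative):

class NormalizedIndex:
    """Keep an exact map plus a lowercased map so lookups can be case-insensitive."""

    def __init__(self, normalize: bool):
        self.normalize = normalize
        self.entries = {}      # original path -> payload
        self._normalized = {}  # path.lower() -> (original path, payload)

    def add(self, path, payload):
        self.entries[path] = payload
        if self.normalize:
            self._normalized[path.lower()] = (path, payload)

    def __contains__(self, path):
        # on case-insensitive filesystems the lowercased key detects duplications
        if self.normalize:
            return path.lower() in self._normalized
        return path in self.entries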
@@ -177,28 +84,23 @@ def before_visit_dir(self, root: str, rel_path: str, depth: int) -> bool:
 if self.ignore(rel_path):
 # Don't recurse when dir is ignored.
 return False
-elif self._in_files(proj_rel_path):
-# A file-dir conflict is fatal except if they're the same file (symlinked dir).
-src_a = os.path.join(*self._file(proj_rel_path))
-src_b = os.path.join(root, rel_path)
-if not _samefile(src_a, src_b):
-self.fatal_conflicts.append(
-MergeConflict(dst=proj_rel_path, src_a=src_a, src_b=src_b)
+elif proj_rel_path in self.files:
+# Can't create a dir where a file is.
+src_a_root, src_a_relpath = self.files[proj_rel_path]
+self.fatal_conflicts.append(
+MergeConflict(
+dst=proj_rel_path,
+src_a=os.path.join(src_a_root, src_a_relpath),
+src_b=os.path.join(root, rel_path),
 )
-return False
-# Remove the link in favor of the dir.
-existing_proj_rel_path, _, _ = self._file(proj_rel_path)
-self._del_file(existing_proj_rel_path)
-self._add_directory(proj_rel_path, root, rel_path)
-return True
-elif self._in_directories(proj_rel_path):
+)
+return False
+elif proj_rel_path in self.directories:
 # No new directory, carry on.
 return True
 else:
 # Register new directory.
-self._add_directory(proj_rel_path, root, rel_path)
+self.directories[proj_rel_path] = (root, rel_path)
 return True
 
 def before_visit_symlinked_dir(self, root: str, rel_path: str, depth: int) -> bool:
@@ -230,7 +132,7 @@ def before_visit_symlinked_dir(self, root: str, rel_path: str, depth: int) -> bo
 if handle_as_dir:
 return self.before_visit_dir(root, rel_path, depth)
 
-self.visit_file(root, rel_path, depth, symlink=True)
+self.visit_file(root, rel_path, depth)
 return False
 
 def visit_file(self, root: str, rel_path: str, depth: int, *, symlink: bool = False) -> None:
@@ -238,23 +140,30 @@ def visit_file(self, root: str, rel_path: str, depth: int, *, symlink: bool = Fa
 
 if self.ignore(rel_path):
 pass
-elif self._in_directories(proj_rel_path):
-# Can't create a file where a dir is, unless they are the same file (symlinked dir),
-# in which case we simply drop the symlink in favor of the actual dir.
-src_a = os.path.join(*self._directory(proj_rel_path))
-src_b = os.path.join(root, rel_path)
-if not symlink or not _samefile(src_a, src_b):
-self.fatal_conflicts.append(
-MergeConflict(dst=proj_rel_path, src_a=src_a, src_b=src_b)
+elif proj_rel_path in self.directories:
+# Can't create a file where a dir is; fatal error
+self.fatal_conflicts.append(
+MergeConflict(
+dst=proj_rel_path,
+src_a=os.path.join(*self.directories[proj_rel_path]),
+src_b=os.path.join(root, rel_path),
 )
-elif self._in_files(proj_rel_path):
+)
+elif proj_rel_path in self.files:
 # When two files project to the same path, they conflict iff they are distinct.
 # If they are the same (i.e. one links to the other), register regular files rather
 # than symlinks. The reason is that in copy-type views, we need a copy of the actual
 # file, not the symlink.
-src_a = os.path.join(*self._file(proj_rel_path))
+src_a = os.path.join(*self.files[proj_rel_path])
 src_b = os.path.join(root, rel_path)
-if not _samefile(src_a, src_b):
+try:
+samefile = os.path.samefile(src_a, src_b)
+except OSError:
+samefile = False
+
+if not samefile:
 # Distinct files produce a conflict.
 self.file_conflicts.append(
 MergeConflict(dst=proj_rel_path, src_a=src_a, src_b=src_b)
@@ -264,12 +173,12 @@ def visit_file(self, root: str, rel_path: str, depth: int, *, symlink: bool = Fa
 if not symlink:
 # Remove the link in favor of the actual file. The del is necessary to maintain the
 # order of the files dict, which is grouped by root.
-existing_proj_rel_path, _, _ = self._file(proj_rel_path)
-self._del_file(existing_proj_rel_path)
-self._add_file(proj_rel_path, root, rel_path)
+del self.files[proj_rel_path]
+self.files[proj_rel_path] = (root, rel_path)
 else:
 # Otherwise register this file to be linked.
-self._add_file(proj_rel_path, root, rel_path)
+self.files[proj_rel_path] = (root, rel_path)
 
 def visit_symlinked_file(self, root: str, rel_path: str, depth: int) -> None:
 # Treat symlinked files as ordinary files (without "dereferencing")
@@ -288,11 +197,11 @@ def set_projection(self, projection: str) -> None:
 path = ""
 for part in self.projection.split(os.sep):
 path = os.path.join(path, part)
-if not self._in_files(path):
-self._add_directory(path, "<projection>", path)
+if path not in self.files:
+self.directories[path] = ("<projection>", path)
 else:
 # Can't create a dir where a file is.
-_, src_a_root, src_a_relpath = self._file(path)
+src_a_root, src_a_relpath = self.files[path]
 self.fatal_conflicts.append(
 MergeConflict(
 dst=path,
@@ -318,8 +227,8 @@ def __init__(self, source_merge_visitor: SourceMergeVisitor):
 def before_visit_dir(self, root: str, rel_path: str, depth: int) -> bool:
 # If destination dir is a file in a src dir, add a conflict,
 # and don't traverse deeper
-if self.src._in_files(rel_path):
-_, src_a_root, src_a_relpath = self.src._file(rel_path)
+if rel_path in self.src.files:
+src_a_root, src_a_relpath = self.src.files[rel_path]
 self.src.fatal_conflicts.append(
 MergeConflict(
 rel_path, os.path.join(src_a_root, src_a_relpath), os.path.join(root, rel_path)
@@ -329,9 +238,8 @@ def before_visit_dir(self, root: str, rel_path: str, depth: int) -> bool:
 
 # If destination dir was also a src dir, remove the mkdir
 # action, and traverse deeper.
-if self.src._in_directories(rel_path):
-existing_proj_rel_path, _, _ = self.src._directory(rel_path)
-self.src._del_directory(existing_proj_rel_path)
+if rel_path in self.src.directories:
+del self.src.directories[rel_path]
 return True
 
 # If the destination dir does not appear in the src dir,
@@ -344,24 +252,38 @@ def before_visit_symlinked_dir(self, root: str, rel_path: str, depth: int) -> bo
 be seen as files; we should not accidentally merge
 source dir with a symlinked dest dir.
 """
-self.visit_file(root, rel_path, depth)
-# Never descend into symlinked target dirs.
-return False
-
-def visit_file(self, root: str, rel_path: str, depth: int) -> None:
-# Can't merge a file if target already exists
-if self.src._in_directories(rel_path):
-_, src_a_root, src_a_relpath = self.src._directory(rel_path)
+# Always conflict
+if rel_path in self.src.directories:
+src_a_root, src_a_relpath = self.src.directories[rel_path]
 self.src.fatal_conflicts.append(
 MergeConflict(
 rel_path, os.path.join(src_a_root, src_a_relpath), os.path.join(root, rel_path)
 )
 )
 
-elif self.src._in_files(rel_path):
-_, src_a_root, src_a_relpath = self.src._file(rel_path)
+if rel_path in self.src.files:
+src_a_root, src_a_relpath = self.src.files[rel_path]
+self.src.fatal_conflicts.append(
+MergeConflict(
+rel_path, os.path.join(src_a_root, src_a_relpath), os.path.join(root, rel_path)
+)
+)
+
+# Never descend into symlinked target dirs.
+return False
+
+def visit_file(self, root: str, rel_path: str, depth: int) -> None:
+# Can't merge a file if target already exists
+if rel_path in self.src.directories:
+src_a_root, src_a_relpath = self.src.directories[rel_path]
+self.src.fatal_conflicts.append(
+MergeConflict(
+rel_path, os.path.join(src_a_root, src_a_relpath), os.path.join(root, rel_path)
+)
+)
+
+elif rel_path in self.src.files:
+src_a_root, src_a_relpath = self.src.files[rel_path]
 self.src.fatal_conflicts.append(
 MergeConflict(
 rel_path, os.path.join(src_a_root, src_a_relpath), os.path.join(root, rel_path)
@@ -386,7 +308,7 @@ class LinkTree:
 
 def __init__(self, source_root):
 if not os.path.exists(source_root):
-raise OSError("No such file or directory: '%s'", source_root)
+raise IOError("No such file or directory: '%s'", source_root)
 
 self._root = source_root
 

@@ -269,7 +269,7 @@ def __init__(
 
 @staticmethod
 def _poll_interval_generator(
-_wait_times: Optional[Tuple[float, float, float]] = None,
+_wait_times: Optional[Tuple[float, float, float]] = None
 ) -> Generator[float, None, None]:
 """This implements a backoff scheme for polling a contended resource
 by suggesting a succession of wait times between polls.
@@ -391,7 +391,7 @@ def _poll_lock(self, op: int) -> bool:
 
 return True
 
-except OSError as e:
+except IOError as e:
 # EAGAIN and EACCES == locked by another process (so try again)
 if e.errno not in (errno.EAGAIN, errno.EACCES):
 raise
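
The docstring above describes _poll_interval_generator as a backoff scheme that suggests a succession of wait times between polls. As a hedged sketch of that idea — the stage lengths and default intervals below are illustrative, not Spack's actual constants:

from typing import Generator, Optional, Tuple

def poll_intervals(
    wait_times: Optional[Tuple[float, float, float]] = None
) -> Generator[float, None, None]:
    short, medium, slow = wait_times or (1e-5, 1e-3, 0.1)
    for _ in range(100):  # poll aggressively at first
        yield short
    for _ in range(100):  # then back off
        yield medium
    while True:           # settle on a long interval for a heavily contended lock
        yield slow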
@@ -2,7 +2,8 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 
-"""Utility classes for logging the output of blocks of code."""
+"""Utility classes for logging the output of blocks of code.
+"""
 import atexit
 import ctypes
 import errno
@@ -343,6 +344,26 @@ def close(self):
 self.file.close()
 
 
+@contextmanager
+def replace_environment(env):
+"""Replace the current environment (`os.environ`) with `env`.
+
+If `env` is empty (or None), this unsets all current environment
+variables.
+"""
+env = env or {}
+old_env = os.environ.copy()
+try:
+os.environ.clear()
+for name, val in env.items():
+os.environ[name] = val
+yield
+finally:
+os.environ.clear()
+for name, val in old_env.items():
+os.environ[name] = val
+
+
 def log_output(*args, **kwargs):
 """Context manager that logs its output to a file.
 
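A short usage sketch of the replace_environment context manager added above: the block runs with exactly the given variables set, and os.environ is restored afterwards even on error (the PATH value here is illustrative):

import os

with replace_environment({"PATH": "/usr/bin"}):
    # inside the block, only the variables passed in are set
    assert os.environ.get("PATH") == "/usr/bin"
# outside the block, the previous environment is back in place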
@@ -426,6 +447,7 @@ def __init__(
 self.echo = echo
 self.debug = debug
 self.buffer = buffer
+self.env = env  # the environment to use for _writer_daemon
 self.filter_fn = filter_fn
 
 self._active = False  # used to prevent re-entry
@@ -497,20 +519,21 @@ def __enter__(self):
 # just don't forward input if this fails
 pass
 
-self.process = multiprocessing.Process(
-target=_writer_daemon,
-args=(
-input_fd,
-read_fd,
-self.write_fd,
-self.echo,
-self.log_file,
-child_pipe,
-self.filter_fn,
-),
-)
-self.process.daemon = True  # must set before start()
-self.process.start()
+with replace_environment(self.env):
+self.process = multiprocessing.Process(
+target=_writer_daemon,
+args=(
+input_fd,
+read_fd,
+self.write_fd,
+self.echo,
+self.log_file,
+child_pipe,
+self.filter_fn,
+),
+)
+self.process.daemon = True  # must set before start()
+self.process.start()
 
 finally:
 if input_fd:
@@ -706,7 +729,10 @@ class winlog:
 Does not support the use of 'v' toggling as nixlog does.
 """
 
-def __init__(self, file_like=None, echo=False, debug=0, buffer=False, filter_fn=None):
+def __init__(
+self, file_like=None, echo=False, debug=0, buffer=False, env=None, filter_fn=None
+):
+self.env = env
 self.debug = debug
 self.echo = echo
 self.logfile = file_like
@@ -763,10 +789,11 @@ def background_reader(reader, echo_writer, _kill):
 reader.close()
 
 self._active = True
-self._thread = Thread(
-target=background_reader, args=(self.reader, self.echo_writer, self._kill)
-)
-self._thread.start()
+with replace_environment(self.env):
+self._thread = Thread(
+target=background_reader, args=(self.reader, self.echo_writer, self._kill)
+)
+self._thread.start()
 return self
 
 def __exit__(self, exc_type, exc_val, exc_tb):
@@ -891,7 +918,7 @@ def _writer_daemon(
 try:
 if stdin_file.read(1) == "v":
 echo = not echo
-except OSError as e:
+except IOError as e:
 # If SIGTTIN is ignored, the system gives EIO
 # to let the caller know the read failed b/c it
 # was in the bg. Ignore that too.
@@ -986,7 +1013,7 @@ def wrapped(*args, **kwargs):
 while True:
 try:
 return function(*args, **kwargs)
-except OSError as e:
+except IOError as e:
 if e.errno == errno.EINTR:
 continue
 raise
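
The retry loop above is the classic EINTR wrapper. A self-contained version of the same pattern, with a hypothetical decorator name:

import errno
import functools

def retry_on_eintr(function):
    @functools.wraps(function)
    def wrapped(*args, **kwargs):
        while True:
            try:
                return function(*args, **kwargs)
            except OSError as e:  # IOError is an alias of OSError on Python 3
                if e.errno == errno.EINTR:
                    continue  # interrupted by a signal: just retry
                raise
    return wrapped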
@@ -10,21 +10,9 @@
 import spack.util.git
 
 #: PEP440 canonical <major>.<minor>.<micro>.<devN> string
-__version__ = "1.0.0.dev0"
+__version__ = "0.24.0.dev0"
 spack_version = __version__
 
-#: The current Package API version implemented by this version of Spack. The Package API defines
-#: the Python interface for packages as well as the layout of package repositories. The minor
-#: version is incremented when the package API is extended in a backwards-compatible way. The major
-#: version is incremented upon breaking changes. This version is changed independently from the
-#: Spack version.
-package_api_version = (1, 0)
-
-#: The minimum Package API version that this version of Spack is compatible with. This should
-#: always be a tuple of the form ``(major, 0)``, since compatibility with vX.Y implies
-#: compatibility with vX.0.
-min_package_api_version = (1, 0)
-
 
 def __try_int(v):
 try:
@@ -91,6 +79,4 @@ def get_short_version() -> str:
 "get_version",
 "get_spack_commit",
 "get_short_version",
-"package_api_version",
-"min_package_api_version",
 ]

@@ -1010,7 +1010,7 @@ def _issues_in_depends_on_directive(pkgs, error_cls):
 for dep_name, dep in deps_by_name.items():
 
 def check_virtual_with_variants(spec, msg):
-if not spack.repo.PATH.is_virtual(spec.name) or not spec.variants:
+if not spec.virtual or not spec.variants:
 return
 error = error_cls(
 f"{pkg_name}: {msg}",

@@ -923,7 +923,7 @@ class FileTypes:
 UNKNOWN = 2
 
 
-NOT_ISO8859_1_TEXT = re.compile(b"[\x00\x7f-\x9f]")
+NOT_ISO8859_1_TEXT = re.compile(b"[\x00\x7F-\x9F]")
 
 
 def file_type(f: IO[bytes]) -> int:
@@ -2529,10 +2529,10 @@ def install_root_node(
 allow_missing: when true, allows installing a node with missing dependencies
 """
 # Early termination
-if spec.external or not spec.concrete:
-warnings.warn("Skipping external or abstract spec {0}".format(spec.format()))
+if spec.external or spec.virtual:
+warnings.warn("Skipping external or virtual package {0}".format(spec.format()))
 return
-elif spec.installed and not force:
+elif spec.concrete and spec.installed and not force:
 warnings.warn("Package for spec {0} already installed.".format(spec.format()))
 return
 

@@ -27,9 +27,9 @@
 class ClingoBootstrapConcretizer:
 def __init__(self, configuration):
 self.host_platform = spack.platforms.host()
-self.host_os = self.host_platform.default_operating_system()
+self.host_os = self.host_platform.operating_system("frontend")
 self.host_target = archspec.cpu.host().family
-self.host_architecture = spack.spec.ArchSpec.default_arch()
+self.host_architecture = spack.spec.ArchSpec.frontend_arch()
 self.host_architecture.target = str(self.host_target)
 self.host_compiler = self._valid_compiler_or_raise()
 self.host_python = self.python_external_spec()

@@ -141,7 +141,7 @@ def _bootstrap_config_scopes() -> Sequence["spack.config.ConfigScope"]:
 
 
 def _add_compilers_if_missing() -> None:
-arch = spack.spec.ArchSpec.default_arch()
+arch = spack.spec.ArchSpec.frontend_arch()
 if not spack.compilers.compilers_for_arch(arch):
 spack.compilers.find_compilers()
 

@@ -292,12 +292,7 @@ def try_import(self, module: str, abstract_spec_str: str) -> bool:
 
 # Install the spec that should make the module importable
 with spack.config.override(self.mirror_scope):
-PackageInstaller(
-[concrete_spec.package],
-fail_fast=True,
-package_use_cache=False,
-dependencies_use_cache=False,
-).install()
+PackageInstaller([concrete_spec.package], fail_fast=True).install()
 
 if _try_import_from_store(module, query_spec=concrete_spec, query_info=info):
 self.last_search = info
@@ -367,7 +362,6 @@ def ensure_module_importable_or_raise(module: str, abstract_spec: Optional[str]
 for current_config in bootstrapping_sources():
 if not source_is_enabled(current_config):
 continue
-
 with exception_handler.forward(current_config["name"], Exception):
 if create_bootstrapper(current_config).try_import(module, abstract_spec):
 return

@@ -881,6 +881,21 @@ def get_rpath_deps(pkg: spack.package_base.PackageBase) -> List[spack.spec.Spec]
 return _get_rpath_deps_from_spec(pkg.spec, pkg.transitive_rpaths)
 
 
+def load_external_modules(pkg):
+"""Traverse a package's spec DAG and load any external modules.
+
+Traverse a package's dependencies and load any external modules
+associated with them.
+
+Args:
+pkg (spack.package_base.PackageBase): package to load deps for
+"""
+for dep in list(pkg.spec.traverse()):
+external_modules = dep.external_modules or []
+for external_module in external_modules:
+load_module(external_module)
+
+
 def setup_package(pkg, dirty, context: Context = Context.BUILD):
 """Execute all environment setup routines."""
 if context not in (Context.BUILD, Context.TEST):
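
Note the relationship to the removal further below (@@ -1220,21 +1235,6): the function added here takes the package itself and walks its whole spec DAG, while the version removed there took a populated SetupContext and only visited the externals it had already collected. A condensed comparison, using only calls visible in this diff:

# Restored variant: walk every dependency of the package.
for dep in list(pkg.spec.traverse()):
    for external_module in dep.external_modules or []:
        load_module(external_module)

# Removed variant (see the later hunk): walk only the collected externals.
for spec, _ in context.external:
    for external_module in spec.external_modules or []:
        load_module(external_module)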
@@ -931,7 +946,7 @@ def setup_package(pkg, dirty, context: Context = Context.BUILD):
 for mod in pkg.compiler.modules:
 load_module(mod)
 
-load_external_modules(setup_context)
+load_external_modules(pkg)
 
 # Make sure nothing's strange about the Spack environment.
 validate(env_mods, tty.warn)
@@ -1220,21 +1235,6 @@ def _make_runnable(self, dep: spack.spec.Spec, env: EnvironmentModifications):
 env.prepend_path("PATH", bin_dir)
 
 
-def load_external_modules(context: SetupContext) -> None:
-"""Traverse a package's spec DAG and load any external modules.
-
-Traverse a package's dependencies and load any external modules
-associated with them.
-
-Args:
-context: A populated SetupContext object
-"""
-for spec, _ in context.external:
-external_modules = spec.external_modules or []
-for external_module in external_modules:
-load_module(external_module)
-
-
 def _setup_pkg_and_run(
 serialized_pkg: "spack.subprocess_context.PackageInstallContext",
 function: Callable,

@@ -12,7 +12,6 @@
 import spack.phase_callbacks
 import spack.spec
 import spack.util.prefix
-from spack.directives import depends_on
 
 from .cmake import CMakeBuilder, CMakePackage
 
@@ -278,24 +277,17 @@ def initconfig_hardware_entries(self):
 entries.append("# ROCm")
 entries.append("#------------------{0}\n".format("-" * 30))
 
-if spec.satisfies("^blt@0.7:"):
-rocm_root = os.path.dirname(spec["llvm-amdgpu"].prefix)
-entries.append(cmake_cache_path("ROCM_PATH", rocm_root))
-else:
-# Explicitly setting HIP_ROOT_DIR may be a patch that is no longer necessary
-entries.append(cmake_cache_path("HIP_ROOT_DIR", "{0}".format(spec["hip"].prefix)))
-llvm_bin = spec["llvm-amdgpu"].prefix.bin
-llvm_prefix = spec["llvm-amdgpu"].prefix
-# Some ROCm systems seem to point to /<path>/rocm-<ver>/ and
-# others point to /<path>/rocm-<ver>/llvm
-if os.path.basename(os.path.normpath(llvm_prefix)) != "llvm":
-llvm_bin = os.path.join(llvm_prefix, "llvm/bin/")
-entries.append(
-cmake_cache_filepath(
-"CMAKE_HIP_COMPILER", os.path.join(llvm_bin, "amdclang++")
-)
-)
+# Explicitly setting HIP_ROOT_DIR may be a patch that is no longer necessary
+entries.append(cmake_cache_path("HIP_ROOT_DIR", "{0}".format(spec["hip"].prefix)))
+llvm_bin = spec["llvm-amdgpu"].prefix.bin
+llvm_prefix = spec["llvm-amdgpu"].prefix
+# Some ROCm systems seem to point to /<path>/rocm-<ver>/ and
+# others point to /<path>/rocm-<ver>/llvm
+if os.path.basename(os.path.normpath(llvm_prefix)) != "llvm":
+llvm_bin = os.path.join(llvm_prefix, "llvm/bin/")
+entries.append(
+cmake_cache_filepath("CMAKE_HIP_COMPILER", os.path.join(llvm_bin, "clang++"))
+)
 
 archs = self.spec.variants["amdgpu_target"].value
 if archs[0] != "none":
 arch_str = ";".join(archs)
@@ -379,10 +371,6 @@ class CachedCMakePackage(CMakePackage):
 
 CMakeBuilder = CachedCMakeBuilder
 
-# These dependencies are assumed in the builder
-depends_on("c", type="build")
-depends_on("cxx", type="build")
-
 def flag_handler(self, name, flags):
 if name in ("cflags", "cxxflags", "cppflags", "fflags"):
 return None, None, None  # handled in the cmake cache

@@ -70,16 +70,10 @@ def build_directory(self):
 """Return the directory containing the main Cargo.toml."""
 return self.pkg.stage.source_path
 
-@property
-def std_build_args(self):
-"""Standard arguments for ``cargo build`` provided as a property for
-convenience of package writers."""
-return ["-j", str(self.pkg.module.make_jobs)]
-
 @property
 def build_args(self):
 """Arguments for ``cargo build``."""
-return []
+return ["-j", str(self.pkg.module.make_jobs)]
 
 @property
 def check_args(self):
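
With the separate std_build_args property removed (right-hand side of this hunk), an override of build_args takes over the parallelism flag as well. A hedged sketch of the consequence; CargoBuilder is assumed to be the builder class in this file, and the subclass and extra flag are hypothetical:

class MyCargoBuilder(CargoBuilder):
    @property
    def build_args(self):
        # without std_build_args, a custom override must keep the jobs flag itself
        return ["-j", str(self.pkg.module.make_jobs), "--locked"]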
@@ -94,9 +88,7 @@ def build(
|
|||||||
) -> None:
|
) -> None:
|
||||||
"""Runs ``cargo install`` in the source directory"""
|
"""Runs ``cargo install`` in the source directory"""
|
||||||
with fs.working_dir(self.build_directory):
|
with fs.working_dir(self.build_directory):
|
||||||
pkg.module.cargo(
|
pkg.module.cargo("install", "--root", "out", "--path", ".", *self.build_args)
|
||||||
"install", "--root", "out", "--path", ".", *self.std_build_args, *self.build_args
|
|
||||||
)
|
|
||||||
|
|
||||||
def install(
|
def install(
|
||||||
self, pkg: CargoPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
|
self, pkg: CargoPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
|
||||||
|
|||||||
@@ -11,7 +11,6 @@
|
|||||||
from typing import Any, List, Optional, Tuple
|
from typing import Any, List, Optional, Tuple
|
||||||
|
|
||||||
import llnl.util.filesystem as fs
|
import llnl.util.filesystem as fs
|
||||||
from llnl.util import tty
|
|
||||||
from llnl.util.lang import stable_partition
|
from llnl.util.lang import stable_partition
|
||||||
|
|
||||||
import spack.builder
|
import spack.builder
|
||||||
@@ -459,23 +458,11 @@ def cmake(
|
|||||||
) -> None:
|
) -> None:
|
||||||
"""Runs ``cmake`` in the build directory"""
|
"""Runs ``cmake`` in the build directory"""
|
||||||
|
|
||||||
if spec.is_develop:
|
# skip cmake phase if it is an incremental develop build
|
||||||
# skip cmake phase if it is an incremental develop build
|
if spec.is_develop and os.path.isfile(
|
||||||
|
os.path.join(self.build_directory, "CMakeCache.txt")
|
||||||
# Determine the files that will re-run CMake that are generated from a successful
|
):
|
||||||
# configure step based on state
|
return
|
||||||
primary_generator = _extract_primary_generator(self.generator)
|
|
||||||
configure_artifact = "Makefile"
|
|
||||||
if primary_generator == "Ninja":
|
|
||||||
configure_artifact = "ninja.build"
|
|
||||||
|
|
||||||
if os.path.isfile(os.path.join(self.build_directory, configure_artifact)):
|
|
||||||
tty.msg(
|
|
||||||
"Incremental build criteria satisfied."
|
|
||||||
"Skipping CMake configure step. To force configuration run"
|
|
||||||
f" `spack clean {pkg.name}`"
|
|
||||||
)
|
|
||||||
return
|
|
||||||
|
|
||||||
options = self.std_cmake_args
|
options = self.std_cmake_args
|
||||||
options += self.cmake_args()
|
options += self.cmake_args()
|
||||||
|
|||||||
@@ -15,7 +15,7 @@ class CudaPackage(PackageBase):
|
|||||||
"""Auxiliary class which contains CUDA variant, dependencies and conflicts
|
"""Auxiliary class which contains CUDA variant, dependencies and conflicts
|
||||||
and is meant to unify and facilitate its usage.
|
and is meant to unify and facilitate its usage.
|
||||||
|
|
||||||
Maintainers: ax3l, Rombur, davidbeckingsale, pauleonix
|
Maintainers: ax3l, Rombur, davidbeckingsale
|
||||||
"""
|
"""
|
||||||
|
|
||||||
# https://docs.nvidia.com/cuda/cuda-compiler-driver-nvcc/index.html#gpu-feature-list
|
# https://docs.nvidia.com/cuda/cuda-compiler-driver-nvcc/index.html#gpu-feature-list
|
||||||
@@ -47,12 +47,6 @@ class CudaPackage(PackageBase):
|
|||||||
"89",
|
"89",
|
||||||
"90",
|
"90",
|
||||||
"90a",
|
"90a",
|
||||||
"100",
|
|
||||||
"100a",
|
|
||||||
"101",
|
|
||||||
"101a",
|
|
||||||
"120",
|
|
||||||
"120a",
|
|
||||||
)
|
)
|
||||||
|
|
||||||
# FIXME: keep cuda and cuda_arch separate to make usage easier until
|
# FIXME: keep cuda and cuda_arch separate to make usage easier until
|
||||||
@@ -105,56 +99,39 @@ def compute_capabilities(arch_list: Iterable[str]) -> List[str]:
|
|||||||
# CUDA version vs Architecture
|
# CUDA version vs Architecture
|
||||||
# https://en.wikipedia.org/wiki/CUDA#GPUs_supported
|
# https://en.wikipedia.org/wiki/CUDA#GPUs_supported
|
||||||
# https://docs.nvidia.com/cuda/cuda-toolkit-release-notes/index.html#deprecated-features
|
# https://docs.nvidia.com/cuda/cuda-toolkit-release-notes/index.html#deprecated-features
|
||||||
# Tesla support:
|
|
||||||
depends_on("cuda@:6.0", when="cuda_arch=10")
|
depends_on("cuda@:6.0", when="cuda_arch=10")
|
||||||
depends_on("cuda@:6.5", when="cuda_arch=11")
|
depends_on("cuda@:6.5", when="cuda_arch=11")
|
||||||
depends_on("cuda@2.1:6.5", when="cuda_arch=12")
|
depends_on("cuda@2.1:6.5", when="cuda_arch=12")
|
||||||
depends_on("cuda@2.1:6.5", when="cuda_arch=13")
|
depends_on("cuda@2.1:6.5", when="cuda_arch=13")
|
||||||
|
|
||||||
# Fermi support:
|
|
||||||
depends_on("cuda@3.0:8.0", when="cuda_arch=20")
|
depends_on("cuda@3.0:8.0", when="cuda_arch=20")
|
||||||
depends_on("cuda@3.2:8.0", when="cuda_arch=21")
|
depends_on("cuda@3.2:8.0", when="cuda_arch=21")
|
||||||
|
|
||||||
# Kepler support:
|
|
||||||
depends_on("cuda@5.0:10.2", when="cuda_arch=30")
|
depends_on("cuda@5.0:10.2", when="cuda_arch=30")
|
||||||
depends_on("cuda@5.0:10.2", when="cuda_arch=32")
|
depends_on("cuda@5.0:10.2", when="cuda_arch=32")
|
||||||
depends_on("cuda@5.0:11.8", when="cuda_arch=35")
|
depends_on("cuda@5.0:11.8", when="cuda_arch=35")
|
||||||
depends_on("cuda@6.5:11.8", when="cuda_arch=37")
|
depends_on("cuda@6.5:11.8", when="cuda_arch=37")
|
||||||
|
|
||||||
# Maxwell support:
|
|
||||||
depends_on("cuda@6.0:", when="cuda_arch=50")
|
depends_on("cuda@6.0:", when="cuda_arch=50")
|
||||||
depends_on("cuda@6.5:", when="cuda_arch=52")
|
depends_on("cuda@6.5:", when="cuda_arch=52")
|
||||||
depends_on("cuda@6.5:", when="cuda_arch=53")
|
depends_on("cuda@6.5:", when="cuda_arch=53")
|
||||||
|
|
||||||
# Pascal support:
|
|
||||||
depends_on("cuda@8.0:", when="cuda_arch=60")
|
depends_on("cuda@8.0:", when="cuda_arch=60")
|
||||||
depends_on("cuda@8.0:", when="cuda_arch=61")
|
depends_on("cuda@8.0:", when="cuda_arch=61")
|
||||||
depends_on("cuda@8.0:", when="cuda_arch=62")
|
depends_on("cuda@8.0:", when="cuda_arch=62")
|
||||||
|
|
||||||
# Volta support:
|
|
||||||
depends_on("cuda@9.0:", when="cuda_arch=70")
|
depends_on("cuda@9.0:", when="cuda_arch=70")
|
||||||
# Turing support:
|
|
||||||
depends_on("cuda@9.0:", when="cuda_arch=72")
|
depends_on("cuda@9.0:", when="cuda_arch=72")
|
||||||
depends_on("cuda@10.0:", when="cuda_arch=75")
|
depends_on("cuda@10.0:", when="cuda_arch=75")
|
||||||
|
|
||||||
# Ampere support:
|
|
||||||
depends_on("cuda@11.0:", when="cuda_arch=80")
|
depends_on("cuda@11.0:", when="cuda_arch=80")
|
||||||
depends_on("cuda@11.1:", when="cuda_arch=86")
|
depends_on("cuda@11.1:", when="cuda_arch=86")
|
||||||
depends_on("cuda@11.4:", when="cuda_arch=87")
|
depends_on("cuda@11.4:", when="cuda_arch=87")
|
||||||
# Ada support:
|
|
||||||
depends_on("cuda@11.8:", when="cuda_arch=89")
|
depends_on("cuda@11.8:", when="cuda_arch=89")
|
||||||
|
|
||||||
# Hopper support:
|
|
||||||
depends_on("cuda@12.0:", when="cuda_arch=90")
|
depends_on("cuda@12.0:", when="cuda_arch=90")
|
||||||
depends_on("cuda@12.0:", when="cuda_arch=90a")
|
depends_on("cuda@12.0:", when="cuda_arch=90a")
|
||||||
|
|
||||||
# Blackwell support:
|
|
||||||
depends_on("cuda@12.8:", when="cuda_arch=100")
|
|
||||||
depends_on("cuda@12.8:", when="cuda_arch=100a")
|
|
||||||
depends_on("cuda@12.8:", when="cuda_arch=101")
|
|
||||||
depends_on("cuda@12.8:", when="cuda_arch=101a")
|
|
||||||
depends_on("cuda@12.8:", when="cuda_arch=120")
|
|
||||||
depends_on("cuda@12.8:", when="cuda_arch=120a")
|
|
||||||
# From the NVIDIA install guide we know of conflicts for particular
|
# From the NVIDIA install guide we know of conflicts for particular
|
||||||
# platforms (linux, darwin), architectures (x86, powerpc) and compilers
|
# platforms (linux, darwin), architectures (x86, powerpc) and compilers
|
||||||
# (gcc, clang). We don't restrict %gcc and %clang conflicts to
|
# (gcc, clang). We don't restrict %gcc and %clang conflicts to
|
||||||
@@ -186,7 +163,6 @@ def compute_capabilities(arch_list: Iterable[str]) -> List[str]:
|
|||||||
conflicts("%gcc@12:", when="+cuda ^cuda@:11.8")
|
conflicts("%gcc@12:", when="+cuda ^cuda@:11.8")
|
||||||
conflicts("%gcc@13:", when="+cuda ^cuda@:12.3")
|
conflicts("%gcc@13:", when="+cuda ^cuda@:12.3")
|
||||||
conflicts("%gcc@14:", when="+cuda ^cuda@:12.6")
|
conflicts("%gcc@14:", when="+cuda ^cuda@:12.6")
|
||||||
conflicts("%gcc@15:", when="+cuda ^cuda@:12.8")
|
|
||||||
conflicts("%clang@12:", when="+cuda ^cuda@:11.4.0")
|
conflicts("%clang@12:", when="+cuda ^cuda@:11.4.0")
|
||||||
conflicts("%clang@13:", when="+cuda ^cuda@:11.5")
|
conflicts("%clang@13:", when="+cuda ^cuda@:11.5")
|
||||||
conflicts("%clang@14:", when="+cuda ^cuda@:11.7")
|
conflicts("%clang@14:", when="+cuda ^cuda@:11.7")
|
||||||
@@ -195,7 +171,6 @@ def compute_capabilities(arch_list: Iterable[str]) -> List[str]:
|
|||||||
conflicts("%clang@17:", when="+cuda ^cuda@:12.3")
|
conflicts("%clang@17:", when="+cuda ^cuda@:12.3")
|
||||||
conflicts("%clang@18:", when="+cuda ^cuda@:12.5")
|
conflicts("%clang@18:", when="+cuda ^cuda@:12.5")
|
||||||
conflicts("%clang@19:", when="+cuda ^cuda@:12.6")
|
conflicts("%clang@19:", when="+cuda ^cuda@:12.6")
|
||||||
conflicts("%clang@20:", when="+cuda ^cuda@:12.8")
|
|
||||||
|
|
||||||
# https://gist.github.com/ax3l/9489132#gistcomment-3860114
|
# https://gist.github.com/ax3l/9489132#gistcomment-3860114
|
||||||
conflicts("%gcc@10", when="+cuda ^cuda@:11.4.0")
|
conflicts("%gcc@10", when="+cuda ^cuda@:11.4.0")
|
||||||
|
@@ -48,9 +48,6 @@ class MesonPackage(spack.package_base.PackageBase):
     variant("strip", default=False, description="Strip targets on install")
     depends_on("meson", type="build")
     depends_on("ninja", type="build")
-    # Meson uses pkg-config for dependency detection, and this dependency is
-    # often overlooked by packages that use meson as a build system.
-    depends_on("pkgconfig", type="build")
     # Python detection in meson requires distutils to be importable, but distutils no longer
     # exists in Python 3.12. In Spack, we can't use setuptools as distutils replacement,
     # because the distutils-precedence.pth startup file that setuptools ships with is not run
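Dropping `depends_on("pkgconfig", type="build")` from `MesonPackage` means any package whose meson build probes libraries through pkg-config must now declare that dependency itself. A hypothetical package file illustrating the now-explicit declaration (the name, URL and checksum below are placeholders, not a real Spack package):

    from spack.package import *


    class Mytool(MesonPackage):
        """Hypothetical example package."""

        homepage = "https://example.com/mytool"  # placeholder
        url = "https://example.com/mytool-1.0.tar.gz"  # placeholder

        version("1.0", sha256="0" * 64)  # placeholder checksum

        # meson discovers many dependencies via pkg-config, so the build
        # needs it on PATH now that MesonPackage no longer adds it
        depends_on("pkgconfig", type="build")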
@@ -142,7 +142,7 @@ def setup_run_environment(self, env):
             $ source {prefix}/{component}/{version}/env/vars.sh
         """
         # Only if environment modifications are desired (default is +envmods)
-        if "+envmods" in self.spec:
+        if "~envmods" not in self.spec:
             env.extend(
                 EnvironmentModifications.from_sourcing_file(
                     self.component_prefix.env.join("vars.sh"), *self.env_script_args
@@ -264,17 +264,16 @@ def update_external_dependencies(self, extendee_spec=None):
         # Ensure architecture information is present
         if not python.architecture:
             host_platform = spack.platforms.host()
-            host_os = host_platform.default_operating_system()
-            host_target = host_platform.default_target()
+            host_os = host_platform.operating_system("default_os")
+            host_target = host_platform.target("default_target")
             python.architecture = spack.spec.ArchSpec(
                 (str(host_platform), str(host_os), str(host_target))
             )
         else:
             if not python.architecture.platform:
                 python.architecture.platform = spack.platforms.host()
-            platform = spack.platforms.by_name(python.architecture.platform)
             if not python.architecture.os:
-                python.architecture.os = platform.default_operating_system()
+                python.architecture.os = "default_os"
             if not python.architecture.target:
                 python.architecture.target = archspec.cpu.host().family.name

@@ -6,7 +6,6 @@
 import codecs
 import json
 import os
-import pathlib
 import re
 import shutil
 import stat
@@ -14,16 +13,16 @@
 import tempfile
 import zipfile
 from collections import namedtuple
-from typing import Callable, Dict, List, Set, Union
-from urllib.request import Request
+from typing import Callable, Dict, List, Set
+from urllib.request import HTTPHandler, Request, build_opener

-import llnl.path
 import llnl.util.filesystem as fs
 import llnl.util.tty as tty
 from llnl.util.tty.color import cescape, colorize

 import spack
 import spack.binary_distribution as bindist
+import spack.builder
 import spack.concretize
 import spack.config as cfg
 import spack.environment as ev
@@ -33,7 +32,6 @@
 import spack.paths
 import spack.repo
 import spack.spec
-import spack.store
 import spack.util.git
 import spack.util.gpg as gpg_util
 import spack.util.spack_yaml as syaml
@@ -42,7 +40,6 @@
 from spack import traverse
 from spack.error import SpackError
 from spack.reporters.cdash import SPACK_CDASH_TIMEOUT
-from spack.version import GitVersion, StandardVersion

 from .common import (
     IS_WINDOWS,
@@ -65,8 +62,6 @@

 PushResult = namedtuple("PushResult", "success url")

-urlopen = web_util.urlopen  # alias for mocking in tests
-

 def get_change_revisions():
     """If this is a git repo get the revisions to use when checking
@@ -81,53 +76,11 @@ def get_change_revisions():
     return None, None


-def get_added_versions(
-    checksums_version_dict: Dict[str, Union[StandardVersion, GitVersion]],
-    path: str,
-    from_ref: str = "HEAD~1",
-    to_ref: str = "HEAD",
-) -> List[Union[StandardVersion, GitVersion]]:
-    """Get a list of the versions added between `from_ref` and `to_ref`.
-    Args:
-        checksums_version_dict (Dict): all package versions keyed by known checksums.
-        path (str): path to the package.py
-        from_ref (str): oldest git ref, defaults to `HEAD~1`
-        to_ref (str): newer git ref, defaults to `HEAD`
-    Returns: list of versions added between refs
-    """
-    git_exe = spack.util.git.git(required=True)
-
-    # Gather git diff
-    diff_lines = git_exe("diff", from_ref, to_ref, "--", path, output=str).split("\n")
-
-    # Store added and removed versions
-    # Removed versions are tracked here to determine when versions are moved in a file
-    # and show up as both added and removed in a git diff.
-    added_checksums = set()
-    removed_checksums = set()
-
-    # Scrape diff for modified versions and prune added versions if they show up
-    # as also removed (which means they've actually just moved in the file and
-    # we shouldn't need to rechecksum them)
-    for checksum in checksums_version_dict.keys():
-        for line in diff_lines:
-            if checksum in line:
-                if line.startswith("+"):
-                    added_checksums.add(checksum)
-                if line.startswith("-"):
-                    removed_checksums.add(checksum)
-
-    return [checksums_version_dict[c] for c in added_checksums - removed_checksums]
-
-
 def get_stack_changed(env_path, rev1="HEAD^", rev2="HEAD"):
     """Given an environment manifest path and two revisions to compare, return
     whether or not the stack was changed. Returns True if the environment
     manifest changed between the provided revisions (or additionally if the
     `.gitlab-ci.yml` file itself changed). Returns False otherwise."""
-    # git returns posix paths always, normalize input to be comptaible
-    # with that
-    env_path = llnl.path.convert_to_posix_path(env_path)
     git = spack.util.git.git()
     if git:
         with fs.working_dir(spack.paths.prefix):
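The removed `get_added_versions` helper rests on a simple idea: scrape the `git diff` for known high-entropy strings (checksums) and drop any that appear on both `+` and `-` lines, since those only moved within the file. A self-contained sketch of that set logic, with a made-up diff:

    # Sketch of the moved-vs-added pruning from the removed helper.
    # The diff text and checksums below are made up for illustration.
    diff_lines = [
        '+    version("1.2.0", sha256="aaa111")',
        '-    version("1.1.0", sha256="bbb222")',
        '+    version("1.1.0", sha256="bbb222")',  # moved, not new
    ]
    checksums_version_dict = {"aaa111": "1.2.0", "bbb222": "1.1.0"}

    added_checksums, removed_checksums = set(), set()
    for checksum in checksums_version_dict:
        for line in diff_lines:
            if checksum in line:
                if line.startswith("+"):
                    added_checksums.add(checksum)
                if line.startswith("-"):
                    removed_checksums.add(checksum)

    # Only checksums added and never removed are genuinely new versions.
    print([checksums_version_dict[c] for c in added_checksums - removed_checksums])
    # -> ['1.2.0']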
@@ -265,7 +218,7 @@ def rebuild_filter(s: spack.spec.Spec) -> RebuildDecision:

 def _format_pruning_message(spec: spack.spec.Spec, prune: bool, reasons: List[str]) -> str:
     reason_msg = ", ".join(reasons)
-    spec_fmt = "{name}{@version}{/hash:7}{%compiler}"
+    spec_fmt = "{name}{@version}{%compiler}{/hash:7}"

     if not prune:
         status = colorize("@*g{[x]}  ")
@@ -622,25 +575,22 @@ def copy_stage_logs_to_artifacts(job_spec: spack.spec.Spec, job_log_dir: str) -> None:
     tty.debug(f"job spec: {job_spec}")

     try:
-        package_metadata_root = pathlib.Path(spack.store.STORE.layout.metadata_path(job_spec))
-    except spack.error.SpackError as e:
-        tty.error(f"Cannot copy logs: {str(e)}")
+        pkg_cls = spack.repo.PATH.get_pkg_class(job_spec.name)
+        job_pkg = pkg_cls(job_spec)
+        tty.debug(f"job package: {job_pkg}")
+    except AssertionError:
+        msg = f"Cannot copy stage logs: job spec ({job_spec}) must be concrete"
+        tty.error(msg)
         return

-    # Get the package's archived files
-    archive_files = []
-    archive_root = package_metadata_root / "archived-files"
-    if archive_root.is_dir():
-        archive_files = [f for f in archive_root.rglob("*") if f.is_file()]
-    else:
-        msg = "Cannot copy package archived files: archived-files must be a directory"
-        tty.warn(msg)
-
-    build_log_zipped = package_metadata_root / "spack-build-out.txt.gz"
-    build_env_mods = package_metadata_root / "spack-build-env.txt"
-
-    for f in [build_log_zipped, build_env_mods, *archive_files]:
-        copy_files_to_artifacts(str(f), job_log_dir)
+    stage_dir = job_pkg.stage.path
+    tty.debug(f"stage dir: {stage_dir}")
+    for file in [
+        job_pkg.log_path,
+        job_pkg.env_mods_path,
+        *spack.builder.create(job_pkg).archive_files,
+    ]:
+        copy_files_to_artifacts(file, job_log_dir)


 def copy_test_logs_to_artifacts(test_stage, job_test_dir):
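On the `-` side of this hunk, archived files come from the install metadata directory via `pathlib`; on the `+` side they come from the kept stage via the builder. The `-` side's traversal in isolation (the metadata path here is hypothetical):

    # Isolated sketch of the pathlib traversal on the removed side.
    import pathlib

    package_metadata_root = pathlib.Path("/tmp/example/.spack")  # hypothetical
    archive_root = package_metadata_root / "archived-files"

    if archive_root.is_dir():
        # rglob("*") walks the whole tree; keep only regular files
        archive_files = [f for f in archive_root.rglob("*") if f.is_file()]
    else:
        archive_files = []
    print(archive_files)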
@@ -660,7 +610,7 @@ def copy_test_logs_to_artifacts(test_stage, job_test_dir):
     copy_files_to_artifacts(os.path.join(test_stage, "*", "*.txt"), job_test_dir)


-def download_and_extract_artifacts(url, work_dir) -> str:
+def download_and_extract_artifacts(url, work_dir):
     """Look for gitlab artifacts.zip at the given url, and attempt to download
     and extract the contents into the given work_dir

@@ -668,10 +618,6 @@ def download_and_extract_artifacts(url, work_dir) -> str:

         url (str): Complete url to artifacts.zip file
         work_dir (str): Path to destination where artifacts should be extracted
-
-    Output:
-
-        Artifacts root path relative to the archive root
     """
     tty.msg(f"Fetching artifacts from: {url}")

@@ -681,33 +627,31 @@ def download_and_extract_artifacts(url, work_dir) -> str:
     if token:
         headers["PRIVATE-TOKEN"] = token

-    request = Request(url, headers=headers, method="GET")
+    opener = build_opener(HTTPHandler)
+
+    request = Request(url, headers=headers)
+    request.get_method = lambda: "GET"
+
+    response = opener.open(request, timeout=SPACK_CDASH_TIMEOUT)
+    response_code = response.getcode()
+
+    if response_code != 200:
+        msg = f"Error response code ({response_code}) in reproduce_ci_job"
+        raise SpackError(msg)

     artifacts_zip_path = os.path.join(work_dir, "artifacts.zip")
-    os.makedirs(work_dir, exist_ok=True)

-    try:
-        response = urlopen(request, timeout=SPACK_CDASH_TIMEOUT)
-        with open(artifacts_zip_path, "wb") as out_file:
-            shutil.copyfileobj(response, out_file)
-
-        with zipfile.ZipFile(artifacts_zip_path) as zip_file:
-            zip_file.extractall(work_dir)
-            # Get the artifact root
-            artifact_root = ""
-            for f in zip_file.filelist:
-                if "spack.lock" in f.filename:
-                    artifact_root = os.path.dirname(os.path.dirname(f.filename))
-                    break
-    except OSError as e:
-        raise SpackError(f"Error fetching artifacts: {e}")
-    finally:
-        try:
-            os.remove(artifacts_zip_path)
-        except FileNotFoundError:
-            # If the file doesn't exist we are already raising
-            pass
-
-    return artifact_root
+    if not os.path.exists(work_dir):
+        os.makedirs(work_dir)
+
+    with open(artifacts_zip_path, "wb") as out_file:
+        shutil.copyfileobj(response, out_file)
+
+    zip_file = zipfile.ZipFile(artifacts_zip_path)
+    zip_file.extractall(work_dir)
+    zip_file.close()
+
+    os.remove(artifacts_zip_path)


 def get_spack_info():
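Both sides of the hunk above implement the same fetch-then-unzip flow; the `-` side adds error handling, cleanup in `finally`, and artifact-root detection via `spack.lock`. A minimal standalone version of the shared pattern (hypothetical URL, no GitLab token):

    # Minimal sketch of the shared download-and-extract pattern.
    import os
    import shutil
    import zipfile
    from urllib.request import Request, urlopen

    url = "https://gitlab.example.com/api/v4/jobs/123/artifacts"  # hypothetical
    work_dir = "/tmp/repro"
    os.makedirs(work_dir, exist_ok=True)

    zip_path = os.path.join(work_dir, "artifacts.zip")
    with urlopen(Request(url)) as response, open(zip_path, "wb") as out_file:
        shutil.copyfileobj(response, out_file)  # stream the zip to disk

    with zipfile.ZipFile(zip_path) as zf:
        zf.extractall(work_dir)  # unpack next to the zip
    os.remove(zip_path)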
@@ -821,7 +765,7 @@ def setup_spack_repro_version(repro_dir, checkout_commit, merge_commit=None):
     return True


-def reproduce_ci_job(url, work_dir, autostart, gpg_url, runtime, use_local_head):
+def reproduce_ci_job(url, work_dir, autostart, gpg_url, runtime):
     """Given a url to gitlab artifacts.zip from a failed 'spack ci rebuild' job,
     attempt to setup an environment in which the failure can be reproduced
     locally. This entails the following:
@@ -835,11 +779,8 @@ def reproduce_ci_job(url, work_dir, autostart, gpg_url, runtime, use_local_head):
     commands to run to reproduce the build once inside the container.
     """
     work_dir = os.path.realpath(work_dir)
-    if os.path.exists(work_dir) and os.listdir(work_dir):
-        raise SpackError(f"Cannot run reproducer in non-emptry working dir:\n {work_dir}")
-
     platform_script_ext = "ps1" if IS_WINDOWS else "sh"
-    artifact_root = download_and_extract_artifacts(url, work_dir)
+    download_and_extract_artifacts(url, work_dir)

     gpg_path = None
     if gpg_url:
@@ -901,9 +842,6 @@ def reproduce_ci_job(url, work_dir, autostart, gpg_url, runtime, use_local_head):
     with open(repro_file, encoding="utf-8") as fd:
         repro_details = json.load(fd)

-    spec_file = fs.find(work_dir, repro_details["job_spec_json"])[0]
-    reproducer_spec = spack.spec.Spec.from_specfile(spec_file)
-
     repro_dir = os.path.dirname(repro_file)
     rel_repro_dir = repro_dir.replace(work_dir, "").lstrip(os.path.sep)

@@ -964,20 +902,17 @@ def reproduce_ci_job(url, work_dir, autostart, gpg_url, runtime, use_local_head):
     commit_regex = re.compile(r"commit\s+([^\s]+)")
     merge_commit_regex = re.compile(r"Merge\s+([^\s]+)\s+into\s+([^\s]+)")

-    if use_local_head:
-        commit_1 = "HEAD"
-    else:
-        # Try the more specific merge commit regex first
-        m = merge_commit_regex.search(spack_info)
-        if m:
-            # This was a merge commit and we captured the parents
-            commit_1 = m.group(1)
-            commit_2 = m.group(2)
-        else:
-            # Not a merge commit, just get the commit sha
-            m = commit_regex.search(spack_info)
-            if m:
-                commit_1 = m.group(1)
+    # Try the more specific merge commit regex first
+    m = merge_commit_regex.search(spack_info)
+    if m:
+        # This was a merge commit and we captured the parents
+        commit_1 = m.group(1)
+        commit_2 = m.group(2)
+    else:
+        # Not a merge commit, just get the commit sha
+        m = commit_regex.search(spack_info)
+        if m:
+            commit_1 = m.group(1)

     setup_result = False
     if commit_1:
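The regexes above parse `git log` text recorded in the job artifacts; the merge-commit pattern is tried first because it also recovers the target branch's commit. A quick check of both patterns against made-up log output:

    import re

    commit_regex = re.compile(r"commit\s+([^\s]+)")
    merge_commit_regex = re.compile(r"Merge\s+([^\s]+)\s+into\s+([^\s]+)")

    spack_info = "commit abc1234\nMerge def5678 into 9abcdef"  # made up

    commit_1 = commit_2 = None
    m = merge_commit_regex.search(spack_info)
    if m:
        commit_1, commit_2 = m.group(1), m.group(2)  # PR head and base
    else:
        m = commit_regex.search(spack_info)
        if m:
            commit_1 = m.group(1)

    print(commit_1, commit_2)  # def5678 9abcdef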
@@ -1052,8 +987,6 @@ def reproduce_ci_job(url, work_dir, autostart, gpg_url, runtime, use_local_head):
         "entrypoint", entrypoint_script, work_dir, run=False, exit_on_failure=False
     )

-    # Attempt to create a unique name for the reproducer container
-    container_suffix = "_" + reproducer_spec.dag_hash() if reproducer_spec else ""
    docker_command = [
         runtime,
         "run",
@@ -1061,14 +994,14 @@ def reproduce_ci_job(url, work_dir, autostart, gpg_url, runtime, use_local_head):
         "-t",
         "--rm",
         "--name",
-        f"spack_reproducer{container_suffix}",
+        "spack_reproducer",
         "-v",
         ":".join([work_dir, mounted_workdir, "Z"]),
         "-v",
         ":".join(
             [
-                os.path.join(work_dir, artifact_root),
-                os.path.join(mount_as_dir, artifact_root),
+                os.path.join(work_dir, "jobs_scratch_dir"),
+                os.path.join(mount_as_dir, "jobs_scratch_dir"),
                 "Z",
             ]
         ),
@@ -616,7 +616,7 @@ def generate_ir(self):
             # Reindex script
             {
                 "reindex-job": {
-                    "script:": ["spack -d buildcache update-index --keys {index_target_mirror}"]
+                    "script:": ["spack buildcache update-index --keys {index_target_mirror}"]
                 }
             },
             # Cleanup script
@@ -330,7 +330,7 @@ def ensure_single_spec_or_die(spec, matching_specs):
     if len(matching_specs) <= 1:
         return

-    format_string = "{name}{@version}{ arch=architecture} {%compiler.name}{@compiler.version}"
+    format_string = "{name}{@version}{%compiler.name}{@compiler.version}{ arch=architecture}"
     args = ["%s matches multiple packages." % spec, "Matching packages:"]
     args += [
         colorize("  @K{%s} " % s.dag_hash(7)) + s.cformat(format_string) for s in matching_specs
@@ -471,11 +471,12 @@ def get_arg(name, default=None):
     nfmt = "{fullname}" if namespaces else "{name}"
     ffmt = ""
     if full_compiler or flags:
-        ffmt += "{compiler_flags} {%compiler.name}"
+        ffmt += "{%compiler.name}"
         if full_compiler:
             ffmt += "{@compiler.version}"
+        ffmt += " {compiler_flags}"
     vfmt = "{variants}" if variants else ""
-    format_string = nfmt + "{@version}" + vfmt + ffmt
+    format_string = nfmt + "{@version}" + ffmt + vfmt

     def fmt(s, depth=0):
         """Formatter function for all output specs"""
@@ -3,7 +3,6 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

 import collections
-import warnings

 import archspec.cpu

@@ -52,10 +51,10 @@ def setup_parser(subparser):
         "-t", "--target", action="store_true", default=False, help="print only the target"
     )
     parts2.add_argument(
-        "-f", "--frontend", action="store_true", default=False, help="print frontend (DEPRECATED)"
+        "-f", "--frontend", action="store_true", default=False, help="print frontend"
     )
     parts2.add_argument(
-        "-b", "--backend", action="store_true", default=False, help="print backend (DEPRECATED)"
+        "-b", "--backend", action="store_true", default=False, help="print backend"
     )


@@ -99,14 +98,15 @@ def arch(parser, args):
         display_targets(archspec.cpu.TARGETS)
         return

+    os_args, target_args = "default_os", "default_target"
     if args.frontend:
-        warnings.warn("the argument --frontend is deprecated, and will be removed in Spack v1.0")
+        os_args, target_args = "frontend", "frontend"
     elif args.backend:
-        warnings.warn("the argument --backend is deprecated, and will be removed in Spack v1.0")
+        os_args, target_args = "backend", "backend"

     host_platform = spack.platforms.host()
-    host_os = host_platform.default_operating_system()
-    host_target = host_platform.default_target()
+    host_os = host_platform.operating_system(os_args)
+    host_target = host_platform.target(target_args)
     if args.family:
         host_target = host_target.family
     elif args.generic:
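The `-` side of this hunk deprecates `--frontend`/`--backend` via `warnings.warn` while keeping the flags parseable. A minimal, self-contained version of that pattern:

    # Minimal sketch of deprecating a CLI flag while keeping it functional.
    import argparse
    import warnings

    parser = argparse.ArgumentParser()
    parser.add_argument(
        "-f", "--frontend", action="store_true", help="print frontend (DEPRECATED)"
    )
    args = parser.parse_args(["--frontend"])

    if args.frontend:
        warnings.warn("the argument --frontend is deprecated, and will be removed in Spack v1.0")
        # then fall through to the same host os/target lookup as the default path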
@@ -4,7 +4,7 @@

 import re
 import sys
-from typing import Dict, Optional, Tuple
+from typing import Dict, Optional

 import llnl.string
 import llnl.util.lang
@@ -181,11 +181,7 @@ def checksum(parser, args):
         print()

     if args.add_to_package:
-        path = spack.repo.PATH.filename_for_package_name(pkg.name)
-        num_versions_added = add_versions_to_pkg(path, version_lines)
-        tty.msg(f"Added {num_versions_added} new versions to {pkg.name} in {path}")
-        if not args.batch and sys.stdin.isatty():
-            editor(path)
+        add_versions_to_package(pkg, version_lines, args.batch)


 def print_checksum_status(pkg: PackageBase, version_hashes: dict):
@@ -231,9 +227,20 @@ def print_checksum_status(pkg: PackageBase, version_hashes: dict):
         tty.die("Invalid checksums found.")


-def _update_version_statements(package_src: str, version_lines: str) -> Tuple[int, str]:
-    """Returns a tuple of number of versions added and the package's modified contents."""
+def add_versions_to_package(pkg: PackageBase, version_lines: str, is_batch: bool):
+    """
+    Add checksumed versions to a package's instructions and open a user's
+    editor so they may double check the work of the function.
+
+    Args:
+        pkg (spack.package_base.PackageBase): A package class for a given package in Spack.
+        version_lines (str): A string of rendered version lines.
+
+    """
+    # Get filename and path for package
+    filename = spack.repo.PATH.filename_for_package_name(pkg.name)
     num_versions_added = 0

     version_statement_re = re.compile(r"([\t ]+version\([^\)]*\))")
     version_re = re.compile(r'[\t ]+version\(\s*"([^"]+)"[^\)]*\)')

@@ -245,34 +252,33 @@ def _update_version_statements(package_src: str, version_lines: str) -> Tuple[int, str]:
         if match:
             new_versions.append((Version(match.group(1)), ver_line))

-    split_contents = version_statement_re.split(package_src)
+    with open(filename, "r+", encoding="utf-8") as f:
+        contents = f.read()
+        split_contents = version_statement_re.split(contents)
+
         for i, subsection in enumerate(split_contents):
             # If there are no more versions to add we should exit
             if len(new_versions) <= 0:
                 break

             # Check if the section contains a version
             contents_version = version_re.match(subsection)
             if contents_version is not None:
                 parsed_version = Version(contents_version.group(1))

                 if parsed_version < new_versions[0][0]:
                     split_contents[i:i] = [new_versions.pop(0)[1], "  # FIXME", "\n"]
                     num_versions_added += 1

                 elif parsed_version == new_versions[0][0]:
                     new_versions.pop(0)

-    return num_versions_added, "".join(split_contents)
-
-
-def add_versions_to_pkg(path: str, version_lines: str) -> int:
-    """Add new versions to a package.py file. Returns the number of versions added."""
-    with open(path, "r", encoding="utf-8") as f:
-        package_src = f.read()
-    num_versions_added, package_src = _update_version_statements(package_src, version_lines)
-    if num_versions_added > 0:
-        with open(path, "w", encoding="utf-8") as f:
-            f.write(package_src)
-    return num_versions_added
+        # Seek back to the start of the file so we can rewrite the file contents.
+        f.seek(0)
+        f.writelines("".join(split_contents))
+
+    tty.msg(f"Added {num_versions_added} new versions to {pkg.name}")
+    tty.msg(f"Open {filename} to review the additions.")
+
+    if sys.stdout.isatty() and not is_batch:
+        editor(filename)
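Both versions of this function lean on the fact that `re.split` keeps captured separators: every existing `version(...)` statement becomes its own list element, so a new line can be spliced in front of the first older version with `split_contents[i:i] = [...]`. A toy demonstration:

    import re

    version_statement_re = re.compile(r"([\t ]+version\([^\)]*\))")
    package_src = (
        "class Foo:\n"
        '    version("1.1.0", sha256="bbb")\n'
        '    version("1.0.0", sha256="aaa")\n'
    )

    parts = version_statement_re.split(package_src)
    # The captured version(...) statements are kept as list elements:
    for p in parts:
        print(repr(p))
    # 'class Foo:\n'
    # '    version("1.1.0", sha256="bbb")'
    # '\n'
    # '    version("1.0.0", sha256="aaa")'
    # '\n'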
@@ -4,15 +4,12 @@

 import json
 import os
-import re
 import shutil
-import sys
-from typing import Dict
 from urllib.parse import urlparse, urlunparse

 import llnl.util.filesystem as fs
+import llnl.util.tty as tty
 import llnl.util.tty.color as clr
-from llnl.util import tty

 import spack.binary_distribution as bindist
 import spack.ci as spack_ci
@@ -21,22 +18,12 @@
 import spack.cmd.common.arguments
 import spack.config as cfg
 import spack.environment as ev
-import spack.error
-import spack.fetch_strategy
 import spack.hash_types as ht
 import spack.mirrors.mirror
-import spack.package_base
-import spack.paths
-import spack.repo
-import spack.spec
-import spack.stage
-import spack.util.executable
-import spack.util.git
 import spack.util.gpg as gpg_util
 import spack.util.timer as timer
 import spack.util.url as url_util
 import spack.util.web as web_util
-import spack.version

 description = "manage continuous integration pipelines"
 section = "build"
@@ -45,7 +32,6 @@
 SPACK_COMMAND = "spack"
 INSTALL_FAIL_CODE = 1
 FAILED_CREATE_BUILDCACHE_CODE = 100
-BUILTIN = re.compile(r"var\/spack\/repos\/builtin\/packages\/([^\/]+)\/package\.py")


 def deindent(desc):
@@ -190,11 +176,6 @@ def setup_parser(subparser):
     reproduce.add_argument(
         "-s", "--autostart", help="Run docker reproducer automatically", action="store_true"
     )
-    reproduce.add_argument(
-        "--use-local-head",
-        help="Use the HEAD of the local Spack instead of reproducing a commit",
-        action="store_true",
-    )
     gpg_group = reproduce.add_mutually_exclusive_group(required=False)
     gpg_group.add_argument(
         "--gpg-file", help="Path to public GPG key for validating binary cache installs"
@@ -205,16 +186,6 @@ def setup_parser(subparser):

     reproduce.set_defaults(func=ci_reproduce)

-    # Verify checksums inside of ci workflows
-    verify_versions = subparsers.add_parser(
-        "verify-versions",
-        description=deindent(ci_verify_versions.__doc__),
-        help=spack.cmd.first_line(ci_verify_versions.__doc__),
-    )
-    verify_versions.add_argument("from_ref", help="git ref from which start looking at changes")
-    verify_versions.add_argument("to_ref", help="git ref to end looking at changes")
-    verify_versions.set_defaults(func=ci_verify_versions)
-

 def ci_generate(args):
     """generate jobs file from a CI-aware spack file
@@ -451,7 +422,7 @@ def ci_rebuild(args):

     # Arguments when installing the root from sources
     deps_install_args = install_args + ["--only=dependencies"]
-    root_install_args = install_args + ["--only=package"]
+    root_install_args = install_args + ["--keep-stage", "--only=package"]

     if cdash_handler:
         # Add additional arguments to `spack install` for CDash reporting.
@@ -488,7 +459,8 @@ def ci_rebuild(args):
         job_spec.to_dict(hash=ht.dag_hash),
     )

-    # Copy logs and archived files from the install metadata (.spack) directory to artifacts now
+    # We generated the "spack install ..." command to "--keep-stage", copy
+    # any logs from the staging directory to artifacts now
     spack_ci.copy_stage_logs_to_artifacts(job_spec, job_log_dir)

     # If the installation succeeded and we're running stand-alone tests for
@@ -636,12 +608,7 @@ def ci_reproduce(args):
         gpg_key_url = None

     return spack_ci.reproduce_ci_job(
-        args.job_url,
-        args.working_dir,
-        args.autostart,
-        gpg_key_url,
-        args.runtime,
-        args.use_local_head,
+        args.job_url, args.working_dir, args.autostart, gpg_key_url, args.runtime
     )


@@ -683,159 +650,6 @@ def _gitlab_artifacts_url(url: str) -> str:
     return urlunparse(parsed._replace(path="/".join(parts), fragment="", query=""))


-def validate_standard_versions(
-    pkg: spack.package_base.PackageBase, versions: spack.version.VersionList
-) -> bool:
-    """Get and test the checksum of a package version based on a tarball.
-    Args:
-        pkg spack.package_base.PackageBase: Spack package for which to validate a version checksum
-        versions spack.version.VersionList: list of package versions to validate
-    Returns: bool: result of the validation. True is valid and false is failed.
-    """
-    url_dict: Dict[spack.version.StandardVersion, str] = {}
-
-    for version in versions:
-        url = pkg.find_valid_url_for_version(version)
-        url_dict[version] = url
-
-    version_hashes = spack.stage.get_checksums_for_versions(
-        url_dict, pkg.name, fetch_options=pkg.fetch_options
-    )
-
-    valid_checksums = True
-    for version, sha in version_hashes.items():
-        if sha != pkg.versions[version]["sha256"]:
-            tty.error(
-                f"Invalid checksum found {pkg.name}@{version}\n"
-                f"    [package.py] {pkg.versions[version]['sha256']}\n"
-                f"    [Downloaded] {sha}"
-            )
-            valid_checksums = False
-            continue
-
-        tty.info(f"Validated {pkg.name}@{version} --> {sha}")
-
-    return valid_checksums
-
-
-def validate_git_versions(
-    pkg: spack.package_base.PackageBase, versions: spack.version.VersionList
-) -> bool:
-    """Get and test the commit and tag of a package version based on a git repository.
-    Args:
-        pkg spack.package_base.PackageBase: Spack package for which to validate a version
-        versions spack.version.VersionList: list of package versions to validate
-    Returns: bool: result of the validation. True is valid and false is failed.
-    """
-    valid_commit = True
-    for version in versions:
-        fetcher = spack.fetch_strategy.for_package_version(pkg, version)
-        with spack.stage.Stage(fetcher) as stage:
-            known_commit = pkg.versions[version]["commit"]
-            try:
-                stage.fetch()
-            except spack.error.FetchError:
-                tty.error(
-                    f"Invalid commit for {pkg.name}@{version}\n"
-                    f"    {known_commit} could not be checked out in the git repository."
-                )
-                valid_commit = False
-                continue
-
-            # Test if the specified tag matches the commit in the package.py
-            # We retrieve the commit associated with a tag and compare it to the
-            # commit that is located in the package.py file.
-            if "tag" in pkg.versions[version]:
-                tag = pkg.versions[version]["tag"]
-                try:
-                    with fs.working_dir(stage.source_path):
-                        found_commit = fetcher.git(
-                            "rev-list", "-n", "1", tag, output=str, error=str
-                        ).strip()
-                except spack.util.executable.ProcessError:
-                    tty.error(
-                        f"Invalid tag for {pkg.name}@{version}\n"
-                        f"    {tag} could not be found in the git repository."
-                    )
-                    valid_commit = False
-                    continue
-
-                if found_commit != known_commit:
-                    tty.error(
-                        f"Mismatched tag <-> commit found for {pkg.name}@{version}\n"
-                        f"    [package.py] {known_commit}\n"
-                        f"    [Downloaded] {found_commit}"
-                    )
-                    valid_commit = False
-                    continue
-
-            # If we have downloaded the repository, found the commit, and compared
-            # the tag (if specified) we can conclude that the version is pointing
-            # at what we would expect.
-            tty.info(f"Validated {pkg.name}@{version} --> {known_commit}")
-
-    return valid_commit
-
-
-def ci_verify_versions(args):
-    """validate version checksum & commits between git refs
-    This command takes a from_ref and to_ref arguments and
-    then parses the git diff between the two to determine which packages
-    have been modified verifies the new checksums inside of them.
-    """
-    with fs.working_dir(spack.paths.prefix):
-        # We use HEAD^1 explicitly on the merge commit created by
-        # GitHub Actions. However HEAD~1 is a safer default for the helper function.
-        files = spack.util.git.get_modified_files(from_ref=args.from_ref, to_ref=args.to_ref)
-
-    # Get a list of package names from the modified files.
-    pkgs = [(m.group(1), p) for p in files for m in [BUILTIN.search(p)] if m]
-
-    failed_version = False
-    for pkg_name, path in pkgs:
-        spec = spack.spec.Spec(pkg_name)
-        pkg = spack.repo.PATH.get_pkg_class(spec.name)(spec)
-
-        # Skip checking manual download packages and trust the maintainers
-        if pkg.manual_download:
-            tty.warn(f"Skipping manual download package: {pkg_name}")
-            continue
-
-        # Store versions checksums / commits for future loop
-        checksums_version_dict = {}
-        commits_version_dict = {}
-        for version in pkg.versions:
-            # If the package version defines a sha256 we'll use that as the high entropy
-            # string to detect which versions have been added between from_ref and to_ref
-            if "sha256" in pkg.versions[version]:
-                checksums_version_dict[pkg.versions[version]["sha256"]] = version
-
-            # If a package version instead defines a commit we'll use that as a
-            # high entropy string to detect new versions.
-            elif "commit" in pkg.versions[version]:
-                commits_version_dict[pkg.versions[version]["commit"]] = version
-
-            # TODO: enforce every version have a commit or a sha256 defined if not
-            # an infinite version (there are a lot of package's where this doesn't work yet.)
-
-        with fs.working_dir(spack.paths.prefix):
-            added_checksums = spack_ci.get_added_versions(
-                checksums_version_dict, path, from_ref=args.from_ref, to_ref=args.to_ref
-            )
-            added_commits = spack_ci.get_added_versions(
-                commits_version_dict, path, from_ref=args.from_ref, to_ref=args.to_ref
-            )
-
-        if added_checksums:
-            failed_version = not validate_standard_versions(pkg, added_checksums) or failed_version
-
-        if added_commits:
-            failed_version = not validate_git_versions(pkg, added_commits) or failed_version
-
-    if failed_version:
-        sys.exit(1)
-
-
 def ci(parser, args):
     if args.func:
         return args.func(args)
@@ -528,6 +528,7 @@ def __call__(self, parser, namespace, values, option_string):
         # the const from the constructor or a value from the CLI.
         # Note that this is only called if the argument is actually
         # specified on the command line.
+        spack.config.CONFIG.ensure_scope_ordering()
         spack.config.set(self.config_path, self.const, scope="command_line")

@@ -350,12 +350,9 @@ def _config_change(config_path, match_spec_str=None):
             if spack.config.get(key_path, scope=scope):
                 ideal_scope_to_modify = scope
                 break
-        # If we find our key in a specific scope, that's the one we want
-        # to modify. Otherwise we use the default write scope.
-        write_scope = ideal_scope_to_modify or spack.config.default_modify_scope()

         update_path = f"{key_path}:[{str(spec)}]"
-        spack.config.add(update_path, scope=write_scope)
+        spack.config.add(update_path, scope=ideal_scope_to_modify)
     else:
         raise ValueError("'config change' can currently only change 'require' sections")

@@ -2,11 +2,23 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

+import os
 import platform
+import re
+import sys
+from datetime import datetime
+from glob import glob
+
+import llnl.util.tty as tty
+from llnl.util.filesystem import working_dir

 import spack
+import spack.paths
 import spack.platforms
 import spack.spec
+import spack.store
+import spack.util.git
+from spack.util.executable import which

 description = "debugging commands for troubleshooting Spack"
 section = "developer"
@@ -15,13 +27,67 @@

 def setup_parser(subparser):
     sp = subparser.add_subparsers(metavar="SUBCOMMAND", dest="debug_command")
+    sp.add_parser("create-db-tarball", help="create a tarball of Spack's installation metadata")
     sp.add_parser("report", help="print information useful for bug reports")


+def _debug_tarball_suffix():
+    now = datetime.now()
+    suffix = now.strftime("%Y-%m-%d-%H%M%S")
+
+    git = spack.util.git.git()
+    if not git:
+        return "nobranch-nogit-%s" % suffix
+
+    with working_dir(spack.paths.prefix):
+        if not os.path.isdir(".git"):
+            return "nobranch.nogit.%s" % suffix
+
+        # Get symbolic branch name and strip any special chars (mainly '/')
+        symbolic = git("rev-parse", "--abbrev-ref", "--short", "HEAD", output=str).strip()
+        symbolic = re.sub(r"[^\w.-]", "-", symbolic)
+
+        # Get the commit hash too.
+        commit = git("rev-parse", "--short", "HEAD", output=str).strip()
+
+        if symbolic == commit:
+            return "nobranch.%s.%s" % (commit, suffix)
+        else:
+            return "%s.%s.%s" % (symbolic, commit, suffix)
+
+
+def create_db_tarball(args):
+    tar = which("tar")
+    tarball_name = "spack-db.%s.tar.gz" % _debug_tarball_suffix()
+    tarball_path = os.path.abspath(tarball_name)
+
+    base = os.path.basename(str(spack.store.STORE.root))
+    transform_args = []
+    # Currently --transform and -s are not supported by Windows native tar
+    if "GNU" in tar("--version", output=str):
+        transform_args = ["--transform", "s/^%s/%s/" % (base, tarball_name)]
+    elif sys.platform != "win32":
+        transform_args = ["-s", "/^%s/%s/" % (base, tarball_name)]
+
+    wd = os.path.dirname(str(spack.store.STORE.root))
+    with working_dir(wd):
+        files = [spack.store.STORE.db._index_path]
+        files += glob("%s/*/*/*/.spack/spec.json" % base)
+        files += glob("%s/*/*/*/.spack/spec.yaml" % base)
+        files = [os.path.relpath(f) for f in files]
+
+        args = ["-czf", tarball_path]
+        args += transform_args
+        args += files
+        tar(*args)
+
+    tty.msg("Created %s" % tarball_name)
+
+
 def report(args):
     host_platform = spack.platforms.host()
-    host_os = host_platform.default_operating_system()
-    host_target = host_platform.default_target()
+    host_os = host_platform.operating_system("frontend")
+    host_target = host_platform.target("frontend")
     architecture = spack.spec.ArchSpec((str(host_platform), str(host_os), str(host_target)))
     print("* **Spack:**", spack.get_version())
     print("* **Python:**", platform.python_version())
@@ -29,5 +95,5 @@ def report(args):


 def debug(parser, args):
-    if args.debug_command == "report":
-        report(args)
+    action = {"create-db-tarball": create_db_tarball, "report": report}
+    action[args.debug_command](args)
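The reinstated `create_db_tarball` picks a rename flag by tar flavor: GNU tar spells it `--transform`, BSD tar `-s`, and Windows' native tar supports neither. That branch in isolation, using `subprocess` in place of Spack's executable wrapper:

    # Isolated sketch of the GNU-vs-BSD tar rename-flag selection.
    import subprocess
    import sys

    def transform_args(base: str, tarball_name: str) -> list:
        version = subprocess.run(
            ["tar", "--version"], capture_output=True, text=True
        ).stdout
        if "GNU" in version:
            # GNU tar: --transform with a sed-style substitution
            return ["--transform", "s/^%s/%s/" % (base, tarball_name)]
        elif sys.platform != "win32":
            # BSD tar: -s with the same substitution syntax
            return ["-s", "/^%s/%s/" % (base, tarball_name)]
        return []  # Windows native tar supports neither flag

    print(transform_args("opt", "spack-db.2024-01-01-000000.tar.gz"))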
@@ -9,9 +9,9 @@

 import spack.cmd
 import spack.environment as ev
+import spack.package_base
 import spack.store
 from spack.cmd.common import arguments
-from spack.solver.input_analysis import create_graph_analyzer

 description = "show dependencies of a package"
 section = "basic"
@@ -55,7 +55,7 @@ def dependencies(parser, args):
         env = ev.active_environment()
         spec = spack.cmd.disambiguate_spec(specs[0], env)

-        format_string = "{name}{@version}{/hash:7}{%compiler}"
+        format_string = "{name}{@version}{%compiler}{/hash:7}"
         if sys.stdout.isatty():
             tty.msg("Dependencies of %s" % spec.format(format_string, color=True))
         deps = spack.store.STORE.db.installed_relatives(
@@ -68,17 +68,15 @@ def dependencies(parser, args):

     else:
         spec = specs[0]
-        dependencies, virtuals, _ = create_graph_analyzer().possible_dependencies(
+        dependencies = spack.package_base.possible_dependencies(
             spec,
             transitive=args.transitive,
             expand_virtuals=args.expand_virtuals,
-            allowed_deps=args.deptype,
+            depflag=args.deptype,
         )
-        if not args.expand_virtuals:
-            dependencies.update(virtuals)

         if spec.name in dependencies:
-            dependencies.remove(spec.name)
+            del dependencies[spec.name]

         if dependencies:
             colify(sorted(dependencies))
@@ -93,7 +93,7 @@ def dependents(parser, args):
         env = ev.active_environment()
         spec = spack.cmd.disambiguate_spec(specs[0], env)

-        format_string = "{name}{@version}{/hash:7}{%compiler}"
+        format_string = "{name}{@version}{%compiler}{/hash:7}"
         if sys.stdout.isatty():
             tty.msg("Dependents of %s" % spec.cformat(format_string))
         deps = spack.store.STORE.db.installed_relatives(spec, "parents", args.transitive)
@@ -3,13 +3,11 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 import os
 import shutil
-from typing import Optional

 import llnl.util.tty as tty

 import spack.cmd
 import spack.config
-import spack.environment
 import spack.fetch_strategy
 import spack.repo
 import spack.spec
@@ -33,33 +31,37 @@ def setup_parser(subparser):
         "--no-clone",
         action="store_false",
         dest="clone",
+        default=None,
         help="do not clone, the package already exists at the source path",
     )
     clone_group.add_argument(
         "--clone",
         action="store_true",
         dest="clone",
-        default=True,
-        help=(
-            "(default) clone the package unless the path already exists, "
-            "use --force to overwrite"
-        ),
+        default=None,
+        help="clone the package even if the path already exists",
     )

     subparser.add_argument(
         "-f", "--force", help="remove any files or directories that block cloning source code"
     )

-    subparser.add_argument(
-        "-r",
-        "--recursive",
-        action="store_true",
-        help="traverse nodes of the graph to mark everything up to the root as a develop spec",
-    )
-
     arguments.add_common_arguments(subparser, ["spec"])


+def _update_config(spec, path):
+    find_fn = lambda section: spec.name in section
+
+    entry = {"spec": str(spec)}
+    if path != spec.name:
+        entry["path"] = path
+
+    def change_fn(section):
+        section[spec.name] = entry
+
+    spack.config.change_or_add("develop", find_fn, change_fn)
+
+
 def _retrieve_develop_source(spec: spack.spec.Spec, abspath: str) -> None:
     # "steal" the source code via staging API. We ask for a stage
     # to be created, then copy it afterwards somewhere else. It would be

@@ -81,159 +83,86 @@ def _retrieve_develop_source(spec: spack.spec.Spec, abspath: str) -> None:
     package.stage.steal_source(abspath)


-def assure_concrete_spec(env: spack.environment.Environment, spec: spack.spec.Spec):
-    version = spec.versions.concrete_range_as_version
-    if not version:
-        # first check environment for a matching concrete spec
-        matching_specs = env.all_matching_specs(spec)
-        if matching_specs:
-            version = matching_specs[0].version
-            test_spec = spack.spec.Spec(f"{spec}@{version}")
-            for m_spec in matching_specs:
-                if not m_spec.satisfies(test_spec):
-                    raise SpackError(
-                        f"{spec.name}: has multiple concrete instances in the graph that can't be"
-                        " satisified by a single develop spec. To use `spack develop` ensure one"
-                        " of the following:"
-                        f"\n a) {spec.name} nodes can satisfy the same develop spec (minimally "
-                        "this means they all share the same version)"
-                        f"\n b) Provide a concrete develop spec ({spec.name}@[version]) to clearly"
-                        " indicate what should be developed"
-                    )
-        else:
-            # look up the maximum version so infintiy versions are preferred for develop
-            version = max(spec.package_class.versions.keys())
-            tty.msg(f"Defaulting to highest version: {spec.name}@{version}")
-    spec.versions = spack.version.VersionList([version])
-
-
-def setup_src_code(spec: spack.spec.Spec, src_path: str, clone: bool = True, force: bool = False):
-    """
-    Handle checking, cloning or overwriting source code
-    """
-    assert spec.versions
-
-    if clone:
-        _clone(spec, src_path, force)
-
-    if not clone and not os.path.exists(src_path):
-        raise SpackError(f"Provided path {src_path} does not exist")
-
-    version = spec.versions.concrete_range_as_version
-    if not version:
-        # look up the maximum version so infintiy versions are preferred for develop
-        version = max(spack.repo.PATH.get_pkg_class(spec.fullname).versions.keys())
-        tty.msg(f"Defaulting to highest version: {spec.name}@{version}")
-    spec.versions = spack.version.VersionList([version])
-
-
-def _update_config(spec, path):
-    find_fn = lambda section: spec.name in section
-
-    entry = {"spec": str(spec)}
-    if path and path != spec.name:
-        entry["path"] = path
-
-    def change_fn(section):
-        section[spec.name] = entry
-
-    spack.config.change_or_add("develop", find_fn, change_fn)
-
-
-def update_env(
-    env: spack.environment.Environment,
-    spec: spack.spec.Spec,
-    specified_path: Optional[str] = None,
-    build_dir: Optional[str] = None,
-):
-    """
-    Update the spack.yaml file with additions or changes from a develop call
-    """
-    tty.debug(f"Updating develop config for {env.name} transactionally")
-
-    if not specified_path:
-        dev_entry = env.dev_specs.get(spec.name)
-        if dev_entry:
-            specified_path = dev_entry.get("path", None)
-
-    with env.write_transaction():
-        if build_dir is not None:
-            spack.config.add(
-                f"packages:{spec.name}:package_attributes:build_directory:{build_dir}",
-                env.scope_name,
-            )
-        # add develop spec and update path
-        _update_config(spec, specified_path)
-
-
-def _clone(spec: spack.spec.Spec, abspath: str, force: bool = False):
-    if os.path.exists(abspath):
-        if force:
-            shutil.rmtree(abspath)
-        else:
-            msg = f"Skipping developer download of {spec.name}"
-            msg += f" because its path {abspath} already exists."
-            tty.msg(msg)
-            return
-
-    # cloning can take a while and it's nice to get a message for the longer clones
-    tty.msg(f"Cloning source code for {spec}")
-    _retrieve_develop_source(spec, abspath)
-
-
-def _abs_code_path(
-    env: spack.environment.Environment, spec: spack.spec.Spec, path: Optional[str] = None
-):
-    src_path = path if path else spec.name
-    return spack.util.path.canonicalize_path(src_path, default_wd=env.path)
-
-
-def _dev_spec_generator(args, env):
-    """
-    Generator function to loop over all the develop specs based on how the command is called
-    If no specs are supplied then loop over the develop specs listed in the environment.
-    """
+def develop(parser, args):
+    # Note: we could put develop specs in any scope, but I assume
+    # users would only ever want to do this for either (a) an active
+    # env or (b) a specified config file (e.g. that is included by
+    # an environment)
+    # TODO: when https://github.com/spack/spack/pull/35307 is merged,
+    # an active env is not required if a scope is specified
+    env = spack.cmd.require_active_env(cmd_name="develop")
+
     if not args.spec:
         if args.clone is False:
             raise SpackError("No spec provided to spack develop command")

+        # download all dev specs
         for name, entry in env.dev_specs.items():
             path = entry.get("path", name)
             abspath = spack.util.path.canonicalize_path(path, default_wd=env.path)

+            if os.path.exists(abspath):
+                msg = "Skipping developer download of %s" % entry["spec"]
+                msg += " because its path already exists."
+                tty.msg(msg)
+                continue
+
             # Both old syntax `spack develop pkg@x` and new syntax `spack develop pkg@=x`
|
# Both old syntax `spack develop pkg@x` and new syntax `spack develop pkg@=x`
|
||||||
# are currently supported.
|
# are currently supported.
|
||||||
spec = spack.spec.parse_with_version_concrete(entry["spec"])
|
spec = spack.spec.parse_with_version_concrete(entry["spec"])
|
||||||
yield spec, abspath
|
_retrieve_develop_source(spec, abspath)
|
||||||
|
|
||||||
|
if not env.dev_specs:
|
||||||
|
tty.warn("No develop specs to download")
|
||||||
|
|
||||||
|
return
|
||||||
|
|
||||||
|
specs = spack.cmd.parse_specs(args.spec)
|
||||||
|
if len(specs) > 1:
|
||||||
|
raise SpackError("spack develop requires at most one named spec")
|
||||||
|
|
||||||
|
spec = specs[0]
|
||||||
|
|
||||||
|
version = spec.versions.concrete_range_as_version
|
||||||
|
if not version:
|
||||||
|
# look up the maximum version so infintiy versions are preferred for develop
|
||||||
|
version = max(spec.package_class.versions.keys())
|
||||||
|
tty.msg(f"Defaulting to highest version: {spec.name}@{version}")
|
||||||
|
spec.versions = spack.version.VersionList([version])
|
||||||
|
|
||||||
|
# If user does not specify --path, we choose to create a directory in the
|
||||||
|
# active environment's directory, named after the spec
|
||||||
|
path = args.path or spec.name
|
||||||
|
if not os.path.isabs(path):
|
||||||
|
abspath = spack.util.path.canonicalize_path(path, default_wd=env.path)
|
||||||
else:
|
else:
|
||||||
specs = spack.cmd.parse_specs(args.spec)
|
abspath = path
|
||||||
if (args.path or args.build_directory) and len(specs) > 1:
|
|
||||||
raise SpackError(
|
|
||||||
"spack develop requires at most one named spec when using the --path or"
|
|
||||||
" --build-directory arguments"
|
|
||||||
)
|
|
||||||
|
|
||||||
for spec in specs:
|
# clone default: only if the path doesn't exist
|
||||||
if args.recursive:
|
clone = args.clone
|
||||||
concrete_specs = env.all_matching_specs(spec)
|
if clone is None:
|
||||||
if not concrete_specs:
|
clone = not os.path.exists(abspath)
|
||||||
tty.warn(
|
|
||||||
f"{spec.name} has no matching concrete specs in the environment and "
|
if not clone and not os.path.exists(abspath):
|
||||||
"will be skipped. `spack develop --recursive` requires a concretized"
|
raise SpackError("Provided path %s does not exist" % abspath)
|
||||||
" environment"
|
|
||||||
)
|
if clone:
|
||||||
else:
|
if os.path.exists(abspath):
|
||||||
for s in concrete_specs:
|
if args.force:
|
||||||
for node_spec in s.traverse(direction="parents", root=True):
|
shutil.rmtree(abspath)
|
||||||
tty.debug(f"Recursive develop for {node_spec.name}")
|
|
||||||
yield node_spec, _abs_code_path(env, node_spec, args.path)
|
|
||||||
else:
|
else:
|
||||||
yield spec, _abs_code_path(env, spec, args.path)
|
msg = "Path %s already exists and cannot be cloned to." % abspath
|
||||||
|
msg += " Use `spack develop -f` to overwrite."
|
||||||
|
raise SpackError(msg)
|
||||||
|
|
||||||
|
_retrieve_develop_source(spec, abspath)
|
||||||
|
|
||||||
def develop(parser, args):
|
tty.debug("Updating develop config for {0} transactionally".format(env.name))
|
||||||
env = spack.cmd.require_active_env(cmd_name="develop")
|
with env.write_transaction():
|
||||||
|
if args.build_directory is not None:
|
||||||
for spec, abspath in _dev_spec_generator(args, env):
|
spack.config.add(
|
||||||
assure_concrete_spec(env, spec)
|
"packages:{}:package_attributes:build_directory:{}".format(
|
||||||
setup_src_code(spec, abspath, clone=args.clone, force=args.force)
|
spec.name, args.build_directory
|
||||||
update_env(env, spec, args.path, args.build_directory)
|
),
|
||||||
|
env.scope_name,
|
||||||
|
)
|
||||||
|
_update_config(spec, path)
|
||||||
|
|||||||
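Reviewer note: both versions of `_update_config` above funnel the spack.yaml edit through `spack.config.change_or_add("develop", find_fn, change_fn)`. A minimal sketch of that find/change callback pattern, with a hypothetical `mypkg` entry standing in for a real develop spec (the commented-out call is the real API; the semantics described are approximate):

    # Sketch only: `section` is the parsed "develop" config section, a mapping
    # of package names to develop entries.
    def find_fn(section):
        # select the scope whose "develop" section already has the entry
        return "mypkg" in section

    def change_fn(section):
        # overwrite or create the entry in the scope that find_fn selected
        section["mypkg"] = {"spec": "mypkg@2.0", "path": "/src/mypkg"}

    # change_or_add applies change_fn in a scope where find_fn returned True,
    # otherwise it adds the entry in a default scope:
    # spack.config.change_or_add("develop", find_fn, change_fn)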
@@ -110,7 +110,10 @@ def external_find(args):
         # Note that KeyboardInterrupt does not subclass Exception
         # (so CTRL-C will terminate the program as expected).
         skip_msg = "Skipping manifest and continuing with other external checks"
-        if isinstance(e, OSError) and e.errno in (errno.EPERM, errno.EACCES):
+        if (isinstance(e, IOError) or isinstance(e, OSError)) and e.errno in [
+            errno.EPERM,
+            errno.EACCES,
+        ]:
             # The manifest file does not have sufficient permissions enabled:
             # print a warning and keep going
             tty.warn("Unable to read manifest due to insufficient permissions.", skip_msg)
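The single `OSError` test on the left is equivalent to the right side's `isinstance(e, IOError) or isinstance(e, OSError)`: since Python 3.3, `IOError` is an alias of `OSError`. A self-contained illustration:

    import errno

    def try_read_manifest(path):
        try:
            with open(path, "rb") as f:
                return f.read()
        except OSError as e:  # also catches everything that used to be IOError
            if e.errno in (errno.EPERM, errno.EACCES):
                print("insufficient permissions; skipping manifest")
                return None
            raise

    # simulate the permission failure without touching the filesystem
    err = PermissionError(errno.EACCES, "permission denied")
    print(isinstance(err, IOError), isinstance(err, OSError))  # True True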
@@ -54,6 +54,10 @@
     @m{target=target}              specific <target> processor
     @m{arch=platform-os-target}    shortcut for all three above
 
+  cross-compiling:
+    @m{os=backend} or @m{os=be}    build for compute node (backend)
+    @m{os=frontend} or @m{os=fe}   build for login node (frontend)
+
   dependencies:
     ^dependency [constraints]      specify constraints on dependencies
     ^@K{/hash}                     build with a specific installed
@@ -73,7 +77,7 @@
     boxlib @B{dim=2}               boxlib built for 2 dimensions
     libdwarf @g{%intel} ^libelf@g{%gcc}
         libdwarf, built with intel compiler, linked to libelf built with gcc
-    mvapich2 @B{fabrics=psm,mrail,sock} @g{%gcc}
+    mvapich2 @g{%gcc} @B{fabrics=psm,mrail,sock}
         mvapich2, built with gcc compiler, with support for multiple fabrics
 """
@@ -545,7 +545,7 @@ def _not_license_excluded(self, x):
         package does not explicitly forbid redistributing source."""
         if self.private:
             return True
-        elif spack.repo.PATH.get_pkg_class(x.fullname).redistribute_source(x):
+        elif x.package_class.redistribute_source(x):
             return True
         else:
             tty.debug(
@@ -383,10 +383,8 @@ def modules_cmd(parser, args, module_type, callbacks=callbacks):
         query = " ".join(str(s) for s in args.constraint_specs)
         msg = f"the constraint '{query}' matches multiple packages:\n"
         for s in specs:
-            spec_fmt = (
-                "{hash:7} {name}{@version}{compiler_flags}{variants}"
-                "{arch=architecture} {%compiler}"
-            )
+            spec_fmt = "{hash:7} {name}{@version}{%compiler}"
+            spec_fmt += "{compiler_flags}{variants}{arch=architecture}"
             msg += "\t" + s.cformat(spec_fmt) + "\n"
         tty.die(msg, "In this context exactly *one* match is needed.")
@@ -41,11 +41,7 @@ def providers(parser, args):
     specs = spack.cmd.parse_specs(args.virtual_package)
 
     # Check prerequisites
-    non_virtual = [
-        str(s)
-        for s in specs
-        if not spack.repo.PATH.is_virtual(s.name) or s.name not in valid_virtuals
-    ]
+    non_virtual = [str(s) for s in specs if not s.virtual or s.name not in valid_virtuals]
     if non_virtual:
         msg = "non-virtual specs cannot be part of the query "
         msg += "[{0}]\n".format(", ".join(non_virtual))
@@ -136,7 +136,20 @@ def solve(parser, args):
     setup_only = set(show) == {"asp"}
     unify = spack.config.get("concretizer:unify")
     allow_deprecated = spack.config.get("config:deprecated", False)
-    if unify == "when_possible":
+    if unify != "when_possible":
+        # set up solver parameters
+        # Note: reuse and other concretizer prefs are passed as configuration
+        result = solver.solve(
+            specs,
+            out=output,
+            timers=args.timers,
+            stats=args.stats,
+            setup_only=setup_only,
+            allow_deprecated=allow_deprecated,
+        )
+        if not setup_only:
+            _process_result(result, show, required_format, kwargs)
+    else:
         for idx, result in enumerate(
             solver.solve_in_rounds(
                 specs,
@@ -153,29 +166,3 @@ def solve(parser, args):
             print("% END ROUND {0}\n".format(idx))
             if not setup_only:
                 _process_result(result, show, required_format, kwargs)
-    elif unify:
-        # set up solver parameters
-        # Note: reuse and other concretizer prefs are passed as configuration
-        result = solver.solve(
-            specs,
-            out=output,
-            timers=args.timers,
-            stats=args.stats,
-            setup_only=setup_only,
-            allow_deprecated=allow_deprecated,
-        )
-        if not setup_only:
-            _process_result(result, show, required_format, kwargs)
-    else:
-        for spec in specs:
-            print("SOLVING SPEC:", spec)
-            result = solver.solve(
-                [spec],
-                out=output,
-                timers=args.timers,
-                stats=args.stats,
-                setup_only=setup_only,
-                allow_deprecated=allow_deprecated,
-            )
-            if not setup_only:
-                _process_result(result, show, required_format, kwargs)
@@ -6,9 +6,8 @@
 import os
 import re
 import sys
-import warnings
-from itertools import islice, zip_longest
-from typing import Callable, Dict, List, Optional
+from itertools import zip_longest
+from typing import Dict, List, Optional
 
 import llnl.util.tty as tty
 import llnl.util.tty.color as color
@@ -17,9 +16,6 @@
 import spack.paths
 import spack.repo
 import spack.util.git
-import spack.util.spack_yaml
-from spack.spec_parser import SPEC_TOKENIZER, SpecTokens
-from spack.tokenize import Token
 from spack.util.executable import Executable, which
 
 description = "runs source code style checks on spack"
@@ -202,13 +198,6 @@ def setup_parser(subparser):
         action="append",
         help="specify tools to skip (choose from %s)" % ", ".join(tool_names),
     )
-    subparser.add_argument(
-        "--spec-strings",
-        action="store_true",
-        help="upgrade spec strings in Python, JSON and YAML files for compatibility with Spack "
-        "v1.0 and v0.x. Example: spack style --spec-strings $(git ls-files). Note: this flag "
-        "will be removed in Spack v1.0.",
-    )
     subparser.add_argument("files", nargs=argparse.REMAINDER, help="specific files to check")
@@ -434,8 +423,7 @@ def _run_import_check(
             continue
 
         for m in is_abs_import.finditer(contents):
-            # Find at most two occurences: the first is the import itself, the second is its usage.
-            if len(list(islice(re.finditer(rf"{re.escape(m.group(1))}(?!\w)", contents), 2))) == 1:
+            if contents.count(m.group(1)) == 1:
                 to_remove.append(m.group(0))
                 exit_code = 1
                 print(f"{pretty_path}: redundant import: {m.group(1)}", file=out)
@@ -450,7 +438,7 @@ def _run_import_check(
             module = _module_part(root, m.group(0))
             if not module or module in to_add:
                 continue
-            if re.search(rf"import {re.escape(module)}(?!\w|\.)", contents):
+            if re.search(rf"import {re.escape(module)}\b(?!\.)", contents):
                 continue
             to_add.add(module)
             exit_code = 1
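The left side of the first hunk counts word-boundary regex matches (capped at two via `islice`) where the right side counts raw substrings; `str.count` treats `spack.repo` inside `spack.repository` as a usage of the import, while the `(?!\w)` lookahead does not. A small demonstration with illustrative module names:

    import re
    from itertools import islice

    contents = "import spack.repo\nx = spack.repository\n"
    print(contents.count("spack.repo"))  # 2: substring also hits "spack.repository"

    # at most two matches matter: the import itself plus one real usage
    matches = list(islice(re.finditer(r"spack\.repo(?!\w)", contents), 2))
    print(len(matches))  # 1: only the import line matches, so it is redundant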
@@ -518,196 +506,7 @@ def _bootstrap_dev_dependencies():
     spack.bootstrap.ensure_environment_dependencies()
 
 
-IS_PROBABLY_COMPILER = re.compile(r"%[a-zA-Z_][a-zA-Z0-9\-]")
-
-
-def _spec_str_reorder_compiler(idx: int, blocks: List[List[Token]]) -> None:
-    # only move the compiler to the back if it exists and is not already at the end
-    if not 0 <= idx < len(blocks) - 1:
-        return
-    # if there's only whitespace after the compiler, don't move it
-    if all(token.kind == SpecTokens.WS for block in blocks[idx + 1 :] for token in block):
-        return
-    # rotate left and always add at least one WS token between compiler and previous token
-    compiler_block = blocks.pop(idx)
-    if compiler_block[0].kind != SpecTokens.WS:
-        compiler_block.insert(0, Token(SpecTokens.WS, " "))
-    # delete the WS tokens from the new first block if it was at the very start, to prevent leading
-    # WS tokens.
-    while idx == 0 and blocks[0][0].kind == SpecTokens.WS:
-        blocks[0].pop(0)
-    blocks.append(compiler_block)
-
-
-def _spec_str_format(spec_str: str) -> Optional[str]:
-    """Given any string, try to parse as spec string, and rotate the compiler token to the end
-    of each spec instance. Returns the formatted string if it was changed, otherwise None."""
-    # We parse blocks of tokens that include leading whitespace, and move the compiler block to
-    # the end when we hit a dependency ^... or the end of a string.
-    # [@3.1][ +foo][ +bar][ %gcc@3.1][ +baz]
-    # [@3.1][ +foo][ +bar][ +baz][ %gcc@3.1]
-
-    current_block: List[Token] = []
-    blocks: List[List[Token]] = []
-    compiler_block_idx = -1
-    in_edge_attr = False
-
-    for token in SPEC_TOKENIZER.tokenize(spec_str):
-        if token.kind == SpecTokens.UNEXPECTED:
-            # parsing error, we cannot fix this string.
-            return None
-        elif token.kind in (SpecTokens.COMPILER, SpecTokens.COMPILER_AND_VERSION):
-            # multiple compilers are not supported in Spack v0.x, so early return
-            if compiler_block_idx != -1:
-                return None
-            current_block.append(token)
-            blocks.append(current_block)
-            current_block = []
-            compiler_block_idx = len(blocks) - 1
-        elif token.kind in (
-            SpecTokens.START_EDGE_PROPERTIES,
-            SpecTokens.DEPENDENCY,
-            SpecTokens.UNQUALIFIED_PACKAGE_NAME,
-            SpecTokens.FULLY_QUALIFIED_PACKAGE_NAME,
-        ):
-            _spec_str_reorder_compiler(compiler_block_idx, blocks)
-            compiler_block_idx = -1
-            if token.kind == SpecTokens.START_EDGE_PROPERTIES:
-                in_edge_attr = True
-            current_block.append(token)
-            blocks.append(current_block)
-            current_block = []
-        elif token.kind == SpecTokens.END_EDGE_PROPERTIES:
-            in_edge_attr = False
-            current_block.append(token)
-            blocks.append(current_block)
-            current_block = []
-        elif in_edge_attr:
-            current_block.append(token)
-        elif token.kind in (
-            SpecTokens.VERSION_HASH_PAIR,
-            SpecTokens.GIT_VERSION,
-            SpecTokens.VERSION,
-            SpecTokens.PROPAGATED_BOOL_VARIANT,
-            SpecTokens.BOOL_VARIANT,
-            SpecTokens.PROPAGATED_KEY_VALUE_PAIR,
-            SpecTokens.KEY_VALUE_PAIR,
-            SpecTokens.DAG_HASH,
-        ):
-            current_block.append(token)
-            blocks.append(current_block)
-            current_block = []
-        elif token.kind == SpecTokens.WS:
-            current_block.append(token)
-        else:
-            raise ValueError(f"unexpected token {token}")
-
-    if current_block:
-        blocks.append(current_block)
-    _spec_str_reorder_compiler(compiler_block_idx, blocks)
-
-    new_spec_str = "".join(token.value for block in blocks for token in block)
-    return new_spec_str if spec_str != new_spec_str else None
-
-
-SpecStrHandler = Callable[[str, int, int, str, str], None]
-
-
-def _spec_str_default_handler(path: str, line: int, col: int, old: str, new: str):
-    """A SpecStrHandler that prints formatted spec strings and their locations."""
-    print(f"{path}:{line}:{col}: `{old}` -> `{new}`")
-
-
-def _spec_str_fix_handler(path: str, line: int, col: int, old: str, new: str):
-    """A SpecStrHandler that updates formatted spec strings in files."""
-    with open(path, "r", encoding="utf-8") as f:
-        lines = f.readlines()
-    new_line = lines[line - 1].replace(old, new)
-    if new_line == lines[line - 1]:
-        tty.warn(f"{path}:{line}:{col}: could not apply fix: `{old}` -> `{new}`")
-        return
-    lines[line - 1] = new_line
-    print(f"{path}:{line}:{col}: fixed `{old}` -> `{new}`")
-    with open(path, "w", encoding="utf-8") as f:
-        f.writelines(lines)
-
-
-def _spec_str_ast(path: str, tree: ast.AST, handler: SpecStrHandler) -> None:
-    """Walk the AST of a Python file and apply handler to formatted spec strings."""
-    has_constant = sys.version_info >= (3, 8)
-    for node in ast.walk(tree):
-        if has_constant and isinstance(node, ast.Constant) and isinstance(node.value, str):
-            current_str = node.value
-        elif not has_constant and isinstance(node, ast.Str):
-            current_str = node.s
-        else:
-            continue
-        if not IS_PROBABLY_COMPILER.search(current_str):
-            continue
-        new = _spec_str_format(current_str)
-        if new is not None:
-            handler(path, node.lineno, node.col_offset, current_str, new)
-
-
-def _spec_str_json_and_yaml(path: str, data: dict, handler: SpecStrHandler) -> None:
-    """Walk a YAML or JSON data structure and apply handler to formatted spec strings."""
-    queue = [data]
-    seen = set()
-
-    while queue:
-        current = queue.pop(0)
-        if id(current) in seen:
-            continue
-        seen.add(id(current))
-        if isinstance(current, dict):
-            queue.extend(current.values())
-            queue.extend(current.keys())
-        elif isinstance(current, list):
-            queue.extend(current)
-        elif isinstance(current, str) and IS_PROBABLY_COMPILER.search(current):
-            new = _spec_str_format(current)
-            if new is not None:
-                mark = getattr(current, "_start_mark", None)
-                if mark:
-                    line, col = mark.line + 1, mark.column + 1
-                else:
-                    line, col = 0, 0
-                handler(path, line, col, current, new)
-
-
-def _check_spec_strings(
-    paths: List[str], handler: SpecStrHandler = _spec_str_default_handler
-) -> None:
-    """Open Python, JSON and YAML files, and format their string literals that look like spec
-    strings. A handler is called for each formatting, which can be used to print or apply fixes."""
-    for path in paths:
-        is_json_or_yaml = path.endswith(".json") or path.endswith(".yaml") or path.endswith(".yml")
-        is_python = path.endswith(".py")
-        if not is_json_or_yaml and not is_python:
-            continue
-
-        try:
-            with open(path, "r", encoding="utf-8") as f:
-                # skip files that are likely too large to be user code or config
-                if os.fstat(f.fileno()).st_size > 1024 * 1024:
-                    warnings.warn(f"skipping {path}: too large.")
-                    continue
-                if is_json_or_yaml:
-                    _spec_str_json_and_yaml(path, spack.util.spack_yaml.load_config(f), handler)
-                elif is_python:
-                    _spec_str_ast(path, ast.parse(f.read()), handler)
-        except (OSError, spack.util.spack_yaml.SpackYAMLError, SyntaxError, ValueError):
-            warnings.warn(f"skipping {path}")
-            continue
-
-
 def style(parser, args):
-    if args.spec_strings:
-        if not args.files:
-            tty.die("No files provided to check spec strings.")
-        handler = _spec_str_fix_handler if args.fix else _spec_str_default_handler
-        return _check_spec_strings(args.files, handler)
-
     # save initial working directory for relativizing paths later
     args.initial_working_dir = os.getcwd()
@@ -177,15 +177,16 @@ def test_run(args):
         matching = spack.store.STORE.db.query_local(spec, hashes=hashes, explicit=explicit)
         if spec and not matching:
             tty.warn("No {0}installed packages match spec {1}".format(explicit_str, spec))
+            """
+            TODO: Need to write out a log message and/or CDASH Testing
+            output that package not installed IF continue to process
+            these issues here.
 
-            # TODO: Need to write out a log message and/or CDASH Testing
-            # output that package not installed IF continue to process
-            # these issues here.
-
-            # if args.log_format:
-            #     # Proceed with the spec assuming the test process
-            #     # to ensure report package as skipped (e.g., for CI)
-            #     specs_to_test.append(spec)
+            if args.log_format:
+                # Proceed with the spec assuming the test process
+                # to ensure report package as skipped (e.g., for CI)
+                specs_to_test.append(spec)
+            """
 
         specs_to_test.extend(matching)
@@ -252,9 +253,7 @@ def has_test_and_tags(pkg_class):
     hashes = env.all_hashes() if env else None
 
     specs = spack.store.STORE.db.query(hashes=hashes)
-    specs = list(
-        filter(lambda s: has_test_and_tags(spack.repo.PATH.get_pkg_class(s.fullname)), specs)
-    )
+    specs = list(filter(lambda s: has_test_and_tags(s.package_class), specs))
 
     spack.cmd.display_specs(specs, long=True)
@@ -17,7 +17,6 @@
     pytest = None  # type: ignore
 
 import llnl.util.filesystem
-import llnl.util.tty as tty
 import llnl.util.tty.color as color
 from llnl.util.tty.colify import colify
 
@@ -217,7 +216,7 @@ def unit_test(parser, args, unknown_args):
     # Ensure clingo is available before switching to the
     # mock configuration used by unit tests
     with spack.bootstrap.ensure_bootstrap_configuration():
-        spack.bootstrap.ensure_clingo_importable_or_raise()
+        spack.bootstrap.ensure_core_dependencies()
         if pytest is None:
             spack.bootstrap.ensure_environment_dependencies()
             import pytest
@@ -237,12 +236,6 @@ def unit_test(parser, args, unknown_args):
     pytest_root = spack.extensions.load_extension(args.extension)
 
     if args.numprocesses is not None and args.numprocesses > 1:
-        try:
-            import xdist  # noqa: F401
-        except ImportError:
-            tty.error("parallel unit-test requires pytest-xdist module")
-            return 1
-
         pytest_args.extend(
             [
                 "--dist",
@@ -2,48 +2,35 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 import argparse
-import io
-from typing import List, Optional
 
 import llnl.util.tty as tty
-from llnl.string import plural
-from llnl.util.filesystem import visit_directory_tree
 
 import spack.cmd
 import spack.environment as ev
-import spack.spec
 import spack.store
 import spack.verify
-import spack.verify_libraries
-from spack.cmd.common import arguments
 
-description = "verify spack installations on disk"
+description = "check that all spack packages are on disk as installed"
 section = "admin"
 level = "long"
 
-MANIFEST_SUBPARSER: Optional[argparse.ArgumentParser] = None
-
-
-def setup_parser(subparser: argparse.ArgumentParser):
-    global MANIFEST_SUBPARSER
-    sp = subparser.add_subparsers(metavar="SUBCOMMAND", dest="verify_command")
-
-    MANIFEST_SUBPARSER = sp.add_parser(
-        "manifest", help=verify_manifest.__doc__, description=verify_manifest.__doc__
-    )
-    MANIFEST_SUBPARSER.add_argument(
+
+def setup_parser(subparser):
+    setup_parser.parser = subparser
+
+    subparser.add_argument(
         "-l", "--local", action="store_true", help="verify only locally installed packages"
     )
-    MANIFEST_SUBPARSER.add_argument(
+    subparser.add_argument(
         "-j", "--json", action="store_true", help="ouptut json-formatted errors"
     )
-    MANIFEST_SUBPARSER.add_argument("-a", "--all", action="store_true", help="verify all packages")
-    MANIFEST_SUBPARSER.add_argument(
+    subparser.add_argument("-a", "--all", action="store_true", help="verify all packages")
+    subparser.add_argument(
         "specs_or_files", nargs=argparse.REMAINDER, help="specs or files to verify"
     )
 
-    manifest_sp_type = MANIFEST_SUBPARSER.add_mutually_exclusive_group()
-    manifest_sp_type.add_argument(
+    type = subparser.add_mutually_exclusive_group()
+    type.add_argument(
         "-s",
         "--specs",
         action="store_const",
@@ -52,7 +39,7 @@ def setup_parser(subparser: argparse.ArgumentParser):
         default="specs",
         help="treat entries as specs (default)",
     )
-    manifest_sp_type.add_argument(
+    type.add_argument(
         "-f",
         "--files",
         action="store_const",
@@ -62,67 +49,14 @@ def setup_parser(subparser: argparse.ArgumentParser):
         help="treat entries as absolute filenames\n\ncannot be used with '-a'",
     )
 
-    libraries_subparser = sp.add_parser(
-        "libraries", help=verify_libraries.__doc__, description=verify_libraries.__doc__
-    )
-
-    arguments.add_common_arguments(libraries_subparser, ["constraint"])
-
 
 def verify(parser, args):
-    cmd = args.verify_command
-    if cmd == "libraries":
-        return verify_libraries(args)
-    elif cmd == "manifest":
-        return verify_manifest(args)
-    parser.error("invalid verify subcommand")
-
-
-def verify_libraries(args):
-    """verify that shared libraries of install packages can be located in rpaths (Linux only)"""
-    specs_from_db = [s for s in args.specs(installed=True) if not s.external]
-
-    tty.info(f"Checking {len(specs_from_db)} packages for shared library resolution")
-
-    errors = 0
-    for spec in specs_from_db:
-        try:
-            pkg = spec.package
-        except Exception:
-            tty.warn(f"Skipping {spec.cformat('{name}{@version}{/hash}')} due to missing package")
-        error_msg = _verify_libraries(spec, pkg.unresolved_libraries)
-        if error_msg is not None:
-            errors += 1
-            tty.error(error_msg)
-
-    if errors:
-        tty.error(f"Cannot resolve shared libraries in {plural(errors, 'package')}")
-        return 1
-
-
-def _verify_libraries(spec: spack.spec.Spec, unresolved_libraries: List[str]) -> Optional[str]:
-    """Go over the prefix of the installed spec and verify its shared libraries can be resolved."""
-    visitor = spack.verify_libraries.ResolveSharedElfLibDepsVisitor(
-        [*spack.verify_libraries.ALLOW_UNRESOLVED, *unresolved_libraries]
-    )
-    visit_directory_tree(spec.prefix, visitor)
-
-    if not visitor.problems:
-        return None
-
-    output = io.StringIO()
-    visitor.write(output, indent=4, brief=True)
-    message = output.getvalue().rstrip()
-    return f"{spec.cformat('{name}{@version}{/hash}')}: {spec.prefix}:\n{message}"
-
-
-def verify_manifest(args):
-    """verify that install directories have not been modified since installation"""
     local = args.local
 
     if args.type == "files":
         if args.all:
-            MANIFEST_SUBPARSER.error("cannot use --all with --files")
+            setup_parser.parser.print_help()
+            return 1
 
         for file in args.specs_or_files:
             results = spack.verify.check_file_manifest(file)
@@ -153,7 +87,8 @@ def verify_manifest(args):
         env = ev.active_environment()
         specs = list(map(lambda x: spack.cmd.disambiguate_spec(x, env, local=local), spec_args))
     else:
-        MANIFEST_SUBPARSER.error("use --all or specify specs to verify")
+        setup_parser.parser.print_help()
+        return 1
 
     for spec in specs:
         tty.debug("Verifying package %s")
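The left side reorganizes `spack verify` into `manifest` and `libraries` subcommands. A minimal standalone sketch of that argparse pattern (subcommand names from the diff, the arguments are illustrative):

    import argparse

    parser = argparse.ArgumentParser(prog="spack verify")
    sp = parser.add_subparsers(metavar="SUBCOMMAND", dest="verify_command")

    manifest = sp.add_parser("manifest", help="verify install manifests")
    manifest.add_argument("-a", "--all", action="store_true")

    libraries = sp.add_parser("libraries", help="verify shared library resolution")

    args = parser.parse_args(["manifest", "--all"])
    print(args.verify_command, args.all)  # -> manifest True

The left-side `verify()` then dispatches on `args.verify_command`, while the right side keeps one flat parser and prints help on misuse.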
@@ -801,17 +801,17 @@ def _extract_compiler_paths(spec: "spack.spec.Spec") -> Optional[Dict[str, str]]
 def _extract_os_and_target(spec: "spack.spec.Spec"):
     if not spec.architecture:
         host_platform = spack.platforms.host()
-        operating_system = host_platform.default_operating_system()
-        target = host_platform.default_target()
+        operating_system = host_platform.operating_system("default_os")
+        target = host_platform.target("default_target")
     else:
         target = spec.architecture.target
         if not target:
-            target = spack.platforms.host().default_target()
+            target = spack.platforms.host().target("default_target")
 
         operating_system = spec.os
         if not operating_system:
             host_platform = spack.platforms.host()
-            operating_system = host_platform.default_operating_system()
+            operating_system = host_platform.operating_system("default_os")
     return operating_system, target
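The two sides use different platform APIs for the same lookup: dedicated `default_operating_system()`/`default_target()` accessors on the left, keyed lookups via `operating_system("default_os")` and `target("default_target")` on the right. A toy illustration of the two styles (class and values invented):

    class ToyPlatform:
        def __init__(self) -> None:
            self._oses = {"default_os": "ubuntu22.04"}
            self._targets = {"default_target": "x86_64"}

        # right-side style: generic lookup by a well-known key
        def operating_system(self, key: str) -> str:
            return self._oses[key]

        def target(self, key: str) -> str:
            return self._targets[key]

        # left-side style: explicit accessors wrapping the defaults
        def default_operating_system(self) -> str:
            return self.operating_system("default_os")

        def default_target(self) -> str:
            return self.target("default_target")

    p = ToyPlatform()
    assert p.default_operating_system() == p.operating_system("default_os")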
@@ -220,7 +220,7 @@ def concretize_one(spec: Union[str, Spec], tests: TestsType = False) -> Spec:
     opt, i, answer = min(result.answers)
     name = spec.name
     # TODO: Consolidate this code with similar code in solve.py
-    if spack.repo.PATH.is_virtual(spec.name):
+    if spec.virtual:
         providers = [s.name for s in answer.values() if s.package.provides(name)]
         name = providers[0]
@@ -32,10 +32,9 @@
 import copy
 import functools
 import os
-import os.path
 import re
 import sys
-from typing import Any, Callable, Dict, Generator, List, NamedTuple, Optional, Tuple, Union
+from typing import Any, Callable, Dict, Generator, List, Optional, Tuple, Union
 
 import jsonschema
 
@@ -43,6 +42,7 @@
 
 import spack.error
 import spack.paths
+import spack.platforms
 import spack.schema
 import spack.schema.bootstrap
 import spack.schema.cdash
@@ -54,20 +54,17 @@
 import spack.schema.develop
 import spack.schema.env
 import spack.schema.env_vars
-import spack.schema.include
-import spack.schema.merged
 import spack.schema.mirrors
 import spack.schema.modules
 import spack.schema.packages
 import spack.schema.repos
 import spack.schema.upstreams
 import spack.schema.view
-import spack.util.remote_file_cache as rfc_util
-import spack.util.spack_yaml as syaml
-from spack.util.cpus import cpus_available
-from spack.util.spack_yaml import get_mark_from_yaml_data
-
-from .enums import ConfigScopePriority
+# Hacked yaml for configuration files preserves line numbers.
+import spack.util.spack_yaml as syaml
+import spack.util.web as web_util
+from spack.util.cpus import cpus_available
 
 #: Dict from section names -> schema for that section
 SECTION_SCHEMAS: Dict[str, Any] = {
@@ -75,7 +72,6 @@
     "concretizer": spack.schema.concretizer.schema,
     "definitions": spack.schema.definitions.schema,
     "env_vars": spack.schema.env_vars.schema,
-    "include": spack.schema.include.schema,
     "view": spack.schema.view.schema,
     "develop": spack.schema.develop.schema,
     "mirrors": spack.schema.mirrors.schema,
@@ -123,17 +119,6 @@
 #: Type used for raw YAML configuration
 YamlConfigDict = Dict[str, Any]
 
-#: prefix for name of included configuration scopes
-INCLUDE_SCOPE_PREFIX = "include"
-
-#: safeguard for recursive includes -- maximum include depth
-MAX_RECURSIVE_INCLUDES = 100
-
-
-def _include_cache_location():
-    """Location to cache included configuration files."""
-    return os.path.join(spack.paths.user_cache_path, "includes")
-
-
 class ConfigScope:
     def __init__(self, name: str) -> None:
@@ -141,25 +126,6 @@ def __init__(self, name: str) -> None:
         self.writable = False
         self.sections = syaml.syaml_dict()
 
-        #: names of any included scopes
-        self._included_scopes: Optional[List["ConfigScope"]] = None
-
-    @property
-    def included_scopes(self) -> List["ConfigScope"]:
-        """Memoized list of included scopes, in the order they appear in this scope."""
-        if self._included_scopes is None:
-            self._included_scopes = []
-
-            includes = self.get_section("include")
-            if includes:
-                include_paths = [included_path(data) for data in includes["include"]]
-                for path in include_paths:
-                    included_scope = include_path_scope(path)
-                    if included_scope:
-                        self._included_scopes.append(included_scope)
-
-        return self._included_scopes
-
     def get_section_filename(self, section: str) -> str:
         raise NotImplementedError
 
@@ -442,18 +408,26 @@ def _method(self, *args, **kwargs):
     return _method
 
 
-ScopeWithOptionalPriority = Union[ConfigScope, Tuple[int, ConfigScope]]
-ScopeWithPriority = Tuple[int, ConfigScope]
-
-
 class Configuration:
-    """A hierarchical configuration, merging a number of scopes at different priorities."""
+    """A full Spack configuration, from a hierarchy of config files.
+
+    This class makes it easy to add a new scope on top of an existing one.
+    """
 
     # convert to typing.OrderedDict when we drop 3.6, or OrderedDict when we reach 3.9
-    scopes: lang.PriorityOrderedMapping[str, ConfigScope]
+    scopes: Dict[str, ConfigScope]
 
-    def __init__(self) -> None:
-        self.scopes = lang.PriorityOrderedMapping()
+    def __init__(self, *scopes: ConfigScope) -> None:
+        """Initialize a configuration with an initial list of scopes.
+
+        Args:
+            scopes: list of scopes to add to this
+                Configuration, ordered from lowest to highest precedence
+        """
+        self.scopes = collections.OrderedDict()
+        for scope in scopes:
+            self.push_scope(scope)
         self.format_updates: Dict[str, List[ConfigScope]] = collections.defaultdict(list)
 
     def ensure_unwrapped(self) -> "Configuration":
@@ -461,59 +435,36 @@ def ensure_unwrapped(self) -> "Configuration":
         return self
 
     def highest(self) -> ConfigScope:
-        """Scope with the highest precedence"""
-        return next(self.scopes.reversed_values())  # type: ignore
+        """Scope with highest precedence"""
+        return next(reversed(self.scopes.values()))  # type: ignore
 
     @_config_mutator
-    def push_scope(
-        self, scope: ConfigScope, priority: Optional[int] = None, _depth: int = 0
-    ) -> None:
-        """Adds a scope to the Configuration, at a given priority.
-
-        If a priority is not given, it is assumed to be the current highest priority.
-
-        Args:
-            scope: scope to be added
-            priority: priority of the scope
-        """
-        # TODO: As a follow on to #48784, change this to create a graph of the
-        # TODO: includes AND ensure properly sorted such that the order included
-        # TODO: at the highest level is reflected in the value of an option that
-        # TODO: is set in multiple included files.
-        # before pushing the scope itself, push any included scopes recursively, at same priority
-        for included_scope in reversed(scope.included_scopes):
-            if _depth + 1 > MAX_RECURSIVE_INCLUDES:  # make sure we're not recursing endlessly
-                mark = ""
-                if hasattr(included_scope, "path") and syaml.marked(included_scope.path):
-                    mark = included_scope.path._start_mark  # type: ignore
-                raise RecursiveIncludeError(
-                    f"Maximum include recursion exceeded in {included_scope.name}", str(mark)
-                )
-
-            # record this inclusion so that remove_scope() can use it
-            self.push_scope(included_scope, priority=priority, _depth=_depth + 1)
-
-        tty.debug(f"[CONFIGURATION: PUSH SCOPE]: {str(scope)}, priority={priority}", level=2)
-        self.scopes.add(scope.name, value=scope, priority=priority)
+    def ensure_scope_ordering(self):
+        """Ensure that scope order matches documented precedent"""
+        # FIXME: We also need to consider that custom configurations and other orderings
+        # may not be preserved correctly
+        if "command_line" in self.scopes:
+            # TODO (when dropping python 3.6): self.scopes.move_to_end
+            self.scopes["command_line"] = self.remove_scope("command_line")
+
+    @_config_mutator
+    def push_scope(self, scope: ConfigScope) -> None:
+        """Add a higher precedence scope to the Configuration."""
+        tty.debug(f"[CONFIGURATION: PUSH SCOPE]: {str(scope)}", level=2)
+        self.scopes[scope.name] = scope
+
+    @_config_mutator
+    def pop_scope(self) -> ConfigScope:
+        """Remove the highest precedence scope and return it."""
+        name, scope = self.scopes.popitem(last=True)  # type: ignore[call-arg]
+        tty.debug(f"[CONFIGURATION: POP SCOPE]: {str(scope)}", level=2)
+        return scope
 
     @_config_mutator
     def remove_scope(self, scope_name: str) -> Optional[ConfigScope]:
-        """Removes a scope by name, and returns it. If the scope does not exist, returns None."""
-        try:
-            scope = self.scopes.remove(scope_name)
-            tty.debug(f"[CONFIGURATION: REMOVE SCOPE]: {str(scope)}", level=2)
-        except KeyError as e:
-            tty.debug(f"[CONFIGURATION: REMOVE SCOPE]: {e}", level=2)
-            return None
-
-        # transitively remove included scopes
-        for included_scope in scope.included_scopes:
-            assert (
-                included_scope.name in self.scopes
-            ), f"Included scope '{included_scope.name}' was never added to configuration!"
-            self.remove_scope(included_scope.name)
-
+        """Remove scope by name; has no effect when ``scope_name`` does not exist"""
+        scope = self.scopes.pop(scope_name, None)
+        tty.debug(f"[CONFIGURATION: POP SCOPE]: {str(scope)}", level=2)
         return scope
 
     @property
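The left side keeps scopes in a `lang.PriorityOrderedMapping` instead of an `OrderedDict`, so precedence is an explicit priority combined with insertion order rather than insertion order alone. A toy model of those semantics (not the real llnl.util.lang class):

    from typing import Any, Iterator, List, Tuple

    class ToyPriorityMapping:
        """Values kept sorted by (priority, insertion order); highest last."""

        def __init__(self) -> None:
            self._items: List[Tuple[int, int, str, Any]] = []
            self._count = 0

        def add(self, key: str, value: Any, priority: int = 0) -> None:
            self._items.append((priority, self._count, key, value))
            self._count += 1
            self._items.sort(key=lambda t: (t[0], t[1]))  # stable sort

        def reversed_values(self) -> Iterator[Any]:
            # highest-priority, most recently added value first
            return iter(item[3] for item in reversed(self._items))

With this shape, `next(self.scopes.reversed_values())` in `highest()` returns the highest-precedence scope no matter when it was pushed.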
@@ -522,13 +473,15 @@ def writable_scopes(self) -> Generator[ConfigScope, None, None]:
         return (s for s in self.scopes.values() if s.writable)
 
     def highest_precedence_scope(self) -> ConfigScope:
-        """Writable scope with the highest precedence."""
-        return next(s for s in self.scopes.reversed_values() if s.writable)
+        """Writable scope with highest precedence."""
+        return next(s for s in reversed(self.scopes.values()) if s.writable)  # type: ignore
 
     def highest_precedence_non_platform_scope(self) -> ConfigScope:
-        """Writable non-platform scope with the highest precedence"""
+        """Writable non-platform scope with highest precedence"""
         return next(
-            s for s in self.scopes.reversed_values() if s.writable and not s.is_platform_dependent
+            s
+            for s in reversed(self.scopes.values())  # type: ignore
+            if s.writable and not s.is_platform_dependent
         )
 
     def matching_scopes(self, reg_expr) -> List[ConfigScope]:
@@ -795,7 +748,7 @@ def override(
     """
     if isinstance(path_or_scope, ConfigScope):
         overrides = path_or_scope
-        CONFIG.push_scope(path_or_scope, priority=None)
+        CONFIG.push_scope(path_or_scope)
     else:
         base_name = _OVERRIDES_BASE_NAME
         # Ensure the new override gets a unique scope name
@@ -809,7 +762,7 @@ def override(
             break
 
         overrides = InternalConfigScope(scope_name)
-        CONFIG.push_scope(overrides, priority=None)
+        CONFIG.push_scope(overrides)
         CONFIG.set(path_or_scope, value, scope=scope_name)
 
     try:
@@ -819,86 +772,13 @@ def override(
     assert scope is overrides
 
 
-def _add_platform_scope(
-    cfg: Configuration, name: str, path: str, priority: ConfigScopePriority, writable: bool = True
-) -> None:
+def _add_platform_scope(cfg: Configuration, name: str, path: str, writable: bool = True) -> None:
     """Add a platform-specific subdirectory for the current platform."""
-    import spack.platforms  # circular dependency
-
     platform = spack.platforms.host().name
     scope = DirectoryConfigScope(
         f"{name}/{platform}", os.path.join(path, platform), writable=writable
     )
-    cfg.push_scope(scope, priority=priority)
-
-
-#: Class for the relevance of an optional path conditioned on a limited
-#: python code that evaluates to a boolean and or explicit specification
-#: as optional.
-class IncludePath(NamedTuple):
-    path: str
-    when: str
-    sha256: str
-    optional: bool
-
-
-def included_path(entry: Union[str, dict]) -> IncludePath:
-    """Convert the included path entry into an IncludePath.
-
-    Args:
-        entry: include configuration entry
-
-    Returns: converted entry, where an empty ``when`` means the path is
-        not conditionally included
-    """
-    if isinstance(entry, str):
-        return IncludePath(path=entry, sha256="", when="", optional=False)
-
-    path = entry["path"]
-    sha256 = entry.get("sha256", "")
-    when = entry.get("when", "")
-    optional = entry.get("optional", False)
-    return IncludePath(path=path, sha256=sha256, when=when, optional=optional)
-
-
-def include_path_scope(include: IncludePath) -> Optional[ConfigScope]:
-    """Instantiate an appropriate configuration scope for the given path.
-
-    Args:
-        include: optional include path
-
-    Returns: configuration scope
-
-    Raises:
-        ValueError: included path has an unsupported URL scheme, is required
-            but does not exist; configuration stage directory argument is missing
-        ConfigFileError: unable to access remote configuration file(s)
-    """
-    # circular dependencies
-    import spack.spec
-
-    if (not include.when) or spack.spec.eval_conditional(include.when):
-        config_path = rfc_util.local_path(include.path, include.sha256, _include_cache_location)
-        if not config_path:
-            raise ConfigFileError(f"Unable to fetch remote configuration from {include.path}")
-
-        if os.path.isdir(config_path):
-            # directories are treated as regular ConfigScopes
-            config_name = f"{INCLUDE_SCOPE_PREFIX}:{os.path.basename(config_path)}"
-            tty.debug(f"Creating DirectoryConfigScope {config_name} for '{config_path}'")
-            return DirectoryConfigScope(config_name, config_path)
-
-        if os.path.exists(config_path):
-            # files are assumed to be SingleFileScopes
-            config_name = f"{INCLUDE_SCOPE_PREFIX}:{config_path}"
-            tty.debug(f"Creating SingleFileScope {config_name} for '{config_path}'")
-            return SingleFileScope(config_name, config_path, spack.schema.merged.schema)
-
-        if not include.optional:
-            path = f" at ({config_path})" if config_path != include.path else ""
-            raise ValueError(f"Required path ({include.path}) does not exist{path}")
-
-        return None
+    cfg.push_scope(scope)
 
 
 def config_paths_from_entry_points() -> List[Tuple[str, str]]:
@@ -926,17 +806,18 @@ def config_paths_from_entry_points() -> List[Tuple[str, str]]:
     return config_paths
 
 
-def create_incremental() -> Generator[Configuration, None, None]:
+def create() -> Configuration:
     """Singleton Configuration instance.
 
     This constructs one instance associated with this module and returns
     it. It is bundled inside a function so that configuration can be
     initialized lazily.
     """
+    cfg = Configuration()
+
     # first do the builtin, hardcoded defaults
-    cfg = create_from(
-        (ConfigScopePriority.BUILTIN, InternalConfigScope("_builtin", CONFIG_DEFAULTS))
-    )
+    builtin = InternalConfigScope("_builtin", CONFIG_DEFAULTS)
+    cfg.push_scope(builtin)
 
     # Builtin paths to configuration files in Spack
    configuration_paths = [
@@ -966,29 +847,16 @@ def create_incremental() -> Generator[Configuration, None, None]:
 
     # add each scope and its platform-specific directory
     for name, path in configuration_paths:
-        cfg.push_scope(DirectoryConfigScope(name, path), priority=ConfigScopePriority.CONFIG_FILES)
-        # Each scope can have per-platform overrides in subdirectories
-        _add_platform_scope(cfg, name, path, priority=ConfigScopePriority.CONFIG_FILES)
-
-    # yield the config incrementally so that each config level's init code can get
-    # data from the one below. This can be tricky, but it enables us to have a
-    # single unified config system.
-    #
-    # TODO: think about whether we want to restrict what types of config can be used
-    #     at each level. e.g., we may want to just more forcibly disallow remote
-    #     config (which uses ssl and other config options) for some of the scopes,
-    #     to make the bootstrap issues more explicit, even if allowing config scope
-    #     init to reference lower scopes is more flexible.
-    yield cfg
-
-
-def create() -> Configuration:
-    """Create a configuration using create_incremental(), return the last yielded result."""
-    return list(create_incremental())[-1]
+        cfg.push_scope(DirectoryConfigScope(name, path))
+
+        # Each scope can have per-platfom overrides in subdirectories
+        _add_platform_scope(cfg, name, path)
+
+    return cfg
 
 
 #: This is the singleton configuration instance for Spack.
-CONFIG: Configuration = lang.Singleton(create_incremental)  # type: ignore
+CONFIG: Configuration = lang.Singleton(create)  # type: ignore
 
 
 def add_from_file(filename: str, scope: Optional[str] = None) -> None:
@@ -1084,11 +952,10 @@ def set(path: str, value: Any, scope: Optional[str] = None) -> None:
|
|||||||
|
|
||||||
Accepts the path syntax described in ``get()``.
|
Accepts the path syntax described in ``get()``.
|
||||||
"""
|
"""
|
||||||
result = CONFIG.set(path, value, scope)
|
return CONFIG.set(path, value, scope)
|
||||||
return result
|
|
||||||
|
|
||||||
|
|
||||||
def scopes() -> lang.PriorityOrderedMapping[str, ConfigScope]:
|
def scopes() -> Dict[str, ConfigScope]:
|
||||||
"""Convenience function to get list of configuration scopes."""
|
"""Convenience function to get list of configuration scopes."""
|
||||||
return CONFIG.scopes
|
return CONFIG.scopes
|
||||||
|
|
||||||
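Both sides of this hunk defer construction of the global CONFIG through `lang.Singleton`, so importing the module stays cheap and the scope stack is only assembled on first access. A rough sketch of that lazy-proxy idea in plain Python (a simplified stand-in, not Spack's actual `llnl.util.lang.Singleton`):

    class Singleton:
        """Proxy that builds its wrapped object on first attribute access."""

        def __init__(self, factory):
            self._factory = factory
            self._instance = None

        def __getattr__(self, name):
            if self._instance is None:
                self._instance = self._factory()
            return getattr(self._instance, name)

    def create():
        print("building configuration...")
        return {"config": {"build_jobs": 4}}

    CONFIG = Singleton(create)
    # nothing is built until the first real use:
    print(list(CONFIG.keys()))  # prints "building configuration..." then ['config']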
@@ -1542,7 +1409,7 @@ def ensure_latest_format_fn(section: str) -> Callable[[YamlConfigDict], bool]:


 @contextlib.contextmanager
 def use_configuration(
-    *scopes_or_paths: Union[ScopeWithOptionalPriority, str]
+    *scopes_or_paths: Union[ConfigScope, str]
 ) -> Generator[Configuration, None, None]:
     """Use the configuration scopes passed as arguments within the context manager.

@@ -1557,7 +1424,7 @@ def use_configuration(
     global CONFIG

     # Normalize input and construct a Configuration object
-    configuration = create_from(*scopes_or_paths)
+    configuration = _config_from(scopes_or_paths)
     CONFIG.clear_caches(), configuration.clear_caches()

     saved_config, CONFIG = CONFIG, configuration

@@ -1568,44 +1435,137 @@ def use_configuration(
     CONFIG = saved_config


-def _normalize_input(entry: Union[ScopeWithOptionalPriority, str]) -> ScopeWithPriority:
-    if isinstance(entry, tuple):
-        return entry
-
-    default_priority = ConfigScopePriority.CONFIG_FILES
-    if isinstance(entry, ConfigScope):
-        return default_priority, entry
-
-    # Otherwise we need to construct it
-    path = os.path.normpath(entry)
-    assert os.path.isdir(path), f'"{path}" must be a directory'
-    name = os.path.basename(path)
-    return default_priority, DirectoryConfigScope(name, path)
-
-
 @lang.memoized
-def create_from(*scopes_or_paths: Union[ScopeWithOptionalPriority, str]) -> Configuration:
-    """Creates a configuration object from the scopes passed in input.
+def _config_from(scopes_or_paths: List[Union[ConfigScope, str]]) -> Configuration:
+    scopes = []
+    for scope_or_path in scopes_or_paths:
+        # If we have a config scope we are already done
+        if isinstance(scope_or_path, ConfigScope):
+            scopes.append(scope_or_path)
+            continue
+
+        # Otherwise we need to construct it
+        path = os.path.normpath(scope_or_path)
+        assert os.path.isdir(path), f'"{path}" must be a directory'
+        name = os.path.basename(path)
+        scopes.append(DirectoryConfigScope(name, path))
+
+    configuration = Configuration(*scopes)
+    return configuration
+
+
+def raw_github_gitlab_url(url: str) -> str:
+    """Transform a github URL to the raw form to avoid undesirable html.

     Args:
-        *scopes_or_paths: either a tuple of (priority, ConfigScope), or a ConfigScope, or a string
-            If priority is not given, it is assumed to be ConfigScopePriority.CONFIG_FILES. If a
-            string is given, a DirectoryConfigScope is created from it.
-
-    Examples:
-
-        >>> builtin_scope = InternalConfigScope("_builtin", {"config": {"build_jobs": 1}})
-        >>> cl_scope = InternalConfigScope("command_line", {"config": {"build_jobs": 10}})
-        >>> cfg = create_from(
-        ...     (ConfigScopePriority.COMMAND_LINE, cl_scope),
-        ...     (ConfigScopePriority.BUILTIN, builtin_scope)
-        ... )
+        url: url to be converted to raw form
+
+    Returns:
+        Raw github/gitlab url or the original url
     """
-    scopes_with_priority = [_normalize_input(x) for x in scopes_or_paths]
-    result = Configuration()
-    for priority, scope in scopes_with_priority:
-        result.push_scope(scope, priority=priority)
-    return result
+    # Note we rely on GitHub to redirect the 'raw' URL returned here to the
+    # actual URL under https://raw.githubusercontent.com/ with '/blob'
+    # removed and or, '/blame' if needed.
+    if "github" in url or "gitlab" in url:
+        return url.replace("/blob/", "/raw/")
+
+    return url
+
+
+def collect_urls(base_url: str) -> list:
+    """Return a list of configuration URLs.
+
+    Arguments:
+        base_url: URL for a configuration (yaml) file or a directory
+            containing yaml file(s)
+
+    Returns:
+        List of configuration file(s) or empty list if none
+    """
+    if not base_url:
+        return []
+
+    extension = ".yaml"
+
+    if base_url.endswith(extension):
+        return [base_url]
+
+    # Collect configuration URLs if the base_url is a "directory".
+    _, links = web_util.spider(base_url, 0)
+    return [link for link in links if link.endswith(extension)]
+
+
+def fetch_remote_configs(url: str, dest_dir: str, skip_existing: bool = True) -> str:
+    """Retrieve configuration file(s) at the specified URL.
+
+    Arguments:
+        url: URL for a configuration (yaml) file or a directory containing
+            yaml file(s)
+        dest_dir: destination directory
+        skip_existing: Skip files that already exist in dest_dir if
+            ``True``; otherwise, replace those files
+
+    Returns:
+        Path to the corresponding file if URL is or contains a
+        single file and it is the only file in the destination directory or
+        the root (dest_dir) directory if multiple configuration files exist
+        or are retrieved.
+    """
+
+    def _fetch_file(url):
+        raw = raw_github_gitlab_url(url)
+        tty.debug(f"Reading config from url {raw}")
+        return web_util.fetch_url_text(raw, dest_dir=dest_dir)
+
+    if not url:
+        raise ConfigFileError("Cannot retrieve configuration without a URL")
+
+    # Return the local path to the cached configuration file OR to the
+    # directory containing the cached configuration files.
+    config_links = collect_urls(url)
+    existing_files = os.listdir(dest_dir) if os.path.isdir(dest_dir) else []
+
+    paths = []
+    for config_url in config_links:
+        basename = os.path.basename(config_url)
+        if skip_existing and basename in existing_files:
+            tty.warn(
+                f"Will not fetch configuration from {config_url} since a "
+                f"version already exists in {dest_dir}"
+            )
+            path = os.path.join(dest_dir, basename)
+        else:
+            path = _fetch_file(config_url)
+
+        if path:
+            paths.append(path)
+
+    if paths:
+        return dest_dir if len(paths) > 1 else paths[0]
+
+    raise ConfigFileError(f"Cannot retrieve configuration (yaml) from {url}")
+
+
+def get_mark_from_yaml_data(obj):
+    """Try to get ``spack.util.spack_yaml`` mark from YAML data.
+
+    We try the object, and if that fails we try its first member (if it's a container).
+
+    Returns:
+        mark if one is found, otherwise None.
+    """
+    # mark of object itelf
+    mark = getattr(obj, "_start_mark", None)
+    if mark:
+        return mark
+
+    # mark of first member if it is a container
+    if isinstance(obj, (list, dict)):
+        first_member = next(iter(obj), None)
+        if first_member:
+            mark = getattr(first_member, "_start_mark", None)
+
+    return mark


 def determine_number_of_jobs(
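The `raw_github_gitlab_url` helper added on the right-hand side above is pure string rewriting, so its behavior is easy to check in isolation (re-stated here as a standalone function for illustration):

    def raw_github_gitlab_url(url: str) -> str:
        # GitHub/GitLab serve HTML at /blob/ URLs; /raw/ returns file contents
        if "github" in url or "gitlab" in url:
            return url.replace("/blob/", "/raw/")
        return url

    assert (
        raw_github_gitlab_url("https://github.com/spack/spack/blob/develop/README.md")
        == "https://github.com/spack/spack/raw/develop/README.md"
    )
    # non-forge URLs pass through untouched
    assert raw_github_gitlab_url("https://example.com/blob/x.yaml").endswith("/blob/x.yaml")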
@@ -1712,7 +1672,3 @@ def get_path(path, data):

     # give up and return None if nothing worked
     return None
-
-
-class RecursiveIncludeError(spack.error.SpackError):
-    """Too many levels of recursive includes."""
@@ -57,7 +57,7 @@ def validate(configuration_file):
     # Set the default value of the concretization strategy to unify and
     # warn if the user explicitly set another value
     env_dict.setdefault("concretizer", {"unify": True})
-    if env_dict["concretizer"]["unify"] is not True:
+    if not env_dict["concretizer"]["unify"] is True:
         warnings.warn(
             '"concretizer:unify" is not set to "true", which means the '
             "generated image may contain different variants of the same "
@@ -41,8 +41,6 @@
     Union,
 )

-import spack.repo
-
 try:
     import uuid

@@ -1126,7 +1124,7 @@ def _add(
         installation_time:
             Date and time of installation
         allow_missing: if True, don't warn when installation is not found on on disk
-            This is useful when installing specs without build/test deps.
+            This is useful when installing specs without build deps.
     """
     if not spec.concrete:
         raise NonConcreteSpecAddError("Specs added to DB must be concrete.")

@@ -1146,8 +1144,10 @@ def _add(
                 edge.spec,
                 explicit=False,
                 installation_time=installation_time,
-                # allow missing build / test only deps
-                allow_missing=allow_missing or edge.depflag & (dt.BUILD | dt.TEST) == edge.depflag,
+                # allow missing build-only deps. This prevents excessive warnings when a spec is
+                # installed, and its build dep is missing a build dep; there's no need to install
+                # the build dep's build dep first, and there's no need to warn about it missing.
+                allow_missing=allow_missing or edge.depflag == dt.BUILD,
             )

     # Make sure the directory layout agrees whether the spec is installed
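The condition removed on the left, `edge.depflag & (dt.BUILD | dt.TEST) == edge.depflag`, is a bitmask subset test: it is true exactly when the edge carries only build and/or test dependency types. A self-contained illustration with a hypothetical flag enum (a stand-in for Spack's real `dt` constants):

    import enum

    class DepFlag(enum.IntFlag):  # illustrative stand-in for spack.deptypes
        BUILD = 1
        LINK = 2
        RUN = 4
        TEST = 8

    def build_or_test_only(depflag: DepFlag) -> bool:
        # masking with BUILD|TEST leaves depflag unchanged only if no
        # other bits (LINK, RUN, ...) are set
        return depflag & (DepFlag.BUILD | DepFlag.TEST) == depflag

    assert build_or_test_only(DepFlag.BUILD)
    assert build_or_test_only(DepFlag.BUILD | DepFlag.TEST)
    assert not build_or_test_only(DepFlag.BUILD | DepFlag.LINK)

The replacement on the right, `edge.depflag == dt.BUILD`, is stricter: it tolerates a missing record only for pure build-type edges.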
@@ -1556,12 +1556,7 @@ def _query(
     # If we did fine something, the query spec can't be virtual b/c we matched an actual
     # package installation, so skip the virtual check entirely. If we *didn't* find anything,
     # check all the deferred specs *if* the query is virtual.
-    if (
-        not results
-        and query_spec is not None
-        and deferred
-        and spack.repo.PATH.is_virtual(query_spec.name)
-    ):
+    if not results and query_spec is not None and deferred and query_spec.virtual:
         results = [spec for spec in deferred if spec.satisfies(query_spec)]

     return results
@@ -310,7 +310,7 @@ def find_windows_kit_roots() -> List[str]:

     @staticmethod
     def find_windows_kit_bin_paths(
-        kit_base: Union[Optional[str], Optional[list]] = None,
+        kit_base: Union[Optional[str], Optional[list]] = None
     ) -> List[str]:
         """Returns Windows kit bin directory per version"""
         kit_base = WindowsKitExternalPaths.find_windows_kit_roots() if not kit_base else kit_base

@@ -325,7 +325,7 @@ def find_windows_kit_bin_paths(

     @staticmethod
     def find_windows_kit_lib_paths(
-        kit_base: Union[Optional[str], Optional[list]] = None,
+        kit_base: Union[Optional[str], Optional[list]] = None
     ) -> List[str]:
         """Returns Windows kit lib directory per version"""
         kit_base = WindowsKitExternalPaths.find_windows_kit_roots() if not kit_base else kit_base
@@ -7,7 +7,6 @@
 import collections
 import concurrent.futures
 import os
-import pathlib
 import re
 import sys
 import traceback

@@ -16,7 +15,6 @@

 import llnl.util.filesystem
 import llnl.util.lang
-import llnl.util.symlink
 import llnl.util.tty

 import spack.error

@@ -72,21 +70,13 @@ def dedupe_paths(paths: List[str]) -> List[str]:
     """Deduplicate paths based on inode and device number. In case the list contains first a
     symlink and then the directory it points to, the symlink is replaced with the directory path.
     This ensures that we pick for example ``/usr/bin`` over ``/bin`` if the latter is a symlink to
-    the former."""
+    the former`."""
     seen: Dict[Tuple[int, int], str] = {}
-
-    linked_parent_check = lambda x: any(
-        [llnl.util.symlink.islink(str(y)) for y in pathlib.Path(x).parents]
-    )
-
     for path in paths:
         identifier = file_identifier(path)
         if identifier not in seen:
             seen[identifier] = path
-        # we also want to deprioritize paths if they contain a symlink in any parent
-        # (not just the basedir): e.g. oneapi has "latest/bin",
-        # where "latest" is a symlink to 2025.0"
-        elif not (llnl.util.symlink.islink(path) or linked_parent_check(path)):
+        elif not os.path.islink(path):
             seen[identifier] = path
     return list(seen.values())
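Both versions of `dedupe_paths` key entries by `(st_dev, st_ino)` so that a symlink and its target collapse to one entry, with the real directory preferred. A runnable sketch of the core idea, assuming a POSIX system where unprivileged symlink creation works (the temp paths are illustrative):

    import os
    import tempfile

    def file_identifier(path: str):
        s = os.stat(path)  # follows symlinks, so link and target get the same key
        return (s.st_dev, s.st_ino)

    root = tempfile.mkdtemp()
    real = os.path.join(root, "usr_bin")
    os.mkdir(real)
    link = os.path.join(root, "bin")
    os.symlink(real, link)

    seen = {}
    for p in [link, real]:
        ident = file_identifier(p)
        # a later non-symlink duplicate replaces an earlier symlink entry
        if ident not in seen or not os.path.islink(p):
            seen[ident] = p

    print(list(seen.values()))  # [".../usr_bin"] - the real directory wins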
@@ -253,7 +243,7 @@ def prefix_from_path(self, *, path: str) -> str:
         raise NotImplementedError("must be implemented by derived classes")

     def detect_specs(
-        self, *, pkg: Type["spack.package_base.PackageBase"], paths: Iterable[str]
+        self, *, pkg: Type["spack.package_base.PackageBase"], paths: List[str]
     ) -> List["spack.spec.Spec"]:
         """Given a list of files matching the search patterns, returns a list of detected specs.

@@ -269,8 +259,6 @@ def detect_specs(
             )
             return []

-        from spack.repo import PATH as repo_path
-
         result = []
         for candidate_path, items_in_prefix in _group_by_prefix(
             llnl.util.lang.dedupe(paths)

@@ -317,10 +305,7 @@ def detect_specs(

                     resolved_specs[spec] = candidate_path
                     try:
-                        # Validate the spec calling a package specific method
-                        pkg_cls = repo_path.get_pkg_class(spec.name)
-                        validate_fn = getattr(pkg_cls, "validate_detected_spec", lambda x, y: None)
-                        validate_fn(spec, spec.extra_attributes)
+                        spec.validate_detection()
                     except Exception as e:
                         msg = (
                             f'"{spec}" has been detected on the system but will '
@@ -462,7 +462,8 @@ def _execute_extends(pkg):
         if dep_spec.name == "python" and not pkg.name == "python-venv":
             _depends_on(pkg, spack.spec.Spec("python-venv"), when=when, type=("build", "run"))

-        pkg.extendees[dep_spec.name] = (dep_spec, when_spec)
+        # TODO: the values of the extendees dictionary are not used. Remove in next refactor.
+        pkg.extendees[dep_spec.name] = (dep_spec, None)

     return _execute_extends

@@ -567,7 +568,7 @@ def patch(
     """

     def _execute_patch(
-        pkg_or_dep: Union[Type[spack.package_base.PackageBase], Dependency],
+        pkg_or_dep: Union[Type[spack.package_base.PackageBase], Dependency]
     ) -> None:
         pkg = pkg_or_dep.pkg if isinstance(pkg_or_dep, Dependency) else pkg_or_dep
@@ -25,7 +25,7 @@
 }


-def _check_concrete(spec: "spack.spec.Spec") -> None:
+def _check_concrete(spec):
     """If the spec is not concrete, raise a ValueError"""
     if not spec.concrete:
         raise ValueError("Specs passed to a DirectoryLayout must be concrete!")

@@ -51,7 +51,7 @@ def specs_from_metadata_dirs(root: str) -> List["spack.spec.Spec"]:
         spec = _get_spec(prefix)

         if spec:
-            spec.set_prefix(prefix)
+            spec.prefix = prefix
             specs.append(spec)
             continue

@@ -84,7 +84,7 @@ class DirectoryLayout:

     def __init__(
         self,
-        root: str,
+        root,
         *,
         projections: Optional[Dict[str, str]] = None,
         hash_length: Optional[int] = None,

@@ -120,17 +120,17 @@ def __init__(
         self.manifest_file_name = "install_manifest.json"

     @property
-    def hidden_file_regexes(self) -> Tuple[str]:
+    def hidden_file_regexes(self):
         return ("^{0}$".format(re.escape(self.metadata_dir)),)

-    def relative_path_for_spec(self, spec: "spack.spec.Spec") -> str:
+    def relative_path_for_spec(self, spec):
         _check_concrete(spec)

         projection = spack.projections.get_projection(self.projections, spec)
         path = spec.format_path(projection)
         return str(Path(path))

-    def write_spec(self, spec: "spack.spec.Spec", path: str) -> None:
+    def write_spec(self, spec, path):
         """Write a spec out to a file."""
         _check_concrete(spec)
         with open(path, "w", encoding="utf-8") as f:

@@ -138,7 +138,7 @@ def write_spec(self, spec: "spack.spec.Spec", path: str) -> None:
             # the full provenance, so it's availabe if we want it later
             spec.to_json(f, hash=ht.dag_hash)

-    def write_host_environment(self, spec: "spack.spec.Spec") -> None:
+    def write_host_environment(self, spec):
         """The host environment is a json file with os, kernel, and spack
         versioning. We use it in the case that an analysis later needs to
         easily access this information.

@@ -148,7 +148,7 @@ def write_host_environment(self, spec: "spack.spec.Spec") -> None:
         with open(env_file, "w", encoding="utf-8") as fd:
             sjson.dump(environ, fd)

-    def read_spec(self, path: str) -> "spack.spec.Spec":
+    def read_spec(self, path):
         """Read the contents of a file and parse them as a spec"""
         try:
             with open(path, encoding="utf-8") as f:

@@ -159,28 +159,26 @@ def read_spec(self, path: str) -> "spack.spec.Spec":
                     # Too late for conversion; spec_file_path() already called.
                     spec = spack.spec.Spec.from_yaml(f)
                 else:
-                    raise SpecReadError(f"Did not recognize spec file extension: {extension}")
+                    raise SpecReadError(
+                        "Did not recognize spec file extension:" " {0}".format(extension)
+                    )
         except Exception as e:
             if spack.config.get("config:debug"):
                 raise
-            raise SpecReadError(f"Unable to read file: {path}", f"Cause: {e}")
+            raise SpecReadError("Unable to read file: %s" % path, "Cause: " + str(e))

         # Specs read from actual installations are always concrete
         spec._mark_concrete()
         return spec

-    def spec_file_path(self, spec: "spack.spec.Spec") -> str:
+    def spec_file_path(self, spec):
         """Gets full path to spec file"""
         _check_concrete(spec)
         yaml_path = os.path.join(self.metadata_path(spec), self._spec_file_name_yaml)
         json_path = os.path.join(self.metadata_path(spec), self.spec_file_name)
         return yaml_path if os.path.exists(yaml_path) else json_path

-    def deprecated_file_path(
-        self,
-        deprecated_spec: "spack.spec.Spec",
-        deprecator_spec: Optional["spack.spec.Spec"] = None,
-    ) -> str:
+    def deprecated_file_path(self, deprecated_spec, deprecator_spec=None):
         """Gets full path to spec file for deprecated spec

         If the deprecator_spec is provided, use that. Otherwise, assume

@@ -214,16 +212,16 @@ def deprecated_file_path(

         return yaml_path if os.path.exists(yaml_path) else json_path

-    def metadata_path(self, spec: "spack.spec.Spec") -> str:
+    def metadata_path(self, spec):
         return os.path.join(spec.prefix, self.metadata_dir)

-    def env_metadata_path(self, spec: "spack.spec.Spec") -> str:
+    def env_metadata_path(self, spec):
         return os.path.join(self.metadata_path(spec), "install_environment.json")

-    def build_packages_path(self, spec: "spack.spec.Spec") -> str:
+    def build_packages_path(self, spec):
         return os.path.join(self.metadata_path(spec), self.packages_dir)

-    def create_install_directory(self, spec: "spack.spec.Spec") -> None:
+    def create_install_directory(self, spec):
         _check_concrete(spec)

         # Create install directory with properly configured permissions

@@ -241,7 +239,7 @@ def create_install_directory(self, spec: "spack.spec.Spec") -> None:

         self.write_spec(spec, self.spec_file_path(spec))

-    def ensure_installed(self, spec: "spack.spec.Spec") -> None:
+    def ensure_installed(self, spec):
         """
         Throws InconsistentInstallDirectoryError if:
         1. spec prefix does not exist

@@ -268,7 +266,7 @@ def ensure_installed(self, spec: "spack.spec.Spec") -> None:
                 "Spec file in %s does not match hash!" % spec_file_path
             )

-    def path_for_spec(self, spec: "spack.spec.Spec") -> str:
+    def path_for_spec(self, spec):
         """Return absolute path from the root to a directory for the spec."""
         _check_concrete(spec)

@@ -279,13 +277,23 @@ def path_for_spec(self, spec: "spack.spec.Spec") -> str:
         assert not path.startswith(self.root)
         return os.path.join(self.root, path)

-    def remove_install_directory(self, spec: "spack.spec.Spec", deprecated: bool = False) -> None:
+    def remove_install_directory(self, spec, deprecated=False):
         """Removes a prefix and any empty parent directories from the root.
         Raised RemoveFailedError if something goes wrong.
         """
         path = self.path_for_spec(spec)
         assert path.startswith(self.root)

+        # Windows readonly files cannot be removed by Python
+        # directly, change permissions before attempting to remove
+        if sys.platform == "win32":
+            kwargs = {
+                "ignore_errors": False,
+                "onerror": fs.readonly_file_handler(ignore_errors=False),
+            }
+        else:
+            kwargs = {}  # the default value for ignore_errors is false
+
         if deprecated:
             if os.path.exists(path):
                 try:

@@ -296,16 +304,7 @@ def remove_install_directory(self, spec: "spack.spec.Spec", deprecated: bool = F
                     raise RemoveFailedError(spec, path, e) from e
         elif os.path.exists(path):
             try:
-                if sys.platform == "win32":
-                    # Windows readonly files cannot be removed by Python
-                    # directly, change permissions before attempting to remove
-                    shutil.rmtree(
-                        path,
-                        ignore_errors=False,
-                        onerror=fs.readonly_file_handler(ignore_errors=False),
-                    )
-                else:
-                    shutil.rmtree(path)
+                shutil.rmtree(path, **kwargs)
             except OSError as e:
                 raise RemoveFailedError(spec, path, e) from e
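The refactor above hoists the Windows-only `shutil.rmtree` keyword arguments so both branches share one call. The readonly-handler pattern itself is standard-library material and can be sketched independently of Spack's `fs.readonly_file_handler` (the handler below is a generic equivalent, not Spack's):

    import os
    import shutil
    import stat

    def _clear_readonly_and_retry(func, path, exc_info):
        # Windows refuses to delete read-only files; make writable and retry.
        os.chmod(path, stat.S_IWRITE)
        func(path)

    def rmtree_force(path: str) -> None:
        shutil.rmtree(path, ignore_errors=False, onerror=_clear_readonly_and_retry)

The `onerror` hook receives the failing function, the offending path, and exception info; retrying after `os.chmod` resolves the common read-only case. (Python 3.12 renames this hook to `onexc`.)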
@@ -12,13 +12,3 @@ class InstallRecordStatus(enum.Flag):
     DEPRECATED = enum.auto()
     MISSING = enum.auto()
     ANY = INSTALLED | DEPRECATED | MISSING
-
-
-class ConfigScopePriority(enum.IntEnum):
-    """Priorities of the different kind of config scopes used by Spack"""
-
-    BUILTIN = 0
-    CONFIG_FILES = 1
-    CUSTOM = 2
-    ENVIRONMENT = 3
-    COMMAND_LINE = 4
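Because the removed `ConfigScopePriority` subclasses `enum.IntEnum`, its members order and compare like plain integers, which is what lets a priority-ordered scope stack keep builtin below config files, custom, environment, and command line. A quick standalone demonstration of that ordering property:

    import enum

    class ConfigScopePriority(enum.IntEnum):
        BUILTIN = 0
        CONFIG_FILES = 1
        CUSTOM = 2
        ENVIRONMENT = 3
        COMMAND_LINE = 4

    assert ConfigScopePriority.COMMAND_LINE > ConfigScopePriority.ENVIRONMENT
    scopes = sorted(
        [(ConfigScopePriority.COMMAND_LINE, "cli"), (ConfigScopePriority.BUILTIN, "_builtin")]
    )
    print([name for _, name in scopes])  # ['_builtin', 'cli'] - lowest priority first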
@@ -166,7 +166,7 @@ def __init__(
             " ".join(self._install_target(s.safe_name()) for s in item.prereqs),
             item.target.spec_hash(),
             item.target.unsafe_format(
-                "{name}{@version}{variants}{ arch=architecture} {%compiler}"
+                "{name}{@version}{%compiler}{variants}{arch=architecture}"
             ),
             item.buildcache_flag,
         )
@@ -10,6 +10,8 @@
 import re
 import shutil
 import stat
+import urllib.parse
+import urllib.request
 import warnings
 from typing import Any, Dict, Iterable, List, Optional, Sequence, Tuple, Union

@@ -30,6 +32,7 @@
 import spack.paths
 import spack.repo
 import spack.schema.env
+import spack.schema.merged
 import spack.spec
 import spack.spec_list
 import spack.store

@@ -40,6 +43,7 @@
 import spack.util.path
 import spack.util.spack_json as sjson
 import spack.util.spack_yaml as syaml
+import spack.util.url
 from spack import traverse
 from spack.installer import PackageInstaller
 from spack.schema.env import TOP_LEVEL_KEY

@@ -47,8 +51,6 @@
 from spack.spec_list import SpecList
 from spack.util.path import substitute_path_variables

-from ..enums import ConfigScopePriority
-
 SpecPair = spack.concretize.SpecPair

 #: environment variable used to indicate the active environment

@@ -385,7 +387,6 @@ def create_in_dir(
         # dev paths in this environment to refer to their original
         # locations.
         _rewrite_relative_dev_paths_on_relocation(env, init_file_dir)
-        _rewrite_relative_repos_paths_on_relocation(env, init_file_dir)

     return env

@@ -402,8 +403,8 @@ def _rewrite_relative_dev_paths_on_relocation(env, init_file_dir):
             dev_path = substitute_path_variables(entry["path"])
             expanded_path = spack.util.path.canonicalize_path(dev_path, default_wd=init_file_dir)

-            # Skip if the substituted and expanded path is the same (e.g. when absolute)
-            if entry["path"] == expanded_path:
+            # Skip if the expanded path is the same (e.g. when absolute)
+            if dev_path == expanded_path:
                 continue

             tty.debug("Expanding develop path for {0} to {1}".format(name, expanded_path))

@@ -418,34 +419,6 @@ def _rewrite_relative_dev_paths_on_relocation(env, init_file_dir):
         env._re_read()


-def _rewrite_relative_repos_paths_on_relocation(env, init_file_dir):
-    """When initializing the environment from a manifest file and we plan
-    to store the environment in a different directory, we have to rewrite
-    relative repo paths to absolute ones and expand environment variables."""
-    with env:
-        repos_specs = spack.config.get("repos", default={}, scope=env.scope_name)
-        if not repos_specs:
-            return
-        for i, entry in enumerate(repos_specs):
-            repo_path = substitute_path_variables(entry)
-            expanded_path = spack.util.path.canonicalize_path(repo_path, default_wd=init_file_dir)
-
-            # Skip if the substituted and expanded path is the same (e.g. when absolute)
-            if entry == expanded_path:
-                continue
-
-            tty.debug("Expanding repo path for {0} to {1}".format(entry, expanded_path))
-
-            repos_specs[i] = expanded_path
-
-        spack.config.set("repos", repos_specs, scope=env.scope_name)
-
-        env.repos_specs = None
-        # If we changed the environment's spack.yaml scope, that will not be reflected
-        # in the manifest that we read
-        env._re_read()
-
-
 def environment_dir_from_name(name: str, exists_ok: bool = True) -> str:
     """Returns the directory associated with a named environment.

@@ -573,6 +546,13 @@ def _write_yaml(data, str_or_file):
     syaml.dump_config(data, str_or_file, default_flow_style=False)


+def _eval_conditional(string):
+    """Evaluate conditional definitions using restricted variable scope."""
+    valid_variables = spack.spec.get_host_environment()
+    valid_variables.update({"re": re, "env": os.environ})
+    return eval(string, valid_variables)
+
+
 def _is_dev_spec_and_has_changed(spec):
     """Check if the passed spec is a dev build and whether it has changed since the
     last installation"""
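The `_eval_conditional` helper restored on the right evaluates a `when:` string against a restricted namespace rather than full module globals. The same pattern, shown standalone with a hand-built namespace (the host values below are made-up stand-ins for what `spack.spec.get_host_environment` would provide):

    import os
    import re

    valid_variables = {
        "platform": "linux",   # illustrative host values
        "target": "x86_64",
        "re": re,
        "env": os.environ,
    }

    def eval_conditional(expression: str) -> bool:
        # eval with an explicit globals dict limits which names the
        # expression can reach; it is a scoping guard, not a sandbox.
        return bool(eval(expression, valid_variables))

    print(eval_conditional("platform == 'linux'"))                   # True
    print(eval_conditional("re.match(r'x86', target) is not None"))  # True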
@@ -601,7 +581,7 @@ def _error_on_nonempty_view_dir(new_root):
     # Check if the target path lexists
     try:
         st = os.lstat(new_root)
-    except OSError:
+    except (IOError, OSError):
        return

     # Empty directories are fine

@@ -881,7 +861,7 @@ def regenerate(self, concrete_roots: List[Spec]) -> None:
         ):
             try:
                 shutil.rmtree(old_root)
-            except OSError as e:
+            except (IOError, OSError) as e:
                 msg = "Failed to remove old view at %s\n" % old_root
                 msg += str(e)
                 tty.warn(msg)

@@ -1005,7 +985,7 @@ def _process_definition(self, entry):
         """Process a single spec definition item."""
         when_string = entry.get("when")
         if when_string is not None:
-            when = spack.spec.eval_conditional(when_string)
+            when = _eval_conditional(when_string)
             assert len([x for x in entry if x != "when"]) == 1
         else:
             when = True

@@ -1128,6 +1108,11 @@ def user_specs(self):

     @property
     def dev_specs(self):
+        if not self._dev_specs:
+            self._dev_specs = self._read_dev_specs()
+        return self._dev_specs
+
+    def _read_dev_specs(self):
         dev_specs = {}
         dev_config = spack.config.get("develop", {})
         for name, entry in dev_config.items():

@@ -1545,6 +1530,9 @@ def _get_specs_to_concretize(
         return new_user_specs, kept_user_specs, specs_to_concretize

     def _concretize_together_where_possible(self, tests: bool = False) -> Sequence[SpecPair]:
+        # Avoid cyclic dependency
+        import spack.solver.asp
+
         # Exit early if the set of concretized specs is the set of user specs
         new_user_specs, _, specs_to_concretize = self._get_specs_to_concretize()
         if not new_user_specs:

@@ -2404,8 +2392,6 @@ def invalidate_repository_cache(self):

     def __enter__(self):
         self._previous_active = _active_environment
-        if self._previous_active:
-            deactivate()
         activate(self)
         return self

@@ -2568,7 +2554,7 @@ def is_latest_format(manifest):
     try:
         with open(manifest, encoding="utf-8") as f:
             data = syaml.load(f)
-    except OSError:
+    except (OSError, IOError):
         return True
     top_level_key = _top_level_key(data)
     changed = spack.schema.env.update(data[top_level_key])

@@ -2648,35 +2634,6 @@ def _ensure_env_dir():

         shutil.copy(envfile, target_manifest)

-    # Copy relative path includes that live inside the environment dir
-    try:
-        manifest = EnvironmentManifestFile(environment_dir)
-    except Exception:
-        # error handling for bad manifests is handled on other code paths
-        return
-
-    # TODO: make this recursive
-    includes = manifest[TOP_LEVEL_KEY].get("include", [])
-    for include in includes:
-        included_path = spack.config.included_path(include)
-        path = included_path.path
-        if os.path.isabs(path):
-            continue
-
-        abspath = pathlib.Path(os.path.normpath(environment_dir / path))
-        common_path = pathlib.Path(os.path.commonpath([environment_dir, abspath]))
-        if common_path != environment_dir:
-            tty.debug(f"Will not copy relative include file from outside environment: {path}")
-            continue
-
-        orig_abspath = os.path.normpath(envfile.parent / path)
-        if not os.path.exists(orig_abspath):
-            tty.warn(f"Included file does not exist; will not copy: '{path}'")
-            continue
-
-        fs.touchp(abspath)
-        shutil.copy(orig_abspath, abspath)
-
-
 class EnvironmentManifestFile(collections.abc.Mapping):
     """Manages the in-memory representation of a manifest file, and its synchronization

@@ -2894,7 +2851,7 @@ def extract_name(_item):
                 continue

             condition_str = item.get("when", "True")
-            if not spack.spec.eval_conditional(condition_str):
+            if not _eval_conditional(condition_str):
                 continue

             yield idx, item

@@ -2955,20 +2912,127 @@ def __iter__(self):
     def __str__(self):
         return str(self.manifest_file)

+    @property
+    def included_config_scopes(self) -> List[spack.config.ConfigScope]:
+        """List of included configuration scopes from the manifest.
+
+        Scopes are listed in the YAML file in order from highest to
+        lowest precedence, so configuration from earlier scope will take
+        precedence over later ones.
+
+        This routine returns them in the order they should be pushed onto
+        the internal scope stack (so, in reverse, from lowest to highest).
+
+        Returns: Configuration scopes associated with the environment manifest
+
+        Raises:
+            SpackEnvironmentError: if the manifest includes a remote file but
+                no configuration stage directory has been identified
+        """
+        scopes: List[spack.config.ConfigScope] = []
+
+        # load config scopes added via 'include:', in reverse so that
+        # highest-precedence scopes are last.
+        includes = self[TOP_LEVEL_KEY].get("include", [])
+        missing = []
+        for i, config_path in enumerate(reversed(includes)):
+            # allow paths to contain spack config/environment variables, etc.
+            config_path = substitute_path_variables(config_path)
+            include_url = urllib.parse.urlparse(config_path)
+
+            # If scheme is not valid, config_path is not a url
+            # of a type Spack is generally aware
+            if spack.util.url.validate_scheme(include_url.scheme):
+                # Transform file:// URLs to direct includes.
+                if include_url.scheme == "file":
+                    config_path = urllib.request.url2pathname(include_url.path)
+
+                # Any other URL should be fetched.
+                elif include_url.scheme in ("http", "https", "ftp"):
+                    # Stage any remote configuration file(s)
+                    staged_configs = (
+                        os.listdir(self.config_stage_dir)
+                        if os.path.exists(self.config_stage_dir)
+                        else []
+                    )
+                    remote_path = urllib.request.url2pathname(include_url.path)
+                    basename = os.path.basename(remote_path)
+                    if basename in staged_configs:
+                        # Do NOT re-stage configuration files over existing
+                        # ones with the same name since there is a risk of
+                        # losing changes (e.g., from 'spack config update').
+                        tty.warn(
+                            "Will not re-stage configuration from {0} to avoid "
+                            "losing changes to the already staged file of the "
+                            "same name.".format(remote_path)
+                        )
+
+                        # Recognize the configuration stage directory
+                        # is flattened to ensure a single copy of each
+                        # configuration file.
+                        config_path = self.config_stage_dir
+                        if basename.endswith(".yaml"):
+                            config_path = os.path.join(config_path, basename)
+                    else:
+                        staged_path = spack.config.fetch_remote_configs(
+                            config_path, str(self.config_stage_dir), skip_existing=True
+                        )
+                        if not staged_path:
+                            raise SpackEnvironmentError(
+                                "Unable to fetch remote configuration {0}".format(config_path)
+                            )
+                        config_path = staged_path
+
+            elif include_url.scheme:
+                raise ValueError(
+                    f"Unsupported URL scheme ({include_url.scheme}) for "
+                    f"environment include: {config_path}"
+                )
+
+            # treat relative paths as relative to the environment
+            if not os.path.isabs(config_path):
+                config_path = os.path.join(self.manifest_dir, config_path)
+                config_path = os.path.normpath(os.path.realpath(config_path))
+
+            if os.path.isdir(config_path):
+                # directories are treated as regular ConfigScopes
+                config_name = f"env:{self.name}:{os.path.basename(config_path)}"
+                tty.debug(f"Creating DirectoryConfigScope {config_name} for '{config_path}'")
+                scopes.append(spack.config.DirectoryConfigScope(config_name, config_path))
+            elif os.path.exists(config_path):
+                # files are assumed to be SingleFileScopes
+                config_name = f"env:{self.name}:{config_path}"
+                tty.debug(f"Creating SingleFileScope {config_name} for '{config_path}'")
+                scopes.append(
+                    spack.config.SingleFileScope(
+                        config_name, config_path, spack.schema.merged.schema
+                    )
+                )
+            else:
+                missing.append(config_path)
+                continue
+
+        if missing:
+            msg = "Detected {0} missing include path(s):".format(len(missing))
+            msg += "\n   {0}".format("\n   ".join(missing))
+            raise spack.config.ConfigFileError(msg)
+
+        return scopes
+
     @property
     def env_config_scopes(self) -> List[spack.config.ConfigScope]:
         """A list of all configuration scopes for the environment manifest. On the first call this
         instantiates all the scopes, on subsequent calls it returns the cached list."""
         if self._config_scopes is not None:
             return self._config_scopes

         scopes: List[spack.config.ConfigScope] = [
+            *self.included_config_scopes,
             spack.config.SingleFileScope(
                 self.scope_name,
                 str(self.manifest_file),
                 spack.schema.env.schema,
                 yaml_path=[TOP_LEVEL_KEY],
-            )
+            ),
         ]
         ensure_no_disallowed_env_config_mods(scopes)
         self._config_scopes = scopes
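The scheme dispatch inside `included_config_scopes` above hinges on `urllib.parse.urlparse`: no scheme means a local path, `file://` is unwrapped to a path, and `http(s)/ftp` triggers staging. The branching can be exercised with the standard library alone:

    import urllib.parse
    import urllib.request

    def classify_include(entry: str) -> str:
        url = urllib.parse.urlparse(entry)
        if url.scheme == "file":
            return f"local (from file URL): {urllib.request.url2pathname(url.path)}"
        if url.scheme in ("http", "https", "ftp"):
            return f"remote, needs staging: {entry}"
        if url.scheme:
            return f"unsupported scheme: {url.scheme}"
        return f"local path: {entry}"

    print(classify_include("../configs"))
    print(classify_include("file:///etc/spack/packages.yaml"))
    print(classify_include("https://example.com/cfg/packages.yaml"))

One caveat: a bare Windows path like C:\cfg parses with scheme "c", which is presumably why the code routes the check through `spack.util.url.validate_scheme` before trusting the scheme at all.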
@@ -2977,12 +3041,14 @@ def env_config_scopes(self) -> List[spack.config.ConfigScope]:
     def prepare_config_scope(self) -> None:
         """Add the manifest's scopes to the global configuration search path."""
         for scope in self.env_config_scopes:
-            spack.config.CONFIG.push_scope(scope, priority=ConfigScopePriority.ENVIRONMENT)
+            spack.config.CONFIG.push_scope(scope)
+            spack.config.CONFIG.ensure_scope_ordering()

     def deactivate_config_scope(self) -> None:
         """Remove any of the manifest's scopes from the global config path."""
         for scope in self.env_config_scopes:
             spack.config.CONFIG.remove_scope(scope.name)
+            spack.config.CONFIG.ensure_scope_ordering()

     @contextlib.contextmanager
     def use_config(self):

@@ -8,7 +8,6 @@
 import llnl.util.tty as tty
 from llnl.util.tty.color import colorize

-import spack.config
 import spack.environment as ev
 import spack.repo
 import spack.schema.environment

@@ -159,8 +158,7 @@ def activate(
     # become PATH variables.
     #

-    with env.manifest.use_config():
-        env_vars_yaml = spack.config.get("env_vars", None)
+    env_vars_yaml = env.manifest.configuration.get("env_vars", None)
     if env_vars_yaml:
         env_mods.extend(spack.schema.environment.parse(env_vars_yaml))

@@ -197,8 +195,7 @@ def deactivate() -> EnvironmentModifications:
     if active is None:
         return env_mods

-    with active.manifest.use_config():
-        env_vars_yaml = spack.config.get("env_vars", None)
+    env_vars_yaml = active.manifest.configuration.get("env_vars", None)
     if env_vars_yaml:
         env_mods.extend(spack.schema.environment.parse(env_vars_yaml).reversed())
@@ -187,7 +187,7 @@ def path_for_extension(target_name: str, *, paths: List[str]) -> str:
|
|||||||
if name == target_name:
|
if name == target_name:
|
||||||
return path
|
return path
|
||||||
else:
|
else:
|
||||||
raise OSError('extension "{0}" not found'.format(target_name))
|
raise IOError('extension "{0}" not found'.format(target_name))
|
||||||
|
|
||||||
|
|
||||||
def get_module(cmd_name):
|
def get_module(cmd_name):
|
||||||
|
|||||||
@@ -9,8 +9,7 @@
|
|||||||
import shutil
|
import shutil
|
||||||
import stat
|
import stat
|
||||||
import sys
|
import sys
|
||||||
import tempfile
|
from typing import Callable, Dict, Optional
|
||||||
from typing import Callable, Dict, List, Optional
|
|
||||||
|
|
||||||
from typing_extensions import Literal
|
from typing_extensions import Literal
|
||||||
|
|
||||||
@@ -78,7 +77,7 @@ def view_copy(
|
|||||||
|
|
||||||
# Order of this dict is somewhat irrelevant
|
# Order of this dict is somewhat irrelevant
|
||||||
prefix_to_projection = {
|
prefix_to_projection = {
|
||||||
str(s.prefix): view.get_projection_for_spec(s)
|
s.prefix: view.get_projection_for_spec(s)
|
||||||
for s in spec.traverse(root=True, order="breadth")
|
for s in spec.traverse(root=True, order="breadth")
|
||||||
if not s.external
|
if not s.external
|
||||||
}
|
}
|
||||||
@@ -185,7 +184,7 @@ def __init__(
|
|||||||
def link(self, src: str, dst: str, spec: Optional[spack.spec.Spec] = None) -> None:
|
def link(self, src: str, dst: str, spec: Optional[spack.spec.Spec] = None) -> None:
|
||||||
self._link(src, dst, self, spec)
|
self._link(src, dst, self, spec)
|
||||||
|
|
||||||
def add_specs(self, *specs: spack.spec.Spec, **kwargs) -> None:
|
def add_specs(self, *specs, **kwargs):
|
||||||
"""
|
"""
|
||||||
Add given specs to view.
|
Add given specs to view.
|
||||||
|
|
||||||
@@ -200,19 +199,19 @@ def add_specs(self, *specs: spack.spec.Spec, **kwargs) -> None:
         """
         raise NotImplementedError
 
-    def add_standalone(self, spec: spack.spec.Spec) -> bool:
+    def add_standalone(self, spec):
         """
         Add (link) a standalone package into this view.
         """
         raise NotImplementedError
 
-    def check_added(self, spec: spack.spec.Spec) -> bool:
+    def check_added(self, spec):
         """
         Check if the given concrete spec is active in this view.
         """
         raise NotImplementedError
 
-    def remove_specs(self, *specs: spack.spec.Spec, **kwargs) -> None:
+    def remove_specs(self, *specs, **kwargs):
         """
         Removes given specs from view.
@@ -231,25 +230,25 @@ def remove_specs(self, *specs: spack.spec.Spec, **kwargs) -> None:
         """
         raise NotImplementedError
 
-    def remove_standalone(self, spec: spack.spec.Spec) -> None:
+    def remove_standalone(self, spec):
         """
         Remove (unlink) a standalone package from this view.
         """
         raise NotImplementedError
 
-    def get_projection_for_spec(self, spec: spack.spec.Spec) -> str:
+    def get_projection_for_spec(self, spec):
         """
         Get the projection in this view for a spec.
         """
         raise NotImplementedError
 
-    def get_all_specs(self) -> List[spack.spec.Spec]:
+    def get_all_specs(self):
         """
         Get all specs currently active in this view.
         """
         raise NotImplementedError
 
-    def get_spec(self, spec: spack.spec.Spec) -> Optional[spack.spec.Spec]:
+    def get_spec(self, spec):
         """
         Return the actual spec linked in this view (i.e. do not look it up
         in the database by name).
@@ -263,7 +262,7 @@ def get_spec(self, spec: spack.spec.Spec) -> Optional[spack.spec.Spec]:
         """
         raise NotImplementedError
 
-    def print_status(self, *specs: spack.spec.Spec, **kwargs) -> None:
+    def print_status(self, *specs, **kwargs):
         """
         Print a short summary about the given specs, detailing whether..
         * ..they are active in the view.
@@ -428,7 +427,7 @@ def needs_file(spec, file):
     try:
         with open(manifest_file, "r", encoding="utf-8") as f:
             manifest = s_json.load(f)
-    except OSError:
+    except (OSError, IOError):
        # if we can't load it, assume it doesn't know about the file.
        manifest = {}
    return test_path in manifest
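Worth noting for this and the later `IOError` hunks: on any Python 3 interpreter the two names are the same class, so both spellings of the handler catch exactly the same exceptions. A quick check, runnable as-is:

```python
# IOError has been an alias of OSError since Python 3.3.
assert IOError is OSError
try:
    open("/nonexistent/path")
except (OSError, IOError):  # the tuple is redundant but harmless
    print("caught")
```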
@@ -643,7 +642,7 @@ def print_status(self, *specs, **kwargs):
         specs.sort()
 
         abbreviated = [
-            s.cformat("{name}{@version}{compiler_flags}{variants}{%compiler}")
+            s.cformat("{name}{@version}{%compiler}{compiler_flags}{variants}")
             for s in specs
         ]
 
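The two format strings list the same fields; only the position of `{%compiler}` moves. A hedged illustration of the difference (the spec and its exact rendering are invented for this sketch, not taken from the diff):

```python
# "{name}{@version}{compiler_flags}{variants}{%compiler}"
#   -> zlib@1.3.1 cflags=-O2 +shared %gcc@12.3.0
# "{name}{@version}{%compiler}{compiler_flags}{variants}"
#   -> zlib@1.3.1%gcc@12.3.0 cflags=-O2 +shared
```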
@@ -694,7 +693,7 @@ def _sanity_check_view_projection(self, specs):
             raise ConflictingSpecsError(current_spec, conflicting_spec)
         seen[metadata_dir] = current_spec
 
-    def add_specs(self, *specs, **kwargs) -> None:
+    def add_specs(self, *specs: spack.spec.Spec) -> None:
         """Link a root-to-leaf topologically ordered list of specs into the view."""
         assert all((s.concrete for s in specs))
         if len(specs) == 0:
@@ -709,10 +708,7 @@ def add_specs(self, *specs, **kwargs) -> None:
         def skip_list(file):
             return os.path.basename(file) == spack.store.STORE.layout.metadata_dir
 
-        # Determine if the root is on a case-insensitive filesystem
-        normalize_paths = is_folder_on_case_insensitive_filesystem(self._root)
-
-        visitor = SourceMergeVisitor(ignore=skip_list, normalize_paths=normalize_paths)
+        visitor = SourceMergeVisitor(ignore=skip_list)
 
         # Gather all the directories to be made and files to be linked
         for spec in specs:
@@ -831,11 +827,11 @@ def get_projection_for_spec(self, spec):
 #####################
 # utility functions #
 #####################
-def get_spec_from_file(filename) -> Optional[spack.spec.Spec]:
+def get_spec_from_file(filename):
     try:
         with open(filename, "r", encoding="utf-8") as f:
             return spack.spec.Spec.from_yaml(f)
-    except OSError:
+    except IOError:
         return None
 
 
@@ -888,8 +884,3 @@ def get_dependencies(specs):
 
 class ConflictingProjectionsError(SpackError):
     """Raised when a view has a projections file and is given one manually."""
-
-
-def is_folder_on_case_insensitive_filesystem(path: str) -> bool:
-    with tempfile.NamedTemporaryFile(dir=path, prefix=".sentinel") as sentinel:
-        return os.path.exists(os.path.join(path, os.path.basename(sentinel.name).upper()))
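The helper deleted above illustrates a useful trick: to detect a case-insensitive filesystem, create a uniquely named sentinel file and probe for its upper-cased name. A minimal standalone sketch of the same idea (function name and test directory are illustrative, not part of the diff):

```python
import os
import tempfile

def folder_is_case_insensitive(path: str) -> bool:
    # Create a throwaway lowercase-prefixed file, then ask the filesystem
    # whether the upper-cased name resolves to the same entry.
    with tempfile.NamedTemporaryFile(dir=path, prefix=".sentinel") as sentinel:
        upper = os.path.basename(sentinel.name).upper()
        return os.path.exists(os.path.join(path, upper))

# Typically True on macOS (default APFS) and Windows, False on ext4.
print(folder_is_case_insensitive(tempfile.gettempdir()))
```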
@@ -42,10 +42,10 @@
 import llnl.util.tty.color
 
 import spack.deptypes as dt
+import spack.repo
 import spack.spec
 import spack.tengine
 import spack.traverse
-from spack.solver.input_analysis import create_graph_analyzer
 
 
 def find(seq, predicate):
@@ -482,7 +482,7 @@ class SimpleDAG(DotGraphBuilder):
     """Simple DOT graph, with nodes colored uniformly and edges without properties"""
 
     def node_entry(self, node):
-        format_option = "{name}{@version}{/hash:7}{%compiler}"
+        format_option = "{name}{@version}{%compiler}{/hash:7}"
         return node.dag_hash(), f'[label="{node.format(format_option)}"]'
 
     def edge_entry(self, edge):
@@ -515,7 +515,7 @@ def visit(self, edge):
         super().visit(edge)
 
     def node_entry(self, node):
-        node_str = node.format("{name}{@version}{/hash:7}{%compiler}")
+        node_str = node.format("{name}{@version}{%compiler}{/hash:7}")
         options = f'[label="{node_str}", group="build_dependencies", fillcolor="coral"]'
         if node.dag_hash() in self.main_unified_space:
             options = f'[label="{node_str}", group="main_psid"]'
@@ -537,11 +537,10 @@ def edge_entry(self, edge):
 
 def _static_edges(specs, depflag):
     for spec in specs:
-        *_, edges = create_graph_analyzer().possible_dependencies(
-            spec.name, expand_virtuals=True, allowed_deps=depflag
-        )
+        pkg_cls = spack.repo.PATH.get_pkg_class(spec.name)
+        possible = pkg_cls.possible_dependencies(expand_virtuals=True, depflag=depflag)
 
-        for parent_name, dependencies in edges.items():
+        for parent_name, dependencies in possible.items():
             for dependency_name in dependencies:
                 yield spack.spec.DependencySpec(
                     spack.spec.Spec(parent_name),
@@ -6,7 +6,7 @@
 import spack.deptypes as dt
 import spack.repo
 
-HASHES = []
+hashes = []
 
 
 class SpecHashDescriptor:
@@ -23,7 +23,7 @@ def __init__(self, depflag: dt.DepFlag, package_hash, name, override=None):
         self.depflag = depflag
         self.package_hash = package_hash
         self.name = name
-        HASHES.append(self)
+        hashes.append(self)
         # Allow spec hashes to have an alternate computation method
         self.override = override
 
@@ -43,9 +43,13 @@ def __repr__(self):
     )
 
 
-#: The DAG hash includes all inputs that can affect how a package is built.
-dag_hash = SpecHashDescriptor(
-    depflag=dt.BUILD | dt.LINK | dt.RUN | dt.TEST, package_hash=True, name="hash"
+#: Spack's deployment hash. Includes all inputs that can affect how a package is built.
+dag_hash = SpecHashDescriptor(depflag=dt.BUILD | dt.LINK | dt.RUN, package_hash=True, name="hash")
+
+
+#: Hash descriptor used only to transfer a DAG, as is, across processes
+process_hash = SpecHashDescriptor(
+    depflag=dt.BUILD | dt.LINK | dt.RUN | dt.TEST, package_hash=True, name="process_hash"
 )
 
 
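Each descriptor selects which dependency edges feed the hash via a bitmask union of dependency types. A self-contained model of that flag arithmetic (the concrete values are a stand-in, not spack.deptypes' actual constants):

```python
from enum import IntFlag

class Dep(IntFlag):  # stand-in for spack.deptypes' bitmask constants
    BUILD = 1
    LINK = 2
    RUN = 4
    TEST = 8

dag = Dep.BUILD | Dep.LINK | Dep.RUN  # what this side's dag_hash hashes over
process = dag | Dep.TEST              # process_hash additionally covers test deps
assert Dep.TEST & dag == 0            # test-only edges don't change dag_hash
```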
@@ -26,7 +26,7 @@ def is_shared_library_elf(filepath):
         with open(filepath, "rb") as f:
             elf = parse_elf(f, interpreter=True, dynamic_section=True)
             return elf.has_pt_dynamic and (elf.has_soname or not elf.has_pt_interp)
-    except (OSError, ElfParsingError):
+    except (IOError, OSError, ElfParsingError):
         return False
 
 
@@ -2,14 +2,198 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 
-# flake8: noqa: F401, E402
+import fnmatch
 import io
+import os
+import re
+from typing import Dict, List, Union
 
 import llnl.util.tty as tty
-from llnl.util.filesystem import visit_directory_tree
+from llnl.util.filesystem import BaseDirectoryVisitor, visit_directory_tree
+from llnl.util.lang import stable_partition
 
 import spack.config
 import spack.error
-import spack.verify_libraries
+import spack.util.elf as elf
+
+#: Patterns for names of libraries that are allowed to be unresolved when *just* looking at RPATHs
+#: added by Spack. These are libraries outside of Spack's control, and assumed to be located in
+#: default search paths of the dynamic linker.
+ALLOW_UNRESOLVED = [
+    # kernel
+    "linux-vdso.so.*",
+    "libselinux.so.*",
+    # musl libc
+    "ld-musl-*.so.*",
+    # glibc
+    "ld-linux*.so.*",
+    "ld64.so.*",
+    "libanl.so.*",
+    "libc.so.*",
+    "libdl.so.*",
+    "libm.so.*",
+    "libmemusage.so.*",
+    "libmvec.so.*",
+    "libnsl.so.*",
+    "libnss_compat.so.*",
+    "libnss_db.so.*",
+    "libnss_dns.so.*",
+    "libnss_files.so.*",
+    "libnss_hesiod.so.*",
+    "libpcprofile.so.*",
+    "libpthread.so.*",
+    "libresolv.so.*",
+    "librt.so.*",
+    "libSegFault.so.*",
+    "libthread_db.so.*",
+    "libutil.so.*",
+    # gcc -- this is required even with gcc-runtime, because e.g. libstdc++ depends on libgcc_s,
+    # but the binaries we copy from the compiler don't have an $ORIGIN rpath.
+    "libasan.so.*",
+    "libatomic.so.*",
+    "libcc1.so.*",
+    "libgcc_s.so.*",
+    "libgfortran.so.*",
+    "libgomp.so.*",
+    "libitm.so.*",
+    "liblsan.so.*",
+    "libquadmath.so.*",
+    "libssp.so.*",
+    "libstdc++.so.*",
+    "libtsan.so.*",
+    "libubsan.so.*",
+    # systemd
+    "libudev.so.*",
+    # cuda driver
+    "libcuda.so.*",
+]
+
+
+def is_compatible(parent: elf.ElfFile, child: elf.ElfFile) -> bool:
+    return (
+        child.elf_hdr.e_type == elf.ELF_CONSTANTS.ET_DYN
+        and parent.is_little_endian == child.is_little_endian
+        and parent.is_64_bit == child.is_64_bit
+        and parent.elf_hdr.e_machine == child.elf_hdr.e_machine
+    )
+
+
+def candidate_matches(current_elf: elf.ElfFile, candidate_path: bytes) -> bool:
+    try:
+        with open(candidate_path, "rb") as g:
+            return is_compatible(current_elf, elf.parse_elf(g))
+    except (OSError, elf.ElfParsingError):
+        return False
+
+
+class Problem:
+    def __init__(
+        self, resolved: Dict[bytes, bytes], unresolved: List[bytes], relative_rpaths: List[bytes]
+    ) -> None:
+        self.resolved = resolved
+        self.unresolved = unresolved
+        self.relative_rpaths = relative_rpaths
+
+
+class ResolveSharedElfLibDepsVisitor(BaseDirectoryVisitor):
+    def __init__(self, allow_unresolved_patterns: List[str]) -> None:
+        self.problems: Dict[str, Problem] = {}
+        self._allow_unresolved_regex = re.compile(
+            "|".join(fnmatch.translate(x) for x in allow_unresolved_patterns)
+        )
+
+    def allow_unresolved(self, needed: bytes) -> bool:
+        try:
+            name = needed.decode("utf-8")
+        except UnicodeDecodeError:
+            return False
+        return bool(self._allow_unresolved_regex.match(name))
+
+    def visit_file(self, root: str, rel_path: str, depth: int) -> None:
+        # We work with byte strings for paths.
+        path = os.path.join(root, rel_path).encode("utf-8")
+
+        # For $ORIGIN interpolation: should not have trailing dir separator.
+        origin = os.path.dirname(path)
+
+        # Retrieve the needed libs + rpaths.
+        try:
+            with open(path, "rb") as f:
+                parsed_elf = elf.parse_elf(f, interpreter=False, dynamic_section=True)
+        except (OSError, elf.ElfParsingError):
+            # Not dealing with an invalid ELF file.
+            return
+
+        # If there's no needed libs all is good
+        if not parsed_elf.has_needed:
+            return
+
+        # Get the needed libs and rpaths (notice: byte strings)
+        # Don't force an encoding cause paths are just a bag of bytes.
+        needed_libs = parsed_elf.dt_needed_strs
+
+        rpaths = parsed_elf.dt_rpath_str.split(b":") if parsed_elf.has_rpath else []
+
+        # We only interpolate $ORIGIN, not $LIB and $PLATFORM, they're not really
+        # supported in general. Also remove empty paths.
+        rpaths = [x.replace(b"$ORIGIN", origin) for x in rpaths if x]
+
+        # Do not allow relative rpaths (they are relative to the current working directory)
+        rpaths, relative_rpaths = stable_partition(rpaths, os.path.isabs)
+
+        # If there's a / in the needed lib, it's opened directly, otherwise it needs
+        # a search.
+        direct_libs, search_libs = stable_partition(needed_libs, lambda x: b"/" in x)
+
+        # Do not allow relative paths in direct libs (they are relative to the current working
+        # directory)
+        direct_libs, unresolved = stable_partition(direct_libs, os.path.isabs)
+
+        resolved: Dict[bytes, bytes] = {}
+
+        for lib in search_libs:
+            if self.allow_unresolved(lib):
+                continue
+            for rpath in rpaths:
+                candidate = os.path.join(rpath, lib)
+                if candidate_matches(parsed_elf, candidate):
+                    resolved[lib] = candidate
+                    break
+            else:
+                unresolved.append(lib)
+
+        # Check if directly opened libs are compatible
+        for lib in direct_libs:
+            if candidate_matches(parsed_elf, lib):
+                resolved[lib] = lib
+            else:
+                unresolved.append(lib)
+
+        if unresolved or relative_rpaths:
+            self.problems[rel_path] = Problem(resolved, unresolved, relative_rpaths)
+
+    def visit_symlinked_file(self, root: str, rel_path: str, depth: int) -> None:
+        pass
+
+    def before_visit_dir(self, root: str, rel_path: str, depth: int) -> bool:
+        # There can be binaries in .spack/test which shouldn't be checked.
+        if rel_path == ".spack":
+            return False
+        return True
+
+    def before_visit_symlinked_dir(self, root: str, rel_path: str, depth: int) -> bool:
+        return False
+
+
+class CannotLocateSharedLibraries(spack.error.SpackError):
+    pass
+
+
+def maybe_decode(byte_str: bytes) -> Union[str, bytes]:
+    try:
+        return byte_str.decode("utf-8")
+    except UnicodeDecodeError:
+        return byte_str
 
 
 def post_install(spec, explicit):
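Aside: the visitor above compiles all of its glob patterns into one regular expression via `fnmatch.translate`, so each NEEDED entry costs a single match call. A tiny standalone sketch of that technique (pattern list abbreviated for illustration):

```python
import fnmatch
import re

patterns = ["libc.so.*", "ld-linux*.so.*"]  # abbreviated ALLOW_UNRESOLVED
allow = re.compile("|".join(fnmatch.translate(p) for p in patterns))

assert allow.match("libc.so.6")
assert allow.match("ld-linux-x86-64.so.2")
assert not allow.match("libfoo.so.1")
```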
@@ -20,23 +204,36 @@ def post_install(spec, explicit):
     if policy == "ignore" or spec.external or spec.platform not in ("linux", "freebsd"):
         return
 
-    visitor = spack.verify_libraries.ResolveSharedElfLibDepsVisitor(
-        [*spack.verify_libraries.ALLOW_UNRESOLVED, *spec.package.unresolved_libraries]
+    visitor = ResolveSharedElfLibDepsVisitor(
+        [*ALLOW_UNRESOLVED, *spec.package.unresolved_libraries]
     )
     visit_directory_tree(spec.prefix, visitor)
 
+    # All good?
     if not visitor.problems:
         return
 
-    output = io.StringIO("not all executables and libraries can resolve their dependencies:\n")
-    visitor.write(output)
+    # For now just list the issues (print it in ldd style, except we don't recurse)
+    output = io.StringIO()
+    output.write("not all executables and libraries can resolve their dependencies:\n")
+    for path, problem in visitor.problems.items():
+        output.write(path)
+        output.write("\n")
+        for needed, full_path in problem.resolved.items():
+            output.write("    ")
+            if needed == full_path:
+                output.write(maybe_decode(needed))
+            else:
+                output.write(f"{maybe_decode(needed)} => {maybe_decode(full_path)}")
+            output.write("\n")
+        for not_found in problem.unresolved:
+            output.write(f"    {maybe_decode(not_found)} => not found\n")
+        for relative_rpath in problem.relative_rpaths:
+            output.write(f"    {maybe_decode(relative_rpath)} => relative rpath\n")
 
     message = output.getvalue().strip()
 
     if policy == "error":
         raise CannotLocateSharedLibraries(message)
 
     tty.warn(message)
-
-
-class CannotLocateSharedLibraries(spack.error.SpackError):
-    pass
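To picture the ldd-style report this version of `post_install` assembles, here is a hypothetical rendering (binary path and library names invented for illustration):

```
not all executables and libraries can resolve their dependencies:
bin/tool
    libfoo.so.1 => /opt/app/lib/libfoo.so.1
    libbar.so.2 => not found
    ../lib => relative rpath
```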
@@ -166,7 +166,7 @@ def filter_shebangs_in_directory(directory, filenames=None):
         # Only look at executable, non-symlink files.
         try:
             st = os.lstat(path)
-        except OSError:
+        except (IOError, OSError):
             continue
 
         if stat.S_ISLNK(st.st_mode) or stat.S_ISDIR(st.st_mode) or not st.st_mode & is_exe:
@@ -21,6 +21,7 @@
 from llnl.util.lang import nullcontext
 from llnl.util.tty.color import colorize
 
+import spack.build_environment
 import spack.config
 import spack.error
 import spack.package_base
@@ -397,7 +398,7 @@ def stand_alone_tests(self, kwargs):
         Args:
             kwargs (dict): arguments to be used by the test process
         """
-        import spack.build_environment  # avoid circular dependency
+        import spack.build_environment
 
         spack.build_environment.start_build_process(self.pkg, test_process, kwargs)
 
@@ -462,8 +463,6 @@ def write_tested_status(self):
 
 @contextlib.contextmanager
 def test_part(pkg: Pb, test_name: str, purpose: str, work_dir: str = ".", verbose: bool = False):
-    import spack.build_environment  # avoid circular dependency
-
     wdir = "." if work_dir is None else work_dir
     tester = pkg.tester
     assert test_name and test_name.startswith(
@@ -567,7 +566,7 @@ def copy_test_files(pkg: Pb, test_spec: spack.spec.Spec):
 
     # copy test data into test stage data dir
    try:
-        pkg_cls = spack.repo.PATH.get_pkg_class(test_spec.fullname)
+        pkg_cls = test_spec.package_class
    except spack.repo.UnknownPackageError:
        tty.debug(f"{test_spec.name}: skipping test data copy since no package class found")
        return
@@ -624,7 +623,7 @@ def test_functions(
     vpkgs = virtuals(pkg)
     for vname in vpkgs:
         try:
-            classes.append(spack.repo.PATH.get_pkg_class(vname))
+            classes.append((Spec(vname)).package_class)
         except spack.repo.UnknownPackageError:
             tty.debug(f"{vname}: virtual does not appear to have a package file")
 
@@ -669,7 +668,7 @@ def process_test_parts(pkg: Pb, test_specs: List[spack.spec.Spec], verbose: bool
 
     # grab test functions associated with the spec, which may be virtual
     try:
-        tests = test_functions(spack.repo.PATH.get_pkg_class(spec.fullname))
+        tests = test_functions(spec.package_class)
     except spack.repo.UnknownPackageError:
         # Some virtuals don't have a package so we don't want to report
         # them as not having tests when that isn't appropriate.
@@ -814,7 +814,7 @@ def get_depflags(self, pkg: "spack.package_base.PackageBase") -> int:
         # Include build dependencies if pkg is going to be built from sources, or
         # if build deps are explicitly requested.
         if include_build_deps or not (
-            cache_only or pkg.spec.installed and pkg.spec.dag_hash() not in self.overwrite
+            cache_only or pkg.spec.installed and not pkg.spec.dag_hash() in self.overwrite
         ):
             depflag |= dt.BUILD
         if self.run_tests(pkg):
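The two membership tests in this hunk are semantically identical; Python parses `not x in s` as `not (x in s)`, and `x not in s` is merely the idiomatic spelling. A one-line check:

```python
x, s = 3, {1, 2, 3}
assert (x not in s) == (not x in s)  # same result; `not in` is the preferred form
```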
@@ -2436,7 +2436,11 @@ def _real_install(self) -> None:
             # DEBUGGING TIP - to debug this section, insert an IPython
             # embed here, and run the sections below without log capture
             log_contextmanager = log_output(
-                log_file, self.echo, True, filter_fn=self.filter_fn
+                log_file,
+                self.echo,
+                True,
+                env=self.unmodified_env,
+                filter_fn=self.filter_fn,
             )
 
             with log_contextmanager as logger:
@@ -47,8 +47,6 @@
 import spack.util.environment
 import spack.util.lock
 
-from .enums import ConfigScopePriority
-
 #: names of profile statistics
 stat_names = pstats.Stats.sort_arg_dict_default
 
@@ -165,7 +163,7 @@ def format_help_sections(self, level):
         # lazily add all commands to the parser when needed.
         add_all_commands(self)
 
-        # Print help on subcommands in neatly formatted sections.
+        """Print help on subcommands in neatly formatted sections."""
         formatter = self._get_formatter()
 
         # Create a list of subcommand actions. Argparse internals are nasty!
@@ -730,7 +728,7 @@ def _compatible_sys_types():
     with the current host.
     """
     host_platform = spack.platforms.host()
-    host_os = str(host_platform.default_operating_system())
+    host_os = str(host_platform.operating_system("default_os"))
     host_target = archspec.cpu.host()
     compatible_targets = [host_target] + host_target.ancestors
 
@@ -874,19 +872,14 @@ def add_command_line_scopes(
     scopes = ev.environment_path_scopes(name, path)
     if scopes is None:
         if os.path.isdir(path):  # directory with config files
-            cfg.push_scope(
-                spack.config.DirectoryConfigScope(name, path, writable=False),
-                priority=ConfigScopePriority.CUSTOM,
-            )
-            spack.config._add_platform_scope(
-                cfg, name, path, priority=ConfigScopePriority.CUSTOM, writable=False
-            )
+            cfg.push_scope(spack.config.DirectoryConfigScope(name, path, writable=False))
+            spack.config._add_platform_scope(cfg, name, path, writable=False)
             continue
         else:
             raise spack.error.ConfigError(f"Invalid configuration scope: {path}")
 
     for scope in scopes:
-        cfg.push_scope(scope, priority=ConfigScopePriority.CUSTOM)
+        cfg.push_scope(scope)
 
 
 def _main(argv=None):
@@ -959,9 +952,7 @@
     # Push scopes from the command line last
     if args.config_scopes:
         add_command_line_scopes(spack.config.CONFIG, args.config_scopes)
-    spack.config.CONFIG.push_scope(
-        spack.config.InternalConfigScope("command_line"), priority=ConfigScopePriority.COMMAND_LINE
-    )
+    spack.config.CONFIG.push_scope(spack.config.InternalConfigScope("command_line"))
     setup_main_options(args)
 
     # ------------------------------------------------------------------------
@@ -1007,7 +998,6 @@ def finish_parse_and_run(parser, cmd_name, main_args, env_format_error):
     args, unknown = parser.parse_known_args(main_args.command)
     # we need to inherit verbose since the install command checks for it
     args.verbose = main_args.verbose
-    args.lines = main_args.lines
 
     # Now that we know what command this is and what its args are, determine
     # whether we can continue with a bad environment and raise if not.
@@ -64,7 +64,7 @@ def from_local_path(path: str):
     @staticmethod
     def from_url(url: str):
         """Create an anonymous mirror by URL. This method validates the URL."""
-        if urllib.parse.urlparse(url).scheme not in supported_url_schemes:
+        if not urllib.parse.urlparse(url).scheme in supported_url_schemes:
             raise ValueError(
                 f'"{url}" is not a valid mirror URL. '
                 f"Scheme must be one of {supported_url_schemes}."
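The validation above boils down to a `urllib.parse.urlparse` scheme check. A hedged standalone sketch (the set of schemes here is invented for illustration; the real `supported_url_schemes` value is defined elsewhere in the module):

```python
from urllib.parse import urlparse

supported = {"file", "http", "https", "ftp", "s3", "gs", "oci"}  # illustrative set
for url in ("https://mirror.example.com/cache", "ssh://host/cache"):
    print(url, "->", urlparse(url).scheme in supported)
```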
@@ -330,17 +330,18 @@ class BaseConfiguration:
     default_projections = {"all": "{name}/{version}-{compiler.name}-{compiler.version}"}
 
     def __init__(self, spec: spack.spec.Spec, module_set_name: str, explicit: bool) -> None:
+        # Module where type(self) is defined
+        m = inspect.getmodule(self)
+        assert m is not None  # make mypy happy
+        self.module = m
         # Spec for which we want to generate a module file
         self.spec = spec
         self.name = module_set_name
         self.explicit = explicit
-        # Dictionary of configuration options that should be applied to the spec
+        # Dictionary of configuration options that should be applied
+        # to the spec
         self.conf = merge_config_rules(self.module.configuration(self.name), self.spec)
 
-    @property
-    def module(self):
-        return inspect.getmodule(self)
-
     @property
     def projections(self):
         """Projection from specs to module names"""
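Both variants rely on the same `inspect.getmodule(self)` behavior: called on an instance, it follows `type(self).__module__`, so a subclass defined in another file resolves to that file's module. A minimal sketch of the semantics (class names are illustrative):

```python
import inspect

class Base:
    def defining_module(self):
        # Resolves to the module where type(self) was *defined*; for a
        # subclass living in another file, that file's module, not this one.
        return inspect.getmodule(self)

class Child(Base):
    pass

print(Child().defining_module())  # <module '__main__'> here; the subclass's
# own module when Child is defined in a separate file.
```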
@@ -564,12 +565,6 @@ def __init__(self, configuration):
     def spec(self):
         return self.conf.spec
 
-    @tengine.context_property
-    def tags(self):
-        if not hasattr(self.spec.package, "tags"):
-            return []
-        return self.spec.package.tags
-
     @tengine.context_property
     def timestamp(self):
         return datetime.datetime.now()
@@ -780,6 +775,10 @@ def __init__(
     ) -> None:
         self.spec = spec
 
+        # This class is meant to be derived. Get the module of the
+        # actual writer.
+        self.module = inspect.getmodule(self)
+        assert self.module is not None  # make mypy happy
         m = self.module
 
         # Create the triplet of configuration/layout/context
@@ -817,10 +816,6 @@ def __init__(
             name = type(self).__name__
             raise ModulercHeaderNotDefined(msg.format(name))
 
-    @property
-    def module(self):
-        return inspect.getmodule(self)
-
     def _get_template(self):
         """Gets the template that will be rendered for this spec."""
         # Get templates and put them in the order of importance:
@@ -209,7 +209,7 @@ def provides(self):
         # All the other tokens in the hierarchy must be virtual dependencies
         for x in self.hierarchy_tokens:
             if self.spec.package.provides(x):
-                provides[x] = self.spec
+                provides[x] = self.spec[x]
         return provides
 
     @property
@@ -383,7 +383,6 @@ def create_opener():
     """Create an opener that can handle OCI authentication."""
     opener = urllib.request.OpenerDirector()
     for handler in [
-        urllib.request.ProxyHandler(),
         urllib.request.UnknownHandler(),
         urllib.request.HTTPSHandler(context=spack.util.web.ssl_create_default_context()),
         spack.util.web.SpackHTTPDefaultErrorHandler(),
@@ -2,64 +2,31 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 
-# flake8: noqa: F401, E402
-"""spack.package defines the public API for Spack packages, by re-exporting useful symbols from
-other modules. Packages should import this module, instead of importing from spack.* directly
-to ensure forward compatibility with future versions of Spack."""
+# flake8: noqa: F401
+"""spack.util.package is a set of useful build tools and directives for packages.
+
+Everything in this module is automatically imported into Spack package files.
+"""
 from os import chdir, environ, getcwd, makedirs, mkdir, remove, removedirs
 from shutil import move, rmtree
 
+from spack.error import InstallError, NoHeadersError, NoLibrariesError
+
+# Emulate some shell commands for convenience
+env = environ
+cd = chdir
+pwd = getcwd
+
 # import most common types used in packages
 from typing import Dict, List, Optional
 
-class tty:
-    import llnl.util.tty as _tty
-
-    debug = _tty.debug
-    error = _tty.error
-    info = _tty.info
-    msg = _tty.msg
-    warn = _tty.warn
-
-
-from llnl.util.filesystem import (
-    FileFilter,
-    FileList,
-    HeaderList,
-    LibraryList,
-    ancestor,
-    can_access,
-    change_sed_delimiter,
-    copy,
-    copy_tree,
-    filter_file,
-    find,
-    find_all_headers,
-    find_first,
-    find_headers,
-    find_libraries,
-    find_system_libraries,
-    force_remove,
-    force_symlink,
-    install,
-    install_tree,
-    is_exe,
-    join_path,
-    keep_modification_time,
-    library_extensions,
-    mkdirp,
-    remove_directory_contents,
-    remove_linked_tree,
-    rename,
-    set_executable,
-    set_install_permissions,
-    touch,
-    working_dir,
-)
+import llnl.util.filesystem
+from llnl.util.filesystem import *
 from llnl.util.symlink import symlink
 
+import spack.util.executable
+
+# These props will be overridden when the build env is set up.
 from spack.build_environment import MakeExecutable
 from spack.build_systems.aspell_dict import AspellDictPackage
 from spack.build_systems.autotools import AutotoolsPackage
@@ -109,24 +76,7 @@ class tty:
 from spack.builder import BaseBuilder
 from spack.config import determine_number_of_jobs
 from spack.deptypes import ALL_TYPES as all_deptypes
-from spack.directives import (
-    build_system,
-    can_splice,
-    conditional,
-    conflicts,
-    depends_on,
-    extends,
-    license,
-    maintainers,
-    patch,
-    provides,
-    redistribute,
-    requires,
-    resource,
-    variant,
-    version,
-)
-from spack.error import InstallError, NoHeadersError, NoLibrariesError
+from spack.directives import *
 from spack.install_test import (
     SkipTest,
     cache_extra_test_sources,
@@ -136,28 +86,26 @@ class tty:
     install_test_root,
     test_part,
 )
+from spack.installer import ExternalPackageError, InstallLockError, UpstreamPackageError
 from spack.mixins import filter_compiler_wrappers
 from spack.multimethod import default_args, when
-from spack.package_base import build_system_flags, env_flags, inject_flags, on_package_attributes
-from spack.package_completions import (
-    bash_completion_path,
-    fish_completion_path,
-    zsh_completion_path,
+from spack.package_base import (
+    DependencyConflictError,
+    build_system_flags,
+    env_flags,
+    flatten_dependencies,
+    inject_flags,
+    install_dependency_symlinks,
+    on_package_attributes,
 )
+from spack.package_completions import *
 from spack.phase_callbacks import run_after, run_before
-from spack.spec import Spec
-from spack.util.environment import EnvironmentModifications
-from spack.util.executable import Executable, ProcessError, which, which_string
+from spack.spec import InvalidSpecDetected, Spec
+from spack.util.executable import *
 from spack.util.filesystem import fix_darwin_install_name
-from spack.util.prefix import Prefix
 from spack.variant import any_combination_of, auto_or_any_combination_of, disjoint_sets
 from spack.version import Version, ver
 
-# Emulate some shell commands for convenience
-env = environ
-cd = chdir
-pwd = getcwd
-
 # These are just here for editor support; they may be set when the build env is set up.
 configure: Executable
 make_jobs: int
@@ -22,6 +22,7 @@
 import textwrap
 import time
 import traceback
+import typing
 from typing import Any, Callable, Dict, Iterable, List, Optional, Set, Tuple, Type, TypeVar, Union
 
 from typing_extensions import Literal
@@ -29,6 +30,7 @@
 import llnl.util.filesystem as fsys
 import llnl.util.tty as tty
 from llnl.util.lang import classproperty, memoized
+from llnl.util.link_tree import LinkTree
 
 import spack.compilers
 import spack.config
@@ -48,7 +50,6 @@
 import spack.store
 import spack.url
 import spack.util.environment
-import spack.util.executable
 import spack.util.path
 import spack.util.web
 import spack.variant
@@ -66,6 +67,10 @@
 ]
 FLAG_HANDLER_TYPE = Callable[[str, Iterable[str]], FLAG_HANDLER_RETURN_TYPE]
 
+"""Allowed URL schemes for spack packages."""
+_ALLOWED_URL_SCHEMES = ["http", "https", "ftp", "file", "git"]
+
+
 #: Filename for the Spack build/install log.
 _spack_build_logfile = "spack-build-out.txt"
 
@@ -126,10 +131,9 @@ def windows_establish_runtime_linkage(self):
         # Spack should in general not modify things it has not installed
         # we can reasonably expect externals to have their link interface properly established
         if sys.platform == "win32" and not self.spec.external:
-            win_rpath = fsys.WindowsSimulatedRPath(self)
-            win_rpath.add_library_dependent(*self.win_add_library_dependent())
-            win_rpath.add_rpath(*self.win_add_rpath())
-            win_rpath.establish_link()
+            self.win_rpath.add_library_dependent(*self.win_add_library_dependent())
+            self.win_rpath.add_rpath(*self.win_add_rpath())
+            self.win_rpath.establish_link()
 
 
 #: Registers which are the detectable packages, by repo and package name
@@ -698,6 +702,9 @@ class PackageBase(WindowsRPath, PackageViewMixin, metaclass=PackageMeta):
     #: Verbosity level, preserved across installs.
     _verbose = None
 
+    #: index of patches by sha256 sum, built lazily
+    _patches_by_hash = None
+
     #: Package homepage where users can find more information about the package
     homepage: Optional[str] = None
 
@@ -711,6 +718,19 @@ class PackageBase(WindowsRPath, PackageViewMixin, metaclass=PackageMeta):
     #: Do not include @ here in order not to unnecessarily ping the users.
     maintainers: List[str] = []
 
+    #: List of attributes to be excluded from a package's hash.
+    metadata_attrs = [
+        "homepage",
+        "url",
+        "urls",
+        "list_url",
+        "extendable",
+        "parallel",
+        "make_jobs",
+        "maintainers",
+        "tags",
+    ]
+
     #: Set to ``True`` to indicate the stand-alone test requires a compiler.
     #: It is used to ensure a compiler and build dependencies like 'cmake'
     #: are available to build a custom test code.
@@ -744,6 +764,7 @@ def __init__(self, spec):
         # Set up timing variables
         self._fetch_time = 0.0
 
+        self.win_rpath = fsys.WindowsSimulatedRPath(self)
         super().__init__()
 
     def __getitem__(self, key: str) -> "PackageBase":
@@ -809,6 +830,104 @@ def get_variant(self, name: str) -> spack.variant.Variant:
         except StopIteration:
             raise ValueError(f"No variant '{name}' on spec: {self.spec}")
 
+    @classmethod
+    def possible_dependencies(
+        cls,
+        transitive: bool = True,
+        expand_virtuals: bool = True,
+        depflag: dt.DepFlag = dt.ALL,
+        visited: Optional[dict] = None,
+        missing: Optional[dict] = None,
+        virtuals: Optional[set] = None,
+    ) -> Dict[str, Set[str]]:
+        """Return dict of possible dependencies of this package.
+
+        Args:
+            transitive (bool or None): return all transitive dependencies if
+                True, only direct dependencies if False (default True).
+            expand_virtuals (bool or None): expand virtual dependencies into
+                all possible implementations (default True)
+            depflag: dependency types to consider
+            visited (dict or None): dict of names of dependencies visited so
+                far, mapped to their immediate dependencies' names.
+            missing (dict or None): dict to populate with packages and their
+                *missing* dependencies.
+            virtuals (set): if provided, populate with virtuals seen so far.
+
+        Returns:
+            (dict): dictionary mapping dependency names to *their*
+                immediate dependencies
+
+        Each item in the returned dictionary maps a (potentially
+        transitive) dependency of this package to its possible
+        *immediate* dependencies. If ``expand_virtuals`` is ``False``,
+        virtual package names will be inserted as keys mapped to empty
+        sets of dependencies. Virtuals, if not expanded, are treated as
+        though they have no immediate dependencies.
+
+        Missing dependencies by default are ignored, but if a
+        missing dict is provided, it will be populated with package names
+        mapped to any dependencies they have that are in no
+        repositories. This is only populated if transitive is True.
+
+        Note: the returned dict *includes* the package itself.
+
+        """
+        visited = {} if visited is None else visited
+        missing = {} if missing is None else missing
+
+        visited.setdefault(cls.name, set())
+
+        for name, conditions in cls.dependencies_by_name(when=True).items():
+            # check whether this dependency could be of the type asked for
+            depflag_union = 0
+            for deplist in conditions.values():
+                for dep in deplist:
+                    depflag_union |= dep.depflag
+            if not (depflag & depflag_union):
+                continue
+
+            # expand virtuals if enabled, otherwise just stop at virtuals
+            if spack.repo.PATH.is_virtual(name):
+                if virtuals is not None:
+                    virtuals.add(name)
+                if expand_virtuals:
+                    providers = spack.repo.PATH.providers_for(name)
+                    dep_names = [spec.name for spec in providers]
+                else:
+                    visited.setdefault(cls.name, set()).add(name)
+                    visited.setdefault(name, set())
+                    continue
+            else:
+                dep_names = [name]
+
+            # add the dependency names to the visited dict
+            visited.setdefault(cls.name, set()).update(set(dep_names))
+
+            # recursively traverse dependencies
+            for dep_name in dep_names:
+                if dep_name in visited:
+                    continue
+
+                visited.setdefault(dep_name, set())
+
+                # skip the rest if not transitive
+                if not transitive:
+                    continue
+
+                try:
+                    dep_cls = spack.repo.PATH.get_pkg_class(dep_name)
+                except spack.repo.UnknownPackageError:
+                    # log unknown packages
+                    missing.setdefault(cls.name, set()).add(dep_name)
+                    continue
+
+                dep_cls.possible_dependencies(
+                    transitive, expand_virtuals, depflag, visited, missing, virtuals
+                )
+
+        return visited
+
     @classproperty
     def package_dir(cls):
         """Directory where the package.py file lives."""
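A hedged sketch of how the re-added classmethod might be driven (the package name and repository state are assumptions; the output depends entirely on the active repo):

```python
# Hypothetical usage, assuming an active Spack repo that provides `zlib`:
import spack.repo

pkg_cls = spack.repo.PATH.get_pkg_class("zlib")
missing, virtuals = {}, set()
deps = pkg_cls.possible_dependencies(
    transitive=True, expand_virtuals=True, missing=missing, virtuals=virtuals
)
# `deps` maps every (transitively) reachable package name, including "zlib"
# itself, to the names of its possible immediate dependencies.
for name in sorted(deps):
    print(name, "->", sorted(deps[name]))
```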
@@ -1288,13 +1407,12 @@ def extendee_spec(self):
         if not self.extendees:
             return None
 
+        deps = []
+
         # If the extendee is in the spec's deps already, return that.
-        deps = [
-            dep
-            for dep in self.spec.dependencies(deptype=("link", "run"))
-            for d, when in self.extendees.values()
-            if dep.satisfies(d) and self.spec.satisfies(when)
-        ]
+        for dep in self.spec.traverse(deptype=("link", "run")):
+            if dep.name in self.extendees:
+                deps.append(dep)
 
         if deps:
             assert len(deps) == 1
@@ -1371,14 +1489,6 @@ def home(self):
     def home(self):
         return self.prefix
 
-    @property
-    def command(self) -> spack.util.executable.Executable:
-        """Returns the main executable for this package."""
-        path = os.path.join(self.home.bin, self.spec.name)
-        if fsys.is_exe(path):
-            return spack.util.executable.Executable(path)
-        raise RuntimeError(f"Unable to locate {self.spec.name} command in {self.home.bin}")
-
     @property  # type: ignore[misc]
     @memoized
     def compiler(self):
@@ -2182,6 +2292,85 @@ def rpath_args(self):
    build_system_flags = PackageBase.build_system_flags


+def install_dependency_symlinks(pkg, spec, prefix):
+    """
+    Execute a dummy install and flatten dependencies.
+
+    This routine can be used in a ``package.py`` definition by setting
+    ``install = install_dependency_symlinks``.
+
+    This feature comes in handy for creating a common location for
+    the installation of third-party libraries.
+    """
+    flatten_dependencies(spec, prefix)
+
+
+def use_cray_compiler_names():
+    """Compiler names for builds that rely on cray compiler names."""
+    os.environ["CC"] = "cc"
+    os.environ["CXX"] = "CC"
+    os.environ["FC"] = "ftn"
+    os.environ["F77"] = "ftn"
+
+
+def flatten_dependencies(spec, flat_dir):
+    """Make each dependency of spec present in dir via symlink."""
+    for dep in spec.traverse(root=False):
+        name = dep.name
+
+        dep_path = spack.store.STORE.layout.path_for_spec(dep)
+        dep_files = LinkTree(dep_path)
+
+        os.mkdir(flat_dir + "/" + name)
+
+        conflict = dep_files.find_conflict(flat_dir + "/" + name)
+        if conflict:
+            raise DependencyConflictError(conflict)
+
+        dep_files.merge(flat_dir + "/" + name)
+
+
+def possible_dependencies(
+    *pkg_or_spec: Union[str, spack.spec.Spec, typing.Type[PackageBase]],
+    transitive: bool = True,
+    expand_virtuals: bool = True,
+    depflag: dt.DepFlag = dt.ALL,
+    missing: Optional[dict] = None,
+    virtuals: Optional[set] = None,
+) -> Dict[str, Set[str]]:
+    """Get the possible dependencies of a number of packages.
+
+    See ``PackageBase.possible_dependencies`` for details.
+    """
+    packages = []
+    for pos in pkg_or_spec:
+        if isinstance(pos, PackageMeta) and issubclass(pos, PackageBase):
+            packages.append(pos)
+            continue
+
+        if not isinstance(pos, spack.spec.Spec):
+            pos = spack.spec.Spec(pos)
+
+        if spack.repo.PATH.is_virtual(pos.name):
+            packages.extend(p.package_class for p in spack.repo.PATH.providers_for(pos.name))
+            continue
+        else:
+            packages.append(pos.package_class)
+
+    visited: Dict[str, Set[str]] = {}
+    for pkg in packages:
+        pkg.possible_dependencies(
+            visited=visited,
+            transitive=transitive,
+            expand_virtuals=expand_virtuals,
+            depflag=depflag,
+            missing=missing,
+            virtuals=virtuals,
+        )
+
+    return visited
+
+
 def deprecated_version(pkg: PackageBase, version: Union[str, StandardVersion]) -> bool:
     """Return True iff the version is deprecated.

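The docstring above prescribes the usage directly. A minimal, hypothetical `package.py` sketch (class name and dependency are invented, and it assumes `install_dependency_symlinks` is re-exported into the package API as the docstring implies):

    from spack.package import *


    class MyBundle(Package):
        """Hypothetical bundle whose install step only symlinks dependencies."""

        has_code = False  # nothing to fetch or build
        depends_on("zlib")

        # Per the docstring above: the install phase just flattens all
        # dependencies into this package's prefix via symlinks.
        install = install_dependency_symlinks

`possible_dependencies` above is the module-level counterpart of the `PackageBase` method. Assuming it lives in `spack.package_base`, a call would look like:

    import spack.package_base

    # Maps each package name to the set of its possible direct dependencies.
    deps = spack.package_base.possible_dependencies("zlib", transitive=True)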
@@ -83,7 +83,6 @@ def __init__(
        level: int,
        working_dir: str,
        reverse: bool = False,
-        ordering_key: Optional[Tuple[str, int]] = None,
    ) -> None:
        """Initialize a new Patch instance.

@@ -93,7 +92,6 @@ def __init__(
            level: patch level
            working_dir: relative path *within* the stage to change to
            reverse: reverse the patch
-            ordering_key: key used to ensure patches are applied in a consistent order
        """
        # validate level (must be an integer >= 0)
        if not isinstance(level, int) or not level >= 0:
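These arguments surface in the `patch` directive of a `package.py`. A hedged sketch, with a hypothetical package and patch file:

    from spack.package import *


    class Example(Package):
        # level=1 strips one leading path component, working_dir applies the
        # patch from a subdirectory of the expanded source, and reverse=True
        # would un-apply the patch instead.
        patch("fix-build.patch", level=1, working_dir="src")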
@@ -107,13 +105,6 @@ def __init__(
        self.working_dir = working_dir
        self.reverse = reverse

-        # The ordering key is passed when executing package.py directives, and is only relevant
-        # after a solve to build concrete specs with consistently ordered patches. For concrete
-        # specs read from a file, we add patches in the order of its patches variants and the
-        # ordering_key is irrelevant. In that case, use a default value so we don't need to branch
-        # on whether ordering_key is None where it's used, just to make static analysis happy.
-        self.ordering_key: Tuple[str, int] = ordering_key or ("", 0)
-
    def apply(self, stage: "spack.stage.Stage") -> None:
        """Apply a patch to source in a stage.

@@ -211,8 +202,9 @@ def __init__(
            msg += "package %s.%s does not exist." % (pkg.namespace, pkg.name)
            raise ValueError(msg)

-        super().__init__(pkg, abs_path, level, working_dir, reverse, ordering_key)
+        super().__init__(pkg, abs_path, level, working_dir, reverse)
        self.path = abs_path
+        self.ordering_key = ordering_key

    @property
    def sha256(self) -> str:
@@ -274,11 +266,13 @@ def __init__(
            archive_sha256: sha256 sum of the *archive*, if the patch is compressed
                (only required for compressed URL patches)
        """
-        super().__init__(pkg, url, level, working_dir, reverse, ordering_key)
+        super().__init__(pkg, url, level, working_dir, reverse)

        self.url = url
        self._stage: Optional["spack.stage.Stage"] = None

+        self.ordering_key = ordering_key
+
        if allowed_archive(self.url) and not archive_sha256:
            raise spack.error.PatchDirectiveError(
                "Compressed patches require 'archive_sha256' "
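The error path above is why URL patches that point at compressed archives carry two checksums. A hypothetical directive (the URL and digests are placeholders, not real values):

    # sha256 covers the decompressed patch; archive_sha256 covers the
    # downloaded .gz archive itself.
    patch(
        "https://example.com/fixes/build.patch.gz",
        sha256="00" * 32,          # placeholder digest
        archive_sha256="11" * 32,  # placeholder digest
    )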
@@ -108,8 +108,6 @@ def _get_user_cache_path():
 #: transient caches for Spack data (virtual cache, patch sha256 lookup, etc.)
 default_misc_cache_path = os.path.join(user_cache_path, "cache")

-#: concretization cache for Spack concretizations
-default_conc_cache_path = os.path.join(default_misc_cache_path, "concretization")

 # Below paths pull configuration from the host environment.
 #
@@ -52,7 +52,8 @@ def use_platform(new_platform):

    import spack.config

-    assert isinstance(new_platform, Platform), f'"{new_platform}" must be an instance of Platform'
+    msg = '"{0}" must be an instance of Platform'
+    assert isinstance(new_platform, Platform), msg.format(new_platform)

    original_host_fn = host

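`use_platform` swaps the detected host platform for the duration of a block, which is why it validates its argument up front. A sketch of typical test-suite style usage, assuming the public `spack.platforms` entry points:

    import spack.platforms

    with spack.platforms.use_platform(spack.platforms.Test()):
        ...  # code in this block sees the "test" platform as the host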
@@ -1,22 +1,42 @@
 # Copyright Spack Project Developers. See COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
-import warnings
 from typing import Optional

 import archspec.cpu

 import llnl.util.lang

+import spack.error
+
+
+class NoPlatformError(spack.error.SpackError):
+    def __init__(self):
+        msg = "Could not determine a platform for this machine"
+        super().__init__(msg)
+
+
 @llnl.util.lang.lazy_lexicographic_ordering
 class Platform:
     """Platform is an abstract class extended by subclasses.

+    To add a new type of platform (such as cray_xe), create a subclass and set all the
+    class attributes such as priority, front_target, back_target, front_os, back_os.
+
     Platforms also contain a priority class attribute. A lower number signifies higher
     priority. These numbers are arbitrarily set and can be changed, though often there
     isn't much need unless a new platform is added and the user wants that to be
     detected first.

+    Targets are created inside the platform subclasses. Most architectures (like linux
+    and darwin) will have only one target family (x86_64), but in the case of Cray
+    machines there is both a frontend and backend processor. The user can specify
+    which targets are present on front-end and back-end architecture.
+
+    Depending on the platform, operating systems are either autodetected or are
+    set. The user can set the frontend and backend operating systems by the class
+    attributes front_os and back_os. The operating system will be responsible for
+    compiler detection.
     """

     # Subclass sets number. Controls detection order
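Following the subclassing recipe in the docstring, a minimal hypothetical platform would look like the sketch below (the name and priority are invented for illustration, not part of this diff):

    class MyMachine(Platform):  # hypothetical subclass
        priority = 999  # lower numbers are detected first

        def __init__(self):
            super().__init__("mymachine")

        @classmethod
        def detect(cls):
            # Each subclass must decide whether it matches the host.
            return False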
@@ -25,72 +45,82 @@ class Platform:
    #: binary formats used on this platform; used by relocation logic
    binary_formats = ["elf"]

-    default: str
-    default_os: str
+    front_end: Optional[str] = None
+    back_end: Optional[str] = None
+    default: Optional[str] = None  # The default back end target.
+
+    front_os: Optional[str] = None
+    back_os: Optional[str] = None
+    default_os: Optional[str] = None

    reserved_targets = ["default_target", "frontend", "fe", "backend", "be"]
    reserved_oss = ["default_os", "frontend", "fe", "backend", "be"]
-    deprecated_names = ["frontend", "fe", "backend", "be"]

    def __init__(self, name):
        self.targets = {}
        self.operating_sys = {}
        self.name = name
-        self._init_targets()

    def add_target(self, name: str, target: archspec.cpu.Microarchitecture) -> None:
+        """Used by the platform specific subclass to list available targets.
+        Raises an error if the platform specifies a name
+        that is reserved by spack as an alias.
+        """
        if name in Platform.reserved_targets:
-            msg = f"{name} is a spack reserved alias and cannot be the name of a target"
-            raise ValueError(msg)
+            msg = "{0} is a spack reserved alias and cannot be the name of a target"
+            raise ValueError(msg.format(name))
        self.targets[name] = target

-    def _init_targets(self):
-        self.default = archspec.cpu.host().name
+    def _add_archspec_targets(self):
        for name, microarchitecture in archspec.cpu.TARGETS.items():
            self.add_target(name, microarchitecture)

    def target(self, name):
+        """This is a getter method for the target dictionary
+        that handles defaulting based on the values provided by default,
+        front-end, and back-end. This can be overridden
+        by a subclass for which we want to provide further aliasing options.
+        """
+        # TODO: Check if we can avoid using strings here
        name = str(name)
-        if name in Platform.deprecated_names:
-            warnings.warn(f"target={name} is deprecated, use target={self.default} instead")
-
-        if name in Platform.reserved_targets:
+        if name == "default_target":
            name = self.default
+        elif name == "frontend" or name == "fe":
+            name = self.front_end
+        elif name == "backend" or name == "be":
+            name = self.back_end

        return self.targets.get(name, None)

    def add_operating_system(self, name, os_class):
-        if name in Platform.reserved_oss + Platform.deprecated_names:
-            msg = f"{name} is a spack reserved alias and cannot be the name of an OS"
-            raise ValueError(msg)
+        """Add the operating_system class object into the
+        platform.operating_sys dictionary.
+        """
+        if name in Platform.reserved_oss:
+            msg = "{0} is a spack reserved alias and cannot be the name of an OS"
+            raise ValueError(msg.format(name))
        self.operating_sys[name] = os_class

-    def default_target(self):
-        return self.target(self.default)
-
-    def default_operating_system(self):
-        return self.operating_system(self.default_os)
-
    def operating_system(self, name):
-        if name in Platform.deprecated_names:
-            warnings.warn(f"os={name} is deprecated, use os={self.default_os} instead")
-
-        if name in Platform.reserved_oss:
+        if name == "default_os":
            name = self.default_os
+        if name == "frontend" or name == "fe":
+            name = self.front_os
+        if name == "backend" or name == "be":
+            name = self.back_os

        return self.operating_sys.get(name, None)

    def setup_platform_environment(self, pkg, env):
-        """Platform-specific build environment modifications.
-        This method is meant to be overridden by subclasses, when needed.
+        """Subclass can override this method if it requires any
+        platform-specific build environment modifications.
        """
        pass

    @classmethod
    def detect(cls):
-        """Returns True if the host platform is detected to be the current Platform class,
-        False otherwise.
+        """Return True if the host platform is detected to be the current
+        Platform class, False otherwise.

        Derived classes are responsible for implementing this method.
        """
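Both sides of this hunk keep the reserved-alias lookup working; only the set of aliases differs. Assuming the detected host platform instance, resolution looks like:

    import spack.platforms

    host = spack.platforms.host()        # the detected Platform instance
    host.target("default_target")        # -> the default microarchitecture target
    host.operating_system("default_os")  # -> the detected operating system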
@@ -105,7 +135,11 @@ def __str__(self):
    def _cmp_iter(self):
        yield self.name
        yield self.default
+        yield self.front_end
+        yield self.back_end
        yield self.default_os
+        yield self.front_os
+        yield self.back_os

        def targets():
            for t in sorted(self.targets.values()):
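`lazy_lexicographic_ordering` derives equality, ordering, and hashing from the sequence `_cmp_iter` yields, so the added yields make the front/back ends part of a platform's identity. A toy sketch of the same mechanism, assuming the decorator behaves as it is used above:

    import llnl.util.lang


    @llnl.util.lang.lazy_lexicographic_ordering
    class Point:
        def __init__(self, x, y):
            self.x, self.y = x, y

        def _cmp_iter(self):
            yield self.x
            yield self.y


    assert Point(1, 2) < Point(1, 3)  # compared field by field, lazily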
@@ -4,6 +4,8 @@

 import platform as py_platform

+import archspec.cpu
+
 from spack.operating_systems.mac_os import MacOs
 from spack.version import Version

@@ -17,8 +19,18 @@ class Darwin(Platform):

    def __init__(self):
        super().__init__("darwin")
+        self._add_archspec_targets()
+
+        self.default = archspec.cpu.host().name
+        self.front_end = self.default
+        self.back_end = self.default
+
        mac_os = MacOs()
+
        self.default_os = str(mac_os)
+        self.front_os = str(mac_os)
+        self.back_os = str(mac_os)
+
        self.add_operating_system(str(mac_os), mac_os)

    @classmethod
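All three Unix platform initializers in this compare resolve the default target the same way, via archspec host detection:

    import archspec.cpu

    # What ``self.default = archspec.cpu.host().name`` evaluates to:
    print(archspec.cpu.host().name)  # e.g. "m1" or "skylake", host-dependent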
@@ -3,6 +3,8 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 import platform

+import archspec.cpu
+
 from spack.operating_systems.freebsd import FreeBSDOs

 from ._platform import Platform
@@ -13,8 +15,18 @@ class FreeBSD(Platform):

    def __init__(self):
        super().__init__("freebsd")
+
+        self._add_archspec_targets()
+
+        # Get specific default
+        self.default = archspec.cpu.host().name
+        self.front_end = self.default
+        self.back_end = self.default
+
        os = FreeBSDOs()
        self.default_os = str(os)
+        self.front_os = self.default_os
+        self.back_os = self.default_os
        self.add_operating_system(str(os), os)

    @classmethod
@@ -3,6 +3,8 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 import platform

+import archspec.cpu
+
 from spack.operating_systems.linux_distro import LinuxDistro

 from ._platform import Platform
@@ -13,8 +15,18 @@ class Linux(Platform):

    def __init__(self):
        super().__init__("linux")
+
+        self._add_archspec_targets()
+
+        # Get specific default
+        self.default = archspec.cpu.host().name
+        self.front_end = self.default
+        self.back_end = self.default
+
        linux_dist = LinuxDistro()
        self.default_os = str(linux_dist)
+        self.front_os = self.default_os
+        self.back_os = self.default_os
        self.add_operating_system(str(linux_dist), linux_dist)

    @classmethod
@@ -16,19 +16,31 @@ class Test(Platform):
    if platform.system().lower() == "darwin":
        binary_formats = ["macho"]

+    if platform.machine() == "arm64":
+        front_end = "aarch64"
+        back_end = "m1"
+        default = "m1"
+    else:
+        front_end = "x86_64"
+        back_end = "core2"
+        default = "core2"
+
+    front_os = "redhat6"
+    back_os = "debian6"
    default_os = "debian6"
-    default = "m1" if platform.machine() == "arm64" else "core2"

    def __init__(self, name=None):
        name = name or "test"
        super().__init__(name)
-        self.add_operating_system("debian6", spack.operating_systems.OperatingSystem("debian", 6))
-        self.add_operating_system("redhat6", spack.operating_systems.OperatingSystem("redhat", 6))
+        self.add_target(self.default, archspec.cpu.TARGETS[self.default])
+        self.add_target(self.front_end, archspec.cpu.TARGETS[self.front_end])

-    def _init_targets(self):
-        targets = ("aarch64", "m1") if platform.machine() == "arm64" else ("x86_64", "core2")
-        for t in targets:
-            self.add_target(t, archspec.cpu.TARGETS[t])
+        self.add_operating_system(
+            self.default_os, spack.operating_systems.OperatingSystem("debian", 6)
+        )
+        self.add_operating_system(
+            self.front_os, spack.operating_systems.OperatingSystem("redhat", 6)
+        )

    @classmethod
    def detect(cls):
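The Test platform hard-codes its targets and operating systems, so either side of this hunk yields a deterministic platform for unit tests. A quick sketch of what it exposes:

    import spack.platforms

    test = spack.platforms.Test()
    # Per the class attributes above: "m1"/"debian6" on arm64 hosts,
    # "core2"/"debian6" elsewhere.
    print(test.default, test.default_os)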
Some files were not shown because too many files have changed in this diff.