Compare commits
9 Commits
before-com...packages/c

Commits (SHA1):
026521132f
649d6f901c
ff9d763de5
3e466a5d66
ec9d3f799e
4928acf3a4
248b99c1d4
340a7a564c
384afd559b
18 .github/workflows/build-containers.yml vendored
@@ -40,17 +40,17 @@ jobs:
# 1: Platforms to build for
# 2: Base image (e.g. ubuntu:22.04)
dockerfile: [[amazon-linux, 'linux/amd64,linux/arm64', 'amazonlinux:2'],
[centos-stream9, 'linux/amd64,linux/arm64', 'centos:stream9'],
[leap15, 'linux/amd64,linux/arm64', 'opensuse/leap:15'],
[ubuntu-focal, 'linux/amd64,linux/arm64', 'ubuntu:20.04'],
[ubuntu-jammy, 'linux/amd64,linux/arm64', 'ubuntu:22.04'],
[ubuntu-noble, 'linux/amd64,linux/arm64', 'ubuntu:24.04'],
[almalinux8, 'linux/amd64,linux/arm64', 'almalinux:8'],
[almalinux9, 'linux/amd64,linux/arm64', 'almalinux:9'],
[centos-stream9, 'linux/amd64,linux/arm64,linux/ppc64le', 'centos:stream9'],
[leap15, 'linux/amd64,linux/arm64,linux/ppc64le', 'opensuse/leap:15'],
[ubuntu-focal, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:20.04'],
[ubuntu-jammy, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:22.04'],
[ubuntu-noble, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:24.04'],
[almalinux8, 'linux/amd64,linux/arm64,linux/ppc64le', 'almalinux:8'],
[almalinux9, 'linux/amd64,linux/arm64,linux/ppc64le', 'almalinux:9'],
[rockylinux8, 'linux/amd64,linux/arm64', 'rockylinux:8'],
[rockylinux9, 'linux/amd64,linux/arm64', 'rockylinux:9'],
[fedora39, 'linux/amd64,linux/arm64', 'fedora:39'],
[fedora40, 'linux/amd64,linux/arm64', 'fedora:40']]
[fedora39, 'linux/amd64,linux/arm64,linux/ppc64le', 'fedora:39'],
[fedora40, 'linux/amd64,linux/arm64,linux/ppc64le', 'fedora:40']]
name: Build ${{ matrix.dockerfile[0] }}
if: github.repository == 'spack/spack'
steps:
21 .github/workflows/ci.yaml vendored
@@ -9,7 +9,6 @@ on:
branches:
- develop
- releases/**
merge_group:

concurrency:
group: ci-${{github.ref}}-${{github.event.pull_request.number || github.run_number}}
@@ -26,17 +25,13 @@ jobs:
packages: ${{ steps.filter.outputs.packages }}
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
if: ${{ github.event_name == 'push' || github.event_name == 'merge_group' }}
if: ${{ github.event_name == 'push' }}
with:
fetch-depth: 0
# For pull requests it's not necessary to checkout the code
- uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36
id: filter
with:
# For merge group events, compare against the target branch (main)
base: ${{ github.event_name == 'merge_group' && github.event.merge_group.base_ref || '' }}
# For merge group events, use the merge group head ref
ref: ${{ github.event_name == 'merge_group' && github.event.merge_group.head_sha || github.ref }}
# See https://github.com/dorny/paths-filter/issues/56 for the syntax used below
# Don't run if we only modified packages in the
# built-in repository or documentation
@@ -81,15 +76,10 @@ jobs:

prechecks:
needs: [ changes ]
uses: ./.github/workflows/prechecks.yml
uses: ./.github/workflows/valid-style.yml
secrets: inherit
with:
with_coverage: ${{ needs.changes.outputs.core }}
with_packages: ${{ needs.changes.outputs.packages }}

import-check:
needs: [ changes ]
uses: ./.github/workflows/import-check.yaml

all-prechecks:
needs: [ prechecks ]
@@ -99,7 +89,7 @@ jobs:
- name: Success
run: |
if [ "${{ needs.prechecks.result }}" == "failure" ] || [ "${{ needs.prechecks.result }}" == "canceled" ]; then
echo "Unit tests failed."
echo "Unit tests failed."
exit 1
else
exit 0
@@ -107,7 +97,6 @@ jobs:

coverage:
needs: [ unit-tests, prechecks ]
if: ${{ needs.changes.outputs.core }}
uses: ./.github/workflows/coverage.yml
secrets: inherit

@@ -120,10 +109,10 @@ jobs:
- name: Status summary
run: |
if [ "${{ needs.unit-tests.result }}" == "failure" ] || [ "${{ needs.unit-tests.result }}" == "canceled" ]; then
echo "Unit tests failed."
echo "Unit tests failed."
exit 1
elif [ "${{ needs.bootstrap.result }}" == "failure" ] || [ "${{ needs.bootstrap.result }}" == "canceled" ]; then
echo "Bootstrap tests failed."
echo "Bootstrap tests failed."
exit 1
else
exit 0
3 .github/workflows/coverage.yml vendored
@@ -29,8 +29,7 @@ jobs:
- run: coverage xml

- name: "Upload coverage report to CodeCov"
uses: codecov/codecov-action@1e68e06f1dbfde0e4cefc87efeba9e4643565303
uses: codecov/codecov-action@05f5a9cfad807516dbbef9929c4a42df3eb78766
with:
verbose: true
fail_ci_if_error: false
token: ${{ secrets.CODECOV_TOKEN }}
49 .github/workflows/import-check.yaml vendored
@@ -1,49 +0,0 @@
name: import-check

on:
workflow_call:

jobs:
# Check we don't make the situation with circular imports worse
import-check:
runs-on: ubuntu-latest
steps:
- uses: julia-actions/setup-julia@v2
with:
version: '1.10'
- uses: julia-actions/cache@v2

# PR: use the base of the PR as the old commit
- name: Checkout PR base commit
if: github.event_name == 'pull_request'
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
ref: ${{ github.event.pull_request.base.sha }}
path: old
# not a PR: use the previous commit as the old commit
- name: Checkout previous commit
if: github.event_name != 'pull_request'
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
fetch-depth: 2
path: old
- name: Checkout previous commit
if: github.event_name != 'pull_request'
run: git -C old reset --hard HEAD^

- name: Checkout new commit
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
path: new
- name: Install circular import checker
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
repository: haampie/circular-import-fighter
ref: 4cdb0bf15f04ab6b49041d5ef1bfd9644cce7f33
path: circular-import-fighter
- name: Install dependencies
working-directory: circular-import-fighter
run: make -j dependencies
- name: Circular import check
working-directory: circular-import-fighter
run: make -j compare "SPACK_ROOT=../old ../new"
@@ -1,7 +1,7 @@
black==25.1.0
black==24.10.0
clingo==5.7.1
flake8==7.1.2
isort==6.0.1
mypy==1.15.0
types-six==1.17.0.20250304
flake8==7.1.1
isort==5.13.2
mypy==1.8.0
types-six==1.17.0.20241205
vermin==1.6.0
@@ -1,4 +1,4 @@
name: prechecks
name: style

on:
workflow_call:
@@ -6,9 +6,6 @@ on:
with_coverage:
required: true
type: string
with_packages:
required: true
type: string

concurrency:
group: style-${{github.ref}}-${{github.event.pull_request.number || github.run_number}}
@@ -23,7 +20,7 @@ jobs:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
- uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
with:
python-version: '3.13'
python-version: '3.11'
cache: 'pip'
- name: Install Python Packages
run: |
@@ -33,7 +30,6 @@ jobs:
run: vermin --backport importlib --backport argparse --violations --backport typing -t=3.6- -vvv lib/spack/spack/ lib/spack/llnl/ bin/
- name: vermin (Repositories)
run: vermin --backport importlib --backport argparse --violations --backport typing -t=3.6- -vvv var/spack/repos

# Run style checks on the files that have been changed
style:
runs-on: ubuntu-latest
@@ -43,7 +39,7 @@ jobs:
fetch-depth: 0
- uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
with:
python-version: '3.13'
python-version: '3.11'
cache: 'pip'
- name: Install Python packages
run: |
@@ -57,25 +53,12 @@ jobs:
- name: Run style tests
run: |
share/spack/qa/run-style-tests

audit:
uses: ./.github/workflows/audit.yaml
secrets: inherit
with:
with_coverage: ${{ inputs.with_coverage }}
python_version: '3.13'

verify-checksums:
if: ${{ inputs.with_packages == 'true' }}
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
with:
fetch-depth: 2
- name: Verify Added Checksums
run: |
bin/spack ci verify-versions HEAD^1 HEAD

python_version: '3.11'
# Check that spack can bootstrap the development environment on Python 3.6 - RHEL8
bootstrap-dev-rhel8:
runs-on: ubuntu-latest
@@ -103,6 +86,66 @@ jobs:
spack -d bootstrap now --dev
spack -d style -t black
spack unit-test -V
# Check we don't make the situation with circular imports worse
import-check:
runs-on: ubuntu-latest
steps:
- uses: julia-actions/setup-julia@v2
with:
version: '1.10'
- uses: julia-actions/cache@v2

# PR: use the base of the PR as the old commit
- name: Checkout PR base commit
if: github.event_name == 'pull_request'
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
ref: ${{ github.event.pull_request.base.sha }}
path: old
# not a PR: use the previous commit as the old commit
- name: Checkout previous commit
if: github.event_name != 'pull_request'
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
fetch-depth: 2
path: old
- name: Checkout previous commit
if: github.event_name != 'pull_request'
run: git -C old reset --hard HEAD^

- name: Checkout new commit
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
path: new
- name: Install circular import checker
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
repository: haampie/circular-import-fighter
ref: b5d6ce9be35f602cca7d5a6aa0259fca10639cca
path: circular-import-fighter
- name: Install dependencies
working-directory: circular-import-fighter
run: make -j dependencies
- name: Problematic imports before
working-directory: circular-import-fighter
run: make SPACK_ROOT=../old SUFFIX=.old
- name: Problematic imports after
working-directory: circular-import-fighter
run: make SPACK_ROOT=../new SUFFIX=.new
- name: Compare import cycles
working-directory: circular-import-fighter
run: |
edges_before="$(head -n1 solution.old)"
edges_after="$(head -n1 solution.new)"
if [ "$edges_after" -gt "$edges_before" ]; then
printf '\033[1;31mImport check failed: %s imports need to be deleted, ' "$edges_after"
printf 'previously this was %s\033[0m\n' "$edges_before"
printf 'Compare \033[1;97m"Problematic imports before"\033[0m and '
printf '\033[1;97m"Problematic imports after"\033[0m.\n'
exit 1
else
printf '\033[1;32mImport check passed: %s <= %s\033[0m\n' "$edges_after" "$edges_before"
fi

# Further style checks from pylint
pylint:
1 .gitignore vendored
@@ -201,6 +201,7 @@ tramp

# Org-mode
.org-id-locations
*_archive

# flymake-mode
*_flymake.*
@@ -25,6 +25,7 @@ exit 1
# The code above runs this file with our preferred python interpreter.

import os
import os.path
import sys

min_python3 = (3, 6)
@@ -43,28 +43,6 @@ concretizer:
# (e.g. py-setuptools, cmake etc.)
# "full" (experimental): allows separation of the entire build-tool stack (e.g. the entire "cmake" subDAG)
strategy: minimal
# Maximum number of duplicates in a DAG, when using a strategy that allows duplicates. "default" is the
# number used if there isn't a more specific alternative
max_dupes:
default: 1
# Virtuals
c: 2
cxx: 2
fortran: 1
# Regular packages
cmake: 2
gmake: 2
python: 2
python-venv: 2
py-cython: 2
py-flit-core: 2
py-pip: 2
py-setuptools: 2
py-wheel: 2
xcb-proto: 2
# Compilers
gcc: 2
llvm: 2
# Option to specify compatibility between operating systems for reuse of compilers and packages
# Specified as a key: [list] where the key is the os that is being targeted, and the list contains the OS's
# it can reuse. Note this is a directional compatibility so mutual compatibility between two OS's
@@ -85,7 +63,3 @@ concretizer:
# Setting this to false yields unreproducible results, so we advise to use that value only
# for debugging purposes (e.g. check which constraints can help Spack concretize faster).
error_on_timeout: true

# Static analysis may reduce the concretization time by generating smaller ASP problems, in
# cases where there are requirements that prevent part of the search space to be explored.
static_analysis: false
@@ -36,7 +36,7 @@ packages:
go-or-gccgo-bootstrap: [go-bootstrap, gcc]
iconv: [libiconv]
ipp: [intel-oneapi-ipp]
java: [openjdk, jdk]
java: [openjdk, jdk, ibm-java]
jpeg: [libjpeg-turbo, libjpeg]
lapack: [openblas, amdlibflame]
libc: [glibc, musl]
@@ -73,27 +73,15 @@ packages:
permissions:
read: world
write: user
cray-fftw:
buildable: false
cray-libsci:
buildable: false
cray-mpich:
buildable: false
cray-mvapich2:
buildable: false
cray-pmi:
buildable: false
egl:
buildable: false
essl:
buildable: false
fujitsu-mpi:
buildable: false
fujitsu-ssl2:
buildable: false
hpcx-mpi:
buildable: false
mpt:
buildable: false
spectrum-mpi:
buildable: false
@@ -1,5 +1,5 @@
config:
locks: false
build_stage::
- '$user_cache_path/stage'
- '$spack/.staging'
stage_name: '{name}-{version}-{hash:7}'
@@ -1761,24 +1761,19 @@ Verifying installations
|
||||
The ``spack verify`` command can be used to verify the validity of
|
||||
Spack-installed packages any time after installation.
|
||||
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
``spack verify manifest``
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
At installation time, Spack creates a manifest of every file in the
|
||||
installation prefix. For links, Spack tracks the mode, ownership, and
|
||||
destination. For directories, Spack tracks the mode, and
|
||||
ownership. For files, Spack tracks the mode, ownership, modification
|
||||
time, hash, and size. The ``spack verify manifest`` command will check,
|
||||
for every file in each package, whether any of those attributes have
|
||||
changed. It will also check for newly added files or deleted files from
|
||||
the installation prefix. Spack can either check all installed packages
|
||||
time, hash, and size. The Spack verify command will check, for every
|
||||
file in each package, whether any of those attributes have changed. It
|
||||
will also check for newly added files or deleted files from the
|
||||
installation prefix. Spack can either check all installed packages
|
||||
using the `-a,--all` or accept specs listed on the command line to
|
||||
verify.
|
||||
|
||||
The ``spack verify manifest`` command can also verify for individual files
|
||||
that they haven't been altered since installation time. If the given file
|
||||
The ``spack verify`` command can also verify for individual files that
|
||||
they haven't been altered since installation time. If the given file
|
||||
is not in a Spack installation prefix, Spack will report that it is
|
||||
not owned by any package. To check individual files instead of specs,
|
||||
use the ``-f,--files`` option.
|
||||
@@ -1793,22 +1788,6 @@ check only local packages (as opposed to those used transparently from
|
||||
``upstream`` spack instances) and the ``-j,--json`` option to output
|
||||
machine-readable json data for any errors.
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
``spack verify libraries``
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
The ``spack verify libraries`` command can be used to verify that packages
|
||||
do not have accidental system dependencies. This command scans the install
|
||||
prefixes of packages for executables and shared libraries, and resolves
|
||||
their needed libraries in their RPATHs. When needed libraries cannot be
|
||||
located, an error is reported. This typically indicates that a package
|
||||
was linked against a system library, instead of a library provided by
|
||||
a Spack package.
|
||||
|
||||
This verification can also be enabled as a post-install hook by setting
|
||||
``config:shared_linking:missing_library_policy`` to ``error`` or ``warn``
|
||||
in :ref:`config.yaml <config-yaml>`.
|
||||
|
||||
-----------------------
|
||||
Filesystem requirements
|
||||
-----------------------
|
||||
|
@@ -170,7 +170,7 @@ bootstrapping.
|
||||
To register the mirror on the platform where it's supposed to be used run the following command(s):
|
||||
% spack bootstrap add --trust local-sources /opt/bootstrap/metadata/sources
|
||||
% spack bootstrap add --trust local-binaries /opt/bootstrap/metadata/binaries
|
||||
% spack buildcache update-index /opt/bootstrap/bootstrap_cache
|
||||
|
||||
|
||||
This command needs to be run on a machine with internet access and the resulting folder
|
||||
has to be moved over to the air-gapped system. Once the local sources are added using the
|
||||
|
@@ -272,9 +272,9 @@ often lists dependencies and the flags needed to locate them. The
|
||||
"environment variables" section lists environment variables that the
|
||||
build system uses to pass flags to the compiler and linker.
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
Adding flags to configure
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
Addings flags to configure
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
For most of the flags you encounter, you will want a variant to
|
||||
optionally enable/disable them. You can then optionally pass these
|
||||
@@ -285,7 +285,7 @@ function like so:
|
||||
|
||||
def configure_args(self):
|
||||
args = []
|
||||
...
|
||||
|
||||
if self.spec.satisfies("+mpi"):
|
||||
args.append("--enable-mpi")
|
||||
else:
|
||||
@@ -299,10 +299,7 @@ Alternatively, you can use the :ref:`enable_or_disable <autotools_enable_or_dis
|
||||
.. code-block:: python
|
||||
|
||||
def configure_args(self):
|
||||
args = []
|
||||
...
|
||||
args.extend(self.enable_or_disable("mpi"))
|
||||
return args
|
||||
return [self.enable_or_disable("mpi")]
|
||||
|
||||
|
||||
Note that we are explicitly disabling MPI support if it is not
|
||||
@@ -347,14 +344,7 @@ typically used to enable or disable some feature within the package.
|
||||
default=False,
|
||||
description="Memchecker support for debugging [degrades performance]"
|
||||
)
|
||||
...
|
||||
|
||||
def configure_args(self):
|
||||
args = []
|
||||
...
|
||||
args.extend(self.enable_or_disable("memchecker"))
|
||||
|
||||
return args
|
||||
config_args.extend(self.enable_or_disable("memchecker"))
|
||||
|
||||
In this example, specifying the variant ``+memchecker`` will generate
|
||||
the following configuration options:
|
||||
|
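The hunk above only shows fragments of ``configure_args``. As a quick aid to readers, here is a self-contained sketch of the ``enable_or_disable`` idiom the changed docs describe; the package name, URL and checksum are placeholders invented for illustration, not part of this diff.

.. code-block:: python

    # Illustrative only -- hypothetical package, placeholder URL and checksum.
    from spack.package import *


    class Foo(AutotoolsPackage):
        """Toy package showing a variant wired into configure flags."""

        homepage = "https://example.com/foo"
        url = "https://example.com/foo-1.0.tar.gz"

        version("1.0", sha256="0000000000000000000000000000000000000000000000000000000000000000")

        variant("mpi", default=False, description="Build with MPI support")

        def configure_args(self):
            args = []
            # expands to --enable-mpi or --disable-mpi depending on the spec
            args.extend(self.enable_or_disable("mpi"))
            return args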
@@ -56,13 +56,13 @@ If you look at the ``perl`` package, you'll see:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
phases = ("configure", "build", "install")
|
||||
phases = ["configure", "build", "install"]
|
||||
|
||||
Similarly, ``cmake`` defines:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
phases = ("bootstrap", "build", "install")
|
||||
phases = ["bootstrap", "build", "install"]
|
||||
|
||||
If we look at the ``cmake`` example, this tells Spack's ``PackageBase``
|
||||
class to run the ``bootstrap``, ``build``, and ``install`` functions
|
||||
|
@@ -223,10 +223,6 @@ def setup(sphinx):
|
||||
("py:class", "spack.compiler.CompilerCache"),
|
||||
# TypeVar that is not handled correctly
|
||||
("py:class", "llnl.util.lang.T"),
|
||||
("py:class", "llnl.util.lang.KT"),
|
||||
("py:class", "llnl.util.lang.VT"),
|
||||
("py:obj", "llnl.util.lang.KT"),
|
||||
("py:obj", "llnl.util.lang.VT"),
|
||||
]
|
||||
|
||||
# The reST default role (used for this markup: `text`) to use for all documents.
|
||||
|
@@ -125,8 +125,6 @@ are stored in ``$spack/var/spack/cache``. These are stored indefinitely
|
||||
by default. Can be purged with :ref:`spack clean --downloads
|
||||
<cmd-spack-clean>`.
|
||||
|
||||
.. _Misc Cache:
|
||||
|
||||
--------------------
|
||||
``misc_cache``
|
||||
--------------------
|
||||
@@ -336,52 +334,3 @@ create a new alias called ``inst`` that will always call ``install -v``:
|
||||
|
||||
aliases:
|
||||
inst: install -v
|
||||
|
||||
-------------------------------
|
||||
``concretization_cache:enable``
|
||||
-------------------------------
|
||||
|
||||
When set to ``true``, Spack will utilize a cache of solver outputs from
|
||||
successful concretization runs. When enabled, Spack will check the concretization
|
||||
cache prior to running the solver. If a previous request to solve a given
|
||||
problem is present in the cache, Spack will load the concrete specs and other
|
||||
solver data from the cache rather than running the solver. Specs not previously
|
||||
concretized will be added to the cache on a successful solve. The cache additionally
|
||||
holds solver statistics, so commands like ``spack solve`` will still return information
|
||||
about the run that produced a given solver result.
|
||||
|
||||
This cache is a subcache of the :ref:`Misc Cache` and as such will be cleaned when the Misc
|
||||
Cache is cleaned.
|
||||
|
||||
When ``false`` or ommitted, all concretization requests will be performed from scatch
|
||||
|
||||
----------------------------
|
||||
``concretization_cache:url``
|
||||
----------------------------
|
||||
|
||||
Path to the location where Spack will root the concretization cache. Currently this only supports
|
||||
paths on the local filesystem.
|
||||
|
||||
Default location is under the :ref:`Misc Cache` at: ``$misc_cache/concretization``
|
||||
|
||||
------------------------------------
|
||||
``concretization_cache:entry_limit``
|
||||
------------------------------------
|
||||
|
||||
Sets a limit on the number of concretization results that Spack will cache. The limit is evaluated
|
||||
after each concretization run; if Spack has stored more results than the limit allows, the
|
||||
oldest concretization results are pruned until 10% of the limit has been removed.
|
||||
|
||||
Setting this value to 0 disables the automatic pruning. It is expected users will be
|
||||
responsible for maintaining this cache.
|
||||
|
||||
-----------------------------------
|
||||
``concretization_cache:size_limit``
|
||||
-----------------------------------
|
||||
|
||||
Sets a limit on the size of the concretization cache in bytes. The limit is evaluated
|
||||
after each concretization run; if Spack has stored more results than the limit allows, the
|
||||
oldest concretization results are pruned until 10% of the limit has been removed.
|
||||
|
||||
Setting this value to 0 disables the automatic pruning. It is expected users will be
|
||||
responsible for maintaining this cache.
|
||||
|
@@ -14,7 +14,6 @@ case you want to skip directly to specific docs:
|
||||
* :ref:`compilers.yaml <compiler-config>`
|
||||
* :ref:`concretizer.yaml <concretizer-options>`
|
||||
* :ref:`config.yaml <config-yaml>`
|
||||
* :ref:`include.yaml <include-yaml>`
|
||||
* :ref:`mirrors.yaml <mirrors>`
|
||||
* :ref:`modules.yaml <modules>`
|
||||
* :ref:`packages.yaml <packages-config>`
|
||||
|
@@ -361,6 +361,7 @@ and the tags associated with the class of runners to build on.
|
||||
* ``.linux_neoverse_n1``
|
||||
* ``.linux_neoverse_v1``
|
||||
* ``.linux_neoverse_v2``
|
||||
* ``.linux_power``
|
||||
* ``.linux_skylake``
|
||||
* ``.linux_x86_64``
|
||||
* ``.linux_x86_64_v4``
|
||||
|
@@ -543,10 +543,10 @@ With either interpreter you can run a single command:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack python -c 'from spack.concretize import concretize_one; concretize_one("python")'
|
||||
$ spack python -c 'from spack.spec import Spec; Spec("python").concretized()'
|
||||
...
|
||||
|
||||
$ spack python -i ipython -c 'from spack.concretize import concretize_one; concretize_one("python")'
|
||||
$ spack python -i ipython -c 'from spack.spec import Spec; Spec("python").concretized()'
|
||||
Out[1]: ...
|
||||
|
||||
or a file:
|
||||
|
@@ -112,19 +112,6 @@ the original but may concretize differently in the presence of different
|
||||
explicit or default configuration settings (e.g., a different version of
|
||||
Spack or for a different user account).
|
||||
|
||||
Environments created from a manifest will copy any included configs
|
||||
from relative paths inside the environment. Relative paths from
|
||||
outside the environment will cause errors, and absolute paths will be
|
||||
kept absolute. For example, if ``spack.yaml`` includes:
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
spack:
|
||||
include: [./config.yaml]
|
||||
|
||||
then the created environment will have its own copy of the file
|
||||
``config.yaml`` copied from the location in the original environment.
|
||||
|
||||
Create an environment from a ``spack.lock`` file using:
|
||||
|
||||
.. code-block:: console
|
||||
@@ -173,7 +160,7 @@ accepts. If an environment already exists then spack will simply activate it
|
||||
and ignore the create-specific flags.
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
|
||||
$ spack env activate --create -p myenv
|
||||
# ...
|
||||
# [creates if myenv does not exist yet]
|
||||
@@ -437,8 +424,8 @@ Developing Packages in a Spack Environment
|
||||
|
||||
The ``spack develop`` command allows one to develop Spack packages in
|
||||
an environment. It requires a spec containing a concrete version, and
|
||||
will configure Spack to install the package from local source.
|
||||
If a version is not provided from the command line interface then spack
|
||||
will configure Spack to install the package from local source.
|
||||
If a version is not provided from the command line interface then spack
|
||||
will automatically pick the highest version the package has defined.
|
||||
This means any infinity versions (``develop``, ``main``, ``stable``) will be
|
||||
preferred in this selection process.
|
||||
@@ -448,22 +435,15 @@ set, and Spack will ensure the package and its dependents are rebuilt
|
||||
any time the environment is installed if the package's local source
|
||||
code has been modified. Spack's native implementation to check for modifications
|
||||
is to check if ``mtime`` is newer than the installation.
|
||||
A custom check can be created by overriding the ``detect_dev_src_change`` method
|
||||
in your package class. This is particularly useful for projects using custom spack repo's
|
||||
to drive development and want to optimize performance.
|
||||
A custom check can be created by overriding the ``detect_dev_src_change`` method
|
||||
in your package class. This is particularly useful for projects using custom spack repo's
|
||||
to drive development and want to optimize performance.
|
||||
|
||||
Spack ensures that all instances of a
|
||||
developed package in the environment are concretized to match the
|
||||
version (and other constraints) passed as the spec argument to the
|
||||
``spack develop`` command.
|
||||
|
||||
When working deep in the graph it is often desirable to have multiple specs marked
|
||||
as ``develop`` so you don't have to restage and/or do full rebuilds each time you
|
||||
call ``spack install``. The ``--recursive`` flag can be used in these scenarios
|
||||
to ensure that all the dependents of the initial spec you provide are also marked
|
||||
as develop specs. The ``--recursive`` flag requires a pre-concretized environment
|
||||
so the graph can be traversed from the supplied spec all the way to the root specs.
|
||||
|
||||
For packages with ``git`` attributes, git branches, tags, and commits can
|
||||
also be used as valid concrete versions (see :ref:`version-specifier`).
|
||||
This means that for a package ``foo``, ``spack develop foo@git.main`` will clone
|
||||
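The ``spack develop`` hunk above mentions overriding ``detect_dev_src_change`` without showing one. Below is a hedged, illustrative sketch; the package, repository URL and the no-argument, bool-returning signature are assumptions to be checked against ``PackageBase`` in the Spack version at hand.

.. code-block:: python

    # Illustrative sketch only: a hypothetical package replacing the default
    # mtime-based change detection with a git-based check.
    import subprocess

    from spack.package import *


    class Mylib(CMakePackage):
        """Toy develop-mode package with a custom change check."""

        git = "https://example.com/mylib.git"
        version("main", branch="main")

        def detect_dev_src_change(self) -> bool:
            # Treat any uncommitted change in the checked-out source as
            # "source changed"; cheaper than walking every file's mtime.
            out = subprocess.run(
                ["git", "-C", self.stage.source_path, "status", "--porcelain"],
                capture_output=True, text=True, check=False,
            )
            return bool(out.stdout.strip())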
@@ -473,7 +453,7 @@ Further development on ``foo`` can be tested by re-installing the environment,
|
||||
and eventually committed and pushed to the upstream git repo.
|
||||
|
||||
If the package being developed supports out-of-source builds then users can use the
|
||||
``--build_directory`` flag to control the location and name of the build directory.
|
||||
``--build_directory`` flag to control the location and name of the build directory.
|
||||
This is a shortcut to set the ``package_attributes:build_directory`` in the
|
||||
``packages`` configuration (see :ref:`assigning-package-attributes`).
|
||||
The supplied location will become the build-directory for that package in all future builds.
|
||||
@@ -677,45 +657,24 @@ This configuration sets the default compiler for all packages to
|
||||
Included configurations
|
||||
^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
Spack environments allow an ``include`` heading in their yaml schema.
|
||||
This heading pulls in external configuration files and applies them to
|
||||
the environment.
|
||||
Spack environments allow an ``include`` heading in their yaml
|
||||
schema. This heading pulls in external configuration files and applies
|
||||
them to the environment.
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
spack:
|
||||
include:
|
||||
- environment/relative/path/to/config.yaml
|
||||
- relative/path/to/config.yaml
|
||||
- https://github.com/path/to/raw/config/compilers.yaml
|
||||
- /absolute/path/to/packages.yaml
|
||||
- path: /path/to/$os/$target/environment
|
||||
optional: true
|
||||
- path: /path/to/os-specific/config-dir
|
||||
when: os == "ventura"
|
||||
|
||||
Included configuration files are required *unless* they are explicitly optional
|
||||
or the entry's condition evaluates to ``false``. Optional includes are specified
|
||||
with the ``optional`` clause and conditional with the ``when`` clause. (See
|
||||
:ref:`include-yaml` for more information on optional and conditional entries.)
|
||||
|
||||
Files are listed using paths to individual files or directories containing them.
|
||||
Path entries may be absolute or relative to the environment or specified as
|
||||
URLs. URLs to individual files need link to the **raw** form of the file's
|
||||
contents (e.g., `GitHub
|
||||
<https://docs.github.com/en/repositories/working-with-files/using-files/viewing-and-understanding-files#viewing-or-copying-the-raw-file-content>`_
|
||||
or `GitLab
|
||||
<https://docs.gitlab.com/ee/api/repository_files.html#get-raw-file-from-repository>`_).
|
||||
Only the ``file``, ``ftp``, ``http`` and ``https`` protocols (or schemes) are
|
||||
supported. Spack-specific, environment and user path variables can be used.
|
||||
(See :ref:`config-file-variables` for more information.)
|
||||
|
||||
.. warning::
|
||||
|
||||
Recursive includes are not currently processed in a breadth-first manner
|
||||
so the value of a configuration option that is altered by multiple included
|
||||
files may not be what you expect. This will be addressed in a future
|
||||
update.
|
||||
|
||||
Environments can include files or URLs. File paths can be relative or
|
||||
absolute. URLs include the path to the text for individual files or
|
||||
can be the path to a directory containing configuration files.
|
||||
Spack supports ``file``, ``http``, ``https`` and ``ftp`` protocols (or
|
||||
schemes). Spack-specific, environment and user path variables may be
|
||||
used in these paths. See :ref:`config-file-variables` for more information.
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
Configuration precedence
|
||||
|
@@ -30,7 +30,7 @@ than always choosing the latest versions or default variants.
|
||||
|
||||
.. note::
|
||||
|
||||
As a rule of thumb: requirements + constraints > strong preferences > reuse > preferences > defaults.
|
||||
As a rule of thumb: requirements + constraints > reuse > preferences > defaults.
|
||||
|
||||
The following set of criteria (from lowest to highest precedence) explain
|
||||
common cases where concretization output may seem surprising at first.
|
||||
@@ -56,19 +56,7 @@ common cases where concretization output may seem surprising at first.
|
||||
concretizer:
|
||||
reuse: dependencies # other options are 'true' and 'false'
|
||||
|
||||
3. :ref:`Strong preferences <package-strong-preferences>` configured in ``packages.yaml``
|
||||
are higher priority than reuse, and can be used to strongly prefer a specific version
|
||||
or variant, without erroring out if it's not possible. Strong preferences are specified
|
||||
as follows:
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
packages:
|
||||
foo:
|
||||
prefer:
|
||||
- "@1.1: ~mpi"
|
||||
|
||||
4. :ref:`Package requirements <package-requirements>` configured in ``packages.yaml``,
|
||||
3. :ref:`Package requirements <package-requirements>` configured in ``packages.yaml``,
|
||||
and constraints from the command line as well as ``package.py`` files override all
|
||||
of the above. Requirements are specified as follows:
|
||||
|
||||
@@ -78,8 +66,6 @@ common cases where concretization output may seem surprising at first.
|
||||
foo:
|
||||
require:
|
||||
- "@1.2: +mpi"
|
||||
conflicts:
|
||||
- "@1.4"
|
||||
|
||||
Requirements and constraints restrict the set of possible solutions, while reuse
|
||||
behavior and preferences influence what an optimal solution looks like.
|
||||
|
@@ -1,51 +0,0 @@
|
||||
.. Copyright Spack Project Developers. See COPYRIGHT file for details.
|
||||
|
||||
SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
.. _include-yaml:
|
||||
|
||||
===============================
|
||||
Include Settings (include.yaml)
|
||||
===============================
|
||||
|
||||
Spack allows you to include configuration files through ``include.yaml``.
|
||||
Using the ``include:`` heading results in pulling in external configuration
|
||||
information to be used by any Spack command.
|
||||
|
||||
Included configuration files are required *unless* they are explicitly optional
|
||||
or the entry's condition evaluates to ``false``. Optional includes are specified
|
||||
with the ``optional`` clause and conditional with the ``when`` clause. For
|
||||
example,
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
include:
|
||||
- /path/to/a/required/config.yaml
|
||||
- path: /path/to/$os/$target/config
|
||||
optional: true
|
||||
- path: /path/to/os-specific/config-dir
|
||||
when: os == "ventura"
|
||||
|
||||
shows all three. The first entry, ``/path/to/a/required/config.yaml``,
|
||||
indicates that included ``config.yaml`` file is required (so must exist).
|
||||
Use of ``optional: true`` for ``/path/to/$os/$target/config`` means
|
||||
the path is only included if it exists. The condition ``os == "ventura"``
|
||||
in the ``when`` clause for ``/path/to/os-specific/config-dir`` means the
|
||||
path is only included when the operating system (``os``) is ``ventura``.
|
||||
|
||||
The same conditions and variables in `Spec List References
|
||||
<https://spack.readthedocs.io/en/latest/environments.html#spec-list-references>`_
|
||||
can be used for conditional activation in the ``when`` clauses.
|
||||
|
||||
Included files can be specified by path or by their parent directory.
|
||||
Paths may be absolute, relative (to the configuration file including the path),
|
||||
or specified as URLs. Only the ``file``, ``ftp``, ``http`` and ``https`` protocols (or
|
||||
schemes) are supported. Spack-specific, environment and user path variables
|
||||
can be used. (See :ref:`config-file-variables` for more information.)
|
||||
|
||||
.. warning::
|
||||
|
||||
Recursive includes are not currently processed in a breadth-first manner
|
||||
so the value of a configuration option that is altered by multiple included
|
||||
files may not be what you expect. This will be addressed in a future
|
||||
update.
|
@@ -71,7 +71,6 @@ or refer to the full manual below.
|
||||
|
||||
configuration
|
||||
config_yaml
|
||||
include_yaml
|
||||
packages_yaml
|
||||
build_settings
|
||||
environments
|
||||
|
@@ -456,13 +456,14 @@ For instance, the following config options,
|
||||
tcl:
|
||||
all:
|
||||
suffixes:
|
||||
^python@3: 'python{^python.version.up_to_2}'
|
||||
^python@3: 'python{^python.version}'
|
||||
^openblas: 'openblas'
|
||||
|
||||
will add a ``python3.12`` to module names of packages compiled with Python 3.12, and similarly for
|
||||
all specs depending on ``python@3``. This is useful to know which version of Python a set of Python
|
||||
extensions is associated with. Likewise, the ``openblas`` string is attached to any program that
|
||||
has openblas in the spec, most likely via the ``+blas`` variant specification.
|
||||
will add a ``python-3.12.1`` version string to any packages compiled with
|
||||
Python matching the spec, ``python@3``. This is useful to know which
|
||||
version of Python a set of Python extensions is associated with. Likewise, the
|
||||
``openblas`` string is attached to any program that has openblas in the spec,
|
||||
most likely via the ``+blas`` variant specification.
|
||||
|
||||
The most heavyweight solution to module naming is to change the entire
|
||||
naming convention for module files. This uses the projections format
|
||||
|
@@ -486,8 +486,6 @@ present. For instance with a configuration like:
|
||||
|
||||
you will use ``mvapich2~cuda %gcc`` as an ``mpi`` provider.
|
||||
|
||||
.. _package-strong-preferences:
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
Conflicts and strong preferences
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
@@ -820,69 +820,6 @@ presence of a ``SPACK_CDASH_AUTH_TOKEN`` environment variable during the
|
||||
build group on CDash called "Release Testing" (that group will be created if
|
||||
it didn't already exist).
|
||||
|
||||
.. _ci_artifacts:
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
CI Artifacts Directory Layout
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
When running the CI build using the command ``spack ci rebuild`` a number of directories are created for
|
||||
storing data generated during the CI job. The default root directory for artifacts is ``job_scratch_root``.
|
||||
This can be overridden by passing the argument ``--artifacts-root`` to the ``spack ci generate`` command
|
||||
or by setting the ``SPACK_ARTIFACTS_ROOT`` environment variable in the build job scripts.
|
||||
|
||||
The top level directories under the artifact root are ``concrete_environment``, ``logs``, ``reproduction``,
|
||||
``tests``, and ``user_data``. Spack does not restrict what is written to any of these directories nor does
|
||||
it require user specified files be written to any specific directory.
|
||||
|
||||
------------------------
|
||||
``concrete_environment``
|
||||
------------------------
|
||||
|
||||
The directory ``concrete_environment`` is used to communicate the ci generate processed ``spack.yaml`` and
|
||||
the concrete ``spack.lock`` for the CI environment.
|
||||
|
||||
--------
|
||||
``logs``
|
||||
--------
|
||||
|
||||
The directory ``logs`` contains the spack build log, ``spack-build-out.txt``, and the spack build environment
|
||||
modification file, ``spack-build-mod-env.txt``. Additionally all files specified by the packages ``Builder``
|
||||
property ``archive_files`` are also copied here (ie. ``CMakeCache.txt`` in ``CMakeBuilder``).
|
||||
|
||||
----------------
|
||||
``reproduction``
|
||||
----------------
|
||||
|
||||
The directory ``reproduction`` is used to store the files needed by the ``spack reproduce-build`` command.
|
||||
This includes ``repro.json``, copies of all of the files in ``concrete_environment``, the concrete spec
|
||||
JSON file for the current spec being built, and all of the files written in the artifacts root directory.
|
||||
|
||||
The ``repro.json`` file is not versioned and is only designed to work with the version of spack CI was run with.
|
||||
An example of what a ``repro.json`` may look like is here.
|
||||
|
||||
.. code:: json
|
||||
|
||||
{
|
||||
"job_name": "adios2@2.9.2 /feaevuj %gcc@11.4.0 arch=linux-ubuntu20.04-x86_64_v3 E4S ROCm External",
|
||||
"job_spec_json": "adios2.json",
|
||||
"ci_project_dir": "/builds/spack/spack"
|
||||
}
|
||||
|
||||
---------
|
||||
``tests``
|
||||
---------
|
||||
|
||||
The directory ``tests`` is used to store output from running ``spack test <job spec>``. This may or may not have
|
||||
data in it depending on the package that was built and the availability of tests.
|
||||
|
||||
-------------
|
||||
``user_data``
|
||||
-------------
|
||||
|
||||
The directory ``user_data`` is used to store everything else that shouldn't be copied to the ``reproduction`` direcotory.
|
||||
Users may use this to store additional logs or metrics or other types of files generated by the build job.
|
||||
|
||||
-------------------------------------
|
||||
Using a custom spack in your pipeline
|
||||
-------------------------------------
|
||||
|
@@ -1,13 +1,13 @@
sphinx==8.2.3
sphinx==8.1.3
sphinxcontrib-programoutput==0.18
sphinx_design==0.6.1
sphinx-rtd-theme==3.0.2
python-levenshtein==0.27.1
python-levenshtein==0.26.1
docutils==0.21.2
pygments==2.19.1
pygments==2.18.0
urllib3==2.3.0
pytest==8.3.5
isort==6.0.1
black==25.1.0
flake8==7.1.2
pytest==8.3.4
isort==5.13.2
black==24.10.0
flake8==7.1.1
mypy==1.11.1
@@ -3,7 +3,7 @@
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
"""URL primitives that just require Python standard library."""
|
||||
import itertools
|
||||
import os
|
||||
import os.path
|
||||
import re
|
||||
from typing import Optional, Set, Tuple
|
||||
from urllib.parse import urlsplit, urlunsplit
|
||||
|
@@ -7,7 +7,6 @@
|
||||
import fnmatch
|
||||
import glob
|
||||
import hashlib
|
||||
import io
|
||||
import itertools
|
||||
import numbers
|
||||
import os
|
||||
@@ -21,7 +20,6 @@
|
||||
from contextlib import contextmanager
|
||||
from itertools import accumulate
|
||||
from typing import (
|
||||
IO,
|
||||
Callable,
|
||||
Deque,
|
||||
Dict,
|
||||
@@ -77,6 +75,7 @@
|
||||
"install_tree",
|
||||
"is_exe",
|
||||
"join_path",
|
||||
"last_modification_time_recursive",
|
||||
"library_extensions",
|
||||
"mkdirp",
|
||||
"partition_path",
|
||||
@@ -670,7 +669,7 @@ def copy(src, dest, _permissions=False):
|
||||
_permissions (bool): for internal use only
|
||||
|
||||
Raises:
|
||||
OSError: if *src* does not match any files or directories
|
||||
IOError: if *src* does not match any files or directories
|
||||
ValueError: if *src* matches multiple files but *dest* is
|
||||
not a directory
|
||||
"""
|
||||
@@ -681,7 +680,7 @@ def copy(src, dest, _permissions=False):
|
||||
|
||||
files = glob.glob(src)
|
||||
if not files:
|
||||
raise OSError("No such file or directory: '{0}'".format(src))
|
||||
raise IOError("No such file or directory: '{0}'".format(src))
|
||||
if len(files) > 1 and not os.path.isdir(dest):
|
||||
raise ValueError(
|
||||
"'{0}' matches multiple files but '{1}' is not a directory".format(src, dest)
|
||||
@@ -712,7 +711,7 @@ def install(src, dest):
|
||||
dest (str): the destination file or directory
|
||||
|
||||
Raises:
|
||||
OSError: if *src* does not match any files or directories
|
||||
IOError: if *src* does not match any files or directories
|
||||
ValueError: if *src* matches multiple files but *dest* is
|
||||
not a directory
|
||||
"""
|
||||
@@ -750,7 +749,7 @@ def copy_tree(
|
||||
_permissions (bool): for internal use only
|
||||
|
||||
Raises:
|
||||
OSError: if *src* does not match any files or directories
|
||||
IOError: if *src* does not match any files or directories
|
||||
ValueError: if *src* is a parent directory of *dest*
|
||||
"""
|
||||
if _permissions:
|
||||
@@ -764,7 +763,7 @@ def copy_tree(
|
||||
|
||||
files = glob.glob(src)
|
||||
if not files:
|
||||
raise OSError("No such file or directory: '{0}'".format(src))
|
||||
raise IOError("No such file or directory: '{0}'".format(src))
|
||||
|
||||
# For Windows hard-links and junctions, the source path must exist to make a symlink. Add
|
||||
# all symlinks to this list while traversing the tree, then when finished, make all
|
||||
@@ -845,7 +844,7 @@ def install_tree(src, dest, symlinks=True, ignore=None):
|
||||
ignore (typing.Callable): function indicating which files to ignore
|
||||
|
||||
Raises:
|
||||
OSError: if *src* does not match any files or directories
|
||||
IOError: if *src* does not match any files or directories
|
||||
ValueError: if *src* is a parent directory of *dest*
|
||||
"""
|
||||
copy_tree(src, dest, symlinks=symlinks, ignore=ignore, _permissions=True)
|
||||
@@ -1471,36 +1470,15 @@ def set_executable(path):
|
||||
|
||||
|
||||
@system_path_filter
|
||||
def recursive_mtime_greater_than(path: str, time: float) -> bool:
|
||||
"""Returns true if any file or dir recursively under `path` has mtime greater than `time`."""
|
||||
# use bfs order to increase likelihood of early return
|
||||
queue: Deque[str] = collections.deque([path])
|
||||
|
||||
if os.stat(path).st_mtime > time:
|
||||
return True
|
||||
|
||||
while queue:
|
||||
current = queue.popleft()
|
||||
|
||||
try:
|
||||
entries = os.scandir(current)
|
||||
except OSError:
|
||||
continue
|
||||
|
||||
with entries:
|
||||
for entry in entries:
|
||||
try:
|
||||
st = entry.stat(follow_symlinks=False)
|
||||
except OSError:
|
||||
continue
|
||||
|
||||
if st.st_mtime > time:
|
||||
return True
|
||||
|
||||
if entry.is_dir(follow_symlinks=False):
|
||||
queue.append(entry.path)
|
||||
|
||||
return False
|
||||
def last_modification_time_recursive(path):
|
||||
path = os.path.abspath(path)
|
||||
times = [os.stat(path).st_mtime]
|
||||
times.extend(
|
||||
os.lstat(os.path.join(root, name)).st_mtime
|
||||
for root, dirs, files in os.walk(path)
|
||||
for name in dirs + files
|
||||
)
|
||||
return max(times)
|
||||
|
||||
|
||||
@system_path_filter
|
||||
@@ -1762,7 +1740,8 @@ def find(
|
||||
|
||||
|
||||
def _log_file_access_issue(e: OSError, path: str) -> None:
|
||||
tty.debug(f"find must skip {path}: {e}")
|
||||
errno_name = errno.errorcode.get(e.errno, "UNKNOWN")
|
||||
tty.debug(f"find must skip {path}: {errno_name} {e}")
|
||||
|
||||
|
||||
def _file_id(s: os.stat_result) -> Tuple[int, int]:
|
||||
@@ -2456,69 +2435,26 @@ class WindowsSimulatedRPath:
|
||||
and vis versa.
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
package,
|
||||
base_modification_prefix: Optional[Union[str, pathlib.Path]] = None,
|
||||
link_install_prefix: bool = True,
|
||||
):
|
||||
def __init__(self, package, link_install_prefix=True):
|
||||
"""
|
||||
Args:
|
||||
package (spack.package_base.PackageBase): Package requiring links
|
||||
base_modification_prefix (str|pathlib.Path): Path representation indicating
|
||||
the root directory in which to establish the simulated rpath, ie where the
|
||||
symlinks that comprise the "rpath" behavior will be installed.
|
||||
|
||||
Note: This is a mutually exclusive option with `link_install_prefix` using
|
||||
both is an error.
|
||||
|
||||
Default: None
|
||||
link_install_prefix (bool): Link against package's own install or stage root.
|
||||
Packages that run their own executables during build and require rpaths to
|
||||
the build directory during build time require this option.
|
||||
|
||||
Default: install
|
||||
the build directory during build time require this option. Default: install
|
||||
root
|
||||
|
||||
Note: This is a mutually exclusive option with `base_modification_prefix`, using
|
||||
both is an error.
|
||||
"""
|
||||
self.pkg = package
|
||||
self._addl_rpaths: set[str] = set()
|
||||
if link_install_prefix and base_modification_prefix:
|
||||
raise RuntimeError(
|
||||
"Invalid combination of arguments given to WindowsSimulated RPath.\n"
|
||||
"Select either `link_install_prefix` to create an install prefix rpath"
|
||||
" or specify a `base_modification_prefix` for any other link type. "
|
||||
"Specifying both arguments is invalid."
|
||||
)
|
||||
if not (link_install_prefix or base_modification_prefix):
|
||||
raise RuntimeError(
|
||||
"Insufficient arguments given to WindowsSimulatedRpath.\n"
|
||||
"WindowsSimulatedRPath requires one of link_install_prefix"
|
||||
" or base_modification_prefix to be specified."
|
||||
" Neither was provided."
|
||||
)
|
||||
|
||||
self._addl_rpaths = set()
|
||||
self.link_install_prefix = link_install_prefix
|
||||
if base_modification_prefix:
|
||||
self.base_modification_prefix = pathlib.Path(base_modification_prefix)
|
||||
else:
|
||||
self.base_modification_prefix = pathlib.Path(self.pkg.prefix)
|
||||
self._additional_library_dependents: set[pathlib.Path] = set()
|
||||
if not self.link_install_prefix:
|
||||
tty.debug(f"Generating rpath for non install context: {base_modification_prefix}")
|
||||
self._additional_library_dependents = set()
|
||||
|
||||
@property
|
||||
def library_dependents(self):
|
||||
"""
|
||||
Set of directories where package binaries/libraries are located.
|
||||
"""
|
||||
base_pths = set()
|
||||
if self.link_install_prefix:
|
||||
base_pths.add(pathlib.Path(self.pkg.prefix.bin))
|
||||
base_pths |= self._additional_library_dependents
|
||||
return base_pths
|
||||
return set([pathlib.Path(self.pkg.prefix.bin)]) | self._additional_library_dependents
|
||||
|
||||
def add_library_dependent(self, *dest):
|
||||
"""
|
||||
@@ -2534,12 +2470,6 @@ def add_library_dependent(self, *dest):
|
||||
new_pth = pathlib.Path(pth).parent
|
||||
else:
|
||||
new_pth = pathlib.Path(pth)
|
||||
path_is_in_prefix = new_pth.is_relative_to(self.base_modification_prefix)
|
||||
if not path_is_in_prefix:
|
||||
raise RuntimeError(
|
||||
f"Attempting to generate rpath symlink out of rpath context:\
|
||||
{str(self.base_modification_prefix)}"
|
||||
)
|
||||
self._additional_library_dependents.add(new_pth)
|
||||
|
||||
@property
|
||||
@@ -2628,33 +2558,6 @@ def establish_link(self):
|
||||
self._link(library, lib_dir)
|
||||
|
||||
|
||||
def make_package_test_rpath(pkg, test_dir: Union[str, pathlib.Path]):
|
||||
"""Establishes a temp Windows simulated rpath for the pkg in the testing directory
|
||||
so an executable can test the libraries/executables with proper access
|
||||
to dependent dlls
|
||||
|
||||
Note: this is a no-op on all other platforms besides Windows
|
||||
|
||||
Args:
|
||||
pkg (spack.package_base.PackageBase): the package for which the rpath should be computed
|
||||
test_dir: the testing directory in which we should construct an rpath
|
||||
"""
|
||||
# link_install_prefix as false ensures we're not linking into the install prefix
|
||||
mini_rpath = WindowsSimulatedRPath(pkg, link_install_prefix=False)
|
||||
# add the testing directory as a location to install rpath symlinks
|
||||
mini_rpath.add_library_dependent(test_dir)
|
||||
|
||||
# check for whether build_directory is available, if not
|
||||
# assume the stage root is the build dir
|
||||
build_dir_attr = getattr(pkg, "build_directory", None)
|
||||
build_directory = build_dir_attr if build_dir_attr else pkg.stage.path
|
||||
# add the build dir & build dir bin
|
||||
mini_rpath.add_rpath(os.path.join(build_directory, "bin"))
|
||||
mini_rpath.add_rpath(os.path.join(build_directory))
|
||||
# construct rpath
|
||||
mini_rpath.establish_link()
|
||||
|
||||
|
||||
@system_path_filter
|
||||
@memoized
|
||||
def can_access_dir(path):
|
||||
@@ -2883,20 +2786,6 @@ def keep_modification_time(*filenames):
|
||||
os.utime(f, (os.path.getatime(f), mtime))
|
||||
|
||||
|
||||
@contextmanager
|
||||
def temporary_file_position(stream):
|
||||
orig_pos = stream.tell()
|
||||
yield
|
||||
stream.seek(orig_pos)
|
||||
|
||||
|
||||
@contextmanager
|
||||
def current_file_position(stream: IO[str], loc: int, relative_to=io.SEEK_CUR):
|
||||
with temporary_file_position(stream):
|
||||
stream.seek(loc, relative_to)
|
||||
yield
|
||||
|
||||
|
||||
@contextmanager
|
||||
def temporary_dir(
|
||||
suffix: Optional[str] = None, prefix: Optional[str] = None, dir: Optional[str] = None
|
||||
|
@@ -11,11 +11,10 @@
|
||||
import re
|
||||
import sys
|
||||
import traceback
|
||||
import types
|
||||
import typing
|
||||
import warnings
|
||||
from datetime import datetime, timedelta
|
||||
from typing import Callable, Dict, Iterable, List, Mapping, Optional, Tuple, TypeVar
|
||||
from typing import Callable, Dict, Iterable, List, Tuple, TypeVar
|
||||
|
||||
# Ignore emacs backups when listing modules
|
||||
ignore_modules = r"^\.#|~$"
|
||||
@@ -708,24 +707,14 @@ def __init__(self, wrapped_object):
|
||||
|
||||
|
||||
class Singleton:
|
||||
"""Wrapper for lazily initialized singleton objects."""
|
||||
"""Simple wrapper for lazily initialized singleton objects."""
|
||||
|
||||
def __init__(self, factory: Callable[[], object]):
|
||||
def __init__(self, factory):
|
||||
"""Create a new singleton to be inited with the factory function.
|
||||
|
||||
Most factories will simply create the object to be initialized and
|
||||
return it.
|
||||
|
||||
In some cases, e.g. when bootstrapping some global state, the singleton
|
||||
may need to be initialized incrementally. If the factory returns a generator
|
||||
instead of a regular object, the singleton will assign each result yielded by
|
||||
the generator to the singleton instance. This allows methods called by
|
||||
the factory in later stages to refer back to the singleton.
|
||||
|
||||
Args:
|
||||
factory (function): function taking no arguments that creates the
|
||||
singleton instance.
|
||||
|
||||
factory (function): function taking no arguments that
|
||||
creates the singleton instance.
|
||||
"""
|
||||
self.factory = factory
|
||||
self._instance = None
|
||||
@@ -733,16 +722,7 @@ def __init__(self, factory: Callable[[], object]):
|
||||
@property
|
||||
def instance(self):
|
||||
if self._instance is None:
|
||||
instance = self.factory()
|
||||
|
||||
if isinstance(instance, types.GeneratorType):
|
||||
# if it's a generator, assign every value
|
||||
for value in instance:
|
||||
self._instance = value
|
||||
else:
|
||||
# if not, just assign the result like a normal singleton
|
||||
self._instance = instance
|
||||
|
||||
self._instance = self.factory()
|
||||
return self._instance
|
||||
|
||||
def __getattr__(self, name):
|
||||
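The ``Singleton`` docstring in the hunk above describes a generator-based factory for incremental initialization. The following is a minimal sketch of that behavior, assuming ``Singleton`` is importable from ``llnl.util.lang``; the ``Config`` class and names are made up for illustration.

.. code-block:: python

    from llnl.util.lang import Singleton


    class Config:
        def __init__(self):
            self.scopes = []


    def _config_factory():
        # Yield a bare object first so that code running in later bootstrap
        # stages can already resolve CONFIG.instance, then yield the final state.
        cfg = Config()
        yield cfg
        cfg.scopes.append("defaults")
        yield cfg


    CONFIG = Singleton(_config_factory)
    print(CONFIG.instance.scopes)  # ['defaults'] once the generator is exhausted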
@@ -1100,88 +1080,3 @@ def __set__(self, instance, value):

def factory(self, instance, owner):
raise NotImplementedError("must be implemented by derived classes")


KT = TypeVar("KT")
VT = TypeVar("VT")


class PriorityOrderedMapping(Mapping[KT, VT]):
"""Mapping that iterates over key according to an integer priority. If the priority is
the same for two keys, insertion order is what matters.

The priority is set when the key/value pair is added. If not set, the highest current priority
is used.
"""

_data: Dict[KT, VT]
_priorities: List[Tuple[int, KT]]

def __init__(self) -> None:
self._data = {}
# Tuple of (priority, key)
self._priorities = []

def __getitem__(self, key: KT) -> VT:
return self._data[key]

def __len__(self) -> int:
return len(self._data)

def __iter__(self):
yield from (key for _, key in self._priorities)

def __reversed__(self):
yield from (key for _, key in reversed(self._priorities))

def reversed_keys(self):
"""Iterates over keys from the highest priority, to the lowest."""
return reversed(self)

def reversed_values(self):
"""Iterates over values from the highest priority, to the lowest."""
yield from (self._data[key] for _, key in reversed(self._priorities))

def _highest_priority(self) -> int:
if not self._priorities:
return 0
result, _ = self._priorities[-1]
return result

def add(self, key: KT, *, value: VT, priority: Optional[int] = None) -> None:
"""Adds a key/value pair to the mapping, with a specific priority.

If the priority is None, then it is assumed to be the highest priority value currently
in the container.

Raises:
ValueError: when the same priority is already in the mapping
"""
if priority is None:
priority = self._highest_priority()

if key in self._data:
self.remove(key)

self._priorities.append((priority, key))
# We rely on sort being stable
self._priorities.sort(key=lambda x: x[0])
self._data[key] = value
assert len(self._data) == len(self._priorities)

def remove(self, key: KT) -> VT:
"""Removes a key from the mapping.

Returns:
The value associated with the key being removed

Raises:
KeyError: if the key is not in the mapping
"""
if key not in self._data:
raise KeyError(f"cannot find {key}")

popped_item = self._data.pop(key)
self._priorities = [(p, k) for p, k in self._priorities if k != key]
assert len(self._data) == len(self._priorities)
return popped_item
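A short usage sketch of PriorityOrderedMapping as defined above, assuming it is importable from llnl.util.lang in a tree that contains this class; iteration goes from the lowest priority to the highest, and ties keep insertion order:

from llnl.util.lang import PriorityOrderedMapping

scopes = PriorityOrderedMapping()
scopes.add("defaults", value="defaults.yaml", priority=0)
scopes.add("site", value="site.yaml", priority=10)
scopes.add("user", value="user.yaml")   # no priority given: reuses the current highest (10)

assert list(scopes) == ["defaults", "site", "user"]
assert list(scopes.reversed_keys()) == ["user", "site", "defaults"]

scopes.remove("site")
assert len(scopes) == 2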
@@ -41,16 +41,6 @@ def __init__(self, dst, src_a=None, src_b=None):
self.src_a = src_a
self.src_b = src_b

def __repr__(self) -> str:
return f"MergeConflict(dst={self.dst!r}, src_a={self.src_a!r}, src_b={self.src_b!r})"


def _samefile(a: str, b: str):
try:
return os.path.samefile(a, b)
except OSError:
return False
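The _samefile helper above deliberately swallows OSError so that a missing file or dangling symlink counts as "not the same file" instead of aborting the merge. A standalone illustration on a POSIX filesystem (the paths are throwaway temporaries, not Spack paths):

import os
import tempfile

def _samefile(a: str, b: str):
    try:
        return os.path.samefile(a, b)
    except OSError:
        return False

with tempfile.TemporaryDirectory() as tmp:
    target = os.path.join(tmp, "real.txt")
    link = os.path.join(tmp, "link.txt")
    open(target, "w").close()
    os.symlink(target, link)
    assert _samefile(target, link)        # resolves to the same inode
    os.unlink(target)
    assert not _samefile(target, link)    # dangling symlink raises OSError -> False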
class SourceMergeVisitor(BaseDirectoryVisitor):
"""
@@ -60,14 +50,9 @@ class SourceMergeVisitor(BaseDirectoryVisitor):
|
||||
- A list of merge conflicts in dst/
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self, ignore: Optional[Callable[[str], bool]] = None, normalize_paths: bool = False
|
||||
):
|
||||
def __init__(self, ignore: Optional[Callable[[str], bool]] = None):
|
||||
self.ignore = ignore if ignore is not None else lambda f: False
|
||||
|
||||
# On case-insensitive filesystems, normalize paths to detect duplications
|
||||
self.normalize_paths = normalize_paths
|
||||
|
||||
# When mapping <src root> to <dst root>/<projection>, we need to prepend the <projection>
|
||||
# bit to the relative path in the destination dir.
|
||||
self.projection: str = ""
|
||||
@@ -86,88 +71,10 @@ def __init__(
|
||||
# and can run mkdir in order.
|
||||
self.directories: Dict[str, Tuple[str, str]] = {}
|
||||
|
||||
# If the visitor is configured to normalize paths, keep a map of
|
||||
# normalized path to: original path, root directory + relative path
|
||||
self._directories_normalized: Dict[str, Tuple[str, str, str]] = {}
|
||||
|
||||
# Files to link. Maps dst_rel to (src_root, src_rel). This is an ordered dict, where files
|
||||
# are guaranteed to be grouped by src_root in the order they were visited.
|
||||
self.files: Dict[str, Tuple[str, str]] = {}
|
||||
|
||||
# If the visitor is configured to normalize paths, keep a map of
|
||||
# normalized path to: original path, root directory + relative path
|
||||
self._files_normalized: Dict[str, Tuple[str, str, str]] = {}
|
||||
|
||||
def _in_directories(self, proj_rel_path: str) -> bool:
|
||||
"""
|
||||
Check if a path is already in the directory list
|
||||
"""
|
||||
if self.normalize_paths:
|
||||
return proj_rel_path.lower() in self._directories_normalized
|
||||
else:
|
||||
return proj_rel_path in self.directories
|
||||
|
||||
def _directory(self, proj_rel_path: str) -> Tuple[str, str, str]:
|
||||
"""
|
||||
Get the directory that is mapped to a path
|
||||
"""
|
||||
if self.normalize_paths:
|
||||
return self._directories_normalized[proj_rel_path.lower()]
|
||||
else:
|
||||
return (proj_rel_path, *self.directories[proj_rel_path])
|
||||
|
||||
def _del_directory(self, proj_rel_path: str):
|
||||
"""
|
||||
Remove a directory from the list of directories
|
||||
"""
|
||||
del self.directories[proj_rel_path]
|
||||
if self.normalize_paths:
|
||||
del self._directories_normalized[proj_rel_path.lower()]
|
||||
|
||||
def _add_directory(self, proj_rel_path: str, root: str, rel_path: str):
|
||||
"""
|
||||
Add a directory to the list of directories.
|
||||
Also stores the normalized version for later lookups
|
||||
"""
|
||||
self.directories[proj_rel_path] = (root, rel_path)
|
||||
if self.normalize_paths:
|
||||
self._directories_normalized[proj_rel_path.lower()] = (proj_rel_path, root, rel_path)
|
||||
|
||||
def _in_files(self, proj_rel_path: str) -> bool:
|
||||
"""
|
||||
Check if a path is already in the files list
|
||||
"""
|
||||
if self.normalize_paths:
|
||||
return proj_rel_path.lower() in self._files_normalized
|
||||
else:
|
||||
return proj_rel_path in self.files
|
||||
|
||||
def _file(self, proj_rel_path: str) -> Tuple[str, str, str]:
|
||||
"""
|
||||
Get the file that is mapped to a path
|
||||
"""
|
||||
if self.normalize_paths:
|
||||
return self._files_normalized[proj_rel_path.lower()]
|
||||
else:
|
||||
return (proj_rel_path, *self.files[proj_rel_path])
|
||||
|
||||
def _del_file(self, proj_rel_path: str):
|
||||
"""
|
||||
Remove a file from the list of files
|
||||
"""
|
||||
del self.files[proj_rel_path]
|
||||
if self.normalize_paths:
|
||||
del self._files_normalized[proj_rel_path.lower()]
|
||||
|
||||
def _add_file(self, proj_rel_path: str, root: str, rel_path: str):
|
||||
"""
|
||||
Add a file to the list of files
|
||||
Also stores the normalized version for later lookups
|
||||
"""
|
||||
self.files[proj_rel_path] = (root, rel_path)
|
||||
if self.normalize_paths:
|
||||
self._files_normalized[proj_rel_path.lower()] = (proj_rel_path, root, rel_path)
|
||||
|
||||
def before_visit_dir(self, root: str, rel_path: str, depth: int) -> bool:
|
||||
"""
|
||||
Register a directory if dst / rel_path is not blocked by a file or ignored.
|
||||
@@ -177,28 +84,23 @@ def before_visit_dir(self, root: str, rel_path: str, depth: int) -> bool:
|
||||
if self.ignore(rel_path):
|
||||
# Don't recurse when dir is ignored.
|
||||
return False
|
||||
elif self._in_files(proj_rel_path):
|
||||
# A file-dir conflict is fatal except if they're the same file (symlinked dir).
|
||||
src_a = os.path.join(*self._file(proj_rel_path))
|
||||
src_b = os.path.join(root, rel_path)
|
||||
|
||||
if not _samefile(src_a, src_b):
|
||||
self.fatal_conflicts.append(
|
||||
MergeConflict(dst=proj_rel_path, src_a=src_a, src_b=src_b)
|
||||
elif proj_rel_path in self.files:
|
||||
# Can't create a dir where a file is.
|
||||
src_a_root, src_a_relpath = self.files[proj_rel_path]
|
||||
self.fatal_conflicts.append(
|
||||
MergeConflict(
|
||||
dst=proj_rel_path,
|
||||
src_a=os.path.join(src_a_root, src_a_relpath),
|
||||
src_b=os.path.join(root, rel_path),
|
||||
)
|
||||
return False
|
||||
|
||||
# Remove the link in favor of the dir.
|
||||
existing_proj_rel_path, _, _ = self._file(proj_rel_path)
|
||||
self._del_file(existing_proj_rel_path)
|
||||
self._add_directory(proj_rel_path, root, rel_path)
|
||||
return True
|
||||
elif self._in_directories(proj_rel_path):
|
||||
)
|
||||
return False
|
||||
elif proj_rel_path in self.directories:
|
||||
# No new directory, carry on.
|
||||
return True
|
||||
else:
|
||||
# Register new directory.
|
||||
self._add_directory(proj_rel_path, root, rel_path)
|
||||
self.directories[proj_rel_path] = (root, rel_path)
|
||||
return True
|
||||
|
||||
def before_visit_symlinked_dir(self, root: str, rel_path: str, depth: int) -> bool:
|
||||
@@ -230,7 +132,7 @@ def before_visit_symlinked_dir(self, root: str, rel_path: str, depth: int) -> bo
|
||||
if handle_as_dir:
|
||||
return self.before_visit_dir(root, rel_path, depth)
|
||||
|
||||
self.visit_file(root, rel_path, depth, symlink=True)
|
||||
self.visit_file(root, rel_path, depth)
|
||||
return False
|
||||
|
||||
def visit_file(self, root: str, rel_path: str, depth: int, *, symlink: bool = False) -> None:
|
||||
@@ -238,23 +140,30 @@ def visit_file(self, root: str, rel_path: str, depth: int, *, symlink: bool = Fa
|
||||
|
||||
if self.ignore(rel_path):
|
||||
pass
|
||||
elif self._in_directories(proj_rel_path):
|
||||
# Can't create a file where a dir is, unless they are the same file (symlinked dir),
|
||||
# in which case we simply drop the symlink in favor of the actual dir.
|
||||
src_a = os.path.join(*self._directory(proj_rel_path))
|
||||
src_b = os.path.join(root, rel_path)
|
||||
if not symlink or not _samefile(src_a, src_b):
|
||||
self.fatal_conflicts.append(
|
||||
MergeConflict(dst=proj_rel_path, src_a=src_a, src_b=src_b)
|
||||
elif proj_rel_path in self.directories:
|
||||
# Can't create a file where a dir is; fatal error
|
||||
self.fatal_conflicts.append(
|
||||
MergeConflict(
|
||||
dst=proj_rel_path,
|
||||
src_a=os.path.join(*self.directories[proj_rel_path]),
|
||||
src_b=os.path.join(root, rel_path),
|
||||
)
|
||||
elif self._in_files(proj_rel_path):
|
||||
)
|
||||
elif proj_rel_path in self.files:
|
||||
# When two files project to the same path, they conflict iff they are distinct.
|
||||
# If they are the same (i.e. one links to the other), register regular files rather
|
||||
# than symlinks. The reason is that in copy-type views, we need a copy of the actual
|
||||
# file, not the symlink.
|
||||
src_a = os.path.join(*self._file(proj_rel_path))
|
||||
|
||||
src_a = os.path.join(*self.files[proj_rel_path])
|
||||
src_b = os.path.join(root, rel_path)
|
||||
if not _samefile(src_a, src_b):
|
||||
|
||||
try:
|
||||
samefile = os.path.samefile(src_a, src_b)
|
||||
except OSError:
|
||||
samefile = False
|
||||
|
||||
if not samefile:
|
||||
# Distinct files produce a conflict.
|
||||
self.file_conflicts.append(
|
||||
MergeConflict(dst=proj_rel_path, src_a=src_a, src_b=src_b)
|
||||
@@ -264,12 +173,12 @@ def visit_file(self, root: str, rel_path: str, depth: int, *, symlink: bool = Fa
|
||||
if not symlink:
|
||||
# Remove the link in favor of the actual file. The del is necessary to maintain the
|
||||
# order of the files dict, which is grouped by root.
|
||||
existing_proj_rel_path, _, _ = self._file(proj_rel_path)
|
||||
self._del_file(existing_proj_rel_path)
|
||||
self._add_file(proj_rel_path, root, rel_path)
|
||||
del self.files[proj_rel_path]
|
||||
self.files[proj_rel_path] = (root, rel_path)
|
||||
|
||||
else:
|
||||
# Otherwise register this file to be linked.
|
||||
self._add_file(proj_rel_path, root, rel_path)
|
||||
self.files[proj_rel_path] = (root, rel_path)
|
||||
|
||||
def visit_symlinked_file(self, root: str, rel_path: str, depth: int) -> None:
|
||||
# Treat symlinked files as ordinary files (without "dereferencing")
|
||||
@@ -288,11 +197,11 @@ def set_projection(self, projection: str) -> None:
|
||||
path = ""
|
||||
for part in self.projection.split(os.sep):
|
||||
path = os.path.join(path, part)
|
||||
if not self._in_files(path):
|
||||
self._add_directory(path, "<projection>", path)
|
||||
if path not in self.files:
|
||||
self.directories[path] = ("<projection>", path)
|
||||
else:
|
||||
# Can't create a dir where a file is.
|
||||
_, src_a_root, src_a_relpath = self._file(path)
|
||||
src_a_root, src_a_relpath = self.files[path]
|
||||
self.fatal_conflicts.append(
|
||||
MergeConflict(
|
||||
dst=path,
|
||||
@@ -318,8 +227,8 @@ def __init__(self, source_merge_visitor: SourceMergeVisitor):
|
||||
def before_visit_dir(self, root: str, rel_path: str, depth: int) -> bool:
|
||||
# If destination dir is a file in a src dir, add a conflict,
|
||||
# and don't traverse deeper
|
||||
if self.src._in_files(rel_path):
|
||||
_, src_a_root, src_a_relpath = self.src._file(rel_path)
|
||||
if rel_path in self.src.files:
|
||||
src_a_root, src_a_relpath = self.src.files[rel_path]
|
||||
self.src.fatal_conflicts.append(
|
||||
MergeConflict(
|
||||
rel_path, os.path.join(src_a_root, src_a_relpath), os.path.join(root, rel_path)
|
||||
@@ -329,9 +238,8 @@ def before_visit_dir(self, root: str, rel_path: str, depth: int) -> bool:
|
||||
|
||||
# If destination dir was also a src dir, remove the mkdir
|
||||
# action, and traverse deeper.
|
||||
if self.src._in_directories(rel_path):
|
||||
existing_proj_rel_path, _, _ = self.src._directory(rel_path)
|
||||
self.src._del_directory(existing_proj_rel_path)
|
||||
if rel_path in self.src.directories:
|
||||
del self.src.directories[rel_path]
|
||||
return True
|
||||
|
||||
# If the destination dir does not appear in the src dir,
|
||||
@@ -344,24 +252,38 @@ def before_visit_symlinked_dir(self, root: str, rel_path: str, depth: int) -> bo
|
||||
be seen as files; we should not accidentally merge
|
||||
source dir with a symlinked dest dir.
|
||||
"""
|
||||
|
||||
self.visit_file(root, rel_path, depth)
|
||||
|
||||
# Never descend into symlinked target dirs.
|
||||
return False
|
||||
|
||||
def visit_file(self, root: str, rel_path: str, depth: int) -> None:
|
||||
# Can't merge a file if target already exists
|
||||
if self.src._in_directories(rel_path):
|
||||
_, src_a_root, src_a_relpath = self.src._directory(rel_path)
|
||||
# Always conflict
|
||||
if rel_path in self.src.directories:
|
||||
src_a_root, src_a_relpath = self.src.directories[rel_path]
|
||||
self.src.fatal_conflicts.append(
|
||||
MergeConflict(
|
||||
rel_path, os.path.join(src_a_root, src_a_relpath), os.path.join(root, rel_path)
|
||||
)
|
||||
)
|
||||
|
||||
elif self.src._in_files(rel_path):
|
||||
_, src_a_root, src_a_relpath = self.src._file(rel_path)
|
||||
if rel_path in self.src.files:
|
||||
src_a_root, src_a_relpath = self.src.files[rel_path]
|
||||
self.src.fatal_conflicts.append(
|
||||
MergeConflict(
|
||||
rel_path, os.path.join(src_a_root, src_a_relpath), os.path.join(root, rel_path)
|
||||
)
|
||||
)
|
||||
|
||||
# Never descend into symlinked target dirs.
|
||||
return False
|
||||
|
||||
def visit_file(self, root: str, rel_path: str, depth: int) -> None:
|
||||
# Can't merge a file if target already exists
|
||||
if rel_path in self.src.directories:
|
||||
src_a_root, src_a_relpath = self.src.directories[rel_path]
|
||||
self.src.fatal_conflicts.append(
|
||||
MergeConflict(
|
||||
rel_path, os.path.join(src_a_root, src_a_relpath), os.path.join(root, rel_path)
|
||||
)
|
||||
)
|
||||
|
||||
elif rel_path in self.src.files:
|
||||
src_a_root, src_a_relpath = self.src.files[rel_path]
|
||||
self.src.fatal_conflicts.append(
|
||||
MergeConflict(
|
||||
rel_path, os.path.join(src_a_root, src_a_relpath), os.path.join(root, rel_path)
|
||||
@@ -386,7 +308,7 @@ class LinkTree:
|
||||
|
||||
def __init__(self, source_root):
|
||||
if not os.path.exists(source_root):
|
||||
raise OSError("No such file or directory: '%s'", source_root)
|
||||
raise IOError("No such file or directory: '%s'", source_root)
|
||||
|
||||
self._root = source_root
|
||||
|
||||
|
@@ -269,7 +269,7 @@ def __init__(

@staticmethod
def _poll_interval_generator(
_wait_times: Optional[Tuple[float, float, float]] = None,
_wait_times: Optional[Tuple[float, float, float]] = None
) -> Generator[float, None, None]:
"""This implements a backoff scheme for polling a contended resource
by suggesting a succession of wait times between polls.
@@ -391,7 +391,7 @@ def _poll_lock(self, op: int) -> bool:

return True

except OSError as e:
except IOError as e:
# EAGAIN and EACCES == locked by another process (so try again)
if e.errno not in (errno.EAGAIN, errno.EACCES):
raise
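The _poll_interval_generator signature above only differs by a trailing comma in this diff, but the idea it implements is a backoff schedule for lock polling. A generator in the same spirit, self-contained and with illustrative timings that are not Spack's defaults:

import itertools
from typing import Generator, Optional, Tuple

def poll_intervals(
    wait_times: Optional[Tuple[float, float, float]] = None
) -> Generator[float, None, None]:
    # (initial wait, growth factor, cap): poll quickly at first, then back off.
    initial, factor, cap = wait_times or (0.01, 1.5, 1.0)
    wait = initial
    while True:
        yield wait
        wait = min(wait * factor, cap)

first = list(itertools.islice(poll_intervals(), 5))
assert first[0] == 0.01 and first[-1] <= 1.0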
@@ -2,7 +2,8 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

"""Utility classes for logging the output of blocks of code."""
"""Utility classes for logging the output of blocks of code.
"""
import atexit
import ctypes
import errno
@@ -343,6 +344,26 @@ def close(self):
self.file.close()


@contextmanager
def replace_environment(env):
"""Replace the current environment (`os.environ`) with `env`.

If `env` is empty (or None), this unsets all current environment
variables.
"""
env = env or {}
old_env = os.environ.copy()
try:
os.environ.clear()
for name, val in env.items():
os.environ[name] = val
yield
finally:
os.environ.clear()
for name, val in old_env.items():
os.environ[name] = val
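replace_environment, shown above on one side of this diff, is a plain save/clear/restore around os.environ; an equivalent standalone sketch of the same idea:

import os
from contextlib import contextmanager

@contextmanager
def replace_environment(env):
    # Swap os.environ for `env` (or an empty environment) and restore it afterwards.
    env = env or {}
    old_env = os.environ.copy()
    try:
        os.environ.clear()
        os.environ.update(env)
        yield
    finally:
        os.environ.clear()
        os.environ.update(old_env)

os.environ["EXAMPLE_VAR"] = "outer"
with replace_environment({"EXAMPLE_VAR": "inner"}):
    assert os.environ["EXAMPLE_VAR"] == "inner"
assert os.environ["EXAMPLE_VAR"] == "outer"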
def log_output(*args, **kwargs):
"""Context manager that logs its output to a file.
@@ -426,6 +447,7 @@ def __init__(
|
||||
self.echo = echo
|
||||
self.debug = debug
|
||||
self.buffer = buffer
|
||||
self.env = env # the environment to use for _writer_daemon
|
||||
self.filter_fn = filter_fn
|
||||
|
||||
self._active = False # used to prevent re-entry
|
||||
@@ -497,20 +519,21 @@ def __enter__(self):
|
||||
# just don't forward input if this fails
|
||||
pass
|
||||
|
||||
self.process = multiprocessing.Process(
|
||||
target=_writer_daemon,
|
||||
args=(
|
||||
input_fd,
|
||||
read_fd,
|
||||
self.write_fd,
|
||||
self.echo,
|
||||
self.log_file,
|
||||
child_pipe,
|
||||
self.filter_fn,
|
||||
),
|
||||
)
|
||||
self.process.daemon = True # must set before start()
|
||||
self.process.start()
|
||||
with replace_environment(self.env):
|
||||
self.process = multiprocessing.Process(
|
||||
target=_writer_daemon,
|
||||
args=(
|
||||
input_fd,
|
||||
read_fd,
|
||||
self.write_fd,
|
||||
self.echo,
|
||||
self.log_file,
|
||||
child_pipe,
|
||||
self.filter_fn,
|
||||
),
|
||||
)
|
||||
self.process.daemon = True # must set before start()
|
||||
self.process.start()
|
||||
|
||||
finally:
|
||||
if input_fd:
|
||||
@@ -706,7 +729,10 @@ class winlog:
|
||||
Does not support the use of 'v' toggling as nixlog does.
|
||||
"""
|
||||
|
||||
def __init__(self, file_like=None, echo=False, debug=0, buffer=False, filter_fn=None):
|
||||
def __init__(
|
||||
self, file_like=None, echo=False, debug=0, buffer=False, env=None, filter_fn=None
|
||||
):
|
||||
self.env = env
|
||||
self.debug = debug
|
||||
self.echo = echo
|
||||
self.logfile = file_like
|
||||
@@ -763,10 +789,11 @@ def background_reader(reader, echo_writer, _kill):
|
||||
reader.close()
|
||||
|
||||
self._active = True
|
||||
self._thread = Thread(
|
||||
target=background_reader, args=(self.reader, self.echo_writer, self._kill)
|
||||
)
|
||||
self._thread.start()
|
||||
with replace_environment(self.env):
|
||||
self._thread = Thread(
|
||||
target=background_reader, args=(self.reader, self.echo_writer, self._kill)
|
||||
)
|
||||
self._thread.start()
|
||||
return self
|
||||
|
||||
def __exit__(self, exc_type, exc_val, exc_tb):
|
||||
@@ -891,7 +918,7 @@ def _writer_daemon(
|
||||
try:
|
||||
if stdin_file.read(1) == "v":
|
||||
echo = not echo
|
||||
except OSError as e:
|
||||
except IOError as e:
|
||||
# If SIGTTIN is ignored, the system gives EIO
|
||||
# to let the caller know the read failed b/c it
|
||||
# was in the bg. Ignore that too.
|
||||
@@ -986,7 +1013,7 @@ def wrapped(*args, **kwargs):
|
||||
while True:
|
||||
try:
|
||||
return function(*args, **kwargs)
|
||||
except OSError as e:
|
||||
except IOError as e:
|
||||
if e.errno == errno.EINTR:
|
||||
continue
|
||||
raise
|
||||
|
@@ -10,21 +10,9 @@
import spack.util.git

#: PEP440 canonical <major>.<minor>.<micro>.<devN> string
__version__ = "1.0.0.dev0"
__version__ = "0.24.0.dev0"
spack_version = __version__

#: The current Package API version implemented by this version of Spack. The Package API defines
#: the Python interface for packages as well as the layout of package repositories. The minor
#: version is incremented when the package API is extended in a backwards-compatible way. The major
#: version is incremented upon breaking changes. This version is changed independently from the
#: Spack version.
package_api_version = (1, 0)

#: The minimum Package API version that this version of Spack is compatible with. This should
#: always be a tuple of the form ``(major, 0)``, since compatibility with vX.Y implies
#: compatibility with vX.0.
min_package_api_version = (1, 0)


def __try_int(v):
try:
@@ -91,6 +79,4 @@ def get_short_version() -> str:
"get_version",
"get_spack_commit",
"get_short_version",
"package_api_version",
"min_package_api_version",
]
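The two tuples above bound which repository Package API versions this Spack can load. A hedged sketch of the kind of compatibility check they enable; the helper name and the exact comparison are illustrative, not taken from this diff:

package_api_version = (1, 0)
min_package_api_version = (1, 0)

def supports_package_api(repo_api_version):
    # A repo's (major, minor) is usable if it lies between the minimum
    # supported version and the version this Spack implements.
    return min_package_api_version <= repo_api_version <= package_api_version

assert supports_package_api((1, 0))
assert not supports_package_api((1, 1))   # needs a newer minor than implemented here
assert not supports_package_api((0, 9))   # predates the supported range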
@@ -1010,7 +1010,7 @@ def _issues_in_depends_on_directive(pkgs, error_cls):
|
||||
for dep_name, dep in deps_by_name.items():
|
||||
|
||||
def check_virtual_with_variants(spec, msg):
|
||||
if not spack.repo.PATH.is_virtual(spec.name) or not spec.variants:
|
||||
if not spec.virtual or not spec.variants:
|
||||
return
|
||||
error = error_cls(
|
||||
f"{pkg_name}: {msg}",
|
||||
@@ -1356,8 +1356,14 @@ def _test_detection_by_executable(pkgs, debug_log, error_cls):
|
||||
|
||||
def _compare_extra_attribute(_expected, _detected, *, _spec):
|
||||
result = []
|
||||
# Check items are of the same type
|
||||
if not isinstance(_detected, type(_expected)):
|
||||
_summary = f'{pkg_name}: error when trying to detect "{_expected}"'
|
||||
_details = [f"{_detected} was detected instead"]
|
||||
return [error_cls(summary=_summary, details=_details)]
|
||||
|
||||
# If they are string expected is a regex
|
||||
if isinstance(_expected, str) and isinstance(_detected, str):
|
||||
if isinstance(_expected, str):
|
||||
try:
|
||||
_regex = re.compile(_expected)
|
||||
except re.error:
|
||||
@@ -1373,7 +1379,7 @@ def _compare_extra_attribute(_expected, _detected, *, _spec):
|
||||
_details = [f"{_detected} does not match the regex"]
|
||||
return [error_cls(summary=_summary, details=_details)]
|
||||
|
||||
elif isinstance(_expected, dict) and isinstance(_detected, dict):
|
||||
if isinstance(_expected, dict):
|
||||
_not_detected = set(_expected.keys()) - set(_detected.keys())
|
||||
if _not_detected:
|
||||
_summary = f"{pkg_name}: cannot detect some attributes for spec {_spec}"
|
||||
@@ -1388,10 +1394,6 @@ def _compare_extra_attribute(_expected, _detected, *, _spec):
|
||||
result.extend(
|
||||
_compare_extra_attribute(_expected[_key], _detected[_key], _spec=_spec)
|
||||
)
|
||||
else:
|
||||
_summary = f'{pkg_name}: error when trying to detect "{_expected}"'
|
||||
_details = [f"{_detected} was detected instead"]
|
||||
return [error_cls(summary=_summary, details=_details)]
|
||||
|
||||
return result
|
||||
|
||||
|
@@ -5,7 +5,6 @@
|
||||
import codecs
|
||||
import collections
|
||||
import concurrent.futures
|
||||
import contextlib
|
||||
import copy
|
||||
import hashlib
|
||||
import io
|
||||
@@ -24,7 +23,7 @@
|
||||
import urllib.request
|
||||
import warnings
|
||||
from contextlib import closing
|
||||
from typing import IO, Callable, Dict, Iterable, List, NamedTuple, Optional, Set, Tuple, Union
|
||||
from typing import IO, Dict, Iterable, List, NamedTuple, Optional, Set, Tuple, Union
|
||||
|
||||
import llnl.util.filesystem as fsys
|
||||
import llnl.util.lang
|
||||
@@ -92,9 +91,6 @@
|
||||
CURRENT_BUILD_CACHE_LAYOUT_VERSION = 2
|
||||
|
||||
|
||||
INDEX_HASH_FILE = "index.json.hash"
|
||||
|
||||
|
||||
class BuildCacheDatabase(spack_db.Database):
|
||||
"""A database for binary buildcaches.
|
||||
|
||||
@@ -506,7 +502,7 @@ def _fetch_and_cache_index(self, mirror_url, cache_entry={}):
|
||||
scheme = urllib.parse.urlparse(mirror_url).scheme
|
||||
|
||||
if scheme != "oci" and not web_util.url_exists(
|
||||
url_util.join(mirror_url, BUILD_CACHE_RELATIVE_PATH, spack_db.INDEX_JSON_FILE)
|
||||
url_util.join(mirror_url, BUILD_CACHE_RELATIVE_PATH, "index.json")
|
||||
):
|
||||
return False
|
||||
|
||||
@@ -595,18 +591,32 @@ def file_matches(f: IO[bytes], regex: llnl.util.lang.PatternBytes) -> bool:
f.seek(0)


def specs_to_relocate(spec: spack.spec.Spec) -> List[spack.spec.Spec]:
"""Return the set of specs that may be referenced in the install prefix of the provided spec.
We currently include non-external transitive link and direct run dependencies."""
specs = [
def deps_to_relocate(spec):
"""Return the transitive link and direct run dependencies of the spec.

This is a special traversal for dependencies we need to consider when relocating a package.

Package binaries, scripts, and other files may refer to the prefixes of dependencies, so
we need to rewrite those locations when dependencies are in a different place at install time
than they were at build time.

This traversal covers transitive link dependencies and direct run dependencies because:

1. Spack adds RPATHs for transitive link dependencies so that packages can find needed
dependency libraries.
2. Packages may call any of their *direct* run dependencies (and may bake their paths into
binaries or scripts), so we also need to search for run dependency prefixes when relocating.

This returns a deduplicated list of transitive link dependencies and direct run dependencies.
"""
deps = [
s
for s in itertools.chain(
spec.traverse(root=True, deptype="link", order="breadth", key=traverse.by_dag_hash),
spec.dependencies(deptype="run"),
spec.traverse(root=True, deptype="link"), spec.dependencies(deptype="run")
)
if not s.external
]
return list(llnl.util.lang.dedupe(specs, key=lambda s: s.dag_hash()))
return llnl.util.lang.dedupe(deps, key=lambda s: s.dag_hash())
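Both sides of the hunk above finish by deduplicating the traversal with llnl.util.lang.dedupe keyed on the DAG hash, so a node reachable as both a link and a run dependency is only relocated once. A minimal order-preserving dedupe of that shape, standalone rather than the Spack implementation:

def dedupe(iterable, key=None):
    # Yield items in first-seen order, skipping any whose key was already seen.
    seen = set()
    for item in iterable:
        k = item if key is None else key(item)
        if k not in seen:
            seen.add(k)
            yield item

specs = [
    {"hash": "abc1234", "name": "zlib"},
    {"hash": "def5678", "name": "cmake"},
    {"hash": "abc1234", "name": "zlib"},   # same node reached via link and run edges
]
unique = list(dedupe(specs, key=lambda s: s["hash"]))
assert [s["name"] for s in unique] == ["zlib", "cmake"]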
def get_buildinfo_dict(spec):
@@ -620,7 +630,7 @@ def get_buildinfo_dict(spec):
|
||||
# "relocate_binaries": [],
|
||||
# "relocate_links": [],
|
||||
"hardlinks_deduped": True,
|
||||
"hash_to_prefix": {d.dag_hash(): str(d.prefix) for d in specs_to_relocate(spec)},
|
||||
"hash_to_prefix": {d.dag_hash(): str(d.prefix) for d in deps_to_relocate(spec)},
|
||||
}
|
||||
|
||||
|
||||
@@ -673,24 +683,19 @@ def sign_specfile(key: str, specfile_path: str) -> str:
|
||||
|
||||
|
||||
def _read_specs_and_push_index(
|
||||
file_list: List[str],
|
||||
read_method: Callable,
|
||||
cache_prefix: str,
|
||||
db: BuildCacheDatabase,
|
||||
temp_dir: str,
|
||||
concurrency: int,
|
||||
file_list, read_method, cache_prefix, db: BuildCacheDatabase, temp_dir, concurrency
|
||||
):
|
||||
"""Read all the specs listed in the provided list, using thread given thread parallelism,
|
||||
generate the index, and push it to the mirror.
|
||||
|
||||
Args:
|
||||
file_list: List of urls or file paths pointing at spec files to read
|
||||
file_list (list(str)): List of urls or file paths pointing at spec files to read
|
||||
read_method: A function taking a single argument, either a url or a file path,
|
||||
and which reads the spec file at that location, and returns the spec.
|
||||
cache_prefix: prefix of the build cache on s3 where index should be pushed.
|
||||
cache_prefix (str): prefix of the build cache on s3 where index should be pushed.
|
||||
db: A spack database used for adding specs and then writing the index.
|
||||
temp_dir: Location to write index.json and hash for pushing
|
||||
concurrency: Number of parallel processes to use when fetching
|
||||
temp_dir (str): Location to write index.json and hash for pushing
|
||||
concurrency (int): Number of parallel processes to use when fetching
|
||||
"""
|
||||
for file in file_list:
|
||||
contents = read_method(file)
|
||||
@@ -708,7 +713,7 @@ def _read_specs_and_push_index(
|
||||
|
||||
# Now generate the index, compute its hash, and push the two files to
|
||||
# the mirror.
|
||||
index_json_path = os.path.join(temp_dir, spack_db.INDEX_JSON_FILE)
|
||||
index_json_path = os.path.join(temp_dir, "index.json")
|
||||
with open(index_json_path, "w", encoding="utf-8") as f:
|
||||
db._write_to_file(f)
|
||||
|
||||
@@ -718,14 +723,14 @@ def _read_specs_and_push_index(
|
||||
index_hash = compute_hash(index_string)
|
||||
|
||||
# Write the hash out to a local file
|
||||
index_hash_path = os.path.join(temp_dir, INDEX_HASH_FILE)
|
||||
index_hash_path = os.path.join(temp_dir, "index.json.hash")
|
||||
with open(index_hash_path, "w", encoding="utf-8") as f:
|
||||
f.write(index_hash)
|
||||
|
||||
# Push the index itself
|
||||
web_util.push_to_url(
|
||||
index_json_path,
|
||||
url_util.join(cache_prefix, spack_db.INDEX_JSON_FILE),
|
||||
url_util.join(cache_prefix, "index.json"),
|
||||
keep_original=False,
|
||||
extra_args={"ContentType": "application/json", "CacheControl": "no-cache"},
|
||||
)
|
||||
@@ -733,7 +738,7 @@ def _read_specs_and_push_index(
|
||||
# Push the hash
|
||||
web_util.push_to_url(
|
||||
index_hash_path,
|
||||
url_util.join(cache_prefix, INDEX_HASH_FILE),
|
||||
url_util.join(cache_prefix, "index.json.hash"),
|
||||
keep_original=False,
|
||||
extra_args={"ContentType": "text/plain", "CacheControl": "no-cache"},
|
||||
)
|
||||
@@ -802,7 +807,7 @@ def url_read_method(url):
|
||||
try:
|
||||
_, _, spec_file = web_util.read_from_url(url)
|
||||
contents = codecs.getreader("utf-8")(spec_file).read()
|
||||
except (web_util.SpackWebError, OSError) as e:
|
||||
except web_util.SpackWebError as e:
|
||||
tty.error(f"Error reading specfile: {url}: {e}")
|
||||
return contents
|
||||
|
||||
@@ -870,12 +875,9 @@ def _url_generate_package_index(url: str, tmpdir: str, concurrency: int = 32):
|
||||
tty.debug(f"Retrieving spec descriptor files from {url} to build index")
|
||||
|
||||
db = BuildCacheDatabase(tmpdir)
|
||||
db._write()
|
||||
|
||||
try:
|
||||
_read_specs_and_push_index(
|
||||
file_list, read_fn, url, db, str(db.database_directory), concurrency
|
||||
)
|
||||
_read_specs_and_push_index(file_list, read_fn, url, db, db.database_directory, concurrency)
|
||||
except Exception as e:
|
||||
raise GenerateIndexError(f"Encountered problem pushing package index to {url}: {e}") from e
|
||||
|
||||
@@ -923,7 +925,7 @@ class FileTypes:
|
||||
UNKNOWN = 2
|
||||
|
||||
|
||||
NOT_ISO8859_1_TEXT = re.compile(b"[\x00\x7f-\x9f]")
|
||||
NOT_ISO8859_1_TEXT = re.compile(b"[\x00\x7F-\x9F]")
|
||||
|
||||
|
||||
def file_type(f: IO[bytes]) -> int:
|
||||
@@ -1110,7 +1112,7 @@ def _exists_in_buildcache(spec: spack.spec.Spec, tmpdir: str, out_url: str) -> E
|
||||
|
||||
|
||||
def prefixes_to_relocate(spec):
|
||||
prefixes = [s.prefix for s in specs_to_relocate(spec)]
|
||||
prefixes = [s.prefix for s in deps_to_relocate(spec)]
|
||||
prefixes.append(spack.hooks.sbang.sbang_install_path())
|
||||
prefixes.append(str(spack.store.STORE.layout.root))
|
||||
return prefixes
|
||||
@@ -1789,7 +1791,7 @@ def _oci_update_index(
|
||||
db.mark(spec, "in_buildcache", True)
|
||||
|
||||
# Create the index.json file
|
||||
index_json_path = os.path.join(tmpdir, spack_db.INDEX_JSON_FILE)
|
||||
index_json_path = os.path.join(tmpdir, "index.json")
|
||||
with open(index_json_path, "w", encoding="utf-8") as f:
|
||||
db._write_to_file(f)
|
||||
|
||||
@@ -2010,7 +2012,7 @@ def fetch_url_to_mirror(url):
|
||||
|
||||
# Download the config = spec.json and the relevant tarball
|
||||
try:
|
||||
manifest = json.load(response)
|
||||
manifest = json.loads(response.read())
|
||||
spec_digest = spack.oci.image.Digest.from_string(manifest["config"]["digest"])
|
||||
tarball_digest = spack.oci.image.Digest.from_string(
|
||||
manifest["layers"][-1]["digest"]
|
||||
@@ -2137,9 +2139,10 @@ def fetch_url_to_mirror(url):
|
||||
|
||||
|
||||
def dedupe_hardlinks_if_necessary(root, buildinfo):
|
||||
"""Updates a buildinfo dict for old archives that did not dedupe hardlinks. De-duping hardlinks
|
||||
is necessary when relocating files in parallel and in-place. This means we must preserve inodes
|
||||
when relocating."""
|
||||
"""Updates a buildinfo dict for old archives that did
|
||||
not dedupe hardlinks. De-duping hardlinks is necessary
|
||||
when relocating files in parallel and in-place. This
|
||||
means we must preserve inodes when relocating."""
|
||||
|
||||
# New archives don't need this.
|
||||
if buildinfo.get("hardlinks_deduped", False):
|
||||
@@ -2168,48 +2171,65 @@ def dedupe_hardlinks_if_necessary(root, buildinfo):
|
||||
buildinfo[key] = new_list
|
||||
|
||||
|
||||
def relocate_package(spec: spack.spec.Spec) -> None:
|
||||
"""Relocate binaries and text files in the given spec prefix, based on its buildinfo file."""
|
||||
spec_prefix = str(spec.prefix)
|
||||
buildinfo = read_buildinfo_file(spec_prefix)
|
||||
def relocate_package(spec):
|
||||
"""
|
||||
Relocate the given package
|
||||
"""
|
||||
workdir = str(spec.prefix)
|
||||
buildinfo = read_buildinfo_file(workdir)
|
||||
new_layout_root = str(spack.store.STORE.layout.root)
|
||||
new_prefix = str(spec.prefix)
|
||||
new_rel_prefix = str(os.path.relpath(new_prefix, new_layout_root))
|
||||
new_spack_prefix = str(spack.paths.prefix)
|
||||
|
||||
old_sbang_install_path = None
|
||||
if "sbang_install_path" in buildinfo:
|
||||
old_sbang_install_path = str(buildinfo["sbang_install_path"])
|
||||
old_layout_root = str(buildinfo["buildpath"])
|
||||
old_spack_prefix = str(buildinfo.get("spackprefix"))
|
||||
old_rel_prefix = buildinfo.get("relative_prefix")
|
||||
old_prefix = os.path.join(old_layout_root, old_rel_prefix)
|
||||
rel = buildinfo.get("relative_rpaths", False)
|
||||
|
||||
# Warn about old style tarballs created with the --rel flag (removed in Spack v0.20)
|
||||
if buildinfo.get("relative_rpaths", False):
|
||||
tty.warn(
|
||||
f"Tarball for {spec} uses relative rpaths, which can cause library loading issues."
|
||||
)
|
||||
|
||||
# In Spack 0.19 and older prefix_to_hash was the default and externals were not dropped, so
|
||||
# prefixes were not unique.
|
||||
# In the past prefix_to_hash was the default and externals were not dropped, so prefixes
|
||||
# were not unique.
|
||||
if "hash_to_prefix" in buildinfo:
|
||||
hash_to_old_prefix = buildinfo["hash_to_prefix"]
|
||||
elif "prefix_to_hash" in buildinfo:
|
||||
hash_to_old_prefix = {v: k for (k, v) in buildinfo["prefix_to_hash"].items()}
|
||||
hash_to_old_prefix = dict((v, k) for (k, v) in buildinfo["prefix_to_hash"].items())
|
||||
else:
|
||||
raise NewLayoutException(
|
||||
"Package tarball was created from an install prefix with a different directory layout "
|
||||
"and an older buildcache create implementation. It cannot be relocated."
|
||||
)
|
||||
hash_to_old_prefix = dict()
|
||||
|
||||
prefix_to_prefix: Dict[str, str] = {}
|
||||
if old_rel_prefix != new_rel_prefix and not hash_to_old_prefix:
|
||||
msg = "Package tarball was created from an install "
|
||||
msg += "prefix with a different directory layout and an older "
|
||||
msg += "buildcache create implementation. It cannot be relocated."
|
||||
raise NewLayoutException(msg)
|
||||
|
||||
if "sbang_install_path" in buildinfo:
|
||||
old_sbang_install_path = str(buildinfo["sbang_install_path"])
|
||||
prefix_to_prefix[old_sbang_install_path] = spack.hooks.sbang.sbang_install_path()
|
||||
# Spurious replacements (e.g. sbang) will cause issues with binaries
|
||||
# For example, the new sbang can be longer than the old one.
|
||||
# Hence 2 dictionaries are maintained here.
|
||||
prefix_to_prefix_text = collections.OrderedDict()
|
||||
prefix_to_prefix_bin = collections.OrderedDict()
|
||||
|
||||
# First match specific prefix paths. Possibly the *local* install prefix of some dependency is
|
||||
# in an upstream, so we cannot assume the original spack store root can be mapped uniformly to
|
||||
# the new spack store root.
|
||||
if old_sbang_install_path:
|
||||
install_path = spack.hooks.sbang.sbang_install_path()
|
||||
prefix_to_prefix_text[old_sbang_install_path] = install_path
|
||||
|
||||
# If the spec is spliced, we need to handle the simultaneous mapping from the old install_tree
|
||||
# to the new install_tree and from the build_spec to the spliced spec. Because foo.build_spec
|
||||
# is foo for any non-spliced spec, we can simplify by checking for spliced-in nodes by checking
|
||||
# for nodes not in the build_spec without any explicit check for whether the spec is spliced.
|
||||
# An analog in this algorithm is any spec that shares a name or provides the same virtuals in
|
||||
# the context of the relevant root spec. This ensures that the analog for a spec s is the spec
|
||||
# that s replaced when we spliced.
|
||||
relocation_specs = specs_to_relocate(spec)
|
||||
# First match specific prefix paths. Possibly the *local* install prefix
|
||||
# of some dependency is in an upstream, so we cannot assume the original
|
||||
# spack store root can be mapped uniformly to the new spack store root.
|
||||
#
|
||||
# If the spec is spliced, we need to handle the simultaneous mapping
|
||||
# from the old install_tree to the new install_tree and from the build_spec
|
||||
# to the spliced spec.
|
||||
# Because foo.build_spec is foo for any non-spliced spec, we can simplify
|
||||
# by checking for spliced-in nodes by checking for nodes not in the build_spec
|
||||
# without any explicit check for whether the spec is spliced.
|
||||
# An analog in this algorithm is any spec that shares a name or provides the same virtuals
|
||||
# in the context of the relevant root spec. This ensures that the analog for a spec s
|
||||
# is the spec that s replaced when we spliced.
|
||||
relocation_specs = deps_to_relocate(spec)
|
||||
build_spec_ids = set(id(s) for s in spec.build_spec.traverse(deptype=dt.ALL & ~dt.BUILD))
|
||||
for s in relocation_specs:
|
||||
analog = s
|
||||
@@ -2228,66 +2248,98 @@ def relocate_package(spec: spack.spec.Spec) -> None:
|
||||
lookup_dag_hash = analog.dag_hash()
|
||||
if lookup_dag_hash in hash_to_old_prefix:
|
||||
old_dep_prefix = hash_to_old_prefix[lookup_dag_hash]
|
||||
prefix_to_prefix[old_dep_prefix] = str(s.prefix)
|
||||
prefix_to_prefix_bin[old_dep_prefix] = str(s.prefix)
|
||||
prefix_to_prefix_text[old_dep_prefix] = str(s.prefix)
|
||||
|
||||
# Only then add the generic fallback of install prefix -> install prefix.
|
||||
prefix_to_prefix[old_layout_root] = str(spack.store.STORE.layout.root)
|
||||
prefix_to_prefix_text[old_prefix] = new_prefix
|
||||
prefix_to_prefix_bin[old_prefix] = new_prefix
|
||||
prefix_to_prefix_text[old_layout_root] = new_layout_root
|
||||
prefix_to_prefix_bin[old_layout_root] = new_layout_root
|
||||
|
||||
# Delete identity mappings from prefix_to_prefix
|
||||
prefix_to_prefix = {k: v for k, v in prefix_to_prefix.items() if k != v}
|
||||
# This is vestigial code for the *old* location of sbang. Previously,
|
||||
# sbang was a bash script, and it lived in the spack prefix. It is
|
||||
# now a POSIX script that lives in the install prefix. Old packages
|
||||
# will have the old sbang location in their shebangs.
|
||||
orig_sbang = "#!/bin/bash {0}/bin/sbang".format(old_spack_prefix)
|
||||
new_sbang = spack.hooks.sbang.sbang_shebang_line()
|
||||
prefix_to_prefix_text[orig_sbang] = new_sbang
|
||||
|
||||
# If there's nothing to relocate, we're done.
|
||||
if not prefix_to_prefix:
|
||||
return
|
||||
tty.debug("Relocating package from", "%s to %s." % (old_layout_root, new_layout_root))
|
||||
|
||||
for old, new in prefix_to_prefix.items():
|
||||
tty.debug(f"Relocating: {old} => {new}.")
|
||||
# Old archives maybe have hardlinks repeated.
|
||||
dedupe_hardlinks_if_necessary(workdir, buildinfo)
|
||||
|
||||
# Old archives may have hardlinks repeated.
|
||||
dedupe_hardlinks_if_necessary(spec_prefix, buildinfo)
|
||||
def is_backup_file(file):
|
||||
return file.endswith("~")
|
||||
|
||||
# Text files containing the prefix text
|
||||
textfiles = [os.path.join(spec_prefix, f) for f in buildinfo["relocate_textfiles"]]
|
||||
binaries = [os.path.join(spec_prefix, f) for f in buildinfo.get("relocate_binaries")]
|
||||
links = [os.path.join(spec_prefix, f) for f in buildinfo.get("relocate_links", [])]
|
||||
text_names = list()
|
||||
for filename in buildinfo["relocate_textfiles"]:
|
||||
text_name = os.path.join(workdir, filename)
|
||||
# Don't add backup files generated by filter_file during install step.
|
||||
if not is_backup_file(text_name):
|
||||
text_names.append(text_name)
|
||||
|
||||
platform = spack.platforms.by_name(spec.platform)
|
||||
if "macho" in platform.binary_formats:
|
||||
relocate.relocate_macho_binaries(binaries, prefix_to_prefix)
|
||||
elif "elf" in platform.binary_formats:
|
||||
relocate.relocate_elf_binaries(binaries, prefix_to_prefix)
|
||||
# If we are not installing back to the same install tree do the relocation
|
||||
if old_prefix != new_prefix:
|
||||
files_to_relocate = [
|
||||
os.path.join(workdir, filename) for filename in buildinfo.get("relocate_binaries")
|
||||
]
|
||||
# If the buildcache was not created with relativized rpaths
|
||||
# do the relocation of path in binaries
|
||||
platform = spack.platforms.by_name(spec.platform)
|
||||
if "macho" in platform.binary_formats:
|
||||
relocate.relocate_macho_binaries(
|
||||
files_to_relocate,
|
||||
old_layout_root,
|
||||
new_layout_root,
|
||||
prefix_to_prefix_bin,
|
||||
rel,
|
||||
old_prefix,
|
||||
new_prefix,
|
||||
)
|
||||
elif "elf" in platform.binary_formats and not rel:
|
||||
# The new ELF dynamic section relocation logic only handles absolute to
|
||||
# absolute relocation.
|
||||
relocate.new_relocate_elf_binaries(files_to_relocate, prefix_to_prefix_bin)
|
||||
elif "elf" in platform.binary_formats and rel:
|
||||
relocate.relocate_elf_binaries(
|
||||
files_to_relocate,
|
||||
old_layout_root,
|
||||
new_layout_root,
|
||||
prefix_to_prefix_bin,
|
||||
rel,
|
||||
old_prefix,
|
||||
new_prefix,
|
||||
)
|
||||
|
||||
relocate.relocate_links(links, prefix_to_prefix)
|
||||
relocate.relocate_text(textfiles, prefix_to_prefix)
|
||||
changed_files = relocate.relocate_text_bin(binaries, prefix_to_prefix)
|
||||
# Relocate links to the new install prefix
|
||||
links = [os.path.join(workdir, f) for f in buildinfo.get("relocate_links", [])]
|
||||
relocate.relocate_links(links, prefix_to_prefix_bin)
|
||||
|
||||
# Add ad-hoc signatures to patched macho files when on macOS.
|
||||
if "macho" in platform.binary_formats and sys.platform == "darwin":
|
||||
codesign = which("codesign")
|
||||
if not codesign:
|
||||
return
|
||||
for binary in changed_files:
|
||||
# preserve the original inode by running codesign on a copy
|
||||
with fsys.edit_in_place_through_temporary_file(binary) as tmp_binary:
|
||||
codesign("-fs-", tmp_binary)
|
||||
# For all buildcaches
|
||||
# relocate the install prefixes in text files including dependencies
|
||||
relocate.relocate_text(text_names, prefix_to_prefix_text)
|
||||
|
||||
install_manifest = os.path.join(
|
||||
spec.prefix,
|
||||
spack.store.STORE.layout.metadata_dir,
|
||||
spack.store.STORE.layout.manifest_file_name,
|
||||
)
|
||||
if not os.path.exists(install_manifest):
|
||||
spec_id = spec.format("{name}/{hash:7}")
|
||||
tty.warn("No manifest file in tarball for spec %s" % spec_id)
|
||||
# relocate the install prefixes in binary files including dependencies
|
||||
changed_files = relocate.relocate_text_bin(files_to_relocate, prefix_to_prefix_bin)
|
||||
|
||||
# overwrite old metadata with new
|
||||
if spec.spliced:
|
||||
# rewrite spec on disk
|
||||
spack.store.STORE.layout.write_spec(spec, spack.store.STORE.layout.spec_file_path(spec))
|
||||
# Add ad-hoc signatures to patched macho files when on macOS.
|
||||
if "macho" in platform.binary_formats and sys.platform == "darwin":
|
||||
codesign = which("codesign")
|
||||
if not codesign:
|
||||
return
|
||||
for binary in changed_files:
|
||||
# preserve the original inode by running codesign on a copy
|
||||
with fsys.edit_in_place_through_temporary_file(binary) as tmp_binary:
|
||||
codesign("-fs-", tmp_binary)
|
||||
|
||||
# de-cache the install manifest
|
||||
with contextlib.suppress(FileNotFoundError):
|
||||
os.unlink(install_manifest)
|
||||
# If we are installing back to the same location
|
||||
# relocate the sbang location if the spack directory changed
|
||||
else:
|
||||
if old_spack_prefix != new_spack_prefix:
|
||||
relocate.relocate_text(text_names, prefix_to_prefix_text)
|
||||
|
||||
|
||||
def _extract_inner_tarball(spec, filename, extract_to, signature_required: bool, remote_checksum):
|
||||
@@ -2455,6 +2507,15 @@ def extract_tarball(spec, download_result, force=False, timer=timer.NULL_TIMER):
|
||||
except Exception as e:
|
||||
shutil.rmtree(spec.prefix, ignore_errors=True)
|
||||
raise e
|
||||
else:
|
||||
manifest_file = os.path.join(
|
||||
spec.prefix,
|
||||
spack.store.STORE.layout.metadata_dir,
|
||||
spack.store.STORE.layout.manifest_file_name,
|
||||
)
|
||||
if not os.path.exists(manifest_file):
|
||||
spec_id = spec.format("{name}/{hash:7}")
|
||||
tty.warn("No manifest file in tarball for spec %s" % spec_id)
|
||||
finally:
|
||||
if tmpdir:
|
||||
shutil.rmtree(tmpdir, ignore_errors=True)
|
||||
@@ -2529,10 +2590,10 @@ def install_root_node(
|
||||
allow_missing: when true, allows installing a node with missing dependencies
|
||||
"""
|
||||
# Early termination
|
||||
if spec.external or not spec.concrete:
|
||||
warnings.warn("Skipping external or abstract spec {0}".format(spec.format()))
|
||||
if spec.external or spec.virtual:
|
||||
warnings.warn("Skipping external or virtual package {0}".format(spec.format()))
|
||||
return
|
||||
elif spec.installed and not force:
|
||||
elif spec.concrete and spec.installed and not force:
|
||||
warnings.warn("Package for spec {0} already installed.".format(spec.format()))
|
||||
return
|
||||
|
||||
@@ -2559,6 +2620,10 @@ def install_root_node(
|
||||
tty.msg('Installing "{0}" from a buildcache'.format(spec.format()))
|
||||
extract_tarball(spec, download_result, force)
|
||||
spec.package.windows_establish_runtime_linkage()
|
||||
if spec.spliced: # overwrite old metadata with new
|
||||
spack.store.STORE.layout.write_spec(
|
||||
spec, spack.store.STORE.layout.spec_file_path(spec)
|
||||
)
|
||||
spack.hooks.post_install(spec, False)
|
||||
spack.store.STORE.db.add(spec, allow_missing=allow_missing)
|
||||
|
||||
@@ -2596,14 +2661,11 @@ def try_direct_fetch(spec, mirrors=None):
|
||||
)
|
||||
try:
|
||||
_, _, fs = web_util.read_from_url(buildcache_fetch_url_signed_json)
|
||||
specfile_contents = codecs.getreader("utf-8")(fs).read()
|
||||
specfile_is_signed = True
|
||||
except (web_util.SpackWebError, OSError) as e1:
|
||||
except web_util.SpackWebError as e1:
|
||||
try:
|
||||
_, _, fs = web_util.read_from_url(buildcache_fetch_url_json)
|
||||
specfile_contents = codecs.getreader("utf-8")(fs).read()
|
||||
specfile_is_signed = False
|
||||
except (web_util.SpackWebError, OSError) as e2:
|
||||
except web_util.SpackWebError as e2:
|
||||
tty.debug(
|
||||
f"Did not find {specfile_name} on {buildcache_fetch_url_signed_json}",
|
||||
e1,
|
||||
@@ -2613,6 +2675,7 @@ def try_direct_fetch(spec, mirrors=None):
|
||||
f"Did not find {specfile_name} on {buildcache_fetch_url_json}", e2, level=2
|
||||
)
|
||||
continue
|
||||
specfile_contents = codecs.getreader("utf-8")(fs).read()
|
||||
|
||||
# read the spec from the build cache file. All specs in build caches
|
||||
# are concrete (as they are built) so we need to mark this spec
|
||||
@@ -2706,9 +2769,8 @@ def get_keys(install=False, trust=False, force=False, mirrors=None):
|
||||
|
||||
try:
|
||||
_, _, json_file = web_util.read_from_url(keys_index)
|
||||
json_index = sjson.load(json_file)
|
||||
except (web_util.SpackWebError, OSError, ValueError) as url_err:
|
||||
# TODO: avoid repeated request
|
||||
json_index = sjson.load(codecs.getreader("utf-8")(json_file))
|
||||
except web_util.SpackWebError as url_err:
|
||||
if web_util.url_exists(keys_index):
|
||||
tty.error(
|
||||
f"Unable to find public keys in {url_util.format(fetch_url)},"
|
||||
@@ -2955,14 +3017,14 @@ def __init__(self, url, local_hash, urlopen=web_util.urlopen):

def get_remote_hash(self):
# Failure to fetch index.json.hash is not fatal
url_index_hash = url_util.join(self.url, BUILD_CACHE_RELATIVE_PATH, INDEX_HASH_FILE)
url_index_hash = url_util.join(self.url, BUILD_CACHE_RELATIVE_PATH, "index.json.hash")
try:
response = self.urlopen(urllib.request.Request(url_index_hash, headers=self.headers))
remote_hash = response.read(64)
except OSError:
except (TimeoutError, urllib.error.URLError):
return None

# Validate the hash
remote_hash = response.read(64)
if not re.match(rb"[a-f\d]{64}$", remote_hash):
return None
return remote_hash.decode("utf-8")
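The 64-byte read and regex above simply check that the response looks like a lowercase SHA-256 hex digest before it is trusted; the same validation in isolation:

import re

HEX_SHA256 = re.compile(rb"[a-f\d]{64}$")

assert HEX_SHA256.match(b"a" * 64)                            # plausible digest
assert not HEX_SHA256.match(b"a" * 63)                        # too short
assert not HEX_SHA256.match(b"<html>error page" + b"x" * 48)  # not a digest at all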
@@ -2976,17 +3038,17 @@ def conditional_fetch(self) -> FetchIndexResult:
|
||||
return FetchIndexResult(etag=None, hash=None, data=None, fresh=True)
|
||||
|
||||
# Otherwise, download index.json
|
||||
url_index = url_util.join(self.url, BUILD_CACHE_RELATIVE_PATH, spack_db.INDEX_JSON_FILE)
|
||||
url_index = url_util.join(self.url, BUILD_CACHE_RELATIVE_PATH, "index.json")
|
||||
|
||||
try:
|
||||
response = self.urlopen(urllib.request.Request(url_index, headers=self.headers))
|
||||
except OSError as e:
|
||||
raise FetchIndexError(f"Could not fetch index from {url_index}", e) from e
|
||||
except (TimeoutError, urllib.error.URLError) as e:
|
||||
raise FetchIndexError("Could not fetch index from {}".format(url_index), e) from e
|
||||
|
||||
try:
|
||||
result = codecs.getreader("utf-8")(response).read()
|
||||
except (ValueError, OSError) as e:
|
||||
raise FetchIndexError(f"Remote index {url_index} is invalid") from e
|
||||
except ValueError as e:
|
||||
raise FetchIndexError("Remote index {} is invalid".format(url_index), e) from e
|
||||
|
||||
computed_hash = compute_hash(result)
|
||||
|
||||
@@ -3020,7 +3082,7 @@ def __init__(self, url, etag, urlopen=web_util.urlopen):
|
||||
|
||||
def conditional_fetch(self) -> FetchIndexResult:
|
||||
# Just do a conditional fetch immediately
|
||||
url = url_util.join(self.url, BUILD_CACHE_RELATIVE_PATH, spack_db.INDEX_JSON_FILE)
|
||||
url = url_util.join(self.url, BUILD_CACHE_RELATIVE_PATH, "index.json")
|
||||
headers = {"User-Agent": web_util.SPACK_USER_AGENT, "If-None-Match": f'"{self.etag}"'}
|
||||
|
||||
try:
|
||||
@@ -3030,12 +3092,12 @@ def conditional_fetch(self) -> FetchIndexResult:
|
||||
# Not modified; that means fresh.
|
||||
return FetchIndexResult(etag=None, hash=None, data=None, fresh=True)
|
||||
raise FetchIndexError(f"Could not fetch index {url}", e) from e
|
||||
except OSError as e: # URLError, socket.timeout, etc.
|
||||
except (TimeoutError, urllib.error.URLError) as e:
|
||||
raise FetchIndexError(f"Could not fetch index {url}", e) from e
|
||||
|
||||
try:
|
||||
result = codecs.getreader("utf-8")(response).read()
|
||||
except (ValueError, OSError) as e:
|
||||
except ValueError as e:
|
||||
raise FetchIndexError(f"Remote index {url} is invalid", e) from e
|
||||
|
||||
headers = response.headers
|
||||
@@ -3067,11 +3129,11 @@ def conditional_fetch(self) -> FetchIndexResult:
|
||||
headers={"Accept": "application/vnd.oci.image.manifest.v1+json"},
|
||||
)
|
||||
)
|
||||
except OSError as e:
|
||||
except (TimeoutError, urllib.error.URLError) as e:
|
||||
raise FetchIndexError(f"Could not fetch manifest from {url_manifest}", e) from e
|
||||
|
||||
try:
|
||||
manifest = json.load(response)
|
||||
manifest = json.loads(response.read())
|
||||
except Exception as e:
|
||||
raise FetchIndexError(f"Remote index {url_manifest} is invalid", e) from e
|
||||
|
||||
@@ -3086,16 +3148,14 @@ def conditional_fetch(self) -> FetchIndexResult:
|
||||
return FetchIndexResult(etag=None, hash=None, data=None, fresh=True)
|
||||
|
||||
# Otherwise fetch the blob / index.json
|
||||
try:
|
||||
response = self.urlopen(
|
||||
urllib.request.Request(
|
||||
url=self.ref.blob_url(index_digest),
|
||||
headers={"Accept": "application/vnd.oci.image.layer.v1.tar+gzip"},
|
||||
)
|
||||
response = self.urlopen(
|
||||
urllib.request.Request(
|
||||
url=self.ref.blob_url(index_digest),
|
||||
headers={"Accept": "application/vnd.oci.image.layer.v1.tar+gzip"},
|
||||
)
|
||||
result = codecs.getreader("utf-8")(response).read()
|
||||
except (OSError, ValueError) as e:
|
||||
raise FetchIndexError(f"Remote index {url_manifest} is invalid", e) from e
|
||||
)
|
||||
|
||||
result = codecs.getreader("utf-8")(response).read()
|
||||
|
||||
# Make sure the blob we download has the advertised hash
|
||||
if compute_hash(result) != index_digest.digest:
|
||||
|
@@ -5,14 +5,12 @@
import fnmatch
import glob
import importlib
import os
import os.path
import re
import sys
import sysconfig
import warnings
from typing import Optional, Sequence, Union

from typing_extensions import TypedDict
from typing import Dict, Optional, Sequence, Union

import archspec.cpu

@@ -20,17 +18,13 @@
from llnl.util import tty

import spack.platforms
import spack.spec
import spack.store
import spack.util.environment
import spack.util.executable

from .config import spec_for_current_python


class QueryInfo(TypedDict, total=False):
spec: spack.spec.Spec
command: spack.util.executable.Executable
QueryInfo = Dict[str, "spack.spec.Spec"]


def _python_import(module: str) -> bool:
@@ -217,9 +211,7 @@ def _executables_in_store(
):
spack.util.environment.path_put_first("PATH", [bin_dir])
if query_info is not None:
query_info["command"] = spack.util.executable.which(
*executables, path=bin_dir, required=True
)
query_info["command"] = spack.util.executable.which(*executables, path=bin_dir)
query_info["spec"] = concrete_spec
return True
return False
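One side of the hunk above replaces the loose QueryInfo = Dict[str, ...] alias with a TypedDict, so callers get typed "spec" and "command" keys. A small sketch of how such a query_info dictionary is filled and read back; the values here are plain strings standing in for the real Spec and Executable objects, and the function is a stand-in rather than Spack's _executables_in_store:

from typing import Optional, TypedDict  # Spack imports TypedDict from typing_extensions

class QueryInfo(TypedDict, total=False):
    spec: str      # spack.spec.Spec in the real code
    command: str   # spack.util.executable.Executable in the real code

def executables_in_store_stub(executables, query_info: Optional[QueryInfo] = None) -> bool:
    found = "gmake" in executables
    if found and query_info is not None:
        query_info["command"] = "/example/store/gmake-4.4.1/bin/gmake"
        query_info["spec"] = "gmake@4.4.1"
    return found

info: QueryInfo = {}
if executables_in_store_stub(("gmake",), query_info=info):
    print(info["spec"], "->", info["command"])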
@@ -27,9 +27,9 @@
class ClingoBootstrapConcretizer:
def __init__(self, configuration):
self.host_platform = spack.platforms.host()
self.host_os = self.host_platform.default_operating_system()
self.host_os = self.host_platform.operating_system("frontend")
self.host_target = archspec.cpu.host().family
self.host_architecture = spack.spec.ArchSpec.default_arch()
self.host_architecture = spack.spec.ArchSpec.frontend_arch()
self.host_architecture.target = str(self.host_target)
self.host_compiler = self._valid_compiler_or_raise()
self.host_python = self.python_external_spec()
@@ -4,7 +4,7 @@
"""Manage configuration swapping for bootstrapping purposes"""

import contextlib
import os
import os.path
import sys
from typing import Any, Dict, Generator, MutableSequence, Sequence

@@ -141,7 +141,7 @@ def _bootstrap_config_scopes() -> Sequence["spack.config.ConfigScope"]:

def _add_compilers_if_missing() -> None:
arch = spack.spec.ArchSpec.default_arch()
arch = spack.spec.ArchSpec.frontend_arch()
if not spack.compilers.compilers_for_arch(arch):
spack.compilers.find_compilers()
@@ -25,6 +25,7 @@
|
||||
import functools
|
||||
import json
|
||||
import os
|
||||
import os.path
|
||||
import sys
|
||||
import uuid
|
||||
from typing import Any, Callable, Dict, List, Optional, Tuple
|
||||
@@ -33,10 +34,8 @@
|
||||
from llnl.util.lang import GroupedExceptionHandler
|
||||
|
||||
import spack.binary_distribution
|
||||
import spack.concretize
|
||||
import spack.config
|
||||
import spack.detection
|
||||
import spack.error
|
||||
import spack.mirrors.mirror
|
||||
import spack.platforms
|
||||
import spack.spec
|
||||
@@ -45,17 +44,10 @@
|
||||
import spack.util.executable
|
||||
import spack.util.path
|
||||
import spack.util.spack_yaml
|
||||
import spack.util.url
|
||||
import spack.version
|
||||
from spack.installer import PackageInstaller
|
||||
|
||||
from ._common import (
|
||||
QueryInfo,
|
||||
_executables_in_store,
|
||||
_python_import,
|
||||
_root_spec,
|
||||
_try_import_from_store,
|
||||
)
|
||||
from ._common import _executables_in_store, _python_import, _root_spec, _try_import_from_store
|
||||
from .clingo import ClingoBootstrapConcretizer
|
||||
from .config import spack_python_interpreter, spec_for_current_python
|
||||
|
||||
@@ -97,12 +89,8 @@ def __init__(self, conf: ConfigDictionary) -> None:
self.name = conf["name"]
self.metadata_dir = spack.util.path.canonicalize_path(conf["metadata"])

# Check for relative paths, and turn them into absolute paths
# root is the metadata_dir
maybe_url = conf["info"]["url"]
if spack.util.url.is_path_instead_of_url(maybe_url) and not os.path.isabs(maybe_url):
maybe_url = os.path.join(self.metadata_dir, maybe_url)
self.url = spack.mirrors.mirror.Mirror(maybe_url).fetch_url
# Promote (relative) paths to file urls
self.url = spack.mirrors.mirror.Mirror(conf["info"]["url"]).fetch_url

@property
def mirror_scope(self) -> spack.config.InternalConfigScope:
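A small sketch, assuming hypothetical paths, of the relative-URL handling added in the hunk above: a relative info:url is joined onto the metadata directory before being wrapped in a Mirror to obtain a fetch URL.

    # Hypothetical values; not taken from any real metadata file in this PR.
    metadata_dir = "/home/user/.spack/bootstrap/metadata/binaries"
    maybe_url = "../blobs"  # relative path found under conf["info"]["url"]
    if spack.util.url.is_path_instead_of_url(maybe_url) and not os.path.isabs(maybe_url):
        maybe_url = os.path.join(metadata_dir, maybe_url)
    url = spack.mirrors.mirror.Mirror(maybe_url).fetch_url  # promoted to a file:// URL
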
@@ -146,7 +134,7 @@ class BuildcacheBootstrapper(Bootstrapper):
|
||||
|
||||
def __init__(self, conf) -> None:
|
||||
super().__init__(conf)
|
||||
self.last_search: Optional[QueryInfo] = None
|
||||
self.last_search: Optional[ConfigDictionary] = None
|
||||
self.config_scope_name = f"bootstrap_buildcache-{uuid.uuid4()}"
|
||||
|
||||
@staticmethod
|
||||
@@ -223,14 +211,14 @@ def _install_and_test(
|
||||
for _, pkg_hash, pkg_sha256 in item["binaries"]:
|
||||
self._install_by_hash(pkg_hash, pkg_sha256, bincache_platform)
|
||||
|
||||
info: QueryInfo = {}
|
||||
info: ConfigDictionary = {}
|
||||
if test_fn(query_spec=abstract_spec, query_info=info):
|
||||
self.last_search = info
|
||||
return True
|
||||
return False
|
||||
|
||||
def try_import(self, module: str, abstract_spec_str: str) -> bool:
|
||||
info: QueryInfo
|
||||
info: ConfigDictionary
|
||||
test_fn, info = functools.partial(_try_import_from_store, module), {}
|
||||
if test_fn(query_spec=abstract_spec_str, query_info=info):
|
||||
return True
|
||||
@@ -243,7 +231,7 @@ def try_import(self, module: str, abstract_spec_str: str) -> bool:
|
||||
return self._install_and_test(abstract_spec, bincache_platform, data, test_fn)
|
||||
|
||||
def try_search_path(self, executables: Tuple[str], abstract_spec_str: str) -> bool:
|
||||
info: QueryInfo
|
||||
info: ConfigDictionary
|
||||
test_fn, info = functools.partial(_executables_in_store, executables), {}
|
||||
if test_fn(query_spec=abstract_spec_str, query_info=info):
|
||||
self.last_search = info
|
||||
@@ -261,11 +249,11 @@ class SourceBootstrapper(Bootstrapper):
|
||||
|
||||
def __init__(self, conf) -> None:
|
||||
super().__init__(conf)
|
||||
self.last_search: Optional[QueryInfo] = None
|
||||
self.last_search: Optional[ConfigDictionary] = None
|
||||
self.config_scope_name = f"bootstrap_source-{uuid.uuid4()}"
|
||||
|
||||
def try_import(self, module: str, abstract_spec_str: str) -> bool:
|
||||
info: QueryInfo = {}
|
||||
info: ConfigDictionary = {}
|
||||
if _try_import_from_store(module, abstract_spec_str, query_info=info):
|
||||
self.last_search = info
|
||||
return True
|
||||
@@ -282,22 +270,17 @@ def try_import(self, module: str, abstract_spec_str: str) -> bool:
|
||||
bootstrapper = ClingoBootstrapConcretizer(configuration=spack.config.CONFIG)
|
||||
concrete_spec = bootstrapper.concretize()
|
||||
else:
|
||||
abstract_spec = spack.spec.Spec(
|
||||
concrete_spec = spack.spec.Spec(
|
||||
abstract_spec_str + " ^" + spec_for_current_python()
|
||||
)
|
||||
concrete_spec = spack.concretize.concretize_one(abstract_spec)
|
||||
concrete_spec.concretize()
|
||||
|
||||
msg = "[BOOTSTRAP MODULE {0}] Try installing '{1}' from sources"
|
||||
tty.debug(msg.format(module, abstract_spec_str))
|
||||
|
||||
# Install the spec that should make the module importable
|
||||
with spack.config.override(self.mirror_scope):
|
||||
PackageInstaller(
|
||||
[concrete_spec.package],
|
||||
fail_fast=True,
|
||||
package_use_cache=False,
|
||||
dependencies_use_cache=False,
|
||||
).install()
|
||||
PackageInstaller([concrete_spec.package], fail_fast=True).install()
|
||||
|
||||
if _try_import_from_store(module, query_spec=concrete_spec, query_info=info):
|
||||
self.last_search = info
|
||||
@@ -305,7 +288,7 @@ def try_import(self, module: str, abstract_spec_str: str) -> bool:
|
||||
return False
|
||||
|
||||
def try_search_path(self, executables: Tuple[str], abstract_spec_str: str) -> bool:
|
||||
info: QueryInfo = {}
|
||||
info: ConfigDictionary = {}
|
||||
if _executables_in_store(executables, abstract_spec_str, query_info=info):
|
||||
self.last_search = info
|
||||
return True
|
||||
@@ -316,7 +299,7 @@ def try_search_path(self, executables: Tuple[str], abstract_spec_str: str) -> bo
|
||||
# might reduce compilation time by a fair amount
|
||||
_add_externals_if_missing()
|
||||
|
||||
concrete_spec = spack.concretize.concretize_one(abstract_spec_str)
|
||||
concrete_spec = spack.spec.Spec(abstract_spec_str).concretized()
|
||||
msg = "[BOOTSTRAP] Try installing '{0}' from sources"
|
||||
tty.debug(msg.format(abstract_spec_str))
|
||||
with spack.config.override(self.mirror_scope):
|
||||
@@ -333,9 +316,11 @@ def create_bootstrapper(conf: ConfigDictionary):
return _bootstrap_methods[btype](conf)


def source_is_enabled(conf: ConfigDictionary) -> bool:
"""Returns true if the source is not enabled for bootstrapping"""
return spack.config.get("bootstrap:trusted").get(conf["name"], False)
def source_is_enabled_or_raise(conf: ConfigDictionary):
"""Raise ValueError if the source is not enabled for bootstrapping"""
trusted, name = spack.config.get("bootstrap:trusted"), conf["name"]
if not trusted.get(name, False):
raise ValueError("source is not trusted")


def ensure_module_importable_or_raise(module: str, abstract_spec: Optional[str] = None):
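Both variants above consult the bootstrap:trusted configuration section; a hedged sketch of what that lookup sees (the source name is a placeholder, and the section is normally managed through the spack bootstrap command rather than edited by hand).

    # Sketch only: the mapping of source names to booleans is illustrative.
    trusted = spack.config.get("bootstrap:trusted")  # e.g. {"github-actions": True}
    if not trusted.get("github-actions", False):
        raise ValueError("source is not trusted")
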
@@ -365,24 +350,24 @@ def ensure_module_importable_or_raise(module: str, abstract_spec: Optional[str]
|
||||
exception_handler = GroupedExceptionHandler()
|
||||
|
||||
for current_config in bootstrapping_sources():
|
||||
if not source_is_enabled(current_config):
|
||||
continue
|
||||
|
||||
with exception_handler.forward(current_config["name"], Exception):
|
||||
if create_bootstrapper(current_config).try_import(module, abstract_spec):
|
||||
source_is_enabled_or_raise(current_config)
|
||||
current_bootstrapper = create_bootstrapper(current_config)
|
||||
if current_bootstrapper.try_import(module, abstract_spec):
|
||||
return
|
||||
|
||||
assert exception_handler, (
|
||||
f"expected at least one exception to have been raised at this point: "
|
||||
f"while bootstrapping {module}"
|
||||
)
|
||||
msg = f'cannot bootstrap the "{module}" Python module '
|
||||
if abstract_spec:
|
||||
msg += f'from spec "{abstract_spec}" '
|
||||
|
||||
if not exception_handler:
|
||||
msg += ": no bootstrapping sources are enabled"
|
||||
elif spack.error.debug or spack.error.SHOW_BACKTRACE:
|
||||
if tty.is_debug():
|
||||
msg += exception_handler.grouped_message(with_tracebacks=True)
|
||||
else:
|
||||
msg += exception_handler.grouped_message(with_tracebacks=False)
|
||||
msg += "\nRun `spack --backtrace ...` for more detailed errors"
|
||||
msg += "\nRun `spack --debug ...` for more detailed errors"
|
||||
raise ImportError(msg)
|
||||
|
||||
|
||||
@@ -420,9 +405,8 @@ def ensure_executables_in_path_or_raise(
|
||||
exception_handler = GroupedExceptionHandler()
|
||||
|
||||
for current_config in bootstrapping_sources():
|
||||
if not source_is_enabled(current_config):
|
||||
continue
|
||||
with exception_handler.forward(current_config["name"], Exception):
|
||||
source_is_enabled_or_raise(current_config)
|
||||
current_bootstrapper = create_bootstrapper(current_config)
|
||||
if current_bootstrapper.try_search_path(executables, abstract_spec):
|
||||
# Additional environment variables needed
|
||||
@@ -430,7 +414,6 @@ def ensure_executables_in_path_or_raise(
|
||||
current_bootstrapper.last_search["spec"],
|
||||
current_bootstrapper.last_search["command"],
|
||||
)
|
||||
assert cmd is not None, "expected an Executable"
|
||||
cmd.add_default_envmod(
|
||||
spack.user_environment.environment_modifications_for_specs(
|
||||
concrete_spec, set_package_py_globals=False
|
||||
@@ -438,17 +421,18 @@ def ensure_executables_in_path_or_raise(
|
||||
)
|
||||
return cmd
|
||||
|
||||
assert exception_handler, (
|
||||
f"expected at least one exception to have been raised at this point: "
|
||||
f"while bootstrapping {executables_str}"
|
||||
)
|
||||
msg = f"cannot bootstrap any of the {executables_str} executables "
|
||||
if abstract_spec:
|
||||
msg += f'from spec "{abstract_spec}" '
|
||||
|
||||
if not exception_handler:
|
||||
msg += ": no bootstrapping sources are enabled"
|
||||
elif spack.error.debug or spack.error.SHOW_BACKTRACE:
|
||||
if tty.is_debug():
|
||||
msg += exception_handler.grouped_message(with_tracebacks=True)
|
||||
else:
|
||||
msg += exception_handler.grouped_message(with_tracebacks=False)
|
||||
msg += "\nRun `spack --backtrace ...` for more detailed errors"
|
||||
msg += "\nRun `spack --debug ...` for more detailed errors"
|
||||
raise RuntimeError(msg)
|
||||
|
||||
|
||||
|
@@ -63,6 +63,7 @@ def _missing(name: str, purpose: str, system_only: bool = True) -> str:
|
||||
|
||||
def _core_requirements() -> List[RequiredResponseType]:
|
||||
_core_system_exes = {
|
||||
"make": _missing("make", "required to build software from sources"),
|
||||
"patch": _missing("patch", "required to patch source code before building"),
|
||||
"tar": _missing("tar", "required to manage code archives"),
|
||||
"gzip": _missing("gzip", "required to compress/decompress code archives"),
|
||||
|
@@ -44,19 +44,7 @@
|
||||
from enum import Flag, auto
|
||||
from itertools import chain
|
||||
from multiprocessing.connection import Connection
|
||||
from typing import (
|
||||
Callable,
|
||||
Dict,
|
||||
List,
|
||||
Optional,
|
||||
Sequence,
|
||||
Set,
|
||||
TextIO,
|
||||
Tuple,
|
||||
Type,
|
||||
Union,
|
||||
overload,
|
||||
)
|
||||
from typing import Callable, Dict, List, Optional, Set, Tuple
|
||||
|
||||
import archspec.cpu
|
||||
|
||||
@@ -158,128 +146,48 @@ def get_effective_jobs(jobs, parallel=True, supports_jobserver=False):


class MakeExecutable(Executable):
"""Special callable executable object for make so the user can specify parallelism options
on a per-invocation basis.
"""Special callable executable object for make so the user can specify
parallelism options on a per-invocation basis. Specifying
'parallel' to the call will override whatever the package's
global setting is, so you can either default to true or false and
override particular calls. Specifying 'jobs_env' to a particular
call will name an environment variable which will be set to the
parallelism level (without affecting the normal invocation with
-j).
"""

def __init__(self, name: str, *, jobs: int, supports_jobserver: bool = True) -> None:
super().__init__(name)
def __init__(self, name, jobs, **kwargs):
supports_jobserver = kwargs.pop("supports_jobserver", True)
super().__init__(name, **kwargs)
self.supports_jobserver = supports_jobserver
self.jobs = jobs

@overload
|
||||
def __call__(
|
||||
self,
|
||||
*args: str,
|
||||
parallel: bool = ...,
|
||||
jobs_env: Optional[str] = ...,
|
||||
jobs_env_supports_jobserver: bool = ...,
|
||||
fail_on_error: bool = ...,
|
||||
ignore_errors: Union[int, Sequence[int]] = ...,
|
||||
ignore_quotes: Optional[bool] = ...,
|
||||
timeout: Optional[int] = ...,
|
||||
env: Optional[Union[Dict[str, str], EnvironmentModifications]] = ...,
|
||||
extra_env: Optional[Union[Dict[str, str], EnvironmentModifications]] = ...,
|
||||
input: Optional[TextIO] = ...,
|
||||
output: Union[Optional[TextIO], str] = ...,
|
||||
error: Union[Optional[TextIO], str] = ...,
|
||||
_dump_env: Optional[Dict[str, str]] = ...,
|
||||
) -> None: ...
|
||||
|
||||
@overload
|
||||
def __call__(
|
||||
self,
|
||||
*args: str,
|
||||
parallel: bool = ...,
|
||||
jobs_env: Optional[str] = ...,
|
||||
jobs_env_supports_jobserver: bool = ...,
|
||||
fail_on_error: bool = ...,
|
||||
ignore_errors: Union[int, Sequence[int]] = ...,
|
||||
ignore_quotes: Optional[bool] = ...,
|
||||
timeout: Optional[int] = ...,
|
||||
env: Optional[Union[Dict[str, str], EnvironmentModifications]] = ...,
|
||||
extra_env: Optional[Union[Dict[str, str], EnvironmentModifications]] = ...,
|
||||
input: Optional[TextIO] = ...,
|
||||
output: Union[Type[str], Callable] = ...,
|
||||
error: Union[Optional[TextIO], str, Type[str], Callable] = ...,
|
||||
_dump_env: Optional[Dict[str, str]] = ...,
|
||||
) -> str: ...
|
||||
|
||||
@overload
|
||||
def __call__(
|
||||
self,
|
||||
*args: str,
|
||||
parallel: bool = ...,
|
||||
jobs_env: Optional[str] = ...,
|
||||
jobs_env_supports_jobserver: bool = ...,
|
||||
fail_on_error: bool = ...,
|
||||
ignore_errors: Union[int, Sequence[int]] = ...,
|
||||
ignore_quotes: Optional[bool] = ...,
|
||||
timeout: Optional[int] = ...,
|
||||
env: Optional[Union[Dict[str, str], EnvironmentModifications]] = ...,
|
||||
extra_env: Optional[Union[Dict[str, str], EnvironmentModifications]] = ...,
|
||||
input: Optional[TextIO] = ...,
|
||||
output: Union[Optional[TextIO], str, Type[str], Callable] = ...,
|
||||
error: Union[Type[str], Callable] = ...,
|
||||
_dump_env: Optional[Dict[str, str]] = ...,
|
||||
) -> str: ...
|
||||
|
||||
def __call__(
|
||||
self,
|
||||
*args: str,
|
||||
parallel: bool = True,
|
||||
jobs_env: Optional[str] = None,
|
||||
jobs_env_supports_jobserver: bool = False,
|
||||
**kwargs,
|
||||
) -> Optional[str]:
|
||||
"""Runs this "make" executable in a subprocess.
|
||||
|
||||
Args:
|
||||
parallel: if False, parallelism is disabled
|
||||
jobs_env: environment variable that will be set to the current level of parallelism
|
||||
jobs_env_supports_jobserver: whether the jobs env supports a job server
|
||||
|
||||
For all the other **kwargs, refer to the base class.
|
||||
def __call__(self, *args, **kwargs):
|
||||
"""parallel, and jobs_env from kwargs are swallowed and used here;
|
||||
remaining arguments are passed through to the superclass.
|
||||
"""
|
||||
parallel = kwargs.pop("parallel", True)
|
||||
jobs_env = kwargs.pop("jobs_env", None)
|
||||
jobs_env_supports_jobserver = kwargs.pop("jobs_env_supports_jobserver", False)
|
||||
|
||||
jobs = get_effective_jobs(
|
||||
self.jobs, parallel=parallel, supports_jobserver=self.supports_jobserver
|
||||
)
|
||||
if jobs is not None:
|
||||
args = (f"-j{jobs}",) + args
|
||||
args = ("-j{0}".format(jobs),) + args
|
||||
|
||||
if jobs_env:
|
||||
# Caller wants us to set an environment variable to control the parallelism
|
||||
# Caller wants us to set an environment variable to
|
||||
# control the parallelism.
|
||||
jobs_env_jobs = get_effective_jobs(
|
||||
self.jobs, parallel=parallel, supports_jobserver=jobs_env_supports_jobserver
|
||||
)
|
||||
if jobs_env_jobs is not None:
|
||||
extra_env = kwargs.setdefault("extra_env", {})
|
||||
extra_env.update({jobs_env: str(jobs_env_jobs)})
|
||||
kwargs["extra_env"] = {jobs_env: str(jobs_env_jobs)}
|
||||
|
||||
return super().__call__(*args, **kwargs)
|
||||
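The docstrings above describe the per-invocation parallel and jobs_env switches; a short usage sketch from a hypothetical package build method (the target names and the environment variable are illustrative, not taken from this diff).

    # Illustrative only: `make` here is the MakeExecutable injected into the
    # package module by set_package_py_globals (see that hunk further down).
    def build(self, spec, prefix):
        make()                               # uses the package job count, prepends -jN
        make("docs", parallel=False)         # force a serial run for this one target
        make("check", jobs_env="TEST_JOBS")  # also export the job count via an env var
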
|
||||
|
||||
class UndeclaredDependencyError(spack.error.SpackError):
|
||||
"""Raised if a dependency is invoking an executable through a module global, without
|
||||
declaring a dependency on it.
|
||||
"""
|
||||
|
||||
|
||||
class DeprecatedExecutable:
|
||||
def __init__(self, pkg: str, exe: str, exe_pkg: str) -> None:
|
||||
self.pkg = pkg
|
||||
self.exe = exe
|
||||
self.exe_pkg = exe_pkg
|
||||
|
||||
def __call__(self, *args, **kwargs):
|
||||
raise UndeclaredDependencyError(
|
||||
f"{self.pkg} is using {self.exe} without declaring a dependency on {self.exe_pkg}"
|
||||
)
|
||||
|
||||
def add_default_env(self, key: str, value: str):
|
||||
self.__call__()
|
||||
|
||||
|
||||
def clean_environment():
|
||||
# Stuff in here sanitizes the build environment to eliminate
|
||||
# anything the user has set that may interfere. We apply it immediately
|
||||
@@ -301,13 +209,11 @@ def clean_environment():
|
||||
env.unset("CPLUS_INCLUDE_PATH")
|
||||
env.unset("OBJC_INCLUDE_PATH")
|
||||
|
||||
# prevent configure scripts from sourcing variables from config site file (AC_SITE_LOAD).
|
||||
env.set("CONFIG_SITE", os.devnull)
|
||||
env.unset("CMAKE_PREFIX_PATH")
|
||||
|
||||
env.unset("PYTHONPATH")
|
||||
env.unset("R_HOME")
|
||||
env.unset("R_ENVIRON")
|
||||
|
||||
env.unset("LUA_PATH")
|
||||
env.unset("LUA_CPATH")
|
||||
|
||||
@@ -715,9 +621,10 @@ def set_package_py_globals(pkg, context: Context = Context.BUILD):
|
||||
module.std_meson_args = spack.build_systems.meson.MesonBuilder.std_args(pkg)
|
||||
module.std_pip_args = spack.build_systems.python.PythonPipBuilder.std_args(pkg)
|
||||
|
||||
module.make = DeprecatedExecutable(pkg.name, "make", "gmake")
|
||||
module.gmake = DeprecatedExecutable(pkg.name, "gmake", "gmake")
|
||||
module.ninja = DeprecatedExecutable(pkg.name, "ninja", "ninja")
|
||||
# TODO: make these build deps that can be installed if not found.
|
||||
module.make = MakeExecutable("make", jobs)
|
||||
module.gmake = MakeExecutable("gmake", jobs)
|
||||
module.ninja = MakeExecutable("ninja", jobs, supports_jobserver=False)
|
||||
# TODO: johnwparent: add package or builder support to define these build tools
|
||||
# for now there is no entrypoint for builders to define these on their
|
||||
# own
|
||||
@@ -881,6 +788,21 @@ def get_rpath_deps(pkg: spack.package_base.PackageBase) -> List[spack.spec.Spec]
return _get_rpath_deps_from_spec(pkg.spec, pkg.transitive_rpaths)


def load_external_modules(pkg):
"""Traverse a package's spec DAG and load any external modules.

Traverse a package's dependencies and load any external modules
associated with them.

Args:
pkg (spack.package_base.PackageBase): package to load deps for
"""
for dep in list(pkg.spec.traverse()):
external_modules = dep.external_modules or []
for external_module in external_modules:
load_module(external_module)


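load_external_modules, restored above in its package-based form, loads whatever module names an external dependency carries; a hedged sketch of where those names come from (the dependency and module name are hypothetical, typically configured for an external spec in packages.yaml).

    # Illustrative only: an external dependency configured with a `modules:` entry
    # carries those names on its spec, and each one is loaded before the build.
    dep = pkg.spec["mpi"]                   # hypothetical external dependency
    for mod in dep.external_modules or []:  # e.g. ["openmpi/4.1.5"]
        load_module(mod)                    # same helper used by the function above
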
def setup_package(pkg, dirty, context: Context = Context.BUILD):
|
||||
"""Execute all environment setup routines."""
|
||||
if context not in (Context.BUILD, Context.TEST):
|
||||
@@ -931,7 +853,7 @@ def setup_package(pkg, dirty, context: Context = Context.BUILD):
|
||||
for mod in pkg.compiler.modules:
|
||||
load_module(mod)
|
||||
|
||||
load_external_modules(setup_context)
|
||||
load_external_modules(pkg)
|
||||
|
||||
# Make sure nothing's strange about the Spack environment.
|
||||
validate(env_mods, tty.warn)
|
||||
@@ -1220,21 +1142,6 @@ def _make_runnable(self, dep: spack.spec.Spec, env: EnvironmentModifications):
|
||||
env.prepend_path("PATH", bin_dir)
|
||||
|
||||
|
||||
def load_external_modules(context: SetupContext) -> None:
|
||||
"""Traverse a package's spec DAG and load any external modules.
|
||||
|
||||
Traverse a package's dependencies and load any external modules
|
||||
associated with them.
|
||||
|
||||
Args:
|
||||
context: A populated SetupContext object
|
||||
"""
|
||||
for spec, _ in context.external:
|
||||
external_modules = spec.external_modules or []
|
||||
for external_module in external_modules:
|
||||
load_module(external_module)
|
||||
|
||||
|
||||
def _setup_pkg_and_run(
|
||||
serialized_pkg: "spack.subprocess_context.PackageInstallContext",
|
||||
function: Callable,
|
||||
|
@@ -6,9 +6,7 @@
|
||||
import llnl.util.filesystem as fs
|
||||
|
||||
import spack.directives
|
||||
import spack.spec
|
||||
import spack.util.executable
|
||||
import spack.util.prefix
|
||||
|
||||
from .autotools import AutotoolsBuilder, AutotoolsPackage
|
||||
|
||||
@@ -19,18 +17,19 @@ class AspellBuilder(AutotoolsBuilder):
|
||||
to the Aspell extensions.
|
||||
"""
|
||||
|
||||
def configure(
|
||||
self,
|
||||
pkg: "AspellDictPackage", # type: ignore[override]
|
||||
spec: spack.spec.Spec,
|
||||
prefix: spack.util.prefix.Prefix,
|
||||
):
|
||||
def configure(self, pkg, spec, prefix):
|
||||
aspell = spec["aspell"].prefix.bin.aspell
|
||||
prezip = spec["aspell"].prefix.bin.prezip
|
||||
destdir = prefix
|
||||
|
||||
sh = spack.util.executable.Executable("/bin/sh")
|
||||
sh("./configure", "--vars", f"ASPELL={aspell}", f"PREZIP={prezip}", f"DESTDIR={destdir}")
|
||||
sh = spack.util.executable.which("sh")
|
||||
sh(
|
||||
"./configure",
|
||||
"--vars",
|
||||
"ASPELL={0}".format(aspell),
|
||||
"PREZIP={0}".format(prezip),
|
||||
"DESTDIR={0}".format(destdir),
|
||||
)
|
||||
|
||||
|
||||
# Aspell dictionaries install their bits into their prefix.lib
|
||||
|
@@ -2,6 +2,7 @@
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
import os
|
||||
import os.path
|
||||
import stat
|
||||
import subprocess
|
||||
from typing import Callable, List, Optional, Set, Tuple, Union
|
||||
@@ -355,13 +356,6 @@ def _do_patch_libtool_configure(self) -> None:
|
||||
)
|
||||
# Support Libtool 2.4.2 and older:
|
||||
x.filter(regex=r'^(\s*test \$p = "-R")(; then\s*)$', repl=r'\1 || test x-l = x"$p"\2')
|
||||
# Configure scripts generated with libtool < 2.5.4 have a faulty test for the
|
||||
# -single_module linker flag. A deprecation warning makes it think the default is
|
||||
# -multi_module, triggering it to use problematic linker flags (such as ld -r). The
|
||||
# linker default is `-single_module` from (ancient) macOS 10.4, so override by setting
|
||||
# `lt_cv_apple_cc_single_mod=yes`. See the fix in libtool commit
|
||||
# 82f7f52123e4e7e50721049f7fa6f9b870e09c9d.
|
||||
x.filter("lt_cv_apple_cc_single_mod=no", "lt_cv_apple_cc_single_mod=yes", string=True)
|
||||
|
||||
@spack.phase_callbacks.run_after("configure")
|
||||
def _do_patch_libtool(self) -> None:
|
||||
@@ -533,7 +527,7 @@ def build_directory(self) -> str:
|
||||
return build_dir
|
||||
|
||||
@spack.phase_callbacks.run_before("autoreconf")
|
||||
def _delete_configure_to_force_update(self) -> None:
|
||||
def delete_configure_to_force_update(self) -> None:
|
||||
if self.force_autoreconf:
|
||||
fs.force_remove(self.configure_abs_path)
|
||||
|
||||
@@ -546,7 +540,7 @@ def autoreconf_search_path_args(self) -> List[str]:
|
||||
return _autoreconf_search_path_args(self.spec)
|
||||
|
||||
@spack.phase_callbacks.run_after("autoreconf")
|
||||
def _set_configure_or_die(self) -> None:
|
||||
def set_configure_or_die(self) -> None:
|
||||
"""Ensure the presence of a "configure" script, or raise. If the "configure"
|
||||
is found, a module level attribute is set.
|
||||
|
||||
@@ -570,7 +564,10 @@ def configure_args(self) -> List[str]:
|
||||
return []
|
||||
|
||||
def autoreconf(
|
||||
self, pkg: AutotoolsPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
|
||||
self,
|
||||
pkg: spack.package_base.PackageBase,
|
||||
spec: spack.spec.Spec,
|
||||
prefix: spack.util.prefix.Prefix,
|
||||
) -> None:
|
||||
"""Not needed usually, configure should be already there"""
|
||||
|
||||
@@ -599,7 +596,10 @@ def autoreconf(
|
||||
self.pkg.module.autoreconf(*autoreconf_args)
|
||||
|
||||
def configure(
|
||||
self, pkg: AutotoolsPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
|
||||
self,
|
||||
pkg: spack.package_base.PackageBase,
|
||||
spec: spack.spec.Spec,
|
||||
prefix: spack.util.prefix.Prefix,
|
||||
) -> None:
|
||||
"""Run "configure", with the arguments specified by the builder and an
|
||||
appropriately set prefix.
|
||||
@@ -612,7 +612,10 @@ def configure(
|
||||
pkg.module.configure(*options)
|
||||
|
||||
def build(
|
||||
self, pkg: AutotoolsPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
|
||||
self,
|
||||
pkg: spack.package_base.PackageBase,
|
||||
spec: spack.spec.Spec,
|
||||
prefix: spack.util.prefix.Prefix,
|
||||
) -> None:
|
||||
"""Run "make" on the build targets specified by the builder."""
|
||||
# See https://autotools.io/automake/silent.html
|
||||
@@ -622,7 +625,10 @@ def build(
|
||||
pkg.module.make(*params)
|
||||
|
||||
def install(
|
||||
self, pkg: AutotoolsPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
|
||||
self,
|
||||
pkg: spack.package_base.PackageBase,
|
||||
spec: spack.spec.Spec,
|
||||
prefix: spack.util.prefix.Prefix,
|
||||
) -> None:
|
||||
"""Run "make" on the install targets specified by the builder."""
|
||||
with fs.working_dir(self.build_directory):
|
||||
@@ -819,7 +825,7 @@ def installcheck(self) -> None:
|
||||
self.pkg._if_make_target_execute("installcheck")
|
||||
|
||||
@spack.phase_callbacks.run_after("install")
|
||||
def _remove_libtool_archives(self) -> None:
|
||||
def remove_libtool_archives(self) -> None:
|
||||
"""Remove all .la files in prefix sub-folders if the package sets
|
||||
``install_libtool_archives`` to be False.
|
||||
"""
|
||||
|
@@ -10,9 +10,6 @@
|
||||
import llnl.util.tty as tty
|
||||
|
||||
import spack.phase_callbacks
|
||||
import spack.spec
|
||||
import spack.util.prefix
|
||||
from spack.directives import depends_on
|
||||
|
||||
from .cmake import CMakeBuilder, CMakePackage
|
||||
|
||||
@@ -278,24 +275,17 @@ def initconfig_hardware_entries(self):
|
||||
entries.append("# ROCm")
|
||||
entries.append("#------------------{0}\n".format("-" * 30))
|
||||
|
||||
if spec.satisfies("^blt@0.7:"):
|
||||
rocm_root = os.path.dirname(spec["llvm-amdgpu"].prefix)
|
||||
entries.append(cmake_cache_path("ROCM_PATH", rocm_root))
|
||||
else:
|
||||
# Explicitly setting HIP_ROOT_DIR may be a patch that is no longer necessary
|
||||
entries.append(cmake_cache_path("HIP_ROOT_DIR", "{0}".format(spec["hip"].prefix)))
|
||||
llvm_bin = spec["llvm-amdgpu"].prefix.bin
|
||||
llvm_prefix = spec["llvm-amdgpu"].prefix
|
||||
# Some ROCm systems seem to point to /<path>/rocm-<ver>/ and
|
||||
# others point to /<path>/rocm-<ver>/llvm
|
||||
if os.path.basename(os.path.normpath(llvm_prefix)) != "llvm":
|
||||
llvm_bin = os.path.join(llvm_prefix, "llvm/bin/")
|
||||
entries.append(
|
||||
cmake_cache_filepath(
|
||||
"CMAKE_HIP_COMPILER", os.path.join(llvm_bin, "amdclang++")
|
||||
)
|
||||
)
|
||||
|
||||
# Explicitly setting HIP_ROOT_DIR may be a patch that is no longer necessary
|
||||
entries.append(cmake_cache_path("HIP_ROOT_DIR", "{0}".format(spec["hip"].prefix)))
|
||||
llvm_bin = spec["llvm-amdgpu"].prefix.bin
|
||||
llvm_prefix = spec["llvm-amdgpu"].prefix
|
||||
# Some ROCm systems seem to point to /<path>/rocm-<ver>/ and
|
||||
# others point to /<path>/rocm-<ver>/llvm
|
||||
if os.path.basename(os.path.normpath(llvm_prefix)) != "llvm":
|
||||
llvm_bin = os.path.join(llvm_prefix, "llvm/bin/")
|
||||
entries.append(
|
||||
cmake_cache_filepath("CMAKE_HIP_COMPILER", os.path.join(llvm_bin, "clang++"))
|
||||
)
|
||||
archs = self.spec.variants["amdgpu_target"].value
|
||||
if archs[0] != "none":
|
||||
arch_str = ";".join(archs)
|
||||
@@ -303,13 +293,6 @@ def initconfig_hardware_entries(self):
|
||||
entries.append(cmake_cache_string("AMDGPU_TARGETS", arch_str))
|
||||
entries.append(cmake_cache_string("GPU_TARGETS", arch_str))
|
||||
|
||||
if spec.satisfies("%gcc"):
|
||||
entries.append(
|
||||
cmake_cache_string(
|
||||
"CMAKE_HIP_FLAGS", f"--gcc-toolchain={self.pkg.compiler.prefix}"
|
||||
)
|
||||
)
|
||||
|
||||
return entries
|
||||
|
||||
def std_initconfig_entries(self):
|
||||
@@ -340,9 +323,7 @@ def initconfig_package_entries(self):
|
||||
"""This method is to be overwritten by the package"""
|
||||
return []
|
||||
|
||||
def initconfig(
|
||||
self, pkg: "CachedCMakePackage", spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
|
||||
) -> None:
|
||||
def initconfig(self, pkg, spec, prefix):
|
||||
cache_entries = (
|
||||
self.std_initconfig_entries()
|
||||
+ self.initconfig_compiler_entries()
|
||||
@@ -379,10 +360,6 @@ class CachedCMakePackage(CMakePackage):
|
||||
|
||||
CMakeBuilder = CachedCMakeBuilder
|
||||
|
||||
# These dependencies are assumed in the builder
|
||||
depends_on("c", type="build")
|
||||
depends_on("cxx", type="build")
|
||||
|
||||
def flag_handler(self, name, flags):
|
||||
if name in ("cflags", "cxxflags", "cppflags", "fflags"):
|
||||
return None, None, None # handled in the cmake cache
|
||||
|
@@ -7,8 +7,6 @@
|
||||
import spack.builder
|
||||
import spack.package_base
|
||||
import spack.phase_callbacks
|
||||
import spack.spec
|
||||
import spack.util.prefix
|
||||
from spack.directives import build_system, depends_on
|
||||
from spack.multimethod import when
|
||||
|
||||
@@ -70,16 +68,10 @@ def build_directory(self):
|
||||
"""Return the directory containing the main Cargo.toml."""
|
||||
return self.pkg.stage.source_path
|
||||
|
||||
@property
|
||||
def std_build_args(self):
|
||||
"""Standard arguments for ``cargo build`` provided as a property for
|
||||
convenience of package writers."""
|
||||
return ["-j", str(self.pkg.module.make_jobs)]
|
||||
|
||||
@property
|
||||
def build_args(self):
|
||||
"""Arguments for ``cargo build``."""
|
||||
return []
|
||||
return ["-j", str(self.pkg.module.make_jobs)]
|
||||
|
||||
@property
|
||||
def check_args(self):
|
||||
@@ -89,18 +81,12 @@ def check_args(self):
|
||||
def setup_build_environment(self, env):
|
||||
env.set("CARGO_HOME", self.stage.path)
|
||||
|
||||
def build(
|
||||
self, pkg: CargoPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
|
||||
) -> None:
|
||||
def build(self, pkg, spec, prefix):
|
||||
"""Runs ``cargo install`` in the source directory"""
|
||||
with fs.working_dir(self.build_directory):
|
||||
pkg.module.cargo(
|
||||
"install", "--root", "out", "--path", ".", *self.std_build_args, *self.build_args
|
||||
)
|
||||
pkg.module.cargo("install", "--root", "out", "--path", ".", *self.build_args)
|
||||
|
||||
def install(
|
||||
self, pkg: CargoPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
|
||||
) -> None:
|
||||
def install(self, pkg, spec, prefix):
|
||||
"""Copy build files into package prefix."""
|
||||
with fs.working_dir(self.build_directory):
|
||||
fs.install_tree("out", prefix)
|
||||
|
@@ -11,7 +11,6 @@
|
||||
from typing import Any, List, Optional, Tuple
|
||||
|
||||
import llnl.util.filesystem as fs
|
||||
from llnl.util import tty
|
||||
from llnl.util.lang import stable_partition
|
||||
|
||||
import spack.builder
|
||||
@@ -455,27 +454,18 @@ def cmake_args(self) -> List[str]:
|
||||
return []
|
||||
|
||||
def cmake(
|
||||
self, pkg: CMakePackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
|
||||
self,
|
||||
pkg: spack.package_base.PackageBase,
|
||||
spec: spack.spec.Spec,
|
||||
prefix: spack.util.prefix.Prefix,
|
||||
) -> None:
|
||||
"""Runs ``cmake`` in the build directory"""
|
||||
|
||||
if spec.is_develop:
|
||||
# skip cmake phase if it is an incremental develop build
|
||||
|
||||
# Determine the files that will re-run CMake that are generated from a successful
|
||||
# configure step based on state
|
||||
primary_generator = _extract_primary_generator(self.generator)
|
||||
configure_artifact = "Makefile"
|
||||
if primary_generator == "Ninja":
|
||||
configure_artifact = "ninja.build"
|
||||
|
||||
if os.path.isfile(os.path.join(self.build_directory, configure_artifact)):
|
||||
tty.msg(
|
||||
"Incremental build criteria satisfied."
|
||||
"Skipping CMake configure step. To force configuration run"
|
||||
f" `spack clean {pkg.name}`"
|
||||
)
|
||||
return
|
||||
# skip cmake phase if it is an incremental develop build
|
||||
if spec.is_develop and os.path.isfile(
|
||||
os.path.join(self.build_directory, "CMakeCache.txt")
|
||||
):
|
||||
return
|
||||
|
||||
options = self.std_cmake_args
|
||||
options += self.cmake_args()
|
||||
@@ -484,7 +474,10 @@ def cmake(
|
||||
pkg.module.cmake(*options)
|
||||
|
||||
def build(
|
||||
self, pkg: CMakePackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
|
||||
self,
|
||||
pkg: spack.package_base.PackageBase,
|
||||
spec: spack.spec.Spec,
|
||||
prefix: spack.util.prefix.Prefix,
|
||||
) -> None:
|
||||
"""Make the build targets"""
|
||||
with fs.working_dir(self.build_directory):
|
||||
@@ -495,7 +488,10 @@ def build(
|
||||
pkg.module.ninja(*self.build_targets)
|
||||
|
||||
def install(
|
||||
self, pkg: CMakePackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
|
||||
self,
|
||||
pkg: spack.package_base.PackageBase,
|
||||
spec: spack.spec.Spec,
|
||||
prefix: spack.util.prefix.Prefix,
|
||||
) -> None:
|
||||
"""Make the install targets"""
|
||||
with fs.working_dir(self.build_directory):
|
||||
|
@@ -15,7 +15,7 @@ class CudaPackage(PackageBase):
|
||||
"""Auxiliary class which contains CUDA variant, dependencies and conflicts
|
||||
and is meant to unify and facilitate its usage.
|
||||
|
||||
Maintainers: ax3l, Rombur, davidbeckingsale, pauleonix
|
||||
Maintainers: ax3l, Rombur, davidbeckingsale
|
||||
"""
|
||||
|
||||
# https://docs.nvidia.com/cuda/cuda-compiler-driver-nvcc/index.html#gpu-feature-list
|
||||
@@ -47,12 +47,6 @@ class CudaPackage(PackageBase):
|
||||
"89",
|
||||
"90",
|
||||
"90a",
|
||||
"100",
|
||||
"100a",
|
||||
"101",
|
||||
"101a",
|
||||
"120",
|
||||
"120a",
|
||||
)
|
||||
|
||||
# FIXME: keep cuda and cuda_arch separate to make usage easier until
|
||||
@@ -105,56 +99,39 @@ def compute_capabilities(arch_list: Iterable[str]) -> List[str]:
|
||||
# CUDA version vs Architecture
|
||||
# https://en.wikipedia.org/wiki/CUDA#GPUs_supported
|
||||
# https://docs.nvidia.com/cuda/cuda-toolkit-release-notes/index.html#deprecated-features
|
||||
# Tesla support:
|
||||
depends_on("cuda@:6.0", when="cuda_arch=10")
|
||||
depends_on("cuda@:6.5", when="cuda_arch=11")
|
||||
depends_on("cuda@2.1:6.5", when="cuda_arch=12")
|
||||
depends_on("cuda@2.1:6.5", when="cuda_arch=13")
|
||||
|
||||
# Fermi support:
|
||||
depends_on("cuda@3.0:8.0", when="cuda_arch=20")
|
||||
depends_on("cuda@3.2:8.0", when="cuda_arch=21")
|
||||
|
||||
# Kepler support:
|
||||
depends_on("cuda@5.0:10.2", when="cuda_arch=30")
|
||||
depends_on("cuda@5.0:10.2", when="cuda_arch=32")
|
||||
depends_on("cuda@5.0:11.8", when="cuda_arch=35")
|
||||
depends_on("cuda@6.5:11.8", when="cuda_arch=37")
|
||||
|
||||
# Maxwell support:
|
||||
depends_on("cuda@6.0:", when="cuda_arch=50")
|
||||
depends_on("cuda@6.5:", when="cuda_arch=52")
|
||||
depends_on("cuda@6.5:", when="cuda_arch=53")
|
||||
|
||||
# Pascal support:
|
||||
depends_on("cuda@8.0:", when="cuda_arch=60")
|
||||
depends_on("cuda@8.0:", when="cuda_arch=61")
|
||||
depends_on("cuda@8.0:", when="cuda_arch=62")
|
||||
|
||||
# Volta support:
|
||||
depends_on("cuda@9.0:", when="cuda_arch=70")
|
||||
# Turing support:
|
||||
depends_on("cuda@9.0:", when="cuda_arch=72")
|
||||
depends_on("cuda@10.0:", when="cuda_arch=75")
|
||||
|
||||
# Ampere support:
|
||||
depends_on("cuda@11.0:", when="cuda_arch=80")
|
||||
depends_on("cuda@11.1:", when="cuda_arch=86")
|
||||
depends_on("cuda@11.4:", when="cuda_arch=87")
|
||||
# Ada support:
|
||||
depends_on("cuda@11.8:", when="cuda_arch=89")
|
||||
|
||||
# Hopper support:
|
||||
depends_on("cuda@12.0:", when="cuda_arch=90")
|
||||
depends_on("cuda@12.0:", when="cuda_arch=90a")
|
||||
|
||||
# Blackwell support:
|
||||
depends_on("cuda@12.8:", when="cuda_arch=100")
|
||||
depends_on("cuda@12.8:", when="cuda_arch=100a")
|
||||
depends_on("cuda@12.8:", when="cuda_arch=101")
|
||||
depends_on("cuda@12.8:", when="cuda_arch=101a")
|
||||
depends_on("cuda@12.8:", when="cuda_arch=120")
|
||||
depends_on("cuda@12.8:", when="cuda_arch=120a")
|
||||
# From the NVIDIA install guide we know of conflicts for particular
|
||||
# platforms (linux, darwin), architectures (x86, powerpc) and compilers
|
||||
# (gcc, clang). We don't restrict %gcc and %clang conflicts to
|
||||
@@ -186,7 +163,6 @@ def compute_capabilities(arch_list: Iterable[str]) -> List[str]:
|
||||
conflicts("%gcc@12:", when="+cuda ^cuda@:11.8")
|
||||
conflicts("%gcc@13:", when="+cuda ^cuda@:12.3")
|
||||
conflicts("%gcc@14:", when="+cuda ^cuda@:12.6")
|
||||
conflicts("%gcc@15:", when="+cuda ^cuda@:12.8")
|
||||
conflicts("%clang@12:", when="+cuda ^cuda@:11.4.0")
|
||||
conflicts("%clang@13:", when="+cuda ^cuda@:11.5")
|
||||
conflicts("%clang@14:", when="+cuda ^cuda@:11.7")
|
||||
@@ -195,7 +171,6 @@ def compute_capabilities(arch_list: Iterable[str]) -> List[str]:
|
||||
conflicts("%clang@17:", when="+cuda ^cuda@:12.3")
|
||||
conflicts("%clang@18:", when="+cuda ^cuda@:12.5")
|
||||
conflicts("%clang@19:", when="+cuda ^cuda@:12.6")
|
||||
conflicts("%clang@20:", when="+cuda ^cuda@:12.8")
|
||||
|
||||
# https://gist.github.com/ax3l/9489132#gistcomment-3860114
|
||||
conflicts("%gcc@10", when="+cuda ^cuda@:11.4.0")
|
||||
|
@@ -7,8 +7,6 @@
|
||||
import spack.directives
|
||||
import spack.package_base
|
||||
import spack.phase_callbacks
|
||||
import spack.spec
|
||||
import spack.util.prefix
|
||||
|
||||
from ._checks import BuilderWithDefaults, apply_macos_rpath_fixups, execute_install_time_tests
|
||||
|
||||
@@ -50,8 +48,3 @@ class GenericBuilder(BuilderWithDefaults):
|
||||
|
||||
# unconditionally perform any post-install phase tests
|
||||
spack.phase_callbacks.run_after("install")(execute_install_time_tests)
|
||||
|
||||
def install(
|
||||
self, pkg: Package, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
|
||||
) -> None:
|
||||
raise NotImplementedError
|
||||
|
@@ -7,9 +7,7 @@
|
||||
import spack.builder
|
||||
import spack.package_base
|
||||
import spack.phase_callbacks
|
||||
import spack.spec
|
||||
import spack.util.prefix
|
||||
from spack.directives import build_system, depends_on
|
||||
from spack.directives import build_system, extends
|
||||
from spack.multimethod import when
|
||||
|
||||
from ._checks import BuilderWithDefaults, execute_install_time_tests
|
||||
@@ -28,7 +26,9 @@ class GoPackage(spack.package_base.PackageBase):
|
||||
build_system("go")
|
||||
|
||||
with when("build_system=go"):
|
||||
depends_on("go", type="build")
|
||||
# TODO: this seems like it should be depends_on, see
|
||||
# setup_dependent_build_environment in go for why I kept it like this
|
||||
extends("go@1.14:", type="build")
|
||||
|
||||
|
||||
@spack.builder.builder("go")
|
||||
@@ -71,7 +71,6 @@ class GoBuilder(BuilderWithDefaults):
|
||||
def setup_build_environment(self, env):
|
||||
env.set("GO111MODULE", "on")
|
||||
env.set("GOTOOLCHAIN", "local")
|
||||
env.set("GOPATH", fs.join_path(self.pkg.stage.path, "go"))
|
||||
|
||||
@property
|
||||
def build_directory(self):
|
||||
@@ -82,31 +81,19 @@ def build_directory(self):
|
||||
def build_args(self):
|
||||
"""Arguments for ``go build``."""
|
||||
# Pass ldflags -s = --strip-all and -w = --no-warnings by default
|
||||
return [
|
||||
"-p",
|
||||
str(self.pkg.module.make_jobs),
|
||||
"-modcacherw",
|
||||
"-ldflags",
|
||||
"-s -w",
|
||||
"-o",
|
||||
f"{self.pkg.name}",
|
||||
]
|
||||
return ["-modcacherw", "-ldflags", "-s -w", "-o", f"{self.pkg.name}"]
|
||||
|
||||
@property
|
||||
def check_args(self):
|
||||
"""Argument for ``go test`` during check phase"""
|
||||
return []
|
||||
|
||||
def build(
|
||||
self, pkg: GoPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
|
||||
) -> None:
|
||||
def build(self, pkg, spec, prefix):
|
||||
"""Runs ``go build`` in the source directory"""
|
||||
with fs.working_dir(self.build_directory):
|
||||
pkg.module.go("build", *self.build_args)
|
||||
|
||||
def install(
|
||||
self, pkg: GoPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
|
||||
) -> None:
|
||||
def install(self, pkg, spec, prefix):
|
||||
"""Install built binaries into prefix bin."""
|
||||
with fs.working_dir(self.build_directory):
|
||||
fs.mkdirp(prefix.bin)
|
||||
|
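For reference, a sketch of the effective go build invocation produced by the restored build_args for a hypothetical package named hugo; the removed variant additionally passed -p with the make_jobs count to cap build parallelism.

    # Sketch only; the package name is a placeholder.
    with fs.working_dir(self.build_directory):
        pkg.module.go("build", "-modcacherw", "-ldflags", "-s -w", "-o", "hugo")
        # i.e. roughly: go build -modcacherw -ldflags "-s -w" -o hugo
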
@@ -7,9 +7,7 @@
|
||||
|
||||
import spack.builder
|
||||
import spack.package_base
|
||||
import spack.spec
|
||||
import spack.util.executable
|
||||
import spack.util.prefix
|
||||
from spack.directives import build_system, depends_on, extends
|
||||
from spack.multimethod import when
|
||||
|
||||
@@ -57,9 +55,7 @@ class LuaBuilder(spack.builder.Builder):
|
||||
#: Names associated with package attributes in the old build-system format
|
||||
legacy_attributes = ()
|
||||
|
||||
def unpack(
|
||||
self, pkg: LuaPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
|
||||
) -> None:
|
||||
def unpack(self, pkg, spec, prefix):
|
||||
if os.path.splitext(pkg.stage.archive_file)[1] == ".rock":
|
||||
directory = pkg.luarocks("unpack", pkg.stage.archive_file, output=str)
|
||||
dirlines = directory.split("\n")
|
||||
@@ -70,16 +66,15 @@ def unpack(
|
||||
def _generate_tree_line(name, prefix):
|
||||
return """{{ name = "{name}", root = "{prefix}" }};""".format(name=name, prefix=prefix)
|
||||
|
||||
def generate_luarocks_config(
|
||||
self, pkg: LuaPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
|
||||
) -> None:
|
||||
def generate_luarocks_config(self, pkg, spec, prefix):
|
||||
spec = self.pkg.spec
|
||||
table_entries = []
|
||||
for d in spec.traverse(deptype=("build", "run")):
|
||||
if d.package.extends(self.pkg.extendee_spec):
|
||||
table_entries.append(self._generate_tree_line(d.name, d.prefix))
|
||||
|
||||
with open(self._luarocks_config_path(), "w", encoding="utf-8") as config:
|
||||
path = self._luarocks_config_path()
|
||||
with open(path, "w", encoding="utf-8") as config:
|
||||
config.write(
|
||||
"""
|
||||
deps_mode="all"
|
||||
@@ -90,26 +85,23 @@ def generate_luarocks_config(
|
||||
"\n".join(table_entries)
|
||||
)
|
||||
)
|
||||
return path
|
||||
|
||||
def preprocess(
|
||||
self, pkg: LuaPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
|
||||
) -> None:
|
||||
def preprocess(self, pkg, spec, prefix):
|
||||
"""Override this to preprocess source before building with luarocks"""
|
||||
pass
|
||||
|
||||
def luarocks_args(self):
|
||||
return []
|
||||
|
||||
def install(
|
||||
self, pkg: LuaPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
|
||||
) -> None:
|
||||
def install(self, pkg, spec, prefix):
|
||||
rock = "."
|
||||
specs = find(".", "*.rockspec", recursive=False)
|
||||
if specs:
|
||||
rock = specs[0]
|
||||
rocks_args = self.luarocks_args()
|
||||
rocks_args.append(rock)
|
||||
pkg.luarocks("--tree=" + prefix, "make", *rocks_args)
|
||||
self.pkg.luarocks("--tree=" + prefix, "make", *rocks_args)
|
||||
|
||||
def _luarocks_config_path(self):
|
||||
return os.path.join(self.pkg.stage.source_path, "spack_luarocks.lua")
|
||||
|
@@ -98,20 +98,29 @@ def build_directory(self) -> str:
|
||||
return self.pkg.stage.source_path
|
||||
|
||||
def edit(
|
||||
self, pkg: MakefilePackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
|
||||
self,
|
||||
pkg: spack.package_base.PackageBase,
|
||||
spec: spack.spec.Spec,
|
||||
prefix: spack.util.prefix.Prefix,
|
||||
) -> None:
|
||||
"""Edit the Makefile before calling make. The default is a no-op."""
|
||||
pass
|
||||
|
||||
def build(
|
||||
self, pkg: MakefilePackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
|
||||
self,
|
||||
pkg: spack.package_base.PackageBase,
|
||||
spec: spack.spec.Spec,
|
||||
prefix: spack.util.prefix.Prefix,
|
||||
) -> None:
|
||||
"""Run "make" on the build targets specified by the builder."""
|
||||
with fs.working_dir(self.build_directory):
|
||||
pkg.module.make(*self.build_targets)
|
||||
|
||||
def install(
|
||||
self, pkg: MakefilePackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
|
||||
self,
|
||||
pkg: spack.package_base.PackageBase,
|
||||
spec: spack.spec.Spec,
|
||||
prefix: spack.util.prefix.Prefix,
|
||||
) -> None:
|
||||
"""Run "make" on the install targets specified by the builder."""
|
||||
with fs.working_dir(self.build_directory):
|
||||
|
@@ -5,8 +5,6 @@
|
||||
|
||||
import spack.builder
|
||||
import spack.package_base
|
||||
import spack.spec
|
||||
import spack.util.prefix
|
||||
from spack.directives import build_system, depends_on
|
||||
from spack.multimethod import when
|
||||
from spack.util.executable import which
|
||||
@@ -60,20 +58,16 @@ def build_args(self):
|
||||
"""List of args to pass to build phase."""
|
||||
return []
|
||||
|
||||
def build(
|
||||
self, pkg: MavenPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
|
||||
) -> None:
|
||||
def build(self, pkg, spec, prefix):
|
||||
"""Compile code and package into a JAR file."""
|
||||
with fs.working_dir(self.build_directory):
|
||||
mvn = which("mvn", required=True)
|
||||
mvn = which("mvn")
|
||||
if self.pkg.run_tests:
|
||||
mvn("verify", *self.build_args())
|
||||
else:
|
||||
mvn("package", "-DskipTests", *self.build_args())
|
||||
|
||||
def install(
|
||||
self, pkg: MavenPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
|
||||
) -> None:
|
||||
def install(self, pkg, spec, prefix):
|
||||
"""Copy to installation prefix."""
|
||||
with fs.working_dir(self.build_directory):
|
||||
fs.install_tree(".", prefix)
|
||||
|
@@ -48,9 +48,6 @@ class MesonPackage(spack.package_base.PackageBase):
|
||||
variant("strip", default=False, description="Strip targets on install")
|
||||
depends_on("meson", type="build")
|
||||
depends_on("ninja", type="build")
|
||||
# Meson uses pkg-config for dependency detection, and this dependency is
|
||||
# often overlooked by packages that use meson as a build system.
|
||||
depends_on("pkgconfig", type="build")
|
||||
# Python detection in meson requires distutils to be importable, but distutils no longer
|
||||
# exists in Python 3.12. In Spack, we can't use setuptools as distutils replacement,
|
||||
# because the distutils-precedence.pth startup file that setuptools ships with is not run
|
||||
@@ -191,7 +188,10 @@ def meson_args(self) -> List[str]:
|
||||
return []
|
||||
|
||||
def meson(
|
||||
self, pkg: MesonPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
|
||||
self,
|
||||
pkg: spack.package_base.PackageBase,
|
||||
spec: spack.spec.Spec,
|
||||
prefix: spack.util.prefix.Prefix,
|
||||
) -> None:
|
||||
"""Run ``meson`` in the build directory"""
|
||||
options = []
|
||||
@@ -204,7 +204,10 @@ def meson(
|
||||
pkg.module.meson(*options)
|
||||
|
||||
def build(
|
||||
self, pkg: MesonPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
|
||||
self,
|
||||
pkg: spack.package_base.PackageBase,
|
||||
spec: spack.spec.Spec,
|
||||
prefix: spack.util.prefix.Prefix,
|
||||
) -> None:
|
||||
"""Make the build targets"""
|
||||
options = ["-v"]
|
||||
@@ -213,7 +216,10 @@ def build(
|
||||
pkg.module.ninja(*options)
|
||||
|
||||
def install(
|
||||
self, pkg: MesonPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
|
||||
self,
|
||||
pkg: spack.package_base.PackageBase,
|
||||
spec: spack.spec.Spec,
|
||||
prefix: spack.util.prefix.Prefix,
|
||||
) -> None:
|
||||
"""Make the install targets"""
|
||||
with fs.working_dir(self.build_directory):
|
||||
|
@@ -7,8 +7,6 @@
|
||||
|
||||
import spack.builder
|
||||
import spack.package_base
|
||||
import spack.spec
|
||||
import spack.util.prefix
|
||||
from spack.directives import build_system, conflicts
|
||||
|
||||
from ._checks import BuilderWithDefaults
|
||||
@@ -101,9 +99,7 @@ def msbuild_install_args(self):
|
||||
as `msbuild_args` by default."""
|
||||
return self.msbuild_args()
|
||||
|
||||
def build(
|
||||
self, pkg: MSBuildPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
|
||||
) -> None:
|
||||
def build(self, pkg, spec, prefix):
|
||||
"""Run "msbuild" on the build targets specified by the builder."""
|
||||
with fs.working_dir(self.build_directory):
|
||||
pkg.module.msbuild(
|
||||
@@ -112,9 +108,7 @@ def build(
|
||||
self.define_targets(*self.build_targets),
|
||||
)
|
||||
|
||||
def install(
|
||||
self, pkg: MSBuildPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
|
||||
) -> None:
|
||||
def install(self, pkg, spec, prefix):
|
||||
"""Run "msbuild" on the install targets specified by the builder.
|
||||
This is INSTALL by default"""
|
||||
with fs.working_dir(self.build_directory):
|
||||
|
@@ -7,8 +7,6 @@
|
||||
|
||||
import spack.builder
|
||||
import spack.package_base
|
||||
import spack.spec
|
||||
import spack.util.prefix
|
||||
from spack.directives import build_system, conflicts
|
||||
|
||||
from ._checks import BuilderWithDefaults
|
||||
@@ -125,9 +123,7 @@ def nmake_install_args(self):
|
||||
Individual packages should override to specify NMake args to command line"""
|
||||
return []
|
||||
|
||||
def build(
|
||||
self, pkg: NMakePackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
|
||||
) -> None:
|
||||
def build(self, pkg, spec, prefix):
|
||||
"""Run "nmake" on the build targets specified by the builder."""
|
||||
opts = self.std_nmake_args
|
||||
opts += self.nmake_args()
|
||||
@@ -136,9 +132,7 @@ def build(
|
||||
with fs.working_dir(self.build_directory):
|
||||
pkg.module.nmake(*opts, *self.build_targets, ignore_quotes=self.ignore_quotes)
|
||||
|
||||
def install(
|
||||
self, pkg: NMakePackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
|
||||
) -> None:
|
||||
def install(self, pkg, spec, prefix):
|
||||
"""Run "nmake" on the install targets specified by the builder.
|
||||
This is INSTALL by default"""
|
||||
opts = self.std_nmake_args
|
||||
|
@@ -3,8 +3,6 @@
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
import spack.builder
|
||||
import spack.package_base
|
||||
import spack.spec
|
||||
import spack.util.prefix
|
||||
from spack.directives import build_system, extends
|
||||
from spack.multimethod import when
|
||||
|
||||
@@ -44,9 +42,7 @@ class OctaveBuilder(BuilderWithDefaults):
|
||||
#: Names associated with package attributes in the old build-system format
|
||||
legacy_attributes = ()
|
||||
|
||||
def install(
|
||||
self, pkg: OctavePackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
|
||||
) -> None:
|
||||
def install(self, pkg, spec, prefix):
|
||||
"""Install the package from the archive file"""
|
||||
pkg.module.octave(
|
||||
"--quiet",
|
||||
|
@@ -142,7 +142,7 @@ def setup_run_environment(self, env):
|
||||
$ source {prefix}/{component}/{version}/env/vars.sh
|
||||
"""
|
||||
# Only if environment modifications are desired (default is +envmods)
|
||||
if "+envmods" in self.spec:
|
||||
if "~envmods" not in self.spec:
|
||||
env.extend(
|
||||
EnvironmentModifications.from_sourcing_file(
|
||||
self.component_prefix.env.join("vars.sh"), *self.env_script_args
|
||||
|
@@ -10,8 +10,6 @@
|
||||
import spack.builder
|
||||
import spack.package_base
|
||||
import spack.phase_callbacks
|
||||
import spack.spec
|
||||
import spack.util.prefix
|
||||
from spack.directives import build_system, depends_on, extends
|
||||
from spack.install_test import SkipTest, test_part
|
||||
from spack.multimethod import when
|
||||
@@ -151,9 +149,7 @@ def configure_args(self):
|
||||
"""
|
||||
return []
|
||||
|
||||
def configure(
|
||||
self, pkg: PerlPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
|
||||
) -> None:
|
||||
def configure(self, pkg, spec, prefix):
|
||||
"""Run Makefile.PL or Build.PL with arguments consisting of
|
||||
an appropriate installation base directory followed by the
|
||||
list returned by :py:meth:`~.PerlBuilder.configure_args`.
|
||||
@@ -177,9 +173,7 @@ def fix_shebang(self):
|
||||
repl = "#!/usr/bin/env perl"
|
||||
filter_file(pattern, repl, "Build", backup=False)
|
||||
|
||||
def build(
|
||||
self, pkg: PerlPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
|
||||
) -> None:
|
||||
def build(self, pkg, spec, prefix):
|
||||
"""Builds a Perl package."""
|
||||
self.build_executable()
|
||||
|
||||
@@ -190,8 +184,6 @@ def check(self):
|
||||
"""Runs built-in tests of a Perl package."""
|
||||
self.build_executable("test")
|
||||
|
||||
def install(
|
||||
self, pkg: PerlPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
|
||||
) -> None:
|
||||
def install(self, pkg, spec, prefix):
|
||||
"""Installs a Perl package."""
|
||||
self.build_executable("install")
|
||||
|
@@ -28,7 +28,6 @@
|
||||
import spack.repo
|
||||
import spack.spec
|
||||
import spack.store
|
||||
import spack.util.prefix
|
||||
from spack.directives import build_system, depends_on, extends
|
||||
from spack.error import NoHeadersError, NoLibrariesError
|
||||
from spack.install_test import test_part
|
||||
@@ -264,17 +263,16 @@ def update_external_dependencies(self, extendee_spec=None):
|
||||
# Ensure architecture information is present
|
||||
if not python.architecture:
|
||||
host_platform = spack.platforms.host()
|
||||
host_os = host_platform.default_operating_system()
|
||||
host_target = host_platform.default_target()
|
||||
host_os = host_platform.operating_system("default_os")
|
||||
host_target = host_platform.target("default_target")
|
||||
python.architecture = spack.spec.ArchSpec(
|
||||
(str(host_platform), str(host_os), str(host_target))
|
||||
)
|
||||
else:
|
||||
if not python.architecture.platform:
|
||||
python.architecture.platform = spack.platforms.host()
|
||||
platform = spack.platforms.by_name(python.architecture.platform)
|
||||
if not python.architecture.os:
|
||||
python.architecture.os = platform.default_operating_system()
|
||||
python.architecture.os = "default_os"
|
||||
if not python.architecture.target:
|
||||
python.architecture.target = archspec.cpu.host().family.name
|
||||
|
||||
|
@@ -6,8 +6,6 @@
|
||||
import spack.builder
|
||||
import spack.package_base
|
||||
import spack.phase_callbacks
|
||||
import spack.spec
|
||||
import spack.util.prefix
|
||||
from spack.directives import build_system, depends_on
|
||||
|
||||
from ._checks import BuilderWithDefaults, execute_build_time_tests
|
||||
@@ -29,7 +27,6 @@ class QMakePackage(spack.package_base.PackageBase):
|
||||
build_system("qmake")
|
||||
|
||||
depends_on("qmake", type="build", when="build_system=qmake")
|
||||
depends_on("gmake", type="build")
|
||||
|
||||
|
||||
@spack.builder.builder("qmake")
|
||||
@@ -64,23 +61,17 @@ def qmake_args(self):
|
||||
"""List of arguments passed to qmake."""
|
||||
return []
|
||||
|
||||
def qmake(
|
||||
self, pkg: QMakePackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
|
||||
) -> None:
|
||||
def qmake(self, pkg, spec, prefix):
|
||||
"""Run ``qmake`` to configure the project and generate a Makefile."""
|
||||
with working_dir(self.build_directory):
|
||||
pkg.module.qmake(*self.qmake_args())
|
||||
|
||||
def build(
|
||||
self, pkg: QMakePackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
|
||||
) -> None:
|
||||
def build(self, pkg, spec, prefix):
|
||||
"""Make the build targets"""
|
||||
with working_dir(self.build_directory):
|
||||
pkg.module.make()
|
||||
|
||||
def install(
|
||||
self, pkg: QMakePackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
|
||||
) -> None:
|
||||
def install(self, pkg, spec, prefix):
|
||||
"""Make the install targets"""
|
||||
with working_dir(self.build_directory):
|
||||
pkg.module.make("install")
|
||||
|
@@ -94,7 +94,7 @@ def list_url(cls):
if cls.cran:
return f"https://cloud.r-project.org/src/contrib/Archive/{cls.cran}/"

@lang.classproperty
def git(cls):
if cls.bioc:
return f"https://git.bioconductor.org/packages/{cls.bioc}"
@property
def git(self):
if self.bioc:
return f"https://git.bioconductor.org/packages/{self.bioc}"

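A tiny example, using a hypothetical Bioconductor package definition, of how the git property shown above derives the repository URL from the bioc attribute.

    # Hypothetical package; only `bioc` needs to be set for the URL to be derived.
    class RLimma(RPackage):
        bioc = "limma"
    # the git property evaluates to "https://git.bioconductor.org/packages/limma"
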
@@ -9,8 +9,6 @@
|
||||
import llnl.util.tty as tty
|
||||
|
||||
import spack.builder
|
||||
import spack.spec
|
||||
import spack.util.prefix
|
||||
from spack.build_environment import SPACK_NO_PARALLEL_MAKE
|
||||
from spack.config import determine_number_of_jobs
|
||||
from spack.directives import build_system, extends, maintainers
|
||||
@@ -76,22 +74,18 @@ def build_directory(self):
|
||||
ret = os.path.join(ret, self.subdirectory)
|
||||
return ret
|
||||
|
||||
def install(
|
||||
self, pkg: RacketPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
|
||||
) -> None:
|
||||
def install(self, pkg, spec, prefix):
|
||||
"""Install everything from build directory."""
|
||||
raco = Executable("raco")
|
||||
with fs.working_dir(self.build_directory):
|
||||
parallel = pkg.parallel and (not env_flag(SPACK_NO_PARALLEL_MAKE))
|
||||
name = pkg.racket_name
|
||||
assert name is not None, "Racket package name is not set"
|
||||
parallel = self.pkg.parallel and (not env_flag(SPACK_NO_PARALLEL_MAKE))
|
||||
args = [
|
||||
"pkg",
|
||||
"install",
|
||||
"-t",
|
||||
"dir",
|
||||
"-n",
|
||||
name,
|
||||
self.pkg.racket_name,
|
||||
"--deps",
|
||||
"fail",
|
||||
"--ignore-implies",
|
||||
@@ -107,7 +101,8 @@ def install(
|
||||
except ProcessError:
|
||||
args.insert(-2, "--skip-installed")
|
||||
raco(*args)
|
||||
tty.warn(
|
||||
f"Racket package {name} was already installed, uninstalling via "
|
||||
msg = (
|
||||
"Racket package {0} was already installed, uninstalling via "
|
||||
"Spack may make someone unhappy!"
|
||||
)
|
||||
tty.warn(msg.format(self.pkg.racket_name))
|
||||
|
@@ -140,7 +140,7 @@ class ROCmPackage(PackageBase):
when="+rocm",
)

depends_on("llvm-amdgpu", type="build", when="+rocm")
depends_on("llvm-amdgpu", when="+rocm")
depends_on("hsa-rocr-dev", when="+rocm")
depends_on("hip +rocm", when="+rocm")

@@ -5,8 +5,6 @@
|
||||
|
||||
import spack.builder
|
||||
import spack.package_base
|
||||
import spack.spec
|
||||
import spack.util.prefix
|
||||
from spack.directives import build_system, extends, maintainers
|
||||
|
||||
from ._checks import BuilderWithDefaults
|
||||
@@ -44,9 +42,7 @@ class RubyBuilder(BuilderWithDefaults):
|
||||
#: Names associated with package attributes in the old build-system format
|
||||
legacy_attributes = ()
|
||||
|
||||
def build(
|
||||
self, pkg: RubyPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
|
||||
) -> None:
|
||||
def build(self, pkg, spec, prefix):
|
||||
"""Build a Ruby gem."""
|
||||
|
||||
# ruby-rake provides both rake.gemspec and Rakefile, but only
|
||||
@@ -62,9 +58,7 @@ def build(
|
||||
# Some Ruby packages only ship `*.gem` files, so nothing to build
|
||||
pass
|
||||
|
||||
def install(
|
||||
self, pkg: RubyPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
|
||||
) -> None:
|
||||
def install(self, pkg, spec, prefix):
|
||||
"""Install a Ruby gem.
|
||||
|
||||
The ruby package sets ``GEM_HOME`` to tell gem where to install to."""
|
||||
|
@@ -4,8 +4,6 @@
|
||||
import spack.builder
|
||||
import spack.package_base
|
||||
import spack.phase_callbacks
|
||||
import spack.spec
|
||||
import spack.util.prefix
|
||||
from spack.directives import build_system, depends_on
|
||||
|
||||
from ._checks import BuilderWithDefaults, execute_build_time_tests
|
||||
@@ -61,9 +59,7 @@ def build_args(self, spec, prefix):
|
||||
"""Arguments to pass to build."""
|
||||
return []
|
||||
|
||||
def build(
|
||||
self, pkg: SConsPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
|
||||
) -> None:
|
||||
def build(self, pkg, spec, prefix):
|
||||
"""Build the package."""
|
||||
pkg.module.scons(*self.build_args(spec, prefix))
|
||||
|
||||
@@ -71,9 +67,7 @@ def install_args(self, spec, prefix):
|
||||
"""Arguments to pass to install."""
|
||||
return []
|
||||
|
||||
def install(
|
||||
self, pkg: SConsPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
|
||||
) -> None:
|
||||
def install(self, pkg, spec, prefix):
|
||||
"""Install the package."""
|
||||
pkg.module.scons("install", *self.install_args(spec, prefix))
@@ -11,8 +11,6 @@
|
||||
import spack.install_test
|
||||
import spack.package_base
|
||||
import spack.phase_callbacks
|
||||
import spack.spec
|
||||
import spack.util.prefix
|
||||
from spack.directives import build_system, depends_on, extends
|
||||
from spack.multimethod import when
|
||||
from spack.util.executable import Executable
|
||||
@@ -43,7 +41,6 @@ class SIPPackage(spack.package_base.PackageBase):
|
||||
with when("build_system=sip"):
|
||||
extends("python", type=("build", "link", "run"))
|
||||
depends_on("py-sip", type="build")
|
||||
depends_on("gmake", type="build")
|
||||
|
||||
@property
|
||||
def import_modules(self):
|
||||
@@ -133,9 +130,7 @@ class SIPBuilder(BuilderWithDefaults):
|
||||
|
||||
build_directory = "build"
|
||||
|
||||
def configure(
|
||||
self, pkg: SIPPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
|
||||
) -> None:
|
||||
def configure(self, pkg, spec, prefix):
|
||||
"""Configure the package."""
|
||||
|
||||
# https://www.riverbankcomputing.com/static/Docs/sip/command_line_tools.html
|
||||
@@ -153,9 +148,7 @@ def configure_args(self):
|
||||
"""Arguments to pass to configure."""
|
||||
return []
|
||||
|
||||
def build(
|
||||
self, pkg: SIPPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
|
||||
) -> None:
|
||||
def build(self, pkg, spec, prefix):
|
||||
"""Build the package."""
|
||||
args = self.build_args()
|
||||
|
||||
@@ -166,9 +159,7 @@ def build_args(self):
|
||||
"""Arguments to pass to build."""
|
||||
return []
|
||||
|
||||
def install(
|
||||
self, pkg: SIPPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
|
||||
) -> None:
|
||||
def install(self, pkg, spec, prefix):
|
||||
"""Install the package."""
|
||||
args = self.install_args()
|
||||
|
||||
|
@@ -6,8 +6,6 @@
|
||||
import spack.builder
|
||||
import spack.package_base
|
||||
import spack.phase_callbacks
|
||||
import spack.spec
|
||||
import spack.util.prefix
|
||||
from spack.directives import build_system, depends_on
|
||||
|
||||
from ._checks import BuilderWithDefaults, execute_build_time_tests, execute_install_time_tests
|
||||
@@ -99,9 +97,7 @@ def waf(self, *args, **kwargs):
|
||||
with working_dir(self.build_directory):
|
||||
self.python("waf", "-j{0}".format(jobs), *args, **kwargs)
|
||||
|
||||
def configure(
|
||||
self, pkg: WafPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
|
||||
) -> None:
|
||||
def configure(self, pkg, spec, prefix):
|
||||
"""Configures the project."""
|
||||
args = ["--prefix={0}".format(self.pkg.prefix)]
|
||||
args += self.configure_args()
|
||||
@@ -112,9 +108,7 @@ def configure_args(self):
|
||||
"""Arguments to pass to configure."""
|
||||
return []
|
||||
|
||||
def build(
|
||||
self, pkg: WafPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
|
||||
) -> None:
|
||||
def build(self, pkg, spec, prefix):
|
||||
"""Executes the build."""
|
||||
args = self.build_args()
|
||||
|
||||
@@ -124,9 +118,7 @@ def build_args(self):
|
||||
"""Arguments to pass to build."""
|
||||
return []
|
||||
|
||||
def install(
|
||||
self, pkg: WafPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
|
||||
) -> None:
|
||||
def install(self, pkg, spec, prefix):
|
||||
"""Installs the targets on the system."""
|
||||
args = self.install_args()
|
||||
|
||||
|
@@ -6,7 +6,6 @@
|
||||
import codecs
|
||||
import json
|
||||
import os
|
||||
import pathlib
|
||||
import re
|
||||
import shutil
|
||||
import stat
|
||||
@@ -14,16 +13,17 @@
|
||||
import tempfile
|
||||
import zipfile
|
||||
from collections import namedtuple
|
||||
from typing import Callable, Dict, List, Set, Union
|
||||
from urllib.request import Request
|
||||
from typing import Callable, Dict, List, Set
|
||||
from urllib.error import HTTPError, URLError
|
||||
from urllib.request import HTTPHandler, Request, build_opener
|
||||
|
||||
import llnl.path
|
||||
import llnl.util.filesystem as fs
|
||||
import llnl.util.tty as tty
|
||||
from llnl.util.tty.color import cescape, colorize
|
||||
|
||||
import spack
|
||||
import spack.binary_distribution as bindist
|
||||
import spack.builder
|
||||
import spack.concretize
|
||||
import spack.config as cfg
|
||||
import spack.environment as ev
|
||||
@@ -33,7 +33,6 @@
|
||||
import spack.paths
|
||||
import spack.repo
|
||||
import spack.spec
|
||||
import spack.store
|
||||
import spack.util.git
|
||||
import spack.util.gpg as gpg_util
|
||||
import spack.util.spack_yaml as syaml
|
||||
@@ -42,7 +41,6 @@
|
||||
from spack import traverse
|
||||
from spack.error import SpackError
|
||||
from spack.reporters.cdash import SPACK_CDASH_TIMEOUT
|
||||
from spack.version import GitVersion, StandardVersion
|
||||
|
||||
from .common import (
|
||||
IS_WINDOWS,
|
||||
@@ -65,8 +63,6 @@
|
||||
|
||||
PushResult = namedtuple("PushResult", "success url")
|
||||
|
||||
urlopen = web_util.urlopen # alias for mocking in tests
|
||||
|
||||
|
||||
def get_change_revisions():
|
||||
"""If this is a git repo get the revisions to use when checking
|
||||
@@ -81,53 +77,11 @@ def get_change_revisions():
|
||||
return None, None
|
||||
|
||||
|
||||
def get_added_versions(
|
||||
checksums_version_dict: Dict[str, Union[StandardVersion, GitVersion]],
|
||||
path: str,
|
||||
from_ref: str = "HEAD~1",
|
||||
to_ref: str = "HEAD",
|
||||
) -> List[Union[StandardVersion, GitVersion]]:
|
||||
"""Get a list of the versions added between `from_ref` and `to_ref`.
|
||||
Args:
|
||||
checksums_version_dict (Dict): all package versions keyed by known checksums.
|
||||
path (str): path to the package.py
|
||||
from_ref (str): oldest git ref, defaults to `HEAD~1`
|
||||
to_ref (str): newer git ref, defaults to `HEAD`
|
||||
Returns: list of versions added between refs
|
||||
"""
|
||||
git_exe = spack.util.git.git(required=True)
|
||||
|
||||
# Gather git diff
|
||||
diff_lines = git_exe("diff", from_ref, to_ref, "--", path, output=str).split("\n")
|
||||
|
||||
# Store added and removed versions
|
||||
# Removed versions are tracked here to determine when versions are moved in a file
|
||||
# and show up as both added and removed in a git diff.
|
||||
added_checksums = set()
|
||||
removed_checksums = set()
|
||||
|
||||
# Scrape diff for modified versions and prune added versions if they show up
|
||||
# as also removed (which means they've actually just moved in the file and
|
||||
# we shouldn't need to rechecksum them)
|
||||
for checksum in checksums_version_dict.keys():
|
||||
for line in diff_lines:
|
||||
if checksum in line:
|
||||
if line.startswith("+"):
|
||||
added_checksums.add(checksum)
|
||||
if line.startswith("-"):
|
||||
removed_checksums.add(checksum)
|
||||
|
||||
return [checksums_version_dict[c] for c in added_checksums - removed_checksums]
|
||||
|
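The added-minus-removed pruning described in the comments above can be exercised on its own; here is a small, runnable sketch with made-up diff lines and checksums:

```python
# Standalone sketch of the added-minus-removed pruning used by
# get_added_versions(); the diff text and checksums here are fabricated.
diff_lines = [
    "+    version('1.2.0', sha256='aaaa')",
    "-    version('1.1.0', sha256='bbbb')",
    "+    version('1.1.0', sha256='bbbb')",  # moved in the file, not actually new
]
checksums_version_dict = {"aaaa": "1.2.0", "bbbb": "1.1.0"}

added, removed = set(), set()
for checksum in checksums_version_dict:
    for line in diff_lines:
        if checksum in line:
            if line.startswith("+"):
                added.add(checksum)
            if line.startswith("-"):
                removed.add(checksum)

# Only checksums that were added and not also removed count as new versions
print([checksums_version_dict[c] for c in added - removed])  # ['1.2.0']
```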
||||
|
||||
def get_stack_changed(env_path, rev1="HEAD^", rev2="HEAD"):
|
||||
"""Given an environment manifest path and two revisions to compare, return
|
||||
whether or not the stack was changed. Returns True if the environment
|
||||
manifest changed between the provided revisions (or additionally if the
|
||||
`.gitlab-ci.yml` file itself changed). Returns False otherwise."""
|
||||
# git always returns posix paths, so normalize the input to be compatible
# with that
|
||||
env_path = llnl.path.convert_to_posix_path(env_path)
|
||||
git = spack.util.git.git()
|
||||
if git:
|
||||
with fs.working_dir(spack.paths.prefix):
|
||||
@@ -265,7 +219,7 @@ def rebuild_filter(s: spack.spec.Spec) -> RebuildDecision:
|
||||
|
||||
def _format_pruning_message(spec: spack.spec.Spec, prune: bool, reasons: List[str]) -> str:
|
||||
reason_msg = ", ".join(reasons)
|
||||
spec_fmt = "{name}{@version}{/hash:7}{%compiler}"
|
||||
spec_fmt = "{name}{@version}{%compiler}{/hash:7}"
|
||||
|
||||
if not prune:
|
||||
status = colorize("@*g{[x]} ")
|
||||
@@ -518,9 +472,12 @@ def generate_pipeline(env: ev.Environment, args) -> None:
|
||||
# Use all unpruned specs to populate the build group for this set
|
||||
cdash_config = cfg.get("cdash")
|
||||
if options.cdash_handler and options.cdash_handler.auth_token:
|
||||
options.cdash_handler.populate_buildgroup(
|
||||
[options.cdash_handler.build_name(s) for s in pipeline_specs]
|
||||
)
|
||||
try:
|
||||
options.cdash_handler.populate_buildgroup(
|
||||
[options.cdash_handler.build_name(s) for s in pipeline_specs]
|
||||
)
|
||||
except (SpackError, HTTPError, URLError, TimeoutError) as err:
|
||||
tty.warn(f"Problem populating buildgroup: {err}")
|
||||
elif cdash_config:
|
||||
# warn only if there was actually a CDash configuration.
|
||||
tty.warn("Unable to populate buildgroup without CDash credentials")
|
||||
@@ -622,25 +579,22 @@ def copy_stage_logs_to_artifacts(job_spec: spack.spec.Spec, job_log_dir: str) ->
|
||||
tty.debug(f"job spec: {job_spec}")
|
||||
|
||||
try:
|
||||
package_metadata_root = pathlib.Path(spack.store.STORE.layout.metadata_path(job_spec))
|
||||
except spack.error.SpackError as e:
|
||||
tty.error(f"Cannot copy logs: {str(e)}")
|
||||
pkg_cls = spack.repo.PATH.get_pkg_class(job_spec.name)
|
||||
job_pkg = pkg_cls(job_spec)
|
||||
tty.debug(f"job package: {job_pkg}")
|
||||
except AssertionError:
|
||||
msg = f"Cannot copy stage logs: job spec ({job_spec}) must be concrete"
|
||||
tty.error(msg)
|
||||
return
|
||||
|
||||
# Get the package's archived files
|
||||
archive_files = []
|
||||
archive_root = package_metadata_root / "archived-files"
|
||||
if archive_root.is_dir():
|
||||
archive_files = [f for f in archive_root.rglob("*") if f.is_file()]
|
||||
else:
|
||||
msg = "Cannot copy package archived files: archived-files must be a directory"
|
||||
tty.warn(msg)
|
||||
|
||||
build_log_zipped = package_metadata_root / "spack-build-out.txt.gz"
|
||||
build_env_mods = package_metadata_root / "spack-build-env.txt"
|
||||
|
||||
for f in [build_log_zipped, build_env_mods, *archive_files]:
|
||||
copy_files_to_artifacts(str(f), job_log_dir)
|
||||
stage_dir = job_pkg.stage.path
|
||||
tty.debug(f"stage dir: {stage_dir}")
|
||||
for file in [
|
||||
job_pkg.log_path,
|
||||
job_pkg.env_mods_path,
|
||||
*spack.builder.create(job_pkg).archive_files,
|
||||
]:
|
||||
copy_files_to_artifacts(file, job_log_dir)
|
||||
|
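The newer branch of copy_stage_logs_to_artifacts gathers everything from the package's .spack metadata directory; below is a standalone sketch of that collection step (the directory path is a placeholder):

```python
# Sketch of the artifact-collection step above: gather archived files plus the
# build log and env file from a package's metadata directory.
import pathlib

package_metadata_root = pathlib.Path("/opt/spack/.spack")  # placeholder metadata dir

archive_root = package_metadata_root / "archived-files"
archive_files = (
    [f for f in archive_root.rglob("*") if f.is_file()] if archive_root.is_dir() else []
)

to_copy = [
    package_metadata_root / "spack-build-out.txt.gz",
    package_metadata_root / "spack-build-env.txt",
    *archive_files,
]
for f in to_copy:
    print("would copy", f)  # stand-in for copy_files_to_artifacts(str(f), job_log_dir)
```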
||||
|
||||
def copy_test_logs_to_artifacts(test_stage, job_test_dir):
|
||||
@@ -660,7 +614,7 @@ def copy_test_logs_to_artifacts(test_stage, job_test_dir):
|
||||
copy_files_to_artifacts(os.path.join(test_stage, "*", "*.txt"), job_test_dir)
|
||||
|
||||
|
||||
def download_and_extract_artifacts(url, work_dir) -> str:
|
||||
def download_and_extract_artifacts(url, work_dir):
|
||||
"""Look for gitlab artifacts.zip at the given url, and attempt to download
|
||||
and extract the contents into the given work_dir
|
||||
|
||||
@@ -668,10 +622,6 @@ def download_and_extract_artifacts(url, work_dir) -> str:
|
||||
|
||||
url (str): Complete url to artifacts.zip file
|
||||
work_dir (str): Path to destination where artifacts should be extracted
|
||||
|
||||
Output:
|
||||
|
||||
Artifacts root path relative to the archive root
|
||||
"""
|
||||
tty.msg(f"Fetching artifacts from: {url}")
|
||||
|
||||
@@ -681,33 +631,31 @@ def download_and_extract_artifacts(url, work_dir) -> str:
|
||||
if token:
|
||||
headers["PRIVATE-TOKEN"] = token
|
||||
|
||||
request = Request(url, headers=headers, method="GET")
|
||||
opener = build_opener(HTTPHandler)
|
||||
|
||||
request = Request(url, headers=headers)
|
||||
request.get_method = lambda: "GET"
|
||||
|
||||
response = opener.open(request, timeout=SPACK_CDASH_TIMEOUT)
|
||||
response_code = response.getcode()
|
||||
|
||||
if response_code != 200:
|
||||
msg = f"Error response code ({response_code}) in reproduce_ci_job"
|
||||
raise SpackError(msg)
|
||||
|
||||
artifacts_zip_path = os.path.join(work_dir, "artifacts.zip")
|
||||
os.makedirs(work_dir, exist_ok=True)
|
||||
|
||||
try:
|
||||
response = urlopen(request, timeout=SPACK_CDASH_TIMEOUT)
|
||||
with open(artifacts_zip_path, "wb") as out_file:
|
||||
shutil.copyfileobj(response, out_file)
|
||||
if not os.path.exists(work_dir):
|
||||
os.makedirs(work_dir)
|
||||
|
||||
with zipfile.ZipFile(artifacts_zip_path) as zip_file:
|
||||
zip_file.extractall(work_dir)
|
||||
# Get the artifact root
|
||||
artifact_root = ""
|
||||
for f in zip_file.filelist:
|
||||
if "spack.lock" in f.filename:
|
||||
artifact_root = os.path.dirname(os.path.dirname(f.filename))
|
||||
break
|
||||
except OSError as e:
|
||||
raise SpackError(f"Error fetching artifacts: {e}")
|
||||
finally:
|
||||
try:
|
||||
os.remove(artifacts_zip_path)
|
||||
except FileNotFoundError:
|
||||
# If the file doesn't exist we are already raising
|
||||
pass
|
||||
with open(artifacts_zip_path, "wb") as out_file:
|
||||
shutil.copyfileobj(response, out_file)
|
||||
|
||||
return artifact_root
|
||||
zip_file = zipfile.ZipFile(artifacts_zip_path)
|
||||
zip_file.extractall(work_dir)
|
||||
zip_file.close()
|
||||
|
||||
os.remove(artifacts_zip_path)
|
||||
|
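The two urllib styles this hunk toggles between can be compared side by side; the URL and token below are placeholders and the network calls are left commented out:

```python
# Sketch of the two urllib request styles that appear in this diff.
from urllib.request import HTTPHandler, Request, build_opener, urlopen

url = "https://gitlab.example.com/api/v4/jobs/1/artifacts"  # placeholder
headers = {"PRIVATE-TOKEN": "<token>"}  # placeholder

# Newer style: pass the HTTP verb directly to Request
request = Request(url, headers=headers, method="GET")
# response = urlopen(request, timeout=60)

# Older style: build an opener and override get_method on the request
opener = build_opener(HTTPHandler)
request = Request(url, headers=headers)
request.get_method = lambda: "GET"
# response = opener.open(request, timeout=60)
```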
||||
|
||||
def get_spack_info():
|
||||
@@ -821,7 +769,7 @@ def setup_spack_repro_version(repro_dir, checkout_commit, merge_commit=None):
|
||||
return True
|
||||
|
||||
|
||||
def reproduce_ci_job(url, work_dir, autostart, gpg_url, runtime, use_local_head):
|
||||
def reproduce_ci_job(url, work_dir, autostart, gpg_url, runtime):
|
||||
"""Given a url to gitlab artifacts.zip from a failed 'spack ci rebuild' job,
|
||||
attempt to setup an environment in which the failure can be reproduced
|
||||
locally. This entails the following:
|
||||
@@ -835,11 +783,8 @@ def reproduce_ci_job(url, work_dir, autostart, gpg_url, runtime, use_local_head)
|
||||
commands to run to reproduce the build once inside the container.
|
||||
"""
|
||||
work_dir = os.path.realpath(work_dir)
|
||||
if os.path.exists(work_dir) and os.listdir(work_dir):
|
||||
raise SpackError(f"Cannot run reproducer in non-emptry working dir:\n {work_dir}")
|
||||
|
||||
platform_script_ext = "ps1" if IS_WINDOWS else "sh"
|
||||
artifact_root = download_and_extract_artifacts(url, work_dir)
|
||||
download_and_extract_artifacts(url, work_dir)
|
||||
|
||||
gpg_path = None
|
||||
if gpg_url:
|
||||
@@ -901,9 +846,6 @@ def reproduce_ci_job(url, work_dir, autostart, gpg_url, runtime, use_local_head)
|
||||
with open(repro_file, encoding="utf-8") as fd:
|
||||
repro_details = json.load(fd)
|
||||
|
||||
spec_file = fs.find(work_dir, repro_details["job_spec_json"])[0]
|
||||
reproducer_spec = spack.spec.Spec.from_specfile(spec_file)
|
||||
|
||||
repro_dir = os.path.dirname(repro_file)
|
||||
rel_repro_dir = repro_dir.replace(work_dir, "").lstrip(os.path.sep)
|
||||
|
||||
@@ -964,20 +906,17 @@ def reproduce_ci_job(url, work_dir, autostart, gpg_url, runtime, use_local_head)
|
||||
commit_regex = re.compile(r"commit\s+([^\s]+)")
|
||||
merge_commit_regex = re.compile(r"Merge\s+([^\s]+)\s+into\s+([^\s]+)")
|
||||
|
||||
if use_local_head:
|
||||
commit_1 = "HEAD"
|
||||
# Try the more specific merge commit regex first
|
||||
m = merge_commit_regex.search(spack_info)
|
||||
if m:
|
||||
# This was a merge commit and we captured the parents
|
||||
commit_1 = m.group(1)
|
||||
commit_2 = m.group(2)
|
||||
else:
|
||||
# Try the more specific merge commit regex first
|
||||
m = merge_commit_regex.search(spack_info)
|
||||
# Not a merge commit, just get the commit sha
|
||||
m = commit_regex.search(spack_info)
|
||||
if m:
|
||||
# This was a merge commit and we captured the parents
|
||||
commit_1 = m.group(1)
|
||||
commit_2 = m.group(2)
|
||||
else:
|
||||
# Not a merge commit, just get the commit sha
|
||||
m = commit_regex.search(spack_info)
|
||||
if m:
|
||||
commit_1 = m.group(1)
|
||||
|
||||
setup_result = False
|
||||
if commit_1:
|
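The two regexes above drive how the reproducer decides between a merge commit and a plain commit; a runnable sketch with a fabricated spack_info string:

```python
# Runnable sketch of the commit-parsing regexes used above; spack_info is a
# made-up example of the text being searched.
import re

commit_regex = re.compile(r"commit\s+([^\s]+)")
merge_commit_regex = re.compile(r"Merge\s+([^\s]+)\s+into\s+([^\s]+)")

spack_info = "Merge 1a2b3c4 into 9f8e7d6"  # fabricated reproducer metadata

m = merge_commit_regex.search(spack_info)
if m:
    # Merge commit: both parents are available
    commit_1, commit_2 = m.group(1), m.group(2)
else:
    # Plain commit: fall back to the simpler pattern
    m = commit_regex.search(spack_info)
    commit_1, commit_2 = (m.group(1) if m else None), None

print(commit_1, commit_2)  # 1a2b3c4 9f8e7d6
```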
||||
@@ -1052,8 +991,6 @@ def reproduce_ci_job(url, work_dir, autostart, gpg_url, runtime, use_local_head)
|
||||
"entrypoint", entrypoint_script, work_dir, run=False, exit_on_failure=False
|
||||
)
|
||||
|
||||
# Attempt to create a unique name for the reproducer container
|
||||
container_suffix = "_" + reproducer_spec.dag_hash() if reproducer_spec else ""
|
||||
docker_command = [
|
||||
runtime,
|
||||
"run",
|
||||
@@ -1061,14 +998,14 @@ def reproduce_ci_job(url, work_dir, autostart, gpg_url, runtime, use_local_head)
|
||||
"-t",
|
||||
"--rm",
|
||||
"--name",
|
||||
f"spack_reproducer{container_suffix}",
|
||||
"spack_reproducer",
|
||||
"-v",
|
||||
":".join([work_dir, mounted_workdir, "Z"]),
|
||||
"-v",
|
||||
":".join(
|
||||
[
|
||||
os.path.join(work_dir, artifact_root),
|
||||
os.path.join(mount_as_dir, artifact_root),
|
||||
os.path.join(work_dir, "jobs_scratch_dir"),
|
||||
os.path.join(mount_as_dir, "jobs_scratch_dir"),
|
||||
"Z",
|
||||
]
|
||||
),
|
||||
|
@@ -1,21 +1,23 @@
|
||||
# Copyright Spack Project Developers. See COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
import codecs
|
||||
import copy
|
||||
import json
|
||||
import os
|
||||
import re
|
||||
import ssl
|
||||
import sys
|
||||
import time
|
||||
from collections import deque
|
||||
from enum import Enum
|
||||
from typing import Dict, Generator, List, Optional, Set, Tuple
|
||||
from urllib.parse import quote, urlencode, urlparse
|
||||
from urllib.request import Request
|
||||
from urllib.request import HTTPHandler, HTTPSHandler, Request, build_opener
|
||||
|
||||
import llnl.util.filesystem as fs
|
||||
import llnl.util.tty as tty
|
||||
from llnl.util.lang import memoized
|
||||
from llnl.util.lang import Singleton, memoized
|
||||
|
||||
import spack.binary_distribution as bindist
|
||||
import spack.config as cfg
|
||||
@@ -33,11 +35,32 @@
|
||||
from spack.reporters.cdash import SPACK_CDASH_TIMEOUT
|
||||
from spack.reporters.cdash import build_stamp as cdash_build_stamp
|
||||
|
||||
|
||||
def _urlopen():
|
||||
error_handler = web_util.SpackHTTPDefaultErrorHandler()
|
||||
|
||||
# One opener with HTTPS ssl enabled
|
||||
with_ssl = build_opener(
|
||||
HTTPHandler(), HTTPSHandler(context=web_util.ssl_create_default_context()), error_handler
|
||||
)
|
||||
|
||||
# One opener with HTTPS ssl disabled
|
||||
without_ssl = build_opener(
|
||||
HTTPHandler(), HTTPSHandler(context=ssl._create_unverified_context()), error_handler
|
||||
)
|
||||
|
||||
# And dynamically dispatch based on the config:verify_ssl.
|
||||
def dispatch_open(fullurl, data=None, timeout=None, verify_ssl=True):
|
||||
opener = with_ssl if verify_ssl else without_ssl
|
||||
timeout = timeout or cfg.get("config:connect_timeout", 1)
|
||||
return opener.open(fullurl, data, timeout)
|
||||
|
||||
return dispatch_open
|
||||
|
||||
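A simplified, stdlib-only sketch of the verify_ssl dispatch idea behind _urlopen() above (Spack's error handler and config lookup are omitted):

```python
# Simplified sketch of an opener factory that dispatches on SSL verification.
import ssl
from urllib.request import HTTPHandler, HTTPSHandler, build_opener


def make_urlopen(default_timeout: float = 10.0):
    with_ssl = build_opener(
        HTTPHandler(), HTTPSHandler(context=ssl.create_default_context())
    )
    without_ssl = build_opener(
        HTTPHandler(), HTTPSHandler(context=ssl._create_unverified_context())
    )

    def dispatch_open(fullurl, data=None, timeout=None, verify_ssl=True):
        # Pick the opener based on whether certificate verification is wanted
        opener = with_ssl if verify_ssl else without_ssl
        return opener.open(fullurl, data, timeout or default_timeout)

    return dispatch_open
```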
|
||||
IS_WINDOWS = sys.platform == "win32"
|
||||
SPACK_RESERVED_TAGS = ["public", "protected", "notary"]
|
||||
|
||||
# this exists purely for testing purposes
|
||||
_urlopen = web_util.urlopen
|
||||
_dyn_mapping_urlopener = Singleton(_urlopen)
|
||||
|
||||
|
||||
def copy_files_to_artifacts(src, artifacts_dir):
|
||||
@@ -256,25 +279,26 @@ def copy_test_results(self, source, dest):
|
||||
reports = fs.join_path(source, "*_Test*.xml")
|
||||
copy_files_to_artifacts(reports, dest)
|
||||
|
||||
def create_buildgroup(self, headers, url, group_name, group_type):
|
||||
def create_buildgroup(self, opener, headers, url, group_name, group_type):
|
||||
data = {"newbuildgroup": group_name, "project": self.project, "type": group_type}
|
||||
|
||||
enc_data = json.dumps(data).encode("utf-8")
|
||||
|
||||
request = Request(url, data=enc_data, headers=headers)
|
||||
|
||||
try:
|
||||
response_text = _urlopen(request, timeout=SPACK_CDASH_TIMEOUT).read()
|
||||
except OSError as e:
|
||||
tty.warn(f"Failed to create CDash buildgroup: {e}")
|
||||
response = opener.open(request, timeout=SPACK_CDASH_TIMEOUT)
|
||||
response_code = response.getcode()
|
||||
|
||||
if response_code not in [200, 201]:
|
||||
msg = f"Creating buildgroup failed (response code = {response_code})"
|
||||
tty.warn(msg)
|
||||
return None
|
||||
|
||||
try:
|
||||
response_json = json.loads(response_text)
|
||||
return response_json["id"]
|
||||
except (json.JSONDecodeError, KeyError) as e:
|
||||
tty.warn(f"Failed to parse CDash response: {e}")
|
||||
return None
|
||||
response_text = response.read()
|
||||
response_json = json.loads(response_text)
|
||||
build_group_id = response_json["id"]
|
||||
|
||||
return build_group_id
|
||||
|
||||
def populate_buildgroup(self, job_names):
|
||||
url = f"{self.url}/api/v1/buildgroup.php"
|
||||
@@ -284,11 +308,16 @@ def populate_buildgroup(self, job_names):
|
||||
"Content-Type": "application/json",
|
||||
}
|
||||
|
||||
parent_group_id = self.create_buildgroup(headers, url, self.build_group, "Daily")
|
||||
group_id = self.create_buildgroup(headers, url, f"Latest {self.build_group}", "Latest")
|
||||
opener = build_opener(HTTPHandler)
|
||||
|
||||
parent_group_id = self.create_buildgroup(opener, headers, url, self.build_group, "Daily")
|
||||
group_id = self.create_buildgroup(
|
||||
opener, headers, url, f"Latest {self.build_group}", "Latest"
|
||||
)
|
||||
|
||||
if not parent_group_id or not group_id:
|
||||
tty.warn(f"Failed to create or retrieve buildgroups for {self.build_group}")
|
||||
msg = f"Failed to create or retrieve buildgroups for {self.build_group}"
|
||||
tty.warn(msg)
|
||||
return
|
||||
|
||||
data = {
|
||||
@@ -300,12 +329,15 @@ def populate_buildgroup(self, job_names):
|
||||
|
||||
enc_data = json.dumps(data).encode("utf-8")
|
||||
|
||||
request = Request(url, data=enc_data, headers=headers, method="PUT")
|
||||
request = Request(url, data=enc_data, headers=headers)
|
||||
request.get_method = lambda: "PUT"
|
||||
|
||||
try:
|
||||
_urlopen(request, timeout=SPACK_CDASH_TIMEOUT)
|
||||
except OSError as e:
|
||||
tty.warn(f"Failed to populate CDash buildgroup: {e}")
|
||||
response = opener.open(request, timeout=SPACK_CDASH_TIMEOUT)
|
||||
response_code = response.getcode()
|
||||
|
||||
if response_code != 200:
|
||||
msg = f"Error response code ({response_code}) in populate_buildgroup"
|
||||
tty.warn(msg)
|
||||
|
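For reference, the JSON request that the CDash handler assembles looks roughly like the sketch below; the URL, auth header, and payload values are placeholders:

```python
# Sketch of the JSON PUT that populate_buildgroup() issues to the CDash API.
import json
from urllib.request import Request

url = "https://cdash.example.com/api/v1/buildgroup.php"  # placeholder
headers = {
    "Authorization": "Bearer <token>",  # placeholder
    "Content-Type": "application/json",
}
data = {"newbuildgroup": "Experimental", "project": "demo", "type": "Daily"}  # placeholder payload

enc_data = json.dumps(data).encode("utf-8")
request = Request(url, data=enc_data, headers=headers, method="PUT")
# The module-level urlopen helper (or an opener) then sends it:
# response = urlopen(request, timeout=SPACK_CDASH_TIMEOUT)
```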
||||
def report_skipped(self, spec: spack.spec.Spec, report_dir: str, reason: Optional[str]):
|
||||
"""Explicitly report skipping testing of a spec (e.g., it's CI
|
||||
@@ -703,6 +735,9 @@ def _apply_section(dest, src):
|
||||
for value in header.values():
|
||||
value = os.path.expandvars(value)
|
||||
|
||||
verify_ssl = mapping.get("verify_ssl", spack.config.get("config:verify_ssl", True))
|
||||
timeout = mapping.get("timeout", spack.config.get("config:connect_timeout", 1))
|
||||
|
||||
required = mapping.get("require", [])
|
||||
allowed = mapping.get("allow", [])
|
||||
ignored = mapping.get("ignore", [])
|
||||
@@ -736,15 +771,19 @@ def job_query(job):
|
||||
endpoint_url._replace(query=query).geturl(), headers=header, method="GET"
|
||||
)
|
||||
try:
|
||||
response = _urlopen(request)
|
||||
config = json.load(response)
|
||||
response = _dyn_mapping_urlopener(
|
||||
request, verify_ssl=verify_ssl, timeout=timeout
|
||||
)
|
||||
except Exception as e:
|
||||
# For now just ignore any errors from dynamic mapping and continue
|
||||
# This is still experimental, and failures should not stop CI
|
||||
# from running normally
|
||||
tty.warn(f"Failed to fetch dynamic mapping for query:\n\t{query}: {e}")
|
||||
tty.warn(f"Failed to fetch dynamic mapping for query:\n\t{query}")
|
||||
tty.warn(f"{e}")
|
||||
continue
|
||||
|
||||
config = json.load(codecs.getreader("utf-8")(response))
|
||||
|
||||
# Strip ignore keys
|
||||
if ignored:
|
||||
for key in ignored:
|
||||
|
@@ -202,7 +202,7 @@ def _concretize_spec_pairs(
|
||||
# Special case for concretizing a single spec
|
||||
if len(to_concretize) == 1:
|
||||
abstract, concrete = to_concretize[0]
|
||||
return [concrete or spack.concretize.concretize_one(abstract, tests=tests)]
|
||||
return [concrete or abstract.concretized(tests=tests)]
|
||||
|
||||
# Special case if every spec is either concrete or has an abstract hash
|
||||
if all(
|
||||
@@ -254,9 +254,9 @@ def matching_spec_from_env(spec):
|
||||
"""
|
||||
env = ev.active_environment()
|
||||
if env:
|
||||
return env.matching_spec(spec) or spack.concretize.concretize_one(spec)
|
||||
return env.matching_spec(spec) or spec.concretized()
|
||||
else:
|
||||
return spack.concretize.concretize_one(spec)
|
||||
return spec.concretized()
|
||||
|
||||
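Both concretization spellings that this diff toggles between are shown side by side in the sketch below, assuming a Spack checkout is importable; "zlib" is an arbitrary example spec:

```python
# Sketch contrasting the two concretization spellings in this diff.
import spack.concretize
import spack.spec

abstract = spack.spec.Spec("zlib")

# Functional style used on one side of the diff
concrete_a = spack.concretize.concretize_one(abstract)

# Method style used on the other side
concrete_b = spack.spec.Spec("zlib").concretized()
```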
|
||||
def matching_specs_from_env(specs):
|
||||
@@ -297,7 +297,7 @@ def disambiguate_spec(
|
||||
|
||||
def disambiguate_spec_from_hashes(
|
||||
spec: spack.spec.Spec,
|
||||
hashes: Optional[List[str]],
|
||||
hashes: List[str],
|
||||
local: bool = False,
|
||||
installed: Union[bool, InstallRecordStatus] = True,
|
||||
first: bool = False,
|
||||
@@ -330,7 +330,7 @@ def ensure_single_spec_or_die(spec, matching_specs):
|
||||
if len(matching_specs) <= 1:
|
||||
return
|
||||
|
||||
format_string = "{name}{@version}{ arch=architecture} {%compiler.name}{@compiler.version}"
|
||||
format_string = "{name}{@version}{%compiler.name}{@compiler.version}{ arch=architecture}"
|
||||
args = ["%s matches multiple packages." % spec, "Matching packages:"]
|
||||
args += [
|
||||
colorize(" @K{%s} " % s.dag_hash(7)) + s.cformat(format_string) for s in matching_specs
|
||||
@@ -471,11 +471,12 @@ def get_arg(name, default=None):
|
||||
nfmt = "{fullname}" if namespaces else "{name}"
|
||||
ffmt = ""
|
||||
if full_compiler or flags:
|
||||
ffmt += "{compiler_flags} {%compiler.name}"
|
||||
ffmt += "{%compiler.name}"
|
||||
if full_compiler:
|
||||
ffmt += "{@compiler.version}"
|
||||
ffmt += " {compiler_flags}"
|
||||
vfmt = "{variants}" if variants else ""
|
||||
format_string = nfmt + "{@version}" + vfmt + ffmt
|
||||
format_string = nfmt + "{@version}" + ffmt + vfmt
|
||||
|
||||
def fmt(s, depth=0):
|
||||
"""Formatter function for all output specs"""
|
||||
|
@@ -3,7 +3,6 @@
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
import collections
|
||||
import warnings
|
||||
|
||||
import archspec.cpu
|
||||
|
||||
@@ -52,10 +51,10 @@ def setup_parser(subparser):
|
||||
"-t", "--target", action="store_true", default=False, help="print only the target"
|
||||
)
|
||||
parts2.add_argument(
|
||||
"-f", "--frontend", action="store_true", default=False, help="print frontend (DEPRECATED)"
|
||||
"-f", "--frontend", action="store_true", default=False, help="print frontend"
|
||||
)
|
||||
parts2.add_argument(
|
||||
"-b", "--backend", action="store_true", default=False, help="print backend (DEPRECATED)"
|
||||
"-b", "--backend", action="store_true", default=False, help="print backend"
|
||||
)
|
||||
|
||||
|
||||
@@ -99,14 +98,15 @@ def arch(parser, args):
|
||||
display_targets(archspec.cpu.TARGETS)
|
||||
return
|
||||
|
||||
os_args, target_args = "default_os", "default_target"
|
||||
if args.frontend:
|
||||
warnings.warn("the argument --frontend is deprecated, and will be removed in Spack v1.0")
|
||||
os_args, target_args = "frontend", "frontend"
|
||||
elif args.backend:
|
||||
warnings.warn("the argument --backend is deprecated, and will be removed in Spack v1.0")
|
||||
os_args, target_args = "backend", "backend"
|
||||
|
||||
host_platform = spack.platforms.host()
|
||||
host_os = host_platform.default_operating_system()
|
||||
host_target = host_platform.default_target()
|
||||
host_os = host_platform.operating_system(os_args)
|
||||
host_target = host_platform.target(target_args)
|
||||
if args.family:
|
||||
host_target = host_target.family
|
||||
elif args.generic:
|
||||
|
@@ -1,7 +1,7 @@
|
||||
# Copyright Spack Project Developers. See COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
import os
|
||||
import os.path
|
||||
import shutil
|
||||
import sys
|
||||
import tempfile
|
||||
@@ -14,9 +14,9 @@
|
||||
import spack.bootstrap
|
||||
import spack.bootstrap.config
|
||||
import spack.bootstrap.core
|
||||
import spack.concretize
|
||||
import spack.config
|
||||
import spack.mirrors.utils
|
||||
import spack.spec
|
||||
import spack.stage
|
||||
import spack.util.path
|
||||
import spack.util.spack_yaml
|
||||
@@ -397,7 +397,7 @@ def _mirror(args):
|
||||
llnl.util.tty.msg(msg.format(spec_str, mirror_dir))
|
||||
# Suppress tty from the call below for terser messages
|
||||
llnl.util.tty.set_msg_enabled(False)
|
||||
spec = spack.concretize.concretize_one(spec_str)
|
||||
spec = spack.spec.Spec(spec_str).concretized()
|
||||
for node in spec.traverse():
|
||||
spack.mirrors.utils.create(mirror_dir, [node])
|
||||
llnl.util.tty.set_msg_enabled(True)
|
||||
@@ -436,7 +436,6 @@ def write_metadata(subdir, metadata):
|
||||
shutil.copy(spack.util.path.canonicalize_path(GNUPG_JSON), abs_directory)
|
||||
shutil.copy(spack.util.path.canonicalize_path(PATCHELF_JSON), abs_directory)
|
||||
instructions += cmd.format("local-binaries", rel_directory)
|
||||
instructions += " % spack buildcache update-index <final-path>/bootstrap_cache\n"
|
||||
print(instructions)
|
||||
|
||||
|
||||
|
@@ -16,7 +16,6 @@
|
||||
|
||||
import spack.binary_distribution as bindist
|
||||
import spack.cmd
|
||||
import spack.concretize
|
||||
import spack.config
|
||||
import spack.deptypes as dt
|
||||
import spack.environment as ev
|
||||
@@ -555,7 +554,8 @@ def check_fn(args: argparse.Namespace):
|
||||
tty.msg("No specs provided, exiting.")
|
||||
return
|
||||
|
||||
specs = [spack.concretize.concretize_one(s) for s in specs]
|
||||
for spec in specs:
|
||||
spec.concretize()
|
||||
|
||||
# Next see if there are any configured binary mirrors
|
||||
configured_mirrors = spack.config.get("mirrors", scope=args.scope)
|
||||
@@ -623,7 +623,7 @@ def save_specfile_fn(args):
|
||||
root = specs[0]
|
||||
|
||||
if not root.concrete:
|
||||
root = spack.concretize.concretize_one(root)
|
||||
root.concretize()
|
||||
|
||||
save_dependency_specfiles(
|
||||
root, args.specfile_dir, dependencies=spack.cmd.parse_specs(args.specs)
|
||||
|
@@ -4,7 +4,7 @@
|
||||
|
||||
import re
|
||||
import sys
|
||||
from typing import Dict, Optional, Tuple
|
||||
from typing import Dict, Optional
|
||||
|
||||
import llnl.string
|
||||
import llnl.util.lang
|
||||
@@ -181,11 +181,7 @@ def checksum(parser, args):
|
||||
print()
|
||||
|
||||
if args.add_to_package:
|
||||
path = spack.repo.PATH.filename_for_package_name(pkg.name)
|
||||
num_versions_added = add_versions_to_pkg(path, version_lines)
|
||||
tty.msg(f"Added {num_versions_added} new versions to {pkg.name} in {path}")
|
||||
if not args.batch and sys.stdin.isatty():
|
||||
editor(path)
|
||||
add_versions_to_package(pkg, version_lines, args.batch)
|
||||
|
||||
|
||||
def print_checksum_status(pkg: PackageBase, version_hashes: dict):
|
||||
@@ -231,9 +227,20 @@ def print_checksum_status(pkg: PackageBase, version_hashes: dict):
|
||||
tty.die("Invalid checksums found.")
|
||||
|
||||
|
||||
def _update_version_statements(package_src: str, version_lines: str) -> Tuple[int, str]:
|
||||
"""Returns a tuple of number of versions added and the package's modified contents."""
|
||||
def add_versions_to_package(pkg: PackageBase, version_lines: str, is_batch: bool):
|
||||
"""
|
||||
Add checksummed versions to a package's instructions and open the user's
editor so they may double-check the work of the function.
|
||||
|
||||
Args:
|
||||
pkg (spack.package_base.PackageBase): A package class for a given package in Spack.
|
||||
version_lines (str): A string of rendered version lines.
|
||||
|
||||
"""
|
||||
# Get filename and path for package
|
||||
filename = spack.repo.PATH.filename_for_package_name(pkg.name)
|
||||
num_versions_added = 0
|
||||
|
||||
version_statement_re = re.compile(r"([\t ]+version\([^\)]*\))")
|
||||
version_re = re.compile(r'[\t ]+version\(\s*"([^"]+)"[^\)]*\)')
|
||||
|
||||
@@ -245,34 +252,33 @@ def _update_version_statements(package_src: str, version_lines: str) -> Tuple[in
|
||||
if match:
|
||||
new_versions.append((Version(match.group(1)), ver_line))
|
||||
|
||||
split_contents = version_statement_re.split(package_src)
|
||||
with open(filename, "r+", encoding="utf-8") as f:
|
||||
contents = f.read()
|
||||
split_contents = version_statement_re.split(contents)
|
||||
|
||||
for i, subsection in enumerate(split_contents):
|
||||
# If there are no more versions to add we should exit
|
||||
if len(new_versions) <= 0:
|
||||
break
|
||||
for i, subsection in enumerate(split_contents):
|
||||
# If there are no more versions to add we should exit
|
||||
if len(new_versions) <= 0:
|
||||
break
|
||||
|
||||
# Check if the section contains a version
|
||||
contents_version = version_re.match(subsection)
|
||||
if contents_version is not None:
|
||||
parsed_version = Version(contents_version.group(1))
|
||||
# Check if the section contains a version
|
||||
contents_version = version_re.match(subsection)
|
||||
if contents_version is not None:
|
||||
parsed_version = Version(contents_version.group(1))
|
||||
|
||||
if parsed_version < new_versions[0][0]:
|
||||
split_contents[i:i] = [new_versions.pop(0)[1], " # FIXME", "\n"]
|
||||
num_versions_added += 1
|
||||
if parsed_version < new_versions[0][0]:
|
||||
split_contents[i:i] = [new_versions.pop(0)[1], " # FIXME", "\n"]
|
||||
num_versions_added += 1
|
||||
|
||||
elif parsed_version == new_versions[0][0]:
|
||||
new_versions.pop(0)
|
||||
elif parsed_version == new_versions[0][0]:
|
||||
new_versions.pop(0)
|
||||
|
||||
return num_versions_added, "".join(split_contents)
|
||||
# Seek back to the start of the file so we can rewrite the file contents.
|
||||
f.seek(0)
|
||||
f.writelines("".join(split_contents))
|
||||
|
||||
tty.msg(f"Added {num_versions_added} new versions to {pkg.name}")
|
||||
tty.msg(f"Open {filename} to review the additions.")
|
||||
|
||||
def add_versions_to_pkg(path: str, version_lines: str) -> int:
|
||||
"""Add new versions to a package.py file. Returns the number of versions added."""
|
||||
with open(path, "r", encoding="utf-8") as f:
|
||||
package_src = f.read()
|
||||
num_versions_added, package_src = _update_version_statements(package_src, version_lines)
|
||||
if num_versions_added > 0:
|
||||
with open(path, "w", encoding="utf-8") as f:
|
||||
f.write(package_src)
|
||||
return num_versions_added
|
||||
if sys.stdout.isatty() and not is_batch:
|
||||
editor(filename)
|
||||
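The two regexes above do all the work of splicing new version() lines into a package.py; here is a standalone, runnable demo with a made-up package source:

```python
# Standalone demo of the version-splicing regexes from the checksum command;
# the package source below is a fabricated snippet.
import re

version_statement_re = re.compile(r"([\t ]+version\([^\)]*\))")
version_re = re.compile(r'[\t ]+version\(\s*"([^"]+)"[^\)]*\)')

package_src = '''class Foo(Package):
    version("1.1.0", sha256="bbbb")
    version("1.0.0", sha256="aaaa")
'''

# Splitting on whole version(...) statements keeps each one as its own chunk,
# so new lines can be inserted in version order before the first older entry.
chunks = version_statement_re.split(package_src)
for chunk in chunks:
    m = version_re.match(chunk)
    if m:
        print("found version:", m.group(1))
```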
|
@@ -4,15 +4,12 @@
|
||||
|
||||
import json
|
||||
import os
|
||||
import re
|
||||
import shutil
|
||||
import sys
|
||||
from typing import Dict
|
||||
from urllib.parse import urlparse, urlunparse
|
||||
|
||||
import llnl.util.filesystem as fs
|
||||
import llnl.util.tty as tty
|
||||
import llnl.util.tty.color as clr
|
||||
from llnl.util import tty
|
||||
|
||||
import spack.binary_distribution as bindist
|
||||
import spack.ci as spack_ci
|
||||
@@ -21,22 +18,12 @@
|
||||
import spack.cmd.common.arguments
|
||||
import spack.config as cfg
|
||||
import spack.environment as ev
|
||||
import spack.error
|
||||
import spack.fetch_strategy
|
||||
import spack.hash_types as ht
|
||||
import spack.mirrors.mirror
|
||||
import spack.package_base
|
||||
import spack.paths
|
||||
import spack.repo
|
||||
import spack.spec
|
||||
import spack.stage
|
||||
import spack.util.executable
|
||||
import spack.util.git
|
||||
import spack.util.gpg as gpg_util
|
||||
import spack.util.timer as timer
|
||||
import spack.util.url as url_util
|
||||
import spack.util.web as web_util
|
||||
import spack.version
|
||||
|
||||
description = "manage continuous integration pipelines"
|
||||
section = "build"
|
||||
@@ -45,7 +32,6 @@
|
||||
SPACK_COMMAND = "spack"
|
||||
INSTALL_FAIL_CODE = 1
|
||||
FAILED_CREATE_BUILDCACHE_CODE = 100
|
||||
BUILTIN = re.compile(r"var\/spack\/repos\/builtin\/packages\/([^\/]+)\/package\.py")
|
||||
|
||||
|
||||
def deindent(desc):
|
||||
@@ -190,11 +176,6 @@ def setup_parser(subparser):
|
||||
reproduce.add_argument(
|
||||
"-s", "--autostart", help="Run docker reproducer automatically", action="store_true"
|
||||
)
|
||||
reproduce.add_argument(
|
||||
"--use-local-head",
|
||||
help="Use the HEAD of the local Spack instead of reproducing a commit",
|
||||
action="store_true",
|
||||
)
|
||||
gpg_group = reproduce.add_mutually_exclusive_group(required=False)
|
||||
gpg_group.add_argument(
|
||||
"--gpg-file", help="Path to public GPG key for validating binary cache installs"
|
||||
@@ -205,16 +186,6 @@ def setup_parser(subparser):
|
||||
|
||||
reproduce.set_defaults(func=ci_reproduce)
|
||||
|
||||
# Verify checksums inside of ci workflows
|
||||
verify_versions = subparsers.add_parser(
|
||||
"verify-versions",
|
||||
description=deindent(ci_verify_versions.__doc__),
|
||||
help=spack.cmd.first_line(ci_verify_versions.__doc__),
|
||||
)
|
||||
verify_versions.add_argument("from_ref", help="git ref from which start looking at changes")
|
||||
verify_versions.add_argument("to_ref", help="git ref to end looking at changes")
|
||||
verify_versions.set_defaults(func=ci_verify_versions)
|
||||
|
||||
|
||||
def ci_generate(args):
|
||||
"""generate jobs file from a CI-aware spack file
|
||||
@@ -451,7 +422,7 @@ def ci_rebuild(args):
|
||||
|
||||
# Arguments when installing the root from sources
|
||||
deps_install_args = install_args + ["--only=dependencies"]
|
||||
root_install_args = install_args + ["--only=package"]
|
||||
root_install_args = install_args + ["--keep-stage", "--only=package"]
|
||||
|
||||
if cdash_handler:
|
||||
# Add additional arguments to `spack install` for CDash reporting.
|
||||
@@ -488,7 +459,8 @@ def ci_rebuild(args):
|
||||
job_spec.to_dict(hash=ht.dag_hash),
|
||||
)
|
||||
|
||||
# Copy logs and archived files from the install metadata (.spack) directory to artifacts now
|
||||
# We generated the "spack install ..." command to "--keep-stage", copy
|
||||
# any logs from the staging directory to artifacts now
|
||||
spack_ci.copy_stage_logs_to_artifacts(job_spec, job_log_dir)
|
||||
|
||||
# If the installation succeeded and we're running stand-alone tests for
|
||||
@@ -636,12 +608,7 @@ def ci_reproduce(args):
|
||||
gpg_key_url = None
|
||||
|
||||
return spack_ci.reproduce_ci_job(
|
||||
args.job_url,
|
||||
args.working_dir,
|
||||
args.autostart,
|
||||
gpg_key_url,
|
||||
args.runtime,
|
||||
args.use_local_head,
|
||||
args.job_url, args.working_dir, args.autostart, gpg_key_url, args.runtime
|
||||
)
|
||||
|
||||
|
||||
@@ -683,159 +650,6 @@ def _gitlab_artifacts_url(url: str) -> str:
|
||||
return urlunparse(parsed._replace(path="/".join(parts), fragment="", query=""))
|
||||
|
||||
|
||||
def validate_standard_versions(
|
||||
pkg: spack.package_base.PackageBase, versions: spack.version.VersionList
|
||||
) -> bool:
|
||||
"""Get and test the checksum of a package version based on a tarball.
|
||||
Args:
|
||||
pkg spack.package_base.PackageBase: Spack package for which to validate a version checksum
|
||||
versions spack.version.VersionList: list of package versions to validate
|
||||
Returns: bool: result of the validation. True if valid, False otherwise.
|
||||
"""
|
||||
url_dict: Dict[spack.version.StandardVersion, str] = {}
|
||||
|
||||
for version in versions:
|
||||
url = pkg.find_valid_url_for_version(version)
|
||||
url_dict[version] = url
|
||||
|
||||
version_hashes = spack.stage.get_checksums_for_versions(
|
||||
url_dict, pkg.name, fetch_options=pkg.fetch_options
|
||||
)
|
||||
|
||||
valid_checksums = True
|
||||
for version, sha in version_hashes.items():
|
||||
if sha != pkg.versions[version]["sha256"]:
|
||||
tty.error(
|
||||
f"Invalid checksum found {pkg.name}@{version}\n"
|
||||
f" [package.py] {pkg.versions[version]['sha256']}\n"
|
||||
f" [Downloaded] {sha}"
|
||||
)
|
||||
valid_checksums = False
|
||||
continue
|
||||
|
||||
tty.info(f"Validated {pkg.name}@{version} --> {sha}")
|
||||
|
||||
return valid_checksums
|
||||
|
||||
|
||||
def validate_git_versions(
|
||||
pkg: spack.package_base.PackageBase, versions: spack.version.VersionList
|
||||
) -> bool:
|
||||
"""Get and test the commit and tag of a package version based on a git repository.
|
||||
Args:
|
||||
pkg spack.package_base.PackageBase: Spack package for which to validate a version
|
||||
versions spack.version.VersionList: list of package versions to validate
|
||||
Returns: bool: result of the validation. True if valid, False otherwise.
|
||||
"""
|
||||
valid_commit = True
|
||||
for version in versions:
|
||||
fetcher = spack.fetch_strategy.for_package_version(pkg, version)
|
||||
with spack.stage.Stage(fetcher) as stage:
|
||||
known_commit = pkg.versions[version]["commit"]
|
||||
try:
|
||||
stage.fetch()
|
||||
except spack.error.FetchError:
|
||||
tty.error(
|
||||
f"Invalid commit for {pkg.name}@{version}\n"
|
||||
f" {known_commit} could not be checked out in the git repository."
|
||||
)
|
||||
valid_commit = False
|
||||
continue
|
||||
|
||||
# Test if the specified tag matches the commit in the package.py
|
||||
# We retrieve the commit associated with a tag and compare it to the
|
||||
# commit that is located in the package.py file.
|
||||
if "tag" in pkg.versions[version]:
|
||||
tag = pkg.versions[version]["tag"]
|
||||
try:
|
||||
with fs.working_dir(stage.source_path):
|
||||
found_commit = fetcher.git(
|
||||
"rev-list", "-n", "1", tag, output=str, error=str
|
||||
).strip()
|
||||
except spack.util.executable.ProcessError:
|
||||
tty.error(
|
||||
f"Invalid tag for {pkg.name}@{version}\n"
|
||||
f" {tag} could not be found in the git repository."
|
||||
)
|
||||
valid_commit = False
|
||||
continue
|
||||
|
||||
if found_commit != known_commit:
|
||||
tty.error(
|
||||
f"Mismatched tag <-> commit found for {pkg.name}@{version}\n"
|
||||
f" [package.py] {known_commit}\n"
|
||||
f" [Downloaded] {found_commit}"
|
||||
)
|
||||
valid_commit = False
|
||||
continue
|
||||
|
||||
# If we have downloaded the repository, found the commit, and compared
|
||||
# the tag (if specified) we can conclude that the version is pointing
|
||||
# at what we would expect.
|
||||
tty.info(f"Validated {pkg.name}@{version} --> {known_commit}")
|
||||
|
||||
return valid_commit
|
||||
|
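The tag check above boils down to resolving a tag with `git rev-list -n 1` and comparing it to the commit pinned in package.py; a minimal subprocess sketch (repo_dir, tag, and known_commit are placeholders):

```python
# Minimal sketch of the tag -> commit comparison performed above.
import subprocess


def tag_matches_commit(repo_dir: str, tag: str, known_commit: str) -> bool:
    # `git rev-list -n 1 <tag>` prints the commit a tag ultimately points at
    found = subprocess.run(
        ["git", "rev-list", "-n", "1", tag],
        cwd=repo_dir,
        capture_output=True,
        text=True,
        check=True,
    ).stdout.strip()
    return found == known_commit
```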
||||
|
||||
def ci_verify_versions(args):
|
||||
"""validate version checksum & commits between git refs
|
||||
This command takes from_ref and to_ref arguments, parses the git diff
between the two to determine which packages have been modified, and
verifies the new checksums inside of them.
|
||||
"""
|
||||
with fs.working_dir(spack.paths.prefix):
|
||||
# We use HEAD^1 explicitly on the merge commit created by
|
||||
# GitHub Actions. However HEAD~1 is a safer default for the helper function.
|
||||
files = spack.util.git.get_modified_files(from_ref=args.from_ref, to_ref=args.to_ref)
|
||||
|
||||
# Get a list of package names from the modified files.
|
||||
pkgs = [(m.group(1), p) for p in files for m in [BUILTIN.search(p)] if m]
|
||||
|
||||
failed_version = False
|
||||
for pkg_name, path in pkgs:
|
||||
spec = spack.spec.Spec(pkg_name)
|
||||
pkg = spack.repo.PATH.get_pkg_class(spec.name)(spec)
|
||||
|
||||
# Skip checking manual download packages and trust the maintainers
|
||||
if pkg.manual_download:
|
||||
tty.warn(f"Skipping manual download package: {pkg_name}")
|
||||
continue
|
||||
|
||||
# Store versions checksums / commits for future loop
|
||||
checksums_version_dict = {}
|
||||
commits_version_dict = {}
|
||||
for version in pkg.versions:
|
||||
# If the package version defines a sha256 we'll use that as the high entropy
|
||||
# string to detect which versions have been added between from_ref and to_ref
|
||||
if "sha256" in pkg.versions[version]:
|
||||
checksums_version_dict[pkg.versions[version]["sha256"]] = version
|
||||
|
||||
# If a package version instead defines a commit we'll use that as a
|
||||
# high entropy string to detect new versions.
|
||||
elif "commit" in pkg.versions[version]:
|
||||
commits_version_dict[pkg.versions[version]["commit"]] = version
|
||||
|
||||
# TODO: enforce that every version has a commit or a sha256 defined if it is not
# an infinite version (there are a lot of packages where this doesn't work yet.)
|
||||
|
||||
with fs.working_dir(spack.paths.prefix):
|
||||
added_checksums = spack_ci.get_added_versions(
|
||||
checksums_version_dict, path, from_ref=args.from_ref, to_ref=args.to_ref
|
||||
)
|
||||
added_commits = spack_ci.get_added_versions(
|
||||
commits_version_dict, path, from_ref=args.from_ref, to_ref=args.to_ref
|
||||
)
|
||||
|
||||
if added_checksums:
|
||||
failed_version = not validate_standard_versions(pkg, added_checksums) or failed_version
|
||||
|
||||
if added_commits:
|
||||
failed_version = not validate_git_versions(pkg, added_commits) or failed_version
|
||||
|
||||
if failed_version:
|
||||
sys.exit(1)
|
||||
|
||||
|
||||
def ci(parser, args):
|
||||
if args.func:
|
||||
return args.func(args)
@@ -4,7 +4,7 @@
|
||||
|
||||
|
||||
import argparse
|
||||
import os
|
||||
import os.path
|
||||
import textwrap
|
||||
|
||||
from llnl.util.lang import stable_partition
|
||||
@@ -528,6 +528,7 @@ def __call__(self, parser, namespace, values, option_string):
|
||||
# the const from the constructor or a value from the CLI.
|
||||
# Note that this is only called if the argument is actually
|
||||
# specified on the command line.
|
||||
spack.config.CONFIG.ensure_scope_ordering()
|
||||
spack.config.set(self.config_path, self.const, scope="command_line")
|
||||
|
||||
|
||||
|
@@ -350,12 +350,9 @@ def _config_change(config_path, match_spec_str=None):
|
||||
if spack.config.get(key_path, scope=scope):
|
||||
ideal_scope_to_modify = scope
|
||||
break
|
||||
# If we find our key in a specific scope, that's the one we want
|
||||
# to modify. Otherwise we use the default write scope.
|
||||
write_scope = ideal_scope_to_modify or spack.config.default_modify_scope()
|
||||
|
||||
update_path = f"{key_path}:[{str(spec)}]"
|
||||
spack.config.add(update_path, scope=write_scope)
|
||||
spack.config.add(update_path, scope=ideal_scope_to_modify)
|
||||
else:
|
||||
raise ValueError("'config change' can currently only change 'require' sections")
|
||||
|
||||
|
@@ -2,6 +2,7 @@
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
import os
|
||||
import os.path
|
||||
|
||||
import llnl.util.tty
|
||||
|
||||
|
@@ -2,11 +2,23 @@
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
import os
|
||||
import platform
|
||||
import re
|
||||
import sys
|
||||
from datetime import datetime
|
||||
from glob import glob
|
||||
|
||||
import llnl.util.tty as tty
|
||||
from llnl.util.filesystem import working_dir
|
||||
|
||||
import spack
|
||||
import spack.paths
|
||||
import spack.platforms
|
||||
import spack.spec
|
||||
import spack.store
|
||||
import spack.util.git
|
||||
from spack.util.executable import which
|
||||
|
||||
description = "debugging commands for troubleshooting Spack"
|
||||
section = "developer"
|
||||
@@ -15,13 +27,67 @@
|
||||
|
||||
def setup_parser(subparser):
|
||||
sp = subparser.add_subparsers(metavar="SUBCOMMAND", dest="debug_command")
|
||||
sp.add_parser("create-db-tarball", help="create a tarball of Spack's installation metadata")
|
||||
sp.add_parser("report", help="print information useful for bug reports")
|
||||
|
||||
|
||||
def _debug_tarball_suffix():
|
||||
now = datetime.now()
|
||||
suffix = now.strftime("%Y-%m-%d-%H%M%S")
|
||||
|
||||
git = spack.util.git.git()
|
||||
if not git:
|
||||
return "nobranch-nogit-%s" % suffix
|
||||
|
||||
with working_dir(spack.paths.prefix):
|
||||
if not os.path.isdir(".git"):
|
||||
return "nobranch.nogit.%s" % suffix
|
||||
|
||||
# Get symbolic branch name and strip any special chars (mainly '/')
|
||||
symbolic = git("rev-parse", "--abbrev-ref", "--short", "HEAD", output=str).strip()
|
||||
symbolic = re.sub(r"[^\w.-]", "-", symbolic)
|
||||
|
||||
# Get the commit hash too.
|
||||
commit = git("rev-parse", "--short", "HEAD", output=str).strip()
|
||||
|
||||
if symbolic == commit:
|
||||
return "nobranch.%s.%s" % (commit, suffix)
|
||||
else:
|
||||
return "%s.%s.%s" % (symbolic, commit, suffix)
|
||||
|
||||
|
||||
def create_db_tarball(args):
|
||||
tar = which("tar")
|
||||
tarball_name = "spack-db.%s.tar.gz" % _debug_tarball_suffix()
|
||||
tarball_path = os.path.abspath(tarball_name)
|
||||
|
||||
base = os.path.basename(str(spack.store.STORE.root))
|
||||
transform_args = []
|
||||
# Currently --transform and -s are not supported by Windows native tar
|
||||
if "GNU" in tar("--version", output=str):
|
||||
transform_args = ["--transform", "s/^%s/%s/" % (base, tarball_name)]
|
||||
elif sys.platform != "win32":
|
||||
transform_args = ["-s", "/^%s/%s/" % (base, tarball_name)]
|
||||
|
||||
wd = os.path.dirname(str(spack.store.STORE.root))
|
||||
with working_dir(wd):
|
||||
files = [spack.store.STORE.db._index_path]
|
||||
files += glob("%s/*/*/*/.spack/spec.json" % base)
|
||||
files += glob("%s/*/*/*/.spack/spec.yaml" % base)
|
||||
files = [os.path.relpath(f) for f in files]
|
||||
|
||||
args = ["-czf", tarball_path]
|
||||
args += transform_args
|
||||
args += files
|
||||
tar(*args)
|
||||
|
||||
tty.msg("Created %s" % tarball_name)
|
||||
|
||||
|
||||
def report(args):
|
||||
host_platform = spack.platforms.host()
|
||||
host_os = host_platform.default_operating_system()
|
||||
host_target = host_platform.default_target()
|
||||
host_os = host_platform.operating_system("frontend")
|
||||
host_target = host_platform.target("frontend")
|
||||
architecture = spack.spec.ArchSpec((str(host_platform), str(host_os), str(host_target)))
|
||||
print("* **Spack:**", spack.get_version())
|
||||
print("* **Python:**", platform.python_version())
|
||||
@@ -29,5 +95,5 @@ def report(args):
|
||||
|
||||
|
||||
def debug(parser, args):
|
||||
if args.debug_command == "report":
|
||||
report(args)
|
||||
action = {"create-db-tarball": create_db_tarball, "report": report}
|
||||
action[args.debug_command](args)
|
||||
|
@@ -9,9 +9,9 @@
|
||||
|
||||
import spack.cmd
|
||||
import spack.environment as ev
|
||||
import spack.package_base
|
||||
import spack.store
|
||||
from spack.cmd.common import arguments
|
||||
from spack.solver.input_analysis import create_graph_analyzer
|
||||
|
||||
description = "show dependencies of a package"
|
||||
section = "basic"
|
||||
@@ -55,7 +55,7 @@ def dependencies(parser, args):
|
||||
env = ev.active_environment()
|
||||
spec = spack.cmd.disambiguate_spec(specs[0], env)
|
||||
|
||||
format_string = "{name}{@version}{/hash:7}{%compiler}"
|
||||
format_string = "{name}{@version}{%compiler}{/hash:7}"
|
||||
if sys.stdout.isatty():
|
||||
tty.msg("Dependencies of %s" % spec.format(format_string, color=True))
|
||||
deps = spack.store.STORE.db.installed_relatives(
|
||||
@@ -68,17 +68,15 @@ def dependencies(parser, args):
|
||||
|
||||
else:
|
||||
spec = specs[0]
|
||||
dependencies, virtuals, _ = create_graph_analyzer().possible_dependencies(
|
||||
dependencies = spack.package_base.possible_dependencies(
|
||||
spec,
|
||||
transitive=args.transitive,
|
||||
expand_virtuals=args.expand_virtuals,
|
||||
allowed_deps=args.deptype,
|
||||
depflag=args.deptype,
|
||||
)
|
||||
if not args.expand_virtuals:
|
||||
dependencies.update(virtuals)
|
||||
|
||||
if spec.name in dependencies:
|
||||
dependencies.remove(spec.name)
|
||||
del dependencies[spec.name]
|
||||
|
||||
if dependencies:
|
||||
colify(sorted(dependencies))
|
||||
|
@@ -93,7 +93,7 @@ def dependents(parser, args):
|
||||
env = ev.active_environment()
|
||||
spec = spack.cmd.disambiguate_spec(specs[0], env)
|
||||
|
||||
format_string = "{name}{@version}{/hash:7}{%compiler}"
|
||||
format_string = "{name}{@version}{%compiler}{/hash:7}"
|
||||
if sys.stdout.isatty():
|
||||
tty.msg("Dependents of %s" % spec.cformat(format_string))
|
||||
deps = spack.store.STORE.db.installed_relatives(spec, "parents", args.transitive)
|
||||
|
@@ -18,7 +18,6 @@
|
||||
from llnl.util.symlink import symlink
|
||||
|
||||
import spack.cmd
|
||||
import spack.concretize
|
||||
import spack.environment as ev
|
||||
import spack.installer
|
||||
import spack.store
|
||||
@@ -104,7 +103,7 @@ def deprecate(parser, args):
|
||||
)
|
||||
|
||||
if args.install:
|
||||
deprecator = spack.concretize.concretize_one(specs[1])
|
||||
deprecator = specs[1].concretized()
|
||||
else:
|
||||
deprecator = spack.cmd.disambiguate_spec(specs[1], env, local=True)
@@ -10,7 +10,6 @@
|
||||
import spack.build_environment
|
||||
import spack.cmd
|
||||
import spack.cmd.common.arguments
|
||||
import spack.concretize
|
||||
import spack.config
|
||||
import spack.repo
|
||||
from spack.cmd.common import arguments
|
||||
@@ -114,8 +113,8 @@ def dev_build(self, args):
|
||||
source_path = os.path.abspath(source_path)
|
||||
|
||||
# Forces the build to run out of the source directory.
|
||||
spec.constrain(f'dev_path="{source_path}"')
|
||||
spec = spack.concretize.concretize_one(spec)
|
||||
spec.constrain("dev_path=%s" % source_path)
|
||||
spec.concretize()
|
||||
|
||||
if spec.installed:
|
||||
tty.error("Already installed in %s" % spec.prefix)
|
||||
|
@@ -3,13 +3,11 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os
import shutil
from typing import Optional

import llnl.util.tty as tty

import spack.cmd
import spack.config
import spack.environment
import spack.fetch_strategy
import spack.repo
import spack.spec
@@ -33,33 +31,37 @@ def setup_parser(subparser):
        "--no-clone",
        action="store_false",
        dest="clone",
        default=None,
        help="do not clone, the package already exists at the source path",
    )
    clone_group.add_argument(
        "--clone",
        action="store_true",
        dest="clone",
        default=True,
        help=(
            "(default) clone the package unless the path already exists, "
            "use --force to overwrite"
        ),
        default=None,
        help="clone the package even if the path already exists",
    )

    subparser.add_argument(
        "-f", "--force", help="remove any files or directories that block cloning source code"
    )

    subparser.add_argument(
        "-r",
        "--recursive",
        action="store_true",
        help="traverse nodes of the graph to mark everything up to the root as a develop spec",
    )

    arguments.add_common_arguments(subparser, ["spec"])


def _update_config(spec, path):
    find_fn = lambda section: spec.name in section

    entry = {"spec": str(spec)}
    if path != spec.name:
        entry["path"] = path

    def change_fn(section):
        section[spec.name] = entry

    spack.config.change_or_add("develop", find_fn, change_fn)


def _retrieve_develop_source(spec: spack.spec.Spec, abspath: str) -> None:
    # "steal" the source code via staging API. We ask for a stage
    # to be created, then copy it afterwards somewhere else. It would be
@@ -81,159 +83,86 @@ def _retrieve_develop_source(spec: spack.spec.Spec, abspath: str) -> None:
    package.stage.steal_source(abspath)


def assure_concrete_spec(env: spack.environment.Environment, spec: spack.spec.Spec):
    version = spec.versions.concrete_range_as_version
    if not version:
        # first check environment for a matching concrete spec
        matching_specs = env.all_matching_specs(spec)
        if matching_specs:
            version = matching_specs[0].version
            test_spec = spack.spec.Spec(f"{spec}@{version}")
            for m_spec in matching_specs:
                if not m_spec.satisfies(test_spec):
                    raise SpackError(
                        f"{spec.name}: has multiple concrete instances in the graph that can't be"
                        " satisified by a single develop spec. To use `spack develop` ensure one"
                        " of the following:"
                        f"\n a) {spec.name} nodes can satisfy the same develop spec (minimally "
                        "this means they all share the same version)"
                        f"\n b) Provide a concrete develop spec ({spec.name}@[version]) to clearly"
                        " indicate what should be developed"
                    )
        else:
            # look up the maximum version so infintiy versions are preferred for develop
            version = max(spec.package_class.versions.keys())
            tty.msg(f"Defaulting to highest version: {spec.name}@{version}")
        spec.versions = spack.version.VersionList([version])


def setup_src_code(spec: spack.spec.Spec, src_path: str, clone: bool = True, force: bool = False):
    """
    Handle checking, cloning or overwriting source code
    """
    assert spec.versions

    if clone:
        _clone(spec, src_path, force)

    if not clone and not os.path.exists(src_path):
        raise SpackError(f"Provided path {src_path} does not exist")

    version = spec.versions.concrete_range_as_version
    if not version:
        # look up the maximum version so infintiy versions are preferred for develop
        version = max(spack.repo.PATH.get_pkg_class(spec.fullname).versions.keys())
        tty.msg(f"Defaulting to highest version: {spec.name}@{version}")
    spec.versions = spack.version.VersionList([version])


def _update_config(spec, path):
    find_fn = lambda section: spec.name in section

    entry = {"spec": str(spec)}
    if path and path != spec.name:
        entry["path"] = path

    def change_fn(section):
        section[spec.name] = entry

    spack.config.change_or_add("develop", find_fn, change_fn)


def update_env(
    env: spack.environment.Environment,
    spec: spack.spec.Spec,
    specified_path: Optional[str] = None,
    build_dir: Optional[str] = None,
):
    """
    Update the spack.yaml file with additions or changes from a develop call
    """
    tty.debug(f"Updating develop config for {env.name} transactionally")

    if not specified_path:
        dev_entry = env.dev_specs.get(spec.name)
        if dev_entry:
            specified_path = dev_entry.get("path", None)

    with env.write_transaction():
        if build_dir is not None:
            spack.config.add(
                f"packages:{spec.name}:package_attributes:build_directory:{build_dir}",
                env.scope_name,
            )
        # add develop spec and update path
        _update_config(spec, specified_path)


def _clone(spec: spack.spec.Spec, abspath: str, force: bool = False):
    if os.path.exists(abspath):
        if force:
            shutil.rmtree(abspath)
        else:
            msg = f"Skipping developer download of {spec.name}"
            msg += f" because its path {abspath} already exists."
            tty.msg(msg)
            return

    # cloning can take a while and it's nice to get a message for the longer clones
    tty.msg(f"Cloning source code for {spec}")
    _retrieve_develop_source(spec, abspath)


def _abs_code_path(
    env: spack.environment.Environment, spec: spack.spec.Spec, path: Optional[str] = None
):
    src_path = path if path else spec.name
    return spack.util.path.canonicalize_path(src_path, default_wd=env.path)


def _dev_spec_generator(args, env):
    """
    Generator function to loop over all the develop specs based on how the command is called
    If no specs are supplied then loop over the develop specs listed in the environment.
    """
def develop(parser, args):
    # Note: we could put develop specs in any scope, but I assume
    # users would only ever want to do this for either (a) an active
    # env or (b) a specified config file (e.g. that is included by
    # an environment)
    # TODO: when https://github.com/spack/spack/pull/35307 is merged,
    # an active env is not required if a scope is specified
    env = spack.cmd.require_active_env(cmd_name="develop")
    if not args.spec:
        if args.clone is False:
            raise SpackError("No spec provided to spack develop command")

        # download all dev specs
        for name, entry in env.dev_specs.items():
            path = entry.get("path", name)
            abspath = spack.util.path.canonicalize_path(path, default_wd=env.path)

            if os.path.exists(abspath):
                msg = "Skipping developer download of %s" % entry["spec"]
                msg += " because its path already exists."
                tty.msg(msg)
                continue

            # Both old syntax `spack develop pkg@x` and new syntax `spack develop pkg@=x`
            # are currently supported.
            spec = spack.spec.parse_with_version_concrete(entry["spec"])
            yield spec, abspath
            _retrieve_develop_source(spec, abspath)

        if not env.dev_specs:
            tty.warn("No develop specs to download")

        return

    specs = spack.cmd.parse_specs(args.spec)
    if len(specs) > 1:
        raise SpackError("spack develop requires at most one named spec")

    spec = specs[0]

    version = spec.versions.concrete_range_as_version
    if not version:
        # look up the maximum version so infintiy versions are preferred for develop
        version = max(spec.package_class.versions.keys())
        tty.msg(f"Defaulting to highest version: {spec.name}@{version}")
    spec.versions = spack.version.VersionList([version])

    # If user does not specify --path, we choose to create a directory in the
    # active environment's directory, named after the spec
    path = args.path or spec.name
    if not os.path.isabs(path):
        abspath = spack.util.path.canonicalize_path(path, default_wd=env.path)
    else:
    specs = spack.cmd.parse_specs(args.spec)
    if (args.path or args.build_directory) and len(specs) > 1:
        raise SpackError(
            "spack develop requires at most one named spec when using the --path or"
            " --build-directory arguments"
        )
        abspath = path

    for spec in specs:
        if args.recursive:
            concrete_specs = env.all_matching_specs(spec)
            if not concrete_specs:
                tty.warn(
                    f"{spec.name} has no matching concrete specs in the environment and "
                    "will be skipped. `spack develop --recursive` requires a concretized"
                    " environment"
                )
            else:
                for s in concrete_specs:
                    for node_spec in s.traverse(direction="parents", root=True):
                        tty.debug(f"Recursive develop for {node_spec.name}")
                        yield node_spec, _abs_code_path(env, node_spec, args.path)
    # clone default: only if the path doesn't exist
    clone = args.clone
    if clone is None:
        clone = not os.path.exists(abspath)

    if not clone and not os.path.exists(abspath):
        raise SpackError("Provided path %s does not exist" % abspath)

    if clone:
        if os.path.exists(abspath):
            if args.force:
                shutil.rmtree(abspath)
            else:
            yield spec, _abs_code_path(env, spec, args.path)
                msg = "Path %s already exists and cannot be cloned to." % abspath
                msg += " Use `spack develop -f` to overwrite."
                raise SpackError(msg)

        _retrieve_develop_source(spec, abspath)


def develop(parser, args):
    env = spack.cmd.require_active_env(cmd_name="develop")

    for spec, abspath in _dev_spec_generator(args, env):
        assure_concrete_spec(env, spec)
        setup_src_code(spec, abspath, clone=args.clone, force=args.force)
        update_env(env, spec, args.path, args.build_directory)
    tty.debug("Updating develop config for {0} transactionally".format(env.name))
    with env.write_transaction():
        if args.build_directory is not None:
            spack.config.add(
                "packages:{}:package_attributes:build_directory:{}".format(
                    spec.name, args.build_directory
                ),
                env.scope_name,
            )
        _update_config(spec, path)
@@ -110,7 +110,10 @@ def external_find(args):
        # Note that KeyboardInterrupt does not subclass Exception
        # (so CTRL-C will terminate the program as expected).
        skip_msg = "Skipping manifest and continuing with other external checks"
        if isinstance(e, OSError) and e.errno in (errno.EPERM, errno.EACCES):
        if (isinstance(e, IOError) or isinstance(e, OSError)) and e.errno in [
            errno.EPERM,
            errno.EACCES,
        ]:
            # The manifest file does not have sufficient permissions enabled:
            # print a warning and keep going
            tty.warn("Unable to read manifest due to insufficient permissions.", skip_msg)
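The `external find` hunk above collapses a two-type `isinstance` check into a single one. In Python 3, `IOError` is an alias of `OSError`, so one test covers both spellings; here is a small sketch of the resulting permission check (the helper name is ours, not Spack's).

```python
# Sketch: in Python 3, IOError is an alias of OSError, so one isinstance check suffices.
import errno


def is_permission_error(exc: BaseException) -> bool:
    return isinstance(exc, OSError) and exc.errno in (errno.EPERM, errno.EACCES)
```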
@@ -54,6 +54,10 @@
    @m{target=target}              specific <target> processor
    @m{arch=platform-os-target}    shortcut for all three above

    cross-compiling:
        @m{os=backend} or @m{os=be}    build for compute node (backend)
        @m{os=frontend} or @m{os=fe}   build for login node (frontend)

    dependencies:
        ^dependency [constraints]      specify constraints on dependencies
        ^@K{/hash}                     build with a specific installed
@@ -73,7 +77,7 @@
        boxlib @B{dim=2}               boxlib built for 2 dimensions
        libdwarf @g{%intel} ^libelf@g{%gcc}
            libdwarf, built with intel compiler, linked to libelf built with gcc
        mvapich2 @B{fabrics=psm,mrail,sock} @g{%gcc}
        mvapich2 @g{%gcc} @B{fabrics=psm,mrail,sock}
            mvapich2, built with gcc compiler, with support for multiple fabrics
"""

@@ -13,7 +13,6 @@
from llnl.util import lang, tty

import spack.cmd
import spack.concretize
import spack.config
import spack.environment as ev
import spack.paths
@@ -451,7 +450,7 @@ def concrete_specs_from_file(args):
            else:
                s = spack.spec.Spec.from_json(f)

            concretized = spack.concretize.concretize_one(s)
            concretized = s.concretized()
            if concretized.dag_hash() != s.dag_hash():
                msg = 'skipped invalid file "{0}". '
                msg += "The file does not contain a concrete spec."

@@ -7,9 +7,9 @@

from llnl.path import convert_to_posix_path

import spack.concretize
import spack.paths
import spack.util.executable
from spack.spec import Spec

description = "generate Windows installer"
section = "admin"
@@ -65,7 +65,8 @@ def make_installer(parser, args):
    """
    if sys.platform == "win32":
        output_dir = args.output_dir
        cmake_spec = spack.concretize.concretize_one("cmake")
        cmake_spec = Spec("cmake")
        cmake_spec.concretize()
        cmake_path = os.path.join(cmake_spec.prefix, "bin", "cmake.exe")
        cpack_path = os.path.join(cmake_spec.prefix, "bin", "cpack.exe")
        spack_source = args.spack_source
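Several hunks in this comparison (deprecate, dev-build, the Windows installer command, and mirror below) toggle between two concretization idioms. Both shapes appear in the surrounding hunks; which one is available depends on the Spack version in use, so this is a shape comparison under that assumption rather than a recommendation.

```python
# Shape comparison only: both idioms appear in this diff; availability depends on the Spack version.
import spack.concretize
from spack.spec import Spec

abstract = Spec("cmake")
concrete_via_module = spack.concretize.concretize_one(abstract)  # module-level helper form
concrete_via_method = Spec("cmake").concretized()                # Spec-method form
```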
@@ -492,7 +492,7 @@ def extend_with_additional_versions(specs, num_versions):
        mirror_specs = spack.mirrors.utils.get_all_versions(specs)
    else:
        mirror_specs = spack.mirrors.utils.get_matching_versions(specs, num_versions=num_versions)
    mirror_specs = [spack.concretize.concretize_one(x) for x in mirror_specs]
    mirror_specs = [x.concretized() for x in mirror_specs]
    return mirror_specs


@@ -545,7 +545,7 @@ def _not_license_excluded(self, x):
        package does not explicitly forbid redistributing source."""
        if self.private:
            return True
        elif spack.repo.PATH.get_pkg_class(x.fullname).redistribute_source(x):
        elif x.package_class.redistribute_source(x):
            return True
        else:
            tty.debug(

@@ -5,7 +5,7 @@
"""Implementation details of the ``spack module`` command."""

import collections
import os
import os.path
import shutil
import sys

@@ -383,10 +383,8 @@ def modules_cmd(parser, args, module_type, callbacks=callbacks):
        query = " ".join(str(s) for s in args.constraint_specs)
        msg = f"the constraint '{query}' matches multiple packages:\n"
        for s in specs:
            spec_fmt = (
                "{hash:7} {name}{@version}{compiler_flags}{variants}"
                "{arch=architecture} {%compiler}"
            )
            spec_fmt = "{hash:7} {name}{@version}{%compiler}"
            spec_fmt += "{compiler_flags}{variants}{arch=architecture}"
            msg += "\t" + s.cformat(spec_fmt) + "\n"
        tty.die(msg, "In this context exactly *one* match is needed.")

@@ -41,11 +41,7 @@ def providers(parser, args):
    specs = spack.cmd.parse_specs(args.virtual_package)

    # Check prerequisites
    non_virtual = [
        str(s)
        for s in specs
        if not spack.repo.PATH.is_virtual(s.name) or s.name not in valid_virtuals
    ]
    non_virtual = [str(s) for s in specs if not s.virtual or s.name not in valid_virtuals]
    if non_virtual:
        msg = "non-virtual specs cannot be part of the query "
        msg += "[{0}]\n".format(", ".join(non_virtual))
@@ -6,9 +6,8 @@
import os
import re
import sys
import warnings
from itertools import islice, zip_longest
from typing import Callable, Dict, List, Optional
from itertools import zip_longest
from typing import Dict, List, Optional

import llnl.util.tty as tty
import llnl.util.tty.color as color
@@ -17,9 +16,6 @@
import spack.paths
import spack.repo
import spack.util.git
import spack.util.spack_yaml
from spack.spec_parser import SPEC_TOKENIZER, SpecTokens
from spack.tokenize import Token
from spack.util.executable import Executable, which

description = "runs source code style checks on spack"
@@ -202,13 +198,6 @@ def setup_parser(subparser):
        action="append",
        help="specify tools to skip (choose from %s)" % ", ".join(tool_names),
    )
    subparser.add_argument(
        "--spec-strings",
        action="store_true",
        help="upgrade spec strings in Python, JSON and YAML files for compatibility with Spack "
        "v1.0 and v0.x. Example: spack style --spec-strings $(git ls-files). Note: this flag "
        "will be removed in Spack v1.0.",
    )

    subparser.add_argument("files", nargs=argparse.REMAINDER, help="specific files to check")

@@ -434,8 +423,7 @@ def _run_import_check(
            continue

        for m in is_abs_import.finditer(contents):
            # Find at most two occurences: the first is the import itself, the second is its usage.
            if len(list(islice(re.finditer(rf"{re.escape(m.group(1))}(?!\w)", contents), 2))) == 1:
            if contents.count(m.group(1)) == 1:
                to_remove.append(m.group(0))
                exit_code = 1
                print(f"{pretty_path}: redundant import: {m.group(1)}", file=out)
@@ -450,7 +438,7 @@ def _run_import_check(
            module = _module_part(root, m.group(0))
            if not module or module in to_add:
                continue
            if re.search(rf"import {re.escape(module)}(?!\w|\.)", contents):
            if re.search(rf"import {re.escape(module)}\b(?!\.)", contents):
                continue
            to_add.add(module)
            exit_code = 1
@@ -518,196 +506,7 @@ def _bootstrap_dev_dependencies():
    spack.bootstrap.ensure_environment_dependencies()


IS_PROBABLY_COMPILER = re.compile(r"%[a-zA-Z_][a-zA-Z0-9\-]")


def _spec_str_reorder_compiler(idx: int, blocks: List[List[Token]]) -> None:
    # only move the compiler to the back if it exists and is not already at the end
    if not 0 <= idx < len(blocks) - 1:
        return
    # if there's only whitespace after the compiler, don't move it
    if all(token.kind == SpecTokens.WS for block in blocks[idx + 1 :] for token in block):
        return
    # rotate left and always add at least one WS token between compiler and previous token
    compiler_block = blocks.pop(idx)
    if compiler_block[0].kind != SpecTokens.WS:
        compiler_block.insert(0, Token(SpecTokens.WS, " "))
    # delete the WS tokens from the new first block if it was at the very start, to prevent leading
    # WS tokens.
    while idx == 0 and blocks[0][0].kind == SpecTokens.WS:
        blocks[0].pop(0)
    blocks.append(compiler_block)


def _spec_str_format(spec_str: str) -> Optional[str]:
    """Given any string, try to parse as spec string, and rotate the compiler token to the end
    of each spec instance. Returns the formatted string if it was changed, otherwise None."""
    # We parse blocks of tokens that include leading whitespace, and move the compiler block to
    # the end when we hit a dependency ^... or the end of a string.
    # [@3.1][ +foo][ +bar][ %gcc@3.1][ +baz]
    # [@3.1][ +foo][ +bar][ +baz][ %gcc@3.1]

    current_block: List[Token] = []
    blocks: List[List[Token]] = []
    compiler_block_idx = -1
    in_edge_attr = False

    for token in SPEC_TOKENIZER.tokenize(spec_str):
        if token.kind == SpecTokens.UNEXPECTED:
            # parsing error, we cannot fix this string.
            return None
        elif token.kind in (SpecTokens.COMPILER, SpecTokens.COMPILER_AND_VERSION):
            # multiple compilers are not supported in Spack v0.x, so early return
            if compiler_block_idx != -1:
                return None
            current_block.append(token)
            blocks.append(current_block)
            current_block = []
            compiler_block_idx = len(blocks) - 1
        elif token.kind in (
            SpecTokens.START_EDGE_PROPERTIES,
            SpecTokens.DEPENDENCY,
            SpecTokens.UNQUALIFIED_PACKAGE_NAME,
            SpecTokens.FULLY_QUALIFIED_PACKAGE_NAME,
        ):
            _spec_str_reorder_compiler(compiler_block_idx, blocks)
            compiler_block_idx = -1
            if token.kind == SpecTokens.START_EDGE_PROPERTIES:
                in_edge_attr = True
            current_block.append(token)
            blocks.append(current_block)
            current_block = []
        elif token.kind == SpecTokens.END_EDGE_PROPERTIES:
            in_edge_attr = False
            current_block.append(token)
            blocks.append(current_block)
            current_block = []
        elif in_edge_attr:
            current_block.append(token)
        elif token.kind in (
            SpecTokens.VERSION_HASH_PAIR,
            SpecTokens.GIT_VERSION,
            SpecTokens.VERSION,
            SpecTokens.PROPAGATED_BOOL_VARIANT,
            SpecTokens.BOOL_VARIANT,
            SpecTokens.PROPAGATED_KEY_VALUE_PAIR,
            SpecTokens.KEY_VALUE_PAIR,
            SpecTokens.DAG_HASH,
        ):
            current_block.append(token)
            blocks.append(current_block)
            current_block = []
        elif token.kind == SpecTokens.WS:
            current_block.append(token)
        else:
            raise ValueError(f"unexpected token {token}")

    if current_block:
        blocks.append(current_block)
    _spec_str_reorder_compiler(compiler_block_idx, blocks)

    new_spec_str = "".join(token.value for block in blocks for token in block)
    return new_spec_str if spec_str != new_spec_str else None


SpecStrHandler = Callable[[str, int, int, str, str], None]


def _spec_str_default_handler(path: str, line: int, col: int, old: str, new: str):
    """A SpecStrHandler that prints formatted spec strings and their locations."""
    print(f"{path}:{line}:{col}: `{old}` -> `{new}`")


def _spec_str_fix_handler(path: str, line: int, col: int, old: str, new: str):
    """A SpecStrHandler that updates formatted spec strings in files."""
    with open(path, "r", encoding="utf-8") as f:
        lines = f.readlines()
    new_line = lines[line - 1].replace(old, new)
    if new_line == lines[line - 1]:
        tty.warn(f"{path}:{line}:{col}: could not apply fix: `{old}` -> `{new}`")
        return
    lines[line - 1] = new_line
    print(f"{path}:{line}:{col}: fixed `{old}` -> `{new}`")
    with open(path, "w", encoding="utf-8") as f:
        f.writelines(lines)


def _spec_str_ast(path: str, tree: ast.AST, handler: SpecStrHandler) -> None:
    """Walk the AST of a Python file and apply handler to formatted spec strings."""
    has_constant = sys.version_info >= (3, 8)
    for node in ast.walk(tree):
        if has_constant and isinstance(node, ast.Constant) and isinstance(node.value, str):
            current_str = node.value
        elif not has_constant and isinstance(node, ast.Str):
            current_str = node.s
        else:
            continue
        if not IS_PROBABLY_COMPILER.search(current_str):
            continue
        new = _spec_str_format(current_str)
        if new is not None:
            handler(path, node.lineno, node.col_offset, current_str, new)


def _spec_str_json_and_yaml(path: str, data: dict, handler: SpecStrHandler) -> None:
    """Walk a YAML or JSON data structure and apply handler to formatted spec strings."""
    queue = [data]
    seen = set()

    while queue:
        current = queue.pop(0)
        if id(current) in seen:
            continue
        seen.add(id(current))
        if isinstance(current, dict):
            queue.extend(current.values())
            queue.extend(current.keys())
        elif isinstance(current, list):
            queue.extend(current)
        elif isinstance(current, str) and IS_PROBABLY_COMPILER.search(current):
            new = _spec_str_format(current)
            if new is not None:
                mark = getattr(current, "_start_mark", None)
                if mark:
                    line, col = mark.line + 1, mark.column + 1
                else:
                    line, col = 0, 0
                handler(path, line, col, current, new)


def _check_spec_strings(
    paths: List[str], handler: SpecStrHandler = _spec_str_default_handler
) -> None:
    """Open Python, JSON and YAML files, and format their string literals that look like spec
    strings. A handler is called for each formatting, which can be used to print or apply fixes."""
    for path in paths:
        is_json_or_yaml = path.endswith(".json") or path.endswith(".yaml") or path.endswith(".yml")
        is_python = path.endswith(".py")
        if not is_json_or_yaml and not is_python:
            continue

        try:
            with open(path, "r", encoding="utf-8") as f:
                # skip files that are likely too large to be user code or config
                if os.fstat(f.fileno()).st_size > 1024 * 1024:
                    warnings.warn(f"skipping {path}: too large.")
                    continue
                if is_json_or_yaml:
                    _spec_str_json_and_yaml(path, spack.util.spack_yaml.load_config(f), handler)
                elif is_python:
                    _spec_str_ast(path, ast.parse(f.read()), handler)
        except (OSError, spack.util.spack_yaml.SpackYAMLError, SyntaxError, ValueError):
            warnings.warn(f"skipping {path}")
            continue


def style(parser, args):
    if args.spec_strings:
        if not args.files:
            tty.die("No files provided to check spec strings.")
        handler = _spec_str_fix_handler if args.fix else _spec_str_default_handler
        return _check_spec_strings(args.files, handler)

    # save initial working directory for relativizing paths later
    args.initial_working_dir = os.getcwd()
@@ -177,15 +177,16 @@ def test_run(args):
        matching = spack.store.STORE.db.query_local(spec, hashes=hashes, explicit=explicit)
        if spec and not matching:
            tty.warn("No {0}installed packages match spec {1}".format(explicit_str, spec))
            """
            TODO: Need to write out a log message and/or CDASH Testing
              output that package not installed IF continue to process
              these issues here.

            # TODO: Need to write out a log message and/or CDASH Testing
            # output that package not installed IF continue to process
            # these issues here.

            # if args.log_format:
            #     # Proceed with the spec assuming the test process
            #     # to ensure report package as skipped (e.g., for CI)
            #     specs_to_test.append(spec)
            if args.log_format:
                # Proceed with the spec assuming the test process
                # to ensure report package as skipped (e.g., for CI)
                specs_to_test.append(spec)
            """

        specs_to_test.extend(matching)

@@ -252,9 +253,7 @@ def has_test_and_tags(pkg_class):
    hashes = env.all_hashes() if env else None

    specs = spack.store.STORE.db.query(hashes=hashes)
    specs = list(
        filter(lambda s: has_test_and_tags(spack.repo.PATH.get_pkg_class(s.fullname)), specs)
    )
    specs = list(filter(lambda s: has_test_and_tags(s.package_class), specs))

    spack.cmd.display_specs(specs, long=True)
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
import os
|
||||
import os.path
|
||||
import shutil
|
||||
|
||||
import llnl.util.tty as tty
|
||||
|
@@ -144,7 +144,7 @@ def is_installed(spec):
|
||||
record = spack.store.STORE.db.query_local_by_spec_hash(spec.dag_hash())
|
||||
return record and record.installed
|
||||
|
||||
all_specs = traverse.traverse_nodes(
|
||||
specs = traverse.traverse_nodes(
|
||||
specs,
|
||||
root=False,
|
||||
order="breadth",
|
||||
@@ -155,7 +155,7 @@ def is_installed(spec):
|
||||
)
|
||||
|
||||
with spack.store.STORE.db.read_transaction():
|
||||
return [spec for spec in all_specs if is_installed(spec)]
|
||||
return [spec for spec in specs if is_installed(spec)]
|
||||
|
||||
|
||||
def dependent_environments(
|
||||
|
@@ -5,7 +5,7 @@
|
||||
import argparse
|
||||
import collections
|
||||
import io
|
||||
import os
|
||||
import os.path
|
||||
import re
|
||||
import sys
|
||||
|
||||
@@ -17,7 +17,6 @@
|
||||
pytest = None # type: ignore
|
||||
|
||||
import llnl.util.filesystem
|
||||
import llnl.util.tty as tty
|
||||
import llnl.util.tty.color as color
|
||||
from llnl.util.tty.colify import colify
|
||||
|
||||
@@ -217,7 +216,7 @@ def unit_test(parser, args, unknown_args):
|
||||
# Ensure clingo is available before switching to the
|
||||
# mock configuration used by unit tests
|
||||
with spack.bootstrap.ensure_bootstrap_configuration():
|
||||
spack.bootstrap.ensure_clingo_importable_or_raise()
|
||||
spack.bootstrap.ensure_core_dependencies()
|
||||
if pytest is None:
|
||||
spack.bootstrap.ensure_environment_dependencies()
|
||||
import pytest
|
||||
@@ -237,12 +236,6 @@ def unit_test(parser, args, unknown_args):
|
||||
pytest_root = spack.extensions.load_extension(args.extension)
|
||||
|
||||
if args.numprocesses is not None and args.numprocesses > 1:
|
||||
try:
|
||||
import xdist # noqa: F401
|
||||
except ImportError:
|
||||
tty.error("parallel unit-test requires pytest-xdist module")
|
||||
return 1
|
||||
|
||||
pytest_args.extend(
|
||||
[
|
||||
"--dist",
|
||||
|
@@ -2,48 +2,35 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import argparse
import io
from typing import List, Optional

import llnl.util.tty as tty
from llnl.string import plural
from llnl.util.filesystem import visit_directory_tree

import spack.cmd
import spack.environment as ev
import spack.spec
import spack.store
import spack.verify
import spack.verify_libraries
from spack.cmd.common import arguments

description = "verify spack installations on disk"
description = "check that all spack packages are on disk as installed"
section = "admin"
level = "long"

MANIFEST_SUBPARSER: Optional[argparse.ArgumentParser] = None


def setup_parser(subparser):
    setup_parser.parser = subparser


def setup_parser(subparser: argparse.ArgumentParser):
    global MANIFEST_SUBPARSER
    sp = subparser.add_subparsers(metavar="SUBCOMMAND", dest="verify_command")

    MANIFEST_SUBPARSER = sp.add_parser(
        "manifest", help=verify_manifest.__doc__, description=verify_manifest.__doc__
    )
    MANIFEST_SUBPARSER.add_argument(
    subparser.add_argument(
        "-l", "--local", action="store_true", help="verify only locally installed packages"
    )
    MANIFEST_SUBPARSER.add_argument(
    subparser.add_argument(
        "-j", "--json", action="store_true", help="ouptut json-formatted errors"
    )
    MANIFEST_SUBPARSER.add_argument("-a", "--all", action="store_true", help="verify all packages")
    MANIFEST_SUBPARSER.add_argument(
    subparser.add_argument("-a", "--all", action="store_true", help="verify all packages")
    subparser.add_argument(
        "specs_or_files", nargs=argparse.REMAINDER, help="specs or files to verify"
    )

    manifest_sp_type = MANIFEST_SUBPARSER.add_mutually_exclusive_group()
    manifest_sp_type.add_argument(
    type = subparser.add_mutually_exclusive_group()
    type.add_argument(
        "-s",
        "--specs",
        action="store_const",
@@ -52,7 +39,7 @@ def setup_parser(subparser: argparse.ArgumentParser):
        default="specs",
        help="treat entries as specs (default)",
    )
    manifest_sp_type.add_argument(
    type.add_argument(
        "-f",
        "--files",
        action="store_const",
@@ -62,67 +49,14 @@ def setup_parser(subparser: argparse.ArgumentParser):
        help="treat entries as absolute filenames\n\ncannot be used with '-a'",
    )

    libraries_subparser = sp.add_parser(
        "libraries", help=verify_libraries.__doc__, description=verify_libraries.__doc__
    )

    arguments.add_common_arguments(libraries_subparser, ["constraint"])


def verify(parser, args):
    cmd = args.verify_command
    if cmd == "libraries":
        return verify_libraries(args)
    elif cmd == "manifest":
        return verify_manifest(args)
    parser.error("invalid verify subcommand")


def verify_libraries(args):
    """verify that shared libraries of install packages can be located in rpaths (Linux only)"""
    specs_from_db = [s for s in args.specs(installed=True) if not s.external]

    tty.info(f"Checking {len(specs_from_db)} packages for shared library resolution")

    errors = 0
    for spec in specs_from_db:
        try:
            pkg = spec.package
        except Exception:
            tty.warn(f"Skipping {spec.cformat('{name}{@version}{/hash}')} due to missing package")
        error_msg = _verify_libraries(spec, pkg.unresolved_libraries)
        if error_msg is not None:
            errors += 1
            tty.error(error_msg)

    if errors:
        tty.error(f"Cannot resolve shared libraries in {plural(errors, 'package')}")
        return 1


def _verify_libraries(spec: spack.spec.Spec, unresolved_libraries: List[str]) -> Optional[str]:
    """Go over the prefix of the installed spec and verify its shared libraries can be resolved."""
    visitor = spack.verify_libraries.ResolveSharedElfLibDepsVisitor(
        [*spack.verify_libraries.ALLOW_UNRESOLVED, *unresolved_libraries]
    )
    visit_directory_tree(spec.prefix, visitor)

    if not visitor.problems:
        return None

    output = io.StringIO()
    visitor.write(output, indent=4, brief=True)
    message = output.getvalue().rstrip()
    return f"{spec.cformat('{name}{@version}{/hash}')}: {spec.prefix}:\n{message}"


def verify_manifest(args):
    """verify that install directories have not been modified since installation"""
    local = args.local

    if args.type == "files":
        if args.all:
            MANIFEST_SUBPARSER.error("cannot use --all with --files")
            setup_parser.parser.print_help()
            return 1

        for file in args.specs_or_files:
            results = spack.verify.check_file_manifest(file)
@@ -153,7 +87,8 @@ def verify_manifest(args):
        env = ev.active_environment()
        specs = list(map(lambda x: spack.cmd.disambiguate_spec(x, env, local=local), spec_args))
    else:
        MANIFEST_SUBPARSER.error("use --all or specify specs to verify")
        setup_parser.parser.print_help()
        return 1

    for spec in specs:
        tty.debug("Verifying package %s")
@@ -801,17 +801,17 @@ def _extract_compiler_paths(spec: "spack.spec.Spec") -> Optional[Dict[str, str]]
def _extract_os_and_target(spec: "spack.spec.Spec"):
    if not spec.architecture:
        host_platform = spack.platforms.host()
        operating_system = host_platform.default_operating_system()
        target = host_platform.default_target()
        operating_system = host_platform.operating_system("default_os")
        target = host_platform.target("default_target")
    else:
        target = spec.architecture.target
        if not target:
            target = spack.platforms.host().default_target()
            target = spack.platforms.host().target("default_target")

        operating_system = spec.os
        if not operating_system:
            host_platform = spack.platforms.host()
            operating_system = host_platform.default_operating_system()
            operating_system = host_platform.operating_system("default_os")
    return operating_system, target
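The last hunk renames the platform accessors used to fall back to host defaults. Both spellings are shown below exactly as they appear in the hunk; only one of them exists in a given Spack version, depending on which side of this comparison is installed, so this is a side-by-side illustration rather than working code for any single release.

```python
# Both accessor spellings from the hunk above; only one exists in a given Spack version.
import spack.platforms

host = spack.platforms.host()
os_a, target_a = host.default_operating_system(), host.default_target()              # one side
os_b, target_b = host.operating_system("default_os"), host.target("default_target")  # other side
```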
Some files were not shown because too many files have changed in this diff.