Compare commits


1 Commit

Author: Harmen Stoppels
SHA1: 4969fdf23a
Message: autotools.py: order props / methods / callbacks
Date: 2025-01-23 12:24:56 +01:00
935 changed files with 9719 additions and 14747 deletions

View File

@@ -40,17 +40,17 @@ jobs:
# 1: Platforms to build for
# 2: Base image (e.g. ubuntu:22.04)
dockerfile: [[amazon-linux, 'linux/amd64,linux/arm64', 'amazonlinux:2'],
[centos-stream9, 'linux/amd64,linux/arm64', 'centos:stream9'],
[leap15, 'linux/amd64,linux/arm64', 'opensuse/leap:15'],
[ubuntu-focal, 'linux/amd64,linux/arm64', 'ubuntu:20.04'],
[ubuntu-jammy, 'linux/amd64,linux/arm64', 'ubuntu:22.04'],
[ubuntu-noble, 'linux/amd64,linux/arm64', 'ubuntu:24.04'],
[almalinux8, 'linux/amd64,linux/arm64', 'almalinux:8'],
[almalinux9, 'linux/amd64,linux/arm64', 'almalinux:9'],
[centos-stream9, 'linux/amd64,linux/arm64,linux/ppc64le', 'centos:stream9'],
[leap15, 'linux/amd64,linux/arm64,linux/ppc64le', 'opensuse/leap:15'],
[ubuntu-focal, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:20.04'],
[ubuntu-jammy, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:22.04'],
[ubuntu-noble, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:24.04'],
[almalinux8, 'linux/amd64,linux/arm64,linux/ppc64le', 'almalinux:8'],
[almalinux9, 'linux/amd64,linux/arm64,linux/ppc64le', 'almalinux:9'],
[rockylinux8, 'linux/amd64,linux/arm64', 'rockylinux:8'],
[rockylinux9, 'linux/amd64,linux/arm64', 'rockylinux:9'],
[fedora39, 'linux/amd64,linux/arm64', 'fedora:39'],
[fedora40, 'linux/amd64,linux/arm64', 'fedora:40']]
[fedora39, 'linux/amd64,linux/arm64,linux/ppc64le', 'fedora:39'],
[fedora40, 'linux/amd64,linux/arm64,linux/ppc64le', 'fedora:40']]
name: Build ${{ matrix.dockerfile[0] }}
if: github.repository == 'spack/spack'
steps:

View File

@@ -81,10 +81,6 @@ jobs:
with:
with_coverage: ${{ needs.changes.outputs.core }}
import-check:
needs: [ changes ]
uses: ./.github/workflows/import-check.yaml
all-prechecks:
needs: [ prechecks ]
if: ${{ always() }}

View File

@@ -33,4 +33,3 @@ jobs:
with:
verbose: true
fail_ci_if_error: false
token: ${{ secrets.CODECOV_TOKEN }}

View File

@@ -1,49 +0,0 @@
name: import-check
on:
workflow_call:
jobs:
# Check we don't make the situation with circular imports worse
import-check:
runs-on: ubuntu-latest
steps:
- uses: julia-actions/setup-julia@v2
with:
version: '1.10'
- uses: julia-actions/cache@v2
# PR: use the base of the PR as the old commit
- name: Checkout PR base commit
if: github.event_name == 'pull_request'
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
ref: ${{ github.event.pull_request.base.sha }}
path: old
# not a PR: use the previous commit as the old commit
- name: Checkout previous commit
if: github.event_name != 'pull_request'
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
fetch-depth: 2
path: old
- name: Checkout previous commit
if: github.event_name != 'pull_request'
run: git -C old reset --hard HEAD^
- name: Checkout new commit
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
path: new
- name: Install circular import checker
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
repository: haampie/circular-import-fighter
ref: 4cdb0bf15f04ab6b49041d5ef1bfd9644cce7f33
path: circular-import-fighter
- name: Install dependencies
working-directory: circular-import-fighter
run: make -j dependencies
- name: Circular import check
working-directory: circular-import-fighter
run: make -j compare "SPACK_ROOT=../old ../new"

View File

@@ -1,7 +1,7 @@
black==25.1.0
black==24.10.0
clingo==5.7.1
flake8==7.1.2
isort==6.0.1
mypy==1.15.0
types-six==1.17.0.20250304
flake8==7.1.1
isort==5.13.2
mypy==1.11.2
types-six==1.17.0.20241205
vermin==1.6.0

View File

@@ -86,6 +86,66 @@ jobs:
spack -d bootstrap now --dev
spack -d style -t black
spack unit-test -V
# Check we don't make the situation with circular imports worse
import-check:
runs-on: ubuntu-latest
steps:
- uses: julia-actions/setup-julia@v2
with:
version: '1.10'
- uses: julia-actions/cache@v2
# PR: use the base of the PR as the old commit
- name: Checkout PR base commit
if: github.event_name == 'pull_request'
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
ref: ${{ github.event.pull_request.base.sha }}
path: old
# not a PR: use the previous commit as the old commit
- name: Checkout previous commit
if: github.event_name != 'pull_request'
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
fetch-depth: 2
path: old
- name: Checkout previous commit
if: github.event_name != 'pull_request'
run: git -C old reset --hard HEAD^
- name: Checkout new commit
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
path: new
- name: Install circular import checker
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
repository: haampie/circular-import-fighter
ref: b5d6ce9be35f602cca7d5a6aa0259fca10639cca
path: circular-import-fighter
- name: Install dependencies
working-directory: circular-import-fighter
run: make -j dependencies
- name: Problematic imports before
working-directory: circular-import-fighter
run: make SPACK_ROOT=../old SUFFIX=.old
- name: Problematic imports after
working-directory: circular-import-fighter
run: make SPACK_ROOT=../new SUFFIX=.new
- name: Compare import cycles
working-directory: circular-import-fighter
run: |
edges_before="$(head -n1 solution.old)"
edges_after="$(head -n1 solution.new)"
if [ "$edges_after" -gt "$edges_before" ]; then
printf '\033[1;31mImport check failed: %s imports need to be deleted, ' "$edges_after"
printf 'previously this was %s\033[0m\n' "$edges_before"
printf 'Compare \033[1;97m"Problematic imports before"\033[0m and '
printf '\033[1;97m"Problematic imports after"\033[0m.\n'
exit 1
else
printf '\033[1;32mImport check passed: %s <= %s\033[0m\n' "$edges_after" "$edges_before"
fi
# Further style checks from pylint
pylint:

.gitignore
View File

@@ -201,6 +201,7 @@ tramp
# Org-mode
.org-id-locations
*_archive
# flymake-mode
*_flymake.*

View File

@@ -25,6 +25,7 @@ exit 1
# The code above runs this file with our preferred python interpreter.
import os
import os.path
import sys
min_python3 = (3, 6)

View File

@@ -43,28 +43,6 @@ concretizer:
# (e.g. py-setuptools, cmake etc.)
# "full" (experimental): allows separation of the entire build-tool stack (e.g. the entire "cmake" subDAG)
strategy: minimal
# Maximum number of duplicates in a DAG, when using a strategy that allows duplicates. "default" is the
# number used if there isn't a more specific alternative
max_dupes:
default: 1
# Virtuals
c: 2
cxx: 2
fortran: 1
# Regular packages
cmake: 2
gmake: 2
python: 2
python-venv: 2
py-cython: 2
py-flit-core: 2
py-pip: 2
py-setuptools: 2
py-wheel: 2
xcb-proto: 2
# Compilers
gcc: 2
llvm: 2
# Option to specify compatibility between operating systems for reuse of compilers and packages
# Specified as a key: [list] where the key is the os that is being targeted, and the list contains the OS's
# it can reuse. Note this is a directional compatibility so mutual compatibility between two OS's
@@ -85,7 +63,3 @@ concretizer:
# Setting this to false yields unreproducible results, so we advise to use that value only
# for debugging purposes (e.g. check which constraints can help Spack concretize faster).
error_on_timeout: true
# Static analysis may reduce the concretization time by generating smaller ASP problems, in
# cases where there are requirements that prevent part of the search space to be explored.
static_analysis: false

View File

@@ -36,7 +36,7 @@ packages:
go-or-gccgo-bootstrap: [go-bootstrap, gcc]
iconv: [libiconv]
ipp: [intel-oneapi-ipp]
java: [openjdk, jdk]
java: [openjdk, jdk, ibm-java]
jpeg: [libjpeg-turbo, libjpeg]
lapack: [openblas, amdlibflame]
libc: [glibc, musl]
@@ -73,27 +73,15 @@ packages:
permissions:
read: world
write: user
cray-fftw:
buildable: false
cray-libsci:
buildable: false
cray-mpich:
buildable: false
cray-mvapich2:
buildable: false
cray-pmi:
buildable: false
egl:
buildable: false
essl:
buildable: false
fujitsu-mpi:
buildable: false
fujitsu-ssl2:
buildable: false
hpcx-mpi:
buildable: false
mpt:
buildable: false
spectrum-mpi:
buildable: false

View File

@@ -1,5 +1,5 @@
config:
locks: false
build_stage::
- '$user_cache_path/stage'
- '$spack/.staging'
stage_name: '{name}-{version}-{hash:7}'

View File

@@ -1761,24 +1761,19 @@ Verifying installations
The ``spack verify`` command can be used to verify the validity of
Spack-installed packages any time after installation.
^^^^^^^^^^^^^^^^^^^^^^^^^
``spack verify manifest``
^^^^^^^^^^^^^^^^^^^^^^^^^
At installation time, Spack creates a manifest of every file in the
installation prefix. For links, Spack tracks the mode, ownership, and
destination. For directories, Spack tracks the mode, and
ownership. For files, Spack tracks the mode, ownership, modification
time, hash, and size. The ``spack verify manifest`` command will check,
for every file in each package, whether any of those attributes have
changed. It will also check for newly added files or deleted files from
the installation prefix. Spack can either check all installed packages
time, hash, and size. The Spack verify command will check, for every
file in each package, whether any of those attributes have changed. It
will also check for newly added files or deleted files from the
installation prefix. Spack can either check all installed packages
using the `-a,--all` or accept specs listed on the command line to
verify.
The ``spack verify manifest`` command can also verify for individual files
that they haven't been altered since installation time. If the given file
The ``spack verify`` command can also verify for individual files that
they haven't been altered since installation time. If the given file
is not in a Spack installation prefix, Spack will report that it is
not owned by any package. To check individual files instead of specs,
use the ``-f,--files`` option.
@@ -1793,22 +1788,6 @@ check only local packages (as opposed to those used transparently from
``upstream`` spack instances) and the ``-j,--json`` option to output
machine-readable json data for any errors.
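Conceptually, the manifest check compares the attributes recorded at install time
with what is currently on disk. A minimal sketch of that idea, assuming a
hypothetical per-file record with ``mode``, ``size``, and ``hash`` keys (this is
not Spack's actual manifest schema):

.. code-block:: python

   import hashlib
   import os

   def file_changed(path: str, recorded: dict) -> bool:
       # Compare the cheap attributes first, then fall back to hashing the contents.
       st = os.lstat(path)
       if st.st_mode != recorded["mode"] or st.st_size != recorded["size"]:
           return True
       with open(path, "rb") as f:
           return hashlib.sha256(f.read()).hexdigest() != recorded["hash"]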
^^^^^^^^^^^^^^^^^^^^^^^^^^
``spack verify libraries``
^^^^^^^^^^^^^^^^^^^^^^^^^^
The ``spack verify libraries`` command can be used to verify that packages
do not have accidental system dependencies. This command scans the install
prefixes of packages for executables and shared libraries, and resolves
their needed libraries in their RPATHs. When needed libraries cannot be
located, an error is reported. This typically indicates that a package
was linked against a system library, instead of a library provided by
a Spack package.
This verification can also be enabled as a post-install hook by setting
``config:shared_linking:missing_library_policy`` to ``error`` or ``warn``
in :ref:`config.yaml <config-yaml>`.
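The underlying idea can be approximated by walking an install prefix and reporting
shared-library entries that do not resolve. This is only a rough sketch using
``ldd``; Spack's own implementation inspects RPATHs directly rather than shelling out:

.. code-block:: python

   import os
   import subprocess

   def unresolved_libraries(prefix: str) -> list:
       # Walk the prefix, run ldd on shared objects and executables, and collect
       # any "not found" entries (non-ELF files simply produce no matches).
       missing = []
       for dirpath, _, files in os.walk(prefix):
           for name in files:
               path = os.path.join(dirpath, name)
               if not (".so" in name or os.access(path, os.X_OK)):
                   continue
               out = subprocess.run(["ldd", path], capture_output=True, text=True)
               missing.extend(
                   f"{path}: {line.strip()}"
                   for line in out.stdout.splitlines()
                   if "not found" in line
               )
       return missing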
-----------------------
Filesystem requirements
-----------------------

View File

@@ -170,7 +170,7 @@ bootstrapping.
To register the mirror on the platform where it's supposed to be used run the following command(s):
% spack bootstrap add --trust local-sources /opt/bootstrap/metadata/sources
% spack bootstrap add --trust local-binaries /opt/bootstrap/metadata/binaries
% spack buildcache update-index /opt/bootstrap/bootstrap_cache
This command needs to be run on a machine with internet access and the resulting folder
has to be moved over to the air-gapped system. Once the local sources are added using the

View File

@@ -272,9 +272,9 @@ often lists dependencies and the flags needed to locate them. The
"environment variables" section lists environment variables that the
build system uses to pass flags to the compiler and linker.
^^^^^^^^^^^^^^^^^^^^^^^^^
Adding flags to configure
^^^^^^^^^^^^^^^^^^^^^^^^^
^^^^^^^^^^^^^^^^^^^^^^^^^^
Addings flags to configure
^^^^^^^^^^^^^^^^^^^^^^^^^^
For most of the flags you encounter, you will want a variant to
optionally enable/disable them. You can then optionally pass these
@@ -285,7 +285,7 @@ function like so:
def configure_args(self):
args = []
...
if self.spec.satisfies("+mpi"):
args.append("--enable-mpi")
else:
@@ -299,10 +299,7 @@ Alternatively, you can use the :ref:`enable_or_disable <autotools_enable_or_dis
.. code-block:: python
def configure_args(self):
args = []
...
args.extend(self.enable_or_disable("mpi"))
return args
return [self.enable_or_disable("mpi")]
Note that we are explicitly disabling MPI support if it is not
@@ -347,14 +344,7 @@ typically used to enable or disable some feature within the package.
default=False,
description="Memchecker support for debugging [degrades performance]"
)
...
def configure_args(self):
args = []
...
args.extend(self.enable_or_disable("memchecker"))
return args
config_args.extend(self.enable_or_disable("memchecker"))
In this example, specifying the variant ``+memchecker`` will generate
the following configuration options:
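Here ``enable_or_disable("memchecker")`` expands to ``--enable-memchecker`` or
``--disable-memchecker`` depending on the variant value. Putting the pieces
together, a complete ``configure_args`` might look like the following sketch,
with the ``mpi`` and ``memchecker`` variants assumed to be declared as in the
surrounding examples:

.. code-block:: python

   def configure_args(self):
       # Collect configure flags derived from the package's variants.
       args = []
       args.extend(self.enable_or_disable("mpi"))
       args.extend(self.enable_or_disable("memchecker"))
       return args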

View File

@@ -223,10 +223,6 @@ def setup(sphinx):
("py:class", "spack.compiler.CompilerCache"),
# TypeVar that is not handled correctly
("py:class", "llnl.util.lang.T"),
("py:class", "llnl.util.lang.KT"),
("py:class", "llnl.util.lang.VT"),
("py:obj", "llnl.util.lang.KT"),
("py:obj", "llnl.util.lang.VT"),
]
# The reST default role (used for this markup: `text`) to use for all documents.

View File

@@ -125,8 +125,6 @@ are stored in ``$spack/var/spack/cache``. These are stored indefinitely
by default. Can be purged with :ref:`spack clean --downloads
<cmd-spack-clean>`.
.. _Misc Cache:
--------------------
``misc_cache``
--------------------
@@ -336,52 +334,3 @@ create a new alias called ``inst`` that will always call ``install -v``:
aliases:
inst: install -v
-------------------------------
``concretization_cache:enable``
-------------------------------
When set to ``true``, Spack will utilize a cache of solver outputs from
successful concretization runs. When enabled, Spack will check the concretization
cache prior to running the solver. If a previous request to solve a given
problem is present in the cache, Spack will load the concrete specs and other
solver data from the cache rather than running the solver. Specs not previously
concretized will be added to the cache on a successful solve. The cache additionally
holds solver statistics, so commands like ``spack solve`` will still return information
about the run that produced a given solver result.
This cache is a subcache of the :ref:`Misc Cache` and as such will be cleaned when the Misc
Cache is cleaned.
When ``false`` or omitted, all concretization requests will be performed from scratch.
----------------------------
``concretization_cache:url``
----------------------------
Path to the location where Spack will root the concretization cache. Currently this only supports
paths on the local filesystem.
Default location is under the :ref:`Misc Cache` at: ``$misc_cache/concretization``
------------------------------------
``concretization_cache:entry_limit``
------------------------------------
Sets a limit on the number of concretization results that Spack will cache. The limit is evaluated
after each concretization run; if Spack has stored more results than the limit allows, the
oldest concretization results are pruned until 10% of the limit has been removed.
Setting this value to 0 disables the automatic pruning. It is expected users will be
responsible for maintaining this cache.
-----------------------------------
``concretization_cache:size_limit``
-----------------------------------
Sets a limit on the size of the concretization cache in bytes. The limit is evaluated
after each concretization run; if Spack has stored more results than the limit allows, the
oldest concretization results are pruned until 10% of the limit has been removed.
Setting this value to 0 disables the automatic pruning. It is expected users will be
responsible for maintaining this cache.
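The pruning rule described above can be illustrated with a small sketch (this is
not Spack's code; entries are assumed to be ordered oldest-first):

.. code-block:: python

   def prune_cache(entries: list, limit: int) -> list:
       # Once the cache exceeds the limit, drop the oldest entries until
       # 10% of the limit has been removed; a limit of 0 disables pruning.
       if limit == 0 or len(entries) <= limit:
           return entries
       to_remove = max(1, limit // 10)
       return entries[to_remove:]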

View File

@@ -361,6 +361,7 @@ and the tags associated with the class of runners to build on.
* ``.linux_neoverse_n1``
* ``.linux_neoverse_v1``
* ``.linux_neoverse_v2``
* ``.linux_power``
* ``.linux_skylake``
* ``.linux_x86_64``
* ``.linux_x86_64_v4``

View File

@@ -112,19 +112,6 @@ the original but may concretize differently in the presence of different
explicit or default configuration settings (e.g., a different version of
Spack or for a different user account).
Environments created from a manifest will copy any included configs
from relative paths inside the environment. Relative paths from
outside the environment will cause errors, and absolute paths will be
kept absolute. For example, if ``spack.yaml`` includes:
.. code-block:: yaml
spack:
include: [./config.yaml]
then the created environment will have its own copy of the file
``config.yaml`` copied from the location in the original environment.
Create an environment from a ``spack.lock`` file using:
.. code-block:: console
@@ -173,7 +160,7 @@ accepts. If an environment already exists then spack will simply activate it
and ignore the create-specific flags.
.. code-block:: console
$ spack env activate --create -p myenv
# ...
# [creates if myenv does not exist yet]
@@ -437,8 +424,8 @@ Developing Packages in a Spack Environment
The ``spack develop`` command allows one to develop Spack packages in
an environment. It requires a spec containing a concrete version, and
will configure Spack to install the package from local source.
If a version is not provided from the command line interface then spack
will configure Spack to install the package from local source.
If a version is not provided from the command line interface then spack
will automatically pick the highest version the package has defined.
This means any infinity versions (``develop``, ``main``, ``stable``) will be
preferred in this selection process.
@@ -448,9 +435,9 @@ set, and Spack will ensure the package and its dependents are rebuilt
any time the environment is installed if the package's local source
code has been modified. Spack's native implementation to check for modifications
is to check if ``mtime`` is newer than the installation.
A custom check can be created by overriding the ``detect_dev_src_change`` method
in your package class. This is particularly useful for projects using custom spack repo's
to drive development and want to optimize performance.
A custom check can be created by overriding the ``detect_dev_src_change`` method
in your package class. This is particularly useful for projects using custom spack repo's
to drive development and want to optimize performance.
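As an illustration, a package could override the check to ask git whether the
working tree is dirty instead of comparing ``mtime``. The exact signature, return
convention, and the ``self.stage.source_path`` attribute below are assumptions
made for this sketch, not a documented interface:

.. code-block:: python

   from spack.package import *  # standard package-file preamble
   import subprocess

   class MyPackage(Package):
       def detect_dev_src_change(self) -> bool:
           # Treat any uncommitted change in the development source as "modified".
           result = subprocess.run(
               ["git", "-C", self.stage.source_path, "status", "--porcelain"],
               capture_output=True,
               text=True,
               check=False,
           )
           return bool(result.stdout.strip())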
Spack ensures that all instances of a
developed package in the environment are concretized to match the
@@ -466,7 +453,7 @@ Further development on ``foo`` can be tested by re-installing the environment,
and eventually committed and pushed to the upstream git repo.
If the package being developed supports out-of-source builds then users can use the
``--build_directory`` flag to control the location and name of the build directory.
``--build_directory`` flag to control the location and name of the build directory.
This is a shortcut to set the ``package_attributes:build_directory`` in the
``packages`` configuration (see :ref:`assigning-package-attributes`).
The supplied location will become the build-directory for that package in all future builds.

View File

@@ -820,69 +820,6 @@ presence of a ``SPACK_CDASH_AUTH_TOKEN`` environment variable during the
build group on CDash called "Release Testing" (that group will be created if
it didn't already exist).
.. _ci_artifacts:
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
CI Artifacts Directory Layout
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
When running the CI build using the command ``spack ci rebuild``, a number of directories are created for
storing data generated during the CI job. The default root directory for artifacts is ``job_scratch_root``.
This can be overridden by passing the argument ``--artifacts-root`` to the ``spack ci generate`` command
or by setting the ``SPACK_ARTIFACTS_ROOT`` environment variable in the build job scripts.
The top-level directories under the artifact root are ``concrete_environment``, ``logs``, ``reproduction``,
``tests``, and ``user_data``. Spack does not restrict what is written to any of these directories, nor does
it require user-specified files to be written to any specific directory.
------------------------
``concrete_environment``
------------------------
The directory ``concrete_environment`` is used to communicate the ``spack.yaml`` processed by ``spack ci generate`` and
the concrete ``spack.lock`` for the CI environment.
--------
``logs``
--------
The directory ``logs`` contains the spack build log, ``spack-build-out.txt``, and the spack build environment
modification file, ``spack-build-mod-env.txt``. Additionally, all files specified by the package's ``Builder``
property ``archive_files`` are also copied here (e.g. ``CMakeCache.txt`` in ``CMakeBuilder``).
----------------
``reproduction``
----------------
The directory ``reproduction`` is used to store the files needed by the ``spack reproduce-build`` command.
This includes ``repro.json``, copies of all of the files in ``concrete_environment``, the concrete spec
JSON file for the current spec being built, and all of the files written in the artifacts root directory.
The ``repro.json`` file is not versioned and is only designed to work with the version of Spack that CI was run with.
An example of what a ``repro.json`` may look like is here.
.. code:: json
{
"job_name": "adios2@2.9.2 /feaevuj %gcc@11.4.0 arch=linux-ubuntu20.04-x86_64_v3 E4S ROCm External",
"job_spec_json": "adios2.json",
"ci_project_dir": "/builds/spack/spack"
}
---------
``tests``
---------
The directory ``tests`` is used to store output from running ``spack test <job spec>``. This may or may not have
data in it depending on the package that was built and the availability of tests.
-------------
``user_data``
-------------
The directory ``user_data`` is used to store everything else that shouldn't be copied to the ``reproduction`` directory.
Users may use this to store additional logs or metrics or other types of files generated by the build job.
-------------------------------------
Using a custom spack in your pipeline
-------------------------------------

View File

@@ -1,13 +1,13 @@
sphinx==8.2.3
sphinx==8.1.3
sphinxcontrib-programoutput==0.18
sphinx_design==0.6.1
sphinx-rtd-theme==3.0.2
python-levenshtein==0.27.1
python-levenshtein==0.26.1
docutils==0.21.2
pygments==2.19.1
urllib3==2.3.0
pytest==8.3.5
isort==6.0.1
black==25.1.0
flake8==7.1.2
pytest==8.3.4
isort==5.13.2
black==24.10.0
flake8==7.1.1
mypy==1.11.1

View File

@@ -3,7 +3,7 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""URL primitives that just require Python standard library."""
import itertools
import os
import os.path
import re
from typing import Optional, Set, Tuple
from urllib.parse import urlsplit, urlunsplit

View File

@@ -7,7 +7,6 @@
import fnmatch
import glob
import hashlib
import io
import itertools
import numbers
import os
@@ -21,7 +20,6 @@
from contextlib import contextmanager
from itertools import accumulate
from typing import (
IO,
Callable,
Deque,
Dict,
@@ -670,7 +668,7 @@ def copy(src, dest, _permissions=False):
_permissions (bool): for internal use only
Raises:
OSError: if *src* does not match any files or directories
IOError: if *src* does not match any files or directories
ValueError: if *src* matches multiple files but *dest* is
not a directory
"""
@@ -681,7 +679,7 @@ def copy(src, dest, _permissions=False):
files = glob.glob(src)
if not files:
raise OSError("No such file or directory: '{0}'".format(src))
raise IOError("No such file or directory: '{0}'".format(src))
if len(files) > 1 and not os.path.isdir(dest):
raise ValueError(
"'{0}' matches multiple files but '{1}' is not a directory".format(src, dest)
@@ -712,7 +710,7 @@ def install(src, dest):
dest (str): the destination file or directory
Raises:
OSError: if *src* does not match any files or directories
IOError: if *src* does not match any files or directories
ValueError: if *src* matches multiple files but *dest* is
not a directory
"""
@@ -750,7 +748,7 @@ def copy_tree(
_permissions (bool): for internal use only
Raises:
OSError: if *src* does not match any files or directories
IOError: if *src* does not match any files or directories
ValueError: if *src* is a parent directory of *dest*
"""
if _permissions:
@@ -764,7 +762,7 @@ def copy_tree(
files = glob.glob(src)
if not files:
raise OSError("No such file or directory: '{0}'".format(src))
raise IOError("No such file or directory: '{0}'".format(src))
# For Windows hard-links and junctions, the source path must exist to make a symlink. Add
# all symlinks to this list while traversing the tree, then when finished, make all
@@ -845,7 +843,7 @@ def install_tree(src, dest, symlinks=True, ignore=None):
ignore (typing.Callable): function indicating which files to ignore
Raises:
OSError: if *src* does not match any files or directories
IOError: if *src* does not match any files or directories
ValueError: if *src* is a parent directory of *dest*
"""
copy_tree(src, dest, symlinks=symlinks, ignore=ignore, _permissions=True)
@@ -1474,7 +1472,7 @@ def set_executable(path):
def recursive_mtime_greater_than(path: str, time: float) -> bool:
"""Returns true if any file or dir recursively under `path` has mtime greater than `time`."""
# use bfs order to increase likelihood of early return
queue: Deque[str] = collections.deque([path])
queue: Deque[str] = collections.deque()
if os.stat(path).st_mtime > time:
return True
@@ -2456,69 +2454,26 @@ class WindowsSimulatedRPath:
and vice versa.
"""
def __init__(
self,
package,
base_modification_prefix: Optional[Union[str, pathlib.Path]] = None,
link_install_prefix: bool = True,
):
def __init__(self, package, link_install_prefix=True):
"""
Args:
package (spack.package_base.PackageBase): Package requiring links
base_modification_prefix (str|pathlib.Path): Path representation indicating
the root directory in which to establish the simulated rpath, ie where the
symlinks that comprise the "rpath" behavior will be installed.
Note: This is a mutually exclusive option with `link_install_prefix` using
both is an error.
Default: None
link_install_prefix (bool): Link against package's own install or stage root.
Packages that run their own executables during build and require rpaths to
the build directory during build time require this option.
Default: install
the build directory during build time require this option. Default: install
root
Note: This is a mutually exclusive option with `base_modification_prefix`, using
both is an error.
"""
self.pkg = package
self._addl_rpaths: set[str] = set()
if link_install_prefix and base_modification_prefix:
raise RuntimeError(
"Invalid combination of arguments given to WindowsSimulated RPath.\n"
"Select either `link_install_prefix` to create an install prefix rpath"
" or specify a `base_modification_prefix` for any other link type. "
"Specifying both arguments is invalid."
)
if not (link_install_prefix or base_modification_prefix):
raise RuntimeError(
"Insufficient arguments given to WindowsSimulatedRpath.\n"
"WindowsSimulatedRPath requires one of link_install_prefix"
" or base_modification_prefix to be specified."
" Neither was provided."
)
self._addl_rpaths = set()
self.link_install_prefix = link_install_prefix
if base_modification_prefix:
self.base_modification_prefix = pathlib.Path(base_modification_prefix)
else:
self.base_modification_prefix = pathlib.Path(self.pkg.prefix)
self._additional_library_dependents: set[pathlib.Path] = set()
if not self.link_install_prefix:
tty.debug(f"Generating rpath for non install context: {base_modification_prefix}")
self._additional_library_dependents = set()
@property
def library_dependents(self):
"""
Set of directories where package binaries/libraries are located.
"""
base_pths = set()
if self.link_install_prefix:
base_pths.add(pathlib.Path(self.pkg.prefix.bin))
base_pths |= self._additional_library_dependents
return base_pths
return set([pathlib.Path(self.pkg.prefix.bin)]) | self._additional_library_dependents
def add_library_dependent(self, *dest):
"""
@@ -2534,12 +2489,6 @@ def add_library_dependent(self, *dest):
new_pth = pathlib.Path(pth).parent
else:
new_pth = pathlib.Path(pth)
path_is_in_prefix = new_pth.is_relative_to(self.base_modification_prefix)
if not path_is_in_prefix:
raise RuntimeError(
f"Attempting to generate rpath symlink out of rpath context:\
{str(self.base_modification_prefix)}"
)
self._additional_library_dependents.add(new_pth)
@property
@@ -2628,33 +2577,6 @@ def establish_link(self):
self._link(library, lib_dir)
def make_package_test_rpath(pkg, test_dir: Union[str, pathlib.Path]):
"""Establishes a temp Windows simulated rpath for the pkg in the testing directory
so an executable can test the libraries/executables with proper access
to dependent dlls
Note: this is a no-op on all other platforms besides Windows
Args:
pkg (spack.package_base.PackageBase): the package for which the rpath should be computed
test_dir: the testing directory in which we should construct an rpath
"""
# link_install_prefix as false ensures we're not linking into the install prefix
mini_rpath = WindowsSimulatedRPath(pkg, link_install_prefix=False)
# add the testing directory as a location to install rpath symlinks
mini_rpath.add_library_dependent(test_dir)
# check for whether build_directory is available, if not
# assume the stage root is the build dir
build_dir_attr = getattr(pkg, "build_directory", None)
build_directory = build_dir_attr if build_dir_attr else pkg.stage.path
# add the build dir & build dir bin
mini_rpath.add_rpath(os.path.join(build_directory, "bin"))
mini_rpath.add_rpath(os.path.join(build_directory))
# construct rpath
mini_rpath.establish_link()
@system_path_filter
@memoized
def can_access_dir(path):
@@ -2883,20 +2805,6 @@ def keep_modification_time(*filenames):
os.utime(f, (os.path.getatime(f), mtime))
@contextmanager
def temporary_file_position(stream):
orig_pos = stream.tell()
yield
stream.seek(orig_pos)
@contextmanager
def current_file_position(stream: IO[str], loc: int, relative_to=io.SEEK_CUR):
with temporary_file_position(stream):
stream.seek(loc, relative_to)
yield
@contextmanager
def temporary_dir(
suffix: Optional[str] = None, prefix: Optional[str] = None, dir: Optional[str] = None

View File

@@ -14,7 +14,7 @@
import typing
import warnings
from datetime import datetime, timedelta
from typing import Callable, Dict, Iterable, List, Mapping, Optional, Tuple, TypeVar
from typing import Callable, Dict, Iterable, List, Tuple, TypeVar
# Ignore emacs backups when listing modules
ignore_modules = r"^\.#|~$"
@@ -1080,88 +1080,3 @@ def __set__(self, instance, value):
def factory(self, instance, owner):
raise NotImplementedError("must be implemented by derived classes")
KT = TypeVar("KT")
VT = TypeVar("VT")
class PriorityOrderedMapping(Mapping[KT, VT]):
"""Mapping that iterates over key according to an integer priority. If the priority is
the same for two keys, insertion order is what matters.
The priority is set when the key/value pair is added. If not set, the highest current priority
is used.
"""
_data: Dict[KT, VT]
_priorities: List[Tuple[int, KT]]
def __init__(self) -> None:
self._data = {}
# Tuple of (priority, key)
self._priorities = []
def __getitem__(self, key: KT) -> VT:
return self._data[key]
def __len__(self) -> int:
return len(self._data)
def __iter__(self):
yield from (key for _, key in self._priorities)
def __reversed__(self):
yield from (key for _, key in reversed(self._priorities))
def reversed_keys(self):
"""Iterates over keys from the highest priority, to the lowest."""
return reversed(self)
def reversed_values(self):
"""Iterates over values from the highest priority, to the lowest."""
yield from (self._data[key] for _, key in reversed(self._priorities))
def _highest_priority(self) -> int:
if not self._priorities:
return 0
result, _ = self._priorities[-1]
return result
def add(self, key: KT, *, value: VT, priority: Optional[int] = None) -> None:
"""Adds a key/value pair to the mapping, with a specific priority.
If the priority is None, then it is assumed to be the highest priority value currently
in the container.
Raises:
ValueError: when the same priority is already in the mapping
"""
if priority is None:
priority = self._highest_priority()
if key in self._data:
self.remove(key)
self._priorities.append((priority, key))
# We rely on sort being stable
self._priorities.sort(key=lambda x: x[0])
self._data[key] = value
assert len(self._data) == len(self._priorities)
def remove(self, key: KT) -> VT:
"""Removes a key from the mapping.
Returns:
The value associated with the key being removed
Raises:
KeyError: if the key is not in the mapping
"""
if key not in self._data:
raise KeyError(f"cannot find {key}")
popped_item = self._data.pop(key)
self._priorities = [(p, k) for p, k in self._priorities if k != key]
assert len(self._data) == len(self._priorities)
return popped_item
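A short usage sketch for the mapping above, assuming the version of ``llnl.util.lang``
that defines it:

.. code-block:: python

   from llnl.util.lang import PriorityOrderedMapping

   m = PriorityOrderedMapping()
   m.add("mid", value=1, priority=0)
   m.add("late", value=2, priority=10)
   m.add("early", value=3, priority=-10)

   # Keys iterate from the lowest priority to the highest ...
   assert list(m) == ["early", "mid", "late"]
   # ... and reversed_values() walks values from the highest priority down.
   assert list(m.reversed_values()) == [2, 1, 3]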

View File

@@ -41,16 +41,6 @@ def __init__(self, dst, src_a=None, src_b=None):
self.src_a = src_a
self.src_b = src_b
def __repr__(self) -> str:
return f"MergeConflict(dst={self.dst!r}, src_a={self.src_a!r}, src_b={self.src_b!r})"
def _samefile(a: str, b: str):
try:
return os.path.samefile(a, b)
except OSError:
return False
class SourceMergeVisitor(BaseDirectoryVisitor):
"""
@@ -60,14 +50,9 @@ class SourceMergeVisitor(BaseDirectoryVisitor):
- A list of merge conflicts in dst/
"""
def __init__(
self, ignore: Optional[Callable[[str], bool]] = None, normalize_paths: bool = False
):
def __init__(self, ignore: Optional[Callable[[str], bool]] = None):
self.ignore = ignore if ignore is not None else lambda f: False
# On case-insensitive filesystems, normalize paths to detect duplications
self.normalize_paths = normalize_paths
# When mapping <src root> to <dst root>/<projection>, we need to prepend the <projection>
# bit to the relative path in the destination dir.
self.projection: str = ""
@@ -86,88 +71,10 @@ def __init__(
# and can run mkdir in order.
self.directories: Dict[str, Tuple[str, str]] = {}
# If the visitor is configured to normalize paths, keep a map of
# normalized path to: original path, root directory + relative path
self._directories_normalized: Dict[str, Tuple[str, str, str]] = {}
# Files to link. Maps dst_rel to (src_root, src_rel). This is an ordered dict, where files
# are guaranteed to be grouped by src_root in the order they were visited.
self.files: Dict[str, Tuple[str, str]] = {}
# If the visitor is configured to normalize paths, keep a map of
# normalized path to: original path, root directory + relative path
self._files_normalized: Dict[str, Tuple[str, str, str]] = {}
def _in_directories(self, proj_rel_path: str) -> bool:
"""
Check if a path is already in the directory list
"""
if self.normalize_paths:
return proj_rel_path.lower() in self._directories_normalized
else:
return proj_rel_path in self.directories
def _directory(self, proj_rel_path: str) -> Tuple[str, str, str]:
"""
Get the directory that is mapped to a path
"""
if self.normalize_paths:
return self._directories_normalized[proj_rel_path.lower()]
else:
return (proj_rel_path, *self.directories[proj_rel_path])
def _del_directory(self, proj_rel_path: str):
"""
Remove a directory from the list of directories
"""
del self.directories[proj_rel_path]
if self.normalize_paths:
del self._directories_normalized[proj_rel_path.lower()]
def _add_directory(self, proj_rel_path: str, root: str, rel_path: str):
"""
Add a directory to the list of directories.
Also stores the normalized version for later lookups
"""
self.directories[proj_rel_path] = (root, rel_path)
if self.normalize_paths:
self._directories_normalized[proj_rel_path.lower()] = (proj_rel_path, root, rel_path)
def _in_files(self, proj_rel_path: str) -> bool:
"""
Check if a path is already in the files list
"""
if self.normalize_paths:
return proj_rel_path.lower() in self._files_normalized
else:
return proj_rel_path in self.files
def _file(self, proj_rel_path: str) -> Tuple[str, str, str]:
"""
Get the file that is mapped to a path
"""
if self.normalize_paths:
return self._files_normalized[proj_rel_path.lower()]
else:
return (proj_rel_path, *self.files[proj_rel_path])
def _del_file(self, proj_rel_path: str):
"""
Remove a file from the list of files
"""
del self.files[proj_rel_path]
if self.normalize_paths:
del self._files_normalized[proj_rel_path.lower()]
def _add_file(self, proj_rel_path: str, root: str, rel_path: str):
"""
Add a file to the list of files
Also stores the normalized version for later lookups
"""
self.files[proj_rel_path] = (root, rel_path)
if self.normalize_paths:
self._files_normalized[proj_rel_path.lower()] = (proj_rel_path, root, rel_path)
def before_visit_dir(self, root: str, rel_path: str, depth: int) -> bool:
"""
Register a directory if dst / rel_path is not blocked by a file or ignored.
@@ -177,28 +84,23 @@ def before_visit_dir(self, root: str, rel_path: str, depth: int) -> bool:
if self.ignore(rel_path):
# Don't recurse when dir is ignored.
return False
elif self._in_files(proj_rel_path):
# A file-dir conflict is fatal except if they're the same file (symlinked dir).
src_a = os.path.join(*self._file(proj_rel_path))
src_b = os.path.join(root, rel_path)
if not _samefile(src_a, src_b):
self.fatal_conflicts.append(
MergeConflict(dst=proj_rel_path, src_a=src_a, src_b=src_b)
elif proj_rel_path in self.files:
# Can't create a dir where a file is.
src_a_root, src_a_relpath = self.files[proj_rel_path]
self.fatal_conflicts.append(
MergeConflict(
dst=proj_rel_path,
src_a=os.path.join(src_a_root, src_a_relpath),
src_b=os.path.join(root, rel_path),
)
return False
# Remove the link in favor of the dir.
existing_proj_rel_path, _, _ = self._file(proj_rel_path)
self._del_file(existing_proj_rel_path)
self._add_directory(proj_rel_path, root, rel_path)
return True
elif self._in_directories(proj_rel_path):
)
return False
elif proj_rel_path in self.directories:
# No new directory, carry on.
return True
else:
# Register new directory.
self._add_directory(proj_rel_path, root, rel_path)
self.directories[proj_rel_path] = (root, rel_path)
return True
def before_visit_symlinked_dir(self, root: str, rel_path: str, depth: int) -> bool:
@@ -230,7 +132,7 @@ def before_visit_symlinked_dir(self, root: str, rel_path: str, depth: int) -> bo
if handle_as_dir:
return self.before_visit_dir(root, rel_path, depth)
self.visit_file(root, rel_path, depth, symlink=True)
self.visit_file(root, rel_path, depth)
return False
def visit_file(self, root: str, rel_path: str, depth: int, *, symlink: bool = False) -> None:
@@ -238,23 +140,30 @@ def visit_file(self, root: str, rel_path: str, depth: int, *, symlink: bool = Fa
if self.ignore(rel_path):
pass
elif self._in_directories(proj_rel_path):
# Can't create a file where a dir is, unless they are the same file (symlinked dir),
# in which case we simply drop the symlink in favor of the actual dir.
src_a = os.path.join(*self._directory(proj_rel_path))
src_b = os.path.join(root, rel_path)
if not symlink or not _samefile(src_a, src_b):
self.fatal_conflicts.append(
MergeConflict(dst=proj_rel_path, src_a=src_a, src_b=src_b)
elif proj_rel_path in self.directories:
# Can't create a file where a dir is; fatal error
self.fatal_conflicts.append(
MergeConflict(
dst=proj_rel_path,
src_a=os.path.join(*self.directories[proj_rel_path]),
src_b=os.path.join(root, rel_path),
)
elif self._in_files(proj_rel_path):
)
elif proj_rel_path in self.files:
# When two files project to the same path, they conflict iff they are distinct.
# If they are the same (i.e. one links to the other), register regular files rather
# than symlinks. The reason is that in copy-type views, we need a copy of the actual
# file, not the symlink.
src_a = os.path.join(*self._file(proj_rel_path))
src_a = os.path.join(*self.files[proj_rel_path])
src_b = os.path.join(root, rel_path)
if not _samefile(src_a, src_b):
try:
samefile = os.path.samefile(src_a, src_b)
except OSError:
samefile = False
if not samefile:
# Distinct files produce a conflict.
self.file_conflicts.append(
MergeConflict(dst=proj_rel_path, src_a=src_a, src_b=src_b)
@@ -264,12 +173,12 @@ def visit_file(self, root: str, rel_path: str, depth: int, *, symlink: bool = Fa
if not symlink:
# Remove the link in favor of the actual file. The del is necessary to maintain the
# order of the files dict, which is grouped by root.
existing_proj_rel_path, _, _ = self._file(proj_rel_path)
self._del_file(existing_proj_rel_path)
self._add_file(proj_rel_path, root, rel_path)
del self.files[proj_rel_path]
self.files[proj_rel_path] = (root, rel_path)
else:
# Otherwise register this file to be linked.
self._add_file(proj_rel_path, root, rel_path)
self.files[proj_rel_path] = (root, rel_path)
def visit_symlinked_file(self, root: str, rel_path: str, depth: int) -> None:
# Treat symlinked files as ordinary files (without "dereferencing")
@@ -288,11 +197,11 @@ def set_projection(self, projection: str) -> None:
path = ""
for part in self.projection.split(os.sep):
path = os.path.join(path, part)
if not self._in_files(path):
self._add_directory(path, "<projection>", path)
if path not in self.files:
self.directories[path] = ("<projection>", path)
else:
# Can't create a dir where a file is.
_, src_a_root, src_a_relpath = self._file(path)
src_a_root, src_a_relpath = self.files[path]
self.fatal_conflicts.append(
MergeConflict(
dst=path,
@@ -318,8 +227,8 @@ def __init__(self, source_merge_visitor: SourceMergeVisitor):
def before_visit_dir(self, root: str, rel_path: str, depth: int) -> bool:
# If destination dir is a file in a src dir, add a conflict,
# and don't traverse deeper
if self.src._in_files(rel_path):
_, src_a_root, src_a_relpath = self.src._file(rel_path)
if rel_path in self.src.files:
src_a_root, src_a_relpath = self.src.files[rel_path]
self.src.fatal_conflicts.append(
MergeConflict(
rel_path, os.path.join(src_a_root, src_a_relpath), os.path.join(root, rel_path)
@@ -329,9 +238,8 @@ def before_visit_dir(self, root: str, rel_path: str, depth: int) -> bool:
# If destination dir was also a src dir, remove the mkdir
# action, and traverse deeper.
if self.src._in_directories(rel_path):
existing_proj_rel_path, _, _ = self.src._directory(rel_path)
self.src._del_directory(existing_proj_rel_path)
if rel_path in self.src.directories:
del self.src.directories[rel_path]
return True
# If the destination dir does not appear in the src dir,
@@ -344,24 +252,38 @@ def before_visit_symlinked_dir(self, root: str, rel_path: str, depth: int) -> bo
be seen as files; we should not accidentally merge
source dir with a symlinked dest dir.
"""
self.visit_file(root, rel_path, depth)
# Never descend into symlinked target dirs.
return False
def visit_file(self, root: str, rel_path: str, depth: int) -> None:
# Can't merge a file if target already exists
if self.src._in_directories(rel_path):
_, src_a_root, src_a_relpath = self.src._directory(rel_path)
# Always conflict
if rel_path in self.src.directories:
src_a_root, src_a_relpath = self.src.directories[rel_path]
self.src.fatal_conflicts.append(
MergeConflict(
rel_path, os.path.join(src_a_root, src_a_relpath), os.path.join(root, rel_path)
)
)
elif self.src._in_files(rel_path):
_, src_a_root, src_a_relpath = self.src._file(rel_path)
if rel_path in self.src.files:
src_a_root, src_a_relpath = self.src.files[rel_path]
self.src.fatal_conflicts.append(
MergeConflict(
rel_path, os.path.join(src_a_root, src_a_relpath), os.path.join(root, rel_path)
)
)
# Never descend into symlinked target dirs.
return False
def visit_file(self, root: str, rel_path: str, depth: int) -> None:
# Can't merge a file if target already exists
if rel_path in self.src.directories:
src_a_root, src_a_relpath = self.src.directories[rel_path]
self.src.fatal_conflicts.append(
MergeConflict(
rel_path, os.path.join(src_a_root, src_a_relpath), os.path.join(root, rel_path)
)
)
elif rel_path in self.src.files:
src_a_root, src_a_relpath = self.src.files[rel_path]
self.src.fatal_conflicts.append(
MergeConflict(
rel_path, os.path.join(src_a_root, src_a_relpath), os.path.join(root, rel_path)
@@ -386,7 +308,7 @@ class LinkTree:
def __init__(self, source_root):
if not os.path.exists(source_root):
raise OSError("No such file or directory: '%s'", source_root)
raise IOError("No such file or directory: '%s'", source_root)
self._root = source_root

View File

@@ -269,7 +269,7 @@ def __init__(
@staticmethod
def _poll_interval_generator(
_wait_times: Optional[Tuple[float, float, float]] = None,
_wait_times: Optional[Tuple[float, float, float]] = None
) -> Generator[float, None, None]:
"""This implements a backoff scheme for polling a contended resource
by suggesting a succession of wait times between polls.
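The scheme can be sketched as a generator that yields short waits at first and
progressively longer ones, roughly in the spirit of ``_poll_interval_generator``
(the stage lengths and wait times below are made-up values):

.. code-block:: python

   from typing import Generator, Optional, Tuple

   def poll_intervals(
       wait_times: Optional[Tuple[float, float, float]] = None
   ) -> Generator[float, None, None]:
       # Three-stage backoff: poll quickly at first, then back off.
       short, medium, long_wait = wait_times or (0.1, 0.5, 2.0)
       for _ in range(30):
           yield short
       for _ in range(30):
           yield medium
       while True:
           yield long_wait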
@@ -391,7 +391,7 @@ def _poll_lock(self, op: int) -> bool:
return True
except OSError as e:
except IOError as e:
# EAGAIN and EACCES == locked by another process (so try again)
if e.errno not in (errno.EAGAIN, errno.EACCES):
raise

View File

@@ -2,7 +2,8 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Utility classes for logging the output of blocks of code."""
"""Utility classes for logging the output of blocks of code.
"""
import atexit
import ctypes
import errno
@@ -343,6 +344,26 @@ def close(self):
self.file.close()
@contextmanager
def replace_environment(env):
"""Replace the current environment (`os.environ`) with `env`.
If `env` is empty (or None), this unsets all current environment
variables.
"""
env = env or {}
old_env = os.environ.copy()
try:
os.environ.clear()
for name, val in env.items():
os.environ[name] = val
yield
finally:
os.environ.clear()
for name, val in old_env.items():
os.environ[name] = val
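A brief usage sketch for ``replace_environment``: inside the block only the given
variables are set, and the previous environment is restored on exit.

.. code-block:: python

   import os

   with replace_environment({"PATH": "/usr/bin", "LANG": "C"}):
       assert os.environ["LANG"] == "C"
       assert "HOME" not in os.environ  # everything else was unset
   # the original os.environ is back in place here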
def log_output(*args, **kwargs):
"""Context manager that logs its output to a file.
@@ -426,6 +447,7 @@ def __init__(
self.echo = echo
self.debug = debug
self.buffer = buffer
self.env = env # the environment to use for _writer_daemon
self.filter_fn = filter_fn
self._active = False # used to prevent re-entry
@@ -497,20 +519,21 @@ def __enter__(self):
# just don't forward input if this fails
pass
self.process = multiprocessing.Process(
target=_writer_daemon,
args=(
input_fd,
read_fd,
self.write_fd,
self.echo,
self.log_file,
child_pipe,
self.filter_fn,
),
)
self.process.daemon = True # must set before start()
self.process.start()
with replace_environment(self.env):
self.process = multiprocessing.Process(
target=_writer_daemon,
args=(
input_fd,
read_fd,
self.write_fd,
self.echo,
self.log_file,
child_pipe,
self.filter_fn,
),
)
self.process.daemon = True # must set before start()
self.process.start()
finally:
if input_fd:
@@ -706,7 +729,10 @@ class winlog:
Does not support the use of 'v' toggling as nixlog does.
"""
def __init__(self, file_like=None, echo=False, debug=0, buffer=False, filter_fn=None):
def __init__(
self, file_like=None, echo=False, debug=0, buffer=False, env=None, filter_fn=None
):
self.env = env
self.debug = debug
self.echo = echo
self.logfile = file_like
@@ -763,10 +789,11 @@ def background_reader(reader, echo_writer, _kill):
reader.close()
self._active = True
self._thread = Thread(
target=background_reader, args=(self.reader, self.echo_writer, self._kill)
)
self._thread.start()
with replace_environment(self.env):
self._thread = Thread(
target=background_reader, args=(self.reader, self.echo_writer, self._kill)
)
self._thread.start()
return self
def __exit__(self, exc_type, exc_val, exc_tb):
@@ -891,7 +918,7 @@ def _writer_daemon(
try:
if stdin_file.read(1) == "v":
echo = not echo
except OSError as e:
except IOError as e:
# If SIGTTIN is ignored, the system gives EIO
# to let the caller know the read failed b/c it
# was in the bg. Ignore that too.
@@ -986,7 +1013,7 @@ def wrapped(*args, **kwargs):
while True:
try:
return function(*args, **kwargs)
except OSError as e:
except IOError as e:
if e.errno == errno.EINTR:
continue
raise

View File

@@ -10,21 +10,9 @@
import spack.util.git
#: PEP440 canonical <major>.<minor>.<micro>.<devN> string
__version__ = "1.0.0.dev0"
__version__ = "0.24.0.dev0"
spack_version = __version__
#: The current Package API version implemented by this version of Spack. The Package API defines
#: the Python interface for packages as well as the layout of package repositories. The minor
#: version is incremented when the package API is extended in a backwards-compatible way. The major
#: version is incremented upon breaking changes. This version is changed independently from the
#: Spack version.
package_api_version = (1, 0)
#: The minimum Package API version that this version of Spack is compatible with. This should
#: always be a tuple of the form ``(major, 0)``, since compatibility with vX.Y implies
#: compatibility with vX.0.
min_package_api_version = (1, 0)
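One plausible reading of this rule, as a small sketch (not necessarily Spack's
exact check): a repository requiring API ``(major, minor)`` is usable when the
major version is within the supported range and, for the implemented major, the
minor does not exceed what Spack provides.

.. code-block:: python

   def repo_api_is_compatible(required, implemented=(1, 0), minimum=(1, 0)):
       # required / implemented / minimum are (major, minor) tuples.
       req_major, req_minor = required
       if not (minimum[0] <= req_major <= implemented[0]):
           return False
       return req_major < implemented[0] or req_minor <= implemented[1]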
def __try_int(v):
try:
@@ -91,6 +79,4 @@ def get_short_version() -> str:
"get_version",
"get_spack_commit",
"get_short_version",
"package_api_version",
"min_package_api_version",
]

View File

@@ -1010,7 +1010,7 @@ def _issues_in_depends_on_directive(pkgs, error_cls):
for dep_name, dep in deps_by_name.items():
def check_virtual_with_variants(spec, msg):
if not spack.repo.PATH.is_virtual(spec.name) or not spec.variants:
if not spec.virtual or not spec.variants:
return
error = error_cls(
f"{pkg_name}: {msg}",

View File

@@ -5,7 +5,6 @@
import codecs
import collections
import concurrent.futures
import contextlib
import copy
import hashlib
import io
@@ -802,7 +801,7 @@ def url_read_method(url):
try:
_, _, spec_file = web_util.read_from_url(url)
contents = codecs.getreader("utf-8")(spec_file).read()
except (web_util.SpackWebError, OSError) as e:
except web_util.SpackWebError as e:
tty.error(f"Error reading specfile: {url}: {e}")
return contents
@@ -923,7 +922,7 @@ class FileTypes:
UNKNOWN = 2
NOT_ISO8859_1_TEXT = re.compile(b"[\x00\x7f-\x9f]")
NOT_ISO8859_1_TEXT = re.compile(b"[\x00\x7F-\x9F]")
def file_type(f: IO[bytes]) -> int:
@@ -2010,7 +2009,7 @@ def fetch_url_to_mirror(url):
# Download the config = spec.json and the relevant tarball
try:
manifest = json.load(response)
manifest = json.loads(response.read())
spec_digest = spack.oci.image.Digest.from_string(manifest["config"]["digest"])
tarball_digest = spack.oci.image.Digest.from_string(
manifest["layers"][-1]["digest"]
@@ -2271,24 +2270,6 @@ def relocate_package(spec: spack.spec.Spec) -> None:
with fsys.edit_in_place_through_temporary_file(binary) as tmp_binary:
codesign("-fs-", tmp_binary)
install_manifest = os.path.join(
spec.prefix,
spack.store.STORE.layout.metadata_dir,
spack.store.STORE.layout.manifest_file_name,
)
if not os.path.exists(install_manifest):
spec_id = spec.format("{name}/{hash:7}")
tty.warn("No manifest file in tarball for spec %s" % spec_id)
# overwrite old metadata with new
if spec.spliced:
# rewrite spec on disk
spack.store.STORE.layout.write_spec(spec, spack.store.STORE.layout.spec_file_path(spec))
# de-cache the install manifest
with contextlib.suppress(FileNotFoundError):
os.unlink(install_manifest)
def _extract_inner_tarball(spec, filename, extract_to, signature_required: bool, remote_checksum):
stagepath = os.path.dirname(filename)
@@ -2455,6 +2436,15 @@ def extract_tarball(spec, download_result, force=False, timer=timer.NULL_TIMER):
except Exception as e:
shutil.rmtree(spec.prefix, ignore_errors=True)
raise e
else:
manifest_file = os.path.join(
spec.prefix,
spack.store.STORE.layout.metadata_dir,
spack.store.STORE.layout.manifest_file_name,
)
if not os.path.exists(manifest_file):
spec_id = spec.format("{name}/{hash:7}")
tty.warn("No manifest file in tarball for spec %s" % spec_id)
finally:
if tmpdir:
shutil.rmtree(tmpdir, ignore_errors=True)
@@ -2529,10 +2519,10 @@ def install_root_node(
allow_missing: when true, allows installing a node with missing dependencies
"""
# Early termination
if spec.external or not spec.concrete:
warnings.warn("Skipping external or abstract spec {0}".format(spec.format()))
if spec.external or spec.virtual:
warnings.warn("Skipping external or virtual package {0}".format(spec.format()))
return
elif spec.installed and not force:
elif spec.concrete and spec.installed and not force:
warnings.warn("Package for spec {0} already installed.".format(spec.format()))
return
@@ -2559,6 +2549,10 @@ def install_root_node(
tty.msg('Installing "{0}" from a buildcache'.format(spec.format()))
extract_tarball(spec, download_result, force)
spec.package.windows_establish_runtime_linkage()
if spec.spliced: # overwrite old metadata with new
spack.store.STORE.layout.write_spec(
spec, spack.store.STORE.layout.spec_file_path(spec)
)
spack.hooks.post_install(spec, False)
spack.store.STORE.db.add(spec, allow_missing=allow_missing)
@@ -2596,14 +2590,11 @@ def try_direct_fetch(spec, mirrors=None):
)
try:
_, _, fs = web_util.read_from_url(buildcache_fetch_url_signed_json)
specfile_contents = codecs.getreader("utf-8")(fs).read()
specfile_is_signed = True
except (web_util.SpackWebError, OSError) as e1:
except web_util.SpackWebError as e1:
try:
_, _, fs = web_util.read_from_url(buildcache_fetch_url_json)
specfile_contents = codecs.getreader("utf-8")(fs).read()
specfile_is_signed = False
except (web_util.SpackWebError, OSError) as e2:
except web_util.SpackWebError as e2:
tty.debug(
f"Did not find {specfile_name} on {buildcache_fetch_url_signed_json}",
e1,
@@ -2613,6 +2604,7 @@ def try_direct_fetch(spec, mirrors=None):
f"Did not find {specfile_name} on {buildcache_fetch_url_json}", e2, level=2
)
continue
specfile_contents = codecs.getreader("utf-8")(fs).read()
# read the spec from the build cache file. All specs in build caches
# are concrete (as they are built) so we need to mark this spec
@@ -2706,9 +2698,8 @@ def get_keys(install=False, trust=False, force=False, mirrors=None):
try:
_, _, json_file = web_util.read_from_url(keys_index)
json_index = sjson.load(json_file)
except (web_util.SpackWebError, OSError, ValueError) as url_err:
# TODO: avoid repeated request
json_index = sjson.load(codecs.getreader("utf-8")(json_file))
except web_util.SpackWebError as url_err:
if web_util.url_exists(keys_index):
tty.error(
f"Unable to find public keys in {url_util.format(fetch_url)},"
@@ -2958,11 +2949,11 @@ def get_remote_hash(self):
url_index_hash = url_util.join(self.url, BUILD_CACHE_RELATIVE_PATH, INDEX_HASH_FILE)
try:
response = self.urlopen(urllib.request.Request(url_index_hash, headers=self.headers))
remote_hash = response.read(64)
except OSError:
except (TimeoutError, urllib.error.URLError):
return None
# Validate the hash
remote_hash = response.read(64)
if not re.match(rb"[a-f\d]{64}$", remote_hash):
return None
return remote_hash.decode("utf-8")
@@ -2980,13 +2971,13 @@ def conditional_fetch(self) -> FetchIndexResult:
try:
response = self.urlopen(urllib.request.Request(url_index, headers=self.headers))
except OSError as e:
raise FetchIndexError(f"Could not fetch index from {url_index}", e) from e
except (TimeoutError, urllib.error.URLError) as e:
raise FetchIndexError("Could not fetch index from {}".format(url_index), e) from e
try:
result = codecs.getreader("utf-8")(response).read()
except (ValueError, OSError) as e:
raise FetchIndexError(f"Remote index {url_index} is invalid") from e
except ValueError as e:
raise FetchIndexError("Remote index {} is invalid".format(url_index), e) from e
computed_hash = compute_hash(result)
@@ -3030,12 +3021,12 @@ def conditional_fetch(self) -> FetchIndexResult:
# Not modified; that means fresh.
return FetchIndexResult(etag=None, hash=None, data=None, fresh=True)
raise FetchIndexError(f"Could not fetch index {url}", e) from e
except OSError as e: # URLError, socket.timeout, etc.
except (TimeoutError, urllib.error.URLError) as e:
raise FetchIndexError(f"Could not fetch index {url}", e) from e
try:
result = codecs.getreader("utf-8")(response).read()
except (ValueError, OSError) as e:
except ValueError as e:
raise FetchIndexError(f"Remote index {url} is invalid", e) from e
headers = response.headers
@@ -3067,11 +3058,11 @@ def conditional_fetch(self) -> FetchIndexResult:
headers={"Accept": "application/vnd.oci.image.manifest.v1+json"},
)
)
except OSError as e:
except (TimeoutError, urllib.error.URLError) as e:
raise FetchIndexError(f"Could not fetch manifest from {url_manifest}", e) from e
try:
manifest = json.load(response)
manifest = json.loads(response.read())
except Exception as e:
raise FetchIndexError(f"Remote index {url_manifest} is invalid", e) from e
@@ -3086,16 +3077,14 @@ def conditional_fetch(self) -> FetchIndexResult:
return FetchIndexResult(etag=None, hash=None, data=None, fresh=True)
# Otherwise fetch the blob / index.json
try:
response = self.urlopen(
urllib.request.Request(
url=self.ref.blob_url(index_digest),
headers={"Accept": "application/vnd.oci.image.layer.v1.tar+gzip"},
)
response = self.urlopen(
urllib.request.Request(
url=self.ref.blob_url(index_digest),
headers={"Accept": "application/vnd.oci.image.layer.v1.tar+gzip"},
)
result = codecs.getreader("utf-8")(response).read()
except (OSError, ValueError) as e:
raise FetchIndexError(f"Remote index {url_manifest} is invalid", e) from e
)
result = codecs.getreader("utf-8")(response).read()
# Make sure the blob we download has the advertised hash
if compute_hash(result) != index_digest.digest:
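Aside (not part of the diff): after the blob is downloaded, its hash is recomputed and compared with the digest advertised in the manifest. A self-contained sketch of that check with hashlib; the payload and digest below are invented:

import hashlib

def verify_blob(data: str, advertised_digest: str) -> bool:
    """Return True if the sha256 of data matches the advertised hex digest."""
    computed = hashlib.sha256(data.encode("utf-8")).hexdigest()
    return computed == advertised_digest

payload = '{"database": {"installs": {}}}'
digest = hashlib.sha256(payload.encode("utf-8")).hexdigest()
assert verify_blob(payload, digest)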

View File

@@ -5,7 +5,7 @@
import fnmatch
import glob
import importlib
import os
import os.path
import re
import sys
import sysconfig

View File

@@ -27,9 +27,9 @@
class ClingoBootstrapConcretizer:
def __init__(self, configuration):
self.host_platform = spack.platforms.host()
self.host_os = self.host_platform.default_operating_system()
self.host_os = self.host_platform.operating_system("frontend")
self.host_target = archspec.cpu.host().family
self.host_architecture = spack.spec.ArchSpec.default_arch()
self.host_architecture = spack.spec.ArchSpec.frontend_arch()
self.host_architecture.target = str(self.host_target)
self.host_compiler = self._valid_compiler_or_raise()
self.host_python = self.python_external_spec()

View File

@@ -4,7 +4,7 @@
"""Manage configuration swapping for bootstrapping purposes"""
import contextlib
import os
import os.path
import sys
from typing import Any, Dict, Generator, MutableSequence, Sequence
@@ -141,7 +141,7 @@ def _bootstrap_config_scopes() -> Sequence["spack.config.ConfigScope"]:
def _add_compilers_if_missing() -> None:
arch = spack.spec.ArchSpec.default_arch()
arch = spack.spec.ArchSpec.frontend_arch()
if not spack.compilers.compilers_for_arch(arch):
spack.compilers.find_compilers()

View File

@@ -25,6 +25,7 @@
import functools
import json
import os
import os.path
import sys
import uuid
from typing import Any, Callable, Dict, List, Optional, Tuple
@@ -45,7 +46,6 @@
import spack.util.executable
import spack.util.path
import spack.util.spack_yaml
import spack.util.url
import spack.version
from spack.installer import PackageInstaller
@@ -97,12 +97,8 @@ def __init__(self, conf: ConfigDictionary) -> None:
self.name = conf["name"]
self.metadata_dir = spack.util.path.canonicalize_path(conf["metadata"])
# Check for relative paths, and turn them into absolute paths
# root is the metadata_dir
maybe_url = conf["info"]["url"]
if spack.util.url.is_path_instead_of_url(maybe_url) and not os.path.isabs(maybe_url):
maybe_url = os.path.join(self.metadata_dir, maybe_url)
self.url = spack.mirrors.mirror.Mirror(maybe_url).fetch_url
# Promote (relative) paths to file urls
self.url = spack.mirrors.mirror.Mirror(conf["info"]["url"]).fetch_url
@property
def mirror_scope(self) -> spack.config.InternalConfigScope:
@@ -292,12 +288,7 @@ def try_import(self, module: str, abstract_spec_str: str) -> bool:
# Install the spec that should make the module importable
with spack.config.override(self.mirror_scope):
PackageInstaller(
[concrete_spec.package],
fail_fast=True,
package_use_cache=False,
dependencies_use_cache=False,
).install()
PackageInstaller([concrete_spec.package], fail_fast=True).install()
if _try_import_from_store(module, query_spec=concrete_spec, query_info=info):
self.last_search = info
@@ -367,7 +358,6 @@ def ensure_module_importable_or_raise(module: str, abstract_spec: Optional[str]
for current_config in bootstrapping_sources():
if not source_is_enabled(current_config):
continue
with exception_handler.forward(current_config["name"], Exception):
if create_bootstrapper(current_config).try_import(module, abstract_spec):
return

View File

@@ -2,6 +2,7 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os
import os.path
import stat
import subprocess
from typing import Callable, List, Optional, Set, Tuple, Union
@@ -190,6 +191,177 @@ def archive_files(self) -> List[str]:
files.append(self._removed_la_files_log)
return files
@property
def configure_directory(self) -> str:
"""Return the directory where 'configure' resides."""
return self.pkg.stage.source_path
@property
def configure_abs_path(self) -> str:
# Absolute path to configure
configure_abs_path = os.path.join(os.path.abspath(self.configure_directory), "configure")
return configure_abs_path
@property
def build_directory(self) -> str:
"""Override to provide another place to build the package"""
# Handle the case where the configure directory is set to a non-absolute path
# Non-absolute paths are always relative to the staging source path
build_dir = self.configure_directory
if not os.path.isabs(build_dir):
build_dir = os.path.join(self.pkg.stage.source_path, build_dir)
return build_dir
@property
def autoreconf_search_path_args(self) -> List[str]:
"""Search path includes for autoreconf. Add an -I flag for all `aclocal` dirs
of build deps, skips the default path of automake, move external include
flags to the back, since they might pull in unrelated m4 files shadowing
spack dependencies."""
return _autoreconf_search_path_args(self.spec)
def autoreconf(
self, pkg: AutotoolsPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
) -> None:
"""Not needed usually, configure should be already there"""
# If configure exists nothing needs to be done
if os.path.exists(self.configure_abs_path):
return
# Else try to regenerate it, which requires a few build dependencies
ensure_build_dependencies_or_raise(
spec=spec,
dependencies=["autoconf", "automake", "libtool"],
error_msg="Cannot generate configure",
)
tty.msg("Configure script not found: trying to generate it")
tty.warn("*********************************************************")
tty.warn("* If the default procedure fails, consider implementing *")
tty.warn("* a custom AUTORECONF phase in the package *")
tty.warn("*********************************************************")
with fs.working_dir(self.configure_directory):
# This line is what is needed most of the time
# --install, --verbose, --force
autoreconf_args = ["-ivf"]
autoreconf_args += self.autoreconf_search_path_args
autoreconf_args += self.autoreconf_extra_args
self.pkg.module.autoreconf(*autoreconf_args)
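Aside (not part of the diff): the autoreconf phase above only runs when no configure script exists, and then invokes autoreconf -ivf plus the builder's search-path and extra arguments. A hypothetical package that forces regeneration might look roughly like this; the package name and flags are invented, and a Spack checkout on PYTHONPATH is assumed:

from spack.package import AutotoolsPackage  # import path assumed

class Libdemo(AutotoolsPackage):
    """Hypothetical package that always regenerates its configure script."""

    force_autoreconf = True            # configure is deleted before the autoreconf phase
    autoreconf_extra_args = ["-Wall"]  # appended after the default -ivf arguments

    def configure_args(self):
        # extra flags passed to ./configure after --prefix=<prefix>
        return ["--disable-static"]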
def configure(
self, pkg: AutotoolsPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
) -> None:
"""Run "configure", with the arguments specified by the builder and an
appropriately set prefix.
"""
options = getattr(self.pkg, "configure_flag_args", [])
options += ["--prefix={0}".format(prefix)]
options += self.configure_args()
with fs.working_dir(self.build_directory, create=True):
pkg.module.configure(*options)
def build(
self, pkg: AutotoolsPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
) -> None:
"""Run "make" on the build targets specified by the builder."""
# See https://autotools.io/automake/silent.html
params = ["V=1"]
params += self.build_targets
with fs.working_dir(self.build_directory):
pkg.module.make(*params)
def install(
self, pkg: AutotoolsPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
) -> None:
"""Run "make" on the install targets specified by the builder."""
with fs.working_dir(self.build_directory):
pkg.module.make(*self.install_targets)
def check(self) -> None:
"""Run "make" on the ``test`` and ``check`` targets, if found."""
with fs.working_dir(self.build_directory):
self.pkg._if_make_target_execute("test")
self.pkg._if_make_target_execute("check")
def installcheck(self) -> None:
"""Run "make" on the ``installcheck`` target, if found."""
with fs.working_dir(self.build_directory):
self.pkg._if_make_target_execute("installcheck")
def setup_build_environment(self, env):
if self.spec.platform == "darwin" and macos_version() >= Version("11"):
# Many configure files rely on matching '10.*' for macOS version
# detection and fail to add flags if it shows as version 11.
env.set("MACOSX_DEPLOYMENT_TARGET", "10.16")
def with_or_without(
self,
name: str,
activation_value: Optional[Union[Callable, str]] = None,
variant: Optional[str] = None,
) -> List[str]:
"""Inspects a variant and returns the arguments that activate
or deactivate the selected feature(s) for the configure options.
This function works on all types of variants. For bool-valued variants
it will return by default ``--with-{name}`` or ``--without-{name}``.
For other kinds of variants it will cycle over the allowed values and
return either ``--with-{value}`` or ``--without-{value}``.
If activation_value is given, then for each possible value of the
variant, the option ``--with-{value}=activation_value(value)`` or
``--without-{value}`` will be added depending on whether or not
``variant=value`` is in the spec.
Args:
name: name of a valid multi-valued variant
activation_value: callable that accepts a single value and returns the parameter to be
used leading to an entry of the type ``--with-{name}={parameter}``.
The special value "prefix" can also be assigned and will return
``spec[name].prefix`` as activation parameter.
Returns:
list of arguments to configure
"""
return self._activate_or_not(name, "with", "without", activation_value, variant)
def enable_or_disable(
self,
name: str,
activation_value: Optional[Union[Callable, str]] = None,
variant: Optional[str] = None,
) -> List[str]:
"""Same as
:meth:`~spack.build_systems.autotools.AutotoolsBuilder.with_or_without`
but substitute ``with`` with ``enable`` and ``without`` with ``disable``.
Args:
name: name of a valid multi-valued variant
activation_value: if present accepts a single value and returns the parameter to be
used leading to an entry of the type ``--enable-{name}={parameter}``
The special value "prefix" can also be assigned and will return
``spec[name].prefix`` as activation parameter.
Returns:
list of arguments to configure
"""
return self._activate_or_not(name, "enable", "disable", activation_value, variant)
def configure_args(self) -> List[str]:
"""Return the list of all the arguments that must be passed to configure,
except ``--prefix`` which will be pre-pended to the list.
"""
return []
@spack.phase_callbacks.run_before("autoreconf")
def _delete_configure_to_force_update(self) -> None:
if self.force_autoreconf:
fs.force_remove(self.configure_abs_path)
@spack.phase_callbacks.run_after("autoreconf")
def _do_patch_config_files(self) -> None:
"""Some packages ship with older config.guess/config.sub files and need to
@@ -302,6 +474,24 @@ def runs_ok(script_abs_path):
fs.copy(substitutes[name], abs_path)
os.chmod(abs_path, mode)
@spack.phase_callbacks.run_after("autoreconf")
def _set_configure_or_die(self) -> None:
"""Ensure the presence of a "configure" script, or raise. If the "configure"
script is found, a module-level attribute is set.
Raises:
RuntimeError: if the "configure" script is not found
"""
# Check if the "configure" script is there. If not raise a RuntimeError.
if not os.path.exists(self.configure_abs_path):
msg = "configure script not found in {0}"
raise RuntimeError(msg.format(self.configure_directory))
# Monkey-patch the configure script in the corresponding module
globals_for_pkg = spack.build_environment.ModuleChangePropagator(self.pkg)
globals_for_pkg.configure = Executable(self.configure_abs_path)
globals_for_pkg.propagate_changes_to_mro()
@spack.phase_callbacks.run_before("configure")
def _patch_usr_bin_file(self) -> None:
"""On NixOS file is not available in /usr/bin/file. Patch configure
@@ -511,130 +701,27 @@ def _do_patch_libtool(self) -> None:
stop_at=stop_at,
)
@property
def configure_directory(self) -> str:
"""Return the directory where 'configure' resides."""
return self.pkg.stage.source_path
spack.phase_callbacks.run_after("build")(execute_build_time_tests)
spack.phase_callbacks.run_after("install")(execute_install_time_tests)
@property
def configure_abs_path(self) -> str:
# Absolute path to configure
configure_abs_path = os.path.join(os.path.abspath(self.configure_directory), "configure")
return configure_abs_path
@property
def build_directory(self) -> str:
"""Override to provide another place to build the package"""
# Handle the case where the configure directory is set to a non-absolute path
# Non-absolute paths are always relative to the staging source path
build_dir = self.configure_directory
if not os.path.isabs(build_dir):
build_dir = os.path.join(self.pkg.stage.source_path, build_dir)
return build_dir
@spack.phase_callbacks.run_before("autoreconf")
def _delete_configure_to_force_update(self) -> None:
if self.force_autoreconf:
fs.force_remove(self.configure_abs_path)
@property
def autoreconf_search_path_args(self) -> List[str]:
"""Search path includes for autoreconf. Add an -I flag for all `aclocal` dirs
of build deps, skips the default path of automake, move external include
flags to the back, since they might pull in unrelated m4 files shadowing
spack dependencies."""
return _autoreconf_search_path_args(self.spec)
@spack.phase_callbacks.run_after("autoreconf")
def _set_configure_or_die(self) -> None:
"""Ensure the presence of a "configure" script, or raise. If the "configure"
script is found, a module-level attribute is set.
Raises:
RuntimeError: if the "configure" script is not found
@spack.phase_callbacks.run_after("install")
def _remove_libtool_archives(self) -> None:
"""Remove all .la files in prefix sub-folders if the package sets
``install_libtool_archives`` to be False.
"""
# Check if the "configure" script is there. If not raise a RuntimeError.
if not os.path.exists(self.configure_abs_path):
msg = "configure script not found in {0}"
raise RuntimeError(msg.format(self.configure_directory))
# Monkey-patch the configure script in the corresponding module
globals_for_pkg = spack.build_environment.ModuleChangePropagator(self.pkg)
globals_for_pkg.configure = Executable(self.configure_abs_path)
globals_for_pkg.propagate_changes_to_mro()
def configure_args(self) -> List[str]:
"""Return the list of all the arguments that must be passed to configure,
except ``--prefix`` which will be pre-pended to the list.
"""
return []
def autoreconf(
self, pkg: AutotoolsPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
) -> None:
"""Not needed usually, configure should be already there"""
# If configure exists nothing needs to be done
if os.path.exists(self.configure_abs_path):
# If .la files are to be installed there's nothing to do
if self.install_libtool_archives:
return
# Else try to regenerate it, which requires a few build dependencies
ensure_build_dependencies_or_raise(
spec=spec,
dependencies=["autoconf", "automake", "libtool"],
error_msg="Cannot generate configure",
)
# Remove the files and create a log of what was removed
libtool_files = fs.find(str(self.pkg.prefix), "*.la", recursive=True)
with fs.safe_remove(*libtool_files):
fs.mkdirp(os.path.dirname(self._removed_la_files_log))
with open(self._removed_la_files_log, mode="w", encoding="utf-8") as f:
f.write("\n".join(libtool_files))
tty.msg("Configure script not found: trying to generate it")
tty.warn("*********************************************************")
tty.warn("* If the default procedure fails, consider implementing *")
tty.warn("* a custom AUTORECONF phase in the package *")
tty.warn("*********************************************************")
with fs.working_dir(self.configure_directory):
# This line is what is needed most of the time
# --install, --verbose, --force
autoreconf_args = ["-ivf"]
autoreconf_args += self.autoreconf_search_path_args
autoreconf_args += self.autoreconf_extra_args
self.pkg.module.autoreconf(*autoreconf_args)
def configure(
self, pkg: AutotoolsPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
) -> None:
"""Run "configure", with the arguments specified by the builder and an
appropriately set prefix.
"""
options = getattr(self.pkg, "configure_flag_args", [])
options += ["--prefix={0}".format(prefix)]
options += self.configure_args()
with fs.working_dir(self.build_directory, create=True):
pkg.module.configure(*options)
def build(
self, pkg: AutotoolsPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
) -> None:
"""Run "make" on the build targets specified by the builder."""
# See https://autotools.io/automake/silent.html
params = ["V=1"]
params += self.build_targets
with fs.working_dir(self.build_directory):
pkg.module.make(*params)
def install(
self, pkg: AutotoolsPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
) -> None:
"""Run "make" on the install targets specified by the builder."""
with fs.working_dir(self.build_directory):
pkg.module.make(*self.install_targets)
spack.phase_callbacks.run_after("build")(execute_build_time_tests)
def check(self) -> None:
"""Run "make" on the ``test`` and ``check`` targets, if found."""
with fs.working_dir(self.build_directory):
self.pkg._if_make_target_execute("test")
self.pkg._if_make_target_execute("check")
# On macOS, force rpaths for shared library IDs and remove duplicate rpaths
spack.phase_callbacks.run_after("install", when="platform=darwin")(apply_macos_rpath_fixups)
def _activate_or_not(
self,
@@ -756,93 +843,6 @@ def _default_generator(is_activated):
args.append(line_generator(activated))
return args
def with_or_without(
self,
name: str,
activation_value: Optional[Union[Callable, str]] = None,
variant: Optional[str] = None,
) -> List[str]:
"""Inspects a variant and returns the arguments that activate
or deactivate the selected feature(s) for the configure options.
This function works on all types of variants. For bool-valued variants
it will return by default ``--with-{name}`` or ``--without-{name}``.
For other kinds of variants it will cycle over the allowed values and
return either ``--with-{value}`` or ``--without-{value}``.
If activation_value is given, then for each possible value of the
variant, the option ``--with-{value}=activation_value(value)`` or
``--without-{value}`` will be added depending on whether or not
``variant=value`` is in the spec.
Args:
name: name of a valid multi-valued variant
activation_value: callable that accepts a single value and returns the parameter to be
used leading to an entry of the type ``--with-{name}={parameter}``.
The special value "prefix" can also be assigned and will return
``spec[name].prefix`` as activation parameter.
Returns:
list of arguments to configure
"""
return self._activate_or_not(name, "with", "without", activation_value, variant)
def enable_or_disable(
self,
name: str,
activation_value: Optional[Union[Callable, str]] = None,
variant: Optional[str] = None,
) -> List[str]:
"""Same as
:meth:`~spack.build_systems.autotools.AutotoolsBuilder.with_or_without`
but substitute ``with`` with ``enable`` and ``without`` with ``disable``.
Args:
name: name of a valid multi-valued variant
activation_value: if present accepts a single value and returns the parameter to be
used leading to an entry of the type ``--enable-{name}={parameter}``
The special value "prefix" can also be assigned and will return
``spec[name].prefix`` as activation parameter.
Returns:
list of arguments to configure
"""
return self._activate_or_not(name, "enable", "disable", activation_value, variant)
spack.phase_callbacks.run_after("install")(execute_install_time_tests)
def installcheck(self) -> None:
"""Run "make" on the ``installcheck`` target, if found."""
with fs.working_dir(self.build_directory):
self.pkg._if_make_target_execute("installcheck")
@spack.phase_callbacks.run_after("install")
def _remove_libtool_archives(self) -> None:
"""Remove all .la files in prefix sub-folders if the package sets
``install_libtool_archives`` to be False.
"""
# If .la files are to be installed there's nothing to do
if self.install_libtool_archives:
return
# Remove the files and create a log of what was removed
libtool_files = fs.find(str(self.pkg.prefix), "*.la", recursive=True)
with fs.safe_remove(*libtool_files):
fs.mkdirp(os.path.dirname(self._removed_la_files_log))
with open(self._removed_la_files_log, mode="w", encoding="utf-8") as f:
f.write("\n".join(libtool_files))
def setup_build_environment(self, env):
if self.spec.platform == "darwin" and macos_version() >= Version("11"):
# Many configure files rely on matching '10.*' for macOS version
# detection and fail to add flags if it shows as version 11.
env.set("MACOSX_DEPLOYMENT_TARGET", "10.16")
# On macOS, force rpaths for shared library IDs and remove duplicate rpaths
spack.phase_callbacks.run_after("install", when="platform=darwin")(apply_macos_rpath_fixups)
def _autoreconf_search_path_args(spec: spack.spec.Spec) -> List[str]:
dirs_seen: Set[Tuple[int, int]] = set()

View File

@@ -12,7 +12,6 @@
import spack.phase_callbacks
import spack.spec
import spack.util.prefix
from spack.directives import depends_on
from .cmake import CMakeBuilder, CMakePackage
@@ -372,10 +371,6 @@ class CachedCMakePackage(CMakePackage):
CMakeBuilder = CachedCMakeBuilder
# These dependencies are assumed in the builder
depends_on("c", type="build")
depends_on("cxx", type="build")
def flag_handler(self, name, flags):
if name in ("cflags", "cxxflags", "cppflags", "fflags"):
return None, None, None # handled in the cmake cache

View File

@@ -70,16 +70,10 @@ def build_directory(self):
"""Return the directory containing the main Cargo.toml."""
return self.pkg.stage.source_path
@property
def std_build_args(self):
"""Standard arguments for ``cargo build`` provided as a property for
convenience of package writers."""
return ["-j", str(self.pkg.module.make_jobs)]
@property
def build_args(self):
"""Arguments for ``cargo build``."""
return []
return ["-j", str(self.pkg.module.make_jobs)]
@property
def check_args(self):
@@ -94,9 +88,7 @@ def build(
) -> None:
"""Runs ``cargo install`` in the source directory"""
with fs.working_dir(self.build_directory):
pkg.module.cargo(
"install", "--root", "out", "--path", ".", *self.std_build_args, *self.build_args
)
pkg.module.cargo("install", "--root", "out", "--path", ".", *self.build_args)
def install(
self, pkg: CargoPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix

View File

@@ -11,7 +11,6 @@
from typing import Any, List, Optional, Tuple
import llnl.util.filesystem as fs
from llnl.util import tty
from llnl.util.lang import stable_partition
import spack.builder
@@ -459,23 +458,11 @@ def cmake(
) -> None:
"""Runs ``cmake`` in the build directory"""
if spec.is_develop:
# skip cmake phase if it is an incremental develop build
# Determine the files that will re-run CMake that are generated from a successful
# configure step based on state
primary_generator = _extract_primary_generator(self.generator)
configure_artifact = "Makefile"
if primary_generator == "Ninja":
configure_artifact = "ninja.build"
if os.path.isfile(os.path.join(self.build_directory, configure_artifact)):
tty.msg(
"Incremental build criteria satisfied."
"Skipping CMake configure step. To force configuration run"
f" `spack clean {pkg.name}`"
)
return
# skip cmake phase if it is an incremental develop build
if spec.is_develop and os.path.isfile(
os.path.join(self.build_directory, "CMakeCache.txt")
):
return
options = self.std_cmake_args
options += self.cmake_args()
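Aside (not part of the diff): the two sides of this hunk differ in how an incremental develop build decides that configure already ran, one checking for the generator's artifact (Makefile or ninja.build) and the other for CMakeCache.txt. A small sketch of the artifact check, assuming the primary generator name is already known; the build path below is invented:

import os

def configure_artifact_exists(build_directory: str, primary_generator: str) -> bool:
    """Check for the file a successful CMake configure step would have generated."""
    artifact = "ninja.build" if primary_generator == "Ninja" else "Makefile"
    return os.path.isfile(os.path.join(build_directory, artifact))

# Usage: skip the cmake phase of a develop build when the artifact is present.
if configure_artifact_exists("/tmp/example-build", "Ninja"):
    print("Incremental build criteria satisfied; skipping configure")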

View File

@@ -15,7 +15,7 @@ class CudaPackage(PackageBase):
"""Auxiliary class which contains CUDA variant, dependencies and conflicts
and is meant to unify and facilitate its usage.
Maintainers: ax3l, Rombur, davidbeckingsale, pauleonix
Maintainers: ax3l, Rombur, davidbeckingsale
"""
# https://docs.nvidia.com/cuda/cuda-compiler-driver-nvcc/index.html#gpu-feature-list
@@ -47,12 +47,6 @@ class CudaPackage(PackageBase):
"89",
"90",
"90a",
"100",
"100a",
"101",
"101a",
"120",
"120a",
)
# FIXME: keep cuda and cuda_arch separate to make usage easier until
@@ -105,56 +99,39 @@ def compute_capabilities(arch_list: Iterable[str]) -> List[str]:
# CUDA version vs Architecture
# https://en.wikipedia.org/wiki/CUDA#GPUs_supported
# https://docs.nvidia.com/cuda/cuda-toolkit-release-notes/index.html#deprecated-features
# Tesla support:
depends_on("cuda@:6.0", when="cuda_arch=10")
depends_on("cuda@:6.5", when="cuda_arch=11")
depends_on("cuda@2.1:6.5", when="cuda_arch=12")
depends_on("cuda@2.1:6.5", when="cuda_arch=13")
# Fermi support:
depends_on("cuda@3.0:8.0", when="cuda_arch=20")
depends_on("cuda@3.2:8.0", when="cuda_arch=21")
# Kepler support:
depends_on("cuda@5.0:10.2", when="cuda_arch=30")
depends_on("cuda@5.0:10.2", when="cuda_arch=32")
depends_on("cuda@5.0:11.8", when="cuda_arch=35")
depends_on("cuda@6.5:11.8", when="cuda_arch=37")
# Maxwell support:
depends_on("cuda@6.0:", when="cuda_arch=50")
depends_on("cuda@6.5:", when="cuda_arch=52")
depends_on("cuda@6.5:", when="cuda_arch=53")
# Pascal support:
depends_on("cuda@8.0:", when="cuda_arch=60")
depends_on("cuda@8.0:", when="cuda_arch=61")
depends_on("cuda@8.0:", when="cuda_arch=62")
# Volta support:
depends_on("cuda@9.0:", when="cuda_arch=70")
# Turing support:
depends_on("cuda@9.0:", when="cuda_arch=72")
depends_on("cuda@10.0:", when="cuda_arch=75")
# Ampere support:
depends_on("cuda@11.0:", when="cuda_arch=80")
depends_on("cuda@11.1:", when="cuda_arch=86")
depends_on("cuda@11.4:", when="cuda_arch=87")
# Ada support:
depends_on("cuda@11.8:", when="cuda_arch=89")
# Hopper support:
depends_on("cuda@12.0:", when="cuda_arch=90")
depends_on("cuda@12.0:", when="cuda_arch=90a")
# Blackwell support:
depends_on("cuda@12.8:", when="cuda_arch=100")
depends_on("cuda@12.8:", when="cuda_arch=100a")
depends_on("cuda@12.8:", when="cuda_arch=101")
depends_on("cuda@12.8:", when="cuda_arch=101a")
depends_on("cuda@12.8:", when="cuda_arch=120")
depends_on("cuda@12.8:", when="cuda_arch=120a")
# From the NVIDIA install guide we know of conflicts for particular
# platforms (linux, darwin), architectures (x86, powerpc) and compilers
# (gcc, clang). We don't restrict %gcc and %clang conflicts to
@@ -186,7 +163,6 @@ def compute_capabilities(arch_list: Iterable[str]) -> List[str]:
conflicts("%gcc@12:", when="+cuda ^cuda@:11.8")
conflicts("%gcc@13:", when="+cuda ^cuda@:12.3")
conflicts("%gcc@14:", when="+cuda ^cuda@:12.6")
conflicts("%gcc@15:", when="+cuda ^cuda@:12.8")
conflicts("%clang@12:", when="+cuda ^cuda@:11.4.0")
conflicts("%clang@13:", when="+cuda ^cuda@:11.5")
conflicts("%clang@14:", when="+cuda ^cuda@:11.7")
@@ -195,7 +171,6 @@ def compute_capabilities(arch_list: Iterable[str]) -> List[str]:
conflicts("%clang@17:", when="+cuda ^cuda@:12.3")
conflicts("%clang@18:", when="+cuda ^cuda@:12.5")
conflicts("%clang@19:", when="+cuda ^cuda@:12.6")
conflicts("%clang@20:", when="+cuda ^cuda@:12.8")
# https://gist.github.com/ax3l/9489132#gistcomment-3860114
conflicts("%gcc@10", when="+cuda ^cuda@:11.4.0")

View File

@@ -9,7 +9,7 @@
import spack.phase_callbacks
import spack.spec
import spack.util.prefix
from spack.directives import build_system, depends_on
from spack.directives import build_system, extends
from spack.multimethod import when
from ._checks import BuilderWithDefaults, execute_install_time_tests
@@ -28,7 +28,9 @@ class GoPackage(spack.package_base.PackageBase):
build_system("go")
with when("build_system=go"):
depends_on("go", type="build")
# TODO: this seems like it should be depends_on, see
# setup_dependent_build_environment in go for why I kept it like this
extends("go@1.14:", type="build")
@spack.builder.builder("go")
@@ -71,7 +73,6 @@ class GoBuilder(BuilderWithDefaults):
def setup_build_environment(self, env):
env.set("GO111MODULE", "on")
env.set("GOTOOLCHAIN", "local")
env.set("GOPATH", fs.join_path(self.pkg.stage.path, "go"))
@property
def build_directory(self):
@@ -82,15 +83,7 @@ def build_directory(self):
def build_args(self):
"""Arguments for ``go build``."""
# Pass ldflags -s = --strip-all and -w = --no-warnings by default
return [
"-p",
str(self.pkg.module.make_jobs),
"-modcacherw",
"-ldflags",
"-s -w",
"-o",
f"{self.pkg.name}",
]
return ["-modcacherw", "-ldflags", "-s -w", "-o", f"{self.pkg.name}"]
@property
def check_args(self):

View File

@@ -48,9 +48,6 @@ class MesonPackage(spack.package_base.PackageBase):
variant("strip", default=False, description="Strip targets on install")
depends_on("meson", type="build")
depends_on("ninja", type="build")
# Meson uses pkg-config for dependency detection, and this dependency is
# often overlooked by packages that use meson as a build system.
depends_on("pkgconfig", type="build")
# Python detection in meson requires distutils to be importable, but distutils no longer
# exists in Python 3.12. In Spack, we can't use setuptools as distutils replacement,
# because the distutils-precedence.pth startup file that setuptools ships with is not run

View File

@@ -142,7 +142,7 @@ def setup_run_environment(self, env):
$ source {prefix}/{component}/{version}/env/vars.sh
"""
# Only if environment modifications are desired (default is +envmods)
if "+envmods" in self.spec:
if "~envmods" not in self.spec:
env.extend(
EnvironmentModifications.from_sourcing_file(
self.component_prefix.env.join("vars.sh"), *self.env_script_args

View File

@@ -264,17 +264,16 @@ def update_external_dependencies(self, extendee_spec=None):
# Ensure architecture information is present
if not python.architecture:
host_platform = spack.platforms.host()
host_os = host_platform.default_operating_system()
host_target = host_platform.default_target()
host_os = host_platform.operating_system("default_os")
host_target = host_platform.target("default_target")
python.architecture = spack.spec.ArchSpec(
(str(host_platform), str(host_os), str(host_target))
)
else:
if not python.architecture.platform:
python.architecture.platform = spack.platforms.host()
platform = spack.platforms.by_name(python.architecture.platform)
if not python.architecture.os:
python.architecture.os = platform.default_operating_system()
python.architecture.os = "default_os"
if not python.architecture.target:
python.architecture.target = archspec.cpu.host().family.name

View File

@@ -43,7 +43,6 @@ class SIPPackage(spack.package_base.PackageBase):
with when("build_system=sip"):
extends("python", type=("build", "link", "run"))
depends_on("py-sip", type="build")
depends_on("gmake", type="build")
@property
def import_modules(self):

View File

@@ -14,9 +14,9 @@
import zipfile
from collections import namedtuple
from typing import Callable, Dict, List, Set
from urllib.request import Request
from urllib.error import HTTPError, URLError
from urllib.request import HTTPHandler, Request, build_opener
import llnl.path
import llnl.util.filesystem as fs
import llnl.util.tty as tty
from llnl.util.tty.color import cescape, colorize
@@ -63,8 +63,6 @@
PushResult = namedtuple("PushResult", "success url")
urlopen = web_util.urlopen # alias for mocking in tests
def get_change_revisions():
"""If this is a git repo get the revisions to use when checking
@@ -84,9 +82,6 @@ def get_stack_changed(env_path, rev1="HEAD^", rev2="HEAD"):
whether or not the stack was changed. Returns True if the environment
manifest changed between the provided revisions (or additionally if the
`.gitlab-ci.yml` file itself changed). Returns False otherwise."""
# git returns posix paths always, normalize input to be compatible
# with that
env_path = llnl.path.convert_to_posix_path(env_path)
git = spack.util.git.git()
if git:
with fs.working_dir(spack.paths.prefix):
@@ -477,9 +472,12 @@ def generate_pipeline(env: ev.Environment, args) -> None:
# Use all unpruned specs to populate the build group for this set
cdash_config = cfg.get("cdash")
if options.cdash_handler and options.cdash_handler.auth_token:
options.cdash_handler.populate_buildgroup(
[options.cdash_handler.build_name(s) for s in pipeline_specs]
)
try:
options.cdash_handler.populate_buildgroup(
[options.cdash_handler.build_name(s) for s in pipeline_specs]
)
except (SpackError, HTTPError, URLError, TimeoutError) as err:
tty.warn(f"Problem populating buildgroup: {err}")
elif cdash_config:
# warn only if there was actually a CDash configuration.
tty.warn("Unable to populate buildgroup without CDash credentials")
@@ -616,7 +614,7 @@ def copy_test_logs_to_artifacts(test_stage, job_test_dir):
copy_files_to_artifacts(os.path.join(test_stage, "*", "*.txt"), job_test_dir)
def download_and_extract_artifacts(url, work_dir) -> str:
def download_and_extract_artifacts(url, work_dir):
"""Look for gitlab artifacts.zip at the given url, and attempt to download
and extract the contents into the given work_dir
@@ -624,10 +622,6 @@ def download_and_extract_artifacts(url, work_dir) -> str:
url (str): Complete url to artifacts.zip file
work_dir (str): Path to destination where artifacts should be extracted
Output:
Artifacts root path relative to the archive root
"""
tty.msg(f"Fetching artifacts from: {url}")
@@ -637,33 +631,31 @@ def download_and_extract_artifacts(url, work_dir) -> str:
if token:
headers["PRIVATE-TOKEN"] = token
request = Request(url, headers=headers, method="GET")
opener = build_opener(HTTPHandler)
request = Request(url, headers=headers)
request.get_method = lambda: "GET"
response = opener.open(request, timeout=SPACK_CDASH_TIMEOUT)
response_code = response.getcode()
if response_code != 200:
msg = f"Error response code ({response_code}) in reproduce_ci_job"
raise SpackError(msg)
artifacts_zip_path = os.path.join(work_dir, "artifacts.zip")
os.makedirs(work_dir, exist_ok=True)
try:
response = urlopen(request, timeout=SPACK_CDASH_TIMEOUT)
with open(artifacts_zip_path, "wb") as out_file:
shutil.copyfileobj(response, out_file)
if not os.path.exists(work_dir):
os.makedirs(work_dir)
with zipfile.ZipFile(artifacts_zip_path) as zip_file:
zip_file.extractall(work_dir)
# Get the artifact root
artifact_root = ""
for f in zip_file.filelist:
if "spack.lock" in f.filename:
artifact_root = os.path.dirname(os.path.dirname(f.filename))
break
except OSError as e:
raise SpackError(f"Error fetching artifacts: {e}")
finally:
try:
os.remove(artifacts_zip_path)
except FileNotFoundError:
# If the file doesn't exist we are already raising
pass
with open(artifacts_zip_path, "wb") as out_file:
shutil.copyfileobj(response, out_file)
return artifact_root
zip_file = zipfile.ZipFile(artifacts_zip_path)
zip_file.extractall(work_dir)
zip_file.close()
os.remove(artifacts_zip_path)
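Aside (not part of the diff): one side of this hunk derives the artifact root by locating the spack.lock entry in the downloaded zip and taking its grandparent directory. A standalone sketch of that lookup against an in-memory zip; the entry name below is invented:

import io
import os
import zipfile

def artifact_root_from_zip(zip_bytes: bytes) -> str:
    """Return the directory two levels above the first spack.lock entry, or an empty string."""
    with zipfile.ZipFile(io.BytesIO(zip_bytes)) as zf:
        for info in zf.filelist:
            if "spack.lock" in info.filename:
                return os.path.dirname(os.path.dirname(info.filename))
    return ""

# Build a tiny zip in memory to exercise the helper.
buf = io.BytesIO()
with zipfile.ZipFile(buf, "w") as zf:
    zf.writestr("jobs_scratch_dir/concrete_environment/spack.lock", "{}")
print(artifact_root_from_zip(buf.getvalue()))  # jobs_scratch_dir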
def get_spack_info():
@@ -777,7 +769,7 @@ def setup_spack_repro_version(repro_dir, checkout_commit, merge_commit=None):
return True
def reproduce_ci_job(url, work_dir, autostart, gpg_url, runtime, use_local_head):
def reproduce_ci_job(url, work_dir, autostart, gpg_url, runtime):
"""Given a url to gitlab artifacts.zip from a failed 'spack ci rebuild' job,
attempt to set up an environment in which the failure can be reproduced
locally. This entails the following:
@@ -791,11 +783,8 @@ def reproduce_ci_job(url, work_dir, autostart, gpg_url, runtime, use_local_head)
commands to run to reproduce the build once inside the container.
"""
work_dir = os.path.realpath(work_dir)
if os.path.exists(work_dir) and os.listdir(work_dir):
raise SpackError(f"Cannot run reproducer in non-emptry working dir:\n {work_dir}")
platform_script_ext = "ps1" if IS_WINDOWS else "sh"
artifact_root = download_and_extract_artifacts(url, work_dir)
download_and_extract_artifacts(url, work_dir)
gpg_path = None
if gpg_url:
@@ -857,9 +846,6 @@ def reproduce_ci_job(url, work_dir, autostart, gpg_url, runtime, use_local_head)
with open(repro_file, encoding="utf-8") as fd:
repro_details = json.load(fd)
spec_file = fs.find(work_dir, repro_details["job_spec_json"])[0]
reproducer_spec = spack.spec.Spec.from_specfile(spec_file)
repro_dir = os.path.dirname(repro_file)
rel_repro_dir = repro_dir.replace(work_dir, "").lstrip(os.path.sep)
@@ -920,20 +906,17 @@ def reproduce_ci_job(url, work_dir, autostart, gpg_url, runtime, use_local_head)
commit_regex = re.compile(r"commit\s+([^\s]+)")
merge_commit_regex = re.compile(r"Merge\s+([^\s]+)\s+into\s+([^\s]+)")
if use_local_head:
commit_1 = "HEAD"
# Try the more specific merge commit regex first
m = merge_commit_regex.search(spack_info)
if m:
# This was a merge commit and we captured the parents
commit_1 = m.group(1)
commit_2 = m.group(2)
else:
# Try the more specific merge commit regex first
m = merge_commit_regex.search(spack_info)
# Not a merge commit, just get the commit sha
m = commit_regex.search(spack_info)
if m:
# This was a merge commit and we captured the parents
commit_1 = m.group(1)
commit_2 = m.group(2)
else:
# Not a merge commit, just get the commit sha
m = commit_regex.search(spack_info)
if m:
commit_1 = m.group(1)
setup_result = False
if commit_1:
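Aside (not part of the diff): the regexes above first try to match a "Merge <sha> into <sha>" line and only then fall back to a plain "commit <sha>" line. A standalone demonstration of that precedence; the log text below is invented:

import re

commit_regex = re.compile(r"commit\s+([^\s]+)")
merge_commit_regex = re.compile(r"Merge\s+([^\s]+)\s+into\s+([^\s]+)")

def parse_commits(spack_info: str):
    """Return (commit_1, commit_2); commit_2 is None for non-merge commits."""
    m = merge_commit_regex.search(spack_info)
    if m:
        return m.group(1), m.group(2)
    m = commit_regex.search(spack_info)
    return (m.group(1), None) if m else (None, None)

print(parse_commits("Merge abc123 into def456"))  # ('abc123', 'def456')
print(parse_commits("commit abc123"))             # ('abc123', None)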
@@ -1008,8 +991,6 @@ def reproduce_ci_job(url, work_dir, autostart, gpg_url, runtime, use_local_head)
"entrypoint", entrypoint_script, work_dir, run=False, exit_on_failure=False
)
# Attempt to create a unique name for the reproducer container
container_suffix = "_" + reproducer_spec.dag_hash() if reproducer_spec else ""
docker_command = [
runtime,
"run",
@@ -1017,14 +998,14 @@ def reproduce_ci_job(url, work_dir, autostart, gpg_url, runtime, use_local_head)
"-t",
"--rm",
"--name",
f"spack_reproducer{container_suffix}",
"spack_reproducer",
"-v",
":".join([work_dir, mounted_workdir, "Z"]),
"-v",
":".join(
[
os.path.join(work_dir, artifact_root),
os.path.join(mount_as_dir, artifact_root),
os.path.join(work_dir, "jobs_scratch_dir"),
os.path.join(mount_as_dir, "jobs_scratch_dir"),
"Z",
]
),

View File

@@ -1,21 +1,23 @@
# Copyright Spack Project Developers. See COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import codecs
import copy
import json
import os
import re
import ssl
import sys
import time
from collections import deque
from enum import Enum
from typing import Dict, Generator, List, Optional, Set, Tuple
from urllib.parse import quote, urlencode, urlparse
from urllib.request import Request
from urllib.request import HTTPHandler, HTTPSHandler, Request, build_opener
import llnl.util.filesystem as fs
import llnl.util.tty as tty
from llnl.util.lang import memoized
from llnl.util.lang import Singleton, memoized
import spack.binary_distribution as bindist
import spack.config as cfg
@@ -33,11 +35,32 @@
from spack.reporters.cdash import SPACK_CDASH_TIMEOUT
from spack.reporters.cdash import build_stamp as cdash_build_stamp
def _urlopen():
error_handler = web_util.SpackHTTPDefaultErrorHandler()
# One opener with HTTPS ssl enabled
with_ssl = build_opener(
HTTPHandler(), HTTPSHandler(context=web_util.ssl_create_default_context()), error_handler
)
# One opener with HTTPS ssl disabled
without_ssl = build_opener(
HTTPHandler(), HTTPSHandler(context=ssl._create_unverified_context()), error_handler
)
# And dynamically dispatch based on the config:verify_ssl.
def dispatch_open(fullurl, data=None, timeout=None, verify_ssl=True):
opener = with_ssl if verify_ssl else without_ssl
timeout = timeout or cfg.get("config:connect_timeout", 1)
return opener.open(fullurl, data, timeout)
return dispatch_open
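For illustration only (not from the diff): the helper above builds one opener that verifies certificates and one that does not, then dispatches per request on verify_ssl. A minimal standalone version with the standard library, where the Spack error handler and config lookup are replaced by plain defaults:

import ssl
from urllib.request import HTTPHandler, HTTPSHandler, build_opener

def make_dispatching_opener():
    with_ssl = build_opener(HTTPHandler(), HTTPSHandler(context=ssl.create_default_context()))
    without_ssl = build_opener(HTTPHandler(), HTTPSHandler(context=ssl._create_unverified_context()))

    def dispatch_open(fullurl, data=None, timeout=10, verify_ssl=True):
        # pick the opener per request based on the caller's verify_ssl flag
        opener = with_ssl if verify_ssl else without_ssl
        return opener.open(fullurl, data, timeout)

    return dispatch_open

urlopen = make_dispatching_opener()
# urlopen("https://example.com", verify_ssl=False) would skip certificate checks.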
IS_WINDOWS = sys.platform == "win32"
SPACK_RESERVED_TAGS = ["public", "protected", "notary"]
# this exists purely for testing purposes
_urlopen = web_util.urlopen
_dyn_mapping_urlopener = Singleton(_urlopen)
def copy_files_to_artifacts(src, artifacts_dir):
@@ -256,25 +279,26 @@ def copy_test_results(self, source, dest):
reports = fs.join_path(source, "*_Test*.xml")
copy_files_to_artifacts(reports, dest)
def create_buildgroup(self, headers, url, group_name, group_type):
def create_buildgroup(self, opener, headers, url, group_name, group_type):
data = {"newbuildgroup": group_name, "project": self.project, "type": group_type}
enc_data = json.dumps(data).encode("utf-8")
request = Request(url, data=enc_data, headers=headers)
try:
response_text = _urlopen(request, timeout=SPACK_CDASH_TIMEOUT).read()
except OSError as e:
tty.warn(f"Failed to create CDash buildgroup: {e}")
response = opener.open(request, timeout=SPACK_CDASH_TIMEOUT)
response_code = response.getcode()
if response_code not in [200, 201]:
msg = f"Creating buildgroup failed (response code = {response_code})"
tty.warn(msg)
return None
try:
response_json = json.loads(response_text)
return response_json["id"]
except (json.JSONDecodeError, KeyError) as e:
tty.warn(f"Failed to parse CDash response: {e}")
return None
response_text = response.read()
response_json = json.loads(response_text)
build_group_id = response_json["id"]
return build_group_id
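Aside (not part of the diff): create_buildgroup posts a small JSON payload to CDash's buildgroup API and reads the new group's id from the JSON response; one side of the hunk folds the error handling into try/except. A hedged standalone sketch with urllib, where the URL, project, token and the Authorization header format are placeholders:

import json
from urllib.request import Request, urlopen

def create_buildgroup(url: str, project: str, group_name: str, group_type: str, token: str):
    """Return the buildgroup id, or None when the request or response parsing fails."""
    data = json.dumps(
        {"newbuildgroup": group_name, "project": project, "type": group_type}
    ).encode("utf-8")
    headers = {"Authorization": f"Bearer {token}", "Content-Type": "application/json"}
    request = Request(url, data=data, headers=headers)
    try:
        response_text = urlopen(request, timeout=10).read()
    except OSError as e:
        print(f"Failed to create CDash buildgroup: {e}")
        return None
    try:
        return json.loads(response_text)["id"]
    except (json.JSONDecodeError, KeyError) as e:
        print(f"Failed to parse CDash response: {e}")
        return None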
def populate_buildgroup(self, job_names):
url = f"{self.url}/api/v1/buildgroup.php"
@@ -284,11 +308,16 @@ def populate_buildgroup(self, job_names):
"Content-Type": "application/json",
}
parent_group_id = self.create_buildgroup(headers, url, self.build_group, "Daily")
group_id = self.create_buildgroup(headers, url, f"Latest {self.build_group}", "Latest")
opener = build_opener(HTTPHandler)
parent_group_id = self.create_buildgroup(opener, headers, url, self.build_group, "Daily")
group_id = self.create_buildgroup(
opener, headers, url, f"Latest {self.build_group}", "Latest"
)
if not parent_group_id or not group_id:
tty.warn(f"Failed to create or retrieve buildgroups for {self.build_group}")
msg = f"Failed to create or retrieve buildgroups for {self.build_group}"
tty.warn(msg)
return
data = {
@@ -300,12 +329,15 @@ def populate_buildgroup(self, job_names):
enc_data = json.dumps(data).encode("utf-8")
request = Request(url, data=enc_data, headers=headers, method="PUT")
request = Request(url, data=enc_data, headers=headers)
request.get_method = lambda: "PUT"
try:
_urlopen(request, timeout=SPACK_CDASH_TIMEOUT)
except OSError as e:
tty.warn(f"Failed to populate CDash buildgroup: {e}")
response = opener.open(request, timeout=SPACK_CDASH_TIMEOUT)
response_code = response.getcode()
if response_code != 200:
msg = f"Error response code ({response_code}) in populate_buildgroup"
tty.warn(msg)
def report_skipped(self, spec: spack.spec.Spec, report_dir: str, reason: Optional[str]):
"""Explicitly report skipping testing of a spec (e.g., it's CI
@@ -703,6 +735,9 @@ def _apply_section(dest, src):
for value in header.values():
value = os.path.expandvars(value)
verify_ssl = mapping.get("verify_ssl", spack.config.get("config:verify_ssl", True))
timeout = mapping.get("timeout", spack.config.get("config:connect_timeout", 1))
required = mapping.get("require", [])
allowed = mapping.get("allow", [])
ignored = mapping.get("ignore", [])
@@ -736,15 +771,19 @@ def job_query(job):
endpoint_url._replace(query=query).geturl(), headers=header, method="GET"
)
try:
response = _urlopen(request)
config = json.load(response)
response = _dyn_mapping_urlopener(
request, verify_ssl=verify_ssl, timeout=timeout
)
except Exception as e:
# For now just ignore any errors from dynamic mapping and continue
# This is still experimental, and failures should not stop CI
# from running normally
tty.warn(f"Failed to fetch dynamic mapping for query:\n\t{query}: {e}")
tty.warn(f"Failed to fetch dynamic mapping for query:\n\t{query}")
tty.warn(f"{e}")
continue
config = json.load(codecs.getreader("utf-8")(response))
# Strip ignore keys
if ignored:
for key in ignored:

View File

@@ -3,7 +3,6 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import collections
import warnings
import archspec.cpu
@@ -52,10 +51,10 @@ def setup_parser(subparser):
"-t", "--target", action="store_true", default=False, help="print only the target"
)
parts2.add_argument(
"-f", "--frontend", action="store_true", default=False, help="print frontend (DEPRECATED)"
"-f", "--frontend", action="store_true", default=False, help="print frontend"
)
parts2.add_argument(
"-b", "--backend", action="store_true", default=False, help="print backend (DEPRECATED)"
"-b", "--backend", action="store_true", default=False, help="print backend"
)
@@ -99,14 +98,15 @@ def arch(parser, args):
display_targets(archspec.cpu.TARGETS)
return
os_args, target_args = "default_os", "default_target"
if args.frontend:
warnings.warn("the argument --frontend is deprecated, and will be removed in Spack v1.0")
os_args, target_args = "frontend", "frontend"
elif args.backend:
warnings.warn("the argument --backend is deprecated, and will be removed in Spack v1.0")
os_args, target_args = "backend", "backend"
host_platform = spack.platforms.host()
host_os = host_platform.default_operating_system()
host_target = host_platform.default_target()
host_os = host_platform.operating_system(os_args)
host_target = host_platform.target(target_args)
if args.family:
host_target = host_target.family
elif args.generic:

View File

@@ -1,7 +1,7 @@
# Copyright Spack Project Developers. See COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os
import os.path
import shutil
import sys
import tempfile
@@ -436,7 +436,6 @@ def write_metadata(subdir, metadata):
shutil.copy(spack.util.path.canonicalize_path(GNUPG_JSON), abs_directory)
shutil.copy(spack.util.path.canonicalize_path(PATCHELF_JSON), abs_directory)
instructions += cmd.format("local-binaries", rel_directory)
instructions += " % spack buildcache update-index <final-path>/bootstrap_cache\n"
print(instructions)

View File

@@ -4,7 +4,7 @@
import re
import sys
from typing import Dict, Optional, Tuple
from typing import Dict, Optional
import llnl.string
import llnl.util.lang
@@ -181,11 +181,7 @@ def checksum(parser, args):
print()
if args.add_to_package:
path = spack.repo.PATH.filename_for_package_name(pkg.name)
num_versions_added = add_versions_to_pkg(path, version_lines)
tty.msg(f"Added {num_versions_added} new versions to {pkg.name} in {path}")
if not args.batch and sys.stdin.isatty():
editor(path)
add_versions_to_package(pkg, version_lines, args.batch)
def print_checksum_status(pkg: PackageBase, version_hashes: dict):
@@ -231,9 +227,20 @@ def print_checksum_status(pkg: PackageBase, version_hashes: dict):
tty.die("Invalid checksums found.")
def _update_version_statements(package_src: str, version_lines: str) -> Tuple[int, str]:
"""Returns a tuple of number of versions added and the package's modified contents."""
def add_versions_to_package(pkg: PackageBase, version_lines: str, is_batch: bool):
"""
Add checksummed versions to a package's instructions and open a user's
editor so they may double-check the work of the function.
Args:
pkg (spack.package_base.PackageBase): A package class for a given package in Spack.
version_lines (str): A string of rendered version lines.
"""
# Get filename and path for package
filename = spack.repo.PATH.filename_for_package_name(pkg.name)
num_versions_added = 0
version_statement_re = re.compile(r"([\t ]+version\([^\)]*\))")
version_re = re.compile(r'[\t ]+version\(\s*"([^"]+)"[^\)]*\)')
@@ -245,34 +252,33 @@ def _update_version_statements(package_src: str, version_lines: str) -> Tuple[in
if match:
new_versions.append((Version(match.group(1)), ver_line))
split_contents = version_statement_re.split(package_src)
with open(filename, "r+", encoding="utf-8") as f:
contents = f.read()
split_contents = version_statement_re.split(contents)
for i, subsection in enumerate(split_contents):
# If there are no more versions to add we should exit
if len(new_versions) <= 0:
break
for i, subsection in enumerate(split_contents):
# If there are no more versions to add we should exit
if len(new_versions) <= 0:
break
# Check if the section contains a version
contents_version = version_re.match(subsection)
if contents_version is not None:
parsed_version = Version(contents_version.group(1))
# Check if the section contains a version
contents_version = version_re.match(subsection)
if contents_version is not None:
parsed_version = Version(contents_version.group(1))
if parsed_version < new_versions[0][0]:
split_contents[i:i] = [new_versions.pop(0)[1], " # FIXME", "\n"]
num_versions_added += 1
if parsed_version < new_versions[0][0]:
split_contents[i:i] = [new_versions.pop(0)[1], " # FIXME", "\n"]
num_versions_added += 1
elif parsed_version == new_versions[0][0]:
new_versions.pop(0)
elif parsed_version == new_versions[0][0]:
new_versions.pop(0)
return num_versions_added, "".join(split_contents)
# Seek back to the start of the file so we can rewrite the file contents.
f.seek(0)
f.writelines("".join(split_contents))
tty.msg(f"Added {num_versions_added} new versions to {pkg.name}")
tty.msg(f"Open {filename} to review the additions.")
def add_versions_to_pkg(path: str, version_lines: str) -> int:
"""Add new versions to a package.py file. Returns the number of versions added."""
with open(path, "r", encoding="utf-8") as f:
package_src = f.read()
num_versions_added, package_src = _update_version_statements(package_src, version_lines)
if num_versions_added > 0:
with open(path, "w", encoding="utf-8") as f:
f.write(package_src)
return num_versions_added
if sys.stdout.isatty() and not is_batch:
editor(filename)
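Aside (not part of the diff): both versions split the package source on version(...) statements and insert each new, higher version just before the first existing lower one, keeping the list sorted newest-first. A standalone sketch on a small invented package string, with integer tuples standing in for Spack's Version ordering:

import re

version_statement_re = re.compile(r"([\t ]+version\([^\)]*\))")
version_re = re.compile(r'[\t ]+version\(\s*"([^"]+)"[^\)]*\)')

def parse(v: str):
    return tuple(int(p) for p in v.split("."))

package_src = '''class Demo:
    version("1.2", sha256="aaaa")
    version("1.0", sha256="bbbb")
'''
new_versions = [(parse("1.4"), '    version("1.4", sha256="cccc")')]

split_contents = version_statement_re.split(package_src)
for i, subsection in enumerate(split_contents):
    if not new_versions:
        break
    match = version_re.match(subsection)
    if match and parse(match.group(1)) < new_versions[0][0]:
        # insert the new statement (plus a FIXME marker) right before this lower version
        split_contents[i:i] = [new_versions.pop(0)[1], "  # FIXME", "\n"]

print("".join(split_contents))  # 1.4 now appears above 1.2 and 1.0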

View File

@@ -176,11 +176,6 @@ def setup_parser(subparser):
reproduce.add_argument(
"-s", "--autostart", help="Run docker reproducer automatically", action="store_true"
)
reproduce.add_argument(
"--use-local-head",
help="Use the HEAD of the local Spack instead of reproducing a commit",
action="store_true",
)
gpg_group = reproduce.add_mutually_exclusive_group(required=False)
gpg_group.add_argument(
"--gpg-file", help="Path to public GPG key for validating binary cache installs"
@@ -613,12 +608,7 @@ def ci_reproduce(args):
gpg_key_url = None
return spack_ci.reproduce_ci_job(
args.job_url,
args.working_dir,
args.autostart,
gpg_key_url,
args.runtime,
args.use_local_head,
args.job_url, args.working_dir, args.autostart, gpg_key_url, args.runtime
)

View File

@@ -4,7 +4,7 @@
import argparse
import os
import os.path
import textwrap
from llnl.util.lang import stable_partition
@@ -528,6 +528,7 @@ def __call__(self, parser, namespace, values, option_string):
# the const from the constructor or a value from the CLI.
# Note that this is only called if the argument is actually
# specified on the command line.
spack.config.CONFIG.ensure_scope_ordering()
spack.config.set(self.config_path, self.const, scope="command_line")

View File

@@ -2,6 +2,7 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os
import os.path
import llnl.util.tty

View File

@@ -2,11 +2,23 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os
import platform
import re
import sys
from datetime import datetime
from glob import glob
import llnl.util.tty as tty
from llnl.util.filesystem import working_dir
import spack
import spack.paths
import spack.platforms
import spack.spec
import spack.store
import spack.util.git
from spack.util.executable import which
description = "debugging commands for troubleshooting Spack"
section = "developer"
@@ -15,13 +27,67 @@
def setup_parser(subparser):
sp = subparser.add_subparsers(metavar="SUBCOMMAND", dest="debug_command")
sp.add_parser("create-db-tarball", help="create a tarball of Spack's installation metadata")
sp.add_parser("report", help="print information useful for bug reports")
def _debug_tarball_suffix():
now = datetime.now()
suffix = now.strftime("%Y-%m-%d-%H%M%S")
git = spack.util.git.git()
if not git:
return "nobranch-nogit-%s" % suffix
with working_dir(spack.paths.prefix):
if not os.path.isdir(".git"):
return "nobranch.nogit.%s" % suffix
# Get symbolic branch name and strip any special chars (mainly '/')
symbolic = git("rev-parse", "--abbrev-ref", "--short", "HEAD", output=str).strip()
symbolic = re.sub(r"[^\w.-]", "-", symbolic)
# Get the commit hash too.
commit = git("rev-parse", "--short", "HEAD", output=str).strip()
if symbolic == commit:
return "nobranch.%s.%s" % (commit, suffix)
else:
return "%s.%s.%s" % (symbolic, commit, suffix)
def create_db_tarball(args):
tar = which("tar")
tarball_name = "spack-db.%s.tar.gz" % _debug_tarball_suffix()
tarball_path = os.path.abspath(tarball_name)
base = os.path.basename(str(spack.store.STORE.root))
transform_args = []
# Currently --transform and -s are not supported by Windows native tar
if "GNU" in tar("--version", output=str):
transform_args = ["--transform", "s/^%s/%s/" % (base, tarball_name)]
elif sys.platform != "win32":
transform_args = ["-s", "/^%s/%s/" % (base, tarball_name)]
wd = os.path.dirname(str(spack.store.STORE.root))
with working_dir(wd):
files = [spack.store.STORE.db._index_path]
files += glob("%s/*/*/*/.spack/spec.json" % base)
files += glob("%s/*/*/*/.spack/spec.yaml" % base)
files = [os.path.relpath(f) for f in files]
args = ["-czf", tarball_path]
args += transform_args
args += files
tar(*args)
tty.msg("Created %s" % tarball_name)
def report(args):
host_platform = spack.platforms.host()
host_os = host_platform.default_operating_system()
host_target = host_platform.default_target()
host_os = host_platform.operating_system("frontend")
host_target = host_platform.target("frontend")
architecture = spack.spec.ArchSpec((str(host_platform), str(host_os), str(host_target)))
print("* **Spack:**", spack.get_version())
print("* **Python:**", platform.python_version())
@@ -29,5 +95,5 @@ def report(args):
def debug(parser, args):
if args.debug_command == "report":
report(args)
action = {"create-db-tarball": create_db_tarball, "report": report}
action[args.debug_command](args)

View File

@@ -9,9 +9,9 @@
import spack.cmd
import spack.environment as ev
import spack.package_base
import spack.store
from spack.cmd.common import arguments
from spack.solver.input_analysis import create_graph_analyzer
description = "show dependencies of a package"
section = "basic"
@@ -68,17 +68,15 @@ def dependencies(parser, args):
else:
spec = specs[0]
dependencies, virtuals, _ = create_graph_analyzer().possible_dependencies(
dependencies = spack.package_base.possible_dependencies(
spec,
transitive=args.transitive,
expand_virtuals=args.expand_virtuals,
allowed_deps=args.deptype,
depflag=args.deptype,
)
if not args.expand_virtuals:
dependencies.update(virtuals)
if spec.name in dependencies:
dependencies.remove(spec.name)
del dependencies[spec.name]
if dependencies:
colify(sorted(dependencies))

View File

@@ -125,7 +125,7 @@ def develop(parser, args):
version = spec.versions.concrete_range_as_version
if not version:
# look up the maximum version so infinity versions are preferred for develop
version = max(spack.repo.PATH.get_pkg_class(spec.fullname).versions.keys())
version = max(spec.package_class.versions.keys())
tty.msg(f"Defaulting to highest version: {spec.name}@{version}")
spec.versions = spack.version.VersionList([version])

View File

@@ -110,7 +110,10 @@ def external_find(args):
# Note that KeyboardInterrupt does not subclass Exception
# (so CTRL-C will terminate the program as expected).
skip_msg = "Skipping manifest and continuing with other external checks"
if isinstance(e, OSError) and e.errno in (errno.EPERM, errno.EACCES):
if (isinstance(e, IOError) or isinstance(e, OSError)) and e.errno in [
errno.EPERM,
errno.EACCES,
]:
# The manifest file does not have sufficient permissions enabled:
# print a warning and keep going
tty.warn("Unable to read manifest due to insufficient permissions.", skip_msg)

View File

@@ -54,6 +54,10 @@
@m{target=target} specific <target> processor
@m{arch=platform-os-target} shortcut for all three above
cross-compiling:
@m{os=backend} or @m{os=be} build for compute node (backend)
@m{os=frontend} or @m{os=fe} build for login node (frontend)
dependencies:
^dependency [constraints] specify constraints on dependencies
^@K{/hash} build with a specific installed

View File

@@ -545,7 +545,7 @@ def _not_license_excluded(self, x):
package does not explicitly forbid redistributing source."""
if self.private:
return True
elif spack.repo.PATH.get_pkg_class(x.fullname).redistribute_source(x):
elif x.package_class.redistribute_source(x):
return True
else:
tty.debug(

View File

@@ -5,7 +5,7 @@
"""Implementation details of the ``spack module`` command."""
import collections
import os
import os.path
import shutil
import sys

View File

@@ -41,11 +41,7 @@ def providers(parser, args):
specs = spack.cmd.parse_specs(args.virtual_package)
# Check prerequisites
non_virtual = [
str(s)
for s in specs
if not spack.repo.PATH.is_virtual(s.name) or s.name not in valid_virtuals
]
non_virtual = [str(s) for s in specs if not s.virtual or s.name not in valid_virtuals]
if non_virtual:
msg = "non-virtual specs cannot be part of the query "
msg += "[{0}]\n".format(", ".join(non_virtual))

View File

@@ -6,7 +6,7 @@
import os
import re
import sys
from itertools import islice, zip_longest
from itertools import zip_longest
from typing import Dict, List, Optional
import llnl.util.tty as tty
@@ -423,8 +423,7 @@ def _run_import_check(
continue
for m in is_abs_import.finditer(contents):
# Find at most two occurrences: the first is the import itself, the second is its usage.
if len(list(islice(re.finditer(rf"{re.escape(m.group(1))}(?!\w)", contents), 2))) == 1:
if contents.count(m.group(1)) == 1:
to_remove.append(m.group(0))
exit_code = 1
print(f"{pretty_path}: redundant import: {m.group(1)}", file=out)
@@ -439,7 +438,7 @@ def _run_import_check(
module = _module_part(root, m.group(0))
if not module or module in to_add:
continue
if re.search(rf"import {re.escape(module)}(?!\w|\.)", contents):
if re.search(rf"import {re.escape(module)}\b(?!\.)", contents):
continue
to_add.add(module)
exit_code = 1
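Aside (not part of the diff): the redundant-import check flags a module whose name occurs only once, namely in the import statement itself; islice caps the scan at two hits so large files are not scanned in full. A standalone sketch with invented sample source:

import re
from itertools import islice

def is_redundant_import(name: str, contents: str) -> bool:
    """True when the only occurrence of name is the import statement itself."""
    occurrences = islice(re.finditer(rf"{re.escape(name)}(?!\w)", contents), 2)
    return len(list(occurrences)) == 1

src = "import spack.config\nimport spack.store\nprint(spack.store.STORE)\n"
print(is_redundant_import("spack.config", src))  # True: imported but never used
print(is_redundant_import("spack.store", src))   # False: used below the import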

View File

@@ -177,15 +177,16 @@ def test_run(args):
matching = spack.store.STORE.db.query_local(spec, hashes=hashes, explicit=explicit)
if spec and not matching:
tty.warn("No {0}installed packages match spec {1}".format(explicit_str, spec))
"""
TODO: Need to write out a log message and/or CDASH Testing
output that package not installed IF continue to process
these issues here.
# TODO: Need to write out a log message and/or CDASH Testing
# output that package not installed IF continue to process
# these issues here.
# if args.log_format:
# # Proceed with the spec assuming the test process
# # to ensure report package as skipped (e.g., for CI)
# specs_to_test.append(spec)
if args.log_format:
# Proceed with the spec assuming the test process
# to ensure report package as skipped (e.g., for CI)
specs_to_test.append(spec)
"""
specs_to_test.extend(matching)
@@ -252,9 +253,7 @@ def has_test_and_tags(pkg_class):
hashes = env.all_hashes() if env else None
specs = spack.store.STORE.db.query(hashes=hashes)
specs = list(
filter(lambda s: has_test_and_tags(spack.repo.PATH.get_pkg_class(s.fullname)), specs)
)
specs = list(filter(lambda s: has_test_and_tags(s.package_class), specs))
spack.cmd.display_specs(specs, long=True)

View File

@@ -2,7 +2,7 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os
import os.path
import shutil
import llnl.util.tty as tty

View File

@@ -5,7 +5,7 @@
import argparse
import collections
import io
import os
import os.path
import re
import sys
@@ -216,7 +216,7 @@ def unit_test(parser, args, unknown_args):
# Ensure clingo is available before switching to the
# mock configuration used by unit tests
with spack.bootstrap.ensure_bootstrap_configuration():
spack.bootstrap.ensure_clingo_importable_or_raise()
spack.bootstrap.ensure_core_dependencies()
if pytest is None:
spack.bootstrap.ensure_environment_dependencies()
import pytest

View File

@@ -2,48 +2,35 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import argparse
import io
from typing import List, Optional
import llnl.util.tty as tty
from llnl.string import plural
from llnl.util.filesystem import visit_directory_tree
import spack.cmd
import spack.environment as ev
import spack.spec
import spack.store
import spack.verify
import spack.verify_libraries
from spack.cmd.common import arguments
description = "verify spack installations on disk"
description = "check that all spack packages are on disk as installed"
section = "admin"
level = "long"
MANIFEST_SUBPARSER: Optional[argparse.ArgumentParser] = None
def setup_parser(subparser):
setup_parser.parser = subparser
def setup_parser(subparser: argparse.ArgumentParser):
global MANIFEST_SUBPARSER
sp = subparser.add_subparsers(metavar="SUBCOMMAND", dest="verify_command")
MANIFEST_SUBPARSER = sp.add_parser(
"manifest", help=verify_manifest.__doc__, description=verify_manifest.__doc__
)
MANIFEST_SUBPARSER.add_argument(
subparser.add_argument(
"-l", "--local", action="store_true", help="verify only locally installed packages"
)
MANIFEST_SUBPARSER.add_argument(
subparser.add_argument(
"-j", "--json", action="store_true", help="ouptut json-formatted errors"
)
MANIFEST_SUBPARSER.add_argument("-a", "--all", action="store_true", help="verify all packages")
MANIFEST_SUBPARSER.add_argument(
subparser.add_argument("-a", "--all", action="store_true", help="verify all packages")
subparser.add_argument(
"specs_or_files", nargs=argparse.REMAINDER, help="specs or files to verify"
)
manifest_sp_type = MANIFEST_SUBPARSER.add_mutually_exclusive_group()
manifest_sp_type.add_argument(
type = subparser.add_mutually_exclusive_group()
type.add_argument(
"-s",
"--specs",
action="store_const",
@@ -52,7 +39,7 @@ def setup_parser(subparser: argparse.ArgumentParser):
default="specs",
help="treat entries as specs (default)",
)
manifest_sp_type.add_argument(
type.add_argument(
"-f",
"--files",
action="store_const",
@@ -62,67 +49,14 @@ def setup_parser(subparser: argparse.ArgumentParser):
help="treat entries as absolute filenames\n\ncannot be used with '-a'",
)
libraries_subparser = sp.add_parser(
"libraries", help=verify_libraries.__doc__, description=verify_libraries.__doc__
)
arguments.add_common_arguments(libraries_subparser, ["constraint"])
def verify(parser, args):
cmd = args.verify_command
if cmd == "libraries":
return verify_libraries(args)
elif cmd == "manifest":
return verify_manifest(args)
parser.error("invalid verify subcommand")
def verify_libraries(args):
"""verify that shared libraries of install packages can be located in rpaths (Linux only)"""
specs_from_db = [s for s in args.specs(installed=True) if not s.external]
tty.info(f"Checking {len(specs_from_db)} packages for shared library resolution")
errors = 0
for spec in specs_from_db:
try:
pkg = spec.package
except Exception:
tty.warn(f"Skipping {spec.cformat('{name}{@version}{/hash}')} due to missing package")
error_msg = _verify_libraries(spec, pkg.unresolved_libraries)
if error_msg is not None:
errors += 1
tty.error(error_msg)
if errors:
tty.error(f"Cannot resolve shared libraries in {plural(errors, 'package')}")
return 1
def _verify_libraries(spec: spack.spec.Spec, unresolved_libraries: List[str]) -> Optional[str]:
"""Go over the prefix of the installed spec and verify its shared libraries can be resolved."""
visitor = spack.verify_libraries.ResolveSharedElfLibDepsVisitor(
[*spack.verify_libraries.ALLOW_UNRESOLVED, *unresolved_libraries]
)
visit_directory_tree(spec.prefix, visitor)
if not visitor.problems:
return None
output = io.StringIO()
visitor.write(output, indent=4, brief=True)
message = output.getvalue().rstrip()
return f"{spec.cformat('{name}{@version}{/hash}')}: {spec.prefix}:\n{message}"
def verify_manifest(args):
"""verify that install directories have not been modified since installation"""
local = args.local
if args.type == "files":
if args.all:
MANIFEST_SUBPARSER.error("cannot use --all with --files")
setup_parser.parser.print_help()
return 1
for file in args.specs_or_files:
results = spack.verify.check_file_manifest(file)
@@ -153,7 +87,8 @@ def verify_manifest(args):
env = ev.active_environment()
specs = list(map(lambda x: spack.cmd.disambiguate_spec(x, env, local=local), spec_args))
else:
MANIFEST_SUBPARSER.error("use --all or specify specs to verify")
setup_parser.parser.print_help()
return 1
for spec in specs:
tty.debug("Verifying package %s")

View File

@@ -801,17 +801,17 @@ def _extract_compiler_paths(spec: "spack.spec.Spec") -> Optional[Dict[str, str]]
def _extract_os_and_target(spec: "spack.spec.Spec"):
if not spec.architecture:
host_platform = spack.platforms.host()
operating_system = host_platform.default_operating_system()
target = host_platform.default_target()
operating_system = host_platform.operating_system("default_os")
target = host_platform.target("default_target")
else:
target = spec.architecture.target
if not target:
target = spack.platforms.host().default_target()
target = spack.platforms.host().target("default_target")
operating_system = spec.os
if not operating_system:
host_platform = spack.platforms.host()
operating_system = host_platform.default_operating_system()
operating_system = host_platform.operating_system("default_os")
return operating_system, target

View File

@@ -220,7 +220,7 @@ def concretize_one(spec: Union[str, Spec], tests: TestsType = False) -> Spec:
opt, i, answer = min(result.answers)
name = spec.name
# TODO: Consolidate this code with similar code in solve.py
if spack.repo.PATH.is_virtual(spec.name):
if spec.virtual:
providers = [s.name for s in answer.values() if s.package.provides(name)]
name = providers[0]

View File

@@ -53,7 +53,6 @@
import spack.schema.definitions
import spack.schema.develop
import spack.schema.env
import spack.schema.env_vars
import spack.schema.mirrors
import spack.schema.modules
import spack.schema.packages
@@ -66,14 +65,11 @@
import spack.util.web as web_util
from spack.util.cpus import cpus_available
from .enums import ConfigScopePriority
#: Dict from section names -> schema for that section
SECTION_SCHEMAS: Dict[str, Any] = {
"compilers": spack.schema.compilers.schema,
"concretizer": spack.schema.concretizer.schema,
"definitions": spack.schema.definitions.schema,
"env_vars": spack.schema.env_vars.schema,
"view": spack.schema.view.schema,
"develop": spack.schema.develop.schema,
"mirrors": spack.schema.mirrors.schema,
@@ -410,18 +406,26 @@ def _method(self, *args, **kwargs):
return _method
ScopeWithOptionalPriority = Union[ConfigScope, Tuple[int, ConfigScope]]
ScopeWithPriority = Tuple[int, ConfigScope]
class Configuration:
"""A hierarchical configuration, merging a number of scopes at different priorities."""
"""A full Spack configuration, from a hierarchy of config files.
This class makes it easy to add a new scope on top of an existing one.
"""
# convert to typing.OrderedDict when we drop 3.6, or OrderedDict when we reach 3.9
scopes: lang.PriorityOrderedMapping[str, ConfigScope]
scopes: Dict[str, ConfigScope]
def __init__(self) -> None:
self.scopes = lang.PriorityOrderedMapping()
def __init__(self, *scopes: ConfigScope) -> None:
"""Initialize a configuration with an initial list of scopes.
Args:
scopes: list of scopes to add to this
Configuration, ordered from lowest to highest precedence
"""
self.scopes = collections.OrderedDict()
for scope in scopes:
self.push_scope(scope)
self.format_updates: Dict[str, List[ConfigScope]] = collections.defaultdict(list)
def ensure_unwrapped(self) -> "Configuration":
@@ -429,31 +433,36 @@ def ensure_unwrapped(self) -> "Configuration":
return self
def highest(self) -> ConfigScope:
"""Scope with the highest precedence"""
return next(self.scopes.reversed_values()) # type: ignore
"""Scope with highest precedence"""
return next(reversed(self.scopes.values())) # type: ignore
@_config_mutator
def push_scope(self, scope: ConfigScope, priority: Optional[int] = None) -> None:
"""Adds a scope to the Configuration, at a given priority.
def ensure_scope_ordering(self):
"""Ensure that scope order matches documented precedent"""
# FIXME: We also need to consider that custom configurations and other orderings
# may not be preserved correctly
if "command_line" in self.scopes:
# TODO (when dropping python 3.6): self.scopes.move_to_end
self.scopes["command_line"] = self.remove_scope("command_line")
If a priority is not given, it is assumed to be the current highest priority.
@_config_mutator
def push_scope(self, scope: ConfigScope) -> None:
"""Add a higher precedence scope to the Configuration."""
tty.debug(f"[CONFIGURATION: PUSH SCOPE]: {str(scope)}", level=2)
self.scopes[scope.name] = scope
Args:
scope: scope to be added
priority: priority of the scope
"""
tty.debug(f"[CONFIGURATION: PUSH SCOPE]: {str(scope)}, priority={priority}", level=2)
self.scopes.add(scope.name, value=scope, priority=priority)
@_config_mutator
def pop_scope(self) -> ConfigScope:
"""Remove the highest precedence scope and return it."""
name, scope = self.scopes.popitem(last=True) # type: ignore[call-arg]
tty.debug(f"[CONFIGURATION: POP SCOPE]: {str(scope)}", level=2)
return scope
@_config_mutator
def remove_scope(self, scope_name: str) -> Optional[ConfigScope]:
"""Removes a scope by name, and returns it. If the scope does not exist, returns None."""
try:
scope = self.scopes.remove(scope_name)
tty.debug(f"[CONFIGURATION: POP SCOPE]: {str(scope)}", level=2)
except KeyError as e:
tty.debug(f"[CONFIGURATION: POP SCOPE]: {e}", level=2)
return None
"""Remove scope by name; has no effect when ``scope_name`` does not exist"""
scope = self.scopes.pop(scope_name, None)
tty.debug(f"[CONFIGURATION: POP SCOPE]: {str(scope)}", level=2)
return scope
@property
@@ -462,13 +471,15 @@ def writable_scopes(self) -> Generator[ConfigScope, None, None]:
return (s for s in self.scopes.values() if s.writable)
def highest_precedence_scope(self) -> ConfigScope:
"""Writable scope with the highest precedence."""
return next(s for s in self.scopes.reversed_values() if s.writable)
"""Writable scope with highest precedence."""
return next(s for s in reversed(self.scopes.values()) if s.writable) # type: ignore
def highest_precedence_non_platform_scope(self) -> ConfigScope:
"""Writable non-platform scope with the highest precedence"""
"""Writable non-platform scope with highest precedence"""
return next(
s for s in self.scopes.reversed_values() if s.writable and not s.is_platform_dependent
s
for s in reversed(self.scopes.values()) # type: ignore
if s.writable and not s.is_platform_dependent
)
def matching_scopes(self, reg_expr) -> List[ConfigScope]:
@@ -735,7 +746,7 @@ def override(
"""
if isinstance(path_or_scope, ConfigScope):
overrides = path_or_scope
CONFIG.push_scope(path_or_scope, priority=None)
CONFIG.push_scope(path_or_scope)
else:
base_name = _OVERRIDES_BASE_NAME
# Ensure the new override gets a unique scope name
@@ -749,7 +760,7 @@ def override(
break
overrides = InternalConfigScope(scope_name)
CONFIG.push_scope(overrides, priority=None)
CONFIG.push_scope(overrides)
CONFIG.set(path_or_scope, value, scope=scope_name)
try:
@@ -759,15 +770,13 @@ def override(
assert scope is overrides
def _add_platform_scope(
cfg: Configuration, name: str, path: str, priority: ConfigScopePriority, writable: bool = True
) -> None:
def _add_platform_scope(cfg: Configuration, name: str, path: str, writable: bool = True) -> None:
"""Add a platform-specific subdirectory for the current platform."""
platform = spack.platforms.host().name
scope = DirectoryConfigScope(
f"{name}/{platform}", os.path.join(path, platform), writable=writable
)
cfg.push_scope(scope, priority=priority)
cfg.push_scope(scope)
def config_paths_from_entry_points() -> List[Tuple[str, str]]:
@@ -802,10 +811,11 @@ def create() -> Configuration:
it. It is bundled inside a function so that configuration can be
initialized lazily.
"""
cfg = Configuration()
# first do the builtin, hardcoded defaults
cfg = create_from(
(ConfigScopePriority.BUILTIN, InternalConfigScope("_builtin", CONFIG_DEFAULTS))
)
builtin = InternalConfigScope("_builtin", CONFIG_DEFAULTS)
cfg.push_scope(builtin)
# Builtin paths to configuration files in Spack
configuration_paths = [
@@ -835,9 +845,10 @@ def create() -> Configuration:
# add each scope and its platform-specific directory
for name, path in configuration_paths:
cfg.push_scope(DirectoryConfigScope(name, path), priority=ConfigScopePriority.CONFIG_FILES)
# Each scope can have per-platform overrides in subdirectories
_add_platform_scope(cfg, name, path, priority=ConfigScopePriority.CONFIG_FILES)
cfg.push_scope(DirectoryConfigScope(name, path))
# Each scope can have per-platfom overrides in subdirectories
_add_platform_scope(cfg, name, path)
return cfg
@@ -942,7 +953,7 @@ def set(path: str, value: Any, scope: Optional[str] = None) -> None:
return CONFIG.set(path, value, scope)
def scopes() -> lang.PriorityOrderedMapping[str, ConfigScope]:
def scopes() -> Dict[str, ConfigScope]:
"""Convenience function to get list of configuration scopes."""
return CONFIG.scopes
@@ -1396,7 +1407,7 @@ def ensure_latest_format_fn(section: str) -> Callable[[YamlConfigDict], bool]:
@contextlib.contextmanager
def use_configuration(
*scopes_or_paths: Union[ScopeWithOptionalPriority, str]
*scopes_or_paths: Union[ConfigScope, str]
) -> Generator[Configuration, None, None]:
"""Use the configuration scopes passed as arguments within the context manager.
@@ -1411,7 +1422,7 @@ def use_configuration(
global CONFIG
# Normalize input and construct a Configuration object
configuration = create_from(*scopes_or_paths)
configuration = _config_from(scopes_or_paths)
CONFIG.clear_caches(), configuration.clear_caches()
saved_config, CONFIG = CONFIG, configuration
@@ -1422,44 +1433,23 @@ def use_configuration(
CONFIG = saved_config
def _normalize_input(entry: Union[ScopeWithOptionalPriority, str]) -> ScopeWithPriority:
if isinstance(entry, tuple):
return entry
default_priority = ConfigScopePriority.CONFIG_FILES
if isinstance(entry, ConfigScope):
return default_priority, entry
# Otherwise we need to construct it
path = os.path.normpath(entry)
assert os.path.isdir(path), f'"{path}" must be a directory'
name = os.path.basename(path)
return default_priority, DirectoryConfigScope(name, path)
@lang.memoized
def create_from(*scopes_or_paths: Union[ScopeWithOptionalPriority, str]) -> Configuration:
"""Creates a configuration object from the scopes passed in input.
def _config_from(scopes_or_paths: List[Union[ConfigScope, str]]) -> Configuration:
scopes = []
for scope_or_path in scopes_or_paths:
# If we have a config scope we are already done
if isinstance(scope_or_path, ConfigScope):
scopes.append(scope_or_path)
continue
Args:
*scopes_or_paths: either a tuple of (priority, ConfigScope), or a ConfigScope, or a string
If priority is not given, it is assumed to be ConfigScopePriority.CONFIG_FILES. If a
string is given, a DirectoryConfigScope is created from it.
# Otherwise we need to construct it
path = os.path.normpath(scope_or_path)
assert os.path.isdir(path), f'"{path}" must be a directory'
name = os.path.basename(path)
scopes.append(DirectoryConfigScope(name, path))
Examples:
>>> builtin_scope = InternalConfigScope("_builtin", {"config": {"build_jobs": 1}})
>>> cl_scope = InternalConfigScope("command_line", {"config": {"build_jobs": 10}})
>>> cfg = create_from(
... (ConfigScopePriority.COMMAND_LINE, cl_scope),
... (ConfigScopePriority.BUILTIN, builtin_scope)
... )
"""
scopes_with_priority = [_normalize_input(x) for x in scopes_or_paths]
result = Configuration()
for priority, scope in scopes_with_priority:
result.push_scope(scope, priority=priority)
return result
configuration = Configuration(*scopes)
return configuration
def raw_github_gitlab_url(url: str) -> str:

View File

@@ -57,7 +57,7 @@ def validate(configuration_file):
# Set the default value of the concretization strategy to unify and
# warn if the user explicitly set another value
env_dict.setdefault("concretizer", {"unify": True})
if env_dict["concretizer"]["unify"] is not True:
if not env_dict["concretizer"]["unify"] is True:
warnings.warn(
'"concretizer:unify" is not set to "true", which means the '
"generated image may contain different variants of the same "

View File

@@ -3,7 +3,7 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Manages the details on the images used in the various stages."""
import json
import os
import os.path
import shlex
import sys

View File

@@ -41,8 +41,6 @@
Union,
)
import spack.repo
try:
import uuid
@@ -1558,12 +1556,7 @@ def _query(
# If we did find something, the query spec can't be virtual b/c we matched an actual
# package installation, so skip the virtual check entirely. If we *didn't* find anything,
# check all the deferred specs *if* the query is virtual.
if (
not results
and query_spec is not None
and deferred
and spack.repo.PATH.is_virtual(query_spec.name)
):
if not results and query_spec is not None and deferred and query_spec.virtual:
results = [spec for spec in deferred if spec.satisfies(query_spec)]
return results

View File

@@ -15,6 +15,7 @@
import glob
import itertools
import os
import os.path
import pathlib
import re
import sys
@@ -310,7 +311,7 @@ def find_windows_kit_roots() -> List[str]:
@staticmethod
def find_windows_kit_bin_paths(
kit_base: Union[Optional[str], Optional[list]] = None,
kit_base: Union[Optional[str], Optional[list]] = None
) -> List[str]:
"""Returns Windows kit bin directory per version"""
kit_base = WindowsKitExternalPaths.find_windows_kit_roots() if not kit_base else kit_base
@@ -325,7 +326,7 @@ def find_windows_kit_bin_paths(
@staticmethod
def find_windows_kit_lib_paths(
kit_base: Union[Optional[str], Optional[list]] = None,
kit_base: Union[Optional[str], Optional[list]] = None
) -> List[str]:
"""Returns Windows kit lib directory per version"""
kit_base = WindowsKitExternalPaths.find_windows_kit_roots() if not kit_base else kit_base

View File

@@ -7,6 +7,7 @@
import collections
import concurrent.futures
import os
import os.path
import re
import sys
import traceback
@@ -243,7 +244,7 @@ def prefix_from_path(self, *, path: str) -> str:
raise NotImplementedError("must be implemented by derived classes")
def detect_specs(
self, *, pkg: Type["spack.package_base.PackageBase"], paths: Iterable[str]
self, *, pkg: Type["spack.package_base.PackageBase"], paths: List[str]
) -> List["spack.spec.Spec"]:
"""Given a list of files matching the search patterns, returns a list of detected specs.
@@ -259,8 +260,6 @@ def detect_specs(
)
return []
from spack.repo import PATH as repo_path
result = []
for candidate_path, items_in_prefix in _group_by_prefix(
llnl.util.lang.dedupe(paths)
@@ -307,10 +306,7 @@ def detect_specs(
resolved_specs[spec] = candidate_path
try:
# Validate the spec calling a package specific method
pkg_cls = repo_path.get_pkg_class(spec.name)
validate_fn = getattr(pkg_cls, "validate_detected_spec", lambda x, y: None)
validate_fn(spec, spec.extra_attributes)
spec.validate_detection()
except Exception as e:
msg = (
f'"{spec}" has been detected on the system but will '

View File

@@ -32,7 +32,7 @@ class OpenMpi(Package):
"""
import collections
import collections.abc
import os
import os.path
import re
from typing import Any, Callable, List, Optional, Tuple, Type, Union
@@ -568,7 +568,7 @@ def patch(
"""
def _execute_patch(
pkg_or_dep: Union[Type[spack.package_base.PackageBase], Dependency],
pkg_or_dep: Union[Type[spack.package_base.PackageBase], Dependency]
) -> None:
pkg = pkg_or_dep.pkg if isinstance(pkg_or_dep, Dependency) else pkg_or_dep

View File

@@ -25,7 +25,7 @@
}
def _check_concrete(spec: "spack.spec.Spec") -> None:
def _check_concrete(spec):
"""If the spec is not concrete, raise a ValueError"""
if not spec.concrete:
raise ValueError("Specs passed to a DirectoryLayout must be concrete!")
@@ -51,7 +51,7 @@ def specs_from_metadata_dirs(root: str) -> List["spack.spec.Spec"]:
spec = _get_spec(prefix)
if spec:
spec.set_prefix(prefix)
spec.prefix = prefix
specs.append(spec)
continue
@@ -84,7 +84,7 @@ class DirectoryLayout:
def __init__(
self,
root: str,
root,
*,
projections: Optional[Dict[str, str]] = None,
hash_length: Optional[int] = None,
@@ -120,17 +120,17 @@ def __init__(
self.manifest_file_name = "install_manifest.json"
@property
def hidden_file_regexes(self) -> Tuple[str]:
def hidden_file_regexes(self):
return ("^{0}$".format(re.escape(self.metadata_dir)),)
def relative_path_for_spec(self, spec: "spack.spec.Spec") -> str:
def relative_path_for_spec(self, spec):
_check_concrete(spec)
projection = spack.projections.get_projection(self.projections, spec)
path = spec.format_path(projection)
return str(Path(path))
def write_spec(self, spec: "spack.spec.Spec", path: str) -> None:
def write_spec(self, spec, path):
"""Write a spec out to a file."""
_check_concrete(spec)
with open(path, "w", encoding="utf-8") as f:
@@ -138,7 +138,7 @@ def write_spec(self, spec: "spack.spec.Spec", path: str) -> None:
# the full provenance, so it's available if we want it later
spec.to_json(f, hash=ht.dag_hash)
def write_host_environment(self, spec: "spack.spec.Spec") -> None:
def write_host_environment(self, spec):
"""The host environment is a json file with os, kernel, and spack
versioning. We use it in the case that an analysis later needs to
easily access this information.
@@ -148,7 +148,7 @@ def write_host_environment(self, spec: "spack.spec.Spec") -> None:
with open(env_file, "w", encoding="utf-8") as fd:
sjson.dump(environ, fd)
def read_spec(self, path: str) -> "spack.spec.Spec":
def read_spec(self, path):
"""Read the contents of a file and parse them as a spec"""
try:
with open(path, encoding="utf-8") as f:
@@ -159,28 +159,26 @@ def read_spec(self, path: str) -> "spack.spec.Spec":
# Too late for conversion; spec_file_path() already called.
spec = spack.spec.Spec.from_yaml(f)
else:
raise SpecReadError(f"Did not recognize spec file extension: {extension}")
raise SpecReadError(
"Did not recognize spec file extension:" " {0}".format(extension)
)
except Exception as e:
if spack.config.get("config:debug"):
raise
raise SpecReadError(f"Unable to read file: {path}", f"Cause: {e}")
raise SpecReadError("Unable to read file: %s" % path, "Cause: " + str(e))
# Specs read from actual installations are always concrete
spec._mark_concrete()
return spec
def spec_file_path(self, spec: "spack.spec.Spec") -> str:
def spec_file_path(self, spec):
"""Gets full path to spec file"""
_check_concrete(spec)
yaml_path = os.path.join(self.metadata_path(spec), self._spec_file_name_yaml)
json_path = os.path.join(self.metadata_path(spec), self.spec_file_name)
return yaml_path if os.path.exists(yaml_path) else json_path
def deprecated_file_path(
self,
deprecated_spec: "spack.spec.Spec",
deprecator_spec: Optional["spack.spec.Spec"] = None,
) -> str:
def deprecated_file_path(self, deprecated_spec, deprecator_spec=None):
"""Gets full path to spec file for deprecated spec
If the deprecator_spec is provided, use that. Otherwise, assume
@@ -214,16 +212,16 @@ def deprecated_file_path(
return yaml_path if os.path.exists(yaml_path) else json_path
def metadata_path(self, spec: "spack.spec.Spec") -> str:
def metadata_path(self, spec):
return os.path.join(spec.prefix, self.metadata_dir)
def env_metadata_path(self, spec: "spack.spec.Spec") -> str:
def env_metadata_path(self, spec):
return os.path.join(self.metadata_path(spec), "install_environment.json")
def build_packages_path(self, spec: "spack.spec.Spec") -> str:
def build_packages_path(self, spec):
return os.path.join(self.metadata_path(spec), self.packages_dir)
def create_install_directory(self, spec: "spack.spec.Spec") -> None:
def create_install_directory(self, spec):
_check_concrete(spec)
# Create install directory with properly configured permissions
@@ -241,7 +239,7 @@ def create_install_directory(self, spec: "spack.spec.Spec") -> None:
self.write_spec(spec, self.spec_file_path(spec))
def ensure_installed(self, spec: "spack.spec.Spec") -> None:
def ensure_installed(self, spec):
"""
Throws InconsistentInstallDirectoryError if:
1. spec prefix does not exist
@@ -268,7 +266,7 @@ def ensure_installed(self, spec: "spack.spec.Spec") -> None:
"Spec file in %s does not match hash!" % spec_file_path
)
def path_for_spec(self, spec: "spack.spec.Spec") -> str:
def path_for_spec(self, spec):
"""Return absolute path from the root to a directory for the spec."""
_check_concrete(spec)
@@ -279,13 +277,23 @@ def path_for_spec(self, spec: "spack.spec.Spec") -> str:
assert not path.startswith(self.root)
return os.path.join(self.root, path)
def remove_install_directory(self, spec: "spack.spec.Spec", deprecated: bool = False) -> None:
def remove_install_directory(self, spec, deprecated=False):
"""Removes a prefix and any empty parent directories from the root.
Raises RemoveFailedError if something goes wrong.
"""
path = self.path_for_spec(spec)
assert path.startswith(self.root)
# Windows readonly files cannot be removed by Python
# directly, change permissions before attempting to remove
if sys.platform == "win32":
kwargs = {
"ignore_errors": False,
"onerror": fs.readonly_file_handler(ignore_errors=False),
}
else:
kwargs = {} # the default value for ignore_errors is false
if deprecated:
if os.path.exists(path):
try:
@@ -296,16 +304,7 @@ def remove_install_directory(self, spec: "spack.spec.Spec", deprecated: bool = F
raise RemoveFailedError(spec, path, e) from e
elif os.path.exists(path):
try:
if sys.platform == "win32":
# Windows readonly files cannot be removed by Python
# directly, change permissions before attempting to remove
shutil.rmtree(
path,
ignore_errors=False,
onerror=fs.readonly_file_handler(ignore_errors=False),
)
else:
shutil.rmtree(path)
shutil.rmtree(path, **kwargs)
except OSError as e:
raise RemoveFailedError(spec, path, e) from e
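
The Windows branch above relies on an onerror callback because read-only files cannot be removed directly. A rough, generic sketch of such a callback (Spack's actual helper is fs.readonly_file_handler; the names below are illustrative only):

import os
import shutil
import stat

def _clear_readonly_and_retry(func, path, exc_info):
    # Drop the read-only bit, then retry the os.remove/os.rmdir call that failed.
    os.chmod(path, stat.S_IWRITE)
    func(path)

# shutil.rmtree(prefix, ignore_errors=False, onerror=_clear_readonly_and_retry)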

View File

@@ -12,13 +12,3 @@ class InstallRecordStatus(enum.Flag):
DEPRECATED = enum.auto()
MISSING = enum.auto()
ANY = INSTALLED | DEPRECATED | MISSING
class ConfigScopePriority(enum.IntEnum):
"""Priorities of the different kind of config scopes used by Spack"""
BUILTIN = 0
CONFIG_FILES = 1
CUSTOM = 2
ENVIRONMENT = 3
COMMAND_LINE = 4
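
Since ConfigScopePriority is an IntEnum, these priorities compare as plain integers, so command-line scopes always outrank environment, custom and config-file scopes. A quick check, assuming the module shown here is importable as spack.enums:

from spack.enums import ConfigScopePriority

assert ConfigScopePriority.COMMAND_LINE > ConfigScopePriority.ENVIRONMENT
assert ConfigScopePriority.ENVIRONMENT > ConfigScopePriority.CUSTOM > ConfigScopePriority.CONFIG_FILES > ConfigScopePriority.BUILTIN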

View File

@@ -51,8 +51,6 @@
from spack.spec_list import SpecList
from spack.util.path import substitute_path_variables
from ..enums import ConfigScopePriority
SpecPair = spack.concretize.SpecPair
#: environment variable used to indicate the active environment
@@ -389,7 +387,6 @@ def create_in_dir(
# dev paths in this environment to refer to their original
# locations.
_rewrite_relative_dev_paths_on_relocation(env, init_file_dir)
_rewrite_relative_repos_paths_on_relocation(env, init_file_dir)
return env
@@ -406,8 +403,8 @@ def _rewrite_relative_dev_paths_on_relocation(env, init_file_dir):
dev_path = substitute_path_variables(entry["path"])
expanded_path = spack.util.path.canonicalize_path(dev_path, default_wd=init_file_dir)
# Skip if the substituted and expanded path is the same (e.g. when absolute)
if entry["path"] == expanded_path:
# Skip if the expanded path is the same (e.g. when absolute)
if dev_path == expanded_path:
continue
tty.debug("Expanding develop path for {0} to {1}".format(name, expanded_path))
@@ -422,34 +419,6 @@ def _rewrite_relative_dev_paths_on_relocation(env, init_file_dir):
env._re_read()
def _rewrite_relative_repos_paths_on_relocation(env, init_file_dir):
"""When initializing the environment from a manifest file and we plan
to store the environment in a different directory, we have to rewrite
relative repo paths to absolute ones and expand environment variables."""
with env:
repos_specs = spack.config.get("repos", default={}, scope=env.scope_name)
if not repos_specs:
return
for i, entry in enumerate(repos_specs):
repo_path = substitute_path_variables(entry)
expanded_path = spack.util.path.canonicalize_path(repo_path, default_wd=init_file_dir)
# Skip if the substituted and expanded path is the same (e.g. when absolute)
if entry == expanded_path:
continue
tty.debug("Expanding repo path for {0} to {1}".format(entry, expanded_path))
repos_specs[i] = expanded_path
spack.config.set("repos", repos_specs, scope=env.scope_name)
env.repos_specs = None
# If we changed the environment's spack.yaml scope, that will not be reflected
# in the manifest that we read
env._re_read()
def environment_dir_from_name(name: str, exists_ok: bool = True) -> str:
"""Returns the directory associated with a named environment.
@@ -612,7 +581,7 @@ def _error_on_nonempty_view_dir(new_root):
# Check if the target path lexists
try:
st = os.lstat(new_root)
except OSError:
except (IOError, OSError):
return
# Empty directories are fine
@@ -892,7 +861,7 @@ def regenerate(self, concrete_roots: List[Spec]) -> None:
):
try:
shutil.rmtree(old_root)
except OSError as e:
except (IOError, OSError) as e:
msg = "Failed to remove old view at %s\n" % old_root
msg += str(e)
tty.warn(msg)
@@ -2423,8 +2392,6 @@ def invalidate_repository_cache(self):
def __enter__(self):
self._previous_active = _active_environment
if self._previous_active:
deactivate()
activate(self)
return self
@@ -2587,7 +2554,7 @@ def is_latest_format(manifest):
try:
with open(manifest, encoding="utf-8") as f:
data = syaml.load(f)
except OSError:
except (OSError, IOError):
return True
top_level_key = _top_level_key(data)
changed = spack.schema.env.update(data[top_level_key])
@@ -2667,32 +2634,6 @@ def _ensure_env_dir():
shutil.copy(envfile, target_manifest)
# Copy relative path includes that live inside the environment dir
try:
manifest = EnvironmentManifestFile(environment_dir)
except Exception:
# error handling for bad manifests is handled on other code paths
return
includes = manifest[TOP_LEVEL_KEY].get("include", [])
for include in includes:
if os.path.isabs(include):
continue
abspath = pathlib.Path(os.path.normpath(environment_dir / include))
common_path = pathlib.Path(os.path.commonpath([environment_dir, abspath]))
if common_path != environment_dir:
tty.debug(f"Will not copy relative include from outside environment: {include}")
continue
orig_abspath = os.path.normpath(envfile.parent / include)
if not os.path.exists(orig_abspath):
tty.warn(f"Included file does not exist; will not copy: '{include}'")
continue
fs.touchp(abspath)
shutil.copy(orig_abspath, abspath)
class EnvironmentManifestFile(collections.abc.Mapping):
"""Manages the in-memory representation of a manifest file, and its synchronization
@@ -3100,12 +3041,14 @@ def env_config_scopes(self) -> List[spack.config.ConfigScope]:
def prepare_config_scope(self) -> None:
"""Add the manifest's scopes to the global configuration search path."""
for scope in self.env_config_scopes:
spack.config.CONFIG.push_scope(scope, priority=ConfigScopePriority.ENVIRONMENT)
spack.config.CONFIG.push_scope(scope)
spack.config.CONFIG.ensure_scope_ordering()
def deactivate_config_scope(self) -> None:
"""Remove any of the manifest's scopes from the global config path."""
for scope in self.env_config_scopes:
spack.config.CONFIG.remove_scope(scope.name)
spack.config.CONFIG.ensure_scope_ordering()
@contextlib.contextmanager
def use_config(self):

View File

@@ -10,7 +10,6 @@
import spack.environment as ev
import spack.repo
import spack.schema.environment
import spack.store
from spack.util.environment import EnvironmentModifications
@@ -157,11 +156,6 @@ def activate(
# MANPATH, PYTHONPATH, etc. All variables that end in PATH (case-sensitive)
# become PATH variables.
#
env_vars_yaml = env.manifest.configuration.get("env_vars", None)
if env_vars_yaml:
env_mods.extend(spack.schema.environment.parse(env_vars_yaml))
try:
if view and env.has_view(view):
with spack.store.STORE.db.read_transaction():
@@ -195,10 +189,6 @@ def deactivate() -> EnvironmentModifications:
if active is None:
return env_mods
env_vars_yaml = active.manifest.configuration.get("env_vars", None)
if env_vars_yaml:
env_mods.extend(spack.schema.environment.parse(env_vars_yaml).reversed())
active_view = os.getenv(ev.spack_env_view_var)
if active_view and active.has_view(active_view):

View File

@@ -187,7 +187,7 @@ def path_for_extension(target_name: str, *, paths: List[str]) -> str:
if name == target_name:
return path
else:
raise OSError('extension "{0}" not found'.format(target_name))
raise IOError('extension "{0}" not found'.format(target_name))
def get_module(cmd_name):

View File

@@ -25,6 +25,7 @@
import functools
import http.client
import os
import os.path
import re
import shutil
import urllib.error
@@ -320,15 +321,9 @@ def _fetch_urllib(self, url):
request = urllib.request.Request(url, headers={"User-Agent": web_util.SPACK_USER_AGENT})
if os.path.lexists(save_file):
os.remove(save_file)
try:
response = web_util.urlopen(request)
tty.msg(f"Fetching {url}")
with open(save_file, "wb") as f:
shutil.copyfileobj(response, f)
except OSError as e:
except (TimeoutError, urllib.error.URLError) as e:
# clean up archive on failure.
if self.archive_file:
os.remove(self.archive_file)
@@ -336,6 +331,14 @@ def _fetch_urllib(self, url):
os.remove(save_file)
raise FailedDownloadError(e) from e
tty.msg(f"Fetching {url}")
if os.path.lexists(save_file):
os.remove(save_file)
with open(save_file, "wb") as f:
shutil.copyfileobj(response, f)
# Save the redirected URL for error messages. Sometimes we're redirected to an arbitrary
# mirror that is broken, leading to spurious download failures. In that case it's helpful
# for users to know which URL was actually fetched.
@@ -532,16 +535,11 @@ def __init__(self, *, url: str, checksum: Optional[str] = None, **kwargs):
@_needs_stage
def fetch(self):
file = self.stage.save_filename
if os.path.lexists(file):
os.remove(file)
tty.msg(f"Fetching {self.url}")
try:
response = self._urlopen(self.url)
tty.msg(f"Fetching {self.url}")
with open(file, "wb") as f:
shutil.copyfileobj(response, f)
except OSError as e:
except (TimeoutError, urllib.error.URLError) as e:
# clean up archive on failure.
if self.archive_file:
os.remove(self.archive_file)
@@ -549,6 +547,12 @@ def fetch(self):
os.remove(file)
raise FailedDownloadError(e) from e
if os.path.lexists(file):
os.remove(file)
with open(file, "wb") as f:
shutil.copyfileobj(response, f)
class VCSFetchStrategy(FetchStrategy):
"""Superclass for version control system fetch strategies.

View File

@@ -9,8 +9,7 @@
import shutil
import stat
import sys
import tempfile
from typing import Callable, Dict, List, Optional
from typing import Callable, Dict, Optional
from typing_extensions import Literal
@@ -78,7 +77,7 @@ def view_copy(
# Order of this dict is somewhat irrelevant
prefix_to_projection = {
str(s.prefix): view.get_projection_for_spec(s)
s.prefix: view.get_projection_for_spec(s)
for s in spec.traverse(root=True, order="breadth")
if not s.external
}
@@ -185,7 +184,7 @@ def __init__(
def link(self, src: str, dst: str, spec: Optional[spack.spec.Spec] = None) -> None:
self._link(src, dst, self, spec)
def add_specs(self, *specs: spack.spec.Spec, **kwargs) -> None:
def add_specs(self, *specs, **kwargs):
"""
Add given specs to view.
@@ -200,19 +199,19 @@ def add_specs(self, *specs: spack.spec.Spec, **kwargs) -> None:
"""
raise NotImplementedError
def add_standalone(self, spec: spack.spec.Spec) -> bool:
def add_standalone(self, spec):
"""
Add (link) a standalone package into this view.
"""
raise NotImplementedError
def check_added(self, spec: spack.spec.Spec) -> bool:
def check_added(self, spec):
"""
Check if the given concrete spec is active in this view.
"""
raise NotImplementedError
def remove_specs(self, *specs: spack.spec.Spec, **kwargs) -> None:
def remove_specs(self, *specs, **kwargs):
"""
Removes given specs from view.
@@ -231,25 +230,25 @@ def remove_specs(self, *specs: spack.spec.Spec, **kwargs) -> None:
"""
raise NotImplementedError
def remove_standalone(self, spec: spack.spec.Spec) -> None:
def remove_standalone(self, spec):
"""
Remove (unlink) a standalone package from this view.
"""
raise NotImplementedError
def get_projection_for_spec(self, spec: spack.spec.Spec) -> str:
def get_projection_for_spec(self, spec):
"""
Get the projection in this view for a spec.
"""
raise NotImplementedError
def get_all_specs(self) -> List[spack.spec.Spec]:
def get_all_specs(self):
"""
Get all specs currently active in this view.
"""
raise NotImplementedError
def get_spec(self, spec: spack.spec.Spec) -> Optional[spack.spec.Spec]:
def get_spec(self, spec):
"""
Return the actual spec linked in this view (i.e. do not look it up
in the database by name).
@@ -263,7 +262,7 @@ def get_spec(self, spec: spack.spec.Spec) -> Optional[spack.spec.Spec]:
"""
raise NotImplementedError
def print_status(self, *specs: spack.spec.Spec, **kwargs) -> None:
def print_status(self, *specs, **kwargs):
"""
Print a short summary about the given specs, detailing whether..
* ..they are active in the view.
@@ -428,7 +427,7 @@ def needs_file(spec, file):
try:
with open(manifest_file, "r", encoding="utf-8") as f:
manifest = s_json.load(f)
except OSError:
except (OSError, IOError):
# if we can't load it, assume it doesn't know about the file.
manifest = {}
return test_path in manifest
@@ -694,7 +693,7 @@ def _sanity_check_view_projection(self, specs):
raise ConflictingSpecsError(current_spec, conflicting_spec)
seen[metadata_dir] = current_spec
def add_specs(self, *specs, **kwargs) -> None:
def add_specs(self, *specs: spack.spec.Spec) -> None:
"""Link a root-to-leaf topologically ordered list of specs into the view."""
assert all((s.concrete for s in specs))
if len(specs) == 0:
@@ -709,10 +708,7 @@ def add_specs(self, *specs, **kwargs) -> None:
def skip_list(file):
return os.path.basename(file) == spack.store.STORE.layout.metadata_dir
# Determine if the root is on a case-insensitive filesystem
normalize_paths = is_folder_on_case_insensitive_filesystem(self._root)
visitor = SourceMergeVisitor(ignore=skip_list, normalize_paths=normalize_paths)
visitor = SourceMergeVisitor(ignore=skip_list)
# Gather all the directories to be made and files to be linked
for spec in specs:
@@ -831,11 +827,11 @@ def get_projection_for_spec(self, spec):
#####################
# utility functions #
#####################
def get_spec_from_file(filename) -> Optional[spack.spec.Spec]:
def get_spec_from_file(filename):
try:
with open(filename, "r", encoding="utf-8") as f:
return spack.spec.Spec.from_yaml(f)
except OSError:
except IOError:
return None
@@ -888,8 +884,3 @@ def get_dependencies(specs):
class ConflictingProjectionsError(SpackError):
"""Raised when a view has a projections file and is given one manually."""
def is_folder_on_case_insensitive_filesystem(path: str) -> bool:
with tempfile.NamedTemporaryFile(dir=path, prefix=".sentinel") as sentinel:
return os.path.exists(os.path.join(path, os.path.basename(sentinel.name).upper()))

View File

@@ -42,10 +42,10 @@
import llnl.util.tty.color
import spack.deptypes as dt
import spack.repo
import spack.spec
import spack.tengine
import spack.traverse
from spack.solver.input_analysis import create_graph_analyzer
def find(seq, predicate):
@@ -537,11 +537,10 @@ def edge_entry(self, edge):
def _static_edges(specs, depflag):
for spec in specs:
*_, edges = create_graph_analyzer().possible_dependencies(
spec.name, expand_virtuals=True, allowed_deps=depflag
)
pkg_cls = spack.repo.PATH.get_pkg_class(spec.name)
possible = pkg_cls.possible_dependencies(expand_virtuals=True, depflag=depflag)
for parent_name, dependencies in edges.items():
for parent_name, dependencies in possible.items():
for dependency_name in dependencies:
yield spack.spec.DependencySpec(
spack.spec.Spec(parent_name),

View File

@@ -26,7 +26,7 @@ def is_shared_library_elf(filepath):
with open(filepath, "rb") as f:
elf = parse_elf(f, interpreter=True, dynamic_section=True)
return elf.has_pt_dynamic and (elf.has_soname or not elf.has_pt_interp)
except (OSError, ElfParsingError):
except (IOError, OSError, ElfParsingError):
return False

View File

@@ -2,14 +2,198 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import fnmatch
import io
import os
import re
from typing import Dict, List, Union
import llnl.util.tty as tty
from llnl.util.filesystem import visit_directory_tree
from llnl.util.filesystem import BaseDirectoryVisitor, visit_directory_tree
from llnl.util.lang import stable_partition
import spack.config
import spack.error
import spack.verify_libraries
import spack.util.elf as elf
#: Patterns for names of libraries that are allowed to be unresolved when *just* looking at RPATHs
#: added by Spack. These are libraries outside of Spack's control, and assumed to be located in
#: default search paths of the dynamic linker.
ALLOW_UNRESOLVED = [
# kernel
"linux-vdso.so.*",
"libselinux.so.*",
# musl libc
"ld-musl-*.so.*",
# glibc
"ld-linux*.so.*",
"ld64.so.*",
"libanl.so.*",
"libc.so.*",
"libdl.so.*",
"libm.so.*",
"libmemusage.so.*",
"libmvec.so.*",
"libnsl.so.*",
"libnss_compat.so.*",
"libnss_db.so.*",
"libnss_dns.so.*",
"libnss_files.so.*",
"libnss_hesiod.so.*",
"libpcprofile.so.*",
"libpthread.so.*",
"libresolv.so.*",
"librt.so.*",
"libSegFault.so.*",
"libthread_db.so.*",
"libutil.so.*",
# gcc -- this is required even with gcc-runtime, because e.g. libstdc++ depends on libgcc_s,
# but the binaries we copy from the compiler don't have an $ORIGIN rpath.
"libasan.so.*",
"libatomic.so.*",
"libcc1.so.*",
"libgcc_s.so.*",
"libgfortran.so.*",
"libgomp.so.*",
"libitm.so.*",
"liblsan.so.*",
"libquadmath.so.*",
"libssp.so.*",
"libstdc++.so.*",
"libtsan.so.*",
"libubsan.so.*",
# systemd
"libudev.so.*",
# cuda driver
"libcuda.so.*",
]
def is_compatible(parent: elf.ElfFile, child: elf.ElfFile) -> bool:
return (
child.elf_hdr.e_type == elf.ELF_CONSTANTS.ET_DYN
and parent.is_little_endian == child.is_little_endian
and parent.is_64_bit == child.is_64_bit
and parent.elf_hdr.e_machine == child.elf_hdr.e_machine
)
def candidate_matches(current_elf: elf.ElfFile, candidate_path: bytes) -> bool:
try:
with open(candidate_path, "rb") as g:
return is_compatible(current_elf, elf.parse_elf(g))
except (OSError, elf.ElfParsingError):
return False
class Problem:
def __init__(
self, resolved: Dict[bytes, bytes], unresolved: List[bytes], relative_rpaths: List[bytes]
) -> None:
self.resolved = resolved
self.unresolved = unresolved
self.relative_rpaths = relative_rpaths
class ResolveSharedElfLibDepsVisitor(BaseDirectoryVisitor):
def __init__(self, allow_unresolved_patterns: List[str]) -> None:
self.problems: Dict[str, Problem] = {}
self._allow_unresolved_regex = re.compile(
"|".join(fnmatch.translate(x) for x in allow_unresolved_patterns)
)
def allow_unresolved(self, needed: bytes) -> bool:
try:
name = needed.decode("utf-8")
except UnicodeDecodeError:
return False
return bool(self._allow_unresolved_regex.match(name))
def visit_file(self, root: str, rel_path: str, depth: int) -> None:
# We work with byte strings for paths.
path = os.path.join(root, rel_path).encode("utf-8")
# For $ORIGIN interpolation: should not have trailing dir separator.
origin = os.path.dirname(path)
# Retrieve the needed libs + rpaths.
try:
with open(path, "rb") as f:
parsed_elf = elf.parse_elf(f, interpreter=False, dynamic_section=True)
except (OSError, elf.ElfParsingError):
# Not dealing with an invalid ELF file.
return
# If there's no needed libs all is good
if not parsed_elf.has_needed:
return
# Get the needed libs and rpaths (notice: byte strings)
# Don't force an encoding cause paths are just a bag of bytes.
needed_libs = parsed_elf.dt_needed_strs
rpaths = parsed_elf.dt_rpath_str.split(b":") if parsed_elf.has_rpath else []
# We only interpolate $ORIGIN, not $LIB and $PLATFORM, they're not really
# supported in general. Also remove empty paths.
rpaths = [x.replace(b"$ORIGIN", origin) for x in rpaths if x]
# Do not allow relative rpaths (they are relative to the current working directory)
rpaths, relative_rpaths = stable_partition(rpaths, os.path.isabs)
# If there's a / in the needed lib, it's opened directly, otherwise it needs
# a search.
direct_libs, search_libs = stable_partition(needed_libs, lambda x: b"/" in x)
# Do not allow relative paths in direct libs (they are relative to the current working
# directory)
direct_libs, unresolved = stable_partition(direct_libs, os.path.isabs)
resolved: Dict[bytes, bytes] = {}
for lib in search_libs:
if self.allow_unresolved(lib):
continue
for rpath in rpaths:
candidate = os.path.join(rpath, lib)
if candidate_matches(parsed_elf, candidate):
resolved[lib] = candidate
break
else:
unresolved.append(lib)
# Check if directly opened libs are compatible
for lib in direct_libs:
if candidate_matches(parsed_elf, lib):
resolved[lib] = lib
else:
unresolved.append(lib)
if unresolved or relative_rpaths:
self.problems[rel_path] = Problem(resolved, unresolved, relative_rpaths)
def visit_symlinked_file(self, root: str, rel_path: str, depth: int) -> None:
pass
def before_visit_dir(self, root: str, rel_path: str, depth: int) -> bool:
# There can be binaries in .spack/test which shouldn't be checked.
if rel_path == ".spack":
return False
return True
def before_visit_symlinked_dir(self, root: str, rel_path: str, depth: int) -> bool:
return False
class CannotLocateSharedLibraries(spack.error.SpackError):
pass
def maybe_decode(byte_str: bytes) -> Union[str, bytes]:
try:
return byte_str.decode("utf-8")
except UnicodeDecodeError:
return byte_str
def post_install(spec, explicit):
@@ -20,23 +204,36 @@ def post_install(spec, explicit):
if policy == "ignore" or spec.external or spec.platform not in ("linux", "freebsd"):
return
visitor = spack.verify_libraries.ResolveSharedElfLibDepsVisitor(
[*spack.verify_libraries.ALLOW_UNRESOLVED, *spec.package.unresolved_libraries]
visitor = ResolveSharedElfLibDepsVisitor(
[*ALLOW_UNRESOLVED, *spec.package.unresolved_libraries]
)
visit_directory_tree(spec.prefix, visitor)
# All good?
if not visitor.problems:
return
output = io.StringIO("not all executables and libraries can resolve their dependencies:\n")
visitor.write(output)
# For now just list the issues (print it in ldd style, except we don't recurse)
output = io.StringIO()
output.write("not all executables and libraries can resolve their dependencies:\n")
for path, problem in visitor.problems.items():
output.write(path)
output.write("\n")
for needed, full_path in problem.resolved.items():
output.write(" ")
if needed == full_path:
output.write(maybe_decode(needed))
else:
output.write(f"{maybe_decode(needed)} => {maybe_decode(full_path)}")
output.write("\n")
for not_found in problem.unresolved:
output.write(f" {maybe_decode(not_found)} => not found\n")
for relative_rpath in problem.relative_rpaths:
output.write(f" {maybe_decode(relative_rpath)} => relative rpath\n")
message = output.getvalue().strip()
if policy == "error":
raise CannotLocateSharedLibraries(message)
tty.warn(message)
class CannotLocateSharedLibraries(spack.error.SpackError):
pass
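
The allow list above is applied by translating each fnmatch-style pattern into a regular expression and matching needed-library names against the combined alternation, as allow_unresolved() does. A small standalone sketch of that mechanism (patterns shortened for illustration):

import fnmatch
import re

patterns = ["libc.so.*", "ld-linux*.so.*"]
allow_unresolved = re.compile("|".join(fnmatch.translate(p) for p in patterns))

bool(allow_unresolved.match("libc.so.6"))    # True  -> may stay unresolved
bool(allow_unresolved.match("libfoo.so.1"))  # False -> reported as a problem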

View File

@@ -166,7 +166,7 @@ def filter_shebangs_in_directory(directory, filenames=None):
# Only look at executable, non-symlink files.
try:
st = os.lstat(path)
except OSError:
except (IOError, OSError):
continue
if stat.S_ISLNK(st.st_mode) or stat.S_ISDIR(st.st_mode) or not st.st_mode & is_exe:

View File

@@ -566,7 +566,7 @@ def copy_test_files(pkg: Pb, test_spec: spack.spec.Spec):
# copy test data into test stage data dir
try:
pkg_cls = spack.repo.PATH.get_pkg_class(test_spec.fullname)
pkg_cls = test_spec.package_class
except spack.repo.UnknownPackageError:
tty.debug(f"{test_spec.name}: skipping test data copy since no package class found")
return
@@ -623,7 +623,7 @@ def test_functions(
vpkgs = virtuals(pkg)
for vname in vpkgs:
try:
classes.append(spack.repo.PATH.get_pkg_class(vname))
classes.append((Spec(vname)).package_class)
except spack.repo.UnknownPackageError:
tty.debug(f"{vname}: virtual does not appear to have a package file")
@@ -668,7 +668,7 @@ def process_test_parts(pkg: Pb, test_specs: List[spack.spec.Spec], verbose: bool
# grab test functions associated with the spec, which may be virtual
try:
tests = test_functions(spack.repo.PATH.get_pkg_class(spec.fullname))
tests = test_functions(spec.package_class)
except spack.repo.UnknownPackageError:
# Some virtuals don't have a package so we don't want to report
# them as not having tests when that isn't appropriate.

View File

@@ -814,7 +814,7 @@ def get_depflags(self, pkg: "spack.package_base.PackageBase") -> int:
# Include build dependencies if pkg is going to be built from sources, or
# if build deps are explicitly requested.
if include_build_deps or not (
cache_only or pkg.spec.installed and pkg.spec.dag_hash() not in self.overwrite
cache_only or pkg.spec.installed and not pkg.spec.dag_hash() in self.overwrite
):
depflag |= dt.BUILD
if self.run_tests(pkg):
@@ -2436,7 +2436,11 @@ def _real_install(self) -> None:
# DEBUGGING TIP - to debug this section, insert an IPython
# embed here, and run the sections below without log capture
log_contextmanager = log_output(
log_file, self.echo, True, filter_fn=self.filter_fn
log_file,
self.echo,
True,
env=self.unmodified_env,
filter_fn=self.filter_fn,
)
with log_contextmanager as logger:

View File

@@ -14,6 +14,7 @@
import io
import operator
import os
import os.path
import pstats
import re
import shlex
@@ -47,8 +48,6 @@
import spack.util.environment
import spack.util.lock
from .enums import ConfigScopePriority
#: names of profile statistics
stat_names = pstats.Stats.sort_arg_dict_default
@@ -165,7 +164,7 @@ def format_help_sections(self, level):
# lazily add all commands to the parser when needed.
add_all_commands(self)
# Print help on subcommands in neatly formatted sections.
"""Print help on subcommands in neatly formatted sections."""
formatter = self._get_formatter()
# Create a list of subcommand actions. Argparse internals are nasty!
@@ -730,7 +729,7 @@ def _compatible_sys_types():
with the current host.
"""
host_platform = spack.platforms.host()
host_os = str(host_platform.default_operating_system())
host_os = str(host_platform.operating_system("default_os"))
host_target = archspec.cpu.host()
compatible_targets = [host_target] + host_target.ancestors
@@ -874,19 +873,14 @@ def add_command_line_scopes(
scopes = ev.environment_path_scopes(name, path)
if scopes is None:
if os.path.isdir(path): # directory with config files
cfg.push_scope(
spack.config.DirectoryConfigScope(name, path, writable=False),
priority=ConfigScopePriority.CUSTOM,
)
spack.config._add_platform_scope(
cfg, name, path, priority=ConfigScopePriority.CUSTOM, writable=False
)
cfg.push_scope(spack.config.DirectoryConfigScope(name, path, writable=False))
spack.config._add_platform_scope(cfg, name, path, writable=False)
continue
else:
raise spack.error.ConfigError(f"Invalid configuration scope: {path}")
for scope in scopes:
cfg.push_scope(scope, priority=ConfigScopePriority.CUSTOM)
cfg.push_scope(scope)
def _main(argv=None):
@@ -959,9 +953,7 @@ def _main(argv=None):
# Push scopes from the command line last
if args.config_scopes:
add_command_line_scopes(spack.config.CONFIG, args.config_scopes)
spack.config.CONFIG.push_scope(
spack.config.InternalConfigScope("command_line"), priority=ConfigScopePriority.COMMAND_LINE
)
spack.config.CONFIG.push_scope(spack.config.InternalConfigScope("command_line"))
setup_main_options(args)
# ------------------------------------------------------------------------
@@ -1007,7 +999,6 @@ def finish_parse_and_run(parser, cmd_name, main_args, env_format_error):
args, unknown = parser.parse_known_args(main_args.command)
# we need to inherit verbose since the install command checks for it
args.verbose = main_args.verbose
args.lines = main_args.lines
# Now that we know what command this is and what its args are, determine
# whether we can continue with a bad environment and raise if not.

View File

@@ -2,6 +2,7 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os
import os.path
from typing import Optional
import llnl.url

View File

@@ -64,7 +64,7 @@ def from_local_path(path: str):
@staticmethod
def from_url(url: str):
"""Create an anonymous mirror by URL. This method validates the URL."""
if urllib.parse.urlparse(url).scheme not in supported_url_schemes:
if not urllib.parse.urlparse(url).scheme in supported_url_schemes:
raise ValueError(
f'"{url}" is not a valid mirror URL. '
f"Scheme must be one of {supported_url_schemes}."

View File

@@ -2,6 +2,7 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os
import os.path
import traceback
import llnl.util.tty as tty

View File

@@ -31,7 +31,7 @@
import copy
import datetime
import inspect
import os
import os.path
import re
import string
from typing import List, Optional
@@ -330,17 +330,18 @@ class BaseConfiguration:
default_projections = {"all": "{name}/{version}-{compiler.name}-{compiler.version}"}
def __init__(self, spec: spack.spec.Spec, module_set_name: str, explicit: bool) -> None:
# Module where type(self) is defined
m = inspect.getmodule(self)
assert m is not None # make mypy happy
self.module = m
# Spec for which we want to generate a module file
self.spec = spec
self.name = module_set_name
self.explicit = explicit
# Dictionary of configuration options that should be applied to the spec
# Dictionary of configuration options that should be applied
# to the spec
self.conf = merge_config_rules(self.module.configuration(self.name), self.spec)
@property
def module(self):
return inspect.getmodule(self)
@property
def projections(self):
"""Projection from specs to module names"""
@@ -774,6 +775,10 @@ def __init__(
) -> None:
self.spec = spec
# This class is meant to be derived. Get the module of the
# actual writer.
self.module = inspect.getmodule(self)
assert self.module is not None # make mypy happy
m = self.module
# Create the triplet of configuration/layout/context
@@ -811,10 +816,6 @@ def __init__(
name = type(self).__name__
raise ModulercHeaderNotDefined(msg.format(name))
@property
def module(self):
return inspect.getmodule(self)
def _get_template(self):
"""Gets the template that will be rendered for this spec."""
# Get templates and put them in the order of importance:

View File

@@ -4,7 +4,7 @@
import collections
import itertools
import os
import os.path
from typing import Dict, List, Optional, Tuple
import llnl.util.filesystem as fs
@@ -209,7 +209,7 @@ def provides(self):
# All the other tokens in the hierarchy must be virtual dependencies
for x in self.hierarchy_tokens:
if self.spec.package.provides(x):
provides[x] = self.spec
provides[x] = self.spec[x]
return provides
@property

View File

@@ -5,7 +5,7 @@
"""This module implements the classes necessary to generate Tcl
non-hierarchical modules.
"""
import os
import os.path
from typing import Dict, Optional, Tuple
import spack.config

View File

@@ -7,7 +7,6 @@
import base64
import json
import re
import socket
import time
import urllib.error
import urllib.parse
@@ -383,7 +382,6 @@ def create_opener():
"""Create an opener that can handle OCI authentication."""
opener = urllib.request.OpenerDirector()
for handler in [
urllib.request.ProxyHandler(),
urllib.request.UnknownHandler(),
urllib.request.HTTPSHandler(context=spack.util.web.ssl_create_default_context()),
spack.util.web.SpackHTTPDefaultErrorHandler(),
@@ -412,7 +410,7 @@ def wrapper(*args, **kwargs):
for i in range(retries):
try:
return f(*args, **kwargs)
except OSError as e:
except (urllib.error.URLError, TimeoutError) as e:
# Retry on internal server errors, and rate limit errors
# Potentially this could take into account the Retry-After header
# if registries support it
@@ -422,10 +420,9 @@ def wrapper(*args, **kwargs):
and (500 <= e.code < 600 or e.code == 429)
)
or (
isinstance(e, urllib.error.URLError)
and isinstance(e.reason, socket.timeout)
isinstance(e, urllib.error.URLError) and isinstance(e.reason, TimeoutError)
)
or isinstance(e, socket.timeout)
or isinstance(e, TimeoutError)
):
# Exponential backoff
sleep(2**i)
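
The wrapper above retries transient registry failures with exponential backoff. A generic sketch of the same idea, with a deliberately simplified notion of which errors are retryable (it does not inspect HTTP status codes the way the real handler does):

import functools
import urllib.error
from time import sleep

def with_retries(f, retries: int = 5):
    @functools.wraps(f)
    def wrapper(*args, **kwargs):
        for i in range(retries):
            try:
                return f(*args, **kwargs)
            except (urllib.error.URLError, TimeoutError):
                if i + 1 == retries:
                    raise
                sleep(2**i)  # back off 1s, 2s, 4s, ...
    return wrapper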

View File

@@ -2,64 +2,31 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
# flake8: noqa: F401, E402
"""spack.package defines the public API for Spack packages, by re-exporting useful symbols from
other modules. Packages should import this module, instead of importing from spack.* directly
to ensure forward compatibility with future versions of Spack."""
# flake8: noqa: F401
"""spack.util.package is a set of useful build tools and directives for packages.
Everything in this module is automatically imported into Spack package files.
"""
from os import chdir, environ, getcwd, makedirs, mkdir, remove, removedirs
from shutil import move, rmtree
from spack.error import InstallError, NoHeadersError, NoLibrariesError
# Emulate some shell commands for convenience
env = environ
cd = chdir
pwd = getcwd
# import most common types used in packages
from typing import Dict, List, Optional
class tty:
import llnl.util.tty as _tty
debug = _tty.debug
error = _tty.error
info = _tty.info
msg = _tty.msg
warn = _tty.warn
from llnl.util.filesystem import (
FileFilter,
FileList,
HeaderList,
LibraryList,
ancestor,
can_access,
change_sed_delimiter,
copy,
copy_tree,
filter_file,
find,
find_all_headers,
find_first,
find_headers,
find_libraries,
find_system_libraries,
force_remove,
force_symlink,
install,
install_tree,
is_exe,
join_path,
keep_modification_time,
library_extensions,
mkdirp,
remove_directory_contents,
remove_linked_tree,
rename,
set_executable,
set_install_permissions,
touch,
working_dir,
)
import llnl.util.filesystem
from llnl.util.filesystem import *
from llnl.util.symlink import symlink
import spack.util.executable
# These props will be overridden when the build env is set up.
from spack.build_environment import MakeExecutable
from spack.build_systems.aspell_dict import AspellDictPackage
from spack.build_systems.autotools import AutotoolsPackage
@@ -109,24 +76,7 @@ class tty:
from spack.builder import BaseBuilder
from spack.config import determine_number_of_jobs
from spack.deptypes import ALL_TYPES as all_deptypes
from spack.directives import (
build_system,
can_splice,
conditional,
conflicts,
depends_on,
extends,
license,
maintainers,
patch,
provides,
redistribute,
requires,
resource,
variant,
version,
)
from spack.error import InstallError, NoHeadersError, NoLibrariesError
from spack.directives import *
from spack.install_test import (
SkipTest,
cache_extra_test_sources,
@@ -136,28 +86,26 @@ class tty:
install_test_root,
test_part,
)
from spack.installer import ExternalPackageError, InstallLockError, UpstreamPackageError
from spack.mixins import filter_compiler_wrappers
from spack.multimethod import default_args, when
from spack.package_base import build_system_flags, env_flags, inject_flags, on_package_attributes
from spack.package_completions import (
bash_completion_path,
fish_completion_path,
zsh_completion_path,
from spack.package_base import (
DependencyConflictError,
build_system_flags,
env_flags,
flatten_dependencies,
inject_flags,
install_dependency_symlinks,
on_package_attributes,
)
from spack.package_completions import *
from spack.phase_callbacks import run_after, run_before
from spack.spec import Spec
from spack.util.environment import EnvironmentModifications
from spack.util.executable import Executable, ProcessError, which, which_string
from spack.spec import InvalidSpecDetected, Spec
from spack.util.executable import *
from spack.util.filesystem import fix_darwin_install_name
from spack.util.prefix import Prefix
from spack.variant import any_combination_of, auto_or_any_combination_of, disjoint_sets
from spack.version import Version, ver
# Emulate some shell commands for convenience
env = environ
cd = chdir
pwd = getcwd
# These are just here for editor support; they may be set when the build env is set up.
configure: Executable
make_jobs: int
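
Since this file defines the public surface that package.py files are supposed to import from, a minimal hypothetical package using only that surface may help; the name, URL, and checksum below are placeholders, not a real package.

# hypothetical var/spack/repos/.../example/package.py
from spack.package import *


class Example(AutotoolsPackage):
    """Illustrative package that only touches the spack.package API."""

    homepage = "https://example.com"
    url = "https://example.com/example-1.0.tar.gz"  # placeholder

    license("Apache-2.0 OR MIT")

    version("1.0", sha256="0" * 64)  # placeholder checksum

    depends_on("zlib-ng", type="link")

    def configure_args(self):
        return ["--enable-shared"]
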

View File

@@ -22,6 +22,7 @@
import textwrap
import time
import traceback
import typing
from typing import Any, Callable, Dict, Iterable, List, Optional, Set, Tuple, Type, TypeVar, Union
from typing_extensions import Literal
@@ -29,6 +30,7 @@
import llnl.util.filesystem as fsys
import llnl.util.tty as tty
from llnl.util.lang import classproperty, memoized
from llnl.util.link_tree import LinkTree
import spack.compilers
import spack.config
@@ -65,6 +67,10 @@
]
FLAG_HANDLER_TYPE = Callable[[str, Iterable[str]], FLAG_HANDLER_RETURN_TYPE]
"""Allowed URL schemes for spack packages."""
_ALLOWED_URL_SCHEMES = ["http", "https", "ftp", "file", "git"]
#: Filename for the Spack build/install log.
_spack_build_logfile = "spack-build-out.txt"
@@ -125,10 +131,9 @@ def windows_establish_runtime_linkage(self):
# Spack should in general not modify things it has not installed
# we can reasonably expect externals to have their link interface properly established
if sys.platform == "win32" and not self.spec.external:
win_rpath = fsys.WindowsSimulatedRPath(self)
win_rpath.add_library_dependent(*self.win_add_library_dependent())
win_rpath.add_rpath(*self.win_add_rpath())
win_rpath.establish_link()
self.win_rpath.add_library_dependent(*self.win_add_library_dependent())
self.win_rpath.add_rpath(*self.win_add_rpath())
self.win_rpath.establish_link()
#: Registers which are the detectable packages, by repo and package name
@@ -697,6 +702,9 @@ class PackageBase(WindowsRPath, PackageViewMixin, metaclass=PackageMeta):
#: Verbosity level, preserved across installs.
_verbose = None
#: index of patches by sha256 sum, built lazily
_patches_by_hash = None
#: Package homepage where users can find more information about the package
homepage: Optional[str] = None
@@ -710,6 +718,19 @@ class PackageBase(WindowsRPath, PackageViewMixin, metaclass=PackageMeta):
#: Do not include @ here in order not to unnecessarily ping the users.
maintainers: List[str] = []
#: List of attributes to be excluded from a package's hash.
metadata_attrs = [
"homepage",
"url",
"urls",
"list_url",
"extendable",
"parallel",
"make_jobs",
"maintainers",
"tags",
]
#: Set to ``True`` to indicate the stand-alone test requires a compiler.
#: It is used to ensure a compiler and build dependencies like 'cmake'
#: are available to build a custom test code.
@@ -743,6 +764,7 @@ def __init__(self, spec):
# Set up timing variables
self._fetch_time = 0.0
self.win_rpath = fsys.WindowsSimulatedRPath(self)
super().__init__()
def __getitem__(self, key: str) -> "PackageBase":
@@ -808,6 +830,104 @@ def get_variant(self, name: str) -> spack.variant.Variant:
except StopIteration:
raise ValueError(f"No variant '{name}' on spec: {self.spec}")
@classmethod
def possible_dependencies(
cls,
transitive: bool = True,
expand_virtuals: bool = True,
depflag: dt.DepFlag = dt.ALL,
visited: Optional[dict] = None,
missing: Optional[dict] = None,
virtuals: Optional[set] = None,
) -> Dict[str, Set[str]]:
"""Return dict of possible dependencies of this package.
Args:
transitive (bool or None): return all transitive dependencies if
True, only direct dependencies if False (default True).
expand_virtuals (bool or None): expand virtual dependencies into
all possible implementations (default True)
depflag: dependency types to consider
visited (dict or None): dict of names of dependencies visited so
far, mapped to their immediate dependencies' names.
missing (dict or None): dict to populate with packages and their
*missing* dependencies.
virtuals (set): if provided, populate with virtuals seen so far.
Returns:
(dict): dictionary mapping dependency names to *their*
immediate dependencies
Each item in the returned dictionary maps a (potentially
transitive) dependency of this package to its possible
*immediate* dependencies. If ``expand_virtuals`` is ``False``,
virtual package names will be inserted as keys mapped to empty
sets of dependencies. Virtuals, if not expanded, are treated as
though they have no immediate dependencies.
Missing dependencies by default are ignored, but if a
missing dict is provided, it will be populated with package names
mapped to any dependencies they have that are in no
repositories. This is only populated if transitive is True.
Note: the returned dict *includes* the package itself.
"""
visited = {} if visited is None else visited
missing = {} if missing is None else missing
visited.setdefault(cls.name, set())
for name, conditions in cls.dependencies_by_name(when=True).items():
# check whether this dependency could be of the type asked for
depflag_union = 0
for deplist in conditions.values():
for dep in deplist:
depflag_union |= dep.depflag
if not (depflag & depflag_union):
continue
# expand virtuals if enabled, otherwise just stop at virtuals
if spack.repo.PATH.is_virtual(name):
if virtuals is not None:
virtuals.add(name)
if expand_virtuals:
providers = spack.repo.PATH.providers_for(name)
dep_names = [spec.name for spec in providers]
else:
visited.setdefault(cls.name, set()).add(name)
visited.setdefault(name, set())
continue
else:
dep_names = [name]
# add the dependency names to the visited dict
visited.setdefault(cls.name, set()).update(set(dep_names))
# recursively traverse dependencies
for dep_name in dep_names:
if dep_name in visited:
continue
visited.setdefault(dep_name, set())
# skip the rest if not transitive
if not transitive:
continue
try:
dep_cls = spack.repo.PATH.get_pkg_class(dep_name)
except spack.repo.UnknownPackageError:
# log unknown packages
missing.setdefault(cls.name, set()).add(dep_name)
continue
dep_cls.possible_dependencies(
transitive, expand_virtuals, depflag, visited, missing, virtuals
)
return visited
@classproperty
def package_dir(cls):
"""Directory where the package.py file lives."""
@@ -2172,6 +2292,85 @@ def rpath_args(self):
build_system_flags = PackageBase.build_system_flags
def install_dependency_symlinks(pkg, spec, prefix):
"""
Execute a dummy install and flatten dependencies.
This routine can be used in a ``package.py`` definition by setting
``install = install_dependency_symlinks``.
This feature comes in handy for creating a common location for the installation of third-party libraries.
"""
flatten_dependencies(spec, prefix)
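
A sketch of the usage the docstring above describes, setting install on a package class; the package, URL, and checksum are invented, and whether this helper is available depends on the Spack version in question.

from spack.package import *


class ExampleBundle(Package):  # hypothetical package
    """Gathers its dependencies under a single prefix via symlinks."""

    homepage = "https://example.com"
    url = "https://example.com/example-bundle-1.0.tar.gz"  # placeholder

    version("1.0", sha256="0" * 64)  # placeholder checksum

    depends_on("zlib-ng")
    depends_on("bzip2")

    # per the docstring: replace the install phase with the symlink helper
    install = install_dependency_symlinks
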
def use_cray_compiler_names():
"""Compiler names for builds that rely on cray compiler names."""
os.environ["CC"] = "cc"
os.environ["CXX"] = "CC"
os.environ["FC"] = "ftn"
os.environ["F77"] = "ftn"
def flatten_dependencies(spec, flat_dir):
"""Make each dependency of spec present in dir via symlink."""
for dep in spec.traverse(root=False):
name = dep.name
dep_path = spack.store.STORE.layout.path_for_spec(dep)
dep_files = LinkTree(dep_path)
os.mkdir(flat_dir + "/" + name)
conflict = dep_files.find_conflict(flat_dir + "/" + name)
if conflict:
raise DependencyConflictError(conflict)
dep_files.merge(flat_dir + "/" + name)
def possible_dependencies(
*pkg_or_spec: Union[str, spack.spec.Spec, typing.Type[PackageBase]],
transitive: bool = True,
expand_virtuals: bool = True,
depflag: dt.DepFlag = dt.ALL,
missing: Optional[dict] = None,
virtuals: Optional[set] = None,
) -> Dict[str, Set[str]]:
"""Get the possible dependencies of a number of packages.
See ``PackageBase.possible_dependencies`` for details.
"""
packages = []
for pos in pkg_or_spec:
if isinstance(pos, PackageMeta) and issubclass(pos, PackageBase):
packages.append(pos)
continue
if not isinstance(pos, spack.spec.Spec):
pos = spack.spec.Spec(pos)
if spack.repo.PATH.is_virtual(pos.name):
packages.extend(p.package_class for p in spack.repo.PATH.providers_for(pos.name))
continue
else:
packages.append(pos.package_class)
visited: Dict[str, Set[str]] = {}
for pkg in packages:
pkg.possible_dependencies(
visited=visited,
transitive=transitive,
expand_virtuals=expand_virtuals,
depflag=depflag,
missing=missing,
virtuals=virtuals,
)
return visited
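
A short usage sketch for the module-level helper above, assuming it is importable from spack.package_base as shown here and run inside a Spack session; the package name is just an example.

from spack.package_base import possible_dependencies

# map every (potentially transitive) dependency of cmake to its own
# immediate dependencies; virtuals are expanded into their providers
deps = possible_dependencies("cmake", transitive=True, expand_virtuals=True)
for name, immediate in sorted(deps.items()):
    print(f"{name} -> {', '.join(sorted(immediate)) or '(none)'}")
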
def deprecated_version(pkg: PackageBase, version: Union[str, StandardVersion]) -> bool:
"""Return True iff the version is deprecated.

View File

@@ -4,6 +4,7 @@
import hashlib
import os
import os.path
import pathlib
import sys
from typing import Any, Dict, Optional, Tuple, Type, Union
@@ -83,7 +84,6 @@ def __init__(
level: int,
working_dir: str,
reverse: bool = False,
ordering_key: Optional[Tuple[str, int]] = None,
) -> None:
"""Initialize a new Patch instance.
@@ -93,7 +93,6 @@ def __init__(
level: patch level
working_dir: relative path *within* the stage to change to
reverse: reverse the patch
ordering_key: key used to ensure patches are applied in a consistent order
"""
# validate level (must be an integer >= 0)
if not isinstance(level, int) or not level >= 0:
@@ -107,13 +106,6 @@ def __init__(
self.working_dir = working_dir
self.reverse = reverse
# The ordering key is passed when executing package.py directives, and is only relevant
# after a solve to build concrete specs with consistently ordered patches. For concrete
# specs read from a file, we add patches in the order of its patches variants and the
# ordering_key is irrelevant. In that case, use a default value so we don't need to branch
# on whether ordering_key is None where it's used, just to make static analysis happy.
self.ordering_key: Tuple[str, int] = ordering_key or ("", 0)
def apply(self, stage: "spack.stage.Stage") -> None:
"""Apply a patch to source in a stage.
@@ -211,8 +203,9 @@ def __init__(
msg += "package %s.%s does not exist." % (pkg.namespace, pkg.name)
raise ValueError(msg)
super().__init__(pkg, abs_path, level, working_dir, reverse, ordering_key)
super().__init__(pkg, abs_path, level, working_dir, reverse)
self.path = abs_path
self.ordering_key = ordering_key
@property
def sha256(self) -> str:
@@ -274,11 +267,13 @@ def __init__(
archive_sha256: sha256 sum of the *archive*, if the patch is compressed
(only required for compressed URL patches)
"""
super().__init__(pkg, url, level, working_dir, reverse, ordering_key)
super().__init__(pkg, url, level, working_dir, reverse)
self.url = url
self._stage: Optional["spack.stage.Stage"] = None
self.ordering_key = ordering_key
if allowed_archive(self.url) and not archive_sha256:
raise spack.error.PatchDirectiveError(
"Compressed patches require 'archive_sha256' "

View File

@@ -108,8 +108,6 @@ def _get_user_cache_path():
#: transient caches for Spack data (virtual cache, patch sha256 lookup, etc.)
default_misc_cache_path = os.path.join(user_cache_path, "cache")
#: concretization cache for Spack concretizations
default_conc_cache_path = os.path.join(default_misc_cache_path, "concretization")
# Below paths pull configuration from the host environment.
#

View File

@@ -52,7 +52,8 @@ def use_platform(new_platform):
import spack.config
assert isinstance(new_platform, Platform), f'"{new_platform}" must be an instance of Platform'
msg = '"{0}" must be an instance of Platform'
assert isinstance(new_platform, Platform), msg.format(new_platform)
original_host_fn = host

View File

@@ -1,22 +1,42 @@
# Copyright Spack Project Developers. See COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import warnings
from typing import Optional
import archspec.cpu
import llnl.util.lang
import spack.error
class NoPlatformError(spack.error.SpackError):
def __init__(self):
msg = "Could not determine a platform for this machine"
super().__init__(msg)
@llnl.util.lang.lazy_lexicographic_ordering
class Platform:
"""Platform is an abstract class extended by subclasses.
To add a new type of platform (such as cray_xe), create a subclass and set all the
class attributes such as priority, front_target, back_target, front_os, back_os.
Platform also contains a priority class attribute. A lower number signifies higher
priority. These numbers are arbitrarily set and can be changed, though there is
rarely a need to unless a new platform is added and the user wants it to be
detected first.
Targets are created inside the platform subclasses. Most architectures (like linux
and darwin) have only one target family (x86_64), but in the case of Cray
machines there are both frontend and backend processors. The user can specify
which targets are present on the front-end and back-end architecture.
Depending on the platform, operating systems are either autodetected or are
set. The user can set the frontend and backend operating system via the class
attributes front_os and back_os. The operating system will be responsible for
compiler detection.
"""
# Subclass sets number. Controls detection order
@@ -25,72 +45,82 @@ class Platform:
#: binary formats used on this platform; used by relocation logic
binary_formats = ["elf"]
default: str
default_os: str
front_end: Optional[str] = None
back_end: Optional[str] = None
default: Optional[str] = None # The default back end target.
front_os: Optional[str] = None
back_os: Optional[str] = None
default_os: Optional[str] = None
reserved_targets = ["default_target", "frontend", "fe", "backend", "be"]
reserved_oss = ["default_os", "frontend", "fe", "backend", "be"]
deprecated_names = ["frontend", "fe", "backend", "be"]
def __init__(self, name):
self.targets = {}
self.operating_sys = {}
self.name = name
self._init_targets()
def add_target(self, name: str, target: archspec.cpu.Microarchitecture) -> None:
"""Used by the platform specific subclass to list available targets.
Raises an error if the platform specifies a name
that is reserved by spack as an alias.
"""
if name in Platform.reserved_targets:
msg = f"{name} is a spack reserved alias and cannot be the name of a target"
raise ValueError(msg)
msg = "{0} is a spack reserved alias and cannot be the name of a target"
raise ValueError(msg.format(name))
self.targets[name] = target
def _init_targets(self):
self.default = archspec.cpu.host().name
def _add_archspec_targets(self):
for name, microarchitecture in archspec.cpu.TARGETS.items():
self.add_target(name, microarchitecture)
def target(self, name):
"""This is a getter method for the target dictionary
that handles defaulting based on the values provided by default,
front-end, and back-end. This can be overridden
by a subclass that wants to provide further aliasing options.
"""
# TODO: Check if we can avoid using strings here
name = str(name)
if name in Platform.deprecated_names:
warnings.warn(f"target={name} is deprecated, use target={self.default} instead")
if name in Platform.reserved_targets:
if name == "default_target":
name = self.default
elif name == "frontend" or name == "fe":
name = self.front_end
elif name == "backend" or name == "be":
name = self.back_end
return self.targets.get(name, None)
def add_operating_system(self, name, os_class):
if name in Platform.reserved_oss + Platform.deprecated_names:
msg = f"{name} is a spack reserved alias and cannot be the name of an OS"
raise ValueError(msg)
"""Add the operating_system class object into the
platform.operating_sys dictionary.
"""
if name in Platform.reserved_oss:
msg = "{0} is a spack reserved alias and cannot be the name of an OS"
raise ValueError(msg.format(name))
self.operating_sys[name] = os_class
def default_target(self):
return self.target(self.default)
def default_operating_system(self):
return self.operating_system(self.default_os)
def operating_system(self, name):
if name in Platform.deprecated_names:
warnings.warn(f"os={name} is deprecated, use os={self.default_os} instead")
if name in Platform.reserved_oss:
if name == "default_os":
name = self.default_os
if name == "frontend" or name == "fe":
name = self.front_os
if name == "backend" or name == "be":
name = self.back_os
return self.operating_sys.get(name, None)
def setup_platform_environment(self, pkg, env):
"""Platform-specific build environment modifications.
This method is meant to be overridden by subclasses when needed.
"""Subclass can override this method if it requires any
platform-specific build environment modifications.
"""
pass
@classmethod
def detect(cls):
"""Returns True if the host platform is detected to be the current Platform class,
False otherwise.
"""Return True if the the host platform is detected to be the current
Platform class, False otherwise.
Derived classes are responsible for implementing this method.
"""
@@ -105,7 +135,11 @@ def __str__(self):
def _cmp_iter(self):
yield self.name
yield self.default
yield self.front_end
yield self.back_end
yield self.default_os
yield self.front_os
yield self.back_os
def targets():
for t in sorted(self.targets.values()):

Some files were not shown because too many files have changed in this diff.