Compare commits


3 Commits

Author           SHA1        Message                                                           Date
Harmen Stoppels  7aaf51c36e  show directive properly                                           2025-02-10 10:17:49 +01:00
Harmen Stoppels  908095e865  group by package                                                  2025-02-10 10:05:50 +01:00
Harmen Stoppels  f32a756800  audit.py: check for unsatisfiable when conditions of directives  2025-02-10 09:48:22 +01:00
1150 changed files with 17866 additions and 23545 deletions


@@ -9,7 +9,6 @@ on:
branches:
- develop
- releases/**
merge_group:
concurrency:
group: ci-${{github.ref}}-${{github.event.pull_request.number || github.run_number}}
@@ -26,17 +25,13 @@ jobs:
packages: ${{ steps.filter.outputs.packages }}
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
if: ${{ github.event_name == 'push' || github.event_name == 'merge_group' }}
if: ${{ github.event_name == 'push' }}
with:
fetch-depth: 0
# For pull requests it's not necessary to checkout the code
- uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36
id: filter
with:
# For merge group events, compare against the target branch (main)
base: ${{ github.event_name == 'merge_group' && github.event.merge_group.base_ref || '' }}
# For merge group events, use the merge group head ref
ref: ${{ github.event_name == 'merge_group' && github.event.merge_group.head_sha || github.ref }}
# See https://github.com/dorny/paths-filter/issues/56 for the syntax used below
# Don't run if we only modified packages in the
# built-in repository or documentation
@@ -81,11 +76,10 @@ jobs:
prechecks:
needs: [ changes ]
uses: ./.github/workflows/prechecks.yml
uses: ./.github/workflows/valid-style.yml
secrets: inherit
with:
with_coverage: ${{ needs.changes.outputs.core }}
with_packages: ${{ needs.changes.outputs.packages }}
import-check:
needs: [ changes ]
@@ -99,7 +93,7 @@ jobs:
- name: Success
run: |
if [ "${{ needs.prechecks.result }}" == "failure" ] || [ "${{ needs.prechecks.result }}" == "canceled" ]; then
echo "Unit tests failed."
echo "Unit tests failed."
exit 1
else
exit 0
@@ -107,7 +101,6 @@ jobs:
coverage:
needs: [ unit-tests, prechecks ]
if: ${{ needs.changes.outputs.core }}
uses: ./.github/workflows/coverage.yml
secrets: inherit
@@ -120,10 +113,10 @@ jobs:
- name: Status summary
run: |
if [ "${{ needs.unit-tests.result }}" == "failure" ] || [ "${{ needs.unit-tests.result }}" == "canceled" ]; then
echo "Unit tests failed."
echo "Unit tests failed."
exit 1
elif [ "${{ needs.bootstrap.result }}" == "failure" ] || [ "${{ needs.bootstrap.result }}" == "canceled" ]; then
echo "Bootstrap tests failed."
echo "Bootstrap tests failed."
exit 1
else
exit 0


@@ -39,7 +39,7 @@ jobs:
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
repository: haampie/circular-import-fighter
ref: 4cdb0bf15f04ab6b49041d5ef1bfd9644cce7f33
ref: e38bcd0aa46368e30648b61b7f0d8c1ca68aadff
path: circular-import-fighter
- name: Install dependencies
working-directory: circular-import-fighter


@@ -1,7 +1,7 @@
black==25.1.0
black==24.10.0
clingo==5.7.1
flake8==7.1.2
isort==6.0.1
mypy==1.15.0
types-six==1.17.0.20250304
flake8==7.1.1
isort==5.13.2
mypy==1.11.2
types-six==1.17.0.20241205
vermin==1.6.0


@@ -1,4 +1,4 @@
name: prechecks
name: style
on:
workflow_call:
@@ -6,9 +6,6 @@ on:
with_coverage:
required: true
type: string
with_packages:
required: true
type: string
concurrency:
group: style-${{github.ref}}-${{github.event.pull_request.number || github.run_number}}
@@ -33,7 +30,6 @@ jobs:
run: vermin --backport importlib --backport argparse --violations --backport typing -t=3.6- -vvv lib/spack/spack/ lib/spack/llnl/ bin/
- name: vermin (Repositories)
run: vermin --backport importlib --backport argparse --violations --backport typing -t=3.6- -vvv var/spack/repos
# Run style checks on the files that have been changed
style:
runs-on: ubuntu-latest
@@ -57,25 +53,12 @@ jobs:
- name: Run style tests
run: |
share/spack/qa/run-style-tests
audit:
uses: ./.github/workflows/audit.yaml
secrets: inherit
with:
with_coverage: ${{ inputs.with_coverage }}
python_version: '3.13'
verify-checksums:
if: ${{ inputs.with_packages == 'true' }}
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
with:
fetch-depth: 2
- name: Verify Added Checksums
run: |
bin/spack ci verify-versions HEAD^1 HEAD
# Check that spack can bootstrap the development environment on Python 3.6 - RHEL8
bootstrap-dev-rhel8:
runs-on: ubuntu-latest

.gitignore vendored

@@ -201,6 +201,7 @@ tramp
# Org-mode
.org-id-locations
*_archive
# flymake-mode
*_flymake.*


@@ -43,28 +43,6 @@ concretizer:
# (e.g. py-setuptools, cmake etc.)
# "full" (experimental): allows separation of the entire build-tool stack (e.g. the entire "cmake" subDAG)
strategy: minimal
# Maximum number of duplicates in a DAG, when using a strategy that allows duplicates. "default" is the
# number used if there isn't a more specific alternative
max_dupes:
default: 1
# Virtuals
c: 2
cxx: 2
fortran: 1
# Regular packages
cmake: 2
gmake: 2
python: 2
python-venv: 2
py-cython: 2
py-flit-core: 2
py-pip: 2
py-setuptools: 2
py-wheel: 2
xcb-proto: 2
# Compilers
gcc: 2
llvm: 2
# Option to specify compatibility between operating systems for reuse of compilers and packages
# Specified as a key: [list] where the key is the os that is being targeted, and the list contains the OS's
# it can reuse. Note this is a directional compatibility so mutual compatibility between two OS's
@@ -85,7 +63,3 @@ concretizer:
# Setting this to false yields unreproducible results, so we advise to use that value only
# for debugging purposes (e.g. check which constraints can help Spack concretize faster).
error_on_timeout: true
# Static analysis may reduce the concretization time by generating smaller ASP problems, in
# cases where requirements prevent part of the search space from being explored.
static_analysis: false


@@ -19,7 +19,7 @@ config:
install_tree:
root: $spack/opt/spack
projections:
all: "{architecture.platform}-{architecture.target}/{name}-{version}-{hash}"
all: "{architecture}/{compiler.name}-{compiler.version}/{name}-{version}-{hash}"
# install_tree can include an optional padded length (int or boolean)
# default is False (do not pad)
# if padded_length is True, Spack will pad as close to the system max path


@@ -15,11 +15,12 @@
# -------------------------------------------------------------------------
packages:
all:
compiler:
- apple-clang
- clang
- gcc
providers:
c: [apple-clang, llvm, gcc]
cxx: [apple-clang, llvm, gcc]
elf: [libelf]
fortran: [gcc]
fuse: [macfuse]
gl: [apple-gl]
glu: [apple-glu]
@@ -49,12 +50,3 @@ packages:
# although the version number used here isn't critical
- spec: apple-libuuid@1353.100.2
prefix: /Library/Developer/CommandLineTools/SDKs/MacOSX.sdk
c:
prefer:
- apple-clang
cxx:
prefer:
- apple-clang
fortran:
prefer:
- gcc


@@ -15,18 +15,19 @@
# -------------------------------------------------------------------------
packages:
all:
compiler: [gcc, clang, oneapi, xl, nag, fj, aocc]
providers:
awk: [gawk]
armci: [armcimpi]
blas: [openblas, amdblis]
c: [gcc, llvm, intel-oneapi-compilers]
cxx: [gcc, llvm, intel-oneapi-compilers]
c: [gcc]
cxx: [gcc]
D: [ldc]
daal: [intel-oneapi-daal]
elf: [elfutils]
fftw-api: [fftw, amdfftw]
flame: [libflame, amdlibflame]
fortran: [gcc, llvm, intel-oneapi-compilers]
fortran: [gcc]
fortran-rt: [gcc-runtime, intel-oneapi-runtime]
fuse: [libfuse]
gl: [glx, osmesa]


@@ -15,8 +15,8 @@
# -------------------------------------------------------------------------
packages:
all:
compiler:
- msvc
providers:
c : [msvc]
cxx: [msvc]
mpi: [msmpi]
gl: [wgl]


@@ -1761,24 +1761,19 @@ Verifying installations
The ``spack verify`` command can be used to verify the validity of
Spack-installed packages any time after installation.
^^^^^^^^^^^^^^^^^^^^^^^^^
``spack verify manifest``
^^^^^^^^^^^^^^^^^^^^^^^^^
At installation time, Spack creates a manifest of every file in the
installation prefix. For links, Spack tracks the mode, ownership, and
destination. For directories, Spack tracks the mode, and
ownership. For files, Spack tracks the mode, ownership, modification
time, hash, and size. The ``spack verify manifest`` command will check,
for every file in each package, whether any of those attributes have
changed. It will also check for newly added files or deleted files from
the installation prefix. Spack can either check all installed packages
time, hash, and size. The Spack verify command will check, for every
file in each package, whether any of those attributes have changed. It
will also check for newly added files or deleted files from the
installation prefix. Spack can either check all installed packages
using the ``-a,--all`` option, or accept specs listed on the command line to
verify.
The ``spack verify manifest`` command can also verify for individual files
that they haven't been altered since installation time. If the given file
The ``spack verify`` command can also verify for individual files that
they haven't been altered since installation time. If the given file
is not in a Spack installation prefix, Spack will report that it is
not owned by any package. To check individual files instead of specs,
use the ``-f,--files`` option.
@@ -1793,22 +1788,6 @@ check only local packages (as opposed to those used transparently from
``upstream`` spack instances) and the ``-j,--json`` option to output
machine-readable json data for any errors.
^^^^^^^^^^^^^^^^^^^^^^^^^^
``spack verify libraries``
^^^^^^^^^^^^^^^^^^^^^^^^^^
The ``spack verify libraries`` command can be used to verify that packages
do not have accidental system dependencies. This command scans the install
prefixes of packages for executables and shared libraries, and resolves
their needed libraries in their RPATHs. When needed libraries cannot be
located, an error is reported. This typically indicates that a package
was linked against a system library, instead of a library provided by
a Spack package.
This verification can also be enabled as a post-install hook by setting
``config:shared_linking:missing_library_policy`` to ``error`` or ``warn``
in :ref:`config.yaml <config-yaml>`.
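Illustration (not part of this diff): a minimal sketch of the setting's shape, written as a Python dict mirroring the ``config.yaml`` keys named above; the ``warn`` value is an arbitrary example choice:

    # Hypothetical config.yaml fragment enabling the post-install library
    # check, expressed as a plain dict for illustration.
    config = {"config": {"shared_linking": {"missing_library_policy": "warn"}}}
    assert config["config"]["shared_linking"]["missing_library_policy"] in ("warn", "error")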
-----------------------
Filesystem requirements
-----------------------


@@ -223,10 +223,6 @@ def setup(sphinx):
("py:class", "spack.compiler.CompilerCache"),
# TypeVar that is not handled correctly
("py:class", "llnl.util.lang.T"),
("py:class", "llnl.util.lang.KT"),
("py:class", "llnl.util.lang.VT"),
("py:obj", "llnl.util.lang.KT"),
("py:obj", "llnl.util.lang.VT"),
]
# The reST default role (used for this markup: `text`) to use for all documents.


@@ -125,8 +125,6 @@ are stored in ``$spack/var/spack/cache``. These are stored indefinitely
by default. Can be purged with :ref:`spack clean --downloads
<cmd-spack-clean>`.
.. _Misc Cache:
--------------------
``misc_cache``
--------------------
@@ -336,52 +334,3 @@ create a new alias called ``inst`` that will always call ``install -v``:
aliases:
inst: install -v
-------------------------------
``concretization_cache:enable``
-------------------------------
When set to ``true``, Spack will utilize a cache of solver outputs from
successful concretization runs. When enabled, Spack will check the concretization
cache prior to running the solver. If a previous request to solve a given
problem is present in the cache, Spack will load the concrete specs and other
solver data from the cache rather than running the solver. Specs not previously
concretized will be added to the cache on a successful solve. The cache additionally
holds solver statistics, so commands like ``spack solve`` will still return information
about the run that produced a given solver result.
This cache is a subcache of the :ref:`Misc Cache` and as such will be cleaned when the Misc
Cache is cleaned.
When ``false`` or omitted, all concretization requests will be performed from scratch.
----------------------------
``concretization_cache:url``
----------------------------
Path to the location where Spack will root the concretization cache. Currently this only supports
paths on the local filesystem.
Default location is under the :ref:`Misc Cache` at: ``$misc_cache/concretization``
------------------------------------
``concretization_cache:entry_limit``
------------------------------------
Sets a limit on the number of concretization results that Spack will cache. The limit is evaluated
after each concretization run; if Spack has stored more results than the limit allows, the
oldest concretization results are pruned until 10% of the limit has been removed.
Setting this value to 0 disables automatic pruning, in which case users are
expected to maintain this cache themselves.
-----------------------------------
``concretization_cache:size_limit``
-----------------------------------
Sets a limit on the size of the concretization cache in bytes. The limit is evaluated
after each concretization run; if the cache has grown beyond this size, the
oldest concretization results are pruned until 10% of the limit has been removed.
Setting this value to 0 disables automatic pruning, in which case users are
expected to maintain this cache themselves.
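Illustration (not part of this diff): the four ``concretization_cache`` settings documented above, collected as a Python dict mirroring ``config.yaml``; the limit values are made-up examples, not defaults:

    # Illustrative shape of the concretization_cache settings.
    concretization_cache = {
        "enable": True,
        "url": "$misc_cache/concretization",  # default: under the misc cache
        "entry_limit": 1000,        # 0 disables pruning by result count
        "size_limit": 500_000_000,  # bytes; 0 disables pruning by size
    }
    assert concretization_cache["enable"] is True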


@@ -14,7 +14,6 @@ case you want to skip directly to specific docs:
* :ref:`compilers.yaml <compiler-config>`
* :ref:`concretizer.yaml <concretizer-options>`
* :ref:`config.yaml <config-yaml>`
* :ref:`include.yaml <include-yaml>`
* :ref:`mirrors.yaml <mirrors>`
* :ref:`modules.yaml <modules>`
* :ref:`packages.yaml <packages-config>`


@@ -457,13 +457,6 @@ developed package in the environment are concretized to match the
version (and other constraints) passed as the spec argument to the
``spack develop`` command.
When working deep in the graph it is often desirable to have multiple specs marked
as ``develop`` so you don't have to restage and/or do full rebuilds each time you
call ``spack install``. The ``--recursive`` flag can be used in these scenarios
to ensure that all the dependents of the initial spec you provide are also marked
as develop specs. The ``--recursive`` flag requires a pre-concretized environment
so the graph can be traversed from the supplied spec all the way to the root specs.
For packages with ``git`` attributes, git branches, tags, and commits can
also be used as valid concrete versions (see :ref:`version-specifier`).
This means that for a package ``foo``, ``spack develop foo@git.main`` will clone
@@ -677,45 +670,24 @@ This configuration sets the default compiler for all packages to
Included configurations
^^^^^^^^^^^^^^^^^^^^^^^
Spack environments allow an ``include`` heading in their yaml schema.
This heading pulls in external configuration files and applies them to
the environment.
Spack environments allow an ``include`` heading in their yaml
schema. This heading pulls in external configuration files and applies
them to the environment.
.. code-block:: yaml
spack:
include:
- environment/relative/path/to/config.yaml
- relative/path/to/config.yaml
- https://github.com/path/to/raw/config/compilers.yaml
- /absolute/path/to/packages.yaml
- path: /path/to/$os/$target/environment
optional: true
- path: /path/to/os-specific/config-dir
when: os == "ventura"
Included configuration files are required *unless* they are explicitly optional
or the entry's condition evaluates to ``false``. Optional includes are specified
with the ``optional`` clause and conditional with the ``when`` clause. (See
:ref:`include-yaml` for more information on optional and conditional entries.)
Files are listed using paths to individual files or directories containing them.
Path entries may be absolute or relative to the environment or specified as
URLs. URLs to individual files need link to the **raw** form of the file's
contents (e.g., `GitHub
<https://docs.github.com/en/repositories/working-with-files/using-files/viewing-and-understanding-files#viewing-or-copying-the-raw-file-content>`_
or `GitLab
<https://docs.gitlab.com/ee/api/repository_files.html#get-raw-file-from-repository>`_).
Only the ``file``, ``ftp``, ``http`` and ``https`` protocols (or schemes) are
supported. Spack-specific, environment and user path variables can be used.
(See :ref:`config-file-variables` for more information.)
.. warning::
Recursive includes are not currently processed in a breadth-first manner
so the value of a configuration option that is altered by multiple included
files may not be what you expect. This will be addressed in a future
update.
Environments can include files or URLs. File paths can be relative or
absolute. URLs include the path to the text for individual files or
can be the path to a directory containing configuration files.
Spack supports ``file``, ``http``, ``https`` and ``ftp`` protocols (or
schemes). Spack-specific, environment and user path variables may be
used in these paths. See :ref:`config-file-variables` for more information.
^^^^^^^^^^^^^^^^^^^^^^^^
Configuration precedence


@@ -30,7 +30,7 @@ than always choosing the latest versions or default variants.
.. note::
As a rule of thumb: requirements + constraints > strong preferences > reuse > preferences > defaults.
As a rule of thumb: requirements + constraints > reuse > preferences > defaults.
The following set of criteria (from lowest to highest precedence) explain
common cases where concretization output may seem surprising at first.
@@ -56,19 +56,7 @@ common cases where concretization output may seem surprising at first.
concretizer:
reuse: dependencies # other options are 'true' and 'false'
3. :ref:`Strong preferences <package-strong-preferences>` configured in ``packages.yaml``
are higher priority than reuse, and can be used to strongly prefer a specific version
or variant, without erroring out if it's not possible. Strong preferences are specified
as follows:
.. code-block:: yaml
packages:
foo:
prefer:
- "@1.1: ~mpi"
4. :ref:`Package requirements <package-requirements>` configured in ``packages.yaml``,
3. :ref:`Package requirements <package-requirements>` configured in ``packages.yaml``,
and constraints from the command line as well as ``package.py`` files override all
of the above. Requirements are specified as follows:
@@ -78,8 +66,6 @@ common cases where concretization output may seem surprising at first.
foo:
require:
- "@1.2: +mpi"
conflicts:
- "@1.4"
Requirements and constraints restrict the set of possible solutions, while reuse
behavior and preferences influence what an optimal solution looks like.


@@ -1,51 +0,0 @@
.. Copyright Spack Project Developers. See COPYRIGHT file for details.
SPDX-License-Identifier: (Apache-2.0 OR MIT)
.. _include-yaml:
===============================
Include Settings (include.yaml)
===============================
Spack allows you to include configuration files through ``include.yaml``.
Using the ``include:`` heading results in pulling in external configuration
information to be used by any Spack command.
Included configuration files are required *unless* they are explicitly optional
or the entry's condition evaluates to ``false``. Optional includes are specified
with the ``optional`` clause and conditional with the ``when`` clause. For
example,
.. code-block:: yaml
include:
- /path/to/a/required/config.yaml
- path: /path/to/$os/$target/config
optional: true
- path: /path/to/os-specific/config-dir
when: os == "ventura"
shows all three. The first entry, ``/path/to/a/required/config.yaml``,
indicates that the included ``config.yaml`` file is required (so it must exist).
Use of ``optional: true`` for ``/path/to/$os/$target/config`` means
the path is only included if it exists. The condition ``os == "ventura"``
in the ``when`` clause for ``/path/to/os-specific/config-dir`` means the
path is only included when the operating system (``os``) is ``ventura``.
The same conditions and variables in `Spec List References
<https://spack.readthedocs.io/en/latest/environments.html#spec-list-references>`_
can be used for conditional activation in the ``when`` clauses.
Included files can be specified by path or by their parent directory.
Paths may be absolute, relative (to the configuration file including the path),
or specified as URLs. Only the ``file``, ``ftp``, ``http`` and ``https`` protocols (or
schemes) are supported. Spack-specific, environment and user path variables
can be used. (See :ref:`config-file-variables` for more information.)
.. warning::
Recursive includes are not currently processed in a breadth-first manner
so the value of a configuration option that is altered by multiple included
files may not be what you expect. This will be addressed in a future
update.


@@ -71,7 +71,6 @@ or refer to the full manual below.
configuration
config_yaml
include_yaml
packages_yaml
build_settings
environments


@@ -486,8 +486,6 @@ present. For instance with a configuration like:
you will use ``mvapich2~cuda %gcc`` as an ``mpi`` provider.
.. _package-strong-preferences:
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Conflicts and strong preferences
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^


@@ -330,7 +330,7 @@ that ``--tests`` is passed to ``spack ci rebuild`` as part of the
- spack --version
- cd ${SPACK_CONCRETE_ENV_DIR}
- spack env activate --without-view .
- spack config add "config:install_tree:projections:${SPACK_JOB_SPEC_PKG_NAME}:'morepadding/{architecture.platform}-{architecture.target}/{name}-{version}-{hash}'"
- spack config add "config:install_tree:projections:${SPACK_JOB_SPEC_PKG_NAME}:'morepadding/{architecture}/{compiler.name}-{compiler.version}/{name}-{version}-{hash}'"
- mkdir -p ${SPACK_ARTIFACTS_ROOT}/user_data
- if [[ -r /mnt/key/intermediate_ci_signing_key.gpg ]]; then spack gpg trust /mnt/key/intermediate_ci_signing_key.gpg; fi
- if [[ -r /mnt/key/spack_public_key.gpg ]]; then spack gpg trust /mnt/key/spack_public_key.gpg; fi
@@ -820,69 +820,6 @@ presence of a ``SPACK_CDASH_AUTH_TOKEN`` environment variable during the
build group on CDash called "Release Testing" (that group will be created if
it didn't already exist).
.. _ci_artifacts:
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
CI Artifacts Directory Layout
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
When running a CI build with the command ``spack ci rebuild``, a number of directories are created for
storing data generated during the CI job. The default root directory for artifacts is ``job_scratch_root``.
This can be overridden by passing the argument ``--artifacts-root`` to the ``spack ci generate`` command
or by setting the ``SPACK_ARTIFACTS_ROOT`` environment variable in the build job scripts.
The top level directories under the artifact root are ``concrete_environment``, ``logs``, ``reproduction``,
``tests``, and ``user_data``. Spack does not restrict what is written to any of these directories, nor does
it require user-specified files to be written to any specific directory.
------------------------
``concrete_environment``
------------------------
The directory ``concrete_environment`` is used to pass along the ``spack.yaml`` processed by ``spack ci generate`` and
the concrete ``spack.lock`` for the CI environment.
--------
``logs``
--------
The directory ``logs`` contains the spack build log, ``spack-build-out.txt``, and the spack build environment
modification file, ``spack-build-mod-env.txt``. Additionally, all files specified by the package's ``Builder``
property ``archive_files`` are copied here (e.g., ``CMakeCache.txt`` for ``CMakeBuilder``).
----------------
``reproduction``
----------------
The directory ``reproduction`` is used to store the files needed by the ``spack reproduce-build`` command.
This includes ``repro.json``, copies of all of the files in ``concrete_environment``, the concrete spec
JSON file for the current spec being built, and all of the files written in the artifacts root directory.
The ``repro.json`` file is not versioned and is only designed to work with the version of Spack that CI was run with.
An example of what a ``repro.json`` may look like:
.. code:: json
{
"job_name": "adios2@2.9.2 /feaevuj %gcc@11.4.0 arch=linux-ubuntu20.04-x86_64_v3 E4S ROCm External",
"job_spec_json": "adios2.json",
"ci_project_dir": "/builds/spack/spack"
}
---------
``tests``
---------
The directory ``tests`` is used to store output from running ``spack test <job spec>``. This may or may not have
data in it depending on the package that was built and the availability of tests.
-------------
``user_data``
-------------
The directory ``user_data`` is used to store everything else that shouldn't be copied to the ``reproduction`` directory.
Users may use this to store additional logs or metrics or other types of files generated by the build job.
-------------------------------------
Using a custom spack in your pipeline
-------------------------------------


@@ -1,13 +1,13 @@
sphinx==8.2.3
sphinx==8.1.3
sphinxcontrib-programoutput==0.18
sphinx_design==0.6.1
sphinx-rtd-theme==3.0.2
python-levenshtein==0.27.1
python-levenshtein==0.26.1
docutils==0.21.2
pygments==2.19.1
urllib3==2.3.0
pytest==8.3.5
isort==6.0.1
black==25.1.0
flake8==7.1.2
pytest==8.3.4
isort==5.13.2
black==24.10.0
flake8==7.1.1
mypy==1.11.1

lib/spack/env/aocc/clang vendored Symbolic link

@@ -0,0 +1 @@
../cc

lib/spack/env/aocc/clang++ vendored Symbolic link

@@ -0,0 +1 @@
../cpp

lib/spack/env/aocc/flang vendored Symbolic link

@@ -0,0 +1 @@
../fc

lib/spack/env/arm/armclang vendored Symbolic link

@@ -0,0 +1 @@
../cc

lib/spack/env/arm/armclang++ vendored Symbolic link

@@ -0,0 +1 @@
../cc

lib/spack/env/arm/armflang vendored Symbolic link

@@ -0,0 +1 @@
../cc

lib/spack/env/c++ vendored Symbolic link

@@ -0,0 +1 @@
cc

lib/spack/env/c89 vendored Symbolic link

@@ -0,0 +1 @@
cc

lib/spack/env/c99 vendored Symbolic link

@@ -0,0 +1 @@
cc

lib/spack/env/case-insensitive/CC vendored Symbolic link

@@ -0,0 +1 @@
../cc


@@ -36,9 +36,15 @@ readonly lsep=''
# the script runs. They are set by routines in spack.build_environment
# as part of the package installation process.
readonly params="\
SPACK_COMPILER_WRAPPER_PATH
SPACK_ENV_PATH
SPACK_DEBUG_LOG_DIR
SPACK_DEBUG_LOG_ID
SPACK_COMPILER_SPEC
SPACK_CC_RPATH_ARG
SPACK_CXX_RPATH_ARG
SPACK_F77_RPATH_ARG
SPACK_FC_RPATH_ARG
SPACK_LINKER_ARG
SPACK_SHORT_SPEC
SPACK_SYSTEM_DIRS
SPACK_MANAGED_DIRS"
@@ -339,9 +345,6 @@ case "$command" in
;;
ld|ld.gold|ld.lld)
mode=ld
if [ -z "$SPACK_CC_RPATH_ARG" ]; then
comp="CXX"
fi
;;
*)
die "Unknown compiler: $command"
@@ -396,12 +399,10 @@ fi
#
dtags_to_add="${SPACK_DTAGS_TO_ADD}"
dtags_to_strip="${SPACK_DTAGS_TO_STRIP}"
linker_arg="ERROR: LINKER ARG WAS NOT SET, MAYBE THE PACKAGE DOES NOT DEPEND ON ${comp}?"
eval "linker_arg=\${SPACK_${comp}_LINKER_ARG:?${linker_arg}}"
linker_arg="${SPACK_LINKER_ARG}"
# Set up rpath variable according to language.
rpath="ERROR: RPATH ARG WAS NOT SET, MAYBE THE PACKAGE DOES NOT DEPEND ON ${comp}?"
rpath="ERROR: RPATH ARG WAS NOT SET"
eval "rpath=\${SPACK_${comp}_RPATH_ARG:?${rpath}}"
# Dump the mode and exit if the command is dump-mode.
@@ -410,6 +411,13 @@ if [ "$SPACK_TEST_COMMAND" = "dump-mode" ]; then
exit
fi
# If, say, SPACK_CC is set but SPACK_FC is not, we want to know. Compilers do not
# *have* to set up Fortran executables, so we need to tell the user when a build is
# about to attempt to use them unsuccessfully.
if [ -z "$command" ]; then
die "Compiler '$SPACK_COMPILER_SPEC' does not have a $language compiler configured."
fi
#
# Filter '.' and Spack environment directories out of PATH so that
# this script doesn't just call itself
@@ -418,7 +426,7 @@ new_dirs=""
IFS=':'
for dir in $PATH; do
addpath=true
for spack_env_dir in $SPACK_COMPILER_WRAPPER_PATH; do
for spack_env_dir in $SPACK_ENV_PATH; do
case "${dir%%/}" in
"$spack_env_dir"|'.'|'')
addpath=false
@@ -779,17 +787,15 @@ case "$mode" in
C)
extend spack_flags_list SPACK_ALWAYS_CFLAGS
extend spack_flags_list SPACK_CFLAGS
preextend flags_list SPACK_TARGET_ARGS_CC
;;
CXX)
extend spack_flags_list SPACK_ALWAYS_CXXFLAGS
extend spack_flags_list SPACK_CXXFLAGS
preextend flags_list SPACK_TARGET_ARGS_CXX
;;
F)
preextend flags_list SPACK_TARGET_ARGS_FORTRAN
;;
esac
# prepend target args
preextend flags_list SPACK_TARGET_ARGS
;;
esac

lib/spack/env/cce/case-insensitive/CC vendored Symbolic link

@@ -0,0 +1 @@
../../cc

@@ -0,0 +1 @@
../../cc

lib/spack/env/cce/cc vendored Symbolic link

@@ -0,0 +1 @@
../cc

lib/spack/env/cce/craycc vendored Symbolic link

@@ -0,0 +1 @@
../cc

lib/spack/env/cce/crayftn vendored Symbolic link

@@ -0,0 +1 @@
../cc

lib/spack/env/cce/ftn vendored Symbolic link

@@ -0,0 +1 @@
../cc

lib/spack/env/clang/clang vendored Symbolic link

@@ -0,0 +1 @@
../cc

lib/spack/env/clang/clang++ vendored Symbolic link

@@ -0,0 +1 @@
../cc

lib/spack/env/clang/flang vendored Symbolic link

@@ -0,0 +1 @@
../cc

lib/spack/env/clang/gfortran vendored Symbolic link

@@ -0,0 +1 @@
../cc

lib/spack/env/cpp vendored Symbolic link

@@ -0,0 +1 @@
cc

lib/spack/env/f77 vendored Symbolic link

@@ -0,0 +1 @@
cc

lib/spack/env/f90 vendored Symbolic link

@@ -0,0 +1 @@
cc

lib/spack/env/f95 vendored Symbolic link

@@ -0,0 +1 @@
cc

lib/spack/env/fc vendored Symbolic link

@@ -0,0 +1 @@
cc

lib/spack/env/fj/case-insensitive/FCC vendored Symbolic link

@@ -0,0 +1 @@
../../cc

lib/spack/env/fj/fcc vendored Symbolic link

@@ -0,0 +1 @@
../cc

lib/spack/env/fj/frt vendored Symbolic link

@@ -0,0 +1 @@
../cc

lib/spack/env/ftn vendored Symbolic link

@@ -0,0 +1 @@
cc

lib/spack/env/gcc/g++ vendored Symbolic link

@@ -0,0 +1 @@
../cc

lib/spack/env/gcc/gcc vendored Symbolic link

@@ -0,0 +1 @@
../cc

lib/spack/env/gcc/gfortran vendored Symbolic link

@@ -0,0 +1 @@
../cc

lib/spack/env/intel/icc vendored Symbolic link

@@ -0,0 +1 @@
../cc

lib/spack/env/intel/icpc vendored Symbolic link

@@ -0,0 +1 @@
../cc

lib/spack/env/intel/ifort vendored Symbolic link

@@ -0,0 +1 @@
../cc

lib/spack/env/ld vendored Symbolic link

@@ -0,0 +1 @@
cc

lib/spack/env/ld.gold vendored Symbolic link

@@ -0,0 +1 @@
cc

lib/spack/env/ld.lld vendored Symbolic link

@@ -0,0 +1 @@
cc

lib/spack/env/nag/nagfor vendored Symbolic link

@@ -0,0 +1 @@
../cc

lib/spack/env/nvhpc/nvc vendored Symbolic link

@@ -0,0 +1 @@
../cc

lib/spack/env/nvhpc/nvc++ vendored Symbolic link

@@ -0,0 +1 @@
../cc

lib/spack/env/nvhpc/nvfortran vendored Symbolic link

@@ -0,0 +1 @@
../cc

lib/spack/env/oneapi/dpcpp vendored Symbolic link

@@ -0,0 +1 @@
../cc

lib/spack/env/oneapi/icpx vendored Symbolic link

@@ -0,0 +1 @@
../cc

lib/spack/env/oneapi/icx vendored Symbolic link

@@ -0,0 +1 @@
../cc

lib/spack/env/oneapi/ifx vendored Symbolic link

@@ -0,0 +1 @@
../cc

lib/spack/env/pgi/pgc++ vendored Symbolic link

@@ -0,0 +1 @@
../cc

lib/spack/env/pgi/pgcc vendored Symbolic link

@@ -0,0 +1 @@
../cc

lib/spack/env/pgi/pgfortran vendored Symbolic link

@@ -0,0 +1 @@
../cc

lib/spack/env/rocmcc/amdclang vendored Symbolic link

@@ -0,0 +1 @@
../cc

lib/spack/env/rocmcc/amdclang++ vendored Symbolic link

@@ -0,0 +1 @@
../cpp

lib/spack/env/rocmcc/amdflang vendored Symbolic link

@@ -0,0 +1 @@
../fc

lib/spack/env/xl/xlc vendored Symbolic link

@@ -0,0 +1 @@
../cc

lib/spack/env/xl/xlc++ vendored Symbolic link

@@ -0,0 +1 @@
../cc

lib/spack/env/xl/xlf vendored Symbolic link

@@ -0,0 +1 @@
../cc

lib/spack/env/xl/xlf90 vendored Symbolic link

@@ -0,0 +1 @@
../cc

lib/spack/env/xl_r/xlc++_r vendored Symbolic link

@@ -0,0 +1 @@
../cc

lib/spack/env/xl_r/xlc_r vendored Symbolic link

@@ -0,0 +1 @@
../cc

lib/spack/env/xl_r/xlf90_r vendored Symbolic link

@@ -0,0 +1 @@
../cc

lib/spack/env/xl_r/xlf_r vendored Symbolic link

@@ -0,0 +1 @@
../cc


@@ -7,7 +7,6 @@
import fnmatch
import glob
import hashlib
import io
import itertools
import numbers
import os
@@ -21,7 +20,6 @@
from contextlib import contextmanager
from itertools import accumulate
from typing import (
IO,
Callable,
Deque,
Dict,
@@ -2456,69 +2454,26 @@ class WindowsSimulatedRPath:
and vice versa.
"""
def __init__(
self,
package,
base_modification_prefix: Optional[Union[str, pathlib.Path]] = None,
link_install_prefix: bool = True,
):
def __init__(self, package, link_install_prefix=True):
"""
Args:
package (spack.package_base.PackageBase): Package requiring links
base_modification_prefix (str|pathlib.Path): Path representation indicating
the root directory in which to establish the simulated rpath, ie where the
symlinks that comprise the "rpath" behavior will be installed.
Note: This is a mutually exclusive option with `link_install_prefix` using
both is an error.
Default: None
link_install_prefix (bool): Link against package's own install or stage root.
Packages that run their own executables during build and require rpaths to
the build directory during build time require this option.
Default: install
the build directory during build time require this option. Default: install
root
Note: This is a mutually exclusive option with `base_modification_prefix`, using
both is an error.
"""
self.pkg = package
self._addl_rpaths: set[str] = set()
if link_install_prefix and base_modification_prefix:
raise RuntimeError(
"Invalid combination of arguments given to WindowsSimulated RPath.\n"
"Select either `link_install_prefix` to create an install prefix rpath"
" or specify a `base_modification_prefix` for any other link type. "
"Specifying both arguments is invalid."
)
if not (link_install_prefix or base_modification_prefix):
raise RuntimeError(
"Insufficient arguments given to WindowsSimulatedRpath.\n"
"WindowsSimulatedRPath requires one of link_install_prefix"
" or base_modification_prefix to be specified."
" Neither was provided."
)
self._addl_rpaths = set()
self.link_install_prefix = link_install_prefix
if base_modification_prefix:
self.base_modification_prefix = pathlib.Path(base_modification_prefix)
else:
self.base_modification_prefix = pathlib.Path(self.pkg.prefix)
self._additional_library_dependents: set[pathlib.Path] = set()
if not self.link_install_prefix:
tty.debug(f"Generating rpath for non install context: {base_modification_prefix}")
self._additional_library_dependents = set()
@property
def library_dependents(self):
"""
Set of directories where package binaries/libraries are located.
"""
base_pths = set()
if self.link_install_prefix:
base_pths.add(pathlib.Path(self.pkg.prefix.bin))
base_pths |= self._additional_library_dependents
return base_pths
return set([pathlib.Path(self.pkg.prefix.bin)]) | self._additional_library_dependents
def add_library_dependent(self, *dest):
"""
@@ -2534,12 +2489,6 @@ def add_library_dependent(self, *dest):
new_pth = pathlib.Path(pth).parent
else:
new_pth = pathlib.Path(pth)
path_is_in_prefix = new_pth.is_relative_to(self.base_modification_prefix)
if not path_is_in_prefix:
raise RuntimeError(
f"Attempting to generate rpath symlink out of rpath context:\
{str(self.base_modification_prefix)}"
)
self._additional_library_dependents.add(new_pth)
@property
@@ -2628,33 +2577,6 @@ def establish_link(self):
self._link(library, lib_dir)
def make_package_test_rpath(pkg, test_dir: Union[str, pathlib.Path]):
"""Establishes a temp Windows simulated rpath for the pkg in the testing directory
so an executable can test the libraries/executables with proper access
to dependent dlls
Note: this is a no-op on all other platforms besides Windows
Args:
pkg (spack.package_base.PackageBase): the package for which the rpath should be computed
test_dir: the testing directory in which we should construct an rpath
"""
# link_install_prefix as false ensures we're not linking into the install prefix
mini_rpath = WindowsSimulatedRPath(pkg, link_install_prefix=False)
# add the testing directory as a location to install rpath symlinks
mini_rpath.add_library_dependent(test_dir)
# check for whether build_directory is available, if not
# assume the stage root is the build dir
build_dir_attr = getattr(pkg, "build_directory", None)
build_directory = build_dir_attr if build_dir_attr else pkg.stage.path
# add the build dir & build dir bin
mini_rpath.add_rpath(os.path.join(build_directory, "bin"))
mini_rpath.add_rpath(os.path.join(build_directory))
# construct rpath
mini_rpath.establish_link()
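Illustration (not part of this diff): a hedged sketch of a call site for the helper above; ``pkg`` stands in for a real PackageBase instance and is not defined here:

    import pathlib
    import tempfile

    # Hypothetical usage: build a simulated rpath in a scratch test directory
    # so a test executable can resolve its dependent DLLs on Windows.
    test_dir = pathlib.Path(tempfile.mkdtemp())
    make_package_test_rpath(pkg, test_dir)  # `pkg`: assumed PackageBase instance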
@system_path_filter
@memoized
def can_access_dir(path):
@@ -2883,20 +2805,6 @@ def keep_modification_time(*filenames):
os.utime(f, (os.path.getatime(f), mtime))
@contextmanager
def temporary_file_position(stream):
orig_pos = stream.tell()
yield
stream.seek(orig_pos)
@contextmanager
def current_file_position(stream: IO[str], loc: int, relative_to=io.SEEK_CUR):
with temporary_file_position(stream):
stream.seek(loc, relative_to)
yield
@contextmanager
def temporary_dir(
suffix: Optional[str] = None, prefix: Optional[str] = None, dir: Optional[str] = None


@@ -11,11 +11,10 @@
import re
import sys
import traceback
import types
import typing
import warnings
from datetime import datetime, timedelta
from typing import Callable, Dict, Iterable, List, Mapping, Optional, Tuple, TypeVar
from typing import Callable, Dict, Iterable, List, Tuple, TypeVar
# Ignore emacs backups when listing modules
ignore_modules = r"^\.#|~$"
@@ -73,7 +72,7 @@ def index_by(objects, *funcs):
if isinstance(f, str):
f = lambda x: getattr(x, funcs[0])
elif isinstance(f, tuple):
f = lambda x: tuple(getattr(x, p, None) for p in funcs[0])
f = lambda x: tuple(getattr(x, p) for p in funcs[0])
result = {}
for o in objects:
@@ -708,24 +707,14 @@ def __init__(self, wrapped_object):
class Singleton:
"""Wrapper for lazily initialized singleton objects."""
"""Simple wrapper for lazily initialized singleton objects."""
def __init__(self, factory: Callable[[], object]):
def __init__(self, factory):
"""Create a new singleton to be inited with the factory function.
Most factories will simply create the object to be initialized and
return it.
In some cases, e.g. when bootstrapping some global state, the singleton
may need to be initialized incrementally. If the factory returns a generator
instead of a regular object, the singleton will assign each result yielded by
the generator to the singleton instance. This allows methods called by
the factory in later stages to refer back to the singleton.
Args:
factory (function): function taking no arguments that creates the
singleton instance.
factory (function): function taking no arguments that
creates the singleton instance.
"""
self.factory = factory
self._instance = None
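Illustration (not part of this diff): the generator-factory behavior described in the docstring, as a small sketch assuming the ``Singleton`` class shown here:

    def bootstrap_state():
        # Each yield is assigned to the singleton, so code that runs between
        # yields (e.g. called by the factory) can already see a partial instance.
        state = {"phase": 1}
        yield state
        state["phase"] = 2
        yield state

    CONFIG = Singleton(bootstrap_state)    # Singleton as defined above
    assert CONFIG.instance["phase"] == 2   # generator fully consumed on first access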
@@ -733,16 +722,7 @@ def __init__(self, factory: Callable[[], object]):
@property
def instance(self):
if self._instance is None:
instance = self.factory()
if isinstance(instance, types.GeneratorType):
# if it's a generator, assign every value
for value in instance:
self._instance = value
else:
# if not, just assign the result like a normal singleton
self._instance = instance
self._instance = self.factory()
return self._instance
def __getattr__(self, name):
@@ -1016,8 +996,11 @@ def _receive_forwarded(self, context: str, exc: Exception, tb: List[str]):
def grouped_message(self, with_tracebacks: bool = True) -> str:
"""Print out an error message coalescing all the forwarded errors."""
each_exception_message = [
"\n\t{0} raised {1}: {2}\n{3}".format(
context, exc.__class__.__name__, exc, f"\n{''.join(tb)}" if with_tracebacks else ""
"{0} raised {1}: {2}{3}".format(
context,
exc.__class__.__name__,
exc,
"\n{0}".format("".join(tb)) if with_tracebacks else "",
)
for context, exc, tb in self.exceptions
]
@@ -1097,88 +1080,3 @@ def __set__(self, instance, value):
def factory(self, instance, owner):
raise NotImplementedError("must be implemented by derived classes")
KT = TypeVar("KT")
VT = TypeVar("VT")
class PriorityOrderedMapping(Mapping[KT, VT]):
"""Mapping that iterates over key according to an integer priority. If the priority is
the same for two keys, insertion order is what matters.
The priority is set when the key/value pair is added. If not set, the highest current priority
is used.
"""
_data: Dict[KT, VT]
_priorities: List[Tuple[int, KT]]
def __init__(self) -> None:
self._data = {}
# Tuple of (priority, key)
self._priorities = []
def __getitem__(self, key: KT) -> VT:
return self._data[key]
def __len__(self) -> int:
return len(self._data)
def __iter__(self):
yield from (key for _, key in self._priorities)
def __reversed__(self):
yield from (key for _, key in reversed(self._priorities))
def reversed_keys(self):
"""Iterates over keys from the highest priority, to the lowest."""
return reversed(self)
def reversed_values(self):
"""Iterates over values from the highest priority, to the lowest."""
yield from (self._data[key] for _, key in reversed(self._priorities))
def _highest_priority(self) -> int:
if not self._priorities:
return 0
result, _ = self._priorities[-1]
return result
def add(self, key: KT, *, value: VT, priority: Optional[int] = None) -> None:
"""Adds a key/value pair to the mapping, with a specific priority.
If the priority is None, then it is assumed to be the highest priority value currently
in the container.
Raises:
ValueError: when the same priority is already in the mapping
"""
if priority is None:
priority = self._highest_priority()
if key in self._data:
self.remove(key)
self._priorities.append((priority, key))
# We rely on sort being stable
self._priorities.sort(key=lambda x: x[0])
self._data[key] = value
assert len(self._data) == len(self._priorities)
def remove(self, key: KT) -> VT:
"""Removes a key from the mapping.
Returns:
The value associated with the key being removed
Raises:
KeyError: if the key is not in the mapping
"""
if key not in self._data:
raise KeyError(f"cannot find {key}")
popped_item = self._data.pop(key)
self._priorities = [(p, k) for p, k in self._priorities if k != key]
assert len(self._data) == len(self._priorities)
return popped_item
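Illustration (not part of this diff): a short usage sketch of ``PriorityOrderedMapping`` as defined above:

    pom = PriorityOrderedMapping()
    pom.add("first", value=1)                 # no priority: current highest (0)
    pom.add("urgent", value=2, priority=-10)  # smaller priorities iterate first
    pom.add("last", value=3, priority=5)

    assert list(pom) == ["urgent", "first", "last"]
    assert list(pom.reversed_keys()) == ["last", "first", "urgent"]
    assert pom.remove("first") == 1 and len(pom) == 2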


@@ -41,16 +41,6 @@ def __init__(self, dst, src_a=None, src_b=None):
self.src_a = src_a
self.src_b = src_b
def __repr__(self) -> str:
return f"MergeConflict(dst={self.dst!r}, src_a={self.src_a!r}, src_b={self.src_b!r})"
def _samefile(a: str, b: str):
try:
return os.path.samefile(a, b)
except OSError:
return False
class SourceMergeVisitor(BaseDirectoryVisitor):
"""
@@ -60,14 +50,9 @@ class SourceMergeVisitor(BaseDirectoryVisitor):
- A list of merge conflicts in dst/
"""
def __init__(
self, ignore: Optional[Callable[[str], bool]] = None, normalize_paths: bool = False
):
def __init__(self, ignore: Optional[Callable[[str], bool]] = None):
self.ignore = ignore if ignore is not None else lambda f: False
# On case-insensitive filesystems, normalize paths to detect duplications
self.normalize_paths = normalize_paths
# When mapping <src root> to <dst root>/<projection>, we need to prepend the <projection>
# bit to the relative path in the destination dir.
self.projection: str = ""
@@ -86,88 +71,10 @@ def __init__(
# and can run mkdir in order.
self.directories: Dict[str, Tuple[str, str]] = {}
# If the visitor is configured to normalize paths, keep a map of
# normalized path to: original path, root directory + relative path
self._directories_normalized: Dict[str, Tuple[str, str, str]] = {}
# Files to link. Maps dst_rel to (src_root, src_rel). This is an ordered dict, where files
# are guaranteed to be grouped by src_root in the order they were visited.
self.files: Dict[str, Tuple[str, str]] = {}
# If the visitor is configured to normalize paths, keep a map of
# normalized path to: original path, root directory + relative path
self._files_normalized: Dict[str, Tuple[str, str, str]] = {}
def _in_directories(self, proj_rel_path: str) -> bool:
"""
Check if a path is already in the directory list
"""
if self.normalize_paths:
return proj_rel_path.lower() in self._directories_normalized
else:
return proj_rel_path in self.directories
def _directory(self, proj_rel_path: str) -> Tuple[str, str, str]:
"""
Get the directory that is mapped to a path
"""
if self.normalize_paths:
return self._directories_normalized[proj_rel_path.lower()]
else:
return (proj_rel_path, *self.directories[proj_rel_path])
def _del_directory(self, proj_rel_path: str):
"""
Remove a directory from the list of directories
"""
del self.directories[proj_rel_path]
if self.normalize_paths:
del self._directories_normalized[proj_rel_path.lower()]
def _add_directory(self, proj_rel_path: str, root: str, rel_path: str):
"""
Add a directory to the list of directories.
Also stores the normalized version for later lookups
"""
self.directories[proj_rel_path] = (root, rel_path)
if self.normalize_paths:
self._directories_normalized[proj_rel_path.lower()] = (proj_rel_path, root, rel_path)
def _in_files(self, proj_rel_path: str) -> bool:
"""
Check if a path is already in the files list
"""
if self.normalize_paths:
return proj_rel_path.lower() in self._files_normalized
else:
return proj_rel_path in self.files
def _file(self, proj_rel_path: str) -> Tuple[str, str, str]:
"""
Get the file that is mapped to a path
"""
if self.normalize_paths:
return self._files_normalized[proj_rel_path.lower()]
else:
return (proj_rel_path, *self.files[proj_rel_path])
def _del_file(self, proj_rel_path: str):
"""
Remove a file from the list of files
"""
del self.files[proj_rel_path]
if self.normalize_paths:
del self._files_normalized[proj_rel_path.lower()]
def _add_file(self, proj_rel_path: str, root: str, rel_path: str):
"""
Add a file to the list of files
Also stores the normalized version for later lookups
"""
self.files[proj_rel_path] = (root, rel_path)
if self.normalize_paths:
self._files_normalized[proj_rel_path.lower()] = (proj_rel_path, root, rel_path)
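Illustration (not part of this diff): the normalization bookkeeping above reduces to a parallel dict keyed by the lowercased path; a standalone sketch of the idea:

    # Standalone illustration (not Spack code): detect case-insensitive
    # duplicates by keeping a second dict keyed on the lowercased path.
    files = {}
    files_normalized = {}

    def add_file(path, entry, normalize=True):
        files[path] = entry
        if normalize:
            files_normalized[path.lower()] = (path, entry)

    def in_files(path, normalize=True):
        return path.lower() in files_normalized if normalize else path in files

    add_file("Readme.md", ("root_a", "Readme.md"))
    assert in_files("README.MD")  # collides when normalize_paths is enabled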
def before_visit_dir(self, root: str, rel_path: str, depth: int) -> bool:
"""
Register a directory if dst / rel_path is not blocked by a file or ignored.
@@ -177,28 +84,23 @@ def before_visit_dir(self, root: str, rel_path: str, depth: int) -> bool:
if self.ignore(rel_path):
# Don't recurse when dir is ignored.
return False
elif self._in_files(proj_rel_path):
# A file-dir conflict is fatal except if they're the same file (symlinked dir).
src_a = os.path.join(*self._file(proj_rel_path))
src_b = os.path.join(root, rel_path)
if not _samefile(src_a, src_b):
self.fatal_conflicts.append(
MergeConflict(dst=proj_rel_path, src_a=src_a, src_b=src_b)
elif proj_rel_path in self.files:
# Can't create a dir where a file is.
src_a_root, src_a_relpath = self.files[proj_rel_path]
self.fatal_conflicts.append(
MergeConflict(
dst=proj_rel_path,
src_a=os.path.join(src_a_root, src_a_relpath),
src_b=os.path.join(root, rel_path),
)
return False
# Remove the link in favor of the dir.
existing_proj_rel_path, _, _ = self._file(proj_rel_path)
self._del_file(existing_proj_rel_path)
self._add_directory(proj_rel_path, root, rel_path)
return True
elif self._in_directories(proj_rel_path):
)
return False
elif proj_rel_path in self.directories:
# No new directory, carry on.
return True
else:
# Register new directory.
self._add_directory(proj_rel_path, root, rel_path)
self.directories[proj_rel_path] = (root, rel_path)
return True
def before_visit_symlinked_dir(self, root: str, rel_path: str, depth: int) -> bool:
@@ -230,7 +132,7 @@ def before_visit_symlinked_dir(self, root: str, rel_path: str, depth: int) -> bo
if handle_as_dir:
return self.before_visit_dir(root, rel_path, depth)
self.visit_file(root, rel_path, depth, symlink=True)
self.visit_file(root, rel_path, depth)
return False
def visit_file(self, root: str, rel_path: str, depth: int, *, symlink: bool = False) -> None:
@@ -238,23 +140,30 @@ def visit_file(self, root: str, rel_path: str, depth: int, *, symlink: bool = Fa
if self.ignore(rel_path):
pass
elif self._in_directories(proj_rel_path):
# Can't create a file where a dir is, unless they are the same file (symlinked dir),
# in which case we simply drop the symlink in favor of the actual dir.
src_a = os.path.join(*self._directory(proj_rel_path))
src_b = os.path.join(root, rel_path)
if not symlink or not _samefile(src_a, src_b):
self.fatal_conflicts.append(
MergeConflict(dst=proj_rel_path, src_a=src_a, src_b=src_b)
elif proj_rel_path in self.directories:
# Can't create a file where a dir is; fatal error
self.fatal_conflicts.append(
MergeConflict(
dst=proj_rel_path,
src_a=os.path.join(*self.directories[proj_rel_path]),
src_b=os.path.join(root, rel_path),
)
elif self._in_files(proj_rel_path):
)
elif proj_rel_path in self.files:
# When two files project to the same path, they conflict iff they are distinct.
# If they are the same (i.e. one links to the other), register regular files rather
# than symlinks. The reason is that in copy-type views, we need a copy of the actual
# file, not the symlink.
src_a = os.path.join(*self._file(proj_rel_path))
src_a = os.path.join(*self.files[proj_rel_path])
src_b = os.path.join(root, rel_path)
if not _samefile(src_a, src_b):
try:
samefile = os.path.samefile(src_a, src_b)
except OSError:
samefile = False
if not samefile:
# Distinct files produce a conflict.
self.file_conflicts.append(
MergeConflict(dst=proj_rel_path, src_a=src_a, src_b=src_b)
@@ -264,12 +173,12 @@ def visit_file(self, root: str, rel_path: str, depth: int, *, symlink: bool = Fa
if not symlink:
# Remove the link in favor of the actual file. The del is necessary to maintain the
# order of the files dict, which is grouped by root.
existing_proj_rel_path, _, _ = self._file(proj_rel_path)
self._del_file(existing_proj_rel_path)
self._add_file(proj_rel_path, root, rel_path)
del self.files[proj_rel_path]
self.files[proj_rel_path] = (root, rel_path)
else:
# Otherwise register this file to be linked.
self._add_file(proj_rel_path, root, rel_path)
self.files[proj_rel_path] = (root, rel_path)
def visit_symlinked_file(self, root: str, rel_path: str, depth: int) -> None:
# Treat symlinked files as ordinary files (without "dereferencing")
@@ -288,11 +197,11 @@ def set_projection(self, projection: str) -> None:
path = ""
for part in self.projection.split(os.sep):
path = os.path.join(path, part)
if not self._in_files(path):
self._add_directory(path, "<projection>", path)
if path not in self.files:
self.directories[path] = ("<projection>", path)
else:
# Can't create a dir where a file is.
_, src_a_root, src_a_relpath = self._file(path)
src_a_root, src_a_relpath = self.files[path]
self.fatal_conflicts.append(
MergeConflict(
dst=path,
@@ -318,8 +227,8 @@ def __init__(self, source_merge_visitor: SourceMergeVisitor):
def before_visit_dir(self, root: str, rel_path: str, depth: int) -> bool:
# If destination dir is a file in a src dir, add a conflict,
# and don't traverse deeper
if self.src._in_files(rel_path):
_, src_a_root, src_a_relpath = self.src._file(rel_path)
if rel_path in self.src.files:
src_a_root, src_a_relpath = self.src.files[rel_path]
self.src.fatal_conflicts.append(
MergeConflict(
rel_path, os.path.join(src_a_root, src_a_relpath), os.path.join(root, rel_path)
@@ -329,9 +238,8 @@ def before_visit_dir(self, root: str, rel_path: str, depth: int) -> bool:
# If destination dir was also a src dir, remove the mkdir
# action, and traverse deeper.
if self.src._in_directories(rel_path):
existing_proj_rel_path, _, _ = self.src._directory(rel_path)
self.src._del_directory(existing_proj_rel_path)
if rel_path in self.src.directories:
del self.src.directories[rel_path]
return True
# If the destination dir does not appear in the src dir,
@@ -344,24 +252,38 @@ def before_visit_symlinked_dir(self, root: str, rel_path: str, depth: int) -> bo
be seen as files; we should not accidentally merge
source dir with a symlinked dest dir.
"""
self.visit_file(root, rel_path, depth)
# Never descend into symlinked target dirs.
return False
def visit_file(self, root: str, rel_path: str, depth: int) -> None:
# Can't merge a file if target already exists
if self.src._in_directories(rel_path):
_, src_a_root, src_a_relpath = self.src._directory(rel_path)
# Always conflict
if rel_path in self.src.directories:
src_a_root, src_a_relpath = self.src.directories[rel_path]
self.src.fatal_conflicts.append(
MergeConflict(
rel_path, os.path.join(src_a_root, src_a_relpath), os.path.join(root, rel_path)
)
)
elif self.src._in_files(rel_path):
_, src_a_root, src_a_relpath = self.src._file(rel_path)
if rel_path in self.src.files:
src_a_root, src_a_relpath = self.src.files[rel_path]
self.src.fatal_conflicts.append(
MergeConflict(
rel_path, os.path.join(src_a_root, src_a_relpath), os.path.join(root, rel_path)
)
)
# Never descend into symlinked target dirs.
return False
def visit_file(self, root: str, rel_path: str, depth: int) -> None:
# Can't merge a file if target already exists
if rel_path in self.src.directories:
src_a_root, src_a_relpath = self.src.directories[rel_path]
self.src.fatal_conflicts.append(
MergeConflict(
rel_path, os.path.join(src_a_root, src_a_relpath), os.path.join(root, rel_path)
)
)
elif rel_path in self.src.files:
src_a_root, src_a_relpath = self.src.files[rel_path]
self.src.fatal_conflicts.append(
MergeConflict(
rel_path, os.path.join(src_a_root, src_a_relpath), os.path.join(root, rel_path)


@@ -269,7 +269,7 @@ def __init__(
@staticmethod
def _poll_interval_generator(
_wait_times: Optional[Tuple[float, float, float]] = None,
_wait_times: Optional[Tuple[float, float, float]] = None
) -> Generator[float, None, None]:
"""This implements a backoff scheme for polling a contended resource
by suggesting a succession of wait times between polls.
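Illustration (not part of this diff): the backoff idea from the docstring, sketched generically; the counts and wait times below are illustrative, not Spack's actual values:

    from typing import Generator, Optional, Tuple

    def poll_intervals(
        wait_times: Optional[Tuple[float, float, float]] = None,
    ) -> Generator[float, None, None]:
        # Illustrative backoff: a burst of short waits, then medium waits,
        # then a long wait repeated indefinitely for a heavily contended lock.
        short, medium, long_ = wait_times or (1e-3, 1e-2, 0.1)
        for _ in range(10):
            yield short
        for _ in range(30):
            yield medium
        while True:
            yield long_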


@@ -2,7 +2,8 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Utility classes for logging the output of blocks of code."""
"""Utility classes for logging the output of blocks of code.
"""
import atexit
import ctypes
import errno
@@ -343,6 +344,26 @@ def close(self):
self.file.close()
@contextmanager
def replace_environment(env):
"""Replace the current environment (`os.environ`) with `env`.
If `env` is empty (or None), this unsets all current environment
variables.
"""
env = env or {}
old_env = os.environ.copy()
try:
os.environ.clear()
for name, val in env.items():
os.environ[name] = val
yield
finally:
os.environ.clear()
for name, val in old_env.items():
os.environ[name] = val
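Illustration (not part of this diff): a quick usage sketch of ``replace_environment`` as defined above:

    import os

    home_before = os.environ.get("HOME")
    with replace_environment({"PATH": "/usr/bin"}):
        # Inside the block only PATH is set; everything else was cleared.
        assert set(os.environ) == {"PATH"}
    assert os.environ.get("HOME") == home_before  # original environment restored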
def log_output(*args, **kwargs):
"""Context manager that logs its output to a file.
@@ -426,6 +447,7 @@ def __init__(
self.echo = echo
self.debug = debug
self.buffer = buffer
self.env = env # the environment to use for _writer_daemon
self.filter_fn = filter_fn
self._active = False # used to prevent re-entry
@@ -497,20 +519,21 @@ def __enter__(self):
# just don't forward input if this fails
pass
self.process = multiprocessing.Process(
target=_writer_daemon,
args=(
input_fd,
read_fd,
self.write_fd,
self.echo,
self.log_file,
child_pipe,
self.filter_fn,
),
)
self.process.daemon = True # must set before start()
self.process.start()
with replace_environment(self.env):
self.process = multiprocessing.Process(
target=_writer_daemon,
args=(
input_fd,
read_fd,
self.write_fd,
self.echo,
self.log_file,
child_pipe,
self.filter_fn,
),
)
self.process.daemon = True # must set before start()
self.process.start()
finally:
if input_fd:
@@ -706,7 +729,10 @@ class winlog:
Does not support the use of 'v' toggling as nixlog does.
"""
def __init__(self, file_like=None, echo=False, debug=0, buffer=False, filter_fn=None):
def __init__(
self, file_like=None, echo=False, debug=0, buffer=False, env=None, filter_fn=None
):
self.env = env
self.debug = debug
self.echo = echo
self.logfile = file_like
@@ -763,10 +789,11 @@ def background_reader(reader, echo_writer, _kill):
reader.close()
self._active = True
self._thread = Thread(
target=background_reader, args=(self.reader, self.echo_writer, self._kill)
)
self._thread.start()
with replace_environment(self.env):
self._thread = Thread(
target=background_reader, args=(self.reader, self.echo_writer, self._kill)
)
self._thread.start()
return self
def __exit__(self, exc_type, exc_val, exc_tb):


@@ -13,18 +13,6 @@
__version__ = "1.0.0.dev0"
spack_version = __version__
#: The current Package API version implemented by this version of Spack. The Package API defines
#: the Python interface for packages as well as the layout of package repositories. The minor
#: version is incremented when the package API is extended in a backwards-compatible way. The major
#: version is incremented upon breaking changes. This version is changed independently from the
#: Spack version.
package_api_version = (1, 0)
#: The minimum Package API version that this version of Spack is compatible with. This should
#: always be a tuple of the form ``(major, 0)``, since compatibility with vX.Y implies
#: compatibility with vX.0.
min_package_api_version = (1, 0)
def __try_int(v):
try:
@@ -91,6 +79,4 @@ def get_short_version() -> str:
"get_version",
"get_spack_commit",
"get_short_version",
"package_api_version",
"min_package_api_version",
]

View File

@@ -1,20 +0,0 @@
# Copyright Spack Project Developers. See COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Alias names to convert legacy compilers to builtin packages and vice-versa"""
BUILTIN_TO_LEGACY_COMPILER = {
"llvm": "clang",
"intel-oneapi-compilers": "oneapi",
"llvm-amdgpu": "rocmcc",
"intel-oneapi-compiler-classic": "intel",
"acfl": "arm",
}
LEGACY_COMPILER_TO_BUILTIN = {
"clang": "llvm",
"oneapi": "intel-oneapi-compilers",
"rocmcc": "llvm-amdgpu",
"intel": "intel-oneapi-compiler-classic",
"arm": "acfl",
}
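For context, a mapping like this would be consumed by a small translation helper, roughly as follows (the helper name is hypothetical):

def legacy_to_builtin(compiler_name: str) -> str:
    # Fall back to the original name when there is no legacy alias.
    return LEGACY_COMPILER_TO_BUILTIN.get(compiler_name, compiler_name)

assert legacy_to_builtin("clang") == "llvm"
assert legacy_to_builtin("gcc") == "gcc"  # no alias: passed through unchanged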

View File

@@ -61,6 +61,7 @@ def _search_duplicate_compilers(error_cls):
import spack.util.crypto
import spack.util.spack_yaml as syaml
import spack.variant
import spack.version
#: Map an audit tag to a list of callables implementing checks
CALLBACKS = {}
@@ -1010,7 +1011,7 @@ def _issues_in_depends_on_directive(pkgs, error_cls):
for dep_name, dep in deps_by_name.items():
def check_virtual_with_variants(spec, msg):
if not spack.repo.PATH.is_virtual(spec.name) or not spec.variants:
if not spec.virtual or not spec.variants:
return
error = error_cls(
f"{pkg_name}: {msg}",
@@ -1137,10 +1138,23 @@ def _ensure_variants_have_descriptions(pkgs, error_cls):
return errors
def _skip_version_audit(host_arch: spack.spec.ArchSpec, pkg_cls) -> bool:
"""The version audit is skipped if a package defines skip_version_audit as True or if the
host architecture satisfies any of the archs in the list skip_version_audit."""
skip_conditions = getattr(pkg_cls, "skip_version_audit", False)
if type(skip_conditions) is bool:
return skip_conditions
for condition in skip_conditions:
if host_arch.satisfies(spack.spec.Spec(condition).architecture):
return True
return False
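A package opts out of these audits like so (hypothetical packages; the attribute name comes from this diff):

class Foo(Package):
    # Skip the version audits everywhere:
    skip_version_audit = True

class Bar(Package):
    # ...or only when the host satisfies one of these architectures:
    skip_version_audit = ["platform=windows", "platform=darwin"]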
@package_directives
def _version_constraints_are_satisfiable_by_some_version_in_repo(pkgs, error_cls):
"""Report if version constraints used in directives are not satisfiable"""
errors = []
host_architecture = spack.spec.ArchSpec.default_arch()
for pkg_name in pkgs:
pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
filename = spack.repo.PATH.filename_for_package_name(pkg_name)
@@ -1156,20 +1170,13 @@ def _version_constraints_are_satisfiable_by_some_version_in_repo(pkgs, error_cls
dependencies_to_check.append(dep.spec)
host_architecture = spack.spec.ArchSpec.default_arch()
for s in dependencies_to_check:
dependency_pkg_cls = None
try:
dependency_pkg_cls = spack.repo.PATH.get_pkg_class(s.name)
# Some packages have hacks that might cause failures on some platforms.
# Allow conditions to be set explicitly to skip version checks in that case.
skip_conditions = getattr(dependency_pkg_cls, "skip_version_audit", [])
skip_version_check = False
for condition in skip_conditions:
if host_architecture.satisfies(spack.spec.Spec(condition).architecture):
skip_version_check = True
break
assert skip_version_check or any(
assert _skip_version_audit(host_architecture, dependency_pkg_cls) or any(
v.intersects(s.versions) for v in list(dependency_pkg_cls.versions)
)
except Exception:
@@ -1188,6 +1195,64 @@ def _version_constraints_are_satisfiable_by_some_version_in_repo(pkgs, error_cls
return errors
@package_directives
def _when_conditions_are_satisfiable_by_some_version(pkgs, error_cls):
"""Report if versions in when conditions used in directives are not satisfiable"""
errors = []
attrs = {
"conflicts": "conflicts",
"dependencies": "depends_on",
"licenses": "license",
"patches": "patch",
"provided_together": "provides",
"provided": "provides",
"requirements": "requires",
"resources": "resource",
"splice_specs": "can_splice",
"variants": "variant",
}
host_architecture = spack.spec.ArchSpec.default_arch()
for pkg_name in pkgs:
pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
filename = spack.repo.PATH.filename_for_package_name(pkg_name)
if _skip_version_audit(host_architecture, pkg_cls):
continue
if not pkg_cls.versions:
continue
range = spack.version.VersionRange(
min(pkg_cls.versions), spack.version.StandardVersion.typemax()
)
details = []
for attr, directive in attrs.items():
if attr == "patches":
# Patches should strictly apply to some known version
unsatisfiable = [
when
for when in getattr(pkg_cls, attr)
if not any(y.intersects(when.versions) for y in pkg_cls.versions)
]
else:
# Other directives only need to fall within the range from the minimum known
# version to infinity. The reason is that people sometimes copy directives like
# depends_on("foo@1.1", when="@1.1") even though only 1.0 and 1.2 are registered
# in Spack, and complaining about those would be too noisy.
unsatisfiable = [
when for when in getattr(pkg_cls, attr) if not range.intersects(when.versions)
]
details.extend(f'{directive}(..., when="{x}")' for x in unsatisfiable)
if details:
errors.append(
error_cls(
summary=f"{filename}: when conditions are not satisfiable by "
f"any known version of {pkg_cls.name}",
details=details,
)
)
return errors
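Concretely, the new check would flag the first and last directives below, but not the middle one (a hypothetical package illustrating the rules in the comments above):

class Foo(Package):
    version("1.0")
    version("1.2")

    # Flagged: no version of foo can ever satisfy @:0.9, since 1.0 is the
    # minimum known version.
    depends_on("bar", when="@:0.9")

    # Not flagged: @1.1 lies within [1.0, infinity), even though 1.1 itself
    # is not a registered version.
    depends_on("baz", when="@1.1")

    # Flagged: patches must intersect a known version, and neither 1.0
    # nor 1.2 intersects @1.1.
    patch("fix.patch", when="@1.1")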
def _analyze_variants_in_directive(pkg, constraint, directive, error_cls):
errors = []
variant_names = pkg.variant_names()

View File

@@ -110,13 +110,6 @@ def __init__(self, root):
self._write_transaction_impl = llnl.util.lang.nullcontext
self._read_transaction_impl = llnl.util.lang.nullcontext
def _handle_old_db_versions_read(self, check, db, *, reindex: bool):
if not self.is_readable():
raise spack_db.DatabaseNotReadableError(
f"cannot read buildcache v{self.db_version} at {self.root}"
)
return self._handle_current_version_read(check, db)
class FetchCacheError(Exception):
"""Error thrown when fetching the cache failed, usually a composite error list."""
@@ -249,7 +242,7 @@ def _associate_built_specs_with_mirror(self, cache_key, mirror_url):
self._index_file_cache.init_entry(cache_key)
cache_path = self._index_file_cache.cache_path(cache_key)
with self._index_file_cache.read_transaction(cache_key):
db._read_from_file(pathlib.Path(cache_path))
db._read_from_file(cache_path)
except spack_db.InvalidDatabaseVersionError as e:
tty.warn(
f"you need a newer Spack version to read the buildcache index for the "
@@ -930,7 +923,7 @@ class FileTypes:
UNKNOWN = 2
NOT_ISO8859_1_TEXT = re.compile(b"[\x00\x7f-\x9f]")
NOT_ISO8859_1_TEXT = re.compile(b"[\x00\x7F-\x9F]")
def file_type(f: IO[bytes]) -> int:
@@ -2536,10 +2529,10 @@ def install_root_node(
allow_missing: when true, allows installing a node with missing dependencies
"""
# Early termination
if spec.external or not spec.concrete:
warnings.warn("Skipping external or abstract spec {0}".format(spec.format()))
if spec.external or spec.virtual:
warnings.warn("Skipping external or virtual package {0}".format(spec.format()))
return
elif spec.installed and not force:
elif spec.concrete and spec.installed and not force:
warnings.warn("Package for spec {0} already installed.".format(spec.format()))
return

View File

@@ -234,6 +234,14 @@ def _root_spec(spec_str: str) -> str:
# Add a compiler and platform requirement to the root spec.
platform = str(spack.platforms.host())
if platform == "darwin":
spec_str += " %apple-clang"
elif platform == "windows":
spec_str += " %msvc"
elif platform == "linux":
spec_str += " %gcc"
elif platform == "freebsd":
spec_str += " %clang"
spec_str += f" platform={platform}"
target = archspec.cpu.host().family
spec_str += f" target={target}"

View File

@@ -15,13 +15,11 @@
import archspec.cpu
import spack.compilers.config
import spack.compilers.libraries
import spack.config
import spack.compiler
import spack.compilers
import spack.platforms
import spack.spec
import spack.traverse
import spack.version
from .config import spec_for_current_python
@@ -40,7 +38,7 @@ def __init__(self, configuration):
self.external_cmake, self.external_bison = self._externals_from_yaml(configuration)
def _valid_compiler_or_raise(self):
def _valid_compiler_or_raise(self) -> "spack.compiler.Compiler":
if str(self.host_platform) == "linux":
compiler_name = "gcc"
elif str(self.host_platform) == "darwin":
@@ -48,30 +46,17 @@ def _valid_compiler_or_raise(self):
elif str(self.host_platform) == "windows":
compiler_name = "msvc"
elif str(self.host_platform) == "freebsd":
compiler_name = "llvm"
compiler_name = "clang"
else:
raise RuntimeError(f"Cannot bootstrap clingo from sources on {self.host_platform}")
candidates = [
x
for x in spack.compilers.config.CompilerFactory.from_packages_yaml(spack.config.CONFIG)
if x.name == compiler_name
]
candidates = spack.compilers.compilers_for_spec(
compiler_name, arch_spec=self.host_architecture
)
if not candidates:
raise RuntimeError(
f"Cannot find any version of {compiler_name} to bootstrap clingo from sources"
)
candidates.sort(key=lambda x: x.version, reverse=True)
best = candidates[0]
# Get compilers for bootstrapping from the 'builtin' repository
best.namespace = "builtin"
# If the compiler does not support C++ 14, fail with a legible error message
try:
_ = best.package.standard_flag(language="cxx", standard="14")
except RuntimeError as e:
raise RuntimeError(
"cannot find a compiler supporting C++ 14 [needed to bootstrap clingo]"
) from e
candidates.sort(key=lambda x: x.spec.version, reverse=True)
return candidates[0]
def _externals_from_yaml(
@@ -90,6 +75,9 @@ def _externals_from_yaml(
if not s.satisfies(requirements[pkg_name]):
continue
if not s.intersects(f"%{self.host_compiler.spec}"):
continue
if not s.intersects(f"arch={self.host_architecture}"):
continue
@@ -122,14 +110,11 @@ def concretize(self) -> "spack.spec.Spec":
# Tweak it to conform to the host architecture
for node in s.traverse():
node.architecture.os = str(self.host_os)
node.compiler = self.host_compiler.spec
node.architecture = self.host_architecture
if node.name == "gcc-runtime":
node.versions = self.host_compiler.versions
# Can't use re2c@3.1 with Python 3.6
if self.host_python.satisfies("@3.6"):
s["re2c"].versions.versions = [spack.version.from_string("=2.2")]
node.versions = self.host_compiler.spec.versions
for edge in spack.traverse.traverse_edges([s], cover="edges"):
if edge.spec.name == "python":
@@ -141,9 +126,6 @@ def concretize(self) -> "spack.spec.Spec":
if edge.spec.name == "cmake" and self.external_cmake:
edge.spec = self.external_cmake
if edge.spec.name == self.host_compiler.name:
edge.spec = self.host_compiler
if "libc" in edge.virtuals:
edge.spec = self.host_libc
@@ -159,12 +141,12 @@ def python_external_spec(self) -> "spack.spec.Spec":
return self._external_spec(result)
def libc_external_spec(self) -> "spack.spec.Spec":
detector = spack.compilers.libraries.CompilerPropertyDetector(self.host_compiler)
result = detector.default_libc()
result = self.host_compiler.default_libc
return self._external_spec(result)
def _external_spec(self, initial_spec) -> "spack.spec.Spec":
initial_spec.namespace = "builtin"
initial_spec.compiler = self.host_compiler.spec
initial_spec.architecture = self.host_architecture
for flag_type in spack.spec.FlagMap.valid_compiler_flags():
initial_spec.compiler_flags[flag_type] = []

View File

@@ -10,7 +10,7 @@
from llnl.util import tty
import spack.compilers.config
import spack.compilers
import spack.config
import spack.environment
import spack.modules
@@ -142,8 +142,8 @@ def _bootstrap_config_scopes() -> Sequence["spack.config.ConfigScope"]:
def _add_compilers_if_missing() -> None:
arch = spack.spec.ArchSpec.default_arch()
if not spack.compilers.config.compilers_for_arch(arch):
spack.compilers.config.find_compilers()
if not spack.compilers.compilers_for_arch(arch):
spack.compilers.find_compilers()
@contextlib.contextmanager

View File

@@ -292,12 +292,7 @@ def try_import(self, module: str, abstract_spec_str: str) -> bool:
# Install the spec that should make the module importable
with spack.config.override(self.mirror_scope):
PackageInstaller(
[concrete_spec.package],
fail_fast=True,
package_use_cache=False,
dependencies_use_cache=False,
).install()
PackageInstaller([concrete_spec.package], fail_fast=True).install()
if _try_import_from_store(module, query_spec=concrete_spec, query_info=info):
self.last_search = info
@@ -367,7 +362,6 @@ def ensure_module_importable_or_raise(module: str, abstract_spec: Optional[str]
for current_config in bootstrapping_sources():
if not source_is_enabled(current_config):
continue
with exception_handler.forward(current_config["name"], Exception):
if create_bootstrapper(current_config).try_import(module, abstract_spec):
return

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

Some files were not shown because too many files have changed in this diff