Compare commits


6 Commits

Author               SHA1        Message                                                   Date
Massimiliano Culpo   8c436c8ed1  asio: remove self-referential dependencies (#42469)       2024-02-08 12:06:13 -08:00
                                 These shouldn't be an issue, but they can be expressed
                                 in terms of variants on the package.
eugeneswalker        2064934ed3  hydrogen@1.5.3: cmake patch with ESCAPE_QUOTES (#42325)   2024-01-31 09:02:28 -08:00
wspear               c5467ef1c4  Update tau 2.33.1 hash (#42336)                           2024-01-28 09:45:21 -08:00
wspear               8b59d58ca6  pdt 3.25.2                                                2024-01-26 18:23:34 -08:00
                                 Add support for -icpx for oneapi
wspear               09074727bf  Add tau 2.33.1                                            2024-01-26 18:23:30 -08:00
eugeneswalker        f562ffba72  xyce: break blis circularity in depends_on                2024-01-26 09:39:11 -08:00
695 changed files with 11623 additions and 11893 deletions

View File

@@ -1,6 +0,0 @@
<!--
Remember that `spackbot` can help with your PR in multiple ways:
- `@spackbot help` shows all the commands that are currently available
- `@spackbot fix style` tries to push a commit to fix style issues in this PR
- `@spackbot re-run pipeline` runs the pipelines again, if you have write access to the repository
-->

View File

@@ -43,7 +43,7 @@ jobs:
. share/spack/setup-env.sh
$(which spack) audit packages
$(which spack) audit externals
- uses: codecov/codecov-action@0cfda1dd0a4ad9efc75517f399d859cd1ea4ced1 # @v2.1.0
- uses: codecov/codecov-action@eaaf4bedf32dbdc6b720b63067d99c4d77d6047d # @v2.1.0
if: ${{ inputs.with_coverage == 'true' }}
with:
flags: unittests,audits

View File

@@ -57,7 +57,7 @@ jobs:
- name: Checkout
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # @v2
- uses: docker/metadata-action@8e5442c4ef9f78752691e2d8f8d19755c6f78e81
- uses: docker/metadata-action@dbef88086f6cef02e264edb7dbf63250c17cef6c
id: docker_meta
with:
images: |
@@ -118,5 +118,7 @@ jobs:
context: dockerfiles/${{ matrix.dockerfile[0] }}
platforms: ${{ matrix.dockerfile[1] }}
push: ${{ github.event_name != 'pull_request' }}
cache-from: type=gha
cache-to: type=gha,mode=max
tags: ${{ steps.docker_meta.outputs.tags }}
labels: ${{ steps.docker_meta.outputs.labels }}

View File

@@ -40,7 +40,7 @@ jobs:
with:
fetch-depth: 0
# For pull requests it's not necessary to checkout the code
- uses: dorny/paths-filter@ebc4d7e9ebcb0b1eb21480bb8f43113e996ac77a
- uses: dorny/paths-filter@4512585405083f25c027a35db413c2b3b9006d50
id: filter
with:
# See https://github.com/dorny/paths-filter/issues/56 for the syntax used below

View File

@@ -1,5 +1,5 @@
black==24.2.0
clingo==5.7.1
black==23.12.1
clingo==5.6.2
flake8==7.0.0
isort==5.13.2
mypy==1.8.0

View File

@@ -91,7 +91,7 @@ jobs:
UNIT_TEST_COVERAGE: ${{ matrix.python-version == '3.11' }}
run: |
share/spack/qa/run-unit-tests
- uses: codecov/codecov-action@0cfda1dd0a4ad9efc75517f399d859cd1ea4ced1
- uses: codecov/codecov-action@eaaf4bedf32dbdc6b720b63067d99c4d77d6047d
with:
flags: unittests,linux,${{ matrix.concretizer }}
# Test shell integration
@@ -122,7 +122,7 @@ jobs:
COVERAGE: true
run: |
share/spack/qa/run-shell-tests
- uses: codecov/codecov-action@0cfda1dd0a4ad9efc75517f399d859cd1ea4ced1
- uses: codecov/codecov-action@eaaf4bedf32dbdc6b720b63067d99c4d77d6047d
with:
flags: shelltests,linux
@@ -181,7 +181,7 @@ jobs:
SPACK_TEST_SOLVER: clingo
run: |
share/spack/qa/run-unit-tests
- uses: codecov/codecov-action@0cfda1dd0a4ad9efc75517f399d859cd1ea4ced1 # @v2.1.0
- uses: codecov/codecov-action@eaaf4bedf32dbdc6b720b63067d99c4d77d6047d # @v2.1.0
with:
flags: unittests,linux,clingo
# Run unit tests on MacOS
@@ -216,6 +216,6 @@ jobs:
$(which spack) solve zlib
common_args=(--dist loadfile --tx '4*popen//python=./bin/spack-tmpconfig python -u ./bin/spack python' -x)
$(which spack) unit-test --verbose --cov --cov-config=pyproject.toml --cov-report=xml:coverage.xml "${common_args[@]}"
- uses: codecov/codecov-action@0cfda1dd0a4ad9efc75517f399d859cd1ea4ced1
- uses: codecov/codecov-action@eaaf4bedf32dbdc6b720b63067d99c4d77d6047d
with:
flags: unittests,macos

View File

@@ -33,7 +33,7 @@ jobs:
./share/spack/qa/validate_last_exit.ps1
coverage combine -a
coverage xml
- uses: codecov/codecov-action@0cfda1dd0a4ad9efc75517f399d859cd1ea4ced1
- uses: codecov/codecov-action@eaaf4bedf32dbdc6b720b63067d99c4d77d6047d
with:
flags: unittests,windows
unit-tests-cmd:
@@ -57,7 +57,7 @@ jobs:
./share/spack/qa/validate_last_exit.ps1
coverage combine -a
coverage xml
- uses: codecov/codecov-action@0cfda1dd0a4ad9efc75517f399d859cd1ea4ced1
- uses: codecov/codecov-action@eaaf4bedf32dbdc6b720b63067d99c4d77d6047d
with:
flags: unittests,windows
build-abseil:

View File

@@ -1130,10 +1130,6 @@ A version specifier can also be a list of ranges and specific versions,
separated by commas. For example, ``@1.0:1.5,=1.7.1`` matches any version
in the range ``1.0:1.5`` and the specific version ``1.7.1``.
^^^^^^^^^^^^
Git versions
^^^^^^^^^^^^
For packages with a ``git`` attribute, ``git`` references
may be specified instead of a numerical version i.e. branches, tags
and commits. Spack will stage and build based off the ``git``

View File

@@ -199,7 +199,6 @@ def setup(sphinx):
("py:class", "contextlib.contextmanager"),
("py:class", "module"),
("py:class", "_io.BufferedReader"),
("py:class", "_io.BytesIO"),
("py:class", "unittest.case.TestCase"),
("py:class", "_frozen_importlib_external.SourceFileLoader"),
("py:class", "clingo.Control"),

View File

@@ -357,23 +357,91 @@ If there is a hook that you would like and is missing, you can propose to add a
``pre_install(spec)``
"""""""""""""""""""""
A ``pre_install`` hook is run within the install subprocess, directly before the install starts.
It expects a single argument of a spec.
A ``pre_install`` hook is run within an install subprocess, directly before
the install starts. It expects a single argument of a spec, and is run in
a multiprocessing subprocess. Note that if you see ``pre_install`` functions
associated with packages, these are not hooks as we have defined them here, but
rather callback functions associated with a package install.
"""""""""""""""""""""""""""""""""""""
``post_install(spec, explicit=None)``
"""""""""""""""""""""""""""""""""""""
""""""""""""""""""""""
``post_install(spec)``
""""""""""""""""""""""
A ``post_install`` hook is run within the install subprocess, directly after the install finishes,
but before the build stage is removed and the spec is registered in the database. It expects two
arguments: spec and an optional boolean indicating whether this spec is being installed explicitly.
A ``post_install`` hook is run within an install subprocess, directly after
the install finishes, but before the build stage is removed. If you
write one of these hooks, you should expect it to accept a spec as the only
argument. This is run in a multiprocessing subprocess. A ``post_install`` function is
also seen in packages, but in that context it is not related to the hooks described
here.
""""""""""""""""""""""""""""""""""""""""""""""""""""
``pre_uninstall(spec)`` and ``post_uninstall(spec)``
""""""""""""""""""""""""""""""""""""""""""""""""""""
These hooks are currently used for cleaning up module files after uninstall.
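As a sketch, a hook of either name takes the spec as its only argument; a hypothetical
body could be as simple as:
.. code-block:: python

    def post_uninstall(spec):
        """On uninstall of a spec, we want to..."""
        print("post_uninstall")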
""""""""""""""""""""""""""
``on_install_start(spec)``
""""""""""""""""""""""""""
This hook is run at the beginning of ``lib/spack/spack/installer.py``,
in the install function of a ``PackageInstaller``, and importantly is not
part of the build process, but runs before it. This is when we have just
grabbed the task and are preparing to install. If you write a hook of this
type, you should provide the spec to it.
.. code-block:: python
def on_install_start(spec):
"""On start of an install, we want to...
"""
print('on_install_start')
""""""""""""""""""""""""""""
``on_install_success(spec)``
""""""""""""""""""""""""""""
This hook is run on a successful install, and is also run inside the build
process, akin to ``post_install``. The main difference is that this hook
is run outside of the context of the stage directory, meaning after the
build stage has been removed and the user is alerted that the install was
successful. If you need to write a hook that is run on success of a particular
phase, you should use ``on_phase_success``.
""""""""""""""""""""""""""""
``on_install_failure(spec)``
""""""""""""""""""""""""""""
This hook is run on an install failure that happens outside of the build
subprocess, i.e. somewhere in ``installer.py`` when something else goes wrong.
If you need to write a hook that is relevant to a failure within a build
process, you would want to instead use ``on_phase_failure``.
"""""""""""""""""""""""""""
``on_install_cancel(spec)``
"""""""""""""""""""""""""""
The same, but triggered if a spec install is cancelled for any reason.
"""""""""""""""""""""""""""""""""""""""""""""""
``on_phase_success(pkg, phase_name, log_file)``
"""""""""""""""""""""""""""""""""""""""""""""""
This hook is run within the install subprocess, and specifically when a phase
successfully finishes. Since we are interested in the package, the name of
the phase, and any output from it, we require:
- **pkg**: the package variable, which also has the attached spec at ``pkg.spec``
- **phase_name**: the name of the phase that was successful (e.g., configure)
- **log_file**: the path to the file with output, in case you need to inspect or otherwise interact with it.
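Putting these together, a hypothetical hook of this type could look like the sketch
below (the body is purely illustrative):
.. code-block:: python

    def on_phase_success(pkg, phase_name, log_file):
        """On success of a phase, we want to..."""
        print(f"{pkg.spec.name}: {phase_name} succeeded, log in {log_file}")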
"""""""""""""""""""""""""""""""""""""""""""""
``on_phase_error(pkg, phase_name, log_file)``
"""""""""""""""""""""""""""""""""""""""""""""
In the case of an error during a phase, we might want to trigger some event
with a hook, and this is the purpose of this particular hook. Akin to
``on_phase_success`` we require the same variables - the package that failed,
the name of the phase, and the log file where we might find errors.
^^^^^^^^^^^^^^^^^^^^^^

View File

@@ -416,23 +416,6 @@ that git clone if ``foo`` is in the environment.
Further development on ``foo`` can be tested by reinstalling the environment,
and eventually committed and pushed to the upstream git repo.
If the package being developed supports out-of-source builds then users can use the
``--build_directory`` flag to control the location and name of the build directory.
This is a shortcut to set the ``package_attributes:build_directory`` in the
``packages`` configuration (see :ref:`assigning-package-attributes`).
The supplied location will become the build-directory for that package in all future builds.
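As a sketch, the equivalent ``packages`` configuration could look like this (the
package name ``mypackage`` and the path are placeholders):
.. code-block:: yaml

    packages:
      mypackage:
        package_attributes:
          build_directory: /path/to/build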
.. warning::
Potential pitfalls of setting the build directory
Spack does not check for out-of-source build compatibility with the packages, and
so the onus of making sure the package supports out-of-source builds is on
the user.
For example, most ``autotool`` and ``makefile`` packages do not support out-of-source builds
while all ``CMake`` packages do.
Understanding these nuances is on the software developers, and we strongly encourage
developers to only redirect the build directory if they understand their package's
build system.
^^^^^^^
Loading
^^^^^^^
@@ -489,11 +472,11 @@ a ``packages.yaml`` file) could contain:
.. code-block:: yaml
spack:
# ...
...
packages:
all:
compiler: [intel]
# ...
...
This configuration sets the default compiler for all packages to
``intel``.
@@ -839,7 +822,7 @@ directories.
.. code-block:: yaml
spack:
# ...
...
view:
mpis:
root: /path/to/view
@@ -883,7 +866,7 @@ automatically named ``default``, so that
.. code-block:: yaml
spack:
# ...
...
view: True
is equivalent to
@@ -891,7 +874,7 @@ is equivalent to
.. code-block:: yaml
spack:
# ...
...
view:
default:
root: .spack-env/view
@@ -901,7 +884,7 @@ and
.. code-block:: yaml
spack:
# ...
...
view: /path/to/view
is equivalent to
@@ -909,7 +892,7 @@ is equivalent to
.. code-block:: yaml
spack:
# ...
...
view:
default:
root: /path/to/view

View File

@@ -623,7 +623,7 @@ Fortran.
compilers:
- compiler:
# ...
...
paths:
cc: /usr/bin/clang
cxx: /usr/bin/clang++

View File

@@ -10,7 +10,7 @@ Modules (modules.yaml)
======================
The use of module systems to manage user environment in a controlled way
is a common practice at HPC centers that is sometimes embraced also by
is a common practice at HPC centers that is often embraced also by
individual programmers on their development machines. To support this
common practice Spack integrates with `Environment Modules
<http://modules.sourceforge.net/>`_ and `Lmod
@@ -21,38 +21,14 @@ Modules are one of several ways you can use Spack packages. For other
options that may fit your use case better, you should also look at
:ref:`spack load <spack-load>` and :ref:`environments <environments>`.
-----------
Quick start
-----------
----------------------------
Using module files via Spack
----------------------------
In the current version of Spack, module files are not generated by default. To get started, you
can generate module files for all currently installed packages by running either
.. code-block:: console
$ spack module tcl refresh
or
.. code-block:: console
$ spack module lmod refresh
Spack can also generate module files for all future installations automatically through the
following configuration:
.. code-block:: console
$ spack config add modules:default:enable:[tcl]
or
.. code-block:: console
$ spack config add modules:default:enable:[lmod]
Assuming you have a module system installed, you should now be able to use the ``module`` command
to interact with them:
If you have installed a supported module system you should be able to
run ``module avail`` to see what module
files have been installed. Here is sample output of those programs,
showing lots of installed packages:
.. code-block:: console
@@ -89,17 +65,33 @@ scheme used at your site.
Module file customization
-------------------------
Module files are generated by post-install hooks after the successful
installation of a package.
.. note::
Spack only generates modulefiles when a package is installed. If
you attempt to install a package and it is already installed, Spack
will not regenerate modulefiles for the package. This may lead to
inconsistent modulefiles if the Spack module configuration has
changed since the package was installed, either by editing a file
or changing scopes or environments.
Later in this section there is a subsection on :ref:`regenerating
modules <cmd-spack-module-refresh>` that will allow you to bring
your modules to a consistent state.
The table below summarizes the essential information associated with
the different file formats that can be generated by Spack:
+-----------+--------------+------------------------------+----------------------------------------------+----------------------+
| | Hierarchical | **Default root directory** | **Default template file** | **Compatible tools** |
+===========+==============+==============================+==============================================+======================+
| ``tcl`` | No | share/spack/modules | share/spack/templates/modules/modulefile.tcl | Env. Modules/Lmod |
+-----------+--------------+------------------------------+----------------------------------------------+----------------------+
| ``lmod`` | Yes | share/spack/lmod | share/spack/templates/modules/modulefile.lua | Lmod |
+-----------+--------------+------------------------------+----------------------------------------------+----------------------+
+-----------------------------+--------------------+-------------------------------+----------------------------------------------+----------------------+
| | **Hook name** | **Default root directory** | **Default template file** | **Compatible tools** |
+=============================+====================+===============================+==============================================+======================+
| **Tcl - Non-Hierarchical** | ``tcl`` | share/spack/modules | share/spack/templates/modules/modulefile.tcl | Env. Modules/Lmod |
+-----------------------------+--------------------+-------------------------------+----------------------------------------------+----------------------+
| **Lua - Hierarchical** | ``lmod`` | share/spack/lmod | share/spack/templates/modules/modulefile.lua | Lmod |
+-----------------------------+--------------------+-------------------------------+----------------------------------------------+----------------------+
Spack ships with sensible defaults for the generation of module files, but
@@ -110,7 +102,7 @@ In general you can override or extend the default behavior by:
2. writing specific rules in the ``modules.yaml`` configuration file
3. writing your own templates to override or extend the defaults
The former method lets you express changes in the run-time environment
The former method let you express changes in the run-time environment
that are needed to use the installed software properly, e.g. injecting variables
from language interpreters into their extensions. The latter two instead permit you to
fine-tune the filesystem layout, content and creation of module files to meet
@@ -118,62 +110,79 @@ site specific conventions.
.. _overide-api-calls-in-package-py:
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Setting environment variables dynamically in ``package.py``
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Override API calls in ``package.py``
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
There are two methods that you can implement in any ``package.py`` to dynamically affect the
content of the module files generated by Spack. The most important one is
``setup_run_environment``, which can be used to set environment variables in the module file that
depend on the spec:
There are two methods that you can override in any ``package.py`` to affect the
content of the module files generated by Spack. The first one:
.. code-block:: python
def setup_run_environment(self, env):
if self.spec.satisfies("+foo"):
env.set("FOO", "bar")
pass
The second, less commonly used, is ``setup_dependent_run_environment(self, env, dependent_spec)``,
which allows a dependency to set variables in the module file of its dependents. This is typically
used in packages like ``python``, ``r``, or ``perl`` to prepend the dependent's prefix to the
search path of the interpreter (``PYTHONPATH``, ``R_LIBS``, ``PERL5LIB`` resp.), so it can locate
the packages at runtime.
For example, a simplified version of the ``python`` package could look like this:
can alter the content of the module file associated with the same package where it is overridden.
The second method:
.. code-block:: python
def setup_dependent_run_environment(self, env, dependent_spec):
if dependent_spec.package.extends(self.spec):
env.prepend_path("PYTHONPATH", dependent_spec.prefix.lib.python)
pass
and would make any package that ``extends("python")`` have its library directory added to the
``PYTHONPATH`` environment variable in the module file. It's much more convenient to set this
variable here, than to repeat it in every Python extension's ``setup_run_environment`` method.
can instead inject run-time environment modifications in the module files of packages
that depend on it. In both cases you need to fill ``env`` with the desired
list of environment modifications.
.. admonition:: The ``r`` package and callback APIs
An example in which it is crucial to override both methods
is given by the ``r`` package. This package installs libraries and headers
in non-standard locations and it is possible to prepend the appropriate directory
to the corresponding environment variables:
================== =================================
LD_LIBRARY_PATH ``self.prefix/rlib/R/lib``
PKG_CONFIG_PATH ``self.prefix/rlib/pkgconfig``
================== =================================
with the following snippet:
.. literalinclude:: _spack_root/var/spack/repos/builtin/packages/r/package.py
:pyobject: R.setup_run_environment
The ``r`` package also knows which environment variable should be modified
to make language extensions provided by other packages available, and modifies
it appropriately in the override of the second method:
.. literalinclude:: _spack_root/var/spack/repos/builtin/packages/r/package.py
:pyobject: R.setup_dependent_run_environment
.. _modules-yaml:
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
The ``modules.yaml`` config file and module sets
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
^^^^^^^^^^^^^^^^^^^^^^^^^^
Write a configuration file
^^^^^^^^^^^^^^^^^^^^^^^^^^
The configuration files that control module generation behavior are named ``modules.yaml``. The
default configuration looks like this:
The configuration files that control module generation behavior
are named ``modules.yaml``. The default configuration:
.. literalinclude:: _spack_root/etc/spack/defaults/modules.yaml
:language: yaml
You can define one or more **module sets**, each of which can be configured separately with regard
to install location, naming scheme, inclusion and exclusion, autoloading, et cetera.
activates the hooks to generate ``tcl`` module files and inspects
the installation folder of each package for the presence of a set of subdirectories
(``bin``, ``man``, ``share/man``, etc.). If any is found, its full path is prepended
to the environment variables listed below the folder name.
The default module set is aptly named ``default``. All
:ref:`Spack commands that operate on modules <maintaining-module-files>` apply to the ``default``
module set, unless another module set is specified explicitly (with the ``--name`` flag).
Spack modules can be configured for multiple module sets. The default
module set is named ``default``. All Spack commands which operate on
modules default to apply the ``default`` module set, but can be
applied to any module set in the configuration.
^^^^^^^^^^^^^^^^^^^^^^^^^
"""""""""""""""""""""""""
Changing the modules root
^^^^^^^^^^^^^^^^^^^^^^^^^
"""""""""""""""""""""""""
As shown in the table above, the default module root for ``lmod`` is
``$spack/share/spack/lmod`` and the default root for ``tcl`` is
@@ -189,7 +198,7 @@ set by changing the ``roots`` key of the configuration.
my_custom_lmod_modules:
roots:
lmod: /path/to/install/custom/lmod/modules
# ...
...
This configuration will create two module sets. The default module set
will install its ``tcl`` modules to ``/path/to/install/tcl/modules``
@@ -215,32 +224,25 @@ location could be confusing to users of your modules. In the next
section, we will discuss enabling and disabling module types (module
file generators) for each module set.
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Automatically generating module files
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
""""""""""""""""""""
Activate other hooks
""""""""""""""""""""
Spack can be configured to automatically generate module files as part of package installation.
This is done by adding the desired module systems to the ``enable`` list.
Any other module file generator shipped with Spack can be activated by adding it to the
list under the ``enable`` key in the module configuration. Currently the only generator that
is not active by default is ``lmod``, which produces hierarchical lua module files.
Each module system can then be configured separately. In fact, you should list configuration
options that affect a particular type of module files under a top level key corresponding
to the generator being customized:
.. code-block:: yaml
modules:
default:
enable:
- tcl
- lmod
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Configuring ``tcl`` and ``lmod`` modules
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
You can configure the behavior of either module system separately, under a key corresponding to
the generator being customized:
.. code-block:: yaml
modules:
default:
- tcl
- lmod
tcl:
# contains environment modules specific customizations
lmod:
@@ -251,70 +253,16 @@ either change the layout of the module files on the filesystem, or they will aff
their content. For the latter point it is possible to use anonymous specs
to fine-tune the set of packages to which the modifications should be applied.
.. _autoloading-dependencies:
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Autoloading and hiding dependencies
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
A module file should set the variables that are needed for an application to work. But since an
application often has many dependencies, where should all the environment variables for those be
set? In Spack the rule is that each package sets the runtime variables that are needed by the
package itself, and no more. This way, dependencies can be loaded standalone too, and duplication
of environment variables is avoided.
That means however that if you want to use an application, you need to load the modules for all its
dependencies. Of course this is not something you would want users to do manually.
Since Spack knows the dependency graph of every package, it can easily generate module files that
automatically load the modules for its dependencies recursively. It is enabled by default for both
Lmod and Environment Modules under the ``autoload: direct`` config option. The former system has
builtin support through the ``depends_on`` function, the latter simply uses a ``module load``
statement. Both module systems (at least in newer versions) do reference counting, so that if a
module is loaded by two different modules, it will only be unloaded after the others are.
The ``autoload`` key accepts the values ``none``, ``direct``, and ``all``. To disable it, use
``none``, and to enable, it's best to stick to ``direct``, which only autoloads the direct link and
run type dependencies, relying on recursive autoloading to load the rest.
A common complaint about autoloading is the large number of modules that are visible to the user.
Spack has a solution for this as well: ``hide_implicits: true``. This ensures that only those
packages you've explicitly installed are exposed by ``module avail``, but still allows for
autoloading of hidden dependencies. Lmod should support hiding implicits in general, while
Environment Modules requires version 4.7 or higher.
.. note::
If supported by your module system, we highly encourage the following configuration that enables
autoloading and hiding of implicits. It ensures all runtime variables are set correctly,
including those for dependencies, without overwhelming the user with a large number of available
modules. Further, it makes it easier to get readable module names without collisions, see the
section below on :ref:`modules-projections`.
.. code-block:: yaml
modules:
default:
tcl:
hide_implicits: true
all:
autoload: direct
lmod:
hide_implicits: true
all:
autoload: direct
.. _anonymous_specs:
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Setting environment variables for selected packages in config
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
""""""""""""""""""""""""""""
Selection by anonymous specs
""""""""""""""""""""""""""""
In the configuration file you can filter particular specs, and make further changes to the
environment variables that go into their module files. This is very powerful when you want to avoid
:ref:`modifying the package itself <overide-api-calls-in-package-py>`, or when you want to set
certain variables on multiple selected packages at once.
For instance, in the snippet below:
In the configuration file you can use *anonymous specs* (i.e. specs
that **are not required to have a root package** and are thus used just
to express constraints) to apply certain modifications on a selected set
of the installed software. For instance, in the snippet below:
.. code-block:: yaml
@@ -357,28 +305,12 @@ the variable ``FOOBAR`` will be unset.
.. note::
Order does matter
The modifications associated with the ``all`` keyword are always evaluated
first, no matter where they appear in the configuration file. All the other changes to
environment variables for matching specs are evaluated from top to bottom.
first, no matter where they appear in the configuration file. All the other
spec constraints are instead evaluated top to bottom.
.. warning::
As general advice, it's often better to set as few unnecessary variables as possible. For
example, the following seemingly innocent and potentially useful configuration
.. code-block:: yaml
all:
environment:
set:
"{name}_ROOT": "{prefix}"
sets ``BINUTILS_ROOT`` to its prefix in modules for ``binutils``, which happens to break
the ``gcc`` compiler: it uses this variable as its default search path for certain object
files and libraries, and by merely setting it, everything fails to link.
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
""""""""""""""""""""""""""""""""""""""""""""
Exclude or include specific module files
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
""""""""""""""""""""""""""""""""""""""""""""
You can use anonymous specs also to prevent module files from being written or
to force them to be written. Consider the case where you want to hide from users
@@ -398,19 +330,14 @@ you will prevent the generation of module files for any package that
is compiled with ``gcc@4.4.7``, with the only exception of any ``gcc``
or any ``llvm`` installation.
It is safe to combine ``exclude`` and ``autoload``
:ref:`mentioned above <autoloading-dependencies>`. When ``exclude`` prevents a module file from
being generated for a dependency, the ``autoload`` feature will simply not generate a statement
to load it.
.. _modules-projections:
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
"""""""""""""""""""""""""""""""
Customize the naming of modules
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
"""""""""""""""""""""""""""""""
The names of environment modules generated by Spack are not always easy to
The names of environment modules generated by spack are not always easy to
fully comprehend due to the long hash in the name. There are three module
configuration options to help with that. The first is a global setting to
adjust the hash length. It can be set anywhere from 0 to 32 and has a default
@@ -426,13 +353,6 @@ shows how to set hash length in the module file names:
tcl:
hash_length: 7
.. tip::
Using ``hide_implicits: true`` (see :ref:`autoloading-dependencies`) vastly reduces the number
of modules exposed to the user. The hidden modules always contain the hash in their name, and are
not influenced by the ``hash_length`` setting. Hidden implicits thus make it easier to use a
short hash length or no hash at all, without risking name conflicts.
To help make module names more readable, and to help alleviate name conflicts
with a short hash, one can use the ``suffixes`` option in the modules
configuration file. This option will add strings to modules that match a spec.
@@ -445,12 +365,12 @@ For instance, the following config options,
tcl:
all:
suffixes:
^python@3.12: 'python-3.12'
^python@2.7.12: 'python-2.7.12'
^openblas: 'openblas'
will add a ``python-3.12`` version string to any packages compiled with
Python matching the spec, ``python@3.12``. This is useful to know which
version of Python a set of Python extensions is associated with. Likewise, the
will add a ``python-2.7.12`` version string to any packages compiled with
python matching the spec, ``python@2.7.12``. This is useful to know which
version of python a set of python extensions is associated with. Likewise, the
``openblas`` string is attached to any program that has openblas in the spec,
most likely via the ``+blas`` variant specification.
@@ -548,11 +468,41 @@ that are already in the Lmod hierarchy.
For hierarchies that are deeper than three layers ``lmod spider`` may have some issues.
See `this discussion on the Lmod project <https://github.com/TACC/Lmod/issues/114>`_.
""""""""""""""""""""""
Select default modules
""""""""""""""""""""""
By default, when multiple modules of the same name share a directory,
the highest version number will be the default module. This behavior
of the ``module`` command can be overridden with a symlink named
``default`` to the desired default module. If you wish to configure
default modules with Spack, add a ``defaults`` key to your modules
configuration:
.. code-block:: yaml
modules:
my-module-set:
tcl:
defaults:
- gcc@10.2.1
- hdf5@1.2.10+mpi+hl%gcc
These defaults may be arbitrarily specific. For any package that
satisfies a default, Spack will generate the module file in the
appropriate path, and will generate a default symlink to the module
file as well.
.. warning::
If Spack is configured to generate multiple default packages in the
same directory, the last modulefile to be generated will be the
default module.
.. _customize-env-modifications:
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
"""""""""""""""""""""""""""""""""""
Customize environment modifications
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
"""""""""""""""""""""""""""""""""""
You can control which prefixes in a Spack package are added to
environment variables with the ``prefix_inspections`` section; this
@@ -650,9 +600,9 @@ stack to users who are likely to inspect the modules to find full
paths to software, when it is desirable to present the users with a
simpler set of paths than those generated by the Spack install tree.
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
""""""""""""""""""""""""""""""""""""
Filter out environment modifications
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
""""""""""""""""""""""""""""""""""""
Modifications to certain environment variables in module files are there by
default, for instance because they are generated by prefix inspections.
@@ -672,37 +622,49 @@ do so by using the ``exclude_env_vars``:
The configuration above will generate module files that will not contain
modifications to either ``CPATH`` or ``LIBRARY_PATH``.
^^^^^^^^^^^^^^^^^^^^^^
Select default modules
^^^^^^^^^^^^^^^^^^^^^^
By default, when multiple modules of the same name share a directory,
the highest version number will be the default module. This behavior
of the ``module`` command can be overridden with a symlink named
``default`` to the desired default module. If you wish to configure
default modules with Spack, add a ``defaults`` key to your modules
configuration:
.. _autoloading-dependencies:
"""""""""""""""""""""
Autoload dependencies
"""""""""""""""""""""
Often it is required for a module to have its (transitive) dependencies loaded as well.
One example where this is useful is when one package needs to use executables provided
by its dependency; when the dependency is autoloaded, the executable will be in the
PATH. Similarly for scripting languages such as Python, packages and their dependencies
have to be loaded together.
Autoloading is enabled by default for Lmod and Environment Modules. The former
has builtin support through the ``depends_on`` function. The latter uses a
``module load`` statement to load and track dependencies.
Autoloading can also be enabled conditionally:
.. code-block:: yaml
modules:
my-module-set:
tcl:
defaults:
- gcc@10.2.1
- hdf5@1.2.10+mpi+hl%gcc
modules:
default:
tcl:
all:
autoload: none
^python:
autoload: direct
These defaults may be arbitrarily specific. For any package that
satisfies a default, Spack will generate the module file in the
appropriate path, and will generate a default symlink to the module
file as well.
The configuration file above will produce module files that will
load their direct dependencies if the installed package depends on ``python``.
The allowed values for the ``autoload`` statement are either ``none``,
``direct`` or ``all``.
.. warning::
If Spack is configured to generate multiple default packages in the
same directory, the last modulefile to be generated will be the
default module.
.. _maintaining-module-files:
.. note::
Tcl prerequisites
In the ``tcl`` section of the configuration file it is possible to use
the ``prerequisites`` directive that accepts the same values as
``autoload``. It will produce module files that have a ``prereq``
statement, which autoloads dependencies on Environment Modules when its
``auto_handling`` configuration option is enabled. If Environment Modules
is installed with Spack, ``auto_handling`` is enabled by default starting
with version 4.2. Otherwise it is enabled by default since version 5.0.
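A minimal sketch of such a configuration, mirroring the ``autoload`` snippet above:
.. code-block:: yaml

    modules:
      default:
        tcl:
          all:
            prerequisites: direct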
------------------------
Maintaining Module Files

View File

@@ -647,8 +647,6 @@ manually placed files within the install prefix are owned by the
assigned group. If no group is assigned, Spack will allow the OS
default behavior to go as expected.
.. _assigning-package-attributes:
----------------------------
Assigning Package Attributes
----------------------------
@@ -659,11 +657,10 @@ You can assign class-level attributes in the configuration:
packages:
mpileaks:
package_attributes:
# Override existing attributes
url: http://www.somewhereelse.com/mpileaks-1.0.tar.gz
# ... or add new ones
x: 1
# Override existing attributes
url: http://www.somewhereelse.com/mpileaks-1.0.tar.gz
# ... or add new ones
x: 1
Attributes set this way will be accessible to any method executed
in the package.py file (e.g. the ``install()`` method). Values for these

View File

@@ -810,7 +810,7 @@ generated by ``spack ci generate``. You also want your generated rebuild jobs
.. code-block:: yaml
spack:
# ...
...
ci:
pipeline-gen:
- build-job:

View File

@@ -17,7 +17,7 @@ experimental software separately from the built-in repository. Spack
allows you to configure local repositories using either the
``repos.yaml`` or the ``spack repo`` command.
A package repository is a directory structured like this::
A package repository a directory structured like this::
repo/
repo.yaml

View File

@@ -2,12 +2,12 @@ sphinx==7.2.6
sphinxcontrib-programoutput==0.17
sphinx_design==0.5.0
sphinx-rtd-theme==2.0.0
python-levenshtein==0.25.0
python-levenshtein==0.23.0
docutils==0.20.1
pygments==2.17.2
urllib3==2.2.1
pytest==8.0.1
urllib3==2.1.0
pytest==7.4.4
isort==5.13.2
black==24.2.0
black==23.12.1
flake8==7.0.0
mypy==1.8.0

View File

@@ -171,7 +171,7 @@ def polite_path(components: Iterable[str]):
@memoized
def _polite_antipattern():
# A regex of all the characters we don't want in a filename
return re.compile(r"[^A-Za-z0-9_+.-]")
return re.compile(r"[^A-Za-z0-9_.-]")
def polite_filename(filename: str) -> str:
@@ -920,34 +920,28 @@ def get_filetype(path_name):
return output.strip()
def has_shebang(path):
"""Returns whether a path has a shebang line. Returns False if the file cannot be opened."""
try:
with open(path, "rb") as f:
return f.read(2) == b"#!"
except OSError:
return False
@system_path_filter
def is_nonsymlink_exe_with_shebang(path):
"""Returns whether the path is an executable regular file with a shebang. Returns False too
when the path is a symlink to a script, and also when the file cannot be opened."""
"""
Returns whether the path is an executable script with a shebang.
Return False when the path is a *symlink* to an executable script.
"""
try:
st = os.lstat(path)
except OSError:
return False
# Should not be a symlink
if stat.S_ISLNK(st.st_mode):
return False
# Should not be a symlink
if stat.S_ISLNK(st.st_mode):
return False
# Should be executable
if not st.st_mode & (stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH):
return False
# Should be executable
if not st.st_mode & (stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH):
# Should start with a shebang
with open(path, "rb") as f:
return f.read(2) == b"#!"
except (IOError, OSError):
return False
return has_shebang(path)
@system_path_filter(arg_slice=slice(1))
def chgrp_if_not_world_writable(path, group):
@@ -1240,47 +1234,6 @@ def get_single_file(directory):
return fnames[0]
@system_path_filter
def windows_sfn(path: os.PathLike):
"""Returns 8.3 Filename (SFN) representation of
path
8.3 Filenames (SFN or short filename) is a file
naming convention used prior to Win95 that Windows
still (and will continue to) support. This convention
caps filenames at 8 characters, and most importantly
does not allow for spaces in addition to other specifications.
The scheme is generally the same as a normal Windows
file scheme, but all spaces are removed and the filename
is capped at 6 characters. The remaining characters are
replaced with ~N where N is the number file in a directory
that a given file represents i.e. Program Files and Program Files (x86)
would be PROGRA~1 and PROGRA~2 respectively.
Further, all file/directory names are all caps (although modern Windows
is case insensitive in practice).
Conversion is accomplished by fileapi.h GetShortPathNameW
Returns paths in 8.3 Filename form
Note: this method is a no-op on Linux
Args:
path: Path to be transformed into SFN (8.3 filename) format
"""
# This should not be run-able on linux/macos
if sys.platform != "win32":
return path
path = str(path)
import ctypes
k32 = ctypes.WinDLL("kernel32", use_last_error=True)
# stub Windows types TCHAR[LENGTH]
TCHAR_arr = ctypes.c_wchar * len(path)
ret_str = TCHAR_arr()
k32.GetShortPathNameW(path, ret_str, len(path))
return ret_str.value
@contextmanager
def temp_cwd():
tmp_dir = tempfile.mkdtemp()
@@ -1424,89 +1377,120 @@ def traverse_tree(
yield (source_path, dest_path)
def lexists_islink_isdir(path):
"""Computes the tuple (lexists(path), islink(path), isdir(path)) in a minimal
number of stat calls on unix. Use os.path and symlink.islink methods for windows."""
if sys.platform == "win32":
if not os.path.lexists(path):
return False, False, False
return os.path.lexists(path), islink(path), os.path.isdir(path)
# First try to lstat, so we know if it's a link or not.
try:
lst = os.lstat(path)
except (IOError, OSError):
return False, False, False
is_link = stat.S_ISLNK(lst.st_mode)
# Check whether file is a dir.
if not is_link:
is_dir = stat.S_ISDIR(lst.st_mode)
return True, is_link, is_dir
# Check whether symlink points to a dir.
try:
st = os.stat(path)
is_dir = stat.S_ISDIR(st.st_mode)
except (IOError, OSError):
# Dangling symlink (i.e. it lexists but not exists)
is_dir = False
return True, is_link, is_dir
class BaseDirectoryVisitor:
"""Base class and interface for :py:func:`visit_directory_tree`."""
def visit_file(self, root: str, rel_path: str, depth: int) -> None:
def visit_file(self, root, rel_path, depth):
"""Handle the non-symlink file at ``os.path.join(root, rel_path)``
Parameters:
root: root directory
rel_path: relative path to current file from ``root``
root (str): root directory
rel_path (str): relative path to current file from ``root``
depth (int): depth of current file from the ``root`` directory"""
pass
def visit_symlinked_file(self, root: str, rel_path: str, depth) -> None:
"""Handle the symlink to a file at ``os.path.join(root, rel_path)``. Note: ``rel_path`` is
the location of the symlink, not to what it is pointing to. The symlink may be dangling.
def visit_symlinked_file(self, root, rel_path, depth):
"""Handle the symlink to a file at ``os.path.join(root, rel_path)``.
Note: ``rel_path`` is the location of the symlink, not to what it is
pointing to. The symlink may be dangling.
Parameters:
root: root directory
rel_path: relative path to current symlink from ``root``
depth: depth of current symlink from the ``root`` directory"""
root (str): root directory
rel_path (str): relative path to current symlink from ``root``
depth (int): depth of current symlink from the ``root`` directory"""
pass
def before_visit_dir(self, root: str, rel_path: str, depth: int) -> bool:
def before_visit_dir(self, root, rel_path, depth):
"""Return True from this function to recurse into the directory at
os.path.join(root, rel_path). Return False in order not to recurse further.
Parameters:
root: root directory
rel_path: relative path to current directory from ``root``
depth: depth of current directory from the ``root`` directory
root (str): root directory
rel_path (str): relative path to current directory from ``root``
depth (int): depth of current directory from the ``root`` directory
Returns:
bool: ``True`` when the directory should be recursed into. ``False`` when
not"""
return False
def before_visit_symlinked_dir(self, root: str, rel_path: str, depth: int) -> bool:
"""Return ``True`` to recurse into the symlinked directory and ``False`` in order not to.
Note: ``rel_path`` is the path to the symlink itself. Following symlinked directories
blindly can cause infinite recursion due to cycles.
def before_visit_symlinked_dir(self, root, rel_path, depth):
"""Return ``True`` to recurse into the symlinked directory and ``False`` in
order not to. Note: ``rel_path`` is the path to the symlink itself.
Following symlinked directories blindly can cause infinite recursion due to
cycles.
Parameters:
root: root directory
rel_path: relative path to current symlink from ``root``
depth: depth of current symlink from the ``root`` directory
root (str): root directory
rel_path (str): relative path to current symlink from ``root``
depth (int): depth of current symlink from the ``root`` directory
Returns:
bool: ``True`` when the directory should be recursed into. ``False`` when
not"""
return False
def after_visit_dir(self, root: str, rel_path: str, depth: int) -> None:
"""Called after recursion into ``rel_path`` finished. This function is not called when
``rel_path`` was not recursed into.
def after_visit_dir(self, root, rel_path, depth):
"""Called after recursion into ``rel_path`` finished. This function is not
called when ``rel_path`` was not recursed into.
Parameters:
root: root directory
rel_path: relative path to current directory from ``root``
depth: depth of current directory from the ``root`` directory"""
root (str): root directory
rel_path (str): relative path to current directory from ``root``
depth (int): depth of current directory from the ``root`` directory"""
pass
def after_visit_symlinked_dir(self, root: str, rel_path: str, depth: int) -> None:
"""Called after recursion into ``rel_path`` finished. This function is not called when
``rel_path`` was not recursed into.
def after_visit_symlinked_dir(self, root, rel_path, depth):
"""Called after recursion into ``rel_path`` finished. This function is not
called when ``rel_path`` was not recursed into.
Parameters:
root: root directory
rel_path: relative path to current symlink from ``root``
depth: depth of current symlink from the ``root`` directory"""
root (str): root directory
rel_path (str): relative path to current symlink from ``root``
depth (int): depth of current symlink from the ``root`` directory"""
pass
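# As a sketch (hypothetical, not part of this diff): a visitor that collects
# the relative paths of all regular files, using the interface above.
class FileLister(BaseDirectoryVisitor):
    def __init__(self):
        self.files = []

    def visit_file(self, root, rel_path, depth):
        # Record every non-symlink file encountered.
        self.files.append(rel_path)

    def before_visit_dir(self, root, rel_path, depth):
        # Recurse into every real directory.
        return True

# Usage: lister = FileLister(); visit_directory_tree("/some/prefix", lister)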
def visit_directory_tree(
root: str, visitor: BaseDirectoryVisitor, rel_path: str = "", depth: int = 0
):
"""Recurses the directory root depth-first through a visitor pattern using the interface from
:py:class:`BaseDirectoryVisitor`
def visit_directory_tree(root, visitor, rel_path="", depth=0):
"""Recurses the directory root depth-first through a visitor pattern using the
interface from :py:class:`BaseDirectoryVisitor`
Parameters:
root: path of directory to recurse into
visitor: what visitor to use
rel_path: current relative path from the root
depth: current depth from the root
root (str): path of directory to recurse into
visitor (BaseDirectoryVisitor): what visitor to use
rel_path (str): current relative path from the root
depth (str): current depth from the root
"""
dir = os.path.join(root, rel_path)
dir_entries = sorted(os.scandir(dir), key=lambda d: d.name)
@@ -1514,19 +1498,26 @@ def visit_directory_tree(
for f in dir_entries:
rel_child = os.path.join(rel_path, f.name)
islink = f.is_symlink()
# On Windows, symlinks to directories are distinct from symlinks to files, and it is
# possible to create a broken symlink to a directory (e.g. using os.symlink without
# `target_is_directory=True`), invoking `isdir` on a symlink on Windows that is broken in
# this manner will result in an error. In this case we can work around the issue by reading
# the target and resolving the directory ourselves
# On Windows, symlinks to directories are distinct from
# symlinks to files, and it is possible to create a
# broken symlink to a directory (e.g. using os.symlink
# without `target_is_directory=True`), invoking `isdir`
# on a symlink on Windows that is broken in this manner
# will result in an error. In this case we can work around
# the issue by reading the target and resolving the
# directory ourselves
try:
isdir = f.is_dir()
except OSError as e:
if sys.platform == "win32" and hasattr(e, "winerror") and e.winerror == 5 and islink:
# if path is a symlink, determine destination and evaluate file vs directory
# if path is a symlink, determine destination and
# evaluate file vs directory
link_target = resolve_link_target_relative_to_the_link(f)
# link_target might be relative but resolve_link_target_relative_to_the_link
# will ensure that if so, that it is relative to the CWD and therefore makes sense
# link_target might be relative but
# resolve_link_target_relative_to_the_link
# will ensure that if so, that it is relative
# to the CWD and therefore
# makes sense
isdir = os.path.isdir(link_target)
else:
raise e

View File

@@ -8,7 +8,7 @@
import filecmp
import os
import shutil
from typing import Callable, Dict, List, Optional, Tuple
from collections import OrderedDict
import llnl.util.tty as tty
from llnl.util.filesystem import BaseDirectoryVisitor, mkdirp, touch, traverse_tree
@@ -51,32 +51,32 @@ class SourceMergeVisitor(BaseDirectoryVisitor):
- A list of merge conflicts in dst/
"""
def __init__(self, ignore: Optional[Callable[[str], bool]] = None):
def __init__(self, ignore=None):
self.ignore = ignore if ignore is not None else lambda f: False
# When mapping <src root> to <dst root>/<projection>, we need to prepend the <projection>
# bit to the relative path in the destination dir.
self.projection: str = ""
# When mapping <src root> to <dst root>/<projection>, we need
# to prepend the <projection> bit to the relative path in the
# destination dir.
self.projection = ""
# Two files f and g conflict if they are not os.path.samefile(f, g) and they are both
# projected to the same destination file. These conflicts are not necessarily fatal, and
# can be resolved or ignored. For example <prefix>/LICENSE or
# <site-packages>/<namespace>/__init__.py conflicts can be ignored).
self.file_conflicts: List[MergeConflict] = []
# When a file blocks another file, the conflict can sometimes
# be resolved / ignored (e.g. <prefix>/LICENSE or
# or <site-packages>/<namespace>/__init__.py conflicts can be
# ignored).
self.file_conflicts = []
# When we have to create a dir where a file is, or a file where a dir is, we have fatal
# errors, listed here.
self.fatal_conflicts: List[MergeConflict] = []
# When we have to create a dir where a file is, or a file
# where a dir is, we have fatal errors, listed here.
self.fatal_conflicts = []
# What directories we have to make; this is an ordered dict, so that we have a fast lookup
# and can run mkdir in order.
self.directories: Dict[str, Tuple[str, str]] = {}
# What directories we have to make; this is an ordered set,
# so that we have a fast lookup and can run mkdir in order.
self.directories = OrderedDict()
# Files to link. Maps dst_rel to (src_root, src_rel). This is an ordered dict, where files
# are guaranteed to be grouped by src_root in the order they were visited.
self.files: Dict[str, Tuple[str, str]] = {}
# Files to link. Maps dst_rel to (src_root, src_rel)
self.files = OrderedDict()
def before_visit_dir(self, root: str, rel_path: str, depth: int) -> bool:
def before_visit_dir(self, root, rel_path, depth):
"""
Register a directory if dst / rel_path is not blocked by a file or ignored.
"""
@@ -104,7 +104,7 @@ def before_visit_dir(self, root: str, rel_path: str, depth: int) -> bool:
self.directories[proj_rel_path] = (root, rel_path)
return True
def before_visit_symlinked_dir(self, root: str, rel_path: str, depth: int) -> bool:
def before_visit_symlinked_dir(self, root, rel_path, depth):
"""
Replace symlinked dirs with actual directories when possible in low depths,
otherwise handle it as a file (i.e. we link to the symlink).
@@ -136,56 +136,40 @@ def before_visit_symlinked_dir(self, root: str, rel_path: str, depth: int) -> bo
self.visit_file(root, rel_path, depth)
return False
def visit_file(self, root: str, rel_path: str, depth: int, *, symlink: bool = False) -> None:
def visit_file(self, root, rel_path, depth):
proj_rel_path = os.path.join(self.projection, rel_path)
if self.ignore(rel_path):
pass
elif proj_rel_path in self.directories:
# Can't create a file where a dir is; fatal error
src_a_root, src_a_relpath = self.directories[proj_rel_path]
self.fatal_conflicts.append(
MergeConflict(
dst=proj_rel_path,
src_a=os.path.join(*self.directories[proj_rel_path]),
src_a=os.path.join(src_a_root, src_a_relpath),
src_b=os.path.join(root, rel_path),
)
)
elif proj_rel_path in self.files:
# When two files project to the same path, they conflict iff they are distinct.
# If they are the same (i.e. one links to the other), register regular files rather
# than symlinks. The reason is that in copy-type views, we need a copy of the actual
# file, not the symlink.
src_a = os.path.join(*self.files[proj_rel_path])
src_b = os.path.join(root, rel_path)
try:
samefile = os.path.samefile(src_a, src_b)
except OSError:
samefile = False
if not samefile:
# Distinct files produce a conflict.
self.file_conflicts.append(
MergeConflict(dst=proj_rel_path, src_a=src_a, src_b=src_b)
# In some cases we can resolve file-file conflicts
src_a_root, src_a_relpath = self.files[proj_rel_path]
self.file_conflicts.append(
MergeConflict(
dst=proj_rel_path,
src_a=os.path.join(src_a_root, src_a_relpath),
src_b=os.path.join(root, rel_path),
)
return
if not symlink:
# Remove the link in favor of the actual file. The del is necessary to maintain the
# order of the files dict, which is grouped by root.
del self.files[proj_rel_path]
self.files[proj_rel_path] = (root, rel_path)
)
else:
# Otherwise register this file to be linked.
self.files[proj_rel_path] = (root, rel_path)
def visit_symlinked_file(self, root: str, rel_path: str, depth: int) -> None:
def visit_symlinked_file(self, root, rel_path, depth):
# Treat symlinked files as ordinary files (without "dereferencing")
self.visit_file(root, rel_path, depth, symlink=True)
self.visit_file(root, rel_path, depth)
def set_projection(self, projection: str) -> None:
def set_projection(self, projection):
self.projection = os.path.normpath(projection)
# Todo, is this how to check in general for empty projection?
@@ -213,19 +197,24 @@ def set_projection(self, projection: str) -> None:
class DestinationMergeVisitor(BaseDirectoryVisitor):
"""DestinatinoMergeVisitor takes a SourceMergeVisitor and:
"""DestinatinoMergeVisitor takes a SourceMergeVisitor
and:
a. registers additional conflicts when merging to the destination prefix
b. removes redundant mkdir operations when directories already exist in the destination prefix.
a. registers additional conflicts when merging
to the destination prefix
b. removes redundant mkdir operations when
directories already exist in the destination
prefix.
This also makes sure that symlinked directories in the target prefix will never be merged with
This also makes sure that symlinked directories
in the target prefix will never be merged with
directories in the sources directories.
"""
def __init__(self, source_merge_visitor: SourceMergeVisitor):
def __init__(self, source_merge_visitor):
self.src = source_merge_visitor
def before_visit_dir(self, root: str, rel_path: str, depth: int) -> bool:
def before_visit_dir(self, root, rel_path, depth):
# If destination dir is a file in a src dir, add a conflict,
# and don't traverse deeper
if rel_path in self.src.files:
@@ -247,7 +236,7 @@ def before_visit_dir(self, root: str, rel_path: str, depth: int) -> bool:
# don't descend into it.
return False
def before_visit_symlinked_dir(self, root: str, rel_path: str, depth: int) -> bool:
def before_visit_symlinked_dir(self, root, rel_path, depth):
"""
Symlinked directories in the destination prefix should
be seen as files; we should not accidentally merge
@@ -273,7 +262,7 @@ def before_visit_symlinked_dir(self, root: str, rel_path: str, depth: int) -> bo
# Never descend into symlinked target dirs.
return False
def visit_file(self, root: str, rel_path: str, depth: int) -> None:
def visit_file(self, root, rel_path, depth):
# Can't merge a file if target already exists
if rel_path in self.src.directories:
src_a_root, src_a_relpath = self.src.directories[rel_path]
@@ -291,7 +280,7 @@ def visit_file(self, root: str, rel_path: str, depth: int) -> None:
)
)
def visit_symlinked_file(self, root: str, rel_path: str, depth: int) -> None:
def visit_symlinked_file(self, root, rel_path, depth):
# Treat symlinked files as ordinary files (without "dereferencing")
self.visit_file(root, rel_path, depth)

View File

@@ -189,7 +189,6 @@ def _windows_can_symlink() -> bool:
import llnl.util.filesystem as fs
fs.touchp(fpath)
fs.mkdirp(dpath)
try:
os.symlink(dpath, dlink)

View File

@@ -244,7 +244,7 @@ def _search_duplicate_specs_in_externals(error_cls):
+ lines
+ ["as they might result in non-deterministic hashes"]
)
except (TypeError, AttributeError):
except TypeError:
details = []
errors.append(error_cls(summary=error_msg, details=details))
@@ -292,6 +292,12 @@ def _avoid_mismatched_variants(error_cls):
errors = []
packages_yaml = spack.config.CONFIG.get_config("packages")
def make_error(config_data, summary):
s = io.StringIO()
s.write("Occurring in the following file:\n")
syaml.dump_config(config_data, stream=s, blame=True)
return error_cls(summary=summary, details=[s.getvalue()])
for pkg_name in packages_yaml:
# 'all:' must be more forgiving, since it is setting defaults for everything
if pkg_name == "all" or "variants" not in packages_yaml[pkg_name]:
@@ -311,7 +317,7 @@ def _avoid_mismatched_variants(error_cls):
f"Setting a preference for the '{pkg_name}' package to the "
f"non-existing variant '{variant.name}'"
)
errors.append(_make_config_error(preferences, summary, error_cls=error_cls))
errors.append(make_error(preferences, summary))
continue
# Variant cannot accept this value
@@ -323,41 +329,11 @@ def _avoid_mismatched_variants(error_cls):
f"Setting the variant '{variant.name}' of the '{pkg_name}' package "
f"to the invalid value '{str(variant)}'"
)
errors.append(_make_config_error(preferences, summary, error_cls=error_cls))
errors.append(make_error(preferences, summary))
return errors
@config_packages
def _wrongly_named_spec(error_cls):
"""Warns if the wrong name is used for an external spec"""
errors = []
packages_yaml = spack.config.CONFIG.get_config("packages")
for pkg_name in packages_yaml:
if pkg_name == "all":
continue
externals = packages_yaml[pkg_name].get("externals", [])
is_virtual = spack.repo.PATH.is_virtual(pkg_name)
for entry in externals:
spec = spack.spec.Spec(entry["spec"])
regular_pkg_is_wrong = not is_virtual and pkg_name != spec.name
virtual_pkg_is_wrong = is_virtual and not any(
p.name == spec.name for p in spack.repo.PATH.providers_for(pkg_name)
)
if regular_pkg_is_wrong or virtual_pkg_is_wrong:
summary = f"Wrong external spec detected for '{pkg_name}': {spec}"
errors.append(_make_config_error(entry, summary, error_cls=error_cls))
return errors
def _make_config_error(config_data, summary, error_cls):
s = io.StringIO()
s.write("Occurring in the following file:\n")
syaml.dump_config(config_data, stream=s, blame=True)
return error_cls(summary=summary, details=[s.getvalue()])
#: Sanity checks on package directives
package_directives = AuditClass(
group="packages",
@@ -796,30 +772,10 @@ def check_virtual_with_variants(spec, msg):
except spack.repo.UnknownPackageError:
# This dependency is completely missing, so report
# and continue the analysis
summary = f"{pkg_name}: unknown package '{dep_name}' in 'depends_on' directive"
details = [f" in {filename}"]
errors.append(error_cls(summary=summary, details=details))
continue
# Check for self-referential specs similar to:
#
# depends_on("foo@X.Y", when="^foo+bar")
#
# That would allow clingo to choose whether to have foo@X.Y+bar in the graph.
problematic_edges = [
x for x in when.edges_to_dependencies(dep_name) if not x.virtuals
]
if problematic_edges and not dep.patches:
summary = (
f"{pkg_name}: dependency on '{dep.spec}' when '{when}' is self-referential"
f"{pkg_name}: unknown package '{dep_name}' in " "'depends_on' directive"
)
details = [
(
f" please specify better using '^[virtuals=...] {dep_name}', or "
f"substitute with an equivalent condition on '{pkg_name}'"
),
f" in {filename}",
]
details = [f" in {filename}"]
errors.append(error_cls(summary=summary, details=details))
continue
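To make the flagged pattern concrete, here is a hypothetical package (all names invented) showing a directive this audit would reject, next to the rewrite its hint suggests:

from spack.package import *

class Example(Package):
    """Hypothetical package, for illustration only."""
    homepage = "https://example.com"
    url = "https://example.com/example-1.0.tar.gz"
    version("1.0", sha256="0" * 64)  # placeholder checksum

    # Self-referential: naming plain "mpich" in its own trigger lets the
    # solver pick mpich+debug merely to activate the constraint.
    depends_on("mpich@4:", when="^mpich+debug")  # flagged by the audit

    # Anchoring the condition on the virtual edge removes that freedom.
    depends_on("mpich@4:", when="^[virtuals=mpi] mpich+debug")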

View File

@@ -1541,7 +1541,7 @@ def fetch_url_to_mirror(url):
response = spack.oci.opener.urlopen(
urllib.request.Request(
url=ref.manifest_url(),
headers={"Accept": ", ".join(spack.oci.oci.manifest_content_type)},
headers={"Accept": "application/vnd.oci.image.manifest.v1+json"},
)
)
except Exception:
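The practical effect is an Accept header listing every media type the client can parse; a sketch, assuming spack.oci.oci.manifest_content_type enumerates the OCI and Docker manifest types:

manifest_content_type = [
    "application/vnd.oci.image.manifest.v1+json",
    "application/vnd.docker.distribution.manifest.v2+json",
]
headers = {"Accept": ", ".join(manifest_content_type)}
# Accept: application/vnd.oci.image.manifest.v1+json,
#         application/vnd.docker.distribution.manifest.v2+json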

View File

@@ -542,7 +542,7 @@ def verify_patchelf(patchelf: "spack.util.executable.Executable") -> bool:
return version >= spack.version.Version("0.13.1")
def ensure_patchelf_in_path_or_raise() -> spack.util.executable.Executable:
def ensure_patchelf_in_path_or_raise() -> None:
"""Ensure patchelf is in the PATH or raise."""
# The old concretizer is not smart and we're doing its job: if the latest patchelf
# does not concretize because the compiler doesn't support C++17, we try to

View File

@@ -146,7 +146,7 @@ def mypy_root_spec() -> str:
def black_root_spec() -> str:
"""Return the root spec used to bootstrap black"""
return _root_spec("py-black@:24.1.0")
return _root_spec("py-black@:23.1.0")
def flake8_root_spec() -> str:

View File

@@ -199,8 +199,6 @@ def initconfig_mpi_entries(self):
mpiexec = "/usr/bin/srun"
else:
mpiexec = os.path.join(spec["slurm"].prefix.bin, "srun")
elif hasattr(spec["mpi"].package, "mpiexec"):
mpiexec = spec["mpi"].package.mpiexec
else:
mpiexec = os.path.join(spec["mpi"].prefix.bin, "mpirun")
if not os.path.exists(mpiexec):

View File

@@ -58,62 +58,6 @@ def _maybe_set_python_hints(pkg: spack.package_base.PackageBase, args: List[str]
)
def _supports_compilation_databases(pkg: spack.package_base.PackageBase) -> bool:
"""Check if this package (and CMake) can support compilation databases."""
# CMAKE_EXPORT_COMPILE_COMMANDS only exists for CMake >= 3.5
if not pkg.spec.satisfies("^cmake@3.5:"):
return False
# CMAKE_EXPORT_COMPILE_COMMANDS is only implemented for Makefile and Ninja generators
if not (pkg.spec.satisfies("generator=make") or pkg.spec.satisfies("generator=ninja")):
return False
return True
def _conditional_cmake_defaults(pkg: spack.package_base.PackageBase, args: List[str]) -> None:
"""Set a few default defines for CMake, depending on its version."""
cmakes = pkg.spec.dependencies("cmake", dt.BUILD)
if len(cmakes) != 1:
return
cmake = cmakes[0]
# CMAKE_INTERPROCEDURAL_OPTIMIZATION only exists for CMake >= 3.9
try:
ipo = pkg.spec.variants["ipo"].value
except KeyError:
ipo = False
if cmake.satisfies("@3.9:"):
args.append(CMakeBuilder.define("CMAKE_INTERPROCEDURAL_OPTIMIZATION", ipo))
# Disable Package Registry: export(PACKAGE) may put files in the user's home directory, and
# find_package may search there. This is not what we want.
# Do not populate CMake User Package Registry
if cmake.satisfies("@3.15:"):
# see https://cmake.org/cmake/help/latest/policy/CMP0090.html
args.append(CMakeBuilder.define("CMAKE_POLICY_DEFAULT_CMP0090", "NEW"))
elif cmake.satisfies("@3.1:"):
# see https://cmake.org/cmake/help/latest/variable/CMAKE_EXPORT_NO_PACKAGE_REGISTRY.html
args.append(CMakeBuilder.define("CMAKE_EXPORT_NO_PACKAGE_REGISTRY", True))
# Do not use CMake User/System Package Registry
# https://cmake.org/cmake/help/latest/manual/cmake-packages.7.html#disabling-the-package-registry
if cmake.satisfies("@3.16:"):
args.append(CMakeBuilder.define("CMAKE_FIND_USE_PACKAGE_REGISTRY", False))
elif cmake.satisfies("@3.1:3.15"):
args.append(CMakeBuilder.define("CMAKE_FIND_PACKAGE_NO_PACKAGE_REGISTRY", False))
args.append(CMakeBuilder.define("CMAKE_FIND_PACKAGE_NO_SYSTEM_PACKAGE_REGISTRY", False))
# Export a compilation database if supported.
if _supports_compilation_databases(pkg):
args.append(CMakeBuilder.define("CMAKE_EXPORT_COMPILE_COMMANDS", True))
def generator(*names: str, default: Optional[str] = None):
"""The build system generator to use.
@@ -302,10 +246,7 @@ class CMakeBuilder(BaseBuilder):
@property
def archive_files(self):
"""Files to archive for packages based on CMake"""
files = [os.path.join(self.build_directory, "CMakeCache.txt")]
if _supports_compilation_databases(self):
files.append(os.path.join(self.build_directory, "compile_commands.json"))
return files
return [os.path.join(self.build_directory, "CMakeCache.txt")]
@property
def root_cmakelists_dir(self):
@@ -352,6 +293,11 @@ def std_args(pkg, generator=None):
except KeyError:
build_type = "RelWithDebInfo"
try:
ipo = pkg.spec.variants["ipo"].value
except KeyError:
ipo = False
define = CMakeBuilder.define
args = [
"-G",
@@ -360,6 +306,10 @@ def std_args(pkg, generator=None):
define("CMAKE_BUILD_TYPE", build_type),
]
# CMAKE_INTERPROCEDURAL_OPTIMIZATION only exists for CMake >= 3.9
if pkg.spec.satisfies("^cmake@3.9:"):
args.append(define("CMAKE_INTERPROCEDURAL_OPTIMIZATION", ipo))
if primary_generator == "Unix Makefiles":
args.append(define("CMAKE_VERBOSE_MAKEFILE", True))
@@ -368,7 +318,6 @@ def std_args(pkg, generator=None):
[define("CMAKE_FIND_FRAMEWORK", "LAST"), define("CMAKE_FIND_APPBUNDLE", "LAST")]
)
_conditional_cmake_defaults(pkg, args)
_maybe_set_python_hints(pkg, args)
# Set up CMake rpath
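As a rough illustration of what these helpers contribute, CMakeBuilder.define (used throughout the hunk above) formats each entry as a typed -D flag:

from spack.build_systems.cmake import CMakeBuilder

print(CMakeBuilder.define("CMAKE_EXPORT_COMPILE_COMMANDS", True))
# -DCMAKE_EXPORT_COMPILE_COMMANDS:BOOL=ON
print(CMakeBuilder.define("CMAKE_POLICY_DEFAULT_CMP0090", "NEW"))
# -DCMAKE_POLICY_DEFAULT_CMP0090:STRING=NEW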

View File

@@ -218,7 +218,7 @@ def pset_components(self):
"+inspector": " intel-inspector",
"+itac": " intel-itac intel-ta intel-tc" " intel-trace-analyzer intel-trace-collector",
# Trace Analyzer and Collector
"+vtune": " intel-vtune",
"+vtune": " intel-vtune"
# VTune, ..-profiler since 2020, ..-amplifier before
}.items():
if variant in self.spec:

View File

@@ -29,12 +29,15 @@ class LuaPackage(spack.package_base.PackageBase):
with when("build_system=lua"):
depends_on("lua-lang")
with when("^[virtuals=lua-lang] lua"):
extends("lua")
with when("^[virtuals=lua-lang] lua-luajit"):
extends("lua-luajit+lualinks")
with when("^[virtuals=lua-lang] lua-luajit-openresty"):
extends("lua-luajit-openresty+lualinks")
extends("lua", when="^lua")
with when("^lua-luajit"):
extends("lua-luajit")
depends_on("luajit")
depends_on("lua-luajit+lualinks")
with when("^lua-luajit-openresty"):
extends("lua-luajit-openresty")
depends_on("luajit")
depends_on("lua-luajit-openresty+lualinks")
@property
def lua(self):
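A hypothetical extension built on this base class (name and URL invented): whichever provider the solver picks for the lua-lang virtual now decides what the package extends, so something like `spack install lua-example ^[virtuals=lua-lang] lua-luajit` would select the LuaJIT path.

from spack.package import *

class LuaExample(LuaPackage):
    """Hypothetical Lua extension, for illustration only."""
    homepage = "https://example.com"
    url = "https://example.com/lua-example-1.0.tar.gz"
    version("1.0", sha256="0" * 64)  # placeholder checksum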

View File

@@ -69,7 +69,7 @@ class MSBuildBuilder(BaseBuilder):
@property
def build_directory(self):
"""Return the directory containing the MSBuild solution or vcxproj."""
return fs.windows_sfn(self.pkg.stage.source_path)
return self.pkg.stage.source_path
@property
def toolchain_version(self):

View File

@@ -77,11 +77,7 @@ def ignore_quotes(self):
@property
def build_directory(self):
"""Return the directory containing the makefile."""
return (
fs.windows_sfn(self.pkg.stage.source_path)
if not self.makefile_root
else fs.windows_sfn(self.makefile_root)
)
return self.pkg.stage.source_path if not self.makefile_root else self.makefile_root
@property
def std_nmake_args(self):

View File

@@ -9,13 +9,10 @@
import shutil
from os.path import basename, isdir
from llnl.util import tty
from llnl.util.filesystem import HeaderList, LibraryList, find_libraries, join_path, mkdirp
from llnl.util.filesystem import HeaderList, find_libraries, join_path, mkdirp
from llnl.util.link_tree import LinkTree
from spack.build_environment import dso_suffix
from spack.directives import conflicts, variant
from spack.package_base import InstallError
from spack.util.environment import EnvironmentModifications
from spack.util.executable import Executable
@@ -182,72 +179,16 @@ class IntelOneApiLibraryPackage(IntelOneApiPackage):
"""
def openmp_libs(self):
"""Supply LibraryList for linking OpenMP"""
# NB: Hunting down explicit library files may be the Spack way of
# doing things, but it is better to add the compiler defined option
# e.g. -fopenmp
# If other packages use openmp, then all the packages need to
# support the same ABI. Spack usually uses the same compiler
# for all the packages, but you can force it if necessary:
#
# e.g. spack install blaspp%oneapi@2024 ^intel-oneapi-mkl%oneapi@2024
#
if self.spec.satisfies("%intel") or self.spec.satisfies("%oneapi"):
libname = "libiomp5"
elif self.spec.satisfies("%gcc"):
libname = "libgomp"
elif self.spec.satisfies("%clang"):
libname = "libomp"
else:
raise InstallError(
"OneAPI package with OpenMP threading requires one of %clang, %gcc, %oneapi, "
"or %intel"
)
# query the compiler for the library path
with self.compiler.compiler_environment():
omp_lib_path = Executable(self.compiler.cc)(
"--print-file-name", f"{libname}.{dso_suffix}", output=str
).strip()
# Newer versions of clang do not give the full path to libomp. If that's
# the case, look in a path relative to the compiler where libomp is
# typically found. If it's not found there, error out.
if not os.path.exists(omp_lib_path) and self.spec.satisfies("%clang"):
compiler_root = os.path.dirname(os.path.dirname(os.path.realpath(self.compiler.cc)))
omp_lib_path_compiler = os.path.join(compiler_root, "lib", f"{libname}.{dso_suffix}")
if os.path.exists(omp_lib_path_compiler):
omp_lib_path = omp_lib_path_compiler
# if the compiler cannot find the file, it returns the input path
if not os.path.exists(omp_lib_path):
raise InstallError(f"OneAPI package cannot locate OpenMP library: {omp_lib_path}")
omp_libs = LibraryList(omp_lib_path)
tty.info(f"OneAPI package requires OpenMP library: {omp_libs}")
return omp_libs
# find_headers uses heuristics to determine the include directory
# that does not work for oneapi packages. Use explicit directories
# instead.
def header_directories(self, dirs):
h = HeaderList([])
h.directories = dirs
# trilinos passes the directories to cmake, and cmake requires
# that the directory exists
for dir in dirs:
if not isdir(dir):
raise RuntimeError(f"{dir} does not exist")
return h
@property
def headers(self):
# This should match the directories added to CPATH by
# env/vars.sh for the component
return self.header_directories([self.component_prefix.include])
return self.header_directories(
[self.component_prefix.include, self.component_prefix.include.join(self.component_dir)]
)
@property
def libs(self):
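The core of the removed openmp_libs helper was a single compiler query; a standalone sketch of that step, assuming gcc on PATH and a Linux .so suffix:

from spack.util.executable import Executable

gcc = Executable("gcc")
path = gcc("--print-file-name", "libgomp.so", output=str).strip()
# gcc prints the absolute path if it can find the library; otherwise it
# just echoes the input name, which is why the code checked os.path.exists.
print(path)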

View File

@@ -2,15 +2,11 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import functools
import inspect
import operator
import os
import re
import shutil
import stat
from typing import Dict, Iterable, List, Mapping, Optional, Tuple
from typing import Iterable, List, Mapping, Optional
import archspec
@@ -140,52 +136,31 @@ def view_file_conflicts(self, view, merge_map):
return conflicts
def add_files_to_view(self, view, merge_map, skip_if_exists=True):
# Patch up shebangs to the python linked in the view only if python is built by Spack.
if not self.extendee_spec or self.extendee_spec.external:
if not self.extendee_spec:
return super().add_files_to_view(view, merge_map, skip_if_exists)
# We only patch shebangs in the bin directory.
copied_files: Dict[Tuple[int, int], str] = {} # File identifier -> source
delayed_links: List[Tuple[str, str]] = [] # List of symlinks from merge map
bin_dir = self.spec.prefix.bin
python_prefix = self.extendee_spec.prefix
python_is_external = self.extendee_spec.external
global_view = fs.same_path(python_prefix, view.get_projection_for_spec(self.spec))
for src, dst in merge_map.items():
if skip_if_exists and os.path.lexists(dst):
if os.path.exists(dst):
continue
if not fs.path_contains_subdirectory(src, bin_dir):
elif global_view or not fs.path_contains_subdirectory(src, bin_dir):
view.link(src, dst)
continue
s = os.lstat(src)
# Symlink is delayed because we may need to re-target if its target is copied in view
if stat.S_ISLNK(s.st_mode):
delayed_links.append((src, dst))
continue
# If it's executable and has a shebang, copy and patch it.
if (s.st_mode & 0b111) and fs.has_shebang(src):
copied_files[(s.st_dev, s.st_ino)] = dst
elif not os.path.islink(src):
shutil.copy2(src, dst)
fs.filter_file(
python_prefix, os.path.abspath(view.get_projection_for_spec(self.spec)), dst
)
is_script = fs.is_nonsymlink_exe_with_shebang(src)
if is_script and not python_is_external:
fs.filter_file(
python_prefix,
os.path.abspath(view.get_projection_for_spec(self.spec)),
dst,
)
else:
view.link(src, dst)
# Finally re-target the symlinks that point to copied files.
for src, dst in delayed_links:
try:
s = os.stat(src)
target = copied_files[(s.st_dev, s.st_ino)]
except (OSError, KeyError):
target = None
if target:
os.symlink(os.path.relpath(target, os.path.dirname(dst)), dst)
else:
view.link(src, dst, spec=self.spec)
orig_link_target = os.path.realpath(src)
new_link_target = os.path.abspath(merge_map[orig_link_target])
view.link(new_link_target, dst)
def remove_files_from_view(self, view, merge_map):
ignore_namespace = False
@@ -371,19 +346,16 @@ def headers(self) -> HeaderList:
# Remove py- prefix in package name
name = self.spec.name[3:]
# Headers should only be in include or platlib, but no harm in checking purelib too
# Headers may be in either location
include = self.prefix.join(self.spec["python"].package.include).join(name)
platlib = self.prefix.join(self.spec["python"].package.platlib).join(name)
purelib = self.prefix.join(self.spec["python"].package.purelib).join(name)
headers_list = map(fs.find_all_headers, [include, platlib, purelib])
headers = functools.reduce(operator.add, headers_list)
headers = fs.find_all_headers(include) + fs.find_all_headers(platlib)
if headers:
return headers
msg = "Unable to locate {} headers in {}, {}, or {}"
raise NoHeadersError(msg.format(self.spec.name, include, platlib, purelib))
msg = "Unable to locate {} headers in {} or {}"
raise NoHeadersError(msg.format(self.spec.name, include, platlib))
@property
def libs(self) -> LibraryList:
@@ -392,19 +364,15 @@ def libs(self) -> LibraryList:
# Remove py- prefix in package name
name = self.spec.name[3:]
# Libraries should only be in platlib, but no harm in checking purelib too
platlib = self.prefix.join(self.spec["python"].package.platlib).join(name)
purelib = self.prefix.join(self.spec["python"].package.purelib).join(name)
root = self.prefix.join(self.spec["python"].package.platlib).join(name)
find_all_libraries = functools.partial(fs.find_all_libraries, recursive=True)
libs_list = map(find_all_libraries, [platlib, purelib])
libs = functools.reduce(operator.add, libs_list)
libs = fs.find_all_libraries(root, recursive=True)
if libs:
return libs
msg = "Unable to recursively locate {} libraries in {} or {}"
raise NoLibrariesError(msg.format(self.spec.name, platlib, purelib))
msg = "Unable to recursively locate {} libraries in {}"
raise NoLibrariesError(msg.format(self.spec.name, root))
@spack.builder.builder("python_pip")
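The functools.reduce(operator.add, ...) idiom works because HeaderList and LibraryList support "+", so per-directory search results can simply be summed; a sketch with made-up directories:

import functools
import operator

import llnl.util.filesystem as fs

dirs = ["/prefix/include/foo", "/prefix/lib/python3.11/site-packages/foo"]
headers = functools.reduce(operator.add, map(fs.find_all_headers, dirs))
print(headers)  # one combined HeaderList (possibly empty)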

View File

@@ -162,9 +162,23 @@ def hip_flags(amdgpu_target):
# Add compiler minimum versions based on the first release where the
# processor is included in llvm/lib/Support/TargetParser.cpp
depends_on("llvm-amdgpu@4.1.0:", when="amdgpu_target=gfx900:xnack-")
depends_on("llvm-amdgpu@4.1.0:", when="amdgpu_target=gfx906:xnack-")
depends_on("llvm-amdgpu@4.1.0:", when="amdgpu_target=gfx908:xnack-")
depends_on("llvm-amdgpu@4.1.0:", when="amdgpu_target=gfx90c")
depends_on("llvm-amdgpu@4.3.0:", when="amdgpu_target=gfx90a")
depends_on("llvm-amdgpu@4.3.0:", when="amdgpu_target=gfx90a:xnack-")
depends_on("llvm-amdgpu@4.3.0:", when="amdgpu_target=gfx90a:xnack+")
depends_on("llvm-amdgpu@5.2.0:", when="amdgpu_target=gfx940")
depends_on("llvm-amdgpu@5.7.0:", when="amdgpu_target=gfx941")
depends_on("llvm-amdgpu@5.7.0:", when="amdgpu_target=gfx942")
depends_on("llvm-amdgpu@4.5.0:", when="amdgpu_target=gfx1013")
depends_on("llvm-amdgpu@3.8.0:", when="amdgpu_target=gfx1030")
depends_on("llvm-amdgpu@3.9.0:", when="amdgpu_target=gfx1031")
depends_on("llvm-amdgpu@4.1.0:", when="amdgpu_target=gfx1032")
depends_on("llvm-amdgpu@4.1.0:", when="amdgpu_target=gfx1033")
depends_on("llvm-amdgpu@4.3.0:", when="amdgpu_target=gfx1034")
depends_on("llvm-amdgpu@4.5.0:", when="amdgpu_target=gfx1035")
depends_on("llvm-amdgpu@5.2.0:", when="amdgpu_target=gfx1036")
depends_on("llvm-amdgpu@5.3.0:", when="amdgpu_target=gfx1100")
depends_on("llvm-amdgpu@5.3.0:", when="amdgpu_target=gfx1101")

View File

@@ -35,9 +35,9 @@ def _misc_cache():
#: Spack's cache for small data
MISC_CACHE: Union[spack.util.file_cache.FileCache, llnl.util.lang.Singleton] = (
llnl.util.lang.Singleton(_misc_cache)
)
MISC_CACHE: Union[
spack.util.file_cache.FileCache, llnl.util.lang.Singleton
] = llnl.util.lang.Singleton(_misc_cache)
def fetch_cache_location():
@@ -91,6 +91,6 @@ def symlink(self, mirror_ref):
#: Spack's local cache for downloaded source archives
FETCH_CACHE: Union[spack.fetch_strategy.FsCache, llnl.util.lang.Singleton] = (
llnl.util.lang.Singleton(_fetch_cache)
)
FETCH_CACHE: Union[
spack.fetch_strategy.FsCache, llnl.util.lang.Singleton
] = llnl.util.lang.Singleton(_fetch_cache)

View File

@@ -7,7 +7,9 @@
get_job_name = lambda needs_entry: (
needs_entry.get("job")
if (isinstance(needs_entry, collections.abc.Mapping) and needs_entry.get("artifacts", True))
else needs_entry if isinstance(needs_entry, str) else None
else needs_entry
if isinstance(needs_entry, str)
else None
)

View File

@@ -594,15 +594,6 @@ def _put_manifest(
base_manifest, base_config = base_images[architecture]
env = _retrieve_env_dict_from_config(base_config)
# If the base image uses `vnd.docker.distribution.manifest.v2+json`, then we use that too.
# This is because Singularity / Apptainer is very strict about not mixing them.
base_manifest_mediaType = base_manifest.get(
"mediaType", "application/vnd.oci.image.manifest.v1+json"
)
use_docker_format = (
base_manifest_mediaType == "application/vnd.docker.distribution.manifest.v2+json"
)
spack.user_environment.environment_modifications_for_specs(*specs).apply_modifications(env)
# Create an oci.image.config file
@@ -634,8 +625,8 @@ def _put_manifest(
# Upload the config file
upload_blob_with_retry(image_ref, file=config_file, digest=config_file_checksum)
manifest = {
"mediaType": base_manifest_mediaType,
oci_manifest = {
"mediaType": "application/vnd.oci.image.manifest.v1+json",
"schemaVersion": 2,
"config": {
"mediaType": base_manifest["config"]["mediaType"],
@@ -646,11 +637,7 @@ def _put_manifest(
*(layer for layer in base_manifest["layers"]),
*(
{
"mediaType": (
"application/vnd.docker.image.rootfs.diff.tar.gzip"
if use_docker_format
else "application/vnd.oci.image.layer.v1.tar+gzip"
),
"mediaType": "application/vnd.oci.image.layer.v1.tar+gzip",
"digest": str(checksums[s.dag_hash()].compressed_digest),
"size": checksums[s.dag_hash()].size,
}
@@ -659,11 +646,11 @@ def _put_manifest(
],
}
if not use_docker_format and annotations:
manifest["annotations"] = annotations
if annotations:
oci_manifest["annotations"] = annotations
# Finally upload the manifest
upload_manifest_with_retry(image_ref, manifest=manifest)
upload_manifest_with_retry(image_ref, oci_manifest=oci_manifest)
# delete the config file
os.unlink(config_file)
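The format decision in isolation (media type strings as in the hunk above; base_manifest stands in for a manifest fetched from the registry):

base_manifest = {"mediaType": "application/vnd.docker.distribution.manifest.v2+json"}

media_type = base_manifest.get("mediaType", "application/vnd.oci.image.manifest.v1+json")
use_docker_format = media_type == "application/vnd.docker.distribution.manifest.v2+json"

layer_media_type = (
    "application/vnd.docker.image.rootfs.diff.tar.gzip"
    if use_docker_format
    else "application/vnd.oci.image.layer.v1.tar+gzip"
)
print(layer_media_type)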

View File

@@ -6,7 +6,6 @@
import json
import os
import shutil
from urllib.parse import urlparse, urlunparse
import llnl.util.filesystem as fs
import llnl.util.tty as tty
@@ -158,9 +157,7 @@ def setup_parser(subparser):
description=deindent(ci_reproduce.__doc__),
help=spack.cmd.first_line(ci_reproduce.__doc__),
)
reproduce.add_argument(
"job_url", help="URL of GitLab job web page or artifact", type=_gitlab_artifacts_url
)
reproduce.add_argument("job_url", help="URL of job artifacts bundle")
reproduce.add_argument(
"--runtime",
help="Container runtime to use.",
@@ -795,6 +792,11 @@ def ci_reproduce(args):
artifacts of the provided gitlab pipeline rebuild job's URL will be used to derive
instructions for reproducing the build locally
"""
job_url = args.job_url
work_dir = args.working_dir
autostart = args.autostart
runtime = args.runtime
# Allow passing GPG key for reproducing protected CI jobs
if args.gpg_file:
gpg_key_url = url_util.path_to_file_url(args.gpg_file)
@@ -803,47 +805,7 @@ def ci_reproduce(args):
else:
gpg_key_url = None
return spack_ci.reproduce_ci_job(
args.job_url, args.working_dir, args.autostart, gpg_key_url, args.runtime
)
def _gitlab_artifacts_url(url: str) -> str:
"""Take a URL either to the URL of the job in the GitLab UI, or to the artifacts zip file,
and output the URL to the artifacts zip file."""
parsed = urlparse(url)
if not parsed.scheme or not parsed.netloc:
raise ValueError(url)
parts = parsed.path.split("/")
if len(parts) < 2:
raise ValueError(url)
# Just use API endpoints verbatim, they're probably generated by Spack.
if parts[1] == "api":
return url
# If it's a URL to the job in the Gitlab UI, we may need to append the artifacts path.
minus_idx = parts.index("-")
# Remove repeated slashes in the remainder
rest = [p for p in parts[minus_idx + 1 :] if p]
# Now the format is jobs/X or jobs/X/artifacts/download
if len(rest) < 2 or rest[0] != "jobs":
raise ValueError(url)
if len(rest) == 2:
# replace jobs/X with jobs/X/artifacts/download
rest.extend(("artifacts", "download"))
# Replace the parts and unparse.
parts[minus_idx + 1 :] = rest
# Don't allow fragments / queries
return urlunparse(parsed._replace(path="/".join(parts), fragment="", query=""))
return spack_ci.reproduce_ci_job(job_url, work_dir, autostart, gpg_key_url, runtime)
def ci(parser, args):
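Expected behavior of the new validator, sketched as input/output pairs (host, project, and job id are placeholders):

_gitlab_artifacts_url("https://gitlab.example.com/group/project/-/jobs/123456")
# -> "https://gitlab.example.com/group/project/-/jobs/123456/artifacts/download"

_gitlab_artifacts_url("https://gitlab.example.com/api/v4/projects/1/jobs/123456/artifacts")
# -> unchanged: paths starting with /api are passed through verbatim

_gitlab_artifacts_url("not-a-url")
# -> raises ValueError, which argparse reports as a bad job_url argument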

View File

@@ -8,7 +8,6 @@
import llnl.util.tty as tty
import spack.cmd
import spack.config
import spack.spec
import spack.util.path
import spack.version
@@ -22,7 +21,6 @@
def setup_parser(subparser):
subparser.add_argument("-p", "--path", help="source location of package")
subparser.add_argument("-b", "--build-directory", help="build directory for the package")
clone_group = subparser.add_mutually_exclusive_group()
clone_group.add_argument(
@@ -153,11 +151,4 @@ def develop(parser, args):
env = spack.cmd.require_active_env(cmd_name="develop")
tty.debug("Updating develop config for {0} transactionally".format(env.name))
with env.write_transaction():
if args.build_directory is not None:
spack.config.add(
"packages:{}:package_attributes:build_directory:{}".format(
spec.name, args.build_directory
),
env.scope_name,
)
_update_config(spec, path)
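Spelled out for a hypothetical package and directory (names invented), the new flag writes a single config entry into the environment scope:

import spack.config

# Roughly equivalent to: spack develop --build-directory build-rel mypkg@main
spack.config.add(
    "packages:mypkg:package_attributes:build_directory:build-rel",
    "env:myenv",  # assumed scope name of the active environment
)
# packages.yaml in the environment then contains:
#   mypkg:
#     package_attributes:
#       build_directory: build-rel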

View File

@@ -270,8 +270,7 @@ def create_temp_env_directory():
def _tty_info(msg):
"""tty.info like function that prints the equivalent printf statement for eval."""
decorated = f'{colorize("@*b{==>}")} {msg}\n'
executor = "echo" if sys.platform == "win32" else "printf"
print(f"{executor} {shlex.quote(decorated)};")
print(f"printf {shlex.quote(decorated)};")
def env_activate(args):

View File

@@ -30,7 +30,6 @@
@c{@min:max} version range (inclusive)
@c{@min:} version <min> or higher
@c{@:max} up to version <max> (inclusive)
@c{@=version} exact version
compilers:
@g{%compiler} build with <compiler>

View File

@@ -292,11 +292,9 @@ def head(n, span_id, title, anchor=None):
out.write("<dd>\n")
out.write(
", ".join(
(
d
if d not in pkg_names
else '<a class="reference internal" href="#%s">%s</a>' % (d, d)
)
d
if d not in pkg_names
else '<a class="reference internal" href="#%s">%s</a>' % (d, d)
for d in deps
)
)

View File

@@ -1,71 +0,0 @@
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import errno
import gzip
import io
import os
import shutil
import sys
import spack.cmd
import spack.spec
import spack.util.compression as compression
from spack.cmd.common import arguments
from spack.main import SpackCommandError
description = "print out logs for packages"
section = "basic"
level = "long"
def setup_parser(subparser):
arguments.add_common_arguments(subparser, ["spec"])
def _dump_byte_stream_to_stdout(instream: io.BufferedIOBase) -> None:
# Reopen stdout in binary mode so we don't have to worry about encoding
outstream = os.fdopen(sys.stdout.fileno(), "wb", closefd=False)
shutil.copyfileobj(instream, outstream)
def _logs(cmdline_spec: spack.spec.Spec, concrete_spec: spack.spec.Spec):
if concrete_spec.installed:
log_path = concrete_spec.package.install_log_path
elif os.path.exists(concrete_spec.package.stage.path):
# TODO: `spack logs` currently cannot show the logs while a package is being built, as the
# combined log file is only written after the build is finished.
log_path = concrete_spec.package.log_path
else:
raise SpackCommandError(f"{cmdline_spec} is not installed or staged")
try:
stream = open(log_path, "rb")
except OSError as e:
if e.errno == errno.ENOENT:
raise SpackCommandError(f"No logs are available for {cmdline_spec}") from e
raise SpackCommandError(f"Error reading logs for {cmdline_spec}: {e}") from e
with stream as f:
ext = compression.extension_from_magic_numbers_by_stream(f, decompress=False)
if ext and ext != "gz":
raise SpackCommandError(f"Unsupported storage format for {log_path}: {ext}")
# If the log file is gzip compressed, wrap it with a decompressor
_dump_byte_stream_to_stdout(gzip.GzipFile(fileobj=f) if ext == "gz" else f)
def logs(parser, args):
specs = spack.cmd.parse_specs(args.spec)
if not specs:
raise SpackCommandError("You must supply a spec.")
if len(specs) != 1:
raise SpackCommandError("Too many specs. Supply only one.")
concrete_spec = spack.cmd.matching_spec_from_env(specs[0])
_logs(specs[0], concrete_spec)
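The decompress-on-the-fly step in miniature: peek at the two-byte gzip magic number and wrap the stream when it matches. A simplified stand-in for extension_from_magic_numbers_by_stream; the file path is illustrative.

import gzip
import shutil
import sys

with open("spack-build-out.txt.gz", "rb") as f:
    is_gzip = f.peek(2)[:2] == b"\x1f\x8b"  # gzip magic number
    stream = gzip.GzipFile(fileobj=f) if is_gzip else f
    shutil.copyfileobj(stream, sys.stdout.buffer)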

View File

@@ -127,7 +127,10 @@ def _process_result(result, show, required_format, kwargs):
print()
if result.unsolved_specs and "solutions" in show:
tty.msg(asp.Result.format_unsolved(result.unsolved_specs))
tty.msg("Unsolved specs")
for spec in result.unsolved_specs:
print(spec)
print()
def solve(parser, args):

View File

@@ -514,10 +514,9 @@ def get_compilers(config, cspec=None, arch_spec=None):
for items in config:
items = items["compiler"]
# We might use equality here.
if cspec and not spack.spec.parse_with_version_concrete(
items["spec"], compiler=True
).satisfies(cspec):
# NOTE: in principle this should be equality not satisfies, but config can still
# be written in old format gcc@10.1.0 instead of gcc@=10.1.0.
if cspec and not cspec.satisfies(items["spec"]):
continue
# If an arch spec is given, confirm that this compiler
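To see why the concrete parse matters, a sketch using spack.spec (version semantics assumed from Spack's prefix-matching rules):

import spack.spec

# Without "=", "@10.1.0" is a range: any 10.1.0-prefixed version matches.
spack.spec.Spec("gcc@10.1.0.1").satisfies("gcc@10.1.0")  # True

# parse_with_version_concrete pins the version up front, so the later
# satisfies() call behaves like the equality the comment asks for.
s = spack.spec.parse_with_version_concrete("gcc@10.1.0", compiler=True)
print(s)  # expected: gcc@=10.1.0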

View File

@@ -7,7 +7,6 @@
import re
import subprocess
import sys
import tempfile
from typing import Dict, List, Set
import spack.compiler
@@ -16,7 +15,7 @@
import spack.util.executable
from spack.compiler import Compiler
from spack.error import SpackError
from spack.version import Version, VersionRange
from spack.version import Version
avail_fc_version: Set[str] = set()
fc_path: Dict[str, str] = dict()
@@ -293,15 +292,6 @@ def setup_custom_environment(self, pkg, env):
else:
env.set_path(env_var, int_env[env_var].split(os.pathsep))
# certain versions of ifx (2021.3.0:2023.1.0) do not play well with env:TMP
# that has a "." character in the path
# Work around by pointing tmp to the stage for the duration of the build
if self.fc and Version(self.fc_version(self.fc)).satisfies(
VersionRange("2021.3.0", "2023.1.0")
):
new_tmp = tempfile.mkdtemp(dir=pkg.stage.path)
env.set("TMP", new_tmp)
env.set("CC", self.cc)
env.set("CXX", self.cxx)
env.set("FC", self.fc)

View File

@@ -826,6 +826,7 @@ def __init__(self, spec):
class InsufficientArchitectureInfoError(spack.error.SpackError):
"""Raised when details on architecture cannot be collected from the
system"""

View File

@@ -63,11 +63,10 @@
from spack.util.cpus import cpus_available
#: Dict from section names -> schema for that section
SECTION_SCHEMAS: Dict[str, Any] = {
SECTION_SCHEMAS = {
"compilers": spack.schema.compilers.schema,
"concretizer": spack.schema.concretizer.schema,
"definitions": spack.schema.definitions.schema,
"view": spack.schema.view.schema,
"develop": spack.schema.develop.schema,
"mirrors": spack.schema.mirrors.schema,
"repos": spack.schema.repos.schema,
@@ -82,7 +81,7 @@
# Same as above, but including keys for environments
# this allows us to unify config reading between configs and environments
_ALL_SCHEMAS: Dict[str, Any] = copy.deepcopy(SECTION_SCHEMAS)
_ALL_SCHEMAS = copy.deepcopy(SECTION_SCHEMAS)
_ALL_SCHEMAS.update({spack.schema.env.TOP_LEVEL_KEY: spack.schema.env.schema})
#: Path to the default configuration
@@ -1097,7 +1096,7 @@ def read_config_file(
data = syaml.load_config(f)
if data:
if schema is None:
if not schema:
key = next(iter(data))
schema = _ALL_SCHEMAS[key]
validate(data, schema)

View File

@@ -71,7 +71,7 @@
"almalinux:9": {
"bootstrap": {
"template": "container/almalinux_9.dockerfile",
"image": "quay.io/almalinuxorg/almalinux:9"
"image": "quay.io/almalinux/almalinux:9"
},
"os_package_manager": "dnf_epel",
"build": "spack/almalinux9",
@@ -79,13 +79,13 @@
"develop": "latest"
},
"final": {
"image": "quay.io/almalinuxorg/almalinux:9"
"image": "quay.io/almalinux/almalinux:9"
}
},
"almalinux:8": {
"bootstrap": {
"template": "container/almalinux_8.dockerfile",
"image": "quay.io/almalinuxorg/almalinux:8"
"image": "quay.io/almalinux/almalinux:8"
},
"os_package_manager": "dnf_epel",
"build": "spack/almalinux8",
@@ -93,7 +93,7 @@
"develop": "latest"
},
"final": {
"image": "quay.io/almalinuxorg/almalinux:8"
"image": "quay.io/almalinux/almalinux:8"
}
},
"centos:stream": {

View File

@@ -1687,11 +1687,7 @@ def root(key, record):
with self.read_transaction():
roots = [rec.spec for key, rec in self._data.items() if root(key, rec)]
needed = set(id(spec) for spec in tr.traverse_nodes(roots, deptype=deptype))
return [
rec.spec
for rec in self._data.values()
if id(rec.spec) not in needed and rec.installed
]
return [rec.spec for rec in self._data.values() if id(rec.spec) not in needed]
def update_explicit(self, spec, explicit):
"""

View File

@@ -36,9 +36,6 @@
#: Default dependency type if none is specified
DEFAULT: DepFlag = BUILD | LINK
#: A flag with no dependency types set
NONE: DepFlag = 0
#: An iterator of all flag components
ALL_FLAGS: Tuple[DepFlag, DepFlag, DepFlag, DepFlag] = (BUILD, LINK, RUN, TEST)
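How the flag constants compose, with dt standing for spack.deptypes as elsewhere in this compare:

import spack.deptypes as dt

deptype = dt.LINK | dt.RUN          # compose edge kinds bitwise
assert deptype & dt.LINK            # membership test is a bitwise "and"
assert not dt.NONE                  # NONE is 0, i.e. no dependency types
assert dt.ALL_FLAGS == (dt.BUILD, dt.LINK, dt.RUN, dt.TEST)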

View File

@@ -34,7 +34,7 @@ class OpenMpi(Package):
import functools
import os.path
import re
from typing import TYPE_CHECKING, Any, Callable, List, Optional, Set, Tuple, Union
from typing import Any, Callable, List, Optional, Set, Tuple, Union
import llnl.util.lang
import llnl.util.tty.color
@@ -57,9 +57,6 @@ class OpenMpi(Package):
VersionLookupError,
)
if TYPE_CHECKING:
import spack.package_base
__all__ = [
"DirectiveError",
"DirectiveMeta",
@@ -352,7 +349,6 @@ def remove_directives(arg):
return _decorator
SubmoduleCallback = Callable[["spack.package_base.PackageBase"], Union[str, List[str], bool]]
directive = DirectiveMeta.directive
@@ -384,7 +380,7 @@ def version(
tag: Optional[str] = None,
branch: Optional[str] = None,
get_full_repo: Optional[bool] = None,
submodules: Union[SubmoduleCallback, Optional[bool]] = None,
submodules: Optional[bool] = None,
submodules_delete: Optional[bool] = None,
# other version control
svn: Optional[str] = None,

View File

@@ -21,6 +21,7 @@
import llnl.util.filesystem as fs
import llnl.util.tty as tty
import llnl.util.tty.color as clr
from llnl.util.lang import dedupe
from llnl.util.link_tree import ConflictingSpecsError
from llnl.util.symlink import symlink
@@ -378,8 +379,8 @@ def _rewrite_relative_dev_paths_on_relocation(env, init_file_dir):
if not dev_specs:
return
for name, entry in dev_specs.items():
dev_path = substitute_path_variables(entry["path"])
expanded_path = spack.util.path.canonicalize_path(dev_path, default_wd=init_file_dir)
dev_path = entry["path"]
expanded_path = os.path.normpath(os.path.join(init_file_dir, entry["path"]))
# Skip if the expanded path is the same (e.g. when absolute)
if dev_path == expanded_path:
@@ -662,26 +663,30 @@ def __contains__(self, spec):
return True
def specs_for_view(self, concrete_roots: List[Spec]) -> List[Spec]:
"""Flatten the DAGs of the concrete roots, keep only unique, selected, and installed specs
in topological order from root to leaf."""
if self.link == "all":
deptype = dt.LINK | dt.RUN
elif self.link == "run":
deptype = dt.RUN
def specs_for_view(self, concretized_root_specs):
"""
From the list of concretized user specs in the environment, flatten
the dags, and filter selected, installed specs, remove duplicates on dag hash.
"""
# With deps, requires traversal
if self.link == "all" or self.link == "run":
deptype = ("run") if self.link == "run" else ("link", "run")
specs = list(
traverse.traverse_nodes(
concretized_root_specs, deptype=deptype, key=traverse.by_dag_hash
)
)
else:
deptype = dt.NONE
specs = traverse.traverse_nodes(
concrete_roots, order="topo", deptype=deptype, key=traverse.by_dag_hash
)
specs = list(dedupe(concretized_root_specs, key=traverse.by_dag_hash))
# Filter selected, installed specs
with spack.store.STORE.db.read_transaction():
return [s for s in specs if s in self and s.installed]
specs = [s for s in specs if s in self and s.installed]
def regenerate(self, concrete_roots: List[Spec]) -> None:
specs = self.specs_for_view(concrete_roots)
return specs
def regenerate(self, concretized_root_specs):
specs = self.specs_for_view(concretized_root_specs)
# To ensure there are no conflicts with packages being installed
# that cannot be resolved or have repos that have been removed
@@ -698,14 +703,14 @@ def regenerate(self, concrete_roots: List[Spec]) -> None:
old_root = self._current_root
if new_root == old_root:
tty.debug(f"View at {self.root} does not need regeneration.")
tty.debug("View at %s does not need regeneration." % self.root)
return
_error_on_nonempty_view_dir(new_root)
# construct view at new_root
if specs:
tty.msg(f"Updating view at {self.root}")
tty.msg("Updating view at {0}".format(self.root))
view = self.view(new=new_root)
@@ -715,7 +720,7 @@ def regenerate(self, concrete_roots: List[Spec]) -> None:
# Create a new view
try:
fs.mkdirp(new_root)
view.add_specs(*specs)
view.add_specs(*specs, with_dependencies=False)
# create symlink from tmp_symlink_name to new_root
if os.path.exists(tmp_symlink_name):
@@ -729,7 +734,7 @@ def regenerate(self, concrete_roots: List[Spec]) -> None:
try:
shutil.rmtree(new_root, ignore_errors=True)
os.unlink(tmp_symlink_name)
except OSError:
except (IOError, OSError):
pass
# Give an informative error message for the typical error case: two specs, same package
@@ -871,55 +876,9 @@ def _process_definition(self, item):
else:
self.spec_lists[name] = user_specs
def _process_view(self, env_view: Optional[Union[bool, str, Dict]]):
"""Process view option(s), which can be boolean, string, or None.
A boolean environment view option takes precedence over any that may
be included. So ``view: True`` results in the default view only. And
``view: False`` means the environment will have no view.
Args:
env_view: view option provided in the manifest or configuration
"""
def add_view(name, values):
"""Add the view with the name and the string or dict values."""
if isinstance(values, str):
self.views[name] = ViewDescriptor(self.path, values)
elif isinstance(values, dict):
self.views[name] = ViewDescriptor.from_dict(self.path, values)
else:
tty.error(f"Cannot add view named {name} for {type(values)} values {values}")
# If the configuration specifies 'view: False' then we are done
# processing views. If this is called with the environment's view
# view (versus an included view), then there are to be NO views.
if env_view is False:
return
# If the configuration specifies 'view: True' then only the default
# view will be created for the environment and we are done processing
# views.
if env_view is True:
add_view(default_view_name, self.view_path_default)
return
# Otherwise, the configuration has a subdirectory or dictionary.
if isinstance(env_view, str):
add_view(default_view_name, env_view)
elif env_view:
for name, values in env_view.items():
add_view(name, values)
# If we reach this point without an explicit view option then we
# provide the default view.
if self.views == dict():
self.views[default_view_name] = ViewDescriptor(self.path, self.view_path_default)
def _construct_state_from_manifest(self):
"""Set up user specs and views from the manifest file."""
self.spec_lists = collections.OrderedDict()
self.views = {}
for item in spack.config.get("definitions", []):
self._process_definition(item)
@@ -931,7 +890,20 @@ def _construct_state_from_manifest(self):
)
self.spec_lists[user_speclist_name] = user_specs
self._process_view(spack.config.get("view", True))
enable_view = env_configuration.get("view")
# enable_view can be boolean, string, or None
if enable_view is True or enable_view is None:
self.views = {default_view_name: ViewDescriptor(self.path, self.view_path_default)}
elif isinstance(enable_view, str):
self.views = {default_view_name: ViewDescriptor(self.path, enable_view)}
elif enable_view:
path = self.path
self.views = dict(
(name, ViewDescriptor.from_dict(path, values))
for name, values in enable_view.items()
)
else:
self.views = {}
@property
def user_specs(self):
@@ -1485,6 +1457,44 @@ def _concretize_separately(self, tests=False):
]
return results
def concretize_and_add(self, user_spec, concrete_spec=None, tests=False):
"""Concretize and add a single spec to the environment.
Concretize the provided ``user_spec`` and add it along with the
concretized result to the environment. If the given ``user_spec`` was
already present in the environment, this does not add a duplicate.
The concretized spec will be added unless both the ``user_spec`` and an
associated concrete spec were already present.
Args:
concrete_spec: if provided, then it is assumed that it is the
result of concretizing the provided ``user_spec``
"""
if self.unify is True:
msg = (
"cannot install a single spec in an environment that is "
"configured to be concretized together. Run instead:\n\n"
" $ spack add <spec>\n"
" $ spack install\n"
)
raise SpackEnvironmentError(msg)
spec = Spec(user_spec)
if self.add(spec):
concrete = concrete_spec or spec.concretized(tests=tests)
self._add_concrete_spec(spec, concrete)
else:
# spec might be in the user_specs, but not installed.
# TODO: Redo name-based comparison for old style envs
spec = next(s for s in self.user_specs if s.satisfies(user_spec))
concrete = self.specs_by_hash.get(spec.dag_hash())
if not concrete:
concrete = spec.concretized(tests=tests)
self._add_concrete_spec(spec, concrete)
return concrete
@property
def default_view(self):
if not self.has_view(default_view_name):
@@ -2052,6 +2062,7 @@ def write(self, regenerate: bool = True) -> None:
if regenerate:
self.regenerate_views()
spack.hooks.post_env_write(self)
self.new_specs.clear()
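Stepping back to specs_for_view above, its traversal in isolation looks like this (a sketch; roots stands in for the environment's concrete root specs):

import spack.deptypes as dt
import spack.traverse as traverse

roots = []  # stand-in for the environment's concrete root specs
specs = list(
    traverse.traverse_nodes(
        roots, order="topo", deptype=dt.LINK | dt.RUN, key=traverse.by_dag_hash
    )
)
# Topological (root-to-leaf) order, deduplicated by DAG hash, so view
# linking sees each prefix once and parents before their dependencies.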

View File

@@ -697,6 +697,7 @@ def __str__(self):
@fetcher
class GitFetchStrategy(VCSFetchStrategy):
"""
Fetch strategy that gets source code from a git repository.
Use like this in a package:
@@ -929,12 +930,9 @@ def clone(self, dest=None, commit=None, branch=None, tag=None, bare=False):
git_commands = []
submodules = self.submodules
if callable(submodules):
submodules = submodules(self.package)
if submodules:
if isinstance(submodules, str):
submodules = [submodules]
git_commands.append(["submodule", "init", "--"] + submodules)
git_commands.append(["submodule", "update", "--recursive"])
submodules = list(submodules(self.package))
git_commands.append(["submodule", "init", "--"] + submodules)
git_commands.append(["submodule", "update", "--recursive"])
elif submodules:
git_commands.append(["submodule", "update", "--init", "--recursive"])
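A package using the callable form this hunk restores; the callback may return a str, a list, or a bool. Names here are invented for the example.

from spack.package import *

def _submodules(pkg):
    # Fetch the heavy data submodule only when the variant asks for it.
    return ["data"] if pkg.spec.satisfies("+data") else False

class ExampleGit(Package):
    """Hypothetical git-based package, for illustration only."""
    git = "https://example.com/example.git"
    variant("data", default=False, description="Fetch bundled data")
    version("main", branch="main", submodules=_submodules)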
@@ -1091,6 +1089,7 @@ def __str__(self):
@fetcher
class SvnFetchStrategy(VCSFetchStrategy):
"""Fetch strategy that gets source code from a subversion repository.
Use like this in a package:
@@ -1185,6 +1184,7 @@ def __str__(self):
@fetcher
class HgFetchStrategy(VCSFetchStrategy):
"""
Fetch strategy that gets source code from a Mercurial repository.
Use like this in a package:

View File

@@ -32,7 +32,6 @@
from llnl.util.tty.color import colorize
import spack.config
import spack.paths
import spack.projections
import spack.relocate
import spack.schema.projections
@@ -92,16 +91,16 @@ def view_copy(src: str, dst: str, view, spec: Optional[spack.spec.Spec] = None):
prefix_to_projection[spack.store.STORE.layout.root] = view._root
# This is vestigial code for the *old* location of sbang.
prefix_to_projection[f"#!/bin/bash {spack.paths.spack_root}/bin/sbang"] = (
sbang.sbang_shebang_line()
)
prefix_to_projection[
"#!/bin/bash {0}/bin/sbang".format(spack.paths.spack_root)
] = sbang.sbang_shebang_line()
spack.relocate.relocate_text(files=[dst], prefixes=prefix_to_projection)
try:
os.chown(dst, src_stat.st_uid, src_stat.st_gid)
except OSError:
tty.debug(f"Can't change the permissions for {dst}")
tty.debug("Can't change the permissions for %s" % dst)
def view_func_parser(parsed_name):
@@ -113,7 +112,7 @@ def view_func_parser(parsed_name):
elif parsed_name in ("add", "symlink", "soft"):
return view_symlink
else:
raise ValueError(f"invalid link type for view: '{parsed_name}'")
raise ValueError("invalid link type for view: '%s'" % parsed_name)
def inverse_view_func_parser(view_type):
@@ -271,10 +270,9 @@ def __init__(self, root, layout, **kwargs):
# Ensure projections are the same from each source
# Read projections file from view
if self.projections != self.read_projections():
raise ConflictingProjectionsError(
f"View at {self._root} has projections file"
" which does not match projections passed manually."
)
msg = "View at %s has projections file" % self._root
msg += " which does not match projections passed manually."
raise ConflictingProjectionsError(msg)
self._croot = colorize_root(self._root) + " "
@@ -315,11 +313,11 @@ def add_specs(self, *specs, **kwargs):
def add_standalone(self, spec):
if spec.external:
tty.warn(f"{self._croot}Skipping external package: {colorize_spec(spec)}")
tty.warn(self._croot + "Skipping external package: %s" % colorize_spec(spec))
return True
if self.check_added(spec):
tty.warn(f"{self._croot}Skipping already linked package: {colorize_spec(spec)}")
tty.warn(self._croot + "Skipping already linked package: %s" % colorize_spec(spec))
return True
self.merge(spec)
@@ -327,7 +325,7 @@ def add_standalone(self, spec):
self.link_meta_folder(spec)
if self.verbose:
tty.info(f"{self._croot}Linked package: {colorize_spec(spec)}")
tty.info(self._croot + "Linked package: %s" % colorize_spec(spec))
return True
def merge(self, spec, ignore=None):
@@ -395,7 +393,7 @@ def needs_file(spec, file):
for file in files:
if not os.path.lexists(file):
tty.warn(f"Tried to remove {file} which does not exist")
tty.warn("Tried to remove %s which does not exist" % file)
continue
# remove if file is not owned by any other package in the view
@@ -406,7 +404,7 @@ def needs_file(spec, file):
# we are currently removing, as we remove files before unlinking the
# metadata directory.
if len([s for s in specs if needs_file(s, file)]) <= 1:
tty.debug(f"Removing file {file}")
tty.debug("Removing file " + file)
os.remove(file)
def check_added(self, spec):
@@ -479,14 +477,14 @@ def remove_standalone(self, spec):
Remove (unlink) a standalone package from this view.
"""
if not self.check_added(spec):
tty.warn(f"{self._croot}Skipping package not linked in view: {spec.name}")
tty.warn(self._croot + "Skipping package not linked in view: %s" % spec.name)
return
self.unmerge(spec)
self.unlink_meta_folder(spec)
if self.verbose:
tty.info(f"{self._croot}Removed package: {colorize_spec(spec)}")
tty.info(self._croot + "Removed package: %s" % colorize_spec(spec))
def get_projection_for_spec(self, spec):
"""
@@ -560,9 +558,9 @@ def print_conflict(self, spec_active, spec_specified, level="error"):
linked = tty.color.colorize(" (@gLinked@.)", color=color)
specified = tty.color.colorize("(@rSpecified@.)", color=color)
cprint(
f"{self._croot}Package conflict detected:\n"
f"{linked} {colorize_spec(spec_active)}\n"
f"{specified} {colorize_spec(spec_specified)}"
self._croot + "Package conflict detected:\n"
"%s %s\n" % (linked, colorize_spec(spec_active))
+ "%s %s" % (specified, colorize_spec(spec_specified))
)
def print_status(self, *specs, **kwargs):
@@ -574,14 +572,14 @@ def print_status(self, *specs, **kwargs):
for s, v in zip(specs, in_view):
if not v:
tty.error(f"{self._croot}Package not linked: {s.name}")
tty.error(self._croot + "Package not linked: %s" % s.name)
elif s != v:
self.print_conflict(v, s, level="warn")
in_view = list(filter(None, in_view))
if len(specs) > 0:
tty.msg(f"Packages linked in {self._croot[:-1]}:")
tty.msg("Packages linked in %s:" % self._croot[:-1])
# Make a dict with specs keyed by architecture and compiler.
index = index_by(specs, ("architecture", "compiler"))
@@ -591,19 +589,20 @@ def print_status(self, *specs, **kwargs):
if i > 0:
print()
header = (
f"{spack.spec.ARCHITECTURE_COLOR}{{{architecture}}} "
f"/ {spack.spec.COMPILER_COLOR}{{{compiler}}}"
header = "%s{%s} / %s{%s}" % (
spack.spec.ARCHITECTURE_COLOR,
architecture,
spack.spec.COMPILER_COLOR,
compiler,
)
tty.hline(colorize(header), char="-")
specs = index[(architecture, compiler)]
specs.sort()
abbreviated = [
s.cformat("{name}{@version}{%compiler}{compiler_flags}{variants}")
for s in specs
]
format_string = "{name}{@version}"
format_string += "{%compiler}{compiler_flags}{variants}"
abbreviated = [s.cformat(format_string) for s in specs]
# Print one spec per line along with prefix path
width = max(len(s) for s in abbreviated)
@@ -635,19 +634,22 @@ def unlink_meta_folder(self, spec):
class SimpleFilesystemView(FilesystemView):
"""A simple and partial implementation of FilesystemView focused on performance and immutable
views, where specs cannot be removed after they were added."""
"""A simple and partial implementation of FilesystemView focused on
performance and immutable views, where specs cannot be removed after they
were added."""
def __init__(self, root, layout, **kwargs):
super().__init__(root, layout, **kwargs)
def _sanity_check_view_projection(self, specs):
"""A very common issue is that we end up with two specs of the same package, that project
to the same prefix. We want to catch that as early as possible and give a sensible error to
the user. Here we use the metadata dir (.spack) projection as a quick test to see whether
two specs in the view are going to clash. The metadata dir is used because it's always
added by Spack with identical files, so a guaranteed clash that's easily verified."""
seen = {}
"""A very common issue is that we end up with two specs of the same
package, that project to the same prefix. We want to catch that as
early as possible and give a sensible error to the user. Here we use
the metadata dir (.spack) projection as a quick test to see whether
two specs in the view are going to clash. The metadata dir is used
because it's always added by Spack with identical files, so a
guaranteed clash that's easily verified."""
seen = dict()
for current_spec in specs:
metadata_dir = self.relative_metadata_dir_for_spec(current_spec)
conflicting_spec = seen.get(metadata_dir)
@@ -655,8 +657,7 @@ def _sanity_check_view_projection(self, specs):
raise ConflictingSpecsError(current_spec, conflicting_spec)
seen[metadata_dir] = current_spec
def add_specs(self, *specs: spack.spec.Spec) -> None:
"""Link a root-to-leaf topologically ordered list of specs into the view."""
def add_specs(self, *specs, **kwargs):
assert all((s.concrete for s in specs))
if len(specs) == 0:
return
@@ -667,6 +668,9 @@ def add_specs(self, *specs: spack.spec.Spec) -> None:
tty.warn("Skipping external package: " + s.short_spec)
specs = [s for s in specs if not s.external]
if kwargs.get("exclude", None):
specs = set(filter_exclude(specs, kwargs["exclude"]))
self._sanity_check_view_projection(specs)
# Ignore spack meta data folder.
@@ -691,11 +695,13 @@ def skip_list(file):
# Inform about file-file conflicts.
if visitor.file_conflicts:
if self.ignore_conflicts:
tty.debug(f"{len(visitor.file_conflicts)} file conflicts")
tty.debug("{0} file conflicts".format(len(visitor.file_conflicts)))
else:
raise MergeConflictSummary(visitor.file_conflicts)
tty.debug(f"Creating {len(visitor.directories)} dirs and {len(visitor.files)} links")
tty.debug(
"Creating {0} dirs and {1} links".format(len(visitor.directories), len(visitor.files))
)
# Make the directory structure
for dst in visitor.directories:

View File

@@ -15,6 +15,13 @@
* post_install(spec, explicit)
* pre_uninstall(spec)
* post_uninstall(spec)
* on_install_start(spec)
* on_install_success(spec)
* on_install_failure(spec)
* on_phase_success(pkg, phase_name, log_file)
* on_phase_error(pkg, phase_name, log_file)
* post_env_write(env)
This can be used to implement support for things like module
systems (e.g. modules, lmod, etc.) or to add other custom
@@ -71,5 +78,17 @@ def __call__(self, *args, **kwargs):
pre_install = _HookRunner("pre_install")
post_install = _HookRunner("post_install")
# These hooks are run within an install subprocess
pre_uninstall = _HookRunner("pre_uninstall")
post_uninstall = _HookRunner("post_uninstall")
on_phase_success = _HookRunner("on_phase_success")
on_phase_error = _HookRunner("on_phase_error")
# These are hooks in installer.py, before starting install subprocess
on_install_start = _HookRunner("on_install_start")
on_install_success = _HookRunner("on_install_success")
on_install_failure = _HookRunner("on_install_failure")
on_install_cancel = _HookRunner("on_install_cancel")
# Environment hooks
post_env_write = _HookRunner("post_env_write")
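For reference, a hook consumer in miniature; a sketch assuming the usual discovery rule that modules placed in spack/hooks/ simply define functions named after the hooks they handle:

import llnl.util.tty as tty

def post_install(spec, explicit):
    tty.msg(f"installed {spec.short_spec} (explicit={explicit})")

def on_install_failure(spec):
    tty.warn(f"install failed: {spec.short_spec}")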

View File

@@ -4,7 +4,7 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os
from typing import BinaryIO, Optional, Tuple
from typing import IO, Optional, Tuple
import llnl.util.tty as tty
from llnl.util.filesystem import BaseDirectoryVisitor, visit_directory_tree
@@ -18,7 +18,7 @@ def should_keep(path: bytes) -> bool:
return path.startswith(b"$") or (os.path.isabs(path) and os.path.lexists(path))
def _drop_redundant_rpaths(f: BinaryIO) -> Optional[Tuple[bytes, bytes]]:
def _drop_redundant_rpaths(f: IO) -> Optional[Tuple[bytes, bytes]]:
"""Drop redundant entries from rpath.
Args:

View File

@@ -1705,6 +1705,7 @@ def _install_task(self, task: BuildTask, install_status: InstallStatus) -> None:
except spack.build_environment.StopPhase as e:
# A StopPhase exception means that do_install was asked to
# stop early from clients, and is not an error at this point
spack.hooks.on_install_failure(task.request.pkg.spec)
pid = f"{self.pid}: " if tty.show_pid() else ""
tty.debug(f"{pid}{str(e)}")
tty.debug(f"Package stage directory: {pkg.stage.source_path}")
@@ -2010,6 +2011,7 @@ def install(self) -> None:
if task is None:
continue
spack.hooks.on_install_start(task.request.pkg.spec)
install_args = task.request.install_args
keep_prefix = install_args.get("keep_prefix")
@@ -2035,6 +2037,9 @@ def install(self) -> None:
tty.warn(f"{pkg_id} does NOT actually have any uninstalled deps left")
dep_str = "dependencies" if task.priority > 1 else "dependency"
# Hook to indicate task failure, but without an exception
spack.hooks.on_install_failure(task.request.pkg.spec)
raise InstallError(
f"Cannot proceed with {pkg_id}: {task.priority} uninstalled "
f"{dep_str}: {','.join(task.uninstalled_deps)}",
@@ -2057,6 +2062,11 @@ def install(self) -> None:
tty.warn(f"{pkg_id} failed to install")
self._update_failed(task)
# Mark that the package failed
# TODO: this should also be for the task.pkg, but we don't
# model transitive yet.
spack.hooks.on_install_failure(task.request.pkg.spec)
if self.fail_fast:
raise InstallError(fail_fast_err, pkg=pkg)
@@ -2159,6 +2169,7 @@ def install(self) -> None:
tty.error(
f"Failed to install {pkg.name} due to " f"{exc.__class__.__name__}: {str(exc)}"
)
spack.hooks.on_install_cancel(task.request.pkg.spec)
raise
except binary_distribution.NoChecksumException as exc:
@@ -2177,6 +2188,7 @@ def install(self) -> None:
except (Exception, SystemExit) as exc:
self._update_failed(task, True, exc)
spack.hooks.on_install_failure(task.request.pkg.spec)
# Best effort installs suppress the exception and mark the
# package as a failure.
@@ -2360,6 +2372,9 @@ def run(self) -> bool:
_print_timer(pre=self.pre, pkg_id=self.pkg_id, timer=self.timer)
_print_installed_pkg(self.pkg.prefix)
# Send final status that install is successful
spack.hooks.on_install_success(self.pkg.spec)
# preserve verbosity across runs
return self.echo
@@ -2438,10 +2453,15 @@ def _real_install(self) -> None:
# Catch any errors to report to logging
self.timer.start(phase_fn.name)
phase_fn.execute()
spack.hooks.on_phase_success(pkg, phase_fn.name, log_file)
self.timer.stop(phase_fn.name)
except BaseException:
combine_phase_logs(pkg.phase_log_files, pkg.log_path)
spack.hooks.on_phase_error(pkg, phase_fn.name, log_file)
# phase error indicates install error
spack.hooks.on_install_failure(pkg.spec)
raise
# We assume loggers share echo True/False

View File

@@ -1038,9 +1038,9 @@ def finish_parse_and_run(parser, cmd_name, main_args, env_format_error):
set_working_dir()
# now we can actually execute the command.
if main_args.spack_profile or main_args.sorted_profile:
if args.spack_profile or args.sorted_profile:
_profile_wrapper(command, parser, args, unknown)
elif main_args.pdb:
elif args.pdb:
import pdb
pdb.runctx("_invoke_command(command, parser, args, unknown)", globals(), locals())

View File

@@ -161,7 +161,7 @@ def upload_blob(
def upload_manifest(
ref: ImageReference,
manifest: dict,
oci_manifest: dict,
tag: bool = True,
_urlopen: spack.oci.opener.MaybeOpen = None,
):
@@ -169,7 +169,7 @@ def upload_manifest(
Args:
ref: The image reference.
manifest: The manifest or index.
oci_manifest: The OCI manifest or index.
tag: When true, use the tag, otherwise use the digest,
this is relevant for multi-arch images, where the
tag is an index, referencing the manifests by digest.
@@ -179,7 +179,7 @@ def upload_manifest(
"""
_urlopen = _urlopen or spack.oci.opener.urlopen
data = json.dumps(manifest, separators=(",", ":")).encode()
data = json.dumps(oci_manifest, separators=(",", ":")).encode()
digest = Digest.from_sha256(hashlib.sha256(data).hexdigest())
size = len(data)
@@ -190,7 +190,7 @@ def upload_manifest(
url=ref.manifest_url(),
method="PUT",
data=data,
headers={"Content-Type": manifest["mediaType"]},
headers={"Content-Type": oci_manifest["mediaType"]},
)
response = _urlopen(request)
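
The parameter rename aside, the important invariant in upload_manifest is that the digest is computed over the exact serialized bytes that get uploaded. A small standalone illustration of that step (the manifest contents here are invented; field names follow the OCI image spec):

import hashlib
import json

oci_manifest = {
    "schemaVersion": 2,
    "mediaType": "application/vnd.oci.image.manifest.v1+json",
    "config": {
        "mediaType": "application/vnd.oci.image.config.v1+json",
        "digest": "sha256:" + "0" * 64,
        "size": 2,
    },
    "layers": [],
}

# Serialize with no whitespace, exactly as the hunk above does, so the
# digest covers the same bytes that are sent in the PUT request.
data = json.dumps(oci_manifest, separators=(",", ":")).encode()
digest = "sha256:" + hashlib.sha256(data).hexdigest()
print(digest, len(data))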

View File

@@ -9,8 +9,6 @@
import platform
import subprocess
from llnl.util import tty
from spack.error import SpackError
from spack.util import windows_registry as winreg
from spack.version import Version
@@ -85,50 +83,11 @@ def compiler_search_paths(self):
os.path.join(str(os.getenv("ONEAPI_ROOT")), "compiler", "*", "windows", "bin")
)
)
# Second strategy: Find MSVC via the registry
def try_query_registry(retry=False):
winreg_report_error = lambda e: tty.debug(
'Windows registry query on "SOFTWARE\\WOW6432Node\\Microsoft" '
f"under HKEY_LOCAL_MACHINE: {str(e)}"
)
try:
# Registry interactions are subject to race conditions, etc., and can
# generally be flaky, so do this in a catch block to prevent registry
# issues from interfering with compiler detection
msft = winreg.WindowsRegistryView(
"SOFTWARE\\WOW6432Node\\Microsoft", winreg.HKEY.HKEY_LOCAL_MACHINE
)
return msft.find_subkeys(r"VisualStudio_.*", recursive=False)
except OSError as e:
# OSErrors propagated to the caller by Spack's registry module are expected
# and indicate a known issue with the registry query, i.e. the user does
# not have permissions or the key/value doesn't exist
winreg_report_error(e)
return []
except winreg.InvalidRegistryOperation as e:
# Other errors raised by Spack's registry module indicate
# an unexpected error type and are handled specifically,
# as the underlying cause is difficult/impossible to determine
# without manually exploring the registry.
# These errors can also be spurious (race conditions) and may
# resolve on re-execution of the query, or be permanent
# (specific types of permission issues), but the registry
# raises the same exception for all types of atypical errors
if retry:
winreg_report_error(e)
return []
vs_entries = try_query_registry()
if not vs_entries:
# Occasional spurious race conditions can arise when reading the MS reg
# typically these race conditions resolve immediately and we can safely
# retry the reg query without waiting
# Note: Winreg does not support locking
vs_entries = try_query_registry(retry=True)
msft = winreg.WindowsRegistryView(
"SOFTWARE\\WOW6432Node\\Microsoft", winreg.HKEY.HKEY_LOCAL_MACHINE
)
vs_entries = msft.find_subkeys(r"VisualStudio_.*")
vs_paths = []
def clean_vs_path(path):
@@ -140,8 +99,11 @@ def clean_vs_path(path):
val = entry.get_subkey("Capabilities").get_value("ApplicationDescription").value
vs_paths.append(clean_vs_path(val))
except FileNotFoundError as e:
if hasattr(e, "winerror") and e.winerror == 2:
pass
if hasattr(e, "winerror"):
if e.winerror == 2:
pass
else:
raise
else:
raise
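
One way to read the condition these hunks collapse: swallow Windows error 2 ("file not found", which the registry API also raises for a missing key or value) and re-raise everything else. A hypothetical helper expressing the same logic (the function name and usage are invented for illustration):

def reraise_unless_winerror_2(exc: OSError) -> None:
    """Ignore Windows error 2 (missing file/registry key); re-raise the rest.
    On non-Windows Python builds OSError has no winerror attribute, so
    getattr keeps the check portable."""
    if getattr(exc, "winerror", None) == 2:
        return
    raise exc

# hypothetical usage inside the registry-walking loop above:
# try:
#     val = entry.get_subkey("Capabilities").get_value("ApplicationDescription")
# except FileNotFoundError as e:
#     reraise_unless_winerror_2(e)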

View File

@@ -7,7 +7,6 @@
import os
import re
from collections import OrderedDict
from typing import List, Optional
import macholib.mach_o
import macholib.MachO
@@ -48,7 +47,7 @@ def __init__(self, file_path, root_path):
@memoized
def _patchelf() -> Optional[executable.Executable]:
def _patchelf():
"""Return the full path to the patchelf binary, if available, else None."""
import spack.bootstrap
@@ -56,7 +55,9 @@ def _patchelf() -> Optional[executable.Executable]:
return None
with spack.bootstrap.ensure_bootstrap_configuration():
return spack.bootstrap.ensure_patchelf_in_path_or_raise()
patchelf = spack.bootstrap.ensure_patchelf_in_path_or_raise()
return patchelf.path
def _elf_rpaths_for(path):
@@ -339,34 +340,31 @@ def macholib_get_paths(cur_path):
return (rpaths, deps, ident)
def _set_elf_rpaths_and_interpreter(
target: str, rpaths: List[str], interpreter: Optional[str] = None
) -> Optional[str]:
"""Replace the original RPATH of the target with the paths passed as arguments.
def _set_elf_rpaths(target, rpaths):
"""Replace the original RPATH of the target with the paths passed
as arguments.
Args:
target: target executable. Must be an ELF object.
rpaths: paths to be set in the RPATH
interpreter: optionally set the interpreter
Returns:
A string concatenating the stdout and stderr of the call to ``patchelf`` if it was invoked
A string concatenating the stdout and stderr of the call
to ``patchelf`` if it was invoked
"""
# Join the paths using ':' as a separator
rpaths_str = ":".join(rpaths)
patchelf, output = executable.Executable(_patchelf()), None
try:
# TODO: error handling is not great here?
# TODO: revisit the use of --force-rpath as it might be conditional
# TODO: if we want to support setting RUNPATH from binary packages
args = ["--force-rpath", "--set-rpath", rpaths_str]
if interpreter:
args.extend(["--set-interpreter", interpreter])
args.append(target)
return _patchelf()(*args, output=str, error=str)
patchelf_args = ["--force-rpath", "--set-rpath", rpaths_str, target]
output = patchelf(*patchelf_args, output=str, error=str)
except executable.ProcessError as e:
tty.warn(str(e))
return None
msg = "patchelf --force-rpath --set-rpath {0} failed with error {1}"
tty.warn(msg.format(target, e))
return output
def needs_binary_relocation(m_type, m_subtype):
@@ -503,12 +501,10 @@ def new_relocate_elf_binaries(binaries, prefix_to_prefix):
for path in binaries:
try:
elf.substitute_rpath_and_pt_interp_in_place_or_raise(path, prefix_to_prefix)
except elf.ElfCStringUpdatesFailed as e:
# Fall back to `patchelf --set-rpath ... --set-interpreter ...`
rpaths = e.rpath.new_value.decode("utf-8").split(":") if e.rpath else []
interpreter = e.pt_interp.new_value.decode("utf-8") if e.pt_interp else None
_set_elf_rpaths_and_interpreter(path, rpaths=rpaths, interpreter=interpreter)
elf.replace_rpath_in_place_or_raise(path, prefix_to_prefix)
except elf.ElfDynamicSectionUpdateFailed as e:
# Fall back to the old `patchelf --set-rpath` method.
_set_elf_rpaths(path, e.new.decode("utf-8").split(":"))
def relocate_elf_binaries(
@@ -550,10 +546,10 @@ def relocate_elf_binaries(
new_rpaths = _make_relative(new_binary, new_root, new_norm_rpaths)
# check to see if relative rpaths are changed before rewriting
if sorted(new_rpaths) != sorted(orig_rpaths):
_set_elf_rpaths_and_interpreter(new_binary, new_rpaths)
_set_elf_rpaths(new_binary, new_rpaths)
else:
new_rpaths = _transform_rpaths(orig_rpaths, orig_root, new_prefixes)
_set_elf_rpaths_and_interpreter(new_binary, new_rpaths)
_set_elf_rpaths(new_binary, new_rpaths)
def make_link_relative(new_links, orig_links):
@@ -600,7 +596,7 @@ def make_elf_binaries_relative(new_binaries, orig_binaries, orig_layout_root):
orig_rpaths = _elf_rpaths_for(new_binary)
if orig_rpaths:
new_rpaths = _make_relative(orig_binary, orig_layout_root, orig_rpaths)
_set_elf_rpaths_and_interpreter(new_binary, new_rpaths)
_set_elf_rpaths(new_binary, new_rpaths)
def warn_if_link_cant_be_relocated(link, target):
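
The `_set_elf_rpaths` fallback above shells out to patchelf via Spack's Executable wrapper. A rough equivalent using only the standard library, for readers who want to reproduce the RPATH rewrite outside Spack (this is a sketch, not Spack's code path):

import shutil
import subprocess
from typing import List, Optional

def set_rpaths(target: str, rpaths: List[str]) -> Optional[str]:
    """Best-effort RPATH rewrite with a system patchelf; returns the
    combined output, or None if patchelf is missing or fails."""
    patchelf = shutil.which("patchelf")
    if patchelf is None:
        return None
    try:
        proc = subprocess.run(
            [patchelf, "--force-rpath", "--set-rpath", ":".join(rpaths), target],
            capture_output=True,
            text=True,
            check=True,
        )
    except subprocess.CalledProcessError as e:
        print(f"patchelf --set-rpath failed on {target}: {e.stderr.strip()}")
        return None
    return proc.stdout + proc.stderr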

View File

@@ -6,6 +6,7 @@
import warnings
import llnl.util.lang
import llnl.util.tty
# jsonschema is imported lazily as it is heavy to import
@@ -61,3 +62,25 @@ def _deprecated_properties(validator, deprecated, instance, schema):
Validator = llnl.util.lang.Singleton(_make_validator)
spec_list_schema = {
"type": "array",
"default": [],
"items": {
"anyOf": [
{
"type": "object",
"additionalProperties": False,
"properties": {
"matrix": {
"type": "array",
"items": {"type": "array", "items": {"type": "string"}},
},
"exclude": {"type": "array", "items": {"type": "string"}},
},
},
{"type": "string"},
{"type": "null"},
]
},
}
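
To see what this spec_list_schema accepts and rejects, a quick validation round-trip (assuming the jsonschema package; the schema is copied from the definition above and the example spec strings are made up):

import jsonschema

spec_list_schema = {
    "type": "array",
    "default": [],
    "items": {
        "anyOf": [
            {
                "type": "object",
                "additionalProperties": False,
                "properties": {
                    "matrix": {
                        "type": "array",
                        "items": {"type": "array", "items": {"type": "string"}},
                    },
                    "exclude": {"type": "array", "items": {"type": "string"}},
                },
            },
            {"type": "string"},
            {"type": "null"},
        ]
    },
}

ok = ["zlib@1.3", {"matrix": [["hdf5", "zlib"], ["%gcc", "%clang"]]}, None]
jsonschema.validate(ok, spec_list_schema)  # passes silently

try:
    jsonschema.validate([{"matrix": "not-a-list"}], spec_list_schema)
except jsonschema.ValidationError as e:
    print(e.message)  # not valid under any of the given schemas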

View File

@@ -3,17 +3,16 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Schema for bootstrap.yaml configuration file."""
from typing import Any, Dict
#: Schema of a single source
_source_schema: Dict[str, Any] = {
_source_schema = {
"type": "object",
"properties": {"name": {"type": "string"}, "metadata": {"type": "string"}},
"additionalProperties": False,
"required": ["name", "metadata"],
}
properties: Dict[str, Any] = {
properties = {
"bootstrap": {
"type": "object",
"properties": {

View File

@@ -6,31 +6,27 @@
"""Schema for a buildcache spec.yaml file
.. literalinclude:: _spack_root/lib/spack/spack/schema/buildcache_spec.py
:lines: 15-
:lines: 13-
"""
from typing import Any, Dict
import spack.schema.spec
properties: Dict[str, Any] = {
# `buildinfo` is no longer needed as of Spack 0.21
"buildinfo": {"type": "object"},
"spec": {
"type": "object",
"additionalProperties": True,
"items": spack.schema.spec.properties,
},
"binary_cache_checksum": {
"type": "object",
"properties": {"hash_algorithm": {"type": "string"}, "hash": {"type": "string"}},
},
"buildcache_layout_version": {"type": "number"},
}
schema = {
"$schema": "http://json-schema.org/draft-07/schema#",
"title": "Spack buildcache specfile schema",
"type": "object",
"additionalProperties": False,
"properties": properties,
"properties": {
# `buildinfo` is no longer needed as of Spack 0.21
"buildinfo": {"type": "object"},
"spec": {
"type": "object",
"additionalProperties": True,
"items": spack.schema.spec.properties,
},
"binary_cache_checksum": {
"type": "object",
"properties": {"hash_algorithm": {"type": "string"}, "hash": {"type": "string"}},
},
"buildcache_layout_version": {"type": "number"},
},
}

View File

@@ -2,15 +2,16 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Schema for cdash.yaml configuration file.
.. literalinclude:: ../spack/schema/cdash.py
:lines: 13-
"""
from typing import Any, Dict
#: Properties for inclusion in other schemas
properties: Dict[str, Any] = {
properties = {
"cdash": {
"type": "object",
"additionalProperties": False,

View File

@@ -2,12 +2,12 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Schema for gitlab-ci.yaml configuration file.
.. literalinclude:: ../spack/schema/ci.py
:lines: 16-
:lines: 13-
"""
from typing import Any, Dict
from llnl.util.lang import union_dicts
@@ -164,7 +164,7 @@
}
#: Properties for inclusion in other schemas
properties: Dict[str, Any] = {
properties = {
"ci": {
"oneOf": [
# TODO: Replace with core-shared-properties in Spack 0.23

View File

@@ -2,17 +2,16 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Schema for compilers.yaml configuration file.
.. literalinclude:: _spack_root/lib/spack/spack/schema/compilers.py
:lines: 15-
:lines: 13-
"""
from typing import Any, Dict
import spack.schema.environment
#: Properties for inclusion in other schemas
properties: Dict[str, Any] = {
properties = {
"compilers": {
"type": "array",
"items": {

View File

@@ -2,14 +2,14 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Schema for concretizer.yaml configuration file.
.. literalinclude:: _spack_root/lib/spack/spack/schema/concretizer.py
:lines: 12-
:lines: 13-
"""
from typing import Any, Dict
properties: Dict[str, Any] = {
properties = {
"concretizer": {
"type": "object",
"additionalProperties": False,

View File

@@ -5,16 +5,15 @@
"""Schema for config.yaml configuration file.
.. literalinclude:: _spack_root/lib/spack/spack/schema/config.py
:lines: 17-
:lines: 13-
"""
from typing import Any, Dict
from llnl.util.lang import union_dicts
import spack.schema.projections
#: Properties for inclusion in other schemas
properties: Dict[str, Any] = {
properties = {
"config": {
"type": "object",
"default": {},

View File

@@ -3,7 +3,6 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Schema for the 'container' subsection of Spack environments."""
from typing import Any, Dict
_stages_from_dockerhub = {
"type": "object",
@@ -86,4 +85,4 @@
},
}
properties: Dict[str, Any] = {"container": container_schema}
properties = {"container": container_schema}

View File

@@ -11,115 +11,112 @@
This does not specify a configuration - it is an input format
that is consumed and transformed into Spack DB records.
"""
from typing import Any, Dict
properties: Dict[str, Any] = {
"_meta": {
"type": "object",
"additionalProperties": False,
"properties": {
"file-type": {"type": "string", "minLength": 1},
"cpe-version": {"type": "string", "minLength": 1},
"system-type": {"type": "string", "minLength": 1},
"schema-version": {"type": "string", "minLength": 1},
# Older schemas did not have "cpe-version", just the
# schema version; in that case it was just called "version"
"version": {"type": "string", "minLength": 1},
},
},
"compilers": {
"type": "array",
"items": {
"type": "object",
"additionalProperties": False,
"properties": {
"name": {"type": "string", "minLength": 1},
"version": {"type": "string", "minLength": 1},
"prefix": {"type": "string", "minLength": 1},
"executables": {
"type": "object",
"additionalProperties": False,
"properties": {
"cc": {"type": "string", "minLength": 1},
"cxx": {"type": "string", "minLength": 1},
"fc": {"type": "string", "minLength": 1},
},
},
"arch": {
"type": "object",
"required": ["os", "target"],
"additionalProperties": False,
"properties": {
"os": {"type": "string", "minLength": 1},
"target": {"type": "string", "minLength": 1},
},
},
},
},
},
"specs": {
"type": "array",
"items": {
"type": "object",
"required": ["name", "version", "arch", "compiler", "prefix", "hash"],
"additionalProperties": False,
"properties": {
"name": {"type": "string", "minLength": 1},
"version": {"type": "string", "minLength": 1},
"arch": {
"type": "object",
"required": ["platform", "platform_os", "target"],
"additionalProperties": False,
"properties": {
"platform": {"type": "string", "minLength": 1},
"platform_os": {"type": "string", "minLength": 1},
"target": {
"type": "object",
"additionalProperties": False,
"required": ["name"],
"properties": {"name": {"type": "string", "minLength": 1}},
},
},
},
"compiler": {
"type": "object",
"required": ["name", "version"],
"additionalProperties": False,
"properties": {
"name": {"type": "string", "minLength": 1},
"version": {"type": "string", "minLength": 1},
},
},
"dependencies": {
"type": "object",
"patternProperties": {
"\\w[\\w-]*": {
"type": "object",
"required": ["hash"],
"additionalProperties": False,
"properties": {
"hash": {"type": "string", "minLength": 1},
"type": {
"type": "array",
"items": {"type": "string", "minLength": 1},
},
},
}
},
},
"prefix": {"type": "string", "minLength": 1},
"rpm": {"type": "string", "minLength": 1},
"hash": {"type": "string", "minLength": 1},
"parameters": {"type": "object"},
},
},
},
}
schema = {
"$schema": "http://json-schema.org/schema#",
"title": "CPE manifest schema",
"type": "object",
"additionalProperties": False,
"properties": properties,
"properties": {
"_meta": {
"type": "object",
"additionalProperties": False,
"properties": {
"file-type": {"type": "string", "minLength": 1},
"cpe-version": {"type": "string", "minLength": 1},
"system-type": {"type": "string", "minLength": 1},
"schema-version": {"type": "string", "minLength": 1},
# Older schemas did not have "cpe-version", just the
# schema version; in that case it was just called "version"
"version": {"type": "string", "minLength": 1},
},
},
"compilers": {
"type": "array",
"items": {
"type": "object",
"additionalProperties": False,
"properties": {
"name": {"type": "string", "minLength": 1},
"version": {"type": "string", "minLength": 1},
"prefix": {"type": "string", "minLength": 1},
"executables": {
"type": "object",
"additionalProperties": False,
"properties": {
"cc": {"type": "string", "minLength": 1},
"cxx": {"type": "string", "minLength": 1},
"fc": {"type": "string", "minLength": 1},
},
},
"arch": {
"type": "object",
"required": ["os", "target"],
"additionalProperties": False,
"properties": {
"os": {"type": "string", "minLength": 1},
"target": {"type": "string", "minLength": 1},
},
},
},
},
},
"specs": {
"type": "array",
"items": {
"type": "object",
"required": ["name", "version", "arch", "compiler", "prefix", "hash"],
"additionalProperties": False,
"properties": {
"name": {"type": "string", "minLength": 1},
"version": {"type": "string", "minLength": 1},
"arch": {
"type": "object",
"required": ["platform", "platform_os", "target"],
"additioanlProperties": False,
"properties": {
"platform": {"type": "string", "minLength": 1},
"platform_os": {"type": "string", "minLength": 1},
"target": {
"type": "object",
"additionalProperties": False,
"required": ["name"],
"properties": {"name": {"type": "string", "minLength": 1}},
},
},
},
"compiler": {
"type": "object",
"required": ["name", "version"],
"additionalProperties": False,
"properties": {
"name": {"type": "string", "minLength": 1},
"version": {"type": "string", "minLength": 1},
},
},
"dependencies": {
"type": "object",
"patternProperties": {
"\\w[\\w-]*": {
"type": "object",
"required": ["hash"],
"additionalProperties": False,
"properties": {
"hash": {"type": "string", "minLength": 1},
"type": {
"type": "array",
"items": {"type": "string", "minLength": 1},
},
},
}
},
},
"prefix": {"type": "string", "minLength": 1},
"rpm": {"type": "string", "minLength": 1},
"hash": {"type": "string", "minLength": 1},
"parameters": {"type": "object"},
},
},
},
},
}
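
A pitfall worth noting with hand-maintained schemas like this one: JSON Schema validators ignore keywords they do not recognize, so a misspelled "additionalProperties" silently disables the check instead of failing loudly. A quick demonstration (assuming the jsonschema package):

import jsonschema

typo_schema = {"type": "object", "additioanlProperties": False}  # note the typo
jsonschema.validate({"unexpected": 1}, typo_schema)  # passes: unknown keywords are ignored

fixed_schema = {"type": "object", "additionalProperties": False}
try:
    jsonschema.validate({"unexpected": 1}, fixed_schema)
except jsonschema.ValidationError as e:
    print(e.message)  # 'unexpected' was unexpected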

View File

@@ -6,41 +6,12 @@
"""Schema for database index.json file
.. literalinclude:: _spack_root/lib/spack/spack/schema/database_index.py
:lines: 17-
:lines: 36-
"""
from typing import Any, Dict
import spack.schema.spec
# spack.schema.spec.properties
properties: Dict[str, Any] = {
"database": {
"type": "object",
"required": ["installs", "version"],
"additionalProperties": False,
"properties": {
"installs": {
"type": "object",
"patternProperties": {
r"^[\w\d]{32}$": {
"type": "object",
"properties": {
"spec": spack.schema.spec.properties,
"path": {"oneOf": [{"type": "string"}, {"type": "null"}]},
"installed": {"type": "boolean"},
"ref_count": {"type": "integer", "minimum": 0},
"explicit": {"type": "boolean"},
"installation_time": {"type": "number"},
},
}
},
},
"version": {"type": "string"},
},
}
}
#: Full schema with metadata
schema = {
"$schema": "http://json-schema.org/draft-07/schema#",
@@ -48,5 +19,30 @@
"type": "object",
"required": ["database"],
"additionalProperties": False,
"properties": properties,
"properties": {
"database": {
"type": "object",
"required": ["installs", "version"],
"additionalProperties": False,
"properties": {
"installs": {
"type": "object",
"patternProperties": {
r"^[\w\d]{32}$": {
"type": "object",
"properties": {
"spec": spack.schema.spec.properties,
"path": {"oneOf": [{"type": "string"}, {"type": "null"}]},
"installed": {"type": "boolean"},
"ref_count": {"type": "integer", "minimum": 0},
"explicit": {"type": "boolean"},
"installation_time": {"type": "number"},
},
}
},
},
"version": {"type": "string"},
},
}
},
}

View File

@@ -6,21 +6,20 @@
"""Schema for definitions
.. literalinclude:: _spack_root/lib/spack/spack/schema/definitions.py
:lines: 16-
:lines: 13-
"""
from typing import Any, Dict
from .spec_list import spec_list_schema
import spack.schema
#: Properties for inclusion in other schemas
properties: Dict[str, Any] = {
properties = {
"definitions": {
"type": "array",
"default": [],
"items": {
"type": "object",
"properties": {"when": {"type": "string"}},
"patternProperties": {r"^(?!when$)\w*": spec_list_schema},
"patternProperties": {r"^(?!when$)\w*": spack.schema.spec_list_schema},
},
}
}

View File

@@ -2,9 +2,9 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from typing import Any, Dict
properties: Dict[str, Any] = {
properties = {
"develop": {
"type": "object",
"default": {},

View File

@@ -6,46 +6,74 @@
"""Schema for env.yaml configuration file.
.. literalinclude:: _spack_root/lib/spack/spack/schema/env.py
:lines: 19-
:lines: 36-
"""
from typing import Any, Dict
from llnl.util.lang import union_dicts
import spack.schema.gitlab_ci # DEPRECATED
import spack.schema.merged
import spack.schema.projections
from .spec_list import spec_list_schema
#: Top level key in a manifest file
TOP_LEVEL_KEY = "spack"
properties: Dict[str, Any] = {
"spack": {
"type": "object",
"default": {},
"additionalProperties": False,
"properties": union_dicts(
# Include deprecated "gitlab-ci" section
spack.schema.gitlab_ci.properties,
# merged configuration scope schemas
spack.schema.merged.properties,
# extra environment schema properties
{
"include": {"type": "array", "default": [], "items": {"type": "string"}},
"specs": spec_list_schema,
},
),
}
}
projections_scheme = spack.schema.projections.properties["projections"]
schema = {
"$schema": "http://json-schema.org/draft-07/schema#",
"title": "Spack environment file schema",
"type": "object",
"additionalProperties": False,
"properties": properties,
"properties": {
"spack": {
"type": "object",
"default": {},
"additionalProperties": False,
"properties": union_dicts(
# Include deprecated "gitlab-ci" section
spack.schema.gitlab_ci.properties,
# merged configuration scope schemas
spack.schema.merged.properties,
# extra environment schema properties
{
"include": {"type": "array", "default": [], "items": {"type": "string"}},
"specs": spack.schema.spec_list_schema,
"view": {
"anyOf": [
{"type": "boolean"},
{"type": "string"},
{
"type": "object",
"patternProperties": {
r"\w+": {
"required": ["root"],
"additionalProperties": False,
"properties": {
"root": {"type": "string"},
"link": {
"type": "string",
"pattern": "(roots|all|run)",
},
"link_type": {"type": "string"},
"select": {
"type": "array",
"items": {"type": "string"},
},
"exclude": {
"type": "array",
"items": {"type": "string"},
},
"projections": projections_scheme,
},
}
},
},
]
},
},
),
}
},
}
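
The env schema builds its "spack" properties by merging several schema fragments with union_dicts. A minimal sketch of what that helper does (the real one lives in llnl.util.lang): merge mappings left to right, with later dictionaries overriding earlier keys.

def union_dicts(*dicts):
    """Merge mappings left to right; later keys win."""
    result = {}
    for d in dicts:
        result.update(d)
    return result

merged = union_dicts({"include": [], "specs": []}, {"view": True}, {"view": False})
assert merged == {"include": [], "specs": [], "view": False}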

View File

@@ -6,7 +6,6 @@
schemas.
"""
import collections.abc
from typing import Any, Dict
array_of_strings_or_num = {
"type": "array",
@@ -19,7 +18,7 @@
"patternProperties": {r"\w[\w-]*": {"anyOf": [{"type": "string"}, {"type": "number"}]}},
}
definition: Dict[str, Any] = {
definition = {
"type": "object",
"default": {},
"additionalProperties": False,

View File

@@ -6,9 +6,8 @@
"""Schema for gitlab-ci.yaml configuration file.
.. literalinclude:: ../spack/schema/gitlab_ci.py
:lines: 15-
:lines: 13-
"""
from typing import Any, Dict
from llnl.util.lang import union_dicts
@@ -36,7 +35,7 @@
runner_selector_schema = {
"type": "object",
"additionalProperties": True,
"additionalProperties": False,
"required": ["tags"],
"properties": runner_attributes_schema_items,
}
@@ -113,7 +112,7 @@
}
#: Properties for inclusion in other schemas
properties: Dict[str, Any] = {"gitlab-ci": gitlab_ci_properties}
properties = {"gitlab-ci": gitlab_ci_properties}
#: Full schema with metadata
schema = {

View File

@@ -6,10 +6,8 @@
"""Schema for configuration merged into one file.
.. literalinclude:: _spack_root/lib/spack/spack/schema/merged.py
:lines: 32-
:lines: 39-
"""
from typing import Any, Dict
from llnl.util.lang import union_dicts
import spack.schema.bootstrap
@@ -26,10 +24,9 @@
import spack.schema.packages
import spack.schema.repos
import spack.schema.upstreams
import spack.schema.view
#: Properties for inclusion in other schemas
properties: Dict[str, Any] = union_dicts(
properties = union_dicts(
spack.schema.bootstrap.properties,
spack.schema.cdash.properties,
spack.schema.compilers.properties,
@@ -44,7 +41,6 @@
spack.schema.packages.properties,
spack.schema.repos.properties,
spack.schema.upstreams.properties,
spack.schema.view.properties,
)

View File

@@ -6,9 +6,8 @@
"""Schema for mirrors.yaml configuration file.
.. literalinclude:: _spack_root/lib/spack/spack/schema/mirrors.py
:lines: 13-
:lines: 12-69
"""
from typing import Any, Dict
#: Common properties for connection specification
connection = {
@@ -51,7 +50,7 @@
}
#: Properties for inclusion in other schemas
properties: Dict[str, Any] = {
properties = {
"mirrors": {
"type": "object",
"default": {},

View File

@@ -6,10 +6,8 @@
"""Schema for modules.yaml configuration file.
.. literalinclude:: _spack_root/lib/spack/spack/schema/modules.py
:lines: 16-
:lines: 13-
"""
from typing import Any, Dict
import spack.schema.environment
import spack.schema.projections
@@ -143,7 +141,7 @@
# Properties for inclusion into other schemas (requires definitions)
properties: Dict[str, Any] = {
properties = {
"modules": {
"type": "object",
"additionalProperties": False,

View File

@@ -5,10 +5,8 @@
"""Schema for packages.yaml configuration files.
.. literalinclude:: _spack_root/lib/spack/spack/schema/packages.py
:lines: 14-
:lines: 13-
"""
from typing import Any, Dict
import spack.schema.environment
permissions = {
@@ -93,7 +91,7 @@
REQUIREMENT_URL = "https://spack.readthedocs.io/en/latest/packages_yaml.html#package-requirements"
#: Properties for inclusion in other schemas
properties: Dict[str, Any] = {
properties = {
"packages": {
"type": "object",
"default": {},

View File

@@ -6,12 +6,12 @@
"""Schema for projections.yaml configuration file.
.. literalinclude:: _spack_root/lib/spack/spack/schema/projections.py
:lines: 14-
:lines: 13-
"""
from typing import Any, Dict
#: Properties for inclusion in other schemas
properties: Dict[str, Any] = {
properties = {
"projections": {"type": "object", "patternProperties": {r"all|\w[\w-]*": {"type": "string"}}}
}

View File

@@ -6,14 +6,12 @@
"""Schema for repos.yaml configuration file.
.. literalinclude:: _spack_root/lib/spack/spack/schema/repos.py
:lines: 14-
:lines: 13-
"""
from typing import Any, Dict
#: Properties for inclusion in other schemas
properties: Dict[str, Any] = {
"repos": {"type": "array", "default": [], "items": {"type": "string"}}
}
properties = {"repos": {"type": "array", "default": [], "items": {"type": "string"}}}
#: Full schema with metadata

View File

@@ -8,9 +8,9 @@
TODO: This needs to be updated? Especially the hashes under properties.
.. literalinclude:: _spack_root/lib/spack/spack/schema/spec.py
:lines: 15-
:lines: 13-
"""
from typing import Any, Dict
target = {
"oneOf": [
@@ -57,7 +57,7 @@
}
#: Properties for inclusion in other schemas
properties: Dict[str, Any] = {
properties = {
"spec": {
"type": "object",
"additionalProperties": False,

View File

@@ -1,24 +0,0 @@
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
matrix_schema = {"type": "array", "items": {"type": "array", "items": {"type": "string"}}}
spec_list_schema = {
"type": "array",
"default": [],
"items": {
"anyOf": [
{
"type": "object",
"additionalProperties": False,
"properties": {
"matrix": matrix_schema,
"exclude": {"type": "array", "items": {"type": "string"}},
},
},
{"type": "string"},
{"type": "null"},
]
},
}

View File

@@ -2,10 +2,10 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from typing import Any, Dict
#: Properties for inclusion in other schemas
properties: Dict[str, Any] = {
properties = {
"upstreams": {
"type": "object",
"default": {},

View File

@@ -1,49 +0,0 @@
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Schema for view
.. literalinclude:: _spack_root/lib/spack/spack/schema/view.py
:lines: 15-
"""
from typing import Any, Dict
import spack.schema
projections_scheme = spack.schema.projections.properties["projections"]
#: Properties for inclusion in other schemas
properties: Dict[str, Any] = {
"view": {
"anyOf": [
{"type": "boolean"},
{"type": "string"},
{
"type": "object",
"patternProperties": {
r"\w+": {
"required": ["root"],
"additionalProperties": False,
"properties": {
"root": {"type": "string"},
"link": {"type": "string", "pattern": "(roots|all|run)"},
"link_type": {"type": "string"},
"select": {"type": "array", "items": {"type": "string"}},
"exclude": {"type": "array", "items": {"type": "string"}},
"projections": projections_scheme,
},
}
},
},
]
}
}
#: Full schema with metadata
schema = {
"$schema": "http://json-schema.org/draft-07/schema#",
"title": "Spack view configuration file schema",
"properties": properties,
}

View File

@@ -19,6 +19,51 @@
import archspec.cpu
import spack.config as sc
import spack.deptypes as dt
import spack.parser
import spack.paths as sp
import spack.util.path as sup
try:
import clingo # type: ignore[import]
# There may be a better way to detect this
clingo_cffi = hasattr(clingo.Symbol, "_rep")
except ImportError:
clingo = None # type: ignore
clingo_cffi = False
except AttributeError:
# Reaching this point indicates a broken clingo installation
# If Spack derived clingo, suggest user re-run bootstrap
# if non-spack, suggest user investigate installation
# assume Spack is not responsible for broken clingo
msg = (
f"Clingo installation at {clingo.__file__} is incomplete or invalid."
"Please repair installation or re-install. "
"Alternatively, consider installing clingo via Spack."
)
# check whether Spack is responsible
if (
pathlib.Path(
sup.canonicalize_path(sc.get("bootstrap:root", sp.default_user_bootstrap_path))
)
in pathlib.Path(clingo.__file__).parents
):
# Spack is responsible for the broken clingo
msg = (
"Spack bootstrapped copy of Clingo is broken, "
"please re-run the bootstrapping process via command `spack bootstrap now`."
" If this issue persists, please file a bug at: github.com/spack/spack"
)
raise RuntimeError(
"Clingo installation may be broken or incomplete, "
"please verify clingo has been installed correctly"
"\n\nClingo does not provide symbol clingo.Symbol"
f"{msg}"
)
import llnl.util.lang
import llnl.util.tty as tty
@@ -27,14 +72,11 @@
import spack.cmd
import spack.compilers
import spack.config
import spack.config as sc
import spack.deptypes as dt
import spack.directives
import spack.environment as ev
import spack.error
import spack.package_base
import spack.package_prefs
import spack.parser
import spack.platforms
import spack.repo
import spack.spec
@@ -47,23 +89,13 @@
import spack.version.git_ref_lookup
from spack import traverse
from .core import (
AspFunction,
NodeArgument,
ast_sym,
ast_type,
clingo,
clingo_cffi,
extract_args,
fn,
parse_files,
parse_term,
)
from .counter import FullDuplicatesCounter, MinimalDuplicatesCounter, NoDuplicatesCounter
GitOrStandardVersion = Union[spack.version.GitVersion, spack.version.StandardVersion]
TransformFunction = Callable[["spack.spec.Spec", List[AspFunction]], List[AspFunction]]
# these are from clingo.ast and bootstrapped later
ASTType = None
parse_files = None
#: Enable the addition of a runtime node
WITH_RUNTIME = sys.platform != "win32"
@@ -88,13 +120,29 @@
def default_clingo_control():
"""Return a control object with the default settings used in Spack"""
control = clingo().Control()
control = clingo.Control()
control.configuration.configuration = "tweety"
control.configuration.solver.heuristic = "Domain"
control.configuration.solver.opt_strategy = "usc,one"
return control
# backward compatibility functions for clingo ASTs
def ast_getter(*names):
def getter(node):
for name in names:
result = getattr(node, name, None)
if result:
return result
raise KeyError("node has no such keys: %s" % names)
return getter
ast_type = ast_getter("ast_type", "type")
ast_sym = ast_getter("symbol", "term")
class Provenance(enum.IntEnum):
"""Enumeration of the possible provenances of a version."""
@@ -253,6 +301,86 @@ def specify(spec):
return spack.spec.Spec(spec)
class AspObject:
"""Object representing a piece of ASP code."""
def _id(thing):
"""Quote string if needed for it to be a valid identifier."""
if isinstance(thing, AspObject):
return thing
elif isinstance(thing, bool):
return '"%s"' % str(thing)
elif isinstance(thing, int):
return str(thing)
else:
return '"%s"' % str(thing)
@llnl.util.lang.key_ordering
class AspFunction(AspObject):
__slots__ = ["name", "args"]
def __init__(self, name, args=None):
self.name = name
self.args = () if args is None else tuple(args)
def _cmp_key(self):
return self.name, self.args
def __call__(self, *args):
"""Return a new instance of this function with added arguments.
Note that calls are additive, so you can do things like::
>>> attr = AspFunction("attr")
attr()
>>> attr("version")
attr("version")
>>> attr("version")("foo")
attr("version", "foo")
>>> v = AspFunction("attr", "version")
attr("version")
>>> v("foo", "bar")
attr("version", "foo", "bar")
"""
return AspFunction(self.name, self.args + args)
def symbol(self, positive=True):
def argify(arg):
if isinstance(arg, bool):
return clingo.String(str(arg))
elif isinstance(arg, int):
return clingo.Number(arg)
elif isinstance(arg, AspFunction):
return clingo.Function(arg.name, [argify(x) for x in arg.args], positive=positive)
else:
return clingo.String(str(arg))
return clingo.Function(self.name, [argify(arg) for arg in self.args], positive=positive)
def __str__(self):
return "%s(%s)" % (self.name, ", ".join(str(_id(arg)) for arg in self.args))
def __repr__(self):
return str(self)
class AspFunctionBuilder:
def __getattr__(self, name):
return AspFunction(name)
fn = AspFunctionBuilder()
TransformFunction = Callable[[spack.spec.Spec, List[AspFunction]], List[AspFunction]]
def remove_node(spec: spack.spec.Spec, facts: List[AspFunction]) -> List[AspFunction]:
"""Transformation that removes all "node" and "virtual_node" from the input list of facts."""
return list(filter(lambda x: x.args[0] not in ("node", "virtual_node"), facts))
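
Since AspFunction and the fn builder recur throughout the rest of this diff, a standalone, runnable distillation of their additive-call behavior may help (the clingo symbol conversion is stripped out):

class AspFunction:
    """Trimmed rendition of the class above."""

    def __init__(self, name, args=()):
        self.name, self.args = name, tuple(args)

    def __call__(self, *args):
        # calling never mutates; it returns a new function with appended args
        return AspFunction(self.name, self.args + args)

    def __str__(self):
        return "%s(%s)" % (self.name, ", ".join(repr(a) for a in self.args))

class AspFunctionBuilder:
    def __getattr__(self, name):
        return AspFunction(name)

fn = AspFunctionBuilder()
attr = fn.attr("node")
print(attr("zlib"))   # attr('node', 'zlib')
print(attr("cmake"))  # attr('node', 'cmake') -- the original is unchanged
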
@@ -411,7 +539,7 @@ def raise_if_unsat(self):
"""
Raise an appropriate error if the result is unsatisfiable.
The error is a SolverError, and includes the minimized cores
The error is an InternalConcretizerError, and includes the minimized cores
resulting from the solve, formatted to be human readable.
"""
if self.satisfiable:
@@ -422,7 +550,7 @@ def raise_if_unsat(self):
constraints = constraints[0]
conflicts = self.format_minimal_cores()
raise SolverError(constraints, conflicts=conflicts)
raise InternalConcretizerError(constraints, conflicts=conflicts)
@property
def specs(self):
@@ -435,10 +563,7 @@ def specs(self):
@property
def unsolved_specs(self):
"""List of tuples pairing abstract input specs that were not
solved with their associated candidate spec from the solver
(if the solve completed).
"""
"""List of abstract input specs that were not solved."""
if self._unsolved_specs is None:
self._compute_specs_from_answer_set()
return self._unsolved_specs
@@ -452,7 +577,7 @@ def specs_by_input(self):
def _compute_specs_from_answer_set(self):
if not self.satisfiable:
self._concrete_specs = []
self._unsolved_specs = list((x, None) for x in self.abstract_specs)
self._unsolved_specs = self.abstract_specs
self._concrete_specs_by_input = {}
return
@@ -473,22 +598,7 @@ def _compute_specs_from_answer_set(self):
self._concrete_specs.append(answer[node])
self._concrete_specs_by_input[input_spec] = answer[node]
else:
self._unsolved_specs.append((input_spec, candidate))
@staticmethod
def format_unsolved(unsolved_specs):
"""Create a message providing info on unsolved user specs and for
each one show the associated candidate spec from the solver (if
there is one).
"""
msg = "Unsatisfied input specs:"
for input_spec, candidate in unsolved_specs:
msg += f"\n\tInput spec: {str(input_spec)}"
if candidate:
msg += f"\n\tCandidate spec: {str(candidate)}"
else:
msg += "\n\t(No candidate specs from solver)"
return msg
self._unsolved_specs.append(input_spec)
def _normalize_packages_yaml(packages_yaml):
@@ -553,6 +663,72 @@ def _spec_with_default_name(spec_str, name):
return spec
def bootstrap_clingo():
global clingo, ASTType, parse_files
if not clingo:
import spack.bootstrap
with spack.bootstrap.ensure_bootstrap_configuration():
spack.bootstrap.ensure_core_dependencies()
import clingo
from clingo.ast import ASTType
try:
from clingo.ast import parse_files
except ImportError:
# older versions of clingo expose parse_files one namespace up
from clingo import parse_files
class NodeArgument(NamedTuple):
id: str
pkg: str
def intermediate_repr(sym):
"""Returns an intermediate representation of clingo models for Spack's spec builder.
Currently, transforms symbols from clingo models either to strings or to NodeArgument objects.
Returns:
This will turn a ``clingo.Symbol`` into a string or NodeArgument, or a sequence of
``clingo.Symbol`` objects into a tuple of those objects.
"""
# TODO: simplify this when we no longer have to support older clingo versions.
if isinstance(sym, (list, tuple)):
return tuple(intermediate_repr(a) for a in sym)
try:
if sym.name == "node":
return NodeArgument(
id=intermediate_repr(sym.arguments[0]), pkg=intermediate_repr(sym.arguments[1])
)
except RuntimeError:
# This happens when using clingo w/ CFFI and trying to access ".name" for symbols
# that are not functions
pass
if clingo_cffi:
# Clingo w/ CFFI will throw an exception on failure
try:
return sym.string
except RuntimeError:
return str(sym)
else:
return sym.string or str(sym)
def extract_args(model, predicate_name):
"""Extract the arguments to predicates with the provided name from a model.
Pull out all the predicates with name ``predicate_name`` from the model, and
return their intermediate representation.
"""
return [intermediate_repr(sym.arguments) for sym in model if sym.name == predicate_name]
class ErrorHandler:
def __init__(self, model):
self.model = model
@@ -654,7 +830,7 @@ def raise_if_errors(self):
if not initial_error_args:
return
error_causation = clingo().Control()
error_causation = clingo.Control()
parent_dir = pathlib.Path(__file__).parent
errors_lp = parent_dir / "error_messages.lp"
@@ -705,9 +881,54 @@ def __init__(self, cores=True):
cores (bool): whether to generate unsatisfiable cores for better
error reporting.
"""
bootstrap_clingo()
self.out = llnl.util.lang.Devnull()
self.cores = cores
# This attribute will be reset at each call to solve
# These attributes are part of the object, but will be reset
# at each call to solve
self.control = None
self.backend = None
self.assumptions = None
def title(self, name, char):
self.out.write("\n")
self.out.write("%" + (char * 76))
self.out.write("\n")
self.out.write("%% %s\n" % name)
self.out.write("%" + (char * 76))
self.out.write("\n")
def h1(self, name):
self.title(name, "=")
def h2(self, name):
self.title(name, "-")
def newline(self):
self.out.write("\n")
def fact(self, head):
"""ASP fact (a rule without a body).
Arguments:
head (AspFunction): ASP function to generate as fact
"""
symbol = head.symbol() if hasattr(head, "symbol") else head
# This is commented out to avoid evaluating str(symbol) when we have no stream
if not isinstance(self.out, llnl.util.lang.Devnull):
self.out.write(f"{str(symbol)}.\n")
atom = self.backend.add_atom(symbol)
# Only functions relevant for constructing bug reports for bad error messages
# are assumptions, and only when using cores.
choice = self.cores and symbol.name == "internal_error"
self.backend.add_rule([atom], [], choice=choice)
if choice:
self.assumptions.append(atom)
def solve(self, setup, specs, reuse=None, output=None, control=None, allow_deprecated=False):
"""Set up the input and solve for dependencies of ``specs``.
@@ -727,24 +948,49 @@ def solve(self, setup, specs, reuse=None, output=None, control=None, allow_depre
solve, and the internal statistics from clingo.
"""
output = output or DEFAULT_OUTPUT_CONFIGURATION
# allow solve method to override the output stream
if output.out is not None:
self.out = output.out
timer = spack.util.timer.Timer()
# Initialize the control object for the solver
self.control = control or default_clingo_control()
# set up the problem -- this generates facts and rules
self.assumptions = []
timer.start("setup")
asp_problem = setup.setup(specs, reuse=reuse, allow_deprecated=allow_deprecated)
if output.out is not None:
output.out.write(asp_problem)
if output.setup_only:
return Result(specs), None, None
with self.control.backend() as backend:
self.backend = backend
setup.setup(self, specs, reuse=reuse, allow_deprecated=allow_deprecated)
timer.stop("setup")
timer.start("load")
# Add the problem instance
self.control.add("base", [], asp_problem)
# Load the file itself
# read in the main ASP program and display logic -- these are
# handwritten, not generated, so we load them as resources
parent_dir = os.path.dirname(__file__)
# extract error messages from concretize.lp by inspecting its AST
with self.backend:
def visit(node):
if ast_type(node) == ASTType.Rule:
for term in node.body:
if ast_type(term) == ASTType.Literal:
if ast_type(term.atom) == ASTType.SymbolicAtom:
name = ast_sym(term.atom).name
if name == "internal_error":
arg = ast_sym(ast_sym(term.atom).arguments[0])
self.fact(AspFunction(name)(arg.string))
self.h1("Error messages")
path = os.path.join(parent_dir, "concretize.lp")
parse_files([path], visit)
# If we're only doing setup, just return an empty solve result
if output.setup_only:
return Result(specs), None, None
# Load the file itself
self.control.load(os.path.join(parent_dir, "concretize.lp"))
self.control.load(os.path.join(parent_dir, "heuristic.lp"))
if spack.config.CONFIG.get("concretizer:duplicates:strategy", "none") != "none":
@@ -770,12 +1016,12 @@ def on_model(model):
models.append((model.cost, model.symbols(shown=True, terms=True)))
solve_kwargs = {
"assumptions": setup.assumptions,
"assumptions": self.assumptions,
"on_model": on_model,
"on_core": cores.append,
}
if clingo_cffi():
if clingo_cffi:
solve_kwargs["on_unsat"] = cores.append
timer.start("solve")
@@ -823,13 +1069,6 @@ def on_model(model):
print("Statistics:")
pprint.pprint(self.control.statistics)
if result.unsolved_specs and setup.concretize_everything:
unsolved_str = Result.format_unsolved(result.unsolved_specs)
raise InternalConcretizerError(
"Internal Spack error: the solver completed but produced specs"
f" that do not satisfy the request.\n\t{unsolved_str}"
)
return result, timer, self.control.statistics
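
For readers unfamiliar with the clingo API being driven in this solve() method, the essential lifecycle is: add (or load) a program into a Control object, ground it, then solve with an on_model callback. A toy end-to-end run, assuming the clingo Python bindings are installed (the three-fact program is invented):

import clingo

program = "node(zlib). node(cmake). depends(cmake, zlib)."
ctl = clingo.Control()
ctl.add("base", [], program)   # same Control.add("base", [], ...) call as above
ctl.ground([("base", [])])
ctl.solve(on_model=lambda m: print(m.symbols(shown=True)))
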
@@ -903,7 +1142,6 @@ class SpackSolverSetup:
def __init__(self, tests=False):
self.gen = None # set by setup()
self.assumptions = []
self.declared_versions = collections.defaultdict(list)
self.possible_versions = collections.defaultdict(set)
self.deprecated_versions = collections.defaultdict(set)
@@ -1530,12 +1768,15 @@ def external_packages(self):
for local_idx, spec in enumerate(external_specs):
msg = "%s available as external when satisfying %s" % (spec.name, spec)
def external_imposition(input_spec, requirements):
return requirements + [
fn.attr("external_conditions_hold", input_spec.name, local_idx)
]
def external_imposition(input_spec, _):
return [fn.attr("external_conditions_hold", input_spec.name, local_idx)]
self.condition(spec, spec, msg=msg, transform_imposed=external_imposition)
self.condition(
spec,
spack.spec.Spec(spec.name),
msg=msg,
transform_imposed=external_imposition,
)
self.possible_versions[spec.name].add(spec.version)
self.gen.newline()
@@ -1640,7 +1881,36 @@ def _spec_clauses(
"""
clauses = []
f = _Body if body else _Head
# TODO: do this with consistent suffixes.
class Head:
node = fn.attr("node")
virtual_node = fn.attr("virtual_node")
node_platform = fn.attr("node_platform_set")
node_os = fn.attr("node_os_set")
node_target = fn.attr("node_target_set")
variant_value = fn.attr("variant_set")
node_compiler = fn.attr("node_compiler_set")
node_compiler_version = fn.attr("node_compiler_version_set")
node_flag = fn.attr("node_flag_set")
node_flag_source = fn.attr("node_flag_source")
node_flag_propagate = fn.attr("node_flag_propagate")
variant_propagation_candidate = fn.attr("variant_propagation_candidate")
class Body:
node = fn.attr("node")
virtual_node = fn.attr("virtual_node")
node_platform = fn.attr("node_platform")
node_os = fn.attr("node_os")
node_target = fn.attr("node_target")
variant_value = fn.attr("variant_value")
node_compiler = fn.attr("node_compiler")
node_compiler_version = fn.attr("node_compiler_version")
node_flag = fn.attr("node_flag")
node_flag_source = fn.attr("node_flag_source")
node_flag_propagate = fn.attr("node_flag_propagate")
variant_propagation_candidate = fn.attr("variant_propagation_candidate")
f = Body if body else Head
if spec.name:
clauses.append(f.node(spec.name) if not spec.virtual else f.virtual_node(spec.name))
@@ -2236,11 +2506,12 @@ def define_concrete_input_specs(self, specs, possible):
def setup(
self,
driver: PyclingoDriver,
specs: Sequence[spack.spec.Spec],
*,
reuse: Optional[List[spack.spec.Spec]] = None,
allow_deprecated: bool = False,
) -> str:
):
"""Generate an ASP program with relevant constraints for specs.
This calls methods on the solve driver to set up the problem with
@@ -2248,6 +2519,7 @@ def setup(
specs, as well as constraints from the specs themselves.
Arguments:
driver: driver instance of this solve
specs: list of Specs to solve
reuse: list of concrete specs that can be reused
allow_deprecated: if True adds deprecated versions into the solve
@@ -2273,7 +2545,9 @@ def setup(
if node.namespace is not None:
self.explicitly_required_namespaces[node.name] = node.namespace
self.gen = ProblemInstanceBuilder()
# driver is used by all the functions below to add facts and
# rules to generate an ASP program.
self.gen = driver
if not allow_deprecated:
self.gen.fact(fn.deprecated_versions_not_allowed())
@@ -2377,29 +2651,6 @@ def setup(
self.gen.h1("Target Constraints")
self.define_target_constraints()
self.gen.h1("Internal errors")
self.internal_errors()
return self.gen.value()
def internal_errors(self):
parent_dir = os.path.dirname(__file__)
def visit(node):
if ast_type(node) == clingo().ast.ASTType.Rule:
for term in node.body:
if ast_type(term) == clingo().ast.ASTType.Literal:
if ast_type(term.atom) == clingo().ast.ASTType.SymbolicAtom:
name = ast_sym(term.atom).name
if name == "internal_error":
arg = ast_sym(ast_sym(term.atom).arguments[0])
symbol = AspFunction(name)(arg.string)
self.assumptions.append((parse_term(str(symbol)), True))
self.gen.asp_problem.append(f"{{ {symbol} }}.\n")
path = os.path.join(parent_dir, "concretize.lp")
parse_files([path], visit)
def define_runtime_constraints(self):
"""Define the constraints to be imposed on the runtimes"""
recorder = RuntimePropertyRecorder(self)
@@ -2530,83 +2781,6 @@ def pkg_class(self, pkg_name: str) -> typing.Type["spack.package_base.PackageBas
return spack.repo.PATH.get_pkg_class(request)
class _Head:
"""ASP functions used to express spec clauses in the HEAD of a rule"""
node = fn.attr("node")
virtual_node = fn.attr("virtual_node")
node_platform = fn.attr("node_platform_set")
node_os = fn.attr("node_os_set")
node_target = fn.attr("node_target_set")
variant_value = fn.attr("variant_set")
node_compiler = fn.attr("node_compiler_set")
node_compiler_version = fn.attr("node_compiler_version_set")
node_flag = fn.attr("node_flag_set")
node_flag_source = fn.attr("node_flag_source")
node_flag_propagate = fn.attr("node_flag_propagate")
variant_propagation_candidate = fn.attr("variant_propagation_candidate")
class _Body:
"""ASP functions used to express spec clauses in the BODY of a rule"""
node = fn.attr("node")
virtual_node = fn.attr("virtual_node")
node_platform = fn.attr("node_platform")
node_os = fn.attr("node_os")
node_target = fn.attr("node_target")
variant_value = fn.attr("variant_value")
node_compiler = fn.attr("node_compiler")
node_compiler_version = fn.attr("node_compiler_version")
node_flag = fn.attr("node_flag")
node_flag_source = fn.attr("node_flag_source")
node_flag_propagate = fn.attr("node_flag_propagate")
variant_propagation_candidate = fn.attr("variant_propagation_candidate")
class ProblemInstanceBuilder:
"""Provides an interface to construct a problem instance.
Once all the facts and rules have been added, the problem instance can be retrieved with:
>>> builder = ProblemInstanceBuilder()
>>> ...
>>> problem_instance = builder.value()
The problem instance can be added directly to the "control" structure of clingo.
"""
def __init__(self):
self.asp_problem = []
def fact(self, atom: AspFunction) -> None:
symbol = atom.symbol() if hasattr(atom, "symbol") else atom
self.asp_problem.append(f"{str(symbol)}.\n")
def append(self, rule: str) -> None:
self.asp_problem.append(rule)
def title(self, header: str, char: str) -> None:
self.asp_problem.append("\n")
self.asp_problem.append("%" + (char * 76))
self.asp_problem.append("\n")
self.asp_problem.append(f"% {header}\n")
self.asp_problem.append("%" + (char * 76))
self.asp_problem.append("\n")
def h1(self, header: str) -> None:
self.title(header, "=")
def h2(self, header: str) -> None:
self.title(header, "-")
def newline(self):
self.asp_problem.append("\n")
def value(self) -> str:
return "".join(self.asp_problem)
class RequirementParser:
"""Parses requirements from package.py files and configuration, and returns rules."""
@@ -2914,7 +3088,9 @@ def consume_facts(self):
self._setup.gen.h2("Runtimes: rules")
self._setup.gen.newline()
for rule in self.rules:
self._setup.gen.append(rule)
if not isinstance(self._setup.gen.out, llnl.util.lang.Devnull):
self._setup.gen.out.write(rule)
self._setup.gen.control.add("base", [], rule)
self._setup.gen.h2("Runtimes: conditions")
for runtime_pkg in spack.repo.PATH.packages_with_tags("runtime"):
@@ -3454,13 +3630,15 @@ def solve_in_rounds(
if not result.satisfiable or not result.specs:
break
input_specs = list(x for (x, y) in result.unsolved_specs)
input_specs = result.unsolved_specs
for spec in result.specs:
reusable_specs.extend(spec.traverse())
class UnsatisfiableSpecError(spack.error.UnsatisfiableSpecError):
"""There was an issue with the spec that was requested (i.e. a user error)."""
"""
Subclass for new constructor signature for new concretizer
"""
def __init__(self, msg):
super(spack.error.UnsatisfiableSpecError, self).__init__(msg)
@@ -3470,21 +3648,8 @@ def __init__(self, msg):
class InternalConcretizerError(spack.error.UnsatisfiableSpecError):
"""Errors that indicate a bug in Spack."""
def __init__(self, msg):
super(spack.error.UnsatisfiableSpecError, self).__init__(msg)
self.provided = None
self.required = None
self.constraint_type = None
class SolverError(InternalConcretizerError):
"""For cases where the solver is unable to produce a solution.
Such cases are unexpected because we allow for solutions with errors,
so for example user specs that are over-constrained should still
get a solution.
"""
Subclass for new constructor signature for new concretizer
"""
def __init__(self, provided, conflicts):
@@ -3497,7 +3662,7 @@ def __init__(self, provided, conflicts):
if conflicts:
msg += ", errors are:" + "".join([f"\n {conflict}" for conflict in conflicts])
super().__init__(msg)
super(spack.error.UnsatisfiableSpecError, self).__init__(msg)
self.provided = provided

View File

@@ -698,14 +698,6 @@ requirement_group_satisfied(node(ID, Package), X) :-
activate_requirement(node(ID, Package), X),
requirement_group(Package, X).
% Do not impose requirements, if the conditional requirement is not active
do_not_impose(EffectID, node(ID, Package)) :-
trigger_condition_holds(TriggerID, node(ID, Package)),
pkg_fact(Package, condition_trigger(ConditionID, TriggerID)),
pkg_fact(Package, condition_effect(ConditionID, EffectID)),
requirement_group_member(ConditionID, Package, RequirementID),
not activate_requirement(node(ID, Package), RequirementID).
% When we have a required provider, we need to ensure that the provider/2 facts respect
% the requirement. This is particularly important for packages that could provide multiple
% virtuals independently

View File

@@ -1,272 +0,0 @@
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Low-level wrappers around clingo API."""
import importlib
import pathlib
from types import ModuleType
from typing import Any, Callable, NamedTuple, Optional, Tuple, Union
from llnl.util import lang
def _ast_getter(*names: str) -> Callable[[Any], Any]:
"""Helper to retrieve AST attributes from different versions of the clingo API"""
def getter(node):
for name in names:
result = getattr(node, name, None)
if result:
return result
raise KeyError(f"node has no such keys: {names}")
return getter
ast_type = _ast_getter("ast_type", "type")
ast_sym = _ast_getter("symbol", "term")
class AspObject:
"""Object representing a piece of ASP code."""
def _id(thing: Any) -> Union[str, AspObject]:
"""Quote string if needed for it to be a valid identifier."""
if isinstance(thing, AspObject):
return thing
elif isinstance(thing, bool):
return f'"{str(thing)}"'
elif isinstance(thing, int):
return str(thing)
else:
return f'"{str(thing)}"'
@lang.key_ordering
class AspFunction(AspObject):
"""A term in the ASP logic program"""
__slots__ = ["name", "args"]
def __init__(self, name: str, args: Optional[Tuple[Any, ...]] = None) -> None:
self.name = name
self.args = () if args is None else tuple(args)
def _cmp_key(self) -> Tuple[str, Optional[Tuple[Any, ...]]]:
return self.name, self.args
def __call__(self, *args: Any) -> "AspFunction":
"""Return a new instance of this function with added arguments.
Note that calls are additive, so you can do things like::
>>> attr = AspFunction("attr")
attr()
>>> attr("version")
attr("version")
>>> attr("version")("foo")
attr("version", "foo")
>>> v = AspFunction("attr", "version")
attr("version")
>>> v("foo", "bar")
attr("version", "foo", "bar")
"""
return AspFunction(self.name, self.args + args)
def _argify(self, arg: Any) -> Any:
"""Turn the argument into an appropriate clingo symbol"""
if isinstance(arg, bool):
return clingo().String(str(arg))
elif isinstance(arg, int):
return clingo().Number(arg)
elif isinstance(arg, AspFunction):
return clingo().Function(arg.name, [self._argify(x) for x in arg.args], positive=True)
return clingo().String(str(arg))
def symbol(self):
"""Return a clingo symbol for this function"""
return clingo().Function(
self.name, [self._argify(arg) for arg in self.args], positive=True
)
def __str__(self) -> str:
return f"{self.name}({', '.join(str(_id(arg)) for arg in self.args)})"
def __repr__(self) -> str:
return str(self)
class _AspFunctionBuilder:
def __getattr__(self, name):
return AspFunction(name)
#: Global AspFunction builder
fn = _AspFunctionBuilder()
_CLINGO_MODULE: Optional[ModuleType] = None
def clingo() -> ModuleType:
"""Lazy imports the Python module for clingo, and returns it."""
if _CLINGO_MODULE is not None:
return _CLINGO_MODULE
try:
clingo_mod = importlib.import_module("clingo")
# Make sure we didn't import an empty module
_ensure_clingo_or_raise(clingo_mod)
except ImportError:
clingo_mod = None
if clingo_mod is not None:
return _set_clingo_module_cache(clingo_mod)
clingo_mod = _bootstrap_clingo()
return _set_clingo_module_cache(clingo_mod)
def _set_clingo_module_cache(clingo_mod: ModuleType) -> ModuleType:
"""Sets the global cache to the lazy imported clingo module"""
global _CLINGO_MODULE
importlib.import_module("clingo.ast")
_CLINGO_MODULE = clingo_mod
return clingo_mod
def _ensure_clingo_or_raise(clingo_mod: ModuleType) -> None:
"""Ensures the clingo module can access expected attributes, otherwise raises an error."""
# These are imports that may be problematic at top level (circular imports). They are used
# only to provide exhaustive details when erroring due to a broken clingo module.
import spack.config
import spack.paths as sp
import spack.util.path as sup
try:
clingo_mod.Symbol
except AttributeError:
assert clingo_mod.__file__ is not None, "clingo installation is incomplete or invalid"
# Reaching this point indicates a broken clingo installation
# If Spack derived clingo, suggest user re-run bootstrap
# if non-spack, suggest user investigate installation
# assume Spack is not responsible for broken clingo
msg = (
f"Clingo installation at {clingo_mod.__file__} is incomplete or invalid."
"Please repair installation or re-install. "
"Alternatively, consider installing clingo via Spack."
)
# check whether Spack is responsible
if (
pathlib.Path(
sup.canonicalize_path(
spack.config.CONFIG.get("bootstrap:root", sp.default_user_bootstrap_path)
)
)
in pathlib.Path(clingo_mod.__file__).parents
):
# Spack is responsible for the broken clingo
msg = (
"Spack bootstrapped copy of Clingo is broken, "
"please re-run the bootstrapping process via command `spack bootstrap now`."
" If this issue persists, please file a bug at: github.com/spack/spack"
)
raise RuntimeError(
"Clingo installation may be broken or incomplete, "
"please verify clingo has been installed correctly"
"\n\nClingo does not provide symbol clingo.Symbol"
f"{msg}"
)
def clingo_cffi() -> bool:
"""Returns True if clingo uses the CFFI interface"""
return hasattr(clingo().Symbol, "_rep")
def _bootstrap_clingo() -> ModuleType:
"""Bootstraps the clingo module and returns it"""
import spack.bootstrap
with spack.bootstrap.ensure_bootstrap_configuration():
spack.bootstrap.ensure_core_dependencies()
clingo_mod = importlib.import_module("clingo")
return clingo_mod
def parse_files(*args, **kwargs):
"""Wrapper around clingo parse_files, that dispatches the function according
to clingo API version.
"""
clingo()
try:
return importlib.import_module("clingo.ast").parse_files(*args, **kwargs)
except (ImportError, AttributeError):
return clingo().parse_files(*args, **kwargs)
def parse_term(*args, **kwargs):
"""Wrapper around clingo parse_term, that dispatches the function according
to clingo API version.
"""
clingo()
try:
return importlib.import_module("clingo.symbol").parse_term(*args, **kwargs)
except (ImportError, AttributeError):
return clingo().parse_term(*args, **kwargs)
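A usage sketch for the two wrappers above (the term string is illustrative):

# With clingo >= 5.5 this dispatches to clingo.symbol.parse_term; older APIs
# expose parse_term on the top-level module instead.
sym = parse_term('node(0, "zlib")')
print(sym)  # e.g. node(0,"zlib")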
class NodeArgument(NamedTuple):
"""Represents a node in the DAG"""
id: str
pkg: str
def intermediate_repr(sym):
"""Returns an intermediate representation of clingo models for Spack's spec builder.
Currently, transforms symbols from clingo models either to strings or to NodeArgument objects.
    Returns:
        A string or ``NodeArgument`` for a single ``clingo.Symbol``, or a tuple of
        intermediate representations for a sequence of ``clingo.Symbol`` objects.
"""
# TODO: simplify this when we no longer have to support older clingo versions.
if isinstance(sym, (list, tuple)):
return tuple(intermediate_repr(a) for a in sym)
try:
if sym.name == "node":
return NodeArgument(
id=intermediate_repr(sym.arguments[0]), pkg=intermediate_repr(sym.arguments[1])
)
except RuntimeError:
# This happens when using clingo w/ CFFI and trying to access ".name" for symbols
# that are not functions
pass
if clingo_cffi():
# Clingo w/ CFFI will throw an exception on failure
try:
return sym.string
except RuntimeError:
return str(sym)
else:
return sym.string or str(sym)
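A sketch of the mapping performed above, reusing the parse_term wrapper (the
package name is illustrative): a "node" function symbol becomes a NodeArgument,
while other symbols become strings.

print(intermediate_repr(parse_term('node(0, "zlib")')))
# e.g. NodeArgument(id='0', pkg='zlib')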
def extract_args(model, predicate_name):
"""Extract the arguments to predicates with the provided name from a model.
Pull out all the predicates with name ``predicate_name`` from the model, and
return their intermediate representation.
"""
return [intermediate_repr(sym.arguments) for sym in model if sym.name == predicate_name]
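A usage sketch, with a plain list of symbols standing in for a clingo model (the
"attr" fact is illustrative):

model = [parse_term('attr("version", node(0, "zlib"), "1.2.13")')]
print(extract_args(model, "attr"))
# e.g. [('version', NodeArgument(id='0', pkg='zlib'), '1.2.13')]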

View File

@@ -1501,9 +1501,7 @@ def edge_attributes(self) -> str:
result = f"{deptypes_str} {virtuals_str}".strip()
return f"[{result}]"
def dependencies(
self, name=None, deptype: Union[dt.DepTypes, dt.DepFlag] = dt.ALL
) -> List["Spec"]:
def dependencies(self, name=None, deptype: Union[dt.DepTypes, dt.DepFlag] = dt.ALL):
"""Return a list of direct dependencies (nodes in the DAG).
Args:
@@ -1514,9 +1512,7 @@ def dependencies(
deptype = dt.canonicalize(deptype)
return [d.spec for d in self.edges_to_dependencies(name, depflag=deptype)]
def dependents(
self, name=None, deptype: Union[dt.DepTypes, dt.DepFlag] = dt.ALL
) -> List["Spec"]:
def dependents(self, name=None, deptype: Union[dt.DepTypes, dt.DepFlag] = dt.ALL):
"""Return a list of direct dependents (nodes in the DAG).
Args:
@@ -1640,23 +1636,23 @@ def _add_dependency(self, spec: "Spec", *, depflag: dt.DepFlag, virtuals: Tuple[
self.add_dependency_edge(spec, depflag=depflag, virtuals=virtuals)
return
# Keep the intersection of constraints when a dependency is added multiple times.
# The only restriction, currently, is keeping the same dependency type
# Keep the intersection of constraints when a dependency is added
# multiple times. Currently, we only allow identical edge types.
orig = self._dependencies[spec.name]
try:
dspec = next(dspec for dspec in orig if depflag == dspec.depflag)
except StopIteration:
edge_attrs = f"deptypes={dt.flag_to_chars(depflag).strip()}"
required_dep_str = f"^[{edge_attrs}] {str(spec)}"
current_deps = ", ".join(
dt.flag_to_chars(x.depflag) + " " + x.spec.short_spec for x in orig
)
raise DuplicateDependencyError(
f"{spec.name} is a duplicate dependency, with conflicting dependency types\n"
f"\t'{str(self)}' cannot depend on '{required_dep_str}'"
f"{self.short_spec} cannot depend on '{spec.short_spec}' multiple times.\n"
f"\tRequired: {dt.flag_to_chars(depflag)}\n"
f"\tDependency: {current_deps}"
)
try:
dspec.spec.constrain(spec)
dspec.update_virtuals(virtuals=virtuals)
except spack.error.UnsatisfiableSpecError:
raise DuplicateDependencyError(
f"Cannot depend on incompatible specs '{dspec.spec}' and '{spec}'"
@@ -2091,12 +2087,7 @@ def to_node_dict(self, hash=ht.dag_hash):
if hasattr(variant, "_patches_in_order_of_appearance"):
d["patches"] = variant._patches_in_order_of_appearance
if (
self._concrete
and hash.package_hash
and hasattr(self, "_package_hash")
and self._package_hash
):
if self._concrete and hash.package_hash and self._package_hash:
# We use the attribute here instead of `self.package_hash()` because this
            # should *always* be assigned at concretization time. We don't want to try
# to compute a package hash for concrete spec where a) the package might not

View File

@@ -199,11 +199,9 @@ def get_stage_root():
def _mirror_roots():
mirrors = spack.config.get("mirrors")
return [
(
sup.substitute_path_variables(root)
if root.endswith(os.sep)
else sup.substitute_path_variables(root) + os.sep
)
sup.substitute_path_variables(root)
if root.endswith(os.sep)
else sup.substitute_path_variables(root) + os.sep
for root in mirrors.values()
]
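A sketch of the normalization the comprehension performs (the helper name is
hypothetical, and variable substitution is elided):

import os

def _normalize_root(root: str) -> str:
    # Guarantee each mirror root ends with a trailing separator
    return root if root.endswith(os.sep) else root + os.sep

print(_normalize_root("/tmp/mirror"))  # /tmp/mirror/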

View File

@@ -16,7 +16,6 @@
import spack
import spack.binary_distribution
import spack.ci as ci
import spack.cmd.ci
import spack.config
import spack.environment as ev
import spack.hash_types as ht
@@ -2029,43 +2028,6 @@ def fake_download_and_extract_artifacts(url, work_dir):
assert expect_out in rep_out
@pytest.mark.parametrize(
"url_in,url_out",
[
(
"https://example.com/api/v4/projects/1/jobs/2/artifacts",
"https://example.com/api/v4/projects/1/jobs/2/artifacts",
),
(
"https://example.com/spack/spack/-/jobs/123456/artifacts/download",
"https://example.com/spack/spack/-/jobs/123456/artifacts/download",
),
(
"https://example.com/spack/spack/-/jobs/123456",
"https://example.com/spack/spack/-/jobs/123456/artifacts/download",
),
(
"https://example.com/spack/spack/-/jobs/////123456////?x=y#z",
"https://example.com/spack/spack/-/jobs/123456/artifacts/download",
),
],
)
def test_reproduce_build_url_validation(url_in, url_out):
assert spack.cmd.ci._gitlab_artifacts_url(url_in) == url_out
def test_reproduce_build_url_validation_fails():
"""Wrong URLs should cause an exception"""
with pytest.raises(SystemExit):
ci_cmd("reproduce-build", "example.com/spack/spack/-/jobs/123456/artifacts/download")
with pytest.raises(SystemExit):
ci_cmd("reproduce-build", "https://example.com/spack/spack/-/issues")
with pytest.raises(SystemExit):
ci_cmd("reproduce-build", "https://example.com/spack/spack/-")
@pytest.mark.parametrize(
"subcmd", [(""), ("generate"), ("rebuild-index"), ("rebuild"), ("reproduce-build")]
)

View File

@@ -215,44 +215,6 @@ def test_dev_build_env(tmpdir, install_mockery, mutable_mock_env_path):
assert f.read() == spec.package.replacement_string
def test_dev_build_env_with_vars(tmpdir, install_mockery, mutable_mock_env_path, monkeypatch):
"""Test Spack does dev builds for packages in develop section of env (path with variables)."""
# setup dev-build-test-install package for dev build
build_dir = tmpdir.mkdir("build")
spec = spack.spec.Spec(f"dev-build-test-install@0.0.0 dev_path={build_dir}")
spec.concretize()
# store the build path in an environment variable that will be used in the environment
monkeypatch.setenv("CUSTOM_BUILD_PATH", build_dir)
with build_dir.as_cwd(), open(spec.package.filename, "w") as f:
f.write(spec.package.original_string)
# setup environment
envdir = tmpdir.mkdir("env")
with envdir.as_cwd():
with open("spack.yaml", "w") as f:
f.write(
"""\
spack:
specs:
- dev-build-test-install@0.0.0
develop:
dev-build-test-install:
spec: dev-build-test-install@0.0.0
path: $CUSTOM_BUILD_PATH
"""
)
env("create", "test", "./spack.yaml")
with ev.read("test"):
install()
assert spec.package.filename in os.listdir(spec.prefix)
with open(os.path.join(spec.prefix, spec.package.filename), "r") as f:
assert f.read() == spec.package.replacement_string
def test_dev_build_env_version_mismatch(tmpdir, install_mockery, mutable_mock_env_path):
"""Test Spack constraints concretization by develop specs."""
# setup dev-build-test-install package for dev build

View File

@@ -9,7 +9,6 @@
import llnl.util.filesystem as fs
import spack.config
import spack.environment as ev
import spack.spec
from spack.main import SpackCommand
@@ -22,7 +21,7 @@
@pytest.mark.usefixtures("mutable_mock_env_path", "mock_packages", "mock_fetch", "mutable_config")
class TestDevelop:
def check_develop(self, env, spec, path=None, build_dir=None):
def check_develop(self, env, spec, path=None):
path = path or spec.name
# check in memory representation
@@ -42,12 +41,6 @@ def check_develop(self, env, spec, path=None, build_dir=None):
else:
assert yaml_entry["path"] == path
if build_dir is not None:
scope = env.scope_name
assert build_dir == spack.config.get(
"packages:{}:package_attributes:build_directory".format(spec.name), scope
)
def test_develop_no_path_no_clone(self):
env("create", "test")
with ev.read("test") as e:
@@ -79,12 +72,6 @@ def test_develop_no_args(self):
develop()
self.check_develop(e, spack.spec.Spec("mpich@=1.0"))
def test_develop_build_directory(self):
env("create", "test")
with ev.read("test") as e:
develop("-b", "test_build_dir", "mpich@1.0")
self.check_develop(e, spack.spec.Spec("mpich@=1.0"), None, "test_build_dir")
def test_develop_twice(self):
env("create", "test")
with ev.read("test") as e:

View File

@@ -1471,8 +1471,8 @@ def test_env_view_fails_dir_file(tmpdir, mock_packages, mock_stage, mock_fetch,
view_dir = tmpdir.join("view")
env("create", "--with-view=%s" % view_dir, "test")
with ev.read("test"):
add("view-file")
add("view-dir")
add("view-dir-file")
add("view-dir-dir")
with pytest.raises(
llnl.util.link_tree.MergeConflictSummary, match=os.path.join("bin", "x")
):
@@ -1486,8 +1486,8 @@ def test_env_view_succeeds_symlinked_dir_file(
view_dir = tmpdir.join("view")
env("create", "--with-view=%s" % view_dir, "test")
with ev.read("test"):
add("view-symlinked-dir")
add("view-dir")
add("view-dir-symlinked-dir")
add("view-dir-dir")
install()
x_dir = os.path.join(str(view_dir), "bin", "x")
assert os.path.exists(os.path.join(x_dir, "file_in_dir"))
@@ -2537,88 +2537,58 @@ def test_stack_view_no_activate_without_default(
assert viewdir not in shell
@pytest.mark.parametrize("include_views", [True, False, "split"])
def test_stack_view_multiple_views(
tmp_path,
mock_fetch,
mock_packages,
mock_archive,
install_mockery,
mutable_config,
include_views,
tmpdir, mock_fetch, mock_packages, mock_archive, install_mockery
):
"""Test multiple views as both included views (True), as both environment
views (False), or as one included and the other in the environment."""
# Write the view configuration and or manifest file
view_filename = tmp_path / "view.yaml"
base_content = """\
filename = str(tmpdir.join("spack.yaml"))
default_viewdir = str(tmpdir.join("default-view"))
combin_viewdir = str(tmpdir.join("combinatorial-view"))
with open(filename, "w") as f:
f.write(
"""\
spack:
definitions:
- packages: [mpileaks, cmake]
- compilers: ['%gcc', '%clang']
- compilers: ['%%gcc', '%%clang']
specs:
- matrix:
- [$packages]
- [$compilers]
"""
include_content = f" include:\n - {view_filename}\n"
view_line = " view:\n"
view:
default:
root: %s
select: ['%%gcc']
combinatorial:
root: %s
exclude: [callpath %%gcc]
projections:
'all': '{name}/{version}-{compiler.name}'"""
% (default_viewdir, combin_viewdir)
)
with tmpdir.as_cwd():
env("create", "test", "./spack.yaml")
with ev.read("test"):
install()
comb_dir = tmp_path / "combinatorial-view"
comb_view = """\
{0}combinatorial:
{0} root: {1}
{0} exclude: [callpath%gcc]
{0} projections:
"""
shell = env("activate", "--sh", "test")
assert "PATH" in shell
assert os.path.join(default_viewdir, "bin") in shell
projection = " 'all': '{name}/{version}-{compiler.name}'"
default_dir = tmp_path / "default-view"
default_view = """\
{0}default:
{0} root: {1}
{0} select: ['%gcc']
"""
content = "spack:\n"
indent = " "
if include_views is True:
# Include both the gcc and combinatorial views
view = "view:\n" + default_view.format(indent, str(default_dir))
view += comb_view.format(indent, str(comb_dir)) + indent + projection
view_filename.write_text(view)
content += include_content + base_content
elif include_views == "split":
# Include the gcc view and inline the combinatorial view
view = "view:\n" + default_view.format(indent, str(default_dir))
view_filename.write_text(view)
content += include_content + base_content + view_line
indent += " "
content += comb_view.format(indent, str(comb_dir)) + indent + projection
else:
# Inline both the gcc and combinatorial views in the environment.
indent += " "
content += base_content + view_line
content += default_view.format(indent, str(default_dir))
content += comb_view.format(indent, str(comb_dir)) + indent + projection
filename = tmp_path / ev.manifest_name
filename.write_text(content)
env("create", "test", str(filename))
with ev.read("test"):
install()
with ev.read("test") as e:
assert os.path.exists(str(default_dir / "bin"))
for spec in e._get_environment_specs():
spec_subdir = f"{spec.version}-{spec.compiler.name}"
comb_spec_dir = str(comb_dir / spec.name / spec_subdir)
test = ev.read("test")
for spec in test._get_environment_specs():
if not spec.satisfies("callpath%gcc"):
assert os.path.exists(comb_spec_dir)
assert os.path.exists(
os.path.join(
combin_viewdir, spec.name, "%s-%s" % (spec.version, spec.compiler.name)
)
)
else:
assert not os.path.exists(comb_spec_dir)
assert not os.path.exists(
os.path.join(
combin_viewdir, spec.name, "%s-%s" % (spec.version, spec.compiler.name)
)
)
def test_env_activate_sh_prints_shell_output(tmpdir, mock_stage, mock_fetch, install_mockery):
@@ -2731,6 +2701,15 @@ def test_concretize_user_specs_together():
assert all("mpich" not in spec for _, spec in e.concretized_specs())
def test_cant_install_single_spec_when_concretizing_together():
e = ev.create("coconcretization")
e.unify = True
with pytest.raises(ev.SpackEnvironmentError, match=r"cannot install"):
e.concretize_and_add("zlib")
e.install_all()
def test_duplicate_packages_raise_when_concretizing_together():
e = ev.create("coconcretization")
e.unify = True
@@ -3735,191 +3714,3 @@ def test_environment_created_from_lockfile_has_view(mock_packages, temporary_sto
# Make sure the view was created
with ev.Environment(env_b) as e:
assert os.path.isdir(e.view_path_default)
def test_env_view_disabled(tmp_path, mutable_mock_env_path):
"""Ensure an inlined view being disabled means not even the default view
is created (since the case doesn't appear to be covered in this module)."""
spack_yaml = tmp_path / ev.manifest_name
spack_yaml.write_text(
"""\
spack:
specs:
- mpileaks
view: false
"""
)
env("create", "disabled", str(spack_yaml))
with ev.read("disabled") as e:
e.concretize()
assert len(e.views) == 0
assert not os.path.exists(e.view_path_default)
@pytest.mark.parametrize("first", ["false", "true", "custom"])
def test_env_include_mixed_views(tmp_path, mutable_mock_env_path, mutable_config, first):
"""Ensure including path and boolean views in different combinations result
in the creation of only the first view if it is not disabled."""
false_yaml = tmp_path / "false-view.yaml"
false_yaml.write_text("view: false\n")
true_yaml = tmp_path / "true-view.yaml"
true_yaml.write_text("view: true\n")
custom_name = "my-test-view"
custom_view = tmp_path / custom_name
custom_yaml = tmp_path / "custom-view.yaml"
custom_yaml.write_text(
f"""
view:
{custom_name}:
root: {custom_view}
"""
)
if first == "false":
order = [false_yaml, true_yaml, custom_yaml]
elif first == "true":
order = [true_yaml, custom_yaml, false_yaml]
else:
order = [custom_yaml, false_yaml, true_yaml]
includes = [f" - {yaml}\n" for yaml in order]
spack_yaml = tmp_path / ev.manifest_name
spack_yaml.write_text(
f"""\
spack:
include:
{''.join(includes)}
specs:
- mpileaks
packages:
mpileaks:
compiler: [gcc]
"""
)
env("create", "test", str(spack_yaml))
with ev.read("test") as e:
concretize()
        # Only the first included view should be created, provided it does not disable views
        assert len(e.views) == (0 if first == "false" else 1)
if first == "true":
assert os.path.exists(e.view_path_default)
else:
assert not os.path.exists(e.view_path_default)
if first == "custom":
assert os.path.exists(custom_view)
else:
assert not os.path.exists(custom_view)
def test_stack_view_multiple_views_same_name(
tmp_path, mock_fetch, mock_packages, mock_archive, install_mockery, mutable_config
):
"""Test multiple views with the same name combine settings with precedence
given to the options in spack.yaml."""
# Write the view configuration and or manifest file
view_filename = tmp_path / "view.yaml"
default_dir = tmp_path / "default-view"
default_view = f"""\
view:
default:
root: {default_dir}
select: ['%gcc']
projections:
all: '{{name}}/{{version}}-{{compiler.name}}'
"""
view_filename.write_text(default_view)
view_dir = tmp_path / "view"
content = f"""\
spack:
include:
- {view_filename}
definitions:
- packages: [mpileaks, cmake]
- compilers: ['%gcc', '%clang']
specs:
- matrix:
- [$packages]
- [$compilers]
view:
default:
root: {view_dir}
exclude: ['cmake']
projections:
all: '{{name}}/{{compiler.name}}-{{version}}'
"""
filename = tmp_path / ev.manifest_name
filename.write_text(content)
env("create", "test", str(filename))
with ev.read("test"):
install()
with ev.read("test") as e:
# the view root in the included view should NOT exist
assert not os.path.exists(str(default_dir))
for spec in e._get_environment_specs():
# no specs will exist in the included view projection
included_spec_subdir = f"{spec.version}-{spec.compiler.name}"
included_spec_dir = str(view_dir / spec.name / included_spec_subdir)
assert not os.path.exists(included_spec_dir)
# only specs compiled with %gcc (selected in the included view) that
# are also not cmake (excluded in the environment view) should exist
env_spec_subdir = f"{spec.compiler.name}-{spec.version}"
env_spec_dir = str(view_dir / spec.name / env_spec_subdir)
if spec.satisfies("cmake") or spec.satisfies("%clang"):
assert not os.path.exists(env_spec_dir)
else:
assert os.path.exists(env_spec_dir)
def test_env_view_resolves_identical_file_conflicts(tmp_path, install_mockery, mock_fetch):
"""When files clash in a view, but refer to the same file on disk (for example, the dependent
symlinks to a file in the dependency at the same relative path), Spack links the first regular
file instead of symlinks. This is important for copy type views where we need the underlying
file to be copied instead of the symlink (when a symlink would be copied, it would become a
self-referencing symlink after relocation). The test uses a symlink type view though, since
that keeps track of the original file path."""
with ev.create("env", with_view=tmp_path / "view") as e:
add("view-resolve-conflict-top")
install()
top = e.matching_spec("view-resolve-conflict-top").prefix
bottom = e.matching_spec("view-file").prefix
# In this example we have `./bin/x` in 3 prefixes, two links, one regular file. We expect the
# regular file to be linked into the view. There are also 2 links at `./bin/y`, but no regular
# file, so we expect standard behavior: first entry is linked into the view.
# view-resolve-conflict-top/bin/
# x -> view-file/bin/x
# y -> view-resolve-conflict-middle/bin/y # expect this y to be linked
# view-resolve-conflict-middle/bin/
# x -> view-file/bin/x
# y -> view-file/bin/x
# view-file/bin/
# x # expect this x to be linked
assert os.readlink(tmp_path / "view" / "bin" / "x") == bottom.bin.x
assert os.readlink(tmp_path / "view" / "bin" / "y") == top.bin.y
def test_env_view_ignores_different_file_conflicts(tmp_path, install_mockery, mock_fetch):
"""Test that file-file conflicts for two unique files in environment views are ignored, and
that the dependent's file is linked into the view, not the dependency's file."""
with ev.create("env", with_view=tmp_path / "view") as e:
add("view-ignore-conflict")
install()
prefix_dependent = e.matching_spec("view-ignore-conflict").prefix
# The dependent's file is linked into the view
assert os.readlink(tmp_path / "view" / "bin" / "x") == prefix_dependent.bin.x

View File

@@ -6,11 +6,9 @@
import pytest
import spack.deptypes as dt
import spack.environment as ev
import spack.main
import spack.spec
import spack.traverse
gc = spack.main.SpackCommand("gc")
add = spack.main.SpackCommand("add")
@@ -21,8 +19,11 @@
@pytest.mark.db
def test_gc_without_build_dependency(config, mutable_database):
assert "There are no unused specs." in gc("-yb")
assert "There are no unused specs." in gc("-y")
output = gc("-yb")
assert "There are no unused specs." in output
output = gc("-y")
assert "There are no unused specs." in output
@pytest.mark.db
@@ -31,9 +32,11 @@ def test_gc_with_build_dependency(config, mutable_database):
s.concretize()
s.package.do_install(fake=True, explicit=True)
assert "There are no unused specs." in gc("-yb")
assert "Successfully uninstalled cmake" in gc("-y")
assert "There are no unused specs." in gc("-y")
output = gc("-yb")
assert "There are no unused specs." in output
output = gc("-y")
assert "Successfully uninstalled cmake" in output
@pytest.mark.db
@@ -69,39 +72,34 @@ def test_gc_with_build_dependency_in_environment(config, mutable_database, mutab
with e:
assert mutable_database.query_local("simple-inheritance")
fst = gc("-y")
assert "Restricting garbage collection" in fst
assert "Successfully uninstalled cmake" in fst
snd = gc("-y")
assert "Restricting garbage collection" in snd
assert "There are no unused specs" in snd
output = gc("-y")
assert "Restricting garbage collection" in output
assert "Successfully uninstalled cmake" in output
@pytest.mark.db
def test_gc_except_any_environments(config, mutable_database, mutable_mock_env_path):
"""Tests whether the garbage collector can remove all specs except those still needed in some
environment (needed in the sense of roots + link/run deps)."""
s = spack.spec.Spec("simple-inheritance")
s.concretize()
s.package.do_install(fake=True, explicit=True)
assert mutable_database.query_local("zmpi")
e = ev.create("test_gc")
e.add("simple-inheritance")
e.concretize()
e.install_all(fake=True)
e.write()
assert mutable_database.query_local("simple-inheritance")
assert not e.all_matching_specs(spack.spec.Spec("zmpi"))
with e:
add("simple-inheritance")
install()
assert mutable_database.query_local("simple-inheritance")
output = gc("-yE")
assert "Restricting garbage collection" not in output
assert "Successfully uninstalled zmpi" in output
assert not mutable_database.query_local("zmpi")
# All runtime specs in this env should still be installed.
assert all(
s.installed
for s in spack.traverse.traverse_nodes(e.concrete_roots(), deptype=dt.LINK | dt.RUN)
)
with e:
output = gc("-yE")
assert "Restricting garbage collection" not in output
assert "There are no unused specs" not in output
@pytest.mark.db

View File

@@ -1,119 +0,0 @@
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import gzip
import os
import sys
import tempfile
from contextlib import contextmanager
from io import BytesIO, TextIOWrapper
import pytest
import spack
from spack.main import SpackCommand
logs = SpackCommand("logs")
install = SpackCommand("install")
@contextmanager
def stdout_as_buffered_text_stream():
"""Attempt to simulate "typical" interface for stdout when user is
running Spack/Python from terminal. "spack log" should not be run
for all possible cases of what stdout might look like, in
particular some programmatic redirections of stdout like StringIO
are not meant to be supported by this command; more-generally,
mechanisms that depend on decoding binary output prior to write
are not supported for "spack log".
"""
original_stdout = sys.stdout
with tempfile.TemporaryFile(mode="w+b") as tf:
sys.stdout = TextIOWrapper(tf)
try:
yield tf
finally:
sys.stdout = original_stdout
def _rewind_collect_and_decode(rw_stream):
rw_stream.seek(0)
return rw_stream.read().decode("utf-8")
@pytest.fixture
def disable_capture(capfd):
with capfd.disabled():
yield
def test_logs_cmd_errors(install_mockery, mock_fetch, mock_archive, mock_packages):
spec = spack.spec.Spec("libelf").concretized()
assert not spec.installed
with pytest.raises(spack.main.SpackCommandError, match="is not installed or staged"):
logs("libelf")
with pytest.raises(spack.main.SpackCommandError, match="Too many specs"):
logs("libelf mpi")
install("libelf")
os.remove(spec.package.install_log_path)
with pytest.raises(spack.main.SpackCommandError, match="No logs are available"):
logs("libelf")
def _write_string_to_path(string, path):
"""Write a string to a file, preserving newline format in the string."""
with open(path, "wb") as f:
f.write(string.encode("utf-8"))
def test_dump_logs(install_mockery, mock_fetch, mock_archive, mock_packages, disable_capture):
"""Test that ``spack log`` can find (and print) the logs for partial
builds and completed installs.
    Also make sure that compressed logs are automatically decompressed.
    """
cmdline_spec = spack.spec.Spec("libelf")
concrete_spec = cmdline_spec.concretized()
    # Sanity check: make sure this test starts from the state we want (nothing installed)
assert not concrete_spec.installed
stage_log_content = "test_log stage output\nanother line"
installed_log_content = "test_log install output\nhere to test multiple lines"
with concrete_spec.package.stage:
_write_string_to_path(stage_log_content, concrete_spec.package.log_path)
with stdout_as_buffered_text_stream() as redirected_stdout:
spack.cmd.logs._logs(cmdline_spec, concrete_spec)
assert _rewind_collect_and_decode(redirected_stdout) == stage_log_content
install("libelf")
# Sanity check: make sure a path is recorded, regardless of whether
# it exists (if it does exist, we will overwrite it with content
# in this test)
assert concrete_spec.package.install_log_path
with gzip.open(concrete_spec.package.install_log_path, "wb") as compressed_file:
bstream = BytesIO(installed_log_content.encode("utf-8"))
compressed_file.writelines(bstream)
with stdout_as_buffered_text_stream() as redirected_stdout:
spack.cmd.logs._logs(cmdline_spec, concrete_spec)
assert _rewind_collect_and_decode(redirected_stdout) == installed_log_content
with concrete_spec.package.stage:
_write_string_to_path(stage_log_content, concrete_spec.package.log_path)
# We re-create the stage, but "spack log" should ignore that
# if the package is installed
with stdout_as_buffered_text_stream() as redirected_stdout:
spack.cmd.logs._logs(cmdline_spec, concrete_spec)
assert _rewind_collect_and_decode(redirected_stdout) == installed_log_content
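A hypothetical sketch of the behavior exercised above: read a log transparently
whether or not it was gzip-compressed.

import gzip

def read_log(path: str) -> str:
    try:
        with gzip.open(path, "rt", encoding="utf-8") as f:
            return f.read()  # compressed install log
    except gzip.BadGzipFile:
        with open(path, "r", encoding="utf-8") as f:
            return f.read()  # plain-text stage log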

View File

@@ -98,9 +98,13 @@ def test_url_list(mock_packages):
def test_url_summary(mock_packages):
"""Test the URL summary command."""
# test url_summary, the internal function that does the work
(total_urls, correct_names, correct_versions, name_count_dict, version_count_dict) = (
url_summary(None)
)
(
total_urls,
correct_names,
correct_versions,
name_count_dict,
version_count_dict,
) = url_summary(None)
assert 0 < correct_names <= sum(name_count_dict.values()) <= total_urls
assert 0 < correct_versions <= sum(version_count_dict.values()) <= total_urls

View File

@@ -191,7 +191,7 @@ def test_view_files_not_ignored(
pkg.do_install()
pkg.assert_installed(spec.prefix)
install("view-file") # Arbitrary package to add noise
install("view-dir-file") # Arbitrary package to add noise
viewpath = str(tmpdir.mkdir("view_{0}".format(cmd)))
@@ -205,7 +205,7 @@ def test_view_files_not_ignored(
prefix_in_view = viewpath
args = []
view(cmd, *(args + [viewpath, "view-not-ignored", "view-file"]))
view(cmd, *(args + [viewpath, "view-not-ignored", "view-dir-file"]))
pkg.assert_installed(prefix_in_view)
view("remove", viewpath, "view-not-ignored")

Some files were not shown because too many files have changed in this diff